Compare commits
202 Commits
217919fa77...main
| SHA1 | Author | Date | |
|---|---|---|---|
| 2a7881ac6e | |||
| 0950018b38 | |||
| bc796f4ee5 | |||
| 232ecdda41 | |||
| 32d13984a1 | |||
| eab7f2e21b | |||
| b6422fbf3e | |||
| 6dbd0c80e6 | |||
| 0c42a9ed04 | |||
| 95a6e54d06 | |||
| e26219989f | |||
| 5d33a35407 | |||
| d53572dc33 | |||
| cf1953d11f | |||
| 6f663eaee7 | |||
| ee65c6263a | |||
| 691b6b22ab | |||
| 11c80a16a3 | |||
| c366b44c54 | |||
| 92f80542e6 | |||
| 3a25e42a20 | |||
| 24763bf5a7 | |||
| 08f0397029 | |||
| 766e3a01b2 | |||
| 626e2e035d | |||
| cfd2321db2 | |||
| 1b715033ce | |||
| 81d1586501 | |||
| bd74c9e3e3 | |||
| 41228430cf | |||
| 6a4ba06fac | |||
| e5c3542d3f | |||
| 24516f1069 | |||
| 5383cdef60 | |||
| be5c3f7a34 | |||
| caa9922ff9 | |||
| 135f000c71 | |||
| d9e50a4235 | |||
| 5f6eb5a5cb | |||
| 41c77fca2e | |||
| 49621f3fb1 | |||
| 6df743b2e6 | |||
| edfefc0128 | |||
| b0185abefe | |||
| b9e54cbfd8 | |||
| 3f0bd783cd | |||
| fc8856c83f | |||
| bd09f3d943 | |||
| 1f434c3d67 | |||
| 4972a403df | |||
| 629708cdd0 | |||
| 560087a897 | |||
| 27f553b005 | |||
| ed7665248e | |||
| 736b8aedc0 | |||
| 3daa49ae6c | |||
| 5fb24188e1 | |||
| 54f972db17 | |||
| acd8b62382 | |||
| cc65e3d1ad | |||
| 70889ca955 | |||
| 4ad6d57271 | |||
| fe5de3d5c1 | |||
| 5a224c48c0 | |||
| d08fe31b1b | |||
| 4d69ed91c5 | |||
| c6ddd3e6c7 | |||
| 504185f31f | |||
| acd0cce3f8 | |||
| e14da4fc8d | |||
| c04d4fb618 | |||
| 57bc82703d | |||
| e6aa7ebed0 | |||
| c44b51d6ef | |||
| d4c48de780 | |||
| 8948f75d62 | |||
| d304877a83 | |||
| 9cec32ba3e | |||
| e8768dfad7 | |||
| cfc98819ab | |||
| bfc1c76fe2 | |||
| 39e9f35acb | |||
| 36987f59b9 | |||
| 931d0e06f4 | |||
| 741a4da878 | |||
| e28b78d0e6 | |||
| 163dc3698c | |||
| 818bd82e0f | |||
| 76c8bcbf2c | |||
| 00094b22c6 | |||
| 1e4d9acebe | |||
| b226aa3a35 | |||
| d913be9d2a | |||
| e9bb951d97 | |||
| 037ede2750 | |||
| 06a245d90a | |||
| 63d5fcaa13 | |||
| 020cb6baae | |||
| 6db8042ffe | |||
| d4f87c4044 | |||
| 055c376222 | |||
| 1cc5d049ea | |||
| b955c2697c | |||
| 9a8c1577af | |||
| 52b9b0e00e | |||
| 51ef2fa725 | |||
| 7d53babc84 | |||
| 00f4445924 | |||
| 1a91c051b5 | |||
| 48ca9d0a8b | |||
| f75d795215 | |||
| ac13f53124 | |||
| c9ccf5cd90 | |||
| a99bfb5a91 | |||
| 389d71b42f | |||
| 2985ef5561 | |||
| 4be8177683 | |||
| a675dcd2a4 | |||
| 127cd8a42c | |||
| 1b9f2d3915 | |||
| f095bf050b | |||
| b17718df9b | |||
| 5c3ddf7819 | |||
| c56d02a895 | |||
| bc98067871 | |||
| a085924f8a | |||
| 9fbdf793d0 | |||
| b14accbbe0 | |||
| 330239d2c3 | |||
| bf5a20882b | |||
| 44c6dd626a | |||
| 9153b0c750 | |||
| e18bbba4ce | |||
| 2870dd9dbc | |||
| cf2e7a0be7 | |||
| 82444cda02 | |||
| 1d25c8869f | |||
| fd277602c9 | |||
| 673777bc8d | |||
| 03af82d065 | |||
| 78e28a269d | |||
| ee05df26c4 | |||
| 96d9efdeed | |||
| 9f5183848b | |||
| 6f9dd108ef | |||
| 61bc307715 | |||
| c7f3ad981d | |||
| 0d60d46cae | |||
| 6947af10fe | |||
| fe54f55f47 | |||
| f71ca92e85 | |||
| 7cca7e40c2 | |||
| 5db2a7501b | |||
| 85e0945c9d | |||
| efc2773199 | |||
| 1d9a1c76d2 | |||
| 3e3e0154fa | |||
| e73498cc60 | |||
| 0f4025369c | |||
| 7d3670e951 | |||
| 09682f5836 | |||
| db11c62d2f | |||
| 7346f1d5b7 | |||
| 358896c7d5 | |||
| 1d10044d46 | |||
| 8d98056375 | |||
| 4aafed3d31 | |||
| 3bd2fb7c1f | |||
| 3b6cc2903d | |||
| 6abaa96fba | |||
| f2d9bedcc7 | |||
| 1c106a4ff2 | |||
| 3ab5b223a8 | |||
| 7cfb6cf001 | |||
| d2fe7f12ab | |||
| 64347edabc | |||
| 8261050943 | |||
| a2da5081ea | |||
| 648d86970f | |||
| 278f422206 | |||
| ff59ac1eff | |||
| 7eb9e2dcad | |||
| c81f7ce1b7 | |||
| 137e8ce11c | |||
| e0b80cae38 | |||
| e8bb014874 | |||
| 4c75e08056 | |||
| f1b3aec94a | |||
| 473e849dfa | |||
| cfc896e92f | |||
| 36af34443e | |||
| 85cad1a7e7 | |||
| 0f5094575a | |||
| 131c50b1a1 | |||
| 6d4c400017 | |||
| 539dc77d57 | |||
| 9c7120c3dc | |||
| b1844a4f01 | |||
| e64848a216 | |||
| c93a7d5d29 | |||
| 360d6e85de | |||
| 162b4712e7 |
.claude/commands/opsx/apply.md (new file, 152 lines)
@@ -0,0 +1,152 @@
---
name: "OPSX: Apply"
description: Implement tasks from an OpenSpec change (Experimental)
category: Workflow
tags: [workflow, artifacts, experimental]
---

Implement tasks from an OpenSpec change.

**Input**: Optionally specify a change name (e.g., `/opsx:apply add-auth`). If omitted, check if it can be inferred from conversation context. If vague or ambiguous, you MUST prompt with the available changes.

**Steps**

1. **Select the change**

If a name is provided, use it. Otherwise:
- Infer from conversation context if the user mentioned a change
- Auto-select if only one active change exists
- If ambiguous, run `openspec list --json` to get available changes and use the **AskUserQuestion tool** to let the user select

Always announce: "Using change: <name>" and how to override (e.g., `/opsx:apply <other>`).

2. **Check status to understand the schema**
```bash
openspec status --change "<name>" --json
```
Parse the JSON to understand:
- `schemaName`: The workflow being used (e.g., "spec-driven")
- Which artifact contains the tasks (typically "tasks" for spec-driven; check status for others)
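
For example, those two facts can be pulled straight out of the status JSON - a minimal sketch, assuming `jq` is available and that each entry in `artifacts` carries `id` and `status` fields as described ("add-auth" is a placeholder name):

```bash
# Sketch: extract the schema name and per-artifact status from the status JSON.
# Field names follow the description above; adjust if the real shape differs.
openspec status --change "add-auth" --json \
  | jq '{schemaName, artifacts: [.artifacts[] | {id, status}]}'
```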

3. **Get apply instructions**

```bash
openspec instructions apply --change "<name>" --json
```

This returns:
- Context file paths (varies by schema)
- Progress (total, complete, remaining)
- Task list with status
- Dynamic instruction based on current state
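
As one illustration, the context file paths can be listed straight from this output - a sketch assuming `jq` and a top-level `contextFiles` array of path strings:

```bash
# Sketch: print each context file path on its own line.
openspec instructions apply --change "add-auth" --json | jq -r '.contextFiles[]'
```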

**Handle states:**
- If `state: "blocked"` (missing artifacts): show message, suggest using `/opsx:continue`
- If `state: "all_done"`: congratulate, suggest archive
- Otherwise: proceed to implementation

4. **Read context files**

Read the files listed in `contextFiles` from the apply instructions output.
The files depend on the schema being used:
- **spec-driven**: proposal, specs, design, tasks
- Other schemas: follow the contextFiles from CLI output

5. **Show current progress**

Display:
- Schema being used
- Progress: "N/M tasks complete"
- Remaining tasks overview
- Dynamic instruction from CLI

6. **Implement tasks (loop until done or blocked)**

For each pending task:
- Show which task is being worked on
- Make the code changes required
- Keep changes minimal and focused
- Mark task complete in the tasks file: `- [ ]` → `- [x]` (see the sketch after this list)
- Continue to next task
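
A minimal sketch of that checkbox flip, assuming the spec-driven tasks location and matching the task line by its exact text ("Add login endpoint" is a made-up task):

```bash
# Sketch: mark one task done in tasks.md; path and task text are illustrative.
sed -i 's/^- \[ \] Add login endpoint$/- [x] Add login endpoint/' \
  openspec/changes/add-auth/tasks.md
```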

**Pause if:**
- Task is unclear → ask for clarification
- Implementation reveals a design issue → suggest updating artifacts
- Error or blocker encountered → report and wait for guidance
- User interrupts

7. **On completion or pause, show status**

Display:
- Tasks completed this session
- Overall progress: "N/M tasks complete"
- If all done: suggest archive
- If paused: explain why and wait for guidance

**Output During Implementation**

```
## Implementing: <change-name> (schema: <schema-name>)

Working on task 3/7: <task description>
[...implementation happening...]
✓ Task complete

Working on task 4/7: <task description>
[...implementation happening...]
✓ Task complete
```

**Output On Completion**

```
## Implementation Complete

**Change:** <change-name>
**Schema:** <schema-name>
**Progress:** 7/7 tasks complete ✓

### Completed This Session
- [x] Task 1
- [x] Task 2
...

All tasks complete! You can archive this change with `/opsx:archive`.
```

**Output On Pause (Issue Encountered)**

```
## Implementation Paused

**Change:** <change-name>
**Schema:** <schema-name>
**Progress:** 4/7 tasks complete

### Issue Encountered
<description of the issue>

**Options:**
1. <option 1>
2. <option 2>
3. Other approach

What would you like to do?
```

**Guardrails**
- Keep going through tasks until done or blocked
- Always read context files before starting (from the apply instructions output)
- If task is ambiguous, pause and ask before implementing
- If implementation reveals issues, pause and suggest artifact updates
- Keep code changes minimal and scoped to each task
- Update task checkbox immediately after completing each task
- Pause on errors, blockers, or unclear requirements - don't guess
- Use contextFiles from CLI output, don't assume specific file names

**Fluid Workflow Integration**

This skill supports the "actions on a change" model:

- **Can be invoked anytime**: Before all artifacts are done (if tasks exist), after partial implementation, interleaved with other actions
- **Allows artifact updates**: If implementation reveals design issues, suggest updating artifacts - not phase-locked, work fluidly
.claude/commands/opsx/archive.md (new file, 157 lines)
@@ -0,0 +1,157 @@
---
name: "OPSX: Archive"
description: Archive a completed change in the experimental workflow
category: Workflow
tags: [workflow, archive, experimental]
---

Archive a completed change in the experimental workflow.

**Input**: Optionally specify a change name after `/opsx:archive` (e.g., `/opsx:archive add-auth`). If omitted, check if it can be inferred from conversation context. If vague or ambiguous, you MUST prompt with the available changes.

**Steps**

1. **If no change name provided, prompt for selection**

Run `openspec list --json` to get available changes. Use the **AskUserQuestion tool** to let the user select.

Show only active changes (not already archived).
Include the schema used for each change if available.

**IMPORTANT**: Do NOT guess or auto-select a change. Always let the user choose.

2. **Check artifact completion status**

Run `openspec status --change "<name>" --json` to check artifact completion.

Parse the JSON to understand:
- `schemaName`: The workflow being used
- `artifacts`: List of artifacts with their status (`done` or other)

**If any artifacts are not `done`:**
- Display warning listing incomplete artifacts
- Prompt user for confirmation to continue
- Proceed if user confirms

3. **Check task completion status**

Read the tasks file (typically `tasks.md`) to check for incomplete tasks.

Count tasks marked with `- [ ]` (incomplete) vs `- [x]` (complete).
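
A rough way to get both counts - a sketch assuming the spec-driven tasks path and top-level checkbox lines (`grep -c` exits non-zero when nothing matches, hence `|| true`):

```bash
# Sketch: count incomplete vs complete checkboxes in the tasks file.
tasks="openspec/changes/add-auth/tasks.md"   # illustrative path
incomplete=$(grep -c '^- \[ \]' "$tasks" || true)
complete=$(grep -c '^- \[x\]' "$tasks" || true)
echo "$complete complete, $incomplete incomplete"
```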

**If incomplete tasks found:**
- Display warning showing count of incomplete tasks
- Prompt user for confirmation to continue
- Proceed if user confirms

**If no tasks file exists:** Proceed without task-related warning.

4. **Assess delta spec sync state**

Check for delta specs at `openspec/changes/<name>/specs/`. If none exist, proceed without sync prompt.

**If delta specs exist:**
- Compare each delta spec with its corresponding main spec at `openspec/specs/<capability>/spec.md` (see the sketch after this list)
- Determine what changes would be applied (adds, modifications, removals, renames)
- Show a combined summary before prompting
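
One way to eyeball the drift for a single capability - a sketch in which "auth" and "add-auth" stand in for a real capability and change:

```bash
# Sketch: show what the delta spec would change relative to the main spec.
diff -u openspec/specs/auth/spec.md openspec/changes/add-auth/specs/auth/spec.md
```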

**Prompt options:**
- If changes needed: "Sync now (recommended)", "Archive without syncing"
- If already synced: "Archive now", "Sync anyway", "Cancel"

If user chooses sync, use Task tool (subagent_type: "general-purpose", prompt: "Use Skill tool to invoke openspec-sync-specs for change '<name>'. Delta spec analysis: <include the analyzed delta spec summary>"). Proceed to archive regardless of choice.

5. **Perform the archive**

Create the archive directory if it doesn't exist:
```bash
mkdir -p openspec/changes/archive
```

Generate target name using current date: `YYYY-MM-DD-<change-name>`

**Check if target already exists:**
- If yes: Fail with error, suggest renaming existing archive or using different date
- If no: Move the change directory to archive

```bash
mv openspec/changes/<name> openspec/changes/archive/YYYY-MM-DD-<name>
```
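
Putting the name generation, the existence check, and the move together - a minimal sketch ("add-auth" is a placeholder change name):

```bash
# Sketch: date-stamped archive move with a guard against overwriting.
name="add-auth"                                    # placeholder
target="openspec/changes/archive/$(date +%F)-$name"
if [ -e "$target" ]; then
  echo "Archive already exists: $target" >&2
  exit 1
fi
mv "openspec/changes/$name" "$target"
```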

6. **Display summary**

Show archive completion summary including:
- Change name
- Schema that was used
- Archive location
- Spec sync status (synced / sync skipped / no delta specs)
- Note about any warnings (incomplete artifacts/tasks)

**Output On Success**

```
## Archive Complete

**Change:** <change-name>
**Schema:** <schema-name>
**Archived to:** openspec/changes/archive/YYYY-MM-DD-<name>/
**Specs:** ✓ Synced to main specs

All artifacts complete. All tasks complete.
```

**Output On Success (No Delta Specs)**

```
## Archive Complete

**Change:** <change-name>
**Schema:** <schema-name>
**Archived to:** openspec/changes/archive/YYYY-MM-DD-<name>/
**Specs:** No delta specs

All artifacts complete. All tasks complete.
```

**Output On Success With Warnings**

```
## Archive Complete (with warnings)

**Change:** <change-name>
**Schema:** <schema-name>
**Archived to:** openspec/changes/archive/YYYY-MM-DD-<name>/
**Specs:** Sync skipped (user chose to skip)

**Warnings:**
- Archived with 2 incomplete artifacts
- Archived with 3 incomplete tasks
- Delta spec sync was skipped (user chose to skip)

Review the archive if this was not intentional.
```

**Output On Error (Archive Exists)**

```
## Archive Failed

**Change:** <change-name>
**Target:** openspec/changes/archive/YYYY-MM-DD-<name>/

Target archive directory already exists.

**Options:**
1. Rename the existing archive
2. Delete the existing archive if it's a duplicate
3. Wait until a different date to archive
```

**Guardrails**
- Always prompt for change selection if not provided
- Use artifact graph (openspec status --json) for completion checking
- Don't block archive on warnings - just inform and confirm
- Preserve .openspec.yaml when moving to archive (it moves with the directory)
- Show clear summary of what happened
- If sync is requested, use the Skill tool to invoke `openspec-sync-specs` (agent-driven)
- If delta specs exist, always run the sync assessment and show the combined summary before prompting
.claude/commands/opsx/explore.md (new file, 173 lines)
@@ -0,0 +1,173 @@
---
name: "OPSX: Explore"
description: "Enter explore mode - think through ideas, investigate problems, clarify requirements"
category: Workflow
tags: [workflow, explore, experimental, thinking]
---

Enter explore mode. Think deeply. Visualize freely. Follow the conversation wherever it goes.

**IMPORTANT: Explore mode is for thinking, not implementing.** You may read files, search code, and investigate the codebase, but you must NEVER write code or implement features. If the user asks you to implement something, remind them to exit explore mode first and create a change proposal. You MAY create OpenSpec artifacts (proposals, designs, specs) if the user asks—that's capturing thinking, not implementing.

**This is a stance, not a workflow.** There are no fixed steps, no required sequence, no mandatory outputs. You're a thinking partner helping the user explore.

**Input**: The argument after `/opsx:explore` is whatever the user wants to think about. Could be:
- A vague idea: "real-time collaboration"
- A specific problem: "the auth system is getting unwieldy"
- A change name: "add-dark-mode" (to explore in context of that change)
- A comparison: "postgres vs sqlite for this"
- Nothing (just enter explore mode)

---

## The Stance

- **Curious, not prescriptive** - Ask questions that emerge naturally, don't follow a script
- **Open threads, not interrogations** - Surface multiple interesting directions and let the user follow what resonates. Don't funnel them through a single path of questions.
- **Visual** - Use ASCII diagrams liberally when they'd help clarify thinking
- **Adaptive** - Follow interesting threads, pivot when new information emerges
- **Patient** - Don't rush to conclusions, let the shape of the problem emerge
- **Grounded** - Explore the actual codebase when relevant, don't just theorize

---

## What You Might Do

Depending on what the user brings, you might:

**Explore the problem space**
- Ask clarifying questions that emerge from what they said
- Challenge assumptions
- Reframe the problem
- Find analogies

**Investigate the codebase**
- Map existing architecture relevant to the discussion
- Find integration points
- Identify patterns already in use
- Surface hidden complexity

**Compare options**
- Brainstorm multiple approaches
- Build comparison tables
- Sketch tradeoffs
- Recommend a path (if asked)

**Visualize**
```
┌─────────────────────────────────────────┐
│   Use ASCII diagrams liberally          │
├─────────────────────────────────────────┤
│                                         │
│   ┌────────┐         ┌────────┐         │
│   │ State  │────────▶│ State  │         │
│   │   A    │         │   B    │         │
│   └────────┘         └────────┘         │
│                                         │
│   System diagrams, state machines,      │
│   data flows, architecture sketches,    │
│   dependency graphs, comparison tables  │
│                                         │
└─────────────────────────────────────────┘
```

**Surface risks and unknowns**
- Identify what could go wrong
- Find gaps in understanding
- Suggest spikes or investigations

---

## OpenSpec Awareness

You have full context of the OpenSpec system. Use it naturally, don't force it.

### Check for context

At the start, quickly check what exists:
```bash
openspec list --json
```

This tells you:
- If there are active changes
- Their names, schemas, and status
- What the user might be working on
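
If you only need the names, something like this works - a sketch that assumes the JSON is an array of change objects with a `name` field (the exact shape is not specified here):

```bash
# Sketch: print active change names; assumed shape [{"name": ...}, ...].
openspec list --json | jq -r '.[].name'
```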

If the user mentioned a specific change name, read its artifacts for context.

### When no change exists

Think freely. When insights crystallize, you might offer:

- "This feels solid enough to start a change. Want me to create a proposal?"
- Or keep exploring - no pressure to formalize

### When a change exists

If the user mentions a change or you detect one is relevant:

1. **Read existing artifacts for context**
- `openspec/changes/<name>/proposal.md`
- `openspec/changes/<name>/design.md`
- `openspec/changes/<name>/tasks.md`
- etc.

2. **Reference them naturally in conversation**
- "Your design mentions using Redis, but we just realized SQLite fits better..."
- "The proposal scopes this to premium users, but we're now thinking everyone..."

3. **Offer to capture when decisions are made**

| Insight Type | Where to Capture |
|--------------|------------------|
| New requirement discovered | `specs/<capability>/spec.md` |
| Requirement changed | `specs/<capability>/spec.md` |
| Design decision made | `design.md` |
| Scope changed | `proposal.md` |
| New work identified | `tasks.md` |
| Assumption invalidated | Relevant artifact |

Example offers:
- "That's a design decision. Capture it in design.md?"
- "This is a new requirement. Add it to specs?"
- "This changes scope. Update the proposal?"

4. **The user decides** - Offer and move on. Don't pressure. Don't auto-capture.

---

## What You Don't Have To Do

- Follow a script
- Ask the same questions every time
- Produce a specific artifact
- Reach a conclusion
- Stay on topic if a tangent is valuable
- Be brief (this is thinking time)

---

## Ending Discovery

There's no required ending. Discovery might:

- **Flow into a proposal**: "Ready to start? I can create a change proposal."
- **Result in artifact updates**: "Updated design.md with these decisions"
- **Just provide clarity**: User has what they need, moves on
- **Continue later**: "We can pick this up anytime"

When things crystallize, you might offer a summary - but it's optional. Sometimes the thinking IS the value.

---

## Guardrails

- **Don't implement** - Never write code or implement features. Creating OpenSpec artifacts is fine, writing application code is not.
- **Don't fake understanding** - If something is unclear, dig deeper
- **Don't rush** - Discovery is thinking time, not task time
- **Don't force structure** - Let patterns emerge naturally
- **Don't auto-capture** - Offer to save insights, don't just do it
- **Do visualize** - A good diagram is worth many paragraphs
- **Do explore the codebase** - Ground discussions in reality
- **Do question assumptions** - Including the user's and your own
.claude/commands/opsx/propose.md (new file, 106 lines)
@@ -0,0 +1,106 @@
---
name: "OPSX: Propose"
description: Propose a new change - create it and generate all artifacts in one step
category: Workflow
tags: [workflow, artifacts, experimental]
---

Propose a new change - create the change and generate all artifacts in one step.

I'll create a change with artifacts:
- proposal.md (what & why)
- design.md (how)
- tasks.md (implementation steps)

When ready to implement, run /opsx:apply

---

**Input**: The argument after `/opsx:propose` is the change name (kebab-case), OR a description of what the user wants to build.

**Steps**

1. **If no input provided, ask what they want to build**

Use the **AskUserQuestion tool** (open-ended, no preset options) to ask:
> "What change do you want to work on? Describe what you want to build or fix."

From their description, derive a kebab-case name (e.g., "add user authentication" → `add-user-auth`).
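
In shell terms, a rough slug derivation looks like this - a sketch using only standard tools; note it won't make the judgment call of abbreviating "authentication" to "auth":

```bash
# Sketch: lowercase, squeeze non-alphanumerics to single hyphens, trim ends.
echo "Add User Authentication" \
  | tr '[:upper:]' '[:lower:]' \
  | sed -E 's/[^a-z0-9]+/-/g; s/^-+|-+$//g'
# -> add-user-authentication
```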

**IMPORTANT**: Do NOT proceed without understanding what the user wants to build.

2. **Create the change directory**
```bash
openspec new change "<name>"
```
This creates a scaffolded change at `openspec/changes/<name>/` with `.openspec.yaml`.

3. **Get the artifact build order**
```bash
openspec status --change "<name>" --json
```
Parse the JSON to get:
- `applyRequires`: array of artifact IDs needed before implementation (e.g., `["tasks"]`)
- `artifacts`: list of all artifacts with their status and dependencies

4. **Create artifacts in sequence until apply-ready**

Use the **TodoWrite tool** to track progress through the artifacts.

Loop through artifacts in dependency order (artifacts with no pending dependencies first):

a. **For each artifact that is `ready` (dependencies satisfied)**:
- Get instructions:
  ```bash
  openspec instructions <artifact-id> --change "<name>" --json
  ```
- The instructions JSON includes:
  - `context`: Project background (constraints for you - do NOT include in output)
  - `rules`: Artifact-specific rules (constraints for you - do NOT include in output)
  - `template`: The structure to use for your output file
  - `instruction`: Schema-specific guidance for this artifact type
  - `outputPath`: Where to write the artifact
  - `dependencies`: Completed artifacts to read for context
- Read any completed dependency files for context
- Create the artifact file using `template` as the structure
- Apply `context` and `rules` as constraints - but do NOT copy them into the file
- Show brief progress: "Created <artifact-id>"

b. **Continue until all `applyRequires` artifacts are complete**
- After creating each artifact, re-run `openspec status --change "<name>" --json`
- Check if every artifact ID in `applyRequires` has `status: "done"` in the artifacts array
- Stop when all `applyRequires` artifacts are done
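
As a sketch of that check, assuming each artifact entry exposes `id` and `status` fields alongside the top-level `applyRequires` array ("add-user-auth" is a placeholder):

```bash
# Sketch: list applyRequires artifacts that are not yet done.
# An empty array means the change is apply-ready.
openspec status --change "add-user-auth" --json \
  | jq '[.applyRequires[] as $id
         | .artifacts[]
         | select(.id == $id and .status != "done")
         | .id]'
```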

c. **If an artifact requires user input** (unclear context):
- Use **AskUserQuestion tool** to clarify
- Then continue with creation

5. **Show final status**
```bash
openspec status --change "<name>"
```

**Output**

After completing all artifacts, summarize:
- Change name and location
- List of artifacts created with brief descriptions
- What's ready: "All artifacts created! Ready for implementation."
- Prompt: "Run `/opsx:apply` to start implementing."

**Artifact Creation Guidelines**

- Follow the `instruction` field from `openspec instructions` for each artifact type
- The schema defines what each artifact should contain - follow it
- Read dependency artifacts for context before creating new ones
- Use `template` as the structure for your output file - fill in its sections
- **IMPORTANT**: `context` and `rules` are constraints for YOU, not content for the file
- Do NOT copy `<context>`, `<rules>`, `<project_context>` blocks into the artifact
- These guide what you write, but should never appear in the output

**Guardrails**
- Create ALL artifacts needed for implementation (as defined by schema's `apply.requires`)
- Always read dependency artifacts before creating a new one
- If context is critically unclear, ask the user - but prefer making reasonable decisions to keep momentum
- If a change with that name already exists, ask if user wants to continue it or create a new one
- Verify each artifact file exists after writing before proceeding to next
.claude/skills/openspec-apply-change/SKILL.md (new file, 156 lines)
@@ -0,0 +1,156 @@
---
name: openspec-apply-change
description: Implement tasks from an OpenSpec change. Use when the user wants to start implementing, continue implementation, or work through tasks.
license: MIT
compatibility: Requires openspec CLI.
metadata:
  author: openspec
  version: "1.0"
  generatedBy: "1.2.0"
---

Implement tasks from an OpenSpec change.

**Input**: Optionally specify a change name. If omitted, check if it can be inferred from conversation context. If vague or ambiguous, you MUST prompt with the available changes.

**Steps**

1. **Select the change**

If a name is provided, use it. Otherwise:
- Infer from conversation context if the user mentioned a change
- Auto-select if only one active change exists
- If ambiguous, run `openspec list --json` to get available changes and use the **AskUserQuestion tool** to let the user select

Always announce: "Using change: <name>" and how to override (e.g., `/opsx:apply <other>`).

2. **Check status to understand the schema**
```bash
openspec status --change "<name>" --json
```
Parse the JSON to understand:
- `schemaName`: The workflow being used (e.g., "spec-driven")
- Which artifact contains the tasks (typically "tasks" for spec-driven; check status for others)

3. **Get apply instructions**

```bash
openspec instructions apply --change "<name>" --json
```

This returns:
- Context file paths (varies by schema - could be proposal/specs/design/tasks or spec/tests/implementation/docs)
- Progress (total, complete, remaining)
- Task list with status
- Dynamic instruction based on current state

**Handle states:**
- If `state: "blocked"` (missing artifacts): show message, suggest using openspec-continue-change
- If `state: "all_done"`: congratulate, suggest archive
- Otherwise: proceed to implementation
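
A compact way to branch on that state - a sketch assuming `jq` and the top-level `state` field whose values are listed above ("add-auth" is a placeholder):

```bash
# Sketch: read the apply state, then branch on blocked / all_done / otherwise.
state=$(openspec instructions apply --change "add-auth" --json | jq -r '.state')
case "$state" in
  blocked)  echo "Missing artifacts - create them first" ;;
  all_done) echo "All tasks complete - consider archiving" ;;
  *)        echo "Proceeding with implementation" ;;
esac
```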

4. **Read context files**

Read the files listed in `contextFiles` from the apply instructions output.
The files depend on the schema being used:
- **spec-driven**: proposal, specs, design, tasks
- Other schemas: follow the contextFiles from CLI output

5. **Show current progress**

Display:
- Schema being used
- Progress: "N/M tasks complete"
- Remaining tasks overview
- Dynamic instruction from CLI

6. **Implement tasks (loop until done or blocked)**

For each pending task:
- Show which task is being worked on
- Make the code changes required
- Keep changes minimal and focused
- Mark task complete in the tasks file: `- [ ]` → `- [x]`
- Continue to next task

**Pause if:**
- Task is unclear → ask for clarification
- Implementation reveals a design issue → suggest updating artifacts
- Error or blocker encountered → report and wait for guidance
- User interrupts

7. **On completion or pause, show status**

Display:
- Tasks completed this session
- Overall progress: "N/M tasks complete"
- If all done: suggest archive
- If paused: explain why and wait for guidance

**Output During Implementation**

```
## Implementing: <change-name> (schema: <schema-name>)

Working on task 3/7: <task description>
[...implementation happening...]
✓ Task complete

Working on task 4/7: <task description>
[...implementation happening...]
✓ Task complete
```

**Output On Completion**

```
## Implementation Complete

**Change:** <change-name>
**Schema:** <schema-name>
**Progress:** 7/7 tasks complete ✓

### Completed This Session
- [x] Task 1
- [x] Task 2
...

All tasks complete! Ready to archive this change.
```

**Output On Pause (Issue Encountered)**

```
## Implementation Paused

**Change:** <change-name>
**Schema:** <schema-name>
**Progress:** 4/7 tasks complete

### Issue Encountered
<description of the issue>

**Options:**
1. <option 1>
2. <option 2>
3. Other approach

What would you like to do?
```

**Guardrails**
- Keep going through tasks until done or blocked
- Always read context files before starting (from the apply instructions output)
- If task is ambiguous, pause and ask before implementing
- If implementation reveals issues, pause and suggest artifact updates
- Keep code changes minimal and scoped to each task
- Update task checkbox immediately after completing each task
- Pause on errors, blockers, or unclear requirements - don't guess
- Use contextFiles from CLI output, don't assume specific file names

**Fluid Workflow Integration**

This skill supports the "actions on a change" model:

- **Can be invoked anytime**: Before all artifacts are done (if tasks exist), after partial implementation, interleaved with other actions
- **Allows artifact updates**: If implementation reveals design issues, suggest updating artifacts - not phase-locked, work fluidly
.claude/skills/openspec-archive-change/SKILL.md (new file, 114 lines)
@@ -0,0 +1,114 @@
---
name: openspec-archive-change
description: Archive a completed change in the experimental workflow. Use when the user wants to finalize and archive a change after implementation is complete.
license: MIT
compatibility: Requires openspec CLI.
metadata:
  author: openspec
  version: "1.0"
  generatedBy: "1.2.0"
---

Archive a completed change in the experimental workflow.

**Input**: Optionally specify a change name. If omitted, check if it can be inferred from conversation context. If vague or ambiguous, you MUST prompt with the available changes.

**Steps**

1. **If no change name provided, prompt for selection**

Run `openspec list --json` to get available changes. Use the **AskUserQuestion tool** to let the user select.

Show only active changes (not already archived).
Include the schema used for each change if available.

**IMPORTANT**: Do NOT guess or auto-select a change. Always let the user choose.

2. **Check artifact completion status**

Run `openspec status --change "<name>" --json` to check artifact completion.

Parse the JSON to understand:
- `schemaName`: The workflow being used
- `artifacts`: List of artifacts with their status (`done` or other)
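
For instance, the artifacts that would trigger the warning below can be pulled out in one line - a sketch assuming `id` and `status` fields on each artifact entry ("add-auth" is a placeholder):

```bash
# Sketch: list artifacts that are not yet done.
openspec status --change "add-auth" --json \
  | jq '[.artifacts[] | select(.status != "done") | .id]'
```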

**If any artifacts are not `done`:**
- Display warning listing incomplete artifacts
- Use **AskUserQuestion tool** to confirm user wants to proceed
- Proceed if user confirms

3. **Check task completion status**

Read the tasks file (typically `tasks.md`) to check for incomplete tasks.

Count tasks marked with `- [ ]` (incomplete) vs `- [x]` (complete).

**If incomplete tasks found:**
- Display warning showing count of incomplete tasks
- Use **AskUserQuestion tool** to confirm user wants to proceed
- Proceed if user confirms

**If no tasks file exists:** Proceed without task-related warning.

4. **Assess delta spec sync state**

Check for delta specs at `openspec/changes/<name>/specs/`. If none exist, proceed without sync prompt.

**If delta specs exist:**
- Compare each delta spec with its corresponding main spec at `openspec/specs/<capability>/spec.md`
- Determine what changes would be applied (adds, modifications, removals, renames)
- Show a combined summary before prompting

**Prompt options:**
- If changes needed: "Sync now (recommended)", "Archive without syncing"
- If already synced: "Archive now", "Sync anyway", "Cancel"

If user chooses sync, use Task tool (subagent_type: "general-purpose", prompt: "Use Skill tool to invoke openspec-sync-specs for change '<name>'. Delta spec analysis: <include the analyzed delta spec summary>"). Proceed to archive regardless of choice.

5. **Perform the archive**

Create the archive directory if it doesn't exist:
```bash
mkdir -p openspec/changes/archive
```

Generate target name using current date: `YYYY-MM-DD-<change-name>`

**Check if target already exists:**
- If yes: Fail with error, suggest renaming existing archive or using different date
- If no: Move the change directory to archive

```bash
mv openspec/changes/<name> openspec/changes/archive/YYYY-MM-DD-<name>
```

6. **Display summary**

Show archive completion summary including:
- Change name
- Schema that was used
- Archive location
- Whether specs were synced (if applicable)
- Note about any warnings (incomplete artifacts/tasks)

**Output On Success**

```
## Archive Complete

**Change:** <change-name>
**Schema:** <schema-name>
**Archived to:** openspec/changes/archive/YYYY-MM-DD-<name>/
**Specs:** ✓ Synced to main specs (or "No delta specs" or "Sync skipped")

All artifacts complete. All tasks complete.
```

**Guardrails**
- Always prompt for change selection if not provided
- Use artifact graph (openspec status --json) for completion checking
- Don't block archive on warnings - just inform and confirm
- Preserve .openspec.yaml when moving to archive (it moves with the directory)
- Show clear summary of what happened
- If sync is requested, use openspec-sync-specs approach (agent-driven)
- If delta specs exist, always run the sync assessment and show the combined summary before prompting
.claude/skills/openspec-explore/SKILL.md (new file, 288 lines)
@@ -0,0 +1,288 @@
---
name: openspec-explore
description: Enter explore mode - a thinking partner for exploring ideas, investigating problems, and clarifying requirements. Use when the user wants to think through something before or during a change.
license: MIT
compatibility: Requires openspec CLI.
metadata:
  author: openspec
  version: "1.0"
  generatedBy: "1.2.0"
---

Enter explore mode. Think deeply. Visualize freely. Follow the conversation wherever it goes.

**IMPORTANT: Explore mode is for thinking, not implementing.** You may read files, search code, and investigate the codebase, but you must NEVER write code or implement features. If the user asks you to implement something, remind them to exit explore mode first and create a change proposal. You MAY create OpenSpec artifacts (proposals, designs, specs) if the user asks—that's capturing thinking, not implementing.

**This is a stance, not a workflow.** There are no fixed steps, no required sequence, no mandatory outputs. You're a thinking partner helping the user explore.

---

## The Stance

- **Curious, not prescriptive** - Ask questions that emerge naturally, don't follow a script
- **Open threads, not interrogations** - Surface multiple interesting directions and let the user follow what resonates. Don't funnel them through a single path of questions.
- **Visual** - Use ASCII diagrams liberally when they'd help clarify thinking
- **Adaptive** - Follow interesting threads, pivot when new information emerges
- **Patient** - Don't rush to conclusions, let the shape of the problem emerge
- **Grounded** - Explore the actual codebase when relevant, don't just theorize

---

## What You Might Do

Depending on what the user brings, you might:

**Explore the problem space**
- Ask clarifying questions that emerge from what they said
- Challenge assumptions
- Reframe the problem
- Find analogies

**Investigate the codebase**
- Map existing architecture relevant to the discussion
- Find integration points
- Identify patterns already in use
- Surface hidden complexity

**Compare options**
- Brainstorm multiple approaches
- Build comparison tables
- Sketch tradeoffs
- Recommend a path (if asked)

**Visualize**
```
┌─────────────────────────────────────────┐
│   Use ASCII diagrams liberally          │
├─────────────────────────────────────────┤
│                                         │
│   ┌────────┐         ┌────────┐         │
│   │ State  │────────▶│ State  │         │
│   │   A    │         │   B    │         │
│   └────────┘         └────────┘         │
│                                         │
│   System diagrams, state machines,      │
│   data flows, architecture sketches,    │
│   dependency graphs, comparison tables  │
│                                         │
└─────────────────────────────────────────┘
```

**Surface risks and unknowns**
- Identify what could go wrong
- Find gaps in understanding
- Suggest spikes or investigations

---

## OpenSpec Awareness

You have full context of the OpenSpec system. Use it naturally, don't force it.

### Check for context

At the start, quickly check what exists:
```bash
openspec list --json
```

This tells you:
- If there are active changes
- Their names, schemas, and status
- What the user might be working on

### When no change exists

Think freely. When insights crystallize, you might offer:

- "This feels solid enough to start a change. Want me to create a proposal?"
- Or keep exploring - no pressure to formalize

### When a change exists

If the user mentions a change or you detect one is relevant:

1. **Read existing artifacts for context**
- `openspec/changes/<name>/proposal.md`
- `openspec/changes/<name>/design.md`
- `openspec/changes/<name>/tasks.md`
- etc.

2. **Reference them naturally in conversation**
- "Your design mentions using Redis, but we just realized SQLite fits better..."
- "The proposal scopes this to premium users, but we're now thinking everyone..."

3. **Offer to capture when decisions are made**

| Insight Type | Where to Capture |
|--------------|------------------|
| New requirement discovered | `specs/<capability>/spec.md` |
| Requirement changed | `specs/<capability>/spec.md` |
| Design decision made | `design.md` |
| Scope changed | `proposal.md` |
| New work identified | `tasks.md` |
| Assumption invalidated | Relevant artifact |

Example offers:
- "That's a design decision. Capture it in design.md?"
- "This is a new requirement. Add it to specs?"
- "This changes scope. Update the proposal?"

4. **The user decides** - Offer and move on. Don't pressure. Don't auto-capture.

---

## What You Don't Have To Do

- Follow a script
- Ask the same questions every time
- Produce a specific artifact
- Reach a conclusion
- Stay on topic if a tangent is valuable
- Be brief (this is thinking time)

---

## Handling Different Entry Points

**User brings a vague idea:**
```
User: I'm thinking about adding real-time collaboration

You: Real-time collab is a big space. Let me think about this...

     COLLABORATION SPECTRUM
     ════════════════════════════════════════════

     Awareness         Coordination         Sync
         │                  │                 │
         ▼                  ▼                 ▼
     ┌────────┐        ┌────────┐        ┌────────┐
     │Presence│        │Cursors │        │  CRDT  │
     │  "3    │        │ Multi  │        │Conflict│
     │online" │        │ select │        │  free  │
     └────────┘        └────────┘        └────────┘
         │                  │                 │
      trivial           moderate           complex

Where's your head at?
```

**User brings a specific problem:**
```
User: The auth system is a mess

You: [reads codebase]

┌─────────────────────────────────────────────┐
│              CURRENT AUTH FLOW              │
└─────────────────────────────────────────────┘
                      │
        ┌─────────────┼─────────────┐
        ▼             ▼             ▼
   ┌─────────┐   ┌─────────┐   ┌─────────┐
   │ Google  │   │ GitHub  │   │  Email  │
   │  OAuth  │   │  OAuth  │   │  Magic  │
   └────┬────┘   └────┬────┘   └────┬────┘
        │             │             │
        └─────────────┼─────────────┘
                      ▼
                ┌───────────┐
                │  Session  │
                └─────┬─────┘
                      │
                      ▼
                ┌───────────┐
                │   Perms   │
                └───────────┘

I see three tangles. Which one's burning?
```

**User is stuck mid-implementation:**
```
User: /opsx:explore add-auth-system
The OAuth integration is more complex than expected

You: [reads change artifacts]

You're on task 4: "Implement OAuth flow"

Let me trace what's involved...

[draws diagram, explores options, suggests paths]

Want to update the design to reflect this?
Or add a spike task to investigate?
```

**User wants to compare options:**
```
User: Should we use Postgres or SQLite?

You: Generic answer is boring. What's the context?

User: A CLI tool that tracks local dev environments

You: That changes everything.

┌─────────────────────────────────────────────────┐
│              CLI TOOL DATA STORAGE              │
└─────────────────────────────────────────────────┘

Key constraints:
• No daemon running
• Must work offline
• Single user

               SQLite          Postgres
Deployment     embedded ✓      needs server ✗
Offline        yes ✓           no ✗
Single file    yes ✓           no ✗

SQLite. Not even close.

Unless... is there a sync component?
```

---

## Ending Discovery

There's no required ending. Discovery might:

- **Flow into a proposal**: "Ready to start? I can create a change proposal."
- **Result in artifact updates**: "Updated design.md with these decisions"
- **Just provide clarity**: User has what they need, moves on
- **Continue later**: "We can pick this up anytime"

When it feels like things are crystallizing, you might summarize:

```
## What We Figured Out

**The problem**: [crystallized understanding]

**The approach**: [if one emerged]

**Open questions**: [if any remain]

**Next steps** (if ready):
- Create a change proposal
- Keep exploring: just keep talking
```

But this summary is optional. Sometimes the thinking IS the value.

---

## Guardrails

- **Don't implement** - Never write code or implement features. Creating OpenSpec artifacts is fine, writing application code is not.
- **Don't fake understanding** - If something is unclear, dig deeper
- **Don't rush** - Discovery is thinking time, not task time
- **Don't force structure** - Let patterns emerge naturally
- **Don't auto-capture** - Offer to save insights, don't just do it
- **Do visualize** - A good diagram is worth many paragraphs
- **Do explore the codebase** - Ground discussions in reality
- **Do question assumptions** - Including the user's and your own
.claude/skills/openspec-propose/SKILL.md (new file, 110 lines)
@@ -0,0 +1,110 @@
|
|||||||
|
---
|
||||||
|
name: openspec-propose
|
||||||
|
description: Propose a new change with all artifacts generated in one step. Use when the user wants to quickly describe what they want to build and get a complete proposal with design, specs, and tasks ready for implementation.
|
||||||
|
license: MIT
|
||||||
|
compatibility: Requires openspec CLI.
|
||||||
|
metadata:
|
||||||
|
author: openspec
|
||||||
|
version: "1.0"
|
||||||
|
generatedBy: "1.2.0"
|
||||||
|
---
|
||||||
|
|
||||||
|
Propose a new change - create the change and generate all artifacts in one step.
|
||||||
|
|
||||||
|
I'll create a change with artifacts:
|
||||||
|
- proposal.md (what & why)
|
||||||
|
- design.md (how)
|
||||||
|
- tasks.md (implementation steps)
|
||||||
|
|
||||||
|
When ready to implement, run /opsx:apply
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
**Input**: The user's request should include a change name (kebab-case) OR a description of what they want to build.
|
||||||
|
|
||||||
|
**Steps**
|
||||||
|
|
||||||
|
1. **If no clear input provided, ask what they want to build**
|
||||||
|
|
||||||
|
Use the **AskUserQuestion tool** (open-ended, no preset options) to ask:
|
||||||
|
> "What change do you want to work on? Describe what you want to build or fix."
|
||||||
|
|
||||||
|
From their description, derive a kebab-case name (e.g., "add user authentication" → `add-user-auth`).
|
||||||
|
|
||||||
|
**IMPORTANT**: Do NOT proceed without understanding what the user wants to build.
|
||||||
|
|
||||||
|
2. **Create the change directory**
|
||||||
|
```bash
|
||||||
|
openspec new change "<name>"
|
||||||
|
```
|
||||||
|
This creates a scaffolded change at `openspec/changes/<name>/` with `.openspec.yaml`.
|
||||||
|
|
||||||
|
3. **Get the artifact build order**
|
||||||
|
```bash
|
||||||
|
openspec status --change "<name>" --json
|
||||||
|
```
|
||||||
|
Parse the JSON to get:
|
||||||
|
- `applyRequires`: array of artifact IDs needed before implementation (e.g., `["tasks"]`)
|
||||||
|
- `artifacts`: list of all artifacts with their status and dependencies

4. **Create artifacts in sequence until apply-ready**

   Use the **TodoWrite tool** to track progress through the artifacts.

   Loop through artifacts in dependency order (artifacts with no pending dependencies first):

   a. **For each artifact that is `ready` (dependencies satisfied)**:
      - Get instructions:

        ```bash
        openspec instructions <artifact-id> --change "<name>" --json
        ```

      - The instructions JSON includes:
        - `context`: Project background (constraints for you - do NOT include in output)
        - `rules`: Artifact-specific rules (constraints for you - do NOT include in output)
        - `template`: The structure to use for your output file
        - `instruction`: Schema-specific guidance for this artifact type
        - `outputPath`: Where to write the artifact
        - `dependencies`: Completed artifacts to read for context
      - Read any completed dependency files for context
      - Create the artifact file using `template` as the structure
      - Apply `context` and `rules` as constraints - but do NOT copy them into the file
      - Show brief progress: "Created <artifact-id>"

   b. **Continue until all `applyRequires` artifacts are complete**
      - After creating each artifact, re-run `openspec status --change "<name>" --json`
      - Check if every artifact ID in `applyRequires` has `status: "done"` in the artifacts array (see the sketch after this list)
      - Stop when all `applyRequires` artifacts are done

   c. **If an artifact requires user input** (unclear context):
      - Use **AskUserQuestion tool** to clarify
      - Then continue with creation
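
   A minimal way to script the check in step 4b, assuming `jq` is installed and the status JSON has the shape sketched in step 3:

   ```bash
   # Prints "apply-ready" once every applyRequires artifact has status "done".
   openspec status --change "<name>" --json | jq -e '
     .applyRequires - [.artifacts[] | select(.status == "done") | .id] == []
   ' >/dev/null && echo "apply-ready"
   ```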

5. **Show final status**

   ```bash
   openspec status --change "<name>"
   ```

**Output**

After completing all artifacts, summarize:
- Change name and location
- List of artifacts created with brief descriptions
- What's ready: "All artifacts created! Ready for implementation."
- Prompt: "Run `/opsx:apply` or ask me to implement to start working on the tasks."

**Artifact Creation Guidelines**

- Follow the `instruction` field from `openspec instructions` for each artifact type
- The schema defines what each artifact should contain - follow it
- Read dependency artifacts for context before creating new ones
- Use `template` as the structure for your output file - fill in its sections
- **IMPORTANT**: `context` and `rules` are constraints for YOU, not content for the file
- Do NOT copy `<context>`, `<rules>`, `<project_context>` blocks into the artifact
- These guide what you write, but should never appear in the output

**Guardrails**
- Create ALL artifacts needed for implementation (as defined by the schema's `apply.requires`)
- Always read dependency artifacts before creating a new one
- If context is critically unclear, ask the user - but prefer making reasonable decisions to keep momentum
- If a change with that name already exists, ask whether the user wants to continue it or create a new one
- Verify each artifact file exists after writing before proceeding to the next

.codex/skills/openspec-apply-change/SKILL.md (new file, +156 lines)

---
name: openspec-apply-change
description: Implement tasks from an OpenSpec change. Use when the user wants to start implementing, continue implementation, or work through tasks.
license: MIT
compatibility: Requires openspec CLI.
metadata:
  author: openspec
  version: "1.0"
  generatedBy: "1.2.0"
---

Implement tasks from an OpenSpec change.

**Input**: Optionally specify a change name. If omitted, check if it can be inferred from conversation context. If vague or ambiguous, you MUST prompt with the available changes.

**Steps**

1. **Select the change**

   If a name is provided, use it. Otherwise:
   - Infer from conversation context if the user mentioned a change
   - Auto-select if only one active change exists
   - If ambiguous, run `openspec list --json` to get available changes and use the **AskUserQuestion tool** to let the user select

   Always announce: "Using change: <name>" and how to override (e.g., `/opsx:apply <other>`).

2. **Check status to understand the schema**

   ```bash
   openspec status --change "<name>" --json
   ```

   Parse the JSON to understand:
   - `schemaName`: The workflow being used (e.g., "spec-driven")
   - Which artifact contains the tasks (typically "tasks" for spec-driven; check status for others)

3. **Get apply instructions**

   ```bash
   openspec instructions apply --change "<name>" --json
   ```

   This returns:
   - Context file paths (varies by schema - could be proposal/specs/design/tasks or spec/tests/implementation/docs)
   - Progress (total, complete, remaining)
   - Task list with status
   - Dynamic instruction based on current state
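
   As a rough sketch, the payload might be shaped like this - `state` and `contextFiles` are named in this document; everything else (field layout, values) is an illustrative assumption:

   ```json
   {
     "state": "in_progress",
     "contextFiles": [
       "openspec/changes/<name>/proposal.md",
       "openspec/changes/<name>/design.md",
       "openspec/changes/<name>/tasks.md"
     ],
     "progress": { "total": 7, "complete": 3, "remaining": 4 },
     "tasks": [
       { "description": "Implement OAuth flow", "status": "pending" }
     ],
     "instruction": "Continue with the next pending task"
   }
   ```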

   **Handle states:**
   - If `state: "blocked"` (missing artifacts): show message, suggest using openspec-continue-change
   - If `state: "all_done"`: congratulate, suggest archive
   - Otherwise: proceed to implementation

4. **Read context files**

   Read the files listed in `contextFiles` from the apply instructions output.
   The files depend on the schema being used:
   - **spec-driven**: proposal, specs, design, tasks
   - Other schemas: follow the contextFiles from CLI output

5. **Show current progress**

   Display:
   - Schema being used
   - Progress: "N/M tasks complete"
   - Remaining tasks overview
   - Dynamic instruction from CLI

6. **Implement tasks (loop until done or blocked)**

   For each pending task:
   - Show which task is being worked on
   - Make the code changes required
   - Keep changes minimal and focused
   - Mark task complete in the tasks file: `- [ ]` → `- [x]` (see the sketch after this list)
   - Continue to next task

   **Pause if:**
   - Task is unclear → ask for clarification
   - Implementation reveals a design issue → suggest updating artifacts
   - Error or blocker encountered → report and wait for guidance
   - User interrupts
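
   For the checkbox flip, an edit this small suffices (GNU sed sketch; in practice match the exact task text rather than position):

   ```bash
   # Mark the first still-unchecked task in tasks.md as done.
   sed -i '0,/^- \[ \]/ s//- [x]/' tasks.md
   ```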

7. **On completion or pause, show status**

   Display:
   - Tasks completed this session
   - Overall progress: "N/M tasks complete"
   - If all done: suggest archive
   - If paused: explain why and wait for guidance

**Output During Implementation**

```
## Implementing: <change-name> (schema: <schema-name>)

Working on task 3/7: <task description>
[...implementation happening...]
✓ Task complete

Working on task 4/7: <task description>
[...implementation happening...]
✓ Task complete
```

**Output On Completion**

```
## Implementation Complete

**Change:** <change-name>
**Schema:** <schema-name>
**Progress:** 7/7 tasks complete ✓

### Completed This Session
- [x] Task 1
- [x] Task 2
...

All tasks complete! Ready to archive this change.
```

**Output On Pause (Issue Encountered)**

```
## Implementation Paused

**Change:** <change-name>
**Schema:** <schema-name>
**Progress:** 4/7 tasks complete

### Issue Encountered
<description of the issue>

**Options:**
1. <option 1>
2. <option 2>
3. Other approach

What would you like to do?
```

**Guardrails**
- Keep going through tasks until done or blocked
- Always read context files before starting (from the apply instructions output)
- If a task is ambiguous, pause and ask before implementing
- If implementation reveals issues, pause and suggest artifact updates
- Keep code changes minimal and scoped to each task
- Update the task checkbox immediately after completing each task
- Pause on errors, blockers, or unclear requirements - don't guess
- Use contextFiles from CLI output, don't assume specific file names

**Fluid Workflow Integration**

This skill supports the "actions on a change" model:

- **Can be invoked anytime**: Before all artifacts are done (if tasks exist), after partial implementation, interleaved with other actions
- **Allows artifact updates**: If implementation reveals design issues, suggest updating artifacts - not phase-locked, work fluidly

.codex/skills/openspec-archive-change/SKILL.md (new file, +114 lines)

---
name: openspec-archive-change
description: Archive a completed change in the experimental workflow. Use when the user wants to finalize and archive a change after implementation is complete.
license: MIT
compatibility: Requires openspec CLI.
metadata:
  author: openspec
  version: "1.0"
  generatedBy: "1.2.0"
---

Archive a completed change in the experimental workflow.

**Input**: Optionally specify a change name. If omitted, check if it can be inferred from conversation context. If vague or ambiguous, you MUST prompt with the available changes.

**Steps**

1. **If no change name provided, prompt for selection**

   Run `openspec list --json` to get available changes. Use the **AskUserQuestion tool** to let the user select.

   Show only active changes (not already archived).
   Include the schema used for each change if available.

   **IMPORTANT**: Do NOT guess or auto-select a change. Always let the user choose.

2. **Check artifact completion status**

   Run `openspec status --change "<name>" --json` to check artifact completion.

   Parse the JSON to understand:
   - `schemaName`: The workflow being used
   - `artifacts`: List of artifacts with their status (`done` or other)

   **If any artifacts are not `done`:**
   - Display a warning listing the incomplete artifacts
   - Use the **AskUserQuestion tool** to confirm the user wants to proceed
   - Proceed if the user confirms

3. **Check task completion status**

   Read the tasks file (typically `tasks.md`) to check for incomplete tasks.

   Count tasks marked with `- [ ]` (incomplete) vs `- [x]` (complete).
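
   A quick way to get those counts (a sketch; adjust the path to the change's actual tasks file):

   ```bash
   # grep -c prints 0 but exits non-zero when nothing matches, hence "|| true".
   incomplete=$(grep -c '^- \[ \]' "openspec/changes/<name>/tasks.md" || true)
   complete=$(grep -c '^- \[x\]' "openspec/changes/<name>/tasks.md" || true)
   echo "$complete complete / $incomplete incomplete"
   ```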

   **If incomplete tasks found:**
   - Display a warning showing the count of incomplete tasks
   - Use the **AskUserQuestion tool** to confirm the user wants to proceed
   - Proceed if the user confirms

   **If no tasks file exists:** Proceed without a task-related warning.

4. **Assess delta spec sync state**

   Check for delta specs at `openspec/changes/<name>/specs/`. If none exist, proceed without the sync prompt.

   **If delta specs exist:**
   - Compare each delta spec with its corresponding main spec at `openspec/specs/<capability>/spec.md`
   - Determine what changes would be applied (adds, modifications, removals, renames)
   - Show a combined summary before prompting
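
   One way to eyeball that comparison before summarizing (illustrative; assumes each delta spec mirrors the `openspec/specs/<capability>/spec.md` layout named above):

   ```bash
   # Diff every delta spec against its main counterpart.
   # "|| true" because diff exits 1 whenever the files differ.
   for delta in openspec/changes/<name>/specs/*/spec.md; do
     cap=$(basename "$(dirname "$delta")")
     echo "== $cap =="
     diff -u "openspec/specs/$cap/spec.md" "$delta" || true
   done
   ```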

   **Prompt options:**
   - If changes are needed: "Sync now (recommended)", "Archive without syncing"
   - If already synced: "Archive now", "Sync anyway", "Cancel"

   If the user chooses sync, use the Task tool (subagent_type: "general-purpose", prompt: "Use Skill tool to invoke openspec-sync-specs for change '<name>'. Delta spec analysis: <include the analyzed delta spec summary>"). Proceed to archive regardless of the choice.

5. **Perform the archive**

   Create the archive directory if it doesn't exist:

   ```bash
   mkdir -p openspec/changes/archive
   ```

   Generate the target name using the current date: `YYYY-MM-DD-<change-name>`

   **Check if the target already exists:**
   - If yes: Fail with an error and suggest renaming the existing archive or using a different date
   - If no: Move the change directory to the archive

   ```bash
   mv openspec/changes/<name> openspec/changes/archive/YYYY-MM-DD-<name>
   ```
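
   Spelled out with a real date and the existence check (a sketch; `date +%F` prints YYYY-MM-DD):

   ```bash
   target="openspec/changes/archive/$(date +%F)-<name>"
   if [ -e "$target" ]; then
     echo "Archive target already exists: $target" >&2
     exit 1
   fi
   mv "openspec/changes/<name>" "$target"
   ```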

6. **Display summary**

   Show an archive completion summary including:
   - Change name
   - Schema that was used
   - Archive location
   - Whether specs were synced (if applicable)
   - Note about any warnings (incomplete artifacts/tasks)

**Output On Success**

```
## Archive Complete

**Change:** <change-name>
**Schema:** <schema-name>
**Archived to:** openspec/changes/archive/YYYY-MM-DD-<name>/
**Specs:** ✓ Synced to main specs (or "No delta specs" or "Sync skipped")

All artifacts complete. All tasks complete.
```

**Guardrails**
- Always prompt for change selection if not provided
- Use the artifact graph (openspec status --json) for completion checking
- Don't block archive on warnings - just inform and confirm
- Preserve .openspec.yaml when moving to archive (it moves with the directory)
- Show a clear summary of what happened
- If sync is requested, use the openspec-sync-specs approach (agent-driven)
- If delta specs exist, always run the sync assessment and show the combined summary before prompting

.codex/skills/openspec-explore/SKILL.md (new file, +288 lines)

---
name: openspec-explore
description: Enter explore mode - a thinking partner for exploring ideas, investigating problems, and clarifying requirements. Use when the user wants to think through something before or during a change.
license: MIT
compatibility: Requires openspec CLI.
metadata:
  author: openspec
  version: "1.0"
  generatedBy: "1.2.0"
---

Enter explore mode. Think deeply. Visualize freely. Follow the conversation wherever it goes.

**IMPORTANT: Explore mode is for thinking, not implementing.** You may read files, search code, and investigate the codebase, but you must NEVER write code or implement features. If the user asks you to implement something, remind them to exit explore mode first and create a change proposal. You MAY create OpenSpec artifacts (proposals, designs, specs) if the user asks—that's capturing thinking, not implementing.

**This is a stance, not a workflow.** There are no fixed steps, no required sequence, no mandatory outputs. You're a thinking partner helping the user explore.

---

## The Stance

- **Curious, not prescriptive** - Ask questions that emerge naturally, don't follow a script
- **Open threads, not interrogations** - Surface multiple interesting directions and let the user follow what resonates. Don't funnel them through a single path of questions.
- **Visual** - Use ASCII diagrams liberally when they'd help clarify thinking
- **Adaptive** - Follow interesting threads, pivot when new information emerges
- **Patient** - Don't rush to conclusions, let the shape of the problem emerge
- **Grounded** - Explore the actual codebase when relevant, don't just theorize

---

## What You Might Do

Depending on what the user brings, you might:

**Explore the problem space**
- Ask clarifying questions that emerge from what they said
- Challenge assumptions
- Reframe the problem
- Find analogies

**Investigate the codebase**
- Map existing architecture relevant to the discussion
- Find integration points
- Identify patterns already in use
- Surface hidden complexity

**Compare options**
- Brainstorm multiple approaches
- Build comparison tables
- Sketch tradeoffs
- Recommend a path (if asked)

**Visualize**

```
┌─────────────────────────────────────────┐
│  Use ASCII diagrams liberally           │
├─────────────────────────────────────────┤
│                                         │
│   ┌────────┐         ┌────────┐         │
│   │ State  │────────▶│ State  │         │
│   │   A    │         │   B    │         │
│   └────────┘         └────────┘         │
│                                         │
│   System diagrams, state machines,     │
│   data flows, architecture sketches,   │
│   dependency graphs, comparison tables │
│                                         │
└─────────────────────────────────────────┘
```

**Surface risks and unknowns**
- Identify what could go wrong
- Find gaps in understanding
- Suggest spikes or investigations

---

## OpenSpec Awareness

You have full context of the OpenSpec system. Use it naturally, don't force it.

### Check for context

At the start, quickly check what exists:

```bash
openspec list --json
```

This tells you:
- If there are active changes
- Their names, schemas, and status
- What the user might be working on
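
For example, the listing might come back shaped something like this (a sketch - the change names are illustrative, and the field names are guesses rather than the CLI's documented schema):

```json
[
  { "name": "add-auth-system", "schema": "spec-driven", "status": "in-progress" },
  { "name": "add-dark-mode", "schema": "spec-driven", "status": "draft" }
]
```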

### When no change exists

Think freely. When insights crystallize, you might offer:

- "This feels solid enough to start a change. Want me to create a proposal?"
- Or keep exploring - no pressure to formalize

### When a change exists

If the user mentions a change or you detect one is relevant:

1. **Read existing artifacts for context**
   - `openspec/changes/<name>/proposal.md`
   - `openspec/changes/<name>/design.md`
   - `openspec/changes/<name>/tasks.md`
   - etc.

2. **Reference them naturally in conversation**
   - "Your design mentions using Redis, but we just realized SQLite fits better..."
   - "The proposal scopes this to premium users, but we're now thinking everyone..."

3. **Offer to capture when decisions are made**

   | Insight Type | Where to Capture |
   |--------------|------------------|
   | New requirement discovered | `specs/<capability>/spec.md` |
   | Requirement changed | `specs/<capability>/spec.md` |
   | Design decision made | `design.md` |
   | Scope changed | `proposal.md` |
   | New work identified | `tasks.md` |
   | Assumption invalidated | Relevant artifact |

   Example offers:
   - "That's a design decision. Capture it in design.md?"
   - "This is a new requirement. Add it to specs?"
   - "This changes scope. Update the proposal?"

4. **The user decides** - Offer and move on. Don't pressure. Don't auto-capture.

---

## What You Don't Have To Do

- Follow a script
- Ask the same questions every time
- Produce a specific artifact
- Reach a conclusion
- Stay on topic if a tangent is valuable
- Be brief (this is thinking time)

---

## Handling Different Entry Points

**User brings a vague idea:**

```
User: I'm thinking about adding real-time collaboration

You: Real-time collab is a big space. Let me think about this...

    COLLABORATION SPECTRUM
    ════════════════════════════════════════════

    Awareness         Coordination          Sync
        │                  │                  │
        ▼                  ▼                  ▼
    ┌────────┐         ┌────────┐         ┌────────┐
    │Presence│         │Cursors │         │  CRDT  │
    │  "3    │         │ Multi  │         │Conflict│
    │ online"│         │ select │         │  free  │
    └────────┘         └────────┘         └────────┘
        │                  │                  │
     trivial           moderate            complex

Where's your head at?
```

**User brings a specific problem:**

```
User: The auth system is a mess

You: [reads codebase]

    ┌─────────────────────────────────────────────┐
    │              CURRENT AUTH FLOW              │
    └─────────────────────────────────────────────┘
                          │
            ┌─────────────┼─────────────┐
            ▼             ▼             ▼
       ┌─────────┐   ┌─────────┐   ┌─────────┐
       │ Google  │   │ GitHub  │   │  Email  │
       │  OAuth  │   │  OAuth  │   │  Magic  │
       └────┬────┘   └────┬────┘   └────┬────┘
            │             │             │
            └─────────────┼─────────────┘
                          ▼
                    ┌───────────┐
                    │  Session  │
                    └─────┬─────┘
                          │
                          ▼
                    ┌───────────┐
                    │   Perms   │
                    └───────────┘

I see three tangles. Which one's burning?
```

**User is stuck mid-implementation:**

```
User: /opsx:explore add-auth-system
The OAuth integration is more complex than expected

You: [reads change artifacts]

You're on task 4: "Implement OAuth flow"

Let me trace what's involved...

[draws diagram, explores options, suggests paths]

Want to update the design to reflect this?
Or add a spike task to investigate?
```

**User wants to compare options:**

```
User: Should we use Postgres or SQLite?

You: Generic answer is boring. What's the context?

User: A CLI tool that tracks local dev environments

You: That changes everything.

    ┌─────────────────────────────────────────────────┐
    │              CLI TOOL DATA STORAGE              │
    └─────────────────────────────────────────────────┘

    Key constraints:
    • No daemon running
    • Must work offline
    • Single user

                  SQLite        Postgres
    Deployment    embedded ✓    needs server ✗
    Offline       yes ✓         no ✗
    Single file   yes ✓         no ✗

SQLite. Not even close.

Unless... is there a sync component?
```

---

## Ending Discovery

There's no required ending. Discovery might:

- **Flow into a proposal**: "Ready to start? I can create a change proposal."
- **Result in artifact updates**: "Updated design.md with these decisions"
- **Just provide clarity**: User has what they need, moves on
- **Continue later**: "We can pick this up anytime"

When it feels like things are crystallizing, you might summarize:

```
## What We Figured Out

**The problem**: [crystallized understanding]

**The approach**: [if one emerged]

**Open questions**: [if any remain]

**Next steps** (if ready):
- Create a change proposal
- Keep exploring: just keep talking
```

But this summary is optional. Sometimes the thinking IS the value.

---

## Guardrails

- **Don't implement** - Never write code or implement features. Creating OpenSpec artifacts is fine, writing application code is not.
- **Don't fake understanding** - If something is unclear, dig deeper
- **Don't rush** - Discovery is thinking time, not task time
- **Don't force structure** - Let patterns emerge naturally
- **Don't auto-capture** - Offer to save insights, don't just do it
- **Do visualize** - A good diagram is worth many paragraphs
- **Do explore the codebase** - Ground discussions in reality
- **Do question assumptions** - Including the user's and your own

.codex/skills/openspec-propose/SKILL.md (new file, +110 lines)

---
name: openspec-propose
description: Propose a new change with all artifacts generated in one step. Use when the user wants to quickly describe what they want to build and get a complete proposal with design, specs, and tasks ready for implementation.
license: MIT
compatibility: Requires openspec CLI.
metadata:
  author: openspec
  version: "1.0"
  generatedBy: "1.2.0"
---

Propose a new change - create the change and generate all artifacts in one step.

I'll create a change with artifacts:
- proposal.md (what & why)
- design.md (how)
- tasks.md (implementation steps)

When ready to implement, run /opsx:apply

---

**Input**: The user's request should include a change name (kebab-case) OR a description of what they want to build.

**Steps**

1. **If no clear input provided, ask what they want to build**

   Use the **AskUserQuestion tool** (open-ended, no preset options) to ask:
   > "What change do you want to work on? Describe what you want to build or fix."

   From their description, derive a kebab-case name (e.g., "add user authentication" → `add-user-auth`).

   **IMPORTANT**: Do NOT proceed without understanding what the user wants to build.

2. **Create the change directory**

   ```bash
   openspec new change "<name>"
   ```

   This creates a scaffolded change at `openspec/changes/<name>/` with `.openspec.yaml`.

3. **Get the artifact build order**

   ```bash
   openspec status --change "<name>" --json
   ```

   Parse the JSON to get:
   - `applyRequires`: array of artifact IDs needed before implementation (e.g., `["tasks"]`)
   - `artifacts`: list of all artifacts with their status and dependencies

4. **Create artifacts in sequence until apply-ready**

   Use the **TodoWrite tool** to track progress through the artifacts.

   Loop through artifacts in dependency order (artifacts with no pending dependencies first):

   a. **For each artifact that is `ready` (dependencies satisfied)**:
      - Get instructions:

        ```bash
        openspec instructions <artifact-id> --change "<name>" --json
        ```

      - The instructions JSON includes:
        - `context`: Project background (constraints for you - do NOT include in output)
        - `rules`: Artifact-specific rules (constraints for you - do NOT include in output)
        - `template`: The structure to use for your output file
        - `instruction`: Schema-specific guidance for this artifact type
        - `outputPath`: Where to write the artifact
        - `dependencies`: Completed artifacts to read for context
      - Read any completed dependency files for context
      - Create the artifact file using `template` as the structure
      - Apply `context` and `rules` as constraints - but do NOT copy them into the file
      - Show brief progress: "Created <artifact-id>"

   b. **Continue until all `applyRequires` artifacts are complete**
      - After creating each artifact, re-run `openspec status --change "<name>" --json`
      - Check if every artifact ID in `applyRequires` has `status: "done"` in the artifacts array
      - Stop when all `applyRequires` artifacts are done

   c. **If an artifact requires user input** (unclear context):
      - Use **AskUserQuestion tool** to clarify
      - Then continue with creation

5. **Show final status**

   ```bash
   openspec status --change "<name>"
   ```

**Output**

After completing all artifacts, summarize:
- Change name and location
- List of artifacts created with brief descriptions
- What's ready: "All artifacts created! Ready for implementation."
- Prompt: "Run `/opsx:apply` or ask me to implement to start working on the tasks."

**Artifact Creation Guidelines**

- Follow the `instruction` field from `openspec instructions` for each artifact type
- The schema defines what each artifact should contain - follow it
- Read dependency artifacts for context before creating new ones
- Use `template` as the structure for your output file - fill in its sections
- **IMPORTANT**: `context` and `rules` are constraints for YOU, not content for the file
- Do NOT copy `<context>`, `<rules>`, `<project_context>` blocks into the artifact
- These guide what you write, but should never appear in the output

**Guardrails**
- Create ALL artifacts needed for implementation (as defined by the schema's `apply.requires`)
- Always read dependency artifacts before creating a new one
- If context is critically unclear, ask the user - but prefer making reasonable decisions to keep momentum
- If a change with that name already exists, ask whether the user wants to continue it or create a new one
- Verify each artifact file exists after writing before proceeding to the next

.cursor/commands/opsx-apply.md (new file, +152 lines)

---
name: /opsx-apply
id: opsx-apply
category: Workflow
description: Implement tasks from an OpenSpec change (Experimental)
---

Implement tasks from an OpenSpec change.

**Input**: Optionally specify a change name (e.g., `/opsx:apply add-auth`). If omitted, check if it can be inferred from conversation context. If vague or ambiguous, you MUST prompt with the available changes.

**Steps**

1. **Select the change**

   If a name is provided, use it. Otherwise:
   - Infer from conversation context if the user mentioned a change
   - Auto-select if only one active change exists
   - If ambiguous, run `openspec list --json` to get available changes and use the **AskUserQuestion tool** to let the user select

   Always announce: "Using change: <name>" and how to override (e.g., `/opsx:apply <other>`).

2. **Check status to understand the schema**

   ```bash
   openspec status --change "<name>" --json
   ```

   Parse the JSON to understand:
   - `schemaName`: The workflow being used (e.g., "spec-driven")
   - Which artifact contains the tasks (typically "tasks" for spec-driven; check status for others)

3. **Get apply instructions**

   ```bash
   openspec instructions apply --change "<name>" --json
   ```

   This returns:
   - Context file paths (varies by schema)
   - Progress (total, complete, remaining)
   - Task list with status
   - Dynamic instruction based on current state

   **Handle states:**
   - If `state: "blocked"` (missing artifacts): show message, suggest using `/opsx:continue`
   - If `state: "all_done"`: congratulate, suggest archive
   - Otherwise: proceed to implementation

4. **Read context files**

   Read the files listed in `contextFiles` from the apply instructions output.
   The files depend on the schema being used:
   - **spec-driven**: proposal, specs, design, tasks
   - Other schemas: follow the contextFiles from CLI output

5. **Show current progress**

   Display:
   - Schema being used
   - Progress: "N/M tasks complete"
   - Remaining tasks overview
   - Dynamic instruction from CLI

6. **Implement tasks (loop until done or blocked)**

   For each pending task:
   - Show which task is being worked on
   - Make the code changes required
   - Keep changes minimal and focused
   - Mark task complete in the tasks file: `- [ ]` → `- [x]`
   - Continue to next task

   **Pause if:**
   - Task is unclear → ask for clarification
   - Implementation reveals a design issue → suggest updating artifacts
   - Error or blocker encountered → report and wait for guidance
   - User interrupts

7. **On completion or pause, show status**

   Display:
   - Tasks completed this session
   - Overall progress: "N/M tasks complete"
   - If all done: suggest archive
   - If paused: explain why and wait for guidance

**Output During Implementation**

```
## Implementing: <change-name> (schema: <schema-name>)

Working on task 3/7: <task description>
[...implementation happening...]
✓ Task complete

Working on task 4/7: <task description>
[...implementation happening...]
✓ Task complete
```

**Output On Completion**

```
## Implementation Complete

**Change:** <change-name>
**Schema:** <schema-name>
**Progress:** 7/7 tasks complete ✓

### Completed This Session
- [x] Task 1
- [x] Task 2
...

All tasks complete! You can archive this change with `/opsx:archive`.
```

**Output On Pause (Issue Encountered)**

```
## Implementation Paused

**Change:** <change-name>
**Schema:** <schema-name>
**Progress:** 4/7 tasks complete

### Issue Encountered
<description of the issue>

**Options:**
1. <option 1>
2. <option 2>
3. Other approach

What would you like to do?
```

**Guardrails**
- Keep going through tasks until done or blocked
- Always read context files before starting (from the apply instructions output)
- If a task is ambiguous, pause and ask before implementing
- If implementation reveals issues, pause and suggest artifact updates
- Keep code changes minimal and scoped to each task
- Update the task checkbox immediately after completing each task
- Pause on errors, blockers, or unclear requirements - don't guess
- Use contextFiles from CLI output, don't assume specific file names

**Fluid Workflow Integration**

This skill supports the "actions on a change" model:

- **Can be invoked anytime**: Before all artifacts are done (if tasks exist), after partial implementation, interleaved with other actions
- **Allows artifact updates**: If implementation reveals design issues, suggest updating artifacts - not phase-locked, work fluidly

.cursor/commands/opsx-archive.md (new file, +157 lines)

---
name: /opsx-archive
id: opsx-archive
category: Workflow
description: Archive a completed change in the experimental workflow
---

Archive a completed change in the experimental workflow.

**Input**: Optionally specify a change name after `/opsx:archive` (e.g., `/opsx:archive add-auth`). If omitted, check if it can be inferred from conversation context. If vague or ambiguous, you MUST prompt with the available changes.

**Steps**

1. **If no change name provided, prompt for selection**

   Run `openspec list --json` to get available changes. Use the **AskUserQuestion tool** to let the user select.

   Show only active changes (not already archived).
   Include the schema used for each change if available.

   **IMPORTANT**: Do NOT guess or auto-select a change. Always let the user choose.

2. **Check artifact completion status**

   Run `openspec status --change "<name>" --json` to check artifact completion.

   Parse the JSON to understand:
   - `schemaName`: The workflow being used
   - `artifacts`: List of artifacts with their status (`done` or other)

   **If any artifacts are not `done`:**
   - Display a warning listing the incomplete artifacts
   - Prompt the user for confirmation to continue
   - Proceed if the user confirms

3. **Check task completion status**

   Read the tasks file (typically `tasks.md`) to check for incomplete tasks.

   Count tasks marked with `- [ ]` (incomplete) vs `- [x]` (complete).

   **If incomplete tasks found:**
   - Display a warning showing the count of incomplete tasks
   - Prompt the user for confirmation to continue
   - Proceed if the user confirms

   **If no tasks file exists:** Proceed without a task-related warning.

4. **Assess delta spec sync state**

   Check for delta specs at `openspec/changes/<name>/specs/`. If none exist, proceed without the sync prompt.

   **If delta specs exist:**
   - Compare each delta spec with its corresponding main spec at `openspec/specs/<capability>/spec.md`
   - Determine what changes would be applied (adds, modifications, removals, renames)
   - Show a combined summary before prompting

   **Prompt options:**
   - If changes are needed: "Sync now (recommended)", "Archive without syncing"
   - If already synced: "Archive now", "Sync anyway", "Cancel"

   If the user chooses sync, use the Task tool (subagent_type: "general-purpose", prompt: "Use Skill tool to invoke openspec-sync-specs for change '<name>'. Delta spec analysis: <include the analyzed delta spec summary>"). Proceed to archive regardless of the choice.

5. **Perform the archive**

   Create the archive directory if it doesn't exist:

   ```bash
   mkdir -p openspec/changes/archive
   ```

   Generate the target name using the current date: `YYYY-MM-DD-<change-name>`

   **Check if the target already exists:**
   - If yes: Fail with an error and suggest renaming the existing archive or using a different date
   - If no: Move the change directory to the archive

   ```bash
   mv openspec/changes/<name> openspec/changes/archive/YYYY-MM-DD-<name>
   ```

6. **Display summary**

   Show an archive completion summary including:
   - Change name
   - Schema that was used
   - Archive location
   - Spec sync status (synced / sync skipped / no delta specs)
   - Note about any warnings (incomplete artifacts/tasks)

**Output On Success**

```
## Archive Complete

**Change:** <change-name>
**Schema:** <schema-name>
**Archived to:** openspec/changes/archive/YYYY-MM-DD-<name>/
**Specs:** ✓ Synced to main specs

All artifacts complete. All tasks complete.
```

**Output On Success (No Delta Specs)**

```
## Archive Complete

**Change:** <change-name>
**Schema:** <schema-name>
**Archived to:** openspec/changes/archive/YYYY-MM-DD-<name>/
**Specs:** No delta specs

All artifacts complete. All tasks complete.
```

**Output On Success With Warnings**

```
## Archive Complete (with warnings)

**Change:** <change-name>
**Schema:** <schema-name>
**Archived to:** openspec/changes/archive/YYYY-MM-DD-<name>/
**Specs:** Sync skipped (user chose to skip)

**Warnings:**
- Archived with 2 incomplete artifacts
- Archived with 3 incomplete tasks
- Delta spec sync was skipped (user chose to skip)

Review the archive if this was not intentional.
```

**Output On Error (Archive Exists)**

```
## Archive Failed

**Change:** <change-name>
**Target:** openspec/changes/archive/YYYY-MM-DD-<name>/

Target archive directory already exists.

**Options:**
1. Rename the existing archive
2. Delete the existing archive if it's a duplicate
3. Wait until a different date to archive
```

**Guardrails**
- Always prompt for change selection if not provided
- Use the artifact graph (openspec status --json) for completion checking
- Don't block archive on warnings - just inform and confirm
- Preserve .openspec.yaml when moving to archive (it moves with the directory)
- Show a clear summary of what happened
- If sync is requested, use the Skill tool to invoke `openspec-sync-specs` (agent-driven)
- If delta specs exist, always run the sync assessment and show the combined summary before prompting

.cursor/commands/opsx-explore.md (new file, +173 lines)

---
name: /opsx-explore
id: opsx-explore
category: Workflow
description: "Enter explore mode - think through ideas, investigate problems, clarify requirements"
---

Enter explore mode. Think deeply. Visualize freely. Follow the conversation wherever it goes.

**IMPORTANT: Explore mode is for thinking, not implementing.** You may read files, search code, and investigate the codebase, but you must NEVER write code or implement features. If the user asks you to implement something, remind them to exit explore mode first and create a change proposal. You MAY create OpenSpec artifacts (proposals, designs, specs) if the user asks—that's capturing thinking, not implementing.

**This is a stance, not a workflow.** There are no fixed steps, no required sequence, no mandatory outputs. You're a thinking partner helping the user explore.

**Input**: The argument after `/opsx:explore` is whatever the user wants to think about. Could be:
- A vague idea: "real-time collaboration"
- A specific problem: "the auth system is getting unwieldy"
- A change name: "add-dark-mode" (to explore in the context of that change)
- A comparison: "postgres vs sqlite for this"
- Nothing (just enter explore mode)

---

## The Stance

- **Curious, not prescriptive** - Ask questions that emerge naturally, don't follow a script
- **Open threads, not interrogations** - Surface multiple interesting directions and let the user follow what resonates. Don't funnel them through a single path of questions.
- **Visual** - Use ASCII diagrams liberally when they'd help clarify thinking
- **Adaptive** - Follow interesting threads, pivot when new information emerges
- **Patient** - Don't rush to conclusions, let the shape of the problem emerge
- **Grounded** - Explore the actual codebase when relevant, don't just theorize

---

## What You Might Do

Depending on what the user brings, you might:

**Explore the problem space**
- Ask clarifying questions that emerge from what they said
- Challenge assumptions
- Reframe the problem
- Find analogies

**Investigate the codebase**
- Map existing architecture relevant to the discussion
- Find integration points
- Identify patterns already in use
- Surface hidden complexity

**Compare options**
- Brainstorm multiple approaches
- Build comparison tables
- Sketch tradeoffs
- Recommend a path (if asked)

**Visualize**

```
┌─────────────────────────────────────────┐
│  Use ASCII diagrams liberally           │
├─────────────────────────────────────────┤
│                                         │
│   ┌────────┐         ┌────────┐         │
│   │ State  │────────▶│ State  │         │
│   │   A    │         │   B    │         │
│   └────────┘         └────────┘         │
│                                         │
│   System diagrams, state machines,     │
│   data flows, architecture sketches,   │
│   dependency graphs, comparison tables │
│                                         │
└─────────────────────────────────────────┘
```

**Surface risks and unknowns**
- Identify what could go wrong
- Find gaps in understanding
- Suggest spikes or investigations

---

## OpenSpec Awareness

You have full context of the OpenSpec system. Use it naturally, don't force it.

### Check for context

At the start, quickly check what exists:

```bash
openspec list --json
```

This tells you:
- If there are active changes
- Their names, schemas, and status
- What the user might be working on

If the user mentioned a specific change name, read its artifacts for context.

### When no change exists

Think freely. When insights crystallize, you might offer:

- "This feels solid enough to start a change. Want me to create a proposal?"
- Or keep exploring - no pressure to formalize

### When a change exists

If the user mentions a change or you detect one is relevant:

1. **Read existing artifacts for context**
   - `openspec/changes/<name>/proposal.md`
   - `openspec/changes/<name>/design.md`
   - `openspec/changes/<name>/tasks.md`
   - etc.

2. **Reference them naturally in conversation**
   - "Your design mentions using Redis, but we just realized SQLite fits better..."
   - "The proposal scopes this to premium users, but we're now thinking everyone..."

3. **Offer to capture when decisions are made**

   | Insight Type | Where to Capture |
   |--------------|------------------|
   | New requirement discovered | `specs/<capability>/spec.md` |
   | Requirement changed | `specs/<capability>/spec.md` |
   | Design decision made | `design.md` |
   | Scope changed | `proposal.md` |
   | New work identified | `tasks.md` |
   | Assumption invalidated | Relevant artifact |

   Example offers:
   - "That's a design decision. Capture it in design.md?"
   - "This is a new requirement. Add it to specs?"
   - "This changes scope. Update the proposal?"

4. **The user decides** - Offer and move on. Don't pressure. Don't auto-capture.

---

## What You Don't Have To Do

- Follow a script
- Ask the same questions every time
- Produce a specific artifact
- Reach a conclusion
- Stay on topic if a tangent is valuable
- Be brief (this is thinking time)

---

## Ending Discovery

There's no required ending. Discovery might:

- **Flow into a proposal**: "Ready to start? I can create a change proposal."
- **Result in artifact updates**: "Updated design.md with these decisions"
- **Just provide clarity**: User has what they need, moves on
- **Continue later**: "We can pick this up anytime"

When things crystallize, you might offer a summary - but it's optional. Sometimes the thinking IS the value.

---

## Guardrails

- **Don't implement** - Never write code or implement features. Creating OpenSpec artifacts is fine, writing application code is not.
- **Don't fake understanding** - If something is unclear, dig deeper
- **Don't rush** - Discovery is thinking time, not task time
- **Don't force structure** - Let patterns emerge naturally
- **Don't auto-capture** - Offer to save insights, don't just do it
- **Do visualize** - A good diagram is worth many paragraphs
- **Do explore the codebase** - Ground discussions in reality
- **Do question assumptions** - Including the user's and your own

.cursor/commands/opsx-propose.md (new file, +106 lines)

---
name: /opsx-propose
id: opsx-propose
category: Workflow
description: Propose a new change - create it and generate all artifacts in one step
---

Propose a new change - create the change and generate all artifacts in one step.

I'll create a change with artifacts:
- proposal.md (what & why)
- design.md (how)
- tasks.md (implementation steps)

When ready to implement, run /opsx:apply

---

**Input**: The argument after `/opsx:propose` is the change name (kebab-case), OR a description of what the user wants to build.

**Steps**

1. **If no input provided, ask what they want to build**

   Use the **AskUserQuestion tool** (open-ended, no preset options) to ask:
   > "What change do you want to work on? Describe what you want to build or fix."

   From their description, derive a kebab-case name (e.g., "add user authentication" → `add-user-auth`).

   **IMPORTANT**: Do NOT proceed without understanding what the user wants to build.

2. **Create the change directory**

   ```bash
   openspec new change "<name>"
   ```

   This creates a scaffolded change at `openspec/changes/<name>/` with `.openspec.yaml`.

3. **Get the artifact build order**

   ```bash
   openspec status --change "<name>" --json
   ```

   Parse the JSON to get:
   - `applyRequires`: array of artifact IDs needed before implementation (e.g., `["tasks"]`)
   - `artifacts`: list of all artifacts with their status and dependencies

4. **Create artifacts in sequence until apply-ready**

   Use the **TodoWrite tool** to track progress through the artifacts.

   Loop through artifacts in dependency order (artifacts with no pending dependencies first):

   a. **For each artifact that is `ready` (dependencies satisfied)**:
      - Get instructions:

        ```bash
        openspec instructions <artifact-id> --change "<name>" --json
        ```

      - The instructions JSON includes:
        - `context`: Project background (constraints for you - do NOT include in output)
        - `rules`: Artifact-specific rules (constraints for you - do NOT include in output)
        - `template`: The structure to use for your output file
        - `instruction`: Schema-specific guidance for this artifact type
        - `outputPath`: Where to write the artifact
        - `dependencies`: Completed artifacts to read for context
      - Read any completed dependency files for context
      - Create the artifact file using `template` as the structure
      - Apply `context` and `rules` as constraints - but do NOT copy them into the file
      - Show brief progress: "Created <artifact-id>"

   b. **Continue until all `applyRequires` artifacts are complete**
      - After creating each artifact, re-run `openspec status --change "<name>" --json`
      - Check if every artifact ID in `applyRequires` has `status: "done"` in the artifacts array
      - Stop when all `applyRequires` artifacts are done

   c. **If an artifact requires user input** (unclear context):
      - Use **AskUserQuestion tool** to clarify
      - Then continue with creation

5. **Show final status**

   ```bash
   openspec status --change "<name>"
   ```

**Output**

After completing all artifacts, summarize:
- Change name and location
- List of artifacts created with brief descriptions
- What's ready: "All artifacts created! Ready for implementation."
- Prompt: "Run `/opsx:apply` to start implementing."

**Artifact Creation Guidelines**

- Follow the `instruction` field from `openspec instructions` for each artifact type
- The schema defines what each artifact should contain - follow it
- Read dependency artifacts for context before creating new ones
- Use `template` as the structure for your output file - fill in its sections
- **IMPORTANT**: `context` and `rules` are constraints for YOU, not content for the file
- Do NOT copy `<context>`, `<rules>`, `<project_context>` blocks into the artifact
- These guide what you write, but should never appear in the output

**Guardrails**
- Create ALL artifacts needed for implementation (as defined by the schema's `apply.requires`)
- Always read dependency artifacts before creating a new one
- If context is critically unclear, ask the user - but prefer making reasonable decisions to keep momentum
- If a change with that name already exists, ask whether the user wants to continue it or create a new one
- Verify each artifact file exists after writing before proceeding to the next
156
.cursor/skills/openspec-apply-change/SKILL.md
Normal file
156
.cursor/skills/openspec-apply-change/SKILL.md
Normal file
@@ -0,0 +1,156 @@
---
name: openspec-apply-change
description: Implement tasks from an OpenSpec change. Use when the user wants to start implementing, continue implementation, or work through tasks.
license: MIT
compatibility: Requires openspec CLI.
metadata:
  author: openspec
  version: "1.0"
  generatedBy: "1.2.0"
---

Implement tasks from an OpenSpec change.

**Input**: Optionally specify a change name. If omitted, check if it can be inferred from conversation context. If vague or ambiguous, you MUST prompt with the available changes.

**Steps**

1. **Select the change**

   If a name is provided, use it. Otherwise:
   - Infer from conversation context if the user mentioned a change
   - Auto-select if only one active change exists
   - If ambiguous, run `openspec list --json` to get available changes and use the **AskUserQuestion tool** to let the user select

   Always announce: "Using change: <name>" and how to override (e.g., `/opsx:apply <other>`).

2. **Check status to understand the schema**

   ```bash
   openspec status --change "<name>" --json
   ```

   Parse the JSON to understand:
   - `schemaName`: The workflow being used (e.g., "spec-driven")
   - Which artifact contains the tasks (typically "tasks" for spec-driven; check status for other schemas)
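   Where helpful, pull the one field this step keys on directly; a minimal sketch, assuming `jq` is installed (the change name is illustrative):

   ```bash
   # Extract the schema name from the status JSON (sketch, assumes jq).
   openspec status --change "add-user-auth" --json | jq -r '.schemaName'
   ```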
3. **Get apply instructions**

   ```bash
   openspec instructions apply --change "<name>" --json
   ```

   This returns:
   - Context file paths (varies by schema - could be proposal/specs/design/tasks or spec/tests/implementation/docs)
   - Progress (total, complete, remaining)
   - Task list with status
   - Dynamic instruction based on current state

   **Handle states:**
   - If `state: "blocked"` (missing artifacts): show a message, suggest using openspec-continue-change
   - If `state: "all_done"`: congratulate, suggest archiving
   - Otherwise: proceed to implementation

4. **Read context files**

   Read the files listed in `contextFiles` from the apply instructions output.
   The files depend on the schema being used:
   - **spec-driven**: proposal, specs, design, tasks
   - Other schemas: follow the contextFiles from the CLI output

5. **Show current progress**

   Display:
   - Schema being used
   - Progress: "N/M tasks complete"
   - Remaining tasks overview
   - Dynamic instruction from CLI

6. **Implement tasks (loop until done or blocked)**

   For each pending task:
   - Show which task is being worked on
   - Make the code changes required
   - Keep changes minimal and focused
   - Mark the task complete in the tasks file: `- [ ]` → `- [x]` (see the sketch after this list)
   - Continue to the next task
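   One possible way to flip a checkbox from the shell rather than an editor; the task text and path are illustrative, not from the change:

   ```bash
   # Hypothetical example: mark one task done in tasks.md.
   # On macOS/BSD sed, use `sed -i ''` instead of `sed -i`.
   sed -i 's/^- \[ \] Add login endpoint$/- [x] Add login endpoint/' \
     openspec/changes/add-user-auth/tasks.md
   ```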
   **Pause if:**
   - Task is unclear → ask for clarification
   - Implementation reveals a design issue → suggest updating artifacts
   - Error or blocker encountered → report and wait for guidance
   - User interrupts

7. **On completion or pause, show status**

   Display:
   - Tasks completed this session
   - Overall progress: "N/M tasks complete"
   - If all done: suggest archive
   - If paused: explain why and wait for guidance

**Output During Implementation**

```
## Implementing: <change-name> (schema: <schema-name>)

Working on task 3/7: <task description>
[...implementation happening...]
✓ Task complete

Working on task 4/7: <task description>
[...implementation happening...]
✓ Task complete
```

**Output On Completion**

```
## Implementation Complete

**Change:** <change-name>
**Schema:** <schema-name>
**Progress:** 7/7 tasks complete ✓

### Completed This Session
- [x] Task 1
- [x] Task 2
...

All tasks complete! Ready to archive this change.
```

**Output On Pause (Issue Encountered)**

```
## Implementation Paused

**Change:** <change-name>
**Schema:** <schema-name>
**Progress:** 4/7 tasks complete

### Issue Encountered
<description of the issue>

**Options:**
1. <option 1>
2. <option 2>
3. Other approach

What would you like to do?
```

**Guardrails**

- Keep going through tasks until done or blocked
- Always read context files before starting (from the apply instructions output)
- If a task is ambiguous, pause and ask before implementing
- If implementation reveals issues, pause and suggest artifact updates
- Keep code changes minimal and scoped to each task
- Update the task checkbox immediately after completing each task
- Pause on errors, blockers, or unclear requirements - don't guess
- Use contextFiles from the CLI output; don't assume specific file names

**Fluid Workflow Integration**

This skill supports the "actions on a change" model:

- **Can be invoked anytime**: Before all artifacts are done (if tasks exist), after partial implementation, interleaved with other actions
- **Allows artifact updates**: If implementation reveals design issues, suggest updating artifacts - not phase-locked, work fluidly
114 .cursor/skills/openspec-archive-change/SKILL.md Normal file
@@ -0,0 +1,114 @@
---
name: openspec-archive-change
description: Archive a completed change in the experimental workflow. Use when the user wants to finalize and archive a change after implementation is complete.
license: MIT
compatibility: Requires openspec CLI.
metadata:
  author: openspec
  version: "1.0"
  generatedBy: "1.2.0"
---

Archive a completed change in the experimental workflow.

**Input**: Optionally specify a change name. If omitted, check if it can be inferred from conversation context. If vague or ambiguous, you MUST prompt with the available changes.

**Steps**

1. **If no change name provided, prompt for selection**

   Run `openspec list --json` to get available changes. Use the **AskUserQuestion tool** to let the user select.

   Show only active changes (not already archived).
   Include the schema used for each change if available.

   **IMPORTANT**: Do NOT guess or auto-select a change. Always let the user choose.

2. **Check artifact completion status**

   Run `openspec status --change "<name>" --json` to check artifact completion.

   Parse the JSON to understand:
   - `schemaName`: The workflow being used
   - `artifacts`: List of artifacts with their status (`done` or other)

   **If any artifacts are not `done`:**
   - Display a warning listing the incomplete artifacts
   - Use the **AskUserQuestion tool** to confirm the user wants to proceed
   - Proceed if the user confirms

3. **Check task completion status**

   Read the tasks file (typically `tasks.md`) to check for incomplete tasks.

   Count tasks marked with `- [ ]` (incomplete) vs `- [x]` (complete) - see the sketch below.
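   A quick way to get both counts, assuming the change keeps its checklist in `tasks.md` (`grep -c` prints the number of matching lines; the path is illustrative):

   ```bash
   # Count incomplete vs complete tasks (sketch; hypothetical change name).
   grep -c '^- \[ \]' openspec/changes/add-user-auth/tasks.md   # incomplete
   grep -c '^- \[x\]' openspec/changes/add-user-auth/tasks.md   # complete
   ```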
   **If incomplete tasks are found:**
   - Display a warning showing the count of incomplete tasks
   - Use the **AskUserQuestion tool** to confirm the user wants to proceed
   - Proceed if the user confirms

   **If no tasks file exists:** Proceed without a task-related warning.

4. **Assess delta spec sync state**

   Check for delta specs at `openspec/changes/<name>/specs/`. If none exist, proceed without a sync prompt.

   **If delta specs exist:**
   - Compare each delta spec with its corresponding main spec at `openspec/specs/<capability>/spec.md`
   - Determine what changes would be applied (adds, modifications, removals, renames)
   - Show a combined summary before prompting

   **Prompt options:**
   - If changes are needed: "Sync now (recommended)", "Archive without syncing"
   - If already synced: "Archive now", "Sync anyway", "Cancel"

   If the user chooses sync, use the Task tool (subagent_type: "general-purpose", prompt: "Use Skill tool to invoke openspec-sync-specs for change '<name>'. Delta spec analysis: <include the analyzed delta spec summary>"). Proceed to archive regardless of choice.

5. **Perform the archive**

   Create the archive directory if it doesn't exist:

   ```bash
   mkdir -p openspec/changes/archive
   ```

   Generate the target name using the current date: `YYYY-MM-DD-<change-name>`

   **Check if the target already exists:**
   - If yes: Fail with an error; suggest renaming the existing archive or using a different date
   - If no: Move the change directory to the archive

   ```bash
   mv openspec/changes/<name> openspec/changes/archive/YYYY-MM-DD-<name>
   ```
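   Put together, step 5 might look like this (a sketch; `date +%F` prints `YYYY-MM-DD`, and `<name>` stands for the real change name):

   ```bash
   # End-to-end sketch of the archive move.
   target="openspec/changes/archive/$(date +%F)-<name>"
   if [ -e "$target" ]; then
     echo "Archive target already exists: $target" >&2
   else
     mkdir -p openspec/changes/archive
     mv "openspec/changes/<name>" "$target"
   fi
   ```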
6. **Display summary**

   Show an archive completion summary including:
   - Change name
   - Schema that was used
   - Archive location
   - Whether specs were synced (if applicable)
   - Note about any warnings (incomplete artifacts/tasks)

**Output On Success**

```
## Archive Complete

**Change:** <change-name>
**Schema:** <schema-name>
**Archived to:** openspec/changes/archive/YYYY-MM-DD-<name>/
**Specs:** ✓ Synced to main specs (or "No delta specs" or "Sync skipped")

All artifacts complete. All tasks complete.
```

**Guardrails**

- Always prompt for change selection if not provided
- Use the artifact graph (`openspec status --json`) for completion checking
- Don't block archive on warnings - just inform and confirm
- Preserve .openspec.yaml when moving to archive (it moves with the directory)
- Show a clear summary of what happened
- If sync is requested, use the openspec-sync-specs approach (agent-driven)
- If delta specs exist, always run the sync assessment and show the combined summary before prompting
288 .cursor/skills/openspec-explore/SKILL.md Normal file
@@ -0,0 +1,288 @@
---
name: openspec-explore
description: Enter explore mode - a thinking partner for exploring ideas, investigating problems, and clarifying requirements. Use when the user wants to think through something before or during a change.
license: MIT
compatibility: Requires openspec CLI.
metadata:
  author: openspec
  version: "1.0"
  generatedBy: "1.2.0"
---

Enter explore mode. Think deeply. Visualize freely. Follow the conversation wherever it goes.

**IMPORTANT: Explore mode is for thinking, not implementing.** You may read files, search code, and investigate the codebase, but you must NEVER write code or implement features. If the user asks you to implement something, remind them to exit explore mode first and create a change proposal. You MAY create OpenSpec artifacts (proposals, designs, specs) if the user asks—that's capturing thinking, not implementing.

**This is a stance, not a workflow.** There are no fixed steps, no required sequence, no mandatory outputs. You're a thinking partner helping the user explore.

---

## The Stance

- **Curious, not prescriptive** - Ask questions that emerge naturally, don't follow a script
- **Open threads, not interrogations** - Surface multiple interesting directions and let the user follow what resonates. Don't funnel them through a single path of questions.
- **Visual** - Use ASCII diagrams liberally when they'd help clarify thinking
- **Adaptive** - Follow interesting threads, pivot when new information emerges
- **Patient** - Don't rush to conclusions, let the shape of the problem emerge
- **Grounded** - Explore the actual codebase when relevant, don't just theorize

---

## What You Might Do

Depending on what the user brings, you might:

**Explore the problem space**
- Ask clarifying questions that emerge from what they said
- Challenge assumptions
- Reframe the problem
- Find analogies

**Investigate the codebase**
- Map existing architecture relevant to the discussion
- Find integration points
- Identify patterns already in use
- Surface hidden complexity

**Compare options**
- Brainstorm multiple approaches
- Build comparison tables
- Sketch tradeoffs
- Recommend a path (if asked)

**Visualize**

```
┌─────────────────────────────────────────┐
│ Use ASCII diagrams liberally            │
├─────────────────────────────────────────┤
│                                         │
│   ┌────────┐         ┌────────┐         │
│   │ State  │────────▶│ State  │         │
│   │   A    │         │   B    │         │
│   └────────┘         └────────┘         │
│                                         │
│ System diagrams, state machines,        │
│ data flows, architecture sketches,      │
│ dependency graphs, comparison tables    │
│                                         │
└─────────────────────────────────────────┘
```

**Surface risks and unknowns**
- Identify what could go wrong
- Find gaps in understanding
- Suggest spikes or investigations

---

## OpenSpec Awareness

You have full context of the OpenSpec system. Use it naturally, don't force it.

### Check for context

At the start, quickly check what exists:

```bash
openspec list --json
```

This tells you:
- Whether there are active changes
- Their names, schemas, and status
- What the user might be working on
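If you just want the change names and schemas at a glance, a sketch, assuming `jq` and assuming the list output is an array with `name` and `schema` fields (an assumption about the JSON shape; adjust to the real output):

```bash
# Skim active changes (sketch; field names are assumptions).
openspec list --json | jq -r '.[] | "\(.name)  \(.schema // "-")"'
```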
### When no change exists

Think freely. When insights crystallize, you might offer:

- "This feels solid enough to start a change. Want me to create a proposal?"
- Or keep exploring - no pressure to formalize

### When a change exists

If the user mentions a change or you detect one is relevant:

1. **Read existing artifacts for context**
   - `openspec/changes/<name>/proposal.md`
   - `openspec/changes/<name>/design.md`
   - `openspec/changes/<name>/tasks.md`
   - etc.

2. **Reference them naturally in conversation**
   - "Your design mentions using Redis, but we just realized SQLite fits better..."
   - "The proposal scopes this to premium users, but we're now thinking everyone..."

3. **Offer to capture when decisions are made**

   | Insight Type | Where to Capture |
   |--------------|------------------|
   | New requirement discovered | `specs/<capability>/spec.md` |
   | Requirement changed | `specs/<capability>/spec.md` |
   | Design decision made | `design.md` |
   | Scope changed | `proposal.md` |
   | New work identified | `tasks.md` |
   | Assumption invalidated | Relevant artifact |

   Example offers:
   - "That's a design decision. Capture it in design.md?"
   - "This is a new requirement. Add it to specs?"
   - "This changes scope. Update the proposal?"

4. **The user decides** - Offer and move on. Don't pressure. Don't auto-capture.

---

## What You Don't Have To Do

- Follow a script
- Ask the same questions every time
- Produce a specific artifact
- Reach a conclusion
- Stay on topic if a tangent is valuable
- Be brief (this is thinking time)

---

## Handling Different Entry Points

**User brings a vague idea:**

```
User: I'm thinking about adding real-time collaboration

You: Real-time collab is a big space. Let me think about this...

COLLABORATION SPECTRUM
════════════════════════════════════════════

Awareness        Coordination         Sync
    │                 │                 │
    ▼                 ▼                 ▼
┌────────┐       ┌────────┐       ┌────────┐
│Presence│       │Cursors │       │  CRDT  │
│  "3    │       │ Multi  │       │Conflict│
│online" │       │ select │       │  free  │
└────────┘       └────────┘       └────────┘
    │                 │                 │
 trivial          moderate          complex

Where's your head at?
```

**User brings a specific problem:**

```
User: The auth system is a mess

You: [reads codebase]

┌─────────────────────────────────────────────┐
│              CURRENT AUTH FLOW              │
└─────────────────────────────────────────────┘
                      │
        ┌─────────────┼─────────────┐
        ▼             ▼             ▼
   ┌─────────┐   ┌─────────┐   ┌─────────┐
   │ Google  │   │ GitHub  │   │  Email  │
   │  OAuth  │   │  OAuth  │   │  Magic  │
   └────┬────┘   └────┬────┘   └────┬────┘
        │             │             │
        └─────────────┼─────────────┘
                      ▼
                ┌───────────┐
                │  Session  │
                └─────┬─────┘
                      │
                      ▼
                ┌───────────┐
                │   Perms   │
                └───────────┘

I see three tangles. Which one's burning?
```

**User is stuck mid-implementation:**

```
User: /opsx:explore add-auth-system
The OAuth integration is more complex than expected

You: [reads change artifacts]

You're on task 4: "Implement OAuth flow"

Let me trace what's involved...

[draws diagram, explores options, suggests paths]

Want to update the design to reflect this?
Or add a spike task to investigate?
```

**User wants to compare options:**

```
User: Should we use Postgres or SQLite?

You: Generic answer is boring. What's the context?

User: A CLI tool that tracks local dev environments

You: That changes everything.

┌─────────────────────────────────────────────────┐
│              CLI TOOL DATA STORAGE              │
└─────────────────────────────────────────────────┘

Key constraints:
• No daemon running
• Must work offline
• Single user

              SQLite         Postgres
Deployment    embedded ✓     needs server ✗
Offline       yes ✓          no ✗
Single file   yes ✓          no ✗

SQLite. Not even close.

Unless... is there a sync component?
```

---

## Ending Discovery

There's no required ending. Discovery might:

- **Flow into a proposal**: "Ready to start? I can create a change proposal."
- **Result in artifact updates**: "Updated design.md with these decisions"
- **Just provide clarity**: User has what they need, moves on
- **Continue later**: "We can pick this up anytime"

When it feels like things are crystallizing, you might summarize:

```
## What We Figured Out

**The problem**: [crystallized understanding]

**The approach**: [if one emerged]

**Open questions**: [if any remain]

**Next steps** (if ready):
- Create a change proposal
- Keep exploring: just keep talking
```

But this summary is optional. Sometimes the thinking IS the value.

---

## Guardrails

- **Don't implement** - Never write code or implement features. Creating OpenSpec artifacts is fine, writing application code is not.
- **Don't fake understanding** - If something is unclear, dig deeper
- **Don't rush** - Discovery is thinking time, not task time
- **Don't force structure** - Let patterns emerge naturally
- **Don't auto-capture** - Offer to save insights, don't just do it
- **Do visualize** - A good diagram is worth many paragraphs
- **Do explore the codebase** - Ground discussions in reality
- **Do question assumptions** - Including the user's and your own
110 .cursor/skills/openspec-propose/SKILL.md Normal file
@@ -0,0 +1,110 @@
---
name: openspec-propose
description: Propose a new change with all artifacts generated in one step. Use when the user wants to quickly describe what they want to build and get a complete proposal with design, specs, and tasks ready for implementation.
license: MIT
compatibility: Requires openspec CLI.
metadata:
  author: openspec
  version: "1.0"
  generatedBy: "1.2.0"
---

Propose a new change - create the change and generate all artifacts in one step.

I'll create a change with artifacts:
- proposal.md (what & why)
- design.md (how)
- tasks.md (implementation steps)

When ready to implement, run /opsx:apply

---

**Input**: The user's request should include a change name (kebab-case) OR a description of what they want to build.

**Steps**

1. **If no clear input is provided, ask what they want to build**

   Use the **AskUserQuestion tool** (open-ended, no preset options) to ask:
   > "What change do you want to work on? Describe what you want to build or fix."

   From their description, derive a kebab-case name (e.g., "add user authentication" → `add-user-auth`).

   **IMPORTANT**: Do NOT proceed without understanding what the user wants to build.

2. **Create the change directory**

   ```bash
   openspec new change "<name>"
   ```

   This creates a scaffolded change at `openspec/changes/<name>/` with `.openspec.yaml`.

3. **Get the artifact build order**

   ```bash
   openspec status --change "<name>" --json
   ```

   Parse the JSON to get:
   - `applyRequires`: array of artifact IDs needed before implementation (e.g., `["tasks"]`)
   - `artifacts`: list of all artifacts with their status and dependencies
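   A sketch of pulling those two fields, assuming `jq` is available (the change name is illustrative):

   ```bash
   # Step 3 sketch: the build-order fields named above (assumes jq).
   openspec status --change "add-user-auth" --json \
     | jq '{applyRequires, artifacts: [.artifacts[] | {id, status}]}'
   ```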
4. **Create artifacts in sequence until apply-ready**

   Use the **TodoWrite tool** to track progress through the artifacts.

   Loop through the artifacts in dependency order (artifacts with no pending dependencies first):

   a. **For each artifact that is `ready` (dependencies satisfied)**:
      - Get instructions:
        ```bash
        openspec instructions <artifact-id> --change "<name>" --json
        ```
      - The instructions JSON includes:
        - `context`: Project background (constraints for you - do NOT include in output)
        - `rules`: Artifact-specific rules (constraints for you - do NOT include in output)
        - `template`: The structure to use for your output file
        - `instruction`: Schema-specific guidance for this artifact type
        - `outputPath`: Where to write the artifact
        - `dependencies`: Completed artifacts to read for context
      - Read any completed dependency files for context
      - Create the artifact file using `template` as the structure
      - Apply `context` and `rules` as constraints - but do NOT copy them into the file
      - Show brief progress: "Created <artifact-id>"

   b. **Continue until all `applyRequires` artifacts are complete**
      - After creating each artifact, re-run `openspec status --change "<name>" --json`
      - Check if every artifact ID in `applyRequires` has `status: "done"` in the artifacts array
      - Stop when all `applyRequires` artifacts are done

   c. **If an artifact requires user input** (unclear context):
      - Use the **AskUserQuestion tool** to clarify
      - Then continue with creation

5. **Show final status**

   ```bash
   openspec status --change "<name>"
   ```

**Output**

After completing all artifacts, summarize:
- Change name and location
- List of artifacts created with brief descriptions
- What's ready: "All artifacts created! Ready for implementation."
- Prompt: "Run `/opsx:apply` or ask me to implement to start working on the tasks."

**Artifact Creation Guidelines**

- Follow the `instruction` field from `openspec instructions` for each artifact type
- The schema defines what each artifact should contain - follow it
- Read dependency artifacts for context before creating new ones
- Use `template` as the structure for your output file - fill in its sections
- **IMPORTANT**: `context` and `rules` are constraints for YOU, not content for the file
  - Do NOT copy `<context>`, `<rules>`, `<project_context>` blocks into the artifact
  - These guide what you write, but should never appear in the output

**Guardrails**

- Create ALL artifacts needed for implementation (as defined by the schema's `apply.requires`)
- Always read dependency artifacts before creating a new one
- If context is critically unclear, ask the user - but prefer making reasonable decisions to keep momentum
- If a change with that name already exists, ask whether the user wants to continue it or create a new one
- Verify that each artifact file exists after writing before proceeding to the next
49 .env.example
@@ -9,31 +9,49 @@
 # REQUIRED - Change these values in production!
 # =============================================================================
 
-# Master key for Meilisearch authentication (required)
-MEILI_MASTER_KEY=change-me-in-production
-
 # Bootstrap token for initial API admin access (required)
 # Use this token for the first API calls before creating proper API tokens
 API_BOOTSTRAP_TOKEN=change-me-in-production
 
+# Backoffice admin credentials (required)
+ADMIN_USERNAME=admin
+ADMIN_PASSWORD=change-me-in-production
+# Secret for signing session JWTs (min 32 chars, required)
+SESSION_SECRET=change-me-in-production-use-32-chars-min
+
 # =============================================================================
 # Service Configuration
 # =============================================================================
 
 # API Service
-API_LISTEN_ADDR=0.0.0.0:8080
-API_BASE_URL=http://api:8080
+API_LISTEN_ADDR=0.0.0.0:7080
+API_BASE_URL=http://api:7080
 
 # Indexer Service
-INDEXER_LISTEN_ADDR=0.0.0.0:8081
+INDEXER_LISTEN_ADDR=0.0.0.0:7081
 INDEXER_SCAN_INTERVAL_SECONDS=5
 
-# Meilisearch Search Engine
-MEILI_URL=http://meilisearch:7700
-
 # PostgreSQL Database
 DATABASE_URL=postgres://stripstream:stripstream@postgres:5432/stripstream
 
+# =============================================================================
+# Logging
+# =============================================================================
+# Log levels per domain. Default: indexer=info,scan=info,extraction=info,thumbnail=warn,watcher=info
+# Domains:
+#   scan       — filesystem scan (discovery phase)
+#   extraction — page extraction from archives (extracting_pages phase)
+#   thumbnail  — thumbnail generation (resize/encode)
+#   watcher    — file watcher polling
+#   indexer    — general indexer logs
+# Levels: error, warn, info, debug, trace
+# Examples:
+#   RUST_LOG=indexer=info                  # default, quiet thumbnails
+#   RUST_LOG=indexer=info,thumbnail=debug  # enable thumbnail timing logs
+#   RUST_LOG=indexer=info,extraction=debug # per-book extraction details
+#   RUST_LOG=indexer=debug,scan=debug,extraction=debug,thumbnail=debug,watcher=debug # see everything
+# RUST_LOG=indexer=info,scan=info,extraction=info,thumbnail=warn,watcher=info
 
 # =============================================================================
 # Storage Configuration
 # =============================================================================
@@ -46,14 +64,17 @@ LIBRARIES_ROOT_PATH=/libraries
 # Path to libraries directory on host machine (for Docker volume mount)
 # Default: ../libraries (relative to infra/docker-compose.yml)
 # You can change this to an absolute path on your machine
-LIBRARIES_HOST_PATH=../libraries
+LIBRARIES_HOST_PATH=./libraries
 
+# Path to thumbnails directory on host machine (for Docker volume mount)
+# Default: ../data/thumbnails (relative to infra/docker-compose.yml)
+THUMBNAILS_HOST_PATH=./data/thumbnails
+
 # =============================================================================
 # Port Configuration
 # =============================================================================
 # To change ports, edit docker-compose.yml directly:
-# - API: change "7080:8080" to "YOUR_PORT:8080"
-# - Indexer: change "7081:8081" to "YOUR_PORT:8081"
-# - Backoffice: change "7082:8082" to "YOUR_PORT:8082"
-# - Meilisearch: change "7700:7700" to "YOUR_PORT:7700"
+# - API: change "7080:7080" to "YOUR_PORT:7080"
+# - Indexer: change "7081:7081" to "YOUR_PORT:7081"
+# - Backoffice: change "7082:7082" to "YOUR_PORT:7082"
 # - PostgreSQL: change "6432:5432" to "YOUR_PORT:5432"
17 .gitea/workflows/deploy.yml Normal file
@@ -0,0 +1,17 @@
name: Deploy with Docker Compose

on:
  push:
    branches:
      - main # adjust to the branch you want to deploy

jobs:
  deploy:
    runs-on: mac-orbstack-runner # the name you gave the runner
    steps:
      - name: Deploy stack
        env:
          DOCKER_BUILDKIT: 1
          COMPOSE_DOCKER_CLI_BUILD: 1
        run: |
          BUILDKIT_PROGRESS=plain cd /Users/julienfroidefond/Sites/docker-stack && docker pull julienfroidefond32/stripstream-backoffice && docker pull julienfroidefond32/stripstream-api && docker pull julienfroidefond32/stripstream-indexer && ./scripts/stack.sh up stripstream
3 .gitignore vendored
@@ -2,6 +2,7 @@ target/
 .env
 .DS_Store
 tmp/
-libraries/
+/libraries/
 node_modules/
 .next/
+data/thumbnails
|||||||
149
.opencode/command/opsx-apply.md
Normal file
149
.opencode/command/opsx-apply.md
Normal file
@@ -0,0 +1,149 @@
|
|||||||
|
---
|
||||||
|
description: Implement tasks from an OpenSpec change (Experimental)
|
||||||
|
---
|
||||||
|
|
||||||
|
Implement tasks from an OpenSpec change.
|
||||||
|
|
||||||
|
**Input**: Optionally specify a change name (e.g., `/opsx-apply add-auth`). If omitted, check if it can be inferred from conversation context. If vague or ambiguous you MUST prompt for available changes.
|
||||||
|
|
||||||
|
**Steps**
|
||||||
|
|
||||||
|
1. **Select the change**
|
||||||
|
|
||||||
|
If a name is provided, use it. Otherwise:
|
||||||
|
- Infer from conversation context if the user mentioned a change
|
||||||
|
- Auto-select if only one active change exists
|
||||||
|
- If ambiguous, run `openspec list --json` to get available changes and use the **AskUserQuestion tool** to let the user select
|
||||||
|
|
||||||
|
Always announce: "Using change: <name>" and how to override (e.g., `/opsx-apply <other>`).
|
||||||
|
|
||||||
|
2. **Check status to understand the schema**
|
||||||
|
```bash
|
||||||
|
openspec status --change "<name>" --json
|
||||||
|
```
|
||||||
|
Parse the JSON to understand:
|
||||||
|
- `schemaName`: The workflow being used (e.g., "spec-driven")
|
||||||
|
- Which artifact contains the tasks (typically "tasks" for spec-driven, check status for others)
|
||||||
|
|
||||||
|
3. **Get apply instructions**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
openspec instructions apply --change "<name>" --json
|
||||||
|
```
|
||||||
|
|
||||||
|
This returns:
|
||||||
|
- Context file paths (varies by schema)
|
||||||
|
- Progress (total, complete, remaining)
|
||||||
|
- Task list with status
|
||||||
|
- Dynamic instruction based on current state
|
||||||
|
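   A hedged peek at the fields this step branches on, assuming `jq` (`state` and `contextFiles` are named below; `progress` as a field name is an assumption):

   ```bash
   # Inspect the apply-instructions JSON (sketch, assumes jq).
   openspec instructions apply --change "add-auth" --json \
     | jq '{state, contextFiles, progress}'
   ```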
   **Handle states:**
   - If `state: "blocked"` (missing artifacts): show a message, suggest using `/opsx-continue`
   - If `state: "all_done"`: congratulate, suggest archiving
   - Otherwise: proceed to implementation

4. **Read context files**

   Read the files listed in `contextFiles` from the apply instructions output.
   The files depend on the schema being used:
   - **spec-driven**: proposal, specs, design, tasks
   - Other schemas: follow the contextFiles from the CLI output

5. **Show current progress**

   Display:
   - Schema being used
   - Progress: "N/M tasks complete"
   - Remaining tasks overview
   - Dynamic instruction from CLI

6. **Implement tasks (loop until done or blocked)**

   For each pending task:
   - Show which task is being worked on
   - Make the code changes required
   - Keep changes minimal and focused
   - Mark the task complete in the tasks file: `- [ ]` → `- [x]`
   - Continue to the next task

   **Pause if:**
   - Task is unclear → ask for clarification
   - Implementation reveals a design issue → suggest updating artifacts
   - Error or blocker encountered → report and wait for guidance
   - User interrupts

7. **On completion or pause, show status**

   Display:
   - Tasks completed this session
   - Overall progress: "N/M tasks complete"
   - If all done: suggest archive
   - If paused: explain why and wait for guidance

**Output During Implementation**

```
## Implementing: <change-name> (schema: <schema-name>)

Working on task 3/7: <task description>
[...implementation happening...]
✓ Task complete

Working on task 4/7: <task description>
[...implementation happening...]
✓ Task complete
```

**Output On Completion**

```
## Implementation Complete

**Change:** <change-name>
**Schema:** <schema-name>
**Progress:** 7/7 tasks complete ✓

### Completed This Session
- [x] Task 1
- [x] Task 2
...

All tasks complete! You can archive this change with `/opsx-archive`.
```

**Output On Pause (Issue Encountered)**

```
## Implementation Paused

**Change:** <change-name>
**Schema:** <schema-name>
**Progress:** 4/7 tasks complete

### Issue Encountered
<description of the issue>

**Options:**
1. <option 1>
2. <option 2>
3. Other approach

What would you like to do?
```

**Guardrails**

- Keep going through tasks until done or blocked
- Always read context files before starting (from the apply instructions output)
- If a task is ambiguous, pause and ask before implementing
- If implementation reveals issues, pause and suggest artifact updates
- Keep code changes minimal and scoped to each task
- Update the task checkbox immediately after completing each task
- Pause on errors, blockers, or unclear requirements - don't guess
- Use contextFiles from the CLI output; don't assume specific file names

**Fluid Workflow Integration**

This skill supports the "actions on a change" model:

- **Can be invoked anytime**: Before all artifacts are done (if tasks exist), after partial implementation, interleaved with other actions
- **Allows artifact updates**: If implementation reveals design issues, suggest updating artifacts - not phase-locked, work fluidly
154 .opencode/command/opsx-archive.md Normal file
@@ -0,0 +1,154 @@
---
description: Archive a completed change in the experimental workflow
---

Archive a completed change in the experimental workflow.

**Input**: Optionally specify a change name after `/opsx-archive` (e.g., `/opsx-archive add-auth`). If omitted, check if it can be inferred from conversation context. If vague or ambiguous, you MUST prompt with the available changes.

**Steps**

1. **If no change name provided, prompt for selection**

   Run `openspec list --json` to get available changes. Use the **AskUserQuestion tool** to let the user select.

   Show only active changes (not already archived).
   Include the schema used for each change if available.

   **IMPORTANT**: Do NOT guess or auto-select a change. Always let the user choose.

2. **Check artifact completion status**

   Run `openspec status --change "<name>" --json` to check artifact completion.

   Parse the JSON to understand:
   - `schemaName`: The workflow being used
   - `artifacts`: List of artifacts with their status (`done` or other)
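   To surface the incomplete ones directly, a sketch (assumes `jq`; `add-auth` is the example name from above):

   ```bash
   # List artifacts that are not yet done (sketch, assumes jq).
   openspec status --change "add-auth" --json \
     | jq -r '.artifacts[] | select(.status != "done") | .id'
   ```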
   **If any artifacts are not `done`:**
   - Display a warning listing the incomplete artifacts
   - Prompt the user for confirmation to continue
   - Proceed if the user confirms

3. **Check task completion status**

   Read the tasks file (typically `tasks.md`) to check for incomplete tasks.

   Count tasks marked with `- [ ]` (incomplete) vs `- [x]` (complete).

   **If incomplete tasks are found:**
   - Display a warning showing the count of incomplete tasks
   - Prompt the user for confirmation to continue
   - Proceed if the user confirms

   **If no tasks file exists:** Proceed without a task-related warning.

4. **Assess delta spec sync state**

   Check for delta specs at `openspec/changes/<name>/specs/`. If none exist, proceed without a sync prompt.

   **If delta specs exist:**
   - Compare each delta spec with its corresponding main spec at `openspec/specs/<capability>/spec.md`
   - Determine what changes would be applied (adds, modifications, removals, renames)
   - Show a combined summary before prompting

   **Prompt options:**
   - If changes are needed: "Sync now (recommended)", "Archive without syncing"
   - If already synced: "Archive now", "Sync anyway", "Cancel"

   If the user chooses sync, use the Task tool (subagent_type: "general-purpose", prompt: "Use Skill tool to invoke openspec-sync-specs for change '<name>'. Delta spec analysis: <include the analyzed delta spec summary>"). Proceed to archive regardless of choice.

5. **Perform the archive**

   Create the archive directory if it doesn't exist:

   ```bash
   mkdir -p openspec/changes/archive
   ```

   Generate the target name using the current date: `YYYY-MM-DD-<change-name>`

   **Check if the target already exists:**
   - If yes: Fail with an error; suggest renaming the existing archive or using a different date
   - If no: Move the change directory to the archive

   ```bash
   mv openspec/changes/<name> openspec/changes/archive/YYYY-MM-DD-<name>
   ```

6. **Display summary**

   Show an archive completion summary including:
   - Change name
   - Schema that was used
   - Archive location
   - Spec sync status (synced / sync skipped / no delta specs)
   - Note about any warnings (incomplete artifacts/tasks)

**Output On Success**

```
## Archive Complete

**Change:** <change-name>
**Schema:** <schema-name>
**Archived to:** openspec/changes/archive/YYYY-MM-DD-<name>/
**Specs:** ✓ Synced to main specs

All artifacts complete. All tasks complete.
```

**Output On Success (No Delta Specs)**

```
## Archive Complete

**Change:** <change-name>
**Schema:** <schema-name>
**Archived to:** openspec/changes/archive/YYYY-MM-DD-<name>/
**Specs:** No delta specs

All artifacts complete. All tasks complete.
```

**Output On Success With Warnings**

```
## Archive Complete (with warnings)

**Change:** <change-name>
**Schema:** <schema-name>
**Archived to:** openspec/changes/archive/YYYY-MM-DD-<name>/
**Specs:** Sync skipped (user chose to skip)

**Warnings:**
- Archived with 2 incomplete artifacts
- Archived with 3 incomplete tasks
- Delta spec sync was skipped (user chose to skip)

Review the archive if this was not intentional.
```

**Output On Error (Archive Exists)**

```
## Archive Failed

**Change:** <change-name>
**Target:** openspec/changes/archive/YYYY-MM-DD-<name>/

Target archive directory already exists.

**Options:**
1. Rename the existing archive
2. Delete the existing archive if it's a duplicate
3. Wait until a different date to archive
```

**Guardrails**

- Always prompt for change selection if not provided
- Use the artifact graph (`openspec status --json`) for completion checking
- Don't block archive on warnings - just inform and confirm
- Preserve .openspec.yaml when moving to archive (it moves with the directory)
- Show a clear summary of what happened
- If sync is requested, use the Skill tool to invoke `openspec-sync-specs` (agent-driven)
- If delta specs exist, always run the sync assessment and show the combined summary before prompting
170 .opencode/command/opsx-explore.md Normal file
@@ -0,0 +1,170 @@
---
description: Enter explore mode - think through ideas, investigate problems, clarify requirements
---

Enter explore mode. Think deeply. Visualize freely. Follow the conversation wherever it goes.

**IMPORTANT: Explore mode is for thinking, not implementing.** You may read files, search code, and investigate the codebase, but you must NEVER write code or implement features. If the user asks you to implement something, remind them to exit explore mode first and create a change proposal. You MAY create OpenSpec artifacts (proposals, designs, specs) if the user asks—that's capturing thinking, not implementing.

**This is a stance, not a workflow.** There are no fixed steps, no required sequence, no mandatory outputs. You're a thinking partner helping the user explore.

**Input**: The argument after `/opsx-explore` is whatever the user wants to think about. It could be:
- A vague idea: "real-time collaboration"
- A specific problem: "the auth system is getting unwieldy"
- A change name: "add-dark-mode" (to explore in the context of that change)
- A comparison: "postgres vs sqlite for this"
- Nothing (just enter explore mode)

---

## The Stance

- **Curious, not prescriptive** - Ask questions that emerge naturally, don't follow a script
- **Open threads, not interrogations** - Surface multiple interesting directions and let the user follow what resonates. Don't funnel them through a single path of questions.
- **Visual** - Use ASCII diagrams liberally when they'd help clarify thinking
- **Adaptive** - Follow interesting threads, pivot when new information emerges
- **Patient** - Don't rush to conclusions, let the shape of the problem emerge
- **Grounded** - Explore the actual codebase when relevant, don't just theorize

---

## What You Might Do

Depending on what the user brings, you might:

**Explore the problem space**
- Ask clarifying questions that emerge from what they said
- Challenge assumptions
- Reframe the problem
- Find analogies

**Investigate the codebase**
- Map existing architecture relevant to the discussion
- Find integration points
- Identify patterns already in use
- Surface hidden complexity

**Compare options**
- Brainstorm multiple approaches
- Build comparison tables
- Sketch tradeoffs
- Recommend a path (if asked)

**Visualize**

```
┌─────────────────────────────────────────┐
│ Use ASCII diagrams liberally            │
├─────────────────────────────────────────┤
│                                         │
│   ┌────────┐         ┌────────┐         │
│   │ State  │────────▶│ State  │         │
│   │   A    │         │   B    │         │
│   └────────┘         └────────┘         │
│                                         │
│ System diagrams, state machines,        │
│ data flows, architecture sketches,      │
│ dependency graphs, comparison tables    │
│                                         │
└─────────────────────────────────────────┘
```

**Surface risks and unknowns**
- Identify what could go wrong
- Find gaps in understanding
- Suggest spikes or investigations

---

## OpenSpec Awareness

You have full context of the OpenSpec system. Use it naturally, don't force it.

### Check for context

At the start, quickly check what exists:

```bash
openspec list --json
```

This tells you:
- Whether there are active changes
- Their names, schemas, and status
- What the user might be working on

If the user mentioned a specific change name, read its artifacts for context (a hypothetical example follows).
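For instance, if the user invoked `/opsx-explore add-dark-mode`, a quick look at that change before thinking out loud (the paths follow the layout used elsewhere in this document; which files exist depends on the schema):

```bash
# Hypothetical: skim an existing change's artifacts before exploring it.
ls openspec/changes/add-dark-mode/
cat openspec/changes/add-dark-mode/proposal.md
```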
### When no change exists

Think freely. When insights crystallize, you might offer:

- "This feels solid enough to start a change. Want me to create a proposal?"
- Or keep exploring - no pressure to formalize

### When a change exists

If the user mentions a change or you detect one is relevant:

1. **Read existing artifacts for context**
   - `openspec/changes/<name>/proposal.md`
   - `openspec/changes/<name>/design.md`
   - `openspec/changes/<name>/tasks.md`
   - etc.

2. **Reference them naturally in conversation**
   - "Your design mentions using Redis, but we just realized SQLite fits better..."
   - "The proposal scopes this to premium users, but we're now thinking everyone..."

3. **Offer to capture when decisions are made**

   | Insight Type | Where to Capture |
   |--------------|------------------|
   | New requirement discovered | `specs/<capability>/spec.md` |
   | Requirement changed | `specs/<capability>/spec.md` |
   | Design decision made | `design.md` |
   | Scope changed | `proposal.md` |
   | New work identified | `tasks.md` |
   | Assumption invalidated | Relevant artifact |

   Example offers:
   - "That's a design decision. Capture it in design.md?"
   - "This is a new requirement. Add it to specs?"
   - "This changes scope. Update the proposal?"

4. **The user decides** - Offer and move on. Don't pressure. Don't auto-capture.

---

## What You Don't Have To Do

- Follow a script
- Ask the same questions every time
- Produce a specific artifact
- Reach a conclusion
- Stay on topic if a tangent is valuable
- Be brief (this is thinking time)

---

## Ending Discovery

There's no required ending. Discovery might:

- **Flow into a proposal**: "Ready to start? I can create a change proposal."
- **Result in artifact updates**: "Updated design.md with these decisions"
- **Just provide clarity**: User has what they need, moves on
- **Continue later**: "We can pick this up anytime"

When things crystallize, you might offer a summary - but it's optional. Sometimes the thinking IS the value.

---

## Guardrails

- **Don't implement** - Never write code or implement features. Creating OpenSpec artifacts is fine, writing application code is not.
- **Don't fake understanding** - If something is unclear, dig deeper
- **Don't rush** - Discovery is thinking time, not task time
- **Don't force structure** - Let patterns emerge naturally
- **Don't auto-capture** - Offer to save insights, don't just do it
- **Do visualize** - A good diagram is worth many paragraphs
- **Do explore the codebase** - Ground discussions in reality
- **Do question assumptions** - Including the user's and your own
103 .opencode/command/opsx-propose.md Normal file
@@ -0,0 +1,103 @@
---
description: Propose a new change - create it and generate all artifacts in one step
---

Propose a new change - create the change and generate all artifacts in one step.

I'll create a change with artifacts:
- proposal.md (what & why)
- design.md (how)
- tasks.md (implementation steps)

When ready to implement, run /opsx-apply

---

**Input**: The argument after `/opsx-propose` is the change name (kebab-case), OR a description of what the user wants to build.

**Steps**

1. **If no input is provided, ask what they want to build**

   Use the **AskUserQuestion tool** (open-ended, no preset options) to ask:
   > "What change do you want to work on? Describe what you want to build or fix."

   From their description, derive a kebab-case name (e.g., "add user authentication" → `add-user-auth`).

   **IMPORTANT**: Do NOT proceed without understanding what the user wants to build.

2. **Create the change directory**

   ```bash
   openspec new change "<name>"
   ```

   This creates a scaffolded change at `openspec/changes/<name>/` with `.openspec.yaml`.
|
||||||
|
3. **Get the artifact build order**
|
||||||
|
```bash
|
||||||
|
openspec status --change "<name>" --json
|
||||||
|
```
|
||||||
|
Parse the JSON to get:
|
||||||
|
- `applyRequires`: array of artifact IDs needed before implementation (e.g., `["tasks"]`)
|
||||||
|
- `artifacts`: list of all artifacts with their status and dependencies
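
   For example, a minimal sketch of reading these fields with `jq` (assuming `jq` is installed; the field names and the `ready`/`done` statuses are the ones described in this document):

   ```bash
   status=$(openspec status --change "<name>" --json)

   # Artifact IDs required before implementation, e.g. ["tasks"]
   echo "$status" | jq -r '.applyRequires[]'

   # Artifacts whose dependencies are satisfied and can be created next
   echo "$status" | jq -r '.artifacts[] | select(.status == "ready") | .id'

   # Exits 0 once every applyRequires artifact reports status "done"
   echo "$status" | jq -e '
     [.applyRequires[] as $id
      | .artifacts[] | select(.id == $id) | .status] | all(. == "done")'
   ```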

4. **Create artifacts in sequence until apply-ready**

   Use the **TodoWrite tool** to track progress through the artifacts.

   Loop through artifacts in dependency order (artifacts with no pending dependencies first):

   a. **For each artifact that is `ready` (dependencies satisfied)**:
      - Get instructions:
        ```bash
        openspec instructions <artifact-id> --change "<name>" --json
        ```
      - The instructions JSON includes:
        - `context`: Project background (constraints for you - do NOT include in output)
        - `rules`: Artifact-specific rules (constraints for you - do NOT include in output)
        - `template`: The structure to use for your output file
        - `instruction`: Schema-specific guidance for this artifact type
        - `outputPath`: Where to write the artifact
        - `dependencies`: Completed artifacts to read for context
      - Read any completed dependency files for context
      - Create the artifact file using `template` as the structure
      - Apply `context` and `rules` as constraints - but do NOT copy them into the file
      - Show brief progress: "Created <artifact-id>"

   b. **Continue until all `applyRequires` artifacts are complete**
      - After creating each artifact, re-run `openspec status --change "<name>" --json`
      - Check if every artifact ID in `applyRequires` has `status: "done"` in the artifacts array
      - Stop when all `applyRequires` artifacts are done

   c. **If an artifact requires user input** (unclear context):
      - Use the **AskUserQuestion tool** to clarify
      - Then continue with creation

5. **Show final status**

   ```bash
   openspec status --change "<name>"
   ```

**Output**

After completing all artifacts, summarize:

- Change name and location
- List of artifacts created with brief descriptions
- What's ready: "All artifacts created! Ready for implementation."
- Prompt: "Run `/opsx-apply` to start implementing."

**Artifact Creation Guidelines**

- Follow the `instruction` field from `openspec instructions` for each artifact type
- The schema defines what each artifact should contain - follow it
- Read dependency artifacts for context before creating new ones
- Use `template` as the structure for your output file - fill in its sections
- **IMPORTANT**: `context` and `rules` are constraints for YOU, not content for the file
  - Do NOT copy `<context>`, `<rules>`, `<project_context>` blocks into the artifact
  - These guide what you write, but should never appear in the output

**Guardrails**

- Create ALL artifacts needed for implementation (as defined by the schema's `apply.requires`)
- Always read dependency artifacts before creating a new one
- If context is critically unclear, ask the user - but prefer making reasonable decisions to keep momentum
- If a change with that name already exists, ask whether the user wants to continue it or create a new one
- Verify each artifact file exists after writing before proceeding to the next
156 .opencode/skills/openspec-apply-change/SKILL.md Normal file
@@ -0,0 +1,156 @@
---
name: openspec-apply-change
description: Implement tasks from an OpenSpec change. Use when the user wants to start implementing, continue implementation, or work through tasks.
license: MIT
compatibility: Requires openspec CLI.
metadata:
  author: openspec
  version: "1.0"
  generatedBy: "1.2.0"
---

Implement tasks from an OpenSpec change.

**Input**: Optionally specify a change name. If omitted, check whether it can be inferred from conversation context. If vague or ambiguous, you MUST prompt with the available changes.

**Steps**

1. **Select the change**

   If a name is provided, use it. Otherwise:

   - Infer from conversation context if the user mentioned a change
   - Auto-select if only one active change exists
   - If ambiguous, run `openspec list --json` to get available changes and use the **AskUserQuestion tool** to let the user select

   Always announce: "Using change: <name>" and how to override (e.g., `/opsx-apply <other>`).

2. **Check status to understand the schema**

   ```bash
   openspec status --change "<name>" --json
   ```

   Parse the JSON to understand:

   - `schemaName`: The workflow being used (e.g., "spec-driven")
   - Which artifact contains the tasks (typically "tasks" for spec-driven; check status for others)
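
   For instance, a one-liner sketch for pulling out the schema name (assuming `jq` is available):

   ```bash
   openspec status --change "<name>" --json | jq -r '.schemaName'
   ```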

3. **Get apply instructions**

   ```bash
   openspec instructions apply --change "<name>" --json
   ```

   This returns:

   - Context file paths (varies by schema - could be proposal/specs/design/tasks or spec/tests/implementation/docs)
   - Progress (total, complete, remaining)
   - Task list with status
   - Dynamic instruction based on current state

   **Handle states:**

   - If `state: "blocked"` (missing artifacts): show message, suggest using openspec-continue-change
   - If `state: "all_done"`: congratulate, suggest archive
   - Otherwise: proceed to implementation

4. **Read context files**

   Read the files listed in `contextFiles` from the apply instructions output (a sketch follows this list).

   The files depend on the schema being used:

   - **spec-driven**: proposal, specs, design, tasks
   - Other schemas: follow the contextFiles from the CLI output
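
   For example, one way to list them (assumes `jq` and a top-level `contextFiles` array, as described above):

   ```bash
   openspec instructions apply --change "<name>" --json | jq -r '.contextFiles[]'
   ```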

5. **Show current progress**

   Display:

   - Schema being used
   - Progress: "N/M tasks complete"
   - Remaining tasks overview
   - Dynamic instruction from the CLI

6. **Implement tasks (loop until done or blocked)**

   For each pending task:

   - Show which task is being worked on
   - Make the code changes required
   - Keep changes minimal and focused
   - Mark the task complete in the tasks file: `- [ ]` → `- [x]` (see the sketch after this list)
   - Continue to the next task
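
   A minimal sketch of the checkbox flip (assumes `sed` with in-place editing and a task line unique enough to match; the task text here is only an example):

   ```bash
   tasks_file="openspec/changes/<name>/tasks.md"
   # Flip exactly the matching task from unchecked to checked
   sed -i.bak 's/^- \[ \] Implement OAuth flow$/- [x] Implement OAuth flow/' "$tasks_file"
   ```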

   **Pause if:**

   - Task is unclear → ask for clarification
   - Implementation reveals a design issue → suggest updating artifacts
   - Error or blocker encountered → report and wait for guidance
   - User interrupts

7. **On completion or pause, show status**

   Display:

   - Tasks completed this session
   - Overall progress: "N/M tasks complete"
   - If all done: suggest archive
   - If paused: explain why and wait for guidance

**Output During Implementation**

```
## Implementing: <change-name> (schema: <schema-name>)

Working on task 3/7: <task description>
[...implementation happening...]
✓ Task complete

Working on task 4/7: <task description>
[...implementation happening...]
✓ Task complete
```

**Output On Completion**

```
## Implementation Complete

**Change:** <change-name>
**Schema:** <schema-name>
**Progress:** 7/7 tasks complete ✓

### Completed This Session
- [x] Task 1
- [x] Task 2
...

All tasks complete! Ready to archive this change.
```

**Output On Pause (Issue Encountered)**

```
## Implementation Paused

**Change:** <change-name>
**Schema:** <schema-name>
**Progress:** 4/7 tasks complete

### Issue Encountered
<description of the issue>

**Options:**
1. <option 1>
2. <option 2>
3. Other approach

What would you like to do?
```

**Guardrails**

- Keep going through tasks until done or blocked
- Always read context files before starting (from the apply instructions output)
- If a task is ambiguous, pause and ask before implementing
- If implementation reveals issues, pause and suggest artifact updates
- Keep code changes minimal and scoped to each task
- Update the task checkbox immediately after completing each task
- Pause on errors, blockers, or unclear requirements - don't guess
- Use contextFiles from the CLI output; don't assume specific file names

**Fluid Workflow Integration**

This skill supports the "actions on a change" model:

- **Can be invoked anytime**: Before all artifacts are done (if tasks exist), after partial implementation, interleaved with other actions
- **Allows artifact updates**: If implementation reveals design issues, suggest updating artifacts - not phase-locked, work fluidly
114 .opencode/skills/openspec-archive-change/SKILL.md Normal file
@@ -0,0 +1,114 @@
---
name: openspec-archive-change
description: Archive a completed change in the experimental workflow. Use when the user wants to finalize and archive a change after implementation is complete.
license: MIT
compatibility: Requires openspec CLI.
metadata:
  author: openspec
  version: "1.0"
  generatedBy: "1.2.0"
---

Archive a completed change in the experimental workflow.

**Input**: Optionally specify a change name. If omitted, check whether it can be inferred from conversation context. If vague or ambiguous, you MUST prompt with the available changes.

**Steps**

1. **If no change name provided, prompt for selection**

   Run `openspec list --json` to get available changes. Use the **AskUserQuestion tool** to let the user select.

   Show only active changes (not already archived).
   Include the schema used for each change if available.

   **IMPORTANT**: Do NOT guess or auto-select a change. Always let the user choose.

2. **Check artifact completion status**

   Run `openspec status --change "<name>" --json` to check artifact completion.

   Parse the JSON to understand:

   - `schemaName`: The workflow being used
   - `artifacts`: List of artifacts with their status (`done` or other)

   **If any artifacts are not `done`:**

   - Display a warning listing the incomplete artifacts
   - Use the **AskUserQuestion tool** to confirm the user wants to proceed
   - Proceed if the user confirms

3. **Check task completion status**

   Read the tasks file (typically `tasks.md`) to check for incomplete tasks.

   Count tasks marked with `- [ ]` (incomplete) vs `- [x]` (complete), as sketched below.
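
   A minimal counting sketch (assumes the `tasks.md` path; `grep -c` exits non-zero when there are no matches, hence the `|| true`):

   ```bash
   tasks_file="openspec/changes/<name>/tasks.md"
   incomplete=$(grep -c '^- \[ \]' "$tasks_file" || true)
   complete=$(grep -c '^- \[x\]' "$tasks_file" || true)
   echo "$complete complete, $incomplete incomplete"
   ```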

   **If incomplete tasks found:**

   - Display a warning showing the count of incomplete tasks
   - Use the **AskUserQuestion tool** to confirm the user wants to proceed
   - Proceed if the user confirms

   **If no tasks file exists:** Proceed without a task-related warning.

4. **Assess delta spec sync state**

   Check for delta specs at `openspec/changes/<name>/specs/`. If none exist, proceed without a sync prompt.

   **If delta specs exist:**

   - Compare each delta spec with its corresponding main spec at `openspec/specs/<capability>/spec.md`
   - Determine what changes would be applied (adds, modifications, removals, renames)
   - Show a combined summary before prompting

   **Prompt options:**

   - If changes needed: "Sync now (recommended)", "Archive without syncing"
   - If already synced: "Archive now", "Sync anyway", "Cancel"

   If the user chooses sync, use the Task tool (subagent_type: "general-purpose", prompt: "Use Skill tool to invoke openspec-sync-specs for change '<name>'. Delta spec analysis: <include the analyzed delta spec summary>"). Proceed to archive regardless of choice.

5. **Perform the archive**

   Create the archive directory if it doesn't exist:

   ```bash
   mkdir -p openspec/changes/archive
   ```

   Generate the target name using the current date: `YYYY-MM-DD-<change-name>`

   **Check if the target already exists:**

   - If yes: Fail with an error; suggest renaming the existing archive or using a different date
   - If no: Move the change directory to the archive

   ```bash
   mv openspec/changes/<name> openspec/changes/archive/YYYY-MM-DD-<name>
   ```
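
   Putting the date stamp and the existence check together (a sketch only; `date +%F` prints YYYY-MM-DD):

   ```bash
   target="openspec/changes/archive/$(date +%F)-<name>"
   if [ -e "$target" ]; then
     echo "error: $target already exists" >&2   # rename the old archive or pick another date
   else
     mv "openspec/changes/<name>" "$target"
   fi
   ```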

6. **Display summary**

   Show an archive completion summary including:

   - Change name
   - Schema that was used
   - Archive location
   - Whether specs were synced (if applicable)
   - Note about any warnings (incomplete artifacts/tasks)

**Output On Success**

```
## Archive Complete

**Change:** <change-name>
**Schema:** <schema-name>
**Archived to:** openspec/changes/archive/YYYY-MM-DD-<name>/
**Specs:** ✓ Synced to main specs (or "No delta specs" or "Sync skipped")

All artifacts complete. All tasks complete.
```

**Guardrails**

- Always prompt for change selection if not provided
- Use the artifact graph (`openspec status --json`) for completion checking
- Don't block the archive on warnings - just inform and confirm
- Preserve `.openspec.yaml` when moving to the archive (it moves with the directory)
- Show a clear summary of what happened
- If sync is requested, use the openspec-sync-specs approach (agent-driven)
- If delta specs exist, always run the sync assessment and show the combined summary before prompting
288 .opencode/skills/openspec-explore/SKILL.md Normal file
@@ -0,0 +1,288 @@
---
name: openspec-explore
description: Enter explore mode - a thinking partner for exploring ideas, investigating problems, and clarifying requirements. Use when the user wants to think through something before or during a change.
license: MIT
compatibility: Requires openspec CLI.
metadata:
  author: openspec
  version: "1.0"
  generatedBy: "1.2.0"
---

Enter explore mode. Think deeply. Visualize freely. Follow the conversation wherever it goes.

**IMPORTANT: Explore mode is for thinking, not implementing.** You may read files, search code, and investigate the codebase, but you must NEVER write code or implement features. If the user asks you to implement something, remind them to exit explore mode first and create a change proposal. You MAY create OpenSpec artifacts (proposals, designs, specs) if the user asks - that's capturing thinking, not implementing.

**This is a stance, not a workflow.** There are no fixed steps, no required sequence, no mandatory outputs. You're a thinking partner helping the user explore.

---

## The Stance

- **Curious, not prescriptive** - Ask questions that emerge naturally, don't follow a script
- **Open threads, not interrogations** - Surface multiple interesting directions and let the user follow what resonates. Don't funnel them through a single path of questions.
- **Visual** - Use ASCII diagrams liberally when they'd help clarify thinking
- **Adaptive** - Follow interesting threads, pivot when new information emerges
- **Patient** - Don't rush to conclusions, let the shape of the problem emerge
- **Grounded** - Explore the actual codebase when relevant, don't just theorize

---

## What You Might Do

Depending on what the user brings, you might:

**Explore the problem space**

- Ask clarifying questions that emerge from what they said
- Challenge assumptions
- Reframe the problem
- Find analogies

**Investigate the codebase**

- Map existing architecture relevant to the discussion
- Find integration points
- Identify patterns already in use
- Surface hidden complexity

**Compare options**

- Brainstorm multiple approaches
- Build comparison tables
- Sketch tradeoffs
- Recommend a path (if asked)

**Visualize**

```
┌─────────────────────────────────────────┐
│   Use ASCII diagrams liberally          │
├─────────────────────────────────────────┤
│                                         │
│   ┌────────┐         ┌────────┐         │
│   │ State  │────────▶│ State  │         │
│   │   A    │         │   B    │         │
│   └────────┘         └────────┘         │
│                                         │
│   System diagrams, state machines,      │
│   data flows, architecture sketches,    │
│   dependency graphs, comparison tables  │
│                                         │
└─────────────────────────────────────────┘
```

**Surface risks and unknowns**

- Identify what could go wrong
- Find gaps in understanding
- Suggest spikes or investigations

---

## OpenSpec Awareness

You have full context of the OpenSpec system. Use it naturally, don't force it.

### Check for context

At the start, quickly check what exists:

```bash
openspec list --json
```

This tells you:

- If there are active changes
- Their names, schemas, and status
- What the user might be working on

### When no change exists

Think freely. When insights crystallize, you might offer:

- "This feels solid enough to start a change. Want me to create a proposal?"
- Or keep exploring - no pressure to formalize

### When a change exists

If the user mentions a change or you detect one is relevant:

1. **Read existing artifacts for context**
   - `openspec/changes/<name>/proposal.md`
   - `openspec/changes/<name>/design.md`
   - `openspec/changes/<name>/tasks.md`
   - etc.

2. **Reference them naturally in conversation**
   - "Your design mentions using Redis, but we just realized SQLite fits better..."
   - "The proposal scopes this to premium users, but we're now thinking everyone..."

3. **Offer to capture when decisions are made**

   | Insight Type | Where to Capture |
   |--------------|------------------|
   | New requirement discovered | `specs/<capability>/spec.md` |
   | Requirement changed | `specs/<capability>/spec.md` |
   | Design decision made | `design.md` |
   | Scope changed | `proposal.md` |
   | New work identified | `tasks.md` |
   | Assumption invalidated | Relevant artifact |

   Example offers:

   - "That's a design decision. Capture it in design.md?"
   - "This is a new requirement. Add it to specs?"
   - "This changes scope. Update the proposal?"

4. **The user decides** - Offer and move on. Don't pressure. Don't auto-capture.

---

## What You Don't Have To Do

- Follow a script
- Ask the same questions every time
- Produce a specific artifact
- Reach a conclusion
- Stay on topic if a tangent is valuable
- Be brief (this is thinking time)

---

## Handling Different Entry Points

**User brings a vague idea:**

```
User: I'm thinking about adding real-time collaboration

You: Real-time collab is a big space. Let me think about this...

     COLLABORATION SPECTRUM
     ════════════════════════════════════════════

     Awareness        Coordination        Sync
         │                 │                │
         ▼                 ▼                ▼
     ┌────────┐       ┌────────┐       ┌────────┐
     │Presence│       │Cursors │       │  CRDT  │
     │  "3    │       │ Multi  │       │Conflict│
     │online" │       │ select │       │  free  │
     └────────┘       └────────┘       └────────┘
         │                 │                │
      trivial          moderate         complex

Where's your head at?
```

**User brings a specific problem:**

```
User: The auth system is a mess

You: [reads codebase]

     ┌─────────────────────────────────────────────┐
     │              CURRENT AUTH FLOW              │
     └─────────────────────────────────────────────┘
                           │
             ┌─────────────┼─────────────┐
             ▼             ▼             ▼
        ┌─────────┐   ┌─────────┐   ┌─────────┐
        │ Google  │   │ GitHub  │   │  Email  │
        │  OAuth  │   │  OAuth  │   │  Magic  │
        └────┬────┘   └────┬────┘   └────┬────┘
             │             │             │
             └─────────────┼─────────────┘
                           ▼
                     ┌───────────┐
                     │  Session  │
                     └─────┬─────┘
                           │
                           ▼
                     ┌───────────┐
                     │   Perms   │
                     └───────────┘

I see three tangles. Which one's burning?
```

**User is stuck mid-implementation:**

```
User: /opsx-explore add-auth-system
      The OAuth integration is more complex than expected

You: [reads change artifacts]

You're on task 4: "Implement OAuth flow"

Let me trace what's involved...

[draws diagram, explores options, suggests paths]

Want to update the design to reflect this?
Or add a spike task to investigate?
```

**User wants to compare options:**

```
User: Should we use Postgres or SQLite?

You: Generic answer is boring. What's the context?

User: A CLI tool that tracks local dev environments

You: That changes everything.

     ┌─────────────────────────────────────────────────┐
     │              CLI TOOL DATA STORAGE              │
     └─────────────────────────────────────────────────┘

     Key constraints:
       • No daemon running
       • Must work offline
       • Single user

                   SQLite          Postgres
     Deployment    embedded ✓      needs server ✗
     Offline       yes ✓           no ✗
     Single file   yes ✓           no ✗

SQLite. Not even close.

Unless... is there a sync component?
```

---

## Ending Discovery

There's no required ending. Discovery might:

- **Flow into a proposal**: "Ready to start? I can create a change proposal."
- **Result in artifact updates**: "Updated design.md with these decisions"
- **Just provide clarity**: User has what they need, moves on
- **Continue later**: "We can pick this up anytime"

When it feels like things are crystallizing, you might summarize:

```
## What We Figured Out

**The problem**: [crystallized understanding]

**The approach**: [if one emerged]

**Open questions**: [if any remain]

**Next steps** (if ready):
- Create a change proposal
- Keep exploring: just keep talking
```

But this summary is optional. Sometimes the thinking IS the value.

---

## Guardrails

- **Don't implement** - Never write code or implement features. Creating OpenSpec artifacts is fine; writing application code is not.
- **Don't fake understanding** - If something is unclear, dig deeper
- **Don't rush** - Discovery is thinking time, not task time
- **Don't force structure** - Let patterns emerge naturally
- **Don't auto-capture** - Offer to save insights, don't just do it
- **Do visualize** - A good diagram is worth many paragraphs
- **Do explore the codebase** - Ground discussions in reality
- **Do question assumptions** - Including the user's and your own
110 .opencode/skills/openspec-propose/SKILL.md Normal file
@@ -0,0 +1,110 @@
---
name: openspec-propose
description: Propose a new change with all artifacts generated in one step. Use when the user wants to quickly describe what they want to build and get a complete proposal with design, specs, and tasks ready for implementation.
license: MIT
compatibility: Requires openspec CLI.
metadata:
  author: openspec
  version: "1.0"
  generatedBy: "1.2.0"
---

Propose a new change - create the change and generate all artifacts in one step.

I'll create a change with artifacts:

- proposal.md (what & why)
- design.md (how)
- tasks.md (implementation steps)

When ready to implement, run /opsx-apply

---

**Input**: The user's request should include a change name (kebab-case) OR a description of what they want to build.

**Steps**

1. **If no clear input provided, ask what they want to build**

   Use the **AskUserQuestion tool** (open-ended, no preset options) to ask:

   > "What change do you want to work on? Describe what you want to build or fix."

   From their description, derive a kebab-case name (e.g., "add user authentication" → `add-user-auth`).

   **IMPORTANT**: Do NOT proceed without understanding what the user wants to build.

2. **Create the change directory**

   ```bash
   openspec new change "<name>"
   ```

   This creates a scaffolded change at `openspec/changes/<name>/` with `.openspec.yaml`.

3. **Get the artifact build order**

   ```bash
   openspec status --change "<name>" --json
   ```

   Parse the JSON to get (a `jq` sketch follows this list):

   - `applyRequires`: array of artifact IDs needed before implementation (e.g., `["tasks"]`)
   - `artifacts`: list of all artifacts with their status and dependencies
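
   For example, a minimal sketch of reading these fields with `jq` (assuming `jq` is installed; the field names and the `ready`/`done` statuses are the ones described in this document):

   ```bash
   status=$(openspec status --change "<name>" --json)

   # Artifact IDs required before implementation, e.g. ["tasks"]
   echo "$status" | jq -r '.applyRequires[]'

   # Artifacts whose dependencies are satisfied and can be created next
   echo "$status" | jq -r '.artifacts[] | select(.status == "ready") | .id'

   # Exits 0 once every applyRequires artifact reports status "done"
   echo "$status" | jq -e '
     [.applyRequires[] as $id
      | .artifacts[] | select(.id == $id) | .status] | all(. == "done")'
   ```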

4. **Create artifacts in sequence until apply-ready**

   Use the **TodoWrite tool** to track progress through the artifacts.

   Loop through artifacts in dependency order (artifacts with no pending dependencies first):

   a. **For each artifact that is `ready` (dependencies satisfied)**:
      - Get instructions:
        ```bash
        openspec instructions <artifact-id> --change "<name>" --json
        ```
      - The instructions JSON includes:
        - `context`: Project background (constraints for you - do NOT include in output)
        - `rules`: Artifact-specific rules (constraints for you - do NOT include in output)
        - `template`: The structure to use for your output file
        - `instruction`: Schema-specific guidance for this artifact type
        - `outputPath`: Where to write the artifact
        - `dependencies`: Completed artifacts to read for context
      - Read any completed dependency files for context
      - Create the artifact file using `template` as the structure
      - Apply `context` and `rules` as constraints - but do NOT copy them into the file
      - Show brief progress: "Created <artifact-id>"

   b. **Continue until all `applyRequires` artifacts are complete**
      - After creating each artifact, re-run `openspec status --change "<name>" --json`
      - Check if every artifact ID in `applyRequires` has `status: "done"` in the artifacts array
      - Stop when all `applyRequires` artifacts are done

   c. **If an artifact requires user input** (unclear context):
      - Use the **AskUserQuestion tool** to clarify
      - Then continue with creation

5. **Show final status**

   ```bash
   openspec status --change "<name>"
   ```

**Output**

After completing all artifacts, summarize:

- Change name and location
- List of artifacts created with brief descriptions
- What's ready: "All artifacts created! Ready for implementation."
- Prompt: "Run `/opsx-apply` or ask me to implement to start working on the tasks."

**Artifact Creation Guidelines**

- Follow the `instruction` field from `openspec instructions` for each artifact type
- The schema defines what each artifact should contain - follow it
- Read dependency artifacts for context before creating new ones
- Use `template` as the structure for your output file - fill in its sections
- **IMPORTANT**: `context` and `rules` are constraints for YOU, not content for the file
  - Do NOT copy `<context>`, `<rules>`, `<project_context>` blocks into the artifact
  - These guide what you write, but should never appear in the output

**Guardrails**

- Create ALL artifacts needed for implementation (as defined by the schema's `apply.requires`)
- Always read dependency artifacts before creating a new one
- If context is critically unclear, ask the user - but prefer making reasonable decisions to keep momentum
- If a change with that name already exists, ask whether the user wants to continue it or create a new one
- Verify each artifact file exists after writing before proceeding to the next
307 AGENTS.md Normal file
@@ -0,0 +1,307 @@
# AGENTS.md - Agent Coding Guidelines for Stripstream Librarian

This file provides guidelines for agentic coding agents operating in this repository.

---

## 1. Build, Lint, and Test Commands

### Build Commands

```bash
# Build debug version (fastest for development)
cargo build

# Build release version (optimized)
cargo build --release

# Build specific crate
cargo build -p api
cargo build -p indexer

# Watch mode for development (requires cargo-watch)
cargo watch -x build
```

### Lint & Format Commands

```bash
# Run clippy lints
cargo clippy

# Fix auto-fixable clippy warnings
cargo clippy --fix

# Format code
cargo fmt

# Check formatting without making changes
cargo fmt -- --check
```

### Test Commands

```bash
# Run all tests
cargo test

# Run tests for specific crate
cargo test -p api
cargo test -p indexer
cargo test -p parsers

# Run a single test by name
cargo test test_name_here

# Run tests with output display
cargo test -- --nocapture

# Run doc tests
cargo test --doc
```

### Database Migrations

```bash
# Run migrations manually (via sqlx CLI)
# Ensure DATABASE_URL is set, then:
sqlx migrate run

# Create new migration
sqlx migrate add -r migration_name
```

### Docker Development

`docker-compose.yml` lives at the project **root** (not in `infra/`).

```bash
# Start infrastructure only
docker compose up -d postgres

# Start full stack
docker compose up -d

# View logs
docker compose logs -f api
docker compose logs -f indexer
```

---

## 2. Code Style Guidelines

### General Principles

- **Conciseness**: Keep responses short and direct. Avoid unnecessary preamble or explanation.
- **Idiomatic Rust**: Follow Rust best practices and ecosystem conventions.
- **Error Handling**: Use `anyhow::Result<T>` for application code, `std::io::Result<T>` for simple file operations.
- **Async**: Use `tokio` for the async runtime. Prefer `#[tokio::main]` over a manual runtime.

### Naming Conventions

| Element | Convention | Example |
|---------|------------|---------|
| Variables | snake_case | `let book_id = ...` |
| Functions | snake_case | `fn get_book(...)` |
| Structs/Enums | PascalCase | `struct BookItem` |
| Modules | snake_case | `mod books;` |
| Constants | SCREAMING_SNAKE_CASE | `const BATCH_SIZE: usize = 100;` |
| Types | PascalCase | `type MyResult<T> = Result<T, Error>;` |

### Imports

- **Absolute imports** for workspace crates: `use parsers::{detect_format, parse_metadata};`
- **Standard library** imports: `use std::path::Path;`
- **External crates**: `use sqlx::{postgres::PgPoolOptions, Row};`
- **Group by**: std → external → workspace → local (with blank lines between)

```rust
use std::collections::HashMap;
use std::path::Path;

use anyhow::Context;
use serde::{Deserialize, Serialize};
use sqlx::Row;
use uuid::Uuid;

use crate::error::ApiError;
use crate::AppState;
```

### Error Handling

- Use `anyhow` for application-level error handling with context
- Use `with_context()` for adding context to errors
- Return `Result<T, ApiError>` in API handlers
- Use the `?` operator instead of manual match/unwrap where possible

```rust
// Good
fn process_book(path: &Path) -> anyhow::Result<Book> {
    let file = std::fs::File::open(path)
        .with_context(|| format!("cannot open file: {}", path.display()))?;
    // ...
}

// Good - API error handling
async fn get_book(State(state): State<AppState>, Path(id): Path<Uuid>)
    -> Result<Json<Book>, ApiError> {
    let row = sqlx::query("SELECT * FROM books WHERE id = $1")
        .bind(id)
        .fetch_optional(&state.pool)
        .await
        .map_err(ApiError::internal)?;
    // ...
}
```

### Database (sqlx)

- Use **raw SQL queries** with `sqlx::query()` and `sqlx::query_scalar()`
- Prefer **batch operations** using `UNNEST` for bulk inserts/updates (see the sketch below)
- Always use **parameterized queries** (`$1`, `$2`, etc.) - never string interpolation
- Follow existing patterns for transactions:

```rust
let mut tx = pool.begin().await?;
// ... queries ...
tx.commit().await?;
```
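
The `UNNEST` batching shape, shown here through `psql` purely for illustration (the services run the same pattern from Rust via sqlx bind parameters; the table and column list are trimmed to `id`/`title` for brevity, and the UUID is a dummy value):

```bash
psql "$DATABASE_URL" <<'SQL'
-- One statement updates many rows: the arrays are zipped into a virtual table
UPDATE books AS b
SET title = u.title
FROM UNNEST(
  ARRAY['11111111-1111-1111-1111-111111111111']::uuid[],
  ARRAY['Tome 1']::text[]
) AS u(id, title)
WHERE b.id = u.id;
SQL
```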

### Async/Tokio

- Use `tokio::spawn` for background tasks
- Use `spawn_blocking` for CPU-bound work (image processing, file I/O)
- Keep async handlers non-blocking
- Use `tokio::time::timeout` for operations with timeouts

```rust
let bytes = tokio::time::timeout(
    Duration::from_secs(60),
    tokio::task::spawn_blocking(move || {
        render_page(&abs_path_clone, n)
    }),
)
.await
.map_err(|_| ApiError::internal("timeout"))?
.map_err(ApiError::internal)?;
```

### Structs and Serialization

- Use `#[derive(Serialize, Deserialize, ToSchema)]` for API types
- Add `utoipa` schemas for OpenAPI documentation
- Use `Option<T>` for nullable fields
- Document public structs briefly

```rust
#[derive(Serialize, ToSchema)]
pub struct BookItem {
    #[schema(value_type = String)]
    pub id: Uuid,
    pub title: String,
    pub author: Option<String>,
    // ...
}
```

### Performance Considerations

- Use **batch operations** for database inserts/updates (100 items recommended)
- Use **parallel iterators** (`rayon::par_iter()`) for CPU-intensive scans
- Implement **caching** for expensive operations (see `pages.rs` for disk/memory cache examples)
- Use **streaming** for large data where applicable

### Testing

- Currently there are no test files - consider adding unit tests for:
  - Parser functions
  - Thumbnail generation
  - Configuration parsing
- Use `#[cfg(test)]` modules for integration tests

---

## 3. Project Structure

```
stripstream-librarian/
├── apps/
│   ├── api/            # REST API (axum) — port 7080
│   │   └── src/        # books.rs, pages.rs, thumbnails.rs, state.rs, auth.rs...
│   ├── indexer/        # Background indexing service — port 7081
│   │   └── src/        # worker.rs, scanner.rs, batch.rs, scheduler.rs, watcher.rs...
│   └── backoffice/     # Next.js admin UI — port 7082
├── crates/
│   ├── core/           # Shared config (env vars)
│   │   └── src/config.rs
│   └── parsers/        # Book parsing (CBZ, CBR, PDF)
├── infra/
│   └── migrations/     # SQL migrations (sqlx)
├── data/
│   └── thumbnails/     # Thumbnails generated by the API
├── libraries/          # Book storage (mounted volume)
└── docker-compose.yml  # At the root (not in infra/)
```

### Key Files

| File | Purpose |
|------|---------|
| `apps/api/src/books.rs` | Book CRUD endpoints |
| `apps/api/src/pages.rs` | Page rendering & caching (LRU + disk) |
| `apps/api/src/thumbnails.rs` | Endpoints that create thumbnail jobs (rebuild/regenerate) |
| `apps/api/src/state.rs` | AppState, `concurrent_renders` semaphore |
| `apps/indexer/src/scanner.rs` | Phase 1 discovery: fast scan without archive I/O, skips unchanged folders |
| `apps/indexer/src/analyzer.rs` | Phase 2 analysis: `analyze_book` + WebP thumbnail generation |
| `apps/indexer/src/batch.rs` | Bulk DB ops via UNNEST |
| `apps/indexer/src/worker.rs` | Job loop, watcher, scheduler orchestration |
| `crates/parsers/src/lib.rs` | Format detection, metadata parsing |
| `crates/core/src/config.rs` | Configuration from environment |
| `infra/migrations/*.sql` | Database schema |

---

## 4. Common Patterns

### Configuration from Environment

```rust
// In crates/core/src/config.rs
impl IndexerConfig {
    pub fn from_env() -> Result<Self> {
        Ok(Self {
            listen_addr: std::env::var("INDEXER_LISTEN_ADDR")
                .unwrap_or_else(|_| "0.0.0.0:7081".to_string()),
            database_url: std::env::var("DATABASE_URL")
                .context("DATABASE_URL is required")?,
            // ...
        })
    }
}
```

### Path Remapping

```rust
fn remap_libraries_path(path: &str) -> String {
    if let Ok(root) = std::env::var("LIBRARIES_ROOT_PATH") {
        if path.starts_with("/libraries/") {
            return path.replacen("/libraries", &root, 1);
        }
    }
    path.to_string()
}
```

---

## 5. Important Notes

- **Workspace**: This is a Cargo workspace. Always specify the package when building specific apps.
- **Dependencies**: External crates are defined in the workspace `Cargo.toml`, not in individual `Cargo.toml` files.
- **Database**: PostgreSQL is required. Run migrations before starting services.
- **External Tools**: 4 system tools required — `unrar` (CBR page count), `unar` (CBR extraction), `pdfinfo` (PDF page count), `pdftoppm` (PDF page render). Note: `unrar` and `unar` are distinct tools.
- **Thumbnails**: generated by the **indexer** service (phase 2, `analyzer.rs`). The API only creates jobs in the DB — it does not generate thumbnails directly.
- **Sub-AGENTS.md**: module-specific guidelines in `apps/api/`, `apps/indexer/`, `apps/backoffice/`, `crates/parsers/`.
73 CLAUDE.md Normal file
@@ -0,0 +1,73 @@
# Stripstream Librarian

Comic book / ebook library manager. Multi-crate Cargo workspace with a Next.js backoffice.

## Architecture

| Service | Directory | Local port |
|---------|-----------|------------|
| REST API (axum) | `apps/api/` | 7080 |
| Indexer (background) | `apps/indexer/` | 7081 |
| Backoffice (Next.js) | `apps/backoffice/` | 7082 |
| PostgreSQL | infra | 6432 |

Shared crates: `crates/core` (env config), `crates/parsers` (CBZ/CBR/PDF).

## Commands

```bash
# Build
cargo build             # entire workspace
cargo build -p api      # specific crate
cargo build --release   # optimized build

# Linting / formatting
cargo clippy
cargo fmt

# Tests
cargo test
cargo test -p parsers

# Infra (dependencies only) — docker-compose.yml is at the repo root
docker compose up -d postgres

# Backoffice dev
cd apps/backoffice && npm install && npm run dev   # http://localhost:7082

# Migrations
sqlx migrate run        # DATABASE_URL must be set
```

## Environment

```bash
cp .env.example .env    # then edit the REQUIRED values
```

Variables **required** at startup: `DATABASE_URL`, `API_BOOTSTRAP_TOKEN`.

## Gotchas

- **System dependencies**: 4 tools required — `unrar` (CBR listing), `unar` (CBR extraction), `pdfinfo` (PDF page count), `pdftoppm` (PDF rendering). `unrar` ≠ `unar`.
- **Backoffice port**: `npm run dev` listens on **7082**, not 3000.
- **LIBRARIES_ROOT_PATH**: paths stored in the DB start with `/libraries/`; in local dev, set this variable to remap them to the real folder (see the sketch after this list).
- **Thumbnails**: stored in `THUMBNAIL_DIRECTORY` (default `/data/thumbnails`), generated by **the API** (not the indexer) — the indexer triggers a checkup via `POST /index/jobs/:id/thumbnails/checkup`.
- **Cargo workspace**: external dependencies are declared in the root `Cargo.toml`, not in the individual crates.
- **Migrations**: `infra/migrations/` folder, managed by sqlx. Always migrate before starting the services.
- **Search**: full-text via PostgreSQL (`ILIKE` + `pg_trgm`), no external search engine.
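
A minimal local-dev sketch of the remap (the target folder here is hypothetical):

```bash
# Remap /libraries/... paths stored in the DB to a real local folder
export LIBRARIES_ROOT_PATH="$HOME/Comics"
```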

## Key Files

| File | Role |
|------|------|
| `crates/core/src/config.rs` | Config from env (API, Indexer, AdminUI) |
| `crates/parsers/src/lib.rs` | Format detection, metadata extraction |
| `apps/api/src/books.rs` | Book CRUD endpoints |
| `apps/api/src/search.rs` | PostgreSQL full-text search |
| `apps/api/src/pages.rs` | Page rendering + LRU cache |
| `apps/indexer/src/scanner.rs` | Filesystem scan |
| `infra/migrations/*.sql` | DB schema |

> See `AGENTS.md` for detailed coding conventions (error handling, sqlx patterns, async/tokio).
> Module-specific `AGENTS.md` files exist in `apps/api/`, `apps/indexer/`, `apps/backoffice/`, `crates/parsers/`.
813 Cargo.lock generated
File diff suppressed because it is too large
13 Cargo.toml
@@ -3,13 +3,14 @@ members = [
     "apps/api",
     "apps/indexer",
     "crates/core",
+    "crates/notifications",
     "crates/parsers",
 ]
 resolver = "2"

 [workspace.package]
 edition = "2021"
-version = "0.1.0"
+version = "2.0.0"
 license = "MIT"

 [workspace.dependencies]
@@ -19,8 +20,10 @@ axum = "0.7"
 base64 = "0.22"
 chrono = { version = "0.4", features = ["serde"] }
 image = { version = "0.25", default-features = false, features = ["jpeg", "png", "webp"] }
+jpeg-decoder = "0.3"
 lru = "0.12"
-reqwest = { version = "0.12", default-features = false, features = ["json", "rustls-tls"] }
+rayon = "1.10"
+reqwest = { version = "0.12", default-features = false, features = ["json", "multipart", "rustls-tls"] }
 rand = "0.8"
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
@@ -31,6 +34,12 @@ tower = { version = "0.5", features = ["limit"] }
 tracing = "0.1"
 tracing-subscriber = { version = "0.3", features = ["env-filter", "fmt"] }
 uuid = { version = "1.12", features = ["serde", "v4"] }
+natord = "1.0"
+num_cpus = "1.16"
+pdfium-render = { version = "0.8", default-features = false, features = ["pdfium_latest", "image_latest", "thread_safe"] }
+unrar = "0.5"
 walkdir = "2.5"
+webp = "0.3"
 utoipa = "4.0"
 utoipa-swagger-ui = "6.0"
+scraper = "0.21"
21 LICENSE Normal file
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2026 Julien Froidefond

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
10 PLAN.md
@@ -12,7 +12,7 @@ Build an ultra-performant server to index and serve libraries
 - Backend/API: Rust (`axum`)
 - Indexing: dedicated Rust service (`indexer`)
 - DB: PostgreSQL
-- Search: Meilisearch
+- Search: PostgreSQL full-text (ILIKE + pg_trgm)
 - Deployment: Docker Compose
 - Auth: bootstrap token from env + admin tokens in DB (creatable/revocable)
 - Admin token expiration: none by default (manual revocation)
@@ -33,7 +33,7 @@ Build an ultra-performant server to index and serve libraries
 **DoD:** Crates build OK.

 ### T2 - Docker Compose infra
-- [x] Define `postgres`, `meilisearch`, `api`, `indexer` services
+- [x] Define `postgres`, `api`, `indexer` services
 - [x] Persistent volumes
 - [x] Healthchecks

@@ -114,7 +114,7 @@ Build an ultra-performant server to index and serve libraries
 **DoD:** Pagination/filters working.

 ### T13 - Search
-- [x] Projection to Meilisearch
+- [x] PostgreSQL full-text search
 - [x] `GET /search?q=...&library_id=...&type=...`
 - [x] Fuzzy + filters

@@ -264,10 +264,10 @@ Build an ultra-performant server to index and serve libraries
 - Bootstrap token = break-glass (can be disabled later)

 ## Journal
-- 2026-03-05: `docker compose up -d --build` validated, full stack healthy (`postgres`, `meilisearch`, `api`, `indexer`, `admin-ui`).
+- 2026-03-05: `docker compose up -d --build` validated, full stack healthy (`postgres`, `api`, `indexer`, `admin-ui`).
 - 2026-03-05: infra adjustments applied for stable startup (`unrar` -> `unrar-free`, `rust:1-bookworm` image, `127.0.0.1` healthchecks).
 - 2026-03-05: added a `migrate` service in Compose to run `infra/migrations/0001_init.sql` automatically at startup.
-- 2026-03-05: Batch 2 done (jobs, incremental scan, `cbz/cbr/pdf` parsers, books API, sync + Meilisearch search).
+- 2026-03-05: Batch 2 done (jobs, incremental scan, `cbz/cbr/pdf` parsers, books API, PostgreSQL search).
 - 2026-03-05: end-to-end verification OK on a test library (`/libraries/demo`) with indexing, `/books` listing and `/search` (1 CBZ detected).
 - 2026-03-05: Batch 3 progress: pages endpoint (`/books/:id/pages/:n`) live with LRU cache, ETag/Cache-Control, render concurrency limit and timeouts.
 - 2026-03-05: API hardening: readiness exposed without auth via `route_layer`, simple `/metrics` endpoint, read rate limiting (120 req/s).
141 PLAN_THUMBNAILS.md Normal file
@@ -0,0 +1,141 @@
# Plan: thumbnail generation at index time

## 1. Database

### SQL migration (`0010_add_thumbnails.sql`)

- [x] Add `thumbnail_path TEXT` to the `books` table (nullable)
- [x] Add thumbnail settings in `app_settings`:

```json
{
  "thumbnail": {
    "enabled": true,
    "width": 300,
    "height": 400,
    "quality": 80,
    "format": "webp"
  }
}
```

---

## 2. Configuration

### `crates/core/src/config.rs`

- [x] Add a `ThumbnailConfig` struct
- [x] Add fields to `IndexerConfig`:
  - `thumbnail_width: u32` (default: 300)
  - `thumbnail_height: u32` (default: 400)
  - `thumbnail_quality: u8` (default: 80)
  - `thumbnail_dir: String` (default: `/data/thumbnails`)
- [x] Add getters backed by env vars (see the sketch below)
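A minimal sketch of what the env-driven config could look like; the field names and env var names match this plan, but the `env_or` helper and its exact placement in `crates/core` are assumptions.

```rust
use std::env;

#[derive(Debug, Clone)]
pub struct ThumbnailConfig {
    pub enabled: bool,
    pub width: u32,
    pub height: u32,
    pub quality: u8,
    pub dir: String,
}

// Hypothetical helper: read an env var, fall back to a default on absence or parse failure.
fn env_or<T: std::str::FromStr>(key: &str, default: T) -> T {
    env::var(key).ok().and_then(|v| v.parse().ok()).unwrap_or(default)
}

impl ThumbnailConfig {
    pub fn from_env() -> Self {
        Self {
            enabled: env_or("THUMBNAIL_ENABLED", true),
            width: env_or("THUMBNAIL_WIDTH", 300),
            height: env_or("THUMBNAIL_HEIGHT", 400),
            quality: env_or("THUMBNAIL_QUALITY", 80),
            dir: env::var("THUMBNAIL_DIRECTORY").unwrap_or_else(|_| "/data/thumbnails".into()),
        }
    }
}
```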
---

## 3. Indexer - extracting the first page

### Function to create in `crates/parsers/src/lib.rs`

- [x] `extract_first_page(path: &Path, format: BookFormat) -> Result<Vec<u8>>`
  - Reuse the logic from `pages.rs:extract_cbz_page`
  - Reuse the logic from `pages.rs:extract_cbr_page`
  - Reuse the logic from `pages.rs:render_pdf_page`

### Thumbnail generation function in `apps/indexer/src/main.rs`

- [x] `generate_thumbnail(image_bytes: &[u8], config: &ThumbnailConfig) -> Result<Vec<u8>>` (sketch after this list)
  - Load the image with `image::load_from_memory`
  - Resize with `image::resize` (aspect ratio kept)
  - Encode to WebP with `webp::Encoder`
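The three bullets above form a short pipeline. A hedged sketch, assuming the `image` and `webp` crates listed in section 7 and `anyhow` for errors; the trimmed-down `ThumbnailConfig` stands in for the one from section 2.

```rust
use anyhow::{anyhow, Result};
use image::imageops::FilterType;

// Trimmed-down stand-in for the ThumbnailConfig sketched in section 2.
pub struct ThumbnailConfig {
    pub width: u32,
    pub height: u32,
    pub quality: u8,
}

fn generate_thumbnail(image_bytes: &[u8], config: &ThumbnailConfig) -> Result<Vec<u8>> {
    // Decode whatever format the first page comes in (JPEG, PNG, WebP, ...).
    let img = image::load_from_memory(image_bytes)?;
    // `resize` keeps the aspect ratio within the width x height bounding box.
    let resized = img.resize(config.width, config.height, FilterType::Lanczos3);
    // Encode to lossy WebP at the configured quality (0-100).
    let encoder = webp::Encoder::from_image(&resized).map_err(|e| anyhow!("webp: {e}"))?;
    Ok(encoder.encode(config.quality as f32).to_vec())
}
```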
- [x] `save_thumbnail(book_id: Uuid, thumbnail_bytes: &[u8], config: &ThumbnailConfig) -> Result<String>`

### Integration into `scan_library`

- [x] After metadata parsing, extract the first page
- [x] Generate the thumbnail and save it
- [x] Store the path in the DB (via batch insert)

---

## 4. Indexer - parallel WalkDir

### Replacing the sequential `WalkDir`

- [x] Use `rayon` to parallelize the scan (fuller sketch after this list):

```rust
let total_files: usize = library_paths.par_iter()
    .map(|root_path| { ... })
    .sum();
```

- [x] Add `rayon = "1.10"` to the workspace dependencies
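For reference, a self-contained version of what the parallel walk could look like; the body of the elided closure (counting files under each root) is an assumption, as is `walkdir` being the crate behind the sequential scan.

```rust
use rayon::prelude::*;
use walkdir::WalkDir;

/// Count candidate files across all library roots, one rayon task per root.
fn count_files(library_paths: &[String]) -> usize {
    library_paths
        .par_iter()
        .map(|root_path| {
            WalkDir::new(root_path)
                .into_iter()
                .filter_map(Result::ok) // skip unreadable entries
                .filter(|e| e.file_type().is_file())
                .count()
        })
        .sum()
}
```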
---

## 5. API - serving the thumbnails

### Model updates in `apps/api/src/books.rs`

- [x] Add `thumbnail_url: Option<String>` to `BookItem`
- [x] Add `thumbnail_url: Option<String>` to `BookDetails`
- [x] Update the SQL queries to fetch `thumbnail_path`

### New route in `apps/api/src/main.rs`

- [x] Route `/books/:id/thumbnail` (GET) (sketch after this list)
  - Returns the static file at `thumbnail_path`
  - Content-Type: image/webp
  - Cache-Control: public, max-age=31536000
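A hedged sketch of the handler, assuming the `AppState`/`ApiError` shapes documented in `apps/api/AGENTS.md`; the fallback to rendering page 1 (added later in this plan) is omitted for brevity.

```rust
use axum::{
    extract::{Path, State},
    http::header,
    response::IntoResponse,
};
use uuid::Uuid;

async fn get_thumbnail(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<impl IntoResponse, ApiError> {
    // thumbnail_path is nullable: books indexed before this feature have none.
    let path: Option<String> =
        sqlx::query_scalar("SELECT thumbnail_path FROM books WHERE id = $1")
            .bind(id)
            .fetch_optional(&state.pool)
            .await?
            .flatten();
    let path = path.ok_or_else(|| ApiError::not_found("no thumbnail"))?;
    let bytes = tokio::fs::read(&path).await?;
    Ok((
        [
            (header::CONTENT_TYPE, "image/webp"),
            (header::CACHE_CONTROL, "public, max-age=31536000"),
        ],
        bytes,
    ))
}
```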
### Removing the first-page cache (optional)

- [ ] Optional: simplify `pages.rs`, since thumbnails are pre-generated
- [ ] Keep rendering for pages > 1

### Adapting the backoffice

Thumbnails are currently fetched through a page/1 route.

- [x] Switch every cover to the new, clean `/thumbnail` route.

### Refactoring between API and indexer

The indexer could instead call the API to generate the thumbnails, so that the API owns images and ebook reads; each domain stays cleanly separated. At the end of a build, the indexer calls the API to run a thumbnail checkup.

On the backoffice side, everywhere live job processing is shown, a thumbnail-processing phase should be visible over SSE. On the API side, if a book has no thumbnail we fall back to the current `pages` code.

- [x] Migration `0010_index_job_thumbnails_phase.sql`: `generating_thumbnails` status in `index_jobs`
- [x] API: `get_thumbnail` falls back to page 1 when `thumbnail_path` is missing (via `pages::render_book_page_1`)
- [x] API: `thumbnails.rs` module, POST `/index/jobs/:id/thumbnails/checkup` (admin), runs generation as a background task and updates the job
- [x] Indexer: no more thumbnail generation; at the end of a build: status = `generating_thumbnails`, then API checkup call; `api_base_url` + `api_bootstrap_token` config (core)
- [x] Backoffice: "Thumbnails" StatusBadge for `generating_thumbnails`; JobProgress/JobRow/JobsIndicator/job detail page: thumbnail phase visible over SSE (X/Y thumbnails, progress bar)
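A sketch of the end-of-build call from the indexer side, using the `reqwest` client already in the workspace; the exact plumbing of `api_base_url` and `api_bootstrap_token` follows the checklist item above.

```rust
use uuid::Uuid;

/// At the end of a build, ask the API to run the thumbnail checkup for this job.
async fn trigger_thumbnail_checkup(
    client: &reqwest::Client,
    api_base_url: &str,
    api_bootstrap_token: &str,
    job_id: Uuid,
) -> Result<(), reqwest::Error> {
    client
        .post(format!("{api_base_url}/index/jobs/{job_id}/thumbnails/checkup"))
        .bearer_auth(api_bootstrap_token)
        .send()
        .await?
        .error_for_status()?; // surface 4xx/5xx as errors
    Ok(())
}
```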
---

## 6. Settings API

### Existing settings endpoint

- [ ] Check that `/settings` exposes the thumbnail config
- [ ] Add a PUT endpoint to update the thumbnail settings (possible shape sketched below)
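Since the PUT endpoint is still an open task, a possible shape for it, assuming the `AppState`/`ApiError` types from `apps/api/AGENTS.md` and the `app_settings(key, value)` table used elsewhere in this plan.

```rust
use axum::{extract::State, Json};

#[derive(serde::Deserialize, serde::Serialize)]
struct ThumbnailSettings {
    enabled: bool,
    width: u32,
    height: u32,
    quality: u8,
    format: String,
}

// Hypothetical handler: upsert the JSON blob under the "thumbnail" key of app_settings.
async fn put_thumbnail_settings(
    State(state): State<AppState>,
    Json(body): Json<ThumbnailSettings>,
) -> Result<Json<ThumbnailSettings>, ApiError> {
    let value = serde_json::to_value(&body).map_err(|e| ApiError::internal(e.to_string()))?;
    sqlx::query(
        "INSERT INTO app_settings (key, value) VALUES ('thumbnail', $1)
         ON CONFLICT (key) DO UPDATE SET value = EXCLUDED.value",
    )
    .bind(value)
    .execute(&state.pool)
    .await?;
    Ok(Json(body))
}
```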
---

## 7. Miscellaneous tasks

- [x] Add the `image` and `webp` dependencies to the indexer `Cargo.toml`
- [x] Release build verified

---

## Suggested implementation order

1. [x] DB migration + settings
2. [x] Config + parsers (extract first page)
3. [x] Indexer thumbnail generation + save to disk
4. [x] API serves the thumbnail
5. [x] Parallel walkdir
6. [ ] Tests & polish (to do)

---

## Post-deployment

- [ ] Apply the SQL migration: `psql -f infra/migrations/0010_add_thumbnails.sql`
- [ ] Create the thumbnails directory: `mkdir -p /data/thumbnails`
- [ ] Configure env vars if needed:
  - `THUMBNAIL_ENABLED=true`
  - `THUMBNAIL_WIDTH=300`
  - `THUMBNAIL_HEIGHT=400`
  - `THUMBNAIL_QUALITY=80`
  - `THUMBNAIL_DIRECTORY=/data/thumbnails`
README.md (233 lines)
@@ -9,7 +9,7 @@ The project consists of the following components:
 - **API** (`apps/api/`) - Rust-based REST API service
 - **Indexer** (`apps/indexer/`) - Rust-based background indexing service
 - **Backoffice** (`apps/backoffice/`) - Next.js web administration interface
-- **Infrastructure** (`infra/`) - Docker Compose setup with PostgreSQL and Meilisearch
+- **Infrastructure** (`infra/`) - Docker Compose setup with PostgreSQL
 
 ## Quick Start
 
@@ -27,28 +27,24 @@ The project consists of the following components:
 ```
 
 2. Edit `.env` and set secure values for:
-- `MEILI_MASTER_KEY` - Master key for Meilisearch
 - `API_BOOTSTRAP_TOKEN` - Bootstrap token for initial API authentication
 
 ### Running with Docker
 
 ```bash
-cd infra
 docker compose up -d
 ```
 
 This will start:
-- PostgreSQL (port 5432)
-- Meilisearch (port 7700)
-- API service (port 8080)
-- Indexer service (port 8081)
-- Backoffice web UI (port 8082)
+- PostgreSQL (port 6432)
+- API service (port 7080)
+- Indexer service (port 7081)
+- Backoffice web UI (port 7082)
 
 ### Accessing the Application
 
-- **Backoffice**: http://localhost:8082
-- **API**: http://localhost:8080
-- **Meilisearch**: http://localhost:7700
+- **Backoffice**: http://localhost:7082
+- **API**: http://localhost:7080
 
 ### Default Credentials
 
@@ -62,8 +58,7 @@ The default bootstrap token is configured in your `.env` file. Use this for init
 
 ```bash
 # Start dependencies
-cd infra
-docker compose up -d postgres meilisearch
+docker compose up -d postgres
 
 # Run API
 cd apps/api
@@ -82,53 +77,114 @@ npm install
 npm run dev
 ```
 
-The backoffice will be available at http://localhost:3000
+The backoffice will be available at http://localhost:7082
 
 ## Features
 
-### Libraries Management
-- Create and manage multiple libraries
-- Configure automatic scanning schedules (hourly, daily, weekly)
-- Real-time file watcher for instant indexing
-- Full and incremental rebuild options
-
-### Books Management
-- Support for CBZ, CBR, and PDF formats
-- Automatic metadata extraction
-- Series and volume detection
-- Full-text search with Meilisearch
-
-### Jobs Monitoring
-- Real-time job progress tracking
-- Detailed statistics (scanned, indexed, removed, errors)
-- Job history and logs
-- Cancel pending jobs
-
-### Search
-- Full-text search across titles, authors, and series
-- Library filtering
-- Real-time suggestions
+> For the full feature list, business rules, and API details, see [docs/FEATURES.md](docs/FEATURES.md).
+
+### Libraries
+- Multi-library management with per-library configuration
+- Incremental and full scanning, real-time filesystem watcher
+- Per-library metadata provider selection (Google Books, ComicVine, BedéThèque, AniList, Open Library)
+
+### Books & Series
+- **Formats**: CBZ, CBR, PDF, EPUB
+- Automatic metadata extraction (title, series, volume, authors, page count) from filenames and directory structure
+- Series aggregation with missing volume detection
+- Thumbnail generation (WebP/JPEG/PNG) with lazy generation and bulk rebuild
+- CBR → CBZ conversion
+
+### Reading Progress
+- Per-book tracking: unread / reading / read with current page
+- Series-level aggregated reading status
+- Bulk mark-as-read for series
+
+### Search & Discovery
+- Full-text search across titles, authors, and series (PostgreSQL `pg_trgm`)
+- Author listing with book/series counts
+- Filtering by reading status, series status, format, metadata provider
+
+### External Metadata
+- Search, match, approve/reject workflow with confidence scoring
+- Batch auto-matching and scheduled metadata refresh
+- Field locking to protect manual edits from sync
+
+### Notifications
+- **Telegram**: real-time notifications via Telegram Bot API
+- 12 granular event toggles (scans, thumbnails, conversions, metadata)
+- Book thumbnail images included in notifications where applicable
+- Test connection from settings
+
+### External Integrations
+- **Komga**: import reading progress
+- **Prowlarr**: search for missing volumes
+- **qBittorrent**: add torrents directly from search results
+
+### Background Jobs
+- Rebuild, rescan, thumbnail generation, metadata batch, CBR conversion
+- Real-time progress via Server-Sent Events (SSE)
+- Job history, error tracking, cancellation
+
+### Page Rendering
+- On-demand page extraction from all formats
+- Image processing (format, quality, max width, resampling filter)
+- LRU in-memory + disk cache
+
+### Security
+- Token-based auth (`admin` / `read` scopes) with Argon2 hashing
+- Rate limiting, token expiration and revocation
+
+### Web UI (Backoffice)
+- Dashboard with statistics, interactive charts (recharts), and reading progress
+- Currently reading & recently read sections
+- Library, book, series, author management
+- Live job monitoring, metadata search modals, settings panel
+- Notification settings with per-event toggle configuration
 
 ## Environment Variables
 
-| Variable | Description | Default |
-|----------|-------------|---------|
-| `API_LISTEN_ADDR` | API service bind address | `0.0.0.0:8080` |
-| `INDEXER_LISTEN_ADDR` | Indexer service bind address | `0.0.0.0:8081` |
-| `BACKOFFICE_PORT` | Backoffice web UI port | `8082` |
-| `DATABASE_URL` | PostgreSQL connection string | `postgres://stripstream:stripstream@postgres:5432/stripstream` |
-| `MEILI_URL` | Meilisearch connection URL | `http://meilisearch:7700` |
-| `MEILI_MASTER_KEY` | Meilisearch master key (required) | - |
-| `API_BOOTSTRAP_TOKEN` | Initial API admin token (required) | - |
-| `INDEXER_SCAN_INTERVAL_SECONDS` | Watcher scan interval | `5` |
-| `LIBRARIES_ROOT_PATH` | Path to libraries directory | `/libraries` |
+Variables marked **required** must be set. The others have a default value.
+
+### Shared (API + Indexer)
+
+| Variable | Description | Default |
+|----------|-------------|---------|
+| `DATABASE_URL` | **required** — PostgreSQL connection | — |
+
+### API
+
+| Variable | Description | Default |
+|----------|-------------|---------|
+| `API_BOOTSTRAP_TOKEN` | **required** — Initial admin token | — |
+| `API_LISTEN_ADDR` | Listen address | `0.0.0.0:7080` |
+
+### Indexer
+
+| Variable | Description | Default |
+|----------|-------------|---------|
+| `INDEXER_LISTEN_ADDR` | Listen address | `0.0.0.0:7081` |
+| `INDEXER_SCAN_INTERVAL_SECONDS` | Watcher scan interval | `5` |
+| `THUMBNAIL_ENABLED` | Enable thumbnail generation | `true` |
+| `THUMBNAIL_DIRECTORY` | Thumbnail storage directory | `/data/thumbnails` |
+| `THUMBNAIL_WIDTH` | Max thumbnail width (px) | `300` |
+| `THUMBNAIL_HEIGHT` | Max thumbnail height (px) | `400` |
+| `THUMBNAIL_QUALITY` | WebP quality (0–100) | `80` |
+| `THUMBNAIL_FORMAT` | Output format | `webp` |
+
+### Backoffice
+
+| Variable | Description | Default |
+|----------|-------------|---------|
+| `API_BOOTSTRAP_TOKEN` | **required** — API access token | — |
+| `API_BASE_URL` | Internal API URL (inside the Docker network) | `http://api:7080` |
 
 ## API Documentation
 
 The API is documented with OpenAPI/Swagger. When running locally, access the docs at:
 
 ```
-http://localhost:8080/api-docs
+http://localhost:7080/swagger-ui
 ```
 
 ## Project Structure
 
@@ -140,12 +196,95 @@ stripstream-librarian/
 │   ├── indexer/          # Rust background indexer
 │   └── backoffice/       # Next.js web UI
 ├── infra/
-│   ├── docker-compose.yml
 │   └── migrations/       # SQL database migrations
 ├── libraries/            # Book storage (mounted volume)
 └── .env                  # Environment configuration
 ```
 
+## Docker Registry
+
+Images are built and pushed to Docker Hub with the naming convention `docker.io/{owner}/stripstream-{service}`.
+
+### Publishing Images (Maintainers)
+
+To build and push all service images to the registry:
+
+```bash
+# Login to Docker Hub first
+docker login -u julienfroidefond32
+
+# Build and push all images
+./scripts/docker-push.sh
+```
+
+This script will:
+- Build images for `api`, `indexer`, and `backoffice`
+- Tag them with the current version (from `Cargo.toml`) and `latest`
+- Push to the registry
+
+### Using Published Images
+
+To use the pre-built images in your own `docker-compose.yml`:
+
+```yaml
+services:
+  postgres:
+    image: postgres:16-alpine
+    environment:
+      POSTGRES_DB: stripstream
+      POSTGRES_USER: stripstream
+      POSTGRES_PASSWORD: stripstream
+    volumes:
+      - postgres_data:/var/lib/postgresql/data
+
+  api:
+    image: julienfroidefond32/stripstream-api:latest
+    ports:
+      - "7080:7080"
+    volumes:
+      - ./libraries:/libraries
+      - ./data/thumbnails:/data/thumbnails
+    environment:
+      # --- Required ---
+      DATABASE_URL: postgres://stripstream:stripstream@postgres:5432/stripstream
+      API_BOOTSTRAP_TOKEN: your_bootstrap_token # required — change this
+      # --- Optional (defaults shown) ---
+      # API_LISTEN_ADDR: 0.0.0.0:7080
+
+  indexer:
+    image: julienfroidefond32/stripstream-indexer:latest
+    ports:
+      - "7081:7081"
+    volumes:
+      - ./libraries:/libraries
+      - ./data/thumbnails:/data/thumbnails
+    environment:
+      # --- Required ---
+      DATABASE_URL: postgres://stripstream:stripstream@postgres:5432/stripstream
+      # --- Optional (defaults shown) ---
+      # INDEXER_LISTEN_ADDR: 0.0.0.0:7081
+      # INDEXER_SCAN_INTERVAL_SECONDS: 5
+      # THUMBNAIL_ENABLED: true
+      # THUMBNAIL_DIRECTORY: /data/thumbnails
+      # THUMBNAIL_WIDTH: 300
+      # THUMBNAIL_HEIGHT: 400
+      # THUMBNAIL_QUALITY: 80
+      # THUMBNAIL_FORMAT: webp
+
+  backoffice:
+    image: julienfroidefond32/stripstream-backoffice:latest
+    ports:
+      - "7082:7082"
+    environment:
+      # --- Required ---
+      API_BOOTSTRAP_TOKEN: your_bootstrap_token # must match api above
+      # --- Optional (defaults shown) ---
+      # API_BASE_URL: http://api:7080
+
+volumes:
+  postgres_data:
+```
 
 ## License
 
-[Your License Here]
+This project is licensed under the [MIT License](LICENSE).
apps/api/AGENTS.md (new file, 73 lines)
@@ -0,0 +1,73 @@
# apps/api — REST API (axum)

HTTP service on port **7080**. See the root `AGENTS.md` for the global conventions.

## File structure

| File | Role |
|---------|------|
| `main.rs` | Routes, AppState initialization, concurrent_renders Semaphore |
| `state.rs` | `AppState` (pool, caches, metrics), `load_concurrent_renders` |
| `auth.rs` | `require_admin` / `require_read` middlewares, token authentication |
| `error.rs` | `ApiError` with constructors `bad_request`, `not_found`, `internal`, etc. |
| `books.rs` | Book CRUD, thumbnails |
| `pages.rs` | Page rendering + double cache (in-memory LRU + disk) |
| `libraries.rs` | Library CRUD, scan triggering |
| `index_jobs.rs` | Job tracking, SSE progress streaming |
| `thumbnails.rs` | Thumbnail rebuild/regeneration |
| `tokens.rs` | API token management (create/revoke) |
| `settings.rs` | Application settings (stored in DB, key `limits`) |
| `openapi.rs` | OpenAPI docs via utoipa, served at `/swagger-ui` |

## Key patterns

### Handler type
```rust
async fn my_handler(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<MyDto>, ApiError> {
    // ...
}
```

### API errors
```rust
// Constructors available in error.rs
ApiError::bad_request("message")
ApiError::not_found("resource not found")
ApiError::internal("unexpected error")
ApiError::unauthorized("missing token")
ApiError::forbidden("admin required")

// Automatic conversion from sqlx::Error and std::io::Error
```

### Authentication
- **Bootstrap token**: direct comparison (`API_BOOTSTRAP_TOKEN`), Admin scope
- **DB tokens**: format `stl_<prefix>_<secret>`, argon2 hash in DB, scope `admin` or `read`
- `require_admin` middleware → admin routes; `require_read` → read routes

### OpenAPI (utoipa)
```rust
#[utoipa::path(get, path = "/books/{id}", ...)]
async fn get_book(...) { }
// Add the handler to openapi.rs (ApiDoc)
```

### Page cache (`pages.rs`)
- **In-memory cache**: 512-entry LRU (`AppState.page_cache`)
- **Disk cache**: `IMAGE_CACHE_DIR` (default `/tmp/stripstream-image-cache`), SHA256 key (sketch below)
- Concurrency bounded by `AppState.page_render_limit` (Semaphore, configurable in DB)
- `spawn_blocking` for image rendering (CPU-bound)
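To make the SHA256 disk key concrete, a hypothetical derivation; exactly which render parameters feed the hash is an assumption.

```rust
use sha2::{Digest, Sha256};
use std::path::PathBuf;

// Hypothetical key derivation: book id + page + render options identify a cached image.
fn disk_cache_path(cache_dir: &str, book_id: uuid::Uuid, page: u32, width: u32, quality: u8) -> PathBuf {
    let mut hasher = Sha256::new();
    hasher.update(book_id.as_bytes());
    hasher.update(page.to_le_bytes());
    hasher.update(width.to_le_bytes());
    hasher.update([quality]);
    let key = hex::encode(hasher.finalize());
    PathBuf::from(cache_dir).join(key)
}
```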
### The concurrent_renders setting
Stored in DB: `SELECT value FROM app_settings WHERE key = 'limits'` → JSON `{"concurrent_renders": N}`.
Loaded at startup in `load_concurrent_renders`, sketched below.
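A hedged sketch of the loader, given the query above; the fallback default of 4 is an assumption.

```rust
use sqlx::PgPool;

/// Read {"concurrent_renders": N} from app_settings (key = 'limits'); fall back to a default.
async fn load_concurrent_renders(pool: &PgPool) -> usize {
    let value: Option<serde_json::Value> =
        sqlx::query_scalar("SELECT value FROM app_settings WHERE key = 'limits'")
            .fetch_optional(pool)
            .await
            .ok()
            .flatten();
    value
        .and_then(|v| v.get("concurrent_renders").and_then(|n| n.as_u64()))
        .map(|n| n as usize)
        .unwrap_or(4) // assumed default
}
```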
## Gotchas

- **LIBRARIES_ROOT_PATH**: the `abs_path` values in DB start with `/libraries/`. Call `remap_libraries_path()` before any file access.
- **Read rate limit**: `read_rate_limit` middleware on the read routes (100 req/5s by default).
- **Metrics**: `/metrics` exposes `requests_total`, `page_cache_hits`, `page_cache_misses` (atomics in `AppState.metrics`).
- **Swagger**: served at `/swagger-ui`, JSON spec at `/openapi.json`.
apps/api/Cargo.toml
@@ -13,10 +13,14 @@ async-stream = "0.3"
 chrono.workspace = true
 futures = "0.3"
 image.workspace = true
+jpeg-decoder.workspace = true
 lru.workspace = true
+notifications = { path = "../../crates/notifications" }
 stripstream-core = { path = "../../crates/core" }
+parsers = { path = "../../crates/parsers" }
 rand.workspace = true
 tokio-stream = "0.1"
+regex = "1"
 reqwest.workspace = true
 serde.workspace = true
 serde_json.workspace = true
@@ -28,7 +32,7 @@ tower-http = { version = "0.6", features = ["cors"] }
 tracing.workspace = true
 tracing-subscriber.workspace = true
 uuid.workspace = true
-zip = { version = "2.2", default-features = false, features = ["deflate"] }
 utoipa.workspace = true
 utoipa-swagger-ui = { workspace = true, features = ["axum"] }
-webp = "0.3"
+webp.workspace = true
+scraper.workspace = true
apps/api/Dockerfile
@@ -1,27 +1,68 @@
 FROM rust:1-bookworm AS builder
 WORKDIR /app
 
-# Install sccache for faster builds
-RUN cargo install sccache --locked
-ENV RUSTC_WRAPPER=sccache
-ENV SCCACHE_DIR=/sccache
+# Copy workspace manifests and create dummy source files to cache dependency builds
 
 COPY Cargo.toml ./
 COPY apps/api/Cargo.toml apps/api/Cargo.toml
 COPY apps/indexer/Cargo.toml apps/indexer/Cargo.toml
 COPY crates/core/Cargo.toml crates/core/Cargo.toml
+COPY crates/notifications/Cargo.toml crates/notifications/Cargo.toml
 COPY crates/parsers/Cargo.toml crates/parsers/Cargo.toml
 
+RUN mkdir -p apps/api/src apps/indexer/src crates/core/src crates/notifications/src crates/parsers/src && \
+    echo "fn main() {}" > apps/api/src/main.rs && \
+    echo "fn main() {}" > apps/indexer/src/main.rs && \
+    echo "" > apps/indexer/src/lib.rs && \
+    echo "" > crates/core/src/lib.rs && \
+    echo "" > crates/notifications/src/lib.rs && \
+    echo "" > crates/parsers/src/lib.rs
+
+# Build dependencies only (cached as long as Cargo.toml files don't change)
+RUN --mount=type=cache,target=/usr/local/cargo/registry \
+    --mount=type=cache,target=/usr/local/cargo/git \
+    --mount=type=cache,target=/app/target \
+    cargo build --release -p api && \
+    cargo install sqlx-cli --no-default-features --features postgres --locked
+
+# Copy real source code and build
 COPY apps/api/src apps/api/src
 COPY apps/indexer/src apps/indexer/src
 COPY crates/core/src crates/core/src
+COPY crates/notifications/src crates/notifications/src
 COPY crates/parsers/src crates/parsers/src
 
-# Build with sccache (cache persisted between builds via Docker cache mount)
-RUN --mount=type=cache,target=/sccache \
-    cargo build --release -p api
+RUN --mount=type=cache,target=/usr/local/cargo/registry \
+    --mount=type=cache,target=/usr/local/cargo/git \
+    --mount=type=cache,target=/app/target \
+    touch apps/api/src/main.rs crates/core/src/lib.rs crates/notifications/src/lib.rs crates/parsers/src/lib.rs && \
+    cargo build --release -p api && \
+    cp /app/target/release/api /usr/local/bin/api
 
 FROM debian:bookworm-slim
-RUN apt-get update && apt-get install -y --no-install-recommends ca-certificates wget unrar-free poppler-utils && rm -rf /var/lib/apt/lists/*
-COPY --from=builder /app/target/release/api /usr/local/bin/api
-EXPOSE 8080
-CMD ["/usr/local/bin/api"]
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    ca-certificates wget locales postgresql-client \
+    && rm -rf /var/lib/apt/lists/*
+RUN sed -i '/en_US.UTF-8/s/^# //g' /etc/locale.gen && locale-gen
+ENV LANG=en_US.UTF-8
+ENV LC_ALL=en_US.UTF-8
+
+# Download pdfium shared library (replaces pdftoppm subprocess)
+RUN ARCH=$(dpkg --print-architecture) && \
+    case "$ARCH" in \
+        amd64) PDFIUM_ARCH="linux-x64" ;; \
+        arm64) PDFIUM_ARCH="linux-arm64" ;; \
+        *) echo "Unsupported arch: $ARCH" && exit 1 ;; \
+    esac && \
+    wget -q "https://github.com/bblanchon/pdfium-binaries/releases/latest/download/pdfium-${PDFIUM_ARCH}.tgz" -O /tmp/pdfium.tgz && \
+    tar -xzf /tmp/pdfium.tgz -C /tmp && \
+    cp /tmp/lib/libpdfium.so /usr/local/lib/ && \
+    rm -rf /tmp/pdfium.tgz /tmp/lib /tmp/include && \
+    ldconfig
+COPY --from=builder /usr/local/bin/api /usr/local/bin/api
+COPY --from=builder /usr/local/cargo/bin/sqlx /usr/local/bin/sqlx
+COPY infra/migrations /app/migrations
+COPY apps/api/entrypoint.sh /usr/local/bin/entrypoint.sh
+RUN chmod +x /usr/local/bin/entrypoint.sh
+EXPOSE 7080
+CMD ["/usr/local/bin/entrypoint.sh"]
apps/api/entrypoint.sh (new file, 63 lines)
@@ -0,0 +1,63 @@
#!/bin/sh
set -e

# psql requires "postgresql://" but Rust/sqlx accepts both "postgres://" and "postgresql://"
PSQL_URL=$(echo "$DATABASE_URL" | sed 's|^postgres://|postgresql://|')

# Check 1: does the old schema exist (index_jobs table)?
HAS_OLD_TABLES=$(psql "$PSQL_URL" -tAc \
    "SELECT EXISTS(SELECT 1 FROM information_schema.tables WHERE table_name='index_jobs')::text" \
    2>/dev/null || echo "false")

# Check 2: is sqlx tracking present and non-empty?
HAS_SQLX_TABLE=$(psql "$PSQL_URL" -tAc \
    "SELECT EXISTS(SELECT 1 FROM information_schema.tables WHERE table_name='_sqlx_migrations')::text" \
    2>/dev/null || echo "false")

if [ "$HAS_SQLX_TABLE" = "true" ]; then
    HAS_SQLX_ROWS=$(psql "$PSQL_URL" -tAc \
        "SELECT EXISTS(SELECT 1 FROM _sqlx_migrations LIMIT 1)::text" \
        2>/dev/null || echo "false")
else
    HAS_SQLX_ROWS="false"
fi

echo "==> Migration check: old_tables=$HAS_OLD_TABLES sqlx_table=$HAS_SQLX_TABLE sqlx_rows=$HAS_SQLX_ROWS"

if [ "$HAS_OLD_TABLES" = "true" ] && [ "$HAS_SQLX_ROWS" = "false" ]; then
    echo "==> Upgrade from pre-sqlx migration system detected: creating baseline..."

    psql "$PSQL_URL" -c "
        CREATE TABLE IF NOT EXISTS _sqlx_migrations (
            version BIGINT PRIMARY KEY,
            description TEXT NOT NULL,
            installed_on TIMESTAMPTZ NOT NULL DEFAULT NOW(),
            success BOOLEAN NOT NULL,
            checksum BYTEA NOT NULL,
            execution_time BIGINT NOT NULL
        )
    "

    for f in /app/migrations/*.sql; do
        filename=$(basename "$f")
        # Strip leading zeros to get the integer version (e.g. "0005" -> "5")
        version=$(echo "$filename" | sed 's/^0*//' | cut -d'_' -f1)
        description=$(echo "$filename" | sed 's/^[0-9]*_//' | sed 's/\.sql$//')
        checksum=$(sha384sum "$f" | awk '{print $1}')

        psql "$PSQL_URL" -c "
            INSERT INTO _sqlx_migrations (version, description, installed_on, success, checksum, execution_time)
            VALUES ($version, '$description', NOW(), TRUE, decode('$checksum', 'hex'), 0)
            ON CONFLICT (version) DO NOTHING
        "
        echo "  baselined: $filename"
    done

    echo "==> Baseline complete."
fi

echo "==> Running migrations..."
sqlx migrate run --source /app/migrations

echo "==> Starting API..."
exec /usr/local/bin/api
apps/api/src/api_middleware.rs (new file, 51 lines)
@@ -0,0 +1,51 @@
use axum::{
    extract::State,
    middleware::Next,
    response::{IntoResponse, Response},
};
use std::time::Duration;
use std::sync::atomic::Ordering;
use tracing::info;

use crate::state::AppState;

pub async fn request_counter(
    State(state): State<AppState>,
    req: axum::extract::Request,
    next: Next,
) -> Response {
    state.metrics.requests_total.fetch_add(1, Ordering::Relaxed);
    let method = req.method().clone();
    let uri = req.uri().clone();
    let start = std::time::Instant::now();
    let response = next.run(req).await;
    let status = response.status().as_u16();
    let elapsed = start.elapsed();
    info!("{} {} {} {}ms", method, uri.path(), status, elapsed.as_millis());
    response
}

pub async fn read_rate_limit(
    State(state): State<AppState>,
    req: axum::extract::Request,
    next: Next,
) -> Response {
    let mut limiter = state.read_rate_limit.lock().await;
    if limiter.window_started_at.elapsed() >= Duration::from_secs(1) {
        limiter.window_started_at = std::time::Instant::now();
        limiter.requests_in_window = 0;
    }

    let rate_limit = state.settings.read().await.rate_limit_per_second;
    if limiter.requests_in_window >= rate_limit {
        return (
            axum::http::StatusCode::TOO_MANY_REQUESTS,
            "rate limit exceeded",
        )
            .into_response();
    }

    limiter.requests_in_window += 1;
    drop(limiter);
    next.run(req).await
}
apps/api/src/auth.rs
@@ -8,12 +8,17 @@ use axum::{
 use chrono::Utc;
 use sqlx::Row;
 
-use crate::{error::ApiError, AppState};
+use crate::{error::ApiError, state::AppState};
+
+#[derive(Clone, Debug)]
+pub struct AuthUser {
+    pub user_id: uuid::Uuid,
+}
 
 #[derive(Clone, Debug)]
 pub enum Scope {
     Admin,
-    Read,
+    Read { user_id: uuid::Uuid },
 }
 
 pub async fn require_admin(
@@ -40,6 +45,20 @@ pub async fn require_read(
     let token = bearer_token(&req).ok_or_else(|| ApiError::unauthorized("missing bearer token"))?;
     let scope = authenticate(&state, token).await?;
 
+    if let Scope::Read { user_id } = &scope {
+        req.extensions_mut().insert(AuthUser { user_id: *user_id });
+    } else if matches!(scope, Scope::Admin) {
+        // Admin can impersonate a user via the X-As-User header
+        if let Some(as_user_id) = req
+            .headers()
+            .get("X-As-User")
+            .and_then(|v| v.to_str().ok())
+            .and_then(|v| uuid::Uuid::parse_str(v).ok())
+        {
+            req.extensions_mut().insert(AuthUser { user_id: as_user_id });
+        }
+    }
+
     req.extensions_mut().insert(scope);
     Ok(next.run(req).await)
 }
@@ -60,8 +79,7 @@ async fn authenticate(state: &AppState, token: &str) -> Result<Scope, ApiError>
 
     let maybe_row = sqlx::query(
         r#"
-        SELECT id, token_hash, scope
-        FROM api_tokens
+        SELECT id, token_hash, scope, user_id FROM api_tokens
         WHERE prefix = $1 AND revoked_at IS NULL AND (expires_at IS NULL OR expires_at > NOW())
         "#,
     )
@@ -88,17 +106,26 @@ async fn authenticate(state: &AppState, token: &str) -> Result<Scope, ApiError>
     let scope: String = row.try_get("scope").map_err(|_| ApiError::unauthorized("invalid token"))?;
     match scope.as_str() {
         "admin" => Ok(Scope::Admin),
-        "read" => Ok(Scope::Read),
+        "read" => {
+            let user_id: uuid::Uuid = row
+                .try_get("user_id")
+                .map_err(|_| ApiError::unauthorized("read token missing user_id"))?;
+            Ok(Scope::Read { user_id })
+        }
         _ => Err(ApiError::unauthorized("invalid token scope")),
     }
 }
 
 fn parse_prefix(token: &str) -> Option<&str> {
-    let mut parts = token.split('_');
-    let namespace = parts.next()?;
-    let prefix = parts.next()?;
-    let secret = parts.next()?;
-    if namespace != "stl" || secret.is_empty() || prefix.len() < 6 {
+    // Format: stl_{8-char prefix}_{secret}
+    // Base64 URL_SAFE can contain '_', so we cannot split blindly
+    let rest = token.strip_prefix("stl_")?;
+    if rest.len() < 10 {
+        // 8 (prefix) + 1 ('_') + 1 (secret min)
+        return None;
+    }
+    let prefix = &rest[..8];
+    if rest.as_bytes().get(8) != Some(&b'_') {
         return None;
     }
     Some(prefix)
apps/api/src/authors.rs (new file, 178 lines)
@@ -0,0 +1,178 @@
use axum::{extract::{Query, State}, Json};
use serde::{Deserialize, Serialize};
use sqlx::Row;
use utoipa::ToSchema;

use crate::{error::ApiError, state::AppState};

#[derive(Deserialize, ToSchema)]
pub struct ListAuthorsQuery {
    #[schema(value_type = Option<String>, example = "batman")]
    pub q: Option<String>,
    #[schema(value_type = Option<i64>, example = 1)]
    pub page: Option<i64>,
    #[schema(value_type = Option<i64>, example = 20)]
    pub limit: Option<i64>,
    /// Sort order: "name" (default), "books" (most books first)
    #[schema(value_type = Option<String>, example = "books")]
    pub sort: Option<String>,
}

#[derive(Serialize, ToSchema)]
pub struct AuthorItem {
    pub name: String,
    pub book_count: i64,
    pub series_count: i64,
}

#[derive(Serialize, ToSchema)]
pub struct AuthorsPageResponse {
    pub items: Vec<AuthorItem>,
    pub total: i64,
    pub page: i64,
    pub limit: i64,
}

/// List all unique authors with book/series counts
#[utoipa::path(
    get,
    path = "/authors",
    tag = "authors",
    params(
        ("q" = Option<String>, Query, description = "Search by author name"),
        ("page" = Option<i64>, Query, description = "Page number (1-based)"),
        ("limit" = Option<i64>, Query, description = "Items per page (max 100)"),
        ("sort" = Option<String>, Query, description = "Sort: name (default) or books"),
    ),
    responses(
        (status = 200, body = AuthorsPageResponse),
        (status = 401, description = "Unauthorized"),
    ),
    security(("Bearer" = []))
)]
pub async fn list_authors(
    State(state): State<AppState>,
    Query(query): Query<ListAuthorsQuery>,
) -> Result<Json<AuthorsPageResponse>, ApiError> {
    let page = query.page.unwrap_or(1).max(1);
    let limit = query.limit.unwrap_or(20).clamp(1, 100);
    let offset = (page - 1) * limit;
    let sort = query.sort.as_deref().unwrap_or("name");

    let order_clause = match sort {
        "books" => "book_count DESC, name ASC",
        _ => "name ASC",
    };

    let q_pattern = query.q.as_deref()
        .filter(|s| !s.trim().is_empty())
        .map(|s| format!("%{s}%"));

    // Aggregate unique authors from books.authors + books.author + series_metadata.authors
    let sql = format!(
        r#"
        WITH all_authors AS (
            SELECT DISTINCT UNNEST(
                COALESCE(
                    NULLIF(authors, '{{}}'),
                    CASE WHEN author IS NOT NULL AND author != '' THEN ARRAY[author] ELSE ARRAY[]::text[] END
                )
            ) AS name
            FROM books
            UNION
            SELECT DISTINCT UNNEST(authors) AS name
            FROM series_metadata
            WHERE authors != '{{}}'
        ),
        filtered AS (
            SELECT name FROM all_authors
            WHERE ($1::text IS NULL OR name ILIKE $1)
        ),
        book_counts AS (
            SELECT
                f.name AS author_name,
                COUNT(DISTINCT b.id) AS book_count
            FROM filtered f
            LEFT JOIN books b ON (
                f.name = ANY(
                    COALESCE(
                        NULLIF(b.authors, '{{}}'),
                        CASE WHEN b.author IS NOT NULL AND b.author != '' THEN ARRAY[b.author] ELSE ARRAY[]::text[] END
                    )
                )
            )
            GROUP BY f.name
        ),
        series_counts AS (
            SELECT
                f.name AS author_name,
                COUNT(DISTINCT (sm.library_id, sm.name)) AS series_count
            FROM filtered f
            LEFT JOIN series_metadata sm ON (
                f.name = ANY(sm.authors) AND sm.authors != '{{}}'
            )
            GROUP BY f.name
        )
        SELECT
            f.name,
            COALESCE(bc.book_count, 0) AS book_count,
            COALESCE(sc.series_count, 0) AS series_count
        FROM filtered f
        LEFT JOIN book_counts bc ON bc.author_name = f.name
        LEFT JOIN series_counts sc ON sc.author_name = f.name
        ORDER BY {order_clause}
        LIMIT $2 OFFSET $3
        "#
    );

    let count_sql = r#"
        WITH all_authors AS (
            SELECT DISTINCT UNNEST(
                COALESCE(
                    NULLIF(authors, '{}'),
                    CASE WHEN author IS NOT NULL AND author != '' THEN ARRAY[author] ELSE ARRAY[]::text[] END
                )
            ) AS name
            FROM books
            UNION
            SELECT DISTINCT UNNEST(authors) AS name
            FROM series_metadata
            WHERE authors != '{}'
        )
        SELECT COUNT(*) AS total
        FROM all_authors
        WHERE ($1::text IS NULL OR name ILIKE $1)
    "#;

    let (rows, count_row) = tokio::join!(
        sqlx::query(&sql)
            .bind(q_pattern.as_deref())
            .bind(limit)
            .bind(offset)
            .fetch_all(&state.pool),
        sqlx::query(count_sql)
            .bind(q_pattern.as_deref())
            .fetch_one(&state.pool)
    );

    let rows = rows.map_err(|e| ApiError::internal(format!("authors query failed: {e}")))?;
    let total: i64 = count_row
        .map_err(|e| ApiError::internal(format!("authors count failed: {e}")))?
        .get("total");

    let items: Vec<AuthorItem> = rows
        .iter()
        .map(|r| AuthorItem {
            name: r.get("name"),
            book_count: r.get("book_count"),
            series_count: r.get("series_count"),
        })
        .collect();

    Ok(Json(AuthorsPageResponse {
        items,
        total,
        page,
        limit,
    }))
}
@@ -1,11 +1,11 @@
|
|||||||
use axum::{extract::{Path, Query, State}, Json};
|
use axum::{extract::{Extension, Path, Query, State}, Json};
|
||||||
use chrono::{DateTime, Utc};
|
use chrono::{DateTime, Utc};
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use sqlx::Row;
|
use sqlx::Row;
|
||||||
use uuid::Uuid;
|
use uuid::Uuid;
|
||||||
use utoipa::ToSchema;
|
use utoipa::ToSchema;
|
||||||
|
|
||||||
use crate::{error::ApiError, AppState};
|
use crate::{auth::AuthUser, error::ApiError, index_jobs::IndexJobResponse, state::AppState};
|
||||||
|
|
||||||
#[derive(Deserialize, ToSchema)]
|
#[derive(Deserialize, ToSchema)]
|
||||||
pub struct ListBooksQuery {
|
pub struct ListBooksQuery {
|
||||||
@@ -13,12 +13,25 @@ pub struct ListBooksQuery {
|
|||||||
pub library_id: Option<Uuid>,
|
pub library_id: Option<Uuid>,
|
||||||
#[schema(value_type = Option<String>)]
|
#[schema(value_type = Option<String>)]
|
||||||
pub kind: Option<String>,
|
pub kind: Option<String>,
|
||||||
|
#[schema(value_type = Option<String>, example = "cbz")]
|
||||||
|
pub format: Option<String>,
|
||||||
#[schema(value_type = Option<String>)]
|
#[schema(value_type = Option<String>)]
|
||||||
pub series: Option<String>,
|
pub series: Option<String>,
|
||||||
|
#[schema(value_type = Option<String>, example = "unread,reading")]
|
||||||
|
pub reading_status: Option<String>,
|
||||||
|
/// Filter by exact author name (matches in authors array or scalar author field)
|
||||||
#[schema(value_type = Option<String>)]
|
#[schema(value_type = Option<String>)]
|
||||||
pub cursor: Option<Uuid>,
|
pub author: Option<String>,
|
||||||
|
#[schema(value_type = Option<i64>, example = 1)]
|
||||||
|
pub page: Option<i64>,
|
||||||
#[schema(value_type = Option<i64>, example = 50)]
|
#[schema(value_type = Option<i64>, example = 50)]
|
||||||
pub limit: Option<i64>,
|
pub limit: Option<i64>,
|
||||||
|
/// Sort order: "title" (default) or "latest" (most recently added first)
|
||||||
|
#[schema(value_type = Option<String>, example = "latest")]
|
||||||
|
pub sort: Option<String>,
|
||||||
|
/// Filter by metadata provider: "linked" (any provider), "unlinked" (no provider), or a specific provider name
|
||||||
|
#[schema(value_type = Option<String>, example = "linked")]
|
||||||
|
pub metadata_provider: Option<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Serialize, ToSchema)]
|
#[derive(Serialize, ToSchema)]
|
||||||
@@ -28,21 +41,30 @@ pub struct BookItem {
|
|||||||
#[schema(value_type = String)]
|
#[schema(value_type = String)]
|
||||||
pub library_id: Uuid,
|
pub library_id: Uuid,
|
||||||
pub kind: String,
|
pub kind: String,
|
||||||
|
pub format: Option<String>,
|
||||||
pub title: String,
|
pub title: String,
|
||||||
pub author: Option<String>,
|
pub author: Option<String>,
|
||||||
|
pub authors: Vec<String>,
|
||||||
pub series: Option<String>,
|
pub series: Option<String>,
|
||||||
pub volume: Option<i32>,
|
pub volume: Option<i32>,
|
||||||
pub language: Option<String>,
|
pub language: Option<String>,
|
||||||
pub page_count: Option<i32>,
|
pub page_count: Option<i32>,
|
||||||
|
pub thumbnail_url: Option<String>,
|
||||||
#[schema(value_type = String)]
|
#[schema(value_type = String)]
|
||||||
pub updated_at: DateTime<Utc>,
|
pub updated_at: DateTime<Utc>,
|
||||||
|
/// Reading status: "unread", "reading", or "read"
|
||||||
|
pub reading_status: String,
|
||||||
|
pub reading_current_page: Option<i32>,
|
||||||
|
#[schema(value_type = Option<String>)]
|
||||||
|
pub reading_last_read_at: Option<DateTime<Utc>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Serialize, ToSchema)]
|
#[derive(Serialize, ToSchema)]
|
||||||
pub struct BooksPage {
|
pub struct BooksPage {
|
||||||
pub items: Vec<BookItem>,
|
pub items: Vec<BookItem>,
|
||||||
#[schema(value_type = Option<String>)]
|
pub total: i64,
|
||||||
pub next_cursor: Option<Uuid>,
|
pub page: i64,
|
||||||
|
pub limit: i64,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Serialize, ToSchema)]
|
#[derive(Serialize, ToSchema)]
|
||||||
@@ -54,13 +76,26 @@ pub struct BookDetails {
|
|||||||
pub kind: String,
|
pub kind: String,
|
||||||
pub title: String,
|
pub title: String,
|
||||||
pub author: Option<String>,
|
pub author: Option<String>,
|
||||||
|
pub authors: Vec<String>,
|
||||||
pub series: Option<String>,
|
pub series: Option<String>,
|
||||||
pub volume: Option<i32>,
|
pub volume: Option<i32>,
|
||||||
pub language: Option<String>,
|
pub language: Option<String>,
|
||||||
pub page_count: Option<i32>,
|
pub page_count: Option<i32>,
|
||||||
|
pub thumbnail_url: Option<String>,
|
||||||
pub file_path: Option<String>,
|
pub file_path: Option<String>,
|
||||||
pub file_format: Option<String>,
|
pub file_format: Option<String>,
|
||||||
pub file_parse_status: Option<String>,
|
pub file_parse_status: Option<String>,
|
||||||
|
/// Reading status: "unread", "reading", or "read"
|
||||||
|
pub reading_status: String,
|
||||||
|
pub reading_current_page: Option<i32>,
|
||||||
|
#[schema(value_type = Option<String>)]
|
||||||
|
pub reading_last_read_at: Option<DateTime<Utc>>,
|
||||||
|
pub summary: Option<String>,
|
||||||
|
pub isbn: Option<String>,
|
||||||
|
pub publish_date: Option<String>,
|
||||||
|
/// Fields locked from external metadata sync
|
||||||
|
#[serde(skip_serializing_if = "Option::is_none")]
|
||||||
|
pub locked_fields: Option<serde_json::Value>,
|
||||||
}
|
}
|
||||||
|
|
||||||
/// List books with optional filtering and pagination
|
/// List books with optional filtering and pagination
|
||||||
@@ -70,10 +105,13 @@ pub struct BookDetails {
|
|||||||
tag = "books",
|
tag = "books",
|
||||||
params(
|
params(
|
||||||
("library_id" = Option<String>, Query, description = "Filter by library ID"),
|
("library_id" = Option<String>, Query, description = "Filter by library ID"),
|
||||||
("kind" = Option<String>, Query, description = "Filter by book kind (cbz, cbr, pdf)"),
|
("kind" = Option<String>, Query, description = "Filter by book kind (cbz, cbr, pdf, epub)"),
|
||||||
("series" = Option<String>, Query, description = "Filter by series name (use 'unclassified' for books without series)"),
|
("series" = Option<String>, Query, description = "Filter by series name (use 'unclassified' for books without series)"),
|
||||||
("cursor" = Option<String>, Query, description = "Cursor for pagination"),
|
("reading_status" = Option<String>, Query, description = "Filter by reading status, comma-separated (e.g. 'unread,reading')"),
|
||||||
("limit" = Option<i64>, Query, description = "Max items to return (max 200)"),
|
("page" = Option<i64>, Query, description = "Page number (1-indexed, default 1)"),
|
||||||
|
("limit" = Option<i64>, Query, description = "Items per page (max 200, default 50)"),
|
||||||
|
("sort" = Option<String>, Query, description = "Sort order: 'title' (default) or 'latest' (most recently added first)"),
|
||||||
|
("metadata_provider" = Option<String>, Query, description = "Filter by metadata provider: 'linked' (any provider), 'unlinked' (no provider), or a specific provider name"),
|
||||||
),
|
),
|
||||||
responses(
|
responses(
|
||||||
(status = 200, body = BooksPage),
|
(status = 200, body = BooksPage),
|
||||||
@@ -84,80 +122,162 @@ pub struct BookDetails {
|
|||||||
pub async fn list_books(
|
pub async fn list_books(
|
||||||
State(state): State<AppState>,
|
State(state): State<AppState>,
|
||||||
Query(query): Query<ListBooksQuery>,
|
Query(query): Query<ListBooksQuery>,
|
||||||
|
user: Option<Extension<AuthUser>>,
|
||||||
) -> Result<Json<BooksPage>, ApiError> {
|
) -> Result<Json<BooksPage>, ApiError> {
|
||||||
|
let user_id: Option<uuid::Uuid> = user.map(|u| u.0.user_id);
|
||||||
let limit = query.limit.unwrap_or(50).clamp(1, 200);
|
let limit = query.limit.unwrap_or(50).clamp(1, 200);
|
||||||
|
let page = query.page.unwrap_or(1).max(1);
|
||||||
|
let offset = (page - 1) * limit;
|
||||||
|
|
||||||
// Build series filter condition
|
// Parse reading_status CSV → Vec<String>
|
||||||
let series_condition = match query.series.as_deref() {
|
let reading_statuses: Option<Vec<String>> = query.reading_status.as_deref().map(|s| {
|
||||||
Some("unclassified") => "AND (series IS NULL OR series = '')",
|
s.split(',').map(|v| v.trim().to_string()).filter(|v| !v.is_empty()).collect()
|
||||||
Some(_series_name) => "AND series = $5",
|
});
|
||||||
None => "",
|
|
||||||
|
// Conditions partagées COUNT et DATA — $1=library_id $2=kind $3=format, puis optionnels
|
||||||
|
let mut p: usize = 3;
|
||||||
|
let series_cond = match query.series.as_deref() {
|
||||||
|
Some("unclassified") => "AND (b.series IS NULL OR b.series = '')".to_string(),
|
||||||
|
Some(_) => { p += 1; format!("AND b.series = ${p}") }
|
||||||
|
None => String::new(),
|
||||||
};
|
};
|
||||||
|
let rs_cond = if reading_statuses.is_some() {
|
||||||
|
p += 1; format!("AND COALESCE(brp.status, 'unread') = ANY(${p})")
|
||||||
|
} else { String::new() };
|
||||||
|
let author_cond = if query.author.is_some() {
|
||||||
|
p += 1; format!("AND (${p} = ANY(COALESCE(NULLIF(b.authors, '{{}}'), CASE WHEN b.author IS NOT NULL AND b.author != '' THEN ARRAY[b.author] ELSE ARRAY[]::text[] END)) OR EXISTS (SELECT 1 FROM series_metadata sm WHERE sm.library_id = b.library_id AND sm.name = b.series AND ${p} = ANY(sm.authors)))")
|
||||||
|
} else { String::new() };
|
||||||
|
let metadata_cond = match query.metadata_provider.as_deref() {
|
||||||
|
Some("unlinked") => "AND eml.id IS NULL".to_string(),
|
||||||
|
Some("linked") => "AND eml.id IS NOT NULL".to_string(),
|
||||||
|
+        Some(_) => { p += 1; format!("AND eml.provider = ${p}") },
+        None => String::new(),
+    };
+    p += 1;
+    let uid_p = p;
+
-    let sql = format!(
-        r#"
-        SELECT id, library_id, kind, title, author, series, volume, language, page_count, updated_at
-        FROM books
-        WHERE ($1::uuid IS NULL OR library_id = $1)
-          AND ($2::text IS NULL OR kind = $2)
-          AND ($3::uuid IS NULL OR id > $3)
-          {}
-        ORDER BY
-          -- Extract text part before numbers (case insensitive)
-          REGEXP_REPLACE(LOWER(title), '[0-9]+', '', 'g'),
-          -- Extract first number group and convert to integer for numeric sort
-          COALESCE(
-            (REGEXP_MATCH(LOWER(title), '\d+'))[1]::int,
-            0
-          ),
-          -- Then by full title as fallback
-          title ASC
-        LIMIT $4
-        "#,
-        series_condition
-    );
+    let metadata_links_cte = r#"
+        metadata_links AS (
+            SELECT DISTINCT ON (eml.series_name, eml.library_id)
+                eml.series_name, eml.library_id, eml.provider, eml.id
+            FROM external_metadata_links eml
+            WHERE eml.status = 'approved'
+            ORDER BY eml.series_name, eml.library_id, eml.created_at DESC
+        )"#;
+
+    let count_sql = format!(
+        r#"WITH {metadata_links_cte}
+        SELECT COUNT(*) FROM books b
+        LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ${uid_p}::uuid IS NOT NULL AND brp.user_id = ${uid_p}
+        LEFT JOIN metadata_links eml ON eml.series_name = b.series AND eml.library_id = b.library_id
+        WHERE ($1::uuid IS NULL OR b.library_id = $1)
+          AND ($2::text IS NULL OR b.kind = $2)
+          AND ($3::text IS NULL OR b.format = $3)
+          {series_cond}
+          {rs_cond}
+          {author_cond}
+          {metadata_cond}"#
+    );
+
+    let order_clause = if query.sort.as_deref() == Some("latest") {
+        "b.updated_at DESC".to_string()
+    } else {
+        "b.volume NULLS LAST, REGEXP_REPLACE(LOWER(b.title), '[0-9].*$', ''), COALESCE((REGEXP_MATCH(LOWER(b.title), '\\d+'))[1]::int, 0), b.title ASC".to_string()
+    };
+
+    // DATA: same filter params, then $N+1 = limit and $N+2 = offset
+    let limit_p = p + 1;
+    let offset_p = p + 2;
+    let data_sql = format!(
+        r#"
+        WITH {metadata_links_cte}
+        SELECT b.id, b.library_id, b.kind, b.format, b.title, b.author, b.authors, b.series, b.volume, b.language, b.page_count, b.thumbnail_path, b.updated_at,
+               COALESCE(brp.status, 'unread') AS reading_status,
+               brp.current_page AS reading_current_page,
+               brp.last_read_at AS reading_last_read_at
+        FROM books b
+        LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ${uid_p}::uuid IS NOT NULL AND brp.user_id = ${uid_p}
+        LEFT JOIN metadata_links eml ON eml.series_name = b.series AND eml.library_id = b.library_id
+        WHERE ($1::uuid IS NULL OR b.library_id = $1)
+          AND ($2::text IS NULL OR b.kind = $2)
+          AND ($3::text IS NULL OR b.format = $3)
+          {series_cond}
+          {rs_cond}
+          {author_cond}
+          {metadata_cond}
+        ORDER BY {order_clause}
+        LIMIT ${limit_p} OFFSET ${offset_p}
+        "#
+    );
+
-    let mut query_builder = sqlx::query(&sql)
-        .bind(query.library_id)
-        .bind(query.kind.as_deref())
-        .bind(query.cursor)
-        .bind(limit + 1);
+    let mut count_builder = sqlx::query(&count_sql)
+        .bind(query.library_id)
+        .bind(query.kind.as_deref())
+        .bind(query.format.as_deref());
+    let mut data_builder = sqlx::query(&data_sql)
+        .bind(query.library_id)
+        .bind(query.kind.as_deref())
+        .bind(query.format.as_deref());
+
-    // Bind series parameter if it's not unclassified
-    if let Some(series) = query.series.as_deref() {
-        if series != "unclassified" {
-            query_builder = query_builder.bind(series);
-        }
-    }
+    if let Some(s) = query.series.as_deref() {
+        if s != "unclassified" {
+            count_builder = count_builder.bind(s);
+            data_builder = data_builder.bind(s);
+        }
+    }
+    if let Some(ref statuses) = reading_statuses {
+        count_builder = count_builder.bind(statuses.clone());
+        data_builder = data_builder.bind(statuses.clone());
+    }
+    if let Some(ref author) = query.author {
+        count_builder = count_builder.bind(author.clone());
+        data_builder = data_builder.bind(author.clone());
+    }
+    if let Some(ref mp) = query.metadata_provider {
+        if mp != "linked" && mp != "unlinked" {
+            count_builder = count_builder.bind(mp.clone());
+            data_builder = data_builder.bind(mp.clone());
+        }
+    }
+    count_builder = count_builder.bind(user_id);
+    data_builder = data_builder.bind(user_id).bind(limit).bind(offset);
+
-    let rows = query_builder.fetch_all(&state.pool).await?;
+    let (count_row, rows) = tokio::try_join!(
+        count_builder.fetch_one(&state.pool),
+        data_builder.fetch_all(&state.pool),
+    )?;
+    let total: i64 = count_row.get(0);

     let mut items: Vec<BookItem> = rows
         .iter()
-        .take(limit as usize)
-        .map(|row| BookItem {
-            id: row.get("id"),
-            library_id: row.get("library_id"),
-            kind: row.get("kind"),
-            title: row.get("title"),
-            author: row.get("author"),
-            series: row.get("series"),
-            volume: row.get("volume"),
-            language: row.get("language"),
-            page_count: row.get("page_count"),
-            updated_at: row.get("updated_at"),
-        })
+        .map(|row| {
+            let thumbnail_path: Option<String> = row.get("thumbnail_path");
+            BookItem {
+                id: row.get("id"),
+                library_id: row.get("library_id"),
+                kind: row.get("kind"),
+                format: row.get("format"),
+                title: row.get("title"),
+                author: row.get("author"),
+                authors: row.get::<Vec<String>, _>("authors"),
+                series: row.get("series"),
+                volume: row.get("volume"),
+                language: row.get("language"),
+                page_count: row.get("page_count"),
+                thumbnail_url: thumbnail_path.map(|_p| format!("/books/{}/thumbnail", row.get::<Uuid, _>("id"))),
+                updated_at: row.get("updated_at"),
+                reading_status: row.get("reading_status"),
+                reading_current_page: row.get("reading_current_page"),
+                reading_last_read_at: row.get("reading_last_read_at"),
+            }
+        })
         .collect();

-    let next_cursor = if rows.len() > limit as usize {
-        items.last().map(|b| b.id)
-    } else {
-        None
-    };
-
     Ok(Json(BooksPage {
         items: std::mem::take(&mut items),
-        next_cursor,
+        total,
+        page,
+        limit,
     }))
 }
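This hunk replaces cursor pagination (`$3::uuid IS NULL OR id > $3`, `LIMIT $4`) with classic page/offset pagination, and the count and data queries share the same positional filter binds. A minimal sketch of the offset arithmetic this relies on; the helper is hypothetical and assumes a 1-based `page` (the handler's convention is not visible in this hunk):

    // Hypothetical helper, not part of the commit.
    fn page_offset(page: i64, limit: i64) -> i64 {
        (page.max(1) - 1) * limit // page 1 starts at row 0
    }

    fn demo() {
        assert_eq!(page_offset(1, 50), 0);   // first page
        assert_eq!(page_offset(3, 50), 100); // page 3 covers rows 100..150
    }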
@@ -179,11 +299,16 @@ pub async fn list_books(
 pub async fn get_book(
     State(state): State<AppState>,
     Path(id): Path<Uuid>,
+    user: Option<Extension<AuthUser>>,
 ) -> Result<Json<BookDetails>, ApiError> {
+    let user_id: Option<uuid::Uuid> = user.map(|u| u.0.user_id);
     let row = sqlx::query(
         r#"
-        SELECT b.id, b.library_id, b.kind, b.title, b.author, b.series, b.volume, b.language, b.page_count,
-               bf.abs_path, bf.format, bf.parse_status
+        SELECT b.id, b.library_id, b.kind, b.title, b.author, b.authors, b.series, b.volume, b.language, b.page_count, b.thumbnail_path, b.locked_fields, b.summary, b.isbn, b.publish_date,
+               bf.abs_path, bf.format, bf.parse_status,
+               COALESCE(brp.status, 'unread') AS reading_status,
+               brp.current_page AS reading_current_page,
+               brp.last_read_at AS reading_last_read_at
         FROM books b
         LEFT JOIN LATERAL (
             SELECT abs_path, format, parse_status
@@ -192,143 +317,338 @@ pub async fn get_book(
             ORDER BY updated_at DESC
             LIMIT 1
         ) bf ON TRUE
+        LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND $2::uuid IS NOT NULL AND brp.user_id = $2
         WHERE b.id = $1
         "#,
     )
     .bind(id)
+    .bind(user_id)
     .fetch_optional(&state.pool)
     .await?;

     let row = row.ok_or_else(|| ApiError::not_found("book not found"))?;
+    let thumbnail_path: Option<String> = row.get("thumbnail_path");
     Ok(Json(BookDetails {
         id: row.get("id"),
         library_id: row.get("library_id"),
         kind: row.get("kind"),
         title: row.get("title"),
         author: row.get("author"),
+        authors: row.get::<Vec<String>, _>("authors"),
         series: row.get("series"),
         volume: row.get("volume"),
         language: row.get("language"),
         page_count: row.get("page_count"),
+        thumbnail_url: thumbnail_path.map(|_| format!("/books/{}/thumbnail", id)),
         file_path: row.get("abs_path"),
         file_format: row.get("format"),
         file_parse_status: row.get("parse_status"),
+        reading_status: row.get("reading_status"),
+        reading_current_page: row.get("reading_current_page"),
+        reading_last_read_at: row.get("reading_last_read_at"),
+        summary: row.get("summary"),
+        isbn: row.get("isbn"),
+        publish_date: row.get("publish_date"),
+        locked_fields: Some(row.get::<serde_json::Value, _>("locked_fields")),
     }))
 }

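The `LEFT JOIN ... AND $2::uuid IS NOT NULL AND brp.user_id = $2` pattern lets one query serve both anonymous and signed-in callers: binding `None` makes the join predicate false for every row, so the `COALESCE` falls back to 'unread'. A hedged sketch of the same idea in isolation, using only the tables and columns shown above:

    use sqlx::{PgPool, Row};
    use uuid::Uuid;

    // Sketch only: a nullable bind disables the join instead of needing two queries.
    async fn status_for(pool: &PgPool, book_id: Uuid, user_id: Option<Uuid>) -> Result<String, sqlx::Error> {
        let row = sqlx::query(
            "SELECT COALESCE(p.status, 'unread') AS status
             FROM books b
             LEFT JOIN book_reading_progress p
               ON p.book_id = b.id AND $2::uuid IS NOT NULL AND p.user_id = $2
             WHERE b.id = $1",
        )
        .bind(book_id)
        .bind(user_id) // None binds SQL NULL for anonymous callers
        .fetch_one(pool)
        .await?;
        Ok(row.get("status"))
    }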
-#[derive(Serialize, ToSchema)]
-pub struct SeriesItem {
-    pub name: String,
-    pub book_count: i64,
-    #[schema(value_type = String)]
-    pub first_book_id: Uuid,
-}
-
-#[derive(Serialize, ToSchema)]
-pub struct SeriesPage {
-    pub items: Vec<SeriesItem>,
-    #[schema(value_type = Option<String>)]
-    pub next_cursor: Option<String>,
-}
-
-#[derive(Deserialize, ToSchema)]
-pub struct ListSeriesQuery {
-    #[schema(value_type = Option<String>)]
-    pub cursor: Option<String>,
-    #[schema(value_type = Option<i64>, example = 50)]
-    pub limit: Option<i64>,
-}
-
+// ─── Helpers ──────────────────────────────────────────────────────────────────
+
+pub(crate) fn remap_libraries_path(path: &str) -> String {
+    if let Ok(root) = std::env::var("LIBRARIES_ROOT_PATH") {
+        if path.starts_with("/libraries/") {
+            return path.replacen("/libraries", &root, 1);
+        }
+    }
+    path.to_string()
+}
+
+fn unmap_libraries_path(path: &str) -> String {
+    if let Ok(root) = std::env::var("LIBRARIES_ROOT_PATH") {
+        if path.starts_with(&root) {
+            return path.replacen(&root, "/libraries", 1);
+        }
+    }
+    path.to_string()
+}
+
+// ─── Convert CBR → CBZ ───────────────────────────────────────────────────────
+
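remap_libraries_path translates the logical /libraries/... prefix stored in the database to the physical mount configured via LIBRARIES_ROOT_PATH, and unmap_libraries_path reverses it for user-facing messages. A quick sketch of the intended round trip; the mount point and file name are invented for the example:

    // Illustration only — assumes LIBRARIES_ROOT_PATH=/mnt/media in the environment.
    fn demo() {
        let physical = remap_libraries_path("/libraries/manga/one.cbr");
        assert_eq!(physical, "/mnt/media/manga/one.cbr");
        assert_eq!(unmap_libraries_path(&physical), "/libraries/manga/one.cbr");
    }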
-/// List all series in a library with pagination
+/// Enqueue a CBR → CBZ conversion job for a single book
 #[utoipa::path(
-    get,
-    path = "/libraries/{library_id}/series",
+    post,
+    path = "/books/{id}/convert",
     tag = "books",
     params(
-        ("library_id" = String, Path, description = "Library UUID"),
-        ("cursor" = Option<String>, Query, description = "Cursor for pagination (series name)"),
-        ("limit" = Option<i64>, Query, description = "Max items to return (max 200)"),
+        ("id" = String, Path, description = "Book UUID"),
     ),
     responses(
-        (status = 200, body = SeriesPage),
+        (status = 200, body = IndexJobResponse),
+        (status = 404, description = "Book not found"),
+        (status = 409, description = "Book is not CBR, or target CBZ already exists"),
+        (status = 401, description = "Unauthorized"),
+        (status = 403, description = "Forbidden - Admin scope required"),
+    ),
+    security(("Bearer" = []))
+)]
+pub async fn convert_book(
+    State(state): State<AppState>,
+    Path(book_id): Path<Uuid>,
+) -> Result<Json<IndexJobResponse>, ApiError> {
+    // Fetch book file info
+    let row = sqlx::query(
+        r#"
+        SELECT b.id, bf.abs_path, bf.format
+        FROM books b
+        LEFT JOIN LATERAL (
+            SELECT abs_path, format
+            FROM book_files
+            WHERE book_id = b.id
+            ORDER BY updated_at DESC
+            LIMIT 1
+        ) bf ON TRUE
+        WHERE b.id = $1
+        "#,
+    )
+    .bind(book_id)
+    .fetch_optional(&state.pool)
+    .await?;
+
+    let row = row.ok_or_else(|| ApiError::not_found("book not found"))?;
+    let abs_path: Option<String> = row.get("abs_path");
+    let format: Option<String> = row.get("format");
+
+    if format.as_deref() != Some("cbr") {
+        return Err(ApiError {
+            status: axum::http::StatusCode::CONFLICT,
+            message: "book is not in CBR format".to_string(),
+        });
+    }
+
+    let abs_path = abs_path.ok_or_else(|| ApiError::not_found("book file path not found"))?;
+
+    // Check for existing CBZ with same stem
+    let physical_path = remap_libraries_path(&abs_path);
+    let cbr_path = std::path::Path::new(&physical_path);
+    if let (Some(parent), Some(stem)) = (cbr_path.parent(), cbr_path.file_stem()) {
+        let cbz_path = parent.join(format!("{}.cbz", stem.to_string_lossy()));
+        if cbz_path.exists() {
+            return Err(ApiError {
+                status: axum::http::StatusCode::CONFLICT,
+                message: format!(
+                    "CBZ file already exists: {}",
+                    unmap_libraries_path(&cbz_path.to_string_lossy())
+                ),
+            });
+        }
+    }
+
+    // Create the conversion job
+    let job_id = Uuid::new_v4();
+    sqlx::query(
+        "INSERT INTO index_jobs (id, book_id, type, status) VALUES ($1, $2, 'cbr_to_cbz', 'pending')",
+    )
+    .bind(job_id)
+    .bind(book_id)
+    .execute(&state.pool)
+    .await?;
+
+    let job_row = sqlx::query(
+        "SELECT id, library_id, book_id, type, status, started_at, finished_at, stats_json, error_opt, created_at, progress_percent, processed_files, total_files FROM index_jobs WHERE id = $1",
+    )
+    .bind(job_id)
+    .fetch_one(&state.pool)
+    .await?;
+
+    Ok(Json(crate::index_jobs::map_row(job_row)))
+}
+
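The 409 conflict check derives the target file purely from the source file name, joining the parent directory with the CBR's stem plus ".cbz". A standalone sketch of that stem logic; the path is invented for illustration:

    use std::path::{Path, PathBuf};

    // Illustration of the stem logic: "one.cbr" maps to a sibling "one.cbz".
    fn cbz_sibling(cbr: &Path) -> Option<PathBuf> {
        let parent = cbr.parent()?;
        let stem = cbr.file_stem()?;
        Some(parent.join(format!("{}.cbz", stem.to_string_lossy())))
    }
    // cbz_sibling(Path::new("/mnt/media/manga/one.cbr"))
    //   == Some(PathBuf::from("/mnt/media/manga/one.cbz"))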
+// ─── Metadata editing ─────────────────────────────────────────────────────────
+
+#[derive(Deserialize, ToSchema)]
+pub struct UpdateBookRequest {
+    pub title: String,
+    pub author: Option<String>,
+    #[serde(default)]
+    pub authors: Vec<String>,
+    pub series: Option<String>,
+    pub volume: Option<i32>,
+    pub language: Option<String>,
+    pub summary: Option<String>,
+    pub isbn: Option<String>,
+    pub publish_date: Option<String>,
+    /// Fields locked from external metadata sync
+    #[serde(default)]
+    pub locked_fields: Option<serde_json::Value>,
+}
+
+/// Update metadata for a specific book
+#[utoipa::path(
+    patch,
+    path = "/books/{id}",
+    tag = "books",
+    params(("id" = String, Path, description = "Book UUID")),
+    request_body = UpdateBookRequest,
+    responses(
+        (status = 200, body = BookDetails),
+        (status = 400, description = "Invalid request"),
+        (status = 404, description = "Book not found"),
+        (status = 401, description = "Unauthorized"),
+        (status = 403, description = "Forbidden - Admin scope required"),
+    ),
+    security(("Bearer" = []))
+)]
+pub async fn update_book(
+    State(state): State<AppState>,
+    Path(id): Path<Uuid>,
+    Json(body): Json<UpdateBookRequest>,
+) -> Result<Json<BookDetails>, ApiError> {
+    let title = body.title.trim().to_string();
+    if title.is_empty() {
+        return Err(ApiError::bad_request("title cannot be empty"));
+    }
+    let author = body.author.as_deref().map(str::trim).filter(|s| !s.is_empty()).map(str::to_string);
+    let authors: Vec<String> = body.authors.iter()
+        .map(|a| a.trim().to_string())
+        .filter(|a| !a.is_empty())
+        .collect();
+    let series = body.series.as_deref().map(str::trim).filter(|s| !s.is_empty()).map(str::to_string);
+    let language = body.language.as_deref().map(str::trim).filter(|s| !s.is_empty()).map(str::to_string);
+
+    let summary = body.summary.as_deref().map(str::trim).filter(|s| !s.is_empty()).map(str::to_string);
+    let isbn = body.isbn.as_deref().map(str::trim).filter(|s| !s.is_empty()).map(str::to_string);
+    let publish_date = body.publish_date.as_deref().map(str::trim).filter(|s| !s.is_empty()).map(str::to_string);
+    let locked_fields = body.locked_fields.clone().unwrap_or(serde_json::json!({}));
+    let row = sqlx::query(
+        r#"
+        UPDATE books
+        SET title = $2, author = $3, authors = $4, series = $5, volume = $6, language = $7,
+            summary = $8, isbn = $9, publish_date = $10, locked_fields = $11, updated_at = NOW()
+        WHERE id = $1
+        RETURNING id, library_id, kind, title, author, authors, series, volume, language, page_count, thumbnail_path,
+                  summary, isbn, publish_date,
+                  'unread' AS reading_status,
+                  NULL::integer AS reading_current_page,
+                  NULL::timestamptz AS reading_last_read_at
+        "#,
+    )
+    .bind(id)
+    .bind(&title)
+    .bind(&author)
+    .bind(&authors)
+    .bind(&series)
+    .bind(body.volume)
+    .bind(&language)
+    .bind(&summary)
+    .bind(&isbn)
+    .bind(&publish_date)
+    .bind(&locked_fields)
+    .fetch_optional(&state.pool)
+    .await?;
+
+    let row = row.ok_or_else(|| ApiError::not_found("book not found"))?;
+    let thumbnail_path: Option<String> = row.get("thumbnail_path");
+
+    Ok(Json(BookDetails {
+        id: row.get("id"),
+        library_id: row.get("library_id"),
+        kind: row.get("kind"),
+        title: row.get("title"),
+        author: row.get("author"),
+        authors: row.get::<Vec<String>, _>("authors"),
+        series: row.get("series"),
+        volume: row.get("volume"),
+        language: row.get("language"),
+        page_count: row.get("page_count"),
+        thumbnail_url: thumbnail_path.map(|_| format!("/books/{}/thumbnail", id)),
+        file_path: None,
+        file_format: None,
+        file_parse_status: None,
+        reading_status: row.get("reading_status"),
+        reading_current_page: row.get("reading_current_page"),
+        reading_last_read_at: row.get("reading_last_read_at"),
+        summary: row.get("summary"),
+        isbn: row.get("isbn"),
+        publish_date: row.get("publish_date"),
+        locked_fields: Some(locked_fields),
+    }))
+}
+
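Every optional text field in update_book goes through the same trim-and-drop-empty chain. A sketch of the helper those chains could be factored into; the function is an illustration, not part of the change:

    // Hypothetical helper equivalent to the repeated chains above.
    fn normalize(field: Option<&str>) -> Option<String> {
        field.map(str::trim).filter(|s| !s.is_empty()).map(str::to_string)
    }
    // e.g. `let author = normalize(body.author.as_deref());`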
+// ─── Thumbnail ────────────────────────────────────────────────────────────────
+
+use axum::{
+    body::Body,
+    http::{header, HeaderMap, HeaderValue, StatusCode},
+    response::IntoResponse,
+};
+
+/// Detect content type from thumbnail file extension.
+fn detect_thumbnail_content_type(path: &str) -> &'static str {
+    if path.ends_with(".jpg") || path.ends_with(".jpeg") {
+        "image/jpeg"
+    } else if path.ends_with(".png") {
+        "image/png"
+    } else {
+        "image/webp"
+    }
+}
+
+/// Get book thumbnail image
+#[utoipa::path(
+    get,
+    path = "/books/{id}/thumbnail",
+    tag = "books",
+    params(
+        ("id" = String, Path, description = "Book UUID"),
+    ),
+    responses(
+        (status = 200, description = "WebP thumbnail image", content_type = "image/webp"),
+        (status = 404, description = "Book not found or thumbnail not available"),
     (status = 401, description = "Unauthorized"),
     ),
     security(("Bearer" = []))
 )]
-pub async fn list_series(
+pub async fn get_thumbnail(
     State(state): State<AppState>,
-    Path(library_id): Path<Uuid>,
-    Query(query): Query<ListSeriesQuery>,
-) -> Result<Json<SeriesPage>, ApiError> {
-    let limit = query.limit.unwrap_or(50).clamp(1, 200);
-    let rows = sqlx::query(
-        r#"
-        WITH sorted_books AS (
-            SELECT
-                COALESCE(NULLIF(series, ''), 'unclassified') as name,
-                id,
-                -- Natural sort order for books within series
-                ROW_NUMBER() OVER (
-                    PARTITION BY COALESCE(NULLIF(series, ''), 'unclassified')
-                    ORDER BY
-                        REGEXP_REPLACE(LOWER(title), '[0-9]+', '', 'g'),
-                        COALESCE((REGEXP_MATCH(LOWER(title), '\d+'))[1]::int, 0),
-                        title ASC
-                ) as rn
-            FROM books
-            WHERE library_id = $1
-        ),
-        series_counts AS (
-            SELECT
-                name,
-                COUNT(*) as book_count
-            FROM sorted_books
-            GROUP BY name
-        )
-        SELECT
-            sc.name,
-            sc.book_count,
-            sb.id as first_book_id
-        FROM series_counts sc
-        JOIN sorted_books sb ON sb.name = sc.name AND sb.rn = 1
-        WHERE ($2::text IS NULL OR sc.name > $2)
-        ORDER BY
-            -- Natural sort: extract text part before numbers
-            REGEXP_REPLACE(LOWER(sc.name), '[0-9]+', '', 'g'),
-            -- Extract first number group and convert to integer
-            COALESCE(
-                (REGEXP_MATCH(LOWER(sc.name), '\d+'))[1]::int,
-                0
-            ),
-            sc.name ASC
-        LIMIT $3
-        "#,
-    )
-    .bind(library_id)
-    .bind(query.cursor.as_deref())
-    .bind(limit + 1)
-    .fetch_all(&state.pool)
-    .await?;
-
-    let mut items: Vec<SeriesItem> = rows
-        .iter()
-        .take(limit as usize)
-        .map(|row| SeriesItem {
-            name: row.get("name"),
-            book_count: row.get("book_count"),
-            first_book_id: row.get("first_book_id"),
-        })
-        .collect();
-
-    let next_cursor = if rows.len() > limit as usize {
-        items.last().map(|s| s.name.clone())
-    } else {
-        None
-    };
-
-    Ok(Json(SeriesPage {
-        items: std::mem::take(&mut items),
-        next_cursor,
-    }))
+    Path(book_id): Path<Uuid>,
+) -> Result<impl IntoResponse, ApiError> {
+    let row = sqlx::query("SELECT thumbnail_path FROM books WHERE id = $1")
+        .bind(book_id)
+        .fetch_optional(&state.pool)
+        .await
+        .map_err(|e| ApiError::internal(e.to_string()))?;
+
+    let row = row.ok_or_else(|| ApiError::not_found("book not found"))?;
+    let thumbnail_path: Option<String> = row.get("thumbnail_path");
+
+    let (data, content_type) = if let Some(ref path) = thumbnail_path {
+        match std::fs::read(path) {
+            Ok(bytes) => {
+                let ct = detect_thumbnail_content_type(path);
+                (bytes, ct)
+            }
+            Err(_) => {
+                // File missing on disk (e.g. different mount in dev) — fall back to live render
+                crate::pages::render_book_page_1(&state, book_id, 300, 80).await?
+            }
+        }
+    } else {
+        // No stored thumbnail yet — render page 1 on the fly
+        crate::pages::render_book_page_1(&state, book_id, 300, 80).await?
+    };
+
+    let etag_value = format!("\"{}_{:x}\"", book_id, data.len());
+
+    let mut headers = HeaderMap::new();
+    headers.insert(header::CONTENT_TYPE, HeaderValue::from_static(content_type));
+    headers.insert(
+        header::CACHE_CONTROL,
+        HeaderValue::from_static("public, max-age=31536000, immutable"),
+    );
+    if let Ok(v) = HeaderValue::from_str(&etag_value) {
+        headers.insert(header::ETAG, v);
+    }
+
+    Ok((StatusCode::OK, headers, Body::from(data)))
 }
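detect_thumbnail_content_type keys off the file extension alone and defaults to WebP for anything unrecognized, which matches the indexer's output format. A quick behavioral sketch:

    // Expected behavior of the extension sniffing (illustrative).
    fn demo() {
        assert_eq!(detect_thumbnail_content_type("cover.jpeg"), "image/jpeg");
        assert_eq!(detect_thumbnail_content_type("cover.png"), "image/png");
        assert_eq!(detect_thumbnail_content_type("cover.webp"), "image/webp");
        assert_eq!(detect_thumbnail_content_type("no-extension"), "image/webp"); // default
    }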
@@ -1,4 +1,8 @@
-use axum::{http::StatusCode, response::{IntoResponse, Response}, Json};
+use axum::{
+    http::StatusCode,
+    response::{IntoResponse, Response},
+    Json,
+};
 use serde::Serialize;

 #[derive(Debug)]
@@ -34,6 +38,13 @@ impl ApiError {
         }
     }

+    pub fn unprocessable_entity(message: impl Into<String>) -> Self {
+        Self {
+            status: StatusCode::UNPROCESSABLE_ENTITY,
+            message: message.into(),
+        }
+    }
+
     pub fn not_found(message: impl Into<String>) -> Self {
         Self {
             status: StatusCode::NOT_FOUND,
@@ -51,7 +62,13 @@ impl ApiError {

 impl IntoResponse for ApiError {
     fn into_response(self) -> Response {
-        (self.status, Json(ErrorBody { error: &self.message })).into_response()
+        (
+            self.status,
+            Json(ErrorBody {
+                error: &self.message,
+            }),
+        )
+        .into_response()
     }
 }

@@ -60,3 +77,15 @@ impl From<sqlx::Error> for ApiError {
         Self::internal(format!("database error: {err}"))
     }
 }
+
+impl From<std::io::Error> for ApiError {
+    fn from(err: std::io::Error) -> Self {
+        Self::internal(format!("IO error: {err}"))
+    }
+}
+
+impl From<reqwest::Error> for ApiError {
+    fn from(err: reqwest::Error) -> Self {
+        Self::internal(format!("HTTP client error: {err}"))
+    }
+}
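With the two new From impls, handlers can apply `?` directly to std::io and reqwest results instead of wrapping each error by hand with ApiError::internal. A minimal sketch; the handler itself is hypothetical:

    use crate::error::ApiError;

    // Hypothetical handler body: both error types now convert via `?`.
    async fn fetch_and_cache(url: &str, dest: &str) -> Result<(), ApiError> {
        let bytes = reqwest::get(url).await?.bytes().await?; // reqwest::Error -> ApiError
        tokio::fs::write(dest, &bytes).await?;               // std::io::Error -> ApiError
        Ok(())
    }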
apps/api/src/handlers.rs (new file, 26 lines)
@@ -0,0 +1,26 @@
+use axum::{extract::State, Json};
+use std::sync::atomic::Ordering;
+
+use crate::{error::ApiError, state::AppState};
+
+pub async fn health() -> &'static str {
+    "ok"
+}
+
+pub async fn docs_redirect() -> impl axum::response::IntoResponse {
+    axum::response::Redirect::to("/swagger-ui/")
+}
+
+pub async fn ready(State(state): State<AppState>) -> Result<Json<serde_json::Value>, ApiError> {
+    sqlx::query("SELECT 1").execute(&state.pool).await?;
+    Ok(Json(serde_json::json!({"status": "ready"})))
+}
+
+pub async fn metrics(State(state): State<AppState>) -> String {
+    format!(
+        "requests_total {}\npage_cache_hits {}\npage_cache_misses {}\n",
+        state.metrics.requests_total.load(Ordering::Relaxed),
+        state.metrics.page_cache_hits.load(Ordering::Relaxed),
+        state.metrics.page_cache_misses.load(Ordering::Relaxed),
+    )
+}
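These handlers are plain axum functions, so they only need routes in the app router. A sketch of how they would typically be mounted inside router setup; the route paths are assumptions, since the commit does not show the router:

    use axum::{routing::get, Router};

    // Hypothetical wiring — paths are illustrative only.
    let app = Router::new()
        .route("/health", get(crate::handlers::health))
        .route("/ready", get(crate::handlers::ready))
        .route("/metrics", get(crate::handlers::metrics))
        .route("/docs", get(crate::handlers::docs_redirect))
        .with_state(state);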
@@ -8,7 +8,7 @@ use tokio_stream::Stream;
 use uuid::Uuid;
 use utoipa::ToSchema;

-use crate::{error::ApiError, AppState};
+use crate::{error::ApiError, state::AppState};

 #[derive(Deserialize, ToSchema)]
 pub struct RebuildRequest {
@@ -16,6 +16,10 @@ pub struct RebuildRequest {
     pub library_id: Option<Uuid>,
     #[schema(value_type = Option<bool>, example = false)]
     pub full: Option<bool>,
+    /// Deep rescan: clears directory mtimes to force re-walking all directories,
+    /// discovering newly supported formats without deleting existing data.
+    #[schema(value_type = Option<bool>, example = false)]
+    pub rescan: Option<bool>,
 }

 #[derive(Serialize, ToSchema)]
@@ -24,6 +28,8 @@ pub struct IndexJobResponse {
     pub id: Uuid,
     #[schema(value_type = Option<String>)]
     pub library_id: Option<Uuid>,
+    #[schema(value_type = Option<String>)]
+    pub book_id: Option<Uuid>,
     pub r#type: String,
     pub status: String,
     #[schema(value_type = Option<String>)]
@@ -34,6 +40,9 @@ pub struct IndexJobResponse {
     pub error_opt: Option<String>,
     #[schema(value_type = String)]
     pub created_at: DateTime<Utc>,
+    pub progress_percent: Option<i32>,
+    pub processed_files: Option<i32>,
+    pub total_files: Option<i32>,
 }

 #[derive(Serialize, ToSchema)]
@@ -50,12 +59,18 @@ pub struct IndexJobDetailResponse {
     pub id: Uuid,
     #[schema(value_type = Option<String>)]
     pub library_id: Option<Uuid>,
+    #[schema(value_type = Option<String>)]
+    pub book_id: Option<Uuid>,
     pub r#type: String,
     pub status: String,
     #[schema(value_type = Option<String>)]
     pub started_at: Option<DateTime<Utc>>,
     #[schema(value_type = Option<String>)]
     pub finished_at: Option<DateTime<Utc>>,
+    #[schema(value_type = Option<String>)]
+    pub phase2_started_at: Option<DateTime<Utc>>,
+    #[schema(value_type = Option<String>)]
+    pub generating_thumbnails_started_at: Option<DateTime<Utc>>,
     pub stats_json: Option<serde_json::Value>,
     pub error_opt: Option<String>,
     #[schema(value_type = String)]
@@ -106,7 +121,8 @@ pub async fn enqueue_rebuild(
 ) -> Result<Json<IndexJobResponse>, ApiError> {
     let library_id = payload.as_ref().and_then(|p| p.0.library_id);
     let is_full = payload.as_ref().and_then(|p| p.0.full).unwrap_or(false);
-    let job_type = if is_full { "full_rebuild" } else { "rebuild" };
+    let is_rescan = payload.as_ref().and_then(|p| p.0.rescan).unwrap_or(false);
+    let job_type = if is_full { "full_rebuild" } else if is_rescan { "rescan" } else { "rebuild" };
     let id = Uuid::new_v4();

     sqlx::query(
@@ -119,7 +135,7 @@ pub async fn enqueue_rebuild(
     .await?;

     let row = sqlx::query(
-        "SELECT id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at FROM index_jobs WHERE id = $1",
+        "SELECT id, library_id, book_id, type, status, started_at, finished_at, stats_json, error_opt, created_at FROM index_jobs WHERE id = $1",
     )
     .bind(id)
     .fetch_one(&state.pool)
@@ -142,7 +158,7 @@ pub async fn enqueue_rebuild(
 )]
 pub async fn list_index_jobs(State(state): State<AppState>) -> Result<Json<Vec<IndexJobResponse>>, ApiError> {
     let rows = sqlx::query(
-        "SELECT id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at FROM index_jobs ORDER BY created_at DESC LIMIT 100",
+        "SELECT id, library_id, book_id, type, status, started_at, finished_at, stats_json, error_opt, created_at, progress_percent, processed_files, total_files FROM index_jobs ORDER BY created_at DESC LIMIT 100",
     )
     .fetch_all(&state.pool)
     .await?;
@@ -171,7 +187,7 @@ pub async fn cancel_job(
     id: axum::extract::Path<Uuid>,
 ) -> Result<Json<IndexJobResponse>, ApiError> {
     let rows_affected = sqlx::query(
-        "UPDATE index_jobs SET status = 'cancelled' WHERE id = $1 AND status IN ('pending', 'running')",
+        "UPDATE index_jobs SET status = 'cancelled' WHERE id = $1 AND status IN ('pending', 'running', 'extracting_pages', 'generating_thumbnails')",
     )
     .bind(id.0)
     .execute(&state.pool)
@@ -182,7 +198,7 @@ pub async fn cancel_job(
     }

     let row = sqlx::query(
-        "SELECT id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at FROM index_jobs WHERE id = $1",
+        "SELECT id, library_id, book_id, type, status, started_at, finished_at, stats_json, error_opt, created_at, progress_percent, processed_files, total_files FROM index_jobs WHERE id = $1",
     )
     .bind(id.0)
     .fetch_one(&state.pool)
@@ -235,16 +251,16 @@ pub async fn list_folders(
         base_path.to_path_buf()
     };

-    // Ensure the path is within the libraries root
-    let canonical_target = target_path.canonicalize().unwrap_or(target_path.clone());
-    let canonical_base = base_path.canonicalize().unwrap_or(base_path.to_path_buf());
+    // Ensure the path is within the libraries root (avoid canonicalize — burns fd on Docker mounts)
+    let canonical_target = target_path.clone();
+    let canonical_base = base_path.to_path_buf();

     if !canonical_target.starts_with(&canonical_base) {
         return Err(ApiError::bad_request("Path is outside libraries root"));
     }

     let mut folders = Vec::new();
-    let depth = if params.get("path").is_some() {
+    let depth = if params.contains_key("path") {
         canonical_target.strip_prefix(&canonical_base)
             .map(|p| p.components().count())
             .unwrap_or(0)
@@ -252,19 +268,31 @@ pub async fn list_folders(
         0
     };

-    if let Ok(entries) = std::fs::read_dir(&canonical_target) {
-        for entry in entries.flatten() {
-            if entry.file_type().map(|ft| ft.is_dir()).unwrap_or(false) {
+    let entries = std::fs::read_dir(&canonical_target)
+        .map_err(|e| ApiError::internal(format!("cannot read directory {}: {}", canonical_target.display(), e)))?;
+
+    for entry in entries {
+        let entry = match entry {
+            Ok(e) => e,
+            Err(e) => {
+                tracing::warn!("[FOLDERS] entry error in {}: {}", canonical_target.display(), e);
+                continue;
+            }
+        };
+        let is_dir = match entry.file_type() {
+            Ok(ft) => ft.is_dir(),
+            Err(e) => {
+                tracing::warn!("[FOLDERS] cannot stat {}: {}", entry.path().display(), e);
+                continue;
+            }
+        };
+        if is_dir {
             let name = entry.file_name().to_string_lossy().to_string();

-                // Check if this folder has children
-                let has_children = if let Ok(sub_entries) = std::fs::read_dir(entry.path()) {
-                    sub_entries.flatten().any(|e| {
-                        e.file_type().map(|ft| ft.is_dir()).unwrap_or(false)
-                    })
-                } else {
-                    false
-                };
+            // Check if this folder has children (best-effort, default to true on error)
+            let has_children = std::fs::read_dir(entry.path())
+                .map(|sub| sub.flatten().any(|e| e.file_type().map(|ft| ft.is_dir()).unwrap_or(false)))
+                .unwrap_or(true);

             // Calculate the full path relative to libraries root
             let full_path = if let Ok(relative) = entry.path().strip_prefix(&canonical_base) {
@@ -279,7 +307,6 @@ pub async fn list_folders(
                 depth,
                 has_children,
             });
-            }
         }
     }

@@ -291,6 +318,7 @@ pub fn map_row(row: sqlx::postgres::PgRow) -> IndexJobResponse {
     IndexJobResponse {
         id: row.get("id"),
         library_id: row.get("library_id"),
+        book_id: row.try_get("book_id").ok().flatten(),
         r#type: row.get("type"),
         status: row.get("status"),
         started_at: row.get("started_at"),
@@ -298,6 +326,9 @@ pub fn map_row(row: sqlx::postgres::PgRow) -> IndexJobResponse {
         stats_json: row.get("stats_json"),
         error_opt: row.get("error_opt"),
         created_at: row.get("created_at"),
+        progress_percent: row.try_get("progress_percent").ok(),
+        processed_files: row.try_get("processed_files").ok(),
+        total_files: row.try_get("total_files").ok(),
     }
 }

@@ -305,10 +336,13 @@ fn map_row_detail(row: sqlx::postgres::PgRow) -> IndexJobDetailResponse {
     IndexJobDetailResponse {
         id: row.get("id"),
         library_id: row.get("library_id"),
+        book_id: row.try_get("book_id").ok().flatten(),
         r#type: row.get("type"),
         status: row.get("status"),
         started_at: row.get("started_at"),
         finished_at: row.get("finished_at"),
+        phase2_started_at: row.try_get("phase2_started_at").ok().flatten(),
+        generating_thumbnails_started_at: row.try_get("generating_thumbnails_started_at").ok().flatten(),
         stats_json: row.get("stats_json"),
         error_opt: row.get("error_opt"),
         created_at: row.get("created_at"),
@@ -333,9 +367,9 @@ fn map_row_detail(row: sqlx::postgres::PgRow) -> IndexJobDetailResponse {
 )]
 pub async fn get_active_jobs(State(state): State<AppState>) -> Result<Json<Vec<IndexJobResponse>>, ApiError> {
     let rows = sqlx::query(
-        "SELECT id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at
+        "SELECT id, library_id, book_id, type, status, started_at, finished_at, stats_json, error_opt, created_at, progress_percent, processed_files, total_files
          FROM index_jobs
-         WHERE status IN ('pending', 'running')
+         WHERE status IN ('pending', 'running', 'extracting_pages', 'generating_thumbnails')
          ORDER BY created_at ASC"
     )
     .fetch_all(&state.pool)
@@ -365,8 +399,8 @@ pub async fn get_job_details(
     id: axum::extract::Path<Uuid>,
 ) -> Result<Json<IndexJobDetailResponse>, ApiError> {
     let row = sqlx::query(
-        "SELECT id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at,
-         current_file, progress_percent, total_files, processed_files
+        "SELECT id, library_id, book_id, type, status, started_at, finished_at, phase2_started_at, generating_thumbnails_started_at,
+         stats_json, error_opt, created_at, current_file, progress_percent, total_files, processed_files
          FROM index_jobs WHERE id = $1"
     )
     .bind(id.0)
apps/api/src/job_poller.rs (new file, 134 lines)
@@ -0,0 +1,134 @@
+use std::time::Duration;
+
+use sqlx::{PgPool, Row};
+use tracing::{error, info, trace};
+use uuid::Uuid;
+
+use crate::{metadata_batch, metadata_refresh, notifications};
+
+/// Poll for pending API-only jobs (`metadata_batch`, `metadata_refresh`) and process them.
+/// This mirrors the indexer's worker loop but for job types handled by the API.
+pub async fn run_job_poller(pool: PgPool, interval_seconds: u64) {
+    let wait = Duration::from_secs(interval_seconds.max(1));
+
+    loop {
+        match claim_next_api_job(&pool).await {
+            Ok(Some((job_id, job_type, library_id))) => {
+                info!("[JOB_POLLER] Claimed {job_type} job {job_id} library={library_id}");
+
+                let pool_clone = pool.clone();
+                let library_name: Option<String> =
+                    sqlx::query_scalar("SELECT name FROM libraries WHERE id = $1")
+                        .bind(library_id)
+                        .fetch_optional(&pool)
+                        .await
+                        .ok()
+                        .flatten();
+
+                tokio::spawn(async move {
+                    let result = match job_type.as_str() {
+                        "metadata_refresh" => {
+                            metadata_refresh::process_metadata_refresh(
+                                &pool_clone,
+                                job_id,
+                                library_id,
+                            )
+                            .await
+                        }
+                        "metadata_batch" => {
+                            metadata_batch::process_metadata_batch(
+                                &pool_clone,
+                                job_id,
+                                library_id,
+                            )
+                            .await
+                        }
+                        _ => Err(format!("Unknown API job type: {job_type}")),
+                    };
+
+                    if let Err(e) = result {
+                        error!("[JOB_POLLER] {job_type} job {job_id} failed: {e}");
+                        let _ = sqlx::query(
+                            "UPDATE index_jobs SET status = 'failed', error_opt = $2, finished_at = NOW() WHERE id = $1",
+                        )
+                        .bind(job_id)
+                        .bind(e.to_string())
+                        .execute(&pool_clone)
+                        .await;
+
+                        match job_type.as_str() {
+                            "metadata_refresh" => {
+                                notifications::notify(
+                                    pool_clone,
+                                    notifications::NotificationEvent::MetadataRefreshFailed {
+                                        library_name,
+                                        error: e.to_string(),
+                                    },
+                                );
+                            }
+                            "metadata_batch" => {
+                                notifications::notify(
+                                    pool_clone,
+                                    notifications::NotificationEvent::MetadataBatchFailed {
+                                        library_name,
+                                        error: e.to_string(),
+                                    },
+                                );
+                            }
+                            _ => {}
+                        }
+                    }
+                });
+            }
+            Ok(None) => {
+                trace!("[JOB_POLLER] No pending API jobs, waiting...");
+                tokio::time::sleep(wait).await;
+            }
+            Err(err) => {
+                error!("[JOB_POLLER] Error claiming job: {err}");
+                tokio::time::sleep(wait).await;
+            }
+        }
+    }
+}
+
+const API_JOB_TYPES: &[&str] = &["metadata_batch", "metadata_refresh"];
+
+async fn claim_next_api_job(pool: &PgPool) -> Result<Option<(Uuid, String, Uuid)>, sqlx::Error> {
+    let mut tx = pool.begin().await?;
+
+    let row = sqlx::query(
+        r#"
+        SELECT id, type, library_id
+        FROM index_jobs
+        WHERE status = 'pending'
+          AND type = ANY($1)
+          AND library_id IS NOT NULL
+        ORDER BY created_at ASC
+        FOR UPDATE SKIP LOCKED
+        LIMIT 1
+        "#,
+    )
+    .bind(API_JOB_TYPES)
+    .fetch_optional(&mut *tx)
+    .await?;
+
+    let Some(row) = row else {
+        tx.commit().await?;
+        return Ok(None);
+    };
+
+    let id: Uuid = row.get("id");
+    let job_type: String = row.get("type");
+    let library_id: Uuid = row.get("library_id");
+
+    sqlx::query(
+        "UPDATE index_jobs SET status = 'running', started_at = NOW(), error_opt = NULL WHERE id = $1",
+    )
+    .bind(id)
+    .execute(&mut *tx)
+    .await?;
+
+    tx.commit().await?;
+    Ok(Some((id, job_type, library_id)))
+}
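run_job_poller is an infinite loop, so it is meant to be spawned once at startup alongside the HTTP server; because claim_next_api_job selects with FOR UPDATE SKIP LOCKED inside a transaction, several API replicas can run the same loop without double-claiming a job. A hedged sketch of the startup wiring, e.g. somewhere in main (the 5-second interval is invented):

    // Hypothetical startup wiring — interval value is illustrative.
    tokio::spawn(crate::job_poller::run_job_poller(pool.clone(), 5));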
apps/api/src/komga.rs (new file, 410 lines)
@@ -0,0 +1,410 @@
|
use axum::{extract::State, Json};
|
||||||
|
use chrono::{DateTime, Utc};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use sqlx::Row;
|
||||||
|
use std::collections::HashMap;
|
||||||
|
use utoipa::ToSchema;
|
||||||
|
use uuid::Uuid;
|
||||||
|
|
||||||
|
use crate::{error::ApiError, state::AppState};
|
||||||
|
|
||||||
|
// ─── Komga API types ─────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
#[derive(Deserialize)]
|
||||||
|
struct KomgaBooksResponse {
|
||||||
|
content: Vec<KomgaBook>,
|
||||||
|
#[serde(rename = "totalPages")]
|
||||||
|
total_pages: i32,
|
||||||
|
number: i32,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Deserialize)]
|
||||||
|
struct KomgaBook {
|
||||||
|
name: String,
|
||||||
|
#[serde(rename = "seriesTitle")]
|
||||||
|
series_title: String,
|
||||||
|
metadata: KomgaBookMetadata,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Deserialize)]
|
||||||
|
struct KomgaBookMetadata {
|
||||||
|
title: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Request / Response ──────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
#[derive(Deserialize, ToSchema)]
|
||||||
|
pub struct KomgaSyncRequest {
|
||||||
|
pub url: String,
|
||||||
|
pub username: String,
|
||||||
|
pub password: String,
|
||||||
|
#[schema(value_type = String)]
|
||||||
|
pub user_id: Uuid,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, ToSchema)]
|
||||||
|
pub struct KomgaSyncResponse {
|
||||||
|
#[schema(value_type = String)]
|
||||||
|
pub id: Uuid,
|
||||||
|
pub komga_url: String,
|
||||||
|
#[schema(value_type = Option<String>)]
|
||||||
|
pub user_id: Option<Uuid>,
|
||||||
|
pub total_komga_read: i64,
|
||||||
|
pub matched: i64,
|
||||||
|
pub already_read: i64,
|
||||||
|
pub newly_marked: i64,
|
||||||
|
pub matched_books: Vec<String>,
|
||||||
|
pub newly_marked_books: Vec<String>,
|
||||||
|
pub unmatched: Vec<String>,
|
||||||
|
#[schema(value_type = String)]
|
||||||
|
pub created_at: DateTime<Utc>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, ToSchema)]
|
||||||
|
pub struct KomgaSyncReportSummary {
|
||||||
|
#[schema(value_type = String)]
|
||||||
|
pub id: Uuid,
|
||||||
|
pub komga_url: String,
|
||||||
|
#[schema(value_type = Option<String>)]
|
||||||
|
pub user_id: Option<Uuid>,
|
||||||
|
pub total_komga_read: i64,
|
||||||
|
pub matched: i64,
|
||||||
|
pub already_read: i64,
|
||||||
|
pub newly_marked: i64,
|
||||||
|
pub unmatched_count: i32,
|
||||||
|
#[schema(value_type = String)]
|
||||||
|
pub created_at: DateTime<Utc>,
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Handlers ────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
/// Sync read books from a Komga server
|
||||||
|
#[utoipa::path(
|
||||||
|
post,
|
||||||
|
path = "/komga/sync",
|
||||||
|
tag = "komga",
|
||||||
|
request_body = KomgaSyncRequest,
|
||||||
|
responses(
|
||||||
|
(status = 200, body = KomgaSyncResponse),
|
||||||
|
(status = 400, description = "Bad request"),
|
||||||
|
(status = 401, description = "Unauthorized"),
|
||||||
|
(status = 500, description = "Komga connection or sync error"),
|
||||||
|
),
|
||||||
|
security(("Bearer" = []))
|
||||||
|
)]
|
||||||
|
pub async fn sync_komga_read_books(
|
||||||
|
State(state): State<AppState>,
|
||||||
|
Json(body): Json<KomgaSyncRequest>,
|
||||||
|
) -> Result<Json<KomgaSyncResponse>, ApiError> {
|
||||||
|
let url = body.url.trim_end_matches('/').to_string();
|
||||||
|
if url.is_empty() {
|
||||||
|
return Err(ApiError::bad_request("url is required"));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build HTTP client with basic auth
|
||||||
|
let client = reqwest::Client::builder()
|
||||||
|
.timeout(std::time::Duration::from_secs(30))
|
||||||
|
.build()
|
||||||
|
.map_err(|e| ApiError::internal(format!("failed to build HTTP client: {e}")))?;
|
||||||
|
|
||||||
|
// Paginate through all READ books from Komga
|
||||||
|
let mut komga_books: Vec<(String, String)> = Vec::new(); // (series_title, title)
|
||||||
|
let mut page = 0;
|
||||||
|
let page_size = 100;
|
||||||
|
let max_pages = 500;
|
||||||
|
|
||||||
|
loop {
|
||||||
|
let resp = client
|
||||||
|
.post(format!("{url}/api/v1/books/list?page={page}&size={page_size}"))
|
||||||
|
.basic_auth(&body.username, Some(&body.password))
|
||||||
|
.header("Content-Type", "application/json")
|
||||||
|
.json(&serde_json::json!({ "condition": { "readStatus": { "operator": "is", "value": "READ" } } }))
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
.map_err(|e| ApiError::internal(format!("Komga request failed: {e}")))?;
|
||||||
|
|
||||||
|
if !resp.status().is_success() {
|
||||||
|
let status = resp.status();
|
||||||
|
let text = resp.text().await.unwrap_or_default();
|
||||||
|
return Err(ApiError::internal(format!(
|
||||||
|
"Komga returned {status}: {text}"
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
|
||||||
|
let data: KomgaBooksResponse = resp
|
||||||
|
.json()
|
||||||
|
.await
|
||||||
|
.map_err(|e| ApiError::internal(format!("Failed to parse Komga response: {e}")))?;
|
||||||
|
|
||||||
|
for book in &data.content {
|
||||||
|
let title = if !book.metadata.title.is_empty() {
|
||||||
|
&book.metadata.title
|
||||||
|
} else {
|
||||||
|
&book.name
|
||||||
|
};
|
||||||
|
komga_books.push((book.series_title.clone(), title.clone()));
|
||||||
|
}
|
||||||
|
|
||||||
|
if data.number >= data.total_pages - 1 || page >= max_pages {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
page += 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
let total_komga_read = komga_books.len() as i64;
|
||||||
|
|
||||||
|
// Build local lookup maps
|
||||||
|
let rows = sqlx::query(
|
||||||
|
"SELECT id, title, COALESCE(series, '') as series, LOWER(title) as title_lower, LOWER(COALESCE(series, '')) as series_lower FROM books",
|
||||||
|
)
|
||||||
|
.fetch_all(&state.pool)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
type BookEntry = (Uuid, String, String);
|
||||||
|
// Primary: (series_lower, title_lower) -> Vec<(Uuid, title, series)>
|
||||||
|
let mut primary_map: HashMap<(String, String), Vec<BookEntry>> = HashMap::new();
|
||||||
|
// Secondary: title_lower -> Vec<(Uuid, title, series)>
|
||||||
|
let mut secondary_map: HashMap<String, Vec<BookEntry>> = HashMap::new();
|
||||||
|
|
||||||
|
for row in &rows {
|
||||||
|
let id: Uuid = row.get("id");
|
||||||
|
let title: String = row.get("title");
|
||||||
|
let series: String = row.get("series");
|
||||||
|
let title_lower: String = row.get("title_lower");
|
||||||
|
let series_lower: String = row.get("series_lower");
|
||||||
|
let entry = (id, title, series);
|
||||||
|
|
||||||
|
primary_map
|
||||||
|
.entry((series_lower, title_lower.clone()))
|
||||||
|
.or_default()
|
||||||
|
.push(entry.clone());
|
||||||
|
secondary_map.entry(title_lower).or_default().push(entry);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Match Komga books to local books
|
||||||
|
let mut matched_entries: Vec<(Uuid, String)> = Vec::new(); // (id, display_title)
|
||||||
|
let mut unmatched: Vec<String> = Vec::new();
|
||||||
|
|
||||||
|
for (series_title, title) in &komga_books {
|
||||||
|
let title_lower = title.to_lowercase();
|
||||||
|
let series_lower = series_title.to_lowercase();
|
||||||
|
|
||||||
|
let found = if let Some(entries) = primary_map.get(&(series_lower.clone(), title_lower.clone())) {
|
||||||
|
Some(entries)
|
||||||
|
} else {
|
||||||
|
secondary_map.get(&title_lower)
|
||||||
|
};
|
||||||
|
|
||||||
|
if let Some(entries) = found {
|
||||||
|
for (id, local_title, local_series) in entries {
|
||||||
|
let display = if local_series.is_empty() {
|
||||||
|
local_title.clone()
|
||||||
|
} else {
|
||||||
|
format!("{local_series} - {local_title}")
|
||||||
|
};
|
||||||
|
matched_entries.push((*id, display));
|
||||||
|
}
|
||||||
|
} else if series_title.is_empty() {
|
||||||
|
unmatched.push(title.clone());
|
||||||
|
} else {
|
||||||
|
unmatched.push(format!("{series_title} - {title}"));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Deduplicate by ID
|
||||||
|
matched_entries.sort_by(|a, b| a.0.cmp(&b.0));
|
||||||
|
matched_entries.dedup_by(|a, b| a.0 == b.0);
|
||||||
|
|
||||||
|
let matched_ids: Vec<Uuid> = matched_entries.iter().map(|(id, _)| *id).collect();
|
||||||
|
let matched = matched_ids.len() as i64;
|
||||||
|
let mut already_read: i64 = 0;
|
||||||
|
let mut already_read_ids: std::collections::HashSet<Uuid> = std::collections::HashSet::new();
|
||||||
|
|
||||||
|
if !matched_ids.is_empty() {
|
||||||
|
// Get already-read book IDs for this user
|
||||||
|
let ar_rows = sqlx::query(
|
||||||
|
"SELECT book_id FROM book_reading_progress WHERE book_id = ANY($1) AND user_id = $2 AND status = 'read'",
|
||||||
|
)
|
||||||
|
.bind(&matched_ids)
|
||||||
|
.bind(body.user_id)
|
||||||
|
.fetch_all(&state.pool)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
for row in &ar_rows {
|
||||||
|
already_read_ids.insert(row.get("book_id"));
|
||||||
|
}
|
||||||
|
already_read = already_read_ids.len() as i64;
|
||||||
|
|
||||||
|
// Bulk upsert all matched books as read for this user
|
||||||
|
sqlx::query(
|
||||||
|
r#"
|
||||||
|
INSERT INTO book_reading_progress (book_id, user_id, status, current_page, last_read_at, updated_at)
|
||||||
|
SELECT unnest($1::uuid[]), $2, 'read', NULL, NOW(), NOW()
|
||||||
|
ON CONFLICT (book_id, user_id) DO UPDATE
|
||||||
|
SET status = 'read',
|
||||||
|
current_page = NULL,
|
||||||
|
last_read_at = NOW(),
|
||||||
|
updated_at = NOW()
|
||||||
|
WHERE book_reading_progress.status != 'read'
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.bind(&matched_ids)
|
||||||
|
.bind(body.user_id)
|
||||||
|
.execute(&state.pool)
|
||||||
|
.await?;
|
||||||
|
}
|
||||||
|
|
||||||
|
let newly_marked = matched - already_read;
|
||||||
|
|
||||||
|
// Build matched_books and newly_marked_books lists
|
||||||
|
let mut newly_marked_books: Vec<String> = Vec::new();
|
||||||
|
let mut matched_books: Vec<String> = Vec::new();
|
||||||
|
for (id, title) in &matched_entries {
|
||||||
|
if !already_read_ids.contains(id) {
|
||||||
|
newly_marked_books.push(title.clone());
|
||||||
|
}
|
||||||
|
matched_books.push(title.clone());
|
||||||
|
}
|
||||||
|
// Sort: newly marked first, then alphabetical
|
||||||
|
let newly_marked_set: std::collections::HashSet<&str> =
|
||||||
|
newly_marked_books.iter().map(|s| s.as_str()).collect();
|
||||||
|
matched_books.sort_by(|a, b| {
|
||||||
|
let a_new = newly_marked_set.contains(a.as_str());
|
||||||
|
let b_new = newly_marked_set.contains(b.as_str());
|
||||||
|
b_new.cmp(&a_new).then(a.cmp(b))
|
||||||
|
});
|
||||||
|
newly_marked_books.sort();
|
||||||
|
|
||||||
|
// Save sync report
|
||||||
|
let unmatched_json = serde_json::to_value(&unmatched).unwrap_or_default();
|
||||||
|
let matched_books_json = serde_json::to_value(&matched_books).unwrap_or_default();
|
||||||
|
let newly_marked_books_json = serde_json::to_value(&newly_marked_books).unwrap_or_default();
|
||||||
|
let report_row = sqlx::query(
|
||||||
|
r#"
|
||||||
|
INSERT INTO komga_sync_reports (komga_url, user_id, total_komga_read, matched, already_read, newly_marked, matched_books, newly_marked_books, unmatched)
|
||||||
|
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
|
||||||
|
RETURNING id, created_at
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.bind(&url)
|
||||||
|
.bind(body.user_id)
|
||||||
|
.bind(total_komga_read)
|
||||||
|
.bind(matched)
|
||||||
|
.bind(already_read)
|
||||||
|
.bind(newly_marked)
|
||||||
|
.bind(&matched_books_json)
|
||||||
|
.bind(&newly_marked_books_json)
|
||||||
|
.bind(&unmatched_json)
|
||||||
|
.fetch_one(&state.pool)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(Json(KomgaSyncResponse {
|
||||||
|
id: report_row.get("id"),
|
||||||
|
komga_url: url,
|
||||||
|
user_id: Some(body.user_id),
|
||||||
|
total_komga_read,
|
||||||
|
matched,
|
||||||
|
already_read,
|
||||||
|
newly_marked,
|
||||||
|
matched_books,
|
||||||
|
newly_marked_books,
|
||||||
|
unmatched,
|
||||||
|
created_at: report_row.get("created_at"),
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
|
/// List Komga sync reports (most recent first)
#[utoipa::path(
    get,
    path = "/komga/reports",
    tag = "komga",
    responses(
        (status = 200, body = Vec<KomgaSyncReportSummary>),
        (status = 401, description = "Unauthorized"),
    ),
    security(("Bearer" = []))
)]
pub async fn list_sync_reports(
    State(state): State<AppState>,
) -> Result<Json<Vec<KomgaSyncReportSummary>>, ApiError> {
    let rows = sqlx::query(
        r#"
        SELECT id, komga_url, user_id, total_komga_read, matched, already_read, newly_marked,
               jsonb_array_length(unmatched) as unmatched_count, created_at
        FROM komga_sync_reports
        ORDER BY created_at DESC
        LIMIT 20
        "#,
    )
    .fetch_all(&state.pool)
    .await?;

    let reports: Vec<KomgaSyncReportSummary> = rows
        .iter()
        .map(|row| KomgaSyncReportSummary {
            id: row.get("id"),
            komga_url: row.get("komga_url"),
            user_id: row.get("user_id"),
            total_komga_read: row.get("total_komga_read"),
            matched: row.get("matched"),
            already_read: row.get("already_read"),
            newly_marked: row.get("newly_marked"),
            unmatched_count: row.get("unmatched_count"),
            created_at: row.get("created_at"),
        })
        .collect();

    Ok(Json(reports))
}
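
Note: the summary query keeps the list payload small by counting `unmatched` in SQL rather than decoding the JSONB array in Rust. A minimal sketch of the same expression in isolation, assuming a `pool: PgPool` in scope (the literal array is illustrative):

    let n: i32 = sqlx::query_scalar(r#"SELECT jsonb_array_length('["a","b"]'::jsonb)"#)
        .fetch_one(&pool)
        .await?; // n == 2; jsonb_array_length returns a Postgres integer, i.e. i32
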
/// Get a specific sync report with full unmatched list
#[utoipa::path(
    get,
    path = "/komga/reports/{id}",
    tag = "komga",
    params(("id" = String, Path, description = "Report UUID")),
    responses(
        (status = 200, body = KomgaSyncResponse),
        (status = 404, description = "Report not found"),
        (status = 401, description = "Unauthorized"),
    ),
    security(("Bearer" = []))
)]
pub async fn get_sync_report(
    State(state): State<AppState>,
    axum::extract::Path(id): axum::extract::Path<Uuid>,
) -> Result<Json<KomgaSyncResponse>, ApiError> {
    let row = sqlx::query(
        r#"
        SELECT id, komga_url, user_id, total_komga_read, matched, already_read, newly_marked, matched_books, newly_marked_books, unmatched, created_at
        FROM komga_sync_reports
        WHERE id = $1
        "#,
    )
    .bind(id)
    .fetch_optional(&state.pool)
    .await?;

    let row = row.ok_or_else(|| ApiError::not_found("report not found"))?;

    let matched_books_json: serde_json::Value = row.try_get("matched_books").unwrap_or(serde_json::Value::Array(vec![]));
    let matched_books: Vec<String> = serde_json::from_value(matched_books_json).unwrap_or_default();
    let newly_marked_books_json: serde_json::Value = row.try_get("newly_marked_books").unwrap_or(serde_json::Value::Array(vec![]));
    let newly_marked_books: Vec<String> = serde_json::from_value(newly_marked_books_json).unwrap_or_default();
    let unmatched_json: serde_json::Value = row.get("unmatched");
    let unmatched: Vec<String> = serde_json::from_value(unmatched_json).unwrap_or_default();

    Ok(Json(KomgaSyncResponse {
        id: row.get("id"),
        komga_url: row.get("komga_url"),
        user_id: row.get("user_id"),
        total_komga_read: row.get("total_komga_read"),
        matched: row.get("matched"),
        already_read: row.get("already_read"),
        newly_marked: row.get("newly_marked"),
        matched_books,
        newly_marked_books,
        unmatched,
        created_at: row.get("created_at"),
    }))
}
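
Note: `try_get` with an empty-array fallback lets rows written before `matched_books`/`newly_marked_books` existed decode as empty lists instead of failing the request. A minimal client-side sketch of reading a report back (the base URL and token are placeholders, not part of this change):

    use uuid::Uuid;

    async fn fetch_report(id: Uuid, token: &str) -> Result<serde_json::Value, reqwest::Error> {
        reqwest::Client::new()
            .get(format!("http://localhost:8080/komga/reports/{id}"))
            .bearer_auth(token)
            .send()
            .await?
            .error_for_status()?
            .json()
            .await
    }
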
@@ -6,7 +6,7 @@ use sqlx::Row;
 use uuid::Uuid;
 use utoipa::ToSchema;
 
-use crate::{error::ApiError, AppState};
+use crate::{error::ApiError, state::AppState};
 
 #[derive(Serialize, ToSchema)]
 pub struct LibraryResponse {
@@ -18,8 +18,18 @@ pub struct LibraryResponse {
     pub book_count: i64,
     pub monitor_enabled: bool,
     pub scan_mode: String,
+    #[schema(value_type = Option<String>)]
     pub next_scan_at: Option<chrono::DateTime<chrono::Utc>>,
     pub watcher_enabled: bool,
+    pub metadata_provider: Option<String>,
+    pub fallback_metadata_provider: Option<String>,
+    pub metadata_refresh_mode: String,
+    #[schema(value_type = Option<String>)]
+    pub next_metadata_refresh_at: Option<chrono::DateTime<chrono::Utc>>,
+    pub series_count: i64,
+    /// First book IDs from up to 5 distinct series (for thumbnail fan display)
+    #[schema(value_type = Vec<String>)]
+    pub thumbnail_book_ids: Vec<Uuid>,
 }
 
 #[derive(Deserialize, ToSchema)]
@@ -38,14 +48,27 @@ pub struct CreateLibraryRequest {
     responses(
         (status = 200, body = Vec<LibraryResponse>),
         (status = 401, description = "Unauthorized"),
-        (status = 403, description = "Forbidden - Admin scope required"),
     ),
     security(("Bearer" = []))
 )]
 pub async fn list_libraries(State(state): State<AppState>) -> Result<Json<Vec<LibraryResponse>>, ApiError> {
     let rows = sqlx::query(
-        "SELECT l.id, l.name, l.root_path, l.enabled, l.monitor_enabled, l.scan_mode, l.next_scan_at, l.watcher_enabled,
-         (SELECT COUNT(*) FROM books b WHERE b.library_id = l.id) as book_count
+        "SELECT l.id, l.name, l.root_path, l.enabled, l.monitor_enabled, l.scan_mode, l.next_scan_at, l.watcher_enabled, l.metadata_provider, l.fallback_metadata_provider, l.metadata_refresh_mode, l.next_metadata_refresh_at,
+         (SELECT COUNT(*) FROM books b WHERE b.library_id = l.id) as book_count,
+         (SELECT COUNT(DISTINCT COALESCE(NULLIF(b.series, ''), 'unclassified')) FROM books b WHERE b.library_id = l.id) as series_count,
+         COALESCE((
+             SELECT ARRAY_AGG(first_id ORDER BY series_name)
+             FROM (
+                 SELECT DISTINCT ON (COALESCE(NULLIF(b.series, ''), 'unclassified'))
+                     COALESCE(NULLIF(b.series, ''), 'unclassified') as series_name,
+                     b.id as first_id
+                 FROM books b
+                 WHERE b.library_id = l.id
+                 ORDER BY COALESCE(NULLIF(b.series, ''), 'unclassified'),
+                     b.volume NULLS LAST, b.title ASC
+                 LIMIT 5
+             ) sub
+         ), ARRAY[]::uuid[]) as thumbnail_book_ids
          FROM libraries l ORDER BY l.created_at DESC"
     )
     .fetch_all(&state.pool)
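
Note: the thumbnail subquery added here combines two Postgres features: DISTINCT ON keeps one row per series (the first by volume/title order), and ARRAY_AGG folds the surviving ids into a single uuid[] that sqlx decodes straight into Vec<Uuid>. A stripped-down sketch of the same shape, assuming a `pool: PgPool` in scope:

    let ids: Vec<Uuid> = sqlx::query_scalar(
        "SELECT COALESCE(ARRAY_AGG(first_id ORDER BY series_name), ARRAY[]::uuid[])
         FROM (
             SELECT DISTINCT ON (b.series) b.series AS series_name, b.id AS first_id
             FROM books b
             ORDER BY b.series, b.volume NULLS LAST
             LIMIT 5
         ) sub",
    )
    .fetch_one(&pool)
    .await?;
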
@@ -59,10 +82,16 @@ pub async fn list_libraries(State(state): State<AppState>) -> Result<Json<Vec<Li
             root_path: row.get("root_path"),
             enabled: row.get("enabled"),
             book_count: row.get("book_count"),
+            series_count: row.get("series_count"),
             monitor_enabled: row.get("monitor_enabled"),
             scan_mode: row.get("scan_mode"),
             next_scan_at: row.get("next_scan_at"),
             watcher_enabled: row.get("watcher_enabled"),
+            metadata_provider: row.get("metadata_provider"),
+            fallback_metadata_provider: row.get("fallback_metadata_provider"),
+            metadata_refresh_mode: row.get("metadata_refresh_mode"),
+            next_metadata_refresh_at: row.get("next_metadata_refresh_at"),
+            thumbnail_book_ids: row.get("thumbnail_book_ids"),
         })
         .collect();
 
@@ -110,10 +139,16 @@ pub async fn create_library(
         root_path,
         enabled: true,
         book_count: 0,
+        series_count: 0,
         monitor_enabled: false,
         scan_mode: "manual".to_string(),
         next_scan_at: None,
         watcher_enabled: false,
+        metadata_provider: None,
+        fallback_metadata_provider: None,
+        metadata_refresh_mode: "manual".to_string(),
+        next_metadata_refresh_at: None,
+        thumbnail_book_ids: vec![],
     }))
 }
 
@@ -155,14 +190,19 @@ fn canonicalize_library_root(root_path: &str) -> Result<PathBuf, ApiError> {
         return Err(ApiError::bad_request("root_path must be absolute"));
     }
 
-    let canonical = std::fs::canonicalize(path)
-        .map_err(|_| ApiError::bad_request("root_path does not exist or is inaccessible"))?;
-    if !canonical.is_dir() {
+    // Avoid fs::canonicalize — it opens extra file descriptors to resolve symlinks
+    // and can fail on Docker volume mounts (ro, cached) when fd limits are low.
+    if !path.exists() {
+        return Err(ApiError::bad_request(format!(
+            "root_path does not exist: {}",
+            root_path
+        )));
+    }
+    if !path.is_dir() {
         return Err(ApiError::bad_request("root_path must point to a directory"));
     }
 
-    Ok(canonical)
+    Ok(path.to_path_buf())
 }
 
 use crate::index_jobs::{IndexJobResponse, RebuildRequest};
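
Note: with fs::canonicalize gone, symlinked roots are now stored as given rather than resolved, so two libraries reaching the same directory through different symlinks are no longer normalized to one path. The absolute-path check still runs before any filesystem access; a hypothetical unit test for that invariant:

    #[test]
    fn rejects_relative_root_paths() {
        // Relative paths must be refused before exists()/is_dir() are ever consulted.
        assert!(canonicalize_library_root("relative/path").is_err());
    }
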
@@ -180,7 +220,6 @@ use crate::index_jobs::{IndexJobResponse, RebuildRequest};
         (status = 200, body = IndexJobResponse),
         (status = 404, description = "Library not found"),
         (status = 401, description = "Unauthorized"),
-        (status = 403, description = "Forbidden - Admin scope required"),
     ),
     security(("Bearer" = []))
 )]
@@ -200,7 +239,8 @@ pub async fn scan_library(
     }
 
     let is_full = payload.as_ref().and_then(|p| p.full).unwrap_or(false);
-    let job_type = if is_full { "full_rebuild" } else { "rebuild" };
+    let is_rescan = payload.as_ref().and_then(|p| p.rescan).unwrap_or(false);
+    let job_type = if is_full { "full_rebuild" } else if is_rescan { "rescan" } else { "rebuild" };
 
     // Create indexing job for this library
     let job_id = Uuid::new_v4();
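
Note: with the new `rescan` flag, the precedence is full_rebuild over rescan over rebuild. A sketch of the same dispatch in isolation:

    fn job_type(full: Option<bool>, rescan: Option<bool>) -> &'static str {
        match (full.unwrap_or(false), rescan.unwrap_or(false)) {
            (true, _) => "full_rebuild",
            (false, true) => "rescan",
            (false, false) => "rebuild",
        }
    }
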
@@ -229,6 +269,8 @@ pub struct UpdateMonitoringRequest {
     #[schema(value_type = String, example = "hourly")]
     pub scan_mode: String, // 'manual', 'hourly', 'daily', 'weekly'
     pub watcher_enabled: Option<bool>,
+    #[schema(value_type = Option<String>, example = "daily")]
+    pub metadata_refresh_mode: Option<String>, // 'manual', 'hourly', 'daily', 'weekly'
 }
 
 /// Update monitoring settings for a library
@@ -259,6 +301,12 @@ pub async fn update_monitoring(
         return Err(ApiError::bad_request("scan_mode must be one of: manual, hourly, daily, weekly"));
     }
 
+    // Validate metadata_refresh_mode
+    let metadata_refresh_mode = input.metadata_refresh_mode.as_deref().unwrap_or("manual");
+    if !valid_modes.contains(&metadata_refresh_mode) {
+        return Err(ApiError::bad_request("metadata_refresh_mode must be one of: manual, hourly, daily, weekly"));
+    }
+
     // Calculate next_scan_at if monitoring is enabled
     let next_scan_at = if input.monitor_enabled {
         let interval_minutes = match input.scan_mode.as_str() {
@@ -272,16 +320,31 @@ pub async fn update_monitoring(
         None
     };
 
+    // Calculate next_metadata_refresh_at
+    let next_metadata_refresh_at = if metadata_refresh_mode != "manual" {
+        let interval_minutes = match metadata_refresh_mode {
+            "hourly" => 60,
+            "daily" => 1440,
+            "weekly" => 10080,
+            _ => 1440,
+        };
+        Some(chrono::Utc::now() + chrono::Duration::minutes(interval_minutes))
+    } else {
+        None
+    };
+
     let watcher_enabled = input.watcher_enabled.unwrap_or(false);
 
     let result = sqlx::query(
-        "UPDATE libraries SET monitor_enabled = $2, scan_mode = $3, next_scan_at = $4, watcher_enabled = $5 WHERE id = $1 RETURNING id, name, root_path, enabled, monitor_enabled, scan_mode, next_scan_at, watcher_enabled"
+        "UPDATE libraries SET monitor_enabled = $2, scan_mode = $3, next_scan_at = $4, watcher_enabled = $5, metadata_refresh_mode = $6, next_metadata_refresh_at = $7 WHERE id = $1 RETURNING id, name, root_path, enabled, monitor_enabled, scan_mode, next_scan_at, watcher_enabled, metadata_provider, fallback_metadata_provider, metadata_refresh_mode, next_metadata_refresh_at"
     )
     .bind(library_id)
     .bind(input.monitor_enabled)
     .bind(input.scan_mode)
     .bind(next_scan_at)
     .bind(watcher_enabled)
+    .bind(metadata_refresh_mode)
+    .bind(next_metadata_refresh_at)
     .fetch_optional(&state.pool)
     .await?;
 
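
Note: the mode-to-minutes mapping now exists twice in this handler (scan scheduling and metadata refresh); a hypothetical shared helper would keep the two in sync:

    fn interval_minutes(mode: &str) -> i64 {
        match mode {
            "hourly" => 60,
            "weekly" => 10080,
            _ => 1440, // "daily" and any unexpected value
        }
    }
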
@@ -294,15 +357,121 @@ pub async fn update_monitoring(
     .fetch_one(&state.pool)
     .await?;
 
+    let series_count: i64 = sqlx::query_scalar("SELECT COUNT(DISTINCT COALESCE(NULLIF(series, ''), 'unclassified')) FROM books WHERE library_id = $1")
+        .bind(library_id)
+        .fetch_one(&state.pool)
+        .await?;
+
+    let thumbnail_book_ids: Vec<Uuid> = sqlx::query_scalar(
+        "SELECT b.id FROM books b
+         WHERE b.library_id = $1
+         ORDER BY COALESCE(NULLIF(b.series, ''), 'unclassified'), b.volume NULLS LAST, b.title ASC
+         LIMIT 5"
+    )
+    .bind(library_id)
+    .fetch_all(&state.pool)
+    .await
+    .unwrap_or_default();
+
     Ok(Json(LibraryResponse {
         id: row.get("id"),
         name: row.get("name"),
         root_path: row.get("root_path"),
         enabled: row.get("enabled"),
         book_count,
+        series_count,
         monitor_enabled: row.get("monitor_enabled"),
         scan_mode: row.get("scan_mode"),
         next_scan_at: row.get("next_scan_at"),
         watcher_enabled: row.get("watcher_enabled"),
+        metadata_provider: row.get("metadata_provider"),
+        fallback_metadata_provider: row.get("fallback_metadata_provider"),
+        metadata_refresh_mode: row.get("metadata_refresh_mode"),
+        next_metadata_refresh_at: row.get("next_metadata_refresh_at"),
+        thumbnail_book_ids,
+    }))
+}
+
+#[derive(Deserialize, ToSchema)]
+pub struct UpdateMetadataProviderRequest {
+    pub metadata_provider: Option<String>,
+    pub fallback_metadata_provider: Option<String>,
+}
+
+/// Update the metadata provider for a library
+#[utoipa::path(
+    patch,
+    path = "/libraries/{id}/metadata-provider",
+    tag = "libraries",
+    params(
+        ("id" = String, Path, description = "Library UUID"),
+    ),
+    request_body = UpdateMetadataProviderRequest,
+    responses(
+        (status = 200, body = LibraryResponse),
+        (status = 404, description = "Library not found"),
+        (status = 401, description = "Unauthorized"),
+        (status = 403, description = "Forbidden - Admin scope required"),
+    ),
+    security(("Bearer" = []))
+)]
+pub async fn update_metadata_provider(
+    State(state): State<AppState>,
+    AxumPath(library_id): AxumPath<Uuid>,
+    Json(input): Json<UpdateMetadataProviderRequest>,
+) -> Result<Json<LibraryResponse>, ApiError> {
+    let provider = input.metadata_provider.as_deref().filter(|s| !s.is_empty());
+    let fallback = input.fallback_metadata_provider.as_deref().filter(|s| !s.is_empty());
+
+    let result = sqlx::query(
+        "UPDATE libraries SET metadata_provider = $2, fallback_metadata_provider = $3 WHERE id = $1 RETURNING id, name, root_path, enabled, monitor_enabled, scan_mode, next_scan_at, watcher_enabled, metadata_provider, fallback_metadata_provider, metadata_refresh_mode, next_metadata_refresh_at"
+    )
+    .bind(library_id)
+    .bind(provider)
+    .bind(fallback)
+    .fetch_optional(&state.pool)
+    .await?;
+
+    let Some(row) = result else {
+        return Err(ApiError::not_found("library not found"));
+    };
+
+    let book_count: i64 = sqlx::query_scalar("SELECT COUNT(*) FROM books WHERE library_id = $1")
+        .bind(library_id)
+        .fetch_one(&state.pool)
+        .await?;
+
+    let series_count: i64 = sqlx::query_scalar("SELECT COUNT(DISTINCT COALESCE(NULLIF(series, ''), 'unclassified')) FROM books WHERE library_id = $1")
+        .bind(library_id)
+        .fetch_one(&state.pool)
+        .await?;
+
+    let thumbnail_book_ids: Vec<Uuid> = sqlx::query_scalar(
+        "SELECT b.id FROM books b
+         WHERE b.library_id = $1
+         ORDER BY COALESCE(NULLIF(b.series, ''), 'unclassified'), b.volume NULLS LAST, b.title ASC
+         LIMIT 5"
+    )
+    .bind(library_id)
+    .fetch_all(&state.pool)
+    .await
+    .unwrap_or_default();
+
+    Ok(Json(LibraryResponse {
+        id: row.get("id"),
+        name: row.get("name"),
+        root_path: row.get("root_path"),
+        enabled: row.get("enabled"),
+        book_count,
+        series_count,
+        monitor_enabled: row.get("monitor_enabled"),
+        scan_mode: row.get("scan_mode"),
+        next_scan_at: row.get("next_scan_at"),
+        watcher_enabled: row.get("watcher_enabled"),
+        metadata_provider: row.get("metadata_provider"),
+        fallback_metadata_provider: row.get("fallback_metadata_provider"),
+        metadata_refresh_mode: row.get("metadata_refresh_mode"),
+        next_metadata_refresh_at: row.get("next_metadata_refresh_at"),
+        thumbnail_book_ids,
     }))
 }
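
Note: book_count, series_count, and thumbnail_book_ids are now computed in three handlers (list, monitoring update, provider update). A hypothetical helper consolidating the three per-library stat queries repeated above:

    use sqlx::PgPool;
    use uuid::Uuid;

    async fn library_stats(pool: &PgPool, id: Uuid) -> sqlx::Result<(i64, i64, Vec<Uuid>)> {
        let books: i64 = sqlx::query_scalar("SELECT COUNT(*) FROM books WHERE library_id = $1")
            .bind(id)
            .fetch_one(pool)
            .await?;
        let series: i64 = sqlx::query_scalar(
            "SELECT COUNT(DISTINCT COALESCE(NULLIF(series, ''), 'unclassified')) FROM books WHERE library_id = $1",
        )
        .bind(id)
        .fetch_one(pool)
        .await?;
        let thumbs: Vec<Uuid> = sqlx::query_scalar(
            "SELECT b.id FROM books b WHERE b.library_id = $1
             ORDER BY COALESCE(NULLIF(b.series, ''), 'unclassified'), b.volume NULLS LAST, b.title ASC
             LIMIT 5",
        )
        .bind(id)
        .fetch_all(pool)
        .await?;
        Ok((books, series, thumbs))
    }
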
@@ -1,69 +1,50 @@
 mod auth;
+mod authors;
 mod books;
 mod error;
+mod handlers;
 mod index_jobs;
+mod job_poller;
+mod komga;
 mod libraries;
+mod metadata;
+mod metadata_batch;
+mod metadata_refresh;
+mod metadata_providers;
+mod api_middleware;
 mod openapi;
 mod pages;
+mod prowlarr;
+mod qbittorrent;
+mod reading_progress;
 mod search;
+mod series;
 mod settings;
+mod state;
+mod stats;
+mod telegram;
+mod thumbnails;
 mod tokens;
+mod users;
 
-use std::{
-    num::NonZeroUsize,
-    sync::{
-        atomic::{AtomicU64, Ordering},
-        Arc,
-    },
-    time::{Duration, Instant},
-};
+use std::sync::Arc;
+use std::time::Instant;
 
 use axum::{
     middleware,
-    response::IntoResponse,
     routing::{delete, get},
-    Json, Router,
+    Router,
 };
 use utoipa::OpenApi;
 use utoipa_swagger_ui::SwaggerUi;
 use lru::LruCache;
+use std::num::NonZeroUsize;
 use stripstream_core::config::ApiConfig;
 use sqlx::postgres::PgPoolOptions;
-use tokio::sync::{Mutex, Semaphore};
+use tokio::sync::{Mutex, RwLock, Semaphore};
 use tracing::info;
 
-#[derive(Clone)]
-struct AppState {
-    pool: sqlx::PgPool,
-    bootstrap_token: Arc<str>,
-    meili_url: Arc<str>,
-    meili_master_key: Arc<str>,
-    page_cache: Arc<Mutex<LruCache<String, Arc<Vec<u8>>>>>,
-    page_render_limit: Arc<Semaphore>,
-    metrics: Arc<Metrics>,
-    read_rate_limit: Arc<Mutex<ReadRateLimit>>,
-}
-
-struct Metrics {
-    requests_total: AtomicU64,
-    page_cache_hits: AtomicU64,
-    page_cache_misses: AtomicU64,
-}
-
-struct ReadRateLimit {
-    window_started_at: Instant,
-    requests_in_window: u32,
-}
-
-impl Metrics {
-    fn new() -> Self {
-        Self {
-            requests_total: AtomicU64::new(0),
-            page_cache_hits: AtomicU64::new(0),
-            page_cache_misses: AtomicU64::new(0),
-        }
-    }
-}
+use crate::state::{load_concurrent_renders, load_dynamic_settings, AppState, Metrics, ReadRateLimit};
 
 #[tokio::main]
 async fn main() -> anyhow::Result<()> {
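
Note: the AppState/Metrics/ReadRateLimit definitions move into the new state module, whose diff is not shown in this compare view. From the call sites in this file, its skeleton is roughly the following; this is an inferred sketch, not the committed apps/api/src/state.rs, and the DynamicSettings field types are guesses taken from the log line in main():

    // apps/api/src/state.rs (inferred skeleton)
    use std::sync::{atomic::AtomicU64, Arc};
    use std::time::Instant;
    use lru::LruCache;
    use tokio::sync::{Mutex, RwLock, Semaphore};

    #[derive(Clone)]
    pub struct AppState {
        pub pool: sqlx::PgPool,
        pub bootstrap_token: Arc<str>,
        pub page_cache: Arc<Mutex<LruCache<String, Arc<Vec<u8>>>>>,
        pub page_render_limit: Arc<Semaphore>,
        pub metrics: Arc<Metrics>,
        pub read_rate_limit: Arc<Mutex<ReadRateLimit>>,
        pub settings: Arc<RwLock<DynamicSettings>>,
    }

    pub struct Metrics {
        pub requests_total: AtomicU64,
        pub page_cache_hits: AtomicU64,
        pub page_cache_misses: AtomicU64,
    }

    pub struct ReadRateLimit {
        pub window_started_at: Instant,
        pub requests_in_window: u32,
    }

    pub struct DynamicSettings {
        pub rate_limit_per_second: u32,
        pub timeout_seconds: u64,
        pub image_format: String,
        pub image_quality: u8,
        pub image_filter: String,
        pub image_max_width: u32,
        pub cache_directory: String,
    }

    // load_concurrent_renders(&pool) and load_dynamic_settings(&pool) read these
    // values from the settings table; their bodies are not part of this diff.

Also worth noting: meili_url and meili_master_key drop out of AppState entirely in this change.
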
@@ -79,26 +60,46 @@ async fn main() -> anyhow::Result<()> {
         .connect(&config.database_url)
         .await?;
 
+    // Load concurrent_renders from settings, default to 8
+    let concurrent_renders = load_concurrent_renders(&pool).await;
+    info!("Using concurrent_renders limit: {}", concurrent_renders);
+
+    let dynamic_settings = load_dynamic_settings(&pool).await;
+    info!(
+        "Dynamic settings: rate_limit={}, timeout={}s, format={}, quality={}, filter={}, max_width={}, cache_dir={}",
+        dynamic_settings.rate_limit_per_second,
+        dynamic_settings.timeout_seconds,
+        dynamic_settings.image_format,
+        dynamic_settings.image_quality,
+        dynamic_settings.image_filter,
+        dynamic_settings.image_max_width,
+        dynamic_settings.cache_directory,
+    );
+
     let state = AppState {
         pool,
         bootstrap_token: Arc::from(config.api_bootstrap_token),
-        meili_url: Arc::from(config.meili_url),
-        meili_master_key: Arc::from(config.meili_master_key),
         page_cache: Arc::new(Mutex::new(LruCache::new(NonZeroUsize::new(512).expect("non-zero")))),
-        page_render_limit: Arc::new(Semaphore::new(8)),
+        page_render_limit: Arc::new(Semaphore::new(concurrent_renders)),
         metrics: Arc::new(Metrics::new()),
         read_rate_limit: Arc::new(Mutex::new(ReadRateLimit {
            window_started_at: Instant::now(),
            requests_in_window: 0,
         })),
+        settings: Arc::new(RwLock::new(dynamic_settings)),
     };
 
     let admin_routes = Router::new()
-        .route("/libraries", get(libraries::list_libraries).post(libraries::create_library))
+        .route("/libraries", axum::routing::post(libraries::create_library))
         .route("/libraries/:id", delete(libraries::delete_library))
-        .route("/libraries/:id/scan", axum::routing::post(libraries::scan_library))
         .route("/libraries/:id/monitoring", axum::routing::patch(libraries::update_monitoring))
+        .route("/libraries/:id/metadata-provider", axum::routing::patch(libraries::update_metadata_provider))
+        .route("/books/:id", axum::routing::patch(books::update_book))
+        .route("/books/:id/convert", axum::routing::post(books::convert_book))
+        .route("/libraries/:library_id/series/:name", axum::routing::patch(series::update_series))
        .route("/index/rebuild", axum::routing::post(index_jobs::enqueue_rebuild))
+        .route("/index/thumbnails/rebuild", axum::routing::post(thumbnails::start_thumbnails_rebuild))
+        .route("/index/thumbnails/regenerate", axum::routing::post(thumbnails::start_thumbnails_regenerate))
         .route("/index/status", get(index_jobs::list_index_jobs))
         .route("/index/jobs/active", get(index_jobs::get_active_jobs))
         .route("/index/jobs/:id", get(index_jobs::get_job_details))
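
Note: the render semaphore is now sized from the settings table, but only once at startup, so a changed concurrent_renders value takes effect after a restart. A sketch of how a permit bounds concurrent page renders (the actual render call site is not in this diff):

    async fn render_page(state: &AppState) {
        let _permit = state.page_render_limit.acquire().await.expect("semaphore closed");
        // ...decode and resize the page while holding the permit...
    }
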
@@ -106,8 +107,31 @@ async fn main() -> anyhow::Result<()> {
         .route("/index/jobs/:id/errors", get(index_jobs::get_job_errors))
         .route("/index/cancel/:id", axum::routing::post(index_jobs::cancel_job))
         .route("/folders", get(index_jobs::list_folders))
+        .route("/admin/users", get(users::list_users).post(users::create_user))
+        .route("/admin/users/:id", delete(users::delete_user).patch(users::update_user))
         .route("/admin/tokens", get(tokens::list_tokens).post(tokens::create_token))
-        .route("/admin/tokens/:id", delete(tokens::revoke_token))
+        .route("/admin/tokens/:id", delete(tokens::revoke_token).patch(tokens::update_token))
+        .route("/admin/tokens/:id/delete", axum::routing::post(tokens::delete_token))
+        .route("/prowlarr/search", axum::routing::post(prowlarr::search_prowlarr))
+        .route("/prowlarr/test", get(prowlarr::test_prowlarr))
+        .route("/qbittorrent/add", axum::routing::post(qbittorrent::add_torrent))
+        .route("/qbittorrent/test", get(qbittorrent::test_qbittorrent))
+        .route("/telegram/test", get(telegram::test_telegram))
+        .route("/komga/sync", axum::routing::post(komga::sync_komga_read_books))
+        .route("/komga/reports", get(komga::list_sync_reports))
+        .route("/komga/reports/:id", get(komga::get_sync_report))
+        .route("/metadata/search", axum::routing::post(metadata::search_metadata))
+        .route("/metadata/match", axum::routing::post(metadata::create_metadata_match))
+        .route("/metadata/approve/:id", axum::routing::post(metadata::approve_metadata))
+        .route("/metadata/reject/:id", axum::routing::post(metadata::reject_metadata))
+        .route("/metadata/links", get(metadata::get_metadata_links))
+        .route("/metadata/missing/:id", get(metadata::get_missing_books))
+        .route("/metadata/links/:id", delete(metadata::delete_metadata_link))
+        .route("/metadata/batch", axum::routing::post(metadata_batch::start_batch))
+        .route("/metadata/batch/:id/report", get(metadata_batch::get_batch_report))
+        .route("/metadata/batch/:id/results", get(metadata_batch::get_batch_results))
+        .route("/metadata/refresh", axum::routing::post(metadata_refresh::start_refresh))
+        .route("/metadata/refresh/:id/report", get(metadata_refresh::get_refresh_report))
         .merge(settings::settings_routes())
         .route_layer(middleware::from_fn_with_state(
             state.clone(),
@@ -115,85 +139,52 @@ async fn main() -> anyhow::Result<()> {
         ));
 
     let read_routes = Router::new()
+        .route("/libraries", get(libraries::list_libraries))
+        .route("/libraries/:id/scan", axum::routing::post(libraries::scan_library))
         .route("/books", get(books::list_books))
+        .route("/books/ongoing", get(series::ongoing_books))
         .route("/books/:id", get(books::get_book))
+        .route("/books/:id/thumbnail", get(books::get_thumbnail))
         .route("/books/:id/pages/:n", get(pages::get_page))
-        .route("/libraries/:library_id/series", get(books::list_series))
+        .route("/books/:id/progress", get(reading_progress::get_reading_progress).patch(reading_progress::update_reading_progress))
+        .route("/libraries/:library_id/series", get(series::list_series))
+        .route("/libraries/:library_id/series/:name/metadata", get(series::get_series_metadata))
+        .route("/series", get(series::list_all_series))
+        .route("/series/ongoing", get(series::ongoing_series))
+        .route("/series/statuses", get(series::series_statuses))
+        .route("/series/provider-statuses", get(series::provider_statuses))
+        .route("/series/mark-read", axum::routing::post(reading_progress::mark_series_read))
+        .route("/authors", get(authors::list_authors))
+        .route("/stats", get(stats::get_stats))
         .route("/search", get(search::search_books))
-        .route_layer(middleware::from_fn_with_state(state.clone(), read_rate_limit))
+        .route_layer(middleware::from_fn_with_state(state.clone(), api_middleware::read_rate_limit))
         .route_layer(middleware::from_fn_with_state(
             state.clone(),
             auth::require_read,
         ));
 
+    // Clone pool before state is moved into the router
+    let poller_pool = state.pool.clone();
+
     let app = Router::new()
-        .route("/health", get(health))
-        .route("/ready", get(ready))
-        .route("/metrics", get(metrics))
-        .route("/docs", get(docs_redirect))
+        .route("/health", get(handlers::health))
+        .route("/ready", get(handlers::ready))
+        .route("/metrics", get(handlers::metrics))
+        .route("/docs", get(handlers::docs_redirect))
         .merge(SwaggerUi::new("/swagger-ui").url("/openapi.json", openapi::ApiDoc::openapi()))
         .merge(admin_routes)
         .merge(read_routes)
-        .layer(middleware::from_fn_with_state(state.clone(), request_counter))
+        .layer(middleware::from_fn_with_state(state.clone(), api_middleware::request_counter))
         .with_state(state);
 
+    // Start background poller for API-only jobs (metadata_batch, metadata_refresh)
+    tokio::spawn(async move {
+        job_poller::run_job_poller(poller_pool, 5).await;
+    });
+
     let listener = tokio::net::TcpListener::bind(&config.listen_addr).await?;
     info!(addr = %config.listen_addr, "api listening");
     axum::serve(listener, app).await?;
     Ok(())
 }
 
-async fn health() -> &'static str {
-    "ok"
-}
-
-async fn docs_redirect() -> impl axum::response::IntoResponse {
-    axum::response::Redirect::to("/swagger-ui/")
-}
-
-async fn ready(axum::extract::State(state): axum::extract::State<AppState>) -> Result<Json<serde_json::Value>, error::ApiError> {
-    sqlx::query("SELECT 1").execute(&state.pool).await?;
-    Ok(Json(serde_json::json!({"status": "ready"})))
-}
-
-async fn metrics(axum::extract::State(state): axum::extract::State<AppState>) -> String {
-    format!(
-        "requests_total {}\npage_cache_hits {}\npage_cache_misses {}\n",
-        state.metrics.requests_total.load(Ordering::Relaxed),
-        state.metrics.page_cache_hits.load(Ordering::Relaxed),
-        state.metrics.page_cache_misses.load(Ordering::Relaxed),
-    )
-}
-
-async fn request_counter(
-    axum::extract::State(state): axum::extract::State<AppState>,
-    req: axum::extract::Request,
-    next: axum::middleware::Next,
-) -> axum::response::Response {
-    state.metrics.requests_total.fetch_add(1, Ordering::Relaxed);
-    next.run(req).await
-}
-
-async fn read_rate_limit(
-    axum::extract::State(state): axum::extract::State<AppState>,
-    req: axum::extract::Request,
-    next: axum::middleware::Next,
-) -> axum::response::Response {
-    let mut limiter = state.read_rate_limit.lock().await;
-    if limiter.window_started_at.elapsed() >= Duration::from_secs(1) {
-        limiter.window_started_at = Instant::now();
-        limiter.requests_in_window = 0;
-    }
-
-    if limiter.requests_in_window >= 120 {
-        return (
-            axum::http::StatusCode::TOO_MANY_REQUESTS,
-            "rate limit exceeded",
-        )
-            .into_response();
-    }
-
-    limiter.requests_in_window += 1;
-    drop(limiter);
-    next.run(req).await
-}
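
Note: the removed helpers presumably move verbatim into the new handlers and api_middleware modules (their diffs are not shown here). The read limiter being moved is a fixed-window counter: at most 120 requests per 1-second window, shared across all clients. The known boundary case is that up to 240 requests can pass back-to-back when a burst straddles a window reset. A minimal model of the window logic it implements:

    struct Window {
        started: std::time::Instant,
        count: u32,
    }

    fn allow(w: &mut Window, limit: u32) -> bool {
        // Reset the counter once the one-second window has elapsed.
        if w.started.elapsed() >= std::time::Duration::from_secs(1) {
            w.started = std::time::Instant::now();
            w.count = 0;
        }
        if w.count >= limit {
            return false; // would map to 429 Too Many Requests
        }
        w.count += 1;
        true
    }
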
apps/api/src/metadata.rs (new file, 1097 lines): diff suppressed because it is too large
apps/api/src/metadata_batch.rs (new file, 1145 lines): diff suppressed because it is too large
apps/api/src/metadata_providers/anilist.rs (new file, 342 lines):
@@ -0,0 +1,342 @@
use super::{BookCandidate, MetadataProvider, ProviderConfig, SeriesCandidate};

pub struct AniListProvider;

impl MetadataProvider for AniListProvider {
    fn name(&self) -> &str {
        "anilist"
    }

    fn search_series(
        &self,
        query: &str,
        config: &ProviderConfig,
    ) -> std::pin::Pin<
        Box<dyn std::future::Future<Output = Result<Vec<SeriesCandidate>, String>> + Send + '_>,
    > {
        let query = query.to_string();
        let config = config.clone();
        Box::pin(async move { search_series_impl(&query, &config).await })
    }

    fn get_series_books(
        &self,
        external_id: &str,
        config: &ProviderConfig,
    ) -> std::pin::Pin<
        Box<dyn std::future::Future<Output = Result<Vec<BookCandidate>, String>> + Send + '_>,
    > {
        let external_id = external_id.to_string();
        let config = config.clone();
        Box::pin(async move { get_series_books_impl(&external_id, &config).await })
    }
}

const SEARCH_QUERY: &str = r#"
query ($search: String) {
  Page(perPage: 20) {
    media(search: $search, type: MANGA, sort: SEARCH_MATCH) {
      id
      title { romaji english native }
      description(asHtml: false)
      coverImage { large medium }
      startDate { year }
      status
      volumes
      chapters
      staff { edges { node { name { full } } role } }
      siteUrl
      genres
    }
  }
}
"#;

const DETAIL_QUERY: &str = r#"
query ($id: Int) {
  Media(id: $id, type: MANGA) {
    id
    title { romaji english native }
    description(asHtml: false)
    coverImage { large medium }
    startDate { year }
    status
    volumes
    chapters
    staff { edges { node { name { full } } role } }
    siteUrl
    genres
  }
}
"#;

async fn graphql_request(
    client: &reqwest::Client,
    query: &str,
    variables: serde_json::Value,
) -> Result<serde_json::Value, String> {
    let resp = client
        .post("https://graphql.anilist.co")
        .header("Content-Type", "application/json")
        .json(&serde_json::json!({
            "query": query,
            "variables": variables,
        }))
        .send()
        .await
        .map_err(|e| format!("AniList request failed: {e}"))?;

    if !resp.status().is_success() {
        let status = resp.status();
        let text = resp.text().await.unwrap_or_default();
        return Err(format!("AniList returned {status}: {text}"));
    }

    resp.json()
        .await
        .map_err(|e| format!("Failed to parse AniList response: {e}"))
}
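
Note: the Pin<Box<dyn Future + Send + '_>> return type is what keeps MetadataProvider object-safe, so AniList, Bedetheque, and the rest can sit behind a single `dyn MetadataProvider`. Cloning `query` and `config` into owned values before Box::pin is deliberate: the boxed future must not borrow the &str arguments. The same shape reduced to a skeleton (trait and names here are illustrative only):

    trait Provider {
        fn search(&self, q: &str)
            -> std::pin::Pin<Box<dyn std::future::Future<Output = Vec<String>> + Send + '_>>;
    }

    struct Demo;
    impl Provider for Demo {
        fn search(&self, q: &str)
            -> std::pin::Pin<Box<dyn std::future::Future<Output = Vec<String>> + Send + '_>> {
            let q = q.to_string(); // own the data so the future doesn't borrow `q`
            Box::pin(async move { vec![q] })
        }
    }
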
async fn search_series_impl(
    query: &str,
    _config: &ProviderConfig,
) -> Result<Vec<SeriesCandidate>, String> {
    let client = reqwest::Client::builder()
        .timeout(std::time::Duration::from_secs(15))
        .build()
        .map_err(|e| format!("failed to build HTTP client: {e}"))?;

    let data = graphql_request(
        &client,
        SEARCH_QUERY,
        serde_json::json!({ "search": query }),
    )
    .await?;

    let media = match data
        .get("data")
        .and_then(|d| d.get("Page"))
        .and_then(|p| p.get("media"))
        .and_then(|m| m.as_array())
    {
        Some(media) => media,
        None => return Ok(vec![]),
    };

    let query_lower = query.to_lowercase();

    let mut candidates: Vec<SeriesCandidate> = media
        .iter()
        .filter_map(|m| {
            let id = m.get("id").and_then(|id| id.as_i64())?;
            let title_obj = m.get("title")?;
            let title = title_obj
                .get("english")
                .and_then(|t| t.as_str())
                .or_else(|| title_obj.get("romaji").and_then(|t| t.as_str()))?
                .to_string();

            let description = m
                .get("description")
                .and_then(|d| d.as_str())
                .map(|d| d.replace("\\n", "\n").trim().to_string())
                .filter(|d| !d.is_empty());

            let cover_url = m
                .get("coverImage")
                .and_then(|ci| ci.get("large").or_else(|| ci.get("medium")))
                .and_then(|u| u.as_str())
                .map(String::from);

            let start_year = m
                .get("startDate")
                .and_then(|sd| sd.get("year"))
                .and_then(|y| y.as_i64())
                .map(|y| y as i32);

            let volumes = m
                .get("volumes")
                .and_then(|v| v.as_i64())
                .map(|v| v as i32);

            let chapters = m
                .get("chapters")
                .and_then(|v| v.as_i64())
                .map(|v| v as i32);

            let status = m
                .get("status")
                .and_then(|s| s.as_str())
                .unwrap_or("UNKNOWN")
                .to_string();

            let site_url = m
                .get("siteUrl")
                .and_then(|u| u.as_str())
                .map(String::from);

            let authors = extract_authors(m);

            let confidence = compute_confidence(&title, &query_lower);

            // Use volumes if known, otherwise fall back to chapters count
            let (total_volumes, volume_source) = match volumes {
                Some(v) => (Some(v), "volumes"),
                None => match chapters {
                    Some(c) => (Some(c), "chapters"),
                    None => (None, "unknown"),
                },
            };

            Some(SeriesCandidate {
                external_id: id.to_string(),
                title,
                authors,
                description,
                publishers: vec![],
                start_year,
                total_volumes,
                cover_url,
                external_url: site_url,
                confidence,
                metadata_json: serde_json::json!({
                    "status": status,
                    "chapters": chapters,
                    "volumes": volumes,
                    "volume_source": volume_source,
                }),
            })
        })
        .collect();

    candidates.sort_by(|a, b| b.confidence.partial_cmp(&a.confidence).unwrap_or(std::cmp::Ordering::Equal));
    candidates.truncate(10);
    Ok(candidates)
}
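
Note: `partial_cmp(...).unwrap_or(Equal)` is the usual workaround for f32 not implementing Ord (NaN has no total order). Since Rust 1.62 the same sort can use the standard library's deterministic total order instead:

    candidates.sort_by(|a, b| b.confidence.total_cmp(&a.confidence));
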
async fn get_series_books_impl(
    external_id: &str,
    _config: &ProviderConfig,
) -> Result<Vec<BookCandidate>, String> {
    let id: i64 = external_id
        .parse()
        .map_err(|_| "invalid AniList ID".to_string())?;

    let client = reqwest::Client::builder()
        .timeout(std::time::Duration::from_secs(15))
        .build()
        .map_err(|e| format!("failed to build HTTP client: {e}"))?;

    let data = graphql_request(
        &client,
        DETAIL_QUERY,
        serde_json::json!({ "id": id }),
    )
    .await?;

    let media = match data.get("data").and_then(|d| d.get("Media")) {
        Some(m) => m,
        None => return Ok(vec![]),
    };

    let title_obj = media.get("title").cloned().unwrap_or(serde_json::json!({}));
    let title = title_obj
        .get("english")
        .and_then(|t| t.as_str())
        .or_else(|| title_obj.get("romaji").and_then(|t| t.as_str()))
        .unwrap_or("")
        .to_string();

    let volumes = media
        .get("volumes")
        .and_then(|v| v.as_i64())
        .map(|v| v as i32);

    let chapters = media
        .get("chapters")
        .and_then(|v| v.as_i64())
        .map(|v| v as i32);

    // Use volumes if known, otherwise fall back to chapters count
    let total = volumes.or(chapters);

    let cover_url = media
        .get("coverImage")
        .and_then(|ci| ci.get("large").or_else(|| ci.get("medium")))
        .and_then(|u| u.as_str())
        .map(String::from);

    let description = media
        .get("description")
        .and_then(|d| d.as_str())
        .map(|d| d.replace("\\n", "\n").trim().to_string());

    let authors = extract_authors(media);

    // AniList doesn't have per-volume data — generate entries from volumes count (or chapters as fallback)
    let mut books = Vec::new();
    if let Some(total) = total {
        for vol in 1..=total {
            books.push(BookCandidate {
                external_book_id: format!("{}-vol-{}", external_id, vol),
                title: format!("{} Vol. {}", title, vol),
                volume_number: Some(vol),
                authors: authors.clone(),
                isbn: None,
                summary: if vol == 1 { description.clone() } else { None },
                cover_url: if vol == 1 { cover_url.clone() } else { None },
                page_count: None,
                language: Some("ja".to_string()),
                publish_date: None,
                metadata_json: serde_json::json!({}),
            });
        }
    }

    Ok(books)
}
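
Note: because AniList has no per-volume records, the loop above fabricates one BookCandidate per counted volume, attaching the series summary and cover only to volume 1. For a hypothetical series id "30002" with volumes = 2, the generated external ids would be "30002-vol-1" and "30002-vol-2".
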
fn extract_authors(media: &serde_json::Value) -> Vec<String> {
    let mut authors = Vec::new();
    if let Some(edges) = media
        .get("staff")
        .and_then(|s| s.get("edges"))
        .and_then(|e| e.as_array())
    {
        for edge in edges {
            let role = edge
                .get("role")
                .and_then(|r| r.as_str())
                .unwrap_or("");
            let role_lower = role.to_lowercase();
            if role_lower.contains("story") || role_lower.contains("art") || role_lower.contains("original") {
                if let Some(name) = edge
                    .get("node")
                    .and_then(|n| n.get("name"))
                    .and_then(|n| n.get("full"))
                    .and_then(|f| f.as_str())
                {
                    if !authors.contains(&name.to_string()) {
                        authors.push(name.to_string());
                    }
                }
            }
        }
    }
    authors
}
fn compute_confidence(title: &str, query: &str) -> f32 {
    let title_lower = title.to_lowercase();
    if title_lower == query {
        1.0
    } else if title_lower.starts_with(query) || query.starts_with(&title_lower) {
        0.8
    } else if title_lower.contains(query) || query.contains(&title_lower) {
        0.7
    } else {
        let common: usize = query.chars().filter(|c| title_lower.contains(*c)).count();
        let max_len = query.len().max(title_lower.len()).max(1);
        (common as f32 / max_len as f32).clamp(0.1, 0.6)
    }
}
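
Note: compute_confidence assumes the caller has already lowercased the query (as search_series_impl does), and its character-overlap fallback is order-insensitive, so anagram-like titles score the same. A sanity-check sketch of the tiers (titles are illustrative):

    #[test]
    fn confidence_ordering() {
        // Exact match beats prefix match beats substring match.
        assert_eq!(compute_confidence("Blacksad", "blacksad"), 1.0);
        assert_eq!(compute_confidence("Blacksad Integrale", "blacksad"), 0.8);
        assert_eq!(compute_confidence("Les aventures de Blacksad", "blacksad"), 0.7);
    }
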
apps/api/src/metadata_providers/bedetheque.rs (new file, 671 lines):
@@ -0,0 +1,671 @@
use scraper::{Html, Selector};

use super::{BookCandidate, MetadataProvider, ProviderConfig, SeriesCandidate};

pub struct BedethequeProvider;

impl MetadataProvider for BedethequeProvider {
    fn name(&self) -> &str {
        "bedetheque"
    }

    fn search_series(
        &self,
        query: &str,
        config: &ProviderConfig,
    ) -> std::pin::Pin<
        Box<dyn std::future::Future<Output = Result<Vec<SeriesCandidate>, String>> + Send + '_>,
    > {
        let query = query.to_string();
        let config = config.clone();
        Box::pin(async move { search_series_impl(&query, &config).await })
    }

    fn get_series_books(
        &self,
        external_id: &str,
        config: &ProviderConfig,
    ) -> std::pin::Pin<
        Box<dyn std::future::Future<Output = Result<Vec<BookCandidate>, String>> + Send + '_>,
    > {
        let external_id = external_id.to_string();
        let config = config.clone();
        Box::pin(async move { get_series_books_impl(&external_id, &config).await })
    }
}
fn build_client() -> Result<reqwest::Client, String> {
    reqwest::Client::builder()
        .timeout(std::time::Duration::from_secs(20))
        .user_agent("Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:108.0) Gecko/20100101 Firefox/108.0")
        .default_headers({
            let mut h = reqwest::header::HeaderMap::new();
            h.insert(
                reqwest::header::ACCEPT,
                "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"
                    .parse()
                    .unwrap(),
            );
            h.insert(
                reqwest::header::ACCEPT_LANGUAGE,
                "fr-FR,fr;q=0.9,en;q=0.5".parse().unwrap(),
            );
            h.insert(reqwest::header::REFERER, "https://www.bedetheque.com/".parse().unwrap());
            h
        })
        .build()
        .map_err(|e| format!("failed to build HTTP client: {e}"))
}
/// Remove diacritics for URL construction (bedetheque uses ASCII slugs)
fn normalize_for_url(s: &str) -> String {
    s.chars()
        .map(|c| match c {
            'é' | 'è' | 'ê' | 'ë' | 'É' | 'È' | 'Ê' | 'Ë' => 'e',
            'à' | 'â' | 'ä' | 'À' | 'Â' | 'Ä' => 'a',
            'ù' | 'û' | 'ü' | 'Ù' | 'Û' | 'Ü' => 'u',
            'ô' | 'ö' | 'Ô' | 'Ö' => 'o',
            'î' | 'ï' | 'Î' | 'Ï' => 'i',
            'ç' | 'Ç' => 'c',
            'ñ' | 'Ñ' => 'n',
            _ => c,
        })
        .collect()
}
fn urlencoded(s: &str) -> String {
    let mut result = String::new();
    for byte in s.bytes() {
        match byte {
            b'A'..=b'Z' | b'a'..=b'z' | b'0'..=b'9' | b'-' | b'_' | b'.' | b'~' => {
                result.push(byte as char);
            }
            b' ' => result.push('+'),
            _ => result.push_str(&format!("%{:02X}", byte)),
        }
    }
    result
}
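
Note: normalize_for_url strips French diacritics first, then urlencoded does form-style percent-encoding with '+' for spaces over the remaining UTF-8 bytes. A sanity-check sketch of the two combined (inputs are illustrative):

    #[test]
    fn url_building() {
        assert_eq!(normalize_for_url("Astérix"), "Asterix");
        assert_eq!(urlencoded("Astérix & co"), "Ast%C3%A9rix+%26+co");
        assert_eq!(urlencoded(&normalize_for_url("Astérix & co")), "Asterix+%26+co");
    }
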
// ---------------------------------------------------------------------------
// Search
// ---------------------------------------------------------------------------

async fn search_series_impl(
    query: &str,
    _config: &ProviderConfig,
) -> Result<Vec<SeriesCandidate>, String> {
    let client = build_client()?;

    // Use the full-text search page
    let url = format!(
        "https://www.bedetheque.com/search/tout?RechTexte={}&RechWhere=0",
        urlencoded(&normalize_for_url(query))
    );

    let resp = client
        .get(&url)
        .send()
        .await
        .map_err(|e| format!("Bedetheque request failed: {e}"))?;

    if !resp.status().is_success() {
        let status = resp.status();
        return Err(format!("Bedetheque returned {status}"));
    }

    let html = resp
        .text()
        .await
        .map_err(|e| format!("Failed to read Bedetheque response: {e}"))?;

    // Detect IP blacklist
    if html.contains("<title></title>") || html.contains("<title> </title>") {
        return Err("Bedetheque: IP may be rate-limited, please retry later".to_string());
    }

    // Parse HTML in a block so the non-Send Html type is dropped before any .await
    let candidates = {
        let document = Html::parse_document(&html);
        let link_sel =
            Selector::parse("a[href*='/serie-']").map_err(|e| format!("selector error: {e}"))?;

        let query_lower = query.to_lowercase();
        let mut seen = std::collections::HashSet::new();
        let mut candidates = Vec::new();

        for el in document.select(&link_sel) {
            let href = match el.value().attr("href") {
                Some(h) => h.to_string(),
                None => continue,
            };

            let (series_id, _slug) = match parse_serie_href(&href) {
                Some(v) => v,
                None => continue,
            };

            if !seen.insert(series_id.clone()) {
                continue;
            }

            let title = el.text().collect::<String>().trim().to_string();
            if title.is_empty() {
                continue;
            }

            let confidence = compute_confidence(&title, &query_lower);
            let cover_url = format!(
                "https://www.bedetheque.com/cache/thb_series/PlancheS_{}.jpg",
                series_id
            );

            candidates.push(SeriesCandidate {
                external_id: series_id.clone(),
                title: title.clone(),
                authors: vec![],
                description: None,
                publishers: vec![],
                start_year: None,
                total_volumes: None,
                cover_url: Some(cover_url),
                external_url: Some(href),
                confidence,
                metadata_json: serde_json::json!({}),
            });
        }

        candidates.sort_by(|a, b| {
            b.confidence
                .partial_cmp(&a.confidence)
                .unwrap_or(std::cmp::Ordering::Equal)
        });
        candidates.truncate(10);
        candidates
    }; // document is dropped here — safe to .await below

    // For the top candidates, fetch series details to enrich metadata
    // (limit to top 3 to avoid hammering the site)
    let mut enriched = Vec::new();
    for mut c in candidates {
        if enriched.len() < 3 {
            if let Ok(details) = fetch_series_details(&client, &c.external_id, c.external_url.as_deref()).await {
                if let Some(desc) = details.description {
                    c.description = Some(desc);
                }
                if !details.authors.is_empty() {
                    c.authors = details.authors;
                }
                if !details.publishers.is_empty() {
                    c.publishers = details.publishers;
                }
                if let Some(year) = details.start_year {
                    c.start_year = Some(year);
                }
                if let Some(count) = details.album_count {
                    c.total_volumes = Some(count);
                }
                c.metadata_json = serde_json::json!({
                    "description": c.description,
                    "authors": c.authors,
                    "publishers": c.publishers,
                    "start_year": c.start_year,
                    "genres": details.genres,
                    "status": details.status,
                    "origin": details.origin,
                    "language": details.language,
                });
            }
        }
        enriched.push(c);
    }

    Ok(enriched)
}
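
Note: the block scoping around Html::parse_document is load-bearing: scraper's Html is !Send, so it must be dropped before the future reaches an .await point or the future stops being Send. The same fix in miniature:

    async fn scrape(url: &str) -> Result<usize, reqwest::Error> {
        let body = reqwest::get(url).await?.text().await?;
        let count = {
            let doc = scraper::Html::parse_document(&body);
            doc.select(&scraper::Selector::parse("a").unwrap()).count()
        }; // doc dropped here, so the future is Send past this point
        Ok(count)
    }
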
/// Parse serie URL to extract (id, slug)
fn parse_serie_href(href: &str) -> Option<(String, String)> {
    // Patterns:
    // https://www.bedetheque.com/serie-3-BD-Blacksad.html
    // /serie-3-BD-Blacksad.html
    let re = regex::Regex::new(r"/serie-(\d+)-[A-Za-z]+-(.+?)(?:__\d+)?\.html").ok()?;
    let caps = re.captures(href)?;
    Some((caps[1].to_string(), caps[2].to_string()))
}
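
Note: per the documented patterns, parse_serie_href("/serie-3-BD-Blacksad.html") returns Some(("3", "Blacksad")). The Regex is recompiled on every call; caching it would avoid that, for example (assuming the once_cell crate is available):

    static SERIE_RE: once_cell::sync::Lazy<regex::Regex> = once_cell::sync::Lazy::new(|| {
        regex::Regex::new(r"/serie-(\d+)-[A-Za-z]+-(.+?)(?:__\d+)?\.html").unwrap()
    });
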
struct SeriesDetails {
|
||||||
|
description: Option<String>,
|
||||||
|
authors: Vec<String>,
|
||||||
|
publishers: Vec<String>,
|
||||||
|
start_year: Option<i32>,
|
||||||
|
album_count: Option<i32>,
|
||||||
|
genres: Vec<String>,
|
||||||
|
status: Option<String>,
|
||||||
|
origin: Option<String>,
|
||||||
|
language: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn fetch_series_details(
|
||||||
|
client: &reqwest::Client,
|
||||||
|
series_id: &str,
|
||||||
|
series_url: Option<&str>,
|
||||||
|
) -> Result<SeriesDetails, String> {
|
||||||
|
// Build URL — append __10000 to get all albums on one page
|
||||||
|
let url = match series_url {
|
||||||
|
Some(u) => {
|
||||||
|
// Replace .html with __10000.html
|
||||||
|
u.replace(".html", "__10000.html")
|
||||||
|
}
|
||||||
|
None => format!(
|
||||||
|
"https://www.bedetheque.com/serie-{}-BD-Serie__10000.html",
|
||||||
|
series_id
|
||||||
|
),
|
||||||
|
};
|
||||||
|
|
||||||
|
let resp = client
|
||||||
|
.get(&url)
|
||||||
|
.send()
|
||||||
|
.await
|
||||||
|
.map_err(|e| format!("Failed to fetch series page: {e}"))?;
|
||||||
|
|
||||||
|
if !resp.status().is_success() {
|
||||||
|
return Err(format!("Series page returned {}", resp.status()));
|
||||||
|
}
|
||||||
|
|
||||||
|
let html = resp
|
||||||
|
.text()
|
||||||
|
.await
|
||||||
|
.map_err(|e| format!("Failed to read series page: {e}"))?;
|
||||||
|
|
||||||
|
let doc = Html::parse_document(&html);
|
||||||
|
let mut details = SeriesDetails {
|
||||||
|
description: None,
|
||||||
|
authors: vec![],
|
||||||
|
publishers: vec![],
|
||||||
|
start_year: None,
|
||||||
|
album_count: None,
|
||||||
|
genres: vec![],
|
||||||
|
status: None,
|
||||||
|
origin: None,
|
||||||
|
language: None,
|
||||||
|
};
|
||||||
|
|
||||||
|
// Description from <meta name="description"> — format: "Tout sur la série {name} : {description}"
|
||||||
|
if let Ok(sel) = Selector::parse(r#"meta[name="description"]"#) {
|
||||||
|
if let Some(el) = doc.select(&sel).next() {
|
||||||
|
if let Some(content) = el.value().attr("content") {
|
||||||
|
let desc = content.trim().to_string();
|
||||||
|
// Strip the "Tout sur la série ... : " prefix
|
||||||
|
let cleaned = if let Some(pos) = desc.find(" : ") {
|
||||||
|
desc[pos + 3..].trim().to_string()
|
||||||
|
} else {
|
||||||
|
desc
|
||||||
|
};
|
||||||
|
if !cleaned.is_empty() {
|
||||||
|
details.description = Some(cleaned);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract authors from itemprop="author" and itemprop="illustrator" (deduplicated)
|
||||||
|
{
|
||||||
|
let mut authors_set = std::collections::HashSet::new();
|
||||||
|
for attr in ["author", "illustrator"] {
|
||||||
|
if let Ok(sel) = Selector::parse(&format!(r#"[itemprop="{attr}"]"#)) {
|
||||||
|
for el in doc.select(&sel) {
|
||||||
|
let name = el.text().collect::<String>().trim().to_string();
|
||||||
|
// Names are "Last, First" — normalize to "First Last"
|
||||||
|
let normalized = if let Some((last, first)) = name.split_once(',') {
|
||||||
|
format!("{} {}", first.trim(), last.trim())
|
||||||
|
} else {
|
||||||
|
name
|
||||||
|
};
|
||||||
|
if !normalized.is_empty() && is_real_author(&normalized) {
|
||||||
|
authors_set.insert(normalized);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
details.authors = authors_set.into_iter().collect();
|
||||||
|
details.authors.sort();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract publishers from itemprop="publisher" (deduplicated)
|
||||||
|
{
|
||||||
|
let mut publishers_set = std::collections::HashSet::new();
|
||||||
|
if let Ok(sel) = Selector::parse(r#"[itemprop="publisher"]"#) {
|
||||||
|
for el in doc.select(&sel) {
|
||||||
|
let name = el.text().collect::<String>().trim().to_string();
|
||||||
|
if !name.is_empty() {
|
||||||
|
publishers_set.insert(name);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
details.publishers = publishers_set.into_iter().collect();
|
||||||
|
details.publishers.sort();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract series-level info from <li><label>X :</label>value</li> blocks
|
||||||
|
// Genre: <li><label>Genre :</label><span class="style-serie">Animalier, Aventure, Humour</span></li>
|
||||||
|
if let Ok(sel) = Selector::parse("span.style-serie") {
|
||||||
|
if let Some(el) = doc.select(&sel).next() {
|
||||||
|
let text = el.text().collect::<String>();
|
||||||
|
details.genres = text
|
||||||
|
.split(',')
|
||||||
|
.map(|s| s.trim().to_string())
|
||||||
|
.filter(|s| !s.is_empty())
|
||||||
|
.collect();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parution: <li><label>Parution :</label><span class="parution-serie">Série finie</span></li>
|
||||||
|
if let Ok(sel) = Selector::parse("span.parution-serie") {
|
||||||
|
if let Some(el) = doc.select(&sel).next() {
|
||||||
|
let text = el.text().collect::<String>().trim().to_string();
|
||||||
|
if !text.is_empty() {
|
||||||
|
details.status = Some(text);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Origine and Langue from page text (no dedicated CSS class)
|
||||||
|
let page_text = doc.root_element().text().collect::<String>();
|
||||||
|
|
||||||
|
if let Some(val) = extract_info_value(&page_text, "Origine") {
|
||||||
|
let val = val.lines().next().unwrap_or(val).trim();
|
||||||
|
if !val.is_empty() {
|
||||||
|
details.origin = Some(val.to_string());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Some(val) = extract_info_value(&page_text, "Langue") {
|
||||||
|
let val = val.lines().next().unwrap_or(val).trim();
|
||||||
|
if !val.is_empty() {
|
||||||
|
details.language = Some(val.to_string());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Album count from serie-info text (e.g. "Tomes : 8")
|
||||||
|
if let Ok(re) = regex::Regex::new(r"Tomes?\s*:\s*(\d+)") {
|
||||||
|
if let Some(caps) = re.captures(&page_text) {
|
||||||
|
if let Ok(n) = caps[1].parse::<i32>() {
|
||||||
|
details.album_count = Some(n);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Start year from first <meta itemprop="datePublished" content="YYYY-MM-DD">
|
||||||
|
if let Ok(sel) = Selector::parse(r#"[itemprop="datePublished"]"#) {
|
||||||
|
if let Some(el) = doc.select(&sel).next() {
|
||||||
|
if let Some(content) = el.value().attr("content") {
|
||||||
|
// content is "YYYY-MM-DD"
|
||||||
|
if let Some(year_str) = content.split('-').next() {
|
||||||
|
if let Ok(year) = year_str.parse::<i32>() {
|
||||||
|
details.start_year = Some(year);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(details)
|
||||||
|
}

/// Extract value after a label like "Scénario : Jean-Claude" → "Jean-Claude"
fn extract_info_value<'a>(text: &'a str, label: &str) -> Option<&'a str> {
    // Handle both "Label :" and "Label:"
    let patterns = [
        format!("{} :", label),
        format!("{}:", label),
        format!("{} :", &label.to_lowercase()),
    ];
    for pat in &patterns {
        if let Some(pos) = text.find(pat.as_str()) {
            let val = text[pos + pat.len()..].trim();
            if !val.is_empty() {
                return Some(val);
            }
        }
    }
    None
}
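
// Illustrative sketch (an addition, not from the original diff): how
// extract_info_value behaves on flattened page text. It returns the whole
// remainder of the text after the label, which is why fetch_series_details
// above trims the result to its first line.
#[cfg(test)]
mod extract_info_value_tests {
    use super::extract_info_value;

    #[test]
    fn finds_label_with_and_without_space_before_colon() {
        let text = "Origine : Europe\nLangue: Français\n";
        assert!(extract_info_value(text, "Origine").unwrap().starts_with("Europe"));
        assert!(extract_info_value(text, "Langue").unwrap().starts_with("Français"));
        assert_eq!(extract_info_value(text, "Parution"), None);
    }
}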

// ---------------------------------------------------------------------------
// Get series books
// ---------------------------------------------------------------------------

async fn get_series_books_impl(
    external_id: &str,
    _config: &ProviderConfig,
) -> Result<Vec<BookCandidate>, String> {
    let client = build_client()?;

    // We need to find the series URL — try a direct fetch.
    // external_id is the numeric series ID; we fetch the series page to get
    // the album list.
    let url = format!(
        "https://www.bedetheque.com/serie-{}-BD-Serie__10000.html",
        external_id
    );

    let resp = client
        .get(&url)
        .send()
        .await
        .map_err(|e| format!("Failed to fetch series: {e}"))?;

    // If the generic slug fails, try without the slug part (bedetheque redirects)
    let html = if resp.status().is_success() {
        resp.text().await.map_err(|e| format!("Failed to read: {e}"))?
    } else {
        // Try alternative URL pattern
        let alt_url = format!(
            "https://www.bedetheque.com/serie-{}__10000.html",
            external_id
        );
        let resp2 = client
            .get(&alt_url)
            .send()
            .await
            .map_err(|e| format!("Failed to fetch series (alt): {e}"))?;
        if !resp2.status().is_success() {
            return Err(format!("Series page not found for id {external_id}"));
        }
        resp2.text().await.map_err(|e| format!("Failed to read: {e}"))?
    };

    if html.contains("<title></title>") {
        return Err("Bedetheque: IP may be rate-limited".to_string());
    }

    let doc = Html::parse_document(&html);
    let mut books = Vec::new();

    // Each album block starts before a .album-main div.
    // The cover image (<img itemprop="image">) is OUTSIDE .album-main (sibling),
    // so we iterate over a broader parent. But the simplest approach: parse all
    // itemprop elements relative to each .album-main, plus pick covers separately.
    let album_sel = Selector::parse(".album-main").map_err(|e| format!("selector: {e}"))?;

    // Pre-collect cover images — they appear in <img itemprop="image"> before each .album-main
    // and link to an album URL containing the book ID
    let cover_sel = Selector::parse(r#"img[itemprop="image"]"#).map_err(|e| format!("selector: {e}"))?;
    let covers: Vec<String> = doc.select(&cover_sel)
        .filter_map(|el| el.value().attr("src").map(|s| {
            if s.starts_with("http") { s.to_string() } else { format!("https://www.bedetheque.com{}", s) }
        }))
        .collect();

    static RE_TOME: std::sync::LazyLock<regex::Regex> =
        std::sync::LazyLock::new(|| regex::Regex::new(r"(?i)-Tome-\d+-").unwrap());
    static RE_BOOK_ID: std::sync::LazyLock<regex::Regex> =
        std::sync::LazyLock::new(|| regex::Regex::new(r"-(\d+)\.html").unwrap());
    static RE_VOLUME: std::sync::LazyLock<regex::Regex> =
        std::sync::LazyLock::new(|| regex::Regex::new(r"(?i)Tome-(\d+)-").unwrap());

    for (idx, album_el) in doc.select(&album_sel).enumerate() {
        // Title from <a class="titre" title="..."> — the title attribute is clean
        let title_sel = Selector::parse("a.titre").ok();
        let title_el = title_sel.as_ref().and_then(|s| album_el.select(s).next());
        let title = title_el
            .and_then(|el| el.value().attr("title"))
            .unwrap_or("")
            .trim()
            .to_string();

        if title.is_empty() {
            continue;
        }

        // External book ID from album URL (e.g. "...-1063.html")
        let album_url = title_el.and_then(|el| el.value().attr("href")).unwrap_or("");

        // Only keep main tomes — their URLs contain "Tome-{N}-"
        // Skip hors-série (HS), intégrales (INT/INTFL), romans, coffrets, etc.
        if !RE_TOME.is_match(album_url) {
            continue;
        }

        let external_book_id = RE_BOOK_ID
            .captures(album_url)
            .map(|c| c[1].to_string())
            .unwrap_or_default();

        // Volume number from URL pattern "Tome-{N}-" or from itemprop name
        let volume_number = RE_VOLUME
            .captures(album_url)
            .and_then(|c| c[1].parse::<i32>().ok())
            .or_else(|| extract_volume_from_title(&title));

        // Authors from itemprop="author" and itemprop="illustrator"
        let mut authors = Vec::new();
        let author_sel = Selector::parse(r#"[itemprop="author"]"#).ok();
        let illustrator_sel = Selector::parse(r#"[itemprop="illustrator"]"#).ok();
        for sel in [&author_sel, &illustrator_sel].into_iter().flatten() {
            for el in album_el.select(sel) {
                let name = el.text().collect::<String>().trim().to_string();
                // Names are "Last, First" format — normalize to "First Last"
                let normalized = if let Some((last, first)) = name.split_once(',') {
                    format!("{} {}", first.trim(), last.trim())
                } else {
                    name
                };
                if !normalized.is_empty() && is_real_author(&normalized) && !authors.contains(&normalized) {
                    authors.push(normalized);
                }
            }
        }

        // ISBN from <span itemprop="isbn">
        let isbn = Selector::parse(r#"[itemprop="isbn"]"#)
            .ok()
            .and_then(|s| album_el.select(&s).next())
            .map(|el| el.text().collect::<String>().trim().to_string())
            .filter(|s| !s.is_empty());

        // Page count from <span itemprop="numberOfPages">
        let page_count = Selector::parse(r#"[itemprop="numberOfPages"]"#)
            .ok()
            .and_then(|s| album_el.select(&s).next())
            .and_then(|el| el.text().collect::<String>().trim().parse::<i32>().ok());

        // Publish date from <meta itemprop="datePublished" content="YYYY-MM-DD">
        let publish_date = Selector::parse(r#"[itemprop="datePublished"]"#)
            .ok()
            .and_then(|s| album_el.select(&s).next())
            .and_then(|el| el.value().attr("content").map(|c| c.trim().to_string()))
            .filter(|s| !s.is_empty());

        // Cover from pre-collected covers (same index)
        let cover_url = covers.get(idx).cloned();

        books.push(BookCandidate {
            external_book_id,
            title,
            volume_number,
            authors,
            isbn,
            summary: None,
            cover_url,
            page_count,
            language: Some("fr".to_string()),
            publish_date,
            metadata_json: serde_json::json!({}),
        });
    }

    books.sort_by_key(|b| b.volume_number.unwrap_or(999));
    Ok(books)
}

/// Filter out placeholder author names from Bédéthèque
fn is_real_author(name: &str) -> bool {
    !name.starts_with('<') && !name.ends_with('>') && name != "Collectif"
}

fn extract_volume_from_title(title: &str) -> Option<i32> {
    let patterns = [
        r"(?i)(?:tome|t\.)\s*(\d+)",
        r"(?i)(?:vol(?:ume)?\.?)\s*(\d+)",
        r"#\s*(\d+)",
    ];
    for pattern in &patterns {
        if let Ok(re) = regex::Regex::new(pattern) {
            if let Some(caps) = re.captures(title) {
                if let Ok(n) = caps[1].parse::<i32>() {
                    return Some(n);
                }
            }
        }
    }
    None
}
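
// A brief sketch (added for illustration; the sample titles are made up) of
// the marker styles extract_volume_from_title recognizes.
#[cfg(test)]
mod extract_volume_from_title_tests {
    use super::extract_volume_from_title;

    #[test]
    fn recognizes_tome_volume_and_hash_markers() {
        assert_eq!(extract_volume_from_title("Blacksad Tome 4"), Some(4));
        assert_eq!(extract_volume_from_title("Blacksad T. 4"), Some(4));
        assert_eq!(extract_volume_from_title("Blacksad Vol. 4"), Some(4));
        assert_eq!(extract_volume_from_title("Blacksad #4"), Some(4));
        assert_eq!(extract_volume_from_title("Blacksad"), None);
    }
}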

/// Normalize a title by removing French articles (leading or in parentheses)
/// and extra whitespace/punctuation, so that "Les Légendaires - Résistance"
/// and "Légendaires (Les) - Résistance" produce the same canonical form.
fn normalize_title(s: &str) -> String {
    let lower = s.to_lowercase();
    // Remove articles in parentheses: "(les)", "(la)", "(le)", "(l')", "(un)", "(une)", "(des)"
    let re_parens = regex::Regex::new(r"\s*\((?:les?|la|l'|une?|des|du|d')\)").unwrap();
    let cleaned = re_parens.replace_all(&lower, "");
    // Remove leading articles: "les ", "la ", "le ", "l'", "un ", "une ", "des ", "du ", "d'"
    let re_leading = regex::Regex::new(r"^(?:les?|la|l'|une?|des|du|d')\s+").unwrap();
    let cleaned = re_leading.replace(&cleaned, "");
    // Collapse runs of whitespace into single spaces
    let re_spaces = regex::Regex::new(r"\s+").unwrap();
    re_spaces.replace_all(cleaned.trim(), " ").to_string()
}
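
// Sketch (added here, not in the original diff) pinning down the canonical-form
// claim in the doc comment: both article placements normalize identically.
#[cfg(test)]
mod normalize_title_tests {
    use super::normalize_title;

    #[test]
    fn leading_and_parenthesized_articles_are_equivalent() {
        assert_eq!(
            normalize_title("Les Légendaires - Résistance"),
            normalize_title("Légendaires (Les) - Résistance")
        );
        assert_eq!(normalize_title("La Quête"), normalize_title("Quête (La)"));
    }
}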

fn compute_confidence(title: &str, query: &str) -> f32 {
    let title_lower = title.to_lowercase();
    let query_lower = query.to_lowercase();
    if title_lower == query_lower {
        return 1.0;
    }

    // Try with normalized forms (handles Bedetheque's "Name (Article)" convention)
    let title_norm = normalize_title(title);
    let query_norm = normalize_title(query);
    if title_norm == query_norm {
        return 1.0;
    }

    if title_lower.starts_with(&query_lower) || query_lower.starts_with(&title_lower)
        || title_norm.starts_with(&query_norm) || query_norm.starts_with(&title_norm)
    {
        0.85
    } else if title_lower.contains(&query_lower) || query_lower.contains(&title_lower)
        || title_norm.contains(&query_norm) || query_norm.contains(&title_norm)
    {
        0.7
    } else {
        let common: usize = query_lower
            .chars()
            .filter(|c| title_lower.contains(*c))
            .count();
        let max_len = query_lower.len().max(title_lower.len()).max(1);
        (common as f32 / max_len as f32).clamp(0.1, 0.6)
    }
}

apps/api/src/metadata_providers/comicvine.rs (new file, 267 lines)
@@ -0,0 +1,267 @@
use super::{BookCandidate, MetadataProvider, ProviderConfig, SeriesCandidate};

pub struct ComicVineProvider;

impl MetadataProvider for ComicVineProvider {
    fn name(&self) -> &str {
        "comicvine"
    }

    fn search_series(
        &self,
        query: &str,
        config: &ProviderConfig,
    ) -> std::pin::Pin<
        Box<dyn std::future::Future<Output = Result<Vec<SeriesCandidate>, String>> + Send + '_>,
    > {
        let query = query.to_string();
        let config = config.clone();
        Box::pin(async move { search_series_impl(&query, &config).await })
    }

    fn get_series_books(
        &self,
        external_id: &str,
        config: &ProviderConfig,
    ) -> std::pin::Pin<
        Box<dyn std::future::Future<Output = Result<Vec<BookCandidate>, String>> + Send + '_>,
    > {
        let external_id = external_id.to_string();
        let config = config.clone();
        Box::pin(async move { get_series_books_impl(&external_id, &config).await })
    }
}

fn build_client() -> Result<reqwest::Client, String> {
    reqwest::Client::builder()
        .timeout(std::time::Duration::from_secs(15))
        .user_agent("StripstreamLibrarian/1.0")
        .build()
        .map_err(|e| format!("failed to build HTTP client: {e}"))
}

async fn search_series_impl(
    query: &str,
    config: &ProviderConfig,
) -> Result<Vec<SeriesCandidate>, String> {
    let api_key = config
        .api_key
        .as_deref()
        .filter(|k| !k.is_empty())
        .ok_or_else(|| "ComicVine requires an API key. Configure it in Settings > Integrations.".to_string())?;

    let client = build_client()?;

    let url = format!(
        "https://comicvine.gamespot.com/api/search/?api_key={}&format=json&resources=volume&query={}&limit=20",
        api_key,
        urlencoded(query)
    );

    let resp = client
        .get(&url)
        .send()
        .await
        .map_err(|e| format!("ComicVine request failed: {e}"))?;

    if !resp.status().is_success() {
        let status = resp.status();
        let text = resp.text().await.unwrap_or_default();
        return Err(format!("ComicVine returned {status}: {text}"));
    }

    let data: serde_json::Value = resp
        .json()
        .await
        .map_err(|e| format!("Failed to parse ComicVine response: {e}"))?;

    let results = match data.get("results").and_then(|r| r.as_array()) {
        Some(results) => results,
        None => return Ok(vec![]),
    };

    let query_lower = query.to_lowercase();

    let mut candidates: Vec<SeriesCandidate> = results
        .iter()
        .filter_map(|vol| {
            let name = vol.get("name").and_then(|n| n.as_str())?.to_string();
            let id = vol.get("id").and_then(|id| id.as_i64())?;
            let description = vol
                .get("description")
                .and_then(|d| d.as_str())
                .map(strip_html);
            let publisher = vol
                .get("publisher")
                .and_then(|p| p.get("name"))
                .and_then(|n| n.as_str())
                .map(String::from);
            let start_year = vol
                .get("start_year")
                .and_then(|y| y.as_str())
                .and_then(|y| y.parse::<i32>().ok());
            let count_of_issues = vol
                .get("count_of_issues")
                .and_then(|c| c.as_i64())
                .map(|c| c as i32);
            let cover_url = vol
                .get("image")
                .and_then(|img| img.get("medium_url").or_else(|| img.get("small_url")))
                .and_then(|u| u.as_str())
                .map(String::from);
            let site_url = vol
                .get("site_detail_url")
                .and_then(|u| u.as_str())
                .map(String::from);

            let confidence = compute_confidence(&name, &query_lower);

            Some(SeriesCandidate {
                external_id: id.to_string(),
                title: name,
                authors: vec![],
                description,
                publishers: publisher.into_iter().collect(),
                start_year,
                total_volumes: count_of_issues,
                cover_url,
                external_url: site_url,
                confidence,
                metadata_json: serde_json::json!({}),
            })
        })
        .collect();

    candidates.sort_by(|a, b| b.confidence.partial_cmp(&a.confidence).unwrap_or(std::cmp::Ordering::Equal));
    candidates.truncate(10);
    Ok(candidates)
}

async fn get_series_books_impl(
    external_id: &str,
    config: &ProviderConfig,
) -> Result<Vec<BookCandidate>, String> {
    let api_key = config
        .api_key
        .as_deref()
        .filter(|k| !k.is_empty())
        .ok_or_else(|| "ComicVine requires an API key".to_string())?;

    let client = build_client()?;

    let url = format!(
        "https://comicvine.gamespot.com/api/issues/?api_key={}&format=json&filter=volume:{}&sort=issue_number:asc&limit=100&field_list=id,name,issue_number,description,image,cover_date,site_detail_url",
        api_key,
        external_id
    );

    let resp = client
        .get(&url)
        .send()
        .await
        .map_err(|e| format!("ComicVine request failed: {e}"))?;

    if !resp.status().is_success() {
        let status = resp.status();
        let text = resp.text().await.unwrap_or_default();
        return Err(format!("ComicVine returned {status}: {text}"));
    }

    let data: serde_json::Value = resp
        .json()
        .await
        .map_err(|e| format!("Failed to parse ComicVine response: {e}"))?;

    let results = match data.get("results").and_then(|r| r.as_array()) {
        Some(results) => results,
        None => return Ok(vec![]),
    };

    let books: Vec<BookCandidate> = results
        .iter()
        .filter_map(|issue| {
            let id = issue.get("id").and_then(|id| id.as_i64())?;
            let name = issue
                .get("name")
                .and_then(|n| n.as_str())
                .unwrap_or("")
                .to_string();
            let issue_number = issue
                .get("issue_number")
                .and_then(|n| n.as_str())
                .and_then(|n| n.parse::<f64>().ok())
                .map(|n| n as i32);
            let description = issue
                .get("description")
                .and_then(|d| d.as_str())
                .map(strip_html);
            let cover_url = issue
                .get("image")
                .and_then(|img| img.get("medium_url").or_else(|| img.get("small_url")))
                .and_then(|u| u.as_str())
                .map(String::from);
            let cover_date = issue
                .get("cover_date")
                .and_then(|d| d.as_str())
                .map(String::from);

            Some(BookCandidate {
                external_book_id: id.to_string(),
                title: name,
                volume_number: issue_number,
                authors: vec![],
                isbn: None,
                summary: description,
                cover_url,
                page_count: None,
                language: None,
                publish_date: cover_date,
                metadata_json: serde_json::json!({}),
            })
        })
        .collect();

    Ok(books)
}

fn strip_html(s: &str) -> String {
    let mut result = String::new();
    let mut in_tag = false;
    for ch in s.chars() {
        match ch {
            '<' => in_tag = true,
            '>' => in_tag = false,
            _ if !in_tag => result.push(ch),
            _ => {}
        }
    }
    result.trim().to_string()
}
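
// Illustrative sketch (added, not part of the original diff): strip_html is a
// simple state machine that drops anything between '<' and '>'; it does not
// decode HTML entities.
#[cfg(test)]
mod strip_html_tests {
    use super::strip_html;

    #[test]
    fn removes_tags_but_keeps_text() {
        assert_eq!(strip_html("<p>Hello <b>world</b></p>"), "Hello world");
        assert_eq!(strip_html("plain text"), "plain text");
        // Entities are left untouched — a known limitation of this approach.
        assert_eq!(strip_html("a &amp; b"), "a &amp; b");
    }
}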

fn compute_confidence(title: &str, query: &str) -> f32 {
    let title_lower = title.to_lowercase();
    if title_lower == query {
        1.0
    } else if title_lower.starts_with(query) || query.starts_with(&title_lower) {
        0.8
    } else if title_lower.contains(query) || query.contains(&title_lower) {
        0.7
    } else {
        let common: usize = query.chars().filter(|c| title_lower.contains(*c)).count();
        let max_len = query.len().max(title_lower.len()).max(1);
        (common as f32 / max_len as f32).clamp(0.1, 0.6)
    }
}

fn urlencoded(s: &str) -> String {
    let mut result = String::new();
    for byte in s.bytes() {
        match byte {
            b'A'..=b'Z' | b'a'..=b'z' | b'0'..=b'9' | b'-' | b'_' | b'.' | b'~' => {
                result.push(byte as char);
            }
            _ => result.push_str(&format!("%{:02X}", byte)),
        }
    }
    result
}
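
// Sketch (added for illustration): urlencoded percent-encodes every byte
// outside the RFC 3986 unreserved set, including spaces and UTF-8 bytes.
#[cfg(test)]
mod urlencoded_tests {
    use super::urlencoded;

    #[test]
    fn encodes_reserved_and_multibyte_characters() {
        assert_eq!(urlencoded("Batman"), "Batman");
        assert_eq!(urlencoded("a b"), "a%20b");
        // 'é' is two bytes in UTF-8: 0xC3 0xA9.
        assert_eq!(urlencoded("é"), "%C3%A9");
    }
}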

apps/api/src/metadata_providers/google_books.rs (new file, 472 lines)
@@ -0,0 +1,472 @@
use super::{BookCandidate, MetadataProvider, ProviderConfig, SeriesCandidate};

pub struct GoogleBooksProvider;

impl MetadataProvider for GoogleBooksProvider {
    fn name(&self) -> &str {
        "google_books"
    }

    fn search_series(
        &self,
        query: &str,
        config: &ProviderConfig,
    ) -> std::pin::Pin<
        Box<dyn std::future::Future<Output = Result<Vec<SeriesCandidate>, String>> + Send + '_>,
    > {
        let query = query.to_string();
        let config = config.clone();
        Box::pin(async move { search_series_impl(&query, &config).await })
    }

    fn get_series_books(
        &self,
        external_id: &str,
        config: &ProviderConfig,
    ) -> std::pin::Pin<
        Box<dyn std::future::Future<Output = Result<Vec<BookCandidate>, String>> + Send + '_>,
    > {
        let external_id = external_id.to_string();
        let config = config.clone();
        Box::pin(async move { get_series_books_impl(&external_id, &config).await })
    }
}

async fn search_series_impl(
    query: &str,
    config: &ProviderConfig,
) -> Result<Vec<SeriesCandidate>, String> {
    let client = reqwest::Client::builder()
        .timeout(std::time::Duration::from_secs(15))
        .build()
        .map_err(|e| format!("failed to build HTTP client: {e}"))?;

    let search_query = format!("intitle:{}", query);
    let mut url = format!(
        "https://www.googleapis.com/books/v1/volumes?q={}&maxResults=20&printType=books&langRestrict={}",
        urlencoded(&search_query),
        urlencoded(&config.language),
    );
    if let Some(ref key) = config.api_key {
        url.push_str(&format!("&key={}", key));
    }

    let resp = client
        .get(&url)
        .send()
        .await
        .map_err(|e| format!("Google Books request failed: {e}"))?;

    if !resp.status().is_success() {
        let status = resp.status();
        let text = resp.text().await.unwrap_or_default();
        return Err(format!("Google Books returned {status}: {text}"));
    }

    let data: serde_json::Value = resp
        .json()
        .await
        .map_err(|e| format!("Failed to parse Google Books response: {e}"))?;

    let items = match data.get("items").and_then(|i| i.as_array()) {
        Some(items) => items,
        None => return Ok(vec![]),
    };

    // Group volumes by series name to produce series candidates
    let query_lower = query.to_lowercase();
    let mut series_map: std::collections::HashMap<String, SeriesCandidateBuilder> =
        std::collections::HashMap::new();

    for item in items {
        let volume_info = match item.get("volumeInfo") {
            Some(vi) => vi,
            None => continue,
        };

        let title = volume_info
            .get("title")
            .and_then(|t| t.as_str())
            .unwrap_or("")
            .to_string();
        let authors: Vec<String> = volume_info
            .get("authors")
            .and_then(|a| a.as_array())
            .map(|arr| {
                arr.iter()
                    .filter_map(|v| v.as_str().map(String::from))
                    .collect()
            })
            .unwrap_or_default();
        let publisher = volume_info
            .get("publisher")
            .and_then(|p| p.as_str())
            .map(String::from);
        let published_date = volume_info
            .get("publishedDate")
            .and_then(|d| d.as_str())
            .map(String::from);
        let description = volume_info
            .get("description")
            .and_then(|d| d.as_str())
            .map(String::from);

        // Extract series info from title or seriesInfo
        let series_name = volume_info
            .get("seriesInfo")
            .and_then(|si| si.get("title"))
            .and_then(|t| t.as_str())
            .map(String::from)
            .unwrap_or_else(|| extract_series_name(&title));

        let cover_url = volume_info
            .get("imageLinks")
            .and_then(|il| {
                il.get("thumbnail")
                    .or_else(|| il.get("smallThumbnail"))
            })
            .and_then(|u| u.as_str())
            .map(|s| s.replace("http://", "https://"));

        let google_id = item
            .get("id")
            .and_then(|id| id.as_str())
            .unwrap_or("")
            .to_string();

        let entry = series_map
            .entry(series_name.clone())
            .or_insert_with(|| SeriesCandidateBuilder {
                title: series_name.clone(),
                authors: vec![],
                description: None,
                publishers: vec![],
                start_year: None,
                volume_count: 0,
                cover_url: None,
                external_id: google_id.clone(),
                external_url: None,
                metadata_json: serde_json::json!({}),
            });

        entry.volume_count += 1;

        // Merge authors
        for a in &authors {
            if !entry.authors.contains(a) {
                entry.authors.push(a.clone());
            }
        }

        // Set description if not yet set
        if entry.description.is_none() {
            entry.description = description;
        }

        // Merge publisher
        if let Some(ref pub_name) = publisher {
            if !entry.publishers.contains(pub_name) {
                entry.publishers.push(pub_name.clone());
            }
        }

        // Extract year
        if let Some(ref date) = published_date {
            if let Some(year) = extract_year(date) {
                if entry.start_year.is_none() || entry.start_year.unwrap() > year {
                    entry.start_year = Some(year);
                }
            }
        }

        if entry.cover_url.is_none() {
            entry.cover_url = cover_url;
        }

        entry.external_url = Some(format!(
            "https://books.google.com/books?id={}",
            google_id
        ));
    }

    let mut candidates: Vec<SeriesCandidate> = series_map
        .into_values()
        .map(|b| {
            let confidence = compute_confidence(&b.title, &query_lower);
            SeriesCandidate {
                external_id: b.external_id,
                title: b.title,
                authors: b.authors,
                description: b.description,
                publishers: b.publishers,
                start_year: b.start_year,
                total_volumes: if b.volume_count > 1 {
                    Some(b.volume_count)
                } else {
                    None
                },
                cover_url: b.cover_url,
                external_url: b.external_url,
                confidence,
                metadata_json: b.metadata_json,
            }
        })
        .collect();

    candidates.sort_by(|a, b| b.confidence.partial_cmp(&a.confidence).unwrap_or(std::cmp::Ordering::Equal));
    candidates.truncate(10);

    Ok(candidates)
}

async fn get_series_books_impl(
    external_id: &str,
    config: &ProviderConfig,
) -> Result<Vec<BookCandidate>, String> {
    let client = reqwest::Client::builder()
        .timeout(std::time::Duration::from_secs(15))
        .build()
        .map_err(|e| format!("failed to build HTTP client: {e}"))?;

    // First fetch the volume to get its series info
    let mut url = format!(
        "https://www.googleapis.com/books/v1/volumes/{}",
        external_id
    );
    if let Some(ref key) = config.api_key {
        url.push_str(&format!("?key={}", key));
    }

    let resp = client
        .get(&url)
        .send()
        .await
        .map_err(|e| format!("Google Books request failed: {e}"))?;

    if !resp.status().is_success() {
        let status = resp.status();
        let text = resp.text().await.unwrap_or_default();
        return Err(format!("Google Books returned {status}: {text}"));
    }

    let volume: serde_json::Value = resp
        .json()
        .await
        .map_err(|e| format!("Failed to parse Google Books response: {e}"))?;

    let volume_info = volume.get("volumeInfo").cloned().unwrap_or(serde_json::json!({}));
    let title = volume_info
        .get("title")
        .and_then(|t| t.as_str())
        .unwrap_or("");

    // Search for more volumes in this series
    let series_name = extract_series_name(title);
    let search_query = format!("intitle:{}", series_name);
    let mut search_url = format!(
        "https://www.googleapis.com/books/v1/volumes?q={}&maxResults=40&printType=books&langRestrict={}",
        urlencoded(&search_query),
        urlencoded(&config.language),
    );
    if let Some(ref key) = config.api_key {
        search_url.push_str(&format!("&key={}", key));
    }

    let resp = client
        .get(&search_url)
        .send()
        .await
        .map_err(|e| format!("Google Books search failed: {e}"))?;

    if !resp.status().is_success() {
        // Return just the single volume as a book
        return Ok(vec![volume_to_book_candidate(&volume)]);
    }

    let data: serde_json::Value = resp
        .json()
        .await
        .map_err(|e| format!("Failed to parse search response: {e}"))?;

    let items = match data.get("items").and_then(|i| i.as_array()) {
        Some(items) => items,
        None => return Ok(vec![volume_to_book_candidate(&volume)]),
    };

    let mut books: Vec<BookCandidate> = items
        .iter()
        .map(volume_to_book_candidate)
        .collect();

    // Sort by volume number
    books.sort_by_key(|b| b.volume_number.unwrap_or(999));

    Ok(books)
}

fn volume_to_book_candidate(item: &serde_json::Value) -> BookCandidate {
    let volume_info = item.get("volumeInfo").cloned().unwrap_or(serde_json::json!({}));
    let title = volume_info
        .get("title")
        .and_then(|t| t.as_str())
        .unwrap_or("")
        .to_string();
    let authors: Vec<String> = volume_info
        .get("authors")
        .and_then(|a| a.as_array())
        .map(|arr| {
            arr.iter()
                .filter_map(|v| v.as_str().map(String::from))
                .collect()
        })
        .unwrap_or_default();
    let isbn = volume_info
        .get("industryIdentifiers")
        .and_then(|ids| ids.as_array())
        .and_then(|arr| {
            arr.iter()
                .find(|id| {
                    id.get("type")
                        .and_then(|t| t.as_str())
                        .map(|t| t == "ISBN_13" || t == "ISBN_10")
                        .unwrap_or(false)
                })
                .and_then(|id| id.get("identifier").and_then(|i| i.as_str()))
        })
        .map(String::from);
    let summary = volume_info
        .get("description")
        .and_then(|d| d.as_str())
        .map(String::from);
    let cover_url = volume_info
        .get("imageLinks")
        .and_then(|il| il.get("thumbnail").or_else(|| il.get("smallThumbnail")))
        .and_then(|u| u.as_str())
        .map(|s| s.replace("http://", "https://"));
    let page_count = volume_info
        .get("pageCount")
        .and_then(|p| p.as_i64())
        .map(|p| p as i32);
    let language = volume_info
        .get("language")
        .and_then(|l| l.as_str())
        .map(String::from);
    let publish_date = volume_info
        .get("publishedDate")
        .and_then(|d| d.as_str())
        .map(String::from);
    let google_id = item
        .get("id")
        .and_then(|id| id.as_str())
        .unwrap_or("")
        .to_string();
    let volume_number = extract_volume_number(&title);

    BookCandidate {
        external_book_id: google_id,
        title,
        volume_number,
        authors,
        isbn,
        summary,
        cover_url,
        page_count,
        language,
        publish_date,
        metadata_json: serde_json::json!({}),
    }
}

fn extract_series_name(title: &str) -> String {
    // Remove trailing volume indicators like "Vol. 1", "Tome 2", "#3", "- Volume 1"
    let re_patterns = [
        r"(?i)\s*[-–—]\s*(?:vol(?:ume)?\.?\s*|tome\s*|t\.\s*|#)\s*\d+.*$",
        r"(?i)\s*,?\s*(?:vol(?:ume)?\.?\s*|tome\s*|t\.\s*|#)\s*\d+.*$",
        r"\s*\(\d+\)\s*$",
        r"\s+\d+\s*$",
    ];

    let mut result = title.to_string();
    for pattern in &re_patterns {
        if let Ok(re) = regex::Regex::new(pattern) {
            // Only accept the replacement when the pattern actually matched;
            // `replace` returns the input unchanged on no match, and breaking
            // on that would skip the remaining patterns.
            if re.is_match(&result) {
                let cleaned = re.replace(&result, "").to_string();
                if !cleaned.is_empty() {
                    result = cleaned;
                }
                break;
            }
        }
    }

    result.trim().to_string()
}

fn extract_volume_number(title: &str) -> Option<i32> {
    let patterns = [
        r"(?i)(?:vol(?:ume)?\.?\s*|tome\s*|t\.\s*|#)\s*(\d+)",
        r"\((\d+)\)\s*$",
        r"\b(\d+)\s*$",
    ];

    for pattern in &patterns {
        if let Ok(re) = regex::Regex::new(pattern) {
            if let Some(caps) = re.captures(title) {
                if let Some(num) = caps.get(1).and_then(|m| m.as_str().parse::<i32>().ok()) {
                    return Some(num);
                }
            }
        }
    }

    None
}
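
// Illustrative sketch (added, not part of the original diff) of the two title
// helpers above: extract_series_name strips trailing volume markers (the
// comma-separated case exercises the fixed pattern fallthrough) and
// extract_volume_number recovers them.
#[cfg(test)]
mod title_helper_tests {
    use super::{extract_series_name, extract_volume_number};

    #[test]
    fn strips_and_recovers_volume_markers() {
        assert_eq!(extract_series_name("Blacksad - Volume 1"), "Blacksad");
        assert_eq!(extract_series_name("Blacksad, Tome 2"), "Blacksad");
        assert_eq!(extract_volume_number("Blacksad Vol. 3"), Some(3));
        assert_eq!(extract_volume_number("Blacksad (2)"), Some(2));
        assert_eq!(extract_volume_number("Blacksad"), None);
    }
}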

fn extract_year(date: &str) -> Option<i32> {
    date.get(..4).and_then(|s| s.parse::<i32>().ok())
}

fn compute_confidence(title: &str, query: &str) -> f32 {
    let title_lower = title.to_lowercase();
    if title_lower == query {
        1.0
    } else if title_lower.starts_with(query) || query.starts_with(&title_lower) {
        0.8
    } else if title_lower.contains(query) || query.contains(&title_lower) {
        0.7
    } else {
        // Simple character overlap ratio
        let common: usize = query
            .chars()
            .filter(|c| title_lower.contains(*c))
            .count();
        let max_len = query.len().max(title_lower.len()).max(1);
        (common as f32 / max_len as f32).clamp(0.1, 0.6)
    }
}

fn urlencoded(s: &str) -> String {
    let mut result = String::new();
    for byte in s.bytes() {
        match byte {
            b'A'..=b'Z' | b'a'..=b'z' | b'0'..=b'9' | b'-' | b'_' | b'.' | b'~' => {
                result.push(byte as char);
            }
            _ => {
                result.push_str(&format!("%{:02X}", byte));
            }
        }
    }
    result
}

struct SeriesCandidateBuilder {
    title: String,
    authors: Vec<String>,
    description: Option<String>,
    publishers: Vec<String>,
    start_year: Option<i32>,
    volume_count: i32,
    cover_url: Option<String>,
    external_id: String,
    external_url: Option<String>,
    metadata_json: serde_json::Value,
}

apps/api/src/metadata_providers/mod.rs (new file, 295 lines)
@@ -0,0 +1,295 @@
pub mod anilist;
pub mod bedetheque;
pub mod comicvine;
pub mod google_books;
pub mod open_library;

use serde::{Deserialize, Serialize};

/// Configuration passed to providers (API keys, etc.)
#[derive(Debug, Clone, Default)]
pub struct ProviderConfig {
    pub api_key: Option<String>,
    /// Preferred language for metadata results (ISO 639-1: "en", "fr", "es"). Defaults to "en".
    pub language: String,
}

/// A candidate series returned by a provider search
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SeriesCandidate {
    pub external_id: String,
    pub title: String,
    pub authors: Vec<String>,
    pub description: Option<String>,
    pub publishers: Vec<String>,
    pub start_year: Option<i32>,
    pub total_volumes: Option<i32>,
    pub cover_url: Option<String>,
    pub external_url: Option<String>,
    pub confidence: f32,
    pub metadata_json: serde_json::Value,
}

/// A candidate book within a series
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BookCandidate {
    pub external_book_id: String,
    pub title: String,
    pub volume_number: Option<i32>,
    pub authors: Vec<String>,
    pub isbn: Option<String>,
    pub summary: Option<String>,
    pub cover_url: Option<String>,
    pub page_count: Option<i32>,
    pub language: Option<String>,
    pub publish_date: Option<String>,
    pub metadata_json: serde_json::Value,
}

/// Trait that all metadata providers must implement
pub trait MetadataProvider: Send + Sync {
    #[allow(dead_code)]
    fn name(&self) -> &str;

    fn search_series(
        &self,
        query: &str,
        config: &ProviderConfig,
    ) -> std::pin::Pin<
        Box<dyn std::future::Future<Output = Result<Vec<SeriesCandidate>, String>> + Send + '_>,
    >;

    fn get_series_books(
        &self,
        external_id: &str,
        config: &ProviderConfig,
    ) -> std::pin::Pin<
        Box<dyn std::future::Future<Output = Result<Vec<BookCandidate>, String>> + Send + '_>,
    >;
}

/// Factory function to get a provider by name
pub fn get_provider(name: &str) -> Option<Box<dyn MetadataProvider>> {
    match name {
        "google_books" => Some(Box::new(google_books::GoogleBooksProvider)),
        "open_library" => Some(Box::new(open_library::OpenLibraryProvider)),
        "comicvine" => Some(Box::new(comicvine::ComicVineProvider)),
        "anilist" => Some(Box::new(anilist::AniListProvider)),
        "bedetheque" => Some(Box::new(bedetheque::BedethequeProvider)),
        _ => None,
    }
}
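
// A short usage sketch (added for illustration; `run_search_example` is a
// hypothetical caller, not part of the original module): providers are
// selected by name at runtime and driven through the boxed-future trait
// methods.
#[allow(dead_code)]
async fn run_search_example() -> Result<(), String> {
    let provider = get_provider("bedetheque").ok_or("unknown provider")?;
    let config = ProviderConfig { api_key: None, language: "fr".to_string() };
    // Both trait methods return pinned boxed futures, so they can be awaited
    // directly behind the trait object.
    let candidates = provider.search_series("Blacksad", &config).await?;
    if let Some(best) = candidates.first() {
        let _books = provider.get_series_books(&best.external_id, &config).await?;
    }
    Ok(())
}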

// ---------------------------------------------------------------------------
// End-to-end provider tests
//
// These tests hit real external APIs — run them explicitly with:
//     cargo test -p api providers_e2e -- --ignored --nocapture
// ---------------------------------------------------------------------------

#[cfg(test)]
mod providers_e2e {
    use super::*;

    fn config_fr() -> ProviderConfig {
        ProviderConfig { api_key: None, language: "fr".to_string() }
    }

    fn config_en() -> ProviderConfig {
        ProviderConfig { api_key: None, language: "en".to_string() }
    }

    fn print_candidate(name: &str, c: &SeriesCandidate) {
        println!("\n=== {name} — best candidate ===");
        println!(" title: {:?}", c.title);
        println!(" external_id: {:?}", c.external_id);
        println!(" authors: {:?}", c.authors);
        // Truncate to at most 120 characters; byte-slicing (`&d[..120]`) could
        // panic on a multi-byte UTF-8 boundary in French descriptions.
        println!(" description: {:?}", c.description.as_deref().map(|d| d.chars().take(120).collect::<String>()));
        println!(" publishers: {:?}", c.publishers);
        println!(" start_year: {:?}", c.start_year);
        println!(" total_volumes: {:?}", c.total_volumes);
        println!(" cover_url: {}", c.cover_url.is_some());
        println!(" external_url: {}", c.external_url.is_some());
        println!(" confidence: {:.2}", c.confidence);
        println!(" metadata_json: {}", serde_json::to_string_pretty(&c.metadata_json).unwrap_or_default());
    }

    fn print_books(name: &str, books: &[BookCandidate]) {
        println!("\n=== {name} — {} books ===", books.len());
        for (i, b) in books.iter().take(5).enumerate() {
            println!(
                " [{}] vol={:?} title={:?} authors={} isbn={:?} pages={:?} lang={:?} date={:?} cover={}",
                i, b.volume_number, b.title, b.authors.len(), b.isbn, b.page_count, b.language, b.publish_date, b.cover_url.is_some()
            );
        }
        if books.len() > 5 { println!(" ... and {} more", books.len() - 5); }

        let with_vol = books.iter().filter(|b| b.volume_number.is_some()).count();
        let with_isbn = books.iter().filter(|b| b.isbn.is_some()).count();
        let with_authors = books.iter().filter(|b| !b.authors.is_empty()).count();
        let with_date = books.iter().filter(|b| b.publish_date.is_some()).count();
        let with_cover = books.iter().filter(|b| b.cover_url.is_some()).count();
        let with_pages = books.iter().filter(|b| b.page_count.is_some()).count();
        println!(" --- field coverage ---");
        println!(" volume_number: {with_vol}/{}", books.len());
        println!(" isbn: {with_isbn}/{}", books.len());
        println!(" authors: {with_authors}/{}", books.len());
        println!(" publish_date: {with_date}/{}", books.len());
        println!(" cover_url: {with_cover}/{}", books.len());
        println!(" page_count: {with_pages}/{}", books.len());
    }

    // --- Google Books ---

    #[tokio::test]
    #[ignore]
    async fn google_books_search_and_books() {
        let p = get_provider("google_books").unwrap();
        let cfg = config_en();

        let candidates = p.search_series("Blacksad", &cfg).await.unwrap();
        assert!(!candidates.is_empty(), "google_books: no results for Blacksad");
        print_candidate("google_books", &candidates[0]);

        let books = p.get_series_books(&candidates[0].external_id, &cfg).await.unwrap();
        print_books("google_books", &books);
        assert!(!books.is_empty(), "google_books: no books returned");
    }

    // --- Open Library ---

    #[tokio::test]
    #[ignore]
    async fn open_library_search_and_books() {
        let p = get_provider("open_library").unwrap();
        let cfg = config_en();

        let candidates = p.search_series("Sandman Neil Gaiman", &cfg).await.unwrap();
        assert!(!candidates.is_empty(), "open_library: no results for Sandman");
        print_candidate("open_library", &candidates[0]);

        let books = p.get_series_books(&candidates[0].external_id, &cfg).await.unwrap();
        print_books("open_library", &books);
        assert!(!books.is_empty(), "open_library: no books returned");
    }

    // --- AniList ---

    #[tokio::test]
    #[ignore]
    async fn anilist_search_finished() {
        let p = get_provider("anilist").unwrap();
        let cfg = config_fr();

        let candidates = p.search_series("Death Note", &cfg).await.unwrap();
        assert!(!candidates.is_empty(), "anilist: no results for Death Note");
        print_candidate("anilist (finished)", &candidates[0]);

        let best = &candidates[0];
        assert!(best.total_volumes.is_some(), "anilist: finished series should have total_volumes");
        assert!(best.description.is_some(), "anilist: should have description");
        assert!(!best.authors.is_empty(), "anilist: should have authors");

        let status = best.metadata_json.get("status").and_then(|s| s.as_str());
        assert_eq!(status, Some("FINISHED"), "anilist: Death Note should be FINISHED");

        let books = p.get_series_books(&best.external_id, &cfg).await.unwrap();
        print_books("anilist (Death Note)", &books);
        assert!(books.len() >= 12, "anilist: Death Note should have ≥12 volumes, got {}", books.len());
    }

    #[tokio::test]
    #[ignore]
    async fn anilist_search_ongoing() {
        let p = get_provider("anilist").unwrap();
        let cfg = config_fr();

        let candidates = p.search_series("One Piece", &cfg).await.unwrap();
        assert!(!candidates.is_empty(), "anilist: no results for One Piece");
        print_candidate("anilist (ongoing)", &candidates[0]);

        let best = &candidates[0];
        let status = best.metadata_json.get("status").and_then(|s| s.as_str());
        assert_eq!(status, Some("RELEASING"), "anilist: One Piece should be RELEASING");

        let volume_source = best.metadata_json.get("volume_source").and_then(|s| s.as_str());
        println!(" volume_source: {:?}", volume_source);
        println!(" total_volumes: {:?}", best.total_volumes);
    }

    // --- Bédéthèque ---

    #[tokio::test]
    #[ignore]
    async fn bedetheque_search_and_books() {
        let p = get_provider("bedetheque").unwrap();
        let cfg = config_fr();

        let candidates = p.search_series("De Cape et de Crocs", &cfg).await.unwrap();
        assert!(!candidates.is_empty(), "bedetheque: no results");
        print_candidate("bedetheque", &candidates[0]);

        let best = &candidates[0];
        assert!(best.description.is_some(), "bedetheque: should have description");
        assert!(!best.authors.is_empty(), "bedetheque: should have authors");
        assert!(!best.publishers.is_empty(), "bedetheque: should have publishers");
        assert!(best.start_year.is_some(), "bedetheque: should have start_year");
        assert!(best.total_volumes.is_some(), "bedetheque: should have total_volumes");

        // Enriched metadata_json
        let mj = &best.metadata_json;
        assert!(mj.get("genres").and_then(|g| g.as_array()).map(|a| !a.is_empty()).unwrap_or(false), "bedetheque: should have genres");
        assert!(mj.get("status").and_then(|s| s.as_str()).is_some(), "bedetheque: should have status");

        let books = p.get_series_books(&best.external_id, &cfg).await.unwrap();
        print_books("bedetheque", &books);
        assert!(books.len() >= 12, "bedetheque: De Cape et de Crocs should have ≥12 volumes, got {}", books.len());
    }

    // --- ComicVine (needs API key) ---

    #[tokio::test]
    #[ignore]
    async fn comicvine_no_key() {
        let p = get_provider("comicvine").unwrap();
        let cfg = config_en();

        let result = p.search_series("Batman", &cfg).await;
        println!("\n=== comicvine (no key) ===");
        match result {
            Ok(c) => println!(" returned {} candidates (unexpected without key)", c.len()),
            Err(e) => println!(" expected error: {e}"),
        }
    }

    // --- Cross-provider comparison ---

    #[tokio::test]
    #[ignore]
    async fn cross_provider_blacksad() {
        println!("\n{}", "=".repeat(60));
        println!(" Cross-provider comparison: Blacksad");
        println!("{}\n", "=".repeat(60));

        let providers: Vec<(&str, ProviderConfig)> = vec![
            ("google_books", config_en()),
            ("open_library", config_en()),
            ("anilist", config_fr()),
            ("bedetheque", config_fr()),
        ];

        for (name, cfg) in &providers {
            let p = get_provider(name).unwrap();
            match p.search_series("Blacksad", cfg).await {
                Ok(candidates) if !candidates.is_empty() => {
                    let b = &candidates[0];
                    println!("[{name}] title={:?} authors={} desc={} pubs={} year={:?} vols={:?} cover={} url={} conf={:.2}",
                        b.title, b.authors.len(), b.description.is_some(), b.publishers.len(),
                        b.start_year, b.total_volumes, b.cover_url.is_some(), b.external_url.is_some(), b.confidence);
                }
                Ok(_) => println!("[{name}] no results"),
                Err(e) => println!("[{name}] error: {e}"),
            }
        }
    }
}

apps/api/src/metadata_providers/open_library.rs (new file, 351 lines)
@@ -0,0 +1,351 @@
use super::{BookCandidate, MetadataProvider, ProviderConfig, SeriesCandidate};

pub struct OpenLibraryProvider;

impl MetadataProvider for OpenLibraryProvider {
    fn name(&self) -> &str {
        "open_library"
    }

    fn search_series(
        &self,
        query: &str,
        config: &ProviderConfig,
    ) -> std::pin::Pin<
        Box<dyn std::future::Future<Output = Result<Vec<SeriesCandidate>, String>> + Send + '_>,
    > {
        let query = query.to_string();
        let config = config.clone();
        Box::pin(async move { search_series_impl(&query, &config).await })
    }

    fn get_series_books(
        &self,
        external_id: &str,
        config: &ProviderConfig,
    ) -> std::pin::Pin<
        Box<dyn std::future::Future<Output = Result<Vec<BookCandidate>, String>> + Send + '_>,
    > {
        let external_id = external_id.to_string();
        let config = config.clone();
        Box::pin(async move { get_series_books_impl(&external_id, &config).await })
    }
}

async fn search_series_impl(
    query: &str,
    config: &ProviderConfig,
) -> Result<Vec<SeriesCandidate>, String> {
    let client = reqwest::Client::builder()
        .timeout(std::time::Duration::from_secs(15))
        .build()
        .map_err(|e| format!("failed to build HTTP client: {e}"))?;

    // Open Library uses 3-letter language codes
    let ol_lang = match config.language.as_str() {
        "fr" => "fre",
        "es" => "spa",
        _ => "eng",
    };

    let url = format!(
        "https://openlibrary.org/search.json?title={}&limit=20&language={}",
        urlencoded(query),
        ol_lang,
    );

    let resp = client
        .get(&url)
        .send()
        .await
        .map_err(|e| format!("Open Library request failed: {e}"))?;

    if !resp.status().is_success() {
        let status = resp.status();
        let text = resp.text().await.unwrap_or_default();
        return Err(format!("Open Library returned {status}: {text}"));
    }

    let data: serde_json::Value = resp
        .json()
        .await
        .map_err(|e| format!("Failed to parse Open Library response: {e}"))?;

    let docs = match data.get("docs").and_then(|d| d.as_array()) {
        Some(docs) => docs,
        None => return Ok(vec![]),
    };

    let query_lower = query.to_lowercase();
    let mut series_map: std::collections::HashMap<String, SeriesCandidateBuilder> =
        std::collections::HashMap::new();

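    // Group search hits by extracted series name: each matching edition bumps
    // the volume count and contributes authors, publishers, year and cover.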
    for doc in docs {
        let title = doc
            .get("title")
            .and_then(|t| t.as_str())
            .unwrap_or("")
            .to_string();
        let authors: Vec<String> = doc
            .get("author_name")
            .and_then(|a| a.as_array())
            .map(|arr| arr.iter().filter_map(|v| v.as_str().map(String::from)).collect())
            .unwrap_or_default();
        let publishers: Vec<String> = doc
            .get("publisher")
            .and_then(|a| a.as_array())
            .map(|arr| {
                let mut pubs: Vec<String> = arr.iter().filter_map(|v| v.as_str().map(String::from)).collect();
                pubs.truncate(3);
                pubs
            })
            .unwrap_or_default();
        let first_publish_year = doc
            .get("first_publish_year")
            .and_then(|y| y.as_i64())
            .map(|y| y as i32);
        let cover_i = doc.get("cover_i").and_then(|c| c.as_i64());
        let cover_url = cover_i.map(|id| format!("https://covers.openlibrary.org/b/id/{}-M.jpg", id));
        let key = doc
            .get("key")
            .and_then(|k| k.as_str())
            .unwrap_or("")
            .to_string();

        let series_name = extract_series_name(&title);

        let entry = series_map
            .entry(series_name.clone())
            .or_insert_with(|| SeriesCandidateBuilder {
                title: series_name.clone(),
                authors: vec![],
                description: None,
                publishers: vec![],
                start_year: None,
                volume_count: 0,
                cover_url: None,
                external_id: key.clone(),
                external_url: if key.is_empty() {
                    None
                } else {
                    Some(format!("https://openlibrary.org{}", key))
                },
            });

        entry.volume_count += 1;

        for a in &authors {
            if !entry.authors.contains(a) {
                entry.authors.push(a.clone());
            }
        }
        for p in &publishers {
            if !entry.publishers.contains(p) {
                entry.publishers.push(p.clone());
            }
        }
        // Keep the earliest first_publish_year seen across editions
        if let Some(y) = first_publish_year {
            if entry.start_year.map_or(true, |cur| cur > y) {
                entry.start_year = Some(y);
            }
        }
        if entry.cover_url.is_none() {
            entry.cover_url = cover_url;
        }
    }

    let mut candidates: Vec<SeriesCandidate> = series_map
        .into_values()
        .map(|b| {
            let confidence = compute_confidence(&b.title, &query_lower);
            SeriesCandidate {
                external_id: b.external_id,
                title: b.title,
                authors: b.authors,
                description: b.description,
                publishers: b.publishers,
                start_year: b.start_year,
                total_volumes: if b.volume_count > 1 { Some(b.volume_count) } else { None },
                cover_url: b.cover_url,
                external_url: b.external_url,
                confidence,
                metadata_json: serde_json::json!({}),
            }
        })
        .collect();

    candidates.sort_by(|a, b| b.confidence.partial_cmp(&a.confidence).unwrap_or(std::cmp::Ordering::Equal));
    candidates.truncate(10);
    Ok(candidates)
}

async fn get_series_books_impl(
    external_id: &str,
    _config: &ProviderConfig,
) -> Result<Vec<BookCandidate>, String> {
    let client = reqwest::Client::builder()
        .timeout(std::time::Duration::from_secs(15))
        .build()
        .map_err(|e| format!("failed to build HTTP client: {e}"))?;

    // Fetch the work to get its title for series search
    let url = format!("https://openlibrary.org{}.json", external_id);
    let resp = client.get(&url).send().await.map_err(|e| format!("Open Library request failed: {e}"))?;

    let work: serde_json::Value = if resp.status().is_success() {
        resp.json().await.map_err(|e| format!("Failed to parse response: {e}"))?
    } else {
        serde_json::json!({})
    };

    let title = work.get("title").and_then(|t| t.as_str()).unwrap_or("");
    let series_name = extract_series_name(title);

    // Search for editions of this series
    let search_url = format!(
        "https://openlibrary.org/search.json?title={}&limit=40",
        urlencoded(&series_name)
    );
    let resp = client.get(&search_url).send().await.map_err(|e| format!("Open Library search failed: {e}"))?;

    if !resp.status().is_success() {
        return Ok(vec![]);
    }

    let data: serde_json::Value = resp.json().await.map_err(|e| format!("Failed to parse response: {e}"))?;
    let docs = match data.get("docs").and_then(|d| d.as_array()) {
        Some(docs) => docs,
        None => return Ok(vec![]),
    };

    let mut books: Vec<BookCandidate> = docs
        .iter()
        .map(|doc| {
            let title = doc.get("title").and_then(|t| t.as_str()).unwrap_or("").to_string();
            let authors: Vec<String> = doc
                .get("author_name")
                .and_then(|a| a.as_array())
                .map(|arr| arr.iter().filter_map(|v| v.as_str().map(String::from)).collect())
                .unwrap_or_default();
            let isbn = doc
                .get("isbn")
                .and_then(|a| a.as_array())
                .and_then(|arr| arr.first())
                .and_then(|v| v.as_str())
                .map(String::from);
            let page_count = doc
                .get("number_of_pages_median")
                .and_then(|n| n.as_i64())
                .map(|n| n as i32);
            let cover_i = doc.get("cover_i").and_then(|c| c.as_i64());
            let cover_url = cover_i.map(|id| format!("https://covers.openlibrary.org/b/id/{}-M.jpg", id));
            let language = doc
                .get("language")
                .and_then(|a| a.as_array())
                .and_then(|arr| arr.first())
                .and_then(|v| v.as_str())
                .map(String::from);
            let publish_date = doc
                .get("first_publish_year")
                .and_then(|y| y.as_i64())
                .map(|y| y.to_string());
            let key = doc.get("key").and_then(|k| k.as_str()).unwrap_or("").to_string();
            let volume_number = extract_volume_number(&title);

            BookCandidate {
                external_book_id: key,
                title,
                volume_number,
                authors,
                isbn,
                summary: None,
                cover_url,
                page_count,
                language,
                publish_date,
                metadata_json: serde_json::json!({}),
            }
        })
        .collect();

    books.sort_by_key(|b| b.volume_number.unwrap_or(999));
    Ok(books)
}

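/// Strips trailing volume markers ("- Tome 3", ", Vol. 12", "(4)", bare digits)
/// so that editions of the same series collapse onto one key in the map above.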
fn extract_series_name(title: &str) -> String {
    let re_patterns = [
        r"(?i)\s*[-–—]\s*(?:vol(?:ume)?\.?\s*|tome\s*|t\.\s*|#)\s*\d+.*$",
        r"(?i)\s*,?\s*(?:vol(?:ume)?\.?\s*|tome\s*|t\.\s*|#)\s*\d+.*$",
        r"\s*\(\d+\)\s*$",
        r"\s+\d+\s*$",
    ];
    let mut result = title.to_string();
    for pattern in &re_patterns {
        if let Ok(re) = regex::Regex::new(pattern) {
            let cleaned = re.replace(&result, "").to_string();
            // Only accept a pattern that actually matched (replace() returns the
            // input unchanged on a miss) and that left a non-empty name; otherwise
            // fall through to the next, looser pattern.
            if cleaned != result && !cleaned.is_empty() {
                result = cleaned;
                break;
            }
        }
    }
    result.trim().to_string()
}

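/// Pulls a volume number out of a title using the same marker styles
/// ("Vol. 7", "Tome 3", "#12", "(4)", trailing digits).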
fn extract_volume_number(title: &str) -> Option<i32> {
    let patterns = [
        r"(?i)(?:vol(?:ume)?\.?\s*|tome\s*|t\.\s*|#)\s*(\d+)",
        r"\((\d+)\)\s*$",
        r"\b(\d+)\s*$",
    ];
    for pattern in &patterns {
        if let Ok(re) = regex::Regex::new(pattern) {
            if let Some(caps) = re.captures(title) {
                if let Some(num) = caps.get(1).and_then(|m| m.as_str().parse::<i32>().ok()) {
                    return Some(num);
                }
            }
        }
    }
    None
}

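/// Tiered title/query match score: 1.0 exact, 0.8 prefix, 0.7 substring,
/// otherwise a character-overlap ratio clamped to [0.1, 0.6].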
fn compute_confidence(title: &str, query: &str) -> f32 {
    let title_lower = title.to_lowercase();
    if title_lower == query {
        1.0
    } else if title_lower.starts_with(query) || query.starts_with(&title_lower) {
        0.8
    } else if title_lower.contains(query) || query.contains(&title_lower) {
        0.7
    } else {
        let common: usize = query.chars().filter(|c| title_lower.contains(*c)).count();
        let max_len = query.len().max(title_lower.len()).max(1);
        (common as f32 / max_len as f32).clamp(0.1, 0.6)
    }
}

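/// Minimal percent-encoder: every byte outside the RFC 3986 unreserved set
/// (ALPHA / DIGIT / "-" / "_" / "." / "~") is %-escaped.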
fn urlencoded(s: &str) -> String {
    let mut result = String::new();
    for byte in s.bytes() {
        match byte {
            b'A'..=b'Z' | b'a'..=b'z' | b'0'..=b'9' | b'-' | b'_' | b'.' | b'~' => {
                result.push(byte as char);
            }
            _ => result.push_str(&format!("%{:02X}", byte)),
        }
    }
    result
}

struct SeriesCandidateBuilder {
    title: String,
    authors: Vec<String>,
    description: Option<String>,
    publishers: Vec<String>,
    start_year: Option<i32>,
    volume_count: i32,
    cover_url: Option<String>,
    external_id: String,
    external_url: Option<String>,
}
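
// Illustrative sketch, not part of the original commit: a quick check of the
// title helpers above, assuming the regex patterns behave as written.
#[cfg(test)]
mod title_parsing_tests {
    use super::*;

    #[test]
    fn strips_volume_suffixes() {
        assert_eq!(extract_series_name("Blacksad - Tome 3"), "Blacksad");
        assert_eq!(extract_series_name("Naruto, Vol. 12"), "Naruto");
        assert_eq!(extract_volume_number("Blacksad - Tome 3"), Some(3));
        assert_eq!(extract_volume_number("Blacksad"), None);
    }
}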

apps/api/src/metadata_refresh.rs (new file, 836 lines)
@@ -0,0 +1,836 @@
use axum::{
    extract::{Path as AxumPath, State},
    Json,
};
use serde::{Deserialize, Serialize};
use sqlx::{PgPool, Row};
use uuid::Uuid;
use utoipa::ToSchema;
use tracing::{info, warn};

use crate::{error::ApiError, metadata_providers, notifications, state::AppState};
use crate::metadata_batch::{load_provider_config_from_pool, is_job_cancelled, update_progress};

// ---------------------------------------------------------------------------
// DTOs
// ---------------------------------------------------------------------------

#[derive(Deserialize, ToSchema)]
pub struct MetadataRefreshRequest {
    pub library_id: String,
}

/// A single field change: old → new
#[derive(Serialize, Clone)]
struct FieldDiff {
    field: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    old: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    new: Option<serde_json::Value>,
}
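
// Illustrative serialized form (example values, not from the codebase):
//   { "field": "start_year", "old": 2001, "new": 2000 }
// `old` is omitted for newly filled fields via skip_serializing_if.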

/// Per-book changes
#[derive(Serialize, Clone)]
struct BookDiff {
    book_id: String,
    title: String,
    volume: Option<i32>,
    changes: Vec<FieldDiff>,
}

/// Per-series change report
#[derive(Serialize, Clone)]
struct SeriesRefreshResult {
    series_name: String,
    provider: String,
    status: String, // "updated", "unchanged", "error"
    series_changes: Vec<FieldDiff>,
    book_changes: Vec<BookDiff>,
    #[serde(skip_serializing_if = "Option::is_none")]
    error: Option<String>,
}

/// Response DTO for the report endpoint
#[derive(Serialize, ToSchema)]
pub struct MetadataRefreshReportDto {
    #[schema(value_type = String)]
    pub job_id: Uuid,
    pub status: String,
    pub total_links: i64,
    pub refreshed: i64,
    pub unchanged: i64,
    pub errors: i64,
    pub changes: serde_json::Value,
}

// ---------------------------------------------------------------------------
// POST /metadata/refresh — Trigger a metadata refresh job
// ---------------------------------------------------------------------------

#[utoipa::path(
    post,
    path = "/metadata/refresh",
    tag = "metadata",
    request_body = MetadataRefreshRequest,
    responses(
        (status = 200, description = "Job created"),
        (status = 400, description = "Bad request"),
    ),
    security(("Bearer" = []))
)]
pub async fn start_refresh(
    State(state): State<AppState>,
    Json(body): Json<MetadataRefreshRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
    let library_id: Uuid = body
        .library_id
        .parse()
        .map_err(|_| ApiError::bad_request("invalid library_id"))?;

    // Verify library exists
    sqlx::query("SELECT 1 FROM libraries WHERE id = $1")
        .bind(library_id)
        .fetch_optional(&state.pool)
        .await?
        .ok_or_else(|| ApiError::not_found("library not found"))?;

    // Check no existing running metadata_refresh job for this library
    let existing: Option<Uuid> = sqlx::query_scalar(
        "SELECT id FROM index_jobs WHERE library_id = $1 AND type = 'metadata_refresh' AND status IN ('pending', 'running') LIMIT 1",
    )
    .bind(library_id)
    .fetch_optional(&state.pool)
    .await?;

    if let Some(existing_id) = existing {
        return Ok(Json(serde_json::json!({
            "id": existing_id.to_string(),
            "status": "already_running",
        })));
    }

    // Check there are approved links to refresh (only ongoing series)
    let link_count: i64 = sqlx::query_scalar(
        r#"
        SELECT COUNT(*) FROM external_metadata_links eml
        LEFT JOIN series_metadata sm
            ON sm.library_id = eml.library_id AND sm.name = eml.series_name
        WHERE eml.library_id = $1
          AND eml.status = 'approved'
          AND COALESCE(sm.status, 'ongoing') NOT IN ('ended', 'cancelled')
        "#,
    )
    .bind(library_id)
    .fetch_one(&state.pool)
    .await?;

    if link_count == 0 {
        return Err(ApiError::bad_request("No approved metadata links to refresh for this library"));
    }

    let job_id = Uuid::new_v4();
    sqlx::query(
        "INSERT INTO index_jobs (id, library_id, type, status, started_at) VALUES ($1, $2, 'metadata_refresh', 'running', NOW())",
    )
    .bind(job_id)
    .bind(library_id)
    .execute(&state.pool)
    .await?;

    // Spawn the background processing task (status already 'running' to avoid poller race)
    let pool = state.pool.clone();
    let library_name: Option<String> = sqlx::query_scalar("SELECT name FROM libraries WHERE id = $1")
        .bind(library_id)
        .fetch_optional(&state.pool)
        .await
        .ok()
        .flatten();
    tokio::spawn(async move {
        if let Err(e) = process_metadata_refresh(&pool, job_id, library_id).await {
            warn!("[METADATA_REFRESH] job {job_id} failed: {e}");
            let _ = sqlx::query(
                "UPDATE index_jobs SET status = 'failed', error_opt = $2, finished_at = NOW() WHERE id = $1",
            )
            .bind(job_id)
            .bind(e.to_string())
            .execute(&pool)
            .await;
            notifications::notify(
                pool.clone(),
                notifications::NotificationEvent::MetadataRefreshFailed {
                    library_name,
                    error: e.to_string(),
                },
            );
        }
    });

    Ok(Json(serde_json::json!({
        "id": job_id.to_string(),
        "status": "pending",
    })))
}

// ---------------------------------------------------------------------------
// GET /metadata/refresh/:id/report — Refresh report from stats_json
// ---------------------------------------------------------------------------

#[utoipa::path(
    get,
    path = "/metadata/refresh/{id}/report",
    tag = "metadata",
    params(("id" = String, Path, description = "Job UUID")),
    responses(
        (status = 200, body = MetadataRefreshReportDto),
        (status = 404, description = "Job not found"),
    ),
    security(("Bearer" = []))
)]
pub async fn get_refresh_report(
    State(state): State<AppState>,
    AxumPath(job_id): AxumPath<Uuid>,
) -> Result<Json<MetadataRefreshReportDto>, ApiError> {
    let row = sqlx::query(
        "SELECT status, stats_json, total_files FROM index_jobs WHERE id = $1 AND type = 'metadata_refresh'",
    )
    .bind(job_id)
    .fetch_optional(&state.pool)
    .await?
    .ok_or_else(|| ApiError::not_found("job not found"))?;

    let job_status: String = row.get("status");
    let stats: Option<serde_json::Value> = row.get("stats_json");
    let total_files: Option<i32> = row.get("total_files");

    let (refreshed, unchanged, errors, changes) = if let Some(ref s) = stats {
        (
            s.get("refreshed").and_then(|v| v.as_i64()).unwrap_or(0),
            s.get("unchanged").and_then(|v| v.as_i64()).unwrap_or(0),
            s.get("errors").and_then(|v| v.as_i64()).unwrap_or(0),
            s.get("changes").cloned().unwrap_or(serde_json::json!([])),
        )
    } else {
        (0, 0, 0, serde_json::json!([]))
    };

    Ok(Json(MetadataRefreshReportDto {
        job_id,
        status: job_status,
        total_links: total_files.unwrap_or(0) as i64,
        refreshed,
        unchanged,
        errors,
        changes,
    }))
}
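
// Illustrative report payload (example values, not from the codebase):
//   { "job_id": "…", "status": "success", "total_links": 42,
//     "refreshed": 5, "unchanged": 36, "errors": 1, "changes": [...] }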

// ---------------------------------------------------------------------------
// Background processing
// ---------------------------------------------------------------------------

pub(crate) async fn process_metadata_refresh(
    pool: &PgPool,
    job_id: Uuid,
    library_id: Uuid,
) -> Result<(), String> {
    // Set job to running
    sqlx::query("UPDATE index_jobs SET status = 'running', started_at = NOW() WHERE id = $1")
        .bind(job_id)
        .execute(pool)
        .await
        .map_err(|e| e.to_string())?;

    // Get approved links for this library, only for ongoing series (not ended/cancelled)
    let links: Vec<(Uuid, String, String, String)> = sqlx::query_as(
        r#"
        SELECT eml.id, eml.series_name, eml.provider, eml.external_id
        FROM external_metadata_links eml
        LEFT JOIN series_metadata sm
            ON sm.library_id = eml.library_id AND sm.name = eml.series_name
        WHERE eml.library_id = $1
          AND eml.status = 'approved'
          AND COALESCE(sm.status, 'ongoing') NOT IN ('ended', 'cancelled')
        ORDER BY eml.series_name
        "#,
    )
    .bind(library_id)
    .fetch_all(pool)
    .await
    .map_err(|e| e.to_string())?;

    let total = links.len() as i32;
    sqlx::query("UPDATE index_jobs SET total_files = $2 WHERE id = $1")
        .bind(job_id)
        .bind(total)
        .execute(pool)
        .await
        .map_err(|e| e.to_string())?;

    let mut processed = 0i32;
    let mut refreshed = 0i32;
    let mut unchanged = 0i32;
    let mut errors = 0i32;
    let mut all_results: Vec<SeriesRefreshResult> = Vec::new();

    for (link_id, series_name, provider_name, external_id) in &links {
        // Check cancellation
        if is_job_cancelled(pool, job_id).await {
            sqlx::query(
                "UPDATE index_jobs SET status = 'cancelled', finished_at = NOW() WHERE id = $1",
            )
            .bind(job_id)
            .execute(pool)
            .await
            .map_err(|e| e.to_string())?;
            return Ok(());
        }

        match refresh_link(pool, *link_id, library_id, series_name, provider_name, external_id).await {
            Ok(result) => {
                if result.status == "updated" {
                    refreshed += 1;
                    info!("[METADATA_REFRESH] job={job_id} updated series='{series_name}' via {provider_name}");
                } else {
                    unchanged += 1;
                }
                all_results.push(result);
            }
            Err(e) => {
                errors += 1;
                warn!("[METADATA_REFRESH] job={job_id} error on series='{series_name}': {e}");
                all_results.push(SeriesRefreshResult {
                    series_name: series_name.clone(),
                    provider: provider_name.clone(),
                    status: "error".to_string(),
                    series_changes: vec![],
                    book_changes: vec![],
                    error: Some(e),
                });
            }
        }

        processed += 1;
        update_progress(pool, job_id, processed, total, series_name).await;

        // Rate limit: 1s delay between provider calls
        tokio::time::sleep(std::time::Duration::from_millis(1000)).await;
    }

    // Only keep series that have changes or errors (filter out "unchanged")
    let changes_only: Vec<&SeriesRefreshResult> = all_results
        .iter()
        .filter(|r| r.status != "unchanged")
        .collect();

    // Build stats summary
    let stats = serde_json::json!({
        "total_links": total,
        "refreshed": refreshed,
        "unchanged": unchanged,
        "errors": errors,
        "changes": changes_only,
    });

    sqlx::query(
        "UPDATE index_jobs SET status = 'success', finished_at = NOW(), progress_percent = 100, stats_json = $2 WHERE id = $1",
    )
    .bind(job_id)
    .bind(stats)
    .execute(pool)
    .await
    .map_err(|e| e.to_string())?;

    info!("[METADATA_REFRESH] job={job_id} completed: {refreshed} updated, {unchanged} unchanged, {errors} errors");

    let library_name: Option<String> = sqlx::query_scalar("SELECT name FROM libraries WHERE id = $1")
        .bind(library_id)
        .fetch_optional(pool)
        .await
        .ok()
        .flatten();
    notifications::notify(
        pool.clone(),
        notifications::NotificationEvent::MetadataRefreshCompleted {
            library_name,
            refreshed,
            unchanged,
            errors,
        },
    );

    Ok(())
}

/// Refresh a single approved metadata link: re-fetch from provider, compare, sync, return diff
async fn refresh_link(
    pool: &PgPool,
    link_id: Uuid,
    library_id: Uuid,
    series_name: &str,
    provider_name: &str,
    external_id: &str,
) -> Result<SeriesRefreshResult, String> {
    let provider = metadata_providers::get_provider(provider_name)
        .ok_or_else(|| format!("Unknown provider: {provider_name}"))?;

    let config = load_provider_config_from_pool(pool, provider_name).await;

    let mut series_changes: Vec<FieldDiff> = Vec::new();
    let mut book_changes: Vec<BookDiff> = Vec::new();

    // ── Series-level refresh ──────────────────────────────────────────────
    let candidates = provider
        .search_series(series_name, &config)
        .await
        .map_err(|e| format!("provider search error: {e}"))?;

    let candidate = candidates
        .iter()
        .find(|c| c.external_id == external_id)
        .or_else(|| candidates.first());

    if let Some(candidate) = candidate {
        // Update link metadata_json
        sqlx::query(
            r#"
            UPDATE external_metadata_links
            SET metadata_json = $2,
                total_volumes_external = $3,
                updated_at = NOW()
            WHERE id = $1
            "#,
        )
        .bind(link_id)
        .bind(&candidate.metadata_json)
        .bind(candidate.total_volumes)
        .execute(pool)
        .await
        .map_err(|e| e.to_string())?;

        // Diff + sync series metadata
        series_changes = sync_series_with_diff(pool, library_id, series_name, candidate).await?;
    }

    // ── Book-level refresh ────────────────────────────────────────────────
    let books = provider
        .get_series_books(external_id, &config)
        .await
        .map_err(|e| format!("provider books error: {e}"))?;

    // Delete existing external_book_metadata for this link
    sqlx::query("DELETE FROM external_book_metadata WHERE link_id = $1")
        .bind(link_id)
        .execute(pool)
        .await
        .map_err(|e| e.to_string())?;

    // Pre-fetch local books
    let local_books: Vec<(Uuid, Option<i32>, String)> = sqlx::query_as(
        r#"
        SELECT id, volume, title FROM books
        WHERE library_id = $1
          AND COALESCE(NULLIF(series, ''), 'unclassified') = $2
        ORDER BY volume NULLS LAST,
                 REGEXP_REPLACE(LOWER(title), '[0-9].*$', ''),
                 COALESCE((REGEXP_MATCH(LOWER(title), '\d+'))[1]::int, 0),
                 title ASC
        "#,
    )
    .bind(library_id)
    .bind(series_name)
    .fetch_all(pool)
    .await
    .map_err(|e| e.to_string())?;

    let local_books_with_pos: Vec<(Uuid, i32, String)> = local_books
        .iter()
        .enumerate()
        .map(|(idx, (id, vol, title))| (*id, vol.unwrap_or((idx + 1) as i32), title.clone()))
        .collect();

    let mut matched_local_ids = std::collections::HashSet::new();

    for (ext_idx, book) in books.iter().enumerate() {
        let ext_vol = book.volume_number.unwrap_or((ext_idx + 1) as i32);

        // Match by volume number
        let mut local_book_id: Option<Uuid> = local_books_with_pos
            .iter()
            .find(|(id, v, _)| *v == ext_vol && !matched_local_ids.contains(id))
            .map(|(id, _, _)| *id);

        // Match by title containment
        if local_book_id.is_none() {
            let ext_title_lower = book.title.to_lowercase();
            local_book_id = local_books_with_pos
                .iter()
                .find(|(id, _, local_title)| {
                    if matched_local_ids.contains(id) {
                        return false;
                    }
                    let local_lower = local_title.to_lowercase();
                    local_lower.contains(&ext_title_lower) || ext_title_lower.contains(&local_lower)
                })
                .map(|(id, _, _)| *id);
        }

        if let Some(id) = local_book_id {
            matched_local_ids.insert(id);
        }

        // Insert external_book_metadata
        sqlx::query(
            r#"
            INSERT INTO external_book_metadata
                (link_id, book_id, external_book_id, volume_number, title, authors, isbn, summary, cover_url, page_count, language, publish_date, metadata_json)
            VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)
            "#,
        )
        .bind(link_id)
        .bind(local_book_id)
        .bind(&book.external_book_id)
        .bind(book.volume_number)
        .bind(&book.title)
        .bind(&book.authors)
        .bind(&book.isbn)
        .bind(&book.summary)
        .bind(&book.cover_url)
        .bind(book.page_count)
        .bind(&book.language)
        .bind(&book.publish_date)
        .bind(&book.metadata_json)
        .execute(pool)
        .await
        .map_err(|e| e.to_string())?;

        // Diff + push metadata to matched local book
        if let Some(book_id) = local_book_id {
            let diffs = sync_book_with_diff(pool, book_id, book).await?;
            if !diffs.is_empty() {
                let local_title = local_books_with_pos
                    .iter()
                    .find(|(id, _, _)| *id == book_id)
                    .map(|(_, _, t)| t.clone())
                    .unwrap_or_default();
                book_changes.push(BookDiff {
                    book_id: book_id.to_string(),
                    title: local_title,
                    volume: book.volume_number,
                    changes: diffs,
                });
            }
        }
    }

    // Update synced_at on the link
    sqlx::query("UPDATE external_metadata_links SET synced_at = NOW(), updated_at = NOW() WHERE id = $1")
        .bind(link_id)
        .execute(pool)
        .await
        .map_err(|e| e.to_string())?;

    let has_changes = !series_changes.is_empty() || !book_changes.is_empty();

    Ok(SeriesRefreshResult {
        series_name: series_name.to_string(),
        provider: provider_name.to_string(),
        status: if has_changes { "updated".to_string() } else { "unchanged".to_string() },
        series_changes,
        book_changes,
        error: None,
    })
}

// ---------------------------------------------------------------------------
// Diff helpers
// ---------------------------------------------------------------------------

/// Compare old/new for a nullable string field. Returns Some(FieldDiff) only if value actually changed.
fn diff_opt_str(field: &str, old: Option<&str>, new: Option<&str>) -> Option<FieldDiff> {
    let new_val = new.filter(|s| !s.is_empty());
    // Only report a change if there is a new non-empty value AND it differs from old
    match (old, new_val) {
        (Some(o), Some(n)) if o != n => Some(FieldDiff {
            field: field.to_string(),
            old: Some(serde_json::Value::String(o.to_string())),
            new: Some(serde_json::Value::String(n.to_string())),
        }),
        (None, Some(n)) => Some(FieldDiff {
            field: field.to_string(),
            old: None,
            new: Some(serde_json::Value::String(n.to_string())),
        }),
        _ => None,
    }
}

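/// Same comparison as diff_opt_str, for optional integer fields.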
fn diff_opt_i32(field: &str, old: Option<i32>, new: Option<i32>) -> Option<FieldDiff> {
    match (old, new) {
        (Some(o), Some(n)) if o != n => Some(FieldDiff {
            field: field.to_string(),
            old: Some(serde_json::json!(o)),
            new: Some(serde_json::json!(n)),
        }),
        (None, Some(n)) => Some(FieldDiff {
            field: field.to_string(),
            old: None,
            new: Some(serde_json::json!(n)),
        }),
        _ => None,
    }
}

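/// Reports a list change only when the provider returned a non-empty list that differs.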
fn diff_str_vec(field: &str, old: &[String], new: &[String]) -> Option<FieldDiff> {
    if new.is_empty() {
        return None;
    }
    if old != new {
        Some(FieldDiff {
            field: field.to_string(),
            old: Some(serde_json::json!(old)),
            new: Some(serde_json::json!(new)),
        })
    } else {
        None
    }
}

// ---------------------------------------------------------------------------
// Series sync with diff tracking
// ---------------------------------------------------------------------------

async fn sync_series_with_diff(
    pool: &PgPool,
    library_id: Uuid,
    series_name: &str,
    candidate: &metadata_providers::SeriesCandidate,
) -> Result<Vec<FieldDiff>, String> {
    let new_description = candidate.metadata_json
        .get("description")
        .and_then(|d| d.as_str())
        .or(candidate.description.as_deref());
    let new_authors = &candidate.authors;
    let new_publishers = &candidate.publishers;
    let new_start_year = candidate.start_year;
    let new_total_volumes = candidate.total_volumes;
    let new_status = if let Some(raw) = candidate.metadata_json.get("status").and_then(|s| s.as_str()) {
        Some(crate::metadata::normalize_series_status(pool, raw).await)
    } else {
        None
    };
    let new_status = new_status.as_deref();

    // Fetch existing series metadata for diffing
    let existing = sqlx::query(
        r#"SELECT description, publishers, start_year, total_volumes, status, authors, locked_fields
           FROM series_metadata WHERE library_id = $1 AND name = $2"#,
    )
    .bind(library_id)
    .bind(series_name)
    .fetch_optional(pool)
    .await
    .map_err(|e| e.to_string())?;

    let locked = existing
        .as_ref()
        .map(|r| r.get::<serde_json::Value, _>("locked_fields"))
        .unwrap_or(serde_json::json!({}));
    let is_locked = |field: &str| -> bool {
        locked.get(field).and_then(|v| v.as_bool()).unwrap_or(false)
    };

    // Build diffs (only for unlocked fields that actually change)
    let mut diffs: Vec<FieldDiff> = Vec::new();

    if !is_locked("description") {
        let old_desc: Option<String> = existing.as_ref().and_then(|r| r.get("description"));
        if let Some(d) = diff_opt_str("description", old_desc.as_deref(), new_description) {
            diffs.push(d);
        }
    }
    if !is_locked("authors") {
        let old_authors: Vec<String> = existing.as_ref().map(|r| r.get("authors")).unwrap_or_default();
        if let Some(d) = diff_str_vec("authors", &old_authors, new_authors) {
            diffs.push(d);
        }
    }
    if !is_locked("publishers") {
        let old_publishers: Vec<String> = existing.as_ref().map(|r| r.get("publishers")).unwrap_or_default();
        if let Some(d) = diff_str_vec("publishers", &old_publishers, new_publishers) {
            diffs.push(d);
        }
    }
    if !is_locked("start_year") {
        let old_year: Option<i32> = existing.as_ref().and_then(|r| r.get("start_year"));
        if let Some(d) = diff_opt_i32("start_year", old_year, new_start_year) {
            diffs.push(d);
        }
    }
    if !is_locked("total_volumes") {
        let old_vols: Option<i32> = existing.as_ref().and_then(|r| r.get("total_volumes"));
        if let Some(d) = diff_opt_i32("total_volumes", old_vols, new_total_volumes) {
            diffs.push(d);
        }
    }
    if !is_locked("status") {
        let old_status: Option<String> = existing.as_ref().and_then(|r| r.get("status"));
        if let Some(d) = diff_opt_str("status", old_status.as_deref(), new_status) {
            diffs.push(d);
        }
    }

    // Now do the actual upsert
    sqlx::query(
        r#"
        INSERT INTO series_metadata (library_id, name, description, publishers, start_year, total_volumes, status, authors, created_at, updated_at)
        VALUES ($1, $2, $3, $4, $5, $6, $7, $8, NOW(), NOW())
        ON CONFLICT (library_id, name)
        DO UPDATE SET
            description = CASE
                WHEN (series_metadata.locked_fields->>'description')::boolean IS TRUE THEN series_metadata.description
                ELSE COALESCE(NULLIF(EXCLUDED.description, ''), series_metadata.description)
            END,
            publishers = CASE
                WHEN (series_metadata.locked_fields->>'publishers')::boolean IS TRUE THEN series_metadata.publishers
                WHEN array_length(EXCLUDED.publishers, 1) > 0 THEN EXCLUDED.publishers
                ELSE series_metadata.publishers
            END,
            start_year = CASE
                WHEN (series_metadata.locked_fields->>'start_year')::boolean IS TRUE THEN series_metadata.start_year
                ELSE COALESCE(EXCLUDED.start_year, series_metadata.start_year)
            END,
            total_volumes = CASE
                WHEN (series_metadata.locked_fields->>'total_volumes')::boolean IS TRUE THEN series_metadata.total_volumes
                ELSE COALESCE(EXCLUDED.total_volumes, series_metadata.total_volumes)
            END,
            status = CASE
                WHEN (series_metadata.locked_fields->>'status')::boolean IS TRUE THEN series_metadata.status
                ELSE COALESCE(EXCLUDED.status, series_metadata.status)
            END,
            authors = CASE
                WHEN (series_metadata.locked_fields->>'authors')::boolean IS TRUE THEN series_metadata.authors
                WHEN array_length(EXCLUDED.authors, 1) > 0 THEN EXCLUDED.authors
                ELSE series_metadata.authors
            END,
            updated_at = NOW()
        "#,
    )
    .bind(library_id)
    .bind(series_name)
    .bind(new_description)
    .bind(new_publishers)
    .bind(new_start_year)
    .bind(new_total_volumes)
    .bind(new_status)
    .bind(new_authors)
    .execute(pool)
    .await
    .map_err(|e| e.to_string())?;

    Ok(diffs)
}

// ---------------------------------------------------------------------------
// Book sync with diff tracking
// ---------------------------------------------------------------------------

async fn sync_book_with_diff(
    pool: &PgPool,
    book_id: Uuid,
    ext_book: &metadata_providers::BookCandidate,
) -> Result<Vec<FieldDiff>, String> {
    // Fetch current book state
    let current = sqlx::query(
        "SELECT summary, isbn, publish_date, language, authors, locked_fields FROM books WHERE id = $1",
    )
    .bind(book_id)
    .fetch_one(pool)
    .await
    .map_err(|e| e.to_string())?;

    let locked = current.get::<serde_json::Value, _>("locked_fields");
    let is_locked = |field: &str| -> bool {
        locked.get(field).and_then(|v| v.as_bool()).unwrap_or(false)
    };

    // Build diffs
    let mut diffs: Vec<FieldDiff> = Vec::new();

    if !is_locked("summary") {
        let old: Option<String> = current.get("summary");
        if let Some(d) = diff_opt_str("summary", old.as_deref(), ext_book.summary.as_deref()) {
            diffs.push(d);
        }
    }
    if !is_locked("isbn") {
        let old: Option<String> = current.get("isbn");
        if let Some(d) = diff_opt_str("isbn", old.as_deref(), ext_book.isbn.as_deref()) {
            diffs.push(d);
        }
    }
    if !is_locked("publish_date") {
        let old: Option<String> = current.get("publish_date");
        if let Some(d) = diff_opt_str("publish_date", old.as_deref(), ext_book.publish_date.as_deref()) {
            diffs.push(d);
        }
    }
    if !is_locked("language") {
        let old: Option<String> = current.get("language");
        if let Some(d) = diff_opt_str("language", old.as_deref(), ext_book.language.as_deref()) {
            diffs.push(d);
        }
    }
    if !is_locked("authors") {
        let old: Vec<String> = current.get("authors");
        if let Some(d) = diff_str_vec("authors", &old, &ext_book.authors) {
            diffs.push(d);
        }
    }

    // Do the actual update
    sqlx::query(
        r#"
        UPDATE books SET
            summary = CASE
                WHEN (locked_fields->>'summary')::boolean IS TRUE THEN summary
                ELSE COALESCE(NULLIF($2, ''), summary)
            END,
            isbn = CASE
                WHEN (locked_fields->>'isbn')::boolean IS TRUE THEN isbn
                ELSE COALESCE(NULLIF($3, ''), isbn)
            END,
            publish_date = CASE
                WHEN (locked_fields->>'publish_date')::boolean IS TRUE THEN publish_date
                ELSE COALESCE(NULLIF($4, ''), publish_date)
            END,
            language = CASE
                WHEN (locked_fields->>'language')::boolean IS TRUE THEN language
                ELSE COALESCE(NULLIF($5, ''), language)
            END,
            authors = CASE
                WHEN (locked_fields->>'authors')::boolean IS TRUE THEN authors
                WHEN CARDINALITY($6::text[]) > 0 THEN $6
                ELSE authors
            END,
            author = CASE
                WHEN (locked_fields->>'authors')::boolean IS TRUE THEN author
                WHEN CARDINALITY($6::text[]) > 0 THEN $6[1]
                ELSE author
            END,
            updated_at = NOW()
        WHERE id = $1
        "#,
    )
    .bind(book_id)
    .bind(&ext_book.summary)
    .bind(&ext_book.isbn)
    .bind(&ext_book.publish_date)
    .bind(&ext_book.language)
    .bind(&ext_book.authors)
    .execute(pool)
    .await
    .map_err(|e| e.to_string())?;

    Ok(diffs)
}
@@ -6,10 +6,23 @@ use utoipa::OpenApi;
|
|||||||
paths(
|
paths(
|
||||||
crate::books::list_books,
|
crate::books::list_books,
|
||||||
crate::books::get_book,
|
crate::books::get_book,
|
||||||
crate::books::list_series,
|
crate::reading_progress::get_reading_progress,
|
||||||
|
crate::reading_progress::update_reading_progress,
|
||||||
|
crate::reading_progress::mark_series_read,
|
||||||
|
crate::books::get_thumbnail,
|
||||||
|
crate::series::list_series,
|
||||||
|
crate::series::list_all_series,
|
||||||
|
crate::series::ongoing_series,
|
||||||
|
crate::series::ongoing_books,
|
||||||
|
crate::books::convert_book,
|
||||||
|
crate::books::update_book,
|
||||||
|
crate::series::get_series_metadata,
|
||||||
|
crate::series::update_series,
|
||||||
crate::pages::get_page,
|
crate::pages::get_page,
|
||||||
crate::search::search_books,
|
crate::search::search_books,
|
||||||
crate::index_jobs::enqueue_rebuild,
|
crate::index_jobs::enqueue_rebuild,
|
||||||
|
crate::thumbnails::start_thumbnails_rebuild,
|
||||||
|
crate::thumbnails::start_thumbnails_regenerate,
|
||||||
crate::index_jobs::list_index_jobs,
|
crate::index_jobs::list_index_jobs,
|
||||||
crate::index_jobs::get_active_jobs,
|
crate::index_jobs::get_active_jobs,
|
||||||
crate::index_jobs::get_job_details,
|
crate::index_jobs::get_job_details,
|
||||||
@@ -22,9 +35,43 @@ use utoipa::OpenApi;
|
|||||||
crate::libraries::delete_library,
|
crate::libraries::delete_library,
|
||||||
crate::libraries::scan_library,
|
crate::libraries::scan_library,
|
||||||
crate::libraries::update_monitoring,
|
crate::libraries::update_monitoring,
|
||||||
|
crate::libraries::update_metadata_provider,
|
||||||
crate::tokens::list_tokens,
|
crate::tokens::list_tokens,
|
||||||
crate::tokens::create_token,
|
crate::tokens::create_token,
|
||||||
crate::tokens::revoke_token,
|
crate::tokens::revoke_token,
|
||||||
|
crate::tokens::delete_token,
|
||||||
|
crate::authors::list_authors,
|
||||||
|
crate::stats::get_stats,
|
||||||
|
crate::settings::get_settings,
|
||||||
|
crate::settings::get_setting,
|
||||||
|
crate::settings::update_setting,
|
||||||
|
crate::settings::clear_cache,
|
||||||
|
crate::settings::get_cache_stats,
|
||||||
|
crate::settings::get_thumbnail_stats,
|
||||||
|
crate::metadata::search_metadata,
|
||||||
|
crate::metadata::create_metadata_match,
|
||||||
|
crate::metadata::approve_metadata,
|
||||||
|
crate::metadata::reject_metadata,
|
||||||
|
crate::metadata::get_metadata_links,
|
||||||
|
crate::metadata::get_missing_books,
|
||||||
|
crate::metadata::delete_metadata_link,
|
||||||
|
crate::series::series_statuses,
|
||||||
|
crate::series::provider_statuses,
|
||||||
|
crate::settings::list_status_mappings,
|
||||||
|
crate::settings::upsert_status_mapping,
|
||||||
|
crate::settings::delete_status_mapping,
|
||||||
|
crate::prowlarr::search_prowlarr,
|
||||||
|
crate::prowlarr::test_prowlarr,
|
||||||
|
crate::qbittorrent::add_torrent,
|
||||||
|
crate::qbittorrent::test_qbittorrent,
|
||||||
|
crate::metadata_batch::start_batch,
|
||||||
|
crate::metadata_batch::get_batch_report,
|
||||||
|
crate::metadata_batch::get_batch_results,
|
||||||
|
crate::metadata_refresh::start_refresh,
|
||||||
|
crate::metadata_refresh::get_refresh_report,
|
||||||
|
crate::komga::sync_komga_read_books,
|
||||||
|
crate::komga::list_sync_reports,
|
||||||
|
crate::komga::get_sync_report,
|
||||||
),
|
),
|
||||||
components(
|
components(
|
||||||
schemas(
|
schemas(
|
||||||
@@ -32,11 +79,24 @@ use utoipa::OpenApi;
|
|||||||
crate::books::BookItem,
|
crate::books::BookItem,
|
||||||
crate::books::BooksPage,
|
crate::books::BooksPage,
|
||||||
crate::books::BookDetails,
|
crate::books::BookDetails,
|
||||||
crate::books::SeriesItem,
|
crate::reading_progress::ReadingProgressResponse,
|
||||||
|
crate::reading_progress::UpdateReadingProgressRequest,
|
||||||
|
crate::reading_progress::MarkSeriesReadRequest,
|
||||||
|
crate::reading_progress::MarkSeriesReadResponse,
|
||||||
|
crate::series::SeriesItem,
|
||||||
|
crate::series::SeriesPage,
|
||||||
|
crate::series::ListAllSeriesQuery,
|
||||||
|
crate::series::OngoingQuery,
|
||||||
|
crate::books::UpdateBookRequest,
|
||||||
|
crate::series::SeriesMetadata,
|
||||||
|
crate::series::UpdateSeriesRequest,
|
||||||
|
crate::series::UpdateSeriesResponse,
|
||||||
crate::pages::PageQuery,
|
crate::pages::PageQuery,
|
||||||
crate::search::SearchQuery,
|
crate::search::SearchQuery,
|
||||||
crate::search::SearchResponse,
|
crate::search::SearchResponse,
|
||||||
|
crate::search::SeriesHit,
|
||||||
crate::index_jobs::RebuildRequest,
|
crate::index_jobs::RebuildRequest,
|
||||||
|
crate::thumbnails::ThumbnailsRebuildRequest,
|
||||||
crate::index_jobs::IndexJobResponse,
|
crate::index_jobs::IndexJobResponse,
|
||||||
crate::index_jobs::IndexJobDetailResponse,
|
crate::index_jobs::IndexJobDetailResponse,
|
||||||
crate::index_jobs::JobErrorResponse,
|
crate::index_jobs::JobErrorResponse,
|
||||||
@@ -45,9 +105,58 @@ use utoipa::OpenApi;
|
|||||||
crate::libraries::LibraryResponse,
|
crate::libraries::LibraryResponse,
|
||||||
crate::libraries::CreateLibraryRequest,
|
crate::libraries::CreateLibraryRequest,
|
||||||
crate::libraries::UpdateMonitoringRequest,
|
crate::libraries::UpdateMonitoringRequest,
|
||||||
|
crate::libraries::UpdateMetadataProviderRequest,
|
||||||
crate::tokens::CreateTokenRequest,
|
crate::tokens::CreateTokenRequest,
|
||||||
crate::tokens::TokenResponse,
|
crate::tokens::TokenResponse,
|
||||||
crate::tokens::CreatedTokenResponse,
|
crate::tokens::CreatedTokenResponse,
|
||||||
|
crate::settings::UpdateSettingRequest,
|
||||||
|
crate::settings::ClearCacheResponse,
|
||||||
|
crate::settings::CacheStats,
|
||||||
|
crate::settings::ThumbnailStats,
|
||||||
|
crate::settings::StatusMappingDto,
|
||||||
|
crate::settings::UpsertStatusMappingRequest,
|
||||||
|
crate::authors::ListAuthorsQuery,
|
||||||
|
crate::authors::AuthorItem,
|
||||||
|
crate::authors::AuthorsPageResponse,
|
||||||
|
crate::stats::StatsResponse,
|
||||||
|
crate::stats::StatsOverview,
|
||||||
|
crate::stats::ReadingStatusStats,
|
||||||
|
crate::stats::FormatCount,
|
||||||
|
crate::stats::LanguageCount,
|
||||||
|
crate::stats::LibraryStats,
|
||||||
|
crate::stats::TopSeries,
|
||||||
|
crate::stats::MonthlyAdditions,
|
||||||
|
crate::stats::MetadataStats,
|
||||||
|
crate::stats::ProviderCount,
|
||||||
|
crate::metadata::ApproveRequest,
|
||||||
|
crate::metadata::ApproveResponse,
|
||||||
|
crate::metadata::SyncReport,
|
||||||
|
crate::metadata::SeriesSyncReport,
|
||||||
|
crate::metadata::BookSyncReport,
|
||||||
|
crate::metadata::FieldChange,
|
||||||
|
crate::metadata::MetadataSearchRequest,
|
||||||
|
crate::metadata::SeriesCandidateDto,
|
||||||
|
crate::metadata::MetadataMatchRequest,
|
||||||
|
crate::metadata::ExternalMetadataLinkDto,
|
||||||
|
crate::metadata::MissingBooksDto,
|
||||||
|
crate::metadata::MissingBookItem,
|
||||||
|
crate::qbittorrent::QBittorrentAddRequest,
|
||||||
|
crate::qbittorrent::QBittorrentAddResponse,
|
||||||
|
crate::qbittorrent::QBittorrentTestResponse,
|
||||||
|
crate::prowlarr::ProwlarrSearchRequest,
|
||||||
|
crate::prowlarr::ProwlarrRelease,
|
||||||
|
crate::prowlarr::ProwlarrCategory,
|
||||||
|
crate::prowlarr::ProwlarrSearchResponse,
|
||||||
|
crate::prowlarr::MissingVolumeInput,
|
||||||
|
crate::prowlarr::ProwlarrTestResponse,
|
||||||
|
crate::metadata_batch::MetadataBatchRequest,
|
||||||
|
crate::metadata_batch::MetadataBatchReportDto,
|
||||||
|
crate::metadata_batch::MetadataBatchResultDto,
|
||||||
|
crate::metadata_refresh::MetadataRefreshRequest,
|
||||||
|
crate::metadata_refresh::MetadataRefreshReportDto,
|
||||||
|
crate::komga::KomgaSyncRequest,
|
||||||
|
crate::komga::KomgaSyncResponse,
|
||||||
|
crate::komga::KomgaSyncReportSummary,
|
||||||
ErrorResponse,
|
ErrorResponse,
|
||||||
)
|
)
|
||||||
),
|
),
|
||||||
@@ -55,10 +164,20 @@ use utoipa::OpenApi;
|
|||||||
("Bearer" = [])
|
("Bearer" = [])
|
||||||
),
|
),
|
||||||
tags(
|
tags(
|
||||||
(name = "books", description = "Read-only endpoints for browsing and searching books"),
|
(name = "books", description = "Book browsing, details and management"),
|
||||||
(name = "libraries", description = "Library management endpoints (Admin only)"),
|
(name = "series", description = "Series browsing, filtering and management"),
|
||||||
|
(name = "search", description = "Full-text search across books and series"),
|
||||||
|
(name = "reading-progress", description = "Reading progress tracking per book"),
|
||||||
|
(name = "authors", description = "Author browsing and listing"),
|
||||||
|
(name = "stats", description = "Collection statistics and dashboard data"),
|
||||||
|
(name = "libraries", description = "Library listing, scanning, and management (create/delete/settings: Admin only)"),
|
||||||
(name = "indexing", description = "Search index management and job control (Admin only)"),
|
(name = "indexing", description = "Search index management and job control (Admin only)"),
|
||||||
|
(name = "metadata", description = "External metadata providers and matching (Admin only)"),
|
||||||
|
(name = "komga", description = "Komga read-status sync (Admin only)"),
|
||||||
(name = "tokens", description = "API token management (Admin only)"),
|
(name = "tokens", description = "API token management (Admin only)"),
|
||||||
|
(name = "settings", description = "Application settings and cache management (Admin only)"),
|
||||||
|
(name = "prowlarr", description = "Prowlarr indexer integration (Admin only)"),
|
||||||
|
(name = "qbittorrent", description = "qBittorrent download client integration (Admin only)"),
|
||||||
),
|
),
|
||||||
modifiers(&SecurityAddon)
|
modifiers(&SecurityAddon)
|
||||||
)]
|
)]
|
||||||
@@ -103,15 +222,24 @@ mod tests {
|
|||||||
.to_pretty_json()
|
.to_pretty_json()
|
||||||
.expect("Failed to serialize OpenAPI");
|
.expect("Failed to serialize OpenAPI");
|
||||||
|
|
||||||
// Check that there are no references to non-existent schemas
|
// Check that all $ref targets exist in components/schemas
|
||||||
assert!(
|
let doc: serde_json::Value =
|
||||||
!json.contains("\"/components/schemas/Uuid\""),
|
serde_json::from_str(&json).expect("OpenAPI JSON should be valid");
|
||||||
"Uuid schema should not be referenced"
|
let empty = serde_json::Map::new();
|
||||||
);
|
let schemas = doc["components"]["schemas"]
|
||||||
assert!(
|
.as_object()
|
||||||
!json.contains("\"/components/schemas/DateTime\""),
|
.unwrap_or(&empty);
|
||||||
"DateTime schema should not be referenced"
|
let prefix = "#/components/schemas/";
|
||||||
);
|
let mut broken: Vec<String> = Vec::new();
|
||||||
|
for part in json.split(prefix).skip(1) {
|
||||||
|
if let Some(name) = part.split('"').next() {
|
||||||
|
if !schemas.contains_key(name) {
|
||||||
|
broken.push(name.to_string());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
broken.dedup();
|
||||||
|
assert!(broken.is_empty(), "Unresolved schema refs: {:?}", broken);
|
||||||
|
|
||||||
// Save to file for inspection
|
// Save to file for inspection
|
||||||
std::fs::write("/tmp/openapi.json", &json).expect("Failed to write file");
|
std::fs::write("/tmp/openapi.json", &json).expect("Failed to write file");
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
 use std::{
-    io::{Read, Write},
+    io::Write,
     path::{Path, PathBuf},
     sync::{atomic::Ordering, Arc},
     time::Duration,
@@ -16,10 +16,10 @@ use serde::Deserialize;
 use utoipa::ToSchema;
 use sha2::{Digest, Sha256};
 use sqlx::Row;
-use tracing::{debug, error, info, instrument, warn};
+use tracing::{error, info, instrument, warn};
 use uuid::Uuid;

-use crate::{error::ApiError, AppState};
+use crate::{error::ApiError, state::AppState};

 fn remap_libraries_path(path: &str) -> String {
     if let Ok(root) = std::env::var("LIBRARIES_ROOT_PATH") {
@@ -30,10 +30,12 @@ fn remap_libraries_path(path: &str) -> String {
     path.to_string()
 }

-fn get_image_cache_dir() -> PathBuf {
-    std::env::var("IMAGE_CACHE_DIR")
-        .map(PathBuf::from)
-        .unwrap_or_else(|_| PathBuf::from("/tmp/stripstream-image-cache"))
+fn parse_filter(s: &str) -> image::imageops::FilterType {
+    match s {
+        "lanczos3" => image::imageops::FilterType::Lanczos3,
+        "nearest" => image::imageops::FilterType::Nearest,
+        _ => image::imageops::FilterType::Triangle, // Triangle (bilinear) is fast and good enough for comics
+    }
 }

 fn get_cache_key(abs_path: &str, page: u32, format: &str, quality: u8, width: u32) -> String {
@@ -46,8 +48,7 @@ fn get_cache_key(abs_path: &str, page: u32, format: &str, quality: u8, width: u3
     format!("{:x}", hasher.finalize())
 }

-fn get_cache_path(cache_key: &str, format: &OutputFormat) -> PathBuf {
-    let cache_dir = get_image_cache_dir();
+fn get_cache_path(cache_key: &str, format: &OutputFormat, cache_dir: &Path) -> PathBuf {
     let prefix = &cache_key[..2];
     let ext = format.extension();
     cache_dir.join(prefix).join(format!("{}.{}", cache_key, ext))
@@ -63,7 +64,7 @@ fn write_to_disk_cache(cache_path: &Path, data: &[u8]) -> Result<(), std::io::Er
     }
     let mut file = std::fs::File::create(cache_path)?;
     file.write_all(data)?;
-    file.sync_data()?;
+    // No sync_data() — this is a cache, durability is not critical
     Ok(())
 }
@@ -79,6 +80,8 @@ pub struct PageQuery {

 #[derive(Clone, Copy, Debug)]
 enum OutputFormat {
+    /// Serve raw bytes from the archive — no decode, no re-encode.
+    Original,
     Jpeg,
     Png,
     Webp,
@@ -86,16 +89,19 @@ enum OutputFormat {

 impl OutputFormat {
     fn parse(value: Option<&str>) -> Result<Self, ApiError> {
-        match value.unwrap_or("webp") {
-            "jpeg" | "jpg" => Ok(Self::Jpeg),
-            "png" => Ok(Self::Png),
-            "webp" => Ok(Self::Webp),
-            _ => Err(ApiError::bad_request("format must be webp|jpeg|png")),
+        match value {
+            None => Ok(Self::Original),
+            Some("original") => Ok(Self::Original),
+            Some("jpeg") | Some("jpg") => Ok(Self::Jpeg),
+            Some("png") => Ok(Self::Png),
+            Some("webp") => Ok(Self::Webp),
+            _ => Err(ApiError::bad_request("format must be original|webp|jpeg|png")),
         }
     }

     fn content_type(&self) -> &'static str {
         match self {
+            Self::Original => "application/octet-stream", // will be overridden by detected type
             Self::Jpeg => "image/jpeg",
             Self::Png => "image/png",
             Self::Webp => "image/webp",
@@ -104,6 +110,7 @@ impl OutputFormat {

     fn extension(&self) -> &'static str {
         match self {
+            Self::Original => "orig",
             Self::Jpeg => "jpg",
             Self::Png => "png",
             Self::Webp => "webp",
@@ -111,6 +118,17 @@ impl OutputFormat {
         }
     }
 }

+/// Detect content type from raw image bytes.
+fn detect_content_type(data: &[u8]) -> &'static str {
+    match image::guess_format(data) {
+        Ok(ImageFormat::Jpeg) => "image/jpeg",
+        Ok(ImageFormat::Png) => "image/png",
+        Ok(ImageFormat::WebP) => "image/webp",
+        Ok(ImageFormat::Avif) => "image/avif",
+        _ => "application/octet-stream",
+    }
+}

 /// Get a specific page image from a book with optional format conversion
 #[utoipa::path(
     get,
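One behavioural consequence of the new `Original` variant worth calling out: omitting `?format=` now serves the archive's raw bytes instead of defaulting to WebP. A minimal sketch of the resulting mapping, assuming only the `OutputFormat::parse` shown above (the test module itself is illustrative, not part of the change set):

```rust
#[cfg(test)]
mod format_param_sketch {
    use super::OutputFormat;

    #[test]
    fn missing_format_means_original_passthrough() {
        // No ?format= → raw bytes from the archive, no re-encode (was: WebP default).
        assert!(matches!(OutputFormat::parse(None), Ok(OutputFormat::Original)));
        // Explicit values still select an encoder; "jpg" aliases "jpeg".
        assert!(matches!(OutputFormat::parse(Some("jpg")), Ok(OutputFormat::Jpeg)));
        // Unknown values are rejected (400 Bad Request).
        assert!(OutputFormat::parse(Some("bmp")).is_err());
    }
}
```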
@@ -131,36 +149,38 @@ impl OutputFormat {
     ),
     security(("Bearer" = []))
 )]
-#[instrument(skip(state), fields(book_id = %book_id, page = n))]
+#[instrument(skip(state, headers), fields(book_id = %book_id, page = n))]
 pub async fn get_page(
     State(state): State<AppState>,
     AxumPath((book_id, n)): AxumPath<(Uuid, u32)>,
     Query(query): Query<PageQuery>,
+    headers: HeaderMap,
 ) -> Result<Response, ApiError> {
-    info!("Processing image request");

     if n == 0 {
-        warn!("Invalid page number: 0");
         return Err(ApiError::bad_request("page index starts at 1"));
     }

+    let (default_quality, max_width, filter_str, timeout_secs, cache_dir) = {
+        let s = state.settings.read().await;
+        (s.image_quality, s.image_max_width, s.image_filter.clone(), s.timeout_seconds, s.cache_directory.clone())
+    };

     let format = OutputFormat::parse(query.format.as_deref())?;
-    let quality = query.quality.unwrap_or(80).clamp(1, 100);
+    let quality = query.quality.unwrap_or(default_quality).clamp(1, 100);
     let width = query.width.unwrap_or(0);
-    if width > 2160 {
-        warn!("Invalid width: {}", width);
-        return Err(ApiError::bad_request("width must be <= 2160"));
+    if width > max_width {
+        return Err(ApiError::bad_request(format!("width must be <= {}", max_width)));
     }
+    let filter = parse_filter(&filter_str);
+    let cache_dir_path = std::path::PathBuf::from(&cache_dir);

     let memory_cache_key = format!("{book_id}:{n}:{}:{quality}:{width}", format.extension());

     if let Some(cached) = state.page_cache.lock().await.get(&memory_cache_key).cloned() {
         state.metrics.page_cache_hits.fetch_add(1, Ordering::Relaxed);
-        debug!("Memory cache hit for key: {}", memory_cache_key);
-        return Ok(image_response(cached, format.content_type(), None));
+        return Ok(image_response(cached, format, None, &headers));
     }
     state.metrics.page_cache_misses.fetch_add(1, Ordering::Relaxed);
-    debug!("Memory cache miss for key: {}", memory_cache_key);

     let row = sqlx::query(
         r#"
@@ -182,7 +202,6 @@ pub async fn get_page(
     let row = match row {
         Some(r) => r,
         None => {
-            error!("Book file not found for book_id: {}", book_id);
             return Err(ApiError::not_found("book file not found"));
         }
     };
@@ -191,18 +210,22 @@ pub async fn get_page(
     let abs_path = remap_libraries_path(&abs_path);
     let input_format: String = row.get("format");

-    info!("Processing book file: {} (format: {})", abs_path, input_format);

     let disk_cache_key = get_cache_key(&abs_path, n, format.extension(), quality, width);
-    let cache_path = get_cache_path(&disk_cache_key, &format);
+    let cache_path = get_cache_path(&disk_cache_key, &format, &cache_dir_path);

+    // If-None-Match: return 304 if the client already has this version
+    if let Some(if_none_match) = headers.get(header::IF_NONE_MATCH) {
+        let expected_etag = format!("\"{}\"", disk_cache_key);
+        if if_none_match.as_bytes() == expected_etag.as_bytes() {
+            return Ok(StatusCode::NOT_MODIFIED.into_response());
+        }
+    }

     if let Some(cached_bytes) = read_from_disk_cache(&cache_path) {
-        info!("Disk cache hit for: {}", cache_path.display());
         let bytes = Arc::new(cached_bytes);
         state.page_cache.lock().await.put(memory_cache_key, bytes.clone());
-        return Ok(image_response(bytes, format.content_type(), Some(&disk_cache_key)));
+        return Ok(image_response(bytes, format, Some(&disk_cache_key), &headers));
     }
-    debug!("Disk cache miss for: {}", cache_path.display());

     let _permit = state
         .page_render_limit
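Since the ETag is simply the disk cache key, a client can revalidate a page without the server touching the archive at all. A hedged client-side sketch of that flow (reqwest and the endpoint shape are assumptions; the header exchange is what the handler above implements):

```rust
use reqwest::header::{ETAG, IF_NONE_MATCH};

// Illustrative only — crate choice and URL are assumptions, not part of the change.
async fn revalidate_page(url: &str) -> reqwest::Result<()> {
    let client = reqwest::Client::new();
    let first = client.get(url).send().await?;

    if let Some(etag) = first.headers().get(ETAG).cloned() {
        // Replaying the ETag should short-circuit to 304 before any render work.
        let second = client.get(url).header(IF_NONE_MATCH, etag).send().await?;
        assert_eq!(second.status(), reqwest::StatusCode::NOT_MODIFIED);
    }
    Ok(())
}
```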
@@ -214,15 +237,14 @@ pub async fn get_page(
         ApiError::internal("render limiter unavailable")
     })?;

-    info!("Rendering page {} from {}", n, abs_path);
     let abs_path_clone = abs_path.clone();
     let format_clone = format;
     let start_time = std::time::Instant::now();

     let bytes = tokio::time::timeout(
-        Duration::from_secs(12),
+        Duration::from_secs(timeout_secs),
         tokio::task::spawn_blocking(move || {
-            render_page(&abs_path_clone, &input_format, n, &format_clone, quality, width)
+            render_page(&abs_path_clone, &input_format, n, &format_clone, quality, width, filter)
         }),
     )
     .await
@@ -239,18 +261,37 @@ pub async fn get_page(

     match bytes {
         Ok(data) => {
-            info!("Successfully rendered page {} in {:?}", n, duration);
+            info!("Rendered page {} in {:?}", n, duration);

             if let Err(e) = write_to_disk_cache(&cache_path, &data) {
                 warn!("Failed to write to disk cache: {}", e);
-            } else {
-                info!("Cached rendered image to: {}", cache_path.display());
             }

             let bytes = Arc::new(data);
-            state.page_cache.lock().await.put(memory_cache_key, bytes.clone());
+            state.page_cache.lock().await.put(memory_cache_key.clone(), bytes.clone());

-            Ok(image_response(bytes, format.content_type(), Some(&disk_cache_key)))
+            // Prefetch next 2 pages in background (fire-and-forget)
+            for next_page in [n + 1, n + 2] {
+                let state2 = state.clone();
+                let abs_path2 = abs_path.clone();
+                let cache_dir2 = cache_dir_path.clone();
+                let format2 = format;
+                tokio::spawn(async move {
+                    prefetch_page(state2, &PrefetchParams {
+                        book_id,
+                        abs_path: &abs_path2,
+                        page: next_page,
+                        format: format2,
+                        quality,
+                        width,
+                        filter,
+                        timeout_secs,
+                        cache_dir: &cache_dir2,
+                    }).await;
+                });
+            }

+            Ok(image_response(bytes, format, Some(&disk_cache_key), &headers))
         }
         Err(e) => {
             error!("Failed to render page {} from {}: {:?}", n, abs_path, e);
@@ -259,11 +300,84 @@ pub async fn get_page(
     }
 }

-fn image_response(bytes: Arc<Vec<u8>>, content_type: &str, etag_suffix: Option<&str>) -> Response {
-    let mut headers = HeaderMap::new();
-    headers.insert(header::CONTENT_TYPE, HeaderValue::from_str(content_type).unwrap_or(HeaderValue::from_static("application/octet-stream")));
-    headers.insert(header::CACHE_CONTROL, HeaderValue::from_static("public, max-age=31536000, immutable"));
+struct PrefetchParams<'a> {
+    book_id: Uuid,
+    abs_path: &'a str,
+    page: u32,
+    format: OutputFormat,
+    quality: u8,
+    width: u32,
+    filter: image::imageops::FilterType,
+    timeout_secs: u64,
+    cache_dir: &'a Path,
+}

+/// Prefetch a single page into disk+memory cache (best-effort, ignores errors).
+async fn prefetch_page(state: AppState, params: &PrefetchParams<'_>) {
+    let book_id = params.book_id;
+    let page = params.page;
+    let format = params.format;
+    let quality = params.quality;
+    let width = params.width;
+    let filter = params.filter;
+    let timeout_secs = params.timeout_secs;
+    let abs_path = params.abs_path;
+    let cache_dir = params.cache_dir;

+    let mem_key = format!("{book_id}:{page}:{}:{quality}:{width}", format.extension());
+    // Already in memory cache?
+    if state.page_cache.lock().await.contains(&mem_key) {
+        return;
+    }
+    // Already on disk?
+    let disk_key = get_cache_key(abs_path, page, format.extension(), quality, width);
+    let cache_path = get_cache_path(&disk_key, &format, cache_dir);
+    if cache_path.exists() {
+        return;
+    }
+    // Acquire render permit (don't block too long — if busy, skip)
+    let permit = tokio::time::timeout(
+        Duration::from_millis(100),
+        state.page_render_limit.clone().acquire_owned(),
+    )
+    .await;
+    let _permit = match permit {
+        Ok(Ok(p)) => p,
+        _ => return,
+    };

+    // Fetch the book format from the path extension as a shortcut
+    let input_format = match abs_path.rsplit('.').next().map(|e| e.to_ascii_lowercase()) {
+        Some(ref e) if e == "cbz" => "cbz",
+        Some(ref e) if e == "cbr" => "cbr",
+        Some(ref e) if e == "pdf" => "pdf",
+        Some(ref e) if e == "epub" => "epub",
+        _ => return,
+    }
+    .to_string();

+    let abs_clone = abs_path.to_string();
+    let fmt = format;
+    let result = tokio::time::timeout(
+        Duration::from_secs(timeout_secs),
+        tokio::task::spawn_blocking(move || {
+            render_page(&abs_clone, &input_format, page, &fmt, quality, width, filter)
+        }),
+    )
+    .await;

+    if let Ok(Ok(Ok(data))) = result {
+        let _ = write_to_disk_cache(&cache_path, &data);
+        let bytes = Arc::new(data);
+        state.page_cache.lock().await.put(mem_key, bytes);
+    }
+}

+fn image_response(bytes: Arc<Vec<u8>>, format: OutputFormat, etag_suffix: Option<&str>, req_headers: &HeaderMap) -> Response {
+    let content_type = match format {
+        OutputFormat::Original => detect_content_type(&bytes),
+        _ => format.content_type(),
+    };
     let etag = if let Some(suffix) = etag_suffix {
         format!("\"{}\"", suffix)
     } else {
@@ -272,10 +386,85 @@ fn image_response(bytes: Arc<Vec<u8>>, content_type: &str, etag_suffix: Option<&
         format!("\"{:x}\"", hasher.finalize())
     };

+    // Check If-None-Match for 304
+    if let Some(if_none_match) = req_headers.get(header::IF_NONE_MATCH) {
+        if if_none_match.as_bytes() == etag.as_bytes() {
+            let mut headers = HeaderMap::new();
+            headers.insert(header::CACHE_CONTROL, HeaderValue::from_static("public, max-age=31536000, immutable"));
+            if let Ok(v) = HeaderValue::from_str(&etag) {
+                headers.insert(header::ETAG, v);
+            }
+            return (StatusCode::NOT_MODIFIED, headers).into_response();
+        }
+    }

+    let mut headers = HeaderMap::new();
+    headers.insert(header::CONTENT_TYPE, HeaderValue::from_str(content_type).unwrap_or(HeaderValue::from_static("application/octet-stream")));
+    headers.insert(header::CACHE_CONTROL, HeaderValue::from_static("public, max-age=31536000, immutable"));
     if let Ok(v) = HeaderValue::from_str(&etag) {
         headers.insert(header::ETAG, v);
     }
-    (StatusCode::OK, headers, Body::from((*bytes).clone())).into_response()
+    // Use Bytes to avoid cloning the Vec — shares the Arc's allocation via zero-copy
+    let body_bytes = axum::body::Bytes::from(Arc::unwrap_or_clone(bytes));
+    (StatusCode::OK, headers, Body::from(body_bytes)).into_response()
+}

+/// Render page 1 of a book (for thumbnail fallback or thumbnail checkup). Uses thumbnail dimensions by default.
+/// Render page 1 as a thumbnail fallback. Returns (bytes, content_type).
+pub async fn render_book_page_1(
+    state: &AppState,
+    book_id: Uuid,
+    width: u32,
+    quality: u8,
+) -> Result<(Vec<u8>, &'static str), ApiError> {
+    let row = sqlx::query(
+        r#"SELECT abs_path, format FROM book_files WHERE book_id = $1 ORDER BY updated_at DESC LIMIT 1"#,
+    )
+    .bind(book_id)
+    .fetch_optional(&state.pool)
+    .await
+    .map_err(|e| ApiError::internal(e.to_string()))?;

+    let row = row.ok_or_else(|| ApiError::not_found("book file not found"))?;
+    let abs_path: String = row.get("abs_path");
+    let abs_path = remap_libraries_path(&abs_path);
+    let input_format: String = row.get("format");

+    let _permit = state
+        .page_render_limit
+        .clone()
+        .acquire_owned()
+        .await
+        .map_err(|_| ApiError::internal("render limiter unavailable"))?;

+    let (timeout_secs, filter_str) = {
+        let s = state.settings.read().await;
+        (s.timeout_seconds, s.image_filter.clone())
+    };
+    let filter = parse_filter(&filter_str);

+    let abs_path_clone = abs_path.clone();
+    let bytes = tokio::time::timeout(
+        Duration::from_secs(timeout_secs),
+        tokio::task::spawn_blocking(move || {
+            render_page(
+                &abs_path_clone,
+                &input_format,
+                1,
+                &OutputFormat::Original,
+                quality,
+                width,
+                filter,
+            )
+        }),
+    )
+    .await
+    .map_err(|_| ApiError::internal("page rendering timeout"))?
+    .map_err(|e| ApiError::internal(format!("render task failed: {e}")))?;

+    let bytes = bytes?;
+    let content_type = detect_content_type(&bytes);
+    Ok((bytes, content_type))
 }

 fn render_page(
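A note on the zero-copy comment in `image_response`: `Arc::unwrap_or_clone` moves the `Vec` out only when the response holds the last reference; since the handler has just stored a clone of the `Arc` in the page cache, the clone branch is the common case here. A standalone sketch of the semantics (std only):

```rust
use std::sync::Arc;

fn main() {
    let data = Arc::new(vec![1u8, 2, 3]);

    // Two handles: unwrap_or_clone must copy, leaving the cached Arc intact.
    let shared = Arc::clone(&data);
    let copied = Arc::unwrap_or_clone(shared);
    assert_eq!(copied, *data);

    // Sole handle: the Vec is moved out, no copy at all.
    let moved = Arc::unwrap_or_clone(data);
    assert_eq!(moved, vec![1, 2, 3]);
}
```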
@@ -285,232 +474,115 @@ fn render_page(
     out_format: &OutputFormat,
     quality: u8,
     width: u32,
+    filter: image::imageops::FilterType,
 ) -> Result<Vec<u8>, ApiError> {
-    let page_bytes = match input_format {
-        "cbz" => extract_cbz_page(abs_path, page_number)?,
-        "cbr" => extract_cbr_page(abs_path, page_number)?,
-        "pdf" => render_pdf_page(abs_path, page_number, width)?,
+    let format = match input_format {
+        "cbz" => parsers::BookFormat::Cbz,
+        "cbr" => parsers::BookFormat::Cbr,
+        "pdf" => parsers::BookFormat::Pdf,
+        "epub" => parsers::BookFormat::Epub,
         _ => return Err(ApiError::bad_request("unsupported source format")),
     };

-    transcode_image(&page_bytes, out_format, quality, width)
-}
-
-fn extract_cbz_page(abs_path: &str, page_number: u32) -> Result<Vec<u8>, ApiError> {
-    debug!("Opening CBZ archive: {}", abs_path);
-    let file = std::fs::File::open(abs_path).map_err(|e| {
-        error!("Cannot open CBZ file {}: {}", abs_path, e);
-        ApiError::internal(format!("cannot open cbz: {e}"))
-    })?;
-
-    let mut archive = zip::ZipArchive::new(file).map_err(|e| {
-        error!("Invalid CBZ archive {}: {}", abs_path, e);
-        ApiError::internal(format!("invalid cbz: {e}"))
-    })?;
-
-    let mut image_names: Vec<String> = Vec::new();
-    for i in 0..archive.len() {
-        let entry = archive.by_index(i).map_err(|e| {
-            error!("Failed to read CBZ entry {} in {}: {}", i, abs_path, e);
-            ApiError::internal(format!("cbz entry read failed: {e}"))
-        })?;
-        let name = entry.name().to_ascii_lowercase();
-        if is_image_name(&name) {
-            image_names.push(entry.name().to_string());
-        }
-    }
-    image_names.sort();
-    debug!("Found {} images in CBZ {}", image_names.len(), abs_path);
-
-    let index = page_number as usize - 1;
-    let selected = image_names.get(index).ok_or_else(|| {
-        error!("Page {} out of range in {} (total: {})", page_number, abs_path, image_names.len());
-        ApiError::not_found("page out of range")
-    })?;
-
-    debug!("Extracting page {} ({}) from {}", page_number, selected, abs_path);
-    let mut entry = archive.by_name(selected).map_err(|e| {
-        error!("Failed to read CBZ page {} from {}: {}", selected, abs_path, e);
-        ApiError::internal(format!("cbz page read failed: {e}"))
-    })?;
-    let mut buf = Vec::new();
-    entry.read_to_end(&mut buf).map_err(|e| {
-        error!("Failed to load CBZ page {} from {}: {}", selected, abs_path, e);
-        ApiError::internal(format!("cbz page load failed: {e}"))
-    })?;
-    Ok(buf)
-}
-
-fn extract_cbr_page(abs_path: &str, page_number: u32) -> Result<Vec<u8>, ApiError> {
-    debug!("Listing CBR archive: {}", abs_path);
-    let list_output = std::process::Command::new("unrar")
-        .arg("lb")
-        .arg(abs_path)
-        .output()
-        .map_err(|e| {
-            error!("unrar list command failed for {}: {}", abs_path, e);
-            ApiError::internal(format!("unrar list failed: {e}"))
-        })?;
-    if !list_output.status.success() {
-        let stderr = String::from_utf8_lossy(&list_output.stderr);
-        error!("unrar could not list archive {}: {}", abs_path, stderr);
-        return Err(ApiError::internal("unrar could not list archive"));
-    }
-
-    let mut entries: Vec<String> = String::from_utf8_lossy(&list_output.stdout)
-        .lines()
-        .filter(|line| is_image_name(&line.to_ascii_lowercase()))
-        .map(|s| s.to_string())
-        .collect();
-    entries.sort();
-
-    // Debug: show first few entries
-    if entries.len() > 0 {
-        debug!("First 5 entries in CBR {}: {:?}", abs_path, &entries[..entries.len().min(5)]);
-    }
-    debug!("Found {} images in CBR {}", entries.len(), abs_path);
-
-    let index = page_number as usize - 1;
-    let selected = entries.get(index).ok_or_else(|| {
-        error!("Page {} out of range in {} (total: {})", page_number, abs_path, entries.len());
-        ApiError::not_found("page out of range")
-    })?;
-
-    debug!("Extracting page {} ({}) from {}", page_number, selected, abs_path);
-    let page_output = std::process::Command::new("unrar")
-        .arg("p")
-        .arg("-inul")
-        .arg("-y")
-        .arg(abs_path)
-        .arg(selected)
-        .output()
-        .map_err(|e| {
-            error!("unrar extract command failed for {} page {}: {}", abs_path, selected, e);
-            ApiError::internal(format!("unrar extract failed: {e}"))
-        })?;
-    if !page_output.status.success() {
-        let stderr = String::from_utf8_lossy(&page_output.stderr);
-        error!("unrar could not extract page {} from {}: {}", selected, abs_path, stderr);
-        return Err(ApiError::internal("unrar could not extract page"));
-    }
-
-    let extracted_data = &page_output.stdout;
-    debug!("Extracted {} bytes from CBR page {}", extracted_data.len(), page_number);
-
-    // Verify it's actually an image by checking magic bytes
-    if extracted_data.len() < 1000 {
-        // Show first few bytes for debugging
-        let preview: Vec<u8> = extracted_data.iter().take(32).copied().collect();
-        let hex_preview: String = preview.iter().map(|b| format!("{:02x}", b)).collect();
-        error!("Extracted data too small ({} bytes) for page {} from {} - first bytes: {}",
-            extracted_data.len(), page_number, abs_path, hex_preview);
-        return Err(ApiError::internal("extracted data too small - not a valid image"));
-    }
-
-    // Check magic bytes to verify it's an image
-    let is_valid_image = extracted_data.len() > 4 && (
-        // JPEG
-        extracted_data.starts_with(&[0xFF, 0xD8, 0xFF]) ||
-        // PNG
-        extracted_data.starts_with(&[0x89, 0x50, 0x4E, 0x47]) ||
-        // GIF
-        extracted_data.starts_with(b"GIF87a") || extracted_data.starts_with(b"GIF89a") ||
-        // WebP
-        extracted_data.starts_with(b"RIFF") && extracted_data.len() > 12 && &extracted_data[8..12] == b"WEBP" ||
-        // TIFF
-        extracted_data.starts_with(&[0x49, 0x49, 0x2A, 0x00]) || extracted_data.starts_with(&[0x4D, 0x4D, 0x00, 0x2A])
-    );
-
-    if !is_valid_image {
-        // Show first few bytes for debugging
-        let preview: Vec<u8> = extracted_data.iter().take(32).copied().collect();
-        let hex_preview: String = preview.iter().map(|b| format!("{:02x}", b)).collect();
-        error!("Extracted data for page {} from {} is not a valid image format. First bytes: {} (size: {})",
-            page_number, abs_path, hex_preview, extracted_data.len());
-        return Err(ApiError::internal("extracted data is not a valid image"));
-    }
-
-    debug!("Successfully extracted {} bytes from CBR page {}", extracted_data.len(), page_number);
-    Ok(extracted_data.to_vec())
-}
-
-fn render_pdf_page(abs_path: &str, page_number: u32, width: u32) -> Result<Vec<u8>, ApiError> {
-    let tmp_dir = std::env::temp_dir().join(format!("stripstream-pdf-{}", Uuid::new_v4()));
-    debug!("Creating temp dir for PDF rendering: {}", tmp_dir.display());
-    std::fs::create_dir_all(&tmp_dir).map_err(|e| {
-        error!("Cannot create temp dir {}: {}", tmp_dir.display(), e);
-        ApiError::internal(format!("cannot create temp dir: {e}"))
-    })?;
-    let output_prefix = tmp_dir.join("page");
-
-    let mut cmd = std::process::Command::new("pdftoppm");
-    cmd.arg("-f")
-        .arg(page_number.to_string())
-        .arg("-singlefile")
-        .arg("-png");
-    if width > 0 {
-        cmd.arg("-scale-to-x").arg(width.to_string()).arg("-scale-to-y").arg("-1");
-    }
-    cmd.arg(abs_path).arg(&output_prefix);
-
-    debug!("Running pdftoppm for page {} of {} (width: {})", page_number, abs_path, width);
-    let output = cmd
-        .output()
-        .map_err(|e| {
-            error!("pdftoppm command failed for {} page {}: {}", abs_path, page_number, e);
-            ApiError::internal(format!("pdf render failed: {e}"))
-        })?;
-    if !output.status.success() {
-        let stderr = String::from_utf8_lossy(&output.stderr);
-        let _ = std::fs::remove_dir_all(&tmp_dir);
-        error!("pdftoppm failed for {} page {}: {}", abs_path, page_number, stderr);
-        return Err(ApiError::internal("pdf render command failed"));
-    }
-
-    let image_path = output_prefix.with_extension("png");
-    debug!("Reading rendered PDF page from: {}", image_path.display());
-    let bytes = std::fs::read(&image_path).map_err(|e| {
-        error!("Failed to read rendered PDF output {}: {}", image_path.display(), e);
-        ApiError::internal(format!("render output missing: {e}"))
-    })?;
-    let _ = std::fs::remove_dir_all(&tmp_dir);
-    debug!("Successfully rendered PDF page {} to {} bytes", page_number, bytes.len());
-    Ok(bytes)
-}
-
-fn transcode_image(input: &[u8], out_format: &OutputFormat, quality: u8, width: u32) -> Result<Vec<u8>, ApiError> {
-    debug!("Transcoding image: {} bytes, format: {:?}, quality: {}, width: {}", input.len(), out_format, quality, width);
+    let pdf_render_width = if width > 0 { width } else { 1200 };
+    let page_bytes = parsers::extract_page(
+        std::path::Path::new(abs_path),
+        format,
+        page_number,
+        pdf_render_width,
+    )
+    .map_err(|e| {
+        error!("Failed to extract page {} from {}: {}", page_number, abs_path, e);
+        ApiError::internal(format!("page extraction failed: {e}"))
+    })?;

+    // Original mode or source matches output with no resize → return raw bytes (zero transcoding)
+    if matches!(out_format, OutputFormat::Original) && width == 0 {
+        return Ok(page_bytes);
+    }
+    if width == 0 {
+        if let Ok(source_fmt) = image::guess_format(&page_bytes) {
+            if format_matches(&source_fmt, out_format) {
+                return Ok(page_bytes);
+            }
+        }
+    }

+    transcode_image(&page_bytes, out_format, quality, width, filter)
+}

+/// Fast JPEG decode with DCT scaling: decodes directly at reduced resolution.
+fn fast_jpeg_decode(input: &[u8], target_w: u32, target_h: u32) -> Option<image::DynamicImage> {
+    if image::guess_format(input).ok()? != ImageFormat::Jpeg {
+        return None;
+    }
+    let mut decoder = jpeg_decoder::Decoder::new(std::io::Cursor::new(input));
+    decoder.read_info().ok()?;
+    decoder.scale(target_w as u16, target_h as u16).ok()?;
+    let pixels = decoder.decode().ok()?;
+    let info = decoder.info()?;
+    let w = info.width as u32;
+    let h = info.height as u32;
+    match info.pixel_format {
+        jpeg_decoder::PixelFormat::RGB24 => {
+            let buf = image::RgbImage::from_raw(w, h, pixels)?;
+            Some(image::DynamicImage::ImageRgb8(buf))
+        }
+        jpeg_decoder::PixelFormat::L8 => {
+            let buf = image::GrayImage::from_raw(w, h, pixels)?;
+            Some(image::DynamicImage::ImageLuma8(buf))
+        }
+        _ => None,
+    }
+}

+fn transcode_image(input: &[u8], out_format: &OutputFormat, quality: u8, width: u32, filter: image::imageops::FilterType) -> Result<Vec<u8>, ApiError> {
     let source_format = image::guess_format(input).ok();
-    debug!("Source format detected: {:?}", source_format);
-    let needs_transcode = source_format.map(|f| !format_matches(&f, out_format)).unwrap_or(true);
+    // Resolve "Original" to the actual source format for encoding
+    let effective_format = match out_format {
+        OutputFormat::Original => match source_format {
+            Some(ImageFormat::Png) => OutputFormat::Png,
+            Some(ImageFormat::WebP) => OutputFormat::Webp,
+            _ => OutputFormat::Jpeg, // default to JPEG for original resize
+        },
+        other => *other,
+    };

+    let needs_transcode = source_format.map(|f| !format_matches(&f, &effective_format)).unwrap_or(true);

     if width == 0 && !needs_transcode {
-        debug!("No transcoding needed, returning original");
         return Ok(input.to_vec());
     }

-    debug!("Loading image from memory...");
-    let mut image = image::load_from_memory(input).map_err(|e| {
-        error!("Failed to load image from memory: {} (input size: {} bytes)", e, input.len());
-        ApiError::internal(format!("invalid source image: {e}"))
-    })?;
+    // For JPEG with resize: use DCT scaling to decode at ~target size (much faster)
+    let mut image = if width > 0 {
+        fast_jpeg_decode(input, width, u32::MAX)
+            .unwrap_or_else(|| {
+                image::load_from_memory(input).unwrap_or_default()
+            })
+    } else {
+        image::load_from_memory(input).map_err(|e| {
+            ApiError::internal(format!("invalid source image: {e}"))
+        })?
+    };

     if width > 0 {
-        debug!("Resizing image to width: {}", width);
-        image = image.resize(width, u32::MAX, image::imageops::FilterType::Lanczos3);
+        image = image.resize(width, u32::MAX, filter);
     }

-    debug!("Converting to RGBA...");
     let rgba = image.to_rgba8();
     let (w, h) = rgba.dimensions();
-    debug!("Image dimensions: {}x{}", w, h);

     let mut out = Vec::new();
-    match out_format {
-        OutputFormat::Jpeg => {
+    match effective_format {
+        OutputFormat::Jpeg | OutputFormat::Original => {
+            // JPEG doesn't support alpha — convert RGBA to RGB
+            let rgb = image::DynamicImage::ImageRgba8(rgba.clone()).to_rgb8();
             let mut encoder = JpegEncoder::new_with_quality(&mut out, quality);
             encoder
-                .encode(&rgba, w, h, ColorType::Rgba8.into())
+                .encode(&rgb, w, h, ColorType::Rgb8.into())
                 .map_err(|e| ApiError::internal(format!("jpeg encode failed: {e}")))?;
         }
         OutputFormat::Png => {
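The speedup in `fast_jpeg_decode` comes from the jpeg-decoder crate's `scale`, which requests a libjpeg-style 1/1…1/8 DCT downscale so pixels are produced near the target size instead of being decoded at full resolution and then resized. A standalone sketch of that call (file path is a placeholder):

```rust
// Sketch of DCT-scaled decoding with the jpeg-decoder crate, as used above.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let file = std::fs::File::open("page.jpg")?; // hypothetical input
    let mut decoder = jpeg_decoder::Decoder::new(std::io::BufReader::new(file));
    decoder.read_info()?;

    // Ask for ~800px wide; the decoder picks the nearest DCT ratio, so the
    // returned dimensions are approximate, not exact.
    let (w, h) = decoder.scale(800, u16::MAX)?;
    let pixels = decoder.decode()?;
    println!("decoded {w}x{h} ({} bytes)", pixels.len());
    Ok(())
}
```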
@@ -525,7 +597,7 @@ fn transcode_image(input: &[u8], out_format: &OutputFormat, quality: u8, width:
                 .flat_map(|p| [p[0], p[1], p[2]])
                 .collect();
             let webp_data = webp::Encoder::new(&rgb_data, webp::PixelLayout::Rgb, w, h)
-                .encode(f32::max(quality as f32, 85.0));
+                .encode(quality as f32);
             out.extend_from_slice(&webp_data);
         }
     }
@@ -533,23 +605,11 @@ fn transcode_image(input: &[u8], out_format: &OutputFormat, quality: u8, width:
 }

 fn format_matches(source: &ImageFormat, target: &OutputFormat) -> bool {
-    match (source, target) {
-        (ImageFormat::Jpeg, OutputFormat::Jpeg) => true,
-        (ImageFormat::Png, OutputFormat::Png) => true,
-        (ImageFormat::WebP, OutputFormat::Webp) => true,
-        _ => false,
-    }
+    matches!(
+        (source, target),
+        (ImageFormat::Jpeg, OutputFormat::Jpeg)
+            | (ImageFormat::Png, OutputFormat::Png)
+            | (ImageFormat::WebP, OutputFormat::Webp)
+    )
 }

-fn is_image_name(name: &str) -> bool {
-    name.ends_with(".jpg")
-        || name.ends_with(".jpeg")
-        || name.ends_with(".png")
-        || name.ends_with(".webp")
-        || name.ends_with(".avif")
-}
-
-#[allow(dead_code)]
-fn _is_absolute_path(value: &str) -> bool {
-    Path::new(value).is_absolute()
-}

apps/api/src/prowlarr.rs (new file, 363 lines)
@@ -0,0 +1,363 @@
use axum::{extract::State, Json};
use serde::{Deserialize, Serialize};
use sqlx::Row;
use utoipa::ToSchema;

use crate::{error::ApiError, state::AppState};

// ─── Types ──────────────────────────────────────────────────────────────────

#[derive(Deserialize, ToSchema)]
pub struct MissingVolumeInput {
    pub volume_number: Option<i32>,
    #[allow(dead_code)]
    pub title: Option<String>,
}

#[derive(Deserialize, ToSchema)]
pub struct ProwlarrSearchRequest {
    pub series_name: String,
    pub volume_number: Option<i32>,
    pub custom_query: Option<String>,
    pub missing_volumes: Option<Vec<MissingVolumeInput>>,
}

#[derive(Serialize, Deserialize, ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct ProwlarrRawRelease {
    pub guid: String,
    pub title: String,
    pub size: i64,
    pub download_url: Option<String>,
    pub indexer: Option<String>,
    pub seeders: Option<i32>,
    pub leechers: Option<i32>,
    pub publish_date: Option<String>,
    pub protocol: Option<String>,
    pub info_url: Option<String>,
    pub categories: Option<Vec<ProwlarrCategory>>,
}

#[derive(Serialize, ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct ProwlarrRelease {
    pub guid: String,
    pub title: String,
    pub size: i64,
    pub download_url: Option<String>,
    pub indexer: Option<String>,
    pub seeders: Option<i32>,
    pub leechers: Option<i32>,
    pub publish_date: Option<String>,
    pub protocol: Option<String>,
    pub info_url: Option<String>,
    pub categories: Option<Vec<ProwlarrCategory>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub matched_missing_volumes: Option<Vec<i32>>,
}

#[derive(Serialize, Deserialize, ToSchema)]
#[serde(rename_all = "camelCase")]
pub struct ProwlarrCategory {
    pub id: i32,
    pub name: Option<String>,
}

#[derive(Serialize, ToSchema)]
pub struct ProwlarrSearchResponse {
    pub results: Vec<ProwlarrRelease>,
    pub query: String,
}

#[derive(Serialize, ToSchema)]
pub struct ProwlarrTestResponse {
    pub success: bool,
    pub message: String,
    pub indexer_count: Option<i32>,
}

// ─── Config helper ──────────────────────────────────────────────────────────

#[derive(Deserialize)]
struct ProwlarrConfig {
    url: String,
    api_key: String,
    categories: Option<Vec<i32>>,
}

async fn load_prowlarr_config(
    pool: &sqlx::PgPool,
) -> Result<(String, String, Vec<i32>), ApiError> {
    let row = sqlx::query("SELECT value FROM app_settings WHERE key = 'prowlarr'")
        .fetch_optional(pool)
        .await?;

    let row = row.ok_or_else(|| ApiError::bad_request("Prowlarr is not configured"))?;
    let value: serde_json::Value = row.get("value");
    let config: ProwlarrConfig = serde_json::from_value(value)
        .map_err(|e| ApiError::internal(format!("invalid prowlarr config: {e}")))?;

    if config.url.is_empty() || config.api_key.is_empty() {
        return Err(ApiError::bad_request(
            "Prowlarr URL and API key must be configured in settings",
        ));
    }

    let url = config.url.trim_end_matches('/').to_string();
    let categories = config.categories.unwrap_or_else(|| vec![7030, 7020]);

    Ok((url, config.api_key, categories))
}

// ─── Volume matching ─────────────────────────────────────────────────────────

/// Extract volume numbers from a release title.
/// Looks for patterns like: T01, Tome 01, Vol. 01, v01, #01,
/// or standalone numbers that appear after common separators.
fn extract_volumes_from_title(title: &str) -> Vec<i32> {
    let lower = title.to_lowercase();
    let mut volumes = Vec::new();

    // Patterns: T01, Tome 01, Tome01, Vol 01, Vol.01, v01, #01
    let prefixes = ["tome", "vol.", "vol ", "t", "v", "#"];
    let chars: Vec<char> = lower.chars().collect();
    let len = chars.len();

    for prefix in &prefixes {
        let mut start = 0;
        while let Some(pos) = lower[start..].find(prefix) {
            let abs_pos = start + pos;
            let after = abs_pos + prefix.len();

            // For single-char prefixes (t, v, #), ensure it's at a word boundary
            if prefix.len() == 1 && *prefix != "#" {
                if abs_pos > 0 && chars[abs_pos - 1].is_alphanumeric() {
                    start = after;
                    continue;
                }
            }

            // Skip optional spaces after prefix
            let mut i = after;
            while i < len && chars[i] == ' ' {
                i += 1;
            }

            // Read digits
            let digit_start = i;
            while i < len && chars[i].is_ascii_digit() {
                i += 1;
            }

            if i > digit_start {
                if let Ok(num) = lower[digit_start..i].parse::<i32>() {
                    if !volumes.contains(&num) {
                        volumes.push(num);
                    }
                }
            }

            start = after;
        }
    }

    volumes
}

/// Match releases against missing volume numbers.
fn match_missing_volumes(
    releases: Vec<ProwlarrRawRelease>,
    missing: &[MissingVolumeInput],
) -> Vec<ProwlarrRelease> {
    let missing_numbers: Vec<i32> = missing
        .iter()
        .filter_map(|m| m.volume_number)
        .collect();

    releases
        .into_iter()
        .map(|r| {
            let matched = if missing_numbers.is_empty() {
                None
            } else {
                let title_volumes = extract_volumes_from_title(&r.title);
                let matched: Vec<i32> = title_volumes
                    .into_iter()
                    .filter(|v| missing_numbers.contains(v))
                    .collect();
                if matched.is_empty() {
                    None
                } else {
                    Some(matched)
                }
            };

            ProwlarrRelease {
                guid: r.guid,
                title: r.title,
                size: r.size,
                download_url: r.download_url,
                indexer: r.indexer,
                seeders: r.seeders,
                leechers: r.leechers,
                publish_date: r.publish_date,
                protocol: r.protocol,
                info_url: r.info_url,
                categories: r.categories,
                matched_missing_volumes: matched,
            }
        })
        .collect()
}

// ─── Handlers ───────────────────────────────────────────────────────────────

/// Search for releases on Prowlarr
#[utoipa::path(
    post,
    path = "/prowlarr/search",
    tag = "prowlarr",
    request_body = ProwlarrSearchRequest,
    responses(
        (status = 200, body = ProwlarrSearchResponse),
        (status = 400, description = "Bad request or Prowlarr not configured"),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "Prowlarr connection error"),
    ),
    security(("Bearer" = []))
)]
pub async fn search_prowlarr(
    State(state): State<AppState>,
    Json(body): Json<ProwlarrSearchRequest>,
) -> Result<Json<ProwlarrSearchResponse>, ApiError> {
    let (url, api_key, categories) = load_prowlarr_config(&state.pool).await?;

    let query = if let Some(custom) = &body.custom_query {
        custom.clone()
    } else if let Some(vol) = body.volume_number {
        format!("\"{}\" {}", body.series_name, vol)
    } else {
        format!("\"{}\"", body.series_name)
    };

    let client = reqwest::Client::builder()
        .timeout(std::time::Duration::from_secs(30))
        .build()
        .map_err(|e| ApiError::internal(format!("failed to build HTTP client: {e}")))?;

    let mut params: Vec<(&str, String)> = vec![
        ("query", query.clone()),
        ("type", "search".to_string()),
    ];
    for cat in &categories {
        params.push(("categories", cat.to_string()));
    }

    let resp = client
        .get(format!("{url}/api/v1/search"))
        .query(&params)
        .header("X-Api-Key", &api_key)
        .send()
        .await
        .map_err(|e| ApiError::internal(format!("Prowlarr request failed: {e}")))?;

    if !resp.status().is_success() {
        let status = resp.status();
        let text = resp.text().await.unwrap_or_default();
        return Err(ApiError::internal(format!(
            "Prowlarr returned {status}: {text}"
        )));
    }

    let raw_text = resp
        .text()
        .await
        .map_err(|e| ApiError::internal(format!("Failed to read Prowlarr response: {e}")))?;

    tracing::debug!("Prowlarr raw response length: {} chars", raw_text.len());

    let raw_releases: Vec<ProwlarrRawRelease> = serde_json::from_str(&raw_text)
        .map_err(|e| {
            tracing::error!("Failed to parse Prowlarr response: {e}");
            tracing::error!("Raw response (first 500 chars): {}", &raw_text[..raw_text.len().min(500)]);
            ApiError::internal(format!("Failed to parse Prowlarr response: {e}"))
        })?;

    let results = if let Some(missing) = &body.missing_volumes {
        match_missing_volumes(raw_releases, missing)
    } else {
        raw_releases
            .into_iter()
            .map(|r| ProwlarrRelease {
                guid: r.guid,
                title: r.title,
                size: r.size,
                download_url: r.download_url,
                indexer: r.indexer,
                seeders: r.seeders,
                leechers: r.leechers,
                publish_date: r.publish_date,
                protocol: r.protocol,
                info_url: r.info_url,
                categories: r.categories,
                matched_missing_volumes: None,
            })
            .collect()
    };

    Ok(Json(ProwlarrSearchResponse { results, query }))
}

/// Test connection to Prowlarr
#[utoipa::path(
    get,
    path = "/prowlarr/test",
    tag = "prowlarr",
    responses(
        (status = 200, body = ProwlarrTestResponse),
        (status = 400, description = "Prowlarr not configured"),
        (status = 401, description = "Unauthorized"),
    ),
    security(("Bearer" = []))
)]
pub async fn test_prowlarr(
    State(state): State<AppState>,
) -> Result<Json<ProwlarrTestResponse>, ApiError> {
    let (url, api_key, _categories) = load_prowlarr_config(&state.pool).await?;

    let client = reqwest::Client::builder()
        .timeout(std::time::Duration::from_secs(10))
        .build()
        .map_err(|e| ApiError::internal(format!("failed to build HTTP client: {e}")))?;

    let resp = client
        .get(format!("{url}/api/v1/indexer"))
        .header("X-Api-Key", &api_key)
        .send()
        .await;

    match resp {
        Ok(r) if r.status().is_success() => {
            let indexers: Vec<serde_json::Value> = r.json().await.unwrap_or_default();
            Ok(Json(ProwlarrTestResponse {
                success: true,
                message: format!("Connected successfully ({} indexers)", indexers.len()),
                indexer_count: Some(indexers.len() as i32),
            }))
        }
        Ok(r) => {
            let status = r.status();
            let text = r.text().await.unwrap_or_default();
            Ok(Json(ProwlarrTestResponse {
                success: false,
                message: format!("Prowlarr returned {status}: {text}"),
                indexer_count: None,
            }))
        }
        Err(e) => Ok(Json(ProwlarrTestResponse {
            success: false,
            message: format!("Connection failed: {e}"),
            indexer_count: None,
        })),
    }
}
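The matcher above stands or falls with `extract_volumes_from_title`; a small sketch of the patterns its doc comment promises (release titles invented for illustration, not from the change set):

```rust
#[cfg(test)]
mod volume_pattern_sketch {
    use super::extract_volumes_from_title;

    #[test]
    fn picks_up_common_volume_markers() {
        // Made-up titles exercising the documented prefixes.
        assert_eq!(extract_volumes_from_title("One Piece T01"), vec![1]);
        assert_eq!(extract_volumes_from_title("Berserk Vol. 12 (2020)"), vec![12]);
        assert_eq!(extract_volumes_from_title("Naruto Tome 7"), vec![7]);
        // Single-letter prefixes only count at a word boundary.
        assert!(extract_volumes_from_title("Naruto").is_empty());
    }
}
```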
apps/api/src/qbittorrent.rs (new file, 218 lines)
@@ -0,0 +1,218 @@
|
|||||||
|
use axum::{extract::State, Json};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use sqlx::Row;
|
||||||
|
use utoipa::ToSchema;
|
||||||
|
|
||||||
|
use crate::{error::ApiError, state::AppState};
|
||||||
|
|
||||||
|
// ─── Types ──────────────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
#[derive(Deserialize, ToSchema)]
|
||||||
|
pub struct QBittorrentAddRequest {
|
||||||
|
pub url: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, ToSchema)]
|
||||||
|
pub struct QBittorrentAddResponse {
|
||||||
|
pub success: bool,
|
||||||
|
pub message: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, ToSchema)]
|
||||||
|
pub struct QBittorrentTestResponse {
|
||||||
|
pub success: bool,
|
||||||
|
pub message: String,
|
||||||
|
pub version: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
// ─── Config helper ──────────────────────────────────────────────────────────
|
||||||
|
|
||||||
|
#[derive(Deserialize)]
|
||||||
|
struct QBittorrentConfig {
|
||||||
|
url: String,
|
||||||
|
username: String,
|
||||||
|
password: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn load_qbittorrent_config(
|
||||||
|
pool: &sqlx::PgPool,
|
||||||
|
) -> Result<(String, String, String), ApiError> {
|
||||||
|
let row = sqlx::query("SELECT value FROM app_settings WHERE key = 'qbittorrent'")
|
||||||
|
.fetch_optional(pool)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let row = row.ok_or_else(|| ApiError::bad_request("qBittorrent is not configured"))?;
|
||||||
|
let value: serde_json::Value = row.get("value");
|
||||||
|
let config: QBittorrentConfig = serde_json::from_value(value)
|
||||||
|
        .map_err(|e| ApiError::internal(format!("invalid qbittorrent config: {e}")))?;

    if config.url.is_empty() || config.username.is_empty() {
        return Err(ApiError::bad_request(
            "qBittorrent URL and username must be configured in settings",
        ));
    }

    let url = config.url.trim_end_matches('/').to_string();
    Ok((url, config.username, config.password))
}

// ─── Login helper ───────────────────────────────────────────────────────────

async fn qbittorrent_login(
    client: &reqwest::Client,
    base_url: &str,
    username: &str,
    password: &str,
) -> Result<String, ApiError> {
    let resp = client
        .post(format!("{base_url}/api/v2/auth/login"))
        .form(&[("username", username), ("password", password)])
        .send()
        .await
        .map_err(|e| ApiError::internal(format!("qBittorrent login request failed: {e}")))?;

    if !resp.status().is_success() {
        let status = resp.status();
        let text = resp.text().await.unwrap_or_default();
        return Err(ApiError::internal(format!(
            "qBittorrent login failed ({status}): {text}"
        )));
    }

    // Extract SID from Set-Cookie header
    let cookie_header = resp
        .headers()
        .get("set-cookie")
        .and_then(|v| v.to_str().ok())
        .unwrap_or("");

    let sid = cookie_header
        .split(';')
        .next()
        .and_then(|s| s.strip_prefix("SID="))
        .ok_or_else(|| ApiError::internal("Failed to get SID cookie from qBittorrent"))?
        .to_string();

    Ok(sid)
}

// ─── Handlers ───────────────────────────────────────────────────────────────

/// Add a torrent to qBittorrent
#[utoipa::path(
    post,
    path = "/qbittorrent/add",
    tag = "qbittorrent",
    request_body = QBittorrentAddRequest,
    responses(
        (status = 200, body = QBittorrentAddResponse),
        (status = 400, description = "Bad request or qBittorrent not configured"),
        (status = 401, description = "Unauthorized"),
        (status = 500, description = "qBittorrent connection error"),
    ),
    security(("Bearer" = []))
)]
pub async fn add_torrent(
    State(state): State<AppState>,
    Json(body): Json<QBittorrentAddRequest>,
) -> Result<Json<QBittorrentAddResponse>, ApiError> {
    if body.url.is_empty() {
        return Err(ApiError::bad_request("url is required"));
    }

    let (base_url, username, password) = load_qbittorrent_config(&state.pool).await?;

    let client = reqwest::Client::builder()
        .timeout(std::time::Duration::from_secs(10))
        .build()
        .map_err(|e| ApiError::internal(format!("failed to build HTTP client: {e}")))?;

    let sid = qbittorrent_login(&client, &base_url, &username, &password).await?;

    let resp = client
        .post(format!("{base_url}/api/v2/torrents/add"))
        .header("Cookie", format!("SID={sid}"))
        .form(&[("urls", &body.url)])
        .send()
        .await
        .map_err(|e| ApiError::internal(format!("qBittorrent add request failed: {e}")))?;

    if resp.status().is_success() {
        Ok(Json(QBittorrentAddResponse {
            success: true,
            message: "Torrent added to qBittorrent".to_string(),
        }))
    } else {
        let status = resp.status();
        let text = resp.text().await.unwrap_or_default();
        Ok(Json(QBittorrentAddResponse {
            success: false,
            message: format!("qBittorrent returned {status}: {text}"),
        }))
    }
}

/// Test connection to qBittorrent
#[utoipa::path(
    get,
    path = "/qbittorrent/test",
    tag = "qbittorrent",
    responses(
        (status = 200, body = QBittorrentTestResponse),
        (status = 400, description = "qBittorrent not configured"),
        (status = 401, description = "Unauthorized"),
    ),
    security(("Bearer" = []))
)]
pub async fn test_qbittorrent(
    State(state): State<AppState>,
) -> Result<Json<QBittorrentTestResponse>, ApiError> {
    let (base_url, username, password) = load_qbittorrent_config(&state.pool).await?;

    let client = reqwest::Client::builder()
        .timeout(std::time::Duration::from_secs(10))
        .build()
        .map_err(|e| ApiError::internal(format!("failed to build HTTP client: {e}")))?;

    let sid = match qbittorrent_login(&client, &base_url, &username, &password).await {
        Ok(sid) => sid,
        Err(e) => {
            return Ok(Json(QBittorrentTestResponse {
                success: false,
                message: format!("Login failed: {}", e.message),
                version: None,
            }));
        }
    };

    let resp = client
        .get(format!("{base_url}/api/v2/app/version"))
        .header("Cookie", format!("SID={sid}"))
        .send()
        .await;

    match resp {
        Ok(r) if r.status().is_success() => {
            let version = r.text().await.unwrap_or_default();
            Ok(Json(QBittorrentTestResponse {
                success: true,
                message: format!("Connected successfully ({})", version.trim()),
                version: Some(version.trim().to_string()),
            }))
        }
        Ok(r) => {
            let status = r.status();
            let text = r.text().await.unwrap_or_default();
            Ok(Json(QBittorrentTestResponse {
                success: false,
                message: format!("qBittorrent returned {status}: {text}"),
                version: None,
            }))
        }
        Err(e) => Ok(Json(QBittorrentTestResponse {
            success: false,
            message: format!("Connection failed: {e}"),
            version: None,
        })),
    }
}
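Note on the flow above: qBittorrent's WebUI API authenticates with a form POST and returns the session as a SID cookie, which every later call must echo back in a Cookie header. A minimal standalone sketch of the same login exchange, assuming the reqwest and tokio crates; the URL and credentials are placeholders, not values from this repository:

// Sketch: log in to a qBittorrent WebUI and extract the SID session cookie.
async fn qbt_login_sketch() -> Result<String, Box<dyn std::error::Error>> {
    let client = reqwest::Client::new();
    let resp = client
        .post("http://localhost:8080/api/v2/auth/login") // placeholder host
        .form(&[("username", "admin"), ("password", "adminadmin")]) // placeholder creds
        .send()
        .await?;
    // On success qBittorrent answers 200 with `Set-Cookie: SID=<token>; ...`.
    let sid = resp
        .headers()
        .get("set-cookie")
        .and_then(|v| v.to_str().ok())
        .and_then(|c| c.split(';').next())
        .and_then(|c| c.strip_prefix("SID="))
        .ok_or("no SID cookie in response")?
        .to_string();
    Ok(sid)
}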
apps/api/src/reading_progress.rs (new file, 290 lines)
@@ -0,0 +1,290 @@
use axum::{extract::{Extension, Path, State}, Json};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use sqlx::Row;
use uuid::Uuid;
use utoipa::ToSchema;

use crate::{auth::AuthUser, error::ApiError, state::AppState};

#[derive(Serialize, ToSchema)]
pub struct ReadingProgressResponse {
    /// Reading status: "unread", "reading", or "read"
    pub status: String,
    /// Current page (only set when status is "reading")
    pub current_page: Option<i32>,
    #[schema(value_type = Option<String>)]
    pub last_read_at: Option<DateTime<Utc>>,
}

#[derive(Deserialize, ToSchema)]
pub struct UpdateReadingProgressRequest {
    /// Reading status: "unread", "reading", or "read"
    pub status: String,
    /// Required when status is "reading", must be > 0
    pub current_page: Option<i32>,
}

/// Get reading progress for a book
#[utoipa::path(
    get,
    path = "/books/{id}/progress",
    tag = "reading-progress",
    params(
        ("id" = String, Path, description = "Book UUID"),
    ),
    responses(
        (status = 200, body = ReadingProgressResponse),
        (status = 404, description = "Book not found"),
        (status = 401, description = "Unauthorized"),
    ),
    security(("Bearer" = []))
)]
pub async fn get_reading_progress(
    State(state): State<AppState>,
    user: Option<Extension<AuthUser>>,
    Path(id): Path<Uuid>,
) -> Result<Json<ReadingProgressResponse>, ApiError> {
    let auth_user = user.ok_or_else(|| ApiError::bad_request("admin tokens cannot track reading progress"))?.0;
    // Verify book exists
    let exists: bool = sqlx::query_scalar("SELECT EXISTS(SELECT 1 FROM books WHERE id = $1)")
        .bind(id)
        .fetch_one(&state.pool)
        .await?;

    if !exists {
        return Err(ApiError::not_found("book not found"));
    }

    let row = sqlx::query(
        "SELECT status, current_page, last_read_at FROM book_reading_progress WHERE book_id = $1 AND user_id = $2",
    )
    .bind(id)
    .bind(auth_user.user_id)
    .fetch_optional(&state.pool)
    .await?;

    let response = match row {
        Some(r) => ReadingProgressResponse {
            status: r.get("status"),
            current_page: r.get("current_page"),
            last_read_at: r.get("last_read_at"),
        },
        None => ReadingProgressResponse {
            status: "unread".to_string(),
            current_page: None,
            last_read_at: None,
        },
    };

    Ok(Json(response))
}

/// Update reading progress for a book
#[utoipa::path(
    patch,
    path = "/books/{id}/progress",
    tag = "reading-progress",
    params(
        ("id" = String, Path, description = "Book UUID"),
    ),
    request_body = UpdateReadingProgressRequest,
    responses(
        (status = 200, body = ReadingProgressResponse),
        (status = 404, description = "Book not found"),
        (status = 422, description = "Validation error (missing or invalid current_page for status 'reading')"),
        (status = 401, description = "Unauthorized"),
    ),
    security(("Bearer" = []))
)]
pub async fn update_reading_progress(
    State(state): State<AppState>,
    user: Option<Extension<AuthUser>>,
    Path(id): Path<Uuid>,
    Json(body): Json<UpdateReadingProgressRequest>,
) -> Result<Json<ReadingProgressResponse>, ApiError> {
    let auth_user = user.ok_or_else(|| ApiError::bad_request("admin tokens cannot track reading progress"))?.0;
    // Validate status value
    if !["unread", "reading", "read"].contains(&body.status.as_str()) {
        return Err(ApiError::bad_request(format!(
            "invalid status '{}': must be one of unread, reading, read",
            body.status
        )));
    }

    // Validate current_page for "reading" status
    if body.status == "reading" {
        match body.current_page {
            None => {
                return Err(ApiError::unprocessable_entity(
                    "current_page is required when status is 'reading'",
                ))
            }
            Some(p) if p <= 0 => {
                return Err(ApiError::unprocessable_entity(
                    "current_page must be greater than 0",
                ))
            }
            _ => {}
        }
    }

    // Verify book exists
    let exists: bool = sqlx::query_scalar("SELECT EXISTS(SELECT 1 FROM books WHERE id = $1)")
        .bind(id)
        .fetch_one(&state.pool)
        .await?;

    if !exists {
        return Err(ApiError::not_found("book not found"));
    }

    // current_page is only stored for "reading" status
    let current_page = if body.status == "reading" {
        body.current_page
    } else {
        None
    };

    let row = sqlx::query(
        r#"
        INSERT INTO book_reading_progress (book_id, user_id, status, current_page, last_read_at, updated_at)
        VALUES ($1, $2, $3, $4, NOW(), NOW())
        ON CONFLICT (book_id, user_id) DO UPDATE
        SET status = EXCLUDED.status,
            current_page = EXCLUDED.current_page,
            last_read_at = NOW(),
            updated_at = NOW()
        RETURNING status, current_page, last_read_at
        "#,
    )
    .bind(id)
    .bind(auth_user.user_id)
    .bind(&body.status)
    .bind(current_page)
    .fetch_one(&state.pool)
    .await?;

    Ok(Json(ReadingProgressResponse {
        status: row.get("status"),
        current_page: row.get("current_page"),
        last_read_at: row.get("last_read_at"),
    }))
}

#[derive(Deserialize, ToSchema)]
pub struct MarkSeriesReadRequest {
    /// Series name (use "unclassified" for books without series)
    pub series: String,
    /// Status to set: "read" or "unread"
    pub status: String,
}

#[derive(Serialize, ToSchema)]
pub struct MarkSeriesReadResponse {
    pub updated: i64,
}

/// Mark all books in a series as read or unread
#[utoipa::path(
    post,
    path = "/series/mark-read",
    tag = "reading-progress",
    request_body = MarkSeriesReadRequest,
    responses(
        (status = 200, body = MarkSeriesReadResponse),
        (status = 422, description = "Invalid status"),
        (status = 401, description = "Unauthorized"),
    ),
    security(("Bearer" = []))
)]
pub async fn mark_series_read(
    State(state): State<AppState>,
    user: Option<Extension<AuthUser>>,
    Json(body): Json<MarkSeriesReadRequest>,
) -> Result<Json<MarkSeriesReadResponse>, ApiError> {
    let auth_user = user.ok_or_else(|| ApiError::bad_request("admin tokens cannot track reading progress"))?.0;
    if !["read", "unread"].contains(&body.status.as_str()) {
        return Err(ApiError::bad_request(
            "status must be 'read' or 'unread'",
        ));
    }

    let series_filter = if body.series == "unclassified" {
        "(series IS NULL OR series = '')"
    } else {
        "series = $1"
    };

    let sql = if body.status == "unread" {
        // Delete progress records to reset to unread (scoped to this user)
        if body.series == "unclassified" {
            format!(
                r#"
                WITH target_books AS (
                    SELECT id FROM books WHERE {series_filter}
                )
                DELETE FROM book_reading_progress
                WHERE book_id IN (SELECT id FROM target_books) AND user_id = $1
                "#
            )
        } else {
            format!(
                r#"
                WITH target_books AS (
                    SELECT id FROM books WHERE {series_filter}
                )
                DELETE FROM book_reading_progress
                WHERE book_id IN (SELECT id FROM target_books) AND user_id = $2
                "#
            )
        }
    } else if body.series == "unclassified" {
        format!(
            r#"
            INSERT INTO book_reading_progress (book_id, user_id, status, current_page, last_read_at, updated_at)
            SELECT id, $1, 'read', NULL, NOW(), NOW()
            FROM books
            WHERE {series_filter}
            ON CONFLICT (book_id, user_id) DO UPDATE
            SET status = 'read',
                current_page = NULL,
                last_read_at = NOW(),
                updated_at = NOW()
            "#
        )
    } else {
        format!(
            r#"
            INSERT INTO book_reading_progress (book_id, user_id, status, current_page, last_read_at, updated_at)
            SELECT id, $2, 'read', NULL, NOW(), NOW()
            FROM books
            WHERE {series_filter}
            ON CONFLICT (book_id, user_id) DO UPDATE
            SET status = 'read',
                current_page = NULL,
                last_read_at = NOW(),
                updated_at = NOW()
            "#
        )
    };

    let result = if body.series == "unclassified" {
        // $1 = user_id (no series bind needed)
        sqlx::query(&sql)
            .bind(auth_user.user_id)
            .execute(&state.pool)
            .await?
    } else {
        // $1 = series, $2 = user_id
        sqlx::query(&sql)
            .bind(&body.series)
            .bind(auth_user.user_id)
            .execute(&state.pool)
            .await?
    };

    Ok(Json(MarkSeriesReadResponse {
        updated: result.rows_affected() as i64,
    }))
}
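The upsert above, keyed on (book_id, user_id), is what makes progress per-user: the first PATCH inserts a row, later ones update it in place. A client-side sketch of exercising the endpoint, assuming reqwest, tokio, and serde_json; the base URL, token, and book id are placeholders:

// Sketch: mark a book as "reading" at page 12 via the progress endpoint.
async fn set_progress_sketch() -> Result<(), Box<dyn std::error::Error>> {
    let book_id = "00000000-0000-0000-0000-000000000000"; // placeholder UUID
    let resp = reqwest::Client::new()
        .patch(format!("http://localhost:3000/books/{book_id}/progress")) // placeholder base URL
        .bearer_auth("user-token-here") // placeholder bearer token
        .json(&serde_json::json!({ "status": "reading", "current_page": 12 }))
        .send()
        .await?;
    // Expect 200 with a ReadingProgressResponse body, or 422 when status is
    // "reading" and current_page is missing or non-positive.
    println!("HTTP {}", resp.status());
    Ok(())
}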
@@ -1,8 +1,10 @@
 use axum::{extract::{Query, State}, Json};
 use serde::{Deserialize, Serialize};
+use sqlx::Row;
 use utoipa::ToSchema;
+use uuid::Uuid;
 
-use crate::{error::ApiError, AppState};
+use crate::{error::ApiError, state::AppState};
 
 #[derive(Deserialize, ToSchema)]
 pub struct SearchQuery {
@@ -18,24 +20,36 @@ pub struct SearchQuery {
     pub limit: Option<usize>,
 }
 
+#[derive(Serialize, ToSchema)]
+pub struct SeriesHit {
+    #[schema(value_type = String)]
+    pub library_id: Uuid,
+    pub name: String,
+    pub book_count: i64,
+    pub books_read_count: i64,
+    #[schema(value_type = String)]
+    pub first_book_id: Uuid,
+}
+
 #[derive(Serialize, ToSchema)]
 pub struct SearchResponse {
     pub hits: serde_json::Value,
+    pub series_hits: Vec<SeriesHit>,
     pub estimated_total_hits: Option<u64>,
     pub processing_time_ms: Option<u64>,
 }
 
-/// Search books across all libraries using Meilisearch
+/// Search books across all libraries
 #[utoipa::path(
     get,
     path = "/search",
-    tag = "books",
+    tag = "search",
     params(
-        ("q" = String, Query, description = "Search query"),
+        ("q" = String, Query, description = "Search query (books + series via PostgreSQL full-text)"),
         ("library_id" = Option<String>, Query, description = "Filter by library ID"),
-        ("type" = Option<String>, Query, description = "Filter by type (cbz, cbr, pdf)"),
+        ("type" = Option<String>, Query, description = "Filter by type (cbz, cbr, pdf, epub)"),
         ("kind" = Option<String>, Query, description = "Filter by kind (alias for type)"),
-        ("limit" = Option<usize>, Query, description = "Max results (max 100)"),
+        ("limit" = Option<usize>, Query, description = "Max results per type (max 100)"),
     ),
     responses(
         (status = 200, body = SearchResponse),
@@ -51,51 +65,127 @@ pub async fn search_books(
         return Err(ApiError::bad_request("q is required"));
     }
 
-    let mut filters: Vec<String> = Vec::new();
-    if let Some(library_id) = query.library_id.as_deref() {
-        filters.push(format!("library_id = '{}'", library_id.replace('"', "")));
-    }
-    let kind_filter = query.r#type.as_deref().or(query.kind.as_deref());
-    if let Some(kind) = kind_filter {
-        filters.push(format!("kind = '{}'", kind.replace('"', "")));
-    }
-
-    let body = serde_json::json!({
-        "q": query.q,
-        "limit": query.limit.unwrap_or(20).clamp(1, 100),
-        "filter": if filters.is_empty() { serde_json::Value::Null } else { serde_json::Value::String(filters.join(" AND ")) }
-    });
-
-    let client = reqwest::Client::new();
-    let url = format!("{}/indexes/books/search", state.meili_url.trim_end_matches('/'));
-    let response = client
-        .post(url)
-        .header("Authorization", format!("Bearer {}", state.meili_master_key))
-        .json(&body)
-        .send()
-        .await
-        .map_err(|e| ApiError::internal(format!("meili request failed: {e}")))?;
-
-    if !response.status().is_success() {
-        let body = response.text().await.unwrap_or_else(|_| "unknown meili error".to_string());
-        if body.contains("index_not_found") {
-            return Ok(Json(SearchResponse {
-                hits: serde_json::json!([]),
-                estimated_total_hits: Some(0),
-                processing_time_ms: Some(0),
-            }));
-        }
-        return Err(ApiError::internal(format!("meili error: {body}")));
-    }
-
-    let payload: serde_json::Value = response
-        .json()
-        .await
-        .map_err(|e| ApiError::internal(format!("invalid meili response: {e}")))?;
-
+    let limit_val = query.limit.unwrap_or(20).clamp(1, 100) as i64;
+    let q_pattern = format!("%{}%", query.q);
+    let library_id_uuid: Option<Uuid> = query.library_id.as_deref()
+        .and_then(|s| s.parse().ok());
+    let kind_filter: Option<&str> = query.r#type.as_deref().or(query.kind.as_deref());
+
+    let start = std::time::Instant::now();
+
+    // Book search via PostgreSQL ILIKE on title, authors, series
+    let books_sql = r#"
+        SELECT b.id, b.library_id, b.kind, b.title,
+               COALESCE(b.authors, CASE WHEN b.author IS NOT NULL AND b.author != '' THEN ARRAY[b.author] ELSE ARRAY[]::text[] END) as authors,
+               b.series, b.volume, b.language
+        FROM books b
+        LEFT JOIN series_metadata sm
+               ON sm.library_id = b.library_id
+              AND sm.name = COALESCE(NULLIF(b.series, ''), 'unclassified')
+        WHERE (
+            b.title ILIKE $1
+            OR b.series ILIKE $1
+            OR EXISTS (SELECT 1 FROM unnest(
+                COALESCE(b.authors, CASE WHEN b.author IS NOT NULL AND b.author != '' THEN ARRAY[b.author] ELSE ARRAY[]::text[] END)
+                || COALESCE(sm.authors, ARRAY[]::text[])
+            ) AS a WHERE a ILIKE $1)
+        )
+        AND ($2::uuid IS NULL OR b.library_id = $2)
+        AND ($3::text IS NULL OR b.kind = $3)
+        ORDER BY
+            CASE WHEN b.title ILIKE $1 THEN 0 ELSE 1 END,
+            b.title ASC
+        LIMIT $4
+    "#;
+
+    let series_sql = r#"
+        WITH sorted_books AS (
+            SELECT
+                library_id,
+                COALESCE(NULLIF(series, ''), 'unclassified') as name,
+                id,
+                ROW_NUMBER() OVER (
+                    PARTITION BY library_id, COALESCE(NULLIF(series, ''), 'unclassified')
+                    ORDER BY
+                        REGEXP_REPLACE(LOWER(title), '[0-9]+', '', 'g'),
+                        COALESCE((REGEXP_MATCH(LOWER(title), '\d+'))[1]::int, 0),
+                        title ASC
+                ) as rn
+            FROM books
+            WHERE ($2::uuid IS NULL OR library_id = $2)
+        ),
+        series_counts AS (
+            SELECT
+                sb.library_id,
+                sb.name,
+                COUNT(*) as book_count,
+                COUNT(brp.book_id) FILTER (WHERE brp.status = 'read') as books_read_count
+            FROM sorted_books sb
+            LEFT JOIN book_reading_progress brp ON brp.book_id = sb.id
+            GROUP BY sb.library_id, sb.name
+        )
+        SELECT sc.library_id, sc.name, sc.book_count, sc.books_read_count, sb.id as first_book_id
+        FROM series_counts sc
+        JOIN sorted_books sb ON sb.library_id = sc.library_id AND sb.name = sc.name AND sb.rn = 1
+        WHERE sc.name ILIKE $1
+        ORDER BY sc.name ASC
+        LIMIT $4
+    "#;
+
+    let (books_rows, series_rows) = tokio::join!(
+        sqlx::query(books_sql)
+            .bind(&q_pattern)
+            .bind(library_id_uuid)
+            .bind(kind_filter)
+            .bind(limit_val)
+            .fetch_all(&state.pool),
+        sqlx::query(series_sql)
+            .bind(&q_pattern)
+            .bind(library_id_uuid)
+            .bind(kind_filter) // unused in series query but keeps bind positions consistent
+            .bind(limit_val)
+            .fetch_all(&state.pool)
+    );
+
+    let elapsed_ms = start.elapsed().as_millis() as u64;
+
+    // Build book hits as JSON array (same shape as before)
+    let books_rows = books_rows.map_err(|e| ApiError::internal(format!("book search failed: {e}")))?;
+    let hits: Vec<serde_json::Value> = books_rows
+        .iter()
+        .map(|row| {
+            serde_json::json!({
+                "id": row.get::<Uuid, _>("id").to_string(),
+                "library_id": row.get::<Uuid, _>("library_id").to_string(),
+                "kind": row.get::<String, _>("kind"),
+                "title": row.get::<String, _>("title"),
+                "authors": row.get::<Vec<String>, _>("authors"),
+                "series": row.get::<Option<String>, _>("series"),
+                "volume": row.get::<Option<i32>, _>("volume"),
+                "language": row.get::<Option<String>, _>("language"),
+            })
+        })
+        .collect();
+
+    let estimated_total_hits = hits.len() as u64;
+
+    // Series hits
+    let series_hits: Vec<SeriesHit> = series_rows
+        .unwrap_or_default()
+        .iter()
+        .map(|row| SeriesHit {
+            library_id: row.get("library_id"),
+            name: row.get("name"),
+            book_count: row.get("book_count"),
+            books_read_count: row.get("books_read_count"),
+            first_book_id: row.get("first_book_id"),
+        })
+        .collect();
+
     Ok(Json(SearchResponse {
-        hits: payload.get("hits").cloned().unwrap_or_else(|| serde_json::json!([])),
-        estimated_total_hits: payload.get("estimatedTotalHits").and_then(|v| v.as_u64()),
-        processing_time_ms: payload.get("processingTimeMs").and_then(|v| v.as_u64()),
+        hits: serde_json::Value::Array(hits),
+        series_hits,
+        estimated_total_hits: Some(estimated_total_hits),
+        processing_time_ms: Some(elapsed_ms),
     }))
 }
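Two implementation details in the rewrite above are worth noting: the %{}% pattern passes user input straight into ILIKE, so % and _ in the query act as wildcards rather than literals, and the book and series queries run concurrently over one connection pool via tokio::join!. A reduced sketch of that join pattern, assuming sqlx with the Postgres feature; the table and SQL are illustrative only:

// Sketch: run two independent sqlx queries concurrently and join the results.
async fn joined_counts_sketch(pool: &sqlx::PgPool) -> Result<(i64, i64), sqlx::Error> {
    let (books, series) = tokio::join!(
        sqlx::query_scalar::<_, i64>("SELECT COUNT(*) FROM books").fetch_one(pool),
        sqlx::query_scalar::<_, i64>("SELECT COUNT(DISTINCT series) FROM books").fetch_one(pool),
    );
    // join! never short-circuits; each branch yields its own Result.
    Ok((books?, series?))
}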
apps/api/src/series.rs (new file, 1043 lines)
File diff suppressed because it is too large
@@ -1,41 +1,70 @@
 use axum::{
-    extract::State,
-    routing::{get, post},
+    extract::{Path as AxumPath, State},
+    routing::{delete, get, post},
     Json, Router,
 };
 use serde::{Deserialize, Serialize};
 use serde_json::Value;
 use sqlx::Row;
+use uuid::Uuid;
+use utoipa::ToSchema;
 
-use crate::{error::ApiError, AppState};
+use crate::{error::ApiError, state::{AppState, load_dynamic_settings}};
 
-#[derive(Debug, Clone, Serialize, Deserialize)]
+#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
 pub struct UpdateSettingRequest {
     pub value: Value,
 }
 
-#[derive(Debug, Clone, Serialize, Deserialize)]
+#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
 pub struct ClearCacheResponse {
     pub success: bool,
     pub message: String,
 }
 
-#[derive(Debug, Clone, Serialize, Deserialize)]
+#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
 pub struct CacheStats {
     pub total_size_mb: f64,
     pub file_count: u64,
     pub directory: String,
 }
 
+#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
+pub struct ThumbnailStats {
+    pub total_size_mb: f64,
+    pub file_count: u64,
+    pub directory: String,
+}
+
 pub fn settings_routes() -> Router<AppState> {
     Router::new()
         .route("/settings", get(get_settings))
         .route("/settings/:key", get(get_setting).post(update_setting))
         .route("/settings/cache/clear", post(clear_cache))
         .route("/settings/cache/stats", get(get_cache_stats))
+        .route("/settings/thumbnail/stats", get(get_thumbnail_stats))
+        .route(
+            "/settings/status-mappings",
+            get(list_status_mappings).post(upsert_status_mapping),
+        )
+        .route(
+            "/settings/status-mappings/:id",
+            delete(delete_status_mapping),
+        )
 }
 
-async fn get_settings(State(state): State<AppState>) -> Result<Json<Value>, ApiError> {
+/// List all settings
+#[utoipa::path(
+    get,
+    path = "/settings",
+    tag = "settings",
+    responses(
+        (status = 200, description = "All settings as key/value object"),
+        (status = 401, description = "Unauthorized"),
+    ),
+    security(("Bearer" = []))
+)]
+pub async fn get_settings(State(state): State<AppState>) -> Result<Json<Value>, ApiError> {
     let rows = sqlx::query(r#"SELECT key, value FROM app_settings"#)
         .fetch_all(&state.pool)
         .await?;
@@ -50,7 +79,20 @@ async fn get_settings(State(state): State<AppState>) -> Result<Json<Value>, ApiE
     Ok(Json(Value::Object(settings)))
 }
 
-async fn get_setting(
+/// Get a single setting by key
+#[utoipa::path(
+    get,
+    path = "/settings/{key}",
+    tag = "settings",
+    params(("key" = String, Path, description = "Setting key")),
+    responses(
+        (status = 200, description = "Setting value"),
+        (status = 404, description = "Setting not found"),
+        (status = 401, description = "Unauthorized"),
+    ),
+    security(("Bearer" = []))
+)]
+pub async fn get_setting(
     State(state): State<AppState>,
     axum::extract::Path(key): axum::extract::Path<String>,
 ) -> Result<Json<Value>, ApiError> {
@@ -68,7 +110,20 @@ async fn get_setting(
     }
 }
 
-async fn update_setting(
+/// Create or update a setting
+#[utoipa::path(
+    post,
+    path = "/settings/{key}",
+    tag = "settings",
+    params(("key" = String, Path, description = "Setting key")),
+    request_body = UpdateSettingRequest,
+    responses(
+        (status = 200, description = "Updated setting value"),
+        (status = 401, description = "Unauthorized"),
+    ),
+    security(("Bearer" = []))
+)]
+pub async fn update_setting(
     State(state): State<AppState>,
     axum::extract::Path(key): axum::extract::Path<String>,
     Json(body): Json<UpdateSettingRequest>,
@@ -88,12 +143,29 @@ async fn update_setting(
     .await?;
 
     let value: Value = row.get("value");
 
+    // Reload the dynamic settings if the key affects runtime behavior
+    if key == "limits" || key == "image_processing" || key == "cache" {
+        let new_settings = load_dynamic_settings(&state.pool).await;
+        *state.settings.write().await = new_settings;
+    }
+
     Ok(Json(value))
 }
 
-async fn clear_cache(State(_state): State<AppState>) -> Result<Json<ClearCacheResponse>, ApiError> {
-    let cache_dir = std::env::var("IMAGE_CACHE_DIR")
-        .unwrap_or_else(|_| "/tmp/stripstream-image-cache".to_string());
+/// Clear the image page cache
+#[utoipa::path(
+    post,
+    path = "/settings/cache/clear",
+    tag = "settings",
+    responses(
+        (status = 200, body = ClearCacheResponse),
+        (status = 401, description = "Unauthorized"),
+    ),
+    security(("Bearer" = []))
+)]
+pub async fn clear_cache(State(state): State<AppState>) -> Result<Json<ClearCacheResponse>, ApiError> {
+    let cache_dir = state.settings.read().await.cache_directory.clone();
 
     let result = tokio::task::spawn_blocking(move || {
         if std::path::Path::new(&cache_dir).exists() {
@@ -120,9 +192,19 @@ async fn clear_cache(State(_state): State<AppState>) -> Result<Json<ClearCacheRe
     Ok(Json(result))
 }
 
-async fn get_cache_stats(State(_state): State<AppState>) -> Result<Json<CacheStats>, ApiError> {
-    let cache_dir = std::env::var("IMAGE_CACHE_DIR")
-        .unwrap_or_else(|_| "/tmp/stripstream-image-cache".to_string());
+/// Get image page cache statistics
+#[utoipa::path(
+    get,
+    path = "/settings/cache/stats",
+    tag = "settings",
+    responses(
+        (status = 200, body = CacheStats),
+        (status = 401, description = "Unauthorized"),
+    ),
+    security(("Bearer" = []))
+)]
+pub async fn get_cache_stats(State(state): State<AppState>) -> Result<Json<CacheStats>, ApiError> {
+    let cache_dir = state.settings.read().await.cache_directory.clone();
 
     let cache_dir_clone = cache_dir.clone();
     let stats = tokio::task::spawn_blocking(move || {
@@ -171,3 +253,205 @@ async fn get_cache_stats(State(_state): State<AppState>) -> Result<Json<CacheSta
 
     Ok(Json(stats))
 }
+
+fn compute_dir_stats(path: &std::path::Path) -> (u64, u64) {
+    let mut total_size: u64 = 0;
+    let mut file_count: u64 = 0;
+
+    fn visit_dirs(
+        dir: &std::path::Path,
+        total_size: &mut u64,
+        file_count: &mut u64,
+    ) -> std::io::Result<()> {
+        if dir.is_dir() {
+            for entry in std::fs::read_dir(dir)? {
+                let entry = entry?;
+                let path = entry.path();
+                if path.is_dir() {
+                    visit_dirs(&path, total_size, file_count)?;
+                } else {
+                    *total_size += entry.metadata()?.len();
+                    *file_count += 1;
+                }
+            }
+        }
+        Ok(())
+    }
+
+    let _ = visit_dirs(path, &mut total_size, &mut file_count);
+    (total_size, file_count)
+}
+
+/// Get thumbnail storage statistics
+#[utoipa::path(
+    get,
+    path = "/settings/thumbnail/stats",
+    tag = "settings",
+    responses(
+        (status = 200, body = ThumbnailStats),
+        (status = 401, description = "Unauthorized"),
+    ),
+    security(("Bearer" = []))
+)]
+pub async fn get_thumbnail_stats(State(_state): State<AppState>) -> Result<Json<ThumbnailStats>, ApiError> {
+    let settings = sqlx::query(r#"SELECT value FROM app_settings WHERE key = 'thumbnail'"#)
+        .fetch_optional(&_state.pool)
+        .await?;
+
+    let directory = match settings {
+        Some(row) => {
+            let value: serde_json::Value = row.get("value");
+            value.get("directory")
+                .and_then(|v| v.as_str())
+                .unwrap_or("/data/thumbnails")
+                .to_string()
+        }
+        None => "/data/thumbnails".to_string(),
+    };
+
+    let directory_clone = directory.clone();
+    let stats = tokio::task::spawn_blocking(move || {
+        let path = std::path::Path::new(&directory_clone);
+        if !path.exists() {
+            return ThumbnailStats {
+                total_size_mb: 0.0,
+                file_count: 0,
+                directory: directory_clone,
+            };
+        }
+
+        let (total_size, file_count) = compute_dir_stats(path);
+
+        ThumbnailStats {
+            total_size_mb: total_size as f64 / 1024.0 / 1024.0,
+            file_count,
+            directory: directory_clone,
+        }
+    })
+    .await
+    .map_err(|e| ApiError::internal(format!("thumbnail stats failed: {}", e)))?;
+
+    Ok(Json(stats))
+}
+
+// ---------------------------------------------------------------------------
+// Status Mappings
+// ---------------------------------------------------------------------------
+
+#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
+pub struct StatusMappingDto {
+    pub id: String,
+    pub provider_status: String,
+    pub mapped_status: Option<String>,
+}
+
+#[derive(Debug, Clone, Deserialize, ToSchema)]
+pub struct UpsertStatusMappingRequest {
+    pub provider_status: String,
+    pub mapped_status: String,
+}
+
+/// List all status mappings
+#[utoipa::path(
+    get,
+    path = "/settings/status-mappings",
+    tag = "settings",
+    responses(
+        (status = 200, body = Vec<StatusMappingDto>),
+        (status = 401, description = "Unauthorized"),
+    ),
+    security(("Bearer" = []))
+)]
+pub async fn list_status_mappings(
+    State(state): State<AppState>,
+) -> Result<Json<Vec<StatusMappingDto>>, ApiError> {
+    let rows = sqlx::query(
+        "SELECT id, provider_status, mapped_status FROM status_mappings ORDER BY mapped_status NULLS LAST, provider_status",
+    )
+    .fetch_all(&state.pool)
+    .await?;
+
+    let mappings = rows
+        .iter()
+        .map(|row| StatusMappingDto {
+            id: row.get::<Uuid, _>("id").to_string(),
+            provider_status: row.get("provider_status"),
+            mapped_status: row.get::<Option<String>, _>("mapped_status"),
+        })
+        .collect();
+
+    Ok(Json(mappings))
+}
+
+/// Create or update a status mapping
+#[utoipa::path(
+    post,
+    path = "/settings/status-mappings",
+    tag = "settings",
+    request_body = UpsertStatusMappingRequest,
+    responses(
+        (status = 200, body = StatusMappingDto),
+        (status = 401, description = "Unauthorized"),
+    ),
+    security(("Bearer" = []))
+)]
+pub async fn upsert_status_mapping(
+    State(state): State<AppState>,
+    Json(body): Json<UpsertStatusMappingRequest>,
+) -> Result<Json<StatusMappingDto>, ApiError> {
+    let provider_status = body.provider_status.to_lowercase();
+
+    let row = sqlx::query(
+        r#"
+        INSERT INTO status_mappings (provider_status, mapped_status)
+        VALUES ($1, $2)
+        ON CONFLICT (provider_status)
+        DO UPDATE SET mapped_status = $2, updated_at = NOW()
+        RETURNING id, provider_status, mapped_status
+        "#,
+    )
+    .bind(&provider_status)
+    .bind(&body.mapped_status)
+    .fetch_one(&state.pool)
+    .await?;
+
+    Ok(Json(StatusMappingDto {
+        id: row.get::<Uuid, _>("id").to_string(),
+        provider_status: row.get("provider_status"),
+        mapped_status: row.get::<Option<String>, _>("mapped_status"),
+    }))
+}
+
+/// Unmap a status mapping (sets mapped_status to NULL, keeps the provider status known)
+#[utoipa::path(
+    delete,
+    path = "/settings/status-mappings/{id}",
+    tag = "settings",
+    params(("id" = String, Path, description = "Mapping UUID")),
+    responses(
+        (status = 200, body = StatusMappingDto),
+        (status = 401, description = "Unauthorized"),
+        (status = 404, description = "Not found"),
+    ),
+    security(("Bearer" = []))
+)]
+pub async fn delete_status_mapping(
+    State(state): State<AppState>,
+    AxumPath(id): AxumPath<Uuid>,
+) -> Result<Json<StatusMappingDto>, ApiError> {
+    let row = sqlx::query(
+        "UPDATE status_mappings SET mapped_status = NULL, updated_at = NOW() WHERE id = $1 RETURNING id, provider_status, mapped_status",
+    )
+    .bind(id)
+    .fetch_optional(&state.pool)
+    .await?;
+
+    match row {
+        Some(row) => Ok(Json(StatusMappingDto {
+            id: row.get::<Uuid, _>("id").to_string(),
+            provider_status: row.get("provider_status"),
+            mapped_status: row.get::<Option<String>, _>("mapped_status"),
+        })),
+        None => Err(ApiError::not_found("status mapping not found")),
+    }
+}
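The update_setting change above introduces a hot-reload path: when a runtime-relevant key is written, the whole DynamicSettings value behind the shared lock is replaced rather than patched field by field. A reduced sketch of that swap, assuming tokio's RwLock; the struct is simplified:

use std::sync::Arc;
use tokio::sync::RwLock;

#[derive(Clone, Default)]
struct SettingsSketch {
    rate_limit_per_second: u32,
}

// Sketch: replace the shared settings snapshot atomically for all readers.
async fn swap_settings(shared: &Arc<RwLock<SettingsSketch>>, fresh: SettingsSketch) {
    // The write guard is held only for the assignment, so readers are
    // blocked briefly and always observe a fully consistent snapshot.
    *shared.write().await = fresh;
}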
apps/api/src/state.rs (new file, 134 lines)
@@ -0,0 +1,134 @@
use std::sync::{
    atomic::AtomicU64,
    Arc,
};
use std::time::Instant;

use lru::LruCache;
use sqlx::{Pool, Postgres, Row};
use tokio::sync::{Mutex, RwLock, Semaphore};

#[derive(Clone)]
pub struct AppState {
    pub pool: sqlx::PgPool,
    pub bootstrap_token: Arc<str>,
    pub page_cache: Arc<Mutex<LruCache<String, Arc<Vec<u8>>>>>,
    pub page_render_limit: Arc<Semaphore>,
    pub metrics: Arc<Metrics>,
    pub read_rate_limit: Arc<Mutex<ReadRateLimit>>,
    pub settings: Arc<RwLock<DynamicSettings>>,
}

#[derive(Clone)]
pub struct DynamicSettings {
    pub rate_limit_per_second: u32,
    pub timeout_seconds: u64,
    pub image_format: String,
    pub image_quality: u8,
    pub image_filter: String,
    pub image_max_width: u32,
    pub cache_directory: String,
}

impl Default for DynamicSettings {
    fn default() -> Self {
        Self {
            rate_limit_per_second: 120,
            timeout_seconds: 12,
            image_format: "webp".to_string(),
            image_quality: 85,
            image_filter: "triangle".to_string(),
            image_max_width: 2160,
            cache_directory: std::env::var("IMAGE_CACHE_DIR")
                .unwrap_or_else(|_| "/tmp/stripstream-image-cache".to_string()),
        }
    }
}

pub struct Metrics {
    pub requests_total: AtomicU64,
    pub page_cache_hits: AtomicU64,
    pub page_cache_misses: AtomicU64,
}

pub struct ReadRateLimit {
    pub window_started_at: Instant,
    pub requests_in_window: u32,
}

impl Metrics {
    pub fn new() -> Self {
        Self {
            requests_total: AtomicU64::new(0),
            page_cache_hits: AtomicU64::new(0),
            page_cache_misses: AtomicU64::new(0),
        }
    }
}

pub async fn load_concurrent_renders(pool: &Pool<Postgres>) -> usize {
    let default_concurrency = 8;
    let row = sqlx::query(r#"SELECT value FROM app_settings WHERE key = 'limits'"#)
        .fetch_optional(pool)
        .await;

    match row {
        Ok(Some(row)) => {
            let value: serde_json::Value = row.get("value");
            value
                .get("concurrent_renders")
                .and_then(|v: &serde_json::Value| v.as_u64())
                .map(|v| v as usize)
                .unwrap_or(default_concurrency)
        }
        _ => default_concurrency,
    }
}

pub async fn load_dynamic_settings(pool: &Pool<Postgres>) -> DynamicSettings {
    let mut s = DynamicSettings::default();

    if let Ok(Some(row)) = sqlx::query(r#"SELECT value FROM app_settings WHERE key = 'limits'"#)
        .fetch_optional(pool)
        .await
    {
        let v: serde_json::Value = row.get("value");
        if let Some(n) = v.get("rate_limit_per_second").and_then(|x| x.as_u64()) {
            s.rate_limit_per_second = n as u32;
        }
        if let Some(n) = v.get("timeout_seconds").and_then(|x| x.as_u64()) {
            s.timeout_seconds = n;
        }
    }

    if let Ok(Some(row)) = sqlx::query(r#"SELECT value FROM app_settings WHERE key = 'image_processing'"#)
        .fetch_optional(pool)
        .await
    {
        let v: serde_json::Value = row.get("value");
        if let Some(s2) = v.get("format").and_then(|x| x.as_str()) {
            s.image_format = s2.to_string();
        }
        if let Some(n) = v.get("quality").and_then(|x| x.as_u64()) {
            s.image_quality = n.clamp(1, 100) as u8;
        }
        if let Some(s2) = v.get("filter").and_then(|x| x.as_str()) {
            s.image_filter = s2.to_string();
        }
        if let Some(n) = v.get("max_width").and_then(|x| x.as_u64()) {
            s.image_max_width = n as u32;
        }
    }

    if let Ok(Some(row)) = sqlx::query(r#"SELECT value FROM app_settings WHERE key = 'cache'"#)
        .fetch_optional(pool)
        .await
    {
        let v: serde_json::Value = row.get("value");
        if let Some(dir) = v.get("directory").and_then(|x| x.as_str()) {
            s.cache_directory = dir.to_string();
        }
    }

    s
}
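load_dynamic_settings follows a defaults-then-override pattern: start from DynamicSettings::default() and let each known key in the stored JSON replace its field, so absent or malformed values silently keep the default. A reduced sketch of one override step using the struct above; the JSON value stands in for an app_settings row:

// Sketch: overlay the 'limits' JSON blob onto default settings.
fn apply_limits_sketch(mut s: DynamicSettings, v: &serde_json::Value) -> DynamicSettings {
    if let Some(n) = v.get("rate_limit_per_second").and_then(|x| x.as_u64()) {
        s.rate_limit_per_second = n as u32;
    }
    if let Some(n) = v.get("timeout_seconds").and_then(|x| x.as_u64()) {
        s.timeout_seconds = n;
    }
    s // anything the JSON did not set keeps its default
}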
apps/api/src/stats.rs (new file, 819 lines)
@@ -0,0 +1,819 @@
|
|||||||
|
use axum::{
|
||||||
|
extract::{Extension, Query, State},
|
||||||
|
Json,
|
||||||
|
};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use sqlx::Row;
|
||||||
|
use utoipa::{IntoParams, ToSchema};
|
||||||
|
|
||||||
|
use crate::{auth::AuthUser, error::ApiError, state::AppState};
|
||||||
|
|
||||||
|
#[derive(Deserialize, IntoParams)]
|
||||||
|
pub struct StatsQuery {
|
||||||
|
/// Granularity: "day", "week" or "month" (default: "month")
|
||||||
|
pub period: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, ToSchema)]
|
||||||
|
pub struct StatsOverview {
|
||||||
|
pub total_books: i64,
|
||||||
|
pub total_series: i64,
|
||||||
|
pub total_libraries: i64,
|
||||||
|
pub total_pages: i64,
|
||||||
|
pub total_size_bytes: i64,
|
||||||
|
pub total_authors: i64,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, ToSchema)]
|
||||||
|
pub struct ReadingStatusStats {
|
||||||
|
pub unread: i64,
|
||||||
|
pub reading: i64,
|
||||||
|
pub read: i64,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, ToSchema)]
|
||||||
|
pub struct FormatCount {
|
||||||
|
pub format: String,
|
||||||
|
pub count: i64,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, ToSchema)]
|
||||||
|
pub struct LanguageCount {
|
||||||
|
pub language: Option<String>,
|
||||||
|
pub count: i64,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, ToSchema)]
|
||||||
|
pub struct LibraryStats {
|
||||||
|
pub library_name: String,
|
||||||
|
pub book_count: i64,
|
||||||
|
pub size_bytes: i64,
|
||||||
|
pub read_count: i64,
|
||||||
|
pub reading_count: i64,
|
||||||
|
pub unread_count: i64,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, ToSchema)]
|
||||||
|
pub struct TopSeries {
|
||||||
|
pub series: String,
|
||||||
|
pub book_count: i64,
|
||||||
|
pub read_count: i64,
|
||||||
|
pub total_pages: i64,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, ToSchema)]
|
||||||
|
pub struct MonthlyAdditions {
|
||||||
|
pub month: String,
|
||||||
|
pub books_added: i64,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, ToSchema)]
|
||||||
|
pub struct MetadataStats {
|
||||||
|
pub total_series: i64,
|
||||||
|
pub series_linked: i64,
|
||||||
|
pub series_unlinked: i64,
|
||||||
|
pub books_with_summary: i64,
|
||||||
|
pub books_with_isbn: i64,
|
||||||
|
pub by_provider: Vec<ProviderCount>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, ToSchema)]
|
||||||
|
pub struct ProviderCount {
|
||||||
|
pub provider: String,
|
||||||
|
pub count: i64,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, ToSchema)]
|
||||||
|
pub struct CurrentlyReadingItem {
|
||||||
|
pub book_id: String,
|
||||||
|
pub title: String,
|
||||||
|
pub series: Option<String>,
|
||||||
|
pub current_page: i32,
|
||||||
|
pub page_count: i32,
|
||||||
|
pub username: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, ToSchema)]
|
||||||
|
pub struct RecentlyReadItem {
|
||||||
|
pub book_id: String,
|
||||||
|
pub title: String,
|
||||||
|
pub series: Option<String>,
|
||||||
|
pub last_read_at: String,
|
||||||
|
pub username: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, ToSchema)]
|
||||||
|
pub struct MonthlyReading {
|
||||||
|
pub month: String,
|
||||||
|
pub books_read: i64,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, ToSchema)]
|
||||||
|
pub struct UserMonthlyReading {
|
||||||
|
pub month: String,
|
||||||
|
pub username: String,
|
||||||
|
pub books_read: i64,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, ToSchema)]
|
||||||
|
pub struct JobTimePoint {
|
||||||
|
pub label: String,
|
||||||
|
pub scan: i64,
|
||||||
|
pub rebuild: i64,
|
||||||
|
pub thumbnail: i64,
|
||||||
|
pub other: i64,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, ToSchema)]
|
||||||
|
pub struct StatsResponse {
|
||||||
|
pub overview: StatsOverview,
|
||||||
|
pub reading_status: ReadingStatusStats,
|
||||||
|
pub currently_reading: Vec<CurrentlyReadingItem>,
|
||||||
|
pub recently_read: Vec<RecentlyReadItem>,
|
||||||
|
pub reading_over_time: Vec<MonthlyReading>,
|
||||||
|
pub by_format: Vec<FormatCount>,
|
||||||
|
pub by_language: Vec<LanguageCount>,
|
||||||
|
pub by_library: Vec<LibraryStats>,
|
||||||
|
pub top_series: Vec<TopSeries>,
|
||||||
|
pub additions_over_time: Vec<MonthlyAdditions>,
|
||||||
|
pub jobs_over_time: Vec<JobTimePoint>,
|
||||||
|
pub metadata: MetadataStats,
|
||||||
|
pub users_reading_over_time: Vec<UserMonthlyReading>,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Get collection statistics for the dashboard
|
||||||
|
#[utoipa::path(
|
||||||
|
get,
|
||||||
|
path = "/stats",
|
||||||
|
tag = "stats",
|
||||||
|
params(StatsQuery),
|
||||||
|
responses(
|
||||||
|
(status = 200, body = StatsResponse),
|
||||||
|
(status = 401, description = "Unauthorized"),
|
||||||
|
),
|
||||||
|
security(("Bearer" = []))
|
||||||
|
)]
|
||||||
|
pub async fn get_stats(
|
||||||
|
State(state): State<AppState>,
|
||||||
|
Query(query): Query<StatsQuery>,
|
||||||
|
user: Option<Extension<AuthUser>>,
|
||||||
|
) -> Result<Json<StatsResponse>, ApiError> {
|
||||||
|
let user_id: Option<uuid::Uuid> = user.map(|u| u.0.user_id);
|
||||||
|
let period = query.period.as_deref().unwrap_or("month");
|
||||||
|
// Overview + reading status in one query
|
||||||
|
let overview_row = sqlx::query(
|
||||||
|
r#"
|
||||||
|
SELECT
|
||||||
|
COUNT(*) AS total_books,
|
||||||
|
COUNT(DISTINCT NULLIF(series, '')) AS total_series,
|
||||||
|
COUNT(DISTINCT library_id) AS total_libraries,
|
||||||
|
COALESCE(SUM(page_count), 0)::BIGINT AS total_pages,
|
||||||
|
(SELECT COUNT(DISTINCT a) FROM (
|
||||||
|
SELECT DISTINCT UNNEST(authors) AS a FROM books WHERE authors != '{}'
|
||||||
|
UNION
|
||||||
|
SELECT DISTINCT author FROM books WHERE author IS NOT NULL AND author != ''
|
||||||
|
) sub) AS total_authors,
|
||||||
|
COUNT(*) FILTER (WHERE COALESCE(brp.status, 'unread') = 'unread') AS unread,
|
||||||
|
COUNT(*) FILTER (WHERE brp.status = 'reading') AS reading,
|
||||||
|
COUNT(*) FILTER (WHERE brp.status = 'read') AS read
|
||||||
|
FROM books b
|
||||||
|
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ($1::uuid IS NULL OR brp.user_id = $1)
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.bind(user_id)
|
||||||
|
.fetch_one(&state.pool)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
// Total size from book_files
|
||||||
|
let size_row = sqlx::query(
|
||||||
|
r#"
|
||||||
|
SELECT COALESCE(SUM(bf.size_bytes), 0)::BIGINT AS total_size_bytes
|
||||||
|
FROM (
|
||||||
|
SELECT DISTINCT ON (book_id) size_bytes
|
||||||
|
FROM book_files
|
||||||
|
ORDER BY book_id, updated_at DESC
|
||||||
|
) bf
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.fetch_one(&state.pool)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let overview = StatsOverview {
|
||||||
|
total_books: overview_row.get("total_books"),
|
||||||
|
total_series: overview_row.get("total_series"),
|
||||||
|
total_libraries: overview_row.get("total_libraries"),
|
||||||
|
total_pages: overview_row.get("total_pages"),
|
||||||
|
total_size_bytes: size_row.get("total_size_bytes"),
|
||||||
|
total_authors: overview_row.get("total_authors"),
|
||||||
|
};
|
||||||
|
|
||||||
|
let reading_status = ReadingStatusStats {
|
||||||
|
unread: overview_row.get("unread"),
|
||||||
|
reading: overview_row.get("reading"),
|
||||||
|
read: overview_row.get("read"),
|
||||||
|
};
|
||||||
|
|
||||||
|
// By format
|
||||||
|
let format_rows = sqlx::query(
|
||||||
|
r#"
|
||||||
|
SELECT COALESCE(bf.format, b.kind) AS fmt, COUNT(*) AS count
|
||||||
|
FROM books b
|
||||||
|
LEFT JOIN LATERAL (
|
||||||
|
SELECT format FROM book_files WHERE book_id = b.id ORDER BY updated_at DESC LIMIT 1
|
||||||
|
) bf ON TRUE
|
||||||
|
GROUP BY fmt
|
||||||
|
ORDER BY count DESC
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.fetch_all(&state.pool)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let by_format: Vec<FormatCount> = format_rows
|
||||||
|
.iter()
|
||||||
|
.map(|r| FormatCount {
|
||||||
|
format: r.get::<Option<String>, _>("fmt").unwrap_or_else(|| "unknown".to_string()),
|
||||||
|
count: r.get("count"),
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
// By language
|
||||||
|
let lang_rows = sqlx::query(
|
||||||
|
r#"
|
||||||
|
SELECT language, COUNT(*) AS count
|
||||||
|
FROM books
|
||||||
|
GROUP BY language
|
||||||
|
ORDER BY count DESC
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.fetch_all(&state.pool)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let by_language: Vec<LanguageCount> = lang_rows
|
||||||
|
.iter()
|
||||||
|
.map(|r| LanguageCount {
|
||||||
|
language: r.get("language"),
|
||||||
|
count: r.get("count"),
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
// By library
|
||||||
|
let lib_rows = sqlx::query(
|
||||||
|
r#"
|
||||||
|
SELECT
|
||||||
|
l.name AS library_name,
|
||||||
|
COUNT(b.id) AS book_count,
|
||||||
|
COALESCE(SUM(bf.size_bytes), 0)::BIGINT AS size_bytes,
|
||||||
|
COUNT(*) FILTER (WHERE brp.status = 'read') AS read_count,
|
||||||
|
COUNT(*) FILTER (WHERE brp.status = 'reading') AS reading_count,
|
||||||
|
COUNT(*) FILTER (WHERE COALESCE(brp.status, 'unread') = 'unread') AS unread_count
|
||||||
|
FROM libraries l
|
||||||
|
LEFT JOIN books b ON b.library_id = l.id
|
||||||
|
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ($1::uuid IS NULL OR brp.user_id = $1)
|
||||||
|
LEFT JOIN LATERAL (
|
||||||
|
SELECT size_bytes FROM book_files WHERE book_id = b.id ORDER BY updated_at DESC LIMIT 1
|
||||||
|
) bf ON TRUE
|
||||||
|
GROUP BY l.id, l.name
|
||||||
|
ORDER BY book_count DESC
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.bind(user_id)
|
||||||
|
.fetch_all(&state.pool)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let by_library: Vec<LibraryStats> = lib_rows
|
||||||
|
.iter()
|
||||||
|
.map(|r| LibraryStats {
|
||||||
|
library_name: r.get("library_name"),
|
||||||
|
book_count: r.get("book_count"),
|
||||||
|
size_bytes: r.get("size_bytes"),
|
||||||
|
read_count: r.get("read_count"),
|
||||||
|
reading_count: r.get("reading_count"),
|
||||||
|
unread_count: r.get("unread_count"),
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
// Top series (by book count)
|
||||||
|
let series_rows = sqlx::query(
|
||||||
|
r#"
|
||||||
|
SELECT
|
||||||
|
b.series,
|
||||||
|
COUNT(*) AS book_count,
|
||||||
|
COUNT(*) FILTER (WHERE brp.status = 'read') AS read_count,
|
||||||
|
COALESCE(SUM(b.page_count), 0)::BIGINT AS total_pages
|
||||||
|
FROM books b
|
||||||
|
LEFT JOIN book_reading_progress brp ON brp.book_id = b.id AND ($1::uuid IS NULL OR brp.user_id = $1)
|
||||||
|
WHERE b.series IS NOT NULL AND b.series != ''
|
||||||
|
GROUP BY b.series
|
||||||
|
ORDER BY book_count DESC
|
||||||
|
LIMIT 10
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.bind(user_id)
|
||||||
|
.fetch_all(&state.pool)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let top_series: Vec<TopSeries> = series_rows
|
||||||
|
.iter()
|
||||||
|
.map(|r| TopSeries {
|
||||||
|
series: r.get("series"),
|
||||||
|
book_count: r.get("book_count"),
|
||||||
|
read_count: r.get("read_count"),
|
||||||
|
total_pages: r.get("total_pages"),
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
// Additions over time (with gap filling)
    let additions_rows = match period {
        "day" => {
            sqlx::query(
                r#"
                SELECT
                    TO_CHAR(d.dt, 'YYYY-MM-DD') AS month,
                    COALESCE(cnt.books_added, 0) AS books_added
                FROM generate_series(CURRENT_DATE - INTERVAL '6 days', CURRENT_DATE, '1 day') AS d(dt)
                LEFT JOIN (
                    SELECT created_at::date AS dt, COUNT(*) AS books_added
                    FROM books
                    WHERE created_at >= CURRENT_DATE - INTERVAL '6 days'
                    GROUP BY created_at::date
                ) cnt ON cnt.dt = d.dt
                ORDER BY month ASC
                "#,
            )
            .fetch_all(&state.pool)
            .await?
        }
        "week" => {
            sqlx::query(
                r#"
                SELECT
                    TO_CHAR(d.dt, 'YYYY-MM-DD') AS month,
                    COALESCE(cnt.books_added, 0) AS books_added
                FROM generate_series(
                    DATE_TRUNC('week', NOW() - INTERVAL '2 months'),
                    DATE_TRUNC('week', NOW()),
                    '1 week'
                ) AS d(dt)
                LEFT JOIN (
                    SELECT DATE_TRUNC('week', created_at) AS dt, COUNT(*) AS books_added
                    FROM books
                    WHERE created_at >= DATE_TRUNC('week', NOW() - INTERVAL '2 months')
                    GROUP BY DATE_TRUNC('week', created_at)
                ) cnt ON cnt.dt = d.dt
                ORDER BY month ASC
                "#,
            )
            .fetch_all(&state.pool)
            .await?
        }
        _ => {
            sqlx::query(
                r#"
                SELECT
                    TO_CHAR(d.dt, 'YYYY-MM') AS month,
                    COALESCE(cnt.books_added, 0) AS books_added
                FROM generate_series(
                    DATE_TRUNC('month', NOW()) - INTERVAL '11 months',
                    DATE_TRUNC('month', NOW()),
                    '1 month'
                ) AS d(dt)
                LEFT JOIN (
                    SELECT DATE_TRUNC('month', created_at) AS dt, COUNT(*) AS books_added
                    FROM books
                    WHERE created_at >= DATE_TRUNC('month', NOW()) - INTERVAL '11 months'
                    GROUP BY DATE_TRUNC('month', created_at)
                ) cnt ON cnt.dt = d.dt
                ORDER BY month ASC
                "#,
            )
            .fetch_all(&state.pool)
            .await?
        }
    };

    let additions_over_time: Vec<MonthlyAdditions> = additions_rows
        .iter()
        .map(|r| MonthlyAdditions {
            month: r.get("month"),
            books_added: r.get("books_added"),
        })
        .collect();

    // Metadata stats
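    // Four independent scalar subqueries are bundled into one statement, so the
    // metadata card costs a single database round trip.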
    let meta_row = sqlx::query(
        r#"
        SELECT
            (SELECT COUNT(DISTINCT NULLIF(series, '')) FROM books) AS total_series,
            (SELECT COUNT(DISTINCT series_name) FROM external_metadata_links WHERE status = 'approved') AS series_linked,
            (SELECT COUNT(*) FROM books WHERE summary IS NOT NULL AND summary != '') AS books_with_summary,
            (SELECT COUNT(*) FROM books WHERE isbn IS NOT NULL AND isbn != '') AS books_with_isbn
        "#,
    )
    .fetch_one(&state.pool)
    .await?;

    let meta_total_series: i64 = meta_row.get("total_series");
    let meta_series_linked: i64 = meta_row.get("series_linked");

    let provider_rows = sqlx::query(
        r#"
        SELECT provider, COUNT(DISTINCT series_name) AS count
        FROM external_metadata_links
        WHERE status = 'approved'
        GROUP BY provider
        ORDER BY count DESC
        "#,
    )
    .fetch_all(&state.pool)
    .await?;

    let by_provider: Vec<ProviderCount> = provider_rows
        .iter()
        .map(|r| ProviderCount {
            provider: r.get("provider"),
            count: r.get("count"),
        })
        .collect();

    let metadata = MetadataStats {
        total_series: meta_total_series,
        series_linked: meta_series_linked,
        series_unlinked: meta_total_series - meta_series_linked,
        books_with_summary: meta_row.get("books_with_summary"),
        books_with_isbn: meta_row.get("books_with_isbn"),
        by_provider,
    };

    // Currently reading books
    let reading_rows = sqlx::query(
        r#"
        SELECT b.id AS book_id, b.title, b.series, brp.current_page, b.page_count, u.username
        FROM book_reading_progress brp
        JOIN books b ON b.id = brp.book_id
        LEFT JOIN users u ON u.id = brp.user_id
        WHERE brp.status = 'reading' AND brp.current_page IS NOT NULL
          AND ($1::uuid IS NULL OR brp.user_id = $1)
        ORDER BY brp.updated_at DESC
        LIMIT 20
        "#,
    )
    .bind(user_id)
    .fetch_all(&state.pool)
    .await?;

    let currently_reading: Vec<CurrentlyReadingItem> = reading_rows
        .iter()
        .map(|r| {
            let id: uuid::Uuid = r.get("book_id");
            CurrentlyReadingItem {
                book_id: id.to_string(),
                title: r.get("title"),
                series: r.get("series"),
                current_page: r.get::<Option<i32>, _>("current_page").unwrap_or(0),
                page_count: r.get::<Option<i32>, _>("page_count").unwrap_or(0),
                username: r.get("username"),
            }
        })
        .collect();

    // Recently read books
    let recent_rows = sqlx::query(
        r#"
        SELECT b.id AS book_id, b.title, b.series,
               TO_CHAR(brp.last_read_at, 'YYYY-MM-DD') AS last_read_at,
               u.username
        FROM book_reading_progress brp
        JOIN books b ON b.id = brp.book_id
        LEFT JOIN users u ON u.id = brp.user_id
        WHERE brp.status = 'read' AND brp.last_read_at IS NOT NULL
          AND ($1::uuid IS NULL OR brp.user_id = $1)
        ORDER BY brp.last_read_at DESC
        LIMIT 10
        "#,
    )
    .bind(user_id)
    .fetch_all(&state.pool)
    .await?;

    let recently_read: Vec<RecentlyReadItem> = recent_rows
        .iter()
        .map(|r| {
            let id: uuid::Uuid = r.get("book_id");
            RecentlyReadItem {
                book_id: id.to_string(),
                title: r.get("title"),
                series: r.get("series"),
                last_read_at: r.get::<Option<String>, _>("last_read_at").unwrap_or_default(),
                username: r.get("username"),
            }
        })
        .collect();

    // Reading activity over time (with gap filling)
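    // Same generate_series + LEFT JOIN zero-fill as additions_over_time, restricted to
    // 'read' progress rows and to a single user when $1 is non-NULL.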
    let reading_time_rows = match period {
        "day" => {
            sqlx::query(
                r#"
                SELECT
                    TO_CHAR(d.dt, 'YYYY-MM-DD') AS month,
                    COALESCE(cnt.books_read, 0) AS books_read
                FROM generate_series(CURRENT_DATE - INTERVAL '6 days', CURRENT_DATE, '1 day') AS d(dt)
                LEFT JOIN (
                    SELECT brp.last_read_at::date AS dt, COUNT(*) AS books_read
                    FROM book_reading_progress brp
                    WHERE brp.status = 'read'
                      AND brp.last_read_at >= CURRENT_DATE - INTERVAL '6 days'
                      AND ($1::uuid IS NULL OR brp.user_id = $1)
                    GROUP BY brp.last_read_at::date
                ) cnt ON cnt.dt = d.dt
                ORDER BY month ASC
                "#,
            )
            .bind(user_id)
            .fetch_all(&state.pool)
            .await?
        }
        "week" => {
            sqlx::query(
                r#"
                SELECT
                    TO_CHAR(d.dt, 'YYYY-MM-DD') AS month,
                    COALESCE(cnt.books_read, 0) AS books_read
                FROM generate_series(
                    DATE_TRUNC('week', NOW() - INTERVAL '2 months'),
                    DATE_TRUNC('week', NOW()),
                    '1 week'
                ) AS d(dt)
                LEFT JOIN (
                    SELECT DATE_TRUNC('week', brp.last_read_at) AS dt, COUNT(*) AS books_read
                    FROM book_reading_progress brp
                    WHERE brp.status = 'read'
                      AND brp.last_read_at >= DATE_TRUNC('week', NOW() - INTERVAL '2 months')
                      AND ($1::uuid IS NULL OR brp.user_id = $1)
                    GROUP BY DATE_TRUNC('week', brp.last_read_at)
                ) cnt ON cnt.dt = d.dt
                ORDER BY month ASC
                "#,
            )
            .bind(user_id)
            .fetch_all(&state.pool)
            .await?
        }
        _ => {
            sqlx::query(
                r#"
                SELECT
                    TO_CHAR(d.dt, 'YYYY-MM') AS month,
                    COALESCE(cnt.books_read, 0) AS books_read
                FROM generate_series(
                    DATE_TRUNC('month', NOW()) - INTERVAL '11 months',
                    DATE_TRUNC('month', NOW()),
                    '1 month'
                ) AS d(dt)
                LEFT JOIN (
                    SELECT DATE_TRUNC('month', brp.last_read_at) AS dt, COUNT(*) AS books_read
                    FROM book_reading_progress brp
                    WHERE brp.status = 'read'
                      AND brp.last_read_at >= DATE_TRUNC('month', NOW()) - INTERVAL '11 months'
                      AND ($1::uuid IS NULL OR brp.user_id = $1)
                    GROUP BY DATE_TRUNC('month', brp.last_read_at)
                ) cnt ON cnt.dt = d.dt
                ORDER BY month ASC
                "#,
            )
            .bind(user_id)
            .fetch_all(&state.pool)
            .await?
        }
    };

    let reading_over_time: Vec<MonthlyReading> = reading_time_rows
        .iter()
        .map(|r| MonthlyReading {
            month: r.get::<Option<String>, _>("month").unwrap_or_default(),
            books_read: r.get("books_read"),
        })
        .collect();

    // Per-user reading over time (admin view — always all users, no user_id filter)
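    // CROSS JOIN users multiplies the period series by the user list, so every user
    // gets a complete zero-filled series even for periods with no finished book.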
    let users_reading_time_rows = match period {
        "day" => {
            sqlx::query(
                r#"
                SELECT
                    TO_CHAR(d.dt, 'YYYY-MM-DD') AS month,
                    u.username,
                    COALESCE(cnt.books_read, 0) AS books_read
                FROM generate_series(CURRENT_DATE - INTERVAL '6 days', CURRENT_DATE, '1 day') AS d(dt)
                CROSS JOIN users u
                LEFT JOIN (
                    SELECT brp.last_read_at::date AS dt, brp.user_id, COUNT(*) AS books_read
                    FROM book_reading_progress brp
                    WHERE brp.status = 'read'
                      AND brp.last_read_at >= CURRENT_DATE - INTERVAL '6 days'
                    GROUP BY brp.last_read_at::date, brp.user_id
                ) cnt ON cnt.dt = d.dt AND cnt.user_id = u.id
                ORDER BY month ASC, u.username
                "#,
            )
            .fetch_all(&state.pool)
            .await?
        }
        "week" => {
            sqlx::query(
                r#"
                SELECT
                    TO_CHAR(d.dt, 'YYYY-MM-DD') AS month,
                    u.username,
                    COALESCE(cnt.books_read, 0) AS books_read
                FROM generate_series(
                    DATE_TRUNC('week', NOW() - INTERVAL '2 months'),
                    DATE_TRUNC('week', NOW()),
                    '1 week'
                ) AS d(dt)
                CROSS JOIN users u
                LEFT JOIN (
                    SELECT DATE_TRUNC('week', brp.last_read_at) AS dt, brp.user_id, COUNT(*) AS books_read
                    FROM book_reading_progress brp
                    WHERE brp.status = 'read'
                      AND brp.last_read_at >= DATE_TRUNC('week', NOW() - INTERVAL '2 months')
                    GROUP BY DATE_TRUNC('week', brp.last_read_at), brp.user_id
                ) cnt ON cnt.dt = d.dt AND cnt.user_id = u.id
                ORDER BY month ASC, u.username
                "#,
            )
            .fetch_all(&state.pool)
            .await?
        }
        _ => {
            sqlx::query(
                r#"
                SELECT
                    TO_CHAR(d.dt, 'YYYY-MM') AS month,
                    u.username,
                    COALESCE(cnt.books_read, 0) AS books_read
                FROM generate_series(
                    DATE_TRUNC('month', NOW()) - INTERVAL '11 months',
                    DATE_TRUNC('month', NOW()),
                    '1 month'
                ) AS d(dt)
                CROSS JOIN users u
                LEFT JOIN (
                    SELECT DATE_TRUNC('month', brp.last_read_at) AS dt, brp.user_id, COUNT(*) AS books_read
                    FROM book_reading_progress brp
                    WHERE brp.status = 'read'
                      AND brp.last_read_at >= DATE_TRUNC('month', NOW()) - INTERVAL '11 months'
                    GROUP BY DATE_TRUNC('month', brp.last_read_at), brp.user_id
                ) cnt ON cnt.dt = d.dt AND cnt.user_id = u.id
                ORDER BY month ASC, u.username
                "#,
            )
            .fetch_all(&state.pool)
            .await?
        }
    };

    let users_reading_over_time: Vec<UserMonthlyReading> = users_reading_time_rows
        .iter()
        .map(|r| UserMonthlyReading {
            month: r.get::<Option<String>, _>("month").unwrap_or_default(),
            username: r.get("username"),
            books_read: r.get("books_read"),
        })
        .collect();

    // Jobs over time (with gap filling, grouped by type category)
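    // Finished jobs are bucketed into scan/rebuild/thumbnail/other with a CASE expression,
    // then pivoted into one column per category via COALESCE(SUM(...) FILTER (WHERE ...), 0).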
    let jobs_rows = match period {
        "day" => {
            sqlx::query(
                r#"
                SELECT
                    TO_CHAR(d.dt, 'YYYY-MM-DD') AS label,
                    COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'scan'), 0)::BIGINT AS scan,
                    COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'rebuild'), 0)::BIGINT AS rebuild,
                    COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'thumbnail'), 0)::BIGINT AS thumbnail,
                    COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'other'), 0)::BIGINT AS other
                FROM generate_series(CURRENT_DATE - INTERVAL '6 days', CURRENT_DATE, '1 day') AS d(dt)
                LEFT JOIN (
                    SELECT
                        finished_at::date AS dt,
                        CASE
                            WHEN type = 'scan' THEN 'scan'
                            WHEN type IN ('rebuild', 'full_rebuild', 'rescan') THEN 'rebuild'
                            WHEN type IN ('thumbnail_rebuild', 'thumbnail_regenerate') THEN 'thumbnail'
                            ELSE 'other'
                        END AS cat,
                        COUNT(*) AS c
                    FROM index_jobs
                    WHERE status IN ('success', 'failed')
                      AND finished_at >= CURRENT_DATE - INTERVAL '6 days'
                    GROUP BY finished_at::date, cat
                ) cnt ON cnt.dt = d.dt
                GROUP BY d.dt
                ORDER BY label ASC
                "#,
            )
            .fetch_all(&state.pool)
            .await?
        }
        "week" => {
            sqlx::query(
                r#"
                SELECT
                    TO_CHAR(d.dt, 'YYYY-MM-DD') AS label,
                    COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'scan'), 0)::BIGINT AS scan,
                    COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'rebuild'), 0)::BIGINT AS rebuild,
                    COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'thumbnail'), 0)::BIGINT AS thumbnail,
                    COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'other'), 0)::BIGINT AS other
                FROM generate_series(
                    DATE_TRUNC('week', NOW() - INTERVAL '2 months'),
                    DATE_TRUNC('week', NOW()),
                    '1 week'
                ) AS d(dt)
                LEFT JOIN (
                    SELECT
                        DATE_TRUNC('week', finished_at) AS dt,
                        CASE
                            WHEN type = 'scan' THEN 'scan'
                            WHEN type IN ('rebuild', 'full_rebuild', 'rescan') THEN 'rebuild'
                            WHEN type IN ('thumbnail_rebuild', 'thumbnail_regenerate') THEN 'thumbnail'
                            ELSE 'other'
                        END AS cat,
                        COUNT(*) AS c
                    FROM index_jobs
                    WHERE status IN ('success', 'failed')
                      AND finished_at >= DATE_TRUNC('week', NOW() - INTERVAL '2 months')
                    GROUP BY DATE_TRUNC('week', finished_at), cat
                ) cnt ON cnt.dt = d.dt
                GROUP BY d.dt
                ORDER BY label ASC
                "#,
            )
            .fetch_all(&state.pool)
            .await?
        }
        _ => {
            sqlx::query(
                r#"
                SELECT
                    TO_CHAR(d.dt, 'YYYY-MM') AS label,
                    COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'scan'), 0)::BIGINT AS scan,
                    COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'rebuild'), 0)::BIGINT AS rebuild,
                    COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'thumbnail'), 0)::BIGINT AS thumbnail,
                    COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'other'), 0)::BIGINT AS other
                FROM generate_series(
                    DATE_TRUNC('month', NOW()) - INTERVAL '11 months',
                    DATE_TRUNC('month', NOW()),
                    '1 month'
                ) AS d(dt)
                LEFT JOIN (
                    SELECT
                        DATE_TRUNC('month', finished_at) AS dt,
                        CASE
                            WHEN type = 'scan' THEN 'scan'
                            WHEN type IN ('rebuild', 'full_rebuild', 'rescan') THEN 'rebuild'
                            WHEN type IN ('thumbnail_rebuild', 'thumbnail_regenerate') THEN 'thumbnail'
                            ELSE 'other'
                        END AS cat,
                        COUNT(*) AS c
                    FROM index_jobs
                    WHERE status IN ('success', 'failed')
                      AND finished_at >= DATE_TRUNC('month', NOW()) - INTERVAL '11 months'
                    GROUP BY DATE_TRUNC('month', finished_at), cat
                ) cnt ON cnt.dt = d.dt
                GROUP BY d.dt
                ORDER BY label ASC
                "#,
            )
            .fetch_all(&state.pool)
            .await?
        }
    };

    let jobs_over_time: Vec<JobTimePoint> = jobs_rows
        .iter()
        .map(|r| JobTimePoint {
            label: r.get("label"),
            scan: r.get("scan"),
            rebuild: r.get("rebuild"),
            thumbnail: r.get("thumbnail"),
            other: r.get("other"),
        })
        .collect();

    Ok(Json(StatsResponse {
        overview,
        reading_status,
        currently_reading,
        recently_read,
        reading_over_time,
        by_format,
        by_language,
        by_library,
        top_series,
        additions_over_time,
        jobs_over_time,
        metadata,
        users_reading_over_time,
    }))
}
apps/api/src/telegram.rs (new file, 46 lines)
@@ -0,0 +1,46 @@
use axum::{extract::State, Json};
use serde::Serialize;
use utoipa::ToSchema;

use crate::{error::ApiError, notifications, state::AppState};

#[derive(Serialize, ToSchema)]
pub struct TelegramTestResponse {
    pub success: bool,
    pub message: String,
}

/// Test Telegram connection by sending a test message
#[utoipa::path(
    get,
    path = "/telegram/test",
    tag = "notifications",
    responses(
        (status = 200, body = TelegramTestResponse),
        (status = 400, description = "Telegram not configured"),
        (status = 401, description = "Unauthorized"),
    ),
    security(("Bearer" = []))
)]
pub async fn test_telegram(
    State(state): State<AppState>,
) -> Result<Json<TelegramTestResponse>, ApiError> {
    let config = notifications::load_telegram_config(&state.pool)
        .await
        .ok_or_else(|| {
            ApiError::bad_request(
                "Telegram is not configured or disabled. Set bot_token, chat_id, and enable it.",
            )
        })?;

    match notifications::send_test_message(&config).await {
        Ok(()) => Ok(Json(TelegramTestResponse {
            success: true,
            message: "Test message sent successfully".to_string(),
        })),
        Err(e) => Ok(Json(TelegramTestResponse {
            success: false,
            message: format!("Failed to send: {e}"),
        })),
    }
}
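
A minimal client sketch for the endpoint above, assuming `reqwest` (with the `json` feature) and `tokio`; the base URL and bearer token mirror the dev `.env` defaults and are illustrative only:

```rust
use serde::Deserialize;

#[derive(Deserialize)]
struct TelegramTestResponse {
    success: bool,
    message: String,
}

#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    // GET /telegram/test returns 200 with a success flag even when sending fails,
    // and 400 when Telegram is not configured at all.
    let resp: TelegramTestResponse = reqwest::Client::new()
        .get("http://localhost:7080/telegram/test") // assumed dev base URL
        .bearer_auth("stripstream-dev-bootstrap-token") // assumed dev token
        .send()
        .await?
        .json()
        .await?;
    println!("success={} message={}", resp.success, resp.message);
    Ok(())
}
```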

apps/api/src/thumbnails.rs (new file, 83 lines)
@@ -0,0 +1,83 @@
use axum::{
    extract::State,
    Json,
};
use serde::Deserialize;
use uuid::Uuid;
use utoipa::ToSchema;

use crate::{error::ApiError, index_jobs, state::AppState};

#[derive(Deserialize, ToSchema)]
pub struct ThumbnailsRebuildRequest {
    #[schema(value_type = Option<String>)]
    pub library_id: Option<Uuid>,
}

/// POST /index/thumbnails/rebuild — create a job to generate thumbnails for books that don't have one.
#[utoipa::path(
    post,
    path = "/index/thumbnails/rebuild",
    tag = "indexing",
    request_body = Option<ThumbnailsRebuildRequest>,
    responses(
        (status = 200, body = IndexJobResponse),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden - Admin scope required"),
    ),
    security(("Bearer" = []))
)]
pub async fn start_thumbnails_rebuild(
    State(state): State<AppState>,
    payload: Option<Json<ThumbnailsRebuildRequest>>,
) -> Result<Json<index_jobs::IndexJobResponse>, ApiError> {
    let library_id = payload.as_ref().and_then(|p| p.0.library_id);
    let job_id = Uuid::new_v4();

    let row = sqlx::query(
        r#"INSERT INTO index_jobs (id, library_id, type, status)
           VALUES ($1, $2, 'thumbnail_rebuild', 'pending')
           RETURNING id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at"#,
    )
    .bind(job_id)
    .bind(library_id)
    .fetch_one(&state.pool)
    .await
    .map_err(|e| ApiError::internal(e.to_string()))?;

    Ok(Json(index_jobs::map_row(row)))
}

/// POST /index/thumbnails/regenerate — create a job to regenerate all thumbnails (clears then regenerates).
#[utoipa::path(
    post,
    path = "/index/thumbnails/regenerate",
    tag = "indexing",
    request_body = Option<ThumbnailsRebuildRequest>,
    responses(
        (status = 200, body = IndexJobResponse),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden - Admin scope required"),
    ),
    security(("Bearer" = []))
)]
pub async fn start_thumbnails_regenerate(
    State(state): State<AppState>,
    payload: Option<Json<ThumbnailsRebuildRequest>>,
) -> Result<Json<index_jobs::IndexJobResponse>, ApiError> {
    let library_id = payload.as_ref().and_then(|p| p.0.library_id);
    let job_id = Uuid::new_v4();

    let row = sqlx::query(
        r#"INSERT INTO index_jobs (id, library_id, type, status)
           VALUES ($1, $2, 'thumbnail_regenerate', 'pending')
           RETURNING id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at"#,
    )
    .bind(job_id)
    .bind(library_id)
    .fetch_one(&state.pool)
    .await
    .map_err(|e| ApiError::internal(e.to_string()))?;

    Ok(Json(index_jobs::map_row(row)))
}
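
To queue a rebuild scoped to one library, a sketch under the same assumptions as the previous example (reqwest + tokio, plus `serde_json`; the UUID is a placeholder):

```rust
use serde_json::json;

#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    // POST with an optional body; omitting library_id rebuilds thumbnails everywhere.
    let resp = reqwest::Client::new()
        .post("http://localhost:7080/index/thumbnails/rebuild") // assumed dev base URL
        .bearer_auth("stripstream-dev-bootstrap-token") // assumed admin-scoped token
        .json(&json!({ "library_id": "00000000-0000-0000-0000-000000000000" })) // placeholder UUID
        .send()
        .await?;
    println!("queued: HTTP {}", resp.status());
    Ok(())
}
```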

@@ -8,7 +8,7 @@ use sqlx::Row;
 use uuid::Uuid;
 use utoipa::ToSchema;
 
-use crate::{error::ApiError, AppState};
+use crate::{error::ApiError, state::AppState};
 
 #[derive(Deserialize, ToSchema)]
 pub struct CreateTokenRequest {
@@ -16,6 +16,8 @@ pub struct CreateTokenRequest {
     pub name: String,
     #[schema(value_type = Option<String>, example = "read")]
     pub scope: Option<String>,
+    #[schema(value_type = Option<String>)]
+    pub user_id: Option<Uuid>,
 }
 
 #[derive(Serialize, ToSchema)]
@@ -26,6 +28,9 @@ pub struct TokenResponse {
     pub scope: String,
     pub prefix: String,
     #[schema(value_type = Option<String>)]
+    pub user_id: Option<Uuid>,
+    pub username: Option<String>,
+    #[schema(value_type = Option<String>)]
     pub last_used_at: Option<DateTime<Utc>>,
     #[schema(value_type = Option<String>)]
     pub revoked_at: Option<DateTime<Utc>>,
@@ -71,6 +76,10 @@ pub async fn create_token(
         _ => return Err(ApiError::bad_request("scope must be 'admin' or 'read'")),
     };
 
+    if scope == "read" && input.user_id.is_none() {
+        return Err(ApiError::bad_request("user_id is required for read-scoped tokens"));
+    }
+
     let mut random = [0u8; 24];
     OsRng.fill_bytes(&mut random);
     let secret = URL_SAFE_NO_PAD.encode(random);
@@ -85,13 +94,14 @@ pub async fn create_token(
 
     let id = Uuid::new_v4();
     sqlx::query(
-        "INSERT INTO api_tokens (id, name, prefix, token_hash, scope) VALUES ($1, $2, $3, $4, $5)",
+        "INSERT INTO api_tokens (id, name, prefix, token_hash, scope, user_id) VALUES ($1, $2, $3, $4, $5, $6)",
     )
     .bind(id)
     .bind(input.name.trim())
     .bind(&prefix)
     .bind(token_hash)
     .bind(scope)
+    .bind(input.user_id)
     .execute(&state.pool)
     .await?;
 
@@ -118,7 +128,13 @@ pub async fn create_token(
 )]
 pub async fn list_tokens(State(state): State<AppState>) -> Result<Json<Vec<TokenResponse>>, ApiError> {
     let rows = sqlx::query(
-        "SELECT id, name, scope, prefix, last_used_at, revoked_at, created_at FROM api_tokens ORDER BY created_at DESC",
+        r#"
+        SELECT t.id, t.name, t.scope, t.prefix, t.user_id, u.username,
+               t.last_used_at, t.revoked_at, t.created_at
+        FROM api_tokens t
+        LEFT JOIN users u ON u.id = t.user_id
+        ORDER BY t.created_at DESC
+        "#,
     )
     .fetch_all(&state.pool)
     .await?;
@@ -130,6 +146,8 @@ pub async fn list_tokens(State(state): State<AppState>) -> Result<Json<Vec<Token
             name: row.get("name"),
             scope: row.get("scope"),
             prefix: row.get("prefix"),
+            user_id: row.get("user_id"),
+            username: row.get("username"),
             last_used_at: row.get("last_used_at"),
             revoked_at: row.get("revoked_at"),
             created_at: row.get("created_at"),
@@ -170,3 +188,76 @@ pub async fn revoke_token(
 
     Ok(Json(serde_json::json!({"revoked": true, "id": id})))
 }
+
+#[derive(Deserialize, ToSchema)]
+pub struct UpdateTokenRequest {
+    #[schema(value_type = Option<String>)]
+    pub user_id: Option<Uuid>,
+}
+
+/// Update a token's assigned user
+#[utoipa::path(
+    patch,
+    path = "/admin/tokens/{id}",
+    tag = "tokens",
+    params(
+        ("id" = String, Path, description = "Token UUID"),
+    ),
+    request_body = UpdateTokenRequest,
+    responses(
+        (status = 200, description = "Token updated"),
+        (status = 404, description = "Token not found"),
+        (status = 401, description = "Unauthorized"),
+        (status = 403, description = "Forbidden - Admin scope required"),
+    ),
+    security(("Bearer" = []))
+)]
+pub async fn update_token(
+    State(state): State<AppState>,
+    Path(id): Path<Uuid>,
+    Json(input): Json<UpdateTokenRequest>,
+) -> Result<Json<serde_json::Value>, ApiError> {
+    let result = sqlx::query("UPDATE api_tokens SET user_id = $1 WHERE id = $2")
+        .bind(input.user_id)
+        .bind(id)
+        .execute(&state.pool)
+        .await?;
+
+    if result.rows_affected() == 0 {
+        return Err(ApiError::not_found("token not found"));
+    }
+
+    Ok(Json(serde_json::json!({"updated": true, "id": id})))
+}
+
+/// Permanently delete a revoked API token
+#[utoipa::path(
+    post,
+    path = "/admin/tokens/{id}/delete",
+    tag = "tokens",
+    params(
+        ("id" = String, Path, description = "Token UUID"),
+    ),
+    responses(
+        (status = 200, description = "Token permanently deleted"),
+        (status = 404, description = "Token not found or not revoked"),
+        (status = 401, description = "Unauthorized"),
+        (status = 403, description = "Forbidden - Admin scope required"),
+    ),
+    security(("Bearer" = []))
+)]
+pub async fn delete_token(
+    State(state): State<AppState>,
+    Path(id): Path<Uuid>,
+) -> Result<Json<serde_json::Value>, ApiError> {
+    let result = sqlx::query("DELETE FROM api_tokens WHERE id = $1 AND revoked_at IS NOT NULL")
+        .bind(id)
+        .execute(&state.pool)
+        .await?;
+
+    if result.rows_affected() == 0 {
+        return Err(ApiError::not_found("token not found or not revoked"));
+    }
+
+    Ok(Json(serde_json::json!({"deleted": true, "id": id})))
+}

apps/api/src/users.rs (new file, 195 lines)
@@ -0,0 +1,195 @@
use axum::{extract::{Path, State}, Json};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use sqlx::Row;
use uuid::Uuid;
use utoipa::ToSchema;

use crate::{error::ApiError, state::AppState};

#[derive(Serialize, ToSchema)]
pub struct UserResponse {
    #[schema(value_type = String)]
    pub id: Uuid,
    pub username: String,
    pub token_count: i64,
    pub books_read: i64,
    pub books_reading: i64,
    #[schema(value_type = String)]
    pub created_at: DateTime<Utc>,
}

#[derive(Deserialize, ToSchema)]
pub struct CreateUserRequest {
    pub username: String,
}

/// List all reader users with their associated token count
#[utoipa::path(
    get,
    path = "/admin/users",
    tag = "users",
    responses(
        (status = 200, body = Vec<UserResponse>),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden - Admin scope required"),
    ),
    security(("Bearer" = []))
)]
pub async fn list_users(State(state): State<AppState>) -> Result<Json<Vec<UserResponse>>, ApiError> {
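    // COUNT(DISTINCT ...) is required here: the two LEFT JOINs fan rows out, so each
    // token row is repeated once per progress row (and vice versa) before grouping.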
    let rows = sqlx::query(
        r#"
        SELECT u.id, u.username, u.created_at,
               COUNT(DISTINCT t.id) AS token_count,
               COUNT(DISTINCT brp.book_id) FILTER (WHERE brp.status = 'read') AS books_read,
               COUNT(DISTINCT brp.book_id) FILTER (WHERE brp.status = 'reading') AS books_reading
        FROM users u
        LEFT JOIN api_tokens t ON t.user_id = u.id AND t.revoked_at IS NULL
        LEFT JOIN book_reading_progress brp ON brp.user_id = u.id
        GROUP BY u.id, u.username, u.created_at
        ORDER BY u.created_at DESC
        "#,
    )
    .fetch_all(&state.pool)
    .await?;

    let items = rows
        .into_iter()
        .map(|row| UserResponse {
            id: row.get("id"),
            username: row.get("username"),
            token_count: row.get("token_count"),
            books_read: row.get("books_read"),
            books_reading: row.get("books_reading"),
            created_at: row.get("created_at"),
        })
        .collect();

    Ok(Json(items))
}

/// Create a new reader user
#[utoipa::path(
    post,
    path = "/admin/users",
    tag = "users",
    request_body = CreateUserRequest,
    responses(
        (status = 200, body = UserResponse, description = "User created"),
        (status = 400, description = "Invalid input"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden - Admin scope required"),
    ),
    security(("Bearer" = []))
)]
pub async fn create_user(
    State(state): State<AppState>,
    Json(input): Json<CreateUserRequest>,
) -> Result<Json<UserResponse>, ApiError> {
    if input.username.trim().is_empty() {
        return Err(ApiError::bad_request("username is required"));
    }

    let id = Uuid::new_v4();
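    // A unique violation on users_username_key is mapped to a 400 below, so duplicate
    // usernames surface as a client error rather than a generic 500.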
    let row = sqlx::query(
        "INSERT INTO users (id, username) VALUES ($1, $2) RETURNING id, username, created_at",
    )
    .bind(id)
    .bind(input.username.trim())
    .fetch_one(&state.pool)
    .await
    .map_err(|e| {
        if let sqlx::Error::Database(ref db_err) = e {
            if db_err.constraint() == Some("users_username_key") {
                return ApiError::bad_request("username already exists");
            }
        }
        ApiError::from(e)
    })?;

    Ok(Json(UserResponse {
        id: row.get("id"),
        username: row.get("username"),
        token_count: 0,
        books_read: 0,
        books_reading: 0,
        created_at: row.get("created_at"),
    }))
}

/// Update a reader user's username
#[utoipa::path(
    patch,
    path = "/admin/users/{id}",
    tag = "users",
    request_body = CreateUserRequest,
    responses(
        (status = 200, body = UserResponse, description = "User updated"),
        (status = 400, description = "Invalid input"),
        (status = 404, description = "User not found"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden - Admin scope required"),
    ),
    security(("Bearer" = []))
)]
pub async fn update_user(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
    Json(input): Json<CreateUserRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
    if input.username.trim().is_empty() {
        return Err(ApiError::bad_request("username is required"));
    }

    let result = sqlx::query("UPDATE users SET username = $1 WHERE id = $2")
        .bind(input.username.trim())
        .bind(id)
        .execute(&state.pool)
        .await
        .map_err(|e| {
            if let sqlx::Error::Database(ref db_err) = e {
                if db_err.constraint() == Some("users_username_key") {
                    return ApiError::bad_request("username already exists");
                }
            }
            ApiError::from(e)
        })?;

    if result.rows_affected() == 0 {
        return Err(ApiError::not_found("user not found"));
    }

    Ok(Json(serde_json::json!({"updated": true, "id": id})))
}

/// Delete a reader user (cascades to tokens and reading progress)
#[utoipa::path(
    delete,
    path = "/admin/users/{id}",
    tag = "users",
    params(
        ("id" = String, Path, description = "User UUID"),
    ),
    responses(
        (status = 200, description = "User deleted"),
        (status = 404, description = "User not found"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden - Admin scope required"),
    ),
    security(("Bearer" = []))
)]
pub async fn delete_user(
    State(state): State<AppState>,
    Path(id): Path<Uuid>,
) -> Result<Json<serde_json::Value>, ApiError> {
    let result = sqlx::query("DELETE FROM users WHERE id = $1")
        .bind(id)
        .execute(&state.pool)
        .await?;

    if result.rows_affected() == 0 {
        return Err(ApiError::not_found("user not found"));
    }

    Ok(Json(serde_json::json!({"deleted": true, "id": id})))
}
@@ -1,4 +1,4 @@
-API_BASE_URL=http://localhost:8080
+API_BASE_URL=http://localhost:7080
 API_BOOTSTRAP_TOKEN=stripstream-dev-bootstrap-token
-NEXT_PUBLIC_API_BASE_URL=http://localhost:8080
+NEXT_PUBLIC_API_BASE_URL=http://localhost:7080
 NEXT_PUBLIC_API_BOOTSTRAP_TOKEN=stripstream-dev-bootstrap-token

apps/backoffice/AGENTS.md (new file, 66 lines)
@@ -0,0 +1,66 @@
# apps/backoffice — Admin interface (Next.js)

Next.js 16 app with React 19, Tailwind CSS v4, TypeScript. Dev port: **7082** (`npm run dev`).

## Structure

```
app/
├── layout.tsx        # Global layout (sticky glassmorphism nav, ThemeProvider)
├── page.tsx          # Dashboard
├── books/            # Book list and detail
├── libraries/        # Library management
├── jobs/             # Job monitoring
├── tokens/           # API tokens
├── settings/         # Settings
├── components/       # Feature components
│   ├── ui/           # Generic components (Button, Card, Badge, Icon, Input, ProgressBar, StatBox...)
│   ├── BookCard.tsx
│   ├── JobProgress.tsx
│   ├── JobsList.tsx
│   ├── LibraryForm.tsx
│   ├── FolderBrowser.tsx / FolderPicker.tsx
│   └── ...
└── globals.css       # CSS variables, Tailwind base
lib/
└── api.ts            # API client: DTO types + fetch functions to the Rust API
```

## API client (lib/api.ts)

All calls to the Rust API go through `lib/api.ts`. The DTO types are defined there:
- `LibraryDto`, `IndexJobDto`, `BookDto`, `TokenDto`, `FolderItem`

Add new endpoints and types in this file.

## UI components

Generic components live in `app/components/ui/`. Use these components rather than raw HTML elements:

```tsx
import { Button, Card, Badge, Icon, Input, ProgressBar, StatBox } from "@/app/components/ui";
```

## Conventions

- **App Router**: all pages are Server Components by default. Use `"use client"` only for interactivity.
- **Tailwind v4**: config in `postcss.config.js` + `tailwind.config.js`. CSS variables in `globals.css`.
- **Theming**: `ThemeProvider` + `ThemeToggle` for dark/light mode via `next-themes`.
- **Icons**: the `<Icon name="..." size="sm|md|lg" />` component in `ui/Icon.tsx` — no external icon library.
- **Navigation**: typed routes in `layout.tsx` (`"/" | "/books" | "/libraries" | "/jobs" | "/tokens" | "/settings"`).

## Commands

```bash
npm install
npm run dev    # http://localhost:7082
npm run build
npm run start  # Production on http://localhost:7082
```

## Gotchas

- **Port 7082**: not the default Next.js port (3000). Set in the `package.json` scripts (`-p 7082`).
- **API_BASE_URL**: in production, configured via env. In local dev, the API must run on `http://localhost:7080`.
- **React 19 + Next.js 16**: use the new APIs (server actions, the `use()` hook) where available.
- **No global state management**: fetch directly from Server Components, or use `useState`/`useEffect` in Client Components.
@@ -12,11 +12,11 @@ RUN npm run build
 FROM node:22-alpine AS runner
 WORKDIR /app
 ENV NODE_ENV=production
-ENV PORT=8082
+ENV PORT=7082
 ENV HOST=0.0.0.0
 RUN apk add --no-cache wget
 COPY --from=builder /app/.next/standalone ./
 COPY --from=builder /app/.next/static ./.next/static
 COPY --from=builder /app/public ./public
-EXPOSE 8082
+EXPOSE 7082
 CMD ["node", "server.js"]

apps/backoffice/app/(app)/authors/[name]/page.tsx (new file, 135 lines)
@@ -0,0 +1,135 @@
import { fetchBooks, fetchAllSeries, BooksPageDto, SeriesPageDto, getBookCoverUrl } from "@/lib/api";
import { getServerTranslations } from "@/lib/i18n/server";
import { BooksGrid } from "@/app/components/BookCard";
import { OffsetPagination } from "@/app/components/ui";
import Image from "next/image";
import Link from "next/link";

export const dynamic = "force-dynamic";

export default async function AuthorDetailPage({
  params,
  searchParams,
}: {
  params: Promise<{ name: string }>;
  searchParams: Promise<{ [key: string]: string | string[] | undefined }>;
}) {
  const { t } = await getServerTranslations();
  const { name: encodedName } = await params;
  const authorName = decodeURIComponent(encodedName);
  const searchParamsAwaited = await searchParams;
  const page = typeof searchParamsAwaited.page === "string" ? parseInt(searchParamsAwaited.page) : 1;
  const limit = typeof searchParamsAwaited.limit === "string" ? parseInt(searchParamsAwaited.limit) : 20;

  // Fetch books by this author (server-side filtering via API) and series by this author
  const [booksPage, seriesPage] = await Promise.all([
    fetchBooks(undefined, undefined, page, limit, undefined, undefined, authorName).catch(
      () => ({ items: [], total: 0, page: 1, limit }) as BooksPageDto
    ),
    fetchAllSeries(undefined, undefined, undefined, 1, 200, undefined, undefined, undefined, undefined, authorName).catch(
      () => ({ items: [], total: 0, page: 1, limit: 200 }) as SeriesPageDto
    ),
  ]);

  const totalPages = Math.ceil(booksPage.total / limit);

  const authorSeries = seriesPage.items;

  return (
    <>
      {/* Breadcrumb */}
      <nav className="flex items-center gap-2 text-sm text-muted-foreground mb-6">
        <Link href="/authors" className="hover:text-foreground transition-colors">
          {t("authors.title")}
        </Link>
        <span>/</span>
        <span className="text-foreground font-medium">{authorName}</span>
      </nav>

      {/* Author Header */}
      <div className="flex items-center gap-4 mb-8">
        <div className="w-16 h-16 rounded-full bg-accent/50 flex items-center justify-center flex-shrink-0">
          <span className="text-2xl font-bold text-accent-foreground">
            {authorName.charAt(0).toUpperCase()}
          </span>
        </div>
        <div>
          <h1 className="text-3xl font-bold text-foreground">{authorName}</h1>
          <div className="flex items-center gap-4 mt-1">
            <span className="text-sm text-muted-foreground">
              {t("authors.bookCount", { count: String(booksPage.total), plural: booksPage.total !== 1 ? "s" : "" })}
            </span>
            {authorSeries.length > 0 && (
              <span className="text-sm text-muted-foreground">
                {t("authors.seriesCount", { count: String(authorSeries.length), plural: authorSeries.length !== 1 ? "s" : "" })}
              </span>
            )}
          </div>
        </div>
      </div>

      {/* Series Section */}
      {authorSeries.length > 0 && (
        <section className="mb-8">
          <h2 className="text-xl font-semibold text-foreground mb-4">
            {t("authors.seriesBy", { name: authorName })}
          </h2>
          <div className="grid grid-cols-2 sm:grid-cols-3 md:grid-cols-4 lg:grid-cols-5 xl:grid-cols-6 gap-4">
            {authorSeries.map((s) => (
              <Link
                key={`${s.library_id}-${s.name}`}
                href={`/libraries/${s.library_id}/series/${encodeURIComponent(s.name)}`}
                className="group"
              >
                <div className="bg-card rounded-xl shadow-sm border border-border/60 overflow-hidden hover:shadow-md hover:-translate-y-1 transition-all duration-200">
                  <div className="aspect-[2/3] relative bg-muted/50">
                    <Image
                      src={getBookCoverUrl(s.first_book_id)}
                      alt={s.name}
                      fill
                      className="object-cover"
                      sizes="(max-width: 640px) 50vw, (max-width: 768px) 33vw, (max-width: 1024px) 25vw, 16vw"
                    />
                  </div>
                  <div className="p-3">
                    <h3 className="font-medium text-foreground truncate text-sm" title={s.name}>
                      {s.name}
                    </h3>
                    <p className="text-xs text-muted-foreground mt-1">
                      {t("authors.bookCount", { count: String(s.book_count), plural: s.book_count !== 1 ? "s" : "" })}
                    </p>
                  </div>
                </div>
              </Link>
            ))}
          </div>
        </section>
      )}

      {/* Books Section */}
      {booksPage.items.length > 0 && (
        <section>
          <h2 className="text-xl font-semibold text-foreground mb-4">
            {t("authors.booksBy", { name: authorName })}
          </h2>
          <BooksGrid books={booksPage.items} />
          <OffsetPagination
            currentPage={page}
            totalPages={totalPages}
            pageSize={limit}
            totalItems={booksPage.total}
          />
        </section>
      )}

      {/* Empty State */}
      {booksPage.items.length === 0 && authorSeries.length === 0 && (
        <div className="flex flex-col items-center justify-center py-16 text-center">
          <p className="text-muted-foreground text-lg">
            {t("authors.noResults")}
          </p>
        </div>
      )}
    </>
  );
}
apps/backoffice/app/(app)/authors/page.tsx (new file, 122 lines)
@@ -0,0 +1,122 @@
import { fetchAuthors, AuthorsPageDto } from "@/lib/api";
import { getServerTranslations } from "@/lib/i18n/server";
import { LiveSearchForm } from "@/app/components/LiveSearchForm";
import { Card, CardContent, OffsetPagination } from "@/app/components/ui";
import Link from "next/link";

export const dynamic = "force-dynamic";

export default async function AuthorsPage({
  searchParams,
}: {
  searchParams: Promise<{ [key: string]: string | string[] | undefined }>;
}) {
  const { t } = await getServerTranslations();
  const searchParamsAwaited = await searchParams;
  const searchQuery = typeof searchParamsAwaited.q === "string" ? searchParamsAwaited.q : "";
  const sort = typeof searchParamsAwaited.sort === "string" ? searchParamsAwaited.sort : undefined;
  const page = typeof searchParamsAwaited.page === "string" ? parseInt(searchParamsAwaited.page) : 1;
  const limit = typeof searchParamsAwaited.limit === "string" ? parseInt(searchParamsAwaited.limit) : 20;

  const authorsPage = await fetchAuthors(
    searchQuery || undefined,
    page,
    limit,
    sort,
  ).catch(() => ({ items: [], total: 0, page: 1, limit }) as AuthorsPageDto);

  const totalPages = Math.ceil(authorsPage.total / limit);
  const hasFilters = searchQuery || sort;

  const sortOptions = [
    { value: "", label: t("authors.sortName") },
    { value: "books", label: t("authors.sortBooks") },
  ];

  return (
    <>
      <div className="mb-6">
        <h1 className="text-3xl font-bold text-foreground flex items-center gap-3">
          <svg className="w-8 h-8 text-violet-500" fill="none" stroke="currentColor" viewBox="0 0 24 24">
            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M17 20h5v-2a3 3 0 00-5.356-1.857M17 20H7m10 0v-2c0-.656-.126-1.283-.356-1.857M7 20H2v-2a3 3 0 015.356-1.857M7 20v-2c0-.656.126-1.283.356-1.857m0 0a5.002 5.002 0 019.288 0M15 7a3 3 0 11-6 0 3 3 0 016 0zm6 3a2 2 0 11-4 0 2 2 0 014 0zM7 10a2 2 0 11-4 0 2 2 0 014 0z" />
          </svg>
          {t("authors.title")}
        </h1>
      </div>

      <Card className="mb-6">
        <CardContent className="pt-6">
          <LiveSearchForm
            basePath="/authors"
            fields={[
              { name: "q", type: "text", label: t("common.search"), placeholder: t("authors.searchPlaceholder") },
              { name: "sort", type: "select", label: t("books.sort"), options: sortOptions },
            ]}
          />
        </CardContent>
      </Card>

      {/* Results count */}
      <p className="text-sm text-muted-foreground mb-4">
        {authorsPage.total} {t("authors.title").toLowerCase()}
        {searchQuery && <> {t("authors.matchingQuery")} "{searchQuery}"</>}
      </p>

      {/* Authors List */}
      {authorsPage.items.length > 0 ? (
        <>
          <div className="grid grid-cols-1 sm:grid-cols-2 lg:grid-cols-3 xl:grid-cols-4 gap-4">
            {authorsPage.items.map((author) => (
              <Link
                key={author.name}
                href={`/authors/${encodeURIComponent(author.name)}`}
                className="group"
              >
                <div className="bg-card rounded-xl shadow-sm border border-border/60 overflow-hidden hover:shadow-md hover:-translate-y-1 transition-all duration-200 p-4">
                  <div className="flex items-center gap-3">
                    <div className="w-10 h-10 rounded-full bg-accent/50 flex items-center justify-center flex-shrink-0">
                      <span className="text-lg font-semibold text-violet-500">
                        {author.name.charAt(0).toUpperCase()}
                      </span>
                    </div>
                    <div className="min-w-0">
                      <h3 className="font-medium text-foreground truncate text-sm group-hover:text-violet-500 transition-colors" title={author.name}>
                        {author.name}
                      </h3>
                      <div className="flex items-center gap-3 mt-0.5">
                        <span className="text-xs text-muted-foreground">
                          {t("authors.bookCount", { count: String(author.book_count), plural: author.book_count !== 1 ? "s" : "" })}
                        </span>
                        <span className="text-xs text-muted-foreground">
                          {t("authors.seriesCount", { count: String(author.series_count), plural: author.series_count !== 1 ? "s" : "" })}
                        </span>
                      </div>
                    </div>
                  </div>
                </div>
              </Link>
            ))}
          </div>

          <OffsetPagination
            currentPage={page}
            totalPages={totalPages}
            pageSize={limit}
            totalItems={authorsPage.total}
          />
        </>
      ) : (
        <div className="flex flex-col items-center justify-center py-16 text-center">
          <div className="w-16 h-16 mb-4 text-muted-foreground/30">
            <svg fill="none" stroke="currentColor" viewBox="0 0 24 24">
              <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={1.5} d="M17 20h5v-2a3 3 0 00-5.356-1.857M17 20H7m10 0v-2c0-.656-.126-1.283-.356-1.857M7 20H2v-2a3 3 0 015.356-1.857M7 20v-2c0-.656.126-1.283.356-1.857m0 0a5.002 5.002 0 019.288 0M15 7a3 3 0 11-6 0 3 3 0 016 0zm6 3a2 2 0 11-4 0 2 2 0 014 0zM7 10a2 2 0 11-4 0 2 2 0 014 0z" />
            </svg>
          </div>
          <p className="text-muted-foreground text-lg">
            {hasFilters ? t("authors.noResults") : t("authors.noAuthors")}
          </p>
        </div>
      )}
    </>
  );
}
apps/backoffice/app/(app)/books/[id]/page.tsx (new file, 242 lines)
@@ -0,0 +1,242 @@
import { fetchLibraries, getBookCoverUrl, BookDto, apiFetch, ReadingStatus } from "@/lib/api";
import { BookPreview } from "@/app/components/BookPreview";
import { ConvertButton } from "@/app/components/ConvertButton";
import { MarkBookReadButton } from "@/app/components/MarkBookReadButton";
import nextDynamic from "next/dynamic";
import { SafeHtml } from "@/app/components/SafeHtml";
import { getServerTranslations } from "@/lib/i18n/server";
import Image from "next/image";
import Link from "next/link";
import { notFound } from "next/navigation";

const EditBookForm = nextDynamic(
  () => import("@/app/components/EditBookForm").then(m => m.EditBookForm)
);

export const dynamic = "force-dynamic";

const readingStatusClassNames: Record<ReadingStatus, string> = {
  unread: "bg-muted/60 text-muted-foreground border border-border",
  reading: "bg-amber-500/15 text-amber-600 dark:text-amber-400 border border-amber-500/30",
  read: "bg-green-500/15 text-green-600 dark:text-green-400 border border-green-500/30",
};

async function fetchBook(bookId: string): Promise<BookDto | null> {
  try {
    return await apiFetch<BookDto>(`/books/${bookId}`);
  } catch {
    return null;
  }
}
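
// fetchBook returns null on any API error; BookDetailPage turns that into notFound() below.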
|
||||||
|
|
||||||
|
export default async function BookDetailPage({
|
||||||
|
params
|
||||||
|
}: {
|
||||||
|
params: Promise<{ id: string }>;
|
||||||
|
}) {
|
||||||
|
const { id } = await params;
|
||||||
|
const [book, libraries] = await Promise.all([
|
||||||
|
fetchBook(id),
|
||||||
|
fetchLibraries().catch(() => [] as { id: string; name: string }[])
|
||||||
|
]);
|
||||||
|
|
||||||
|
if (!book) {
|
||||||
|
notFound();
|
||||||
|
}
|
||||||
|
|
||||||
|
const { t, locale } = await getServerTranslations();
|
||||||
|
|
||||||
|
const library = libraries.find(l => l.id === book.library_id);
|
||||||
|
const formatBadge = (book.format ?? book.kind).toUpperCase();
|
||||||
|
  const formatColor =
    formatBadge === "CBZ" ? "bg-success/10 text-success border-success/30" :
    formatBadge === "CBR" ? "bg-warning/10 text-warning border-warning/30" :
    formatBadge === "PDF" ? "bg-destructive/10 text-destructive border-destructive/30" :
    "bg-muted/50 text-muted-foreground border-border";
  const statusLabel = t(`status.${book.reading_status}` as "status.unread" | "status.reading" | "status.read");
  const statusClassName = readingStatusClassNames[book.reading_status];

  return (
    <div className="space-y-6">
      {/* Breadcrumb */}
      <div className="flex items-center gap-2 text-sm">
        <Link href="/libraries" className="text-muted-foreground hover:text-primary transition-colors">
          {t("bookDetail.libraries")}
        </Link>
        <span className="text-muted-foreground">/</span>
        {library && (
          <>
            <Link
              href={`/libraries/${book.library_id}/series`}
              className="text-muted-foreground hover:text-primary transition-colors"
            >
              {library.name}
            </Link>
            <span className="text-muted-foreground">/</span>
          </>
        )}
        {book.series && (
          <>
            <Link
              href={`/libraries/${book.library_id}/series/${encodeURIComponent(book.series)}`}
              className="text-muted-foreground hover:text-primary transition-colors"
            >
              {book.series}
            </Link>
            <span className="text-muted-foreground">/</span>
          </>
        )}
        <span className="text-foreground font-medium truncate">{book.title}</span>
      </div>

      {/* Hero */}
      <div className="flex flex-col sm:flex-row gap-6">
        {/* Cover */}
        <div className="flex-shrink-0">
          <div className="w-48 aspect-[2/3] relative rounded-xl overflow-hidden shadow-card border border-border">
            <Image
              src={getBookCoverUrl(book.id)}
              alt={t("bookDetail.coverOf", { title: book.title })}
              fill
              className="object-cover"
              sizes="192px"
              loading="lazy"
            />
          </div>
        </div>

        {/* Info */}
        <div className="flex-1 space-y-4">
          <div className="flex items-start justify-between gap-4">
            <div>
              <h1 className="text-3xl font-bold text-foreground">{book.title}</h1>
              {book.author && (
                <p className="text-base text-muted-foreground mt-1">{book.author}</p>
              )}
            </div>
            <EditBookForm book={book} />
          </div>

          {/* Series + Volume link */}
          {book.series && (
            <div className="flex items-center gap-2 text-sm">
              <Link
                href={`/libraries/${book.library_id}/series/${encodeURIComponent(book.series)}`}
                className="text-primary hover:text-primary/80 transition-colors font-medium"
              >
                {book.series}
              </Link>
              {book.volume != null && (
                <span className="px-2 py-0.5 bg-primary/10 text-primary rounded-md text-xs font-semibold">
                  Vol. {book.volume}
                </span>
              )}
            </div>
          )}

          {/* Reading status + actions */}
          <div className="flex flex-wrap items-center gap-3">
            <span className={`inline-flex items-center px-2.5 py-1 rounded-full text-xs font-semibold ${statusClassName}`}>
              {statusLabel}
              {book.reading_status === "reading" && book.reading_current_page != null && ` · p. ${book.reading_current_page}`}
            </span>
            {book.reading_last_read_at && (
              <span className="text-xs text-muted-foreground">
                {new Date(book.reading_last_read_at).toLocaleDateString(locale)}
              </span>
            )}
            <MarkBookReadButton bookId={book.id} currentStatus={book.reading_status} />
            {book.file_format === "cbr" && <ConvertButton bookId={book.id} />}
          </div>

          {/* Metadata pills */}
          <div className="flex flex-wrap items-center gap-2">
            <span className={`inline-flex px-2.5 py-1 rounded-full text-xs font-semibold border ${formatColor}`}>
              {formatBadge}
            </span>
            {book.page_count != null && book.page_count > 0 && (
              <span className="inline-flex px-2.5 py-1 rounded-full text-xs font-medium bg-muted/50 text-muted-foreground border border-border">
                {book.page_count} {t("dashboard.pages").toLowerCase()}
              </span>
            )}
            {book.language && (
              <span className="inline-flex px-2.5 py-1 rounded-full text-xs font-medium bg-muted/50 text-muted-foreground border border-border">
                {book.language.toUpperCase()}
              </span>
            )}
            {book.isbn && (
              <span className="inline-flex px-2.5 py-1 rounded-full text-xs font-mono font-medium bg-muted/50 text-muted-foreground border border-border">
                ISBN {book.isbn}
              </span>
            )}
            {book.publish_date && (
              <span className="inline-flex px-2.5 py-1 rounded-full text-xs font-medium bg-muted/50 text-muted-foreground border border-border">
                {book.publish_date}
              </span>
            )}
          </div>

          {/* Description */}
          {book.summary && (
            <SafeHtml html={book.summary} className="text-sm text-muted-foreground leading-relaxed" />
          )}
        </div>
      </div>

      {/* Technical info (collapsible) */}
      <details className="group">
        <summary className="cursor-pointer text-xs text-muted-foreground hover:text-foreground transition-colors select-none flex items-center gap-1.5">
          <svg className="w-3.5 h-3.5 transition-transform group-open:rotate-90" fill="none" stroke="currentColor" viewBox="0 0 24 24">
            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M9 5l7 7-7 7" />
          </svg>
          {t("bookDetail.technicalInfo")}
        </summary>
        <div className="mt-3 p-4 rounded-lg bg-muted/30 border border-border/50 space-y-2 text-xs">
          {book.file_path && (
            <div className="flex flex-col gap-0.5">
              <span className="text-muted-foreground">{t("bookDetail.file")}</span>
              <code className="font-mono text-foreground break-all">{book.file_path}</code>
            </div>
          )}
          {book.file_format && (
            <div className="flex items-center justify-between">
              <span className="text-muted-foreground">{t("bookDetail.fileFormat")}</span>
              <span className="text-foreground">{book.file_format.toUpperCase()}</span>
            </div>
          )}
          {book.file_parse_status && (
            <div className="flex items-center justify-between">
              <span className="text-muted-foreground">{t("bookDetail.parsing")}</span>
              <span className={`inline-flex px-2 py-0.5 rounded-full text-xs font-medium ${
                book.file_parse_status === "success" ? "bg-success/10 text-success" :
                book.file_parse_status === "failed" ? "bg-destructive/10 text-destructive" :
                "bg-muted/50 text-muted-foreground"
              }`}>
                {book.file_parse_status}
              </span>
            </div>
          )}
          <div className="flex items-center justify-between">
            <span className="text-muted-foreground">Book ID</span>
            <code className="font-mono text-foreground">{book.id}</code>
          </div>
          <div className="flex items-center justify-between">
            <span className="text-muted-foreground">Library ID</span>
            <code className="font-mono text-foreground">{book.library_id}</code>
          </div>
          {book.updated_at && (
            <div className="flex items-center justify-between">
              <span className="text-muted-foreground">{t("bookDetail.updatedAt")}</span>
              <span className="text-foreground">{new Date(book.updated_at).toLocaleString(locale)}</span>
            </div>
          )}
        </div>
      </details>

      {/* Book Preview */}
      {book.page_count != null && book.page_count > 0 && (
        <BookPreview bookId={book.id} pageCount={book.page_count} />
      )}
    </div>
  );
}
apps/backoffice/app/(app)/books/page.tsx (new file, 211 lines)
@@ -0,0 +1,211 @@
import { fetchBooks, searchBooks, fetchLibraries, BookDto, LibraryDto, SeriesHitDto, getBookCoverUrl } from "@/lib/api";
import { BooksGrid, EmptyState } from "@/app/components/BookCard";
import { LiveSearchForm } from "@/app/components/LiveSearchForm";
import { Card, CardContent, OffsetPagination } from "@/app/components/ui";
import Link from "next/link";
import Image from "next/image";
import { getServerTranslations } from "@/lib/i18n/server";

export const dynamic = "force-dynamic";

export default async function BooksPage({
  searchParams
}: {
  searchParams: Promise<{ [key: string]: string | string[] | undefined }>;
}) {
  const { t } = await getServerTranslations();
  const searchParamsAwaited = await searchParams;
  const libraryId = typeof searchParamsAwaited.library === "string" ? searchParamsAwaited.library : undefined;
  const searchQuery = typeof searchParamsAwaited.q === "string" ? searchParamsAwaited.q : "";
  const readingStatus = typeof searchParamsAwaited.status === "string" ? searchParamsAwaited.status : undefined;
  const format = typeof searchParamsAwaited.format === "string" ? searchParamsAwaited.format : undefined;
  const metadataProvider = typeof searchParamsAwaited.metadata === "string" ? searchParamsAwaited.metadata : undefined;
  const sort = typeof searchParamsAwaited.sort === "string" ? searchParamsAwaited.sort : undefined;
  const page = typeof searchParamsAwaited.page === "string" ? parseInt(searchParamsAwaited.page, 10) : 1;
  const limit = typeof searchParamsAwaited.limit === "string" ? parseInt(searchParamsAwaited.limit, 10) : 20;

  const libraries = await fetchLibraries().catch(() => [] as LibraryDto[]);

  let books: BookDto[] = [];
  let total = 0;
  let searchResults: BookDto[] | null = null;
  let seriesHits: SeriesHitDto[] = [];
  let totalHits: number | null = null;

  if (searchQuery) {
    const searchResponse = await searchBooks(searchQuery, libraryId, limit).catch(() => null);
    if (searchResponse) {
      seriesHits = searchResponse.series_hits ?? [];
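      // Search hits are partial records; fields they do not carry are filled
      // with neutral defaults so the shared BooksGrid can render them.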
      searchResults = searchResponse.hits.map(hit => ({
        id: hit.id,
        library_id: hit.library_id,
        kind: hit.kind,
        title: hit.title,
        author: hit.authors?.[0] ?? null,
        authors: hit.authors ?? [],
        series: hit.series,
        volume: hit.volume,
        language: hit.language,
        page_count: null,
        format: null,
        file_path: null,
        file_format: null,
        file_parse_status: null,
        updated_at: "",
        reading_status: "unread" as const,
        reading_current_page: null,
        reading_last_read_at: null,
        summary: null,
        isbn: null,
        publish_date: null,
      }));
      totalHits = searchResponse.estimated_total_hits;
    }
  } else {
    const booksPage = await fetchBooks(libraryId, undefined, page, limit, readingStatus, sort, undefined, format, metadataProvider).catch(() => ({
      items: [] as BookDto[],
      total: 0,
      page: 1,
      limit,
    }));
    books = booksPage.items;
    total = booksPage.total;
  }

  const displayBooks = (searchResults || books).map(book => ({
    ...book,
    coverUrl: getBookCoverUrl(book.id)
  }));

  const totalPages = Math.ceil(total / limit);

  const libraryOptions = [
    { value: "", label: t("books.allLibraries") },
    ...libraries.map((lib) => ({ value: lib.id, label: lib.name })),
  ];

  const statusOptions = [
    { value: "", label: t("common.all") },
    { value: "unread", label: t("status.unread") },
    { value: "reading", label: t("status.reading") },
    { value: "read", label: t("status.read") },
  ];

  const formatOptions = [
    { value: "", label: t("books.allFormats") },
    { value: "cbz", label: "CBZ" },
    { value: "cbr", label: "CBR" },
    { value: "pdf", label: "PDF" },
    { value: "epub", label: "EPUB" },
  ];

  const metadataOptions = [
    { value: "", label: t("series.metadataAll") },
    { value: "linked", label: t("series.metadataLinked") },
    { value: "unlinked", label: t("series.metadataUnlinked") },
  ];

  const sortOptions = [
    { value: "", label: t("books.sortTitle") },
    { value: "latest", label: t("books.sortLatest") },
  ];

  const hasFilters = searchQuery || libraryId || readingStatus || format || metadataProvider || sort;

  return (
    <>
      <div className="mb-6">
        <h1 className="text-3xl font-bold text-foreground flex items-center gap-3">
          <svg className="w-8 h-8 text-success" fill="none" stroke="currentColor" viewBox="0 0 24 24">
            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 6.253v13m0-13C10.832 5.477 9.246 5 7.5 5S4.168 5.477 3 6.253v13C4.168 18.477 5.754 18 7.5 18s3.332.477 4.5 1.253m0-13C13.168 5.477 14.754 5 16.5 5c1.747 0 3.332.477 4.5 1.253v13C19.832 18.477 18.247 18 16.5 18c-1.746 0-3.332.477-4.5 1.253" />
          </svg>
          {t("books.title")}
        </h1>
      </div>

      <Card className="mb-6">
        <CardContent className="pt-6">
          <LiveSearchForm
            basePath="/books"
            fields={[
              { name: "q", type: "text", label: t("common.search"), placeholder: t("books.searchPlaceholder") },
              { name: "library", type: "select", label: t("books.library"), options: libraryOptions },
              { name: "status", type: "select", label: t("books.status"), options: statusOptions },
              { name: "format", type: "select", label: t("books.format"), options: formatOptions },
              { name: "metadata", type: "select", label: t("series.metadata"), options: metadataOptions },
              { name: "sort", type: "select", label: t("books.sort"), options: sortOptions },
            ]}
          />
        </CardContent>
      </Card>

      {/* Results */}
      {searchQuery && totalHits !== null ? (
        <p className="text-sm text-muted-foreground mb-4">
          {t("books.resultCountFor", { count: String(totalHits), plural: totalHits !== 1 ? "s" : "", query: searchQuery })}
        </p>
      ) : !searchQuery && (
        <p className="text-sm text-muted-foreground mb-4">
          {t("books.resultCount", { count: String(total), plural: total !== 1 ? "s" : "" })}
        </p>
      )}

      {/* Matching series */}
      {seriesHits.length > 0 && (
        <div className="mb-8">
          <h2 className="text-lg font-semibold text-foreground mb-3">{t("books.seriesHeading")}</h2>
          <div className="grid grid-cols-2 sm:grid-cols-3 md:grid-cols-4 lg:grid-cols-6 gap-4">
            {seriesHits.map((s) => (
              <Link
                key={`${s.library_id}-${s.name}`}
                href={`/libraries/${s.library_id}/series/${encodeURIComponent(s.name)}`}
                className="group"
              >
                <div className="bg-card rounded-xl shadow-sm border border-border/60 overflow-hidden hover:shadow-md transition-shadow duration-200">
                  <div className="aspect-[2/3] relative bg-muted/50">
                    <Image
                      src={getBookCoverUrl(s.first_book_id)}
                      alt={t("books.coverOf", { name: s.name })}
                      fill
                      className="object-cover"
                      sizes="(max-width: 640px) 50vw, (max-width: 768px) 33vw, (max-width: 1024px) 25vw, 16vw"
                    />
                  </div>
                  <div className="p-2">
                    <h3 className="font-medium text-foreground truncate text-sm" title={s.name}>
                      {s.name === "unclassified" ? t("books.unclassified") : s.name}
                    </h3>
                    <p className="text-xs text-muted-foreground mt-0.5">
                      {t("books.bookCount", { count: String(s.book_count), plural: s.book_count !== 1 ? "s" : "" })}
                    </p>
                  </div>
                </div>
              </Link>
            ))}
          </div>
        </div>
      )}

      {/* Books grid */}
      {displayBooks.length > 0 ? (
        <>
          {searchQuery && <h2 className="text-lg font-semibold text-foreground mb-3">{t("books.title")}</h2>}
          <BooksGrid books={displayBooks} />

          {!searchQuery && (
            <OffsetPagination
              currentPage={page}
              totalPages={totalPages}
              pageSize={limit}
              totalItems={total}
            />
          )}
        </>
      ) : (
        <EmptyState message={searchQuery ? t("books.noResults", { query: searchQuery }) : t("books.noBooks")} />
      )}
    </>
  );
}
apps/backoffice/app/(app)/jobs/[id]/page.tsx (new file, 809 lines)
@@ -0,0 +1,809 @@
import { notFound } from "next/navigation";
import Link from "next/link";
import { apiFetch, getMetadataBatchReport, getMetadataBatchResults, getMetadataRefreshReport, MetadataBatchReportDto, MetadataBatchResultDto, MetadataRefreshReportDto } from "@/lib/api";
import {
  Card, CardHeader, CardTitle, CardDescription, CardContent,
  StatusBadge, JobTypeBadge, StatBox, ProgressBar
} from "@/app/components/ui";
import { JobDetailLive } from "@/app/components/JobDetailLive";
import { getServerTranslations } from "@/lib/i18n/server";

export const dynamic = "force-dynamic";

interface JobDetailPageProps {
  params: Promise<{ id: string }>;
}

interface JobDetails {
  id: string;
  library_id: string | null;
  book_id: string | null;
  type: string;
  status: string;
  created_at: string;
  started_at: string | null;
  finished_at: string | null;
  phase2_started_at: string | null;
  generating_thumbnails_started_at: string | null;
  current_file: string | null;
  progress_percent: number | null;
  processed_files: number | null;
  total_files: number | null;
  stats_json: {
    scanned_files: number;
    indexed_files: number;
    removed_files: number;
    errors: number;
    warnings: number;
  } | null;
  error_opt: string | null;
}

interface JobError {
  id: string;
  file_path: string;
  error_message: string;
  created_at: string;
}

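// Both fetchers fail soft: a missing job renders notFound(), missing errors become an empty list.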
async function getJobDetails(jobId: string): Promise<JobDetails | null> {
  try {
    return await apiFetch<JobDetails>(`/index/jobs/${jobId}`);
  } catch {
    return null;
  }
}

async function getJobErrors(jobId: string): Promise<JobError[]> {
  try {
    return await apiFetch<JobError[]>(`/index/jobs/${jobId}/errors`);
  } catch {
    return [];
  }
}

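// Compact elapsed-time formatting: "42s", "3m 12s", "1h 5m". A null end means
// the job is still running, so duration is measured against the current time.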
function formatDuration(start: string, end: string | null): string {
  const startDate = new Date(start);
  const endDate = end ? new Date(end) : new Date();
  const diff = endDate.getTime() - startDate.getTime();

  if (diff < 60000) return `${Math.floor(diff / 1000)}s`;
  if (diff < 3600000) return `${Math.floor(diff / 60000)}m ${Math.floor((diff % 60000) / 1000)}s`;
  return `${Math.floor(diff / 3600000)}h ${Math.floor((diff % 3600000) / 60000)}m`;
}

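// Average throughput in items per second, e.g. "12.3/s"; "-" when nothing was processed.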
function formatSpeed(count: number, durationMs: number): string {
  if (durationMs === 0 || count === 0) return "-";
  return `${(count / (durationMs / 1000)).toFixed(1)}/s`;
}

export default async function JobDetailPage({ params }: JobDetailPageProps) {
  const { id } = await params;
  const [job, errors] = await Promise.all([
    getJobDetails(id),
    getJobErrors(id),
  ]);

  if (!job) {
    notFound();
  }

  const { t, locale } = await getServerTranslations();

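  // Localized label and description per job type; isThumbnailOnly switches
  // the progress-card and timeline wording for thumbnail-only jobs.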
  const JOB_TYPE_INFO: Record<string, { label: string; description: string; isThumbnailOnly: boolean }> = {
    rebuild: {
      label: t("jobType.rebuildLabel"),
      description: t("jobType.rebuildDesc"),
      isThumbnailOnly: false,
    },
    full_rebuild: {
      label: t("jobType.full_rebuildLabel"),
      description: t("jobType.full_rebuildDesc"),
      isThumbnailOnly: false,
    },
    rescan: {
      label: t("jobType.rescanLabel"),
      description: t("jobType.rescanDesc"),
      isThumbnailOnly: false,
    },
    thumbnail_rebuild: {
      label: t("jobType.thumbnail_rebuildLabel"),
      description: t("jobType.thumbnail_rebuildDesc"),
      isThumbnailOnly: true,
    },
    thumbnail_regenerate: {
      label: t("jobType.thumbnail_regenerateLabel"),
      description: t("jobType.thumbnail_regenerateDesc"),
      isThumbnailOnly: true,
    },
    cbr_to_cbz: {
      label: t("jobType.cbr_to_cbzLabel"),
      description: t("jobType.cbr_to_cbzDesc"),
      isThumbnailOnly: false,
    },
    metadata_batch: {
      label: t("jobType.metadata_batchLabel"),
      description: t("jobType.metadata_batchDesc"),
      isThumbnailOnly: false,
    },
    metadata_refresh: {
      label: t("jobType.metadata_refreshLabel"),
      description: t("jobType.metadata_refreshDesc"),
      isThumbnailOnly: false,
    },
  };

  const isMetadataBatch = job.type === "metadata_batch";
  const isMetadataRefresh = job.type === "metadata_refresh";

  // Fetch batch report & results for metadata_batch jobs
  let batchReport: MetadataBatchReportDto | null = null;
  let batchResults: MetadataBatchResultDto[] = [];
  if (isMetadataBatch) {
    [batchReport, batchResults] = await Promise.all([
      getMetadataBatchReport(id).catch(() => null),
      getMetadataBatchResults(id).catch(() => []),
    ]);
  }

  // Fetch refresh report for metadata_refresh jobs
  let refreshReport: MetadataRefreshReportDto | null = null;
  if (isMetadataRefresh) {
    refreshReport = await getMetadataRefreshReport(id).catch(() => null);
  }

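  // Unknown job types still render: fall back to the raw type string with no description.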
  const typeInfo = JOB_TYPE_INFO[job.type] ?? {
    label: job.type,
    description: null,
    isThumbnailOnly: false,
  };

  const durationMs = job.started_at
    ? new Date(job.finished_at || new Date()).getTime() - new Date(job.started_at).getTime()
    : 0;

  const isCompleted = job.status === "success";
  const isFailed = job.status === "failed";
  const isCancelled = job.status === "cancelled";
  const isTerminal = isCompleted || isFailed || isCancelled;
  const isExtractingPages = job.status === "extracting_pages";
  const isThumbnailPhase = job.status === "generating_thumbnails";
  const isPhase2 = isExtractingPages || isThumbnailPhase;
  const { isThumbnailOnly } = typeInfo;

  // Which label to use for the progress card
  const progressTitle = isMetadataBatch
    ? t("jobDetail.metadataSearch")
    : isMetadataRefresh
    ? t("jobDetail.metadataRefresh")
    : isThumbnailOnly
    ? t("jobType.thumbnail_rebuild")
    : isExtractingPages
    ? t("jobDetail.phase2a")
    : isThumbnailPhase
    ? t("jobDetail.phase2b")
    : t("jobDetail.phase1");

  const progressDescription = isMetadataBatch
    ? t("jobDetail.metadataSearchDesc")
    : isMetadataRefresh
    ? t("jobDetail.metadataRefreshDesc")
    : isThumbnailOnly
    ? undefined
    : isExtractingPages
    ? t("jobDetail.phase2aDesc")
    : isThumbnailPhase
    ? t("jobDetail.phase2bDesc")
    : t("jobDetail.phase1Desc");

  // Speed metric: thumbnail count for thumbnail jobs, scanned files for index jobs
  const speedCount = isThumbnailOnly
    ? (job.processed_files ?? 0)
    : (job.stats_json?.scanned_files ?? 0);

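  // Only render the progress card when there is something measurable to show.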
  const showProgressCard =
    (isCompleted || isFailed || job.status === "running" || isPhase2) &&
    (job.total_files != null || !!job.current_file);

  return (
    <>
      <JobDetailLive jobId={id} isTerminal={isTerminal} />
      <div className="mb-6">
        <Link
          href="/jobs"
          className="inline-flex items-center text-sm text-muted-foreground hover:text-primary transition-colors duration-200"
        >
          <svg className="w-4 h-4 mr-1" fill="none" stroke="currentColor" viewBox="0 0 24 24">
            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M15 19l-7-7 7-7" />
          </svg>
          {t("jobDetail.backToJobs")}
        </Link>
        <h1 className="text-3xl font-bold text-foreground mt-2">{t("jobDetail.title")}</h1>
      </div>

      {/* Summary banner — completed */}
      {isCompleted && job.started_at && (
        <div className="mb-6 p-4 rounded-xl bg-success/10 border border-success/30 flex items-start gap-3">
          <svg className="w-5 h-5 text-success mt-0.5 shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M9 12l2 2 4-4m6 2a9 9 0 11-18 0 9 9 0 0118 0z" />
          </svg>
          <div className="text-sm text-success">
            <span className="font-semibold">{t("jobDetail.completedIn", { duration: formatDuration(job.started_at, job.finished_at) })}</span>
            {isMetadataBatch && batchReport && (
              <span className="ml-2 text-success/80">
                — {batchReport.auto_matched} {t("jobDetail.autoMatched").toLowerCase()}, {batchReport.already_linked} {t("jobDetail.alreadyLinked").toLowerCase()}, {batchReport.no_results} {t("jobDetail.noResults").toLowerCase()}, {batchReport.errors} {t("jobDetail.errors").toLowerCase()}
              </span>
            )}
            {isMetadataRefresh && refreshReport && (
              <span className="ml-2 text-success/80">
                — {refreshReport.refreshed} {t("jobDetail.refreshed").toLowerCase()}, {refreshReport.unchanged} {t("jobDetail.unchanged").toLowerCase()}, {refreshReport.errors} {t("jobDetail.errors").toLowerCase()}
              </span>
            )}
            {!isMetadataBatch && !isMetadataRefresh && job.stats_json && (
              <span className="ml-2 text-success/80">
                — {job.stats_json.scanned_files} {t("jobDetail.scanned").toLowerCase()}, {job.stats_json.indexed_files} {t("jobDetail.indexed").toLowerCase()}
                {job.stats_json.removed_files > 0 && `, ${job.stats_json.removed_files} ${t("jobDetail.removed").toLowerCase()}`}
                {(job.stats_json.warnings ?? 0) > 0 && `, ${job.stats_json.warnings} ${t("jobDetail.warnings").toLowerCase()}`}
                {job.stats_json.errors > 0 && `, ${job.stats_json.errors} ${t("jobDetail.errors").toLowerCase()}`}
                {job.total_files != null && job.total_files > 0 && `, ${job.total_files} ${t("jobType.thumbnail_rebuild").toLowerCase()}`}
              </span>
            )}
            {!isMetadataBatch && !isMetadataRefresh && !job.stats_json && isThumbnailOnly && job.total_files != null && (
              <span className="ml-2 text-success/80">
                — {job.processed_files ?? job.total_files} {t("jobDetail.generated").toLowerCase()}
              </span>
            )}
          </div>
        </div>
      )}

      {/* Summary banner — failed */}
      {isFailed && (
        <div className="mb-6 p-4 rounded-xl bg-destructive/10 border border-destructive/30 flex items-start gap-3">
          <svg className="w-5 h-5 text-destructive mt-0.5 shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 8v4m0 4h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z" />
          </svg>
          <div className="text-sm text-destructive">
            <span className="font-semibold">{t("jobDetail.jobFailed")}</span>
            {job.started_at && (
              <span className="ml-2 text-destructive/80">{t("jobDetail.failedAfter", { duration: formatDuration(job.started_at, job.finished_at) })}</span>
            )}
            {job.error_opt && (
              <p className="mt-1 text-destructive/70 font-mono text-xs break-all">{job.error_opt}</p>
            )}
          </div>
        </div>
      )}

      {/* Summary banner — cancelled */}
      {isCancelled && (
        <div className="mb-6 p-4 rounded-xl bg-muted border border-border flex items-start gap-3">
          <svg className="w-5 h-5 text-muted-foreground mt-0.5 shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M18.364 18.364A9 9 0 005.636 5.636m12.728 12.728A9 9 0 015.636 5.636m12.728 12.728L5.636 5.636" />
          </svg>
          <span className="text-sm text-muted-foreground">
            <span className="font-semibold">{t("jobDetail.cancelled")}</span>
            {job.started_at && (
              <span className="ml-2">{t("jobDetail.failedAfter", { duration: formatDuration(job.started_at, job.finished_at) })}</span>
            )}
          </span>
        </div>
      )}

      <div className="grid grid-cols-1 lg:grid-cols-2 gap-6">
        {/* Overview Card */}
        <Card>
          <CardHeader>
            <CardTitle>{t("jobDetail.overview")}</CardTitle>
            {typeInfo.description && (
              <CardDescription>{typeInfo.description}</CardDescription>
            )}
          </CardHeader>
          <CardContent className="space-y-3">
            <div className="flex items-center justify-between py-2 border-b border-border/60">
              <span className="text-sm text-muted-foreground">ID</span>
              <code className="px-2 py-1 bg-muted rounded font-mono text-sm text-foreground">{job.id}</code>
            </div>
            <div className="flex items-center justify-between py-2 border-b border-border/60">
              <span className="text-sm text-muted-foreground">{t("jobsList.type")}</span>
              <div className="flex items-center gap-2">
                <JobTypeBadge type={job.type} />
                <span className="text-sm text-muted-foreground">{typeInfo.label}</span>
              </div>
            </div>
            <div className="flex items-center justify-between py-2 border-b border-border/60">
              <span className="text-sm text-muted-foreground">{t("jobsList.status")}</span>
              <StatusBadge status={job.status} />
            </div>
            <div className={`flex items-center justify-between py-2 ${(job.book_id || job.started_at) ? "border-b border-border/60" : ""}`}>
              <span className="text-sm text-muted-foreground">{t("jobDetail.library")}</span>
              <span className="text-sm text-foreground">{job.library_id || t("jobDetail.allLibraries")}</span>
            </div>
            {job.book_id && (
              <div className={`flex items-center justify-between py-2 ${job.started_at ? "border-b border-border/60" : ""}`}>
                <span className="text-sm text-muted-foreground">{t("jobDetail.book")}</span>
                <Link
                  href={`/books/${job.book_id}`}
                  className="text-sm text-primary hover:text-primary/80 font-mono hover:underline"
                >
                  {job.book_id.slice(0, 8)}…
                </Link>
              </div>
            )}
            {job.started_at && (
              <div className="flex items-center justify-between py-2">
                <span className="text-sm text-muted-foreground">{t("jobsList.duration")}</span>
                <span className="text-sm font-semibold text-foreground">
                  {formatDuration(job.started_at, job.finished_at)}
                </span>
              </div>
            )}
          </CardContent>
        </Card>

        {/* Timeline Card */}
        <Card>
          <CardHeader>
            <CardTitle>{t("jobDetail.timeline")}</CardTitle>
          </CardHeader>
          <CardContent>
            <div className="relative">
              {/* Vertical line */}
              <div className="absolute left-[7px] top-2 bottom-2 w-px bg-border" />

              <div className="space-y-5">
                {/* Created */}
                <div className="flex items-start gap-4">
                  <div className="w-3.5 h-3.5 rounded-full mt-0.5 bg-muted border-2 border-border shrink-0 z-10" />
                  <div className="flex-1 min-w-0">
                    <span className="text-sm font-medium text-foreground">{t("jobDetail.created")}</span>
                    <p className="text-xs text-muted-foreground">{new Date(job.created_at).toLocaleString(locale)}</p>
                  </div>
                </div>

                {/* Phase 1 start — for index jobs that have two phases */}
                {job.started_at && job.phase2_started_at && (
                  <div className="flex items-start gap-4">
                    <div className="w-3.5 h-3.5 rounded-full mt-0.5 bg-primary shrink-0 z-10" />
                    <div className="flex-1 min-w-0">
                      <span className="text-sm font-medium text-foreground">{t("jobDetail.phase1")}</span>
                      <p className="text-xs text-muted-foreground">{new Date(job.started_at).toLocaleString(locale)}</p>
                      <p className="text-xs text-primary/80 font-medium mt-0.5">
                        {t("jobDetail.duration", { duration: formatDuration(job.started_at, job.phase2_started_at) })}
                        {job.stats_json && (
                          <span className="text-muted-foreground font-normal ml-1">
                            · {job.stats_json.scanned_files} {t("jobDetail.scanned").toLowerCase()}, {job.stats_json.indexed_files} {t("jobDetail.indexed").toLowerCase()}
                            {job.stats_json.removed_files > 0 && `, ${job.stats_json.removed_files} ${t("jobDetail.removed").toLowerCase()}`}
                            {(job.stats_json.warnings ?? 0) > 0 && `, ${job.stats_json.warnings} ${t("jobDetail.warnings").toLowerCase()}`}
                          </span>
                        )}
                      </p>
                    </div>
                  </div>
                )}

                {/* Phase 2a — Extracting pages (index jobs with phase2) */}
                {job.phase2_started_at && !isThumbnailOnly && (
                  <div className="flex items-start gap-4">
                    <div className={`w-3.5 h-3.5 rounded-full mt-0.5 shrink-0 z-10 ${
                      job.generating_thumbnails_started_at || job.finished_at ? "bg-primary" : "bg-primary animate-pulse"
                    }`} />
                    <div className="flex-1 min-w-0">
                      <span className="text-sm font-medium text-foreground">{t("jobDetail.phase2a")}</span>
                      <p className="text-xs text-muted-foreground">{new Date(job.phase2_started_at).toLocaleString(locale)}</p>
                      <p className="text-xs text-primary/80 font-medium mt-0.5">
                        {t("jobDetail.duration", { duration: formatDuration(job.phase2_started_at, job.generating_thumbnails_started_at ?? job.finished_at ?? null) })}
                        {!job.generating_thumbnails_started_at && !job.finished_at && isExtractingPages && (
                          <span className="text-muted-foreground font-normal ml-1">· {t("jobDetail.inProgress")}</span>
                        )}
                      </p>
                    </div>
                  </div>
                )}

                {/* Phase 2b — Generating thumbnails */}
                {(job.generating_thumbnails_started_at || (job.phase2_started_at && isThumbnailOnly)) && (
                  <div className="flex items-start gap-4">
                    <div className={`w-3.5 h-3.5 rounded-full mt-0.5 shrink-0 z-10 ${
                      job.finished_at ? "bg-primary" : "bg-primary animate-pulse"
                    }`} />
                    <div className="flex-1 min-w-0">
                      <span className="text-sm font-medium text-foreground">
                        {isThumbnailOnly ? t("jobType.thumbnail_rebuild") : t("jobDetail.phase2b")}
                      </span>
                      <p className="text-xs text-muted-foreground">
                        {(job.generating_thumbnails_started_at ? new Date(job.generating_thumbnails_started_at) : job.phase2_started_at ? new Date(job.phase2_started_at) : null)?.toLocaleString(locale)}
                      </p>
                      {(job.generating_thumbnails_started_at || job.finished_at) && (
                        <p className="text-xs text-primary/80 font-medium mt-0.5">
                          {t("jobDetail.duration", { duration: formatDuration(
                            job.generating_thumbnails_started_at ?? job.phase2_started_at!,
                            job.finished_at ?? null
                          ) })}
                          {job.total_files != null && job.total_files > 0 && (
                            <span className="text-muted-foreground font-normal ml-1">
                              · {job.processed_files ?? job.total_files} {t("jobType.thumbnail_rebuild").toLowerCase()}
                            </span>
                          )}
                        </p>
                      )}
                      {!job.finished_at && isThumbnailPhase && (
                        <span className="text-xs text-muted-foreground">{t("jobDetail.inProgress")}</span>
                      )}
                    </div>
                  </div>
                )}

                {/* Started — for jobs without phase2 (cbr_to_cbz, or no phase yet) */}
                {job.started_at && !job.phase2_started_at && (
                  <div className="flex items-start gap-4">
                    <div className={`w-3.5 h-3.5 rounded-full mt-0.5 shrink-0 z-10 ${
                      job.finished_at ? "bg-primary" : "bg-primary animate-pulse"
                    }`} />
                    <div className="flex-1 min-w-0">
                      <span className="text-sm font-medium text-foreground">{t("jobDetail.started")}</span>
                      <p className="text-xs text-muted-foreground">{new Date(job.started_at).toLocaleString(locale)}</p>
                    </div>
                  </div>
                )}

                {/* Pending — not started yet */}
                {!job.started_at && (
                  <div className="flex items-start gap-4">
                    <div className="w-3.5 h-3.5 rounded-full mt-0.5 bg-warning shrink-0 z-10" />
                    <div className="flex-1 min-w-0">
                      <span className="text-sm font-medium text-foreground">{t("jobDetail.pendingStart")}</span>
                    </div>
                  </div>
                )}

                {/* Finished */}
                {job.finished_at && (
                  <div className="flex items-start gap-4">
                    <div className={`w-3.5 h-3.5 rounded-full mt-0.5 shrink-0 z-10 ${
                      isCompleted ? "bg-success" : isFailed ? "bg-destructive" : "bg-muted"
                    }`} />
                    <div className="flex-1 min-w-0">
                      <span className="text-sm font-medium text-foreground">
                        {isCompleted ? t("jobDetail.finished") : isFailed ? t("jobDetail.failed") : t("jobDetail.cancelled")}
                      </span>
                      <p className="text-xs text-muted-foreground">{new Date(job.finished_at).toLocaleString(locale)}</p>
                    </div>
                  </div>
                )}
              </div>
            </div>
          </CardContent>
        </Card>

        {/* Progress Card */}
        {showProgressCard && (
          <Card>
            <CardHeader>
              <CardTitle>{progressTitle}</CardTitle>
              {progressDescription && <CardDescription>{progressDescription}</CardDescription>}
            </CardHeader>
            <CardContent>
              {job.total_files != null && job.total_files > 0 && (
                <>
                  <ProgressBar value={job.progress_percent || 0} showLabel size="lg" className="mb-4" />
                  <div className="grid grid-cols-3 gap-4">
                    <StatBox
                      value={job.processed_files ?? 0}
                      label={isThumbnailOnly || isPhase2 ? t("jobDetail.generated") : t("jobDetail.processed")}
                      variant="primary"
                    />
                    <StatBox value={job.total_files} label={t("jobDetail.total")} />
                    <StatBox
                      value={Math.max(0, job.total_files - (job.processed_files ?? 0))}
                      label={t("jobDetail.remaining")}
                      variant={isCompleted ? "default" : "warning"}
                    />
                  </div>
                </>
              )}
              {job.current_file && (
                <div className="mt-4 p-3 bg-muted/50 rounded-lg">
                  <span className="text-xs text-muted-foreground uppercase tracking-wide">{t("jobDetail.currentFile")}</span>
                  <code className="block mt-1 text-xs font-mono text-foreground break-all">{job.current_file}</code>
                </div>
              )}
            </CardContent>
          </Card>
        )}

        {/* Index Statistics — index jobs only */}
        {job.stats_json && !isThumbnailOnly && !isMetadataBatch && !isMetadataRefresh && (
          <Card>
            <CardHeader>
              <CardTitle>{t("jobDetail.indexStats")}</CardTitle>
              {job.started_at && (
                <CardDescription>
                  {formatDuration(job.started_at, job.finished_at)}
                  {speedCount > 0 && ` · ${formatSpeed(speedCount, durationMs)} scan rate`}
                </CardDescription>
              )}
            </CardHeader>
            <CardContent>
              <div className="grid grid-cols-2 sm:grid-cols-5 gap-4">
                <StatBox value={job.stats_json.scanned_files} label={t("jobDetail.scanned")} variant="success" />
                <StatBox value={job.stats_json.indexed_files} label={t("jobDetail.indexed")} variant="primary" />
                <StatBox value={job.stats_json.removed_files} label={t("jobDetail.removed")} variant="warning" />
                <StatBox value={job.stats_json.warnings ?? 0} label={t("jobDetail.warnings")} variant={(job.stats_json.warnings ?? 0) > 0 ? "warning" : "default"} />
                <StatBox value={job.stats_json.errors} label={t("jobDetail.errors")} variant={job.stats_json.errors > 0 ? "error" : "default"} />
              </div>
            </CardContent>
          </Card>
        )}

        {/* Thumbnail statistics — thumbnail-only jobs, completed */}
        {isThumbnailOnly && isCompleted && job.total_files != null && (
          <Card>
            <CardHeader>
              <CardTitle>{t("jobDetail.thumbnailStats")}</CardTitle>
              {job.started_at && (
                <CardDescription>
                  {formatDuration(job.started_at, job.finished_at)}
                  {speedCount > 0 && ` · ${formatSpeed(speedCount, durationMs)} thumbnails/s`}
                </CardDescription>
              )}
            </CardHeader>
            <CardContent>
              <div className="grid grid-cols-2 gap-4">
                <StatBox value={job.processed_files ?? job.total_files} label={t("jobDetail.generated")} variant="success" />
                <StatBox value={job.total_files} label={t("jobDetail.total")} />
              </div>
            </CardContent>
          </Card>
        )}

        {/* Metadata batch report */}
        {isMetadataBatch && batchReport && (
          <Card>
            <CardHeader>
              <CardTitle>{t("jobDetail.batchReport")}</CardTitle>
              <CardDescription>{t("jobDetail.seriesAnalyzed", { count: String(batchReport.total_series) })}</CardDescription>
            </CardHeader>
            <CardContent>
              <div className="grid grid-cols-2 sm:grid-cols-3 gap-4">
                <StatBox value={batchReport.auto_matched} label={t("jobDetail.autoMatched")} variant="success" />
                <StatBox value={batchReport.already_linked} label={t("jobDetail.alreadyLinked")} variant="primary" />
                <StatBox value={batchReport.no_results} label={t("jobDetail.noResults")} />
                <StatBox value={batchReport.too_many_results} label={t("jobDetail.tooManyResults")} variant="warning" />
                <StatBox value={batchReport.low_confidence} label={t("jobDetail.lowConfidence")} variant="warning" />
                <StatBox value={batchReport.errors} label={t("jobDetail.errors")} variant={batchReport.errors > 0 ? "error" : "default"} />
              </div>
            </CardContent>
          </Card>
        )}

        {/* Metadata refresh report */}
        {isMetadataRefresh && refreshReport && (
          <Card>
            <CardHeader>
              <CardTitle>{t("jobDetail.refreshReport")}</CardTitle>
              <CardDescription>{t("jobDetail.refreshReportDesc", { count: String(refreshReport.total_links) })}</CardDescription>
            </CardHeader>
            <CardContent>
              <div className="grid grid-cols-2 sm:grid-cols-4 gap-4">
                <StatBox
                  value={refreshReport.refreshed}
                  label={t("jobDetail.refreshed")}
                  variant="success"
                  icon={
                    <svg className="w-6 h-6 text-success" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                      <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 4v5h.582m15.356 2A8.001 8.001 0 004.582 9m0 0H9m11 11v-5h-.581m0 0a8.003 8.003 0 01-15.357-2m15.357 2H15" />
                    </svg>
                  }
                />
                <StatBox value={refreshReport.unchanged} label={t("jobDetail.unchanged")} />
                <StatBox value={refreshReport.errors} label={t("jobDetail.errors")} variant={refreshReport.errors > 0 ? "error" : "default"} />
                <StatBox value={refreshReport.total_links} label={t("jobDetail.total")} />
              </div>
            </CardContent>
          </Card>
        )}

        {/* Metadata refresh changes detail */}
        {isMetadataRefresh && refreshReport && refreshReport.changes.length > 0 && (
          <Card className="lg:col-span-2">
            <CardHeader>
              <CardTitle>{t("jobDetail.refreshChanges")}</CardTitle>
              <CardDescription>{t("jobDetail.refreshChangesDesc", { count: String(refreshReport.changes.length) })}</CardDescription>
            </CardHeader>
            <CardContent className="space-y-3 max-h-[600px] overflow-y-auto">
              {refreshReport.changes.map((r, idx) => (
                <div
                  key={idx}
                  className={`p-3 rounded-lg border ${
                    r.status === "updated" ? "bg-success/10 border-success/20" :
                    r.status === "error" ? "bg-destructive/10 border-destructive/20" :
                    "bg-muted/50 border-border/60"
                  }`}
                >
                  <div className="flex items-center justify-between gap-2">
                    {job.library_id ? (
                      <Link
                        href={`/libraries/${job.library_id}/series/${encodeURIComponent(r.series_name)}`}
                        className="font-medium text-sm text-primary hover:underline truncate"
                      >
                        {r.series_name}
                      </Link>
                    ) : (
                      <span className="font-medium text-sm text-foreground truncate">{r.series_name}</span>
                    )}
                    <div className="flex items-center gap-2">
                      <span className="text-[10px] text-muted-foreground">{r.provider}</span>
                      <span className={`text-[10px] px-1.5 py-0.5 rounded-full font-medium whitespace-nowrap ${
                        r.status === "updated" ? "bg-success/20 text-success" :
                        r.status === "error" ? "bg-destructive/20 text-destructive" :
                        "bg-muted text-muted-foreground"
                      }`}>
                        {r.status === "updated" ? t("jobDetail.refreshed") :
                         r.status === "error" ? t("common.error") :
                         t("jobDetail.unchanged")}
                      </span>
                    </div>
                  </div>

                  {r.error && (
                    <p className="text-xs text-destructive/80 mt-1">{r.error}</p>
                  )}

                  {/* Series field changes */}
                  {r.series_changes.length > 0 && (
                    <div className="mt-2">
                      <span className="text-[10px] uppercase tracking-wide text-muted-foreground font-semibold">{t("metadata.seriesLabel")}</span>
                      <div className="mt-1 space-y-1">
                        {r.series_changes.map((c, ci) => (
                          <div key={ci} className="flex items-start gap-2 text-xs">
                            <span className="font-medium text-foreground shrink-0 w-24">{t(`field.${c.field}` as never) || c.field}</span>
                            <span className="text-muted-foreground line-through truncate max-w-[200px]" title={String(c.old ?? "—")}>
                              {c.old != null ? (Array.isArray(c.old) ? (c.old as string[]).join(", ") : String(c.old)) : "—"}
                            </span>
                            <span className="text-success shrink-0">→</span>
                            <span className="text-success truncate max-w-[200px]" title={String(c.new ?? "—")}>
                              {c.new != null ? (Array.isArray(c.new) ? (c.new as string[]).join(", ") : String(c.new)) : "—"}
                            </span>
                          </div>
                        ))}
                      </div>
                    </div>
                  )}

                  {/* Book field changes */}
                  {r.book_changes.length > 0 && (
                    <div className="mt-2">
                      <span className="text-[10px] uppercase tracking-wide text-muted-foreground font-semibold">
                        {t("metadata.booksLabel")} ({r.book_changes.length})
                      </span>
                      <div className="mt-1 space-y-2">
                        {r.book_changes.map((b, bi) => (
                          <div key={bi} className="pl-2 border-l-2 border-border/60">
                            <Link
                              href={`/books/${b.book_id}`}
                              className="text-xs text-primary hover:underline font-medium"
                            >
                              {b.volume != null && <span className="text-muted-foreground mr-1">T.{b.volume}</span>}
                              {b.title}
                            </Link>
                            <div className="mt-0.5 space-y-0.5">
                              {b.changes.map((c, ci) => (
                                <div key={ci} className="flex items-start gap-2 text-xs">
                                  <span className="font-medium text-foreground shrink-0 w-24">{t(`field.${c.field}` as never) || c.field}</span>
                                  <span className="text-muted-foreground line-through truncate max-w-[150px]" title={String(c.old ?? "—")}>
                                    {c.old != null ? (Array.isArray(c.old) ? (c.old as string[]).join(", ") : String(c.old).substring(0, 60)) : "—"}
                                  </span>
                                  <span className="text-success shrink-0">→</span>
                                  <span className="text-success truncate max-w-[150px]" title={String(c.new ?? "—")}>
                                    {c.new != null ? (Array.isArray(c.new) ? (c.new as string[]).join(", ") : String(c.new).substring(0, 60)) : "—"}
                                  </span>
                                </div>
                              ))}
                            </div>
                          </div>
                        ))}
                      </div>
                    </div>
                  )}
                </div>
              ))}
            </CardContent>
          </Card>
        )}

        {/* Metadata batch results */}
        {isMetadataBatch && batchResults.length > 0 && (
          <Card className="lg:col-span-2">
            <CardHeader>
              <CardTitle>{t("jobDetail.resultsBySeries")}</CardTitle>
              <CardDescription>{t("jobDetail.seriesProcessed", { count: String(batchResults.length) })}</CardDescription>
            </CardHeader>
            <CardContent className="space-y-2 max-h-[600px] overflow-y-auto">
              {batchResults.map((r) => (
                <div
                  key={r.id}
                  className={`p-3 rounded-lg border ${
                    r.status === "auto_matched" ? "bg-success/10 border-success/20" :
                    r.status === "already_linked" ? "bg-primary/10 border-primary/20" :
                    r.status === "error" ? "bg-destructive/10 border-destructive/20" :
                    "bg-muted/50 border-border/60"
                  }`}
                >
                  <div className="flex items-center justify-between gap-2">
                    {job.library_id ? (
                      <Link
                        href={`/libraries/${job.library_id}/series/${encodeURIComponent(r.series_name)}`}
                        className="font-medium text-sm text-primary hover:underline truncate"
                      >
                        {r.series_name}
                      </Link>
                    ) : (
                      <span className="font-medium text-sm text-foreground truncate">{r.series_name}</span>
                    )}
                    <span className={`text-[10px] px-1.5 py-0.5 rounded-full font-medium whitespace-nowrap ${
                      r.status === "auto_matched" ? "bg-success/20 text-success" :
                      r.status === "already_linked" ? "bg-primary/20 text-primary" :
                      r.status === "no_results" ? "bg-muted text-muted-foreground" :
                      r.status === "too_many_results" ? "bg-amber-500/15 text-amber-600" :
                      r.status === "low_confidence" ? "bg-amber-500/15 text-amber-600" :
                      r.status === "error" ? "bg-destructive/20 text-destructive" :
                      "bg-muted text-muted-foreground"
                    }`}>
                      {r.status === "auto_matched" ? t("jobDetail.autoMatched") :
                       r.status === "already_linked" ? t("jobDetail.alreadyLinked") :
                       r.status === "no_results" ? t("jobDetail.noResults") :
                       r.status === "too_many_results" ? t("jobDetail.tooManyResults") :
                       r.status === "low_confidence" ? t("jobDetail.lowConfidence") :
                       r.status === "error" ? t("common.error") :
                       r.status}
                    </span>
                  </div>
                  <div className="flex items-center gap-3 mt-1 text-xs text-muted-foreground">
                    {r.provider_used && (
                      <span>{r.provider_used}{r.fallback_used ? ` ${t("metadata.fallbackUsed")}` : ""}</span>
                    )}
                    {r.candidates_count > 0 && (
                      <span>{r.candidates_count} {t("jobDetail.candidates", { plural: r.candidates_count > 1 ? "s" : "" })}</span>
                    )}
                    {r.best_confidence != null && (
                      <span>{Math.round(r.best_confidence * 100)}% {t("jobDetail.confidence")}</span>
                    )}
                  </div>
                  {r.best_candidate_json && (
                    <p className="text-xs text-muted-foreground mt-1">
                      {t("jobDetail.match", { title: (r.best_candidate_json as { title?: string }).title || r.best_candidate_json.toString() })}
                    </p>
                  )}
                  {r.error_message && (
                    <p className="text-xs text-destructive/80 mt-1">{r.error_message}</p>
                  )}
                </div>
              ))}
            </CardContent>
          </Card>
        )}

        {/* File errors */}
        {errors.length > 0 && (
          <Card className="lg:col-span-2">
            <CardHeader>
              <CardTitle>{t("jobDetail.fileErrors", { count: String(errors.length) })}</CardTitle>
              <CardDescription>{t("jobDetail.fileErrorsDesc")}</CardDescription>
            </CardHeader>
            <CardContent className="space-y-2 max-h-80 overflow-y-auto">
              {errors.map((error) => (
                <div key={error.id} className="p-3 bg-destructive/10 rounded-lg border border-destructive/20">
                  <code className="block text-sm font-mono text-destructive mb-1">{error.file_path}</code>
                  <p className="text-sm text-destructive/80">{error.error_message}</p>
                  <span className="text-xs text-muted-foreground">{new Date(error.created_at).toLocaleString(locale)}</span>
                </div>
              ))}
            </CardContent>
          </Card>
        )}
      </div>
    </>
  );
}
apps/backoffice/app/(app)/jobs/page.tsx (new file, 269 lines)
@@ -0,0 +1,269 @@
import { revalidatePath } from "next/cache";
import { redirect } from "next/navigation";
import { listJobs, fetchLibraries, rebuildIndex, rebuildThumbnails, regenerateThumbnails, startMetadataBatch, startMetadataRefresh, IndexJobDto, LibraryDto } from "@/lib/api";
import { JobsList } from "@/app/components/JobsList";
import { Card, CardHeader, CardTitle, CardDescription, CardContent, FormField, FormSelect } from "@/app/components/ui";
import { getServerTranslations } from "@/lib/i18n/server";

export const dynamic = "force-dynamic";

export default async function JobsPage({ searchParams }: { searchParams: Promise<{ highlight?: string }> }) {
  const { highlight } = await searchParams;
  const { t } = await getServerTranslations();
  const [jobs, libraries] = await Promise.all([
    listJobs().catch(() => [] as IndexJobDto[]),
    fetchLibraries().catch(() => [] as LibraryDto[])
  ]);

  const libraryMap = new Map(libraries.map(l => [l.id, l.name]));

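  // Server actions: each one starts a backend job, revalidates the jobs list,
  // and redirects back to /jobs, highlighting the new job when one was created.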
  async function triggerRebuild(formData: FormData) {
    "use server";
    const libraryId = formData.get("library_id") as string;
    const result = await rebuildIndex(libraryId || undefined);
    revalidatePath("/jobs");
    redirect(`/jobs?highlight=${result.id}`);
  }

  async function triggerFullRebuild(formData: FormData) {
    "use server";
    const libraryId = formData.get("library_id") as string;
    const result = await rebuildIndex(libraryId || undefined, true);
    revalidatePath("/jobs");
    redirect(`/jobs?highlight=${result.id}`);
  }

  async function triggerRescan(formData: FormData) {
    "use server";
    const libraryId = formData.get("library_id") as string;
    const result = await rebuildIndex(libraryId || undefined, false, true);
    revalidatePath("/jobs");
    redirect(`/jobs?highlight=${result.id}`);
  }

  async function triggerThumbnailsRebuild(formData: FormData) {
    "use server";
    const libraryId = formData.get("library_id") as string;
    const result = await rebuildThumbnails(libraryId || undefined);
    revalidatePath("/jobs");
    redirect(`/jobs?highlight=${result.id}`);
  }

  async function triggerThumbnailsRegenerate(formData: FormData) {
    "use server";
    const libraryId = formData.get("library_id") as string;
    const result = await regenerateThumbnails(libraryId || undefined);
    revalidatePath("/jobs");
    redirect(`/jobs?highlight=${result.id}`);
  }

  async function triggerMetadataBatch(formData: FormData) {
    "use server";
    const libraryId = formData.get("library_id") as string;
    if (libraryId) {
      let result;
      try {
        result = await startMetadataBatch(libraryId);
      } catch {
        // Library may have metadata disabled — ignore silently
        return;
      }
      revalidatePath("/jobs");
      redirect(`/jobs?highlight=${result.id}`);
    } else {
      // All libraries — skip those with metadata disabled
      const allLibraries = await fetchLibraries().catch(() => [] as LibraryDto[]);
      let lastId: string | undefined;
      for (const lib of allLibraries) {
        if (lib.metadata_provider === "none") continue;
        try {
          const result = await startMetadataBatch(lib.id);
          if (result.status !== "already_running") lastId = result.id;
        } catch {
          // Library may have metadata disabled or other issue — skip
        }
      }
      revalidatePath("/jobs");
      redirect(lastId ? `/jobs?highlight=${lastId}` : "/jobs");
    }
  }

  async function triggerMetadataRefresh(formData: FormData) {
    "use server";
    const libraryId = formData.get("library_id") as string;
    if (libraryId) {
      let result;
      try {
        result = await startMetadataRefresh(libraryId);
      } catch {
        return;
      }
      revalidatePath("/jobs");
      redirect(`/jobs?highlight=${result.id}`);
    } else {
      // All libraries — skip those with metadata disabled
      const allLibraries = await fetchLibraries().catch(() => [] as LibraryDto[]);
      let lastId: string | undefined;
|
||||||
|
for (const lib of allLibraries) {
|
||||||
|
if (lib.metadata_provider === "none") continue;
|
||||||
|
try {
|
||||||
|
const result = await startMetadataRefresh(lib.id);
|
||||||
|
if (result.status !== "already_running") lastId = result.id;
|
||||||
|
} catch {
|
||||||
|
// Library may have metadata disabled or no approved links — skip
|
||||||
|
}
|
||||||
|
}
|
||||||
|
revalidatePath("/jobs");
|
||||||
|
redirect(lastId ? `/jobs?highlight=${lastId}` : "/jobs");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<>
|
||||||
|
<div className="mb-6">
|
||||||
|
<h1 className="text-3xl font-bold text-foreground flex items-center gap-3">
|
||||||
|
<svg className="w-8 h-8 text-warning" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||||
|
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M13 10V3L4 14h7v7l9-11h-7z" />
|
||||||
|
</svg>
|
||||||
|
{t("jobs.title")}
|
||||||
|
</h1>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<Card className="mb-6">
|
||||||
|
<CardHeader>
|
||||||
|
<CardTitle>{t("jobs.startJob")}</CardTitle>
|
||||||
|
<CardDescription>{t("jobs.startJobDescription")}</CardDescription>
|
||||||
|
</CardHeader>
|
||||||
|
<CardContent>
|
||||||
|
<form>
|
||||||
|
<div className="mb-6">
|
||||||
|
<FormField className="max-w-xs">
|
||||||
|
<FormSelect name="library_id" defaultValue="">
|
||||||
|
<option value="">{t("jobs.allLibraries")}</option>
|
||||||
|
{libraries.map((lib) => (
|
||||||
|
<option key={lib.id} value={lib.id}>{lib.name}</option>
|
||||||
|
))}
|
||||||
|
</FormSelect>
|
||||||
|
</FormField>
|
||||||
|
</div>
|
||||||
|
<div className="grid grid-cols-1 lg:grid-cols-3 gap-6">
|
||||||
|
|
||||||
|
{/* Indexation group */}
|
||||||
|
<div className="space-y-3">
|
||||||
|
<div className="flex items-center gap-2 text-sm font-semibold text-foreground">
|
||||||
|
<svg className="w-4 h-4 text-primary" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||||
|
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M3 7v10a2 2 0 002 2h14a2 2 0 002-2V9a2 2 0 00-2-2h-6l-2-2H5a2 2 0 00-2 2z" />
|
||||||
|
</svg>
|
||||||
|
{t("jobs.groupIndexation")}
|
||||||
|
</div>
|
||||||
|
<div className="space-y-2">
|
||||||
|
<button type="submit" formAction={triggerRebuild}
|
||||||
|
className="w-full text-left rounded-lg border border-input bg-background p-3 hover:bg-accent/50 transition-colors group cursor-pointer">
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
<svg className="w-4 h-4 text-primary shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||||
|
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 4v5h.582m15.356 2A8.001 8.001 0 004.582 9m0 0H9m11 11v-5h-.581m0 0a8.003 8.003 0 01-15.357-2m15.357 2H15" />
|
||||||
|
</svg>
|
||||||
|
<span className="font-medium text-sm text-foreground">{t("jobs.rebuild")}</span>
|
||||||
|
</div>
|
||||||
|
<p className="text-xs text-muted-foreground mt-1 ml-6">{t("jobs.rebuildShort")}</p>
|
||||||
|
</button>
|
||||||
|
<button type="submit" formAction={triggerRescan}
|
||||||
|
className="w-full text-left rounded-lg border border-input bg-background p-3 hover:bg-accent/50 transition-colors group cursor-pointer">
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
<svg className="w-4 h-4 text-primary shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||||
|
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M21 21l-6-6m2-5a7 7 0 11-14 0 7 7 0 0114 0z" />
|
||||||
|
</svg>
|
||||||
|
<span className="font-medium text-sm text-foreground">{t("jobs.rescan")}</span>
|
||||||
|
</div>
|
||||||
|
<p className="text-xs text-muted-foreground mt-1 ml-6">{t("jobs.rescanShort")}</p>
|
||||||
|
</button>
|
||||||
|
<button type="submit" formAction={triggerFullRebuild}
|
||||||
|
className="w-full text-left rounded-lg border border-destructive/30 bg-destructive/5 p-3 hover:bg-destructive/10 transition-colors group cursor-pointer">
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
<svg className="w-4 h-4 text-destructive shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||||
|
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 9v2m0 4h.01m-6.938 4h13.856c1.54 0 2.502-1.667 1.732-3L13.732 4c-.77-1.333-2.694-1.333-3.464 0L3.34 16c-.77 1.333.192 3 1.732 3z" />
|
||||||
|
</svg>
|
||||||
|
<span className="font-medium text-sm text-destructive">{t("jobs.fullRebuild")}</span>
|
||||||
|
</div>
|
||||||
|
<p className="text-xs text-muted-foreground mt-1 ml-6">{t("jobs.fullRebuildShort")}</p>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Thumbnails group */}
|
||||||
|
<div className="space-y-3">
|
||||||
|
<div className="flex items-center gap-2 text-sm font-semibold text-foreground">
|
||||||
|
<svg className="w-4 h-4 text-primary" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||||
|
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 16l4.586-4.586a2 2 0 012.828 0L16 16m-2-2l1.586-1.586a2 2 0 012.828 0L20 14m-6-6h.01M6 20h12a2 2 0 002-2V6a2 2 0 00-2-2H6a2 2 0 00-2 2v12a2 2 0 002 2z" />
|
||||||
|
</svg>
|
||||||
|
{t("jobs.groupThumbnails")}
|
||||||
|
</div>
|
||||||
|
<div className="space-y-2">
|
||||||
|
<button type="submit" formAction={triggerThumbnailsRebuild}
|
||||||
|
className="w-full text-left rounded-lg border border-input bg-background p-3 hover:bg-accent/50 transition-colors group cursor-pointer">
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
<svg className="w-4 h-4 text-primary shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||||
|
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 6v6m0 0v6m0-6h6m-6 0H6" />
|
||||||
|
</svg>
|
||||||
|
<span className="font-medium text-sm text-foreground">{t("jobs.generateThumbnails")}</span>
|
||||||
|
</div>
|
||||||
|
<p className="text-xs text-muted-foreground mt-1 ml-6">{t("jobs.generateThumbnailsShort")}</p>
|
||||||
|
</button>
|
||||||
|
<button type="submit" formAction={triggerThumbnailsRegenerate}
|
||||||
|
className="w-full text-left rounded-lg border border-warning/30 bg-warning/5 p-3 hover:bg-warning/10 transition-colors group cursor-pointer">
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
<svg className="w-4 h-4 text-warning shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||||
|
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 9v2m0 4h.01m-6.938 4h13.856c1.54 0 2.502-1.667 1.732-3L13.732 4c-.77-1.333-2.694-1.333-3.464 0L3.34 16c-.77 1.333.192 3 1.732 3z" />
|
||||||
|
</svg>
|
||||||
|
<span className="font-medium text-sm text-warning">{t("jobs.regenerateThumbnails")}</span>
|
||||||
|
</div>
|
||||||
|
<p className="text-xs text-muted-foreground mt-1 ml-6">{t("jobs.regenerateThumbnailsShort")}</p>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
{/* Metadata group */}
|
||||||
|
<div className="space-y-3">
|
||||||
|
<div className="flex items-center gap-2 text-sm font-semibold text-foreground">
|
||||||
|
<svg className="w-4 h-4 text-primary" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||||
|
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M7 7h.01M7 3h5c.512 0 1.024.195 1.414.586l7 7a2 2 0 010 2.828l-7 7a2 2 0 01-2.828 0l-7-7A1.994 1.994 0 013 12V7a4 4 0 014-4z" />
|
||||||
|
</svg>
|
||||||
|
{t("jobs.groupMetadata")}
|
||||||
|
</div>
|
||||||
|
<div className="space-y-2">
|
||||||
|
<button type="submit" formAction={triggerMetadataBatch}
|
||||||
|
className="w-full text-left rounded-lg border border-input bg-background p-3 hover:bg-accent/50 transition-colors group cursor-pointer disabled:opacity-50 disabled:cursor-not-allowed disabled:hover:bg-background">
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
<svg className="w-4 h-4 text-primary shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||||
|
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M21 21l-6-6m2-5a7 7 0 11-14 0 7 7 0 0114 0z" />
|
||||||
|
</svg>
|
||||||
|
<span className="font-medium text-sm text-foreground">{t("jobs.batchMetadata")}</span>
|
||||||
|
</div>
|
||||||
|
<p className="text-xs text-muted-foreground mt-1 ml-6">{t("jobs.batchMetadataShort")}</p>
|
||||||
|
</button>
|
||||||
|
<button type="submit" formAction={triggerMetadataRefresh}
|
||||||
|
className="w-full text-left rounded-lg border border-input bg-background p-3 hover:bg-accent/50 transition-colors group cursor-pointer disabled:opacity-50 disabled:cursor-not-allowed disabled:hover:bg-background">
|
||||||
|
<div className="flex items-center gap-2">
|
||||||
|
<svg className="w-4 h-4 text-primary shrink-0" fill="none" stroke="currentColor" viewBox="0 0 24 24">
|
||||||
|
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 4v5h.582m15.356 2A8.001 8.001 0 004.582 9m0 0H9m11 11v-5h-.581m0 0a8.003 8.003 0 01-15.357-2m15.357 2H15" />
|
||||||
|
</svg>
|
||||||
|
<span className="font-medium text-sm text-foreground">{t("jobs.refreshMetadata")}</span>
|
||||||
|
</div>
|
||||||
|
<p className="text-xs text-muted-foreground mt-1 ml-6">{t("jobs.refreshMetadataShort")}</p>
|
||||||
|
</button>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
</div>
|
||||||
|
</form>
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
|
||||||
|
<JobsList
|
||||||
|
initialJobs={jobs}
|
||||||
|
libraries={libraryMap}
|
||||||
|
highlightJobId={highlight}
|
||||||
|
/>
|
||||||
|
</>
|
||||||
|
);
|
||||||
|
}
|
||||||
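One detail worth noting in these actions: Next.js implements redirect() by throwing an internal control-flow error, so the metadata actions above deliberately call it after the try/catch rather than inside it; a bare catch would otherwise swallow the navigation. A minimal sketch of the safe pattern, where startJob is a hypothetical stand-in for startMetadataBatch and friends:

"use server";

import { revalidatePath } from "next/cache";
import { redirect } from "next/navigation";

// Hypothetical job starter, standing in for startMetadataBatch / startMetadataRefresh.
async function startJob(libraryId: string): Promise<{ id: string }> {
  return { id: "job-123" };
}

export async function triggerJob(formData: FormData) {
  const libraryId = formData.get("library_id") as string;
  let result: { id: string };
  try {
    result = await startJob(libraryId);
  } catch {
    return; // API failure: bail out without redirecting
  }
  revalidatePath("/jobs");
  // redirect() throws; keeping it outside the try block means the catch
  // above cannot accidentally swallow the navigation.
  redirect(`/jobs?highlight=${result.id}`);
}

The same reasoning explains why triggerMetadataBatch and triggerMetadataRefresh wrap only the API call, never the redirect.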
apps/backoffice/app/(app)/layout.tsx (new file, 127 lines)
@@ -0,0 +1,127 @@
import Image from "next/image";
import Link from "next/link";
import type { ReactNode } from "react";
import { cookies } from "next/headers";
import { revalidatePath } from "next/cache";
import { ThemeToggle } from "@/app/theme-toggle";
import { JobsIndicator } from "@/app/components/JobsIndicator";
import { NavIcon, Icon } from "@/app/components/ui";
import { LogoutButton } from "@/app/components/LogoutButton";
import { MobileNav } from "@/app/components/MobileNav";
import { UserSwitcher } from "@/app/components/UserSwitcher";
import { fetchUsers } from "@/lib/api";
import { getServerTranslations } from "@/lib/i18n/server";
import type { TranslationKey } from "@/lib/i18n/fr";

type NavItem = {
  href: "/" | "/books" | "/series" | "/authors" | "/libraries" | "/jobs" | "/tokens" | "/settings";
  labelKey: TranslationKey;
  icon: "dashboard" | "books" | "series" | "authors" | "libraries" | "jobs" | "tokens" | "settings";
};

const navItems: NavItem[] = [
  { href: "/", labelKey: "nav.dashboard", icon: "dashboard" },
  { href: "/books", labelKey: "nav.books", icon: "books" },
  { href: "/series", labelKey: "nav.series", icon: "series" },
  { href: "/authors", labelKey: "nav.authors", icon: "authors" },
  { href: "/libraries", labelKey: "nav.libraries", icon: "libraries" },
  { href: "/jobs", labelKey: "nav.jobs", icon: "jobs" },
  { href: "/tokens", labelKey: "nav.tokens", icon: "tokens" },
];

export default async function AppLayout({ children }: { children: ReactNode }) {
  const { t } = await getServerTranslations();
  const cookieStore = await cookies();
  const activeUserId = cookieStore.get("as_user_id")?.value || null;
  const users = await fetchUsers().catch(() => []);

  async function setActiveUserAction(formData: FormData) {
    "use server";
    const userId = formData.get("user_id") as string;
    const store = await cookies();
    if (userId) {
      store.set("as_user_id", userId, { path: "/", httpOnly: false, sameSite: "lax" });
    } else {
      store.delete("as_user_id");
    }
    revalidatePath("/", "layout");
  }

  return (
    <>
      <header className="sticky top-0 z-50 w-full border-b border-border/40 bg-background/70 backdrop-blur-xl backdrop-saturate-150 supports-[backdrop-filter]:bg-background/60">
        <nav className="container mx-auto flex h-16 items-center justify-between px-4">
          <Link
            href="/"
            className="flex items-center gap-3 hover:opacity-80 transition-opacity duration-200"
          >
            <Image src="/logo.png" alt="StripStream" width={36} height={36} className="rounded-lg" />
            <div className="flex items-baseline gap-2">
              <span className="text-xl font-bold tracking-tight text-foreground">StripStream</span>
              <span className="text-sm text-muted-foreground font-medium hidden xl:inline">
                {t("common.backoffice")}
              </span>
            </div>
          </Link>

          <div className="flex items-center gap-2">
            <div className="hidden md:flex items-center gap-1">
              {navItems.map((item) => (
                <NavLink key={item.href} href={item.href} title={t(item.labelKey)}>
                  <NavIcon name={item.icon} />
                  <span className="ml-2 hidden xl:inline">{t(item.labelKey)}</span>
                </NavLink>
              ))}
            </div>

            <UserSwitcher
              users={users}
              activeUserId={activeUserId}
              setActiveUserAction={setActiveUserAction}
            />

            <div className="flex items-center gap-1 pl-4 ml-2 border-l border-border/60">
              <JobsIndicator />
              <Link
                href="/settings"
                className="hidden xl:flex p-2 rounded-lg text-muted-foreground hover:text-foreground hover:bg-accent transition-colors"
                title={t("nav.settings")}
              >
                <Icon name="settings" size="md" />
              </Link>
              <ThemeToggle />
              <LogoutButton />
              <MobileNav navItems={navItems.map(item => ({ ...item, label: t(item.labelKey) }))} />
            </div>
          </div>
        </nav>
      </header>

      <main className="container mx-auto px-4 sm:px-6 lg:px-8 py-8 pb-16">
        {children}
      </main>
    </>
  );
}

function NavLink({ href, title, children }: { href: NavItem["href"]; title?: string; children: ReactNode }) {
  return (
    <Link
      href={href}
      title={title}
      className="
        flex items-center
        px-2 lg:px-3 py-2
        rounded-lg
        text-sm font-medium
        text-muted-foreground
        hover:text-foreground
        hover:bg-accent
        transition-colors duration-200
        active:scale-[0.98]
      "
    >
      {children}
    </Link>
  );
}
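The as_user_id cookie set here is what UserSwitcher toggles, and because it is written with httpOnly: false, client code can also read it directly. A hedged sketch of a consumer for setActiveUserAction; the real UserSwitcher in @/app/components may render very differently, only the contract shown here is taken from the layout (the action reads the "user_id" field, and an empty value clears the cookie):

"use client";

// Field names on the user objects are assumptions for illustration only.
type MinimalUser = { id: string; name: string };

export function UserSwitcherSketch({
  users,
  activeUserId,
  setActiveUserAction,
}: {
  users: MinimalUser[];
  activeUserId: string | null;
  setActiveUserAction: (formData: FormData) => Promise<void>;
}) {
  return (
    <form action={setActiveUserAction}>
      <select
        name="user_id"
        defaultValue={activeUserId ?? ""}
        // Submit the surrounding form whenever the selection changes.
        onChange={(e) => e.currentTarget.form?.requestSubmit()}
      >
        <option value="">All users</option>
        {users.map((u) => (
          <option key={u.id} value={u.id}>{u.name}</option>
        ))}
      </select>
    </form>
  );
}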
@@ -1,8 +1,9 @@
-import { fetchLibraries, fetchBooks, getBookCoverUrl, LibraryDto, BookDto } from "../../../../lib/api";
+import { fetchLibraries, fetchBooks, getBookCoverUrl, LibraryDto, BookDto } from "@/lib/api";
-import { BooksGrid, EmptyState } from "../../../components/BookCard";
+import { BooksGrid, EmptyState } from "@/app/components/BookCard";
-import { LibrarySubPageHeader } from "../../../components/LibrarySubPageHeader";
+import { LibrarySubPageHeader } from "@/app/components/LibrarySubPageHeader";
-import { CursorPagination } from "../../../components/ui";
+import { OffsetPagination } from "@/app/components/ui";
 import { notFound } from "next/navigation";
+import { getServerTranslations } from "@/lib/i18n/server";
 
 export const dynamic = "force-dynamic";
 
@@ -14,16 +15,19 @@ export default async function LibraryBooksPage({
   searchParams: Promise<{ [key: string]: string | string[] | undefined }>;
 }) {
   const { id } = await params;
+  const { t } = await getServerTranslations();
   const searchParamsAwaited = await searchParams;
-  const cursor = typeof searchParamsAwaited.cursor === "string" ? searchParamsAwaited.cursor : undefined;
+  const page = typeof searchParamsAwaited.page === "string" ? parseInt(searchParamsAwaited.page) : 1;
   const series = typeof searchParamsAwaited.series === "string" ? searchParamsAwaited.series : undefined;
   const limit = typeof searchParamsAwaited.limit === "string" ? parseInt(searchParamsAwaited.limit) : 20;
 
   const [library, booksPage] = await Promise.all([
     fetchLibraries().then(libs => libs.find(l => l.id === id)),
-    fetchBooks(id, series, cursor, limit).catch(() => ({
+    fetchBooks(id, series, page, limit).catch(() => ({
       items: [] as BookDto[],
-      next_cursor: null
+      total: 0,
+      page: 1,
+      limit,
     }))
   ]);
 
@@ -35,17 +39,15 @@ export default async function LibraryBooksPage({
     ...book,
     coverUrl: getBookCoverUrl(book.id)
   }));
-  const nextCursor = booksPage.next_cursor;
 
-  const seriesDisplayName = series === "unclassified" ? "Unclassified" : series;
+  const seriesDisplayName = series === "unclassified" ? t("books.unclassified") : (series ?? "");
-  const hasNextPage = !!nextCursor;
+  const totalPages = Math.ceil(booksPage.total / limit);
-  const hasPrevPage = !!cursor;
 
   return (
     <div className="space-y-6">
       <LibrarySubPageHeader
         library={library}
-        title={series ? `Books in "${seriesDisplayName}"` : "All Books"}
+        title={series ? t("libraryBooks.booksOfSeries", { series: seriesDisplayName }) : t("libraryBooks.allBooks")}
         icon={
           <svg className="w-8 h-8" fill="none" stroke="currentColor" viewBox="0 0 24 24">
             <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 6.253v13m0-13C10.832 5.477 9.246 5 7.5 5S4.168 5.477 3 6.253v13C4.168 18.477 5.754 18 7.5 18s3.332.477 4.5 1.253m0-13C13.168 5.477 14.754 5 16.5 5c1.747 0 3.332.477 4.5 1.253v13C19.832 18.477 18.247 18 16.5 18c-1.746 0-3.332.477-4.5 1.253" />
@@ -53,9 +55,9 @@ export default async function LibraryBooksPage({
         }
         iconColor="text-success"
         filterInfo={series ? {
-          label: `Showing books from series "${seriesDisplayName}"`,
+          label: t("libraryBooks.filterLabel", { series: seriesDisplayName }),
           clearHref: `/libraries/${id}/books`,
-          clearLabel: "View all books"
+          clearLabel: t("libraryBooks.viewAll")
         } : undefined}
       />
 
@@ -63,16 +65,15 @@ export default async function LibraryBooksPage({
         <>
           <BooksGrid books={books} />
 
-          <CursorPagination
+          <OffsetPagination
-            hasNextPage={hasNextPage}
+            currentPage={page}
-            hasPrevPage={hasPrevPage}
+            totalPages={totalPages}
             pageSize={limit}
-            currentCount={books.length}
+            totalItems={booksPage.total}
-            nextCursor={nextCursor}
           />
         </>
       ) : (
-        <EmptyState message={series ? `No books in series "${seriesDisplayName}"` : "No books in this library yet"} />
+        <EmptyState message={series ? t("libraryBooks.noBooksInSeries", { series: seriesDisplayName }) : t("libraryBooks.noBooks")} />
       )}
     </div>
   );
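The cursor-to-offset migration in this diff boils down to two formulas: the page count shown to the user and the slice the server returns. As a sanity check, a sketch of the arithmetic, assuming the backend translates (page, limit) into OFFSET/LIMIT semantics; the real fetchBooks contract lives in @/lib/api:

// Offset pagination arithmetic, as used by the page above.
const total = 47;   // booksPage.total
const limit = 20;   // page size
const page = 3;     // 1-based current page

const totalPages = Math.ceil(total / limit);          // ceil(47 / 20) = 3
const offset = (page - 1) * limit;                    // (3 - 1) * 20 = 40
const itemsOnPage = Math.min(limit, total - offset);  // min(20, 7) = 7

console.log({ totalPages, offset, itemsOnPage });     // { totalPages: 3, offset: 40, itemsOnPage: 7 }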
apps/backoffice/app/(app)/libraries/[id]/series/[name]/page.tsx (new file, 229 lines)
@@ -0,0 +1,229 @@
import { fetchLibraries, fetchBooks, fetchSeriesMetadata, getBookCoverUrl, getMetadataLink, getMissingBooks, BookDto, SeriesMetadataDto, ExternalMetadataLinkDto, MissingBooksDto } from "@/lib/api";
import { BooksGrid, EmptyState } from "@/app/components/BookCard";
import { MarkSeriesReadButton } from "@/app/components/MarkSeriesReadButton";
import { MarkBookReadButton } from "@/app/components/MarkBookReadButton";
import nextDynamic from "next/dynamic";
import { OffsetPagination } from "@/app/components/ui";
import { SafeHtml } from "@/app/components/SafeHtml";
import Image from "next/image";
import Link from "next/link";
import { notFound } from "next/navigation";
import { getServerTranslations } from "@/lib/i18n/server";

const EditSeriesForm = nextDynamic(
  () => import("@/app/components/EditSeriesForm").then(m => m.EditSeriesForm)
);
const MetadataSearchModal = nextDynamic(
  () => import("@/app/components/MetadataSearchModal").then(m => m.MetadataSearchModal)
);
const ProwlarrSearchModal = nextDynamic(
  () => import("@/app/components/ProwlarrSearchModal").then(m => m.ProwlarrSearchModal)
);

export const dynamic = "force-dynamic";

export default async function SeriesDetailPage({
  params,
  searchParams,
}: {
  params: Promise<{ id: string; name: string }>;
  searchParams: Promise<{ [key: string]: string | string[] | undefined }>;
}) {
  const { id, name } = await params;
  const { t } = await getServerTranslations();
  const searchParamsAwaited = await searchParams;
  const page = typeof searchParamsAwaited.page === "string" ? parseInt(searchParamsAwaited.page) : 1;
  const limit = typeof searchParamsAwaited.limit === "string" ? parseInt(searchParamsAwaited.limit) : 50;

  const seriesName = decodeURIComponent(name);

  const [library, booksPage, seriesMeta, metadataLinks] = await Promise.all([
    fetchLibraries().then((libs) => libs.find((l) => l.id === id)),
    fetchBooks(id, seriesName, page, limit).catch(() => ({
      items: [] as BookDto[],
      total: 0,
      page: 1,
      limit,
    })),
    fetchSeriesMetadata(id, seriesName).catch(() => null as SeriesMetadataDto | null),
    getMetadataLink(id, seriesName).catch(() => [] as ExternalMetadataLinkDto[]),
  ]);

  const existingLink = metadataLinks.find((l) => l.status === "approved") ?? metadataLinks[0] ?? null;
  let missingData: MissingBooksDto | null = null;
  if (existingLink && existingLink.status === "approved") {
    missingData = await getMissingBooks(existingLink.id).catch(() => null);
  }

  if (!library) {
    notFound();
  }

  const books = booksPage.items.map((book) => ({
    ...book,
    coverUrl: getBookCoverUrl(book.id),
  }));

  const totalPages = Math.ceil(booksPage.total / limit);
  const booksReadCount = booksPage.items.filter((b) => b.reading_status === "read").length;
  const displayName = seriesName === "unclassified" ? t("books.unclassified") : seriesName;

  // Use first book cover as series cover
  const coverBookId = booksPage.items[0]?.id;

  return (
    <div className="space-y-6">
      {/* Breadcrumb */}
      <div className="flex items-center gap-2 text-sm">
        <Link
          href="/libraries"
          className="text-muted-foreground hover:text-primary transition-colors"
        >
          {t("nav.libraries")}
        </Link>
        <span className="text-muted-foreground">/</span>
        <Link
          href={`/libraries/${id}/series`}
          className="text-muted-foreground hover:text-primary transition-colors"
        >
          {library.name}
        </Link>
        <span className="text-muted-foreground">/</span>
        <span className="text-foreground font-medium">{displayName}</span>
      </div>

      {/* Series Header */}
      <div className="flex flex-col sm:flex-row gap-6">
        {coverBookId && (
          <div className="flex-shrink-0">
            <div className="w-40 aspect-[2/3] relative rounded-xl overflow-hidden shadow-card border border-border">
              <Image
                src={getBookCoverUrl(coverBookId)}
                alt={t("books.coverOf", { name: displayName })}
                fill
                className="object-cover"
                sizes="160px"
              />
            </div>
          </div>
        )}

        <div className="flex-1 space-y-4">
          <h1 className="text-3xl font-bold text-foreground">{displayName}</h1>

          <div className="flex flex-wrap items-center gap-3">
            {seriesMeta && seriesMeta.authors.length > 0 && (
              <p className="text-base text-muted-foreground">{seriesMeta.authors.join(", ")}</p>
            )}
            {seriesMeta?.status && (
              <span className={`inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium ${
                seriesMeta.status === "ongoing" ? "bg-blue-500/15 text-blue-600" :
                seriesMeta.status === "ended" ? "bg-green-500/15 text-green-600" :
                seriesMeta.status === "hiatus" ? "bg-amber-500/15 text-amber-600" :
                seriesMeta.status === "cancelled" ? "bg-red-500/15 text-red-600" :
                "bg-muted text-muted-foreground"
              }`}>
                {t(`seriesStatus.${seriesMeta.status}` as any) || seriesMeta.status}
              </span>
            )}
          </div>

          {seriesMeta?.description && (
            <SafeHtml html={seriesMeta.description} className="text-sm text-muted-foreground leading-relaxed" />
          )}

          <div className="flex flex-wrap items-center gap-4 text-sm">
            {seriesMeta && seriesMeta.publishers.length > 0 && (
              <span className="text-muted-foreground">
                <span className="font-semibold text-foreground">{seriesMeta.publishers.join(", ")}</span>
              </span>
            )}
            {seriesMeta?.start_year && (
              <span className="text-muted-foreground">{seriesMeta.start_year}</span>
            )}
            {((seriesMeta && seriesMeta.publishers.length > 0) || seriesMeta?.start_year) && <span className="w-px h-4 bg-border" />}
            <span className="text-muted-foreground">
              <span className="font-semibold text-foreground">{booksPage.total}</span> {t("dashboard.books").toLowerCase()}
            </span>
            <span className="w-px h-4 bg-border" />
            <span className="text-muted-foreground">
              {t("series.readCount", { read: String(booksReadCount), total: String(booksPage.total), plural: booksPage.total !== 1 ? "s" : "" })}
            </span>

            {/* Reading progress bar */}
            <div className="flex items-center gap-2 flex-1 min-w-[120px] max-w-[200px]">
              <div className="flex-1 h-2 bg-muted rounded-full overflow-hidden">
                <div
                  className="h-full bg-green-500 rounded-full transition-all"
                  style={{ width: `${booksPage.total > 0 ? (booksReadCount / booksPage.total) * 100 : 0}%` }}
                />
              </div>
            </div>

            {/* Collection progress bar (owned / expected) */}
            {missingData && missingData.total_external > 0 && (
              <>
                <span className="w-px h-4 bg-border" />
                <span className="text-muted-foreground">
                  {booksPage.total}/{missingData.total_external} — {t("series.missingCount", { count: missingData.missing_count, plural: missingData.missing_count !== 1 ? "s" : "" })}
                </span>
                <div className="w-[150px] h-2 bg-muted rounded-full overflow-hidden">
                  <div
                    className="h-full bg-amber-500 rounded-full transition-all"
                    style={{ width: `${Math.round((booksPage.total / missingData.total_external) * 100)}%` }}
                  />
                </div>
              </>
            )}
          </div>

          <div className="flex flex-wrap items-center gap-3">
            <MarkSeriesReadButton
              seriesName={seriesName}
              bookCount={booksPage.total}
              booksReadCount={booksReadCount}
            />
            <EditSeriesForm
              libraryId={id}
              seriesName={seriesName}
              currentAuthors={seriesMeta?.authors ?? []}
              currentPublishers={seriesMeta?.publishers ?? []}
              currentBookAuthor={seriesMeta?.book_author ?? booksPage.items[0]?.author ?? null}
              currentBookLanguage={seriesMeta?.book_language ?? booksPage.items[0]?.language ?? null}
              currentDescription={seriesMeta?.description ?? null}
              currentStartYear={seriesMeta?.start_year ?? null}
              currentTotalVolumes={seriesMeta?.total_volumes ?? null}
              currentStatus={seriesMeta?.status ?? null}
              currentLockedFields={seriesMeta?.locked_fields ?? {}}
            />
            <ProwlarrSearchModal
              seriesName={seriesName}
              missingBooks={missingData?.missing_books ?? null}
            />
            <MetadataSearchModal
              libraryId={id}
              seriesName={seriesName}
              existingLink={existingLink}
              initialMissing={missingData}
            />
          </div>
        </div>
      </div>

      {/* Books Grid */}
      {books.length > 0 ? (
        <>
          <BooksGrid books={books} />
          <OffsetPagination
            currentPage={page}
            totalPages={totalPages}
            pageSize={limit}
            totalItems={booksPage.total}
          />
        </>
      ) : (
        <EmptyState message={t("librarySeries.noBooksInSeries")} />
      )}
    </div>
  );
}
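One edge case in the collection progress bar above: the JSX already guards division by zero with total_external > 0, but if the local library ever holds more books than the external provider reports (booksPage.total greater than missingData.total_external), the computed width exceeds 100%. A defensive sketch, should that ever matter in practice:

// Hedged sketch: clamp the owned/expected ratio so the bar never overflows.
function collectionPct(owned: number, expected: number): number {
  if (expected <= 0) return 0; // mirrors the total_external > 0 guard
  return Math.min(100, Math.round((owned / expected) * 100));
}

collectionPct(12, 24); // 50
collectionPct(30, 24); // 100 (clamped, instead of 125)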
apps/backoffice/app/(app)/libraries/[id]/series/page.tsx (new file, 144 lines)
@@ -0,0 +1,144 @@
import { fetchLibraries, fetchSeries, fetchSeriesStatuses, getBookCoverUrl, LibraryDto, SeriesDto, SeriesPageDto } from "@/lib/api";
import { OffsetPagination } from "@/app/components/ui";
import { MarkSeriesReadButton } from "@/app/components/MarkSeriesReadButton";
import { SeriesFilters } from "@/app/components/SeriesFilters";
import Image from "next/image";
import Link from "next/link";
import { notFound } from "next/navigation";
import { LibrarySubPageHeader } from "@/app/components/LibrarySubPageHeader";
import { getServerTranslations } from "@/lib/i18n/server";

export const dynamic = "force-dynamic";

export default async function LibrarySeriesPage({
  params,
  searchParams
}: {
  params: Promise<{ id: string }>;
  searchParams: Promise<{ [key: string]: string | string[] | undefined }>;
}) {
  const { id } = await params;
  const { t } = await getServerTranslations();
  const searchParamsAwaited = await searchParams;
  const page = typeof searchParamsAwaited.page === "string" ? parseInt(searchParamsAwaited.page) : 1;
  const limit = typeof searchParamsAwaited.limit === "string" ? parseInt(searchParamsAwaited.limit) : 20;
  const seriesStatus = typeof searchParamsAwaited.series_status === "string" ? searchParamsAwaited.series_status : undefined;
  const hasMissing = searchParamsAwaited.has_missing === "true";

  const [library, seriesPage, dbStatuses] = await Promise.all([
    fetchLibraries().then(libs => libs.find(l => l.id === id)),
    fetchSeries(id, page, limit, seriesStatus, hasMissing).catch(() => ({ items: [] as SeriesDto[], total: 0, page: 1, limit }) as SeriesPageDto),
    fetchSeriesStatuses().catch(() => [] as string[]),
  ]);

  if (!library) {
    notFound();
  }

  const series = seriesPage.items;
  const totalPages = Math.ceil(seriesPage.total / limit);

  const KNOWN_STATUSES: Record<string, string> = {
    ongoing: t("seriesStatus.ongoing"),
    ended: t("seriesStatus.ended"),
    hiatus: t("seriesStatus.hiatus"),
    cancelled: t("seriesStatus.cancelled"),
    upcoming: t("seriesStatus.upcoming"),
  };
  const seriesStatusOptions = [
    { value: "", label: t("seriesStatus.allStatuses") },
    ...dbStatuses.map((s) => ({ value: s, label: KNOWN_STATUSES[s] || s })),
  ];

  return (
    <div className="space-y-6">
      <LibrarySubPageHeader
        library={library}
        title={t("series.title")}
        icon={
          <svg className="w-8 h-8" fill="none" stroke="currentColor" viewBox="0 0 24 24">
            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M19 11H5m14 0a2 2 0 012 2v6a2 2 0 01-2 2H5a2 2 0 01-2-2v-6a2 2 0 012-2m14 0V9a2 2 0 00-2-2M5 11V9a2 2 0 012-2m0 0V5a2 2 0 012-2h6a2 2 0 012 2v2M7 7h10" />
          </svg>
        }
        iconColor="text-primary"
      />

      <SeriesFilters
        basePath={`/libraries/${id}/series`}
        currentSeriesStatus={seriesStatus}
        currentHasMissing={hasMissing}
        seriesStatusOptions={seriesStatusOptions}
      />

      {series.length > 0 ? (
        <>
          <div className="grid grid-cols-2 sm:grid-cols-3 md:grid-cols-4 lg:grid-cols-5 gap-6">
            {series.map((s) => (
              <Link
                key={s.name}
                href={`/libraries/${id}/series/${encodeURIComponent(s.name)}`}
                className="group"
              >
                <div className={`bg-card rounded-xl shadow-sm border border-border/60 overflow-hidden hover:shadow-md transition-shadow duration-200 ${s.books_read_count >= s.book_count ? "opacity-50" : ""}`}>
                  <div className="aspect-[2/3] relative bg-muted/50">
                    <Image
                      src={getBookCoverUrl(s.first_book_id)}
                      alt={t("books.coverOf", { name: s.name })}
                      fill
                      className="object-cover"
                      sizes="(max-width: 640px) 50vw, (max-width: 768px) 33vw, (max-width: 1024px) 25vw, 20vw"
                    />
                  </div>
                  <div className="p-3">
                    <h3 className="font-medium text-foreground truncate text-sm" title={s.name}>
                      {s.name === "unclassified" ? t("books.unclassified") : s.name}
                    </h3>
                    <div className="flex items-center justify-between mt-1">
                      <p className="text-xs text-muted-foreground">
                        {t("series.readCount", { read: String(s.books_read_count), total: String(s.book_count), plural: s.book_count !== 1 ? "s" : "" })}
                      </p>
                      <MarkSeriesReadButton
                        seriesName={s.name}
                        bookCount={s.book_count}
                        booksReadCount={s.books_read_count}
                      />
                    </div>
                    <div className="flex items-center gap-1 mt-1.5 flex-wrap">
                      {s.series_status && (
                        <span className={`text-[10px] px-1.5 py-0.5 rounded-full font-medium ${
                          s.series_status === "ongoing" ? "bg-blue-500/15 text-blue-600" :
                          s.series_status === "ended" ? "bg-green-500/15 text-green-600" :
                          s.series_status === "hiatus" ? "bg-amber-500/15 text-amber-600" :
                          s.series_status === "cancelled" ? "bg-red-500/15 text-red-600" :
                          "bg-muted text-muted-foreground"
                        }`}>
                          {KNOWN_STATUSES[s.series_status] || s.series_status}
                        </span>
                      )}
                      {s.missing_count != null && s.missing_count > 0 && (
                        <span className="text-[10px] px-1.5 py-0.5 rounded-full font-medium bg-yellow-500/15 text-yellow-600">
                          {t("series.missingCount", { count: String(s.missing_count) })}
                        </span>
                      )}
                    </div>
                  </div>
                </div>
              </Link>
            ))}
          </div>

          <OffsetPagination
            currentPage={page}
            totalPages={totalPages}
            pageSize={limit}
            totalItems={seriesPage.total}
          />
        </>
      ) : (
        <div className="text-center py-12 text-muted-foreground">
          <p>{t("librarySeries.noSeries")}</p>
        </div>
      )}
    </div>
  );
}
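Series names travel through the URL: this page encodes them with encodeURIComponent when building the detail route, and the detail page decodes the dynamic segment with decodeURIComponent. A quick check that the round trip is lossless even for awkward names:

// Round-trip check for series names used as a dynamic route segment.
const name = "Astérix & Obélix / Intégrale #1";
const segment = encodeURIComponent(name);
// "Ast%C3%A9rix%20%26%20Ob%C3%A9lix%20%2F%20Int%C3%A9grale%20%231"
const back = decodeURIComponent(segment);
console.log(back === name); // true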
apps/backoffice/app/(app)/libraries/page.tsx (new file, 227 lines)
@@ -0,0 +1,227 @@
import { revalidatePath } from "next/cache";
import Image from "next/image";
import Link from "next/link";
import { listFolders, createLibrary, deleteLibrary, fetchLibraries, getBookCoverUrl, LibraryDto, FolderItem } from "@/lib/api";
import type { TranslationKey } from "@/lib/i18n/fr";
import { getServerTranslations } from "@/lib/i18n/server";
import { LibraryActions } from "@/app/components/LibraryActions";
import { LibraryForm } from "@/app/components/LibraryForm";
import { ProviderIcon } from "@/app/components/ProviderIcon";
import {
  Card, CardHeader, CardTitle, CardDescription, CardContent,
  Button, Badge
} from "@/app/components/ui";

export const dynamic = "force-dynamic";

function formatNextScan(nextScanAt: string | null, imminentLabel: string): string {
  if (!nextScanAt) return "-";
  const date = new Date(nextScanAt);
  const now = new Date();
  const diff = date.getTime() - now.getTime();

  if (diff < 0) return imminentLabel;
  if (diff < 60000) return "< 1 min";
  if (diff < 3600000) return `${Math.floor(diff / 60000)}m`;
  if (diff < 86400000) return `${Math.floor(diff / 3600000)}h`;
  return `${Math.floor(diff / 86400000)}d`;
}

export default async function LibrariesPage() {
  const { t } = await getServerTranslations();
  const [libraries, folders] = await Promise.all([
    fetchLibraries().catch(() => [] as LibraryDto[]),
    listFolders().catch(() => [] as FolderItem[])
  ]);

  const thumbnailMap = new Map(
    libraries.map(lib => [
      lib.id,
      (lib.thumbnail_book_ids || []).map(bookId => getBookCoverUrl(bookId)),
    ])
  );

  async function addLibrary(formData: FormData) {
    "use server";
    const name = formData.get("name") as string;
    const rootPath = formData.get("root_path") as string;
    if (name && rootPath) {
      await createLibrary(name, rootPath);
      revalidatePath("/libraries");
    }
  }

  async function removeLibrary(formData: FormData) {
    "use server";
    const id = formData.get("id") as string;
    await deleteLibrary(id);
    revalidatePath("/libraries");
  }

  return (
    <>
      <div className="mb-6">
        <h1 className="text-3xl font-bold text-foreground flex items-center gap-3">
          <svg className="w-8 h-8 text-primary" fill="none" stroke="currentColor" viewBox="0 0 24 24">
            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M3 7v10a2 2 0 002 2h14a2 2 0 002-2V9a2 2 0 00-2-2h-6l-2-2H5a2 2 0 00-2 2z" />
          </svg>
          {t("libraries.title")}
        </h1>
      </div>

      {/* Add Library Form */}
      <Card className="mb-6">
        <CardHeader>
          <CardTitle>{t("libraries.addLibrary")}</CardTitle>
          <CardDescription>{t("libraries.addLibraryDescription")}</CardDescription>
        </CardHeader>
        <CardContent>
          <LibraryForm initialFolders={folders} action={addLibrary} />
        </CardContent>
      </Card>

      {/* Libraries Grid */}
      <div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4">
        {libraries.map((lib) => {
          const thumbnails = thumbnailMap.get(lib.id) || [];
          return (
            <Card key={lib.id} className="flex flex-col overflow-hidden">
              {/* Thumbnail fan */}
              {thumbnails.length > 0 ? (
                <Link href={`/libraries/${lib.id}/series`} className="block relative h-48 overflow-hidden bg-muted/10">
                  <Image
                    src={thumbnails[0]}
                    alt=""
                    fill
                    className="object-cover blur-xl scale-110 opacity-40"
                    sizes="(max-width: 768px) 100vw, 33vw"
                    loading="lazy"
                  />
                  <div className="absolute inset-0 flex items-end justify-center">
                    {thumbnails.map((url, i) => {
                      const count = thumbnails.length;
                      const mid = (count - 1) / 2;
                      const angle = (i - mid) * 12;
                      const radius = 220;
                      const rad = ((angle - 90) * Math.PI) / 180;
                      const cx = Math.cos(rad) * radius;
                      const cy = Math.sin(rad) * radius;
                      return (
                        <Image
                          key={i}
                          src={url}
                          alt=""
                          width={96}
                          height={144}
                          className="absolute object-cover shadow-lg"
                          style={{
                            transform: `translate(${cx}px, ${cy}px) rotate(${angle}deg)`,
                            transformOrigin: 'bottom center',
                            zIndex: count - Math.abs(Math.round(i - mid)),
                            bottom: '-185px',
                          }}
                          sizes="96px"
                          loading="lazy"
                        />
                      );
                    })}
                  </div>
                </Link>
              ) : (
                <div className="h-8 bg-muted/10" />
              )}

              <CardHeader className="pb-2">
                <div className="flex items-start justify-between">
                  <div>
                    <CardTitle className="text-lg">{lib.name}</CardTitle>
                    {!lib.enabled && <Badge variant="muted" className="mt-1">{t("libraries.disabled")}</Badge>}
                  </div>
                  <div className="flex items-center gap-1">
                    <LibraryActions
                      libraryId={lib.id}
                      monitorEnabled={lib.monitor_enabled}
                      scanMode={lib.scan_mode}
                      watcherEnabled={lib.watcher_enabled}
                      metadataProvider={lib.metadata_provider}
                      fallbackMetadataProvider={lib.fallback_metadata_provider}
                      metadataRefreshMode={lib.metadata_refresh_mode}
                    />
                    <form>
                      <input type="hidden" name="id" value={lib.id} />
                      <Button type="submit" variant="ghost" size="sm" formAction={removeLibrary} className="text-muted-foreground hover:text-destructive">
                        <svg className="w-4 h-4" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                          <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M19 7l-.867 12.142A2 2 0 0116.138 21H7.862a2 2 0 01-1.995-1.858L5 7m5 4v6m4-6v6m1-10V4a1 1 0 00-1-1h-4a1 1 0 00-1 1v3M4 7h16" />
                        </svg>
                      </Button>
                    </form>
                  </div>
                </div>
                <code className="text-xs font-mono text-muted-foreground break-all">{lib.root_path}</code>
              </CardHeader>
              <CardContent className="flex-1 pt-0">
                {/* Stats */}
                <div className="grid grid-cols-2 gap-3 mb-3">
                  <Link
                    href={`/libraries/${lib.id}/books`}
                    className="text-center p-2.5 bg-muted/50 rounded-lg hover:bg-accent transition-colors duration-200"
                  >
                    <span className="block text-2xl font-bold text-primary">{lib.book_count}</span>
                    <span className="text-xs text-muted-foreground">{t("libraries.books")}</span>
                  </Link>
                  <Link
                    href={`/libraries/${lib.id}/series`}
                    className="text-center p-2.5 bg-muted/50 rounded-lg hover:bg-accent transition-colors duration-200"
                  >
                    <span className="block text-2xl font-bold text-foreground">{lib.series_count}</span>
                    <span className="text-xs text-muted-foreground">{t("libraries.series")}</span>
                  </Link>
                </div>

                {/* Configuration tags */}
                <div className="flex flex-wrap gap-1.5">
                  <span className={`inline-flex items-center gap-1 px-2 py-0.5 rounded-full text-[11px] font-medium ${
                    lib.monitor_enabled
                      ? 'bg-success/10 text-success'
                      : 'bg-muted/50 text-muted-foreground'
                  }`}>
                    <span className="text-[9px]">{lib.monitor_enabled ? '●' : '○'}</span>
                    {t("libraries.scanLabel", { mode: t(`monitoring.${lib.scan_mode}` as TranslationKey) })}
                  </span>

                  <span className={`inline-flex items-center gap-1 px-2 py-0.5 rounded-full text-[11px] font-medium ${
                    lib.watcher_enabled
                      ? 'bg-warning/10 text-warning'
                      : 'bg-muted/50 text-muted-foreground'
                  }`}>
                    <span>{lib.watcher_enabled ? '⚡' : '○'}</span>
                    <span>{t("libraries.watcherLabel")}</span>
                  </span>

                  {lib.metadata_provider && lib.metadata_provider !== "none" && (
                    <span className="inline-flex items-center gap-1 px-2 py-0.5 rounded-full text-[11px] font-medium bg-primary/10 text-primary">
                      <ProviderIcon provider={lib.metadata_provider} size={11} />
                      {lib.metadata_provider.replace('_', ' ')}
                    </span>
                  )}

                  {lib.metadata_refresh_mode !== "manual" && (
                    <span className="inline-flex items-center gap-1 px-2 py-0.5 rounded-full text-[11px] font-medium bg-muted/50 text-muted-foreground">
                      {t("libraries.metaRefreshLabel", { mode: t(`monitoring.${lib.metadata_refresh_mode}` as TranslationKey) })}
                    </span>
                  )}

                  {lib.monitor_enabled && lib.next_scan_at && (
                    <span className="inline-flex items-center gap-1 px-2 py-0.5 rounded-full text-[11px] font-medium bg-muted/50 text-muted-foreground">
                      {t("libraries.nextScan", { time: formatNextScan(lib.next_scan_at, t("libraries.imminent")) })}
                    </span>
                  )}
                </div>
              </CardContent>
            </Card>
          );
        })}
      </div>
    </>
  );
}
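The cover fan in this page positions each thumbnail on a circular arc: covers are spread 12 degrees apart around the arc's midpoint, then placed with basic trigonometry, where the -90 degree shift puts angle 0 at the top of the circle. A worked example for five covers, matching the math in the map above:

// Worked example of the thumbnail-fan geometry for count = 5 covers.
const count = 5;
const radius = 220;
const mid = (count - 1) / 2; // 2

for (let i = 0; i < count; i++) {
  const angle = (i - mid) * 12;               // -24, -12, 0, 12, 24
  const rad = ((angle - 90) * Math.PI) / 180; // rotate so 0 degrees points up
  const cx = Math.cos(rad) * radius;
  const cy = Math.sin(rad) * radius;
  console.log(i, angle, cx.toFixed(1), cy.toFixed(1));
}
// i=2 (center): angle 0   -> cx 0.0,   cy -220.0 (straight up)
// i=0:          angle -24 -> cx -89.5, cy -201.0 (up and to the left)

The zIndex expression then stacks the center cover on top, and the fixed bottom offset sinks the arc so only the upper edge of the fan shows inside the 12rem-tall container.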
apps/backoffice/app/(app)/page.tsx (new file, 514 lines)
@@ -0,0 +1,514 @@
|
|||||||
|
import React from "react";
|
||||||
|
import { fetchStats, fetchUsers, StatsResponse, UserDto } from "@/lib/api";
|
||||||
|
import { Card, CardContent, CardHeader, CardTitle } from "@/app/components/ui";
|
||||||
|
import { RcDonutChart, RcBarChart, RcAreaChart, RcStackedBar, RcHorizontalBar, RcMultiLineChart } from "@/app/components/DashboardCharts";
|
||||||
|
import { PeriodToggle } from "@/app/components/PeriodToggle";
|
||||||
|
import { CurrentlyReadingList, RecentlyReadList } from "@/app/components/ReadingUserFilter";
|
||||||
|
import Link from "next/link";
|
||||||
|
import { getServerTranslations } from "@/lib/i18n/server";
|
||||||
|
import type { TranslateFunction } from "@/lib/i18n/dictionaries";
|
||||||
|
|
||||||
|
export const dynamic = "force-dynamic";
|
||||||
|
|
||||||
|
function formatBytes(bytes: number): string {
|
||||||
|
if (bytes === 0) return "0 B";
|
||||||
|
const k = 1024;
|
||||||
|
const sizes = ["B", "KB", "MB", "GB", "TB"];
|
||||||
|
const i = Math.floor(Math.log(bytes) / Math.log(k));
|
||||||
|
return `${(bytes / Math.pow(k, i)).toFixed(1)} ${sizes[i]}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
function formatNumber(n: number, locale: string): string {
|
||||||
|
return n.toLocaleString(locale === "fr" ? "fr-FR" : "en-US");
|
||||||
|
}
|
||||||
|
|
||||||
|
function formatChartLabel(raw: string, period: "day" | "week" | "month", locale: string): string {
|
||||||
|
const loc = locale === "fr" ? "fr-FR" : "en-US";
|
||||||
|
if (period === "month") {
|
||||||
|
// raw = "YYYY-MM"
|
||||||
|
const [y, m] = raw.split("-");
|
||||||
|
const d = new Date(Number(y), Number(m) - 1, 1);
|
||||||
|
return d.toLocaleDateString(loc, { month: "short" });
|
||||||
|
}
|
||||||
|
if (period === "week") {
|
||||||
|
// raw = "YYYY-MM-DD" (Monday of the week)
|
||||||
|
const d = new Date(raw + "T00:00:00");
|
||||||
|
return d.toLocaleDateString(loc, { day: "numeric", month: "short" });
|
||||||
|
}
|
||||||
|
// day: raw = "YYYY-MM-DD"
|
||||||
|
const d = new Date(raw + "T00:00:00");
|
||||||
|
return d.toLocaleDateString(loc, { weekday: "short", day: "numeric" });
|
||||||
|
}
|
||||||
|
|
||||||
|
// Horizontal progress bar for metadata quality (stays server-rendered, no recharts needed)
|
||||||
|
function HorizontalBar({ label, value, max, subLabel, color = "var(--color-primary)" }: { label: string; value: number; max: number; subLabel?: string; color?: string }) {
|
||||||
|
const pct = max > 0 ? (value / max) * 100 : 0;
|
||||||
|
return (
|
||||||
|
<div className="space-y-1">
|
||||||
|
<div className="flex justify-between text-sm">
|
||||||
|
<span className="font-medium text-foreground truncate">{label}</span>
|
||||||
|
<span className="text-muted-foreground shrink-0 ml-2">{subLabel || value}</span>
|
||||||
|
</div>
|
||||||
|
<div className="h-2 bg-muted rounded-full overflow-hidden">
|
||||||
|
<div
|
||||||
|
className="h-full rounded-full transition-all duration-500"
|
||||||
|
style={{ width: `${pct}%`, backgroundColor: color }}
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
export default async function DashboardPage({
  searchParams,
}: {
  searchParams: Promise<{ [key: string]: string | string[] | undefined }>;
}) {
  const searchParamsAwaited = await searchParams;
  const rawPeriod = searchParamsAwaited.period;
  const period = rawPeriod === "day" ? "day" as const : rawPeriod === "week" ? "week" as const : "month" as const;
  const { t, locale } = await getServerTranslations();

  let stats: StatsResponse | null = null;
  let users: UserDto[] = [];
  try {
    [stats, users] = await Promise.all([
      fetchStats(period),
      fetchUsers().catch(() => []),
    ]);
  } catch (e) {
    console.error("Failed to fetch stats:", e);
  }

  if (!stats) {
    return (
      <div className="max-w-5xl mx-auto">
        <div className="text-center mb-12">
          <h1 className="text-4xl font-bold tracking-tight mb-4 text-foreground">StripStream Backoffice</h1>
          <p className="text-lg text-muted-foreground">{t("dashboard.loadError")}</p>
        </div>
        <QuickLinks t={t} />
      </div>
    );
  }

  const {
    overview,
    reading_status,
    currently_reading = [],
    recently_read = [],
    reading_over_time = [],
    users_reading_over_time = [],
    by_format,
    by_library,
    top_series,
    additions_over_time,
    jobs_over_time = [],
    metadata = { total_series: 0, series_linked: 0, series_unlinked: 0, books_with_summary: 0, books_with_isbn: 0, by_provider: [] },
  } = stats;

  const readingColors = ["hsl(220 13% 70%)", "hsl(45 93% 47%)", "hsl(142 60% 45%)"];
  const formatColors = [
    "hsl(198 78% 37%)", "hsl(142 60% 45%)", "hsl(45 93% 47%)",
    "hsl(2 72% 48%)", "hsl(280 60% 50%)", "hsl(32 80% 50%)",
    "hsl(170 60% 45%)", "hsl(220 60% 50%)",
  ];

  const noDataLabel = t("common.noData");

  return (
    <div className="max-w-7xl mx-auto space-y-6">
      {/* Header */}
      <div className="mb-2">
        <h1 className="text-3xl font-bold text-foreground flex items-center gap-3">
          <svg className="w-8 h-8 text-primary" fill="none" stroke="currentColor" viewBox="0 0 24 24">
            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M9 19v-6a2 2 0 00-2-2H5a2 2 0 00-2 2v6a2 2 0 002 2h2a2 2 0 002-2zm0 0V9a2 2 0 012-2h2a2 2 0 012 2v10m-6 0a2 2 0 002 2h2a2 2 0 002-2m0 0V5a2 2 0 012-2h2a2 2 0 012 2v14a2 2 0 01-2 2h-2a2 2 0 01-2-2z" />
          </svg>
          {t("dashboard.title")}
        </h1>
        <p className="text-muted-foreground mt-2 max-w-2xl">
          {t("dashboard.subtitle")}
        </p>
      </div>

      {/* Overview stat cards */}
      <div className="grid grid-cols-2 md:grid-cols-3 lg:grid-cols-6 gap-4">
        <StatCard icon="book" label={t("dashboard.books")} value={formatNumber(overview.total_books, locale)} color="success" />
        <StatCard icon="series" label={t("dashboard.series")} value={formatNumber(overview.total_series, locale)} color="primary" />
        <StatCard icon="library" label={t("dashboard.libraries")} value={formatNumber(overview.total_libraries, locale)} color="warning" />
        <StatCard icon="pages" label={t("dashboard.pages")} value={formatNumber(overview.total_pages, locale)} color="primary" />
        <StatCard icon="author" label={t("dashboard.authors")} value={formatNumber(overview.total_authors, locale)} color="success" />
        <StatCard icon="size" label={t("dashboard.totalSize")} value={formatBytes(overview.total_size_bytes)} color="warning" />
      </div>

      {/* Currently reading + Recently read */}
      {(currently_reading.length > 0 || recently_read.length > 0) && (
        <div className="grid grid-cols-1 lg:grid-cols-2 gap-6">
          {/* Currently reading */}
          <Card hover={false}>
            <CardHeader>
              <CardTitle className="text-base">{t("dashboard.currentlyReading")}</CardTitle>
            </CardHeader>
            <CardContent>
              <CurrentlyReadingList
                items={currently_reading}
                allLabel={t("dashboard.allUsers")}
                emptyLabel={t("dashboard.noCurrentlyReading")}
                pageProgressTemplate={t("dashboard.pageProgress")}
              />
            </CardContent>
          </Card>

          {/* Recently read */}
          <Card hover={false}>
            <CardHeader>
              <CardTitle className="text-base">{t("dashboard.recentlyRead")}</CardTitle>
            </CardHeader>
            <CardContent>
              <RecentlyReadList
                items={recently_read}
                allLabel={t("dashboard.allUsers")}
                emptyLabel={t("dashboard.noRecentlyRead")}
              />
            </CardContent>
          </Card>
        </div>
      )}

      {/* Reading activity line chart */}
      <Card hover={false}>
        <CardHeader className="flex flex-row items-center justify-between space-y-0">
          <CardTitle className="text-base">{t("dashboard.readingActivity")}</CardTitle>
          <PeriodToggle labels={{ day: t("dashboard.periodDay"), week: t("dashboard.periodWeek"), month: t("dashboard.periodMonth") }} />
        </CardHeader>
        <CardContent>
          {(() => {
            const userColors = [
              "hsl(142 60% 45%)", "hsl(198 78% 37%)", "hsl(45 93% 47%)",
              "hsl(2 72% 48%)", "hsl(280 60% 50%)", "hsl(32 80% 50%)",
            ];
            const usernames = [...new Set(users_reading_over_time.map(r => r.username))];
            if (usernames.length === 0) {
              return (
                <RcAreaChart
                  noDataLabel={noDataLabel}
                  data={reading_over_time.map((m) => ({ label: formatChartLabel(m.month, period, locale), value: m.books_read }))}
                  color="hsl(142 60% 45%)"
                />
              );
            }
            // Pivot: { label, username1: n, username2: n, ... }
            const byMonth = new Map<string, Record<string, unknown>>();
            for (const row of users_reading_over_time) {
              const label = formatChartLabel(row.month, period, locale);
              if (!byMonth.has(row.month)) byMonth.set(row.month, { label });
              byMonth.get(row.month)![row.username] = row.books_read;
            }
            const chartData = [...byMonth.values()];
            const lines = usernames.map((u, i) => ({
              key: u,
              label: u,
              color: userColors[i % userColors.length],
            }));
            return <RcMultiLineChart data={chartData} lines={lines} noDataLabel={noDataLabel} />;
          })()}
        </CardContent>
      </Card>

      {/* Charts row */}
      <div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-6">
        {/* Reading status per reader */}
        <Card hover={false}>
          <CardHeader>
            <CardTitle className="text-base">{t("dashboard.readingStatus")}</CardTitle>
          </CardHeader>
          <CardContent>
            {users.length === 0 ? (
              <RcDonutChart
                noDataLabel={noDataLabel}
                data={[
                  { name: t("status.unread"), value: reading_status.unread, color: readingColors[0] },
                  { name: t("status.reading"), value: reading_status.reading, color: readingColors[1] },
                  { name: t("status.read"), value: reading_status.read, color: readingColors[2] },
                ]}
              />
            ) : (
              <div className="space-y-3">
                {users.map((user) => {
                  const total = overview.total_books;
                  const read = user.books_read;
                  const reading = user.books_reading;
                  const unread = Math.max(0, total - read - reading);
                  const readPct = total > 0 ? (read / total) * 100 : 0;
                  const readingPct = total > 0 ? (reading / total) * 100 : 0;
                  return (
                    <div key={user.id} className="space-y-1">
                      <div className="flex items-center justify-between text-sm">
                        <span className="font-medium text-foreground truncate">{user.username}</span>
                        <span className="text-xs text-muted-foreground shrink-0 ml-2">
                          <span className="text-success font-medium">{read}</span>
                          {reading > 0 && <span className="text-amber-500 font-medium"> · {reading}</span>}
                          <span className="text-muted-foreground/60"> / {total}</span>
                        </span>
                      </div>
                      <div className="h-2 bg-muted rounded-full overflow-hidden flex">
                        <div className="h-full bg-success transition-all duration-500" style={{ width: `${readPct}%` }} />
                        <div className="h-full bg-amber-500 transition-all duration-500" style={{ width: `${readingPct}%` }} />
                      </div>
                    </div>
                  );
                })}
              </div>
            )}
          </CardContent>
        </Card>

        {/* By format donut */}
        <Card hover={false}>
          <CardHeader>
            <CardTitle className="text-base">{t("dashboard.byFormat")}</CardTitle>
          </CardHeader>
          <CardContent>
            <RcDonutChart
              noDataLabel={noDataLabel}
              data={by_format.slice(0, 6).map((f, i) => ({
                name: (f.format || t("dashboard.unknown")).toUpperCase(),
                value: f.count,
                color: formatColors[i % formatColors.length],
              }))}
            />
          </CardContent>
        </Card>

        {/* By library donut */}
        <Card hover={false}>
          <CardHeader>
            <CardTitle className="text-base">{t("dashboard.byLibrary")}</CardTitle>
          </CardHeader>
          <CardContent>
            <RcDonutChart
              noDataLabel={noDataLabel}
              data={by_library.slice(0, 6).map((l, i) => ({
                name: l.library_name,
                value: l.book_count,
                color: formatColors[i % formatColors.length],
              }))}
            />
          </CardContent>
        </Card>
      </div>

      {/* Metadata row */}
      <div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-6">
        {/* Series metadata coverage donut */}
        <Card hover={false}>
          <CardHeader>
            <CardTitle className="text-base">{t("dashboard.metadataCoverage")}</CardTitle>
          </CardHeader>
          <CardContent>
            <RcDonutChart
              noDataLabel={noDataLabel}
              data={[
                { name: t("dashboard.seriesLinked"), value: metadata.series_linked, color: "hsl(142 60% 45%)" },
                { name: t("dashboard.seriesUnlinked"), value: metadata.series_unlinked, color: "hsl(220 13% 70%)" },
              ]}
            />
          </CardContent>
        </Card>

        {/* By provider donut */}
        <Card hover={false}>
          <CardHeader>
            <CardTitle className="text-base">{t("dashboard.byProvider")}</CardTitle>
          </CardHeader>
          <CardContent>
            <RcDonutChart
              noDataLabel={noDataLabel}
              data={metadata.by_provider.map((p, i) => ({
                name: p.provider.replace(/_/g, " ").replace(/\b\w/g, (c) => c.toUpperCase()),
                value: p.count,
                color: formatColors[i % formatColors.length],
              }))}
            />
          </CardContent>
        </Card>

        {/* Book metadata quality */}
        <Card hover={false}>
          <CardHeader>
            <CardTitle className="text-base">{t("dashboard.bookMetadata")}</CardTitle>
          </CardHeader>
          <CardContent>
            <div className="space-y-4">
              <HorizontalBar
                label={t("dashboard.withSummary")}
                value={metadata.books_with_summary}
                max={overview.total_books}
                subLabel={overview.total_books > 0 ? `${Math.round((metadata.books_with_summary / overview.total_books) * 100)}%` : "0%"}
                color="hsl(198 78% 37%)"
              />
              <HorizontalBar
                label={t("dashboard.withIsbn")}
                value={metadata.books_with_isbn}
                max={overview.total_books}
                subLabel={overview.total_books > 0 ? `${Math.round((metadata.books_with_isbn / overview.total_books) * 100)}%` : "0%"}
                color="hsl(280 60% 50%)"
              />
            </div>
          </CardContent>
        </Card>
      </div>

      {/* Libraries breakdown + Top series */}
      <div className="grid grid-cols-1 lg:grid-cols-2 gap-6">
        {by_library.length > 0 && (
          <Card hover={false}>
            <CardHeader>
              <CardTitle className="text-base">{t("dashboard.libraries")}</CardTitle>
            </CardHeader>
            <CardContent>
              <RcStackedBar
                data={by_library.map((lib) => ({
                  name: lib.library_name,
                  read: lib.read_count,
                  reading: lib.reading_count,
                  unread: lib.unread_count,
                  sizeLabel: formatBytes(lib.size_bytes),
                }))}
                labels={{
                  read: t("status.read"),
                  reading: t("status.reading"),
                  unread: t("status.unread"),
                  books: t("dashboard.books"),
                }}
              />
            </CardContent>
          </Card>
        )}

        {/* Top series */}
        <Card hover={false}>
          <CardHeader>
            <CardTitle className="text-base">{t("dashboard.popularSeries")}</CardTitle>
          </CardHeader>
          <CardContent>
            <RcHorizontalBar
              noDataLabel={t("dashboard.noSeries")}
              data={top_series.slice(0, 8).map((s) => ({
                name: s.series,
                value: s.book_count,
                subLabel: t("dashboard.readCount", { read: s.read_count, total: s.book_count }),
              }))}
              color="hsl(142 60% 45%)"
            />
          </CardContent>
        </Card>
      </div>

      {/* Additions line chart – full width */}
      <Card hover={false}>
        <CardHeader className="flex flex-row items-center justify-between space-y-0">
          <CardTitle className="text-base">{t("dashboard.booksAdded")}</CardTitle>
          <PeriodToggle labels={{ day: t("dashboard.periodDay"), week: t("dashboard.periodWeek"), month: t("dashboard.periodMonth") }} />
        </CardHeader>
        <CardContent>
          <RcAreaChart
            noDataLabel={noDataLabel}
            data={additions_over_time.map((m) => ({ label: formatChartLabel(m.month, period, locale), value: m.books_added }))}
            color="hsl(198 78% 37%)"
          />
        </CardContent>
      </Card>

      {/* Jobs over time – multi-line chart */}
      <Card hover={false}>
        <CardHeader className="flex flex-row items-center justify-between space-y-0">
          <CardTitle className="text-base">{t("dashboard.jobsOverTime")}</CardTitle>
          <PeriodToggle labels={{ day: t("dashboard.periodDay"), week: t("dashboard.periodWeek"), month: t("dashboard.periodMonth") }} />
        </CardHeader>
        <CardContent>
          <RcMultiLineChart
            noDataLabel={noDataLabel}
            data={jobs_over_time.map((j) => ({
              label: formatChartLabel(j.label, period, locale),
              scan: j.scan,
              rebuild: j.rebuild,
              thumbnail: j.thumbnail,
              other: j.other,
            }))}
            lines={[
              { key: "scan", label: t("dashboard.jobScan"), color: "hsl(198 78% 37%)" },
              { key: "rebuild", label: t("dashboard.jobRebuild"), color: "hsl(142 60% 45%)" },
              { key: "thumbnail", label: t("dashboard.jobThumbnail"), color: "hsl(45 93% 47%)" },
              { key: "other", label: t("dashboard.jobOther"), color: "hsl(280 60% 50%)" },
            ]}
          />
        </CardContent>
      </Card>

      {/* Quick links */}
      <QuickLinks t={t} />
    </div>
  );
}
function StatCard({ icon, label, value, color }: { icon: string; label: string; value: string; color: string }) {
  const icons: Record<string, React.ReactNode> = {
    book: <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 6.253v13m0-13C10.832 5.477 9.246 5 7.5 5S4.168 5.477 3 6.253v13C4.168 18.477 5.754 18 7.5 18s3.332.477 4.5 1.253m0-13C13.168 5.477 14.754 5 16.5 5c1.747 0 3.332.477 4.5 1.253v13C19.832 18.477 18.247 18 16.5 18c-1.746 0-3.332.477-4.5 1.253" />,
    series: <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M19 11H5m14 0a2 2 0 012 2v6a2 2 0 01-2 2H5a2 2 0 01-2-2v-6a2 2 0 012-2m14 0V9a2 2 0 00-2-2M5 11V9a2 2 0 012-2m0 0V5a2 2 0 012-2h6a2 2 0 012 2v2M7 7h10" />,
    library: <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M3 7v10a2 2 0 002 2h14a2 2 0 002-2V9a2 2 0 00-2-2h-6l-2-2H5a2 2 0 00-2 2z" />,
    pages: <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M9 12h6m-6 4h6m2 5H7a2 2 0 01-2-2V5a2 2 0 012-2h5.586a1 1 0 01.707.293l5.414 5.414a1 1 0 01.293.707V19a2 2 0 01-2 2z" />,
    author: <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M16 7a4 4 0 11-8 0 4 4 0 018 0zM12 14a7 7 0 00-7 7h14a7 7 0 00-7-7z" />,
    size: <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M4 7v10c0 2.21 3.582 4 8 4s8-1.79 8-4V7M4 7c0 2.21 3.582 4 8 4s8-1.79 8-4M4 7c0-2.21 3.582-4 8-4s8 1.79 8 4m0 5c0 2.21-3.582 4-8 4s-8-1.79-8-4" />,
  };

  const colorClasses: Record<string, string> = {
    primary: "bg-primary/10 text-primary",
    success: "bg-success/10 text-success",
    warning: "bg-warning/10 text-warning",
  };

  return (
    <Card hover={false} className="p-4">
      <div className="flex items-center gap-3">
        <div className={`w-10 h-10 rounded-lg flex items-center justify-center shrink-0 ${colorClasses[color]}`}>
          <svg className="w-5 h-5" fill="none" stroke="currentColor" viewBox="0 0 24 24">
            {icons[icon]}
          </svg>
        </div>
        <div className="min-w-0">
          <p className="text-xl font-bold text-foreground leading-tight">{value}</p>
          <p className="text-xs text-muted-foreground">{label}</p>
        </div>
      </div>
    </Card>
  );
}
function QuickLinks({ t }: { t: TranslateFunction }) {
  const links = [
    { href: "/libraries", label: t("nav.libraries"), bg: "bg-primary/10", text: "text-primary", hoverBg: "group-hover:bg-primary", hoverText: "group-hover:text-primary-foreground", icon: <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M3 7v10a2 2 0 002 2h14a2 2 0 002-2V9a2 2 0 00-2-2h-6l-2-2H5a2 2 0 00-2 2z" /> },
    { href: "/books", label: t("nav.books"), bg: "bg-success/10", text: "text-success", hoverBg: "group-hover:bg-success", hoverText: "group-hover:text-white", icon: <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M12 6.253v13m0-13C10.832 5.477 9.246 5 7.5 5S4.168 5.477 3 6.253v13C4.168 18.477 5.754 18 7.5 18s3.332.477 4.5 1.253m0-13C13.168 5.477 14.754 5 16.5 5c1.747 0 3.332.477 4.5 1.253v13C19.832 18.477 18.247 18 16.5 18c-1.746 0-3.332.477-4.5 1.253" /> },
    { href: "/series", label: t("nav.series"), bg: "bg-warning/10", text: "text-warning", hoverBg: "group-hover:bg-warning", hoverText: "group-hover:text-white", icon: <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M19 11H5m14 0a2 2 0 012 2v6a2 2 0 01-2 2H5a2 2 0 01-2-2v-6a2 2 0 012-2m14 0V9a2 2 0 00-2-2M5 11V9a2 2 0 012-2m0 0V5a2 2 0 012-2h6a2 2 0 012 2v2M7 7h10" /> },
    { href: "/jobs", label: t("nav.jobs"), bg: "bg-destructive/10", text: "text-destructive", hoverBg: "group-hover:bg-destructive", hoverText: "group-hover:text-destructive-foreground", icon: <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M13 10V3L4 14h7v7l9-11h-7z" /> },
  ];

  return (
    <div className="grid grid-cols-2 md:grid-cols-4 gap-4">
      {links.map((l) => (
        <Link
          key={l.href}
          href={l.href as any}
          className="group p-4 bg-card/80 backdrop-blur-sm rounded-xl border border-border/50 shadow-sm hover:shadow-md hover:-translate-y-0.5 transition-all duration-200 flex items-center gap-3"
        >
          <div className={`w-9 h-9 rounded-lg flex items-center justify-center transition-colors duration-200 ${l.bg} ${l.hoverBg}`}>
            <svg className={`w-5 h-5 ${l.text} ${l.hoverText}`} fill="none" stroke="currentColor" viewBox="0 0 24 24">
              {l.icon}
            </svg>
          </div>
          <span className="font-medium text-foreground text-sm">{l.label}</span>
        </Link>
      ))}
    </div>
  );
}
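Reviewer note: DashboardPage destructures `stats` without its type being visible in this diff. A hypothetical partial reconstruction of StatsResponse from that usage alone; field names come from the code above, while optionality and element types are guesses (the real definition lives in @/lib/api):

interface StatsResponse {
  overview: {
    total_books: number; total_series: number; total_libraries: number;
    total_pages: number; total_authors: number; total_size_bytes: number;
  };
  reading_status: { unread: number; reading: number; read: number };
  currently_reading?: unknown[];   // rendered by CurrentlyReadingList
  recently_read?: unknown[];       // rendered by RecentlyReadList
  reading_over_time?: { month: string; books_read: number }[];
  users_reading_over_time?: { month: string; username: string; books_read: number }[];
  by_format: { format: string | null; count: number }[];
  by_library: { library_name: string; book_count: number; read_count: number; reading_count: number; unread_count: number; size_bytes: number }[];
  top_series: { series: string; book_count: number; read_count: number }[];
  additions_over_time: { month: string; books_added: number }[];
  jobs_over_time?: { label: string; scan: number; rebuild: number; thumbnail: number; other: number }[];
  metadata?: {
    total_series: number; series_linked: number; series_unlinked: number;
    books_with_summary: number; books_with_isbn: number;
    by_provider: { provider: string; count: number }[];
  };
}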
208  apps/backoffice/app/(app)/series/page.tsx  Normal file
@@ -0,0 +1,208 @@
import { fetchAllSeries, fetchLibraries, fetchSeriesStatuses, LibraryDto, SeriesDto, SeriesPageDto, getBookCoverUrl } from "@/lib/api";
import { getServerTranslations } from "@/lib/i18n/server";
import { MarkSeriesReadButton } from "@/app/components/MarkSeriesReadButton";
import { LiveSearchForm } from "@/app/components/LiveSearchForm";
import { Card, CardContent, OffsetPagination } from "@/app/components/ui";
import Image from "next/image";
import Link from "next/link";
import { ProviderIcon } from "@/app/components/ProviderIcon";

export const dynamic = "force-dynamic";

export default async function SeriesPage({
  searchParams,
}: {
  searchParams: Promise<{ [key: string]: string | string[] | undefined }>;
}) {
  const { t } = await getServerTranslations();
  const searchParamsAwaited = await searchParams;
  const libraryId = typeof searchParamsAwaited.library === "string" ? searchParamsAwaited.library : undefined;
  const searchQuery = typeof searchParamsAwaited.q === "string" ? searchParamsAwaited.q : "";
  const readingStatus = typeof searchParamsAwaited.status === "string" ? searchParamsAwaited.status : undefined;
  const sort = typeof searchParamsAwaited.sort === "string" ? searchParamsAwaited.sort : undefined;
  const seriesStatus = typeof searchParamsAwaited.series_status === "string" ? searchParamsAwaited.series_status : undefined;
  const hasMissing = searchParamsAwaited.has_missing === "true";
  const metadataProvider = typeof searchParamsAwaited.metadata_provider === "string" ? searchParamsAwaited.metadata_provider : undefined;
  // Guard against non-numeric query values: parseInt would otherwise yield NaN
  // and poison the Math.ceil below.
  const page = typeof searchParamsAwaited.page === "string" ? parseInt(searchParamsAwaited.page, 10) || 1 : 1;
  const limit = typeof searchParamsAwaited.limit === "string" ? parseInt(searchParamsAwaited.limit, 10) || 20 : 20;

  const [libraries, seriesPage, dbStatuses] = await Promise.all([
    fetchLibraries().catch(() => [] as LibraryDto[]),
    fetchAllSeries(libraryId, searchQuery || undefined, readingStatus, page, limit, sort, seriesStatus, hasMissing, metadataProvider).catch(
      () => ({ items: [] as SeriesDto[], total: 0, page: 1, limit }) as SeriesPageDto
    ),
    fetchSeriesStatuses().catch(() => [] as string[]),
  ]);

  const series = seriesPage.items;
  const totalPages = Math.ceil(seriesPage.total / limit);
  const sortOptions = [
    { value: "", label: t("books.sortTitle") },
    { value: "latest", label: t("books.sortLatest") },
  ];

  const hasFilters = searchQuery || libraryId || readingStatus || sort || seriesStatus || hasMissing || metadataProvider;

  const libraryOptions = [
    { value: "", label: t("books.allLibraries") },
    ...libraries.map((lib) => ({ value: lib.id, label: lib.name })),
  ];

  const statusOptions = [
    { value: "", label: t("common.all") },
    { value: "unread", label: t("status.unread") },
    { value: "reading", label: t("status.reading") },
    { value: "read", label: t("status.read") },
  ];

  const KNOWN_STATUSES: Record<string, string> = {
    ongoing: t("seriesStatus.ongoing"),
    ended: t("seriesStatus.ended"),
    hiatus: t("seriesStatus.hiatus"),
    cancelled: t("seriesStatus.cancelled"),
    upcoming: t("seriesStatus.upcoming"),
  };
  const seriesStatusOptions = [
    { value: "", label: t("seriesStatus.allStatuses") },
    ...dbStatuses.map((s) => ({ value: s, label: KNOWN_STATUSES[s] || s })),
  ];

  const missingOptions = [
    { value: "", label: t("common.all") },
    { value: "true", label: t("series.missingBooks") },
  ];

  const metadataOptions = [
    { value: "", label: t("series.metadataAll") },
    { value: "linked", label: t("series.metadataLinked") },
    { value: "unlinked", label: t("series.metadataUnlinked") },
    { value: "google_books", label: "Google Books" },
    { value: "open_library", label: "Open Library" },
    { value: "comicvine", label: "ComicVine" },
    { value: "anilist", label: "AniList" },
    { value: "bedetheque", label: "Bédéthèque" },
  ];

  return (
    <>
      <div className="mb-6">
        <h1 className="text-3xl font-bold text-foreground flex items-center gap-3">
          <svg className="w-8 h-8 text-warning" fill="none" stroke="currentColor" viewBox="0 0 24 24">
            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M19 11H5m14 0a2 2 0 012 2v6a2 2 0 01-2 2H5a2 2 0 01-2-2v-6a2 2 0 012-2m14 0V9a2 2 0 00-2-2M5 11V9a2 2 0 012-2m0 0V5a2 2 0 012-2h6a2 2 0 012 2v2M7 7h10" />
          </svg>
          {t("series.title")}
        </h1>
      </div>

      <Card className="mb-6">
        <CardContent className="pt-6">
          <LiveSearchForm
            basePath="/series"
            fields={[
              { name: "q", type: "text", label: t("common.search"), placeholder: t("series.searchPlaceholder") },
              { name: "library", type: "select", label: t("books.library"), options: libraryOptions },
              { name: "status", type: "select", label: t("series.reading"), options: statusOptions },
              { name: "series_status", type: "select", label: t("editSeries.status"), options: seriesStatusOptions },
              { name: "has_missing", type: "select", label: t("series.missing"), options: missingOptions },
              { name: "metadata_provider", type: "select", label: t("series.metadata"), options: metadataOptions },
              { name: "sort", type: "select", label: t("books.sort"), options: sortOptions },
            ]}
          />
        </CardContent>
      </Card>

      {/* Results count */}
      <p className="text-sm text-muted-foreground mb-4">
        {seriesPage.total} {t("series.title").toLowerCase()}
        {searchQuery && <> {t("series.matchingQuery")} "{searchQuery}"</>}
      </p>

      {/* Series Grid */}
      {series.length > 0 ? (
        <>
          <div className="grid grid-cols-2 sm:grid-cols-3 md:grid-cols-4 lg:grid-cols-5 xl:grid-cols-6 gap-4">
            {series.map((s) => (
              <Link
                key={s.name}
                href={`/libraries/${s.library_id}/series/${encodeURIComponent(s.name)}`}
                className="group"
              >
                <div
                  className={`bg-card rounded-xl shadow-sm border border-border/60 overflow-hidden hover:shadow-md hover:-translate-y-1 transition-all duration-200 ${
                    s.books_read_count >= s.book_count ? "opacity-50" : ""
                  }`}
                >
                  <div className="aspect-[2/3] relative bg-muted/50">
                    <Image
                      src={getBookCoverUrl(s.first_book_id)}
                      alt={t("books.coverOf", { name: s.name })}
                      fill
                      className="object-cover"
                      sizes="(max-width: 640px) 50vw, (max-width: 768px) 33vw, (max-width: 1024px) 25vw, 16vw"
                    />
                  </div>
                  <div className="p-3">
                    <h3 className="font-medium text-foreground truncate text-sm" title={s.name}>
                      {s.name === "unclassified" ? t("books.unclassified") : s.name}
                    </h3>
                    <div className="flex items-center justify-between mt-1">
                      <p className="text-xs text-muted-foreground">
                        {t("series.readCount", { read: String(s.books_read_count), total: String(s.book_count), plural: s.book_count !== 1 ? "s" : "" })}
                      </p>
                      <MarkSeriesReadButton
                        seriesName={s.name}
                        bookCount={s.book_count}
                        booksReadCount={s.books_read_count}
                      />
                    </div>
                    <div className="flex items-center gap-1 mt-1.5 flex-wrap">
                      {s.series_status && (
                        <span className={`text-[10px] px-1.5 py-0.5 rounded-full font-medium ${
                          s.series_status === "ongoing" ? "bg-blue-500/15 text-blue-600" :
                          s.series_status === "ended" ? "bg-green-500/15 text-green-600" :
                          s.series_status === "hiatus" ? "bg-amber-500/15 text-amber-600" :
                          s.series_status === "cancelled" ? "bg-red-500/15 text-red-600" :
                          "bg-muted text-muted-foreground"
                        }`}>
                          {KNOWN_STATUSES[s.series_status] || s.series_status}
                        </span>
                      )}
                      {s.missing_count != null && s.missing_count > 0 && (
                        <span className="text-[10px] px-1.5 py-0.5 rounded-full font-medium bg-yellow-500/15 text-yellow-600">
                          {t("series.missingCount", { count: String(s.missing_count), plural: s.missing_count > 1 ? "s" : "" })}
                        </span>
                      )}
                      {s.metadata_provider && (
                        <span className="text-[10px] px-1.5 py-0.5 rounded-full font-medium bg-purple-500/15 text-purple-600 inline-flex items-center gap-0.5">
                          <ProviderIcon provider={s.metadata_provider} size={10} />
                        </span>
                      )}
                    </div>
                  </div>
                </div>
              </Link>
            ))}
          </div>

          <OffsetPagination
            currentPage={page}
            totalPages={totalPages}
            pageSize={limit}
            totalItems={seriesPage.total}
          />
        </>
      ) : (
        <div className="flex flex-col items-center justify-center py-16 text-center">
          <div className="w-16 h-16 mb-4 text-muted-foreground/30">
            <svg fill="none" stroke="currentColor" viewBox="0 0 24 24">
              <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={1.5} d="M19 11H5m14 0a2 2 0 012 2v6a2 2 0 01-2 2H5a2 2 0 01-2-2v-6a2 2 0 012-2m14 0V9a2 2 0 00-2-2M5 11V9a2 2 0 012-2m0 0V5a2 2 0 012-2h6a2 2 0 012 2v2M7 7h10" />
            </svg>
          </div>
          <p className="text-muted-foreground text-lg">
            {hasFilters ? t("series.noResults") : t("series.noSeries")}
          </p>
        </div>
      )}
    </>
  );
}
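Reviewer note: the grid above reads these SeriesDto fields. A hypothetical partial shape, reconstructed from usage only (the actual type is exported by @/lib/api and not shown in this diff):

interface SeriesDto {
  name: string;
  library_id: string;
  first_book_id: string;             // used for the cover image
  book_count: number;
  books_read_count: number;
  series_status?: string;            // "ongoing" | "ended" | "hiatus" | "cancelled" | ... per KNOWN_STATUSES
  missing_count?: number | null;
  metadata_provider?: string | null;
}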
1755  apps/backoffice/app/(app)/settings/SettingsPage.tsx  Normal file
File diff suppressed because it is too large
@@ -1,4 +1,4 @@
-import { getSettings, getCacheStats } from "../../lib/api";
+import { getSettings, getCacheStats, getThumbnailStats, fetchUsers } from "@/lib/api";
 import SettingsPage from "./SettingsPage";
 
 export const dynamic = "force-dynamic";
@@ -7,7 +7,8 @@ export default async function SettingsPageWrapper() {
   const settings = await getSettings().catch(() => ({
     image_processing: { format: "webp", quality: 85, filter: "lanczos3", max_width: 2160 },
     cache: { enabled: true, directory: "/tmp/stripstream-image-cache", max_size_mb: 10000 },
-    limits: { concurrent_renders: 4, timeout_seconds: 12, rate_limit_per_second: 120 }
+    limits: { concurrent_renders: 4, timeout_seconds: 12, rate_limit_per_second: 120 },
+    thumbnail: { enabled: true, width: 300, height: 400, quality: 80, format: "webp", directory: "/data/thumbnails" }
   }));
 
   const cacheStats = await getCacheStats().catch(() => ({
@@ -16,5 +17,13 @@ export default async function SettingsPageWrapper() {
     directory: "/tmp/stripstream-image-cache"
   }));
 
-  return <SettingsPage initialSettings={settings} initialCacheStats={cacheStats} />;
+  const thumbnailStats = await getThumbnailStats().catch(() => ({
+    total_size_mb: 0,
+    file_count: 0,
+    directory: "/data/thumbnails"
+  }));
+
+  const users = await fetchUsers().catch(() => []);
+
+  return <SettingsPage initialSettings={settings} initialCacheStats={cacheStats} initialThumbnailStats={thumbnailStats} users={users} />;
 }
316  apps/backoffice/app/(app)/tokens/page.tsx  Normal file
@@ -0,0 +1,316 @@
import { revalidatePath } from "next/cache";
import { redirect } from "next/navigation";
import { listTokens, createToken, revokeToken, deleteToken, updateToken, fetchUsers, createUser, deleteUser, updateUser, TokenDto, UserDto } from "@/lib/api";
import { Card, CardHeader, CardTitle, CardDescription, CardContent, Button, Badge, FormField, FormInput, FormSelect, FormRow } from "@/app/components/ui";
import { TokenUserSelect } from "@/app/components/TokenUserSelect";
import { UsernameEdit } from "@/app/components/UsernameEdit";
import { getServerTranslations } from "@/lib/i18n/server";

export const dynamic = "force-dynamic";

export default async function TokensPage({
  searchParams
}: {
  searchParams: Promise<{ created?: string }>;
}) {
  const { t } = await getServerTranslations();
  const params = await searchParams;
  const tokens = await listTokens().catch(() => [] as TokenDto[]);
  const users = await fetchUsers().catch(() => [] as UserDto[]);

  async function createTokenAction(formData: FormData) {
    "use server";
    const name = formData.get("name") as string;
    const scope = formData.get("scope") as string;
    const userId = (formData.get("user_id") as string) || undefined;
    if (name) {
      const result = await createToken(name, scope, userId);
      revalidatePath("/tokens");
      redirect(`/tokens?created=${encodeURIComponent(result.token)}`);
    }
  }

  async function revokeTokenAction(formData: FormData) {
    "use server";
    const id = formData.get("id") as string;
    await revokeToken(id);
    revalidatePath("/tokens");
  }

  async function deleteTokenAction(formData: FormData) {
    "use server";
    const id = formData.get("id") as string;
    await deleteToken(id);
    revalidatePath("/tokens");
  }

  async function createUserAction(formData: FormData) {
    "use server";
    const username = formData.get("username") as string;
    if (username) {
      await createUser(username);
      revalidatePath("/tokens");
    }
  }

  async function deleteUserAction(formData: FormData) {
    "use server";
    const id = formData.get("id") as string;
    await deleteUser(id);
    revalidatePath("/tokens");
  }

  async function renameUserAction(formData: FormData) {
    "use server";
    const id = formData.get("id") as string;
    const username = formData.get("username") as string;
    if (username?.trim()) {
      await updateUser(id, username.trim());
      revalidatePath("/tokens");
    }
  }

  async function reassignTokenAction(formData: FormData) {
    "use server";
    const id = formData.get("id") as string;
    const userId = (formData.get("user_id") as string) || null;
    await updateToken(id, userId);
    revalidatePath("/tokens");
  }

  return (
    <>
      <div className="mb-6">
        <h1 className="text-3xl font-bold text-foreground flex items-center gap-3">
          <svg className="w-8 h-8 text-destructive" fill="none" stroke="currentColor" viewBox="0 0 24 24">
            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M15 7a2 2 0 012 2m4 0a6 6 0 01-7.743 5.743L11 17H9v2H7v2H4a1 1 0 01-1-1v-2.586a1 1 0 01.293-.707l5.964-5.964A6 6 0 1121 9z" />
          </svg>
          {t("tokens.title")}
        </h1>
      </div>

      {/* ── Readers ──────────────────────────────────────────── */}
      <div className="mb-2">
        <h2 className="text-xl font-semibold text-foreground">{t("users.title")}</h2>
      </div>

      <Card className="mb-6">
        <CardHeader>
          <CardTitle>{t("users.createNew")}</CardTitle>
          <CardDescription>{t("users.createDescription")}</CardDescription>
        </CardHeader>
        <CardContent>
          <form action={createUserAction}>
            <FormRow>
              <FormField className="flex-1 min-w-48">
                <FormInput name="username" placeholder={t("users.username")} required autoComplete="off" />
              </FormField>
              <Button type="submit">{t("users.createButton")}</Button>
            </FormRow>
          </form>
        </CardContent>
      </Card>

      <Card className="overflow-hidden mb-10">
        <div className="overflow-x-auto">
          <table className="w-full">
            <thead>
              <tr className="border-b border-border/60 bg-muted/50">
                <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("users.name")}</th>
                <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("users.tokenCount")}</th>
                <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("status.read")}</th>
                <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("status.reading")}</th>
                <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("users.createdAt")}</th>
                <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("users.actions")}</th>
              </tr>
            </thead>
            <tbody className="divide-y divide-border/60">
              {/* Synthetic admin row */}
              <tr className="hover:bg-accent/50 transition-colors bg-destructive/5">
                <td className="px-4 py-3 text-sm font-medium text-foreground flex items-center gap-2">
                  {process.env.ADMIN_USERNAME ?? "admin"}
                  <Badge variant="destructive">{t("tokens.scopeAdmin")}</Badge>
                </td>
                <td className="px-4 py-3 text-sm text-muted-foreground">
                  {tokens.filter(tok => tok.scope === "admin" && !tok.revoked_at).length}
                </td>
                <td className="px-4 py-3 text-sm text-muted-foreground/50">—</td>
                <td className="px-4 py-3 text-sm text-muted-foreground/50">—</td>
                <td className="px-4 py-3 text-sm text-muted-foreground/50">—</td>
                <td className="px-4 py-3 text-sm text-muted-foreground/50">—</td>
              </tr>
              {/* Row for unassigned read tokens */}
              {(() => {
                const unassigned = tokens.filter(tok => tok.scope === "read" && !tok.user_id && !tok.revoked_at);
                if (unassigned.length === 0) return null;
                return (
                  <tr className="hover:bg-accent/50 transition-colors bg-warning/5">
                    <td className="px-4 py-3 text-sm font-medium text-muted-foreground italic">
                      {t("tokens.noUser")}
                    </td>
                    <td className="px-4 py-3 text-sm text-warning font-medium">{unassigned.length}</td>
                    <td className="px-4 py-3 text-sm text-muted-foreground/50">—</td>
                    <td className="px-4 py-3 text-sm text-muted-foreground/50">—</td>
                    <td className="px-4 py-3 text-sm text-muted-foreground/50">—</td>
                    <td className="px-4 py-3 text-sm text-muted-foreground/50">—</td>
                  </tr>
                );
              })()}
              {users.map((user) => (
                <tr key={user.id} className="hover:bg-accent/50 transition-colors">
                  <td className="px-4 py-3">
                    <UsernameEdit userId={user.id} currentUsername={user.username} action={renameUserAction} />
                  </td>
                  <td className="px-4 py-3 text-sm text-muted-foreground">{user.token_count}</td>
                  <td className="px-4 py-3 text-sm">
                    {user.books_read > 0
                      ? <span className="font-medium text-success">{user.books_read}</span>
                      : <span className="text-muted-foreground/50">—</span>}
                  </td>
                  <td className="px-4 py-3 text-sm">
                    {user.books_reading > 0
                      ? <span className="font-medium text-amber-500">{user.books_reading}</span>
                      : <span className="text-muted-foreground/50">—</span>}
                  </td>
                  <td className="px-4 py-3 text-sm text-muted-foreground">
                    {new Date(user.created_at).toLocaleDateString()}
                  </td>
                  <td className="px-4 py-3">
                    <form action={deleteUserAction}>
                      <input type="hidden" name="id" value={user.id} />
                      <Button type="submit" variant="destructive" size="sm">
                        <svg className="w-4 h-4 mr-1" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                          <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M19 7l-.867 12.142A2 2 0 0116.138 21H7.862a2 2 0 01-1.995-1.858L5 7m5 4v6m4-6v6m1-10V4a1 1 0 00-1-1h-4a1 1 0 00-1 1v3M4 7h16" />
                        </svg>
                        {t("common.delete")}
                      </Button>
                    </form>
                  </td>
                </tr>
              ))}
            </tbody>
          </table>
        </div>
      </Card>

      {/* ── API tokens ───────────────────────────────────────── */}
      <div className="mb-2">
        <h2 className="text-xl font-semibold text-foreground">{t("tokens.apiTokens")}</h2>
      </div>

      {params.created ? (
        <Card className="mb-6 border-success/50 bg-success/5">
          <CardHeader>
            <CardTitle className="text-success">{t("tokens.created")}</CardTitle>
            <CardDescription>{t("tokens.createdDescription")}</CardDescription>
          </CardHeader>
          <CardContent>
            <pre className="p-4 bg-background rounded-lg text-sm font-mono text-foreground overflow-x-auto border">{params.created}</pre>
          </CardContent>
        </Card>
      ) : null}

      <Card className="mb-6">
        <CardHeader>
          <CardTitle>{t("tokens.createNew")}</CardTitle>
          <CardDescription>{t("tokens.createDescription")}</CardDescription>
        </CardHeader>
        <CardContent>
          <form action={createTokenAction}>
            <FormRow>
              <FormField className="flex-1 min-w-48">
                <FormInput name="name" placeholder={t("tokens.tokenName")} required autoComplete="off" />
              </FormField>
              <FormField className="w-32">
                <FormSelect name="scope" defaultValue="read">
                  <option value="read">{t("tokens.scopeRead")}</option>
                  <option value="admin">{t("tokens.scopeAdmin")}</option>
                </FormSelect>
              </FormField>
              <FormField className="w-48">
                <FormSelect name="user_id" defaultValue="">
                  <option value="">{t("tokens.noUser")}</option>
                  {users.map((user) => (
                    <option key={user.id} value={user.id}>{user.username}</option>
                  ))}
                </FormSelect>
              </FormField>
              <Button type="submit">{t("tokens.createButton")}</Button>
            </FormRow>
          </form>
        </CardContent>
      </Card>

      <Card className="overflow-hidden">
        <div className="overflow-x-auto">
          <table className="w-full">
            <thead>
              <tr className="border-b border-border/60 bg-muted/50">
                <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("tokens.name")}</th>
                <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("tokens.user")}</th>
                <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("tokens.scope")}</th>
                <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("tokens.prefix")}</th>
                <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("tokens.status")}</th>
                <th className="px-4 py-3 text-left text-xs font-semibold text-muted-foreground uppercase tracking-wider">{t("tokens.actions")}</th>
              </tr>
            </thead>
            <tbody className="divide-y divide-border/60">
              {tokens.map((token) => (
                <tr key={token.id} className="hover:bg-accent/50 transition-colors">
                  <td className="px-4 py-3 text-sm text-foreground">{token.name}</td>
                  <td className="px-4 py-3 text-sm">
                    <TokenUserSelect
                      tokenId={token.id}
                      currentUserId={token.user_id}
                      users={users}
                      action={reassignTokenAction}
                      noUserLabel={t("tokens.noUser")}
                    />
                  </td>
                  <td className="px-4 py-3 text-sm">
                    <Badge variant={token.scope === "admin" ? "destructive" : "secondary"}>
                      {token.scope}
                    </Badge>
                  </td>
                  <td className="px-4 py-3 text-sm">
                    <code className="px-2 py-1 bg-muted rounded font-mono text-foreground">{token.prefix}</code>
                  </td>
                  <td className="px-4 py-3 text-sm">
                    {token.revoked_at ? (
                      <Badge variant="error">{t("tokens.revoked")}</Badge>
                    ) : (
                      <Badge variant="success">{t("tokens.active")}</Badge>
                    )}
                  </td>
                  <td className="px-4 py-3">
                    {!token.revoked_at ? (
                      <form action={revokeTokenAction}>
                        <input type="hidden" name="id" value={token.id} />
                        <Button type="submit" variant="destructive" size="sm">
                          <svg className="w-4 h-4 mr-1" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M10 14l2-2m0 0l2-2m-2 2l-2-2m2 2l2 2m7-2a9 9 0 11-18 0 9 9 0 0118 0z" />
                          </svg>
                          {t("tokens.revoke")}
                        </Button>
                      </form>
                    ) : (
                      <form action={deleteTokenAction}>
                        <input type="hidden" name="id" value={token.id} />
                        <Button type="submit" variant="destructive" size="sm">
                          <svg className="w-4 h-4 mr-1" fill="none" stroke="currentColor" viewBox="0 0 24 24">
                            <path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M19 7l-.867 12.142A2 2 0 0116.138 21H7.862a2 2 0 01-1.995-1.858L5 7m5 4v6m4-6v6m1-10V4a1 1 0 00-1-1h-4a1 1 0 00-1 1v3M4 7h16" />
                          </svg>
                          {t("common.delete")}
                        </Button>
                      </form>
                    )}
                  </td>
                </tr>
              ))}
            </tbody>
          </table>
        </div>
      </Card>
    </>
  );
}
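Reviewer note: the token table implies this partial TokenDto shape, a reconstruction from usage rather than the actual definition in @/lib/api (revoked_at's exact type is a guess):

interface TokenDto {
  id: string;
  name: string;
  scope: "read" | "admin";
  prefix: string;            // short prefix shown in the UI, never the full secret
  user_id: string | null;
  revoked_at: string | null; // assumed ISO timestamp; only its truthiness is checked above
}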
31  apps/backoffice/app/api/auth/login/route.ts  Normal file
@@ -0,0 +1,31 @@
import { NextRequest, NextResponse } from "next/server";
import { createSessionToken, SESSION_COOKIE } from "@/lib/session";

export async function POST(req: NextRequest) {
  const body = await req.json().catch(() => null);
  if (!body || typeof body.username !== "string" || typeof body.password !== "string") {
    return NextResponse.json({ error: "Invalid request" }, { status: 400 });
  }

  const expectedUsername = process.env.ADMIN_USERNAME || "admin";
  const expectedPassword = process.env.ADMIN_PASSWORD;

  if (!expectedPassword) {
    return NextResponse.json({ error: "Server misconfiguration" }, { status: 500 });
  }

  if (body.username !== expectedUsername || body.password !== expectedPassword) {
    return NextResponse.json({ error: "Invalid credentials" }, { status: 401 });
  }

  const token = await createSessionToken();
  const response = NextResponse.json({ success: true });
  response.cookies.set(SESSION_COOKIE, token, {
    httpOnly: true,
    secure: process.env.NODE_ENV === "production",
    sameSite: "lax",
    maxAge: 7 * 24 * 60 * 60,
    path: "/",
  });
  return response;
}
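Reviewer note: createSessionToken and SESSION_COOKIE come from @/lib/session, which this diff does not show. A minimal sketch of what such a module could look like, assuming an HMAC-signed JWT via the jose package and a SESSION_SECRET env var (both assumptions; the real implementation may differ):

import { SignJWT, jwtVerify } from "jose";

export const SESSION_COOKIE = "stripstream_session"; // hypothetical cookie name

const secret = () => new TextEncoder().encode(process.env.SESSION_SECRET ?? "");

// Issue a signed, expiring session token for the admin user.
export async function createSessionToken(): Promise<string> {
  return new SignJWT({ role: "admin" })
    .setProtectedHeader({ alg: "HS256" })
    .setIssuedAt()
    .setExpirationTime("7d") // matches the 7-day cookie maxAge set by the login route
    .sign(secret());
}

// Verify a token read from the session cookie; returns false on any failure.
export async function verifySessionToken(token: string): Promise<boolean> {
  try {
    await jwtVerify(token, secret());
    return true;
  } catch {
    return false;
  }
}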
8  apps/backoffice/app/api/auth/logout/route.ts  Normal file
@@ -0,0 +1,8 @@
import { NextResponse } from "next/server";
import { SESSION_COOKIE } from "@/lib/session";

export async function POST() {
  const response = NextResponse.json({ success: true });
  response.cookies.delete(SESSION_COOKIE);
  return response;
}
17  apps/backoffice/app/api/books/[bookId]/convert/route.ts  Normal file
@@ -0,0 +1,17 @@
import { NextRequest, NextResponse } from "next/server";
import { convertBook } from "@/lib/api";

export async function POST(
  _request: NextRequest,
  { params }: { params: Promise<{ bookId: string }> }
) {
  const { bookId } = await params;
  try {
    const data = await convertBook(bookId);
    return NextResponse.json(data);
  } catch (error) {
    const message = error instanceof Error ? error.message : "Failed to start conversion";
    const status = message.includes("409") ? 409 : 500;
    return NextResponse.json({ error: message }, { status });
  }
}
@@ -1,35 +1,25 @@
 import { NextRequest, NextResponse } from "next/server";
+import { config } from "@/lib/api";
 
 export async function GET(
   request: NextRequest,
   { params }: { params: Promise<{ bookId: string; pageNum: string }> }
 ) {
   const { bookId, pageNum } = await params;
 
-  // Read the query params (format, width, quality)
-  const { searchParams } = new URL(request.url);
-  const format = searchParams.get("format") || "webp";
-  const width = searchParams.get("width") || "";
-  const quality = searchParams.get("quality") || "";
-
-  // Build the URL to the backend API
-  const apiBaseUrl = process.env.API_BASE_URL || "http://api:8080";
-  const apiUrl = new URL(`${apiBaseUrl}/books/${bookId}/pages/${pageNum}`);
-  apiUrl.searchParams.set("format", format);
-  if (width) apiUrl.searchParams.set("width", width);
-  if (quality) apiUrl.searchParams.set("quality", quality);
-
-  // Send the request to the API
-  const token = process.env.API_BOOTSTRAP_TOKEN;
-  if (!token) {
-    return new NextResponse("API token not configured", { status: 500 });
-  }
-
   try {
+    const { baseUrl, token } = config();
+    const { searchParams } = new URL(request.url);
+    const format = searchParams.get("format") || "webp";
+    const width = searchParams.get("width") || "";
+    const quality = searchParams.get("quality") || "";
+
+    const apiUrl = new URL(`${baseUrl}/books/${bookId}/pages/${pageNum}`);
+    apiUrl.searchParams.set("format", format);
+    if (width) apiUrl.searchParams.set("width", width);
+    if (quality) apiUrl.searchParams.set("quality", quality);
+
     const response = await fetch(apiUrl.toString(), {
-      headers: {
-        Authorization: `Bearer ${token}`,
-      },
+      headers: { Authorization: `Bearer ${token}` },
     });
 
     if (!response.ok) {
@@ -38,12 +28,9 @@ export async function GET(
       });
     }
 
-    // Read the content-type and body
     const contentType = response.headers.get("content-type") || "image/webp";
-    const imageBuffer = await response.arrayBuffer();
 
-    // Return the image with the correct content-type
-    return new NextResponse(imageBuffer, {
+    return new NextResponse(response.body, {
       headers: {
         "Content-Type": contentType,
         "Cache-Control": "public, max-age=300",
Some files were not shown because too many files have changed in this diff