Compare commits
24 Commits
24516f1069
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
| 0c42a9ed04 | |||
| 95a6e54d06 | |||
| e26219989f | |||
| 5d33a35407 | |||
| d53572dc33 | |||
| cf1953d11f | |||
| 6f663eaee7 | |||
| ee65c6263a | |||
| 691b6b22ab | |||
| 11c80a16a3 | |||
| c366b44c54 | |||
| 92f80542e6 | |||
| 3a25e42a20 | |||
| 24763bf5a7 | |||
| 08f0397029 | |||
| 766e3a01b2 | |||
| 626e2e035d | |||
| cfd2321db2 | |||
| 1b715033ce | |||
| 81d1586501 | |||
| bd74c9e3e3 | |||
| 41228430cf | |||
| 6a4ba06fac | |||
| e5c3542d3f |
25
Cargo.lock
generated
25
Cargo.lock
generated
@@ -64,7 +64,7 @@ checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "api"
|
name = "api"
|
||||||
version = "1.21.1"
|
version = "1.27.1"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"anyhow",
|
"anyhow",
|
||||||
"argon2",
|
"argon2",
|
||||||
@@ -76,6 +76,7 @@ dependencies = [
|
|||||||
"image",
|
"image",
|
||||||
"jpeg-decoder",
|
"jpeg-decoder",
|
||||||
"lru",
|
"lru",
|
||||||
|
"notifications",
|
||||||
"parsers",
|
"parsers",
|
||||||
"rand 0.8.5",
|
"rand 0.8.5",
|
||||||
"regex",
|
"regex",
|
||||||
@@ -1232,7 +1233,7 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "indexer"
|
name = "indexer"
|
||||||
version = "1.21.1"
|
version = "1.27.1"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"anyhow",
|
"anyhow",
|
||||||
"axum",
|
"axum",
|
||||||
@@ -1240,6 +1241,7 @@ dependencies = [
|
|||||||
"futures",
|
"futures",
|
||||||
"image",
|
"image",
|
||||||
"jpeg-decoder",
|
"jpeg-decoder",
|
||||||
|
"notifications",
|
||||||
"num_cpus",
|
"num_cpus",
|
||||||
"parsers",
|
"parsers",
|
||||||
"reqwest",
|
"reqwest",
|
||||||
@@ -1663,6 +1665,19 @@ dependencies = [
|
|||||||
"nom",
|
"nom",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "notifications"
|
||||||
|
version = "1.27.1"
|
||||||
|
dependencies = [
|
||||||
|
"anyhow",
|
||||||
|
"reqwest",
|
||||||
|
"serde",
|
||||||
|
"serde_json",
|
||||||
|
"sqlx",
|
||||||
|
"tokio",
|
||||||
|
"tracing",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "nu-ansi-term"
|
name = "nu-ansi-term"
|
||||||
version = "0.50.3"
|
version = "0.50.3"
|
||||||
@@ -1771,7 +1786,7 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "parsers"
|
name = "parsers"
|
||||||
version = "1.21.1"
|
version = "1.27.1"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"anyhow",
|
"anyhow",
|
||||||
"flate2",
|
"flate2",
|
||||||
@@ -2270,6 +2285,7 @@ dependencies = [
|
|||||||
"base64",
|
"base64",
|
||||||
"bytes",
|
"bytes",
|
||||||
"futures-core",
|
"futures-core",
|
||||||
|
"futures-util",
|
||||||
"http",
|
"http",
|
||||||
"http-body",
|
"http-body",
|
||||||
"http-body-util",
|
"http-body-util",
|
||||||
@@ -2278,6 +2294,7 @@ dependencies = [
|
|||||||
"hyper-util",
|
"hyper-util",
|
||||||
"js-sys",
|
"js-sys",
|
||||||
"log",
|
"log",
|
||||||
|
"mime_guess",
|
||||||
"percent-encoding",
|
"percent-encoding",
|
||||||
"pin-project-lite",
|
"pin-project-lite",
|
||||||
"quinn",
|
"quinn",
|
||||||
@@ -2906,7 +2923,7 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "stripstream-core"
|
name = "stripstream-core"
|
||||||
version = "1.21.1"
|
version = "1.27.1"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"anyhow",
|
"anyhow",
|
||||||
"serde",
|
"serde",
|
||||||
|
|||||||
@@ -3,13 +3,14 @@ members = [
|
|||||||
"apps/api",
|
"apps/api",
|
||||||
"apps/indexer",
|
"apps/indexer",
|
||||||
"crates/core",
|
"crates/core",
|
||||||
|
"crates/notifications",
|
||||||
"crates/parsers",
|
"crates/parsers",
|
||||||
]
|
]
|
||||||
resolver = "2"
|
resolver = "2"
|
||||||
|
|
||||||
[workspace.package]
|
[workspace.package]
|
||||||
edition = "2021"
|
edition = "2021"
|
||||||
version = "1.21.1"
|
version = "1.27.1"
|
||||||
license = "MIT"
|
license = "MIT"
|
||||||
|
|
||||||
[workspace.dependencies]
|
[workspace.dependencies]
|
||||||
@@ -22,7 +23,7 @@ image = { version = "0.25", default-features = false, features = ["jpeg", "png",
|
|||||||
jpeg-decoder = "0.3"
|
jpeg-decoder = "0.3"
|
||||||
lru = "0.12"
|
lru = "0.12"
|
||||||
rayon = "1.10"
|
rayon = "1.10"
|
||||||
reqwest = { version = "0.12", default-features = false, features = ["json", "rustls-tls"] }
|
reqwest = { version = "0.12", default-features = false, features = ["json", "multipart", "rustls-tls"] }
|
||||||
rand = "0.8"
|
rand = "0.8"
|
||||||
serde = { version = "1.0", features = ["derive"] }
|
serde = { version = "1.0", features = ["derive"] }
|
||||||
serde_json = "1.0"
|
serde_json = "1.0"
|
||||||
|
|||||||
21
LICENSE
Normal file
21
LICENSE
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) 2026 Julien Froidefond
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
78
README.md
78
README.md
@@ -81,28 +81,66 @@ The backoffice will be available at http://localhost:7082
|
|||||||
|
|
||||||
## Features
|
## Features
|
||||||
|
|
||||||
### Libraries Management
|
> For the full feature list, business rules, and API details, see [docs/FEATURES.md](docs/FEATURES.md).
|
||||||
- Create and manage multiple libraries
|
|
||||||
- Configure automatic scanning schedules (hourly, daily, weekly)
|
|
||||||
- Real-time file watcher for instant indexing
|
|
||||||
- Full and incremental rebuild options
|
|
||||||
|
|
||||||
### Books Management
|
### Libraries
|
||||||
- Support for CBZ, CBR, and PDF formats
|
- Multi-library management with per-library configuration
|
||||||
- Automatic metadata extraction
|
- Incremental and full scanning, real-time filesystem watcher
|
||||||
- Series and volume detection
|
- Per-library metadata provider selection (Google Books, ComicVine, BedéThèque, AniList, Open Library)
|
||||||
- Full-text search powered by PostgreSQL
|
|
||||||
|
|
||||||
### Jobs Monitoring
|
### Books & Series
|
||||||
- Real-time job progress tracking
|
- **Formats**: CBZ, CBR, PDF, EPUB
|
||||||
- Detailed statistics (scanned, indexed, removed, errors)
|
- Automatic metadata extraction (title, series, volume, authors, page count) from filenames and directory structure
|
||||||
- Job history and logs
|
- Series aggregation with missing volume detection
|
||||||
- Cancel pending jobs
|
- Thumbnail generation (WebP/JPEG/PNG) with lazy generation and bulk rebuild
|
||||||
|
- CBR → CBZ conversion
|
||||||
|
|
||||||
### Search
|
### Reading Progress
|
||||||
- Full-text search across titles, authors, and series
|
- Per-book tracking: unread / reading / read with current page
|
||||||
- Library filtering
|
- Series-level aggregated reading status
|
||||||
- Real-time suggestions
|
- Bulk mark-as-read for series
|
||||||
|
|
||||||
|
### Search & Discovery
|
||||||
|
- Full-text search across titles, authors, and series (PostgreSQL `pg_trgm`)
|
||||||
|
- Author listing with book/series counts
|
||||||
|
- Filtering by reading status, series status, format, metadata provider
|
||||||
|
|
||||||
|
### External Metadata
|
||||||
|
- Search, match, approve/reject workflow with confidence scoring
|
||||||
|
- Batch auto-matching and scheduled metadata refresh
|
||||||
|
- Field locking to protect manual edits from sync
|
||||||
|
|
||||||
|
### Notifications
|
||||||
|
- **Telegram**: real-time notifications via Telegram Bot API
|
||||||
|
- 12 granular event toggles (scans, thumbnails, conversions, metadata)
|
||||||
|
- Book thumbnail images included in notifications where applicable
|
||||||
|
- Test connection from settings
|
||||||
|
|
||||||
|
### External Integrations
|
||||||
|
- **Komga**: import reading progress
|
||||||
|
- **Prowlarr**: search for missing volumes
|
||||||
|
- **qBittorrent**: add torrents directly from search results
|
||||||
|
|
||||||
|
### Background Jobs
|
||||||
|
- Rebuild, rescan, thumbnail generation, metadata batch, CBR conversion
|
||||||
|
- Real-time progress via Server-Sent Events (SSE)
|
||||||
|
- Job history, error tracking, cancellation
|
||||||
|
|
||||||
|
### Page Rendering
|
||||||
|
- On-demand page extraction from all formats
|
||||||
|
- Image processing (format, quality, max width, resampling filter)
|
||||||
|
- LRU in-memory + disk cache
|
||||||
|
|
||||||
|
### Security
|
||||||
|
- Token-based auth (`admin` / `read` scopes) with Argon2 hashing
|
||||||
|
- Rate limiting, token expiration and revocation
|
||||||
|
|
||||||
|
### Web UI (Backoffice)
|
||||||
|
- Dashboard with statistics, interactive charts (recharts), and reading progress
|
||||||
|
- Currently reading & recently read sections
|
||||||
|
- Library, book, series, author management
|
||||||
|
- Live job monitoring, metadata search modals, settings panel
|
||||||
|
- Notification settings with per-event toggle configuration
|
||||||
|
|
||||||
## Environment Variables
|
## Environment Variables
|
||||||
|
|
||||||
@@ -249,4 +287,4 @@ volumes:
|
|||||||
|
|
||||||
## License
|
## License
|
||||||
|
|
||||||
[Your License Here]
|
This project is licensed under the [MIT License](LICENSE).
|
||||||
|
|||||||
@@ -15,6 +15,7 @@ futures = "0.3"
|
|||||||
image.workspace = true
|
image.workspace = true
|
||||||
jpeg-decoder.workspace = true
|
jpeg-decoder.workspace = true
|
||||||
lru.workspace = true
|
lru.workspace = true
|
||||||
|
notifications = { path = "../../crates/notifications" }
|
||||||
stripstream-core = { path = "../../crates/core" }
|
stripstream-core = { path = "../../crates/core" }
|
||||||
parsers = { path = "../../crates/parsers" }
|
parsers = { path = "../../crates/parsers" }
|
||||||
rand.workspace = true
|
rand.workspace = true
|
||||||
|
|||||||
@@ -6,13 +6,15 @@ COPY Cargo.toml ./
|
|||||||
COPY apps/api/Cargo.toml apps/api/Cargo.toml
|
COPY apps/api/Cargo.toml apps/api/Cargo.toml
|
||||||
COPY apps/indexer/Cargo.toml apps/indexer/Cargo.toml
|
COPY apps/indexer/Cargo.toml apps/indexer/Cargo.toml
|
||||||
COPY crates/core/Cargo.toml crates/core/Cargo.toml
|
COPY crates/core/Cargo.toml crates/core/Cargo.toml
|
||||||
|
COPY crates/notifications/Cargo.toml crates/notifications/Cargo.toml
|
||||||
COPY crates/parsers/Cargo.toml crates/parsers/Cargo.toml
|
COPY crates/parsers/Cargo.toml crates/parsers/Cargo.toml
|
||||||
|
|
||||||
RUN mkdir -p apps/api/src apps/indexer/src crates/core/src crates/parsers/src && \
|
RUN mkdir -p apps/api/src apps/indexer/src crates/core/src crates/notifications/src crates/parsers/src && \
|
||||||
echo "fn main() {}" > apps/api/src/main.rs && \
|
echo "fn main() {}" > apps/api/src/main.rs && \
|
||||||
echo "fn main() {}" > apps/indexer/src/main.rs && \
|
echo "fn main() {}" > apps/indexer/src/main.rs && \
|
||||||
echo "" > apps/indexer/src/lib.rs && \
|
echo "" > apps/indexer/src/lib.rs && \
|
||||||
echo "" > crates/core/src/lib.rs && \
|
echo "" > crates/core/src/lib.rs && \
|
||||||
|
echo "" > crates/notifications/src/lib.rs && \
|
||||||
echo "" > crates/parsers/src/lib.rs
|
echo "" > crates/parsers/src/lib.rs
|
||||||
|
|
||||||
# Build dependencies only (cached as long as Cargo.toml files don't change)
|
# Build dependencies only (cached as long as Cargo.toml files don't change)
|
||||||
@@ -26,12 +28,13 @@ RUN --mount=type=cache,target=/usr/local/cargo/registry \
|
|||||||
COPY apps/api/src apps/api/src
|
COPY apps/api/src apps/api/src
|
||||||
COPY apps/indexer/src apps/indexer/src
|
COPY apps/indexer/src apps/indexer/src
|
||||||
COPY crates/core/src crates/core/src
|
COPY crates/core/src crates/core/src
|
||||||
|
COPY crates/notifications/src crates/notifications/src
|
||||||
COPY crates/parsers/src crates/parsers/src
|
COPY crates/parsers/src crates/parsers/src
|
||||||
|
|
||||||
RUN --mount=type=cache,target=/usr/local/cargo/registry \
|
RUN --mount=type=cache,target=/usr/local/cargo/registry \
|
||||||
--mount=type=cache,target=/usr/local/cargo/git \
|
--mount=type=cache,target=/usr/local/cargo/git \
|
||||||
--mount=type=cache,target=/app/target \
|
--mount=type=cache,target=/app/target \
|
||||||
touch apps/api/src/main.rs crates/core/src/lib.rs crates/parsers/src/lib.rs && \
|
touch apps/api/src/main.rs crates/core/src/lib.rs crates/notifications/src/lib.rs crates/parsers/src/lib.rs && \
|
||||||
cargo build --release -p api && \
|
cargo build --release -p api && \
|
||||||
cp /app/target/release/api /usr/local/bin/api
|
cp /app/target/release/api /usr/local/bin/api
|
||||||
|
|
||||||
|
|||||||
@@ -68,7 +68,7 @@ pub async fn list_authors(
|
|||||||
.filter(|s| !s.trim().is_empty())
|
.filter(|s| !s.trim().is_empty())
|
||||||
.map(|s| format!("%{s}%"));
|
.map(|s| format!("%{s}%"));
|
||||||
|
|
||||||
// Aggregate unique authors from books.authors + books.author
|
// Aggregate unique authors from books.authors + books.author + series_metadata.authors
|
||||||
let sql = format!(
|
let sql = format!(
|
||||||
r#"
|
r#"
|
||||||
WITH all_authors AS (
|
WITH all_authors AS (
|
||||||
@@ -79,18 +79,21 @@ pub async fn list_authors(
|
|||||||
)
|
)
|
||||||
) AS name
|
) AS name
|
||||||
FROM books
|
FROM books
|
||||||
|
UNION
|
||||||
|
SELECT DISTINCT UNNEST(authors) AS name
|
||||||
|
FROM series_metadata
|
||||||
|
WHERE authors != '{{}}'
|
||||||
),
|
),
|
||||||
filtered AS (
|
filtered AS (
|
||||||
SELECT name FROM all_authors
|
SELECT name FROM all_authors
|
||||||
WHERE ($1::text IS NULL OR name ILIKE $1)
|
WHERE ($1::text IS NULL OR name ILIKE $1)
|
||||||
),
|
),
|
||||||
counted AS (
|
book_counts AS (
|
||||||
SELECT
|
SELECT
|
||||||
f.name,
|
f.name AS author_name,
|
||||||
COUNT(DISTINCT b.id) AS book_count,
|
COUNT(DISTINCT b.id) AS book_count
|
||||||
COUNT(DISTINCT NULLIF(b.series, '')) AS series_count
|
|
||||||
FROM filtered f
|
FROM filtered f
|
||||||
JOIN books b ON (
|
LEFT JOIN books b ON (
|
||||||
f.name = ANY(
|
f.name = ANY(
|
||||||
COALESCE(
|
COALESCE(
|
||||||
NULLIF(b.authors, '{{}}'),
|
NULLIF(b.authors, '{{}}'),
|
||||||
@@ -99,9 +102,24 @@ pub async fn list_authors(
|
|||||||
)
|
)
|
||||||
)
|
)
|
||||||
GROUP BY f.name
|
GROUP BY f.name
|
||||||
|
),
|
||||||
|
series_counts AS (
|
||||||
|
SELECT
|
||||||
|
f.name AS author_name,
|
||||||
|
COUNT(DISTINCT (sm.library_id, sm.name)) AS series_count
|
||||||
|
FROM filtered f
|
||||||
|
LEFT JOIN series_metadata sm ON (
|
||||||
|
f.name = ANY(sm.authors) AND sm.authors != '{{}}'
|
||||||
|
)
|
||||||
|
GROUP BY f.name
|
||||||
)
|
)
|
||||||
SELECT name, book_count, series_count
|
SELECT
|
||||||
FROM counted
|
f.name,
|
||||||
|
COALESCE(bc.book_count, 0) AS book_count,
|
||||||
|
COALESCE(sc.series_count, 0) AS series_count
|
||||||
|
FROM filtered f
|
||||||
|
LEFT JOIN book_counts bc ON bc.author_name = f.name
|
||||||
|
LEFT JOIN series_counts sc ON sc.author_name = f.name
|
||||||
ORDER BY {order_clause}
|
ORDER BY {order_clause}
|
||||||
LIMIT $2 OFFSET $3
|
LIMIT $2 OFFSET $3
|
||||||
"#
|
"#
|
||||||
@@ -116,6 +134,10 @@ pub async fn list_authors(
|
|||||||
)
|
)
|
||||||
) AS name
|
) AS name
|
||||||
FROM books
|
FROM books
|
||||||
|
UNION
|
||||||
|
SELECT DISTINCT UNNEST(authors) AS name
|
||||||
|
FROM series_metadata
|
||||||
|
WHERE authors != '{}'
|
||||||
)
|
)
|
||||||
SELECT COUNT(*) AS total
|
SELECT COUNT(*) AS total
|
||||||
FROM all_authors
|
FROM all_authors
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
134
apps/api/src/job_poller.rs
Normal file
134
apps/api/src/job_poller.rs
Normal file
@@ -0,0 +1,134 @@
|
|||||||
|
use std::time::Duration;
|
||||||
|
|
||||||
|
use sqlx::{PgPool, Row};
|
||||||
|
use tracing::{error, info, trace};
|
||||||
|
use uuid::Uuid;
|
||||||
|
|
||||||
|
use crate::{metadata_batch, metadata_refresh};
|
||||||
|
|
||||||
|
/// Poll for pending API-only jobs (`metadata_batch`, `metadata_refresh`) and process them.
|
||||||
|
/// This mirrors the indexer's worker loop but for job types handled by the API.
|
||||||
|
pub async fn run_job_poller(pool: PgPool, interval_seconds: u64) {
|
||||||
|
let wait = Duration::from_secs(interval_seconds.max(1));
|
||||||
|
|
||||||
|
loop {
|
||||||
|
match claim_next_api_job(&pool).await {
|
||||||
|
Ok(Some((job_id, job_type, library_id))) => {
|
||||||
|
info!("[JOB_POLLER] Claimed {job_type} job {job_id} library={library_id}");
|
||||||
|
|
||||||
|
let pool_clone = pool.clone();
|
||||||
|
let library_name: Option<String> =
|
||||||
|
sqlx::query_scalar("SELECT name FROM libraries WHERE id = $1")
|
||||||
|
.bind(library_id)
|
||||||
|
.fetch_optional(&pool)
|
||||||
|
.await
|
||||||
|
.ok()
|
||||||
|
.flatten();
|
||||||
|
|
||||||
|
tokio::spawn(async move {
|
||||||
|
let result = match job_type.as_str() {
|
||||||
|
"metadata_refresh" => {
|
||||||
|
metadata_refresh::process_metadata_refresh(
|
||||||
|
&pool_clone,
|
||||||
|
job_id,
|
||||||
|
library_id,
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
}
|
||||||
|
"metadata_batch" => {
|
||||||
|
metadata_batch::process_metadata_batch(
|
||||||
|
&pool_clone,
|
||||||
|
job_id,
|
||||||
|
library_id,
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
}
|
||||||
|
_ => Err(format!("Unknown API job type: {job_type}")),
|
||||||
|
};
|
||||||
|
|
||||||
|
if let Err(e) = result {
|
||||||
|
error!("[JOB_POLLER] {job_type} job {job_id} failed: {e}");
|
||||||
|
let _ = sqlx::query(
|
||||||
|
"UPDATE index_jobs SET status = 'failed', error_opt = $2, finished_at = NOW() WHERE id = $1",
|
||||||
|
)
|
||||||
|
.bind(job_id)
|
||||||
|
.bind(e.to_string())
|
||||||
|
.execute(&pool_clone)
|
||||||
|
.await;
|
||||||
|
|
||||||
|
match job_type.as_str() {
|
||||||
|
"metadata_refresh" => {
|
||||||
|
notifications::notify(
|
||||||
|
pool_clone,
|
||||||
|
notifications::NotificationEvent::MetadataRefreshFailed {
|
||||||
|
library_name,
|
||||||
|
error: e.to_string(),
|
||||||
|
},
|
||||||
|
);
|
||||||
|
}
|
||||||
|
"metadata_batch" => {
|
||||||
|
notifications::notify(
|
||||||
|
pool_clone,
|
||||||
|
notifications::NotificationEvent::MetadataBatchFailed {
|
||||||
|
library_name,
|
||||||
|
error: e.to_string(),
|
||||||
|
},
|
||||||
|
);
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
Ok(None) => {
|
||||||
|
trace!("[JOB_POLLER] No pending API jobs, waiting...");
|
||||||
|
tokio::time::sleep(wait).await;
|
||||||
|
}
|
||||||
|
Err(err) => {
|
||||||
|
error!("[JOB_POLLER] Error claiming job: {err}");
|
||||||
|
tokio::time::sleep(wait).await;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const API_JOB_TYPES: &[&str] = &["metadata_batch", "metadata_refresh"];
|
||||||
|
|
||||||
|
async fn claim_next_api_job(pool: &PgPool) -> Result<Option<(Uuid, String, Uuid)>, sqlx::Error> {
|
||||||
|
let mut tx = pool.begin().await?;
|
||||||
|
|
||||||
|
let row = sqlx::query(
|
||||||
|
r#"
|
||||||
|
SELECT id, type, library_id
|
||||||
|
FROM index_jobs
|
||||||
|
WHERE status = 'pending'
|
||||||
|
AND type = ANY($1)
|
||||||
|
AND library_id IS NOT NULL
|
||||||
|
ORDER BY created_at ASC
|
||||||
|
FOR UPDATE SKIP LOCKED
|
||||||
|
LIMIT 1
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.bind(API_JOB_TYPES)
|
||||||
|
.fetch_optional(&mut *tx)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let Some(row) = row else {
|
||||||
|
tx.commit().await?;
|
||||||
|
return Ok(None);
|
||||||
|
};
|
||||||
|
|
||||||
|
let id: Uuid = row.get("id");
|
||||||
|
let job_type: String = row.get("type");
|
||||||
|
let library_id: Uuid = row.get("library_id");
|
||||||
|
|
||||||
|
sqlx::query(
|
||||||
|
"UPDATE index_jobs SET status = 'running', started_at = NOW(), error_opt = NULL WHERE id = $1",
|
||||||
|
)
|
||||||
|
.bind(id)
|
||||||
|
.execute(&mut *tx)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
tx.commit().await?;
|
||||||
|
Ok(Some((id, job_type, library_id)))
|
||||||
|
}
|
||||||
@@ -48,7 +48,6 @@ pub struct CreateLibraryRequest {
|
|||||||
responses(
|
responses(
|
||||||
(status = 200, body = Vec<LibraryResponse>),
|
(status = 200, body = Vec<LibraryResponse>),
|
||||||
(status = 401, description = "Unauthorized"),
|
(status = 401, description = "Unauthorized"),
|
||||||
(status = 403, description = "Forbidden - Admin scope required"),
|
|
||||||
),
|
),
|
||||||
security(("Bearer" = []))
|
security(("Bearer" = []))
|
||||||
)]
|
)]
|
||||||
@@ -221,7 +220,6 @@ use crate::index_jobs::{IndexJobResponse, RebuildRequest};
|
|||||||
(status = 200, body = IndexJobResponse),
|
(status = 200, body = IndexJobResponse),
|
||||||
(status = 404, description = "Library not found"),
|
(status = 404, description = "Library not found"),
|
||||||
(status = 401, description = "Unauthorized"),
|
(status = 401, description = "Unauthorized"),
|
||||||
(status = 403, description = "Forbidden - Admin scope required"),
|
|
||||||
),
|
),
|
||||||
security(("Bearer" = []))
|
security(("Bearer" = []))
|
||||||
)]
|
)]
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ mod books;
|
|||||||
mod error;
|
mod error;
|
||||||
mod handlers;
|
mod handlers;
|
||||||
mod index_jobs;
|
mod index_jobs;
|
||||||
|
mod job_poller;
|
||||||
mod komga;
|
mod komga;
|
||||||
mod libraries;
|
mod libraries;
|
||||||
mod metadata;
|
mod metadata;
|
||||||
@@ -17,9 +18,11 @@ mod prowlarr;
|
|||||||
mod qbittorrent;
|
mod qbittorrent;
|
||||||
mod reading_progress;
|
mod reading_progress;
|
||||||
mod search;
|
mod search;
|
||||||
|
mod series;
|
||||||
mod settings;
|
mod settings;
|
||||||
mod state;
|
mod state;
|
||||||
mod stats;
|
mod stats;
|
||||||
|
mod telegram;
|
||||||
mod thumbnails;
|
mod thumbnails;
|
||||||
mod tokens;
|
mod tokens;
|
||||||
|
|
||||||
@@ -86,14 +89,13 @@ async fn main() -> anyhow::Result<()> {
|
|||||||
};
|
};
|
||||||
|
|
||||||
let admin_routes = Router::new()
|
let admin_routes = Router::new()
|
||||||
.route("/libraries", get(libraries::list_libraries).post(libraries::create_library))
|
.route("/libraries", axum::routing::post(libraries::create_library))
|
||||||
.route("/libraries/:id", delete(libraries::delete_library))
|
.route("/libraries/:id", delete(libraries::delete_library))
|
||||||
.route("/libraries/:id/scan", axum::routing::post(libraries::scan_library))
|
|
||||||
.route("/libraries/:id/monitoring", axum::routing::patch(libraries::update_monitoring))
|
.route("/libraries/:id/monitoring", axum::routing::patch(libraries::update_monitoring))
|
||||||
.route("/libraries/:id/metadata-provider", axum::routing::patch(libraries::update_metadata_provider))
|
.route("/libraries/:id/metadata-provider", axum::routing::patch(libraries::update_metadata_provider))
|
||||||
.route("/books/:id", axum::routing::patch(books::update_book))
|
.route("/books/:id", axum::routing::patch(books::update_book))
|
||||||
.route("/books/:id/convert", axum::routing::post(books::convert_book))
|
.route("/books/:id/convert", axum::routing::post(books::convert_book))
|
||||||
.route("/libraries/:library_id/series/:name", axum::routing::patch(books::update_series))
|
.route("/libraries/:library_id/series/:name", axum::routing::patch(series::update_series))
|
||||||
.route("/index/rebuild", axum::routing::post(index_jobs::enqueue_rebuild))
|
.route("/index/rebuild", axum::routing::post(index_jobs::enqueue_rebuild))
|
||||||
.route("/index/thumbnails/rebuild", axum::routing::post(thumbnails::start_thumbnails_rebuild))
|
.route("/index/thumbnails/rebuild", axum::routing::post(thumbnails::start_thumbnails_rebuild))
|
||||||
.route("/index/thumbnails/regenerate", axum::routing::post(thumbnails::start_thumbnails_regenerate))
|
.route("/index/thumbnails/regenerate", axum::routing::post(thumbnails::start_thumbnails_regenerate))
|
||||||
@@ -111,6 +113,7 @@ async fn main() -> anyhow::Result<()> {
|
|||||||
.route("/prowlarr/test", get(prowlarr::test_prowlarr))
|
.route("/prowlarr/test", get(prowlarr::test_prowlarr))
|
||||||
.route("/qbittorrent/add", axum::routing::post(qbittorrent::add_torrent))
|
.route("/qbittorrent/add", axum::routing::post(qbittorrent::add_torrent))
|
||||||
.route("/qbittorrent/test", get(qbittorrent::test_qbittorrent))
|
.route("/qbittorrent/test", get(qbittorrent::test_qbittorrent))
|
||||||
|
.route("/telegram/test", get(telegram::test_telegram))
|
||||||
.route("/komga/sync", axum::routing::post(komga::sync_komga_read_books))
|
.route("/komga/sync", axum::routing::post(komga::sync_komga_read_books))
|
||||||
.route("/komga/reports", get(komga::list_sync_reports))
|
.route("/komga/reports", get(komga::list_sync_reports))
|
||||||
.route("/komga/reports/:id", get(komga::get_sync_report))
|
.route("/komga/reports/:id", get(komga::get_sync_report))
|
||||||
@@ -133,18 +136,20 @@ async fn main() -> anyhow::Result<()> {
|
|||||||
));
|
));
|
||||||
|
|
||||||
let read_routes = Router::new()
|
let read_routes = Router::new()
|
||||||
|
.route("/libraries", get(libraries::list_libraries))
|
||||||
|
.route("/libraries/:id/scan", axum::routing::post(libraries::scan_library))
|
||||||
.route("/books", get(books::list_books))
|
.route("/books", get(books::list_books))
|
||||||
.route("/books/ongoing", get(books::ongoing_books))
|
.route("/books/ongoing", get(series::ongoing_books))
|
||||||
.route("/books/:id", get(books::get_book))
|
.route("/books/:id", get(books::get_book))
|
||||||
.route("/books/:id/thumbnail", get(books::get_thumbnail))
|
.route("/books/:id/thumbnail", get(books::get_thumbnail))
|
||||||
.route("/books/:id/pages/:n", get(pages::get_page))
|
.route("/books/:id/pages/:n", get(pages::get_page))
|
||||||
.route("/books/:id/progress", get(reading_progress::get_reading_progress).patch(reading_progress::update_reading_progress))
|
.route("/books/:id/progress", get(reading_progress::get_reading_progress).patch(reading_progress::update_reading_progress))
|
||||||
.route("/libraries/:library_id/series", get(books::list_series))
|
.route("/libraries/:library_id/series", get(series::list_series))
|
||||||
.route("/libraries/:library_id/series/:name/metadata", get(books::get_series_metadata))
|
.route("/libraries/:library_id/series/:name/metadata", get(series::get_series_metadata))
|
||||||
.route("/series", get(books::list_all_series))
|
.route("/series", get(series::list_all_series))
|
||||||
.route("/series/ongoing", get(books::ongoing_series))
|
.route("/series/ongoing", get(series::ongoing_series))
|
||||||
.route("/series/statuses", get(books::series_statuses))
|
.route("/series/statuses", get(series::series_statuses))
|
||||||
.route("/series/provider-statuses", get(books::provider_statuses))
|
.route("/series/provider-statuses", get(series::provider_statuses))
|
||||||
.route("/series/mark-read", axum::routing::post(reading_progress::mark_series_read))
|
.route("/series/mark-read", axum::routing::post(reading_progress::mark_series_read))
|
||||||
.route("/authors", get(authors::list_authors))
|
.route("/authors", get(authors::list_authors))
|
||||||
.route("/stats", get(stats::get_stats))
|
.route("/stats", get(stats::get_stats))
|
||||||
@@ -155,6 +160,9 @@ async fn main() -> anyhow::Result<()> {
|
|||||||
auth::require_read,
|
auth::require_read,
|
||||||
));
|
));
|
||||||
|
|
||||||
|
// Clone pool before state is moved into the router
|
||||||
|
let poller_pool = state.pool.clone();
|
||||||
|
|
||||||
let app = Router::new()
|
let app = Router::new()
|
||||||
.route("/health", get(handlers::health))
|
.route("/health", get(handlers::health))
|
||||||
.route("/ready", get(handlers::ready))
|
.route("/ready", get(handlers::ready))
|
||||||
@@ -166,6 +174,11 @@ async fn main() -> anyhow::Result<()> {
|
|||||||
.layer(middleware::from_fn_with_state(state.clone(), api_middleware::request_counter))
|
.layer(middleware::from_fn_with_state(state.clone(), api_middleware::request_counter))
|
||||||
.with_state(state);
|
.with_state(state);
|
||||||
|
|
||||||
|
// Start background poller for API-only jobs (metadata_batch, metadata_refresh)
|
||||||
|
tokio::spawn(async move {
|
||||||
|
job_poller::run_job_poller(poller_pool, 5).await;
|
||||||
|
});
|
||||||
|
|
||||||
let listener = tokio::net::TcpListener::bind(&config.listen_addr).await?;
|
let listener = tokio::net::TcpListener::bind(&config.listen_addr).await?;
|
||||||
info!(addr = %config.listen_addr, "api listening");
|
info!(addr = %config.listen_addr, "api listening");
|
||||||
axum::serve(listener, app).await?;
|
axum::serve(listener, app).await?;
|
||||||
|
|||||||
@@ -369,6 +369,26 @@ pub async fn approve_metadata(
|
|||||||
.await?;
|
.await?;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Notify via Telegram (with first book thumbnail if available)
|
||||||
|
let provider_for_notif: String = row.get("provider");
|
||||||
|
let thumbnail_path: Option<String> = sqlx::query_scalar(
|
||||||
|
"SELECT thumbnail_path FROM books WHERE library_id = $1 AND series_name = $2 AND thumbnail_path IS NOT NULL ORDER BY sort_order LIMIT 1",
|
||||||
|
)
|
||||||
|
.bind(library_id)
|
||||||
|
.bind(&series_name)
|
||||||
|
.fetch_optional(&state.pool)
|
||||||
|
.await
|
||||||
|
.ok()
|
||||||
|
.flatten();
|
||||||
|
notifications::notify(
|
||||||
|
state.pool.clone(),
|
||||||
|
notifications::NotificationEvent::MetadataApproved {
|
||||||
|
series_name: series_name.clone(),
|
||||||
|
provider: provider_for_notif,
|
||||||
|
thumbnail_path,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
Ok(Json(ApproveResponse {
|
Ok(Json(ApproveResponse {
|
||||||
status: "approved".to_string(),
|
status: "approved".to_string(),
|
||||||
report,
|
report,
|
||||||
|
|||||||
@@ -115,15 +115,21 @@ pub async fn start_batch(
|
|||||||
|
|
||||||
let job_id = Uuid::new_v4();
|
let job_id = Uuid::new_v4();
|
||||||
sqlx::query(
|
sqlx::query(
|
||||||
"INSERT INTO index_jobs (id, library_id, type, status) VALUES ($1, $2, 'metadata_batch', 'pending')",
|
"INSERT INTO index_jobs (id, library_id, type, status, started_at) VALUES ($1, $2, 'metadata_batch', 'running', NOW())",
|
||||||
)
|
)
|
||||||
.bind(job_id)
|
.bind(job_id)
|
||||||
.bind(library_id)
|
.bind(library_id)
|
||||||
.execute(&state.pool)
|
.execute(&state.pool)
|
||||||
.await?;
|
.await?;
|
||||||
|
|
||||||
// Spawn the background processing task
|
// Spawn the background processing task (status already 'running' to avoid poller race)
|
||||||
let pool = state.pool.clone();
|
let pool = state.pool.clone();
|
||||||
|
let library_name: Option<String> = sqlx::query_scalar("SELECT name FROM libraries WHERE id = $1")
|
||||||
|
.bind(library_id)
|
||||||
|
.fetch_optional(&state.pool)
|
||||||
|
.await
|
||||||
|
.ok()
|
||||||
|
.flatten();
|
||||||
tokio::spawn(async move {
|
tokio::spawn(async move {
|
||||||
if let Err(e) = process_metadata_batch(&pool, job_id, library_id).await {
|
if let Err(e) = process_metadata_batch(&pool, job_id, library_id).await {
|
||||||
warn!("[METADATA_BATCH] job {job_id} failed: {e}");
|
warn!("[METADATA_BATCH] job {job_id} failed: {e}");
|
||||||
@@ -134,6 +140,13 @@ pub async fn start_batch(
|
|||||||
.bind(e.to_string())
|
.bind(e.to_string())
|
||||||
.execute(&pool)
|
.execute(&pool)
|
||||||
.await;
|
.await;
|
||||||
|
notifications::notify(
|
||||||
|
pool.clone(),
|
||||||
|
notifications::NotificationEvent::MetadataBatchFailed {
|
||||||
|
library_name,
|
||||||
|
error: e.to_string(),
|
||||||
|
},
|
||||||
|
);
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -300,7 +313,7 @@ pub async fn get_batch_results(
|
|||||||
// Background processing
|
// Background processing
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
async fn process_metadata_batch(
|
pub(crate) async fn process_metadata_batch(
|
||||||
pool: &PgPool,
|
pool: &PgPool,
|
||||||
job_id: Uuid,
|
job_id: Uuid,
|
||||||
library_id: Uuid,
|
library_id: Uuid,
|
||||||
@@ -621,6 +634,21 @@ async fn process_metadata_batch(
|
|||||||
|
|
||||||
info!("[METADATA_BATCH] job={job_id} completed: {processed}/{total} series processed");
|
info!("[METADATA_BATCH] job={job_id} completed: {processed}/{total} series processed");
|
||||||
|
|
||||||
|
let library_name: Option<String> = sqlx::query_scalar("SELECT name FROM libraries WHERE id = $1")
|
||||||
|
.bind(library_id)
|
||||||
|
.fetch_optional(pool)
|
||||||
|
.await
|
||||||
|
.ok()
|
||||||
|
.flatten();
|
||||||
|
notifications::notify(
|
||||||
|
pool.clone(),
|
||||||
|
notifications::NotificationEvent::MetadataBatchCompleted {
|
||||||
|
library_name,
|
||||||
|
total_series: total,
|
||||||
|
processed,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -124,15 +124,21 @@ pub async fn start_refresh(
|
|||||||
|
|
||||||
let job_id = Uuid::new_v4();
|
let job_id = Uuid::new_v4();
|
||||||
sqlx::query(
|
sqlx::query(
|
||||||
"INSERT INTO index_jobs (id, library_id, type, status) VALUES ($1, $2, 'metadata_refresh', 'pending')",
|
"INSERT INTO index_jobs (id, library_id, type, status, started_at) VALUES ($1, $2, 'metadata_refresh', 'running', NOW())",
|
||||||
)
|
)
|
||||||
.bind(job_id)
|
.bind(job_id)
|
||||||
.bind(library_id)
|
.bind(library_id)
|
||||||
.execute(&state.pool)
|
.execute(&state.pool)
|
||||||
.await?;
|
.await?;
|
||||||
|
|
||||||
// Spawn the background processing task
|
// Spawn the background processing task (status already 'running' to avoid poller race)
|
||||||
let pool = state.pool.clone();
|
let pool = state.pool.clone();
|
||||||
|
let library_name: Option<String> = sqlx::query_scalar("SELECT name FROM libraries WHERE id = $1")
|
||||||
|
.bind(library_id)
|
||||||
|
.fetch_optional(&state.pool)
|
||||||
|
.await
|
||||||
|
.ok()
|
||||||
|
.flatten();
|
||||||
tokio::spawn(async move {
|
tokio::spawn(async move {
|
||||||
if let Err(e) = process_metadata_refresh(&pool, job_id, library_id).await {
|
if let Err(e) = process_metadata_refresh(&pool, job_id, library_id).await {
|
||||||
warn!("[METADATA_REFRESH] job {job_id} failed: {e}");
|
warn!("[METADATA_REFRESH] job {job_id} failed: {e}");
|
||||||
@@ -143,6 +149,13 @@ pub async fn start_refresh(
|
|||||||
.bind(e.to_string())
|
.bind(e.to_string())
|
||||||
.execute(&pool)
|
.execute(&pool)
|
||||||
.await;
|
.await;
|
||||||
|
notifications::notify(
|
||||||
|
pool.clone(),
|
||||||
|
notifications::NotificationEvent::MetadataRefreshFailed {
|
||||||
|
library_name,
|
||||||
|
error: e.to_string(),
|
||||||
|
},
|
||||||
|
);
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -209,7 +222,7 @@ pub async fn get_refresh_report(
|
|||||||
// Background processing
|
// Background processing
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
async fn process_metadata_refresh(
|
pub(crate) async fn process_metadata_refresh(
|
||||||
pool: &PgPool,
|
pool: &PgPool,
|
||||||
job_id: Uuid,
|
job_id: Uuid,
|
||||||
library_id: Uuid,
|
library_id: Uuid,
|
||||||
@@ -319,6 +332,22 @@ async fn process_metadata_refresh(
|
|||||||
|
|
||||||
info!("[METADATA_REFRESH] job={job_id} completed: {refreshed} updated, {unchanged} unchanged, {errors} errors");
|
info!("[METADATA_REFRESH] job={job_id} completed: {refreshed} updated, {unchanged} unchanged, {errors} errors");
|
||||||
|
|
||||||
|
let library_name: Option<String> = sqlx::query_scalar("SELECT name FROM libraries WHERE id = $1")
|
||||||
|
.bind(library_id)
|
||||||
|
.fetch_optional(pool)
|
||||||
|
.await
|
||||||
|
.ok()
|
||||||
|
.flatten();
|
||||||
|
notifications::notify(
|
||||||
|
pool.clone(),
|
||||||
|
notifications::NotificationEvent::MetadataRefreshCompleted {
|
||||||
|
library_name,
|
||||||
|
refreshed,
|
||||||
|
unchanged,
|
||||||
|
errors,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -10,14 +10,14 @@ use utoipa::OpenApi;
|
|||||||
crate::reading_progress::update_reading_progress,
|
crate::reading_progress::update_reading_progress,
|
||||||
crate::reading_progress::mark_series_read,
|
crate::reading_progress::mark_series_read,
|
||||||
crate::books::get_thumbnail,
|
crate::books::get_thumbnail,
|
||||||
crate::books::list_series,
|
crate::series::list_series,
|
||||||
crate::books::list_all_series,
|
crate::series::list_all_series,
|
||||||
crate::books::ongoing_series,
|
crate::series::ongoing_series,
|
||||||
crate::books::ongoing_books,
|
crate::series::ongoing_books,
|
||||||
crate::books::convert_book,
|
crate::books::convert_book,
|
||||||
crate::books::update_book,
|
crate::books::update_book,
|
||||||
crate::books::get_series_metadata,
|
crate::series::get_series_metadata,
|
||||||
crate::books::update_series,
|
crate::series::update_series,
|
||||||
crate::pages::get_page,
|
crate::pages::get_page,
|
||||||
crate::search::search_books,
|
crate::search::search_books,
|
||||||
crate::index_jobs::enqueue_rebuild,
|
crate::index_jobs::enqueue_rebuild,
|
||||||
@@ -35,6 +35,7 @@ use utoipa::OpenApi;
|
|||||||
crate::libraries::delete_library,
|
crate::libraries::delete_library,
|
||||||
crate::libraries::scan_library,
|
crate::libraries::scan_library,
|
||||||
crate::libraries::update_monitoring,
|
crate::libraries::update_monitoring,
|
||||||
|
crate::libraries::update_metadata_provider,
|
||||||
crate::tokens::list_tokens,
|
crate::tokens::list_tokens,
|
||||||
crate::tokens::create_token,
|
crate::tokens::create_token,
|
||||||
crate::tokens::revoke_token,
|
crate::tokens::revoke_token,
|
||||||
@@ -54,8 +55,8 @@ use utoipa::OpenApi;
|
|||||||
crate::metadata::get_metadata_links,
|
crate::metadata::get_metadata_links,
|
||||||
crate::metadata::get_missing_books,
|
crate::metadata::get_missing_books,
|
||||||
crate::metadata::delete_metadata_link,
|
crate::metadata::delete_metadata_link,
|
||||||
crate::books::series_statuses,
|
crate::series::series_statuses,
|
||||||
crate::books::provider_statuses,
|
crate::series::provider_statuses,
|
||||||
crate::settings::list_status_mappings,
|
crate::settings::list_status_mappings,
|
||||||
crate::settings::upsert_status_mapping,
|
crate::settings::upsert_status_mapping,
|
||||||
crate::settings::delete_status_mapping,
|
crate::settings::delete_status_mapping,
|
||||||
@@ -63,6 +64,14 @@ use utoipa::OpenApi;
|
|||||||
crate::prowlarr::test_prowlarr,
|
crate::prowlarr::test_prowlarr,
|
||||||
crate::qbittorrent::add_torrent,
|
crate::qbittorrent::add_torrent,
|
||||||
crate::qbittorrent::test_qbittorrent,
|
crate::qbittorrent::test_qbittorrent,
|
||||||
|
crate::metadata_batch::start_batch,
|
||||||
|
crate::metadata_batch::get_batch_report,
|
||||||
|
crate::metadata_batch::get_batch_results,
|
||||||
|
crate::metadata_refresh::start_refresh,
|
||||||
|
crate::metadata_refresh::get_refresh_report,
|
||||||
|
crate::komga::sync_komga_read_books,
|
||||||
|
crate::komga::list_sync_reports,
|
||||||
|
crate::komga::get_sync_report,
|
||||||
),
|
),
|
||||||
components(
|
components(
|
||||||
schemas(
|
schemas(
|
||||||
@@ -74,14 +83,14 @@ use utoipa::OpenApi;
|
|||||||
crate::reading_progress::UpdateReadingProgressRequest,
|
crate::reading_progress::UpdateReadingProgressRequest,
|
||||||
crate::reading_progress::MarkSeriesReadRequest,
|
crate::reading_progress::MarkSeriesReadRequest,
|
||||||
crate::reading_progress::MarkSeriesReadResponse,
|
crate::reading_progress::MarkSeriesReadResponse,
|
||||||
crate::books::SeriesItem,
|
crate::series::SeriesItem,
|
||||||
crate::books::SeriesPage,
|
crate::series::SeriesPage,
|
||||||
crate::books::ListAllSeriesQuery,
|
crate::series::ListAllSeriesQuery,
|
||||||
crate::books::OngoingQuery,
|
crate::series::OngoingQuery,
|
||||||
crate::books::UpdateBookRequest,
|
crate::books::UpdateBookRequest,
|
||||||
crate::books::SeriesMetadata,
|
crate::series::SeriesMetadata,
|
||||||
crate::books::UpdateSeriesRequest,
|
crate::series::UpdateSeriesRequest,
|
||||||
crate::books::UpdateSeriesResponse,
|
crate::series::UpdateSeriesResponse,
|
||||||
crate::pages::PageQuery,
|
crate::pages::PageQuery,
|
||||||
crate::search::SearchQuery,
|
crate::search::SearchQuery,
|
||||||
crate::search::SearchResponse,
|
crate::search::SearchResponse,
|
||||||
@@ -96,6 +105,7 @@ use utoipa::OpenApi;
|
|||||||
crate::libraries::LibraryResponse,
|
crate::libraries::LibraryResponse,
|
||||||
crate::libraries::CreateLibraryRequest,
|
crate::libraries::CreateLibraryRequest,
|
||||||
crate::libraries::UpdateMonitoringRequest,
|
crate::libraries::UpdateMonitoringRequest,
|
||||||
|
crate::libraries::UpdateMetadataProviderRequest,
|
||||||
crate::tokens::CreateTokenRequest,
|
crate::tokens::CreateTokenRequest,
|
||||||
crate::tokens::TokenResponse,
|
crate::tokens::TokenResponse,
|
||||||
crate::tokens::CreatedTokenResponse,
|
crate::tokens::CreatedTokenResponse,
|
||||||
@@ -137,7 +147,16 @@ use utoipa::OpenApi;
|
|||||||
crate::prowlarr::ProwlarrRelease,
|
crate::prowlarr::ProwlarrRelease,
|
||||||
crate::prowlarr::ProwlarrCategory,
|
crate::prowlarr::ProwlarrCategory,
|
||||||
crate::prowlarr::ProwlarrSearchResponse,
|
crate::prowlarr::ProwlarrSearchResponse,
|
||||||
|
crate::prowlarr::MissingVolumeInput,
|
||||||
crate::prowlarr::ProwlarrTestResponse,
|
crate::prowlarr::ProwlarrTestResponse,
|
||||||
|
crate::metadata_batch::MetadataBatchRequest,
|
||||||
|
crate::metadata_batch::MetadataBatchReportDto,
|
||||||
|
crate::metadata_batch::MetadataBatchResultDto,
|
||||||
|
crate::metadata_refresh::MetadataRefreshRequest,
|
||||||
|
crate::metadata_refresh::MetadataRefreshReportDto,
|
||||||
|
crate::komga::KomgaSyncRequest,
|
||||||
|
crate::komga::KomgaSyncResponse,
|
||||||
|
crate::komga::KomgaSyncReportSummary,
|
||||||
ErrorResponse,
|
ErrorResponse,
|
||||||
)
|
)
|
||||||
),
|
),
|
||||||
@@ -145,11 +164,16 @@ use utoipa::OpenApi;
|
|||||||
("Bearer" = [])
|
("Bearer" = [])
|
||||||
),
|
),
|
||||||
tags(
|
tags(
|
||||||
(name = "authors", description = "Author browsing and listing"),
|
(name = "books", description = "Book browsing, details and management"),
|
||||||
(name = "books", description = "Read-only endpoints for browsing and searching books"),
|
(name = "series", description = "Series browsing, filtering and management"),
|
||||||
|
(name = "search", description = "Full-text search across books and series"),
|
||||||
(name = "reading-progress", description = "Reading progress tracking per book"),
|
(name = "reading-progress", description = "Reading progress tracking per book"),
|
||||||
(name = "libraries", description = "Library management endpoints (Admin only)"),
|
(name = "authors", description = "Author browsing and listing"),
|
||||||
|
(name = "stats", description = "Collection statistics and dashboard data"),
|
||||||
|
(name = "libraries", description = "Library listing, scanning, and management (create/delete/settings: Admin only)"),
|
||||||
(name = "indexing", description = "Search index management and job control (Admin only)"),
|
(name = "indexing", description = "Search index management and job control (Admin only)"),
|
||||||
|
(name = "metadata", description = "External metadata providers and matching (Admin only)"),
|
||||||
|
(name = "komga", description = "Komga read-status sync (Admin only)"),
|
||||||
(name = "tokens", description = "API token management (Admin only)"),
|
(name = "tokens", description = "API token management (Admin only)"),
|
||||||
(name = "settings", description = "Application settings and cache management (Admin only)"),
|
(name = "settings", description = "Application settings and cache management (Admin only)"),
|
||||||
(name = "prowlarr", description = "Prowlarr indexer integration (Admin only)"),
|
(name = "prowlarr", description = "Prowlarr indexer integration (Admin only)"),
|
||||||
|
|||||||
@@ -43,7 +43,7 @@ pub struct SearchResponse {
|
|||||||
#[utoipa::path(
|
#[utoipa::path(
|
||||||
get,
|
get,
|
||||||
path = "/search",
|
path = "/search",
|
||||||
tag = "books",
|
tag = "search",
|
||||||
params(
|
params(
|
||||||
("q" = String, Query, description = "Search query (books + series via PostgreSQL full-text)"),
|
("q" = String, Query, description = "Search query (books + series via PostgreSQL full-text)"),
|
||||||
("library_id" = Option<String>, Query, description = "Filter by library ID"),
|
("library_id" = Option<String>, Query, description = "Filter by library ID"),
|
||||||
|
|||||||
1028
apps/api/src/series.rs
Normal file
1028
apps/api/src/series.rs
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1,10 +1,19 @@
|
|||||||
use axum::{extract::State, Json};
|
use axum::{
|
||||||
use serde::Serialize;
|
extract::{Query, State},
|
||||||
|
Json,
|
||||||
|
};
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
use sqlx::Row;
|
use sqlx::Row;
|
||||||
use utoipa::ToSchema;
|
use utoipa::{IntoParams, ToSchema};
|
||||||
|
|
||||||
use crate::{error::ApiError, state::AppState};
|
use crate::{error::ApiError, state::AppState};
|
||||||
|
|
||||||
|
#[derive(Deserialize, IntoParams)]
|
||||||
|
pub struct StatsQuery {
|
||||||
|
/// Granularity: "day", "week" or "month" (default: "month")
|
||||||
|
pub period: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Serialize, ToSchema)]
|
#[derive(Serialize, ToSchema)]
|
||||||
pub struct StatsOverview {
|
pub struct StatsOverview {
|
||||||
pub total_books: i64,
|
pub total_books: i64,
|
||||||
@@ -74,15 +83,51 @@ pub struct ProviderCount {
|
|||||||
pub count: i64,
|
pub count: i64,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, ToSchema)]
|
||||||
|
pub struct CurrentlyReadingItem {
|
||||||
|
pub book_id: String,
|
||||||
|
pub title: String,
|
||||||
|
pub series: Option<String>,
|
||||||
|
pub current_page: i32,
|
||||||
|
pub page_count: i32,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, ToSchema)]
|
||||||
|
pub struct RecentlyReadItem {
|
||||||
|
pub book_id: String,
|
||||||
|
pub title: String,
|
||||||
|
pub series: Option<String>,
|
||||||
|
pub last_read_at: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, ToSchema)]
|
||||||
|
pub struct MonthlyReading {
|
||||||
|
pub month: String,
|
||||||
|
pub books_read: i64,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Serialize, ToSchema)]
|
||||||
|
pub struct JobTimePoint {
|
||||||
|
pub label: String,
|
||||||
|
pub scan: i64,
|
||||||
|
pub rebuild: i64,
|
||||||
|
pub thumbnail: i64,
|
||||||
|
pub other: i64,
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Serialize, ToSchema)]
|
#[derive(Serialize, ToSchema)]
|
||||||
pub struct StatsResponse {
|
pub struct StatsResponse {
|
||||||
pub overview: StatsOverview,
|
pub overview: StatsOverview,
|
||||||
pub reading_status: ReadingStatusStats,
|
pub reading_status: ReadingStatusStats,
|
||||||
|
pub currently_reading: Vec<CurrentlyReadingItem>,
|
||||||
|
pub recently_read: Vec<RecentlyReadItem>,
|
||||||
|
pub reading_over_time: Vec<MonthlyReading>,
|
||||||
pub by_format: Vec<FormatCount>,
|
pub by_format: Vec<FormatCount>,
|
||||||
pub by_language: Vec<LanguageCount>,
|
pub by_language: Vec<LanguageCount>,
|
||||||
pub by_library: Vec<LibraryStats>,
|
pub by_library: Vec<LibraryStats>,
|
||||||
pub top_series: Vec<TopSeries>,
|
pub top_series: Vec<TopSeries>,
|
||||||
pub additions_over_time: Vec<MonthlyAdditions>,
|
pub additions_over_time: Vec<MonthlyAdditions>,
|
||||||
|
pub jobs_over_time: Vec<JobTimePoint>,
|
||||||
pub metadata: MetadataStats,
|
pub metadata: MetadataStats,
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -90,7 +135,8 @@ pub struct StatsResponse {
|
|||||||
#[utoipa::path(
|
#[utoipa::path(
|
||||||
get,
|
get,
|
||||||
path = "/stats",
|
path = "/stats",
|
||||||
tag = "books",
|
tag = "stats",
|
||||||
|
params(StatsQuery),
|
||||||
responses(
|
responses(
|
||||||
(status = 200, body = StatsResponse),
|
(status = 200, body = StatsResponse),
|
||||||
(status = 401, description = "Unauthorized"),
|
(status = 401, description = "Unauthorized"),
|
||||||
@@ -99,7 +145,9 @@ pub struct StatsResponse {
|
|||||||
)]
|
)]
|
||||||
pub async fn get_stats(
|
pub async fn get_stats(
|
||||||
State(state): State<AppState>,
|
State(state): State<AppState>,
|
||||||
|
Query(query): Query<StatsQuery>,
|
||||||
) -> Result<Json<StatsResponse>, ApiError> {
|
) -> Result<Json<StatsResponse>, ApiError> {
|
||||||
|
let period = query.period.as_deref().unwrap_or("month");
|
||||||
// Overview + reading status in one query
|
// Overview + reading status in one query
|
||||||
let overview_row = sqlx::query(
|
let overview_row = sqlx::query(
|
||||||
r#"
|
r#"
|
||||||
@@ -259,20 +307,74 @@ pub async fn get_stats(
|
|||||||
})
|
})
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
// Additions over time (last 12 months)
|
// Additions over time (with gap filling)
|
||||||
let additions_rows = sqlx::query(
|
let additions_rows = match period {
|
||||||
r#"
|
"day" => {
|
||||||
SELECT
|
sqlx::query(
|
||||||
TO_CHAR(DATE_TRUNC('month', created_at), 'YYYY-MM') AS month,
|
r#"
|
||||||
COUNT(*) AS books_added
|
SELECT
|
||||||
FROM books
|
TO_CHAR(d.dt, 'YYYY-MM-DD') AS month,
|
||||||
WHERE created_at >= DATE_TRUNC('month', NOW()) - INTERVAL '11 months'
|
COALESCE(cnt.books_added, 0) AS books_added
|
||||||
GROUP BY DATE_TRUNC('month', created_at)
|
FROM generate_series(CURRENT_DATE - INTERVAL '6 days', CURRENT_DATE, '1 day') AS d(dt)
|
||||||
ORDER BY month ASC
|
LEFT JOIN (
|
||||||
"#,
|
SELECT created_at::date AS dt, COUNT(*) AS books_added
|
||||||
)
|
FROM books
|
||||||
.fetch_all(&state.pool)
|
WHERE created_at >= CURRENT_DATE - INTERVAL '6 days'
|
||||||
.await?;
|
GROUP BY created_at::date
|
||||||
|
) cnt ON cnt.dt = d.dt
|
||||||
|
ORDER BY month ASC
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.fetch_all(&state.pool)
|
||||||
|
.await?
|
||||||
|
}
|
||||||
|
"week" => {
|
||||||
|
sqlx::query(
|
||||||
|
r#"
|
||||||
|
SELECT
|
||||||
|
TO_CHAR(d.dt, 'YYYY-MM-DD') AS month,
|
||||||
|
COALESCE(cnt.books_added, 0) AS books_added
|
||||||
|
FROM generate_series(
|
||||||
|
DATE_TRUNC('week', NOW() - INTERVAL '2 months'),
|
||||||
|
DATE_TRUNC('week', NOW()),
|
||||||
|
'1 week'
|
||||||
|
) AS d(dt)
|
||||||
|
LEFT JOIN (
|
||||||
|
SELECT DATE_TRUNC('week', created_at) AS dt, COUNT(*) AS books_added
|
||||||
|
FROM books
|
||||||
|
WHERE created_at >= DATE_TRUNC('week', NOW() - INTERVAL '2 months')
|
||||||
|
GROUP BY DATE_TRUNC('week', created_at)
|
||||||
|
) cnt ON cnt.dt = d.dt
|
||||||
|
ORDER BY month ASC
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.fetch_all(&state.pool)
|
||||||
|
.await?
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
sqlx::query(
|
||||||
|
r#"
|
||||||
|
SELECT
|
||||||
|
TO_CHAR(d.dt, 'YYYY-MM') AS month,
|
||||||
|
COALESCE(cnt.books_added, 0) AS books_added
|
||||||
|
FROM generate_series(
|
||||||
|
DATE_TRUNC('month', NOW()) - INTERVAL '11 months',
|
||||||
|
DATE_TRUNC('month', NOW()),
|
||||||
|
'1 month'
|
||||||
|
) AS d(dt)
|
||||||
|
LEFT JOIN (
|
||||||
|
SELECT DATE_TRUNC('month', created_at) AS dt, COUNT(*) AS books_added
|
||||||
|
FROM books
|
||||||
|
WHERE created_at >= DATE_TRUNC('month', NOW()) - INTERVAL '11 months'
|
||||||
|
GROUP BY DATE_TRUNC('month', created_at)
|
||||||
|
) cnt ON cnt.dt = d.dt
|
||||||
|
ORDER BY month ASC
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.fetch_all(&state.pool)
|
||||||
|
.await?
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
let additions_over_time: Vec<MonthlyAdditions> = additions_rows
|
let additions_over_time: Vec<MonthlyAdditions> = additions_rows
|
||||||
.iter()
|
.iter()
|
||||||
@@ -327,14 +429,273 @@ pub async fn get_stats(
|
|||||||
by_provider,
|
by_provider,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// Currently reading books
|
||||||
|
let reading_rows = sqlx::query(
|
||||||
|
r#"
|
||||||
|
SELECT b.id AS book_id, b.title, b.series, brp.current_page, b.page_count
|
||||||
|
FROM book_reading_progress brp
|
||||||
|
JOIN books b ON b.id = brp.book_id
|
||||||
|
WHERE brp.status = 'reading' AND brp.current_page IS NOT NULL
|
||||||
|
ORDER BY brp.updated_at DESC
|
||||||
|
LIMIT 20
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.fetch_all(&state.pool)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let currently_reading: Vec<CurrentlyReadingItem> = reading_rows
|
||||||
|
.iter()
|
||||||
|
.map(|r| {
|
||||||
|
let id: uuid::Uuid = r.get("book_id");
|
||||||
|
CurrentlyReadingItem {
|
||||||
|
book_id: id.to_string(),
|
||||||
|
title: r.get("title"),
|
||||||
|
series: r.get("series"),
|
||||||
|
current_page: r.get::<Option<i32>, _>("current_page").unwrap_or(0),
|
||||||
|
page_count: r.get::<Option<i32>, _>("page_count").unwrap_or(0),
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
// Recently read books
|
||||||
|
let recent_rows = sqlx::query(
|
||||||
|
r#"
|
||||||
|
SELECT b.id AS book_id, b.title, b.series,
|
||||||
|
TO_CHAR(brp.last_read_at, 'YYYY-MM-DD') AS last_read_at
|
||||||
|
FROM book_reading_progress brp
|
||||||
|
JOIN books b ON b.id = brp.book_id
|
||||||
|
WHERE brp.status = 'read' AND brp.last_read_at IS NOT NULL
|
||||||
|
ORDER BY brp.last_read_at DESC
|
||||||
|
LIMIT 10
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.fetch_all(&state.pool)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let recently_read: Vec<RecentlyReadItem> = recent_rows
|
||||||
|
.iter()
|
||||||
|
.map(|r| {
|
||||||
|
let id: uuid::Uuid = r.get("book_id");
|
||||||
|
RecentlyReadItem {
|
||||||
|
book_id: id.to_string(),
|
||||||
|
title: r.get("title"),
|
||||||
|
series: r.get("series"),
|
||||||
|
last_read_at: r.get::<Option<String>, _>("last_read_at").unwrap_or_default(),
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
// Reading activity over time (with gap filling)
|
||||||
|
let reading_time_rows = match period {
|
||||||
|
"day" => {
|
||||||
|
sqlx::query(
|
||||||
|
r#"
|
||||||
|
SELECT
|
||||||
|
TO_CHAR(d.dt, 'YYYY-MM-DD') AS month,
|
||||||
|
COALESCE(cnt.books_read, 0) AS books_read
|
||||||
|
FROM generate_series(CURRENT_DATE - INTERVAL '6 days', CURRENT_DATE, '1 day') AS d(dt)
|
||||||
|
LEFT JOIN (
|
||||||
|
SELECT brp.last_read_at::date AS dt, COUNT(*) AS books_read
|
||||||
|
FROM book_reading_progress brp
|
||||||
|
WHERE brp.status = 'read'
|
||||||
|
AND brp.last_read_at >= CURRENT_DATE - INTERVAL '6 days'
|
||||||
|
GROUP BY brp.last_read_at::date
|
||||||
|
) cnt ON cnt.dt = d.dt
|
||||||
|
ORDER BY month ASC
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.fetch_all(&state.pool)
|
||||||
|
.await?
|
||||||
|
}
|
||||||
|
"week" => {
|
||||||
|
sqlx::query(
|
||||||
|
r#"
|
||||||
|
SELECT
|
||||||
|
TO_CHAR(d.dt, 'YYYY-MM-DD') AS month,
|
||||||
|
COALESCE(cnt.books_read, 0) AS books_read
|
||||||
|
FROM generate_series(
|
||||||
|
DATE_TRUNC('week', NOW() - INTERVAL '2 months'),
|
||||||
|
DATE_TRUNC('week', NOW()),
|
||||||
|
'1 week'
|
||||||
|
) AS d(dt)
|
||||||
|
LEFT JOIN (
|
||||||
|
SELECT DATE_TRUNC('week', brp.last_read_at) AS dt, COUNT(*) AS books_read
|
||||||
|
FROM book_reading_progress brp
|
||||||
|
WHERE brp.status = 'read'
|
||||||
|
AND brp.last_read_at >= DATE_TRUNC('week', NOW() - INTERVAL '2 months')
|
||||||
|
GROUP BY DATE_TRUNC('week', brp.last_read_at)
|
||||||
|
) cnt ON cnt.dt = d.dt
|
||||||
|
ORDER BY month ASC
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.fetch_all(&state.pool)
|
||||||
|
.await?
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
sqlx::query(
|
||||||
|
r#"
|
||||||
|
SELECT
|
||||||
|
TO_CHAR(d.dt, 'YYYY-MM') AS month,
|
||||||
|
COALESCE(cnt.books_read, 0) AS books_read
|
||||||
|
FROM generate_series(
|
||||||
|
DATE_TRUNC('month', NOW()) - INTERVAL '11 months',
|
||||||
|
DATE_TRUNC('month', NOW()),
|
||||||
|
'1 month'
|
||||||
|
) AS d(dt)
|
||||||
|
LEFT JOIN (
|
||||||
|
SELECT DATE_TRUNC('month', brp.last_read_at) AS dt, COUNT(*) AS books_read
|
||||||
|
FROM book_reading_progress brp
|
||||||
|
WHERE brp.status = 'read'
|
||||||
|
AND brp.last_read_at >= DATE_TRUNC('month', NOW()) - INTERVAL '11 months'
|
||||||
|
GROUP BY DATE_TRUNC('month', brp.last_read_at)
|
||||||
|
) cnt ON cnt.dt = d.dt
|
||||||
|
ORDER BY month ASC
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.fetch_all(&state.pool)
|
||||||
|
.await?
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let reading_over_time: Vec<MonthlyReading> = reading_time_rows
|
||||||
|
.iter()
|
||||||
|
.map(|r| MonthlyReading {
|
||||||
|
month: r.get::<Option<String>, _>("month").unwrap_or_default(),
|
||||||
|
books_read: r.get("books_read"),
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
// Jobs over time (with gap filling, grouped by type category)
|
||||||
|
let jobs_rows = match period {
|
||||||
|
"day" => {
|
||||||
|
sqlx::query(
|
||||||
|
r#"
|
||||||
|
SELECT
|
||||||
|
TO_CHAR(d.dt, 'YYYY-MM-DD') AS label,
|
||||||
|
COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'scan'), 0)::BIGINT AS scan,
|
||||||
|
COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'rebuild'), 0)::BIGINT AS rebuild,
|
||||||
|
COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'thumbnail'), 0)::BIGINT AS thumbnail,
|
||||||
|
COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'other'), 0)::BIGINT AS other
|
||||||
|
FROM generate_series(CURRENT_DATE - INTERVAL '6 days', CURRENT_DATE, '1 day') AS d(dt)
|
||||||
|
LEFT JOIN (
|
||||||
|
SELECT
|
||||||
|
finished_at::date AS dt,
|
||||||
|
CASE
|
||||||
|
WHEN type = 'scan' THEN 'scan'
|
||||||
|
WHEN type IN ('rebuild', 'full_rebuild', 'rescan') THEN 'rebuild'
|
||||||
|
WHEN type IN ('thumbnail_rebuild', 'thumbnail_regenerate') THEN 'thumbnail'
|
||||||
|
ELSE 'other'
|
||||||
|
END AS cat,
|
||||||
|
COUNT(*) AS c
|
||||||
|
FROM index_jobs
|
||||||
|
WHERE status IN ('success', 'failed')
|
||||||
|
AND finished_at >= CURRENT_DATE - INTERVAL '6 days'
|
||||||
|
GROUP BY finished_at::date, cat
|
||||||
|
) cnt ON cnt.dt = d.dt
|
||||||
|
GROUP BY d.dt
|
||||||
|
ORDER BY label ASC
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.fetch_all(&state.pool)
|
||||||
|
.await?
|
||||||
|
}
|
||||||
|
"week" => {
|
||||||
|
sqlx::query(
|
||||||
|
r#"
|
||||||
|
SELECT
|
||||||
|
TO_CHAR(d.dt, 'YYYY-MM-DD') AS label,
|
||||||
|
COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'scan'), 0)::BIGINT AS scan,
|
||||||
|
COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'rebuild'), 0)::BIGINT AS rebuild,
|
||||||
|
COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'thumbnail'), 0)::BIGINT AS thumbnail,
|
||||||
|
COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'other'), 0)::BIGINT AS other
|
||||||
|
FROM generate_series(
|
||||||
|
DATE_TRUNC('week', NOW() - INTERVAL '2 months'),
|
||||||
|
DATE_TRUNC('week', NOW()),
|
||||||
|
'1 week'
|
||||||
|
) AS d(dt)
|
||||||
|
LEFT JOIN (
|
||||||
|
SELECT
|
||||||
|
DATE_TRUNC('week', finished_at) AS dt,
|
||||||
|
CASE
|
||||||
|
WHEN type = 'scan' THEN 'scan'
|
||||||
|
WHEN type IN ('rebuild', 'full_rebuild', 'rescan') THEN 'rebuild'
|
||||||
|
WHEN type IN ('thumbnail_rebuild', 'thumbnail_regenerate') THEN 'thumbnail'
|
||||||
|
ELSE 'other'
|
||||||
|
END AS cat,
|
||||||
|
COUNT(*) AS c
|
||||||
|
FROM index_jobs
|
||||||
|
WHERE status IN ('success', 'failed')
|
||||||
|
AND finished_at >= DATE_TRUNC('week', NOW() - INTERVAL '2 months')
|
||||||
|
GROUP BY DATE_TRUNC('week', finished_at), cat
|
||||||
|
) cnt ON cnt.dt = d.dt
|
||||||
|
GROUP BY d.dt
|
||||||
|
ORDER BY label ASC
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.fetch_all(&state.pool)
|
||||||
|
.await?
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
sqlx::query(
|
||||||
|
r#"
|
||||||
|
SELECT
|
||||||
|
TO_CHAR(d.dt, 'YYYY-MM') AS label,
|
||||||
|
COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'scan'), 0)::BIGINT AS scan,
|
||||||
|
COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'rebuild'), 0)::BIGINT AS rebuild,
|
||||||
|
COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'thumbnail'), 0)::BIGINT AS thumbnail,
|
||||||
|
COALESCE(SUM(cnt.c) FILTER (WHERE cnt.cat = 'other'), 0)::BIGINT AS other
|
||||||
|
FROM generate_series(
|
||||||
|
DATE_TRUNC('month', NOW()) - INTERVAL '11 months',
|
||||||
|
DATE_TRUNC('month', NOW()),
|
||||||
|
'1 month'
|
||||||
|
) AS d(dt)
|
||||||
|
LEFT JOIN (
|
||||||
|
SELECT
|
||||||
|
DATE_TRUNC('month', finished_at) AS dt,
|
||||||
|
CASE
|
||||||
|
WHEN type = 'scan' THEN 'scan'
|
||||||
|
WHEN type IN ('rebuild', 'full_rebuild', 'rescan') THEN 'rebuild'
|
||||||
|
WHEN type IN ('thumbnail_rebuild', 'thumbnail_regenerate') THEN 'thumbnail'
|
||||||
|
ELSE 'other'
|
||||||
|
END AS cat,
|
||||||
|
COUNT(*) AS c
|
||||||
|
FROM index_jobs
|
||||||
|
WHERE status IN ('success', 'failed')
|
||||||
|
AND finished_at >= DATE_TRUNC('month', NOW()) - INTERVAL '11 months'
|
||||||
|
GROUP BY DATE_TRUNC('month', finished_at), cat
|
||||||
|
) cnt ON cnt.dt = d.dt
|
||||||
|
GROUP BY d.dt
|
||||||
|
ORDER BY label ASC
|
||||||
|
"#,
|
||||||
|
)
|
||||||
|
.fetch_all(&state.pool)
|
||||||
|
.await?
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let jobs_over_time: Vec<JobTimePoint> = jobs_rows
|
||||||
|
.iter()
|
||||||
|
.map(|r| JobTimePoint {
|
||||||
|
label: r.get("label"),
|
||||||
|
scan: r.get("scan"),
|
||||||
|
rebuild: r.get("rebuild"),
|
||||||
|
thumbnail: r.get("thumbnail"),
|
||||||
|
other: r.get("other"),
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
Ok(Json(StatsResponse {
|
Ok(Json(StatsResponse {
|
||||||
overview,
|
overview,
|
||||||
reading_status,
|
reading_status,
|
||||||
|
currently_reading,
|
||||||
|
recently_read,
|
||||||
|
reading_over_time,
|
||||||
by_format,
|
by_format,
|
||||||
by_language,
|
by_language,
|
||||||
by_library,
|
by_library,
|
||||||
top_series,
|
top_series,
|
||||||
additions_over_time,
|
additions_over_time,
|
||||||
|
jobs_over_time,
|
||||||
metadata,
|
metadata,
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
|||||||
46
apps/api/src/telegram.rs
Normal file
46
apps/api/src/telegram.rs
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
use axum::{extract::State, Json};
|
||||||
|
use serde::Serialize;
|
||||||
|
use utoipa::ToSchema;
|
||||||
|
|
||||||
|
use crate::{error::ApiError, state::AppState};
|
||||||
|
|
||||||
|
#[derive(Serialize, ToSchema)]
|
||||||
|
pub struct TelegramTestResponse {
|
||||||
|
pub success: bool,
|
||||||
|
pub message: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Test Telegram connection by sending a test message
|
||||||
|
#[utoipa::path(
|
||||||
|
get,
|
||||||
|
path = "/telegram/test",
|
||||||
|
tag = "notifications",
|
||||||
|
responses(
|
||||||
|
(status = 200, body = TelegramTestResponse),
|
||||||
|
(status = 400, description = "Telegram not configured"),
|
||||||
|
(status = 401, description = "Unauthorized"),
|
||||||
|
),
|
||||||
|
security(("Bearer" = []))
|
||||||
|
)]
|
||||||
|
pub async fn test_telegram(
|
||||||
|
State(state): State<AppState>,
|
||||||
|
) -> Result<Json<TelegramTestResponse>, ApiError> {
|
||||||
|
let config = notifications::load_telegram_config(&state.pool)
|
||||||
|
.await
|
||||||
|
.ok_or_else(|| {
|
||||||
|
ApiError::bad_request(
|
||||||
|
"Telegram is not configured or disabled. Set bot_token, chat_id, and enable it.",
|
||||||
|
)
|
||||||
|
})?;
|
||||||
|
|
||||||
|
match notifications::send_test_message(&config).await {
|
||||||
|
Ok(()) => Ok(Json(TelegramTestResponse {
|
||||||
|
success: true,
|
||||||
|
message: "Test message sent successfully".to_string(),
|
||||||
|
})),
|
||||||
|
Err(e) => Ok(Json(TelegramTestResponse {
|
||||||
|
success: false,
|
||||||
|
message: format!("Failed to send: {e}"),
|
||||||
|
})),
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -28,12 +28,9 @@ export async function GET(
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
// Récupérer le content-type et les données
|
|
||||||
const contentType = response.headers.get("content-type") || "image/webp";
|
const contentType = response.headers.get("content-type") || "image/webp";
|
||||||
const imageBuffer = await response.arrayBuffer();
|
|
||||||
|
|
||||||
// Retourner l'image avec le bon content-type
|
return new NextResponse(response.body, {
|
||||||
return new NextResponse(imageBuffer, {
|
|
||||||
headers: {
|
headers: {
|
||||||
"Content-Type": contentType,
|
"Content-Type": contentType,
|
||||||
"Cache-Control": "public, max-age=300",
|
"Cache-Control": "public, max-age=300",
|
||||||
|
|||||||
@@ -9,10 +9,25 @@ export async function GET(
|
|||||||
|
|
||||||
try {
|
try {
|
||||||
const { baseUrl, token } = config();
|
const { baseUrl, token } = config();
|
||||||
|
const ifNoneMatch = request.headers.get("if-none-match");
|
||||||
|
|
||||||
|
const fetchHeaders: Record<string, string> = {
|
||||||
|
Authorization: `Bearer ${token}`,
|
||||||
|
};
|
||||||
|
if (ifNoneMatch) {
|
||||||
|
fetchHeaders["If-None-Match"] = ifNoneMatch;
|
||||||
|
}
|
||||||
|
|
||||||
const response = await fetch(`${baseUrl}/books/${bookId}/thumbnail`, {
|
const response = await fetch(`${baseUrl}/books/${bookId}/thumbnail`, {
|
||||||
headers: { Authorization: `Bearer ${token}` },
|
headers: fetchHeaders,
|
||||||
|
next: { revalidate: 86400 },
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Forward 304 Not Modified as-is
|
||||||
|
if (response.status === 304) {
|
||||||
|
return new NextResponse(null, { status: 304 });
|
||||||
|
}
|
||||||
|
|
||||||
if (!response.ok) {
|
if (!response.ok) {
|
||||||
return new NextResponse(`Failed to fetch thumbnail: ${response.status}`, {
|
return new NextResponse(`Failed to fetch thumbnail: ${response.status}`, {
|
||||||
status: response.status
|
status: response.status
|
||||||
@@ -20,14 +35,17 @@ export async function GET(
|
|||||||
}
|
}
|
||||||
|
|
||||||
const contentType = response.headers.get("content-type") || "image/webp";
|
const contentType = response.headers.get("content-type") || "image/webp";
|
||||||
const imageBuffer = await response.arrayBuffer();
|
const etag = response.headers.get("etag");
|
||||||
|
|
||||||
return new NextResponse(imageBuffer, {
|
const headers: Record<string, string> = {
|
||||||
headers: {
|
"Content-Type": contentType,
|
||||||
"Content-Type": contentType,
|
"Cache-Control": "public, max-age=31536000, immutable",
|
||||||
"Cache-Control": "public, max-age=31536000, immutable",
|
};
|
||||||
},
|
if (etag) {
|
||||||
});
|
headers["ETag"] = etag;
|
||||||
|
}
|
||||||
|
|
||||||
|
return new NextResponse(response.body, { headers });
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
console.error("Error fetching thumbnail:", error);
|
console.error("Error fetching thumbnail:", error);
|
||||||
return new NextResponse("Failed to fetch thumbnail", { status: 500 });
|
return new NextResponse("Failed to fetch thumbnail", { status: 500 });
|
||||||
|
|||||||
12
apps/backoffice/app/api/telegram/test/route.ts
Normal file
12
apps/backoffice/app/api/telegram/test/route.ts
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
import { NextResponse } from "next/server";
|
||||||
|
import { apiFetch } from "@/lib/api";
|
||||||
|
|
||||||
|
export async function GET() {
|
||||||
|
try {
|
||||||
|
const data = await apiFetch("/telegram/test");
|
||||||
|
return NextResponse.json(data);
|
||||||
|
} catch (error) {
|
||||||
|
const message = error instanceof Error ? error.message : "Failed to test Telegram connection";
|
||||||
|
return NextResponse.json({ error: message }, { status: 500 });
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -21,26 +21,19 @@ export default async function AuthorDetailPage({
|
|||||||
const page = typeof searchParamsAwaited.page === "string" ? parseInt(searchParamsAwaited.page) : 1;
|
const page = typeof searchParamsAwaited.page === "string" ? parseInt(searchParamsAwaited.page) : 1;
|
||||||
const limit = typeof searchParamsAwaited.limit === "string" ? parseInt(searchParamsAwaited.limit) : 20;
|
const limit = typeof searchParamsAwaited.limit === "string" ? parseInt(searchParamsAwaited.limit) : 20;
|
||||||
|
|
||||||
// Fetch books by this author (server-side filtering via API) and series
|
// Fetch books by this author (server-side filtering via API) and series by this author
|
||||||
const [booksPage, seriesPage] = await Promise.all([
|
const [booksPage, seriesPage] = await Promise.all([
|
||||||
fetchBooks(undefined, undefined, page, limit, undefined, undefined, authorName).catch(
|
fetchBooks(undefined, undefined, page, limit, undefined, undefined, authorName).catch(
|
||||||
() => ({ items: [], total: 0, page: 1, limit }) as BooksPageDto
|
() => ({ items: [], total: 0, page: 1, limit }) as BooksPageDto
|
||||||
),
|
),
|
||||||
fetchAllSeries(undefined, undefined, undefined, 1, 200).catch(
|
fetchAllSeries(undefined, undefined, undefined, 1, 200, undefined, undefined, undefined, undefined, authorName).catch(
|
||||||
() => ({ items: [], total: 0, page: 1, limit: 200 }) as SeriesPageDto
|
() => ({ items: [], total: 0, page: 1, limit: 200 }) as SeriesPageDto
|
||||||
),
|
),
|
||||||
]);
|
]);
|
||||||
|
|
||||||
const totalPages = Math.ceil(booksPage.total / limit);
|
const totalPages = Math.ceil(booksPage.total / limit);
|
||||||
|
|
||||||
// Extract unique series names from this author's books
|
const authorSeries = seriesPage.items;
|
||||||
const authorSeriesNames = new Set(
|
|
||||||
booksPage.items
|
|
||||||
.map((b) => b.series)
|
|
||||||
.filter((s): s is string => s != null && s !== "")
|
|
||||||
);
|
|
||||||
|
|
||||||
const authorSeries = seriesPage.items.filter((s) => authorSeriesNames.has(s.name));
|
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<>
|
<>
|
||||||
|
|||||||
231
apps/backoffice/app/components/DashboardCharts.tsx
Normal file
231
apps/backoffice/app/components/DashboardCharts.tsx
Normal file
@@ -0,0 +1,231 @@
|
|||||||
|
"use client";
|
||||||
|
|
||||||
|
import {
|
||||||
|
PieChart, Pie, Cell, ResponsiveContainer, Tooltip,
|
||||||
|
BarChart, Bar, XAxis, YAxis, CartesianGrid,
|
||||||
|
AreaChart, Area, Line, LineChart,
|
||||||
|
Legend,
|
||||||
|
} from "recharts";
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Donut
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
export function RcDonutChart({
|
||||||
|
data,
|
||||||
|
noDataLabel,
|
||||||
|
}: {
|
||||||
|
data: { name: string; value: number; color: string }[];
|
||||||
|
noDataLabel?: string;
|
||||||
|
}) {
|
||||||
|
const total = data.reduce((s, d) => s + d.value, 0);
|
||||||
|
if (total === 0) return <p className="text-muted-foreground text-sm text-center py-8">{noDataLabel}</p>;
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="flex items-center gap-4">
|
||||||
|
<ResponsiveContainer width={130} height={130}>
|
||||||
|
<PieChart>
|
||||||
|
<Pie
|
||||||
|
data={data}
|
||||||
|
cx="50%"
|
||||||
|
cy="50%"
|
||||||
|
innerRadius={32}
|
||||||
|
outerRadius={55}
|
||||||
|
dataKey="value"
|
||||||
|
strokeWidth={0}
|
||||||
|
>
|
||||||
|
{data.map((d, i) => (
|
||||||
|
<Cell key={i} fill={d.color} />
|
||||||
|
))}
|
||||||
|
</Pie>
|
||||||
|
<Tooltip
|
||||||
|
formatter={(value) => value}
|
||||||
|
contentStyle={{ backgroundColor: "var(--color-card)", border: "1px solid var(--color-border)", borderRadius: 8, fontSize: 12 }}
|
||||||
|
/>
|
||||||
|
</PieChart>
|
||||||
|
</ResponsiveContainer>
|
||||||
|
<div className="flex flex-col gap-1.5 min-w-0">
|
||||||
|
{data.map((d, i) => (
|
||||||
|
<div key={i} className="flex items-center gap-2 text-sm">
|
||||||
|
<span className="w-3 h-3 rounded-full shrink-0" style={{ backgroundColor: d.color }} />
|
||||||
|
<span className="text-muted-foreground truncate">{d.name}</span>
|
||||||
|
<span className="font-medium text-foreground ml-auto">{d.value}</span>
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Bar chart
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
export function RcBarChart({
|
||||||
|
data,
|
||||||
|
color = "hsl(198 78% 37%)",
|
||||||
|
noDataLabel,
|
||||||
|
}: {
|
||||||
|
data: { label: string; value: number }[];
|
||||||
|
color?: string;
|
||||||
|
noDataLabel?: string;
|
||||||
|
}) {
|
||||||
|
if (data.length === 0) return <p className="text-muted-foreground text-sm text-center py-8">{noDataLabel}</p>;
|
||||||
|
|
||||||
|
return (
|
||||||
|
<ResponsiveContainer width="100%" height={180}>
|
||||||
|
<BarChart data={data} margin={{ top: 5, right: 5, bottom: 0, left: -20 }}>
|
||||||
|
<CartesianGrid strokeDasharray="3 3" vertical={false} stroke="var(--color-border)" opacity={0.3} />
|
||||||
|
<XAxis dataKey="label" tick={{ fontSize: 11, fill: "var(--color-muted-foreground)" }} axisLine={false} tickLine={false} />
|
||||||
|
<YAxis tick={{ fontSize: 11, fill: "var(--color-muted-foreground)" }} axisLine={false} tickLine={false} allowDecimals={false} />
|
||||||
|
<Tooltip
|
||||||
|
contentStyle={{ backgroundColor: "var(--color-card)", border: "1px solid var(--color-border)", borderRadius: 8, fontSize: 12 }}
|
||||||
|
/>
|
||||||
|
<Bar dataKey="value" fill={color} radius={[4, 4, 0, 0]} />
|
||||||
|
</BarChart>
|
||||||
|
</ResponsiveContainer>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Area / Line chart
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
export function RcAreaChart({
|
||||||
|
data,
|
||||||
|
color = "hsl(142 60% 45%)",
|
||||||
|
noDataLabel,
|
||||||
|
}: {
|
||||||
|
data: { label: string; value: number }[];
|
||||||
|
color?: string;
|
||||||
|
noDataLabel?: string;
|
||||||
|
}) {
|
||||||
|
if (data.length === 0) return <p className="text-muted-foreground text-sm text-center py-8">{noDataLabel}</p>;
|
||||||
|
|
||||||
|
return (
|
||||||
|
<ResponsiveContainer width="100%" height={180}>
|
||||||
|
<AreaChart data={data} margin={{ top: 5, right: 5, bottom: 0, left: -20 }}>
|
||||||
|
<defs>
|
||||||
|
<linearGradient id="areaGradient" x1="0" y1="0" x2="0" y2="1">
|
||||||
|
<stop offset="0%" stopColor={color} stopOpacity={0.3} />
|
||||||
|
<stop offset="100%" stopColor={color} stopOpacity={0} />
|
||||||
|
</linearGradient>
|
||||||
|
</defs>
|
||||||
|
<CartesianGrid strokeDasharray="3 3" vertical={false} stroke="var(--color-border)" opacity={0.3} />
|
||||||
|
<XAxis dataKey="label" tick={{ fontSize: 11, fill: "var(--color-muted-foreground)" }} axisLine={false} tickLine={false} />
|
||||||
|
<YAxis tick={{ fontSize: 11, fill: "var(--color-muted-foreground)" }} axisLine={false} tickLine={false} allowDecimals={false} />
|
||||||
|
<Tooltip
|
||||||
|
contentStyle={{ backgroundColor: "var(--color-card)", border: "1px solid var(--color-border)", borderRadius: 8, fontSize: 12 }}
|
||||||
|
/>
|
||||||
|
<Area type="monotone" dataKey="value" stroke={color} strokeWidth={2} fill="url(#areaGradient)" dot={{ r: 3, fill: color }} />
|
||||||
|
</AreaChart>
|
||||||
|
</ResponsiveContainer>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Horizontal stacked bar (libraries breakdown)
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
export function RcStackedBar({
|
||||||
|
data,
|
||||||
|
labels,
|
||||||
|
}: {
|
||||||
|
data: { name: string; read: number; reading: number; unread: number; sizeLabel: string }[];
|
||||||
|
labels: { read: string; reading: string; unread: string; books: string };
|
||||||
|
}) {
|
||||||
|
if (data.length === 0) return null;
|
||||||
|
|
||||||
|
return (
|
||||||
|
<ResponsiveContainer width="100%" height={data.length * 60 + 30}>
|
||||||
|
<BarChart data={data} layout="vertical" margin={{ top: 0, right: 5, bottom: 0, left: 5 }}>
|
||||||
|
<CartesianGrid strokeDasharray="3 3" horizontal={false} stroke="var(--color-border)" opacity={0.3} />
|
||||||
|
<XAxis type="number" tick={{ fontSize: 11, fill: "var(--color-muted-foreground)" }} axisLine={false} tickLine={false} allowDecimals={false} />
|
||||||
|
<YAxis type="category" dataKey="name" tick={{ fontSize: 12, fill: "var(--color-foreground)" }} axisLine={false} tickLine={false} width={120} />
|
||||||
|
<Tooltip
|
||||||
|
contentStyle={{ backgroundColor: "var(--color-card)", border: "1px solid var(--color-border)", borderRadius: 8, fontSize: 12 }}
|
||||||
|
/>
|
||||||
|
<Legend
|
||||||
|
wrapperStyle={{ fontSize: 11 }}
|
||||||
|
formatter={(value: string) => <span className="text-muted-foreground">{value}</span>}
|
||||||
|
/>
|
||||||
|
<Bar dataKey="read" stackId="a" fill="hsl(142 60% 45%)" name={labels.read} radius={[0, 0, 0, 0]} />
|
||||||
|
<Bar dataKey="reading" stackId="a" fill="hsl(45 93% 47%)" name={labels.reading} />
|
||||||
|
<Bar dataKey="unread" stackId="a" fill="hsl(220 13% 70%)" name={labels.unread} radius={[0, 4, 4, 0]} />
|
||||||
|
</BarChart>
|
||||||
|
</ResponsiveContainer>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Horizontal bar chart (top series)
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
export function RcHorizontalBar({
|
||||||
|
data,
|
||||||
|
color = "hsl(142 60% 45%)",
|
||||||
|
noDataLabel,
|
||||||
|
}: {
|
||||||
|
data: { name: string; value: number; subLabel: string }[];
|
||||||
|
color?: string;
|
||||||
|
noDataLabel?: string;
|
||||||
|
}) {
|
||||||
|
if (data.length === 0) return <p className="text-muted-foreground text-sm text-center py-4">{noDataLabel}</p>;
|
||||||
|
|
||||||
|
return (
|
||||||
|
<ResponsiveContainer width="100%" height={data.length * 40 + 10}>
|
||||||
|
<BarChart data={data} layout="vertical" margin={{ top: 0, right: 5, bottom: 0, left: 5 }}>
|
||||||
|
<CartesianGrid strokeDasharray="3 3" horizontal={false} stroke="var(--color-border)" opacity={0.3} />
|
||||||
|
<XAxis type="number" tick={{ fontSize: 11, fill: "var(--color-muted-foreground)" }} axisLine={false} tickLine={false} allowDecimals={false} />
|
||||||
|
<YAxis type="category" dataKey="name" tick={{ fontSize: 11, fill: "var(--color-foreground)" }} axisLine={false} tickLine={false} width={120} />
|
||||||
|
<Tooltip
|
||||||
|
contentStyle={{ backgroundColor: "var(--color-card)", border: "1px solid var(--color-border)", borderRadius: 8, fontSize: 12 }}
|
||||||
|
/>
|
||||||
|
<Bar dataKey="value" fill={color} radius={[0, 4, 4, 0]} />
|
||||||
|
</BarChart>
|
||||||
|
</ResponsiveContainer>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Multi-line chart (jobs over time)
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
export function RcMultiLineChart({
|
||||||
|
data,
|
||||||
|
lines,
|
||||||
|
noDataLabel,
|
||||||
|
}: {
|
||||||
|
data: Record<string, unknown>[];
|
||||||
|
lines: { key: string; label: string; color: string }[];
|
||||||
|
noDataLabel?: string;
|
||||||
|
}) {
|
||||||
|
const hasData = data.some((d) => lines.some((l) => (d[l.key] as number) > 0));
|
||||||
|
if (data.length === 0 || !hasData)
|
||||||
|
return <p className="text-muted-foreground text-sm text-center py-8">{noDataLabel}</p>;
|
||||||
|
|
||||||
|
return (
|
||||||
|
<ResponsiveContainer width="100%" height={180}>
|
||||||
|
<LineChart data={data} margin={{ top: 5, right: 5, bottom: 0, left: -20 }}>
|
||||||
|
<CartesianGrid strokeDasharray="3 3" vertical={false} stroke="var(--color-border)" opacity={0.3} />
|
||||||
|
<XAxis dataKey="label" tick={{ fontSize: 11, fill: "var(--color-muted-foreground)" }} axisLine={false} tickLine={false} />
|
||||||
|
<YAxis tick={{ fontSize: 11, fill: "var(--color-muted-foreground)" }} axisLine={false} tickLine={false} allowDecimals={false} />
|
||||||
|
<Tooltip
|
||||||
|
contentStyle={{ backgroundColor: "var(--color-card)", border: "1px solid var(--color-border)", borderRadius: 8, fontSize: 12 }}
|
||||||
|
/>
|
||||||
|
<Legend wrapperStyle={{ fontSize: 11 }} />
|
||||||
|
{lines.map((l) => (
|
||||||
|
<Line
|
||||||
|
key={l.key}
|
||||||
|
type="monotone"
|
||||||
|
dataKey={l.key}
|
||||||
|
name={l.label}
|
||||||
|
stroke={l.color}
|
||||||
|
strokeWidth={2}
|
||||||
|
dot={{ r: 3, fill: l.color }}
|
||||||
|
/>
|
||||||
|
))}
|
||||||
|
</LineChart>
|
||||||
|
</ResponsiveContainer>
|
||||||
|
);
|
||||||
|
}
|
||||||
@@ -40,34 +40,21 @@ function formatDuration(start: string, end: string | null): string {
|
|||||||
return `${Math.floor(diff / 3600000)}h ${Math.floor((diff % 3600000) / 60000)}m`;
|
return `${Math.floor(diff / 3600000)}h ${Math.floor((diff % 3600000) / 60000)}m`;
|
||||||
}
|
}
|
||||||
|
|
||||||
function getDateParts(dateStr: string): { mins: number; hours: number; useDate: boolean; date: Date } {
|
|
||||||
const date = new Date(dateStr);
|
|
||||||
const now = new Date();
|
|
||||||
const diff = now.getTime() - date.getTime();
|
|
||||||
|
|
||||||
if (diff < 3600000) {
|
|
||||||
const mins = Math.floor(diff / 60000);
|
|
||||||
return { mins, hours: 0, useDate: false, date };
|
|
||||||
}
|
|
||||||
if (diff < 86400000) {
|
|
||||||
const hours = Math.floor(diff / 3600000);
|
|
||||||
return { mins: 0, hours, useDate: false, date };
|
|
||||||
}
|
|
||||||
return { mins: 0, hours: 0, useDate: true, date };
|
|
||||||
}
|
|
||||||
|
|
||||||
export function JobsList({ initialJobs, libraries, highlightJobId }: JobsListProps) {
|
export function JobsList({ initialJobs, libraries, highlightJobId }: JobsListProps) {
|
||||||
const { t, locale } = useTranslation();
|
const { t, locale } = useTranslation();
|
||||||
const [jobs, setJobs] = useState(initialJobs);
|
const [jobs, setJobs] = useState(initialJobs);
|
||||||
|
|
||||||
const formatDate = (dateStr: string): string => {
|
const formatDate = (dateStr: string): string => {
|
||||||
const parts = getDateParts(dateStr);
|
const date = new Date(dateStr);
|
||||||
if (parts.useDate) {
|
if (isNaN(date.getTime())) return dateStr;
|
||||||
return parts.date.toLocaleDateString(locale);
|
const loc = locale === "fr" ? "fr-FR" : "en-US";
|
||||||
}
|
return date.toLocaleString(loc, {
|
||||||
if (parts.mins < 1) return t("time.justNow");
|
day: "2-digit",
|
||||||
if (parts.hours > 0) return t("time.hoursAgo", { count: parts.hours });
|
month: "2-digit",
|
||||||
return t("time.minutesAgo", { count: parts.mins });
|
year: "numeric",
|
||||||
|
hour: "2-digit",
|
||||||
|
minute: "2-digit",
|
||||||
|
});
|
||||||
};
|
};
|
||||||
|
|
||||||
// Refresh jobs list via SSE
|
// Refresh jobs list via SSE
|
||||||
|
|||||||
47
apps/backoffice/app/components/PeriodToggle.tsx
Normal file
47
apps/backoffice/app/components/PeriodToggle.tsx
Normal file
@@ -0,0 +1,47 @@
|
|||||||
|
"use client";
|
||||||
|
|
||||||
|
import { useRouter, useSearchParams } from "next/navigation";
|
||||||
|
|
||||||
|
type Period = "day" | "week" | "month";
|
||||||
|
|
||||||
|
export function PeriodToggle({
|
||||||
|
labels,
|
||||||
|
}: {
|
||||||
|
labels: { day: string; week: string; month: string };
|
||||||
|
}) {
|
||||||
|
const router = useRouter();
|
||||||
|
const searchParams = useSearchParams();
|
||||||
|
const raw = searchParams.get("period");
|
||||||
|
const current: Period = raw === "day" ? "day" : raw === "week" ? "week" : "month";
|
||||||
|
|
||||||
|
function setPeriod(period: Period) {
|
||||||
|
const params = new URLSearchParams(searchParams.toString());
|
||||||
|
if (period === "month") {
|
||||||
|
params.delete("period");
|
||||||
|
} else {
|
||||||
|
params.set("period", period);
|
||||||
|
}
|
||||||
|
const qs = params.toString();
|
||||||
|
router.push(qs ? `?${qs}` : "/", { scroll: false });
|
||||||
|
}
|
||||||
|
|
||||||
|
const options: Period[] = ["day", "week", "month"];
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div className="flex gap-1 bg-muted rounded-lg p-0.5">
|
||||||
|
{options.map((p) => (
|
||||||
|
<button
|
||||||
|
key={p}
|
||||||
|
onClick={() => setPeriod(p)}
|
||||||
|
className={`px-2.5 py-1 text-xs font-medium rounded-md transition-colors ${
|
||||||
|
current === p
|
||||||
|
? "bg-card text-foreground shadow-sm"
|
||||||
|
: "text-muted-foreground hover:text-foreground"
|
||||||
|
}`}
|
||||||
|
>
|
||||||
|
{labels[p]}
|
||||||
|
</button>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
);
|
||||||
|
}
|
||||||
@@ -34,7 +34,8 @@ type IconName =
|
|||||||
| "warning"
|
| "warning"
|
||||||
| "tag"
|
| "tag"
|
||||||
| "document"
|
| "document"
|
||||||
| "authors";
|
| "authors"
|
||||||
|
| "bell";
|
||||||
|
|
||||||
type IconSize = "sm" | "md" | "lg" | "xl";
|
type IconSize = "sm" | "md" | "lg" | "xl";
|
||||||
|
|
||||||
@@ -88,6 +89,7 @@ const icons: Record<IconName, string> = {
|
|||||||
tag: "M7 7h.01M7 3h5a1.99 1.99 0 011.414.586l7 7a2 2 0 010 2.828l-7 7a2 2 0 01-2.828 0l-7-7A1.994 1.994 0 013 12V7a4 4 0 014-4z",
|
tag: "M7 7h.01M7 3h5a1.99 1.99 0 011.414.586l7 7a2 2 0 010 2.828l-7 7a2 2 0 01-2.828 0l-7-7A1.994 1.994 0 013 12V7a4 4 0 014-4z",
|
||||||
document: "M9 12h6m-6 4h6m2 5H7a2 2 0 01-2-2V5a2 2 0 012-2h5.586a1 1 0 01.707.293l5.414 5.414a1 1 0 01.293.707V19a2 2 0 01-2 2z",
|
document: "M9 12h6m-6 4h6m2 5H7a2 2 0 01-2-2V5a2 2 0 012-2h5.586a1 1 0 01.707.293l5.414 5.414a1 1 0 01.293.707V19a2 2 0 01-2 2z",
|
||||||
authors: "M17 20h5v-2a3 3 0 00-5.356-1.857M17 20H7m10 0v-2c0-.656-.126-1.283-.356-1.857M7 20H2v-2a3 3 0 015.356-1.857M7 20v-2c0-.656.126-1.283.356-1.857m0 0a5.002 5.002 0 019.288 0M15 7a3 3 0 11-6 0 3 3 0 016 0zm6 3a2 2 0 11-4 0 2 2 0 014 0zM7 10a2 2 0 11-4 0 2 2 0 014 0z",
|
authors: "M17 20h5v-2a3 3 0 00-5.356-1.857M17 20H7m10 0v-2c0-.656-.126-1.283-.356-1.857M7 20H2v-2a3 3 0 015.356-1.857M7 20v-2c0-.656.126-1.283.356-1.857m0 0a5.002 5.002 0 019.288 0M15 7a3 3 0 11-6 0 3 3 0 016 0zm6 3a2 2 0 11-4 0 2 2 0 014 0zM7 10a2 2 0 11-4 0 2 2 0 014 0z",
|
||||||
|
bell: "M15 17h5l-1.405-1.405A2.032 2.032 0 0118 14.158V11a6.002 6.002 0 00-4-5.659V5a2 2 0 10-4 0v.341C7.67 6.165 6 8.388 6 11v3.159c0 .538-.214 1.055-.595 1.436L4 17h5m6 0v1a3 3 0 11-6 0v-1m6 0H9",
|
||||||
};
|
};
|
||||||
|
|
||||||
const colorClasses: Partial<Record<IconName, string>> = {
|
const colorClasses: Partial<Record<IconName, string>> = {
|
||||||
|
|||||||
@@ -1,6 +1,9 @@
|
|||||||
import React from "react";
|
import React from "react";
|
||||||
import { fetchStats, StatsResponse } from "../lib/api";
|
import { fetchStats, StatsResponse, getBookCoverUrl } from "../lib/api";
|
||||||
import { Card, CardContent, CardHeader, CardTitle } from "./components/ui";
|
import { Card, CardContent, CardHeader, CardTitle } from "./components/ui";
|
||||||
|
import { RcDonutChart, RcBarChart, RcAreaChart, RcStackedBar, RcHorizontalBar, RcMultiLineChart } from "./components/DashboardCharts";
|
||||||
|
import { PeriodToggle } from "./components/PeriodToggle";
|
||||||
|
import Image from "next/image";
|
||||||
import Link from "next/link";
|
import Link from "next/link";
|
||||||
import { getServerTranslations } from "../lib/i18n/server";
|
import { getServerTranslations } from "../lib/i18n/server";
|
||||||
import type { TranslateFunction } from "../lib/i18n/dictionaries";
|
import type { TranslateFunction } from "../lib/i18n/dictionaries";
|
||||||
@@ -19,84 +22,25 @@ function formatNumber(n: number, locale: string): string {
|
|||||||
return n.toLocaleString(locale === "fr" ? "fr-FR" : "en-US");
|
return n.toLocaleString(locale === "fr" ? "fr-FR" : "en-US");
|
||||||
}
|
}
|
||||||
|
|
||||||
// Donut chart via SVG
|
function formatChartLabel(raw: string, period: "day" | "week" | "month", locale: string): string {
|
||||||
function DonutChart({ data, colors, noDataLabel, locale = "fr" }: { data: { label: string; value: number; color: string }[]; colors?: string[]; noDataLabel?: string; locale?: string }) {
|
const loc = locale === "fr" ? "fr-FR" : "en-US";
|
||||||
const total = data.reduce((sum, d) => sum + d.value, 0);
|
if (period === "month") {
|
||||||
if (total === 0) return <p className="text-muted-foreground text-sm text-center py-8">{noDataLabel}</p>;
|
// raw = "YYYY-MM"
|
||||||
|
const [y, m] = raw.split("-");
|
||||||
const radius = 40;
|
const d = new Date(Number(y), Number(m) - 1, 1);
|
||||||
const circumference = 2 * Math.PI * radius;
|
return d.toLocaleDateString(loc, { month: "short" });
|
||||||
let offset = 0;
|
}
|
||||||
|
if (period === "week") {
|
||||||
return (
|
// raw = "YYYY-MM-DD" (Monday of the week)
|
||||||
<div className="flex items-center gap-6">
|
const d = new Date(raw + "T00:00:00");
|
||||||
<svg viewBox="0 0 100 100" className="w-32 h-32 shrink-0">
|
return d.toLocaleDateString(loc, { day: "numeric", month: "short" });
|
||||||
{data.map((d, i) => {
|
}
|
||||||
const pct = d.value / total;
|
// day: raw = "YYYY-MM-DD"
|
||||||
const dashLength = pct * circumference;
|
const d = new Date(raw + "T00:00:00");
|
||||||
const currentOffset = offset;
|
return d.toLocaleDateString(loc, { weekday: "short", day: "numeric" });
|
||||||
offset += dashLength;
|
|
||||||
return (
|
|
||||||
<circle
|
|
||||||
key={i}
|
|
||||||
cx="50"
|
|
||||||
cy="50"
|
|
||||||
r={radius}
|
|
||||||
fill="none"
|
|
||||||
stroke={d.color}
|
|
||||||
strokeWidth="16"
|
|
||||||
strokeDasharray={`${dashLength} ${circumference - dashLength}`}
|
|
||||||
strokeDashoffset={-currentOffset}
|
|
||||||
transform="rotate(-90 50 50)"
|
|
||||||
className="transition-all duration-500"
|
|
||||||
/>
|
|
||||||
);
|
|
||||||
})}
|
|
||||||
<text x="50" y="50" textAnchor="middle" dominantBaseline="central" className="fill-foreground text-[10px] font-bold">
|
|
||||||
{formatNumber(total, locale)}
|
|
||||||
</text>
|
|
||||||
</svg>
|
|
||||||
<div className="flex flex-col gap-1.5 min-w-0">
|
|
||||||
{data.map((d, i) => (
|
|
||||||
<div key={i} className="flex items-center gap-2 text-sm">
|
|
||||||
<span className="w-3 h-3 rounded-full shrink-0" style={{ backgroundColor: d.color }} />
|
|
||||||
<span className="text-muted-foreground truncate">{d.label}</span>
|
|
||||||
<span className="font-medium text-foreground ml-auto">{d.value}</span>
|
|
||||||
</div>
|
|
||||||
))}
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// Bar chart via pure CSS
|
// Horizontal progress bar for metadata quality (stays server-rendered, no recharts needed)
|
||||||
function BarChart({ data, color = "var(--color-primary)", noDataLabel }: { data: { label: string; value: number }[]; color?: string; noDataLabel?: string }) {
|
|
||||||
const max = Math.max(...data.map((d) => d.value), 1);
|
|
||||||
if (data.length === 0) return <p className="text-muted-foreground text-sm text-center py-8">{noDataLabel}</p>;
|
|
||||||
|
|
||||||
return (
|
|
||||||
<div className="flex items-end gap-1.5 h-40">
|
|
||||||
{data.map((d, i) => (
|
|
||||||
<div key={i} className="flex-1 flex flex-col items-center gap-1 min-w-0">
|
|
||||||
<span className="text-[10px] text-muted-foreground font-medium">{d.value || ""}</span>
|
|
||||||
<div
|
|
||||||
className="w-full rounded-t-sm transition-all duration-500 min-h-[2px]"
|
|
||||||
style={{
|
|
||||||
height: `${(d.value / max) * 100}%`,
|
|
||||||
backgroundColor: color,
|
|
||||||
opacity: d.value === 0 ? 0.2 : 1,
|
|
||||||
}}
|
|
||||||
/>
|
|
||||||
<span className="text-[10px] text-muted-foreground truncate w-full text-center">
|
|
||||||
{d.label}
|
|
||||||
</span>
|
|
||||||
</div>
|
|
||||||
))}
|
|
||||||
</div>
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Horizontal progress bar for library breakdown
|
|
||||||
function HorizontalBar({ label, value, max, subLabel, color = "var(--color-primary)" }: { label: string; value: number; max: number; subLabel?: string; color?: string }) {
|
function HorizontalBar({ label, value, max, subLabel, color = "var(--color-primary)" }: { label: string; value: number; max: number; subLabel?: string; color?: string }) {
|
||||||
const pct = max > 0 ? (value / max) * 100 : 0;
|
const pct = max > 0 ? (value / max) * 100 : 0;
|
||||||
return (
|
return (
|
||||||
@@ -115,12 +59,19 @@ function HorizontalBar({ label, value, max, subLabel, color = "var(--color-prima
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
export default async function DashboardPage() {
|
export default async function DashboardPage({
|
||||||
|
searchParams,
|
||||||
|
}: {
|
||||||
|
searchParams: Promise<{ [key: string]: string | string[] | undefined }>;
|
||||||
|
}) {
|
||||||
|
const searchParamsAwaited = await searchParams;
|
||||||
|
const rawPeriod = searchParamsAwaited.period;
|
||||||
|
const period = rawPeriod === "day" ? "day" as const : rawPeriod === "week" ? "week" as const : "month" as const;
|
||||||
const { t, locale } = await getServerTranslations();
|
const { t, locale } = await getServerTranslations();
|
||||||
|
|
||||||
let stats: StatsResponse | null = null;
|
let stats: StatsResponse | null = null;
|
||||||
try {
|
try {
|
||||||
stats = await fetchStats();
|
stats = await fetchStats(period);
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
console.error("Failed to fetch stats:", e);
|
console.error("Failed to fetch stats:", e);
|
||||||
}
|
}
|
||||||
@@ -137,7 +88,7 @@ export default async function DashboardPage() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
const { overview, reading_status, by_format, by_language, by_library, top_series, additions_over_time, metadata } = stats;
|
const { overview, reading_status, currently_reading = [], recently_read = [], reading_over_time = [], by_format, by_library, top_series, additions_over_time, jobs_over_time = [], metadata } = stats;
|
||||||
|
|
||||||
const readingColors = ["hsl(220 13% 70%)", "hsl(45 93% 47%)", "hsl(142 60% 45%)"];
|
const readingColors = ["hsl(220 13% 70%)", "hsl(45 93% 47%)", "hsl(142 60% 45%)"];
|
||||||
const formatColors = [
|
const formatColors = [
|
||||||
@@ -146,7 +97,6 @@ export default async function DashboardPage() {
|
|||||||
"hsl(170 60% 45%)", "hsl(220 60% 50%)",
|
"hsl(170 60% 45%)", "hsl(220 60% 50%)",
|
||||||
];
|
];
|
||||||
|
|
||||||
const maxLibBooks = Math.max(...by_library.map((l) => l.book_count), 1);
|
|
||||||
const noDataLabel = t("common.noData");
|
const noDataLabel = t("common.noData");
|
||||||
|
|
||||||
return (
|
return (
|
||||||
@@ -174,6 +124,97 @@ export default async function DashboardPage() {
|
|||||||
<StatCard icon="size" label={t("dashboard.totalSize")} value={formatBytes(overview.total_size_bytes)} color="warning" />
|
<StatCard icon="size" label={t("dashboard.totalSize")} value={formatBytes(overview.total_size_bytes)} color="warning" />
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
{/* Currently reading + Recently read */}
|
||||||
|
{(currently_reading.length > 0 || recently_read.length > 0) && (
|
||||||
|
<div className="grid grid-cols-1 lg:grid-cols-2 gap-6">
|
||||||
|
{/* Currently reading */}
|
||||||
|
<Card hover={false}>
|
||||||
|
<CardHeader>
|
||||||
|
<CardTitle className="text-base">{t("dashboard.currentlyReading")}</CardTitle>
|
||||||
|
</CardHeader>
|
||||||
|
<CardContent>
|
||||||
|
{currently_reading.length === 0 ? (
|
||||||
|
<p className="text-muted-foreground text-sm text-center py-4">{t("dashboard.noCurrentlyReading")}</p>
|
||||||
|
) : (
|
||||||
|
<div className="space-y-3 max-h-[216px] overflow-y-auto pr-1">
|
||||||
|
{currently_reading.slice(0, 8).map((book) => {
|
||||||
|
const pct = book.page_count > 0 ? Math.round((book.current_page / book.page_count) * 100) : 0;
|
||||||
|
return (
|
||||||
|
<Link key={book.book_id} href={`/books/${book.book_id}` as any} className="flex items-center gap-3 group">
|
||||||
|
<Image
|
||||||
|
src={getBookCoverUrl(book.book_id)}
|
||||||
|
alt={book.title}
|
||||||
|
width={40}
|
||||||
|
height={56}
|
||||||
|
className="w-10 h-14 object-cover rounded shadow-sm shrink-0 bg-muted"
|
||||||
|
/>
|
||||||
|
<div className="min-w-0 flex-1">
|
||||||
|
<p className="text-sm font-medium text-foreground truncate group-hover:text-primary transition-colors">{book.title}</p>
|
||||||
|
{book.series && <p className="text-xs text-muted-foreground truncate">{book.series}</p>}
|
||||||
|
<div className="mt-1.5 flex items-center gap-2">
|
||||||
|
<div className="h-1.5 flex-1 bg-muted rounded-full overflow-hidden">
|
||||||
|
<div className="h-full bg-warning rounded-full transition-all" style={{ width: `${pct}%` }} />
|
||||||
|
</div>
|
||||||
|
<span className="text-[10px] text-muted-foreground shrink-0">{pct}%</span>
|
||||||
|
</div>
|
||||||
|
<p className="text-[10px] text-muted-foreground mt-0.5">{t("dashboard.pageProgress", { current: book.current_page, total: book.page_count })}</p>
|
||||||
|
</div>
|
||||||
|
</Link>
|
||||||
|
);
|
||||||
|
})}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
|
||||||
|
{/* Recently read */}
|
||||||
|
<Card hover={false}>
|
||||||
|
<CardHeader>
|
||||||
|
<CardTitle className="text-base">{t("dashboard.recentlyRead")}</CardTitle>
|
||||||
|
</CardHeader>
|
||||||
|
<CardContent>
|
||||||
|
{recently_read.length === 0 ? (
|
||||||
|
<p className="text-muted-foreground text-sm text-center py-4">{t("dashboard.noRecentlyRead")}</p>
|
||||||
|
) : (
|
||||||
|
<div className="space-y-3 max-h-[216px] overflow-y-auto pr-1">
|
||||||
|
{recently_read.map((book) => (
|
||||||
|
<Link key={book.book_id} href={`/books/${book.book_id}` as any} className="flex items-center gap-3 group">
|
||||||
|
<Image
|
||||||
|
src={getBookCoverUrl(book.book_id)}
|
||||||
|
alt={book.title}
|
||||||
|
width={40}
|
||||||
|
height={56}
|
||||||
|
className="w-10 h-14 object-cover rounded shadow-sm shrink-0 bg-muted"
|
||||||
|
/>
|
||||||
|
<div className="min-w-0 flex-1">
|
||||||
|
<p className="text-sm font-medium text-foreground truncate group-hover:text-primary transition-colors">{book.title}</p>
|
||||||
|
{book.series && <p className="text-xs text-muted-foreground truncate">{book.series}</p>}
|
||||||
|
</div>
|
||||||
|
<span className="text-xs text-muted-foreground shrink-0">{book.last_read_at}</span>
|
||||||
|
</Link>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
|
||||||
|
{/* Reading activity line chart */}
|
||||||
|
<Card hover={false}>
|
||||||
|
<CardHeader className="flex flex-row items-center justify-between space-y-0">
|
||||||
|
<CardTitle className="text-base">{t("dashboard.readingActivity")}</CardTitle>
|
||||||
|
<PeriodToggle labels={{ day: t("dashboard.periodDay"), week: t("dashboard.periodWeek"), month: t("dashboard.periodMonth") }} />
|
||||||
|
</CardHeader>
|
||||||
|
<CardContent>
|
||||||
|
<RcAreaChart
|
||||||
|
noDataLabel={noDataLabel}
|
||||||
|
data={reading_over_time.map((m) => ({ label: formatChartLabel(m.month, period, locale), value: m.books_read }))}
|
||||||
|
color="hsl(142 60% 45%)"
|
||||||
|
/>
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
|
||||||
{/* Charts row */}
|
{/* Charts row */}
|
||||||
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-6">
|
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-6">
|
||||||
{/* Reading status donut */}
|
{/* Reading status donut */}
|
||||||
@@ -182,13 +223,12 @@ export default async function DashboardPage() {
|
|||||||
<CardTitle className="text-base">{t("dashboard.readingStatus")}</CardTitle>
|
<CardTitle className="text-base">{t("dashboard.readingStatus")}</CardTitle>
|
||||||
</CardHeader>
|
</CardHeader>
|
||||||
<CardContent>
|
<CardContent>
|
||||||
<DonutChart
|
<RcDonutChart
|
||||||
locale={locale}
|
|
||||||
noDataLabel={noDataLabel}
|
noDataLabel={noDataLabel}
|
||||||
data={[
|
data={[
|
||||||
{ label: t("status.unread"), value: reading_status.unread, color: readingColors[0] },
|
{ name: t("status.unread"), value: reading_status.unread, color: readingColors[0] },
|
||||||
{ label: t("status.reading"), value: reading_status.reading, color: readingColors[1] },
|
{ name: t("status.reading"), value: reading_status.reading, color: readingColors[1] },
|
||||||
{ label: t("status.read"), value: reading_status.read, color: readingColors[2] },
|
{ name: t("status.read"), value: reading_status.read, color: readingColors[2] },
|
||||||
]}
|
]}
|
||||||
/>
|
/>
|
||||||
</CardContent>
|
</CardContent>
|
||||||
@@ -200,11 +240,10 @@ export default async function DashboardPage() {
|
|||||||
<CardTitle className="text-base">{t("dashboard.byFormat")}</CardTitle>
|
<CardTitle className="text-base">{t("dashboard.byFormat")}</CardTitle>
|
||||||
</CardHeader>
|
</CardHeader>
|
||||||
<CardContent>
|
<CardContent>
|
||||||
<DonutChart
|
<RcDonutChart
|
||||||
locale={locale}
|
|
||||||
noDataLabel={noDataLabel}
|
noDataLabel={noDataLabel}
|
||||||
data={by_format.slice(0, 6).map((f, i) => ({
|
data={by_format.slice(0, 6).map((f, i) => ({
|
||||||
label: (f.format || t("dashboard.unknown")).toUpperCase(),
|
name: (f.format || t("dashboard.unknown")).toUpperCase(),
|
||||||
value: f.count,
|
value: f.count,
|
||||||
color: formatColors[i % formatColors.length],
|
color: formatColors[i % formatColors.length],
|
||||||
}))}
|
}))}
|
||||||
@@ -218,11 +257,10 @@ export default async function DashboardPage() {
|
|||||||
<CardTitle className="text-base">{t("dashboard.byLibrary")}</CardTitle>
|
<CardTitle className="text-base">{t("dashboard.byLibrary")}</CardTitle>
|
||||||
</CardHeader>
|
</CardHeader>
|
||||||
<CardContent>
|
<CardContent>
|
||||||
<DonutChart
|
<RcDonutChart
|
||||||
locale={locale}
|
|
||||||
noDataLabel={noDataLabel}
|
noDataLabel={noDataLabel}
|
||||||
data={by_library.slice(0, 6).map((l, i) => ({
|
data={by_library.slice(0, 6).map((l, i) => ({
|
||||||
label: l.library_name,
|
name: l.library_name,
|
||||||
value: l.book_count,
|
value: l.book_count,
|
||||||
color: formatColors[i % formatColors.length],
|
color: formatColors[i % formatColors.length],
|
||||||
}))}
|
}))}
|
||||||
@@ -239,12 +277,11 @@ export default async function DashboardPage() {
|
|||||||
<CardTitle className="text-base">{t("dashboard.metadataCoverage")}</CardTitle>
|
<CardTitle className="text-base">{t("dashboard.metadataCoverage")}</CardTitle>
|
||||||
</CardHeader>
|
</CardHeader>
|
||||||
<CardContent>
|
<CardContent>
|
||||||
<DonutChart
|
<RcDonutChart
|
||||||
locale={locale}
|
|
||||||
noDataLabel={noDataLabel}
|
noDataLabel={noDataLabel}
|
||||||
data={[
|
data={[
|
||||||
{ label: t("dashboard.seriesLinked"), value: metadata.series_linked, color: "hsl(142 60% 45%)" },
|
{ name: t("dashboard.seriesLinked"), value: metadata.series_linked, color: "hsl(142 60% 45%)" },
|
||||||
{ label: t("dashboard.seriesUnlinked"), value: metadata.series_unlinked, color: "hsl(220 13% 70%)" },
|
{ name: t("dashboard.seriesUnlinked"), value: metadata.series_unlinked, color: "hsl(220 13% 70%)" },
|
||||||
]}
|
]}
|
||||||
/>
|
/>
|
||||||
</CardContent>
|
</CardContent>
|
||||||
@@ -256,11 +293,10 @@ export default async function DashboardPage() {
|
|||||||
<CardTitle className="text-base">{t("dashboard.byProvider")}</CardTitle>
|
<CardTitle className="text-base">{t("dashboard.byProvider")}</CardTitle>
|
||||||
</CardHeader>
|
</CardHeader>
|
||||||
<CardContent>
|
<CardContent>
|
||||||
<DonutChart
|
<RcDonutChart
|
||||||
locale={locale}
|
|
||||||
noDataLabel={noDataLabel}
|
noDataLabel={noDataLabel}
|
||||||
data={metadata.by_provider.map((p, i) => ({
|
data={metadata.by_provider.map((p, i) => ({
|
||||||
label: p.provider.replace(/_/g, " ").replace(/\b\w/g, (c) => c.toUpperCase()),
|
name: p.provider.replace(/_/g, " ").replace(/\b\w/g, (c) => c.toUpperCase()),
|
||||||
value: p.count,
|
value: p.count,
|
||||||
color: formatColors[i % formatColors.length],
|
color: formatColors[i % formatColors.length],
|
||||||
}))}
|
}))}
|
||||||
@@ -294,24 +330,32 @@ export default async function DashboardPage() {
|
|||||||
</Card>
|
</Card>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{/* Second row */}
|
{/* Libraries breakdown + Top series */}
|
||||||
<div className="grid grid-cols-1 lg:grid-cols-2 gap-6">
|
<div className="grid grid-cols-1 lg:grid-cols-2 gap-6">
|
||||||
{/* Monthly additions bar chart */}
|
{by_library.length > 0 && (
|
||||||
<Card hover={false}>
|
<Card hover={false}>
|
||||||
<CardHeader>
|
<CardHeader>
|
||||||
<CardTitle className="text-base">{t("dashboard.booksAdded")}</CardTitle>
|
<CardTitle className="text-base">{t("dashboard.libraries")}</CardTitle>
|
||||||
</CardHeader>
|
</CardHeader>
|
||||||
<CardContent>
|
<CardContent>
|
||||||
<BarChart
|
<RcStackedBar
|
||||||
noDataLabel={noDataLabel}
|
data={by_library.map((lib) => ({
|
||||||
data={additions_over_time.map((m) => ({
|
name: lib.library_name,
|
||||||
label: m.month.slice(5), // "MM" from "YYYY-MM"
|
read: lib.read_count,
|
||||||
value: m.books_added,
|
reading: lib.reading_count,
|
||||||
}))}
|
unread: lib.unread_count,
|
||||||
color="hsl(198 78% 37%)"
|
sizeLabel: formatBytes(lib.size_bytes),
|
||||||
/>
|
}))}
|
||||||
</CardContent>
|
labels={{
|
||||||
</Card>
|
read: t("status.read"),
|
||||||
|
reading: t("status.reading"),
|
||||||
|
unread: t("status.unread"),
|
||||||
|
books: t("dashboard.books"),
|
||||||
|
}}
|
||||||
|
/>
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
)}
|
||||||
|
|
||||||
{/* Top series */}
|
{/* Top series */}
|
||||||
<Card hover={false}>
|
<Card hover={false}>
|
||||||
@@ -319,67 +363,59 @@ export default async function DashboardPage() {
|
|||||||
<CardTitle className="text-base">{t("dashboard.popularSeries")}</CardTitle>
|
<CardTitle className="text-base">{t("dashboard.popularSeries")}</CardTitle>
|
||||||
</CardHeader>
|
</CardHeader>
|
||||||
<CardContent>
|
<CardContent>
|
||||||
<div className="space-y-3">
|
<RcHorizontalBar
|
||||||
{top_series.slice(0, 8).map((s, i) => (
|
noDataLabel={t("dashboard.noSeries")}
|
||||||
<HorizontalBar
|
data={top_series.slice(0, 8).map((s) => ({
|
||||||
key={i}
|
name: s.series,
|
||||||
label={s.series}
|
value: s.book_count,
|
||||||
value={s.book_count}
|
subLabel: t("dashboard.readCount", { read: s.read_count, total: s.book_count }),
|
||||||
max={top_series[0]?.book_count || 1}
|
}))}
|
||||||
subLabel={t("dashboard.readCount", { read: s.read_count, total: s.book_count })}
|
color="hsl(142 60% 45%)"
|
||||||
color="hsl(142 60% 45%)"
|
/>
|
||||||
/>
|
|
||||||
))}
|
|
||||||
{top_series.length === 0 && (
|
|
||||||
<p className="text-muted-foreground text-sm text-center py-4">{t("dashboard.noSeries")}</p>
|
|
||||||
)}
|
|
||||||
</div>
|
|
||||||
</CardContent>
|
</CardContent>
|
||||||
</Card>
|
</Card>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{/* Libraries breakdown */}
|
{/* Additions line chart – full width */}
|
||||||
{by_library.length > 0 && (
|
<Card hover={false}>
|
||||||
<Card hover={false}>
|
<CardHeader className="flex flex-row items-center justify-between space-y-0">
|
||||||
<CardHeader>
|
<CardTitle className="text-base">{t("dashboard.booksAdded")}</CardTitle>
|
||||||
<CardTitle className="text-base">{t("dashboard.libraries")}</CardTitle>
|
<PeriodToggle labels={{ day: t("dashboard.periodDay"), week: t("dashboard.periodWeek"), month: t("dashboard.periodMonth") }} />
|
||||||
</CardHeader>
|
</CardHeader>
|
||||||
<CardContent>
|
<CardContent>
|
||||||
<div className="grid grid-cols-1 md:grid-cols-2 gap-x-8 gap-y-4">
|
<RcAreaChart
|
||||||
{by_library.map((lib, i) => (
|
noDataLabel={noDataLabel}
|
||||||
<div key={i} className="space-y-2">
|
data={additions_over_time.map((m) => ({ label: formatChartLabel(m.month, period, locale), value: m.books_added }))}
|
||||||
<div className="flex justify-between items-baseline">
|
color="hsl(198 78% 37%)"
|
||||||
<span className="font-medium text-foreground text-sm">{lib.library_name}</span>
|
/>
|
||||||
<span className="text-xs text-muted-foreground">{formatBytes(lib.size_bytes)}</span>
|
</CardContent>
|
||||||
</div>
|
</Card>
|
||||||
<div className="h-3 bg-muted rounded-full overflow-hidden flex">
|
|
||||||
<div
|
{/* Jobs over time – multi-line chart */}
|
||||||
className="h-full transition-all duration-500"
|
<Card hover={false}>
|
||||||
style={{ width: `${(lib.read_count / Math.max(lib.book_count, 1)) * 100}%`, backgroundColor: "hsl(142 60% 45%)" }}
|
<CardHeader className="flex flex-row items-center justify-between space-y-0">
|
||||||
title={`${t("status.read")} : ${lib.read_count}`}
|
<CardTitle className="text-base">{t("dashboard.jobsOverTime")}</CardTitle>
|
||||||
/>
|
<PeriodToggle labels={{ day: t("dashboard.periodDay"), week: t("dashboard.periodWeek"), month: t("dashboard.periodMonth") }} />
|
||||||
<div
|
</CardHeader>
|
||||||
className="h-full transition-all duration-500"
|
<CardContent>
|
||||||
style={{ width: `${(lib.reading_count / Math.max(lib.book_count, 1)) * 100}%`, backgroundColor: "hsl(45 93% 47%)" }}
|
<RcMultiLineChart
|
||||||
title={`${t("status.reading")} : ${lib.reading_count}`}
|
noDataLabel={noDataLabel}
|
||||||
/>
|
data={jobs_over_time.map((j) => ({
|
||||||
<div
|
label: formatChartLabel(j.label, period, locale),
|
||||||
className="h-full transition-all duration-500"
|
scan: j.scan,
|
||||||
style={{ width: `${(lib.unread_count / Math.max(lib.book_count, 1)) * 100}%`, backgroundColor: "hsl(220 13% 70%)" }}
|
rebuild: j.rebuild,
|
||||||
title={`${t("status.unread")} : ${lib.unread_count}`}
|
thumbnail: j.thumbnail,
|
||||||
/>
|
other: j.other,
|
||||||
</div>
|
}))}
|
||||||
<div className="flex gap-3 text-[11px] text-muted-foreground">
|
lines={[
|
||||||
<span>{lib.book_count} {t("dashboard.books").toLowerCase()}</span>
|
{ key: "scan", label: t("dashboard.jobScan"), color: "hsl(198 78% 37%)" },
|
||||||
<span className="text-success">{lib.read_count} {t("status.read").toLowerCase()}</span>
|
{ key: "rebuild", label: t("dashboard.jobRebuild"), color: "hsl(142 60% 45%)" },
|
||||||
<span className="text-warning">{lib.reading_count} {t("status.reading").toLowerCase()}</span>
|
{ key: "thumbnail", label: t("dashboard.jobThumbnail"), color: "hsl(45 93% 47%)" },
|
||||||
</div>
|
{ key: "other", label: t("dashboard.jobOther"), color: "hsl(280 60% 50%)" },
|
||||||
</div>
|
]}
|
||||||
))}
|
/>
|
||||||
</div>
|
</CardContent>
|
||||||
</CardContent>
|
</Card>
|
||||||
</Card>
|
|
||||||
)}
|
|
||||||
|
|
||||||
{/* Quick links */}
|
{/* Quick links */}
|
||||||
<QuickLinks t={t} />
|
<QuickLinks t={t} />
|
||||||
|
|||||||
@@ -150,11 +150,12 @@ export default function SettingsPage({ initialSettings, initialCacheStats, initi
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const [activeTab, setActiveTab] = useState<"general" | "integrations">("general");
|
const [activeTab, setActiveTab] = useState<"general" | "integrations" | "notifications">("general");
|
||||||
|
|
||||||
const tabs = [
|
const tabs = [
|
||||||
{ id: "general" as const, label: t("settings.general"), icon: "settings" as const },
|
{ id: "general" as const, label: t("settings.general"), icon: "settings" as const },
|
||||||
{ id: "integrations" as const, label: t("settings.integrations"), icon: "refresh" as const },
|
{ id: "integrations" as const, label: t("settings.integrations"), icon: "refresh" as const },
|
||||||
|
{ id: "notifications" as const, label: t("settings.notifications"), icon: "bell" as const },
|
||||||
];
|
];
|
||||||
|
|
||||||
return (
|
return (
|
||||||
@@ -826,6 +827,11 @@ export default function SettingsPage({ initialSettings, initialCacheStats, initi
|
|||||||
</CardContent>
|
</CardContent>
|
||||||
</Card>
|
</Card>
|
||||||
</>)}
|
</>)}
|
||||||
|
|
||||||
|
{activeTab === "notifications" && (<>
|
||||||
|
{/* Telegram Notifications */}
|
||||||
|
<TelegramCard handleUpdateSetting={handleUpdateSetting} />
|
||||||
|
</>)}
|
||||||
</>
|
</>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@@ -1480,3 +1486,254 @@ function QBittorrentCard({ handleUpdateSetting }: { handleUpdateSetting: (key: s
|
|||||||
</Card>
|
</Card>
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Telegram Notifications sub-component
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
const DEFAULT_EVENTS = {
|
||||||
|
scan_completed: true,
|
||||||
|
scan_failed: true,
|
||||||
|
scan_cancelled: true,
|
||||||
|
thumbnail_completed: true,
|
||||||
|
thumbnail_failed: true,
|
||||||
|
conversion_completed: true,
|
||||||
|
conversion_failed: true,
|
||||||
|
metadata_approved: true,
|
||||||
|
metadata_batch_completed: true,
|
||||||
|
metadata_batch_failed: true,
|
||||||
|
metadata_refresh_completed: true,
|
||||||
|
metadata_refresh_failed: true,
|
||||||
|
};
|
||||||
|
|
||||||
|
function TelegramCard({ handleUpdateSetting }: { handleUpdateSetting: (key: string, value: unknown) => Promise<void> }) {
|
||||||
|
const { t } = useTranslation();
|
||||||
|
const [botToken, setBotToken] = useState("");
|
||||||
|
const [chatId, setChatId] = useState("");
|
||||||
|
const [enabled, setEnabled] = useState(false);
|
||||||
|
const [events, setEvents] = useState(DEFAULT_EVENTS);
|
||||||
|
const [isTesting, setIsTesting] = useState(false);
|
||||||
|
const [testResult, setTestResult] = useState<{ success: boolean; message: string } | null>(null);
|
||||||
|
const [showHelp, setShowHelp] = useState(false);
|
||||||
|
|
||||||
|
useEffect(() => {
|
||||||
|
fetch("/api/settings/telegram")
|
||||||
|
.then((r) => (r.ok ? r.json() : null))
|
||||||
|
.then((data) => {
|
||||||
|
if (data) {
|
||||||
|
if (data.bot_token) setBotToken(data.bot_token);
|
||||||
|
if (data.chat_id) setChatId(data.chat_id);
|
||||||
|
if (data.enabled !== undefined) setEnabled(data.enabled);
|
||||||
|
if (data.events) setEvents({ ...DEFAULT_EVENTS, ...data.events });
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.catch(() => {});
|
||||||
|
}, []);
|
||||||
|
|
||||||
|
function saveTelegram(token?: string, chat?: string, en?: boolean, ev?: typeof events) {
|
||||||
|
handleUpdateSetting("telegram", {
|
||||||
|
bot_token: token ?? botToken,
|
||||||
|
chat_id: chat ?? chatId,
|
||||||
|
enabled: en ?? enabled,
|
||||||
|
events: ev ?? events,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
async function handleTestConnection() {
|
||||||
|
setIsTesting(true);
|
||||||
|
setTestResult(null);
|
||||||
|
try {
|
||||||
|
const resp = await fetch("/api/telegram/test");
|
||||||
|
const data = await resp.json();
|
||||||
|
if (data.error) {
|
||||||
|
setTestResult({ success: false, message: data.error });
|
||||||
|
} else {
|
||||||
|
setTestResult(data);
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
setTestResult({ success: false, message: "Failed to connect" });
|
||||||
|
} finally {
|
||||||
|
setIsTesting(false);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<Card className="mb-6">
|
||||||
|
<CardHeader>
|
||||||
|
<CardTitle className="flex items-center gap-2">
|
||||||
|
<Icon name="bell" size="md" />
|
||||||
|
{t("settings.telegram")}
|
||||||
|
</CardTitle>
|
||||||
|
<CardDescription>{t("settings.telegramDesc")}</CardDescription>
|
||||||
|
</CardHeader>
|
||||||
|
<CardContent>
|
||||||
|
<div className="space-y-4">
|
||||||
|
{/* Setup guide */}
|
||||||
|
<div>
|
||||||
|
<button
|
||||||
|
type="button"
|
||||||
|
onClick={() => setShowHelp(!showHelp)}
|
||||||
|
className="text-sm text-primary hover:text-primary/80 flex items-center gap-1 transition-colors"
|
||||||
|
>
|
||||||
|
<Icon name={showHelp ? "chevronDown" : "chevronRight"} size="sm" />
|
||||||
|
{t("settings.telegramHelp")}
|
||||||
|
</button>
|
||||||
|
{showHelp && (
|
||||||
|
<div className="mt-3 p-4 rounded-lg bg-muted/30 space-y-3 text-sm text-foreground">
|
||||||
|
<div>
|
||||||
|
<p className="font-medium mb-1">1. Bot Token</p>
|
||||||
|
<p className="text-muted-foreground" dangerouslySetInnerHTML={{ __html: t("settings.telegramHelpBot") }} />
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<p className="font-medium mb-1">2. Chat ID</p>
|
||||||
|
<p className="text-muted-foreground" dangerouslySetInnerHTML={{ __html: t("settings.telegramHelpChat") }} />
|
||||||
|
</div>
|
||||||
|
<div>
|
||||||
|
<p className="font-medium mb-1">3. Group chat</p>
|
||||||
|
<p className="text-muted-foreground" dangerouslySetInnerHTML={{ __html: t("settings.telegramHelpGroup") }} />
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="flex items-center gap-3">
|
||||||
|
<label className="relative inline-flex items-center cursor-pointer">
|
||||||
|
<input
|
||||||
|
type="checkbox"
|
||||||
|
checked={enabled}
|
||||||
|
onChange={(e) => {
|
||||||
|
setEnabled(e.target.checked);
|
||||||
|
saveTelegram(undefined, undefined, e.target.checked);
|
||||||
|
}}
|
||||||
|
className="sr-only peer"
|
||||||
|
/>
|
||||||
|
<div className="w-11 h-6 bg-muted rounded-full peer peer-checked:after:translate-x-full peer-checked:after:border-white after:content-[''] after:absolute after:top-[2px] after:left-[2px] after:bg-white after:border-gray-300 after:border after:rounded-full after:h-5 after:w-5 after:transition-all peer-checked:bg-primary"></div>
|
||||||
|
</label>
|
||||||
|
<span className="text-sm font-medium text-foreground">{t("settings.telegramEnabled")}</span>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<FormRow>
|
||||||
|
<FormField className="flex-1">
|
||||||
|
<label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.botToken")}</label>
|
||||||
|
<FormInput
|
||||||
|
type="password"
|
||||||
|
placeholder={t("settings.botTokenPlaceholder")}
|
||||||
|
value={botToken}
|
||||||
|
onChange={(e) => setBotToken(e.target.value)}
|
||||||
|
onBlur={() => saveTelegram()}
|
||||||
|
/>
|
||||||
|
</FormField>
|
||||||
|
</FormRow>
|
||||||
|
<FormRow>
|
||||||
|
<FormField className="flex-1">
|
||||||
|
<label className="text-sm font-medium text-muted-foreground mb-1 block">{t("settings.chatId")}</label>
|
||||||
|
<FormInput
|
||||||
|
type="text"
|
||||||
|
placeholder={t("settings.chatIdPlaceholder")}
|
||||||
|
value={chatId}
|
||||||
|
onChange={(e) => setChatId(e.target.value)}
|
||||||
|
onBlur={() => saveTelegram()}
|
||||||
|
/>
|
||||||
|
</FormField>
|
||||||
|
</FormRow>
|
||||||
|
|
||||||
|
{/* Event toggles grouped by category */}
|
||||||
|
<div className="border-t border-border/50 pt-4">
|
||||||
|
<h4 className="text-sm font-medium text-foreground mb-4">{t("settings.telegramEvents")}</h4>
|
||||||
|
<div className="grid grid-cols-2 gap-x-6 gap-y-5">
|
||||||
|
{([
|
||||||
|
{
|
||||||
|
category: t("settings.eventCategoryScan"),
|
||||||
|
icon: "search" as const,
|
||||||
|
items: [
|
||||||
|
{ key: "scan_completed" as const, label: t("settings.eventCompleted") },
|
||||||
|
{ key: "scan_failed" as const, label: t("settings.eventFailed") },
|
||||||
|
{ key: "scan_cancelled" as const, label: t("settings.eventCancelled") },
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
category: t("settings.eventCategoryThumbnail"),
|
||||||
|
icon: "image" as const,
|
||||||
|
items: [
|
||||||
|
{ key: "thumbnail_completed" as const, label: t("settings.eventCompleted") },
|
||||||
|
{ key: "thumbnail_failed" as const, label: t("settings.eventFailed") },
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
category: t("settings.eventCategoryConversion"),
|
||||||
|
icon: "refresh" as const,
|
||||||
|
items: [
|
||||||
|
{ key: "conversion_completed" as const, label: t("settings.eventCompleted") },
|
||||||
|
{ key: "conversion_failed" as const, label: t("settings.eventFailed") },
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
category: t("settings.eventCategoryMetadata"),
|
||||||
|
icon: "tag" as const,
|
||||||
|
items: [
|
||||||
|
{ key: "metadata_approved" as const, label: t("settings.eventLinked") },
|
||||||
|
{ key: "metadata_batch_completed" as const, label: t("settings.eventBatchCompleted") },
|
||||||
|
{ key: "metadata_batch_failed" as const, label: t("settings.eventBatchFailed") },
|
||||||
|
{ key: "metadata_refresh_completed" as const, label: t("settings.eventRefreshCompleted") },
|
||||||
|
{ key: "metadata_refresh_failed" as const, label: t("settings.eventRefreshFailed") },
|
||||||
|
],
|
||||||
|
},
|
||||||
|
]).map(({ category, icon, items }) => (
|
||||||
|
<div key={category}>
|
||||||
|
<p className="text-xs font-medium text-muted-foreground uppercase tracking-wide mb-2 flex items-center gap-1.5">
|
||||||
|
<Icon name={icon} size="sm" className="text-muted-foreground" />
|
||||||
|
{category}
|
||||||
|
</p>
|
||||||
|
<div className="space-y-1">
|
||||||
|
{items.map(({ key, label }) => (
|
||||||
|
<label key={key} className="flex items-center justify-between py-1.5 cursor-pointer group">
|
||||||
|
<span className="text-sm text-foreground group-hover:text-foreground/80">{label}</span>
|
||||||
|
<div className="relative">
|
||||||
|
<input
|
||||||
|
type="checkbox"
|
||||||
|
checked={events[key]}
|
||||||
|
onChange={(e) => {
|
||||||
|
const updated = { ...events, [key]: e.target.checked };
|
||||||
|
setEvents(updated);
|
||||||
|
saveTelegram(undefined, undefined, undefined, updated);
|
||||||
|
}}
|
||||||
|
className="sr-only peer"
|
||||||
|
/>
|
||||||
|
<div className="w-9 h-5 bg-muted rounded-full peer peer-checked:after:translate-x-full peer-checked:after:border-white after:content-[''] after:absolute after:top-[2px] after:left-[2px] after:bg-white after:border-gray-300 after:border after:rounded-full after:h-4 after:w-4 after:transition-all peer-checked:bg-primary" />
|
||||||
|
</div>
|
||||||
|
</label>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
))}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div className="flex items-center gap-3">
|
||||||
|
<Button
|
||||||
|
onClick={handleTestConnection}
|
||||||
|
disabled={isTesting || !botToken || !chatId || !enabled}
|
||||||
|
>
|
||||||
|
{isTesting ? (
|
||||||
|
<>
|
||||||
|
<Icon name="spinner" size="sm" className="animate-spin -ml-1 mr-2" />
|
||||||
|
{t("settings.testing")}
|
||||||
|
</>
|
||||||
|
) : (
|
||||||
|
<>
|
||||||
|
<Icon name="refresh" size="sm" className="mr-2" />
|
||||||
|
{t("settings.testConnection")}
|
||||||
|
</>
|
||||||
|
)}
|
||||||
|
</Button>
|
||||||
|
{testResult && (
|
||||||
|
<span className={`text-sm font-medium ${testResult.success ? "text-success" : "text-destructive"}`}>
|
||||||
|
{testResult.message}
|
||||||
|
</span>
|
||||||
|
)}
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</CardContent>
|
||||||
|
</Card>
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|||||||
@@ -342,6 +342,7 @@ export async function fetchAllSeries(
|
|||||||
seriesStatus?: string,
|
seriesStatus?: string,
|
||||||
hasMissing?: boolean,
|
hasMissing?: boolean,
|
||||||
metadataProvider?: string,
|
metadataProvider?: string,
|
||||||
|
author?: string,
|
||||||
): Promise<SeriesPageDto> {
|
): Promise<SeriesPageDto> {
|
||||||
const params = new URLSearchParams();
|
const params = new URLSearchParams();
|
||||||
if (libraryId) params.set("library_id", libraryId);
|
if (libraryId) params.set("library_id", libraryId);
|
||||||
@@ -351,6 +352,7 @@ export async function fetchAllSeries(
|
|||||||
if (seriesStatus) params.set("series_status", seriesStatus);
|
if (seriesStatus) params.set("series_status", seriesStatus);
|
||||||
if (hasMissing) params.set("has_missing", "true");
|
if (hasMissing) params.set("has_missing", "true");
|
||||||
if (metadataProvider) params.set("metadata_provider", metadataProvider);
|
if (metadataProvider) params.set("metadata_provider", metadataProvider);
|
||||||
|
if (author) params.set("author", author);
|
||||||
params.set("page", page.toString());
|
params.set("page", page.toString());
|
||||||
params.set("limit", limit.toString());
|
params.set("limit", limit.toString());
|
||||||
|
|
||||||
@@ -548,19 +550,52 @@ export type MetadataStats = {
|
|||||||
by_provider: ProviderCount[];
|
by_provider: ProviderCount[];
|
||||||
};
|
};
|
||||||
|
|
||||||
|
export type CurrentlyReadingItem = {
|
||||||
|
book_id: string;
|
||||||
|
title: string;
|
||||||
|
series: string | null;
|
||||||
|
current_page: number;
|
||||||
|
page_count: number;
|
||||||
|
};
|
||||||
|
|
||||||
|
export type RecentlyReadItem = {
|
||||||
|
book_id: string;
|
||||||
|
title: string;
|
||||||
|
series: string | null;
|
||||||
|
last_read_at: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
export type MonthlyReading = {
|
||||||
|
month: string;
|
||||||
|
books_read: number;
|
||||||
|
};
|
||||||
|
|
||||||
|
export type JobTimePoint = {
|
||||||
|
label: string;
|
||||||
|
scan: number;
|
||||||
|
rebuild: number;
|
||||||
|
thumbnail: number;
|
||||||
|
other: number;
|
||||||
|
};
|
||||||
|
|
||||||
export type StatsResponse = {
|
export type StatsResponse = {
|
||||||
overview: StatsOverview;
|
overview: StatsOverview;
|
||||||
reading_status: ReadingStatusStats;
|
reading_status: ReadingStatusStats;
|
||||||
|
currently_reading: CurrentlyReadingItem[];
|
||||||
|
recently_read: RecentlyReadItem[];
|
||||||
|
reading_over_time: MonthlyReading[];
|
||||||
by_format: FormatCount[];
|
by_format: FormatCount[];
|
||||||
by_language: LanguageCount[];
|
by_language: LanguageCount[];
|
||||||
by_library: LibraryStatsItem[];
|
by_library: LibraryStatsItem[];
|
||||||
top_series: TopSeriesItem[];
|
top_series: TopSeriesItem[];
|
||||||
additions_over_time: MonthlyAdditions[];
|
additions_over_time: MonthlyAdditions[];
|
||||||
|
jobs_over_time: JobTimePoint[];
|
||||||
metadata: MetadataStats;
|
metadata: MetadataStats;
|
||||||
};
|
};
|
||||||
|
|
||||||
export async function fetchStats() {
|
export async function fetchStats(period?: "day" | "week" | "month") {
|
||||||
return apiFetch<StatsResponse>("/stats", { next: { revalidate: 30 } });
|
const params = period && period !== "month" ? `?period=${period}` : "";
|
||||||
|
return apiFetch<StatsResponse>(`/stats${params}`, { next: { revalidate: 30 } });
|
||||||
}
|
}
|
||||||
|
|
||||||
// ---------------------------------------------------------------------------
|
// ---------------------------------------------------------------------------
|
||||||
|
|||||||
@@ -70,7 +70,15 @@ const en: Record<TranslationKey, string> = {
|
|||||||
"dashboard.readingStatus": "Reading status",
|
"dashboard.readingStatus": "Reading status",
|
||||||
"dashboard.byFormat": "By format",
|
"dashboard.byFormat": "By format",
|
||||||
"dashboard.byLibrary": "By library",
|
"dashboard.byLibrary": "By library",
|
||||||
"dashboard.booksAdded": "Books added (last 12 months)",
|
"dashboard.booksAdded": "Books added",
|
||||||
|
"dashboard.jobsOverTime": "Job runs",
|
||||||
|
"dashboard.jobScan": "Scan",
|
||||||
|
"dashboard.jobRebuild": "Rebuild",
|
||||||
|
"dashboard.jobThumbnail": "Thumbnails",
|
||||||
|
"dashboard.jobOther": "Other",
|
||||||
|
"dashboard.periodDay": "Day",
|
||||||
|
"dashboard.periodWeek": "Week",
|
||||||
|
"dashboard.periodMonth": "Month",
|
||||||
"dashboard.popularSeries": "Popular series",
|
"dashboard.popularSeries": "Popular series",
|
||||||
"dashboard.noSeries": "No series yet",
|
"dashboard.noSeries": "No series yet",
|
||||||
"dashboard.unknown": "Unknown",
|
"dashboard.unknown": "Unknown",
|
||||||
@@ -82,6 +90,12 @@ const en: Record<TranslationKey, string> = {
|
|||||||
"dashboard.bookMetadata": "Book metadata",
|
"dashboard.bookMetadata": "Book metadata",
|
||||||
"dashboard.withSummary": "With summary",
|
"dashboard.withSummary": "With summary",
|
||||||
"dashboard.withIsbn": "With ISBN",
|
"dashboard.withIsbn": "With ISBN",
|
||||||
|
"dashboard.currentlyReading": "Currently reading",
|
||||||
|
"dashboard.recentlyRead": "Recently read",
|
||||||
|
"dashboard.readingActivity": "Reading activity",
|
||||||
|
"dashboard.pageProgress": "p. {{current}} / {{total}}",
|
||||||
|
"dashboard.noCurrentlyReading": "No books in progress",
|
||||||
|
"dashboard.noRecentlyRead": "No books read recently",
|
||||||
|
|
||||||
// Books page
|
// Books page
|
||||||
"books.title": "Books",
|
"books.title": "Books",
|
||||||
@@ -543,6 +557,33 @@ const en: Record<TranslationKey, string> = {
|
|||||||
"settings.qbittorrentUsername": "Username",
|
"settings.qbittorrentUsername": "Username",
|
||||||
"settings.qbittorrentPassword": "Password",
|
"settings.qbittorrentPassword": "Password",
|
||||||
|
|
||||||
|
// Settings - Telegram Notifications
|
||||||
|
"settings.notifications": "Notifications",
|
||||||
|
"settings.telegram": "Telegram",
|
||||||
|
"settings.telegramDesc": "Receive Telegram notifications for scans, errors, and metadata linking.",
|
||||||
|
"settings.botToken": "Bot Token",
|
||||||
|
"settings.botTokenPlaceholder": "123456:ABC-DEF1234ghIkl-zyx57W2v1u123ew11",
|
||||||
|
"settings.chatId": "Chat ID",
|
||||||
|
"settings.chatIdPlaceholder": "123456789",
|
||||||
|
"settings.telegramEnabled": "Enable Telegram notifications",
|
||||||
|
"settings.telegramEvents": "Events",
|
||||||
|
"settings.eventCategoryScan": "Scans",
|
||||||
|
"settings.eventCategoryThumbnail": "Thumbnails",
|
||||||
|
"settings.eventCategoryConversion": "CBR → CBZ Conversion",
|
||||||
|
"settings.eventCategoryMetadata": "Metadata",
|
||||||
|
"settings.eventCompleted": "Completed",
|
||||||
|
"settings.eventFailed": "Failed",
|
||||||
|
"settings.eventCancelled": "Cancelled",
|
||||||
|
"settings.eventLinked": "Linked",
|
||||||
|
"settings.eventBatchCompleted": "Batch completed",
|
||||||
|
"settings.eventBatchFailed": "Batch failed",
|
||||||
|
"settings.eventRefreshCompleted": "Refresh completed",
|
||||||
|
"settings.eventRefreshFailed": "Refresh failed",
|
||||||
|
"settings.telegramHelp": "How to get the required information?",
|
||||||
|
"settings.telegramHelpBot": "Open Telegram, search for <b>@BotFather</b>, send <code>/newbot</code> and follow the instructions. Copy the token it gives you.",
|
||||||
|
"settings.telegramHelpChat": "Send a message to your bot, then open <code>https://api.telegram.org/bot<TOKEN>/getUpdates</code> in your browser. The <b>chat id</b> is in <code>message.chat.id</code>.",
|
||||||
|
"settings.telegramHelpGroup": "For a group: add the bot to the group, send a message, then check the same URL. Group IDs are negative (e.g. <code>-123456789</code>).",
|
||||||
|
|
||||||
// Settings - Language
|
// Settings - Language
|
||||||
"settings.language": "Language",
|
"settings.language": "Language",
|
||||||
"settings.languageDesc": "Choose the interface language",
|
"settings.languageDesc": "Choose the interface language",
|
||||||
|
|||||||
@@ -68,7 +68,15 @@ const fr = {
|
|||||||
"dashboard.readingStatus": "Statut de lecture",
|
"dashboard.readingStatus": "Statut de lecture",
|
||||||
"dashboard.byFormat": "Par format",
|
"dashboard.byFormat": "Par format",
|
||||||
"dashboard.byLibrary": "Par bibliothèque",
|
"dashboard.byLibrary": "Par bibliothèque",
|
||||||
"dashboard.booksAdded": "Livres ajoutés (12 derniers mois)",
|
"dashboard.booksAdded": "Livres ajoutés",
|
||||||
|
"dashboard.jobsOverTime": "Exécutions de jobs",
|
||||||
|
"dashboard.jobScan": "Scan",
|
||||||
|
"dashboard.jobRebuild": "Rebuild",
|
||||||
|
"dashboard.jobThumbnail": "Thumbnails",
|
||||||
|
"dashboard.jobOther": "Autre",
|
||||||
|
"dashboard.periodDay": "Jour",
|
||||||
|
"dashboard.periodWeek": "Semaine",
|
||||||
|
"dashboard.periodMonth": "Mois",
|
||||||
"dashboard.popularSeries": "Séries populaires",
|
"dashboard.popularSeries": "Séries populaires",
|
||||||
"dashboard.noSeries": "Aucune série pour le moment",
|
"dashboard.noSeries": "Aucune série pour le moment",
|
||||||
"dashboard.unknown": "Inconnu",
|
"dashboard.unknown": "Inconnu",
|
||||||
@@ -80,6 +88,12 @@ const fr = {
|
|||||||
"dashboard.bookMetadata": "Métadonnées livres",
|
"dashboard.bookMetadata": "Métadonnées livres",
|
||||||
"dashboard.withSummary": "Avec résumé",
|
"dashboard.withSummary": "Avec résumé",
|
||||||
"dashboard.withIsbn": "Avec ISBN",
|
"dashboard.withIsbn": "Avec ISBN",
|
||||||
|
"dashboard.currentlyReading": "En cours de lecture",
|
||||||
|
"dashboard.recentlyRead": "Derniers livres lus",
|
||||||
|
"dashboard.readingActivity": "Activité de lecture",
|
||||||
|
"dashboard.pageProgress": "p. {{current}} / {{total}}",
|
||||||
|
"dashboard.noCurrentlyReading": "Aucun livre en cours",
|
||||||
|
"dashboard.noRecentlyRead": "Aucun livre lu récemment",
|
||||||
|
|
||||||
// Books page
|
// Books page
|
||||||
"books.title": "Livres",
|
"books.title": "Livres",
|
||||||
@@ -541,6 +555,33 @@ const fr = {
|
|||||||
"settings.qbittorrentUsername": "Nom d'utilisateur",
|
"settings.qbittorrentUsername": "Nom d'utilisateur",
|
||||||
"settings.qbittorrentPassword": "Mot de passe",
|
"settings.qbittorrentPassword": "Mot de passe",
|
||||||
|
|
||||||
|
// Settings - Telegram Notifications
|
||||||
|
"settings.notifications": "Notifications",
|
||||||
|
"settings.telegram": "Telegram",
|
||||||
|
"settings.telegramDesc": "Recevoir des notifications Telegram lors des scans, erreurs et liaisons de métadonnées.",
|
||||||
|
"settings.botToken": "Bot Token",
|
||||||
|
"settings.botTokenPlaceholder": "123456:ABC-DEF1234ghIkl-zyx57W2v1u123ew11",
|
||||||
|
"settings.chatId": "Chat ID",
|
||||||
|
"settings.chatIdPlaceholder": "123456789",
|
||||||
|
"settings.telegramEnabled": "Activer les notifications Telegram",
|
||||||
|
"settings.telegramEvents": "Événements",
|
||||||
|
"settings.eventCategoryScan": "Scans",
|
||||||
|
"settings.eventCategoryThumbnail": "Miniatures",
|
||||||
|
"settings.eventCategoryConversion": "Conversion CBR → CBZ",
|
||||||
|
"settings.eventCategoryMetadata": "Métadonnées",
|
||||||
|
"settings.eventCompleted": "Terminé",
|
||||||
|
"settings.eventFailed": "Échoué",
|
||||||
|
"settings.eventCancelled": "Annulé",
|
||||||
|
"settings.eventLinked": "Liée",
|
||||||
|
"settings.eventBatchCompleted": "Batch terminé",
|
||||||
|
"settings.eventBatchFailed": "Batch échoué",
|
||||||
|
"settings.eventRefreshCompleted": "Rafraîchissement terminé",
|
||||||
|
"settings.eventRefreshFailed": "Rafraîchissement échoué",
|
||||||
|
"settings.telegramHelp": "Comment obtenir les informations ?",
|
||||||
|
"settings.telegramHelpBot": "Ouvrez Telegram, recherchez <b>@BotFather</b>, envoyez <code>/newbot</code> et suivez les instructions. Copiez le token fourni.",
|
||||||
|
"settings.telegramHelpChat": "Envoyez un message à votre bot, puis ouvrez <code>https://api.telegram.org/bot<TOKEN>/getUpdates</code> dans votre navigateur. Le <b>chat id</b> apparaît dans <code>message.chat.id</code>.",
|
||||||
|
"settings.telegramHelpGroup": "Pour un groupe : ajoutez le bot au groupe, envoyez un message, puis consultez la même URL. Les IDs de groupe sont négatifs (ex: <code>-123456789</code>).",
|
||||||
|
|
||||||
// Settings - Language
|
// Settings - Language
|
||||||
"settings.language": "Langue",
|
"settings.language": "Langue",
|
||||||
"settings.languageDesc": "Choisir la langue de l'interface",
|
"settings.languageDesc": "Choisir la langue de l'interface",
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ const nextConfig = {
|
|||||||
typedRoutes: true,
|
typedRoutes: true,
|
||||||
images: {
|
images: {
|
||||||
minimumCacheTTL: 86400,
|
minimumCacheTTL: 86400,
|
||||||
|
unoptimized: true,
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|||||||
415
apps/backoffice/package-lock.json
generated
415
apps/backoffice/package-lock.json
generated
@@ -1,17 +1,18 @@
|
|||||||
{
|
{
|
||||||
"name": "stripstream-backoffice",
|
"name": "stripstream-backoffice",
|
||||||
"version": "1.4.0",
|
"version": "1.23.0",
|
||||||
"lockfileVersion": 3,
|
"lockfileVersion": 3,
|
||||||
"requires": true,
|
"requires": true,
|
||||||
"packages": {
|
"packages": {
|
||||||
"": {
|
"": {
|
||||||
"name": "stripstream-backoffice",
|
"name": "stripstream-backoffice",
|
||||||
"version": "1.4.0",
|
"version": "1.23.0",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"next": "^16.1.6",
|
"next": "^16.1.6",
|
||||||
"next-themes": "^0.4.6",
|
"next-themes": "^0.4.6",
|
||||||
"react": "19.0.0",
|
"react": "19.0.0",
|
||||||
"react-dom": "19.0.0",
|
"react-dom": "19.0.0",
|
||||||
|
"recharts": "^3.8.0",
|
||||||
"sanitize-html": "^2.17.1"
|
"sanitize-html": "^2.17.1"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
@@ -759,6 +760,54 @@
|
|||||||
"node": ">= 10"
|
"node": ">= 10"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/@reduxjs/toolkit": {
|
||||||
|
"version": "2.11.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/@reduxjs/toolkit/-/toolkit-2.11.2.tgz",
|
||||||
|
"integrity": "sha512-Kd6kAHTA6/nUpp8mySPqj3en3dm0tdMIgbttnQ1xFMVpufoj+ADi8pXLBsd4xzTRHQa7t/Jv8W5UnCuW4kuWMQ==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@standard-schema/spec": "^1.0.0",
|
||||||
|
"@standard-schema/utils": "^0.3.0",
|
||||||
|
"immer": "^11.0.0",
|
||||||
|
"redux": "^5.0.1",
|
||||||
|
"redux-thunk": "^3.1.0",
|
||||||
|
"reselect": "^5.1.0"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"react": "^16.9.0 || ^17.0.0 || ^18 || ^19",
|
||||||
|
"react-redux": "^7.2.1 || ^8.1.3 || ^9.0.0"
|
||||||
|
},
|
||||||
|
"peerDependenciesMeta": {
|
||||||
|
"react": {
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"react-redux": {
|
||||||
|
"optional": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@reduxjs/toolkit/node_modules/immer": {
|
||||||
|
"version": "11.1.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/immer/-/immer-11.1.4.tgz",
|
||||||
|
"integrity": "sha512-XREFCPo6ksxVzP4E0ekD5aMdf8WMwmdNaz6vuvxgI40UaEiu6q3p8X52aU6GdyvLY3XXX/8R7JOTXStz/nBbRw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"funding": {
|
||||||
|
"type": "opencollective",
|
||||||
|
"url": "https://opencollective.com/immer"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@standard-schema/spec": {
|
||||||
|
"version": "1.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.1.0.tgz",
|
||||||
|
"integrity": "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/@standard-schema/utils": {
|
||||||
|
"version": "0.3.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/@standard-schema/utils/-/utils-0.3.0.tgz",
|
||||||
|
"integrity": "sha512-e7Mew686owMaPJVNNLs55PUvgz371nKgwsc4vxE49zsODpJEnxgxRo2y/OKrqueavXgZNMDVj3DdHFlaSAeU8g==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/@swc/helpers": {
|
"node_modules/@swc/helpers": {
|
||||||
"version": "0.5.15",
|
"version": "0.5.15",
|
||||||
"resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.15.tgz",
|
"resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.15.tgz",
|
||||||
@@ -1051,6 +1100,69 @@
|
|||||||
"tailwindcss": "4.2.1"
|
"tailwindcss": "4.2.1"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/@types/d3-array": {
|
||||||
|
"version": "3.2.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/d3-array/-/d3-array-3.2.2.tgz",
|
||||||
|
"integrity": "sha512-hOLWVbm7uRza0BYXpIIW5pxfrKe0W+D5lrFiAEYR+pb6w3N2SwSMaJbXdUfSEv+dT4MfHBLtn5js0LAWaO6otw==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/@types/d3-color": {
|
||||||
|
"version": "3.1.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/d3-color/-/d3-color-3.1.3.tgz",
|
||||||
|
"integrity": "sha512-iO90scth9WAbmgv7ogoq57O9YpKmFBbmoEoCHDB2xMBY0+/KVrqAaCDyCE16dUspeOvIxFFRI+0sEtqDqy2b4A==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/@types/d3-ease": {
|
||||||
|
"version": "3.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/d3-ease/-/d3-ease-3.0.2.tgz",
|
||||||
|
"integrity": "sha512-NcV1JjO5oDzoK26oMzbILE6HW7uVXOHLQvHshBUW4UMdZGfiY6v5BeQwh9a9tCzv+CeefZQHJt5SRgK154RtiA==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/@types/d3-interpolate": {
|
||||||
|
"version": "3.0.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/d3-interpolate/-/d3-interpolate-3.0.4.tgz",
|
||||||
|
"integrity": "sha512-mgLPETlrpVV1YRJIglr4Ez47g7Yxjl1lj7YKsiMCb27VJH9W8NVM6Bb9d8kkpG/uAQS5AmbA48q2IAolKKo1MA==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@types/d3-color": "*"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@types/d3-path": {
|
||||||
|
"version": "3.1.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/d3-path/-/d3-path-3.1.1.tgz",
|
||||||
|
"integrity": "sha512-VMZBYyQvbGmWyWVea0EHs/BwLgxc+MKi1zLDCONksozI4YJMcTt8ZEuIR4Sb1MMTE8MMW49v0IwI5+b7RmfWlg==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/@types/d3-scale": {
|
||||||
|
"version": "4.0.9",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/d3-scale/-/d3-scale-4.0.9.tgz",
|
||||||
|
"integrity": "sha512-dLmtwB8zkAeO/juAMfnV+sItKjlsw2lKdZVVy6LRr0cBmegxSABiLEpGVmSJJ8O08i4+sGR6qQtb6WtuwJdvVw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@types/d3-time": "*"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@types/d3-shape": {
|
||||||
|
"version": "3.1.8",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/d3-shape/-/d3-shape-3.1.8.tgz",
|
||||||
|
"integrity": "sha512-lae0iWfcDeR7qt7rA88BNiqdvPS5pFVPpo5OfjElwNaT2yyekbM0C9vK+yqBqEmHr6lDkRnYNoTBYlAgJa7a4w==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@types/d3-path": "*"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/@types/d3-time": {
|
||||||
|
"version": "3.0.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/d3-time/-/d3-time-3.0.4.tgz",
|
||||||
|
"integrity": "sha512-yuzZug1nkAAaBlBBikKZTgzCeA+k1uy4ZFwWANOfKw5z5LRhV0gNA7gNkKm7HoK+HRN0wX3EkxGk0fpbWhmB7g==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/@types/d3-timer": {
|
||||||
|
"version": "3.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/d3-timer/-/d3-timer-3.0.2.tgz",
|
||||||
|
"integrity": "sha512-Ps3T8E8dZDam6fUyNiMkekK3XUsaUEik+idO9/YjPtfj2qruF8tFBXS7XhtE4iIXBLxhmLjP3SXpLhVf21I9Lw==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/@types/node": {
|
"node_modules/@types/node": {
|
||||||
"version": "22.13.14",
|
"version": "22.13.14",
|
||||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.13.14.tgz",
|
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.13.14.tgz",
|
||||||
@@ -1065,7 +1177,7 @@
|
|||||||
"version": "19.0.12",
|
"version": "19.0.12",
|
||||||
"resolved": "https://registry.npmjs.org/@types/react/-/react-19.0.12.tgz",
|
"resolved": "https://registry.npmjs.org/@types/react/-/react-19.0.12.tgz",
|
||||||
"integrity": "sha512-V6Ar115dBDrjbtXSrS+/Oruobc+qVbbUxDFC1RSbRqLt5SYvxxyIDrSC85RWml54g+jfNeEMZhEj7wW07ONQhA==",
|
"integrity": "sha512-V6Ar115dBDrjbtXSrS+/Oruobc+qVbbUxDFC1RSbRqLt5SYvxxyIDrSC85RWml54g+jfNeEMZhEj7wW07ONQhA==",
|
||||||
"dev": true,
|
"devOptional": true,
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"csstype": "^3.0.2"
|
"csstype": "^3.0.2"
|
||||||
@@ -1124,6 +1236,12 @@
|
|||||||
"entities": "^7.0.1"
|
"entities": "^7.0.1"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/@types/use-sync-external-store": {
|
||||||
|
"version": "0.0.6",
|
||||||
|
"resolved": "https://registry.npmjs.org/@types/use-sync-external-store/-/use-sync-external-store-0.0.6.tgz",
|
||||||
|
"integrity": "sha512-zFDAD+tlpf2r4asuHEj0XH6pY6i0g5NeAHPn+15wk3BV6JA69eERFXC1gyGThDkVa1zCyKr5jox1+2LbV/AMLg==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/autoprefixer": {
|
"node_modules/autoprefixer": {
|
||||||
"version": "10.4.27",
|
"version": "10.4.27",
|
||||||
"resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.27.tgz",
|
"resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.27.tgz",
|
||||||
@@ -1233,11 +1351,147 @@
|
|||||||
"integrity": "sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA==",
|
"integrity": "sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA==",
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
|
"node_modules/clsx": {
|
||||||
|
"version": "2.1.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz",
|
||||||
|
"integrity": "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==",
|
||||||
|
"license": "MIT",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=6"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/csstype": {
|
"node_modules/csstype": {
|
||||||
"version": "3.2.3",
|
"version": "3.2.3",
|
||||||
"resolved": "https://registry.npmjs.org/csstype/-/csstype-3.2.3.tgz",
|
"resolved": "https://registry.npmjs.org/csstype/-/csstype-3.2.3.tgz",
|
||||||
"integrity": "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==",
|
"integrity": "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==",
|
||||||
"dev": true,
|
"devOptional": true,
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/d3-array": {
|
||||||
|
"version": "3.2.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.2.4.tgz",
|
||||||
|
"integrity": "sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==",
|
||||||
|
"license": "ISC",
|
||||||
|
"dependencies": {
|
||||||
|
"internmap": "1 - 2"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/d3-color": {
|
||||||
|
"version": "3.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/d3-color/-/d3-color-3.1.0.tgz",
|
||||||
|
"integrity": "sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==",
|
||||||
|
"license": "ISC",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/d3-ease": {
|
||||||
|
"version": "3.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/d3-ease/-/d3-ease-3.0.1.tgz",
|
||||||
|
"integrity": "sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==",
|
||||||
|
"license": "BSD-3-Clause",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/d3-format": {
|
||||||
|
"version": "3.1.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/d3-format/-/d3-format-3.1.2.tgz",
|
||||||
|
"integrity": "sha512-AJDdYOdnyRDV5b6ArilzCPPwc1ejkHcoyFarqlPqT7zRYjhavcT3uSrqcMvsgh2CgoPbK3RCwyHaVyxYcP2Arg==",
|
||||||
|
"license": "ISC",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/d3-interpolate": {
|
||||||
|
"version": "3.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/d3-interpolate/-/d3-interpolate-3.0.1.tgz",
|
||||||
|
"integrity": "sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==",
|
||||||
|
"license": "ISC",
|
||||||
|
"dependencies": {
|
||||||
|
"d3-color": "1 - 3"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/d3-path": {
|
||||||
|
"version": "3.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/d3-path/-/d3-path-3.1.0.tgz",
|
||||||
|
"integrity": "sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==",
|
||||||
|
"license": "ISC",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/d3-scale": {
|
||||||
|
"version": "4.0.2",
|
||||||
|
"resolved": "https://registry.npmjs.org/d3-scale/-/d3-scale-4.0.2.tgz",
|
||||||
|
"integrity": "sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==",
|
||||||
|
"license": "ISC",
|
||||||
|
"dependencies": {
|
||||||
|
"d3-array": "2.10.0 - 3",
|
||||||
|
"d3-format": "1 - 3",
|
||||||
|
"d3-interpolate": "1.2.0 - 3",
|
||||||
|
"d3-time": "2.1.1 - 3",
|
||||||
|
"d3-time-format": "2 - 4"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/d3-shape": {
|
||||||
|
"version": "3.2.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-3.2.0.tgz",
|
||||||
|
"integrity": "sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==",
|
||||||
|
"license": "ISC",
|
||||||
|
"dependencies": {
|
||||||
|
"d3-path": "^3.1.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/d3-time": {
|
||||||
|
"version": "3.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/d3-time/-/d3-time-3.1.0.tgz",
|
||||||
|
"integrity": "sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==",
|
||||||
|
"license": "ISC",
|
||||||
|
"dependencies": {
|
||||||
|
"d3-array": "2 - 3"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/d3-time-format": {
|
||||||
|
"version": "4.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/d3-time-format/-/d3-time-format-4.1.0.tgz",
|
||||||
|
"integrity": "sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==",
|
||||||
|
"license": "ISC",
|
||||||
|
"dependencies": {
|
||||||
|
"d3-time": "1 - 3"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/d3-timer": {
|
||||||
|
"version": "3.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/d3-timer/-/d3-timer-3.0.1.tgz",
|
||||||
|
"integrity": "sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==",
|
||||||
|
"license": "ISC",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/decimal.js-light": {
|
||||||
|
"version": "2.5.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/decimal.js-light/-/decimal.js-light-2.5.1.tgz",
|
||||||
|
"integrity": "sha512-qIMFpTMZmny+MMIitAB6D7iVPEorVw6YQRWkvarTkT4tBeSLLiHzcwj6q0MmYSFCiVpiqPJTJEYIrpcPzVEIvg==",
|
||||||
"license": "MIT"
|
"license": "MIT"
|
||||||
},
|
},
|
||||||
"node_modules/deepmerge": {
|
"node_modules/deepmerge": {
|
||||||
@@ -1347,6 +1601,16 @@
|
|||||||
"url": "https://github.com/fb55/entities?sponsor=1"
|
"url": "https://github.com/fb55/entities?sponsor=1"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/es-toolkit": {
|
||||||
|
"version": "1.45.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/es-toolkit/-/es-toolkit-1.45.1.tgz",
|
||||||
|
"integrity": "sha512-/jhoOj/Fx+A+IIyDNOvO3TItGmlMKhtX8ISAHKE90c4b/k1tqaqEZ+uUqfpU8DMnW5cgNJv606zS55jGvza0Xw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"workspaces": [
|
||||||
|
"docs",
|
||||||
|
"benchmarks"
|
||||||
|
]
|
||||||
|
},
|
||||||
"node_modules/escalade": {
|
"node_modules/escalade": {
|
||||||
"version": "3.2.0",
|
"version": "3.2.0",
|
||||||
"resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz",
|
"resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz",
|
||||||
@@ -1369,6 +1633,12 @@
|
|||||||
"url": "https://github.com/sponsors/sindresorhus"
|
"url": "https://github.com/sponsors/sindresorhus"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/eventemitter3": {
|
||||||
|
"version": "5.0.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.4.tgz",
|
||||||
|
"integrity": "sha512-mlsTRyGaPBjPedk6Bvw+aqbsXDtoAyAzm5MO7JgU+yVRyMQ5O8bD4Kcci7BS85f93veegeCPkL8R4GLClnjLFw==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/fraction.js": {
|
"node_modules/fraction.js": {
|
||||||
"version": "5.3.4",
|
"version": "5.3.4",
|
||||||
"resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-5.3.4.tgz",
|
"resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-5.3.4.tgz",
|
||||||
@@ -1409,6 +1679,25 @@
|
|||||||
"entities": "^4.4.0"
|
"entities": "^4.4.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/immer": {
|
||||||
|
"version": "10.2.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/immer/-/immer-10.2.0.tgz",
|
||||||
|
"integrity": "sha512-d/+XTN3zfODyjr89gM3mPq1WNX2B8pYsu7eORitdwyA2sBubnTl3laYlBk4sXY5FUa5qTZGBDPJICVbvqzjlbw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"funding": {
|
||||||
|
"type": "opencollective",
|
||||||
|
"url": "https://opencollective.com/immer"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/internmap": {
|
||||||
|
"version": "2.0.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/internmap/-/internmap-2.0.3.tgz",
|
||||||
|
"integrity": "sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==",
|
||||||
|
"license": "ISC",
|
||||||
|
"engines": {
|
||||||
|
"node": ">=12"
|
||||||
|
}
|
||||||
|
},
|
||||||
"node_modules/is-plain-object": {
|
"node_modules/is-plain-object": {
|
||||||
"version": "5.0.0",
|
"version": "5.0.0",
|
||||||
"resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz",
|
"resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz",
|
||||||
@@ -1895,6 +2184,87 @@
|
|||||||
"react": "^19.0.0"
|
"react": "^19.0.0"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/react-is": {
|
||||||
|
"version": "19.2.4",
|
||||||
|
"resolved": "https://registry.npmjs.org/react-is/-/react-is-19.2.4.tgz",
|
||||||
|
"integrity": "sha512-W+EWGn2v0ApPKgKKCy/7s7WHXkboGcsrXE+2joLyVxkbyVQfO3MUEaUQDHoSmb8TFFrSKYa9mw64WZHNHSDzYA==",
|
||||||
|
"license": "MIT",
|
||||||
|
"peer": true
|
||||||
|
},
|
||||||
|
"node_modules/react-redux": {
|
||||||
|
"version": "9.2.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/react-redux/-/react-redux-9.2.0.tgz",
|
||||||
|
"integrity": "sha512-ROY9fvHhwOD9ySfrF0wmvu//bKCQ6AeZZq1nJNtbDC+kk5DuSuNX/n6YWYF/SYy7bSba4D4FSz8DJeKY/S/r+g==",
|
||||||
|
"license": "MIT",
|
||||||
|
"dependencies": {
|
||||||
|
"@types/use-sync-external-store": "^0.0.6",
|
||||||
|
"use-sync-external-store": "^1.4.0"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"@types/react": "^18.2.25 || ^19",
|
||||||
|
"react": "^18.0 || ^19",
|
||||||
|
"redux": "^5.0.0"
|
||||||
|
},
|
||||||
|
"peerDependenciesMeta": {
|
||||||
|
"@types/react": {
|
||||||
|
"optional": true
|
||||||
|
},
|
||||||
|
"redux": {
|
||||||
|
"optional": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/recharts": {
|
||||||
|
"version": "3.8.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/recharts/-/recharts-3.8.0.tgz",
|
||||||
|
"integrity": "sha512-Z/m38DX3L73ExO4Tpc9/iZWHmHnlzWG4njQbxsF5aSjwqmHNDDIm0rdEBArkwsBvR8U6EirlEHiQNYWCVh9sGQ==",
|
||||||
|
"license": "MIT",
|
||||||
|
"workspaces": [
|
||||||
|
"www"
|
||||||
|
],
|
||||||
|
"dependencies": {
|
||||||
|
"@reduxjs/toolkit": "^1.9.0 || 2.x.x",
|
||||||
|
"clsx": "^2.1.1",
|
||||||
|
"decimal.js-light": "^2.5.1",
|
||||||
|
"es-toolkit": "^1.39.3",
|
||||||
|
"eventemitter3": "^5.0.1",
|
||||||
|
"immer": "^10.1.1",
|
||||||
|
"react-redux": "8.x.x || 9.x.x",
|
||||||
|
"reselect": "5.1.1",
|
||||||
|
"tiny-invariant": "^1.3.3",
|
||||||
|
"use-sync-external-store": "^1.2.2",
|
||||||
|
"victory-vendor": "^37.0.2"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=18"
|
||||||
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0",
|
||||||
|
"react-dom": "^16.0.0 || ^17.0.0 || ^18.0.0 || ^19.0.0",
|
||||||
|
"react-is": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/redux": {
|
||||||
|
"version": "5.0.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/redux/-/redux-5.0.1.tgz",
|
||||||
|
"integrity": "sha512-M9/ELqF6fy8FwmkpnF0S3YKOqMyoWJ4+CS5Efg2ct3oY9daQvd/Pc71FpGZsVsbl3Cpb+IIcjBDUnnyBdQbq4w==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
|
"node_modules/redux-thunk": {
|
||||||
|
"version": "3.1.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/redux-thunk/-/redux-thunk-3.1.0.tgz",
|
||||||
|
"integrity": "sha512-NW2r5T6ksUKXCabzhL9z+h206HQw/NJkcLm1GPImRQ8IzfXwRGqjVhKJGauHirT0DAuyy6hjdnMZaRoAcy0Klw==",
|
||||||
|
"license": "MIT",
|
||||||
|
"peerDependencies": {
|
||||||
|
"redux": "^5.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/reselect": {
|
||||||
|
"version": "5.1.1",
|
||||||
|
"resolved": "https://registry.npmjs.org/reselect/-/reselect-5.1.1.tgz",
|
||||||
|
"integrity": "sha512-K/BG6eIky/SBpzfHZv/dd+9JBFiS4SWV7FIujVyJRux6e45+73RaUHXLmIR1f7WOMaQ0U1km6qwklRQxpJJY0w==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/sanitize-html": {
|
"node_modules/sanitize-html": {
|
||||||
"version": "2.17.1",
|
"version": "2.17.1",
|
||||||
"resolved": "https://registry.npmjs.org/sanitize-html/-/sanitize-html-2.17.1.tgz",
|
"resolved": "https://registry.npmjs.org/sanitize-html/-/sanitize-html-2.17.1.tgz",
|
||||||
@@ -2026,6 +2396,12 @@
|
|||||||
"url": "https://opencollective.com/webpack"
|
"url": "https://opencollective.com/webpack"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
"node_modules/tiny-invariant": {
|
||||||
|
"version": "1.3.3",
|
||||||
|
"resolved": "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.3.3.tgz",
|
||||||
|
"integrity": "sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==",
|
||||||
|
"license": "MIT"
|
||||||
|
},
|
||||||
"node_modules/tslib": {
|
"node_modules/tslib": {
|
||||||
"version": "2.8.1",
|
"version": "2.8.1",
|
||||||
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
|
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
|
||||||
@@ -2083,6 +2459,37 @@
|
|||||||
"peerDependencies": {
|
"peerDependencies": {
|
||||||
"browserslist": ">= 4.21.0"
|
"browserslist": ">= 4.21.0"
|
||||||
}
|
}
|
||||||
|
},
|
||||||
|
"node_modules/use-sync-external-store": {
|
||||||
|
"version": "1.6.0",
|
||||||
|
"resolved": "https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.6.0.tgz",
|
||||||
|
"integrity": "sha512-Pp6GSwGP/NrPIrxVFAIkOQeyw8lFenOHijQWkUTrDvrF4ALqylP2C/KCkeS9dpUM3KvYRQhna5vt7IL95+ZQ9w==",
|
||||||
|
"license": "MIT",
|
||||||
|
"peerDependencies": {
|
||||||
|
"react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"node_modules/victory-vendor": {
|
||||||
|
"version": "37.3.6",
|
||||||
|
"resolved": "https://registry.npmjs.org/victory-vendor/-/victory-vendor-37.3.6.tgz",
|
||||||
|
"integrity": "sha512-SbPDPdDBYp+5MJHhBCAyI7wKM3d5ivekigc2Dk2s7pgbZ9wIgIBYGVw4zGHBml/qTFbexrofXW6Gu4noGxrOwQ==",
|
||||||
|
"license": "MIT AND ISC",
|
||||||
|
"dependencies": {
|
||||||
|
"@types/d3-array": "^3.0.3",
|
||||||
|
"@types/d3-ease": "^3.0.0",
|
||||||
|
"@types/d3-interpolate": "^3.0.1",
|
||||||
|
"@types/d3-scale": "^4.0.2",
|
||||||
|
"@types/d3-shape": "^3.1.0",
|
||||||
|
"@types/d3-time": "^3.0.0",
|
||||||
|
"@types/d3-timer": "^3.0.0",
|
||||||
|
"d3-array": "^3.1.6",
|
||||||
|
"d3-ease": "^3.0.1",
|
||||||
|
"d3-interpolate": "^3.0.1",
|
||||||
|
"d3-scale": "^4.0.2",
|
||||||
|
"d3-shape": "^3.1.0",
|
||||||
|
"d3-time": "^3.0.0",
|
||||||
|
"d3-timer": "^3.0.1"
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "stripstream-backoffice",
|
"name": "stripstream-backoffice",
|
||||||
"version": "1.21.1",
|
"version": "1.27.1",
|
||||||
"private": true,
|
"private": true,
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"dev": "next dev -p 7082",
|
"dev": "next dev -p 7082",
|
||||||
@@ -12,6 +12,7 @@
|
|||||||
"next-themes": "^0.4.6",
|
"next-themes": "^0.4.6",
|
||||||
"react": "19.0.0",
|
"react": "19.0.0",
|
||||||
"react-dom": "19.0.0",
|
"react-dom": "19.0.0",
|
||||||
|
"recharts": "^3.8.0",
|
||||||
"sanitize-html": "^2.17.1"
|
"sanitize-html": "^2.17.1"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
|
|||||||
@@ -14,6 +14,7 @@ futures = "0.3"
|
|||||||
image.workspace = true
|
image.workspace = true
|
||||||
jpeg-decoder.workspace = true
|
jpeg-decoder.workspace = true
|
||||||
num_cpus.workspace = true
|
num_cpus.workspace = true
|
||||||
|
notifications = { path = "../../crates/notifications" }
|
||||||
parsers = { path = "../../crates/parsers" }
|
parsers = { path = "../../crates/parsers" }
|
||||||
reqwest.workspace = true
|
reqwest.workspace = true
|
||||||
serde.workspace = true
|
serde.workspace = true
|
||||||
|
|||||||
@@ -6,13 +6,15 @@ COPY Cargo.toml ./
|
|||||||
COPY apps/api/Cargo.toml apps/api/Cargo.toml
|
COPY apps/api/Cargo.toml apps/api/Cargo.toml
|
||||||
COPY apps/indexer/Cargo.toml apps/indexer/Cargo.toml
|
COPY apps/indexer/Cargo.toml apps/indexer/Cargo.toml
|
||||||
COPY crates/core/Cargo.toml crates/core/Cargo.toml
|
COPY crates/core/Cargo.toml crates/core/Cargo.toml
|
||||||
|
COPY crates/notifications/Cargo.toml crates/notifications/Cargo.toml
|
||||||
COPY crates/parsers/Cargo.toml crates/parsers/Cargo.toml
|
COPY crates/parsers/Cargo.toml crates/parsers/Cargo.toml
|
||||||
|
|
||||||
RUN mkdir -p apps/api/src apps/indexer/src crates/core/src crates/parsers/src && \
|
RUN mkdir -p apps/api/src apps/indexer/src crates/core/src crates/notifications/src crates/parsers/src && \
|
||||||
echo "fn main() {}" > apps/api/src/main.rs && \
|
echo "fn main() {}" > apps/api/src/main.rs && \
|
||||||
echo "fn main() {}" > apps/indexer/src/main.rs && \
|
echo "fn main() {}" > apps/indexer/src/main.rs && \
|
||||||
echo "" > apps/indexer/src/lib.rs && \
|
echo "" > apps/indexer/src/lib.rs && \
|
||||||
echo "" > crates/core/src/lib.rs && \
|
echo "" > crates/core/src/lib.rs && \
|
||||||
|
echo "" > crates/notifications/src/lib.rs && \
|
||||||
echo "" > crates/parsers/src/lib.rs
|
echo "" > crates/parsers/src/lib.rs
|
||||||
|
|
||||||
# Build dependencies only (cached as long as Cargo.toml files don't change)
|
# Build dependencies only (cached as long as Cargo.toml files don't change)
|
||||||
@@ -25,12 +27,13 @@ RUN --mount=type=cache,target=/usr/local/cargo/registry \
|
|||||||
COPY apps/api/src apps/api/src
|
COPY apps/api/src apps/api/src
|
||||||
COPY apps/indexer/src apps/indexer/src
|
COPY apps/indexer/src apps/indexer/src
|
||||||
COPY crates/core/src crates/core/src
|
COPY crates/core/src crates/core/src
|
||||||
|
COPY crates/notifications/src crates/notifications/src
|
||||||
COPY crates/parsers/src crates/parsers/src
|
COPY crates/parsers/src crates/parsers/src
|
||||||
|
|
||||||
RUN --mount=type=cache,target=/usr/local/cargo/registry \
|
RUN --mount=type=cache,target=/usr/local/cargo/registry \
|
||||||
--mount=type=cache,target=/usr/local/cargo/git \
|
--mount=type=cache,target=/usr/local/cargo/git \
|
||||||
--mount=type=cache,target=/app/target \
|
--mount=type=cache,target=/app/target \
|
||||||
touch apps/indexer/src/main.rs crates/core/src/lib.rs crates/parsers/src/lib.rs && \
|
touch apps/indexer/src/main.rs crates/core/src/lib.rs crates/notifications/src/lib.rs crates/parsers/src/lib.rs && \
|
||||||
cargo build --release -p indexer && \
|
cargo build --release -p indexer && \
|
||||||
cp /app/target/release/indexer /usr/local/bin/indexer
|
cp /app/target/release/indexer /usr/local/bin/indexer
|
||||||
|
|
||||||
|
|||||||
@@ -328,6 +328,7 @@ pub async fn process_job(
|
|||||||
removed_files: 0,
|
removed_files: 0,
|
||||||
errors: 0,
|
errors: 0,
|
||||||
warnings: 0,
|
warnings: 0,
|
||||||
|
new_series: 0,
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut total_processed_count = 0i32;
|
let mut total_processed_count = 0i32;
|
||||||
|
|||||||
@@ -14,6 +14,7 @@ use crate::{
|
|||||||
utils,
|
utils,
|
||||||
AppState,
|
AppState,
|
||||||
};
|
};
|
||||||
|
use std::collections::HashSet;
|
||||||
|
|
||||||
#[derive(Serialize)]
|
#[derive(Serialize)]
|
||||||
pub struct JobStats {
|
pub struct JobStats {
|
||||||
@@ -22,6 +23,7 @@ pub struct JobStats {
|
|||||||
pub removed_files: usize,
|
pub removed_files: usize,
|
||||||
pub errors: usize,
|
pub errors: usize,
|
||||||
pub warnings: usize,
|
pub warnings: usize,
|
||||||
|
pub new_series: usize,
|
||||||
}
|
}
|
||||||
|
|
||||||
const BATCH_SIZE: usize = 100;
|
const BATCH_SIZE: usize = 100;
|
||||||
@@ -106,6 +108,18 @@ pub async fn scan_library_discovery(
|
|||||||
HashMap::new()
|
HashMap::new()
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// Track existing series names for new_series counting
|
||||||
|
let existing_series: HashSet<String> = sqlx::query_scalar(
|
||||||
|
"SELECT DISTINCT COALESCE(NULLIF(series, ''), 'unclassified') FROM books WHERE library_id = $1",
|
||||||
|
)
|
||||||
|
.bind(library_id)
|
||||||
|
.fetch_all(&state.pool)
|
||||||
|
.await
|
||||||
|
.unwrap_or_default()
|
||||||
|
.into_iter()
|
||||||
|
.collect();
|
||||||
|
let mut seen_new_series: HashSet<String> = HashSet::new();
|
||||||
|
|
||||||
let mut seen: HashMap<String, bool> = HashMap::new();
|
let mut seen: HashMap<String, bool> = HashMap::new();
|
||||||
let mut library_processed_count = 0i32;
|
let mut library_processed_count = 0i32;
|
||||||
let mut last_progress_update = std::time::Instant::now();
|
let mut last_progress_update = std::time::Instant::now();
|
||||||
@@ -382,6 +396,12 @@ pub async fn scan_library_discovery(
|
|||||||
let book_id = Uuid::new_v4();
|
let book_id = Uuid::new_v4();
|
||||||
let file_id = Uuid::new_v4();
|
let file_id = Uuid::new_v4();
|
||||||
|
|
||||||
|
// Track new series
|
||||||
|
let series_key = parsed.series.as_deref().unwrap_or("unclassified").to_string();
|
||||||
|
if !existing_series.contains(&series_key) && seen_new_series.insert(series_key) {
|
||||||
|
stats.new_series += 1;
|
||||||
|
}
|
||||||
|
|
||||||
books_to_insert.push(BookInsert {
|
books_to_insert.push(BookInsert {
|
||||||
book_id,
|
book_id,
|
||||||
library_id,
|
library_id,
|
||||||
|
|||||||
@@ -1,5 +1,7 @@
|
|||||||
use std::time::Duration;
|
use std::time::Duration;
|
||||||
|
use sqlx::Row;
|
||||||
use tracing::{error, info, trace};
|
use tracing::{error, info, trace};
|
||||||
|
use uuid::Uuid;
|
||||||
use crate::{job, scheduler, watcher, AppState};
|
use crate::{job, scheduler, watcher, AppState};
|
||||||
|
|
||||||
pub async fn run_worker(state: AppState, interval_seconds: u64) {
|
pub async fn run_worker(state: AppState, interval_seconds: u64) {
|
||||||
@@ -34,21 +36,183 @@ pub async fn run_worker(state: AppState, interval_seconds: u64) {
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
struct JobInfo {
|
||||||
|
job_type: String,
|
||||||
|
library_name: Option<String>,
|
||||||
|
book_title: Option<String>,
|
||||||
|
thumbnail_path: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn load_job_info(
|
||||||
|
pool: &sqlx::PgPool,
|
||||||
|
job_id: Uuid,
|
||||||
|
library_id: Option<Uuid>,
|
||||||
|
) -> JobInfo {
|
||||||
|
let row = sqlx::query("SELECT type, book_id FROM index_jobs WHERE id = $1")
|
||||||
|
.bind(job_id)
|
||||||
|
.fetch_optional(pool)
|
||||||
|
.await
|
||||||
|
.ok()
|
||||||
|
.flatten();
|
||||||
|
|
||||||
|
let (job_type, book_id): (String, Option<Uuid>) = match row {
|
||||||
|
Some(r) => (r.get("type"), r.get("book_id")),
|
||||||
|
None => ("unknown".to_string(), None),
|
||||||
|
};
|
||||||
|
|
||||||
|
let library_name: Option<String> = if let Some(lib_id) = library_id {
|
||||||
|
sqlx::query_scalar("SELECT name FROM libraries WHERE id = $1")
|
||||||
|
.bind(lib_id)
|
||||||
|
.fetch_optional(pool)
|
||||||
|
.await
|
||||||
|
.ok()
|
||||||
|
.flatten()
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
};
|
||||||
|
|
||||||
|
let (book_title, thumbnail_path): (Option<String>, Option<String>) = if let Some(bid) = book_id {
|
||||||
|
let row = sqlx::query("SELECT title, thumbnail_path FROM books WHERE id = $1")
|
||||||
|
.bind(bid)
|
||||||
|
.fetch_optional(pool)
|
||||||
|
.await
|
||||||
|
.ok()
|
||||||
|
.flatten();
|
||||||
|
match row {
|
||||||
|
Some(r) => (r.get("title"), r.get("thumbnail_path")),
|
||||||
|
None => (None, None),
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
(None, None)
|
||||||
|
};
|
||||||
|
|
||||||
|
JobInfo { job_type, library_name, book_title, thumbnail_path }
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn load_scan_stats(pool: &sqlx::PgPool, job_id: Uuid) -> notifications::ScanStats {
|
||||||
|
let row = sqlx::query("SELECT stats_json FROM index_jobs WHERE id = $1")
|
||||||
|
.bind(job_id)
|
||||||
|
.fetch_optional(pool)
|
||||||
|
.await
|
||||||
|
.ok()
|
||||||
|
.flatten();
|
||||||
|
|
||||||
|
if let Some(row) = row {
|
||||||
|
if let Ok(val) = row.try_get::<serde_json::Value, _>("stats_json") {
|
||||||
|
return notifications::ScanStats {
|
||||||
|
scanned_files: val.get("scanned_files").and_then(|v| v.as_u64()).unwrap_or(0) as usize,
|
||||||
|
indexed_files: val.get("indexed_files").and_then(|v| v.as_u64()).unwrap_or(0) as usize,
|
||||||
|
removed_files: val.get("removed_files").and_then(|v| v.as_u64()).unwrap_or(0) as usize,
|
||||||
|
new_series: val.get("new_series").and_then(|v| v.as_u64()).unwrap_or(0) as usize,
|
||||||
|
errors: val.get("errors").and_then(|v| v.as_u64()).unwrap_or(0) as usize,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
notifications::ScanStats {
|
||||||
|
scanned_files: 0,
|
||||||
|
indexed_files: 0,
|
||||||
|
removed_files: 0,
|
||||||
|
new_series: 0,
|
||||||
|
errors: 0,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn build_completed_event(
|
||||||
|
job_type: &str,
|
||||||
|
library_name: Option<String>,
|
||||||
|
book_title: Option<String>,
|
||||||
|
thumbnail_path: Option<String>,
|
||||||
|
stats: notifications::ScanStats,
|
||||||
|
duration_seconds: u64,
|
||||||
|
) -> notifications::NotificationEvent {
|
||||||
|
match notifications::job_type_category(job_type) {
|
||||||
|
"thumbnail" => notifications::NotificationEvent::ThumbnailCompleted {
|
||||||
|
job_type: job_type.to_string(),
|
||||||
|
library_name,
|
||||||
|
duration_seconds,
|
||||||
|
},
|
||||||
|
"conversion" => notifications::NotificationEvent::ConversionCompleted {
|
||||||
|
library_name,
|
||||||
|
book_title,
|
||||||
|
thumbnail_path,
|
||||||
|
},
|
||||||
|
_ => notifications::NotificationEvent::ScanCompleted {
|
||||||
|
job_type: job_type.to_string(),
|
||||||
|
library_name,
|
||||||
|
stats,
|
||||||
|
duration_seconds,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn build_failed_event(
|
||||||
|
job_type: &str,
|
||||||
|
library_name: Option<String>,
|
||||||
|
book_title: Option<String>,
|
||||||
|
thumbnail_path: Option<String>,
|
||||||
|
error: String,
|
||||||
|
) -> notifications::NotificationEvent {
|
||||||
|
match notifications::job_type_category(job_type) {
|
||||||
|
"thumbnail" => notifications::NotificationEvent::ThumbnailFailed {
|
||||||
|
job_type: job_type.to_string(),
|
||||||
|
library_name,
|
||||||
|
error,
|
||||||
|
},
|
||||||
|
"conversion" => notifications::NotificationEvent::ConversionFailed {
|
||||||
|
library_name,
|
||||||
|
book_title,
|
||||||
|
thumbnail_path,
|
||||||
|
error,
|
||||||
|
},
|
||||||
|
_ => notifications::NotificationEvent::ScanFailed {
|
||||||
|
job_type: job_type.to_string(),
|
||||||
|
library_name,
|
||||||
|
error,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
loop {
|
loop {
|
||||||
match job::claim_next_job(&state.pool).await {
|
match job::claim_next_job(&state.pool).await {
|
||||||
Ok(Some((job_id, library_id))) => {
|
Ok(Some((job_id, library_id))) => {
|
||||||
info!("[INDEXER] Starting job {} library={:?}", job_id, library_id);
|
info!("[INDEXER] Starting job {} library={:?}", job_id, library_id);
|
||||||
|
let started_at = std::time::Instant::now();
|
||||||
|
let info = load_job_info(&state.pool, job_id, library_id).await;
|
||||||
|
|
||||||
if let Err(err) = job::process_job(&state, job_id, library_id).await {
|
if let Err(err) = job::process_job(&state, job_id, library_id).await {
|
||||||
let err_str = err.to_string();
|
let err_str = err.to_string();
|
||||||
if err_str.contains("cancelled") || err_str.contains("Cancelled") {
|
if err_str.contains("cancelled") || err_str.contains("Cancelled") {
|
||||||
info!("[INDEXER] Job {} was cancelled by user", job_id);
|
info!("[INDEXER] Job {} was cancelled by user", job_id);
|
||||||
// Status is already 'cancelled' in DB, don't change it
|
notifications::notify(
|
||||||
|
state.pool.clone(),
|
||||||
|
notifications::NotificationEvent::ScanCancelled {
|
||||||
|
job_type: info.job_type.clone(),
|
||||||
|
library_name: info.library_name.clone(),
|
||||||
|
},
|
||||||
|
);
|
||||||
} else {
|
} else {
|
||||||
error!("[INDEXER] Job {} failed: {}", job_id, err);
|
error!("[INDEXER] Job {} failed: {}", job_id, err);
|
||||||
let _ = job::fail_job(&state.pool, job_id, &err_str).await;
|
let _ = job::fail_job(&state.pool, job_id, &err_str).await;
|
||||||
|
notifications::notify(
|
||||||
|
state.pool.clone(),
|
||||||
|
build_failed_event(&info.job_type, info.library_name.clone(), info.book_title.clone(), info.thumbnail_path.clone(), err_str),
|
||||||
|
);
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
info!("[INDEXER] Job {} completed", job_id);
|
info!("[INDEXER] Job {} completed", job_id);
|
||||||
|
let stats = load_scan_stats(&state.pool, job_id).await;
|
||||||
|
notifications::notify(
|
||||||
|
state.pool.clone(),
|
||||||
|
build_completed_event(
|
||||||
|
&info.job_type,
|
||||||
|
info.library_name.clone(),
|
||||||
|
info.book_title.clone(),
|
||||||
|
info.thumbnail_path.clone(),
|
||||||
|
stats,
|
||||||
|
started_at.elapsed().as_secs(),
|
||||||
|
),
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Ok(None) => {
|
Ok(None) => {
|
||||||
|
|||||||
13
crates/notifications/Cargo.toml
Normal file
13
crates/notifications/Cargo.toml
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
[package]
|
||||||
|
name = "notifications"
|
||||||
|
version.workspace = true
|
||||||
|
edition.workspace = true
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
anyhow.workspace = true
|
||||||
|
reqwest.workspace = true
|
||||||
|
serde.workspace = true
|
||||||
|
serde_json.workspace = true
|
||||||
|
sqlx.workspace = true
|
||||||
|
tokio.workspace = true
|
||||||
|
tracing.workspace = true
|
||||||
513
crates/notifications/src/lib.rs
Normal file
513
crates/notifications/src/lib.rs
Normal file
@@ -0,0 +1,513 @@
|
|||||||
|
use anyhow::Result;
|
||||||
|
use serde::Deserialize;
|
||||||
|
use sqlx::PgPool;
|
||||||
|
use tracing::{info, warn};
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Config
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
#[derive(Debug, Deserialize)]
|
||||||
|
pub struct TelegramConfig {
|
||||||
|
pub bot_token: String,
|
||||||
|
pub chat_id: String,
|
||||||
|
#[serde(default)]
|
||||||
|
pub enabled: bool,
|
||||||
|
#[serde(default = "default_events")]
|
||||||
|
pub events: EventToggles,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Deserialize)]
|
||||||
|
pub struct EventToggles {
|
||||||
|
#[serde(default = "default_true")]
|
||||||
|
pub scan_completed: bool,
|
||||||
|
#[serde(default = "default_true")]
|
||||||
|
pub scan_failed: bool,
|
||||||
|
#[serde(default = "default_true")]
|
||||||
|
pub scan_cancelled: bool,
|
||||||
|
#[serde(default = "default_true")]
|
||||||
|
pub thumbnail_completed: bool,
|
||||||
|
#[serde(default = "default_true")]
|
||||||
|
pub thumbnail_failed: bool,
|
||||||
|
#[serde(default = "default_true")]
|
||||||
|
pub conversion_completed: bool,
|
||||||
|
#[serde(default = "default_true")]
|
||||||
|
pub conversion_failed: bool,
|
||||||
|
#[serde(default = "default_true")]
|
||||||
|
pub metadata_approved: bool,
|
||||||
|
#[serde(default = "default_true")]
|
||||||
|
pub metadata_batch_completed: bool,
|
||||||
|
#[serde(default = "default_true")]
|
||||||
|
pub metadata_batch_failed: bool,
|
||||||
|
#[serde(default = "default_true")]
|
||||||
|
pub metadata_refresh_completed: bool,
|
||||||
|
#[serde(default = "default_true")]
|
||||||
|
pub metadata_refresh_failed: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
fn default_true() -> bool {
|
||||||
|
true
|
||||||
|
}
|
||||||
|
|
||||||
|
fn default_events() -> EventToggles {
|
||||||
|
EventToggles {
|
||||||
|
scan_completed: true,
|
||||||
|
scan_failed: true,
|
||||||
|
scan_cancelled: true,
|
||||||
|
thumbnail_completed: true,
|
||||||
|
thumbnail_failed: true,
|
||||||
|
conversion_completed: true,
|
||||||
|
conversion_failed: true,
|
||||||
|
metadata_approved: true,
|
||||||
|
metadata_batch_completed: true,
|
||||||
|
metadata_batch_failed: true,
|
||||||
|
metadata_refresh_completed: true,
|
||||||
|
metadata_refresh_failed: true,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Load the Telegram config from `app_settings` (key = "telegram").
|
||||||
|
/// Returns `None` when the row is missing, disabled, or has empty credentials.
|
||||||
|
pub async fn load_telegram_config(pool: &PgPool) -> Option<TelegramConfig> {
|
||||||
|
let row = sqlx::query_scalar::<_, serde_json::Value>(
|
||||||
|
"SELECT value FROM app_settings WHERE key = 'telegram'",
|
||||||
|
)
|
||||||
|
.fetch_optional(pool)
|
||||||
|
.await
|
||||||
|
.ok()??;
|
||||||
|
|
||||||
|
let config: TelegramConfig = serde_json::from_value(row).ok()?;
|
||||||
|
|
||||||
|
if !config.enabled || config.bot_token.is_empty() || config.chat_id.is_empty() {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
Some(config)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Telegram HTTP
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
fn build_client() -> Result<reqwest::Client> {
|
||||||
|
Ok(reqwest::Client::builder()
|
||||||
|
.timeout(std::time::Duration::from_secs(10))
|
||||||
|
.build()?)
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn send_telegram(config: &TelegramConfig, text: &str) -> Result<()> {
|
||||||
|
let url = format!(
|
||||||
|
"https://api.telegram.org/bot{}/sendMessage",
|
||||||
|
config.bot_token
|
||||||
|
);
|
||||||
|
|
||||||
|
let body = serde_json::json!({
|
||||||
|
"chat_id": config.chat_id,
|
||||||
|
"text": text,
|
||||||
|
"parse_mode": "HTML",
|
||||||
|
});
|
||||||
|
|
||||||
|
let resp = build_client()?.post(&url).json(&body).send().await?;
|
||||||
|
|
||||||
|
if !resp.status().is_success() {
|
||||||
|
let status = resp.status();
|
||||||
|
let text = resp.text().await.unwrap_or_default();
|
||||||
|
anyhow::bail!("Telegram API returned {status}: {text}");
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
async fn send_telegram_photo(config: &TelegramConfig, caption: &str, photo_path: &str) -> Result<()> {
|
||||||
|
let url = format!(
|
||||||
|
"https://api.telegram.org/bot{}/sendPhoto",
|
||||||
|
config.bot_token
|
||||||
|
);
|
||||||
|
|
||||||
|
let photo_bytes = tokio::fs::read(photo_path).await?;
|
||||||
|
let filename = std::path::Path::new(photo_path)
|
||||||
|
.file_name()
|
||||||
|
.unwrap_or_default()
|
||||||
|
.to_string_lossy()
|
||||||
|
.to_string();
|
||||||
|
let mime = if filename.ends_with(".webp") {
|
||||||
|
"image/webp"
|
||||||
|
} else if filename.ends_with(".png") {
|
||||||
|
"image/png"
|
||||||
|
} else {
|
||||||
|
"image/jpeg"
|
||||||
|
};
|
||||||
|
|
||||||
|
let part = reqwest::multipart::Part::bytes(photo_bytes)
|
||||||
|
.file_name(filename)
|
||||||
|
.mime_str(mime)?;
|
||||||
|
|
||||||
|
let form = reqwest::multipart::Form::new()
|
||||||
|
.text("chat_id", config.chat_id.clone())
|
||||||
|
.text("caption", caption.to_string())
|
||||||
|
.text("parse_mode", "HTML")
|
||||||
|
.part("photo", part);
|
||||||
|
|
||||||
|
let resp = build_client()?.post(&url).multipart(form).send().await?;
|
||||||
|
|
||||||
|
if !resp.status().is_success() {
|
||||||
|
let status = resp.status();
|
||||||
|
let text = resp.text().await.unwrap_or_default();
|
||||||
|
anyhow::bail!("Telegram API returned {status}: {text}");
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Send a test message. Returns the result directly (not fire-and-forget).
|
||||||
|
pub async fn send_test_message(config: &TelegramConfig) -> Result<()> {
|
||||||
|
send_telegram(config, "🔔 <b>Stripstream Librarian</b>\nTest notification — connection OK!").await
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Notification events
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
pub struct ScanStats {
|
||||||
|
pub scanned_files: usize,
|
||||||
|
pub indexed_files: usize,
|
||||||
|
pub removed_files: usize,
|
||||||
|
pub new_series: usize,
|
||||||
|
pub errors: usize,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub enum NotificationEvent {
|
||||||
|
// Scan jobs (rebuild, full_rebuild, rescan, scan)
|
||||||
|
ScanCompleted {
|
||||||
|
job_type: String,
|
||||||
|
library_name: Option<String>,
|
||||||
|
stats: ScanStats,
|
||||||
|
duration_seconds: u64,
|
||||||
|
},
|
||||||
|
ScanFailed {
|
||||||
|
job_type: String,
|
||||||
|
library_name: Option<String>,
|
||||||
|
error: String,
|
||||||
|
},
|
||||||
|
ScanCancelled {
|
||||||
|
job_type: String,
|
||||||
|
library_name: Option<String>,
|
||||||
|
},
|
||||||
|
// Thumbnail jobs (thumbnail_rebuild, thumbnail_regenerate)
|
||||||
|
ThumbnailCompleted {
|
||||||
|
job_type: String,
|
||||||
|
library_name: Option<String>,
|
||||||
|
duration_seconds: u64,
|
||||||
|
},
|
||||||
|
ThumbnailFailed {
|
||||||
|
job_type: String,
|
||||||
|
library_name: Option<String>,
|
||||||
|
error: String,
|
||||||
|
},
|
||||||
|
// CBR→CBZ conversion
|
||||||
|
ConversionCompleted {
|
||||||
|
library_name: Option<String>,
|
||||||
|
book_title: Option<String>,
|
||||||
|
thumbnail_path: Option<String>,
|
||||||
|
},
|
||||||
|
ConversionFailed {
|
||||||
|
library_name: Option<String>,
|
||||||
|
book_title: Option<String>,
|
||||||
|
thumbnail_path: Option<String>,
|
||||||
|
error: String,
|
||||||
|
},
|
||||||
|
// Metadata manual approve
|
||||||
|
MetadataApproved {
|
||||||
|
series_name: String,
|
||||||
|
provider: String,
|
||||||
|
thumbnail_path: Option<String>,
|
||||||
|
},
|
||||||
|
// Metadata batch (auto-match)
|
||||||
|
MetadataBatchCompleted {
|
||||||
|
library_name: Option<String>,
|
||||||
|
total_series: i32,
|
||||||
|
processed: i32,
|
||||||
|
},
|
||||||
|
MetadataBatchFailed {
|
||||||
|
library_name: Option<String>,
|
||||||
|
error: String,
|
||||||
|
},
|
||||||
|
// Metadata refresh
|
||||||
|
MetadataRefreshCompleted {
|
||||||
|
library_name: Option<String>,
|
||||||
|
refreshed: i32,
|
||||||
|
unchanged: i32,
|
||||||
|
errors: i32,
|
||||||
|
},
|
||||||
|
MetadataRefreshFailed {
|
||||||
|
library_name: Option<String>,
|
||||||
|
error: String,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Classify an indexer job_type string into the right event constructor category.
|
||||||
|
/// Returns "scan", "thumbnail", or "conversion".
|
||||||
|
pub fn job_type_category(job_type: &str) -> &'static str {
|
||||||
|
match job_type {
|
||||||
|
"thumbnail_rebuild" | "thumbnail_regenerate" => "thumbnail",
|
||||||
|
"cbr_to_cbz" => "conversion",
|
||||||
|
_ => "scan",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn format_event(event: &NotificationEvent) -> String {
|
||||||
|
match event {
|
||||||
|
NotificationEvent::ScanCompleted {
|
||||||
|
job_type,
|
||||||
|
library_name,
|
||||||
|
stats,
|
||||||
|
duration_seconds,
|
||||||
|
} => {
|
||||||
|
let lib = library_name.as_deref().unwrap_or("All libraries");
|
||||||
|
let duration = format_duration(*duration_seconds);
|
||||||
|
format!(
|
||||||
|
"📚 <b>Scan completed</b>\n\
|
||||||
|
Library: {lib}\n\
|
||||||
|
Type: {job_type}\n\
|
||||||
|
New books: {}\n\
|
||||||
|
New series: {}\n\
|
||||||
|
Files scanned: {}\n\
|
||||||
|
Removed: {}\n\
|
||||||
|
Errors: {}\n\
|
||||||
|
Duration: {duration}",
|
||||||
|
stats.indexed_files,
|
||||||
|
stats.new_series,
|
||||||
|
stats.scanned_files,
|
||||||
|
stats.removed_files,
|
||||||
|
stats.errors,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
NotificationEvent::ScanFailed {
|
||||||
|
job_type,
|
||||||
|
library_name,
|
||||||
|
error,
|
||||||
|
} => {
|
||||||
|
let lib = library_name.as_deref().unwrap_or("All libraries");
|
||||||
|
let err = truncate(error, 200);
|
||||||
|
format!(
|
||||||
|
"❌ <b>Scan failed</b>\n\
|
||||||
|
Library: {lib}\n\
|
||||||
|
Type: {job_type}\n\
|
||||||
|
Error: {err}"
|
||||||
|
)
|
||||||
|
}
|
||||||
|
NotificationEvent::ScanCancelled {
|
||||||
|
job_type,
|
||||||
|
library_name,
|
||||||
|
} => {
|
||||||
|
let lib = library_name.as_deref().unwrap_or("All libraries");
|
||||||
|
format!(
|
||||||
|
"⏹ <b>Scan cancelled</b>\n\
|
||||||
|
Library: {lib}\n\
|
||||||
|
Type: {job_type}"
|
||||||
|
)
|
||||||
|
}
|
||||||
|
NotificationEvent::ThumbnailCompleted {
|
||||||
|
job_type,
|
||||||
|
library_name,
|
||||||
|
duration_seconds,
|
||||||
|
} => {
|
||||||
|
let lib = library_name.as_deref().unwrap_or("All libraries");
|
||||||
|
let duration = format_duration(*duration_seconds);
|
||||||
|
format!(
|
||||||
|
"🖼 <b>Thumbnails completed</b>\n\
|
||||||
|
Library: {lib}\n\
|
||||||
|
Type: {job_type}\n\
|
||||||
|
Duration: {duration}"
|
||||||
|
)
|
||||||
|
}
|
||||||
|
NotificationEvent::ThumbnailFailed {
|
||||||
|
job_type,
|
||||||
|
library_name,
|
||||||
|
error,
|
||||||
|
} => {
|
||||||
|
let lib = library_name.as_deref().unwrap_or("All libraries");
|
||||||
|
let err = truncate(error, 200);
|
||||||
|
format!(
|
||||||
|
"❌ <b>Thumbnails failed</b>\n\
|
||||||
|
Library: {lib}\n\
|
||||||
|
Type: {job_type}\n\
|
||||||
|
Error: {err}"
|
||||||
|
)
|
||||||
|
}
|
||||||
|
NotificationEvent::ConversionCompleted {
|
||||||
|
library_name,
|
||||||
|
book_title,
|
||||||
|
..
|
||||||
|
} => {
|
||||||
|
let lib = library_name.as_deref().unwrap_or("Unknown");
|
||||||
|
let title = book_title.as_deref().unwrap_or("Unknown");
|
||||||
|
format!(
|
||||||
|
"🔄 <b>CBR→CBZ conversion completed</b>\n\
|
||||||
|
Library: {lib}\n\
|
||||||
|
Book: {title}"
|
||||||
|
)
|
||||||
|
}
|
||||||
|
NotificationEvent::ConversionFailed {
|
||||||
|
library_name,
|
||||||
|
book_title,
|
||||||
|
error,
|
||||||
|
..
|
||||||
|
} => {
|
||||||
|
let lib = library_name.as_deref().unwrap_or("Unknown");
|
||||||
|
let title = book_title.as_deref().unwrap_or("Unknown");
|
||||||
|
let err = truncate(error, 200);
|
||||||
|
format!(
|
||||||
|
"❌ <b>CBR→CBZ conversion failed</b>\n\
|
||||||
|
Library: {lib}\n\
|
||||||
|
Book: {title}\n\
|
||||||
|
Error: {err}"
|
||||||
|
)
|
||||||
|
}
|
||||||
|
NotificationEvent::MetadataApproved {
|
||||||
|
series_name,
|
||||||
|
provider,
|
||||||
|
..
|
||||||
|
} => {
|
||||||
|
format!(
|
||||||
|
"🔗 <b>Metadata linked</b>\n\
|
||||||
|
Series: {series_name}\n\
|
||||||
|
Provider: {provider}"
|
||||||
|
)
|
||||||
|
}
|
||||||
|
NotificationEvent::MetadataBatchCompleted {
|
||||||
|
library_name,
|
||||||
|
total_series,
|
||||||
|
processed,
|
||||||
|
} => {
|
||||||
|
let lib = library_name.as_deref().unwrap_or("All libraries");
|
||||||
|
format!(
|
||||||
|
"🔍 <b>Metadata batch completed</b>\n\
|
||||||
|
Library: {lib}\n\
|
||||||
|
Series processed: {processed}/{total_series}"
|
||||||
|
)
|
||||||
|
}
|
||||||
|
NotificationEvent::MetadataBatchFailed {
|
||||||
|
library_name,
|
||||||
|
error,
|
||||||
|
} => {
|
||||||
|
let lib = library_name.as_deref().unwrap_or("All libraries");
|
||||||
|
let err = truncate(error, 200);
|
||||||
|
format!(
|
||||||
|
"❌ <b>Metadata batch failed</b>\n\
|
||||||
|
Library: {lib}\n\
|
||||||
|
Error: {err}"
|
||||||
|
)
|
||||||
|
}
|
||||||
|
NotificationEvent::MetadataRefreshCompleted {
|
||||||
|
library_name,
|
||||||
|
refreshed,
|
||||||
|
unchanged,
|
||||||
|
errors,
|
||||||
|
} => {
|
||||||
|
let lib = library_name.as_deref().unwrap_or("All libraries");
|
||||||
|
format!(
|
||||||
|
"🔄 <b>Metadata refresh completed</b>\n\
|
||||||
|
Library: {lib}\n\
|
||||||
|
Updated: {refreshed}\n\
|
||||||
|
Unchanged: {unchanged}\n\
|
||||||
|
Errors: {errors}"
|
||||||
|
)
|
||||||
|
}
|
||||||
|
NotificationEvent::MetadataRefreshFailed {
|
||||||
|
library_name,
|
||||||
|
error,
|
||||||
|
} => {
|
||||||
|
let lib = library_name.as_deref().unwrap_or("All libraries");
|
||||||
|
let err = truncate(error, 200);
|
||||||
|
format!(
|
||||||
|
"❌ <b>Metadata refresh failed</b>\n\
|
||||||
|
Library: {lib}\n\
|
||||||
|
Error: {err}"
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn truncate(s: &str, max: usize) -> String {
|
||||||
|
if s.len() > max {
|
||||||
|
format!("{}…", &s[..max])
|
||||||
|
} else {
|
||||||
|
s.to_string()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn format_duration(secs: u64) -> String {
|
||||||
|
if secs < 60 {
|
||||||
|
format!("{secs}s")
|
||||||
|
} else {
|
||||||
|
let m = secs / 60;
|
||||||
|
let s = secs % 60;
|
||||||
|
format!("{m}m{s}s")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
// Public entry point — fire & forget
|
||||||
|
// ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/// Returns whether this event type is enabled in the config.
|
||||||
|
fn is_event_enabled(config: &TelegramConfig, event: &NotificationEvent) -> bool {
|
||||||
|
match event {
|
||||||
|
NotificationEvent::ScanCompleted { .. } => config.events.scan_completed,
|
||||||
|
NotificationEvent::ScanFailed { .. } => config.events.scan_failed,
|
||||||
|
NotificationEvent::ScanCancelled { .. } => config.events.scan_cancelled,
|
||||||
|
NotificationEvent::ThumbnailCompleted { .. } => config.events.thumbnail_completed,
|
||||||
|
NotificationEvent::ThumbnailFailed { .. } => config.events.thumbnail_failed,
|
||||||
|
NotificationEvent::ConversionCompleted { .. } => config.events.conversion_completed,
|
||||||
|
NotificationEvent::ConversionFailed { .. } => config.events.conversion_failed,
|
||||||
|
NotificationEvent::MetadataApproved { .. } => config.events.metadata_approved,
|
||||||
|
NotificationEvent::MetadataBatchCompleted { .. } => config.events.metadata_batch_completed,
|
||||||
|
NotificationEvent::MetadataBatchFailed { .. } => config.events.metadata_batch_failed,
|
||||||
|
NotificationEvent::MetadataRefreshCompleted { .. } => config.events.metadata_refresh_completed,
|
||||||
|
NotificationEvent::MetadataRefreshFailed { .. } => config.events.metadata_refresh_failed,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Extract thumbnail path from event if present and file exists on disk.
|
||||||
|
fn event_thumbnail(event: &NotificationEvent) -> Option<&str> {
|
||||||
|
let path = match event {
|
||||||
|
NotificationEvent::ConversionCompleted { thumbnail_path, .. } => thumbnail_path.as_deref(),
|
||||||
|
NotificationEvent::ConversionFailed { thumbnail_path, .. } => thumbnail_path.as_deref(),
|
||||||
|
NotificationEvent::MetadataApproved { thumbnail_path, .. } => thumbnail_path.as_deref(),
|
||||||
|
_ => None,
|
||||||
|
};
|
||||||
|
path.filter(|p| std::path::Path::new(p).exists())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Load config + format + send in a spawned task. Errors are only logged.
|
||||||
|
pub fn notify(pool: PgPool, event: NotificationEvent) {
|
||||||
|
tokio::spawn(async move {
|
||||||
|
let config = match load_telegram_config(&pool).await {
|
||||||
|
Some(c) => c,
|
||||||
|
None => return, // disabled or not configured
|
||||||
|
};
|
||||||
|
|
||||||
|
if !is_event_enabled(&config, &event) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
let text = format_event(&event);
|
||||||
|
let sent = if let Some(photo) = event_thumbnail(&event) {
|
||||||
|
match send_telegram_photo(&config, &text, photo).await {
|
||||||
|
Ok(()) => Ok(()),
|
||||||
|
Err(e) => {
|
||||||
|
warn!("[TELEGRAM] Photo send failed, falling back to text: {e}");
|
||||||
|
send_telegram(&config, &text).await
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
send_telegram(&config, &text).await
|
||||||
|
};
|
||||||
|
|
||||||
|
match sent {
|
||||||
|
Ok(()) => info!("[TELEGRAM] Notification sent"),
|
||||||
|
Err(e) => warn!("[TELEGRAM] Failed to send notification: {e}"),
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
341
docs/FEATURES.md
Normal file
341
docs/FEATURES.md
Normal file
@@ -0,0 +1,341 @@
|
|||||||
|
# Stripstream Librarian — Features & Business Rules
|
||||||
|
|
||||||
|
## Libraries
|
||||||
|
|
||||||
|
### Multi-Library Management
|
||||||
|
- Create and manage multiple independent libraries, each with its own root path
|
||||||
|
- Enable/disable libraries individually
|
||||||
|
- Delete a library cascades to all its books, jobs, and metadata
|
||||||
|
|
||||||
|
### Scanning & Indexing
|
||||||
|
- **Incremental scan**: uses directory mtime tracking to skip unchanged directories
|
||||||
|
- **Full rebuild**: force re-walk all directories, ignoring cached mtimes
|
||||||
|
- **Rescan**: deep rescan to discover newly supported formats
|
||||||
|
- **Two-phase pipeline**:
|
||||||
|
- Phase 1 (Discovery): fast filename-based metadata extraction (no archive I/O)
|
||||||
|
- Phase 2 (Analysis): extract page counts, first page image from archives
|
||||||
|
|
||||||
|
### Real-Time Monitoring
|
||||||
|
- **Automatic periodic scanning**: configurable interval (default 5 seconds)
|
||||||
|
- **Filesystem watcher**: real-time detection of file changes for instant indexing
|
||||||
|
- Each can be toggled per library (`monitor_enabled`, `watcher_enabled`)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Books
|
||||||
|
|
||||||
|
### Format Support
|
||||||
|
- **CBZ** (ZIP-based comic archives)
|
||||||
|
- **CBR** (RAR-based comic archives)
|
||||||
|
- **PDF**
|
||||||
|
- **EPUB**
|
||||||
|
- Automatic format detection from file extension and magic bytes
|
||||||
|
|
||||||
|
### Metadata Extraction
|
||||||
|
- **Title**: derived from filename or external metadata
|
||||||
|
- **Series**: derived from directory structure (first directory level under library root)
|
||||||
|
- **Volume**: extracted from filename with pattern detection:
|
||||||
|
- `T##` (Tome) — most common for French comics
|
||||||
|
- `Vol.##`, `Vol ##`, `Volume ##`
|
||||||
|
- `###` (standalone number)
|
||||||
|
- `-## ` (dash-separated)
|
||||||
|
- **Author(s)**: single scalar and array support
|
||||||
|
- **Page count**: extracted from archive analysis
|
||||||
|
- **Language**, **kind** (ebook, comic, bd)
|
||||||
|
|
||||||
|
### Thumbnails
|
||||||
|
- Generated from the first page of each archive
|
||||||
|
- Output format configurable: WebP (default), JPEG, PNG
|
||||||
|
- Configurable dimensions (default 300×400)
|
||||||
|
- Lazy generation: created on first access if missing
|
||||||
|
- Bulk operations: rebuild missing or regenerate all
|
||||||
|
|
||||||
|
### CBR to CBZ Conversion
|
||||||
|
- Convert RAR archives to ZIP format
|
||||||
|
- Tracked as background job with progress
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Series
|
||||||
|
|
||||||
|
### Automatic Aggregation
|
||||||
|
- Series derived from directory structure during scanning
|
||||||
|
- Books without series grouped as "unclassified"
|
||||||
|
|
||||||
|
### Series Metadata
|
||||||
|
- Description, publisher, start year, status (`ongoing`, `ended`, `completed`, `on_hold`, `hiatus`)
|
||||||
|
- Total volume count (from external providers)
|
||||||
|
- Authors (aggregated from books or metadata)
|
||||||
|
|
||||||
|
### Filtering & Discovery
|
||||||
|
- Filter by: series name (partial match), reading status, series status, metadata provider linkage
|
||||||
|
- Sort by: name, reading status, book count
|
||||||
|
- **Missing books detection**: identifies gaps in volume numbering within a series
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Reading Progress
|
||||||
|
|
||||||
|
### Per-Book Tracking
|
||||||
|
- Three states: `unread` (default), `reading`, `read`
|
||||||
|
- Current page tracking when status is `reading`
|
||||||
|
- `last_read_at` timestamp auto-updated
|
||||||
|
|
||||||
|
### Series-Level Status
|
||||||
|
- Calculated from book statuses:
|
||||||
|
- All read → series `read`
|
||||||
|
- None read → series `unread`
|
||||||
|
- Mixed → series `reading`
|
||||||
|
|
||||||
|
### Bulk Operations
|
||||||
|
- Mark entire series as read (updates all books)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Search & Discovery
|
||||||
|
|
||||||
|
### Full-Text Search
|
||||||
|
- PostgreSQL-based (`ILIKE` + `pg_trgm`)
|
||||||
|
- Searches across: book titles, series names, authors (scalar and array fields), series metadata authors
|
||||||
|
- Case-insensitive partial matching
|
||||||
|
- Library-scoped filtering
|
||||||
|
|
||||||
|
### Results
|
||||||
|
- Book hits: title, authors, series, volume, language, kind
|
||||||
|
- Series hits: name, book count, read count, first book (for linking)
|
||||||
|
- Processing time included in response
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Authors
|
||||||
|
|
||||||
|
- Unique author aggregation from books and series metadata
|
||||||
|
- Per-author book and series count
|
||||||
|
- Searchable by name (partial match)
|
||||||
|
- Sortable by name or book count
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## External Metadata
|
||||||
|
|
||||||
|
### Supported Providers
|
||||||
|
| Provider | Focus |
|
||||||
|
|----------|-------|
|
||||||
|
| Google Books | General books (default fallback) |
|
||||||
|
| ComicVine | Comics |
|
||||||
|
| Bédéthèque | Franco-Belgian comics |
|
||||||
|
| AniList | Manga/anime |
|
||||||
|
| Open Library | General books |
|
||||||
|
|
||||||
|
### Provider Configuration
|
||||||
|
- Global default provider with library-level override
|
||||||
|
- Fallback provider if primary is unavailable
|
||||||
|
|
||||||
|
### Matching Workflow
|
||||||
|
1. **Search**: query a provider, get candidates with confidence scores
|
||||||
|
2. **Match**: link a series to an external result (status `pending`)
|
||||||
|
3. **Approve**: validate and sync metadata to series and books
|
||||||
|
4. **Reject**: discard a match
|
||||||
|
|
||||||
|
### Batch Processing
|
||||||
|
- Auto-match all series in a library via `metadata_batch` job
|
||||||
|
- Configurable confidence threshold
|
||||||
|
- Result statuses: `auto_matched`, `no_results`, `too_many_results`, `low_confidence`, `already_linked`
|
||||||
|
|
||||||
|
### Metadata Refresh
|
||||||
|
- Update approved links with latest data from providers
|
||||||
|
- Change tracking reports per series/book
|
||||||
|
- Non-destructive: only updates when provider has new data
|
||||||
|
|
||||||
|
### Field Locking
|
||||||
|
- Individual book fields can be locked to prevent external sync from overwriting manual edits
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## External Integrations
|
||||||
|
|
||||||
|
### Komga Sync
|
||||||
|
- Import reading progress from a Komga server
|
||||||
|
- Matches local series/books by name
|
||||||
|
- Detailed sync report: matched, already read, newly marked, unmatched
|
||||||
|
|
||||||
|
### Prowlarr (Indexer Search)
|
||||||
|
- Search Prowlarr for missing volumes in a series
|
||||||
|
- Volume pattern matching against release titles
|
||||||
|
- Results: title, size, seeders/leechers, download URL, matched missing volumes
|
||||||
|
|
||||||
|
### qBittorrent
|
||||||
|
- Add torrents directly from Prowlarr search results
|
||||||
|
- Connection test endpoint
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Notifications
|
||||||
|
|
||||||
|
### Telegram
|
||||||
|
- Real-time notifications via Telegram Bot API (`sendMessage` and `sendPhoto`)
|
||||||
|
- Configuration: bot token, chat ID, enable/disable toggle
|
||||||
|
- Test connection button in settings
|
||||||
|
|
||||||
|
### Granular Event Toggles
|
||||||
|
12 individually configurable notification events grouped by category:
|
||||||
|
|
||||||
|
| Category | Events |
|
||||||
|
|----------|--------|
|
||||||
|
| Scans | `scan_completed`, `scan_failed`, `scan_cancelled` |
|
||||||
|
| Thumbnails | `thumbnail_completed`, `thumbnail_failed` |
|
||||||
|
| Conversion | `conversion_completed`, `conversion_failed` |
|
||||||
|
| Metadata | `metadata_approved`, `metadata_batch_completed`, `metadata_batch_failed`, `metadata_refresh_completed`, `metadata_refresh_failed` |
|
||||||
|
|
||||||
|
### Thumbnail Images in Notifications
|
||||||
|
- Book cover thumbnails attached to applicable notifications (conversion, metadata approval)
|
||||||
|
- Uses `sendPhoto` multipart upload with fallback to text-only `sendMessage`
|
||||||
|
|
||||||
|
### Implementation
|
||||||
|
- Shared `crates/notifications` crate used by both API and indexer
|
||||||
|
- Fire-and-forget: notification failures are logged but never block the main operation
|
||||||
|
- Messages formatted in HTML with event-specific icons
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Page Rendering & Caching
|
||||||
|
|
||||||
|
### Page Extraction
|
||||||
|
- Render any page from supported archive formats
|
||||||
|
- 1-indexed page numbers
|
||||||
|
|
||||||
|
### Image Processing
|
||||||
|
- Output formats: original, JPEG, PNG, WebP
|
||||||
|
- Quality parameter (1–100)
|
||||||
|
- Max width parameter (1–2160 px)
|
||||||
|
- Configurable resampling filter: lanczos3, nearest, triangle/bilinear
|
||||||
|
- Concurrent render limit (default 8) with semaphore
|
||||||
|
|
||||||
|
### Caching
|
||||||
|
- **LRU in-memory cache**: 512 entries
|
||||||
|
- **Disk cache**: SHA256-keyed, two-level directory structure
|
||||||
|
- Cache key = hash(path + page + format + quality + width)
|
||||||
|
- Configurable cache directory and max size
|
||||||
|
- Manual cache clear via settings
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Background Jobs
|
||||||
|
|
||||||
|
### Job Types
|
||||||
|
| Type | Description |
|
||||||
|
|------|-------------|
|
||||||
|
| `rebuild` | Incremental scan |
|
||||||
|
| `full_rebuild` | Full filesystem rescan |
|
||||||
|
| `rescan` | Deep rescan for new formats |
|
||||||
|
| `thumbnail_rebuild` | Generate missing thumbnails |
|
||||||
|
| `thumbnail_regenerate` | Clear and regenerate all thumbnails |
|
||||||
|
| `cbr_to_cbz` | Convert RAR to ZIP |
|
||||||
|
| `metadata_batch` | Auto-match series to metadata |
|
||||||
|
| `metadata_refresh` | Update approved metadata links |
|
||||||
|
|
||||||
|
### Job Lifecycle
|
||||||
|
- Status flow: `pending` → `running` → `success` | `failed` | `cancelled`
|
||||||
|
- Intermediate statuses: `extracting_pages`, `generating_thumbnails`
|
||||||
|
- Real-time progress via **Server-Sent Events** (SSE)
|
||||||
|
- Per-file error tracking (non-fatal: job continues on errors)
|
||||||
|
- Cancellation support for pending/running jobs
|
||||||
|
|
||||||
|
### Progress Tracking
|
||||||
|
- Percentage (0–100), current file, processed/total counts
|
||||||
|
- Timing: started_at, finished_at, phase2_started_at
|
||||||
|
- Stats JSON blob with job-specific metrics
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Authentication & Security
|
||||||
|
|
||||||
|
### Token System
|
||||||
|
- **Bootstrap token**: admin token via `API_BOOTSTRAP_TOKEN` env var
|
||||||
|
- **API tokens**: create, list, revoke with scopes
|
||||||
|
- Token format: `stl_{prefix}_{secret}` with Argon2 hashing
|
||||||
|
- Expiration dates, last usage tracking, revocation
|
||||||
|
|
||||||
|
### Access Control
|
||||||
|
- **Two scopes**: `admin` (full access) and `read` (read-only)
|
||||||
|
- Route-level middleware enforcement
|
||||||
|
- Rate limiting: configurable sliding window (default 120 req/s)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Backoffice (Web UI)
|
||||||
|
|
||||||
|
### Dashboard
|
||||||
|
- Statistics cards: books, series, authors, libraries, pages, total size
|
||||||
|
- Interactive charts (recharts): donut, area, stacked bar, horizontal bar
|
||||||
|
- Reading status breakdown, format distribution, library distribution
|
||||||
|
- Currently reading section with progress bars
|
||||||
|
- Recently read section with cover thumbnails
|
||||||
|
- Reading activity over time (area chart)
|
||||||
|
- Books added over time (area chart)
|
||||||
|
- Per-library stacked reading progress
|
||||||
|
- Top series by book count
|
||||||
|
- Metadata coverage and provider breakdown
|
||||||
|
|
||||||
|
### Pages
|
||||||
|
- **Libraries**: list, create, delete, configure monitoring and metadata provider
|
||||||
|
- **Books**: global list with filtering/sorting, detail view with metadata and page rendering
|
||||||
|
- **Series**: global list, per-library view, detail with metadata management
|
||||||
|
- **Authors**: list with book/series counts, detail with author's books
|
||||||
|
- **Jobs**: history, live progress via SSE, error details
|
||||||
|
- **Tokens**: create, list, revoke API tokens
|
||||||
|
- **Settings**: image processing, cache, thumbnails, external services (Prowlarr, qBittorrent), notifications (Telegram)
|
||||||
|
|
||||||
|
### Interactive Features
|
||||||
|
- Real-time search with suggestions
|
||||||
|
- Metadata search and matching modals
|
||||||
|
- Prowlarr search modal for missing volumes
|
||||||
|
- Folder browser/picker for library paths
|
||||||
|
- Book/series editing forms
|
||||||
|
- Quick reading status toggles
|
||||||
|
- CBR to CBZ conversion trigger
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## API
|
||||||
|
|
||||||
|
### Documentation
|
||||||
|
- OpenAPI/Swagger UI available at `/swagger-ui`
|
||||||
|
- Health check (`/health`), readiness (`/ready`), Prometheus metrics (`/metrics`)
|
||||||
|
|
||||||
|
### Public Endpoints (no auth)
|
||||||
|
- `GET /health`, `GET /ready`, `GET /metrics`, `GET /swagger-ui`
|
||||||
|
|
||||||
|
### Read Endpoints (read scope)
|
||||||
|
- Libraries, books, series, authors listing and detail
|
||||||
|
- Book pages and thumbnails
|
||||||
|
- Reading progress get/update
|
||||||
|
- Full-text search, collection statistics
|
||||||
|
|
||||||
|
### Admin Endpoints (admin scope)
|
||||||
|
- Library CRUD and configuration
|
||||||
|
- Book metadata editing, CBR conversion
|
||||||
|
- Series metadata editing
|
||||||
|
- Indexing job management (trigger, cancel, stream)
|
||||||
|
- API token management
|
||||||
|
- Metadata operations (search, match, approve, reject, batch, refresh)
|
||||||
|
- External integrations (Prowlarr, qBittorrent, Komga)
|
||||||
|
- Application settings and cache management
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Database
|
||||||
|
|
||||||
|
### Key Design Decisions
|
||||||
|
- PostgreSQL with `pg_trgm` for full-text search (no external search engine)
|
||||||
|
- All deletions cascade from libraries
|
||||||
|
- Unique constraints: file paths, token prefixes, metadata links (library + series + provider)
|
||||||
|
- Directory mtime caching for incremental scan optimization
|
||||||
|
- Connection pool: 10 (API), 20 (indexer)
|
||||||
|
|
||||||
|
### Archive Resilience
|
||||||
|
- CBZ: fallback streaming reader if central directory corrupted
|
||||||
|
- CBR: RAR extraction via system `unar`, fallback to CBZ parsing
|
||||||
|
- PDF: `pdfinfo` for page count, `pdftoppm` for rendering
|
||||||
|
- EPUB: ZIP-based extraction
|
||||||
|
- FD exhaustion detection: aborts if too many consecutive IO errors
|
||||||
3
infra/migrations/0048_add_telegram_settings.sql
Normal file
3
infra/migrations/0048_add_telegram_settings.sql
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
INSERT INTO app_settings (key, value) VALUES
|
||||||
|
('telegram', '{"bot_token": "", "chat_id": "", "enabled": false, "events": {"job_completed": true, "job_failed": true, "job_cancelled": true, "metadata_approved": true}}')
|
||||||
|
ON CONFLICT DO NOTHING;
|
||||||
8
infra/migrations/0049_update_telegram_events.sql
Normal file
8
infra/migrations/0049_update_telegram_events.sql
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
-- Update telegram events from 4 generic toggles to 12 granular toggles
|
||||||
|
UPDATE app_settings
|
||||||
|
SET value = jsonb_set(
|
||||||
|
value,
|
||||||
|
'{events}',
|
||||||
|
'{"scan_completed": true, "scan_failed": true, "scan_cancelled": true, "thumbnail_completed": true, "thumbnail_failed": true, "conversion_completed": true, "conversion_failed": true, "metadata_approved": true, "metadata_batch_completed": true, "metadata_batch_failed": true, "metadata_refresh_completed": true, "metadata_refresh_failed": true}'::jsonb
|
||||||
|
)
|
||||||
|
WHERE key = 'telegram';
|
||||||
Reference in New Issue
Block a user