feat: gestion des téléchargements qBittorrent avec import automatique

- Nouvelle table `torrent_downloads` pour suivre les téléchargements gérés
- API : endpoint POST /torrent-downloads/notify (webhook optionnel) et GET /torrent-downloads
- Poller background toutes les 30s qui interroge qBittorrent pour détecter
  les torrents terminés — aucune config "run external program" nécessaire
- Import automatique : déplacement des fichiers vers la série cible,
  renommage selon le pattern existant (détection de la largeur des digits),
  support packs multi-volumes, scan job déclenché après import
- Page /downloads dans le backoffice : filtres, auto-refresh, carte par download
- Toggle auto-import intégré dans la card qBittorrent des settings
- Erreurs de détection download affichées dans le détail des jobs
- Volume /downloads monté dans docker-compose

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-03-26 14:43:10 +01:00
parent a2de2e1601
commit 4bb142d1dd
21 changed files with 1197 additions and 39 deletions

View File

@@ -70,6 +70,15 @@ LIBRARIES_HOST_PATH=./libraries
# Default: ../data/thumbnails (relative to infra/docker-compose.yml) # Default: ../data/thumbnails (relative to infra/docker-compose.yml)
THUMBNAILS_HOST_PATH=./data/thumbnails THUMBNAILS_HOST_PATH=./data/thumbnails
# Path to qBittorrent downloads directory on host machine (for Docker volume mount)
# The container will see this as /downloads
DOWNLOADS_HOST_PATH=./data/downloads
# Path to downloads directory inside the container (for dev remapping, like LIBRARIES_ROOT_PATH)
# In Docker: leave as default /downloads
# For local dev: set to your local downloads folder path
# DOWNLOADS_PATH=/downloads
# ============================================================================= # =============================================================================
# Port Configuration # Port Configuration
# ============================================================================= # =============================================================================

1
.gitignore vendored
View File

@@ -6,3 +6,4 @@ tmp/
node_modules/ node_modules/
.next/ .next/
data/thumbnails data/thumbnails
*.pem

View File

@@ -1,6 +1,10 @@
FROM rust:1-bookworm AS builder FROM rust:1-bookworm AS builder
WORKDIR /app WORKDIR /app
# Install corporate CA certificate (Cato Networks)
COPY CATO-CDBDX-SUBCA.chain.pem /usr/local/share/ca-certificates/cato.crt
RUN update-ca-certificates
# Copy workspace manifests and create dummy source files to cache dependency builds # Copy workspace manifests and create dummy source files to cache dependency builds
COPY Cargo.toml ./ COPY Cargo.toml ./
COPY apps/api/Cargo.toml apps/api/Cargo.toml COPY apps/api/Cargo.toml apps/api/Cargo.toml

View File

@@ -19,6 +19,7 @@ mod pages;
mod prowlarr; mod prowlarr;
mod qbittorrent; mod qbittorrent;
mod reading_progress; mod reading_progress;
mod torrent_import;
mod reading_status_match; mod reading_status_match;
mod reading_status_push; mod reading_status_push;
mod search; mod search;
@@ -121,6 +122,7 @@ async fn main() -> anyhow::Result<()> {
.route("/prowlarr/test", get(prowlarr::test_prowlarr)) .route("/prowlarr/test", get(prowlarr::test_prowlarr))
.route("/qbittorrent/add", axum::routing::post(qbittorrent::add_torrent)) .route("/qbittorrent/add", axum::routing::post(qbittorrent::add_torrent))
.route("/qbittorrent/test", get(qbittorrent::test_qbittorrent)) .route("/qbittorrent/test", get(qbittorrent::test_qbittorrent))
.route("/torrent-downloads", get(torrent_import::list_torrent_downloads))
.route("/telegram/test", get(telegram::test_telegram)) .route("/telegram/test", get(telegram::test_telegram))
.route("/komga/sync", axum::routing::post(komga::sync_komga_read_books)) .route("/komga/sync", axum::routing::post(komga::sync_komga_read_books))
.route("/komga/reports", get(komga::list_sync_reports)) .route("/komga/reports", get(komga::list_sync_reports))
@@ -190,12 +192,14 @@ async fn main() -> anyhow::Result<()> {
// Clone pool before state is moved into the router // Clone pool before state is moved into the router
let poller_pool = state.pool.clone(); let poller_pool = state.pool.clone();
let torrent_poller_pool = state.pool.clone();
let app = Router::new() let app = Router::new()
.route("/health", get(handlers::health)) .route("/health", get(handlers::health))
.route("/ready", get(handlers::ready)) .route("/ready", get(handlers::ready))
.route("/metrics", get(handlers::metrics)) .route("/metrics", get(handlers::metrics))
.route("/docs", get(handlers::docs_redirect)) .route("/docs", get(handlers::docs_redirect))
.route("/torrent-downloads/notify", axum::routing::post(torrent_import::notify_torrent_done))
.merge(SwaggerUi::new("/swagger-ui").url("/openapi.json", openapi::ApiDoc::openapi())) .merge(SwaggerUi::new("/swagger-ui").url("/openapi.json", openapi::ApiDoc::openapi()))
.merge(admin_routes) .merge(admin_routes)
.merge(read_routes) .merge(read_routes)
@@ -207,6 +211,11 @@ async fn main() -> anyhow::Result<()> {
job_poller::run_job_poller(poller_pool, 5).await; job_poller::run_job_poller(poller_pool, 5).await;
}); });
// Start background poller for qBittorrent torrent completions (every 30s)
tokio::spawn(async move {
torrent_import::run_torrent_poller(torrent_poller_pool, 30).await;
});
let listener = tokio::net::TcpListener::bind(&config.listen_addr).await?; let listener = tokio::net::TcpListener::bind(&config.listen_addr).await?;
info!(addr = %config.listen_addr, "api listening"); info!(addr = %config.listen_addr, "api listening");
axum::serve(listener, app).await?; axum::serve(listener, app).await?;

View File

@@ -2,6 +2,7 @@ use axum::{extract::State, Json};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use sqlx::Row; use sqlx::Row;
use utoipa::ToSchema; use utoipa::ToSchema;
use uuid::Uuid;
use crate::{error::ApiError, state::AppState}; use crate::{error::ApiError, state::AppState};
@@ -10,12 +11,21 @@ use crate::{error::ApiError, state::AppState};
#[derive(Deserialize, ToSchema)] #[derive(Deserialize, ToSchema)]
pub struct QBittorrentAddRequest { pub struct QBittorrentAddRequest {
pub url: String, pub url: String,
/// When provided together with `series_name` and `expected_volumes`, tracks the download
/// in `torrent_downloads` and triggers automatic import on completion.
#[schema(value_type = Option<String>)]
pub library_id: Option<Uuid>,
pub series_name: Option<String>,
pub expected_volumes: Option<Vec<i32>>,
} }
#[derive(Serialize, ToSchema)] #[derive(Serialize, ToSchema)]
pub struct QBittorrentAddResponse { pub struct QBittorrentAddResponse {
pub success: bool, pub success: bool,
pub message: String, pub message: String,
/// Set when `library_id` + `series_name` + `expected_volumes` were provided.
#[schema(value_type = Option<String>)]
pub torrent_download_id: Option<Uuid>,
} }
#[derive(Serialize, ToSchema)] #[derive(Serialize, ToSchema)]
@@ -34,7 +44,7 @@ struct QBittorrentConfig {
password: String, password: String,
} }
async fn load_qbittorrent_config( pub(crate) async fn load_qbittorrent_config(
pool: &sqlx::PgPool, pool: &sqlx::PgPool,
) -> Result<(String, String, String), ApiError> { ) -> Result<(String, String, String), ApiError> {
let row = sqlx::query("SELECT value FROM app_settings WHERE key = 'qbittorrent'") let row = sqlx::query("SELECT value FROM app_settings WHERE key = 'qbittorrent'")
@@ -58,7 +68,7 @@ async fn load_qbittorrent_config(
// ─── Login helper ─────────────────────────────────────────────────────────── // ─── Login helper ───────────────────────────────────────────────────────────
async fn qbittorrent_login( pub(crate) async fn qbittorrent_login(
client: &reqwest::Client, client: &reqwest::Client,
base_url: &str, base_url: &str,
username: &str, username: &str,
@@ -120,6 +130,10 @@ pub async fn add_torrent(
return Err(ApiError::bad_request("url is required")); return Err(ApiError::bad_request("url is required"));
} }
let is_managed = body.library_id.is_some()
&& body.series_name.is_some()
&& body.expected_volumes.is_some();
let (base_url, username, password) = load_qbittorrent_config(&state.pool).await?; let (base_url, username, password) = load_qbittorrent_config(&state.pool).await?;
let client = reqwest::Client::builder() let client = reqwest::Client::builder()
@@ -129,27 +143,74 @@ pub async fn add_torrent(
let sid = qbittorrent_login(&client, &base_url, &username, &password).await?; let sid = qbittorrent_login(&client, &base_url, &username, &password).await?;
let mut form_params: Vec<(&str, &str)> = vec![("urls", &body.url)];
let savepath = "/downloads";
if is_managed {
form_params.push(("savepath", savepath));
}
let resp = client let resp = client
.post(format!("{base_url}/api/v2/torrents/add")) .post(format!("{base_url}/api/v2/torrents/add"))
.header("Cookie", format!("SID={sid}")) .header("Cookie", format!("SID={sid}"))
.form(&[("urls", &body.url)]) .form(&form_params)
.send() .send()
.await .await
.map_err(|e| ApiError::internal(format!("qBittorrent add request failed: {e}")))?; .map_err(|e| ApiError::internal(format!("qBittorrent add request failed: {e}")))?;
if resp.status().is_success() { if !resp.status().is_success() {
let status = resp.status();
let text = resp.text().await.unwrap_or_default();
return Ok(Json(QBittorrentAddResponse {
success: false,
message: format!("qBittorrent returned {status}: {text}"),
torrent_download_id: None,
}));
}
// If managed download: record in torrent_downloads
let torrent_download_id = if is_managed {
let library_id = body.library_id.unwrap();
let series_name = body.series_name.as_deref().unwrap();
let expected_volumes = body.expected_volumes.as_deref().unwrap();
let qb_hash = extract_magnet_hash(&body.url);
let id = Uuid::new_v4();
sqlx::query(
"INSERT INTO torrent_downloads (id, library_id, series_name, expected_volumes, qb_hash) \
VALUES ($1, $2, $3, $4, $5)",
)
.bind(id)
.bind(library_id)
.bind(series_name)
.bind(expected_volumes)
.bind(qb_hash.as_deref())
.execute(&state.pool)
.await?;
Some(id)
} else {
None
};
Ok(Json(QBittorrentAddResponse { Ok(Json(QBittorrentAddResponse {
success: true, success: true,
message: "Torrent added to qBittorrent".to_string(), message: "Torrent added to qBittorrent".to_string(),
})) torrent_download_id,
} else {
let status = resp.status();
let text = resp.text().await.unwrap_or_default();
Ok(Json(QBittorrentAddResponse {
success: false,
message: format!("qBittorrent returned {status}: {text}"),
})) }))
} }
/// Extract the info-hash from a magnet link (lowercased, hex or base32).
/// magnet:?xt=urn:btih:HASH...
fn extract_magnet_hash(url: &str) -> Option<String> {
let lower = url.to_lowercase();
let marker = "urn:btih:";
let start = lower.find(marker)? + marker.len();
let hash_part = &lower[start..];
let end = hash_part
.find(|c: char| !c.is_alphanumeric())
.unwrap_or(hash_part.len());
let hash = &hash_part[..end];
if hash.is_empty() { None } else { Some(hash.to_string()) }
} }
/// Test connection to qBittorrent /// Test connection to qBittorrent

View File

@@ -0,0 +1,657 @@
use axum::{extract::State, Json};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use sqlx::{PgPool, Row};
use std::time::Duration;
use tracing::{info, trace, warn};
use uuid::Uuid;
use crate::{error::ApiError, prowlarr::extract_volumes_from_title_pub, qbittorrent::{load_qbittorrent_config, qbittorrent_login}, state::AppState};
// ─── Types ──────────────────────────────────────────────────────────────────
/// Called by qBittorrent on torrent completion.
/// Configure in qBittorrent: Tools → Options → Downloads → "Run external program on torrent completion":
/// curl -s -X POST http://api:7080/torrent-downloads/notify \
/// -H "Content-Type: application/json" \
/// -d "{\"hash\":\"%I\",\"name\":\"%N\",\"save_path\":\"%F\"}"
///
/// The webhook is optional: the background poller detects completions on its own,
/// and `notify_torrent_done` ignores hashes it does not track.
#[derive(Deserialize)]
pub struct TorrentNotifyRequest {
/// %I from qBittorrent: the torrent info-hash (matched against `torrent_downloads.qb_hash`).
pub hash: String,
/// %N from qBittorrent: torrent display name. Currently unused by the handler.
#[allow(dead_code)]
pub name: String,
/// %F from qBittorrent: path to content (folder for multi-file, file for single-file)
pub save_path: String,
}
/// JSON shape returned by `GET /torrent-downloads` (backoffice downloads page).
/// UUIDs and timestamps are serialized as strings (RFC 3339 for the dates).
#[derive(Serialize)]
pub struct TorrentDownloadDto {
pub id: String,
pub library_id: String,
pub series_name: String,
// Volumes the user asked to import from this torrent.
pub expected_volumes: Vec<i32>,
// Info-hash in qBittorrent; None when it could not be extracted from the magnet link.
pub qb_hash: Option<String>,
// Where qBittorrent put the payload; set once the torrent completes.
pub content_path: Option<String>,
// Lifecycle: 'downloading' → 'completed' → 'importing' → 'imported' | 'error'.
pub status: String,
// JSON array of ImportedFile entries once the import succeeded.
pub imported_files: Option<serde_json::Value>,
pub error_message: Option<String>,
pub created_at: String,
pub updated_at: String,
}
/// One file moved into the library by an import; persisted as a JSON array
/// in `torrent_downloads.imported_files`.
#[derive(Serialize, Deserialize)]
struct ImportedFile {
// Lowest matched volume number for this file (a pack file can cover several).
volume: i32,
// Physical path the file was read from (inside the downloads directory).
source: String,
// Logical library path (container-side `/libraries/...`) it was moved to.
destination: String,
}
// ─── Handlers ────────────────────────────────────────────────────────────────
/// Webhook called by qBittorrent when a torrent completes (no auth required).
///
/// Flow: validate the hash → no-op when the auto-import toggle is off → find the
/// matching `torrent_downloads` row still in 'downloading' → mark it 'completed'
/// with the reported content path → run the import in a detached task.
/// Ignorable cases still answer `{"ok": true}` so qBittorrent's external-program
/// hook never sees an error it cannot act on.
pub async fn notify_torrent_done(
State(state): State<AppState>,
Json(body): Json<TorrentNotifyRequest>,
) -> Result<Json<serde_json::Value>, ApiError> {
// An empty hash means the %I placeholder was not configured on the caller side.
if body.hash.is_empty() {
return Err(ApiError::bad_request("hash is required"));
}
// Feature toggle lives in app_settings; disabled is a quiet no-op, not an error.
if !is_torrent_import_enabled(&state.pool).await {
info!("Torrent import disabled, ignoring notification for hash {}", body.hash);
return Ok(Json(serde_json::json!({ "ok": true })));
}
// Only rows still 'downloading' are eligible — this also makes the webhook
// idempotent if qBittorrent (or the poller) already handled this torrent.
let row = sqlx::query(
"SELECT id FROM torrent_downloads WHERE qb_hash = $1 AND status = 'downloading' LIMIT 1",
)
.bind(&body.hash)
.fetch_optional(&state.pool)
.await?;
let Some(row) = row else {
// Torrent was not added through the managed flow; nothing to do.
info!("Torrent notification for unknown hash {}, ignoring", body.hash);
return Ok(Json(serde_json::json!({ "ok": true })));
};
let torrent_id: Uuid = row.get("id");
// Record completion and where the payload landed (%F from qBittorrent).
sqlx::query(
"UPDATE torrent_downloads SET status = 'completed', content_path = $1, updated_at = NOW() WHERE id = $2",
)
.bind(&body.save_path)
.bind(torrent_id)
.execute(&state.pool)
.await?;
info!("Torrent {} completed, content at {}", body.hash, body.save_path);
// Run the import off the request path; failures are logged here and also
// persisted on the row by process_torrent_import itself.
let pool = state.pool.clone();
tokio::spawn(async move {
if let Err(e) = process_torrent_import(pool, torrent_id).await {
warn!("Torrent import {} failed: {:#}", torrent_id, e);
}
});
Ok(Json(serde_json::json!({ "ok": true })))
}
/// List recent torrent downloads (admin): the 100 newest rows, most recent first.
pub async fn list_torrent_downloads(
    State(state): State<AppState>,
) -> Result<Json<Vec<TorrentDownloadDto>>, ApiError> {
    let rows = sqlx::query(
        "SELECT id, library_id, series_name, expected_volumes, qb_hash, content_path, \
         status, imported_files, error_message, created_at, updated_at \
         FROM torrent_downloads ORDER BY created_at DESC LIMIT 100",
    )
    .fetch_all(&state.pool)
    .await?;

    // Convert each row to its string-serialized DTO form (UUIDs and RFC 3339 dates).
    let mut dtos = Vec::with_capacity(rows.len());
    for row in rows {
        let id: Uuid = row.get("id");
        let library_id: Uuid = row.get("library_id");
        let created_at: DateTime<Utc> = row.get("created_at");
        let updated_at: DateTime<Utc> = row.get("updated_at");
        dtos.push(TorrentDownloadDto {
            id: id.to_string(),
            library_id: library_id.to_string(),
            series_name: row.get("series_name"),
            expected_volumes: row.get("expected_volumes"),
            qb_hash: row.get("qb_hash"),
            content_path: row.get("content_path"),
            status: row.get("status"),
            imported_files: row.get("imported_files"),
            error_message: row.get("error_message"),
            created_at: created_at.to_rfc3339(),
            updated_at: updated_at.to_rfc3339(),
        });
    }
    Ok(Json(dtos))
}
// ─── Background poller ────────────────────────────────────────────────────────
/// Subset of the qBittorrent `/api/v2/torrents/info` response we care about.
#[derive(Deserialize)]
struct QbTorrentInfo {
hash: String,
// qBittorrent state string, e.g. "downloading", "uploading", "stalledUP".
state: String,
// Absolute path to the torrent content; may be absent on older qBittorrent versions.
content_path: Option<String>,
// Fallback pieces used to rebuild the content path when content_path is missing.
save_path: Option<String>,
name: Option<String>,
}
/// Completed states in qBittorrent: torrent is fully downloaded and seeding.
const QB_COMPLETED_STATES: &[&str] = &[
"uploading", "stalledUP", "pausedUP", "queuedUP", "checkingUP", "forcedUP",
];
/// Background loop: every `interval_seconds` (floored at 5s), check qBittorrent
/// for completed managed downloads. Errors are logged and the loop keeps going.
pub async fn run_torrent_poller(pool: PgPool, interval_seconds: u64) {
    // Never hammer qBittorrent more often than every 5 seconds.
    let period = Duration::from_secs(std::cmp::max(interval_seconds, 5));
    loop {
        match poll_qbittorrent_downloads(&pool).await {
            Ok(()) => {}
            Err(err) => warn!("[TORRENT_POLLER] {:#}", err),
        }
        tokio::time::sleep(period).await;
    }
}
/// One polling pass: query qBittorrent for every tracked download still in
/// 'downloading' and, for each one that reached a completed/seeding state,
/// flip its row to 'completed' and spawn the import task.
async fn poll_qbittorrent_downloads(pool: &PgPool) -> anyhow::Result<()> {
// Respect the global auto-import toggle.
if !is_torrent_import_enabled(pool).await {
return Ok(());
}
let rows = sqlx::query(
"SELECT id, qb_hash FROM torrent_downloads WHERE status = 'downloading' AND qb_hash IS NOT NULL",
)
.fetch_all(pool)
.await?;
// Only contact qBittorrent when there is actually something to check.
if rows.is_empty() {
trace!("[TORRENT_POLLER] No active downloads to poll");
return Ok(());
}
let (base_url, username, password) = load_qbittorrent_config(pool)
.await
.map_err(|e| anyhow::anyhow!("qBittorrent config: {}", e.message))?;
let client = reqwest::Client::builder()
.timeout(Duration::from_secs(10))
.build()?;
let sid = qbittorrent_login(&client, &base_url, &username, &password)
.await
.map_err(|e| anyhow::anyhow!("qBittorrent login: {}", e.message))?;
// torrents/info accepts a '|'-separated hash filter, so one request covers all.
let hashes: Vec<String> = rows
.iter()
.map(|r| { let h: String = r.get("qb_hash"); h })
.collect();
let hashes_param = hashes.join("|");
let resp = client
.get(format!("{base_url}/api/v2/torrents/info"))
.query(&[("hashes", &hashes_param)])
.header("Cookie", format!("SID={sid}"))
.send()
.await?;
if !resp.status().is_success() {
return Err(anyhow::anyhow!("qBittorrent API returned {}", resp.status()));
}
let infos: Vec<QbTorrentInfo> = resp.json().await?;
for info in &infos {
// Skip torrents that are still downloading / errored.
if !QB_COMPLETED_STATES.contains(&info.state.as_str()) {
continue;
}
// content_path is available since qBittorrent 4.3.2; fall back to save_path + name
let content_path = info.content_path.as_deref()
.filter(|p| !p.is_empty())
.map(str::to_owned)
.or_else(|| {
let save = info.save_path.as_deref().unwrap_or("").trim_end_matches('/');
let name = info.name.as_deref().unwrap_or("");
if name.is_empty() { None } else { Some(format!("{save}/{name}")) }
});
let Some(content_path) = content_path else {
warn!("[TORRENT_POLLER] Torrent {} completed but content_path unknown", info.hash);
continue;
};
// Map the qBittorrent hash back to our torrent_downloads row.
let row = rows.iter().find(|r| {
let h: String = r.get("qb_hash");
h == info.hash
});
let Some(row) = row else { continue; };
let torrent_id: Uuid = row.get("id");
// Guarded UPDATE ('AND status = downloading') so a concurrent webhook
// notification and this poller can't both trigger the import.
let updated = sqlx::query(
"UPDATE torrent_downloads SET status = 'completed', content_path = $1, updated_at = NOW() \
WHERE id = $2 AND status = 'downloading'",
)
.bind(&content_path)
.bind(torrent_id)
.execute(pool)
.await?;
if updated.rows_affected() > 0 {
info!("[TORRENT_POLLER] Torrent {} completed, content at {}, starting import", info.hash, content_path);
// Import runs detached so one slow import never blocks the poll loop.
let pool_clone = pool.clone();
tokio::spawn(async move {
if let Err(e) = process_torrent_import(pool_clone, torrent_id).await {
warn!("Torrent import {} failed: {:#}", torrent_id, e);
}
});
}
}
Ok(())
}
// ─── Import processing ────────────────────────────────────────────────────────
/// Read the `torrent_import.enabled` flag from app_settings.
/// Best-effort on purpose: a missing row, malformed JSON, or a DB error all
/// read as "disabled" rather than failing the caller.
async fn is_torrent_import_enabled(pool: &PgPool) -> bool {
    match sqlx::query("SELECT value FROM app_settings WHERE key = 'torrent_import'")
        .fetch_optional(pool)
        .await
    {
        Ok(Some(row)) => {
            let value: serde_json::Value = row.get("value");
            matches!(value.get("enabled").and_then(serde_json::Value::as_bool), Some(true))
        }
        _ => false,
    }
}
/// Drive one import end to end: load the row, mark it 'importing', move the
/// files via `do_import`, then record either 'imported' (+ file list + a queued
/// scan job) or 'error' (+ message). The returned Result only reflects DB
/// failures around the bookkeeping; import failures are persisted on the row.
async fn process_torrent_import(pool: PgPool, torrent_id: Uuid) -> anyhow::Result<()> {
let row = sqlx::query(
"SELECT library_id, series_name, expected_volumes, content_path \
FROM torrent_downloads WHERE id = $1",
)
.bind(torrent_id)
.fetch_one(&pool)
.await?;
let library_id: Uuid = row.get("library_id");
let series_name: String = row.get("series_name");
let expected_volumes: Vec<i32> = row.get("expected_volumes");
let content_path: Option<String> = row.get("content_path");
// Callers only schedule this after setting content_path; a missing value is a bug.
let content_path =
content_path.ok_or_else(|| anyhow::anyhow!("content_path not set on torrent_download"))?;
// Visible to the UI while files are being moved.
sqlx::query(
"UPDATE torrent_downloads SET status = 'importing', updated_at = NOW() WHERE id = $1",
)
.bind(torrent_id)
.execute(&pool)
.await?;
match do_import(&pool, library_id, &series_name, &expected_volumes, &content_path).await {
Ok(imported) => {
let json = serde_json::to_value(&imported).unwrap_or(serde_json::json!([]));
sqlx::query(
"UPDATE torrent_downloads SET status = 'imported', imported_files = $1, updated_at = NOW() WHERE id = $2",
)
.bind(json)
.bind(torrent_id)
.execute(&pool)
.await?;
// Queue a scan job so the indexer picks up the new files
let job_id = Uuid::new_v4();
sqlx::query(
"INSERT INTO index_jobs (id, library_id, type, status) VALUES ($1, $2, 'scan', 'pending')",
)
.bind(job_id)
.bind(library_id)
.execute(&pool)
.await?;
info!(
"Torrent import {} done: {} files imported, scan job {} queued",
torrent_id,
imported.len(),
job_id
);
}
Err(e) => {
// Persist the failure so the backoffice can display it on the download card.
let msg = format!("{e:#}");
warn!("Torrent import {} error: {}", torrent_id, msg);
sqlx::query(
"UPDATE torrent_downloads SET status = 'error', error_message = $1, updated_at = NOW() WHERE id = $2",
)
.bind(&msg)
.bind(torrent_id)
.execute(&pool)
.await?;
}
}
Ok(())
}
/// Move the downloaded book files into the target series directory.
///
/// For each candidate file under `content_path` whose detected volume numbers
/// intersect `expected_volumes`: single-volume files are renamed to match the
/// series' existing naming pattern (or a default "Series - TNN.ext" when the
/// series has no files yet); multi-volume packs keep their original filename.
/// Returns the list of files actually moved (existing destinations are skipped).
async fn do_import(
pool: &PgPool,
library_id: Uuid,
series_name: &str,
expected_volumes: &[i32],
content_path: &str,
) -> anyhow::Result<Vec<ImportedFile>> {
// content_path is container-relative (/downloads/...); remap for local dev.
let physical_content = remap_downloads_path(content_path);
// Find the target directory and reference file (latest volume) from existing book_files.
let ref_row = sqlx::query(
"SELECT bf.abs_path, b.volume \
FROM book_files bf \
JOIN books b ON b.id = bf.book_id \
WHERE b.library_id = $1 AND b.series = $2 AND b.volume IS NOT NULL \
ORDER BY b.volume DESC LIMIT 1",
)
.bind(library_id)
.bind(series_name)
.fetch_optional(pool)
.await?;
let (target_dir, reference) = if let Some(r) = ref_row {
let abs_path: String = r.get("abs_path");
let volume: i32 = r.get("volume");
let physical = remap_libraries_path(&abs_path);
// Import next to the most recent existing volume of the series.
let parent = std::path::Path::new(&physical)
.parent()
.map(|p| p.to_string_lossy().into_owned())
.unwrap_or(physical);
(parent, Some((abs_path, volume)))
} else {
// No existing files: create series directory inside library root
let lib_row = sqlx::query("SELECT root_path FROM libraries WHERE id = $1")
.bind(library_id)
.fetch_one(pool)
.await?;
let root_path: String = lib_row.get("root_path");
let physical_root = remap_libraries_path(&root_path);
let dir = format!("{}/{}", physical_root.trim_end_matches('/'), series_name);
(dir, None)
};
std::fs::create_dir_all(&target_dir)?;
let expected_set: std::collections::HashSet<i32> = expected_volumes.iter().copied().collect();
let mut imported = Vec::new();
for source_path in collect_book_files(&physical_content)? {
let filename = std::path::Path::new(&source_path)
.file_name()
.and_then(|n| n.to_str())
.unwrap_or("");
let ext = std::path::Path::new(&source_path)
.extension()
.and_then(|e| e.to_str())
.unwrap_or("");
// Volume numbers parsed from the filename (shared Prowlarr helper),
// restricted to the ones the user asked for.
let matched: Vec<i32> = extract_volumes_from_title_pub(filename)
.into_iter()
.filter(|v| expected_set.contains(v))
.collect();
if matched.is_empty() {
continue;
}
let target_filename = if matched.len() == 1 {
// Single volume: apply naming pattern from reference
let vol = matched[0];
if let Some((ref ref_path, ref_vol)) = reference {
build_target_filename(ref_path, ref_vol, vol, ext)
.unwrap_or_else(|| default_filename(series_name, vol, ext))
} else {
default_filename(series_name, vol, ext)
}
} else {
// Multi-volume pack: keep original filename (scanner handles ranges)
filename.to_string()
};
let dest = format!("{}/{}", target_dir, target_filename);
// Never overwrite a volume that is already in the library.
if std::path::Path::new(&dest).exists() {
info!("Skipping {} (already exists at destination)", dest);
continue;
}
move_file(&source_path, &dest)?;
info!("Imported {:?} → {}", matched, dest);
imported.push(ImportedFile {
volume: *matched.iter().min().unwrap(),
source: source_path.clone(),
// Store the logical /libraries path, consistent with book_files.abs_path.
destination: unmap_libraries_path(&dest),
});
}
Ok(imported)
}
// ─── Filesystem helpers ───────────────────────────────────────────────────────
/// Recursively gather every book archive (cbz/cbr/pdf/epub, case-insensitive)
/// found under `root` (which may itself be a single file).
fn collect_book_files(root: &str) -> anyhow::Result<Vec<String>> {
    const BOOK_EXTENSIONS: [&str; 4] = ["cbz", "cbr", "pdf", "epub"];
    let mut found = Vec::new();
    collect_recursive(root, &BOOK_EXTENSIONS, &mut found)?;
    Ok(found)
}
/// Depth-first walk appending every path whose extension matches `exts`
/// (case-insensitive) to `out`. `path` may be a single file or a directory.
fn collect_recursive(path: &str, exts: &[&str], out: &mut Vec<String>) -> anyhow::Result<()> {
    // True when the path carries one of the wanted extensions.
    fn has_wanted_ext(p: &std::path::Path, exts: &[&str]) -> bool {
        p.extension()
            .and_then(|e| e.to_str())
            .map_or(false, |ext| exts.iter().any(|&e| e.eq_ignore_ascii_case(ext)))
    }

    let node = std::path::Path::new(path);
    if node.is_file() {
        if has_wanted_ext(node, exts) {
            out.push(path.to_string());
        }
        return Ok(());
    }
    // Non-file root: treat as a directory (read_dir surfaces the error otherwise).
    for entry in std::fs::read_dir(path)? {
        let entry_path = entry?.path();
        if entry_path.is_dir() {
            collect_recursive(&entry_path.to_string_lossy(), exts, out)?;
        } else if has_wanted_ext(&entry_path, exts) {
            out.push(entry_path.to_string_lossy().into_owned());
        }
    }
    Ok(())
}
/// Move `src` to `dst`, falling back to copy + delete when a plain rename
/// fails (typically a cross-device/cross-volume move, e.g. /downloads →
/// /libraries on different Docker mounts).
fn move_file(src: &str, dst: &str) -> anyhow::Result<()> {
    if std::fs::rename(src, dst).is_ok() {
        return Ok(());
    }
    // Cross-device fallback: copy first, and only remove the source once the
    // copy fully succeeded. On copy failure, clean up any partially written
    // destination so we never leave a truncated book behind.
    if let Err(e) = std::fs::copy(src, dst) {
        let _ = std::fs::remove_file(dst);
        return Err(e.into());
    }
    std::fs::remove_file(src)?;
    Ok(())
}
// ─── Path remapping ───────────────────────────────────────────────────────────
/// Remap a container-internal `/downloads` path to the local path given by the
/// `DOWNLOADS_PATH` env var (dev remapping, mirroring `LIBRARIES_ROOT_PATH`).
/// Only exact `/downloads` or paths under `/downloads/` are remapped, so a
/// sibling directory such as `/downloads-old/x` is left untouched.
fn remap_downloads_path(path: &str) -> String {
    if let Ok(root) = std::env::var("DOWNLOADS_PATH") {
        if path == "/downloads" || path.starts_with("/downloads/") {
            return path.replacen("/downloads", &root, 1);
        }
    }
    path.to_string()
}
/// Remap a logical `/libraries/...` path to the physical root given by the
/// `LIBRARIES_ROOT_PATH` env var; paths outside `/libraries/` pass through.
fn remap_libraries_path(path: &str) -> String {
    match std::env::var("LIBRARIES_ROOT_PATH") {
        Ok(root) if path.starts_with("/libraries/") => path.replacen("/libraries", &root, 1),
        _ => path.to_string(),
    }
}
/// Inverse of `remap_libraries_path`: turn a physical path back into the
/// logical `/libraries/...` form stored in the database.
fn unmap_libraries_path(path: &str) -> String {
    if let Ok(root) = std::env::var("LIBRARIES_ROOT_PATH") {
        // Guard against an empty root: "".starts_with() is true for every path,
        // which would bogusly prefix "/libraries" onto everything.
        if !root.is_empty() && path.starts_with(&root) {
            return path.replacen(&root, "/libraries", 1);
        }
    }
    path.to_string()
}
// ─── Naming helpers ───────────────────────────────────────────────────────────
/// Fallback name when the series has no existing file to copy a pattern from:
/// "Series - TNN.ext" with the volume zero-padded to at least two digits.
fn default_filename(series_name: &str, volume: i32, ext: &str) -> String {
    format!("{series_name} - T{volume:02}.{ext}")
}
/// Infer the target filename for `new_volume` by reusing the naming pattern from
/// `reference_abs_path` (which stores `reference_volume`).
///
/// Strategy: find the last ASCII-digit run in the reference stem that parses to
/// `reference_volume`, and substitute `new_volume` zero-padded to the same width
/// (so leading zeros are preserved). Returns None when no run matches.
///
/// Example:
/// reference = "/libraries/bd/One Piece/One Piece - T104.cbz", reference_volume = 104
/// new_volume = 105, source_ext = "cbz"
/// → "One Piece - T105.cbz"
fn build_target_filename(
    reference_abs_path: &str,
    reference_volume: i32,
    new_volume: i32,
    source_ext: &str,
) -> Option<String> {
    let path = std::path::Path::new(reference_abs_path);
    let stem = path.file_stem()?.to_str()?;
    let ref_ext = path.extension().and_then(|e| e.to_str()).unwrap_or("cbz");
    // Keep the source file's extension when it has one (e.g. a cbr replacing a cbz pattern).
    let target_ext = if source_ext.is_empty() { ref_ext } else { source_ext };

    // Scan the stem for maximal ASCII-digit runs and remember the byte span of
    // the LAST run whose numeric value equals the reference volume (so the "04"
    // in "Code 451 - T04" wins over the "4" inside "451"). char_indices yields
    // valid byte offsets, and digits are always single-byte ASCII.
    let mut best: Option<(usize, usize)> = None;
    let mut run_start: Option<usize> = None;
    for (i, c) in stem.char_indices() {
        if c.is_ascii_digit() {
            run_start.get_or_insert(i);
        } else if let Some(s) = run_start.take() {
            if matches!(stem[s..i].parse::<i32>(), Ok(n) if n == reference_volume) {
                best = Some((s, i));
            }
        }
    }
    // A digit run that reaches the end of the stem is closed here.
    if let Some(s) = run_start {
        if matches!(stem[s..].parse::<i32>(), Ok(n) if n == reference_volume) {
            best = Some((s, stem.len()));
        }
    }

    let (start, end) = best?;
    let width = end - start;
    let new_stem = format!("{}{:0width$}{}", &stem[..start], new_volume, &stem[end..], width = width);
    Some(format!("{new_stem}.{target_ext}"))
}
#[cfg(test)]
mod tests {
// Unit tests for the volume-pattern renaming logic: each case checks that the
// digit run storing the reference volume is found, and that width/padding,
// extension handling, and non-ASCII stems behave as documented.
use super::build_target_filename;
#[test]
fn simple_t_prefix() {
// "One Piece - T104.cbz" → replace 104 → 105
let result = build_target_filename(
"/libraries/One Piece/One Piece - T104.cbz",
104,
105,
"cbz",
);
assert_eq!(result, Some("One Piece - T105.cbz".to_string()));
}
#[test]
fn preserves_leading_zeros() {
// Width of the matched run ("01") is kept → "02", not "2".
let result = build_target_filename(
"/libraries/Asterix/Asterix - T01.cbz",
1,
2,
"cbz",
);
assert_eq!(result, Some("Asterix - T02.cbz".to_string()));
}
#[test]
fn three_digit_zero_padded() {
// "001" parses to 1; replacement keeps the 3-digit width → "072".
let result = build_target_filename(
"/libraries/Naruto/Naruto T001.cbz",
1,
72,
"cbz",
);
assert_eq!(result, Some("Naruto T072.cbz".to_string()));
}
#[test]
fn different_source_ext() {
// Source file is cbr, reference is cbz
let result = build_target_filename(
"/libraries/DBZ/Dragon Ball - T01.cbz",
1,
5,
"cbr",
);
assert_eq!(result, Some("Dragon Ball - T05.cbr".to_string()));
}
#[test]
fn accented_series_name() {
// Multi-byte UTF-8 in the stem must not break the byte-offset digit scan.
let result = build_target_filename(
"/libraries/bd/Astérix - T01.cbz",
1,
3,
"cbz",
);
assert_eq!(result, Some("Astérix - T03.cbz".to_string()));
}
#[test]
fn no_match_returns_none() {
// Volume 5 not present in "Series - T01.cbz" whose reference_volume is 99
let result = build_target_filename(
"/libraries/Series/Series - T01.cbz",
99,
100,
"cbz",
);
assert_eq!(result, None);
}
#[test]
fn uses_last_occurrence() {
// "Code 451 - T04.cbz" with reference_volume=4 should replace the "04" not the "4" in 451
let result = build_target_filename(
"/libraries/Code 451/Code 451 - T04.cbz",
4,
5,
"cbz",
);
assert_eq!(result, Some("Code 451 - T05.cbz".to_string()));
}
}

View File

@@ -0,0 +1,222 @@
"use client";
import { useState, useEffect, useCallback } from "react";
import { TorrentDownloadDto } from "@/lib/api";
import { Card, CardContent, Button, Icon } from "@/app/components/ui";
import { useTranslation } from "@/lib/i18n/context";
import type { TranslationKey } from "@/lib/i18n/fr";
type TFunction = (key: TranslationKey, vars?: Record<string, string | number>) => string;

// Statuses considered "in progress" (drive the auto-refresh + the Active filter).
const STATUS_ACTIVE = new Set(["downloading", "completed", "importing"]);

/** i18n key per download status; unknown statuses fall back to the error label. */
const STATUS_LABEL_KEYS: Record<string, TranslationKey> = {
  downloading: "downloads.status.downloading",
  completed: "downloads.status.completed",
  importing: "downloads.status.importing",
  imported: "downloads.status.imported",
  error: "downloads.status.error",
};

/** Translate a download status into its user-facing label. */
function statusLabel(status: string, t: TFunction): string {
  return t(STATUS_LABEL_KEYS[status] ?? "downloads.status.error");
}
/** Tailwind badge classes for a download status (unknowns get the muted style). */
function statusClass(status: string): string {
  if (status === "downloading" || status === "importing") return "bg-primary/10 text-primary";
  if (status === "completed") return "bg-warning/10 text-warning";
  if (status === "imported") return "bg-success/10 text-success";
  if (status === "error") return "bg-destructive/10 text-destructive";
  return "bg-muted/30 text-muted-foreground";
}
/** Render volume numbers as "T01, T02, T10": sorted ascending, zero-padded to two digits. */
function formatVolumes(vols: number[]): string {
  const sorted = [...vols].sort((a, b) => a - b);
  const labels = sorted.map((v) => `T${String(v).padStart(2, "0")}`);
  return labels.join(", ");
}
/** Format an ISO timestamp as "dd/mm/yyyy hh:mm" (fr-FR locale, viewer's local timezone). */
function formatDate(iso: string): string {
  const opts: Intl.DateTimeFormatOptions = {
    day: "2-digit",
    month: "2-digit",
    year: "numeric",
    hour: "2-digit",
    minute: "2-digit",
  };
  return new Date(iso).toLocaleString("fr-FR", opts);
}
interface DownloadsPageProps {
// Server-rendered initial list; refreshed client-side afterwards.
initialDownloads: TorrentDownloadDto[];
}
/**
 * Backoffice /downloads page: lists managed torrent downloads with status
 * filters, a manual refresh button, and a 5s auto-refresh that only runs
 * while at least one download is still active.
 */
export function DownloadsPage({ initialDownloads }: DownloadsPageProps) {
const { t } = useTranslation();
const [downloads, setDownloads] = useState<TorrentDownloadDto[]>(initialDownloads);
// Current filter tab id: "all" | "active" | a concrete status ("imported", "error").
const [filter, setFilter] = useState<string>("all");
const [isRefreshing, setIsRefreshing] = useState(false);
// Re-fetch the list; background (auto) refreshes skip the spinner to avoid flicker.
const refresh = useCallback(async (showSpinner = true) => {
if (showSpinner) setIsRefreshing(true);
try {
const resp = await fetch("/api/torrent-downloads");
if (resp.ok) setDownloads(await resp.json());
} finally {
if (showSpinner) setIsRefreshing(false);
}
}, []);
// Auto-refresh every 5s while there are active downloads
const hasActive = downloads.some(d => STATUS_ACTIVE.has(d.status));
useEffect(() => {
if (!hasActive) return;
const id = setInterval(() => refresh(false), 5000);
// Clear the interval when downloads settle or the component unmounts.
return () => clearInterval(id);
}, [hasActive, refresh]);
// Filter tabs; "active" groups the three in-progress statuses.
const filters = [
{ id: "all", label: t("common.all") },
{ id: "active", label: t("downloads.filterActive") },
{ id: "imported", label: t("downloads.status.imported") },
{ id: "error", label: t("downloads.status.error") },
];
const visible = downloads.filter(d => {
if (filter === "all") return true;
if (filter === "active") return STATUS_ACTIVE.has(d.status);
return d.status === filter;
});
return (
<>
<div className="flex items-center justify-between mb-6">
<h1 className="text-3xl font-bold text-foreground flex items-center gap-3">
<Icon name="download" size="xl" />
{t("downloads.title")}
</h1>
<Button onClick={() => refresh(true)} disabled={isRefreshing} variant="outline" size="sm">
{isRefreshing ? (
<Icon name="spinner" size="sm" className="animate-spin" />
) : (
<Icon name="refresh" size="sm" />
)}
<span className="ml-2">{t("downloads.refresh")}</span>
</Button>
</div>
{/* Filter bar */}
<div className="flex gap-1 mb-4 border-b border-border">
{filters.map(f => (
<button
key={f.id}
onClick={() => setFilter(f.id)}
className={`px-4 py-2 text-sm font-medium border-b-2 transition-colors -mb-px ${
filter === f.id
? "border-primary text-primary"
: "border-transparent text-muted-foreground hover:text-foreground hover:border-border"
}`}
>
{f.label}
{/* Per-tab count badge (omitted on "all") */}
{f.id !== "all" && (
<span className="ml-1.5 text-xs opacity-60">
{downloads.filter(d => f.id === "active" ? STATUS_ACTIVE.has(d.status) : d.status === f.id).length}
</span>
)}
</button>
))}
</div>
{visible.length === 0 ? (
<Card className="mt-4">
<CardContent className="pt-16 pb-16 flex flex-col items-center justify-center gap-3 text-muted-foreground">
<Icon name="download" size="xl" className="opacity-30" />
<p className="text-sm">{t("downloads.empty")}</p>
</CardContent>
</Card>
) : (
<div className="space-y-3">
{visible.map(dl => (
<DownloadCard key={dl.id} dl={dl} />
))}
</div>
)}
</>
);
}
// Single download row: status icon, series name + status badge, volume list,
// import results (once imported), qBittorrent hash, error message, and
// created/updated timestamps.
function DownloadCard({ dl }: { dl: TorrentDownloadDto }) {
  const { t } = useTranslation();
  // imported_files may be null (or absent) before the import completes.
  const importedCount = Array.isArray(dl.imported_files) ? dl.imported_files.length : 0;
  return (
    <Card>
      <CardContent className="pt-4">
        <div className="flex items-start gap-4">
          {/* Status indicator */}
          {/* One icon per status; the final branch (refresh/warning color)
              covers "completed" — downloaded but not yet imported. */}
          <div className="mt-0.5">
            {dl.status === "importing" ? (
              <Icon name="spinner" size="md" className="animate-spin text-primary" />
            ) : dl.status === "imported" ? (
              <Icon name="check" size="md" className="text-success" />
            ) : dl.status === "error" ? (
              <Icon name="warning" size="md" className="text-destructive" />
            ) : dl.status === "downloading" ? (
              <Icon name="download" size="md" className="text-primary" />
            ) : (
              <Icon name="refresh" size="md" className="text-warning" />
            )}
          </div>
          {/* Main info */}
          <div className="flex-1 min-w-0">
            <div className="flex items-center gap-2 flex-wrap">
              <span className="font-semibold text-foreground truncate">{dl.series_name}</span>
              <span className={`text-xs font-medium px-2 py-0.5 rounded-full ${statusClass(dl.status)}`}>
                {statusLabel(dl.status, t)}
              </span>
            </div>
            <div className="mt-1 flex items-center gap-4 text-sm text-muted-foreground flex-wrap">
              {dl.expected_volumes.length > 0 && (
                <span>{t("downloads.volumes")} : {formatVolumes(dl.expected_volumes)}</span>
              )}
              {dl.status === "imported" && importedCount > 0 && (
                <span className="text-success">{importedCount} {t("downloads.filesImported")}</span>
              )}
              {/* Truncated torrent hash; full hash shown in the tooltip */}
              {dl.qb_hash && (
                <span className="font-mono text-xs opacity-50" title={dl.qb_hash}>
                  {dl.qb_hash.slice(0, 8)}
                </span>
              )}
            </div>
            {/* Source path is only interesting before the files are moved */}
            {dl.content_path && dl.status !== "imported" && (
              <p className="mt-1 text-xs font-mono text-muted-foreground truncate" title={dl.content_path}>
                {dl.content_path}
              </p>
            )}
            {dl.error_message && (
              <p className="mt-1 text-sm text-destructive">{dl.error_message}</p>
            )}
            {/* Per-file import results; destination is shown as basename only */}
            {dl.status === "imported" && Array.isArray(dl.imported_files) && dl.imported_files.length > 0 && (
              <ul className="mt-2 space-y-0.5">
                {(dl.imported_files as Array<{ volume: number; destination: string }>).map((f, i) => (
                  <li key={i} className="text-xs text-muted-foreground font-mono truncate" title={f.destination}>
                    T{String(f.volume).padStart(2, "0")} {f.destination.split("/").pop()}
                  </li>
                ))}
              </ul>
            )}
          </div>
          {/* Timestamp */}
          {/* "maj" (mis à jour) line only when the row was updated after creation */}
          <div className="text-xs text-muted-foreground shrink-0 text-right">
            <p>{formatDate(dl.created_at)}</p>
            {dl.updated_at !== dl.created_at && (
              <p className="opacity-60">maj {formatDate(dl.updated_at)}</p>
            )}
          </div>
        </div>
      </CardContent>
    </Card>
  );
}

View File

@@ -0,0 +1,9 @@
import { fetchTorrentDownloads, TorrentDownloadDto } from "@/lib/api";
import { DownloadsPage } from "./DownloadsPage";
// Always render on request: the downloads list is live data.
export const dynamic = "force-dynamic";

// Server component for /downloads: fetch the current list (empty list on any
// fetch failure) and hand it to the client page as initial state.
export default async function Page() {
  let downloads: TorrentDownloadDto[];
  try {
    downloads = await fetchTorrentDownloads();
  } catch {
    downloads = [];
  }
  return <DownloadsPage initialDownloads={downloads} />;
}

View File

@@ -24,6 +24,30 @@ export function DownloadDetectionReportCard({ report, t }: { report: DownloadDet
); );
} }
// Job-detail card listing series whose download detection ended in error.
// Renders nothing when there are no error results.
export function DownloadDetectionErrorsCard({ results, t }: {
  results: DownloadDetectionResultDto[];
  t: TranslateFunction;
}) {
  if (results.length === 0) return null;
  return (
    <Card className="lg:col-span-2">
      <CardHeader>
        <CardTitle>{t("jobDetail.downloadErrors")}</CardTitle>
        <CardDescription>{t("jobDetail.downloadErrorsDesc", { count: String(results.length) })}</CardDescription>
      </CardHeader>
      {/* Scrollable so a long error list doesn't blow up the page layout */}
      <CardContent className="space-y-2 max-h-80 overflow-y-auto">
        {results.map((r) => (
          <div key={r.id} className="p-3 bg-destructive/10 rounded-lg border border-destructive/20">
            <p className="text-sm font-semibold text-destructive mb-1">{r.series_name}</p>
            {/* NOTE(review): "Erreur inconnue" is a hardcoded French fallback;
                the rest of this card goes through t() — consider adding a
                translation key for it. */}
            <p className="text-sm text-destructive/80">{r.error_message ?? "Erreur inconnue"}</p>
          </div>
        ))}
      </CardContent>
    </Card>
  );
}
export function DownloadDetectionResultsCard({ results, libraryId, t }: { export function DownloadDetectionResultsCard({ results, libraryId, t }: {
results: DownloadDetectionResultDto[]; results: DownloadDetectionResultDto[];
libraryId: string | null; libraryId: string | null;

View File

@@ -11,7 +11,7 @@ import { JobTimelineCard } from "./components/JobTimelineCard";
import { JobProgressCard, IndexStatsCard, ThumbnailStatsCard } from "./components/JobProgressCard"; import { JobProgressCard, IndexStatsCard, ThumbnailStatsCard } from "./components/JobProgressCard";
import { MetadataBatchReportCard, MetadataBatchResultsCard, MetadataRefreshReportCard, MetadataRefreshChangesCard } from "./components/MetadataReportCards"; import { MetadataBatchReportCard, MetadataBatchResultsCard, MetadataRefreshReportCard, MetadataRefreshChangesCard } from "./components/MetadataReportCards";
import { ReadingStatusMatchReportCard, ReadingStatusMatchResultsCard, ReadingStatusPushReportCard, ReadingStatusPushResultsCard } from "./components/ReadingStatusReportCards"; import { ReadingStatusMatchReportCard, ReadingStatusMatchResultsCard, ReadingStatusPushReportCard, ReadingStatusPushResultsCard } from "./components/ReadingStatusReportCards";
import { DownloadDetectionReportCard, DownloadDetectionResultsCard } from "./components/DownloadDetectionCards"; import { DownloadDetectionReportCard, DownloadDetectionResultsCard, DownloadDetectionErrorsCard } from "./components/DownloadDetectionCards";
import { JobErrorsCard } from "./components/JobErrorsCard"; import { JobErrorsCard } from "./components/JobErrorsCard";
interface JobDetailPageProps { interface JobDetailPageProps {
@@ -148,10 +148,12 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
let downloadDetectionReport: DownloadDetectionReportDto | null = null; let downloadDetectionReport: DownloadDetectionReportDto | null = null;
let downloadDetectionResults: DownloadDetectionResultDto[] = []; let downloadDetectionResults: DownloadDetectionResultDto[] = [];
let downloadDetectionErrors: DownloadDetectionResultDto[] = [];
if (isDownloadDetection) { if (isDownloadDetection) {
[downloadDetectionReport, downloadDetectionResults] = await Promise.all([ [downloadDetectionReport, downloadDetectionResults, downloadDetectionErrors] = await Promise.all([
getDownloadDetectionReport(id).catch(() => null), getDownloadDetectionReport(id).catch(() => null),
getDownloadDetectionResults(id, "found").catch(() => []), getDownloadDetectionResults(id, "found").catch(() => []),
getDownloadDetectionResults(id, "error").catch(() => []),
]); ]);
} }
@@ -270,6 +272,7 @@ export default async function JobDetailPage({ params }: JobDetailPageProps) {
{/* Download detection */} {/* Download detection */}
{isDownloadDetection && downloadDetectionReport && <DownloadDetectionReportCard report={downloadDetectionReport} t={t} />} {isDownloadDetection && downloadDetectionReport && <DownloadDetectionReportCard report={downloadDetectionReport} t={t} />}
{isDownloadDetection && <DownloadDetectionErrorsCard results={downloadDetectionErrors} t={t} />}
{isDownloadDetection && <DownloadDetectionResultsCard results={downloadDetectionResults} libraryId={job.library_id} t={t} />} {isDownloadDetection && <DownloadDetectionResultsCard results={downloadDetectionResults} libraryId={job.library_id} t={t} />}
{/* Metadata batch results */} {/* Metadata batch results */}

View File

@@ -6,6 +6,7 @@ import { revalidatePath } from "next/cache";
import { ThemeToggle } from "@/app/theme-toggle"; import { ThemeToggle } from "@/app/theme-toggle";
import { JobsIndicator } from "@/app/components/JobsIndicator"; import { JobsIndicator } from "@/app/components/JobsIndicator";
import { NavIcon, Icon } from "@/app/components/ui"; import { NavIcon, Icon } from "@/app/components/ui";
import { NavLink } from "@/app/components/NavLink";
import { LogoutButton } from "@/app/components/LogoutButton"; import { LogoutButton } from "@/app/components/LogoutButton";
import { MobileNav } from "@/app/components/MobileNav"; import { MobileNav } from "@/app/components/MobileNav";
import { UserSwitcher } from "@/app/components/UserSwitcher"; import { UserSwitcher } from "@/app/components/UserSwitcher";
@@ -14,9 +15,9 @@ import { getServerTranslations } from "@/lib/i18n/server";
import type { TranslationKey } from "@/lib/i18n/fr"; import type { TranslationKey } from "@/lib/i18n/fr";
type NavItem = { type NavItem = {
href: "/" | "/books" | "/series" | "/authors" | "/libraries" | "/jobs" | "/tokens" | "/settings"; href: "/" | "/books" | "/series" | "/authors" | "/libraries" | "/jobs" | "/tokens" | "/settings" | "/downloads";
labelKey: TranslationKey; labelKey: TranslationKey;
icon: "dashboard" | "books" | "series" | "authors" | "libraries" | "jobs" | "tokens" | "settings"; icon: "dashboard" | "books" | "series" | "authors" | "libraries" | "jobs" | "tokens" | "settings" | "download";
}; };
const navItems: NavItem[] = [ const navItems: NavItem[] = [
@@ -24,6 +25,7 @@ const navItems: NavItem[] = [
{ href: "/series", labelKey: "nav.series", icon: "series" }, { href: "/series", labelKey: "nav.series", icon: "series" },
{ href: "/authors", labelKey: "nav.authors", icon: "authors" }, { href: "/authors", labelKey: "nav.authors", icon: "authors" },
{ href: "/libraries", labelKey: "nav.libraries", icon: "libraries" }, { href: "/libraries", labelKey: "nav.libraries", icon: "libraries" },
{ href: "/downloads", labelKey: "nav.downloads", icon: "download" },
{ href: "/jobs", labelKey: "nav.jobs", icon: "jobs" }, { href: "/jobs", labelKey: "nav.jobs", icon: "jobs" },
{ href: "/tokens", labelKey: "nav.tokens", icon: "tokens" }, { href: "/tokens", labelKey: "nav.tokens", icon: "tokens" },
]; ];
@@ -113,24 +115,3 @@ export default async function AppLayout({ children }: { children: ReactNode }) {
); );
} }
// Static nav link (no active-state highlighting): muted text that brightens
// on hover, with a subtle press-down scale effect.
function NavLink({ href, title, children }: { href: NavItem["href"]; title?: string; children: React.ReactNode }) {
  return (
    <Link
      href={href}
      title={title}
      className="
        flex items-center
        px-2 lg:px-3 py-2
        rounded-lg
        text-sm font-medium
        text-muted-foreground
        hover:text-foreground
        hover:bg-accent
        transition-colors duration-200
        active:scale-[0.98]
      "
    >
      {children}
    </Link>
  );
}

View File

@@ -1,7 +1,7 @@
"use client"; "use client";
import { useState, useEffect } from "react"; import { useState, useEffect } from "react";
import { Card, CardHeader, CardTitle, CardDescription, CardContent, Button, FormField, FormInput, Icon } from "@/app/components/ui"; import { Card, CardHeader, CardTitle, CardDescription, CardContent, Button, FormField, FormInput, FormSelect, Icon } from "@/app/components/ui";
import { useTranslation } from "@/lib/i18n/context"; import { useTranslation } from "@/lib/i18n/context";
export function QBittorrentCard({ handleUpdateSetting }: { handleUpdateSetting: (key: string, value: unknown) => Promise<void> }) { export function QBittorrentCard({ handleUpdateSetting }: { handleUpdateSetting: (key: string, value: unknown) => Promise<void> }) {
@@ -11,6 +11,7 @@ export function QBittorrentCard({ handleUpdateSetting }: { handleUpdateSetting:
const [qbPassword, setQbPassword] = useState(""); const [qbPassword, setQbPassword] = useState("");
const [isTesting, setIsTesting] = useState(false); const [isTesting, setIsTesting] = useState(false);
const [testResult, setTestResult] = useState<{ success: boolean; message: string } | null>(null); const [testResult, setTestResult] = useState<{ success: boolean; message: string } | null>(null);
const [importEnabled, setImportEnabled] = useState(false);
useEffect(() => { useEffect(() => {
fetch("/api/settings/qbittorrent") fetch("/api/settings/qbittorrent")
@@ -23,6 +24,10 @@ export function QBittorrentCard({ handleUpdateSetting }: { handleUpdateSetting:
} }
}) })
.catch(() => {}); .catch(() => {});
fetch("/api/settings/torrent_import")
.then((r) => (r.ok ? r.json() : null))
.then((data) => { if (data?.enabled !== undefined) setImportEnabled(data.enabled); })
.catch(() => {});
}, []); }, []);
function saveQbittorrent() { function saveQbittorrent() {
@@ -118,6 +123,32 @@ export function QBittorrentCard({ handleUpdateSetting }: { handleUpdateSetting:
</span> </span>
)} )}
</div> </div>
<div className="border-t border-border/40 pt-4">
<FormField className="max-w-xs">
<label className="text-sm font-medium text-muted-foreground mb-1 block">
{t("settings.torrentImportEnabled")}
</label>
<FormSelect
value={importEnabled ? "true" : "false"}
onChange={(e) => {
const val = e.target.value === "true";
setImportEnabled(val);
handleUpdateSetting("torrent_import", { enabled: val });
}}
>
<option value="false">{t("common.disabled")}</option>
<option value="true">{t("common.enabled")}</option>
</FormSelect>
</FormField>
{importEnabled && (
<div className="mt-3 rounded-lg border border-success/20 bg-success/5 p-3 flex items-start gap-2">
<Icon name="check" size="sm" className="text-success mt-0.5 shrink-0" />
<p className="text-sm text-muted-foreground">{t("settings.torrentImportPollingInfo")}</p>
</div>
)}
</div>
</div> </div>
</CardContent> </CardContent>
</Card> </Card>

View File

@@ -0,0 +1,11 @@
import { NextResponse } from "next/server";
import { apiFetch } from "@/lib/api";
// Proxies GET /torrent-downloads from the backend API to the browser.
// Any failure (network, backend error) is collapsed into a generic 500.
export async function GET() {
  try {
    return NextResponse.json(await apiFetch("/torrent-downloads"));
  } catch {
    return NextResponse.json(
      { error: "Failed to fetch torrent downloads" },
      { status: 500 },
    );
  }
}

View File

@@ -8,9 +8,9 @@ import { useTranslation } from "../../lib/i18n/context";
import type { UserDto } from "@/lib/api"; import type { UserDto } from "@/lib/api";
type NavItem = { type NavItem = {
href: "/" | "/books" | "/series" | "/authors" | "/libraries" | "/jobs" | "/tokens" | "/settings"; href: "/" | "/books" | "/series" | "/authors" | "/libraries" | "/downloads" | "/jobs" | "/tokens" | "/settings";
label: string; label: string;
icon: "dashboard" | "books" | "series" | "authors" | "libraries" | "jobs" | "tokens" | "settings"; icon: "dashboard" | "books" | "series" | "authors" | "libraries" | "download" | "jobs" | "tokens" | "settings";
}; };
const HamburgerIcon = () => ( const HamburgerIcon = () => (

View File

@@ -0,0 +1,38 @@
"use client";
import Link from "next/link";
import { usePathname } from "next/navigation";
// Navigation link that highlights itself when the current route is the link
// target or one of its sub-routes.
//
// Bug fix: the previous check used `pathname.startsWith(href)`, which also
// matched sibling routes sharing a prefix (e.g. href="/downloads" lit up on
// "/downloads-archive"). Requiring a "/" after the prefix restricts the match
// to true sub-routes.
export function NavLink({
  href,
  title,
  children,
}: {
  href: string;
  title?: string;
  children: React.ReactNode;
}) {
  const pathname = usePathname();
  // Active on an exact match, or on a sub-route ("/jobs" -> "/jobs/123").
  // "/" is excluded from prefix matching so it is only active when exact.
  const isActive =
    pathname === href || (href !== "/" && pathname.startsWith(`${href}/`));
  return (
    <Link
      href={href as "/"}
      title={title}
      className={`
        flex items-center
        px-2 lg:px-3 py-2
        rounded-lg
        text-sm font-medium
        transition-colors duration-200
        active:scale-[0.98]
        ${isActive
          ? "text-primary bg-primary/10 hover:bg-primary/15"
          : "text-muted-foreground hover:text-foreground hover:bg-accent"
        }
      `}
    >
      {children}
    </Link>
  );
}

View File

@@ -1282,8 +1282,27 @@ export type ProwlarrTestResponse = {
export type QBittorrentAddResponse = { export type QBittorrentAddResponse = {
success: boolean; success: boolean;
message: string; message: string;
torrent_download_id?: string | null;
}; };
// A tracked qBittorrent download as returned by GET /torrent-downloads.
export type TorrentDownloadDto = {
  id: string;
  // Target library the files will be imported into.
  library_id: string;
  series_name: string;
  // Volume numbers the torrent is expected to contain (empty = unknown).
  expected_volumes: number[];
  // qBittorrent torrent hash; null until the torrent is registered.
  qb_hash: string | null;
  // Download path reported by qBittorrent; null until known.
  content_path: string | null;
  status: "downloading" | "completed" | "importing" | "imported" | "error";
  // Per-file import results; null until the import has run.
  imported_files: Array<{ volume: number; source: string; destination: string }> | null;
  error_message: string | null;
  // ISO-8601 timestamps.
  created_at: string;
  updated_at: string;
};

// Fetches all tracked torrent downloads from the backend API.
export async function fetchTorrentDownloads(): Promise<TorrentDownloadDto[]> {
  return apiFetch<TorrentDownloadDto[]>("/torrent-downloads");
}
export type QBittorrentTestResponse = { export type QBittorrentTestResponse = {
success: boolean; success: boolean;
message: string; message: string;

View File

@@ -6,6 +6,7 @@ const en: Record<TranslationKey, string> = {
"nav.books": "Books", "nav.books": "Books",
"nav.series": "Series", "nav.series": "Series",
"nav.libraries": "Libraries", "nav.libraries": "Libraries",
"nav.downloads": "Downloads",
"nav.jobs": "Jobs", "nav.jobs": "Jobs",
"nav.tokens": "Tokens", "nav.tokens": "Tokens",
"nav.users": "Users", "nav.users": "Users",
@@ -401,6 +402,8 @@ const en: Record<TranslationKey, string> = {
"jobDetail.downloadAvailableReleases": "Available releases", "jobDetail.downloadAvailableReleases": "Available releases",
"jobDetail.downloadAvailableReleasesDesc": "{{count}} series with at least one release found", "jobDetail.downloadAvailableReleasesDesc": "{{count}} series with at least one release found",
"jobDetail.downloadMissingCount": "{{count}} missing", "jobDetail.downloadMissingCount": "{{count}} missing",
"jobDetail.downloadErrors": "Detection errors",
"jobDetail.downloadErrorsDesc": "{{count}} series with errors during detection",
// Job types // Job types
"jobType.rebuild": "Indexing", "jobType.rebuild": "Indexing",
@@ -882,6 +885,25 @@ const en: Record<TranslationKey, string> = {
"series.metadataAll": "All", "series.metadataAll": "All",
"series.metadataLinked": "Linked", "series.metadataLinked": "Linked",
"series.metadataUnlinked": "Not linked", "series.metadataUnlinked": "Not linked",
// Downloads page
"downloads.title": "Downloads",
"downloads.refresh": "Refresh",
"downloads.filterActive": "In progress",
"downloads.empty": "No downloads",
"downloads.volumes": "Volumes",
"downloads.filesImported": "files imported",
"downloads.status.downloading": "Downloading",
"downloads.status.completed": "Completed",
"downloads.status.importing": "Importing",
"downloads.status.imported": "Imported",
"downloads.status.error": "Error",
// Settings - Torrent Import
"settings.torrentImport": "Auto import",
"settings.torrentImportDesc": "When enabled, torrents added via the backoffice are tracked and files are automatically imported into the library when the download completes.",
"settings.torrentImportEnabled": "Enable auto import",
"settings.torrentImportPollingInfo": "The API polls qBittorrent every 30 seconds to detect completed downloads. No additional configuration in qBittorrent is required.",
}; };
export default en; export default en;

View File

@@ -4,6 +4,7 @@ const fr = {
"nav.books": "Livres", "nav.books": "Livres",
"nav.series": "Séries", "nav.series": "Séries",
"nav.libraries": "Bibliothèques", "nav.libraries": "Bibliothèques",
"nav.downloads": "Téléchargements",
"nav.jobs": "Tâches", "nav.jobs": "Tâches",
"nav.tokens": "Jetons", "nav.tokens": "Jetons",
"nav.users": "Utilisateurs", "nav.users": "Utilisateurs",
@@ -399,6 +400,8 @@ const fr = {
"jobDetail.downloadAvailableReleases": "Releases disponibles", "jobDetail.downloadAvailableReleases": "Releases disponibles",
"jobDetail.downloadAvailableReleasesDesc": "{{count}} série(s) avec au moins une release trouvée", "jobDetail.downloadAvailableReleasesDesc": "{{count}} série(s) avec au moins une release trouvée",
"jobDetail.downloadMissingCount": "{{count}} manquant(s)", "jobDetail.downloadMissingCount": "{{count}} manquant(s)",
"jobDetail.downloadErrors": "Erreurs de détection",
"jobDetail.downloadErrorsDesc": "{{count}} série(s) en erreur lors de la détection",
// Job types // Job types
"jobType.rebuild": "Indexation", "jobType.rebuild": "Indexation",
@@ -880,6 +883,25 @@ const fr = {
"series.metadataAll": "Toutes", "series.metadataAll": "Toutes",
"series.metadataLinked": "Associée", "series.metadataLinked": "Associée",
"series.metadataUnlinked": "Non associée", "series.metadataUnlinked": "Non associée",
// Downloads page
"downloads.title": "Téléchargements",
"downloads.refresh": "Actualiser",
"downloads.filterActive": "En cours",
"downloads.empty": "Aucun téléchargement",
"downloads.volumes": "Volumes",
"downloads.filesImported": "fichiers importés",
"downloads.status.downloading": "Téléchargement",
"downloads.status.completed": "Terminé",
"downloads.status.importing": "Import en cours",
"downloads.status.imported": "Importé",
"downloads.status.error": "Erreur",
// Settings - Torrent Import
"settings.torrentImport": "Import automatique",
"settings.torrentImportDesc": "Lorsqu'activé, les torrents ajoutés via le backoffice sont suivis et les fichiers sont automatiquement importés dans la bibliothèque à la fin du téléchargement.",
"settings.torrentImportEnabled": "Activer l'import automatique",
"settings.torrentImportPollingInfo": "L'API interroge qBittorrent toutes les 30 secondes pour détecter les téléchargements terminés. Aucune configuration supplémentaire dans qBittorrent n'est nécessaire.",
} as const; } as const;
export type TranslationKey = keyof typeof fr; export type TranslationKey = keyof typeof fr;

View File

@@ -1,6 +1,10 @@
FROM rust:1-bookworm AS builder FROM rust:1-bookworm AS builder
WORKDIR /app WORKDIR /app
# Install corporate CA certificate (Cato Networks)
COPY CATO-CDBDX-SUBCA.chain.pem /usr/local/share/ca-certificates/cato.crt
RUN update-ca-certificates
# Copy workspace manifests and create dummy source files to cache dependency builds # Copy workspace manifests and create dummy source files to cache dependency builds
COPY Cargo.toml ./ COPY Cargo.toml ./
COPY apps/api/Cargo.toml apps/api/Cargo.toml COPY apps/api/Cargo.toml apps/api/Cargo.toml

View File

@@ -26,6 +26,7 @@ services:
volumes: volumes:
- ${LIBRARIES_HOST_PATH:-./libraries}:/libraries - ${LIBRARIES_HOST_PATH:-./libraries}:/libraries
- ${THUMBNAILS_HOST_PATH:-./data/thumbnails}:/data/thumbnails - ${THUMBNAILS_HOST_PATH:-./data/thumbnails}:/data/thumbnails
- ${DOWNLOADS_HOST_PATH:-./data/downloads}:/downloads
depends_on: depends_on:
postgres: postgres:
condition: service_healthy condition: service_healthy

View File

@@ -0,0 +1,30 @@
-- Extend the allowed job types with the new download-related jobs
-- ('download_detection', 'torrent_import'). The existing CHECK constraint is
-- dropped (if present) and recreated with the full list in one statement.
ALTER TABLE index_jobs
    DROP CONSTRAINT IF EXISTS index_jobs_type_check,
    ADD CONSTRAINT index_jobs_type_check
    CHECK (type IN (
        'scan', 'rebuild', 'full_rebuild', 'rescan',
        'thumbnail_rebuild', 'thumbnail_regenerate',
        'cbr_to_cbz',
        'metadata_batch', 'metadata_refresh',
        'reading_status_match', 'reading_status_push',
        'download_detection',
        'torrent_import'
    ));
-- Tracks torrents added via the backoffice so completed downloads can be
-- detected by the qBittorrent poller and auto-imported into the library.
CREATE TABLE torrent_downloads (
    id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    -- Target library; rows are removed together with their library.
    library_id UUID NOT NULL REFERENCES libraries(id) ON DELETE CASCADE,
    series_name TEXT NOT NULL,
    -- Volume numbers the torrent is expected to contain (empty = unknown).
    expected_volumes INTEGER[] NOT NULL DEFAULT '{}',
    -- qBittorrent torrent hash; NULL until the torrent is registered.
    qb_hash TEXT,
    -- Download path reported by qBittorrent; NULL until known.
    content_path TEXT,
    status TEXT NOT NULL DEFAULT 'downloading'
        CHECK (status IN ('downloading', 'completed', 'importing', 'imported', 'error')),
    -- JSON array of per-file import results; NULL until the import has run.
    imported_files JSONB,
    error_message TEXT,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

-- The background poller filters on status; partial index keeps hash lookups
-- cheap without indexing the NULL rows.
CREATE INDEX torrent_downloads_status_idx ON torrent_downloads(status);
CREATE INDEX torrent_downloads_qb_hash_idx ON torrent_downloads(qb_hash) WHERE qb_hash IS NOT NULL;

-- PostgreSQL does not index foreign-key columns automatically; without this,
-- deleting a library sequentially scans torrent_downloads for the cascade.
CREATE INDEX torrent_downloads_library_id_idx ON torrent_downloads(library_id);