chore: bump version to 2.12.1
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 55s
All checks were successful
Deploy with Docker Compose / deploy (push) Successful in 55s
This commit is contained in:
@@ -123,6 +123,7 @@ async fn main() -> anyhow::Result<()> {
|
||||
.route("/qbittorrent/add", axum::routing::post(qbittorrent::add_torrent))
|
||||
.route("/qbittorrent/test", get(qbittorrent::test_qbittorrent))
|
||||
.route("/torrent-downloads", get(torrent_import::list_torrent_downloads))
|
||||
.route("/torrent-downloads/:id", axum::routing::delete(torrent_import::delete_torrent_download))
|
||||
.route("/telegram/test", get(telegram::test_telegram))
|
||||
.route("/komga/sync", axum::routing::post(komga::sync_komga_read_books))
|
||||
.route("/komga/reports", get(komga::list_sync_reports))
|
||||
|
||||
@@ -143,10 +143,24 @@ pub async fn add_torrent(
|
||||
|
||||
let sid = qbittorrent_login(&client, &base_url, &username, &password).await?;
|
||||
|
||||
// Pre-generate the download ID so we can tag the torrent with it
|
||||
let download_id = if is_managed { Some(Uuid::new_v4()) } else { None };
|
||||
let tag = download_id.map(|id| format!("sl-{id}"));
|
||||
|
||||
// Snapshot existing torrents before adding (for hash resolution)
|
||||
let torrents_before = if is_managed {
|
||||
list_qbittorrent_torrents(&client, &base_url, &sid).await.unwrap_or_default()
|
||||
} else {
|
||||
Vec::new()
|
||||
};
|
||||
|
||||
let mut form_params: Vec<(&str, &str)> = vec![("urls", &body.url)];
|
||||
let savepath = "/downloads";
|
||||
if is_managed {
|
||||
form_params.push(("savepath", savepath));
|
||||
if let Some(ref t) = tag {
|
||||
form_params.push(("tags", t));
|
||||
}
|
||||
}
|
||||
|
||||
let resp = client
|
||||
@@ -172,9 +186,16 @@ pub async fn add_torrent(
|
||||
let library_id = body.library_id.unwrap();
|
||||
let series_name = body.series_name.as_deref().unwrap();
|
||||
let expected_volumes = body.expected_volumes.as_deref().unwrap();
|
||||
let qb_hash = extract_magnet_hash(&body.url);
|
||||
|
||||
let id = Uuid::new_v4();
|
||||
// Try to resolve hash: first from magnet, then by querying qBittorrent
|
||||
let mut qb_hash = extract_magnet_hash(&body.url);
|
||||
if qb_hash.is_none() {
|
||||
// For .torrent URLs: wait briefly then query qBittorrent to find the torrent
|
||||
tokio::time::sleep(std::time::Duration::from_secs(2)).await;
|
||||
qb_hash = resolve_hash_from_qbittorrent(&client, &base_url, &sid, &torrents_before, series_name).await;
|
||||
}
|
||||
|
||||
let id = download_id.unwrap();
|
||||
sqlx::query(
|
||||
"INSERT INTO torrent_downloads (id, library_id, series_name, expected_volumes, qb_hash) \
|
||||
VALUES ($1, $2, $3, $4, $5)",
|
||||
@@ -187,6 +208,8 @@ pub async fn add_torrent(
|
||||
.execute(&state.pool)
|
||||
.await?;
|
||||
|
||||
tracing::info!("Created torrent download {id} for {series_name}, qb_hash={qb_hash:?}");
|
||||
|
||||
Some(id)
|
||||
} else {
|
||||
None
|
||||
@@ -199,8 +222,9 @@ pub async fn add_torrent(
|
||||
}))
|
||||
}
|
||||
|
||||
/// Extract the info-hash from a magnet link (lowercased, hex or base32).
|
||||
/// Extract the info-hash from a magnet link (lowercased hex).
|
||||
/// magnet:?xt=urn:btih:HASH...
|
||||
/// Handles both hex (40 chars) and base32 (32 chars) encoded hashes.
|
||||
fn extract_magnet_hash(url: &str) -> Option<String> {
|
||||
let lower = url.to_lowercase();
|
||||
let marker = "urn:btih:";
|
||||
@@ -210,7 +234,119 @@ fn extract_magnet_hash(url: &str) -> Option<String> {
|
||||
.find(|c: char| !c.is_alphanumeric())
|
||||
.unwrap_or(hash_part.len());
|
||||
let hash = &hash_part[..end];
|
||||
if hash.is_empty() { None } else { Some(hash.to_string()) }
|
||||
if hash.is_empty() {
|
||||
return None;
|
||||
}
|
||||
// 40-char hex hash: use as-is
|
||||
if hash.len() == 40 && hash.chars().all(|c| c.is_ascii_hexdigit()) {
|
||||
return Some(hash.to_string());
|
||||
}
|
||||
// 32-char base32 hash: decode to hex
|
||||
if hash.len() == 32 {
|
||||
if let Some(hex) = base32_to_hex(hash) {
|
||||
return Some(hex);
|
||||
}
|
||||
}
|
||||
// Fallback: return as-is (may not match qBittorrent)
|
||||
Some(hash.to_string())
|
||||
}
|
||||
|
||||
/// Decode a base32-encoded string to a lowercase hex string.
///
/// Accepts the RFC 4648 alphabet (A–Z, 2–7), case-insensitively.
/// Returns `None` for any character outside that alphabet, or when the
/// decoded payload is not exactly 20 bytes (the size of a BitTorrent
/// info-hash: 32 base32 symbols × 5 bits = 160 bits).
fn base32_to_hex(input: &str) -> Option<String> {
    const ALPHABET: &[u8] = b"ABCDEFGHIJKLMNOPQRSTUVWXYZ234567";

    let mut acc: u64 = 0;
    let mut pending_bits = 0u32;
    let mut decoded = Vec::with_capacity(20);

    for ch in input.to_uppercase().bytes() {
        // Map the symbol to its 5-bit value; bail out on anything
        // outside the base32 alphabet.
        let value = ALPHABET.iter().position(|&c| c == ch)? as u64;
        acc = (acc << 5) | value;
        pending_bits += 5;
        // Drain one full byte whenever 8+ bits are buffered.
        if pending_bits >= 8 {
            pending_bits -= 8;
            decoded.push((acc >> pending_bits) as u8);
            acc &= (1u64 << pending_bits) - 1;
        }
    }

    // Anything other than a 160-bit payload is not an info-hash.
    (decoded.len() == 20).then(|| decoded.iter().map(|b| format!("{b:02x}")).collect())
}
|
||||
|
||||
/// Torrent entry from qBittorrent API.
///
/// Deserialized from the JSON array returned by qBittorrent's torrent
/// listing endpoint; only the fields this module actually reads are
/// declared here.
#[derive(Deserialize, Clone)]
struct QbTorrentEntry {
    // Info-hash identifying the torrent in qBittorrent.
    hash: String,
    // Display name; defaults to "" when absent from the API response.
    #[serde(default)]
    name: String,
}
|
||||
|
||||
/// List all torrents currently in qBittorrent.
|
||||
async fn list_qbittorrent_torrents(
|
||||
client: &reqwest::Client,
|
||||
base_url: &str,
|
||||
sid: &str,
|
||||
) -> Result<Vec<QbTorrentEntry>, ApiError> {
|
||||
let resp = client
|
||||
.get(format!("{base_url}/api/v2/torrents/info"))
|
||||
.header("Cookie", format!("SID={sid}"))
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| ApiError::internal(format!("qBittorrent list failed: {e}")))?;
|
||||
|
||||
if !resp.status().is_success() {
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
|
||||
Ok(resp.json().await.unwrap_or_default())
|
||||
}
|
||||
|
||||
/// Resolve the hash of a torrent after adding it to qBittorrent.
|
||||
/// Strategy:
|
||||
/// 1. Compare before/after snapshots to find the new torrent (works for new torrents)
|
||||
/// 2. If no new torrent found (already existed), search by series name in torrent names
|
||||
async fn resolve_hash_from_qbittorrent(
|
||||
client: &reqwest::Client,
|
||||
base_url: &str,
|
||||
sid: &str,
|
||||
torrents_before: &[QbTorrentEntry],
|
||||
series_name: &str,
|
||||
) -> Option<String> {
|
||||
let torrents_after = list_qbittorrent_torrents(client, base_url, sid).await.ok()?;
|
||||
let before_hashes: std::collections::HashSet<&str> = torrents_before.iter().map(|t| t.hash.as_str()).collect();
|
||||
|
||||
// Strategy 1: diff — find the one new torrent
|
||||
let new_torrents: Vec<&QbTorrentEntry> = torrents_after.iter()
|
||||
.filter(|t| !before_hashes.contains(t.hash.as_str()))
|
||||
.collect();
|
||||
if new_torrents.len() == 1 {
|
||||
tracing::info!("[QBITTORRENT] Resolved hash {} via diff (new torrent: {})", new_torrents[0].hash, new_torrents[0].name);
|
||||
return Some(new_torrents[0].hash.clone());
|
||||
}
|
||||
|
||||
// Strategy 2: torrent already existed — search by series name in torrent names
|
||||
let series_lower = series_name.to_lowercase();
|
||||
// Normalize: "Dandadan" matches "Dandadan.T02.FRENCH.CBZ..."
|
||||
let candidates: Vec<&QbTorrentEntry> = torrents_after.iter()
|
||||
.filter(|t| t.name.to_lowercase().contains(&series_lower))
|
||||
.collect();
|
||||
|
||||
if candidates.len() == 1 {
|
||||
tracing::info!("[QBITTORRENT] Resolved hash {} via name match ({})", candidates[0].hash, candidates[0].name);
|
||||
return Some(candidates[0].hash.clone());
|
||||
}
|
||||
|
||||
if candidates.len() > 1 {
|
||||
tracing::warn!("[QBITTORRENT] Multiple torrents match series '{}': {}", series_name,
|
||||
candidates.iter().map(|c| c.name.as_str()).collect::<Vec<_>>().join(", "));
|
||||
} else {
|
||||
tracing::warn!("[QBITTORRENT] No torrent found matching series '{}'", series_name);
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
/// Test connection to qBittorrent
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use axum::{extract::State, Json};
|
||||
use axum::{extract::{Path, State}, Json};
|
||||
use chrono::{DateTime, Utc};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::{PgPool, Row};
|
||||
@@ -35,6 +35,9 @@ pub struct TorrentDownloadDto {
|
||||
pub status: String,
|
||||
pub imported_files: Option<serde_json::Value>,
|
||||
pub error_message: Option<String>,
|
||||
pub progress: f32,
|
||||
pub download_speed: i64,
|
||||
pub eta: i64,
|
||||
pub created_at: String,
|
||||
pub updated_at: String,
|
||||
}
|
||||
@@ -102,7 +105,7 @@ pub async fn list_torrent_downloads(
|
||||
) -> Result<Json<Vec<TorrentDownloadDto>>, ApiError> {
|
||||
let rows = sqlx::query(
|
||||
"SELECT id, library_id, series_name, expected_volumes, qb_hash, content_path, \
|
||||
status, imported_files, error_message, created_at, updated_at \
|
||||
status, imported_files, error_message, progress, download_speed, eta, created_at, updated_at \
|
||||
FROM torrent_downloads ORDER BY created_at DESC LIMIT 100",
|
||||
)
|
||||
.fetch_all(&state.pool)
|
||||
@@ -126,6 +129,9 @@ pub async fn list_torrent_downloads(
|
||||
status: row.get("status"),
|
||||
imported_files: row.get("imported_files"),
|
||||
error_message: row.get("error_message"),
|
||||
progress: row.get("progress"),
|
||||
download_speed: row.get("download_speed"),
|
||||
eta: row.get("eta"),
|
||||
created_at: created_at.to_rfc3339(),
|
||||
updated_at: updated_at.to_rfc3339(),
|
||||
}
|
||||
@@ -135,6 +141,55 @@ pub async fn list_torrent_downloads(
|
||||
Ok(Json(dtos))
|
||||
}
|
||||
|
||||
/// Delete a torrent download entry. If the torrent is still downloading, also remove it from qBittorrent.
|
||||
pub async fn delete_torrent_download(
|
||||
State(state): State<AppState>,
|
||||
Path(id): Path<Uuid>,
|
||||
) -> Result<Json<serde_json::Value>, ApiError> {
|
||||
let row = sqlx::query("SELECT qb_hash, status FROM torrent_downloads WHERE id = $1")
|
||||
.bind(id)
|
||||
.fetch_optional(&state.pool)
|
||||
.await?;
|
||||
|
||||
let Some(row) = row else {
|
||||
return Err(ApiError::not_found("torrent download not found"));
|
||||
};
|
||||
|
||||
let qb_hash: Option<String> = row.get("qb_hash");
|
||||
let status: String = row.get("status");
|
||||
|
||||
// If downloading, try to cancel in qBittorrent
|
||||
if status == "downloading" {
|
||||
if let Some(ref hash) = qb_hash {
|
||||
if let Ok((base_url, username, password)) = load_qbittorrent_config(&state.pool).await {
|
||||
let client = reqwest::Client::builder()
|
||||
.timeout(Duration::from_secs(10))
|
||||
.build()
|
||||
.ok();
|
||||
if let Some(client) = client {
|
||||
if let Ok(sid) = qbittorrent_login(&client, &base_url, &username, &password).await {
|
||||
let _ = client
|
||||
.post(format!("{base_url}/api/v2/torrents/delete"))
|
||||
.header("Cookie", format!("SID={sid}"))
|
||||
.form(&[("hashes", hash.as_str()), ("deleteFiles", "true")])
|
||||
.send()
|
||||
.await;
|
||||
info!("Deleted torrent {} from qBittorrent", hash);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
sqlx::query("DELETE FROM torrent_downloads WHERE id = $1")
|
||||
.bind(id)
|
||||
.execute(&state.pool)
|
||||
.await?;
|
||||
|
||||
info!("Deleted torrent download {id}");
|
||||
Ok(Json(serde_json::json!({ "ok": true })))
|
||||
}
|
||||
|
||||
// ─── Background poller ────────────────────────────────────────────────────────
|
||||
|
||||
#[derive(Deserialize)]
|
||||
@@ -144,6 +199,12 @@ struct QbTorrentInfo {
|
||||
content_path: Option<String>,
|
||||
save_path: Option<String>,
|
||||
name: Option<String>,
|
||||
#[serde(default)]
|
||||
progress: f64,
|
||||
#[serde(default)]
|
||||
dlspeed: i64,
|
||||
#[serde(default)]
|
||||
eta: i64,
|
||||
}
|
||||
|
||||
/// Completed states in qBittorrent: torrent is fully downloaded and seeding.
|
||||
@@ -152,29 +213,35 @@ const QB_COMPLETED_STATES: &[&str] = &[
|
||||
];
|
||||
|
||||
pub async fn run_torrent_poller(pool: PgPool, interval_seconds: u64) {
|
||||
let wait = Duration::from_secs(interval_seconds.max(5));
|
||||
let idle_wait = Duration::from_secs(interval_seconds.max(5));
|
||||
let active_wait = Duration::from_secs(2);
|
||||
loop {
|
||||
if let Err(e) = poll_qbittorrent_downloads(&pool).await {
|
||||
warn!("[TORRENT_POLLER] {:#}", e);
|
||||
}
|
||||
tokio::time::sleep(wait).await;
|
||||
let has_active = match poll_qbittorrent_downloads(&pool).await {
|
||||
Ok(active) => active,
|
||||
Err(e) => {
|
||||
warn!("[TORRENT_POLLER] {:#}", e);
|
||||
false
|
||||
}
|
||||
};
|
||||
tokio::time::sleep(if has_active { active_wait } else { idle_wait }).await;
|
||||
}
|
||||
}
|
||||
|
||||
async fn poll_qbittorrent_downloads(pool: &PgPool) -> anyhow::Result<()> {
|
||||
/// Returns Ok(true) if there are active downloads, Ok(false) otherwise.
|
||||
async fn poll_qbittorrent_downloads(pool: &PgPool) -> anyhow::Result<bool> {
|
||||
if !is_torrent_import_enabled(pool).await {
|
||||
return Ok(());
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
let rows = sqlx::query(
|
||||
"SELECT id, qb_hash FROM torrent_downloads WHERE status = 'downloading' AND qb_hash IS NOT NULL",
|
||||
"SELECT id, qb_hash FROM torrent_downloads WHERE status = 'downloading'",
|
||||
)
|
||||
.fetch_all(pool)
|
||||
.await?;
|
||||
|
||||
if rows.is_empty() {
|
||||
trace!("[TORRENT_POLLER] No active downloads to poll");
|
||||
return Ok(());
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
let (base_url, username, password) = load_qbittorrent_config(pool)
|
||||
@@ -189,6 +256,16 @@ async fn poll_qbittorrent_downloads(pool: &PgPool) -> anyhow::Result<()> {
|
||||
.await
|
||||
.map_err(|e| anyhow::anyhow!("qBittorrent login: {}", e.message))?;
|
||||
|
||||
// Filter to rows that have a resolved hash
|
||||
let rows: Vec<_> = rows.into_iter().filter(|r| {
|
||||
let qb_hash: Option<String> = r.get("qb_hash");
|
||||
qb_hash.is_some()
|
||||
}).collect();
|
||||
|
||||
if rows.is_empty() {
|
||||
return Ok(true);
|
||||
}
|
||||
|
||||
let hashes: Vec<String> = rows
|
||||
.iter()
|
||||
.map(|r| { let h: String = r.get("qb_hash"); h })
|
||||
@@ -209,6 +286,25 @@ async fn poll_qbittorrent_downloads(pool: &PgPool) -> anyhow::Result<()> {
|
||||
let infos: Vec<QbTorrentInfo> = resp.json().await?;
|
||||
|
||||
for info in &infos {
|
||||
// Update progress for all active torrents
|
||||
let row = rows.iter().find(|r| {
|
||||
let h: String = r.get("qb_hash");
|
||||
h == info.hash
|
||||
});
|
||||
if let Some(row) = row {
|
||||
let tid: Uuid = row.get("id");
|
||||
let _ = sqlx::query(
|
||||
"UPDATE torrent_downloads SET progress = $1, download_speed = $2, eta = $3, updated_at = NOW() \
|
||||
WHERE id = $4 AND status = 'downloading'",
|
||||
)
|
||||
.bind(info.progress as f32)
|
||||
.bind(info.dlspeed)
|
||||
.bind(info.eta)
|
||||
.bind(tid)
|
||||
.execute(pool)
|
||||
.await;
|
||||
}
|
||||
|
||||
if !QB_COMPLETED_STATES.contains(&info.state.as_str()) {
|
||||
continue;
|
||||
}
|
||||
@@ -228,15 +324,15 @@ async fn poll_qbittorrent_downloads(pool: &PgPool) -> anyhow::Result<()> {
|
||||
continue;
|
||||
};
|
||||
|
||||
let row = rows.iter().find(|r| {
|
||||
let Some(row) = rows.iter().find(|r| {
|
||||
let h: String = r.get("qb_hash");
|
||||
h == info.hash
|
||||
});
|
||||
let Some(row) = row else { continue; };
|
||||
}) else { continue; };
|
||||
let torrent_id: Uuid = row.get("id");
|
||||
|
||||
let updated = sqlx::query(
|
||||
"UPDATE torrent_downloads SET status = 'completed', content_path = $1, updated_at = NOW() \
|
||||
"UPDATE torrent_downloads SET status = 'completed', content_path = $1, progress = 1, \
|
||||
download_speed = 0, eta = 0, updated_at = NOW() \
|
||||
WHERE id = $2 AND status = 'downloading'",
|
||||
)
|
||||
.bind(&content_path)
|
||||
@@ -255,7 +351,15 @@ async fn poll_qbittorrent_downloads(pool: &PgPool) -> anyhow::Result<()> {
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
// Still active if any rows remain in 'downloading' status
|
||||
let still_active = sqlx::query_scalar::<_, i64>(
|
||||
"SELECT COUNT(*) FROM torrent_downloads WHERE status = 'downloading'",
|
||||
)
|
||||
.fetch_one(pool)
|
||||
.await
|
||||
.unwrap_or(0);
|
||||
|
||||
Ok(still_active > 0)
|
||||
}
|
||||
|
||||
// ─── Import processing ────────────────────────────────────────────────────────
|
||||
|
||||
Reference in New Issue
Block a user