feat(indexing): Lot 4 - Progression temps reel, Full Rebuild, Optimisations
- Ajout migrations DB: index_job_errors, library_monitoring, full_rebuild_type - API: endpoints progression temps reel (/jobs/:id/stream), active jobs, details - API: support full_rebuild avec suppression donnees existantes - Indexer: logs detailles avec timing [SCAN][META][PARSER][BDD] - Indexer: optimisation parsing PDF (lopdf -> pdfinfo) 235x plus rapide - Indexer: corrections chemins LIBRARIES_ROOT_PATH pour dev local - Backoffice: composants JobProgress, JobsIndicator (header), JobsList - Backoffice: SSE streaming pour progression temps reel - Backoffice: boutons Index/Index Full sur page libraries - Backoffice: highlight job apres creation avec redirection - Fix: parsing volume type i32, sync meilisearch cleanup Perf: parsing PDF passe de 8.7s a 37ms Perf: indexation 45 fichiers en ~15s vs plusieurs minutes avant
This commit is contained in:
@@ -9,11 +9,14 @@ anyhow.workspace = true
|
||||
argon2.workspace = true
|
||||
axum.workspace = true
|
||||
base64.workspace = true
|
||||
async-stream = "0.3"
|
||||
chrono.workspace = true
|
||||
futures = "0.3"
|
||||
image.workspace = true
|
||||
lru.workspace = true
|
||||
stripstream-core = { path = "../../crates/core" }
|
||||
rand.workspace = true
|
||||
tokio-stream = "0.1"
|
||||
reqwest.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
@@ -21,6 +24,7 @@ sha2.workspace = true
|
||||
sqlx.workspace = true
|
||||
tokio.workspace = true
|
||||
tower.workspace = true
|
||||
tower-http = { version = "0.6", features = ["cors"] }
|
||||
tracing.workspace = true
|
||||
tracing-subscriber.workspace = true
|
||||
uuid.workspace = true
|
||||
|
||||
@@ -1,7 +1,10 @@
|
||||
use axum::{extract::State, Json};
|
||||
use axum::{extract::State, response::sse::{Event, Sse}, Json};
|
||||
use chrono::{DateTime, Utc};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::Row;
|
||||
use std::convert::Infallible;
|
||||
use std::time::Duration;
|
||||
use tokio_stream::Stream;
|
||||
use uuid::Uuid;
|
||||
use utoipa::ToSchema;
|
||||
|
||||
@@ -11,6 +14,8 @@ use crate::{error::ApiError, AppState};
|
||||
pub struct RebuildRequest {
|
||||
#[schema(value_type = Option<String>)]
|
||||
pub library_id: Option<Uuid>,
|
||||
#[schema(value_type = Option<bool>, example = false)]
|
||||
pub full: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, ToSchema)]
|
||||
@@ -37,6 +42,49 @@ pub struct FolderItem {
|
||||
pub path: String,
|
||||
}
|
||||
|
||||
/// Full job record returned by the job-details endpoint, including the
/// live-progress columns added for real-time monitoring.
#[derive(Serialize, ToSchema)]
pub struct IndexJobDetailResponse {
    /// Job UUID.
    #[schema(value_type = String)]
    pub id: Uuid,
    /// Library this job targets; None for a global rebuild.
    #[schema(value_type = Option<String>)]
    pub library_id: Option<Uuid>,
    /// Job kind (e.g. "rebuild" or "full_rebuild"); raw identifier because
    /// `type` is a Rust keyword.
    pub r#type: String,
    /// Lifecycle state (e.g. "pending", "running", "success", "failed", "cancelled").
    pub status: String,
    #[schema(value_type = Option<String>)]
    pub started_at: Option<DateTime<Utc>>,
    #[schema(value_type = Option<String>)]
    pub finished_at: Option<DateTime<Utc>>,
    /// Free-form stats blob produced by the indexer.
    pub stats_json: Option<serde_json::Value>,
    /// Error message when the job failed; None otherwise.
    pub error_opt: Option<String>,
    #[schema(value_type = String)]
    pub created_at: DateTime<Utc>,
    /// File currently being processed (live progress).
    pub current_file: Option<String>,
    /// Completion percentage, 0-100 (live progress).
    pub progress_percent: Option<i32>,
    /// Total files discovered for this job, once known.
    pub total_files: Option<i32>,
    /// Files processed so far.
    pub processed_files: Option<i32>,
}
|
||||
|
||||
/// One per-file error recorded while an indexing job ran
/// (rows of the index_job_errors table).
#[derive(Serialize, ToSchema)]
pub struct JobErrorResponse {
    /// Error row UUID.
    #[schema(value_type = String)]
    pub id: Uuid,
    /// Path of the file that failed to index.
    pub file_path: String,
    /// Human-readable failure description.
    pub error_message: String,
    #[schema(value_type = String)]
    pub created_at: DateTime<Utc>,
}
|
||||
|
||||
/// Payload serialized into each SSE `data:` frame by the job-progress
/// stream endpoint.
#[derive(Serialize, ToSchema)]
pub struct ProgressEvent {
    /// Job UUID as a string (SSE payloads are plain JSON).
    pub job_id: String,
    /// Current lifecycle state of the job.
    pub status: String,
    /// File currently being processed, if any.
    pub current_file: Option<String>,
    /// Completion percentage, 0-100.
    pub progress_percent: Option<i32>,
    pub processed_files: Option<i32>,
    pub total_files: Option<i32>,
    /// Indexer statistics snapshot (scanned/indexed/removed/errors).
    pub stats_json: Option<serde_json::Value>,
}
|
||||
|
||||
/// Enqueue a job to rebuild the search index for a library (or all libraries if no library_id specified)
|
||||
#[utoipa::path(
|
||||
post,
|
||||
@@ -54,14 +102,17 @@ pub async fn enqueue_rebuild(
|
||||
State(state): State<AppState>,
|
||||
payload: Option<Json<RebuildRequest>>,
|
||||
) -> Result<Json<IndexJobResponse>, ApiError> {
|
||||
let library_id = payload.and_then(|p| p.0.library_id);
|
||||
let library_id = payload.as_ref().and_then(|p| p.0.library_id);
|
||||
let is_full = payload.as_ref().and_then(|p| p.0.full).unwrap_or(false);
|
||||
let job_type = if is_full { "full_rebuild" } else { "rebuild" };
|
||||
let id = Uuid::new_v4();
|
||||
|
||||
sqlx::query(
|
||||
"INSERT INTO index_jobs (id, library_id, type, status) VALUES ($1, $2, 'rebuild', 'pending')",
|
||||
"INSERT INTO index_jobs (id, library_id, type, status) VALUES ($1, $2, $3, 'pending')",
|
||||
)
|
||||
.bind(id)
|
||||
.bind(library_id)
|
||||
.bind(job_type)
|
||||
.execute(&state.pool)
|
||||
.await?;
|
||||
|
||||
@@ -138,6 +189,10 @@ pub async fn cancel_job(
|
||||
Ok(Json(map_row(row)))
|
||||
}
|
||||
|
||||
/// Root directory where libraries live on disk.
///
/// Reads LIBRARIES_ROOT_PATH (set for local development); falls back to the
/// container-default "/libraries" when the variable is absent or invalid.
fn get_libraries_root() -> String {
    match std::env::var("LIBRARIES_ROOT_PATH") {
        Ok(root) => root,
        Err(_) => String::from("/libraries"),
    }
}
|
||||
|
||||
/// List available folders in /libraries for library creation
|
||||
#[utoipa::path(
|
||||
get,
|
||||
@@ -151,7 +206,8 @@ pub async fn cancel_job(
|
||||
security(("Bearer" = []))
|
||||
)]
|
||||
pub async fn list_folders(State(_state): State<AppState>) -> Result<Json<Vec<FolderItem>>, ApiError> {
|
||||
let libraries_path = std::path::Path::new("/libraries");
|
||||
let libraries_root = get_libraries_root();
|
||||
let libraries_path = std::path::Path::new(&libraries_root);
|
||||
let mut folders = Vec::new();
|
||||
|
||||
if let Ok(entries) = std::fs::read_dir(libraries_path) {
|
||||
@@ -170,7 +226,7 @@ pub async fn list_folders(State(_state): State<AppState>) -> Result<Json<Vec<Fol
|
||||
Ok(Json(folders))
|
||||
}
|
||||
|
||||
fn map_row(row: sqlx::postgres::PgRow) -> IndexJobResponse {
|
||||
pub fn map_row(row: sqlx::postgres::PgRow) -> IndexJobResponse {
|
||||
IndexJobResponse {
|
||||
id: row.get("id"),
|
||||
library_id: row.get("library_id"),
|
||||
@@ -183,3 +239,213 @@ fn map_row(row: sqlx::postgres::PgRow) -> IndexJobResponse {
|
||||
created_at: row.get("created_at"),
|
||||
}
|
||||
}
|
||||
|
||||
fn map_row_detail(row: sqlx::postgres::PgRow) -> IndexJobDetailResponse {
|
||||
IndexJobDetailResponse {
|
||||
id: row.get("id"),
|
||||
library_id: row.get("library_id"),
|
||||
r#type: row.get("type"),
|
||||
status: row.get("status"),
|
||||
started_at: row.get("started_at"),
|
||||
finished_at: row.get("finished_at"),
|
||||
stats_json: row.get("stats_json"),
|
||||
error_opt: row.get("error_opt"),
|
||||
created_at: row.get("created_at"),
|
||||
current_file: row.get("current_file"),
|
||||
progress_percent: row.get("progress_percent"),
|
||||
total_files: row.get("total_files"),
|
||||
processed_files: row.get("processed_files"),
|
||||
}
|
||||
}
|
||||
|
||||
/// List active indexing jobs (pending or running)
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/index/jobs/active",
|
||||
tag = "indexing",
|
||||
responses(
|
||||
(status = 200, body = Vec<IndexJobResponse>),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
(status = 403, description = "Forbidden - Admin scope required"),
|
||||
),
|
||||
security(("Bearer" = []))
|
||||
)]
|
||||
pub async fn get_active_jobs(State(state): State<AppState>) -> Result<Json<Vec<IndexJobResponse>>, ApiError> {
|
||||
let rows = sqlx::query(
|
||||
"SELECT id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at
|
||||
FROM index_jobs
|
||||
WHERE status IN ('pending', 'running')
|
||||
ORDER BY created_at ASC"
|
||||
)
|
||||
.fetch_all(&state.pool)
|
||||
.await?;
|
||||
|
||||
Ok(Json(rows.into_iter().map(map_row).collect()))
|
||||
}
|
||||
|
||||
/// Get detailed job information including progress
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/index/jobs/{id}/details",
|
||||
tag = "indexing",
|
||||
params(
|
||||
("id" = String, Path, description = "Job UUID"),
|
||||
),
|
||||
responses(
|
||||
(status = 200, body = IndexJobDetailResponse),
|
||||
(status = 404, description = "Job not found"),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
(status = 403, description = "Forbidden - Admin scope required"),
|
||||
),
|
||||
security(("Bearer" = []))
|
||||
)]
|
||||
pub async fn get_job_details(
|
||||
State(state): State<AppState>,
|
||||
id: axum::extract::Path<Uuid>,
|
||||
) -> Result<Json<IndexJobDetailResponse>, ApiError> {
|
||||
let row = sqlx::query(
|
||||
"SELECT id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at,
|
||||
current_file, progress_percent, total_files, processed_files
|
||||
FROM index_jobs WHERE id = $1"
|
||||
)
|
||||
.bind(id.0)
|
||||
.fetch_optional(&state.pool)
|
||||
.await?;
|
||||
|
||||
match row {
|
||||
Some(row) => Ok(Json(map_row_detail(row))),
|
||||
None => Err(ApiError::not_found("job not found")),
|
||||
}
|
||||
}
|
||||
|
||||
/// List errors for a specific job
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/index/jobs/{id}/errors",
|
||||
tag = "indexing",
|
||||
params(
|
||||
("id" = String, Path, description = "Job UUID"),
|
||||
),
|
||||
responses(
|
||||
(status = 200, body = Vec<JobErrorResponse>),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
(status = 403, description = "Forbidden - Admin scope required"),
|
||||
),
|
||||
security(("Bearer" = []))
|
||||
)]
|
||||
pub async fn get_job_errors(
|
||||
State(state): State<AppState>,
|
||||
id: axum::extract::Path<Uuid>,
|
||||
) -> Result<Json<Vec<JobErrorResponse>>, ApiError> {
|
||||
let rows = sqlx::query(
|
||||
"SELECT id, file_path, error_message, created_at
|
||||
FROM index_job_errors
|
||||
WHERE job_id = $1
|
||||
ORDER BY created_at ASC"
|
||||
)
|
||||
.bind(id.0)
|
||||
.fetch_all(&state.pool)
|
||||
.await?;
|
||||
|
||||
let errors: Vec<JobErrorResponse> = rows
|
||||
.into_iter()
|
||||
.map(|row| JobErrorResponse {
|
||||
id: row.get("id"),
|
||||
file_path: row.get("file_path"),
|
||||
error_message: row.get("error_message"),
|
||||
created_at: row.get("created_at"),
|
||||
})
|
||||
.collect();
|
||||
|
||||
Ok(Json(errors))
|
||||
}
|
||||
|
||||
/// Stream job progress via SSE
///
/// Polls the index_jobs row every 500 ms and yields an SSE event whenever
/// the job's status or processed-file count changes, ending the stream when
/// the job reaches a terminal state ("success", "failed", "cancelled").
#[utoipa::path(
    get,
    path = "/index/jobs/{id}/stream",
    tag = "indexing",
    params(
        ("id" = String, Path, description = "Job UUID"),
    ),
    responses(
        (status = 200, description = "SSE stream of progress events"),
        (status = 404, description = "Job not found"),
        (status = 401, description = "Unauthorized"),
        (status = 403, description = "Forbidden - Admin scope required"),
    ),
    security(("Bearer" = []))
)]
pub async fn stream_job_progress(
    State(state): State<AppState>,
    id: axum::extract::Path<Uuid>,
) -> Result<Sse<impl Stream<Item = Result<Event, Infallible>>>, ApiError> {
    // Verify job exists (404 before committing to an SSE response).
    let job_exists = sqlx::query("SELECT 1 FROM index_jobs WHERE id = $1")
        .bind(id.0)
        .fetch_optional(&state.pool)
        .await?;

    if job_exists.is_none() {
        return Err(ApiError::not_found("job not found"));
    }

    let job_id = id.0;
    // Clone the pool handle so the generator below is 'static.
    let pool = state.pool.clone();

    let stream = async_stream::stream! {
        // Last values sent to the client; used to suppress duplicate events.
        let mut last_status: Option<String> = None;
        let mut last_processed: Option<i32> = None;
        // First tick of a tokio interval fires immediately, so the client
        // gets an initial snapshot right away, then one poll every 500 ms.
        let mut interval = tokio::time::interval(Duration::from_millis(500));

        loop {
            interval.tick().await;

            let row = sqlx::query(
                "SELECT status, current_file, progress_percent, processed_files, total_files, stats_json
                 FROM index_jobs WHERE id = $1"
            )
            .bind(job_id)
            .fetch_one(&pool)
            .await;

            match row {
                Ok(row) => {
                    let status: String = row.get("status");
                    let processed_files: Option<i32> = row.get("processed_files");

                    // Send update if status changed or progress changed.
                    // NOTE(review): updates where only current_file changes
                    // (same processed count) are skipped — confirm that is
                    // acceptable for the UI.
                    let should_send = last_status.as_ref() != Some(&status)
                        || last_processed != processed_files;

                    if should_send {
                        last_status = Some(status.clone());
                        last_processed = processed_files;

                        let event = ProgressEvent {
                            job_id: job_id.to_string(),
                            status: status.clone(),
                            current_file: row.get("current_file"),
                            progress_percent: row.get("progress_percent"),
                            processed_files,
                            total_files: row.get("total_files"),
                            stats_json: row.get("stats_json"),
                        };

                        // Serialization failure silently drops the event;
                        // ProgressEvent derives Serialize so this should not fail.
                        if let Ok(json) = serde_json::to_string(&event) {
                            yield Ok(Event::default().data(json));
                        }

                        // Stop streaming if job is finished
                        if status == "success" || status == "failed" || status == "cancelled" {
                            break;
                        }
                    }
                }
                // NOTE(review): a DB error (including the row being deleted
                // mid-stream) ends the stream without a terminal event —
                // clients must treat connection close as "done or gone".
                Err(_) => break,
            }
        }
    };

    // Default keep-alive comments prevent proxies from idling out the stream.
    Ok(Sse::new(stream).keep_alive(axum::response::sse::KeepAlive::default()))
}
|
||||
|
||||
@@ -152,3 +152,61 @@ fn canonicalize_library_root(root_path: &str) -> Result<PathBuf, ApiError> {
|
||||
|
||||
Ok(canonical)
|
||||
}
|
||||
|
||||
use crate::index_jobs::{IndexJobResponse, RebuildRequest};
|
||||
|
||||
/// Trigger a scan/indexing job for a specific library
|
||||
#[utoipa::path(
|
||||
post,
|
||||
path = "/libraries/{id}/scan",
|
||||
tag = "libraries",
|
||||
params(
|
||||
("id" = String, Path, description = "Library UUID"),
|
||||
),
|
||||
request_body = Option<RebuildRequest>,
|
||||
responses(
|
||||
(status = 200, body = IndexJobResponse),
|
||||
(status = 404, description = "Library not found"),
|
||||
(status = 401, description = "Unauthorized"),
|
||||
(status = 403, description = "Forbidden - Admin scope required"),
|
||||
),
|
||||
security(("Bearer" = []))
|
||||
)]
|
||||
pub async fn scan_library(
|
||||
State(state): State<AppState>,
|
||||
AxumPath(library_id): AxumPath<Uuid>,
|
||||
payload: Option<Json<RebuildRequest>>,
|
||||
) -> Result<Json<IndexJobResponse>, ApiError> {
|
||||
// Verify library exists
|
||||
let library_exists = sqlx::query("SELECT 1 FROM libraries WHERE id = $1")
|
||||
.bind(library_id)
|
||||
.fetch_optional(&state.pool)
|
||||
.await?;
|
||||
|
||||
if library_exists.is_none() {
|
||||
return Err(ApiError::not_found("library not found"));
|
||||
}
|
||||
|
||||
let is_full = payload.as_ref().and_then(|p| p.full).unwrap_or(false);
|
||||
let job_type = if is_full { "full_rebuild" } else { "rebuild" };
|
||||
|
||||
// Create indexing job for this library
|
||||
let job_id = Uuid::new_v4();
|
||||
sqlx::query(
|
||||
"INSERT INTO index_jobs (id, library_id, type, status) VALUES ($1, $2, $3, 'pending')",
|
||||
)
|
||||
.bind(job_id)
|
||||
.bind(library_id)
|
||||
.bind(job_type)
|
||||
.execute(&state.pool)
|
||||
.await?;
|
||||
|
||||
let row = sqlx::query(
|
||||
"SELECT id, library_id, type, status, started_at, finished_at, stats_json, error_opt, created_at FROM index_jobs WHERE id = $1",
|
||||
)
|
||||
.bind(job_id)
|
||||
.fetch_one(&state.pool)
|
||||
.await?;
|
||||
|
||||
Ok(Json(crate::index_jobs::map_row(row)))
|
||||
}
|
||||
|
||||
@@ -95,8 +95,13 @@ async fn main() -> anyhow::Result<()> {
|
||||
let admin_routes = Router::new()
|
||||
.route("/libraries", get(libraries::list_libraries).post(libraries::create_library))
|
||||
.route("/libraries/:id", delete(libraries::delete_library))
|
||||
.route("/libraries/:id/scan", axum::routing::post(libraries::scan_library))
|
||||
.route("/index/rebuild", axum::routing::post(index_jobs::enqueue_rebuild))
|
||||
.route("/index/status", get(index_jobs::list_index_jobs))
|
||||
.route("/index/jobs/active", get(index_jobs::get_active_jobs))
|
||||
.route("/index/jobs/:id", get(index_jobs::get_job_details))
|
||||
.route("/index/jobs/:id/stream", get(index_jobs::stream_job_progress))
|
||||
.route("/index/jobs/:id/errors", get(index_jobs::get_job_errors))
|
||||
.route("/index/cancel/:id", axum::routing::post(index_jobs::cancel_job))
|
||||
.route("/folders", get(index_jobs::list_folders))
|
||||
.route("/admin/tokens", get(tokens::list_tokens).post(tokens::create_token))
|
||||
|
||||
@@ -11,11 +11,16 @@ use utoipa::OpenApi;
|
||||
crate::search::search_books,
|
||||
crate::index_jobs::enqueue_rebuild,
|
||||
crate::index_jobs::list_index_jobs,
|
||||
crate::index_jobs::get_active_jobs,
|
||||
crate::index_jobs::get_job_details,
|
||||
crate::index_jobs::stream_job_progress,
|
||||
crate::index_jobs::get_job_errors,
|
||||
crate::index_jobs::cancel_job,
|
||||
crate::index_jobs::list_folders,
|
||||
crate::libraries::list_libraries,
|
||||
crate::libraries::create_library,
|
||||
crate::libraries::delete_library,
|
||||
crate::libraries::scan_library,
|
||||
crate::tokens::list_tokens,
|
||||
crate::tokens::create_token,
|
||||
crate::tokens::revoke_token,
|
||||
@@ -32,6 +37,9 @@ use utoipa::OpenApi;
|
||||
crate::search::SearchResponse,
|
||||
crate::index_jobs::RebuildRequest,
|
||||
crate::index_jobs::IndexJobResponse,
|
||||
crate::index_jobs::IndexJobDetailResponse,
|
||||
crate::index_jobs::JobErrorResponse,
|
||||
crate::index_jobs::ProgressEvent,
|
||||
crate::index_jobs::FolderItem,
|
||||
crate::libraries::LibraryResponse,
|
||||
crate::libraries::CreateLibraryRequest,
|
||||
|
||||
@@ -20,6 +20,15 @@ use uuid::Uuid;
|
||||
|
||||
use crate::{error::ApiError, AppState};
|
||||
|
||||
/// Remap the canonical "/libraries" prefix stored in the database to the
/// local root configured via LIBRARIES_ROOT_PATH (used for local development).
///
/// Returns the path unchanged when the variable is unset or when the path
/// does not live under "/libraries".
///
/// Fixes over the original: a configured root with a trailing slash no
/// longer yields "root//rest", and the bare "/libraries" path (which the
/// old `starts_with("/libraries/")` check missed) is now remapped too.
fn remap_libraries_path(path: &str) -> String {
    if let Ok(root) = std::env::var("LIBRARIES_ROOT_PATH") {
        // Normalise a trailing slash on the configured root.
        let root = root.trim_end_matches('/');
        if let Some(rest) = path.strip_prefix("/libraries/") {
            return format!("{}/{}", root, rest);
        }
        if path == "/libraries" {
            return root.to_string();
        }
    }
    path.to_string()
}
|
||||
|
||||
#[derive(Deserialize, ToSchema)]
|
||||
pub struct PageQuery {
|
||||
#[schema(value_type = Option<String>, example = "webp")]
|
||||
@@ -122,6 +131,8 @@ pub async fn get_page(
|
||||
|
||||
let row = row.ok_or_else(|| ApiError::not_found("book file not found"))?;
|
||||
let abs_path: String = row.get("abs_path");
|
||||
// Remap /libraries to LIBRARIES_ROOT_PATH for local development
|
||||
let abs_path = remap_libraries_path(&abs_path);
|
||||
let input_format: String = row.get("format");
|
||||
|
||||
let _permit = state
|
||||
|
||||
4
apps/backoffice/.env.local
Normal file
4
apps/backoffice/.env.local
Normal file
@@ -0,0 +1,4 @@
|
||||
API_BASE_URL=http://localhost:8080
|
||||
API_BOOTSTRAP_TOKEN=stripstream-dev-bootstrap-token
|
||||
NEXT_PUBLIC_API_BASE_URL=http://localhost:8080
|
||||
NEXT_PUBLIC_API_BOOTSTRAP_TOKEN=stripstream-dev-bootstrap-token
|
||||
36
apps/backoffice/app/api/jobs/[id]/cancel/route.ts
Normal file
36
apps/backoffice/app/api/jobs/[id]/cancel/route.ts
Normal file
@@ -0,0 +1,36 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
|
||||
export async function POST(
|
||||
request: NextRequest,
|
||||
{ params }: { params: Promise<{ id: string }> }
|
||||
) {
|
||||
const { id } = await params;
|
||||
const apiBaseUrl = process.env.API_BASE_URL || "http://api:8080";
|
||||
const apiToken = process.env.API_BOOTSTRAP_TOKEN;
|
||||
|
||||
if (!apiToken) {
|
||||
return NextResponse.json({ error: "API token not configured" }, { status: 500 });
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await fetch(`${apiBaseUrl}/index/cancel/${id}`, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
Authorization: `Bearer ${apiToken}`,
|
||||
},
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
return NextResponse.json(
|
||||
{ error: `API error: ${response.status}` },
|
||||
{ status: response.status }
|
||||
);
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
return NextResponse.json(data);
|
||||
} catch (error) {
|
||||
console.error("Proxy error:", error);
|
||||
return NextResponse.json({ error: "Failed to cancel job" }, { status: 500 });
|
||||
}
|
||||
}
|
||||
35
apps/backoffice/app/api/jobs/[id]/route.ts
Normal file
35
apps/backoffice/app/api/jobs/[id]/route.ts
Normal file
@@ -0,0 +1,35 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
|
||||
export async function GET(
|
||||
request: NextRequest,
|
||||
{ params }: { params: Promise<{ id: string }> }
|
||||
) {
|
||||
const { id } = await params;
|
||||
const apiBaseUrl = process.env.API_BASE_URL || "http://api:8080";
|
||||
const apiToken = process.env.API_BOOTSTRAP_TOKEN;
|
||||
|
||||
if (!apiToken) {
|
||||
return NextResponse.json({ error: "API token not configured" }, { status: 500 });
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await fetch(`${apiBaseUrl}/index/jobs/${id}`, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${apiToken}`,
|
||||
},
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
return NextResponse.json(
|
||||
{ error: `API error: ${response.status}` },
|
||||
{ status: response.status }
|
||||
);
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
return NextResponse.json(data);
|
||||
} catch (error) {
|
||||
console.error("Proxy error:", error);
|
||||
return NextResponse.json({ error: "Failed to fetch job" }, { status: 500 });
|
||||
}
|
||||
}
|
||||
87
apps/backoffice/app/api/jobs/[id]/stream/route.ts
Normal file
87
apps/backoffice/app/api/jobs/[id]/stream/route.ts
Normal file
@@ -0,0 +1,87 @@
|
||||
import { NextRequest } from "next/server";
|
||||
|
||||
export async function GET(
|
||||
request: NextRequest,
|
||||
{ params }: { params: Promise<{ id: string }> }
|
||||
) {
|
||||
const { id } = await params;
|
||||
const apiBaseUrl = process.env.API_BASE_URL || "http://api:8080";
|
||||
const apiToken = process.env.API_BOOTSTRAP_TOKEN;
|
||||
|
||||
if (!apiToken) {
|
||||
return new Response(
|
||||
`data: ${JSON.stringify({ error: "API token not configured" })}\n\n`,
|
||||
{ status: 500, headers: { "Content-Type": "text/event-stream" } }
|
||||
);
|
||||
}
|
||||
|
||||
const stream = new ReadableStream({
|
||||
async start(controller) {
|
||||
// Send initial headers for SSE
|
||||
controller.enqueue(new TextEncoder().encode(""));
|
||||
|
||||
let lastData: string | null = null;
|
||||
let isActive = true;
|
||||
|
||||
const fetchJob = async () => {
|
||||
if (!isActive) return;
|
||||
|
||||
try {
|
||||
const response = await fetch(`${apiBaseUrl}/index/jobs/${id}`, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${apiToken}`,
|
||||
},
|
||||
});
|
||||
|
||||
if (response.ok) {
|
||||
const data = await response.json();
|
||||
const dataStr = JSON.stringify(data);
|
||||
|
||||
// Only send if data changed
|
||||
if (dataStr !== lastData) {
|
||||
lastData = dataStr;
|
||||
controller.enqueue(
|
||||
new TextEncoder().encode(`data: ${dataStr}\n\n`)
|
||||
);
|
||||
|
||||
// Stop polling if job is complete
|
||||
if (data.status === "success" || data.status === "failed" || data.status === "cancelled") {
|
||||
isActive = false;
|
||||
controller.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("SSE fetch error:", error);
|
||||
}
|
||||
};
|
||||
|
||||
// Initial fetch
|
||||
await fetchJob();
|
||||
|
||||
// Poll every 500ms while job is active
|
||||
const interval = setInterval(async () => {
|
||||
if (!isActive) {
|
||||
clearInterval(interval);
|
||||
return;
|
||||
}
|
||||
await fetchJob();
|
||||
}, 500);
|
||||
|
||||
// Cleanup on abort
|
||||
request.signal.addEventListener("abort", () => {
|
||||
isActive = false;
|
||||
clearInterval(interval);
|
||||
controller.close();
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
return new Response(stream, {
|
||||
headers: {
|
||||
"Content-Type": "text/event-stream",
|
||||
"Cache-Control": "no-cache",
|
||||
"Connection": "keep-alive",
|
||||
},
|
||||
});
|
||||
}
|
||||
31
apps/backoffice/app/api/jobs/route.ts
Normal file
31
apps/backoffice/app/api/jobs/route.ts
Normal file
@@ -0,0 +1,31 @@
|
||||
import { NextRequest, NextResponse } from "next/server";
|
||||
|
||||
export async function GET(request: NextRequest) {
|
||||
const apiBaseUrl = process.env.API_BASE_URL || "http://api:8080";
|
||||
const apiToken = process.env.API_BOOTSTRAP_TOKEN;
|
||||
|
||||
if (!apiToken) {
|
||||
return NextResponse.json({ error: "API token not configured" }, { status: 500 });
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await fetch(`${apiBaseUrl}/index/status`, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${apiToken}`,
|
||||
},
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
return NextResponse.json(
|
||||
{ error: `API error: ${response.status}` },
|
||||
{ status: response.status }
|
||||
);
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
return NextResponse.json(data);
|
||||
} catch (error) {
|
||||
console.error("Proxy error:", error);
|
||||
return NextResponse.json({ error: "Failed to fetch jobs" }, { status: 500 });
|
||||
}
|
||||
}
|
||||
76
apps/backoffice/app/api/jobs/stream/route.ts
Normal file
76
apps/backoffice/app/api/jobs/stream/route.ts
Normal file
@@ -0,0 +1,76 @@
|
||||
import { NextRequest } from "next/server";
|
||||
|
||||
export async function GET(request: NextRequest) {
|
||||
const apiBaseUrl = process.env.API_BASE_URL || "http://api:8080";
|
||||
const apiToken = process.env.API_BOOTSTRAP_TOKEN;
|
||||
|
||||
if (!apiToken) {
|
||||
return new Response(
|
||||
`data: ${JSON.stringify({ error: "API token not configured" })}\n\n`,
|
||||
{ status: 500, headers: { "Content-Type": "text/event-stream" } }
|
||||
);
|
||||
}
|
||||
|
||||
const stream = new ReadableStream({
|
||||
async start(controller) {
|
||||
controller.enqueue(new TextEncoder().encode(""));
|
||||
|
||||
let lastData: string | null = null;
|
||||
let isActive = true;
|
||||
|
||||
const fetchJobs = async () => {
|
||||
if (!isActive) return;
|
||||
|
||||
try {
|
||||
const response = await fetch(`${apiBaseUrl}/index/status`, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${apiToken}`,
|
||||
},
|
||||
});
|
||||
|
||||
if (response.ok) {
|
||||
const data = await response.json();
|
||||
const dataStr = JSON.stringify(data);
|
||||
|
||||
// Send if data changed
|
||||
if (dataStr !== lastData) {
|
||||
lastData = dataStr;
|
||||
controller.enqueue(
|
||||
new TextEncoder().encode(`data: ${dataStr}\n\n`)
|
||||
);
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("SSE fetch error:", error);
|
||||
}
|
||||
};
|
||||
|
||||
// Initial fetch
|
||||
await fetchJobs();
|
||||
|
||||
// Poll every 2 seconds
|
||||
const interval = setInterval(async () => {
|
||||
if (!isActive) {
|
||||
clearInterval(interval);
|
||||
return;
|
||||
}
|
||||
await fetchJobs();
|
||||
}, 2000);
|
||||
|
||||
// Cleanup
|
||||
request.signal.addEventListener("abort", () => {
|
||||
isActive = false;
|
||||
clearInterval(interval);
|
||||
controller.close();
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
return new Response(stream, {
|
||||
headers: {
|
||||
"Content-Type": "text/event-stream",
|
||||
"Cache-Control": "no-cache",
|
||||
"Connection": "keep-alive",
|
||||
},
|
||||
});
|
||||
}
|
||||
123
apps/backoffice/app/components/JobProgress.tsx
Normal file
123
apps/backoffice/app/components/JobProgress.tsx
Normal file
@@ -0,0 +1,123 @@
|
||||
"use client";
|
||||
|
||||
import { useEffect, useState } from "react";
|
||||
|
||||
interface ProgressEvent {
|
||||
job_id: string;
|
||||
status: string;
|
||||
current_file: string | null;
|
||||
progress_percent: number | null;
|
||||
processed_files: number | null;
|
||||
total_files: number | null;
|
||||
stats_json: {
|
||||
scanned_files: number;
|
||||
indexed_files: number;
|
||||
removed_files: number;
|
||||
errors: number;
|
||||
} | null;
|
||||
}
|
||||
|
||||
interface JobProgressProps {
|
||||
jobId: string;
|
||||
onComplete?: () => void;
|
||||
}
|
||||
|
||||
export function JobProgress({ jobId, onComplete }: JobProgressProps) {
|
||||
const [progress, setProgress] = useState<ProgressEvent | null>(null);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
const [isComplete, setIsComplete] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
// Use SSE via local proxy
|
||||
const eventSource = new EventSource(`/api/jobs/${jobId}/stream`);
|
||||
|
||||
eventSource.onmessage = (event) => {
|
||||
try {
|
||||
const data = JSON.parse(event.data);
|
||||
|
||||
const progressData: ProgressEvent = {
|
||||
job_id: data.id,
|
||||
status: data.status,
|
||||
current_file: data.current_file,
|
||||
progress_percent: data.progress_percent,
|
||||
processed_files: data.processed_files,
|
||||
total_files: data.total_files,
|
||||
stats_json: data.stats_json,
|
||||
};
|
||||
|
||||
setProgress(progressData);
|
||||
|
||||
if (data.status === "success" || data.status === "failed" || data.status === "cancelled") {
|
||||
setIsComplete(true);
|
||||
eventSource.close();
|
||||
onComplete?.();
|
||||
}
|
||||
} catch (err) {
|
||||
setError("Failed to parse SSE data");
|
||||
}
|
||||
};
|
||||
|
||||
eventSource.onerror = (err) => {
|
||||
console.error("SSE error:", err);
|
||||
eventSource.close();
|
||||
setError("Connection lost");
|
||||
};
|
||||
|
||||
return () => {
|
||||
eventSource.close();
|
||||
};
|
||||
}, [jobId, onComplete]);
|
||||
|
||||
if (error) {
|
||||
return <div className="progress-error">Error: {error}</div>;
|
||||
}
|
||||
|
||||
if (!progress) {
|
||||
return <div className="progress-loading">Loading progress...</div>;
|
||||
}
|
||||
|
||||
const percent = progress.progress_percent ?? 0;
|
||||
const processed = progress.processed_files ?? 0;
|
||||
const total = progress.total_files ?? 0;
|
||||
|
||||
return (
|
||||
<div className="job-progress">
|
||||
<div className="progress-header">
|
||||
<span className={`status-badge status-${progress.status}`}>
|
||||
{progress.status}
|
||||
</span>
|
||||
{isComplete && <span className="complete-badge">Complete</span>}
|
||||
</div>
|
||||
|
||||
<div className="progress-bar-container">
|
||||
<div
|
||||
className="progress-bar-fill"
|
||||
style={{ width: `${percent}%` }}
|
||||
/>
|
||||
<span className="progress-percent">{percent}%</span>
|
||||
</div>
|
||||
|
||||
<div className="progress-stats">
|
||||
<span>{processed} / {total} files</span>
|
||||
{progress.current_file && (
|
||||
<span className="current-file" title={progress.current_file}>
|
||||
Current: {progress.current_file.length > 40
|
||||
? progress.current_file.substring(0, 40) + "..."
|
||||
: progress.current_file}
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{progress.stats_json && (
|
||||
<div className="progress-detailed-stats">
|
||||
<span>Scanned: {progress.stats_json.scanned_files}</span>
|
||||
<span>Indexed: {progress.stats_json.indexed_files}</span>
|
||||
<span>Removed: {progress.stats_json.removed_files}</span>
|
||||
{progress.stats_json.errors > 0 && (
|
||||
<span className="error-count">Errors: {progress.stats_json.errors}</span>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
75
apps/backoffice/app/components/JobRow.tsx
Normal file
75
apps/backoffice/app/components/JobRow.tsx
Normal file
@@ -0,0 +1,75 @@
|
||||
"use client";
|
||||
|
||||
import { useState } from "react";
|
||||
import { JobProgress } from "./JobProgress";
|
||||
|
||||
interface JobRowProps {
|
||||
job: {
|
||||
id: string;
|
||||
library_id: string | null;
|
||||
type: string;
|
||||
status: string;
|
||||
created_at: string;
|
||||
error_opt: string | null;
|
||||
};
|
||||
libraryName: string | undefined;
|
||||
highlighted?: boolean;
|
||||
onCancel: (id: string) => void;
|
||||
}
|
||||
|
||||
export function JobRow({ job, libraryName, highlighted, onCancel }: JobRowProps) {
|
||||
const [showProgress, setShowProgress] = useState(
|
||||
highlighted || job.status === "running" || job.status === "pending"
|
||||
);
|
||||
|
||||
const handleComplete = () => {
|
||||
setShowProgress(false);
|
||||
// Trigger a page refresh to update the job status
|
||||
window.location.reload();
|
||||
};
|
||||
|
||||
return (
|
||||
<>
|
||||
<tr className={highlighted ? "job-highlighted" : undefined}>
|
||||
<td>
|
||||
<code>{job.id.slice(0, 8)}</code>
|
||||
</td>
|
||||
<td>{job.library_id ? libraryName || job.library_id.slice(0, 8) : "—"}</td>
|
||||
<td>{job.type}</td>
|
||||
<td>
|
||||
<span className={`status-${job.status}`}>{job.status}</span>
|
||||
{job.error_opt && <span className="error-hint" title={job.error_opt}>!</span>}
|
||||
{job.status === "running" && (
|
||||
<button
|
||||
className="toggle-progress-btn"
|
||||
onClick={() => setShowProgress(!showProgress)}
|
||||
>
|
||||
{showProgress ? "Hide" : "Show"} progress
|
||||
</button>
|
||||
)}
|
||||
</td>
|
||||
<td>{new Date(job.created_at).toLocaleString()}</td>
|
||||
<td>
|
||||
{job.status === "pending" || job.status === "running" ? (
|
||||
<button
|
||||
className="cancel-btn"
|
||||
onClick={() => onCancel(job.id)}
|
||||
>
|
||||
Cancel
|
||||
</button>
|
||||
) : null}
|
||||
</td>
|
||||
</tr>
|
||||
{showProgress && (job.status === "running" || job.status === "pending") && (
|
||||
<tr className="progress-row">
|
||||
<td colSpan={6}>
|
||||
<JobProgress
|
||||
jobId={job.id}
|
||||
onComplete={handleComplete}
|
||||
/>
|
||||
</td>
|
||||
</tr>
|
||||
)}
|
||||
</>
|
||||
);
|
||||
}
|
||||
144
apps/backoffice/app/components/JobsIndicator.tsx
Normal file
144
apps/backoffice/app/components/JobsIndicator.tsx
Normal file
@@ -0,0 +1,144 @@
|
||||
"use client";
|
||||
|
||||
import { useEffect, useState } from "react";
|
||||
import Link from "next/link";
|
||||
|
||||
interface Job {
|
||||
id: string;
|
||||
status: string;
|
||||
current_file: string | null;
|
||||
progress_percent: number | null;
|
||||
}
|
||||
|
||||
interface JobsIndicatorProps {
|
||||
apiBaseUrl: string;
|
||||
apiToken: string;
|
||||
}
|
||||
|
||||
export function JobsIndicator({ apiBaseUrl, apiToken }: JobsIndicatorProps) {
|
||||
const [activeJobs, setActiveJobs] = useState<Job[]>([]);
|
||||
const [isOpen, setIsOpen] = useState(false);
|
||||
|
||||
useEffect(() => {
|
||||
const fetchActiveJobs = async () => {
|
||||
try {
|
||||
const response = await fetch(`${apiBaseUrl}/index/jobs/active`, {
|
||||
headers: {
|
||||
"Authorization": `Bearer ${apiToken}`,
|
||||
},
|
||||
});
|
||||
|
||||
if (response.ok) {
|
||||
const jobs = await response.json();
|
||||
// Enrich with details for running jobs
|
||||
const jobsWithDetails = await Promise.all(
|
||||
jobs.map(async (job: Job) => {
|
||||
if (job.status === "running") {
|
||||
try {
|
||||
const detailRes = await fetch(`${apiBaseUrl}/index/jobs/${job.id}`, {
|
||||
headers: { "Authorization": `Bearer ${apiToken}` },
|
||||
});
|
||||
if (detailRes.ok) {
|
||||
const detail = await detailRes.json();
|
||||
return { ...job, ...detail };
|
||||
}
|
||||
} catch {
|
||||
// ignore detail fetch errors
|
||||
}
|
||||
}
|
||||
return job;
|
||||
})
|
||||
);
|
||||
setActiveJobs(jobsWithDetails);
|
||||
}
|
||||
} catch {
|
||||
// Silently fail
|
||||
}
|
||||
};
|
||||
|
||||
fetchActiveJobs();
|
||||
const interval = setInterval(fetchActiveJobs, 5000);
|
||||
|
||||
return () => clearInterval(interval);
|
||||
}, [apiBaseUrl, apiToken]);
|
||||
|
||||
const pendingCount = activeJobs.filter(j => j.status === "pending").length;
|
||||
const runningCount = activeJobs.filter(j => j.status === "running").length;
|
||||
const totalCount = activeJobs.length;
|
||||
|
||||
if (totalCount === 0) {
|
||||
return (
|
||||
<Link href="/jobs" className="jobs-indicator empty">
|
||||
<svg width="20" height="20" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
|
||||
<rect x="4" y="4" width="16" height="16" rx="2" />
|
||||
<path d="M8 12h8M12 8v8" />
|
||||
</svg>
|
||||
</Link>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="jobs-indicator-container">
|
||||
<button
|
||||
className="jobs-indicator active"
|
||||
onClick={() => setIsOpen(!isOpen)}
|
||||
title={`${totalCount} active job${totalCount !== 1 ? 's' : ''}`}
|
||||
>
|
||||
<svg width="20" height="20" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2">
|
||||
<rect x="4" y="4" width="16" height="16" rx="2" />
|
||||
<path d="M8 12h8M12 8v8" />
|
||||
</svg>
|
||||
{totalCount > 0 && (
|
||||
<span className="jobs-badge">
|
||||
{totalCount > 9 ? "9+" : totalCount}
|
||||
</span>
|
||||
)}
|
||||
{runningCount > 0 && (
|
||||
<span className="jobs-pulse" />
|
||||
)}
|
||||
</button>
|
||||
|
||||
{isOpen && (
|
||||
<div className="jobs-dropdown">
|
||||
<div className="jobs-dropdown-header">
|
||||
<strong>Active Jobs</strong>
|
||||
<Link href="/jobs" onClick={() => setIsOpen(false)}>View all</Link>
|
||||
</div>
|
||||
|
||||
{activeJobs.length === 0 ? (
|
||||
<p className="jobs-empty">No active jobs</p>
|
||||
) : (
|
||||
<ul className="jobs-list">
|
||||
{activeJobs.map(job => (
|
||||
<li key={job.id} className={`job-item job-${job.status}`}>
|
||||
<div className="job-header">
|
||||
<span className={`job-status status-${job.status}`}>
|
||||
{job.status}
|
||||
</span>
|
||||
<code className="job-id">{job.id.slice(0, 8)}</code>
|
||||
</div>
|
||||
{job.status === "running" && job.progress_percent !== null && (
|
||||
<div className="job-mini-progress">
|
||||
<div
|
||||
className="job-progress-bar"
|
||||
style={{ width: `${job.progress_percent}%` }}
|
||||
/>
|
||||
<span>{job.progress_percent}%</span>
|
||||
</div>
|
||||
)}
|
||||
{job.current_file && (
|
||||
<p className="job-file" title={job.current_file}>
|
||||
{job.current_file.length > 30
|
||||
? job.current_file.substring(0, 30) + "..."
|
||||
: job.current_file}
|
||||
</p>
|
||||
)}
|
||||
</li>
|
||||
))}
|
||||
</ul>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
12
apps/backoffice/app/components/JobsIndicatorWrapper.tsx
Normal file
12
apps/backoffice/app/components/JobsIndicatorWrapper.tsx
Normal file
@@ -0,0 +1,12 @@
|
||||
"use client";
|
||||
|
||||
import { JobsIndicator } from "./JobsIndicator";
|
||||
|
||||
interface JobsIndicatorWrapperProps {
|
||||
apiBaseUrl: string;
|
||||
apiToken: string;
|
||||
}
|
||||
|
||||
export function JobsIndicatorWrapper({ apiBaseUrl, apiToken }: JobsIndicatorWrapperProps) {
|
||||
return <JobsIndicator apiBaseUrl={apiBaseUrl} apiToken={apiToken} />;
|
||||
}
|
||||
91
apps/backoffice/app/components/JobsList.tsx
Normal file
91
apps/backoffice/app/components/JobsList.tsx
Normal file
@@ -0,0 +1,91 @@
|
||||
"use client";
|
||||
|
||||
import { useState, useEffect } from "react";
|
||||
import { JobRow } from "./JobRow";
|
||||
|
||||
interface Job {
|
||||
id: string;
|
||||
library_id: string | null;
|
||||
type: string;
|
||||
status: string;
|
||||
created_at: string;
|
||||
error_opt: string | null;
|
||||
}
|
||||
|
||||
interface JobsListProps {
|
||||
initialJobs: Job[];
|
||||
libraries: Map<string, string>;
|
||||
highlightJobId?: string;
|
||||
}
|
||||
|
||||
export function JobsList({ initialJobs, libraries, highlightJobId }: JobsListProps) {
|
||||
const [jobs, setJobs] = useState(initialJobs);
|
||||
|
||||
// Refresh jobs list via SSE
|
||||
useEffect(() => {
|
||||
const eventSource = new EventSource("/api/jobs/stream");
|
||||
|
||||
eventSource.onmessage = (event) => {
|
||||
try {
|
||||
const data = JSON.parse(event.data);
|
||||
if (Array.isArray(data)) {
|
||||
setJobs(data);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Failed to parse SSE data:", error);
|
||||
}
|
||||
};
|
||||
|
||||
eventSource.onerror = (err) => {
|
||||
console.error("SSE error:", err);
|
||||
eventSource.close();
|
||||
};
|
||||
|
||||
return () => {
|
||||
eventSource.close();
|
||||
};
|
||||
}, []);
|
||||
|
||||
const handleCancel = async (id: string) => {
|
||||
try {
|
||||
const response = await fetch(`/api/jobs/${id}/cancel`, {
|
||||
method: "POST",
|
||||
});
|
||||
|
||||
if (response.ok) {
|
||||
// Update local state to reflect cancellation
|
||||
setJobs(jobs.map(job =>
|
||||
job.id === id ? { ...job, status: "cancelled" } : job
|
||||
));
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Failed to cancel job:", error);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<table>
|
||||
<thead>
|
||||
<tr>
|
||||
<th>ID</th>
|
||||
<th>Library</th>
|
||||
<th>Type</th>
|
||||
<th>Status</th>
|
||||
<th>Created</th>
|
||||
<th>Actions</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{jobs.map((job) => (
|
||||
<JobRow
|
||||
key={job.id}
|
||||
job={job}
|
||||
libraryName={job.library_id ? libraries.get(job.library_id) : undefined}
|
||||
highlighted={job.id === highlightJobId}
|
||||
onCancel={handleCancel}
|
||||
/>
|
||||
))}
|
||||
</tbody>
|
||||
</table>
|
||||
);
|
||||
}
|
||||
@@ -215,6 +215,25 @@ button:hover {
|
||||
border-color: hsl(2 72% 48% / 0.5);
|
||||
}
|
||||
|
||||
.scan-btn {
|
||||
background: linear-gradient(95deg, hsl(142 60% 45% / 0.15), hsl(142 60% 55% / 0.2));
|
||||
border-color: hsl(142 60% 45% / 0.5);
|
||||
padding: 4px 12px;
|
||||
font-size: 0.85rem;
|
||||
}
|
||||
|
||||
.delete-btn {
|
||||
background: linear-gradient(95deg, hsl(2 72% 48% / 0.15), hsl(338 82% 62% / 0.2));
|
||||
border-color: hsl(2 72% 48% / 0.5);
|
||||
padding: 4px 12px;
|
||||
font-size: 0.85rem;
|
||||
}
|
||||
|
||||
.full-rebuild-btn {
|
||||
background: linear-gradient(95deg, hsl(280 60% 45% / 0.15), hsl(280 60% 55% / 0.2));
|
||||
border-color: hsl(280 60% 45% / 0.5);
|
||||
}
|
||||
|
||||
.status-pending { color: hsl(45 93% 47%); }
|
||||
.status-running { color: hsl(192 85% 55%); }
|
||||
.status-completed { color: hsl(142 60% 45%); }
|
||||
@@ -729,3 +748,426 @@ button:hover {
|
||||
max-width: 400px;
|
||||
display: inline-block;
|
||||
}
|
||||
|
||||
/* Job Progress Component */
|
||||
.job-progress {
|
||||
background: var(--card);
|
||||
border: 1px solid var(--line);
|
||||
border-radius: 12px;
|
||||
padding: 16px;
|
||||
margin: 8px 0;
|
||||
}
|
||||
|
||||
.progress-header {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
margin-bottom: 12px;
|
||||
}
|
||||
|
||||
.status-badge {
|
||||
padding: 4px 10px;
|
||||
border-radius: 6px;
|
||||
font-size: 0.8rem;
|
||||
font-weight: 700;
|
||||
text-transform: uppercase;
|
||||
}
|
||||
|
||||
.status-badge.status-pending {
|
||||
background: hsl(45 93% 90%);
|
||||
color: hsl(45 93% 35%);
|
||||
}
|
||||
|
||||
.status-badge.status-running {
|
||||
background: hsl(198 52% 90%);
|
||||
color: hsl(198 78% 37%);
|
||||
}
|
||||
|
||||
.status-badge.status-success {
|
||||
background: hsl(142 60% 90%);
|
||||
color: hsl(142 60% 35%);
|
||||
}
|
||||
|
||||
.status-badge.status-failed {
|
||||
background: hsl(2 72% 90%);
|
||||
color: hsl(2 72% 45%);
|
||||
}
|
||||
|
||||
.status-badge.status-cancelled {
|
||||
background: hsl(220 13% 90%);
|
||||
color: hsl(220 13% 40%);
|
||||
}
|
||||
|
||||
.complete-badge {
|
||||
padding: 4px 10px;
|
||||
border-radius: 6px;
|
||||
font-size: 0.75rem;
|
||||
font-weight: 700;
|
||||
background: hsl(142 60% 90%);
|
||||
color: hsl(142 60% 35%);
|
||||
}
|
||||
|
||||
.progress-bar-container {
|
||||
position: relative;
|
||||
height: 24px;
|
||||
background: var(--line);
|
||||
border-radius: 12px;
|
||||
overflow: hidden;
|
||||
margin-bottom: 12px;
|
||||
}
|
||||
|
||||
.progress-bar-fill {
|
||||
height: 100%;
|
||||
background: linear-gradient(90deg, hsl(198 78% 37%), hsl(192 85% 55%));
|
||||
border-radius: 12px;
|
||||
transition: width 0.3s ease;
|
||||
}
|
||||
|
||||
.progress-percent {
|
||||
position: absolute;
|
||||
right: 8px;
|
||||
top: 50%;
|
||||
transform: translateY(-50%);
|
||||
font-size: 0.8rem;
|
||||
font-weight: 700;
|
||||
color: var(--foreground);
|
||||
}
|
||||
|
||||
.progress-stats {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
font-size: 0.9rem;
|
||||
color: var(--text-muted);
|
||||
}
|
||||
|
||||
.current-file {
|
||||
font-size: 0.8rem;
|
||||
max-width: 300px;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.progress-detailed-stats {
|
||||
display: flex;
|
||||
gap: 16px;
|
||||
margin-top: 12px;
|
||||
padding-top: 12px;
|
||||
border-top: 1px solid var(--line);
|
||||
font-size: 0.85rem;
|
||||
}
|
||||
|
||||
.error-count {
|
||||
color: hsl(2 72% 48%);
|
||||
font-weight: 700;
|
||||
}
|
||||
|
||||
.progress-row {
|
||||
background: hsl(198 52% 95%);
|
||||
}
|
||||
|
||||
.progress-row td {
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
.toggle-progress-btn {
|
||||
margin-left: 8px;
|
||||
padding: 2px 8px;
|
||||
font-size: 0.75rem;
|
||||
background: transparent;
|
||||
border: 1px solid var(--line);
|
||||
}
|
||||
|
||||
/* Jobs Indicator */
|
||||
.jobs-indicator-container {
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.jobs-indicator {
|
||||
position: relative;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
width: 36px;
|
||||
height: 36px;
|
||||
padding: 0;
|
||||
border-radius: 8px;
|
||||
background: transparent;
|
||||
border: 1px solid var(--line);
|
||||
color: var(--foreground);
|
||||
cursor: pointer;
|
||||
transition: all 0.2s ease;
|
||||
}
|
||||
|
||||
.jobs-indicator:hover {
|
||||
background: hsl(198 52% 90% / 0.5);
|
||||
}
|
||||
|
||||
.jobs-indicator.active {
|
||||
border-color: hsl(198 78% 37% / 0.5);
|
||||
}
|
||||
|
||||
.jobs-badge {
|
||||
position: absolute;
|
||||
top: -4px;
|
||||
right: -4px;
|
||||
min-width: 18px;
|
||||
height: 18px;
|
||||
padding: 0 5px;
|
||||
background: hsl(2 72% 48%);
|
||||
color: white;
|
||||
font-size: 0.7rem;
|
||||
font-weight: 700;
|
||||
border-radius: 9px;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
}
|
||||
|
||||
.jobs-pulse {
|
||||
position: absolute;
|
||||
bottom: 2px;
|
||||
left: 2px;
|
||||
width: 8px;
|
||||
height: 8px;
|
||||
background: hsl(142 60% 45%);
|
||||
border-radius: 50%;
|
||||
animation: pulse 2s infinite;
|
||||
}
|
||||
|
||||
@keyframes pulse {
|
||||
0%, 100% { opacity: 1; }
|
||||
50% { opacity: 0.5; }
|
||||
}
|
||||
|
||||
.jobs-dropdown {
|
||||
position: absolute;
|
||||
top: 100%;
|
||||
right: 0;
|
||||
margin-top: 8px;
|
||||
min-width: 320px;
|
||||
max-width: 400px;
|
||||
background: var(--card);
|
||||
border: 1px solid var(--line);
|
||||
border-radius: 12px;
|
||||
box-shadow: var(--shadow-2);
|
||||
z-index: 100;
|
||||
padding: 16px;
|
||||
}
|
||||
|
||||
.jobs-dropdown-header {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
margin-bottom: 12px;
|
||||
padding-bottom: 12px;
|
||||
border-bottom: 1px solid var(--line);
|
||||
}
|
||||
|
||||
.jobs-dropdown-header a {
|
||||
font-size: 0.85rem;
|
||||
}
|
||||
|
||||
.jobs-empty {
|
||||
text-align: center;
|
||||
color: var(--text-muted);
|
||||
padding: 16px;
|
||||
}
|
||||
|
||||
.jobs-list {
|
||||
list-style: none;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
max-height: 300px;
|
||||
overflow-y: auto;
|
||||
}
|
||||
|
||||
.job-item {
|
||||
padding: 12px;
|
||||
border-bottom: 1px solid var(--line);
|
||||
}
|
||||
|
||||
.job-item:last-child {
|
||||
border-bottom: none;
|
||||
}
|
||||
|
||||
.job-header {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
margin-bottom: 8px;
|
||||
}
|
||||
|
||||
.job-id {
|
||||
font-size: 0.75rem;
|
||||
color: var(--text-muted);
|
||||
}
|
||||
|
||||
.job-status {
|
||||
font-size: 0.7rem;
|
||||
padding: 2px 6px;
|
||||
border-radius: 4px;
|
||||
font-weight: 700;
|
||||
text-transform: uppercase;
|
||||
}
|
||||
|
||||
.job-status.status-pending {
|
||||
background: hsl(45 93% 90%);
|
||||
color: hsl(45 93% 35%);
|
||||
}
|
||||
|
||||
.job-status.status-running {
|
||||
background: hsl(198 52% 90%);
|
||||
color: hsl(198 78% 37%);
|
||||
}
|
||||
|
||||
.job-mini-progress {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 8px;
|
||||
margin-bottom: 4px;
|
||||
}
|
||||
|
||||
.job-progress-bar {
|
||||
flex: 1;
|
||||
height: 6px;
|
||||
background: hsl(198 78% 37%);
|
||||
border-radius: 3px;
|
||||
}
|
||||
|
||||
.job-mini-progress span {
|
||||
font-size: 0.75rem;
|
||||
color: var(--text-muted);
|
||||
min-width: 35px;
|
||||
}
|
||||
|
||||
.job-file {
|
||||
margin: 0;
|
||||
font-size: 0.75rem;
|
||||
color: var(--text-muted);
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
.progress-loading,
|
||||
.progress-error {
|
||||
padding: 16px;
|
||||
text-align: center;
|
||||
color: var(--text-muted);
|
||||
}
|
||||
|
||||
.progress-error {
|
||||
color: hsl(2 72% 48%);
|
||||
}
|
||||
|
||||
/* Dark mode overrides for new components */
|
||||
.dark .status-badge.status-pending {
|
||||
background: hsl(45 93% 25%);
|
||||
color: hsl(45 93% 65%);
|
||||
}
|
||||
|
||||
.dark .status-badge.status-running {
|
||||
background: hsl(198 52% 25%);
|
||||
color: hsl(198 78% 75%);
|
||||
}
|
||||
|
||||
.dark .status-badge.status-success {
|
||||
background: hsl(142 60% 25%);
|
||||
color: hsl(142 60% 65%);
|
||||
}
|
||||
|
||||
.dark .status-badge.status-failed {
|
||||
background: hsl(2 72% 25%);
|
||||
color: hsl(2 72% 65%);
|
||||
}
|
||||
|
||||
.dark .status-badge.status-cancelled {
|
||||
background: hsl(220 13% 25%);
|
||||
color: hsl(220 13% 65%);
|
||||
}
|
||||
|
||||
.dark .complete-badge {
|
||||
background: hsl(142 60% 25%);
|
||||
color: hsl(142 60% 65%);
|
||||
}
|
||||
|
||||
.dark .progress-row {
|
||||
background: hsl(198 52% 15%);
|
||||
}
|
||||
|
||||
.dark .jobs-indicator:hover {
|
||||
background: hsl(210 34% 24% / 0.5);
|
||||
}
|
||||
|
||||
.dark .job-status.status-pending {
|
||||
background: hsl(45 93% 25%);
|
||||
color: hsl(45 93% 65%);
|
||||
}
|
||||
|
||||
.dark .job-status.status-running {
|
||||
background: hsl(198 52% 25%);
|
||||
color: hsl(198 78% 75%);
|
||||
}
|
||||
|
||||
/* Progress bar visibility fix */
|
||||
.job-progress {
|
||||
background: var(--card);
|
||||
border: 1px solid var(--line);
|
||||
border-radius: 12px;
|
||||
padding: 16px;
|
||||
margin: 8px 0;
|
||||
min-height: 120px;
|
||||
}
|
||||
|
||||
.progress-bar-container {
|
||||
position: relative;
|
||||
height: 24px;
|
||||
background: hsl(220 13% 90%);
|
||||
border-radius: 12px;
|
||||
overflow: hidden;
|
||||
margin: 12px 0;
|
||||
}
|
||||
|
||||
.progress-bar-fill {
|
||||
height: 100%;
|
||||
background: linear-gradient(90deg, hsl(198 78% 37%), hsl(192 85% 55%));
|
||||
border-radius: 12px;
|
||||
transition: width 0.5s ease;
|
||||
min-width: 2px;
|
||||
}
|
||||
|
||||
.progress-percent {
|
||||
position: absolute;
|
||||
right: 8px;
|
||||
top: 50%;
|
||||
transform: translateY(-50%);
|
||||
font-size: 0.85rem;
|
||||
font-weight: 700;
|
||||
color: var(--foreground);
|
||||
text-shadow: 0 0 2px rgba(255,255,255,0.5);
|
||||
}
|
||||
|
||||
.progress-row {
|
||||
background: hsl(198 52% 95%);
|
||||
}
|
||||
|
||||
.progress-row td {
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
/* Highlighted job row */
|
||||
tr.job-highlighted {
|
||||
background: hsl(198 78% 95%);
|
||||
box-shadow: inset 0 0 0 2px hsl(198 78% 37%);
|
||||
}
|
||||
|
||||
tr.job-highlighted td {
|
||||
animation: pulse-border 2s ease-in-out infinite;
|
||||
}
|
||||
|
||||
@keyframes pulse-border {
|
||||
0%, 100% { box-shadow: inset 0 0 0 1px hsl(198 78% 37% / 0.3); }
|
||||
50% { box-shadow: inset 0 0 0 2px hsl(198 78% 37% / 0.6); }
|
||||
}
|
||||
|
||||
@@ -1,9 +1,12 @@
|
||||
import { revalidatePath } from "next/cache";
|
||||
import { listJobs, fetchLibraries, rebuildIndex, cancelJob, IndexJobDto, LibraryDto } from "../../lib/api";
|
||||
import { redirect } from "next/navigation";
|
||||
import { listJobs, fetchLibraries, rebuildIndex, IndexJobDto, LibraryDto } from "../../lib/api";
|
||||
import { JobsList } from "../components/JobsList";
|
||||
|
||||
export const dynamic = "force-dynamic";
|
||||
|
||||
export default async function JobsPage() {
|
||||
export default async function JobsPage({ searchParams }: { searchParams: Promise<{ highlight?: string }> }) {
|
||||
const { highlight } = await searchParams;
|
||||
const [jobs, libraries] = await Promise.all([
|
||||
listJobs().catch(() => [] as IndexJobDto[]),
|
||||
fetchLibraries().catch(() => [] as LibraryDto[])
|
||||
@@ -14,17 +17,22 @@ export default async function JobsPage() {
|
||||
async function triggerRebuild(formData: FormData) {
|
||||
"use server";
|
||||
const libraryId = formData.get("library_id") as string;
|
||||
await rebuildIndex(libraryId || undefined);
|
||||
const result = await rebuildIndex(libraryId || undefined);
|
||||
revalidatePath("/jobs");
|
||||
redirect(`/jobs?highlight=${result.id}`);
|
||||
}
|
||||
|
||||
async function cancelJobAction(formData: FormData) {
|
||||
async function triggerFullRebuild(formData: FormData) {
|
||||
"use server";
|
||||
const id = formData.get("id") as string;
|
||||
await cancelJob(id);
|
||||
const libraryId = formData.get("library_id") as string;
|
||||
const result = await rebuildIndex(libraryId || undefined, true);
|
||||
revalidatePath("/jobs");
|
||||
redirect(`/jobs?highlight=${result.id}`);
|
||||
}
|
||||
|
||||
const apiBaseUrl = process.env.API_BASE_URL || "http://api:8080";
|
||||
const apiToken = process.env.API_BOOTSTRAP_TOKEN || "";
|
||||
|
||||
return (
|
||||
<>
|
||||
<h1>Index Jobs</h1>
|
||||
@@ -40,43 +48,23 @@ export default async function JobsPage() {
|
||||
</select>
|
||||
<button type="submit">Queue Rebuild</button>
|
||||
</form>
|
||||
<form action={triggerFullRebuild} style={{ marginTop: '12px' }}>
|
||||
<select name="library_id" defaultValue="">
|
||||
<option value="">All libraries</option>
|
||||
{libraries.map((lib) => (
|
||||
<option key={lib.id} value={lib.id}>
|
||||
{lib.name}
|
||||
</option>
|
||||
))}
|
||||
</select>
|
||||
<button type="submit" className="full-rebuild-btn">Full Rebuild (Reindex All)</button>
|
||||
</form>
|
||||
</div>
|
||||
<table>
|
||||
<thead>
|
||||
<tr>
|
||||
<th>ID</th>
|
||||
<th>Library</th>
|
||||
<th>Type</th>
|
||||
<th>Status</th>
|
||||
<th>Created</th>
|
||||
<th>Actions</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
{jobs.map((job) => (
|
||||
<tr key={job.id}>
|
||||
<td>
|
||||
<code>{job.id.slice(0, 8)}</code>
|
||||
</td>
|
||||
<td>{job.library_id ? libraryMap.get(job.library_id) || job.library_id.slice(0, 8) : "—"}</td>
|
||||
<td>{job.type}</td>
|
||||
<td>
|
||||
<span className={`status-${job.status}`}>{job.status}</span>
|
||||
{job.error_opt && <span className="error-hint" title={job.error_opt}>!</span>}
|
||||
</td>
|
||||
<td>{new Date(job.created_at).toLocaleString()}</td>
|
||||
<td>
|
||||
{job.status === "pending" || job.status === "running" ? (
|
||||
<form action={cancelJobAction}>
|
||||
<input type="hidden" name="id" value={job.id} />
|
||||
<button type="submit" className="cancel-btn">Cancel</button>
|
||||
</form>
|
||||
) : null}
|
||||
</td>
|
||||
</tr>
|
||||
))}
|
||||
</tbody>
|
||||
</table>
|
||||
<JobsList
|
||||
initialJobs={jobs}
|
||||
libraries={libraryMap}
|
||||
highlightJobId={highlight}
|
||||
/>
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -5,6 +5,7 @@ import type { ReactNode } from "react";
|
||||
import "./globals.css";
|
||||
import { ThemeProvider } from "./theme-provider";
|
||||
import { ThemeToggle } from "./theme-toggle";
|
||||
import { JobsIndicatorWrapper } from "./components/JobsIndicatorWrapper";
|
||||
|
||||
export const metadata: Metadata = {
|
||||
title: "Stripstream Backoffice",
|
||||
@@ -12,6 +13,9 @@ export const metadata: Metadata = {
|
||||
};
|
||||
|
||||
export default function RootLayout({ children }: { children: ReactNode }) {
|
||||
const apiBaseUrl = process.env.API_BASE_URL || "http://api:8080";
|
||||
const apiToken = process.env.API_BOOTSTRAP_TOKEN || "";
|
||||
|
||||
return (
|
||||
<html lang="en" suppressHydrationWarning>
|
||||
<body>
|
||||
@@ -30,6 +34,7 @@ export default function RootLayout({ children }: { children: ReactNode }) {
|
||||
<Link href="/jobs">Jobs</Link>
|
||||
<Link href="/tokens">Tokens</Link>
|
||||
</div>
|
||||
<JobsIndicatorWrapper apiBaseUrl={apiBaseUrl} apiToken={apiToken} />
|
||||
<ThemeToggle />
|
||||
</div>
|
||||
</nav>
|
||||
|
||||
@@ -123,12 +123,23 @@ export async function deleteLibrary(id: string) {
|
||||
return apiFetch<void>(`/libraries/${id}`, { method: "DELETE" });
|
||||
}
|
||||
|
||||
export async function scanLibrary(libraryId: string, full?: boolean) {
|
||||
const body: { full?: boolean } = {};
|
||||
if (full) body.full = true;
|
||||
return apiFetch<IndexJobDto>(`/libraries/${libraryId}/scan`, {
|
||||
method: "POST",
|
||||
body: JSON.stringify(body)
|
||||
});
|
||||
}
|
||||
|
||||
export async function listJobs() {
|
||||
return apiFetch<IndexJobDto[]>("/index/status");
|
||||
}
|
||||
|
||||
export async function rebuildIndex(libraryId?: string) {
|
||||
const body = libraryId ? { library_id: libraryId } : {};
|
||||
export async function rebuildIndex(libraryId?: string, full?: boolean) {
|
||||
const body: { library_id?: string; full?: boolean } = {};
|
||||
if (libraryId) body.library_id = libraryId;
|
||||
if (full) body.full = true;
|
||||
return apiFetch<IndexJobDto>("/index/rebuild", {
|
||||
method: "POST",
|
||||
body: JSON.stringify(body)
|
||||
|
||||
2
apps/backoffice/next-env.d.ts
vendored
2
apps/backoffice/next-env.d.ts
vendored
@@ -1,6 +1,6 @@
|
||||
/// <reference types="next" />
|
||||
/// <reference types="next/image-types/global" />
|
||||
import "./.next/types/routes.d.ts";
|
||||
import "./.next/dev/types/routes.d.ts";
|
||||
|
||||
// NOTE: This file should not be edited
|
||||
// see https://nextjs.org/docs/app/api-reference/config/typescript for more information.
|
||||
|
||||
@@ -8,10 +8,28 @@ use sha2::{Digest, Sha256};
|
||||
use sqlx::{postgres::PgPoolOptions, Row};
|
||||
use std::{collections::HashMap, path::Path, time::Duration};
|
||||
use stripstream_core::config::IndexerConfig;
|
||||
use tracing::{error, info};
|
||||
use tracing::{error, info, trace, warn};
|
||||
use uuid::Uuid;
|
||||
use walkdir::WalkDir;
|
||||
|
||||
fn remap_libraries_path(path: &str) -> String {
|
||||
if let Ok(root) = std::env::var("LIBRARIES_ROOT_PATH") {
|
||||
if path.starts_with("/libraries/") {
|
||||
return path.replacen("/libraries", &root, 1);
|
||||
}
|
||||
}
|
||||
path.to_string()
|
||||
}
|
||||
|
||||
fn unmap_libraries_path(path: &str) -> String {
|
||||
if let Ok(root) = std::env::var("LIBRARIES_ROOT_PATH") {
|
||||
if path.starts_with(&root) {
|
||||
return path.replacen(&root, "/libraries", 1);
|
||||
}
|
||||
}
|
||||
path.to_string()
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct AppState {
|
||||
pool: sqlx::PgPool,
|
||||
@@ -77,14 +95,20 @@ async fn run_worker(state: AppState, interval_seconds: u64) {
|
||||
loop {
|
||||
match claim_next_job(&state.pool).await {
|
||||
Ok(Some((job_id, library_id))) => {
|
||||
info!("[INDEXER] Starting job {} library={:?}", job_id, library_id);
|
||||
if let Err(err) = process_job(&state, job_id, library_id).await {
|
||||
error!(job_id = %job_id, error = %err, "index job failed");
|
||||
error!("[INDEXER] Job {} failed: {}", job_id, err);
|
||||
let _ = fail_job(&state.pool, job_id, &err.to_string()).await;
|
||||
} else {
|
||||
info!("[INDEXER] Job {} completed", job_id);
|
||||
}
|
||||
}
|
||||
Ok(None) => tokio::time::sleep(wait).await,
|
||||
Ok(None) => {
|
||||
trace!("[INDEXER] No pending jobs, waiting...");
|
||||
tokio::time::sleep(wait).await;
|
||||
}
|
||||
Err(err) => {
|
||||
error!(error = %err, "worker loop error");
|
||||
error!("[INDEXER] Worker error: {}", err);
|
||||
tokio::time::sleep(wait).await;
|
||||
}
|
||||
}
|
||||
@@ -124,6 +148,38 @@ async fn claim_next_job(pool: &sqlx::PgPool) -> anyhow::Result<Option<(Uuid, Opt
|
||||
}
|
||||
|
||||
async fn process_job(state: &AppState, job_id: Uuid, target_library_id: Option<Uuid>) -> anyhow::Result<()> {
|
||||
info!("[JOB] Processing {} library={:?}", job_id, target_library_id);
|
||||
|
||||
// Get job type to check if it's a full rebuild
|
||||
let job_type: String = sqlx::query_scalar("SELECT type FROM index_jobs WHERE id = $1")
|
||||
.bind(job_id)
|
||||
.fetch_one(&state.pool)
|
||||
.await?;
|
||||
let is_full_rebuild = job_type == "full_rebuild";
|
||||
info!("[JOB] {} type={} full_rebuild={}", job_id, job_type, is_full_rebuild);
|
||||
|
||||
// For full rebuilds, delete existing data first
|
||||
if is_full_rebuild {
|
||||
info!("[JOB] Full rebuild: deleting existing data");
|
||||
if let Some(library_id) = target_library_id {
|
||||
// Delete books and files for specific library
|
||||
sqlx::query("DELETE FROM book_files WHERE book_id IN (SELECT id FROM books WHERE library_id = $1)")
|
||||
.bind(library_id)
|
||||
.execute(&state.pool)
|
||||
.await?;
|
||||
sqlx::query("DELETE FROM books WHERE library_id = $1")
|
||||
.bind(library_id)
|
||||
.execute(&state.pool)
|
||||
.await?;
|
||||
info!("[JOB] Deleted existing data for library {}", library_id);
|
||||
} else {
|
||||
// Delete all books and files
|
||||
sqlx::query("DELETE FROM book_files").execute(&state.pool).await?;
|
||||
sqlx::query("DELETE FROM books").execute(&state.pool).await?;
|
||||
info!("[JOB] Deleted all existing data");
|
||||
}
|
||||
}
|
||||
|
||||
let libraries = if let Some(library_id) = target_library_id {
|
||||
sqlx::query("SELECT id, root_path FROM libraries WHERE id = $1 AND enabled = TRUE")
|
||||
.bind(library_id)
|
||||
@@ -135,6 +191,25 @@ async fn process_job(state: &AppState, job_id: Uuid, target_library_id: Option<U
|
||||
.await?
|
||||
};
|
||||
|
||||
// First pass: count total files for progress estimation
|
||||
let mut total_files = 0usize;
|
||||
for library in &libraries {
|
||||
let root_path: String = library.get("root_path");
|
||||
let root_path = remap_libraries_path(&root_path);
|
||||
for entry in WalkDir::new(&root_path).into_iter().filter_map(Result::ok) {
|
||||
if entry.file_type().is_file() && detect_format(entry.path()).is_some() {
|
||||
total_files += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Update job with total estimate
|
||||
sqlx::query("UPDATE index_jobs SET total_files = $2 WHERE id = $1")
|
||||
.bind(job_id)
|
||||
.bind(total_files as i32)
|
||||
.execute(&state.pool)
|
||||
.await?;
|
||||
|
||||
let mut stats = JobStats {
|
||||
scanned_files: 0,
|
||||
indexed_files: 0,
|
||||
@@ -145,7 +220,8 @@ async fn process_job(state: &AppState, job_id: Uuid, target_library_id: Option<U
|
||||
for library in libraries {
|
||||
let library_id: Uuid = library.get("id");
|
||||
let root_path: String = library.get("root_path");
|
||||
match scan_library(state, library_id, Path::new(&root_path), &mut stats).await {
|
||||
let root_path = remap_libraries_path(&root_path);
|
||||
match scan_library(state, job_id, library_id, Path::new(&root_path), &mut stats, total_files, is_full_rebuild).await {
|
||||
Ok(()) => {}
|
||||
Err(err) => {
|
||||
stats.errors += 1;
|
||||
@@ -156,7 +232,7 @@ async fn process_job(state: &AppState, job_id: Uuid, target_library_id: Option<U
|
||||
|
||||
sync_meili(&state.pool, &state.meili_url, &state.meili_master_key).await?;
|
||||
|
||||
sqlx::query("UPDATE index_jobs SET status = 'success', finished_at = NOW(), stats_json = $2 WHERE id = $1")
|
||||
sqlx::query("UPDATE index_jobs SET status = 'success', finished_at = NOW(), stats_json = $2, current_file = NULL WHERE id = $1")
|
||||
.bind(job_id)
|
||||
.bind(serde_json::to_value(&stats)?)
|
||||
.execute(&state.pool)
|
||||
@@ -176,9 +252,12 @@ async fn fail_job(pool: &sqlx::PgPool, job_id: Uuid, error_message: &str) -> any
|
||||
|
||||
async fn scan_library(
|
||||
state: &AppState,
|
||||
job_id: Uuid,
|
||||
library_id: Uuid,
|
||||
root: &Path,
|
||||
stats: &mut JobStats,
|
||||
total_files: usize,
|
||||
is_full_rebuild: bool,
|
||||
) -> anyhow::Result<()> {
|
||||
let existing_rows = sqlx::query(
|
||||
r#"
|
||||
@@ -193,14 +272,22 @@ async fn scan_library(
|
||||
.await?;
|
||||
|
||||
let mut existing: HashMap<String, (Uuid, Uuid, String)> = HashMap::new();
|
||||
for row in existing_rows {
|
||||
existing.insert(
|
||||
row.get("abs_path"),
|
||||
(row.get("file_id"), row.get("book_id"), row.get("fingerprint")),
|
||||
);
|
||||
// For full rebuilds, don't use existing files - force reindex of everything
|
||||
if !is_full_rebuild {
|
||||
for row in existing_rows {
|
||||
let abs_path: String = row.get("abs_path");
|
||||
// Remap for local development to match scanned paths
|
||||
let remapped_path = remap_libraries_path(&abs_path);
|
||||
existing.insert(
|
||||
remapped_path,
|
||||
(row.get("file_id"), row.get("book_id"), row.get("fingerprint")),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
let mut seen: HashMap<String, bool> = HashMap::new();
|
||||
let mut processed_count = 0i32;
|
||||
|
||||
for entry in WalkDir::new(root).into_iter().filter_map(Result::ok) {
|
||||
if !entry.file_type().is_file() {
|
||||
continue;
|
||||
@@ -212,9 +299,43 @@ async fn scan_library(
|
||||
};
|
||||
|
||||
stats.scanned_files += 1;
|
||||
let abs_path = path.to_string_lossy().to_string();
|
||||
processed_count += 1;
|
||||
let abs_path_local = path.to_string_lossy().to_string();
|
||||
// Convert local path to /libraries format for DB storage
|
||||
let abs_path = unmap_libraries_path(&abs_path_local);
|
||||
let file_name = path.file_name()
|
||||
.map(|s| s.to_string_lossy().to_string())
|
||||
.unwrap_or_else(|| abs_path.clone());
|
||||
|
||||
info!("[SCAN] Job {} processing file {}/{}: {}", job_id, processed_count, total_files, file_name);
|
||||
let start_time = std::time::Instant::now();
|
||||
|
||||
// Update progress in DB
|
||||
let progress_percent = if total_files > 0 {
|
||||
((processed_count as f64 / total_files as f64) * 100.0) as i32
|
||||
} else {
|
||||
0
|
||||
};
|
||||
|
||||
let db_start = std::time::Instant::now();
|
||||
sqlx::query(
|
||||
"UPDATE index_jobs SET current_file = $2, processed_files = $3, progress_percent = $4 WHERE id = $1"
|
||||
)
|
||||
.bind(job_id)
|
||||
.bind(&file_name)
|
||||
.bind(processed_count)
|
||||
.bind(progress_percent)
|
||||
.execute(&state.pool)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
error!("[BDD] Failed to update progress for job {}: {}", job_id, e);
|
||||
e
|
||||
})?;
|
||||
info!("[BDD] Progress update took {:?}", db_start.elapsed());
|
||||
|
||||
seen.insert(abs_path.clone(), true);
|
||||
|
||||
let meta_start = std::time::Instant::now();
|
||||
let metadata = std::fs::metadata(path)
|
||||
.with_context(|| format!("cannot stat {}", path.display()))?;
|
||||
let mtime: DateTime<Utc> = metadata
|
||||
@@ -222,14 +343,22 @@ async fn scan_library(
|
||||
.map(DateTime::<Utc>::from)
|
||||
.unwrap_or_else(|_| Utc::now());
|
||||
let fingerprint = compute_fingerprint(path, metadata.len(), &mtime)?;
|
||||
info!("[META] Metadata+fingerprint took {:?}", meta_start.elapsed());
|
||||
|
||||
if let Some((file_id, book_id, old_fingerprint)) = existing.get(&abs_path).cloned() {
|
||||
if old_fingerprint == fingerprint {
|
||||
// Skip fingerprint check for full rebuilds - always reindex
|
||||
if !is_full_rebuild && old_fingerprint == fingerprint {
|
||||
info!("[SKIP] File unchanged, skipping: {} (total time: {:?})", file_name, start_time.elapsed());
|
||||
continue;
|
||||
}
|
||||
|
||||
info!("[PARSER] Starting parse_metadata for: {}", file_name);
|
||||
let parse_start = std::time::Instant::now();
|
||||
match parse_metadata(path, format, root) {
|
||||
Ok(parsed) => {
|
||||
info!("[PARSER] Parsing took {:?} for {} (pages={:?})", parse_start.elapsed(), file_name, parsed.page_count);
|
||||
|
||||
let db_start = std::time::Instant::now();
|
||||
sqlx::query(
|
||||
"UPDATE books SET title = $2, kind = $3, series = $4, volume = $5, page_count = $6, updated_at = NOW() WHERE id = $1",
|
||||
)
|
||||
@@ -252,10 +381,13 @@ async fn scan_library(
|
||||
.bind(fingerprint)
|
||||
.execute(&state.pool)
|
||||
.await?;
|
||||
info!("[BDD] UPDATE took {:?} for {}", db_start.elapsed(), file_name);
|
||||
|
||||
stats.indexed_files += 1;
|
||||
info!("[DONE] Updated file {} (total time: {:?})", file_name, start_time.elapsed());
|
||||
}
|
||||
Err(err) => {
|
||||
warn!("[PARSER] Failed to parse {} after {:?}: {}", file_name, parse_start.elapsed(), err);
|
||||
stats.errors += 1;
|
||||
sqlx::query(
|
||||
"UPDATE book_files SET parse_status = 'error', parse_error_opt = $2, updated_at = NOW() WHERE id = $1",
|
||||
@@ -264,14 +396,29 @@ async fn scan_library(
|
||||
.bind(err.to_string())
|
||||
.execute(&state.pool)
|
||||
.await?;
|
||||
|
||||
// Store error in index_job_errors table
|
||||
sqlx::query(
|
||||
"INSERT INTO index_job_errors (job_id, file_path, error_message) VALUES ($1, $2, $3)"
|
||||
)
|
||||
.bind(job_id)
|
||||
.bind(&abs_path)
|
||||
.bind(err.to_string())
|
||||
.execute(&state.pool)
|
||||
.await?;
|
||||
}
|
||||
}
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
info!("[PARSER] Starting parse_metadata for new file: {}", file_name);
|
||||
let parse_start = std::time::Instant::now();
|
||||
match parse_metadata(path, format, root) {
|
||||
Ok(parsed) => {
|
||||
info!("[PARSER] Parsing took {:?} for {} (pages={:?})", parse_start.elapsed(), file_name, parsed.page_count);
|
||||
|
||||
let db_start = std::time::Instant::now();
|
||||
let book_id = Uuid::new_v4();
|
||||
let file_id = Uuid::new_v4();
|
||||
sqlx::query(
|
||||
@@ -299,10 +446,13 @@ async fn scan_library(
|
||||
.bind(fingerprint)
|
||||
.execute(&state.pool)
|
||||
.await?;
|
||||
info!("[BDD] INSERT took {:?} for {}", db_start.elapsed(), file_name);
|
||||
|
||||
stats.indexed_files += 1;
|
||||
info!("[DONE] Inserted new file {} (total time: {:?})", file_name, start_time.elapsed());
|
||||
}
|
||||
Err(err) => {
|
||||
warn!("[PARSER] Failed to parse {} after {:?}: {}", file_name, parse_start.elapsed(), err);
|
||||
stats.errors += 1;
|
||||
let book_id = Uuid::new_v4();
|
||||
let file_id = Uuid::new_v4();
|
||||
@@ -329,6 +479,16 @@ async fn scan_library(
|
||||
.bind(err.to_string())
|
||||
.execute(&state.pool)
|
||||
.await?;
|
||||
|
||||
// Store error in index_job_errors table
|
||||
sqlx::query(
|
||||
"INSERT INTO index_job_errors (job_id, file_path, error_message) VALUES ($1, $2, $3)"
|
||||
)
|
||||
.bind(job_id)
|
||||
.bind(&abs_path)
|
||||
.bind(err.to_string())
|
||||
.execute(&state.pool)
|
||||
.await?;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -383,7 +543,7 @@ struct SearchDoc {
|
||||
title: String,
|
||||
author: Option<String>,
|
||||
series: Option<String>,
|
||||
volume: Option<String>,
|
||||
volume: Option<i32>,
|
||||
language: Option<String>,
|
||||
}
|
||||
|
||||
@@ -405,6 +565,13 @@ async fn sync_meili(pool: &sqlx::PgPool, meili_url: &str, meili_master_key: &str
|
||||
.send()
|
||||
.await;
|
||||
|
||||
// Clear existing documents to avoid stale data
|
||||
let _ = client
|
||||
.delete(format!("{base}/indexes/books/documents"))
|
||||
.header("Authorization", format!("Bearer {meili_master_key}"))
|
||||
.send()
|
||||
.await;
|
||||
|
||||
let rows = sqlx::query(
|
||||
"SELECT id, library_id, kind, title, author, series, volume, language FROM books",
|
||||
)
|
||||
|
||||
Reference in New Issue
Block a user