feat: add caching debug logs and configurable max concurrent requests for Komga API to enhance performance monitoring

This commit is contained in:
Julien Froidefond
2025-10-18 09:08:41 +02:00
parent ae4b766085
commit b7704207ec
42 changed files with 1141 additions and 1302 deletions

View File

@@ -6,7 +6,6 @@ import { ERROR_CODES } from "../../constants/errorCodes";
import { AppError } from "../../utils/errors";
import type { KomgaConfig } from "@/types/komga";
import type { ServerCacheService } from "./server-cache.service";
import { DebugService } from "./debug.service";
import { RequestMonitorService } from "./request-monitor.service";
import { RequestQueueService } from "./request-queue.service";
@@ -109,8 +108,6 @@ export abstract class BaseApiService {
}
}
const startTime = performance.now();
// Timeout de 60 secondes au lieu de 10 par défaut
const timeoutMs = 60000;
const controller = new AbortController();
@@ -149,19 +146,27 @@ export abstract class BaseApiService {
family: 4,
});
}
// Retry automatique sur timeout de connexion (cold start)
if (fetchError?.cause?.code === 'UND_ERR_CONNECT_TIMEOUT') {
// eslint-disable-next-line no-console
console.log(`⏱️ Connection timeout for ${url}. Retrying once (cold start)...`);
return await fetch(url, {
headers,
...options,
signal: controller.signal,
// @ts-ignore - undici-specific options
connectTimeout: timeoutMs,
bodyTimeout: timeoutMs,
headersTimeout: timeoutMs,
});
}
throw fetchError;
}
});
clearTimeout(timeoutId);
const endTime = performance.now();
// Logger la requête côté serveur
await DebugService.logRequest({
url: url,
startTime,
endTime,
fromCache: false, // Côté serveur, on ne peut pas détecter le cache navigateur
});
if (!response.ok) {
throw new AppError(ERROR_CODES.KOMGA.HTTP_ERROR, {
@@ -172,16 +177,6 @@ export abstract class BaseApiService {
return options.isImage ? (response as T) : response.json();
} catch (error) {
const endTime = performance.now();
// Logger l'erreur côté serveur
await DebugService.logRequest({
url: url,
startTime,
endTime,
fromCache: false,
});
throw error;
} finally {
clearTimeout(timeoutId);

View File

@@ -99,7 +99,7 @@ export class BookService extends BaseApiService {
const arrayBuffer = response.buffer.buffer.slice(
response.buffer.byteOffset,
response.buffer.byteOffset + response.buffer.byteLength
);
) as ArrayBuffer;
return new Response(arrayBuffer, {
headers: {

View File

@@ -1,5 +1,4 @@
import prisma from "@/lib/prisma";
import { DebugService } from "./debug.service";
import { getCurrentUser } from "../auth-utils";
import { ERROR_CODES } from "../../constants/errorCodes";
import { AppError } from "../../utils/errors";
@@ -50,12 +49,10 @@ export class ConfigDBService {
try {
const user: User | null = await this.getCurrentUser();
return DebugService.measureMongoOperation("getConfig", async () => {
const config = await prisma.komgaConfig.findUnique({
where: { userId: user.id },
});
return config as KomgaConfig | null;
const config = await prisma.komgaConfig.findUnique({
where: { userId: user.id },
});
return config as KomgaConfig | null;
} catch (error) {
if (error instanceof AppError) {
throw error;
@@ -68,12 +65,10 @@ export class ConfigDBService {
try {
const user: User | null = await this.getCurrentUser();
return DebugService.measureMongoOperation("getTTLConfig", async () => {
const config = await prisma.tTLConfig.findUnique({
where: { userId: user.id },
});
return config as TTLConfig | null;
const config = await prisma.tTLConfig.findUnique({
where: { userId: user.id },
});
return config as TTLConfig | null;
} catch (error) {
if (error instanceof AppError) {
throw error;
@@ -86,30 +81,28 @@ export class ConfigDBService {
try {
const user: User | null = await this.getCurrentUser();
return DebugService.measureMongoOperation("saveTTLConfig", async () => {
const config = await prisma.tTLConfig.upsert({
where: { userId: user.id },
update: {
defaultTTL: data.defaultTTL,
homeTTL: data.homeTTL,
librariesTTL: data.librariesTTL,
seriesTTL: data.seriesTTL,
booksTTL: data.booksTTL,
imagesTTL: data.imagesTTL,
},
create: {
userId: user.id,
defaultTTL: data.defaultTTL,
homeTTL: data.homeTTL,
librariesTTL: data.librariesTTL,
seriesTTL: data.seriesTTL,
booksTTL: data.booksTTL,
imagesTTL: data.imagesTTL,
},
});
return config as TTLConfig;
const config = await prisma.tTLConfig.upsert({
where: { userId: user.id },
update: {
defaultTTL: data.defaultTTL,
homeTTL: data.homeTTL,
librariesTTL: data.librariesTTL,
seriesTTL: data.seriesTTL,
booksTTL: data.booksTTL,
imagesTTL: data.imagesTTL,
},
create: {
userId: user.id,
defaultTTL: data.defaultTTL,
homeTTL: data.homeTTL,
librariesTTL: data.librariesTTL,
seriesTTL: data.seriesTTL,
booksTTL: data.booksTTL,
imagesTTL: data.imagesTTL,
},
});
return config as TTLConfig;
} catch (error) {
if (error instanceof AppError) {
throw error;

View File

@@ -1,326 +0,0 @@
import fs from "fs/promises";
import path from "path";
import type { RequestTiming } from "@/types/debug";
import { PreferencesService } from "./preferences.service";
import { getCurrentUser } from "../auth-utils";
import { ERROR_CODES } from "../../constants/errorCodes";
import { AppError } from "../../utils/errors";
export type { RequestTiming };
export class DebugService {
  /**
   * Per-user debug logging of request/DB timings to JSON files on disk.
   * One file per user under <cwd>/debug-logs; writes are serialized per file
   * through a promise chain so concurrent log calls never interleave.
   */

  // Map of file path -> tail of the pending-write promise chain for that file.
  private static writeQueues = new Map<string, Promise<void>>();

  // Resolves the authenticated user's id, or throws UNAUTHENTICATED.
  private static async getCurrentUserId(): Promise<string> {
    const user = await getCurrentUser();
    if (!user) {
      throw new AppError(ERROR_CODES.AUTH.UNAUTHENTICATED);
    }
    return user.id;
  }

  // One JSON log file per user: <cwd>/debug-logs/<userId>.json
  private static getLogFilePath(userId: string): string {
    return path.join(process.cwd(), "debug-logs", `${userId}.json`);
  }

  // Creates the debug-logs directory if it does not exist yet.
  private static async ensureDebugDir(): Promise<void> {
    const debugDir = path.join(process.cwd(), "debug-logs");
    try {
      await fs.access(debugDir);
    } catch {
      await fs.mkdir(debugDir, { recursive: true });
    }
  }

  // Debug logging is opt-in via the user's `debug` preference;
  // unauthenticated requests are never logged.
  private static async isDebugEnabled(): Promise<boolean> {
    const user = await getCurrentUser();
    if (!user) {
      return false;
    }
    const preferences = await PreferencesService.getPreferences();
    return preferences.debug === true;
  }

  /**
   * Reads and parses a log file. Falls back to the `.backup` copy when the
   * primary file is missing or corrupt, and to an empty array when both fail.
   * NOTE(review): the JSON.parse result is trusted as RequestTiming[] without
   * validation — acceptable here since we also write these files.
   */
  private static async readLogs(filePath: string): Promise<RequestTiming[]> {
    try {
      const content = await fs.readFile(filePath, "utf-8");
      return JSON.parse(content);
    } catch {
      // Try reading the backup file instead
      try {
        const backupPath = filePath + '.backup';
        const backupContent = await fs.readFile(backupPath, "utf-8");
        return JSON.parse(backupContent);
      } catch {
        return [];
      }
    }
  }

  /**
   * Appends a batch of logs to the file, chained behind any in-flight write
   * for the same path so writes stay ordered.
   */
  private static async writeLogs(filePath: string, logs: RequestTiming[]): Promise<void> {
    // Get the existing queue for this file, if any
    const existingQueue = this.writeQueues.get(filePath);
    // Chain the new write after the previous one (or start a fresh chain)
    const newQueue = existingQueue
      ? existingQueue.then(() => this.performAppend(filePath, logs))
      : this.performAppend(filePath, logs);
    // Publish the new chain tail
    this.writeQueues.set(filePath, newQueue);
    try {
      await newQueue;
    } finally {
      // Remove the queue entry only if no later write has been chained on
      if (this.writeQueues.get(filePath) === newQueue) {
        this.writeQueues.delete(filePath);
      }
    }
  }

  /**
   * Actual batch-append: merge with existing logs, keep the most recent 1000,
   * back up the old file, then rewrite it whole. Errors are logged, never
   * rethrown, so logging failures cannot break the application.
   */
  private static async performAppend(filePath: string, logs: RequestTiming[]): Promise<void> {
    try {
      // Read the existing file
      const existingLogs = await this.readLogs(filePath);
      // Merge with the new logs
      const allLogs = [...existingLogs, ...logs];
      // Keep only the most recent 1000 entries
      const trimmedLogs = allLogs.slice(-1000);
      // Create a backup before writing
      try {
        await fs.copyFile(filePath, filePath + '.backup');
      } catch {
        // Ignore if the file does not exist yet
      }
      // Rewrite the whole file (required to keep chronological order)
      await fs.writeFile(filePath, JSON.stringify(trimmedLogs, null, 2), { flag: 'w' });
    } catch (error) {
      console.error(`Erreur lors de l'écriture des logs pour ${filePath}:`, error);
      // Do not rethrow, to avoid breaking the application
    }
  }

  /**
   * Appends a single log entry, chained behind any in-flight write for the
   * same path (same queueing scheme as writeLogs).
   */
  private static async appendLog(filePath: string, log: RequestTiming): Promise<void> {
    // Get the existing queue for this file, if any
    const existingQueue = this.writeQueues.get(filePath);
    // Chain the new write after the previous one (or start a fresh chain)
    const newQueue = existingQueue
      ? existingQueue.then(() => this.performSingleAppend(filePath, log))
      : this.performSingleAppend(filePath, log);
    // Publish the new chain tail
    this.writeQueues.set(filePath, newQueue);
    try {
      await newQueue;
    } finally {
      // Remove the queue entry only if no later write has been chained on
      if (this.writeQueues.get(filePath) === newQueue) {
        this.writeQueues.delete(filePath);
      }
    }
  }

  /**
   * Actual single-entry append with fuzzy de-duplication: an entry is skipped
   * when an existing log has the same URL, a duration within 10ms, and a
   * timestamp within the tolerance window (wider for page renders, which are
   * reported less precisely). Errors are logged, never rethrown.
   * NOTE(review): unlike performAppend, no `.backup` copy is made here before
   * rewriting — presumably intentional for the hot path, but worth confirming.
   */
  private static async performSingleAppend(filePath: string, log: RequestTiming): Promise<void> {
    try {
      // Read the existing file
      const existingLogs = await this.readLogs(filePath);
      // Duplicate detection, with different tolerances per entry type
      const isPageRender = log.pageRender !== undefined;
      const timeTolerance = isPageRender ? 500 : 50; // 500ms for renders, 50ms for requests
      const exists = existingLogs.some(existingLog =>
        existingLog.url === log.url &&
        Math.abs(existingLog.duration - log.duration) < 10 && // similar duration (10ms tolerance)
        Math.abs(new Date(existingLog.timestamp).getTime() - new Date(log.timestamp).getTime()) < timeTolerance
      );
      if (!exists) {
        // Add the new log
        const allLogs = [...existingLogs, log];
        // Keep only the most recent 1000 entries
        const trimmedLogs = allLogs.slice(-1000);
        // Rewrite the whole file, with error handling
        await fs.writeFile(filePath, JSON.stringify(trimmedLogs, null, 2), { flag: 'w' });
      }
    } catch (error) {
      console.error(`Erreur lors de l'écriture du log pour ${filePath}:`, error);
      // Do not rethrow, to avoid breaking the application
    }
  }

  /**
   * Builds a complete RequestTiming from raw start/end times, deriving
   * `duration` and stamping `timestamp` with the current wall-clock time.
   */
  private static createTiming(
    url: string,
    startTime: number,
    endTime: number,
    fromCache: boolean,
    additionalData?: Partial<RequestTiming>
  ): RequestTiming {
    return {
      url,
      startTime,
      endTime,
      duration: endTime - startTime,
      timestamp: new Date().toISOString(),
      fromCache,
      ...additionalData,
    };
  }

  /**
   * Records a request timing for the current user. No-op when debug is
   * disabled; swallows all errors so logging can never break a request.
   */
  static async logRequest(timing: Omit<RequestTiming, "duration" | "timestamp">) {
    try {
      if (!(await this.isDebugEnabled())) return;
      const userId = await this.getCurrentUserId();
      await this.ensureDebugDir();
      const filePath = this.getLogFilePath(userId);
      const newTiming = this.createTiming(
        timing.url,
        timing.startTime,
        timing.endTime,
        timing.fromCache,
        {
          cacheType: timing.cacheType,
          mongoAccess: timing.mongoAccess,
          pageRender: timing.pageRender,
        }
      );
      // Use the serialized (queue-based) append
      await this.appendLog(filePath, newTiming);
    } catch (error) {
      console.error("Erreur lors de l'enregistrement du log:", error);
    }
  }

  /**
   * Returns the current user's logs. Rethrows auth errors (AppError);
   * any other failure yields an empty array.
   */
  static async getRequestLogs(): Promise<RequestTiming[]> {
    try {
      const userId = await this.getCurrentUserId();
      const filePath = this.getLogFilePath(userId);
      return await this.readLogs(filePath);
    } catch (error) {
      if (error instanceof AppError) throw error;
      return [];
    }
  }

  /**
   * Empties the current user's log file. Rethrows auth errors (AppError);
   * other failures are silently ignored.
   */
  static async clearLogs(): Promise<void> {
    try {
      const userId = await this.getCurrentUserId();
      const filePath = this.getLogFilePath(userId);
      await this.clearFile(filePath);
    } catch (error) {
      if (error instanceof AppError) throw error;
    }
  }

  // Queues a clear operation behind any in-flight write for the file.
  private static async clearFile(filePath: string): Promise<void> {
    try {
      // Get the existing queue for this file, if any
      const existingQueue = this.writeQueues.get(filePath);
      // Chain the clear after the previous operation (or start a fresh chain)
      const newQueue = existingQueue
        ? existingQueue.then(() => this.performClear(filePath))
        : this.performClear(filePath);
      // Publish the new chain tail
      this.writeQueues.set(filePath, newQueue);
      try {
        await newQueue;
      } finally {
        // Remove the queue entry only if no later operation has been chained on
        if (this.writeQueues.get(filePath) === newQueue) {
          this.writeQueues.delete(filePath);
        }
      }
    } catch (error) {
      console.error(`Erreur lors du vidage du fichier ${filePath}:`, error);
    }
  }

  // Actual clear: back up the old file, then overwrite it with an empty array.
  private static async performClear(filePath: string): Promise<void> {
    try {
      // Create a backup before clearing
      try {
        await fs.copyFile(filePath, filePath + '.backup');
      } catch {
        // Ignore if the file does not exist yet
      }
      // Write an empty array to clear the file
      await fs.writeFile(filePath, JSON.stringify([], null, 2), { flag: 'w' });
    } catch (error) {
      console.error(`Erreur lors du vidage du fichier ${filePath}:`, error);
    }
  }

  /**
   * Records a page-render timing. The start time is reconstructed as
   * now - duration, so the entry fits the same RequestTiming shape.
   */
  static async logPageRender(page: string, duration: number) {
    try {
      if (!(await this.isDebugEnabled())) return;
      const userId = await this.getCurrentUserId();
      await this.ensureDebugDir();
      const filePath = this.getLogFilePath(userId);
      const now = performance.now();
      const newTiming = this.createTiming(`Page Render: ${page}`, now - duration, now, false, {
        pageRender: { page, duration },
      });
      // Use the same serialized (queue-based) append
      await this.appendLog(filePath, newTiming);
    } catch (error) {
      console.error("Erreur lors de l'enregistrement du log de rendu:", error);
    }
  }

  /**
   * Wraps a DB operation, logging its duration (and failures) when debug is
   * enabled; when disabled, runs the operation with no timing overhead.
   * The operation's result/error is always passed through to the caller.
   */
  static async measureMongoOperation<T>(operation: string, func: () => Promise<T>): Promise<T> {
    const startTime = performance.now();
    try {
      if (!(await this.isDebugEnabled())) {
        // NOTE(review): `return func()` (not `return await`) means a rejection
        // here bypasses the catch below — no error log is written, which is
        // consistent with debug being off; callers still see the rejection.
        return func();
      }
      const result = await func();
      const endTime = performance.now();
      await this.logRequest({
        url: `MongoDB: ${operation}`,
        startTime,
        endTime,
        fromCache: false,
        mongoAccess: {
          operation,
          duration: endTime - startTime,
        },
      });
      return result;
    } catch (error) {
      const endTime = performance.now();
      // Log the failure with the same timing payload, then rethrow
      await this.logRequest({
        url: `MongoDB Error: ${operation}`,
        startTime,
        endTime,
        fromCache: false,
        mongoAccess: {
          operation,
          duration: endTime - startTime,
        },
      });
      throw error;
    }
  }
}

View File

@@ -1,5 +1,4 @@
import prisma from "@/lib/prisma";
import { DebugService } from "./debug.service";
import { getCurrentUser } from "../auth-utils";
import { ERROR_CODES } from "../../constants/errorCodes";
import { AppError } from "../../utils/errors";
@@ -30,15 +29,13 @@ export class FavoriteService {
try {
const user = await this.getCurrentUser();
return DebugService.measureMongoOperation("isFavorite", async () => {
const favorite = await prisma.favorite.findFirst({
where: {
userId: user.id,
seriesId: seriesId,
},
});
return !!favorite;
const favorite = await prisma.favorite.findFirst({
where: {
userId: user.id,
seriesId: seriesId,
},
});
return !!favorite;
} catch (error) {
console.error("Erreur lors de la vérification du favori:", error);
return false;
@@ -52,20 +49,18 @@ export class FavoriteService {
try {
const user = await this.getCurrentUser();
await DebugService.measureMongoOperation("addToFavorites", async () => {
await prisma.favorite.upsert({
where: {
userId_seriesId: {
userId: user.id,
seriesId,
},
},
update: {},
create: {
await prisma.favorite.upsert({
where: {
userId_seriesId: {
userId: user.id,
seriesId,
},
});
},
update: {},
create: {
userId: user.id,
seriesId,
},
});
this.dispatchFavoritesChanged();
@@ -81,13 +76,11 @@ export class FavoriteService {
try {
const user = await this.getCurrentUser();
await DebugService.measureMongoOperation("removeFromFavorites", async () => {
await prisma.favorite.deleteMany({
where: {
userId: user.id,
seriesId,
},
});
await prisma.favorite.deleteMany({
where: {
userId: user.id,
seriesId,
},
});
this.dispatchFavoritesChanged();
@@ -102,47 +95,41 @@ export class FavoriteService {
static async getAllFavoriteIds(): Promise<string[]> {
const user = await this.getCurrentUser();
return DebugService.measureMongoOperation("getAllFavoriteIds", async () => {
const favorites = await prisma.favorite.findMany({
where: { userId: user.id },
select: { seriesId: true },
});
return favorites.map((favorite) => favorite.seriesId);
const favorites = await prisma.favorite.findMany({
where: { userId: user.id },
select: { seriesId: true },
});
return favorites.map((favorite) => favorite.seriesId);
}
static async addFavorite(seriesId: string) {
const user = await this.getCurrentUser();
return DebugService.measureMongoOperation("addFavorite", async () => {
const favorite = await prisma.favorite.upsert({
where: {
userId_seriesId: {
userId: user.id,
seriesId,
},
},
update: {},
create: {
const favorite = await prisma.favorite.upsert({
where: {
userId_seriesId: {
userId: user.id,
seriesId,
},
});
return favorite;
},
update: {},
create: {
userId: user.id,
seriesId,
},
});
return favorite;
}
static async removeFavorite(seriesId: string): Promise<boolean> {
const user = await this.getCurrentUser();
return DebugService.measureMongoOperation("removeFavorite", async () => {
const result = await prisma.favorite.deleteMany({
where: {
userId: user.id,
seriesId,
},
});
return result.count > 0;
const result = await prisma.favorite.deleteMany({
where: {
userId: user.id,
seriesId,
},
});
return result.count > 0;
}
}

View File

@@ -84,9 +84,13 @@ export class LibraryService extends BaseApiService {
try {
// Récupérer toutes les séries depuis le cache
const allSeries = await this.getAllLibrarySeries(libraryId);
// Filtrer les séries
let filteredSeries = allSeries;
// Filtrer les séries supprimées (fichiers manquants sur le filesystem)
filteredSeries = filteredSeries.filter((series) => !series.deleted);
if (unreadOnly) {
filteredSeries = filteredSeries.filter(
(series) => series.booksReadCount < series.booksCount
@@ -96,7 +100,8 @@ export class LibraryService extends BaseApiService {
if (search) {
const searchLower = search.toLowerCase();
filteredSeries = filteredSeries.filter((series) =>
series.metadata.title.toLowerCase().includes(searchLower)
series.metadata.title.toLowerCase().includes(searchLower) ||
series.id.toLowerCase().includes(searchLower)
);
}
@@ -108,6 +113,7 @@ export class LibraryService extends BaseApiService {
const totalPages = Math.ceil(totalElements / size);
const startIndex = page * size;
const endIndex = Math.min(startIndex + size, totalElements);
const paginatedSeries = filteredSeries.slice(startIndex, endIndex);
// Construire la réponse

View File

@@ -31,7 +31,6 @@ export class PreferencesService {
showThumbnails: preferences.showThumbnails,
cacheMode: preferences.cacheMode as "memory" | "file",
showOnlyUnread: preferences.showOnlyUnread,
debug: preferences.debug,
displayMode: preferences.displayMode as UserPreferences["displayMode"],
background: preferences.background as unknown as BackgroundPreferences,
};
@@ -51,7 +50,6 @@ export class PreferencesService {
if (preferences.showThumbnails !== undefined) updateData.showThumbnails = preferences.showThumbnails;
if (preferences.cacheMode !== undefined) updateData.cacheMode = preferences.cacheMode;
if (preferences.showOnlyUnread !== undefined) updateData.showOnlyUnread = preferences.showOnlyUnread;
if (preferences.debug !== undefined) updateData.debug = preferences.debug;
if (preferences.displayMode !== undefined) updateData.displayMode = preferences.displayMode;
if (preferences.background !== undefined) updateData.background = preferences.background;
@@ -63,7 +61,6 @@ export class PreferencesService {
showThumbnails: preferences.showThumbnails ?? defaultPreferences.showThumbnails,
cacheMode: preferences.cacheMode ?? defaultPreferences.cacheMode,
showOnlyUnread: preferences.showOnlyUnread ?? defaultPreferences.showOnlyUnread,
debug: preferences.debug ?? defaultPreferences.debug,
displayMode: preferences.displayMode ?? defaultPreferences.displayMode,
background: (preferences.background ?? defaultPreferences.background) as unknown as Prisma.InputJsonValue,
},
@@ -73,7 +70,6 @@ export class PreferencesService {
showThumbnails: updatedPreferences.showThumbnails,
cacheMode: updatedPreferences.cacheMode as "memory" | "file",
showOnlyUnread: updatedPreferences.showOnlyUnread,
debug: updatedPreferences.debug,
displayMode: updatedPreferences.displayMode as UserPreferences["displayMode"],
background: updatedPreferences.background as unknown as BackgroundPreferences,
};

View File

@@ -14,8 +14,10 @@ class RequestQueue {
private activeCount = 0;
private maxConcurrent: number;
constructor(maxConcurrent: number = 5) {
this.maxConcurrent = maxConcurrent;
constructor(maxConcurrent?: number) {
// Lire depuis env ou utiliser la valeur par défaut
const envValue = process.env.KOMGA_MAX_CONCURRENT_REQUESTS;
this.maxConcurrent = maxConcurrent ?? (envValue ? parseInt(envValue, 10) : 5);
}
async enqueue<T>(execute: () => Promise<T>): Promise<T> {
@@ -68,6 +70,10 @@ class RequestQueue {
}
}
// Singleton instance - Limite à 2 requêtes simultanées vers Komga (réduit pour CPU)
export const RequestQueueService = new RequestQueue(2);
// Singleton instance - Par défaut limite à 2 requêtes simultanées (configurable via KOMGA_MAX_CONCURRENT_REQUESTS)
export const RequestQueueService = new RequestQueue(
process.env.KOMGA_MAX_CONCURRENT_REQUESTS
? parseInt(process.env.KOMGA_MAX_CONCURRENT_REQUESTS, 10)
: 2
);

View File

@@ -92,6 +92,9 @@ export class SeriesService extends BaseApiService {
// Filtrer les livres
let filteredBooks = allBooks;
// Filtrer les livres supprimés (fichiers manquants sur le filesystem)
filteredBooks = filteredBooks.filter((book: KomgaBook) => !book.deleted);
if (unreadOnly) {
filteredBooks = filteredBooks.filter(
(book: KomgaBook) => !book.readProgress || !book.readProgress.completed

View File

@@ -1,7 +1,6 @@
import fs from "fs";
import path from "path";
import { PreferencesService } from "./preferences.service";
import { DebugService } from "./debug.service";
import { getCurrentUser } from "../auth-utils";
export type CacheMode = "file" | "memory";
@@ -440,14 +439,13 @@ class ServerCacheService {
const { data, isStale } = cachedResult;
const endTime = performance.now();
// Log la requête avec l'indication du cache
await DebugService.logRequest({
url: `[CACHE${isStale ? '-STALE' : ''}] ${key}`,
startTime,
endTime,
fromCache: true,
cacheType: type,
});
// Debug logging
if (process.env.CACHE_DEBUG === 'true') {
const icon = isStale ? '⚠️' : '';
const status = isStale ? 'STALE' : 'HIT';
// eslint-disable-next-line no-console
console.log(`${icon} [CACHE ${status}] ${key} | ${type} | ${(endTime - startTime).toFixed(2)}ms`);
}
// Si le cache est expiré, revalider en background sans bloquer la réponse
if (isStale) {
@@ -459,9 +457,21 @@ class ServerCacheService {
}
// Pas de cache du tout, fetch normalement
if (process.env.CACHE_DEBUG === 'true') {
// eslint-disable-next-line no-console
console.log(`❌ [CACHE MISS] ${key} | ${type}`);
}
try {
const data = await fetcher();
this.set(cacheKey, data, type);
const endTime = performance.now();
if (process.env.CACHE_DEBUG === 'true') {
// eslint-disable-next-line no-console
console.log(`💾 [CACHE SET] ${key} | ${type} | ${(endTime - startTime).toFixed(2)}ms`);
}
return data;
} catch (error) {
throw error;
@@ -482,16 +492,13 @@ class ServerCacheService {
const data = await fetcher();
this.set(cacheKey, data, type);
const endTime = performance.now();
await DebugService.logRequest({
url: `[REVALIDATE] ${debugKey}`,
startTime,
endTime,
fromCache: false,
cacheType: type,
});
if (process.env.CACHE_DEBUG === 'true') {
const endTime = performance.now();
// eslint-disable-next-line no-console
console.log(`🔄 [CACHE REVALIDATE] ${debugKey} | ${type} | ${(endTime - startTime).toFixed(2)}ms`);
}
} catch (error) {
console.error(`Background revalidation failed for ${debugKey}:`, error);
console.error(`🔴 [CACHE REVALIDATE ERROR] ${debugKey}:`, error);
// Ne pas relancer l'erreur car c'est en background
}
}