feat: enhance backup functionality and logging
- Updated the `createBackup` method to accept a `force` parameter, allowing backups to be created even if no changes are detected.
- Added user alerts in `AdvancedSettingsPageClient` and `BackupSettingsPageClient` for backup status feedback.
- Implemented a `getBackupLogs` method in `BackupService` to retrieve backup logs, with a new API route for accessing them.
- Enhanced the `BackupSettingsPageClient` UI to display backup logs and provide a refresh option.
- Updated `BackupManagerCLI` to support forced backups via the command line.
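A minimal sketch of how the updated service might be driven from calling code, based only on the signatures in this diff (`createBackup(type, forceCreate)` now returns `BackupInfo | null`, and `getBackupLogs(maxLines)` returns the newest log lines first); the `backupService` import name and path are assumptions, not taken from this commit:

```typescript
import { backupService } from './services/backup'; // assumed singleton export name and path

async function runManualBackup(force: boolean): Promise<void> {
  // With force = false, createBackup returns null when nothing changed since the last backup
  const info = await backupService.createBackup('manual', force);

  if (info === null) {
    console.log('Backup skipped: no changes detected since the last backup');
  } else if (info.status === 'success') {
    console.log(`Backup created: ${info.filename}`);
  } else {
    console.error(`Backup failed: ${info.error}`);
  }

  // Show the 20 most recent log entries (newest first)
  const logs = await backupService.getBackupLogs(20);
  for (const line of logs) {
    console.log(line);
  }
}
```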
@@ -1,11 +1,8 @@
 import { promises as fs } from 'fs';
-import { exec } from 'child_process';
-import { promisify } from 'util';
 import path from 'path';
 import { prisma } from './database';
 import { userPreferencesService } from './user-preferences';
-
-const execAsync = promisify(exec);
+import { BackupUtils } from '../lib/backup-utils';

 export interface BackupConfig {
   enabled: boolean;
@@ -24,6 +21,7 @@ export interface BackupInfo {
   type: 'manual' | 'automatic';
   status: 'success' | 'failed' | 'in_progress';
   error?: string;
+  databaseHash?: string;
 }

 export class BackupService {
@@ -39,15 +37,7 @@ export class BackupService {
   }

   private getDefaultBackupPath(): string {
-    // 1. Explicit environment variable
-    if (process.env.BACKUP_STORAGE_PATH) {
-      return path.resolve(process.cwd(), process.env.BACKUP_STORAGE_PATH);
-    }
-
-    // 2. Default path depending on the environment
-    return process.env.NODE_ENV === 'production'
-      ? path.join(process.cwd(), 'data', 'backups') // Docker: /app/data/backups
-      : path.join(process.cwd(), 'backups'); // Local: ./backups
+    return BackupUtils.resolveBackupStoragePath();
   }

   private config: BackupConfig;
@@ -106,27 +96,180 @@ export class BackupService {
   }

   /**
-   * Creates a full backup of the database
+   * Computes a hash of the database to detect changes
    */
-  async createBackup(type: 'manual' | 'automatic' = 'manual'): Promise<BackupInfo> {
+  private async calculateDatabaseHash(): Promise<string> {
+    try {
+      const dbPath = BackupUtils.resolveDatabasePath();
+      return await BackupUtils.calculateFileHash(dbPath);
+    } catch (error) {
+      console.error('Error calculating database hash:', error);
+      throw new Error(`Failed to calculate database hash: ${error}`);
+    }
+  }
+
+  /**
+   * Checks whether the database has changed since the last backup
+   */
+  async hasChangedSinceLastBackup(): Promise<boolean> {
+    try {
+      const currentHash = await this.calculateDatabaseHash();
+      const backups = await this.listBackups();
+
+      if (backups.length === 0) {
+        // No previous backup, so there are necessarily changes
+        return true;
+      }
+
+      // Retrieve the hash of the last backup
+      const lastBackup = backups[0]; // Backups are sorted by descending date
+      const lastBackupHash = await this.getBackupHash(lastBackup.filename);
+
+      if (!lastBackupHash) {
+        // No hash available for the last backup, assume there are changes
+        console.log('No hash available for last backup, assuming changes');
+        return true;
+      }
+
+      const hasChanged = currentHash !== lastBackupHash;
+      console.log(`Database hash comparison: current=${currentHash.substring(0, 8)}..., last=${lastBackupHash.substring(0, 8)}..., changed=${hasChanged}`);
+
+      return hasChanged;
+    } catch (error) {
+      console.error('Error checking database changes:', error);
+      // On error, assume there are changes, to be safe
+      return true;
+    }
+  }
+
+  /**
+   * Retrieves a backup's hash from its metadata
+   */
+  private async getBackupHash(filename: string): Promise<string | null> {
+    try {
+      const metadataPath = path.join(this.getCurrentBackupPath(), `${filename}.meta.json`);
+
+      try {
+        const metadataContent = await fs.readFile(metadataPath, 'utf-8');
+        const metadata = JSON.parse(metadataContent);
+        return metadata.databaseHash || null;
+      } catch {
+        // The metadata file does not exist; try to compute the hash of the backup itself
+        return await this.calculateBackupFileHash(filename);
+      }
+    } catch (error) {
+      console.error(`Error getting backup hash for ${filename}:`, error);
+      return null;
+    }
+  }
+
+  /**
+   * Computes the hash of an existing backup file
+   */
+  private async calculateBackupFileHash(filename: string): Promise<string | null> {
+    try {
+      const backupPath = path.join(this.getCurrentBackupPath(), filename);
+
+      // If the file is compressed, it has to be decompressed temporarily
+      if (filename.endsWith('.gz')) {
+        const tempFile = path.join(this.getCurrentBackupPath(), `temp_${Date.now()}.db`);
+
+        try {
+          await BackupUtils.decompressFileTemp(backupPath, tempFile);
+          const hash = await BackupUtils.calculateFileHash(tempFile);
+
+          // Clean up the temporary file
+          await fs.unlink(tempFile);
+
+          return hash;
+        } catch (error) {
+          // Clean up the temporary file on error
+          try {
+            await fs.unlink(tempFile);
+          } catch {}
+          throw error;
+        }
+      } else {
+        // Uncompressed file
+        return await BackupUtils.calculateFileHash(backupPath);
+      }
+    } catch (error) {
+      console.error(`Error calculating hash for backup file ${filename}:`, error);
+      return null;
+    }
+  }
+
+  /**
+   * Saves a backup's metadata
+   */
+  private async saveBackupMetadata(filename: string, metadata: { databaseHash: string; createdAt: Date; type: string }): Promise<void> {
+    try {
+      const metadataPath = path.join(this.getCurrentBackupPath(), `${filename}.meta.json`);
+      await fs.writeFile(metadataPath, JSON.stringify(metadata, null, 2));
+    } catch (error) {
+      console.error(`Error saving backup metadata for ${filename}:`, error);
+      // Do not fail the backup if the metadata cannot be saved
+    }
+  }
+
+
+  /**
+   * Writes an entry to the backup log file
+   */
+  private async logBackupAction(type: 'manual' | 'automatic', action: 'created' | 'skipped' | 'failed', details: string, extra?: { hash?: string; size?: number; previousHash?: string }): Promise<void> {
+    const logPath = path.join(this.getCurrentBackupPath(), 'backup.log');
+    await BackupUtils.writeLogEntry(logPath, type, action, details, extra);
+  }
+
+  /**
+   * Creates a full backup of the database
+   * First checks whether there have been any changes (unless forced)
+   */
+  async createBackup(type: 'manual' | 'automatic' = 'manual', forceCreate: boolean = false): Promise<BackupInfo | null> {
     const backupId = `backup_${Date.now()}`;
-    const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
-    const filename = `towercontrol_${type}_${timestamp}.db`;
+    const filename = BackupUtils.generateBackupFilename(type);
     const backupPath = path.join(this.getCurrentBackupPath(), filename);

     console.log(`🔄 Starting ${type} backup: ${filename}`);

     try {
-      // Create the backup directory if needed
-      await this.ensureBackupDirectory();
+      // Check for changes (unless forced)
+      if (!forceCreate) {
+        const hasChanged = await this.hasChangedSinceLastBackup();
+
+        if (!hasChanged) {
+          const currentHash = await this.calculateDatabaseHash();
+          const backups = await this.listBackups();
+          const lastBackupHash = backups.length > 0 ? await this.getBackupHash(backups[0].filename) : null;
+
+          const message = `No changes detected since last backup`;
+          console.log(`⏭️ Skipping ${type} backup: ${message}`);
+          await this.logBackupAction(type, 'skipped', message, {
+            hash: currentHash,
+            previousHash: lastBackupHash || undefined
+          });
+          return null;
+        }
+
+        console.log(`📝 Changes detected, proceeding with ${type} backup`);
+      } else {
+        console.log(`🔧 Forced ${type} backup, skipping change detection`);
+      }

-      // Create the SQLite backup (without a health check, to avoid conflicts)
-      await this.createSQLiteBackup(backupPath);
+      // Compute the database hash before the backup
+      const databaseHash = await this.calculateDatabaseHash();
+
+      // Create the backup directory if needed
+      await BackupUtils.ensureDirectory(this.getCurrentBackupPath());
+
+      // Create the SQLite backup
+      const dbPath = BackupUtils.resolveDatabasePath();
+      await BackupUtils.createSQLiteBackup(dbPath, backupPath);

       // Compress if enabled
       let finalPath = backupPath;
       if (this.config.compression) {
-        finalPath = await this.compressBackup(backupPath);
+        finalPath = await BackupUtils.compressFile(backupPath);
         await fs.unlink(backupPath); // Remove the uncompressed file
       }

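For reference, the `.meta.json` sidecar written by `saveBackupMetadata` above is just the JSON-serialized metadata object. An illustrative example of its contents, with an invented hash value and assuming `BackupUtils.calculateFileHash` returns a hex digest:

```typescript
// Illustrative sidecar contents for towercontrol_manual_2025-09-18T14-12-05-737Z.db.gz.meta.json
// (the hash is made up for the example; the actual digest format depends on BackupUtils.calculateFileHash)
const exampleMetadata = {
  databaseHash: 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855',
  createdAt: '2025-09-18T14:12:05.737Z', // Date objects serialize to ISO strings via JSON.stringify
  type: 'manual',
};
```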
@@ -140,16 +283,31 @@ export class BackupService {
         createdAt: new Date(),
         type,
         status: 'success',
+        databaseHash,
       };

+      // Save the backup metadata
+      await this.saveBackupMetadata(path.basename(finalPath), {
+        databaseHash,
+        createdAt: new Date(),
+        type,
+      });
+
       // Clean up old backups
       await this.cleanOldBackups();

-      console.log(`✅ Backup completed: ${backupInfo.filename} (${this.formatFileSize(backupInfo.size)})`);
+      const successMessage = `${backupInfo.filename} created successfully`;
+      console.log(`✅ Backup completed: ${successMessage}`);
+      await this.logBackupAction(type, 'created', successMessage, {
+        hash: databaseHash,
+        size: backupInfo.size
+      });

       return backupInfo;
     } catch (error) {
+      const errorMessage = error instanceof Error ? error.message : 'Unknown error';
       console.error(`❌ Backup failed:`, error);
+      await this.logBackupAction(type, 'failed', `${filename} failed: ${errorMessage}`);

       return {
         id: backupId,
@@ -158,71 +316,11 @@ export class BackupService {
         createdAt: new Date(),
         type,
         status: 'failed',
-        error: error instanceof Error ? error.message : 'Unknown error',
+        error: errorMessage,
       };
     }
   }

-  /**
-   * Creates a SQLite backup using the .backup command
-   */
-  private async createSQLiteBackup(backupPath: string): Promise<void> {
-    // Resolve the database path
-    let dbPath: string;
-    if (process.env.BACKUP_DATABASE_PATH) {
-      // Use the backup-specific variable
-      dbPath = path.resolve(process.cwd(), process.env.BACKUP_DATABASE_PATH);
-    } else if (process.env.DATABASE_URL) {
-      // Fall back to DATABASE_URL if BACKUP_DATABASE_PATH is not set
-      dbPath = path.resolve(process.env.DATABASE_URL.replace('file:', ''));
-    } else {
-      // Default path to prisma/dev.db
-      dbPath = path.resolve(process.cwd(), 'prisma', 'dev.db');
-    }
-
-    // Check that the source file exists
-    try {
-      await fs.stat(dbPath);
-    } catch (error) {
-      console.error(`❌ Source database not found: ${dbPath}`, error);
-      throw new Error(`Source database not found: ${dbPath}`);
-    }
-
-    // Method 1: use the sqlite3 CLI (more reliable)
-    try {
-      const command = `sqlite3 "${dbPath}" ".backup '${backupPath}'"`;
-      await execAsync(command);
-      console.log(`✅ SQLite backup created using CLI: ${backupPath}`);
-      return;
-    } catch (cliError) {
-      console.warn(`⚠️ SQLite CLI backup failed, trying copy method:`, cliError);
-    }
-
-    // Method 2: plain file copy (fallback)
-    try {
-      await fs.copyFile(dbPath, backupPath);
-      console.log(`✅ SQLite backup created using file copy: ${backupPath}`);
-    } catch (copyError) {
-      throw new Error(`Failed to create SQLite backup: ${copyError}`);
-    }
-  }
-
-  /**
-   * Compresses a backup
-   */
-  private async compressBackup(filePath: string): Promise<string> {
-    const compressedPath = `${filePath}.gz`;
-
-    try {
-      const command = `gzip -c "${filePath}" > "${compressedPath}"`;
-      await execAsync(command);
-      console.log(`✅ Backup compressed: ${compressedPath}`);
-      return compressedPath;
-    } catch (error) {
-      console.warn(`⚠️ Compression failed, keeping uncompressed backup:`, error);
-      return filePath;
-    }
-  }
-
   /**
    * Restores a backup
@@ -258,7 +356,7 @@ export class BackupService {
     console.log(`🔄 Decompressing ${backupPath} to ${tempFile}`);

     try {
-      await execAsync(`gunzip -c "${backupPath}" > "${tempFile}"`);
+      await BackupUtils.decompressFileTemp(backupPath, tempFile);
       console.log(`✅ Decompression successful`);

       // Check that the decompressed file exists
@@ -273,8 +371,10 @@ export class BackupService {
    }

    // Back up the current database before restoring
-    const currentBackup = await this.createBackup('manual');
-    console.log(`✅ Current database backed up as: ${currentBackup.filename}`);
+    const currentBackup = await this.createBackup('manual', true); // Force creation
+    if (currentBackup) {
+      console.log(`✅ Current database backed up as: ${currentBackup.filename}`);
+    }

    // Close all connections
    await prisma.$disconnect();
@@ -322,41 +422,19 @@ export class BackupService {
   async listBackups(): Promise<BackupInfo[]> {
     try {
       const currentBackupPath = this.getCurrentBackupPath();
-      await this.ensureBackupDirectory();
+      await BackupUtils.ensureDirectory(currentBackupPath);
       const files = await fs.readdir(currentBackupPath);

       const backups: BackupInfo[] = [];

-      for (const file of files) {
+      for (const file of files) {
         if (file.startsWith('towercontrol_') && (file.endsWith('.db') || file.endsWith('.db.gz'))) {
           const filePath = path.join(currentBackupPath, file);
           const stats = await fs.stat(filePath);

-          // Extract the type and date from the filename
-          // New format: towercontrol_manual_2025-09-18T14-12-05-737Z.db
-          // Old format: towercontrol_2025-09-18T14-12-05-737Z.db (treated as automatic)
-          let type: 'manual' | 'automatic' = 'automatic';
-          let dateMatch = file.match(/towercontrol_(manual|automatic)_(\d{4}-\d{2}-\d{2}T\d{2}-\d{2}-\d{2}-\d{3}Z)/);
-
-          if (!dateMatch) {
-            // Old format without a type - treat as automatic
-            dateMatch = file.match(/towercontrol_(\d{4}-\d{2}-\d{2}T\d{2}-\d{2}-\d{2}-\d{3}Z)/);
-            if (dateMatch) {
-              dateMatch = [dateMatch[0], 'automatic', dateMatch[1]]; // Restructure for compatibility
-            }
-          } else {
-            type = dateMatch[1] as 'manual' | 'automatic';
-          }
-
-          let createdAt = stats.birthtime;
-
-          if (dateMatch && dateMatch[2]) {
-            // Convert the file format to a valid ISO string
-            // Format: 2025-09-18T14-12-05-737Z -> 2025-09-18T14:12:05.737Z
-            const isoString = dateMatch[2]
-              .replace(/T(\d{2})-(\d{2})-(\d{2})-(\d{3})Z/, 'T$1:$2:$3.$4Z');
-            createdAt = new Date(isoString);
-          }
+          // Use the utility to parse the filename
+          const { type, date } = BackupUtils.parseBackupFilename(file);
+          const createdAt = date || stats.birthtime;

           backups.push({
             id: file,
@@ -381,9 +459,19 @@ export class BackupService {
    */
   async deleteBackup(filename: string): Promise<void> {
     const backupPath = path.join(this.getCurrentBackupPath(), filename);
+    const metadataPath = path.join(this.getCurrentBackupPath(), `${filename}.meta.json`);

     try {
+      // Delete the backup file
       await fs.unlink(backupPath);
+
+      // Delete the metadata file if it exists
+      try {
+        await fs.unlink(metadataPath);
+      } catch {
+        // Ignore if the metadata file does not exist
+      }
+
       console.log(`✅ Backup deleted: ${filename}`);
     } catch (error) {
       console.error(`❌ Failed to delete backup ${filename}:`, error);
@@ -434,34 +522,6 @@ export class BackupService {
     }
   }

-  /**
-   * Ensures the backup directory exists
-   */
-  private async ensureBackupDirectory(): Promise<void> {
-    const currentBackupPath = this.getCurrentBackupPath();
-    try {
-      await fs.access(currentBackupPath);
-    } catch {
-      await fs.mkdir(currentBackupPath, { recursive: true });
-      console.log(`📁 Created backup directory: ${currentBackupPath}`);
-    }
-  }
-
-  /**
-   * Formats a file size
-   */
-  private formatFileSize(bytes: number): string {
-    const units = ['B', 'KB', 'MB', 'GB'];
-    let size = bytes;
-    let unitIndex = 0;
-
-    while (size >= 1024 && unitIndex < units.length - 1) {
-      size /= 1024;
-      unitIndex++;
-    }
-
-    return `${size.toFixed(1)} ${units[unitIndex]}`;
-  }
-
   /**
    * Updates the configuration
@@ -481,6 +541,29 @@ export class BackupService {
       backupPath: this.getCurrentBackupPath()
     };
   }
+
+  /**
+   * Reads the backup log file
+   */
+  async getBackupLogs(maxLines: number = 100): Promise<string[]> {
+    try {
+      const logPath = path.join(this.getCurrentBackupPath(), 'backup.log');
+
+      try {
+        const logContent = await fs.readFile(logPath, 'utf-8');
+        const lines = logContent.trim().split('\n').filter(line => line.length > 0);
+
+        // Return the last lines (the most recent)
+        return lines.slice(-maxLines).reverse();
+      } catch {
+        // The log file does not exist yet
+        return [];
+      }
+    } catch (error) {
+      console.error('Error reading backup logs:', error);
+      return [];
+    }
+  }
 }

 // Singleton instance
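The new API route for reading these logs is not part of this file's diff. A minimal sketch of what such a handler could look like, assuming a Next.js App Router route (e.g. `app/api/backup/logs/route.ts`) and the assumed `backupService` singleton export; none of these names are confirmed by this commit:

```typescript
// Hypothetical route handler; path, import alias, and export names are assumptions
import { NextResponse } from 'next/server';
import { backupService } from '@/services/backup';

export async function GET(request: Request) {
  const { searchParams } = new URL(request.url);
  const maxLines = Number(searchParams.get('maxLines') ?? '100');

  // getBackupLogs returns the most recent entries first, or [] if no log file exists yet
  const logs = await backupService.getBackupLogs(maxLines);
  return NextResponse.json({ logs });
}
```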