- Marked tasks in `TODO.md` as completed for moving backup-related files to the `data-management` directory and correcting imports across the codebase.
- Updated imports in `backup-manager.ts`, API routes, and various components to reflect the new structure.
- Removed obsolete `backup.ts` and `backup-scheduler.ts` files to streamline the codebase.
- Added new tasks in `TODO.md` for future cleanup and organization of service imports.
import { promises as fs } from 'fs';
import path from 'path';
import { prisma } from '../core/database';
import { userPreferencesService } from '../core/user-preferences';
import { BackupUtils } from '../../lib/backup-utils';
import { getToday } from '@/lib/date-utils';

export interface BackupConfig {
  enabled: boolean;
  interval: 'hourly' | 'daily' | 'weekly';
  maxBackups: number;
  backupPath: string;
  includeUploads?: boolean;
  compression?: boolean;
}

export interface BackupInfo {
  id: string;
  filename: string;
  size: number;
  createdAt: Date;
  type: 'manual' | 'automatic';
  status: 'success' | 'failed' | 'in_progress';
  error?: string;
  databaseHash?: string;
}

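// Illustrative sketch (not part of the original file): the kind of partial
// config a caller might pass to the constructor. Only overridden fields are
// needed; the rest fall back to defaultConfig below. Values are assumptions.
//
//   const service = new BackupService({
//     interval: 'daily',
//     maxBackups: 10,
//     compression: false,
//   });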
export class BackupService {
  private get defaultConfig(): BackupConfig {
    return {
      enabled: true,
      interval: 'hourly',
      maxBackups: 5,
      backupPath: this.getDefaultBackupPath(),
      includeUploads: true,
      compression: true,
    };
  }

  private getDefaultBackupPath(): string {
    return BackupUtils.resolveBackupStoragePath();
  }

  private config: BackupConfig;

  constructor(config?: Partial<BackupConfig>) {
    this.config = { ...this.defaultConfig, ...config };
    // Load the configuration from the DB asynchronously
    this.loadConfigFromDB().catch(() => {
      // Ignore errors during the initial load
    });
  }

  /**
   * Loads the configuration from the database
   */
  private async loadConfigFromDB(): Promise<void> {
    try {
      const preferences = await userPreferencesService.getAllPreferences();
      if (preferences.viewPreferences && typeof preferences.viewPreferences === 'object') {
        const backupConfig = (preferences.viewPreferences as Record<string, unknown>).backupConfig;
        if (backupConfig) {
          this.config = { ...this.defaultConfig, ...backupConfig };
        }
      }
    } catch (error) {
      console.warn('Could not load backup config from DB, using defaults:', error);
    }
  }

  /**
   * Saves the configuration to the database
   */
  private async saveConfigToDB(): Promise<void> {
    try {
      // For now, the backup config is stored as JSON inside viewPreferences
      // TODO: Add a dedicated field in the schema for the backup config
      await prisma.userPreferences.upsert({
        where: { id: 'default' },
        update: {
          viewPreferences: JSON.parse(JSON.stringify({
            ...(await userPreferencesService.getViewPreferences()),
            backupConfig: this.config
          }))
        },
        create: {
          id: 'default',
          kanbanFilters: {},
          viewPreferences: JSON.parse(JSON.stringify({ backupConfig: this.config })),
          columnVisibility: {},
          jiraConfig: {}
        }
      });
    } catch (error) {
      console.error('Failed to save backup config to DB:', error);
    }
  }

  /**
   * Computes a hash of the database to detect changes
   */
  private async calculateDatabaseHash(): Promise<string> {
    try {
      const dbPath = BackupUtils.resolveDatabasePath();
      return await BackupUtils.calculateFileHash(dbPath);
    } catch (error) {
      console.error('Error calculating database hash:', error);
      throw new Error(`Failed to calculate database hash: ${error}`);
    }
  }

  /**
   * Checks whether the database has changed since the last backup
   */
  async hasChangedSinceLastBackup(): Promise<boolean> {
    try {
      const currentHash = await this.calculateDatabaseHash();
      const backups = await this.listBackups();

      if (backups.length === 0) {
        // No previous backup, so there are necessarily changes
        return true;
      }

      // Get the hash of the most recent backup
      const lastBackup = backups[0]; // Backups are sorted by descending date
      const lastBackupHash = await this.getBackupHash(lastBackup.filename);

      if (!lastBackupHash) {
        // No hash available for the last backup, assume there are changes
        console.log('No hash available for last backup, assuming changes');
        return true;
      }

      const hasChanged = currentHash !== lastBackupHash;
      console.log(`Database hash comparison: current=${currentHash.substring(0, 8)}..., last=${lastBackupHash.substring(0, 8)}..., changed=${hasChanged}`);

      return hasChanged;
    } catch (error) {
      console.error('Error checking database changes:', error);
      // On error, assume there are changes to be safe
      return true;
    }
  }
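  // Usage sketch (not part of the original file): a caller such as a scheduler
  // could gate automatic backups on this check. The surrounding scheduler code
  // is an assumption; note that createBackup() already performs this check
  // internally unless forceCreate is true.
  //
  //   if (await backupService.hasChangedSinceLastBackup()) {
  //     await backupService.createBackup('automatic');
  //   }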

  /**
   * Retrieves a backup's hash from its metadata
   */
  private async getBackupHash(filename: string): Promise<string | null> {
    try {
      const metadataPath = path.join(this.getCurrentBackupPath(), `${filename}.meta.json`);

      try {
        const metadataContent = await fs.readFile(metadataPath, 'utf-8');
        const metadata = JSON.parse(metadataContent);
        return metadata.databaseHash || null;
      } catch {
        // Metadata file does not exist, try to compute the backup's hash instead
        return await this.calculateBackupFileHash(filename);
      }
    } catch (error) {
      console.error(`Error getting backup hash for ${filename}:`, error);
      return null;
    }
  }

  /**
   * Computes the hash of an existing backup file
   */
  private async calculateBackupFileHash(filename: string): Promise<string | null> {
    try {
      const backupPath = path.join(this.getCurrentBackupPath(), filename);

      // If the file is compressed, it must be decompressed temporarily
      if (filename.endsWith('.gz')) {
        const tempFile = path.join(this.getCurrentBackupPath(), `temp_${Date.now()}.db`);

        try {
          await BackupUtils.decompressFileTemp(backupPath, tempFile);
          const hash = await BackupUtils.calculateFileHash(tempFile);

          // Clean up the temporary file
          await fs.unlink(tempFile);

          return hash;
        } catch (error) {
          // Clean up the temporary file on error
          try {
            await fs.unlink(tempFile);
          } catch {}
          throw error;
        }
      } else {
        // Uncompressed file
        return await BackupUtils.calculateFileHash(backupPath);
      }
    } catch (error) {
      console.error(`Error calculating hash for backup file ${filename}:`, error);
      return null;
    }
  }

  /**
   * Saves a backup's metadata
   */
  private async saveBackupMetadata(filename: string, metadata: { databaseHash: string; createdAt: Date; type: string }): Promise<void> {
    try {
      const metadataPath = path.join(this.getCurrentBackupPath(), `${filename}.meta.json`);
      await fs.writeFile(metadataPath, JSON.stringify(metadata, null, 2));
    } catch (error) {
      console.error(`Error saving backup metadata for ${filename}:`, error);
      // Do not fail the backup if the metadata cannot be saved
    }
  }

  /**
   * Writes an entry to the backup log file
   */
  private async logBackupAction(type: 'manual' | 'automatic', action: 'created' | 'skipped' | 'failed', details: string, extra?: { hash?: string; size?: number; previousHash?: string }): Promise<void> {
    const logPath = path.join(this.getCurrentBackupPath(), 'backup.log');
    await BackupUtils.writeLogEntry(logPath, type, action, details, extra);
  }

  /**
   * Creates a full backup of the database
   * First checks whether there have been any changes (unless forced)
   */
  async createBackup(type: 'manual' | 'automatic' = 'manual', forceCreate: boolean = false): Promise<BackupInfo | null> {
    const backupId = `backup_${Date.now()}`;
    const filename = BackupUtils.generateBackupFilename(type);
    const backupPath = path.join(this.getCurrentBackupPath(), filename);

    console.log(`🔄 Starting ${type} backup: ${filename}`);

    try {
      // Check for changes (unless forced)
      if (!forceCreate) {
        const hasChanged = await this.hasChangedSinceLastBackup();

        if (!hasChanged) {
          const currentHash = await this.calculateDatabaseHash();
          const backups = await this.listBackups();
          const lastBackupHash = backups.length > 0 ? await this.getBackupHash(backups[0].filename) : null;

          const message = `No changes detected since last backup`;
          console.log(`⏭️ Skipping ${type} backup: ${message}`);
          await this.logBackupAction(type, 'skipped', message, {
            hash: currentHash,
            previousHash: lastBackupHash || undefined
          });
          return null;
        }

        console.log(`📝 Changes detected, proceeding with ${type} backup`);
      } else {
        console.log(`🔧 Forced ${type} backup, skipping change detection`);
      }

      // Compute the database hash before the backup
      const databaseHash = await this.calculateDatabaseHash();

      // Create the backup directory if necessary
      await BackupUtils.ensureDirectory(this.getCurrentBackupPath());

      // Create the SQLite backup
      const dbPath = BackupUtils.resolveDatabasePath();
      await BackupUtils.createSQLiteBackup(dbPath, backupPath);

      // Compress if enabled
      let finalPath = backupPath;
      if (this.config.compression) {
        finalPath = await BackupUtils.compressFile(backupPath);
        await fs.unlink(backupPath); // Remove the uncompressed file
      }

      // Get the file stats
      const stats = await fs.stat(finalPath);

      const backupInfo: BackupInfo = {
        id: backupId,
        filename: path.basename(finalPath),
        size: stats.size,
        createdAt: getToday(),
        type,
        status: 'success',
        databaseHash,
      };

      // Save the backup metadata
      await this.saveBackupMetadata(path.basename(finalPath), {
        databaseHash,
        createdAt: getToday(),
        type,
      });

      // Clean up old backups
      await this.cleanOldBackups();

      const successMessage = `${backupInfo.filename} created successfully`;
      console.log(`✅ Backup completed: ${successMessage}`);
      await this.logBackupAction(type, 'created', successMessage, {
        hash: databaseHash,
        size: backupInfo.size
      });

      return backupInfo;
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : 'Unknown error';
      console.error(`❌ Backup failed:`, error);
      await this.logBackupAction(type, 'failed', `${filename} failed: ${errorMessage}`);

      return {
        id: backupId,
        filename,
        size: 0,
        createdAt: getToday(),
        type,
        status: 'failed',
        error: errorMessage,
      };
    }
  }
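  // Usage sketch (not part of the original file): the three possible outcomes
  // of createBackup(). The calling context is an assumption.
  //
  //   const info = await backupService.createBackup('manual');
  //   if (info === null) {
  //     // skipped: nothing changed since the last backup
  //   } else if (info.status === 'failed') {
  //     console.error(info.error);
  //   } else {
  //     console.log(`Created ${info.filename} (${info.size} bytes)`);
  //   }
  //
  //   // Pass forceCreate = true to bypass change detection:
  //   await backupService.createBackup('manual', true);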

  /**
   * Restores a backup
   */
  async restoreBackup(filename: string): Promise<void> {
    const backupPath = path.join(this.getCurrentBackupPath(), filename);

    // Resolve the database path
    let dbPath: string;
    if (process.env.BACKUP_DATABASE_PATH) {
      // Use the backup-specific variable
      dbPath = path.resolve(process.cwd(), process.env.BACKUP_DATABASE_PATH);
    } else if (process.env.DATABASE_URL) {
      // Fall back to DATABASE_URL if BACKUP_DATABASE_PATH is not set
      dbPath = path.resolve(process.env.DATABASE_URL.replace('file:', ''));
    } else {
      // Default path to prisma/dev.db
      dbPath = path.resolve(process.cwd(), 'prisma', 'dev.db');
    }

    console.log(`🔄 Restore paths - backup: ${backupPath}, target: ${dbPath}`);

    console.log(`🔄 Starting restore from: ${filename}`);

    try {
      // Verify that the backup file exists
      await fs.access(backupPath);

      // Decompress if necessary
      let sourceFile = backupPath;
      if (filename.endsWith('.gz')) {
        const tempFile = backupPath.replace('.gz', '');
        console.log(`🔄 Decompressing ${backupPath} to ${tempFile}`);

        try {
          await BackupUtils.decompressFileTemp(backupPath, tempFile);
          console.log(`✅ Decompression successful`);

          // Verify that the decompressed file exists
          await fs.access(tempFile);
          console.log(`✅ Decompressed file exists: ${tempFile}`);

          sourceFile = tempFile;
        } catch (decompError) {
          console.error(`❌ Decompression failed:`, decompError);
          throw decompError;
        }
      }

      // Back up the current database before restoring
      const currentBackup = await this.createBackup('manual', true); // Force creation
      if (currentBackup) {
        console.log(`✅ Current database backed up as: ${currentBackup.filename}`);
      }

      // Close all connections
      await prisma.$disconnect();

      // Verify that the source file exists
      await fs.access(sourceFile);
      console.log(`✅ Source file verified: ${sourceFile}`);

      // Replace the database
      console.log(`🔄 Copying ${sourceFile} to ${dbPath}`);
      await fs.copyFile(sourceFile, dbPath);
      console.log(`✅ Database file copied successfully`);

      // Clean up the temporary file if one was decompressed
      if (sourceFile !== backupPath) {
        await fs.unlink(sourceFile);
      }

      // Reconnect to the database
      await prisma.$connect();

      // Verify integrity after the restore
      await this.verifyDatabaseHealth();

      console.log(`✅ Database restored from: ${filename}`);
    } catch (error) {
      console.error(`❌ Restore failed:`, error);
      throw new Error(`Failed to restore backup: ${error}`);
    }
  }
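  // Usage sketch (not part of the original file): restoring the most recent
  // backup. The selection logic here is illustrative only.
  //
  //   const backups = await backupService.listBackups(); // newest first
  //   if (backups.length > 0) {
  //     await backupService.restoreBackup(backups[0].filename);
  //   }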

  /**
   * Gets the current backup path (always up to date)
   * Forces a re-read of the environment variables on every call
   */
  private getCurrentBackupPath(): string {
    // Always recompute from the environment variables
    // to avoid caching issues on refresh
    return this.getDefaultBackupPath();
  }

  /**
   * Lists all available backups
   */
  async listBackups(): Promise<BackupInfo[]> {
    try {
      const currentBackupPath = this.getCurrentBackupPath();
      await BackupUtils.ensureDirectory(currentBackupPath);
      const files = await fs.readdir(currentBackupPath);

      const backups: BackupInfo[] = [];

      for (const file of files) {
        if (file.startsWith('towercontrol_') && (file.endsWith('.db') || file.endsWith('.db.gz'))) {
          const filePath = path.join(currentBackupPath, file);
          const stats = await fs.stat(filePath);

          // Use the utility to parse the filename
          const { type, date } = BackupUtils.parseBackupFilename(file);
          const createdAt = date || stats.birthtime;

          backups.push({
            id: file,
            filename: file,
            size: stats.size,
            createdAt,
            type,
            status: 'success',
          });
        }
      }

      return backups.sort((a, b) => b.createdAt.getTime() - a.createdAt.getTime());
    } catch (error) {
      console.error('Error listing backups:', error);
      return [];
    }
  }

  /**
   * Deletes a backup
   */
  async deleteBackup(filename: string): Promise<void> {
    const backupPath = path.join(this.getCurrentBackupPath(), filename);
    const metadataPath = path.join(this.getCurrentBackupPath(), `${filename}.meta.json`);

    try {
      // Delete the backup file
      await fs.unlink(backupPath);

      // Delete the metadata file if it exists
      try {
        await fs.unlink(metadataPath);
      } catch {
        // Ignore if the metadata file does not exist
      }

      console.log(`✅ Backup deleted: ${filename}`);
    } catch (error) {
      console.error(`❌ Failed to delete backup ${filename}:`, error);
      throw error;
    }
  }

  /**
   * Verifies the integrity of the database
   */
  async verifyDatabaseHealth(): Promise<void> {
    try {
      // Simple connection test
      await prisma.$queryRaw`SELECT 1`;

      // SQLite integrity check
      const result = await prisma.$queryRaw<{integrity_check: string}[]>`PRAGMA integrity_check`;

      if (result.length > 0 && result[0].integrity_check !== 'ok') {
        throw new Error(`Database integrity check failed: ${result[0].integrity_check}`);
      }

      console.log('✅ Database health check passed');
    } catch (error) {
      console.error('❌ Database health check failed:', error);
      throw error;
    }
  }

  /**
   * Cleans up old backups according to the configuration
   */
  private async cleanOldBackups(): Promise<void> {
    try {
      const backups = await this.listBackups();

      if (backups.length > this.config.maxBackups) {
        const toDelete = backups.slice(this.config.maxBackups);

        for (const backup of toDelete) {
          await this.deleteBackup(backup.filename);
        }

        console.log(`🧹 Cleaned ${toDelete.length} old backups`);
      }
    } catch (error) {
      console.error('Error cleaning old backups:', error);
    }
  }

  /**
   * Updates the configuration
   */
  async updateConfig(newConfig: Partial<BackupConfig>): Promise<void> {
    this.config = { ...this.config, ...newConfig };
    await this.saveConfigToDB();
  }

  /**
   * Gets the current configuration
   */
  getConfig(): BackupConfig {
    // Return a config with the up-to-date path
    return {
      ...this.config,
      backupPath: this.getCurrentBackupPath()
    };
  }

  /**
   * Reads the backup log file
   */
  async getBackupLogs(maxLines: number = 100): Promise<string[]> {
    try {
      const logPath = path.join(this.getCurrentBackupPath(), 'backup.log');

      try {
        const logContent = await fs.readFile(logPath, 'utf-8');
        const lines = logContent.trim().split('\n').filter(line => line.length > 0);

        // Return the last lines (the most recent ones)
        return lines.slice(-maxLines).reverse();
      } catch {
        // Log file does not exist yet
        return [];
      }
    } catch (error) {
      console.error('Error reading backup logs:', error);
      return [];
    }
  }
}

// Singleton instance
export const backupService = new BackupService();
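
// Usage sketch (not part of the original file): typical calls from an API
// route or admin UI. The route context is an assumption, not confirmed here.
//
//   const config = backupService.getConfig();
//   await backupService.updateConfig({ maxBackups: 10 });
//   const backups = await backupService.listBackups();
//   const logs = await backupService.getBackupLogs(50);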