chore: refactor project structure and clean up unused components

- Updated `TODO.md` to reflect new testing tasks and final structure expectations.
- Simplified TypeScript path mappings in `tsconfig.json` for better clarity.
- Revised business logic separation rules in `.cursor/rules` to align with new directory structure.
- Deleted unused client components and services to streamline the codebase.
- Adjusted import paths in scripts to match the new structure.
This commit is contained in:
Julien Froidefond
2025-09-21 10:26:35 +02:00
parent 9dc1fafa76
commit 4152b0bdfc
130 changed files with 360 additions and 413 deletions

211
src/lib/backup-utils.ts Normal file
View File

@@ -0,0 +1,211 @@
import { promises as fs } from 'fs';
import { exec } from 'child_process';
import { promisify } from 'util';
import path from 'path';
import { createHash } from 'crypto';
const execAsync = promisify(exec);

/**
 * Utility helpers for backup operations: file hashing, environment-driven
 * path resolution, SQLite snapshots, gzip (de)compression, backup filename
 * conventions and best-effort logging. All members are static; the class is
 * used as a namespace.
 */
export class BackupUtils {
  /**
   * Computes the SHA-256 hash of a file's contents.
   *
   * @param filePath - File to hash.
   * @returns Hex-encoded digest.
   * @throws Error when the file cannot be read.
   */
  static async calculateFileHash(filePath: string): Promise<string> {
    try {
      const fileBuffer = await fs.readFile(filePath);
      return createHash('sha256').update(fileBuffer).digest('hex');
    } catch (error) {
      throw new Error(`Failed to calculate hash for ${filePath}: ${error}`);
    }
  }

  /**
   * Resolves the database file path from the environment.
   * Precedence: BACKUP_DATABASE_PATH > DATABASE_URL (a `file:` URL) >
   * the default `prisma/dev.db` under the current working directory.
   */
  static resolveDatabasePath(): string {
    if (process.env.BACKUP_DATABASE_PATH) {
      return path.resolve(process.cwd(), process.env.BACKUP_DATABASE_PATH);
    }
    if (process.env.DATABASE_URL) {
      // DATABASE_URL is expected to look like "file:./dev.db" (SQLite).
      return path.resolve(process.env.DATABASE_URL.replace('file:', ''));
    }
    return path.resolve(process.cwd(), 'prisma', 'dev.db');
  }

  /**
   * Resolves the directory where backups are stored.
   * BACKUP_STORAGE_PATH wins; otherwise `data/backups` in production and
   * `backups` elsewhere (both relative to the working directory).
   */
  static resolveBackupStoragePath(): string {
    if (process.env.BACKUP_STORAGE_PATH) {
      return path.resolve(process.cwd(), process.env.BACKUP_STORAGE_PATH);
    }
    return process.env.NODE_ENV === 'production'
      ? path.join(process.cwd(), 'data', 'backups')
      : path.join(process.cwd(), 'backups');
  }

  /**
   * Creates a SQLite backup, preferring the sqlite3 CLI `.backup` command
   * and falling back to a plain file copy if the CLI fails or is missing.
   *
   * NOTE(review): paths are interpolated into a shell command string; they
   * are assumed to come from trusted configuration. A single quote in
   * backupPath would break the `.backup '…'` argument — confirm inputs.
   *
   * @param sourcePath - Existing SQLite database file.
   * @param backupPath - Destination file for the snapshot.
   * @throws Error if the source is missing or both methods fail.
   */
  static async createSQLiteBackup(sourcePath: string, backupPath: string): Promise<void> {
    // Fail fast if the source database does not exist.
    try {
      await fs.stat(sourcePath);
    } catch {
      throw new Error(`Source database not found: ${sourcePath}`);
    }
    // Method 1: sqlite3 CLI (more reliable).
    try {
      const command = `sqlite3 "${sourcePath}" ".backup '${backupPath}'"`;
      await execAsync(command);
      console.log(`✅ SQLite backup created using CLI: ${backupPath}`);
      return;
    } catch (cliError) {
      console.warn(`⚠️ SQLite CLI backup failed, trying copy method:`, cliError);
    }
    // Method 2: plain file copy (fallback).
    try {
      await fs.copyFile(sourcePath, backupPath);
      console.log(`✅ SQLite backup created using file copy: ${backupPath}`);
    } catch (copyError) {
      throw new Error(`Failed to create SQLite backup: ${copyError}`);
    }
  }

  /**
   * Compresses a file with gzip, producing `<file>.gz` next to it.
   * Best-effort: on failure any truncated `.gz` left by the shell
   * redirection is removed and the original path is returned.
   *
   * @returns Path of the compressed file, or the original path on failure.
   */
  static async compressFile(filePath: string): Promise<string> {
    const compressedPath = `${filePath}.gz`;
    try {
      const command = `gzip -c "${filePath}" > "${compressedPath}"`;
      await execAsync(command);
      console.log(`✅ File compressed: ${compressedPath}`);
      return compressedPath;
    } catch (error) {
      console.warn(`⚠️ Compression failed, keeping uncompressed file:`, error);
      // The `>` redirection may have created a partial .gz before gzip
      // failed; remove it so callers never see a corrupt archive.
      await fs.unlink(compressedPath).catch(() => {});
      return filePath;
    }
  }

  /**
   * Decompresses a gzip file to a temporary location.
   *
   * @throws Error when gunzip fails.
   */
  static async decompressFileTemp(compressedPath: string, tempPath: string): Promise<void> {
    try {
      await execAsync(`gunzip -c "${compressedPath}" > "${tempPath}"`);
    } catch (error) {
      throw new Error(`Failed to decompress ${compressedPath}: ${error}`);
    }
  }

  /**
   * Formats a byte count using binary units (1 KB = 1024 B), one decimal,
   * capped at GB.
   */
  static formatFileSize(bytes: number): string {
    const units = ['B', 'KB', 'MB', 'GB'];
    let size = bytes;
    let unitIndex = 0;
    while (size >= 1024 && unitIndex < units.length - 1) {
      size /= 1024;
      unitIndex++;
    }
    return `${size.toFixed(1)} ${units[unitIndex]}`;
  }

  /**
   * Ensures a directory exists, creating it (recursively) if needed.
   */
  static async ensureDirectory(dirPath: string): Promise<void> {
    try {
      await fs.access(dirPath);
    } catch {
      await fs.mkdir(dirPath, { recursive: true });
      console.log(`📁 Created directory: ${dirPath}`);
    }
  }

  /**
   * Parses a backup filename into its type and timestamp.
   *
   * New format: towercontrol_manual_2025-09-18T14-12-05-737Z.db
   * Old format: towercontrol_2025-09-18T14-12-05-737Z.db (treated as automatic)
   *
   * @returns The backup type and its Date, or `date: null` when no
   *          timestamp can be extracted.
   */
  static parseBackupFilename(filename: string): { type: 'manual' | 'automatic'; date: Date | null } {
    let type: 'manual' | 'automatic' = 'automatic';
    let timestamp: string | undefined;
    const typedMatch = filename.match(/towercontrol_(manual|automatic)_(\d{4}-\d{2}-\d{2}T\d{2}-\d{2}-\d{2}-\d{3}Z)/);
    if (typedMatch) {
      type = typedMatch[1] as 'manual' | 'automatic';
      timestamp = typedMatch[2];
    } else {
      // Legacy filenames carry no type marker: classify them as automatic.
      const legacyMatch = filename.match(/towercontrol_(\d{4}-\d{2}-\d{2}T\d{2}-\d{2}-\d{2}-\d{3}Z)/);
      timestamp = legacyMatch ? legacyMatch[1] : undefined;
    }
    let date: Date | null = null;
    if (timestamp) {
      // File-safe timestamp back to ISO:
      // 2025-09-18T14-12-05-737Z -> 2025-09-18T14:12:05.737Z
      const isoString = timestamp.replace(/T(\d{2})-(\d{2})-(\d{2})-(\d{3})Z/, 'T$1:$2:$3.$4Z');
      date = new Date(isoString);
    }
    return { type, date };
  }

  /**
   * Builds a backup filename embedding the type and a file-safe UTC
   * timestamp (':' and '.' replaced with '-'), in the exact format that
   * parseBackupFilename reads back.
   */
  static generateBackupFilename(type: 'manual' | 'automatic'): string {
    const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
    return `towercontrol_${type}_${timestamp}.db`;
  }

  /**
   * Appends a single formatted entry to the backup log file.
   * Logging is best-effort: failures are reported to the console but never
   * propagated, so a broken log cannot fail the backup itself.
   *
   * @param logPath - Log file to append to.
   * @param type    - Backup type recorded in the entry.
   * @param action  - Outcome recorded in the entry.
   * @param details - Free-form message.
   * @param extra   - Optional hash/size/previousHash appended to the line.
   */
  static async writeLogEntry(
    logPath: string,
    type: 'manual' | 'automatic',
    action: 'created' | 'skipped' | 'failed',
    details: string,
    extra?: { hash?: string; size?: number; previousHash?: string }
  ): Promise<void> {
    try {
      const date = new Date().toLocaleString('fr-FR');
      let logEntry = `[${date}] ${type.toUpperCase()} BACKUP ${action.toUpperCase()}: ${details}`;
      if (extra) {
        if (extra.hash) {
          logEntry += ` | Hash: ${extra.hash.substring(0, 12)}...`;
        }
        // Explicit undefined check so a legitimate 0-byte size is still logged
        // (a truthy check would silently drop it).
        if (extra.size !== undefined) {
          logEntry += ` | Size: ${BackupUtils.formatFileSize(extra.size)}`;
        }
        if (extra.previousHash) {
          logEntry += ` | Previous: ${extra.previousHash.substring(0, 12)}...`;
        }
      }
      logEntry += '\n';
      await fs.appendFile(logPath, logEntry);
    } catch (error) {
      console.error('Error writing to backup log:', error);
      // Do not fail the surrounding operation just because logging failed.
    }
  }
}