mirror of
https://github.com/jlengrand/Maestro.git
synced 2026-03-10 08:31:19 +00:00
Add daily backup system with 7-day rotation and auto-restore on corruption
- Daily backups created on app startup (stats.db.daily.YYYY-MM-DD)
- Automatic rotation removes backups older than 7 days
- On corruption, iterate through backups to find a valid one to restore
- Add getAvailableBackups() and restoreFromBackup() public methods
- Backups validated with integrity_check before restoration
- Legacy timestamp backups also recognized as restore candidates
This commit is contained in:
@@ -69,6 +69,7 @@ const mockFsRenameSync = vi.fn();
|
||||
const mockFsStatSync = vi.fn(() => ({ size: 1024 }));
|
||||
const mockFsReadFileSync = vi.fn(() => '0'); // Default: old timestamp (triggers vacuum check)
|
||||
const mockFsWriteFileSync = vi.fn();
|
||||
const mockFsReaddirSync = vi.fn(() => [] as string[]); // Default: empty directory
|
||||
|
||||
// Mock fs
|
||||
vi.mock('fs', () => ({
|
||||
@@ -80,6 +81,7 @@ vi.mock('fs', () => ({
|
||||
statSync: (...args: unknown[]) => mockFsStatSync(...args),
|
||||
readFileSync: (...args: unknown[]) => mockFsReadFileSync(...args),
|
||||
writeFileSync: (...args: unknown[]) => mockFsWriteFileSync(...args),
|
||||
readdirSync: (...args: unknown[]) => mockFsReaddirSync(...args),
|
||||
}));
|
||||
|
||||
// Mock logger
|
||||
@@ -677,6 +679,183 @@ describe('Database file creation on first launch', () => {
|
||||
});
|
||||
});
|
||||
|
||||
/**
|
||||
* Daily backup system tests
|
||||
*/
|
||||
describe('Daily backup system', () => {
|
||||
const mockFsReaddirSync = vi.fn();
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
lastDbPath = null;
|
||||
mockDb.pragma.mockReturnValue([{ user_version: 3 }]);
|
||||
mockDb.prepare.mockReturnValue(mockStatement);
|
||||
mockStatement.run.mockReturnValue({ changes: 1 });
|
||||
mockStatement.get.mockReturnValue({ value: '0' }); // Old vacuum timestamp
|
||||
mockStatement.all.mockReturnValue([]);
|
||||
mockFsExistsSync.mockReturnValue(true);
|
||||
mockFsReaddirSync.mockReturnValue([]);
|
||||
|
||||
// Mock readdirSync in the fs mock
|
||||
vi.doMock('fs', () => ({
|
||||
existsSync: (...args: unknown[]) => mockFsExistsSync(...args),
|
||||
mkdirSync: (...args: unknown[]) => mockFsMkdirSync(...args),
|
||||
copyFileSync: (...args: unknown[]) => mockFsCopyFileSync(...args),
|
||||
unlinkSync: (...args: unknown[]) => mockFsUnlinkSync(...args),
|
||||
renameSync: (...args: unknown[]) => mockFsRenameSync(...args),
|
||||
statSync: (...args: unknown[]) => mockFsStatSync(...args),
|
||||
readFileSync: (...args: unknown[]) => mockFsReadFileSync(...args),
|
||||
writeFileSync: (...args: unknown[]) => mockFsWriteFileSync(...args),
|
||||
readdirSync: (...args: unknown[]) => mockFsReaddirSync(...args),
|
||||
}));
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
vi.resetModules();
|
||||
});
|
||||
|
||||
describe('getAvailableBackups', () => {
|
||||
it('should return empty array when no backups exist', async () => {
|
||||
mockFsReaddirSync.mockReturnValue([]);
|
||||
|
||||
const { StatsDB } = await import('../../../main/stats');
|
||||
const db = new StatsDB();
|
||||
db.initialize();
|
||||
|
||||
const backups = db.getAvailableBackups();
|
||||
expect(backups).toEqual([]);
|
||||
});
|
||||
|
||||
it('should detect daily backup files (stats.db.daily.YYYY-MM-DD)', async () => {
|
||||
mockFsReaddirSync.mockReturnValue([
|
||||
'stats.db.daily.2026-02-01',
|
||||
'stats.db.daily.2026-02-02',
|
||||
'stats.db.daily.2026-02-03',
|
||||
]);
|
||||
|
||||
const { StatsDB } = await import('../../../main/stats');
|
||||
const db = new StatsDB();
|
||||
db.initialize();
|
||||
|
||||
const backups = db.getAvailableBackups();
|
||||
expect(backups).toHaveLength(3);
|
||||
expect(backups[0].date).toBe('2026-02-03'); // Newest first
|
||||
expect(backups[1].date).toBe('2026-02-02');
|
||||
expect(backups[2].date).toBe('2026-02-01');
|
||||
});
|
||||
|
||||
it('should detect legacy timestamp backup files (stats.db.backup.TIMESTAMP)', async () => {
|
||||
// Timestamp for 2026-02-03
|
||||
const timestamp = new Date('2026-02-03').getTime();
|
||||
mockFsReaddirSync.mockReturnValue([`stats.db.backup.${timestamp}`]);
|
||||
|
||||
const { StatsDB } = await import('../../../main/stats');
|
||||
const db = new StatsDB();
|
||||
db.initialize();
|
||||
|
||||
const backups = db.getAvailableBackups();
|
||||
expect(backups).toHaveLength(1);
|
||||
expect(backups[0].date).toBe('2026-02-03');
|
||||
});
|
||||
|
||||
it('should sort backups by date descending (newest first)', async () => {
|
||||
mockFsReaddirSync.mockReturnValue([
|
||||
'stats.db.daily.2026-01-15',
|
||||
'stats.db.daily.2026-02-01',
|
||||
'stats.db.daily.2026-01-20',
|
||||
]);
|
||||
|
||||
const { StatsDB } = await import('../../../main/stats');
|
||||
const db = new StatsDB();
|
||||
db.initialize();
|
||||
|
||||
const backups = db.getAvailableBackups();
|
||||
expect(backups[0].date).toBe('2026-02-01');
|
||||
expect(backups[1].date).toBe('2026-01-20');
|
||||
expect(backups[2].date).toBe('2026-01-15');
|
||||
});
|
||||
});
|
||||
|
||||
describe('restoreFromBackup', () => {
|
||||
it('should return false when backup file does not exist', async () => {
|
||||
mockFsExistsSync.mockImplementation((p: unknown) => {
|
||||
if (typeof p === 'string' && p.includes('nonexistent')) return false;
|
||||
return true;
|
||||
});
|
||||
|
||||
const { StatsDB } = await import('../../../main/stats');
|
||||
const db = new StatsDB();
|
||||
db.initialize();
|
||||
|
||||
const result = db.restoreFromBackup('/path/to/nonexistent/backup');
|
||||
expect(result).toBe(false);
|
||||
});
|
||||
|
||||
it('should close database before restoring', async () => {
|
||||
const { StatsDB } = await import('../../../main/stats');
|
||||
const db = new StatsDB();
|
||||
db.initialize();
|
||||
|
||||
db.restoreFromBackup('/path/to/backup');
|
||||
|
||||
expect(mockDb.close).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should copy backup file to main database path', async () => {
|
||||
const { StatsDB } = await import('../../../main/stats');
|
||||
const db = new StatsDB();
|
||||
db.initialize();
|
||||
|
||||
db.restoreFromBackup('/path/to/backup.db');
|
||||
|
||||
expect(mockFsCopyFileSync).toHaveBeenCalledWith(
|
||||
'/path/to/backup.db',
|
||||
expect.stringContaining('stats.db')
|
||||
);
|
||||
});
|
||||
|
||||
it('should remove WAL and SHM files before restoring', async () => {
|
||||
const { StatsDB } = await import('../../../main/stats');
|
||||
const db = new StatsDB();
|
||||
db.initialize();
|
||||
|
||||
db.restoreFromBackup('/path/to/backup.db');
|
||||
|
||||
// Should attempt to unlink WAL and SHM files
|
||||
expect(mockFsUnlinkSync).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('daily backup creation on initialize', () => {
|
||||
it('should attempt to create daily backup on initialization', async () => {
|
||||
const { StatsDB } = await import('../../../main/stats');
|
||||
const db = new StatsDB();
|
||||
db.initialize();
|
||||
|
||||
// Should have attempted to copy the database for backup
|
||||
expect(mockFsCopyFileSync).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should skip backup creation if today backup already exists', async () => {
|
||||
const today = new Date().toISOString().split('T')[0];
|
||||
mockFsExistsSync.mockImplementation((p: unknown) => {
|
||||
if (typeof p === 'string' && p.includes(`daily.${today}`)) return true;
|
||||
return true;
|
||||
});
|
||||
|
||||
const { StatsDB } = await import('../../../main/stats');
|
||||
const db = new StatsDB();
|
||||
db.initialize();
|
||||
|
||||
// copyFileSync should not be called for daily backup (might be called for other reasons)
|
||||
const dailyBackupCalls = mockFsCopyFileSync.mock.calls.filter(
|
||||
(call) => typeof call[1] === 'string' && call[1].includes('daily')
|
||||
);
|
||||
expect(dailyBackupCalls).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
/**
|
||||
* Auto Run session and task recording tests
|
||||
*/
|
||||
|
||||
@@ -127,6 +127,9 @@ export class StatsDB {
|
||||
this.initialized = true;
|
||||
logger.info(`Stats database initialized at ${this.dbPath}`, LOG_CONTEXT);
|
||||
|
||||
// Create daily backup (keeps last 7 days)
|
||||
this.createDailyBackupIfNeeded();
|
||||
|
||||
// Schedule VACUUM to run weekly instead of on every startup
|
||||
this.vacuumIfNeededWeekly();
|
||||
} catch (error) {
|
||||
@@ -341,13 +344,175 @@ export class StatsDB {
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Daily Backup System
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Handle a corrupted database by backing it up and recreating a fresh database.
|
||||
* Create a daily backup if one hasn't been created today.
|
||||
* Automatically rotates old backups to keep only the last 7 days.
|
||||
*/
|
||||
private createDailyBackupIfNeeded(): void {
|
||||
try {
|
||||
if (!fs.existsSync(this.dbPath)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const today = new Date().toISOString().split('T')[0]; // YYYY-MM-DD
|
||||
const dailyBackupPath = `${this.dbPath}.daily.${today}`;
|
||||
|
||||
// Check if today's backup already exists
|
||||
if (fs.existsSync(dailyBackupPath)) {
|
||||
logger.debug(`Daily backup already exists for ${today}`, LOG_CONTEXT);
|
||||
return;
|
||||
}
|
||||
|
||||
// Create today's backup
|
||||
fs.copyFileSync(this.dbPath, dailyBackupPath);
|
||||
logger.info(`Created daily backup: ${dailyBackupPath}`, LOG_CONTEXT);
|
||||
|
||||
// Rotate old backups (keep last 7 days)
|
||||
this.rotateOldBackups(7);
|
||||
} catch (error) {
|
||||
logger.warn(`Failed to create daily backup: ${error}`, LOG_CONTEXT);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove daily backups older than the specified number of days.
|
||||
*/
|
||||
private rotateOldBackups(keepDays: number): void {
|
||||
try {
|
||||
const dir = path.dirname(this.dbPath);
|
||||
const baseName = path.basename(this.dbPath);
|
||||
const files = fs.readdirSync(dir);
|
||||
|
||||
const cutoffDate = new Date();
|
||||
cutoffDate.setDate(cutoffDate.getDate() - keepDays);
|
||||
const cutoffStr = cutoffDate.toISOString().split('T')[0];
|
||||
|
||||
let removedCount = 0;
|
||||
for (const file of files) {
|
||||
// Match daily backup pattern: stats.db.daily.YYYY-MM-DD
|
||||
const dailyMatch = file.match(new RegExp(`^${baseName}\\.daily\\.(\\d{4}-\\d{2}-\\d{2})$`));
|
||||
if (dailyMatch) {
|
||||
const backupDate = dailyMatch[1];
|
||||
if (backupDate < cutoffStr) {
|
||||
const fullPath = path.join(dir, file);
|
||||
fs.unlinkSync(fullPath);
|
||||
removedCount++;
|
||||
logger.debug(`Removed old daily backup: ${file}`, LOG_CONTEXT);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (removedCount > 0) {
|
||||
logger.info(`Rotated ${removedCount} old daily backup(s)`, LOG_CONTEXT);
|
||||
}
|
||||
} catch (error) {
|
||||
logger.warn(`Failed to rotate old backups: ${error}`, LOG_CONTEXT);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get available daily backups sorted by date (newest first).
|
||||
*/
|
||||
getAvailableBackups(): Array<{ path: string; date: string; size: number }> {
|
||||
try {
|
||||
const dir = path.dirname(this.dbPath);
|
||||
const baseName = path.basename(this.dbPath);
|
||||
const files = fs.readdirSync(dir);
|
||||
const backups: Array<{ path: string; date: string; size: number }> = [];
|
||||
|
||||
for (const file of files) {
|
||||
// Match daily backup pattern
|
||||
const dailyMatch = file.match(new RegExp(`^${baseName}\\.daily\\.(\\d{4}-\\d{2}-\\d{2})$`));
|
||||
if (dailyMatch) {
|
||||
const fullPath = path.join(dir, file);
|
||||
const stats = fs.statSync(fullPath);
|
||||
backups.push({
|
||||
path: fullPath,
|
||||
date: dailyMatch[1],
|
||||
size: stats.size,
|
||||
});
|
||||
}
|
||||
|
||||
// Also include timestamp-based backups (legacy format)
|
||||
const timestampMatch = file.match(new RegExp(`^${baseName}\\.backup\\.(\\d+)$`));
|
||||
if (timestampMatch) {
|
||||
const fullPath = path.join(dir, file);
|
||||
const stats = fs.statSync(fullPath);
|
||||
const timestamp = parseInt(timestampMatch[1], 10);
|
||||
const date = new Date(timestamp).toISOString().split('T')[0];
|
||||
backups.push({
|
||||
path: fullPath,
|
||||
date: date,
|
||||
size: stats.size,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Sort by date descending (newest first)
|
||||
return backups.sort((a, b) => b.date.localeCompare(a.date));
|
||||
} catch (error) {
|
||||
logger.warn(`Failed to list backups: ${error}`, LOG_CONTEXT);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Restore the database from a backup file.
 *
 * Closes any open connection, removes the WAL/SHM sidecar files and the
 * current database file, then copies the backup into place.
 *
 * @param backupPath Path of the backup file to restore from.
 * @returns true if restoration was successful, false otherwise.
 */
restoreFromBackup(backupPath: string): boolean {
  try {
    if (!fs.existsSync(backupPath)) {
      logger.error(`Backup file does not exist: ${backupPath}`, LOG_CONTEXT);
      return false;
    }

    // Close the live connection (if any) before touching files on disk.
    if (this.db) {
      try {
        this.db.close();
      } catch {
        // Ignore errors closing database
      }
      this.db = null;
      this.initialized = false;
    }

    // SQLite sidecar files must not outlive the database they belong to.
    for (const sidecar of [`${this.dbPath}-wal`, `${this.dbPath}-shm`]) {
      if (fs.existsSync(sidecar)) {
        fs.unlinkSync(sidecar);
      }
    }

    // Replace the current database file with the backup.
    if (fs.existsSync(this.dbPath)) {
      fs.unlinkSync(this.dbPath);
    }
    fs.copyFileSync(backupPath, this.dbPath);
    logger.info(`Restored database from backup: ${backupPath}`, LOG_CONTEXT);

    return true;
  } catch (error) {
    logger.error(`Failed to restore from backup: ${error}`, LOG_CONTEXT);
    return false;
  }
}
|
||||
|
||||
/**
|
||||
* Handle a corrupted database by attempting to restore from the latest backup.
|
||||
* If no backup is available, creates a fresh database.
|
||||
*/
|
||||
private recoverFromCorruption(): CorruptionRecoveryResult {
|
||||
logger.warn('Attempting to recover from database corruption...', LOG_CONTEXT);
|
||||
|
||||
try {
|
||||
// Close current database if open
|
||||
if (this.db) {
|
||||
try {
|
||||
this.db.close();
|
||||
@@ -358,40 +523,59 @@ export class StatsDB {
|
||||
this.initialized = false;
|
||||
}
|
||||
|
||||
const backupResult = this.backupDatabase();
|
||||
if (!backupResult.success) {
|
||||
if (fs.existsSync(this.dbPath)) {
|
||||
const timestamp = Date.now();
|
||||
const emergencyBackupPath = `${this.dbPath}.corrupted.${timestamp}`;
|
||||
try {
|
||||
fs.renameSync(this.dbPath, emergencyBackupPath);
|
||||
logger.warn(`Emergency backup created at ${emergencyBackupPath}`, LOG_CONTEXT);
|
||||
} catch {
|
||||
logger.error('Failed to backup corrupted database, data will be lost', LOG_CONTEXT);
|
||||
fs.unlinkSync(this.dbPath);
|
||||
}
|
||||
// First, backup the corrupted database for forensics
|
||||
if (fs.existsSync(this.dbPath)) {
|
||||
const timestamp = Date.now();
|
||||
const corruptedBackupPath = `${this.dbPath}.corrupted.${timestamp}`;
|
||||
try {
|
||||
fs.renameSync(this.dbPath, corruptedBackupPath);
|
||||
logger.warn(`Corrupted database moved to: ${corruptedBackupPath}`, LOG_CONTEXT);
|
||||
} catch {
|
||||
logger.error('Failed to backup corrupted database', LOG_CONTEXT);
|
||||
fs.unlinkSync(this.dbPath);
|
||||
}
|
||||
}
|
||||
|
||||
// Delete WAL and SHM files
|
||||
const walPath = `${this.dbPath}-wal`;
|
||||
const shmPath = `${this.dbPath}-shm`;
|
||||
if (fs.existsSync(walPath)) {
|
||||
fs.unlinkSync(walPath);
|
||||
}
|
||||
if (fs.existsSync(shmPath)) {
|
||||
fs.unlinkSync(shmPath);
|
||||
if (fs.existsSync(walPath)) fs.unlinkSync(walPath);
|
||||
if (fs.existsSync(shmPath)) fs.unlinkSync(shmPath);
|
||||
|
||||
// Try to restore from the latest backup
|
||||
const backups = this.getAvailableBackups();
|
||||
for (const backup of backups) {
|
||||
logger.info(`Attempting to restore from backup: ${backup.path} (${backup.date})`, LOG_CONTEXT);
|
||||
|
||||
// Try to validate the backup before restoring
|
||||
try {
|
||||
const testDb = new Database(backup.path, { readonly: true });
|
||||
const result = testDb.pragma('integrity_check') as Array<{ integrity_check: string }>;
|
||||
testDb.close();
|
||||
|
||||
if (result.length === 1 && result[0].integrity_check === 'ok') {
|
||||
// Backup is valid, restore it
|
||||
if (this.restoreFromBackup(backup.path)) {
|
||||
logger.info(`Successfully restored database from backup: ${backup.date}`, LOG_CONTEXT);
|
||||
return {
|
||||
recovered: true,
|
||||
backupPath: backup.path,
|
||||
restoredFromBackup: true,
|
||||
};
|
||||
}
|
||||
} else {
|
||||
logger.warn(`Backup ${backup.date} failed integrity check, trying next...`, LOG_CONTEXT);
|
||||
}
|
||||
} catch (error) {
|
||||
logger.warn(`Backup ${backup.date} is unreadable: ${error}, trying next...`, LOG_CONTEXT);
|
||||
}
|
||||
}
|
||||
|
||||
if (fs.existsSync(this.dbPath)) {
|
||||
fs.unlinkSync(this.dbPath);
|
||||
}
|
||||
|
||||
logger.info('Corrupted database removed, will create fresh database', LOG_CONTEXT);
|
||||
|
||||
// No valid backup found, will create fresh database
|
||||
logger.warn('No valid backup found, will create fresh database', LOG_CONTEXT);
|
||||
return {
|
||||
recovered: true,
|
||||
backupPath: backupResult.backupPath,
|
||||
restoredFromBackup: false,
|
||||
};
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : String(error);
|
||||
|
||||
@@ -35,8 +35,10 @@ export interface BackupResult {
|
||||
export interface CorruptionRecoveryResult {
|
||||
/** Whether recovery was performed */
|
||||
recovered: boolean;
|
||||
/** Path to the backup of the corrupted database */
|
||||
/** Path to the backup used for restoration (if restored from backup) */
|
||||
backupPath?: string;
|
||||
/** Whether database was restored from a backup (vs creating fresh) */
|
||||
restoredFromBackup?: boolean;
|
||||
/** Error during recovery (if any) */
|
||||
error?: string;
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user