Merge pull request #252 from pedramamini/code-refactor

refactor: decompose stats-db into focused modules
This commit is contained in:
Raza Mair
2026-01-30 20:37:15 +05:00
committed by GitHub
29 changed files with 9393 additions and 8278 deletions

View File

@@ -8,8 +8,8 @@
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import { ipcMain, BrowserWindow } from 'electron';
import { registerStatsHandlers } from '../../../../main/ipc/handlers/stats';
import * as statsDbModule from '../../../../main/stats-db';
import type { StatsDB } from '../../../../main/stats-db';
import * as statsDbModule from '../../../../main/stats';
import type { StatsDB } from '../../../../main/stats';
// Mock electron's ipcMain and BrowserWindow
vi.mock('electron', () => ({
@@ -21,7 +21,7 @@ vi.mock('electron', () => ({
}));
// Mock the stats-db module
vi.mock('../../../../main/stats-db', () => ({
vi.mock('../../../../main/stats', () => ({
getStatsDB: vi.fn(),
}));

View File

@@ -8,7 +8,7 @@ import { setupStatsListener } from '../../../main/process-listeners/stats-listen
import type { ProcessManager } from '../../../main/process-manager';
import type { SafeSendFn } from '../../../main/utils/safe-send';
import type { QueryCompleteData } from '../../../main/process-manager/types';
import type { StatsDB } from '../../../main/stats-db';
import type { StatsDB } from '../../../main/stats';
import type { ProcessListenerDependencies } from '../../../main/process-listeners/types';
describe('Stats Listener', () => {

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,601 @@
/**
* Tests for VACUUM scheduling, clearOldData, and database maintenance.
*
* Note: better-sqlite3 is a native module compiled for Electron's Node version.
* Direct testing with the native module in vitest is not possible without
* electron-rebuild for the vitest runtime. These tests use mocked database
* operations to verify the logic without requiring the actual native module.
*
* For full integration testing of the SQLite database, use the Electron test
* environment (e2e tests) where the native module is properly loaded.
*/
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import * as path from 'path';
import * as os from 'os';
// Track Database constructor calls to verify file path.
// Captured by the better-sqlite3 mock class below; reset in each suite's beforeEach.
let lastDbPath: string | null = null;
// Store mock references so they can be accessed in tests.
// mockStatement stands in for a prepared statement; individual tests override
// run/get/all to simulate specific query results.
const mockStatement = {
  run: vi.fn(() => ({ changes: 1 })),
  get: vi.fn(() => ({ count: 0, total_duration: 0 })),
  all: vi.fn(() => []),
};
// mockDb stands in for the better-sqlite3 Database handle.
const mockDb = {
  // pragma defaults to reporting schema user_version 0 (fresh database).
  pragma: vi.fn(() => [{ user_version: 0 }]),
  prepare: vi.fn(() => mockStatement),
  close: vi.fn(),
  // Transaction mock that immediately executes the function
  transaction: vi.fn((fn: () => void) => {
    return () => fn();
  }),
};
// Mock better-sqlite3 as a class.
// NOTE: vi.mock is hoisted by vitest, so the factory must not reference
// variables before their declaration at call time; it closes over mockDb,
// which is safe because the class body is only evaluated on construction.
vi.mock('better-sqlite3', () => {
  return {
    default: class MockDatabase {
      constructor(dbPath: string) {
        lastDbPath = dbPath;
      }
      pragma = mockDb.pragma;
      prepare = mockDb.prepare;
      close = mockDb.close;
      transaction = mockDb.transaction;
    },
  };
});
// Mock electron's app module with trackable userData path so the database
// file path resolves under a temp directory instead of a real profile dir.
const mockUserDataPath = path.join(os.tmpdir(), 'maestro-test-stats-db');
vi.mock('electron', () => ({
  app: {
    getPath: vi.fn((name: string) => {
      if (name === 'userData') return mockUserDataPath;
      return os.tmpdir();
    }),
  },
}));
// Track fs calls.
// Each fs function is wrapped in its own vi.fn so tests can re-mock behavior
// per call (e.g. make statSync throw to simulate a missing database file).
const mockFsExistsSync = vi.fn(() => true);
const mockFsMkdirSync = vi.fn();
const mockFsCopyFileSync = vi.fn();
const mockFsUnlinkSync = vi.fn();
const mockFsRenameSync = vi.fn();
const mockFsStatSync = vi.fn(() => ({ size: 1024 }));
const mockFsReadFileSync = vi.fn(() => '0'); // Default: old timestamp (triggers vacuum check)
const mockFsWriteFileSync = vi.fn();
// Mock fs — delegate through the wrapper fns above so their implementations
// can be swapped after the (hoisted) vi.mock factory has run.
vi.mock('fs', () => ({
  existsSync: (...args: unknown[]) => mockFsExistsSync(...args),
  mkdirSync: (...args: unknown[]) => mockFsMkdirSync(...args),
  copyFileSync: (...args: unknown[]) => mockFsCopyFileSync(...args),
  unlinkSync: (...args: unknown[]) => mockFsUnlinkSync(...args),
  renameSync: (...args: unknown[]) => mockFsRenameSync(...args),
  statSync: (...args: unknown[]) => mockFsStatSync(...args),
  readFileSync: (...args: unknown[]) => mockFsReadFileSync(...args),
  writeFileSync: (...args: unknown[]) => mockFsWriteFileSync(...args),
}));
// Mock logger so tests can assert on info/debug messages without console noise.
vi.mock('../../../main/utils/logger', () => ({
  logger: {
    info: vi.fn(),
    warn: vi.fn(),
    error: vi.fn(),
    debug: vi.fn(),
  },
}));
// Import types only - we'll test the type definitions
import type {
QueryEvent,
AutoRunSession,
AutoRunTask,
SessionLifecycleEvent,
StatsTimeRange,
StatsFilters,
StatsAggregation,
} from '../../../shared/stats-types';
// Exercises StatsDB VACUUM scheduling, size checks, and clearOldData against
// the fully mocked better-sqlite3 / fs modules defined above. Tests re-import
// '../../../main/stats' after vi.resetModules() so each test gets a fresh module.
describe('Database VACUUM functionality', () => {
  beforeEach(() => {
    vi.clearAllMocks();
    lastDbPath = null;
    // Restore default mock behavior cleared by clearAllMocks.
    mockDb.pragma.mockReturnValue([{ user_version: 0 }]);
    mockDb.prepare.mockReturnValue(mockStatement);
    mockStatement.run.mockReturnValue({ changes: 1 });
    mockFsExistsSync.mockReturnValue(true);
    // Reset statSync to throw by default (simulates file not existing),
    // which makes getDatabaseSize report 0 throughout this suite.
    mockFsStatSync.mockImplementation(() => {
      throw new Error('ENOENT: no such file or directory');
    });
  });
  afterEach(() => {
    vi.resetModules();
  });
  describe('getDatabaseSize', () => {
    it('should return 0 when statSync throws (file missing)', async () => {
      // The mocked fs.statSync throws ENOENT (set in beforeEach), so
      // getDatabaseSize catches the error and returns 0.
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      const size = db.getDatabaseSize();
      // statSync throws in this suite, so the size is reported as 0.
      expect(size).toBe(0);
    });
    it('should handle statSync gracefully when file does not exist', async () => {
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      // getDatabaseSize should not throw
      expect(() => db.getDatabaseSize()).not.toThrow();
    });
  });
  describe('vacuum', () => {
    it('should execute VACUUM SQL command', async () => {
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      // Clear mocks from initialization
      mockStatement.run.mockClear();
      mockDb.prepare.mockClear();
      const result = db.vacuum();
      expect(result.success).toBe(true);
      expect(mockDb.prepare).toHaveBeenCalledWith('VACUUM');
      expect(mockStatement.run).toHaveBeenCalled();
    });
    it('should return success true when vacuum completes', async () => {
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      const result = db.vacuum();
      expect(result.success).toBe(true);
      expect(result.error).toBeUndefined();
    });
    it('should return bytesFreed of 0 when sizes are equal (mocked)', async () => {
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      const result = db.vacuum();
      // With mock fs, both before and after sizes will be 0
      expect(result.bytesFreed).toBe(0);
    });
    it('should return error if database not initialized', async () => {
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      // Don't initialize
      const result = db.vacuum();
      expect(result.success).toBe(false);
      expect(result.bytesFreed).toBe(0);
      expect(result.error).toBe('Database not initialized');
    });
    it('should handle VACUUM failure gracefully', async () => {
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      // Make VACUUM fail while leaving all other statements functional.
      mockDb.prepare.mockImplementation((sql: string) => {
        if (sql === 'VACUUM') {
          return {
            run: vi.fn().mockImplementation(() => {
              throw new Error('database is locked');
            }),
          };
        }
        return mockStatement;
      });
      const result = db.vacuum();
      expect(result.success).toBe(false);
      expect(result.error).toContain('database is locked');
    });
    it('should log vacuum progress with size information', async () => {
      const { logger } = await import('../../../main/utils/logger');
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      // Clear logger mocks from initialization
      vi.mocked(logger.info).mockClear();
      db.vacuum();
      // Check that logger was called with vacuum-related messages
      expect(logger.info).toHaveBeenCalledWith(
        expect.stringContaining('Starting VACUUM'),
        expect.any(String)
      );
      expect(logger.info).toHaveBeenCalledWith(
        expect.stringContaining('VACUUM completed'),
        expect.any(String)
      );
    });
  });
  describe('vacuumIfNeeded', () => {
    it('should skip vacuum if database size is 0 (below threshold)', async () => {
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      // Clear mocks from initialization
      mockStatement.run.mockClear();
      mockDb.prepare.mockClear();
      const result = db.vacuumIfNeeded();
      // Size is 0 (mock fs), which is below 100MB threshold
      expect(result.vacuumed).toBe(false);
      expect(result.databaseSize).toBe(0);
      expect(result.result).toBeUndefined();
    });
    it('should return correct databaseSize in result', async () => {
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      const result = db.vacuumIfNeeded();
      // Size property should be present
      expect(typeof result.databaseSize).toBe('number');
    });
    it('should use default 100MB threshold when not specified', async () => {
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      // With 0 byte size (mocked), should skip vacuum
      const result = db.vacuumIfNeeded();
      expect(result.vacuumed).toBe(false);
    });
    // NOTE(review): the title below says "should not vacuum", but the assertion
    // expects vacuumed === true. The skip condition is databaseSize < thresholdBytes,
    // and 0 < 0 is false, so the vacuum DOES run. Rename the test to match the
    // asserted behavior.
    it('should not vacuum with threshold 0 and size 0 since 0 is not > 0', async () => {
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      // Clear mocks from initialization
      mockStatement.run.mockClear();
      mockDb.prepare.mockClear();
      // Threshold 0 and size 0: skip condition is databaseSize < thresholdBytes.
      const result = db.vacuumIfNeeded(0);
      expect(result.databaseSize).toBe(0);
      // 0 < 0 is false, so the skip is not taken and vacuum proceeds.
      expect(result.vacuumed).toBe(true);
    });
    it('should log appropriate message when skipping vacuum', async () => {
      const { logger } = await import('../../../main/utils/logger');
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      // Clear logger mocks from initialization
      vi.mocked(logger.debug).mockClear();
      db.vacuumIfNeeded();
      expect(logger.debug).toHaveBeenCalledWith(
        expect.stringContaining('below vacuum threshold'),
        expect.any(String)
      );
    });
  });
  describe('vacuumIfNeeded with custom thresholds', () => {
    it('should respect custom threshold parameter (threshold = -1 means always vacuum)', async () => {
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      // Clear mocks from initialization
      mockStatement.run.mockClear();
      mockDb.prepare.mockClear();
      // With -1 threshold, 0 > -1 is true, so should vacuum
      const result = db.vacuumIfNeeded(-1);
      expect(result.vacuumed).toBe(true);
      expect(mockDb.prepare).toHaveBeenCalledWith('VACUUM');
    });
    it('should not vacuum with very large threshold', async () => {
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      // Clear mocks from initialization
      mockStatement.run.mockClear();
      mockDb.prepare.mockClear();
      // With 1TB threshold, should NOT trigger vacuum
      const result = db.vacuumIfNeeded(1024 * 1024 * 1024 * 1024);
      expect(result.vacuumed).toBe(false);
      expect(mockDb.prepare).not.toHaveBeenCalledWith('VACUUM');
    });
  });
  describe('initialize with vacuumIfNeeded integration', () => {
    it('should call vacuumIfNeededWeekly during initialization', async () => {
      const { logger } = await import('../../../main/utils/logger');
      // Clear logger mocks before test
      vi.mocked(logger.debug).mockClear();
      // Mock timestamp file as old (0 = epoch, triggers vacuum check)
      mockFsReadFileSync.mockReturnValue('0');
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      // With old timestamp, vacuumIfNeededWeekly should proceed to call vacuumIfNeeded,
      // which logs "below vacuum threshold" because the outer beforeEach makes
      // statSync throw, so the database size reads as 0.
      expect(logger.debug).toHaveBeenCalledWith(
        expect.stringContaining('below vacuum threshold'),
        expect.any(String)
      );
    });
    it('should complete initialization even if vacuum would fail', async () => {
      // Make VACUUM fail if called
      mockDb.prepare.mockImplementation((sql: string) => {
        if (sql === 'VACUUM') {
          return {
            run: vi.fn().mockImplementation(() => {
              throw new Error('VACUUM failed: database is locked');
            }),
          };
        }
        return mockStatement;
      });
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      // Initialize should not throw (vacuum is skipped due to 0 size anyway)
      expect(() => db.initialize()).not.toThrow();
      // Database should still be ready
      expect(db.isReady()).toBe(true);
    });
    it('should not block initialization for small databases', async () => {
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      // Time the initialization (should be fast for mock)
      const start = Date.now();
      db.initialize();
      const elapsed = Date.now() - start;
      expect(db.isReady()).toBe(true);
      expect(elapsed).toBeLessThan(1000); // Should be fast in mock environment
    });
  });
  describe('vacuum return types', () => {
    it('vacuum should return object with success, bytesFreed, and optional error', async () => {
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      const result = db.vacuum();
      expect(typeof result.success).toBe('boolean');
      expect(typeof result.bytesFreed).toBe('number');
      expect(result.error === undefined || typeof result.error === 'string').toBe(true);
    });
    it('vacuumIfNeeded should return object with vacuumed, databaseSize, and optional result', async () => {
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      const result = db.vacuumIfNeeded();
      expect(typeof result.vacuumed).toBe('boolean');
      expect(typeof result.databaseSize).toBe('number');
      expect(result.result === undefined || typeof result.result === 'object').toBe(true);
    });
    it('vacuumIfNeeded should include result when vacuum is performed', async () => {
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      // Use -1 threshold to force vacuum
      const result = db.vacuumIfNeeded(-1);
      expect(result.vacuumed).toBe(true);
      expect(result.result).toBeDefined();
      expect(result.result?.success).toBe(true);
    });
  });
  describe('clearOldData method', () => {
    beforeEach(() => {
      // Full reset here (including resetModules) so each clearOldData test
      // imports a pristine module; mock defaults are re-applied per test below.
      vi.clearAllMocks();
      vi.resetModules();
    });
    it('should return error when database is not initialized', async () => {
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      // Don't initialize
      const result = db.clearOldData(30);
      expect(result.success).toBe(false);
      expect(result.deletedQueryEvents).toBe(0);
      expect(result.deletedAutoRunSessions).toBe(0);
      expect(result.deletedAutoRunTasks).toBe(0);
      expect(result.error).toBe('Database not initialized');
    });
    it('should return error when olderThanDays is 0 or negative', async () => {
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      const resultZero = db.clearOldData(0);
      expect(resultZero.success).toBe(false);
      expect(resultZero.error).toBe('olderThanDays must be greater than 0');
      const resultNegative = db.clearOldData(-10);
      expect(resultNegative.success).toBe(false);
      expect(resultNegative.error).toBe('olderThanDays must be greater than 0');
    });
    it('should successfully clear old data with valid parameters', async () => {
      // Mock prepare to return statements with expected behavior
      mockStatement.all.mockReturnValue([{ id: 'session-1' }, { id: 'session-2' }]);
      mockStatement.run.mockReturnValue({ changes: 5 });
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      const result = db.clearOldData(30);
      expect(result.success).toBe(true);
      // Every DELETE shares mockStatement, so all three counts reflect
      // the mocked { changes: 5 }.
      expect(result.deletedQueryEvents).toBe(5);
      expect(result.deletedAutoRunSessions).toBe(5);
      expect(result.deletedAutoRunTasks).toBe(5);
      expect(result.error).toBeUndefined();
    });
    it('should handle empty results (no old data)', async () => {
      mockStatement.all.mockReturnValue([]);
      mockStatement.run.mockReturnValue({ changes: 0 });
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      const result = db.clearOldData(365);
      expect(result.success).toBe(true);
      expect(result.deletedQueryEvents).toBe(0);
      expect(result.deletedAutoRunSessions).toBe(0);
      expect(result.deletedAutoRunTasks).toBe(0);
      expect(result.error).toBeUndefined();
    });
    it('should calculate correct cutoff time based on days', async () => {
      // Capture the bound cutoff parameter from the query_events DELETE.
      let capturedCutoffTime: number | null = null;
      mockDb.prepare.mockImplementation((sql: string) => {
        return {
          run: vi.fn((cutoff: number) => {
            if (sql.includes('DELETE FROM query_events')) {
              capturedCutoffTime = cutoff;
            }
            return { changes: 0 };
          }),
          get: mockStatement.get,
          all: vi.fn(() => []),
        };
      });
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      const beforeCall = Date.now();
      db.clearOldData(7);
      const afterCall = Date.now();
      // Cutoff should be approximately 7 days ago (±1s slack for test runtime)
      const expectedCutoff = beforeCall - 7 * 24 * 60 * 60 * 1000;
      expect(capturedCutoffTime).not.toBeNull();
      expect(capturedCutoffTime!).toBeGreaterThanOrEqual(expectedCutoff - 1000);
      expect(capturedCutoffTime!).toBeLessThanOrEqual(afterCall - 7 * 24 * 60 * 60 * 1000 + 1000);
    });
    it('should handle database errors gracefully', async () => {
      mockDb.prepare.mockImplementation((sql: string) => {
        if (sql.includes('DELETE FROM query_events')) {
          return {
            run: vi.fn(() => {
              throw new Error('Database locked');
            }),
          };
        }
        return mockStatement;
      });
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      const result = db.clearOldData(30);
      expect(result.success).toBe(false);
      expect(result.error).toBe('Database locked');
      expect(result.deletedQueryEvents).toBe(0);
      expect(result.deletedAutoRunSessions).toBe(0);
      expect(result.deletedAutoRunTasks).toBe(0);
    });
    it('should support various time periods', async () => {
      mockStatement.all.mockReturnValue([]);
      mockStatement.run.mockReturnValue({ changes: 0 });
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      // Test common time periods from Settings UI
      const periods = [7, 30, 90, 180, 365];
      for (const days of periods) {
        const result = db.clearOldData(days);
        expect(result.success).toBe(true);
      }
    });
  });
  // =====================================================================
});

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,732 @@
/**
* Tests for query event CRUD operations, filtering, and CSV export.
*
* Note: better-sqlite3 is a native module compiled for Electron's Node version.
* Direct testing with the native module in vitest is not possible without
* electron-rebuild for the vitest runtime. These tests use mocked database
* operations to verify the logic without requiring the actual native module.
*
* For full integration testing of the SQLite database, use the Electron test
* environment (e2e tests) where the native module is properly loaded.
*/
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import * as path from 'path';
import * as os from 'os';
// Track Database constructor calls to verify file path.
// NOTE(review): lastDbPath is assigned by the mock but not asserted in the
// tests visible in this file — presumably kept for parity with the sibling
// maintenance test file; confirm before removing.
let lastDbPath: string | null = null;
// Store mock references so they can be accessed in tests.
// mockStatement stands in for a prepared statement; individual tests override
// run/get/all to simulate specific query results.
const mockStatement = {
  run: vi.fn(() => ({ changes: 1 })),
  get: vi.fn(() => ({ count: 0, total_duration: 0 })),
  all: vi.fn(() => []),
};
// mockDb stands in for the better-sqlite3 Database handle.
const mockDb = {
  pragma: vi.fn(() => [{ user_version: 0 }]),
  prepare: vi.fn(() => mockStatement),
  close: vi.fn(),
  // Transaction mock that immediately executes the function
  transaction: vi.fn((fn: () => void) => {
    return () => fn();
  }),
};
// Mock better-sqlite3 as a class (vi.mock is hoisted by vitest).
vi.mock('better-sqlite3', () => {
  return {
    default: class MockDatabase {
      constructor(dbPath: string) {
        lastDbPath = dbPath;
      }
      pragma = mockDb.pragma;
      prepare = mockDb.prepare;
      close = mockDb.close;
      transaction = mockDb.transaction;
    },
  };
});
// Mock electron's app module with trackable userData path so the database
// file path resolves under a temp directory instead of a real profile dir.
const mockUserDataPath = path.join(os.tmpdir(), 'maestro-test-stats-db');
vi.mock('electron', () => ({
  app: {
    getPath: vi.fn((name: string) => {
      if (name === 'userData') return mockUserDataPath;
      return os.tmpdir();
    }),
  },
}));
// Track fs calls — wrapper fns allow per-test re-mocking after the hoisted
// vi.mock factory has run.
const mockFsExistsSync = vi.fn(() => true);
const mockFsMkdirSync = vi.fn();
const mockFsCopyFileSync = vi.fn();
const mockFsUnlinkSync = vi.fn();
const mockFsRenameSync = vi.fn();
const mockFsStatSync = vi.fn(() => ({ size: 1024 }));
const mockFsReadFileSync = vi.fn(() => '0'); // Default: old timestamp (triggers vacuum check)
const mockFsWriteFileSync = vi.fn();
// Mock fs
vi.mock('fs', () => ({
  existsSync: (...args: unknown[]) => mockFsExistsSync(...args),
  mkdirSync: (...args: unknown[]) => mockFsMkdirSync(...args),
  copyFileSync: (...args: unknown[]) => mockFsCopyFileSync(...args),
  unlinkSync: (...args: unknown[]) => mockFsUnlinkSync(...args),
  renameSync: (...args: unknown[]) => mockFsRenameSync(...args),
  statSync: (...args: unknown[]) => mockFsStatSync(...args),
  readFileSync: (...args: unknown[]) => mockFsReadFileSync(...args),
  writeFileSync: (...args: unknown[]) => mockFsWriteFileSync(...args),
}));
// Mock logger so tests can assert on log messages without console noise.
vi.mock('../../../main/utils/logger', () => ({
  logger: {
    info: vi.fn(),
    warn: vi.fn(),
    error: vi.fn(),
    debug: vi.fn(),
  },
}));
// Import types only - we'll test the type definitions
import type {
QueryEvent,
AutoRunSession,
AutoRunTask,
SessionLifecycleEvent,
StatsTimeRange,
StatsFilters,
StatsAggregation,
} from '../../../shared/stats-types';
// Exercises StatsDB query-event retrieval (time range + column filters),
// aggregation math, and CSV export against the mocked better-sqlite3 module.
describe('Stats aggregation and filtering', () => {
  beforeEach(() => {
    vi.clearAllMocks();
    // Restore default mock behavior cleared by clearAllMocks.
    mockDb.pragma.mockReturnValue([{ user_version: 0 }]);
    mockDb.prepare.mockReturnValue(mockStatement);
    mockStatement.run.mockReturnValue({ changes: 1 });
    mockFsExistsSync.mockReturnValue(true);
  });
  afterEach(() => {
    vi.resetModules();
  });
  describe('time range filtering', () => {
    it('should filter query events by day range', async () => {
      mockStatement.all.mockReturnValue([]);
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      db.getQueryEvents('day');
      // Verify the SQL includes time filter
      const prepareCall = mockDb.prepare.mock.calls.find((call) =>
        (call[0] as string).includes('SELECT * FROM query_events')
      );
      expect(prepareCall).toBeDefined();
    });
    it('should filter with agentType filter', async () => {
      mockStatement.all.mockReturnValue([]);
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      db.getQueryEvents('week', { agentType: 'claude-code' });
      // Verify the SQL includes agent_type filter
      expect(mockStatement.all).toHaveBeenCalled();
    });
    it('should filter with source filter', async () => {
      mockStatement.all.mockReturnValue([]);
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      db.getQueryEvents('month', { source: 'auto' });
      // Verify the SQL includes source filter
      expect(mockStatement.all).toHaveBeenCalled();
    });
    it('should filter with projectPath filter', async () => {
      mockStatement.all.mockReturnValue([]);
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      db.getQueryEvents('year', { projectPath: '/test/project' });
      // Verify the SQL includes project_path filter
      expect(mockStatement.all).toHaveBeenCalled();
    });
    it('should filter with sessionId filter', async () => {
      mockStatement.all.mockReturnValue([]);
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      db.getQueryEvents('all', { sessionId: 'session-123' });
      // Verify the SQL includes session_id filter
      expect(mockStatement.all).toHaveBeenCalled();
    });
    it('should combine multiple filters', async () => {
      mockStatement.all.mockReturnValue([]);
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      db.getQueryEvents('week', {
        agentType: 'claude-code',
        source: 'user',
        projectPath: '/test',
        sessionId: 'session-1',
      });
      // Verify all parameters were passed
      expect(mockStatement.all).toHaveBeenCalled();
    });
  });
  describe('aggregation queries', () => {
    it('should compute aggregated stats correctly', async () => {
      mockStatement.get.mockReturnValue({ count: 100, total_duration: 500000 });
      mockStatement.all.mockReturnValue([
        { agent_type: 'claude-code', count: 70, duration: 350000 },
        { agent_type: 'opencode', count: 30, duration: 150000 },
      ]);
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      const stats = db.getAggregatedStats('week');
      expect(stats.totalQueries).toBe(100);
      expect(stats.totalDuration).toBe(500000);
      // avgDuration = totalDuration / totalQueries = 500000 / 100
      expect(stats.avgDuration).toBe(5000);
    });
    it('should handle empty results for aggregation', async () => {
      mockStatement.get.mockReturnValue({ count: 0, total_duration: 0 });
      mockStatement.all.mockReturnValue([]);
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      const stats = db.getAggregatedStats('day');
      expect(stats.totalQueries).toBe(0);
      // avgDuration must not divide by zero
      expect(stats.avgDuration).toBe(0);
      expect(stats.byAgent).toEqual({});
    });
  });
  describe('CSV export', () => {
    it('should export query events to CSV format', async () => {
      const now = Date.now();
      mockStatement.all.mockReturnValue([
        {
          id: 'event-1',
          session_id: 'session-1',
          agent_type: 'claude-code',
          source: 'user',
          start_time: now,
          duration: 5000,
          project_path: '/test',
          tab_id: 'tab-1',
        },
      ]);
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      const csv = db.exportToCsv('week');
      // Verify CSV structure.
      // NOTE(review): this substring omits the trailing ",isRemote" column that
      // the empty-export test below pins exactly; toContain still matches the
      // header prefix, but including the full header here would be clearer.
      expect(csv).toContain('id,sessionId,agentType,source,startTime,duration,projectPath,tabId');
      expect(csv).toContain('event-1');
      expect(csv).toContain('session-1');
      expect(csv).toContain('claude-code');
    });
    it('should handle empty data for CSV export', async () => {
      mockStatement.all.mockReturnValue([]);
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      const csv = db.exportToCsv('day');
      // Should only contain headers
      expect(csv).toBe(
        'id,sessionId,agentType,source,startTime,duration,projectPath,tabId,isRemote'
      );
    });
  });
});
/**
* Interactive session query event recording tests
*
* These tests verify that query events are properly recorded for interactive
* (user-initiated) sessions, which is the core validation for:
* - [ ] Verify query events are recorded for interactive sessions
*/
describe('Query events recorded for interactive sessions', () => {
beforeEach(() => {
vi.clearAllMocks();
mockDb.pragma.mockReturnValue([{ user_version: 1 }]);
mockDb.prepare.mockReturnValue(mockStatement);
mockStatement.run.mockReturnValue({ changes: 1 });
mockStatement.all.mockReturnValue([]);
mockFsExistsSync.mockReturnValue(true);
});
afterEach(() => {
vi.resetModules();
});
describe('user-initiated interactive session recording', () => {
it('should record query event with source="user" for interactive session', async () => {
const { StatsDB } = await import('../../../main/stats');
const db = new StatsDB();
db.initialize();
const startTime = Date.now();
const eventId = db.insertQueryEvent({
sessionId: 'interactive-session-1',
agentType: 'claude-code',
source: 'user', // Interactive session is always 'user'
startTime,
duration: 5000,
projectPath: '/Users/test/myproject',
tabId: 'tab-1',
});
expect(eventId).toBeDefined();
expect(typeof eventId).toBe('string');
// Verify the INSERT was called with correct parameters
const runCalls = mockStatement.run.mock.calls;
const lastCall = runCalls[runCalls.length - 1];
// Parameters: id, session_id, agent_type, source, start_time, duration, project_path, tab_id
expect(lastCall[1]).toBe('interactive-session-1'); // session_id
expect(lastCall[2]).toBe('claude-code'); // agent_type
expect(lastCall[3]).toBe('user'); // source
expect(lastCall[4]).toBe(startTime); // start_time
expect(lastCall[5]).toBe(5000); // duration
expect(lastCall[6]).toBe('/Users/test/myproject'); // project_path
expect(lastCall[7]).toBe('tab-1'); // tab_id
});
it('should record interactive query without optional fields', async () => {
const { StatsDB } = await import('../../../main/stats');
const db = new StatsDB();
db.initialize();
const startTime = Date.now();
const eventId = db.insertQueryEvent({
sessionId: 'minimal-session',
agentType: 'claude-code',
source: 'user',
startTime,
duration: 3000,
// projectPath and tabId are optional
});
expect(eventId).toBeDefined();
// Verify NULL values for optional fields
const runCalls = mockStatement.run.mock.calls;
const lastCall = runCalls[runCalls.length - 1];
expect(lastCall[6]).toBeNull(); // project_path
expect(lastCall[7]).toBeNull(); // tab_id
});
it('should record multiple interactive queries for the same session', async () => {
const { StatsDB } = await import('../../../main/stats');
const db = new StatsDB();
db.initialize();
// Clear mocks after initialize() to count only test operations
mockStatement.run.mockClear();
const baseTime = Date.now();
// First query
const id1 = db.insertQueryEvent({
sessionId: 'multi-query-session',
agentType: 'claude-code',
source: 'user',
startTime: baseTime,
duration: 5000,
projectPath: '/project',
tabId: 'tab-1',
});
// Second query (same session, different tab)
const id2 = db.insertQueryEvent({
sessionId: 'multi-query-session',
agentType: 'claude-code',
source: 'user',
startTime: baseTime + 10000,
duration: 3000,
projectPath: '/project',
tabId: 'tab-2',
});
// Third query (same session, same tab as first)
const id3 = db.insertQueryEvent({
sessionId: 'multi-query-session',
agentType: 'claude-code',
source: 'user',
startTime: baseTime + 20000,
duration: 7000,
projectPath: '/project',
tabId: 'tab-1',
});
// All should have unique IDs
expect(id1).not.toBe(id2);
expect(id2).not.toBe(id3);
expect(id1).not.toBe(id3);
// All should be recorded (3 INSERT calls after initialization)
expect(mockStatement.run).toHaveBeenCalledTimes(3);
});
it('should record interactive queries with different agent types', async () => {
const { StatsDB } = await import('../../../main/stats');
const db = new StatsDB();
db.initialize();
// Clear mocks after initialize() to count only test operations
mockStatement.run.mockClear();
const startTime = Date.now();
// Claude Code query
const claudeId = db.insertQueryEvent({
sessionId: 'session-1',
agentType: 'claude-code',
source: 'user',
startTime,
duration: 5000,
});
// OpenCode query
const opencodeId = db.insertQueryEvent({
sessionId: 'session-2',
agentType: 'opencode',
source: 'user',
startTime: startTime + 10000,
duration: 3000,
});
// Codex query
const codexId = db.insertQueryEvent({
sessionId: 'session-3',
agentType: 'codex',
source: 'user',
startTime: startTime + 20000,
duration: 4000,
});
expect(claudeId).toBeDefined();
expect(opencodeId).toBeDefined();
expect(codexId).toBeDefined();
// Verify different agent types were recorded
const runCalls = mockStatement.run.mock.calls;
expect(runCalls[0][2]).toBe('claude-code');
expect(runCalls[1][2]).toBe('opencode');
expect(runCalls[2][2]).toBe('codex');
});
});
// Read-path tests for getQueryEvents(): mockStatement.all is primed with
// snake_case rows exactly as the SQLite layer would return them, then the
// camelCase mapping and filter plumbing are asserted on the result.
describe('retrieval of interactive session query events', () => {
  it('should retrieve interactive query events filtered by source=user', async () => {
    const now = Date.now();
    // Two user-sourced rows; the filter should surface both.
    mockStatement.all.mockReturnValue([
      {
        id: 'event-1',
        session_id: 'session-1',
        agent_type: 'claude-code',
        source: 'user',
        start_time: now - 1000,
        duration: 5000,
        project_path: '/project',
        tab_id: 'tab-1',
      },
      {
        id: 'event-2',
        session_id: 'session-2',
        agent_type: 'claude-code',
        source: 'user',
        start_time: now - 2000,
        duration: 3000,
        project_path: '/project',
        tab_id: 'tab-2',
      },
    ]);
    const { StatsDB } = await import('../../../main/stats');
    const db = new StatsDB();
    db.initialize();
    // Filter by source='user' to get only interactive sessions
    const events = db.getQueryEvents('day', { source: 'user' });
    expect(events).toHaveLength(2);
    expect(events[0].source).toBe('user');
    expect(events[1].source).toBe('user');
    expect(events[0].sessionId).toBe('session-1');
    expect(events[1].sessionId).toBe('session-2');
  });
  it('should retrieve interactive query events filtered by sessionId', async () => {
    const now = Date.now();
    mockStatement.all.mockReturnValue([
      {
        id: 'event-1',
        session_id: 'target-session',
        agent_type: 'claude-code',
        source: 'user',
        start_time: now - 1000,
        duration: 5000,
        project_path: '/project',
        tab_id: 'tab-1',
      },
    ]);
    const { StatsDB } = await import('../../../main/stats');
    const db = new StatsDB();
    db.initialize();
    const events = db.getQueryEvents('week', { sessionId: 'target-session' });
    expect(events).toHaveLength(1);
    expect(events[0].sessionId).toBe('target-session');
  });
  it('should retrieve interactive query events filtered by projectPath', async () => {
    const now = Date.now();
    mockStatement.all.mockReturnValue([
      {
        id: 'event-1',
        session_id: 'session-1',
        agent_type: 'claude-code',
        source: 'user',
        start_time: now - 1000,
        duration: 5000,
        project_path: '/specific/project',
        tab_id: 'tab-1',
      },
    ]);
    const { StatsDB } = await import('../../../main/stats');
    const db = new StatsDB();
    db.initialize();
    const events = db.getQueryEvents('month', { projectPath: '/specific/project' });
    expect(events).toHaveLength(1);
    expect(events[0].projectPath).toBe('/specific/project');
  });
  it('should correctly map database columns to QueryEvent interface fields', async () => {
    const now = Date.now();
    mockStatement.all.mockReturnValue([
      {
        id: 'db-event-id',
        session_id: 'db-session-id',
        agent_type: 'claude-code',
        source: 'user',
        start_time: now,
        duration: 5000,
        project_path: '/project/path',
        tab_id: 'tab-123',
      },
    ]);
    const { StatsDB } = await import('../../../main/stats');
    const db = new StatsDB();
    db.initialize();
    const events = db.getQueryEvents('day');
    expect(events).toHaveLength(1);
    const event = events[0];
    // Verify snake_case -> camelCase mapping
    expect(event.id).toBe('db-event-id');
    expect(event.sessionId).toBe('db-session-id');
    expect(event.agentType).toBe('claude-code');
    expect(event.source).toBe('user');
    expect(event.startTime).toBe(now);
    expect(event.duration).toBe(5000);
    expect(event.projectPath).toBe('/project/path');
    expect(event.tabId).toBe('tab-123');
  });
});
// Aggregation tests: getAggregatedStats() issues several .all() queries in a
// fixed order, so the mock dispatches on call count. NOTE(review): the
// callCount sequence (byAgent, bySource, byDay) mirrors the implementation's
// query order — if that order changes these tests will silently test the
// wrong breakdown; confirm against getAggregatedStats.
describe('aggregation includes interactive session data', () => {
  it('should include interactive sessions in aggregated stats', async () => {
    mockStatement.get.mockReturnValue({ count: 10, total_duration: 50000 });
    // The aggregation calls mockStatement.all multiple times for different queries
    // We return based on the call sequence: byAgent, bySource, byDay
    let callCount = 0;
    mockStatement.all.mockImplementation(() => {
      callCount++;
      if (callCount === 1) {
        // byAgent breakdown
        return [{ agent_type: 'claude-code', count: 10, duration: 50000 }];
      }
      if (callCount === 2) {
        // bySource breakdown
        return [{ source: 'user', count: 10 }];
      }
      // byDay breakdown
      return [{ date: '2024-12-28', count: 10, duration: 50000 }];
    });
    const { StatsDB } = await import('../../../main/stats');
    const db = new StatsDB();
    db.initialize();
    const stats = db.getAggregatedStats('week');
    expect(stats.totalQueries).toBe(10);
    expect(stats.totalDuration).toBe(50000);
    // avgDuration = totalDuration / totalQueries (50000 / 10)
    expect(stats.avgDuration).toBe(5000);
    expect(stats.bySource.user).toBe(10);
    expect(stats.bySource.auto).toBe(0);
  });
  it('should correctly separate user vs auto queries in bySource', async () => {
    mockStatement.get.mockReturnValue({ count: 15, total_duration: 75000 });
    // Return by-source breakdown with both user and auto on second call
    let callCount = 0;
    mockStatement.all.mockImplementation(() => {
      callCount++;
      if (callCount === 2) {
        // bySource breakdown
        return [
          { source: 'user', count: 10 },
          { source: 'auto', count: 5 },
        ];
      }
      return [];
    });
    const { StatsDB } = await import('../../../main/stats');
    const db = new StatsDB();
    db.initialize();
    const stats = db.getAggregatedStats('month');
    expect(stats.bySource.user).toBe(10);
    expect(stats.bySource.auto).toBe(5);
  });
});
// Timing fidelity: startTime/duration must be written to the DB verbatim, with
// no rounding or clamping, across zero, typical, and very long durations.
// NOTE(review): indices [4] and [5] assume start_time and duration are the 5th
// and 6th positional bind parameters of the INSERT — confirm against
// insertQueryEvent's statement.
describe('timing accuracy for interactive sessions', () => {
  it('should preserve exact startTime and duration values', async () => {
    const { StatsDB } = await import('../../../main/stats');
    const db = new StatsDB();
    db.initialize();
    const exactStartTime = 1735344000000; // Specific timestamp
    const exactDuration = 12345; // Specific duration in ms
    db.insertQueryEvent({
      sessionId: 'timing-test-session',
      agentType: 'claude-code',
      source: 'user',
      startTime: exactStartTime,
      duration: exactDuration,
    });
    const runCalls = mockStatement.run.mock.calls;
    const lastCall = runCalls[runCalls.length - 1];
    expect(lastCall[4]).toBe(exactStartTime); // Exact start_time preserved
    expect(lastCall[5]).toBe(exactDuration); // Exact duration preserved
  });
  it('should handle zero duration (immediate responses)', async () => {
    const { StatsDB } = await import('../../../main/stats');
    const db = new StatsDB();
    db.initialize();
    const eventId = db.insertQueryEvent({
      sessionId: 'zero-duration-session',
      agentType: 'claude-code',
      source: 'user',
      startTime: Date.now(),
      duration: 0, // Zero duration is valid (e.g., cached response)
    });
    expect(eventId).toBeDefined();
    const runCalls = mockStatement.run.mock.calls;
    const lastCall = runCalls[runCalls.length - 1];
    expect(lastCall[5]).toBe(0);
  });
  it('should handle very long durations', async () => {
    const { StatsDB } = await import('../../../main/stats');
    const db = new StatsDB();
    db.initialize();
    const longDuration = 10 * 60 * 1000; // 10 minutes in ms
    const eventId = db.insertQueryEvent({
      sessionId: 'long-duration-session',
      agentType: 'claude-code',
      source: 'user',
      startTime: Date.now(),
      duration: longDuration,
    });
    expect(eventId).toBeDefined();
    const runCalls = mockStatement.run.mock.calls;
    const lastCall = runCalls[runCalls.length - 1];
    expect(lastCall[5]).toBe(longDuration);
  });
});
});
/**
* Comprehensive Auto Run session and task recording verification tests
*
* These tests verify the complete Auto Run tracking workflow:
* 1. Auto Run sessions are properly recorded when batch processing starts
* 2. Individual tasks within sessions are recorded with timing data
* 3. Sessions are updated correctly when batch processing completes
* 4. All data can be retrieved with proper field mapping
*/

View File

@@ -0,0 +1,682 @@
/**
* Tests for StatsDB core class, initialization, and singleton.
*
* Note: better-sqlite3 is a native module compiled for Electron's Node version.
* Direct testing with the native module in vitest is not possible without
* electron-rebuild for the vitest runtime. These tests use mocked database
* operations to verify the logic without requiring the actual native module.
*
* For full integration testing of the SQLite database, use the Electron test
* environment (e2e tests) where the native module is properly loaded.
*/
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import * as path from 'path';
import * as os from 'os';
// ---------------------------------------------------------------------------
// Shared test doubles. vi.mock() calls are hoisted by Vitest to run before
// any import of the module under test, so the StatsDB code only ever sees
// these fakes instead of better-sqlite3 / electron / fs / the logger.
// ---------------------------------------------------------------------------
// Track Database constructor calls to verify file path
let lastDbPath: string | null = null;
// Store mock references so they can be accessed in tests
const mockStatement = {
  run: vi.fn(() => ({ changes: 1 })),
  get: vi.fn(() => ({ count: 0, total_duration: 0 })),
  all: vi.fn(() => []),
};
const mockDb = {
  pragma: vi.fn(() => [{ user_version: 0 }]),
  prepare: vi.fn(() => mockStatement),
  close: vi.fn(),
  // Transaction mock that immediately executes the function
  transaction: vi.fn((fn: () => void) => {
    return () => fn();
  }),
};
// Mock better-sqlite3 as a class
vi.mock('better-sqlite3', () => {
  return {
    default: class MockDatabase {
      constructor(dbPath: string) {
        // Capture the path so tests can assert where the DB file would live.
        lastDbPath = dbPath;
      }
      pragma = mockDb.pragma;
      prepare = mockDb.prepare;
      close = mockDb.close;
      transaction = mockDb.transaction;
    },
  };
});
// Mock electron's app module with trackable userData path
const mockUserDataPath = path.join(os.tmpdir(), 'maestro-test-stats-db');
vi.mock('electron', () => ({
  app: {
    getPath: vi.fn((name: string) => {
      if (name === 'userData') return mockUserDataPath;
      return os.tmpdir();
    }),
  },
}));
// Track fs calls
const mockFsExistsSync = vi.fn(() => true);
const mockFsMkdirSync = vi.fn();
const mockFsCopyFileSync = vi.fn();
const mockFsUnlinkSync = vi.fn();
const mockFsRenameSync = vi.fn();
const mockFsStatSync = vi.fn(() => ({ size: 1024 }));
const mockFsReadFileSync = vi.fn(() => '0'); // Default: old timestamp (triggers vacuum check)
const mockFsWriteFileSync = vi.fn();
// Mock fs
// The indirection through the mockFs* variables lets individual tests change
// behavior (e.g. existsSync returning false) without re-mocking the module.
vi.mock('fs', () => ({
  existsSync: (...args: unknown[]) => mockFsExistsSync(...args),
  mkdirSync: (...args: unknown[]) => mockFsMkdirSync(...args),
  copyFileSync: (...args: unknown[]) => mockFsCopyFileSync(...args),
  unlinkSync: (...args: unknown[]) => mockFsUnlinkSync(...args),
  renameSync: (...args: unknown[]) => mockFsRenameSync(...args),
  statSync: (...args: unknown[]) => mockFsStatSync(...args),
  readFileSync: (...args: unknown[]) => mockFsReadFileSync(...args),
  writeFileSync: (...args: unknown[]) => mockFsWriteFileSync(...args),
}));
// Mock logger
vi.mock('../../../main/utils/logger', () => ({
  logger: {
    info: vi.fn(),
    warn: vi.fn(),
    error: vi.fn(),
    debug: vi.fn(),
  },
}));
// Import types only - we'll test the type definitions
import type {
QueryEvent,
AutoRunSession,
AutoRunTask,
SessionLifecycleEvent,
StatsTimeRange,
StatsFilters,
StatsAggregation,
} from '../../../shared/stats-types';
// Core StatsDB behavior against fully mocked dependencies: exports,
// instantiation, initialization/migrations, migration-history API, guard
// errors before initialize(), basic query-event round trips, and close().
describe('StatsDB class (mocked)', () => {
  beforeEach(() => {
    // Reset every shared mock to its default state so tests stay independent.
    vi.clearAllMocks();
    lastDbPath = null;
    mockDb.pragma.mockReturnValue([{ user_version: 0 }]);
    mockDb.prepare.mockReturnValue(mockStatement);
    mockStatement.run.mockReturnValue({ changes: 1 });
    mockStatement.get.mockReturnValue({ count: 0, total_duration: 0 });
    mockStatement.all.mockReturnValue([]);
    mockFsExistsSync.mockReturnValue(true);
    mockFsMkdirSync.mockClear();
  });
  afterEach(() => {
    // Drop the module cache so each test re-imports a fresh module (and a
    // fresh singleton) rather than reusing state from a previous test.
    vi.resetModules();
  });
  describe('module exports', () => {
    it('should export StatsDB class', async () => {
      const { StatsDB } = await import('../../../main/stats');
      expect(StatsDB).toBeDefined();
      expect(typeof StatsDB).toBe('function');
    });
    it('should export singleton functions', async () => {
      const { getStatsDB, initializeStatsDB, closeStatsDB } = await import('../../../main/stats');
      expect(getStatsDB).toBeDefined();
      expect(initializeStatsDB).toBeDefined();
      expect(closeStatsDB).toBeDefined();
    });
  });
  describe('StatsDB instantiation', () => {
    it('should create instance without initialization', async () => {
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      expect(db).toBeDefined();
      // The constructor must not open the database; that is initialize()'s job.
      expect(db.isReady()).toBe(false);
    });
    it('should return database path', async () => {
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      expect(db.getDbPath()).toContain('stats.db');
    });
  });
  describe('initialization', () => {
    it('should initialize database and set isReady to true', async () => {
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      expect(db.isReady()).toBe(true);
    });
    it('should enable WAL mode', async () => {
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      expect(mockDb.pragma).toHaveBeenCalledWith('journal_mode = WAL');
    });
    it('should run v1 migration for fresh database', async () => {
      // user_version 0 simulates a brand-new database file.
      mockDb.pragma.mockImplementation((sql: string) => {
        if (sql === 'user_version') return [{ user_version: 0 }];
        return undefined;
      });
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      // Should set user_version to 1
      expect(mockDb.pragma).toHaveBeenCalledWith('user_version = 1');
    });
    it('should skip migration for already migrated database', async () => {
      mockDb.pragma.mockImplementation((sql: string) => {
        if (sql === 'user_version') return [{ user_version: 1 }];
        return undefined;
      });
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      // Should NOT set user_version (no migration needed)
      // NOTE(review): target version is 3 (see migration system API tests), so
      // a DB at version 1 would still run v2/v3; this only proves the v1
      // migration is not re-applied — confirm the intended coverage.
      expect(mockDb.pragma).not.toHaveBeenCalledWith('user_version = 1');
    });
    it('should create _migrations table on initialization', async () => {
      mockDb.pragma.mockImplementation((sql: string) => {
        if (sql === 'user_version') return [{ user_version: 0 }];
        return undefined;
      });
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      // Should have prepared the CREATE TABLE IF NOT EXISTS _migrations statement
      expect(mockDb.prepare).toHaveBeenCalledWith(
        expect.stringContaining('CREATE TABLE IF NOT EXISTS _migrations')
      );
    });
    it('should record successful migration in _migrations table', async () => {
      mockDb.pragma.mockImplementation((sql: string) => {
        if (sql === 'user_version') return [{ user_version: 0 }];
        return undefined;
      });
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      // Should have inserted a success record into _migrations
      expect(mockDb.prepare).toHaveBeenCalledWith(
        expect.stringContaining('INSERT OR REPLACE INTO _migrations')
      );
    });
    it('should use transaction for migration atomicity', async () => {
      mockDb.pragma.mockImplementation((sql: string) => {
        if (sql === 'user_version') return [{ user_version: 0 }];
        return undefined;
      });
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      // Should have used transaction
      expect(mockDb.transaction).toHaveBeenCalled();
    });
  });
  describe('migration system API', () => {
    beforeEach(() => {
      // This group defaults the DB to version 1 and an empty _migrations table.
      vi.clearAllMocks();
      mockDb.pragma.mockImplementation((sql: string) => {
        if (sql === 'user_version') return [{ user_version: 1 }];
        return undefined;
      });
      mockDb.prepare.mockReturnValue(mockStatement);
      mockStatement.run.mockReturnValue({ changes: 1 });
      mockStatement.get.mockReturnValue(null);
      mockStatement.all.mockReturnValue([]);
      mockFsExistsSync.mockReturnValue(true);
    });
    afterEach(() => {
      vi.resetModules();
    });
    it('should return current version via getCurrentVersion()', async () => {
      mockDb.pragma.mockImplementation((sql: string) => {
        if (sql === 'user_version') return [{ user_version: 1 }];
        return undefined;
      });
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      expect(db.getCurrentVersion()).toBe(1);
    });
    it('should return target version via getTargetVersion()', async () => {
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      // Currently we have version 3 migration (v1: initial schema, v2: is_remote column, v3: session_lifecycle table)
      expect(db.getTargetVersion()).toBe(3);
    });
    it('should return false from hasPendingMigrations() when up to date', async () => {
      mockDb.pragma.mockImplementation((sql: string) => {
        if (sql === 'user_version') return [{ user_version: 3 }];
        return undefined;
      });
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      expect(db.hasPendingMigrations()).toBe(false);
    });
    it('should correctly identify pending migrations based on version difference', async () => {
      // This test verifies the hasPendingMigrations() logic
      // by checking current version < target version
      // Simulate a database that's already at version 3 (target version)
      let currentVersion = 3;
      mockDb.pragma.mockImplementation((sql: string) => {
        if (sql === 'user_version') return [{ user_version: currentVersion }];
        // Handle version updates from migration
        if (sql.startsWith('user_version = ')) {
          currentVersion = parseInt(sql.replace('user_version = ', ''));
        }
        return undefined;
      });
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      // At version 3, target is 3, so no pending migrations
      expect(db.getCurrentVersion()).toBe(3);
      expect(db.getTargetVersion()).toBe(3);
      expect(db.hasPendingMigrations()).toBe(false);
    });
    it('should return empty array from getMigrationHistory() when no _migrations table', async () => {
      mockStatement.get.mockReturnValue(null); // No table exists
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      const history = db.getMigrationHistory();
      expect(history).toEqual([]);
    });
    it('should return migration records from getMigrationHistory()', async () => {
      const mockMigrationRows = [
        {
          version: 1,
          description: 'Initial schema',
          applied_at: 1704067200000,
          status: 'success' as const,
          error_message: null,
        },
      ];
      mockStatement.get.mockReturnValue({ name: '_migrations' }); // Table exists
      mockStatement.all.mockReturnValue(mockMigrationRows);
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      const history = db.getMigrationHistory();
      expect(history).toHaveLength(1);
      // snake_case DB row mapped to camelCase; null error_message -> undefined.
      expect(history[0]).toEqual({
        version: 1,
        description: 'Initial schema',
        appliedAt: 1704067200000,
        status: 'success',
        errorMessage: undefined,
      });
    });
    it('should include errorMessage in migration history for failed migrations', async () => {
      const mockMigrationRows = [
        {
          version: 2,
          description: 'Add new column',
          applied_at: 1704067200000,
          status: 'failed' as const,
          error_message: 'SQLITE_ERROR: duplicate column name',
        },
      ];
      mockStatement.get.mockReturnValue({ name: '_migrations' });
      mockStatement.all.mockReturnValue(mockMigrationRows);
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      const history = db.getMigrationHistory();
      expect(history[0].status).toBe('failed');
      expect(history[0].errorMessage).toBe('SQLITE_ERROR: duplicate column name');
    });
  });
  describe('error handling', () => {
    // All public data methods must fail fast with a clear error when called
    // before initialize().
    it('should throw when calling insertQueryEvent before initialization', async () => {
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      expect(() =>
        db.insertQueryEvent({
          sessionId: 'test',
          agentType: 'claude-code',
          source: 'user',
          startTime: Date.now(),
          duration: 1000,
        })
      ).toThrow('Database not initialized');
    });
    it('should throw when calling getQueryEvents before initialization', async () => {
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      expect(() => db.getQueryEvents('day')).toThrow('Database not initialized');
    });
    it('should throw when calling getAggregatedStats before initialization', async () => {
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      expect(() => db.getAggregatedStats('week')).toThrow('Database not initialized');
    });
  });
  describe('query events', () => {
    it('should insert a query event and return an id', async () => {
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      const eventId = db.insertQueryEvent({
        sessionId: 'session-1',
        agentType: 'claude-code',
        source: 'user',
        startTime: Date.now(),
        duration: 5000,
        projectPath: '/test/project',
        tabId: 'tab-1',
      });
      expect(eventId).toBeDefined();
      expect(typeof eventId).toBe('string');
      expect(mockStatement.run).toHaveBeenCalled();
    });
    it('should retrieve query events within time range', async () => {
      mockStatement.all.mockReturnValue([
        {
          id: 'event-1',
          session_id: 'session-1',
          agent_type: 'claude-code',
          source: 'user',
          start_time: Date.now(),
          duration: 5000,
          project_path: '/test',
          tab_id: 'tab-1',
        },
      ]);
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      const events = db.getQueryEvents('day');
      expect(events).toHaveLength(1);
      expect(events[0].sessionId).toBe('session-1');
      expect(events[0].agentType).toBe('claude-code');
    });
  });
  describe('close', () => {
    it('should close the database connection', async () => {
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      db.close();
      expect(mockDb.close).toHaveBeenCalled();
      // After close the instance must report not-ready again.
      expect(db.isReady()).toBe(false);
    });
  });
});
/**
* Database file creation verification tests
*
* These tests verify that the database file is created at the correct path
* in the user's application data directory on first launch.
*/
// First-launch behavior: where the DB file goes (userData/stats.db), directory
// creation, idempotent initialization, schema/index creation, and the
// module-level singleton lifecycle.
describe('Database file creation on first launch', () => {
  beforeEach(() => {
    vi.clearAllMocks();
    lastDbPath = null;
    mockDb.pragma.mockReturnValue([{ user_version: 0 }]);
    mockDb.prepare.mockReturnValue(mockStatement);
    mockFsExistsSync.mockReturnValue(true);
    mockFsMkdirSync.mockClear();
  });
  afterEach(() => {
    // Fresh module (and singleton) per test.
    vi.resetModules();
  });
  describe('database path computation', () => {
    it('should compute database path using electron app.getPath("userData")', async () => {
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      const dbPath = db.getDbPath();
      // Verify the path is in the userData directory
      expect(dbPath).toContain(mockUserDataPath);
      expect(dbPath).toContain('stats.db');
    });
    it('should create database file at userData/stats.db path', async () => {
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      // Verify better-sqlite3 was called with the correct path
      // (lastDbPath is captured by the MockDatabase constructor above).
      expect(lastDbPath).toBe(path.join(mockUserDataPath, 'stats.db'));
    });
    it('should use platform-appropriate userData path', async () => {
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      // The path should be absolute and contain stats.db
      const dbPath = db.getDbPath();
      expect(path.isAbsolute(dbPath)).toBe(true);
      expect(path.basename(dbPath)).toBe('stats.db');
    });
  });
  describe('directory creation', () => {
    it('should create userData directory if it does not exist', async () => {
      // Simulate directory not existing
      mockFsExistsSync.mockReturnValue(false);
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      // Verify mkdirSync was called with recursive option
      expect(mockFsMkdirSync).toHaveBeenCalledWith(mockUserDataPath, { recursive: true });
    });
    it('should not create directory if it already exists', async () => {
      // Simulate directory already existing
      mockFsExistsSync.mockReturnValue(true);
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      // Verify mkdirSync was NOT called
      expect(mockFsMkdirSync).not.toHaveBeenCalled();
    });
  });
  describe('database initialization', () => {
    it('should open database connection on initialize', async () => {
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      expect(db.isReady()).toBe(false);
      db.initialize();
      expect(db.isReady()).toBe(true);
    });
    it('should only initialize once (idempotent)', async () => {
      mockDb.pragma.mockClear();
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      // Compare pragma call counts before/after: a second initialize() must
      // not touch the database again.
      const firstCallCount = mockDb.pragma.mock.calls.length;
      db.initialize(); // Second call should be a no-op
      const secondCallCount = mockDb.pragma.mock.calls.length;
      expect(secondCallCount).toBe(firstCallCount);
    });
    it('should create all three tables on fresh database', async () => {
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      // Verify prepare was called with CREATE TABLE statements
      const prepareCalls = mockDb.prepare.mock.calls.map((call) => call[0]);
      // Check for query_events table
      expect(
        prepareCalls.some((sql: string) => sql.includes('CREATE TABLE IF NOT EXISTS query_events'))
      ).toBe(true);
      // Check for auto_run_sessions table
      expect(
        prepareCalls.some((sql: string) =>
          sql.includes('CREATE TABLE IF NOT EXISTS auto_run_sessions')
        )
      ).toBe(true);
      // Check for auto_run_tasks table
      expect(
        prepareCalls.some((sql: string) =>
          sql.includes('CREATE TABLE IF NOT EXISTS auto_run_tasks')
        )
      ).toBe(true);
    });
    it('should create all required indexes', async () => {
      const { StatsDB } = await import('../../../main/stats');
      const db = new StatsDB();
      db.initialize();
      const prepareCalls = mockDb.prepare.mock.calls.map((call) => call[0]);
      // Verify all 7 indexes are created
      const expectedIndexes = [
        'idx_query_start_time',
        'idx_query_agent_type',
        'idx_query_source',
        'idx_query_session',
        'idx_auto_session_start',
        'idx_task_auto_session',
        'idx_task_start',
      ];
      for (const indexName of expectedIndexes) {
        expect(prepareCalls.some((sql: string) => sql.includes(indexName))).toBe(true);
      }
    });
  });
  describe('singleton pattern', () => {
    it('should return same instance from getStatsDB', async () => {
      const { getStatsDB, closeStatsDB } = await import('../../../main/stats');
      const instance1 = getStatsDB();
      const instance2 = getStatsDB();
      expect(instance1).toBe(instance2);
      // Cleanup
      closeStatsDB();
    });
    it('should initialize database via initializeStatsDB', async () => {
      const { initializeStatsDB, getStatsDB, closeStatsDB } = await import('../../../main/stats');
      initializeStatsDB();
      const db = getStatsDB();
      expect(db.isReady()).toBe(true);
      // Cleanup
      closeStatsDB();
    });
    it('should close database and reset singleton via closeStatsDB', async () => {
      const { initializeStatsDB, getStatsDB, closeStatsDB } = await import('../../../main/stats');
      initializeStatsDB();
      const dbBefore = getStatsDB();
      expect(dbBefore.isReady()).toBe(true);
      closeStatsDB();
      // After close, a new instance should be returned
      const dbAfter = getStatsDB();
      expect(dbAfter).not.toBe(dbBefore);
      expect(dbAfter.isReady()).toBe(false);
    });
  });
});
/**
* Auto Run session and task recording tests
*/

View File

@@ -0,0 +1,319 @@
/**
* Tests for shared stats type definitions.
*
* Note: better-sqlite3 is a native module compiled for Electron's Node version.
* Direct testing with the native module in vitest is not possible without
* electron-rebuild for the vitest runtime. These tests use mocked database
* operations to verify the logic without requiring the actual native module.
*
* For full integration testing of the SQLite database, use the Electron test
* environment (e2e tests) where the native module is properly loaded.
*/
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import * as path from 'path';
import * as os from 'os';
// ---------------------------------------------------------------------------
// NOTE(review): this file's tests below exercise only the shared type
// definitions (compile-time checks plus simple expect()s); none of the
// better-sqlite3 / electron / fs / logger mocks in this section appear to be
// referenced by them. This looks like copy-paste residue from the stats-db
// decomposition — confirm and consider trimming this setup.
// ---------------------------------------------------------------------------
// Track Database constructor calls to verify file path
let lastDbPath: string | null = null;
// Store mock references so they can be accessed in tests
const mockStatement = {
  run: vi.fn(() => ({ changes: 1 })),
  get: vi.fn(() => ({ count: 0, total_duration: 0 })),
  all: vi.fn(() => []),
};
const mockDb = {
  pragma: vi.fn(() => [{ user_version: 0 }]),
  prepare: vi.fn(() => mockStatement),
  close: vi.fn(),
  // Transaction mock that immediately executes the function
  transaction: vi.fn((fn: () => void) => {
    return () => fn();
  }),
};
// Mock better-sqlite3 as a class
vi.mock('better-sqlite3', () => {
  return {
    default: class MockDatabase {
      constructor(dbPath: string) {
        lastDbPath = dbPath;
      }
      pragma = mockDb.pragma;
      prepare = mockDb.prepare;
      close = mockDb.close;
      transaction = mockDb.transaction;
    },
  };
});
// Mock electron's app module with trackable userData path
const mockUserDataPath = path.join(os.tmpdir(), 'maestro-test-stats-db');
vi.mock('electron', () => ({
  app: {
    getPath: vi.fn((name: string) => {
      if (name === 'userData') return mockUserDataPath;
      return os.tmpdir();
    }),
  },
}));
// Track fs calls
const mockFsExistsSync = vi.fn(() => true);
const mockFsMkdirSync = vi.fn();
const mockFsCopyFileSync = vi.fn();
const mockFsUnlinkSync = vi.fn();
const mockFsRenameSync = vi.fn();
const mockFsStatSync = vi.fn(() => ({ size: 1024 }));
const mockFsReadFileSync = vi.fn(() => '0'); // Default: old timestamp (triggers vacuum check)
const mockFsWriteFileSync = vi.fn();
// Mock fs
vi.mock('fs', () => ({
  existsSync: (...args: unknown[]) => mockFsExistsSync(...args),
  mkdirSync: (...args: unknown[]) => mockFsMkdirSync(...args),
  copyFileSync: (...args: unknown[]) => mockFsCopyFileSync(...args),
  unlinkSync: (...args: unknown[]) => mockFsUnlinkSync(...args),
  renameSync: (...args: unknown[]) => mockFsRenameSync(...args),
  statSync: (...args: unknown[]) => mockFsStatSync(...args),
  readFileSync: (...args: unknown[]) => mockFsReadFileSync(...args),
  writeFileSync: (...args: unknown[]) => mockFsWriteFileSync(...args),
}));
// Mock logger
vi.mock('../../../main/utils/logger', () => ({
  logger: {
    info: vi.fn(),
    warn: vi.fn(),
    error: vi.fn(),
    debug: vi.fn(),
  },
}));
// Import types only - we'll test the type definitions
import type {
QueryEvent,
AutoRunSession,
AutoRunTask,
SessionLifecycleEvent,
StatsTimeRange,
StatsFilters,
StatsAggregation,
} from '../../../shared/stats-types';
describe('stats-types.ts', () => {
// Shape checks for QueryEvent: a fully-populated literal and a minimal one.
// The real assertion is the typed literal itself — it must compile.
describe('QueryEvent interface', () => {
  it('should define proper QueryEvent structure', () => {
    // Every field present, including the optional projectPath/tabId.
    const fullEvent: QueryEvent = {
      id: 'test-id',
      sessionId: 'session-1',
      agentType: 'claude-code',
      source: 'user',
      startTime: Date.now(),
      duration: 5000,
      projectPath: '/test/project',
      tabId: 'tab-1',
    };
    expect(fullEvent.id).toBe('test-id');
    expect(fullEvent.sessionId).toBe('session-1');
    expect(fullEvent.source).toBe('user');
  });
  it('should allow optional fields to be undefined', () => {
    // Minimal event: projectPath and tabId omitted entirely.
    const minimalEvent: QueryEvent = {
      id: 'test-id',
      sessionId: 'session-1',
      agentType: 'claude-code',
      source: 'auto',
      startTime: Date.now(),
      duration: 3000,
    };
    expect(minimalEvent.projectPath).toBeUndefined();
    expect(minimalEvent.tabId).toBeUndefined();
  });
});
// Shape check for AutoRunSession: one batch run with partial completion.
describe('AutoRunSession interface', () => {
  it('should define proper AutoRunSession structure', () => {
    // 5 tasks scheduled, 3 finished when the session was recorded.
    const batchRun: AutoRunSession = {
      id: 'auto-run-1',
      sessionId: 'session-1',
      agentType: 'claude-code',
      documentPath: '/docs/task.md',
      startTime: Date.now(),
      duration: 60000,
      tasksTotal: 5,
      tasksCompleted: 3,
      projectPath: '/test/project',
    };
    expect(batchRun.id).toBe('auto-run-1');
    expect(batchRun.tasksTotal).toBe(5);
    expect(batchRun.tasksCompleted).toBe(3);
  });
});
// Shape checks for AutoRunTask: one successful task and one failed task
// (the failed variant also exercises the optional taskContent field).
describe('AutoRunTask interface', () => {
  it('should define proper AutoRunTask structure', () => {
    const completedTask: AutoRunTask = {
      id: 'task-1',
      autoRunSessionId: 'auto-run-1',
      sessionId: 'session-1',
      agentType: 'claude-code',
      taskIndex: 0,
      taskContent: 'First task content',
      startTime: Date.now(),
      duration: 10000,
      success: true,
    };
    expect(completedTask.id).toBe('task-1');
    expect(completedTask.taskIndex).toBe(0);
    expect(completedTask.success).toBe(true);
  });
  it('should handle failed tasks', () => {
    // taskContent omitted: it is optional and may be absent on failures.
    const failedTask: AutoRunTask = {
      id: 'task-2',
      autoRunSessionId: 'auto-run-1',
      sessionId: 'session-1',
      agentType: 'claude-code',
      taskIndex: 1,
      startTime: Date.now(),
      duration: 5000,
      success: false,
    };
    expect(failedTask.success).toBe(false);
    expect(failedTask.taskContent).toBeUndefined();
  });
});
// Shape checks for SessionLifecycleEvent across its three states: open
// (no closedAt/duration), closed (both set), and minimal (only required fields).
describe('SessionLifecycleEvent interface', () => {
  it('should define proper SessionLifecycleEvent structure for created session', () => {
    const openSession: SessionLifecycleEvent = {
      id: 'lifecycle-1',
      sessionId: 'session-1',
      agentType: 'claude-code',
      projectPath: '/test/project',
      createdAt: Date.now(),
      isRemote: false,
    };
    expect(openSession.id).toBe('lifecycle-1');
    expect(openSession.sessionId).toBe('session-1');
    expect(openSession.agentType).toBe('claude-code');
    expect(openSession.closedAt).toBeUndefined();
    expect(openSession.duration).toBeUndefined();
  });
  it('should define proper SessionLifecycleEvent structure for closed session', () => {
    // Fixed timestamps rather than Date.now(): avoids race conditions and
    // makes the expected duration an exact constant (one hour).
    const openedAt = 1700000000000;
    const shutAt = 1700003600000;
    const closedSession: SessionLifecycleEvent = {
      id: 'lifecycle-2',
      sessionId: 'session-2',
      agentType: 'claude-code',
      projectPath: '/test/project',
      createdAt: openedAt,
      closedAt: shutAt,
      duration: shutAt - openedAt,
      isRemote: true,
    };
    expect(closedSession.closedAt).toBe(shutAt);
    expect(closedSession.duration).toBe(3600000);
    expect(closedSession.isRemote).toBe(true);
  });
  it('should allow optional fields to be undefined', () => {
    // Only the required fields: everything optional stays undefined.
    const bareEvent: SessionLifecycleEvent = {
      id: 'lifecycle-3',
      sessionId: 'session-3',
      agentType: 'opencode',
      createdAt: Date.now(),
    };
    expect(bareEvent.projectPath).toBeUndefined();
    expect(bareEvent.closedAt).toBeUndefined();
    expect(bareEvent.duration).toBeUndefined();
    expect(bareEvent.isRemote).toBeUndefined();
  });
});
describe('StatsTimeRange type', () => {
  it('should accept valid time ranges', () => {
    // Every supported range literal must be assignable to StatsTimeRange.
    const allRanges: StatsTimeRange[] = ['day', 'week', 'month', 'year', 'all'];
    expect(allRanges).toHaveLength(5);
    expect(allRanges).toContain('day');
    expect(allRanges).toContain('all');
  });
});
describe('StatsFilters interface', () => {
  it('should allow partial filters', () => {
    // All filter fields are optional, so any subset forms a valid object.
    const agentOnly: StatsFilters = { agentType: 'claude-code' };
    const sourceOnly: StatsFilters = { source: 'user' };
    const combined: StatsFilters = {
      agentType: 'opencode',
      source: 'auto',
      projectPath: '/test',
    };
    expect(agentOnly.agentType).toBe('claude-code');
    expect(sourceOnly.source).toBe('user');
    expect(combined.projectPath).toBe('/test');
  });
});
describe('StatsAggregation interface', () => {
  it('should define proper aggregation structure', () => {
    // A fully-populated aggregation covering both query stats and
    // the session-lifecycle fields added alongside the lifecycle table.
    const aggregation: StatsAggregation = {
      totalQueries: 100,
      totalDuration: 500000,
      avgDuration: 5000,
      byAgent: {
        'claude-code': { count: 70, duration: 350000 },
        opencode: { count: 30, duration: 150000 },
      },
      bySource: { user: 60, auto: 40 },
      byLocation: { local: 80, remote: 20 },
      byDay: [
        { date: '2024-01-01', count: 10, duration: 50000 },
        { date: '2024-01-02', count: 15, duration: 75000 },
      ],
      byHour: [
        { hour: 9, count: 20, duration: 100000 },
        { hour: 10, count: 25, duration: 125000 },
      ],
      // Session lifecycle fields
      totalSessions: 15,
      sessionsByAgent: {
        'claude-code': 10,
        opencode: 5,
      },
      sessionsByDay: [
        { date: '2024-01-01', count: 3 },
        { date: '2024-01-02', count: 5 },
      ],
      avgSessionDuration: 1800000,
    };
    expect(aggregation.totalQueries).toBe(100);
    expect(aggregation.byAgent['claude-code'].count).toBe(70);
    expect(aggregation.bySource.user).toBe(60);
    expect(aggregation.byDay).toHaveLength(2);
    // Session lifecycle assertions
    expect(aggregation.totalSessions).toBe(15);
    expect(aggregation.sessionsByAgent['claude-code']).toBe(10);
    expect(aggregation.sessionsByDay).toHaveLength(2);
    expect(aggregation.avgSessionDuration).toBe(1800000);
  });
});
});

View File

@@ -51,7 +51,7 @@ import {
cleanupAllGroomingSessions,
getActiveGroomingSessionCount,
} from './ipc/handlers';
import { initializeStatsDB, closeStatsDB, getStatsDB } from './stats-db';
import { initializeStatsDB, closeStatsDB, getStatsDB } from './stats';
import { groupChatEmitters } from './ipc/handlers/groupChat';
import {
routeModeratorResponse,

View File

@@ -15,7 +15,7 @@
import { ipcMain, BrowserWindow } from 'electron';
import { logger } from '../../utils/logger';
import { withIpcErrorLogging, CreateHandlerOptions } from '../../utils/ipcHandler';
import { getStatsDB } from '../../stats-db';
import { getStatsDB } from '../../stats';
import {
QueryEvent,
AutoRunSession,

View File

@@ -7,7 +7,7 @@ import type { ProcessManager } from '../process-manager';
import type { WebServer } from '../web-server';
import type { AgentDetector } from '../agents';
import type { SafeSendFn } from '../utils/safe-send';
import type { StatsDB } from '../stats-db';
import type { StatsDB } from '../stats';
import type { GroupChat, GroupChatParticipant } from '../group-chat/group-chat-storage';
import type { GroupChatState } from '../../shared/group-chat-types';
import type { ParticipantState } from '../ipc/handlers/groupChat';

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,353 @@
/**
* Stats Aggregation Queries
*
* Decomposes the monolithic getAggregatedStats into focused sub-query functions,
* each independently testable and readable.
*/
import type Database from 'better-sqlite3';
import type { StatsTimeRange, StatsAggregation } from '../../shared/stats-types';
import { PERFORMANCE_THRESHOLDS } from '../../shared/performance-metrics';
import { getTimeRangeStart, perfMetrics, LOG_CONTEXT } from './utils';
import { logger } from '../utils/logger';
// ============================================================================
// Sub-query Functions
// ============================================================================
/**
 * Count all query events in range and sum their durations.
 * Returns zeroed totals (via COALESCE) when no rows match.
 */
function queryTotals(
  db: Database.Database,
  startTime: number
): { count: number; total_duration: number } {
  const t0 = perfMetrics.start();
  const sql = `
    SELECT COUNT(*) as count, COALESCE(SUM(duration), 0) as total_duration
    FROM query_events
    WHERE start_time >= ?
  `;
  const totals = db.prepare(sql).get(startTime) as { count: number; total_duration: number };
  perfMetrics.end(t0, 'getAggregatedStats:totals');
  return totals;
}
/**
 * Group query counts and total durations by agent type.
 * Returns a map keyed by agent_type for O(1) lookup by the caller.
 */
function queryByAgent(
  db: Database.Database,
  startTime: number
): Record<string, { count: number; duration: number }> {
  const t0 = perfMetrics.start();
  const raw = db
    .prepare(
      `
    SELECT agent_type, COUNT(*) as count, SUM(duration) as duration
    FROM query_events
    WHERE start_time >= ?
    GROUP BY agent_type
  `
    )
    .all(startTime) as Array<{ agent_type: string; count: number; duration: number }>;
  // Re-key the grouped rows by agent type.
  const byAgent: Record<string, { count: number; duration: number }> = {};
  raw.forEach((row) => {
    byAgent[row.agent_type] = { count: row.count, duration: row.duration };
  });
  perfMetrics.end(t0, 'getAggregatedStats:byAgent', { agentCount: raw.length });
  return byAgent;
}
/**
 * Count queries by origin ('user' vs 'auto') within the time range.
 * Buckets default to zero when a source has no rows in range.
 */
function queryBySource(db: Database.Database, startTime: number): { user: number; auto: number } {
  const t0 = perfMetrics.start();
  const raw = db
    .prepare(
      `
    SELECT source, COUNT(*) as count
    FROM query_events
    WHERE start_time >= ?
    GROUP BY source
  `
    )
    .all(startTime) as Array<{ source: 'user' | 'auto'; count: number }>;
  const counts = { user: 0, auto: 0 };
  raw.forEach((row) => {
    counts[row.source] = row.count;
  });
  perfMetrics.end(t0, 'getAggregatedStats:bySource');
  return counts;
}
/**
 * Count queries by execution location (local vs remote/SSH).
 *
 * is_remote was added in migration v2, so rows written before that
 * migration have NULL; those are folded into the local bucket.
 */
function queryByLocation(
  db: Database.Database,
  startTime: number
): { local: number; remote: number } {
  const perfStart = perfMetrics.start();
  const rows = db
    .prepare(
      `
    SELECT is_remote, COUNT(*) as count
    FROM query_events
    WHERE start_time >= ?
    GROUP BY is_remote
  `
    )
    .all(startTime) as Array<{ is_remote: number | null; count: number }>;
  const result = { local: 0, remote: 0 };
  for (const row of rows) {
    if (row.is_remote === 1) {
      result.remote = row.count;
    } else {
      // Treat NULL (legacy data) and 0 as local; += merges the NULL
      // group and the 0 group, which GROUP BY returns separately.
      result.local += row.count;
    }
  }
  perfMetrics.end(perfStart, 'getAggregatedStats:byLocation');
  return result;
}
/**
 * Bucket query counts and durations per local-time calendar day,
 * ordered oldest-first.
 */
function queryByDay(
  db: Database.Database,
  startTime: number
): Array<{ date: string; count: number; duration: number }> {
  const t0 = perfMetrics.start();
  const dailyRows = db
    .prepare(
      `
    SELECT date(start_time / 1000, 'unixepoch', 'localtime') as date,
           COUNT(*) as count,
           SUM(duration) as duration
    FROM query_events
    WHERE start_time >= ?
    GROUP BY date(start_time / 1000, 'unixepoch', 'localtime')
    ORDER BY date ASC
  `
    )
    .all(startTime) as Array<{ date: string; count: number; duration: number }>;
  perfMetrics.end(t0, 'getAggregatedStats:byDay', { dayCount: dailyRows.length });
  return dailyRows;
}
/**
 * Bucket per-day query counts/durations separately for each agent type.
 * Returns a map of agent type -> chronologically ordered day buckets.
 */
function queryByAgentByDay(
  db: Database.Database,
  startTime: number
): Record<string, Array<{ date: string; count: number; duration: number }>> {
  const t0 = perfMetrics.start();
  const raw = db
    .prepare(
      `
    SELECT agent_type,
           date(start_time / 1000, 'unixepoch', 'localtime') as date,
           COUNT(*) as count,
           SUM(duration) as duration
    FROM query_events
    WHERE start_time >= ?
    GROUP BY agent_type, date(start_time / 1000, 'unixepoch', 'localtime')
    ORDER BY agent_type, date ASC
  `
    )
    .all(startTime) as Array<{
    agent_type: string;
    date: string;
    count: number;
    duration: number;
  }>;
  const grouped: Record<string, Array<{ date: string; count: number; duration: number }>> = {};
  for (const row of raw) {
    // Lazily create each agent's bucket list on first sight.
    const bucket = grouped[row.agent_type] ?? (grouped[row.agent_type] = []);
    bucket.push({ date: row.date, count: row.count, duration: row.duration });
  }
  perfMetrics.end(t0, 'getAggregatedStats:byAgentByDay');
  return grouped;
}
/**
 * Bucket query counts/durations by local-time hour of day (0-23),
 * ordered ascending. Hours with no activity are absent from the result.
 */
function queryByHour(
  db: Database.Database,
  startTime: number
): Array<{ hour: number; count: number; duration: number }> {
  const t0 = perfMetrics.start();
  const hourlyRows = db
    .prepare(
      `
    SELECT CAST(strftime('%H', start_time / 1000, 'unixepoch', 'localtime') AS INTEGER) as hour,
           COUNT(*) as count,
           SUM(duration) as duration
    FROM query_events
    WHERE start_time >= ?
    GROUP BY hour
    ORDER BY hour ASC
  `
    )
    .all(startTime) as Array<{ hour: number; count: number; duration: number }>;
  perfMetrics.end(t0, 'getAggregatedStats:byHour');
  return hourlyRows;
}
/**
 * Gather session-level statistics for the time range.
 *
 * NOTE(review): totalSessions counts DISTINCT session_ids in query_events
 * (sessions that ran at least one query), while sessionsByAgent / sessionsByDay /
 * avgSessionDuration come from the session_lifecycle table — so the per-agent
 * counts may not sum to totalSessions. Confirm this mix of sources is intended.
 */
function querySessionStats(
  db: Database.Database,
  startTime: number
): {
  totalSessions: number;
  sessionsByAgent: Record<string, number>;
  sessionsByDay: Array<{ date: string; count: number }>;
  avgSessionDuration: number;
} {
  const perfStart = perfMetrics.start();
  // Total unique sessions with queries
  const sessionTotals = db
    .prepare(
      `
    SELECT COUNT(DISTINCT session_id) as count
    FROM query_events
    WHERE start_time >= ?
  `
    )
    .get(startTime) as { count: number };
  // Average session duration from lifecycle table; rows with NULL duration
  // (sessions never closed) are excluded, and COALESCE yields 0 for no rows.
  const avgResult = db
    .prepare(
      `
    SELECT COALESCE(AVG(duration), 0) as avg_duration
    FROM session_lifecycle
    WHERE created_at >= ? AND duration IS NOT NULL
  `
    )
    .get(startTime) as { avg_duration: number };
  // Sessions by agent type
  const byAgentRows = db
    .prepare(
      `
    SELECT agent_type, COUNT(*) as count
    FROM session_lifecycle
    WHERE created_at >= ?
    GROUP BY agent_type
  `
    )
    .all(startTime) as Array<{ agent_type: string; count: number }>;
  const sessionsByAgent: Record<string, number> = {};
  for (const row of byAgentRows) {
    sessionsByAgent[row.agent_type] = row.count;
  }
  // Sessions by day (local-time calendar days, oldest first)
  const byDayRows = db
    .prepare(
      `
    SELECT date(created_at / 1000, 'unixepoch', 'localtime') as date,
           COUNT(*) as count
    FROM session_lifecycle
    WHERE created_at >= ?
    GROUP BY date(created_at / 1000, 'unixepoch', 'localtime')
    ORDER BY date ASC
  `
    )
    .all(startTime) as Array<{ date: string; count: number }>;
  perfMetrics.end(perfStart, 'getAggregatedStats:sessions', {
    sessionCount: sessionTotals.count,
  });
  return {
    totalSessions: sessionTotals.count,
    sessionsByAgent,
    sessionsByDay: byDayRows,
    // Round to whole milliseconds for the dashboard.
    avgSessionDuration: Math.round(avgResult.avg_duration),
  };
}
/**
 * Bucket per-day query counts/durations separately for each session.
 * Returns a map of session id -> chronologically ordered day buckets.
 */
function queryBySessionByDay(
  db: Database.Database,
  startTime: number
): Record<string, Array<{ date: string; count: number; duration: number }>> {
  const t0 = perfMetrics.start();
  const raw = db
    .prepare(
      `
    SELECT session_id,
           date(start_time / 1000, 'unixepoch', 'localtime') as date,
           COUNT(*) as count,
           SUM(duration) as duration
    FROM query_events
    WHERE start_time >= ?
    GROUP BY session_id, date(start_time / 1000, 'unixepoch', 'localtime')
    ORDER BY session_id, date ASC
  `
    )
    .all(startTime) as Array<{
    session_id: string;
    date: string;
    count: number;
    duration: number;
  }>;
  const grouped: Record<string, Array<{ date: string; count: number; duration: number }>> = {};
  for (const row of raw) {
    // Lazily create each session's bucket list on first sight.
    const bucket = grouped[row.session_id] ?? (grouped[row.session_id] = []);
    bucket.push({ date: row.date, count: row.count, duration: row.duration });
  }
  perfMetrics.end(t0, 'getAggregatedStats:bySessionByDay');
  return grouped;
}
// ============================================================================
// Orchestrator
// ============================================================================
/**
 * Get aggregated statistics for a time range.
 *
 * Composes results from focused sub-query functions for readability
 * and independent testability. Each sub-query records its own perf
 * metric; this orchestrator also records the total and warns when it
 * exceeds the dashboard-load threshold.
 *
 * @param db - Open better-sqlite3 handle to the stats database
 * @param range - Time window to aggregate over ('day' | 'week' | ... | 'all')
 * @returns Fully populated StatsAggregation for the Usage Dashboard
 */
export function getAggregatedStats(db: Database.Database, range: StatsTimeRange): StatsAggregation {
  const perfStart = perfMetrics.start();
  const startTime = getTimeRangeStart(range);
  // Run each focused sub-query; all share the same range start.
  const totals = queryTotals(db, startTime);
  const byAgent = queryByAgent(db, startTime);
  const bySource = queryBySource(db, startTime);
  const byLocation = queryByLocation(db, startTime);
  const byDay = queryByDay(db, startTime);
  const byAgentByDay = queryByAgentByDay(db, startTime);
  const byHour = queryByHour(db, startTime);
  const sessionStats = querySessionStats(db, startTime);
  const bySessionByDay = queryBySessionByDay(db, startTime);
  const totalDuration = perfMetrics.end(perfStart, 'getAggregatedStats:total', {
    range,
    totalQueries: totals.count,
  });
  // Log warning if the aggregation is slow
  if (totalDuration > PERFORMANCE_THRESHOLDS.DASHBOARD_LOAD) {
    logger.warn(
      `getAggregatedStats took ${totalDuration.toFixed(0)}ms (threshold: ${PERFORMANCE_THRESHOLDS.DASHBOARD_LOAD}ms)`,
      LOG_CONTEXT,
      { range, totalQueries: totals.count }
    );
  }
  return {
    totalQueries: totals.count,
    totalDuration: totals.total_duration,
    // Guard against divide-by-zero when the range has no queries.
    avgDuration: totals.count > 0 ? Math.round(totals.total_duration / totals.count) : 0,
    byAgent,
    bySource,
    byDay,
    byLocation,
    byHour,
    // Spreads totalSessions, sessionsByAgent, sessionsByDay, avgSessionDuration.
    ...sessionStats,
    byAgentByDay,
    bySessionByDay,
  };
}

169
src/main/stats/auto-run.ts Normal file
View File

@@ -0,0 +1,169 @@
/**
* Auto Run CRUD Operations
*
* Handles insertion, updating, and retrieval of Auto Run sessions and tasks.
*/
import type Database from 'better-sqlite3';
import type { AutoRunSession, AutoRunTask, StatsTimeRange } from '../../shared/stats-types';
import { generateId, getTimeRangeStart, normalizePath, LOG_CONTEXT } from './utils';
import {
mapAutoRunSessionRow,
mapAutoRunTaskRow,
type AutoRunSessionRow,
type AutoRunTaskRow,
} from './row-mappers';
import { StatementCache } from './utils';
import { logger } from '../utils/logger';
const stmtCache = new StatementCache();
// ============================================================================
// Auto Run Sessions
// ============================================================================
const INSERT_SESSION_SQL = `
  INSERT INTO auto_run_sessions (id, session_id, agent_type, document_path, start_time, duration, tasks_total, tasks_completed, project_path)
  VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
`;
/**
 * Persist a new Auto Run session row.
 *
 * Generates the row id, normalizes both paths for consistent storage,
 * and stores missing task counts as NULL.
 *
 * @returns the generated session id
 */
export function insertAutoRunSession(
  db: Database.Database,
  session: Omit<AutoRunSession, 'id'>
): string {
  const id = generateId();
  stmtCache
    .get(db, INSERT_SESSION_SQL)
    .run(
      id,
      session.sessionId,
      session.agentType,
      normalizePath(session.documentPath),
      session.startTime,
      session.duration,
      session.tasksTotal ?? null,
      session.tasksCompleted ?? null,
      normalizePath(session.projectPath)
    );
  logger.debug(`Inserted Auto Run session ${id}`, LOG_CONTEXT);
  return id;
}
/**
 * Apply partial updates to an Auto Run session (e.g., when it completes).
 *
 * Builds the SET clause dynamically from whichever fields are present,
 * always using bound parameters.
 *
 * @returns true if a row was actually modified, false otherwise
 */
export function updateAutoRunSession(
  db: Database.Database,
  id: string,
  updates: Partial<AutoRunSession>
): boolean {
  const assignments: string[] = [];
  const values: (string | number | null)[] = [];
  if (updates.duration !== undefined) {
    assignments.push('duration = ?');
    values.push(updates.duration);
  }
  if (updates.tasksTotal !== undefined) {
    assignments.push('tasks_total = ?');
    values.push(updates.tasksTotal ?? null);
  }
  if (updates.tasksCompleted !== undefined) {
    assignments.push('tasks_completed = ?');
    values.push(updates.tasksCompleted ?? null);
  }
  if (updates.documentPath !== undefined) {
    assignments.push('document_path = ?');
    values.push(normalizePath(updates.documentPath));
  }
  // Nothing to do if no recognized field was supplied.
  if (assignments.length === 0) {
    return false;
  }
  const result = db
    .prepare(`UPDATE auto_run_sessions SET ${assignments.join(', ')} WHERE id = ?`)
    .run(...values, id);
  logger.debug(`Updated Auto Run session ${id}`, LOG_CONTEXT);
  return result.changes > 0;
}
/**
 * Fetch Auto Run sessions whose start time falls within the range,
 * newest first, mapped to the camelCase AutoRunSession shape.
 */
export function getAutoRunSessions(db: Database.Database, range: StatsTimeRange): AutoRunSession[] {
  const rangeStart = getTimeRangeStart(range);
  const selectStmt = stmtCache.get(
    db,
    `
    SELECT * FROM auto_run_sessions
    WHERE start_time >= ?
    ORDER BY start_time DESC
  `
  );
  return (selectStmt.all(rangeStart) as AutoRunSessionRow[]).map(mapAutoRunSessionRow);
}
// ============================================================================
// Auto Run Tasks
// ============================================================================
const INSERT_TASK_SQL = `
  INSERT INTO auto_run_tasks (id, auto_run_session_id, session_id, agent_type, task_index, task_content, start_time, duration, success)
  VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
`;
/**
 * Persist a single Auto Run task row.
 *
 * Missing task content is stored as NULL and the boolean success flag
 * is encoded as 1/0 for SQLite.
 *
 * @returns the generated task id
 */
export function insertAutoRunTask(db: Database.Database, task: Omit<AutoRunTask, 'id'>): string {
  const id = generateId();
  stmtCache
    .get(db, INSERT_TASK_SQL)
    .run(
      id,
      task.autoRunSessionId,
      task.sessionId,
      task.agentType,
      task.taskIndex,
      task.taskContent ?? null,
      task.startTime,
      task.duration,
      task.success ? 1 : 0
    );
  logger.debug(`Inserted Auto Run task ${id}`, LOG_CONTEXT);
  return id;
}
/**
 * Fetch every task belonging to one Auto Run session, in execution order.
 */
export function getAutoRunTasks(db: Database.Database, autoRunSessionId: string): AutoRunTask[] {
  const selectStmt = stmtCache.get(
    db,
    `
    SELECT * FROM auto_run_tasks
    WHERE auto_run_session_id = ?
    ORDER BY task_index ASC
  `
  );
  return (selectStmt.all(autoRunSessionId) as AutoRunTaskRow[]).map(mapAutoRunTaskRow);
}
/**
 * Clear this module's prepared-statement cache.
 *
 * Must be called when the database is closed — cached statements hold
 * references to the old connection and would fail on a new one.
 */
export function clearAutoRunCache(): void {
  stmtCache.clear();
}

View File

@@ -0,0 +1,170 @@
/**
* Data Management Operations
*
* Handles data cleanup (with transactional safety) and CSV export
* (with proper escaping and complete field coverage).
*/
import type Database from 'better-sqlite3';
import type { StatsTimeRange } from '../../shared/stats-types';
import { getQueryEvents } from './query-events';
import { LOG_CONTEXT } from './utils';
import { logger } from '../utils/logger';
// ============================================================================
// Data Cleanup
// ============================================================================
/**
 * Clear old data from the database.
 *
 * Deletes query_events, auto_run_sessions, auto_run_tasks, and session_lifecycle
 * records that are older than the specified number of days.
 *
 * All deletes run within a single transaction for atomicity — either all tables
 * are cleaned or none are.
 *
 * @param db - Open better-sqlite3 handle to the stats database
 * @param olderThanDays - Delete records older than this many days; must be > 0
 * @returns success flag plus per-table deletion counts; on failure, counts
 *          are zero and `error` holds the message (never throws)
 */
export function clearOldData(
  db: Database.Database,
  olderThanDays: number
): {
  success: boolean;
  deletedQueryEvents: number;
  deletedAutoRunSessions: number;
  deletedAutoRunTasks: number;
  deletedSessionLifecycle: number;
  error?: string;
} {
  // Reject non-positive windows up front rather than deleting everything.
  if (olderThanDays <= 0) {
    return {
      success: false,
      deletedQueryEvents: 0,
      deletedAutoRunSessions: 0,
      deletedAutoRunTasks: 0,
      deletedSessionLifecycle: 0,
      error: 'olderThanDays must be greater than 0',
    };
  }
  try {
    const cutoffTime = Date.now() - olderThanDays * 24 * 60 * 60 * 1000;
    logger.info(
      `Clearing stats data older than ${olderThanDays} days (before ${new Date(cutoffTime).toISOString()})`,
      LOG_CONTEXT
    );
    let deletedEvents = 0;
    let deletedSessions = 0;
    let deletedTasks = 0;
    let deletedLifecycle = 0;
    // Wrap all deletes in a transaction for atomicity
    const runCleanup = db.transaction(() => {
      // Delete auto_run_tasks for sessions being deleted (cascade).
      // Must run BEFORE the sessions delete so the subquery still sees them.
      const tasksResult = db
        .prepare(
          'DELETE FROM auto_run_tasks WHERE auto_run_session_id IN (SELECT id FROM auto_run_sessions WHERE start_time < ?)'
        )
        .run(cutoffTime);
      deletedTasks = tasksResult.changes;
      // Delete auto_run_sessions
      const sessionsResult = db
        .prepare('DELETE FROM auto_run_sessions WHERE start_time < ?')
        .run(cutoffTime);
      deletedSessions = sessionsResult.changes;
      // Delete query_events
      const eventsResult = db
        .prepare('DELETE FROM query_events WHERE start_time < ?')
        .run(cutoffTime);
      deletedEvents = eventsResult.changes;
      // Delete session_lifecycle (keyed on created_at, not start_time)
      const lifecycleResult = db
        .prepare('DELETE FROM session_lifecycle WHERE created_at < ?')
        .run(cutoffTime);
      deletedLifecycle = lifecycleResult.changes;
    });
    runCleanup();
    const totalDeleted = deletedEvents + deletedSessions + deletedTasks + deletedLifecycle;
    logger.info(
      `Cleared ${totalDeleted} old stats records (${deletedEvents} query events, ${deletedSessions} auto-run sessions, ${deletedTasks} auto-run tasks, ${deletedLifecycle} session lifecycle)`,
      LOG_CONTEXT
    );
    return {
      success: true,
      deletedQueryEvents: deletedEvents,
      deletedAutoRunSessions: deletedSessions,
      deletedAutoRunTasks: deletedTasks,
      deletedSessionLifecycle: deletedLifecycle,
    };
  } catch (error) {
    // Transaction rolled back; report the failure instead of throwing.
    const errorMessage = error instanceof Error ? error.message : String(error);
    logger.error(`Failed to clear old stats data: ${errorMessage}`, LOG_CONTEXT);
    return {
      success: false,
      deletedQueryEvents: 0,
      deletedAutoRunSessions: 0,
      deletedAutoRunTasks: 0,
      deletedSessionLifecycle: 0,
      error: errorMessage,
    };
  }
}
// ============================================================================
// CSV Export
// ============================================================================
/**
 * Escape a value for CSV output.
 *
 * Always wraps the value in double quotes; embedded double quotes are
 * doubled per RFC 4180, which also makes commas and newlines safe.
 */
function csvEscape(value: string): string {
  const doubledQuotes = value.split('"').join('""');
  return `"${doubledQuotes}"`;
}
/**
 * Export query events in the given range to CSV text.
 *
 * Emits a header row followed by one row per event, covering every
 * field (including isRemote, added in migration v2). Every cell goes
 * through csvEscape, so quotes, commas, and newlines are safe.
 */
export function exportToCsv(db: Database.Database, range: StatsTimeRange): string {
  const events = getQueryEvents(db, range);
  const header = [
    'id',
    'sessionId',
    'agentType',
    'source',
    'startTime',
    'duration',
    'projectPath',
    'tabId',
    'isRemote',
  ].join(',');
  const dataLines = events.map((event) =>
    [
      csvEscape(event.id),
      csvEscape(event.sessionId),
      csvEscape(event.agentType),
      csvEscape(event.source),
      // Timestamps are exported as ISO-8601 strings.
      csvEscape(new Date(event.startTime).toISOString()),
      csvEscape(event.duration.toString()),
      csvEscape(event.projectPath ?? ''),
      csvEscape(event.tabId ?? ''),
      // Undefined (legacy rows) becomes an empty cell.
      csvEscape(event.isRemote !== undefined ? String(event.isRemote) : ''),
    ].join(',')
  );
  return [header, ...dataLines].join('\n');
}

44
src/main/stats/index.ts Normal file
View File

@@ -0,0 +1,44 @@
/**
* Stats Module
*
* Consolidated module for all stats database functionality:
* - SQLite database lifecycle and integrity management
* - Migration system for schema evolution
* - CRUD operations for query events, auto-run sessions/tasks, and session lifecycle
* - Aggregated statistics for the Usage Dashboard
* - Data management (cleanup, CSV export)
* - Singleton instance management
* - Performance metrics API
*
* Usage:
* ```typescript
* import { getStatsDB, initializeStatsDB, closeStatsDB } from './stats';
* import type { StatsDB } from './stats';
* ```
*/
// ============ Types ============
// Shared result/record shapes for integrity checks, backups, and migrations.
export type {
  IntegrityCheckResult,
  BackupResult,
  CorruptionRecoveryResult,
  Migration,
  MigrationRecord,
} from './types';
// ============ Utilities ============
export { normalizePath } from './utils';
// ============ Core Database ============
export { StatsDB } from './stats-db';
// ============ Singleton & Lifecycle ============
// Preferred entry points for callers; avoid constructing StatsDB directly.
export { getStatsDB, initializeStatsDB, closeStatsDB } from './singleton';
// ============ Performance Metrics API ============
export {
  setPerformanceLoggingEnabled,
  isPerformanceLoggingEnabled,
  getPerformanceMetrics,
  clearPerformanceMetrics,
} from './singleton';

View File

@@ -0,0 +1,234 @@
/**
* Stats Database Migration System
*
* Manages schema evolution through versioned, sequential migrations.
* Each migration runs exactly once and is recorded in the _migrations table.
*
* ### Adding New Migrations
*
* 1. Create a new `migrateVN()` function
* 2. Add it to the `getMigrations()` array with version number and description
* 3. Update `STATS_DB_VERSION` in `../../shared/stats-types.ts`
*/
import type Database from 'better-sqlite3';
import type { Migration, MigrationRecord } from './types';
import { mapMigrationRecordRow, type MigrationRecordRow } from './row-mappers';
import {
CREATE_MIGRATIONS_TABLE_SQL,
CREATE_QUERY_EVENTS_SQL,
CREATE_QUERY_EVENTS_INDEXES_SQL,
CREATE_AUTO_RUN_SESSIONS_SQL,
CREATE_AUTO_RUN_SESSIONS_INDEXES_SQL,
CREATE_AUTO_RUN_TASKS_SQL,
CREATE_AUTO_RUN_TASKS_INDEXES_SQL,
CREATE_SESSION_LIFECYCLE_SQL,
CREATE_SESSION_LIFECYCLE_INDEXES_SQL,
runStatements,
} from './schema';
import { LOG_CONTEXT } from './utils';
import { logger } from '../utils/logger';
// ============================================================================
// Migration Registry
// ============================================================================
/**
* Registry of all database migrations.
* Migrations must be sequential starting from version 1.
*/
export function getMigrations(): Migration[] {
  // Order matters only for readability here; runMigrations sorts by version.
  // Versions must be sequential starting from 1 and match STATS_DB_VERSION.
  return [
    {
      version: 1,
      description: 'Initial schema: query_events, auto_run_sessions, auto_run_tasks tables',
      up: (db) => migrateV1(db),
    },
    {
      version: 2,
      description: 'Add is_remote column to query_events for tracking SSH sessions',
      up: (db) => migrateV2(db),
    },
    {
      version: 3,
      description: 'Add session_lifecycle table for tracking session creation and closure',
      up: (db) => migrateV3(db),
    },
  ];
}
// ============================================================================
// Migration Execution
// ============================================================================
/**
* Run all pending database migrations.
*
* 1. Creates the _migrations table if it doesn't exist
* 2. Gets the current schema version from user_version pragma
* 3. Runs each pending migration in a transaction
* 4. Records each migration in the _migrations table
* 5. Updates the user_version pragma
*/
export function runMigrations(db: Database.Database): void {
// Create migrations table (the only table created outside the migration system)
db.prepare(CREATE_MIGRATIONS_TABLE_SQL).run();
// Get current version (0 if fresh database)
const versionResult = db.pragma('user_version') as Array<{ user_version: number }>;
const currentVersion = versionResult[0]?.user_version ?? 0;
const migrations = getMigrations();
const pendingMigrations = migrations.filter((m) => m.version > currentVersion);
if (pendingMigrations.length === 0) {
logger.debug(`Database is up to date (version ${currentVersion})`, LOG_CONTEXT);
return;
}
// Sort by version to ensure sequential execution
pendingMigrations.sort((a, b) => a.version - b.version);
logger.info(
`Running ${pendingMigrations.length} pending migration(s) (current version: ${currentVersion})`,
LOG_CONTEXT
);
for (const migration of pendingMigrations) {
applyMigration(db, migration);
}
}
/**
 * Apply a single migration within a transaction.
 *
 * On success, the migration's DDL, its _migrations record, and the
 * user_version bump all commit atomically. On failure, the transaction
 * rolls back and a 'failed' record is written afterwards (outside the
 * rolled-back transaction) before rethrowing.
 */
function applyMigration(db: Database.Database, migration: Migration): void {
  const startTime = Date.now();
  logger.info(`Applying migration v${migration.version}: ${migration.description}`, LOG_CONTEXT);
  try {
    const runMigrationTxn = db.transaction(() => {
      migration.up(db);
      // INSERT OR REPLACE so a previously-failed record is overwritten on retry.
      db.prepare(
        `
      INSERT OR REPLACE INTO _migrations (version, description, applied_at, status, error_message)
      VALUES (?, ?, ?, 'success', NULL)
    `
      ).run(migration.version, migration.description, Date.now());
      db.pragma(`user_version = ${migration.version}`);
    });
    runMigrationTxn();
    const duration = Date.now() - startTime;
    logger.info(`Migration v${migration.version} completed in ${duration}ms`, LOG_CONTEXT);
  } catch (error) {
    const errorMessage = error instanceof Error ? error.message : String(error);
    // Record the failure for diagnostics; the schema changes were rolled back.
    db.prepare(
      `
      INSERT OR REPLACE INTO _migrations (version, description, applied_at, status, error_message)
      VALUES (?, ?, ?, 'failed', ?)
    `
    ).run(migration.version, migration.description, Date.now(), errorMessage);
    logger.error(`Migration v${migration.version} failed: ${errorMessage}`, LOG_CONTEXT);
    throw error;
  }
}
// ============================================================================
// Migration Queries
// ============================================================================
/**
 * Read the applied-migration history from the _migrations table.
 * Returns an empty list when the table does not exist yet
 * (e.g., a database that has never been migrated).
 */
export function getMigrationHistory(db: Database.Database): MigrationRecord[] {
  const hasTable =
    db
      .prepare("SELECT name FROM sqlite_master WHERE type='table' AND name='_migrations'")
      .get() !== undefined;
  if (!hasTable) {
    return [];
  }
  const historyRows = db
    .prepare(
      `
    SELECT version, description, applied_at, status, error_message
    FROM _migrations
    ORDER BY version ASC
  `
    )
    .all() as MigrationRecordRow[];
  return historyRows.map(mapMigrationRecordRow);
}
/**
* Get the current database schema version.
*/
export function getCurrentVersion(db: Database.Database): number {
const versionResult = db.pragma('user_version') as Array<{ user_version: number }>;
return versionResult[0]?.user_version ?? 0;
}
/**
* Get the target version (highest version in migrations registry).
*/
export function getTargetVersion(): number {
const migrations = getMigrations();
if (migrations.length === 0) return 0;
return Math.max(...migrations.map((m) => m.version));
}
/**
* Check if any migrations are pending.
*/
export function hasPendingMigrations(db: Database.Database): boolean {
return getCurrentVersion(db) < getTargetVersion();
}
// ============================================================================
// Individual Migration Functions
// ============================================================================
/**
 * Migration v1: Initial schema creation.
 * Creates the three core tables plus their indexes from the shared
 * schema constants; the multi-statement index SQL goes through runStatements.
 */
function migrateV1(db: Database.Database): void {
  db.prepare(CREATE_QUERY_EVENTS_SQL).run();
  runStatements(db, CREATE_QUERY_EVENTS_INDEXES_SQL);
  db.prepare(CREATE_AUTO_RUN_SESSIONS_SQL).run();
  runStatements(db, CREATE_AUTO_RUN_SESSIONS_INDEXES_SQL);
  db.prepare(CREATE_AUTO_RUN_TASKS_SQL).run();
  runStatements(db, CREATE_AUTO_RUN_TASKS_INDEXES_SQL);
  logger.debug('Created stats database tables and indexes', LOG_CONTEXT);
}
/**
 * Migration v2: Add is_remote column for SSH session tracking.
 * Existing rows get NULL, which downstream queries treat as local.
 */
function migrateV2(db: Database.Database): void {
  db.prepare('ALTER TABLE query_events ADD COLUMN is_remote INTEGER').run();
  db.prepare('CREATE INDEX IF NOT EXISTS idx_query_is_remote ON query_events(is_remote)').run();
  logger.debug('Added is_remote column to query_events table', LOG_CONTEXT);
}
/**
 * Migration v3: Add session_lifecycle table (session open/close tracking).
 */
function migrateV3(db: Database.Database): void {
  db.prepare(CREATE_SESSION_LIFECYCLE_SQL).run();
  runStatements(db, CREATE_SESSION_LIFECYCLE_INDEXES_SQL);
  logger.debug('Created session_lifecycle table', LOG_CONTEXT);
}

View File

@@ -0,0 +1,87 @@
/**
* Query Event CRUD Operations
*
* Handles insertion and retrieval of individual AI query/response cycle records.
*/
import type Database from 'better-sqlite3';
import type { QueryEvent, StatsTimeRange, StatsFilters } from '../../shared/stats-types';
import { generateId, getTimeRangeStart, normalizePath, LOG_CONTEXT } from './utils';
import { mapQueryEventRow, type QueryEventRow } from './row-mappers';
import { StatementCache } from './utils';
import { logger } from '../utils/logger';
const stmtCache = new StatementCache();
const INSERT_SQL = `
  INSERT INTO query_events (id, session_id, agent_type, source, start_time, duration, project_path, tab_id, is_remote)
  VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
`;
/**
 * Record a single query/response cycle.
 *
 * Generates the row id, normalizes the project path, and encodes the
 * optional isRemote flag as 1/0/NULL for SQLite storage.
 *
 * @returns the generated event id
 */
export function insertQueryEvent(db: Database.Database, event: Omit<QueryEvent, 'id'>): string {
  const id = generateId();
  const remoteFlag = event.isRemote === undefined ? null : event.isRemote ? 1 : 0;
  stmtCache
    .get(db, INSERT_SQL)
    .run(
      id,
      event.sessionId,
      event.agentType,
      event.source,
      event.startTime,
      event.duration,
      normalizePath(event.projectPath),
      event.tabId ?? null,
      remoteFlag
    );
  logger.debug(`Inserted query event ${id}`, LOG_CONTEXT);
  return id;
}
/**
 * Fetch query events within a time range, optionally narrowed by filters.
 *
 * The WHERE clause is assembled from whichever filters are present, always
 * using bound parameters (never string interpolation of values). Results
 * are ordered newest-first and mapped to the camelCase QueryEvent shape.
 */
export function getQueryEvents(
  db: Database.Database,
  range: StatsTimeRange,
  filters?: StatsFilters
): QueryEvent[] {
  const clauses: string[] = ['start_time >= ?'];
  const params: (string | number)[] = [getTimeRangeStart(range)];
  if (filters?.agentType) {
    clauses.push('agent_type = ?');
    params.push(filters.agentType);
  }
  if (filters?.source) {
    clauses.push('source = ?');
    params.push(filters.source);
  }
  if (filters?.projectPath) {
    clauses.push('project_path = ?');
    // Normalize filter path to match stored format
    params.push(normalizePath(filters.projectPath) ?? '');
  }
  if (filters?.sessionId) {
    clauses.push('session_id = ?');
    params.push(filters.sessionId);
  }
  const sql =
    'SELECT * FROM query_events WHERE ' + clauses.join(' AND ') + ' ORDER BY start_time DESC';
  const rows = db.prepare(sql).all(...params) as QueryEventRow[];
  return rows.map(mapQueryEventRow);
}
/**
 * Clear the statement cache (call when database is closed).
 *
 * Prepared statements are tied to the connection they were prepared on, so
 * they must not be reused after the connection closes.
 */
export function clearQueryEventCache(): void {
  stmtCache.clear();
}

View File

@@ -0,0 +1,142 @@
/**
* Row Mapper Functions
*
* Converts snake_case SQLite row objects to camelCase TypeScript interfaces.
* Centralizes the mapping logic that was previously duplicated across CRUD methods.
*/
import type {
QueryEvent,
AutoRunSession,
AutoRunTask,
SessionLifecycleEvent,
} from '../../shared/stats-types';
import type { MigrationRecord } from './types';
// ============================================================================
// Raw Row Types (snake_case from SQLite)
// ============================================================================
/** Raw query_events row, column names exactly as stored in SQLite. */
export interface QueryEventRow {
  id: string;
  session_id: string;
  agent_type: string;
  source: 'user' | 'auto';
  start_time: number;
  duration: number;
  project_path: string | null;
  tab_id: string | null;
  // SQLite has no boolean type: 1 = remote, 0 = local, NULL = unknown
  is_remote: number | null;
}
/** Raw auto_run_sessions row. */
export interface AutoRunSessionRow {
  id: string;
  session_id: string;
  agent_type: string;
  document_path: string | null;
  start_time: number;
  duration: number;
  tasks_total: number | null;
  tasks_completed: number | null;
  project_path: string | null;
}
/** Raw auto_run_tasks row. */
export interface AutoRunTaskRow {
  id: string;
  auto_run_session_id: string;
  session_id: string;
  agent_type: string;
  task_index: number;
  task_content: string | null;
  start_time: number;
  duration: number;
  // 0/1 flag (CHECK constraint in schema restricts values)
  success: number;
}
/** Raw session_lifecycle row. closed_at/duration stay NULL while the session is open. */
export interface SessionLifecycleRow {
  id: string;
  session_id: string;
  agent_type: string;
  project_path: string | null;
  created_at: number;
  closed_at: number | null;
  duration: number | null;
  // 1 = remote, 0 = local, NULL = unknown
  is_remote: number | null;
}
/** Raw _migrations row. */
export interface MigrationRecordRow {
  version: number;
  description: string;
  applied_at: number;
  status: 'success' | 'failed';
  error_message: string | null;
}
// ============================================================================
// Mapper Functions
// ============================================================================
/**
 * Map a raw query_events row (snake_case columns) to a QueryEvent.
 * NULL columns become `undefined`; the 0/1 is_remote flag becomes a boolean.
 */
export function mapQueryEventRow(row: QueryEventRow): QueryEvent {
  const { project_path, tab_id, is_remote } = row;
  return {
    id: row.id,
    sessionId: row.session_id,
    agentType: row.agent_type,
    source: row.source,
    startTime: row.start_time,
    duration: row.duration,
    projectPath: project_path === null ? undefined : project_path,
    tabId: tab_id === null ? undefined : tab_id,
    isRemote: is_remote === null ? undefined : is_remote === 1,
  };
}
/**
 * Map a raw auto_run_sessions row to an AutoRunSession.
 * Nullable columns (document_path, tasks_*, project_path) become `undefined`;
 * legitimate zero counts are preserved.
 */
export function mapAutoRunSessionRow(row: AutoRunSessionRow): AutoRunSession {
  const orUndef = <T>(value: T | null): T | undefined => (value === null ? undefined : value);
  return {
    id: row.id,
    sessionId: row.session_id,
    agentType: row.agent_type,
    documentPath: orUndef(row.document_path),
    startTime: row.start_time,
    duration: row.duration,
    tasksTotal: orUndef(row.tasks_total),
    tasksCompleted: orUndef(row.tasks_completed),
    projectPath: orUndef(row.project_path),
  };
}
/**
 * Map a raw auto_run_tasks row to an AutoRunTask.
 * The 0/1 success column becomes a boolean; NULL task_content becomes `undefined`.
 */
export function mapAutoRunTaskRow(row: AutoRunTaskRow): AutoRunTask {
  const {
    id,
    auto_run_session_id,
    session_id,
    agent_type,
    task_index,
    task_content,
    start_time,
    duration,
    success,
  } = row;
  return {
    id,
    autoRunSessionId: auto_run_session_id,
    sessionId: session_id,
    agentType: agent_type,
    taskIndex: task_index,
    taskContent: task_content === null ? undefined : task_content,
    startTime: start_time,
    duration,
    success: success === 1,
  };
}
/**
 * Map a raw session_lifecycle row to a SessionLifecycleEvent.
 * closed_at/duration are `undefined` while a session is still open;
 * is_remote (0/1/NULL) becomes boolean-or-undefined.
 */
export function mapSessionLifecycleRow(row: SessionLifecycleRow): SessionLifecycleEvent {
  const closedAt = row.closed_at === null ? undefined : row.closed_at;
  const sessionDuration = row.duration === null ? undefined : row.duration;
  return {
    id: row.id,
    sessionId: row.session_id,
    agentType: row.agent_type,
    projectPath: row.project_path === null ? undefined : row.project_path,
    createdAt: row.created_at,
    closedAt,
    duration: sessionDuration,
    isRemote: row.is_remote === null ? undefined : row.is_remote === 1,
  };
}
/**
 * Map a raw _migrations row to a MigrationRecord.
 * A NULL error_message becomes `undefined`.
 */
export function mapMigrationRecordRow(row: MigrationRecordRow): MigrationRecord {
  const record: MigrationRecord = {
    version: row.version,
    description: row.description,
    appliedAt: row.applied_at,
    status: row.status,
    errorMessage: row.error_message === null ? undefined : row.error_message,
  };
  return record;
}

141
src/main/stats/schema.ts Normal file
View File

@@ -0,0 +1,141 @@
/**
* Stats Database Schema
*
* SQL definitions for all tables and indexes, plus helper utilities
* for executing multi-statement SQL strings.
*/
import type Database from 'better-sqlite3';
// ============================================================================
// Migrations Infrastructure
// ============================================================================
export const CREATE_MIGRATIONS_TABLE_SQL = `
CREATE TABLE IF NOT EXISTS _migrations (
version INTEGER PRIMARY KEY,
description TEXT NOT NULL,
applied_at INTEGER NOT NULL,
status TEXT NOT NULL CHECK(status IN ('success', 'failed')),
error_message TEXT
)
`;
// ============================================================================
// Metadata Table (for internal key-value storage like vacuum timestamps)
// ============================================================================
export const CREATE_META_TABLE_SQL = `
CREATE TABLE IF NOT EXISTS _meta (
key TEXT PRIMARY KEY,
value TEXT NOT NULL
)
`;
// ============================================================================
// Query Events (Migration v1)
// ============================================================================
// NOTE(review): this v1 DDL has no is_remote column, but the query-events CRUD
// layer inserts one — presumably it is added by a later migration (v2); confirm
// against the migrations module.
export const CREATE_QUERY_EVENTS_SQL = `
CREATE TABLE IF NOT EXISTS query_events (
id TEXT PRIMARY KEY,
session_id TEXT NOT NULL,
agent_type TEXT NOT NULL,
source TEXT NOT NULL CHECK(source IN ('user', 'auto')),
start_time INTEGER NOT NULL,
duration INTEGER NOT NULL,
project_path TEXT,
tab_id TEXT
)
`;
// Multi-statement string; execute via runStatements() (splits on ';').
export const CREATE_QUERY_EVENTS_INDEXES_SQL = `
CREATE INDEX IF NOT EXISTS idx_query_start_time ON query_events(start_time);
CREATE INDEX IF NOT EXISTS idx_query_agent_type ON query_events(agent_type);
CREATE INDEX IF NOT EXISTS idx_query_source ON query_events(source);
CREATE INDEX IF NOT EXISTS idx_query_session ON query_events(session_id);
CREATE INDEX IF NOT EXISTS idx_query_project_path ON query_events(project_path);
CREATE INDEX IF NOT EXISTS idx_query_agent_time ON query_events(agent_type, start_time)
`;
// ============================================================================
// Auto Run Sessions (Migration v1)
// ============================================================================
export const CREATE_AUTO_RUN_SESSIONS_SQL = `
CREATE TABLE IF NOT EXISTS auto_run_sessions (
id TEXT PRIMARY KEY,
session_id TEXT NOT NULL,
agent_type TEXT NOT NULL,
document_path TEXT,
start_time INTEGER NOT NULL,
duration INTEGER NOT NULL,
tasks_total INTEGER,
tasks_completed INTEGER,
project_path TEXT
)
`;
export const CREATE_AUTO_RUN_SESSIONS_INDEXES_SQL = `
CREATE INDEX IF NOT EXISTS idx_auto_session_start ON auto_run_sessions(start_time)
`;
// ============================================================================
// Auto Run Tasks (Migration v1)
// ============================================================================
export const CREATE_AUTO_RUN_TASKS_SQL = `
CREATE TABLE IF NOT EXISTS auto_run_tasks (
id TEXT PRIMARY KEY,
auto_run_session_id TEXT NOT NULL REFERENCES auto_run_sessions(id),
session_id TEXT NOT NULL,
agent_type TEXT NOT NULL,
task_index INTEGER NOT NULL,
task_content TEXT,
start_time INTEGER NOT NULL,
duration INTEGER NOT NULL,
success INTEGER NOT NULL CHECK(success IN (0, 1))
)
`;
// Multi-statement string; execute via runStatements().
export const CREATE_AUTO_RUN_TASKS_INDEXES_SQL = `
CREATE INDEX IF NOT EXISTS idx_task_auto_session ON auto_run_tasks(auto_run_session_id);
CREATE INDEX IF NOT EXISTS idx_task_start ON auto_run_tasks(start_time)
`;
// ============================================================================
// Session Lifecycle (Migration v3)
// ============================================================================
export const CREATE_SESSION_LIFECYCLE_SQL = `
CREATE TABLE IF NOT EXISTS session_lifecycle (
id TEXT PRIMARY KEY,
session_id TEXT NOT NULL UNIQUE,
agent_type TEXT NOT NULL,
project_path TEXT,
created_at INTEGER NOT NULL,
closed_at INTEGER,
duration INTEGER,
is_remote INTEGER
)
`;
// Multi-statement string; execute via runStatements().
export const CREATE_SESSION_LIFECYCLE_INDEXES_SQL = `
CREATE INDEX IF NOT EXISTS idx_session_created_at ON session_lifecycle(created_at);
CREATE INDEX IF NOT EXISTS idx_session_agent_type ON session_lifecycle(agent_type)
`;
// ============================================================================
// Utilities
// ============================================================================
/**
 * Execute a multi-statement SQL string by splitting on semicolons.
 *
 * Intended for simple DDL bundles (e.g. several CREATE INDEX statements in
 * one string). NOTE: the split is naive — it does not understand semicolons
 * inside string literals or trigger bodies, so keep inputs to plain DDL.
 */
export function runStatements(db: Database.Database, multiStatementSql: string): void {
  multiStatementSql
    .split(';')
    .filter((piece) => piece.trim().length > 0)
    .forEach((piece) => {
      db.prepare(piece).run();
    });
}

View File

@@ -0,0 +1,105 @@
/**
* Session Lifecycle CRUD Operations
*
* Tracks when sessions are created (launched) and closed,
* enabling session duration and lifecycle analytics.
*/
import type Database from 'better-sqlite3';
import type { SessionLifecycleEvent, StatsTimeRange } from '../../shared/stats-types';
import { generateId, getTimeRangeStart, normalizePath, LOG_CONTEXT } from './utils';
import { mapSessionLifecycleRow, type SessionLifecycleRow } from './row-mappers';
import { StatementCache } from './utils';
import { logger } from '../utils/logger';
// Module-level prepared-statement cache; emptied via
// clearSessionLifecycleCache() when the database connection closes.
const stmtCache = new StatementCache();
// closed_at and duration start as NULL and are filled in by recordSessionClosed().
const INSERT_SQL = `
INSERT INTO session_lifecycle (id, session_id, agent_type, project_path, created_at, is_remote)
VALUES (?, ?, ?, ?, ?, ?)
`;
/**
 * Record a session being created (launched).
 *
 * @param db - Open stats database handle
 * @param event - Lifecycle payload; closedAt/duration are filled later by recordSessionClosed
 * @returns The generated primary-key ID of the lifecycle row
 */
export function recordSessionCreated(
  db: Database.Database,
  event: Omit<SessionLifecycleEvent, 'id' | 'closedAt' | 'duration'>
): string {
  const id = generateId();
  // SQLite boolean encoding: true/false -> 1/0, absent -> NULL.
  const remoteFlag = event.isRemote === undefined ? null : event.isRemote ? 1 : 0;
  stmtCache
    .get(db, INSERT_SQL)
    .run(
      id,
      event.sessionId,
      event.agentType,
      normalizePath(event.projectPath),
      event.createdAt,
      remoteFlag
    );
  logger.debug(`Recorded session created: ${event.sessionId}`, LOG_CONTEXT);
  return id;
}
/**
 * Record a session being closed.
 *
 * Looks up the session's created_at to compute the total duration, then
 * stamps closed_at/duration on the row.
 *
 * @param db - Open stats database handle
 * @param sessionId - Session whose lifecycle row should be closed
 * @param closedAt - Close timestamp (same clock as created_at)
 * @returns false when no lifecycle row exists for the session, true otherwise
 */
export function recordSessionClosed(
  db: Database.Database,
  sessionId: string,
  closedAt: number
): boolean {
  // Get the session's created_at time to calculate duration.
  // Routed through the statement cache for consistency with the UPDATE below
  // (previously this re-prepared the SELECT on every call).
  const session = stmtCache
    .get(db, 'SELECT created_at FROM session_lifecycle WHERE session_id = ?')
    .get(sessionId) as { created_at: number } | undefined;
  if (!session) {
    logger.debug(`Session not found for closure: ${sessionId}`, LOG_CONTEXT);
    return false;
  }
  const duration = closedAt - session.created_at;
  const stmt = stmtCache.get(
    db,
    `
UPDATE session_lifecycle
SET closed_at = ?, duration = ?
WHERE session_id = ?
`
  );
  const result = stmt.run(closedAt, duration, sessionId);
  logger.debug(`Recorded session closed: ${sessionId}, duration: ${duration}ms`, LOG_CONTEXT);
  return result.changes > 0;
}
/**
 * Get session lifecycle events within a time range, newest first.
 */
export function getSessionLifecycleEvents(
  db: Database.Database,
  range: StatsTimeRange
): SessionLifecycleEvent[] {
  const sinceTs = getTimeRangeStart(range);
  const stmt = stmtCache.get(
    db,
    `
SELECT * FROM session_lifecycle
WHERE created_at >= ?
ORDER BY created_at DESC
`
  );
  const rows = stmt.all(sinceTs) as SessionLifecycleRow[];
  return rows.map(mapSessionLifecycleRow);
}
/**
 * Clear the statement cache (call when database is closed).
 *
 * Prepared statements are tied to the connection they were prepared on, so
 * they must not be reused after the connection closes.
 */
export function clearSessionLifecycleCache(): void {
  stmtCache.clear();
}

View File

@@ -0,0 +1,87 @@
/**
* Stats Database Singleton Management & Performance Metrics API
*
* Provides the global StatsDB instance and performance monitoring utilities.
*/
import { StatsDB } from './stats-db';
import { perfMetrics, LOG_CONTEXT } from './utils';
import { logger } from '../utils/logger';
// ============================================================================
// Singleton Instance
// ============================================================================
// Module-scoped singleton; constructed lazily on first access.
let statsDbInstance: StatsDB | null = null;
/**
 * Get the singleton StatsDB instance.
 *
 * Note: this only constructs the instance — it does NOT call initialize().
 * Call initializeStatsDB() on app ready before issuing queries.
 */
export function getStatsDB(): StatsDB {
  if (!statsDbInstance) {
    statsDbInstance = new StatsDB();
  }
  return statsDbInstance;
}
/**
 * Initialize the stats database (call on app ready).
 * Creates the singleton if needed, then opens/migrates the database file.
 */
export function initializeStatsDB(): void {
  const db = getStatsDB();
  db.initialize();
}
/**
 * Close the stats database (call on app quit).
 * Drops the singleton so a later getStatsDB() builds a fresh instance.
 */
export function closeStatsDB(): void {
  if (statsDbInstance) {
    statsDbInstance.close();
    statsDbInstance = null;
  }
}
// ============================================================================
// Performance Metrics API
// ============================================================================
/**
 * Enable or disable performance metrics logging for StatsDB operations.
 *
 * When enabled, detailed timing information is logged at debug level for:
 * - Database queries (getAggregatedStats, getQueryEvents, etc.)
 * - Individual SQL operations (totals, byAgent, bySource, byDay queries)
 *
 * Performance warnings are always logged (even when metrics are disabled)
 * when operations exceed defined thresholds.
 *
 * @param enabled - Whether to enable performance metrics logging
 */
export function setPerformanceLoggingEnabled(enabled: boolean): void {
  perfMetrics.setEnabled(enabled);
  // Logged at info level so the toggle itself is visible without debug logging.
  logger.info(`Performance metrics logging ${enabled ? 'enabled' : 'disabled'}`, LOG_CONTEXT);
}
/**
 * Check if performance metrics logging is currently enabled.
 *
 * @returns true when perfMetrics is currently collecting/logging timings
 */
export function isPerformanceLoggingEnabled(): boolean {
  return perfMetrics.isEnabled();
}
/**
 * Get collected performance metrics for analysis.
 *
 * Returns the last 100 recorded metrics (when enabled).
 * NOTE(review): return type is inferred from perfMetrics.getMetrics();
 * an explicit annotation on this exported function would be clearer.
 */
export function getPerformanceMetrics() {
  return perfMetrics.getMetrics();
}
/**
 * Clear collected performance metrics.
 * Resets the in-memory buffer; does not toggle the enabled/disabled state.
 */
export function clearPerformanceMetrics(): void {
  perfMetrics.clearMetrics();
}

543
src/main/stats/stats-db.ts Normal file
View File

@@ -0,0 +1,543 @@
/**
* Stats Database Core Class
*
* Manages the SQLite database lifecycle: initialization, integrity checks,
* corruption recovery, VACUUM scheduling, and connection management.
*
* CRUD operations are delegated to focused modules (query-events, auto-run,
* session-lifecycle, aggregations, data-management).
*/
import Database from 'better-sqlite3';
import * as path from 'path';
import * as fs from 'fs';
import { app } from 'electron';
import { logger } from '../utils/logger';
import type {
QueryEvent,
AutoRunSession,
AutoRunTask,
SessionLifecycleEvent,
StatsTimeRange,
StatsFilters,
StatsAggregation,
} from '../../shared/stats-types';
import type {
IntegrityCheckResult,
BackupResult,
CorruptionRecoveryResult,
MigrationRecord,
} from './types';
import { LOG_CONTEXT } from './utils';
import { CREATE_META_TABLE_SQL } from './schema';
import {
runMigrations,
getMigrationHistory,
getCurrentVersion,
getTargetVersion,
hasPendingMigrations,
} from './migrations';
import { insertQueryEvent, getQueryEvents, clearQueryEventCache } from './query-events';
import {
insertAutoRunSession,
updateAutoRunSession,
getAutoRunSessions,
insertAutoRunTask,
getAutoRunTasks,
clearAutoRunCache,
} from './auto-run';
import {
recordSessionCreated,
recordSessionClosed,
getSessionLifecycleEvents,
clearSessionLifecycleCache,
} from './session-lifecycle';
import { getAggregatedStats } from './aggregations';
import { clearOldData, exportToCsv } from './data-management';
/**
 * StatsDB manages the SQLite database for usage statistics.
 *
 * Owns the connection lifecycle: open, integrity check, corruption recovery,
 * WAL setup, migrations, and scheduled VACUUM. All CRUD is delegated to the
 * focused modules imported above.
 */
export class StatsDB {
  // Connection handle; null until initialize() succeeds and after close().
  private db: Database.Database | null = null;
  // Absolute path to stats.db under Electron's userData directory.
  private dbPath: string;
  private initialized = false;

  constructor() {
    this.dbPath = path.join(app.getPath('userData'), 'stats.db');
  }

  // ============================================================================
  // Database Accessor
  // ============================================================================

  /**
   * Get the underlying database handle, throwing if not initialized.
   * Replaces the repeated `if (!this.db) throw` guard clauses.
   */
  get database(): Database.Database {
    if (!this.db) throw new Error('Database not initialized');
    return this.db;
  }

  // ============================================================================
  // Lifecycle
  // ============================================================================

  /**
   * Initialize the database - create file, tables, and indexes.
   *
   * If the database is corrupted, this method will:
   * 1. Backup the corrupted database file
   * 2. Delete the corrupted file and any associated WAL/SHM files
   * 3. Create a fresh database
   *
   * Idempotent: a second call is a no-op once initialized.
   */
  initialize(): void {
    if (this.initialized) {
      return;
    }
    try {
      const dir = path.dirname(this.dbPath);
      if (!fs.existsSync(dir)) {
        fs.mkdirSync(dir, { recursive: true });
      }
      const dbExists = fs.existsSync(this.dbPath);
      if (dbExists) {
        // Existing file: validate integrity; recover (backup + recreate) if corrupted.
        const db = this.openWithCorruptionHandling();
        if (!db) {
          throw new Error('Failed to open or recover database');
        }
        this.db = db;
      } else {
        this.db = new Database(this.dbPath);
      }
      // Enable WAL mode for better concurrent access
      this.db.pragma('journal_mode = WAL');
      // Create the _meta table for internal key-value storage
      this.db.prepare(CREATE_META_TABLE_SQL).run();
      // Run migrations
      runMigrations(this.db);
      this.initialized = true;
      logger.info(`Stats database initialized at ${this.dbPath}`, LOG_CONTEXT);
      // Schedule VACUUM to run weekly instead of on every startup
      // (errors inside are caught and logged, never fatal to startup).
      this.vacuumIfNeededWeekly();
    } catch (error) {
      logger.error(`Failed to initialize stats database: ${error}`, LOG_CONTEXT);
      throw error;
    }
  }

  /**
   * Close the database connection
   */
  close(): void {
    if (this.db) {
      this.db.close();
      this.db = null;
      this.initialized = false;
      // Clear all statement caches (prepared statements are tied to the
      // now-closed connection and must not be reused)
      clearQueryEventCache();
      clearAutoRunCache();
      clearSessionLifecycleCache();
      logger.info('Stats database closed', LOG_CONTEXT);
    }
  }

  /**
   * Check if database is initialized and ready
   */
  isReady(): boolean {
    return this.initialized && this.db !== null;
  }

  /**
   * Get the database file path
   */
  getDbPath(): string {
    return this.dbPath;
  }

  /**
   * Get the database file size in bytes.
   * Returns 0 if the file does not exist or cannot be stat'd.
   */
  getDatabaseSize(): number {
    try {
      const stats = fs.statSync(this.dbPath);
      return stats.size;
    } catch {
      return 0;
    }
  }

  // ============================================================================
  // VACUUM
  // ============================================================================

  /**
   * Run VACUUM on the database to reclaim unused space and optimize structure.
   *
   * @returns success flag plus bytes reclaimed (file-size delta before/after)
   */
  vacuum(): { success: boolean; bytesFreed: number; error?: string } {
    if (!this.db) {
      return { success: false, bytesFreed: 0, error: 'Database not initialized' };
    }
    try {
      const sizeBefore = this.getDatabaseSize();
      logger.info(
        `Starting VACUUM (current size: ${(sizeBefore / 1024 / 1024).toFixed(2)} MB)`,
        LOG_CONTEXT
      );
      this.db.prepare('VACUUM').run();
      const sizeAfter = this.getDatabaseSize();
      const bytesFreed = sizeBefore - sizeAfter;
      logger.info(
        `VACUUM completed: ${(sizeBefore / 1024 / 1024).toFixed(2)} MB -> ${(sizeAfter / 1024 / 1024).toFixed(2)} MB (freed ${(bytesFreed / 1024 / 1024).toFixed(2)} MB)`,
        LOG_CONTEXT
      );
      return { success: true, bytesFreed };
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : String(error);
      logger.error(`VACUUM failed: ${errorMessage}`, LOG_CONTEXT);
      return { success: false, bytesFreed: 0, error: errorMessage };
    }
  }

  /**
   * Conditionally vacuum the database if it exceeds a size threshold.
   *
   * @param thresholdBytes - Size threshold in bytes (default: 100MB)
   * @returns whether a vacuum ran, the measured size, and the vacuum result if run
   */
  vacuumIfNeeded(thresholdBytes: number = 100 * 1024 * 1024): {
    vacuumed: boolean;
    databaseSize: number;
    result?: { success: boolean; bytesFreed: number; error?: string };
  } {
    const databaseSize = this.getDatabaseSize();
    if (databaseSize < thresholdBytes) {
      logger.debug(
        `Database size (${(databaseSize / 1024 / 1024).toFixed(2)} MB) below vacuum threshold (${(thresholdBytes / 1024 / 1024).toFixed(2)} MB), skipping VACUUM`,
        LOG_CONTEXT
      );
      return { vacuumed: false, databaseSize };
    }
    logger.info(
      `Database size (${(databaseSize / 1024 / 1024).toFixed(2)} MB) exceeds vacuum threshold (${(thresholdBytes / 1024 / 1024).toFixed(2)} MB), running VACUUM`,
      LOG_CONTEXT
    );
    const result = this.vacuum();
    return { vacuumed: true, databaseSize, result };
  }

  /**
   * Run VACUUM only if it hasn't been run in the last 7 days.
   *
   * Stores the last vacuum timestamp in the database's _meta table
   * instead of an external file.
   *
   * Note: the timestamp is only updated when a vacuum actually ran, so an
   * undersized database re-checks (cheaply) on every launch by design.
   *
   * @param intervalMs - Minimum time between vacuums (default: 7 days)
   */
  private vacuumIfNeededWeekly(intervalMs: number = 7 * 24 * 60 * 60 * 1000): void {
    try {
      // Read last vacuum timestamp from _meta table
      const row = this.database
        .prepare("SELECT value FROM _meta WHERE key = 'last_vacuum_at'")
        .get() as { value: string } | undefined;
      // `|| 0` also covers an unparseable stored value (NaN)
      const lastVacuum = row ? parseInt(row.value, 10) || 0 : 0;
      const now = Date.now();
      const timeSinceLastVacuum = now - lastVacuum;
      if (timeSinceLastVacuum < intervalMs) {
        const daysRemaining = ((intervalMs - timeSinceLastVacuum) / (24 * 60 * 60 * 1000)).toFixed(
          1
        );
        logger.debug(
          `Skipping VACUUM (last run ${((now - lastVacuum) / (24 * 60 * 60 * 1000)).toFixed(1)} days ago, next in ${daysRemaining} days)`,
          LOG_CONTEXT
        );
        return;
      }
      // Run VACUUM if database is large enough
      const result = this.vacuumIfNeeded();
      if (result.vacuumed) {
        // Update timestamp in _meta table
        this.database
          .prepare("INSERT OR REPLACE INTO _meta (key, value) VALUES ('last_vacuum_at', ?)")
          .run(String(now));
        logger.info('Updated VACUUM timestamp in _meta table', LOG_CONTEXT);
      }
    } catch (error) {
      // Non-fatal - log and continue
      logger.warn(`Failed to check/update VACUUM schedule: ${error}`, LOG_CONTEXT);
    }
  }

  // ============================================================================
  // Integrity & Corruption Handling
  // ============================================================================

  /**
   * Check the integrity of the database using SQLite's PRAGMA integrity_check.
   * A healthy database yields a single row whose value is 'ok'.
   */
  checkIntegrity(): IntegrityCheckResult {
    if (!this.db) {
      return { ok: false, errors: ['Database not initialized'] };
    }
    try {
      const result = this.db.pragma('integrity_check') as Array<{ integrity_check: string }>;
      if (result.length === 1 && result[0].integrity_check === 'ok') {
        return { ok: true, errors: [] };
      }
      const errors = result.map((row) => row.integrity_check);
      return { ok: false, errors };
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : String(error);
      return { ok: false, errors: [errorMessage] };
    }
  }

  /**
   * Create a backup of the current database file.
   * Copies the file to `<dbPath>.backup.<timestamp>` alongside the original.
   */
  backupDatabase(): BackupResult {
    try {
      if (!fs.existsSync(this.dbPath)) {
        return { success: false, error: 'Database file does not exist' };
      }
      const timestamp = Date.now();
      const backupPath = `${this.dbPath}.backup.${timestamp}`;
      fs.copyFileSync(this.dbPath, backupPath);
      logger.info(`Created database backup at ${backupPath}`, LOG_CONTEXT);
      return { success: true, backupPath };
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : String(error);
      logger.error(`Failed to create database backup: ${errorMessage}`, LOG_CONTEXT);
      return { success: false, error: errorMessage };
    }
  }

  /**
   * Handle a corrupted database by backing it up and recreating a fresh database.
   */
  private recoverFromCorruption(): CorruptionRecoveryResult {
    logger.warn('Attempting to recover from database corruption...', LOG_CONTEXT);
    try {
      if (this.db) {
        try {
          this.db.close();
        } catch {
          // Ignore errors closing corrupted database
        }
        this.db = null;
        this.initialized = false;
      }
      const backupResult = this.backupDatabase();
      if (!backupResult.success) {
        // Copy-backup failed: fall back to renaming the corrupted file, and as
        // a last resort delete it so a fresh database can be created.
        if (fs.existsSync(this.dbPath)) {
          const timestamp = Date.now();
          const emergencyBackupPath = `${this.dbPath}.corrupted.${timestamp}`;
          try {
            fs.renameSync(this.dbPath, emergencyBackupPath);
            logger.warn(`Emergency backup created at ${emergencyBackupPath}`, LOG_CONTEXT);
          } catch {
            logger.error('Failed to backup corrupted database, data will be lost', LOG_CONTEXT);
            fs.unlinkSync(this.dbPath);
          }
        }
      }
      // Delete WAL and SHM files
      const walPath = `${this.dbPath}-wal`;
      const shmPath = `${this.dbPath}-shm`;
      if (fs.existsSync(walPath)) {
        fs.unlinkSync(walPath);
      }
      if (fs.existsSync(shmPath)) {
        fs.unlinkSync(shmPath);
      }
      if (fs.existsSync(this.dbPath)) {
        fs.unlinkSync(this.dbPath);
      }
      logger.info('Corrupted database removed, will create fresh database', LOG_CONTEXT);
      return {
        recovered: true,
        backupPath: backupResult.backupPath,
      };
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : String(error);
      logger.error(`Failed to recover from database corruption: ${errorMessage}`, LOG_CONTEXT);
      return {
        recovered: false,
        error: errorMessage,
      };
    }
  }

  /**
   * Attempt to open and validate a database, handling corruption if detected.
   * Returns null only when the file cannot be opened AND recovery failed.
   */
  private openWithCorruptionHandling(): Database.Database | null {
    try {
      const db = new Database(this.dbPath);
      const result = db.pragma('integrity_check') as Array<{ integrity_check: string }>;
      if (result.length === 1 && result[0].integrity_check === 'ok') {
        return db;
      }
      const errors = result.map((row) => row.integrity_check);
      logger.error(`Database integrity check failed: ${errors.join(', ')}`, LOG_CONTEXT);
      db.close();
    } catch (error) {
      logger.error(`Failed to open database: ${error}`, LOG_CONTEXT);
    }
    const recoveryResult = this.recoverFromCorruption();
    if (!recoveryResult.recovered) {
      logger.error('Database corruption recovery failed', LOG_CONTEXT);
      return null;
    }
    try {
      const db = new Database(this.dbPath);
      logger.info('Fresh database created after corruption recovery', LOG_CONTEXT);
      return db;
    } catch (error) {
      logger.error(`Failed to create fresh database after recovery: ${error}`, LOG_CONTEXT);
      return null;
    }
  }

  // ============================================================================
  // Migration Queries (delegated)
  // ============================================================================

  getMigrationHistory(): MigrationRecord[] {
    return getMigrationHistory(this.database);
  }

  getCurrentVersion(): number {
    return getCurrentVersion(this.database);
  }

  getTargetVersion(): number {
    return getTargetVersion();
  }

  hasPendingMigrations(): boolean {
    return hasPendingMigrations(this.database);
  }

  // ============================================================================
  // Query Events (delegated)
  // ============================================================================

  insertQueryEvent(event: Omit<QueryEvent, 'id'>): string {
    return insertQueryEvent(this.database, event);
  }

  getQueryEvents(range: StatsTimeRange, filters?: StatsFilters): QueryEvent[] {
    return getQueryEvents(this.database, range, filters);
  }

  // ============================================================================
  // Auto Run (delegated)
  // ============================================================================

  insertAutoRunSession(session: Omit<AutoRunSession, 'id'>): string {
    return insertAutoRunSession(this.database, session);
  }

  updateAutoRunSession(id: string, updates: Partial<AutoRunSession>): boolean {
    return updateAutoRunSession(this.database, id, updates);
  }

  getAutoRunSessions(range: StatsTimeRange): AutoRunSession[] {
    return getAutoRunSessions(this.database, range);
  }

  insertAutoRunTask(task: Omit<AutoRunTask, 'id'>): string {
    return insertAutoRunTask(this.database, task);
  }

  getAutoRunTasks(autoRunSessionId: string): AutoRunTask[] {
    return getAutoRunTasks(this.database, autoRunSessionId);
  }

  // ============================================================================
  // Session Lifecycle (delegated)
  // ============================================================================

  recordSessionCreated(event: Omit<SessionLifecycleEvent, 'id' | 'closedAt' | 'duration'>): string {
    return recordSessionCreated(this.database, event);
  }

  recordSessionClosed(sessionId: string, closedAt: number): boolean {
    return recordSessionClosed(this.database, sessionId, closedAt);
  }

  getSessionLifecycleEvents(range: StatsTimeRange): SessionLifecycleEvent[] {
    return getSessionLifecycleEvents(this.database, range);
  }

  // ============================================================================
  // Aggregations (delegated)
  // ============================================================================

  getAggregatedStats(range: StatsTimeRange): StatsAggregation {
    return getAggregatedStats(this.database, range);
  }

  // ============================================================================
  // Data Management (delegated)
  // ============================================================================

  // Unlike the other delegates, this returns a structured error object instead
  // of throwing when the DB is not initialized, so callers receive a result shape.
  clearOldData(olderThanDays: number) {
    if (!this.db) {
      return {
        success: false,
        deletedQueryEvents: 0,
        deletedAutoRunSessions: 0,
        deletedAutoRunTasks: 0,
        deletedSessionLifecycle: 0,
        error: 'Database not initialized',
      };
    }
    return clearOldData(this.database, olderThanDays);
  }

  exportToCsv(range: StatsTimeRange): string {
    return exportToCsv(this.database, range);
  }
}

65
src/main/stats/types.ts Normal file
View File

@@ -0,0 +1,65 @@
/**
* Stats Database Internal Types
*
* These types are specific to the stats database implementation.
* Shared types (QueryEvent, AutoRunSession, etc.) remain in src/shared/stats-types.ts.
*/
import type Database from 'better-sqlite3';
/**
 * Result of a database integrity check
 */
export interface IntegrityCheckResult {
  /** Whether the database passed the integrity check */
  ok: boolean;
  /** Error messages from the integrity check (empty if ok is true) */
  errors: string[];
}
/**
 * Result of a database backup operation
 */
export interface BackupResult {
  /** Whether the backup succeeded */
  success: boolean;
  /** Path to the backup file (if success is true) */
  backupPath?: string;
  /** Error message (if success is false) */
  error?: string;
}
/**
 * Result of corruption recovery
 */
export interface CorruptionRecoveryResult {
  /** Whether recovery was performed */
  recovered: boolean;
  /** Path to the backup of the corrupted database */
  backupPath?: string;
  /** Error during recovery (if any) */
  error?: string;
}
/**
 * Represents a single database migration
 */
export interface Migration {
  /** Version number (must be sequential starting from 1) */
  version: number;
  /** Human-readable description of the migration */
  description: string;
  /** Function to apply the migration */
  up: (db: Database.Database) => void;
}
/**
 * Record of an applied migration stored in the migrations table
 */
export interface MigrationRecord {
  version: number;
  description: string;
  // Timestamp the migration ran — presumably epoch ms; confirm in migrations module
  appliedAt: number;
  status: 'success' | 'failed';
  // Typically set only when status is 'failed' (NULL in DB maps to undefined)
  errorMessage?: string;
}

97
src/main/stats/utils.ts Normal file
View File

@@ -0,0 +1,97 @@
/**
* Stats Database Utilities
*
* Shared helper functions and constants used across the stats module.
*/
import type Database from 'better-sqlite3';
import { logger } from '../utils/logger';
import { PerformanceMetrics } from '../../shared/performance-metrics';
import type { StatsTimeRange } from '../../shared/stats-types';
// Log-context tag prefixed to every stats-module log line.
export const LOG_CONTEXT = '[StatsDB]';
/**
 * Performance metrics logger for StatsDB operations.
 *
 * Disabled by default - enable via setPerformanceLoggingEnabled(true).
 * Logs at debug level through the main process logger.
 */
export const perfMetrics = new PerformanceMetrics(
  'StatsDB',
  (message, context) => logger.debug(message, context ?? LOG_CONTEXT),
  false // Disabled by default - enable for debugging
);
/**
 * Generate a unique ID for database entries.
 *
 * Format: "<epoch-ms>-<random base-36 suffix>" — the timestamp prefix makes
 * IDs sort roughly by creation order; the suffix disambiguates inserts that
 * land in the same millisecond.
 */
export function generateId(): string {
  const suffix = Math.random().toString(36).slice(2, 11);
  return `${Date.now()}-${suffix}`;
}
/**
 * Translate a StatsTimeRange into the epoch-ms timestamp where that range
 * begins. 'all' yields 0 so every record qualifies; the other ranges are
 * fixed day-offsets back from the current time.
 */
export function getTimeRangeStart(range: StatsTimeRange): number {
  const DAY_MS = 24 * 60 * 60 * 1000;
  if (range === 'all') {
    return 0;
  }
  const daysByRange: Record<Exclude<StatsTimeRange, 'all'>, number> = {
    day: 1,
    week: 7,
    month: 30,
    year: 365,
  };
  return Date.now() - daysByRange[range] * DAY_MS;
}
/**
 * Normalize file paths to use forward slashes consistently across platforms.
 *
 * Paths stored in the database keep one format regardless of OS, enabling
 * cross-platform data portability and consistent project-path filtering.
 *
 * - Windows-style backslashes become forward slashes
 * - UNC paths are preserved (\\server\share -> //server/share)
 * - null/undefined inputs return null
 *
 * @param filePath - The file path to normalize (Windows or Unix style)
 * @returns The normalized path, or null if input is null/undefined
 */
export function normalizePath(filePath: string | null | undefined): string | null {
  if (filePath === null || filePath === undefined) {
    return null;
  }
  return filePath.split('\\').join('/');
}
/**
 * Cache of prepared SQL statements keyed by their SQL text.
 *
 * Eliminates repeated `db.prepare()` overhead for hot queries. Each instance
 * must be cleared when its database connection closes, because prepared
 * statements are bound to the connection they were created on.
 */
export class StatementCache {
  private cache = new Map<string, Database.Statement>();

  /** Return the prepared statement for `sql`, preparing and caching it on first use. */
  get(db: Database.Database, sql: string): Database.Statement {
    const cached = this.cache.get(sql);
    if (cached !== undefined) {
      return cached;
    }
    const prepared = db.prepare(sql);
    this.cache.set(sql, prepared);
    return prepared;
  }

  /** Drop every cached statement. */
  clear(): void {
    this.cache.clear();
  }
}

View File

@@ -1,7 +1,7 @@
/**
* Type definitions for the stats tracking system
*
* These types are shared between main process (stats-db.ts) and renderer (dashboard).
* These types are shared between main process (stats/) and renderer (dashboard).
*/
/**