summaryrefslogtreecommitdiff
path: root/packages/server/src
diff options
context:
space:
mode:
author: Taylor Mullen <[email protected]> 2025-05-26 15:59:19 -0700
committer: N. Taylor Mullen <[email protected]> 2025-05-26 16:13:37 -0700
commit: b3f52e215a98835b752bd3ba3ec08ae44e480eda (patch)
tree: 2d32179018257d0501bc4eaa4ff3b9a209f13d87 /packages/server/src
parent: 9e1cfca53f4e71697f883c17d71bfcae86f01408 (diff)
feat: Replace SQLite with JSON logging for macOS sandbox compatibility
- Removes the sqlite3 dependency and refactors the logging mechanism to use a JSON file (logs.json) instead of a database.
- This change is a temporary workaround to address issues with macOS sandboxing that were caused by the SQLite native module.
- Storing all logs in a single JSON file may introduce scalability concerns in the future.

Fixes https://github.com/google-gemini/gemini-cli/issues/522
Diffstat (limited to 'packages/server/src')
-rw-r--r--packages/server/src/core/logger.test.ts503
-rw-r--r--packages/server/src/core/logger.ts290
2 files changed, 568 insertions, 225 deletions
diff --git a/packages/server/src/core/logger.test.ts b/packages/server/src/core/logger.test.ts
index 9b4f4555..2663a6be 100644
--- a/packages/server/src/core/logger.test.ts
+++ b/packages/server/src/core/logger.test.ts
@@ -4,194 +4,429 @@
* SPDX-License-Identifier: Apache-2.0
*/
-import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
-import { Logger, MessageSenderType } from './logger.js';
+import {
+ describe,
+ it,
+ expect,
+ vi,
+ beforeEach,
+ afterEach,
+ afterAll,
+} from 'vitest';
+import { Logger, MessageSenderType, LogEntry } from './logger.js';
+import { promises as fs } from 'node:fs';
+import path from 'node:path';
-// Mocks
-const mockDb = {
- exec: vi.fn((_sql, callback) => callback?.(null)),
- all: vi.fn((_sql, _params, callback) => callback?.(null, [])),
- run: vi.fn((_sql, _params, callback) => callback?.(null)),
- close: vi.fn((callback) => callback?.(null)),
-};
+const GEMINI_DIR = '.gemini';
+const LOG_FILE_NAME = 'logs.json';
+const TEST_LOG_FILE_PATH = path.join(process.cwd(), GEMINI_DIR, LOG_FILE_NAME);
-vi.mock('sqlite3', () => ({
- Database: vi.fn((_dbPath, _options, callback) => {
- process.nextTick(() => callback?.(null));
- return mockDb;
- }),
- default: {
- Database: vi.fn((_dbPath, _options, callback) => {
- process.nextTick(() => callback?.(null));
- return mockDb;
- }),
- },
-}));
+async function cleanupLogFile() {
+ try {
+ await fs.unlink(TEST_LOG_FILE_PATH);
+ } catch (error) {
+ if ((error as NodeJS.ErrnoException).code !== 'ENOENT') {
+ // Other errors during unlink are ignored for cleanup purposes
+ }
+ }
+ try {
+ const geminiDirPath = path.join(process.cwd(), GEMINI_DIR);
+ const dirContents = await fs.readdir(geminiDirPath);
+ for (const file of dirContents) {
+ if (file.startsWith(LOG_FILE_NAME + '.') && file.endsWith('.bak')) {
+ try {
+ await fs.unlink(path.join(geminiDirPath, file));
+ } catch (_e) {
+ /* ignore */
+ }
+ }
+ }
+ } catch (error) {
+ if ((error as NodeJS.ErrnoException).code !== 'ENOENT') {
+ /* ignore if .gemini dir itself is missing */
+ }
+ }
+}
+
+async function readLogFile(): Promise<LogEntry[]> {
+ try {
+ const content = await fs.readFile(TEST_LOG_FILE_PATH, 'utf-8');
+ return JSON.parse(content) as LogEntry[];
+ } catch (error) {
+ if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
+ return [];
+ }
+ throw error;
+ }
+}
describe('Logger', () => {
let logger: Logger;
beforeEach(async () => {
- vi.resetAllMocks();
-
- // Get a new instance for each test to ensure isolation,
+ vi.useFakeTimers();
+ vi.setSystemTime(new Date('2025-01-01T12:00:00.000Z'));
+ await cleanupLogFile();
logger = new Logger();
- // We need to wait for the async initialize to complete
- await logger.initialize().catch((err) => {
- console.error('Error initializing logger:', err);
- });
+ // Initialize is usually called here, but some tests initialize their own instances.
+ // For tests that use the global `logger`, it will be initialized here.
+ await logger.initialize();
+ });
+
+ afterEach(async () => {
+ logger.close();
+ await cleanupLogFile();
+ vi.useRealTimers();
+ vi.resetAllMocks(); // Ensure mocks are reset for every test
});
- afterEach(() => {
- vi.restoreAllMocks();
- logger.close(); // Close the database connection after each test
+ afterAll(async () => {
+ await cleanupLogFile();
});
describe('initialize', () => {
- it('should execute create tables if not exists', async () => {
- expect(mockDb.exec).toHaveBeenCalledWith(
- expect.stringMatching(/CREATE TABLE IF NOT EXISTS messages/),
- expect.any(Function),
- );
+ it('should create .gemini directory and an empty log file if none exist', async () => {
+ await cleanupLogFile();
+ const geminiDirPath = path.join(process.cwd(), GEMINI_DIR);
+ try {
+ await fs.rm(geminiDirPath, { recursive: true, force: true });
+ } catch (_e) {
+ /* ignore */
+ }
+
+ const newLogger = new Logger();
+ await newLogger.initialize();
+
+ const dirExists = await fs
+ .access(geminiDirPath)
+ .then(() => true)
+ .catch(() => false);
+ expect(dirExists).toBe(true);
+ const fileExists = await fs
+ .access(TEST_LOG_FILE_PATH)
+ .then(() => true)
+ .catch(() => false);
+ expect(fileExists).toBe(true);
+ const logContent = await readLogFile();
+ expect(logContent).toEqual([]);
+ newLogger.close();
});
- it('should be idempotent', async () => {
- mockDb.exec.mockClear();
+ it('should load existing logs and set correct messageId for the current session', async () => {
+ const fixedTime = new Date('2025-01-01T10:00:00.000Z');
+ vi.setSystemTime(fixedTime);
+ const currentSessionId = Math.floor(fixedTime.getTime() / 1000);
+ const existingLogs: LogEntry[] = [
+ {
+ sessionId: currentSessionId,
+ messageId: 0,
+ timestamp: new Date('2025-01-01T10:00:05.000Z').toISOString(),
+ type: MessageSenderType.USER,
+ message: 'Msg1',
+ },
+ {
+ sessionId: currentSessionId - 100,
+ messageId: 5,
+ timestamp: new Date('2025-01-01T09:00:00.000Z').toISOString(),
+ type: MessageSenderType.USER,
+ message: 'OldMsg',
+ },
+ {
+ sessionId: currentSessionId,
+ messageId: 1,
+ timestamp: new Date('2025-01-01T10:00:10.000Z').toISOString(),
+ type: MessageSenderType.USER,
+ message: 'Msg2',
+ },
+ ];
+ await fs.mkdir(path.join(process.cwd(), GEMINI_DIR), { recursive: true });
+ await fs.writeFile(TEST_LOG_FILE_PATH, JSON.stringify(existingLogs));
+ const newLogger = new Logger();
+ await newLogger.initialize();
+ expect(newLogger['messageId']).toBe(2);
+ expect(newLogger['logs']).toEqual(existingLogs);
+ newLogger.close();
+ });
- await logger.initialize(); // Second call
+ it('should set messageId to 0 for a new session if log file exists but has no logs for current session', async () => {
+ const fixedTime = new Date('2025-01-01T14:00:00.000Z');
+ vi.setSystemTime(fixedTime);
+ const existingLogs: LogEntry[] = [
+ {
+ sessionId: Math.floor(fixedTime.getTime() / 1000) - 500,
+ messageId: 5,
+ timestamp: new Date().toISOString(),
+ type: MessageSenderType.USER,
+ message: 'OldMsg',
+ },
+ ];
+ await fs.mkdir(path.join(process.cwd(), GEMINI_DIR), { recursive: true });
+ await fs.writeFile(TEST_LOG_FILE_PATH, JSON.stringify(existingLogs));
+ const newLogger = new Logger();
+ await newLogger.initialize();
+ expect(newLogger['messageId']).toBe(0);
+ newLogger.close();
+ });
- expect(mockDb.exec).not.toHaveBeenCalled();
+ it('should be idempotent', async () => {
+ // logger is initialized in beforeEach
+ await logger.logMessage(MessageSenderType.USER, 'test message');
+ const initialMessageId = logger['messageId'];
+ const initialLogCount = logger['logs'].length;
+ await logger.initialize(); // Second call should not change state
+ expect(logger['messageId']).toBe(initialMessageId);
+ expect(logger['logs'].length).toBe(initialLogCount);
+ const logsFromFile = await readLogFile();
+ expect(logsFromFile.length).toBe(1);
});
- });
- describe('logMessage', () => {
- it('should insert a message into the database', async () => {
- const type = MessageSenderType.USER;
- const message = 'Hello, world!';
- await logger.logMessage(type, message);
- expect(mockDb.run).toHaveBeenCalledWith(
- "INSERT INTO messages (session_id, message_id, type, message, timestamp) VALUES (?, ?, ?, ?, datetime('now'))",
- [expect.any(Number), 0, type, message], // sessionId, messageId, type, message
- expect.any(Function),
+ it('should handle invalid JSON in log file by backing it up and starting fresh', async () => {
+ await fs.mkdir(path.join(process.cwd(), GEMINI_DIR), { recursive: true });
+ await fs.writeFile(TEST_LOG_FILE_PATH, 'invalid json');
+ const consoleDebugSpy = vi
+ .spyOn(console, 'debug')
+ .mockImplementation(() => {});
+ const newLogger = new Logger();
+ await newLogger.initialize();
+ expect(consoleDebugSpy).toHaveBeenCalledWith(
+ expect.stringContaining('Invalid JSON in log file'),
+ expect.any(SyntaxError),
);
+ const logContent = await readLogFile();
+ expect(logContent).toEqual([]);
+ const dirContents = await fs.readdir(
+ path.join(process.cwd(), GEMINI_DIR),
+ );
+ expect(
+ dirContents.some(
+ (f) =>
+ f.startsWith(LOG_FILE_NAME + '.invalid_json') && f.endsWith('.bak'),
+ ),
+ ).toBe(true);
+ consoleDebugSpy.mockRestore();
+ newLogger.close();
});
- it('should increment messageId for subsequent messages', async () => {
- await logger.logMessage(MessageSenderType.USER, 'First message');
- expect(mockDb.run).toHaveBeenCalledWith(
- expect.any(String),
- [expect.any(Number), 0, MessageSenderType.USER, 'First message'],
- expect.any(Function),
+ it('should handle non-array JSON in log file by backing it up and starting fresh', async () => {
+ await fs.mkdir(path.join(process.cwd(), GEMINI_DIR), { recursive: true });
+ await fs.writeFile(
+ TEST_LOG_FILE_PATH,
+ JSON.stringify({ not: 'an array' }),
);
- await logger.logMessage(MessageSenderType.USER, 'Second message');
- expect(mockDb.run).toHaveBeenCalledWith(
- expect.any(String),
- [expect.any(Number), 1, MessageSenderType.USER, 'Second message'], // messageId is now 1
- expect.any(Function),
+ const consoleDebugSpy = vi
+ .spyOn(console, 'debug')
+ .mockImplementation(() => {});
+ const newLogger = new Logger();
+ await newLogger.initialize();
+ expect(consoleDebugSpy).toHaveBeenCalledWith(
+ `Log file at ${TEST_LOG_FILE_PATH} is not a valid JSON array. Starting with empty logs.`,
);
+ const logContent = await readLogFile();
+ expect(logContent).toEqual([]);
+ const dirContents = await fs.readdir(
+ path.join(process.cwd(), GEMINI_DIR),
+ );
+ expect(
+ dirContents.some(
+ (f) =>
+ f.startsWith(LOG_FILE_NAME + '.malformed_array') &&
+ f.endsWith('.bak'),
+ ),
+ ).toBe(true);
+ consoleDebugSpy.mockRestore();
+ newLogger.close();
+ });
+ });
+
+ describe('logMessage', () => {
+ it('should append a message to the log file and update in-memory logs', async () => {
+ await logger.logMessage(MessageSenderType.USER, 'Hello, world!');
+ const logsFromFile = await readLogFile();
+ expect(logsFromFile.length).toBe(1);
+ expect(logsFromFile[0]).toMatchObject({
+ sessionId: logger['sessionId'],
+ messageId: 0,
+ type: MessageSenderType.USER,
+ message: 'Hello, world!',
+ timestamp: new Date('2025-01-01T12:00:00.000Z').toISOString(),
+ });
+ expect(logger['logs'].length).toBe(1);
+ expect(logger['logs'][0]).toEqual(logsFromFile[0]);
+ expect(logger['messageId']).toBe(1);
+ });
+
+ it('should correctly increment messageId for subsequent messages in the same session', async () => {
+ await logger.logMessage(MessageSenderType.USER, 'First');
+ vi.advanceTimersByTime(1000);
+ await logger.logMessage(MessageSenderType.USER, 'Second');
+ const logs = await readLogFile();
+ expect(logs.length).toBe(2);
+ expect(logs[0].messageId).toBe(0);
+ expect(logs[1].messageId).toBe(1);
+ expect(logs[1].timestamp).not.toBe(logs[0].timestamp);
+ expect(logger['messageId']).toBe(2);
});
- it('should handle database not initialized', async () => {
+ it('should handle logger not initialized', async () => {
const uninitializedLogger = new Logger();
- // uninitializedLogger.initialize() is not called
- const consoleErrorSpy = vi
- .spyOn(console, 'error')
+ const consoleDebugSpy = vi
+ .spyOn(console, 'debug')
.mockImplementation(() => {});
-
await uninitializedLogger.logMessage(MessageSenderType.USER, 'test');
-
- expect(consoleErrorSpy).toHaveBeenCalledWith('Database not initialized.');
- expect(mockDb.run).not.toHaveBeenCalled();
- consoleErrorSpy.mockRestore();
+ expect(consoleDebugSpy).toHaveBeenCalledWith(
+ 'Logger not initialized or session ID missing. Cannot log message.',
+ );
+ expect((await readLogFile()).length).toBe(0);
+ consoleDebugSpy.mockRestore();
+ uninitializedLogger.close();
});
- it('should handle error during db.run', async () => {
- const error = new Error('db.run failed');
- mockDb.run.mockImplementationOnce(
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- (_sql: any, _params: any, callback: any) => callback?.(error),
- );
+ it('should simulate concurrent writes from different logger instances to the same file', async () => {
+ const logger1 = new Logger(); // logger1
+ vi.setSystemTime(new Date('2025-01-01T13:00:00.000Z'));
+ await logger1.initialize();
+ const s1 = logger1['sessionId'];
- await expect(
- logger.logMessage(MessageSenderType.USER, 'test'),
- ).rejects.toThrow('db.run failed');
- });
- });
+ const logger2 = new Logger(); // logger2, will share same session if time is same
+ vi.setSystemTime(new Date('2025-01-01T13:00:00.000Z'));
+ await logger2.initialize();
+ expect(logger2['sessionId']).toEqual(s1);
- describe('getPreviousUserMessages', () => {
- it('should query the database for messages', async () => {
- mockDb.all.mockImplementationOnce(
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- (_sql: any, params: any, callback: any) =>
- callback?.(null, [{ message: 'msg1' }, { message: 'msg2' }]),
- );
+ // Log from logger1
+ await logger1.logMessage(MessageSenderType.USER, 'L1M1'); // L1 internal msgId becomes 1, writes {s1, 0}
+ vi.advanceTimersByTime(10);
- const messages = await logger.getPreviousUserMessages();
+ // Log from logger2. It reads file (sees {s1,0}), its internal msgId for s1 is 1.
+ await logger2.logMessage(MessageSenderType.USER, 'L2M1'); // L2 internal msgId becomes 2, writes {s1, 1}
+ vi.advanceTimersByTime(10);
- expect(mockDb.all).toHaveBeenCalledWith(
- expect.stringMatching(/SELECT message FROM messages/),
- [],
- expect.any(Function),
- );
- expect(messages).toEqual(['msg1', 'msg2']);
+ // Log from logger1. It reads file (sees {s1,0}, {s1,1}), its internal msgId for s1 is 2.
+ await logger1.logMessage(MessageSenderType.USER, 'L1M2'); // L1 internal msgId becomes 3, writes {s1, 2}
+ vi.advanceTimersByTime(10);
+
+ // Log from logger2. It reads file (sees {s1,0}, {s1,1}, {s1,2}), its internal msgId for s1 is 3.
+ await logger2.logMessage(MessageSenderType.USER, 'L2M2'); // L2 internal msgId becomes 4, writes {s1, 3}
+
+ const logsFromFile = await readLogFile();
+ expect(logsFromFile.length).toBe(4);
+ const messageIdsInFile = logsFromFile
+ .map((log) => log.messageId)
+ .sort((a, b) => a - b);
+ expect(messageIdsInFile).toEqual([0, 1, 2, 3]);
+
+ const messagesInFile = logsFromFile
+ .sort((a, b) => a.messageId - b.messageId)
+ .map((l) => l.message);
+ expect(messagesInFile).toEqual(['L1M1', 'L2M1', 'L1M2', 'L2M2']);
+
+ // Check internal state (next messageId each logger would use for that session)
+ expect(logger1['messageId']).toBe(3); // L1 wrote 0, then 2. Next is 3.
+ expect(logger2['messageId']).toBe(4); // L2 wrote 1, then 3. Next is 4.
+
+ logger1.close();
+ logger2.close();
});
- it('should handle database not initialized', async () => {
- const uninitializedLogger = new Logger();
- // uninitializedLogger.initialize() is not called
- const consoleErrorSpy = vi
- .spyOn(console, 'error')
+ it('should not throw, not increment messageId, and log error if writing to file fails', async () => {
+ const writeFileSpy = vi
+ .spyOn(fs, 'writeFile')
+ .mockRejectedValueOnce(new Error('Disk full'));
+ const consoleDebugSpy = vi
+ .spyOn(console, 'debug')
.mockImplementation(() => {});
+ const initialMessageId = logger['messageId'];
+ const initialLogCount = logger['logs'].length;
- const messages = await uninitializedLogger.getPreviousUserMessages();
+ await logger.logMessage(MessageSenderType.USER, 'test fail write');
- expect(consoleErrorSpy).toHaveBeenCalledWith('Database not initialized.');
- expect(messages).toEqual([]);
- expect(mockDb.all).not.toHaveBeenCalled();
- consoleErrorSpy.mockRestore();
+ expect(consoleDebugSpy).toHaveBeenCalledWith(
+ 'Error writing to log file:',
+ expect.any(Error),
+ );
+ expect(logger['messageId']).toBe(initialMessageId); // Not incremented
+ expect(logger['logs'].length).toBe(initialLogCount); // Log not added to in-memory cache
+
+ writeFileSpy.mockRestore();
+ consoleDebugSpy.mockRestore();
});
+ });
- it('should handle error during db.all', async () => {
- const error = new Error('db.all failed');
- mockDb.all.mockImplementationOnce(
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- (_sql: any, _params: any, callback: any) => callback?.(error, []),
- );
+ describe('getPreviousUserMessages', () => {
+ it('should retrieve user messages, sorted newest first by session, then timestamp, then messageId', async () => {
+ const loggerSort = new Logger();
+ vi.setSystemTime(new Date('2025-01-01T10:00:00.000Z'));
+ await loggerSort.initialize();
+ const s1 = loggerSort['sessionId']!;
+ await loggerSort.logMessage(MessageSenderType.USER, 'S1M0_ts100000'); // msgId 0
+ vi.advanceTimersByTime(10);
+ await loggerSort.logMessage(MessageSenderType.USER, 'S1M1_ts100010'); // msgId 1
+ loggerSort.close(); // Close to ensure next initialize starts a new session if time changed
- await expect(logger.getPreviousUserMessages()).rejects.toThrow(
- 'db.all failed',
+ vi.setSystemTime(new Date('2025-01-01T11:00:00.000Z'));
+ await loggerSort.initialize(); // Re-initialize for a new session
+ const s2 = loggerSort['sessionId']!;
+ expect(s2).not.toEqual(s1);
+ await loggerSort.logMessage(MessageSenderType.USER, 'S2M0_ts110000'); // msgId 0 for s2
+ vi.advanceTimersByTime(10);
+ await loggerSort.logMessage(
+ 'model' as MessageSenderType,
+ 'S2_Model_ts110010',
);
+ vi.advanceTimersByTime(10);
+ await loggerSort.logMessage(MessageSenderType.USER, 'S2M1_ts110020'); // msgId 1 for s2
+ loggerSort.close();
+
+ // To test the sorting thoroughly, especially the session part, we'll read the file
+ // as if it was written by multiple sessions and then initialize a new logger to load them.
+ const combinedLogs = await readLogFile();
+ const finalLogger = new Logger();
+ // Manually set its internal logs to simulate loading from a file with mixed sessions
+ finalLogger['logs'] = combinedLogs;
+ finalLogger['initialized'] = true; // Mark as initialized to allow getPreviousUserMessages to run
+
+ const messages = await finalLogger.getPreviousUserMessages();
+ expect(messages).toEqual([
+ 'S2M1_ts110020',
+ 'S2M0_ts110000',
+ 'S1M1_ts100010',
+ 'S1M0_ts100000',
+ ]);
+ finalLogger.close();
});
- });
- describe('close', () => {
- it('should close the database connection', () => {
- logger.close();
- expect(mockDb.close).toHaveBeenCalled();
+ it('should return empty array if no user messages exist', async () => {
+ await logger.logMessage('system' as MessageSenderType, 'System boot');
+ const messages = await logger.getPreviousUserMessages();
+ expect(messages).toEqual([]);
});
- it('should handle database not initialized', () => {
+ it('should return empty array if logger not initialized', async () => {
const uninitializedLogger = new Logger();
- // uninitializedLogger.initialize() is not called
+ const messages = await uninitializedLogger.getPreviousUserMessages();
+ expect(messages).toEqual([]);
uninitializedLogger.close();
- expect(() => uninitializedLogger.close()).not.toThrow();
});
+ });
- it('should handle error during db.close', () => {
- const error = new Error('db.close failed');
- mockDb.close.mockImplementationOnce((callback: (error: Error) => void) =>
- callback?.(error),
- );
- const consoleErrorSpy = vi
- .spyOn(console, 'error')
- .mockImplementation(() => {});
-
+ describe('close', () => {
+ it('should reset logger state', async () => {
+ await logger.logMessage(MessageSenderType.USER, 'A message');
logger.close();
- expect(consoleErrorSpy).toHaveBeenCalledWith(
- 'Error closing database:',
- error.message,
+ const consoleDebugSpy = vi
+ .spyOn(console, 'debug')
+ .mockImplementation(() => {});
+ await logger.logMessage(MessageSenderType.USER, 'Another message');
+ expect(consoleDebugSpy).toHaveBeenCalledWith(
+ 'Logger not initialized or session ID missing. Cannot log message.',
);
- consoleErrorSpy.mockRestore();
+ const messages = await logger.getPreviousUserMessages();
+ expect(messages).toEqual([]);
+ expect(logger['initialized']).toBe(false);
+ expect(logger['logFilePath']).toBeUndefined();
+ expect(logger['logs']).toEqual([]);
+ expect(logger['sessionId']).toBeUndefined();
+ expect(logger['messageId']).toBe(0);
+ consoleDebugSpy.mockRestore();
});
});
});
diff --git a/packages/server/src/core/logger.ts b/packages/server/src/core/logger.ts
index d12d4240..feb16944 100644
--- a/packages/server/src/core/logger.ts
+++ b/packages/server/src/core/logger.ts
@@ -5,127 +5,235 @@
*/
import path from 'node:path';
-import sqlite3 from 'sqlite3';
import { promises as fs } from 'node:fs';
const GEMINI_DIR = '.gemini';
-const DB_NAME = 'logs.db';
-const CREATE_TABLE_SQL = `
-CREATE TABLE IF NOT EXISTS messages (
- session_id INTEGER,
- message_id INTEGER,
- timestamp TEXT,
- type TEXT,
- message TEXT
-);`;
+const LOG_FILE_NAME = 'logs.json';
export enum MessageSenderType {
USER = 'user',
}
+export interface LogEntry {
+ sessionId: number;
+ messageId: number;
+ timestamp: string;
+ type: MessageSenderType;
+ message: string;
+}
+
export class Logger {
- private db: sqlite3.Database | undefined;
+ private logFilePath: string | undefined;
private sessionId: number | undefined;
- private messageId: number | undefined;
+ private messageId = 0; // Instance-specific counter for the next messageId
+ private initialized = false;
+ private logs: LogEntry[] = []; // In-memory cache, ideally reflects the last known state of the file
constructor() {}
+ private async _readLogFile(): Promise<LogEntry[]> {
+ if (!this.logFilePath) {
+ throw new Error('Log file path not set during read attempt.');
+ }
+ try {
+ const fileContent = await fs.readFile(this.logFilePath, 'utf-8');
+ const parsedLogs = JSON.parse(fileContent);
+ if (!Array.isArray(parsedLogs)) {
+ console.debug(
+ `Log file at ${this.logFilePath} is not a valid JSON array. Starting with empty logs.`,
+ );
+ await this._backupCorruptedLogFile('malformed_array');
+ return [];
+ }
+ return parsedLogs.filter(
+ (entry) =>
+ typeof entry.sessionId === 'number' &&
+ typeof entry.messageId === 'number' &&
+ typeof entry.timestamp === 'string' &&
+ typeof entry.type === 'string' &&
+ typeof entry.message === 'string',
+ ) as LogEntry[];
+ } catch (error) {
+ const nodeError = error as NodeJS.ErrnoException;
+ if (nodeError.code === 'ENOENT') {
+ return [];
+ }
+ if (error instanceof SyntaxError) {
+ console.debug(
+ `Invalid JSON in log file ${this.logFilePath}. Backing up and starting fresh.`,
+ error,
+ );
+ await this._backupCorruptedLogFile('invalid_json');
+ return [];
+ }
+ console.debug(
+ `Failed to read or parse log file ${this.logFilePath}:`,
+ error,
+ );
+ throw error;
+ }
+ }
+
+ private async _backupCorruptedLogFile(reason: string): Promise<void> {
+ if (!this.logFilePath) return;
+ const backupPath = `${this.logFilePath}.${reason}.${Date.now()}.bak`;
+ try {
+ await fs.rename(this.logFilePath, backupPath);
+ console.debug(`Backed up corrupted log file to ${backupPath}`);
+ } catch (_backupError) {
+ // If rename fails (e.g. file doesn't exist), no need to log an error here as the primary error (e.g. invalid JSON) is already handled.
+ }
+ }
+
async initialize(): Promise<void> {
- if (this.db) {
+ if (this.initialized) {
return;
}
-
this.sessionId = Math.floor(Date.now() / 1000);
- this.messageId = 0;
-
- // Could be cleaner if our sqlite package supported promises.
- return new Promise((resolve, reject) => {
- const DB_DIR = path.resolve(process.cwd(), GEMINI_DIR);
- const DB_PATH = path.join(DB_DIR, DB_NAME);
- fs.mkdir(DB_DIR, { recursive: true })
- .then(() => {
- this.db = new sqlite3.Database(
- DB_PATH,
- sqlite3.OPEN_READWRITE |
- sqlite3.OPEN_CREATE |
- sqlite3.OPEN_FULLMUTEX,
- (err: Error | null) => {
- if (err) {
- reject(err);
- }
+ const geminiDir = path.resolve(process.cwd(), GEMINI_DIR);
+ this.logFilePath = path.join(geminiDir, LOG_FILE_NAME);
- // Read and execute the SQL script in create_tables.sql
- this.db?.exec(CREATE_TABLE_SQL, (err: Error | null) => {
- if (err) {
- this.db?.close();
- reject(err);
- }
- resolve();
- });
- },
- );
- })
- .catch(reject);
- });
+ try {
+ await fs.mkdir(geminiDir, { recursive: true });
+ let fileExisted = true;
+ try {
+ await fs.access(this.logFilePath);
+ } catch (_e) {
+ fileExisted = false;
+ }
+ this.logs = await this._readLogFile();
+ if (!fileExisted && this.logs.length === 0) {
+ await fs.writeFile(this.logFilePath, '[]', 'utf-8');
+ }
+ const sessionLogs = this.logs.filter(
+ (entry) => entry.sessionId === this.sessionId,
+ );
+ this.messageId =
+ sessionLogs.length > 0
+ ? Math.max(...sessionLogs.map((entry) => entry.messageId)) + 1
+ : 0;
+ this.initialized = true;
+ } catch (err) {
+ console.error('Failed to initialize logger:', err);
+ this.initialized = false;
+ }
}
- /**
- * Get list of previous user inputs sorted most recent first.
- * @returns list of messages.
- */
- async getPreviousUserMessages(): Promise<string[]> {
- if (!this.db) {
- console.error('Database not initialized.');
- return [];
+ private async _updateLogFile(
+ entryToAppend: LogEntry,
+ ): Promise<LogEntry | null> {
+ if (!this.logFilePath) {
+ console.debug('Log file path not set. Cannot persist log entry.');
+ throw new Error('Log file path not set during update attempt.');
+ }
+
+ let currentLogsOnDisk: LogEntry[];
+ try {
+ currentLogsOnDisk = await this._readLogFile();
+ } catch (readError) {
+ console.debug(
+ 'Critical error reading log file before append:',
+ readError,
+ );
+ throw readError;
}
- return new Promise((resolve, reject) => {
- // Most recent messages first
- const query = `SELECT message FROM messages
- WHERE type = '${MessageSenderType.USER}'
- ORDER BY session_id DESC, message_id DESC`;
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- this.db!.all(query, [], (err: Error | null, rows: any[]) => {
- if (err) {
- reject(err);
- } else {
- resolve(rows.map((row) => row.message));
- }
- });
- });
+ // Determine the correct messageId for the new entry based on current disk state for its session
+ const sessionLogsOnDisk = currentLogsOnDisk.filter(
+ (e) => e.sessionId === entryToAppend.sessionId,
+ );
+ const nextMessageIdForSession =
+ sessionLogsOnDisk.length > 0
+ ? Math.max(...sessionLogsOnDisk.map((e) => e.messageId)) + 1
+ : 0;
+
+ // Update the messageId of the entry we are about to append
+ entryToAppend.messageId = nextMessageIdForSession;
+
+ // Check if this entry (same session, same *recalculated* messageId, same content) might already exist
+ // This is a stricter check for true duplicates if multiple instances try to log the exact same thing
+ // at the exact same calculated messageId slot.
+ const entryExists = currentLogsOnDisk.some(
+ (e) =>
+ e.sessionId === entryToAppend.sessionId &&
+ e.messageId === entryToAppend.messageId &&
+ e.timestamp === entryToAppend.timestamp && // Timestamps are good for distinguishing
+ e.message === entryToAppend.message,
+ );
+
+ if (entryExists) {
+ console.debug(
+ `Duplicate log entry detected and skipped: session ${entryToAppend.sessionId}, messageId ${entryToAppend.messageId}`,
+ );
+ this.logs = currentLogsOnDisk; // Ensure in-memory is synced with disk
+ return null; // Indicate that no new entry was actually added
+ }
+
+ currentLogsOnDisk.push(entryToAppend);
+
+ try {
+ await fs.writeFile(
+ this.logFilePath,
+ JSON.stringify(currentLogsOnDisk, null, 2),
+ 'utf-8',
+ );
+ this.logs = currentLogsOnDisk;
+ return entryToAppend; // Return the successfully appended entry
+ } catch (error) {
+ console.debug('Error writing to log file:', error);
+ throw error;
+ }
+ }
+
+ async getPreviousUserMessages(): Promise<string[]> {
+ if (!this.initialized) return [];
+ return this.logs
+ .filter((entry) => entry.type === MessageSenderType.USER)
+ .sort((a, b) => {
+ if (b.sessionId !== a.sessionId) return b.sessionId - a.sessionId;
+ const dateA = new Date(a.timestamp).getTime();
+ const dateB = new Date(b.timestamp).getTime();
+ if (dateB !== dateA) return dateB - dateA;
+ return b.messageId - a.messageId;
+ })
+ .map((entry) => entry.message);
}
async logMessage(type: MessageSenderType, message: string): Promise<void> {
- if (!this.db) {
- console.error('Database not initialized.');
+ if (!this.initialized || this.sessionId === undefined) {
+ console.debug(
+ 'Logger not initialized or session ID missing. Cannot log message.',
+ );
return;
}
- return new Promise((resolve, reject) => {
- const query = `INSERT INTO messages (session_id, message_id, type, message, timestamp) VALUES (?, ?, ?, ?, datetime('now'))`;
- this.messageId = this.messageId! + 1;
- this.db!.run(
- query,
- [this.sessionId || 0, this.messageId - 1, type, message],
- (err: Error | null) => {
- if (err) {
- reject(err);
- } else {
- resolve();
- }
- },
- );
- });
+ // The messageId used here is the instance's idea of the next ID.
+ // _updateLogFile will verify and potentially recalculate based on the file's actual state.
+ const newEntryObject: LogEntry = {
+ sessionId: this.sessionId,
+ messageId: this.messageId, // This will be recalculated in _updateLogFile
+ type,
+ message,
+ timestamp: new Date().toISOString(),
+ };
+
+ try {
+ const writtenEntry = await this._updateLogFile(newEntryObject);
+ if (writtenEntry) {
+ // If an entry was actually written (not a duplicate skip),
+ // then this instance can increment its idea of the next messageId for this session.
+ this.messageId = writtenEntry.messageId + 1;
+ }
+ } catch (_error) {
+ // Error already logged by _updateLogFile or _readLogFile
+ }
}
close(): void {
- if (this.db) {
- this.db.close((err: Error | null) => {
- if (err) {
- console.error('Error closing database:', err.message);
- }
- });
- this.db = undefined;
- }
+ this.initialized = false;
+ this.logFilePath = undefined;
+ this.logs = [];
+ this.sessionId = undefined;
+ this.messageId = 0;
}
}