author     anj-s <[email protected]>          2025-06-11 15:05:41 -0700
committer  GitHub <[email protected]>  2025-06-11 15:05:41 -0700
commit     2dc79b3bd0f020ede1a923a9c8311058a23977b0 (patch)
tree       9e4931038de43d6d8a96a264aca7f96453044f4f /packages/core/src
parent     6ecdecbdcca59d4daff7ec5921afc746e7d725ab (diff)
Add support for local logging per session (#936)
Diffstat (limited to 'packages/core/src')
-rw-r--r--  packages/core/src/core/logger.test.ts  172
-rw-r--r--  packages/core/src/core/logger.ts         14
2 files changed, 80 insertions(+), 106 deletions(-)
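
This change replaces the single shared .gemini/logs.json with one log file per session, named logs-<sessionId>.json. Below is a minimal sketch of the resulting path construction, mirroring the logger.ts hunk further down; the logFilePathFor helper is illustrative and not part of the diff.

import path from 'node:path';

const GEMINI_DIR = '.gemini';
const LOG_FILE_NAME_PREFIX = 'logs';

// Illustrative helper: builds the per-session log file path the same way
// Logger.initialize() now does in logger.ts.
function logFilePathFor(sessionId: string): string {
  const geminiDir = path.resolve(process.cwd(), GEMINI_DIR);
  return path.join(geminiDir, `${LOG_FILE_NAME_PREFIX}-${sessionId}.json`);
}

// e.g. logFilePathFor('abc123') resolves to <cwd>/.gemini/logs-abc123.json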
diff --git a/packages/core/src/core/logger.test.ts b/packages/core/src/core/logger.test.ts
index a3198277..b5ec42a4 100644
--- a/packages/core/src/core/logger.test.ts
+++ b/packages/core/src/core/logger.test.ts
@@ -19,38 +19,27 @@ import path from 'node:path';
import { Content } from '@google/genai';
const GEMINI_DIR = '.gemini';
-const LOG_FILE_NAME = 'logs.json';
+const LOG_FILE_NAME_PREFIX = 'logs';
const CHECKPOINT_FILE_NAME = 'checkpoint.json';
-const TEST_LOG_FILE_PATH = path.join(process.cwd(), GEMINI_DIR, LOG_FILE_NAME);
+const TEST_LOG_FILE_PATH = path.join(
+ process.cwd(),
+ GEMINI_DIR,
+ LOG_FILE_NAME_PREFIX,
+);
const TEST_CHECKPOINT_FILE_PATH = path.join(
process.cwd(),
GEMINI_DIR,
CHECKPOINT_FILE_NAME,
);
-async function cleanupLogFile() {
- try {
- await fs.unlink(TEST_LOG_FILE_PATH);
- } catch (error) {
- if ((error as NodeJS.ErrnoException).code !== 'ENOENT') {
- // Other errors during unlink are ignored for cleanup purposes
- }
- }
- try {
- await fs.unlink(TEST_CHECKPOINT_FILE_PATH);
- } catch (error) {
- if ((error as NodeJS.ErrnoException).code !== 'ENOENT') {
- // Other errors during unlink are ignored for cleanup purposes
- }
- }
+async function cleanupLogFiles() {
try {
const geminiDirPath = path.join(process.cwd(), GEMINI_DIR);
const dirContents = await fs.readdir(geminiDirPath);
for (const file of dirContents) {
if (
- (file.startsWith(LOG_FILE_NAME + '.') ||
- file.startsWith(CHECKPOINT_FILE_NAME + '.')) &&
- file.endsWith('.bak')
+ file.startsWith(LOG_FILE_NAME_PREFIX) ||
+ file.startsWith(CHECKPOINT_FILE_NAME)
) {
try {
await fs.unlink(path.join(geminiDirPath, file));
@@ -66,9 +55,12 @@ async function cleanupLogFile() {
}
}
-async function readLogFile(): Promise<LogEntry[]> {
+async function readLogFile(sessionId: string): Promise<LogEntry[]> {
try {
- const content = await fs.readFile(TEST_LOG_FILE_PATH, 'utf-8');
+ const content = await fs.readFile(
+ `${TEST_LOG_FILE_PATH}-${sessionId}.json`,
+ 'utf-8',
+ );
return JSON.parse(content) as LogEntry[];
} catch (error) {
if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
@@ -90,25 +82,25 @@ describe('Logger', () => {
vi.resetAllMocks();
vi.useFakeTimers();
vi.setSystemTime(new Date('2025-01-01T12:00:00.000Z'));
- await cleanupLogFile();
+ await cleanupLogFiles();
logger = new Logger(testSessionId);
await logger.initialize();
});
afterEach(async () => {
logger.close();
- await cleanupLogFile();
+ await cleanupLogFiles();
vi.useRealTimers();
vi.restoreAllMocks();
});
afterAll(async () => {
- await cleanupLogFile();
+ await cleanupLogFiles();
});
describe('initialize', () => {
it('should create .gemini directory and an empty log file if none exist', async () => {
- await cleanupLogFile();
+ await cleanupLogFiles();
const geminiDirPath = path.join(process.cwd(), GEMINI_DIR);
try {
await fs.rm(geminiDirPath, { recursive: true, force: true });
@@ -118,18 +110,17 @@ describe('Logger', () => {
const newLogger = new Logger(testSessionId);
await newLogger.initialize();
-
const dirExists = await fs
.access(geminiDirPath)
.then(() => true)
.catch(() => false);
expect(dirExists).toBe(true);
const fileExists = await fs
- .access(TEST_LOG_FILE_PATH)
+ .access(path.join('', newLogger.getLogFilePath() ?? ''))
.then(() => true)
.catch(() => false);
expect(fileExists).toBe(true);
- const logContent = await readLogFile();
+ const logContent = await readLogFile(testSessionId);
expect(logContent).toEqual([]);
newLogger.close();
});
@@ -161,10 +152,17 @@ describe('Logger', () => {
},
];
await fs.mkdir(path.join(process.cwd(), GEMINI_DIR), { recursive: true });
- await fs.writeFile(TEST_LOG_FILE_PATH, JSON.stringify(existingLogs));
+ await fs.writeFile(
+ `${TEST_LOG_FILE_PATH}-${currentSessionId}.json`,
+ JSON.stringify(existingLogs),
+ );
const newLogger = new Logger(currentSessionId);
await newLogger.initialize();
- expect(newLogger['messageId']).toBe(2);
+
+ const messageCount = existingLogs.filter(
+ (log) => log.sessionId === currentSessionId,
+ ).length;
+ expect(newLogger['messageId']).toBe(messageCount);
expect(newLogger['logs']).toEqual(existingLogs);
newLogger.close();
});
@@ -180,7 +178,10 @@ describe('Logger', () => {
},
];
await fs.mkdir(path.join(process.cwd(), GEMINI_DIR), { recursive: true });
- await fs.writeFile(TEST_LOG_FILE_PATH, JSON.stringify(existingLogs));
+ await fs.writeFile(
+ `${TEST_LOG_FILE_PATH}-some-other-session.json`,
+ JSON.stringify(existingLogs),
+ );
const newLogger = new Logger('a-new-session');
await newLogger.initialize();
expect(newLogger['messageId']).toBe(0);
@@ -195,70 +196,82 @@ describe('Logger', () => {
await logger.initialize(); // Second call should not change state
expect(logger['messageId']).toBe(initialMessageId);
expect(logger['logs'].length).toBe(initialLogCount);
- const logsFromFile = await readLogFile();
+ const logsFromFile = await readLogFile(testSessionId);
expect(logsFromFile.length).toBe(1);
});
it('should handle invalid JSON in log file by backing it up and starting fresh', async () => {
- await fs.mkdir(path.join(process.cwd(), GEMINI_DIR), { recursive: true });
- await fs.writeFile(TEST_LOG_FILE_PATH, 'invalid json');
+ const logFilePath = `${TEST_LOG_FILE_PATH}-${testSessionId}.json`;
+ await fs.mkdir(path.dirname(logFilePath), { recursive: true });
+ await fs.writeFile(logFilePath, 'invalid json');
+
+ const newLogger = new Logger(testSessionId);
const consoleDebugSpy = vi
.spyOn(console, 'debug')
.mockImplementation(() => {});
- const newLogger = new Logger(testSessionId);
await newLogger.initialize();
+
expect(consoleDebugSpy).toHaveBeenCalledWith(
expect.stringContaining('Invalid JSON in log file'),
expect.any(SyntaxError),
);
- const logContent = await readLogFile();
- expect(logContent).toEqual([]);
+
+ expect(newLogger['logs']).toEqual([]);
+
const dirContents = await fs.readdir(
path.join(process.cwd(), GEMINI_DIR),
);
expect(
dirContents.some(
(f) =>
- f.startsWith(LOG_FILE_NAME + '.invalid_json') && f.endsWith('.bak'),
+ f.startsWith(`${path.basename(logFilePath)}.invalid_json`) &&
+ f.endsWith('.bak'),
),
).toBe(true);
+
newLogger.close();
+ consoleDebugSpy.mockRestore();
});
it('should handle non-array JSON in log file by backing it up and starting fresh', async () => {
- await fs.mkdir(path.join(process.cwd(), GEMINI_DIR), { recursive: true });
- await fs.writeFile(
- TEST_LOG_FILE_PATH,
- JSON.stringify({ not: 'an array' }),
- );
+ const logFilePath = `${TEST_LOG_FILE_PATH}-${testSessionId}.json`;
+ await fs.mkdir(path.dirname(logFilePath), { recursive: true });
+ await fs.writeFile(logFilePath, JSON.stringify({ not: 'an array' }));
+
+ const newLogger = new Logger(testSessionId);
const consoleDebugSpy = vi
.spyOn(console, 'debug')
.mockImplementation(() => {});
- const newLogger = new Logger(testSessionId);
await newLogger.initialize();
+ await fs.writeFile(logFilePath, JSON.stringify({ not: 'an array' }));
expect(consoleDebugSpy).toHaveBeenCalledWith(
- `Log file at ${TEST_LOG_FILE_PATH} is not a valid JSON array. Starting with empty logs.`,
+ `Log file at ${logFilePath} is not a valid JSON array. Starting with empty logs.`,
);
- const logContent = await readLogFile();
- expect(logContent).toEqual([]);
+ expect(newLogger['logs']).toEqual([]);
+
+ const logContent = await fs.readFile(logFilePath, 'utf-8');
+ expect(JSON.parse(logContent)).toEqual({ not: 'an array' });
+
const dirContents = await fs.readdir(
path.join(process.cwd(), GEMINI_DIR),
);
expect(
dirContents.some(
(f) =>
- f.startsWith(LOG_FILE_NAME + '.malformed_array') &&
+ f.startsWith(`${path.basename(logFilePath)}.malformed_array`) &&
f.endsWith('.bak'),
),
).toBe(true);
+
newLogger.close();
+ consoleDebugSpy.mockRestore();
});
});
describe('logMessage', () => {
it('should append a message to the log file and update in-memory logs', async () => {
await logger.logMessage(MessageSenderType.USER, 'Hello, world!');
- const logsFromFile = await readLogFile();
+ const logsFromFile = await readLogFile(testSessionId);
expect(logsFromFile.length).toBe(1);
expect(logsFromFile[0]).toMatchObject({
sessionId: testSessionId,
@@ -276,7 +289,7 @@ describe('Logger', () => {
await logger.logMessage(MessageSenderType.USER, 'First');
vi.advanceTimersByTime(1000);
await logger.logMessage(MessageSenderType.USER, 'Second');
- const logs = await readLogFile();
+ const logs = await readLogFile(testSessionId);
expect(logs.length).toBe(2);
expect(logs[0].messageId).toBe(0);
expect(logs[1].messageId).toBe(1);
@@ -294,7 +307,7 @@ describe('Logger', () => {
expect(consoleDebugSpy).toHaveBeenCalledWith(
'Logger not initialized or session ID missing. Cannot log message.',
);
- expect((await readLogFile()).length).toBe(0);
+ expect((await readLogFile(testSessionId)).length).toBe(0);
uninitializedLogger.close();
});
@@ -322,7 +335,7 @@ describe('Logger', () => {
// Log from logger2. It reads file (sees {s1,0}, {s1,1}, {s1,2}), its internal msgId for s1 is 3.
await logger2.logMessage(MessageSenderType.USER, 'L2M2'); // L2 internal msgId becomes 4, writes {s1, 3}
- const logsFromFile = await readLogFile();
+ const logsFromFile = await readLogFile(concurrentSessionId);
expect(logsFromFile.length).toBe(4);
const messageIdsInFile = logsFromFile
.map((log) => log.messageId)
@@ -335,8 +348,8 @@ describe('Logger', () => {
expect(messagesInFile).toEqual(['L1M1', 'L2M1', 'L1M2', 'L2M2']);
// Check internal state (next messageId each logger would use for that session)
- expect(logger1['messageId']).toBe(3); // L1 wrote 0, then 2. Next is 3.
- expect(logger2['messageId']).toBe(4); // L2 wrote 1, then 3. Next is 4.
+ expect(logger1['messageId']).toBe(3);
+ expect(logger2['messageId']).toBe(4);
logger1.close();
logger2.close();
@@ -361,55 +374,6 @@ describe('Logger', () => {
});
});
- describe('getPreviousUserMessages', () => {
- it('should retrieve all user messages from logs, sorted newest first', async () => {
- // This test now verifies that messages from different sessions are included
- // and sorted correctly by timestamp, as the session-based sorting was removed.
- const loggerSort = new Logger('session-1');
- await loggerSort.initialize();
- await loggerSort.logMessage(MessageSenderType.USER, 'S1M0_ts100000'); // msgId 0
- vi.advanceTimersByTime(1000);
- await loggerSort.logMessage(MessageSenderType.USER, 'S1M1_ts101000'); // msgId 1
- vi.advanceTimersByTime(1000);
- await loggerSort.logMessage(MessageSenderType.USER, 'S2M0_ts102000'); // msgId 0 for s2
- vi.advanceTimersByTime(1000);
- await loggerSort.logMessage(
- 'model' as MessageSenderType,
- 'S2_Model_ts103000',
- );
- vi.advanceTimersByTime(1000);
- await loggerSort.logMessage(MessageSenderType.USER, 'S2M1_ts104000'); // msgId 1 for s2
- loggerSort.close();
-
- // A new logger will load all previous logs regardless of session
- const finalLogger = new Logger('final-session');
- await finalLogger.initialize();
-
- const messages = await finalLogger.getPreviousUserMessages();
- expect(messages).toEqual([
- 'S2M1_ts104000',
- 'S2M0_ts102000',
- 'S1M1_ts101000',
- 'S1M0_ts100000',
- ]);
- finalLogger.close();
- });
-
- it('should return empty array if no user messages exist', async () => {
- await logger.logMessage('system' as MessageSenderType, 'System boot');
- const messages = await logger.getPreviousUserMessages();
- expect(messages).toEqual([]);
- });
-
- it('should return empty array if logger not initialized', async () => {
- const uninitializedLogger = new Logger(testSessionId);
- uninitializedLogger.close();
- const messages = await uninitializedLogger.getPreviousUserMessages();
- expect(messages).toEqual([]);
- uninitializedLogger.close();
- });
- });
-
describe('saveCheckpoint', () => {
const conversation: Content[] = [
{ role: 'user', parts: [{ text: 'Hello' }] },
diff --git a/packages/core/src/core/logger.ts b/packages/core/src/core/logger.ts
index dd57860b..2cc0f53b 100644
--- a/packages/core/src/core/logger.ts
+++ b/packages/core/src/core/logger.ts
@@ -9,11 +9,14 @@ import { promises as fs } from 'node:fs';
import { Content } from '@google/genai';
const GEMINI_DIR = '.gemini';
-const LOG_FILE_NAME = 'logs.json';
+const LOG_FILE_NAME_PREFIX = 'logs';
const CHECKPOINT_FILE_NAME = 'checkpoint.json';
export enum MessageSenderType {
USER = 'user',
+ SYSTEM = 'system',
+ TOOL_REQUEST = 'tool_request',
+ SERVER_ERROR = 'server_error',
}
export interface LogEntry {
@@ -37,6 +40,10 @@ export class Logger {
this.sessionId = sessionId;
}
+ getLogFilePath(): string | undefined {
+ return this.logFilePath;
+ }
+
private async _readLogFile(): Promise<LogEntry[]> {
if (!this.logFilePath) {
throw new Error('Log file path not set during read attempt.');
@@ -96,7 +103,10 @@ export class Logger {
return;
}
this.geminiDir = path.resolve(process.cwd(), GEMINI_DIR);
- this.logFilePath = path.join(this.geminiDir, LOG_FILE_NAME);
+ this.logFilePath = path.join(
+ this.geminiDir,
+ `${LOG_FILE_NAME_PREFIX}-${this.sessionId}.json`,
+ );
this.checkpointFilePath = path.join(this.geminiDir, CHECKPOINT_FILE_NAME);
try {
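
For reference, a minimal usage sketch of the per-session Logger and the new getLogFilePath() accessor added in this commit. The import specifier and session id are illustrative, and the top-level await assumes an ES module context.

import { Logger, MessageSenderType } from './logger.js'; // illustrative specifier

const logger = new Logger('my-session-id'); // illustrative session id
await logger.initialize();
await logger.logMessage(MessageSenderType.USER, 'Hello, world!');
// Each session now writes to .gemini/logs-<sessionId>.json instead of a shared logs.json.
console.log(logger.getLogFilePath());
logger.close();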