author     Taylor Mullen <[email protected]>        2025-05-27 23:40:25 -0700
committer  N. Taylor Mullen <[email protected]>     2025-05-27 23:46:37 -0700
commit     f2f2ecf9d83224778e5fc38cfcc4a1edddf9f7d4
tree       6ad7ce8c34f16016c67c208a5182a016739b2c07 /packages/server/src/utils/editCorrector.ts
parent     bfeaac844186153698d3a7079b41214bbf1e4371
feat: Allow cancellation of in-progress Gemini requests and pre-execution checks
- Implements cancellation for Gemini requests while they are actively being processed by the model.
- Extends cancellation support to the logic within tools. This allows users to cancel operations during the phase where the system determines whether a tool execution requires user confirmation, which can include potentially long-running pre-flight checks or LLM-based corrections.
- Underlying LLM calls for edit corrections (within ensureCorrectEdit and ensureCorrectFileContent) and next speaker checks can now also be cancelled.
- Previously, the main request could not be cancelled until text started streaming, and pre-execution checks were not cancellable at all.
- This change leverages the updated SDK's ability to accept an abort token and threads AbortSignals throughout the request, tool execution, and pre-execution check lifecycle.

Fixes https://github.com/google-gemini/gemini-cli/issues/531
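For orientation, the sketch below shows how a caller could drive the new abortSignal parameter of ensureCorrectEdit with an AbortController. The ensureCorrectEdit signature comes from the diff; the import paths, the helper name, and the timeout-driven cancellation are illustrative assumptions, not part of this commit.

// Sketch only: import paths, helper name, and the timeout trigger are assumed;
// the abortSignal: AbortSignal parameter is taken from this diff.
import { GeminiClient } from '../core/client.js';
import { EditToolParams } from '../tools/edit.js';
import { ensureCorrectEdit } from './editCorrector.js';

export async function correctEditWithTimeout(
  currentContent: string,
  params: EditToolParams,
  client: GeminiClient,
  timeoutMs: number,
) {
  const controller = new AbortController();
  const timer = setTimeout(() => controller.abort(), timeoutMs);

  try {
    // controller.signal is threaded through ensureCorrectEdit into each
    // underlying client.generateJson call, so aborting here cancels the
    // in-flight LLM correction instead of letting it run to completion.
    return await ensureCorrectEdit(
      currentContent,
      params,
      client,
      controller.signal,
    );
  } finally {
    clearTimeout(timer);
  }
}

Aborting the controller rejects the pending generateJson calls, so cancellation surfaces to the caller as an error rather than the correction silently completing.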
Diffstat (limited to 'packages/server/src/utils/editCorrector.ts')
-rw-r--r--  packages/server/src/utils/editCorrector.ts | 37
1 file changed, 36 insertions(+), 1 deletion(-)
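Beyond the added parameters, each correction function in the diff below gains the same abort-aware catch block: if the signal was aborted, the error is rethrown so cancellation propagates to the caller; otherwise the error is logged and the uncorrected string is returned as a fallback. A standalone sketch of that pattern follows, with a hypothetical helper name that is not part of the commit.

// Illustrative helper, not from this commit: it restates the abort-aware
// catch pattern that each correction function below now follows.
async function withAbortAwareFallback<T>(
  abortSignal: AbortSignal,
  attempt: () => Promise<T>,
  fallback: T,
): Promise<T> {
  try {
    return await attempt();
  } catch (error) {
    if (abortSignal.aborted) {
      // Cancellation is not a correction failure; rethrow so the whole
      // request unwinds instead of silently returning the fallback value.
      throw error;
    }
    console.error('Error during LLM call for correction:', error);
    return fallback;
  }
}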
diff --git a/packages/server/src/utils/editCorrector.ts b/packages/server/src/utils/editCorrector.ts
index 92551478..78663954 100644
--- a/packages/server/src/utils/editCorrector.ts
+++ b/packages/server/src/utils/editCorrector.ts
@@ -63,6 +63,7 @@ export async function ensureCorrectEdit(
currentContent: string,
originalParams: EditToolParams, // This is the EditToolParams from edit.ts, without 'corrected'
client: GeminiClient,
+ abortSignal: AbortSignal,
): Promise<CorrectedEditResult> {
const cacheKey = `${currentContent}---${originalParams.old_string}---${originalParams.new_string}`;
const cachedResult = editCorrectionCache.get(cacheKey);
@@ -84,6 +85,7 @@ export async function ensureCorrectEdit(
client,
finalOldString,
originalParams.new_string,
+ abortSignal,
);
}
} else if (occurrences > 1) {
@@ -108,6 +110,7 @@ export async function ensureCorrectEdit(
originalParams.old_string, // original old
unescapedOldStringAttempt, // corrected old
originalParams.new_string, // original new (which is potentially escaped)
+ abortSignal,
);
}
} else if (occurrences === 0) {
@@ -115,6 +118,7 @@ export async function ensureCorrectEdit(
client,
currentContent,
unescapedOldStringAttempt,
+ abortSignal,
);
const llmOldOccurrences = countOccurrences(
currentContent,
@@ -134,6 +138,7 @@ export async function ensureCorrectEdit(
originalParams.old_string, // original old
llmCorrectedOldString, // corrected old
baseNewStringForLLMCorrection, // base new for correction
+ abortSignal,
);
}
} else {
@@ -180,6 +185,7 @@ export async function ensureCorrectEdit(
export async function ensureCorrectFileContent(
content: string,
client: GeminiClient,
+ abortSignal: AbortSignal,
): Promise<string> {
const cachedResult = fileContentCorrectionCache.get(content);
if (cachedResult) {
@@ -193,7 +199,11 @@ export async function ensureCorrectFileContent(
return content;
}
- const correctedContent = await correctStringEscaping(content, client);
+ const correctedContent = await correctStringEscaping(
+ content,
+ client,
+ abortSignal,
+ );
fileContentCorrectionCache.set(content, correctedContent);
return correctedContent;
}
@@ -215,6 +225,7 @@ export async function correctOldStringMismatch(
geminiClient: GeminiClient,
fileContent: string,
problematicSnippet: string,
+ abortSignal: AbortSignal,
): Promise<string> {
const prompt = `
Context: A process needs to find an exact literal, unique match for a specific text snippet within a file's content. The provided snippet failed to match exactly. This is most likely because it has been overly escaped.
@@ -243,6 +254,7 @@ Return ONLY the corrected target snippet in the specified JSON format with the k
const result = await geminiClient.generateJson(
contents,
OLD_STRING_CORRECTION_SCHEMA,
+ abortSignal,
EditModel,
EditConfig,
);
@@ -257,10 +269,15 @@ Return ONLY the corrected target snippet in the specified JSON format with the k
return problematicSnippet;
}
} catch (error) {
+ if (abortSignal.aborted) {
+ throw error;
+ }
+
console.error(
'Error during LLM call for old string snippet correction:',
error,
);
+
return problematicSnippet;
}
}
@@ -286,6 +303,7 @@ export async function correctNewString(
originalOldString: string,
correctedOldString: string,
originalNewString: string,
+ abortSignal: AbortSignal,
): Promise<string> {
if (originalOldString === correctedOldString) {
return originalNewString;
@@ -324,6 +342,7 @@ Return ONLY the corrected string in the specified JSON format with the key 'corr
const result = await geminiClient.generateJson(
contents,
NEW_STRING_CORRECTION_SCHEMA,
+ abortSignal,
EditModel,
EditConfig,
);
@@ -338,6 +357,10 @@ Return ONLY the corrected string in the specified JSON format with the key 'corr
return originalNewString;
}
} catch (error) {
+ if (abortSignal.aborted) {
+ throw error;
+ }
+
console.error('Error during LLM call for new_string correction:', error);
return originalNewString;
}
@@ -359,6 +382,7 @@ export async function correctNewStringEscaping(
geminiClient: GeminiClient,
oldString: string,
potentiallyProblematicNewString: string,
+ abortSignal: AbortSignal,
): Promise<string> {
const prompt = `
Context: A text replacement operation is planned. The text to be replaced (old_string) has been correctly identified in the file. However, the replacement text (new_string) might have been improperly escaped by a previous LLM generation (e.g. too many backslashes for newlines like \\n instead of \n, or unnecessarily quotes like \\"Hello\\" instead of "Hello").
@@ -387,6 +411,7 @@ Return ONLY the corrected string in the specified JSON format with the key 'corr
const result = await geminiClient.generateJson(
contents,
CORRECT_NEW_STRING_ESCAPING_SCHEMA,
+ abortSignal,
EditModel,
EditConfig,
);
@@ -401,6 +426,10 @@ Return ONLY the corrected string in the specified JSON format with the key 'corr
return potentiallyProblematicNewString;
}
} catch (error) {
+ if (abortSignal.aborted) {
+ throw error;
+ }
+
console.error(
'Error during LLM call for new_string escaping correction:',
error,
@@ -424,6 +453,7 @@ const CORRECT_STRING_ESCAPING_SCHEMA: SchemaUnion = {
export async function correctStringEscaping(
potentiallyProblematicString: string,
client: GeminiClient,
+ abortSignal: AbortSignal,
): Promise<string> {
const prompt = `
Context: An LLM has just generated potentially_problematic_string and the text might have been improperly escaped (e.g. too many backslashes for newlines like \\n instead of \n, or unnecessarily quotes like \\"Hello\\" instead of "Hello").
@@ -447,6 +477,7 @@ Return ONLY the corrected string in the specified JSON format with the key 'corr
const result = await client.generateJson(
contents,
CORRECT_STRING_ESCAPING_SCHEMA,
+ abortSignal,
EditModel,
EditConfig,
);
@@ -461,6 +492,10 @@ Return ONLY the corrected string in the specified JSON format with the key 'corr
return potentiallyProblematicString;
}
} catch (error) {
+ if (abortSignal.aborted) {
+ throw error;
+ }
+
console.error(
'Error during LLM call for string escaping correction:',
error,