summaryrefslogtreecommitdiff
path: root/packages/server/src/utils/LruCache.ts
diff options
context:
space:
mode:
authorTaylor Mullen <[email protected]>2025-05-25 14:21:27 -0700
committerN. Taylor Mullen <[email protected]>2025-05-25 14:24:09 -0700
commit1a5fe16b22dbb13861bd3aa97c8ae5f40566b6af (patch)
treeee5b89a4f4d866807c5473002ec7b0b13049c228 /packages/server/src/utils/LruCache.ts
parentc181fc1cf3368cefa195207fcba9c5e585f29851 (diff)
Ensure edit correction isn't re-done after confirm.
- Edit correction leans on LLM-isms to ensure we properly fix poorly escaped content. Because of this we need to ensure that we don't re-run edit correction in many cases. - To ensure this an `LruCache` has been added to capture intermediate steps of edit correction to avoid re-computations. - Max cache size is 50 currently. This means a user can have a multi-confirmation flow of 25 items without recomputing anything (assuming they all break edit correction). - Laid some groundwork for future testing. Part of https://github.com/google-gemini/gemini-cli/issues/484
Diffstat (limited to 'packages/server/src/utils/LruCache.ts')
-rw-r--r--packages/server/src/utils/LruCache.ts41
1 files changed, 41 insertions, 0 deletions
diff --git a/packages/server/src/utils/LruCache.ts b/packages/server/src/utils/LruCache.ts
new file mode 100644
index 00000000..076828c4
--- /dev/null
+++ b/packages/server/src/utils/LruCache.ts
@@ -0,0 +1,41 @@
+/**
+ * @license
+ * Copyright 2025 Google LLC
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+export class LruCache<K, V> {
+ private cache: Map<K, V>;
+ private maxSize: number;
+
+ constructor(maxSize: number) {
+ this.cache = new Map<K, V>();
+ this.maxSize = maxSize;
+ }
+
+ get(key: K): V | undefined {
+ const value = this.cache.get(key);
+ if (value) {
+ // Move to end to mark as recently used
+ this.cache.delete(key);
+ this.cache.set(key, value);
+ }
+ return value;
+ }
+
+ set(key: K, value: V): void {
+ if (this.cache.has(key)) {
+ this.cache.delete(key);
+ } else if (this.cache.size >= this.maxSize) {
+ const firstKey = this.cache.keys().next().value;
+ if (firstKey !== undefined) {
+ this.cache.delete(firstKey);
+ }
+ }
+ this.cache.set(key, value);
+ }
+
+ clear(): void {
+ this.cache.clear();
+ }
+}