| author | Olcan <[email protected]> | 2025-05-30 00:02:30 -0700 |
|---|---|---|
| committer | GitHub <[email protected]> | 2025-05-30 00:02:30 -0700 |
| commit | b0aeeb53b101ed73dfebbff74197efdc4e18b142 | |
| tree | 3eed5731b4244766cbd4e1b770eb7f869a73a90d /packages/cli/src/ui/hooks/useToolScheduler.ts | |
| parent | 2582c20e2a541b0a90d9f62754c7008af13c194f | |
update shell output at an interval to reduce flicker (#614)
Diffstat (limited to 'packages/cli/src/ui/hooks/useToolScheduler.ts')
| mode | path | lines changed |
|---|---|---|
| -rw-r--r-- | packages/cli/src/ui/hooks/useToolScheduler.ts | 12 |

1 file changed, 5 insertions, 7 deletions
```diff
diff --git a/packages/cli/src/ui/hooks/useToolScheduler.ts b/packages/cli/src/ui/hooks/useToolScheduler.ts
index e6e80785..af8715e9 100644
--- a/packages/cli/src/ui/hooks/useToolScheduler.ts
+++ b/packages/cli/src/ui/hooks/useToolScheduler.ts
@@ -288,11 +288,9 @@ export function useToolScheduler(
       const callId = t.request.callId;
       setToolCalls(setStatus(t.request.callId, 'executing'));
-      let accumulatedOutput = '';
-      const onOutputChunk =
+      const updateOutput =
         t.tool.name === 'execute_bash_command'
-          ? (chunk: string) => {
-              accumulatedOutput += chunk;
+          ? (output: string) => {
               setPendingHistoryItem(
                 (prevItem: HistoryItemWithoutId | null) => {
                   if (prevItem?.type === 'tool_group') {
@@ -304,7 +302,7 @@ export function useToolScheduler(
                         toolDisplay.status === ToolCallStatus.Executing
                           ? {
                               ...toolDisplay,
-                              resultDisplay: accumulatedOutput,
+                              resultDisplay: output,
                             }
                           : toolDisplay,
                       ),
@@ -319,7 +317,7 @@ export function useToolScheduler(
               setToolCalls((prevToolCalls) =>
                 prevToolCalls.map((tc) =>
                   tc.request.callId === callId && tc.status === 'executing'
-                    ? { ...tc, liveOutput: accumulatedOutput }
+                    ? { ...tc, liveOutput: output }
                     : tc,
                 ),
               );
@@ -327,7 +325,7 @@ export function useToolScheduler(
           : undefined;
 
       t.tool
-        .execute(t.request.args, signal, onOutputChunk)
+        .execute(t.request.args, signal, updateOutput)
         .then((result: ToolResult) => {
           if (signal.aborted) {
             // TODO(jacobr): avoid stringifying the LLM content.
```
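After this change the hook no longer accumulates raw chunks itself: the renamed `updateOutput` callback receives the complete output string on each call. That implies the accumulation, and the interval-based throttling the commit subject refers to, live inside the shell tool's `execute` implementation, which is not part of this diff. Below is a minimal sketch of what that tool side could look like, assuming a hypothetical `runShell` helper and a made-up `OUTPUT_UPDATE_INTERVAL_MS` constant (neither name appears in this commit):

```ts
// Hypothetical sketch, not code from this commit: a shell tool that buffers
// chunks itself and flushes the *full* buffer to updateOutput at a bounded
// rate, matching the new (output: string) callback contract above.
import { spawn } from 'node:child_process';

const OUTPUT_UPDATE_INTERVAL_MS = 1000; // assumed value, not in the diff

async function runShell(
  command: string,
  signal: AbortSignal,
  updateOutput?: (output: string) => void,
): Promise<string> {
  const child = spawn('bash', ['-c', command], { signal });

  let output = '';
  let lastEmit = 0;

  // Emit the whole buffer, but at most once per interval, to avoid
  // re-rendering the UI on every chunk (the flicker the commit fixes).
  const emit = () => {
    const now = Date.now();
    if (updateOutput && now - lastEmit >= OUTPUT_UPDATE_INTERVAL_MS) {
      lastEmit = now;
      updateOutput(output);
    }
  };

  child.stdout.on('data', (chunk: Buffer) => {
    output += chunk.toString();
    emit();
  });
  child.stderr.on('data', (chunk: Buffer) => {
    output += chunk.toString();
    emit();
  });

  await new Promise<void>((resolve, reject) => {
    child.on('error', reject); // also fires on abort via the signal
    child.on('close', () => resolve());
  });

  updateOutput?.(output); // final flush so a skipped tick loses nothing
  return output;
}
```

Because each call carries the entire buffer and simply replaces `resultDisplay` and `liveOutput` wholesale, a throttled or dropped tick costs nothing; under the old per-chunk contract, a dropped chunk would have permanently lost output.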
