diff --git a/vscode/src/chat/agentic/CodyChatAgent.ts b/vscode/src/chat/agentic/CodyChatAgent.ts
index 237efb9940c..822c99e47fc 100644
--- a/vscode/src/chat/agentic/CodyChatAgent.ts
+++ b/vscode/src/chat/agentic/CodyChatAgent.ts
@@ -4,7 +4,7 @@ import {
     type ContextItem,
     type Message,
     type PromptMixin,
-    PromptString,
+    type PromptString,
     newPromptMixin,
 } from '@sourcegraph/cody-shared'
 import { getCategorizedMentions } from '../../prompt-builder/utils'
@@ -45,7 +45,7 @@ export abstract class CodyChatAgent {
         message: Message[],
         signal?: AbortSignal,
         model?: string
-    ): Promise<PromptString> {
+    ): Promise<string> {
         const stream = await this.chatClient.chat(
             message,
             { model: model, maxTokensToSample: 4000 },
@@ -67,7 +67,7 @@ export abstract class CodyChatAgent {
             }
         }
 
-        return PromptString.unsafe_fromLLMResponse(accumulated)
+        return accumulated
     }
 
     protected getPrompter(items: ContextItem[]): DefaultPrompter {
diff --git a/vscode/src/chat/agentic/DeepCody.ts b/vscode/src/chat/agentic/DeepCody.ts
index dc3c92287b4..e492b6fd8f3 100644
--- a/vscode/src/chat/agentic/DeepCody.ts
+++ b/vscode/src/chat/agentic/DeepCody.ts
@@ -74,7 +74,7 @@ export class DeepCodyAgent extends CodyChatAgent {
         try {
             const res = await this.processStream(promptData.prompt, chatAbortSignal, model)
-            if (res?.toString().includes('CONTEXT_SUFFICIENT')) {
+            if (!res || res?.includes('CONTEXT_SUFFICIENT')) {
                 // Process the response without generating any context items.
                 for (const tool of this.toolHandlers.values()) {
                     tool.processResponse?.()