remove unused unsafe_fromLLMResponse
abeatrix committed Nov 5, 2024 · 1 parent 1a0ee02 · commit 5b25062
Showing 2 changed files with 4 additions and 4 deletions.
vscode/src/chat/agentic/CodyChatAgent.ts — 3 additions & 3 deletions

```diff
@@ -4,7 +4,7 @@ import {
     type ContextItem,
     type Message,
     type PromptMixin,
-    PromptString,
+    type PromptString,
     newPromptMixin,
 } from '@sourcegraph/cody-shared'
 import { getCategorizedMentions } from '../../prompt-builder/utils'
```
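
The first hunk makes the `PromptString` import type-only: with `unsafe_fromLLMResponse` gone, this file no longer references `PromptString` as a runtime value, so the compiler can erase the import entirely. A minimal standalone sketch of that distinction — the `PromptString` class and `describe` function below are hypothetical stand-ins for illustration, not the real ones from `@sourcegraph/cody-shared`:

```ts
// Value import — kept in the compiled output; needed when the class itself
// is used at runtime, e.g. PromptString.unsafe_fromLLMResponse(...):
//   import { PromptString } from '@sourcegraph/cody-shared'
//
// Type-only import — erased by the TypeScript compiler; sufficient when the
// name only appears in type positions:
//   import { type PromptString } from '@sourcegraph/cody-shared'

// Hypothetical stand-in class so this sketch runs on its own.
class PromptString {
    constructor(private readonly value: string) {}
    toString(): string {
        return this.value
    }
}

// After this commit, PromptString appears only in type positions like this:
function describe(p: PromptString): string {
    return `prompt: ${p}`
}

console.log(describe(new PromptString('hello'))) // prompt: hello
```

The remaining two hunks adjust `processStream` itself: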
```diff
@@ -45,7 +45,7 @@ export abstract class CodyChatAgent {
         message: Message[],
         signal?: AbortSignal,
         model?: string
-    ): Promise<PromptString> {
+    ): Promise<string> {
         const stream = await this.chatClient.chat(
             message,
             { model: model, maxTokensToSample: 4000 },
@@ -67,7 +67,7 @@
             }
         }
 
-        return PromptString.unsafe_fromLLMResponse(accumulated)
+        return accumulated
     }
 
     protected getPrompter(items: ContextItem[]): DefaultPrompter {
```
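
These hunks change `processStream` to resolve with the accumulated LLM text as a plain `string` instead of wrapping it in `PromptString.unsafe_fromLLMResponse`. A self-contained sketch of that accumulate-and-return pattern, where the `StreamEvent` shape and `mockStream` are illustrative assumptions rather than the real `ChatClient` streaming types:

```ts
// Assumed event shape for illustration; the real stream comes from
// this.chatClient.chat(...) in @sourcegraph/cody-shared.
type StreamEvent =
    | { type: 'change'; text: string }
    | { type: 'complete' }
    | { type: 'error'; error: Error }

// Accumulate streamed text and return it as a plain string — no
// PromptString wrapping at the end.
async function processStream(stream: AsyncIterable<StreamEvent>): Promise<string> {
    let accumulated = ''
    for await (const event of stream) {
        if (event.type === 'change') {
            // Assumption: each 'change' event carries the full text so far.
            accumulated = event.text
        } else if (event.type === 'complete' || event.type === 'error') {
            break
        }
    }
    return accumulated
}

// Usage with a mocked stream:
async function* mockStream(): AsyncGenerator<StreamEvent> {
    yield { type: 'change', text: 'CONTEXT_' }
    yield { type: 'change', text: 'CONTEXT_SUFFICIENT' }
    yield { type: 'complete' }
}

processStream(mockStream()).then(res => console.log(res)) // CONTEXT_SUFFICIENT
```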
vscode/src/chat/agentic/DeepCody.ts — 1 addition & 1 deletion

```diff
@@ -74,7 +74,7 @@ export class DeepCodyAgent extends CodyChatAgent {
 
         try {
             const res = await this.processStream(promptData.prompt, chatAbortSignal, model)
-            if (res?.toString().includes('CONTEXT_SUFFICIENT')) {
+            if (!res || res?.includes('CONTEXT_SUFFICIENT')) {
                 // Process the response without generating any context items.
                 for (const tool of this.toolHandlers.values()) {
                     tool.processResponse?.()
```
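
On the caller side, `res` is now a plain string, so the `toString()` call goes away, and the added `!res` clause treats an empty response the same as an explicit marker. A tiny sketch of the resulting predicate; `isContextSufficient` is a hypothetical helper name used here for illustration only:

```ts
// Mirrors the updated condition: an empty response or an explicit
// CONTEXT_SUFFICIENT marker both mean "stop gathering context".
function isContextSufficient(res: string): boolean {
    return !res || res.includes('CONTEXT_SUFFICIENT')
}

console.log(isContextSufficient(''))                   // true — empty response
console.log(isContextSufficient('CONTEXT_SUFFICIENT')) // true — explicit marker
console.log(isContextSufficient('need more context'))  // false — keep gathering
```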
