From f6f6f0407b782518f94d1ff1e821ed7cb752110b Mon Sep 17 00:00:00 2001
From: YK <1811651+ykdojo@users.noreply.github.com>
Date: Fri, 13 Sep 2024 06:01:55 -0700
Subject: [PATCH] feat(chat): improve messaging for non-streaming models

- Update the `AssistantMessageCell` component to provide more informative messaging when a non-streaming model is used
- The new message explains that non-streaming models may take longer to respond, but are recommended for complex reasoning and coding tasks
---
 vscode/CHANGELOG.md                                                    | 2 +-
 .../chat/cells/messageCell/assistant/AssistantMessageCell.tsx          | 3 ++-
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/vscode/CHANGELOG.md b/vscode/CHANGELOG.md
index f72de1b40b9e..b9ee23acc945 100644
--- a/vscode/CHANGELOG.md
+++ b/vscode/CHANGELOG.md
@@ -6,7 +6,7 @@ This is a log of all notable changes to Cody for VS Code. [Unreleased] changes a
 
 ### Added
 
-- The [new OpenAI models (OpenAI O1 & OpenAI O1-mini)](https://sourcegraph.com/blog/openai-o1-for-cody) are now available to selected Cody Pro users for early access. [pull/5508](https://github.com/sourcegraph/cody/pull/5508)
+- The [new OpenAI models (OpenAI o1-preview & OpenAI o1-mini)](https://sourcegraph.com/blog/openai-o1-for-cody) are now available to selected Cody Pro users for early access. [pull/5508](https://github.com/sourcegraph/cody/pull/5508)
 - Cody Pro users can join the waitlist for the new models by clicking the "Join Waitlist" button. [pull/5508](https://github.com/sourcegraph/cody/pull/5508)
 - Chat: Support non-streaming requests. [pull/5565](https://github.com/sourcegraph/cody/pull/5565)

diff --git a/vscode/webviews/chat/cells/messageCell/assistant/AssistantMessageCell.tsx b/vscode/webviews/chat/cells/messageCell/assistant/AssistantMessageCell.tsx
index ed8bfcfcc169..3bc47a1efc97 100644
--- a/vscode/webviews/chat/cells/messageCell/assistant/AssistantMessageCell.tsx
+++ b/vscode/webviews/chat/cells/messageCell/assistant/AssistantMessageCell.tsx
@@ -119,7 +119,8 @@ export const AssistantMessageCell: FunctionComponent<{
                         <div>
-                            This model may take longer to response.
+                            This model may take longer to respond because it takes time
+                            to "think". Recommended for complex reasoning & coding tasks.
                         </div>
                     )}
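
For reviewers unfamiliar with the webview, the sketch below shows roughly how a notice like this can be rendered conditionally. It is an illustrative standalone component rather than the actual `AssistantMessageCell` code: the `isNonStreamingModel` prop is an assumed name, since the patch does not show how the real cell detects a non-streaming model.

```tsx
import type { FunctionComponent } from 'react'

// Minimal sketch only: `isNonStreamingModel` is an assumed prop name used for
// illustration; the real cell derives this from the selected chat model.
export const NonStreamingModelNotice: FunctionComponent<{ isNonStreamingModel: boolean }> = ({
    isNonStreamingModel,
}) =>
    isNonStreamingModel ? (
        <div>
            This model may take longer to respond because it takes time to "think". Recommended
            for complex reasoning & coding tasks.
        </div>
    ) : null
```

Rendering nothing (returning `null`) when the flag is false keeps the notice out of the DOM entirely for streaming models, which matches the conditional `{... && (...)}` pattern visible in the hunk above.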