diff --git a/.env.local.example b/.env.local.example
index ae980ee3c2..003bf57dd8 100644
--- a/.env.local.example
+++ b/.env.local.example
@@ -1,3 +1,8 @@
+# Chatbot UI
 DEFAULT_MODEL=gpt-3.5-turbo
 DEFAULT_SYSTEM_PROMPT=You are ChatGPT, a large language model trained by OpenAI. Follow the user's instructions carefully. Respond using markdown.
-OPENAI_API_KEY=YOUR_KEY
\ No newline at end of file
+OPENAI_API_KEY=YOUR_KEY
+
+# Google
+GOOGLE_API_KEY=YOUR_API_KEY
+GOOGLE_CSE_ID=YOUR_ENGINE_ID
diff --git a/components/Chat/Chat.tsx b/components/Chat/Chat.tsx
index d5e7a43d97..c7dcf525c8 100644
--- a/components/Chat/Chat.tsx
+++ b/components/Chat/Chat.tsx
@@ -2,14 +2,15 @@ import { Conversation, Message } from '@/types/chat';
 import { KeyValuePair } from '@/types/data';
 import { ErrorMessage } from '@/types/error';
 import { OpenAIModel, OpenAIModelID } from '@/types/openai';
+import { Plugin } from '@/types/plugin';
 import { Prompt } from '@/types/prompt';
 import { throttle } from '@/utils';
 import { IconArrowDown, IconClearAll, IconSettings } from '@tabler/icons-react';
 import { useTranslation } from 'next-i18next';
 import {
   FC,
-  memo,
   MutableRefObject,
+  memo,
   useCallback,
   useEffect,
   useRef,
@@ -33,7 +34,11 @@ interface Props {
   modelError: ErrorMessage | null;
   loading: boolean;
   prompts: Prompt[];
-  onSend: (message: Message, deleteCount?: number) => void;
+  onSend: (
+    message: Message,
+    deleteCount: number,
+    plugin: Plugin | null,
+  ) => void;
   onUpdateConversation: (
     conversation: Conversation,
     data: KeyValuePair,
@@ -116,8 +121,6 @@ export const Chat: FC<Props> = memo(
     };
     const throttledScrollDown = throttle(scrollDown, 250);

-    // appear scroll down button only when user scrolls up
-
     useEffect(() => {
       throttledScrollDown();
       setCurrentMessage(
@@ -300,16 +303,15 @@ export const Chat: FC<Props> = memo(
            textareaRef={textareaRef}
            messageIsStreaming={messageIsStreaming}
            conversationIsEmpty={conversation.messages.length === 0}
-           messages={conversation.messages}
            model={conversation.model}
            prompts={prompts}
-           onSend={(message) => {
+           onSend={(message, plugin) => {
              setCurrentMessage(message);
-             onSend(message);
+             onSend(message, 0, plugin);
            }}
            onRegenerate={() => {
              if (currentMessage) {
-               onSend(currentMessage, 2);
+               onSend(currentMessage, 2, null);
              }
            }}
          />
diff --git a/components/Chat/ChatInput.tsx b/components/Chat/ChatInput.tsx
index 286883cb95..e5fc4216c4 100644
--- a/components/Chat/ChatInput.tsx
+++ b/components/Chat/ChatInput.tsx
@@ -1,7 +1,14 @@
 import { Message } from '@/types/chat';
 import { OpenAIModel } from '@/types/openai';
+import { Plugin } from '@/types/plugin';
 import { Prompt } from '@/types/prompt';
-import { IconPlayerStop, IconRepeat, IconSend } from '@tabler/icons-react';
+import {
+  IconBolt,
+  IconBrandGoogle,
+  IconPlayerStop,
+  IconRepeat,
+  IconSend,
+} from '@tabler/icons-react';
 import { useTranslation } from 'next-i18next';
 import {
   FC,
@@ -12,6 +19,7 @@ import {
   useRef,
   useState,
 } from 'react';
+import { PluginSelect } from './PluginSelect';
 import { PromptList } from './PromptList';
 import { VariableModal } from './VariableModal';

@@ -19,9 +27,8 @@ interface Props {
   messageIsStreaming: boolean;
   model: OpenAIModel;
   conversationIsEmpty: boolean;
-  messages: Message[];
   prompts: Prompt[];
-  onSend: (message: Message) => void;
+  onSend: (message: Message, plugin: Plugin | null) => void;
   onRegenerate: () => void;
   stopConversationRef: MutableRefObject<boolean>;
   textareaRef: MutableRefObject<HTMLTextAreaElement | null>;
 }

 export const ChatInput: FC<Props> = ({
   messageIsStreaming,
   model,
   conversationIsEmpty,
-  messages,
   prompts,
   onSend,
   onRegenerate,
@@ -47,6 +53,8 @@ export const ChatInput: FC<Props> = ({
   const [promptInputValue, setPromptInputValue] = useState('');
   const [variables, setVariables] = useState<string[]>([]);
   const [isModalVisible, setIsModalVisible] = useState(false);
+  const [showPluginSelect, setShowPluginSelect] = useState(false);
+  const [plugin, setPlugin] = useState<Plugin | null>(null);

   const promptListRef = useRef<HTMLUListElement | null>(null);

@@ -82,8 +90,9 @@ export const ChatInput: FC<Props> = ({
       return;
     }

-    onSend({ role: 'user', content });
+    onSend({ role: 'user', content }, plugin);
     setContent('');
+    setPlugin(null);

     if (window.innerWidth < 640 && textareaRef && textareaRef.current) {
       textareaRef.current.blur();
@@ -149,6 +158,9 @@
     } else if (e.key === 'Enter' && !isTyping && !isMobile() && !e.shiftKey) {
       e.preventDefault();
       handleSend();
+    } else if (e.key === '/' && e.metaKey) {
+      e.preventDefault();
+      setShowPluginSelect(!showPluginSelect);
     }
   };

@@ -214,8 +226,9 @@
     if (textareaRef && textareaRef.current) {
       textareaRef.current.style.height = 'inherit';
       textareaRef.current.style.height = `${textareaRef.current?.scrollHeight}px`;
-      textareaRef.current.style.overflow = `${textareaRef?.current?.scrollHeight > 400 ? 'auto' : 'hidden'
-        }`;
+      textareaRef.current.style.overflow = `${
+        textareaRef?.current?.scrollHeight > 400 ? 'auto' : 'hidden'
+      }`;
     }
   }, [content]);

@@ -241,7 +254,7 @@
         {messageIsStreaming && (
+          <button onClick={() => setShowPluginSelect(!showPluginSelect)}>
+            {plugin ? <IconBrandGoogle size={20} /> : <IconBolt size={20} />}
+          </button>
+
+          {showPluginSelect && (
+            <div>
+              <PluginSelect
+                plugin={plugin}
+                onPluginChange={(plugin: Plugin) => {
+                  setPlugin(plugin);
+                  setShowPluginSelect(false);
+
+                  if (textareaRef && textareaRef.current) {
+                    textareaRef.current.focus();
+                  }
+                }}
+              />
+            </div>
+          )}
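
Notes:

The patch threads a `Plugin | null` value from ChatInput up through Chat's
onSend, and imports Plugin from '@/types/plugin', a module the diff does not
include. A minimal sketch of what that module plausibly contains, inferred
only from how the type is used here; everything except the Plugin type name
itself is an assumption:

    // types/plugin.ts -- hypothetical sketch; only the Plugin type name is
    // confirmed by this diff.
    export enum PluginID {
      GOOGLE_SEARCH = 'google-search',
    }

    export enum PluginName {
      GOOGLE_SEARCH = 'Google Search',
    }

    export interface Plugin {
      id: PluginID;
      name: PluginName;
    }

    // A lookup table lets a selector component enumerate the options.
    export const Plugins: Record<PluginID, Plugin> = {
      [PluginID.GOOGLE_SEARCH]: {
        id: PluginID.GOOGLE_SEARCH,
        name: PluginName.GOOGLE_SEARCH,
      },
    };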
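ChatInput renders a PluginSelect with plugin and onPluginChange props (both
visible at the call site in the last hunk), but the component itself is not
part of this diff. A sketch consistent with that call site, building on the
hypothetical types above:

    // components/Chat/PluginSelect.tsx -- hypothetical sketch; the real
    // component is not shown in this diff.
    import { FC } from 'react';

    import { Plugin, PluginID, Plugins } from '@/types/plugin';

    interface Props {
      plugin: Plugin | null;
      onPluginChange: (plugin: Plugin) => void;
    }

    export const PluginSelect: FC<Props> = ({ plugin, onPluginChange }) => {
      return (
        <select
          value={plugin?.id ?? ''}
          onChange={(e) => {
            // An empty value means "no plugin"; only notify on a real
            // choice, since onPluginChange takes a non-null Plugin.
            const selected = Plugins[e.target.value as PluginID];
            if (selected) onPluginChange(selected);
          }}
        >
          <option value="">ChatGPT</option>
          {Object.values(Plugins).map((p) => (
            <option key={p.id} value={p.id}>
              {p.name}
            </option>
          ))}
        </select>
      );
    };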
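The new GOOGLE_API_KEY and GOOGLE_CSE_ID variables point at Google's
Programmable Search Engine (Custom Search JSON API), which is queried with an
API key ("key"), an engine ID ("cx"), and a query string ("q"). The diff does
not show the server side; a sketch of how a Next.js API route could use these
variables follows. The route path and response shape are assumptions; the
endpoint and its parameters are Google's documented API:

    // pages/api/google.ts -- hypothetical sketch of the search handler.
    import { NextApiRequest, NextApiResponse } from 'next';

    const handler = async (req: NextApiRequest, res: NextApiResponse) => {
      const { query } = req.body as { query: string };

      // Documented Custom Search JSON API endpoint and parameters.
      const url =
        'https://www.googleapis.com/customsearch/v1' +
        `?key=${process.env.GOOGLE_API_KEY}` +
        `&cx=${process.env.GOOGLE_CSE_ID}` +
        `&q=${encodeURIComponent(query)}`;

      const response = await fetch(url);
      const data = await response.json();

      // "items" holds the individual results in the API's response body.
      const sources = (data.items ?? []).map((item: any) => ({
        title: item.title,
        link: item.link,
        snippet: item.snippet,
      }));

      res.status(200).json({ sources });
    };

    export default handler;

Presumably Chat's onSend then branches on the plugin argument: null goes to
the regular chat completion endpoint, while the Google plugin would first
fetch results from a handler like this and feed them to the model.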