diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..38bb57c --- /dev/null +++ b/.dockerignore @@ -0,0 +1,4 @@ +.env +.env.local +node_modules +test-results diff --git a/.env.local.example b/.env.local.example new file mode 100644 index 0000000..752af4e --- /dev/null +++ b/.env.local.example @@ -0,0 +1,8 @@ +# Chatbot UI +DEFAULT_MODEL=gpt-3.5-turbo +NEXT_PUBLIC_DEFAULT_SYSTEM_PROMPT=You are ChatGPT, a large language model trained by OpenAI. Follow the user's instructions carefully. Respond using markdown. +OPENAI_API_KEY=YOUR_KEY + +# Google +GOOGLE_API_KEY=YOUR_API_KEY +GOOGLE_CSE_ID=YOUR_ENGINE_ID diff --git a/.eslintrc.json b/.eslintrc.json new file mode 100644 index 0000000..bffb357 --- /dev/null +++ b/.eslintrc.json @@ -0,0 +1,3 @@ +{ + "extends": "next/core-web-vitals" +} diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml new file mode 100644 index 0000000..d6494ef --- /dev/null +++ b/.github/FUNDING.yml @@ -0,0 +1,3 @@ +# If you find my open-source work helpful, please consider sponsoring me! + +github: mckaywrigley diff --git a/.github/workflows/deploy-docker-image.yaml b/.github/workflows/deploy-docker-image.yaml new file mode 100644 index 0000000..3e7ad3c --- /dev/null +++ b/.github/workflows/deploy-docker-image.yaml @@ -0,0 +1,69 @@ +name: Docker + +# This workflow uses actions that are not certified by GitHub. +# They are provided by a third-party and are governed by +# separate terms of service, privacy policy, and support +# documentation. + +on: + push: + branches: ['main'] + +env: + # Use docker.io for Docker Hub if empty + REGISTRY: ghcr.io + # github.repository as / + IMAGE_NAME: ${{ github.repository }} + +jobs: + build: + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + # This is used to complete the identity challenge + # with sigstore/fulcio when running outside of PRs. + id-token: write + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v2.1.0 + + # Workaround: https://github.com/docker/build-push-action/issues/461 + - name: Setup Docker buildx + uses: docker/setup-buildx-action@79abd3f86f79a9d68a23c75a09a9a85889262adf + + # Login against a Docker registry except on PR + # https://github.com/docker/login-action + - name: Log into registry ${{ env.REGISTRY }} + if: github.event_name != 'pull_request' + uses: docker/login-action@28218f9b04b4f3f62068d7b6ce6ca5b26e35336c + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + # Extract metadata (tags, labels) for Docker + # https://github.com/docker/metadata-action + - name: Extract Docker metadata + id: meta + uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38 + with: + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + + # Build and push Docker image with Buildx (don't push on PR) + # https://github.com/docker/build-push-action + - name: Build and push Docker image + id: build-and-push + uses: docker/build-push-action@ac9327eae2b366085ac7f6a2d02df8aa8ead720a + with: + context: . 
+ platforms: "linux/amd64,linux/arm64" + push: ${{ github.event_name != 'pull_request' }} + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + cache-from: type=gha + cache-to: type=gha,mode=max diff --git a/.github/workflows/run-test-suite.yml b/.github/workflows/run-test-suite.yml new file mode 100644 index 0000000..c0914db --- /dev/null +++ b/.github/workflows/run-test-suite.yml @@ -0,0 +1,24 @@ +name: Run Unit Tests +on: + push: + branches: + - main + pull_request: + branches: + - main + +jobs: + test: + runs-on: ubuntu-latest + container: + image: node:16 + + steps: + - name: Checkout code + uses: actions/checkout@v2 + + - name: Install dependencies + run: npm ci + + - name: Run Vitest Suite + run: npm test diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..5be3dc7 --- /dev/null +++ b/.gitignore @@ -0,0 +1,40 @@ +# See https://help.github.com/articles/ignoring-files/ for more about ignoring files. + +# dependencies +/node_modules +/.pnp +.pnp.js + +# testing +/coverage +/test-results + +# next.js +/.next/ +/out/ +/dist + +# production +/build + +# misc +.DS_Store +*.pem + +# debug +npm-debug.log* +yarn-debug.log* +yarn-error.log* +.pnpm-debug.log* + +# local env files +.env*.local + +# vercel +.vercel + +# typescript +*.tsbuildinfo +next-env.d.ts +.idea +pnpm-lock.yaml diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..2fc8637 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,45 @@ +# Contributing Guidelines + +**Welcome to Chatbot UI!** + +We appreciate your interest in contributing to our project. + +Before you get started, please read our guidelines for contributing. + +## Types of Contributions + +We welcome the following types of contributions: + +- Bug fixes +- New features +- Documentation improvements +- Code optimizations +- Translations +- Tests + +## Getting Started + +To get started, fork the project on GitHub and clone it locally on your machine. Then, create a new branch to work on your changes. + +``` +git clone https://github.com/mckaywrigley/chatbot-ui.git +cd chatbot-ui +git checkout -b my-branch-name + +``` + +Before submitting your pull request, please make sure your changes pass our automated tests and adhere to our code style guidelines. + +## Pull Request Process + +1. Fork the project on GitHub. +2. Clone your forked repository locally on your machine. +3. Create a new branch from the main branch. +4. Make your changes on the new branch. +5. Ensure that your changes adhere to our code style guidelines and pass our automated tests. +6. Commit your changes and push them to your forked repository. +7. Submit a pull request to the main branch of the main repository. + +## Contact + +If you have any questions or need help getting started, feel free to reach out to me on [Twitter](https://twitter.com/mckaywrigley). diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..6a79faf --- /dev/null +++ b/Dockerfile @@ -0,0 +1,29 @@ +# ---- Base Node ---- +FROM node:19-alpine AS base +WORKDIR /app +COPY package*.json ./ + +# ---- Dependencies ---- +FROM base AS dependencies +RUN npm ci + +# ---- Build ---- +FROM dependencies AS build +COPY . . 
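+# Build the Next.js production bundle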
+RUN npm run build + +# ---- Production ---- +FROM node:19-alpine AS production +WORKDIR /app +COPY --from=dependencies /app/node_modules ./node_modules +COPY --from=build /app/.next ./.next +COPY --from=build /app/public ./public +COPY --from=build /app/package*.json ./ +COPY --from=build /app/next.config.js ./next.config.js +COPY --from=build /app/next-i18next.config.js ./next-i18next.config.js + +# Expose the port the app will run on +EXPOSE 3000 + +# Start the application +CMD ["npm", "start"] diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..8dc4e12 --- /dev/null +++ b/Makefile @@ -0,0 +1,18 @@ +include .env + +.PHONY: all + +build: + docker build -t chatbot-ui . + +run: + export $(cat .env | xargs) + docker stop chatbot-ui || true && docker rm chatbot-ui || true + docker run --name chatbot-ui --rm -e OPENAI_API_KEY=${OPENAI_API_KEY} -p 3000:3000 chatbot-ui + +logs: + docker logs -f chatbot-ui + +push: + docker tag chatbot-ui:latest ${DOCKER_USER}/chatbot-ui:${DOCKER_TAG} + docker push ${DOCKER_USER}/chatbot-ui:${DOCKER_TAG} \ No newline at end of file diff --git a/README.md b/README.md new file mode 100644 index 0000000..ed1aab6 --- /dev/null +++ b/README.md @@ -0,0 +1,38 @@ +# Chatbot UI + +## News + +Chatbot UI 2.0 will launch on January 3rd, 2024. + +See a [preview](https://x.com/mckaywrigley/status/1738273242283151777?s=20). + +This repo will be completely overhauled with the updated codebase. + +The old codebase will be available in a new repo. + +There will be several hours of downtime on ChatbotUI.com while the new version is deployed. + +We recommend exporting your data before the update! + +## About + +Chatbot UI is an open source chat UI for AI models. + +See a [demo](https://twitter.com/mckaywrigley/status/1640380021423603713?s=46&t=AowqkodyK6B4JccSOxSPew). + +![Chatbot UI](./public/screenshots/screenshot-0402023.jpg) + +## Updates + +Chatbot UI will be updated over time. + +Expect frequent improvements. + +**Next up:** + +- [ ] Sharing +- [ ] "Bots" + +## Deploy + + diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000..42f7994 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,53 @@ +# Security Policy + + +This security policy outlines the process for reporting vulnerabilities and secrets found within this GitHub repository. It is essential that all contributors and users adhere to this policy in order to maintain a secure and stable environment. + +## Reporting a Vulnerability + +If you discover a vulnerability within the code, dependencies, or any other component of this repository, please follow these steps: + +1. **Do not disclose the vulnerability publicly.** Publicly disclosing a vulnerability may put the project at risk and could potentially harm other users. + +2. **Contact the repository maintainer(s) privately.** Send a private message or email to the maintainer(s) with a detailed description of the vulnerability. Include the following information: + + - The affected component(s) + - Steps to reproduce the issue + - Potential impact of the vulnerability + - Any possible mitigations or workarounds + +3. **Wait for a response from the maintainer(s).** Please be patient, as they may need time to investigate and verify the issue. The maintainer(s) should acknowledge receipt of your report and provide an estimated time frame for addressing the vulnerability. + +4. **Cooperate with the maintainer(s).** If requested, provide additional information or assistance to help resolve the issue. + +5. 
**Do not disclose the vulnerability until the maintainer(s) have addressed it.** Once the issue has been resolved, the maintainer(s) may choose to publicly disclose the vulnerability and credit you for the discovery. + +## Reporting Secrets + +If you discover any secrets, such as API keys or passwords, within the repository, follow these steps: + +1. **Do not share the secret or use it for unauthorized purposes.** Misusing a secret could have severe consequences for the project and its users. + +2. **Contact the repository maintainer(s) privately.** Notify them of the discovered secret, its location, and any potential risks associated with it. + +3. **Wait for a response and further instructions.** + +## Responsible Disclosure + +We encourage responsible disclosure of vulnerabilities and secrets. If you follow the steps outlined in this policy, we will work with you to understand and address the issue. We will not take legal action against individuals who discover and report vulnerabilities or secrets in accordance with this policy. + +## Patching and Updates + +We are committed to maintaining the security of our project. When vulnerabilities are reported and confirmed, we will: + +1. Work diligently to develop and apply a patch or implement a mitigation strategy. +2. Keep the reporter informed about the progress of the fix. +3. Update the repository with the necessary patches and document the changes in the release notes or changelog. +4. Credit the reporter for the discovery, if they wish to be acknowledged. + +## Contributing to Security + +We welcome contributions that help improve the security of our project. If you have suggestions or want to contribute code to address security issues, please follow the standard contribution guidelines for this repository. When submitting a pull request related to security, please mention that it addresses a security issue and provide any necessary context. + +By adhering to this security policy, you contribute to the overall security and stability of the project. Thank you for your cooperation and responsible handling of vulnerabilities and secrets. 
+ diff --git a/__tests__/utils/app/importExports.test.ts b/__tests__/utils/app/importExports.test.ts new file mode 100644 index 0000000..aa51cbc --- /dev/null +++ b/__tests__/utils/app/importExports.test.ts @@ -0,0 +1,264 @@ +import { DEFAULT_SYSTEM_PROMPT, DEFAULT_TEMPERATURE } from '@/utils/app/const'; +import { + cleanData, + isExportFormatV1, + isExportFormatV2, + isExportFormatV3, + isExportFormatV4, + isLatestExportFormat, +} from '@/utils/app/importExport'; + +import { ExportFormatV1, ExportFormatV2, ExportFormatV4 } from '@/types/export'; +import { OpenAIModelID, OpenAIModels } from '@/types/openai'; + +import { describe, expect, it } from 'vitest'; + +describe('Export Format Functions', () => { + describe('isExportFormatV1', () => { + it('should return true for v1 format', () => { + const obj = [{ id: 1 }]; + expect(isExportFormatV1(obj)).toBe(true); + }); + + it('should return false for non-v1 formats', () => { + const obj = { version: 3, history: [], folders: [] }; + expect(isExportFormatV1(obj)).toBe(false); + }); + }); + + describe('isExportFormatV2', () => { + it('should return true for v2 format', () => { + const obj = { history: [], folders: [] }; + expect(isExportFormatV2(obj)).toBe(true); + }); + + it('should return false for non-v2 formats', () => { + const obj = { version: 3, history: [], folders: [] }; + expect(isExportFormatV2(obj)).toBe(false); + }); + }); + + describe('isExportFormatV3', () => { + it('should return true for v3 format', () => { + const obj = { version: 3, history: [], folders: [] }; + expect(isExportFormatV3(obj)).toBe(true); + }); + + it('should return false for non-v3 formats', () => { + const obj = { version: 4, history: [], folders: [] }; + expect(isExportFormatV3(obj)).toBe(false); + }); + }); + + describe('isExportFormatV4', () => { + it('should return true for v4 format', () => { + const obj = { version: 4, history: [], folders: [], prompts: [] }; + expect(isExportFormatV4(obj)).toBe(true); + }); + + it('should return false for non-v4 formats', () => { + const obj = { version: 5, history: [], folders: [], prompts: [] }; + expect(isExportFormatV4(obj)).toBe(false); + }); + }); +}); + +describe('cleanData Functions', () => { + describe('cleaning v1 data', () => { + it('should return the latest format', () => { + const data = [ + { + id: 1, + name: 'conversation 1', + messages: [ + { + role: 'user', + content: "what's up ?", + }, + { + role: 'assistant', + content: 'Hi', + }, + ], + }, + ] as ExportFormatV1; + const obj = cleanData(data); + expect(isLatestExportFormat(obj)).toBe(true); + expect(obj).toEqual({ + version: 4, + history: [ + { + id: 1, + name: 'conversation 1', + messages: [ + { + role: 'user', + content: "what's up ?", + }, + { + role: 'assistant', + content: 'Hi', + }, + ], + model: OpenAIModels[OpenAIModelID.GPT_3_5], + prompt: DEFAULT_SYSTEM_PROMPT, + temperature: DEFAULT_TEMPERATURE, + folderId: null, + }, + ], + folders: [], + prompts: [], + }); + }); + }); + + describe('cleaning v2 data', () => { + it('should return the latest format', () => { + const data = { + history: [ + { + id: '1', + name: 'conversation 1', + messages: [ + { + role: 'user', + content: "what's up ?", + }, + { + role: 'assistant', + content: 'Hi', + }, + ], + }, + ], + folders: [ + { + id: 1, + name: 'folder 1', + }, + ], + } as ExportFormatV2; + const obj = cleanData(data); + expect(isLatestExportFormat(obj)).toBe(true); + expect(obj).toEqual({ + version: 4, + history: [ + { + id: '1', + name: 'conversation 1', + messages: [ + { + role: 'user', + content: 
"what's up ?", + }, + { + role: 'assistant', + content: 'Hi', + }, + ], + model: OpenAIModels[OpenAIModelID.GPT_3_5], + prompt: DEFAULT_SYSTEM_PROMPT, + temperature: DEFAULT_TEMPERATURE, + folderId: null, + }, + ], + folders: [ + { + id: '1', + name: 'folder 1', + type: 'chat', + }, + ], + prompts: [], + }); + }); + }); + + describe('cleaning v4 data', () => { + it('should return the latest format', () => { + const data = { + version: 4, + history: [ + { + id: '1', + name: 'conversation 1', + messages: [ + { + role: 'user', + content: "what's up ?", + }, + { + role: 'assistant', + content: 'Hi', + }, + ], + model: OpenAIModels[OpenAIModelID.GPT_3_5], + prompt: DEFAULT_SYSTEM_PROMPT, + temperature: DEFAULT_TEMPERATURE, + folderId: null, + }, + ], + folders: [ + { + id: '1', + name: 'folder 1', + type: 'chat', + }, + ], + prompts: [ + { + id: '1', + name: 'prompt 1', + description: '', + content: '', + model: OpenAIModels[OpenAIModelID.GPT_3_5], + folderId: null, + }, + ], + } as ExportFormatV4; + + const obj = cleanData(data); + expect(isLatestExportFormat(obj)).toBe(true); + expect(obj).toEqual({ + version: 4, + history: [ + { + id: '1', + name: 'conversation 1', + messages: [ + { + role: 'user', + content: "what's up ?", + }, + { + role: 'assistant', + content: 'Hi', + }, + ], + model: OpenAIModels[OpenAIModelID.GPT_3_5], + prompt: DEFAULT_SYSTEM_PROMPT, + temperature: DEFAULT_TEMPERATURE, + folderId: null, + }, + ], + folders: [ + { + id: '1', + name: 'folder 1', + type: 'chat', + }, + ], + prompts: [ + { + id: '1', + name: 'prompt 1', + description: '', + content: '', + model: OpenAIModels[OpenAIModelID.GPT_3_5], + folderId: null, + }, + ], + }); + }); + }); +}); diff --git a/components/Buttons/SidebarActionButton/SidebarActionButton.tsx b/components/Buttons/SidebarActionButton/SidebarActionButton.tsx new file mode 100644 index 0000000..2fdc79d --- /dev/null +++ b/components/Buttons/SidebarActionButton/SidebarActionButton.tsx @@ -0,0 +1,17 @@ +import { MouseEventHandler, ReactElement } from 'react'; + +interface Props { + handleClick: MouseEventHandler; + children: ReactElement; +} + +const SidebarActionButton = ({ handleClick, children }: Props) => ( + +); + +export default SidebarActionButton; diff --git a/components/Buttons/SidebarActionButton/index.ts b/components/Buttons/SidebarActionButton/index.ts new file mode 100644 index 0000000..1fce00e --- /dev/null +++ b/components/Buttons/SidebarActionButton/index.ts @@ -0,0 +1 @@ +export { default } from './SidebarActionButton'; diff --git a/components/Chat/Chat.tsx b/components/Chat/Chat.tsx new file mode 100644 index 0000000..fa6b69d --- /dev/null +++ b/components/Chat/Chat.tsx @@ -0,0 +1,512 @@ +import { IconClearAll, IconSettings } from '@tabler/icons-react'; +import { + MutableRefObject, + memo, + useCallback, + useContext, + useEffect, + useRef, + useState, +} from 'react'; +import toast from 'react-hot-toast'; + +import { useTranslation } from 'next-i18next'; + +import { getEndpoint } from '@/utils/app/api'; +import { + saveConversation, + saveConversations, + updateConversation, +} from '@/utils/app/conversation'; +import { throttle } from '@/utils/data/throttle'; + +import { ChatBody, Conversation, Message } from '@/types/chat'; +import { Plugin } from '@/types/plugin'; + +import HomeContext from '@/pages/api/home/home.context'; + +import Spinner from '../Spinner'; +import { ChatInput } from './ChatInput'; +import { ChatLoader } from './ChatLoader'; +import { ErrorMessageDiv } from './ErrorMessageDiv'; +import { ModelSelect } 
from './ModelSelect'; +import { SystemPrompt } from './SystemPrompt'; +import { TemperatureSlider } from './Temperature'; +import { MemoizedChatMessage } from './MemoizedChatMessage'; + +interface Props { + stopConversationRef: MutableRefObject; +} + +export const Chat = memo(({ stopConversationRef }: Props) => { + const { t } = useTranslation('chat'); + + const { + state: { + selectedConversation, + conversations, + models, + apiKey, + pluginKeys, + serverSideApiKeyIsSet, + messageIsStreaming, + modelError, + loading, + prompts, + }, + handleUpdateConversation, + dispatch: homeDispatch, + } = useContext(HomeContext); + + const [currentMessage, setCurrentMessage] = useState(); + const [autoScrollEnabled, setAutoScrollEnabled] = useState(true); + const [showSettings, setShowSettings] = useState(false); + const [showScrollDownButton, setShowScrollDownButton] = + useState(false); + + const messagesEndRef = useRef(null); + const chatContainerRef = useRef(null); + const textareaRef = useRef(null); + + const handleSend = useCallback( + async (message: Message, deleteCount = 0, plugin: Plugin | null = null) => { + if (selectedConversation) { + let updatedConversation: Conversation; + if (deleteCount) { + const updatedMessages = [...selectedConversation.messages]; + for (let i = 0; i < deleteCount; i++) { + updatedMessages.pop(); + } + updatedConversation = { + ...selectedConversation, + messages: [...updatedMessages, message], + }; + } else { + updatedConversation = { + ...selectedConversation, + messages: [...selectedConversation.messages, message], + }; + } + homeDispatch({ + field: 'selectedConversation', + value: updatedConversation, + }); + homeDispatch({ field: 'loading', value: true }); + homeDispatch({ field: 'messageIsStreaming', value: true }); + const chatBody: ChatBody = { + model: updatedConversation.model, + messages: updatedConversation.messages, + key: apiKey, + prompt: updatedConversation.prompt, + temperature: updatedConversation.temperature, + }; + const endpoint = getEndpoint(plugin); + let body; + if (!plugin) { + body = JSON.stringify(chatBody); + } else { + body = JSON.stringify({ + ...chatBody, + googleAPIKey: pluginKeys + .find((key) => key.pluginId === 'google-search') + ?.requiredKeys.find((key) => key.key === 'GOOGLE_API_KEY')?.value, + googleCSEId: pluginKeys + .find((key) => key.pluginId === 'google-search') + ?.requiredKeys.find((key) => key.key === 'GOOGLE_CSE_ID')?.value, + }); + } + const controller = new AbortController(); + const response = await fetch(endpoint, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + signal: controller.signal, + body, + }); + if (!response.ok) { + homeDispatch({ field: 'loading', value: false }); + homeDispatch({ field: 'messageIsStreaming', value: false }); + toast.error(response.statusText); + return; + } + const data = response.body; + if (!data) { + homeDispatch({ field: 'loading', value: false }); + homeDispatch({ field: 'messageIsStreaming', value: false }); + return; + } + if (!plugin) { + if (updatedConversation.messages.length === 1) { + const { content } = message; + const customName = + content.length > 30 ? content.substring(0, 30) + '...' 
: content;
+ updatedConversation = {
+ ...updatedConversation,
+ name: customName,
+ };
+ }
+ homeDispatch({ field: 'loading', value: false });
+ const reader = data.getReader();
+ const decoder = new TextDecoder();
+ let done = false;
+ let isFirst = true;
+ let text = '';
+ while (!done) {
+ if (stopConversationRef.current === true) {
+ controller.abort();
+ done = true;
+ break;
+ }
+ const { value, done: doneReading } = await reader.read();
+ done = doneReading;
+ const chunkValue = decoder.decode(value);
+ text += chunkValue;
+ if (isFirst) {
+ isFirst = false;
+ const updatedMessages: Message[] = [
+ ...updatedConversation.messages,
+ { role: 'assistant', content: chunkValue },
+ ];
+ updatedConversation = {
+ ...updatedConversation,
+ messages: updatedMessages,
+ };
+ homeDispatch({
+ field: 'selectedConversation',
+ value: updatedConversation,
+ });
+ } else {
+ const updatedMessages: Message[] =
+ updatedConversation.messages.map((message, index) => {
+ if (index === updatedConversation.messages.length - 1) {
+ return {
+ ...message,
+ content: text,
+ };
+ }
+ return message;
+ });
+ updatedConversation = {
+ ...updatedConversation,
+ messages: updatedMessages,
+ };
+ homeDispatch({
+ field: 'selectedConversation',
+ value: updatedConversation,
+ });
+ }
+ }
+ saveConversation(updatedConversation);
+ const updatedConversations: Conversation[] = conversations.map(
+ (conversation) => {
+ if (conversation.id === selectedConversation.id) {
+ return updatedConversation;
+ }
+ return conversation;
+ },
+ );
+ if (updatedConversations.length === 0) {
+ updatedConversations.push(updatedConversation);
+ }
+ homeDispatch({ field: 'conversations', value: updatedConversations });
+ saveConversations(updatedConversations);
+ homeDispatch({ field: 'messageIsStreaming', value: false });
+ } else {
+ const { answer } = await response.json();
+ const updatedMessages: Message[] = [
+ ...updatedConversation.messages,
+ { role: 'assistant', content: answer },
+ ];
+ updatedConversation = {
+ ...updatedConversation,
+ messages: updatedMessages,
+ };
+ homeDispatch({
+ field: 'selectedConversation',
+ value: updatedConversation,
+ });
+ saveConversation(updatedConversation);
+ const updatedConversations: Conversation[] = conversations.map(
+ (conversation) => {
+ if (conversation.id === selectedConversation.id) {
+ return updatedConversation;
+ }
+ return conversation;
+ },
+ );
+ if (updatedConversations.length === 0) {
+ updatedConversations.push(updatedConversation);
+ }
+ homeDispatch({ field: 'conversations', value: updatedConversations });
+ saveConversations(updatedConversations);
+ homeDispatch({ field: 'loading', value: false });
+ homeDispatch({ field: 'messageIsStreaming', value: false });
+ }
+ }
+ },
+ [
+ apiKey,
+ conversations,
+ pluginKeys,
+ selectedConversation,
+ stopConversationRef,
+ ],
+ );
+
+ const scrollToBottom = useCallback(() => {
+ if (autoScrollEnabled) {
+ messagesEndRef.current?.scrollIntoView({ behavior: 'smooth' });
+ textareaRef.current?.focus();
+ }
+ }, [autoScrollEnabled]);
+
+ const handleScroll = () => {
+ if (chatContainerRef.current) {
+ const { scrollTop, scrollHeight, clientHeight } =
+ chatContainerRef.current;
+ const bottomTolerance = 30;
+
+ if (scrollTop + clientHeight < scrollHeight - bottomTolerance) {
+ setAutoScrollEnabled(false);
+ setShowScrollDownButton(true);
+ } else {
+ setAutoScrollEnabled(true);
+ setShowScrollDownButton(false);
+ }
+ }
+ };
+
+ const handleScrollDown = () => {
+ chatContainerRef.current?.scrollTo({
+ top:
chatContainerRef.current.scrollHeight, + behavior: 'smooth', + }); + }; + + const handleSettings = () => { + setShowSettings(!showSettings); + }; + + const onClearAll = () => { + if ( + confirm(t('Are you sure you want to clear all messages?')) && + selectedConversation + ) { + handleUpdateConversation(selectedConversation, { + key: 'messages', + value: [], + }); + } + }; + + const scrollDown = () => { + if (autoScrollEnabled) { + messagesEndRef.current?.scrollIntoView(true); + } + }; + const throttledScrollDown = throttle(scrollDown, 250); + + // useEffect(() => { + // console.log('currentMessage', currentMessage); + // if (currentMessage) { + // handleSend(currentMessage); + // homeDispatch({ field: 'currentMessage', value: undefined }); + // } + // }, [currentMessage]); + + useEffect(() => { + throttledScrollDown(); + selectedConversation && + setCurrentMessage( + selectedConversation.messages[selectedConversation.messages.length - 2], + ); + }, [selectedConversation, throttledScrollDown]); + + useEffect(() => { + const observer = new IntersectionObserver( + ([entry]) => { + setAutoScrollEnabled(entry.isIntersecting); + if (entry.isIntersecting) { + textareaRef.current?.focus(); + } + }, + { + root: null, + threshold: 0.5, + }, + ); + const messagesEndElement = messagesEndRef.current; + if (messagesEndElement) { + observer.observe(messagesEndElement); + } + return () => { + if (messagesEndElement) { + observer.unobserve(messagesEndElement); + } + }; + }, [messagesEndRef]); + + return ( +
+ {!(apiKey || serverSideApiKeyIsSet) ? ( +
+
+ Welcome to Chatbot UI +
+
+
{`Chatbot UI is an open source clone of OpenAI's ChatGPT UI.`}
+
+ Important: Chatbot UI is 100% unaffiliated with OpenAI. +
+
+
+
+ Chatbot UI allows you to plug in your API key to use this UI with + their API. +
+
+ It is only used to communicate + with their API. +
+
+ {t( + 'Please set your OpenAI API key in the bottom left of the sidebar.', + )} +
+
+ {t("If you don't have an OpenAI API key, you can get one here: ")} + + openai.com + +
+
+
+ ) : modelError ? ( + <ErrorMessageDiv error={modelError} /> + ) : ( + <> +
+ {selectedConversation?.messages.length === 0 ? ( + <> +
+
+ {models.length === 0 ? ( +
+ +
+ ) : ( + 'Chatbot UI' + )} +
+ + {models.length > 0 && ( +
+ + + + handleUpdateConversation(selectedConversation, { + key: 'prompt', + value: prompt, + }) + } + /> + + + handleUpdateConversation(selectedConversation, { + key: 'temperature', + value: temperature, + }) + } + /> +
+ )} +
+ + ) : ( + <> +
+ {t('Model')}: {selectedConversation?.model.name} | {t('Temp')} + : {selectedConversation?.temperature} | + + +
+ {showSettings && ( +
+
+ +
+
+ )} + + {selectedConversation?.messages.map((message, index) => ( + { + setCurrentMessage(editedMessage); + // discard edited message and the ones that come after then resend + handleSend( + editedMessage, + selectedConversation?.messages.length - index, + ); + }} + /> + ))} + + {loading && } + +
+ + )} +
+ + { + setCurrentMessage(message); + handleSend(message, 0, plugin); + }} + onScrollDownClick={handleScrollDown} + onRegenerate={() => { + if (currentMessage) { + handleSend(currentMessage, 2, null); + } + }} + showScrollDownButton={showScrollDownButton} + /> + + )} +
+ ); +}); +Chat.displayName = 'Chat'; diff --git a/components/Chat/ChatInput.tsx b/components/Chat/ChatInput.tsx new file mode 100644 index 0000000..64f8df6 --- /dev/null +++ b/components/Chat/ChatInput.tsx @@ -0,0 +1,398 @@ +import { + IconArrowDown, + IconBolt, + IconBrandGoogle, + IconPlayerStop, + IconRepeat, + IconSend, +} from '@tabler/icons-react'; +import { + KeyboardEvent, + MutableRefObject, + useCallback, + useContext, + useEffect, + useRef, + useState, +} from 'react'; + +import { useTranslation } from 'next-i18next'; + +import { Message } from '@/types/chat'; +import { Plugin } from '@/types/plugin'; +import { Prompt } from '@/types/prompt'; + +import HomeContext from '@/pages/api/home/home.context'; + +import { PluginSelect } from './PluginSelect'; +import { PromptList } from './PromptList'; +import { VariableModal } from './VariableModal'; + +interface Props { + onSend: (message: Message, plugin: Plugin | null) => void; + onRegenerate: () => void; + onScrollDownClick: () => void; + stopConversationRef: MutableRefObject; + textareaRef: MutableRefObject; + showScrollDownButton: boolean; +} + +export const ChatInput = ({ + onSend, + onRegenerate, + onScrollDownClick, + stopConversationRef, + textareaRef, + showScrollDownButton, +}: Props) => { + const { t } = useTranslation('chat'); + + const { + state: { selectedConversation, messageIsStreaming, prompts }, + + dispatch: homeDispatch, + } = useContext(HomeContext); + + const [content, setContent] = useState(); + const [isTyping, setIsTyping] = useState(false); + const [showPromptList, setShowPromptList] = useState(false); + const [activePromptIndex, setActivePromptIndex] = useState(0); + const [promptInputValue, setPromptInputValue] = useState(''); + const [variables, setVariables] = useState([]); + const [isModalVisible, setIsModalVisible] = useState(false); + const [showPluginSelect, setShowPluginSelect] = useState(false); + const [plugin, setPlugin] = useState(null); + + const promptListRef = useRef(null); + + const filteredPrompts = prompts.filter((prompt) => + prompt.name.toLowerCase().includes(promptInputValue.toLowerCase()), + ); + + const handleChange = (e: React.ChangeEvent) => { + const value = e.target.value; + const maxLength = selectedConversation?.model.maxLength; + + if (maxLength && value.length > maxLength) { + alert( + t( + `Message limit is {{maxLength}} characters. You have entered {{valueLength}} characters.`, + { maxLength, valueLength: value.length }, + ), + ); + return; + } + + setContent(value); + updatePromptListVisibility(value); + }; + + const handleSend = () => { + if (messageIsStreaming) { + return; + } + + if (!content) { + alert(t('Please enter a message')); + return; + } + + onSend({ role: 'user', content }, plugin); + setContent(''); + setPlugin(null); + + if (window.innerWidth < 640 && textareaRef && textareaRef.current) { + textareaRef.current.blur(); + } + }; + + const handleStopConversation = () => { + stopConversationRef.current = true; + setTimeout(() => { + stopConversationRef.current = false; + }, 1000); + }; + + const isMobile = () => { + const userAgent = + typeof window.navigator === 'undefined' ? 
'' : navigator.userAgent; + const mobileRegex = + /Android|webOS|iPhone|iPad|iPod|BlackBerry|IEMobile|Opera Mini|Mobile|mobile|CriOS/i; + return mobileRegex.test(userAgent); + }; + + const handleInitModal = () => { + const selectedPrompt = filteredPrompts[activePromptIndex]; + if (selectedPrompt) { + setContent((prevContent) => { + const newContent = prevContent?.replace( + /\/\w*$/, + selectedPrompt.content, + ); + return newContent; + }); + handlePromptSelect(selectedPrompt); + } + setShowPromptList(false); + }; + + const handleKeyDown = (e: KeyboardEvent) => { + if (showPromptList) { + if (e.key === 'ArrowDown') { + e.preventDefault(); + setActivePromptIndex((prevIndex) => + prevIndex < prompts.length - 1 ? prevIndex + 1 : prevIndex, + ); + } else if (e.key === 'ArrowUp') { + e.preventDefault(); + setActivePromptIndex((prevIndex) => + prevIndex > 0 ? prevIndex - 1 : prevIndex, + ); + } else if (e.key === 'Tab') { + e.preventDefault(); + setActivePromptIndex((prevIndex) => + prevIndex < prompts.length - 1 ? prevIndex + 1 : 0, + ); + } else if (e.key === 'Enter') { + e.preventDefault(); + handleInitModal(); + } else if (e.key === 'Escape') { + e.preventDefault(); + setShowPromptList(false); + } else { + setActivePromptIndex(0); + } + } else if (e.key === 'Enter' && !isTyping && !isMobile() && !e.shiftKey) { + e.preventDefault(); + handleSend(); + } else if (e.key === '/' && e.metaKey) { + e.preventDefault(); + setShowPluginSelect(!showPluginSelect); + } + }; + + const parseVariables = (content: string) => { + const regex = /{{(.*?)}}/g; + const foundVariables = []; + let match; + + while ((match = regex.exec(content)) !== null) { + foundVariables.push(match[1]); + } + + return foundVariables; + }; + + const updatePromptListVisibility = useCallback((text: string) => { + const match = text.match(/\/\w*$/); + + if (match) { + setShowPromptList(true); + setPromptInputValue(match[0].slice(1)); + } else { + setShowPromptList(false); + setPromptInputValue(''); + } + }, []); + + const handlePromptSelect = (prompt: Prompt) => { + const parsedVariables = parseVariables(prompt.content); + setVariables(parsedVariables); + + if (parsedVariables.length > 0) { + setIsModalVisible(true); + } else { + setContent((prevContent) => { + const updatedContent = prevContent?.replace(/\/\w*$/, prompt.content); + return updatedContent; + }); + updatePromptListVisibility(prompt.content); + } + }; + + const handleSubmit = (updatedVariables: string[]) => { + const newContent = content?.replace(/{{(.*?)}}/g, (match, variable) => { + const index = variables.indexOf(variable); + return updatedVariables[index]; + }); + + setContent(newContent); + + if (textareaRef && textareaRef.current) { + textareaRef.current.focus(); + } + }; + + useEffect(() => { + if (promptListRef.current) { + promptListRef.current.scrollTop = activePromptIndex * 30; + } + }, [activePromptIndex]); + + useEffect(() => { + if (textareaRef && textareaRef.current) { + textareaRef.current.style.height = 'inherit'; + textareaRef.current.style.height = `${textareaRef.current?.scrollHeight}px`; + textareaRef.current.style.overflow = `${ + textareaRef?.current?.scrollHeight > 400 ? 
'auto' : 'hidden' + }`; + } + }, [content]); + + useEffect(() => { + const handleOutsideClick = (e: MouseEvent) => { + if ( + promptListRef.current && + !promptListRef.current.contains(e.target as Node) + ) { + setShowPromptList(false); + } + }; + + window.addEventListener('click', handleOutsideClick); + + return () => { + window.removeEventListener('click', handleOutsideClick); + }; + }, []); + + return ( +
+
+ {messageIsStreaming && ( + + )} + + {!messageIsStreaming && + selectedConversation && + selectedConversation.messages.length > 0 && ( + + )} + +
+ + + {showPluginSelect && ( +
+ { + if (e.key === 'Escape') { + e.preventDefault(); + setShowPluginSelect(false); + textareaRef.current?.focus(); + } + }} + onPluginChange={(plugin: Plugin) => { + setPlugin(plugin); + setShowPluginSelect(false); + + if (textareaRef && textareaRef.current) { + textareaRef.current.focus(); + } + }} + /> +
+ )} + +