From 0e07687a4a5252923c7693969a1f243c66d7ec12 Mon Sep 17 00:00:00 2001
From: Christian Martinez
Date: Sat, 9 Dec 2023 19:42:50 -0700
Subject: [PATCH 1/4] Separate css classes for minimalist and default loader components

---
 README.md                                   | 479 ++++++++++++--------
 package.json                                |   2 +-
 src/components/default/chattr-loader.tsx    |   6 +-
 src/components/minimalist/chattr-loader.tsx |   6 +-
 4 files changed, 297 insertions(+), 196 deletions(-)

diff --git a/README.md b/README.md
index f2a3df1..91fd5dc 100644
--- a/README.md
+++ b/README.md
@@ -4,13 +4,15 @@

 # chattr

-A customizable chatgpt chatbot component library for React, built with tailwindcss.
+A customizable ChatGPT chatbot component library for Next.js, built with React, Tailwind CSS, and TypeScript.

 ### Table of Contents

 **[Installation Instructions](#installation)**
**[Configuration Instructions](#configuration)**
+**[Themes](#themes)**
**[Endpoints](#endpoints)**
+**[Function Calling](#function-calling)**
**[Usage for Nextjs](#usage-for-nextjs)**
**[Usage for Reactjs](#usage-for-reactjs)**
**[Customizations](#customizations)**
@@ -27,12 +29,12 @@ npm i chattr@latest # Configuration -Before using `chattr`, we need to configure a few things. First, ensure that you are on the latest versions of `react, react-dom, and tailwindcss` +Before using `chattr`, we need to configure a few things. First, ensure that you are on the latest versions of `react, react-dom, and tailwindcss`. Feel free to try other versions, however do note that they have not been tested. ```json "react": "^18.2.0", "react-dom": "^18.2.0", - "tailwindcss": "^3.3.5" + "tailwindcss": "^3.3.6" ``` If you aren't on the latest versions, start a **new** branch. Then update your dependencies on that branch: @@ -98,7 +100,7 @@ And your `globals.css` file looks like this: @tailwind components; @tailwind utilities; -/* Animation styles for the ChattrLoader component */ +/* Animation styles for the ChattrLoader components */ @layer utilities { .animation-delay-200 { animation-delay: 0.15s; @@ -107,102 +109,343 @@ And your `globals.css` file looks like this: animation-delay: 0.3s; } } -/* Dot style for the ChattrLoader component */ -.chattrDot { +/* Dot styles for the ChattrLoader components */ +.chattrDotDefault { @apply bg-chattrWhite mx-0.5 h-[6px] w-[6px] rounded-full; } + +.chattrDotMinimalist { + @apply bg-chattrSecondary dark:bg-chattrSecondaryDark mx-0.5 h-[6px] w-[6px] rounded-full; +} ``` -This is for the custom loader that comes shipped with the chatbot in between states of sent messages. You can customize it, or create your own. +This is for the overall chattr styles, and a custom loader that comes shipped with the chatbot in between states of sent messages. You can customize it, or create your own! You can view the repo [here](https://github.com/christianbmartinez/chattr) Next, you need an `OPENAI_API_KEY`. If you don't have one already, click [here](https://platform.openai.com/api-keys) to get one. -Once you have your key, install `dotenv` if required and create a `.env` file in the root of your project. Insert your api key there. **In production, remember to copy your api key, to your environment variables section.** +Once you have your key, install `dotenv` if required and create a `.env` file in the root of your project. Insert your api key there, along with any other api keys if you plan on using [function calling](#function-calling). **In production, remember to copy your api key, to your environment variables section.** ```bash OPENAI_API_KEY='YOUR_OPENAI_API_KEY' +WEATHER_APP_ID='YOUR_OPENWEATHERMAPS_API_KEY' +REPLICATE_API_TOKEN='YOUR_REPLICATE_TOKEN' ``` -# Endpoints +# Themes -In order to use chattr, you have to create an endpoint that handles a post request to the chatGpt completions api. By default, chattr passes the users message as `prompt` to the backend. When developing your api endpoint, you must extract the "prompt" parameter. If you decide to [customize](#customizations) your chatbot with the boilerplate, you can configure it to send a request directly to open ai's completions endpoint instead. +Chattr currently has two themes- `Default` and `Minimalist`. The default theme was styled by me, and the minimalist theme was styled originally by [shadcn](https://ui.shadcn.com/themes) and has been highly customized to be used as a chattrbot. 
Shadcn himself even reposted the tweet of his chat component being used in chattr: [tweet](https://twitter.com/_coderchris/status/1729318382355587335) To see how the themes work, please visit the chattr [repo](https://github.com/christianbmartinez/chattr) -For Next js, here is an example of an api route where we are destructuring the prompt that contains the users message, and inserting it into a payload. +# Endpoints -In the payload, we have our chatgpt model, instructions, the users prompt, and other configurations. +In order to use chattr, you have to create an endpoint that handles a post request to the chatGpt completions api. -We are then posting that data to the chat gpt completions endpoint as a string with our api key, to get back a subsequent response from open ai. +If you're using the `Default.Chattrbot`, you can copy and paste this route to `app/api/chat-gpt/route.ts` as a starting point: ```typescript -// app/api/chatGpt/route.ts +// app/api/chat-gpt/route.ts import { NextRequest, NextResponse } from 'next/server' export async function POST(req: NextRequest) { try { - const { prompt } = await req.json() + const { + prompt, + chattrBotName, + chattrBotHistory, + }: { + prompt: string + chattrBotName: string | number + chattrBotHistory: string + } = await req.json() + + const chatHistory = JSON.stringify(chattrBotHistory) const payload = { - model: 'gpt-4-0613', + model: 'gpt-4-1106-preview', messages: [ { role: 'system', - content: - "You are a chatbot that responds with any information the user requests. If the user asks an inappropriate question, answer with I'm sorry, but I can't assist you with that question. Use a professional tone.", + content: ` + You are a chatbot named ${chattrBotName}. + Respond with any information that the user requests. + You can view the entire chat history here, where your role is the assistant, and the users role is user: ${chatHistory}. + This history is helpful if you need to recall any information or understand context from chat. + Use a professional tone in your responses.`, + }, + { + role: 'assistant', + content: `Hey! Thanks for visiting. I'm ${chattrBotName}, you can ask me anything!`, // Replace with your own greeting }, { role: 'user', - content: prompt, + content: prompt, // The users prompt }, ], - temperature: 0.7, + temperature: 0.7, // Your configs frequency_penalty: 0, presence_penalty: 0, - max_tokens: 50, + max_tokens: 75, n: 1, } - const response = await fetch('https://api.openai.com/v1/chat/completions', { - headers: { - 'Content-Type': 'application/json', - Authorization: `Bearer ${process.env.OPENAI_API_KEY}`, - }, - method: 'POST', - body: JSON.stringify(payload), + const response: Response = await fetch( + 'https://api.openai.com/v1/chat/completions', + { + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${process.env.OPENAI_API_KEY}`, + }, + method: 'POST', + body: JSON.stringify(payload), + } + ) + + if (!response.ok) { + return NextResponse.json({ + ok: false, + error: + 'Looks like something went wrong fetching that answer! Try again later.', + }) + } + + const completion = await response.json() + + return NextResponse.json({ + ok: true, + content: { text: completion.choices[0].message.content }, }) + } catch (error) { + console.log(error) + return NextResponse.json({ + ok: false, + error: 'Looks like something went wrong. 
Try again later.', + }) + } +} +``` + +You can view the default chattrbot [here](https://github.com/christianbmartinez/chattr/blob/main/src/components/default/chattrbot.tsx) to understand how it works with the `app/api/chat-gpt/route.ts` route. + +# Function calling + +If you're using the `Minimalist.Chattrbot`, the route is a lot different in that it uses function calling. A solution that I came up with is using a ui key value pair within the response object to tell the client what type of component to render. Here's how it works in the [component](https://github.com/christianbmartinez/chattr/blob/main/src/components/minimalist/chattr-messages.tsx). + +You can copy and paste the following as a starting point to `app/api/function-calling/route.ts` + +```typescript +// app/api/function-calling/route.ts +import { NextRequest, NextResponse } from 'next/server' +import { get_current_weather, create_image, create_video } from '@/helpers' // Your functions - const data = await response.json() +export async function POST(req: NextRequest) { + try { + const { + prompt, + chattrBotName, + chattrBotHistory, + }: { + prompt: string + chattrBotName: string | number + chattrBotHistory: string + } = await req.json() + + const chatHistory = JSON.stringify(chattrBotHistory) + + const payload = { + model: 'gpt-4-1106-preview', + messages: [ + { + role: 'system', + content: ` + You are a chatbot named ${chattrBotName}. + Respond with any information that the user requests. + You can view the entire chat history here, where your role is the assistant, and the users role is user: ${chatHistory}. + This history is helpful if you need to recall any information or understand context from chat. + Use a professional tone in your responses.`, + }, + { + role: 'assistant', + content: `Hey! Thanks for visiting. I'm ${chattrBotName}, you can ask me anything!`, + }, + { + role: 'user', + content: prompt, + }, + ], + functions: [ + // Define your functions see more at https://platform.openai.com/docs/guides/function-calling + { + name: 'get_current_weather', + description: 'Get the current weather', + parameters: { + type: 'object', + properties: { + zipcode: { + type: 'string', + description: + 'The zipcode of the city. For example, 90210 for Beverly Hills. If the user passes in a city, retrieve any zip code for that city and use it as the zipcode value.', + }, + state: { + type: 'string', + description: + 'The state of the city. For example: CA if the user asks for the weather in Beverly Hills, or UT if the user asks for the weather in Salt Lake City, etc. 
If the user passes a zip code or a city name as a zip code, retrieve the state that belongs to that zip code and use it as the state value.', + }, + }, + required: ['zipcode', 'state'], + }, + }, + { + name: 'create_image', + description: 'Create an image for the given description', + parameters: { + type: 'object', + properties: { + description: { + type: 'string', + description: 'Description of what the image should be.', + }, + }, + required: ['description'], + }, + }, + { + name: 'create_video', + description: 'Create a video for a given description', + parameters: { + type: 'object', + properties: { + description: { + type: 'string', + description: 'Description of what the video should be.', + }, + }, + required: ['description'], + }, + }, + ], + function_call: 'auto', // the completions api will automatically call the functions for you + temperature: 0.7, + frequency_penalty: 0, + presence_penalty: 0, + max_tokens: 75, + n: 1, + } - return NextResponse.json({ data }) + const response: Response = await fetch( + 'https://api.openai.com/v1/chat/completions', + { + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${process.env.OPENAI_API_KEY}`, + }, + method: 'POST', + body: JSON.stringify(payload), + } + ) + + const completion = await response.json() + + if (completion.choices[0].message.content === null) { + // If the content is null, that means it's a function call + const args = JSON.parse( + completion.choices[0].message.function_call.arguments + ) + + const functionCall = completion.choices[0].message.function_call.name + + if (functionCall === 'get_current_weather') { + const { + temperature, + celcius, + location, + url, + description, + humidity, + wind, + clouds, + state, + } = await get_current_weather(args.zipcode, args.state) + + return NextResponse.json({ + ok: true, + ui: 'weather', + content: { + function_response: { + temperature: temperature, + celcius: celcius, + location: location, + url: url, + description: description, + humidity: humidity, + wind: wind, + clouds: clouds, + state: state, + }, + }, + }) + } else if (functionCall === 'create_image') { + const { description, url } = await create_image(args.description) + + return NextResponse.json({ + ok: true, + ui: 'image', + content: { + function_response: { + description: description, + url: url, + }, + }, + }) + } else if (functionCall === 'create_video') { + const { description, url } = await create_video(args.description) + + return NextResponse.json({ + ok: true, + ui: 'video', + content: { + function_response: { + description: description, + url: url, + }, + }, + }) + } else { + return NextResponse.json({ + ok: false, + error: + 'Looks like something went wrong while generating that. Please try again! If the problem persists, let us know at hello@example.com.', + }) + } + } else { + return NextResponse.json({ + ok: true, + content: { text: completion.choices[0].message.content }, + }) + } } catch (error) { console.log(error) - return NextResponse.json({ error }) + return NextResponse.json({ ok: false, error: JSON.stringify(error) }) } } ``` -It's worth mentioning that you should protect this route with some type of authentication, or at the very least, use a rate limiter. `@upstash/ratelimit @upstash/redis` is a great option. You can view the package [here](https://www.npmjs.com/package/@upstash/ratelimit). 
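The route above imports `get_current_weather`, `create_image`, and `create_video` from `@/helpers`, which you have to write yourself. As a rough sketch (not part of chattr, and assuming the OpenWeatherMap current weather endpoint plus the `WEATHER_APP_ID` key from your `.env` file), a `get_current_weather` helper could look something like this, with a return shape that simply mirrors the fields the route destructures:

```typescript
// helpers/get-current-weather.ts (illustrative sketch, adapt to your own helpers file)
// Assumes the OpenWeatherMap "current weather" API and a US zip code.
export async function get_current_weather(zipcode: string, state: string) {
  const res = await fetch(
    `https://api.openweathermap.org/data/2.5/weather?zip=${zipcode},US&units=imperial&appid=${process.env.WEATHER_APP_ID}`
  )

  if (!res.ok) {
    throw new Error(`Weather request failed with status ${res.status}`)
  }

  const data = await res.json()
  const fahrenheit: number = data.main.temp

  // Field names mirror what app/api/function-calling/route.ts destructures
  return {
    temperature: Math.round(fahrenheit),
    celcius: Math.round(((fahrenheit - 32) * 5) / 9), // key spelled as in the route
    location: data.name,
    url: `https://openweathermap.org/img/wn/${data.weather[0].icon}@2x.png`,
    description: data.weather[0].description,
    humidity: data.main.humidity,
    wind: data.wind.speed,
    clouds: data.clouds.all,
    state: state,
  }
}
```

`create_image` and `create_video` follow the same pattern against whatever generation service you use (for example, Replicate with your `REPLICATE_API_TOKEN`). Whatever the helpers return, the route wraps it in `content.function_response` along with a `ui` key (`'weather'`, `'image'`, or `'video'`), and the minimalist chattr-messages component reads that key to decide which kind of card to render.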
+You can view the minimalist chattrbot [here](https://github.com/christianbmartinez/chattr/blob/main/src/components/minimalist/chattrbot.tsx) to understand how it works with the `app/api/function-calling/route.ts` route.
+
+It's worth mentioning that you should protect your routes with some type of authentication, or at the very least, use a rate limiter. `@upstash/ratelimit` with `@upstash/redis` is a great option. You can view the package [here](https://www.npmjs.com/package/@upstash/ratelimit).
 
 # Usage for Nextjs
 
-For quick and easy setup in Next js, you can import a ready made chatbot. Just wrap it in a separate component with the `use client` directive:
+After you have set up the route you need, you can import a chattrbot! Just wrap it in a separate component with the `use client` directive:
 
 ```tsx
 // components/chattr-example.tsx
 'use client'
 
-import { ChattrBot } from 'chattr'
+import { Default } from 'chattr'
+// For function calling, import { Minimalist } from 'chattr' instead
 
 export default function ChattrExample() {
-  return (
-    // These are the default props. If you don't need to customize these, just return <ChattrBot />
-  )
+  return <Default.Chattrbot />
+  // return <Minimalist.Chattrbot />
 }
 ```
 
@@ -241,151 +484,9 @@ export default function RootLayout({
   )
 }
 ```
 
-# Usage for Reactjs
-
-If you are using React, you can use it without creating a wrapper component since you do not need the `use client` directive. Note: The library is still in development, it has not been tested in any other environment aside from next.
-
-```tsx
-// Your specified file
-import { Navigation, Footer } from './your-components'
-import { ChattrBot } from 'chattr'
-
-export default function Layout({ children }: { children: React.ReactNode }) {
-  return (
-    <>
-      <Navigation />
{children}
-