feat(ui): add client example from https://github.com/vercel-labs/ai-c…
phodal committed Aug 16, 2023
1 parent 522a450 commit aad05e0
Showing 83 changed files with 9,301 additions and 0 deletions.
20 changes: 20 additions & 0 deletions counit-client/.env.example
@@ -0,0 +1,20 @@
# You must first activate a Billing Account here: https://platform.openai.com/account/billing/overview
# Then get your OpenAI API Key here: https://platform.openai.com/account/api-keys
OPENAI_API_KEY=XXXXXXXX

# Generate a random secret: https://generate-secret.vercel.app/32 or `openssl rand -base64 32`
AUTH_SECRET=XXXXXXXX
# Create a GitHub OAuth app here: https://github.com/settings/applications/new
# Authorization callback URL: https://authjs.dev/reference/core/providers_github#callback-url
AUTH_GITHUB_ID=XXXXXXXX
AUTH_GITHUB_SECRET=XXXXXXXX
# Support OAuth login on preview deployments, see: https://authjs.dev/guides/basics/deployment#securing-a-preview-deployment
# Set the following only when deployed. In this example, we can reuse the same OAuth app, but if you are storing users, we recommend using a different OAuth app for development/production so that you don't mix your test and production user base.
# AUTH_REDIRECT_PROXY_URL=https://YOURAPP.vercel.app/api/auth

# Instructions to create a KV database here: https://vercel.com/docs/storage/vercel-kv/quickstart
KV_URL=XXXXXXXX
KV_REST_API_URL=XXXXXXXX
KV_REST_API_TOKEN=XXXXXXXX
KV_REST_API_READ_ONLY_TOKEN=XXXXXXXX

25 changes: 25 additions & 0 deletions counit-client/.eslintrc.json
@@ -0,0 +1,25 @@
{
"$schema": "https://json.schemastore.org/eslintrc",
"root": true,
"extends": [
"next/core-web-vitals",
"prettier",
"plugin:tailwindcss/recommended"
],
"plugins": ["tailwindcss"],
"rules": {
"tailwindcss/no-custom-classname": "off"
},
"settings": {
"tailwindcss": {
"callees": ["cn", "cva"],
"config": "tailwind.config.js"
}
},
"overrides": [
{
"files": ["*.ts", "*.tsx"],
"parser": "@typescript-eslint/parser"
}
]
}
38 changes: 38 additions & 0 deletions counit-client/.gitignore
@@ -0,0 +1,38 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.

# dependencies
node_modules
.pnp
.pnp.js

# testing
coverage

# next.js
.next/
out/
build

# misc
.DS_Store
*.pem

# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*
.pnpm-debug.log*

# local env files
.env.local
.env.development.local
.env.test.local
.env.production.local

# turbo
.turbo

.contentlayer
.env
.vercel
.vscode
13 changes: 13 additions & 0 deletions counit-client/LICENSE
@@ -0,0 +1,13 @@
Copyright 2023 Vercel, Inc.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
73 changes: 73 additions & 0 deletions counit-client/README.md
@@ -0,0 +1,73 @@
<a href="https://chat.vercel.ai/">
<img alt="Next.js 13 and App Router-ready AI chatbot." src="https://chat.vercel.ai/opengraph-image.png">
<h1 align="center">Next.js AI Chatbot</h1>
</a>

<p align="center">
An open-source AI chatbot app template built with Next.js, the Vercel AI SDK, OpenAI, and Vercel KV.
</p>

<p align="center">
<a href="#features"><strong>Features</strong></a> ·
<a href="#model-providers"><strong>Model Providers</strong></a> ·
<a href="#deploy-your-own"><strong>Deploy Your Own</strong></a> ·
<a href="#running-locally"><strong>Running locally</strong></a> ·
<a href="#authors"><strong>Authors</strong></a>
</p>
<br/>

## Features

- [Next.js](https://nextjs.org) App Router
- React Server Components (RSCs), Suspense, and Server Actions
- [Vercel AI SDK](https://sdk.vercel.ai/docs) for streaming chat UI
- Support for OpenAI (default), Anthropic, Hugging Face, and custom AI chat models, with optional LangChain support
- Edge runtime-ready
- [shadcn/ui](https://ui.shadcn.com)
- Styling with [Tailwind CSS](https://tailwindcss.com)
- [Radix UI](https://radix-ui.com) for headless component primitives
- Icons from [Phosphor Icons](https://phosphoricons.com)
- Chat History, rate limiting, and session storage with [Vercel KV](https://vercel.com/storage/kv)
- [NextAuth.js](https://github.com/nextauthjs/next-auth) for authentication

## Model Providers

This template ships with OpenAI `gpt-3.5-turbo` as the default. However, thanks to the [Vercel AI SDK](https://sdk.vercel.ai/docs), you can switch LLM providers to [Anthropic](https://anthropic.com) or [Hugging Face](https://huggingface.co), or use [LangChain](https://js.langchain.com), with just a few lines of code.
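
As a rough illustration (a sketch, not part of this commit), swapping the OpenAI call in `app/api/chat/route.ts` for Anthropic might look like the following. It assumes the AI SDK's `AnthropicStream` helper, an `ANTHROPIC_API_KEY` environment variable, and Anthropic's 2023-era completion endpoint, and it flattens the chat messages into that API's prompt format:

```ts
// Hypothetical Anthropic variant of app/api/chat/route.ts (sketch only).
import { AnthropicStream, StreamingTextResponse } from 'ai'

export const runtime = 'edge'

export async function POST(req: Request) {
  const { messages } = await req.json()

  // Anthropic's completion API takes a single prompt string, so the chat
  // messages are flattened into Human/Assistant turns here.
  const prompt =
    messages
      .map((m: { role: string; content: string }) =>
        m.role === 'user' ? `\n\nHuman: ${m.content}` : `\n\nAssistant: ${m.content}`
      )
      .join('') + '\n\nAssistant:'

  const res = await fetch('https://api.anthropic.com/v1/complete', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'x-api-key': process.env.ANTHROPIC_API_KEY ?? ''
    },
    body: JSON.stringify({
      model: 'claude-2',
      prompt,
      max_tokens_to_sample: 300,
      stream: true
    })
  })

  // AnthropicStream adapts the provider's SSE stream to the same
  // StreamingTextResponse contract the OpenAI route returns.
  const stream = AnthropicStream(res)
  return new StreamingTextResponse(stream)
}
```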

## Deploy Your Own

You can deploy your own version of the Next.js AI Chatbot to Vercel with one click:

[![Deploy with Vercel](https://vercel.com/button)](https://vercel.com/new/clone?demo-title=Next.js+Chat&demo-description=A+full-featured%2C+hackable+Next.js+AI+chatbot+built+by+Vercel+Labs&demo-url=https%3A%2F%2Fchat.vercel.ai%2F&demo-image=%2F%2Fimages.ctfassets.net%2Fe5382hct74si%2F4aVPvWuTmBvzM5cEdRdqeW%2F4234f9baf160f68ffb385a43c3527645%2FCleanShot_2023-06-16_at_17.09.21.png&project-name=Next.js+Chat&repository-name=nextjs-chat&repository-url=https%3A%2F%2Fgithub.com%2Fvercel-labs%2Fai-chatbot&from=templates&skippable-integrations=1&env=OPENAI_API_KEY%2CAUTH_GITHUB_ID%2CAUTH_GITHUB_SECRET%2CAUTH_SECRET&envDescription=How+to+get+these+env+vars&envLink=https%3A%2F%2Fgithub.com%2Fvercel-labs%2Fai-chatbot%2Fblob%2Fmain%2F.env.example&teamCreateStatus=hidden&stores=[{"type":"kv"}])

## Creating a KV Database Instance

Follow the steps outlined in the [quick start guide](https://vercel.com/docs/storage/vercel-kv/quickstart#create-a-kv-database) provided by Vercel. This guide will assist you in creating and configuring your KV database instance on Vercel, enabling your application to interact with it.

Remember to update your environment variables (`KV_URL`, `KV_REST_API_URL`, `KV_REST_API_TOKEN`, `KV_REST_API_READ_ONLY_TOKEN`) in the `.env` file with the appropriate credentials provided during the KV database setup.
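
A quick way to confirm those credentials work is a one-off round-trip script (a minimal sketch, not part of this commit; `@vercel/kv` reads `KV_REST_API_URL` and `KV_REST_API_TOKEN` from the environment):

```ts
// kv-check.ts (hypothetical helper script): verifies the Vercel KV credentials.
import { kv } from '@vercel/kv'

async function main() {
  // Round-trips a value; this throws if the URL or token in .env is wrong.
  await kv.set('healthcheck', Date.now())
  console.log('KV round-trip value:', await kv.get('healthcheck'))
}

main().catch(err => {
  console.error('KV connection failed; check the values from the Vercel dashboard:', err)
  process.exit(1)
})
```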


## Running locally

You will need to use the environment variables [defined in `.env.example`](.env.example) to run Next.js AI Chatbot. It's recommended you use [Vercel Environment Variables](https://vercel.com/docs/concepts/projects/environment-variables) for this, but a `.env` file is all that is necessary.

> Note: You should not commit your `.env` file or it will expose secrets that will allow others to control access to your various OpenAI and authentication provider accounts.

1. Install Vercel CLI: `npm i -g vercel`
2. Link local instance with Vercel and GitHub accounts (creates `.vercel` directory): `vercel link`
3. Download your environment variables: `vercel env pull`

```bash
pnpm install
pnpm dev
```

Your app template should now be running on [localhost:3000](http://localhost:3000/).

## Authors

This template was created by [Vercel](https://vercel.com) and [Next.js](https://nextjs.org) team members, with contributions from:

- Jared Palmer ([@jaredpalmer](https://twitter.com/jaredpalmer)) - [Vercel](https://vercel.com)
- Shu Ding ([@shuding\_](https://twitter.com/shuding_)) - [Vercel](https://vercel.com)
- shadcn ([@shadcn](https://twitter.com/shadcn)) - [Contractor](https://shadcn.com)
120 changes: 120 additions & 0 deletions counit-client/app/actions.ts
@@ -0,0 +1,120 @@
'use server'

import { revalidatePath } from 'next/cache'
import { redirect } from 'next/navigation'
import { kv } from '@vercel/kv'

import { auth } from '@/auth'
import { type Chat } from '@/lib/types'

export async function getChats(userId?: string | null) {
if (!userId) {
return []
}

try {
const pipeline = kv.pipeline()
const chats: string[] = await kv.zrange(`user:chat:${userId}`, 0, -1, {
rev: true
})

for (const chat of chats) {
pipeline.hgetall(chat)
}

const results = await pipeline.exec()

return results as Chat[]
} catch (error) {
return []
}
}

export async function getChat(id: string, userId: string) {
const chat = await kv.hgetall<Chat>(`chat:${id}`)

if (!chat || (userId && chat.userId !== userId)) {
return null
}

return chat
}

export async function removeChat({ id, path }: { id: string; path: string }) {
const session = await auth()

if (!session) {
return {
error: 'Unauthorized'
}
}

const uid = await kv.hget<string>(`chat:${id}`, 'userId')

if (uid !== session?.user?.id) {
return {
error: 'Unauthorized'
}
}

await kv.del(`chat:${id}`)
await kv.zrem(`user:chat:${session.user.id}`, `chat:${id}`)

revalidatePath('/')
return revalidatePath(path)
}

export async function clearChats() {
const session = await auth()

if (!session?.user?.id) {
return {
error: 'Unauthorized'
}
}

const chats: string[] = await kv.zrange(`user:chat:${session.user.id}`, 0, -1)
if (!chats.length) {
return redirect('/')
}
const pipeline = kv.pipeline()

for (const chat of chats) {
pipeline.del(chat)
pipeline.zrem(`user:chat:${session.user.id}`, chat)
}

await pipeline.exec()

revalidatePath('/')
return redirect('/')
}

export async function getSharedChat(id: string) {
const chat = await kv.hgetall<Chat>(`chat:${id}`)

if (!chat || !chat.sharePath) {
return null
}

return chat
}

export async function shareChat(chat: Chat) {
const session = await auth()

if (!session?.user?.id || session.user.id !== chat.userId) {
return {
error: 'Unauthorized'
}
}

const payload = {
...chat,
sharePath: `/share/${chat.id}`
}

await kv.hmset(`chat:${chat.id}`, payload)

return payload
}
2 changes: 2 additions & 0 deletions counit-client/app/api/auth/[...nextauth]/route.ts
@@ -0,0 +1,2 @@
export { GET, POST } from '@/auth'
export const runtime = 'edge'
67 changes: 67 additions & 0 deletions counit-client/app/api/chat/route.ts
@@ -0,0 +1,67 @@
import { kv } from '@vercel/kv'
import { OpenAIStream, StreamingTextResponse } from 'ai'
import { Configuration, OpenAIApi } from 'openai-edge'

import { auth } from '@/auth'
import { nanoid } from '@/lib/utils'

export const runtime = 'edge'

const configuration = new Configuration({
apiKey: process.env.OPENAI_API_KEY
})

const openai = new OpenAIApi(configuration)

export async function POST(req: Request) {
const json = await req.json()
const { messages, previewToken } = json
const userId = (await auth())?.user.id

if (!userId) {
return new Response('Unauthorized', {
status: 401
})
}

if (previewToken) {
configuration.apiKey = previewToken
}

const res = await openai.createChatCompletion({
model: 'gpt-3.5-turbo',
messages,
temperature: 0.7,
stream: true
})

const stream = OpenAIStream(res, {
async onCompletion(completion) {
const title = json.messages[0].content.substring(0, 100)
const id = json.id ?? nanoid()
const createdAt = Date.now()
const path = `/chat/${id}`
const payload = {
id,
title,
userId,
createdAt,
path,
messages: [
...messages,
{
content: completion,
role: 'assistant'
}
]
}
await kv.hmset(`chat:${id}`, payload)
await kv.zadd(`user:chat:${userId}`, {
score: createdAt,
member: `chat:${id}`
})
}
})

return new StreamingTextResponse(stream)
}