From fbfe2da4936e752f8310ffab16a5dba0a940ee51 Mon Sep 17 00:00:00 2001 From: tegnike Date: Fri, 30 Aug 2024 12:59:45 +0200 Subject: [PATCH 1/8] =?UTF-8?q?Vercel=20AI=20SDK=E3=82=92=E5=B0=8E?= =?UTF-8?q?=E5=85=A5=E3=81=97=E3=81=A6=E5=90=84AI=E3=82=B5=E3=83=BC?= =?UTF-8?q?=E3=83=93=E3=82=B9=E3=81=AE=E5=91=BC=E3=81=B3=E5=87=BA=E3=81=97?= =?UTF-8?q?=E5=87=A6=E7=90=86=E3=82=92=E7=B5=B1=E4=B8=80=E5=8C=96?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- package-lock.json | 1367 ++++++++++++++++- package.json | 6 + src/components/chatLog.tsx | 2 +- src/components/menu.tsx | 2 +- src/components/settings/log.tsx | 2 +- src/components/settings/modelProvider.tsx | 32 +- src/components/settings/slide.tsx | 17 +- src/components/settings/slideConvert.tsx | 68 +- src/components/settings/youtube.tsx | 4 +- src/features/chat/aiChatFactory.ts | 51 +- src/features/chat/anthropicChat.ts | 81 - src/features/chat/googleChat.ts | 164 -- src/features/chat/groqChat.ts | 85 - src/features/chat/handlers.ts | 65 +- src/features/chat/openAiChat.ts | 95 -- src/features/chat/vercelAIChat.ts | 96 ++ src/features/messages/messages.ts | 5 +- src/features/slide/slideAIHelpers.ts | 54 +- src/features/stores/settings.ts | 14 +- .../conversationContinuityFunctions.ts | 100 +- src/features/youtube/youtubeComments.ts | 28 +- src/pages/api/aiChat.ts | 120 ++ src/pages/api/chat.ts | 38 - src/pages/api/convertSlide.ts | 77 +- src/pages/api/google.ts | 67 - src/pages/api/groq.ts | 130 -- src/pages/api/openai.ts | 47 - 27 files changed, 1834 insertions(+), 983 deletions(-) delete mode 100644 src/features/chat/anthropicChat.ts delete mode 100644 src/features/chat/googleChat.ts delete mode 100644 src/features/chat/groqChat.ts delete mode 100644 src/features/chat/openAiChat.ts create mode 100644 src/features/chat/vercelAIChat.ts create mode 100644 src/pages/api/aiChat.ts delete mode 100644 src/pages/api/chat.ts delete mode 100644 src/pages/api/google.ts delete mode 100644 src/pages/api/groq.ts delete mode 100644 src/pages/api/openai.ts diff --git a/package-lock.json b/package-lock.json index 4ce471ed..5cac23f7 100644 --- a/package-lock.json +++ b/package-lock.json @@ -8,6 +8,9 @@ "name": "aituber-kit", "version": "0.1.0", "dependencies": { + "@ai-sdk/anthropic": "^0.0.48", + "@ai-sdk/google": "^0.0.46", + "@ai-sdk/openai": "^0.0.54", "@anthropic-ai/sdk": "^0.20.8", "@charcoal-ui/icons": "^2.6.0", "@google-cloud/text-to-speech": "^5.0.1", @@ -19,18 +22,21 @@ "@pixiv/three-vrm": "^3.0.0", "@tailwindcss/line-clamp": "^0.4.4", "@vercel/analytics": "^1.3.1", + "ai": "^3.3.20", "axios": "^1.6.8", "canvas": "^2.11.2", "formidable": "^3.5.1", "groq-sdk": "^0.3.3", "i18next": "^23.6.0", "next": "^14.2.5", + "ollama-ai-provider": "^0.13.0", "openai": "^4.38.5", "pdfjs-dist": "^4.5.136", "react": "^18.3.1", "react-dom": "^18.3.1", "react-i18next": "^13.3.1", "three": "^0.167.1", + "zod": "^3.23.8", "zustand": "^4.5.4" }, "devDependencies": { @@ -61,6 +67,223 @@ "node": "20.x" } }, + "node_modules/@ai-sdk/anthropic": { + "version": "0.0.48", + "resolved": "https://registry.npmjs.org/@ai-sdk/anthropic/-/anthropic-0.0.48.tgz", + "integrity": "sha512-o4DhUwLXsWJw+6LFWfJgg2m7xJu342/m373zzRWZXXKCrsc5oi9fOMfHtkuggUgC6nSY56Awq+cXkUiwqa8a+w==", + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/provider": "0.0.22", + "@ai-sdk/provider-utils": "1.0.17" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "zod": "^3.0.0" + } + }, + "node_modules/@ai-sdk/google": { + "version": "0.0.46", + 
"resolved": "https://registry.npmjs.org/@ai-sdk/google/-/google-0.0.46.tgz", + "integrity": "sha512-rsc3Wh54EfSt3l/7IqPdTeuxA7xvFk2p8/HxxyoHfcwvQYmQ/bpgxmadId862sVsK79L8k3iRxvVwGVkkaEeaA==", + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/provider": "0.0.22", + "@ai-sdk/provider-utils": "1.0.17", + "json-schema": "0.4.0" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "zod": "^3.0.0" + } + }, + "node_modules/@ai-sdk/openai": { + "version": "0.0.54", + "resolved": "https://registry.npmjs.org/@ai-sdk/openai/-/openai-0.0.54.tgz", + "integrity": "sha512-0jqUSY9Lq0ie4AxnAucmiMhVBbs8ivvOW73sq3pCNA+LFeb2edOcnI0qmfGfHTn/VOjUCf2TvzQzHQx1Du3sYA==", + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/provider": "0.0.22", + "@ai-sdk/provider-utils": "1.0.17" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "zod": "^3.0.0" + } + }, + "node_modules/@ai-sdk/provider": { + "version": "0.0.22", + "resolved": "https://registry.npmjs.org/@ai-sdk/provider/-/provider-0.0.22.tgz", + "integrity": "sha512-smZ1/2jL/JSKnbhC6ama/PxI2D/psj+YAe0c0qpd5ComQCNFltg72VFf0rpUSFMmFuj1pCCNoBOCrvyl8HTZHQ==", + "license": "Apache-2.0", + "dependencies": { + "json-schema": "0.4.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@ai-sdk/provider-utils": { + "version": "1.0.17", + "resolved": "https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-1.0.17.tgz", + "integrity": "sha512-2VyeTH5DQ6AxqvwdyytKIeiZyYTyJffpufWjE67zM2sXMIHgYl7fivo8m5wVl6Cbf1dFPSGKq//C9s+lz+NHrQ==", + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/provider": "0.0.22", + "eventsource-parser": "1.1.2", + "nanoid": "3.3.6", + "secure-json-parse": "2.7.0" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "zod": "^3.0.0" + }, + "peerDependenciesMeta": { + "zod": { + "optional": true + } + } + }, + "node_modules/@ai-sdk/provider-utils/node_modules/nanoid": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.6.tgz", + "integrity": "sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/@ai-sdk/react": { + "version": "0.0.52", + "resolved": "https://registry.npmjs.org/@ai-sdk/react/-/react-0.0.52.tgz", + "integrity": "sha512-4Gm+AoINDXQ4lzIZFKOWOcKgjgiAFdyhmBxnyuaqzTJCoRWNUSea62xhjqRE0u8wagfPgxWUAyS8BAsY0EqOyg==", + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/provider-utils": "1.0.17", + "@ai-sdk/ui-utils": "0.0.39", + "swr": "2.2.5" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "react": "^18 || ^19", + "zod": "^3.0.0" + }, + "peerDependenciesMeta": { + "react": { + "optional": true + }, + "zod": { + "optional": true + } + } + }, + "node_modules/@ai-sdk/solid": { + "version": "0.0.42", + "resolved": "https://registry.npmjs.org/@ai-sdk/solid/-/solid-0.0.42.tgz", + "integrity": "sha512-tr1rXRg0bLls7ZEQCWfd0Tv7irFlKQRjBSKSCstwrGtTeDA7zwUP4tIiUaCyzM3lwyE6Qgl17SrAoxSD+xP+zQ==", + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/provider-utils": "1.0.17", + "@ai-sdk/ui-utils": "0.0.39" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "solid-js": "^1.7.7" + }, + "peerDependenciesMeta": { + "solid-js": { + "optional": true + } + } + }, + "node_modules/@ai-sdk/svelte": { + "version": "0.0.44", 
+ "resolved": "https://registry.npmjs.org/@ai-sdk/svelte/-/svelte-0.0.44.tgz", + "integrity": "sha512-soSiEX1BUiwRSdoc+7mAoCeuM3Vs/ebdb1gNL7ta9Zma7GTHq802Wi7KfWfypoAqpgi0QUapzCRMvgrl4oW4AQ==", + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/provider-utils": "1.0.17", + "@ai-sdk/ui-utils": "0.0.39", + "sswr": "2.1.0" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "svelte": "^3.0.0 || ^4.0.0" + }, + "peerDependenciesMeta": { + "svelte": { + "optional": true + } + } + }, + "node_modules/@ai-sdk/ui-utils": { + "version": "0.0.39", + "resolved": "https://registry.npmjs.org/@ai-sdk/ui-utils/-/ui-utils-0.0.39.tgz", + "integrity": "sha512-yxlJBFEiWR7rf/oS7MFX9O5Hr7VYV0ipMBrvds66N3+m52/nCbBB5C/eBefzeR+hoGc/r5xGo7Yd1cncGYHHTw==", + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/provider": "0.0.22", + "@ai-sdk/provider-utils": "1.0.17", + "json-schema": "0.4.0", + "secure-json-parse": "2.7.0", + "zod-to-json-schema": "3.23.2" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "zod": "^3.0.0" + }, + "peerDependenciesMeta": { + "zod": { + "optional": true + } + } + }, + "node_modules/@ai-sdk/vue": { + "version": "0.0.44", + "resolved": "https://registry.npmjs.org/@ai-sdk/vue/-/vue-0.0.44.tgz", + "integrity": "sha512-IsDCoy7u4V081dKT1i6b/Cxh2G0aftetbif+qNQGh5QeU9TtGs9KDW+onPkXeqlDQcpMN0Q5zaNGaZ7YBK50Gw==", + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/provider-utils": "1.0.17", + "@ai-sdk/ui-utils": "0.0.39", + "swrv": "1.0.4" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "vue": "^3.3.4" + }, + "peerDependenciesMeta": { + "vue": { + "optional": true + } + } + }, "node_modules/@alloc/quick-lru": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/@alloc/quick-lru/-/quick-lru-5.2.0.tgz", @@ -72,6 +295,20 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/@ampproject/remapping": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", + "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", + "license": "Apache-2.0", + "peer": true, + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" + } + }, "node_modules/@anthropic-ai/sdk": { "version": "0.20.9", "resolved": "https://registry.npmjs.org/@anthropic-ai/sdk/-/sdk-0.20.9.tgz", @@ -100,6 +337,42 @@ "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==" }, + "node_modules/@babel/helper-string-parser": { + "version": "7.24.8", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.24.8.tgz", + "integrity": "sha512-pO9KhhRcuUyGnJWwyEgnRJTSIZHiT+vMD0kPeD+so0l7mxkMT19g3pjY9GTnHySck/hDzq+dtW/4VgnMkippsQ==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.24.7.tgz", + "integrity": "sha512-rR+PBcQ1SMQDDyF6X0wxtG8QyLCgUB0eRAGguqRLfkCA87l7yAP7ehq8SNj96OOGTO8OBV70KhuFYcIkHXOg0w==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/parser": { + "version": "7.25.4", + "resolved": 
"https://registry.npmjs.org/@babel/parser/-/parser-7.25.4.tgz", + "integrity": "sha512-nq+eWrOgdtu3jG5Os4TQP3x3cLA8hR8TvJNjD8vnPa20WGycimcparWnLK4jJhElTK6SDyuJo1weMKO/5LpmLA==", + "license": "MIT", + "peer": true, + "dependencies": { + "@babel/types": "^7.25.4" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, "node_modules/@babel/runtime": { "version": "7.25.0", "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.25.0.tgz", @@ -111,6 +384,21 @@ "node": ">=6.9.0" } }, + "node_modules/@babel/types": { + "version": "7.25.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.25.4.tgz", + "integrity": "sha512-zQ1ijeeCXVEh+aNL0RlmkPkG8HUiDcU2pzQQFjtbntgAczRASFzj4H+6+bV+dy1ntKR14I/DypeuRG1uma98iQ==", + "license": "MIT", + "peer": true, + "dependencies": { + "@babel/helper-string-parser": "^7.24.8", + "@babel/helper-validator-identifier": "^7.24.7", + "to-fast-properties": "^2.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, "node_modules/@charcoal-ui/foundation": { "version": "2.10.0", "resolved": "https://registry.npmjs.org/@charcoal-ui/foundation/-/foundation-2.10.0.tgz", @@ -875,6 +1163,15 @@ "node": ">= 8" } }, + "node_modules/@opentelemetry/api": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.0.tgz", + "integrity": "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==", + "license": "Apache-2.0", + "engines": { + "node": ">=8.0.0" + } + }, "node_modules/@pixiv/three-vrm": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/@pixiv/three-vrm/-/three-vrm-3.1.0.tgz", @@ -1277,12 +1574,25 @@ "resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.5.tgz", "integrity": "sha512-hWtVTC2q7hc7xZ/RLbxapMvDMgUnDvKvMOpKal4DrMyfGBUfB1oKaZlIRr6mJL+If3bAP6sV/QneGzF6tJjZDg==" }, + "node_modules/@types/diff-match-patch": { + "version": "1.0.36", + "resolved": "https://registry.npmjs.org/@types/diff-match-patch/-/diff-match-patch-1.0.36.tgz", + "integrity": "sha512-xFdR6tkm0MWvBfO8xXCSsinYxHcqkQUlcHeSpMC2ukzOb6lwQAfDmW+Qt0AvlGd8HpsS28qKsB+oPeJn9I39jg==", + "license": "MIT" + }, "node_modules/@types/dom-speech-recognition": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/@types/dom-speech-recognition/-/dom-speech-recognition-0.0.1.tgz", "integrity": "sha512-udCxb8DvjcDKfk1WTBzDsxFbLgYxmQGKrE/ricoMqHRNjSlSUCcamVTA5lIQqzY10mY5qCY0QDwBfFEwhfoDPw==", "dev": true }, + "node_modules/@types/estree": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.5.tgz", + "integrity": "sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==", + "license": "MIT", + "peer": true + }, "node_modules/@types/formidable": { "version": "3.4.5", "resolved": "https://registry.npmjs.org/@types/formidable/-/formidable-3.4.5.tgz", @@ -1599,6 +1909,129 @@ } } }, + "node_modules/@vue/compiler-core": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/@vue/compiler-core/-/compiler-core-3.4.38.tgz", + "integrity": "sha512-8IQOTCWnLFqfHzOGm9+P8OPSEDukgg3Huc92qSG49if/xI2SAwLHQO2qaPQbjCWPBcQoO1WYfXfTACUrWV3c5A==", + "license": "MIT", + "peer": true, + "dependencies": { + "@babel/parser": "^7.24.7", + "@vue/shared": "3.4.38", + "entities": "^4.5.0", + "estree-walker": "^2.0.2", + "source-map-js": "^1.2.0" + } + }, + "node_modules/@vue/compiler-core/node_modules/estree-walker": { + "version": "2.0.2", + "resolved": 
"https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz", + "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==", + "license": "MIT", + "peer": true + }, + "node_modules/@vue/compiler-dom": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/@vue/compiler-dom/-/compiler-dom-3.4.38.tgz", + "integrity": "sha512-Osc/c7ABsHXTsETLgykcOwIxFktHfGSUDkb05V61rocEfsFDcjDLH/IHJSNJP+/Sv9KeN2Lx1V6McZzlSb9EhQ==", + "license": "MIT", + "peer": true, + "dependencies": { + "@vue/compiler-core": "3.4.38", + "@vue/shared": "3.4.38" + } + }, + "node_modules/@vue/compiler-sfc": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-3.4.38.tgz", + "integrity": "sha512-s5QfZ+9PzPh3T5H4hsQDJtI8x7zdJaew/dCGgqZ2630XdzaZ3AD8xGZfBqpT8oaD/p2eedd+pL8tD5vvt5ZYJQ==", + "license": "MIT", + "peer": true, + "dependencies": { + "@babel/parser": "^7.24.7", + "@vue/compiler-core": "3.4.38", + "@vue/compiler-dom": "3.4.38", + "@vue/compiler-ssr": "3.4.38", + "@vue/shared": "3.4.38", + "estree-walker": "^2.0.2", + "magic-string": "^0.30.10", + "postcss": "^8.4.40", + "source-map-js": "^1.2.0" + } + }, + "node_modules/@vue/compiler-sfc/node_modules/estree-walker": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz", + "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==", + "license": "MIT", + "peer": true + }, + "node_modules/@vue/compiler-ssr": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/@vue/compiler-ssr/-/compiler-ssr-3.4.38.tgz", + "integrity": "sha512-YXznKFQ8dxYpAz9zLuVvfcXhc31FSPFDcqr0kyujbOwNhlmaNvL2QfIy+RZeJgSn5Fk54CWoEUeW+NVBAogGaw==", + "license": "MIT", + "peer": true, + "dependencies": { + "@vue/compiler-dom": "3.4.38", + "@vue/shared": "3.4.38" + } + }, + "node_modules/@vue/reactivity": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/@vue/reactivity/-/reactivity-3.4.38.tgz", + "integrity": "sha512-4vl4wMMVniLsSYYeldAKzbk72+D3hUnkw9z8lDeJacTxAkXeDAP1uE9xr2+aKIN0ipOL8EG2GPouVTH6yF7Gnw==", + "license": "MIT", + "peer": true, + "dependencies": { + "@vue/shared": "3.4.38" + } + }, + "node_modules/@vue/runtime-core": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/@vue/runtime-core/-/runtime-core-3.4.38.tgz", + "integrity": "sha512-21z3wA99EABtuf+O3IhdxP0iHgkBs1vuoCAsCKLVJPEjpVqvblwBnTj42vzHRlWDCyxu9ptDm7sI2ZMcWrQqlA==", + "license": "MIT", + "peer": true, + "dependencies": { + "@vue/reactivity": "3.4.38", + "@vue/shared": "3.4.38" + } + }, + "node_modules/@vue/runtime-dom": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/@vue/runtime-dom/-/runtime-dom-3.4.38.tgz", + "integrity": "sha512-afZzmUreU7vKwKsV17H1NDThEEmdYI+GCAK/KY1U957Ig2NATPVjCROv61R19fjZNzMmiU03n79OMnXyJVN0UA==", + "license": "MIT", + "peer": true, + "dependencies": { + "@vue/reactivity": "3.4.38", + "@vue/runtime-core": "3.4.38", + "@vue/shared": "3.4.38", + "csstype": "^3.1.3" + } + }, + "node_modules/@vue/server-renderer": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/@vue/server-renderer/-/server-renderer-3.4.38.tgz", + "integrity": "sha512-NggOTr82FbPEkkUvBm4fTGcwUY8UuTsnWC/L2YZBmvaQ4C4Jl/Ao4HHTB+l7WnFCt5M/dN3l0XLuyjzswGYVCA==", + "license": "MIT", + "peer": true, + "dependencies": { + "@vue/compiler-ssr": "3.4.38", + "@vue/shared": "3.4.38" + }, + "peerDependencies": { + "vue": "3.4.38" + } + }, + 
"node_modules/@vue/shared": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/@vue/shared/-/shared-3.4.38.tgz", + "integrity": "sha512-q0xCiLkuWWQLzVrecPb0RMsNWyxICOjPrcrwxTUEHb1fsnvni4dcuyG7RT/Ie7VPTvnjzIaWzRMUBsrqNj/hhw==", + "license": "MIT", + "peer": true + }, "node_modules/abbrev": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", @@ -1619,7 +2052,6 @@ "version": "8.12.1", "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.12.1.tgz", "integrity": "sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==", - "dev": true, "bin": { "acorn": "bin/acorn" }, @@ -1658,6 +2090,73 @@ "node": ">= 8.0.0" } }, + "node_modules/ai": { + "version": "3.3.20", + "resolved": "https://registry.npmjs.org/ai/-/ai-3.3.20.tgz", + "integrity": "sha512-GKiL34BPVGgSEDkUlt8nyVRZkx1btnbXyw/bZKj1Jx5sCn/OY/qgiACzakhRzC/90UlxmrMsU1rZGW+Xr8+kFA==", + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/provider": "0.0.22", + "@ai-sdk/provider-utils": "1.0.17", + "@ai-sdk/react": "0.0.52", + "@ai-sdk/solid": "0.0.42", + "@ai-sdk/svelte": "0.0.44", + "@ai-sdk/ui-utils": "0.0.39", + "@ai-sdk/vue": "0.0.44", + "@opentelemetry/api": "1.9.0", + "eventsource-parser": "1.1.2", + "json-schema": "0.4.0", + "jsondiffpatch": "0.6.0", + "nanoid": "3.3.6", + "secure-json-parse": "2.7.0", + "zod-to-json-schema": "3.23.2" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "openai": "^4.42.0", + "react": "^18 || ^19", + "sswr": "^2.1.0", + "svelte": "^3.0.0 || ^4.0.0", + "zod": "^3.0.0" + }, + "peerDependenciesMeta": { + "openai": { + "optional": true + }, + "react": { + "optional": true + }, + "sswr": { + "optional": true + }, + "svelte": { + "optional": true + }, + "zod": { + "optional": true + } + } + }, + "node_modules/ai/node_modules/nanoid": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.6.tgz", + "integrity": "sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, "node_modules/ajv": { "version": "6.12.6", "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", @@ -2371,6 +2870,20 @@ "node": ">=6" } }, + "node_modules/code-red": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/code-red/-/code-red-1.0.4.tgz", + "integrity": "sha512-7qJWqItLA8/VPVlKJlFXU+NBlo/qyfs39aJcuMT/2ere32ZqvF5OSxgdM5xOfJJ7O429gg2HM47y8v9P+9wrNw==", + "license": "MIT", + "peer": true, + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.4.15", + "@types/estree": "^1.0.1", + "acorn": "^8.10.0", + "estree-walker": "^3.0.3", + "periscopic": "^3.1.0" + } + }, "node_modules/color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", @@ -2455,6 +2968,20 @@ "node": "*" } }, + "node_modules/css-tree": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-2.3.1.tgz", + "integrity": "sha512-6Fv1DV/TYw//QF5IzQdqsNDjx/wc8TrMBZsqjL9eW01tWb7R7k/mq+/VXfJCl7SoD5emsJop9cOByJZfs8hYIw==", + "license": "MIT", + "peer": true, + "dependencies": { + "mdn-data": "2.0.30", + "source-map-js": "^1.0.1" + }, + "engines": { + "node": "^10 || ^12.20.0 || ^14.13.0 || >=15.0.0" + } + }, "node_modules/cssesc": { "version": "3.0.0", "resolved": 
"https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", @@ -2475,8 +3002,7 @@ "node_modules/csstype": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz", - "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==", - "devOptional": true + "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==" }, "node_modules/damerau-levenshtein": { "version": "1.0.8", @@ -2672,6 +3198,16 @@ "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", "integrity": "sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==" }, + "node_modules/dequal": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", + "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">=6" + } + }, "node_modules/detect-libc": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.3.tgz", @@ -2701,6 +3237,12 @@ "resolved": "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.2.tgz", "integrity": "sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==" }, + "node_modules/diff-match-patch": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/diff-match-patch/-/diff-match-patch-1.0.5.tgz", + "integrity": "sha512-IayShXAgj/QMXgB0IWmKx+rOPuGMhqm5w6jvFxmVenXKIzRqTAAsbBPT3kWQeGANj3jGgvcvv4yK6SxqYmikgw==", + "license": "Apache-2.0" + }, "node_modules/digest-fetch": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/digest-fetch/-/digest-fetch-1.3.0.tgz", @@ -3516,6 +4058,16 @@ "node": ">=4.0" } }, + "node_modules/estree-walker": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", + "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", + "license": "MIT", + "peer": true, + "dependencies": { + "@types/estree": "^1.0.0" + } + }, "node_modules/esutils": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", @@ -3533,6 +4085,15 @@ "node": ">=6" } }, + "node_modules/eventsource-parser": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/eventsource-parser/-/eventsource-parser-1.1.2.tgz", + "integrity": "sha512-v0eOBUbiaFojBu2s2NPBfYUoRR9GjcDNvCXVaqEf5vVfpIAh9f8RCo4vXTP8c63QRKCFwoLpMpTdPwwhEKVgzA==", + "license": "MIT", + "engines": { + "node": ">=14.18" + } + }, "node_modules/extend": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", @@ -4841,6 +5402,16 @@ "node": ">=8" } }, + "node_modules/is-reference": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/is-reference/-/is-reference-3.0.2.tgz", + "integrity": "sha512-v3rht/LgVcsdZa3O2Nqs+NMowLOxeOm7Ay9+/ARQ2F+qEoANRcqrjAZKGN0v8ymUetZGgkp26LTnGT7H0Qo9Pg==", + "license": "MIT", + "peer": true, + "dependencies": { + "@types/estree": "*" + } + }, "node_modules/is-regex": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", @@ -5078,6 +5649,12 @@ "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==", "dev": true }, + "node_modules/json-schema": { + "version": "0.4.0", + "resolved": 
"https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz", + "integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==", + "license": "(AFL-2.1 OR BSD-3-Clause)" + }, "node_modules/json-schema-traverse": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", @@ -5109,6 +5686,35 @@ "json5": "lib/cli.js" } }, + "node_modules/jsondiffpatch": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/jsondiffpatch/-/jsondiffpatch-0.6.0.tgz", + "integrity": "sha512-3QItJOXp2AP1uv7waBkao5nCvhEv+QmJAd38Ybq7wNI74Q+BBmnLn4EDKz6yI9xGAIQoUF87qHt+kc1IVxB4zQ==", + "license": "MIT", + "dependencies": { + "@types/diff-match-patch": "^1.0.36", + "chalk": "^5.3.0", + "diff-match-patch": "^1.0.5" + }, + "bin": { + "jsondiffpatch": "bin/jsondiffpatch.js" + }, + "engines": { + "node": "^18.0.0 || >=20.0.0" + } + }, + "node_modules/jsondiffpatch/node_modules/chalk": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.3.0.tgz", + "integrity": "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==", + "license": "MIT", + "engines": { + "node": "^12.17.0 || ^14.13 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, "node_modules/jsonfile": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz", @@ -5254,6 +5860,13 @@ "node": ">=4" } }, + "node_modules/locate-character": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-character/-/locate-character-3.0.0.tgz", + "integrity": "sha512-SW13ws7BjaeJ6p7Q6CO2nchbYEc3X3J6WrmTTDto7yMPqVSZTUyY5Tjbid+Ab8gLnATtygYtiDIJGQRRn2ZOiA==", + "license": "MIT", + "peer": true + }, "node_modules/locate-path": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", @@ -5322,6 +5935,16 @@ "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==" }, + "node_modules/magic-string": { + "version": "0.30.11", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.11.tgz", + "integrity": "sha512-+Wri9p0QHMy+545hKww7YAu5NyzF8iomPL/RQazugQ9+Ez4Ic3mERMd8ZTX5rfK944j+560ZJi8iAwgak1Ac7A==", + "license": "MIT", + "peer": true, + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0" + } + }, "node_modules/make-dir": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", @@ -5394,6 +6017,13 @@ "is-buffer": "~1.1.6" } }, + "node_modules/mdn-data": { + "version": "2.0.30", + "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.30.tgz", + "integrity": "sha512-GaqWWShW4kv/G9IEucWScBx9G1/vsFZZJUO+tD26M8J8z3Kw5RDQjaoZe03YAClgeS/SWPOcb4nkFBTEi5DUEA==", + "license": "CC0-1.0", + "peer": true + }, "node_modules/mdurl": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-2.0.0.tgz", @@ -6087,6 +6717,28 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/ollama-ai-provider": { + "version": "0.13.0", + "resolved": "https://registry.npmjs.org/ollama-ai-provider/-/ollama-ai-provider-0.13.0.tgz", + "integrity": "sha512-ZEtKD6ixxLIVLVRIZMh4yl0HGcdc8RotJW4ncdeAYXYT3fJ3LMvbatv9x8BeFkeb6+xy8Smc+4DbPMVFT+cjug==", + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/provider": "0.0.22", + "@ai-sdk/provider-utils": "1.0.17", + 
"partial-json": "0.1.7" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "zod": "^3.0.0" + }, + "peerDependenciesMeta": { + "zod": { + "optional": true + } + } + }, "node_modules/once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", @@ -6214,6 +6866,12 @@ "node": ">=4" } }, + "node_modules/partial-json": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/partial-json/-/partial-json-0.1.7.tgz", + "integrity": "sha512-Njv/59hHaokb/hRUjce3Hdv12wd60MtM9Z5Olmn+nehe0QDAsRtRbJPvJ0Z91TusF0SuZRIvnM+S4l6EIP8leA==", + "license": "MIT" + }, "node_modules/path-exists": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", @@ -6295,6 +6953,18 @@ "integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==", "dev": true }, + "node_modules/periscopic": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/periscopic/-/periscopic-3.1.0.tgz", + "integrity": "sha512-vKiQ8RRtkl9P+r/+oefh25C3fhybptkHKCZSPlcXiJux2tJF55GnEj3BVn4A5gKfq9NWWXXrxkHBwVPUfH0opw==", + "license": "MIT", + "peer": true, + "dependencies": { + "@types/estree": "^1.0.0", + "estree-walker": "^3.0.0", + "is-reference": "^3.0.0" + } + }, "node_modules/picocolors": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz", @@ -7073,6 +7743,12 @@ "loose-envify": "^1.1.0" } }, + "node_modules/secure-json-parse": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/secure-json-parse/-/secure-json-parse-2.7.0.tgz", + "integrity": "sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw==", + "license": "BSD-3-Clause" + }, "node_modules/semver": { "version": "6.3.1", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", @@ -7362,6 +8038,18 @@ "dev": true, "optional": true }, + "node_modules/sswr": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/sswr/-/sswr-2.1.0.tgz", + "integrity": "sha512-Cqc355SYlTAaUt8iDPaC/4DPPXK925PePLMxyBKuWd5kKc5mwsG3nT9+Mq2tyguL5s7b4Jg+IRMpTRsNTAfpSQ==", + "license": "MIT", + "dependencies": { + "swrev": "^4.0.0" + }, + "peerDependencies": { + "svelte": "^4.0.0 || ^5.0.0-next.0" + } + }, "node_modules/stop-iteration-iterator": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.0.0.tgz", @@ -7689,18 +8377,92 @@ "has-flag": "^4.0.0" }, "engines": { - "node": ">=8" + "node": ">=8" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/svelte": { + "version": "4.2.19", + "resolved": "https://registry.npmjs.org/svelte/-/svelte-4.2.19.tgz", + "integrity": "sha512-IY1rnGr6izd10B0A8LqsBfmlT5OILVuZ7XsI0vdGPEvuonFV7NYEUK4dAkm9Zg2q0Um92kYjTpS1CAP3Nh/KWw==", + "license": "MIT", + "peer": true, + "dependencies": { + "@ampproject/remapping": "^2.2.1", + "@jridgewell/sourcemap-codec": "^1.4.15", + "@jridgewell/trace-mapping": "^0.3.18", + "@types/estree": "^1.0.1", + "acorn": "^8.9.0", + "aria-query": "^5.3.0", + "axobject-query": "^4.0.0", + "code-red": "^1.0.3", + "css-tree": "^2.3.1", + 
"estree-walker": "^3.0.3", + "is-reference": "^3.0.1", + "locate-character": "^3.0.0", + "magic-string": "^0.30.4", + "periscopic": "^3.1.0" + }, + "engines": { + "node": ">=16" } }, - "node_modules/supports-preserve-symlinks-flag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", - "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "node_modules/svelte/node_modules/aria-query": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.3.0.tgz", + "integrity": "sha512-b0P0sZPKtyu8HkeRAfCq0IfURZK+SuwMjY1UXGBU27wpAiTwQAIlq56IbIO+ytk/JjS1fMR14ee5WBBfKi5J6A==", + "license": "Apache-2.0", + "peer": true, + "dependencies": { + "dequal": "^2.0.3" + } + }, + "node_modules/svelte/node_modules/axobject-query": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-4.1.0.tgz", + "integrity": "sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ==", + "license": "Apache-2.0", + "peer": true, "engines": { "node": ">= 0.4" + } + }, + "node_modules/swr": { + "version": "2.2.5", + "resolved": "https://registry.npmjs.org/swr/-/swr-2.2.5.tgz", + "integrity": "sha512-QtxqyclFeAsxEUeZIYmsaQ0UjimSq1RZ9Un7I68/0ClKK/U3LoyQunwkQfJZr2fc22DfIXLNDc2wFyTEikCUpg==", + "license": "MIT", + "dependencies": { + "client-only": "^0.0.1", + "use-sync-external-store": "^1.2.0" }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "peerDependencies": { + "react": "^16.11.0 || ^17.0.0 || ^18.0.0" + } + }, + "node_modules/swrev": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/swrev/-/swrev-4.0.0.tgz", + "integrity": "sha512-LqVcOHSB4cPGgitD1riJ1Hh4vdmITOp+BkmfmXRh4hSF/t7EnS4iD+SOTmq7w5pPm/SiPeto4ADbKS6dHUDWFA==", + "license": "MIT" + }, + "node_modules/swrv": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/swrv/-/swrv-1.0.4.tgz", + "integrity": "sha512-zjEkcP8Ywmj+xOJW3lIT65ciY/4AL4e/Or7Gj0MzU3zBJNMdJiT8geVZhINavnlHRMMCcJLHhraLTAiDOTmQ9g==", + "license": "Apache-2.0", + "peerDependencies": { + "vue": ">=3.2.26 < 4" } }, "node_modules/synckit": { @@ -7861,6 +8623,16 @@ "resolved": "https://registry.npmjs.org/three/-/three-0.167.1.tgz", "integrity": "sha512-gYTLJA/UQip6J/tJvl91YYqlZF47+D/kxiWrbTon35ZHlXEN0VOo+Qke2walF1/x92v55H6enomymg4Dak52kw==" }, + "node_modules/to-fast-properties": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", + "integrity": "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==", + "license": "MIT", + "peer": true, + "engines": { + "node": ">=4" + } + }, "node_modules/to-regex-range": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", @@ -8012,7 +8784,7 @@ "version": "5.0.2", "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.0.2.tgz", "integrity": "sha512-wVORMBGO/FAs/++blGNeAVdbNKtIh1rbBL2EyQ1+J9lClJ93KiiKe8PmFIVdXhHcyv44SL9oglmfeSsndo0jRw==", - "dev": true, + "devOptional": true, "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" @@ -8138,6 +8910,28 @@ "node": ">=0.10.0" } }, + "node_modules/vue": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/vue/-/vue-3.4.38.tgz", + "integrity": "sha512-f0ZgN+mZ5KFgVv9wz0f4OgVKukoXtS3nwET4c2vLBGQR50aI8G0cqbFtLlX9Yiyg3LFGBitruPHt2PxwTduJEw==", + 
"license": "MIT", + "peer": true, + "dependencies": { + "@vue/compiler-dom": "3.4.38", + "@vue/compiler-sfc": "3.4.38", + "@vue/runtime-dom": "3.4.38", + "@vue/server-renderer": "3.4.38", + "@vue/shared": "3.4.38" + }, + "peerDependencies": { + "typescript": "*" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, "node_modules/wait-on": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/wait-on/-/wait-on-7.2.0.tgz", @@ -8537,12 +9331,20 @@ "version": "3.23.8", "resolved": "https://registry.npmjs.org/zod/-/zod-3.23.8.tgz", "integrity": "sha512-XBx9AXhXktjUqnepgTiE5flcKIYWi/rme0Eaj+5Y0lftuGBq+jyRu/md4WnuxqgP1ubdpNCsYEYPxrzVHD8d6g==", - "optional": true, - "peer": true, + "license": "MIT", "funding": { "url": "https://github.com/sponsors/colinhacks" } }, + "node_modules/zod-to-json-schema": { + "version": "3.23.2", + "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.23.2.tgz", + "integrity": "sha512-uSt90Gzc/tUfyNqxnjlfBs8W6WSGpNBv0rVsNxP/BVSMHMKGdthPYff4xtCHYloJGM0CFxFsb3NbC0eqPhfImw==", + "license": "ISC", + "peerDependencies": { + "zod": "^3.23.3" + } + }, "node_modules/zustand": { "version": "4.5.4", "resolved": "https://registry.npmjs.org/zustand/-/zustand-4.5.4.tgz", @@ -8572,11 +9374,126 @@ } }, "dependencies": { + "@ai-sdk/anthropic": { + "version": "0.0.48", + "resolved": "https://registry.npmjs.org/@ai-sdk/anthropic/-/anthropic-0.0.48.tgz", + "integrity": "sha512-o4DhUwLXsWJw+6LFWfJgg2m7xJu342/m373zzRWZXXKCrsc5oi9fOMfHtkuggUgC6nSY56Awq+cXkUiwqa8a+w==", + "requires": { + "@ai-sdk/provider": "0.0.22", + "@ai-sdk/provider-utils": "1.0.17" + } + }, + "@ai-sdk/google": { + "version": "0.0.46", + "resolved": "https://registry.npmjs.org/@ai-sdk/google/-/google-0.0.46.tgz", + "integrity": "sha512-rsc3Wh54EfSt3l/7IqPdTeuxA7xvFk2p8/HxxyoHfcwvQYmQ/bpgxmadId862sVsK79L8k3iRxvVwGVkkaEeaA==", + "requires": { + "@ai-sdk/provider": "0.0.22", + "@ai-sdk/provider-utils": "1.0.17", + "json-schema": "0.4.0" + } + }, + "@ai-sdk/openai": { + "version": "0.0.54", + "resolved": "https://registry.npmjs.org/@ai-sdk/openai/-/openai-0.0.54.tgz", + "integrity": "sha512-0jqUSY9Lq0ie4AxnAucmiMhVBbs8ivvOW73sq3pCNA+LFeb2edOcnI0qmfGfHTn/VOjUCf2TvzQzHQx1Du3sYA==", + "requires": { + "@ai-sdk/provider": "0.0.22", + "@ai-sdk/provider-utils": "1.0.17" + } + }, + "@ai-sdk/provider": { + "version": "0.0.22", + "resolved": "https://registry.npmjs.org/@ai-sdk/provider/-/provider-0.0.22.tgz", + "integrity": "sha512-smZ1/2jL/JSKnbhC6ama/PxI2D/psj+YAe0c0qpd5ComQCNFltg72VFf0rpUSFMmFuj1pCCNoBOCrvyl8HTZHQ==", + "requires": { + "json-schema": "0.4.0" + } + }, + "@ai-sdk/provider-utils": { + "version": "1.0.17", + "resolved": "https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-1.0.17.tgz", + "integrity": "sha512-2VyeTH5DQ6AxqvwdyytKIeiZyYTyJffpufWjE67zM2sXMIHgYl7fivo8m5wVl6Cbf1dFPSGKq//C9s+lz+NHrQ==", + "requires": { + "@ai-sdk/provider": "0.0.22", + "eventsource-parser": "1.1.2", + "nanoid": "3.3.6", + "secure-json-parse": "2.7.0" + }, + "dependencies": { + "nanoid": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.6.tgz", + "integrity": "sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA==" + } + } + }, + "@ai-sdk/react": { + "version": "0.0.52", + "resolved": "https://registry.npmjs.org/@ai-sdk/react/-/react-0.0.52.tgz", + "integrity": "sha512-4Gm+AoINDXQ4lzIZFKOWOcKgjgiAFdyhmBxnyuaqzTJCoRWNUSea62xhjqRE0u8wagfPgxWUAyS8BAsY0EqOyg==", + "requires": { + 
"@ai-sdk/provider-utils": "1.0.17", + "@ai-sdk/ui-utils": "0.0.39", + "swr": "2.2.5" + } + }, + "@ai-sdk/solid": { + "version": "0.0.42", + "resolved": "https://registry.npmjs.org/@ai-sdk/solid/-/solid-0.0.42.tgz", + "integrity": "sha512-tr1rXRg0bLls7ZEQCWfd0Tv7irFlKQRjBSKSCstwrGtTeDA7zwUP4tIiUaCyzM3lwyE6Qgl17SrAoxSD+xP+zQ==", + "requires": { + "@ai-sdk/provider-utils": "1.0.17", + "@ai-sdk/ui-utils": "0.0.39" + } + }, + "@ai-sdk/svelte": { + "version": "0.0.44", + "resolved": "https://registry.npmjs.org/@ai-sdk/svelte/-/svelte-0.0.44.tgz", + "integrity": "sha512-soSiEX1BUiwRSdoc+7mAoCeuM3Vs/ebdb1gNL7ta9Zma7GTHq802Wi7KfWfypoAqpgi0QUapzCRMvgrl4oW4AQ==", + "requires": { + "@ai-sdk/provider-utils": "1.0.17", + "@ai-sdk/ui-utils": "0.0.39", + "sswr": "2.1.0" + } + }, + "@ai-sdk/ui-utils": { + "version": "0.0.39", + "resolved": "https://registry.npmjs.org/@ai-sdk/ui-utils/-/ui-utils-0.0.39.tgz", + "integrity": "sha512-yxlJBFEiWR7rf/oS7MFX9O5Hr7VYV0ipMBrvds66N3+m52/nCbBB5C/eBefzeR+hoGc/r5xGo7Yd1cncGYHHTw==", + "requires": { + "@ai-sdk/provider": "0.0.22", + "@ai-sdk/provider-utils": "1.0.17", + "json-schema": "0.4.0", + "secure-json-parse": "2.7.0", + "zod-to-json-schema": "3.23.2" + } + }, + "@ai-sdk/vue": { + "version": "0.0.44", + "resolved": "https://registry.npmjs.org/@ai-sdk/vue/-/vue-0.0.44.tgz", + "integrity": "sha512-IsDCoy7u4V081dKT1i6b/Cxh2G0aftetbif+qNQGh5QeU9TtGs9KDW+onPkXeqlDQcpMN0Q5zaNGaZ7YBK50Gw==", + "requires": { + "@ai-sdk/provider-utils": "1.0.17", + "@ai-sdk/ui-utils": "0.0.39", + "swrv": "1.0.4" + } + }, "@alloc/quick-lru": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/@alloc/quick-lru/-/quick-lru-5.2.0.tgz", "integrity": "sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==" }, + "@ampproject/remapping": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", + "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", + "peer": true, + "requires": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, "@anthropic-ai/sdk": { "version": "0.20.9", "resolved": "https://registry.npmjs.org/@anthropic-ai/sdk/-/sdk-0.20.9.tgz", @@ -8607,6 +9524,27 @@ } } }, + "@babel/helper-string-parser": { + "version": "7.24.8", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.24.8.tgz", + "integrity": "sha512-pO9KhhRcuUyGnJWwyEgnRJTSIZHiT+vMD0kPeD+so0l7mxkMT19g3pjY9GTnHySck/hDzq+dtW/4VgnMkippsQ==", + "peer": true + }, + "@babel/helper-validator-identifier": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.24.7.tgz", + "integrity": "sha512-rR+PBcQ1SMQDDyF6X0wxtG8QyLCgUB0eRAGguqRLfkCA87l7yAP7ehq8SNj96OOGTO8OBV70KhuFYcIkHXOg0w==", + "peer": true + }, + "@babel/parser": { + "version": "7.25.4", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.25.4.tgz", + "integrity": "sha512-nq+eWrOgdtu3jG5Os4TQP3x3cLA8hR8TvJNjD8vnPa20WGycimcparWnLK4jJhElTK6SDyuJo1weMKO/5LpmLA==", + "peer": true, + "requires": { + "@babel/types": "^7.25.4" + } + }, "@babel/runtime": { "version": "7.25.0", "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.25.0.tgz", @@ -8615,6 +9553,17 @@ "regenerator-runtime": "^0.14.0" } }, + "@babel/types": { + "version": "7.25.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.25.4.tgz", + "integrity": 
"sha512-zQ1ijeeCXVEh+aNL0RlmkPkG8HUiDcU2pzQQFjtbntgAczRASFzj4H+6+bV+dy1ntKR14I/DypeuRG1uma98iQ==", + "peer": true, + "requires": { + "@babel/helper-string-parser": "^7.24.8", + "@babel/helper-validator-identifier": "^7.24.7", + "to-fast-properties": "^2.0.0" + } + }, "@charcoal-ui/foundation": { "version": "2.10.0", "resolved": "https://registry.npmjs.org/@charcoal-ui/foundation/-/foundation-2.10.0.tgz", @@ -9143,6 +10092,11 @@ "fastq": "^1.6.0" } }, + "@opentelemetry/api": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.0.tgz", + "integrity": "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==" + }, "@pixiv/three-vrm": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/@pixiv/three-vrm/-/three-vrm-3.1.0.tgz", @@ -9469,12 +10423,23 @@ "resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.5.tgz", "integrity": "sha512-hWtVTC2q7hc7xZ/RLbxapMvDMgUnDvKvMOpKal4DrMyfGBUfB1oKaZlIRr6mJL+If3bAP6sV/QneGzF6tJjZDg==" }, + "@types/diff-match-patch": { + "version": "1.0.36", + "resolved": "https://registry.npmjs.org/@types/diff-match-patch/-/diff-match-patch-1.0.36.tgz", + "integrity": "sha512-xFdR6tkm0MWvBfO8xXCSsinYxHcqkQUlcHeSpMC2ukzOb6lwQAfDmW+Qt0AvlGd8HpsS28qKsB+oPeJn9I39jg==" + }, "@types/dom-speech-recognition": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/@types/dom-speech-recognition/-/dom-speech-recognition-0.0.1.tgz", "integrity": "sha512-udCxb8DvjcDKfk1WTBzDsxFbLgYxmQGKrE/ricoMqHRNjSlSUCcamVTA5lIQqzY10mY5qCY0QDwBfFEwhfoDPw==", "dev": true }, + "@types/estree": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.5.tgz", + "integrity": "sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==", + "peer": true + }, "@types/formidable": { "version": "3.4.5", "resolved": "https://registry.npmjs.org/@types/formidable/-/formidable-3.4.5.tgz", @@ -9719,6 +10684,119 @@ "server-only": "^0.0.1" } }, + "@vue/compiler-core": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/@vue/compiler-core/-/compiler-core-3.4.38.tgz", + "integrity": "sha512-8IQOTCWnLFqfHzOGm9+P8OPSEDukgg3Huc92qSG49if/xI2SAwLHQO2qaPQbjCWPBcQoO1WYfXfTACUrWV3c5A==", + "peer": true, + "requires": { + "@babel/parser": "^7.24.7", + "@vue/shared": "3.4.38", + "entities": "^4.5.0", + "estree-walker": "^2.0.2", + "source-map-js": "^1.2.0" + }, + "dependencies": { + "estree-walker": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz", + "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==", + "peer": true + } + } + }, + "@vue/compiler-dom": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/@vue/compiler-dom/-/compiler-dom-3.4.38.tgz", + "integrity": "sha512-Osc/c7ABsHXTsETLgykcOwIxFktHfGSUDkb05V61rocEfsFDcjDLH/IHJSNJP+/Sv9KeN2Lx1V6McZzlSb9EhQ==", + "peer": true, + "requires": { + "@vue/compiler-core": "3.4.38", + "@vue/shared": "3.4.38" + } + }, + "@vue/compiler-sfc": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-3.4.38.tgz", + "integrity": "sha512-s5QfZ+9PzPh3T5H4hsQDJtI8x7zdJaew/dCGgqZ2630XdzaZ3AD8xGZfBqpT8oaD/p2eedd+pL8tD5vvt5ZYJQ==", + "peer": true, + "requires": { + "@babel/parser": "^7.24.7", + "@vue/compiler-core": "3.4.38", + "@vue/compiler-dom": "3.4.38", + "@vue/compiler-ssr": "3.4.38", + "@vue/shared": "3.4.38", + 
"estree-walker": "^2.0.2", + "magic-string": "^0.30.10", + "postcss": "^8.4.40", + "source-map-js": "^1.2.0" + }, + "dependencies": { + "estree-walker": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz", + "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==", + "peer": true + } + } + }, + "@vue/compiler-ssr": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/@vue/compiler-ssr/-/compiler-ssr-3.4.38.tgz", + "integrity": "sha512-YXznKFQ8dxYpAz9zLuVvfcXhc31FSPFDcqr0kyujbOwNhlmaNvL2QfIy+RZeJgSn5Fk54CWoEUeW+NVBAogGaw==", + "peer": true, + "requires": { + "@vue/compiler-dom": "3.4.38", + "@vue/shared": "3.4.38" + } + }, + "@vue/reactivity": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/@vue/reactivity/-/reactivity-3.4.38.tgz", + "integrity": "sha512-4vl4wMMVniLsSYYeldAKzbk72+D3hUnkw9z8lDeJacTxAkXeDAP1uE9xr2+aKIN0ipOL8EG2GPouVTH6yF7Gnw==", + "peer": true, + "requires": { + "@vue/shared": "3.4.38" + } + }, + "@vue/runtime-core": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/@vue/runtime-core/-/runtime-core-3.4.38.tgz", + "integrity": "sha512-21z3wA99EABtuf+O3IhdxP0iHgkBs1vuoCAsCKLVJPEjpVqvblwBnTj42vzHRlWDCyxu9ptDm7sI2ZMcWrQqlA==", + "peer": true, + "requires": { + "@vue/reactivity": "3.4.38", + "@vue/shared": "3.4.38" + } + }, + "@vue/runtime-dom": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/@vue/runtime-dom/-/runtime-dom-3.4.38.tgz", + "integrity": "sha512-afZzmUreU7vKwKsV17H1NDThEEmdYI+GCAK/KY1U957Ig2NATPVjCROv61R19fjZNzMmiU03n79OMnXyJVN0UA==", + "peer": true, + "requires": { + "@vue/reactivity": "3.4.38", + "@vue/runtime-core": "3.4.38", + "@vue/shared": "3.4.38", + "csstype": "^3.1.3" + } + }, + "@vue/server-renderer": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/@vue/server-renderer/-/server-renderer-3.4.38.tgz", + "integrity": "sha512-NggOTr82FbPEkkUvBm4fTGcwUY8UuTsnWC/L2YZBmvaQ4C4Jl/Ao4HHTB+l7WnFCt5M/dN3l0XLuyjzswGYVCA==", + "peer": true, + "requires": { + "@vue/compiler-ssr": "3.4.38", + "@vue/shared": "3.4.38" + } + }, + "@vue/shared": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/@vue/shared/-/shared-3.4.38.tgz", + "integrity": "sha512-q0xCiLkuWWQLzVrecPb0RMsNWyxICOjPrcrwxTUEHb1fsnvni4dcuyG7RT/Ie7VPTvnjzIaWzRMUBsrqNj/hhw==", + "peer": true + }, "abbrev": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", @@ -9735,8 +10813,7 @@ "acorn": { "version": "8.12.1", "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.12.1.tgz", - "integrity": "sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==", - "dev": true + "integrity": "sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==" }, "acorn-jsx": { "version": "5.3.2", @@ -9761,6 +10838,34 @@ "humanize-ms": "^1.2.1" } }, + "ai": { + "version": "3.3.20", + "resolved": "https://registry.npmjs.org/ai/-/ai-3.3.20.tgz", + "integrity": "sha512-GKiL34BPVGgSEDkUlt8nyVRZkx1btnbXyw/bZKj1Jx5sCn/OY/qgiACzakhRzC/90UlxmrMsU1rZGW+Xr8+kFA==", + "requires": { + "@ai-sdk/provider": "0.0.22", + "@ai-sdk/provider-utils": "1.0.17", + "@ai-sdk/react": "0.0.52", + "@ai-sdk/solid": "0.0.42", + "@ai-sdk/svelte": "0.0.44", + "@ai-sdk/ui-utils": "0.0.39", + "@ai-sdk/vue": "0.0.44", + "@opentelemetry/api": "1.9.0", + "eventsource-parser": "1.1.2", + "json-schema": "0.4.0", + "jsondiffpatch": "0.6.0", + "nanoid": 
"3.3.6", + "secure-json-parse": "2.7.0", + "zod-to-json-schema": "3.23.2" + }, + "dependencies": { + "nanoid": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.6.tgz", + "integrity": "sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA==" + } + } + }, "ajv": { "version": "6.12.6", "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", @@ -10249,6 +11354,19 @@ "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz", "integrity": "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==" }, + "code-red": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/code-red/-/code-red-1.0.4.tgz", + "integrity": "sha512-7qJWqItLA8/VPVlKJlFXU+NBlo/qyfs39aJcuMT/2ere32ZqvF5OSxgdM5xOfJJ7O429gg2HM47y8v9P+9wrNw==", + "peer": true, + "requires": { + "@jridgewell/sourcemap-codec": "^1.4.15", + "@types/estree": "^1.0.1", + "acorn": "^8.10.0", + "estree-walker": "^3.0.3", + "periscopic": "^3.1.0" + } + }, "color-convert": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", @@ -10314,6 +11432,16 @@ "resolved": "https://registry.npmjs.org/crypt/-/crypt-0.0.2.tgz", "integrity": "sha512-mCxBlsHFYh9C+HVpiEacem8FEBnMXgU9gy4zmNC+SXAZNB/1idgp/aulFJ4FgCi7GPEVbfyng092GqL2k2rmow==" }, + "css-tree": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-2.3.1.tgz", + "integrity": "sha512-6Fv1DV/TYw//QF5IzQdqsNDjx/wc8TrMBZsqjL9eW01tWb7R7k/mq+/VXfJCl7SoD5emsJop9cOByJZfs8hYIw==", + "peer": true, + "requires": { + "mdn-data": "2.0.30", + "source-map-js": "^1.0.1" + } + }, "cssesc": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", @@ -10327,8 +11455,7 @@ "csstype": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz", - "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==", - "devOptional": true + "integrity": "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==" }, "damerau-levenshtein": { "version": "1.0.8", @@ -10464,6 +11591,12 @@ "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", "integrity": "sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==" }, + "dequal": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", + "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", + "peer": true + }, "detect-libc": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.3.tgz", @@ -10490,6 +11623,11 @@ "resolved": "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.2.tgz", "integrity": "sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==" }, + "diff-match-patch": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/diff-match-patch/-/diff-match-patch-1.0.5.tgz", + "integrity": "sha512-IayShXAgj/QMXgB0IWmKx+rOPuGMhqm5w6jvFxmVenXKIzRqTAAsbBPT3kWQeGANj3jGgvcvv4yK6SxqYmikgw==" + }, "digest-fetch": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/digest-fetch/-/digest-fetch-1.3.0.tgz", @@ -11117,6 +12255,15 @@ "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", "dev": true }, + "estree-walker": { + "version": 
"3.0.3", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", + "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", + "peer": true, + "requires": { + "@types/estree": "^1.0.0" + } + }, "esutils": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", @@ -11128,6 +12275,11 @@ "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==" }, + "eventsource-parser": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/eventsource-parser/-/eventsource-parser-1.1.2.tgz", + "integrity": "sha512-v0eOBUbiaFojBu2s2NPBfYUoRR9GjcDNvCXVaqEf5vVfpIAh9f8RCo4vXTP8c63QRKCFwoLpMpTdPwwhEKVgzA==" + }, "extend": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", @@ -12076,6 +13228,15 @@ "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", "dev": true }, + "is-reference": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/is-reference/-/is-reference-3.0.2.tgz", + "integrity": "sha512-v3rht/LgVcsdZa3O2Nqs+NMowLOxeOm7Ay9+/ARQ2F+qEoANRcqrjAZKGN0v8ymUetZGgkp26LTnGT7H0Qo9Pg==", + "peer": true, + "requires": { + "@types/estree": "*" + } + }, "is-regex": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.4.tgz", @@ -12242,6 +13403,11 @@ "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==", "dev": true }, + "json-schema": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz", + "integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==" + }, "json-schema-traverse": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", @@ -12270,6 +13436,23 @@ "minimist": "^1.2.0" } }, + "jsondiffpatch": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/jsondiffpatch/-/jsondiffpatch-0.6.0.tgz", + "integrity": "sha512-3QItJOXp2AP1uv7waBkao5nCvhEv+QmJAd38Ybq7wNI74Q+BBmnLn4EDKz6yI9xGAIQoUF87qHt+kc1IVxB4zQ==", + "requires": { + "@types/diff-match-patch": "^1.0.36", + "chalk": "^5.3.0", + "diff-match-patch": "^1.0.5" + }, + "dependencies": { + "chalk": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.3.0.tgz", + "integrity": "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==" + } + } + }, "jsonfile": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-4.0.0.tgz", @@ -12389,6 +13572,12 @@ "strip-bom": "^3.0.0" } }, + "locate-character": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/locate-character/-/locate-character-3.0.0.tgz", + "integrity": "sha512-SW13ws7BjaeJ6p7Q6CO2nchbYEc3X3J6WrmTTDto7yMPqVSZTUyY5Tjbid+Ab8gLnATtygYtiDIJGQRRn2ZOiA==", + "peer": true + }, "locate-path": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", @@ -12444,6 +13633,15 @@ "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==" }, + "magic-string": { + "version": "0.30.11", + "resolved": 
"https://registry.npmjs.org/magic-string/-/magic-string-0.30.11.tgz", + "integrity": "sha512-+Wri9p0QHMy+545hKww7YAu5NyzF8iomPL/RQazugQ9+Ez4Ic3mERMd8ZTX5rfK944j+560ZJi8iAwgak1Ac7A==", + "peer": true, + "requires": { + "@jridgewell/sourcemap-codec": "^1.5.0" + } + }, "make-dir": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", @@ -12501,6 +13699,12 @@ "is-buffer": "~1.1.6" } }, + "mdn-data": { + "version": "2.0.30", + "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.30.tgz", + "integrity": "sha512-GaqWWShW4kv/G9IEucWScBx9G1/vsFZZJUO+tD26M8J8z3Kw5RDQjaoZe03YAClgeS/SWPOcb4nkFBTEi5DUEA==", + "peer": true + }, "mdurl": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-2.0.0.tgz", @@ -12973,6 +14177,16 @@ "es-object-atoms": "^1.0.0" } }, + "ollama-ai-provider": { + "version": "0.13.0", + "resolved": "https://registry.npmjs.org/ollama-ai-provider/-/ollama-ai-provider-0.13.0.tgz", + "integrity": "sha512-ZEtKD6ixxLIVLVRIZMh4yl0HGcdc8RotJW4ncdeAYXYT3fJ3LMvbatv9x8BeFkeb6+xy8Smc+4DbPMVFT+cjug==", + "requires": { + "@ai-sdk/provider": "0.0.22", + "@ai-sdk/provider-utils": "1.0.17", + "partial-json": "0.1.7" + } + }, "once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", @@ -13067,6 +14281,11 @@ "json-parse-better-errors": "^1.0.1" } }, + "partial-json": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/partial-json/-/partial-json-0.1.7.tgz", + "integrity": "sha512-Njv/59hHaokb/hRUjce3Hdv12wd60MtM9Z5Olmn+nehe0QDAsRtRbJPvJ0Z91TusF0SuZRIvnM+S4l6EIP8leA==" + }, "path-exists": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", @@ -13124,6 +14343,17 @@ "integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==", "dev": true }, + "periscopic": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/periscopic/-/periscopic-3.1.0.tgz", + "integrity": "sha512-vKiQ8RRtkl9P+r/+oefh25C3fhybptkHKCZSPlcXiJux2tJF55GnEj3BVn4A5gKfq9NWWXXrxkHBwVPUfH0opw==", + "peer": true, + "requires": { + "@types/estree": "^1.0.0", + "estree-walker": "^3.0.0", + "is-reference": "^3.0.0" + } + }, "picocolors": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz", @@ -13618,6 +14848,11 @@ "loose-envify": "^1.1.0" } }, + "secure-json-parse": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/secure-json-parse/-/secure-json-parse-2.7.0.tgz", + "integrity": "sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw==" + }, "semver": { "version": "6.3.1", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", @@ -13833,6 +15068,14 @@ "dev": true, "optional": true }, + "sswr": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/sswr/-/sswr-2.1.0.tgz", + "integrity": "sha512-Cqc355SYlTAaUt8iDPaC/4DPPXK925PePLMxyBKuWd5kKc5mwsG3nT9+Mq2tyguL5s7b4Jg+IRMpTRsNTAfpSQ==", + "requires": { + "swrev": "^4.0.0" + } + }, "stop-iteration-iterator": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.0.0.tgz", @@ -14074,6 +15317,65 @@ "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==" }, + "svelte": { + "version": "4.2.19", + "resolved": 
"https://registry.npmjs.org/svelte/-/svelte-4.2.19.tgz", + "integrity": "sha512-IY1rnGr6izd10B0A8LqsBfmlT5OILVuZ7XsI0vdGPEvuonFV7NYEUK4dAkm9Zg2q0Um92kYjTpS1CAP3Nh/KWw==", + "peer": true, + "requires": { + "@ampproject/remapping": "^2.2.1", + "@jridgewell/sourcemap-codec": "^1.4.15", + "@jridgewell/trace-mapping": "^0.3.18", + "@types/estree": "^1.0.1", + "acorn": "^8.9.0", + "aria-query": "^5.3.0", + "axobject-query": "^4.0.0", + "code-red": "^1.0.3", + "css-tree": "^2.3.1", + "estree-walker": "^3.0.3", + "is-reference": "^3.0.1", + "locate-character": "^3.0.0", + "magic-string": "^0.30.4", + "periscopic": "^3.1.0" + }, + "dependencies": { + "aria-query": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/aria-query/-/aria-query-5.3.0.tgz", + "integrity": "sha512-b0P0sZPKtyu8HkeRAfCq0IfURZK+SuwMjY1UXGBU27wpAiTwQAIlq56IbIO+ytk/JjS1fMR14ee5WBBfKi5J6A==", + "peer": true, + "requires": { + "dequal": "^2.0.3" + } + }, + "axobject-query": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-4.1.0.tgz", + "integrity": "sha512-qIj0G9wZbMGNLjLmg1PT6v2mE9AH2zlnADJD/2tC6E00hgmhUOfEB6greHPAfLRSufHqROIUTkw6E+M3lH0PTQ==", + "peer": true + } + } + }, + "swr": { + "version": "2.2.5", + "resolved": "https://registry.npmjs.org/swr/-/swr-2.2.5.tgz", + "integrity": "sha512-QtxqyclFeAsxEUeZIYmsaQ0UjimSq1RZ9Un7I68/0ClKK/U3LoyQunwkQfJZr2fc22DfIXLNDc2wFyTEikCUpg==", + "requires": { + "client-only": "^0.0.1", + "use-sync-external-store": "^1.2.0" + } + }, + "swrev": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/swrev/-/swrev-4.0.0.tgz", + "integrity": "sha512-LqVcOHSB4cPGgitD1riJ1Hh4vdmITOp+BkmfmXRh4hSF/t7EnS4iD+SOTmq7w5pPm/SiPeto4ADbKS6dHUDWFA==" + }, + "swrv": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/swrv/-/swrv-1.0.4.tgz", + "integrity": "sha512-zjEkcP8Ywmj+xOJW3lIT65ciY/4AL4e/Or7Gj0MzU3zBJNMdJiT8geVZhINavnlHRMMCcJLHhraLTAiDOTmQ9g==", + "requires": {} + }, "synckit": { "version": "0.9.1", "resolved": "https://registry.npmjs.org/synckit/-/synckit-0.9.1.tgz", @@ -14202,6 +15504,12 @@ "resolved": "https://registry.npmjs.org/three/-/three-0.167.1.tgz", "integrity": "sha512-gYTLJA/UQip6J/tJvl91YYqlZF47+D/kxiWrbTon35ZHlXEN0VOo+Qke2walF1/x92v55H6enomymg4Dak52kw==" }, + "to-fast-properties": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", + "integrity": "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==", + "peer": true + }, "to-regex-range": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", @@ -14315,7 +15623,7 @@ "version": "5.0.2", "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.0.2.tgz", "integrity": "sha512-wVORMBGO/FAs/++blGNeAVdbNKtIh1rbBL2EyQ1+J9lClJ93KiiKe8PmFIVdXhHcyv44SL9oglmfeSsndo0jRw==", - "dev": true + "devOptional": true }, "uc.micro": { "version": "2.1.0", @@ -14395,6 +15703,19 @@ "resolved": "https://registry.npmjs.org/void-elements/-/void-elements-3.1.0.tgz", "integrity": "sha512-Dhxzh5HZuiHQhbvTW9AMetFfBHDMYpo23Uo9btPXgdYP+3T5S+p+jgNy7spra+veYhBP2dCSgxR/i2Y02h5/6w==" }, + "vue": { + "version": "3.4.38", + "resolved": "https://registry.npmjs.org/vue/-/vue-3.4.38.tgz", + "integrity": "sha512-f0ZgN+mZ5KFgVv9wz0f4OgVKukoXtS3nwET4c2vLBGQR50aI8G0cqbFtLlX9Yiyg3LFGBitruPHt2PxwTduJEw==", + "peer": true, + "requires": { + "@vue/compiler-dom": "3.4.38", + "@vue/compiler-sfc": "3.4.38", + "@vue/runtime-dom": "3.4.38", + 
"@vue/server-renderer": "3.4.38", + "@vue/shared": "3.4.38" + } + }, "wait-on": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/wait-on/-/wait-on-7.2.0.tgz", @@ -14690,9 +16011,13 @@ "zod": { "version": "3.23.8", "resolved": "https://registry.npmjs.org/zod/-/zod-3.23.8.tgz", - "integrity": "sha512-XBx9AXhXktjUqnepgTiE5flcKIYWi/rme0Eaj+5Y0lftuGBq+jyRu/md4WnuxqgP1ubdpNCsYEYPxrzVHD8d6g==", - "optional": true, - "peer": true + "integrity": "sha512-XBx9AXhXktjUqnepgTiE5flcKIYWi/rme0Eaj+5Y0lftuGBq+jyRu/md4WnuxqgP1ubdpNCsYEYPxrzVHD8d6g==" + }, + "zod-to-json-schema": { + "version": "3.23.2", + "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.23.2.tgz", + "integrity": "sha512-uSt90Gzc/tUfyNqxnjlfBs8W6WSGpNBv0rVsNxP/BVSMHMKGdthPYff4xtCHYloJGM0CFxFsb3NbC0eqPhfImw==", + "requires": {} }, "zustand": { "version": "4.5.4", diff --git a/package.json b/package.json index 89c3ff10..d6e96d42 100644 --- a/package.json +++ b/package.json @@ -15,6 +15,9 @@ "desktop": "NEXT_PUBLIC_BACKGROUND_IMAGE_PATH=\"\" run-p dev electron" }, "dependencies": { + "@ai-sdk/anthropic": "^0.0.48", + "@ai-sdk/google": "^0.0.46", + "@ai-sdk/openai": "^0.0.54", "@anthropic-ai/sdk": "^0.20.8", "@charcoal-ui/icons": "^2.6.0", "@google-cloud/text-to-speech": "^5.0.1", @@ -26,18 +29,21 @@ "@pixiv/three-vrm": "^3.0.0", "@tailwindcss/line-clamp": "^0.4.4", "@vercel/analytics": "^1.3.1", + "ai": "^3.3.20", "axios": "^1.6.8", "canvas": "^2.11.2", "formidable": "^3.5.1", "groq-sdk": "^0.3.3", "i18next": "^23.6.0", "next": "^14.2.5", + "ollama-ai-provider": "^0.13.0", "openai": "^4.38.5", "pdfjs-dist": "^4.5.136", "react": "^18.3.1", "react-dom": "^18.3.1", "react-i18next": "^13.3.1", "three": "^0.167.1", + "zod": "^3.23.8", "zustand": "^4.5.4" }, "devDependencies": { diff --git a/src/components/chatLog.tsx b/src/components/chatLog.tsx index e51b540a..dba7a9f9 100644 --- a/src/components/chatLog.tsx +++ b/src/components/chatLog.tsx @@ -45,7 +45,7 @@ export const ChatLog = () => { /> diff --git a/src/components/menu.tsx b/src/components/menu.tsx index 799ef9d6..a2133cb9 100644 --- a/src/components/menu.tsx +++ b/src/components/menu.tsx @@ -144,7 +144,7 @@ export const Menu = () => { /> )} - {selectAIService === 'openai' && !youtubeMode && ( + {!youtubeMode && ( <>
{ > ) : ( 画像 { const webSocketMode = settingsStore((s) => s.webSocketMode) - const openAiKey = settingsStore((s) => s.openAiKey) + const openaiKey = settingsStore((s) => s.openaiKey) const anthropicKey = settingsStore((s) => s.anthropicKey) const googleKey = settingsStore((s) => s.googleKey) const groqKey = settingsStore((s) => s.groqKey) @@ -30,7 +31,7 @@ const ModelProvider = () => { // ローカルLLMが選択された場合、AIモデルを空文字に設定 const defaultModels = { openai: 'gpt-4o', - anthropic: 'claude-3.5-sonnet-20240620', + anthropic: 'claude-3-5-sonnet-20240620', google: 'gemini-1.5-pro', groq: 'gemma-7b-it', localLlm: '', @@ -44,24 +45,17 @@ const ModelProvider = () => { selectAIModel: defaultModels[newService], }) - if (newService !== 'openai') { + if (!multiModalAIServices.includes(newService as any)) { homeStore.setState({ modalImage: '' }) menuStore.setState({ showWebcam: false }) - if (newService !== 'anthropic') { - settingsStore.setState({ - conversationContinuityMode: false, - }) - } - - if (newService !== 'anthropic' && newService !== 'google') { - settingsStore.setState({ - slideMode: false, - }) - slideStore.setState({ - isPlaying: false, - }) - } + settingsStore.setState({ + conversationContinuityMode: false, + slideMode: false, + }) + slideStore.setState({ + isPlaying: false, + }) } }, [] @@ -100,9 +94,9 @@ const ModelProvider = () => { className="text-ellipsis px-16 py-8 w-col-span-2 bg-surface1 hover:bg-surface1-hover rounded-8" type="text" placeholder="sk-..." - value={openAiKey} + value={openaiKey} onChange={(e) => - settingsStore.setState({ openAiKey: e.target.value }) + settingsStore.setState({ openaiKey: e.target.value }) } />
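A minimal TypeScript sketch of the `${service}Key` naming convention that the openAiKey -> openaiKey rename above enables. The constants and types mirror the settings.ts hunk later in this patch; the helper getApiKeyFor is illustrative only and does not exist in the repository.

const multiModalAIServices = ['openai', 'anthropic', 'google'] as const
type multiModalAIServiceKey = (typeof multiModalAIServices)[number]

// Every stored API key follows the `${service}Key` pattern (openaiKey, anthropicKey, googleKey).
type multiModalAPIKeys = {
  [K in multiModalAIServiceKey as `${K}Key`]: string
}

// Illustrative helper: resolve the key for whichever multimodal service is selected,
// the same lookup the patch performs with `${aiService}Key` as const in slideAIHelpers.ts.
function getApiKeyFor(
  settings: multiModalAPIKeys,
  service: multiModalAIServiceKey
): string {
  const apiKeyName = `${service}Key` as const
  return settings[apiKeyName]
}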
diff --git a/src/components/settings/slide.tsx b/src/components/settings/slide.tsx index 9043e6e4..4509f4f6 100644 --- a/src/components/settings/slide.tsx +++ b/src/components/settings/slide.tsx @@ -1,6 +1,9 @@ import { useTranslation } from 'react-i18next' import { useEffect, useState } from 'react' -import settingsStore from '@/features/stores/settings' +import settingsStore, { + multiModalAIServices, + multiModalAIServiceKey, +} from '@/features/stores/settings' import menuStore from '@/features/stores/menu' import slideStore from '@/features/stores/slide' import { TextButton } from '../textButton' @@ -66,9 +69,9 @@ const Slide = () => { {slideMode ? t('StatusOn') : t('StatusOff')} @@ -92,9 +95,9 @@ const Slide = () => { ))} - {selectAIService === 'openai' && ( - - )} + {multiModalAIServices.includes( + selectAIService as multiModalAIServiceKey + ) && } )} diff --git a/src/components/settings/slideConvert.tsx b/src/components/settings/slideConvert.tsx index a53e71eb..a7d58ff4 100644 --- a/src/components/settings/slideConvert.tsx +++ b/src/components/settings/slideConvert.tsx @@ -1,6 +1,9 @@ import React, { useState } from 'react' import { useTranslation } from 'react-i18next' -import settingsStore from '@/features/stores/settings' +import settingsStore, { + multiModalAIServiceKey, + multiModalAIServices, +} from '@/features/stores/settings' import { TextButton } from '../textButton' interface SlideConvertProps { @@ -11,7 +14,9 @@ const SlideConvert: React.FC = ({ onFolderUpdate }) => { const { t } = useTranslation() const [file, setFile] = useState(null) const [folderName, setFolderName] = useState('') - const [apiKey] = useState(settingsStore.getState().openAiKey) + const aiService = settingsStore.getState() + .selectAIService as multiModalAIServiceKey + const [model, setModel] = useState('gpt-4o') const [isLoading, setIsLoading] = useState(false) const selectLanguage = settingsStore.getState().selectLanguage @@ -27,6 +32,15 @@ const SlideConvert: React.FC = ({ onFolderUpdate }) => { const handleFormSubmit = async (event: React.FormEvent) => { event.preventDefault() + + if (!multiModalAIServices.includes(aiService)) { + alert(t('InvalidAIService')) + return + } + + const apiKeyName = `${aiService}Key` as const + const apiKey = settingsStore.getState()[apiKeyName] + if (!file || !folderName || !apiKey || !model) { alert(t('PdfConvertSubmitError')) return @@ -37,6 +51,7 @@ const SlideConvert: React.FC = ({ onFolderUpdate }) => { const formData = new FormData() formData.append('file', file) formData.append('folderName', folderName) + formData.append('aiService', aiService) formData.append('apiKey', apiKey) formData.append('model', model) formData.append('selectLanguage', selectLanguage) @@ -101,11 +116,50 @@ const SlideConvert: React.FC = ({ onFolderUpdate }) => { onChange={(e) => setModel(e.target.value)} className="text-ellipsis px-16 py-8 w-col-span-4 bg-surface1 hover:bg-surface1-hover rounded-8" > - - - - - + {aiService === 'openai' && ( + <> + + + + + + + )} + {aiService === 'anthropic' && ( + <> + + + + + + )} + {aiService === 'google' && ( + <> + + + + + + + )}
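A hedged sketch of the multipart payload the reworked slide conversion sends. It assumes the form posts to /api/convertSlide, whose handler later in this patch reads the same field names via getField(); the function name submitSlideConversion is illustrative and not part of the repository.

async function submitSlideConversion(
  file: File,
  folderName: string,
  aiService: 'openai' | 'anthropic' | 'google',
  apiKey: string,
  model: string,
  selectLanguage: string
) {
  const formData = new FormData()
  formData.append('file', file)
  formData.append('folderName', folderName)
  // aiService is the new field this patch adds so the server can pick the matching provider.
  formData.append('aiService', aiService)
  formData.append('apiKey', apiKey)
  formData.append('model', model)
  formData.append('selectLanguage', selectLanguage)

  const res = await fetch('/api/convertSlide', { method: 'POST', body: formData })
  if (!res.ok) {
    throw new Error(`convertSlide failed with status ${res.status}`)
  }
  return res.json()
}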
diff --git a/src/components/settings/youtube.tsx b/src/components/settings/youtube.tsx index 2c130528..87b817f7 100644 --- a/src/components/settings/youtube.tsx +++ b/src/components/settings/youtube.tsx @@ -4,6 +4,7 @@ import homeStore from '@/features/stores/home' import menuStore from '@/features/stores/menu' import settingsStore from '@/features/stores/settings' import { TextButton } from '../textButton' +import { multiModalAIServices } from '@/features/stores/settings' const YouTube = () => { const youtubeApiKey = settingsStore((s) => s.youtubeApiKey) @@ -98,8 +99,7 @@ const YouTube = () => { }) } disabled={ - (selectAIService !== 'openai' && - selectAIService !== 'anthropic') || + !multiModalAIServices.includes(selectAIService as any) || slideMode || webSocketMode } diff --git a/src/features/chat/aiChatFactory.ts b/src/features/chat/aiChatFactory.ts index 85382694..7bd4bc22 100644 --- a/src/features/chat/aiChatFactory.ts +++ b/src/features/chat/aiChatFactory.ts @@ -1,54 +1,41 @@ import { Message } from '@/features/messages/messages' -import { AIService, AIServiceConfig } from '@/features/constants/settings' -import { getAnthropicChatResponseStream } from './anthropicChat' -import { getDifyChatResponseStream } from './difyChat' -import { getGoogleChatResponseStream } from './googleChat' -import { getGroqChatResponseStream } from './groqChat' +import { AIService } from '@/features/constants/settings' import { getLocalLLMChatResponseStream } from './localLLMChat' -import { getOpenAIChatResponseStream } from './openAiChat' +import { getDifyChatResponseStream } from './difyChat' +import { getVercelAIChatResponseStream } from './vercelAIChat' +import settingsStore from '@/features/stores/settings' export async function getAIChatResponseStream( service: AIService, - messages: Message[], - config: AIServiceConfig + messages: Message[] ): Promise | null> { + const ss = settingsStore.getState() + switch (service) { case 'openai': - return getOpenAIChatResponseStream( - messages, - config.openai.key, - config.openai.model - ) case 'anthropic': - return getAnthropicChatResponseStream( - messages, - config.anthropic.key, - config.anthropic.model - ) case 'google': - return getGoogleChatResponseStream( + case 'groq': + return getVercelAIChatResponseStream( messages, - config.google.key, - config.google.model + ss[`${service}Key`] || + process.env[`NEXT_PUBLIC_${service.toUpperCase()}_KEY`] || + '', + service, + ss.selectAIModel ) case 'localLlm': return getLocalLLMChatResponseStream( messages, - config.localLlm.url, - config.localLlm.model - ) - case 'groq': - return getGroqChatResponseStream( - messages, - config.groq.key, - config.groq.model + ss.localLlmUrl || process.env.NEXT_PUBLIC_LOCAL_LLM_URL || '', + ss.selectAIModel || process.env.NEXT_PUBLIC_LOCAL_LLM_MODEL || '' ) case 'dify': return getDifyChatResponseStream( messages, - config.dify.key, - config.dify.url, - config.dify.conversationId + ss.difyKey || process.env.NEXT_PUBLIC_DIFY_KEY || '', + ss.difyUrl || process.env.NEXT_PUBLIC_DIFY_URL || '', + ss.difyConversationId ) default: throw new Error(`Unsupported AI service: ${service}`) diff --git a/src/features/chat/anthropicChat.ts b/src/features/chat/anthropicChat.ts deleted file mode 100644 index f0882151..00000000 --- a/src/features/chat/anthropicChat.ts +++ /dev/null @@ -1,81 +0,0 @@ -import { Message } from '../messages/messages' - -export async function getAnthropicChatResponse( - messages: Message[], - apiKey: string, - model: string -) { - const response = await 
fetch('/api/anthropic', { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ messages, apiKey, model }), - }) - - const data = await response.json() - return { message: data.message[0].text } -} - -export async function getAnthropicChatResponseStream( - messages: Message[], - apiKey: string, - model: string -) { - const response = await fetch('/api/anthropic', { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ messages, apiKey, model, stream: true }), - }) - - if (!response.ok) { - throw new Error('Anthropic APIリクエストに失敗しました') - } - - if (!response.body) { - throw new Error('Anthropic APIレスポンスが空です') - } - - const reader = response.body.getReader() - const decoder = new TextDecoder('utf-8') - - return new ReadableStream({ - async start(controller) { - while (true) { - const { done, value } = await reader.read() - - if (done) { - break - } - - const chunk = decoder.decode(value) - const lines = chunk.split('\n') - - for (const line of lines) { - if (line.startsWith('data:')) { - const data = line.substring(5).trim() - if (data !== '[DONE]') { - const event = JSON.parse(data) - switch (event.type) { - case 'content_block_delta': - controller.enqueue(event.text) - break - case 'error': - throw new Error( - `Anthropic API error: ${JSON.stringify(event.error)}` - ) - case 'message_stop': - controller.close() - return - } - } - } - } - } - - controller.close() - }, - }) -} diff --git a/src/features/chat/googleChat.ts b/src/features/chat/googleChat.ts deleted file mode 100644 index 5ec46341..00000000 --- a/src/features/chat/googleChat.ts +++ /dev/null @@ -1,164 +0,0 @@ -// import { Message } from "../messages/messages"; - -// export async function getGoogleChatResponse(messages: Message[], apiKey: string, model: string) { -// const response = await fetch("/api/google", { -// method: "POST", -// headers: { -// "Content-Type": "application/json", -// }, -// body: JSON.stringify({ messages, apiKey, model }), -// }); - -// const data = await response.json(); -// return data; -// } - -// export async function getGoogleChatResponseStream( -// messages: Message[], -// apiKey: string, -// model: string -// ) { -// const response = await fetch("/api/google", { -// method: "POST", -// headers: { -// "Content-Type": "application/json", -// }, -// body: JSON.stringify({ messages, apiKey, model, stream: true }), -// }); - -// if (!response.ok) { -// throw new Error("Google Gemini APIリクエストに失敗しました"); -// } - -// if (response.body === null) { -// throw new Error("Google Gemini APIリクエストに失敗しました"); -// } - -// const reader = response.body.getReader(); -// const decoder = new TextDecoder("utf-8"); - -// return new ReadableStream({ -// async start(controller) { -// while (true) { -// const { done, value } = await reader.read(); - -// if (done) { -// break; -// } - -// const chunk = decoder.decode(value); -// // 各行を個別に処理 -// const lines = chunk.split('\n'); -// lines.forEach(line => { -// if (line.startsWith('event: end')) { -// controller.close(); // ストリームの終了を検出 -// return; -// } -// // 'data: ' プレフィックスを取り除く -// const jsonStr = line.replace(/^data: /, '').trim(); -// if (jsonStr) { -// try { -// const json = JSON.parse(jsonStr); -// if (json.text) { -// controller.enqueue(json.text); -// } -// } catch (error) { -// console.error("Failed to parse JSON:", error); -// } -// } -// }); -// } - -// controller.close(); -// }, -// }); -// } - -import { GoogleGenerativeAI } from '@google/generative-ai' -import { Message } from 
'../messages/messages' - -export async function getGoogleChatResponse( - messages: Message[], - apiKey: string, - model: string -) { - const { history, systemMessage } = processMessages(messages) - - const genAI = new GoogleGenerativeAI(apiKey) - const chatModel = genAI.getGenerativeModel({ - model: model, - systemInstruction: systemMessage, - }) - - const chat = chatModel.startChat({ history }) - const lastMessage = messages[messages.length - 1].content - const result = await chat.sendMessage( - typeof lastMessage === 'string' ? lastMessage : lastMessage[0].text - ) - const response = await result.response - const text = response.text() - - return { text } -} - -export async function getGoogleChatResponseStream( - messages: Message[], - apiKey: string, - model: string -) { - const { history, systemMessage } = processMessages(messages) - - const genAI = new GoogleGenerativeAI(apiKey) - const chatModel = genAI.getGenerativeModel({ - model: model, - systemInstruction: systemMessage, - }) - - const chat = chatModel.startChat({ history }) - const lastMessage = messages[messages.length - 1].content - const result = await chat.sendMessageStream( - typeof lastMessage === 'string' ? lastMessage : lastMessage[0].text - ) - - const stream = new ReadableStream({ - async start(controller) { - let text = '' - for await (const chunk of result.stream) { - const chunkText = chunk.text() - text += chunkText - controller.enqueue(chunkText) - } - controller.close() - }, - }) - - return stream -} - -function processMessages(messages: Message[]) { - let systemMessage = '' - const history = messages - .filter((message, index) => { - if (message.role === 'system') { - systemMessage = - typeof message.content === 'string' - ? message.content - : message.content[0].text - return false - } - return index === 0 ? message.role === 'user' : true - }) - .map((message) => ({ - role: message.role === 'assistant' ? 'model' : message.role, - parts: [ - { - text: - typeof message.content === 'string' - ? 
message.content - : message.content[0].text, - }, - ], - })) - - return { history, systemMessage } -} diff --git a/src/features/chat/groqChat.ts b/src/features/chat/groqChat.ts deleted file mode 100644 index e7439593..00000000 --- a/src/features/chat/groqChat.ts +++ /dev/null @@ -1,85 +0,0 @@ -import { Message } from '../messages/messages' - -export async function getGroqChatResponse( - messages: Message[], - apiKey: string, - model: string -) { - const response = await fetch('/api/groq', { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ messages, apiKey, model }), - }) - - const data = await response.json() - return data -} - -export async function getGroqChatResponseStream( - messages: Message[], - apiKey: string, - model: string -) { - const response = await fetch('/api/groq', { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ messages, apiKey, model, stream: false }), - }) - - if (!response.ok) { - throw new Error('Groq API request failed') - } - - if (!response.body) { - throw new Error('Groq API response is empty') - } - - const reader = response.body.getReader() - const decoder = new TextDecoder('utf-8') - - return new ReadableStream({ - async start(controller) { - let buffer = '' - - while (true) { - const { done, value } = await reader.read() - - if (done) { - break - } - - // const chunk = decoder.decode(value, { stream: true }); - // controller.enqueue(chunk); - - buffer += decoder.decode(value, { stream: true }) - - // バッファを処理し、「{"message":」文字列を削除する - buffer = buffer.replace(/{"message":\s*"/g, '') - - // バッファが完全なメッセージを含んでいる場合、それを送信する。 - if (buffer.includes('"}')) { - const messages = buffer.split('"}') - - for (let i = 0; i < messages.length - 1; i++) { - controller.enqueue(messages[i]) - } - - buffer = messages[messages.length - 1] - } - } - - console.log('buffer', buffer) - - // 残りのバッファを処理する。 - if (buffer.length > 0) { - controller.enqueue(buffer) - } - - controller.close() - }, - }) -} diff --git a/src/features/chat/handlers.ts b/src/features/chat/handlers.ts index cb89e26a..1db47608 100644 --- a/src/features/chat/handlers.ts +++ b/src/features/chat/handlers.ts @@ -1,5 +1,5 @@ import { getAIChatResponseStream } from '@/features/chat/aiChatFactory' -import { AIService, AIServiceConfig } from '@/features/constants/settings' +import { AIService } from '@/features/constants/settings' import { textsToScreenplay, Message } from '@/features/messages/messages' import { speakCharacter } from '@/features/messages/speakCharacter' import { judgeSlide } from '@/features/slide/slideAIHelpers' @@ -143,39 +143,10 @@ export const processAIResponse = async ( const hs = homeStore.getState() const currentSlideMessages: string[] = [] - const aiServiceConfig: AIServiceConfig = { - openai: { - key: ss.openAiKey || process.env.NEXT_PUBLIC_OPEN_AI_KEY || '', - model: ss.selectAIModel, - }, - anthropic: { - key: ss.anthropicKey || process.env.NEXT_PUBLIC_ANTHROPIC_KEY || '', - model: ss.selectAIModel, - }, - google: { - key: ss.googleKey || process.env.NEXT_PUBLIC_GOOGLE_KEY || '', - model: ss.selectAIModel, - }, - localLlm: { - url: ss.localLlmUrl || process.env.NEXT_PUBLIC_LOCAL_LLM_URL || '', - model: ss.selectAIModel || process.env.NEXT_PUBLIC_LOCAL_LLM_MODEL || '', - }, - groq: { - key: ss.groqKey || process.env.NEXT_PUBLIC_GROQ_KEY || '', - model: ss.selectAIModel, - }, - dify: { - key: ss.difyKey || process.env.NEXT_PUBLIC_DIFY_KEY || '', - url: ss.difyUrl || process.env.NEXT_PUBLIC_DIFY_URL 
|| '', - conversationId: ss.difyConversationId, - }, - } - try { stream = await getAIChatResponseStream( ss.selectAIService as AIService, - messages, - aiServiceConfig + messages ) } catch (e) { console.error(e) @@ -197,6 +168,8 @@ export const processAIResponse = async ( try { while (true) { const { done, value } = await reader.read() + console.log(done) + console.log(value) if (done && receivedMessage.length === 0) break if (value) receivedMessage += value @@ -345,9 +318,9 @@ export const processAIResponse = async ( if ( typeof item.content != 'string' && item.content[0] && - item.content[1].image_url + item.content[1] ) { - lastImageUrl = item.content[1].image_url.url + lastImageUrl = item.content[1].image } const lastItem = acc[acc.length - 1] @@ -365,7 +338,7 @@ export const processAIResponse = async ( ...item, content: [ { type: 'text', text: text.trim() }, - { type: 'image_url', image_url: { url: lastImageUrl } }, + { type: 'image', image: lastImageUrl }, ], }) lastImageUrl = '' @@ -430,7 +403,7 @@ export const handleSendChatFn = // ChatVRM original mode const emptyKeys = [ ss.selectAIService === 'openai' && - !ss.openAiKey && + !ss.openaiKey && !process.env.NEXT_PUBLIC_OPEN_AI_KEY, ss.selectAIService === 'anthropic' && @@ -500,13 +473,12 @@ export const handleSendChatFn = ...hs.chatLog, { role: 'user', - content: - hs.modalImage && ss.selectAIService === 'openai' - ? [ - { type: 'text', text: newMessage }, - { type: 'image_url', image_url: { url: hs.modalImage } }, - ] - : newMessage, + content: hs.modalImage + ? [ + { type: 'text', text: newMessage }, + { type: 'image', image: hs.modalImage }, + ] + : newMessage, }, ] if (hs.modalImage) { @@ -515,14 +487,17 @@ export const handleSendChatFn = homeStore.setState({ chatLog: messageLog }) // TODO: AIに送信するメッセージの加工、処理がひどいので要修正 - const processedMessageLog = messageLog.map((message) => ({ + // 画像は直近のものしか送らない + const processedMessageLog = messageLog.map((message, index) => ({ role: ['assistant', 'user', 'system'].includes(message.role) ? message.role : 'assistant', content: - typeof message.content === 'string' || ss.selectAIService === 'openai' + index === messageLog.length - 1 ? message.content - : message.content[0].text, + : Array.isArray(message.content) + ? 
message.content[0].text + : message.content, })) const messages: Message[] = [ diff --git a/src/features/chat/openAiChat.ts b/src/features/chat/openAiChat.ts deleted file mode 100644 index 51cc9c4b..00000000 --- a/src/features/chat/openAiChat.ts +++ /dev/null @@ -1,95 +0,0 @@ -import { Message } from '../messages/messages' - -export async function getOpenAIChatResponse( - messages: Message[], - apiKey: string, - model: string -) { - try { - const response = await fetch('/api/openai', { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ messages, apiKey, model }), - }) - - if (!response.ok) { - throw new Error('Failed to fetch OpenAI API response') - } - - const data = await response.json() - return { message: data.message } - } catch (error) { - console.error('Error fetching OpenAI API response:', error) - throw error - } -} - -export async function getOpenAIChatResponseStream( - messages: Message[], - apiKey: string, - model: string -) { - try { - const response = await fetch('/api/openai', { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ messages, apiKey, model, stream: true }), - }) - - if (!response.ok) { - throw new Error('OpenAI APIリクエストに失敗しました') - } - - if (!response.body) { - throw new Error('OpenAI APIレスポンスが空です') - } - - const reader = response.body.getReader() - const decoder = new TextDecoder('utf-8') - - return new ReadableStream({ - async start(controller) { - while (true) { - const { done, value } = await reader.read() - - if (done) { - break - } - - const chunk = decoder.decode(value) - const lines = chunk.split('\n') - - for (const line of lines) { - if (line.startsWith('data:')) { - const data = line.substring(5).trim() - if (data !== '[DONE]') { - const event = JSON.parse(data) - switch (event.type) { - case 'content_block_delta': - controller.enqueue(event.text) - break - case 'error': - throw new Error( - `OpenAI API error: ${JSON.stringify(event.error)}` - ) - case 'message_stop': - controller.close() - return - } - } - } - } - } - - controller.close() - }, - }) - } catch (error) { - console.error('Error fetching OpenAI API response stream:', error) - throw error - } -} diff --git a/src/features/chat/vercelAIChat.ts b/src/features/chat/vercelAIChat.ts new file mode 100644 index 00000000..84005236 --- /dev/null +++ b/src/features/chat/vercelAIChat.ts @@ -0,0 +1,96 @@ +import { Message } from '../messages/messages' + +export async function getVercelAIChatResponse( + messages: Message[], + apiKey: string, + aiService: string, + model: string +) { + try { + const response = await fetch('/api/aiChat', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + messages, + apiKey, + aiService, + model, + stream: false, + }), + }) + + if (!response.ok) { + throw new Error(`API request to ${aiService} failed`) + } + + if (!response.body) { + throw new Error(`API response from ${aiService} is empty`) + } + + const data = await response.json() + return { message: data.message } + } catch (error) { + console.error(`Error fetching ${aiService} API response:`, error) + throw error + } +} + +export async function getVercelAIChatResponseStream( + messages: Message[], + apiKey: string, + aiService: string, + model: string +): Promise> { + const response = await fetch('/api/aiChat', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + messages, + apiKey, + aiService, + model, + stream: true, + 
}), + }) + + if (!response.ok) { + throw new Error(`API request to ${aiService} failed`) + } + + return new ReadableStream({ + async start(controller) { + if (!response.body) { + throw new Error(`API response from ${aiService} is empty`) + } + + const reader = response.body.getReader() + const decoder = new TextDecoder('utf-8') + + try { + while (true) { + const { done, value } = await reader.read() + if (done) break + + const chunk = decoder.decode(value) + const lines = chunk.split('\n') + + for (const line of lines) { + if (line.startsWith('0:')) { + const content = line.substring(2).trim().replace(/^"|"$/g, '') + controller.enqueue(content) + } + } + } + } catch (error) { + controller.error(error) + } finally { + controller.close() + reader.releaseLock() + } + }, + }) +} diff --git a/src/features/messages/messages.ts b/src/features/messages/messages.ts index 3e065f8a..bb39c37a 100644 --- a/src/features/messages/messages.ts +++ b/src/features/messages/messages.ts @@ -6,10 +6,7 @@ export type Message = { role: string // "assistant" | "system" | "user"; content: | string - | [ - { type: string; text: string }, - { type: string; image_url: { url: string } }, - ] // マルチモーダル拡張 + | [{ type: 'text'; text: string }, { type: 'image'; image: string }] // マルチモーダル拡張 } const talkStyles = [ diff --git a/src/features/slide/slideAIHelpers.ts b/src/features/slide/slideAIHelpers.ts index d22b1b0b..afc18627 100644 --- a/src/features/slide/slideAIHelpers.ts +++ b/src/features/slide/slideAIHelpers.ts @@ -1,7 +1,8 @@ -import { Message } from '@/features/messages/messages' -import { getOpenAIChatResponse } from '@/features/chat/openAiChat' -import { getAnthropicChatResponse } from '@/features/chat/anthropicChat' -import settingsStore from '@/features/stores/settings' +import { getVercelAIChatResponse } from '@/features/chat/vercelAIChat' +import settingsStore, { + multiModalAIServiceKey, + multiModalAIServices, +} from '@/features/stores/settings' export const judgeSlide = async ( queryText: string, @@ -9,6 +10,18 @@ export const judgeSlide = async ( supplement: string ): Promise => { const ss = settingsStore.getState() + const aiService = ss.selectAIService as multiModalAIServiceKey + + if (!multiModalAIServices.includes(aiService)) { + throw new Error('Invalid AI service') + } + + const apiKeyName = `${aiService}Key` as const + const apiKey = ss[apiKeyName] + + if (!apiKey) { + throw new Error('API key not found') + } const systemMessage = ` You are an AI tasked with determining whether a user's comment is a question about a given script document and supplementary text, and if so, which page of the document is most relevant to the question. Follow these instructions carefully: @@ -46,27 +59,14 @@ ${supplement} Based on the user's comment and the content of both the script document and supplementary text, provide "only" your final answer in the specified JSON format. 
` - if (ss.selectAIService === 'openai') { - const response = await getOpenAIChatResponse( - [ - { role: 'system', content: systemMessage }, - { role: 'user', content: queryText }, - ], - ss.openAiKey, - ss.selectAIModel - ) - return response.message - } else if (ss.selectAIService === 'anthropic') { - const response = await getAnthropicChatResponse( - [ - { role: 'system', content: systemMessage }, - { role: 'user', content: queryText }, - ], - ss.anthropicKey, - ss.selectAIModel - ) - return response.message - } else { - throw new Error('Unsupported AI service') - } + const response = await getVercelAIChatResponse( + [ + { role: 'system', content: systemMessage }, + { role: 'user', content: queryText }, + ], + apiKey, + aiService, + ss.selectAIModel + ) + return response.message } diff --git a/src/features/stores/settings.ts b/src/features/stores/settings.ts index 50a7a4b4..619e7c37 100644 --- a/src/features/stores/settings.ts +++ b/src/features/stores/settings.ts @@ -10,8 +10,15 @@ import { VoiceLanguage, } from '../constants/settings' +export const multiModalAIServices = ['openai', 'anthropic', 'google'] as const +export type multiModalAIServiceKey = (typeof multiModalAIServices)[number] + +type multiModalAPIKeys = { + [K in multiModalAIServiceKey as `${K}Key`]: string +} + interface APIKeys { - openAiKey: string + openaiKey: string anthropicKey: string googleKey: string groqKey: string @@ -71,6 +78,7 @@ interface General { } export type SettingsState = APIKeys & + multiModalAPIKeys & ModelProvider & Integrations & Character & @@ -80,7 +88,7 @@ const settingsStore = create()( persist( (set, get) => ({ // API Keys - openAiKey: '', + openaiKey: '', anthropicKey: '', googleKey: '', groqKey: '', @@ -139,7 +147,7 @@ const settingsStore = create()( { name: 'aitube-kit-settings', partialize: (state) => ({ - openAiKey: state.openAiKey, + openaiKey: state.openaiKey, anthropicKey: state.anthropicKey, googleKey: state.googleKey, groqKey: state.groqKey, diff --git a/src/features/youtube/conversationContinuityFunctions.ts b/src/features/youtube/conversationContinuityFunctions.ts index b06fd792..01c83b23 100644 --- a/src/features/youtube/conversationContinuityFunctions.ts +++ b/src/features/youtube/conversationContinuityFunctions.ts @@ -1,26 +1,42 @@ import { Message } from '@/features/messages/messages' -import { getOpenAIChatResponse } from '@/features/chat/openAiChat' -import { getAnthropicChatResponse } from '@/features/chat/anthropicChat' - -const fetchAIResponse = async ( - queryMessages: any[], - aiApiKey: string, - selectAIService: string, - selectAIModel: string -): Promise => { - if (selectAIService === 'openai') { - return await getOpenAIChatResponse(queryMessages, aiApiKey, selectAIModel) - } else if (selectAIService === 'anthropic') { - return await getAnthropicChatResponse( - queryMessages, - aiApiKey, - selectAIModel - ) - } else { - throw new Error('Unsupported AI service') +import { getVercelAIChatResponse } from '@/features/chat/vercelAIChat' +import settingsStore, { + multiModalAIServiceKey, + multiModalAIServices, +} from '@/features/stores/settings' + +const getAIConfig = () => { + const ss = settingsStore.getState() + const aiService = ss.selectAIService as multiModalAIServiceKey + + if (!multiModalAIServices.includes(aiService)) { + throw new Error('Invalid AI service') + } + + const apiKeyName = `${aiService}Key` as const + const apiKey = ss[apiKeyName] + + if (!apiKey) { + throw new Error('API key not found') + } + + return { + aiApiKey: apiKey, + selectAIService: aiService,
+ selectAIModel: ss.selectAIModel, } } +const fetchAIResponse = async (queryMessages: any[]): Promise => { + const { aiApiKey, selectAIService, selectAIModel } = getAIConfig() + return await getVercelAIChatResponse( + queryMessages, + aiApiKey, + selectAIService, + selectAIModel + ) +} + /** * 共通のシステムメッセージをを返します。 * @@ -112,16 +128,11 @@ const getLastMessages = ( * * @param {Message[]} messages - メッセージの配列 * @param {any[]} youtubeComments - Youtubeのコメントの配列 - * @param {string} openAiKey - OpenAIのAPIキー - * @param {string} selectAIModel - 使用するモデル * @returns {Promise} - 最適なコメント */ export const getBestComment = async ( messages: Message[], - youtubeComments: any[], - aiApiKey: string, - selectAIService: string, - selectAIModel: string + youtubeComments: any[] ): Promise => { console.log('getBestComment') const lastTenMessages = getLastMessages(messages, 10) @@ -159,12 +170,7 @@ ${lastTenMessages} }, ] - const response = await fetchAIResponse( - queryMessages, - aiApiKey, - selectAIService, - selectAIModel - ) + const response = await fetchAIResponse(queryMessages) return response.message } @@ -194,16 +200,9 @@ export const getMessagesForSleep = async ( * メッセージを受け取り、最新の4つのメッセージを使用して別の話題を取得します。 * * @param {Message[]} messages - メッセージの配列 - * @param {string} openAiKey - OpenAIのAPIキー - * @param {string} selectAIModel - 使用するモデル * @returns {Promise} - 別の話題 */ -export const getAnotherTopic = async ( - messages: Message[], - aiApiKey: string, - selectAIService: string, - selectAIModel: string -): Promise => { +export const getAnotherTopic = async (messages: Message[]): Promise => { console.log('getAnotherTopic') const lastTenMessages = getLastMessages(messages, 10) const queryMessages = [ @@ -223,12 +222,7 @@ export const getAnotherTopic = async ( ...lastTenMessages, ] - const response = await fetchAIResponse( - queryMessages, - aiApiKey, - selectAIService, - selectAIModel - ) + const response = await fetchAIResponse(queryMessages) return response.message } @@ -260,15 +254,10 @@ export const getMessagesForNewTopic = async ( * メッセージを受け取り、次の発言者を判断します。 * * @param {Message[]} messages - メッセージの配列 - * @param {string} openAiKey - OpenAIのAPIキー - * @param {string} selectAIModel - 使用するモデル * @returns {Promise} - 次の発言者 */ export const checkIfResponseContinuationIsRequired = async ( - messages: Message[], - aiApiKey: string, - selectAIService: string, - selectAIModel: string + messages: Message[] ): Promise => { console.log('checkIfResponseContinuationIsRequired') const lastTenMessages = getLastMessages(messages, 10) @@ -277,7 +266,7 @@ export const checkIfResponseContinuationIsRequired = async ( } const systemMessage = `与えられた会話文の文脈から、次にどの話者が発言すべきかを判断してください。 -最後の話者が話を続けるべきならば "true" を、逆に交代が必要な場合は "false" を返します。 +最後��話者が話を続けるべきならば "true" を、逆に交代が必要な場合は "false" を返します。 回答はJSON形式で、answerとreasonの2つのキーを持つオブジェクトとしてください。 ## 例 @@ -342,12 +331,7 @@ B: 見てみたいな。送ってくれない? 
// エラーが発生した場合はfalseを返す let answer try { - const response = await fetchAIResponse( - queryMessages, - aiApiKey, - selectAIService, - selectAIModel - ) + const response = await fetchAIResponse(queryMessages) console.log('response.message:', response.message) const responseJson = JSON.parse(response.message) answer = responseJson.answer diff --git a/src/features/youtube/youtubeComments.ts b/src/features/youtube/youtubeComments.ts index ccb69b08..05d55e9b 100644 --- a/src/features/youtube/youtubeComments.ts +++ b/src/features/youtube/youtubeComments.ts @@ -105,8 +105,6 @@ export const fetchAndProcessComments = async ( try { const liveChatId = await getLiveChatId(ss.youtubeLiveId, ss.youtubeApiKey) - const aiApiKey = - ss.selectAIService === 'anthropic' ? ss.anthropicKey : ss.openAiKey if (liveChatId) { // 会話の継続が必要かどうかを確認 @@ -116,12 +114,7 @@ export const fetchAndProcessComments = async ( ss.conversationContinuityMode ) { const isContinuationNeeded = - await checkIfResponseContinuationIsRequired( - hs.chatLog, - aiApiKey, - ss.selectAIService, - ss.selectAIModel - ) + await checkIfResponseContinuationIsRequired(hs.chatLog) if (isContinuationNeeded) { const continuationMessage = await getMessagesForContinuation( ss.systemPrompt, @@ -153,17 +146,7 @@ export const fetchAndProcessComments = async ( settingsStore.setState({ youtubeSleepMode: false }) let selectedComment = '' if (ss.conversationContinuityMode) { - if (youtubeComments.length > 1) { - selectedComment = await getBestComment( - hs.chatLog, - youtubeComments, - aiApiKey, - ss.selectAIService, - ss.selectAIModel - ) - } else { - selectedComment = youtubeComments[0].userComment - } + selectedComment = await getBestComment(hs.chatLog, youtubeComments) } else { selectedComment = youtubeComments[Math.floor(Math.random() * youtubeComments.length)] @@ -187,12 +170,7 @@ export const fetchAndProcessComments = async ( preProcessAIResponse(continuationMessage) } else if (noCommentCount === 3) { // 新しいトピックを生成 - const anotherTopic = await getAnotherTopic( - hs.chatLog, - aiApiKey, - ss.selectAIService, - ss.selectAIModel - ) + const anotherTopic = await getAnotherTopic(hs.chatLog) console.log('anotherTopic:', anotherTopic) const newTopicMessage = await getMessagesForNewTopic( ss.systemPrompt, diff --git a/src/pages/api/aiChat.ts b/src/pages/api/aiChat.ts new file mode 100644 index 00000000..e52164eb --- /dev/null +++ b/src/pages/api/aiChat.ts @@ -0,0 +1,120 @@ +import { createOpenAI } from '@ai-sdk/openai' +import { createAnthropic } from '@ai-sdk/anthropic' +import { createGoogleGenerativeAI } from '@ai-sdk/google' +import { streamText, generateText } from 'ai' +import { NextRequest } from 'next/server' + +type AIServiceKey = 'openai' | 'anthropic' | 'google' | 'groq' +type AIServiceConfig = Record any> + +// Allow streaming responses up to 30 seconds +export const maxDuration = 30 + +export const config = { + runtime: 'edge', +} + +export default async function handler(req: NextRequest) { + if (req.method !== 'POST') { + return new Response(JSON.stringify({ error: 'Method Not Allowed' }), { + status: 405, + headers: { 'Content-Type': 'application/json' }, + }) + } + + const { messages, apiKey, aiService, model, stream } = await req.json() + const aiServiceConfig: AIServiceConfig = { + openai: () => createOpenAI({ apiKey }), + anthropic: () => createAnthropic({ apiKey }), + google: () => createGoogleGenerativeAI({ apiKey }), + groq: () => + createOpenAI({ baseURL: 'https://api.groq.com/openai/v1', apiKey }), + } + const aiServiceInstance = 
aiServiceConfig[aiService as AIServiceKey] + + if (!aiServiceInstance) { + return new Response(JSON.stringify({ error: 'Invalid AI service' }), { + status: 400, + headers: { 'Content-Type': 'application/json' }, + }) + } + + const instance = aiServiceInstance() + + const modifiedMessages = modifyMessages(aiService, messages) + + if (stream) { + try { + const result = await streamText({ + model: instance(model), + messages: modifiedMessages, + }) + + return result.toDataStreamResponse() + } catch (error) { + console.error('Error in OpenAI API call:', error) + return new Response(JSON.stringify({ error: 'Internal Server Error' }), { + status: 500, + headers: { 'Content-Type': 'application/json' }, + }) + } + } else { + const result = await generateText({ + model: instance(model), + messages: modifiedMessages, + }) + debugger + return result + } +} + +function modifyMessages(aiService: string, messages: any[]) { + if (aiService === 'anthropic') { + return modifyAnthropicMessages(messages) + } + return messages +} + +// Anthropicのメッセージを修正する +function modifyAnthropicMessages(messages: any[]) { + const systemMessage = messages.find((message) => message.role === 'system') + let userMessages = messages + .filter((message) => message.role !== 'system') + .filter((message) => message.content !== '') + + userMessages = consolidateMessages(userMessages) + + while (userMessages.length > 0 && userMessages[0].role !== 'user') { + userMessages.shift() + } + + return [systemMessage, ...userMessages] +} + +// 同じroleのメッセージを結合する +function consolidateMessages(messages: any[]) { + const consolidated: any[] = [] + let lastRole: string | null = null + let combinedContent = '' + + messages.forEach((message, index) => { + if (message.role === lastRole) { + combinedContent += '\n' + message.content + } else { + if (lastRole !== null) { + consolidated.push({ role: lastRole, content: combinedContent }) + } + lastRole = message.role + combinedContent = + typeof message.content === 'string' + ? 
message.content + : message.content[0].text + } + + if (index === messages.length - 1) { + consolidated.push({ role: lastRole, content: combinedContent }) + } + }) + + return consolidated +} diff --git a/src/pages/api/chat.ts b/src/pages/api/chat.ts deleted file mode 100644 index 0b72f406..00000000 --- a/src/pages/api/chat.ts +++ /dev/null @@ -1,38 +0,0 @@ -// import { Configuration, OpenAIApi } from "openai"; - -// import type { NextApiRequest, NextApiResponse } from "next"; - -// type Data = { -// message: string; -// }; - -// export default async function handler( -// req: NextApiRequest, -// res: NextApiResponse -// ) { -// const apiKey = req.body.apiKey || process.env.OPEN_AI_KEY; - -// if (!apiKey) { -// res -// .status(400) -// .json({ message: "APIキーが間違っているか、設定されていません。" }); - -// return; -// } - -// const configuration = new Configuration({ -// apiKey: apiKey, -// }); - -// const openai = new OpenAIApi(configuration); - -// const { data } = await openai.createChatCompletion({ -// model: "gpt-3.5-turbo", -// messages: req.body.messages, -// }); - -// const [aiRes] = data.choices; -// const message = aiRes.message?.content || "エラーが発生しました"; - -// res.status(200).json({ message: message }); -// } diff --git a/src/pages/api/convertSlide.ts b/src/pages/api/convertSlide.ts index 2698e611..4b38271d 100644 --- a/src/pages/api/convertSlide.ts +++ b/src/pages/api/convertSlide.ts @@ -3,9 +3,17 @@ import formidable from 'formidable' import fs from 'fs' import path from 'path' import { createCanvas } from 'canvas' -import { OpenAI } from 'openai' import * as pdfjsLib from 'pdfjs-dist/legacy/build/pdf.mjs' +import { createOpenAI } from '@ai-sdk/openai' +import { createAnthropic } from '@ai-sdk/anthropic' +import { createGoogleGenerativeAI } from '@ai-sdk/google' +import { generateObject } from 'ai' + +import { multiModalAIServiceKey } from '@/features/stores/settings' + +type AIServiceConfig = Record any> + export const config = { api: { bodyParser: false, @@ -62,20 +70,40 @@ async function convertPdfToImages(pdfBuffer: Buffer): Promise { return images } +interface SlideLineResponse { + line: string + notes: string + page?: number +} + async function createSlideLine( imageBase64: string, apiKey: string, + aiService: string, model: string, selectLanguage: string, previousResult: string | null -) { - const client = new OpenAI({ apiKey }) +): Promise { const additionalPrompt = previousResult ? `Previous slide content: ${previousResult}` : 'This is the first slide.' 
- const response = await client.chat.completions.create({ - model: `${model}`, + const aiServiceConfig: AIServiceConfig = { + openai: () => createOpenAI({ apiKey }), + anthropic: () => createAnthropic({ apiKey }), + google: () => createGoogleGenerativeAI({ apiKey }), + } + + const aiServiceInstance = aiServiceConfig[aiService as multiModalAIServiceKey] + + if (!aiServiceInstance) { + throw new Error('Invalid AI service') + } + + const instance = aiServiceInstance() + + const response = await generateObject({ + model: instance(model), messages: [ { role: 'system', @@ -85,19 +113,21 @@ async function createSlideLine( role: 'user', content: [ { - type: 'image_url', - image_url: { - url: `${imageBase64}`, - }, + type: 'text', + text: '', + }, + { + type: 'image', + image: `${imageBase64}`, }, ], }, ], - response_format: { type: 'json_object' }, + output: 'no-schema', + mode: 'json', }) - const result = JSON.parse(response.choices[0].message?.content || '{}') - return result + return response.object as unknown as SlideLineResponse } async function handler(req: NextApiRequest, res: NextApiResponse) { @@ -109,17 +139,17 @@ async function handler(req: NextApiRequest, res: NextApiResponse) { return } + const getField = (fieldName: string) => { + const field = fields[fieldName] + return Array.isArray(field) ? field[0] : field + } + const file = Array.isArray(files.file) ? files.file[0] : files.file - const folderName = Array.isArray(fields.folderName) - ? fields.folderName[0] - : fields.folderName - const apiKey = Array.isArray(fields.apiKey) - ? fields.apiKey[0] - : fields.apiKey - const model = Array.isArray(fields.model) ? fields.model[0] : fields.model - const selectLanguage = Array.isArray(fields.selectLanguage) - ? fields.selectLanguage[0] - : fields.selectLanguage + const folderName = getField('folderName') + const aiService = getField('aiService') + const apiKey = getField('apiKey') + const model = getField('model') + const selectLanguage = getField('selectLanguage') if (!file) { res.status(400).send('No file uploaded') @@ -152,10 +182,11 @@ async function handler(req: NextApiRequest, res: NextApiResponse) { for (let i = 0; i < images.length; i++) { const imgBase64 = images[i] - if (apiKey && model) { + if (aiService && apiKey && model) { const slideLine = await createSlideLine( imgBase64, apiKey, + aiService, model, language, previousResult diff --git a/src/pages/api/google.ts b/src/pages/api/google.ts deleted file mode 100644 index d0288141..00000000 --- a/src/pages/api/google.ts +++ /dev/null @@ -1,67 +0,0 @@ -// import { NextApiRequest, NextApiResponse } from "next"; -// import { GoogleGenerativeAI } from "@google/generative-ai"; -// import { Message } from "@/features/messages/messages"; - -// export default async function handler( -// req: NextApiRequest, -// res: NextApiResponse -// ) { -// const { messages, apiKey, model, stream } = req.body; -// const systemMessage = messages.find((message: any) => message.role === "system"); -// let userMessages = messages.filter((message: any) => message.role !== "system"); - -// const genAI = new GoogleGenerativeAI(apiKey); -// const geminiModel = genAI.getGenerativeModel({ model: "models/" + model, systemInstruction: systemMessage.content }); - -// let filteredMessages = userMessages -// .filter((message: Message) => message.content !== "") -// .map((message: Message) => ({ -// role: message.role === "assistant" ? 
"model" : message.role, -// parts: [{ text: message.content }], -// })); - -// // 最初の要素の role が 'user' でなければ、その要素を除外 -// if (filteredMessages.length > 0 && filteredMessages[0].role !== 'user') { -// filteredMessages = filteredMessages.slice(1); -// } - -// const lastMessage = filteredMessages[filteredMessages.length - 1].parts[0].text; - -// if (stream) { -// res.writeHead(200, { -// "Content-Type": "text/event-stream", -// "Cache-Control": "no-cache", -// Connection: "keep-alive", -// }); - -// const chat = geminiModel.startChat({ -// history: filteredMessages, -// generationConfig: { -// maxOutputTokens: 200, -// }, -// }); - -// const result = await chat.sendMessageStream(lastMessage); - -// for await (const chunk of result.stream) { -// const text = await chunk.text(); -// console.log(text); -// res.write(`data: ${JSON.stringify({ text: text })}\n\n`); -// } - -// res.write("event: end\n\n"); -// res.end(); -// } else { -// const chat = await geminiModel.startChat({ -// history: filteredMessages, -// generationConfig: { -// maxOutputTokens: 200, -// }, -// }); - -// const result = await chat.sendMessage(lastMessage); -// const response = await result.response; - -// res.status(200).json({ message: response.text() }); -// } -// } diff --git a/src/pages/api/groq.ts b/src/pages/api/groq.ts deleted file mode 100644 index 25668202..00000000 --- a/src/pages/api/groq.ts +++ /dev/null @@ -1,130 +0,0 @@ -import { NextApiRequest, NextApiResponse } from 'next' -import Groq from 'groq-sdk' -import { Message } from '@/features/messages/messages' - -export default async function handler( - req: NextApiRequest, - res: NextApiResponse -) { - const { messages, apiKey, model, stream } = req.body - - console.log('Request body:', req.body) - - const client = new Groq({ apiKey }) - - const systemMessage = messages.find( - (message: any) => message.role === 'system' - ) - let userMessages = messages.filter( - (message: any) => message.role !== 'system' - ) - - userMessages = userMessages.filter( - (message: Message) => message.content !== '' - ) - - const consolidatedMessages: Message[] = [] - let lastRole: string | null = null - let combinedContent = '' - - userMessages.forEach((message: Message, index: number) => { - if (message.role === lastRole) { - combinedContent += '\n' + message.content - } else { - if (lastRole !== null) { - consolidatedMessages.push({ role: lastRole, content: combinedContent }) - } - lastRole = message.role - combinedContent = - typeof message.content === 'string' - ? 
message.content - : message.content[0].text - } - - // 最後のメッセージの場合、現在の内容を追加 - if (index === userMessages.length - 1) { - consolidatedMessages.push({ role: lastRole, content: combinedContent }) - } - }) - - userMessages = consolidatedMessages - - if (stream) { - res.writeHead(200, { - 'Content-Type': 'text/event-stream', - 'Cache-Control': 'no-cache', - Connection: 'keep-alive', - }) - - const stream = await client.chat.completions - .create({ - messages: [ - { role: 'system', content: systemMessage?.content }, - ...userMessages, - ], - model: model, - max_tokens: 200, - stream: true, - }) - .catch(async (err) => { - if (err instanceof Groq.APIError) { - res.write( - `data: ${JSON.stringify({ type: 'error', error: err })}\n\n` - ) - res.end() - } else { - throw err - } - }) - - if (stream) { - const reader = ( - stream as unknown as { - getReader: () => ReadableStreamDefaultReader - } - ).getReader() - const decoder = new TextDecoder('utf-8') - - while (true) { - const { done, value } = await reader.read() - - if (done) { - res.write(`data: ${JSON.stringify({ type: 'message_stop' })}\n\n`) - res.end() - break - } - - const chunk = decoder.decode(value, { stream: true }) - console.log('chunk:', chunk) - res.write( - `data: ${JSON.stringify({ type: 'content_block_delta', text: chunk })}\n\n` - ) - } - } - } else { - const response = await client.chat.completions - .create({ - messages: [ - { role: 'system', content: systemMessage?.content }, - ...userMessages, - ], - model: model, - max_tokens: 200, - }) - .catch(async (err) => { - if (err instanceof Groq.APIError) { - console.error('Groq API Error:', err) - if (err.status) { - res.status(err.status).json({ error: err }) - } - } else { - throw err - } - }) - - if (response) { - let messageContent = response.choices[0].message.content - res.status(200).json({ message: messageContent }) - } - } -} diff --git a/src/pages/api/openai.ts b/src/pages/api/openai.ts deleted file mode 100644 index 8b58b4e2..00000000 --- a/src/pages/api/openai.ts +++ /dev/null @@ -1,47 +0,0 @@ -import { NextApiRequest, NextApiResponse } from 'next' -import OpenAI from 'openai' -import { Message } from '@/features/messages/messages' - -export default async function handler( - req: NextApiRequest, - res: NextApiResponse -) { - const { messages, apiKey, model, stream } = req.body - - const client = new OpenAI({ apiKey }) - - if (stream) { - res.writeHead(200, { - 'Content-Type': 'text/event-stream', - 'Cache-Control': 'no-cache', - Connection: 'keep-alive', - }) - - const stream = await client.chat.completions.create({ - model: model, - messages: messages, - stream: true, - max_tokens: 200, - }) - - for await (const chunk of stream) { - const messagePiece = chunk.choices[0].delta.content - if (messagePiece) { - res.write( - `data: ${JSON.stringify({ type: 'content_block_delta', text: messagePiece })}\n\n` - ) - } - } - - res.write(`data: ${JSON.stringify({ type: 'message_stop' })}\n\n`) - res.end() - } else { - const response = await client.chat.completions.create({ - model: model, - messages: messages, - max_tokens: 200, - }) - - res.status(200).json({ message: response.choices[0].message.content }) - } -} From b33b5421257a46bb8a189dac79c8cd8976af5db4 Mon Sep 17 00:00:00 2001 From: tegnike Date: Fri, 30 Aug 2024 13:04:04 +0200 Subject: [PATCH 2/8] =?UTF-8?q?=E4=B8=8D=E8=A6=81=E3=83=95=E3=82=A1?= =?UTF-8?q?=E3=82=A4=E3=83=AB=E5=89=8A=E9=99=A4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/pages/api/anthropic.ts | 92 
-------------------------------------- 1 file changed, 92 deletions(-) delete mode 100644 src/pages/api/anthropic.ts diff --git a/src/pages/api/anthropic.ts b/src/pages/api/anthropic.ts deleted file mode 100644 index c805c686..00000000 --- a/src/pages/api/anthropic.ts +++ /dev/null @@ -1,92 +0,0 @@ -import { NextApiRequest, NextApiResponse } from 'next' -import { Anthropic } from '@anthropic-ai/sdk' -import { Message } from '@/features/messages/messages' - -export default async function handler( - req: NextApiRequest, - res: NextApiResponse -) { - const { messages, apiKey, model, stream } = req.body - - const client = new Anthropic({ apiKey }) - const systemMessage = messages.find( - (message: any) => message.role === 'system' - ) - let userMessages = messages.filter( - (message: any) => message.role !== 'system' - ) - - userMessages = userMessages.filter( - (message: Message) => message.content !== '' - ) - - const consolidatedMessages: Message[] = [] - let lastRole: string | null = null - let combinedContent = '' - - userMessages.forEach((message: Message, index: number) => { - if (message.role === lastRole) { - combinedContent += '\n' + message.content - } else { - if (lastRole !== null) { - consolidatedMessages.push({ role: lastRole, content: combinedContent }) - } - lastRole = message.role - combinedContent = - typeof message.content === 'string' - ? message.content - : message.content[0].text - } - - // 最後のメッセージの場合、現在の内容を追加 - if (index === userMessages.length - 1) { - consolidatedMessages.push({ role: lastRole, content: combinedContent }) - } - }) - - while ( - consolidatedMessages.length > 0 && - consolidatedMessages[0].role !== 'user' - ) { - consolidatedMessages.shift() - } - - userMessages = consolidatedMessages - - if (stream) { - res.writeHead(200, { - 'Content-Type': 'text/event-stream', - 'Cache-Control': 'no-cache', - Connection: 'keep-alive', - }) - - await client.messages - .stream({ - system: systemMessage?.content, - messages: userMessages, - model: model, - max_tokens: 200, - }) - .on('text', (text) => { - res.write( - `data: ${JSON.stringify({ type: 'content_block_delta', text })}\n\n` - ) - }) - .on('error', (error) => { - res.write(`data: ${JSON.stringify({ type: 'error', error })}\n\n`) - }) - .on('end', () => { - res.write(`data: ${JSON.stringify({ type: 'message_stop' })}\n\n`) - res.end() - }) - } else { - const response = await client.messages.create({ - system: systemMessage?.content, - messages: userMessages, - model: model, - max_tokens: 200, - }) - - res.status(200).json({ message: response.content }) - } -} From ef27e0212f891f87e17e142e891d67db4540d0e9 Mon Sep 17 00:00:00 2001 From: tegnike Date: Fri, 30 Aug 2024 13:29:21 +0200 Subject: [PATCH 3/8] =?UTF-8?q?=E3=82=A8=E3=83=A9=E3=83=BC=E3=83=A1?= =?UTF-8?q?=E3=83=83=E3=82=BB=E3=83=BC=E3=82=B8=E5=BC=B7=E5=8C=96?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/features/chat/vercelAIChat.ts | 16 ++++++++++++---- src/features/slide/slideAIHelpers.ts | 4 +++- .../youtube/conversationContinuityFunctions.ts | 4 +++- src/pages/api/aiChat.ts | 2 +- 4 files changed, 19 insertions(+), 7 deletions(-) diff --git a/src/features/chat/vercelAIChat.ts b/src/features/chat/vercelAIChat.ts index 84005236..087d9d16 100644 --- a/src/features/chat/vercelAIChat.ts +++ b/src/features/chat/vercelAIChat.ts @@ -22,11 +22,15 @@ export async function getVercelAIChatResponse( }) if (!response.ok) { - throw new Error(`API request to ${aiService} failed`) + throw new Error( + `API request 
to ${aiService} failed with status ${response.status} and body ${await response.text()}` + ) } if (!response.body) { - throw new Error(`API response from ${aiService} is empty`) + throw new Error( + `API response from ${aiService} is empty, status ${response.status}` + ) } const data = await response.json() @@ -58,13 +62,17 @@ export async function getVercelAIChatResponseStream( }) if (!response.ok) { - throw new Error(`API request to ${aiService} failed`) + throw new Error( + `API request to ${aiService} failed with status ${response.status} and body ${await response.text()}` + ) } return new ReadableStream({ async start(controller) { if (!response.body) { - throw new Error(`API response from ${aiService} is empty`) + throw new Error( + `API response from ${aiService} is empty, status ${response.status}` + ) } const reader = response.body.getReader() diff --git a/src/features/slide/slideAIHelpers.ts b/src/features/slide/slideAIHelpers.ts index afc18627..fd18e746 100644 --- a/src/features/slide/slideAIHelpers.ts +++ b/src/features/slide/slideAIHelpers.ts @@ -20,7 +20,9 @@ export const judgeSlide = async ( const apiKey = ss[apiKeyName] if (!apiKey) { - throw new Error('API key not found') + throw new Error( + `API key for ${aiService} is missing. Unable to proceed with the AI service.` + ) } const systemMessage = ` diff --git a/src/features/youtube/conversationContinuityFunctions.ts b/src/features/youtube/conversationContinuityFunctions.ts index 01c83b23..6ab1fee8 100644 --- a/src/features/youtube/conversationContinuityFunctions.ts +++ b/src/features/youtube/conversationContinuityFunctions.ts @@ -17,7 +17,9 @@ const getAIConfig = () => { const apiKey = ss[apiKeyName] if (!apiKey) { - throw new Error('API key not found') + throw new Error( + `API key for ${aiService} is missing. Unable to proceed with the AI service.` + ) } return { diff --git a/src/pages/api/aiChat.ts b/src/pages/api/aiChat.ts index e52164eb..477469d8 100644 --- a/src/pages/api/aiChat.ts +++ b/src/pages/api/aiChat.ts @@ -63,7 +63,7 @@ export default async function handler(req: NextRequest) { model: instance(model), messages: modifiedMessages, }) - debugger + return result } } From afe996f06c29e50b553a45f7758d2d739887c33c Mon Sep 17 00:00:00 2001 From: tegnike Date: Fri, 30 Aug 2024 14:48:07 +0200 Subject: [PATCH 4/8] =?UTF-8?q?locales=E3=83=95=E3=82=A1=E3=82=A4=E3=83=AB?= =?UTF-8?q?=E6=9B=B4=E6=96=B0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- locales/en/translation.json | 6 +++--- locales/ja/translation.json | 6 +++--- locales/ko/translation.json | 6 +++--- locales/zh/translation.json | 6 +++--- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/locales/en/translation.json b/locales/en/translation.json index 90b1e36e..1e3e2c01 100644 --- a/locales/en/translation.json +++ b/locales/en/translation.json @@ -11,7 +11,7 @@ "YoutubeAPIKey": "YouTube API Key", "YoutubeLiveID": "YouTube Live ID", "ConversationContinuityMode": "Conversation Continuity Mode (Beta)", - "ConversationContinuityModeInfo": "When there is no comment, AI tries to continue the conversation. Currently only OpenAI or Anthropic is supported.", + "ConversationContinuityModeInfo": "When there is no comment, AI tries to continue the conversation. Currently only OpenAI, Anthropic Claude, Google Gemini are supported.", "ConversationContinuityModeInfo2": "One answer calls LLM multiple times, so API usage may increase. 
Please be aware of this.", "ConversationContinuityModeInfo3": "gpt-4o, gpt-4-turbo, claude-3-opus, claude-3.5-sonnet work relatively stably.", "StatusOn": "Status: ON", @@ -107,9 +107,9 @@ "ShowControlPanelInfo": "The settings screen can be displayed by pressing Cmd + . (Mac) / Ctrl + . (Windows) .", "SlideMode": "Slide Mode", "SelectedSlideDocs": "Selected Slide Documents", - "SlideModeDescription": "This is a mode where AI automatically presents slides. It is only available when the selected AI service is OpenAI, Anthropic, or Google Gemini.", + "SlideModeDescription": "This is a mode where AI automatically presents slides. It is only available when the selected AI service is OpenAI, Anthropic Claude, or Google Gemini.", "PdfConvertLabel": "PDF Slide Conversion", - "PdfConvertDescription": "Convert PDF to slide mode data. Available only when the selected AI service is OpenAI.", + "PdfConvertDescription": "Convert PDF to slide mode data. Available only when the selected AI service is OpenAI, Anthropic Claude, or Google Gemini.", "PdfConvertFileUpload": "Select PDF file", "PdfConvertFolderName": "Save folder name", "PdfConvertModelSelect": "Select model", diff --git a/locales/ja/translation.json b/locales/ja/translation.json index e0d27355..5e557a5f 100644 --- a/locales/ja/translation.json +++ b/locales/ja/translation.json @@ -11,7 +11,7 @@ "YoutubeAPIKey": "YouTube API キー", "YoutubeLiveID": "YouTube Live ID", "ConversationContinuityMode": "会話継続モード(ベータ版)", - "ConversationContinuityModeInfo": "コメントが無いときにAIが自ら会話を継続しようとするモードです。現在OpenAI または Anthropicのみ対応しています。", + "ConversationContinuityModeInfo": "コメントが無いときにAIが自ら会話を継続しようとするモードです。現在OpenAI, Anthropic Claude, Google Geminiのみ対応しています。", "ConversationContinuityModeInfo2": "一度の回答で複数回LLMを呼び出すため、API利用料が増える可能性があります。ご注意ください。", "ConversationContinuityModeInfo3": "gpt-4o, gpt-4-turbo, claude-3-opus, claude-3.5-sonnetで比較的安定動作します。", "StatusOn": "状態:ON", @@ -108,9 +108,9 @@ "ShowControlPanelInfo": "設定画面は Cmd + . (Mac) / Ctrl + . (Windows) で表示することができます。", "SlideMode": "スライドモード", "SelectedSlideDocs": "使用するスライド", - "SlideModeDescription": "AIが自動でスライドを発表するモードです。選択しているAIサービスがOpenAIまたはAnthropicまたはGoogle Geminiの場合のみ有効です。", + "SlideModeDescription": "AIが自動でスライドを発表するモードです。選択しているAIサービスがOpenAI, Anthropic Claude, Google Geminiの場合のみ有効です。", "PdfConvertLabel": "PDFスライド変換", - "PdfConvertDescription": "PDFをスライドモード用のデータに変換します。選択しているAIサービスがOpenAIの場合のみ利用可能です。", + "PdfConvertDescription": "PDFをスライドモード用のデータに変換します。選択しているAIサービスがOpenAI, Anthropic Claude, Google Geminiの場合のみ利用可能です。", "PdfConvertFileUpload": "PDFファイルを選択", "PdfConvertFolderName": "保存フォルダ名", "PdfConvertModelSelect": "モデルを選択", diff --git a/locales/ko/translation.json b/locales/ko/translation.json index 2a9126a2..42344f76 100644 --- a/locales/ko/translation.json +++ b/locales/ko/translation.json @@ -11,7 +11,7 @@ "YoutubeAPIKey": "YouTube API 키", "YoutubeLiveID": "YouTube Live ID", "ConversationContinuityMode": "대화 지속 모드 (베타 버전)", - "ConversationContinuityModeInfo": "댓글이 없을 때 AI가 스스로 대화를 지속하려고 하는 모드입니다. 현재 OpenAI 또는 Anthropic만 지원됩니다.", + "ConversationContinuityModeInfo": "댓글이 없을 때 AI가 스스로 대화를 지속하려고 하는 모드입니다. 현재 OpenAI, Anthropic Claude, Google Gemini만 지원됩니다.", "ConversationContinuityModeInfo2": "한 번의 응답에 여러 번 LLM을 호출하기 때문에 API 사용료가 증가할 수 있습니다. 주의하십시오.", "ConversationContinuityModeInfo3": "gpt-4o, gpt-4-turbo, claude-3-opus, claude-3.5-sonnet에서 비교적 안정적으로 동작합니다.", "StatusOn": "상태: ON", @@ -107,9 +107,9 @@ "ShowControlPanelInfo": "설정 화면은 Cmd + . (Mac) / Ctrl + . 
(Windows)를 눌러 표시할 수 있습니다.", "SlideMode": "슬라이드 모드", "SelectedSlideDocs": "사용할 슬라이드", - "SlideModeDescription": "AI가 자동으로 슬라이드를 발표하는 모드입니다. 선택한 AI 서비스가 OpenAI, Anthropic 또는 Google Gemini인 경우에만 사용 가능합니다.", + "SlideModeDescription": "AI가 자동으로 슬라이드를 발표하는 모드입니다. 선택한 AI 서비스가 OpenAI, Anthropic Claude, Google Gemini인 경우에만 사용 가능합니다.", "PdfConvertLabel": "PDF 슬라이드 변환", - "PdfConvertDescription": "PDF를 슬라이드 모드 데이터로 변환합니다. 선택한 AI 서비스가 OpenAI인 경우에만 사용 가능합니다.", + "PdfConvertDescription": "PDF를 슬라이드 모드 데이터로 변환합니다. 선택한 AI 서비스가 OpenAI, Anthropic Claude, Google Gemini인 경우에만 사용 가능합니다.", "PdfConvertFileUpload": "PDF 파일 선택", "PdfConvertFolderName": "저장 폴더 이름", "PdfConvertModelSelect": "모델 선택", diff --git a/locales/zh/translation.json b/locales/zh/translation.json index e19b65a3..ffc9fd0e 100644 --- a/locales/zh/translation.json +++ b/locales/zh/translation.json @@ -11,7 +11,7 @@ "YoutubeAPIKey": "YouTube API 金鑰", "YoutubeLiveID": "YouTube 直播 ID", "ConversationContinuityMode": "會話持續模式(測試版)", - "ConversationContinuityModeInfo": "這是一個在沒有評論時,AI會自行嘗試繼續會話的模式。目前僅支援 OpenAI 或 Anthropic。", + "ConversationContinuityModeInfo": "這是一個在沒有評論時,AI會自行嘗試繼續會話的模式。目前僅支援 OpenAI, Anthropic Claude, Google Gemini。", "ConversationContinuityModeInfo2": "由於一次回答可能多次調用 LLM,因此 API 使用費用可能會增加,請注意。", "ConversationContinuityModeInfo3": "在 gpt-4o、gpt-4-turbo、claude-3-opus、claude-3.5-sonnet 上能夠比較穩定地運行。", "StatusOn": "狀態:開", @@ -107,9 +107,9 @@ "ShowControlPanelInfo": "可以通过按 Cmd + . (Mac) / Ctrl + . (Windows) 来显示设置界面。", "SlideMode": "投影片模式", "SelectedSlideDocs": "使用的投影片", - "SlideModeDescription": "這是一個 AI 自動展示投影片的模式。僅在選擇的 AI 服務為 OpenAI、Anthropic 或 Google Gemini 時有效。", + "SlideModeDescription": "這是一個 AI 自動展示投影片的模式。僅在選擇的 AI 服務為 OpenAI、Anthropic Claude或 Google Gemini 時有效。", "PdfConvertLabel": "PDF 投影片轉換", - "PdfConvertDescription": "將 PDF 轉換為投影片模式數據。僅在選擇的 AI 服務為 OpenAI 時可用。", + "PdfConvertDescription": "將 PDF 轉換為投影片模式數據。僅在選擇的 AI 服務為 OpenAI、Anthropic Claude或 Google Gemini 時可用。", "PdfConvertFileUpload": "選擇 PDF 文件", "PdfConvertFolderName": "保存文件夹名称", "PdfConvertModelSelect": "选择模型", From 847d2805c076dd73338d81eba169fbf3ee5b92fa Mon Sep 17 00:00:00 2001 From: tegnike Date: Fri, 30 Aug 2024 22:09:56 +0200 Subject: [PATCH 5/8] =?UTF-8?q?=E3=81=84=E3=81=8F=E3=81=A4=E3=81=8B?= =?UTF-8?q?=E3=81=AEAI=E3=82=B5=E3=83=BC=E3=83=93=E3=82=B9=E3=82=92?= =?UTF-8?q?=E9=81=B8=E6=8A=9E=E3=81=A7=E3=81=8D=E3=82=8B=E3=82=88=E3=81=86?= =?UTF-8?q?=E3=81=AB=E4=BF=AE=E6=AD=A3?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- locales/en/translation.json | 9 +- locales/ja/translation.json | 9 +- locales/ko/translation.json | 9 +- locales/zh/translation.json | 9 +- package-lock.json | 80 ++++ package.json | 3 + src/components/settings/modelProvider.tsx | 556 +++++++++++++++++----- src/features/chat/aiChatFactory.ts | 5 + src/features/chat/handlers.ts | 20 + src/features/constants/settings.ts | 10 + src/features/stores/settings.ts | 15 + src/pages/api/aiChat.ts | 39 +- 12 files changed, 622 insertions(+), 142 deletions(-) diff --git a/locales/en/translation.json b/locales/en/translation.json index 1e3e2c01..cc101320 100644 --- a/locales/en/translation.json +++ b/locales/en/translation.json @@ -24,14 +24,19 @@ "OpenAIAPIKeyLabel": "OpenAI API Key", "AnthropicAPIKeyLabel": "Anthropic API Key", "GoogleAPIKeyLabel": "Google Gemini API Key", + "AzureAPIKeyLabel": "Azure OpenAI API Key", + "AzureAPIURL": "Azure OpenAI API URL", "GroqAPIKeyLabel": "Groq API Key", + "CohereAPIKeyLabel": "Cohere API Key", + "MistralAIAPIKeyLabel": "MistralAI API 
Key", + "PerplexityAPIKeyLabel": "Perplexity API Key", + "FireworksAPIKeyLabel": "Fireworks API Key", "DifyAPIKeyLabel": "Dify API Key", "APIKeyInstruction": "You can obtain the API key below. Enter the obtained API key into the form.", - "ChatGPTInfo": "ChatGPT API is accessed directly from the browser.", "LocalLLMInfo": "Local LLM server must be running. Setup is as follows.", "LocalLLMInfo2": "Please enter the URL of the local LLM server (including port number) and the model name.", "GroqInfo": "Groq API is accessed directly from the browser.", - "DifyInfo": "Dify only supports chatbot type.", + "DifyInfo": "Dify only supports chatbot and agent type.", "DifyInfo2": "The length of the conversation history is dependent on the specifications of Dify.", "DifyInfo3": "Example: http://localhost:80/v1/chat-messages", "DifyInstruction": "If you are using Dify, the system prompt will not be used. Please set Dify chatbot.", diff --git a/locales/ja/translation.json b/locales/ja/translation.json index 5e557a5f..d3cc7267 100644 --- a/locales/ja/translation.json +++ b/locales/ja/translation.json @@ -24,14 +24,19 @@ "OpenAIAPIKeyLabel": "OpenAI API キー", "AnthropicAPIKeyLabel": "Anthropic API キー", "GoogleAPIKeyLabel": "Google Gemini API キー", + "AzureAPIKeyLabel": "Azure OpenAI API キー", + "AzureAPIURL": "Azure OpenAI API URL", "GroqAPIKeyLabel": "Groq API キー", + "CohereAPIKeyLabel": "Cohere API キー", + "MistralAIAPIKeyLabel": "MistralAI API キー", + "PerplexityAPIKeyLabel": "Perplexity API キー", + "FireworksAPIKeyLabel": "Fireworks API キー", "DifyAPIKeyLabel": "Dify API キー", "APIKeyInstruction": "APIキーは下記のリンクから取得できます。取得したAPIキーをフォームに入力してください。", - "ChatGPTInfo": "ChatGPT APIはブラウザから直接アクセスしています。", "LocalLLMInfo": "ローカルLLM サーバーを起動している必要があります。", "LocalLLMInfo2": "ローカルLLMのURL(ポート番号込み)とモデル名を入力してください。", "GroqInfo": "Groq APIはブラウザから直接アクセスしています。", - "DifyInfo": "Difyでは、チャットボットタイプのみ対応しています。", + "DifyInfo": "Difyでは、チャットボット または エージェントタイプのみ対応しています。", "DifyInfo2": "会話履歴の長さはDifyチャットボットの設定に依存します。", "DifyInfo3": "例:http://localhost:80/v1/chat-messages", "DifyInstruction": "Difyを使用している場合、このシステムプロンプトは使用されません。Difyチャットボットに設定してください。", diff --git a/locales/ko/translation.json b/locales/ko/translation.json index 42344f76..9a9c26da 100644 --- a/locales/ko/translation.json +++ b/locales/ko/translation.json @@ -24,14 +24,19 @@ "OpenAIAPIKeyLabel": "OpenAI API 키", "AnthropicAPIKeyLabel": "Anthropic API 키", "GoogleAPIKeyLabel": "Google Gemini API 키", + "AzureAPIKeyLabel": "Azure OpenAI API 키", + "AzureAPIURL": "Azure OpenAI API URL", "GroqAPIKeyLabel": "Groq API 키", + "CohereAPIKeyLabel": "Cohere API 키", + "MistralAIAPIKeyLabel": "MistralAI API 키", + "PerplexityAPIKeyLabel": "Perplexity API 키", + "FireworksAPIKeyLabel": "Fireworks API 키", "DifyAPIKeyLabel": "Dify API 키", "APIKeyInstruction": "API 키는 아래 링크에서 얻을 수 있습니다. 얻은 API 키를 폼에 입력하십시오.", - "ChatGPTInfo": "ChatGPT API는 브라우저에서 직접 접근하고 있습니다.", "LocalLLMInfo": "로컬 LLM 서버를 실행해야 합니다.", "LocalLLMInfo2": "로컬 LLM의 URL(포트 번호 포함)과 모델 이름을 입력하십시오.", "GroqInfo": "Groq API는 브라우저에서 직접 접근하고 있습니다.", - "DifyInfo": "Dify에서는 채팅 봇 타입만 지원됩니다.", + "DifyInfo": "Dify에서는 채팅 봇과 에이전트 타입만 지원됩니다.", "DifyInfo2": "대화 기록의 길이는 Dify 채팅봇 설정에 따라 달라집니다.", "DifyInfo3": "예: http://localhost:80/v1/chat-messages", "DifyInstruction": "Dify를 사용하는 경우, 이 시스템 프롬프트는 사용되지 않습니다. 
Dify 채팅 봇에 설정하십시오.", diff --git a/locales/zh/translation.json b/locales/zh/translation.json index ffc9fd0e..f9b6643c 100644 --- a/locales/zh/translation.json +++ b/locales/zh/translation.json @@ -9,6 +9,10 @@ "YoutubeMode": "YouTube 模式", "YoutubeInfo": "YouTube 直播 ID 是直播 ID 而不是頻道 ID", "YoutubeAPIKey": "YouTube API 金鑰", + "CohereAPIKeyLabel": "Cohere API 金鑰", + "MistralAIAPIKeyLabel": "MistralAI API 金鑰", + "PerplexityAPIKeyLabel": "Perplexity API 金鑰", + "FireworksAPIKeyLabel": "Fireworks API 金鑰", "YoutubeLiveID": "YouTube 直播 ID", "ConversationContinuityMode": "會話持續模式(測試版)", "ConversationContinuityModeInfo": "這是一個在沒有評論時,AI會自行嘗試繼續會話的模式。目前僅支援 OpenAI, Anthropic Claude, Google Gemini。", @@ -24,14 +28,15 @@ "OpenAIAPIKeyLabel": "OpenAI API 金鑰", "AnthropicAPIKeyLabel": "Anthropic API 金鑰", "GoogleAPIKeyLabel": "Google Gemini API 金鑰", + "AzureAPIKeyLabel": "Azure OpenAI API 金鑰", + "AzureAPIURL": "Azure OpenAI API URL", "GroqAPIKeyLabel": "Groq API 金鑰", "DifyAPIKeyLabel": "Dify API 金鑰", "APIKeyInstruction": "您可以在下方獲取 API 金鑰。請將獲得的 API 金鑰輸入到表單中。", - "ChatGPTInfo": "ChatGPT API 直接從瀏覽器存取。", "LocalLLMInfo": "Local LLM 伺服器必須正在運行。", "LocalLLMInfo2": "Local LLM 伺服器URL 和 模型名 必須正確填寫。", "GroqInfo": "Groq API 直接從瀏覽器存取。", - "DifyInfo": "Dify 只支援聊天機器人型態。", + "DifyInfo": "Dify 只支援聊天機器人和代理型態。", "DifyInfo2": "会話履歴的長度取決於Dify聊天機器人的規格。", "DifyInfo3": "例:http://localhost:80/v1/chat-messages", "DifyInstruction": "如果您正在使用 Dify,則此系統提示將不會被使用。請將 Dify 聊天機器人設定為系統提示。", diff --git a/package-lock.json b/package-lock.json index 5cac23f7..40c86fc2 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,7 +9,10 @@ "version": "0.1.0", "dependencies": { "@ai-sdk/anthropic": "^0.0.48", + "@ai-sdk/azure": "^0.0.32", + "@ai-sdk/cohere": "^0.0.22", "@ai-sdk/google": "^0.0.46", + "@ai-sdk/mistral": "^0.0.38", "@ai-sdk/openai": "^0.0.54", "@anthropic-ai/sdk": "^0.20.8", "@charcoal-ui/icons": "^2.6.0", @@ -83,6 +86,39 @@ "zod": "^3.0.0" } }, + "node_modules/@ai-sdk/azure": { + "version": "0.0.32", + "resolved": "https://registry.npmjs.org/@ai-sdk/azure/-/azure-0.0.32.tgz", + "integrity": "sha512-4wYFZhKvLoa4CnydQR2anv4NQeKfX0cab/YVk2xvd0bVC5qMWI1QCiJ8+T7M9S17VFOJ3HY03M8QMtr6DD5Zww==", + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/openai": "0.0.54", + "@ai-sdk/provider": "0.0.22", + "@ai-sdk/provider-utils": "1.0.17" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "zod": "^3.0.0" + } + }, + "node_modules/@ai-sdk/cohere": { + "version": "0.0.22", + "resolved": "https://registry.npmjs.org/@ai-sdk/cohere/-/cohere-0.0.22.tgz", + "integrity": "sha512-UMUOsbSf1uBOOZT76+r28DJKestluGjfHc31kgeJkQx8Pve6acgrIvH0A7mvCAO3H8TDWIO3SmOWos2+XlMBRA==", + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/provider": "0.0.22", + "@ai-sdk/provider-utils": "1.0.17" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "zod": "^3.0.0" + } + }, "node_modules/@ai-sdk/google": { "version": "0.0.46", "resolved": "https://registry.npmjs.org/@ai-sdk/google/-/google-0.0.46.tgz", @@ -100,6 +136,22 @@ "zod": "^3.0.0" } }, + "node_modules/@ai-sdk/mistral": { + "version": "0.0.38", + "resolved": "https://registry.npmjs.org/@ai-sdk/mistral/-/mistral-0.0.38.tgz", + "integrity": "sha512-eKPWcEGJzN0/NaeStWUBtQWxqiT2GBrKmZWfPxVv6EeXfAno3g3Q63xIhOvoqAW0S7Td62hjcePZ7QvzqmXSmg==", + "license": "Apache-2.0", + "dependencies": { + "@ai-sdk/provider": "0.0.22", + "@ai-sdk/provider-utils": "1.0.17" + }, + "engines": { + "node": ">=18" + }, + "peerDependencies": { + "zod": "^3.0.0" + } + }, "node_modules/@ai-sdk/openai": { "version": 
"0.0.54", "resolved": "https://registry.npmjs.org/@ai-sdk/openai/-/openai-0.0.54.tgz", @@ -9383,6 +9435,25 @@ "@ai-sdk/provider-utils": "1.0.17" } }, + "@ai-sdk/azure": { + "version": "0.0.32", + "resolved": "https://registry.npmjs.org/@ai-sdk/azure/-/azure-0.0.32.tgz", + "integrity": "sha512-4wYFZhKvLoa4CnydQR2anv4NQeKfX0cab/YVk2xvd0bVC5qMWI1QCiJ8+T7M9S17VFOJ3HY03M8QMtr6DD5Zww==", + "requires": { + "@ai-sdk/openai": "0.0.54", + "@ai-sdk/provider": "0.0.22", + "@ai-sdk/provider-utils": "1.0.17" + } + }, + "@ai-sdk/cohere": { + "version": "0.0.22", + "resolved": "https://registry.npmjs.org/@ai-sdk/cohere/-/cohere-0.0.22.tgz", + "integrity": "sha512-UMUOsbSf1uBOOZT76+r28DJKestluGjfHc31kgeJkQx8Pve6acgrIvH0A7mvCAO3H8TDWIO3SmOWos2+XlMBRA==", + "requires": { + "@ai-sdk/provider": "0.0.22", + "@ai-sdk/provider-utils": "1.0.17" + } + }, "@ai-sdk/google": { "version": "0.0.46", "resolved": "https://registry.npmjs.org/@ai-sdk/google/-/google-0.0.46.tgz", @@ -9393,6 +9464,15 @@ "json-schema": "0.4.0" } }, + "@ai-sdk/mistral": { + "version": "0.0.38", + "resolved": "https://registry.npmjs.org/@ai-sdk/mistral/-/mistral-0.0.38.tgz", + "integrity": "sha512-eKPWcEGJzN0/NaeStWUBtQWxqiT2GBrKmZWfPxVv6EeXfAno3g3Q63xIhOvoqAW0S7Td62hjcePZ7QvzqmXSmg==", + "requires": { + "@ai-sdk/provider": "0.0.22", + "@ai-sdk/provider-utils": "1.0.17" + } + }, "@ai-sdk/openai": { "version": "0.0.54", "resolved": "https://registry.npmjs.org/@ai-sdk/openai/-/openai-0.0.54.tgz", diff --git a/package.json b/package.json index d6e96d42..f9bfdf27 100644 --- a/package.json +++ b/package.json @@ -16,7 +16,10 @@ }, "dependencies": { "@ai-sdk/anthropic": "^0.0.48", + "@ai-sdk/azure": "^0.0.32", + "@ai-sdk/cohere": "^0.0.22", "@ai-sdk/google": "^0.0.46", + "@ai-sdk/mistral": "^0.0.38", "@ai-sdk/openai": "^0.0.54", "@anthropic-ai/sdk": "^0.20.8", "@charcoal-ui/icons": "^2.6.0", diff --git a/src/components/settings/modelProvider.tsx b/src/components/settings/modelProvider.tsx index b22e99aa..cbc510fa 100644 --- a/src/components/settings/modelProvider.tsx +++ b/src/components/settings/modelProvider.tsx @@ -15,7 +15,12 @@ const ModelProvider = () => { const openaiKey = settingsStore((s) => s.openaiKey) const anthropicKey = settingsStore((s) => s.anthropicKey) const googleKey = settingsStore((s) => s.googleKey) + const azureKey = settingsStore((s) => s.azureKey) const groqKey = settingsStore((s) => s.groqKey) + const cohereKey = settingsStore((s) => s.cohereKey) + const mistralaiKey = settingsStore((s) => s.mistralaiKey) + const perplexityKey = settingsStore((s) => s.perplexityKey) + const fireworksKey = settingsStore((s) => s.fireworksKey) const difyKey = settingsStore((s) => s.difyKey) const selectAIService = settingsStore((s) => s.selectAIService) @@ -33,7 +38,12 @@ const ModelProvider = () => { openai: 'gpt-4o', anthropic: 'claude-3-5-sonnet-20240620', google: 'gemini-1.5-pro', + azure: '', groq: 'gemma-7b-it', + cohere: 'command-r-plus', + mistralai: 'mistral-large-latest', + perplexity: 'llama-3-sonar-large-32k-online', + fireworks: 'accounts/fireworks/models/firefunction-v2', localLlm: '', dify: '', } @@ -77,7 +87,12 @@ const ModelProvider = () => { + + + + + @@ -86,28 +101,29 @@ const ModelProvider = () => { {(() => { if (selectAIService === 'openai') { return ( -
-
- {t('OpenAIAPIKeyLabel')} -
- - settingsStore.setState({ openaiKey: e.target.value }) - } - /> -
- {t('APIKeyInstruction')} -
- +
+
+ {t('OpenAIAPIKeyLabel')} +
+
+ {t('APIKeyInstruction')} +
+ +
+ + settingsStore.setState({ openaiKey: e.target.value }) + } />
-
{t('ChatGPTInfo')}
{t('SelectModel')} @@ -136,27 +152,29 @@ const ModelProvider = () => {
-
+ ) } else if (selectAIService === 'anthropic') { return ( -
-
- {t('AnthropicAPIKeyLabel')} -
- - settingsStore.setState({ anthropicKey: e.target.value }) - } - /> -
- {t('APIKeyInstruction')} -
- + <> +
+
+ {t('AnthropicAPIKeyLabel')} +
+
+ {t('APIKeyInstruction')} +
+ +
+ + settingsStore.setState({ anthropicKey: e.target.value }) + } + />
@@ -185,29 +203,31 @@ const ModelProvider = () => {
-
+ ) } else if (selectAIService === 'google') { return ( -
-
- {t('GoogleAPIKeyLabel')} -
- - settingsStore.setState({ googleKey: e.target.value }) - } - /> -
- {t('APIKeyInstruction')} -
- +
+
+ {t('GoogleAPIKeyLabel')} +
+
+ {t('APIKeyInstruction')} +
+ +
+ + settingsStore.setState({ googleKey: e.target.value }) + } />
@@ -240,29 +260,76 @@ const ModelProvider = () => {
-
+ ) - } else if (selectAIService === 'groq') { + } else if (selectAIService === 'azure') { return ( -
-
- {t('GroqAPIKeyLabel')} + <> +
+
+ {t('AzureAPIKeyLabel')} +
+
+ {t('APIKeyInstruction')} +
+ +
+ + settingsStore.setState({ azureKey: e.target.value }) + } + />
- - settingsStore.setState({ groqKey: e.target.value }) - } - /> -
- {t('APIKeyInstruction')} -
- +
+ {t('AzureAPIURL')} +
+
+ ex. + https://RESOURCE_NAME.openai.azure.com/openai/deployments/DEPLOYMENT_NAME/completions?api-version=2024-06-01 +
+ + settingsStore.setState({ selectAIModel: e.target.value }) + } + /> +
+ + ) + } else if (selectAIService === 'groq') { + return ( + <> +
+
+ {t('GroqAPIKeyLabel')} +
+
+ {t('APIKeyInstruction')} +
+ +
+ + settingsStore.setState({ groqKey: e.target.value }) + } />
@@ -284,69 +351,296 @@ const ModelProvider = () => {
-
+ ) - } else if (selectAIService === 'localLlm') { + } else if (selectAIService === 'cohere') { return ( -
-
- {t('LocalLLMInfo')} -
- ex. Ollama:{' '} - +
+
+ {t('CohereAPIKeyLabel')} +
+
+ {t('APIKeyInstruction')} +
+ +
+ + settingsStore.setState({ cohereKey: e.target.value }) + } />
-
- {t('LocalLLMInfo2')} -
- ex. Ollama: http://localhost:11434/v1/chat/completions +
+
+ {t('SelectModel')} +
+
-
- {t('EnterURL')} + + ) + } else if (selectAIService === 'mistralai') { + return ( + <> +
+
+ {t('MistralAIAPIKeyLabel')} +
+
+ {t('APIKeyInstruction')} +
+ +
+ + settingsStore.setState({ mistralaiKey: e.target.value }) + } + />
- - settingsStore.setState({ localLlmUrl: e.target.value }) - } - /> -
- {t('SelectModel')} +
+
+ {t('SelectModel')} +
+ +
+ + ) + } else if (selectAIService === 'perplexity') { + return ( + <> +
+
+ {t('PerplexityAPIKeyLabel')} +
+
+ {t('APIKeyInstruction')} +
+ +
+ + settingsStore.setState({ perplexityKey: e.target.value }) + } + /> +
+
+
+ {t('SelectModel')} +
+
- - settingsStore.setState({ - selectAIModel: e.target.value, - }) - } - /> -
+ + ) + } else if (selectAIService === 'fireworks') { + return ( + <> +
+
+ {t('FireworksAPIKeyLabel')} +
+
+ {t('APIKeyInstruction')} +
+ +
+ + settingsStore.setState({ fireworksKey: e.target.value }) + } + /> +
+
+
+ {t('SelectModel')} +
+ +
+ + ) + } else if (selectAIService === 'localLlm') { + return ( + <> +
+
+ {t('LocalLLMInfo')} +
+ ex. Ollama:{' '} + +
+
+ {t('LocalLLMInfo2')} +
+ ex. Ollama: http://localhost:11434/v1/chat/completions +
+
+ {t('EnterURL')} +
+ + settingsStore.setState({ localLlmUrl: e.target.value }) + } + /> +
+
+
+ {t('SelectModel')} +
+ + settingsStore.setState({ + selectAIModel: e.target.value, + }) + } + /> +
+ ) } else if (selectAIService === 'dify') { return ( -
-
{t('DifyInfo')}
-
- {t('DifyAPIKeyLabel')} + <> +
+
{t('DifyInfo')}
+
+ {t('DifyAPIKeyLabel')} +
+ + settingsStore.setState({ difyKey: e.target.value }) + } + />
- - settingsStore.setState({ difyKey: e.target.value }) - } - />
{t('EnterURL')} @@ -362,7 +656,7 @@ const ModelProvider = () => { } />
-
+ ) } })()} diff --git a/src/features/chat/aiChatFactory.ts b/src/features/chat/aiChatFactory.ts index 7bd4bc22..c37e374f 100644 --- a/src/features/chat/aiChatFactory.ts +++ b/src/features/chat/aiChatFactory.ts @@ -15,7 +15,12 @@ export async function getAIChatResponseStream( case 'openai': case 'anthropic': case 'google': + case 'azure': case 'groq': + case 'cohere': + case 'mistralai': + case 'perplexity': + case 'fireworks': return getVercelAIChatResponseStream( messages, ss[`${service}Key`] || diff --git a/src/features/chat/handlers.ts b/src/features/chat/handlers.ts index 1db47608..7e8d9dc7 100644 --- a/src/features/chat/handlers.ts +++ b/src/features/chat/handlers.ts @@ -414,10 +414,30 @@ export const handleSendChatFn = !ss.googleKey && !process.env.NEXT_PUBLIC_GOOGLE_KEY, + ss.selectAIService === 'azure' && + !ss.azureKey && + !process.env.NEXT_PUBLIC_AZURE_KEY, + ss.selectAIService === 'groq' && !ss.groqKey && !process.env.NEXT_PUBLIC_GROQ_KEY, + ss.selectAIService === 'cohere' && + !ss.cohereKey && + !process.env.NEXT_PUBLIC_COHERE_KEY, + + ss.selectAIService === 'mistralai' && + !ss.mistralaiKey && + !process.env.NEXT_PUBLIC_MISTRALAI_KEY, + + ss.selectAIService === 'perplexity' && + !ss.perplexityKey && + !process.env.NEXT_PUBLIC_PERPLEXITY_KEY, + + ss.selectAIService === 'fireworks' && + !ss.fireworksKey && + !process.env.NEXT_PUBLIC_FIREWORKS_KEY, + ss.selectAIService === 'dify' && !ss.difyKey && !process.env.NEXT_PUBLIC_DIFY_KEY, diff --git a/src/features/constants/settings.ts b/src/features/constants/settings.ts index 59242653..a9a9ab13 100644 --- a/src/features/constants/settings.ts +++ b/src/features/constants/settings.ts @@ -3,7 +3,12 @@ export type AIService = | 'anthropic' | 'google' | 'localLlm' + | 'azure' | 'groq' + | 'cohere' + | 'mistralai' + | 'perplexity' + | 'fireworks' | 'dify' export interface AIServiceConfig { @@ -11,7 +16,12 @@ export interface AIServiceConfig { anthropic: { key: string; model: string } google: { key: string; model: string } localLlm: { url: string; model: string } + azure: { key: string; model: string } groq: { key: string; model: string } + cohere: { key: string; model: string } + mistralai: { key: string; model: string } + perplexity: { key: string; model: string } + fireworks: { key: string; model: string } dify: { key: string url: string diff --git a/src/features/stores/settings.ts b/src/features/stores/settings.ts index 619e7c37..84fd1e79 100644 --- a/src/features/stores/settings.ts +++ b/src/features/stores/settings.ts @@ -21,8 +21,13 @@ interface APIKeys { openaiKey: string anthropicKey: string googleKey: string + azureKey: string groqKey: string difyKey: string + cohereKey: string + mistralaiKey: string + perplexityKey: string + fireworksKey: string koeiromapKey: string youtubeApiKey: string elevenlabsApiKey: string @@ -91,7 +96,12 @@ const settingsStore = create()( openaiKey: '', anthropicKey: '', googleKey: '', + azureKey: '', groqKey: '', + cohereKey: '', + mistralaiKey: '', + perplexityKey: '', + fireworksKey: '', difyKey: '', koeiromapKey: '', youtubeApiKey: '', @@ -150,7 +160,12 @@ const settingsStore = create()( openaiKey: state.openaiKey, anthropicKey: state.anthropicKey, googleKey: state.googleKey, + azureKey: state.azureKey, groqKey: state.groqKey, + cohereKey: state.cohereKey, + mistralaiKey: state.mistralaiKey, + perplexityKey: state.perplexityKey, + fireworksKey: state.fireworksKey, difyKey: state.difyKey, koeiromapKey: state.koeiromapKey, youtubeApiKey: state.youtubeApiKey, diff --git a/src/pages/api/aiChat.ts 
b/src/pages/api/aiChat.ts index 477469d8..06bc3fe9 100644 --- a/src/pages/api/aiChat.ts +++ b/src/pages/api/aiChat.ts @@ -1,10 +1,22 @@ import { createOpenAI } from '@ai-sdk/openai' import { createAnthropic } from '@ai-sdk/anthropic' import { createGoogleGenerativeAI } from '@ai-sdk/google' +import { createCohere } from '@ai-sdk/cohere' +import { createMistral } from '@ai-sdk/mistral' +import { createAzure } from '@ai-sdk/azure' import { streamText, generateText } from 'ai' import { NextRequest } from 'next/server' -type AIServiceKey = 'openai' | 'anthropic' | 'google' | 'groq' +type AIServiceKey = + | 'openai' + | 'anthropic' + | 'google' + | 'azure' + | 'groq' + | 'cohere' + | 'mistralai' + | 'perplexity' + | 'fireworks' type AIServiceConfig = Record any> // Allow streaming responses up to 30 seconds @@ -27,8 +39,23 @@ export default async function handler(req: NextRequest) { openai: () => createOpenAI({ apiKey }), anthropic: () => createAnthropic({ apiKey }), google: () => createGoogleGenerativeAI({ apiKey }), + azure: () => + createAzure({ + resourceName: + model.match(/https:\/\/(.+?)\.openai\.azure\.com/)?.[1] || '', + apiKey, + }), groq: () => createOpenAI({ baseURL: 'https://api.groq.com/openai/v1', apiKey }), + cohere: () => createCohere({ apiKey }), + mistralai: () => createMistral({ apiKey }), + perplexity: () => + createOpenAI({ baseURL: 'https://api.perplexity.ai/', apiKey }), + fireworks: () => + createOpenAI({ + baseURL: 'https://api.fireworks.ai/inference/v1', + apiKey, + }), } const aiServiceInstance = aiServiceConfig[aiService as AIServiceKey] @@ -40,16 +67,22 @@ export default async function handler(req: NextRequest) { } const instance = aiServiceInstance() - const modifiedMessages = modifyMessages(aiService, messages) + let modifiedModel = model + if (aiService === 'azure') { + modifiedModel = + model.match(/\/deployments\/(.+?)\/completions/)?.[1] || model + } if (stream) { try { const result = await streamText({ - model: instance(model), + model: instance(modifiedModel), messages: modifiedMessages, }) + console.log(result) + return result.toDataStreamResponse() } catch (error) { console.error('Error in OpenAI API call:', error) From e98159ead22620364d04c6e8b64538704e3c77ce Mon Sep 17 00:00:00 2001 From: tegnike Date: Fri, 30 Aug 2024 22:56:37 +0200 Subject: [PATCH 6/8] =?UTF-8?q?=E3=82=B3=E3=83=BC=E3=83=89=E3=83=96?= =?UTF-8?q?=E3=83=AD=E3=83=83=E3=82=AF=E3=81=8C=E6=AD=A3=E3=81=97=E3=81=8F?= =?UTF-8?q?=E9=80=81=E4=BF=A1=E3=81=95=E3=82=8C=E3=82=8B=E3=82=88=E3=81=86?= =?UTF-8?q?=E3=81=AB=E3=83=AA=E3=83=95=E3=82=A1=E3=82=AF=E3=82=BF=E3=83=AA?= =?UTF-8?q?=E3=83=B3=E3=82=B0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/features/chat/vercelAIChat.ts | 11 ++++++---- src/pages/api/aiChat.ts | 35 +++++++++++++++++++++---------- 2 files changed, 31 insertions(+), 15 deletions(-) diff --git a/src/features/chat/vercelAIChat.ts b/src/features/chat/vercelAIChat.ts index 087d9d16..8b2c3c44 100644 --- a/src/features/chat/vercelAIChat.ts +++ b/src/features/chat/vercelAIChat.ts @@ -77,19 +77,22 @@ export async function getVercelAIChatResponseStream( const reader = response.body.getReader() const decoder = new TextDecoder('utf-8') + let buffer = '' try { while (true) { const { done, value } = await reader.read() if (done) break - const chunk = decoder.decode(value) - const lines = chunk.split('\n') + buffer += decoder.decode(value, { stream: true }) + const lines = buffer.split('\n') + buffer = lines.pop() || '' for (const line of 
lines) { if (line.startsWith('0:')) { - const content = line.substring(2).trim().replace(/^"|"$/g, '') - controller.enqueue(content) + const content = line.substring(2).trim() + const decodedContent = JSON.parse(content) + controller.enqueue(decodedContent) } } } diff --git a/src/pages/api/aiChat.ts b/src/pages/api/aiChat.ts index 06bc3fe9..a90bcefc 100644 --- a/src/pages/api/aiChat.ts +++ b/src/pages/api/aiChat.ts @@ -1,3 +1,4 @@ +import { Message } from '@/features/messages/messages' import { createOpenAI } from '@ai-sdk/openai' import { createAnthropic } from '@ai-sdk/anthropic' import { createGoogleGenerativeAI } from '@ai-sdk/google' @@ -101,15 +102,15 @@ export default async function handler(req: NextRequest) { } } -function modifyMessages(aiService: string, messages: any[]) { - if (aiService === 'anthropic') { +function modifyMessages(aiService: string, messages: Message[]) { + if (aiService === 'anthropic' || aiService === 'perplexity') { return modifyAnthropicMessages(messages) } return messages } // Anthropicのメッセージを修正する -function modifyAnthropicMessages(messages: any[]) { +function modifyAnthropicMessages(messages: Message[]) { const systemMessage = messages.find((message) => message.role === 'system') let userMessages = messages .filter((message) => message.role !== 'system') @@ -125,23 +126,35 @@ function modifyAnthropicMessages(messages: any[]) { } // 同じroleのメッセージを結合する -function consolidateMessages(messages: any[]) { - const consolidated: any[] = [] +function consolidateMessages(messages: Message[]) { + const consolidated: Message[] = [] let lastRole: string | null = null - let combinedContent = '' + let combinedContent: + | string + | [ + { + type: 'text' + text: string + }, + { + type: 'image' + image: string + }, + ] messages.forEach((message, index) => { if (message.role === lastRole) { - combinedContent += '\n' + message.content + if (typeof combinedContent === 'string') { + combinedContent += '\n' + message.content + } else { + combinedContent[0].text += '\n' + message.content + } } else { if (lastRole !== null) { consolidated.push({ role: lastRole, content: combinedContent }) } lastRole = message.role - combinedContent = - typeof message.content === 'string' - ? 
message.content - : message.content[0].text + combinedContent = message.content } if (index === messages.length - 1) { From d663dbb83bcad2e4a0b1eb0b3f750978879a60a1 Mon Sep 17 00:00:00 2001 From: tegnike Date: Sat, 31 Aug 2024 18:25:13 +0200 Subject: [PATCH 7/8] =?UTF-8?q?=E3=82=B9=E3=83=A9=E3=82=A4=E3=83=89?= =?UTF-8?q?=E3=83=A2=E3=83=BC=E3=83=89=E3=81=AE=E4=B8=8D=E5=85=B7=E5=90=88?= =?UTF-8?q?=E8=AA=BF=E6=95=B4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/components/settings/slideConvert.tsx | 21 ++- src/pages/api/aiChat.ts | 21 ++- src/pages/api/convertSlide.ts | 201 ++++++++++++++++++----- 3 files changed, 194 insertions(+), 49 deletions(-) diff --git a/src/components/settings/slideConvert.tsx b/src/components/settings/slideConvert.tsx index a7d58ff4..cce6caa5 100644 --- a/src/components/settings/slideConvert.tsx +++ b/src/components/settings/slideConvert.tsx @@ -1,4 +1,4 @@ -import React, { useState } from 'react' +import React, { useState, useEffect } from 'react' import { useTranslation } from 'react-i18next' import settingsStore, { multiModalAIServiceKey, @@ -17,7 +17,24 @@ const SlideConvert: React.FC = ({ onFolderUpdate }) => { const aiService = settingsStore.getState() .selectAIService as multiModalAIServiceKey - const [model, setModel] = useState('gpt-4o') + const [model, setModel] = useState('') + + useEffect(() => { + switch (aiService) { + case 'openai': + setModel('gpt-4o') + break + case 'anthropic': + setModel('claude-3-5-sonnet-20240620') + break + case 'google': + setModel('gemini-1.5-flash-latest') + break + default: + setModel('') + } + }, [aiService]) + const [isLoading, setIsLoading] = useState(false) const selectLanguage = settingsStore.getState().selectLanguage const [selectedFileName, setSelectedFileName] = useState('') diff --git a/src/pages/api/aiChat.ts b/src/pages/api/aiChat.ts index a90bcefc..64f2a10b 100644 --- a/src/pages/api/aiChat.ts +++ b/src/pages/api/aiChat.ts @@ -5,7 +5,7 @@ import { createGoogleGenerativeAI } from '@ai-sdk/google' import { createCohere } from '@ai-sdk/cohere' import { createMistral } from '@ai-sdk/mistral' import { createAzure } from '@ai-sdk/azure' -import { streamText, generateText } from 'ai' +import { streamText, generateText, CoreMessage } from 'ai' import { NextRequest } from 'next/server' type AIServiceKey = @@ -68,7 +68,7 @@ export default async function handler(req: NextRequest) { } const instance = aiServiceInstance() - const modifiedMessages = modifyMessages(aiService, messages) + const modifiedMessages: Message[] = modifyMessages(aiService, messages) let modifiedModel = model if (aiService === 'azure') { modifiedModel = @@ -79,7 +79,7 @@ export default async function handler(req: NextRequest) { try { const result = await streamText({ model: instance(modifiedModel), - messages: modifiedMessages, + messages: modifiedMessages as CoreMessage[], }) console.log(result) @@ -95,14 +95,14 @@ export default async function handler(req: NextRequest) { } else { const result = await generateText({ model: instance(model), - messages: modifiedMessages, + messages: modifiedMessages as CoreMessage[], }) return result } } -function modifyMessages(aiService: string, messages: Message[]) { +function modifyMessages(aiService: string, messages: Message[]): Message[] { if (aiService === 'anthropic' || aiService === 'perplexity') { return modifyAnthropicMessages(messages) } @@ -110,8 +110,10 @@ function modifyMessages(aiService: string, messages: Message[]) { } // Anthropicのメッセージを修正する 
-function modifyAnthropicMessages(messages: Message[]) { - const systemMessage = messages.find((message) => message.role === 'system') +function modifyAnthropicMessages(messages: Message[]): Message[] { + const systemMessage: Message | undefined = messages.find( + (message) => message.role === 'system' + ) let userMessages = messages .filter((message) => message.role !== 'system') .filter((message) => message.content !== '') @@ -122,7 +124,10 @@ function modifyAnthropicMessages(messages: Message[]) { userMessages.shift() } - return [systemMessage, ...userMessages] + const result: Message[] = systemMessage + ? [systemMessage, ...userMessages] + : userMessages + return result } // 同じroleのメッセージを結合する diff --git a/src/pages/api/convertSlide.ts b/src/pages/api/convertSlide.ts index 4b38271d..5097538e 100644 --- a/src/pages/api/convertSlide.ts +++ b/src/pages/api/convertSlide.ts @@ -4,11 +4,11 @@ import fs from 'fs' import path from 'path' import { createCanvas } from 'canvas' import * as pdfjsLib from 'pdfjs-dist/legacy/build/pdf.mjs' - import { createOpenAI } from '@ai-sdk/openai' import { createAnthropic } from '@ai-sdk/anthropic' import { createGoogleGenerativeAI } from '@ai-sdk/google' import { generateObject } from 'ai' +import { z } from 'zod' import { multiModalAIServiceKey } from '@/features/stores/settings' @@ -20,11 +20,17 @@ export const config = { }, } +export const schema = z.object({ + line: z.string(), + notes: z.string(), + page: z.number().optional(), +}) + const systemPrompt = `You are a presentation expert. Given an image of a slide, please create a script that is easy to understand for first-time listeners. Please follow these constraints: - No need for opening and closing greetings - Create the script in {{language}} - - If the language is Japanese, use katakana only for parts that use the alphabet + - If the language is Japanese, use hiragana or katakana instead of alphabet words - Do not include line breaks - The script for each slide should be no longer than about 60 seconds of speech - The output format should follow the JSON format below. Do not provide any response other than JSON @@ -36,6 +42,72 @@ const systemPrompt = `You are a presentation expert. Given an image of a slide, - Do not include any links in either the "line" or "notes" fields ` +const systemPromptForAnthropic = `You are an AI assistant tasked with creating a presentation script based on an image of a slide. Your goal is to produce a script that is easy to understand for first-time listeners, along with supporting notes, all in a specific JSON format. Follow these instructions carefully: + +1. You must follow the below constraints: + You must create the script in {{language}} + You will be provided with an image of a presentation slide that you will analyze to create the script. + +2. Begin by carefully analyzing the slide image. Look for: + - The main title or topic + - Key points or bullet points + - Any graphs, charts, or visual elements + - Important numbers or statistics + - Overall theme or message of the slide + +3. Create a script based on the slide content that: + - Is easy to understand for first-time listeners + - Can be spoken in approximately 60 seconds + - Does not include opening or closing greetings + - Explains the main points of the slide clearly and concisely + - Describes any visual elements if they are crucial to understanding the content + +4. If the specified language is Japanese, use hiragana or katakana instead of alphabet words + +5. 
Prepare additional notes that couldn't be included in the main script due to time constraints or complexity. These notes should provide extra context, explanations, or examples that support the main script. + +6. Format your response strictly as a JSON object with two fields: + - "line": Contains the main script (string) + - "notes": Contains the supporting notes (string) + +7. Ensure that: + - The script does not include line breaks + - Neither the "line" nor "notes" fields contain any links + - The entire response is valid JSON + +Here's an example of a good response format: + +{ + "line": "This slide showcases our company's revenue growth over the past five years. We've seen a steady increase from $1 million in 2018 to $5 million in 2022, representing a 400% growth. The graph clearly illustrates this upward trend, with the steepest rise occurring between 2020 and 2021.", + "notes": "Key factors contributing to growth: 1. Launch of new product line in 2020. 2. Expansion into international markets in 2021. 3. Improved customer retention strategies. Consider discussing challenges faced during COVID-19 pandemic and how they were overcome." +} + + +Here's an example of a bad response format: + +{ + "line": "Hello everyone! Today we'll be discussing our company's revenue growth. + + As you can see from the slide, our revenue has increased significantly over the past five years. + + In 2018, we started at $1 million, and by 2022, we reached $5 million. That's an impressive 400% growth! + + The graph clearly shows this upward trend. Notice how the line gets steeper between 2020 and 2021? That was a particularly good year for us. + + Thank you for your attention!", + "notes": "For more information, visit our website at www.ourcompany.com" +} + + +Remember: +- Do not include any content outside of the JSON structure +- Ensure the script ("line") can be spoken in about 60 seconds +- Do not include links in either field +- Avoid line breaks in the script +- Focus on creating clear, concise content that first-time listeners can easily understand + +Now, analyze the provided slide image and create the appropriate JSON response.` + async function convertPdfToImages(pdfBuffer: Buffer): Promise { // PDFファイルを読み込む const pdfData = new Uint8Array(pdfBuffer) @@ -102,30 +174,66 @@ async function createSlideLine( const instance = aiServiceInstance() - const response = await generateObject({ - model: instance(model), - messages: [ - { - role: 'system', - content: `${systemPrompt.replace('{{language}}', selectLanguage)}\n${additionalPrompt}`, - }, - { - role: 'user', - content: [ + let response: any + try { + if (aiService == 'anthropic') { + response = await generateObject({ + model: instance(model), + messages: [ { - type: 'text', - text: '', + role: 'system', + content: `${systemPromptForAnthropic.replace('{{language}}', selectLanguage)}\n${additionalPrompt}`, }, { - type: 'image', - image: `${imageBase64}`, + role: 'user', + content: [ + { + type: 'text', + text: 'Describe the image in detail.', + }, + { + type: 'image', + image: `${imageBase64}`, + }, + ], }, ], - }, - ], - output: 'no-schema', - mode: 'json', - }) + schema: schema, + }) + } else { + response = await generateObject({ + model: instance(model), + messages: [ + { + role: 'system', + content: `${systemPrompt.replace('{{language}}', selectLanguage)}\n${additionalPrompt}`, + }, + { + role: 'user', + content: [ + { + type: 'text', + text: 'Describe the image in detail.', + }, + { + type: 'image', + image: `${imageBase64}`, + }, + ], + }, + ], + 
output: 'no-schema', + mode: 'json', + }) + } + } catch (error) { + console.error('AI service request error:', error) + throw new Error(`Failed to request AI service: ${error}`) + } + + if (!response || !response.object) { + throw new Error('Invalid response from AI service') + } return response.object as unknown as SlideLineResponse } @@ -135,7 +243,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse) { form.parse(req, async (err, fields, files) => { if (err) { - res.status(500).send('Form parse error') + res.status(500).json({ error: 'Form parse error' }) return } @@ -183,21 +291,30 @@ async function handler(req: NextApiRequest, res: NextApiResponse) { for (let i = 0; i < images.length; i++) { const imgBase64 = images[i] if (aiService && apiKey && model) { - const slideLine = await createSlideLine( - imgBase64, - apiKey, - aiService, - model, - language, - previousResult - ) - slideLine.page = i // ページ番号を追加 - scriptList.push(slideLine) - console.log(`=== slideLine ${i} ===`) - console.log(slideLine.line) - previousResult = slideLine.line + try { + const slideLine = await createSlideLine( + imgBase64, + apiKey, + aiService, + model, + language, + previousResult + ) + slideLine.page = i // ページ番号を追加 + scriptList.push(slideLine) + console.log(`=== slideLine ${i} ===`) + console.log(slideLine.line) + previousResult = slideLine.line + } catch (error) { + console.error(`Error processing slide ${i}:`, error) + res.status(500).json({ error: `Error processing slide ${i}` }) + return + } } else { - throw new Error('API Key and Model must not be undefined') + res + .status(500) + .json({ error: 'API Key and Model must not be undefined' }) + return } // Markdownコンテンツの形成 @@ -207,10 +324,16 @@ async function handler(req: NextApiRequest, res: NextApiResponse) { console.log('end convert') // MarkdownファイルとJSONファイルを保存 - fs.writeFileSync(markdownPath, markdownContent) - fs.writeFileSync(jsonPath, JSON.stringify(scriptList, null, 2)) + try { + fs.writeFileSync(markdownPath, markdownContent) + fs.writeFileSync(jsonPath, JSON.stringify(scriptList, null, 2)) + } catch (error) { + console.error('Error occurred while saving files:', error) + res.status(500).json({ error: `Failed to save files: ${error}` }) + return + } - res.status(200).json({ message: 'PDFが変換されました' }) + res.status(200).json({ message: 'PDF has been converted' }) }) } From a7b180666cb6717cd98163833b52e9cd4af49685 Mon Sep 17 00:00:00 2001 From: tegnike Date: Sat, 31 Aug 2024 19:08:23 +0200 Subject: [PATCH 8/8] =?UTF-8?q?Youtube=E3=83=A2=E3=83=BC=E3=83=89=E3=81=AE?= =?UTF-8?q?=E4=B8=8D=E5=85=B7=E5=90=88=E8=AA=BF=E6=95=B4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/components/settings/youtube.tsx | 3 +++ src/features/chat/handlers.ts | 2 -- src/features/chat/vercelAIChat.ts | 2 +- src/features/slide/slideAIHelpers.ts | 2 +- src/features/youtube/conversationContinuityFunctions.ts | 8 ++++---- src/pages/api/aiChat.ts | 7 ++++--- 6 files changed, 13 insertions(+), 11 deletions(-) diff --git a/src/components/settings/youtube.tsx b/src/components/settings/youtube.tsx index 87b817f7..fc058198 100644 --- a/src/components/settings/youtube.tsx +++ b/src/components/settings/youtube.tsx @@ -3,6 +3,7 @@ import { useTranslation } from 'react-i18next' import homeStore from '@/features/stores/home' import menuStore from '@/features/stores/menu' import settingsStore from '@/features/stores/settings' +import slideStore from '@/features/stores/slide' import { TextButton } from '../textButton' 
import { multiModalAIServices } from '@/features/stores/settings' @@ -26,6 +27,8 @@ const YouTube = () => { if (youtubeMode) { homeStore.setState({ modalImage: '' }) menuStore.setState({ showWebcam: false }) + settingsStore.setState({ slideMode: false }) + slideStore.setState({ isPlaying: false }) } else { settingsStore.setState({ youtubePlaying: false }) } diff --git a/src/features/chat/handlers.ts b/src/features/chat/handlers.ts index 7e8d9dc7..04628851 100644 --- a/src/features/chat/handlers.ts +++ b/src/features/chat/handlers.ts @@ -168,8 +168,6 @@ export const processAIResponse = async ( try { while (true) { const { done, value } = await reader.read() - console.log(done) - console.log(value) if (done && receivedMessage.length === 0) break if (value) receivedMessage += value diff --git a/src/features/chat/vercelAIChat.ts b/src/features/chat/vercelAIChat.ts index 8b2c3c44..c00e26f5 100644 --- a/src/features/chat/vercelAIChat.ts +++ b/src/features/chat/vercelAIChat.ts @@ -34,7 +34,7 @@ export async function getVercelAIChatResponse( } const data = await response.json() - return { message: data.message } + return { text: data.text } } catch (error) { console.error(`Error fetching ${aiService} API response:`, error) throw error diff --git a/src/features/slide/slideAIHelpers.ts b/src/features/slide/slideAIHelpers.ts index fd18e746..1d41e877 100644 --- a/src/features/slide/slideAIHelpers.ts +++ b/src/features/slide/slideAIHelpers.ts @@ -70,5 +70,5 @@ Based on the user's comment and the content of both the script document and supp apiKey, ss.selectAIModel ) - return response.message + return response.text } diff --git a/src/features/youtube/conversationContinuityFunctions.ts b/src/features/youtube/conversationContinuityFunctions.ts index 6ab1fee8..388d6119 100644 --- a/src/features/youtube/conversationContinuityFunctions.ts +++ b/src/features/youtube/conversationContinuityFunctions.ts @@ -174,7 +174,7 @@ ${lastTenMessages} const response = await fetchAIResponse(queryMessages) - return response.message + return response.text } /** @@ -226,7 +226,7 @@ export const getAnotherTopic = async (messages: Message[]): Promise => { const response = await fetchAIResponse(queryMessages) - return response.message + return response.text } /** @@ -334,8 +334,8 @@ B: 見てみたいな。送ってくれない? let answer try { const response = await fetchAIResponse(queryMessages) - console.log('response.message:', response.message) - const responseJson = JSON.parse(response.message) + console.log('response.message:', response.text) + const responseJson = JSON.parse(response.text) answer = responseJson.answer answer = answer.toString() } catch (error) { diff --git a/src/pages/api/aiChat.ts b/src/pages/api/aiChat.ts index 64f2a10b..9d942534 100644 --- a/src/pages/api/aiChat.ts +++ b/src/pages/api/aiChat.ts @@ -82,8 +82,6 @@ export default async function handler(req: NextRequest) { messages: modifiedMessages as CoreMessage[], }) - console.log(result) - return result.toDataStreamResponse() } catch (error) { console.error('Error in OpenAI API call:', error) @@ -98,7 +96,10 @@ export default async function handler(req: NextRequest) { messages: modifiedMessages as CoreMessage[], }) - return result + return new Response(JSON.stringify({ text: result.text }), { + status: 200, + headers: { 'Content-Type': 'application/json' }, + }) } }
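
For reference, the buffered stream handling that PATCH 6/8 introduces in src/features/chat/vercelAIChat.ts can be read in isolation as the small helper below. This is a minimal illustrative sketch, not part of the patch series: the helper name readTextChunks is hypothetical, and it assumes the Vercel AI SDK data stream format that the server side's toDataStreamResponse() emits, where text parts arrive as lines of the form 0:"<JSON-encoded text>".

// Minimal sketch (not part of the patches): consuming a Vercel AI SDK data stream
// the way vercelAIChat.ts does after PATCH 6/8. `readTextChunks` is an illustrative name.
async function* readTextChunks(
  body: ReadableStream<Uint8Array>
): AsyncGenerator<string> {
  const reader = body.getReader()
  const decoder = new TextDecoder('utf-8')
  let buffer = ''
  try {
    while (true) {
      const { done, value } = await reader.read()
      if (done) break
      // Keep the trailing partial line in the buffer so a text part that is
      // split across network chunks is reassembled before it is parsed.
      buffer += decoder.decode(value, { stream: true })
      const lines = buffer.split('\n')
      buffer = lines.pop() || ''
      for (const line of lines) {
        // Text parts of the data stream look like: 0:"Hello"
        if (line.startsWith('0:')) {
          // JSON.parse restores quotes, newlines and unicode escapes,
          // unlike the earlier replace(/^"|"$/g, '') quote stripping.
          yield JSON.parse(line.substring(2).trim()) as string
        }
      }
    }
  } finally {
    reader.releaseLock()
  }
}

Usage would be along the lines of `for await (const text of readTextChunks(response.body)) { ... }`. Buffering the trailing partial line and decoding each payload with JSON.parse is what the refactoring in PATCH 6/8 adds over the previous per-chunk split, so code blocks and multi-byte text that straddle chunk boundaries arrive intact on the client.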