diff --git a/src/App.vue b/src/App.vue
index 2b1b6fc022..e796f59ddb 100644
--- a/src/App.vue
+++ b/src/App.vue
@@ -91,6 +91,7 @@
+
{
+ const messages = store.getters.currentChat.messages || [];
+ return messages.filter(v => v.type === 'response').some(v => !v.done);
+});
const columns = computed(() => store.state.columns);
const changeColumns = (columns) => store.commit("changeColumns", columns);
diff --git a/src/bots/Bot.js b/src/bots/Bot.js
index fafa069d5f..d9a3e7fc6a 100644
--- a/src/bots/Bot.js
+++ b/src/bots/Bot.js
@@ -153,7 +153,13 @@ export default class Bot {
async _sendPrompt(prompt, onUpdateResponse, callbackParam) {
throw new Error(i18n.global.t("bot.notImplemented"));
}
- /* eslint-enable no-unused-vars */
+
+ async _stopGenerating() {
+ }
+
+ stopGenerating() {
+ this._stopGenerating();
+ }
async sendPrompt(prompt, onUpdateResponse, callbackParam) {
// If not logged in, handle the error
diff --git a/src/bots/LangChainBot.js b/src/bots/LangChainBot.js
index b1078de4fa..9c2c4000e8 100644
--- a/src/bots/LangChainBot.js
+++ b/src/bots/LangChainBot.js
@@ -1,56 +1,166 @@
import Bot from "@/bots/Bot";
-import { HumanMessage, AIMessage, SystemMessage } from "langchain/schema";
+import store from "@/store";
+import { SSE } from 'sse.js';
export default class LangChainBot extends Bot {
static _brandId = "langChainBot";
static _chatModel = undefined; // ChatModel instance
constructor() {
- super();
+ super();
+ this.source = null;
}
+ async _sendPrompt(prompt, onUpdateResponse, callbackParam) {
+ let messages = await this.getChatContext();
+ // Remove old messages if exceeding the pastRounds limit
+ while (messages.length > store.state.openaiApi.pastRounds * 2) {
+ messages.shift();
+ }
- async _sendPrompt(prompt, onUpdateResponse, callbackParam) {
- let messages = await this.getChatContext();
- // Remove old messages if exceeding the pastRounds limit
- while (messages.length > this.getPastRounds() * 2) {
- messages.shift();
+ // Send the prompt to the OpenAI API
+ try {
+ const headers = {
+ 'Content-Type': 'application/json',
+ Authorization: `Bearer ${store.state.openaiApi.apiKey}`,
+ };
+
+ messages.push({ role: 'user', content: prompt });
+ const payload = JSON.stringify({
+ model: this.constructor._model,
+ messages: messages,
+ temperature: store.state.openaiApi.temperature,
+ stream: true,
+ });
+
+ const requestConfig = {
+ headers,
+ method: 'POST',
+ payload,
+ };
+
+ let res = '';
+ return new Promise((resolve, reject) => {
+ // call OpenAI API
+ const apiUrl
+ = store.state.openaiApi.alterUrl
+ || 'https://api.openai.com/v1/chat/completions';
+ this.source = new SSE(apiUrl, requestConfig);
+ this.source.addEventListener('message', event => {
+ const regex = /^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d{6}$/;
+ if (event.data === '[DONE]') {
+ onUpdateResponse(callbackParam, { done: true });
+ messages.push({ role: 'assistant', content: res });
+ this.setChatContext(messages);
+ this.source.close();
+ resolve();
+ }
+ else if (regex.test(event.data)) {
+ // Ignore the timestamp
+ return;
+ }
+ else {
+ if (event?.source?.chunk?.startsWith('{')) {
+ const { code, msg, error } = JSON.parse(event?.source?.chunk || '{}');
+ if (error && error?.message) {
+ this.source.close();
+ reject(error?.message);
+ return;
+ }
+ if (code >= 400) {
+ this.source.close();
+ reject(`${code}: ${msg}`);
+ return;
+ }
+ }
+ try {
+ const data = JSON.parse(event.data);
+ const partialText = data.choices?.[0]?.delta?.content;
+ if (partialText) {
+ res += partialText;
+ onUpdateResponse(callbackParam, { content: res, done: false });
+ }
+ }
+ catch (e) {
+ this.source.close();
+ reject(e);
+ }
+ }
+ });
+ this.source.addEventListener('error', error => {
+ try {
+ const data = (() => {
+ if (error?.data) {
+ return error?.data?.startsWith('{')
+ ? JSON.parse(error.data) : typeof error?.data === 'object'
+ ? JSON.stringify(error.data) : error.data;
+ }
+ return error;
+ })();
+ this.source.close();
+ reject(data?.error?.message || data?.error?.msg || data?.data || data || '');
+ }
+ catch (e) {
+ this.source.close();
+ console.error(e);
+ reject(e);
+ }
+
+ });
+ this.source.stream();
+ });
+ }
+ catch (error) {
+ console.error('Error sending prompt to OpenAIAPI:', error);
+ throw error;
+ }
}
- // Deserialize the messages and convert them to the correct format
- messages = messages.map((item) => {
- let storedMessage = JSON.parse(item); // Deserialize
- if (storedMessage.type === "human") {
- return new HumanMessage(storedMessage.data);
- } else if (storedMessage.type === "ai") {
- return new AIMessage(storedMessage.data);
- } else if (storedMessage.type === "system") {
- return new SystemMessage(storedMessage.data);
- }
- });
+ _stopGenerating() {
+ this.source && this.source.close();
+ }
- // Add the prompt to the messages
- messages.push(new HumanMessage(prompt));
+// async _sendPrompt(prompt, onUpdateResponse, callbackParam) {
+// let messages = await this.getChatContext();
+// // Remove old messages if exceeding the pastRounds limit
+// while (messages.length > this.getPastRounds() * 2) {
+// messages.shift();
+// }
- let res = "";
- const model = this.constructor._chatModel;
- const callbacks = [
- {
- handleLLMNewToken(token) {
- res += token;
- onUpdateResponse(callbackParam, { content: res, done: false });
- },
- handleLLMEnd() {
- onUpdateResponse(callbackParam, { done: true });
- },
- },
- ];
- model.callbacks = callbacks;
- await model.call(messages);
- messages.push(new AIMessage(res));
- // Serialize the messages before storing
- messages = messages.map((item) => JSON.stringify(item.toDict()));
- this.setChatContext(messages);
- }
+// // Deserialize the messages and convert them to the correct format
+// messages = messages.map((item) => {
+// let storedMessage = JSON.parse(item); // Deserialize
+// if (storedMessage.type === "human") {
+// return new HumanMessage(storedMessage.data);
+// } else if (storedMessage.type === "ai") {
+// return new AIMessage(storedMessage.data);
+// } else if (storedMessage.type === "system") {
+// return new SystemMessage(storedMessage.data);
+// }
+// });
+
+// // Add the prompt to the messages
+// messages.push(new HumanMessage(prompt));
+
+// let res = "";
+// const model = this.constructor._chatModel;
+// const callbacks = [
+// {
+// handleLLMNewToken(token) {
+// res += token;
+// onUpdateResponse(callbackParam, { content: res, done: false });
+// },
+// handleLLMEnd() {
+// onUpdateResponse(callbackParam, { done: true });
+// },
+// },
+// ];
+// model.callbacks = callbacks;
+// await model.call(messages);
+// messages.push(new AIMessage(res));
+// // Serialize the messages before storing
+// messages = messages.map((item) => JSON.stringify(item.toDict()));
+// this.setChatContext(messages);
+// }
async createChatContext() {
return [];
diff --git a/src/components/Footer/FooterBar.vue b/src/components/Footer/FooterBar.vue
index b0df5a34fe..ce9ce15c07 100644
--- a/src/components/Footer/FooterBar.vue
+++ b/src/components/Footer/FooterBar.vue
@@ -33,10 +33,7 @@
{{ $t("footer.sendPrompt") }}
@@ -121,6 +118,12 @@ const favBots = computed(() => {
});
const prompt = ref("");
+const isSendDisabled = computed(() => {
+ const messages = store.getters.currentChat.messages || [];
+ return prompt.value.trim() === ''
+ || favBots.value.filter(favBot => activeBots[favBot.classname]).length === 0
+ || messages.filter(v => v.type === 'response').some(v => !v.done);
+});
const clickedBot = ref(null);
const isMakeAvailableOpen = ref(false);
@@ -184,7 +187,9 @@ function filterEnterKey(event) {
!event.metaKey
) {
event.preventDefault();
- sendPromptToBots();
+ if (!isSendDisabled.value) {
+ sendPromptToBots();
+ }
}
}
diff --git a/src/components/StopGenerating/StopGenerating.vue b/src/components/StopGenerating/StopGenerating.vue
new file mode 100644
index 0000000000..4844086b78
--- /dev/null
+++ b/src/components/StopGenerating/StopGenerating.vue
@@ -0,0 +1,55 @@
+
+
+ {{ $t("footer.stopGenerating") }}
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/i18n/locales/zh.json b/src/i18n/locales/zh.json
index b068c17098..bfe3087b8a 100644
--- a/src/i18n/locales/zh.json
+++ b/src/i18n/locales/zh.json
@@ -26,6 +26,7 @@
},
"footer": {
"chooseFavorite": "选择你喜欢的 AI",
+ "stopGenerating": "停止生成",
"sendPrompt": "发送到:",
"promptPlaceholder": "输入消息。(Shift+Enter 换行)"
},
diff --git a/src/store/index.js b/src/store/index.js
index f78d174570..91c4029a8c 100644
--- a/src/store/index.js
+++ b/src/store/index.js
@@ -311,6 +311,17 @@ export default createStore({
},
},
actions: {
stopGenerating({ state }, { bots }) {
bots?.forEach(v => v?.stopGenerating());
+ const currentChat = state.chats[state.currentChatIndex];
+ if (currentChat.messages) {
currentChat.messages.forEach(
item => {
if (item.type === 'response') {
item.done = true;
}
}
);
+ }
+ },
sendPrompt({ commit, state, dispatch }, { prompt, bots, promptIndex }) {
isThrottle = false;
const currentChat = state.chats[state.currentChatIndex];