From 4223529651b64b2ba409c4835a50779b5bf65528 Mon Sep 17 00:00:00 2001
From: ellvix
Date: Tue, 27 Feb 2024 23:26:17 -0700
Subject: [PATCH] feat: add heading levels to multi ai responses for better AT
 navigation (#418)

Fixes #414
---
 src/js/constants.js | 37 +++++++++++++++++++++++++++++--------
 1 file changed, 29 insertions(+), 8 deletions(-)

diff --git a/src/js/constants.js b/src/js/constants.js
index db0476b7..5df71f00 100644
--- a/src/js/constants.js
+++ b/src/js/constants.js
@@ -272,7 +272,7 @@ class Resources {
         empty: 'Empty',
         openai: 'OpenAI Vision',
         gemini: 'Gemini Pro Vision',
-        multi: 'Multiple',
+        multi: 'Multiple AI',
       },
     },
   };
@@ -873,9 +873,10 @@ class Menu {
  */
 class ChatLLM {
   constructor() {
+    this.firstTime = true;
+    this.firstMulti = true;
     this.CreateComponent();
     this.SetEvents();
-    this.firstTime = true;
   }
 
   /**
@@ -1060,6 +1061,8 @@
     }
 
     let img = null;
+    this.firstMulti = true;
+
     if (constants.LLMOpenAiMulti || constants.LLMModel == 'openai') {
       if (firsttime) {
         img = await this.ConvertSVGtoJPG(singleMaidr.id, 'openai');
@@ -1131,7 +1134,7 @@
         this.requestJson.messages[i].content = text;
 
         if (data.error) {
-          chatLLM.DisplayChatMessage(LLMName, 'Error processing request.');
+          chatLLM.DisplayChatMessage(LLMName, 'Error processing request.', true);
         } else {
           chatLLM.DisplayChatMessage(LLMName, text);
         }
@@ -1145,7 +1148,7 @@
       }
     }
     if (data.error) {
-      chatLLM.DisplayChatMessage(LLMName, 'Error processing request.');
+      chatLLM.DisplayChatMessage(LLMName, 'Error processing request.', true);
     } else {
       // todo: display actual response
     }
@@ -1240,7 +1243,7 @@
       .catch((error) => {
         chatLLM.WaitingSound(false);
         console.error('Error:', error);
-        chatLLM.DisplayChatMessage('LLM', 'Error processing request.');
+        chatLLM.DisplayChatMessage(LLMName, 'Error processing request.', true);
         // also todo: handle errors somehow
       });
   }
@@ -1342,15 +1345,33 @@
    * @memberof module:constants
    * @returns {void}
    */
-  DisplayChatMessage(user = 'User', text = '') {
+  DisplayChatMessage(user = 'User', text = '', isSystem = false) {
+    let hLevel = 'h3';
+    if (!isSystem && constants.LLMModel == 'multi' && user != 'User') {
+      if (this.firstMulti) {
+        let multiAIName = resources.GetString('multi');
+        let titleHtml = `
+          <div class="chatLLM_message chatLLM_message_other">
+            <h3 class="chatLLM_message_user">${multiAIName} Responses</h3>
+          </div>
+        `;
+        this.RenderChatMessage(titleHtml);
+        this.firstMulti = false;
+      }
+      hLevel = 'h4';
+    }
     let html = `
       <div class="chatLLM_message ${
         user == 'User' ? 'chatLLM_message_self' : 'chatLLM_message_other'
       }">
-      <h3 class="chatLLM_message_user">${user}</h3>
+      <${hLevel} class="chatLLM_message_user">${user}</${hLevel}>
       <div class="chatLLM_message_text">${text}</div>
       </div>
     `;
+
+    this.RenderChatMessage(html);
+  }
+  RenderChatMessage(html) {
     document
       .getElementById('chatLLM_chat_history')
       .insertAdjacentHTML('beforeend', html);
@@ -1424,7 +1445,7 @@
     // get name from resource
     let LLMName = resources.GetString(constants.LLMModel);
     this.firstTime = false;
-    this.DisplayChatMessage(LLMName, 'Processing Chart...');
+    this.DisplayChatMessage(LLMName, 'Processing Chart...', true);
     let defaultPrompt = this.GetDefaultPrompt();
     this.Submit(defaultPrompt, true);
   }
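
A usage sketch (illustrative only, not part of the diff): how DisplayChatMessage()
behaves with the new heading levels when constants.LLMModel == 'multi', using the
global chatLLM instance and the model names defined above; the response strings
here are placeholders.

  // First model response in multi mode: a one-time h3 group heading
  // ("Multiple AI Responses") is rendered and firstMulti flips to false,
  // then the message itself is appended under an h4 heading.
  chatLLM.DisplayChatMessage('OpenAI Vision', 'The chart shows a rising trend.');

  // Subsequent responses skip the group heading and render at h4 only, so
  // screen-reader users can jump model-to-model with heading navigation.
  chatLLM.DisplayChatMessage('Gemini Pro Vision', 'Values increase left to right.');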
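
System and status messages opt out via the new isSystem flag and stay at h3, as
in the call sites this patch updates (LLMName as returned by
resources.GetString(constants.LLMModel)):

  // isSystem = true: never grouped under "Multiple AI Responses", and the
  // heading level remains h3 even when LLMModel == 'multi'.
  chatLLM.DisplayChatMessage(LLMName, 'Processing Chart...', true);
  chatLLM.DisplayChatMessage(LLMName, 'Error processing request.', true);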