From 130fc1b65de861ea80ce550fe635e4ac349796c8 Mon Sep 17 00:00:00 2001 From: moqsien Date: Tue, 24 Oct 2023 12:44:10 +0800 Subject: [PATCH] update --- go.mod | 2 +- go.sum | 6 ++++ pkgs/gpt/conversation.go | 4 +++ pkgs/tui/ui.go | 2 +- pkgs/tui/ui_conf.go | 56 ++++++++++++++++++++++--------------- pkgs/tui/ui_conversation.go | 14 ++++++++-- pkgs/tui/ui_help.go | 3 +- pkgs/tui/ui_tabs.go | 4 +-- 8 files changed, 61 insertions(+), 30 deletions(-) diff --git a/go.mod b/go.mod index b1e7262..cdfe2de 100644 --- a/go.mod +++ b/go.mod @@ -9,7 +9,7 @@ require ( github.com/charmbracelet/glamour v0.6.0 github.com/charmbracelet/lipgloss v0.8.0 github.com/gogf/gf v1.16.9 - github.com/moqsien/goutils v0.6.6 + github.com/moqsien/goutils v0.7.0 github.com/muesli/reflow v0.3.0 github.com/pkoukk/tiktoken-go v0.1.6 github.com/postfinance/single v0.0.2 diff --git a/go.sum b/go.sum index 5c7d13b..f2918ea 100644 --- a/go.sum +++ b/go.sum @@ -323,6 +323,12 @@ github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lN github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/moqsien/goutils v0.6.6 h1:iolFn1t5a5KUZY/uPMEvy13sW8Y64qrpvQvFAtw021E= github.com/moqsien/goutils v0.6.6/go.mod h1:FjA34cXS8BkoEX/eECIfA3cjA3a4YJdbz4CemZfrvmE= +github.com/moqsien/goutils v0.6.8 h1:kDYKrQXTJfFXVAF1H7Tdjs7AVX9Me947Ie2SoHKS6Ms= +github.com/moqsien/goutils v0.6.8/go.mod h1:FjA34cXS8BkoEX/eECIfA3cjA3a4YJdbz4CemZfrvmE= +github.com/moqsien/goutils v0.6.9 h1:Bv2MSmyhI1lzuhPRIsP1hUDrGmDLI5KfVHxLDPVk1r4= +github.com/moqsien/goutils v0.6.9/go.mod h1:FjA34cXS8BkoEX/eECIfA3cjA3a4YJdbz4CemZfrvmE= +github.com/moqsien/goutils v0.7.0 h1:Tuv/oRaUwYbAW2q105qcd+INQ97ssX3MSwSkDgMDX3c= +github.com/moqsien/goutils v0.7.0/go.mod h1:FjA34cXS8BkoEX/eECIfA3cjA3a4YJdbz4CemZfrvmE= github.com/moqsien/xtractr v0.0.2 h1:u/1YpZxY042OMeF0YE5niFKnF3Vrouaa84jaczztZ/g= github.com/moqsien/xtractr v0.0.2/go.mod h1:ASDHTK1TKxtwLesJeH9wc3OnKH7v5FPZGPJlyvZleVo= 
github.com/muesli/ansi v0.0.0-20211018074035-2e021307bc4b h1:1XF24mVaiu7u+CFywTdcDo2ie1pzzhwjt6RHqzpMU34= diff --git a/pkgs/gpt/conversation.go b/pkgs/gpt/conversation.go index c918e5e..5994dc5 100644 --- a/pkgs/gpt/conversation.go +++ b/pkgs/gpt/conversation.go @@ -188,3 +188,7 @@ func (that *Conversation) Load() { } } } + +func (that *Conversation) ClearCurrentAnswer() { + that.Current.A = "" +} diff --git a/pkgs/tui/ui.go b/pkgs/tui/ui.go index 68a917e..f19f82c 100644 --- a/pkgs/tui/ui.go +++ b/pkgs/tui/ui.go @@ -59,7 +59,7 @@ func (that *GPTUI) AddHelpInfo() { func (that *GPTUI) Run() { if that.Program == nil { - that.Program = tea.NewProgram(that.GVM, tea.WithAltScreen(), tea.WithMouseCellMotion()) + that.Program = tea.NewProgram(that.GVM, tea.WithAltScreen()) } if _, err := that.Program.Run(); err != nil { gprint.PrintError("%+v", err) diff --git a/pkgs/tui/ui_conf.go b/pkgs/tui/ui_conf.go index 2467324..c6f4787 100644 --- a/pkgs/tui/ui_conf.go +++ b/pkgs/tui/ui_conf.go @@ -6,6 +6,7 @@ import ( "strings" tea "github.com/charmbracelet/bubbletea" + "github.com/charmbracelet/lipgloss" "github.com/gogf/gf/util/gconv" "github.com/moqsien/gogpt/pkgs/config" "github.com/moqsien/gogpt/pkgs/gpt" @@ -31,7 +32,7 @@ var ( gptModel string = "select_model" apiKey string = "apiKey" proxy string = "proxy" - apiType string = "apiType" + apiType string = "select_APIType" apiVersion string = "apiVersion" orgID string = "orgID" engine string = "engine" @@ -45,7 +46,21 @@ var ( func GetGoGPTConfigModel(prompt *gpt.GPTPrompt) ExtraModel { mi := input.NewInputMultiModel() - mi.AddOneInput(baseUrl, input.MWithPlaceholder("base_url"), input.MWithWidth(150)) + mi.SetInputPromptFormat("%-15s") + placeHolderStyle := input.MWithPlaceholderStyle(lipgloss.NewStyle().Foreground(lipgloss.Color("#BEBEBE"))) + mi.AddOneInput(apiKey, input.MWithPlaceholder("ChatGPT auth token"), input.MWithWidth(100), placeHolderStyle) + mi.AddOneInput(proxy, input.MWithPlaceholder("Local proxy"), 
input.MWithWidth(150), placeHolderStyle) + mi.AddOneInput(ctxLen, input.MWithPlaceholder("Conversation context length"), input.MWithWidth(100), placeHolderStyle) + + // Select ChatGPT API type + gptApiTypeList := []string{ + string(openai.APITypeOpenAI), + string(openai.APITypeAzure), + string(openai.APITypeAzureAD), + } + mi.AddOneOption(apiType, gptApiTypeList, input.MWithPlaceholder("ChatGPT Api Type."), input.MWithWidth(100), placeHolderStyle) + + // Select ChatGPT Model gptModelList := []string{ openai.GPT3Dot5Turbo0613, openai.GPT3Dot5Turbo, @@ -68,38 +83,33 @@ func GetGoGPTConfigModel(prompt *gpt.GPTPrompt) ExtraModel { openai.GPT3Babbage, openai.GPT3Babbage002, } - mi.AddOneOption(gptModel, gptModelList, input.MWithPlaceholder("gpt_model"), input.MWithWidth(100)) + mi.AddOneOption(gptModel, gptModelList, input.MWithPlaceholder("ChatGPT Model."), input.MWithWidth(100), placeHolderStyle) + + // Select ChatGPT Prompt gptPromptList := []gutils.IComparable{} for _, item := range *prompt.PromptList { gptPromptList = append(gptPromptList, PromptString(item.Title)) } gutils.QuickSort(gptPromptList, 0, len(gptPromptList)-1) - pList := []string{} for _, p := range gptPromptList { pStr := p.(PromptString) pList = append(pList, string(pStr)) } - mi.AddOneOption(gptPrompt, pList, input.MWithPlaceholder("gpt_prompt"), input.MWithWidth(100)) - mi.AddOneInput(gptPromptValue, input.MWithPlaceholder("enter_gpt_prompt"), input.MWithWidth(100)) - - mi.AddOneInput(apiKey, input.MWithPlaceholder("api_key"), input.MWithWidth(100)) - mi.AddOneInput(proxy, input.MWithPlaceholder("proxy"), input.MWithWidth(150)) - mi.AddOneInput(apiVersion, input.MWithPlaceholder("api_version"), input.MWithWidth(100)) - - gptApiTypeList := []string{ - string(openai.APITypeOpenAI), - string(openai.APITypeAzure), - string(openai.APITypeAzureAD), - } - mi.AddOneOption(apiType, gptApiTypeList, input.MWithPlaceholder("gpt_api_type"), input.MWithWidth(100)) + mi.AddOneOption(gptPrompt, pList, 
input.MWithPlaceholder("gpt_prompt"), input.MWithWidth(100), placeHolderStyle) + // Enter your own ChatGPT Prompt + mi.AddOneInput(gptPromptValue, input.MWithPlaceholder("Enter your own prompt info instead of a selection from above."), input.MWithWidth(100), placeHolderStyle) + // Some configs + mi.AddOneInput(limit, input.MWithPlaceholder("Max empty message limit. Int."), input.MWithWidth(100), placeHolderStyle) + mi.AddOneInput(maxTokens, input.MWithPlaceholder("Max tokens. Int."), input.MWithWidth(100), placeHolderStyle) + mi.AddOneInput(temperature, input.MWithPlaceholder("Temperature. Float."), input.MWithWidth(100), placeHolderStyle) - mi.AddOneInput(orgID, input.MWithPlaceholder("org_id"), input.MWithWidth(100)) - mi.AddOneInput(engine, input.MWithPlaceholder("engine"), input.MWithWidth(100)) - mi.AddOneInput(limit, input.MWithPlaceholder("empty_message_limit"), input.MWithWidth(100)) - mi.AddOneInput(maxTokens, input.MWithPlaceholder("max_tokens"), input.MWithWidth(100)) - mi.AddOneInput(ctxLen, input.MWithPlaceholder("context_length"), input.MWithWidth(100)) - mi.AddOneInput(temperature, input.MWithPlaceholder("temperature"), input.MWithWidth(100)) + // Custom baseUrl + mi.AddOneInput(baseUrl, input.MWithPlaceholder("default: https://api.openai.com/v1"), input.MWithWidth(150), placeHolderStyle) + // For AzureGPT + mi.AddOneInput(apiVersion, input.MWithPlaceholder("API version."), input.MWithWidth(100), placeHolderStyle) + mi.AddOneInput(orgID, input.MWithPlaceholder("Organization ID."), input.MWithWidth(100), placeHolderStyle) + mi.AddOneInput(engine, input.MWithPlaceholder("Engine."), input.MWithWidth(100), placeHolderStyle) return mi } diff --git a/pkgs/tui/ui_conversation.go b/pkgs/tui/ui_conversation.go index 4bceefb..b9d48a5 100644 --- a/pkgs/tui/ui_conversation.go +++ b/pkgs/tui/ui_conversation.go @@ -103,10 +103,13 @@ func (that *ConversationModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) { return msg }) } + that.Conversation.AddAnswer(answerStr, 
!that.Receiving) if err != nil && err != io.EOF { that.Error = err + // clear errored answer, continue to Q&A + that.Conversation.ClearCurrentAnswer() + that.Receiving = false } - that.Conversation.AddAnswer(answerStr, !that.Receiving) that.Viewport.SetContent(that.RenderQA(*that.Conversation.Current)) that.Viewport.GotoBottom() } @@ -155,6 +158,13 @@ func (that *ConversationModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) { that.Error = err } that.Conversation.AddAnswer(answerStr, !that.Receiving) + + if err != nil && err != io.EOF { + that.Error = err + // clear errored answer, continue to Q&A + that.Conversation.ClearCurrentAnswer() + that.Receiving = false + } if that.Conversation.Current != nil { that.Viewport.SetContent(that.RenderQA(*that.Conversation.Current)) that.Viewport.GotoBottom() @@ -183,7 +193,7 @@ var ( senderStyle = lipgloss.NewStyle().Bold(true).Foreground(lipgloss.Color("5")) botStyle = lipgloss.NewStyle().Bold(true).Foreground(lipgloss.Color("6")) errorStyle = lipgloss.NewStyle().Bold(true).Foreground(lipgloss.Color("#FF0000")) - footerStyle = lipgloss.NewStyle().Height(1).Foreground(lipgloss.Color("#FFA500")).Faint(true) + footerStyle = lipgloss.NewStyle().Height(1).Foreground(lipgloss.Color("#00FFFF")).Faint(true) ) func (that *ConversationModel) RenderQA(qa gpt.QuesAnsw) string { diff --git a/pkgs/tui/ui_help.go b/pkgs/tui/ui_help.go index cce0ed7..4df6830 100644 --- a/pkgs/tui/ui_help.go +++ b/pkgs/tui/ui_help.go @@ -28,7 +28,7 @@ func (that *HelpModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) { } func (that *HelpModel) View() string { - pattern := "%-10s %s" + pattern := "%-12s %s" helpList := []string{ fmt.Sprintf(pattern, "enter", "Submit your message to gpt."), fmt.Sprintf(pattern, "↑", "Scroll up."), @@ -37,6 +37,7 @@ func (that *HelpModel) View() string { fmt.Sprintf(pattern, "ctrl+f", "Show the next QA."), fmt.Sprintf(pattern, "ctrl+s", "Save conversation."), fmt.Sprintf(pattern, "ctrl+l", "Load conversation."), + fmt.Sprintf(pattern, 
"ctrl+c/esc", "Exit."), fmt.Sprintf(pattern, "→", "Switch to the next Tab."), fmt.Sprintf(pattern, "←", "Switch to the previous Tab."), } diff --git a/pkgs/tui/ui_tabs.go b/pkgs/tui/ui_tabs.go index 36d0eca..9d0ce06 100644 --- a/pkgs/tui/ui_tabs.go +++ b/pkgs/tui/ui_tabs.go @@ -98,9 +98,9 @@ func (that *GPTViewModel) View() string { var style lipgloss.Style for i, t := range that.TabList { if i == that.ActiveTab { - style = lipgloss.NewStyle().Foreground(lipgloss.Color("229")) + style = lipgloss.NewStyle().Foreground(lipgloss.Color("#FFFF00")) } else { - style = lipgloss.NewStyle().Foreground(lipgloss.Color("57")) + style = lipgloss.NewStyle().Foreground(lipgloss.Color("#D2691E")) } newTabs = append(newTabs, style.Render(t.Title)) }