diff --git a/.github/workflows/go.yml b/.github/workflows/go.yml new file mode 100644 index 0000000..66f978c --- /dev/null +++ b/.github/workflows/go.yml @@ -0,0 +1,29 @@ +name: Go +on: [push] + +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Setup Go + uses: actions/setup-go@v4 + with: + go-version: '1.21' + - name: Install dependencies + run: go get . + - name: Test with the Go CLI + run: go test -v ./... + - name: Update coverage report + uses: ncruces/go-coverage-report@v0 + with: + report: true + chart: true + amend: true + reuse-go: true + if: | + runner.os == 'Linux' && + github.event_name == 'push' && + github.ref_name == github.event.repository.default_branch + continue-on-error: true + diff --git a/README.md b/README.md index 5767691..85ef03f 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,5 @@ +[![Go Coverage](https://github.com/jaffee/aicli/wiki/coverage.svg)](https://raw.githack.com/wiki/jaffee/aicli/coverage.html) + # aicli aicli is a command line interface for AI chatbots. Currently only OpenAI is supported. Think of it like ChatGPT, but in your terminal instead of in the browser. You need an OpenAI API key to use it. 
diff --git a/pkg/aicli/cmd.go b/pkg/aicli/cmd.go index fcbcb4b..e0697ba 100644 --- a/pkg/aicli/cmd.go +++ b/pkg/aicli/cmd.go @@ -43,6 +43,8 @@ func NewCmd(client AI) *Cmd { stdin: os.Stdin, stdout: os.Stdout, stderr: os.Stderr, + + client: client, } } @@ -56,10 +58,9 @@ func (cmd *Cmd) Run() error { } rl, err := readline.NewEx(&readline.Config{ - Prompt: "> ", - HistoryFile: cmd.getHistoryFilePath(), - HistoryLimit: 1000000, - ForceUseInteractive: true, // seems to be needed for testing + Prompt: "> ", + HistoryFile: cmd.getHistoryFilePath(), + HistoryLimit: 1000000, Stdin: cmd.stdin, Stdout: cmd.stdout, diff --git a/pkg/aicli/cmd_test.go b/pkg/aicli/cmd_test.go index 1c48079..a114e50 100644 --- a/pkg/aicli/cmd_test.go +++ b/pkg/aicli/cmd_test.go @@ -27,9 +27,21 @@ func TestCmd(t *testing.T) { runErr = cmd.Run() close(done) }() - time.Sleep(time.Millisecond * 10) require.NoError(t, runErr) - expect(t, stdout, []byte{0x20, 0x08, 0x1b, 0x5b, 0x36, 0x6e, 0x3e, 0x20}) + // expect(t, stdout, []byte{0x20, 0x08, 0x1b, 0x5b, 0x36, 0x6e, 0x3e, 0x20}) + stdinw.Write([]byte("blah\n")) + require.NoError(t, runErr) + expect(t, stdout, []byte("msgs: 1, role: assistant, content: blah\n")) + stdinw.Write([]byte("bleh\n")) + require.NoError(t, runErr) + expect(t, stdout, []byte("msgs: 3, role: assistant, content: bleh\n")) + stdinw.Write([]byte("\\messages\n")) + expect(t, stdout, []byte(" user: blah\nassistant: msgs: 1, role: assistant, content: blah\n user: bleh\nassistant: msgs: 3, role: assistant, content: bleh\n")) + stdinw.Write([]byte("\\reset\n")) + require.NoError(t, runErr) + stdinw.Write([]byte("\\config\n")) + expect(t, stderr, []byte("OpenAI_API_Key: length=4\nOpenAIModel: gpt-3.5-turbo\nTemperature: 0.700000\nVerbose: false\n")) + stdinw.Close() select { @@ -43,11 +55,23 @@ func TestCmd(t *testing.T) { func expect(t *testing.T, r io.Reader, exp []byte) { t.Helper() - buffer := make([]byte, len(exp)) - - n, err := r.Read(buffer) - if err != nil && err.Error() != "EOF" 
{ - require.NoError(t, err) + buffer := make([]byte, len(exp)*20) + i := 0 + var n int + var err error + for { + i++ + n, err = r.Read(buffer) + if err != nil && err.Error() != "EOF" { + require.NoError(t, err) + } + if n > 0 { + break + } + if i > 100 { + t.Fatal("spent too long waiting for output") + } + time.Sleep(time.Millisecond) } require.Equal(t, exp, buffer[:n])