Skip to content

Commit

Permalink
Persistence
Browse files Browse the repository at this point in the history
  • Loading branch information
edofic committed Mar 30, 2023
1 parent d5601cb commit a49c179
Showing 1 changed file with 46 additions and 14 deletions.
60 changes: 46 additions & 14 deletions main.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ package main

import (
"context"
"encoding/json"
"errors"
"flag"
"fmt"
Expand All @@ -18,6 +19,7 @@ func main() {
systemMsg := flag.String("systemMsg", "", "System message to include with the prompt")
includeFile := flag.String("includeFile", "", "File to include with the prompt")
temperature := flag.Float64("temperature", 0, "ChatGPT temperature")
continueSession := flag.Bool("c", false, "Continue last session (ignores other flags)")
flag.Usage = func() {
fmt.Fprintf(os.Stderr, "Usage: %s [options] message\n", os.Args[0])
flag.PrintDefaults()
Expand All @@ -30,24 +32,39 @@ func main() {
ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
defer cancel()

msgs := []openai.ChatCompletionMessage{}
if *systemMsg != "" {
msgs = append(msgs, openai.ChatCompletionMessage{Role: openai.ChatMessageRoleSystem, Content: *systemMsg})
var req openai.ChatCompletionRequest
if *continueSession {
session, err := os.ReadFile("/tmp/openai-cli-last-session.json")
if err != nil {
panic(err)
}
err = json.Unmarshal(session, &req)
if err != nil {
panic(err)
}
} else {
msgs := []openai.ChatCompletionMessage{}
if *systemMsg != "" {
msgs = append(msgs, openai.ChatCompletionMessage{Role: openai.ChatMessageRoleSystem, Content: *systemMsg})
}
msgs = append(msgs, openai.ChatCompletionMessage{Role: openai.ChatMessageRoleUser, Content: msg})
req = openai.ChatCompletionRequest{
Model: openai.GPT3Dot5Turbo,
MaxTokens: *maxTokens,
Temperature: float32(*temperature),
Stream: true,
Messages: msgs,
}
}
msgs = append(msgs, openai.ChatCompletionMessage{Role: openai.ChatMessageRoleUser, Content: msg})
if *includeFile != "" {
contents, err := os.ReadFile(*includeFile)
if err != nil {
panic(err)
}
msgs = append(msgs, openai.ChatCompletionMessage{Role: openai.ChatMessageRoleUser, Content: string(contents)})
}
req := openai.ChatCompletionRequest{
Model: openai.GPT3Dot5Turbo,
MaxTokens: *maxTokens,
Temperature: float32(*temperature),
Stream: true,
Messages: msgs,
req.Messages = append(
req.Messages,
openai.ChatCompletionMessage{Role: openai.ChatMessageRoleUser, Content: string(contents)},
)
}
stream, err := client.CreateChatCompletionStream(ctx, req)
if err != nil {
Expand All @@ -56,18 +73,33 @@ func main() {
}
defer stream.Close()

responseChunks := []string{}
for {
response, err := stream.Recv()
if errors.Is(err, io.EOF) {
fmt.Println()
return
break
}

if err != nil {
fmt.Printf("\nStream error: %v\n", err)
return
}

fmt.Printf(response.Choices[0].Delta.Content)
chunk := response.Choices[0].Delta.Content
fmt.Print(chunk)
responseChunks = append(responseChunks, chunk)
}

fullResponse := strings.Join(responseChunks, "")
req.Messages = append(req.Messages, openai.ChatCompletionMessage{
Role: openai.ChatMessageRoleAssistant,
Content: fullResponse,
})

resJson, err := json.Marshal(req)
if err != nil {
panic(err)
}
os.WriteFile("/tmp/openai-cli-last-session.json", resJson, 0644)
}

0 comments on commit a49c179

Please sign in to comment.