Skip to content

Commit

Permalink
Merge pull request #28 from stefanjwojcik/main
Browse files Browse the repository at this point in the history
Add ability to swap different chat models
  • Loading branch information
ThatcherC authored Oct 4, 2023
2 parents 7bcb8fd + 52b4148 commit 387647e
Show file tree
Hide file tree
Showing 3 changed files with 60 additions and 3 deletions.
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1 +1,4 @@
Manifest.toml

settings.json
LocalPreferences.toml
16 changes: 13 additions & 3 deletions src/ReplGPT.jl
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ using Preferences

include("formatting.jl")
include("keys.jl")
include("models.jl")

"""
function ReplGPT.generate_empty_conversation()
Expand Down Expand Up @@ -114,15 +115,24 @@ end

function call_chatgpt(s)
key = getAPIkey()
model = getmodelname()
if !ismissing(key)
userMessage = Dict("role" => "user", "content" => s)
push!(conversation, userMessage)

r = OpenAI.create_chat(key, "gpt-3.5-turbo", conversation)
r = OpenAI.create_chat(key, model, conversation)

# TODO: check for errors!
#if !=(r.status, 200)
# @test false
while !=(r.status, 200)
format("ChatGPT is busy! Do you want to try again? y/n")
userreply = readline()
if userreply == "y"
r = OpenAI.create_chat(key, model, conversation)
else
format("ChatGPT is busy! Do you want to try again?")
break
end
end
#end
response = r.response["choices"][begin]["message"]["content"]

Expand Down
44 changes: 44 additions & 0 deletions src/models.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
# Environment variable consulted as a fallback for the model name.
const api_model_name = "OPENAI_API_MODEL"
# Preference key under which the model name is stored in LocalPreferences.toml.
const api_pref_model_name = "openai_api_model"

"""
function getmodelname()
Returns an OpenAI API model name to use from either the `LocalPreferences.toml` file or the
`OPENAI_API_MODEL` environment variable. If neither is present, returns `gpt-3.5-turbo`.
"""
function getmodelname()
model = "gpt-3.5-turbo"

# try to load model from Preferences:
model = @load_preference(api_pref_model_name, "gpt-3.5-turbo")

# if not koaded from preferences, look in environment variables
if model == "gpt-3.5-turbo" && haskey(ENV, api_model_name)
model = ENV[api_model_name]
end

return model
end

"""
function setmodelname(model::String)
Sets the OpenAI API model for ReplGPT to use. The model will be saved as plaintext to your environment's
`LocalPreferences.toml` file (perhaps somewhere like `~/.julia/environments/v1.8/LocalPreferences.toml`).
The model can be deleted with `ReplGPT.clearmodelname()`.
"""
function setmodelname(model::String)
@set_preferences!(api_pref_model_name => model)
end

"""
function clearmodelname()
Deletes the OpenAI API model saved in `LocalPreferences.toml` if present.
See also: ReplGPT.setmodelname(model::String)
"""
function clearmodelname()
@delete_preferences!(api_pref_model_name)
end

0 comments on commit 387647e

Please sign in to comment.