Commit 1.2.18

acharneski committed Nov 8, 2023
1 parent 604bc27 commit 33fb0d8
Showing 5 changed files with 2 additions and 6 deletions.
4 changes: 2 additions & 2 deletions build.gradle.kts
@@ -25,10 +25,10 @@ repositories {
 val kotlin_version = "1.7.22"
 val jetty_version = "11.0.15"
 val slf4j_version = "2.0.9"
-val skyenet_version = "1.0.20"
+val skyenet_version = "1.0.21"
 dependencies {

-    implementation(group = "com.simiacryptus", name = "joe-penai", version = "1.0.23")
+    implementation(group = "com.simiacryptus", name = "joe-penai", version = "1.0.24")

     implementation(group = "com.simiacryptus.skyenet", name = "util", version = skyenet_version)
     implementation(group = "com.simiacryptus.skyenet", name = "core", version = skyenet_version)

@@ -82,7 +82,6 @@ class AnalogueFileAction extends FileContextAction<AnalogueFileAction.Settings>
 def chatRequest = new ChatRequest()
 def model = AppSettingsState.instance.defaultChatModel()
 chatRequest.model = model.modelName
-chatRequest.max_tokens = model.maxTokens
 chatRequest.temperature = AppSettingsState.instance.temperature
 chatRequest.messages = [
     new ChatMessage(

@@ -84,7 +84,6 @@ class CreateFileAction extends FileContextAction<CreateFileAction.Settings> {
 def chatRequest = new ChatRequest()
 def model = AppSettingsState.instance.defaultChatModel()
 chatRequest.model = model.modelName
-chatRequest.max_tokens = model.maxTokens
 chatRequest.temperature = AppSettingsState.instance.temperature
 chatRequest.messages = [
     //language=TEXT

@@ -68,7 +68,6 @@ rootMessageTrail =
 val chatRequest = OpenAIClient.ChatRequest()
 val model = AppSettingsState.instance.defaultChatModel()
 chatRequest.model = model.modelName
-chatRequest.max_tokens = model.maxTokens
 chatRequest.temperature = AppSettingsState.instance.temperature
 chatRequest.messages = messages.toTypedArray()
 return chatRequest

@@ -45,7 +45,6 @@ class AppSettingsState : PersistentStateComponent<SimpleEnvelope> {
 val chatRequest = ChatRequest()
 chatRequest.model = model.modelName
 chatRequest.temperature = temperature
-chatRequest.max_tokens = model.maxTokens
 return chatRequest
 }
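
In each of the four source hunks above the change is the same: the ChatRequest builders no longer copy model.maxTokens into max_tokens, so the request relies on the backend's default completion limit. A minimal, self-contained Kotlin sketch of the resulting pattern (the ChatRequest type and createChatRequest function below are illustrative stand-ins, not the actual joe-penai / com.simiacryptus classes):

// Hypothetical stand-in for the request type used above, for illustration only.
data class ChatRequest(
    var model: String? = null,
    var temperature: Double = 0.0,
    var max_tokens: Int? = null  // stays null after this commit; the service default applies
)

// Mirrors the builder shape in the hunks: model and temperature are set,
// but max_tokens is no longer assigned from the model's maximum.
fun createChatRequest(modelName: String, temperature: Double): ChatRequest {
    val chatRequest = ChatRequest()
    chatRequest.model = modelName
    chatRequest.temperature = temperature
    // max_tokens intentionally left unset (the model.maxTokens assignment was removed)
    return chatRequest
}

fun main() {
    println(createChatRequest(modelName = "example-model", temperature = 0.7))
}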

