Skip to content

Commit

Permalink
Include response tests in serialization errors
Browse files Browse the repository at this point in the history
  • Loading branch information
raulraja committed Jun 5, 2024
1 parent ca1f4ae commit b6f0d6b
Show file tree
Hide file tree
Showing 4 changed files with 54 additions and 1 deletion.
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
package com.xebia.functional.xef.errors

import com.xebia.functional.openai.generated.model.CreateChatCompletionRequestModel
import com.xebia.functional.xef.AI
import com.xebia.functional.xef.Config
import com.xebia.functional.xef.OpenAI
import com.xebia.functional.xef.prompt.Prompt
import com.xebia.functional.xef.prompt.PromptBuilder.Companion.user
import com.xebia.functional.xef.prompt.configuration.PromptConfiguration
import io.kotest.core.spec.style.StringSpec
import io.kotest.matchers.string.shouldContain

class SerializationErrorTests :
  StringSpec({
    // Deliberately invalid credentials: every request is expected to fail with
    // an authentication error from the OpenAI API.
    val config =
      Config(
        token = "<bad-token>",
      )
    val openAI = OpenAI(config)
    val chat = openAI.chat
    val model = CreateChatCompletionRequestModel.gpt_3_5_turbo

    "serialization errors should include response" {
      val prompt =
        Prompt(
          model = model,
          // Single deserialization attempt so the failure surfaces immediately
          // instead of being retried.
          configuration = PromptConfiguration { maxDeserializationAttempts = 1 },
        ) {
          +user("Hello, how are you?")
        }
      // Capture the failure explicitly: a bare try/catch around the call would
      // let the test pass silently if no exception were thrown at all.
      val failure =
        try {
          AI<String>(prompt = prompt, api = chat)
          null
        } catch (e: Exception) {
          e
        }
      checkNotNull(failure) { "expected the AI call with an invalid token to throw" }
      failure.message.shouldContain("Incorrect API key")
    }
  })
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
package com.xebia.functional.openai.errors

/**
 * Signals that an OpenAI HTTP response could not be deserialized into the
 * expected type. [message] typically embeds the raw response text so the
 * failure is debuggable; [cause] carries the underlying decoding exception,
 * when one exists.
 */
class ResponseSerializerError(
  message: String,
  cause: Throwable? = null,
) : Exception(message, cause)
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
package com.xebia.functional.openai.errors

import io.ktor.client.call.*
import io.ktor.client.statement.*

/**
 * Deserializes this response's body to [A], attaching the raw response text to
 * any decoding failure so serialization errors are debuggable.
 *
 * @throws ResponseSerializerError when the body is null or cannot be decoded as [A].
 */
suspend inline fun <reified A> HttpResponse.serializeOrThrowWithResponseInfo(): A =
  try {
    this.body() ?: throw ResponseSerializerError("Response body is null")
  } catch (e: kotlin.coroutines.cancellation.CancellationException) {
    // Never wrap cancellation: swallowing or re-typing it breaks structured
    // concurrency for suspending callers.
    throw e
  } catch (e: ResponseSerializerError) {
    // Already our error type (e.g. the null-body case above) — don't double-wrap.
    throw e
  } catch (e: Exception) {
    // NOTE(review): bodyAsText() after a failed body() read may return an empty
    // string if the underlying channel was already consumed — confirm against
    // the ktor client version in use.
    throw ResponseSerializerError("Failed to deserialize response body:\n${bodyAsText()}", e)
  }
3 changes: 2 additions & 1 deletion openai-client/generator/config/api.mustache
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ import com.xebia.functional.openai.UploadFile
import com.xebia.functional.openai.appendGen
import com.xebia.functional.openai.generated.api.{{classname}}.*
import com.xebia.functional.openai.streamEvents
import com.xebia.functional.openai.errors.serializeOrThrowWithResponseInfo
import io.ktor.client.HttpClient
import io.ktor.client.call.body
import io.ktor.client.plugins.timeout
Expand Down Expand Up @@ -135,7 +136,7 @@ fun {{classname}}(client: HttpClient, config: Config): {{apiPackage}}.{{classnam
io.ktor.client.utils.EmptyContent
{{/hasFormParams}}
{{/hasBodyParam}})
}{{#returnProperty}}{{^isFile}}.body(){{/isFile}}{{/returnProperty}}
}{{#returnProperty}}{{^isFile}}.serializeOrThrowWithResponseInfo(){{/isFile}}{{/returnProperty}}

{{#vendorExtensions.x-streaming}}
override fun {{operationId}}Stream({{#allParams}}{{{paramName}}}: {{#isEnum}}{{# isContainer}}kotlin.collections.List<{{enumName}}{{operationIdCamelCase}}>{{/isContainer}}{{^isContainer}}{{enumName}}{{operationIdCamelCase}}{{/isContainer}}{{/isEnum}}{{^isEnum}}{{{dataType}}}{{/isEnum}}{{#required}}{{#defaultValue}} = {{^isNumber}}{{#isEnum}}{{enumName}}{{operationIdCamelCase}}.{{enumDefaultValue}}{{/isEnum}}{{^isEnum}}{{{defaultValue}}}{{/isEnum}}{{/isNumber}}{{#isNumber}}{{{defaultValue}}}.toDouble(){{/isNumber}}{{/defaultValue}}{{/required}}{{^required}}?{{#defaultValue}} = {{^isNumber}}{{#isEnum}}{{enumName}}{{operationIdCamelCase}}.{{enumDefaultValue}}{{/isEnum}}{{^isEnum}}{{{defaultValue}}}{{/isEnum}}{{/isNumber}}{{#isNumber}}{{{defaultValue}}}.toDouble(){{/isNumber}}{{/defaultValue}}{{^defaultValue}} = null{{/defaultValue}}{{/required}}, {{/allParams}}configure: HttpRequestBuilder.() -> Unit): Flow<{{{vendorExtensions.x-streaming-return}}}> = flow {
Expand Down

0 comments on commit b6f0d6b

Please sign in to comment.