diff --git a/api/src/main/java/com/theokanning/openai/completion/chat/ChatCompletionRequest.java b/api/src/main/java/com/theokanning/openai/completion/chat/ChatCompletionRequest.java
index e4479ff3..5595c10b 100644
--- a/api/src/main/java/com/theokanning/openai/completion/chat/ChatCompletionRequest.java
+++ b/api/src/main/java/com/theokanning/openai/completion/chat/ChatCompletionRequest.java
@@ -27,6 +27,14 @@ public class ChatCompletionRequest {
      */
     List<ChatMessage> messages;
 
+    /**
+     * Must be either 'text' or 'json_object'.
+     * When specifying 'json_object' as the request format it's still necessary to instruct the model to return JSON.
+     * You may use {@link ChatResponseFormat.ResponseFormat} enum.
+     */
+    @JsonProperty("response_format")
+    ChatResponseFormat responseFormat;
+
     /**
      * What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower
      * values like 0.2 will make it more focused and deterministic.
diff --git a/api/src/main/java/com/theokanning/openai/completion/chat/ChatResponseFormat.java b/api/src/main/java/com/theokanning/openai/completion/chat/ChatResponseFormat.java
new file mode 100644
index 00000000..8e498b3f
--- /dev/null
+++ b/api/src/main/java/com/theokanning/openai/completion/chat/ChatResponseFormat.java
@@ -0,0 +1,30 @@
+package com.theokanning.openai.completion.chat;
+
+import com.fasterxml.jackson.annotation.JsonValue;
+import lombok.Builder;
+import lombok.Data;
+
+/**
+ * see {@link ChatCompletionRequest} documentation.
+ */
+@Data
+@Builder
+public class ChatResponseFormat {
+    private ResponseFormat type;
+
+    public enum ResponseFormat {
+        TEXT("text"),
+        JSON("json_object");
+
+        private final String value;
+
+        ResponseFormat(final String value) {
+            this.value = value;
+        }
+
+        @JsonValue
+        public String value() {
+            return value;
+        }
+    }
+}
\ No newline at end of file
diff --git a/service/src/test/java/com/theokanning/openai/service/ChatCompletionTest.java b/service/src/test/java/com/theokanning/openai/service/ChatCompletionTest.java
index 25f0defb..cd05751b 100644
--- a/service/src/test/java/com/theokanning/openai/service/ChatCompletionTest.java
+++ b/service/src/test/java/com/theokanning/openai/service/ChatCompletionTest.java
@@ -2,7 +2,9 @@
 
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.annotation.JsonPropertyDescription;
+import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ObjectNode;
 import com.theokanning.openai.completion.chat.*;
 import org.junit.jupiter.api.Test;
@@ -84,6 +86,35 @@ void streamChatCompletion() {
         assertNotNull(chunks.get(0).getChoices().get(0));
     }
 
+    @Test
+    void createChatCompletionWithJsonMode() {
+        final List<ChatMessage> messages = new ArrayList<>();
+        final ChatMessage systemMessage = new ChatMessage(ChatMessageRole.SYSTEM.value(), "You will generate a random name and return it in JSON format.");
+        messages.add(systemMessage);
+
+        ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest
+                .builder()
+                .model("gpt-3.5-turbo-1106")
+                .messages(messages)
+                .responseFormat(ChatResponseFormat.builder().type(ChatResponseFormat.ResponseFormat.JSON).build())
+                .maxTokens(50)
+                .logitBias(new HashMap<>())
+                .build();
+
+        ChatCompletionChoice choice = service.createChatCompletion(chatCompletionRequest).getChoices().get(0);
+        assertTrue(isValidJson(choice.getMessage().getContent()), "Response is not valid JSON");
+    }
+
+    private boolean isValidJson(String jsonString) {
+        ObjectMapper objectMapper = new ObjectMapper();
+        try {
+            objectMapper.readTree(jsonString);
+            return true;
+        } catch (JsonProcessingException e) {
+            return false;
+        }
+    }
+
     @Test
     void createChatCompletionWithFunctions() {
         final List<ChatFunction> functions = Collections.singletonList(ChatFunction.builder()
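Reviewer note (not part of the patch): a minimal usage sketch of the new response_format option from client code. It assumes an OpenAiService constructed with an API token, as in the library's other examples; the class name JsonModeExample, the environment variable, the model string, and the prompt text are illustrative only. The builder calls themselves mirror what the added test exercises.

    import com.theokanning.openai.completion.chat.*;
    import com.theokanning.openai.service.OpenAiService;

    import java.util.ArrayList;
    import java.util.List;

    public class JsonModeExample {
        public static void main(String[] args) {
            // Assumption: the API token is read from the environment, as in the library's tests.
            OpenAiService service = new OpenAiService(System.getenv("OPENAI_TOKEN"));

            // JSON mode still requires telling the model to produce JSON in the prompt itself.
            List<ChatMessage> messages = new ArrayList<>();
            messages.add(new ChatMessage(ChatMessageRole.SYSTEM.value(),
                    "Generate a random name and return it as a JSON object."));

            ChatCompletionRequest request = ChatCompletionRequest.builder()
                    .model("gpt-3.5-turbo-1106")   // a model that accepts response_format
                    .messages(messages)
                    .responseFormat(ChatResponseFormat.builder()
                            .type(ChatResponseFormat.ResponseFormat.JSON)
                            .build())
                    .maxTokens(50)
                    .build();

            // The returned message content is a JSON string.
            String json = service.createChatCompletion(request)
                    .getChoices().get(0).getMessage().getContent();
            System.out.println(json);
        }
    }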