Skip to content
This repository has been archived by the owner on Jun 6, 2024. It is now read-only.

Add response_format to allow use of new JSON mode #449

Open
wants to merge 3 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,14 @@ public class ChatCompletionRequest {
*/
List<ChatMessage> messages;

/**
* Must be either 'text' or 'json_object'. <br>
* When specifying 'json_object' as the request format it's still necessary to instruct the model to return JSON.
* You may use {@link ChatResponseFormat.ResponseFormat} enum.
*/
@JsonProperty("response_format")
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think you can just put the class ChatResponseFormat within the request class

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I tried to follow the format that was used for other attributes.

ChatResponseFormat responseFormat;

/**
* What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower
* values like 0.2 will make it more focused and deterministic.<br>
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
package com.theokanning.openai.completion.chat;

import com.fasterxml.jackson.annotation.JsonValue;
import lombok.Builder;
import lombok.Data;

/**
 * Describes the output format the model must produce; see the
 * {@link ChatCompletionRequest} documentation for usage details.
 */
@Data
@Builder
public class ChatResponseFormat {

    /** Either {@link ResponseFormat#TEXT} or {@link ResponseFormat#JSON}. */
    private ResponseFormat type;

    /** Wire values accepted by the API for the {@code response_format.type} field. */
    public enum ResponseFormat {
        TEXT("text"),
        JSON("json_object");

        private final String apiValue;

        ResponseFormat(final String apiValue) {
            this.apiValue = apiValue;
        }

        /** The serialized form sent to the API. */
        @JsonValue
        public String value() {
            return apiValue;
        }
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,9 @@

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyDescription;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.theokanning.openai.completion.chat.*;
import org.junit.jupiter.api.Test;
Expand Down Expand Up @@ -84,6 +86,35 @@ void streamChatCompletion() {
assertNotNull(chunks.get(0).getChoices().get(0));
}

@Test
void createChatCompletionWithJsonMode() {
    // A single system prompt is enough to exercise JSON mode; the model must
    // still be told explicitly to answer in JSON even when the format is set.
    final ChatMessage systemMessage = new ChatMessage(
            ChatMessageRole.SYSTEM.value(),
            "You will generate a random name and return it in JSON format.");
    final List<ChatMessage> messages = Collections.singletonList(systemMessage);

    ChatCompletionRequest chatCompletionRequest = ChatCompletionRequest
            .builder()
            .model("gpt-3.5-turbo-1106")
            .messages(messages)
            .responseFormat(ChatResponseFormat.builder()
                    .type(ChatResponseFormat.ResponseFormat.JSON)
                    .build())
            .maxTokens(50)
            .logitBias(new HashMap<>())
            .build();

    ChatCompletionChoice choice = service.createChatCompletion(chatCompletionRequest)
            .getChoices()
            .get(0);
    assertTrue(isValidJson(choice.getMessage().getContent()), "Response is not valid JSON");
}

/**
 * Returns {@code true} if the given string parses as JSON.
 *
 * <p>Guards against two Jackson quirks: {@code readTree(null)} throws
 * {@link IllegalArgumentException} rather than {@link JsonProcessingException},
 * and since Jackson 2.10 {@code readTree("")} returns a missing node instead of
 * throwing — so null/empty input and missing nodes are rejected explicitly
 * rather than being reported as valid JSON.
 *
 * @param jsonString candidate JSON text; may be null
 * @return true only when the input is non-empty, well-formed JSON
 */
private boolean isValidJson(String jsonString) {
    if (jsonString == null || jsonString.isEmpty()) {
        return false;
    }
    ObjectMapper objectMapper = new ObjectMapper();
    try {
        // readTree yields MissingNode for blank input on newer Jackson versions.
        return !objectMapper.readTree(jsonString).isMissingNode();
    } catch (JsonProcessingException e) {
        return false;
    }
}

@Test
void createChatCompletionWithFunctions() {
final List<ChatFunction> functions = Collections.singletonList(ChatFunction.builder()
Expand Down