Added missing javadoc to improve doc quality #1136

Merged · 9 commits · Dec 17, 2024
22 changes: 11 additions & 11 deletions src/java/src/main/java/ai/onnxruntime/genai/Config.java
@@ -29,30 +29,30 @@ public void clearProviders() {
}

/**
* Add the provider at the end of the list of providers in the given config if it doesn't already exist.
* If it already exists, does nothing.
* Add the provider at the end of the list of providers in the given config if it doesn't already
* exist. If it already exists, does nothing.
*
* @param provider_name The provider name.
* @param providerName The provider name.
*/
public void appendProvider(String provider_name) {
public void appendProvider(String providerName) {
if (nativeHandle == 0) {
throw new IllegalStateException("Instance has been freed and is invalid");
}
appendProvider(nativeHandle, provider_name);
appendProvider(nativeHandle, providerName);
}

/**
* Set a provider option.
*
* @param provider_name The provider name.
* @param option_key The key of the option to set.
* @param option_value The value of the option to set.
* @param providerName The provider name.
* @param optionKey The key of the option to set.
* @param optionValue The value of the option to set.
*/
public void setProviderOption(String provider_name, String option_key, String option_value) {
public void setProviderOption(String providerName, String optionKey, String optionValue) {
if (nativeHandle == 0) {
throw new IllegalStateException("Instance has been freed and is invalid");
}
setProviderOption(nativeHandle, provider_name, option_key, option_value);
setProviderOption(nativeHandle, providerName, optionKey, optionValue);
}

@Override
@@ -84,5 +84,5 @@ long nativeHandle() {
private native void appendProvider(long configHandle, String provider_name);

private native void setProviderOption(
long configHandle, String provider_name, String option_key, String option_value);
long configHandle, String providerName, String optionKey, String optionValue);
}
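
For reference, a minimal sketch of how the renamed camelCase parameters read at a call site. The model path, the "cuda" provider name, and the "device_id" option are placeholders, and the Config(String) constructor plus AutoCloseable behavior are assumed to match the rest of these bindings:

```java
import ai.onnxruntime.genai.Config;
import ai.onnxruntime.genai.GenAIException;

public class ConfigProviderExample {
  public static void main(String[] args) throws GenAIException {
    // Placeholder path, provider name, and option key/value; adjust for your environment.
    try (Config config = new Config("/path/to/model/dir")) {
      config.clearProviders();                          // start from an empty provider list
      config.appendProvider("cuda");                    // appending the same provider twice is a no-op
      config.setProviderOption("cuda", "device_id", "0");
    }
  }
}
```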
7 changes: 4 additions & 3 deletions src/java/src/main/java/ai/onnxruntime/genai/Generator.java
@@ -94,8 +94,8 @@ public void appendTokenSequences(Sequences sequences) throws GenAIException {
}

/**
* Rewinds the generator to the given length. This is useful when the user wants to rewind the generator to a specific length
* and continue generating from that point.
* Rewinds the generator to the given length. This is useful when the user wants to rewind the
* generator to a specific length and continue generating from that point.
*
* @param newLength The desired length in tokens after rewinding.
* @throws GenAIException If the call to the GenAI native API fails.
@@ -109,7 +109,8 @@ public void rewindTo(int newLength) throws GenAIException {
}

/**
* Computes the logits from the model based on the input ids and the past state. The computed logits are stored in the generator.
* Computes the logits from the model based on the input ids and the past state. The computed
* logits are stored in the generator.
*
* @throws GenAIException If the call to the GenAI native API fails.
*/
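
A hedged sketch of the rewind workflow the new Javadoc describes: generate some tokens, rewind to a shorter length, then continue from that point. The Generator(Model, GeneratorParams) constructor and the isDone(), generateNextToken(), and getSequence(long) calls are assumed from the rest of these bindings and may differ slightly; the model path and prompt are placeholders:

```java
try (Model model = new Model("/path/to/model/dir");
     Tokenizer tokenizer = new Tokenizer(model);
     GeneratorParams params = new GeneratorParams(model)) {
  params.setSearchOption("max_length", 200);

  try (Sequences prompt = tokenizer.encodeBatch(new String[] {"def is_prime(n):"});
       Generator generator = new Generator(model, params)) {
    generator.appendTokenSequences(prompt);

    // Generate a first burst of tokens.
    for (int i = 0; i < 20 && !generator.isDone(); i++) {
      generator.generateNextToken();
    }

    // Keep only the first 10 tokens and continue generating from there.
    generator.rewindTo(10);
    while (!generator.isDone()) {
      generator.generateNextToken();
    }

    int[] outputTokens = generator.getSequence(0); // token ids for the first sequence
  }
}
```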
12 changes: 9 additions & 3 deletions src/java/src/main/java/ai/onnxruntime/genai/GeneratorParams.java
@@ -14,7 +14,13 @@ public final class GeneratorParams implements AutoCloseable {
private long nativeHandle = 0;
private ByteBuffer tokenIdsBuffer;

GeneratorParams(Model model) throws GenAIException {
/**
* Creates a GeneratorParams from the given model.
*
* @param model The model to use.
* @throws GenAIException If the call to the GenAI native API fails.
*/
public GeneratorParams(Model model) throws GenAIException {
if (model.nativeHandle() == 0) {
throw new IllegalStateException("model has been freed and is invalid");
}
@@ -27,7 +33,7 @@ public final class GeneratorParams implements AutoCloseable {
*
* @param optionName The option name.
* @param value The option value.
* @throws GenAIException
* @throws GenAIException If the call to the GenAI native API fails.
*/
public void setSearchOption(String optionName, double value) throws GenAIException {
if (nativeHandle == 0) {
@@ -42,7 +48,7 @@ public void setSearchOption(String optionName, double value) throws GenAIException {
*
* @param optionName The option name.
* @param value The option value.
* @throws GenAIException
* @throws GenAIException If the call to the GenAI native API fails.
*/
public void setSearchOption(String optionName, boolean value) throws GenAIException {
if (nativeHandle == 0) {
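
With the GeneratorParams constructor now public, callers can build parameters directly from a Model instead of going through a factory method. A minimal sketch with a placeholder model path; "max_length" appears in the tests below, while "do_sample" is an assumed search-option name:

```java
try (Model model = new Model("/path/to/model/dir");
     GeneratorParams params = new GeneratorParams(model)) {
  params.setSearchOption("max_length", 256);   // numeric options use the double overload
  params.setSearchOption("do_sample", false);  // boolean options use the boolean overload
}
```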
35 changes: 1 addition & 34 deletions src/java/src/main/java/ai/onnxruntime/genai/Model.java
@@ -18,7 +18,7 @@ public Model(String modelPath) throws GenAIException {
}

/**
* Construct a Model from Config
* Construct a Model from the given Config.
*
* @param config The config to use.
* @throws GenAIException If the call to the GenAI native API fails.
@@ -27,39 +27,6 @@ public Model(Config config) throws GenAIException {
nativeHandle = createModelFromConfig(config.nativeHandle());
}

/**
* Creates a Tokenizer instance for this model. The model contains the configuration information
* that determines the tokenizer to use.
*
* @return The Tokenizer instance.
* @throws GenAIException If the call to the GenAI native API fails.
*/
public Tokenizer createTokenizer() throws GenAIException {
if (nativeHandle == 0) {
throw new IllegalStateException("Instance has been freed and is invalid");
}

return new Tokenizer(this);
}

// NOTE: Having model.createGeneratorParams is still under discussion.
// model.createTokenizer is consistent with the python setup at least and agreed upon.

/**
* Creates a GeneratorParams instance for executing the model. NOTE: GeneratorParams internally
* uses the Model, so the Model instance must remain valid
*
* @return The GeneratorParams instance.
* @throws GenAIException If the call to the GenAI native API fails.
*/
public GeneratorParams createGeneratorParams() throws GenAIException {
if (nativeHandle == 0) {
throw new IllegalStateException("Instance has been freed and is invalid");
}

return new GeneratorParams(this);
}

@Override
public void close() {
if (nativeHandle != 0) {
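
With createTokenizer() and createGeneratorParams() removed from Model, callers construct the helper objects directly, as the updated SimpleGenAI and test code below shows. A short before/after sketch with a placeholder model path:

```java
try (Model model = new Model("/path/to/model/dir");
     Tokenizer tokenizer = new Tokenizer(model);            // was: model.createTokenizer()
     GeneratorParams params = new GeneratorParams(model)) {  // was: model.createGeneratorParams()
  // use tokenizer and params exactly as before
}
```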
4 changes: 2 additions & 2 deletions src/java/src/main/java/ai/onnxruntime/genai/SimpleGenAI.java
@@ -37,7 +37,7 @@ public class SimpleGenAI implements AutoCloseable {
*/
public SimpleGenAI(String modelPath) throws GenAIException {
model = new Model(modelPath);
tokenizer = model.createTokenizer();
tokenizer = new Tokenizer(model);
}

/**
@@ -48,7 +48,7 @@ public SimpleGenAI(String modelPath) throws GenAIException {
* @throws GenAIException on failure
*/
public GeneratorParams createGeneratorParams() throws GenAIException {
return model.createGeneratorParams();
return new GeneratorParams(model);
}

/**
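
SimpleGenAI keeps its public surface; only its internals switch to the new constructors. A short usage sketch limited to what this diff shows (the constructor and createGeneratorParams), with a placeholder model path and an assumed "max_length" option value:

```java
try (SimpleGenAI genAI = new SimpleGenAI("/path/to/model/dir");
     GeneratorParams params = genAI.createGeneratorParams()) {
  params.setSearchOption("max_length", 128);
  // hand params to SimpleGenAI's generation call
}
```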
8 changes: 7 additions & 1 deletion src/java/src/main/java/ai/onnxruntime/genai/Tokenizer.java
@@ -7,7 +7,13 @@
public class Tokenizer implements AutoCloseable {
private long nativeHandle;

Tokenizer(Model model) throws GenAIException {
/**
* Creates a Tokenizer from the given model.
*
* @param model The model to use.
* @throws GenAIException If the call to the GenAI native API fails.
*/
public Tokenizer(Model model) throws GenAIException {
assert (model.nativeHandle() != 0); // internal code should never pass an invalid model

nativeHandle = createTokenizer(model.nativeHandle());
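
The Tokenizer constructor is likewise public now, so it can be used directly with a Model. A minimal sketch; encodeBatch comes from the tests in this PR, and the model path and prompts are placeholders:

```java
try (Model model = new Model("/path/to/model/dir");
     Tokenizer tokenizer = new Tokenizer(model);  // was created via model.createTokenizer()
     Sequences sequences = tokenizer.encodeBatch(new String[] {"Hello", "How are you?"})) {
  // sequences holds one token-id sequence per input string
}
```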
@@ -68,15 +68,15 @@ public void testUsageWithListener() throws GenAIException {
@EnabledIf("haveAdapters")
public void testUsageWithAdapters() throws GenAIException {
try (Model model = new Model(TestUtils.testAdapterTestModelPath());
Tokenizer tokenizer = model.createTokenizer()) {
Tokenizer tokenizer = new Tokenizer(model)) {
String[] prompts = {
TestUtils.applyPhi2ChatTemplate("def is_prime(n):"),
TestUtils.applyPhi2ChatTemplate("def compute_gcd(x, y):"),
TestUtils.applyPhi2ChatTemplate("def binary_search(arr, x):"),
};

try (Sequences sequences = tokenizer.encodeBatch(prompts);
GeneratorParams params = model.createGeneratorParams()) {
GeneratorParams params = new GeneratorParams(model)) {
params.setSearchOption("max_length", 200);
params.setSearchOption("batch_size", prompts.length);

Expand Down
@@ -18,7 +18,7 @@ public void testBatchEncodeDecode() throws GenAIException {
try (Model model = new Model(TestUtils.testVisionModelPath());
MultiModalProcessor multiModalProcessor = new MultiModalProcessor(model);
TokenizerStream stream = multiModalProcessor.createStream();
GeneratorParams generatorParams = model.createGeneratorParams()) {
GeneratorParams generatorParams = new GeneratorParams(model)) {
String inputs =
new String(
"<|user|>\n<|image_1|>\n Can you convert the table to markdown format?\n<|end|>\n<|assistant|>\n");