diff --git a/.github/workflows/ios-build.yml b/.github/workflows/ios-build.yml new file mode 100644 index 000000000..71c51267b --- /dev/null +++ b/.github/workflows/ios-build.yml @@ -0,0 +1,41 @@ +name: "iOS ARM64 Build" +on: + workflow_dispatch: + push: + branches: + - main + - rel-* + pull_request: +concurrency: + group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true +jobs: + iphonesimulator-arm64-build: + runs-on: macos-latest # arm64 + steps: + - name: Checkout OnnxRuntime GenAI repo + uses: actions/checkout@v4 + with: + submodules: true + + - uses: actions/setup-python@v5 + with: + python-version: '3.12.x' + + - name: Install the python wheel and dependencies + run: | + python3 -m venv genai-macos-venv + source genai-macos-venv/bin/activate + python3 -m pip install requests + + - name: Run iOS Build + run: | + set -e -x + source genai-macos-venv/bin/activate + python3 build.py --ios \ + --parallel \ + --apple_sysroot iphonesimulator \ + --osx_arch arm64 \ + --apple_deploy_target 15.4 \ + --cmake_generator 'Xcode' \ + --build_dir build_iphonesimulator \ No newline at end of file diff --git a/src/java/src/main/java/ai/onnxruntime/genai/Adapters.java b/src/java/src/main/java/ai/onnxruntime/genai/Adapters.java index 4f5f93b7a..40b90e946 100644 --- a/src/java/src/main/java/ai/onnxruntime/genai/Adapters.java +++ b/src/java/src/main/java/ai/onnxruntime/genai/Adapters.java @@ -4,6 +4,7 @@ */ package ai.onnxruntime.genai; +/** A container of adapters. */ public final class Adapters implements AutoCloseable { private long nativeHandle = 0; @@ -22,13 +23,13 @@ public Adapters(Model model) throws GenAIException { } /** - * Load an adapter from the specified path. + * Loads the model adapter from the given adapter file path and adapter name. * * @param adapterFilePath The path of the adapter. * @param adapterName A unique user supplied adapter identifier. * @throws GenAIException If the call to the GenAI native API fails. */ - public void loadAdapters(String adapterFilePath, String adapterName) throws GenAIException { + public void loadAdapter(String adapterFilePath, String adapterName) throws GenAIException { if (nativeHandle == 0) { throw new IllegalStateException("Instance has been freed and is invalid"); } @@ -37,12 +38,13 @@ public void loadAdapters(String adapterFilePath, String adapterName) throws GenA } /** - * Unload an adapter. + * Unloads the adapter with the given identifier from the previously loaded adapters. If the + * adapter is not found, or if it cannot be unloaded (when it is in use), an exception is thrown. * * @param adapterName A unique user supplied adapter identifier. * @throws GenAIException If the call to the GenAI native API fails. */ - public void unloadAdapters(String adapterName) throws GenAIException { + public void unloadAdapter(String adapterName) throws GenAIException { if (nativeHandle == 0) { throw new IllegalStateException("Instance has been freed and is invalid"); } diff --git a/src/java/src/main/java/ai/onnxruntime/genai/Config.java b/src/java/src/main/java/ai/onnxruntime/genai/Config.java index 857eebaa1..25d1f052b 100644 --- a/src/java/src/main/java/ai/onnxruntime/genai/Config.java +++ b/src/java/src/main/java/ai/onnxruntime/genai/Config.java @@ -3,13 +3,24 @@ */ package ai.onnxruntime.genai; +/** + * Use Config to set the ORT execution providers (EPs) and their options. The EPs are applied based on + * insertion order.
+ */ public final class Config implements AutoCloseable { private long nativeHandle; + /** + * Creates a Config from the given configuration directory. + * + * @param modelPath The path to the configuration directory. + * @throws GenAIException If the call to the GenAI native API fails. + */ public Config(String modelPath) throws GenAIException { nativeHandle = createConfig(modelPath); } + /** Clear the list of providers in the config */ public void clearProviders() { if (nativeHandle == 0) { throw new IllegalStateException("Instance has been freed and is invalid"); @@ -17,18 +28,31 @@ public void clearProviders() { clearProviders(nativeHandle); } - public void appendProvider(String provider_name) { + /** + * Add the provider at the end of the list of providers in the given config if it doesn't already + * exist. If it already exists, does nothing. + * + * @param providerName The provider name. + */ + public void appendProvider(String providerName) { if (nativeHandle == 0) { throw new IllegalStateException("Instance has been freed and is invalid"); } - appendProvider(nativeHandle, provider_name); + appendProvider(nativeHandle, providerName); } - public void setProviderOption(String provider_name, String option_name, String option_value) { + /** + * Set a provider option. + * + * @param providerName The provider name. + * @param optionKey The key of the option to set. + * @param optionValue The value of the option to set. + */ + public void setProviderOption(String providerName, String optionKey, String optionValue) { if (nativeHandle == 0) { throw new IllegalStateException("Instance has been freed and is invalid"); } - setProviderOption(nativeHandle, provider_name, option_name, option_value); + setProviderOption(nativeHandle, providerName, optionKey, optionValue); } @Override @@ -60,5 +84,5 @@ long nativeHandle() { private native void appendProvider(long configHandle, String provider_name); private native void setProviderOption( - long configHandle, String provider_name, String option_name, String option_value); + long configHandle, String providerName, String optionKey, String optionValue); } diff --git a/src/java/src/main/java/ai/onnxruntime/genai/GenAI.java b/src/java/src/main/java/ai/onnxruntime/genai/GenAI.java index 2d31fe196..bcba3c5d0 100644 --- a/src/java/src/main/java/ai/onnxruntime/genai/GenAI.java +++ b/src/java/src/main/java/ai/onnxruntime/genai/GenAI.java @@ -34,7 +34,7 @@ final class GenAI { /** The short name of the ONNX runtime shared library */ static final String ONNXRUNTIME_LIBRARY_NAME = "onnxruntime"; - /** The value of the {@link #GENAI_NATIVE_PATH} system property */ + /** The value of the GENAI_NATIVE_PATH system property */ private static String libraryDirPathProperty; /** The OS & CPU architecture string */ @@ -268,7 +268,7 @@ private static Optional extractFromResources(String library) { /** * Maps the library name into a platform dependent library filename. Converts macOS's "jnilib" to - * "dylib" but otherwise is the same as {@link System#mapLibraryName(String)}. + * "dylib" but otherwise is the same as System#mapLibraryName(String). * * @param library The library name * @return The library filename. 
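Not part of the patch: a minimal usage sketch showing how the renamed Config and Adapters APIs above fit together (appendProvider/setProviderOption with camelCase parameters, loadAdapter/unloadAdapter, setActiveAdapter). The provider name "cuda", the "device_id" option, and all file paths below are placeholders, not values taken from this diff.

import ai.onnxruntime.genai.Adapters;
import ai.onnxruntime.genai.Config;
import ai.onnxruntime.genai.GenAIException;
import ai.onnxruntime.genai.Generator;
import ai.onnxruntime.genai.GeneratorParams;
import ai.onnxruntime.genai.Model;

public final class AdapterExample {
  public static void main(String[] args) throws GenAIException {
    // Placeholder model directory; Config reads the configuration from it.
    try (Config config = new Config("/path/to/model/dir")) {
      config.clearProviders();
      config.appendProvider("cuda");                      // hypothetical EP name
      config.setProviderOption("cuda", "device_id", "0"); // hypothetical option key/value

      try (Model model = new Model(config);
          Adapters adapters = new Adapters(model)) {
        // Renamed in this patch from loadAdapters to loadAdapter.
        adapters.loadAdapter("/path/to/adapters.onnx_adapter", "my_adapter");

        try (GeneratorParams params = new GeneratorParams(model);
            Generator generator = new Generator(model, params)) {
          generator.setActiveAdapter(adapters, "my_adapter");
          // ... append a prompt and generate tokens here ...
        }

        // Renamed from unloadAdapters; fails if the adapter is still in use.
        adapters.unloadAdapter("my_adapter");
      }
    }
  }
}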
diff --git a/src/java/src/main/java/ai/onnxruntime/genai/GenAIException.java b/src/java/src/main/java/ai/onnxruntime/genai/GenAIException.java index 30c1d58c1..ff3c741f2 100644 --- a/src/java/src/main/java/ai/onnxruntime/genai/GenAIException.java +++ b/src/java/src/main/java/ai/onnxruntime/genai/GenAIException.java @@ -5,11 +5,11 @@ /** An exception which contains the error message and code produced by the native layer. */ public final class GenAIException extends Exception { - public GenAIException(String message) { + GenAIException(String message) { super(message); } - public GenAIException(String message, Exception innerException) { + GenAIException(String message, Exception innerException) { super(message, innerException); } } diff --git a/src/java/src/main/java/ai/onnxruntime/genai/Generator.java b/src/java/src/main/java/ai/onnxruntime/genai/Generator.java index 8d727cacc..e2ea75c39 100644 --- a/src/java/src/main/java/ai/onnxruntime/genai/Generator.java +++ b/src/java/src/main/java/ai/onnxruntime/genai/Generator.java @@ -94,7 +94,8 @@ public void appendTokenSequences(Sequences sequences) throws GenAIException { } /** - * Rewinds the generator by the specified number of tokens. + * Rewinds the generator to the given length. This is useful when the user wants to rewind the + * generator to a specific length and continue generating from that point. * * @param newLength The desired length in tokens after rewinding. * @throws GenAIException If the call to the GenAI native API fails. @@ -108,7 +109,8 @@ public void rewindTo(int newLength) throws GenAIException { } /** - * Generates the next token in the sequence. + * Computes the logits from the model based on the input ids and the past state. The computed + * logits are stored in the generator. * * @throws GenAIException If the call to the GenAI native API fails. */ @@ -151,9 +153,10 @@ public int getLastTokenInSequence(long sequenceIndex) throws GenAIException { } /** - * Fetches and returns the output tensor with the given name. + * Returns a copy of the model output identified by the given name as a Tensor. * * @param name The name of the output needed. + * @return The tensor. * @throws GenAIException If the call to the GenAI native API fails. */ public Tensor getOutput(String name) throws GenAIException { @@ -162,7 +165,7 @@ public Tensor getOutput(String name) throws GenAIException { } /** - * Activates one of the loaded adapters. + * Sets the adapter with the given adapter name as active. * * @param adapters The Adapters container. * @param adapterName The adapter name that was previously loaded. diff --git a/src/java/src/main/java/ai/onnxruntime/genai/GeneratorParams.java b/src/java/src/main/java/ai/onnxruntime/genai/GeneratorParams.java index 5f9cca786..c55b082c7 100644 --- a/src/java/src/main/java/ai/onnxruntime/genai/GeneratorParams.java +++ b/src/java/src/main/java/ai/onnxruntime/genai/GeneratorParams.java @@ -7,14 +7,20 @@ import java.nio.ByteBuffer; /** - * The `GeneratorParams` class represents the parameters used for generating sequences with a model. - * Set the prompt using setInput, and any other search options using setSearchOption. + * Represents the parameters used for generating sequences with a model. Set the prompt using + * setInput, and any other search options using setSearchOption. 
*/ public final class GeneratorParams implements AutoCloseable { private long nativeHandle = 0; private ByteBuffer tokenIdsBuffer; - GeneratorParams(Model model) throws GenAIException { + /** + * Creates a GeneratorParams from the given model. + * + * @param model The model to use. + * @throws GenAIException If the call to the GenAI native API fails. + */ + public GeneratorParams(Model model) throws GenAIException { if (model.nativeHandle() == 0) { throw new IllegalStateException("model has been freed and is invalid"); } @@ -22,6 +28,13 @@ public final class GeneratorParams implements AutoCloseable { nativeHandle = createGeneratorParams(model.nativeHandle()); } + /** + * Set search option with double value. + * + * @param optionName The option name. + * @param value The option value. + * @throws GenAIException If the call to the GenAI native API fails. + */ public void setSearchOption(String optionName, double value) throws GenAIException { if (nativeHandle == 0) { throw new IllegalStateException("Instance has been freed and is invalid"); @@ -30,6 +43,13 @@ public void setSearchOption(String optionName, double value) throws GenAIExcepti setSearchOptionNumber(nativeHandle, optionName, value); } + /** + * Set search option with boolean value. + * + * @param optionName The option name. + * @param value The option value. + * @throws GenAIException If the call to the GenAI native API fails. + */ public void setSearchOption(String optionName, boolean value) throws GenAIException { if (nativeHandle == 0) { throw new IllegalStateException("Instance has been freed and is invalid"); diff --git a/src/java/src/main/java/ai/onnxruntime/genai/Images.java b/src/java/src/main/java/ai/onnxruntime/genai/Images.java index fe60c6ff5..20d8db124 100644 --- a/src/java/src/main/java/ai/onnxruntime/genai/Images.java +++ b/src/java/src/main/java/ai/onnxruntime/genai/Images.java @@ -3,9 +3,16 @@ */ package ai.onnxruntime.genai; +/** This class can load images from the given path and prepare them for processing. */ public class Images implements AutoCloseable { private long nativeHandle; + /** + * Construct an Images instance. + * + * @param imagePath The image path. + * @throws GenAIException If the call to the GenAI native API fails. + */ public Images(String imagePath) throws GenAIException { nativeHandle = loadImages(imagePath); } diff --git a/src/java/src/main/java/ai/onnxruntime/genai/Model.java b/src/java/src/main/java/ai/onnxruntime/genai/Model.java index 6500dd805..558bb2593 100644 --- a/src/java/src/main/java/ai/onnxruntime/genai/Model.java +++ b/src/java/src/main/java/ai/onnxruntime/genai/Model.java @@ -3,48 +3,28 @@ */ package ai.onnxruntime.genai; +/** An ORT GenAI model. */ public final class Model implements AutoCloseable { private long nativeHandle; - public Model(String modelPath) throws GenAIException { - nativeHandle = createModel(modelPath); - } - - public Model(Config config) throws GenAIException { - nativeHandle = createModelFromConfig(config.nativeHandle()); - } - /** - * Creates a Tokenizer instance for this model. The model contains the configuration information - * that determines the tokenizer to use. + * Construct a Model from a folder path. * - * @return The Tokenizer instance. + * @param modelPath The path of the GenAI model. * @throws GenAIException If the call to the GenAI native API fails.
*/ - public Tokenizer createTokenizer() throws GenAIException { - if (nativeHandle == 0) { - throw new IllegalStateException("Instance has been freed and is invalid"); - } - - return new Tokenizer(this); + public Model(String modelPath) throws GenAIException { + nativeHandle = createModel(modelPath); } - // NOTE: Having model.createGeneratorParams is still under discussion. - // model.createTokenizer is consistent with the python setup at least and agreed upon. - /** - * Creates a GeneratorParams instance for executing the model. NOTE: GeneratorParams internally - * uses the Model, so the Model instance must remain valid + * Construct a Model from the given Config. * - * @return The GeneratorParams instance. + * @param config The config to use. * @throws GenAIException If the call to the GenAI native API fails. */ - public GeneratorParams createGeneratorParams() throws GenAIException { - if (nativeHandle == 0) { - throw new IllegalStateException("Instance has been freed and is invalid"); - } - - return new GeneratorParams(this); + public Model(Config config) throws GenAIException { + nativeHandle = createModelFromConfig(config.nativeHandle()); } @Override diff --git a/src/java/src/main/java/ai/onnxruntime/genai/MultiModalProcessor.java b/src/java/src/main/java/ai/onnxruntime/genai/MultiModalProcessor.java index 62c72a2db..7938eb1d6 100644 --- a/src/java/src/main/java/ai/onnxruntime/genai/MultiModalProcessor.java +++ b/src/java/src/main/java/ai/onnxruntime/genai/MultiModalProcessor.java @@ -10,6 +10,12 @@ public class MultiModalProcessor implements AutoCloseable { private long nativeHandle; + /** + * Construct a MultiModalProcessor for a given model. + * + * @param model The model to be used. + * @throws GenAIException If the call to the GenAI native API fails. + */ public MultiModalProcessor(Model model) throws GenAIException { assert (model.nativeHandle() != 0); // internal code should never pass an invalid model diff --git a/src/java/src/main/java/ai/onnxruntime/genai/NamedTensors.java b/src/java/src/main/java/ai/onnxruntime/genai/NamedTensors.java index b7b27f71d..b2b2f686f 100644 --- a/src/java/src/main/java/ai/onnxruntime/genai/NamedTensors.java +++ b/src/java/src/main/java/ai/onnxruntime/genai/NamedTensors.java @@ -3,9 +3,17 @@ */ package ai.onnxruntime.genai; +/** + * This class is a list of tensors with names that match up with model input names. + */ public class NamedTensors implements AutoCloseable { private long nativeHandle; + /** + * Construct a NamedTensor from native handle. + * + * @param handle The native handle. + */ public NamedTensors(long handle) { nativeHandle = handle; } diff --git a/src/java/src/main/java/ai/onnxruntime/genai/SimpleGenAI.java b/src/java/src/main/java/ai/onnxruntime/genai/SimpleGenAI.java index cfd230e7e..dce360bf6 100644 --- a/src/java/src/main/java/ai/onnxruntime/genai/SimpleGenAI.java +++ b/src/java/src/main/java/ai/onnxruntime/genai/SimpleGenAI.java @@ -29,9 +29,15 @@ public class SimpleGenAI implements AutoCloseable { private Model model; private Tokenizer tokenizer; + /** + * Construct a SimpleGenAI instance from model path. + * + * @param modelPath The path to the GenAI model. + * @throws GenAIException If the call to the GenAI native API fails. 
+ */ public SimpleGenAI(String modelPath) throws GenAIException { model = new Model(modelPath); - tokenizer = model.createTokenizer(); + tokenizer = new Tokenizer(model); } /** @@ -42,7 +48,7 @@ public SimpleGenAI(String modelPath) throws GenAIException { * @throws GenAIException on failure */ public GeneratorParams createGeneratorParams() throws GenAIException { - return model.createGeneratorParams(); + return new GeneratorParams(model); } /** diff --git a/src/java/src/main/java/ai/onnxruntime/genai/Tensor.java b/src/java/src/main/java/ai/onnxruntime/genai/Tensor.java index 6c98ba488..3eadc0a4a 100644 --- a/src/java/src/main/java/ai/onnxruntime/genai/Tensor.java +++ b/src/java/src/main/java/ai/onnxruntime/genai/Tensor.java @@ -7,6 +7,7 @@ import java.nio.ByteBuffer; import java.nio.ByteOrder; +/** Currently wraps an ORT Tensor. */ public final class Tensor implements AutoCloseable { private long nativeHandle = 0; private final ElementType elementType; @@ -17,6 +18,7 @@ public final class Tensor implements AutoCloseable { // The values in this enum must match ONNX values // https://github.com/onnx/onnx/blob/159fa47b7c4d40e6d9740fcf14c36fff1d11ccd8/onnx/onnx.proto#L499-L544 + /** Element types that correspond to OnnxRuntime supported element types. */ public enum ElementType { undefined, float32, diff --git a/src/java/src/main/java/ai/onnxruntime/genai/Tokenizer.java b/src/java/src/main/java/ai/onnxruntime/genai/Tokenizer.java index 6a0bf703f..91f788bbb 100644 --- a/src/java/src/main/java/ai/onnxruntime/genai/Tokenizer.java +++ b/src/java/src/main/java/ai/onnxruntime/genai/Tokenizer.java @@ -7,7 +7,13 @@ public class Tokenizer implements AutoCloseable { private long nativeHandle; - Tokenizer(Model model) throws GenAIException { + /** + * Creates a Tokenizer from the given model. + * + * @param model The model to use. + * @throws GenAIException If the call to the GenAI native API fails. + */ + public Tokenizer(Model model) throws GenAIException { assert (model.nativeHandle() != 0); // internal code should never pass an invalid model nativeHandle = createTokenizer(model.nativeHandle()); diff --git a/src/java/src/main/java/ai/onnxruntime/genai/TokenizerStream.java b/src/java/src/main/java/ai/onnxruntime/genai/TokenizerStream.java index a30c1724c..fedb13b35 100644 --- a/src/java/src/main/java/ai/onnxruntime/genai/TokenizerStream.java +++ b/src/java/src/main/java/ai/onnxruntime/genai/TokenizerStream.java @@ -10,11 +10,23 @@ public class TokenizerStream implements AutoCloseable { private long nativeHandle = 0; + /** + * Construct a TokenizerStream. + * + * @param tokenizerStreamHandle The native handle. + */ TokenizerStream(long tokenizerStreamHandle) { assert (tokenizerStreamHandle != 0); // internal usage should never pass an invalid handle nativeHandle = tokenizerStreamHandle; } + /** + * Decode one token. + * + * @param token The token. + * @return The decoded result. + * @throws GenAIException If the call to the GenAI native API fails. + */ public String decode(int token) throws GenAIException { if (nativeHandle == 0) { throw new IllegalStateException("Instance has been freed and is invalid"); diff --git a/src/java/src/main/java/ai/onnxruntime/genai/package-info.java b/src/java/src/main/java/ai/onnxruntime/genai/package-info.java index 9a43f976a..a71bedc73 100644 --- a/src/java/src/main/java/ai/onnxruntime/genai/package-info.java +++ b/src/java/src/main/java/ai/onnxruntime/genai/package-info.java @@ -8,27 +8,26 @@ * *
There are two shared libraries required: onnxruntime-genai and onnxruntime-genai-jni - * . The loader is in {@link ai.onnxruntime.genai.GenAI} and the logic is in this order: + * . The loader is in ai.onnxruntime.genai.GenAI and the logic is in this order:
  1. The user may signal to skip loading of a shared library using a property in the form onnxruntime-genai.native.LIB_NAME.skip with a value of true. This means the user has decided to load the library by some other means.
  2. The user may specify an explicit location of all native library files using a property in - * the form onnxruntime-genai.native.path. This uses {@link java.lang.System#load}. + * the form onnxruntime-genai.native.path. This uses {java.lang.System#load}.
  3. The user may specify an explicit location of the shared library file using a property in - * the form onnxruntime-genai.native.LIB_NAME.path. This uses {@link java.lang.System#load}. + * the form onnxruntime-genai.native.LIB_NAME.path. This uses {java.lang.System#load}.
  4. The shared library is autodiscovered:
    1. If the shared library is present in the classpath resources, load using - * {@link java.lang.System#load} + * {java.lang.System#load} via a temporary file. Ideally, this should be the default use case when adding JAR's/dependencies containing the shared libraries to your classpath.
    2. If the shared library is not present in the classpath resources, then load using - * {@link java.lang.System#loadLibrary} + * {java.lang.System#loadLibrary}, which usually looks elsewhere on the filesystem for the library. The semantics and behavior of that method are system/JVM dependent. Typically, the java.library.path property is used to specify the location of native libraries.
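Not part of the patch: a small sketch of supplying the loader properties described in the list above, assuming they are read when the GenAI native code is first initialized (they can equally be passed as -D JVM flags). The paths and library name are placeholders.

import ai.onnxruntime.genai.Model;

public final class NativeLoadingExample {
  public static void main(String[] args) throws Exception {
    // Corresponds to item 2 above: a directory holding all native libraries (placeholder path).
    System.setProperty("onnxruntime-genai.native.path", "/opt/onnxruntime-genai/native");

    // Corresponds to item 1 above: skip a library that is loaded by some other means.
    // LIB_NAME is the library's short name, e.g. "onnxruntime-genai-jni".
    // System.setProperty("onnxruntime-genai.native.onnxruntime-genai-jni.skip", "true");

    // The first use of the API triggers the loading order described in the list above.
    try (Model model = new Model("/path/to/model")) {
      System.out.println("Loaded GenAI native libraries and model.");
    }
  }
}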
    diff --git a/src/java/src/main/native/ai_onnxruntime_genai_Config.cpp b/src/java/src/main/native/ai_onnxruntime_genai_Config.cpp index d77744d03..68c407bd3 100644 --- a/src/java/src/main/native/ai_onnxruntime_genai_Config.cpp +++ b/src/java/src/main/native/ai_onnxruntime_genai_Config.cpp @@ -42,11 +42,11 @@ Java_ai_onnxruntime_genai_Config_appendProvider(JNIEnv* env, jobject thiz, jlong } JNIEXPORT void JNICALL -Java_ai_onnxruntime_genai_Config_setProvider(JNIEnv* env, jobject thiz, jlong native_handle, jstring provider_name, jstring option_name, jstring option_value) { +Java_ai_onnxruntime_genai_Config_setProvider(JNIEnv* env, jobject thiz, jlong native_handle, jstring provider_name, jstring option_key, jstring option_value) { CString c_provider_name{env, provider_name}; - CString c_option_name{env, option_name}; + CString c_option_key{env, option_key}; CString c_option_value{env, option_value}; OgaConfig* config = reinterpret_cast(native_handle); - ThrowIfError(env, OgaConfigSetProviderOption(config, c_provider_name, c_option_name, c_option_value)); + ThrowIfError(env, OgaConfigSetProviderOption(config, c_provider_name, c_option_key, c_option_value)); } diff --git a/src/java/src/test/java/ai/onnxruntime/genai/GenerationTest.java b/src/java/src/test/java/ai/onnxruntime/genai/GenerationTest.java index d67faeb36..406c438ac 100644 --- a/src/java/src/test/java/ai/onnxruntime/genai/GenerationTest.java +++ b/src/java/src/test/java/ai/onnxruntime/genai/GenerationTest.java @@ -68,7 +68,7 @@ public void testUsageWithListener() throws GenAIException { @EnabledIf("haveAdapters") public void testUsageWithAdapters() throws GenAIException { try (Model model = new Model(TestUtils.testAdapterTestModelPath()); - Tokenizer tokenizer = model.createTokenizer()) { + Tokenizer tokenizer = new Tokenizer(model)) { String[] prompts = { TestUtils.applyPhi2ChatTemplate("def is_prime(n):"), TestUtils.applyPhi2ChatTemplate("def compute_gcd(x, y):"), @@ -76,7 +76,7 @@ public void testUsageWithAdapters() throws GenAIException { }; try (Sequences sequences = tokenizer.encodeBatch(prompts); - GeneratorParams params = model.createGeneratorParams()) { + GeneratorParams params = new GeneratorParams(model)) { params.setSearchOption("max_length", 200); params.setSearchOption("batch_size", prompts.length); @@ -97,7 +97,7 @@ public void testUsageWithAdapters() throws GenAIException { try (Adapters adapters = new Adapters(model); Generator generator = new Generator(model, params); ) { generator.appendTokenSequences(sequences); - adapters.loadAdapters(TestUtils.testAdapterTestAdaptersPath(), "adapters_a_and_b"); + adapters.loadAdapter(TestUtils.testAdapterTestAdaptersPath(), "adapters_a_and_b"); generator.setActiveAdapter(adapters, "adapters_a_and_b"); while (!generator.isDone()) { generator.generateNextToken(); diff --git a/src/java/src/test/java/ai/onnxruntime/genai/MultiModalProcessorTest.java b/src/java/src/test/java/ai/onnxruntime/genai/MultiModalProcessorTest.java index 7f90692d2..25eeb8a97 100644 --- a/src/java/src/test/java/ai/onnxruntime/genai/MultiModalProcessorTest.java +++ b/src/java/src/test/java/ai/onnxruntime/genai/MultiModalProcessorTest.java @@ -18,7 +18,7 @@ public void testBatchEncodeDecode() throws GenAIException { try (Model model = new Model(TestUtils.testVisionModelPath()); MultiModalProcessor multiModalProcessor = new MultiModalProcessor(model); TokenizerStream stream = multiModalProcessor.createStream(); - GeneratorParams generatorParams = model.createGeneratorParams()) { + GeneratorParams 
generatorParams = new GeneratorParams(model)) { String inputs = new String( "<|user|>\n<|image_1|>\n Can you convert the table to markdown format?\n<|end|>\n<|assistant|>\n");
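Not part of the patch: a minimal end-to-end sketch showing how the now-public Tokenizer and GeneratorParams constructors replace model.createTokenizer() and model.createGeneratorParams() in the generation loop exercised by the tests above. The model path and prompt are placeholders.

import ai.onnxruntime.genai.GenAIException;
import ai.onnxruntime.genai.Generator;
import ai.onnxruntime.genai.GeneratorParams;
import ai.onnxruntime.genai.Model;
import ai.onnxruntime.genai.Sequences;
import ai.onnxruntime.genai.Tokenizer;
import ai.onnxruntime.genai.TokenizerStream;

public final class StreamingExample {
  public static void main(String[] args) throws GenAIException {
    try (Model model = new Model("/path/to/model");
        Tokenizer tokenizer = new Tokenizer(model);          // was model.createTokenizer()
        GeneratorParams params = new GeneratorParams(model); // was model.createGeneratorParams()
        TokenizerStream stream = tokenizer.createStream()) {
      params.setSearchOption("max_length", 128);

      try (Sequences sequences = tokenizer.encodeBatch(new String[] {"def is_prime(n):"});
          Generator generator = new Generator(model, params)) {
        generator.appendTokenSequences(sequences);
        while (!generator.isDone()) {
          generator.generateNextToken(); // computes the next logits and selects a token
          // Decode and print the newest token in sequence 0 as it is produced.
          System.out.print(stream.decode(generator.getLastTokenInSequence(0)));
        }
      }
    }
  }
}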