diff --git a/.github/workflows/check_compatibility_deepimagej.yaml b/.github/workflows/check_compatibility_deepimagej.yaml new file mode 100644 index 00000000..814e2053 --- /dev/null +++ b/.github/workflows/check_compatibility_deepimagej.yaml @@ -0,0 +1,105 @@ +name: check compatibility deepimagej + +concurrency: deepimagej + +on: + push: + branches: + - main + paths: + - .github/workflows/check_compatibility_deepimagej.yaml + - scripts/get_java_software_versions.py + - scripts/check_compatibility_java_software/**/* + workflow_dispatch: + schedule: + - cron: 0 1 * * * # update compatibility once a day + +jobs: + generate-version-dict: + runs-on: ubuntu-latest + outputs: + matrix: ${{ steps.set-dict.outputs.matrix }} + steps: + - uses: actions/checkout@v3 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.x' + - name: Install deps + run: pip install requests==2.28.2 beautifulsoup4==4.12.3 packaging==23.0 + - name: Generate dict + id: set-dict + run: | + matrix_output=$(python scripts/get_java_software_versions.py deepimagej) + echo "matrix=${matrix_output}" >> $GITHUB_OUTPUT + - name: Versions matrix + run: echo '${{ steps.set-dict.outputs.matrix }}' + + run: + needs: generate-version-dict + runs-on: ubuntu-latest + strategy: + matrix: ${{ fromJson(needs.generate-version-dict.outputs.matrix) }} + steps: + - uses: actions/checkout@v3 + - uses: actions/checkout@v3 + with: + repository: bioimage-io/collection-bioimage-io + ref: gh-pages + path: bioimageio-gh-pages + - name: Setup Maven Action + uses: stCarolas/setup-maven@v4.5 + with: + java-version: 11 + maven-version: 3.9.5 + - name: Build with Maven + run: | + cd scripts/check_compatibility_java_software + mvn clean install org.apache.maven.plugins:maven-shade-plugin:3.2.4:shade -Dshade.mainClass=io.bioimage.modelrunner.ci.ContinuousIntegration + - name: Get jar file name + id: get-jar + run: | + cd scripts/check_compatibility_java_software + jarfile=$(mvn -q exec:exec -Dexec.executable=echo -Dexec.args='${project.build.finalName}.jar') + if [ -z "$jarfile" ]; then + echo "Failed to get jar file name" + exit 1 + fi + if [ ! -f "target/$jarfile" ]; then + echo "Jar file not found: target/$jarfile" + exit 1 + fi + echo "Jar file found: target/$jarfile" + echo "jarfile=$jarfile" >> $GITHUB_OUTPUT + - name: Download engines + run: | + java -cp "scripts/check_compatibility_java_software/target/${{ steps.get-jar.outputs.jarfile }}" io.bioimage.modelrunner.ci.DownloadEngines deepimagej + - name: Run models + run: | + java -cp "scripts/check_compatibility_java_software/target/${{ steps.get-jar.outputs.jarfile }}" io.bioimage.modelrunner.ci.ContinuousIntegration deepimagej ${{ matrix.key }} + - name: Upload artifacts + uses: actions/upload-artifact@v3 + with: + path: test_summaries_deepimagej_${{ matrix.key }} + + sendreport: + runs-on: ubuntu-latest + needs: [generate-version-dict, run] + strategy: + matrix: ${{ fromJson(needs.generate-version-dict.outputs.matrix) }} + steps: + - uses: actions/checkout@v3 + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: '3.10' + - name: Get test results + uses: actions/download-artifact@v3 + with: + path: test_summaries_deepimagej_${{ matrix.key }} + - name: Install deps + run: | + pip install . 
+ - name: Send deepimagej ${{ matrix.key }} tests + shell: bash -l {0} + run: python scripts/check_compatibility_java_software.py deepimagej ${{ matrix.key }} --summaries_dir "test_summaries_deepimagej_${{ matrix.key }}/artifact" diff --git a/.github/workflows/check_compatibility_icy.yaml b/.github/workflows/check_compatibility_icy.yaml new file mode 100644 index 00000000..f6a5fdf8 --- /dev/null +++ b/.github/workflows/check_compatibility_icy.yaml @@ -0,0 +1,105 @@ +name: check compatibility icy + +concurrency: icy + +on: + push: + branches: + - main + paths: + - .github/workflows/check_compatibility_icy.yaml + - scripts/get_java_software_versions.py + - scripts/check_compatibility_java_software/**/* + workflow_dispatch: + schedule: + - cron: 0 1 * * * # update compatibility once a day + +jobs: + generate-version-dict: + runs-on: ubuntu-latest + outputs: + matrix: ${{ steps.set-dict.outputs.matrix }} + steps: + - uses: actions/checkout@v3 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.x' + - name: Install deps + run: pip install requests==2.28.2 beautifulsoup4==4.12.3 packaging==23.0 + - name: Generate dict + id: set-dict + run: | + matrix_output=$(python scripts/get_java_software_versions.py icy) + echo "matrix=${matrix_output}" >> $GITHUB_OUTPUT + - name: Versions matrix + run: echo '${{ steps.set-dict.outputs.matrix }}' + + run: + needs: generate-version-dict + runs-on: ubuntu-latest + strategy: + matrix: ${{ fromJson(needs.generate-version-dict.outputs.matrix) }} + steps: + - uses: actions/checkout@v3 + - uses: actions/checkout@v3 + with: + repository: bioimage-io/collection-bioimage-io + ref: gh-pages + path: bioimageio-gh-pages + - name: Setup Maven Action + uses: stCarolas/setup-maven@v4.5 + with: + java-version: 11 + maven-version: 3.9.5 + - name: Build with Maven + run: | + cd scripts/check_compatibility_java_software + mvn clean install org.apache.maven.plugins:maven-shade-plugin:3.2.4:shade -Dshade.mainClass=io.bioimage.modelrunner.ci.ContinuousIntegration + - name: Get jar file name + id: get-jar + run: | + cd scripts/check_compatibility_java_software + jarfile=$(mvn -q exec:exec -Dexec.executable=echo -Dexec.args='${project.build.finalName}.jar') + if [ -z "$jarfile" ]; then + echo "Failed to get jar file name" + exit 1 + fi + if [ ! -f "target/$jarfile" ]; then + echo "Jar file not found: target/$jarfile" + exit 1 + fi + echo "Jar file found: target/$jarfile" + echo "jarfile=$jarfile" >> $GITHUB_OUTPUT + - name: Download engines + run: | + java -cp "scripts/check_compatibility_java_software/target/${{ steps.get-jar.outputs.jarfile }}" io.bioimage.modelrunner.ci.DownloadEngines icy + - name: Run models + run: | + java -cp "scripts/check_compatibility_java_software/target/${{ steps.get-jar.outputs.jarfile }}" io.bioimage.modelrunner.ci.ContinuousIntegration icy ${{ matrix.key }} + - name: Upload artifacts + uses: actions/upload-artifact@v3 + with: + path: test_summaries_icy_${{ matrix.key }} + + sendreport: + runs-on: ubuntu-latest + needs: [generate-version-dict, run] + strategy: + matrix: ${{ fromJson(needs.generate-version-dict.outputs.matrix) }} + steps: + - uses: actions/checkout@v3 + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: '3.10' + - name: Get test results + uses: actions/download-artifact@v3 + with: + path: test_summaries_icy_${{ matrix.key }} + - name: Install deps + run: | + pip install . 
+ - name: Send icy ${{ matrix.key }} tests + shell: bash -l {0} + run: python scripts/check_compatibility_java_software.py icy ${{ matrix.key }} --summaries_dir "test_summaries_icy_${{ matrix.key }}/artifact" diff --git a/.gitignore b/.gitignore index 671d4066..aa48971c 100644 --- a/.gitignore +++ b/.gitignore @@ -3,3 +3,7 @@ __pycache__/ *.egg-info/ docs/ *.pyc +TEMP/ +bioimageio-gh-pages +scripts/check_compatibility_java_software/test_summaries_null_null +scripts/check_compatibility_java_software/test_summaries_default_default \ No newline at end of file diff --git a/scripts/check_compatibility_java_software.py b/scripts/check_compatibility_java_software.py new file mode 100644 index 00000000..ccce7ef3 --- /dev/null +++ b/scripts/check_compatibility_java_software.py @@ -0,0 +1,184 @@ +import argparse +from typing import List, Dict, Any +import os + +import requests + +from loguru import logger +from ruyaml import YAML, YAMLError + +from bioimageio_collection_backoffice.db_structure.compatibility import ( + CompatiblityReport, +) +from bioimageio_collection_backoffice.remote_collection import Record, RemoteCollection +from bioimageio_collection_backoffice.s3_client import Client + + + +def find_java_summaries(directory: str) -> List[str]: + """Walks through a directory and its subdirectories to find all YAML files.""" + yaml_files: List[str] = [] + # Walk through all directories and files in the specified directory + for root, _, files in os.walk(directory): + for file in files: + # Check if the file ends with .yaml or .yml + if file.endswith('.yaml') or file.endswith('.yml'): + # Create the full path to the file + full_path = os.path.join(root, file) + # Append the full path to the list of YAML files + yaml_files.append(full_path) + return yaml_files + + + +def read_yaml_from_url(url: str): + """Fetch and parse a YAML file from a specified URL. + + Args: + url (str): The URL of the YAML file. + + Returns: + dict: Parsed YAML data as a dictionary. 
+ """ + response = requests.get(url) + response.raise_for_status() + + yaml=YAML(typ="safe") + data = yaml.load(response.text) + return data + + +def get_tests_from_summaries(rdf: str, path_to_summaries: str) -> Dict[str, str]: + summary = {} + try: + rdf_yaml = read_yaml_from_url(rdf) + except requests.RequestException as e: + summary["status"] = "failed" + summary["error"] = "Unable to access rdf.yaml file" + summary["details"] = str(e) + return summary + except YAMLError as e: + summary["status"] = "failed" + summary["error"] = "Unable to read rdf.yaml file" + summary["details"] = str(e) + return summary + + id = rdf_yaml["id"] + rdf_path = os.path.join(path_to_summaries, id) + test_files = os.listdir(rdf_path) + + if len(test_files) == 0: + summary["status"] = "failed" + summary["error"] = "No tests executed" + summary["details"] = "The model tests were not executed or the test files could not be located" + return summary + + summaries_yaml = None + error: str = "" + for test_file in test_files: + try: + summaries_yaml=YAML(typ='safe') # default, if not specfied, is 'rt' (round-trip) + summaries_yaml.load(test_file) + summary = find_passed_test(summaries_yaml) + if summary["status"] == "passed": + return summary + except YAMLError as e: + error += str(e) + os.linesep + continue + + if summary["status"] is not None: + return summary + + summary["status"] = "failed" + summary["error"] = "Unable to read the test results yaml file" + summary["details"] = str(error) + + return summary + + +def find_passed_test(summaries_yaml: List[Dict[Any, Any]]) -> Dict[Any, Any]: + summary = {} + for elem in summaries_yaml: + if not isinstance(elem, dict): + summary["status"] = "failed" + summary["error"] = "Invalid test output format" + summary["details"] = "Expected a list of dictionaries, but received an improperly formatted element." + return summary + elif elem.get("status") is not None: + return elem + + summary["status"] = "failed" + summary["error"] = "No test contents found" + summary["details"] = "test file was empty." + return summary + + + +def check_compatibility_java_software_impl( + record: Record, + software_name: str, + version: str, + summaries_dir: str = "test_summaries/artifact", +): + tool = f"{software_name}{version}" + report_path = record.get_compatibility_report_path(tool) + if list(record.client.ls(report_path)): + return + + rdf_data = record.client.load_file(record.rdf_path) + assert rdf_data is not None + rdf = yaml.load(rdf_data) + assert isinstance(rdf, dict) + if rdf.get("type") != "model": + return CompatiblityReport( + tool=tool, + status="not-applicable", + error=None, + details="only 'model' resources can be used in icy.", + ) + + #summaries = find_java_summaries(summaries_dir) + summary = get_tests_from_summaries(record.client.get_file_url(record.rdf_path), summaries_dir) + # produce test summaries for each weight format + # TODO check what it produces summary = test_model(record.client.get_file_url(record.rdf_path)) + + return CompatiblityReport( + tool=tool, + status=summary["status"], + error=None if summary["status"] == "passed" else summary["error"], + details=summary["details"], + links=["{software_name}/{software_name}"], + ) + + +def check_compatibility_java_software( + software_name: str, + version: str, + summaries_dir: str = "test_summaries", +): + """preliminary icy check + + only checks if test outputs are reproduced for onnx, torchscript, or pytorch_state_dict weights. 
+ + """ + collection = RemoteCollection(Client()) + for record in collection.get_published_versions(): + try: + report = check_compatibility_java_software_impl( + record, software_name, version, summaries_dir + ) + except Exception as e: + logger.error(f"failed to check '{record.id}': {e}") + else: + print(report) + if report is not None: + record.set_compatibility_report(report) + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + _ = parser.add_argument("software_name") + _ = parser.add_argument("version") + _ = parser.add_argument("--summaries_dir", default="test_summaries/artifact", help="Directory path where summaries are stored.") + + check_compatibility_java_software(parser.parse_args().software_name, parser.parse_args().version, parser.parse_args().summaries_dir) diff --git a/scripts/check_compatibility_java_software/.gitignore b/scripts/check_compatibility_java_software/.gitignore new file mode 100644 index 00000000..7aa8b531 --- /dev/null +++ b/scripts/check_compatibility_java_software/.gitignore @@ -0,0 +1,16 @@ +/engines/ +/models/ + +# maven specific files +/target/ + +# eclipse specific files +/.settings/ +.classpath +.project + +# intellij specific files +.idea + +# miscellaneous +*.swp diff --git a/scripts/check_compatibility_java_software/pom.xml b/scripts/check_compatibility_java_software/pom.xml new file mode 100644 index 00000000..cf80c081 --- /dev/null +++ b/scripts/check_compatibility_java_software/pom.xml @@ -0,0 +1,99 @@ + + + 4.0.0 + + + org.scijava + pom-scijava + 37.0.0 + + + + io.bioimage + dl-modelrunner-ci + 0.0.1-SNAPSHOT + + JDLL CI + Continuous integration code for JDLL + + https://github.com/bioimage-io/pytorch-java-interface-javacpp + 2023 + + Icy Bioimage Analysis + https://icy.bioimageanalysis.org/ + + + + Apache License, Version 2.0 + http://www.apache.org/licenses/LICENSE-2.0.txt + repo + + + + + + carlosuc3m + Carlos Javier Garcia Lopez de Haro + https://github.com/carlosuc3m + + developer + debugger + reviewer + support + maintainer + + + + + + Carlos Garcia + https://github.com/carlosuc3m + carlosuc3m + + + + + + Image.sc Forum + https://forum.image.sc/tag/bioimage-io + + + + + scm:git:https://github.com/Icy-imaging/icy-bioimage-io + scm:git:git@github.com:Icy-imaging/icy-bioimage-io + HEAD + https://github.com/Icy-imaging/icy-bioimage-io + + + GitHub Issues + https://github.com/Icy-imaging/icy-bioimage-io/issues + + + None + + + + io.bioimage.modelrunner.ci + + apache_v2 + Institut Pasteur + + 0.5.8 + + + + + scijava.public + https://maven.scijava.org/content/groups/public + + + + + + io.bioimage + dl-modelrunner + ${dl-modelrunner.version} + + + diff --git a/scripts/check_compatibility_java_software/src/main/java/io/bioimage/modelrunner/ci/ContinuousIntegration.java b/scripts/check_compatibility_java_software/src/main/java/io/bioimage/modelrunner/ci/ContinuousIntegration.java new file mode 100644 index 00000000..4984463d --- /dev/null +++ b/scripts/check_compatibility_java_software/src/main/java/io/bioimage/modelrunner/ci/ContinuousIntegration.java @@ -0,0 +1,399 @@ +/*- + * #%L + * This project performs Continuous Integration tasks on java software based on JDLL + * %% + * Copyright (C) 2023 Institut Pasteur. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ +package io.bioimage.modelrunner.ci; + +import java.io.File; +import java.io.IOException; +import java.io.PrintWriter; +import java.io.StringWriter; +import java.nio.file.FileSystems; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.PathMatcher; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import io.bioimage.modelrunner.bioimageio.BioimageioRepo; +import io.bioimage.modelrunner.bioimageio.description.ModelDescriptor; +import io.bioimage.modelrunner.bioimageio.description.TransformSpec; +import io.bioimage.modelrunner.bioimageio.description.exceptions.ModelSpecsException; +import io.bioimage.modelrunner.bioimageio.description.weights.ModelWeight; +import io.bioimage.modelrunner.bioimageio.description.weights.WeightFormat; +import io.bioimage.modelrunner.engine.EngineInfo; +import io.bioimage.modelrunner.engine.installation.EngineInstall; +import io.bioimage.modelrunner.model.Model; +import io.bioimage.modelrunner.numpy.DecodeNumpy; +import io.bioimage.modelrunner.tensor.Tensor; +import io.bioimage.modelrunner.utils.Constants; +import io.bioimage.modelrunner.utils.YAMLUtils; +import net.imglib2.Cursor; +import net.imglib2.RandomAccessibleInterval; +import net.imglib2.loops.LoopBuilder; +import net.imglib2.type.NativeType; +import net.imglib2.type.numeric.RealType; +import net.imglib2.type.numeric.real.FloatType; +import net.imglib2.view.Views; + +/** + * + */ +public class ContinuousIntegration { + + private static Map downloadedModelsCorrectly = new HashMap(); + private static Map downloadedModelsIncorrectly = new HashMap(); + + private static String version; + + private static String software; + private static final String TEST_NAME = "reproduce test outputs from test inputs"; + + public static void main(String[] args) throws IOException { + if (args.length != 0) { + software = args[0]; + version = args[1]; + } else { + software = "default"; + version = "default"; + } + + Path currentDir = Paths.get(ContinuousIntegration.class.getProtectionDomain().getCodeSource().getLocation().getPath()).getParent(); + Path rdfDir = currentDir.resolve("../../../bioimageio-gh-pages/rdfs").normalize(); + + // Create a matcher for the pattern 'rdf.yaml' + runTests(rdfDir, "**", "**", Paths.get("test_summaries_" + software + "_" + version)); + } + + + public static void runTests(Path rdfDir, String resourceID, String versionID, Path summariesDir) throws IOException { + + PathMatcher matcher = FileSystems.getDefault().getPathMatcher("glob:" + resourceID + File.separator + versionID + File.separator + Constants.RDF_FNAME); + + List rdfFiles = Files.walk(rdfDir).filter(matcher::matches).collect(Collectors.toList()); + EngineInstall installer = EngineInstall.createInstaller(); + installer.basicEngineInstallation(); + + for (Path rdfPath : rdfFiles) { + System.out.println(""); + System.out.println(""); + System.out.println(rdfPath); + Map rdf = new LinkedHashMap(); + try { + rdf = 
YAMLUtils.load(rdfPath.toAbsolutePath().toString()); + } catch (Exception | Error ex) { + ex.printStackTrace(); + continue; + } + + Object rdID = rdf.get("id"); + String summariesPath = summariesDir.toAbsolutePath() + File.separator + + (rdID != null ? rdID : "") + File.separator + "test_summary" + ".yaml"; + Object type = rdf.get("type"); + Object weightFormats = rdf.get("weights"); + if (rdID == null || !(rdID instanceof String)) { + new Exception(rdfPath.toAbsolutePath().toString() + " is missing ID field").printStackTrace(); + continue; + } else if (type == null || !(type instanceof String) || !((String) type).equals("model")) { + Map summary = create(rdfPath, "not-applicable", null, null, "not a model"); + writeSummary(summariesPath, summary); + continue; + } else if (weightFormats == null || !(weightFormats instanceof Map)) { + Map summary = create(rdfPath, + "failed", "Missing weights dictionary for " + rdID, null, String.valueOf(weightFormats)); + writeSummary(summariesPath, summary); + continue; + } + ModelWeight weights = null; + try { + weights = ModelWeight.build((Map) weightFormats); + } catch (Exception | Error ex) { + Map summary = create(rdfPath, + "failed", "Missing/Invalid weight formats for " + rdID, stackTrace(ex), "Unable to read weight formats"); + writeSummary(summariesPath, summary); + continue; + } + + if (weights != null && weights.gettAllSupportedWeightObjects().size() == 0) { + Map summary = create(rdfPath, + "failed", "Unsupported model weights", null, "The model weights belong to a Deep Learning " + + "framework not supported by " + software + "_" + version + "."); + writeSummary(summariesPath, summary); + continue; + } + + + for (WeightFormat ww : weights.gettAllSupportedWeightObjects()) { + Map summaryWeightFormat = new LinkedHashMap(); + try { + summaryWeightFormat = testResource(rdfPath.toAbsolutePath().toString(), ww, 4, "model"); + } catch (Exception | Error ex) { + ex.printStackTrace(); + summaryWeightFormat = create(rdfPath, "failed", "exception thrown during testing", + stackTrace(ex), "test was interrupted by an exception while testing " + ww.getFramework() + " weights"); + } + summariesPath = summariesDir.toAbsolutePath() + File.separator + + rdID + File.separator + "test_summary_" + ww.getFramework() + ".yaml"; + writeSummary(summariesPath, summaryWeightFormat); + } + + } + } + + private static Map create(Path rdfPath, String status, String error, + String tb, String details) { + Map summaryMap = new LinkedHashMap(); + summaryMap.put("name", TEST_NAME); + summaryMap.put("status", status); + summaryMap.put("error", error); + summaryMap.put("source_name", rdfPath == null ? null : rdfPath.toAbsolutePath().toString()); + summaryMap.put("traceback", tb); + summaryMap.put("details", details); + summaryMap.put(software, version); + return summaryMap; + } + + private static void writeSummaries(String summariesPath, List summaries) throws IOException { + Path path = Paths.get(summariesPath).getParent(); + if (path != null && !Files.exists(path)) + Files.createDirectories(path); + YAMLUtils.writeYamlFile(summariesPath, summaries); + } + + private static void writeSummary(String summariesPath, Map summary) throws IOException { + List summaries = new ArrayList(); + summaries.add(summary); + Path path = Paths.get(summariesPath).getParent(); + if (path != null && !Files.exists(path)) + Files.createDirectories(path); + YAMLUtils.writeYamlFile(summariesPath, summaries); + } + + private static Map testResource(String rdf, WeightFormat weightFormat, int decimal, String expectedType) {
ModelDescriptor rd = null; + try { + rd = ModelDescriptor.readFromLocalFile(rdf, false); + } catch (ModelSpecsException e) { + Map summary = create(Paths.get(rdf), + "failed", "Unable to parse specs from rdf.yaml file", stackTrace(e), + software + "_" + version + " is unable to read the specs from the rdf.yaml file. Spec version" + + " might not be compatible with the software version."); + return summary; + } + + Map test1 = testExpectedResourceType(rd, expectedType); + if (test1.get("status").equals("failed")) return test1; + + Map test2 = testModelDownload(rd); + if (test2.get("status").equals("failed")) return test2; + + return testModelInference(rd, weightFormat, decimal); + } + + private static Map testExpectedResourceType(ModelDescriptor rd, String type) { + boolean yes = rd.getType().equals(type); + Path path = Paths.get(rd.getModelPath() + File.separator + Constants.RDF_FNAME); + return create(path, yes ? "passed" : "failed", + yes ? null : "expected type was " + type + " but found " + rd.getType(), null, null); + } + + private static Map testModelDownload(ModelDescriptor rd) { + Path path = Paths.get(rd.getModelPath() + File.separator + Constants.RDF_FNAME); + String error = null; + if (downloadedModelsCorrectly.keySet().contains(rd.getName())) { + rd.addModelPath(Paths.get(downloadedModelsCorrectly.get(rd.getName()))); + } else if (downloadedModelsIncorrectly.keySet().contains(rd.getName())) { + error = downloadedModelsIncorrectly.get(rd.getName()); + } else { + error = downloadModel(rd); + } + String details = null; + if (error != null && error.contains("The provided name does not correspond to")) + details = "Model does not exist on the Bioimage.io repo"; + else if (error != null) + details = error; + + return create(path, error == null ? "passed" : "failed", + error == null ? 
null : software + " unable to download model", + error, details); + } + + private static String downloadModel(ModelDescriptor rd) { + String error = null; + try { + BioimageioRepo br = BioimageioRepo.connect(); + String folder = br.downloadByName(rd.getName(), "models"); + rd.addModelPath(Paths.get(folder).toAbsolutePath()); + downloadedModelsCorrectly.put(rd.getName(), folder); + } catch (Exception | Error ex) { + error = stackTrace(ex); + downloadedModelsIncorrectly.put(rd.getName(), error); + } + return error; + } + + private static < T extends RealType< T > & NativeType< T > > + Map testModelInference(ModelDescriptor rd, WeightFormat ww, int decimal) { + System.out.println(rd.getName()); + System.out.println(ww.getFramework()); + Map inferTest = new LinkedHashMap(); + inferTest.put("name", "reproduce test inputs from test outptus for " + ww.getFramework()); + inferTest.put("source_name", rd.getName()); + inferTest.put(software, version); + if (rd.getModelPath() == null) { + return create(null, "failed", + "model was not correctly downloaded", null, null); + } + if (software.equals(Tags.DEEPIMAGEJ) && rd.getInputTensors().size() != 1) { + return create(null, "failed", + software + " only supports models with 1 (one) input", null, software + " only supports models " + + "with 1 input and this model has " + rd.getInputTensors().size()); + } else if (rd.getInputTensors().size() != rd.getTestInputs().size()) { + return create(null, "failed", + "the number of test inputs should be the same as the number of model inputs", null, + "the number of test inputs should be the same as the number of model inputs," + + rd.getInputTensors().size() + " vs " + rd.getTestInputs().size()); + } else if (rd.getOutputTensors().size() != rd.getTestOutputs().size()) { + return create(null, "failed", + "the number of test outputs should be the same as the number of model outputs", null, + "the number of test outputs should be the same as the number of model outputs," + + rd.getInputTensors().size() + " vs " + rd.getTestInputs().size()); + } + + List> inps = new ArrayList>(); + List> outs = new ArrayList>(); + for (int i = 0; i < rd.getInputTensors().size(); i ++) { + RandomAccessibleInterval rai; + try { + rai = DecodeNumpy.retrieveImgLib2FromNpy(rd.getTestInputs().get(i).getLocalPath().toAbsolutePath().toString()); + } catch (Exception | Error e) { + return failInferenceTest(rd.getName(), "unable to open test input: " + rd.getTestInputs().get(i).getString(), stackTrace(e)); + } + Tensor inputTensor = Tensor.build(rd.getInputTensors().get(i).getName(), rd.getInputTensors().get(i).getAxesOrder(), rai); + if (rd.getInputTensors().get(i).getPreprocessing().size() > 0) { + TransformSpec transform = rd.getInputTensors().get(i).getPreprocessing().get(0); + JavaProcessing preproc; + try { + preproc = JavaProcessing.definePreprocessing(transform.getName(), transform.getKwargs()); + } catch (Exception | Error e) { + e.printStackTrace(); + return failInferenceTest(rd.getName(), "pre-processing transformation not supported by " + software + ": " + transform.getName(), stackTrace(e)); + } + inputTensor = preproc.execute(rd.getInputTensors().get(i), inputTensor); + } + inps.add(inputTensor); + } + for (int i = 0; i < rd.getOutputTensors().size(); i ++) { + Tensor outputTensor = Tensor.buildEmptyTensor(rd.getOutputTensors().get(i).getName(), rd.getOutputTensors().get(i).getAxesOrder()); + outs.add(outputTensor); + } + EngineInfo engineInfo; + try { + engineInfo = EngineInfo.defineCompatibleDLEngineWithRdfYamlWeights(ww); + } 
catch (Exception | Error e) { + e.printStackTrace(); + return failInferenceTest(rd.getName(), "selected weights not supported by " + software + ": " + ww.getFramework(), stackTrace(e)); + } + if (engineInfo == null) return create(Paths.get(rd.getModelPath()), "failed", "no compatible weights", + null, "no compatible weights for " + ww.getFramework() + "_" + ww.getTrainingVersion()); + Model model; + try { + model = Model.createDeepLearningModel(rd.getModelPath(), rd.getModelPath() + File.separator + ww.getSourceFileName(), engineInfo); + model.loadModel(); + } catch (Exception | Error e) { + e.printStackTrace(); + return failInferenceTest(rd.getName(), "unable to instantiate/load model", stackTrace(e)); + } + try { + model.runModel(inps, outs); + } catch (Exception | Error e) { + e.printStackTrace(); + return failInferenceTest(rd.getName(), "unable to run model", stackTrace(e)); + } + + for (int i = 0; i < rd.getOutputTensors().size(); i ++) { + Tensor tt = (Tensor) outs.get(i); + if (rd.getOutputTensors().get(i).getPostprocessing().size() > 0) { + TransformSpec transform = rd.getOutputTensors().get(i).getPostprocessing().get(0); + if (transform.getName().equals("python")) continue; + JavaProcessing preproc; + try { + preproc = JavaProcessing.definePreprocessing(transform.getName(), transform.getKwargs()); + } catch (Exception | Error e) { + e.printStackTrace(); + return failInferenceTest(rd.getName(), "post-processing transformation not supported by " + software + ": " + transform.getName(), stackTrace(e)); + } + tt = preproc.execute(rd.getOutputTensors().get(i), tt); + } + RandomAccessibleInterval rai; + try { + rai = DecodeNumpy.retrieveImgLib2FromNpy(rd.getTestOutputs().get(i).getLocalPath().toAbsolutePath().toString()); + } catch (Exception | Error e) { + e.printStackTrace(); + return failInferenceTest(rd.getName(), "unable to open test output: " + rd.getTestOutputs().get(i).getString(), stackTrace(e)); + } + LoopBuilder.setImages( tt.getData(), rai ) + .multiThreaded().forEachPixel( ( j, o ) -> o.set( (T) new FloatType(o.getRealFloat() - j.getRealFloat())) ); + double diff = computeMaxDiff(rai); + if (diff > Math.pow(10, -decimal)) + return failInferenceTest(rd.getModelPath(), "output number " + i + " is not correct", + "output number " + i + " produces a very different result, " + + "the max difference is " + diff + ", bigger than max allowed " + Math.pow(10, -decimal)); + } + + return create(Paths.get(rd.getModelPath()), "passed", null, null, null); + } + + private static Map failInferenceTest(String sourceName, String error, String tb) { + return create(Paths.get(sourceName), "failed", error, tb, tb); + } + + + public static < T extends RealType< T > & NativeType< T > > double computeMaxDiff(final RandomAccessibleInterval< T > input) { + Cursor iterator = Views.iterable(input).cursor(); + T type = iterator.next(); + T min = type.copy(); + T max = type.copy(); + while ( iterator.hasNext() ) + { + type = iterator.next(); + if ( type.compareTo( min ) < 0 ) + min.set( type ); + if ( type.compareTo( max ) > 0 ) + max.set( type ); + } + return Math.max(-min.getRealDouble(), max.getRealDouble()); + } + + /** Dumps the given exception, including stack trace, to a string.
+ * + * @param t + * the given exception {@link Throwable} + * @return the String containing the whole exception trace + */ + public static String stackTrace(Throwable t) { + StringWriter sw = new StringWriter(); + t.printStackTrace(new PrintWriter(sw)); + return sw.toString(); + } +} diff --git a/scripts/check_compatibility_java_software/src/main/java/io/bioimage/modelrunner/ci/DownloadEngines.java b/scripts/check_compatibility_java_software/src/main/java/io/bioimage/modelrunner/ci/DownloadEngines.java new file mode 100644 index 00000000..f248fd39 --- /dev/null +++ b/scripts/check_compatibility_java_software/src/main/java/io/bioimage/modelrunner/ci/DownloadEngines.java @@ -0,0 +1,67 @@ +/*- + * #%L + * This project performs Continuous Integration tasks on the JDLL library + * %% + * Copyright (C) 2023 Institut Pasteur. + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ +package io.bioimage.modelrunner.ci; + +import java.io.File; +import java.io.IOException; +import java.util.List; + +import io.bioimage.modelrunner.engine.installation.EngineInstall; +import io.bioimage.modelrunner.versionmanagement.AvailableEngines; +import io.bioimage.modelrunner.versionmanagement.DeepLearningVersion; +import io.bioimage.modelrunner.versionmanagement.InstalledEngines; + +/** + * Class to install the engines that a DIJ or Icy distribution would install + * + * @author Carlos Javier GArcia Lopez de Haro + */ +public class DownloadEngines { + /** + * Current directory + */ + private static final String CWD = new File("").getAbsolutePath(); + /** + * Directory where the engine will be downloaded, if you want to download it + * into another folder, please change it. 
+ */ + private static final String ENGINES_DIR = new File(CWD, "engines").getAbsolutePath(); + + public static void main(String[] args) throws IOException { + if (args[0].equals(Tags.DEEPIMAGEJ) || args[0].equals(Tags.ICY)) { + EngineInstall engineManager = EngineInstall.createInstaller(ENGINES_DIR); + engineManager.basicEngineInstallation(); + //InstalledEngines.buildEnginesFinder(ENGINES_DIR).getDownloadedForOS().stream().map(i -> i.toString()) + System.out.println(InstalledEngines.buildEnginesFinder(ENGINES_DIR).getDownloadedForOS()); + } else if (args[0].equals(Tags.ICY)) { + List allEngines = AvailableEngines.getForCurrentOS(); + for (DeepLearningVersion engine : allEngines) { + try { + EngineInstall.installEngineInDir(engine, ENGINES_DIR); + } catch (IOException e) { + e.printStackTrace(); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + System.out.println(InstalledEngines.buildEnginesFinder(ENGINES_DIR).getDownloadedForOS()); + } + } +} diff --git a/scripts/check_compatibility_java_software/src/main/java/io/bioimage/modelrunner/ci/JavaProcessing.java b/scripts/check_compatibility_java_software/src/main/java/io/bioimage/modelrunner/ci/JavaProcessing.java new file mode 100644 index 00000000..24e05b34 --- /dev/null +++ b/scripts/check_compatibility_java_software/src/main/java/io/bioimage/modelrunner/ci/JavaProcessing.java @@ -0,0 +1,427 @@ +package io.bioimage.modelrunner.ci; + +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; +import java.lang.reflect.Parameter; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +import io.bioimage.modelrunner.bioimageio.description.TensorSpec; +import io.bioimage.modelrunner.tensor.Tensor; + +import net.imglib2.RandomAccessibleInterval; +import net.imglib2.type.NativeType; +import net.imglib2.type.Type; +import net.imglib2.type.numeric.RealType; + +/** + * Class that handles the Java pre-processing transformations. + * The transformations can be either custom designed by the developer or + * coming from the Bioimage.io Java library + * and the tensors produced by them + * + * @author Carlos Garcia Lopez de Haro + * + */ +public class JavaProcessing { + /** + * Transformation as specified in the rdf.yaml + */ + private String rdfSpec; + /** + * String containing the name of the method that runs JAva pre-processing + */ + private String javaMethodName; + /** + * String referring to the BioImage.io class that contains the specified transformations + */ + private String javaClassName; + /** + * Class that contains the pre-processing + */ + private Class transformationClass; + /** + * Arguments used for the Java method + */ + private Map args; + /** + * Name of the tensor that is going to be pre-processed + */ + private String tensorName; + /** + * Specs of the tensor that is going to be pre-processed + */ + private TensorSpec tensorSpec; + /** + * INDArray containing all the information of the input tensor to which this + * pre-processing transformation corresponds + */ + private Tensor tensor; + /** + * Package where the BioImage.io transformations are. 
+ */ + private static final String BIOIMAGEIO_TRANSFORMATIONS_PACKAGE = "io.bioimage.modelrunner.transformations."; + /** + * Name of the standard method used by the BioImage.io transformations to call the + * pre-processing routine + */ + private static String bioImageIoExecutionMethodName = "apply"; + + /** + * Create pre-processing object that contains the path to a pre-processing protocol file + * @param javaMethod + * file that contains the Icy protocols to run pre-processing + * @param args + * args of the pre-processing specified in the rdf.yaml + * @throws ClassNotFoundException if the pre-processing transformation is not found in the classpath + */ + private JavaProcessing(String javaMethod, Map args) throws ClassNotFoundException { + this.rdfSpec = javaMethod; + this.args = args; + checkMethodExists(); + } + + /** + * Create a Java pre-processing object + * @param methodName + * name of the Java method for pre-processing + * @param args + * arguments of the Java method + * @return result of runnning Java pre-processing on the tensor + * @throws ClassNotFoundException if the pre-processing transformation is not found + */ + public static JavaProcessing definePreprocessing(String methodName, Map args) throws ClassNotFoundException { + return new JavaProcessing(methodName, args); + } + + /** + * Executes the Java pre-processing transformation specified + * on the rdf.yaml on the input map with the corresponding tensor name + * @param tensorSpec + * specs of the tensor where pre-processing is going to be executed + * @param inputMap + * map containing the inputs of the model + * @return a Map with the results of pre-processing the input tensor containing + * the inputs provided in the input map too. + * @throws IllegalArgumentException if the tensor that the pre-processing refers to is not found + */ + public Tensor execute(TensorSpec tensorSpec, Tensor input) + throws IllegalArgumentException { + this.tensorSpec = tensorSpec; + this.tensorName = tensorSpec.getName(); + this.tensor = input;; + LinkedHashMap resultsMap = executeJavaTransformation(); + return (Tensor) resultsMap.get(tensor.getName()); + } + + /** + * Fill the args map with variables provided by the input map. + * This is done to allow several pre- and post-processing that can be + * executed one after another + * @param inputsMap + * the provided map with different inputs + */ + private void fillArgs(Map inputsMap) { + // TODO decide whether to use a key word when one of the inputs to pre + // processing has to be given by the previous pre-processing or + // the arg should just be null + for (String kk : this.args.keySet()) { + if (this.args.get(kk) == null && inputsMap.get(kk) != null) { + this.args.put(kk, inputsMap.get(kk)); + } + } + } + + /** + * Method that adds the tensor to the input dictionary as a NDArray. 
+ * NDArrays are the objects used by Java pre-processings so they can + * be shared among different Java softwares + * @param inputMap + * map of input tensors for the model + * @throws IllegalArgumentException if the tensor that the pre-processing refers to is not found + */ + private < T extends RealType< T > & NativeType< T > > void addTensorToInputs(Map inputMap) throws IllegalArgumentException { + Object inputTensor = inputMap.get(this.tensorName); + if (inputTensor == null) { + throw new IllegalArgumentException("There should be an input tensor called '" + + tensorName + "', but no object referring to it has been found."); + } + + if (inputTensor instanceof RandomAccessibleInterval) { + this.tensor = Tensor.build(tensorSpec.getName(), tensorSpec.getAxesOrder(), (RandomAccessibleInterval) inputTensor); + } else if (inputTensor instanceof Tensor) { + this.tensor = ((Tensor) inputTensor); + } + } + + /** + * Method used to convert Strings in using snake case (snake_case) into camel + * case with the first letter as upper case (CamelCase) + * @param str + * the String to be converted + * @return String converted into camel case with first upper + */ + public static String snakeCaseToCamelCaseFirstCap(String str) { + while(str.contains("_")) { + str = str.replaceFirst("_[a-z]", String.valueOf(Character.toUpperCase(str.charAt(str.indexOf("_") + 1)))); + } + str = str.substring(0, 1).toUpperCase() + str.substring(1); + return str; + } + + /** + * Method that checks if the pre-processing transformations specified in the + * rdf,yaml exist in the classpath + * @throws ClassNotFoundException if the transformations are not found in the classpath + */ + private void checkMethodExists() throws ClassNotFoundException { + if (rdfSpec.contains(".") && !rdfSpec.contains("::")) { + javaClassName = rdfSpec; + javaMethodName = bioImageIoExecutionMethodName; + findClassInClassPath(); + } else if (rdfSpec.contains(".") && rdfSpec.contains("::")) { + javaClassName = rdfSpec.substring(0, rdfSpec.indexOf("::")); + javaMethodName = rdfSpec.substring(rdfSpec.indexOf("::") + 2); + findClassInClassPath(); + } else { + findMethodInBioImageIo(); + } + } + + /** + * Tries to find a given class in the classpath + * @throws ClassNotFoundException if the class does not exist in the classpath + */ + private void findClassInClassPath() throws ClassNotFoundException { + Class.forName(this.javaClassName, false, JavaProcessing.class.getClassLoader()); + } + + /** + * Find of the transformation exists in the BioImage.io Java Core + * @throws ClassNotFoundException if the BioImage.io transformation does not exist + */ + private void findMethodInBioImageIo() throws ClassNotFoundException { + this.javaMethodName = snakeCaseToCamelCaseFirstCap(this.rdfSpec) + "Transformation"; + this.javaClassName = BIOIMAGEIO_TRANSFORMATIONS_PACKAGE + this.javaMethodName; + findClassInClassPath(); + this.javaMethodName = bioImageIoExecutionMethodName; + } + + /** + * Execute the transformation form the BioImage.io defined in the rdf.yaml + * @throws IllegalArgumentException if the transformation is not correctly defined, + * does not exist or is missing any argument + */ + private LinkedHashMap executeJavaTransformation() throws IllegalArgumentException { + try { + return runJavaTransformationWithArgs(); + + } catch (ClassNotFoundException e) { + throw new IllegalArgumentException("Processing method '" + this.rdfSpec +"' not found" + + " in the BioImage.io Java Core Transformations or in the Java Classpath: " + + 
"https://github.com/bioimage-io/model-runner-java/tree/nd4j/src/main/java/org/bioimageanalysis/icy/deeplearning/transformations" + + ". " + System.lineSeparator() + e.getCause()); + } catch (InstantiationException e) { + e.printStackTrace(); + throw new IllegalArgumentException("Processing method '" + this.rdfSpec +"' failed" + + " due to an error instantiating the class that defines the transformation (" + + this.javaClassName + "). Go to the following link to see valid transformations:" + + "https://github.com/bioimage-io/core-bioimage-io-java/tree/master/src/main/java/io/bioimage/specification/transformation" + + ". " + System.lineSeparator() + e.getCause()); + } catch (IllegalAccessException e) { + e.printStackTrace(); + throw new IllegalArgumentException("Processing method '" + this.rdfSpec +"' failed" + + " throwing an IllegalAccessException." + + " Go to the following link to see valid transformations:" + + "https://github.com/bioimage-io/core-bioimage-io-java/tree/master/src/main/java/io/bioimage/specification/transformation" + + ". " + System.lineSeparator() + e.getCause()); + } catch (InvocationTargetException e) { + e.printStackTrace(); + throw new IllegalArgumentException("Processing method '" + this.rdfSpec +"' failed" + + " throwing an InvocationTargetException." + + " Go to the following link to see valid transformations:" + + "https://github.com/bioimage-io/core-bioimage-io-java/tree/master/src/main/java/io/bioimage/specification/transformation" + + ". " + System.lineSeparator() + System.lineSeparator() + e.getCause()); + } catch (NoSuchMethodException e) { + e.printStackTrace(); + throw new IllegalArgumentException("Processing method '" + this.rdfSpec +"' failed" + + " because the method needed to call the transformation (" + this.javaMethodName + + ") was not found in the transformation class (" + + this.javaClassName + "). Go to the following link to see valid transformations:" + + "https://github.com/bioimage-io/core-bioimage-io-java/tree/master/src/main/java/io/bioimage/specification/transformation" + + ". " + System.lineSeparator() + e.getCause()); + } catch (SecurityException e) { + e.printStackTrace(); + throw new IllegalArgumentException("Processing method '" + this.rdfSpec +"' failed" + + " throwing an SecurityException." + + " Go to the following link to see valid transformations:" + + "https://github.com/bioimage-io/core-bioimage-io-java/tree/master/src/main/java/io/bioimage/specification/transformation" + + ". 
" + System.lineSeparator() + e.getCause()); + } + } + + /** + * Run the transformation from the Java transformation class + * @throws IllegalAccessException if the method or class cannot be accessed with reflection + * @throws InstantiationException if there is an error instantiating the transformation class + * @throws InvocationTargetException if there is any error invoking the methods + * @throws IllegalArgumentException if any of the arguments provided with reflection is illegal + * @throws SecurityException if there is any security breach + * @throws NoSuchMethodException if the method tried to run does not exist + * @throws ClassNotFoundException if the class referenced for the transformation does not exist + */ + private LinkedHashMap runJavaTransformationWithArgs() throws InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException, NoSuchMethodException, SecurityException, ClassNotFoundException { + this.transformationClass = getClass().getClassLoader().loadClass(this.javaClassName); + Method[] publicMethods = this.transformationClass.getMethods(); + Method transformationMethod = null; + for (Method mm : publicMethods) { + if (mm.getName().equals(this.javaMethodName)) { + transformationMethod = mm; + break; + } + } + if (transformationMethod == null) + throw new IllegalArgumentException("The pre-processing transformation class does not contain" + + "the method '" + this.javaMethodName + "' needed to call the transformation."); + // Check that the arguments specified in the rdf.yaml are of the corect type + return executeMethodWithArgs(transformationMethod); + } + + /** + * + * @param mm + * @return + * @throws InstantiationException if there is any error instantiating the class + * @throws if it is illegal to instantiate the class or to call the method + * @throws IllegalArgumentException if any of the arguments for the method is wrong + * @throws InvocationTargetException if the target of the method is incorrectly captured + * @throws SecurityException if there is any security violation + * @throws NoSuchMethodException if the constructor with the needed argument does not exist + */ + private > LinkedHashMap executeMethodWithArgs(Method mm) throws InstantiationException, + IllegalAccessException, + IllegalArgumentException, + InvocationTargetException, NoSuchMethodException, SecurityException { + + + LinkedHashMap resultsMap = new LinkedHashMap(); + Object instance = createInstanceWitArgs(); + + if (mm.getReturnType().equals(Void.TYPE)) { + mm.invoke(instance, tensor); + resultsMap.put(tensorName, tensor); + } else { + Object returnObject = mm.invoke(instance, tensor); + // Depending on what the output is, do one thing or another + if ((returnObject instanceof HashMap) || (returnObject instanceof HashMap)) { + // If the output is a HashMap, assume the pre-processing already provides + // the inputs map of the model + resultsMap = (LinkedHashMap) returnObject; + } else if (returnObject instanceof RandomAccessibleInterval) { + resultsMap.put(tensorName, (RandomAccessibleInterval) returnObject); + } else if (returnObject instanceof Tensor) { + resultsMap.put(tensorName, (Tensor) returnObject); + } else { + throw new IllegalArgumentException("The processing transformation '" + + rdfSpec + "' corresponding to tensor '" + tensorName + + "' outputs an object whose Type is not supported as" + + " an output for transformations in DeepIcy. 
The supported " + + " Types are Icy Sequences, Icy Tensors, NDArrays and Maps or" + + " HashMaps."); + } + } + return resultsMap; + } + + /** + * + * @return + * @throws IllegalArgumentException + * @throws IllegalAccessException + * @throws InvocationTargetException + * @throws InstantiationException + * @throws NoSuchMethodException + * @throws SecurityException + */ + public Object createInstanceWitArgs() throws IllegalArgumentException, IllegalAccessException, InvocationTargetException, InstantiationException, NoSuchMethodException, SecurityException { + // The instance of the pre-processing transformation should be initialized + // with the corresponding input tensor + Object transformationObject = transformationClass.getConstructor().newInstance(); + for (String arg : this.args.keySet()) { + setArg(transformationObject, arg); + } + return transformationObject; + } + + /** + * Set the argument in the processing trasnformation instance + * @param instance + * instance of the processing trasnformation + * @param argName + * name of the argument + * @throws IllegalArgumentException if no method is found for the given argument + * @throws InvocationTargetExceptionif there is any error invoking the method + * @throws IllegalAccessException if it is illegal to access the method + */ + public void setArg(Object instance, String argName) throws IllegalArgumentException, IllegalAccessException, InvocationTargetException { + String mName = getArgumentSetterName(argName); + Method mm = checkArgType(argName, mName); + mm.invoke(instance, args.get(argName)); + } + + /** + * Get the setter that the Java transformation class uses to set the argument of the + * pre-processing. The setter has to be named as the argument but in CamelCase with the + * first letter in upper case and preceded by set. For example: min_distance -> setMinDistance + * @param argName + * the name of the argument + * @return the method name + * @throws IllegalArgumentException if no method is found for the given argument + */ + public String getArgumentSetterName(String argName) throws IllegalArgumentException { + String mName = "set" + snakeCaseToCamelCaseFirstCap(argName); + // Check that the method exists + Method[] methods = transformationClass.getMethods(); + for (Method mm : methods) { + if (mm.getName().equals(mName)) + return mName; + } + throw new IllegalArgumentException("Setter for argument '" + argName + "' of the processing " + + "transformation '" + rdfSpec + "' of tensor '" + tensorName + + "' not found in the Java transformation class '" + this.javaClassName + "'. " + + "A method called '" + mName + "' should be present."); + } + + /** + * Method that checks that the type of the arguments provided in the rdf.yaml is correct. 
+ * It also returns the setter method to set the argument + * + * @param mm + * the method that executes the pre-processing transformation + * @return the method used to provide the argument to the instance + * @throws IllegalArgumentException if any of the arguments' type is not correct + */ + private Method checkArgType(String argName, String mName) throws IllegalArgumentException { + Object arg = this.args.get(argName); + Method[] methods = this.transformationClass.getMethods(); + List possibleMethods = new ArrayList(); + for (Method mm : methods) { + if (mm.getName().equals(mName)) + possibleMethods.add(mm); + } + if (possibleMethods.size() == 0) + getArgumentSetterName(argName); + for (Method mm : possibleMethods) { + Parameter[] pps = mm.getParameters(); + if (pps.length != 1) { + continue; + } + if (pps[0].getType() == Object.class) + return mm; + } + throw new IllegalArgumentException("Setter '" + mName + "' should have only one input parameter with type Object.class."); + } +} diff --git a/scripts/check_compatibility_java_software/src/main/java/io/bioimage/modelrunner/ci/Tags.java b/scripts/check_compatibility_java_software/src/main/java/io/bioimage/modelrunner/ci/Tags.java new file mode 100644 index 00000000..41b6f3d5 --- /dev/null +++ b/scripts/check_compatibility_java_software/src/main/java/io/bioimage/modelrunner/ci/Tags.java @@ -0,0 +1,9 @@ +package io.bioimage.modelrunner.ci; + +public class Tags { + + protected static final String DEEPIMAGEJ = "deepimagej"; + + protected static final String ICY = "icy"; + +} diff --git a/scripts/get_java_software_versions.py b/scripts/get_java_software_versions.py new file mode 100644 index 00000000..33746af5 --- /dev/null +++ b/scripts/get_java_software_versions.py @@ -0,0 +1,147 @@ +import argparse +import requests +from bs4 import BeautifulSoup +import re +import datetime +from typing import Dict, Any, List +from packaging.version import Version +from pathlib import Path +import os +import zipfile +import xml.etree.ElementTree as ET +import json + +DEEPIMAGEJ_UPDATE_SITE_URL = "https://sites.imagej.net/DeepImageJ/plugins/" +DEEPIMAGEJ_PATTERN = r"DeepImageJ-(\d+\.\d+\.\d+)\.jar-(\d{14})" +DIJ_POM_FILE = 'META-INF/maven/io.github.deepimagej/DeepImageJ_/pom.xml' +MINIMUM_DIJ_VERSION = Version("3.0.4") +DEEPIMAGEJ_TAG = "deepimagej" + + +ICY_POM_FILE = '' +ICY_TAG = "icy" + +TEMP_PATH = os.path.abspath("TEMP") + +JDLL_GROUP_ID = "io.bioimage" + +JDLL_ARTIFACT_ID = "dl-modelrunner" + + +def download_file(url: str, local_filename: str): + with requests.get(url, stream=True) as response: + response.raise_for_status() + with open(local_filename, 'wb') as file: + for chunk in response.iter_content(chunk_size=8192): + _ = file.write(chunk) + + +def get_version_from_pomxml(content: str) -> str: + root = ET.fromstring(content) + namespace = "{http://maven.apache.org/POM/4.0.0}" + + version = "" + for dependency in root.findall(f".//{namespace}dependency"): + group_id = dependency.find(f"{namespace}groupId") + artifact_id = dependency.find(f"{namespace}artifactId") + if group_id is not None and artifact_id is not None: + if group_id.text == JDLL_GROUP_ID and artifact_id.text == JDLL_ARTIFACT_ID: + version = dependency.find(f"{namespace}version").text + break + if version == "": + raise FileNotFoundError("JDLL version not founf in pom.xml") + + return version + + +def read_file_in_jar(jar_file_path: str, file_name: str) -> str: + with zipfile.ZipFile(jar_file_path, 'r') as jar: + if file_name in jar.namelist(): + with jar.open(file_name) as file: + 
content = file.read().decode('utf-8') + return content + else: + raise FileNotFoundError(f"{file_name} not found in the JAR file") + + + +def find_associated_jdll_version(link: str) -> str: + Path(TEMP_PATH).mkdir(parents=True, exist_ok=True) + fname = os.path.join(TEMP_PATH, link.split("/")[-1]) + download_file(link, fname) + if "deepimagej" in link.lower(): + return get_version_from_pomxml(read_file_in_jar(fname, DIJ_POM_FILE)) + else: + return get_version_from_pomxml(read_file_in_jar(fname, ICY_POM_FILE)) + + +def parse_links() -> List[Any]: + response = requests.get(DEEPIMAGEJ_UPDATE_SITE_URL) + soup = BeautifulSoup(response.content, 'html.parser') + links = soup.find_all('a') + return links + + + +def get_deepimagej_versions() -> Dict[str, str]: + """ + Retrieves deepImageJ versions and their associated JDLL versions. + + This method parses available links, filters for relevant deepImageJ JAR files, + and extracts their versions and timestamps. It then associates each unique deepImageJ + version with the corresponding JDLL version deom the deepImageJ pom.xml file. + + Returns: + Dict[str, str]: A dictionary where each key is a deepImageJ version and the value + is the associated JDLL version. + """ + links = parse_links() + v_dic: Dict[str, Any] = {} + v_dic_rev: Dict[str, str] = {} + for link in links: + href = link.get('href') + if '.jar' in href and 'deepimagej' in href.lower() and 'deepimagej_' not in href.lower(): + tmp_dic = get_dij_version_and_date(href) + if Version(tmp_dic["vv"]) < MINIMUM_DIJ_VERSION: + continue + if tmp_dic["vv"] not in v_dic_rev.keys(): + v_dic_rev[tmp_dic["vv"]] = tmp_dic["ts"] + v_dic[href] = tmp_dic + elif tmp_dic["ts"] > v_dic_rev[tmp_dic["vv"]]: + v_dic[href] = tmp_dic + v_dic_rev[tmp_dic["vv"]] = tmp_dic["ts"] + assoc_dict: Dict[str, str] = {} + for kk in v_dic.keys(): + jdll_v = find_associated_jdll_version(DEEPIMAGEJ_UPDATE_SITE_URL + kk) + assoc_dict[v_dic[kk]["vv"]] = jdll_v + return assoc_dict + + + +def get_dij_version_and_date(filename: str) -> Dict[str, Any]: + match = re.search(DEEPIMAGEJ_PATTERN, filename) + + version_dic: Dict[str, Any] = {} + if match: + version = match.group(1) + date = match.group(2) + version_dic["vv"] = version + version_dic["ts"] = datetime.datetime(int(date[:4]), int(date[4:6]), int(date[6:8])).timestamp() + else: + version_dic["vv"] = None + version_dic["ts"] = None + return version_dic + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + _ = parser.add_argument("software_name") + if parser.parse_args().software_name == DEEPIMAGEJ_TAG: + matrix = get_deepimagej_versions() + #print(json.dumps(matrix)) + #print(f"matrix={json.dumps({"0.0.1": "0.5.9"})}") + matrix = {"key": ["3.0.4"], "value": ["0.5.9"]} + print(json.dumps(matrix)) + elif parser.parse_args().software_name == ICY_TAG: + matrix = {"key": ["0.0.1"], "value": ["0.5.9"]} + print(json.dumps(matrix))
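Note (not part of the patch): the JSON printed by scripts/get_java_software_versions.py is consumed by the workflows above through fromJson(needs.generate-version-dict.outputs.matrix) and referenced as matrix.key. The following is a minimal sketch, assuming the same {deepimagej_version: jdll_version} dictionary returned by get_deepimagej_versions(); the helper name build_matrix is illustrative only and shows how that dictionary could be serialized into the matrix shape instead of the hardcoded value.

import json
from typing import Dict

def build_matrix(assoc_dict: Dict[str, str]) -> str:
    """Serialize {software_version: jdll_version} into the matrix JSON the workflows expect."""
    return json.dumps({
        "key": list(assoc_dict.keys()),      # becomes matrix.key in the workflow, e.g. ["3.0.4"]
        "value": list(assoc_dict.values()),  # associated JDLL versions, e.g. ["0.5.9"]
    })

# Example: build_matrix({"3.0.4": "0.5.9"}) -> '{"key": ["3.0.4"], "value": ["0.5.9"]}'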