diff --git a/.github/workflows/check_compatibility_deepimagej.yaml b/.github/workflows/check_compatibility_deepimagej.yaml
new file mode 100644
index 00000000..814e2053
--- /dev/null
+++ b/.github/workflows/check_compatibility_deepimagej.yaml
@@ -0,0 +1,105 @@
+name: check compatibility deepimagej
+
+concurrency: deepimagej
+
+on:
+ push:
+ branches:
+ - main
+ paths:
+ - .github/workflows/check_compatibility_deepimagej.yaml
+ - scripts/get_java_software_versions.py
+ - scripts/check_compatibility_java_software/**/*
+ workflow_dispatch:
+ schedule:
+ - cron: "0 1 * * *" # update compatibility once a day
+
+jobs:
+ generate-version-dict:
+ runs-on: ubuntu-latest
+ outputs:
+ matrix: ${{ steps.set-dict.outputs.matrix }}
+ steps:
+ - uses: actions/checkout@v3
+ - name: Set up Python
+ uses: actions/setup-python@v4
+ with:
+ python-version: '3.x'
+ - name: Install deps
+ run: pip install requests==2.28.2 beautifulsoup4==4.12.3 packaging==23.0
+ - name: Generate dict
+ id: set-dict
+ run: |
+ matrix_output=$(python scripts/get_java_software_versions.py deepimagej)
+ echo "matrix=${matrix_output}" >> $GITHUB_OUTPUT
+ - name: Versions matrix
+ run: echo '${{ steps.set-dict.outputs.matrix }}'
+
+ run:
+ needs: generate-version-dict
+ runs-on: ubuntu-latest
+ strategy:
+ matrix: ${{ fromJson(needs.generate-version-dict.outputs.matrix) }}
+ steps:
+ - uses: actions/checkout@v3
+ - uses: actions/checkout@v3
+ with:
+ repository: bioimage-io/collection-bioimage-io
+ ref: gh-pages
+ path: bioimageio-gh-pages
+ - name: Setup Maven Action
+ uses: stCarolas/setup-maven@v4.5
+ with:
+ java-version: 11
+ maven-version: 3.9.5
+ - name: Build with Maven
+ run: |
+ cd scripts/check_compatibility_java_software
+ mvn clean install org.apache.maven.plugins:maven-shade-plugin:3.2.4:shade -Dshade.mainClass=io.bioimage.modelrunner.ci.ContinuousIntegration
+ - name: Get jar file name
+ id: get-jar
+ run: |
+ cd scripts/check_compatibility_java_software
+ jarfile=$(mvn -q exec:exec -Dexec.executable=echo -Dexec.args='${project.build.finalName}.jar')
+ if [ -z "$jarfile" ]; then
+ echo "Failed to get jar file name"
+ exit 1
+ fi
+ if [ ! -f "target/$jarfile" ]; then
+ echo "Jar file not found: target/$jarfile"
+ exit 1
+ fi
+ echo "Jar file found: target/$jarfile"
+ echo "jarfile=$jarfile" >> $GITHUB_OUTPUT
+ - name: Download engines
+ run: |
+ java -cp "scripts/check_compatibility_java_software/target/${{ steps.get-jar.outputs.jarfile }}" io.bioimage.modelrunner.ci.DownloadEngines deepimagej
+ - name: Run models
+ run: |
+ java -cp "scripts/check_compatibility_java_software/target/${{ steps.get-jar.outputs.jarfile }}" io.bioimage.modelrunner.ci.ContinuousIntegration deepimagej ${{ matrix.key }}
+ - name: Upload artifacts
+ uses: actions/upload-artifact@v3
+ with:
+ path: test_summaries_deepimagej_${{ matrix.key }}
+
+ sendreport:
+ runs-on: ubuntu-latest
+ needs: [generate-version-dict, run]
+ strategy:
+ matrix: ${{ fromJson(needs.generate-version-dict.outputs.matrix) }}
+ steps:
+ - uses: actions/checkout@v3
+ - name: Set up Python
+ uses: actions/setup-python@v4
+ with:
+ python-version: '3.10'
+ - name: Get test results
+ uses: actions/download-artifact@v3
+ with:
+ path: test_summaries_deepimagej_${{ matrix.key }}
+ - name: Install deps
+ run: |
+ pip install .
+ - name: Send deepimagej ${{ matrix.key }} tests
+ shell: bash -l {0}
+ run: python scripts/check_compatibility_java_software.py deepimagej ${{ matrix.key }} --summaries_dir "test_summaries_deepimagej_${{ matrix.key }}/artifact"
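Note on the matrix plumbing above: the `run` and `sendreport` jobs expand `${{ fromJson(needs.generate-version-dict.outputs.matrix) }}` directly as their build matrix and then reference `matrix.key`, so `scripts/get_java_software_versions.py` (not included in this diff) has to print a single-line JSON object whose keys become matrix axes. The sketch below only illustrates that assumed contract; the `{"key": [...]}` shape and the version numbers are placeholders, not taken from the real script.

```python
# Hypothetical sketch of what scripts/get_java_software_versions.py might print.
# The real script is not part of this diff; the {"key": [...]} matrix shape is
# an assumption inferred from how the downstream jobs use fromJson() and matrix.key.
import json
import sys

# Placeholder version lists; the real script presumably discovers them online
# (the workflow installs requests and beautifulsoup4 for that purpose).
KNOWN_VERSIONS = {
    "deepimagej": ["3.0.3", "3.0.4"],
    "icy": ["2.5.0"],
}


def build_matrix(software: str) -> dict:
    """Return a GitHub Actions matrix with one 'key' axis listing software versions."""
    return {"key": KNOWN_VERSIONS.get(software, [])}


if __name__ == "__main__":
    # Compact, single-line JSON so `echo "matrix=${matrix_output}" >> $GITHUB_OUTPUT`
    # keeps the whole value on one line.
    print(json.dumps(build_matrix(sys.argv[1])))
```

The single-line output matters because `$GITHUB_OUTPUT` assignments written with `echo "matrix=..."` only support one-line values.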
diff --git a/.github/workflows/check_compatibility_icy.yaml b/.github/workflows/check_compatibility_icy.yaml
new file mode 100644
index 00000000..f6a5fdf8
--- /dev/null
+++ b/.github/workflows/check_compatibility_icy.yaml
@@ -0,0 +1,105 @@
+name: check compatibility icy
+
+concurrency: icy
+
+on:
+ push:
+ branches:
+ - main
+ paths:
+ - .github/workflows/check_compatibility_icy.yaml
+ - scripts/get_java_software_versions.py
+ - scripts/check_compatibility_java_software/**/*
+ workflow_dispatch:
+ schedule:
+ - cron: "0 1 * * *" # update compatibility once a day
+
+jobs:
+ generate-version-dict:
+ runs-on: ubuntu-latest
+ outputs:
+ matrix: ${{ steps.set-dict.outputs.matrix }}
+ steps:
+ - uses: actions/checkout@v3
+ - name: Set up Python
+ uses: actions/setup-python@v4
+ with:
+ python-version: '3.x'
+ - name: Install deps
+ run: pip install requests==2.28.2 beautifulsoup4==4.12.3 packaging==23.0
+ - name: Generate dict
+ id: set-dict
+ run: |
+ matrix_output=$(python scripts/get_java_software_versions.py icy)
+ echo "matrix=${matrix_output}" >> $GITHUB_OUTPUT
+ - name: Versions matrix
+ run: echo '${{ steps.set-dict.outputs.matrix }}'
+
+ run:
+ needs: generate-version-dict
+ runs-on: ubuntu-latest
+ strategy:
+ matrix: ${{ fromJson(needs.generate-version-dict.outputs.matrix) }}
+ steps:
+ - uses: actions/checkout@v3
+ - uses: actions/checkout@v3
+ with:
+ repository: bioimage-io/collection-bioimage-io
+ ref: gh-pages
+ path: bioimageio-gh-pages
+ - name: Setup Maven Action
+ uses: stCarolas/setup-maven@v4.5
+ with:
+ java-version: 11
+ maven-version: 3.9.5
+ - name: Build with Maven
+ run: |
+ cd scripts/check_compatibility_java_software
+ mvn clean install org.apache.maven.plugins:maven-shade-plugin:3.2.4:shade -Dshade.mainClass=io.bioimage.modelrunner.ci.ContinuousIntegration
+ - name: Get jar file name
+ id: get-jar
+ run: |
+ cd scripts/check_compatibility_java_software
+ jarfile=$(mvn -q exec:exec -Dexec.executable=echo -Dexec.args='${project.build.finalName}.jar')
+ if [ -z "$jarfile" ]; then
+ echo "Failed to get jar file name"
+ exit 1
+ fi
+ if [ ! -f "target/$jarfile" ]; then
+ echo "Jar file not found: target/$jarfile"
+ exit 1
+ fi
+ echo "Jar file found: target/$jarfile"
+ echo "jarfile=$jarfile" >> $GITHUB_OUTPUT
+ - name: Download engines
+ run: |
+ java -cp "scripts/check_compatibility_java_software/target/${{ steps.get-jar.outputs.jarfile }}" io.bioimage.modelrunner.ci.DownloadEngines icy
+ - name: Run models
+ run: |
+ java -cp "scripts/check_compatibility_java_software/target/${{ steps.get-jar.outputs.jarfile }}" io.bioimage.modelrunner.ci.ContinuousIntegration icy ${{ matrix.key }}
+ - name: Upload artifacts
+ uses: actions/upload-artifact@v3
+ with:
+ path: test_summaries_icy_${{ matrix.key }}
+
+ sendreport:
+ runs-on: ubuntu-latest
+ needs: [generate-version-dict, run]
+ strategy:
+ matrix: ${{ fromJson(needs.generate-version-dict.outputs.matrix) }}
+ steps:
+ - uses: actions/checkout@v3
+ - name: Set up Python
+ uses: actions/setup-python@v4
+ with:
+ python-version: '3.10'
+ - name: Get test results
+ uses: actions/download-artifact@v3
+ with:
+ path: test_summaries_icy_${{ matrix.key }}
+ - name: Install deps
+ run: |
+ pip install .
+ - name: Send icy ${{ matrix.key }} tests
+ shell: bash -l {0}
+ run: python scripts/check_compatibility_java_software.py icy ${{ matrix.key }} --summaries_dir "test_summaries_icy_${{ matrix.key }}/artifact"
diff --git a/.gitignore b/.gitignore
index 671d4066..aa48971c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,3 +3,7 @@ __pycache__/
*.egg-info/
docs/
*.pyc
+TEMP/
+bioimageio-gh-pages
+scripts/check_compatibility_java_software/test_summaries_null_null
+scripts/check_compatibility_java_software/test_summaries_default_default
\ No newline at end of file
diff --git a/scripts/check_compatibility_java_software.py b/scripts/check_compatibility_java_software.py
new file mode 100644
index 00000000..ccce7ef3
--- /dev/null
+++ b/scripts/check_compatibility_java_software.py
@@ -0,0 +1,184 @@
+import argparse
+from typing import List, Dict, Any
+import os
+
+import requests
+
+from loguru import logger
+from ruyaml import YAML, YAMLError
+
+from bioimageio_collection_backoffice.db_structure.compatibility import (
+ CompatiblityReport,
+)
+from bioimageio_collection_backoffice.remote_collection import Record, RemoteCollection
+from bioimageio_collection_backoffice.s3_client import Client
+
+
+
+def find_java_summaries(directory: str) -> List[str]:
+ """Walks through a directory and its subdirectories to find all YAML files."""
+ yaml_files: List[str] = []
+ # Walk through all directories and files in the specified directory
+ for root, _, files in os.walk(directory):
+ for file in files:
+ # Check if the file ends with .yaml or .yml
+ if file.endswith('.yaml') or file.endswith('.yml'):
+ # Create the full path to the file
+ full_path = os.path.join(root, file)
+ # Append the full path to the list of YAML files
+ yaml_files.append(full_path)
+ return yaml_files
+
+
+
+def read_yaml_from_url(url: str):
+ """Fetch and parse a YAML file from a specified URL.
+
+ Args:
+ url (str): The URL of the YAML file.
+
+ Returns:
+ dict: Parsed YAML data as a dictionary.
+ """
+ response = requests.get(url)
+ response.raise_for_status()
+
+ yaml = YAML(typ="safe")
+ data = yaml.load(response.text)
+ return data
+
+
+def get_tests_from_summaries(rdf: str, path_to_summaries: str) -> Dict[str, str]:
+ summary = {}
+ try:
+ rdf_yaml = read_yaml_from_url(rdf)
+ except requests.RequestException as e:
+ summary["status"] = "failed"
+ summary["error"] = "Unable to access rdf.yaml file"
+ summary["details"] = str(e)
+ return summary
+ except YAMLError as e:
+ summary["status"] = "failed"
+ summary["error"] = "Unable to read rdf.yaml file"
+ summary["details"] = str(e)
+ return summary
+
+ id = rdf_yaml["id"]
+ rdf_path = os.path.join(path_to_summaries, id)
+ test_files = os.listdir(rdf_path)
+
+ if len(test_files) == 0:
+ summary["status"] = "failed"
+ summary["error"] = "No tests executed"
+ summary["details"] = "The model tests were not executed or the test files could not be located"
+ return summary
+
+ summaries_yaml = None
+ error: str = ""
+ for test_file in test_files:
+ try:
+ yaml_loader = YAML(typ='safe') # default, if not specified, is 'rt' (round-trip)
+ summaries_yaml = yaml_loader.load(open(os.path.join(rdf_path, test_file)))
+ summary = find_passed_test(summaries_yaml)
+ if summary["status"] == "passed":
+ return summary
+ except YAMLError as e:
+ error += str(e) + os.linesep
+ continue
+
+ if summary["status"] is not None:
+ return summary
+
+ summary["status"] = "failed"
+ summary["error"] = "Unable to read the test results yaml file"
+ summary["details"] = str(error)
+
+ return summary
+
+
+def find_passed_test(summaries_yaml: List[Dict[Any, Any]]) -> Dict[Any, Any]:
+ summary = {}
+ for elem in summaries_yaml:
+ if not isinstance(elem, dict):
+ summary["status"] = "failed"
+ summary["error"] = "Invalid test output format"
+ summary["details"] = "Expected a list of dictionaries, but received an improperly formatted element."
+ return summary
+ elif elem.get("status") is not None:
+ return elem
+
+ summary["status"] = "failed"
+ summary["error"] = "No test contents found"
+ summary["details"] = "test file was empty."
+ return summary
+
+
+
+def check_compatibility_java_software_impl(
+ record: Record,
+ software_name: str,
+ version: str,
+ summaries_dir: str = "test_summaries/artifact",
+):
+ tool = f"{software_name}{version}"
+ report_path = record.get_compatibility_report_path(tool)
+ if list(record.client.ls(report_path)):
+ return
+
+ rdf_data = record.client.load_file(record.rdf_path)
+ assert rdf_data is not None
+ rdf = YAML(typ="safe").load(rdf_data)
+ assert isinstance(rdf, dict)
+ if rdf.get("type") != "model":
+ return CompatiblityReport(
+ tool=tool,
+ status="not-applicable",
+ error=None,
+ details="only 'model' resources can be used in icy.",
+ )
+
+ #summaries = find_java_summaries(summaries_dir)
+ summary = get_tests_from_summaries(record.client.get_file_url(record.rdf_path), summaries_dir)
+ # produce test summaries for each weight format
+ # TODO check what it produces summary = test_model(record.client.get_file_url(record.rdf_path))
+
+ return CompatiblityReport(
+ tool=tool,
+ status=summary["status"],
+ error=None if summary["status"] == "passed" else summary["error"],
+ details=summary["details"],
+ links=["{software_name}/{software_name}"],
+ )
+
+
+def check_compatibility_java_software(
+ software_name: str,
+ version: str,
+ summaries_dir: str = "test_summaries",
+):
+ """preliminary icy check
+
+ only checks if test outputs are reproduced for onnx, torchscript, or pytorch_state_dict weights.
+
+ """
+ collection = RemoteCollection(Client())
+ for record in collection.get_published_versions():
+ try:
+ report = check_compatibility_java_software_impl(
+ record, software_name, version, summaries_dir
+ )
+ except Exception as e:
+ logger.error(f"failed to check '{record.id}': {e}")
+ else:
+ print(report)
+ if report is not None:
+ record.set_compatibility_report(report)
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser()
+ _ = parser.add_argument("software_name")
+ _ = parser.add_argument("version")
+ _ = parser.add_argument("--summaries_dir", default="test_summaries/artifact", help="Directory path where summaries are stored.")
+ args = parser.parse_args()
+ check_compatibility_java_software(args.software_name, args.version, args.summaries_dir)
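Note on the summary layout this script expects: `get_tests_from_summaries` looks under `--summaries_dir/<model id>/` for the YAML files written by the Java CI code later in this diff; each file holds a list of dictionaries carrying at least `status`, `error`, and `details` keys, and `find_passed_test` returns the first entry that has a status. A small, self-contained illustration of that contract follows; the values are invented, not taken from a real test run.

```python
# Illustrative data only: mimics one parsed test-summary YAML file as the Java
# side writes it (a list of dicts); real values come from JDLL test runs.
sample_summary = [
    {
        "name": "reproduce test outputs from test inputs",
        "status": "passed",
        "error": None,
        "details": None,
    }
]

# find_passed_test() (defined above) returns the first entry with a status.
report = find_passed_test(sample_summary)
assert report["status"] == "passed"
```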
diff --git a/scripts/check_compatibility_java_software/.gitignore b/scripts/check_compatibility_java_software/.gitignore
new file mode 100644
index 00000000..7aa8b531
--- /dev/null
+++ b/scripts/check_compatibility_java_software/.gitignore
@@ -0,0 +1,16 @@
+/engines/
+/models/
+
+# maven specific files
+/target/
+
+# eclipse specific files
+/.settings/
+.classpath
+.project
+
+# intellij specific files
+.idea
+
+# miscellaneous
+*.swp
diff --git a/scripts/check_compatibility_java_software/pom.xml b/scripts/check_compatibility_java_software/pom.xml
new file mode 100644
index 00000000..cf80c081
--- /dev/null
+++ b/scripts/check_compatibility_java_software/pom.xml
@@ -0,0 +1,99 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+    <groupId>org.scijava</groupId>
+    <artifactId>pom-scijava</artifactId>
+    <version>37.0.0</version>
+    <relativePath />
+  </parent>
+
+  <groupId>io.bioimage</groupId>
+  <artifactId>dl-modelrunner-ci</artifactId>
+  <version>0.0.1-SNAPSHOT</version>
+
+  <name>JDLL CI</name>
+  <description>Continuous integration code for JDLL</description>
+
+  <url>https://github.com/bioimage-io/pytorch-java-interface-javacpp</url>
+  <inceptionYear>2023</inceptionYear>
+  <organization>
+    <name>Icy Bioimage Analysis</name>
+    <url>https://icy.bioimageanalysis.org/</url>
+  </organization>
+  <licenses>
+    <license>
+      <name>Apache License, Version 2.0</name>
+      <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
+      <distribution>repo</distribution>
+    </license>
+  </licenses>
+
+  <developers>
+    <developer>
+      <id>carlosuc3m</id>
+      <name>Carlos Javier Garcia Lopez de Haro</name>
+      <url>https://github.com/carlosuc3m</url>
+      <roles>
+        <role>developer</role>
+        <role>debugger</role>
+        <role>reviewer</role>
+        <role>support</role>
+        <role>maintainer</role>
+      </roles>
+    </developer>
+  </developers>
+  <contributors>
+    <contributor>
+      <name>Carlos Garcia</name>
+      <url>https://github.com/carlosuc3m</url>
+      <properties><id>carlosuc3m</id></properties>
+    </contributor>
+  </contributors>
+
+  <mailingLists>
+    <mailingList>
+      <name>Image.sc Forum</name>
+      <archive>https://forum.image.sc/tag/bioimage-io</archive>
+    </mailingList>
+  </mailingLists>
+
+  <scm>
+    <connection>scm:git:https://github.com/Icy-imaging/icy-bioimage-io</connection>
+    <developerConnection>scm:git:git@github.com:Icy-imaging/icy-bioimage-io</developerConnection>
+    <tag>HEAD</tag>
+    <url>https://github.com/Icy-imaging/icy-bioimage-io</url>
+  </scm>
+  <issueManagement>
+    <system>GitHub Issues</system>
+    <url>https://github.com/Icy-imaging/icy-bioimage-io/issues</url>
+  </issueManagement>
+  <ciManagement>
+    <system>None</system>
+  </ciManagement>
+
+  <properties>
+    <package-name>io.bioimage.modelrunner.ci</package-name>
+
+    <license.licenseName>apache_v2</license.licenseName>
+    <license.copyrightOwners>Institut Pasteur</license.copyrightOwners>
+
+    <dl-modelrunner.version>0.5.8</dl-modelrunner.version>
+  </properties>
+
+  <repositories>
+    <repository>
+      <id>scijava.public</id>
+      <url>https://maven.scijava.org/content/groups/public</url>
+    </repository>
+  </repositories>
+
+  <dependencies>
+    <dependency>
+      <groupId>io.bioimage</groupId>
+      <artifactId>dl-modelrunner</artifactId>
+      <version>${dl-modelrunner.version}</version>
+    </dependency>
+  </dependencies>
+</project>
diff --git a/scripts/check_compatibility_java_software/src/main/java/io/bioimage/modelrunner/ci/ContinuousIntegration.java b/scripts/check_compatibility_java_software/src/main/java/io/bioimage/modelrunner/ci/ContinuousIntegration.java
new file mode 100644
index 00000000..4984463d
--- /dev/null
+++ b/scripts/check_compatibility_java_software/src/main/java/io/bioimage/modelrunner/ci/ContinuousIntegration.java
@@ -0,0 +1,399 @@
+/*-
+ * #%L
+ * This project performs Continuous Integration tasks on java software based on JDLL
+ * %%
+ * Copyright (C) 2023 Institut Pasteur.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+package io.bioimage.modelrunner.ci;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.nio.file.FileSystems;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.PathMatcher;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+import io.bioimage.modelrunner.bioimageio.BioimageioRepo;
+import io.bioimage.modelrunner.bioimageio.description.ModelDescriptor;
+import io.bioimage.modelrunner.bioimageio.description.TransformSpec;
+import io.bioimage.modelrunner.bioimageio.description.exceptions.ModelSpecsException;
+import io.bioimage.modelrunner.bioimageio.description.weights.ModelWeight;
+import io.bioimage.modelrunner.bioimageio.description.weights.WeightFormat;
+import io.bioimage.modelrunner.engine.EngineInfo;
+import io.bioimage.modelrunner.engine.installation.EngineInstall;
+import io.bioimage.modelrunner.model.Model;
+import io.bioimage.modelrunner.numpy.DecodeNumpy;
+import io.bioimage.modelrunner.tensor.Tensor;
+import io.bioimage.modelrunner.utils.Constants;
+import io.bioimage.modelrunner.utils.YAMLUtils;
+import net.imglib2.Cursor;
+import net.imglib2.RandomAccessibleInterval;
+import net.imglib2.loops.LoopBuilder;
+import net.imglib2.type.NativeType;
+import net.imglib2.type.numeric.RealType;
+import net.imglib2.type.numeric.real.FloatType;
+import net.imglib2.view.Views;
+
+/**
+ *
+ */
+public class ContinuousIntegration {
+
+ private static Map downloadedModelsCorrectly = new HashMap();
+ private static Map downloadedModelsIncorrectly = new HashMap();
+
+ private static String version;
+
+ private static String software;
+ private static final String TEST_NAME = "reproduce test outputs from test inputs";
+
+ public static void main(String[] args) throws IOException {
+ if (args.length != 0) {
+ software = args[0];
+ version = args[1];
+ } else {
+ software = "default";
+ version = "default";
+ }
+
+ Path currentDir = Paths.get(ContinuousIntegration.class.getProtectionDomain().getCodeSource().getLocation().getPath()).getParent();
+ Path rdfDir = currentDir.resolve("../../../bioimageio-gh-pages/rdfs").normalize();
+
+ // Create a matcher for the pattern 'rdf.yaml'
+ runTests(rdfDir, "**", "**", Paths.get("test_summaries_" + software + "_" + version));
+ }
+
+
+ public static void runTests(Path rdfDir, String resourceID, String versionID, Path summariesDir) throws IOException {
+
+ PathMatcher matcher = FileSystems.getDefault().getPathMatcher("glob:" + resourceID + File.separator + versionID + File.separator + Constants.RDF_FNAME);
+
+ List<Path> rdfFiles = Files.walk(rdfDir).filter(matcher::matches).collect(Collectors.toList());
+ EngineInstall installer = EngineInstall.createInstaller();
+ installer.basicEngineInstallation();
+
+ for (Path rdfPath : rdfFiles) {
+ System.out.println("");
+ System.out.println("");
+ System.out.println(rdfPath);
+ Map rdf = new LinkedHashMap();
+ try {
+ rdf = YAMLUtils.load(rdfPath.toAbsolutePath().toString());
+ } catch (Exception | Error ex) {
+ ex.printStackTrace();
+ continue;
+ }
+
+ Object rdID = rdf.get("id");
+ String summariesPath = summariesDir.toAbsolutePath() + File.separator
+ + (rdID != null ? rdID : "") + File.separator + "test_summary" + ".yaml";
+ Object type = rdf.get("type");
+ Object weightFormats = rdf.get("weights");
+ if (rdID == null || !(rdID instanceof String)) {
+ new Exception(rdfPath.toAbsolutePath().toString() + " is missing ID field").printStackTrace();
+ continue;
+ } else if (type == null || !(type instanceof String) || !((String) type).equals("model")) {
+ Map summary = create(rdfPath, "not-applicable", null, null, "not a model");
+ writeSummary(summariesPath, summary);
+ continue;
+ } else if (weightFormats == null || !(weightFormats instanceof Map)) {
+ Map summary = create(rdfPath, "failed", "Missing weights dictionary for " + rdID, null,
+ weightFormats == null ? "the 'weights' field is missing" : weightFormats.toString());
+ writeSummary(summariesPath, summary);
+ continue;
+ }
+ ModelWeight weights = null;
+ try {
+ weights = ModelWeight.build((Map) weightFormats);
+ } catch (Exception | Error ex) {
+ Map summary = create(rdfPath,
+ "failed", "Missing/Invalid weight formats for " + rdID, stackTrace(ex), "Unable to read weight formats");
+ writeSummary(summariesPath, summary);
+ continue;
+ }
+
+ if (weights != null && weights.gettAllSupportedWeightObjects().size() == 0) {
+ Map summary = create(rdfPath,
+ "failed", "Unsupported model weights", null, "The model weights belong to a Deep Learning "
+ + "framework not supported by " + software + "_" + version + ".");
+ writeSummary(summariesPath, summary);
+ continue;
+ }
+
+
+ for (WeightFormat ww : weights.gettAllSupportedWeightObjects()) {
+ Map summaryWeightFormat = new LinkedHashMap();
+ try {
+ summaryWeightFormat = testResource(rdfPath.toAbsolutePath().toString(), ww, 4, "model");
+ } catch (Exception | Error ex) {
+ ex.printStackTrace();
+ summaryWeightFormat = create(rdfPath, "failed", "exception thrown during testing",
+ stackTrace(ex), "test was interrupted by an exception while testing " + ww.getFramework() + " weigths");
+ }
+ summariesPath = summariesDir.toAbsolutePath() + File.separator
+ + rdID + File.separator + "test_summary_" + ww.getFramework() + ".yaml";
+ writeSummary(summariesPath, summaryWeightFormat);
+ }
+
+ }
+ }
+
+ private static Map create(Path rdfPath, String status, String error,
+ String tb, String details) {
+ Map summaryMap = new LinkedHashMap();
+ summaryMap.put("name", TEST_NAME);
+ summaryMap.put("status", status);
+ summaryMap.put("error", error);
+ summaryMap.put("source_name", rdfPath.toAbsolutePath().toString());
+ summaryMap.put("traceback", tb);
+ summaryMap.put("details", details);
+ summaryMap.put(software, version);
+ return summaryMap;
+ }
+
+ private static void writeSummaries(String summariesPath, List