img, final long[] downsampleFactors) {
+ return Views.subsample(img, downsampleFactors);
+ }
+
+	/**
+	 * Downsamples an image by factors of 2 using averaging (linear
+	 * interpolation sampled at half-pixel offsets).
+	 *
+	 * Not the most efficient when some dimensions are not downsampled.
+	 *
+	 * NOTE(review): generic type parameters were garbled in this copy of the
+	 * source; the signature is left as found.
+	 *
+	 * @param img the image
+	 * @param downsampleFactors the per-dimension factors; each must be 1 or 2
+	 * @return a downsampled image
+	 */
+	private static > RandomAccessibleInterval downsampleAvgBy2(
+			final RandomAccessibleInterval img, final long[] downsampleFactors) {
+
+		// ensure downsampleFactors contains only 1's and 2's
+		assert Arrays.stream(downsampleFactors).filter(x -> (x == 1) || (x == 2)).count() == downsampleFactors.length;
+
+		final int nd = downsampleFactors.length;
+		final double[] scale = new double[ nd ];
+		final double[] translation = new double[ nd ];
+
+		final long[] dims = new long[ nd ];
+
+		for (int i = 0; i < nd; i++) {
+
+			if (downsampleFactors[i] == 2) {
+				scale[i] = 0.5;
+				translation[i] = -0.25;
+				// ceiling of dimension/2. The previous expression
+				// (long)Math.ceil( img.dimension(i) / 2 ) performed integer
+				// division first, so Math.ceil was a no-op and odd sizes lost
+				// their last sample.
+				dims[i] = (img.dimension(i) + 1) / 2;
+			} else {
+				scale[i] = 1.0;
+				translation[i] = 0.0;
+				dims[i] = img.dimension(i);
+			}
+		}
+
+		// TODO clamping NLinearInterpFactory when relevant
+		// TODO record offset in metadata as (s-0.5)
+		final RealRandomAccessible imgE = Views.interpolate(Views.extendBorder(img), new NLinearInterpolatorFactory());
+		return Views.interval(RealViews.transform(imgE, new ScaleAndTranslation(scale, translation)),
+				new FinalInterval(dims));
+	}
+
+ // Chunk size for an (n-1)-d slice: the full chunk size with one dimension removed.
+ private int[] sliceBlockSize(final int exclude) {
+
+ return removeElement(chunkSize, exclude);
+ }
+
+ // Downsampling factors for an (n-1)-d slice: the current factors with one dimension removed.
+ private long[] sliceDownsamplingFactors(final int exclude) {
+
+ return removeElement(currentAbsoluteDownsampling, exclude);
+ }
+
+	/** Returns a copy of {@code arr} with the entry at {@code excludeIndex} dropped. */
+	private static int[] removeElement(final int[] arr, final int excludeIndex) {
+
+		final int[] result = new int[arr.length - 1];
+		int dst = 0;
+		for (int src = 0; src < arr.length; src++) {
+			if (src == excludeIndex)
+				continue; // skip the excluded position
+			result[dst++] = arr[src];
+		}
+
+		return result;
+	}
+
+	/** Returns a copy of {@code arr} with the entry at {@code excludeIndex} dropped. */
+	private static long[] removeElement(final long[] arr, final int excludeIndex) {
+
+		final long[] result = new long[arr.length - 1];
+		int dst = 0;
+		for (int src = 0; src < arr.length; src++) {
+			if (src == excludeIndex)
+				continue; // skip the excluded position
+			result[dst++] = arr[src];
+		}
+
+		return result;
+	}
+
+	/** Entry point: either collect a custom metadata template first, or export directly. */
+	@Override
+	public void run() {
+
+		// Non-custom metadata styles can export immediately.
+		if (!metadataStyle.equals(N5Importer.MetadataCustomKey)) {
+			try {
+				processMultiscale();
+			} catch (IOException | InterruptedException | ExecutionException e) {
+				e.printStackTrace();
+			}
+			return;
+		}
+
+		// Custom metadata: show the template dialog; the export continues in
+		// windowClosing once the user dismisses it.
+		metaSpecDialog = new N5MetadataSpecDialog(this);
+		metaSpecDialog.show(MetadataTemplateMapper.RESOLUTION_ONLY_MAPPER);
+	}
+
+	/**
+	 * Starts a polling thread that reports the executor's completion ratio to
+	 * the ImageJ progress bar until all submitted tasks finish or the executor
+	 * is shut down.
+	 *
+	 * @param exec the executor whose progress is monitored
+	 */
+	private void progressMonitor( final ThreadPoolExecutor exec )
+	{
+		new Thread()
+		{
+			@Override
+			public void run()
+			{
+				IJ.showProgress( 0.01 );
+				try
+				{
+					Thread.sleep( 333 );
+					boolean done = false;
+					while( !done && !exec.isShutdown() )
+					{
+						final long i = exec.getCompletedTaskCount();
+						final long N = exec.getTaskCount();
+						// guard against NaN progress before any task is submitted
+						if( N > 0 )
+						{
+							done = i == N;
+							IJ.showProgress( (double)i / N );
+						}
+						Thread.sleep( 333 );
+					}
+				}
+				catch ( final InterruptedException e )
+				{
+					// restore the interrupt flag instead of swallowing it
+					Thread.currentThread().interrupt();
+				}
+				IJ.showProgress( 1.0 );
+			}
+		}.start();
+	}
+
+ // Resolves the compression chosen in the dialog (compressionArg) to a Compression instance.
+ private Compression getCompression() {
+
+ return getCompression(compressionArg);
+ }
+
+	/**
+	 * Asks the user whether an existing dataset may be removed and overwritten.
+	 *
+	 * @param dataset the path of the dataset that already exists
+	 * @return true if the user confirmed the overwrite
+	 */
+	private final boolean promptOverwrite(final String dataset) {
+
+		// message typo fixed: "dataa" -> "data"
+		return JOptionPane.showConfirmDialog(null,
+				String.format("Dataset (%s) already exists. Completely remove that data and overwrite?", dataset), "Warning",
+				JOptionPane.YES_NO_OPTION, JOptionPane.WARNING_MESSAGE) == JOptionPane.YES_OPTION;
+	}
+
+ /**
+  * Maps a compression name to its n5 {@link Compression} implementation.
+  * Unrecognized names fall back to raw (no compression).
+  *
+  * @param compressionArg the compression name
+  * @return the compression instance
+  */
+ public static Compression getCompression( final String compressionArg ) {
+
+ switch (compressionArg) {
+ case GZIP_COMPRESSION:
+ return new GzipCompression();
+ case LZ4_COMPRESSION:
+ return new Lz4Compression();
+ case XZ_COMPRESSION:
+ return new XzCompression();
+ case RAW_COMPRESSION:
+ return new RawCompression();
+ case BLOSC_COMPRESSION:
+ return new BloscCompression();
+ case ZSTD_COMPRESSION:
+ return new ZstandardCompression();
+ default:
+ // unknown selection: store uncompressed rather than failing
+ return new RawCompression();
+ }
+ }
+
+ // WindowListener: only windowClosing (below) carries logic; the rest are no-ops.
+ @Override
+ public void windowOpened(final WindowEvent e) {}
+
+ @Override
+ public void windowIconified(final WindowEvent e) {}
+
+ @Override
+ public void windowDeiconified(final WindowEvent e) {}
+
+ @Override
+ public void windowDeactivated(final WindowEvent e) {}
+
+ // Fires when the custom-metadata template dialog is closed: capture the
+ // user-defined mapper, then run the export that was deferred in run().
+ // NOTE(review): assumes metaSpecDialog is non-null here, i.e. this listener is
+ // only attached after the dialog was created — confirm against the dialog code.
+ @Override
+ public void windowClosing(final WindowEvent e) {
+
+ styles.put(N5Importer.MetadataCustomKey, metaSpecDialog.getMapper());
+ impMetaWriterTypes.put(MetadataTemplateMapper.class, new ImagePlusMetadataTemplate());
+ try {
+ processMultiscale();
+ } catch (IOException | InterruptedException | ExecutionException e1) {
+ e1.printStackTrace();
+ }
+ }
+
+ // Remaining WindowListener no-ops.
+ @Override
+ public void windowClosed(final WindowEvent e) {}
+
+ @Override
+ public void windowActivated(final WindowEvent e) {}
+
+}
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/ij/N5SubsetExporter.java b/src/main/java/org/janelia/saalfeldlab/n5/ij/N5SubsetExporter.java
new file mode 100644
index 00000000..cf0b1251
--- /dev/null
+++ b/src/main/java/org/janelia/saalfeldlab/n5/ij/N5SubsetExporter.java
@@ -0,0 +1,315 @@
+/**
+ * Copyright (c) 2018--2020, Saalfeld lab
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright notice,
+ * this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
+ * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ */
+package org.janelia.saalfeldlab.n5.ij;
+
+import java.io.IOException;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+
+import org.janelia.saalfeldlab.n5.DatasetAttributes;
+import org.janelia.saalfeldlab.n5.N5Writer;
+import org.janelia.saalfeldlab.n5.imglib2.N5Utils;
+import org.janelia.saalfeldlab.n5.universe.N5Factory;
+import org.janelia.saalfeldlab.n5.universe.metadata.N5DatasetMetadata;
+import org.scijava.app.StatusService;
+import org.scijava.command.Command;
+import org.scijava.command.ContextCommand;
+import org.scijava.log.LogService;
+import org.scijava.plugin.Parameter;
+import org.scijava.plugin.Plugin;
+import org.scijava.ui.UIService;
+
+import ij.IJ;
+import ij.ImagePlus;
+import net.imglib2.FinalInterval;
+import net.imglib2.Interval;
+import net.imglib2.img.Img;
+import net.imglib2.img.display.imagej.ImageJFunctions;
+import net.imglib2.type.NativeType;
+import net.imglib2.type.numeric.RealType;
+import net.imglib2.util.Intervals;
+import net.imglib2.util.Util;
+import net.imglib2.view.IntervalView;
+import net.imglib2.view.Views;
+
+@Plugin(type = Command.class, menuPath = "File>Save As>HDF5/N5/Zarr/OME-NGFF (patch)",
+ description = "Insert the current image as a patch into an existing dataset at a user-defined offset. New datasets can be created and existing "
+ + "datsets can be extended.")
+public class N5SubsetExporter extends ContextCommand {
+
+// @Parameter(visibility = ItemVisibility.MESSAGE, required = false)
+// private final String message = "Insert the current image into an existing dataset at a user-defined offset. New datasets can be created, and existing"
+// + "datsets can be extended.";
+
+ @Parameter
+ private LogService log;
+
+ @Parameter
+ private StatusService status;
+
+ @Parameter
+ private UIService ui;
+
+ @Parameter(label = "Image")
+ private ImagePlus image; // or use Dataset? - maybe later
+
+ @Parameter(label = "Root url")
+ private String containerRoot;
+
+ @Parameter(label = "Dataset", required = false, description = "This argument is ignored if the N5ViewerMetadata style is selected")
+ private String dataset;
+
+ @Parameter(label = "Thread count", required = true, min = "1", max = "256")
+ private int nThreads = 1;
+
+ @Parameter(label = "Offset", required = false, description = "The point in pixel units where the origin of this image will be written into the n5-dataset (comma-delimited)")
+ private String subsetOffset;
+
+ @Parameter(label = "Chunk size", description = "The size of chunks to use if a new array is created. Comma separated, for example: \"64,32,16\".\n " +
+ "You may provide fewer values than the data dimension. In that case, the size will "
+ + "be expanded to necessary size with the last value, for example \"64\", will expand " +
+ "to \"64,64,64\" for 3D data.")
+ private String chunkSizeArg = "64";
+
+ @Parameter(label = "Compression", style = "listBox", description = "The compression type to use if a new array is created.",
+ choices = {
+ N5ScalePyramidExporter.GZIP_COMPRESSION,
+ N5ScalePyramidExporter.RAW_COMPRESSION,
+ N5ScalePyramidExporter.LZ4_COMPRESSION,
+ N5ScalePyramidExporter.XZ_COMPRESSION,
+ N5ScalePyramidExporter.BLOSC_COMPRESSION,
+ N5ScalePyramidExporter.ZSTD_COMPRESSION})
+ private String compressionArg = N5ScalePyramidExporter.GZIP_COMPRESSION;
+
+ private long[] offset;
+
+ public N5SubsetExporter() {}
+
+ public N5SubsetExporter(final ImagePlus image, final String n5RootLocation, final String n5Dataset, final String subsetOffset) {
+
+ setOptions(image, n5RootLocation, n5Dataset, subsetOffset);
+ }
+
+ public N5SubsetExporter(final ImagePlus image, final String n5RootLocation, final String n5Dataset, final long[] subsetOffset) {
+
+ setOptions(image, n5RootLocation, n5Dataset, subsetOffset);
+ }
+
+ public static void main(final String[] args) {
+
+// final ImageJ ij = new ImageJ();
+// final ImagePlus imp = IJ.openImage("/home/john/tmp/mitosis-xyct.tif");
+
+// final ImagePlus imp = IJ.openImage("/home/john/tmp/mri-stack.tif");
+// final String root = "/home/john/tmp/mri-test.n5";
+
+// final ImagePlus imp = IJ.openImage( "/home/john/tmp/mitosis.tif" );
+// final String root = "/home/john/tmp/mitosis-test.zarr";
+
+ final ImagePlus imp = IJ.openImage( "/home/john/tmp/boats.tif");
+ final String root = "/home/john/tmp/asdf.n5";
+ final String dset = "a/b";
+
+ final N5SubsetExporter exp = new N5SubsetExporter();
+ exp.setOptions(imp, root, dset, "200,400");
+ exp.run();
+ }
+
+ public void setOptions(final ImagePlus image, final String containerRoot, final String dataset, final String subsetOffset) {
+
+ this.image = image;
+ this.containerRoot = containerRoot;
+ this.dataset = dataset;
+ this.subsetOffset = subsetOffset;
+ }
+
+ public void setOptions(final ImagePlus image, final String containerRoot, final String dataset, final long[] subsetOffset) {
+
+ this.image = image;
+ this.containerRoot = containerRoot;
+ this.dataset = dataset;
+ this.offset = subsetOffset;
+ }
+
+ public void setOptions(final ImagePlus image, final String containerRoot, final String dataset, final String subsetOffset,
+ final String chunkSizeArg, final String compression) {
+
+ this.image = image;
+ this.containerRoot = containerRoot;
+ this.dataset = dataset;
+ this.subsetOffset = subsetOffset;
+ this.chunkSizeArg = chunkSizeArg;
+ this.compressionArg = compression;
+ }
+
+ public void setOptions(final ImagePlus image, final String containerRoot, final String dataset, final long[] subsetOffset,
+ final String chunkSizeArg, final String compression) {
+
+ this.image = image;
+ this.containerRoot = containerRoot;
+ this.dataset = dataset;
+ this.offset = subsetOffset;
+ this.chunkSizeArg = chunkSizeArg;
+ this.compressionArg = compression;
+ }
+
+ public void setOffset(final long[] offset) {
+
+ this.offset = offset;
+ }
+
+ public & NativeType, M extends N5DatasetMetadata> void process() throws IOException, InterruptedException, ExecutionException {
+
+ final N5Writer n5 = new N5Factory().openWriter(containerRoot);
+ write(n5);
+ n5.close();
+ }
+
+ public void parseOffset() {
+
+ if (this.offset != null)
+ return;
+
+ final int nd = image.getNDimensions();
+ final String[] blockArgList = subsetOffset.split(",");
+ final int[] dims = Intervals.dimensionsAsIntArray( ImageJFunctions.wrap( image ));
+
+ offset = new long[nd];
+ int i = 0;
+ while (i < blockArgList.length && i < nd) {
+ offset[i] = Integer.parseInt(blockArgList[i]);
+ i++;
+ }
+ final int N = blockArgList.length - 1;
+
+ while (i < nd) {
+ if( offset[N] > dims[i] )
+ offset[i] = dims[i];
+ else
+ offset[i] = offset[N];
+
+ i++;
+ }
+ }
+
+ @SuppressWarnings({"unchecked", "rawtypes"})
+ private void write(
+ final N5Writer n5) throws IOException, InterruptedException, ExecutionException {
+
+ parseOffset();
+
+ final Img ipImg;
+ if (image.getType() == ImagePlus.COLOR_RGB)
+ ipImg = (Img)N5IJUtils.wrapRgbAsInt(image);
+ else
+ ipImg = ImageJFunctions.wrap(image);
+
+ final IntervalView rai = Views.translate(ipImg, offset);
+
+ // create an empty dataset if it one does not exist
+ if (!n5.datasetExists(dataset)) {
+ final long[] dimensions = outputInterval(rai).dimensionsAsLongArray();
+ final int[] blockSize = N5ScalePyramidExporter.parseBlockSize(chunkSizeArg, dimensions);
+ final DatasetAttributes attributes = new DatasetAttributes(
+ dimensions,
+ blockSize,
+ N5Utils.dataType((T)Util.getTypeFromInterval(rai)),
+ N5ScalePyramidExporter.getCompression(compressionArg));
+
+ n5.createDataset(dataset, attributes);
+ }
+
+ if (nThreads > 1)
+ N5Utils.saveRegion(rai, n5, dataset);
+ else {
+ final ThreadPoolExecutor threadPool = new ThreadPoolExecutor(nThreads, nThreads, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue());
+ progressMonitor(threadPool);
+ N5Utils.saveRegion(rai, n5, dataset, threadPool);
+ threadPool.shutdown();
+ }
+ }
+
+ private Interval outputInterval(final Interval interval) {
+
+ final int N = interval.numDimensions();
+ final long[] min = new long[N];
+ final long[] max = new long[N];
+ for (int i = 0; i < N; i++) {
+ min[i] = 0;
+ if( interval.min(i) < 0 )
+ max[i] = interval.dimension(i) - 1;
+ else
+ max[i] = interval.max(i);
+ }
+
+ return new FinalInterval(min, max);
+ }
+
+ @Override
+ public void run() {
+
+ try {
+ process();
+ } catch (final IOException e) {
+ e.printStackTrace();
+ } catch (final InterruptedException e) {
+ e.printStackTrace();
+ } catch (final ExecutionException e) {
+ e.printStackTrace();
+ }
+ }
+
+ private void progressMonitor( final ThreadPoolExecutor exec )
+ {
+ new Thread()
+ {
+ @Override
+ public void run()
+ {
+ IJ.showProgress( 0.01 );
+ try
+ {
+ Thread.sleep( 333 );
+ boolean done = false;
+ while( !done && !exec.isShutdown() )
+ {
+ final long i = exec.getCompletedTaskCount();
+ final long N = exec.getTaskCount();
+ done = i == N;
+ IJ.showProgress( (double)i / N );
+ Thread.sleep( 333 );
+ }
+ }
+ catch ( final InterruptedException e ) { }
+ IJ.showProgress( 1.0 );
+ }
+ }.start();
+ return;
+ }
+
+}
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/ij/N5Exporter.java b/src/main/java/org/janelia/saalfeldlab/n5/ij/NgffExporter.java
similarity index 52%
rename from src/main/java/org/janelia/saalfeldlab/n5/ij/N5Exporter.java
rename to src/main/java/org/janelia/saalfeldlab/n5/ij/NgffExporter.java
index dd5ab64c..9544c496 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/ij/N5Exporter.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/ij/NgffExporter.java
@@ -32,24 +32,18 @@
import net.imglib2.img.display.imagej.ImageJFunctions;
import net.imglib2.type.NativeType;
import net.imglib2.type.numeric.RealType;
-import net.imglib2.view.IntervalView;
+import net.imglib2.view.SubsampleIntervalView;
import net.imglib2.view.Views;
import org.janelia.saalfeldlab.n5.Compression;
-import org.janelia.saalfeldlab.n5.DataType;
import org.janelia.saalfeldlab.n5.DatasetAttributes;
import org.janelia.saalfeldlab.n5.GzipCompression;
import org.janelia.saalfeldlab.n5.Lz4Compression;
+import org.janelia.saalfeldlab.n5.N5URI;
import org.janelia.saalfeldlab.n5.N5Writer;
import org.janelia.saalfeldlab.n5.RawCompression;
import org.janelia.saalfeldlab.n5.XzCompression;
import org.janelia.saalfeldlab.n5.blosc.BloscCompression;
import org.janelia.saalfeldlab.n5.imglib2.N5Utils;
-import org.janelia.saalfeldlab.n5.universe.N5Factory;
-import org.janelia.saalfeldlab.n5.universe.metadata.N5CosemMetadataParser;
-import org.janelia.saalfeldlab.n5.universe.metadata.N5DatasetMetadata;
-import org.janelia.saalfeldlab.n5.universe.metadata.N5Metadata;
-import org.janelia.saalfeldlab.n5.universe.metadata.N5MetadataWriter;
-import org.janelia.saalfeldlab.n5.universe.metadata.N5SingleScaleMetadataParser;
import org.janelia.saalfeldlab.n5.metadata.imagej.CosemToImagePlus;
import org.janelia.saalfeldlab.n5.metadata.imagej.ImagePlusLegacyMetadataParser;
import org.janelia.saalfeldlab.n5.metadata.imagej.ImagePlusMetadataTemplate;
@@ -57,25 +51,41 @@
import org.janelia.saalfeldlab.n5.metadata.imagej.MetadataTemplateMapper;
import org.janelia.saalfeldlab.n5.metadata.imagej.N5ViewerToImagePlus;
import org.janelia.saalfeldlab.n5.ui.N5MetadataSpecDialog;
+import org.janelia.saalfeldlab.n5.universe.N5Factory;
+import org.janelia.saalfeldlab.n5.universe.metadata.N5CosemMetadataParser;
+import org.janelia.saalfeldlab.n5.universe.metadata.N5DatasetMetadata;
+import org.janelia.saalfeldlab.n5.universe.metadata.N5Metadata;
+import org.janelia.saalfeldlab.n5.universe.metadata.N5MetadataWriter;
+import org.janelia.saalfeldlab.n5.universe.metadata.N5SingleScaleMetadataParser;
+import org.janelia.saalfeldlab.n5.universe.metadata.axes.Axis;
+import org.janelia.saalfeldlab.n5.universe.metadata.ome.ngff.v04.OmeNgffMetadata;
+import org.janelia.saalfeldlab.n5.universe.metadata.ome.ngff.v04.OmeNgffMetadataParser;
+import org.janelia.saalfeldlab.n5.universe.metadata.ome.ngff.v04.OmeNgffMultiScaleMetadata;
+import org.janelia.saalfeldlab.n5.universe.metadata.ome.ngff.v04.OmeNgffMultiScaleMetadata.OmeNgffDataset;
+import org.janelia.saalfeldlab.n5.universe.metadata.ome.ngff.v04.coordinateTransformations.CoordinateTransformation;
+import org.janelia.saalfeldlab.n5.universe.metadata.ome.ngff.v04.coordinateTransformations.ScaleCoordinateTransformation;
+import org.janelia.saalfeldlab.n5.zarr.ZarrDatasetAttributes;
import org.scijava.ItemVisibility;
import org.scijava.app.StatusService;
-import org.scijava.command.Command;
import org.scijava.command.ContextCommand;
import org.scijava.log.LogService;
import org.scijava.plugin.Parameter;
-import org.scijava.plugin.Plugin;
import org.scijava.ui.UIService;
import java.awt.event.WindowEvent;
import java.awt.event.WindowListener;
import java.io.IOException;
-import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
-import java.util.concurrent.*;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Executors;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.ThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.DoubleStream;
-@Plugin(type = Command.class, menuPath = "File>Save As>Export HDF5/N5/Zarr")
-public class N5Exporter extends ContextCommand implements WindowListener {
+public class NgffExporter extends ContextCommand implements WindowListener {
+//public class NgffExporter {
public static final String GZIP_COMPRESSION = "gzip";
public static final String RAW_COMPRESSION = "raw";
@@ -92,7 +102,7 @@ public class N5Exporter extends ContextCommand implements WindowListener {
public static enum OVERWRITE_OPTIONS {NO_OVERWRITE, OVERWRITE, WRITE_SUBSET}
@Parameter(visibility = ItemVisibility.MESSAGE, required = false)
- private String message = "Export an ImagePlus to an HDF5, N5, or Zarr container.";
+ private final String message = "Export an ImagePlus to an OME-NGFF";
@Parameter
private LogService log;
@@ -106,36 +116,28 @@ public static enum OVERWRITE_OPTIONS {NO_OVERWRITE, OVERWRITE, WRITE_SUBSET}
@Parameter(label = "Image")
private ImagePlus image; // or use Dataset? - maybe later
- @Parameter(label = "N5 root url")
- private String n5RootLocation;
+ @Parameter(label = "Root url")
+ private String rootLocation;
@Parameter(
label = "Dataset",
- required = false,
- description = "This argument is ignored if the N5ViewerMetadata style is selected")
- private String n5Dataset;
+ required = false)
+ private String dataset;
@Parameter(label = "Block size")
private String blockSizeArg;
+ @Parameter(label = "Number of scales")
+ private Integer numScales = 1;
+
@Parameter(
label = "Compression",
choices = {GZIP_COMPRESSION, RAW_COMPRESSION, LZ4_COMPRESSION, XZ_COMPRESSION, BLOSC_COMPRESSION},
style = "listBox")
private String compressionArg = GZIP_COMPRESSION;
- @Parameter(
- label = "metadata type",
- description = "The style for metadata to be stored in the exported N5.",
- choices = {N5Importer.MetadataN5ViewerKey,
- N5Importer.MetadataN5CosemKey,
- N5Importer.MetadataImageJKey,
- N5Importer.MetadataCustomKey,
- NONE})
- private String metadataStyle = N5Importer.MetadataN5ViewerKey;
-
@Parameter(label = "Thread count", required = true, min = "1", max = "256")
- private int nThreads = 1;
+ private final int nThreads = 1;
@Parameter(
label = "Overwrite options", required = true,
@@ -150,15 +152,15 @@ public static enum OVERWRITE_OPTIONS {NO_OVERWRITE, OVERWRITE, WRITE_SUBSET}
private int[] blockSize;
- private Map> styles;
+ private final Map> styles;
private ImageplusMetadata> impMeta;
private N5MetadataSpecDialog metaSpecDialog;
- private HashMap, ImageplusMetadata>> impMetaWriterTypes;
+ private final HashMap, ImageplusMetadata>> impMetaWriterTypes;
- public N5Exporter() {
+ public NgffExporter() {
styles = new HashMap>();
styles.put(N5Importer.MetadataN5ViewerKey, new N5SingleScaleMetadataParser());
@@ -170,54 +172,43 @@ public N5Exporter() {
impMetaWriterTypes.put(ImagePlusLegacyMetadataParser.class, new ImagePlusLegacyMetadataParser());
impMetaWriterTypes.put(N5CosemMetadataParser.class, new CosemToImagePlus());
impMetaWriterTypes.put(N5SingleScaleMetadataParser.class, new N5ViewerToImagePlus());
-
+
}
public void setOptions(
final ImagePlus image,
- final String n5RootLocation,
- final String n5Dataset,
+ final String rootLocation,
+ final String dataset,
final String blockSizeArg,
- final String metadataStyle,
final String compression,
+ final int nScales,
final String overwriteOption,
final String subsetOffset) {
this.image = image;
- this.n5RootLocation = n5RootLocation;
-
- this.n5Dataset = n5Dataset;
+ this.rootLocation = rootLocation;
+ this.dataset = N5URI.normalizeGroupPath(dataset);
this.blockSizeArg = blockSizeArg;
- this.metadataStyle = metadataStyle;
this.compressionArg = compression;
+ this.numScales = nScales;
+
this.overwriteChoices = overwriteOption;
this.subsetOffset = subsetOffset;
}
- /**
- * Set the custom metadata mapper to use programmically.
- *
- * @param metadataMapper the metadata template mapper
- */
- public void setMetadataMapper(final MetadataTemplateMapper metadataMapper) {
-
- styles.put(N5Importer.MetadataCustomKey, metadataMapper);
- impMetaWriterTypes.put(MetadataTemplateMapper.class, new ImagePlusMetadataTemplate());
- }
-
public void parseBlockSize() {
final int nd = image.getNDimensions();
- String[] blockArgList = blockSizeArg.split(",");
+ final String[] blockArgList = blockSizeArg.split(",");
blockSize = new int[nd];
int i = 0;
while (i < blockArgList.length && i < nd) {
blockSize[i] = Integer.parseInt(blockArgList[i]);
i++;
}
- int N = blockArgList.length - 1;
+ final int N = blockArgList.length - 1;
while (i < nd) {
blockSize[i] = blockSize[N];
@@ -225,196 +216,176 @@ public void parseBlockSize() {
}
}
- @SuppressWarnings("unchecked")
- public & NativeType, M extends N5DatasetMetadata> void process() throws IOException, InterruptedException, ExecutionException {
+ public & NativeType, M extends N5DatasetMetadata> void process()
+ throws IOException, InterruptedException, ExecutionException {
+
+ final N5Writer n5 = new N5Factory()
+ .gsonBuilder(OmeNgffMetadataParser.gsonBuilder())
+ .openWriter(rootLocation);
+
+ final Compression compression = getCompression();
+ parseBlockSize();
+
+ final N5MetadataWriter writer = null;
+
+ // check and warn re: RGB image if relevant
+ // if (image.getType() == ImagePlus.COLOR_RGB && !(writer instanceof
+ // N5ImagePlusMetadata))
+ // log.warn("RGB images are best saved using ImageJ metatadata. Other choices "
+ // + "may lead to unexpected behavior.");
+ final Img img = ImageJFunctions.wrap(image);
+ final int nd = img.numDimensions();
+ write( img, n5, dataset + "/s0", compression, writer);
+
+
+ final DatasetAttributes[] dsetAttrs = new DatasetAttributes[numScales];
+ final OmeNgffDataset[] msDatasets = new OmeNgffDataset[numScales];
+
+ String dset = dataset + "/s0";
+ dsetAttrs[0] = n5.getDatasetAttributes(dset);
+ msDatasets[0] = new OmeNgffDataset();
+ msDatasets[0].path = dset;
+ int scale = 1;
+ for( int i = 1; i < numScales; i++ ) {
- final N5Writer n5 = new N5Factory().openWriter(n5RootLocation);
- final Compression compression = getCompression();
- parseBlockSize();
+ scale *= 2;
+ final SubsampleIntervalView imgDown = downsampleSimple( img, scale );
+ dset = String.format("%s/s%d", dataset, i);
- N5MetadataWriter writer = null;
- if (!metadataStyle.equals(NONE)) {
- writer = (N5MetadataWriter)styles.get(metadataStyle);
- if (writer != null)
- {
- impMeta = impMetaWriterTypes.get(writer.getClass());
- }
+ write(imgDown, n5, dset, compression, writer);
+ dsetAttrs[i] = n5.getDatasetAttributes(dset);
+
+ msDatasets[i] = new OmeNgffDataset();
+ msDatasets[i].path = dset;
+ final double s = scale;
+ msDatasets[i].coordinateTransformations = new CoordinateTransformation[] {
+ new ScaleCoordinateTransformation( DoubleStream.generate(() -> s ).limit(nd).toArray())
+ };
+
+ }
+
+ final OmeNgffMultiScaleMetadata ms = buildMetadata(dataset, dsetAttrs, msDatasets);
+ final OmeNgffMultiScaleMetadata[] msList = new OmeNgffMultiScaleMetadata[] { ms };
+
+ final OmeNgffMetadata meta = new OmeNgffMetadata(dataset, msList);
+ try {
+ new OmeNgffMetadataParser().writeMetadata(meta, n5, dataset);
+ } catch (final Exception e) {
+ e.printStackTrace();
+ }
+
+ n5.close();
}
- // check and warn re: RGB image if relevant
- // if (image.getType() == ImagePlus.COLOR_RGB && !(writer instanceof N5ImagePlusMetadata))
- // log.warn("RGB images are best saved using ImageJ metatadata. Other choices "
- // + "may lead to unexpected behavior.");
-
- if (metadataStyle.equals(NONE) ||
- metadataStyle.equals(N5Importer.MetadataImageJKey) ||
- metadataStyle.equals(N5Importer.MetadataCustomKey)) {
- write(n5, compression, writer);
- } else {
- writeSplitChannels(n5, compression, writer);
+ public & NativeType, M extends N5DatasetMetadata> SubsampleIntervalView downsampleSimple(
+ final RandomAccessibleInterval img, final int downsampleFactor) {
+ return Views.subsample(img, downsampleFactor);
}
- n5.close();
- }
- @SuppressWarnings({"unchecked", "rawtypes"})
- private void write(
- final N5Writer n5,
- final Compression compression,
- final N5MetadataWriter writer) throws IOException, InterruptedException, ExecutionException {
-
- if (overwriteChoices.equals(WRITE_SUBSET)) {
- final long[] offset = Arrays.stream(subsetOffset.split(","))
- .mapToLong(Long::parseLong)
- .toArray();
-
- if (!n5.datasetExists(n5Dataset)) {
- // details don't matter, saveRegions changes this value
- final long[] dimensions = new long[image.getNDimensions()];
- Arrays.fill(dimensions, 1);
-
- // find data type
- int type = image.getType();
- DataType n5type;
-
- switch (type) {
- case ImagePlus.GRAY8:
- n5type = DataType.UINT8;
- break;
- case ImagePlus.GRAY16:
- n5type = DataType.UINT16;
- break;
- case ImagePlus.GRAY32:
- n5type = DataType.FLOAT32;
- break;
- case ImagePlus.COLOR_RGB:
- n5type = DataType.UINT32;
- break;
- default:
- n5type = null;
- }
+ public OmeNgffMultiScaleMetadata buildMetadata(final String path, final DatasetAttributes[] dsetAttrs, final OmeNgffDataset[] datasets) {
- final DatasetAttributes attributes = new DatasetAttributes(dimensions, blockSize, n5type, compression);
- n5.createDataset(n5Dataset, attributes);
- writeMetadata(n5, n5Dataset, writer);
- }
+ if( !OmeNgffMultiScaleMetadata.allSameAxisOrder(dsetAttrs))
+ throw new RuntimeException("All ome-zarr arrays must have same array order");
- final Img< T > ipImg;
- if( image.getType() == ImagePlus.COLOR_RGB )
- ipImg = ( Img< T > ) N5IJUtils.wrapRgbAsInt( image );
- else
- ipImg = ImageJFunctions.wrap( image );
-
- final IntervalView< T > rai = Views.translate( ipImg, offset );
- if (nThreads > 1)
- N5Utils.saveRegion( rai, n5, n5Dataset );
- else {
- final ThreadPoolExecutor threadPool = new ThreadPoolExecutor( nThreads, nThreads, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue() );
- progressMonitor( threadPool );
- N5Utils.saveRegion( rai, n5, n5Dataset, threadPool);
- threadPool.shutdown();
- }
+ final int nc = image.getNChannels();
+ final int nz = image.getNSlices();
+ final int nt = image.getNFrames();
+ final String unit = image.getCalibration().getUnit();
+ int N = 2;
+ if (nc > 1) {
+ N++;
}
- else
- {
- if( overwriteChoices.equals( NO_OVERWRITE ) && n5.datasetExists( n5Dataset ))
- {
- if( ui != null )
- ui.showDialog( String.format("Dataset (%s) already exists, not writing.", n5Dataset ) );
- else
- System.out.println( String.format("Dataset (%s) already exists, not writing.", n5Dataset ) );
+ if (nz > 1) {
+ N++;
+ }
+ if (nt > 1) {
+ N++;
+ }
+ final Axis[] axes = new Axis[N];
+ final double[] pixelSpacing = new double[N];
- return;
- }
+ axes[0] = new Axis(Axis.SPACE, "x", unit);
+ pixelSpacing[0] = image.getCalibration().pixelWidth;
- // Here, either allowing overwrite, or not allowing, but the dataset does not exist
+ axes[1] = new Axis(Axis.SPACE, "y", unit);
+ pixelSpacing[1] = image.getCalibration().pixelHeight;
- // use threadPool even for single threaded execution for progress monitoring
- final ThreadPoolExecutor threadPool = new ThreadPoolExecutor( nThreads, nThreads, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue() );
- progressMonitor( threadPool );
- N5IJUtils.save( image, n5, n5Dataset, blockSize, compression, threadPool);
- threadPool.shutdown();
+ int d = 2;
+ if (nc > 1) {
+ axes[d] = new Axis(Axis.CHANNEL, "c", "");
+ pixelSpacing[d] = 1.0;
+ d++;
+ }
- writeMetadata( n5, n5Dataset, writer );
+ if (nz > 1) {
+ axes[d] = new Axis(Axis.SPACE, "z", unit);
+ pixelSpacing[d] = image.getCalibration().pixelDepth;
+ d++;
}
- }
- @SuppressWarnings( "unused" )
- private static long[] getOffsetForSaveSubset3d( final ImagePlus imp )
- {
- final int nd = imp.getNDimensions();
- long[] offset = new long[ nd ];
+ if (nt > 1) {
+ axes[d] = new Axis(Axis.TIME, "t", image.getCalibration().getTimeUnit());
+ pixelSpacing[d] = image.getCalibration().frameInterval;
+ d++;
+ }
- offset[ 0 ] = (int)imp.getCalibration().xOrigin;
- offset[ 1 ] = (int)imp.getCalibration().yOrigin;
+ // need to reverse the axes if the arrays are in C order
+ final Axis[] axesToWrite = OmeNgffMultiScaleMetadata.reverseIfCorder( dsetAttrs[0], axes );
- int j = 2;
- if( imp.getNSlices() > 1 )
- offset[ j++ ] = (int)imp.getCalibration().zOrigin;
+ final String name = image.getTitle();
+ final String type = "sampling";
+ final String version = "0.4";
- return offset;
+ return new OmeNgffMultiScaleMetadata(
+ N, path, name, type, version, axesToWrite,
+ datasets, dsetAttrs,
+ null, null); // no global coordinate transforms of downsampling metadata
}
- @SuppressWarnings({"rawtypes", "unchecked"})
- private void writeSplitChannels(
- final N5Writer n5,
- final Compression compression,
- final N5MetadataWriter writer) throws IOException, InterruptedException, ExecutionException
- {
- final Img img;
- if( image.getType() == ImagePlus.COLOR_RGB )
- img = (( Img< T > ) N5IJUtils.wrapRgbAsInt( image ));
- else
- img = ImageJFunctions.wrap(image);
-
- String datasetString = "";
- int[] blkSz = blockSize;
- for (int c = 0; c < image.getNChannels(); c++) {
- RandomAccessibleInterval channelImg;
- final int nd = img.numDimensions();
- // If there is only one channel, img may be 3d, but we don't want to slice
- // so if we have a 3d image check that the image is multichannel
- if ( nd >= 4 || (nd == 3 && image.getNChannels() > 1)) {
- channelImg = Views.hyperSlice(img, 2, c);
-
- // if we slice the image, appropriately slice the block size also
- blkSz = sliceBlockSize( 2 );
-
- } else {
- channelImg = img;
- }
- if (metadataStyle.equals(N5Importer.MetadataN5ViewerKey)) {
- datasetString = String.format("%s/c%d/s0", n5Dataset, c);
- } else if (image.getNChannels() > 1) {
- datasetString = String.format("%s/c%d", n5Dataset, c);
- } else {
- datasetString = n5Dataset;
- }
+ @SuppressWarnings({ "rawtypes" })
+ private void write(
+ final RandomAccessibleInterval image,
+ final N5Writer n5,
+ final String dataset,
+ final Compression compression, final N5MetadataWriter writer)
+ throws IOException, InterruptedException, ExecutionException {
- // use threadPool even for single threaded execution for progress monitoring
- final ThreadPoolExecutor threadPool = new ThreadPoolExecutor( nThreads, nThreads, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue() );
- progressMonitor( threadPool );
- N5Utils.save( channelImg, n5, datasetString, blkSz, compression, threadPool );
- threadPool.shutdown();
+ if (overwriteChoices.equals(NO_OVERWRITE) && n5.datasetExists(dataset)) {
+ if (ui != null)
+ ui.showDialog(String.format("Dataset (%s) already exists, not writing.", dataset));
+ else
+ System.out.println(String.format("Dataset (%s) already exists, not writing.", dataset));
- writeMetadata(n5, datasetString, writer);
+ return;
}
+
+ // Here, either allowing overwrite, or not allowing, but the dataset does not
+ // exist
+
+ // use threadPool even for single threaded execution for progress monitoring
+ final ThreadPoolExecutor threadPool = new ThreadPoolExecutor(nThreads, nThreads, 0L, TimeUnit.MILLISECONDS,
+ new LinkedBlockingQueue());
+ progressMonitor(threadPool);
+ N5Utils.save(image, n5, dataset, blockSize, compression, Executors.newFixedThreadPool(nThreads));
+ writeMetadata(n5, dataset, writer);
}
- private int[] sliceBlockSize( int exclude )
- {
- int[] out = new int[ blockSize.length - 1 ];
- int j = 0;
- for( int i = 0; i < blockSize.length; i++ )
- if( i != exclude )
- {
- out[j] = blockSize[i];
- j++;
- }
+ @SuppressWarnings("unused")
+ private static long[] getOffsetForSaveSubset3d(final ImagePlus imp) {
+ final int nd = imp.getNDimensions();
+ final long[] offset = new long[nd];
- return out;
+ offset[0] = (int) imp.getCalibration().xOrigin;
+ offset[1] = (int) imp.getCalibration().yOrigin;
+
+ int j = 2;
+ if (imp.getNSlices() > 1)
+ offset[j++] = (int) imp.getCalibration().zOrigin;
+
+ return offset;
}
private void writeMetadata(
@@ -437,19 +408,14 @@ private void writeMetadata(
public void run() {
// add more options
- if (metadataStyle.equals(N5Importer.MetadataCustomKey)) {
- metaSpecDialog = new N5MetadataSpecDialog(this);
- metaSpecDialog.show(MetadataTemplateMapper.RESOLUTION_ONLY_MAPPER);
- } else {
- try {
- process();
- } catch (final IOException e) {
- e.printStackTrace();
- } catch (final InterruptedException e) {
- e.printStackTrace();
- } catch (final ExecutionException e) {
- e.printStackTrace();
- }
+ try {
+ process();
+ } catch (final IOException e) {
+ e.printStackTrace();
+ } catch (final InterruptedException e) {
+ e.printStackTrace();
+ } catch (final ExecutionException e) {
+ e.printStackTrace();
}
}
@@ -457,6 +423,7 @@ private void progressMonitor( final ThreadPoolExecutor exec )
{
new Thread()
{
+ @Override
public void run()
{
IJ.showProgress( 0.01 );
@@ -473,7 +440,7 @@ public void run()
Thread.sleep( 333 );
}
}
- catch ( InterruptedException e ) { }
+ catch ( final InterruptedException e ) { }
IJ.showProgress( 1.0 );
}
}.start();
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/metadata/imagej/CosemToImagePlus.java b/src/main/java/org/janelia/saalfeldlab/n5/metadata/imagej/CosemToImagePlus.java
index 1d7e70f5..37d1bc4c 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/metadata/imagej/CosemToImagePlus.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/metadata/imagej/CosemToImagePlus.java
@@ -2,7 +2,6 @@
import ij.ImagePlus;
import ij.measure.Calibration;
-import net.imglib2.realtransform.AffineTransform3D;
import org.janelia.saalfeldlab.n5.DataType;
import org.janelia.saalfeldlab.n5.DatasetAttributes;
@@ -15,6 +14,12 @@
public class CosemToImagePlus extends SpatialMetadataToImagePlus {
+ private boolean includeChannelAxis = false;
+
+ public void includeChannelAxis( boolean includeChannelAxis ) {
+ this.includeChannelAxis = includeChannelAxis;
+ }
+
@Override
public void writeMetadata(final N5CosemMetadata t, final ImagePlus ip) throws IOException {
@@ -23,42 +28,30 @@ public void writeMetadata(final N5CosemMetadata t, final ImagePlus ip) throws IO
final int nd = t.getAttributes().getNumDimensions();
final long[] dims = t.getAttributes().getDimensions();
+ final int[] spatialIndexes = spatialIndexes( t.getCosemTransform().axes );
final CosemTransform transform = t.getCosemTransform();
- if( nd == 2 )
- {
- cal.pixelWidth = transform.scale[1];
- cal.pixelHeight = transform.scale[0];
- cal.pixelDepth = 1;
-
- cal.xOrigin = transform.translate[1];
- cal.yOrigin = transform.translate[0];
- cal.zOrigin = 0;
- }
- else if( nd == 3 )
- {
- cal.pixelWidth = transform.scale[2];
- cal.pixelHeight = transform.scale[1];
- cal.pixelDepth = transform.scale[0];
-
- cal.xOrigin = transform.translate[2];
- cal.yOrigin = transform.translate[1];
- cal.zOrigin = transform.translate[0];
- }
+ cal.pixelWidth = spatialIndexes[0] > -1 ? transform.scale[spatialIndexes[0]] : 1;
+ cal.pixelHeight = spatialIndexes[1] > -1 ? transform.scale[spatialIndexes[1]] : 1;
+ cal.pixelDepth = spatialIndexes[2] > -1 ? transform.scale[spatialIndexes[2]] : 1;
+
+ cal.xOrigin = spatialIndexes[0] > -1 ? transform.translate[spatialIndexes[0]] : 0 ;
+ cal.yOrigin = spatialIndexes[1] > -1 ? transform.translate[spatialIndexes[1]] : 0 ;
+ cal.zOrigin = spatialIndexes[2] > -1 ? transform.translate[spatialIndexes[2]] : 0 ;
cal.setUnit(t.unit());
if (nd == 3)
ip.setDimensions(1, (int) dims[2], 1);
else if (nd == 4)
- ip.setDimensions((int) dims[3], (int) dims[2], 1);
+			ip.setDimensions(1, (int) dims[2], (int) dims[3]); // dims has length nd==4; index 4 was out of bounds
}
@Override
public N5CosemMetadata readMetadata(final ImagePlus imp) throws IOException {
int nd = 2;
- if (imp.getNChannels() > 1) {
+ if (includeChannelAxis && imp.getNChannels() > 1) {
nd++;
}
if (imp.getNSlices() > 1) {
@@ -69,43 +62,57 @@ public N5CosemMetadata readMetadata(final ImagePlus imp) throws IOException {
}
final String[] axes = new String[nd];
- if (nd == 2) {
- axes[0] = "y";
- axes[1] = "x";
- } else if (nd == 3) {
- axes[0] = "z";
- axes[1] = "y";
- axes[2] = "x";
+ final double[] scale = new double[nd];
+ Arrays.fill(scale, 1);
+
+ final double[] translation = new double[nd];
+
+ int k = nd-1;
+ scale[k] = imp.getCalibration().pixelWidth;
+ translation[k] = imp.getCalibration().xOrigin;
+ axes[k--]="x";
+
+ scale[k] = imp.getCalibration().pixelHeight;
+ translation[k] = imp.getCalibration().yOrigin;
+ axes[k--]="y";
+
+ if (includeChannelAxis && imp.getNChannels() > 1) {
+ axes[k--]="c";
+ }
+ if (imp.getNSlices() > 1) {
+ scale[k] = imp.getCalibration().pixelDepth;
+ translation[k] = imp.getCalibration().zOrigin;
+ axes[k--]="z";
+ }
+ if (imp.getNFrames() > 1) {
+ axes[k--]="t";
}
// unit
final String[] units = new String[nd];
Arrays.fill(units, imp.getCalibration().getUnit());
- final double[] scale = new double[3];
- final double[] translation = new double[3];
-
- if (nd == 2) {
- scale[0] = imp.getCalibration().pixelHeight;
- scale[1] = imp.getCalibration().pixelWidth;
- scale[2] = 1;
-
- translation[0] = imp.getCalibration().yOrigin;
- translation[1] = imp.getCalibration().xOrigin;
- translation[2] = 0;
- } else if (nd == 3) {
- scale[0] = imp.getCalibration().pixelDepth;
- scale[1] = imp.getCalibration().pixelHeight;
- scale[2] = imp.getCalibration().pixelWidth;
-
- translation[2] = imp.getCalibration().zOrigin;
- translation[1] = imp.getCalibration().yOrigin;
- translation[0] = imp.getCalibration().xOrigin;
- }
-
- //TODO what to do about DatasetAttributes?
return new N5CosemMetadata("",
new CosemTransform(axes, scale, translation, units),
- new DatasetAttributes(new long[]{}, imp.getDimensions(), DataType.FLOAT32, new GzipCompression()));
+ ImageplusMetadata.datasetAttributes(imp));
}
+
+ private int[] spatialIndexes( final String[] axes ) {
+ final int[] spaceIndexes = new int[3];
+ Arrays.fill(spaceIndexes, -1);
+
+ // COSEM scales and translations are in c-order
+ // but detect the axis types to be extra safe
+ for( int i = 0; i < axes.length; i++ )
+ {
+ if( axes[i].equals("x"))
+ spaceIndexes[0] = i;
+ else if( axes[i].equals("y"))
+ spaceIndexes[1] = i;
+ else if( axes[i].equals("z"))
+ spaceIndexes[2] = i;
+ }
+ return spaceIndexes;
+ }
+
}
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/metadata/imagej/ImagePlusLegacyMetadataParser.java b/src/main/java/org/janelia/saalfeldlab/n5/metadata/imagej/ImagePlusLegacyMetadataParser.java
index 786736d5..034e035a 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/metadata/imagej/ImagePlusLegacyMetadataParser.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/metadata/imagej/ImagePlusLegacyMetadataParser.java
@@ -17,8 +17,8 @@
import ij.ImagePlus;
import ij.measure.Calibration;
-public class ImagePlusLegacyMetadataParser implements N5MetadataParser,
- N5MetadataWriter, ImageplusMetadata
+public class ImagePlusLegacyMetadataParser implements N5MetadataParser,
+ N5MetadataWriter, ImageplusMetadata
{
public static final String titleKey = "title";
@@ -41,14 +41,14 @@ public class ImagePlusLegacyMetadataParser implements N5MetadataParser attrs = new HashMap<>();
+ final HashMap attrs = new HashMap<>();
attrs.put(titleKey, t.name);
attrs.put(fpsKey, t.fps);
@@ -111,7 +111,7 @@ public void writeMetadata(final N5ImagePlusMetadata t, final ImagePlus ip) throw
}
}
}
-
+
@Override
public N5ImagePlusMetadata readMetadata(final ImagePlus ip) throws IOException {
@@ -179,9 +179,8 @@ public Optional parseMetadata(N5Reader n5, N5TreeNode node)
return Optional.of(meta);
- } catch (N5Exception e) {
- }
-
+ } catch (final N5Exception e) { }
+
return Optional.empty();
}
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/metadata/imagej/ImageplusMetadata.java b/src/main/java/org/janelia/saalfeldlab/n5/metadata/imagej/ImageplusMetadata.java
index 2346b800..f363024d 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/metadata/imagej/ImageplusMetadata.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/metadata/imagej/ImageplusMetadata.java
@@ -26,9 +26,18 @@
package org.janelia.saalfeldlab.n5.metadata.imagej;
import ij.ImagePlus;
+import net.imglib2.img.Img;
+import net.imglib2.img.display.imagej.ImageJFunctions;
+import net.imglib2.type.NativeType;
+import net.imglib2.util.Util;
import java.io.IOException;
+import java.util.Arrays;
+import org.janelia.saalfeldlab.n5.DataType;
+import org.janelia.saalfeldlab.n5.DatasetAttributes;
+import org.janelia.saalfeldlab.n5.RawCompression;
+import org.janelia.saalfeldlab.n5.imglib2.N5Utils;
import org.janelia.saalfeldlab.n5.universe.metadata.N5DatasetMetadata;
/**
@@ -55,7 +64,7 @@ public interface ImageplusMetadata {
/**
* Create and return a new metadata object from the given {@link ImagePlus}.
- *
+ *
* @param ip
* the ImagePlus
* @return
@@ -63,4 +72,18 @@ public interface ImageplusMetadata {
* the io exception
*/
public T readMetadata(ImagePlus ip) throws IOException;
+
+ public static > DatasetAttributes datasetAttributes( final ImagePlus imp )
+ {
+ @SuppressWarnings("unchecked")
+ final Img img = (Img)ImageJFunctions.wrap(imp);
+ final DataType dtype = N5Utils.dataType(Util.getTypeFromInterval(img));
+ final long[] dims = img.dimensionsAsLongArray();
+
+ return new DatasetAttributes(
+ dims,
+ Arrays.stream(dims).mapToInt(x -> (int)x).toArray(),
+ dtype,
+ new RawCompression());
+ }
}
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/metadata/imagej/N5ImagePlusMetadata.java b/src/main/java/org/janelia/saalfeldlab/n5/metadata/imagej/N5ImagePlusMetadata.java
index 301e668c..3a411a0c 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/metadata/imagej/N5ImagePlusMetadata.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/metadata/imagej/N5ImagePlusMetadata.java
@@ -30,11 +30,13 @@
import org.janelia.saalfeldlab.n5.DatasetAttributes;
import org.janelia.saalfeldlab.n5.universe.metadata.AbstractN5DatasetMetadata;
import org.janelia.saalfeldlab.n5.universe.metadata.SpatialMetadata;
+import org.janelia.saalfeldlab.n5.universe.metadata.axes.Axis;
+import org.janelia.saalfeldlab.n5.universe.metadata.axes.AxisMetadata;
import java.util.Map;
import java.util.Objects;
-public class N5ImagePlusMetadata extends AbstractN5DatasetMetadata implements SpatialMetadata {
+public class N5ImagePlusMetadata extends AbstractN5DatasetMetadata implements SpatialMetadata, AxisMetadata {
public final String name;
@@ -57,88 +59,108 @@ public class N5ImagePlusMetadata extends AbstractN5DatasetMetadata implements Sp
public final Map properties;
- public N5ImagePlusMetadata(final String path, final DatasetAttributes attributes, final String name,
- final double fps, final double frameInterval, final String unit, final Double pixelWidth,
- final Double pixelHeight, final Double pixelDepth, final Double xOrigin, final Double yOrigin,
- final Double zOrigin, final Integer numChannels, final Integer numSlices, final Integer numFrames,
- final Integer type, final Map properties) {
-
- super(path, attributes);
-
- this.name = name;
- this.fps = Objects.requireNonNull(fps, "fps must be non null");
- this.frameInterval = Objects.requireNonNull(frameInterval, "frameInterval must be non null");
-
- this.unit = Objects.requireNonNull(unit, "unit must be non null");
- this.pixelWidth = Objects.requireNonNull(pixelWidth, "pixelWidth must be non null");
- this.pixelHeight = Objects.requireNonNull(pixelHeight, "pixelHeight must be non null");
- this.pixelDepth = Objects.requireNonNull(pixelDepth, "pixelDepth must be non null");
-
- this.xOrigin = Objects.requireNonNull(xOrigin, "xOrigin must be non null");
- this.yOrigin = Objects.requireNonNull(yOrigin, "yOrigin must be non null");
- this.zOrigin = Objects.requireNonNull(zOrigin, "zOrigin must be non null");
-
- this.numChannels = Objects.requireNonNull(numChannels, "numChannels must be non null");
- this.numSlices = Objects.requireNonNull(numSlices, "numSlices must be non null");
- this.numFrames = Objects.requireNonNull(numFrames, "numFrames must be non null");
-
- // type is not required and so may be null
- if( type == null )
- this.type = -1;
- else
- this.type = type;
-
- this.properties = properties;
- }
-
- // public void crop( final Interval cropInterval )
- // {
- // int i = 2;
- // if( numChannels > 1 )
- // numChannels = (int)cropInterval.dimension( i++ );
- //
- // if( numSlices > 1 )
- // numSlices = (int)cropInterval.dimension( i++ );
- //
- // if( numFrames > 1 )
- // numFrames = (int)cropInterval.dimension( i++ );
- // }
-
- // public static double[] getPixelSpacing( final N5Reader n5, final String dataset ) throws IOException
- // {
- // final double rx = n5.getAttribute( dataset, pixelWidthKey, double.class );
- // final double ry = n5.getAttribute( dataset, pixelHeightKey, double.class );
- // final double rz = n5.getAttribute( dataset, pixelDepthKey, double.class );
- // return new double[] { rx, ry, rz };
- // }
-
- public int getType() {
-
- return type;
- }
-
- @Override public AffineGet spatialTransform() {
-
- final int nd = numSlices > 1 ? 3 : 2;
- final double[] spacing = new double[nd];
- final double[] offset = new double[nd];
-
- spacing[0] = pixelWidth;
- spacing[1] = pixelHeight;
- if (numSlices > 1)
- spacing[2] = pixelDepth;
-
- offset[0] = xOrigin;
- offset[1] = yOrigin;
- if (numSlices > 1)
- offset[2] = zOrigin;
-
- return new ScaleAndTranslation(spacing, offset);
- }
-
- @Override public String unit() {
-
- return unit;
- }
+ private transient Axis[] axes;
+
+ public N5ImagePlusMetadata(final String path, final DatasetAttributes attributes, final String name,
+ final double fps, final double frameInterval, final String unit, final Double pixelWidth,
+ final Double pixelHeight, final Double pixelDepth, final Double xOrigin, final Double yOrigin,
+ final Double zOrigin, final Integer numChannels, final Integer numSlices, final Integer numFrames,
+ final Integer type, final Map properties) {
+
+ super(path, attributes);
+
+ this.name = name;
+ this.fps = Objects.requireNonNull(fps, "fps must be non null");
+ this.frameInterval = Objects.requireNonNull(frameInterval, "frameInterval must be non null");
+
+ this.unit = Objects.requireNonNull(unit, "unit must be non null");
+ this.pixelWidth = Objects.requireNonNull(pixelWidth, "pixelWidth must be non null");
+ this.pixelHeight = Objects.requireNonNull(pixelHeight, "pixelHeight must be non null");
+ this.pixelDepth = Objects.requireNonNull(pixelDepth, "pixelDepth must be non null");
+
+ this.xOrigin = Objects.requireNonNull(xOrigin, "xOrigin must be non null");
+ this.yOrigin = Objects.requireNonNull(yOrigin, "yOrigin must be non null");
+ this.zOrigin = Objects.requireNonNull(zOrigin, "zOrigin must be non null");
+
+ this.numChannels = Objects.requireNonNull(numChannels, "numChannels must be non null");
+ this.numSlices = Objects.requireNonNull(numSlices, "numSlices must be non null");
+ this.numFrames = Objects.requireNonNull(numFrames, "numFrames must be non null");
+
+ // type is not required and so may be null
+ if (type == null)
+ this.type = -1;
+ else
+ this.type = type;
+
+ this.properties = properties;
+
+ axes = buildAxes();
+ }
+
+ private Axis[] buildAxes() {
+
+ int nd = 2;
+ if( numChannels > 1 )
+ nd++;
+
+ if( numSlices > 1 )
+ nd++;
+
+ if( numFrames > 1 )
+ nd++;
+
+ axes = new Axis[nd];
+ axes[0] = new Axis(Axis.SPACE, "x", unit);
+ axes[1] = new Axis(Axis.SPACE, "y", unit);
+
+ int i = 2;
+ if( numChannels > 1 )
+ axes[i++] = new Axis(Axis.CHANNEL, "c", "");
+
+ if( numSlices > 1 )
+ axes[i++] = new Axis(Axis.SPACE, "z", unit);
+
+ if( numFrames > 1 )
+ axes[i++] = new Axis(Axis.TIME, "t", "sec");
+
+ return axes;
+ }
+
+ public int getType() {
+
+ return type;
+ }
+
+ @Override
+ public AffineGet spatialTransform() {
+
+ final int nd = numSlices > 1 ? 3 : 2;
+ final double[] spacing = new double[nd];
+ final double[] offset = new double[nd];
+
+ spacing[0] = pixelWidth;
+ spacing[1] = pixelHeight;
+ if (numSlices > 1)
+ spacing[2] = pixelDepth;
+
+ offset[0] = xOrigin;
+ offset[1] = yOrigin;
+ if (numSlices > 1)
+ offset[2] = zOrigin;
+
+ return new ScaleAndTranslation(spacing, offset);
+ }
+
+ @Override
+ public String unit() {
+
+ return unit;
+ }
+
+ @Override
+ public Axis[] getAxes() {
+
+		return axes != null ? axes : (axes = buildAxes()); // axes is transient: rebuild lazily after Gson deserialization
+ }
}
\ No newline at end of file
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/metadata/imagej/N5ViewerToImagePlus.java b/src/main/java/org/janelia/saalfeldlab/n5/metadata/imagej/N5ViewerToImagePlus.java
index 71042596..f55e8c0b 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/metadata/imagej/N5ViewerToImagePlus.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/metadata/imagej/N5ViewerToImagePlus.java
@@ -46,7 +46,7 @@ public N5SingleScaleMetadata readMetadata(final ImagePlus imp) throws IOExceptio
final AffineTransform3D transform = N5SingleScaleMetadataParser.buildTransform(downsamplingFactors, scale, Optional.empty());
return new N5SingleScaleMetadata("", transform, downsamplingFactors,
scale, translation, units[0],
- null);
+ ImageplusMetadata.datasetAttributes(imp));
}
}
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/metadata/imagej/NgffToImagePlus.java b/src/main/java/org/janelia/saalfeldlab/n5/metadata/imagej/NgffToImagePlus.java
new file mode 100644
index 00000000..3bc2551a
--- /dev/null
+++ b/src/main/java/org/janelia/saalfeldlab/n5/metadata/imagej/NgffToImagePlus.java
@@ -0,0 +1,235 @@
+package org.janelia.saalfeldlab.n5.metadata.imagej;
+
+import java.io.IOException;
+import java.util.Arrays;
+
+import org.janelia.saalfeldlab.n5.DatasetAttributes;
+import org.janelia.saalfeldlab.n5.universe.metadata.axes.Axis;
+import org.janelia.saalfeldlab.n5.universe.metadata.ome.ngff.v04.NgffSingleScaleAxesMetadata;
+import org.janelia.saalfeldlab.n5.universe.metadata.ome.ngff.v04.OmeNgffMultiScaleMetadata;
+import org.janelia.saalfeldlab.n5.universe.metadata.ome.ngff.v04.OmeNgffMultiScaleMetadata.OmeNgffDataset;
+
+import ij.ImagePlus;
+import ij.measure.Calibration;
+
+public class NgffToImagePlus extends SpatialMetadataToImagePlus {
+
+ @Override
+ public void writeMetadata(final NgffSingleScaleAxesMetadata t, final ImagePlus ip) throws IOException {
+
+ ip.setTitle(t.getPath());
+ final Calibration cal = ip.getCalibration();
+
+ final int nd = t.getAttributes().getNumDimensions();
+ final long[] dims = t.getAttributes().getDimensions();
+
+ int numChannels = 0;
+ int numTimes = 0;
+ int numZ = 0;
+ int numSpace = 0;
+
+ int xIdx = -1, yIdx = -1, cIdx = -1, zIdx = -1, tIdx = -1;
+ for (int i = 0; i < t.getAxes().length; i++) {
+
+ final Axis axis = t.getAxis(i);
+ if (axis.getType().equals(Axis.TIME)) {
+ numTimes = (int) t.getAttributes().getDimensions()[i];
+ tIdx = i;
+ }
+
+ if (axis.getType().equals(Axis.CHANNEL)) {
+ numChannels = (int) t.getAttributes().getDimensions()[i];
+ cIdx = i;
+ }
+
+ if( axis.getType().equals(Axis.SPACE))
+ {
+ numSpace++;
+
+ if( numSpace == 1 )
+ xIdx = i;
+ else if ( numSpace == 2 )
+ yIdx = i;
+ else if ( numSpace == 3 )
+ zIdx = i;
+
+ if( numSpace > 2 )
+ numZ = (int)t.getAttributes().getDimensions()[i];
+ }
+ }
+
+
+ // permuting data if axes are in non-standard order
+ // must happen before calling this method
+
+ // setDimensions can't handle zeros, so set these to one if they're zero
+ numChannels = numChannels == 0 ? 1 : numChannels;
+ numZ = numZ == 0 ? 1 : numZ;
+ numTimes = numTimes == 0 ? 1 : numTimes;
+ ip.setDimensions(numChannels, numZ, numTimes);
+
+ if( xIdx >= 0 ) {
+ ip.getCalibration().pixelWidth = t.getScale()[xIdx];
+ ip.getCalibration().xOrigin = t.getTranslation()[xIdx];
+ }
+
+ if( yIdx >= 0 ) {
+ ip.getCalibration().pixelHeight = t.getScale()[yIdx];
+ ip.getCalibration().yOrigin = t.getTranslation()[yIdx];
+ }
+
+ if( zIdx >= 0 ) {
+ ip.getCalibration().pixelDepth = t.getScale()[zIdx];
+ ip.getCalibration().zOrigin = t.getTranslation()[zIdx];
+ }
+
+		if( tIdx >= 0 ) // match the xIdx/yIdx/zIdx checks; a time axis at index 0 is still valid
+ ip.getCalibration().frameInterval = t.getScale()[tIdx];
+
+ }
+
+ @Override
+ public NgffSingleScaleAxesMetadata readMetadata(final ImagePlus ip) throws IOException {
+
+ final int nc = ip.getNChannels();
+ final int nz = ip.getNSlices();
+ final int nt = ip.getNFrames();
+
+ int N = 2;
+ if (nz > 1)
+ N++;
+
+ if (nc > 1)
+ N++;
+
+ if (nt > 1)
+ N++;
+
+ final Axis[] axes = new Axis[N];
+ final double[] scale = new double[N];
+ final double[] offset = new double[N];
+
+ final String spaceUnit = ip.getCalibration().getUnit();
+
+ axes[0] = new Axis(Axis.SPACE, "x", spaceUnit);
+ scale[0] = ip.getCalibration().pixelWidth;
+ offset[0] = ip.getCalibration().xOrigin;
+
+ axes[1] = new Axis(Axis.SPACE, "y", spaceUnit);
+ scale[1] = ip.getCalibration().pixelHeight;
+ offset[1] = ip.getCalibration().yOrigin;
+
+ int k = 2;
+ // channels
+ if (nc > 1) {
+ axes[k] = new Axis(Axis.CHANNEL, "c", "" );
+ scale[k] = 1;
+ offset[k] = 0;
+ k++;
+ }
+
+ // space z
+ if (nz > 1) {
+ axes[k] = new Axis(Axis.SPACE, "z", spaceUnit);
+ scale[k] = ip.getCalibration().pixelDepth;
+ offset[k] = ip.getCalibration().zOrigin;
+ k++;
+ }
+
+ // time
+ if (nt > 1) {
+ axes[k] = new Axis(Axis.TIME, "t", ip.getCalibration().getTimeUnit());
+ scale[k] = ip.getCalibration().frameInterval;
+ if( scale[k] == 0.0 )
+ scale[k] = 1.0;
+
+ offset[k] = 0;
+ k++;
+ }
+
+ final boolean noOffset = Arrays.stream(offset).allMatch( x -> x == 0.0 );
+ if( noOffset )
+ return new NgffSingleScaleAxesMetadata("", scale, null, axes, ImageplusMetadata.datasetAttributes(ip));
+ else
+ return new NgffSingleScaleAxesMetadata("", scale, offset, axes, ImageplusMetadata.datasetAttributes(ip));
+ }
+
+ public static OmeNgffMultiScaleMetadata buildMetadata(final ImagePlus image, final String path, final DatasetAttributes[] dsetAttrs,
+ final OmeNgffDataset[] datasets) {
+
+ final int nc = image.getNChannels();
+ final int nz = image.getNSlices();
+ final int nt = image.getNFrames();
+ final String unit = image.getCalibration().getUnit();
+
+ int N = 2;
+ if (nc > 1) {
+ N++;
+ }
+ if (nz > 1) {
+ N++;
+ }
+ if (nt > 1) {
+ N++;
+ }
+ final Axis[] axes = new Axis[N];
+ final double[] pixelSpacing = new double[N];
+
+ axes[0] = new Axis(Axis.SPACE, "x", unit);
+ pixelSpacing[0] = image.getCalibration().pixelWidth;
+
+ axes[1] = new Axis(Axis.SPACE, "y", unit);
+ pixelSpacing[1] = image.getCalibration().pixelHeight;
+
+ int d = 2;
+ if (nc > 1) {
+ axes[d] = new Axis(Axis.CHANNEL, "c", "");
+ pixelSpacing[d] = 1.0;
+ d++;
+ }
+
+ if (nz > 1) {
+ axes[d] = new Axis(Axis.SPACE, "z", unit);
+ pixelSpacing[d] = image.getCalibration().pixelDepth;
+ d++;
+ }
+
+ if (nt > 1) {
+ axes[d] = new Axis(Axis.TIME, "t", image.getCalibration().getTimeUnit());
+ pixelSpacing[d] = image.getCalibration().frameInterval;
+ d++;
+ }
+
+ // need to reverse the axes if the arrays are in C order
+ final Axis[] axesToWrite;
+ if( dsetAttrs != null )
+ axesToWrite = OmeNgffMultiScaleMetadata.reverseIfCorder( dsetAttrs[0], axes );
+ else
+ axesToWrite = axes;
+
+ final String name = image.getTitle();
+ final String type = "sampling";
+ final String version = "0.4";
+
+ return new OmeNgffMultiScaleMetadata(
+ N, path, name, type, version, axesToWrite,
+ datasets, dsetAttrs,
+ null, null); // no global coordinate transforms of downsampling metadata
+ }
+
+ public static OmeNgffMultiScaleMetadata buildMetadata(final NgffSingleScaleAxesMetadata meta, final String name, final String path, final DatasetAttributes[] dsetAttrs,
+ final OmeNgffDataset[] datasets) {
+
+ final int N = meta.getScale().length;
+
+ // need to reverse the axes if the arrays are in C order
+ final String type = "sampling";
+ final String version = "0.4";
+
+ return new OmeNgffMultiScaleMetadata(
+ N, path, name, type, version, meta.getAxes(),
+ datasets, dsetAttrs,
+ null, null); // no global coordinate transforms of downsampling metadata
+ }
+
+}
diff --git a/src/main/java/org/janelia/saalfeldlab/n5/metadata/imagej/SpatialMetadataToImagePlus.java b/src/main/java/org/janelia/saalfeldlab/n5/metadata/imagej/SpatialMetadataToImagePlus.java
index 7c0fc3fd..5afffa38 100644
--- a/src/main/java/org/janelia/saalfeldlab/n5/metadata/imagej/SpatialMetadataToImagePlus.java
+++ b/src/main/java/org/janelia/saalfeldlab/n5/metadata/imagej/SpatialMetadataToImagePlus.java
@@ -13,7 +13,7 @@ public abstract class SpatialMetadataToImagePlus n5NodeFilter;
+ private Predicate selectionFilter;
+
private TreeCellRenderer treeRenderer;
private final N5MetadataParser>[] groupParsers;
@@ -256,6 +300,17 @@ public void setRecursiveFilterCallback(final Predicate n5NodeFilter)
this.n5NodeFilter = n5NodeFilter;
}
+ /**
+ * Sets a selection filter. A {@link N5TreeNode} will not be selectable if the
+ * selection filter returns false for its metadata.
+ *
+ * @param selectionFilter the predicate
+ */
+ public void setSelectionFilter(final Predicate selectionFilter) {
+
+ this.selectionFilter = selectionFilter;
+ }
+
public void setCancelCallback(final Consumer cancelCallback) {
this.cancelCallback = cancelCallback;
@@ -285,6 +340,34 @@ public boolean getCropOption() {
return cropOption;
}
+// // add custom metadata parser into the first position in the list if it exists
+// final Optional parserOptional = spatialMetaSpec.getParserOptional();
+// if( parserOptional.isPresent() ) {
+// parserList.add(parserOptional.get());
+// parserList.addAll(Arrays.asList(parsers));
+// }
+// else
+// parserList.addAll(Arrays.asList(parsers));
+//
+// final Gson gson;
+// if( n5 instanceof CachedGsonKeyValueN5Reader )
+// gson = ((CachedGsonKeyValueN5Reader) n5).getGson();
+// else
+// {
+// final GsonBuilder gsonBuilder = new GsonBuilder();
+// gsonBuilder.registerTypeAdapter(DataType.class, new DataType.JsonAdapter());
+// gsonBuilder.registerTypeHierarchyAdapter(Compression.class, CompressionAdapter.getJsonAdapter());
+// gsonBuilder.disableHtmlEscaping();
+// gson = gsonBuilder.create();
+// }
+//
+// boolean isTranslated = false;
+// final Optional translatedN5 = translationPanel.getTranslatedN5Optional(n5, gson);
+// if( translatedN5.isPresent() )
+// {
+// n5 = translatedN5.get();
+// isTranslated = true;
+// NOTE(review): end of commented-out block carried over from the ome-zarr-v0.4 branch; conflict marker removed
public boolean isCropSelected() {
@@ -295,6 +378,10 @@ public boolean isVirtual() {
return (virtualBox != null) && virtualBox.isSelected();
}
+// final N5TreeNode tmpRootNode = new N5TreeNode( rootPath );
+// rootNode = new N5SwingTreeNode( rootPath, treeModel );
+// treeModel.setRoot(rootNode);
+// NOTE(review): end of commented-out block carried over from the ome-zarr-v0.4 branch; conflict marker removed
public String getN5RootPath() {
@@ -410,8 +497,9 @@ private JFrame buildDialog() {
TreeSelectionModel.DISCONTIGUOUS_TREE_SELECTION);
// disable selection of nodes that are not open-able
- containerTree.addTreeSelectionListener(
- new N5IjTreeSelectionListener(containerTree.getSelectionModel()));
+ final N5IjTreeSelectionListener treeSelectionListener = new N5IjTreeSelectionListener(containerTree.getSelectionModel());
+ treeSelectionListener.setSelectionFilter(selectionFilter);
+ containerTree.addTreeSelectionListener(treeSelectionListener);
// By default leaf nodes (datasets) are displayed as files. This changes
// the default behavior to display them as folders
@@ -592,7 +680,7 @@ private void openContainer(final Function n5Fun, final Supplie
final Function pathToRoot) {
if (ijProgressBar != null)
- ijProgressBar.show(0.1);
+ ijProgressBar.show(0.2);
SwingUtilities.invokeLater(() -> {
messageLabel.setText("Building reader...");
@@ -705,6 +793,7 @@ private void openContainer(final Function n5Fun, final Supplie
if (node.getParent() != null && node.getChildCount() == 0) {
treeModel.removeNodeFromParent(node);
}
+
}
}
});
@@ -719,6 +808,7 @@ private void openContainer(final Function n5Fun, final Supplie
if (ijProgressBar != null)
ijProgressBar.show(0.3);
+
SwingUtilities.invokeLater(() -> {
messageLabel.setText("Listing...");
messageLabel.repaint();
@@ -730,12 +820,14 @@ private void openContainer(final Function n5Fun, final Supplie
for (final String p : datasetPaths)
rootNode.addPath(p);
+
sortRecursive(rootNode);
containerTree.expandRow(0);
if (ijProgressBar != null)
ijProgressBar.show(0.5);
+
SwingUtilities.invokeLater(() -> {
messageLabel.setText("Parsing...");
messageLabel.repaint();
@@ -748,13 +840,14 @@ private void openContainer(final Function n5Fun, final Supplie
if (ijProgressBar != null)
ijProgressBar.show(0.8);
+
SwingUtilities.invokeLater(() -> {
messageLabel.setText("Done");
messageLabel.repaint();
});
if (ijProgressBar != null)
- ijProgressBar.show(1.0);
+ ijProgressBar.show(1.1);
Thread.sleep(1000);
SwingUtilities.invokeLater(() -> {
@@ -770,7 +863,6 @@ private void openContainer(final Function n5Fun, final Supplie
} catch (final N5Exception e) {
e.printStackTrace();
}
-
});
if (isTranslated) {
@@ -856,11 +948,18 @@ public static class N5IjTreeSelectionListener implements TreeSelectionListener {
private final TreeSelectionModel selectionModel;
+ private Predicate selectionFilter;
+
public N5IjTreeSelectionListener(final TreeSelectionModel selectionModel) {
this.selectionModel = selectionModel;
}
+ public void setSelectionFilter(final Predicate selectionFilter) {
+
+ this.selectionFilter = selectionFilter;
+ }
+
@Override
public void valueChanged(final TreeSelectionEvent sel) {
@@ -875,6 +974,9 @@ public void valueChanged(final TreeSelectionEvent sel) {
if (node.getMetadata() == null) {
selectionModel.removeSelectionPath(path);
}
+ else if( selectionFilter != null && !selectionFilter.test(node.getMetadata()) ) {
+ selectionModel.removeSelectionPath(path);
+ }
}
i++;
}
diff --git a/src/main/resources/plugins.config b/src/main/resources/plugins.config
index 44674634..18e79a1a 100644
--- a/src/main/resources/plugins.config
+++ b/src/main/resources/plugins.config
@@ -1,3 +1,3 @@
# Name: N5 ImageJ
-File>Import, "N5", org.janelia.saalfeldlab.n5.ij.N5Importer
+File>Import, "HDF5/N5/Zarr/OME-NGFF ... ", org.janelia.saalfeldlab.n5.ij.N5Importer
diff --git a/src/test/java/org/janelia/saalfeldlab/n5/RunImportExportTest.java b/src/test/java/org/janelia/saalfeldlab/n5/RunImportExportTest.java
index a7539b16..09b01059 100644
--- a/src/test/java/org/janelia/saalfeldlab/n5/RunImportExportTest.java
+++ b/src/test/java/org/janelia/saalfeldlab/n5/RunImportExportTest.java
@@ -28,8 +28,8 @@
import java.util.HashMap;
import java.util.List;
-import org.janelia.saalfeldlab.n5.ij.N5Exporter;
import org.janelia.saalfeldlab.n5.ij.N5Importer;
+import org.janelia.saalfeldlab.n5.ij.N5ScalePyramidExporter;
import ij.IJ;
import ij.ImagePlus;
@@ -84,10 +84,9 @@ public RunImportExportTest(
public void run()
{
- final N5Exporter writer = new N5Exporter();
- writer.setOptions( imp, outputPath, dataset,
- blockSizeString, metadataType, compressionType,
- N5Exporter.OVERWRITE, "");
+ final N5ScalePyramidExporter writer = new N5ScalePyramidExporter();
+ writer.setOptions( imp, outputPath, dataset, blockSizeString, false,
+ metadataType, N5ScalePyramidExporter.DOWN_SAMPLE, compressionType);
writer.run();
final String n5PathAndDataset = outputPath + dataset;
diff --git a/src/test/java/org/janelia/saalfeldlab/n5/TestExportImports.java b/src/test/java/org/janelia/saalfeldlab/n5/TestExportImports.java
index 2a8c89fc..e11f0188 100644
--- a/src/test/java/org/janelia/saalfeldlab/n5/TestExportImports.java
+++ b/src/test/java/org/janelia/saalfeldlab/n5/TestExportImports.java
@@ -1,16 +1,16 @@
package org.janelia.saalfeldlab.n5;
+import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.File;
-import java.io.IOException;
import java.net.URL;
import java.util.HashMap;
import java.util.List;
-import org.janelia.saalfeldlab.n5.ij.N5Exporter;
import org.janelia.saalfeldlab.n5.ij.N5Importer;
+import org.janelia.saalfeldlab.n5.ij.N5ScalePyramidExporter;
import org.janelia.saalfeldlab.n5.universe.N5Factory;
import org.junit.Assert;
import org.junit.Before;
@@ -36,7 +36,7 @@ public class TestExportImports
@Before
public void before()
{
- URL configUrl = RunImportExportTest.class.getResource( "/plugins.config" );
+ final URL configUrl = RunImportExportTest.class.getResource( "/plugins.config" );
baseDir = new File( configUrl.getFile()).getParentFile();
}
@@ -44,12 +44,12 @@ public void before()
public void testEmptyMeta()
{
final ImagePlus imp = NewImage.createImage("test", 8, 6, 2, 16, NewImage.FILL_NOISE);
- String metaType = N5Importer.MetadataDefaultKey;
+ final String metaType = N5Importer.MetadataDefaultKey;
final String n5RootPath = baseDir + "/test_none.n5";
final String dataset = "/test";
final String blockSizeString = "32,32,32";
- final String compressionString = "raw";
+ final String compressionString = N5ScalePyramidExporter.RAW_COMPRESSION;
singleReadWriteParseTest( imp, n5RootPath, dataset, blockSizeString, metaType, compressionString, false );
}
@@ -62,11 +62,12 @@ public void test4dN5v()
imp.setDimensions( nChannels, nSlices, 1 ); // 3 channels, 5 slices
final String n5RootPath = baseDir + "/test.n5" ;
- final String dataset = "/n5v_4d";
+ final String dataset = "n5v_4d";
- final N5Exporter writer = new N5Exporter();
- writer.setOptions( imp, n5RootPath, dataset, "32", N5Importer.MetadataN5ViewerKey, "gzip", N5Exporter.OVERWRITE, "");
- writer.run();
+ final N5ScalePyramidExporter writer = new N5ScalePyramidExporter();
+ writer.setOptions( imp, n5RootPath, dataset, "32", false,
+ N5ScalePyramidExporter.DOWN_SAMPLE, N5Importer.MetadataN5ViewerKey, N5ScalePyramidExporter.GZIP_COMPRESSION);
+ writer.run(); // run() closes the n5 writer
try {
final N5Importer reader = new N5Importer();
@@ -79,13 +80,13 @@ public void test4dN5v()
Assert.assertTrue("n5v channel equals", equalChannel(imp, i, impList.get(0)));
}
}
- catch(Exception e)
+ catch(final Exception e)
{
e.printStackTrace();
Assert.fail();
}
}
-
+
@Test
public void testReadWriteParse()
{
@@ -95,16 +96,15 @@ public void testReadWriteParse()
typeToExtension.put( "HDF5", "h5" );
final String blockSizeString = "16,16,16";
- final String compressionString = "gzip";
- String[] containerTypes = new String[] { "FILESYSTEM", "ZARR", "HDF5" };
-
+ final String compressionString = N5ScalePyramidExporter.GZIP_COMPRESSION;
+ final String[] containerTypes = new String[] { "FILESYSTEM", "ZARR", "HDF5" };
final String[] metadataTypes = new String[]{
N5Importer.MetadataImageJKey,
N5Importer.MetadataN5CosemKey,
N5Importer.MetadataN5ViewerKey
};
- for( int bitDepth : new int[]{ 8, 16, 32 })
+ for( final int bitDepth : new int[]{ 8, 16, 32 })
{
final ImagePlus imp = NewImage.createImage("test", 8, 6, 4, bitDepth, NewImage.FILL_NOISE);
imp.setDimensions( 1, 4, 1 );
@@ -158,7 +158,7 @@ private static < T extends RealType< T > & NativeType< T > > boolean equalChanne
final Img imgAll = ImageJFunctions.wrapRealNative( all );
final Img imgC = ImageJFunctions.wrapRealNative( cimg );
- IntervalView channelGtImg = Views.hyperSlice( imgAll, 2, i);
+ final IntervalView channelGtImg = Views.hyperSlice( imgAll, 2, i);
final Cursor< T > c = channelGtImg.cursor();
final RandomAccess< T > r = imgC.randomAccess();
while( c.hasNext() )
@@ -207,14 +207,35 @@ public void singleReadWriteParseTest(
final String blockSizeString,
final String metadataType,
final String compressionType,
- boolean testMeta )
+ final boolean testMeta )
+ {
+ singleReadWriteParseTest( imp, outputPath, dataset, blockSizeString, metadataType, compressionType, testMeta, true);
+ }
+
+ public void singleReadWriteParseTest(
+ final ImagePlus imp,
+ final String outputPath,
+ final String dataset,
+ final String blockSizeString,
+ final String metadataType,
+ final String compressionType,
+ boolean testMeta,
+ boolean testData )
{
- final N5Exporter writer = new N5Exporter();
- writer.setOptions( imp, outputPath, dataset, blockSizeString, metadataType, compressionType,
- N5Exporter.OVERWRITE, "");
+// System.out.println("outputPath: " + outputPath + " " + dataset);
+ final N5ScalePyramidExporter writer = new N5ScalePyramidExporter();
+ writer.setOptions( imp, outputPath, dataset, blockSizeString, false,
+ N5ScalePyramidExporter.DOWN_SAMPLE, metadataType, compressionType);
writer.run(); // run() closes the n5 writer
- final String readerDataset = metadataType.equals( N5Importer.MetadataN5ViewerKey ) ? dataset + "/c0/s0" : dataset;
+ final String readerDataset;
+ if (metadataType.equals(N5Importer.MetadataN5ViewerKey) || (metadataType.equals(N5Importer.MetadataN5CosemKey) && imp.getNChannels() > 1))
+ readerDataset = dataset + "/c0/s0";
+ else if (metadataType.equals(N5Importer.MetadataOmeZarrKey) || metadataType.equals(N5Importer.MetadataN5CosemKey))
+ readerDataset = dataset + "/s0";
+ else
+ readerDataset = dataset;
+
final String n5PathAndDataset = outputPath + readerDataset;
final N5Importer reader = new N5Importer();
@@ -224,34 +245,36 @@ public void singleReadWriteParseTest(
assertEquals( String.format( "%s %s one image opened ", outputPath, dataset ), 1, impList.size() );
final ImagePlus impRead = impList.get( 0 );
-
if( testMeta )
{
- boolean resEqual = impRead.getCalibration().pixelWidth == imp.getCalibration().pixelWidth &&
+ final boolean resEqual = impRead.getCalibration().pixelWidth == imp.getCalibration().pixelWidth &&
impRead.getCalibration().pixelHeight == imp.getCalibration().pixelHeight
&& impRead.getCalibration().pixelDepth == imp.getCalibration().pixelDepth;
assertTrue( String.format( "%s resolutions ", dataset ), resEqual );
- boolean unitsEqual = impRead.getCalibration().getUnit().equals( imp.getCalibration().getUnit() );
+ final boolean unitsEqual = impRead.getCalibration().getUnit().equals( imp.getCalibration().getUnit() );
assertTrue( String.format( "%s units ", dataset ), unitsEqual );
}
- boolean imagesEqual;
- if( imp.getType() == ImagePlus.COLOR_RGB )
+ if( testData )
{
- imagesEqual = equalRGB( imp, impRead );
- assertEquals( String.format( "%s as rgb ", dataset ), ImagePlus.COLOR_RGB, impRead.getType() );
- }
- else
- imagesEqual = equal( imp, impRead );
+ boolean imagesEqual;
+ if( imp.getType() == ImagePlus.COLOR_RGB )
+ {
+ imagesEqual = equalRGB( imp, impRead );
+ assertEquals( String.format( "%s as rgb ", dataset ), ImagePlus.COLOR_RGB, impRead.getType() );
+ }
+ else
+ imagesEqual = equal( imp, impRead );
- assertTrue( String.format( "%s data ", dataset ), imagesEqual );
+ assertTrue( String.format( "%s data ", dataset ), imagesEqual );
+ }
try {
- N5Writer n5w = new N5Factory().openWriter(outputPath);
+ final N5Writer n5w = new N5Factory().openWriter(outputPath);
n5w.remove();
- } catch (N5Exception e) {
+ } catch (final N5Exception e) {
e.printStackTrace();
}
@@ -263,7 +286,7 @@ public void singleReadWriteParseTest(
public void testRgb()
{
final ImagePlus imp = NewImage.createRGBImage("test", 8, 6, 4, NewImage.FILL_NOISE);
- String metaType = N5Importer.MetadataImageJKey;
+ final String metaType = N5Importer.MetadataImageJKey;
final String n5RootPath = baseDir + "/test_rgb.n5";
final String dataset = "/ij";
@@ -274,26 +297,205 @@ public void testRgb()
}
/**
- * A test if we ever expand n5-viewer style metadata to be able
+ * A test if we ever expand n5-viewer style metadata to be able
* to describe arrays of more than 3 dimensions.
- *
+ *
*/
-// @Test
-// public void testMultiChannelN5V()
-// {
-// final int bitDepth = 8;
-// final ImagePlus imp = NewImage.createImage("test", 8, 6, 4*3, bitDepth, NewImage.FILL_NOISE);
-// imp.setDimensions( 3, 4, 1 );
-// imp.getCalibration().pixelWidth = 0.5;
-// imp.getCalibration().pixelHeight = 0.6;
-// imp.getCalibration().pixelDepth = 0.7;
-//
-// String metatype = N5Importer.MetadataN5ViewerSingleKey;
-// final String n5RootPath = baseDir + "/test_n5v_mcSingle.n5";
-// final String dataset = "/n5vs";
-// final String blockSizeString = "16,16,16,16";
-// final String compressionString = "raw";
-//
-// singleReadWriteParseTest( imp, n5RootPath, dataset, blockSizeString, metatype, compressionString, true );
-// }
+ @Test
+ public void testMultiChannel()
+ {
+ for( final String suffix : new String[] { ".h5", ".n5", ".zarr" })
+ {
+ testMultiChannelHelper(N5Importer.MetadataN5ViewerKey, suffix);
+ testMultiChannelHelper(N5Importer.MetadataN5CosemKey, suffix);
+ testMultiChannelHelper(N5Importer.MetadataOmeZarrKey, suffix);
+ testMultiChannelHelper(N5Importer.MetadataImageJKey, suffix);
+ }
+ }
+
+ @Test
+ public void testOverwrite() {
+
+ final String n5Root = baseDir + "/overwriteTest.n5";
+ final String dataset = "dataset";
+ final String blockSizeString = "16";
+ final String compressionString = "raw";
+
+ String metadataType = N5ScalePyramidExporter.NONE;
+
+ final long[] szBig = new long[]{8, 6, 4};
+ final long[] szSmall = new long[]{6, 4, 2};
+ final ImagePlus impBig = NewImage.createImage("test", (int)szBig[0], (int)szBig[1], (int)szBig[2], 8, NewImage.FILL_NOISE);
+ final ImagePlus impSmall = NewImage.createImage("test", (int)szSmall[0], (int)szSmall[1], (int)szSmall[2], 8, NewImage.FILL_NOISE);
+
+ final N5ScalePyramidExporter writer = new N5ScalePyramidExporter();
+ writer.setOptions(impBig, n5Root, dataset, blockSizeString, false,
+ N5ScalePyramidExporter.DOWN_SAMPLE, metadataType, compressionString);
+ writer.setOverwrite(true);
+ writer.run();
+
+ final N5Reader n5 = new N5FSReader(n5Root);
+ assertTrue(n5.datasetExists(dataset));
+
+ assertArrayEquals("size orig", szBig, n5.getDatasetAttributes(dataset).getDimensions());
+
+ final N5ScalePyramidExporter writerNoOverride = new N5ScalePyramidExporter();
+ writerNoOverride.setOptions(impSmall, n5Root, dataset, blockSizeString, false,
+ N5ScalePyramidExporter.DOWN_SAMPLE, metadataType, compressionString);
+ writerNoOverride.setOverwrite(false);
+ writerNoOverride.run();
+
+ assertArrayEquals("size after no overwrite", szBig, n5.getDatasetAttributes(dataset).getDimensions());
+
+ final N5ScalePyramidExporter writerOverride = new N5ScalePyramidExporter();
+ writerOverride.setOptions(impSmall, n5Root, dataset, blockSizeString, false,
+ N5ScalePyramidExporter.DOWN_SAMPLE, metadataType, compressionString);
+ writerOverride.setOverwrite(true);
+ writerOverride.run();
+
+ assertArrayEquals("size after overwrite", szSmall, n5.getDatasetAttributes(dataset).getDimensions());
+ }
+
+ public void testMultiChannelHelper( final String metatype, final String suffix )
+ {
+ final int bitDepth = 8;
+
+ final String n5RootPath = baseDir + "/test_"+ metatype+"_dimCombos" + suffix;
+ final String blockSizeString = "16";
+ final String compressionString = "raw";
+
+ // add zero to avoid eclipse making these variables final
+ int nc = 3; nc += 0;
+ int nz = 1; nz += 0;
+ int nt = 1; nt += 0;
+
+ for( nc = 1; nc <= 3; nc += 2)
+ {
+ for( nz = 1; nz <= 4; nz += 3)
+ {
+ for( nt = 1; nt <= 5; nt += 4)
+ {
+ final int N = nc * nz * nt;
+ final ImagePlus imp = NewImage.createImage("test", 8, 6, N, bitDepth, NewImage.FILL_NOISE);
+ imp.setDimensions( nc, nz, nt );
+ imp.getCalibration().pixelWidth = 0.5;
+ imp.getCalibration().pixelHeight = 0.6;
+
+ if( nz > 1 )
+ imp.getCalibration().pixelDepth = 0.7;
+
+ final String dataset = String.format("/c%dz%dt%d", nc, nz, nt);
+ singleReadWriteParseTest( imp, n5RootPath, dataset, blockSizeString, metatype, compressionString, true, nc == 1 );
+ }
+ }
+
+ }
+ }
+
+ public void pyramidReadWriteParseTest(
+ final ImagePlus imp,
+ final String outputPath,
+ final String dataset,
+ final String blockSizeString,
+ final String downsampleMethod,
+ final String metadataType,
+ final String compressionType,
+ boolean testMeta,
+ boolean testData )
+ {
+ final N5ScalePyramidExporter writer = new N5ScalePyramidExporter();
+ writer.setOptions( imp, outputPath, dataset, blockSizeString, true, downsampleMethod, metadataType, compressionType);
+ writer.run(); // run() closes the n5 writer
+
+ final String readerDataset;
+ if( metadataType.equals( N5Importer.MetadataN5ViewerKey ))
+ readerDataset = dataset + "/c0/s0";
+ else if( metadataType.equals( N5Importer.MetadataN5CosemKey ) && imp.getNChannels() > 1 )
+ readerDataset = dataset + "/c0";
+ else
+ readerDataset = dataset;
+
+ final String n5PathAndDataset = outputPath + readerDataset;
+
+ final N5Importer reader = new N5Importer();
+ reader.setShow( false );
+ final List< ImagePlus > impList = reader.process( n5PathAndDataset, false );
+
+ assertEquals( String.format( "%s %s one image opened ", outputPath, dataset ), 1, impList.size() );
+
+ final ImagePlus impRead = impList.get( 0 );
+
+ if( testMeta )
+ {
+ final boolean resEqual = impRead.getCalibration().pixelWidth == imp.getCalibration().pixelWidth &&
+ impRead.getCalibration().pixelHeight == imp.getCalibration().pixelHeight
+ && impRead.getCalibration().pixelDepth == imp.getCalibration().pixelDepth;
+
+ assertTrue( String.format( "%s resolutions ", dataset ), resEqual );
+
+ final boolean unitsEqual = impRead.getCalibration().getUnit().equals( imp.getCalibration().getUnit() );
+ assertTrue( String.format( "%s units ", dataset ), unitsEqual );
+ }
+
+ if( testData )
+ {
+ boolean imagesEqual;
+ if( imp.getType() == ImagePlus.COLOR_RGB )
+ {
+ imagesEqual = equalRGB( imp, impRead );
+ assertEquals( String.format( "%s as rgb ", dataset ), ImagePlus.COLOR_RGB, impRead.getType() );
+ }
+ else
+ imagesEqual = equal( imp, impRead );
+
+ assertTrue( String.format( "%s data ", dataset ), imagesEqual );
+ }
+
+ try {
+ final N5Writer n5w = new N5Factory().openWriter(outputPath);
+ n5w.remove();
+ } catch (final N5Exception e) {
+ e.printStackTrace();
+ }
+
+ impRead.close();
+ }
+
+ public void testPyramidHelper( final String metatype, final String suffix )
+ {
+ final int bitDepth = 8;
+
+ final String n5RootPath = baseDir + "/test_"+ metatype+"_dimCombos" + suffix;
+ final String blockSizeString = "3";
+ final String compressionString = "raw";
+ final String downsamplingType = N5ScalePyramidExporter.DOWN_SAMPLE;
+
+ int nc = 1;
+ int nz = 1;
+ int nt = 5;
+
+ for( nc = 1; nc <= 3; nc += 2)
+ {
+ for( nz = 1; nz <= 4; nz += 3)
+ {
+ for( nt = 1; nt <= 5; nt += 4)
+ {
+ final int N = nc * nz * nt;
+ final ImagePlus imp = NewImage.createImage("test", 8, 6, N, bitDepth, NewImage.FILL_NOISE);
+ imp.setDimensions( nc, nz, nt );
+ imp.getCalibration().pixelWidth = 0.5;
+ imp.getCalibration().pixelHeight = 0.6;
+
+ if( nz > 1 )
+ imp.getCalibration().pixelDepth = 0.7;
+
+ final String dataset = String.format("/c%dz%dt%d", nc, nz, nt);
+ pyramidReadWriteParseTest( imp, n5RootPath, dataset, blockSizeString, downsamplingType,
+ metatype, compressionString, true, nc == 1 );
+ }
+ }
+
+ }
+ }
+
}
diff --git a/src/test/java/org/janelia/saalfeldlab/n5/TestRegionExport.java b/src/test/java/org/janelia/saalfeldlab/n5/TestRegionExport.java
new file mode 100644
index 00000000..85b97773
--- /dev/null
+++ b/src/test/java/org/janelia/saalfeldlab/n5/TestRegionExport.java
@@ -0,0 +1,181 @@
+package org.janelia.saalfeldlab.n5;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+
+import java.io.File;
+import java.nio.file.Files;
+import java.util.Arrays;
+import java.util.stream.Collectors;
+
+import org.janelia.saalfeldlab.n5.ij.N5ScalePyramidExporter;
+import org.janelia.saalfeldlab.n5.ij.N5SubsetExporter;
+import org.janelia.saalfeldlab.n5.imglib2.N5Utils;
+import org.junit.Test;
+
+import ij.ImagePlus;
+import ij.gui.NewImage;
+import net.imglib2.FinalInterval;
+import net.imglib2.RandomAccessibleInterval;
+import net.imglib2.cache.img.CachedCellImg;
+import net.imglib2.img.array.ArrayImg;
+import net.imglib2.img.array.ArrayImgs;
+import net.imglib2.img.basictypeaccess.array.ByteArray;
+import net.imglib2.loops.LoopBuilder;
+import net.imglib2.type.numeric.integer.UnsignedByteType;
+import net.imglib2.util.Intervals;
+import net.imglib2.view.Views;
+
+public class TestRegionExport {
+
+ private static String tempN5PathName() {
+
+ try {
+ final File tmpFile = Files.createTempDirectory("n5-region-test-").toFile();
+ tmpFile.deleteOnExit();
+ return tmpFile.getCanonicalPath();
+ } catch (final Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ @Test
+ public void testCreate() {
+
+ long[] trueDims = new long[]{8, 6, 2};
+ final ImagePlus imp = NewImage.createImage("test",
+ (int)trueDims[0], (int)trueDims[1], (int)trueDims[2],
+ 16, NewImage.FILL_NOISE);
+
+ String baseDir = tempN5PathName();
+ System.out.println(baseDir);
+
+ final String rootPath = baseDir + "/test_create.n5";
+ final String blockSizeString = "32";
+ final String compressionString = N5ScalePyramidExporter.RAW_COMPRESSION;
+
+ final String dsetZeroOffset = "/zeroOffset";
+ final String zeroOffsetString = "0,0,0";
+
+ // should create a dataset
+ // a zero offset should write an array of the same size as the input
+ final N5SubsetExporter writerZero = new N5SubsetExporter();
+ writerZero.setOptions(imp, rootPath, dsetZeroOffset, zeroOffsetString, blockSizeString, compressionString);
+ writerZero.run();
+
+ final N5Reader n5 = new N5FSReader(rootPath);
+ final long[] dims = n5.getDatasetAttributes(dsetZeroOffset).getDimensions();
+ assertArrayEquals("zero-offset", trueDims, dims);
+
+ // should create a dataset
+ // a non-zero offset should write an array of size larger than the input
+ final String dsetOffset = "/offset";
+ final int[] offset = new int[]{10, 20, 30};
+ final String offsetString = Arrays.stream(offset).mapToObj(Integer::toString).collect(Collectors.joining(","));
+
+ final N5SubsetExporter writerOffset = new N5SubsetExporter();
+ writerOffset.setOptions(imp, rootPath, dsetOffset, offsetString, blockSizeString, compressionString);
+ writerOffset.run();
+
+ final long[] trueOffsetDims = new long[3];
+ for (int i = 0; i < 3; i++)
+ trueOffsetDims[i] = trueDims[i] + offset[i];
+
+ final long[] dimsOffset = n5.getDatasetAttributes(dsetOffset).getDimensions();
+ assertArrayEquals("offset", trueOffsetDims, dimsOffset);
+
+ n5.close();
+ }
+
+ @Test
+ public void testOverwrite() {
+
+ final long[] origDims = new long[]{16, 16, 16};
+ final ImagePlus impBase = NewImage.createImage("test",
+ (int)origDims[0], (int)origDims[1], (int)origDims[2],
+ 8, NewImage.FILL_BLACK);
+
+ final long[] patchDims = new long[]{3, 3, 3};
+ final ImagePlus impFill = NewImage.createImage("test",
+ (int)patchDims[0], (int)patchDims[1], (int)patchDims[2],
+ 8, NewImage.FILL_WHITE);
+
+ String baseDir = tempN5PathName();
+ System.out.println(baseDir);
+
+ final String rootPath = baseDir + "/test_patch.n5";
+ final String blockSizeString = "32";
+ final String compressionString = N5ScalePyramidExporter.RAW_COMPRESSION;
+
+ final String dset = "/patch";
+ final String zeroOffsetString = "0,0,0";
+
+ // should create a dataset
+ // a zero offset should write an array of the same size as the input
+ final N5SubsetExporter writerZero = new N5SubsetExporter();
+ writerZero.setOptions(impBase, rootPath, dset, zeroOffsetString, blockSizeString, compressionString);
+ writerZero.run();
+
+ final N5Reader n5 = new N5FSReader(rootPath);
+ final CachedCellImg origImg = N5Utils.open(n5, dset);
+ final byte[] dataBefore = copyToArray(origImg);
+
+ final byte[] zeros = new byte[(int)Intervals.numElements(origImg)];
+ assertArrayEquals("orig data", zeros, dataBefore);
+
+
+	// the dataset already exists: this write patches into it
+	// a non-zero offset patch must leave the dataset dimensions unchanged
+ final long[] offset = new long[]{1,2,3};
+ final String offsetString = Arrays.stream(offset).mapToObj(Long::toString).collect(Collectors.joining(","));
+
+ final N5SubsetExporter writerOffset = new N5SubsetExporter();
+ writerOffset.setOptions(impFill, rootPath, dset, offsetString, blockSizeString, compressionString);
+ writerOffset.run();
+
+ final long[] dimsOffset = n5.getDatasetAttributes(dset).getDimensions();
+ assertArrayEquals("dims unchanged", origDims, dimsOffset);
+
+ final CachedCellImg patchedImg = N5Utils.open(n5, dset);
+ final byte[] dataPatched = copyToArray(patchedImg);
+
+ // '-1' when represented as a signed byte
+ final byte UBYTEMAX = new UnsignedByteType(255).getByte();
+
+ // check that every value is either 0 or 255
+ int numZero = 0;
+ int num255 = 0;
+ for( int i = 0; i < dataPatched.length; i++ )
+ if( dataPatched[i] == 0)
+ numZero++;
+ else if( dataPatched[i] == UBYTEMAX)
+ num255++;
+
+ assertEquals("all values must be 0 or 255", dataPatched.length, numZero + num255);
+
+ // check that every value in the patch is 255
+ final long[] min = offset;
+ final long[] max = new long[ min.length ];
+ for( int i = 0; i < min.length; i++ )
+ max[i] = min[i] + patchDims[i] - 1;
+
+ final FinalInterval patchInterval = new FinalInterval(min, max);
+ final byte[] dataInPatch = copyToArray(Views.interval(patchedImg, patchInterval));
+ final byte[] data255 = new byte[dataInPatch.length];
+ Arrays.fill(data255, UBYTEMAX);
+ assertArrayEquals("patched data", data255, dataInPatch);
+
+ n5.close();
+ }
+
+ private static final byte[] copyToArray( final RandomAccessibleInterval img ) {
+
+ final byte[] data = new byte[(int)Intervals.numElements(img)];
+ ArrayImg imgCopy = ArrayImgs.unsignedBytes(data, img.dimensionsAsLongArray());
+ LoopBuilder.setImages(img, imgCopy).forEachPixel((x, y) -> {
+ y.set(x.get());
+ });
+ return data;
+ }
+
+}
diff --git a/src/test/java/org/janelia/saalfeldlab/n5/ij/MacroTests.java b/src/test/java/org/janelia/saalfeldlab/n5/ij/MacroTests.java
index 57158545..20f86361 100644
--- a/src/test/java/org/janelia/saalfeldlab/n5/ij/MacroTests.java
+++ b/src/test/java/org/janelia/saalfeldlab/n5/ij/MacroTests.java
@@ -43,19 +43,17 @@ public void before() {
final String n5Root = "src/test/resources/test.n5";
n5rootF = new File(n5Root);
- URL configUrl = RunImportExportTest.class.getResource( "/plugins.config" );
- File baseDir = new File( configUrl.getFile() ).getParentFile();
+ final URL configUrl = RunImportExportTest.class.getResource( "/plugins.config" );
+ final File baseDir = new File( configUrl.getFile() ).getParentFile();
containerDir = new File( baseDir, "macrotest.n5" );
System.out.println( containerDir.getAbsolutePath() );
imp = NewImage.createImage("test", 8, 7, 9, 16, NewImage.FILL_NOISE);
- final N5Exporter writer = new N5Exporter();
- writer.setOptions( imp, containerDir.getAbsolutePath(), "dataset", "16,16,16",
- N5Exporter.NONE,
- N5Exporter.RAW_COMPRESSION,
- N5Exporter.OVERWRITE, "");
- writer.run();
+ final N5ScalePyramidExporter writer = new N5ScalePyramidExporter();
+ writer.setOptions( imp, containerDir.getAbsolutePath(), "dataset", "16,16,16", false,
+ N5ScalePyramidExporter.NONE, N5ScalePyramidExporter.DOWN_SAMPLE, N5ScalePyramidExporter.RAW_COMPRESSION);
+ writer.run(); // run() closes the n5 writer
}
@After
@@ -66,24 +64,24 @@ public void after() {
@Test
public void testMacroContent() {
- N5Importer plugin = (N5Importer)IJ.runPlugIn("org.janelia.saalfeldlab.n5.ij.N5Importer",
- String.format("n5=%s/%s hide", containerDir.getAbsolutePath(), "dataset" ));
+ final N5Importer plugin = (N5Importer)IJ.runPlugIn("org.janelia.saalfeldlab.n5.ij.N5Importer",
+ String.format("url=%s/%s hide", containerDir.getAbsolutePath(), "dataset" ));
- List res = plugin.getResult();
+ final List res = plugin.getResult();
final ImagePlus imgImported = res.get(0);
assertTrue( "equal content", TestExportImports.equal(imp, imgImported));
- N5Importer pluginCrop = (N5Importer)IJ.runPlugIn("org.janelia.saalfeldlab.n5.ij.N5Importer",
- String.format("n5=%s/%s hide min=0,1,2 max=5,5,5",
+ final N5Importer pluginCrop = (N5Importer)IJ.runPlugIn("org.janelia.saalfeldlab.n5.ij.N5Importer",
+ String.format("url=%s/%s hide min=0,1,2 max=5,5,5",
containerDir.getAbsolutePath(), "dataset" ));
- List resCrop = pluginCrop.getResult();
+ final List resCrop = pluginCrop.getResult();
final ImagePlus imgImportedCrop = resCrop.get(0);
- IntervalView imgCrop = Views.zeroMin( Views.interval(
+ final IntervalView imgCrop = Views.zeroMin( Views.interval(
ImageJFunctions.wrapShort(imp),
Intervals.createMinMax( 0, 1, 2, 5, 5, 5 )));
- ImagePlus impCrop = ImageJFunctions.wrap(imgCrop, "imgCrop");
+ final ImagePlus impCrop = ImageJFunctions.wrap(imgCrop, "imgCrop");
impCrop.setDimensions(1, 4, 1);
assertEquals( " cont crop w", impCrop.getWidth(), imgImportedCrop.getWidth());
@@ -94,10 +92,11 @@ public void testMacroContent() {
@Test
public void testMacro() {
- N5Importer plugin = (N5Importer)IJ.runPlugIn("org.janelia.saalfeldlab.n5.ij.N5Importer",
- String.format("n5=%s/%s hide", n5rootF.getAbsolutePath(), "cosem" ));
- List res = plugin.getResult();
+ final N5Importer plugin = (N5Importer)IJ.runPlugIn("org.janelia.saalfeldlab.n5.ij.N5Importer",
+ String.format("url=%s/%s hide", n5rootF.getAbsolutePath(), "cosem" ));
+
+ final List res = plugin.getResult();
assertEquals(" crop num", 1, res.size());
final ImagePlus img = res.get(0);
@@ -108,10 +107,10 @@ public void testMacro() {
@Test
public void testMacroVirtual() {
- N5Importer plugin = (N5Importer)IJ.runPlugIn("org.janelia.saalfeldlab.n5.ij.N5Importer",
- String.format("n5=%s/%s hide virtual", n5rootF.getAbsolutePath(), "cosem" ));
+ final N5Importer plugin = (N5Importer)IJ.runPlugIn("org.janelia.saalfeldlab.n5.ij.N5Importer",
+ String.format("url=%s/%s hide virtual", n5rootF.getAbsolutePath(), "cosem" ));
- List res = plugin.getResult();
+ final List res = plugin.getResult();
assertEquals(" crop num", 1, res.size());
final ImagePlus img = res.get(0);
assertTrue( " is virtual", (img.getStack() instanceof ImageJVirtualStack) );
@@ -119,14 +118,14 @@ public void testMacroVirtual() {
@Test
public void testMacroCrop() {
- String minString = "100,100,50";
- String maxString = "250,250,120";
+ final String minString = "100,100,50";
+ final String maxString = "250,250,120";
- N5Importer plugin = (N5Importer)IJ.runPlugIn("org.janelia.saalfeldlab.n5.ij.N5Importer",
- String.format("n5=%s/%s hide min=%s max=%s",
+ final N5Importer plugin = (N5Importer)IJ.runPlugIn("org.janelia.saalfeldlab.n5.ij.N5Importer",
+ String.format("url=%s/%s hide min=%s max=%s",
n5rootF.getAbsolutePath(), "cosem", minString, maxString ));
- List res = plugin.getResult();
+ final List res = plugin.getResult();
assertEquals(" crop num", 1, res.size());
final ImagePlus img = res.get(0);
diff --git a/src/test/java/org/janelia/saalfeldlab/n5/metadata/ome/ngff/v04/WriteAxesTests.java b/src/test/java/org/janelia/saalfeldlab/n5/metadata/ome/ngff/v04/WriteAxesTests.java
new file mode 100644
index 00000000..4730626b
--- /dev/null
+++ b/src/test/java/org/janelia/saalfeldlab/n5/metadata/ome/ngff/v04/WriteAxesTests.java
@@ -0,0 +1,248 @@
+package org.janelia.saalfeldlab.n5.metadata.ome.ngff.v04;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.Executors;
+
+import org.janelia.saalfeldlab.n5.N5Reader;
+import org.janelia.saalfeldlab.n5.TestExportImports;
+import org.janelia.saalfeldlab.n5.ij.N5Importer;
+import org.janelia.saalfeldlab.n5.ij.N5ScalePyramidExporter;
+import org.janelia.saalfeldlab.n5.metadata.imagej.CanonicalMetadataToImagePlus;
+import org.janelia.saalfeldlab.n5.metadata.imagej.CosemToImagePlus;
+import org.janelia.saalfeldlab.n5.metadata.imagej.ImagePlusLegacyMetadataParser;
+import org.janelia.saalfeldlab.n5.metadata.imagej.ImageplusMetadata;
+import org.janelia.saalfeldlab.n5.metadata.imagej.N5ImagePlusMetadata;
+import org.janelia.saalfeldlab.n5.metadata.imagej.N5ViewerToImagePlus;
+import org.janelia.saalfeldlab.n5.metadata.imagej.NgffToImagePlus;
+import org.janelia.saalfeldlab.n5.universe.N5DatasetDiscoverer;
+import org.janelia.saalfeldlab.n5.universe.N5Factory;
+import org.janelia.saalfeldlab.n5.universe.N5TreeNode;
+import org.janelia.saalfeldlab.n5.universe.metadata.N5CosemMetadata;
+import org.janelia.saalfeldlab.n5.universe.metadata.N5DatasetMetadata;
+import org.janelia.saalfeldlab.n5.universe.metadata.N5GenericSingleScaleMetadataParser;
+import org.janelia.saalfeldlab.n5.universe.metadata.N5Metadata;
+import org.janelia.saalfeldlab.n5.universe.metadata.N5SingleScaleMetadata;
+import org.janelia.saalfeldlab.n5.universe.metadata.axes.Axis;
+import org.janelia.saalfeldlab.n5.universe.metadata.canonical.CanonicalDatasetMetadata;
+import org.janelia.saalfeldlab.n5.universe.metadata.canonical.CanonicalSpatialDatasetMetadata;
+import org.janelia.saalfeldlab.n5.universe.metadata.ome.ngff.v04.NgffSingleScaleAxesMetadata;
+import org.janelia.saalfeldlab.n5.universe.metadata.ome.ngff.v04.OmeNgffMetadata;
+import org.janelia.saalfeldlab.n5.universe.metadata.ome.ngff.v04.OmeNgffMetadataParser;
+import org.junit.Before;
+import org.junit.Test;
+
+import ij.ImagePlus;
+import ij.gui.NewImage;
+
+public class WriteAxesTests {
+
+ private final String UNIT = "nm";
+
+ final int nx = 10;
+ final int ny = 8;
+
+ private HashMap, ImageplusMetadata>> impWriters;
+
+ private static String tempPathName() {
+
+ try {
+ final File parent = Files.createTempDirectory("ome-zarr-test-").toFile();
+ parent.deleteOnExit();
+ return parent.getCanonicalPath();
+ } catch (final Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ private static HashMap, ImageplusMetadata>> defaultImagePlusMetadataWriters()
+ {
+ final HashMap, ImageplusMetadata>> impMetaWriterTypes = new HashMap<>();
+ impMetaWriterTypes.put(N5ImagePlusMetadata.class, new ImagePlusLegacyMetadataParser());
+ impMetaWriterTypes.put(NgffSingleScaleAxesMetadata.class, new NgffToImagePlus());
+ impMetaWriterTypes.put(N5CosemMetadata.class, new CosemToImagePlus());
+ impMetaWriterTypes.put(N5SingleScaleMetadata.class, new N5ViewerToImagePlus());
+ impMetaWriterTypes.put(CanonicalDatasetMetadata.class, new CanonicalMetadataToImagePlus());
+ impMetaWriterTypes.put(CanonicalSpatialDatasetMetadata.class, new CanonicalMetadataToImagePlus());
+ return impMetaWriterTypes;
+ }
+
+ @Before
+ public void before() {
+
+ /* To explicitly test headless */
+// System.setProperty("java.awt.headless", "true");
+ impWriters = defaultImagePlusMetadataWriters();
+ }
+
+ @Test
+ public void testXYZ() throws IOException, InterruptedException, ExecutionException {
+
+ final int nc = 1;
+ final int nz = 6;
+ final int nt = 1;
+ final ImagePlus imp = createImage( nc, nz, nt );
+ final String rootLocation = createDataset("xyz.zarr", imp );
+
+ final OmeNgffMetadata ngffMeta = readMetadata(rootLocation);
+ assertTrue(Arrays.stream(ngffMeta.multiscales[0].axes).allMatch(x -> x.getUnit().equals(UNIT)));
+ assertEquals(3, Arrays.stream(ngffMeta.multiscales[0].axes).filter(x -> x.getType().equals(Axis.SPACE)).count());
+ assertEquals(0, Arrays.stream(ngffMeta.multiscales[0].axes).filter(x -> x.getType().equals(Axis.CHANNEL)).count());
+ assertEquals(0, Arrays.stream(ngffMeta.multiscales[0].axes).filter(x -> x.getType().equals(Axis.TIME)).count());
+
+ final ImagePlus impRead = readImage( rootLocation );
+ assertTrue( TestExportImports.equal(imp, impRead));
+ }
+
+ @Test
+ public void testXYC() throws IOException, InterruptedException, ExecutionException {
+
+ final int nc = 6;
+ final int nz = 1;
+ final int nt = 1;
+ final ImagePlus imp = createImage( nc, nz, nt );
+ final String rootLocation = createDataset("xyc.zarr", imp );
+
+ final OmeNgffMetadata ngffMeta = readMetadata(rootLocation);
+ assertEquals(2, Arrays.stream(ngffMeta.multiscales[0].axes).filter(x -> x.getType().equals(Axis.SPACE)).count());
+ assertEquals(1, Arrays.stream(ngffMeta.multiscales[0].axes).filter(x -> x.getType().equals(Axis.CHANNEL)).count());
+ assertEquals(0, Arrays.stream(ngffMeta.multiscales[0].axes).filter(x -> x.getType().equals(Axis.TIME)).count());
+
+ final ImagePlus impRead = readImage( rootLocation );
+ assertTrue( TestExportImports.equal(imp, impRead));
+ }
+
+ @Test
+ public void testXYT() throws IOException, InterruptedException, ExecutionException {
+
+ final int nc = 1;
+ final int nz = 1;
+ final int nt = 6;
+ final ImagePlus imp = createImage( nc, nz, nt );
+ final String rootLocation = createDataset("xyt.zarr", imp );
+
+ final OmeNgffMetadata ngffMeta = readMetadata(rootLocation);
+ assertEquals(2, Arrays.stream(ngffMeta.multiscales[0].axes).filter(x -> x.getType().equals(Axis.SPACE)).count());
+ assertEquals(0, Arrays.stream(ngffMeta.multiscales[0].axes).filter(x -> x.getType().equals(Axis.CHANNEL)).count());
+ assertEquals(1, Arrays.stream(ngffMeta.multiscales[0].axes).filter(x -> x.getType().equals(Axis.TIME)).count());
+
+ final ImagePlus impRead = readImage( rootLocation );
+ assertTrue( TestExportImports.equal(imp, impRead));
+ }
+
+ @Test
+ public void testXYCZ() throws IOException, InterruptedException, ExecutionException {
+
+ final int nc = 3;
+ final int nz = 2;
+ final int nt = 1;
+ final ImagePlus imp = createImage( nc, nz, nt );
+ final String rootLocation = createDataset("xycz.zarr", imp );
+
+ final OmeNgffMetadata ngffMeta = readMetadata(rootLocation);
+ assertEquals(3, Arrays.stream(ngffMeta.multiscales[0].axes).filter(x -> x.getType().equals(Axis.SPACE)).count());
+ assertEquals(1, Arrays.stream(ngffMeta.multiscales[0].axes).filter(x -> x.getType().equals(Axis.CHANNEL)).count());
+ assertEquals(0, Arrays.stream(ngffMeta.multiscales[0].axes).filter(x -> x.getType().equals(Axis.TIME)).count());
+
+ final ImagePlus impRead = readImage( rootLocation );
+ assertTrue( TestExportImports.equal(imp, impRead));
+ }
+
+ @Test
+ public void testCZYX() throws IOException, InterruptedException, ExecutionException {
+
+ final int nc = 3;
+ final int nz = 2;
+ final int nt = 1;
+ final ImagePlus imp = createImage( nc, nz, nt );
+ final String rootLocation = createDataset("czyx.zarr", imp);
+
+ final OmeNgffMetadata ngffMeta = readMetadata(rootLocation);
+ assertEquals(3, Arrays.stream(ngffMeta.multiscales[0].axes).filter(x -> x.getType().equals(Axis.SPACE)).count());
+ assertEquals(1, Arrays.stream(ngffMeta.multiscales[0].axes).filter(x -> x.getType().equals(Axis.CHANNEL)).count());
+ assertEquals(0, Arrays.stream(ngffMeta.multiscales[0].axes).filter(x -> x.getType().equals(Axis.TIME)).count());
+
+ final ImagePlus impRead = readImage( rootLocation );
+ assertTrue( TestExportImports.equal(imp, impRead));
+ // TODO other checks?
+ }
+
+ @Test
+ public void testXYCZT() throws IOException, InterruptedException, ExecutionException {
+
+ final int nc = 4;
+ final int nz = 3;
+ final int nt = 2;
+ final ImagePlus imp = createImage( nc, nz, nt );
+ final String rootLocation = createDataset("xyczt.zarr", imp);
+
+ final OmeNgffMetadata ngffMeta = readMetadata(rootLocation);
+ assertEquals(3, Arrays.stream(ngffMeta.multiscales[0].axes).filter(x -> x.getType().equals(Axis.SPACE)).count());
+ assertEquals(1, Arrays.stream(ngffMeta.multiscales[0].axes).filter(x -> x.getType().equals(Axis.CHANNEL)).count());
+ assertEquals(1, Arrays.stream(ngffMeta.multiscales[0].axes).filter(x -> x.getType().equals(Axis.TIME)).count());
+ }
+
+ private ImagePlus createImage( final int nc, final int nz, final int nt ) {
+ final ImagePlus imp = NewImage.createImage("test", nx, ny, nc * nz * nt, 8, NewImage.FILL_NOISE);
+ imp.setDimensions(nc, nz, nt);
+ imp.getCalibration().setUnit(UNIT);
+ return imp;
+ }
+
+ private String createDataset(final String containerName, final ImagePlus imp )
+ throws IOException, InterruptedException, ExecutionException {
+
+ final String rootLocation = tempPathName() + File.separator + containerName;
+ final String dataset = "/";
+ final String blockSizeArg = "32,32,32";
+ final String compression = N5ScalePyramidExporter.GZIP_COMPRESSION;
+
+ final N5ScalePyramidExporter writer = new N5ScalePyramidExporter();
+ writer.setOptions( imp, rootLocation, dataset, blockSizeArg, false,
+ N5ScalePyramidExporter.DOWN_SAMPLE, N5Importer.MetadataOmeZarrKey, compression);
+ writer.run(); // run() closes the n5 writer
+
+ return rootLocation;
+ }
+
+ private OmeNgffMetadata readMetadata(final String rootLocation ) {
+
+ final N5Reader zarr = new N5Factory().openReader(rootLocation);
+ final N5TreeNode node = N5DatasetDiscoverer.discover(zarr, Collections.singletonList(new N5GenericSingleScaleMetadataParser()),
+ Collections.singletonList(new OmeNgffMetadataParser()));
+
+ final N5Metadata meta = node.getMetadata();
+ if( meta instanceof OmeNgffMetadata ) {
+ return (OmeNgffMetadata) meta;
+ }
+ return null;
+ }
+
+ private ImagePlus readImage(final String rootLocation ) {
+
+ final N5Reader zarr = new N5Factory().openReader(rootLocation);
+ final N5TreeNode node = N5DatasetDiscoverer.discover(zarr,
+ Collections.singletonList(new N5GenericSingleScaleMetadataParser()),
+ Collections.singletonList(new OmeNgffMetadataParser()));
+
+ final N5Metadata meta = node.getDescendant("s0").get().getMetadata();
+ if( meta instanceof N5DatasetMetadata ) {
+
+ final N5DatasetMetadata dsetmeta = (N5DatasetMetadata)meta;
+ final List metaList = Collections.singletonList( dsetmeta );
+ final List impList = N5Importer.process(zarr, rootLocation, Executors.newFixedThreadPool(1), metaList, false, null, false, impWriters);
+ return impList.size() == 0 ? null : impList.get(0);
+ }
+ return null;
+ }
+
+}