From b6916680cae731bffd4541c7917827f939ee495f Mon Sep 17 00:00:00 2001 From: Stephan Preibisch Date: Tue, 10 Sep 2024 10:12:54 -0400 Subject: [PATCH] consolidate Fusion and Resave methods for BDV metadata writing --- .../mvrecon/fiji/plugin/resave/Resave_N5.java | 165 ++++++++---------- .../mvrecon/process/export/ExportN5API.java | 8 +- .../mvrecon/process/export/ExportTools.java | 159 +++++++++++++++-- .../mvrecon/process/resave/N5ResaveTools.java | 62 ++----- 4 files changed, 237 insertions(+), 157 deletions(-) diff --git a/src/main/java/net/preibisch/mvrecon/fiji/plugin/resave/Resave_N5.java b/src/main/java/net/preibisch/mvrecon/fiji/plugin/resave/Resave_N5.java index 0dcdeee2..24056183 100644 --- a/src/main/java/net/preibisch/mvrecon/fiji/plugin/resave/Resave_N5.java +++ b/src/main/java/net/preibisch/mvrecon/fiji/plugin/resave/Resave_N5.java @@ -50,6 +50,8 @@ import net.preibisch.mvrecon.fiji.plugin.queryXML.LoadParseQueryXML; import net.preibisch.mvrecon.fiji.spimdata.SpimData2; import net.preibisch.mvrecon.fiji.spimdata.XmlIoSpimData2; +import net.preibisch.mvrecon.process.export.ExportN5API.StorageType; +import net.preibisch.mvrecon.process.export.ExportTools; import net.preibisch.mvrecon.process.resave.N5ResaveTools; import net.preibisch.mvrecon.process.resave.SpimData2Tools; import util.Grid; @@ -109,126 +111,101 @@ public static SpimData2 resaveN5( }); } - /* - // re-save data to file - if ( URITools.isFile( n5Params.n5URI ) ) - { - try - { - WriteSequenceToN5.writeN5File( - sdReduced.getSequenceDescription(), - n5Params.proposedMipmaps, - n5Params.compression, //new GzipCompression() - new File( URITools.removeFilePrefix( n5Params.n5URI ) ), - new bdv.export.ExportScalePyramid.DefaultLoopbackHeuristic(), - null, - n5Params.numCellCreatorThreads, // Runtime.getRuntime().availableProcessors() - progressWriter ); - } - catch ( IOException e ) - { - e.printStackTrace(); - } - } - else if ( URITools.isS3( n5Params.n5URI ) || URITools.isGC( n5Params.n5URI ) )*/ - { - // save to cloud or file - final N5Writer n5Writer = URITools.instantiateGuessedN5Writer( n5Params.n5URI ); + // save to cloud or file + final N5Writer n5Writer = URITools.instantiateGuessedN5Writer( n5Params.n5URI ); + + final int[] blockSize = n5Params.subdivisions[ 0 ]; + final int[] computeBlockSize = new int[ blockSize.length ]; + final Compression compression = n5Params.compression; - final int[] blockSize = n5Params.subdivisions[ 0 ]; - final int[] computeBlockSize = new int[ blockSize.length ]; - final Compression compression = n5Params.compression; + for ( int d = 0; d < blockSize.length; ++d ) + computeBlockSize[ d ] = blockSize[ d ] * n5Params.blockSizeFactor[ d ]; - for ( int d = 0; d < blockSize.length; ++d ) - computeBlockSize[ d ] = blockSize[ d ] * n5Params.blockSizeFactor[ d ]; + final HashMap dimensions = + N5ResaveTools.assembleDimensions( data, vidsToResave ); - //final ArrayList viewSetups = - // N5ResaveTools.assembleViewSetups( data, vidsToResave ); + final int[][] downsamplings = + N5ResaveTools.mipMapInfoToDownsamplings( n5Params.proposedMipmaps ); - final HashMap viewSetupIdToDimensions = - N5ResaveTools.assembleDimensions( data, vidsToResave ); + final ArrayList grid = + N5ResaveTools.assembleS0Jobs( vidsToResave, dimensions, blockSize, computeBlockSize ); - IOFunctions.println( "Dimensions of raw images: " ); - viewSetupIdToDimensions.forEach( (id,dim ) -> IOFunctions.println( "ViewSetup " + id + ": " + Arrays.toString( dim )) ); + final Map dataTypes = + N5ResaveTools.assembleDataTypes( data, 
dimensions.keySet() ); - final int[][] downsamplings = - N5ResaveTools.mipMapInfoToDownsamplings( n5Params.proposedMipmaps ); + // write BDV metadata for all ViewIds (including downsampling) + vidsToResave.forEach( viewId -> ExportTools.writeBDVDatasetMetadataN5( + n5Writer, viewId, dataTypes.get( viewId.getViewSetupId() ), dimensions.get( viewId.getViewSetupId() ), compression, blockSize, downsamplings)); - IOFunctions.println( "Downsamplings: " + Arrays.deepToString( downsamplings ) ); + IOFunctions.println( "Dimensions of raw images: " ); + dimensions.forEach( ( id,dim ) -> IOFunctions.println( "ViewSetup " + id + ": " + Arrays.toString( dim )) ); - final ArrayList grid = - N5ResaveTools.assembleAllS0Jobs( vidsToResave, viewSetupIdToDimensions, blockSize, computeBlockSize ); + IOFunctions.println( "Downsamplings: " + Arrays.deepToString( downsamplings ) ); - final Map dataTypes = - N5ResaveTools.createGroups( n5Writer, data, viewSetupIdToDimensions, blockSize, downsamplings, compression ); + // + // Save full resolution dataset (s0) + // + final ForkJoinPool myPool = new ForkJoinPool( n5Params.numCellCreatorThreads ); - N5ResaveTools.createS0Datasets( n5Writer, vidsToResave, dataTypes, viewSetupIdToDimensions, blockSize, compression ); + long time = System.currentTimeMillis(); + + try + { + myPool.submit(() -> grid.parallelStream().forEach( + gridBlock -> N5ResaveTools.resaveS0Block( + data, + n5Writer, + dataTypes.get( (int)gridBlock[ 3 ][ 1 ] ), + N5ResaveTools.datasetMappingFunctionBdv( 0, StorageType.N5 ), + gridBlock ) ) ).get(); + } + catch (InterruptedException | ExecutionException e) + { + IOFunctions.println( "Failed to write s0 for N5 '" + n5Params.n5URI + "'. Error: " + e ); + e.printStackTrace(); + return null; + } - // - // Save full resolution dataset (s0) - // - final ForkJoinPool myPool = new ForkJoinPool( n5Params.numCellCreatorThreads ); + IOFunctions.println( "Saved level s0, took: " + (System.currentTimeMillis() - time ) + " ms." ); - long time = System.currentTimeMillis(); + // + // Save remaining downsampling levels (s1 ... sN) + // + for ( int level = 1; level < downsamplings.length; ++level ) + { + final int s = level; + final int[] ds = N5ResaveTools.computeRelativeDownsampling( downsamplings, s ); + IOFunctions.println( "Downsampling: " + Util.printCoordinates( downsamplings[ s ] ) + " with relative downsampling of " + Util.printCoordinates( ds )); + + final ArrayList allBlocks = + N5ResaveTools.prepareDownsampling( vidsToResave, n5Writer, level, ds, downsamplings[ s ], blockSize, compression ); + + time = System.currentTimeMillis(); try { - myPool.submit(() -> grid.parallelStream().forEach( - gridBlock -> N5ResaveTools.resaveS0Block( - data, + myPool.submit(() -> allBlocks.parallelStream().forEach( + gridBlock -> N5ResaveTools.writeDownsampledBlock( n5Writer, - dataTypes.get( (int)gridBlock[ 3 ][ 1 ] ), - N5ResaveTools.datasetMappingFunctionBdvN5( 0 ), + N5ResaveTools.datasetMappingFunctionBdv( s, StorageType.N5 ), + N5ResaveTools.datasetMappingFunctionBdv( s - 1, StorageType.N5 ), + ds, gridBlock ) ) ).get(); } catch (InterruptedException | ExecutionException e) { - IOFunctions.println( "Failed to write s0 for N5 '" + n5Params.n5URI + "'. Error: " + e ); + IOFunctions.println( "Failed to write downsample step s" + s +" for N5 '" + n5Params.n5URI + "'. Error: " + e ); e.printStackTrace(); return null; } - IOFunctions.println( "Saved level s0, took: " + (System.currentTimeMillis() - time ) + " ms." ); - - // - // Save remaining downsampling levels (s1 ... 
sN) - // - for ( int level = 1; level < downsamplings.length; ++level ) - { - final int s = level; - final int[] ds = N5ResaveTools.computeRelativeDownsampling( downsamplings, s ); - IOFunctions.println( "Downsampling: " + Util.printCoordinates( downsamplings[ s ] ) + " with relative downsampling of " + Util.printCoordinates( ds )); - - final ArrayList allBlocks = - N5ResaveTools.prepareDownsampling( vidsToResave, n5Writer, level, ds, downsamplings[ s ], blockSize, compression ); - - time = System.currentTimeMillis(); - - try - { - myPool.submit(() -> allBlocks.parallelStream().forEach( - gridBlock -> N5ResaveTools.writeDownsampledBlock( - n5Writer, - N5ResaveTools.datasetMappingFunctionBdvN5( s ), - N5ResaveTools.datasetMappingFunctionBdvN5( s - 1 ), - ds, - gridBlock ) ) ).get(); - } - catch (InterruptedException | ExecutionException e) - { - IOFunctions.println( "Failed to write downsample step s" + s +" for N5 '" + n5Params.n5URI + "'. Error: " + e ); - e.printStackTrace(); - return null; - } - - IOFunctions.println( "Resaved N5 s" + s + " level, took: " + (System.currentTimeMillis() - time ) + " ms." ); - } + IOFunctions.println( "Resaved N5 s" + s + " level, took: " + (System.currentTimeMillis() - time ) + " ms." ); + } - myPool.shutdown(); - try { myPool.awaitTermination( Long.MAX_VALUE, TimeUnit.HOURS ); } catch (InterruptedException e) { e.printStackTrace(); } + myPool.shutdown(); + try { myPool.awaitTermination( Long.MAX_VALUE, TimeUnit.HOURS ); } catch (InterruptedException e) { e.printStackTrace(); } - n5Writer.close(); - } + n5Writer.close(); sdReduced.getSequenceDescription().setImgLoader( new N5ImageLoader( n5Params.n5URI, sdReduced.getSequenceDescription() ) ); sdReduced.setBasePathURI( URITools.getParent( n5Params.xmlURI ) ); diff --git a/src/main/java/net/preibisch/mvrecon/process/export/ExportN5API.java b/src/main/java/net/preibisch/mvrecon/process/export/ExportN5API.java index 4052e357..3dcfa58f 100644 --- a/src/main/java/net/preibisch/mvrecon/process/export/ExportN5API.java +++ b/src/main/java/net/preibisch/mvrecon/process/export/ExportN5API.java @@ -25,13 +25,11 @@ import java.io.File; import java.io.IOException; import java.net.URI; -import java.util.ArrayList; import java.util.Arrays; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; import java.util.function.Function; @@ -39,13 +37,10 @@ import org.janelia.saalfeldlab.n5.Compression; import org.janelia.saalfeldlab.n5.DataType; import org.janelia.saalfeldlab.n5.GzipCompression; -import org.janelia.saalfeldlab.n5.N5FSWriter; import org.janelia.saalfeldlab.n5.N5Writer; import org.janelia.saalfeldlab.n5.hdf5.N5HDF5Writer; import org.janelia.saalfeldlab.n5.imglib2.N5Utils; -import org.janelia.saalfeldlab.n5.universe.N5Factory; import org.janelia.saalfeldlab.n5.universe.N5Factory.StorageFormat; -import org.janelia.saalfeldlab.n5.zarr.N5ZarrWriter; import bdv.export.ProposeMipmaps; import fiji.util.gui.GenericDialogPlus; @@ -70,7 +65,6 @@ import net.preibisch.mvrecon.fiji.plugin.resave.PluginHelper; import net.preibisch.mvrecon.fiji.plugin.util.GUIHelper; import net.preibisch.mvrecon.process.deconvolution.DeconViews; -import net.preibisch.mvrecon.process.downsampling.lazy.LazyHalfPixelDownsample2x; import net.preibisch.mvrecon.process.export.ExportTools.InstantiateViewSetupBigStitcher; import 
net.preibisch.mvrecon.process.interestpointregistration.pairwise.constellation.grouping.Group; import net.preibisch.mvrecon.process.resave.N5ResaveTools; @@ -241,7 +235,7 @@ else if ( FloatType.class.isInstance( type ) ) IOFunctions.println( "Assigning ViewId " + Group.pvid( viewId ) ); - dataset = ExportTools.createBDVPath( viewId, this.storageType ); + dataset = ExportTools.createBDVPath( viewId, 0, this.storageType ); } // diff --git a/src/main/java/net/preibisch/mvrecon/process/export/ExportTools.java b/src/main/java/net/preibisch/mvrecon/process/export/ExportTools.java index 43aa3048..528baaf1 100644 --- a/src/main/java/net/preibisch/mvrecon/process/export/ExportTools.java +++ b/src/main/java/net/preibisch/mvrecon/process/export/ExportTools.java @@ -41,9 +41,7 @@ import bdv.export.ProposeMipmaps; import bdv.img.hdf5.Hdf5ImageLoader; import bdv.img.n5.N5ImageLoader; -import mpicbg.spim.data.SpimData; import mpicbg.spim.data.SpimDataException; -import mpicbg.spim.data.XmlIoSpimData; import mpicbg.spim.data.generic.sequence.BasicViewSetup; import mpicbg.spim.data.registration.ViewRegistration; import mpicbg.spim.data.registration.ViewRegistrations; @@ -91,6 +89,132 @@ public static int[][] estimateMultiResPyramid( final Dimensions dimensions, fina return emi.getExportResolutions(); } + public static boolean writeBDVDatasetMetadataN5( + final N5Writer driverVolumeWriter, + final ViewId viewId, + final DataType dataType, + final long[] dimensions, + final Compression compression, + final int[] blockSize, + final int[][] downsamplings ) + { + String ds = "setup" + viewId.getViewSetupId(); + + final Map> attribs = driverVolumeWriter.listAttributes(ds); + + // if viewsetup does not exist + if ( !attribs.containsKey( "dataType" ) || !attribs.containsKey( "blockSize" ) || !attribs.containsKey( "dimensions" ) || !attribs.containsKey( "compression" ) || !attribs.containsKey( "downsamplingFactors" ) ) + { + // set N5 attributes for setup + // e.g. {"compression":{"type":"gzip","useZlib":false,"level":1},"downsamplingFactors":[[1,1,1],[2,2,1]],"blockSize":[128,128,32],"dataType":"uint16","dimensions":[512,512,86]} + IOFunctions.println( "setting attributes for '" + "setup" + viewId.getViewSetupId() + "'"); + + driverVolumeWriter.setAttribute(ds, "dataType", dataType ); + driverVolumeWriter.setAttribute(ds, "blockSize", blockSize ); + driverVolumeWriter.setAttribute(ds, "dimensions", dimensions ); + driverVolumeWriter.setAttribute(ds, "compression", compression ); + + if ( downsamplings == null || downsamplings.length == 0 ) + driverVolumeWriter.setAttribute(ds, "downsamplingFactors", new int[][] {{1,1,1}} ); + else + driverVolumeWriter.setAttribute(ds, "downsamplingFactors", downsamplings ); + } + else + { + // TODO: test that the values are consistent? + } + + // set N5 attributes for timepoint + // e.g. 
{"resolution":[1.0,1.0,3.0],"saved_completely":true,"multiScale":true} + ds ="setup" + viewId.getViewSetupId() + "/" + "timepoint" + viewId.getTimePointId(); + driverVolumeWriter.setAttribute(ds, "resolution", new double[] {1,1,1} ); + driverVolumeWriter.setAttribute(ds, "saved_completely", true ); + driverVolumeWriter.setAttribute(ds, "multiScale", downsamplings != null && downsamplings.length != 0 ); + + if ( downsamplings == null || downsamplings.length == 0 ) + { + // set additional N5 attributes for s0 dataset + ds = ds + "/s0"; + driverVolumeWriter.createGroup( ds ); + driverVolumeWriter.setAttribute(ds, "downsamplingFactors", new int[] {1,1,1} ); + } + else + { + for ( int level = 0; level < downsamplings.length; ++level ) + { + // set additional N5 attributes for s0 ... sN datasets + final String dsLevel = ds + "/s" + level; + driverVolumeWriter.createGroup( dsLevel ); + driverVolumeWriter.setAttribute(dsLevel, "downsamplingFactors", downsamplings[ level ] ); + } + } + + return true; + } + + public static boolean writeBDVDatasetMetadataHDF5( + final N5Writer driverVolumeWriter, + final ViewId viewId, + final int[] blockSize, + final int[][] downsamplings ) + { + final String subdivisionsDatasets = "s" + String.format("%02d", viewId.getViewSetupId()) + "/subdivisions"; + final String resolutionsDatasets = "s" + String.format("%02d", viewId.getViewSetupId()) + "/resolutions"; + + if ( driverVolumeWriter.datasetExists( subdivisionsDatasets ) && driverVolumeWriter.datasetExists( resolutionsDatasets ) ) + { + // TODO: test that the values are consistent? + return true; + } + + final Img subdivisions; + final Img resolutions; + + if ( downsamplings == null || downsamplings.length == 0 ) + { + subdivisions = ArrayImgs.ints( blockSize, new long[] { 3, 1 } ); // blocksize + resolutions = ArrayImgs.doubles( new double[] { 1,1,1 }, new long[] { 3, 1 } ); // downsampling + } + else + { + final int[] blocksizes = new int[ 3 * downsamplings.length ]; + final double[] downsamples = new double[ 3 * downsamplings.length ]; + + int i = 0; + for ( int level = 0; level < downsamplings.length; ++level ) + { + downsamples[ i ] = downsamplings[ level ][ 0 ]; + blocksizes[ i++ ] = blockSize[ 0 ]; + downsamples[ i ] = downsamplings[ level ][ 1 ]; + blocksizes[ i++ ] = blockSize[ 1 ]; + downsamples[ i ] = downsamplings[ level ][ 2 ]; + blocksizes[ i++ ] = blockSize[ 2 ]; + } + + subdivisions = ArrayImgs.ints( blocksizes, new long[] { 3, downsamplings.length } ); // blocksize + resolutions = ArrayImgs.doubles( downsamples, new long[] { 3, downsamplings.length } ); // downsampling + } + + driverVolumeWriter.createDataset( + subdivisionsDatasets, + subdivisions.dimensionsAsLongArray(),// new long[] { 3, 1 }, + new int[] { (int)subdivisions.dimension( 0 ), (int)subdivisions.dimension( 1 ) }, //new int[] { 3, 1 }, + DataType.INT32, + new RawCompression() ); + + driverVolumeWriter.createDataset( + resolutionsDatasets, + resolutions.dimensionsAsLongArray(),// new long[] { 3, 1 }, + new int[] { (int)resolutions.dimension( 0 ), (int)resolutions.dimension( 1 ) },//new int[] { 3, 1 }, + DataType.FLOAT64, + new RawCompression() ); + + N5Utils.saveBlock(subdivisions, driverVolumeWriter, "s" + String.format("%02d", viewId.getViewSetupId()) + "/subdivisions", new long[] {0,0,0} ); + N5Utils.saveBlock(resolutions, driverVolumeWriter, "s" + String.format("%02d", viewId.getViewSetupId()) + "/resolutions", new long[] {0,0,0} ); + + return true; + } + public static boolean writeBDVMetaData( final N5Writer driverVolumeWriter, 
final StorageType storageType, @@ -108,7 +232,7 @@ public static boolean writeBDVMetaData( //final String xmlPath = null; if ( StorageType.N5.equals(storageType) ) - { + { System.out.println( "XML: " + xmlOutPathURI ); final Pair exists = writeSpimData( @@ -122,6 +246,9 @@ public static boolean writeBDVMetaData( if ( exists == null ) return false; + return writeBDVDatasetMetadataN5( driverVolumeWriter, viewId, dataType, dimensions, compression, blockSize, downsamplings ); + + /* String ds = "setup" + viewId.getViewSetupId(); // if viewsetup does not exist @@ -166,7 +293,7 @@ public static boolean writeBDVMetaData( } } - return true; + return true;*/ } else if ( StorageType.HDF5.equals(storageType) ) { @@ -183,6 +310,13 @@ else if ( StorageType.HDF5.equals(storageType) ) if ( exists == null ) return false; + return writeBDVDatasetMetadataHDF5( + driverVolumeWriter, + viewId, + blockSize, + downsamplings ); + + /* // if viewsetup does not exist if ( !exists.getB() ) { @@ -231,8 +365,11 @@ else if ( StorageType.HDF5.equals(storageType) ) N5Utils.saveBlock(subdivisions, driverVolumeWriter, "s" + String.format("%02d", viewId.getViewSetupId()) + "/subdivisions", new long[] {0,0,0} ); N5Utils.saveBlock(resolutions, driverVolumeWriter, "s" + String.format("%02d", viewId.getViewSetupId()) + "/resolutions", new long[] {0,0,0} ); } - - return true; + else + { + return true; + } + */ } else { @@ -389,22 +526,22 @@ public static ViewId getViewId(final String bdvString ) return new ViewId(timepointId, viewSetupId); } - public static String createBDVPath(final String bdvString, final StorageType storageType) + public static String createBDVPath(final String bdvString, final int level, final StorageType storageType) { - return createBDVPath(getViewId(bdvString), storageType); + return createBDVPath(getViewId(bdvString), level, storageType); } - public static String createBDVPath( final ViewId viewId, final StorageType storageType) + public static String createBDVPath( final ViewId viewId, final int level, final StorageType storageType) { String path = null; if ( StorageType.N5.equals(storageType) ) { - path = "setup" + viewId.getViewSetupId() + "/" + "timepoint" + viewId.getTimePointId() + "/s0"; + path = "setup" + viewId.getViewSetupId() + "/" + "timepoint" + viewId.getTimePointId() + "/s" + level; } else if ( StorageType.HDF5.equals(storageType) ) { - path = "t" + String.format("%05d", viewId.getTimePointId()) + "/" + "s" + String.format("%02d", viewId.getViewSetupId()) + "/0/cells"; + path = "t" + String.format("%05d", viewId.getTimePointId()) + "/" + "s" + String.format("%02d", viewId.getViewSetupId()) + "/" + level + "/cells"; } else { diff --git a/src/main/java/net/preibisch/mvrecon/process/resave/N5ResaveTools.java b/src/main/java/net/preibisch/mvrecon/process/resave/N5ResaveTools.java index e2a0e415..765dc956 100644 --- a/src/main/java/net/preibisch/mvrecon/process/resave/N5ResaveTools.java +++ b/src/main/java/net/preibisch/mvrecon/process/resave/N5ResaveTools.java @@ -34,6 +34,8 @@ import net.preibisch.legacy.io.IOFunctions; import net.preibisch.mvrecon.fiji.spimdata.SpimData2; import net.preibisch.mvrecon.process.downsampling.lazy.LazyHalfPixelDownsample2x; +import net.preibisch.mvrecon.process.export.ExportTools; +import net.preibisch.mvrecon.process.export.ExportN5API.StorageType; import net.preibisch.mvrecon.process.interestpointregistration.pairwise.constellation.grouping.Group; import util.Grid; @@ -41,33 +43,20 @@ public class N5ResaveTools { /** * @param level - the downsampling level + * 
@param storageType - N5 or HDF5 (soon Zarr) * @return a Function that maps the gridBlock to a N5 dataset name */ - public static Function datasetMappingFunctionBdvN5( final int level ) + public static Function datasetMappingFunctionBdv( final int level, final StorageType storageType ) { return gridBlock -> { if ( gridBlock.length <= 3 ) throw new RuntimeException( "mappingFunctionBDV() needs an extended GridBlock long[][], where Gridblock[3][] encodes the ViewId"); - final ViewId viewId = new ViewId( (int)gridBlock[ 3 ][ 0 ], (int)gridBlock[ 3 ][ 1 ]); - return "setup" + viewId.getViewSetupId() + "/timepoint" + viewId.getTimePointId() + "/s" + (level); - }; - } - - /** - * @param level - the downsampling level - * @return a Function that maps the gridBlock to a HDF5 dataset name - */ - public static Function datasetMappingFunctionBdvHDF5( final int level ) - { - return gridBlock -> - { - if ( gridBlock.length <= 3 ) - throw new RuntimeException( "mappingFunctionBDV() needs an extended GridBlock long[][], where Gridblock[3][] encodes the ViewId"); - - final ViewId viewId = new ViewId( (int)gridBlock[ 3 ][ 0 ], (int)gridBlock[ 3 ][ 1 ]); - return null; //TODO //"setup" + viewId.getViewSetupId() + "/timepoint" + viewId.getTimePointId() + "/s" + (level); + return ExportTools.createBDVPath( + new ViewId( (int)gridBlock[ 3 ][ 0 ], (int)gridBlock[ 3 ][ 1 ]), + level, + storageType ); }; } @@ -220,7 +209,8 @@ public static int[] computeRelativeDownsampling( return ds; } - public static void createS0Datasets( + /* + public static void createS0DatasetsBdvN5( final N5Writer n5, final Collection< ? extends ViewId > viewIds, final Map dataTypes, @@ -256,6 +246,7 @@ public static void createS0Datasets( n5.setAttribute(ds, "downsamplingFactors", new int[] {1,1,1} ); } } + */ public static > void resaveS0Block( final SpimData2 data, @@ -281,19 +272,15 @@ public static > void resaveS0Block( System.out.println( "ViewId " + Group.pvid( viewId ) + ", written block: offset=" + Util.printCoordinates( gridBlock[0] ) + ", dimension=" + Util.printCoordinates( gridBlock[1] ) ); } - public static Map< Integer, DataType > createGroups( - final N5Writer n5, + public static Map< Integer, DataType > assembleDataTypes( final AbstractSpimData data, - final Map viewSetupIdToDimensions, - final int[] blockSize, - final int[][] downsamplingFactors, - final Compression compression ) + final Collection< Integer > viewSetupIds ) { final HashMap< Integer, DataType > dataTypes = new HashMap<>(); - for ( final Entry< Integer, long[] > viewSetup : viewSetupIdToDimensions.entrySet() ) + for ( final int viewSetupId : viewSetupIds ) { - final Object type = data.getSequenceDescription().getImgLoader().getSetupImgLoader( viewSetup.getKey() ).getImageType(); + final Object type = data.getSequenceDescription().getImgLoader().getSetupImgLoader( viewSetupId ).getImageType(); final DataType dataType; if ( UnsignedShortType.class.isInstance( type ) ) @@ -305,22 +292,7 @@ else if ( FloatType.class.isInstance( type ) ) else throw new RuntimeException("Unsupported pixel type: " + type.getClass().getCanonicalName() ); - dataTypes.put( viewSetup.getKey(), dataType ); - - // ViewSetupId needs to contain: {"downsamplingFactors":[[1,1,1],[2,2,1]],"dataType":"uint16"} - final String n5Dataset = "setup" + viewSetup.getKey(); - - System.out.println( "Creating group: " + "'setup" + viewSetup.getKey() + "'" ); - - n5.createGroup( n5Dataset ); - - System.out.println( "setting attributes for '" + "setup" + viewSetup.getKey() + "'"); - - n5.setAttribute( 
n5Dataset, "downsamplingFactors", downsamplingFactors ); - n5.setAttribute( n5Dataset, "dataType", dataType ); - n5.setAttribute( n5Dataset, "blockSize", blockSize ); - n5.setAttribute( n5Dataset, "dimensions", viewSetup.getValue() ); - n5.setAttribute( n5Dataset, "compression", compression ); + dataTypes.put( viewSetupId, dataType ); } return dataTypes; @@ -338,7 +310,7 @@ public static int[][] mipMapInfoToDownsamplings( final Map< Integer, ExportMipma return downsamplings; } - public static ArrayList assembleAllS0Jobs( + public static ArrayList assembleS0Jobs( final Collection< ? extends ViewId > viewIds, final HashMap< Integer, long[] > viewSetupIdToDimensions, final int[] blockSize,