diff --git a/src/main/java/net/preibisch/mvrecon/fiji/plugin/resave/Resave_HDF5.java b/src/main/java/net/preibisch/mvrecon/fiji/plugin/resave/Resave_HDF5.java
index bf15fbd3..161b4ffc 100644
--- a/src/main/java/net/preibisch/mvrecon/fiji/plugin/resave/Resave_HDF5.java
+++ b/src/main/java/net/preibisch/mvrecon/fiji/plugin/resave/Resave_HDF5.java
@@ -47,7 +47,7 @@ import net.preibisch.mvrecon.fiji.plugin.resave.Generic_Resave_HDF5.ParametersResaveHDF5;
 import net.preibisch.mvrecon.fiji.spimdata.SpimData2;
 import net.preibisch.mvrecon.fiji.spimdata.XmlIoSpimData2;
-import net.preibisch.mvrecon.process.resave.SpimData2Tools;
+import net.preibisch.mvrecon.process.n5api.SpimData2Tools;
 
 public class Resave_HDF5 implements PlugIn
 {
diff --git a/src/main/java/net/preibisch/mvrecon/fiji/plugin/resave/Resave_N5.java b/src/main/java/net/preibisch/mvrecon/fiji/plugin/resave/Resave_N5.java
index 194e0c86..c36ff985 100644
--- a/src/main/java/net/preibisch/mvrecon/fiji/plugin/resave/Resave_N5.java
+++ b/src/main/java/net/preibisch/mvrecon/fiji/plugin/resave/Resave_N5.java
@@ -51,10 +51,9 @@ import net.preibisch.mvrecon.fiji.spimdata.SpimData2;
 import net.preibisch.mvrecon.fiji.spimdata.XmlIoSpimData2;
 import net.preibisch.mvrecon.process.export.ExportN5API.StorageType;
-import net.preibisch.mvrecon.process.export.ExportTools;
-import net.preibisch.mvrecon.process.export.ExportTools.MultiResolutionLevelInfo;
-import net.preibisch.mvrecon.process.resave.N5ResaveTools;
-import net.preibisch.mvrecon.process.resave.SpimData2Tools;
+import net.preibisch.mvrecon.process.n5api.N5ApiTools;
+import net.preibisch.mvrecon.process.n5api.SpimData2Tools;
+import net.preibisch.mvrecon.process.n5api.N5ApiTools.MultiResolutionLevelInfo;
 import util.Grid;
 import util.URITools;
 
@@ -123,16 +122,16 @@ public static SpimData2 resaveN5(
 			computeBlockSize[ d ] = blockSize[ d ] * n5Params.blockSizeFactor[ d ];
 
 		final HashMap< Integer, long[] > dimensions =
-				N5ResaveTools.assembleDimensions( data, vidsToResave );
+				N5ApiTools.assembleDimensions( data, vidsToResave );
 
 		final int[][] downsamplings =
-				N5ResaveTools.mipMapInfoToDownsamplings( n5Params.proposedMipmaps );
+				N5ApiTools.mipMapInfoToDownsamplings( n5Params.proposedMipmaps );
 
 		final ArrayList< long[][] > grid =
-				N5ResaveTools.assembleS0Jobs( vidsToResave, dimensions, blockSize, computeBlockSize );
+				N5ApiTools.assembleS0Jobs( vidsToResave, dimensions, blockSize, computeBlockSize );
 
 		final Map< Integer, DataType > dataTypes =
-				N5ResaveTools.assembleDataTypes( data, dimensions.keySet() );
+				N5ApiTools.assembleDataTypes( data, dimensions.keySet() );
 
 		// create all datasets and write BDV metadata for all ViewIds (including downsampling)
 		final HashMap< ViewId, MultiResolutionLevelInfo[] > viewIdToMrInfo = new HashMap<>();
@@ -140,7 +139,7 @@ public static SpimData2 resaveN5(
 		long time = System.currentTimeMillis();
 
 		for ( final ViewId viewId : vidsToResave )
-			viewIdToMrInfo.put( viewId , ExportTools.setupBdvDatasetsN5(
+			viewIdToMrInfo.put( viewId , N5ApiTools.setupBdvDatasetsN5(
 					n5Writer,
 					viewId,
 					dataTypes.get( viewId.getViewSetupId() ),
 					dimensions.get( viewId.getViewSetupId() ),
@@ -164,11 +163,11 @@ public static SpimData2 resaveN5(
 		try
 		{
 			myPool.submit(() -> grid.parallelStream().forEach(
-					gridBlock -> N5ResaveTools.resaveS0Block(
+					gridBlock -> N5ApiTools.resaveS0Block(
 							data,
 							n5Writer,
-							dataTypes.get( N5ResaveTools.gridBlockToViewId( gridBlock ).getViewSetupId() ),
-							N5ResaveTools.gridToDatasetBdv( 0, StorageType.N5 ), // a function mapping the gridblock to the dataset name for level 0 and N5
+							dataTypes.get( N5ApiTools.gridBlockToViewId( gridBlock ).getViewSetupId() ),
+							N5ApiTools.gridToDatasetBdv( 0, StorageType.N5 ), // a function mapping the gridblock to the dataset name for level 0 and N5
 							gridBlock ) ) ).get();
 		}
 		catch (InterruptedException | ExecutionException e)
@@ -186,9 +185,9 @@ public static SpimData2 resaveN5(
 		for ( int level = 1; level < downsamplings.length; ++level )
 		{
 			final int s = level;
-			final int[] ds = N5ResaveTools.computeRelativeDownsampling( downsamplings, s );
+			final int[] ds = N5ApiTools.computeRelativeDownsampling( downsamplings, s );
 
 			final ArrayList< long[][] > allBlocks =
-					N5ResaveTools.assembleDownsamplingJobs( vidsToResave, viewIdToMrInfo, level );
+					N5ApiTools.assembleDownsamplingJobs( vidsToResave, viewIdToMrInfo, level );
 
 			IOFunctions.println( "Downsampling: " + Util.printCoordinates( downsamplings[ s ] ) + " with relative downsampling of " + Util.printCoordinates( ds ));
 			IOFunctions.println( "Number of compute blocks: " + allBlocks.size() );
@@ -200,10 +199,10 @@ public static SpimData2 resaveN5(
 			myPool.submit(() -> allBlocks.parallelStream().forEach(
 					gridBlock ->
 					{
-						N5ResaveTools.writeDownsampledBlock(
+						N5ApiTools.writeDownsampledBlock(
 								n5Writer,
-								viewIdToMrInfo.get( N5ResaveTools.gridBlockToViewId( gridBlock ) )[ s ], //N5ResaveTools.gridToDatasetBdv( s, StorageType.N5 ),
-								viewIdToMrInfo.get( N5ResaveTools.gridBlockToViewId( gridBlock ) )[ s - 1 ],//N5ResaveTools.gridToDatasetBdv( s - 1, StorageType.N5 ),
+								viewIdToMrInfo.get( N5ApiTools.gridBlockToViewId( gridBlock ) )[ s ], //N5ResaveTools.gridToDatasetBdv( s, StorageType.N5 ),
+								viewIdToMrInfo.get( N5ApiTools.gridBlockToViewId( gridBlock ) )[ s - 1 ],//N5ResaveTools.gridToDatasetBdv( s - 1, StorageType.N5 ),
 								gridBlock ); } ) ).get();
 		}
diff --git a/src/main/java/net/preibisch/mvrecon/fiji/plugin/resave/Resave_TIFF.java b/src/main/java/net/preibisch/mvrecon/fiji/plugin/resave/Resave_TIFF.java
index af2c2dda..7abc9d8d 100644
--- a/src/main/java/net/preibisch/mvrecon/fiji/plugin/resave/Resave_TIFF.java
+++ b/src/main/java/net/preibisch/mvrecon/fiji/plugin/resave/Resave_TIFF.java
@@ -43,7 +43,7 @@ import net.preibisch.mvrecon.fiji.spimdata.SpimData2;
 import net.preibisch.mvrecon.fiji.spimdata.imgloaders.StackImgLoaderIJ;
 import net.preibisch.mvrecon.process.export.Save3dTIFF;
-import net.preibisch.mvrecon.process.resave.SpimData2Tools;
+import net.preibisch.mvrecon.process.n5api.SpimData2Tools;
 import util.URITools;
 
 public class Resave_TIFF implements PlugIn
diff --git a/src/main/java/net/preibisch/mvrecon/fiji/spimdata/explorer/popup/ResavePopup.java b/src/main/java/net/preibisch/mvrecon/fiji/spimdata/explorer/popup/ResavePopup.java
index 7d3e8f6e..a30f7cf3 100644
--- a/src/main/java/net/preibisch/mvrecon/fiji/spimdata/explorer/popup/ResavePopup.java
+++ b/src/main/java/net/preibisch/mvrecon/fiji/spimdata/explorer/popup/ResavePopup.java
@@ -52,7 +52,7 @@ import net.preibisch.mvrecon.fiji.spimdata.explorer.ExplorerWindow;
 import net.preibisch.mvrecon.fiji.spimdata.explorer.FilteredAndGroupedExplorerPanel;
 import net.preibisch.mvrecon.fiji.spimdata.interestpoints.ViewInterestPointLists;
-import net.preibisch.mvrecon.process.resave.SpimData2Tools;
+import net.preibisch.mvrecon.process.n5api.SpimData2Tools;
 import util.URITools;
 
 public class ResavePopup extends JMenu implements ExplorerWindowSetable
diff --git a/src/main/java/net/preibisch/mvrecon/process/export/ExportN5API.java b/src/main/java/net/preibisch/mvrecon/process/export/ExportN5API.java
index 367a4926..aba4c350 100644
--- 
a/src/main/java/net/preibisch/mvrecon/process/export/ExportN5API.java +++ b/src/main/java/net/preibisch/mvrecon/process/export/ExportN5API.java @@ -65,9 +65,10 @@ import net.preibisch.mvrecon.fiji.plugin.resave.PluginHelper; import net.preibisch.mvrecon.fiji.plugin.util.GUIHelper; import net.preibisch.mvrecon.process.deconvolution.DeconViews; -import net.preibisch.mvrecon.process.export.ExportTools.InstantiateViewSetupBigStitcher; import net.preibisch.mvrecon.process.interestpointregistration.pairwise.constellation.grouping.Group; -import net.preibisch.mvrecon.process.resave.N5ResaveTools; +import net.preibisch.mvrecon.process.n5api.N5ApiTools; +import net.preibisch.mvrecon.process.n5api.SpimData2Tools; +import net.preibisch.mvrecon.process.n5api.SpimData2Tools.InstantiateViewSetupBigStitcher; import util.Grid; import util.URITools; @@ -225,7 +226,7 @@ else if ( FloatType.class.isInstance( type ) ) IOFunctions.println( "Assigning ViewId " + Group.pvid( viewId ) ); - dataset = ExportTools.createBDVPath( viewId, 0, this.storageType ); + dataset = N5ApiTools.createBDVPath( viewId, 0, this.storageType ); } // @@ -264,7 +265,7 @@ else if ( FloatType.class.isInstance( type ) ) try { // the first time the XML does not exist, thus instantiate is not called - if ( ExportTools.writeBDVMetaData( + if ( SpimData2Tools.writeBDVMetaData( driverVolumeWriter, storageType, dataType, @@ -359,7 +360,7 @@ else if ( FloatType.class.isInstance( type ) ) for ( int level = 1; level < this.downsampling.length; ++level ) { final int s = level; - final int[] ds = N5ResaveTools.computeRelativeDownsampling( this.downsampling, level ); + final int[] ds = N5ApiTools.computeRelativeDownsampling( this.downsampling, level ); IOFunctions.println( "Downsampling: " + Util.printCoordinates( this.downsampling[ level ] ) + " with relative downsampling of " + Util.printCoordinates( ds )); @@ -368,7 +369,7 @@ else if ( FloatType.class.isInstance( type ) ) dim[ d ] = previousDim[ d ] / ds[ d ]; final String datasetDownsampling = bdv ? - ExportTools.createDownsampledBDVPath(dataset, level, storageType) : dataset.substring(0, dataset.length() - 3) + "/s" + level; + N5ApiTools.createDownsampledBDVPath(dataset, level, storageType) : dataset.substring(0, dataset.length() - 3) + "/s" + level; try { @@ -400,7 +401,7 @@ else if ( FloatType.class.isInstance( type ) ) final Function gridBlockToDatasetPreviousScale = (gridBlock -> datasetPrev); // there is only one ViewId e.submit( () -> gridDS.parallelStream().forEach( - gridBlock -> N5ResaveTools.writeDownsampledBlock( + gridBlock -> N5ApiTools.writeDownsampledBlock( driverVolumeWriter, gridBlockToDataset, gridBlockToDatasetPreviousScale, diff --git a/src/main/java/net/preibisch/mvrecon/process/export/ExportTools.java b/src/main/java/net/preibisch/mvrecon/process/export/ExportTools.java deleted file mode 100644 index ddd1efba..00000000 --- a/src/main/java/net/preibisch/mvrecon/process/export/ExportTools.java +++ /dev/null @@ -1,868 +0,0 @@ -/*- - * #%L - * Software for the reconstruction of multi-view microscopic acquisitions - * like Selective Plane Illumination Microscopy (SPIM) Data. - * %% - * Copyright (C) 2012 - 2024 Multiview Reconstruction developers. - * %% - * This program is free software: you can redistribute it and/or modify - * it under the terms of the GNU General Public License as - * published by the Free Software Foundation, either version 2 of the - * License, or (at your option) any later version. 
- * - * This program is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - * GNU General Public License for more details. - * - * You should have received a copy of the GNU General Public - * License along with this program. If not, see - * . - * #L% - */ -package net.preibisch.mvrecon.process.export; - -import java.io.File; -import java.io.IOException; -import java.io.Serializable; -import java.net.URI; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.function.BiFunction; -import java.util.function.Function; - -import org.janelia.saalfeldlab.n5.Compression; -import org.janelia.saalfeldlab.n5.DataType; -import org.janelia.saalfeldlab.n5.N5Writer; -import org.janelia.saalfeldlab.n5.RawCompression; -import org.janelia.saalfeldlab.n5.imglib2.N5Utils; - -import bdv.export.ExportMipmapInfo; -import bdv.export.ProposeMipmaps; -import bdv.img.hdf5.Hdf5ImageLoader; -import bdv.img.n5.N5ImageLoader; -import mpicbg.spim.data.SpimDataException; -import mpicbg.spim.data.generic.sequence.BasicViewSetup; -import mpicbg.spim.data.registration.ViewRegistration; -import mpicbg.spim.data.registration.ViewRegistrations; -import mpicbg.spim.data.sequence.Angle; -import mpicbg.spim.data.sequence.Channel; -import mpicbg.spim.data.sequence.FinalVoxelDimensions; -import mpicbg.spim.data.sequence.Illumination; -import mpicbg.spim.data.sequence.SequenceDescription; -import mpicbg.spim.data.sequence.Tile; -import mpicbg.spim.data.sequence.TimePoint; -import mpicbg.spim.data.sequence.TimePoints; -import mpicbg.spim.data.sequence.ViewDescription; -import mpicbg.spim.data.sequence.ViewId; -import mpicbg.spim.data.sequence.ViewSetup; -import mpicbg.spim.data.sequence.VoxelDimensions; -import net.imglib2.Dimensions; -import net.imglib2.FinalDimensions; -import net.imglib2.img.Img; -import net.imglib2.img.array.ArrayImgs; -import net.imglib2.type.numeric.integer.IntType; -import net.imglib2.type.numeric.real.DoubleType; -import net.imglib2.util.Intervals; -import net.imglib2.util.Pair; -import net.imglib2.util.ValuePair; -import net.preibisch.legacy.io.IOFunctions; -import net.preibisch.mvrecon.fiji.spimdata.SpimData2; -import net.preibisch.mvrecon.fiji.spimdata.XmlIoSpimData2; -import net.preibisch.mvrecon.fiji.spimdata.boundingbox.BoundingBoxes; -import net.preibisch.mvrecon.fiji.spimdata.intensityadjust.IntensityAdjustments; -import net.preibisch.mvrecon.fiji.spimdata.interestpoints.ViewInterestPoints; -import net.preibisch.mvrecon.fiji.spimdata.pointspreadfunctions.PointSpreadFunctions; -import net.preibisch.mvrecon.fiji.spimdata.stitchingresults.StitchingResults; -import net.preibisch.mvrecon.process.export.ExportN5API.StorageType; -import net.preibisch.mvrecon.process.interestpointregistration.pairwise.constellation.grouping.Group; -import net.preibisch.mvrecon.process.resave.N5ResaveTools; -import util.URITools; - -public class ExportTools { - - public static int[][] estimateMultiResPyramid( final Dimensions dimensions, final double aniso ) - { - final VoxelDimensions v = new FinalVoxelDimensions( "px", 1.0, 1.0, Double.isNaN( aniso ) ? 
1.0 : aniso ); - final BasicViewSetup setup = new BasicViewSetup(0, "fusion", dimensions, v ); - final ExportMipmapInfo emi = ProposeMipmaps.proposeMipmaps( setup ); - - return emi.getExportResolutions(); - } - - public static MultiResolutionLevelInfo[] setupBdvDatasetsHDF5( - final N5Writer driverVolumeWriter, - final ViewId viewId, - final int[] blockSize, - final int[][] downsamplings ) - { - final String subdivisionsDatasets = "s" + String.format("%02d", viewId.getViewSetupId()) + "/subdivisions"; - final String resolutionsDatasets = "s" + String.format("%02d", viewId.getViewSetupId()) + "/resolutions"; - - if ( driverVolumeWriter.datasetExists( subdivisionsDatasets ) && driverVolumeWriter.datasetExists( resolutionsDatasets ) ) - { - // TODO: test that the values are consistent? - return null; - } - - final Img subdivisions; - final Img resolutions; - - if ( downsamplings == null || downsamplings.length == 0 ) - { - subdivisions = ArrayImgs.ints( blockSize, new long[] { 3, 1 } ); // blocksize - resolutions = ArrayImgs.doubles( new double[] { 1,1,1 }, new long[] { 3, 1 } ); // downsampling - } - else - { - final int[] blocksizes = new int[ 3 * downsamplings.length ]; - final double[] downsamples = new double[ 3 * downsamplings.length ]; - - int i = 0; - for ( int level = 0; level < downsamplings.length; ++level ) - { - downsamples[ i ] = downsamplings[ level ][ 0 ]; - blocksizes[ i++ ] = blockSize[ 0 ]; - downsamples[ i ] = downsamplings[ level ][ 1 ]; - blocksizes[ i++ ] = blockSize[ 1 ]; - downsamples[ i ] = downsamplings[ level ][ 2 ]; - blocksizes[ i++ ] = blockSize[ 2 ]; - } - - subdivisions = ArrayImgs.ints( blocksizes, new long[] { 3, downsamplings.length } ); // blocksize - resolutions = ArrayImgs.doubles( downsamples, new long[] { 3, downsamplings.length } ); // downsampling - } - - driverVolumeWriter.createDataset( - subdivisionsDatasets, - subdivisions.dimensionsAsLongArray(),// new long[] { 3, 1 }, - new int[] { (int)subdivisions.dimension( 0 ), (int)subdivisions.dimension( 1 ) }, //new int[] { 3, 1 }, - DataType.INT32, - new RawCompression() ); - - driverVolumeWriter.createDataset( - resolutionsDatasets, - resolutions.dimensionsAsLongArray(),// new long[] { 3, 1 }, - new int[] { (int)resolutions.dimension( 0 ), (int)resolutions.dimension( 1 ) },//new int[] { 3, 1 }, - DataType.FLOAT64, - new RawCompression() ); - - N5Utils.saveBlock(subdivisions, driverVolumeWriter, "s" + String.format("%02d", viewId.getViewSetupId()) + "/subdivisions", new long[] {0,0,0} ); - N5Utils.saveBlock(resolutions, driverVolumeWriter, "s" + String.format("%02d", viewId.getViewSetupId()) + "/resolutions", new long[] {0,0,0} ); - - return null; - } - - public static MultiResolutionLevelInfo[] setupBdvDatasetsN5( - final N5Writer driverVolumeWriter, - final ViewId viewId, - final DataType dataType, - final long[] dimensions, - final Compression compression, - final int[] blockSize, - final int[][] downsamplings ) - { - final String s0Dataset = createBDVPath( viewId, 0, StorageType.N5 ); - - driverVolumeWriter.createDataset( - s0Dataset, - dimensions, - blockSize, - dataType, - compression ); - - final String setupDataset = s0Dataset.substring(0, s0Dataset.indexOf( "/timepoint" )); - final String timepointDataset = s0Dataset.substring(0, s0Dataset.indexOf("/s0" )); - - final Map> attribs = driverVolumeWriter.listAttributes( setupDataset ); - - // if viewsetup does not exist - if ( !attribs.containsKey( "dataType" ) || !attribs.containsKey( "blockSize" ) || !attribs.containsKey( "dimensions" ) || 
!attribs.containsKey( "compression" ) || !attribs.containsKey( "downsamplingFactors" ) ) - { - // set N5 attributes for setup - // e.g. {"compression":{"type":"gzip","useZlib":false,"level":1},"downsamplingFactors":[[1,1,1],[2,2,1]],"blockSize":[128,128,32],"dataType":"uint16","dimensions":[512,512,86]} - IOFunctions.println( "setting attributes for '" + "setup" + viewId.getViewSetupId() + "'"); - - driverVolumeWriter.setAttribute(setupDataset, "dataType", dataType ); - driverVolumeWriter.setAttribute(setupDataset, "blockSize", blockSize ); - driverVolumeWriter.setAttribute(setupDataset, "dimensions", dimensions ); - driverVolumeWriter.setAttribute(setupDataset, "compression", compression ); - - if ( downsamplings == null || downsamplings.length == 0 ) - driverVolumeWriter.setAttribute(setupDataset, "downsamplingFactors", new int[][] {{1,1,1}} ); - else - driverVolumeWriter.setAttribute(setupDataset, "downsamplingFactors", downsamplings ); - } - else - { - // TODO: test that the values are consistent? - } - - // set N5 attributes for timepoint - // e.g. {"resolution":[1.0,1.0,3.0],"saved_completely":true,"multiScale":true} - driverVolumeWriter.setAttribute(timepointDataset, "resolution", new double[] {1,1,1} ); - driverVolumeWriter.setAttribute(timepointDataset, "saved_completely", true ); - driverVolumeWriter.setAttribute(timepointDataset, "multiScale", downsamplings != null && downsamplings.length != 0 ); - - final MultiResolutionLevelInfo[] mrInfo; - - if ( downsamplings == null || downsamplings.length == 0 ) - { - // set additional N5 attributes for s0 dataset - driverVolumeWriter.setAttribute( s0Dataset, "downsamplingFactors", new int[] {1,1,1} ); - - mrInfo = new MultiResolutionLevelInfo[] { new MultiResolutionLevelInfo( s0Dataset, dimensions.clone(), dataType, new int[] {1,1,1}, new int[] {1,1,1}, blockSize ) }; - } - else - { - mrInfo = setupMultiResolutionPyramid( - driverVolumeWriter, - viewId, - N5ResaveTools.viewIdToDatasetBdv( StorageType.N5 ), - dataType, - dimensions, - compression, - blockSize, - downsamplings); - - driverVolumeWriter.setAttribute( s0Dataset, "downsamplingFactors", downsamplings[ 0 ] ); - - for ( int level = 1; level < downsamplings.length; ++level ) - { - // set additional N5 attributes for s0 ... 
sN datasets - driverVolumeWriter.setAttribute( mrInfo[ level ].dataset, "downsamplingFactors", downsamplings[ level ] ); - } - } - - return mrInfo; - } - - public static class MultiResolutionLevelInfo implements Serializable - { - private static final long serialVersionUID = 5392269335394869108L; - - final public int[] relativeDownsampling, absoluteDownsampling, blockSize; - final public long[] dimensions; - final public String dataset; - final public DataType dataType; - - public MultiResolutionLevelInfo( - final String dataset, - final long[] dimensions, - final DataType dataType, - final int[] relativeDownsampling, - final int[] absoluteDownsampling, - final int[] blockSize ) - { - this.dataset = dataset; - this.dimensions = dimensions; - this.dataType = dataType; - this.relativeDownsampling = relativeDownsampling; - this.absoluteDownsampling = absoluteDownsampling; - this.blockSize = blockSize; - } - } - - public static MultiResolutionLevelInfo[] setupMultiResolutionPyramid( - final N5Writer driverVolumeWriter, - final ViewId viewId, - final BiFunction viewIdToDataset, - final DataType dataType, - final long[] dimensionsS0, - final Compression compression, - final int[] blockSize, - final int[][] downsamplings ) - { - final MultiResolutionLevelInfo[] mrInfo = new MultiResolutionLevelInfo[ downsamplings.length]; - - mrInfo[ 0 ] = new MultiResolutionLevelInfo( - viewIdToDataset.apply( viewId, 0 ), dimensionsS0.clone(), dataType, downsamplings[ 0 ], downsamplings[ 0 ], blockSize ); - - long[] previousDim = dimensionsS0.clone(); - - for ( int level = 1; level < downsamplings.length; ++level ) - { - final int[] relativeDownsampling = N5ResaveTools.computeRelativeDownsampling( downsamplings, level ); - - final String datasetLevel = viewIdToDataset.apply( viewId, level ); - - final long[] dim = new long[ previousDim.length ]; - for ( int d = 0; d < dim.length; ++d ) - dim[ d ] = previousDim[ d ] / relativeDownsampling[ d ]; - - mrInfo[ level ] = new MultiResolutionLevelInfo( - datasetLevel, dim.clone(), dataType, relativeDownsampling, downsamplings[ level ], blockSize ); - - driverVolumeWriter.createDataset( - datasetLevel, - dim, - blockSize, - dataType, - compression ); - - previousDim = dim; - } - - return mrInfo; - } - - public static MultiResolutionLevelInfo[] writeBDVMetaData( - final N5Writer driverVolumeWriter, - final StorageType storageType, - final DataType dataType, - final long[] dimensions, - final Compression compression, - final int[] blockSize, - final int[][] downsamplings, - final ViewId viewId, - final URI n5PathURI, - final URI xmlOutPathURI, - final InstantiateViewSetup instantiateViewSetup ) throws SpimDataException, IOException - { - IOFunctions.println( "Creating datasets and writing BDV-metadata ... " ); - - //final String xmlPath = null; - if ( StorageType.N5.equals(storageType) ) - { - System.out.println( "XML: " + xmlOutPathURI ); - - final Pair exists = writeSpimData( - viewId, - storageType, - dimensions, - n5PathURI, - xmlOutPathURI, - instantiateViewSetup ); - - if ( exists == null ) - return null; - - return setupBdvDatasetsN5( driverVolumeWriter, viewId, dataType, dimensions, compression, blockSize, downsamplings ); - - /* - String ds = "setup" + viewId.getViewSetupId(); - - // if viewsetup does not exist - if ( !exists.getB() ) - { - // set N5 attributes for setup - // e.g. 
{"compression":{"type":"gzip","useZlib":false,"level":1},"downsamplingFactors":[[1,1,1],[2,2,1]],"blockSize":[128,128,32],"dataType":"uint16","dimensions":[512,512,86]} - System.out.println( "setting attributes for '" + "setup" + viewId.getViewSetupId() + "'"); - driverVolumeWriter.setAttribute(ds, "dataType", dataType ); - driverVolumeWriter.setAttribute(ds, "blockSize", blockSize ); - driverVolumeWriter.setAttribute(ds, "dimensions", dimensions ); - driverVolumeWriter.setAttribute(ds, "compression", compression ); - - if ( downsamplings == null || downsamplings.length == 0 ) - driverVolumeWriter.setAttribute(ds, "downsamplingFactors", new int[][] {{1,1,1}} ); - else - driverVolumeWriter.setAttribute(ds, "downsamplingFactors", downsamplings ); - } - - // set N5 attributes for timepoint - // e.g. {"resolution":[1.0,1.0,3.0],"saved_completely":true,"multiScale":true} - ds ="setup" + viewId.getViewSetupId() + "/" + "timepoint" + viewId.getTimePointId(); - driverVolumeWriter.setAttribute(ds, "resolution", new double[] {1,1,1} ); - driverVolumeWriter.setAttribute(ds, "saved_completely", true ); - driverVolumeWriter.setAttribute(ds, "multiScale", downsamplings != null && downsamplings.length != 0 ); - - if ( downsamplings == null || downsamplings.length == 0 ) - { - // set additional N5 attributes for s0 dataset - ds = ds + "/s0"; - driverVolumeWriter.createGroup( ds ); - driverVolumeWriter.setAttribute(ds, "downsamplingFactors", new int[] {1,1,1} ); - } - else - { - for ( int level = 0; level < downsamplings.length; ++level ) - { - // set additional N5 attributes for s0 ... sN datasets - final String dsLevel = ds + "/s" + level; - driverVolumeWriter.createGroup( dsLevel ); - driverVolumeWriter.setAttribute(dsLevel, "downsamplingFactors", downsamplings[ level ] ); - } - } - - return true;*/ - } - else if ( StorageType.HDF5.equals(storageType) ) - { - System.out.println( "XML: " + xmlOutPathURI ); - - final Pair exists = writeSpimData( - viewId, - storageType, - dimensions, - n5PathURI, - xmlOutPathURI, - instantiateViewSetup ); - - if ( exists == null ) - return null; - - return setupBdvDatasetsHDF5( - driverVolumeWriter, - viewId, - blockSize, - downsamplings ); - - /* - // if viewsetup does not exist - if ( !exists.getB() ) - { - final Img subdivisions; - final Img resolutions; - - if ( downsamplings == null || downsamplings.length == 0 ) - { - subdivisions = ArrayImgs.ints( blockSize, new long[] { 3, 1 } ); // blocksize - resolutions = ArrayImgs.doubles( new double[] { 1,1,1 }, new long[] { 3, 1 } ); // downsampling - } - else - { - final int[] blocksizes = new int[ 3 * downsamplings.length ]; - final double[] downsamples = new double[ 3 * downsamplings.length ]; - - int i = 0; - for ( int level = 0; level < downsamplings.length; ++level ) - { - downsamples[ i ] = downsamplings[ level ][ 0 ]; - blocksizes[ i++ ] = blockSize[ 0 ]; - downsamples[ i ] = downsamplings[ level ][ 1 ]; - blocksizes[ i++ ] = blockSize[ 1 ]; - downsamples[ i ] = downsamplings[ level ][ 2 ]; - blocksizes[ i++ ] = blockSize[ 2 ]; - } - - subdivisions = ArrayImgs.ints( blocksizes, new long[] { 3, downsamplings.length } ); // blocksize - resolutions = ArrayImgs.doubles( downsamples, new long[] { 3, downsamplings.length } ); // downsampling - } - - driverVolumeWriter.createDataset( - "s" + String.format("%02d", viewId.getViewSetupId()) + "/subdivisions", - subdivisions.dimensionsAsLongArray(),// new long[] { 3, 1 }, - new int[] { (int)subdivisions.dimension( 0 ), (int)subdivisions.dimension( 1 ) }, //new int[] { 3, 1 }, - 
DataType.INT32, - new RawCompression() ); - - driverVolumeWriter.createDataset( - "s" + String.format("%02d", viewId.getViewSetupId()) + "/resolutions", - resolutions.dimensionsAsLongArray(),// new long[] { 3, 1 }, - new int[] { (int)resolutions.dimension( 0 ), (int)resolutions.dimension( 1 ) },//new int[] { 3, 1 }, - DataType.FLOAT64, - new RawCompression() ); - - N5Utils.saveBlock(subdivisions, driverVolumeWriter, "s" + String.format("%02d", viewId.getViewSetupId()) + "/subdivisions", new long[] {0,0,0} ); - N5Utils.saveBlock(resolutions, driverVolumeWriter, "s" + String.format("%02d", viewId.getViewSetupId()) + "/resolutions", new long[] {0,0,0} ); - } - else - { - return true; - } - */ - } - else - { - IOFunctions.println( "BDV-compatible dataset cannot be written for " + storageType + " (yet)."); - return null; - } - } - - public static Pair writeSpimData( - final ViewId viewId, - final StorageType storageType, - final long[] dimensions, - final URI n5PathURI, - final URI xmlOutPathURI, - final InstantiateViewSetup instantiateViewSetup ) throws SpimDataException - { - SpimData2 existingSpimData; - - try - { - existingSpimData = new XmlIoSpimData2().load( xmlOutPathURI ); - } - catch (Exception e ) - { - existingSpimData = null; - } - - if ( existingSpimData != null ) //xmlOutPath.exists() ) - { - System.out.println( "XML exists. Parsing and adding."); - - boolean tpExists = false; - boolean viewSetupExists = false; - - for ( final ViewDescription viewId2 : existingSpimData.getSequenceDescription().getViewDescriptions().values() ) - { - /* - // uncommented this because if you make a second timepoint and do not add missing views, they all exist already - if ( viewId2.equals( viewId ) ) - { - IOFunctions.println( "ViewId you specified (" + Group.pvid(viewId) + ") already exists in the XML, cannot continue." ); - return null; - } - */ - - if ( viewId2.getTimePointId() == viewId.getTimePointId() ) - tpExists = true; - - if ( viewId2.getViewSetupId() == viewId.getViewSetupId() ) - { - viewSetupExists = true; - - // dimensions have to match - if ( !Intervals.equalDimensions( new FinalDimensions( dimensions ), viewId2.getViewSetup().getSize() ) ) - { - IOFunctions.println( "ViewSetup you specified (" + Group.pvid(viewId) + ") already exists in the XML, but with different dimensions, cannot continue." 
); - return null; - } - } - } - - final List setups = new ArrayList<>( existingSpimData.getSequenceDescription().getViewSetups().values() ); - - if ( !viewSetupExists ) - setups.add( instantiateViewSetup.instantiate( viewId, tpExists, new FinalDimensions( dimensions ), setups ) ); - - final TimePoints timepoints; - if ( !tpExists) { - final List tps = new ArrayList<>(existingSpimData.getSequenceDescription().getTimePoints().getTimePointsOrdered()); - tps.add(new TimePoint(viewId.getTimePointId())); - timepoints = new TimePoints(tps); - } - else - { - timepoints = existingSpimData.getSequenceDescription().getTimePoints(); - } - - final Map registrations = existingSpimData.getViewRegistrations().getViewRegistrations(); - registrations.put( viewId, new ViewRegistration( viewId.getTimePointId(), viewId.getViewSetupId() ) ); - final ViewRegistrations viewRegistrations = new ViewRegistrations( registrations ); - - final SequenceDescription sequence = new SequenceDescription(timepoints, setups, null); - - if ( StorageType.N5.equals(storageType) ) - sequence.setImgLoader( new N5ImageLoader( n5PathURI, sequence) ); - else if ( StorageType.HDF5.equals(storageType) ) - sequence.setImgLoader( new Hdf5ImageLoader( new File( URITools.removeFilePrefix( n5PathURI ) ), null, sequence) ); - else - throw new RuntimeException( storageType + " not supported." ); - - final SpimData2 spimDataNew = - new SpimData2( - existingSpimData.getBasePathURI(), - sequence, - viewRegistrations, - existingSpimData.getViewInterestPoints(), - existingSpimData.getBoundingBoxes(), - existingSpimData.getPointSpreadFunctions(), - existingSpimData.getStitchingResults(), - existingSpimData.getIntensityAdjustments() ); - - new XmlIoSpimData2().save( spimDataNew, existingSpimData.getBasePathURI() ); - - return new ValuePair<>(tpExists, viewSetupExists); - } - else - { - System.out.println( "New XML."); - - final ArrayList< ViewSetup > setups = new ArrayList<>(); - - setups.add( instantiateViewSetup.instantiate( viewId, false, new FinalDimensions( dimensions ), setups ) ); - /* - final Channel c0 = new Channel( 0 ); - final Angle a0 = new Angle( 0 ); - final Illumination i0 = new Illumination( 0 ); - final Tile t0 = new Tile( 0 ); - - final Dimensions d0 = new FinalDimensions( dimensions ); - final VoxelDimensions vd0 = new FinalVoxelDimensions( "px", 1, 1, 1 ); - setups.add( new ViewSetup( viewId.getViewSetupId(), "setup " + viewId.getViewSetupId(), d0, vd0, t0, c0, a0, i0 ) );*/ - - final ArrayList< TimePoint > tps = new ArrayList<>(); - tps.add( new TimePoint( viewId.getTimePointId() ) ); - final TimePoints timepoints = new TimePoints( tps ); - - final HashMap< ViewId, ViewRegistration > registrations = new HashMap<>(); - registrations.put( viewId, new ViewRegistration( viewId.getTimePointId(), viewId.getViewSetupId() ) ); - final ViewRegistrations viewRegistrations = new ViewRegistrations( registrations ); - - final SequenceDescription sequence = new SequenceDescription(timepoints, setups, null); - if ( StorageType.N5.equals(storageType) ) - sequence.setImgLoader( new N5ImageLoader( n5PathURI, sequence) ); - else if ( StorageType.HDF5.equals(storageType) ) - sequence.setImgLoader( new Hdf5ImageLoader( new File( URITools.removeFilePrefix( n5PathURI ) ), null, sequence) ); - else - throw new RuntimeException( storageType + " not supported." 
); - - final SpimData2 spimData = new SpimData2( xmlOutPathURI, sequence, viewRegistrations, new ViewInterestPoints(), new BoundingBoxes(), new PointSpreadFunctions(), new StitchingResults(), new IntensityAdjustments() ); - - new XmlIoSpimData2().save( spimData, xmlOutPathURI ); - - return new ValuePair<>(false, false); - } - - } - - public static ViewId getViewId(final String bdvString ) - { - final String[] entries = bdvString.trim().split( "," ); - final int timepointId = Integer.parseInt( entries[ 0 ].trim() ); - final int viewSetupId = Integer.parseInt( entries[ 1 ].trim() ); - - return new ViewId(timepointId, viewSetupId); - } - - public static String createBDVPath(final String bdvString, final int level, final StorageType storageType) - { - return createBDVPath( getViewId( bdvString ), level, storageType); - } - - public static String createBDVPath( final ViewId viewId, final int level, final StorageType storageType) - { - String path = null; - - if ( StorageType.N5.equals(storageType) ) - { - path = "setup" + viewId.getViewSetupId() + "/" + "timepoint" + viewId.getTimePointId() + "/s" + level; - } - else if ( StorageType.HDF5.equals(storageType) ) - { - path = "t" + String.format("%05d", viewId.getTimePointId()) + "/" + "s" + String.format("%02d", viewId.getViewSetupId()) + "/" + level + "/cells"; - } - else - { - new RuntimeException( "BDV-compatible dataset cannot be written for " + storageType + " (yet)."); - } - - return path; - } - - public static String createDownsampledBDVPath( final String s0path, final int level, final StorageType storageType ) - { - if ( StorageType.N5.equals(storageType) ) - { - return s0path.substring( 0, s0path.length() - 3 ) + "/s" + level; - } - else if ( StorageType.HDF5.equals(storageType) ) - { - return s0path.substring( 0, s0path.length() - 8 ) + "/" + level + "/cells"; - } - else - { - throw new RuntimeException( "BDV-compatible dataset cannot be written for " + storageType + " (yet)."); - } - } - - @FunctionalInterface - public static interface InstantiateViewSetup - { - public ViewSetup instantiate( final ViewId viewId, final boolean tpExists, final Dimensions d, final List existingSetups ); - } - - public static class InstantiateViewSetupBigStitcher implements InstantiateViewSetup - { - final int splittingType; - - // count the fusion groups - int count = 0; - - // new indicies - int newA = -1; - int newC = -1; - int newI = -1; - int newT = -1; - - public InstantiateViewSetupBigStitcher( - final int splittingType) - { - this.splittingType = splittingType; - } - - @Override - public ViewSetup instantiate( final ViewId viewId, final boolean tpExists, final Dimensions d, final List existingSetups ) - { - if ( existingSetups == null || existingSetups.size() == 0 ) - { - newA = 0; - newC = 0; - newI = 0; - newT = 0; - - } - else - { - final Iterator i = existingSetups.iterator(); - ViewSetup tmp = i.next(); - - Channel c0 = tmp.getChannel(); - Angle a0 = tmp.getAngle(); - Illumination i0 = tmp.getIllumination(); - Tile t0 = tmp.getTile(); - - // get the highest id for all entities - while ( i.hasNext() ) - { - tmp = i.next(); - if ( tmp.getChannel().getId() > c0.getId() ) - c0 = tmp.getChannel(); - if ( tmp.getAngle().getId() > a0.getId() ) - a0 = tmp.getAngle(); - if ( tmp.getIllumination().getId() > i0.getId() ) - i0 = tmp.getIllumination(); - if ( tmp.getTile().getId() > t0.getId() ) - t0 = tmp.getTile(); - } - - // new unique id's for all, initialized once - if ( newA < 0 ) - { - newA = a0.getId() + 1; - newC = c0.getId() + 1; - newI = 
i0.getId() + 1; - newT = t0.getId() + 1; - } - } - - Angle a0; - Channel c0; - Illumination i0; - Tile t0; - - // 0 == "Each timepoint & channel", - // 1 == "Each timepoint, channel & illumination", - // 2 == "All views together", - // 3 == "Each view" - if ( splittingType == 0 ) - { - // a new channel for each fusion group - c0 = new Channel( newC + count ); - - a0 = new Angle( newA ); - i0 = new Illumination( newI ); - t0 = new Tile( newT ); - } - else if ( splittingType == 1 ) - { - // TODO: we need to know what changed - // a new channel and illumination for each fusion group - c0 = new Channel( newC + count ); - i0 = new Illumination( newI + count ); - - a0 = new Angle( newA ); - t0 = new Tile( newT ); - } - else if ( splittingType == 2 ) - { - // a new channel, angle, tile and illumination for the single fusion group - a0 = new Angle( newA ); - c0 = new Channel( newC ); - i0 = new Illumination( newI ); - t0 = new Tile( newT ); - } - else if ( splittingType == 3 ) - { - // TODO: use previous ones - // TODO: we need to know what changed - c0 = new Channel( newC + count ); - i0 = new Illumination( newI + count ); - a0 = new Angle( newA + count ); - t0 = new Tile( newT + count ); - } - else - { - IOFunctions.println( "SplittingType " + splittingType + " unknown. Stopping."); - return null; - } - - final VoxelDimensions vd0 = new FinalVoxelDimensions( "px", 1, 1, 1 ); - - ++count; - - return new ViewSetup( viewId.getViewSetupId(), "setup " + viewId.getViewSetupId(), d, vd0, t0, c0, a0, i0 ); - } - } - - public static class InstantiateViewSetupBigStitcherSpark implements InstantiateViewSetup - { - final String angleIds; - final String illuminationIds; - final String channelIds; - final String tileIds; - - public InstantiateViewSetupBigStitcherSpark( - final String angleIds, - final String illuminationIds, - final String channelIds, - final String tileIds ) - { - this.angleIds = angleIds; - this.illuminationIds = illuminationIds; - this.channelIds = channelIds; - this.tileIds = tileIds; - } - - @Override - public ViewSetup instantiate( final ViewId viewId, final boolean tpExists, final Dimensions d, final List existingSetups ) - { - Angle a0; - Channel c0; - Illumination i0; - Tile t0; - - if ( existingSetups == null || existingSetups.size() == 0 ) - { - a0 = new Angle( 0 ); - c0 = new Channel( 0 ); - i0 = new Illumination( 0 ); - t0 = new Tile( 0 ); - } - else - { - final Iterator i = existingSetups.iterator(); - ViewSetup tmp = i.next(); - - c0 = tmp.getChannel(); - a0 = tmp.getAngle(); - i0 = tmp.getIllumination(); - t0 = tmp.getTile(); - - // get the highest id for all entities - while ( i.hasNext() ) - { - tmp = i.next(); - if ( tmp.getChannel().getId() > c0.getId() ) - c0 = tmp.getChannel(); - if ( tmp.getAngle().getId() > a0.getId() ) - a0 = tmp.getAngle(); - if ( tmp.getIllumination().getId() > i0.getId() ) - i0 = tmp.getIllumination(); - if ( tmp.getTile().getId() > t0.getId() ) - t0 = tmp.getTile(); - } - - if ( angleIds != null ) - a0 = new Angle( a0.getId() + 1 ); - if ( illuminationIds != null ) - i0 = new Illumination( i0.getId() + 1 ); - if ( tileIds != null ) - t0 = new Tile( t0.getId() + 1 ); - if ( tileIds != null || ( angleIds == null && illuminationIds == null && tileIds == null && tpExists ) ) // nothing was defined, then increase channel - c0 = new Channel( c0.getId() + 1 ); - } - - //final Dimensions d0 = new FinalDimensions( dimensions ); - final VoxelDimensions vd0 = new FinalVoxelDimensions( "px", 1, 1, 1 ); - - return new ViewSetup( viewId.getViewSetupId(), "setup 
" + viewId.getViewSetupId(), d, vd0, t0, c0, a0, i0 ); - } - } -} diff --git a/src/main/java/net/preibisch/mvrecon/process/resave/N5ResaveTools.java b/src/main/java/net/preibisch/mvrecon/process/n5api/N5ApiTools.java similarity index 53% rename from src/main/java/net/preibisch/mvrecon/process/resave/N5ResaveTools.java rename to src/main/java/net/preibisch/mvrecon/process/n5api/N5ApiTools.java index f643a278..4e237ff3 100644 --- a/src/main/java/net/preibisch/mvrecon/process/resave/N5ResaveTools.java +++ b/src/main/java/net/preibisch/mvrecon/process/n5api/N5ApiTools.java @@ -1,7 +1,7 @@ -package net.preibisch.mvrecon.process.resave; +package net.preibisch.mvrecon.process.n5api; +import java.io.Serializable; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.List; @@ -12,20 +12,30 @@ import org.janelia.saalfeldlab.n5.Compression; import org.janelia.saalfeldlab.n5.DataType; import org.janelia.saalfeldlab.n5.N5Writer; +import org.janelia.saalfeldlab.n5.RawCompression; import org.janelia.saalfeldlab.n5.imglib2.N5Utils; import bdv.export.ExportMipmapInfo; +import bdv.export.ProposeMipmaps; import mpicbg.spim.data.SpimData; import mpicbg.spim.data.generic.AbstractSpimData; +import mpicbg.spim.data.generic.sequence.BasicViewSetup; +import mpicbg.spim.data.sequence.FinalVoxelDimensions; import mpicbg.spim.data.sequence.SetupImgLoader; import mpicbg.spim.data.sequence.ViewDescription; import mpicbg.spim.data.sequence.ViewId; import mpicbg.spim.data.sequence.ViewSetup; +import mpicbg.spim.data.sequence.VoxelDimensions; +import net.imglib2.Dimensions; import net.imglib2.FinalInterval; import net.imglib2.RandomAccessibleInterval; +import net.imglib2.img.Img; +import net.imglib2.img.array.ArrayImgs; import net.imglib2.type.NativeType; +import net.imglib2.type.numeric.integer.IntType; import net.imglib2.type.numeric.integer.UnsignedByteType; import net.imglib2.type.numeric.integer.UnsignedShortType; +import net.imglib2.type.numeric.real.DoubleType; import net.imglib2.type.numeric.real.FloatType; import net.imglib2.util.Cast; import net.imglib2.util.Util; @@ -34,12 +44,10 @@ import net.preibisch.mvrecon.fiji.spimdata.SpimData2; import net.preibisch.mvrecon.process.downsampling.lazy.LazyHalfPixelDownsample2x; import net.preibisch.mvrecon.process.export.ExportN5API.StorageType; -import net.preibisch.mvrecon.process.export.ExportTools; -import net.preibisch.mvrecon.process.export.ExportTools.MultiResolutionLevelInfo; import net.preibisch.mvrecon.process.interestpointregistration.pairwise.constellation.grouping.Group; import util.Grid; -public class N5ResaveTools +public class N5ApiTools { public static ViewId gridBlockToViewId( final long[][] gridBlock ) { @@ -66,7 +74,7 @@ public static Function gridToDatasetBdv( final int level, fina */ public static Function viewIdToDatasetBdv( final int level, final StorageType storageType ) { - return (viewId) -> ExportTools.createBDVPath( viewId, level, storageType ); + return (viewId) -> createBDVPath( viewId, level, storageType ); } /** @@ -87,6 +95,280 @@ public static BiFunction gridToDatasetBdv( final Stor return (gridBlock, level) -> gridToDatasetBdv( level, storageType ).apply( gridBlock ); } + public static ViewId getViewId(final String bdvString ) + { + final String[] entries = bdvString.trim().split( "," ); + final int timepointId = Integer.parseInt( entries[ 0 ].trim() ); + final int viewSetupId = Integer.parseInt( entries[ 1 ].trim() ); + + return new ViewId(timepointId, viewSetupId); 
+
+	public static String createBDVPath(final String bdvString, final int level, final StorageType storageType)
+	{
+		return createBDVPath( getViewId( bdvString ), level, storageType);
+	}
+
+	public static String createBDVPath( final ViewId viewId, final int level, final StorageType storageType)
+	{
+		String path = null;
+
+		if ( StorageType.N5.equals(storageType) )
+		{
+			path = "setup" + viewId.getViewSetupId() + "/" + "timepoint" + viewId.getTimePointId() + "/s" + level;
+		}
+		else if ( StorageType.HDF5.equals(storageType) )
+		{
+			path = "t" + String.format("%05d", viewId.getTimePointId()) + "/" + "s" + String.format("%02d", viewId.getViewSetupId()) + "/" + level + "/cells";
+		}
+		else
+		{
+			throw new RuntimeException( "BDV-compatible dataset cannot be written for " + storageType + " (yet).");
+		}
+
+		return path;
+	}
+
+	public static String createDownsampledBDVPath( final String s0path, final int level, final StorageType storageType )
+	{
+		if ( StorageType.N5.equals(storageType) )
+		{
+			return s0path.substring( 0, s0path.length() - 3 ) + "/s" + level;
+		}
+		else if ( StorageType.HDF5.equals(storageType) )
+		{
+			return s0path.substring( 0, s0path.length() - 8 ) + "/" + level + "/cells";
+		}
+		else
+		{
+			throw new RuntimeException( "BDV-compatible dataset cannot be written for " + storageType + " (yet).");
+		}
+	}
+
+	public static int[][] estimateMultiResPyramid( final Dimensions dimensions, final double aniso )
+	{
+		final VoxelDimensions v = new FinalVoxelDimensions( "px", 1.0, 1.0, Double.isNaN( aniso ) ? 1.0 : aniso );
+		final BasicViewSetup setup = new BasicViewSetup(0, "fusion", dimensions, v );
+		final ExportMipmapInfo emi = ProposeMipmaps.proposeMipmaps( setup );
+
+		return emi.getExportResolutions();
+	}
+
+	public static class MultiResolutionLevelInfo implements Serializable
+	{
+		private static final long serialVersionUID = 5392269335394869108L;
+
+		final public int[] relativeDownsampling, absoluteDownsampling, blockSize;
+		final public long[] dimensions;
+		final public String dataset;
+		final public DataType dataType;
+
+		public MultiResolutionLevelInfo(
+				final String dataset,
+				final long[] dimensions,
+				final DataType dataType,
+				final int[] relativeDownsampling,
+				final int[] absoluteDownsampling,
+				final int[] blockSize )
+		{
+			this.dataset = dataset;
+			this.dimensions = dimensions;
+			this.dataType = dataType;
+			this.relativeDownsampling = relativeDownsampling;
+			this.absoluteDownsampling = absoluteDownsampling;
+			this.blockSize = blockSize;
+		}
+	}
+
+	public static MultiResolutionLevelInfo[] setupMultiResolutionPyramid(
+			final N5Writer driverVolumeWriter,
+			final ViewId viewId,
+			final BiFunction< ViewId, Integer, String > viewIdToDataset,
+			final DataType dataType,
+			final long[] dimensionsS0,
+			final Compression compression,
+			final int[] blockSize,
+			final int[][] downsamplings )
+	{
+		final MultiResolutionLevelInfo[] mrInfo = new MultiResolutionLevelInfo[ downsamplings.length];
+
+		mrInfo[ 0 ] = new MultiResolutionLevelInfo(
+				viewIdToDataset.apply( viewId, 0 ), dimensionsS0.clone(), dataType, downsamplings[ 0 ], downsamplings[ 0 ], blockSize );
+
+		long[] previousDim = dimensionsS0.clone();
+
+		for ( int level = 1; level < downsamplings.length; ++level )
+		{
+			final int[] relativeDownsampling = computeRelativeDownsampling( downsamplings, level );
+
+			final String datasetLevel = viewIdToDataset.apply( viewId, level );
+
+			final long[] dim = new long[ previousDim.length ];
+			for ( int d = 0; d < dim.length; ++d )
+				dim[ d ] = previousDim[ d ] / relativeDownsampling[ d ];
+
+			mrInfo[ level ] = new MultiResolutionLevelInfo(
+					datasetLevel, dim.clone(), dataType, relativeDownsampling, downsamplings[ level ], blockSize );
+
+			driverVolumeWriter.createDataset(
+					datasetLevel,
+					dim,
+					blockSize,
+					dataType,
+					compression );
+
+			previousDim = dim;
+		}
+
+		return mrInfo;
+	}
+
+	public static MultiResolutionLevelInfo[] setupBdvDatasetsHDF5(
+			final N5Writer driverVolumeWriter,
+			final ViewId viewId,
+			final int[] blockSize,
+			final int[][] downsamplings )
+	{
+		final String subdivisionsDatasets = "s" + String.format("%02d", viewId.getViewSetupId()) + "/subdivisions";
+		final String resolutionsDatasets = "s" + String.format("%02d", viewId.getViewSetupId()) + "/resolutions";
+
+		if ( driverVolumeWriter.datasetExists( subdivisionsDatasets ) && driverVolumeWriter.datasetExists( resolutionsDatasets ) )
+		{
+			// TODO: test that the values are consistent?
+			return null;
+		}
+
+		final Img< IntType > subdivisions;
+		final Img< DoubleType > resolutions;
+
+		if ( downsamplings == null || downsamplings.length == 0 )
+		{
+			subdivisions = ArrayImgs.ints( blockSize, new long[] { 3, 1 } ); // blocksize
+			resolutions = ArrayImgs.doubles( new double[] { 1,1,1 }, new long[] { 3, 1 } ); // downsampling
+		}
+		else
+		{
+			final int[] blocksizes = new int[ 3 * downsamplings.length ];
+			final double[] downsamples = new double[ 3 * downsamplings.length ];
+
+			int i = 0;
+			for ( int level = 0; level < downsamplings.length; ++level )
+			{
+				downsamples[ i ] = downsamplings[ level ][ 0 ];
+				blocksizes[ i++ ] = blockSize[ 0 ];
+				downsamples[ i ] = downsamplings[ level ][ 1 ];
+				blocksizes[ i++ ] = blockSize[ 1 ];
+				downsamples[ i ] = downsamplings[ level ][ 2 ];
+				blocksizes[ i++ ] = blockSize[ 2 ];
+			}
+
+			subdivisions = ArrayImgs.ints( blocksizes, new long[] { 3, downsamplings.length } ); // blocksize
+			resolutions = ArrayImgs.doubles( downsamples, new long[] { 3, downsamplings.length } ); // downsampling
+		}
+
+		driverVolumeWriter.createDataset(
+				subdivisionsDatasets,
+				subdivisions.dimensionsAsLongArray(),// new long[] { 3, 1 },
+				new int[] { (int)subdivisions.dimension( 0 ), (int)subdivisions.dimension( 1 ) }, //new int[] { 3, 1 },
+				DataType.INT32,
+				new RawCompression() );
+
+		driverVolumeWriter.createDataset(
+				resolutionsDatasets,
+				resolutions.dimensionsAsLongArray(),// new long[] { 3, 1 },
+				new int[] { (int)resolutions.dimension( 0 ), (int)resolutions.dimension( 1 ) },//new int[] { 3, 1 },
+				DataType.FLOAT64,
+				new RawCompression() );
+
+		N5Utils.saveBlock(subdivisions, driverVolumeWriter, "s" + String.format("%02d", viewId.getViewSetupId()) + "/subdivisions", new long[] {0,0,0} );
+		N5Utils.saveBlock(resolutions, driverVolumeWriter, "s" + String.format("%02d", viewId.getViewSetupId()) + "/resolutions", new long[] {0,0,0} );
+
+		return null; // TODO: this is not done.
+	}
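+
+	// e.g. for blockSize {16,16,16} and downsamplings {{1,1,1},{2,2,1}}, the code above writes
+	// "s00/resolutions" as the 3x2 matrix (1,1,1),(2,2,1) and "s00/subdivisions" as (16,16,16),(16,16,16),
+	// i.e. one column per mipmap level, matching the per-setup layout of the BDV/HDF5 format
+	// (illustrative numbers)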
+
+	public static MultiResolutionLevelInfo[] setupBdvDatasetsN5(
+			final N5Writer driverVolumeWriter,
+			final ViewId viewId,
+			final DataType dataType,
+			final long[] dimensions,
+			final Compression compression,
+			final int[] blockSize,
+			final int[][] downsamplings )
+	{
+		final String s0Dataset = createBDVPath( viewId, 0, StorageType.N5 );
+
+		driverVolumeWriter.createDataset(
+				s0Dataset,
+				dimensions,
+				blockSize,
+				dataType,
+				compression );
+
+		final String setupDataset = s0Dataset.substring(0, s0Dataset.indexOf( "/timepoint" ));
+		final String timepointDataset = s0Dataset.substring(0, s0Dataset.indexOf("/s0" ));
+
+		final Map< String, Class< ? > > attribs = driverVolumeWriter.listAttributes( setupDataset );
+
+		// if viewsetup does not exist
+		if ( !attribs.containsKey( "dataType" ) || !attribs.containsKey( "blockSize" ) || !attribs.containsKey( "dimensions" ) || !attribs.containsKey( "compression" ) || !attribs.containsKey( "downsamplingFactors" ) )
+		{
+			// set N5 attributes for setup
+			// e.g. {"compression":{"type":"gzip","useZlib":false,"level":1},"downsamplingFactors":[[1,1,1],[2,2,1]],"blockSize":[128,128,32],"dataType":"uint16","dimensions":[512,512,86]}
+			IOFunctions.println( "setting attributes for '" + "setup" + viewId.getViewSetupId() + "'");
+
+			driverVolumeWriter.setAttribute(setupDataset, "dataType", dataType );
+			driverVolumeWriter.setAttribute(setupDataset, "blockSize", blockSize );
+			driverVolumeWriter.setAttribute(setupDataset, "dimensions", dimensions );
+			driverVolumeWriter.setAttribute(setupDataset, "compression", compression );
+
+			if ( downsamplings == null || downsamplings.length == 0 )
+				driverVolumeWriter.setAttribute(setupDataset, "downsamplingFactors", new int[][] {{1,1,1}} );
+			else
+				driverVolumeWriter.setAttribute(setupDataset, "downsamplingFactors", downsamplings );
+		}
+		else
+		{
+			// TODO: test that the values are consistent?
+		}
+
+		// set N5 attributes for timepoint
+		// e.g. {"resolution":[1.0,1.0,3.0],"saved_completely":true,"multiScale":true}
+		driverVolumeWriter.setAttribute(timepointDataset, "resolution", new double[] {1,1,1} );
+		driverVolumeWriter.setAttribute(timepointDataset, "saved_completely", true );
+		driverVolumeWriter.setAttribute(timepointDataset, "multiScale", downsamplings != null && downsamplings.length != 0 );
+
+		final MultiResolutionLevelInfo[] mrInfo;
+
+		if ( downsamplings == null || downsamplings.length == 0 )
+		{
+			// set additional N5 attributes for s0 dataset
+			driverVolumeWriter.setAttribute( s0Dataset, "downsamplingFactors", new int[] {1,1,1} );
+
+			mrInfo = new MultiResolutionLevelInfo[] { new MultiResolutionLevelInfo( s0Dataset, dimensions.clone(), dataType, new int[] {1,1,1}, new int[] {1,1,1}, blockSize ) };
+		}
+		else
+		{
+			mrInfo = setupMultiResolutionPyramid(
+					driverVolumeWriter,
+					viewId,
+					viewIdToDatasetBdv( StorageType.N5 ),
+					dataType,
+					dimensions,
+					compression,
+					blockSize,
+					downsamplings);
+
+			driverVolumeWriter.setAttribute( s0Dataset, "downsamplingFactors", downsamplings[ 0 ] );
+
+			for ( int level = 1; level < downsamplings.length; ++level )
+			{
+				// set additional N5 attributes for s0 ... sN datasets
+				driverVolumeWriter.setAttribute( mrInfo[ level ].dataset, "downsamplingFactors", downsamplings[ level ] );
+			}
+		}
+
+		return mrInfo;
+	}
 
 	public static void writeDownsampledBlock(
 			final N5Writer n5,
 			final MultiResolutionLevelInfo mrInfo,
@@ -186,72 +468,6 @@ public static ArrayList assembleDownsamplingJobs(
 		return allBlocks;
 	}
 
-	/*
-	public static ArrayList prepareDownsampling(
-			final Collection< ? 
extends ViewId > viewIds, - final N5Writer n5, - final int level, - final int[] relativeDownsampling, - final int[] absoluteDownsampling, - final int[] blockSize, - final Compression compression ) - { - // all blocks (a.k.a. grids) across all ViewId's - final ArrayList allBlocks = new ArrayList<>(); - - System.out.println( "relativeDownsampling: " + Arrays.toString( relativeDownsampling )); - System.out.println( "absoluteDownsampling: " + Arrays.toString( absoluteDownsampling )); - System.out.println( "blockSize: " + Arrays.toString( blockSize )); - - // adjust dimensions - for ( final ViewId viewId : viewIds ) - { - final long[] previousDim = n5.getAttribute( "setup" + viewId.getViewSetupId() + "/timepoint" + viewId.getTimePointId() + "/s" + (level-1), "dimensions", long[].class ); - final long[] dim = new long[ previousDim.length ]; - for ( int d = 0; d < dim.length; ++d ) - dim[ d ] = previousDim[ d ] / relativeDownsampling[ d ]; - final DataType dataType = n5.getAttribute( "setup" + viewId.getViewSetupId(), "dataType", DataType.class ); - - System.out.println( Group.pvid( viewId ) + ": s" + (level-1) + " dim=" + Util.printCoordinates( previousDim ) + ", s" + level + " dim=" + Util.printCoordinates( dim ) + ", datatype=" + dataType ); - - final String dataset = "setup" + viewId.getViewSetupId() + "/timepoint" + viewId.getTimePointId() + "/s" + level; - - try - { - n5.createDataset( - dataset, - dim, // dimensions - blockSize, - dataType, - compression ); - } - catch ( Exception e ) - { - IOFunctions.println( "Couldn't create downsampling level " + level + ", dataset '" + dataset + "': " + e ); - return null; - } - - final List grid = Grid.create( - dim, - new int[] { - blockSize[0], - blockSize[1], - blockSize[2] - }, - blockSize); - - // add timepointId and ViewSetupId to the gridblock - for ( final long[][] gridBlock : grid ) - allBlocks.add( new long[][]{ - gridBlock[ 0 ].clone(), - gridBlock[ 1 ].clone(), - gridBlock[ 2 ].clone(), - new long[] { viewId.getTimePointId(), viewId.getViewSetupId() } - }); - } - - return allBlocks; - }*/ public static int[] computeRelativeDownsampling( final int[][] downsamplings, @@ -265,45 +481,6 @@ public static int[] computeRelativeDownsampling( return ds; } - /* - public static void createS0DatasetsBdvN5( - final N5Writer n5, - final Collection< ? extends ViewId > viewIds, - final Map dataTypes, - final Map viewSetupIdToDimensions, - final int[] blockSize, - final Compression compression ) - { - for ( final ViewId viewId : viewIds ) - { - IOFunctions.println( "Creating dataset for " + Group.pvid( viewId ) ); - - final String dataset = "setup" + viewId.getViewSetupId() + "/timepoint" + viewId.getTimePointId() + "/s0"; - //final DataType dataType = n5.getAttribute( "setup" + viewId.getViewSetupId(), "dataType", DataType.class ); - - n5.createDataset( - dataset, - viewSetupIdToDimensions.get( viewId.getViewSetupId() ), // dimensions - blockSize, - dataTypes.get( viewId.getViewSetupId() ), // datatype - compression ); - - System.out.println( "Setting attributes for " + Group.pvid( viewId ) ); - - // set N5 attributes for timepoint - // e.g. 
{"resolution":[1.0,1.0,3.0],"saved_completely":true,"multiScale":true} - String ds ="setup" + viewId.getViewSetupId() + "/" + "timepoint" + viewId.getTimePointId(); - n5.setAttribute(ds, "resolution", new double[] {1,1,1} ); - n5.setAttribute(ds, "saved_completely", true ); - n5.setAttribute(ds, "multiScale", true ); - - // set additional N5 attributes for s0 dataset - ds = ds + "/s0"; - n5.setAttribute(ds, "downsamplingFactors", new int[] {1,1,1} ); - } - } - */ - public static > void resaveS0Block( final SpimData2 data, final N5Writer n5, diff --git a/src/main/java/net/preibisch/mvrecon/process/n5api/SpimData2Tools.java b/src/main/java/net/preibisch/mvrecon/process/n5api/SpimData2Tools.java new file mode 100644 index 00000000..cc73b4b4 --- /dev/null +++ b/src/main/java/net/preibisch/mvrecon/process/n5api/SpimData2Tools.java @@ -0,0 +1,646 @@ +package net.preibisch.mvrecon.process.n5api; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.net.URI; +import java.text.ParseException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.janelia.saalfeldlab.n5.Compression; +import org.janelia.saalfeldlab.n5.DataType; +import org.janelia.saalfeldlab.n5.N5Writer; + +import bdv.img.hdf5.Hdf5ImageLoader; +import bdv.img.n5.N5ImageLoader; +import mpicbg.spim.data.SpimDataException; +import mpicbg.spim.data.registration.ViewRegistration; +import mpicbg.spim.data.registration.ViewRegistrations; +import mpicbg.spim.data.sequence.Angle; +import mpicbg.spim.data.sequence.Channel; +import mpicbg.spim.data.sequence.FinalVoxelDimensions; +import mpicbg.spim.data.sequence.Illumination; +import mpicbg.spim.data.sequence.MissingViews; +import mpicbg.spim.data.sequence.SequenceDescription; +import mpicbg.spim.data.sequence.Tile; +import mpicbg.spim.data.sequence.TimePoint; +import mpicbg.spim.data.sequence.TimePoints; +import mpicbg.spim.data.sequence.TimePointsPattern; +import mpicbg.spim.data.sequence.ViewDescription; +import mpicbg.spim.data.sequence.ViewId; +import mpicbg.spim.data.sequence.ViewSetup; +import mpicbg.spim.data.sequence.VoxelDimensions; +import net.imglib2.Dimensions; +import net.imglib2.FinalDimensions; +import net.imglib2.util.Intervals; +import net.imglib2.util.Pair; +import net.imglib2.util.ValuePair; +import net.preibisch.legacy.io.IOFunctions; +import net.preibisch.mvrecon.fiji.spimdata.SpimData2; +import net.preibisch.mvrecon.fiji.spimdata.XmlIoSpimData2; +import net.preibisch.mvrecon.fiji.spimdata.boundingbox.BoundingBoxes; +import net.preibisch.mvrecon.fiji.spimdata.intensityadjust.IntensityAdjustments; +import net.preibisch.mvrecon.fiji.spimdata.interestpoints.CorrespondingInterestPoints; +import net.preibisch.mvrecon.fiji.spimdata.interestpoints.InterestPoint; +import net.preibisch.mvrecon.fiji.spimdata.interestpoints.InterestPoints; +import net.preibisch.mvrecon.fiji.spimdata.interestpoints.ViewInterestPointLists; +import net.preibisch.mvrecon.fiji.spimdata.interestpoints.ViewInterestPoints; +import net.preibisch.mvrecon.fiji.spimdata.pointspreadfunctions.PointSpreadFunctions; +import net.preibisch.mvrecon.fiji.spimdata.stitchingresults.StitchingResults; +import net.preibisch.mvrecon.process.export.ExportN5API.StorageType; +import 
diff --git a/src/main/java/net/preibisch/mvrecon/process/n5api/SpimData2Tools.java b/src/main/java/net/preibisch/mvrecon/process/n5api/SpimData2Tools.java
new file mode 100644
index 00000000..cc73b4b4
--- /dev/null
+++ b/src/main/java/net/preibisch/mvrecon/process/n5api/SpimData2Tools.java
@@ -0,0 +1,646 @@
+package net.preibisch.mvrecon.process.n5api;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.net.URI;
+import java.text.ParseException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.janelia.saalfeldlab.n5.Compression;
+import org.janelia.saalfeldlab.n5.DataType;
+import org.janelia.saalfeldlab.n5.N5Writer;
+
+import bdv.img.hdf5.Hdf5ImageLoader;
+import bdv.img.n5.N5ImageLoader;
+import mpicbg.spim.data.SpimDataException;
+import mpicbg.spim.data.registration.ViewRegistration;
+import mpicbg.spim.data.registration.ViewRegistrations;
+import mpicbg.spim.data.sequence.Angle;
+import mpicbg.spim.data.sequence.Channel;
+import mpicbg.spim.data.sequence.FinalVoxelDimensions;
+import mpicbg.spim.data.sequence.Illumination;
+import mpicbg.spim.data.sequence.MissingViews;
+import mpicbg.spim.data.sequence.SequenceDescription;
+import mpicbg.spim.data.sequence.Tile;
+import mpicbg.spim.data.sequence.TimePoint;
+import mpicbg.spim.data.sequence.TimePoints;
+import mpicbg.spim.data.sequence.TimePointsPattern;
+import mpicbg.spim.data.sequence.ViewDescription;
+import mpicbg.spim.data.sequence.ViewId;
+import mpicbg.spim.data.sequence.ViewSetup;
+import mpicbg.spim.data.sequence.VoxelDimensions;
+import net.imglib2.Dimensions;
+import net.imglib2.FinalDimensions;
+import net.imglib2.util.Intervals;
+import net.imglib2.util.Pair;
+import net.imglib2.util.ValuePair;
+import net.preibisch.legacy.io.IOFunctions;
+import net.preibisch.mvrecon.fiji.spimdata.SpimData2;
+import net.preibisch.mvrecon.fiji.spimdata.XmlIoSpimData2;
+import net.preibisch.mvrecon.fiji.spimdata.boundingbox.BoundingBoxes;
+import net.preibisch.mvrecon.fiji.spimdata.intensityadjust.IntensityAdjustments;
+import net.preibisch.mvrecon.fiji.spimdata.interestpoints.CorrespondingInterestPoints;
+import net.preibisch.mvrecon.fiji.spimdata.interestpoints.InterestPoint;
+import net.preibisch.mvrecon.fiji.spimdata.interestpoints.InterestPoints;
+import net.preibisch.mvrecon.fiji.spimdata.interestpoints.ViewInterestPointLists;
+import net.preibisch.mvrecon.fiji.spimdata.interestpoints.ViewInterestPoints;
+import net.preibisch.mvrecon.fiji.spimdata.pointspreadfunctions.PointSpreadFunctions;
+import net.preibisch.mvrecon.fiji.spimdata.stitchingresults.StitchingResults;
+import net.preibisch.mvrecon.process.export.ExportN5API.StorageType;
+import net.preibisch.mvrecon.process.interestpointregistration.pairwise.constellation.grouping.Group;
+import net.preibisch.mvrecon.process.n5api.N5ApiTools.MultiResolutionLevelInfo;
+import util.URITools;
+
+public class SpimData2Tools
+{
+	public static MultiResolutionLevelInfo[] writeBDVMetaData(
+			final N5Writer driverVolumeWriter,
+			final StorageType storageType,
+			final DataType dataType,
+			final long[] dimensions,
+			final Compression compression,
+			final int[] blockSize,
+			final int[][] downsamplings,
+			final ViewId viewId,
+			final URI n5PathURI,
+			final URI xmlOutPathURI,
+			final InstantiateViewSetup instantiateViewSetup ) throws SpimDataException, IOException
+	{
+		IOFunctions.println( "Creating datasets and writing BDV-metadata ... " );
+
+		//final String xmlPath = null;
+		if ( StorageType.N5.equals(storageType) )
+		{
+			System.out.println( "XML: " + xmlOutPathURI );
+
+			final Pair<Boolean, Boolean> exists = writeSpimData(
+					viewId,
+					storageType,
+					dimensions,
+					n5PathURI,
+					xmlOutPathURI,
+					instantiateViewSetup );
+
+			if ( exists == null )
+				return null;
+
+			return N5ApiTools.setupBdvDatasetsN5( driverVolumeWriter, viewId, dataType, dimensions, compression, blockSize, downsamplings );
+		}
+		else if ( StorageType.HDF5.equals(storageType) )
+		{
+			System.out.println( "XML: " + xmlOutPathURI );
+
+			final Pair<Boolean, Boolean> exists = writeSpimData(
+					viewId,
+					storageType,
+					dimensions,
+					n5PathURI,
+					xmlOutPathURI,
+					instantiateViewSetup );
+
+			if ( exists == null )
+				return null;
+
+			return N5ApiTools.setupBdvDatasetsHDF5(
+					driverVolumeWriter,
+					viewId,
+					blockSize,
+					downsamplings );
+		}
+		else
+		{
+			IOFunctions.println( "BDV-compatible dataset cannot be written for " + storageType + " (yet)." );
+			return null;
+		}
+	}
+
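writeBDVMetaData returns one MultiResolutionLevelInfo per pyramid level, or null when the storage type cannot be served or the XML conflicts. A hypothetical driver, assuming N5FSWriter and GzipCompression from the N5 API plus the imports of the new file; every path and size is a placeholder:

```java
import java.net.URI;
import org.janelia.saalfeldlab.n5.DataType;
import org.janelia.saalfeldlab.n5.GzipCompression;
import org.janelia.saalfeldlab.n5.N5FSWriter;
import org.janelia.saalfeldlab.n5.N5Writer;
import mpicbg.spim.data.sequence.ViewId;
import net.preibisch.mvrecon.process.export.ExportN5API.StorageType;
import net.preibisch.mvrecon.process.n5api.N5ApiTools.MultiResolutionLevelInfo;
import net.preibisch.mvrecon.process.n5api.SpimData2Tools;

public class WriteBdvMetaDataExample
{
	public static void main( final String[] args ) throws Exception
	{
		final N5Writer n5Writer = new N5FSWriter( "/tmp/dataset.n5" ); // hypothetical path
		final ViewId viewId = new ViewId( 0, 0 ); // timepoint 0, setup 0

		final MultiResolutionLevelInfo[] mrInfo = SpimData2Tools.writeBDVMetaData(
				n5Writer,
				StorageType.N5,
				DataType.UINT16,
				new long[] { 512, 512, 100 },             // s0 dimensions
				new GzipCompression(),
				new int[] { 64, 64, 64 },                 // block size
				new int[][] { { 1, 1, 1 }, { 2, 2, 1 } }, // absolute downsampling per level
				viewId,
				URI.create( "file:/tmp/dataset.n5" ),
				URI.create( "file:/tmp/dataset.xml" ),
				new SpimData2Tools.InstantiateViewSetupBigStitcher( 2 ) ); // 2 == "All views together"

		if ( mrInfo == null )
			throw new RuntimeException( "could not create BDV datasets/metadata" );

		n5Writer.close();
	}
}
```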
+	public static Pair<Boolean, Boolean> writeSpimData(
+			final ViewId viewId,
+			final StorageType storageType,
+			final long[] dimensions,
+			final URI n5PathURI,
+			final URI xmlOutPathURI,
+			final InstantiateViewSetup instantiateViewSetup ) throws SpimDataException
+	{
+		SpimData2 existingSpimData;
+
+		try
+		{
+			existingSpimData = new XmlIoSpimData2().load( xmlOutPathURI );
+		}
+		catch ( Exception e )
+		{
+			existingSpimData = null;
+		}
+
+		if ( existingSpimData != null ) //xmlOutPath.exists()
+		{
+			System.out.println( "XML exists. Parsing and adding." );
+
+			boolean tpExists = false;
+			boolean viewSetupExists = false;
+
+			for ( final ViewDescription viewId2 : existingSpimData.getSequenceDescription().getViewDescriptions().values() )
+			{
+				/*
+				// commented this out because if you make a second timepoint and do not add missing views, they all exist already
+				if ( viewId2.equals( viewId ) )
+				{
+					IOFunctions.println( "ViewId you specified (" + Group.pvid(viewId) + ") already exists in the XML, cannot continue." );
+					return null;
+				}
+				*/
+
+				if ( viewId2.getTimePointId() == viewId.getTimePointId() )
+					tpExists = true;
+
+				if ( viewId2.getViewSetupId() == viewId.getViewSetupId() )
+				{
+					viewSetupExists = true;
+
+					// dimensions have to match
+					if ( !Intervals.equalDimensions( new FinalDimensions( dimensions ), viewId2.getViewSetup().getSize() ) )
+					{
+						IOFunctions.println( "ViewSetup you specified (" + Group.pvid(viewId) + ") already exists in the XML, but with different dimensions, cannot continue." );
+						return null;
+					}
+				}
+			}
+
+			final List<ViewSetup> setups = new ArrayList<>( existingSpimData.getSequenceDescription().getViewSetups().values() );
+
+			if ( !viewSetupExists )
+				setups.add( instantiateViewSetup.instantiate( viewId, tpExists, new FinalDimensions( dimensions ), setups ) );
+
+			final TimePoints timepoints;
+			if ( !tpExists )
+			{
+				final List<TimePoint> tps = new ArrayList<>( existingSpimData.getSequenceDescription().getTimePoints().getTimePointsOrdered() );
+				tps.add( new TimePoint( viewId.getTimePointId() ) );
+				timepoints = new TimePoints( tps );
+			}
+			else
+			{
+				timepoints = existingSpimData.getSequenceDescription().getTimePoints();
+			}
+
+			final Map<ViewId, ViewRegistration> registrations = existingSpimData.getViewRegistrations().getViewRegistrations();
+			registrations.put( viewId, new ViewRegistration( viewId.getTimePointId(), viewId.getViewSetupId() ) );
+			final ViewRegistrations viewRegistrations = new ViewRegistrations( registrations );
+
+			final SequenceDescription sequence = new SequenceDescription( timepoints, setups, null );
+
+			if ( StorageType.N5.equals(storageType) )
+				sequence.setImgLoader( new N5ImageLoader( n5PathURI, sequence ) );
+			else if ( StorageType.HDF5.equals(storageType) )
+				sequence.setImgLoader( new Hdf5ImageLoader( new File( URITools.removeFilePrefix( n5PathURI ) ), null, sequence ) );
+			else
+				throw new RuntimeException( storageType + " not supported." );
+
+			final SpimData2 spimDataNew =
+					new SpimData2(
+							existingSpimData.getBasePathURI(),
+							sequence,
+							viewRegistrations,
+							existingSpimData.getViewInterestPoints(),
+							existingSpimData.getBoundingBoxes(),
+							existingSpimData.getPointSpreadFunctions(),
+							existingSpimData.getStitchingResults(),
+							existingSpimData.getIntensityAdjustments() );
+
+			new XmlIoSpimData2().save( spimDataNew, existingSpimData.getBasePathURI() );
+
+			return new ValuePair<>( tpExists, viewSetupExists );
+		}
+		else
+		{
+			System.out.println( "New XML." );
+
+			final ArrayList< ViewSetup > setups = new ArrayList<>();
+
+			setups.add( instantiateViewSetup.instantiate( viewId, false, new FinalDimensions( dimensions ), setups ) );
+			/*
+			final Channel c0 = new Channel( 0 );
+			final Angle a0 = new Angle( 0 );
+			final Illumination i0 = new Illumination( 0 );
+			final Tile t0 = new Tile( 0 );
+
+			final Dimensions d0 = new FinalDimensions( dimensions );
+			final VoxelDimensions vd0 = new FinalVoxelDimensions( "px", 1, 1, 1 );
+			setups.add( new ViewSetup( viewId.getViewSetupId(), "setup " + viewId.getViewSetupId(), d0, vd0, t0, c0, a0, i0 ) );
+			*/
+
+			final ArrayList< TimePoint > tps = new ArrayList<>();
+			tps.add( new TimePoint( viewId.getTimePointId() ) );
+			final TimePoints timepoints = new TimePoints( tps );
+
+			final HashMap< ViewId, ViewRegistration > registrations = new HashMap<>();
+			registrations.put( viewId, new ViewRegistration( viewId.getTimePointId(), viewId.getViewSetupId() ) );
+			final ViewRegistrations viewRegistrations = new ViewRegistrations( registrations );
+
+			final SequenceDescription sequence = new SequenceDescription( timepoints, setups, null );
+			if ( StorageType.N5.equals(storageType) )
+				sequence.setImgLoader( new N5ImageLoader( n5PathURI, sequence ) );
+			else if ( StorageType.HDF5.equals(storageType) )
+				sequence.setImgLoader( new Hdf5ImageLoader( new File( URITools.removeFilePrefix( n5PathURI ) ), null, sequence ) );
+			else
+				throw new RuntimeException( storageType + " not supported." );
+
+			final SpimData2 spimData = new SpimData2( xmlOutPathURI, sequence, viewRegistrations, new ViewInterestPoints(), new BoundingBoxes(), new PointSpreadFunctions(), new StitchingResults(), new IntensityAdjustments() );
+
+			new XmlIoSpimData2().save( spimData, xmlOutPathURI );
+
+			return new ValuePair<>( false, false );
+		}
+	}
+
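The Pair returned by writeSpimData encodes what was already present in the XML: getA() is tpExists, getB() is viewSetupExists, and null means a hard conflict (same ViewSetup id with different dimensions). A sketch of a caller appending a second timepoint; all paths are placeholders and the surrounding imports are assumed as in the new file:

```java
// Sketch: append timepoint 1 for the existing setup 0.
final Pair< Boolean, Boolean > exists = SpimData2Tools.writeSpimData(
		new ViewId( 1, 0 ),
		StorageType.N5,
		new long[] { 512, 512, 100 },
		URI.create( "file:/tmp/dataset.n5" ),
		URI.create( "file:/tmp/dataset.xml" ),
		new SpimData2Tools.InstantiateViewSetupBigStitcher( 2 ) );

if ( exists == null )
	throw new RuntimeException( "ViewSetup exists with different dimensions" );

// fresh XML: (false, false); new timepoint, known setup: (false, true);
// same view again: (true, true)
System.out.println( "tpExists=" + exists.getA() + ", viewSetupExists=" + exists.getB() );
```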
+	@FunctionalInterface
+	public static interface InstantiateViewSetup
+	{
+		public ViewSetup instantiate( final ViewId viewId, final boolean tpExists, final Dimensions d, final List<ViewSetup> existingSetups );
+	}
+
+	public static class InstantiateViewSetupBigStitcher implements InstantiateViewSetup
+	{
+		final int splittingType;
+
+		// count the fusion groups
+		int count = 0;
+
+		// new indices
+		int newA = -1;
+		int newC = -1;
+		int newI = -1;
+		int newT = -1;
+
+		public InstantiateViewSetupBigStitcher(
+				final int splittingType )
+		{
+			this.splittingType = splittingType;
+		}
+
+		@Override
+		public ViewSetup instantiate( final ViewId viewId, final boolean tpExists, final Dimensions d, final List<ViewSetup> existingSetups )
+		{
+			if ( existingSetups == null || existingSetups.size() == 0 )
+			{
+				newA = 0;
+				newC = 0;
+				newI = 0;
+				newT = 0;
+			}
+			else
+			{
+				final Iterator<ViewSetup> i = existingSetups.iterator();
+				ViewSetup tmp = i.next();
+
+				Channel c0 = tmp.getChannel();
+				Angle a0 = tmp.getAngle();
+				Illumination i0 = tmp.getIllumination();
+				Tile t0 = tmp.getTile();
+
+				// get the highest id for all entities
+				while ( i.hasNext() )
+				{
+					tmp = i.next();
+					if ( tmp.getChannel().getId() > c0.getId() )
+						c0 = tmp.getChannel();
+					if ( tmp.getAngle().getId() > a0.getId() )
+						a0 = tmp.getAngle();
+					if ( tmp.getIllumination().getId() > i0.getId() )
+						i0 = tmp.getIllumination();
+					if ( tmp.getTile().getId() > t0.getId() )
+						t0 = tmp.getTile();
+				}
+
+				// new unique ids for all, initialized once
+				if ( newA < 0 )
+				{
+					newA = a0.getId() + 1;
+					newC = c0.getId() + 1;
+					newI = i0.getId() + 1;
+					newT = t0.getId() + 1;
+				}
+			}
+
+			Angle a0;
+			Channel c0;
+			Illumination i0;
+			Tile t0;
+
+			// 0 == "Each timepoint & channel",
+			// 1 == "Each timepoint, channel & illumination",
+			// 2 == "All views together",
+			// 3 == "Each view"
+			if ( splittingType == 0 )
+			{
+				// a new channel for each fusion group
+				c0 = new Channel( newC + count );
+
+				a0 = new Angle( newA );
+				i0 = new Illumination( newI );
+				t0 = new Tile( newT );
+			}
+			else if ( splittingType == 1 )
+			{
+				// TODO: we need to know what changed
+				// a new channel and illumination for each fusion group
+				c0 = new Channel( newC + count );
+				i0 = new Illumination( newI + count );
+
+				a0 = new Angle( newA );
+				t0 = new Tile( newT );
+			}
+			else if ( splittingType == 2 )
+			{
+				// a new channel, angle, tile and illumination for the single fusion group
+				a0 = new Angle( newA );
+				c0 = new Channel( newC );
+				i0 = new Illumination( newI );
+				t0 = new Tile( newT );
+			}
+			else if ( splittingType == 3 )
+			{
+				// TODO: use previous ones
+				// TODO: we need to know what changed
+				c0 = new Channel( newC + count );
+				i0 = new Illumination( newI + count );
+				a0 = new Angle( newA + count );
+				t0 = new Tile( newT + count );
+			}
+			else
+			{
+				IOFunctions.println( "SplittingType " + splittingType + " unknown. Stopping." );
+				return null;
+			}
+
+			final VoxelDimensions vd0 = new FinalVoxelDimensions( "px", 1, 1, 1 );
+
+			++count;
+
+			return new ViewSetup( viewId.getViewSetupId(), "setup " + viewId.getViewSetupId(), d, vd0, t0, c0, a0, i0 );
+		}
+	}
+
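Because InstantiateViewSetup is a @FunctionalInterface, callers are not limited to the two built-in implementations; a policy can be passed as a lambda. A hypothetical one that derives the channel id from the number of existing setups, with the same placeholder voxel size the built-ins use:

```java
// Hypothetical lambda policy: every new setup gets the next channel id.
final SpimData2Tools.InstantiateViewSetup nextChannelPolicy =
		( viewId, tpExists, d, existingSetups ) ->
		{
			final int channelId = existingSetups == null ? 0 : existingSetups.size();
			return new ViewSetup(
					viewId.getViewSetupId(), "setup " + viewId.getViewSetupId(),
					d, new FinalVoxelDimensions( "px", 1, 1, 1 ),
					new Tile( 0 ), new Channel( channelId ), new Angle( 0 ), new Illumination( 0 ) );
		};
```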
Stopping."); + return null; + } + + final VoxelDimensions vd0 = new FinalVoxelDimensions( "px", 1, 1, 1 ); + + ++count; + + return new ViewSetup( viewId.getViewSetupId(), "setup " + viewId.getViewSetupId(), d, vd0, t0, c0, a0, i0 ); + } + } + + public static class InstantiateViewSetupBigStitcherSpark implements InstantiateViewSetup + { + final String angleIds; + final String illuminationIds; + final String channelIds; + final String tileIds; + + public InstantiateViewSetupBigStitcherSpark( + final String angleIds, + final String illuminationIds, + final String channelIds, + final String tileIds ) + { + this.angleIds = angleIds; + this.illuminationIds = illuminationIds; + this.channelIds = channelIds; + this.tileIds = tileIds; + } + + @Override + public ViewSetup instantiate( final ViewId viewId, final boolean tpExists, final Dimensions d, final List existingSetups ) + { + Angle a0; + Channel c0; + Illumination i0; + Tile t0; + + if ( existingSetups == null || existingSetups.size() == 0 ) + { + a0 = new Angle( 0 ); + c0 = new Channel( 0 ); + i0 = new Illumination( 0 ); + t0 = new Tile( 0 ); + } + else + { + final Iterator i = existingSetups.iterator(); + ViewSetup tmp = i.next(); + + c0 = tmp.getChannel(); + a0 = tmp.getAngle(); + i0 = tmp.getIllumination(); + t0 = tmp.getTile(); + + // get the highest id for all entities + while ( i.hasNext() ) + { + tmp = i.next(); + if ( tmp.getChannel().getId() > c0.getId() ) + c0 = tmp.getChannel(); + if ( tmp.getAngle().getId() > a0.getId() ) + a0 = tmp.getAngle(); + if ( tmp.getIllumination().getId() > i0.getId() ) + i0 = tmp.getIllumination(); + if ( tmp.getTile().getId() > t0.getId() ) + t0 = tmp.getTile(); + } + + if ( angleIds != null ) + a0 = new Angle( a0.getId() + 1 ); + if ( illuminationIds != null ) + i0 = new Illumination( i0.getId() + 1 ); + if ( tileIds != null ) + t0 = new Tile( t0.getId() + 1 ); + if ( tileIds != null || ( angleIds == null && illuminationIds == null && tileIds == null && tpExists ) ) // nothing was defined, then increase channel + c0 = new Channel( c0.getId() + 1 ); + } + + //final Dimensions d0 = new FinalDimensions( dimensions ); + final VoxelDimensions vd0 = new FinalVoxelDimensions( "px", 1, 1, 1 ); + + return new ViewSetup( viewId.getViewSetupId(), "setup " + viewId.getViewSetupId(), d, vd0, t0, c0, a0, i0 ); + } + } + + /** + * Reduces a given SpimData2 to the subset of timepoints and viewsetups as selected by the user, + * including the original imgloader and keeping the basepath (i.e. interest points still work) + * + * Note: PSF's will be lost. + * + * @param oldSpimData - the original SpimData + * @param viewIds - the views to keep + * @return - reduced SpimData2 + */ + public static SpimData2 reduceSpimData2( final SpimData2 oldSpimData, final List< ViewId > viewIds ) + { + return reduceSpimData2( oldSpimData, viewIds, null ); + } + + /** + * Reduces a given SpimData2 to the subset of timepoints and viewsetups as selected by the user, + * including the original imgloader and changing the base path (you still need to save to materialize the returned object!) + * + * Note: PSF's will be lost. 
+	 *
+	 * @param oldSpimData - the original SpimData
+	 * @param viewIds - the views to keep
+	 * @param basePath - the new base path (can be null); if you set a new base path it will load all interest points so the new SpimData2 object can be saved including these points
+	 * @return - reduced SpimData2
+	 */
+	public static SpimData2 reduceSpimData2( final SpimData2 oldSpimData, final List< ViewId > viewIds, final URI basePath )
+	{
+		final TimePoints timepoints;
+
+		try
+		{
+			timepoints = new TimePointsPattern( listAllTimePoints( SpimData2.getAllTimePointsSorted( oldSpimData, viewIds ) ) );
+		}
+		catch ( ParseException e )
+		{
+			IOFunctions.println( "Automatically created list of timepoints failed to parse. This should not happen, really :) -- " + e );
+			IOFunctions.println( "Here is the list: " + listAllTimePoints( SpimData2.getAllTimePointsSorted( oldSpimData, viewIds ) ) );
+			e.printStackTrace();
+			return null;
+		}
+
+		final List< ViewSetup > viewSetupsToProcess = SpimData2.getAllViewSetupsSorted( oldSpimData, viewIds );
+
+		// a hashset for all viewsetups that remain
+		final Set< ViewId > views = new HashSet< ViewId >();
+
+		for ( final ViewId viewId : viewIds )
+			views.add( new ViewId( viewId.getTimePointId(), viewId.getViewSetupId() ) );
+
+		final MissingViews oldMissingViews = oldSpimData.getSequenceDescription().getMissingViews();
+		final HashSet< ViewId > missingViews = new HashSet< ViewId >();
+
+		if ( oldMissingViews != null && oldMissingViews.getMissingViews() != null )
+			for ( final ViewId id : oldMissingViews.getMissingViews() )
+				if ( views.contains( id ) )
+					missingViews.add( id );
+
+		// add the new missing views!!!
+		for ( final TimePoint t : timepoints.getTimePointsOrdered() )
+			for ( final ViewSetup v : viewSetupsToProcess )
+			{
+				final ViewId viewId = new ViewId( t.getId(), v.getId() );
+
+				if ( !views.contains( viewId ) )
+					missingViews.add( viewId );
+			}
+
+		// instantiate the sequencedescription
+		final SequenceDescription sequenceDescription = new SequenceDescription( timepoints, viewSetupsToProcess, oldSpimData.getSequenceDescription().getImgLoader(), new MissingViews( missingViews ) );
+
+		// re-assemble the registrations
+		final Map< ViewId, ViewRegistration > oldRegMap = oldSpimData.getViewRegistrations().getViewRegistrations();
+		final Map< ViewId, ViewRegistration > newRegMap = new HashMap< ViewId, ViewRegistration >();
+
+		for ( final ViewId viewId : oldRegMap.keySet() )
+			if ( views.contains( viewId ) )
+				newRegMap.put( viewId, oldRegMap.get( viewId ) );
+
+		final ViewRegistrations viewRegistrations = new ViewRegistrations( newRegMap );
+
+		// re-assemble the interestpoints and a list of filenames to copy
+		final Map< ViewId, ViewInterestPointLists > oldInterestPoints = oldSpimData.getViewInterestPoints().getViewInterestPoints();
+		final Map< ViewId, ViewInterestPointLists > newInterestPoints = new HashMap< ViewId, ViewInterestPointLists >();
+
+		oldInterestPoints.forEach( (viewId, ipLists) ->
+		{
+			if ( views.contains( viewId ) )
+			{
+				if ( basePath != null )
+				{
+					final ViewInterestPointLists ipListsNew = new ViewInterestPointLists( viewId.getTimePointId(), viewId.getViewSetupId() );
+
+					ipLists.getHashMap().forEach( (label, interestpoints) ->
+					{
+						final List< InterestPoint > points = interestpoints.getInterestPointsCopy();
+						final List< CorrespondingInterestPoints > corr = interestpoints.getCorrespondingInterestPointsCopy();
+
+						final InterestPoints interestpointsNew = InterestPoints.newInstance( basePath, viewId, label );
+						interestpointsNew.setInterestPoints( points );
+						interestpointsNew.setCorrespondingInterestPoints( corr );
+
+						ipListsNew.addInterestPointList( label, interestpointsNew );
+					} );
+
+					newInterestPoints.put( viewId, ipListsNew );
+				}
+				else
+				{
+					// if the basepath doesn't change we can keep interestpoints as-is
+					newInterestPoints.put( viewId, ipLists );
+				}
+			}
+		});
+
+		final ViewInterestPoints viewsInterestPoints = new ViewInterestPoints( newInterestPoints );
+
+		//TODO: copy PSFs?
+
+		final SpimData2 newSpimData = new SpimData2(
+				basePath == null ? oldSpimData.getBasePathURI() : basePath,
+				sequenceDescription,
+				viewRegistrations,
+				viewsInterestPoints,
+				oldSpimData.getBoundingBoxes(),
+				new PointSpreadFunctions(), //oldSpimData.getPointSpreadFunctions()
+				oldSpimData.getStitchingResults(),
+				oldSpimData.getIntensityAdjustments() );
+
+		return newSpimData;
+	}
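A usage sketch for reduceSpimData2: keep two views of timepoint 0 and move the base path, so the interest points are loaded and can be re-saved under the new location. All paths are placeholders:

```java
// Sketch: reduce to two views of timepoint 0 and re-home the project.
final SpimData2 full = new XmlIoSpimData2().load( URI.create( "file:/data/dataset.xml" ) );

final ArrayList< ViewId > keep = new ArrayList<>();
keep.add( new ViewId( 0, 0 ) );
keep.add( new ViewId( 0, 1 ) );

// non-null basePath: interest points are copied so they survive the move
final SpimData2 reduced = SpimData2Tools.reduceSpimData2( full, keep, URI.create( "file:/data/reduced/" ) );

// the reduced object must still be saved to materialize it
new XmlIoSpimData2().save( reduced, URI.create( "file:/data/reduced/dataset.xml" ) );
```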
+
+	public static String listAllTimePoints( final List< TimePoint > timePointsToProcess )
+	{
+		String t = "" + timePointsToProcess.get( 0 ).getId();
+
+		for ( int i = 1; i < timePointsToProcess.size(); ++i )
+			t += ", " + timePointsToProcess.get( i ).getId();
+
+		return t;
+	}
+
+	private static void copyFolder( final File src, final File dest, final List< String > filesToCopy ) throws IOException
+	{
+		if ( src.isDirectory() )
+		{
+			if ( !dest.exists() )
+				dest.mkdir();
+
+			for ( final String file : src.list() )
+				copyFolder( new File( src, file ), new File( dest, file ), filesToCopy );
+		}
+		else
+		{
+			boolean contains = false;
+
+			for ( int i = 0; i < filesToCopy.size() && !contains; ++i )
+				if ( src.getName().contains( filesToCopy.get( i ) ) )
+					contains = true;
+
+			if ( contains )
+			{
+				final InputStream in = new FileInputStream( src );
+				final OutputStream out = new FileOutputStream( dest );
+
+				final byte[] buffer = new byte[ 65535 ];
+
+				int length;
+
+				while ( ( length = in.read( buffer ) ) > 0 )
+					out.write( buffer, 0, length );
+
+				in.close();
+				out.close();
+			}
+		}
+	}
+}
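listAllTimePoints renders ids in exactly the comma-separated form TimePointsPattern parses, which is what lets reduceSpimData2 round-trip through it. For example (inside a method declaring throws ParseException):

```java
final ArrayList< TimePoint > tps = new ArrayList<>();
tps.add( new TimePoint( 0 ) );
tps.add( new TimePoint( 2 ) );
tps.add( new TimePoint( 5 ) );

// prints "0, 2, 5"
System.out.println( SpimData2Tools.listAllTimePoints( tps ) );

// parses back into the same three timepoints
final TimePoints roundTrip = new TimePointsPattern( SpimData2Tools.listAllTimePoints( tps ) );
```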
diff --git a/src/main/java/net/preibisch/mvrecon/process/resave/SpimData2Tools.java b/src/main/java/net/preibisch/mvrecon/process/resave/SpimData2Tools.java
deleted file mode 100644
index 27127b4d..00000000
--- a/src/main/java/net/preibisch/mvrecon/process/resave/SpimData2Tools.java
+++ /dev/null
@@ -1,215 +0,0 @@
-package net.preibisch.mvrecon.process.resave;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.net.URI;
-import java.text.ParseException;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import mpicbg.spim.data.registration.ViewRegistration;
-import mpicbg.spim.data.registration.ViewRegistrations;
-import mpicbg.spim.data.sequence.MissingViews;
-import mpicbg.spim.data.sequence.SequenceDescription;
-import mpicbg.spim.data.sequence.TimePoint;
-import mpicbg.spim.data.sequence.TimePoints;
-import mpicbg.spim.data.sequence.TimePointsPattern;
-import mpicbg.spim.data.sequence.ViewId;
-import mpicbg.spim.data.sequence.ViewSetup;
-import net.preibisch.legacy.io.IOFunctions;
-import net.preibisch.mvrecon.fiji.spimdata.SpimData2;
-import net.preibisch.mvrecon.fiji.spimdata.interestpoints.CorrespondingInterestPoints;
-import net.preibisch.mvrecon.fiji.spimdata.interestpoints.InterestPoint;
-import net.preibisch.mvrecon.fiji.spimdata.interestpoints.InterestPoints;
-import net.preibisch.mvrecon.fiji.spimdata.interestpoints.ViewInterestPointLists;
-import net.preibisch.mvrecon.fiji.spimdata.interestpoints.ViewInterestPoints;
-import net.preibisch.mvrecon.fiji.spimdata.pointspreadfunctions.PointSpreadFunctions;
-
-public class SpimData2Tools
-{
-
-	/**
-	 * Reduces a given SpimData2 to the subset of timepoints and viewsetups as selected by the user,
-	 * including the original imgloader and keeping the basepath (i.e. interest points still work)
-	 *
-	 * Note: PSF's will be lost.
-	 *
-	 * @param oldSpimData - the original SpimData
-	 * @param viewIds - the views to keep
-	 * @return - reduced SpimData2
-	 */
-	public static SpimData2 reduceSpimData2( final SpimData2 oldSpimData, final List< ViewId > viewIds )
-	{
-		return reduceSpimData2( oldSpimData, viewIds, null );
-	}
-
-	/**
-	 * Reduces a given SpimData2 to the subset of timepoints and viewsetups as selected by the user,
-	 * including the original imgloader and changing the base path (you still need to save to materialize the returned object!)
-	 *
-	 * Note: PSF's will be lost.
-	 *
-	 * @param oldSpimData - the original SpimData
-	 * @param viewIds - the views to keep
-	 * @param basePath - the new base path (can be null); if you set a new base path it will load all interest points so the new SpimData2 object can be saved including these points
-	 * @return - reduced SpimData2
-	 */
-	public static SpimData2 reduceSpimData2( final SpimData2 oldSpimData, final List< ViewId > viewIds, final URI basePath )
-	{
-		final TimePoints timepoints;
-
-		try
-		{
-			timepoints = new TimePointsPattern( listAllTimePoints( SpimData2.getAllTimePointsSorted( oldSpimData, viewIds ) ) );
-		}
-		catch ( ParseException e )
-		{
-			IOFunctions.println( "Automatically created list of timepoints failed to parse. This should not happen, really :) -- " + e );
-			IOFunctions.println( "Here is the list: " + listAllTimePoints( SpimData2.getAllTimePointsSorted( oldSpimData, viewIds ) ) );
-			e.printStackTrace();
-			return null;
-		}
-
-		final List< ViewSetup > viewSetupsToProcess = SpimData2.getAllViewSetupsSorted( oldSpimData, viewIds );
-
-		// a hashset for all viewsetups that remain
-		final Set< ViewId > views = new HashSet< ViewId >();
-
-		for ( final ViewId viewId : viewIds )
-			views.add( new ViewId( viewId.getTimePointId(), viewId.getViewSetupId() ) );
-
-		final MissingViews oldMissingViews = oldSpimData.getSequenceDescription().getMissingViews();
-		final HashSet< ViewId > missingViews = new HashSet< ViewId >();
-
-		if ( oldMissingViews != null && oldMissingViews.getMissingViews() != null )
-			for ( final ViewId id : oldMissingViews.getMissingViews() )
-				if ( views.contains( id ) )
-					missingViews.add( id );
-
-		// add the new missing views!!!
-		for ( final TimePoint t : timepoints.getTimePointsOrdered() )
-			for ( final ViewSetup v : viewSetupsToProcess )
-			{
-				final ViewId viewId = new ViewId( t.getId(), v.getId() );
-
-				if ( !views.contains( viewId ) )
-					missingViews.add( viewId );
-			}
-
-		// instantiate the sequencedescription
-		final SequenceDescription sequenceDescription = new SequenceDescription( timepoints, viewSetupsToProcess, oldSpimData.getSequenceDescription().getImgLoader(), new MissingViews( missingViews ) );
-
-		// re-assemble the registrations
-		final Map< ViewId, ViewRegistration > oldRegMap = oldSpimData.getViewRegistrations().getViewRegistrations();
-		final Map< ViewId, ViewRegistration > newRegMap = new HashMap< ViewId, ViewRegistration >();
-
-		for ( final ViewId viewId : oldRegMap.keySet() )
-			if ( views.contains( viewId ) )
-				newRegMap.put( viewId, oldRegMap.get( viewId ) );
-
-		final ViewRegistrations viewRegistrations = new ViewRegistrations( newRegMap );
-
-		// re-assemble the interestpoints and a list of filenames to copy
-		final Map< ViewId, ViewInterestPointLists > oldInterestPoints = oldSpimData.getViewInterestPoints().getViewInterestPoints();
-		final Map< ViewId, ViewInterestPointLists > newInterestPoints = new HashMap< ViewId, ViewInterestPointLists >();
-
-		oldInterestPoints.forEach( (viewId, ipLists) ->
-		{
-			if ( views.contains( viewId ) )
-			{
-				if ( basePath != null )
-				{
-					final ViewInterestPointLists ipListsNew = new ViewInterestPointLists( viewId.getTimePointId(), viewId.getViewSetupId() );
-
-					ipLists.getHashMap().forEach( (label, interestpoints) ->
-					{
-						final List< InterestPoint > points = interestpoints.getInterestPointsCopy();
-						final List< CorrespondingInterestPoints > corr = interestpoints.getCorrespondingInterestPointsCopy();
-
-						final InterestPoints interestpointsNew = InterestPoints.newInstance( basePath, viewId, label );
-						interestpointsNew.setInterestPoints( points );
-						interestpointsNew.setCorrespondingInterestPoints( corr );
-
-						ipListsNew.addInterestPointList( label, interestpointsNew );
-					} );
-
-					newInterestPoints.put( viewId, ipListsNew );
-				}
-				else
-				{
-					// if the basepath doesn't change we can keep interestpoints as-is
-					newInterestPoints.put( viewId, ipLists );
-				}
-			}
-		});
-
-		final ViewInterestPoints viewsInterestPoints = new ViewInterestPoints( newInterestPoints );
-
-		//TODO: copy PSFs?
-
-		final SpimData2 newSpimData = new SpimData2(
-				basePath == null ? oldSpimData.getBasePathURI() : basePath,
-				sequenceDescription,
-				viewRegistrations,
-				viewsInterestPoints,
-				oldSpimData.getBoundingBoxes(),
-				new PointSpreadFunctions(), //oldSpimData.getPointSpreadFunctions()
-				oldSpimData.getStitchingResults(),
-				oldSpimData.getIntensityAdjustments() );
-
-		return newSpimData;
-	}
-
-	public static String listAllTimePoints( final List< TimePoint > timePointsToProcess )
-	{
-		String t = "" + timePointsToProcess.get( 0 ).getId();
-
-		for ( int i = 1; i < timePointsToProcess.size(); ++i )
-			t += ", " + timePointsToProcess.get( i ).getId();
-
-		return t;
-	}
-
-	private static void copyFolder( final File src, final File dest, final List< String > filesToCopy ) throws IOException
-	{
-		if ( src.isDirectory() )
-		{
-			if ( !dest.exists() )
-				dest.mkdir();
-
-			for ( final String file : src.list() )
-				copyFolder( new File( src, file ), new File( dest, file ), filesToCopy );
-		}
-		else
-		{
-			boolean contains = false;
-
-			for ( int i = 0; i < filesToCopy.size() && !contains; ++i )
-				if ( src.getName().contains( filesToCopy.get( i ) ) )
-					contains = true;
-
-			if ( contains )
-			{
-				final InputStream in = new FileInputStream( src );
-				final OutputStream out = new FileOutputStream( dest );
-
-				final byte[] buffer = new byte[ 65535 ];
-
-				int length;
-
-				while ( ( length = in.read( buffer ) ) > 0 )
-					out.write( buffer, 0, length );
-
-				in.close();
-				out.close();
-			}
-		}
-	}
-}