Skip to content

Commit

Permalink
fix downsampling export
Browse files Browse the repository at this point in the history
  • Loading branch information
StephanPreibisch committed Sep 7, 2024
1 parent 2f87a8e commit e99dc0f
Show file tree
Hide file tree
Showing 4 changed files with 73 additions and 33 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -99,7 +99,7 @@ public static ParametersResaveN5 getParamtersIJ(
final int firstviewSetupId = setupsToProcess.iterator().next().getId();// xml.getData().getSequenceDescription().getViewSetupsOrdered().get( 0 ).getId();
final ExportMipmapInfo autoMipmapSettings = perSetupExportMipmapInfo.get( firstviewSetupId );

// block size should be bigger than hdf5
// block size should be bigger than hdf5, and all the same
for ( final int[] row : autoMipmapSettings.getSubdivisions() )
{
Arrays.fill( row, defaultBlockSize );
Expand All @@ -113,8 +113,8 @@ public static ParametersResaveN5 getParamtersIJ(
gdp.addMessage( "N5 saving options", new Font( Font.SANS_SERIF, Font.BOLD, 13 ) );

gdp.addChoice( "Compression", compressions, compressions[ defaultCompression ] );
gdp.addStringField( "Subsampling_factors", ProposeMipmaps.getArrayString( autoMipmapSettings.getExportResolutions() ), 40 );
gdp.addStringField( "N5_block_sizes", ProposeMipmaps.getArrayString( autoMipmapSettings.getSubdivisions() ), 40 );
gdp.addStringField( "Downsampling_factors", ProposeMipmaps.getArrayString( autoMipmapSettings.getExportResolutions() ), 40 );
gdp.addStringField( "Block_size (all the same)", ProposeMipmaps.getArrayString( autoMipmapSettings.getSubdivisions() ), 40 );
gdp.addNumericField( "Number_of_threads (CPUs:" + Runtime.getRuntime().availableProcessors() + ")", defaultNumThreads, 0 );

if ( askForPaths )
Expand Down Expand Up @@ -169,28 +169,29 @@ else if ( compression == 4 )
else
n5params.compression = new RawCompression();

final int[][] resolutions = PluginHelper.parseResolutionsString( subsampling );
final int[][] subdivisions = PluginHelper.parseResolutionsString( chunkSizes );
n5params.resolutions = PluginHelper.parseResolutionsString( subsampling );
n5params.subdivisions = PluginHelper.parseResolutionsString( chunkSizes );

if ( resolutions.length == 0 )
if ( n5params.resolutions.length == 0 )
{
IOFunctions.println( "Cannot parse subsampling factors " + subsampling );
IOFunctions.println( "Cannot parse downsampling factors " + subsampling );
return null;
}
if ( subdivisions.length == 0 )

if ( n5params.subdivisions.length == 0 )
{
IOFunctions.println( "Cannot parse hdf5 chunk sizes " + chunkSizes );
IOFunctions.println( "Cannot parse block sizes " + chunkSizes );
return null;
}
else if ( resolutions.length != subdivisions.length )
else if ( n5params.resolutions.length != n5params.subdivisions.length )
{
IOFunctions.println( "subsampling factors and hdf5 chunk sizes must have the same number of elements" );
IOFunctions.println( "downsampling factors and block sizes must have the same number of elements" );
return null;
}

n5params.proposedMipmaps = createProposedMipMaps(
resolutions,
subdivisions,
n5params.resolutions,
n5params.subdivisions,
setupsToProcess.stream().map( vs -> vs.getId() ).collect( Collectors.toList() ) );

return n5params;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,6 @@
*/
package net.preibisch.mvrecon.fiji.plugin.resave;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
Expand All @@ -32,14 +30,14 @@
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

import org.janelia.saalfeldlab.n5.Compression;
import org.janelia.saalfeldlab.n5.N5Writer;

import bdv.export.ExportMipmapInfo;
import bdv.export.ProgressWriter;
import bdv.export.n5.WriteSequenceToN5;
import bdv.img.n5.N5ImageLoader;
import ij.plugin.PlugIn;
import mpicbg.spim.data.sequence.TimePoint;
Expand Down Expand Up @@ -109,6 +107,7 @@ public static SpimData2 resaveN5(
});
}

/*
// re-save data to file
if ( URITools.isFile( n5Params.n5URI ) )
{
Expand All @@ -129,24 +128,32 @@ public static SpimData2 resaveN5(
e.printStackTrace();
}
}
else if ( URITools.isS3( n5Params.n5URI ) || URITools.isGC( n5Params.n5URI ) )
else if ( URITools.isS3( n5Params.n5URI ) || URITools.isGC( n5Params.n5URI ) )*/
{
// save to cloud or file
final N5Writer n5Writer = URITools.instantiateGuessedN5Writer( n5Params.n5URI );

final int[] blockSize = null;
final int[] computeBlockSize = null;
final Compression compression = null;
final int[] blockSize = n5Params.subdivisions[ 0 ];
final int[] computeBlockSize = n5Params.subdivisions[ 0 ];
final Compression compression = n5Params.compression;

computeBlockSize[ 0 ] *= 4;
computeBlockSize[ 1 ] *= 4;

//final ArrayList<ViewSetup> viewSetups =
// N5ResaveTools.assembleViewSetups( data, vidsToResave );

final HashMap<Integer, long[]> viewSetupIdToDimensions =
N5ResaveTools.assembleDimensions( data, vidsToResave );

IOFunctions.println( "Dimensions of raw images: " );
viewSetupIdToDimensions.forEach( (id,dim ) -> IOFunctions.println( "ViewSetup " + id + ": " + Arrays.toString( dim )) );

final int[][] downsamplings =
N5ResaveTools.mipMapInfoToDownsamplings( n5Params.proposedMipmaps );

IOFunctions.println( "Downsamplings: " + Arrays.deepToString( downsamplings ) );

final ArrayList<long[][]> grid =
N5ResaveTools.assembleAllS0Jobs( vidsToResave, viewSetupIdToDimensions, blockSize, computeBlockSize );

Expand Down Expand Up @@ -189,7 +196,13 @@ else if ( URITools.isS3( n5Params.n5URI ) || URITools.isGC( n5Params.n5URI ) )

try
{
myPool.submit(() -> allBlocks.parallelStream().forEach( gridBlock -> N5ResaveTools.writeDownsampledBlock( n5Writer, s, ds, gridBlock ) ) ).get();
myPool.submit(() -> allBlocks.parallelStream().forEach(
gridBlock -> N5ResaveTools.writeDownsampledBlock(
n5Writer,
N5ResaveTools.mappingFunctionBDV( s ),
N5ResaveTools.mappingFunctionBDV( s - 1 ),
ds,
gridBlock ) ) ).get();
}
catch (InterruptedException | ExecutionException e)
{
Expand All @@ -202,7 +215,7 @@ else if ( URITools.isS3( n5Params.n5URI ) || URITools.isGC( n5Params.n5URI ) )
}

myPool.shutdown();
//myPool.awaitTermination( Long.MAX_VALUE, TimeUnit.HOURS );
try { myPool.awaitTermination( Long.MAX_VALUE, TimeUnit.HOURS ); } catch (InterruptedException e) { e.printStackTrace(); }

n5Writer.close();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;

import org.janelia.saalfeldlab.n5.Compression;
import org.janelia.saalfeldlab.n5.DataType;
Expand Down Expand Up @@ -425,7 +426,7 @@ else if ( FloatType.class.isInstance( type ) )
return false;
}

final List<long[][]> gridDS = Grid.create( dim, blocksize());
final List<long[][]> gridDS = Grid.create( dim, blocksize() );

IOFunctions.println( new Date( System.currentTimeMillis() ) + ": s" + level + " num blocks=" + gridDS.size() );

Expand All @@ -436,7 +437,16 @@ else if ( FloatType.class.isInstance( type ) )

time = System.currentTimeMillis();

e.submit( () -> gridDS.parallelStream().forEach( gridBlock -> N5ResaveTools.writeDownsampledBlock( driverVolumeWriter, s, ds, gridBlock ) ) );
final Function<long[][], String> viewIdToDataset = (gridBlock -> datasetDownsampling); // there is only one ViewId, so no matter which gridBlock, its always the same
final Function<long[][], String> viewIdToDatasetPreviousScale = (gridBlock -> datasetPrev); // there is only one ViewId

e.submit( () -> gridDS.parallelStream().forEach(
gridBlock -> N5ResaveTools.writeDownsampledBlock(
driverVolumeWriter,
viewIdToDataset,
viewIdToDatasetPreviousScale,
ds,
gridBlock ) ) );

/*
e.submit(() ->
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.function.Function;

import org.janelia.saalfeldlab.n5.Compression;
import org.janelia.saalfeldlab.n5.DataType;
Expand Down Expand Up @@ -35,22 +36,37 @@

public class N5ResaveTools
{
/**
 * Builds the mapper from a processing grid block to its BDV-style N5 dataset path
 * ("setup{S}/timepoint{T}/s{level}") for the given resolution level.
 *
 * @param level - the downsampling level (appended as the "s{level}" path component)
 * @return a Function that maps the gridBlock to a N5 dataset name
 */
public static Function<long[][], String> mappingFunctionBDV( final int level )
{
	return gridBlock ->
	{
		final ViewId id;
		if ( gridBlock.length > 3 )
		{
			// 4th entry of the grid block carries { timepointId, setupId }
			id = new ViewId( (int)gridBlock[ 3 ][ 0 ], (int)gridBlock[ 3 ][ 1 ] );
		}
		else
		{
			// no ViewId attached to this block — fall back to (timepoint 0, setup 0)
			id = new ViewId( 0, 0 );
		}
		return "setup" + id.getViewSetupId() + "/timepoint" + id.getTimePointId() + "/s" + level;
	};
}

public static void writeDownsampledBlock(
final N5Writer n5,
final int level,
final Function<long[][], String> viewIdToDataset, // gridBlock to dataset name (e.g. s1, s2, ...)
final Function<long[][], String> viewIdToDatasetPreviousScale, // gridblock to name of previous dataset (e.g. s0 when writing s1, s1 when writing s2, ... )
final int[] relativeDownsampling,
final long[][] gridBlock )
{
final ViewId viewId = new ViewId( (int)gridBlock[ 3 ][ 0 ], (int)gridBlock[ 3 ][ 1 ]);
final String dataset = viewIdToDataset.apply( gridBlock );
final String datasetPreviousScale = viewIdToDatasetPreviousScale.apply( gridBlock );

final DataType dataType = n5.getAttribute( "setup" + viewId.getViewSetupId(), DatasetAttributes.DATA_TYPE_KEY, DataType.class );
final int[] blockSize = n5.getAttribute( "setup" + viewId.getViewSetupId(), DatasetAttributes.BLOCK_SIZE_KEY, int[].class );
final String datasetPrev = "setup" + viewId.getViewSetupId() + "/timepoint" + viewId.getTimePointId() + "/s" + (level-1);
final String dataset = "setup" + viewId.getViewSetupId() + "/timepoint" + viewId.getTimePointId() + "/s" + (level);
final DataType dataType = n5.getAttribute( datasetPreviousScale, DatasetAttributes.DATA_TYPE_KEY, DataType.class );
final int[] blockSize = n5.getAttribute( datasetPreviousScale, DatasetAttributes.BLOCK_SIZE_KEY, int[].class );
//final String datasetPrev = "setup" + viewId.getViewSetupId() + "/timepoint" + viewId.getTimePointId() + "/s" + (level-1);
//final String dataset = "setup" + viewId.getViewSetupId() + "/timepoint" + viewId.getTimePointId() + "/s" + (level);

if ( dataType == DataType.UINT16 )
{
RandomAccessibleInterval<UnsignedShortType> downsampled = N5Utils.open(n5, datasetPrev);
RandomAccessibleInterval<UnsignedShortType> downsampled = N5Utils.open(n5, datasetPreviousScale);

for ( int d = 0; d < downsampled.numDimensions(); ++d )
if ( relativeDownsampling[ d ] > 1 )
Expand All @@ -66,7 +82,7 @@ public static void writeDownsampledBlock(
}
else if ( dataType == DataType.UINT8 )
{
RandomAccessibleInterval<UnsignedByteType> downsampled = N5Utils.open(n5, datasetPrev);
RandomAccessibleInterval<UnsignedByteType> downsampled = N5Utils.open(n5, datasetPreviousScale);

for ( int d = 0; d < downsampled.numDimensions(); ++d )
if ( relativeDownsampling[ d ] > 1 )
Expand All @@ -82,7 +98,7 @@ else if ( dataType == DataType.UINT8 )
}
else if ( dataType == DataType.FLOAT32 )
{
RandomAccessibleInterval<FloatType> downsampled = N5Utils.open(n5, datasetPrev);;
RandomAccessibleInterval<FloatType> downsampled = N5Utils.open(n5, datasetPreviousScale);;

for ( int d = 0; d < downsampled.numDimensions(); ++d )
if ( relativeDownsampling[ d ] > 1 )
Expand Down

0 comments on commit e99dc0f

Please sign in to comment.