Skip to content

Commit

Permalink
work on mapping functions, keep datatypes
Browse files Browse the repository at this point in the history
  • Loading branch information
StephanPreibisch committed Sep 9, 2024
1 parent 25efeb5 commit d65b1df
Show file tree
Hide file tree
Showing 2 changed files with 49 additions and 14 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -28,12 +28,14 @@
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

import org.janelia.saalfeldlab.n5.Compression;
import org.janelia.saalfeldlab.n5.DataType;
import org.janelia.saalfeldlab.n5.N5Writer;

import bdv.export.ExportMipmapInfo;
Expand Down Expand Up @@ -157,7 +159,9 @@ else if ( URITools.isS3( n5Params.n5URI ) || URITools.isGC( n5Params.n5URI ) )*/
final ArrayList<long[][]> grid =
N5ResaveTools.assembleAllS0Jobs( vidsToResave, viewSetupIdToDimensions, blockSize, computeBlockSize );

N5ResaveTools.createGroups( n5Writer, data, viewSetupIdToDimensions, blockSize, downsamplings, compression );
final Map<Integer, DataType> dataTypes =
N5ResaveTools.createGroups( n5Writer, data, viewSetupIdToDimensions, blockSize, downsamplings, compression );

N5ResaveTools.createS0Datasets( n5Writer, vidsToResave, viewSetupIdToDimensions, blockSize, compression );

//
Expand All @@ -169,7 +173,13 @@ else if ( URITools.isS3( n5Params.n5URI ) || URITools.isGC( n5Params.n5URI ) )*/

try
{
myPool.submit(() -> grid.parallelStream().forEach( gridBlock -> N5ResaveTools.writeS0Block( data, n5Writer, gridBlock ) ) ).get();
myPool.submit(() -> grid.parallelStream().forEach(
gridBlock -> N5ResaveTools.resaveS0Block(
data,
n5Writer,
dataTypes.get( (int)gridBlock[ 3 ][ 1 ] ),
N5ResaveTools.datasetMappingFunctionBdvN5( 0 ),
gridBlock ) ) ).get();
}
catch (InterruptedException | ExecutionException e)
{
Expand Down Expand Up @@ -199,8 +209,8 @@ else if ( URITools.isS3( n5Params.n5URI ) || URITools.isGC( n5Params.n5URI ) )*/
myPool.submit(() -> allBlocks.parallelStream().forEach(
gridBlock -> N5ResaveTools.writeDownsampledBlock(
n5Writer,
N5ResaveTools.mappingFunctionBDV( s ),
N5ResaveTools.mappingFunctionBDV( s - 1 ),
N5ResaveTools.datasetMappingFunctionBdvN5( s ),
N5ResaveTools.datasetMappingFunctionBdvN5( s - 1 ),
ds,
gridBlock ) ) ).get();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,19 +41,38 @@ public class N5ResaveTools
* @param level - the downsampling level
* @return a Function that maps the gridBlock to a N5 dataset name
*/
public static Function<long[][], String> mappingFunctionBDV( final int level )
public static Function<long[][], String> datasetMappingFunctionBdvN5( final int level )
{
return gridBlock ->
{
final ViewId viewId = gridBlock.length > 3 ? new ViewId( (int)gridBlock[ 3 ][ 0 ], (int)gridBlock[ 3 ][ 1 ]) : new ViewId( 0, 0 );
if ( gridBlock.length <= 3 )
throw new RuntimeException( "mappingFunctionBDV() needs an extended GridBlock long[][], where Gridblock[3][] encodes the ViewId");

final ViewId viewId = new ViewId( (int)gridBlock[ 3 ][ 0 ], (int)gridBlock[ 3 ][ 1 ]);
return "setup" + viewId.getViewSetupId() + "/timepoint" + viewId.getTimePointId() + "/s" + (level);
};
}

/**
 * Maps a gridBlock to the HDF5 cell dataset path of the BDV/BigStitcher HDF5 layout.
 *
 * @param level - the downsampling level
 * @return a Function that maps the gridBlock to a HDF5 dataset name
 */
public static Function<long[][], String> datasetMappingFunctionBdvHDF5( final int level )
{
	return gridBlock ->
	{
		// gridBlock[ 3 ] = { timePointId, viewSetupId } is required to build the dataset path;
		// message names the *current* method (was stale "mappingFunctionBDV()")
		if ( gridBlock.length <= 3 )
			throw new RuntimeException( "datasetMappingFunctionBdvHDF5() needs an extended GridBlock long[][], where gridBlock[3][] encodes the ViewId" );

		final int timePointId = (int)gridBlock[ 3 ][ 0 ];
		final int viewSetupId = (int)gridBlock[ 3 ][ 1 ];

		// Previously returned null (TODO placeholder), which would NPE in every consumer.
		// BDV HDF5 cells path convention (bdv.img.hdf5.Util.getCellsPath):
		// "t%05d/s%02d/%d/cells" — NOTE(review): confirm against bdv-core before release.
		return String.format( "t%05d/s%02d/%d/cells", timePointId, viewSetupId, level );
	};
}

public static void writeDownsampledBlock(
final N5Writer n5,
final Function<long[][], String> viewIdToDataset, // gridBlock to dataset name (e.g. s1, s2, ...)
final Function<long[][], String> viewIdToDatasetPreviousScale, // gridblock to name of previous dataset (e.g. s0 when writing s1, s1 when writing s2, ... )
final Function<long[][], String> viewIdToDataset, // gridBlock to dataset name (e.g. for s1, s2, ...)
final Function<long[][], String> viewIdToDatasetPreviousScale, // gridblock to name of previous dataset (e.g. for s0 when writing s1, s1 when writing s2, ... )
final int[] relativeDownsampling,
final long[][] gridBlock )
{
Expand All @@ -62,8 +81,6 @@ public static void writeDownsampledBlock(

final DataType dataType = n5.getAttribute( datasetPreviousScale, DatasetAttributes.DATA_TYPE_KEY, DataType.class );
final int[] blockSize = n5.getAttribute( datasetPreviousScale, DatasetAttributes.BLOCK_SIZE_KEY, int[].class );
//final String datasetPrev = "setup" + viewId.getViewSetupId() + "/timepoint" + viewId.getTimePointId() + "/s" + (level-1);
//final String dataset = "setup" + viewId.getViewSetupId() + "/timepoint" + viewId.getTimePointId() + "/s" + (level);

if ( dataType == DataType.UINT16 )
{
Expand Down Expand Up @@ -237,9 +254,11 @@ public static void createS0Datasets(
}
}

public static void writeS0Block(
public static void resaveS0Block(
final SpimData2 data,
final N5Writer n5,
final DataType dataType,
final Function<long[][], String> gridBlockToDataset, // gridBlock to dataset name for s0
final long[][] gridBlock )
{
final ViewId viewId = new ViewId( (int)gridBlock[ 3 ][ 0 ], (int)gridBlock[ 3 ][ 1 ]);
Expand All @@ -249,8 +268,8 @@ public static void writeS0Block(
@SuppressWarnings("rawtypes")
final RandomAccessibleInterval img = imgLoader.getImage( viewId.getTimePointId() );

final DataType dataType = n5.getAttribute( "setup" + viewId.getViewSetupId(), "dataType", DataType.class );
final String dataset = "setup" + viewId.getViewSetupId() + "/timepoint" + viewId.getTimePointId() + "/s0";
//final DataType dataType = n5.getAttribute( "setup" + viewId.getViewSetupId(), "dataType", DataType.class );
final String dataset = gridBlockToDataset.apply( gridBlock );// "setup" + viewId.getViewSetupId() + "/timepoint" + viewId.getTimePointId() + "/s0";

if ( dataType == DataType.UINT16 )
{
Expand Down Expand Up @@ -279,14 +298,16 @@ else if ( dataType == DataType.FLOAT32 )
System.out.println( "ViewId " + Group.pvid( viewId ) + ", written block: offset=" + Util.printCoordinates( gridBlock[0] ) + ", dimension=" + Util.printCoordinates( gridBlock[1] ) );
}

public static void createGroups(
public static Map< Integer, DataType > createGroups(
final N5Writer n5,
final AbstractSpimData<?> data,
final Map<Integer, long[]> viewSetupIdToDimensions,
final int[] blockSize,
final int[][] downsamplingFactors,
final Compression compression )
{
final HashMap< Integer, DataType > dataTypes = new HashMap<>();

for ( final Entry< Integer, long[] > viewSetup : viewSetupIdToDimensions.entrySet() )
{
final Object type = data.getSequenceDescription().getImgLoader().getSetupImgLoader( viewSetup.getKey() ).getImageType();
Expand All @@ -301,6 +322,8 @@ else if ( FloatType.class.isInstance( type ) )
else
throw new RuntimeException("Unsupported pixel type: " + type.getClass().getCanonicalName() );

dataTypes.put( viewSetup.getKey(), dataType );

// ViewSetupId needs to contain: {"downsamplingFactors":[[1,1,1],[2,2,1]],"dataType":"uint16"}
final String n5Dataset = "setup" + viewSetup.getKey();

Expand All @@ -316,6 +339,8 @@ else if ( FloatType.class.isInstance( type ) )
n5.setAttribute( n5Dataset, "dimensions", viewSetup.getValue() );
n5.setAttribute( n5Dataset, "compression", compression );
}

return dataTypes;
}

public static int[][] mipMapInfoToDownsamplings( final Map< Integer, ExportMipmapInfo > mipmaps )
Expand Down

0 comments on commit d65b1df

Please sign in to comment.