Skip to content

Commit

Permalink
SpimData2 now has a constructor that takes a URI
Browse files Browse the repository at this point in the history
  • Loading branch information
StephanPreibisch committed Aug 28, 2024
1 parent cc6be02 commit 96ad3cc
Show file tree
Hide file tree
Showing 23 changed files with 124 additions and 1,473 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -119,7 +119,7 @@ public SpimData2 createDataset( final String xmlFileName )
//viewInterestPoints.createViewInterestPoints( sequenceDescription.getViewDescriptions() );

// finally create the SpimData itself based on the sequence description and the view registration
final SpimData2 spimData = new SpimData2( meta.getDir(), sequenceDescription, viewRegistrations, viewInterestPoints, new BoundingBoxes(), new PointSpreadFunctions(), new StitchingResults(), new IntensityAdjustments() );
final SpimData2 spimData = new SpimData2( meta.getDir().toURI(), sequenceDescription, viewRegistrations, viewInterestPoints, new BoundingBoxes(), new PointSpreadFunctions(), new StitchingResults(), new IntensityAdjustments() );

return spimData;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -645,7 +645,7 @@ else if ( aInfoI.axis == 2 )
final ViewInterestPoints viewInterestPoints = new ViewInterestPoints();
//viewInterestPoints.createViewInterestPoints( sd.getViewDescriptions() );

SpimData2 data = new SpimData2( new File("/"), sd, vrs, viewInterestPoints, new BoundingBoxes(), new PointSpreadFunctions(), new StitchingResults(), new IntensityAdjustments() );
SpimData2 data = new SpimData2( new File("/").toURI(), sd, vrs, viewInterestPoints, new BoundingBoxes(), new PointSpreadFunctions(), new StitchingResults(), new IntensityAdjustments() );
return data;
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -148,7 +148,7 @@ public SpimData2 createDataset( final String xmlFileName )
//viewInterestPoints.createViewInterestPoints( sequenceDescription.getViewDescriptions() );

// finally create the SpimData itself based on the sequence description and the view registration
final SpimData2 spimData = new SpimData2( new File( directory ), sequenceDescription, viewRegistrations, viewInterestPoints, new BoundingBoxes(), new PointSpreadFunctions(), new StitchingResults(), new IntensityAdjustments() );
final SpimData2 spimData = new SpimData2( new File( directory ).toURI(), sequenceDescription, viewRegistrations, viewInterestPoints, new BoundingBoxes(), new PointSpreadFunctions(), new StitchingResults(), new IntensityAdjustments() );

if ( meta.applyAxis() )
Apply_Transformation.applyAxis( spimData );
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -123,7 +123,7 @@ public SpimData2 createDataset( final String xmlFileName )
//viewInterestPoints.createViewInterestPoints( sequenceDescription.getViewDescriptions() );

// finally create the SpimData itself based on the sequence description and the view registration
final SpimData2 spimData = new SpimData2( new File( directory ), sequenceDescription, viewRegistrations, viewInterestPoints, new BoundingBoxes(), new PointSpreadFunctions(), new StitchingResults(), new IntensityAdjustments() );
final SpimData2 spimData = new SpimData2( new File( directory ).toURI(), sequenceDescription, viewRegistrations, viewInterestPoints, new BoundingBoxes(), new PointSpreadFunctions(), new StitchingResults(), new IntensityAdjustments() );

if ( reader.applyAxis() )
Apply_Transformation.applyAxis( spimData );
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -151,7 +151,7 @@ public SpimData2 createDataset( final String xmlFileName )
//viewInterestPoints.createViewInterestPoints( sequenceDescription.getViewDescriptions() );

// finally create the SpimData itself based on the sequence description and the view registration
final SpimData2 spimData = new SpimData2( new File( directory ), sequenceDescription, viewRegistrations, viewInterestPoints, new BoundingBoxes(), new PointSpreadFunctions(), new StitchingResults(), new IntensityAdjustments() );
final SpimData2 spimData = new SpimData2( new File( directory ).toURI(), sequenceDescription, viewRegistrations, viewInterestPoints, new BoundingBoxes(), new PointSpreadFunctions(), new StitchingResults(), new IntensityAdjustments() );

return spimData;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -331,7 +331,7 @@ public SpimData2 createDataset( final String xmlFileName )
//viewInterestPoints.createViewInterestPoints( sequenceDescription.getViewDescriptions() );

// finally create the SpimData itself based on the sequence description and the view registration
final SpimData2 spimData = new SpimData2( new File( directory ), sequenceDescription, viewRegistrations, viewInterestPoints, new BoundingBoxes(), new PointSpreadFunctions(), new StitchingResults(), new IntensityAdjustments() );
final SpimData2 spimData = new SpimData2( new File( directory ).toURI(), sequenceDescription, viewRegistrations, viewInterestPoints, new BoundingBoxes(), new PointSpreadFunctions(), new StitchingResults(), new IntensityAdjustments() );

return spimData;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -73,17 +73,15 @@
import net.preibisch.mvrecon.process.deconvolution.iteration.mul.ComputeBlockMulThreadCPUFactory;
import net.preibisch.mvrecon.process.deconvolution.iteration.sequential.ComputeBlockSeqThreadCPUFactory;
import net.preibisch.mvrecon.process.deconvolution.iteration.sequential.ComputeBlockSeqThreadCUDAFactory;
import net.preibisch.mvrecon.process.export.AppendSpimData2HDF5;
import net.preibisch.mvrecon.process.downsampling.DownsampleTools;
import net.preibisch.mvrecon.process.export.DisplayImage;
import net.preibisch.mvrecon.process.export.ExportSpimData2HDF5;
import net.preibisch.mvrecon.process.export.ExportSpimData2TIFF;
import net.preibisch.mvrecon.process.export.ExportN5API;
import net.preibisch.mvrecon.process.export.ImgExport;
import net.preibisch.mvrecon.process.export.Save3dTIFF;
import net.preibisch.mvrecon.process.fusion.FusionTools;
import net.preibisch.mvrecon.process.fusion.FusionTools.ImgDataType;
import net.preibisch.mvrecon.process.fusion.intensityadjust.IntensityAdjustmentTools;
import net.preibisch.mvrecon.process.fusion.transformed.TransformVirtual;
import net.preibisch.mvrecon.process.downsampling.DownsampleTools;
import net.preibisch.mvrecon.process.interestpointregistration.pairwise.constellation.grouping.Group;

public class DeconvolutionGUI implements FusionExportInterface
Expand All @@ -97,9 +95,7 @@ public class DeconvolutionGUI implements FusionExportInterface

staticImgExportAlgorithms.add( new DisplayImage() );
staticImgExportAlgorithms.add( new Save3dTIFF( null ) );
staticImgExportAlgorithms.add( new ExportSpimData2TIFF() );
staticImgExportAlgorithms.add( new ExportSpimData2HDF5() );
staticImgExportAlgorithms.add( new AppendSpimData2HDF5() );
staticImgExportAlgorithms.add( new ExportN5API() );

imgExportDescriptions = new String[ staticImgExportAlgorithms.size() ];

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,14 +40,11 @@
import net.preibisch.mvrecon.fiji.spimdata.SpimData2;
import net.preibisch.mvrecon.fiji.spimdata.boundingbox.BoundingBox;
import net.preibisch.mvrecon.process.boundingbox.BoundingBoxTools;
import net.preibisch.mvrecon.process.export.AppendSpimData2HDF5;
import net.preibisch.mvrecon.process.downsampling.DownsampleTools;
import net.preibisch.mvrecon.process.export.DisplayImage;
import net.preibisch.mvrecon.process.export.ExportSpimData2HDF5;
import net.preibisch.mvrecon.process.export.ExportSpimData2TIFF;
import net.preibisch.mvrecon.process.export.ImgExport;
import net.preibisch.mvrecon.process.export.Save3dTIFF;
import net.preibisch.mvrecon.process.fusion.transformed.TransformVirtual;
import net.preibisch.mvrecon.process.downsampling.DownsampleTools;
import net.preibisch.mvrecon.process.interestpointregistration.TransformationTools;
import net.preibisch.mvrecon.process.interestpointregistration.pairwise.constellation.grouping.Group;

Expand Down Expand Up @@ -89,9 +86,6 @@ public class QualityGUI implements FusionExportInterface

staticImgExportAlgorithms.add( new DisplayImage() );
staticImgExportAlgorithms.add( new Save3dTIFF( null ) );
staticImgExportAlgorithms.add( new ExportSpimData2TIFF() );
staticImgExportAlgorithms.add( new ExportSpimData2HDF5() );
staticImgExportAlgorithms.add( new AppendSpimData2HDF5() );

imgExportDescriptions = new String[ staticImgExportAlgorithms.size() ];

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -267,7 +267,7 @@ public static SpimData2 reduceSpimData2( final SpimData2 oldSpimData, final List
//TODO: copy PSFs

final SpimData2 newSpimData = new SpimData2(
oldSpimData.getBasePath(),
oldSpimData.getBasePathURI(),
sequenceDescription,
viewRegistrations,
viewsInterestPoints,
Expand All @@ -288,7 +288,7 @@ public static Pair< SpimData2, List< String > > createXMLObject(
{
// Re-assemble a new SpimData object containing the subset of viewsetups and timepoints selected
final List< String > filesToCopy = new ArrayList< String >();
final SpimData2 newSpimData = Resave_TIFF.assemblePartialSpimData2( spimData, viewIds, params.seqFile.getParentFile(), filesToCopy );
final SpimData2 newSpimData = Resave_TIFF.assemblePartialSpimData2( spimData, viewIds, params.seqFile.getParentFile().toURI(), filesToCopy );
final ArrayList< Partition > partitions = Generic_Resave_HDF5.getPartitions( newSpimData, params );

final Hdf5ImageLoader hdf5Loader;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,18 +28,31 @@
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import java.util.stream.Collectors;

import org.janelia.saalfeldlab.n5.Compression;
import org.janelia.saalfeldlab.n5.DataType;
import org.janelia.saalfeldlab.n5.N5Writer;
import org.janelia.saalfeldlab.n5.universe.N5Factory;

import bdv.export.ExportMipmapInfo;
import bdv.export.ProgressWriter;
import bdv.export.n5.WriteSequenceToN5;
import bdv.img.n5.N5ImageLoader;
import ij.ImageJ;
import ij.plugin.PlugIn;
import mpicbg.spim.data.SpimData;
import mpicbg.spim.data.SpimDataException;
import mpicbg.spim.data.generic.AbstractSpimData;
import mpicbg.spim.data.sequence.TimePoint;
import mpicbg.spim.data.sequence.ViewId;
import mpicbg.spim.data.sequence.ViewSetup;
import net.imglib2.type.numeric.integer.UnsignedByteType;
import net.imglib2.type.numeric.integer.UnsignedShortType;
import net.imglib2.type.numeric.real.FloatType;
import net.preibisch.mvrecon.fiji.plugin.queryXML.LoadParseQueryXML;
import net.preibisch.mvrecon.fiji.spimdata.SpimData2;
import net.preibisch.mvrecon.fiji.spimdata.XmlIoSpimData2;
Expand Down Expand Up @@ -120,6 +133,7 @@ public static void resaveN5(
}
else if ( URITools.isS3( n5Params.n5URI ) || URITools.isGC( n5Params.n5URI ) )
{
final N5Writer = new N5Factory().openWriter( URITools.appendName( baseDir, baseN5 ) ); // cloud support, avoid dependency hell if it is a local file
// TODO: save to cloud
}

Expand All @@ -134,6 +148,44 @@ else if ( URITools.isS3( n5Params.n5URI ) || URITools.isGC( n5Params.n5URI ) )
progressWriter.out().println( new Date( System.currentTimeMillis() ) + ": Finished saving " + n5Params.n5URI + " and " + n5Params.xmlURI );
}

/**
 * Creates one N5 group per view setup and writes the export metadata
 * (downsampling factors, data type, block size, dimensions, compression)
 * as attributes on that group.
 *
 * @param n5 writer for the target N5 container
 * @param data SpimData whose image loader provides the per-setup pixel type
 * @param blockSize written verbatim as the "blockSize" attribute
 * @param downsamplingFactors written verbatim as the "downsamplingFactors" attribute
 * @param compression written verbatim as the "compression" attribute
 * @param viewSetupIdToDimensions map of view setup id to full-resolution dimensions
 * @throws RuntimeException if a setup's pixel type is not uint8, uint16 or float32
 */
public static void createDatasets(
		final N5Writer n5,
		final AbstractSpimData<?> data,
		final int[] blockSize,
		final int[][] downsamplingFactors,
		final Compression compression,
		final Map<Integer, long[]> viewSetupIdToDimensions )
{
	for ( final Entry<Integer, long[]> entry : viewSetupIdToDimensions.entrySet() )
	{
		final Integer setupId = entry.getKey();

		// the setup's pixel type determines the N5 data type
		final Object pixelType = data.getSequenceDescription().getImgLoader().getSetupImgLoader( setupId ).getImageType();

		final DataType dataType;
		if ( pixelType instanceof UnsignedShortType )
			dataType = DataType.UINT16;
		else if ( pixelType instanceof UnsignedByteType )
			dataType = DataType.UINT8;
		else if ( pixelType instanceof FloatType )
			dataType = DataType.FLOAT32;
		else
			throw new RuntimeException("Unsupported pixel type: " + pixelType.getClass().getCanonicalName() );

		// TODO: ViewSetupId needs to contain: {"downsamplingFactors":[[1,1,1],[2,2,1]],"dataType":"uint16"}
		final String groupName = "setup" + setupId;

		System.out.println( "Creating group: " + "'setup" + setupId + "'" );
		n5.createGroup( groupName );

		System.out.println( "setting attributes for '" + "setup" + setupId + "'");
		n5.setAttribute( groupName, "downsamplingFactors", downsamplingFactors );
		n5.setAttribute( groupName, "dataType", dataType );
		n5.setAttribute( groupName, "blockSize", blockSize );
		n5.setAttribute( groupName, "dimensions", entry.getValue() );
		n5.setAttribute( groupName, "compression", compression );
	}
}

public static void main(String[] args)
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -324,7 +324,7 @@ public static Pair< SpimData2, List< String > > createXMLObject( final SpimData2

// Re-assemble a new SpimData object containing the subset of viewsetups and timepoints selected
final List< String > filesToCopy = new ArrayList< String >();
final SpimData2 newSpimData = assemblePartialSpimData2( spimData, viewIds, new File( URITools.removeFilePrefix( params.getXMLPath() ) ).getParentFile(), filesToCopy );
final SpimData2 newSpimData = assemblePartialSpimData2( spimData, viewIds, new File( URITools.removeFilePrefix( params.getXMLPath() ) ).getParentFile().toURI(), filesToCopy );

final StackImgLoaderIJ imgLoader = new StackImgLoaderIJ(
new File( URITools.removeFilePrefix( params.getXMLPath() ) ).getParentFile(),
Expand Down Expand Up @@ -393,7 +393,7 @@ public static void copyFolder( final File src, final File dest, final List< Stri
* @param basePath - the base path
* @return new SpimData
*/
public static SpimData2 assemblePartialSpimData2( final SpimData2 spimData, final List< ? extends ViewId > viewIds, final File basePath, final List< String > filesToCopy )
public static SpimData2 assemblePartialSpimData2( final SpimData2 spimData, final List< ? extends ViewId > viewIds, final URI basePath, final List< String > filesToCopy )
{
final TimePoints timepoints;

Expand Down
13 changes: 9 additions & 4 deletions src/main/java/net/preibisch/mvrecon/fiji/spimdata/SpimData2.java
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,8 @@ public class SpimData2 extends SpimData
private IntensityAdjustments intensityAdjustments;
public boolean gridMoveRequested = false;

// TODO: only for compatibility with depending packages, remove at some point
/*
// only for compatibility with depending packages, remove at some point
public SpimData2(
final File basePath,
final SequenceDescription sequenceDescription,
Expand All @@ -85,9 +86,10 @@ public SpimData2(
{
this( basePath, sequenceDescription, viewRegistrations, viewsInterestPoints, boundingBoxes, pointSpreadFunctions, stitchingResults, new IntensityAdjustments() );
}
*/

public SpimData2(
final File basePath,
final URI basePathUri,
final SequenceDescription sequenceDescription,
final ViewRegistrations viewRegistrations,
final ViewInterestPoints viewsInterestPoints,
Expand All @@ -96,7 +98,10 @@ public SpimData2(
final StitchingResults stitchingResults,
final IntensityAdjustments intensityAdjustments )
{
super( basePath, sequenceDescription, viewRegistrations );
super( null, sequenceDescription, viewRegistrations );

// work-around for the super class not having a constructor for URI
this.setBasePathURI(basePathUri);

this.viewsInterestPoints = viewsInterestPoints;
this.boundingBoxes = boundingBoxes;
Expand Down Expand Up @@ -523,7 +528,7 @@ public static SpimData2 convert( final SpimData data1 )
final StitchingResults sr = new StitchingResults();
final IntensityAdjustments ia = new IntensityAdjustments();

return new SpimData2( data1.getBasePath(), s, vr, vipl, bb, psfs, sr, ia );
return new SpimData2( data1.getBasePathURI(), s, vr, vipl, bb, psfs, sr, ia );
}

/**
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,40 +22,38 @@
*/
package net.preibisch.mvrecon.fiji.spimdata.imgloaders;

import java.io.File;
import java.util.Date;

import ij.ImagePlus;
import ij.ImageStack;
import ij.gui.GenericDialog;
import ij.io.Opener;
import ij.process.ImageProcessor;

import java.io.File;
import java.util.Date;

import mpicbg.spim.data.generic.sequence.AbstractSequenceDescription;
import mpicbg.spim.data.sequence.ViewId;
import net.imglib2.Cursor;
import net.imglib2.RandomAccessibleInterval;
import net.imglib2.converter.RealUnsignedShortConverter;
import net.imglib2.img.Img;
import net.imglib2.img.ImgFactory;
import net.imglib2.img.array.ArrayImg;
import net.imglib2.img.display.imagej.ImageJFunctions;
import net.imglib2.img.planar.PlanarImg;
import net.imglib2.type.NativeType;
import net.imglib2.type.numeric.RealType;
import net.imglib2.type.numeric.integer.UnsignedShortType;
import net.imglib2.type.numeric.real.FloatType;
import net.imglib2.view.Views;
import net.preibisch.legacy.io.IOFunctions;
import net.preibisch.mvrecon.fiji.datasetmanager.StackListImageJ;
import net.preibisch.mvrecon.fiji.plugin.resave.Generic_Resave_HDF5;
import net.preibisch.mvrecon.fiji.plugin.resave.Generic_Resave_HDF5.Parameters;
import net.preibisch.mvrecon.fiji.plugin.resave.Generic_Resave_HDF5.ParametersResaveHDF5;
import net.preibisch.mvrecon.fiji.plugin.util.GUIHelper;
import net.preibisch.mvrecon.process.export.ExportSpimData2HDF5;
import net.preibisch.mvrecon.process.fusion.FusionTools;
import util.ImgLib2Tools;

public class LegacyStackImgLoaderIJ extends LegacyStackImgLoader
{
Parameters params = null;
ParametersResaveHDF5 params = null;

public LegacyStackImgLoaderIJ(
final File path, final String fileNamePattern,
Expand Down Expand Up @@ -132,7 +130,7 @@ public RandomAccessibleInterval< UnsignedShortType > getImage( final ViewId view
if ( params == null )
return null;

final double[] minmax = ExportSpimData2HDF5.updateAndGetMinMax( ImageJFunctions.wrapFloat( imp ), params );
final double[] minmax = updateAndGetMinMax( ImageJFunctions.wrapFloat( imp ), params );
converter = new RealUnsignedShortConverter< FloatType >( minmax[ 0 ], minmax[ 1 ] );
}
else
Expand Down Expand Up @@ -246,7 +244,7 @@ public String toString()
return new StackListImageJ().getTitle() + ", ImgFactory=" + getImgFactory().getClass().getSimpleName();
}

protected static Parameters queryParameters()
protected static ParametersResaveHDF5 queryParameters()
{
final GenericDialog gd = new GenericDialog( "Opening 32bit TIFF as 16bit" );

Expand Down Expand Up @@ -288,6 +286,38 @@ protected static Parameters queryParameters()
Generic_Resave_HDF5.defaultMin = Generic_Resave_HDF5.defaultMax = Double.NaN;
}

return new Parameters( false, null, null, null, null, false, false, 0, 0, false, 0, Generic_Resave_HDF5.defaultConvertChoice, Generic_Resave_HDF5.defaultMin, Generic_Resave_HDF5.defaultMax );
return new ParametersResaveHDF5( false, null, null, null, null, false, false, 0, 0, false, 0, Generic_Resave_HDF5.defaultConvertChoice, Generic_Resave_HDF5.defaultMin, Generic_Resave_HDF5.defaultMax );
}

/**
 * Determines the [min, max] intensity range used when converting 32-bit float
 * data to 16 bit. The range is taken from {@code params} when a manual range
 * is selected and both bounds are defined; otherwise it is computed from the
 * image (with ~10% padding) and, if {@code params} is non-null, stored back
 * into it so subsequent calls reuse the same range.
 *
 * @param img the image to scan when the range must be computed
 * @param params conversion parameters; may be {@code null}, in which case the
 *               range is always computed from the image and not cached
 * @return a two-element array { min, max }
 */
public static < T extends RealType< T > > double[] updateAndGetMinMax( final RandomAccessibleInterval< T > img, final ParametersResaveHDF5 params )
{
	double min, max;

	// Recompute from the image when no params are given, auto-conversion is
	// selected (choice 0), or either bound is undefined.
	// BUGFIX: the second isNaN check previously tested getMin() twice, so an
	// unset max (NaN) slipped through and was used as the conversion bound.
	if ( params == null || params.getConvertChoice() == 0 || Double.isNaN( params.getMin() ) || Double.isNaN( params.getMax() ) )
	{
		final float[] minmax = FusionTools.minMax( img );
		min = minmax[ 0 ];
		max = minmax[ 1 ];

		// pad the range by 10% of the mid intensity, clamping min at 0
		// NOTE(review): max is padded using the already-adjusted min, so the
		// two paddings differ slightly — confirm this asymmetry is intended
		min = Math.max( 0, min - ((min+max)/2.0) * 0.1 );
		max = max + ((min+max)/2.0) * 0.1;

		if ( params != null )
		{
			// cache the computed range so later conversions are consistent
			params.setMin( min );
			params.setMax( max );
		}
	}
	else
	{
		min = params.getMin();
		max = params.getMax();
	}

	IOFunctions.println( "Min intensity for 16bit conversion: " + min );
	IOFunctions.println( "Max intensity for 16bit conversion: " + max );

	return new double[]{ min, max };
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
package net.preibisch.mvrecon.headless.definedataset;

import java.io.File;
import java.net.URI;
import java.util.ArrayList;

import mpicbg.spim.data.registration.ViewRegistrations;
Expand Down Expand Up @@ -122,7 +123,7 @@ public static SpimData2 createDataset( final String cziFirstFile, final DefineDa

// finally create the SpimData itself based on the sequence description and the view registration
final SpimData2 spimData = new SpimData2(
new File( cziFile.getParent() ),
new File( cziFile.getParent() ).toURI(),
sequenceDescription,
viewRegistrations,
viewInterestPoints,
Expand Down
Loading

0 comments on commit 96ad3cc

Please sign in to comment.