diff --git a/src/main/java/net/preibisch/mvrecon/process/export/ExportN5API.java b/src/main/java/net/preibisch/mvrecon/process/export/ExportN5API.java index 110c5b91..b82f0382 100644 --- a/src/main/java/net/preibisch/mvrecon/process/export/ExportN5API.java +++ b/src/main/java/net/preibisch/mvrecon/process/export/ExportN5API.java @@ -24,6 +24,7 @@ import java.io.File; import java.io.IOException; +import java.net.URI; import java.util.Arrays; import java.util.Date; import java.util.HashMap; @@ -39,6 +40,8 @@ import org.janelia.saalfeldlab.n5.N5Writer; import org.janelia.saalfeldlab.n5.hdf5.N5HDF5Writer; import org.janelia.saalfeldlab.n5.imglib2.N5Utils; +import org.janelia.saalfeldlab.n5.universe.N5Factory; +import org.janelia.saalfeldlab.n5.universe.N5Factory.StorageFormat; import org.janelia.saalfeldlab.n5.zarr.N5ZarrWriter; import bdv.export.ProposeMipmaps; @@ -68,12 +71,13 @@ import net.preibisch.mvrecon.process.export.ExportTools.InstantiateViewSetupBigStitcher; import net.preibisch.mvrecon.process.interestpointregistration.pairwise.constellation.grouping.Group; import util.Grid; +import util.URITools; public class ExportN5API implements ImgExport { public enum StorageType { N5, ZARR, HDF5 } - public static String defaultPath = null; + public static String defaultPathURI = null; public static int defaultOption = 2; public static String defaultDatasetName = "fused"; public static String defaultBaseDataset = "/"; @@ -81,7 +85,7 @@ public enum StorageType { N5, ZARR, HDF5 } public static boolean defaultBDV = false; public static boolean defaultMultiRes = false; - public static String defaultXMLOut = null; + public static String defaultXMLOutURI = null; public static boolean defaultManuallyAssignViewId = false; public static int defaultTpId = 0; public static int defaultVSId = 0; @@ -103,12 +107,12 @@ public enum StorageType { N5, ZARR, HDF5 } public static int defaultBlocksizeFactorZ_H5 = 4; StorageType storageType = StorageType.values()[ defaultOption ]; - String 
path = defaultPath; + URI path = (defaultPathURI != null && defaultPathURI.trim().length() > 0 ) ? URI.create( defaultPathURI ) : null; String baseDataset = defaultBaseDataset; String datasetExtension = defaultDatasetExtension; boolean bdv = defaultBDV; - String xmlOut; + URI xmlOut; boolean manuallyAssignViewId = false; int tpId = defaultTpId; int vsId = defaultVSId; @@ -165,15 +169,25 @@ public & NativeType> boolean exportImage( try { if ( storageType == StorageType.N5 ) - driverVolumeWriter = new N5FSWriter(path); + { + if ( URITools.isFile( path ) ) + driverVolumeWriter = new N5FSWriter( URITools.removeFilePrefix( path ) ); + else + driverVolumeWriter = new N5Factory().openWriter( StorageFormat.N5, path ); // cloud support, avoid dependency hell if it is a local file + } else if ( storageType == StorageType.ZARR ) - driverVolumeWriter = new N5ZarrWriter(path); + { + if ( URITools.isFile( path ) ) + driverVolumeWriter = new N5ZarrWriter( URITools.removeFilePrefix( path ) ); + else + driverVolumeWriter = new N5Factory().openWriter( StorageFormat.ZARR, path ); // cloud support, avoid dependency hell if it is a local file + } else if ( storageType == StorageType.HDF5 ) { - final File dir = new File( path ).getParentFile(); + final File dir = new File( URITools.removeFilePrefix( path ) ).getParentFile(); if ( !dir.exists() ) dir.mkdirs(); - driverVolumeWriter = new N5HDF5Writer(path); + driverVolumeWriter = new N5HDF5Writer( URITools.removeFilePrefix( path ) ); } else throw new RuntimeException( "storageType " + storageType + " not supported." 
); @@ -185,7 +199,7 @@ else if ( storageType == StorageType.HDF5 ) } } - final T type = Views.iterable( imgInterval ).firstElement().createVariable(); + final T type = imgInterval.firstElement().createVariable(); final DataType dataType; if ( UnsignedByteType.class.isInstance( type ) ) @@ -267,7 +281,7 @@ else if ( FloatType.class.isInstance( type ) ) { try { - // TODO: the first time the XML does not exist, thus instantiate is not called + // the first time the XML does not exist, thus instantiate is not called if ( !ExportTools.writeBDVMetaData( driverVolumeWriter, storageType, @@ -354,6 +368,7 @@ else if ( FloatType.class.isInstance( type ) ) // save multiresolution pyramid (s1 ... sN) // + // TODO: use code from N5ResaveTools if ( this.downsampling != null ) { long[] previousDim = bb.dimensionsAsLongArray(); @@ -602,40 +617,23 @@ else if ( storageType == StorageType.N5 ) // final GenericDialogPlus gd = new GenericDialogPlus( "Export " + name +" using N5-API" ); - if ( defaultPath == null || defaultPath.length() == 0 ) - { - defaultPath = fusion.getSpimData().getBasePath().getAbsolutePath(); - - if ( defaultPath.endsWith( "/." 
) ) - defaultPath = defaultPath.substring( 0, defaultPath.length() - 1 ); - - if ( defaultPath.endsWith( "/./" ) ) - defaultPath = defaultPath.substring( 0, defaultPath.length() - 2 ); - - defaultPath = new File( defaultPath, defaultDatasetName + "/" + defaultDatasetName+ext ).getAbsolutePath(); - } + if ( defaultPathURI == null || defaultPathURI.toString().trim().length() == 0 ) + defaultPathURI = URITools.appendName( fusion.getSpimData().getBasePathURI(), defaultDatasetName + "/" + defaultDatasetName+ext ); if ( storageType == StorageType.HDF5 ) - PluginHelper.addSaveAsFileField( gd, name + "_file (should end with "+ext+")", defaultPath, 80 ); + PluginHelper.addSaveAsFileField( gd, name + "_file (local only, end with "+ext+")", defaultPathURI, 80 ); else - PluginHelper.addSaveAsDirectoryField( gd, name + "_dataset_path (should end with "+ext+")", defaultPath, 80 ); + PluginHelper.addSaveAsDirectoryField( gd, name + "_dataset_path (local or cloud, end with "+ext+")", defaultPathURI, 80 ); if ( bdv ) { - if ( defaultXMLOut == null ) - { - defaultXMLOut = fusion.getSpimData().getBasePath().getAbsolutePath(); + if ( defaultXMLOutURI == null ) + defaultXMLOutURI = URITools.appendName( fusion.getSpimData().getBasePathURI(), defaultDatasetName + "/dataset.xml" ); - if ( defaultXMLOut.endsWith( "/." 
) ) - defaultXMLOut = defaultXMLOut.substring( 0, defaultXMLOut.length() - 1 ); - - if ( defaultXMLOut.endsWith( "/./" ) ) - defaultXMLOut = defaultXMLOut.substring( 0, defaultXMLOut.length() - 2 ); - - defaultXMLOut = new File( defaultXMLOut, defaultDatasetName + "/dataset.xml" ).toString(); - } - - PluginHelper.addSaveAsFileField( gd, "XML_output_file", defaultXMLOut, 80 ); + if ( storageType == StorageType.HDF5 ) + PluginHelper.addSaveAsFileField( gd, "XML_output_file (local)", defaultXMLOutURI, 80 ); + else + PluginHelper.addSaveAsFileField( gd, "XML_output_file (local or cloud)", defaultXMLOutURI, 80 ); if ( fusion.getFusionGroups().size() == 1 ) { @@ -703,11 +701,51 @@ else if ( storageType == StorageType.N5 ) if ( gd.wasCanceled() ) return false; - this.path = defaultPath = gd.getNextString().trim(); + try + { + this.path = new URI( defaultPathURI = gd.getNextString().trim() ); + } + catch ( Exception e ) + { + IOFunctions.println( "Could not create URI from provided path '" + defaultPathURI+ "'. Stopping." ); + return false; + } + + if ( !URITools.isKnownScheme( this.path ) ) + { + IOFunctions.println( "You provided an unknown scheme ('" + this.path+ "'). Stopping." ); + return false; + } + + if ( storageType == StorageType.HDF5 && !URITools.isFile( this.path )) + { + IOFunctions.println( "When storing as HDF5, only local paths are supported; you specified '" + this.path+ "', which appears to not be local. Stopping." ); + return false; + } if ( bdv ) { - this.xmlOut = defaultXMLOut = gd.getNextString(); + try + { + this.xmlOut = new URI( defaultXMLOutURI = gd.getNextString().trim() );//defaultXMLOut = gd.getNextString(); + } + catch ( Exception e ) + { + IOFunctions.println( "Could not create URI from provided path '" + defaultXMLOutURI+ "'. Stopping." ); + return false; + } + + if ( !URITools.isKnownScheme( this.xmlOut ) ) + { + IOFunctions.println( "You provided an unknown scheme ('" + this.xmlOut+ "'). Stopping." 
); + return false; + } + + if ( storageType == StorageType.HDF5 && !URITools.isFile( this.xmlOut )) + { + IOFunctions.println( "When storing as HDF5, only local paths are supported; you specified '" + this.xmlOut+ "', which appears to not be local. Stopping." ); + return false; + } if ( fusion.getFusionGroups().size() == 1 ) { diff --git a/src/main/java/net/preibisch/mvrecon/process/export/ExportTools.java b/src/main/java/net/preibisch/mvrecon/process/export/ExportTools.java index b2e0f8fa..43aa3048 100644 --- a/src/main/java/net/preibisch/mvrecon/process/export/ExportTools.java +++ b/src/main/java/net/preibisch/mvrecon/process/export/ExportTools.java @@ -24,6 +24,7 @@ import java.io.File; import java.io.IOException; +import java.net.URI; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; @@ -68,8 +69,16 @@ import net.imglib2.util.Pair; import net.imglib2.util.ValuePair; import net.preibisch.legacy.io.IOFunctions; +import net.preibisch.mvrecon.fiji.spimdata.SpimData2; +import net.preibisch.mvrecon.fiji.spimdata.XmlIoSpimData2; +import net.preibisch.mvrecon.fiji.spimdata.boundingbox.BoundingBoxes; +import net.preibisch.mvrecon.fiji.spimdata.intensityadjust.IntensityAdjustments; +import net.preibisch.mvrecon.fiji.spimdata.interestpoints.ViewInterestPoints; +import net.preibisch.mvrecon.fiji.spimdata.pointspreadfunctions.PointSpreadFunctions; +import net.preibisch.mvrecon.fiji.spimdata.stitchingresults.StitchingResults; import net.preibisch.mvrecon.process.export.ExportN5API.StorageType; import net.preibisch.mvrecon.process.interestpointregistration.pairwise.constellation.grouping.Group; +import util.URITools; public class ExportTools { @@ -91,8 +100,8 @@ public static boolean writeBDVMetaData( final int[] blockSize, final int[][] downsamplings, final ViewId viewId, - final String n5Path, - final String xmlOutPathString, + final URI n5PathURI, + final URI xmlOutPathURI, final InstantiateViewSetup instantiateViewSetup ) throws 
SpimDataException, IOException { System.out.println( "Writing BDV-metadata ... " ); @@ -100,20 +109,14 @@ public static boolean writeBDVMetaData( //final String xmlPath = null; if ( StorageType.N5.equals(storageType) ) { - final File xmlOutPath; - if ( xmlOutPathString == null ) - xmlOutPath = new File( new File( n5Path ).getParent(), "dataset.xml" ); - else - xmlOutPath = new File( xmlOutPathString ); - - System.out.println( "XML: " + xmlOutPath.getAbsolutePath() ); + System.out.println( "XML: " + xmlOutPathURI ); final Pair exists = writeSpimData( viewId, storageType, dimensions, - n5Path, - xmlOutPath, + n5PathURI, + xmlOutPathURI, instantiateViewSetup ); if ( exists == null ) @@ -167,20 +170,14 @@ public static boolean writeBDVMetaData( } else if ( StorageType.HDF5.equals(storageType) ) { - final File xmlOutPath; - if ( xmlOutPathString == null ) - xmlOutPath = new File( new File( n5Path ).getParent(), "dataset.xml" ); - else - xmlOutPath = new File( xmlOutPathString ); - - System.out.println( "XML: " + xmlOutPath.getAbsolutePath() ); + System.out.println( "XML: " + xmlOutPathURI ); final Pair exists = writeSpimData( viewId, storageType, dimensions, - n5Path, - xmlOutPath, + n5PathURI, + xmlOutPathURI, instantiateViewSetup ); if ( exists == null ) @@ -248,20 +245,29 @@ public static Pair writeSpimData( final ViewId viewId, final StorageType storageType, final long[] dimensions, - final String n5Path, - final File xmlOutPath, + final URI n5PathURI, + final URI xmlOutPathURI, final InstantiateViewSetup instantiateViewSetup ) throws SpimDataException { - if ( xmlOutPath.exists() ) + SpimData2 existingSpimData; + + try + { + existingSpimData = new XmlIoSpimData2().load( xmlOutPathURI ); + } + catch (Exception e ) + { + existingSpimData = null; + } + + if ( existingSpimData != null ) //xmlOutPath.exists() ) { System.out.println( "XML exists. 
Parsing and adding."); - final XmlIoSpimData io = new XmlIoSpimData(); - final SpimData spimData = io.load( xmlOutPath.getAbsolutePath() ); boolean tpExists = false; boolean viewSetupExists = false; - for ( final ViewDescription viewId2 : spimData.getSequenceDescription().getViewDescriptions().values() ) + for ( final ViewDescription viewId2 : existingSpimData.getSequenceDescription().getViewDescriptions().values() ) { /* // uncommented this because if you make a second timepoint and do not add missing views, they all exist already @@ -288,37 +294,47 @@ public static Pair writeSpimData( } } - final List setups = new ArrayList<>( spimData.getSequenceDescription().getViewSetups().values() ); + final List setups = new ArrayList<>( existingSpimData.getSequenceDescription().getViewSetups().values() ); if ( !viewSetupExists ) setups.add( instantiateViewSetup.instantiate( viewId, tpExists, new FinalDimensions( dimensions ), setups ) ); final TimePoints timepoints; if ( !tpExists) { - final List tps = new ArrayList<>(spimData.getSequenceDescription().getTimePoints().getTimePointsOrdered()); + final List tps = new ArrayList<>(existingSpimData.getSequenceDescription().getTimePoints().getTimePointsOrdered()); tps.add(new TimePoint(viewId.getTimePointId())); timepoints = new TimePoints(tps); } else { - timepoints = spimData.getSequenceDescription().getTimePoints(); + timepoints = existingSpimData.getSequenceDescription().getTimePoints(); } - final Map registrations = spimData.getViewRegistrations().getViewRegistrations(); + final Map registrations = existingSpimData.getViewRegistrations().getViewRegistrations(); registrations.put( viewId, new ViewRegistration( viewId.getTimePointId(), viewId.getViewSetupId() ) ); final ViewRegistrations viewRegistrations = new ViewRegistrations( registrations ); final SequenceDescription sequence = new SequenceDescription(timepoints, setups, null); if ( StorageType.N5.equals(storageType) ) - sequence.setImgLoader( new N5ImageLoader( new File( 
n5Path ), sequence) ); + sequence.setImgLoader( new N5ImageLoader( n5PathURI, sequence) ); else if ( StorageType.HDF5.equals(storageType) ) - sequence.setImgLoader( new Hdf5ImageLoader( new File( n5Path ), null, sequence) ); + sequence.setImgLoader( new Hdf5ImageLoader( new File( URITools.removeFilePrefix( n5PathURI ) ), null, sequence) ); else throw new RuntimeException( storageType + " not supported." ); - final SpimData spimDataNew = new SpimData( xmlOutPath.getParentFile(), sequence, viewRegistrations); - new XmlIoSpimData().save( spimDataNew, xmlOutPath.getAbsolutePath() ); + final SpimData2 spimDataNew = + new SpimData2( + existingSpimData.getBasePathURI(), + sequence, + viewRegistrations, + existingSpimData.getViewInterestPoints(), + existingSpimData.getBoundingBoxes(), + existingSpimData.getPointSpreadFunctions(), + existingSpimData.getStitchingResults(), + existingSpimData.getIntensityAdjustments() ); + + new XmlIoSpimData2().save( spimDataNew, existingSpimData.getBasePathURI() ); return new ValuePair<>(tpExists, viewSetupExists); } @@ -349,15 +365,15 @@ else if ( StorageType.HDF5.equals(storageType) ) final SequenceDescription sequence = new SequenceDescription(timepoints, setups, null); if ( StorageType.N5.equals(storageType) ) - sequence.setImgLoader( new N5ImageLoader( new File( n5Path ), sequence) ); + sequence.setImgLoader( new N5ImageLoader( n5PathURI, sequence) ); else if ( StorageType.HDF5.equals(storageType) ) - sequence.setImgLoader( new Hdf5ImageLoader( new File( n5Path ), null, sequence) ); + sequence.setImgLoader( new Hdf5ImageLoader( new File( URITools.removeFilePrefix( n5PathURI ) ), null, sequence) ); else throw new RuntimeException( storageType + " not supported." 
); - final SpimData spimData = new SpimData( xmlOutPath.getParentFile(), sequence, viewRegistrations); + final SpimData2 spimData = new SpimData2( xmlOutPathURI, sequence, viewRegistrations, new ViewInterestPoints(), new BoundingBoxes(), new PointSpreadFunctions(), new StitchingResults(), new IntensityAdjustments() ); - new XmlIoSpimData().save( spimData, xmlOutPath.getAbsolutePath() ); + new XmlIoSpimData2().save( spimData, xmlOutPathURI ); return new ValuePair<>(false, false); }