@@ -39,6 +39,7 @@
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;

import mpicbg.spim.data.sequence.VoxelDimensions;
import org.bigdataviewer.n5.N5CloudImageLoader;
import org.janelia.saalfeldlab.n5.Compression;
import org.janelia.saalfeldlab.n5.DataType;
@@ -188,13 +189,15 @@ else if ( n5Params.format == StorageFormat.HDF5 )
}
else
{
VoxelDimensions vx = data.getSequenceDescription().getViewDescription( viewId ).getViewSetup().getVoxelSize();
// 5d OME-ZARR with dimension=1 in c and t
mrInfo = N5ApiTools.setupBdvDatasetsOMEZARR(
n5Writer,
viewId,
dataTypes.get( viewId.getViewSetupId() ),
dimensions.get( viewId.getViewSetupId() ),
//data.getSequenceDescription().getViewDescription( viewId ).getViewSetup().getVoxelSize().dimensionsAsDoubleArray(),
vx.dimensionsAsDoubleArray(), // resolutionS0
vx.unit(),
compression,
blockSize,
downsamplings);
@@ -39,9 +39,7 @@
import org.janelia.saalfeldlab.n5.universe.metadata.ome.ngff.v04.coordinateTransformations.ScaleCoordinateTransformation;
import org.janelia.saalfeldlab.n5.universe.metadata.ome.ngff.v04.coordinateTransformations.TranslationCoordinateTransformation;

import mpicbg.spim.data.sequence.VoxelDimensions;
import net.imglib2.realtransform.AffineTransform3D;
import net.preibisch.mvrecon.process.interestpointregistration.TransformationTools;
import util.URITools;

public class OMEZarrAttibutes
@@ -61,12 +59,6 @@ public static OmeNgffMultiScaleMetadata[] createOMEZarrMetadata(
final Function<Integer, String> levelToName,
final Function<Integer, AffineTransform3D > levelToMipmapTransform )
{
// TODO: make sure the unit is supported by OME-ZARR, if not replace it because otherwise readers will fail
// TODO: e.g. um -> micrometer
// TODO: etc.
// TODO: can you find out what the correct unit for 'unit unknown' is, because that is what I would replace it with, otherwise micrometer
// TOOD: then please also change in TransformationTools.computeCalibration

final OmeNgffMultiScaleMetadata[] meta = new OmeNgffMultiScaleMetadata[ 1 ];

// dataset name and co
@@ -85,9 +77,10 @@
if ( n >= 4 )
axes[ index++ ] = new Axis( "channel", "c", null );

axes[ index++ ] = new Axis( "space", "z", unitXYZ );
axes[ index++ ] = new Axis( "space", "y", unitXYZ );
axes[ index++ ] = new Axis( "space", "x", unitXYZ );
String unit = adaptSpatialUnit( unitXYZ );
axes[ index ] = new Axis( "space", "z", unit );
axes[ index + 1 ] = new Axis( "space", "y", unit );
axes[ index + 2 ] = new Axis( "space", "x", unit );

// multiresolution-pyramid
// TODO: seem to be in XYZCT order (but in the file it seems reversed)
@@ -106,8 +99,8 @@ public static OmeNgffMultiScaleMetadata[] createOMEZarrMetadata(

for ( int d = 0; d < 3; ++d )
{
translation[ d ] = m.getTranslation()[ d ];
scale[ d ] = resolutionS0[ d ] * m.get( d, d );
translation[ d ] = resolutionS0[d] * m.getTranslation()[ d ];
scale[ d ] = resolutionS0[d] * m.get( d, d );
}
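// Illustrative note (not part of the PR): with resolutionS0 = {0.4, 0.4, 1.2} and a mipmap
// transform whose diagonal is {2, 2, 2} and translation {0.5, 0.5, 0.5}, this loop yields
// scale = {0.8, 0.8, 2.4} and translation = {0.2, 0.2, 0.6}, i.e. both the scale and the
// offset of each level are now expressed in the physical unit of resolutionS0.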

// if 4d and 5d, add 1's for C and T
@@ -137,27 +130,74 @@ public static OmeNgffMultiScaleMetadata[] createOMEZarrMetadata(
return meta;
}


// Note: TransformationTools.computeAverageCalibration does this reasonably correctly
/*
public static double[] getResolutionS0( final VoxelDimensions vx, final double anisoF, final double downsamplingF )
public static double[] getResolutionS0( final double[] cal, final double anisoF, final double downsamplingF )
{
final double[] resolutionS0 = vx.dimensionsAsDoubleArray();
double[] resolutionS0 = Arrays.copyOf( cal, cal.length );

// not preserving anisotropy
if ( Double.isNaN( anisoF ) )
resolutionS0[ 2 ] = resolutionS0[ 0 ];
if ( !Double.isNaN( anisoF ) ) {
// preserving anisotropy
resolutionS0[2] = cal[2] * anisoF;
}

// downsampling
if ( !Double.isNaN( downsamplingF ) )
Arrays.setAll( resolutionS0, d -> resolutionS0[ d ] * downsamplingF );

// TODO: this is a hack so the export downsampling pyramid is working
Arrays.setAll( resolutionS0, d -> 1 );

return resolutionS0;
}
*/
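
// Worked example (illustrative, assuming the setAll-to-1 hack above is dropped in the new version):
// for cal = {0.4, 0.4, 1.0}, anisoF = 2.0 and downsamplingF = 2.0, the z entry becomes
// 1.0 * 2.0 = 2.0 (anisotropy preserved) and all entries are then scaled by 2.0, so
// getResolutionS0 returns {0.8, 0.8, 4.0}; with anisoF = NaN and downsamplingF = NaN
// the calibration is returned unchanged.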

/**
* Adapts various spatial unit names to the unit names supported by Neuroglancer.
* The OME-NGFF spec does not restrict unit names, but Neuroglancer only supports a limited set
* (essentially metric units ending in "meter" plus the US customary length units).
* @param unit the input spatial unit name (may be null)
* @return a Neuroglancer-compatible unit name
*/
private static String adaptSpatialUnit(String unit)
{
if ( unit == null )
return "micrometer";

switch ( unit.toLowerCase() ) {
case "angstrom":
case "ångström":
case "ångströms":
return "angstrom";
case "nm":
case "nanometers":
case "nanometer":
return "nanometer";
case "mm":
case "millimeters":
case "millimeter":
return "millimeter";
case "m":
case "meters":
case "meter":
return "meter";
case "km":
case "kilometer":
case "kilometers":
return "kilometer";
case "inch":
case "inches":
return "inch";
case "foot":
case "feet":
return "foot";
case "yard":
case "yards":
return "yard";
case "mile":
case "miles":
return "mile";
case "um":
case "μm":
case "microns":
case "micron":
default:
return "micrometer";
}
}
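
// Illustrative mappings (not part of the PR): adaptSpatialUnit( "um" ), adaptSpatialUnit( "μm" )
// and adaptSpatialUnit( null ) all return "micrometer"; "nm" maps to "nanometer", "feet" to "foot",
// and any unrecognized unit (e.g. a hypothetical "parsec") falls back to "micrometer".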

public static void loadOMEZarr( final N5Reader n5, final String dataset )
{
@@ -245,23 +245,21 @@ else if ( storageType == StorageFormat.N5 || storageType == StorageFormat.ZARR )
final Function<Integer, AffineTransform3D> levelToMipmapTransform =
(level) -> MipmapTransforms.getMipmapTransformDefault( mrInfoZarr[level].absoluteDownsamplingDouble() );

IOFunctions.println( "Resolution of level 0: " + Util.printCoordinates( cal ) + " " + unit ); //vx.unit() might not be OME-ZARR compatible
double[] resolutionS0 = OMEZarrAttibutes.getResolutionS0( cal, anisoF, downsamplingF );

IOFunctions.println( "Calibration: " + Util.printCoordinates( cal ) + " micrometer; resolution at S0: " + Util.printCoordinates( resolutionS0 ) + " " + unit);

// create metadata
final OmeNgffMultiScaleMetadata[] meta = OMEZarrAttibutes.createOMEZarrMetadata(
5, // int n
"/", // String name, I also saw "/"
cal, // double[] resolutionS0,
resolutionS0, // double[] resolutionS0,
unit, //"micrometer", //vx.unit() might not be OME-ZARR compatible // String unitXYZ, // e.g micrometer
mrInfoZarr.length, // int numResolutionLevels,
levelToName,
levelToMipmapTransform );

// save metadata

//org.janelia.saalfeldlab.n5.universe.metadata.ome.ngff.v04.OmeNgffMetadata
// for this to work you need to register an adapter in the N5Factory class
// final GsonBuilder builder = new GsonBuilder().registerTypeAdapter( CoordinateTransformation.class, new CoordinateTransformationAdapter() );
driverVolumeWriter.setAttribute( "/", "multiscales", meta );
}
}
@@ -362,14 +360,16 @@ else if ( storageType == StorageFormat.ZARR ) // OME-Zarr export
final Function<Integer, AffineTransform3D> levelToMipmapTransform =
(level) -> MipmapTransforms.getMipmapTransformDefault( mrInfo[level].absoluteDownsamplingDouble() );

IOFunctions.println( "Resolution of level 0: " + Util.printCoordinates( cal ) + " micrometer" );
double[] resolutionS0 = OMEZarrAttibutes.getResolutionS0( cal, anisoF, downsamplingF );

IOFunctions.println( "Calibration: " + Util.printCoordinates( cal ) + " micrometer; resolution at S0: " + Util.printCoordinates( resolutionS0 ) + " " + unit);

// create metadata
final OmeNgffMultiScaleMetadata[] meta = OMEZarrAttibutes.createOMEZarrMetadata(
3, // int n
omeZarrSubContainer, // String name, I also saw "/"
cal, // double[] resolutionS0,
unit, //"micrometer", //vx.unit() might not be OME-ZARR compatible // String unitXYZ, // e.g micrometer
resolutionS0, // double[] resolutionS0,
unit, // might not be OME-ZARR compatible // String unitXYZ, // e.g micrometer
mrInfo.length, // int numResolutionLevels,
(level) -> "/" + level,
levelToMipmapTransform );
@@ -320,13 +320,13 @@ public static Pair< double[], String > computeAverageCalibration(
else if ( unit.equalsIgnoreCase( transformedCal.getB() ) )
unit = transformedCal.getB();
else
unit = "inconsisistent";
unit = "inconsistent";

System.out.println( "Calibration (transformed): " + Util.printCoordinates( transformedCal.getA() ) + " " + transformedCal.getB() );
}

if ( count == 0 )
return new ValuePair<>( new double[] { 1, 1, 1 }, "px" );
return new ValuePair<>( new double[] { 1, 1, 1 }, "micrometer" );
else
return new ValuePair<>( new double[] { avgCalX / (double)count, avgCalY / (double)count, avgCalZ / (double)count }, unit );
}
17 changes: 4 additions & 13 deletions src/main/java/net/preibisch/mvrecon/process/n5api/N5ApiTools.java
@@ -33,7 +33,6 @@
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.function.BiFunction;
@@ -42,7 +41,6 @@
import org.janelia.saalfeldlab.n5.Compression;
import org.janelia.saalfeldlab.n5.DataType;
import org.janelia.saalfeldlab.n5.DatasetAttributes;
import org.janelia.saalfeldlab.n5.GzipCompression;
import org.janelia.saalfeldlab.n5.N5Writer;
import org.janelia.saalfeldlab.n5.RawCompression;
import org.janelia.saalfeldlab.n5.imglib2.N5Utils;
@@ -382,7 +380,8 @@ public static MultiResolutionLevelInfo[] setupBdvDatasetsOMEZARR(
final ViewId viewId,
final DataType dataType,
final long[] dimensions,
//final double[] resolutionS0, // TODO: this is a hack (uses 1,1,1) so the export downsampling pyramid is working
final double[] resolutionS0,
final String resolutionUnit,
final Compression compression,
final int[] blockSize,
int[][] downsamplings )
@@ -414,25 +413,17 @@ public static MultiResolutionLevelInfo[] setupBdvDatasetsOMEZARR(
final Function<Integer, AffineTransform3D> levelToMipmapTransform =
(level) -> MipmapTransforms.getMipmapTransformDefault( mrInfo[level].absoluteDownsamplingDouble() );

// extract the resolution of the s0 export
//final VoxelDimensions vx = fusionGroup.iterator().next().getViewSetup().getVoxelSize();
//final double[] resolutionS0 = OMEZarrAttibutes.getResolutionS0( vx, anisoF, downsamplingF );

// create metadata
final OmeNgffMultiScaleMetadata[] meta = OMEZarrAttibutes.createOMEZarrMetadata(
5, // int n
"/", // String name, I also saw "/"
new double[] { 1, 1, 1 }, //resolutionS0, // double[] resolutionS0,
"micrometer", //vx.unit() might not be OME-ZARR compatible // String unitXYZ, // e.g micrometer
resolutionS0, // double[] resolutionS0,
resolutionUnit, //vx.unit() might not be OME-ZARR compatible // String unitXYZ, // e.g micrometer
mrInfo.length, // int numResolutionLevels,
levelToName,
levelToMipmapTransform );

// save metadata

//org.janelia.saalfeldlab.n5.universe.metadata.ome.ngff.v04.OmeNgffMetadata
// for this to work you need to register an adapter in the N5Factory class
// final GsonBuilder builder = new GsonBuilder().registerTypeAdapter( CoordinateTransformation.class, new CoordinateTransformationAdapter() );
driverVolumeWriter.setAttribute( baseDataset, "multiscales", meta );

return mrInfo;