From e889f2bdbbf76e8a56bd540c87bb6654393fa018 Mon Sep 17 00:00:00 2001 From: tpietzsch Date: Thu, 24 Jul 2025 15:35:29 -0400 Subject: [PATCH 01/19] Remove support for geff versions < 0.2 --- src/main/java/org/mastodon/geff/Geff.java | 3 +- src/main/java/org/mastodon/geff/GeffEdge.java | 83 +------- .../java/org/mastodon/geff/GeffMetadata.java | 177 ++---------------- src/main/java/org/mastodon/geff/GeffNode.java | 116 +----------- 4 files changed, 34 insertions(+), 345 deletions(-) diff --git a/src/main/java/org/mastodon/geff/Geff.java b/src/main/java/org/mastodon/geff/Geff.java index f8ea61f..860f79b 100644 --- a/src/main/java/org/mastodon/geff/Geff.java +++ b/src/main/java/org/mastodon/geff/Geff.java @@ -42,8 +42,7 @@ public class Geff { // This class serves as a placeholder for the Geff package. // It can be used to define package-level constants or utility methods in - // the - // future. + // the future. private List< GeffNode > nodes = new ArrayList<>(); private List< GeffEdge > edges = new ArrayList<>(); diff --git a/src/main/java/org/mastodon/geff/GeffEdge.java b/src/main/java/org/mastodon/geff/GeffEdge.java index 5bfdb8b..fd7c0c2 100644 --- a/src/main/java/org/mastodon/geff/GeffEdge.java +++ b/src/main/java/org/mastodon/geff/GeffEdge.java @@ -6,13 +6,13 @@ * %% * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: - * + * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. 
- * + * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -220,63 +220,7 @@ public static List< GeffEdge > readFromZarrWithChunks( String zarrPath, String g System.out.println( "Reading edges from Zarr path: " + zarrPath + " with Geff version: " + geffVersion ); - if ( geffVersion.startsWith( "0.1" ) ) - { - - int[][] edgeIds = ZarrUtils.readChunkedIntMatrix( edgesGroup, "ids", "edge IDs" ); - - double[] distances = new double[ 0 ]; - double[] scores = new double[ 0 ]; - - if ( edgesGroup.getGroupKeys().contains( "attrs" ) ) - { - - // Read attributes - ZarrGroup attrsGroup = edgesGroup.openSubGroup( "attrs" ); - - // Read distances from chunks - try - { - distances = ZarrUtils.readChunkedDoubleArray( attrsGroup, "distance/values", "distances" ); - } - catch ( Exception e ) - { - System.out.println( "Warning: Could not read distances: " + e.getMessage() + " skipping..." ); - } - - // Read scores from chunks - try - { - scores = ZarrUtils.readChunkedDoubleArray( attrsGroup, "score/values", "scores" ); - } - catch ( Exception e ) - { - System.out.println( "Warning: Could not read scores: " + e.getMessage() + " skipping..." ); - } - } - - // 2D array case: each row is [source, target] - for ( int i = 0; i < edgeIds.length; i++ ) - { - if ( edgeIds[ i ].length == 2 ) - { - GeffEdge edge = GeffEdge.builder() - .setId( i ) - .setSourceNodeId( edgeIds[ i ][ 0 ] ) - .setTargetNodeId( edgeIds[ i ][ 1 ] ) - .setDistance( i < distances.length ? distances[ i ] : DEFAULT_DISTANCE ) - .setScore( i < scores.length ? scores[ i ] : DEFAULT_SCORE ) - .build(); - edges.add( edge ); - } - else - { - System.err.println( "Unexpected edge format at index " + i + ": " + edgeIds[ i ].length - + " elements. Expected 2 (source, target)." 
); - } - } - } - else if ( geffVersion.startsWith( "0.2" ) || geffVersion.startsWith( "0.3" ) ) + if ( geffVersion.startsWith( "0.2" ) || geffVersion.startsWith( "0.3" ) ) { int[][] edgeIds = ZarrUtils.readChunkedIntMatrix( edgesGroup, "ids", "edge IDs" ); @@ -381,26 +325,7 @@ public static void writeToZarr( List< GeffEdge > edges, String zarrPath, int chu System.out.println( "Writing " + edges.size() + " edges to Zarr path: " + zarrPath + " with chunk size: " + chunks ); - if ( geffVersion.startsWith( "0.1" ) ) - { - // Create attrs subgroup for 0.1 versions - - // Create the main edges group - ZarrGroup rootGroup = ZarrGroup.create( zarrPath ); - - ZarrGroup edgesGroup = rootGroup.createSubGroup( "edges" ); - - writeChunkedEdgeIds( edgesGroup, edges, chunks ); - - ZarrGroup attrsGroup = edgesGroup.createSubGroup( "attrs" ); - - // Write distances - ZarrUtils.writeChunkedDoubleAttribute( edges, attrsGroup, "distance", chunks, GeffEdge::getDistance ); - - // Write scores - ZarrUtils.writeChunkedDoubleAttribute( edges, attrsGroup, "score", chunks, GeffEdge::getScore ); - } - else if ( geffVersion.startsWith( "0.2" ) || geffVersion.startsWith( "0.3" ) ) + if ( geffVersion.startsWith( "0.2" ) || geffVersion.startsWith( "0.3" ) ) { // Create props subgroup for 0.3 version diff --git a/src/main/java/org/mastodon/geff/GeffMetadata.java b/src/main/java/org/mastodon/geff/GeffMetadata.java index f829520..041a7f4 100644 --- a/src/main/java/org/mastodon/geff/GeffMetadata.java +++ b/src/main/java/org/mastodon/geff/GeffMetadata.java @@ -6,13 +6,13 @@ * %% * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: - * + * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. 
Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. - * + * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -42,7 +42,7 @@ /** * Represents metadata for a Geff (Graph Exchange Format for Features) dataset. * This class handles reading and writing metadata from/to Zarr format. - * + * * This is the Java equivalent of the Python GeffMetadata schema from: * https://github.com/live-image-tracking-tools/geff/blob/main/src/geff/metadata_schema.py */ @@ -50,13 +50,13 @@ public class GeffMetadata { // Supported GEFF versions - public static final List< String > SUPPORTED_VERSIONS = Arrays.asList( "0.0", "0.1", "0.2", "0.3" ); + public static final List< String > SUPPORTED_VERSIONS = Arrays.asList( "0.2", "0.3" ); // Pattern to match major.minor versions, allowing for patch versions and // development versions // Examples: 0.1.1, 0.2.2.dev20+g611e7a2.d20250719, 0.2.0-alpha.1, etc. private static final Pattern SUPPORTED_VERSIONS_PATTERN = Pattern - .compile( "(0\\.0|0\\.1|0\\.2|0\\.3)(?:\\.\\d+)?(?:\\.[a-zA-Z0-9]+(?:\\d+)?)?(?:[+\\-][a-zA-Z0-9\\.]+)*" ); + .compile( "(0\\.2|0\\.3)(?:\\.\\d+)?(?:\\.[a-zA-Z0-9]+(?:\\d+)?)?(?:[+\\-][a-zA-Z0-9\\.]+)*" ); // Metadata attributes - matching the Python schema private String geffVersion; @@ -173,13 +173,7 @@ public static GeffMetadata readFromZarr( ZarrGroup group ) throws IOException // Check if geff_version exists in zattrs String geffVersion = null; Map< ?, ? 
> attrs = null; - if ( group.getAttributes().containsKey( "geff_version" ) ) - { - geffVersion = ( String ) group.getAttributes().get( "geff_version" ); - System.out.println( "Found geff_version in " + group + ": " + geffVersion ); - attrs = group.getAttributes(); - } - else if ( group.getAttributes().containsKey( "geff" ) ) + if ( group.getAttributes().containsKey( "geff" ) ) { System.out.println( "Found geff entry in " + group ); Object geffRootObj = group.getAttributes().get( "geff" ); @@ -206,11 +200,13 @@ else if ( group.getAttributes().containsKey( "geff" ) ) } } } - if ( geffVersion == null ) - { throw new IllegalArgumentException( - "No geff_version found in " + group + ". This may indicate the path is incorrect or " + - "zarr group name is not specified (e.g. /dataset.zarr/tracks/ instead of " + - "/dataset.zarr/)." ); } + if ( geffVersion == null ) + { + throw new IllegalArgumentException( + "No geff_version found in " + group + ". This may indicate the path is incorrect or " + + "zarr group name is not specified (e.g. /dataset.zarr/tracks/ instead of " + + "/dataset.zarr/)." 
); + } GeffMetadata metadata = new GeffMetadata(); @@ -218,80 +214,7 @@ else if ( group.getAttributes().containsKey( "geff" ) ) metadata.setGeffVersion( geffVersion ); - if ( geffVersion.startsWith( "0.1" ) ) - { - Object directedObj = attrs.get( "directed" ); - if ( directedObj instanceof Boolean ) - { - metadata.setDirected( ( Boolean ) directedObj ); - } - else if ( directedObj instanceof String ) - { - metadata.setDirected( Boolean.parseBoolean( ( String ) directedObj ) ); - } - - // Read optional fields - double[] roiMins = null; - double[] roiMaxs = null; - String[] axisNames = null; - String[] axisUnits = null; - - int ndim = 0; - Object roiMinObj = attrs.get( "roi_min" ); - if ( roiMinObj != null ) - { - roiMins = convertToDoubleArray( roiMinObj ); - ndim = roiMins.length; - } - - Object roiMaxObj = attrs.get( "roi_max" ); - if ( roiMaxObj != null ) - { - roiMaxs = convertToDoubleArray( roiMaxObj ); - if ( roiMaxs.length != ndim ) - { throw new IllegalArgumentException( - "Roi max dimensions " + roiMaxs.length + " do not match roi min dimensions " + - roiMins.length ); } - } - - Object axisNamesObj = attrs.get( "axis_names" ); - if ( axisNamesObj != null ) - { - axisNames = convertToStringArray( axisNamesObj ); - if ( axisNames.length != ndim ) - { throw new IllegalArgumentException( - "Axis names dimensions " + axisNames.length + " do not match roi min dimensions " + - roiMins.length ); } - } - - Object axisUnitsObj = attrs.get( "axis_units" ); - if ( axisUnitsObj != null ) - { - axisUnits = convertToStringArray( axisUnitsObj ); - if ( axisUnits.length != ndim ) - { throw new IllegalArgumentException( - "Axis units dimensions " + axisUnits.length + " do not match roi min dimensions " + - roiMins.length ); } - } - - String positionAttr = ( String ) attrs.get( "position_attr" ); - if ( ndim != 0 && !positionAttr.equals( "position" ) ) - { throw new IllegalArgumentException( "Invalid position attribute: " + positionAttr ); } - - GeffAxis[] axes = new 
GeffAxis[ ndim ]; - for ( int i = 0; i < ndim; i++ ) - { - GeffAxis axis = new GeffAxis(); - axis.setName( axisNames != null ? axisNames[ i ] : null ); - axis.setType( axisNames[ i ] == GeffAxis.NAME_TIME ? GeffAxis.TYPE_TIME : GeffAxis.TYPE_SPACE ); - axis.setUnit( axisUnits != null ? axisUnits[ i ] : null ); - axis.setMin( roiMins != null ? roiMins[ i ] : null ); - axis.setMax( roiMaxs != null ? roiMaxs[ i ] : null ); - axes[ i ] = axis; - } - metadata.setGeffAxes( axes ); - } - else if ( geffVersion.startsWith( "0.2" ) || geffVersion.startsWith( "0.3" ) ) + if ( geffVersion.startsWith( "0.2" ) || geffVersion.startsWith( "0.3" ) ) { // For 0.2 and 0.3, we expect a different structure metadata.setDirected( ( Boolean ) attrs.get( "directed" ) ); @@ -348,74 +271,12 @@ public void writeToZarr( ZarrGroup group ) throws IOException // Validate before writing validate(); - if ( geffVersion == null ) - { throw new IllegalArgumentException( "Geff version must be set before writing metadata." ); } - - if ( geffVersion.startsWith( "0.1" ) ) - { - // Create a TreeMap to ensure attributes are ordered alphabetically - // by key - java.util.Map< String, Object > attrs = new java.util.TreeMap<>(); - // Write required fields - attrs.put( "geff_version", geffVersion ); - attrs.put( "directed", directed ); - - if ( geffAxes != null ) - { - attrs.put( "position_attr", "position" ); - double[] roiMins = new double[ geffAxes.length ]; - double[] roiMaxs = new double[ geffAxes.length ]; - String[] axisNames = new String[ geffAxes.length ]; - String[] axisTypes = new String[ geffAxes.length ]; - String[] axisUnits = new String[ geffAxes.length ]; - for ( int i = 0; i < geffAxes.length; i++ ) - { - GeffAxis axis = geffAxes[ i ]; - if ( axis.getName() != null ) - { - axisNames[ i ] = axis.getName(); - } - if ( axis.getType() != null ) - { - axisTypes[ i ] = axis.getType(); - } - if ( axis.getUnit() != null ) - { - axisUnits[ i ] = axis.getUnit(); - } - if ( axis.getMin() != null ) - { - 
roiMins[ i ] = axis.getMin(); - } - if ( axis.getMax() != null ) - { - roiMaxs[ i ] = axis.getMax(); - } - } + if ( geffVersion == null ) + { + throw new IllegalArgumentException( "Geff version must be set before writing metadata." ); + } - // Write optional fields - if ( roiMins != null ) - { - attrs.put( "roi_min", roiMins ); - } - if ( roiMaxs != null ) - { - attrs.put( "roi_max", roiMaxs ); - } - if ( axisNames != null ) - { - attrs.put( "axis_names", axisNames ); - } - // Always write axis_units, even if null - attrs.put( "axis_units", axisUnits ); - } - - // Write the attributes to the Zarr group - group.writeAttributes( attrs ); - - System.out.println( "Written metadata attributes: " + attrs.keySet() ); - } - else if ( geffVersion.startsWith( "0.2" ) || geffVersion.startsWith( "0.3" ) ) + if ( geffVersion.startsWith( "0.2" ) || geffVersion.startsWith( "0.3" ) ) { java.util.Map< String, Object > rootAttrs = new java.util.TreeMap<>(); java.util.Map< String, Object > attrs = new java.util.TreeMap<>(); diff --git a/src/main/java/org/mastodon/geff/GeffNode.java b/src/main/java/org/mastodon/geff/GeffNode.java index d833387..c8b5a85 100644 --- a/src/main/java/org/mastodon/geff/GeffNode.java +++ b/src/main/java/org/mastodon/geff/GeffNode.java @@ -6,13 +6,13 @@ * %% * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: - * + * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. 
- * + * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -225,7 +225,7 @@ public void setCovariance3d( double[] covariance3d ) /** * Returns the position of the node as a 3D array. - * + * * @deprecated Use {@link #getX()}, {@link #getY()}, {@link #getZ()} * instead. * @return The position of the node as a 3D array. @@ -238,7 +238,7 @@ public double[] getPosition() /** * Set the position of the node. - * + * * @deprecated Use {@link #setX(double)}, {@link #setY(double)}, * {@link #setZ(double)} instead. * @param position @@ -267,7 +267,7 @@ else if ( position != null && position.length == 3 ) /** * Builder for creating GeffNode instance. - * + * * @return A new Builder instance for GeffNode. */ public static Builder builder() @@ -386,7 +386,7 @@ public GeffNode build() /** * Read nodes from Zarr format with default version and chunked structure - * + * * @param zarrPath * The path to the Zarr directory containing nodes. * @return List of GeffNode objects read from the Zarr path. @@ -398,7 +398,7 @@ public static List< GeffNode > readFromZarr( String zarrPath ) throws IOExceptio /** * Read nodes from Zarr format with specified version and chunked structure - * + * * @param zarrPath * The path to the Zarr directory containing nodes. * @param geffVersion @@ -414,7 +414,7 @@ public static List< GeffNode > readFromZarr( String zarrPath, String geffVersion /** * Read nodes from Zarr format with chunked structure. This method handles * different Geff versions and reads node attributes accordingly. - * + * * @param zarrPath * The path to the Zarr directory containing nodes. 
* @param geffVersion @@ -431,63 +431,7 @@ public static List< GeffNode > readFromZarrWithChunks( String zarrPath, String g System.out.println( "Reading nodes from Zarr path: " + zarrPath + " with Geff version: " + geffVersion ); - if ( geffVersion.startsWith( "0.1" ) ) - { - - // Read node IDs from chunks - int[] nodeIds = ZarrUtils.readChunkedIntArray( nodesGroup, "ids", "node IDs" ); - - // Read attributes - ZarrGroup attrsGroup = nodesGroup.openSubGroup( "attrs" ); - - // Read time points from chunks - int[] timepoints = ZarrUtils.readChunkedIntArray( attrsGroup, "t/values", "timepoints" ); - - // Read X coordinates from chunks - double[] xCoords = ZarrUtils.readChunkedDoubleArray( attrsGroup, "x/values", "X coordinates" ); - - // Read Y coordinates from chunks - double[] yCoords = ZarrUtils.readChunkedDoubleArray( attrsGroup, "y/values", "Y coordinates" ); - - // Read segment IDs from chunks - int[] segmentIds = new int[ 0 ]; - try - { - segmentIds = ZarrUtils.readChunkedIntArray( attrsGroup, "seg_id/values", "segment IDs" ); - } - catch ( Exception e ) - { - System.out.println( "Warning: Could not read segment IDs: " + e.getMessage() + " skipping..." ); - } - - // Read positions if available from chunks - double[][] positions = new double[ 0 ][]; - try - { - positions = ZarrUtils.readChunkedDoubleMatrix( attrsGroup, "position/values", "positions" ); - } - catch ( Exception e ) - { - // Position array might not exist or be in different format - System.out.println( "Warning: Could not read position array: " + e.getMessage() ); - } - - // Create node objects - for ( int i = 0; i < nodeIds.length; i++ ) - { - GeffNode node = new Builder() - .id( nodeIds[ i ] ) - .timepoint( i < timepoints.length ? timepoints[ i ] : -1 ) - .x( i < xCoords.length ? xCoords[ i ] : Double.NaN ) - .y( i < yCoords.length ? yCoords[ i ] : Double.NaN ) - .z( i < positions.length ? positions[ i ][ 0 ] : Double.NaN ) - .segmentId( i < segmentIds.length ? 
segmentIds[ i ] : -1 ) - .build(); - - nodes.add( node ); - } - } - else if ( geffVersion.startsWith( "0.2" ) || geffVersion.startsWith( "0.3" ) ) + if ( geffVersion.startsWith( "0.2" ) || geffVersion.startsWith( "0.3" ) ) { // Read node IDs from chunks int[] nodeIds = ZarrUtils.readChunkedIntArray( nodesGroup, "ids", "node IDs" ); @@ -641,47 +585,7 @@ public static void writeToZarr( List< GeffNode > nodes, String zarrPath, int chu "Writing " + nodes.size() + " nodes to Zarr path: " + zarrPath + " with chunk size: " + chunkSize + " to Geff version: " + geffVersion ); - if ( geffVersion.startsWith( "0.1" ) ) - { - // Create the main nodes group - ZarrGroup rootGroup = ZarrGroup.create( zarrPath ); - - // Create the main nodes group - ZarrGroup nodesGroup = rootGroup.createSubGroup( "nodes" ); - - // Create attrs subgroup for chunked storage - ZarrGroup attrsGroup = nodesGroup.createSubGroup( "attrs" ); - - // Check if any nodes have 3D positions - boolean hasPositions = nodes.stream() - .anyMatch( node -> node.getPosition() != null && node.getPosition().length >= 3 ); - - System.out.println( "Node analysis:" ); - System.out.println( "- Has 3D positions: " + hasPositions ); - System.out.println( "- Format: Chunked arrays with separate values subgroups" ); - - // Write node IDs in chunks - writeChunkedNodeIds( nodes, nodesGroup, chunkSize ); - - // Write timepoints in chunks - ZarrUtils.writeChunkedIntAttribute( nodes, attrsGroup, "t", chunkSize, GeffNode::getT ); - - // Write X coordinates in chunks - ZarrUtils.writeChunkedDoubleAttribute( nodes, attrsGroup, "x", chunkSize, GeffNode::getX ); - - // Write Y coordinates in chunks - ZarrUtils.writeChunkedDoubleAttribute( nodes, attrsGroup, "y", chunkSize, GeffNode::getY ); - - // Write segment IDs in chunks - ZarrUtils.writeChunkedIntAttribute( nodes, attrsGroup, "seg_id", chunkSize, GeffNode::getSegmentId ); - - // Write positions if available in chunks - if ( hasPositions ) - { - ZarrUtils.writeChunkedDoubleMatrix( 
nodes, attrsGroup, "position", chunkSize, GeffNode::getPosition, 3 ); - } - } - else if ( geffVersion.startsWith( "0.2" ) || geffVersion.startsWith( "0.3" ) ) + if ( geffVersion.startsWith( "0.2" ) || geffVersion.startsWith( "0.3" ) ) { // Create the main nodes group ZarrGroup rootGroup = ZarrGroup.create( zarrPath ); From f83dc6d9883cb513ddcabc6faca7b082e7f3c7ae Mon Sep 17 00:00:00 2001 From: tpietzsch Date: Thu, 24 Jul 2025 15:36:48 -0400 Subject: [PATCH 02/19] POM: Add dependency n5-blosc-2.0.0-alpha-1 --- pom.xml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/pom.xml b/pom.xml index be76f1a..da0b102 100644 --- a/pom.xml +++ b/pom.xml @@ -24,6 +24,7 @@ true 4.0.0-alpha-3-SNAPSHOT 2.0.0-alpha-2-SNAPSHOT + 2.0.0-alpha-1 https://sonarcloud.io jacoco @@ -59,6 +60,11 @@ n5-zarr + + org.janelia.saalfeldlab + n5-blosc + + com.bc.zarr jzarr From 28c63d93adc868493eb0038f1294b27630cf5eca Mon Sep 17 00:00:00 2001 From: tpietzsch Date: Thu, 24 Jul 2025 17:48:03 -0400 Subject: [PATCH 03/19] Use N5 to de/serialize GeffMetadata --- .../java/org/mastodon/geff/GeffMetadata.java | 406 ++++++------------ 1 file changed, 130 insertions(+), 276 deletions(-) diff --git a/src/main/java/org/mastodon/geff/GeffMetadata.java b/src/main/java/org/mastodon/geff/GeffMetadata.java index 041a7f4..d66984b 100644 --- a/src/main/java/org/mastodon/geff/GeffMetadata.java +++ b/src/main/java/org/mastodon/geff/GeffMetadata.java @@ -29,15 +29,19 @@ package org.mastodon.geff; import java.io.IOException; -import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import java.util.Map; +import java.util.Objects; import java.util.regex.Pattern; -import com.bc.zarr.ZarrGroup; +import org.janelia.saalfeldlab.n5.zarr.N5ZarrReader; +import org.janelia.saalfeldlab.n5.zarr.N5ZarrWriter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.gson.GsonBuilder; +import com.google.gson.reflect.TypeToken; -import ucar.ma2.InvalidRangeException; /** * Represents 
metadata for a Geff (Graph Exchange Format for Features) dataset. @@ -48,6 +52,7 @@ */ public class GeffMetadata { + private static final Logger LOG = LoggerFactory.getLogger( GeffMetadata.class ); // Supported GEFF versions public static final List< String > SUPPORTED_VERSIONS = Arrays.asList( "0.2", "0.3" ); @@ -63,7 +68,7 @@ public class GeffMetadata private boolean directed; - private GeffAxis[] geffAxes; + private GeffAxis[] geffAxes; // TODO make List /** * Default constructor @@ -83,14 +88,24 @@ public GeffMetadata( String geffVersion, boolean directed ) /** * Constructor with all parameters */ - public GeffMetadata( String geffVersion, boolean directed, GeffAxis[] geffAxes ) - { - setGeffVersion( geffVersion ); - this.directed = directed; - setGeffAxes( geffAxes ); - } - - // Getters and Setters + public GeffMetadata( String geffVersion, boolean directed, GeffAxis[] geffAxes ) + { + setGeffVersion( geffVersion ); + this.directed = directed; + setGeffAxes( geffAxes ); + } + + /** + * Constructor with all parameters + */ + public GeffMetadata( String geffVersion, boolean directed, List< GeffAxis > geffAxes ) + { + setGeffVersion( geffVersion ); + this.directed = directed; + setGeffAxes( geffAxes ); + } + + // Getters and Setters public String getGeffVersion() { return geffVersion; @@ -117,23 +132,39 @@ public void setDirected( boolean directed ) this.directed = directed; } - public GeffAxis[] getGeffAxes() - { - return geffAxes; - } - - public void setGeffAxes( GeffAxis[] geffAxes ) - { - this.geffAxes = geffAxes != null ? geffAxes.clone() : null; - validate(); - } - - /** + public GeffAxis[] getGeffAxes() // TODO make List + { + return geffAxes; + } + + public List< GeffAxis > getGeffAxesList() // TODO rename getGeffAxes() + { + return ( geffAxes != null ) ? Arrays.asList( geffAxes ) : null; + } + + public void setGeffAxes( GeffAxis[] geffAxes ) // TODO make List + { + this.geffAxes = geffAxes != null ? 
geffAxes.clone() : null; + validate(); + } + + public void setGeffAxes( final List< GeffAxis > geffAxes ) + { + this.geffAxes = ( geffAxes != null ) ? geffAxes.toArray( GeffAxis[]::new ) : null; + validate(); + } + + /** * Validates the metadata according to the GEFF schema rules */ public void validate() { - // Check spatial metadata consistency if position is provided + if ( geffVersion == null ) + { + throw new IllegalArgumentException( "geff_version is missing." ); + } + + // Check spatial metadata consistency if position is provided if ( geffAxes != null ) { for ( GeffAxis axis : geffAxes ) @@ -156,50 +187,21 @@ public void validate() } } - /** - * Read metadata from a Zarr group - */ - public static GeffMetadata readFromZarr( String zarrPath ) throws IOException, InvalidRangeException - { - ZarrGroup group = ZarrGroup.open( zarrPath ); - return readFromZarr( group ); - } + /** + * Read metadata from a Zarr group + */ + public static GeffMetadata readFromZarr( final String zarrPath ) throws IOException + { + try ( final N5ZarrReader reader = new N5ZarrReader( zarrPath, true ) ) + { + return readFromZarr( reader, "/" ); + } + } - /** - * Read metadata from a Zarr group - */ - public static GeffMetadata readFromZarr( ZarrGroup group ) throws IOException - { - // Check if geff_version exists in zattrs - String geffVersion = null; - Map< ?, ? > attrs = null; - if ( group.getAttributes().containsKey( "geff" ) ) - { - System.out.println( "Found geff entry in " + group ); - Object geffRootObj = group.getAttributes().get( "geff" ); - if ( geffRootObj instanceof Map ) - { - try - { - // Check if geff_version exists in the geff entry - if ( ( ( Map< ?, ? > ) geffRootObj ).containsKey( "geff_version" ) ) - { - System.out.println( - "Found geff_version in geff entry: " + ( ( Map< ?, ? > ) geffRootObj ).get( "geff_version" ) ); - geffVersion = ( String ) ( ( Map< ?, ? > ) geffRootObj ).get( "geff_version" ); - attrs = ( Map< ?, ? 
> ) geffRootObj; - } - else - { - System.out.println( "No geff_version found in geff entry." ); - } - } - catch ( ClassCastException e ) - { - System.err.println( "Invalid geff entry format: " + e.getMessage() ); - } - } - } + public static GeffMetadata readFromZarr( final N5ZarrReader reader, final String group ) throws IOException + { + final String geffVersion = reader.getAttribute( group, "geff/geff_version", String.class ); + LOG.debug( "found geff/geff_version = {}", geffVersion ); if ( geffVersion == null ) { throw new IllegalArgumentException( @@ -208,194 +210,63 @@ public static GeffMetadata readFromZarr( ZarrGroup group ) throws IOException "/dataset.zarr/)." ); } - GeffMetadata metadata = new GeffMetadata(); - - // Read required fields - - metadata.setGeffVersion( geffVersion ); - - if ( geffVersion.startsWith( "0.2" ) || geffVersion.startsWith( "0.3" ) ) - { - // For 0.2 and 0.3, we expect a different structure - metadata.setDirected( ( Boolean ) attrs.get( "directed" ) ); - - // Read axes - List< GeffAxis > axes = new ArrayList<>(); - if ( attrs.containsKey( "axes" ) ) - { - Object axesObj = attrs.get( "axes" ); - if ( axesObj instanceof List ) - { - for ( Object axisObj : ( List< ? > ) axesObj ) - { - if ( axisObj instanceof Map ) - { - Map< ?, ? > axisMap = ( Map< ?, ? > ) axisObj; - String name = ( String ) axisMap.get( "name" ); - String type = ( String ) axisMap.get( "type" ); - String unit = ( String ) axisMap.get( "unit" ); - Double min = ( Double ) axisMap.get( "min" ); - Double max = ( Double ) axisMap.get( "max" ); - axes.add( new GeffAxis( name, type, unit, min, max ) ); - } - } - } - else - { - throw new IllegalArgumentException( "Invalid axes format: " + axesObj ); - } - } - metadata.setGeffAxes( axes.toArray( new GeffAxis[ 0 ] ) ); - } + if ( !( geffVersion.startsWith( "0.2" ) || geffVersion.startsWith( "0.3" ) ) ) + { + throw new IllegalArgumentException( "geff_version " + geffVersion + " not supported." 
); + } - // Validate the loaded metadata - metadata.validate(); + final Boolean directed = reader.getAttribute( group, "geff/directed", Boolean.class ); + LOG.debug( "found geff/directed = {}", directed ); + if ( directed == null ) + { + throw new IllegalArgumentException( "required attribute 'geff/directed' is missing." ); + } - return metadata; - } + final List< GeffAxis > axes = reader.getAttribute( group, "geff/axes", + new TypeToken< List< GeffAxis > >() {}.getType() ); + LOG.debug( "found geff/axes = {}", axes ); - /** - * Write metadata to Zarr format at specified path - */ - public static void writeToZarr( GeffMetadata metadata, String zarrPath ) throws IOException - { - ZarrGroup group = ZarrGroup.create( zarrPath ); - metadata.writeToZarr( group ); - } + final GeffMetadata metadata = new GeffMetadata( geffVersion, directed, axes ); + metadata.validate(); - /** - * Write metadata to Zarr format - */ - public void writeToZarr( ZarrGroup group ) throws IOException - { - // Validate before writing - validate(); + return metadata; + } - if ( geffVersion == null ) + /** + * Write metadata to Zarr format at specified path + */ + public static void writeToZarr( final GeffMetadata metadata, final String zarrPath ) throws IOException + { + try ( final N5ZarrWriter writer = new N5ZarrWriter( zarrPath, new GsonBuilder().setPrettyPrinting(),true ) ) { - throw new IllegalArgumentException( "Geff version must be set before writing metadata." 
); + metadata.writeToZarr( writer, "/" ); } + } - if ( geffVersion.startsWith( "0.2" ) || geffVersion.startsWith( "0.3" ) ) - { - java.util.Map< String, Object > rootAttrs = new java.util.TreeMap<>(); - java.util.Map< String, Object > attrs = new java.util.TreeMap<>(); - // Write required fields - attrs.put( "directed", directed ); - attrs.put( "geff_version", geffVersion ); - ArrayList< Map< String, Object > > axisMaps = new ArrayList<>(); - for ( GeffAxis axis : geffAxes ) - { - if ( axis.getName() == null || axis.getType() == null ) - { throw new IllegalArgumentException( - "Axis name and type must be set for all axes in version 0.2 and 0.3." ); } - Map< String, Object > axisMap = new java.util.TreeMap<>(); - axisMap.put( "name", axis.getName() ); - axisMap.put( "type", axis.getType() ); - axisMap.put( "unit", axis.getUnit() ); - if ( axis.getMin() != null ) - { - axisMap.put( "min", axis.getMin() ); - } - if ( axis.getMax() != null ) - { - axisMap.put( "max", axis.getMax() ); - } - axisMaps.add( axisMap ); - } - attrs.put( "axes", axisMaps ); - rootAttrs.put( "geff", attrs ); - // Write the attributes to the Zarr group - group.writeAttributes( rootAttrs ); - System.out.println( "Written metadata attributes: " + rootAttrs.keySet() ); - } + public void writeToZarr( final N5ZarrWriter writer, final String group ) + { + // Validate before writing + validate(); - } + if ( !( geffVersion.startsWith( "0.2" ) || geffVersion.startsWith( "0.3" ) ) ) + { + throw new IllegalArgumentException( "geff_version " + geffVersion + " not supported." 
); + } - // Helper methods for type conversion - private static double[] convertToDoubleArray( Object obj ) - { - if ( obj instanceof double[] ) - { - return ( double[] ) obj; - } - else if ( obj instanceof java.util.ArrayList ) - { - @SuppressWarnings( "unchecked" ) - java.util.ArrayList< Object > list = ( java.util.ArrayList< Object > ) obj; - double[] result = new double[ list.size() ]; - for ( int i = 0; i < list.size(); i++ ) - { - if ( list.get( i ) instanceof Number ) - { - result[ i ] = ( ( Number ) list.get( i ) ).doubleValue(); - } - else - { - result[ i ] = Double.parseDouble( list.get( i ).toString() ); - } - } - return result; - } - else if ( obj instanceof Object[] ) - { - Object[] arr = ( Object[] ) obj; - double[] result = new double[ arr.length ]; - for ( int i = 0; i < arr.length; i++ ) - { - if ( arr[ i ] instanceof Number ) - { - result[ i ] = ( ( Number ) arr[ i ] ).doubleValue(); - } - else - { - result[ i ] = Double.parseDouble( arr[ i ].toString() ); - } - } - return result; - } - else if ( obj instanceof float[] ) - { - float[] floatArray = ( float[] ) obj; - double[] result = new double[ floatArray.length ]; - for ( int i = 0; i < floatArray.length; i++ ) - { - result[ i ] = floatArray[ i ]; - } - return result; - } - return null; - } + // required + LOG.debug( "writing geff/geff_version {}", getGeffVersion() ); + writer.setAttribute( group, "geff/geff_version", getGeffVersion() ); + LOG.debug( "writing geff/directed {}", isDirected() ); + writer.setAttribute( group, "geff/directed", isDirected() ); - private static String[] convertToStringArray( Object obj ) - { - if ( obj instanceof String[] ) - { - return ( String[] ) obj; - } - else if ( obj instanceof java.util.ArrayList ) - { - @SuppressWarnings( "unchecked" ) - java.util.ArrayList< Object > list = ( java.util.ArrayList< Object > ) obj; - String[] result = new String[ list.size() ]; - for ( int i = 0; i < list.size(); i++ ) - { - result[ i ] = list.get( i ) != null ? 
list.get( i ).toString() : null; - } - return result; - } - else if ( obj instanceof Object[] ) - { - Object[] arr = ( Object[] ) obj; - String[] result = new String[ arr.length ]; - for ( int i = 0; i < arr.length; i++ ) - { - result[ i ] = arr[ i ] != null ? arr[ i ].toString() : null; - } - return result; - } - return null; - } + // optional + final List< GeffAxis > axes = getGeffAxesList(); + if ( axes != null ) + { + LOG.debug( "writing geff/axes {}", axes ); + writer.setAttribute( group, "geff/axes", axes ); + } + } @Override public String toString() @@ -405,34 +276,17 @@ public String toString() geffVersion, directed, Arrays.toString( geffAxes ) ); } - @Override - public boolean equals( Object obj ) - { - if ( this == obj ) - return true; - if ( obj == null || getClass() != obj.getClass() ) - return false; - - GeffMetadata that = ( GeffMetadata ) obj; - - if ( directed != that.directed ) - return false; - if ( geffVersion != null ? !geffVersion.equals( that.geffVersion ) : that.geffVersion != null ) - return false; - for ( int i = 0; i < geffAxes.length; i++ ) - { - if ( !geffAxes[ i ].equals( that.geffAxes[ i ] ) ) - { return false; } - } - return true; - } - - @Override - public int hashCode() - { - int result = geffVersion != null ? geffVersion.hashCode() : 0; - result = 31 * result + ( directed ? 
1 : 0 ); - result = 31 * result + Arrays.hashCode( geffAxes ); - return result; - } + @Override + public boolean equals( final Object o ) + { + if ( !( o instanceof final GeffMetadata that ) ) + return false; + return directed == that.directed && Objects.equals( geffVersion, that.geffVersion ) && Objects.deepEquals( geffAxes, that.geffAxes ); + } + + @Override + public int hashCode() + { + return Objects.hash( geffVersion, directed, Arrays.hashCode( geffAxes ) ); + } } From 231c9376cd576bd5df9d6bbf8eebe56cf51ddcfd Mon Sep 17 00:00:00 2001 From: tpietzsch Date: Sat, 26 Jul 2025 09:35:13 -0400 Subject: [PATCH 04/19] WIP --- pom.xml | 6 + .../java/org/mastodon/geff/GeffMetadata.java | 30 +- src/main/java/org/mastodon/geff/GeffNode.java | 175 +++++++++++- src/main/java/org/mastodon/geff/GeffUtil.java | 17 ++ .../java/org/mastodon/geff/GeffUtils.java | 258 ++++++++++++++++++ 5 files changed, 466 insertions(+), 20 deletions(-) create mode 100644 src/main/java/org/mastodon/geff/GeffUtil.java create mode 100644 src/main/java/org/mastodon/geff/GeffUtils.java diff --git a/pom.xml b/pom.xml index da0b102..8b68df5 100644 --- a/pom.xml +++ b/pom.xml @@ -25,6 +25,7 @@ 4.0.0-alpha-3-SNAPSHOT 2.0.0-alpha-2-SNAPSHOT 2.0.0-alpha-1 + 7.1.5 https://sonarcloud.io jacoco @@ -65,6 +66,11 @@ n5-blosc + + net.imglib2 + imglib2 + + com.bc.zarr jzarr diff --git a/src/main/java/org/mastodon/geff/GeffMetadata.java b/src/main/java/org/mastodon/geff/GeffMetadata.java index d66984b..4bc2af4 100644 --- a/src/main/java/org/mastodon/geff/GeffMetadata.java +++ b/src/main/java/org/mastodon/geff/GeffMetadata.java @@ -28,12 +28,16 @@ */ package org.mastodon.geff; +import static org.mastodon.geff.GeffUtil.checkSupportedVersion; + import java.io.IOException; import java.util.Arrays; import java.util.List; import java.util.Objects; import java.util.regex.Pattern; +import org.janelia.saalfeldlab.n5.N5Reader; +import org.janelia.saalfeldlab.n5.N5Writer; import org.janelia.saalfeldlab.n5.zarr.N5ZarrReader; 
import org.janelia.saalfeldlab.n5.zarr.N5ZarrWriter; import org.slf4j.Logger; @@ -150,7 +154,7 @@ public void setGeffAxes( GeffAxis[] geffAxes ) // TODO make List public void setGeffAxes( final List< GeffAxis > geffAxes ) { - this.geffAxes = ( geffAxes != null ) ? geffAxes.toArray( GeffAxis[]::new ) : null; + this.geffAxes = ( geffAxes != null ) ? geffAxes.toArray( new GeffAxis[ 0 ] ) : null; validate(); } @@ -190,15 +194,15 @@ public void validate() /** * Read metadata from a Zarr group */ - public static GeffMetadata readFromZarr( final String zarrPath ) throws IOException + public static GeffMetadata readFromZarr( final String zarrPath ) { try ( final N5ZarrReader reader = new N5ZarrReader( zarrPath, true ) ) { - return readFromZarr( reader, "/" ); + return readFromN5( reader, "/" ); } } - public static GeffMetadata readFromZarr( final N5ZarrReader reader, final String group ) throws IOException + public static GeffMetadata readFromN5( final N5Reader reader, final String group ) { final String geffVersion = reader.getAttribute( group, "geff/geff_version", String.class ); LOG.debug( "found geff/geff_version = {}", geffVersion ); @@ -209,11 +213,7 @@ public static GeffMetadata readFromZarr( final N5ZarrReader reader, final String "zarr group name is not specified (e.g. /dataset.zarr/tracks/ instead of " + "/dataset.zarr/)." ); } - - if ( !( geffVersion.startsWith( "0.2" ) || geffVersion.startsWith( "0.3" ) ) ) - { - throw new IllegalArgumentException( "geff_version " + geffVersion + " not supported." 
); - } + checkSupportedVersion( geffVersion ); final Boolean directed = reader.getAttribute( group, "geff/directed", Boolean.class ); LOG.debug( "found geff/directed = {}", directed ); @@ -239,19 +239,16 @@ public static void writeToZarr( final GeffMetadata metadata, final String zarrPa { try ( final N5ZarrWriter writer = new N5ZarrWriter( zarrPath, new GsonBuilder().setPrettyPrinting(),true ) ) { - metadata.writeToZarr( writer, "/" ); + metadata.writeToN5( writer, "/" ); } } - public void writeToZarr( final N5ZarrWriter writer, final String group ) + public void writeToN5( final N5Writer writer, final String group ) { // Validate before writing validate(); - if ( !( geffVersion.startsWith( "0.2" ) || geffVersion.startsWith( "0.3" ) ) ) - { - throw new IllegalArgumentException( "geff_version " + geffVersion + " not supported." ); - } + checkSupportedVersion( geffVersion ); // required LOG.debug( "writing geff/geff_version {}", getGeffVersion() ); @@ -279,8 +276,9 @@ public String toString() @Override public boolean equals( final Object o ) { - if ( !( o instanceof final GeffMetadata that ) ) + if ( !( o instanceof GeffMetadata ) ) return false; + GeffMetadata that = ( GeffMetadata ) o; return directed == that.directed && Objects.equals( geffVersion, that.geffVersion ) && Objects.deepEquals( geffAxes, that.geffAxes ); } diff --git a/src/main/java/org/mastodon/geff/GeffNode.java b/src/main/java/org/mastodon/geff/GeffNode.java index c8b5a85..0789c0d 100644 --- a/src/main/java/org/mastodon/geff/GeffNode.java +++ b/src/main/java/org/mastodon/geff/GeffNode.java @@ -28,10 +28,21 @@ */ package org.mastodon.geff; +import static org.mastodon.geff.GeffUtil.checkSupportedVersion; + import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; +import org.janelia.saalfeldlab.n5.DatasetAttributes; +import org.janelia.saalfeldlab.n5.N5Reader; +import org.janelia.saalfeldlab.n5.N5URI; +import org.janelia.saalfeldlab.n5.zarr.N5ZarrReader; 
+import org.mastodon.geff.GeffUtils.FlattenedDoubles; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import com.bc.zarr.ArrayParams; import com.bc.zarr.DataType; import com.bc.zarr.ZarrArray; @@ -45,6 +56,7 @@ */ public class GeffNode implements ZarrEntity { + private static final Logger LOG = LoggerFactory.getLogger( GeffNode.class ); // Node attributes private int id; @@ -384,14 +396,169 @@ public GeffNode build() } } - /** + + + + + + + + + + + + // ------ n5 version ------------------------------------------- + // + // + + + + + /** + * Read nodes from Zarr format with specified version and chunked structure + * + * @param zarrPath + * The path to the Zarr directory containing nodes. + * @param geffVersion + * The version of the GEFF format to read. + * @return List of GeffNode objects read from the Zarr path. + */ + public static List< GeffNode > n5ReadFromZarr( final String zarrPath, final String geffVersion ) + { + LOG.debug( "Reading nodes from Zarr path: " + zarrPath + " with Geff version: " + geffVersion ); + try ( final N5ZarrReader reader = new N5ZarrReader( zarrPath, true ) ) + { + return readFromN5( reader, "/", geffVersion ); + } + } + + private static void verifyLength( final int[] array, final int expectedLength, final String name ) + { + if ( array != null && array.length != expectedLength ) + throw new IllegalArgumentException( "property " + name + " does not have expected length (" + array.length + " vs " + expectedLength + ")" ); + } + + private static void verifyLength( final double[] array, final int expectedLength, final String name ) + { + if ( array != null && array.length != expectedLength ) + throw new IllegalArgumentException( "property " + name + " does not have expected length (" + array.length + " vs " + expectedLength + ")" ); + } + + private static void verifyLength( final FlattenedDoubles array, final int expectedLength, final String name ) + { + if ( array != null && array.size()[ 0 ] != expectedLength ) + { + throw 
new IllegalArgumentException( "property " + name + " does not have expected length (" + array.size()[ 0 ] + " vs " + expectedLength + ")" ); + } + } + + public static List< GeffNode > readFromN5( final N5Reader reader, final String group, final String geffVersion ) + { + checkSupportedVersion( geffVersion ); + final String path = N5URI.normalizeGroupPath( group ); + final DatasetAttributes attributes = reader.getDatasetAttributes( path + "/edges/ids" ); + + // Read node IDs from chunks + final int[] nodeIds = GeffUtils.readAsIntArray( reader, path + "/nodes/ids", "node IDs" ); + if ( nodeIds == null ) + { + throw new IllegalArgumentException( "required property '/nodes/ids' not found" ); + } + + // Read time points from chunks + final int[] timepoints = GeffUtils.readAsIntArray( reader, "/nodes/props/t/values", "timepoints" ); + verifyLength( timepoints, nodeIds.length, "/nodes/props/t/values" ); + + // Read X coordinates from chunks + final double[] xCoords = GeffUtils.readAsDoubleArray( reader, "/nodes/props/x/values", "X coordinates" ); + verifyLength( xCoords, nodeIds.length, "/nodes/props/x/values" ); + + // Read Y coordinates from chunks + final double[] yCoords = GeffUtils.readAsDoubleArray( reader, "/nodes/props/y/values", "Y coordinates" ); + verifyLength( yCoords, nodeIds.length, "/nodes/props/y/values" ); + + // Read Z coordinates from chunks + final double[] zCoords = GeffUtils.readAsDoubleArray( reader, "/nodes/props/z/values", "Z coordinates" ); + verifyLength( zCoords, nodeIds.length, "/nodes/props/z/values" ); + + // Read color from chunks + final FlattenedDoubles colors = GeffUtils.readAsDoubleMatrix( reader, "/nodes/props/color/values", "color" ); + verifyLength( colors, nodeIds.length, "/nodes/props/color/values" ); + + // Read radius from chunks + int[] radius = GeffUtils.readAsIntArray( reader, "/nodes/props/radius/values", "track IDs" ); + verifyLength( radius, nodeIds.length, "/nodes/props/radius/values" ); + + printArray( nodeIds,"nodeIds" 
); + printArray( timepoints,"timepoints" ); + printArray( xCoords,"xCoords" ); + printArray( yCoords,"yCoords" ); + printArray( zCoords,"zCoords" ); + + // Create node objects + for ( int i = 0; i < nodeIds.length; i++ ) + { + GeffNode node = new Builder() + .id( nodeIds[ i ] ) + .timepoint( i < timepoints.length ? timepoints[ i ] : -1 ) + .x( i < xCoords.length ? xCoords[ i ] : Double.NaN ) + .y( i < yCoords.length ? yCoords[ i ] : Double.NaN ) + .z( i < zCoords.length ? zCoords[ i ] : Double.NaN ) + .color( i < colors.length ? colors[ i ] : DEFAULT_COLOR ) + .segmentId( i < trackIds.length ? trackIds[ i ] : -1 ) + .radius( i < radii.length ? radii[ i ] : Double.NaN ) + .covariance2d( i < covariance2ds.length ? covariance2ds[ i ] : DEFAULT_COVARIANCE_2D ) + .covariance3d( i < covariance3ds.length ? covariance3ds[ i ] : DEFAULT_COVARIANCE_3D ) + .build(); + + nodes.add( node ); + } + + throw new UnsupportedOperationException( "TODO. not implemented." ); + } + + private static final int maxLength = 5; + private static void printArray( int[] array, String name ) + { + if ( array == null ) + { + System.out.println( name + " = null " ); + } + else + { + final int[] a = array.length <= maxLength ? array : Arrays.copyOf( array, maxLength ); + final String suffix = array.length > maxLength ? " ... and " + ( array.length - maxLength ) + " more elements" : ""; + System.out.println( name + " = " + Arrays.toString( a ) + suffix ); + } + } + private static void printArray( double[] array, String name ) + { + if ( array == null ) + { + System.out.println( name + " = null " ); + } + else + { + final double[] a = array.length <= maxLength ? array : Arrays.copyOf( array, maxLength ); + final String suffix = array.length > maxLength ? " ... 
and " + ( array.length - maxLength ) + " more elements" : ""; + System.out.println( name + " = " + Arrays.toString( a ) + suffix ); + } + } + + + + // ------ jzarr version ------------------------------------------- + // + // + + /** * Read nodes from Zarr format with default version and chunked structure * * @param zarrPath * The path to the Zarr directory containing nodes. * @return List of GeffNode objects read from the Zarr path. */ - public static List< GeffNode > readFromZarr( String zarrPath ) throws IOException, InvalidRangeException + public static List< GeffNode > readFromZarr( String zarrPath ) throws IOException { return readFromZarrWithChunks( zarrPath, Geff.VERSION ); } @@ -406,7 +573,7 @@ public static List< GeffNode > readFromZarr( String zarrPath ) throws IOExceptio * @return List of GeffNode objects read from the Zarr path. */ public static List< GeffNode > readFromZarr( String zarrPath, String geffVersion ) - throws IOException, InvalidRangeException + throws IOException { return readFromZarrWithChunks( zarrPath, geffVersion ); } @@ -422,7 +589,7 @@ public static List< GeffNode > readFromZarr( String zarrPath, String geffVersion * @return List of GeffNode objects read from the Zarr path. */ public static List< GeffNode > readFromZarrWithChunks( String zarrPath, String geffVersion ) - throws IOException, InvalidRangeException + throws IOException { List< GeffNode > nodes = new ArrayList<>(); diff --git a/src/main/java/org/mastodon/geff/GeffUtil.java b/src/main/java/org/mastodon/geff/GeffUtil.java new file mode 100644 index 0000000..eeeebad --- /dev/null +++ b/src/main/java/org/mastodon/geff/GeffUtil.java @@ -0,0 +1,17 @@ +package org.mastodon.geff; + +class GeffUtil +{ + static void checkSupportedVersion( final String version ) throws IllegalArgumentException + { + if ( !( version.startsWith( "0.2" ) || version.startsWith( "0.3" ) ) ) + { + throw new IllegalArgumentException( "geff_version " + version + " not supported." 
); + } + } + + private GeffUtil() + { + // static utility methods. don't instantiate. + } +} diff --git a/src/main/java/org/mastodon/geff/GeffUtils.java b/src/main/java/org/mastodon/geff/GeffUtils.java new file mode 100644 index 0000000..b3be742 --- /dev/null +++ b/src/main/java/org/mastodon/geff/GeffUtils.java @@ -0,0 +1,258 @@ +package org.mastodon.geff; + +import java.util.Arrays; +import java.util.function.IntFunction; +import java.util.function.ToIntFunction; + +import org.janelia.saalfeldlab.n5.DataBlock; +import org.janelia.saalfeldlab.n5.DataType; +import org.janelia.saalfeldlab.n5.DatasetAttributes; +import org.janelia.saalfeldlab.n5.N5Reader; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import net.imglib2.FinalInterval; +import net.imglib2.Interval; +import net.imglib2.blocks.BlockInterval; +import net.imglib2.blocks.SubArrayCopy; +import net.imglib2.iterator.IntervalIterator; +import net.imglib2.util.Intervals; +import net.imglib2.util.Util; + +// TODO: split good parts into GeffN5Utils, move questionable parts to ZarrUtils +public class GeffUtils +{ + private static final Logger LOG = LoggerFactory.getLogger( GeffUtils.class ); + + public static int[] readAsIntArray( final N5Reader reader, final String dataset, final String description ) + { + if ( !reader.datasetExists( dataset ) ) + { + LOG.debug( "No arrays found in group for " + description ); + return null; + } + if ( reader.getDatasetAttributes( dataset ).getNumDimensions() != 1 ) + { + throw new IllegalArgumentException( "Expected 1D array" ); + } + return convertToIntArray( readFully( reader, dataset ), description ); + } + + public static double[] readAsDoubleArray( final N5Reader reader, final String dataset, final String description ) + { + if ( !reader.datasetExists( dataset ) ) + { + LOG.debug( "No arrays found in group for " + description ); + return null; + } + if ( reader.getDatasetAttributes( dataset ).getNumDimensions() != 1 ) + { + throw new IllegalArgumentException( 
"Expected 1D array" ); + } + return convertToDoubleArray( readFully( reader, dataset ), description ); + } + + static class FlattenedDoubles + { + private final double[] data; + + private final int[] size; + + FlattenedDoubles( final double[] data, final int[] size ) + { + this.data = data; + this.size = size; + } + + FlattenedDoubles( final double[] data, final long[] size ) + { + this( data, Util.long2int( size ) ); + } + + int[] size() + { + return size; + } + + double at(final int i0) + { + return data[ i0 ]; + } + + double at(final int i0, final int i1) + { + return data[ i0 + size[ 0 ] * i1 ]; + } + + double at(final int i0, final int i1, final int i2) + { + return data[ i0 + size[ 0 ] * ( i1 * i2 * size[ 1 ] ) ]; + } + } + + public static FlattenedDoubles readAsDoubleMatrix( final N5Reader reader, final String dataset, final String description ) + { + if ( !reader.datasetExists( dataset ) ) + { + LOG.debug( "No arrays found in group for " + description ); + return null; + } + final DatasetAttributes attributes = reader.getDatasetAttributes( dataset ); + if ( attributes.getNumDimensions() != 2 ) + { + throw new IllegalArgumentException( "Expected 2D array" ); + } + return new FlattenedDoubles( convertToDoubleArray( readFully( reader, dataset ), description ), attributes.getDimensions() ); + } + + + + + + + @FunctionalInterface + private interface IntValueAtIndex< T > + { + int apply( T data, int index ); + } + + private static < T > int[] copyToIntArray( final T data, final ToIntFunction< T > numElements, final IntValueAtIndex< T > elementAtIndex ) + { + final int[] ints = new int[ numElements.applyAsInt( data ) ]; + Arrays.setAll( ints, i -> elementAtIndex.apply( data, i ) ); + return ints; + } + + public static int[] convertToIntArray( final Object data, final String fieldName ) + { + if ( data instanceof int[] ) + return ( int[] ) data; + else if ( data instanceof long[] ) + return copyToIntArray( ( long[] ) data, a -> a.length, ( a, i ) -> ( int ) a[ i ] 
); + else if ( data instanceof double[] ) + return copyToIntArray( ( double[] ) data, a -> a.length, ( a, i ) -> ( int ) a[ i ] ); + else if ( data instanceof float[] ) + return copyToIntArray( ( float[] ) data, a -> a.length, ( a, i ) -> ( int ) a[ i ] ); + else + throw new IllegalArgumentException( + "Unsupported data type for " + fieldName + ": " + + ( data != null ? data.getClass().getName() : "null" ) ); + } + + + @FunctionalInterface + private interface DoubleValueAtIndex< T > + { + double apply( T data, int index ); + } + + private static < T > double[] copyToDoubleArray( final T data, final ToIntFunction< T > numElements, final DoubleValueAtIndex< T > elementAtIndex ) + { + final double[] doubles = new double[ numElements.applyAsInt( data ) ]; + Arrays.setAll( doubles, i -> elementAtIndex.apply( data, i ) ); + return doubles; + } + + public static double[] convertToDoubleArray( final Object data, final String fieldName ) + { + if ( data instanceof double[] ) + return ( double[] ) data; + else if ( data instanceof int[] ) + return copyToDoubleArray( ( int[] ) data, a -> a.length, ( a, i ) -> a[ i ] ); + else if ( data instanceof long[] ) + return copyToDoubleArray( ( long[] ) data, a -> a.length, ( a, i ) -> a[ i ] ); + else if ( data instanceof float[] ) + return copyToDoubleArray( ( float[] ) data, a -> a.length, ( a, i ) -> a[ i ] ); + else + throw new IllegalArgumentException( + "Unsupported data type for " + fieldName + ": " + + ( data != null ? 
data.getClass().getName() : "null" ) ); + } + + + + + + + + + + + public static Object readFully( final N5Reader reader, final String dataset ) + { + final DatasetAttributes attributes = reader.getDatasetAttributes( dataset ); + final DataType dataType = attributes.getDataType(); + final int numElements = Util.safeInt( Intervals.numElements( attributes.getDimensions() ) ); + final Object dest = createArray( dataType ).apply( numElements ); + copy( dest, new FinalInterval( attributes.getDimensions() ), reader, dataset ); + return dest; + } + + private static IntFunction< ? > createArray( final DataType dataType ) + { + switch ( dataType ) + { + case INT8: + case UINT8: + return byte[]::new; + case INT16: + case UINT16: + return short[]::new; + case INT32: + case UINT32: + return int[]::new; + case INT64: + case UINT64: + return long[]::new; + case FLOAT32: + return float[]::new; + case FLOAT64: + return double[]::new; + case STRING: + return String[]::new; + case OBJECT: + return Object[]::new; + } + throw new IllegalArgumentException(); + } + + private static void copy( + final Object dest, + final Interval destInterval, + final N5Reader reader, + final String dataset ) + { + final DatasetAttributes attributes = reader.getDatasetAttributes( dataset ); + final int[] blockSize = attributes.getBlockSize(); + final int n = attributes.getNumDimensions(); + + final long[] gridMin = new long[ n ]; + final long[] gridMax = new long[ n ]; + final long[] gridSize = new long[ n ]; + for ( int d = 0; d < n; d++ ) + { + gridMin[ d ] = destInterval.min( d ) / blockSize[ d ]; + gridMax[ d ] = destInterval.max( d ) / blockSize[ d ]; + gridSize[ d ] = gridMax[ d ] + 1 - gridMin[ d ]; + } + + final long[] gridPos = new long[ n ]; + final long[] blockMin = new long[ n ]; + final int[] srcPos = new int[ n ]; + final int[] destSize = Util.long2int( destInterval.dimensionsAsLongArray() ); + final int[] destPos = new int[ n ]; + final IntervalIterator gridIter = new IntervalIterator( 
gridSize ); + while ( gridIter.hasNext() ) + { + gridIter.fwd(); + gridIter.localize( gridPos ); + Arrays.setAll( blockMin, d -> gridPos[ d ] * blockSize[ d ] ); + final DataBlock< ? > block = reader.readBlock( dataset, attributes, gridPos ); + final BlockInterval blockInterval = BlockInterval.wrap( blockMin, block.getSize() ); + final FinalInterval intersection = Intervals.intersect( blockInterval, destInterval ); + Arrays.setAll( srcPos, d -> ( int ) ( intersection.min( d ) - blockMin[ d ] ) ); + Arrays.setAll( destPos, d -> ( int ) ( intersection.min( d ) - destInterval.min( d ) ) ); + SubArrayCopy.copy( block.getData(), blockInterval.size(), srcPos, dest, destSize, destPos, Util.long2int( intersection.dimensionsAsLongArray() ) ); + } + } +} From a6dbc7a0121ced309114ee9dbe194d8455acb1ea Mon Sep 17 00:00:00 2001 From: tpietzsch Date: Sat, 26 Jul 2025 10:03:33 -0400 Subject: [PATCH 05/19] WIP --- src/main/java/org/mastodon/geff/GeffNode.java | 266 +++--------------- .../java/org/mastodon/geff/GeffUtils.java | 11 + 2 files changed, 54 insertions(+), 223 deletions(-) diff --git a/src/main/java/org/mastodon/geff/GeffNode.java b/src/main/java/org/mastodon/geff/GeffNode.java index 0789c0d..c10bca1 100644 --- a/src/main/java/org/mastodon/geff/GeffNode.java +++ b/src/main/java/org/mastodon/geff/GeffNode.java @@ -79,9 +79,7 @@ public class GeffNode implements ZarrEntity private double[] covariance3d; - public static final double[] DEFAULT_COLOR = { 1.0, 1.0, 1.0, 1.0 }; // Default - // white - // color + public static final double[] DEFAULT_COLOR = { 1.0, 1.0, 1.0, 1.0 }; // Default white color public static final double DEFAULT_RADIUS = 1.0; @@ -411,8 +409,17 @@ public GeffNode build() // // - - + /** + * Read nodes from Zarr format with default version and chunked structure + * + * @param zarrPath + * The path to the Zarr directory containing nodes. + * @return List of GeffNode objects read from the Zarr path. 
+ */ + public static List< GeffNode > readFromZarr( String zarrPath ) throws IOException + { + return readFromZarr( zarrPath, Geff.VERSION ); + } /** * Read nodes from Zarr format with specified version and chunked structure @@ -423,7 +430,7 @@ public GeffNode build() * The version of the GEFF format to read. * @return List of GeffNode objects read from the Zarr path. */ - public static List< GeffNode > n5ReadFromZarr( final String zarrPath, final String geffVersion ) + public static List< GeffNode > readFromZarr( final String zarrPath, final String geffVersion ) { LOG.debug( "Reading nodes from Zarr path: " + zarrPath + " with Geff version: " + geffVersion ); try ( final N5ZarrReader reader = new N5ZarrReader( zarrPath, true ) ) @@ -432,18 +439,21 @@ public static List< GeffNode > n5ReadFromZarr( final String zarrPath, final Stri } } + // TODO: move to Util private static void verifyLength( final int[] array, final int expectedLength, final String name ) { if ( array != null && array.length != expectedLength ) throw new IllegalArgumentException( "property " + name + " does not have expected length (" + array.length + " vs " + expectedLength + ")" ); } + // TODO: move to Util private static void verifyLength( final double[] array, final int expectedLength, final String name ) { if ( array != null && array.length != expectedLength ) throw new IllegalArgumentException( "property " + name + " does not have expected length (" + array.length + " vs " + expectedLength + ")" ); } + // TODO: move to Util private static void verifyLength( final FlattenedDoubles array, final int expectedLength, final String name ) { if ( array != null && array.size()[ 0 ] != expectedLength ) @@ -464,252 +474,62 @@ public static List< GeffNode > readFromN5( final N5Reader reader, final String g { throw new IllegalArgumentException( "required property '/nodes/ids' not found" ); } + final int numNodes = nodeIds.length; // Read time points from chunks final int[] timepoints = 
GeffUtils.readAsIntArray( reader, "/nodes/props/t/values", "timepoints" ); - verifyLength( timepoints, nodeIds.length, "/nodes/props/t/values" ); + verifyLength( timepoints, numNodes, "/nodes/props/t/values" ); // Read X coordinates from chunks final double[] xCoords = GeffUtils.readAsDoubleArray( reader, "/nodes/props/x/values", "X coordinates" ); - verifyLength( xCoords, nodeIds.length, "/nodes/props/x/values" ); + verifyLength( xCoords, numNodes, "/nodes/props/x/values" ); // Read Y coordinates from chunks final double[] yCoords = GeffUtils.readAsDoubleArray( reader, "/nodes/props/y/values", "Y coordinates" ); - verifyLength( yCoords, nodeIds.length, "/nodes/props/y/values" ); + verifyLength( yCoords, numNodes, "/nodes/props/y/values" ); // Read Z coordinates from chunks final double[] zCoords = GeffUtils.readAsDoubleArray( reader, "/nodes/props/z/values", "Z coordinates" ); - verifyLength( zCoords, nodeIds.length, "/nodes/props/z/values" ); + verifyLength( zCoords, numNodes, "/nodes/props/z/values" ); // Read color from chunks final FlattenedDoubles colors = GeffUtils.readAsDoubleMatrix( reader, "/nodes/props/color/values", "color" ); - verifyLength( colors, nodeIds.length, "/nodes/props/color/values" ); + verifyLength( colors, numNodes, "/nodes/props/color/values" ); + + // Read track IDs from chunks + final int[] trackIds = GeffUtils.readAsIntArray( reader, "/nodes/props/track_id/values", "track IDs" ); + verifyLength( trackIds, numNodes, "/nodes/props/track_id/values" ); // Read radius from chunks - int[] radius = GeffUtils.readAsIntArray( reader, "/nodes/props/radius/values", "track IDs" ); - verifyLength( radius, nodeIds.length, "/nodes/props/radius/values" ); + double[] radius = GeffUtils.readAsDoubleArray( reader, "/nodes/props/radius/values", "radius" ); + verifyLength( radius, numNodes, "/nodes/props/radius/values" ); - printArray( nodeIds,"nodeIds" ); - printArray( timepoints,"timepoints" ); - printArray( xCoords,"xCoords" ); - printArray( 
yCoords,"yCoords" ); - printArray( zCoords,"zCoords" ); + // TODO: ellipsoid etc // Create node objects - for ( int i = 0; i < nodeIds.length; i++ ) + final List< GeffNode > nodes = new ArrayList<>( numNodes ); + for ( int i = 0; i < numNodes; i++ ) { - GeffNode node = new Builder() - .id( nodeIds[ i ] ) - .timepoint( i < timepoints.length ? timepoints[ i ] : -1 ) - .x( i < xCoords.length ? xCoords[ i ] : Double.NaN ) - .y( i < yCoords.length ? yCoords[ i ] : Double.NaN ) - .z( i < zCoords.length ? zCoords[ i ] : Double.NaN ) - .color( i < colors.length ? colors[ i ] : DEFAULT_COLOR ) - .segmentId( i < trackIds.length ? trackIds[ i ] : -1 ) - .radius( i < radii.length ? radii[ i ] : Double.NaN ) - .covariance2d( i < covariance2ds.length ? covariance2ds[ i ] : DEFAULT_COVARIANCE_2D ) - .covariance3d( i < covariance3ds.length ? covariance3ds[ i ] : DEFAULT_COVARIANCE_3D ) - .build(); - + final int id = nodeIds[ i ]; + final int t = timepoints != null ? timepoints[ i ] : -1; + final double x = xCoords != null ? xCoords[ i ] : Double.NaN; + final double y = yCoords != null ? yCoords[ i ] : Double.NaN; + final double z = zCoords != null ? zCoords[ i ] : Double.NaN; + final double[] color = colors != null ? colors.rowAt( i ) : DEFAULT_COLOR; + final int segmentId = trackIds != null ? trackIds[ i ] : -1; + final double r = radius != null ? radius[ i ] : Double.NaN; + final double[] covariance2d = DEFAULT_COVARIANCE_2D; + final double[] covariance3d = DEFAULT_COVARIANCE_2D; + final GeffNode node = new GeffNode( id, t, x, y, z, color, segmentId, r, covariance2d, covariance3d ); nodes.add( node ); } - - throw new UnsupportedOperationException( "TODO. not implemented." ); + return nodes; } - private static final int maxLength = 5; - private static void printArray( int[] array, String name ) - { - if ( array == null ) - { - System.out.println( name + " = null " ); - } - else - { - final int[] a = array.length <= maxLength ? 
array : Arrays.copyOf( array, maxLength ); - final String suffix = array.length > maxLength ? " ... and " + ( array.length - maxLength ) + " more elements" : ""; - System.out.println( name + " = " + Arrays.toString( a ) + suffix ); - } - } - private static void printArray( double[] array, String name ) - { - if ( array == null ) - { - System.out.println( name + " = null " ); - } - else - { - final double[] a = array.length <= maxLength ? array : Arrays.copyOf( array, maxLength ); - final String suffix = array.length > maxLength ? " ... and " + ( array.length - maxLength ) + " more elements" : ""; - System.out.println( name + " = " + Arrays.toString( a ) + suffix ); - } - } - - - // ------ jzarr version ------------------------------------------- // // - /** - * Read nodes from Zarr format with default version and chunked structure - * - * @param zarrPath - * The path to the Zarr directory containing nodes. - * @return List of GeffNode objects read from the Zarr path. - */ - public static List< GeffNode > readFromZarr( String zarrPath ) throws IOException - { - return readFromZarrWithChunks( zarrPath, Geff.VERSION ); - } - - /** - * Read nodes from Zarr format with specified version and chunked structure - * - * @param zarrPath - * The path to the Zarr directory containing nodes. - * @param geffVersion - * The version of the GEFF format to read. - * @return List of GeffNode objects read from the Zarr path. - */ - public static List< GeffNode > readFromZarr( String zarrPath, String geffVersion ) - throws IOException - { - return readFromZarrWithChunks( zarrPath, geffVersion ); - } - - /** - * Read nodes from Zarr format with chunked structure. This method handles - * different Geff versions and reads node attributes accordingly. - * - * @param zarrPath - * The path to the Zarr directory containing nodes. - * @param geffVersion - * The version of the GEFF format to read. - * @return List of GeffNode objects read from the Zarr path. 
- */ - public static List< GeffNode > readFromZarrWithChunks( String zarrPath, String geffVersion ) - throws IOException - { - List< GeffNode > nodes = new ArrayList<>(); - - ZarrGroup nodesGroup = ZarrGroup.open( zarrPath + "/nodes" ); - - System.out.println( - "Reading nodes from Zarr path: " + zarrPath + " with Geff version: " + geffVersion ); - - if ( geffVersion.startsWith( "0.2" ) || geffVersion.startsWith( "0.3" ) ) - { - // Read node IDs from chunks - int[] nodeIds = ZarrUtils.readChunkedIntArray( nodesGroup, "ids", "node IDs" ); - - // Read properties - ZarrGroup propsGroup = nodesGroup.openSubGroup( "props" ); - - // Read time points from chunks - int[] timepoints = ZarrUtils.readChunkedIntArray( propsGroup, "t/values", "timepoints" ); - - // Read X coordinates from chunks - double[] xCoords = ZarrUtils.readChunkedDoubleArray( propsGroup, "x/values", "X coordinates" ); - - // Read Y coordinates from chunks - double[] yCoords = ZarrUtils.readChunkedDoubleArray( propsGroup, "y/values", "Y coordinates" ); - - // Read Z coordinates from chunks - double[] zCoords = new double[ 0 ]; - try - { - zCoords = ZarrUtils.readChunkedDoubleArray( propsGroup, "z/values", "Z coordinates" ); - } - catch ( Exception e ) - { - System.out.println( "Warning: Could not read Z coordinates: " + e.getMessage() + " skipping..." ); - } - - // Read color from chunks - double[][] colors = new double[ 0 ][]; - try - { - colors = ZarrUtils.readChunkedDoubleMatrix( propsGroup, "color/values", "color" ); - } - catch ( Exception e ) - { - System.out.println( "Warning: Could not read color array: " + e.getMessage() + " skipping..." ); - } - - // Read track IDs from chunks - int[] trackIds = new int[ 0 ]; - try - { - trackIds = ZarrUtils.readChunkedIntArray( propsGroup, "track_id/values", "track IDs" ); - } - catch ( Exception e ) - { - System.out.println( "Warning: Could not read track IDs: " + e.getMessage() + " skipping..." 
); - } - - // Read radius from chunks - double[] radii = new double[ 0 ]; - try - { - radii = ZarrUtils.readChunkedDoubleArray( propsGroup, "radius/values", "radius" ); - } - catch ( Exception e ) - { - System.out.println( "Warning: Could not read radius: " + e.getMessage() + " skipping..." ); - } - - // Read covariance2d from chunks - double[][] covariance2ds = new double[ 0 ][]; - try - { - covariance2ds = ZarrUtils.readChunkedDoubleMatrix( propsGroup, "covariance2d/values", - "covariance2d" ); - } - catch ( Exception e ) - { - System.out.println( "Warning: Could not read covariance2d: " + e.getMessage() + " skipping..." ); - } - - // Read covariance3d from chunks - double[][] covariance3ds = new double[ 0 ][]; - try - { - covariance3ds = ZarrUtils.readChunkedDoubleMatrix( propsGroup, "covariance3d/values", - "covariance3d" ); - } - catch ( Exception e ) - { - System.out.println( "Warning: Could not read covariance3d: " + e.getMessage() + " skipping..." ); - } - - // Create node objects - for ( int i = 0; i < nodeIds.length; i++ ) - { - GeffNode node = new Builder() - .id( nodeIds[ i ] ) - .timepoint( i < timepoints.length ? timepoints[ i ] : -1 ) - .x( i < xCoords.length ? xCoords[ i ] : Double.NaN ) - .y( i < yCoords.length ? yCoords[ i ] : Double.NaN ) - .z( i < zCoords.length ? zCoords[ i ] : Double.NaN ) - .color( i < colors.length ? colors[ i ] : DEFAULT_COLOR ) - .segmentId( i < trackIds.length ? trackIds[ i ] : -1 ) - .radius( i < radii.length ? radii[ i ] : Double.NaN ) - .covariance2d( i < covariance2ds.length ? covariance2ds[ i ] : DEFAULT_COVARIANCE_2D ) - .covariance3d( i < covariance3ds.length ? 
covariance3ds[ i ] : DEFAULT_COVARIANCE_3D ) - .build(); - - nodes.add( node ); - } - } - else - { - throw new IOException( "Unsupported Geff version: " + geffVersion ); - } - - return nodes; - } - /** * Write nodes to Zarr format with chunked structure */ diff --git a/src/main/java/org/mastodon/geff/GeffUtils.java b/src/main/java/org/mastodon/geff/GeffUtils.java index b3be742..f29ed86 100644 --- a/src/main/java/org/mastodon/geff/GeffUtils.java +++ b/src/main/java/org/mastodon/geff/GeffUtils.java @@ -76,18 +76,29 @@ int[] size() double at(final int i0) { + assert size.length == 1; return data[ i0 ]; } double at(final int i0, final int i1) { + assert size.length == 2; return data[ i0 + size[ 0 ] * i1 ]; } double at(final int i0, final int i1, final int i2) { + assert size.length == 3; return data[ i0 + size[ 0 ] * ( i1 * i2 * size[ 1 ] ) ]; } + + double[] rowAt(final int i0) + { + assert size.length == 2; + final double[] row = new double[ size[ 1 ] ]; + Arrays.setAll( row, i1 -> at( i0, i1 ) ); + return row; + } } public static FlattenedDoubles readAsDoubleMatrix( final N5Reader reader, final String dataset, final String description ) From 8c9a483dd586d259ddf2e74ae914076b5bc28282 Mon Sep 17 00:00:00 2001 From: tpietzsch Date: Sat, 26 Jul 2025 15:40:01 -0400 Subject: [PATCH 06/19] Use N5 to de/serialize GeffNode --- src/main/java/org/mastodon/geff/GeffNode.java | 209 ++++++------------ .../java/org/mastodon/geff/GeffUtils.java | 192 ++++++++++++---- 2 files changed, 215 insertions(+), 186 deletions(-) diff --git a/src/main/java/org/mastodon/geff/GeffNode.java b/src/main/java/org/mastodon/geff/GeffNode.java index c10bca1..94c2609 100644 --- a/src/main/java/org/mastodon/geff/GeffNode.java +++ b/src/main/java/org/mastodon/geff/GeffNode.java @@ -32,13 +32,14 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; import java.util.List; import org.janelia.saalfeldlab.n5.DatasetAttributes; import org.janelia.saalfeldlab.n5.N5Reader; import 
org.janelia.saalfeldlab.n5.N5URI; +import org.janelia.saalfeldlab.n5.N5Writer; import org.janelia.saalfeldlab.n5.zarr.N5ZarrReader; +import org.janelia.saalfeldlab.n5.zarr.N5ZarrWriter; import org.mastodon.geff.GeffUtils.FlattenedDoubles; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -394,20 +395,83 @@ public GeffNode build() } } + /** + * Write nodes to Zarr format with chunked structure + */ + public static void writeToZarr( List< GeffNode > nodes, String zarrPath ) throws IOException, InvalidRangeException + { + writeToZarr( nodes, zarrPath, ZarrUtils.DEFAULT_CHUNK_SIZE ); + } + /** + * Write nodes to Zarr format with specified chunk size + */ + public static void writeToZarr( List< GeffNode > nodes, String zarrPath, int chunkSize ) + throws IOException, InvalidRangeException + { + writeToZarr( nodes, zarrPath, chunkSize, Geff.VERSION ); + } + public static void writeToZarr( List< GeffNode > nodes, String zarrPath, String geffVersion ) + throws IOException, InvalidRangeException + { + writeToZarr( nodes, zarrPath, ZarrUtils.DEFAULT_CHUNK_SIZE, geffVersion ); + } + public static void writeToZarr( List< GeffNode > nodes, String zarrPath, int chunkSize, String geffVersion ) + { + LOG.debug( "Writing {} nodes to Zarr path: {} with chunk size: {} to Geff version: {}" + geffVersion, nodes.size(), zarrPath, chunkSize, geffVersion ); + try ( final N5ZarrWriter writer = new N5ZarrWriter( zarrPath, true ) ) + { + writeToN5( nodes, writer, "/", chunkSize, geffVersion ); + } + } + + public static void writeToN5( + final List< GeffNode > nodes, + final N5Writer writer, + final String group, + final int chunkSize, + String geffVersion ) + { + if ( nodes == null ) + throw new NullPointerException( "Nodes list cannot be null or empty" ); + if ( geffVersion == null || geffVersion.isEmpty() ) + { + geffVersion = Geff.VERSION; // Use default version if not specified + } + GeffUtil.checkSupportedVersion( geffVersion ); + final String path = N5URI.normalizeGroupPath( 
group ); + // Write node IDs in chunks + GeffUtils.writeIntArray( nodes, GeffNode::getId, writer, path + "/nodes/ids", chunkSize ); + // Write timepoints in chunks + GeffUtils.writeIntArray( nodes, GeffNode::getT, writer, path + "/nodes/props/t/values", chunkSize ); + // Write X coordinates in chunks + GeffUtils.writeDoubleArray( nodes, GeffNode::getX, writer, path + "/nodes/props/x/values", chunkSize ); + // Write Y coordinates in chunks + GeffUtils.writeDoubleArray( nodes, GeffNode::getY, writer, path + "/nodes/props/y/values", chunkSize ); + // Write Z coordinates in chunks + GeffUtils.writeDoubleArray( nodes, GeffNode::getZ, writer, path + "/nodes/props/z/values", chunkSize ); + // Write color in chunks + GeffUtils.writeDoubleMatrix( nodes, 4, GeffNode::getColor, writer, path + "/nodes/props/color/values", chunkSize ); - // ------ n5 version ------------------------------------------- - // - // + // Write segment IDs in chunks + GeffUtils.writeIntArray( nodes, GeffNode::getSegmentId, writer, path + "/nodes/props/track_id/values", chunkSize ); + + // Write radius and covariance attributes if available + GeffUtils.writeDoubleArray( nodes, GeffNode::getRadius, writer, path + "/nodes/props/radius/values", chunkSize ); + + // TODO: ellipsoid etc + + LOG.debug( "Successfully wrote nodes to Zarr format with chunked structure" ); + } /** * Read nodes from Zarr format with default version and chunked structure @@ -526,143 +590,6 @@ public static List< GeffNode > readFromN5( final N5Reader reader, final String g return nodes; } - // ------ jzarr version ------------------------------------------- - // - // - - /** - * Write nodes to Zarr format with chunked structure - */ - public static void writeToZarr( List< GeffNode > nodes, String zarrPath ) throws IOException, InvalidRangeException - { - writeToZarr( nodes, zarrPath, ZarrUtils.DEFAULT_CHUNK_SIZE ); - } - - public static void writeToZarr( List< GeffNode > nodes, String zarrPath, String geffVersion ) - throws 
IOException, InvalidRangeException - { - if ( geffVersion == null || geffVersion.isEmpty() ) - { - geffVersion = Geff.VERSION; // Use default version if not specified - } - writeToZarr( nodes, zarrPath, ZarrUtils.DEFAULT_CHUNK_SIZE, geffVersion ); - } - - /** - * Write nodes to Zarr format with specified chunk size - */ - public static void writeToZarr( List< GeffNode > nodes, String zarrPath, int chunkSize ) - throws IOException, InvalidRangeException - { - writeToZarr( nodes, zarrPath, chunkSize, Geff.VERSION ); - } - - public static void writeToZarr( List< GeffNode > nodes, String zarrPath, int chunkSize, String geffVersion ) - throws IOException, InvalidRangeException - { - if ( nodes == null ) - { throw new IllegalArgumentException( "Nodes list cannot be null or empty" ); } - - if ( geffVersion == null || geffVersion.isEmpty() ) - { - geffVersion = Geff.VERSION; // Use default version if not specified - } - - System.out.println( - "Writing " + nodes.size() + " nodes to Zarr path: " + zarrPath + " with chunk size: " + chunkSize - + " to Geff version: " + geffVersion ); - - if ( geffVersion.startsWith( "0.2" ) || geffVersion.startsWith( "0.3" ) ) - { - // Create the main nodes group - ZarrGroup rootGroup = ZarrGroup.create( zarrPath ); - - // Create the main nodes group - ZarrGroup nodesGroup = rootGroup.createSubGroup( "nodes" ); - - // Create props subgroup for chunked storage - ZarrGroup propsGroup = nodesGroup.createSubGroup( "props" ); - - // Write node IDs in chunks - writeChunkedNodeIds( nodes, nodesGroup, chunkSize ); - - // Write timepoints in chunks - ZarrUtils.writeChunkedIntAttribute( nodes, propsGroup, "t", chunkSize, GeffNode::getT ); - - // Write X coordinates in chunks - ZarrUtils.writeChunkedDoubleAttribute( nodes, propsGroup, "x", chunkSize, GeffNode::getX ); - - // Write Y coordinates in chunks - ZarrUtils.writeChunkedDoubleAttribute( nodes, propsGroup, "y", chunkSize, GeffNode::getY ); - - // Write Z coordinates in chunks - 
ZarrUtils.writeChunkedDoubleAttribute( nodes, propsGroup, "z", chunkSize, GeffNode::getZ ); - - // Write color in chunks - ZarrUtils.writeChunkedDoubleMatrix( nodes, propsGroup, "color", chunkSize, GeffNode::getColor, 4 ); - - // Write segment IDs in chunks - ZarrUtils.writeChunkedIntAttribute( nodes, propsGroup, "track_id", chunkSize, GeffNode::getSegmentId ); - - // Write radius and covariance attributes if available - ZarrUtils.writeChunkedDoubleAttribute( nodes, propsGroup, "radius", chunkSize, GeffNode::getRadius ); - - // Write covariance2d in chunks - ZarrUtils.writeChunkedDoubleMatrix( nodes, propsGroup, "covariance2d", chunkSize, GeffNode::getCovariance2d, - 4 ); - - // Write covariance3d in chunks - ZarrUtils.writeChunkedDoubleMatrix( nodes, propsGroup, "covariance3d", chunkSize, GeffNode::getCovariance3d, - 6 ); - - } - - System.out.println( "Successfully wrote nodes to Zarr format with chunked structure" ); - } - - /** - * Helper method to write chunked node IDs - */ - private static void writeChunkedNodeIds( List< GeffNode > nodes, ZarrGroup parentGroup, int chunkSize ) - throws IOException, InvalidRangeException - { - - int totalNodes = nodes.size(); - - // Create the ids subgroup - ZarrGroup idsGroup = parentGroup.createSubGroup( "ids" ); - - // Create a single ZarrArray for all IDs with proper chunking - ZarrArray idsArray = idsGroup.createArray( "", new ArrayParams() - .shape( totalNodes ) - .chunks( chunkSize ) - .dataType( DataType.i4 ) ); - - // Write data in chunks - int chunkIndex = 0; - for ( int startIdx = 0; startIdx < totalNodes; startIdx += chunkSize ) - { - int endIdx = Math.min( startIdx + chunkSize, totalNodes ); - int currentChunkSize = endIdx - startIdx; - - // Prepare chunk data array - int[] chunkData = new int[ currentChunkSize ]; - - // Fill chunk data array - for ( int i = 0; i < currentChunkSize; i++ ) - { - chunkData[ i ] = nodes.get( startIdx + i ).getId(); - } - - // Write chunk at specific offset - idsArray.write( 
chunkData, new int[] { currentChunkSize }, new int[] { startIdx } ); - - System.out.println( "- Wrote node IDs chunk " + chunkIndex + ": " + currentChunkSize + " nodes (indices " - + startIdx + "-" + ( endIdx - 1 ) + ")" ); - chunkIndex++; - } - } - @Override public String toString() { diff --git a/src/main/java/org/mastodon/geff/GeffUtils.java b/src/main/java/org/mastodon/geff/GeffUtils.java index f29ed86..c55f30e 100644 --- a/src/main/java/org/mastodon/geff/GeffUtils.java +++ b/src/main/java/org/mastodon/geff/GeffUtils.java @@ -1,13 +1,18 @@ package org.mastodon.geff; import java.util.Arrays; +import java.util.List; +import java.util.function.Function; import java.util.function.IntFunction; +import java.util.function.ToDoubleFunction; import java.util.function.ToIntFunction; import org.janelia.saalfeldlab.n5.DataBlock; import org.janelia.saalfeldlab.n5.DataType; import org.janelia.saalfeldlab.n5.DatasetAttributes; import org.janelia.saalfeldlab.n5.N5Reader; +import org.janelia.saalfeldlab.n5.N5Writer; +import org.janelia.saalfeldlab.n5.blosc.BloscCompression; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -15,6 +20,7 @@ import net.imglib2.Interval; import net.imglib2.blocks.BlockInterval; import net.imglib2.blocks.SubArrayCopy; +import net.imglib2.img.cell.CellGrid; import net.imglib2.iterator.IntervalIterator; import net.imglib2.util.Intervals; import net.imglib2.util.Util; @@ -24,6 +30,75 @@ public class GeffUtils { private static final Logger LOG = LoggerFactory.getLogger( GeffUtils.class ); + + + public static < T > void writeIntArray( + final List< T > elements, + final ToIntFunction< T > extractor, + final N5Writer writer, + final String dataset, + final int chunkSize ) + { + final int size = elements.size(); + final int[] data = new int[ size ]; + Arrays.setAll(data, i -> extractor.applyAsInt(elements.get(i))); + final DatasetAttributes attributes = new DatasetAttributes( + new long[] { size }, + new int[] { chunkSize }, + DataType.INT32, + 
new BloscCompression() ); + writer.createDataset(dataset, attributes); + write( data, writer, dataset, attributes ); + } + + public static < T > void writeDoubleArray( + final List< T > elements, + final ToDoubleFunction< T > extractor, + final N5Writer writer, + final String dataset, + final int chunkSize ) + { + final int size = elements.size(); + final double[] data = new double[ size ]; + Arrays.setAll(data, i -> extractor.applyAsDouble(elements.get(i))); + final DatasetAttributes attributes = new DatasetAttributes( + new long[] { size }, + new int[] { chunkSize }, + DataType.FLOAT64, + new BloscCompression() ); + writer.createDataset(dataset, attributes); + write( data, writer, dataset, attributes ); + } + + public static < T > void writeDoubleMatrix( + final List< T > elements, + final int numColumns, + final Function< T, double[] > extractor, + final N5Writer writer, + final String dataset, + final int chunkSize ) + { + final int size = elements.size(); + final double[] data = new double[ numColumns * size ]; + for ( int i = 0; i < size; ++i ) { + final double[] row = extractor.apply( elements.get( i ) ); + System.arraycopy( row, 0, data, numColumns * i, numColumns ); + } + final DatasetAttributes attributes = new DatasetAttributes( + new long[] { numColumns, size }, + new int[] { size, chunkSize }, + DataType.FLOAT64, + new BloscCompression() ); + writer.createDataset(dataset, attributes); + write( data, writer, dataset, attributes ); + } + + + + + + + public static int[] readAsIntArray( final N5Reader reader, final String dataset, final String description ) { if ( !reader.datasetExists( dataset ) ) @@ -52,7 +127,7 @@ public static double[] readAsDoubleArray( final N5Reader reader, final String da return convertToDoubleArray( readFully( reader, dataset ), description ); } - static class FlattenedDoubles + public static class FlattenedDoubles { private final double[] data; @@ -74,25 +149,27 @@ int[] size() return size; } - double at(final int i0) + // TODO: 
remove until needed + double at( final int i0 ) { assert size.length == 1; return data[ i0 ]; } - double at(final int i0, final int i1) + double at( final int i0, final int i1 ) { assert size.length == 2; return data[ i0 + size[ 0 ] * i1 ]; } - double at(final int i0, final int i1, final int i2) + // TODO: remove until needed + double at( final int i0, final int i1, final int i2 ) { assert size.length == 3; return data[ i0 + size[ 0 ] * ( i1 * i2 * size[ 1 ] ) ]; } - double[] rowAt(final int i0) + double[] rowAt( final int i0 ) { assert size.length == 2; final double[] row = new double[ size[ 1 ] ]; @@ -117,77 +194,103 @@ public static FlattenedDoubles readAsDoubleMatrix( final N5Reader reader, final } - - - + public static int[] convertToIntArray( final Object array, final String fieldName ) + { + if ( array instanceof int[] ) + return ( int[] ) array; + else if ( array instanceof long[] ) + return copyToIntArray( ( long[] ) array, a -> a.length, ( a, i ) -> ( int ) a[ i ] ); + else if ( array instanceof double[] ) + return copyToIntArray( ( double[] ) array, a -> a.length, ( a, i ) -> ( int ) a[ i ] ); + else if ( array instanceof float[] ) + return copyToIntArray( ( float[] ) array, a -> a.length, ( a, i ) -> ( int ) a[ i ] ); + else + throw new IllegalArgumentException( + "Unsupported data type for " + fieldName + ": " + + ( array != null ? 
array.getClass().getName() : "null" ) ); + } @FunctionalInterface private interface IntValueAtIndex< T > { - int apply( T data, int index ); + int apply( T array, int index ); } - private static < T > int[] copyToIntArray( final T data, final ToIntFunction< T > numElements, final IntValueAtIndex< T > elementAtIndex ) + private static < T > int[] copyToIntArray( final T array, final ToIntFunction< T > numElements, final IntValueAtIndex< T > elementAtIndex ) { - final int[] ints = new int[ numElements.applyAsInt( data ) ]; - Arrays.setAll( ints, i -> elementAtIndex.apply( data, i ) ); + final int[] ints = new int[ numElements.applyAsInt( array ) ]; + Arrays.setAll( ints, i -> elementAtIndex.apply( array, i ) ); return ints; } - public static int[] convertToIntArray( final Object data, final String fieldName ) + public static double[] convertToDoubleArray( final Object array, final String fieldName ) { - if ( data instanceof int[] ) - return ( int[] ) data; - else if ( data instanceof long[] ) - return copyToIntArray( ( long[] ) data, a -> a.length, ( a, i ) -> ( int ) a[ i ] ); - else if ( data instanceof double[] ) - return copyToIntArray( ( double[] ) data, a -> a.length, ( a, i ) -> ( int ) a[ i ] ); - else if ( data instanceof float[] ) - return copyToIntArray( ( float[] ) data, a -> a.length, ( a, i ) -> ( int ) a[ i ] ); + if ( array instanceof double[] ) + return ( double[] ) array; + else if ( array instanceof int[] ) + return copyToDoubleArray( ( int[] ) array, a -> a.length, ( a, i ) -> a[ i ] ); + else if ( array instanceof long[] ) + return copyToDoubleArray( ( long[] ) array, a -> a.length, ( a, i ) -> a[ i ] ); + else if ( array instanceof float[] ) + return copyToDoubleArray( ( float[] ) array, a -> a.length, ( a, i ) -> a[ i ] ); else throw new IllegalArgumentException( "Unsupported data type for " + fieldName + ": " + - ( data != null ? data.getClass().getName() : "null" ) ); + ( array != null ? 
array.getClass().getName() : "null" ) ); } - @FunctionalInterface private interface DoubleValueAtIndex< T > { - double apply( T data, int index ); + double apply( T array, int index ); } - private static < T > double[] copyToDoubleArray( final T data, final ToIntFunction< T > numElements, final DoubleValueAtIndex< T > elementAtIndex ) + private static < T > double[] copyToDoubleArray( final T array, final ToIntFunction< T > numElements, final DoubleValueAtIndex< T > elementAtIndex ) { - final double[] doubles = new double[ numElements.applyAsInt( data ) ]; - Arrays.setAll( doubles, i -> elementAtIndex.apply( data, i ) ); + final double[] doubles = new double[ numElements.applyAsInt( array ) ]; + Arrays.setAll( doubles, i -> elementAtIndex.apply( array, i ) ); return doubles; } - public static double[] convertToDoubleArray( final Object data, final String fieldName ) - { - if ( data instanceof double[] ) - return ( double[] ) data; - else if ( data instanceof int[] ) - return copyToDoubleArray( ( int[] ) data, a -> a.length, ( a, i ) -> a[ i ] ); - else if ( data instanceof long[] ) - return copyToDoubleArray( ( long[] ) data, a -> a.length, ( a, i ) -> a[ i ] ); - else if ( data instanceof float[] ) - return copyToDoubleArray( ( float[] ) data, a -> a.length, ( a, i ) -> a[ i ] ); - else - throw new IllegalArgumentException( - "Unsupported data type for " + fieldName + ": " + - ( data != null ? 
data.getClass().getName() : "null" ) ); - } + // -- write dataset fully -- + + public static void write( + final Object src, + final N5Writer writer, + final String dataset, + final DatasetAttributes attributes ) + { + final int[] blockSize = attributes.getBlockSize(); + final long[] size = attributes.getDimensions(); + final int n = attributes.getNumDimensions(); + final DataType dataType = attributes.getDataType(); + final CellGrid grid = new CellGrid( size, blockSize ); + final int[] srcSize = Util.long2int( size ); + final long[] srcPos = new long[ n ]; + final int[] destSize = new int[ n ]; + final int[] destPos = new int[ n ]; + final long[] gridPos = new long[ n ]; + final IntervalIterator gridIter = new IntervalIterator( grid.getGridDimensions() ); + while ( gridIter.hasNext() ) + { + gridIter.fwd(); + gridIter.localize( gridPos ); + grid.getCellDimensions( gridPos, srcPos, destSize ); + final DataBlock< ? > block = dataType.createDataBlock( destSize, gridPos ); + SubArrayCopy.copy( src, srcSize, Util.long2int( srcPos ), block.getData(), destSize, destPos, destSize ); + writer.writeBlock( dataset, attributes, block ); + } + } + // -- read dataset fully -- public static Object readFully( final N5Reader reader, final String dataset ) { @@ -238,13 +341,12 @@ private static void copy( final int n = attributes.getNumDimensions(); final long[] gridMin = new long[ n ]; - final long[] gridMax = new long[ n ]; final long[] gridSize = new long[ n ]; for ( int d = 0; d < n; d++ ) { gridMin[ d ] = destInterval.min( d ) / blockSize[ d ]; - gridMax[ d ] = destInterval.max( d ) / blockSize[ d ]; - gridSize[ d ] = gridMax[ d ] + 1 - gridMin[ d ]; + final long gridMax = destInterval.max( d ) / blockSize[ d ]; + gridSize[ d ] = gridMax + 1 - gridMin[ d ]; } final long[] gridPos = new long[ n ]; From ea2691089c23a07b50003ac83bfd0d48ec9d0737 Mon Sep 17 00:00:00 2001 From: tpietzsch Date: Sat, 26 Jul 2025 17:23:04 -0400 Subject: [PATCH 07/19] Use N5 to de/serialize GeffEdge --- 
src/main/java/org/mastodon/geff/GeffEdge.java | 330 +++++++----------- src/main/java/org/mastodon/geff/GeffNode.java | 43 +-- .../java/org/mastodon/geff/GeffUtils.java | 116 +++++- 3 files changed, 237 insertions(+), 252 deletions(-) diff --git a/src/main/java/org/mastodon/geff/GeffEdge.java b/src/main/java/org/mastodon/geff/GeffEdge.java index fd7c0c2..fa6c5f4 100644 --- a/src/main/java/org/mastodon/geff/GeffEdge.java +++ b/src/main/java/org/mastodon/geff/GeffEdge.java @@ -28,16 +28,20 @@ */ package org.mastodon.geff; -import java.io.IOException; +import static org.mastodon.geff.GeffUtil.checkSupportedVersion; +import static org.mastodon.geff.GeffUtils.verifyLength; + import java.util.ArrayList; import java.util.List; -import com.bc.zarr.ArrayParams; -import com.bc.zarr.DataType; -import com.bc.zarr.ZarrArray; -import com.bc.zarr.ZarrGroup; - -import ucar.ma2.InvalidRangeException; +import org.janelia.saalfeldlab.n5.N5Reader; +import org.janelia.saalfeldlab.n5.N5URI; +import org.janelia.saalfeldlab.n5.N5Writer; +import org.janelia.saalfeldlab.n5.zarr.N5ZarrReader; +import org.janelia.saalfeldlab.n5.zarr.N5ZarrWriter; +import org.mastodon.geff.GeffUtils.FlattenedInts; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Represents an edge in the Geff (Graph Exchange Format for Features) format. 
@@ -47,6 +51,7 @@ */ public class GeffEdge implements ZarrEntity { + private static final Logger LOG = LoggerFactory.getLogger( GeffEdge.class ); public static final int DEFAULT_EDGE_ID = -1; // Default ID for edges if not // specified @@ -193,205 +198,124 @@ public GeffEdge build() } } - /** - * Read edges from a Zarr group - */ - public static List< GeffEdge > readFromZarr( String zarrPath ) throws IOException, InvalidRangeException - { - return readFromZarr( zarrPath, Geff.VERSION ); - } - - public static List< GeffEdge > readFromZarr( String zarrPath, String geffVersion ) - throws IOException, InvalidRangeException - { - return readFromZarrWithChunks( zarrPath, geffVersion ); - } - - /** - * Alternative method to read edges with different chunk handling - */ - public static List< GeffEdge > readFromZarrWithChunks( String zarrPath, String geffVersion ) - throws IOException, InvalidRangeException - { - List< GeffEdge > edges = new ArrayList<>(); - - ZarrGroup edgesGroup = ZarrGroup.open( zarrPath + "/edges" ); - - System.out.println( - "Reading edges from Zarr path: " + zarrPath + " with Geff version: " + geffVersion ); - - if ( geffVersion.startsWith( "0.2" ) || geffVersion.startsWith( "0.3" ) ) - { - - int[][] edgeIds = ZarrUtils.readChunkedIntMatrix( edgesGroup, "ids", "edge IDs" ); - - double[] distances = new double[ 0 ]; - double[] scores = new double[ 0 ]; - - // Read attributes - if ( edgesGroup.getGroupKeys().contains( "props" ) ) - { - ZarrGroup propsGroup = edgesGroup.openSubGroup( "props" ); - - // Read distances from chunks - try - { - distances = ZarrUtils.readChunkedDoubleArray( propsGroup, "distance/values", "distances" ); - } - catch ( Exception e ) - { - System.out.println( "Warning: Could not read distances: " + e.getMessage() + " skipping..." 
); - } - - // Read scores from chunks - try - { - scores = ZarrUtils.readChunkedDoubleArray( propsGroup, "score/values", "scores" ); - } - catch ( Exception e ) - { - System.out.println( "Warning: Could not read scores: " + e.getMessage() + " skipping..." ); - } - } - - // 2D array case: each row is [source, target] - for ( int i = 0; i < edgeIds.length; i++ ) - { - if ( edgeIds[ i ].length == 2 ) - { - GeffEdge edge = GeffEdge.builder() - .setId( i ) - .setSourceNodeId( edgeIds[ i ][ 0 ] ) - .setTargetNodeId( edgeIds[ i ][ 1 ] ) - .setDistance( i < distances.length ? distances[ i ] : DEFAULT_DISTANCE ) - .setScore( i < scores.length ? scores[ i ] : DEFAULT_SCORE ) - .build(); - edges.add( edge ); - } - else - { - System.err.println( "Unexpected edge format at index " + i + ": " + edgeIds[ i ].length - + " elements. Expected 2 (source, target)." ); - } - } - } - else - { - throw new UnsupportedOperationException( "Unsupported Geff version: " + geffVersion ); - } - - return edges; - } - - /** - * Write edges to Zarr format with chunked structure - */ - public static void writeToZarr( List< GeffEdge > edges, String zarrPath ) throws IOException, InvalidRangeException - { - writeToZarr( edges, zarrPath, ZarrUtils.DEFAULT_CHUNK_SIZE ); // Default - // chunk - // size - } - - public static void writeToZarr( List< GeffEdge > edges, String zarrPath, String geffVersion ) - throws IOException, InvalidRangeException - { - writeToZarr( edges, zarrPath, ZarrUtils.DEFAULT_CHUNK_SIZE, geffVersion ); // Default - // chunk - // size - } - - /** - * Write edges to Zarr format with specified chunk size - */ - public static void writeToZarr( List< GeffEdge > edges, String zarrPath, int chunks ) - throws IOException, InvalidRangeException - { - writeToZarr( edges, zarrPath, chunks, Geff.VERSION ); // Default Geff - // version - } - - public static void writeToZarr( List< GeffEdge > edges, String zarrPath, int chunks, String geffVersion ) - throws IOException, InvalidRangeException - { 
- if ( edges == null ) - { throw new IllegalArgumentException( "Edges list cannot be null or empty" ); } - - if ( geffVersion == null || geffVersion.isEmpty() ) - { - geffVersion = Geff.VERSION; // Use default version if not specified - } - - System.out.println( - "Writing " + edges.size() + " edges to Zarr path: " + zarrPath + " with chunk size: " + chunks ); - - if ( geffVersion.startsWith( "0.2" ) || geffVersion.startsWith( "0.3" ) ) - { - // Create props subgroup for 0.3 version - - // Create the main edges group - ZarrGroup edgesGroup = ZarrGroup.create( zarrPath ); - - writeChunkedEdgeIds( edgesGroup, edges, chunks ); - - ZarrGroup propsGroup = edgesGroup.createSubGroup( "props" ); - - // Write distances - ZarrUtils.writeChunkedDoubleAttribute( edges, propsGroup, "distance", chunks, GeffEdge::getDistance ); - - // Write scores - ZarrUtils.writeChunkedDoubleAttribute( edges, propsGroup, "score", chunks, GeffEdge::getScore ); - } - else - { - throw new UnsupportedOperationException( "Unsupported Geff version: " + geffVersion ); - } - } - - private static void writeChunkedEdgeIds( ZarrGroup edgesGroup, List< GeffEdge > edges, int chunks ) - throws InvalidRangeException, IOException + /** + * Read edges from a Zarr group + */ + public static List< GeffEdge > readFromZarr( String zarrPath ) + { + return readFromZarr( zarrPath, Geff.VERSION ); + } + + public static List< GeffEdge > readFromZarr( String zarrPath, String geffVersion ) + { + LOG.debug( "Reading edges from Zarr path: " + zarrPath + " with Geff version: " + geffVersion ); + try ( final N5ZarrReader reader = new N5ZarrReader( zarrPath, true ) ) + { + return readFromN5( reader, "/", geffVersion ); + } + } + + public static List< GeffEdge > readFromN5( final N5Reader reader, final String group, final String geffVersion ) + { + checkSupportedVersion( geffVersion ); + final String path = N5URI.normalizeGroupPath( group ); + +// final DatasetAttributes attributes = reader.getDatasetAttributes( path + 
"/edges/ids" ); +// System.out.println( "attributes.getNumDimensions() = " + attributes.getNumDimensions() ); +// System.out.println( "attributes.getDimensions() = " + Arrays.toString( attributes.getDimensions() ) ); +// System.out.println( "attributes.getBlockSize() = " + Arrays.toString( attributes.getBlockSize() ) ); + + final FlattenedInts edgeIds = GeffUtils.readAsIntMatrix( reader, path + "/edges/ids", "edge IDs" ); + if ( edgeIds == null ) + { + throw new IllegalArgumentException( "required property '/edges/ids' not found" ); + } + final int numEdges = edgeIds.size()[ 1 ]; + + // Read distances from chunks + final double[] distances = GeffUtils.readAsDoubleArray( reader, "/edges/props/distance/values", "distances" ); + verifyLength( distances, numEdges, "/edges/props/distance/values" ); + + // Read scores from chunks + final double[] scores = GeffUtils.readAsDoubleArray( reader, "/edges/props/score/values", "scores" ); + verifyLength( scores, numEdges, "/edges/props/score/values" ); + + // Create edge objects + final List< GeffEdge > edges = new ArrayList<>(); + for ( int i = 0; i < numEdges; i++ ) + { + final int sourceNodeId = edgeIds.at( 0, i ); + final int targetNodeId = edgeIds.at( 1, i ); + final double score = scores != null ? scores[ i ] : DEFAULT_SCORE; + final double distance = distances != null ? 
distances[ i ] : DEFAULT_DISTANCE; + final GeffEdge edge = new GeffEdge( i, sourceNodeId, targetNodeId, score, distance ); + edges.add( edge ); + } + return edges; + } + + /** + * Write edges to Zarr format with chunked structure + */ + public static void writeToZarr( List< GeffEdge > edges, String zarrPath ) + { + writeToZarr( edges, zarrPath, ZarrUtils.DEFAULT_CHUNK_SIZE ); + } + + public static void writeToZarr( List< GeffEdge > edges, String zarrPath, String geffVersion ) + { + writeToZarr( edges, zarrPath, ZarrUtils.DEFAULT_CHUNK_SIZE, geffVersion ); + } + + /** + * Write edges to Zarr format with specified chunk size + */ + public static void writeToZarr( List< GeffEdge > edges, String zarrPath, int chunkSize ) + { + writeToZarr( edges, zarrPath, chunkSize, Geff.VERSION ); + } + + public static void writeToZarr( List< GeffEdge > edges, String zarrPath, int chunkSize, String geffVersion ) + { + LOG.debug( "Writing {} edges to Zarr path: {} with chunk size: {} to Geff version: {}", edges.size(), zarrPath, chunkSize, geffVersion ); + try ( final N5ZarrWriter writer = new N5ZarrWriter( zarrPath, true ) ) + { + writeToN5( edges, writer, "/", chunkSize, geffVersion ); + } + } + + public static void writeToN5( + final List< GeffEdge > edges, + final N5Writer writer, + final String group, + final int chunkSize, + String geffVersion ) + { + if ( edges == null ) + throw new NullPointerException( "Edges list cannot be null" ); + + if ( geffVersion == null || geffVersion.isEmpty() ) + { + geffVersion = Geff.VERSION; // Use default version if not specified + } + GeffUtil.checkSupportedVersion( geffVersion ); + + final String path = N5URI.normalizeGroupPath( group ); + + GeffUtils.writeIntMatrix( edges, 2, e -> new int[] { e.getSourceNodeId(), e.getTargetNodeId() }, writer, path + "/edges/ids", chunkSize ); + + // Write distances + GeffUtils.writeDoubleArray( edges, GeffEdge::getDistance, writer, path + "/edges/props/distance/values", chunkSize ); + + // Write scores + 
GeffUtils.writeDoubleArray( edges, GeffEdge::getScore, writer, path + "/edges/props/score/values", chunkSize ); + } + + private static void printEdgeIdStuff( List< GeffEdge > edges ) { // Write edges in chunks int totalEdges = edges.size(); - // Create ids subgroup - ZarrGroup idsGroup = edgesGroup.createSubGroup( "ids" ); - - // Create a single ZarrArray for all edges with proper chunking - ZarrArray edgesArray = idsGroup.createArray( "", new ArrayParams() - .shape( totalEdges, 2 ) - .chunks( chunks, 2 ) - .dataType( DataType.i4 ) ); - - int chunkIndex = 0; - for ( int startIdx = 0; startIdx < totalEdges; startIdx += chunks ) - { - int endIdx = Math.min( startIdx + chunks, totalEdges ); - int currentChunkSize = endIdx - startIdx; - - // Prepare chunk data array - int[] chunkData = new int[ currentChunkSize * 2 ]; // Flattened - // pairs for this - // chunk - - // Fill chunk data array - for ( int i = 0; i < currentChunkSize; i++ ) - { - GeffEdge edge = edges.get( startIdx + i ); - chunkData[ i * 2 ] = edge.getSourceNodeId(); // Source node ID - chunkData[ i * 2 + 1 ] = edge.getTargetNodeId(); // Target node - // ID - } - - // Write chunk at specific offset - edgesArray.write( chunkData, new int[] { currentChunkSize, 2 }, new int[] { startIdx, 0 } ); - - String chunkKey = String.format( "%.1f", ( double ) chunkIndex ); - System.out.println( "- Wrote chunk " + chunkKey + ": " + currentChunkSize + " edges (indices " + startIdx - + "-" + ( endIdx - 1 ) + ")" ); - chunkIndex++; - } - // Analyze edge data format long validEdges = edges.stream().filter( GeffEdge::isValid ).count(); long selfLoops = edges.stream().filter( GeffEdge::isSelfLoop ).count(); @@ -409,7 +333,7 @@ private static void writeChunkedEdgeIds( ZarrGroup edgesGroup, List< GeffEdge > int uniqueTargetNodes = ( int ) edges.stream().mapToInt( GeffEdge::getTargetNodeId ).distinct().count(); System.out.println( "Successfully wrote edges to Zarr format:" ); - System.out.println( "- " + totalEdges + " edges 
written in " + chunkIndex + " chunks" ); + System.out.println( "- " + totalEdges + " edges written" ); System.out.println( "- Source nodes: " + uniqueSourceNodes + " unique" ); System.out.println( "- Target nodes: " + uniqueTargetNodes + " unique" ); diff --git a/src/main/java/org/mastodon/geff/GeffNode.java b/src/main/java/org/mastodon/geff/GeffNode.java index 94c2609..27e88f0 100644 --- a/src/main/java/org/mastodon/geff/GeffNode.java +++ b/src/main/java/org/mastodon/geff/GeffNode.java @@ -29,12 +29,12 @@ package org.mastodon.geff; import static org.mastodon.geff.GeffUtil.checkSupportedVersion; +import static org.mastodon.geff.GeffUtils.verifyLength; import java.io.IOException; import java.util.ArrayList; import java.util.List; -import org.janelia.saalfeldlab.n5.DatasetAttributes; import org.janelia.saalfeldlab.n5.N5Reader; import org.janelia.saalfeldlab.n5.N5URI; import org.janelia.saalfeldlab.n5.N5Writer; @@ -44,13 +44,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.bc.zarr.ArrayParams; -import com.bc.zarr.DataType; -import com.bc.zarr.ZarrArray; -import com.bc.zarr.ZarrGroup; - -import ucar.ma2.InvalidRangeException; - /** * Represents a node in the Geff (Graph Exchange Format for Features) format. * This class handles reading and writing node data from/to Zarr format. 
@@ -398,7 +391,7 @@ public GeffNode build() /** * Write nodes to Zarr format with chunked structure */ - public static void writeToZarr( List< GeffNode > nodes, String zarrPath ) throws IOException, InvalidRangeException + public static void writeToZarr( List< GeffNode > nodes, String zarrPath ) { writeToZarr( nodes, zarrPath, ZarrUtils.DEFAULT_CHUNK_SIZE ); } @@ -406,20 +399,18 @@ public static void writeToZarr( List< GeffNode > nodes, String zarrPath ) throws * Write nodes to Zarr format with specified chunk size */ public static void writeToZarr( List< GeffNode > nodes, String zarrPath, int chunkSize ) - throws IOException, InvalidRangeException { writeToZarr( nodes, zarrPath, chunkSize, Geff.VERSION ); } public static void writeToZarr( List< GeffNode > nodes, String zarrPath, String geffVersion ) - throws IOException, InvalidRangeException { writeToZarr( nodes, zarrPath, ZarrUtils.DEFAULT_CHUNK_SIZE, geffVersion ); } public static void writeToZarr( List< GeffNode > nodes, String zarrPath, int chunkSize, String geffVersion ) { - LOG.debug( "Writing {} nodes to Zarr path: {} with chunk size: {} to Geff version: {}" + geffVersion, nodes.size(), zarrPath, chunkSize, geffVersion ); + LOG.debug( "Writing {} nodes to Zarr path: {} with chunk size: {} to Geff version: {}", nodes.size(), zarrPath, chunkSize, geffVersion ); try ( final N5ZarrWriter writer = new N5ZarrWriter( zarrPath, true ) ) { writeToN5( nodes, writer, "/", chunkSize, geffVersion ); @@ -434,7 +425,7 @@ public static void writeToN5( String geffVersion ) { if ( nodes == null ) - throw new NullPointerException( "Nodes list cannot be null or empty" ); + throw new NullPointerException( "Nodes list cannot be null" ); if ( geffVersion == null || geffVersion.isEmpty() ) { @@ -503,34 +494,10 @@ public static List< GeffNode > readFromZarr( final String zarrPath, final String } } - // TODO: move to Util - private static void verifyLength( final int[] array, final int expectedLength, final String name ) - { - if ( 
array != null && array.length != expectedLength ) - throw new IllegalArgumentException( "property " + name + " does not have expected length (" + array.length + " vs " + expectedLength + ")" ); - } - - // TODO: move to Util - private static void verifyLength( final double[] array, final int expectedLength, final String name ) - { - if ( array != null && array.length != expectedLength ) - throw new IllegalArgumentException( "property " + name + " does not have expected length (" + array.length + " vs " + expectedLength + ")" ); - } - - // TODO: move to Util - private static void verifyLength( final FlattenedDoubles array, final int expectedLength, final String name ) - { - if ( array != null && array.size()[ 0 ] != expectedLength ) - { - throw new IllegalArgumentException( "property " + name + " does not have expected length (" + array.size()[ 0 ] + " vs " + expectedLength + ")" ); - } - } - public static List< GeffNode > readFromN5( final N5Reader reader, final String group, final String geffVersion ) { checkSupportedVersion( geffVersion ); final String path = N5URI.normalizeGroupPath( group ); - final DatasetAttributes attributes = reader.getDatasetAttributes( path + "/edges/ids" ); // Read node IDs from chunks final int[] nodeIds = GeffUtils.readAsIntArray( reader, path + "/nodes/ids", "node IDs" ); @@ -574,7 +541,7 @@ public static List< GeffNode > readFromN5( final N5Reader reader, final String g final List< GeffNode > nodes = new ArrayList<>( numNodes ); for ( int i = 0; i < numNodes; i++ ) { - final int id = nodeIds[ i ]; + final int id = nodeIds[ i ]; final int t = timepoints != null ? timepoints[ i ] : -1; final double x = xCoords != null ? xCoords[ i ] : Double.NaN; final double y = yCoords != null ? 
yCoords[ i ] : Double.NaN; diff --git a/src/main/java/org/mastodon/geff/GeffUtils.java b/src/main/java/org/mastodon/geff/GeffUtils.java index c55f30e..e191d16 100644 --- a/src/main/java/org/mastodon/geff/GeffUtils.java +++ b/src/main/java/org/mastodon/geff/GeffUtils.java @@ -51,6 +51,29 @@ public static < T > void writeIntArray( write( data, writer, dataset, attributes ); } + public static < T > void writeIntMatrix( + final List< T > elements, + final int numColumns, + final Function< T, int[] > extractor, + final N5Writer writer, + final String dataset, + final int chunkSize ) + { + final int size = elements.size(); + final int[] data = new int[ numColumns * size ]; + for ( int i = 0; i < size; ++i ) { + final int[] row = extractor.apply( elements.get( i ) ); + System.arraycopy( row, 0, data, numColumns * i, numColumns ); + } + final DatasetAttributes attributes = new DatasetAttributes( + new long[] { numColumns, size }, + new int[] { numColumns, chunkSize }, + DataType.INT32, + new BloscCompression() ); + writer.createDataset(dataset, attributes); + write( data, writer, dataset, attributes ); + } + public static < T > void writeDoubleArray( final List< T > elements, final ToDoubleFunction< T > extractor, @@ -86,7 +109,7 @@ public static < T > void writeDoubleMatrix( } final DatasetAttributes attributes = new DatasetAttributes( new long[] { numColumns, size }, - new int[] { size, chunkSize }, + new int[] { numColumns, chunkSize }, DataType.FLOAT64, new BloscCompression() ); writer.createDataset(dataset, attributes); @@ -149,13 +172,6 @@ int[] size() return size; } - // TODO: remove until needed - double at( final int i0 ) - { - assert size.length == 1; - return data[ i0 ]; - } - double at( final int i0, final int i1 ) { assert size.length == 2; @@ -169,11 +185,11 @@ int[] size() return data[ i0 + size[ 0 ] * ( i1 * i2 * size[ 1 ] ) ]; } - double[] rowAt( final int i0 ) + double[] rowAt( final int i1 ) { assert size.length == 2; - final double[] row = new double[ 
size[ 1 ] ]; - Arrays.setAll( row, i1 -> at( i0, i1 ) ); + final double[] row = new double[ size[ 0 ] ]; + Arrays.setAll( row, i0 -> at( i0, i1 ) ); return row; } } @@ -193,6 +209,58 @@ public static FlattenedDoubles readAsDoubleMatrix( final N5Reader reader, final return new FlattenedDoubles( convertToDoubleArray( readFully( reader, dataset ), description ), attributes.getDimensions() ); } + public static class FlattenedInts + { + private final int[] data; + + private final int[] size; + + FlattenedInts( final int[] data, final int[] size ) + { + this.data = data; + this.size = size; + } + + FlattenedInts( final int[] data, final long[] size ) + { + this( data, Util.long2int( size ) ); + } + + int[] size() + { + return size; + } + + int at( final int i0, final int i1 ) + { + assert size.length == 2; + return data[ i0 + size[ 0 ] * i1 ]; + } + + int[] rowAt( final int i0 ) + { + assert size.length == 2; + final int[] row = new int[ size[ 1 ] ]; + Arrays.setAll( row, i1 -> at( i0, i1 ) ); + return row; + } + } + + public static FlattenedInts readAsIntMatrix( final N5Reader reader, final String dataset, final String description ) + { + if ( !reader.datasetExists( dataset ) ) + { + LOG.debug( "No arrays found in group for " + description ); + return null; + } + final DatasetAttributes attributes = reader.getDatasetAttributes( dataset ); + if ( attributes.getNumDimensions() != 2 ) + { + throw new IllegalArgumentException( "Expected 2D array" ); + } + return new FlattenedInts( convertToIntArray( readFully( reader, dataset ), description ), attributes.getDimensions() ); + } + public static int[] convertToIntArray( final Object array, final String fieldName ) { @@ -252,7 +320,33 @@ private static < T > double[] copyToDoubleArray( final T array, final ToIntFunct return doubles; } + public static void verifyLength( final int[] array, final int expectedLength, final String name ) + { + if ( array != null && array.length != expectedLength ) + throw new 
IllegalArgumentException( "property " + name + " does not have expected length (" + array.length + " vs " + expectedLength + ")" ); + } + + public static void verifyLength( final double[] array, final int expectedLength, final String name ) + { + if ( array != null && array.length != expectedLength ) + throw new IllegalArgumentException( "property " + name + " does not have expected length (" + array.length + " vs " + expectedLength + ")" ); + } + + public static void verifyLength( final FlattenedDoubles array, final int expectedLength, final String name ) + { + if ( array != null && array.size()[ array.size().length - 1 ] != expectedLength ) + { + throw new IllegalArgumentException( "property " + name + " does not have expected length (" + array.size()[ array.size().length - 1 ] + " vs " + expectedLength + ")" ); + } + } + public static void verifyLength( final FlattenedInts array, final int expectedLength, final String name ) + { + if ( array != null && array.size()[ array.size().length - 1 ] != expectedLength ) + { + throw new IllegalArgumentException( "property " + name + " does not have expected length (" + array.size()[ array.size().length - 1 ] + " vs " + expectedLength + ")" ); + } + } // -- write dataset fully -- From 8658d4289c88c9e36a6362b07b2b453105b8a666 Mon Sep 17 00:00:00 2001 From: tpietzsch Date: Sat, 26 Jul 2025 17:24:44 -0400 Subject: [PATCH 08/19] POM: add direct slf4j-api dependency --- pom.xml | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/pom.xml b/pom.xml index 8b68df5..9eb2046 100644 --- a/pom.xml +++ b/pom.xml @@ -71,6 +71,18 @@ imglib2 + + org.slf4j + slf4j-api + + + + com.bc.zarr jzarr From c233f642505534aca74a94f121b1ec9938982738 Mon Sep 17 00:00:00 2001 From: tpietzsch Date: Sat, 26 Jul 2025 17:47:57 -0400 Subject: [PATCH 09/19] WIP clean up --- src/main/java/org/mastodon/geff/Geff.java | 77 +- src/main/java/org/mastodon/geff/GeffEdge.java | 10 +- .../java/org/mastodon/geff/GeffMetadata.java | 2 +- 
src/main/java/org/mastodon/geff/GeffNode.java | 10 +- src/main/java/org/mastodon/geff/GeffUtil.java | 17 - .../java/org/mastodon/geff/GeffUtils.java | 42 +- .../java/org/mastodon/geff/ZarrEntity.java | 34 - .../java/org/mastodon/geff/ZarrUtils.java | 772 ------------------ .../geff/function/ToDoubleArrayFunction.java | 59 -- .../geff/function/ToIntArrayFunction.java | 59 -- .../org/mastodon/geff/ChunkedWriteTest.java | 12 +- .../org/mastodon/geff/GeffCreateTest.java | 9 +- src/test/java/org/mastodon/geff/GeffTest.java | 8 +- 13 files changed, 67 insertions(+), 1044 deletions(-) delete mode 100644 src/main/java/org/mastodon/geff/GeffUtil.java delete mode 100644 src/main/java/org/mastodon/geff/ZarrEntity.java delete mode 100644 src/main/java/org/mastodon/geff/ZarrUtils.java delete mode 100644 src/main/java/org/mastodon/geff/function/ToDoubleArrayFunction.java delete mode 100644 src/main/java/org/mastodon/geff/function/ToIntArrayFunction.java diff --git a/src/main/java/org/mastodon/geff/Geff.java b/src/main/java/org/mastodon/geff/Geff.java index 860f79b..bfe35bc 100644 --- a/src/main/java/org/mastodon/geff/Geff.java +++ b/src/main/java/org/mastodon/geff/Geff.java @@ -6,13 +6,13 @@ * %% * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: - * + * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. 
- * + * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -28,15 +28,10 @@ */ package org.mastodon.geff; -import java.io.IOException; import java.util.ArrayList; -import java.util.Iterator; import java.util.List; -import com.bc.zarr.ZarrArray; -import com.bc.zarr.ZarrGroup; - -import ucar.ma2.InvalidRangeException; +import org.janelia.saalfeldlab.n5.N5Exception.N5IOException; public class Geff { @@ -63,18 +58,19 @@ public static void main( String[] args ) System.out.println( "Geff library version: " + VERSION ); String zarrPath = "src/test/resources/mouse-20250719.zarr/tracks"; - String outputZarrPath = "src/test/resources/mouse-20250719_output.zarr/tracks"; + String outputZarrPath = "src/test/resources/mouse-20250719_output.zarr/tracks"; + String n5OutputZarrPath = "src/test/resources/n5-mouse-20250719_output.zarr/tracks"; try { // Demonstrate reading metadata System.out.println( "\n=== Reading Metadata ===" ); - GeffMetadata metadata = GeffMetadata.readFromZarr( zarrPath ); + GeffMetadata metadata = GeffMetadata.readFromZarr( zarrPath ); System.out.println( "Metadata loaded:" + metadata ); // Demonstrate reading nodes System.out.println( "\n=== Reading Nodes ===" ); - List< GeffNode > nodes = GeffNode.readFromZarr( zarrPath, metadata.getGeffVersion() ); + List< GeffNode > nodes = GeffNode.readFromZarr( zarrPath, metadata.getGeffVersion() ); System.out.println( "Read " + nodes.size() + " nodes:" ); for ( int i = 0; i < Math.min( 5, nodes.size() ); i++ ) { @@ -101,21 +97,17 @@ public static void main( String[] args ) // Try to write nodes (will show what would be written) try { - GeffNode.writeToZarr( nodes, outputZarrPath, ZarrUtils.getChunkSize( zarrPath ) ); + GeffNode.writeToZarr( nodes, outputZarrPath, GeffUtils.getChunkSize( zarrPath ) ); } catch ( UnsupportedOperationException 
e ) { System.out.println( "Note: " + e.getMessage() ); } - catch ( InvalidRangeException e ) - { - System.err.println( "InvalidRangeException during node writing: " + e.getMessage() ); - } // Try to write edges (will show what would be written) try { - GeffEdge.writeToZarr( edges, outputZarrPath, ZarrUtils.getChunkSize( zarrPath ) ); + GeffEdge.writeToZarr( edges, outputZarrPath, GeffUtils.getChunkSize( zarrPath ) ); } catch ( UnsupportedOperationException e ) { @@ -139,60 +131,15 @@ public static void main( String[] args ) + " edges" ); } - catch ( IOException e ) - { - System.err.println( "IOException occurred: " + e.getMessage() ); - e.printStackTrace(); - } - catch ( InvalidRangeException e ) + catch ( N5IOException e ) { - System.err.println( "InvalidRangeException occurred: " + e.getMessage() ); + System.err.println( "N5IOException occurred: " + e.getMessage() ); e.printStackTrace(); } catch ( Exception e ) { System.err.println( "Unexpected exception occurred: " + e.getMessage() ); - e.printStackTrace(); - } - - // Also demonstrate the original Zarr exploration code - System.out.println( "\n=== Original Zarr Exploration ===" ); - try - { - final ZarrGroup zarrTracks = ZarrGroup.open( zarrPath ); - final Iterator< String > groupKeyIter = zarrTracks.getGroupKeys().iterator(); - while ( groupKeyIter.hasNext() ) - { - String groupKey = groupKeyIter.next(); - System.out.println( "Found group: " + groupKey ); - } - final Iterator< String > arrayKeyIter = zarrTracks.getArrayKeys().iterator(); - while ( arrayKeyIter.hasNext() ) - { - String arrayKey = arrayKeyIter.next(); - System.out.println( "Found array: " + arrayKey ); - } - final Iterator< String > attrKeyIter = zarrTracks.getAttributes().keySet().iterator(); - while ( attrKeyIter.hasNext() ) - { - String attrKey = attrKeyIter.next(); - System.out.print( "Found attribute: " + attrKey ); - Object attrValue = zarrTracks.getAttributes().get( attrKey ); - System.out.println( " Value: " + attrValue ); - } - // 
Example of opening an array - System.out.println( "Opening 'nodes/ids' array..." ); - ZarrArray nodesIds = zarrTracks.openArray( "nodes/ids" ); - double[] nodesIdsData = ( double[] ) nodesIds.read(); - System.out.println( "Read nodes/ids data: " + nodesIdsData.length + " elements." ); - } - catch ( IOException e ) - { - e.printStackTrace(); - } - catch ( InvalidRangeException e ) - { - e.printStackTrace(); + e.printStackTrace(); } } diff --git a/src/main/java/org/mastodon/geff/GeffEdge.java b/src/main/java/org/mastodon/geff/GeffEdge.java index fa6c5f4..c24e65a 100644 --- a/src/main/java/org/mastodon/geff/GeffEdge.java +++ b/src/main/java/org/mastodon/geff/GeffEdge.java @@ -28,7 +28,7 @@ */ package org.mastodon.geff; -import static org.mastodon.geff.GeffUtil.checkSupportedVersion; +import static org.mastodon.geff.GeffUtils.checkSupportedVersion; import static org.mastodon.geff.GeffUtils.verifyLength; import java.util.ArrayList; @@ -49,7 +49,7 @@ * connects two nodes in a tracking graph, typically representing temporal * connections between objects across time points. 
*/ -public class GeffEdge implements ZarrEntity +public class GeffEdge { private static final Logger LOG = LoggerFactory.getLogger( GeffEdge.class ); @@ -259,12 +259,12 @@ public static List< GeffEdge > readFromN5( final N5Reader reader, final String g */ public static void writeToZarr( List< GeffEdge > edges, String zarrPath ) { - writeToZarr( edges, zarrPath, ZarrUtils.DEFAULT_CHUNK_SIZE ); + writeToZarr( edges, zarrPath, GeffUtils.DEFAULT_CHUNK_SIZE ); } public static void writeToZarr( List< GeffEdge > edges, String zarrPath, String geffVersion ) { - writeToZarr( edges, zarrPath, ZarrUtils.DEFAULT_CHUNK_SIZE, geffVersion ); + writeToZarr( edges, zarrPath, GeffUtils.DEFAULT_CHUNK_SIZE, geffVersion ); } /** @@ -298,7 +298,7 @@ public static void writeToN5( { geffVersion = Geff.VERSION; // Use default version if not specified } - GeffUtil.checkSupportedVersion( geffVersion ); + GeffUtils.checkSupportedVersion( geffVersion ); final String path = N5URI.normalizeGroupPath( group ); diff --git a/src/main/java/org/mastodon/geff/GeffMetadata.java b/src/main/java/org/mastodon/geff/GeffMetadata.java index 4bc2af4..6e5ce47 100644 --- a/src/main/java/org/mastodon/geff/GeffMetadata.java +++ b/src/main/java/org/mastodon/geff/GeffMetadata.java @@ -28,7 +28,7 @@ */ package org.mastodon.geff; -import static org.mastodon.geff.GeffUtil.checkSupportedVersion; +import static org.mastodon.geff.GeffUtils.checkSupportedVersion; import java.io.IOException; import java.util.Arrays; diff --git a/src/main/java/org/mastodon/geff/GeffNode.java b/src/main/java/org/mastodon/geff/GeffNode.java index 27e88f0..83ebadf 100644 --- a/src/main/java/org/mastodon/geff/GeffNode.java +++ b/src/main/java/org/mastodon/geff/GeffNode.java @@ -28,7 +28,7 @@ */ package org.mastodon.geff; -import static org.mastodon.geff.GeffUtil.checkSupportedVersion; +import static org.mastodon.geff.GeffUtils.checkSupportedVersion; import static org.mastodon.geff.GeffUtils.verifyLength; import java.io.IOException; @@ -48,7 
+48,7 @@ * Represents a node in the Geff (Graph Exchange Format for Features) format. * This class handles reading and writing node data from/to Zarr format. */ -public class GeffNode implements ZarrEntity +public class GeffNode { private static final Logger LOG = LoggerFactory.getLogger( GeffNode.class ); @@ -393,7 +393,7 @@ public GeffNode build() */ public static void writeToZarr( List< GeffNode > nodes, String zarrPath ) { - writeToZarr( nodes, zarrPath, ZarrUtils.DEFAULT_CHUNK_SIZE ); + writeToZarr( nodes, zarrPath, GeffUtils.DEFAULT_CHUNK_SIZE ); } /** * Write nodes to Zarr format with specified chunk size @@ -405,7 +405,7 @@ public static void writeToZarr( List< GeffNode > nodes, String zarrPath, int chu public static void writeToZarr( List< GeffNode > nodes, String zarrPath, String geffVersion ) { - writeToZarr( nodes, zarrPath, ZarrUtils.DEFAULT_CHUNK_SIZE, geffVersion ); + writeToZarr( nodes, zarrPath, GeffUtils.DEFAULT_CHUNK_SIZE, geffVersion ); } public static void writeToZarr( List< GeffNode > nodes, String zarrPath, int chunkSize, String geffVersion ) @@ -431,7 +431,7 @@ public static void writeToN5( { geffVersion = Geff.VERSION; // Use default version if not specified } - GeffUtil.checkSupportedVersion( geffVersion ); + GeffUtils.checkSupportedVersion( geffVersion ); final String path = N5URI.normalizeGroupPath( group ); diff --git a/src/main/java/org/mastodon/geff/GeffUtil.java b/src/main/java/org/mastodon/geff/GeffUtil.java deleted file mode 100644 index eeeebad..0000000 --- a/src/main/java/org/mastodon/geff/GeffUtil.java +++ /dev/null @@ -1,17 +0,0 @@ -package org.mastodon.geff; - -class GeffUtil -{ - static void checkSupportedVersion( final String version ) throws IllegalArgumentException - { - if ( !( version.startsWith( "0.2" ) || version.startsWith( "0.3" ) ) ) - { - throw new IllegalArgumentException( "geff_version " + version + " not supported." ); - } - } - - private GeffUtil() - { - // static utility methods. don't instantiate. 
- } -} diff --git a/src/main/java/org/mastodon/geff/GeffUtils.java b/src/main/java/org/mastodon/geff/GeffUtils.java index e191d16..0a7009a 100644 --- a/src/main/java/org/mastodon/geff/GeffUtils.java +++ b/src/main/java/org/mastodon/geff/GeffUtils.java @@ -10,9 +10,11 @@ import org.janelia.saalfeldlab.n5.DataBlock; import org.janelia.saalfeldlab.n5.DataType; import org.janelia.saalfeldlab.n5.DatasetAttributes; +import org.janelia.saalfeldlab.n5.N5Exception; import org.janelia.saalfeldlab.n5.N5Reader; import org.janelia.saalfeldlab.n5.N5Writer; import org.janelia.saalfeldlab.n5.blosc.BloscCompression; +import org.janelia.saalfeldlab.n5.zarr.N5ZarrReader; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -25,12 +27,35 @@ import net.imglib2.util.Intervals; import net.imglib2.util.Util; -// TODO: split good parts into GeffN5Utils, move questionable parts to ZarrUtils -public class GeffUtils +class GeffUtils { private static final Logger LOG = LoggerFactory.getLogger( GeffUtils.class ); + public static void checkSupportedVersion( final String version ) throws IllegalArgumentException + { + if ( !( version.startsWith( "0.2" ) || version.startsWith( "0.3" ) ) ) + { + throw new IllegalArgumentException( "geff_version " + version + " not supported." 
); + } + } + + // Default chunk size if not specified + public static final int DEFAULT_CHUNK_SIZE = 1000; + public static int getChunkSize( final String zarrPath ) + { + try ( final N5ZarrReader reader = new N5ZarrReader( zarrPath, true ) ) + { + final int[] chunkSize = reader.getDatasetAttributes( "/nodes/ids" ).getBlockSize(); + return chunkSize[ 0 ]; + } + catch ( final N5Exception.N5IOException e ) + { + // If the path doesn't exist, return a default chunk size + System.out.println( "Path doesn't exist, using default chunk size: " + e.getMessage() ); + return DEFAULT_CHUNK_SIZE; // Default chunk size + } + } public static < T > void writeIntArray( final List< T > elements, @@ -116,12 +141,6 @@ public static < T > void writeDoubleMatrix( write( data, writer, dataset, attributes ); } - - - - - - public static int[] readAsIntArray( final N5Reader reader, final String dataset, final String description ) { if ( !reader.datasetExists( dataset ) ) @@ -261,7 +280,6 @@ public static FlattenedInts readAsIntMatrix( final N5Reader reader, final String return new FlattenedInts( convertToIntArray( readFully( reader, dataset ), description ), attributes.getDimensions() ); } - public static int[] convertToIntArray( final Object array, final String fieldName ) { if ( array instanceof int[] ) @@ -383,7 +401,6 @@ public static void write( } - // -- read dataset fully -- public static Object readFully( final N5Reader reader, final String dataset ) @@ -462,4 +479,9 @@ private static void copy( SubArrayCopy.copy( block.getData(), blockInterval.size(), srcPos, dest, destSize, destPos, Util.long2int( intersection.dimensionsAsLongArray() ) ); } } + + private GeffUtils() + { + // static utility methods. don't instantiate. 
+ } } diff --git a/src/main/java/org/mastodon/geff/ZarrEntity.java b/src/main/java/org/mastodon/geff/ZarrEntity.java deleted file mode 100644 index cd8b29d..0000000 --- a/src/main/java/org/mastodon/geff/ZarrEntity.java +++ /dev/null @@ -1,34 +0,0 @@ -/*- - * #%L - * geff-java - * %% - * Copyright (C) 2025 Ko Sugawara - * %% - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. 
- * #L% - */ -package org.mastodon.geff; - -public interface ZarrEntity -{ - -} diff --git a/src/main/java/org/mastodon/geff/ZarrUtils.java b/src/main/java/org/mastodon/geff/ZarrUtils.java deleted file mode 100644 index ac0a128..0000000 --- a/src/main/java/org/mastodon/geff/ZarrUtils.java +++ /dev/null @@ -1,772 +0,0 @@ -/*- - * #%L - * geff-java - * %% - * Copyright (C) 2025 Ko Sugawara - * %% - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. 
- * #L% - */ -package org.mastodon.geff; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.function.ToIntFunction; - -import org.mastodon.geff.function.ToDoubleArrayFunction; -import org.mastodon.geff.function.ToIntArrayFunction; - -import com.bc.zarr.ArrayParams; -import com.bc.zarr.DataType; -import com.bc.zarr.ZarrArray; -import com.bc.zarr.ZarrGroup; - -import ucar.ma2.InvalidRangeException; - -public class ZarrUtils -{ - - public static final int DEFAULT_CHUNK_SIZE = 1000; // Default chunk size if - // not specified - - /** - * Helper method to read chunked int arrays - */ - public static int[] readChunkedIntArray( final ZarrGroup group, final String arrayPath, final String description ) - throws IOException - { - if ( group.getArrayKeys() == null || group.getArrayKeys().isEmpty() ) - { - System.out.println( "No arrays found in group for " + description ); - return new int[ 0 ]; // Return empty array if no arrays found - } - try - { - // First try reading as a whole array - final ZarrArray array = group.openArray( arrayPath ); - final Object data = array.read(); - return convertToIntArray( data, description ); - } - catch ( final Exception e ) - { - - // Try reading individual chunks if whole array reading fails - final List< Integer > allData = new ArrayList<>(); - - // Look for numeric chunk keys (0, 1, 2, etc.) - final ZarrGroup arrayGroup = group.openSubGroup( arrayPath ); - final String[] chunkKeys = arrayGroup.getArrayKeys().toArray( new String[ 0 ] ); - - for ( final String chunkKey : chunkKeys ) - { - try - { - if ( chunkKey.matches( "\\d+(\\.\\d+)?" 
) ) - { // numeric chunk key - final ZarrArray chunkArray = arrayGroup.openArray( chunkKey ); - final Object chunkData = chunkArray.read(); - final int[] chunkValues = convertToIntArray( chunkData, description + " chunk " + chunkKey ); - for ( final int value : chunkValues ) - { - allData.add( value ); - } - System.out - .println( "Read chunk " + chunkKey + " with " + chunkValues.length + " " + description ); - } - } - catch ( final Exception chunkException ) - { - System.err.println( "Could not read chunk " + chunkKey + " for " + description + ": " - + chunkException.getMessage() ); - } - } - - return allData.stream().mapToInt( Integer::intValue ).toArray(); - } - } - - /** - * Helper method to read chunked double arrays - */ - public static double[] readChunkedDoubleArray( final ZarrGroup group, final String arrayPath, final String description ) - throws IOException - { - if ( group.getArrayKeys() == null || group.getArrayKeys().isEmpty() ) - { - System.out.println( "No arrays found in group for " + description ); - return new double[ 0 ]; // Return empty array if no arrays found - } - try - { - // First try reading as a whole array - final ZarrArray array = group.openArray( arrayPath ); - final Object data = array.read(); - return convertToDoubleArray( data, description ); - } - catch ( final Exception e ) - { - - // Try reading individual chunks if whole array reading fails - final List< Double > allData = new ArrayList<>(); - - // Look for numeric chunk keys (0, 1, 2, etc.) - final ZarrGroup arrayGroup = group.openSubGroup( arrayPath ); - final String[] chunkKeys = arrayGroup.getArrayKeys().toArray( new String[ 0 ] ); - - for ( final String chunkKey : chunkKeys ) - { - try - { - if ( chunkKey.matches( "\\d+(\\.\\d+)?" 
) ) - { // numeric chunk key - final ZarrArray chunkArray = arrayGroup.openArray( chunkKey ); - final Object chunkData = chunkArray.read(); - final double[] chunkValues = convertToDoubleArray( chunkData, description + " chunk " + chunkKey ); - for ( final double value : chunkValues ) - { - allData.add( value ); - } - System.out - .println( "Read chunk " + chunkKey + " with " + chunkValues.length + " " + description ); - } - } - catch ( final Exception chunkException ) - { - System.err.println( "Could not read chunk " + chunkKey + " for " + description + ": " - + chunkException.getMessage() ); - } - } - - return allData.stream().mapToDouble( Double::doubleValue ).toArray(); - } - } - - /** - * Helper method to read chunked integer matrix - */ - public static int[][] readChunkedIntMatrix( final ZarrGroup group, final String arrayPath, final String description ) - throws IOException - { - if ( group.getArrayKeys() == null || group.getArrayKeys().isEmpty() ) - { - System.out.println( "No arrays found in group for " + description ); - return new int[ 0 ][]; // Return empty matrix if no arrays found - } - try - { - // First try reading as a whole array - final ZarrArray array = group.openArray( arrayPath ); - final Object data = array.read(); - return copyToIntMatrix( data, description, array.getShape() ); - } - catch ( final Exception e ) - { - - // Try reading individual chunks if whole array reading fails - final List< int[] > allData = new ArrayList<>(); - - // Look for numeric chunk keys (0, 1, 2, etc.) - final ZarrGroup arrayGroup = group.openSubGroup( arrayPath ); - final String[] chunkKeys = arrayGroup.getArrayKeys().toArray( new String[ 0 ] ); - - for ( final String chunkKey : chunkKeys ) - { - try - { - if ( chunkKey.matches( "\\d+(\\.\\d+)?" 
) ) - { // numeric chunk key - final ZarrArray chunkArray = arrayGroup.openArray( chunkKey ); - final Object chunkData = chunkArray.read(); - final int[][] chunkMatrix = copyToIntMatrix( chunkData, description, chunkArray.getShape() ); - for ( final int[] row : chunkMatrix ) - { - allData.add( row ); - } - System.out.println( - "Read " + description + " chunk " + chunkKey + " with " + chunkMatrix.length ); - } - } - catch ( final Exception chunkException ) - { - System.err - .println( "Could not read " + description + " chunk " + chunkKey + ": " - + chunkException.getMessage() ); - } - } - - return allData.toArray( new int[ 0 ][] ); - } - } - - /** - * Helper method to read chunked double matrix - */ - public static double[][] readChunkedDoubleMatrix( final ZarrGroup group, final String arrayPath, final String description ) - throws IOException - { - if ( group.getArrayKeys() == null || group.getArrayKeys().isEmpty() ) - { - System.out.println( "No arrays found in group for " + description ); - return new double[ 0 ][]; // Return empty matrix if no arrays found - } - try - { - // First try reading as a whole array - final ZarrArray array = group.openArray( arrayPath ); - final Object data = array.read(); - return copyToDoubleMatrix( data, description, array.getShape() ); - } - catch ( final Exception e ) - { - - // Try reading individual chunks if whole array reading fails - final List< double[] > allData = new ArrayList<>(); - - // Look for numeric chunk keys (0, 1, 2, etc.) - final ZarrGroup arrayGroup = group.openSubGroup( arrayPath ); - final String[] chunkKeys = arrayGroup.getArrayKeys().toArray( new String[ 0 ] ); - - for ( final String chunkKey : chunkKeys ) - { - try - { - if ( chunkKey.matches( "\\d+(\\.\\d+)?" 
) ) - { // numeric chunk key - final ZarrArray chunkArray = arrayGroup.openArray( chunkKey ); - final Object chunkData = chunkArray.read(); - final double[][] chunkMatrix = copyToDoubleMatrix( chunkData, description, chunkArray.getShape() ); - for ( final double[] row : chunkMatrix ) - { - allData.add( row ); - } - System.out.println( - "Read " + description + " chunk " + chunkKey + " with " + chunkMatrix.length ); - } - } - catch ( final Exception chunkException ) - { - System.err - .println( "Could not read " + description + " chunk " + chunkKey + ": " - + chunkException.getMessage() ); - } - } - - return allData.toArray( new double[ 0 ][] ); - } - } - - public static int getChunkSize( final String zarrPath ) throws IOException, InvalidRangeException - { - try - { - final ZarrGroup group = ZarrGroup.open( zarrPath + "/nodes" ); - return group.openArray( "ids" ).getChunks()[ 0 ]; - } - catch ( final IOException e ) - { - // If the path doesn't exist, return a default chunk size - System.out.println( "Path doesn't exist, using default chunk size: " + e.getMessage() ); - return DEFAULT_CHUNK_SIZE; // Default chunk size - } - } - - /** - * Helper method to write chunked int attributes - */ - public static < T extends ZarrEntity > void writeChunkedIntAttribute( final List< T > nodes, final ZarrGroup attrsGroup, - final String attrName, - final int chunkSize, final ToIntFunction< T > extractor ) - throws IOException, InvalidRangeException - { - - final int totalNodes = nodes.size(); - - // Create the attribute subgroup - final ZarrGroup attrGroup = attrsGroup.createSubGroup( attrName ); - final ZarrGroup valuesGroup = attrGroup.createSubGroup( "values" ); - - // Create a single ZarrArray for all values with proper chunking - final ZarrArray valuesArray = valuesGroup.createArray( "", new ArrayParams() - .shape( totalNodes ) - .chunks( chunkSize ) - .dataType( DataType.i4 ) ); - - // Write data in chunks - int chunkIndex = 0; - for ( int startIdx = 0; startIdx < 
totalNodes; startIdx += chunkSize ) - { - final int endIdx = Math.min( startIdx + chunkSize, totalNodes ); - final int currentChunkSize = endIdx - startIdx; - - // Prepare chunk data array - final int[] chunkData = new int[ currentChunkSize ]; - - // Fill chunk data array - for ( int i = 0; i < currentChunkSize; i++ ) - { - chunkData[ i ] = extractor.applyAsInt( nodes.get( startIdx + i ) ); - } - - // Write chunk at specific offset - valuesArray.write( chunkData, new int[] { currentChunkSize }, new int[] { startIdx } ); - - System.out.println( "- Wrote " + attrName + " chunk " + chunkIndex + ": " + currentChunkSize + " values" ); - chunkIndex++; - } - } - - /** - * Helper method to write chunked double attributes - */ - public static < T extends ZarrEntity > void writeChunkedDoubleAttribute( final List< T > nodes, final ZarrGroup attrsGroup, - final String attrName, - final int chunkSize, final java.util.function.ToDoubleFunction< T > extractor ) - throws IOException, InvalidRangeException - { - - final int totalNodes = nodes.size(); - - // Create the attribute subgroup - final ZarrGroup attrGroup = attrsGroup.createSubGroup( attrName ); - final ZarrGroup valuesGroup = attrGroup.createSubGroup( "values" ); - - // Create a single ZarrArray for all values with proper chunking - final ZarrArray valuesArray = valuesGroup.createArray( "", new ArrayParams() - .shape( totalNodes ) - .chunks( chunkSize ) - .dataType( DataType.f8 ) ); - - // Write data in chunks - int chunkIndex = 0; - for ( int startIdx = 0; startIdx < totalNodes; startIdx += chunkSize ) - { - final int endIdx = Math.min( startIdx + chunkSize, totalNodes ); - final int currentChunkSize = endIdx - startIdx; - - // Prepare chunk data array - final double[] chunkData = new double[ currentChunkSize ]; - - // Fill chunk data array - for ( int i = 0; i < currentChunkSize; i++ ) - { - chunkData[ i ] = extractor.applyAsDouble( nodes.get( startIdx + i ) ); - } - - // Write chunk at specific offset - 
valuesArray.write( chunkData, new int[] { currentChunkSize }, new int[] { startIdx } ); - - System.out.println( "- Wrote " + attrName + " chunk " + chunkIndex + ": " + currentChunkSize + " values" ); - chunkIndex++; - } - } - - /** - * Helper method to write chunked integer matrices - */ - public static < T extends ZarrEntity > void writeChunkedIntMatrix( final List< T > nodes, final ZarrGroup attrsGroup, - final String attrName, - final int chunkSize, final ToIntArrayFunction< T > extractor, final int numColumns ) - throws IOException, InvalidRangeException - { - final int totalNodes = nodes.size(); - - // Create the attribute subgroup - final ZarrGroup attrGroup = attrsGroup.createSubGroup( attrName ); - final ZarrGroup valuesGroup = attrGroup.createSubGroup( "values" ); - - // Create a single ZarrArray for all data with proper chunking - final ZarrArray array2d = valuesGroup.createArray( "", new ArrayParams() - .shape( totalNodes, numColumns ) - .chunks( new int[] { chunkSize, numColumns } ) - .dataType( DataType.f4 ) ); - - // Write data in chunks - int chunkIndex = 0; - for ( int startIdx = 0; startIdx < totalNodes; startIdx += chunkSize ) - { - final int endIdx = Math.min( startIdx + chunkSize, totalNodes ); - final int currentChunkSize = endIdx - startIdx; - - // Prepare chunk data array - final int[] chunkData = new int[ currentChunkSize * numColumns ]; - - // Fill chunk data array - for ( int i = 0; i < currentChunkSize; i++ ) - { - final T node = nodes.get( startIdx + i ); - final int[] values = extractor.applyAsIntArray( node ); - if ( values != null && values.length == numColumns ) - { - for ( int j = 0; j < numColumns; j++ ) - { - chunkData[ i * numColumns + j ] = values[ j ]; - } - } - else - { - for ( int j = 0; j < numColumns; j++ ) - { - chunkData[ i * numColumns + j ] = 0; // Default to zero - // if not set - } - } - } - - // Write chunk at specific offset - array2d.write( chunkData, new int[] { currentChunkSize, numColumns }, - new int[] { 
startIdx, 0 } ); - - System.out.println( "- Wrote " + attrName + " chunk " + chunkIndex + ": " + currentChunkSize + " values" ); - chunkIndex++; - } - } - - /** - * Helper method to write chunked double matrices - */ - public static < T extends ZarrEntity > void writeChunkedDoubleMatrix( final List< T > nodes, final ZarrGroup attrsGroup, - final String attrName, - final int chunkSize, final ToDoubleArrayFunction< T > extractor, final int numColumns ) - throws IOException, InvalidRangeException - { - final int totalNodes = nodes.size(); - - // Create the attribute subgroup - final ZarrGroup attrGroup = attrsGroup.createSubGroup( attrName ); - final ZarrGroup valuesGroup = attrGroup.createSubGroup( "values" ); - - // Create a single ZarrArray for all data with proper chunking - final ZarrArray array2d = valuesGroup.createArray( "", new ArrayParams() - .shape( totalNodes, numColumns ) - .chunks( new int[] { chunkSize, numColumns } ) - .dataType( DataType.f4 ) ); - - // Write data in chunks - int chunkIndex = 0; - for ( int startIdx = 0; startIdx < totalNodes; startIdx += chunkSize ) - { - final int endIdx = Math.min( startIdx + chunkSize, totalNodes ); - final int currentChunkSize = endIdx - startIdx; - - // Prepare chunk data array - final double[] chunkData = new double[ currentChunkSize * numColumns ]; - - // Fill chunk data array - for ( int i = 0; i < currentChunkSize; i++ ) - { - final T node = nodes.get( startIdx + i ); - final double[] values = extractor.applyAsDoubleArray( node ); - if ( values != null && values.length == numColumns ) - { - for ( int j = 0; j < numColumns; j++ ) - { - chunkData[ i * numColumns + j ] = values[ j ]; - } - } - else - { - for ( int j = 0; j < numColumns; j++ ) - { - chunkData[ i * numColumns + j ] = 0.0; // Default to - // zero if not - // set - } - } - } - - // Write chunk at specific offset - array2d.write( chunkData, new int[] { currentChunkSize, numColumns }, - new int[] { startIdx, 0 } ); - - System.out.println( "- Wrote " + 
attrName + " chunk " + chunkIndex + ": " + currentChunkSize + " values" ); - chunkIndex++; - } - } - - // Helper methods for type conversion - public static int[] convertToIntArray( final Object data, final String fieldName ) - { - if ( data instanceof int[] ) - { - return ( int[] ) data; - } - else if ( data instanceof long[] ) - { - final long[] longArray = ( long[] ) data; - final int[] intArray = new int[ longArray.length ]; - for ( int i = 0; i < longArray.length; i++ ) - { - intArray[ i ] = ( int ) longArray[ i ]; - } - return intArray; - } - else if ( data instanceof double[] ) - { - final double[] doubleArray = ( double[] ) data; - final int[] intArray = new int[ doubleArray.length ]; - for ( int i = 0; i < doubleArray.length; i++ ) - { - intArray[ i ] = ( int ) doubleArray[ i ]; - } - return intArray; - } - else if ( data instanceof float[] ) - { - final float[] floatArray = ( float[] ) data; - final int[] intArray = new int[ floatArray.length ]; - for ( int i = 0; i < floatArray.length; i++ ) - { - intArray[ i ] = ( int ) floatArray[ i ]; - } - return intArray; - } - else - { - throw new IllegalArgumentException( - "Unsupported data type for " + fieldName + ": " + - ( data != null ? 
data.getClass().getName() : "null" ) ); - } - } - - public static double[] convertToDoubleArray( final Object data, final String fieldName ) - { - if ( data instanceof double[] ) - { - return ( double[] ) data; - } - else if ( data instanceof float[] ) - { - final float[] floatArray = ( float[] ) data; - final double[] doubleArray = new double[ floatArray.length ]; - for ( int i = 0; i < floatArray.length; i++ ) - { - doubleArray[ i ] = floatArray[ i ]; - } - return doubleArray; - } - else if ( data instanceof int[] ) - { - final int[] intArray = ( int[] ) data; - final double[] doubleArray = new double[ intArray.length ]; - for ( int i = 0; i < intArray.length; i++ ) - { - doubleArray[ i ] = intArray[ i ]; - } - return doubleArray; - } - else if ( data instanceof long[] ) - { - final long[] longArray = ( long[] ) data; - final double[] doubleArray = new double[ longArray.length ]; - for ( int i = 0; i < longArray.length; i++ ) - { - doubleArray[ i ] = longArray[ i ]; - } - return doubleArray; - } - else - { - throw new IllegalArgumentException( - "Unsupported data type for " + fieldName + ": " + - ( data != null ? data.getClass().getName() : "null" ) ); - } - } - - public static int[][] copyToIntMatrix( final Object data, final String description, final int[] shape ) - { - if ( shape.length != 2 ) - throw new IllegalArgumentException( "Shape must have exactly 2 dimensions for a matrix, but had " + shape.length ); - - final int N = shape[ 0 ]; - final int nel = shape[ 1 ]; - if ( data.getClass().isArray() ) - { - final Class< ? 
> componentType = data.getClass().getComponentType(); - if ( componentType.isPrimitive() ) - { - if ( componentType == int.class ) - { - final int[] arr = ( int[] ) data; - final int[][] matrix = new int[ N ][ nel ]; - for ( int i = 0; i < N; i++ ) - for ( int j = 0; j < nel; j++ ) - matrix[ i ][ j ] = arr[ i * nel + j ]; - - return matrix; - } - else if ( componentType == byte.class ) - { - final byte[] byteArray = ( byte[] ) data; - final int[][] matrix = new int[ N ][ nel ]; - for ( int i = 0; i < N; i++ ) - for ( int j = 0; j < nel; j++ ) - matrix[ i ][ j ] = byteArray[ i * nel + j ]; - - return matrix; - } - else if ( componentType == short.class ) - { - final short[] shortArray = ( short[] ) data; - final int[][] matrix = new int[ N ][ nel ]; - for ( int i = 0; i < N; i++ ) - for ( int j = 0; j < nel; j++ ) - matrix[ i ][ j ] = shortArray[ i * nel + j ]; - - return matrix; - } - else if ( componentType == long.class ) - { - final long[] longArray = ( long[] ) data; - final int[][] matrix = new int[ N ][ nel ]; - for ( int i = 0; i < N; i++ ) - for ( int j = 0; j < nel; j++ ) - matrix[ i ][ j ] = ( int ) longArray[ i * nel + j ]; - - return matrix; - } - else if ( componentType == float.class ) - { - final float[] floatArray = ( float[] ) data; - final int[][] matrix = new int[ N ][ nel ]; - for ( int i = 0; i < N; i++ ) - for ( int j = 0; j < nel; j++ ) - matrix[ i ][ j ] = ( int ) floatArray[ i * nel + j ]; - - return matrix; - } - else if ( componentType == double.class ) - { - final double[] doubleArray = ( double[] ) data; - final int[][] matrix = new int[ N ][ nel ]; - for ( int i = 0; i < N; i++ ) - for ( int j = 0; j < nel; j++ ) - matrix[ i ][ j ] = ( int ) doubleArray[ i * nel + j ]; - - return matrix; - } - else - { - throw new IllegalArgumentException( - "Unsupported primitive type for " + description + ": " + componentType.getName() ); - } - } - else - { - throw new IllegalArgumentException( "The array is not of a primitive type." 
); - } - } - else - { - throw new IllegalArgumentException( "The object is not an array." ); - } - } - - public static double[][] copyToDoubleMatrix( final Object data, final String description, final int[] shape ) - { - if ( shape.length != 2 ) - throw new IllegalArgumentException( "Shape must have exactly 2 dimensions for a matrix, but had " + shape.length ); - - final int N = shape[ 0 ]; - final int nel = shape[ 1 ]; - if ( data.getClass().isArray() ) - { - final Class< ? > componentType = data.getClass().getComponentType(); - if ( componentType.isPrimitive() ) - { - if ( componentType == int.class ) - { - final int[] arr = ( int[] ) data; - final double[][] matrix = new double[ N ][ nel ]; - for ( int i = 0; i < N; i++ ) - for ( int j = 0; j < nel; j++ ) - matrix[ i ][ j ] = arr[ i * nel + j ]; - - return matrix; - } - else if ( componentType == byte.class ) - { - final byte[] byteArray = ( byte[] ) data; - final double[][] matrix = new double[ N ][ nel ]; - for ( int i = 0; i < N; i++ ) - for ( int j = 0; j < nel; j++ ) - matrix[ i ][ j ] = byteArray[ i * nel + j ]; - - return matrix; - } - else if ( componentType == short.class ) - { - final short[] shortArray = ( short[] ) data; - final double[][] matrix = new double[ N ][ nel ]; - for ( int i = 0; i < N; i++ ) - for ( int j = 0; j < nel; j++ ) - matrix[ i ][ j ] = shortArray[ i * nel + j ]; - - return matrix; - } - else if ( componentType == long.class ) - { - final long[] longArray = ( long[] ) data; - final double[][] matrix = new double[ N ][ nel ]; - for ( int i = 0; i < N; i++ ) - for ( int j = 0; j < nel; j++ ) - matrix[ i ][ j ] = longArray[ i * nel + j ]; - - return matrix; - } - else if ( componentType == float.class ) - { - final float[] floatArray = ( float[] ) data; - final double[][] matrix = new double[ N ][ nel ]; - for ( int i = 0; i < N; i++ ) - for ( int j = 0; j < nel; j++ ) - matrix[ i ][ j ] = floatArray[ i * nel + j ]; - - return matrix; - } - else if ( componentType == double.class ) 
- { - final double[] doubleArray = ( double[] ) data; - final double[][] matrix = new double[ N ][ nel ]; - for ( int i = 0; i < N; i++ ) - for ( int j = 0; j < nel; j++ ) - matrix[ i ][ j ] = doubleArray[ i * nel + j ]; - - return matrix; - } - else - { - throw new IllegalArgumentException( - "Unsupported primitive type for " + description + ": " + componentType.getName() ); - } - } - else - { - throw new IllegalArgumentException( "The array is not of a primitive type." ); - } - } - else - { - throw new IllegalArgumentException( "The object is not an array." ); - } - } -} diff --git a/src/main/java/org/mastodon/geff/function/ToDoubleArrayFunction.java b/src/main/java/org/mastodon/geff/function/ToDoubleArrayFunction.java deleted file mode 100644 index 59a92c2..0000000 --- a/src/main/java/org/mastodon/geff/function/ToDoubleArrayFunction.java +++ /dev/null @@ -1,59 +0,0 @@ -/*- - * #%L - * geff-java - * %% - * Copyright (C) 2025 Ko Sugawara - * %% - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - * #L% - */ -package org.mastodon.geff.function; - -import java.util.function.Function; - -/** - * Represents a function that produces a double-array result. This is the - * {@code double} array-producing primitive specialization for {@link Function}. - * - *

- * This is a functional interface whose - * functional method is {@link #applyAsDoubleArray(Object)}. - * - * @param - * the type of the input to the function - * - * @see Function - * @since 1.8 - */ -@FunctionalInterface -public interface ToDoubleArrayFunction< T > -{ - - /** - * Applies this function to the given argument. - * - * @param value - * the function argument - * @return the function result - */ - double[] applyAsDoubleArray( T value ); -} diff --git a/src/main/java/org/mastodon/geff/function/ToIntArrayFunction.java b/src/main/java/org/mastodon/geff/function/ToIntArrayFunction.java deleted file mode 100644 index 13b138c..0000000 --- a/src/main/java/org/mastodon/geff/function/ToIntArrayFunction.java +++ /dev/null @@ -1,59 +0,0 @@ -/*- - * #%L - * geff-java - * %% - * Copyright (C) 2025 Ko Sugawara - * %% - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - * #L% - */ -package org.mastodon.geff.function; - -import java.util.function.Function; - -/** - * Represents a function that produces a int-array result. This is the - * {@code int} array-producing primitive specialization for {@link Function}. - * - *

- * This is a functional interface whose - * functional method is {@link #applyAsIntArray(Object)}. - * - * @param - * the type of the input to the function - * - * @see Function - * @since 1.8 - */ -@FunctionalInterface -public interface ToIntArrayFunction< T > -{ - - /** - * Applies this function to the given argument. - * - * @param value - * the function argument - * @return the function result - */ - int[] applyAsIntArray( T value ); -} diff --git a/src/test/java/org/mastodon/geff/ChunkedWriteTest.java b/src/test/java/org/mastodon/geff/ChunkedWriteTest.java index 8fd7a34..6bc34d4 100644 --- a/src/test/java/org/mastodon/geff/ChunkedWriteTest.java +++ b/src/test/java/org/mastodon/geff/ChunkedWriteTest.java @@ -6,13 +6,13 @@ * %% * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: - * + * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. 
- * + * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -28,8 +28,6 @@ */ package org.mastodon.geff; -import ucar.ma2.InvalidRangeException; - import java.io.IOException; import java.util.ArrayList; import java.util.List; @@ -60,7 +58,7 @@ public static void main( String[] args ) /** * Test writing nodes with chunked structure */ - private static void testNodeChunkedWriting() throws IOException, InvalidRangeException + private static void testNodeChunkedWriting() { System.out.println( "=== Testing Node Chunked Writing ===" ); @@ -92,7 +90,7 @@ private static void testNodeChunkedWriting() throws IOException, InvalidRangeExc /** * Test writing edges with chunked structure */ - private static void testEdgeChunkedWriting() throws IOException, InvalidRangeException + private static void testEdgeChunkedWriting() { System.out.println( "\n=== Testing Edge Chunked Writing ===" ); @@ -121,7 +119,7 @@ private static void testEdgeChunkedWriting() throws IOException, InvalidRangeExc /** * Test writing metadata with GEFF schema compliance */ - private static void testMetadataWriting() throws IOException, InvalidRangeException + private static void testMetadataWriting() throws IOException { System.out.println( "\n=== Testing Metadata Writing ===" ); diff --git a/src/test/java/org/mastodon/geff/GeffCreateTest.java b/src/test/java/org/mastodon/geff/GeffCreateTest.java index 26a79f9..54fae89 100644 --- a/src/test/java/org/mastodon/geff/GeffCreateTest.java +++ b/src/test/java/org/mastodon/geff/GeffCreateTest.java @@ -6,13 +6,13 @@ * %% * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: - * + * * 1. 
Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. - * + * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -32,12 +32,9 @@ import java.util.ArrayList; import java.util.List; -import ucar.ma2.InvalidRangeException; - public class GeffCreateTest { - - public static void main( String[] args ) throws IOException, InvalidRangeException + public static void main( String[] args ) throws IOException { List< GeffNode > newNodes = new ArrayList<>(); GeffNode node0 = new GeffNode.Builder() diff --git a/src/test/java/org/mastodon/geff/GeffTest.java b/src/test/java/org/mastodon/geff/GeffTest.java index e84106e..4b6ce18 100644 --- a/src/test/java/org/mastodon/geff/GeffTest.java +++ b/src/test/java/org/mastodon/geff/GeffTest.java @@ -6,13 +6,13 @@ * %% * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: - * + * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. 
- * + * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -73,8 +73,7 @@ void setUp() node.setX( i * 10.0 ); node.setY( i * 15.0 ); node.setSegmentId( i + 100 ); - // Note: setPosition is deprecated, using individual coordinates - // instead + // Note: setPosition is deprecated, using individual coordinates instead testNodes.add( node ); } @@ -250,6 +249,7 @@ void testWriteOperations( @TempDir Path tempDir ) catch ( Exception e ) { // If any exception occurs, fail with details + e.printStackTrace(); fail( "Write operations should not throw exceptions: " + e.getMessage() ); } } ); From 8d16b7a020eb96a7c3baa0ef5da209980f425a94 Mon Sep 17 00:00:00 2001 From: tpietzsch Date: Sat, 26 Jul 2025 17:57:27 -0400 Subject: [PATCH 10/19] tolerate missing data --- src/main/java/org/mastodon/geff/GeffUtils.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/main/java/org/mastodon/geff/GeffUtils.java b/src/main/java/org/mastodon/geff/GeffUtils.java index 0a7009a..8aee472 100644 --- a/src/main/java/org/mastodon/geff/GeffUtils.java +++ b/src/main/java/org/mastodon/geff/GeffUtils.java @@ -88,6 +88,8 @@ public static < T > void writeIntMatrix( final int[] data = new int[ numColumns * size ]; for ( int i = 0; i < size; ++i ) { final int[] row = extractor.apply( elements.get( i ) ); + if ( row == null || row.length < numColumns ) + continue; System.arraycopy( row, 0, data, numColumns * i, numColumns ); } final DatasetAttributes attributes = new DatasetAttributes( @@ -130,6 +132,8 @@ public static < T > void writeDoubleMatrix( final double[] data = new double[ numColumns * size ]; for ( int i = 0; i < size; ++i ) { final double[] row = extractor.apply( elements.get( i ) ); + if ( row == null || row.length < numColumns ) + continue; System.arraycopy( row, 0, data, numColumns * i, numColumns ); } 
final DatasetAttributes attributes = new DatasetAttributes( From 95f48e7bdb929fb00a0ec782e55dc57f720f1fb4 Mon Sep 17 00:00:00 2001 From: tpietzsch Date: Sat, 26 Jul 2025 17:58:26 -0400 Subject: [PATCH 11/19] remove outdated versions from tests --- src/test/java/org/mastodon/geff/GeffTest.java | 8 ++++---- .../java/org/mastodon/geff/VersionPatternTest.java | 12 ++++++------ 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/src/test/java/org/mastodon/geff/GeffTest.java b/src/test/java/org/mastodon/geff/GeffTest.java index 4b6ce18..fddda27 100644 --- a/src/test/java/org/mastodon/geff/GeffTest.java +++ b/src/test/java/org/mastodon/geff/GeffTest.java @@ -177,7 +177,7 @@ void testMetadataValidation() // Test invalid metadata - create axes with invalid bounds GeffMetadata invalidMetadata = new GeffMetadata(); - invalidMetadata.setGeffVersion( "0.1" ); + invalidMetadata.setGeffVersion( "0.2" ); invalidMetadata.setDirected( false ); // Create invalid axes (min > max) @@ -279,9 +279,9 @@ void testVersionValidationEdgeCases() // Test various valid version formats String[] validVersions = { - "0.0", "0.1", "0.2", "0.3", - "0.1.1", "0.2.0", "0.3.5", - "0.2.2.dev20", "0.1.0-alpha.1", "0.3.0-beta.2+build.123" + "0.2", "0.3", + "0.2.0", "0.3.5", + "0.2.2.dev20", "0.2.0-alpha.1", "0.3.0-beta.2+build.123" }; for ( String version : validVersions ) diff --git a/src/test/java/org/mastodon/geff/VersionPatternTest.java b/src/test/java/org/mastodon/geff/VersionPatternTest.java index 63cd2d3..0ffde81 100644 --- a/src/test/java/org/mastodon/geff/VersionPatternTest.java +++ b/src/test/java/org/mastodon/geff/VersionPatternTest.java @@ -6,13 +6,13 @@ * %% * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: - * + * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. 
Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. - * + * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -44,15 +44,15 @@ public void testValidVersionPatterns() { // Test cases for different version formats that should be accepted String[] validVersions = { - "0.1", // Basic major.minor - "0.1.1", // With patch version + "0.2", // Basic major.minor + "0.2.1", // With patch version "0.2.2", // Another patch version "0.2.2.dev20", // Development version "0.2.2.dev20+g611e7a2", // With git hash "0.2.2.dev20+g611e7a2.d20250719", // Full development version "0.3.0-alpha.1", // Alpha version - "0.1.0-beta.2+build.123", // Beta with build metadata - "0.0.5.rc1", // Release candidate + "0.2.0-beta.2+build.123", // Beta with build metadata + "0.2.5.rc1", // Release candidate }; for ( String version : validVersions ) From bdd9810f2202c071303565668e048169f17d4994 Mon Sep 17 00:00:00 2001 From: tpietzsch Date: Sat, 26 Jul 2025 18:11:19 -0400 Subject: [PATCH 12/19] update with origin/main changes --- README.md | 45 ++++++++------ pom.xml | 2 +- src/main/java/org/mastodon/geff/GeffAxis.java | 8 +-- src/main/java/org/mastodon/geff/GeffEdge.java | 3 +- .../java/org/mastodon/geff/GeffMetadata.java | 4 +- .../java/org/mastodon/geff/GeffUtils.java | 2 +- .../geff/geom/GeffSerializableVertex.java | 19 ++++++ .../org/mastodon/geff/ChunkedWriteTest.java | 6 +- .../java/org/mastodon/geff/GeffAxisTest.java | 62 +++++++++---------- src/test/java/org/mastodon/geff/GeffTest.java | 20 +++--- .../org/mastodon/geff/VersionPatternTest.java | 1 - 11 files changed, 99 insertions(+), 73 deletions(-) create mode 100644 
src/main/java/org/mastodon/geff/geom/GeffSerializableVertex.java diff --git a/README.md b/README.md index 4546505..288a02c 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ [![Build Status](https://github.com/live-image-tracking-tools/geff-java/actions/workflows/build.yml/badge.svg)](https://github.com/live-image-tracking-tools/geff-java/actions/workflows/build.yml) [![License](https://img.shields.io/badge/License-BSD%202--Clause-orange.svg)](https://opensource.org/licenses/BSD-2-Clause) -[![Coverage](https://sonarcloud.io/api/project_badges/measure?project=mastodon-sc_geff-java&metric=coverage)](https://sonarcloud.io/summary/overall?id=mastodon-sc_geff-java) -[![Lines of Code](https://sonarcloud.io/api/project_badges/measure?project=mastodon-sc_geff-java&metric=ncloc)](https://sonarcloud.io/summary/overall?id=mastodon-sc_geff-java) +[![Coverage](https://sonarcloud.io/api/project_badges/measure?project=live-image-tracking-tools_geff-java&metric=coverage)](https://sonarcloud.io/summary/overall?id=live-image-tracking-tools_geff-java) +[![Lines of Code](https://sonarcloud.io/api/project_badges/measure?project=live-image-tracking-tools_geff-java&metric=ncloc)](https://sonarcloud.io/summary/overall?id=live-image-tracking-tools_geff-java) # geff Java implementation @@ -27,6 +27,7 @@ Represents nodes in tracking graphs with spatial and temporal attributes: - Spatial coordinates (x, y, z) - Segment identifiers - Additional properties: color, radius, covariance2d, covariance3d +- Polygon geometry: separate polygonX and polygonY coordinate arrays with polygon offset for serialization - Builder pattern for convenient object construction - Chunked Zarr I/O support for versions 0.1, 0.2, and 0.3 @@ -45,6 +46,11 @@ Represents axis metadata for spatial and temporal dimensions: - Unit specifications with common constants - Optional min/max bounds for ROI definition +### GeffSerializableVertex +Lightweight geometry class internally used for storing polygon vertex coordinates: 
+- Simple (x, y) coordinate storage +- Part of the geometry package for efficient polygon handling + ### GeffMetadata Handles Geff metadata with schema validation: - Version compatibility checking with pattern matching for development versions @@ -93,6 +99,8 @@ GeffNode node0 = new GeffNode.Builder() .color(new double[]{1.0, 0.0, 0.0, 1.0}) // Red color .radius(2.5) .covariance2d(new double[]{1.0, 0.2, 0.2, 1.5}) // 2x2 covariance matrix flattened + .polygonX(new double[]{1.0, 2.0, 3.0, 4.0}) // Polygon X coordinates + .polygonY(new double[]{5.0, 6.0, 7.0, 8.0}) // Polygon Y coordinates .build(); newNodes.add(node0); @@ -104,6 +112,8 @@ GeffNode node1 = new GeffNode.Builder() .z(6.0) .segmentId(1) .covariance2d(new double[]{0.8, 0.1, 0.1, 1.2}) // Different covariance + .polygonX(new double[]{-1.0, -2.0, -3.0, -4.0}) // Different polygon X coordinates + .polygonY(new double[]{-5.0, -6.0, -7.0, -8.0}) // Different polygon Y coordinates .build(); newNodes.add(node1); @@ -155,22 +165,19 @@ dataset.zarr/ ├── .zgroup ├── nodes/ │ ├── .zgroup - │ ├── attrs/ # For Geff 0.1 format - │ │ ├── t/ # Time points - │ │ ├── x/ # X coordinates - │ │ ├── y/ # Y coordinates - │ │ ├── seg_id/ # Segment IDs - │ │ └── position/ # Multidimensional positions │ ├── props/ # For Geff 0.2/0.3 format - │ │ ├── t/ # Time points - │ │ ├── x/ # X coordinates - │ │ ├── y/ # Y coordinates - │ │ ├── z/ # Z coordinates (optional) - │ │ ├── color/ # Node colors (optional) - │ │ ├── radius/ # Node radii (optional) - │ │ ├── track_id/ # Track identifiers (optional) - │ │ ├── covariance2d/ # 2D covariance matrices (optional) - │ │ └── covariance3d/ # 3D covariance matrices (optional) + │ │ ├── t/ # Time points [N] + │ │ ├── x/ # X coordinates [N] + │ │ ├── y/ # Y coordinates [N] + │ │ ├── z/ # Z coordinates [N] (optional) + │ │ ├── color/ # Node colors [N] (optional) + │ │ ├── radius/ # Node radii [N] (optional) + │ │ ├── track_id/ # Track identifiers [N] (optional) + │ │ ├── covariance2d/ # 2D 
covariance matrices for ellipse serialized in 1D [N, 4] (optional) + │ │ ├── covariance3d/ # 3D covariance matrices for ellipsoid serialized in 1D [N, 6] (optional) + │ │ └── polygon/ # Polygon coordinates (optional) + │ │ ├── slices/ # Polygon slices with startIndex and endIndex [N, 2] (optional) + │ │ └── values/ # XY coordinates of vertices in polygons [numVertices, 2] (optional) │ └── ids/ │ └── 0 # Node ID chunks └── edges/ @@ -200,8 +207,8 @@ dataset.zarr/ ### Contribute Code or Provide Feedback * You are welcome to submit Pull Requests to this repository. This repository runs code analyses on - every Pull Request using [SonarCloud](https://sonarcloud.io/dashboard?id=mastodon-sc_geff-java). -* Please read the [general advice](https://github.com/mastodon-sc/) re contributing to Mastodon and its plugins. + every Pull Request using [SonarCloud](https://sonarcloud.io/dashboard?id=live-image-tracking-tools_geff-java). +* Please read the [general advice](https://github.com/live-image-tracking-tools/) for contributing to Live Image Tracking Tools and its projects. 
### Contribute Documentation diff --git a/pom.xml b/pom.xml index 9eb2046..a5e332c 100644 --- a/pom.xml +++ b/pom.xml @@ -10,7 +10,7 @@ org.litt geff - 0.3.1-SNAPSHOT + 1.0.0-SNAPSHOT org.litt.geff diff --git a/src/main/java/org/mastodon/geff/GeffAxis.java b/src/main/java/org/mastodon/geff/GeffAxis.java index 2466008..55507f3 100644 --- a/src/main/java/org/mastodon/geff/GeffAxis.java +++ b/src/main/java/org/mastodon/geff/GeffAxis.java @@ -56,13 +56,13 @@ public class GeffAxis public static final String TYPE_SPACE = "space"; // Common units - public static final String UNIT_SECONDS = "seconds"; + public static final String UNIT_SECOND = "second"; - public static final String UNIT_MICROMETERS = "micrometers"; + public static final String UNIT_MICROMETER = "micrometer"; - public static final String UNIT_PIXELS = "pixels"; + public static final String UNIT_PIXEL = "pixel"; - public static final String UNIT_MILLIMETERS = "millimeters"; + public static final String UNIT_MILLIMETER = "millimeter"; private String name; diff --git a/src/main/java/org/mastodon/geff/GeffEdge.java b/src/main/java/org/mastodon/geff/GeffEdge.java index c24e65a..fac9509 100644 --- a/src/main/java/org/mastodon/geff/GeffEdge.java +++ b/src/main/java/org/mastodon/geff/GeffEdge.java @@ -384,7 +384,8 @@ public boolean equals( Object obj ) return sourceNodeId == geffEdge.sourceNodeId && targetNodeId == geffEdge.targetNodeId && id == geffEdge.id && - Double.compare( geffEdge.score, score ) == 0; + Double.compare( geffEdge.score, score ) == 0 && + Double.compare( geffEdge.distance, distance ) == 0; } @Override diff --git a/src/main/java/org/mastodon/geff/GeffMetadata.java b/src/main/java/org/mastodon/geff/GeffMetadata.java index 6e5ce47..7570846 100644 --- a/src/main/java/org/mastodon/geff/GeffMetadata.java +++ b/src/main/java/org/mastodon/geff/GeffMetadata.java @@ -59,13 +59,13 @@ public class GeffMetadata private static final Logger LOG = LoggerFactory.getLogger( GeffMetadata.class ); // Supported GEFF 
versions - public static final List< String > SUPPORTED_VERSIONS = Arrays.asList( "0.2", "0.3" ); + public static final List< String > SUPPORTED_VERSIONS = Arrays.asList( "0.2", "0.3", "0.4" ); // Pattern to match major.minor versions, allowing for patch versions and // development versions // Examples: 0.1.1, 0.2.2.dev20+g611e7a2.d20250719, 0.2.0-alpha.1, etc. private static final Pattern SUPPORTED_VERSIONS_PATTERN = Pattern - .compile( "(0\\.2|0\\.3)(?:\\.\\d+)?(?:\\.[a-zA-Z0-9]+(?:\\d+)?)?(?:[+\\-][a-zA-Z0-9\\.]+)*" ); + .compile( "(0\\.2|0\\.3|0\\.4)(?:\\.\\d+)?(?:\\.[a-zA-Z0-9]+(?:\\d+)?)?(?:[+\\-][a-zA-Z0-9\\.]+)*" ); // Metadata attributes - matching the Python schema private String geffVersion; diff --git a/src/main/java/org/mastodon/geff/GeffUtils.java b/src/main/java/org/mastodon/geff/GeffUtils.java index 8aee472..d088a57 100644 --- a/src/main/java/org/mastodon/geff/GeffUtils.java +++ b/src/main/java/org/mastodon/geff/GeffUtils.java @@ -33,7 +33,7 @@ class GeffUtils public static void checkSupportedVersion( final String version ) throws IllegalArgumentException { - if ( !( version.startsWith( "0.2" ) || version.startsWith( "0.3" ) ) ) + if ( !( version.startsWith( "0.2" ) || version.startsWith( "0.3" ) || version.startsWith( "0.4" ) ) ) { throw new IllegalArgumentException( "geff_version " + version + " not supported." 
); } diff --git a/src/main/java/org/mastodon/geff/geom/GeffSerializableVertex.java b/src/main/java/org/mastodon/geff/geom/GeffSerializableVertex.java new file mode 100644 index 0000000..7df9625 --- /dev/null +++ b/src/main/java/org/mastodon/geff/geom/GeffSerializableVertex.java @@ -0,0 +1,19 @@ +package org.mastodon.geff.geom; + +public class GeffSerializableVertex +{ + final double x; + + final double y; + + public GeffSerializableVertex( final double x, final double y ) + { + this.x = x; + this.y = y; + } + + public double[] getCoordinates() + { + return new double[] { x, y }; + } +} diff --git a/src/test/java/org/mastodon/geff/ChunkedWriteTest.java b/src/test/java/org/mastodon/geff/ChunkedWriteTest.java index 6bc34d4..f996643 100644 --- a/src/test/java/org/mastodon/geff/ChunkedWriteTest.java +++ b/src/test/java/org/mastodon/geff/ChunkedWriteTest.java @@ -130,9 +130,9 @@ private static void testMetadataWriting() throws IOException // Create axes using GeffAxis GeffAxis[] axes = { - GeffAxis.createSpaceAxis( "x", GeffAxis.UNIT_MICROMETERS, 0.0, 100.0 ), - GeffAxis.createSpaceAxis( "y", GeffAxis.UNIT_MICROMETERS, 0.0, 100.0 ), - GeffAxis.createSpaceAxis( "z", GeffAxis.UNIT_MICROMETERS, 0.0, 50.0 ) + GeffAxis.createSpaceAxis( "x", GeffAxis.UNIT_MICROMETER, 0.0, 100.0 ), + GeffAxis.createSpaceAxis( "y", GeffAxis.UNIT_MICROMETER, 0.0, 100.0 ), + GeffAxis.createSpaceAxis( "z", GeffAxis.UNIT_MICROMETER, 0.0, 50.0 ) }; metadata.setGeffAxes( axes ); diff --git a/src/test/java/org/mastodon/geff/GeffAxisTest.java b/src/test/java/org/mastodon/geff/GeffAxisTest.java index a923c30..e5968b6 100644 --- a/src/test/java/org/mastodon/geff/GeffAxisTest.java +++ b/src/test/java/org/mastodon/geff/GeffAxisTest.java @@ -55,11 +55,11 @@ void testDefaultConstructor() @DisplayName( "Test constructor with required fields" ) void testConstructorWithRequiredFields() { - GeffAxis axis = new GeffAxis( "x", GeffAxis.TYPE_SPACE, GeffAxis.UNIT_MICROMETERS ); + GeffAxis axis = new GeffAxis( "x", 
GeffAxis.TYPE_SPACE, GeffAxis.UNIT_MICROMETER ); assertEquals( "x", axis.getName() ); assertEquals( GeffAxis.TYPE_SPACE, axis.getType() ); - assertEquals( GeffAxis.UNIT_MICROMETERS, axis.getUnit() ); + assertEquals( GeffAxis.UNIT_MICROMETER, axis.getUnit() ); assertNull( axis.getMin() ); assertNull( axis.getMax() ); assertFalse( axis.hasBounds() ); @@ -69,11 +69,11 @@ void testConstructorWithRequiredFields() @DisplayName( "Test constructor with all fields" ) void testConstructorWithAllFields() { - GeffAxis axis = new GeffAxis( "t", GeffAxis.TYPE_TIME, GeffAxis.UNIT_SECONDS, 0.0, 125.0 ); + GeffAxis axis = new GeffAxis( "t", GeffAxis.TYPE_TIME, GeffAxis.UNIT_SECOND, 0.0, 125.0 ); assertEquals( "t", axis.getName() ); assertEquals( GeffAxis.TYPE_TIME, axis.getType() ); - assertEquals( GeffAxis.UNIT_SECONDS, axis.getUnit() ); + assertEquals( GeffAxis.UNIT_SECOND, axis.getUnit() ); assertEquals( 0.0, axis.getMin() ); assertEquals( 125.0, axis.getMax() ); assertTrue( axis.hasBounds() ); @@ -85,17 +85,17 @@ void testConstructorWithAllFields() void testTimeAxisFactoryMethods() { // With bounds - GeffAxis timeAxisWithBounds = GeffAxis.createTimeAxis( "t", GeffAxis.UNIT_SECONDS, 0.0, 125.0 ); + GeffAxis timeAxisWithBounds = GeffAxis.createTimeAxis( "t", GeffAxis.UNIT_SECOND, 0.0, 125.0 ); assertEquals( "t", timeAxisWithBounds.getName() ); assertEquals( GeffAxis.TYPE_TIME, timeAxisWithBounds.getType() ); - assertEquals( GeffAxis.UNIT_SECONDS, timeAxisWithBounds.getUnit() ); + assertEquals( GeffAxis.UNIT_SECOND, timeAxisWithBounds.getUnit() ); assertTrue( timeAxisWithBounds.hasBounds() ); // Without bounds - GeffAxis timeAxisNoBounds = GeffAxis.createTimeAxis( "t", GeffAxis.UNIT_SECONDS ); + GeffAxis timeAxisNoBounds = GeffAxis.createTimeAxis( "t", GeffAxis.UNIT_SECOND ); assertEquals( "t", timeAxisNoBounds.getName() ); assertEquals( GeffAxis.TYPE_TIME, timeAxisNoBounds.getType() ); - assertEquals( GeffAxis.UNIT_SECONDS, timeAxisNoBounds.getUnit() ); + assertEquals( 
GeffAxis.UNIT_SECOND, timeAxisNoBounds.getUnit() ); assertFalse( timeAxisNoBounds.hasBounds() ); } @@ -104,18 +104,18 @@ void testTimeAxisFactoryMethods() void testSpaceAxisFactoryMethods() { // With bounds - GeffAxis spaceAxisWithBounds = GeffAxis.createSpaceAxis( "x", GeffAxis.UNIT_MICROMETERS, 764.42, 2152.3 ); + GeffAxis spaceAxisWithBounds = GeffAxis.createSpaceAxis( "x", GeffAxis.UNIT_MICROMETER, 764.42, 2152.3 ); assertEquals( "x", spaceAxisWithBounds.getName() ); assertEquals( GeffAxis.TYPE_SPACE, spaceAxisWithBounds.getType() ); - assertEquals( GeffAxis.UNIT_MICROMETERS, spaceAxisWithBounds.getUnit() ); + assertEquals( GeffAxis.UNIT_MICROMETER, spaceAxisWithBounds.getUnit() ); assertTrue( spaceAxisWithBounds.hasBounds() ); assertEquals( 2152.3 - 764.42, spaceAxisWithBounds.getRange(), 0.001 ); // Without bounds - GeffAxis spaceAxisNoBounds = GeffAxis.createSpaceAxis( "y", GeffAxis.UNIT_MICROMETERS ); + GeffAxis spaceAxisNoBounds = GeffAxis.createSpaceAxis( "y", GeffAxis.UNIT_MICROMETER ); assertEquals( "y", spaceAxisNoBounds.getName() ); assertEquals( GeffAxis.TYPE_SPACE, spaceAxisNoBounds.getType() ); - assertEquals( GeffAxis.UNIT_MICROMETERS, spaceAxisNoBounds.getUnit() ); + assertEquals( GeffAxis.UNIT_MICROMETER, spaceAxisNoBounds.getUnit() ); assertFalse( spaceAxisNoBounds.hasBounds() ); } @@ -169,24 +169,24 @@ void testTypeValidation() void testAxisValidation() { // Valid axis - GeffAxis validAxis = new GeffAxis( "x", GeffAxis.TYPE_SPACE, GeffAxis.UNIT_MICROMETERS, 0.0, 100.0 ); + GeffAxis validAxis = new GeffAxis( "x", GeffAxis.TYPE_SPACE, GeffAxis.UNIT_MICROMETER, 0.0, 100.0 ); assertDoesNotThrow( () -> validAxis.validate() ); // Missing name - GeffAxis missingName = new GeffAxis( null, GeffAxis.TYPE_SPACE, GeffAxis.UNIT_MICROMETERS ); + GeffAxis missingName = new GeffAxis( null, GeffAxis.TYPE_SPACE, GeffAxis.UNIT_MICROMETER ); assertThrows( IllegalArgumentException.class, () -> missingName.validate() ); // Empty name - GeffAxis emptyName = new 
GeffAxis( "", GeffAxis.TYPE_SPACE, GeffAxis.UNIT_MICROMETERS ); + GeffAxis emptyName = new GeffAxis( "", GeffAxis.TYPE_SPACE, GeffAxis.UNIT_MICROMETER ); assertThrows( IllegalArgumentException.class, () -> emptyName.validate() ); // Missing type - GeffAxis missingType = new GeffAxis( "x", null, GeffAxis.UNIT_MICROMETERS ); + GeffAxis missingType = new GeffAxis( "x", null, GeffAxis.UNIT_MICROMETER ); assertThrows( IllegalArgumentException.class, () -> missingType.validate() ); // Invalid type - create axis with valid type then test validation // separately - GeffAxis validAxisForTypeTest = new GeffAxis( "x", GeffAxis.TYPE_SPACE, GeffAxis.UNIT_MICROMETERS ); + GeffAxis validAxisForTypeTest = new GeffAxis( "x", GeffAxis.TYPE_SPACE, GeffAxis.UNIT_MICROMETER ); assertDoesNotThrow( () -> validAxisForTypeTest.validate() ); // This // should // pass @@ -194,7 +194,7 @@ void testAxisValidation() // Test that setType rejects invalid types GeffAxis axisForInvalidType = new GeffAxis(); axisForInvalidType.setName( "x" ); - axisForInvalidType.setUnit( GeffAxis.UNIT_MICROMETERS ); + axisForInvalidType.setUnit( GeffAxis.UNIT_MICROMETER ); assertThrows( IllegalArgumentException.class, () -> axisForInvalidType.setType( "invalid" ) ); // Missing unit @@ -210,9 +210,9 @@ void testAxisValidation() @DisplayName( "Test equals and hashCode" ) void testEqualsAndHashCode() { - GeffAxis axis1 = new GeffAxis( "x", GeffAxis.TYPE_SPACE, GeffAxis.UNIT_MICROMETERS, 0.0, 100.0 ); - GeffAxis axis2 = new GeffAxis( "x", GeffAxis.TYPE_SPACE, GeffAxis.UNIT_MICROMETERS, 0.0, 100.0 ); - GeffAxis axis3 = new GeffAxis( "y", GeffAxis.TYPE_SPACE, GeffAxis.UNIT_MICROMETERS, 0.0, 100.0 ); + GeffAxis axis1 = new GeffAxis( "x", GeffAxis.TYPE_SPACE, GeffAxis.UNIT_MICROMETER, 0.0, 100.0 ); + GeffAxis axis2 = new GeffAxis( "x", GeffAxis.TYPE_SPACE, GeffAxis.UNIT_MICROMETER, 0.0, 100.0 ); + GeffAxis axis3 = new GeffAxis( "y", GeffAxis.TYPE_SPACE, GeffAxis.UNIT_MICROMETER, 0.0, 100.0 ); // Test equals assertEquals( 
axis1, axis2 ); @@ -231,21 +231,21 @@ void testEqualsAndHashCode() @DisplayName( "Test toString" ) void testToString() { - GeffAxis axisWithBounds = new GeffAxis( "t", GeffAxis.TYPE_TIME, GeffAxis.UNIT_SECONDS, 0.0, 125.0 ); + GeffAxis axisWithBounds = new GeffAxis( "t", GeffAxis.TYPE_TIME, GeffAxis.UNIT_SECOND, 0.0, 125.0 ); String str = axisWithBounds.toString(); assertTrue( str.contains( "name='t'" ) ); assertTrue( str.contains( "type='time'" ) ); - assertTrue( str.contains( "unit='seconds'" ) ); + assertTrue( str.contains( "unit='second'" ) ); assertTrue( str.contains( "min=0.0" ) ); assertTrue( str.contains( "max=125.0" ) ); - GeffAxis axisNoBounds = new GeffAxis( "x", GeffAxis.TYPE_SPACE, GeffAxis.UNIT_MICROMETERS ); + GeffAxis axisNoBounds = new GeffAxis( "x", GeffAxis.TYPE_SPACE, GeffAxis.UNIT_MICROMETER ); String strNoBounds = axisNoBounds.toString(); assertTrue( strNoBounds.contains( "name='x'" ) ); assertTrue( strNoBounds.contains( "type='space'" ) ); - assertTrue( strNoBounds.contains( "unit='micrometers'" ) ); + assertTrue( strNoBounds.contains( "unit='micrometer'" ) ); assertFalse( strNoBounds.contains( "min=" ) ); assertFalse( strNoBounds.contains( "max=" ) ); } @@ -257,35 +257,35 @@ void testSpecificationExamples() // Time axis: {'name': 't', 'type': "time", 'unit': "seconds", 'min': 0, // 'max': // 125} - GeffAxis timeAxis = new GeffAxis( "t", GeffAxis.TYPE_TIME, GeffAxis.UNIT_SECONDS, 0.0, 125.0 ); + GeffAxis timeAxis = new GeffAxis( "t", GeffAxis.TYPE_TIME, GeffAxis.UNIT_SECOND, 0.0, 125.0 ); assertDoesNotThrow( () -> timeAxis.validate() ); assertEquals( "t", timeAxis.getName() ); assertEquals( GeffAxis.TYPE_TIME, timeAxis.getType() ); - assertEquals( GeffAxis.UNIT_SECONDS, timeAxis.getUnit() ); + assertEquals( GeffAxis.UNIT_SECOND, timeAxis.getUnit() ); assertEquals( 0.0, timeAxis.getMin() ); assertEquals( 125.0, timeAxis.getMax() ); // Space axis: {'name': 'z', 'type': "space", 'unit': "micrometers", // 'min': // 1523.36, 'max': 4398.1} - 
GeffAxis zAxis = new GeffAxis( "z", GeffAxis.TYPE_SPACE, GeffAxis.UNIT_MICROMETERS, 1523.36, 4398.1 ); + GeffAxis zAxis = new GeffAxis( "z", GeffAxis.TYPE_SPACE, GeffAxis.UNIT_MICROMETER, 1523.36, 4398.1 ); assertDoesNotThrow( () -> zAxis.validate() ); assertEquals( "z", zAxis.getName() ); assertEquals( GeffAxis.TYPE_SPACE, zAxis.getType() ); - assertEquals( GeffAxis.UNIT_MICROMETERS, zAxis.getUnit() ); + assertEquals( GeffAxis.UNIT_MICROMETER, zAxis.getUnit() ); assertEquals( 1523.36, zAxis.getMin() ); assertEquals( 4398.1, zAxis.getMax() ); // Space axis: {'name': 'y', 'type': "space", 'unit': "micrometers", // 'min': // 81.667, 'max': 1877.7} - GeffAxis yAxis = new GeffAxis( "y", GeffAxis.TYPE_SPACE, GeffAxis.UNIT_MICROMETERS, 81.667, 1877.7 ); + GeffAxis yAxis = new GeffAxis( "y", GeffAxis.TYPE_SPACE, GeffAxis.UNIT_MICROMETER, 81.667, 1877.7 ); assertDoesNotThrow( () -> yAxis.validate() ); // Space axis: {'name': 'x', 'type': "space", 'unit': "micrometers", // 'min': // 764.42, 'max': 2152.3} - GeffAxis xAxis = new GeffAxis( "x", GeffAxis.TYPE_SPACE, GeffAxis.UNIT_MICROMETERS, 764.42, 2152.3 ); + GeffAxis xAxis = new GeffAxis( "x", GeffAxis.TYPE_SPACE, GeffAxis.UNIT_MICROMETER, 764.42, 2152.3 ); assertDoesNotThrow( () -> xAxis.validate() ); } } diff --git a/src/test/java/org/mastodon/geff/GeffTest.java b/src/test/java/org/mastodon/geff/GeffTest.java index fddda27..57b708b 100644 --- a/src/test/java/org/mastodon/geff/GeffTest.java +++ b/src/test/java/org/mastodon/geff/GeffTest.java @@ -58,9 +58,9 @@ void setUp() // Create axes using GeffAxis GeffAxis[] axes = { - GeffAxis.createSpaceAxis( "x", GeffAxis.UNIT_MICROMETERS, 0.0, 100.0 ), - GeffAxis.createSpaceAxis( "y", GeffAxis.UNIT_MICROMETERS, 0.0, 100.0 ), - GeffAxis.createSpaceAxis( "z", GeffAxis.UNIT_MICROMETERS, 0.0, 50.0 ) + GeffAxis.createSpaceAxis( "x", GeffAxis.UNIT_MICROMETER, 0.0, 100.0 ), + GeffAxis.createSpaceAxis( "y", GeffAxis.UNIT_MICROMETER, 0.0, 100.0 ), + GeffAxis.createSpaceAxis( "z", 
GeffAxis.UNIT_MICROMETER, 0.0, 50.0 ) }; testMetadata.setGeffAxes( axes ); @@ -73,7 +73,8 @@ void setUp() node.setX( i * 10.0 ); node.setY( i * 15.0 ); node.setSegmentId( i + 100 ); - // Note: setPosition is deprecated, using individual coordinates instead + // Note: setPosition is deprecated, using individual coordinates + // instead testNodes.add( node ); } @@ -161,7 +162,7 @@ void testMetadataDataStructure() // Test individual axes assertEquals( "x", axes[ 0 ].getName() ); assertEquals( GeffAxis.TYPE_SPACE, axes[ 0 ].getType() ); - assertEquals( GeffAxis.UNIT_MICROMETERS, axes[ 0 ].getUnit() ); + assertEquals( GeffAxis.UNIT_MICROMETER, axes[ 0 ].getUnit() ); assertEquals( 0.0, axes[ 0 ].getMin(), 0.001 ); assertEquals( 100.0, axes[ 0 ].getMax(), 0.001 ); @@ -182,9 +183,9 @@ void testMetadataValidation() // Create invalid axes (min > max) GeffAxis[] invalidAxes = { - new GeffAxis( "x", GeffAxis.TYPE_SPACE, GeffAxis.UNIT_MICROMETERS, 100.0, 50.0 ) // min - // > - // max + new GeffAxis( "x", GeffAxis.TYPE_SPACE, GeffAxis.UNIT_MICROMETER, 100.0, 50.0 ) // min + // > + // max }; assertThrows( IllegalArgumentException.class, () -> { @@ -249,7 +250,6 @@ void testWriteOperations( @TempDir Path tempDir ) catch ( Exception e ) { // If any exception occurs, fail with details - e.printStackTrace(); fail( "Write operations should not throw exceptions: " + e.getMessage() ); } } ); @@ -293,7 +293,7 @@ void testVersionValidationEdgeCases() } // Test invalid versions - String[] invalidVersions = { "1.0", "0.4", "invalid", "0.1..x" }; + String[] invalidVersions = { "1.0", "0.6", "invalid", "0.1..x" }; for ( String version : invalidVersions ) { diff --git a/src/test/java/org/mastodon/geff/VersionPatternTest.java b/src/test/java/org/mastodon/geff/VersionPatternTest.java index 0ffde81..04832e5 100644 --- a/src/test/java/org/mastodon/geff/VersionPatternTest.java +++ b/src/test/java/org/mastodon/geff/VersionPatternTest.java @@ -71,7 +71,6 @@ public void testInvalidVersionPatterns() // 
Test cases for version formats that should be rejected String[] invalidVersions = { "1.0", // Unsupported major version - "0.4", // Unsupported minor version "invalid", // Not a version at all "0.1..x", // Invalid patch format }; From 9b819d79bdd3dc53f7c7d3c9ac88624fd8393e0f Mon Sep 17 00:00:00 2001 From: tpietzsch Date: Sat, 26 Jul 2025 20:55:28 -0400 Subject: [PATCH 13/19] update with origin/main changes --- .../org/mastodon/geff/GeffCreateTest.java | 79 +++++++++++-------- 1 file changed, 46 insertions(+), 33 deletions(-) diff --git a/src/test/java/org/mastodon/geff/GeffCreateTest.java b/src/test/java/org/mastodon/geff/GeffCreateTest.java index 54fae89..9e3b206 100644 --- a/src/test/java/org/mastodon/geff/GeffCreateTest.java +++ b/src/test/java/org/mastodon/geff/GeffCreateTest.java @@ -34,9 +34,10 @@ public class GeffCreateTest { + public static void main( String[] args ) throws IOException { - List< GeffNode > newNodes = new ArrayList<>(); + List< GeffNode > writeNodes = new ArrayList<>(); GeffNode node0 = new GeffNode.Builder() .id( 0 ) .timepoint( 0 ) @@ -44,14 +45,13 @@ public static void main( String[] args ) throws IOException .y( 20.3 ) .z( 5.0 ) .segmentId( 0 ) - .color( new double[] { 1.0, 0.0, 0.0, 1.0 } ) // Red - // color + .color( new double[] { 1.0, 0.0, 0.0, 1.0 } ) .radius( 2.5 ) - // .covariance2d(new double[] { 1.0, 0.2, 0.2, - // 1.5 }) // 2x2 covariance matrix - // flattened + .covariance2d( new double[] { 1.0, 0.2, 0.2, 1.5 } ) + .polygonX( new double[] { 0.1, 0.2, 0.3, 0.4 } ) + .polygonY( new double[] { 0.5, 0.6, 0.7, 0.8 } ) .build(); - newNodes.add( node0 ); + writeNodes.add( node0 ); GeffNode node1 = new GeffNode.Builder() .id( 1 ) @@ -60,18 +60,19 @@ public static void main( String[] args ) throws IOException .y( 21.3 ) .z( 6.0 ) .segmentId( 1 ) - // .covariance2d(new double[] { 0.8, 0.1, 0.1, - // 1.2 }) // Different covariance + .covariance2d( new double[] { 0.8, 0.1, 0.1, 1.2 } ) + .polygonX( new double[] { -0.1, -0.2, -0.3, -0.4 } 
) + .polygonY( new double[] { -0.5, -0.6, -0.7, -0.8 } ) .build(); - newNodes.add( node1 ); + writeNodes.add( node1 ); // Write to Zarr format with version specification - GeffNode.writeToZarr( newNodes, - "/Users/sugawara/Repositories/geff-java/src/test/resources/create_test_output.zarr/tracks/nodes", - "0.3.0" ); + GeffNode.writeToZarr( writeNodes, + "src/test/resources/create_test_output.zarr/tracks", + "0.4.0" ); // Create new edges using builder pattern - List< GeffEdge > newEdges = new ArrayList<>(); + List< GeffEdge > writeEdges = new ArrayList<>(); GeffEdge edge = new GeffEdge.Builder() .setId( 0 ) .setSourceNodeId( 0 ) @@ -79,31 +80,43 @@ public static void main( String[] args ) throws IOException .setScore( 0.95 ) .setDistance( 1.4 ) .build(); - newEdges.add( edge ); + writeEdges.add( edge ); // Write to Zarr format - GeffEdge.writeToZarr( newEdges, - "/Users/sugawara/Repositories/geff-java/src/test/resources/create_test_output.zarr/tracks/edges", - "0.3.0" ); + GeffEdge.writeToZarr( writeEdges, + "src/test/resources/create_test_output.zarr/tracks", + "0.4.0" ); // Create metadata with axis information GeffAxis[] axes = { - new GeffAxis( GeffAxis.NAME_TIME, GeffAxis.TYPE_TIME, GeffAxis.UNIT_SECONDS, 0.0, 100.0 ), - new GeffAxis( GeffAxis.NAME_SPACE_X, GeffAxis.TYPE_SPACE, GeffAxis.UNIT_MICROMETERS, 0.0, 1024.0 ), - new GeffAxis( GeffAxis.NAME_SPACE_Y, GeffAxis.TYPE_SPACE, GeffAxis.UNIT_MICROMETERS, 0.0, 1024.0 ), - new GeffAxis( GeffAxis.NAME_SPACE_Z, GeffAxis.TYPE_SPACE, GeffAxis.UNIT_MICROMETERS, 0.0, 100.0 ) + new GeffAxis( GeffAxis.NAME_TIME, GeffAxis.TYPE_TIME, GeffAxis.UNIT_SECOND, 0.0, 100.0 ), + new GeffAxis( GeffAxis.NAME_SPACE_X, GeffAxis.TYPE_SPACE, GeffAxis.UNIT_MICROMETER, 0.0, 1024.0 ), + new GeffAxis( GeffAxis.NAME_SPACE_Y, GeffAxis.TYPE_SPACE, GeffAxis.UNIT_MICROMETER, 0.0, 1024.0 ), + new GeffAxis( GeffAxis.NAME_SPACE_Z, GeffAxis.TYPE_SPACE, GeffAxis.UNIT_MICROMETER, 0.0, 100.0 ) }; - GeffMetadata metadata = new GeffMetadata( "0.3.0", 
true, axes ); - GeffMetadata.writeToZarr( metadata, - "/Users/sugawara/Repositories/geff-java/src/test/resources/create_test_output.zarr/tracks" ); + GeffMetadata writeMetadata = new GeffMetadata( "0.4.0", true, axes ); + GeffMetadata.writeToZarr( writeMetadata, + "src/test/resources/create_test_output.zarr/tracks" ); - GeffMetadata metadata_read = GeffMetadata.readFromZarr( - "/Users/sugawara/Repositories/geff-java/src/test/resources/create_test_output.zarr/tracks" ); - List< GeffNode > nodes = GeffNode.readFromZarr( - "/Users/sugawara/Repositories/geff-java/src/test/resources/create_test_output.zarr/tracks", - metadata_read.getGeffVersion() ); - List< GeffEdge > edges = GeffEdge.readFromZarr( - "/Users/sugawara/Repositories/geff-java/src/test/resources/create_test_output.zarr/tracks", - metadata_read.getGeffVersion() ); + GeffMetadata readMetadata = GeffMetadata.readFromZarr( + "src/test/resources/create_test_output.zarr/tracks" ); + List< GeffNode > readNodes = GeffNode.readFromZarr( + "src/test/resources/create_test_output.zarr/tracks", + readMetadata.getGeffVersion() ); + List< GeffEdge > readEdges = GeffEdge.readFromZarr( + "src/test/resources/create_test_output.zarr/tracks", + readMetadata.getGeffVersion() ); + // Check if read nodes and edges match written data with + // assertions + for ( int i = 0; i < writeNodes.size(); i++ ) + { + assert writeNodes.get( i ).equals( readNodes.get( i ) ): "Node mismatch at index " + i; + } + for ( int i = 0; i < writeEdges.size(); i++ ) + { + assert writeEdges.get( i ).equals( readEdges.get( i ) ): "Edge mismatch at index " + i; + } + assert writeMetadata.equals( readMetadata ): "Metadata mismatch"; + System.out.println( "GeffCreateTest completed successfully!" 
); } } From 892e2973bd9b2cdc461658d72f300fe149feca45 Mon Sep 17 00:00:00 2001 From: tpietzsch Date: Sat, 26 Jul 2025 21:08:50 -0400 Subject: [PATCH 14/19] WIP --- src/main/java/org/mastodon/geff/GeffNode.java | 534 +++++++++++++++--- 1 file changed, 452 insertions(+), 82 deletions(-) diff --git a/src/main/java/org/mastodon/geff/GeffNode.java b/src/main/java/org/mastodon/geff/GeffNode.java index 83ebadf..cdb9038 100644 --- a/src/main/java/org/mastodon/geff/GeffNode.java +++ b/src/main/java/org/mastodon/geff/GeffNode.java @@ -40,6 +40,7 @@ import org.janelia.saalfeldlab.n5.N5Writer; import org.janelia.saalfeldlab.n5.zarr.N5ZarrReader; import org.janelia.saalfeldlab.n5.zarr.N5ZarrWriter; +import org.mastodon.geff.geom.GeffSerializableVertex; import org.mastodon.geff.GeffUtils.FlattenedDoubles; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -73,7 +74,13 @@ public class GeffNode private double[] covariance3d; - public static final double[] DEFAULT_COLOR = { 1.0, 1.0, 1.0, 1.0 }; // Default white color + private int polygonStartIndex = -1; + + private double[] polygonX; + + private double[] polygonY; + + private static final double[] DEFAULT_COLOR = { 1.0, 1.0, 1.0, 1.0 }; // RGBA public static final double DEFAULT_RADIUS = 1.0; @@ -89,9 +96,34 @@ public GeffNode() /** * Constructor with basic node parameters + * + * @param id + * The unique identifier for the node. + * @param timepoint + * The timepoint of the node. + * @param x + * The x-coordinate of the node. + * @param y + * The y-coordinate of the node. + * @param z + * The z-coordinate of the node. + * @param color + * The color of the node (RGBA). + * @param segmentId + * The segment ID the node belongs to. + * @param radius + * The radius of the node. + * @param covariance2d + * The 2D covariance matrix of the node. + * @param covariance3d + * The 3D covariance matrix of the node. + * @param polygonX + * The x-coordinates of the polygon vertices. 
+ * @param polygonY + * The y-coordinates of the polygon vertices. */ public GeffNode( int id, int timepoint, double x, double y, double z, double[] color, int segmentId, double radius, - double[] covariance2d, double[] covariance3d ) + double[] covariance2d, double[] covariance3d, double[] polygonX, double[] polygonY ) { this.id = id; this.t = timepoint; @@ -103,64 +135,131 @@ public GeffNode( int id, int timepoint, double x, double y, double z, double[] c this.radius = radius; this.covariance2d = covariance2d != null ? covariance2d : DEFAULT_COVARIANCE_2D; this.covariance3d = covariance3d != null ? covariance3d : DEFAULT_COVARIANCE_3D; + this.polygonX = polygonX != null ? polygonX : new double[ 0 ]; + this.polygonY = polygonY != null ? polygonY : new double[ 0 ]; } - // Getters and Setters + /** + * Get the unique identifier of the node. + * + * @return The unique identifier of the node. + */ public int getId() { return id; } + /** + * Set the unique identifier of the node. + * + * @param id + * The unique identifier to set. + */ public void setId( int id ) { this.id = id; } + /** + * Get the timepoint of the node. + * + * @return The timepoint of the node. + */ public int getT() { return t; } + /** + * Set the timepoint of the node. + * + * @param timepoint + * The timepoint to set. + */ public void setT( int timepoint ) { this.t = timepoint; } + /** + * Get the x-coordinate of the node. + * + * @return The x-coordinate of the node. + */ public double getX() { return x; } + /** + * Set the x-coordinate of the node. + * + * @param x + * The x-coordinate to set. + */ public void setX( double x ) { this.x = x; } + /** + * Get the y-coordinate of the node. + * + * @return The y-coordinate of the node. + */ public double getY() { return y; } + /** + * Set the y-coordinate of the node. + * + * @param y + * The y-coordinate to set. + */ public void setY( double y ) { this.y = y; } + /** + * Get the z-coordinate of the node. + * + * @return The z-coordinate of the node. 
+ */ public double getZ() { return z; } + /** + * Set the z-coordinate of the node. + * + * @param z + * The z-coordinate to set. + */ public void setZ( double z ) { this.z = z; } + /** + * Get the color of the node. + * + * @return The color of the node as an RGBA array. + */ public double[] getColor() { return color; } + /** + * Set the color of the node. + * + * @param color + * The color to set as an RGBA array. + */ public void setColor( double[] color ) { if ( color != null && color.length == 4 ) @@ -173,31 +272,66 @@ public void setColor( double[] color ) } } + /** + * Get the segment ID of the node. + * + * @return The segment ID of the node. + */ public int getSegmentId() { return segmentId; } + /** + * Set the segment ID of the node. + * + * @param segmentId + * The segment ID to set. + */ public void setSegmentId( int segmentId ) { this.segmentId = segmentId; } + /** + * Get the radius of the node. + * + * @return The radius of the node. + */ public double getRadius() { return radius; } + /** + * Set the radius of the node. + * + * @param radius + * The radius to set. + */ public void setRadius( double radius ) { this.radius = radius; } + /** + * Get the 2D covariance matrix of the node. + * + * @return The 2D covariance matrix as a 4-element array. + */ public double[] getCovariance2d() { return covariance2d; } + /** + * Set the 2D covariance matrix of the node. + * + * @param covariance2d + * The 2D covariance matrix to set as a 4-element array. + * @throws IllegalArgumentException + * if the covariance2d array is not of length 4. + */ public void setCovariance2d( double[] covariance2d ) { if ( covariance2d != null && covariance2d.length == 4 ) @@ -210,11 +344,24 @@ public void setCovariance2d( double[] covariance2d ) } } + /** + * Get the 3D covariance matrix of the node. + * + * @return The 3D covariance matrix as a 6-element array. + */ public double[] getCovariance3d() { return covariance3d; } + /** + * Set the 3D covariance matrix of the node. 
+ * + * @param covariance3d + * The 3D covariance matrix to set as a 6-element array. + * @throws IllegalArgumentException + * if the covariance3d array is not of length 6. + */ public void setCovariance3d( double[] covariance3d ) { if ( covariance3d != null && covariance3d.length == 6 ) @@ -227,6 +374,86 @@ public void setCovariance3d( double[] covariance3d ) } } + /** + * Get the polygon offset for the serialized vertex array. + * + * @return The polygon offset. + */ + public int getPolygonStartIndex() + { + return polygonStartIndex; + } + + /** + * Set the polygon offset for the serialized vertex array. + * + * @param polygonOffset + * The polygon offset to set. + */ + public void setPolygonStartIndex( int polygonOffset ) + { + this.polygonStartIndex = polygonOffset; + } + + /** + * Get the slice information for polygon vertices as an array. + * + * @return An array containing the polygon startIndex and endIndex. + */ + public int[] getPolygonSliceAsArray() + { + if ( polygonX == null || polygonY == null ) + { + System.err.println( "Warning: Polygon is null, returning empty array." ); + return new int[] { polygonStartIndex, 0 }; + } + if ( polygonStartIndex < 0 ) + throw new IllegalArgumentException( "Polygon startIndex is invalid: " + polygonStartIndex ); + return new int[] { polygonStartIndex, polygonStartIndex + polygonX.length }; + } + + /** + * Get the x-coordinates of the polygon vertices. + * + * @return The x-coordinates of the polygon vertices. + */ + public double[] getPolygonX() + { + return polygonX; + } + + /** + * Get the y-coordinates of the polygon vertices. + * + * @return The y-coordinates of the polygon vertices. + */ + public double[] getPolygonY() + { + return polygonY; + } + + /** + * Set the x-coordinates of the polygon vertices. + * + * @param polygonX + * The x-coordinates to set. + */ + public void setPolygonX( double[] polygonX ) + { + this.polygonX = polygonX != null ? 
polygonX : new double[ 0 ]; + } + + /** + * Set the y-coordinates of the polygon vertices. + * + * @param polygonY + * The y-coordinates to set. + */ + public void setPolygonY( double[] polygonY ) + { + this.polygonY = polygonY != null ? polygonY : new double[ 0 ]; + } + /** * Returns the position of the node as a 3D array. * @@ -301,6 +528,10 @@ public static class Builder private double[] covariance3d = DEFAULT_COVARIANCE_3D; + private double[] polygonX; + + private double[] polygonY; + public Builder id( int id ) { this.id = id; @@ -382,87 +613,23 @@ public Builder covariance3d( double[] covariance3d ) return this; } - public GeffNode build() + public Builder polygonX( double[] polygonX ) { - return new GeffNode( id, timepoint, x, y, z, color, segmentId, radius, covariance2d, covariance3d ); + this.polygonX = polygonX; + return this; } - } - - /** - * Write nodes to Zarr format with chunked structure - */ - public static void writeToZarr( List< GeffNode > nodes, String zarrPath ) - { - writeToZarr( nodes, zarrPath, GeffUtils.DEFAULT_CHUNK_SIZE ); - } - /** - * Write nodes to Zarr format with specified chunk size - */ - public static void writeToZarr( List< GeffNode > nodes, String zarrPath, int chunkSize ) - { - writeToZarr( nodes, zarrPath, chunkSize, Geff.VERSION ); - } - - public static void writeToZarr( List< GeffNode > nodes, String zarrPath, String geffVersion ) - { - writeToZarr( nodes, zarrPath, GeffUtils.DEFAULT_CHUNK_SIZE, geffVersion ); - } - - public static void writeToZarr( List< GeffNode > nodes, String zarrPath, int chunkSize, String geffVersion ) - { - LOG.debug( "Writing {} nodes to Zarr path: {} with chunk size: {} to Geff version: {}", nodes.size(), zarrPath, chunkSize, geffVersion ); - try ( final N5ZarrWriter writer = new N5ZarrWriter( zarrPath, true ) ) - { - writeToN5( nodes, writer, "/", chunkSize, geffVersion ); - } - } - - public static void writeToN5( - final List< GeffNode > nodes, - final N5Writer writer, - final String group, - 
final int chunkSize, - String geffVersion ) - { - if ( nodes == null ) - throw new NullPointerException( "Nodes list cannot be null" ); - - if ( geffVersion == null || geffVersion.isEmpty() ) - { - geffVersion = Geff.VERSION; // Use default version if not specified - } - GeffUtils.checkSupportedVersion( geffVersion ); - - final String path = N5URI.normalizeGroupPath( group ); - - // Write node IDs in chunks - GeffUtils.writeIntArray( nodes, GeffNode::getId, writer, path + "/nodes/ids", chunkSize ); - - // Write timepoints in chunks - GeffUtils.writeIntArray( nodes, GeffNode::getT, writer, path + "/nodes/props/t/values", chunkSize ); - - // Write X coordinates in chunks - GeffUtils.writeDoubleArray( nodes, GeffNode::getX, writer, path + "/nodes/props/x/values", chunkSize ); - - // Write Y coordinates in chunks - GeffUtils.writeDoubleArray( nodes, GeffNode::getY, writer, path + "/nodes/props/y/values", chunkSize ); - - // Write Z coordinates in chunks - GeffUtils.writeDoubleArray( nodes, GeffNode::getZ, writer, path + "/nodes/props/z/values", chunkSize ); - - // Write color in chunks - GeffUtils.writeDoubleMatrix( nodes, 4, GeffNode::getColor, writer, path + "/nodes/props/color/values", chunkSize ); - - // Write segment IDs in chunks - GeffUtils.writeIntArray( nodes, GeffNode::getSegmentId, writer, path + "/nodes/props/track_id/values", chunkSize ); - // Write radius and covariance attributes if available - GeffUtils.writeDoubleArray( nodes, GeffNode::getRadius, writer, path + "/nodes/props/radius/values", chunkSize ); - - // TODO: ellipsoid etc + public Builder polygonY( double[] polygonY ) + { + this.polygonY = polygonY; + return this; + } - LOG.debug( "Successfully wrote nodes to Zarr format with chunked structure" ); - } + public GeffNode build() + { + return new GeffNode( id, timepoint, x, y, z, color, segmentId, radius, covariance2d, covariance3d, polygonX, polygonY ); + } + } /** * Read nodes from Zarr format with default version and chunked structure @@ 
-536,6 +703,96 @@ public static List< GeffNode > readFromN5( final N5Reader reader, final String g verifyLength( radius, numNodes, "/nodes/props/radius/values" ); // TODO: ellipsoid etc + // --> begin origin/main -- + + + + // Read covariance2d from chunks + double[][] covariance2ds = new double[ 0 ][]; + try + { + covariance2ds = ZarrUtils.readChunkedDoubleMatrix( propsGroup, "covariance2d/values", + "covariance2d" ); + } + catch ( Exception e ) + { + System.out.println( "Warning: Could not read covariance2d: " + e.getMessage() + " skipping..." ); + } + + // Read covariance3d from chunks + double[][] covariance3ds = new double[ 0 ][]; + try + { + covariance3ds = ZarrUtils.readChunkedDoubleMatrix( propsGroup, "covariance3d/values", + "covariance3d" ); + } + catch ( Exception e ) + { + System.out.println( "Warning: Could not read covariance3d: " + e.getMessage() + " skipping..." ); + } + + // Read polygon from chunks + double[][] polygonsX = new double[ 0 ][]; + double[][] polygonsY = new double[ 0 ][]; + if ( geffVersion.startsWith( "0.4" ) ) + { + try + { + int[][] polygonSlices = ZarrUtils.readChunkedIntMatrix( serializedPropsGroup, "polygon/slices", "polygon slices" ); + // expected shape: [numVertices, 2] + double[][] polygonValues = ZarrUtils.readChunkedDoubleMatrix( serializedPropsGroup, "polygon/values", "polygon values" ); + polygonsX = new double[ polygonSlices.length ][]; + polygonsY = new double[ polygonSlices.length ][]; + for ( int i = 0; i < polygonSlices.length; i++ ) + { + int start = polygonSlices[ i ][ 0 ]; + int length = polygonSlices[ i ][ 1 ]; + if ( start >= 0 && start + length <= polygonValues.length ) + { + double[] xPoints = new double[ length ]; + double[] yPoints = new double[ length ]; + for ( int j = 0; j < length; j++ ) + { + xPoints[ j ] = polygonValues[ start + j ][ 0 ]; + yPoints[ j ] = polygonValues[ start + j ][ 1 ]; + } + polygonsX[ i ] = xPoints; + polygonsY[ i ] = yPoints; + } + else + { + System.out.println( "Warning: Invalid 
polygon slice at index " + i + ", skipping..." ); + } + } + } + catch ( Exception e ) + { + System.out.println( "Warning: Could not read polygon: " + e.getMessage() + " skipping..." ); + } + } + + // Create node objects + for ( int i = 0; i < nodeIds.length; i++ ) + { + GeffNode node = new Builder() + .id( nodeIds[ i ] ) + .timepoint( i < timepoints.length ? timepoints[ i ] : -1 ) + .x( i < xCoords.length ? xCoords[ i ] : Double.NaN ) + .y( i < yCoords.length ? yCoords[ i ] : Double.NaN ) + .z( i < zCoords.length ? zCoords[ i ] : Double.NaN ) + .color( i < colors.length ? colors[ i ] : DEFAULT_COLOR ) + .segmentId( i < trackIds.length ? trackIds[ i ] : -1 ) + .radius( i < radii.length ? radii[ i ] : Double.NaN ) + .covariance2d( i < covariance2ds.length ? covariance2ds[ i ] : DEFAULT_COVARIANCE_2D ) + .covariance3d( i < covariance3ds.length ? covariance3ds[ i ] : DEFAULT_COVARIANCE_3D ) + .polygonX( i < polygonsX.length ? polygonsX[ i ] : null ) + .polygonY( i < polygonsY.length ? polygonsY[ i ] : null ) + .build(); + + nodes.add( node ); + } + + // --> end origin/main -- // Create node objects final List< GeffNode > nodes = new ArrayList<>( numNodes ); @@ -557,7 +814,118 @@ public static List< GeffNode > readFromN5( final N5Reader reader, final String g return nodes; } - @Override + /** + * Write nodes to Zarr format with chunked structure + */ + public static void writeToZarr( List< GeffNode > nodes, String zarrPath ) + { + writeToZarr( nodes, zarrPath, GeffUtils.DEFAULT_CHUNK_SIZE ); + } + + /** + * Write nodes to Zarr format with specified chunk size + */ + public static void writeToZarr( List< GeffNode > nodes, String zarrPath, int chunkSize ) + { + writeToZarr( nodes, zarrPath, chunkSize, Geff.VERSION ); + } + + public static void writeToZarr( List< GeffNode > nodes, String zarrPath, String geffVersion ) + { + writeToZarr( nodes, zarrPath, GeffUtils.DEFAULT_CHUNK_SIZE, geffVersion ); + } + + public static void writeToZarr( List< GeffNode > nodes, String 
zarrPath, int chunkSize, String geffVersion ) + { + LOG.debug( "Writing {} nodes to Zarr path: {} with chunk size: {} to Geff version: {}", nodes.size(), zarrPath, chunkSize, geffVersion ); + try ( final N5ZarrWriter writer = new N5ZarrWriter( zarrPath, true ) ) + { + writeToN5( nodes, writer, "/", chunkSize, geffVersion ); + } + } + + public static void writeToN5( + final List< GeffNode > nodes, + final N5Writer writer, + final String group, + final int chunkSize, + String geffVersion ) + { + if ( nodes == null ) + throw new NullPointerException( "Nodes list cannot be null" ); + + if ( geffVersion == null || geffVersion.isEmpty() ) + { + geffVersion = Geff.VERSION; // Use default version if not specified + } + GeffUtils.checkSupportedVersion( geffVersion ); + + final String path = N5URI.normalizeGroupPath( group ); + + // Write node IDs in chunks + GeffUtils.writeIntArray( nodes, GeffNode::getId, writer, path + "/nodes/ids", chunkSize ); + + // Write timepoints in chunks + GeffUtils.writeIntArray( nodes, GeffNode::getT, writer, path + "/nodes/props/t/values", chunkSize ); + + // Write X coordinates in chunks + GeffUtils.writeDoubleArray( nodes, GeffNode::getX, writer, path + "/nodes/props/x/values", chunkSize ); + + // Write Y coordinates in chunks + GeffUtils.writeDoubleArray( nodes, GeffNode::getY, writer, path + "/nodes/props/y/values", chunkSize ); + + // Write Z coordinates in chunks + GeffUtils.writeDoubleArray( nodes, GeffNode::getZ, writer, path + "/nodes/props/z/values", chunkSize ); + + // Write color in chunks + GeffUtils.writeDoubleMatrix( nodes, 4, GeffNode::getColor, writer, path + "/nodes/props/color/values", chunkSize ); + + // Write segment IDs in chunks + GeffUtils.writeIntArray( nodes, GeffNode::getSegmentId, writer, path + "/nodes/props/track_id/values", chunkSize ); + + // Write radius and covariance attributes if available + GeffUtils.writeDoubleArray( nodes, GeffNode::getRadius, writer, path + "/nodes/props/radius/values", chunkSize ); + + 
// TODO: ellipsoid etc + // --> begin origin/main -- + + // Write covariance2d in chunks + ZarrUtils.writeChunkedDoubleMatrix( nodes, propsGroup, "covariance2d/values", chunkSize, GeffNode::getCovariance2d, + 4 ); + + // Write covariance3d in chunks + ZarrUtils.writeChunkedDoubleMatrix( nodes, propsGroup, "covariance3d/values", chunkSize, GeffNode::getCovariance3d, + 6 ); + + if ( geffVersion.startsWith( "0.4" ) ) + { + // Write polygon slices and values if available + List< GeffSerializableVertex > geffVertices = new ArrayList<>(); + int polygonOffset = 0; + for ( GeffNode node : nodes ) + { + if ( node.polygonX == null || node.polygonY == null ) + throw new IllegalArgumentException( "Polygon coordinates cannot be null" ); + if ( node.getPolygonX().length != node.getPolygonY().length ) + throw new IllegalArgumentException( "Polygon X and Y coordinates must have the same length" ); + node.setPolygonStartIndex( polygonOffset ); + for ( int i = 0; i < node.getPolygonX().length; i++ ) + { + geffVertices.add( new GeffSerializableVertex( node.getPolygonX()[ i ], + node.getPolygonY()[ i ] ) ); + } + polygonOffset += node.getPolygonX().length; + } + ZarrUtils.writeChunkedIntMatrix( nodes, serializedPropsGroup, "polygon/slices", chunkSize, GeffNode::getPolygonSliceAsArray, 2 ); + ZarrUtils.writeChunkedDoubleMatrix( geffVertices, serializedPropsGroup, "polygon/values", chunkSize, GeffSerializableVertex::getCoordinates, 2 ); + } + + // --> end origin/main -- + + LOG.debug( "Successfully wrote nodes to Zarr format with chunked structure" ); + } + + @Override public String toString() { StringBuilder sb = new StringBuilder( "GeffNode{" ) @@ -593,7 +961,9 @@ public boolean equals( Object obj ) segmentId == geffNode.segmentId && Double.compare( geffNode.radius, radius ) == 0 && java.util.Arrays.equals( covariance2d, geffNode.covariance2d ) && - java.util.Arrays.equals( covariance3d, geffNode.covariance3d ); + java.util.Arrays.equals( covariance3d, geffNode.covariance3d ) && + 
java.util.Arrays.equals( polygonX, geffNode.polygonX ) && + java.util.Arrays.equals( polygonY, geffNode.polygonY ); } @Override From 3b656d4226eb23c0b9d1f158e6485b07e2689900 Mon Sep 17 00:00:00 2001 From: tpietzsch Date: Mon, 28 Jul 2025 13:47:06 -0400 Subject: [PATCH 15/19] WIP --- src/main/java/org/mastodon/geff/GeffNode.java | 50 ++++++------------- .../java/org/mastodon/geff/GeffUtils.java | 8 ++- 2 files changed, 22 insertions(+), 36 deletions(-) diff --git a/src/main/java/org/mastodon/geff/GeffNode.java b/src/main/java/org/mastodon/geff/GeffNode.java index cdb9038..1f0a7af 100644 --- a/src/main/java/org/mastodon/geff/GeffNode.java +++ b/src/main/java/org/mastodon/geff/GeffNode.java @@ -702,34 +702,15 @@ public static List< GeffNode > readFromN5( final N5Reader reader, final String g double[] radius = GeffUtils.readAsDoubleArray( reader, "/nodes/props/radius/values", "radius" ); verifyLength( radius, numNodes, "/nodes/props/radius/values" ); - // TODO: ellipsoid etc - // --> begin origin/main -- - + // Read covariance2d from chunks + final FlattenedDoubles covariance2ds = GeffUtils.readAsDoubleMatrix( reader, "/nodes/props/covariance2d/values", "covariance2d" ); + verifyLength( covariance2ds, numNodes, "/nodes/props/covariance2d/values" ); + // Read covariance3d from chunks + final FlattenedDoubles covariance3ds = GeffUtils.readAsDoubleMatrix( reader, "/nodes/props/covariance3d/values", "covariance3d" ); + verifyLength( covariance3ds, numNodes, "/nodes/props/covariance3d/values" ); - // Read covariance2d from chunks - double[][] covariance2ds = new double[ 0 ][]; - try - { - covariance2ds = ZarrUtils.readChunkedDoubleMatrix( propsGroup, "covariance2d/values", - "covariance2d" ); - } - catch ( Exception e ) - { - System.out.println( "Warning: Could not read covariance2d: " + e.getMessage() + " skipping..." 
); - } - - // Read covariance3d from chunks - double[][] covariance3ds = new double[ 0 ][]; - try - { - covariance3ds = ZarrUtils.readChunkedDoubleMatrix( propsGroup, "covariance3d/values", - "covariance3d" ); - } - catch ( Exception e ) - { - System.out.println( "Warning: Could not read covariance3d: " + e.getMessage() + " skipping..." ); - } + // --> begin origin/main -- // Read polygon from chunks double[][] polygonsX = new double[ 0 ][]; @@ -886,16 +867,14 @@ public static void writeToN5( // Write radius and covariance attributes if available GeffUtils.writeDoubleArray( nodes, GeffNode::getRadius, writer, path + "/nodes/props/radius/values", chunkSize ); - // TODO: ellipsoid etc - // --> begin origin/main -- + // Write covariance2d in chunks + GeffUtils.writeDoubleMatrix( nodes, 4, GeffNode::getCovariance2d, writer, path + "/nodes/props/covariance2d/values", chunkSize ); + + // Write covariance3d in chunks + GeffUtils.writeDoubleMatrix( nodes, 6, GeffNode::getCovariance3d, writer, path + "/nodes/props/covariance3d/values", chunkSize ); - // Write covariance2d in chunks - ZarrUtils.writeChunkedDoubleMatrix( nodes, propsGroup, "covariance2d/values", chunkSize, GeffNode::getCovariance2d, - 4 ); - // Write covariance3d in chunks - ZarrUtils.writeChunkedDoubleMatrix( nodes, propsGroup, "covariance3d/values", chunkSize, GeffNode::getCovariance3d, - 6 ); + // --> begin origin/main -- if ( geffVersion.startsWith( "0.4" ) ) { @@ -908,6 +887,9 @@ public static void writeToN5( throw new IllegalArgumentException( "Polygon coordinates cannot be null" ); if ( node.getPolygonX().length != node.getPolygonY().length ) throw new IllegalArgumentException( "Polygon X and Y coordinates must have the same length" ); + // TODO: DO NOT DO THIS! + // Dont store something into GeffNode as temp variables just for writing! + // Instead: We know how many nodes. So make int[] polygonStartIndex locally here. 
node.setPolygonStartIndex( polygonOffset ); for ( int i = 0; i < node.getPolygonX().length; i++ ) { diff --git a/src/main/java/org/mastodon/geff/GeffUtils.java b/src/main/java/org/mastodon/geff/GeffUtils.java index d088a57..080dd6c 100644 --- a/src/main/java/org/mastodon/geff/GeffUtils.java +++ b/src/main/java/org/mastodon/geff/GeffUtils.java @@ -286,7 +286,9 @@ public static FlattenedInts readAsIntMatrix( final N5Reader reader, final String public static int[] convertToIntArray( final Object array, final String fieldName ) { - if ( array instanceof int[] ) + if (array == null) + return null; + else if ( array instanceof int[] ) return ( int[] ) array; else if ( array instanceof long[] ) return copyToIntArray( ( long[] ) array, a -> a.length, ( a, i ) -> ( int ) a[ i ] ); @@ -315,7 +317,9 @@ private static < T > int[] copyToIntArray( final T array, final ToIntFunction< T public static double[] convertToDoubleArray( final Object array, final String fieldName ) { - if ( array instanceof double[] ) + if (array == null) + return null; + else if ( array instanceof double[] ) return ( double[] ) array; else if ( array instanceof int[] ) return copyToDoubleArray( ( int[] ) array, a -> a.length, ( a, i ) -> a[ i ] ); From 3734dc321cb8028277fca98b8576ba37f67c9b85 Mon Sep 17 00:00:00 2001 From: tpietzsch Date: Tue, 29 Jul 2025 17:25:33 -0400 Subject: [PATCH 16/19] Add polygon de/serialization --- src/main/java/org/mastodon/geff/GeffNode.java | 232 +++++++----------- .../java/org/mastodon/geff/GeffUtils.java | 25 +- 2 files changed, 104 insertions(+), 153 deletions(-) diff --git a/src/main/java/org/mastodon/geff/GeffNode.java b/src/main/java/org/mastodon/geff/GeffNode.java index 1f0a7af..8608ffc 100644 --- a/src/main/java/org/mastodon/geff/GeffNode.java +++ b/src/main/java/org/mastodon/geff/GeffNode.java @@ -33,13 +33,16 @@ import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; +import java.util.function.Function; import 
org.janelia.saalfeldlab.n5.N5Reader; import org.janelia.saalfeldlab.n5.N5URI; import org.janelia.saalfeldlab.n5.N5Writer; import org.janelia.saalfeldlab.n5.zarr.N5ZarrReader; import org.janelia.saalfeldlab.n5.zarr.N5ZarrWriter; +import org.mastodon.geff.GeffUtils.FlattenedInts; import org.mastodon.geff.geom.GeffSerializableVertex; import org.mastodon.geff.GeffUtils.FlattenedDoubles; import org.slf4j.Logger; @@ -74,8 +77,6 @@ public class GeffNode private double[] covariance3d; - private int polygonStartIndex = -1; - private double[] polygonX; private double[] polygonY; @@ -374,44 +375,6 @@ public void setCovariance3d( double[] covariance3d ) } } - /** - * Get the polygon offset for the serialized vertex array. - * - * @return The polygon offset. - */ - public int getPolygonStartIndex() - { - return polygonStartIndex; - } - - /** - * Set the polygon offset for the serialized vertex array. - * - * @param polygonOffset - * The polygon offset to set. - */ - public void setPolygonStartIndex( int polygonOffset ) - { - this.polygonStartIndex = polygonOffset; - } - - /** - * Get the slice information for polygon vertices as an array. - * - * @return An array containing the polygon startIndex and endIndex. - */ - public int[] getPolygonSliceAsArray() - { - if ( polygonX == null || polygonY == null ) - { - System.err.println( "Warning: Polygon is null, returning empty array." ); - return new int[] { polygonStartIndex, 0 }; - } - if ( polygonStartIndex < 0 ) - throw new IllegalArgumentException( "Polygon startIndex is invalid: " + polygonStartIndex ); - return new int[] { polygonStartIndex, polygonStartIndex + polygonX.length }; - } - /** * Get the x-coordinates of the polygon vertices. 
* @@ -675,105 +638,83 @@ public static List< GeffNode > readFromN5( final N5Reader reader, final String g final int numNodes = nodeIds.length; // Read time points from chunks - final int[] timepoints = GeffUtils.readAsIntArray( reader, "/nodes/props/t/values", "timepoints" ); + final int[] timepoints = GeffUtils.readAsIntArray( reader, path + "/nodes/props/t/values", "timepoints" ); verifyLength( timepoints, numNodes, "/nodes/props/t/values" ); // Read X coordinates from chunks - final double[] xCoords = GeffUtils.readAsDoubleArray( reader, "/nodes/props/x/values", "X coordinates" ); + final double[] xCoords = GeffUtils.readAsDoubleArray( reader, path + "/nodes/props/x/values", "X coordinates" ); verifyLength( xCoords, numNodes, "/nodes/props/x/values" ); // Read Y coordinates from chunks - final double[] yCoords = GeffUtils.readAsDoubleArray( reader, "/nodes/props/y/values", "Y coordinates" ); + final double[] yCoords = GeffUtils.readAsDoubleArray( reader, path + "/nodes/props/y/values", "Y coordinates" ); verifyLength( yCoords, numNodes, "/nodes/props/y/values" ); // Read Z coordinates from chunks - final double[] zCoords = GeffUtils.readAsDoubleArray( reader, "/nodes/props/z/values", "Z coordinates" ); + final double[] zCoords = GeffUtils.readAsDoubleArray( reader, path + "/nodes/props/z/values", "Z coordinates" ); verifyLength( zCoords, numNodes, "/nodes/props/z/values" ); // Read color from chunks - final FlattenedDoubles colors = GeffUtils.readAsDoubleMatrix( reader, "/nodes/props/color/values", "color" ); + final FlattenedDoubles colors = GeffUtils.readAsDoubleMatrix( reader, path + "/nodes/props/color/values", "color" ); verifyLength( colors, numNodes, "/nodes/props/color/values" ); // Read track IDs from chunks - final int[] trackIds = GeffUtils.readAsIntArray( reader, "/nodes/props/track_id/values", "track IDs" ); + final int[] trackIds = GeffUtils.readAsIntArray( reader, path + "/nodes/props/track_id/values", "track IDs" ); verifyLength( trackIds, 
numNodes, "/nodes/props/track_id/values" ); // Read radius from chunks - double[] radius = GeffUtils.readAsDoubleArray( reader, "/nodes/props/radius/values", "radius" ); + double[] radius = GeffUtils.readAsDoubleArray( reader, path + "/nodes/props/radius/values", "radius" ); verifyLength( radius, numNodes, "/nodes/props/radius/values" ); // Read covariance2d from chunks - final FlattenedDoubles covariance2ds = GeffUtils.readAsDoubleMatrix( reader, "/nodes/props/covariance2d/values", "covariance2d" ); + final FlattenedDoubles covariance2ds = GeffUtils.readAsDoubleMatrix( reader, path + "/nodes/props/covariance2d/values", "covariance2d" ); verifyLength( covariance2ds, numNodes, "/nodes/props/covariance2d/values" ); // Read covariance3d from chunks - final FlattenedDoubles covariance3ds = GeffUtils.readAsDoubleMatrix( reader, "/nodes/props/covariance3d/values", "covariance3d" ); + final FlattenedDoubles covariance3ds = GeffUtils.readAsDoubleMatrix( reader, path + "/nodes/props/covariance3d/values", "covariance3d" ); verifyLength( covariance3ds, numNodes, "/nodes/props/covariance3d/values" ); - // --> begin origin/main -- + // Read polygon from chunks + double[][] polygonsX = null; + double[][] polygonsY = null; + if ( geffVersion.startsWith( "0.4" ) ) + { + try + { + final FlattenedInts polygonSlices = GeffUtils.readAsIntMatrix( reader, path + "/nodes/serialized_props/polygon/slices", "polygon slices" ); + verifyLength( polygonSlices, numNodes, "/nodes/serialized_props/polygon/slices" ); - // Read polygon from chunks - double[][] polygonsX = new double[ 0 ][]; - double[][] polygonsY = new double[ 0 ][]; - if ( geffVersion.startsWith( "0.4" ) ) - { - try - { - int[][] polygonSlices = ZarrUtils.readChunkedIntMatrix( serializedPropsGroup, "polygon/slices", "polygon slices" ); - // expected shape: [numVertices, 2] - double[][] polygonValues = ZarrUtils.readChunkedDoubleMatrix( serializedPropsGroup, "polygon/values", "polygon values" ); - polygonsX = new double[ 
polygonSlices.length ][]; - polygonsY = new double[ polygonSlices.length ][]; - for ( int i = 0; i < polygonSlices.length; i++ ) - { - int start = polygonSlices[ i ][ 0 ]; - int length = polygonSlices[ i ][ 1 ]; - if ( start >= 0 && start + length <= polygonValues.length ) - { - double[] xPoints = new double[ length ]; - double[] yPoints = new double[ length ]; - for ( int j = 0; j < length; j++ ) - { - xPoints[ j ] = polygonValues[ start + j ][ 0 ]; - yPoints[ j ] = polygonValues[ start + j ][ 1 ]; - } - polygonsX[ i ] = xPoints; - polygonsY[ i ] = yPoints; - } - else - { - System.out.println( "Warning: Invalid polygon slice at index " + i + ", skipping..." ); - } - } - } - catch ( Exception e ) - { - System.out.println( "Warning: Could not read polygon: " + e.getMessage() + " skipping..." ); - } - } + final FlattenedDoubles polygonValues = GeffUtils.readAsDoubleMatrix( reader, path + "/nodes/serialized_props/polygon/values", "polygon values" ); - // Create node objects - for ( int i = 0; i < nodeIds.length; i++ ) + polygonsX = new double[ numNodes ][]; + polygonsY = new double[ numNodes ][]; + for ( int i = 0; i < numNodes; i++ ) + { + int start = polygonSlices.at( i, 0 ); + int length = polygonSlices.at( i, 1 ); + final int numVertices = polygonValues.size()[ 0 ]; + if ( start >= 0 && start + length < numVertices ) + { + final double[] xPoints = new double[ length ]; + final double[] yPoints = new double[ length ]; + for ( int j = 0; j < length; j++ ) + { + xPoints[ j ] = polygonValues.at( start + j, 0 ); + yPoints[ j ] = polygonValues.at( start + j, 1 ); + } + polygonsX[ i ] = xPoints; + polygonsY[ i ] = yPoints; + } + else + { + LOG.warn( "Warning: Invalid polygon slice at index {}, skipping...", i ); + } + } + } + catch ( Exception e ) { - GeffNode node = new Builder() - .id( nodeIds[ i ] ) - .timepoint( i < timepoints.length ? timepoints[ i ] : -1 ) - .x( i < xCoords.length ? xCoords[ i ] : Double.NaN ) - .y( i < yCoords.length ? 
yCoords[ i ] : Double.NaN ) - .z( i < zCoords.length ? zCoords[ i ] : Double.NaN ) - .color( i < colors.length ? colors[ i ] : DEFAULT_COLOR ) - .segmentId( i < trackIds.length ? trackIds[ i ] : -1 ) - .radius( i < radii.length ? radii[ i ] : Double.NaN ) - .covariance2d( i < covariance2ds.length ? covariance2ds[ i ] : DEFAULT_COVARIANCE_2D ) - .covariance3d( i < covariance3ds.length ? covariance3ds[ i ] : DEFAULT_COVARIANCE_3D ) - .polygonX( i < polygonsX.length ? polygonsX[ i ] : null ) - .polygonY( i < polygonsY.length ? polygonsY[ i ] : null ) - .build(); - - nodes.add( node ); + LOG.warn( "Warning: Could not read polygon: {}, skipping...", e.getMessage() ); } - - // --> end origin/main -- + } // Create node objects final List< GeffNode > nodes = new ArrayList<>( numNodes ); @@ -789,7 +730,9 @@ public static List< GeffNode > readFromN5( final N5Reader reader, final String g final double r = radius != null ? radius[ i ] : Double.NaN; final double[] covariance2d = DEFAULT_COVARIANCE_2D; final double[] covariance3d = DEFAULT_COVARIANCE_2D; - final GeffNode node = new GeffNode( id, t, x, y, z, color, segmentId, r, covariance2d, covariance3d ); + final double[] polygonX = polygonsX != null ? polygonsX[ i ] : null; + final double[] polygonY = polygonsY != null ? 
polygonsY[ i ] : null; + final GeffNode node = new GeffNode( id, t, x, y, z, color, segmentId, r, covariance2d, covariance3d, polygonX, polygonY ); nodes.add( node ); } return nodes; @@ -873,36 +816,29 @@ public static void writeToN5( // Write covariance3d in chunks GeffUtils.writeDoubleMatrix( nodes, 6, GeffNode::getCovariance3d, writer, path + "/nodes/props/covariance3d/values", chunkSize ); - - // --> begin origin/main -- - - if ( geffVersion.startsWith( "0.4" ) ) + if ( geffVersion.startsWith( "0.4" ) ) + { + // Write polygon slices and values if available + final List< GeffSerializableVertex > vertices = new ArrayList<>(); + final List< int[] > slices = new ArrayList<>(); + int polygonOffset = 0; + for ( final GeffNode node : nodes ) { - // Write polygon slices and values if available - List< GeffSerializableVertex > geffVertices = new ArrayList<>(); - int polygonOffset = 0; - for ( GeffNode node : nodes ) - { - if ( node.polygonX == null || node.polygonY == null ) - throw new IllegalArgumentException( "Polygon coordinates cannot be null" ); - if ( node.getPolygonX().length != node.getPolygonY().length ) - throw new IllegalArgumentException( "Polygon X and Y coordinates must have the same length" ); - // TODO: DO NOT DO THIS! - // Dont store something into GeffNode as temp variables just for writing! - // Instead: We know how many nodes. So make int[] polygonStartIndex locally here. 
- node.setPolygonStartIndex( polygonOffset ); - for ( int i = 0; i < node.getPolygonX().length; i++ ) - { - geffVertices.add( new GeffSerializableVertex( node.getPolygonX()[ i ], - node.getPolygonY()[ i ] ) ); - } - polygonOffset += node.getPolygonX().length; - } - ZarrUtils.writeChunkedIntMatrix( nodes, serializedPropsGroup, "polygon/slices", chunkSize, GeffNode::getPolygonSliceAsArray, 2 ); - ZarrUtils.writeChunkedDoubleMatrix( geffVertices, serializedPropsGroup, "polygon/values", chunkSize, GeffSerializableVertex::getCoordinates, 2 ); + if ( node.polygonX == null || node.polygonY == null ) + throw new IllegalArgumentException( "Polygon coordinates cannot be null" ); + if ( node.getPolygonX().length != node.getPolygonY().length ) + throw new IllegalArgumentException( "Polygon X and Y coordinates must have the same length" ); + final int numVertices = node.getPolygonX().length; + for ( int j = 0; j < numVertices; j++ ) + vertices.add( new GeffSerializableVertex( + node.getPolygonX()[ j ], + node.getPolygonY()[ j ] ) ); + slices.add( new int[] { polygonOffset, numVertices } ); + polygonOffset += numVertices; } - - // --> end origin/main -- + GeffUtils.writeIntMatrix( slices, 2, Function.identity(), writer, path + "/nodes/serialized_props/polygon/slices", chunkSize ); + GeffUtils.writeDoubleMatrix( vertices, 2, GeffSerializableVertex::getCoordinates, writer, path + "/nodes/serialized_props/polygon/values", chunkSize ); + } LOG.debug( "Successfully wrote nodes to Zarr format with chunked structure" ); } @@ -951,16 +887,16 @@ public boolean equals( Object obj ) @Override public int hashCode() { - int result = id; - result = 31 * result + t; - result = 31 * result + Double.hashCode( x ); - result = 31 * result + Double.hashCode( y ); - result = 31 * result + Double.hashCode( z ); - result = 31 * result + ( color != null ? 
java.util.Arrays.hashCode( color ) : 0 ); - result = 31 * result + segmentId; - result = 31 * result + Double.hashCode( radius ); - result = 31 * result + ( covariance2d != null ? java.util.Arrays.hashCode( covariance2d ) : 0 ); - result = 31 * result + ( covariance3d != null ? java.util.Arrays.hashCode( covariance3d ) : 0 ); + int result = id; + result = 31 * result + t; + result = 31 * result + Double.hashCode( x ); + result = 31 * result + Double.hashCode( y ); + result = 31 * result + Double.hashCode( z ); + result = 31 * result + Arrays.hashCode( color ); + result = 31 * result + segmentId; + result = 31 * result + Double.hashCode( radius ); + result = 31 * result + Arrays.hashCode( covariance2d ); + result = 31 * result + Arrays.hashCode( covariance3d ); return result; } } diff --git a/src/main/java/org/mastodon/geff/GeffUtils.java b/src/main/java/org/mastodon/geff/GeffUtils.java index 080dd6c..19e1e1c 100644 --- a/src/main/java/org/mastodon/geff/GeffUtils.java +++ b/src/main/java/org/mastodon/geff/GeffUtils.java @@ -84,16 +84,31 @@ public static < T > void writeIntMatrix( final String dataset, final int chunkSize ) { - final int size = elements.size(); - final int[] data = new int[ numColumns * size ]; - for ( int i = 0; i < size; ++i ) { - final int[] row = extractor.apply( elements.get( i ) ); + writeIntMatrix( elements.size(), numColumns, + i -> extractor.apply( elements.get( i ) ), + writer, dataset, chunkSize ); + } + + /** + * @param extractor function from row index to int[] with column data + */ + public static void writeIntMatrix( + final int numRows, + final int numColumns, + final IntFunction< int[] > extractor, + final N5Writer writer, + final String dataset, + final int chunkSize ) + { + final int[] data = new int[ numColumns * numRows ]; + for ( int i = 0; i < numRows; ++i ) { + final int[] row = extractor.apply( i ); if ( row == null || row.length < numColumns ) continue; System.arraycopy( row, 0, data, numColumns * i, numColumns ); } final 
DatasetAttributes attributes = new DatasetAttributes( - new long[] { numColumns, size }, + new long[] { numColumns, numRows }, new int[] { numColumns, chunkSize }, DataType.INT32, new BloscCompression() ); From e0afa82da121d5c4e7d4ffb5969ee369261721d4 Mon Sep 17 00:00:00 2001 From: tpietzsch Date: Tue, 29 Jul 2025 18:11:41 -0400 Subject: [PATCH 17/19] POM: remove SNAPSHOT versions --- pom.xml | 3 --- 1 file changed, 3 deletions(-) diff --git a/pom.xml b/pom.xml index 34b1984..602823d 100644 --- a/pom.xml +++ b/pom.xml @@ -22,9 +22,6 @@ sign,deploy-to-scijava true - 4.0.0-alpha-3-SNAPSHOT - 2.0.0-alpha-2-SNAPSHOT - 2.0.0-alpha-1 7.1.5 https://sonarcloud.io From 6901ce27b0e5122ee754500af7e4245524c7832a Mon Sep 17 00:00:00 2001 From: tpietzsch Date: Tue, 29 Jul 2025 18:18:50 -0400 Subject: [PATCH 18/19] Trying to silence pre-commit.ci ... --- .pre-commit-ci.yaml | 3 +++ .pre-commit-config.yaml | 2 ++ 2 files changed, 5 insertions(+) create mode 100644 .pre-commit-ci.yaml create mode 100644 .pre-commit-config.yaml diff --git a/.pre-commit-ci.yaml b/.pre-commit-ci.yaml new file mode 100644 index 0000000..99f0862 --- /dev/null +++ b/.pre-commit-ci.yaml @@ -0,0 +1,3 @@ +ci: + skip: true + diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..0952f47 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,2 @@ +repos: [] + From b2f501481d1ed69eea84389ad49fe2aa7b80cbb8 Mon Sep 17 00:00:00 2001 From: Jean-Yves Tinevez Date: Tue, 29 Jul 2025 21:07:49 -0400 Subject: [PATCH 19/19] Temporarily make the GeffUtils class public. The TrackMate exporter still needs to access the low-level methods of this utility to get the chunk size of a file, and to write the feature values. 
--- src/main/java/org/mastodon/geff/GeffUtils.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/java/org/mastodon/geff/GeffUtils.java b/src/main/java/org/mastodon/geff/GeffUtils.java index 19e1e1c..daed636 100644 --- a/src/main/java/org/mastodon/geff/GeffUtils.java +++ b/src/main/java/org/mastodon/geff/GeffUtils.java @@ -27,7 +27,7 @@ import net.imglib2.util.Intervals; import net.imglib2.util.Util; -class GeffUtils +public class GeffUtils { private static final Logger LOG = LoggerFactory.getLogger( GeffUtils.class );