diff --git a/.pre-commit-ci.yaml b/.pre-commit-ci.yaml new file mode 100644 index 0000000..99f0862 --- /dev/null +++ b/.pre-commit-ci.yaml @@ -0,0 +1,3 @@ +ci: + skip: true + diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..0952f47 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,2 @@ +repos: [] + diff --git a/pom.xml b/pom.xml index d44bcbe..602823d 100644 --- a/pom.xml +++ b/pom.xml @@ -22,6 +22,7 @@ sign,deploy-to-scijava true + 7.1.5 https://sonarcloud.io jacoco @@ -47,11 +48,37 @@ - - com.bc.zarr - jzarr - 0.3.5 - + + org.janelia.saalfeldlab + n5 + + + + org.janelia.saalfeldlab + n5-zarr + + + + org.janelia.saalfeldlab + n5-blosc + + + + net.imglib2 + imglib2 + + + + org.slf4j + slf4j-api + + + diff --git a/src/main/java/org/mastodon/geff/Geff.java b/src/main/java/org/mastodon/geff/Geff.java index 860f79b..bfe35bc 100644 --- a/src/main/java/org/mastodon/geff/Geff.java +++ b/src/main/java/org/mastodon/geff/Geff.java @@ -6,13 +6,13 @@ * %% * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: - * + * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. - * + * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -28,15 +28,10 @@ */ package org.mastodon.geff; -import java.io.IOException; import java.util.ArrayList; -import java.util.Iterator; import java.util.List; -import com.bc.zarr.ZarrArray; -import com.bc.zarr.ZarrGroup; - -import ucar.ma2.InvalidRangeException; +import org.janelia.saalfeldlab.n5.N5Exception.N5IOException; public class Geff { @@ -63,18 +58,19 @@ public static void main( String[] args ) System.out.println( "Geff library version: " + VERSION ); String zarrPath = "src/test/resources/mouse-20250719.zarr/tracks"; - String outputZarrPath = "src/test/resources/mouse-20250719_output.zarr/tracks"; + String outputZarrPath = "src/test/resources/mouse-20250719_output.zarr/tracks"; + String n5OutputZarrPath = "src/test/resources/n5-mouse-20250719_output.zarr/tracks"; try { // Demonstrate reading metadata System.out.println( "\n=== Reading Metadata ===" ); - GeffMetadata metadata = GeffMetadata.readFromZarr( zarrPath ); + GeffMetadata metadata = GeffMetadata.readFromZarr( zarrPath ); System.out.println( "Metadata loaded:" + metadata ); // Demonstrate reading nodes System.out.println( "\n=== Reading Nodes ===" ); - List< GeffNode > nodes = GeffNode.readFromZarr( zarrPath, metadata.getGeffVersion() ); + List< GeffNode > nodes = GeffNode.readFromZarr( zarrPath, metadata.getGeffVersion() ); System.out.println( "Read " + nodes.size() + " nodes:" ); for ( int i = 0; i < Math.min( 5, nodes.size() ); i++ ) { @@ -101,21 +97,17 @@ public static void main( String[] args ) // Try to write nodes (will show what would be written) try { - GeffNode.writeToZarr( nodes, outputZarrPath, ZarrUtils.getChunkSize( zarrPath ) ); + GeffNode.writeToZarr( nodes, outputZarrPath, GeffUtils.getChunkSize( zarrPath ) ); } catch ( UnsupportedOperationException e ) { System.out.println( "Note: " + 
e.getMessage() ); } - catch ( InvalidRangeException e ) - { - System.err.println( "InvalidRangeException during node writing: " + e.getMessage() ); - } // Try to write edges (will show what would be written) try { - GeffEdge.writeToZarr( edges, outputZarrPath, ZarrUtils.getChunkSize( zarrPath ) ); + GeffEdge.writeToZarr( edges, outputZarrPath, GeffUtils.getChunkSize( zarrPath ) ); } catch ( UnsupportedOperationException e ) { @@ -139,60 +131,15 @@ public static void main( String[] args ) + " edges" ); } - catch ( IOException e ) - { - System.err.println( "IOException occurred: " + e.getMessage() ); - e.printStackTrace(); - } - catch ( InvalidRangeException e ) + catch ( N5IOException e ) { - System.err.println( "InvalidRangeException occurred: " + e.getMessage() ); + System.err.println( "N5IOException occurred: " + e.getMessage() ); e.printStackTrace(); } catch ( Exception e ) { System.err.println( "Unexpected exception occurred: " + e.getMessage() ); - e.printStackTrace(); - } - - // Also demonstrate the original Zarr exploration code - System.out.println( "\n=== Original Zarr Exploration ===" ); - try - { - final ZarrGroup zarrTracks = ZarrGroup.open( zarrPath ); - final Iterator< String > groupKeyIter = zarrTracks.getGroupKeys().iterator(); - while ( groupKeyIter.hasNext() ) - { - String groupKey = groupKeyIter.next(); - System.out.println( "Found group: " + groupKey ); - } - final Iterator< String > arrayKeyIter = zarrTracks.getArrayKeys().iterator(); - while ( arrayKeyIter.hasNext() ) - { - String arrayKey = arrayKeyIter.next(); - System.out.println( "Found array: " + arrayKey ); - } - final Iterator< String > attrKeyIter = zarrTracks.getAttributes().keySet().iterator(); - while ( attrKeyIter.hasNext() ) - { - String attrKey = attrKeyIter.next(); - System.out.print( "Found attribute: " + attrKey ); - Object attrValue = zarrTracks.getAttributes().get( attrKey ); - System.out.println( " Value: " + attrValue ); - } - // Example of opening an array - System.out.println( "Opening 'nodes/ids' array..." ); - ZarrArray nodesIds = zarrTracks.openArray( "nodes/ids" ); - double[] nodesIdsData = ( double[] ) nodesIds.read(); - System.out.println( "Read nodes/ids data: " + nodesIdsData.length + " elements." ); - } - catch ( IOException e ) - { - e.printStackTrace(); - } - catch ( InvalidRangeException e ) - { - e.printStackTrace(); + e.printStackTrace(); } } diff --git a/src/main/java/org/mastodon/geff/GeffEdge.java b/src/main/java/org/mastodon/geff/GeffEdge.java index a0bb62f..8a773a3 100644 --- a/src/main/java/org/mastodon/geff/GeffEdge.java +++ b/src/main/java/org/mastodon/geff/GeffEdge.java @@ -28,16 +28,20 @@ */ package org.mastodon.geff; -import java.io.IOException; +import static org.mastodon.geff.GeffUtils.checkSupportedVersion; +import static org.mastodon.geff.GeffUtils.verifyLength; + import java.util.ArrayList; import java.util.List; -import com.bc.zarr.ArrayParams; -import com.bc.zarr.DataType; -import com.bc.zarr.ZarrArray; -import com.bc.zarr.ZarrGroup; - -import ucar.ma2.InvalidRangeException; +import org.janelia.saalfeldlab.n5.N5Reader; +import org.janelia.saalfeldlab.n5.N5URI; +import org.janelia.saalfeldlab.n5.N5Writer; +import org.janelia.saalfeldlab.n5.zarr.N5ZarrReader; +import org.janelia.saalfeldlab.n5.zarr.N5ZarrWriter; +import org.mastodon.geff.GeffUtils.FlattenedInts; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Represents an edge in the Geff (Graph Exchange Format for Features) format. 
@@ -45,509 +49,352 @@ * connects two nodes in a tracking graph, typically representing temporal * connections between objects across time points. */ -public class GeffEdge implements ZarrEntity +public class GeffEdge { + private static final Logger LOG = LoggerFactory.getLogger( GeffEdge.class ); + + public static final int DEFAULT_EDGE_ID = -1; // Default ID for edges if not + // specified + + public static final double DEFAULT_SCORE = -1; // Default score for edges if + // not specified + + public static final double DEFAULT_DISTANCE = -1; // Default distance for + // edges if not specified - public static final int DEFAULT_EDGE_ID = -1; // Default ID for edges if not - // specified - - public static final double DEFAULT_SCORE = -1; // Default score for edges if - // not specified - - public static final double DEFAULT_DISTANCE = -1; // Default distance for - // edges if not specified - - // Edge attributes - private int sourceNodeId; - - private int targetNodeId; - - private int id; // Edge ID if available - - private double score; // Optional score for the edge - - private double distance; // Optional distance metric for the edge - - /** - * Default constructor - */ - public GeffEdge() - {} - - /** - * Constructor with edge ID, source and target node IDs - */ - public GeffEdge( int id, int sourceNodeId, int targetNodeId, double score, double distance ) - { - this.id = id; - this.sourceNodeId = sourceNodeId; - this.targetNodeId = targetNodeId; - this.score = score; - this.distance = distance; - } - - // Getters and Setters - public int getId() - { - return id; - } - - public void setId( int id ) - { - this.id = id; - } - - public int getSourceNodeId() - { - return sourceNodeId; - } - - public void setSourceNodeId( int sourceNodeId ) - { - this.sourceNodeId = sourceNodeId; - } - - public int getTargetNodeId() - { - return targetNodeId; - } - - public void setTargetNodeId( int targetNodeId ) - { - this.targetNodeId = targetNodeId; - } - - public double getScore() - { - return score; - } - - public void setScore( double score ) - { - this.score = score; - } - - public double getDistance() - { - return distance; - } - - public void setDistance( double distance ) - { - this.distance = distance; - } - - /** - * Builder pattern for creating GeffEdge instances - */ - public static Builder builder() - { - return new Builder(); - } - - public static class Builder - { - private int id = DEFAULT_EDGE_ID; - - private int sourceNodeId; - - private int targetNodeId; - - private double score = DEFAULT_SCORE; - - private double distance = DEFAULT_DISTANCE; - - public Builder setId( int id ) - { - this.id = id; - return this; - } - - public Builder setSourceNodeId( int sourceNodeId ) - { - this.sourceNodeId = sourceNodeId; - return this; - } - - public Builder setTargetNodeId( int targetNodeId ) - { - this.targetNodeId = targetNodeId; - return this; - } - - public Builder setScore( double score ) - { - this.score = score; - return this; - } - - public Builder setDistance( double distance ) - { - this.distance = distance; - return this; - } - - public GeffEdge build() - { - return new GeffEdge( id, sourceNodeId, targetNodeId, score, distance ); - } - } - - /** - * Read edges from a Zarr group - */ - public static List< GeffEdge > readFromZarr( String zarrPath ) throws IOException, InvalidRangeException - { - return readFromZarr( zarrPath, Geff.VERSION ); - } - - public static List< GeffEdge > readFromZarr( String zarrPath, String geffVersion ) - throws IOException, InvalidRangeException - { - return 
readFromZarrWithChunks( zarrPath, geffVersion ); - } - - /** - * Alternative method to read edges with different chunk handling - */ - public static List< GeffEdge > readFromZarrWithChunks( String zarrPath, String geffVersion ) - throws IOException, InvalidRangeException - { - List< GeffEdge > edges = new ArrayList<>(); - - ZarrGroup edgesGroup = ZarrGroup.open( zarrPath + "/edges" ); - - System.out.println( - "Reading edges from Zarr path: " + zarrPath + " with Geff version: " + geffVersion ); - - if ( geffVersion.startsWith( "0.2" ) || geffVersion.startsWith( "0.3" ) ) - { - - int[][] edgeIds = ZarrUtils.readChunkedIntMatrix( edgesGroup, "ids", "edge IDs" ); - - double[] distances = new double[ 0 ]; - double[] scores = new double[ 0 ]; - - if ( edgesGroup.getGroupKeys().contains( "attrs" ) ) - { - - // Read attributes - ZarrGroup attrsGroup = edgesGroup.openSubGroup( "attrs" ); - - // Read distances from chunks - try - { - distances = ZarrUtils.readChunkedDoubleArray( attrsGroup, "distance/values", "distances" ); - } - catch ( Exception e ) - { - System.out.println( "Warning: Could not read distances: " + e.getMessage() + " skipping..." ); - } - - // Read scores from chunks - try - { - scores = ZarrUtils.readChunkedDoubleArray( attrsGroup, "score/values", "scores" ); - } - catch ( Exception e ) - { - System.out.println( "Warning: Could not read scores: " + e.getMessage() + " skipping..." ); - } - } - - // 2D array case: each row is [source, target] - for ( int i = 0; i < edgeIds.length; i++ ) - { - if ( edgeIds[ i ].length == 2 ) - { - GeffEdge edge = GeffEdge.builder() - .setId( i ) - .setSourceNodeId( edgeIds[ i ][ 0 ] ) - .setTargetNodeId( edgeIds[ i ][ 1 ] ) - .setDistance( i < distances.length ? distances[ i ] : DEFAULT_DISTANCE ) - .setScore( i < scores.length ? scores[ i ] : DEFAULT_SCORE ) - .build(); - edges.add( edge ); - } - else - { - System.err.println( "Unexpected edge format at index " + i + ": " + edgeIds[ i ].length - + " elements. Expected 2 (source, target)." ); - } - } - } - else if ( geffVersion.startsWith( "0.2" ) || geffVersion.startsWith( "0.3" ) || geffVersion.startsWith( "0.4" ) ) - { - - int[][] edgeIds = ZarrUtils.readChunkedIntMatrix( edgesGroup, "ids", "edge IDs" ); - - double[] distances = new double[ 0 ]; - double[] scores = new double[ 0 ]; - - // Read attributes - if ( edgesGroup.getGroupKeys().contains( "props" ) ) - { - ZarrGroup propsGroup = ZarrGroup.open( zarrPath + "/edges/props" ); - - // Read distances from chunks - try - { - distances = ZarrUtils.readChunkedDoubleArray( propsGroup, "distance/values", "distances" ); - } - catch ( Exception e ) - { - System.out.println( "Warning: Could not read distances: " + e.getMessage() + " skipping..." ); - } - - // Read scores from chunks - try - { - scores = ZarrUtils.readChunkedDoubleArray( propsGroup, "score/values", "scores" ); - } - catch ( Exception e ) - { - System.out.println( "Warning: Could not read scores: " + e.getMessage() + " skipping..." ); - } - } - - // 2D array case: each row is [source, target] - for ( int i = 0; i < edgeIds.length; i++ ) - { - if ( edgeIds[ i ].length == 2 ) - { - GeffEdge edge = GeffEdge.builder() - .setId( i ) - .setSourceNodeId( edgeIds[ i ][ 0 ] ) - .setTargetNodeId( edgeIds[ i ][ 1 ] ) - .setDistance( i < distances.length ? distances[ i ] : DEFAULT_DISTANCE ) - .setScore( i < scores.length ? 
scores[ i ] : DEFAULT_SCORE ) - .build(); - edges.add( edge ); - } - else - { - System.err.println( "Unexpected edge format at index " + i + ": " + edgeIds[ i ].length - + " elements. Expected 2 (source, target)." ); - } - } - } - else - { - throw new UnsupportedOperationException( "Unsupported Geff version: " + geffVersion ); - } - - return edges; - } - - /** - * Write edges to Zarr format with chunked structure - */ - public static void writeToZarr( List< GeffEdge > edges, String zarrPath ) throws IOException, InvalidRangeException - { - writeToZarr( edges, zarrPath, ZarrUtils.DEFAULT_CHUNK_SIZE ); // Default - // chunk - // size - } - - public static void writeToZarr( List< GeffEdge > edges, String zarrPath, String geffVersion ) - throws IOException, InvalidRangeException - { - writeToZarr( edges, zarrPath, ZarrUtils.DEFAULT_CHUNK_SIZE, geffVersion ); // Default - // chunk - // size - } - - /** - * Write edges to Zarr format with specified chunk size - */ - public static void writeToZarr( List< GeffEdge > edges, String zarrPath, int chunks ) - throws IOException, InvalidRangeException - { - writeToZarr( edges, zarrPath, chunks, Geff.VERSION ); // Default Geff - // version - } - - public static void writeToZarr( List< GeffEdge > edges, String zarrPath, int chunks, String geffVersion ) - throws IOException, InvalidRangeException - { - if ( edges == null ) - { throw new IllegalArgumentException( "Edges list cannot be null or empty" ); } - - if ( geffVersion == null || geffVersion.isEmpty() ) - { - geffVersion = Geff.VERSION; // Use default version if not specified - } - - System.out.println( - "Writing " + edges.size() + " edges to Zarr path: " + zarrPath + " with chunk size: " + chunks ); - - if ( geffVersion.startsWith( "0.2" ) || geffVersion.startsWith( "0.3" ) ) - { - // Create attrs subgroup for 0.1 versions - - // Create the main edges group - ZarrGroup rootGroup = ZarrGroup.create( zarrPath ); - - ZarrGroup edgesGroup = rootGroup.createSubGroup( "edges" ); - - writeChunkedEdgeIds( edgesGroup, edges, chunks ); - - ZarrGroup attrsGroup = edgesGroup.createSubGroup( "attrs" ); - - // Write distances - ZarrUtils.writeChunkedDoubleAttribute( edges, attrsGroup, "distance/values", chunks, GeffEdge::getDistance ); - - // Write scores - ZarrUtils.writeChunkedDoubleAttribute( edges, attrsGroup, "score/values", chunks, GeffEdge::getScore ); - } - else if ( geffVersion.startsWith( "0.2" ) || geffVersion.startsWith( "0.3" ) || geffVersion.startsWith( "0.4" ) ) - { - // Create props subgroup for 0.3 version - - // Create the main edges group - ZarrGroup rootGroup = ZarrGroup.create( zarrPath ); - - ZarrGroup edgesGroup = rootGroup.createSubGroup( "edges" ); - - writeChunkedEdgeIds( edgesGroup, edges, chunks ); - - ZarrGroup propsGroup = edgesGroup.createSubGroup( "props" ); - - // Write distances - ZarrUtils.writeChunkedDoubleAttribute( edges, propsGroup, "distance/values", chunks, GeffEdge::getDistance ); - - // Write scores - ZarrUtils.writeChunkedDoubleAttribute( edges, propsGroup, "score/values", chunks, GeffEdge::getScore ); - } - else - { - throw new UnsupportedOperationException( "Unsupported Geff version: " + geffVersion ); - } - } - - private static void writeChunkedEdgeIds( ZarrGroup edgesGroup, List< GeffEdge > edges, int chunks ) - throws InvalidRangeException, IOException - { - // Write edges in chunks - int totalEdges = edges.size(); - - // Create ids subgroup - ZarrGroup idsGroup = edgesGroup.createSubGroup( "ids" ); - - // Create a single ZarrArray for all edges with proper 
chunking - ZarrArray edgesArray = idsGroup.createArray( "", new ArrayParams() - .shape( totalEdges, 2 ) - .chunks( chunks, 2 ) - .dataType( DataType.i4 ) ); - - int chunkIndex = 0; - for ( int startIdx = 0; startIdx < totalEdges; startIdx += chunks ) - { - int endIdx = Math.min( startIdx + chunks, totalEdges ); - int currentChunkSize = endIdx - startIdx; - - // Prepare chunk data array - int[] chunkData = new int[ currentChunkSize * 2 ]; // Flattened - // pairs for this - // chunk - - // Fill chunk data array - for ( int i = 0; i < currentChunkSize; i++ ) - { - GeffEdge edge = edges.get( startIdx + i ); - chunkData[ i * 2 ] = edge.getSourceNodeId(); // Source node ID - chunkData[ i * 2 + 1 ] = edge.getTargetNodeId(); // Target node - // ID - } - - // Write chunk at specific offset - edgesArray.write( chunkData, new int[] { currentChunkSize, 2 }, new int[] { startIdx, 0 } ); - - String chunkKey = String.format( "%.1f", ( double ) chunkIndex ); - System.out.println( "- Wrote chunk " + chunkKey + ": " + currentChunkSize + " edges (indices " + startIdx - + "-" + ( endIdx - 1 ) + ")" ); - chunkIndex++; - } - - // Analyze edge data format - long validEdges = edges.stream().filter( GeffEdge::isValid ).count(); - long selfLoops = edges.stream().filter( GeffEdge::isSelfLoop ).count(); - - System.out.println( "Edge analysis:" ); - System.out.println( "- Valid edges: " + validEdges + "/" + edges.size() ); - if ( selfLoops > 0 ) - { - System.out.println( "- Self-loops detected: " + selfLoops ); - } - System.out.println( "- Format: Chunked 2D arrays [[source1, target1], [source2, target2], ...]" ); - - // Log summary - int uniqueSourceNodes = ( int ) edges.stream().mapToInt( GeffEdge::getSourceNodeId ).distinct().count(); - int uniqueTargetNodes = ( int ) edges.stream().mapToInt( GeffEdge::getTargetNodeId ).distinct().count(); - - System.out.println( "Successfully wrote edges to Zarr format:" ); - System.out.println( "- " + totalEdges + " edges written in " + chunkIndex + " chunks" ); - System.out.println( "- Source nodes: " + uniqueSourceNodes + " unique" ); - System.out.println( "- Target nodes: " + uniqueTargetNodes + " unique" ); - - // Sample verification - if ( !edges.isEmpty() ) - { - System.out.println( "Sample written edge data:" ); - for ( int i = 0; i < Math.min( 3, edges.size() ); i++ ) - { - GeffEdge edge = edges.get( i ); - System.out.println( " [" + edge.getSourceNodeId() + ", " + edge.getTargetNodeId() + "] - " + edge ); - } - } - } - - /** - * Check if this edge is valid (has valid source and target node IDs) - */ - public boolean isValid() - { - return sourceNodeId >= 0 && targetNodeId >= 0; - } - - /** - * Check if this edge represents a self-loop (source == target) - */ - public boolean isSelfLoop() - { - return sourceNodeId == targetNodeId; - } - - @Override - public String toString() - { - return String.format( "GeffEdge{id=%d, source=%d, target=%d}", - id, sourceNodeId, targetNodeId ); - } - - @Override - public boolean equals( Object obj ) - { - if ( this == obj ) - return true; - if ( obj == null || getClass() != obj.getClass() ) - return false; - - GeffEdge geffEdge = ( GeffEdge ) obj; - return sourceNodeId == geffEdge.sourceNodeId && - targetNodeId == geffEdge.targetNodeId && - id == geffEdge.id && - Double.compare( geffEdge.score, score ) == 0 && - Double.compare( geffEdge.distance, distance ) == 0; - } - - @Override - public int hashCode() - { - int result = sourceNodeId; - result = 31 * result + targetNodeId; - result = 31 * result + id; - result = 31 * result + 
Double.hashCode( score ); - return result; - } + // Edge attributes + private int sourceNodeId; + + private int targetNodeId; + + private int id; // Edge ID if available + + private double score; // Optional score for the edge + + private double distance; // Optional distance metric for the edge + + /** + * Default constructor + */ + public GeffEdge() + {} + + /** + * Constructor with edge ID, source and target node IDs + */ + public GeffEdge( int id, int sourceNodeId, int targetNodeId, double score, double distance ) + { + this.id = id; + this.sourceNodeId = sourceNodeId; + this.targetNodeId = targetNodeId; + this.score = score; + this.distance = distance; + } + + // Getters and Setters + public int getId() + { + return id; + } + + public void setId( int id ) + { + this.id = id; + } + + public int getSourceNodeId() + { + return sourceNodeId; + } + + public void setSourceNodeId( int sourceNodeId ) + { + this.sourceNodeId = sourceNodeId; + } + + public int getTargetNodeId() + { + return targetNodeId; + } + + public void setTargetNodeId( int targetNodeId ) + { + this.targetNodeId = targetNodeId; + } + + public double getScore() + { + return score; + } + + public void setScore( double score ) + { + this.score = score; + } + + public double getDistance() + { + return distance; + } + + public void setDistance( double distance ) + { + this.distance = distance; + } + + /** + * Builder pattern for creating GeffEdge instances + */ + public static Builder builder() + { + return new Builder(); + } + + public static class Builder + { + private int id = DEFAULT_EDGE_ID; + + private int sourceNodeId; + + private int targetNodeId; + + private double score = DEFAULT_SCORE; + + private double distance = DEFAULT_DISTANCE; + + public Builder setId( int id ) + { + this.id = id; + return this; + } + + public Builder setSourceNodeId( int sourceNodeId ) + { + this.sourceNodeId = sourceNodeId; + return this; + } + + public Builder setTargetNodeId( int targetNodeId ) + { + this.targetNodeId = targetNodeId; + return this; + } + + public Builder setScore( double score ) + { + this.score = score; + return this; + } + + public Builder setDistance( double distance ) + { + this.distance = distance; + return this; + } + + public GeffEdge build() + { + return new GeffEdge( id, sourceNodeId, targetNodeId, score, distance ); + } + } + + /** + * Read edges from a Zarr group + */ + public static List< GeffEdge > readFromZarr( String zarrPath ) + { + return readFromZarr( zarrPath, Geff.VERSION ); + } + + public static List< GeffEdge > readFromZarr( String zarrPath, String geffVersion ) + { + LOG.debug( "Reading edges from Zarr path: " + zarrPath + " with Geff version: " + geffVersion ); + try ( final N5ZarrReader reader = new N5ZarrReader( zarrPath, true ) ) + { + return readFromN5( reader, "/", geffVersion ); + } + } + + public static List< GeffEdge > readFromN5( final N5Reader reader, final String group, final String geffVersion ) + { + checkSupportedVersion( geffVersion ); + final String path = N5URI.normalizeGroupPath( group ); + +// final DatasetAttributes attributes = reader.getDatasetAttributes( path + "/edges/ids" ); +// System.out.println( "attributes.getNumDimensions() = " + attributes.getNumDimensions() ); +// System.out.println( "attributes.getDimensions() = " + Arrays.toString( attributes.getDimensions() ) ); +// System.out.println( "attributes.getBlockSize() = " + Arrays.toString( attributes.getBlockSize() ) ); + + final FlattenedInts edgeIds = GeffUtils.readAsIntMatrix( reader, path + "/edges/ids", "edge IDs" ); 
+ if ( edgeIds == null ) + { + throw new IllegalArgumentException( "required property '/edges/ids' not found" ); + } + final int numEdges = edgeIds.size()[ 1 ]; + + // Read distances from chunks + final double[] distances = GeffUtils.readAsDoubleArray( reader, path + "/edges/props/distance/values", "distances" ); + verifyLength( distances, numEdges, "/edges/props/distance/values" ); + + // Read scores from chunks + final double[] scores = GeffUtils.readAsDoubleArray( reader, path + "/edges/props/score/values", "scores" ); + verifyLength( scores, numEdges, "/edges/props/score/values" ); + + // Create edge objects + final List< GeffEdge > edges = new ArrayList<>(); + for ( int i = 0; i < numEdges; i++ ) + { + final int sourceNodeId = edgeIds.at( 0, i ); + final int targetNodeId = edgeIds.at( 1, i ); + final double score = scores != null ? scores[ i ] : DEFAULT_SCORE; + final double distance = distances != null ? distances[ i ] : DEFAULT_DISTANCE; + final GeffEdge edge = new GeffEdge( i, sourceNodeId, targetNodeId, score, distance ); + edges.add( edge ); + } + return edges; + } + + /** + * Write edges to Zarr format with chunked structure + */ + public static void writeToZarr( List< GeffEdge > edges, String zarrPath ) + { + writeToZarr( edges, zarrPath, GeffUtils.DEFAULT_CHUNK_SIZE ); + } + + public static void writeToZarr( List< GeffEdge > edges, String zarrPath, String geffVersion ) + { + writeToZarr( edges, zarrPath, GeffUtils.DEFAULT_CHUNK_SIZE, geffVersion ); + } + + /** + * Write edges to Zarr format with specified chunk size + */ + public static void writeToZarr( List< GeffEdge > edges, String zarrPath, int chunkSize ) + { + writeToZarr( edges, zarrPath, chunkSize, Geff.VERSION ); + } + + public static void writeToZarr( List< GeffEdge > edges, String zarrPath, int chunkSize, String geffVersion ) + { + LOG.debug( "Writing {} edges to Zarr path: {} with chunk size: {} to Geff version: {}", edges.size(), zarrPath, chunkSize, geffVersion ); + try ( final N5ZarrWriter writer = new N5ZarrWriter( zarrPath, true ) ) + { + writeToN5( edges, writer, "/", chunkSize, geffVersion ); + } + } + + public static void writeToN5( + final List< GeffEdge > edges, + final N5Writer writer, + final String group, + final int chunkSize, + String geffVersion ) + { + if ( edges == null ) + throw new NullPointerException( "Edges list cannot be null" ); + + if ( geffVersion == null || geffVersion.isEmpty() ) + { + geffVersion = Geff.VERSION; // Use default version if not specified + } + GeffUtils.checkSupportedVersion( geffVersion ); + + final String path = N5URI.normalizeGroupPath( group ); + + GeffUtils.writeIntMatrix( edges, 2, e -> new int[] { e.getSourceNodeId(), e.getTargetNodeId() }, writer, path + "/edges/ids", chunkSize ); + + // Write distances + GeffUtils.writeDoubleArray( edges, GeffEdge::getDistance, writer, path + "/edges/props/distance/values", chunkSize ); + + // Write scores + GeffUtils.writeDoubleArray( edges, GeffEdge::getScore, writer, path + "/edges/props/score/values", chunkSize ); + } + + private static void printEdgeIdStuff( List< GeffEdge > edges ) + { + // Write edges in chunks + int totalEdges = edges.size(); + + // Analyze edge data format + long validEdges = edges.stream().filter( GeffEdge::isValid ).count(); + long selfLoops = edges.stream().filter( GeffEdge::isSelfLoop ).count(); + + System.out.println( "Edge analysis:" ); + System.out.println( "- Valid edges: " + validEdges + "/" + edges.size() ); + if ( selfLoops > 0 ) + { + System.out.println( "- Self-loops detected: " + selfLoops 
); + } + System.out.println( "- Format: Chunked 2D arrays [[source1, target1], [source2, target2], ...]" ); + + // Log summary + int uniqueSourceNodes = ( int ) edges.stream().mapToInt( GeffEdge::getSourceNodeId ).distinct().count(); + int uniqueTargetNodes = ( int ) edges.stream().mapToInt( GeffEdge::getTargetNodeId ).distinct().count(); + + System.out.println( "Successfully wrote edges to Zarr format:" ); + System.out.println( "- " + totalEdges + " edges written" ); + System.out.println( "- Source nodes: " + uniqueSourceNodes + " unique" ); + System.out.println( "- Target nodes: " + uniqueTargetNodes + " unique" ); + + // Sample verification + if ( !edges.isEmpty() ) + { + System.out.println( "Sample written edge data:" ); + for ( int i = 0; i < Math.min( 3, edges.size() ); i++ ) + { + GeffEdge edge = edges.get( i ); + System.out.println( " [" + edge.getSourceNodeId() + ", " + edge.getTargetNodeId() + "] - " + edge ); + } + } + } + + /** + * Check if this edge is valid (has valid source and target node IDs) + */ + public boolean isValid() + { + return sourceNodeId >= 0 && targetNodeId >= 0; + } + + /** + * Check if this edge represents a self-loop (source == target) + */ + public boolean isSelfLoop() + { + return sourceNodeId == targetNodeId; + } + + @Override + public String toString() + { + return String.format( "GeffEdge{id=%d, source=%d, target=%d}", + id, sourceNodeId, targetNodeId ); + } + + @Override + public boolean equals( Object obj ) + { + if ( this == obj ) + return true; + if ( obj == null || getClass() != obj.getClass() ) + return false; + + GeffEdge geffEdge = ( GeffEdge ) obj; + return sourceNodeId == geffEdge.sourceNodeId && + targetNodeId == geffEdge.targetNodeId && + id == geffEdge.id && + Double.compare( geffEdge.score, score ) == 0 && + Double.compare( geffEdge.distance, distance ) == 0; + } + + @Override + public int hashCode() + { + int result = sourceNodeId; + result = 31 * result + targetNodeId; + result = 31 * result + id; + result = 31 * result + Double.hashCode( score ); + return result; + } } diff --git a/src/main/java/org/mastodon/geff/GeffMetadata.java b/src/main/java/org/mastodon/geff/GeffMetadata.java index d5be0b1..7570846 100644 --- a/src/main/java/org/mastodon/geff/GeffMetadata.java +++ b/src/main/java/org/mastodon/geff/GeffMetadata.java @@ -28,16 +28,24 @@ */ package org.mastodon.geff; +import static org.mastodon.geff.GeffUtils.checkSupportedVersion; + import java.io.IOException; -import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import java.util.Map; +import java.util.Objects; import java.util.regex.Pattern; -import com.bc.zarr.ZarrGroup; +import org.janelia.saalfeldlab.n5.N5Reader; +import org.janelia.saalfeldlab.n5.N5Writer; +import org.janelia.saalfeldlab.n5.zarr.N5ZarrReader; +import org.janelia.saalfeldlab.n5.zarr.N5ZarrWriter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.gson.GsonBuilder; +import com.google.gson.reflect.TypeToken; -import ucar.ma2.InvalidRangeException; /** * Represents metadata for a Geff (Graph Exchange Format for Features) dataset. 
@@ -48,22 +56,23 @@ */ public class GeffMetadata { + private static final Logger LOG = LoggerFactory.getLogger( GeffMetadata.class ); // Supported GEFF versions - public static final List< String > SUPPORTED_VERSIONS = Arrays.asList( "0.0", "0.1", "0.2", "0.3", "0.4" ); + public static final List< String > SUPPORTED_VERSIONS = Arrays.asList( "0.2", "0.3", "0.4" ); // Pattern to match major.minor versions, allowing for patch versions and // development versions // Examples: 0.1.1, 0.2.2.dev20+g611e7a2.d20250719, 0.2.0-alpha.1, etc. private static final Pattern SUPPORTED_VERSIONS_PATTERN = Pattern - .compile( "(0\\.0|0\\.1|0\\.2|0\\.3|0\\.4)(?:\\.\\d+)?(?:\\.[a-zA-Z0-9]+(?:\\d+)?)?(?:[+\\-][a-zA-Z0-9\\.]+)*" ); + .compile( "(0\\.2|0\\.3|0\\.4)(?:\\.\\d+)?(?:\\.[a-zA-Z0-9]+(?:\\d+)?)?(?:[+\\-][a-zA-Z0-9\\.]+)*" ); // Metadata attributes - matching the Python schema private String geffVersion; private boolean directed; - private GeffAxis[] geffAxes; + private GeffAxis[] geffAxes; // TODO make List /** * Default constructor @@ -83,14 +92,24 @@ public GeffMetadata( String geffVersion, boolean directed ) /** * Constructor with all parameters */ - public GeffMetadata( String geffVersion, boolean directed, GeffAxis[] geffAxes ) - { - setGeffVersion( geffVersion ); - this.directed = directed; - setGeffAxes( geffAxes ); - } - - // Getters and Setters + public GeffMetadata( String geffVersion, boolean directed, GeffAxis[] geffAxes ) + { + setGeffVersion( geffVersion ); + this.directed = directed; + setGeffAxes( geffAxes ); + } + + /** + * Constructor with all parameters + */ + public GeffMetadata( String geffVersion, boolean directed, List< GeffAxis > geffAxes ) + { + setGeffVersion( geffVersion ); + this.directed = directed; + setGeffAxes( geffAxes ); + } + + // Getters and Setters public String getGeffVersion() { return geffVersion; @@ -117,23 +136,39 @@ public void setDirected( boolean directed ) this.directed = directed; } - public GeffAxis[] getGeffAxes() - { - return geffAxes; - } - - public void setGeffAxes( GeffAxis[] geffAxes ) - { - this.geffAxes = geffAxes != null ? geffAxes.clone() : null; - validate(); - } - - /** + public GeffAxis[] getGeffAxes() // TODO make List + { + return geffAxes; + } + + public List< GeffAxis > getGeffAxesList() // TODO rename getGeffAxes() + { + return ( geffAxes != null ) ? Arrays.asList( geffAxes ) : null; + } + + public void setGeffAxes( GeffAxis[] geffAxes ) // TODO make List + { + this.geffAxes = geffAxes != null ? geffAxes.clone() : null; + validate(); + } + + public void setGeffAxes( final List< GeffAxis > geffAxes ) + { + this.geffAxes = ( geffAxes != null ) ? geffAxes.toArray( new GeffAxis[ 0 ] ) : null; + validate(); + } + + /** * Validates the metadata according to the GEFF schema rules */ public void validate() { - // Check spatial metadata consistency if position is provided + if ( geffVersion == null ) + { + throw new IllegalArgumentException( "geff_version is missing." 
); + } + + // Check spatial metadata consistency if position is provided if ( geffAxes != null ) { for ( GeffAxis axis : geffAxes ) @@ -156,50 +191,21 @@ public void validate() } } - /** - * Read metadata from a Zarr group - */ - public static GeffMetadata readFromZarr( String zarrPath ) throws IOException, InvalidRangeException - { - ZarrGroup group = ZarrGroup.open( zarrPath ); - return readFromZarr( group ); - } + /** + * Read metadata from a Zarr group + */ + public static GeffMetadata readFromZarr( final String zarrPath ) + { + try ( final N5ZarrReader reader = new N5ZarrReader( zarrPath, true ) ) + { + return readFromN5( reader, "/" ); + } + } - /** - * Read metadata from a Zarr group - */ - public static GeffMetadata readFromZarr( ZarrGroup group ) throws IOException - { - // Check if geff_version exists in zattrs - String geffVersion = null; - Map< ?, ? > attrs = null; - if ( group.getAttributes().containsKey( "geff" ) ) - { - System.out.println( "Found geff entry in " + group ); - Object geffRootObj = group.getAttributes().get( "geff" ); - if ( geffRootObj instanceof Map ) - { - try - { - // Check if geff_version exists in the geff entry - if ( ( ( Map< ?, ? > ) geffRootObj ).containsKey( "geff_version" ) ) - { - System.out.println( - "Found geff_version in geff entry: " + ( ( Map< ?, ? > ) geffRootObj ).get( "geff_version" ) ); - geffVersion = ( String ) ( ( Map< ?, ? > ) geffRootObj ).get( "geff_version" ); - attrs = ( Map< ?, ? > ) geffRootObj; - } - else - { - System.out.println( "No geff_version found in geff entry." ); - } - } - catch ( ClassCastException e ) - { - System.err.println( "Invalid geff entry format: " + e.getMessage() ); - } - } - } + public static GeffMetadata readFromN5( final N5Reader reader, final String group ) + { + final String geffVersion = reader.getAttribute( group, "geff/geff_version", String.class ); + LOG.debug( "found geff/geff_version = {}", geffVersion ); if ( geffVersion == null ) { throw new IllegalArgumentException( @@ -207,332 +213,57 @@ public static GeffMetadata readFromZarr( ZarrGroup group ) throws IOException "zarr group name is not specified (e.g. /dataset.zarr/tracks/ instead of " + "/dataset.zarr/)." 
); } + checkSupportedVersion( geffVersion ); - GeffMetadata metadata = new GeffMetadata(); - - // Read required fields - - metadata.setGeffVersion( geffVersion ); - - if ( geffVersion.startsWith( "0.2" ) || geffVersion.startsWith( "0.3" ) ) - { - Object directedObj = attrs.get( "directed" ); - if ( directedObj instanceof Boolean ) - { - metadata.setDirected( ( Boolean ) directedObj ); - } - else if ( directedObj instanceof String ) - { - metadata.setDirected( Boolean.parseBoolean( ( String ) directedObj ) ); - } - - // Read optional fields - double[] roiMins = null; - double[] roiMaxs = null; - String[] axisNames = null; - String[] axisUnits = null; - - int ndim = 0; - Object roiMinObj = attrs.get( "roi_min" ); - if ( roiMinObj != null ) - { - roiMins = convertToDoubleArray( roiMinObj ); - ndim = roiMins.length; - } - - Object roiMaxObj = attrs.get( "roi_max" ); - if ( roiMaxObj != null ) - { - roiMaxs = convertToDoubleArray( roiMaxObj ); - if ( roiMaxs.length != ndim ) - { throw new IllegalArgumentException( - "Roi max dimensions " + roiMaxs.length + " do not match roi min dimensions " + - roiMins.length ); } - } - - Object axisNamesObj = attrs.get( "axis_names" ); - if ( axisNamesObj != null ) - { - axisNames = convertToStringArray( axisNamesObj ); - if ( axisNames.length != ndim ) - { throw new IllegalArgumentException( - "Axis names dimensions " + axisNames.length + " do not match roi min dimensions " + - roiMins.length ); } - } - - Object axisUnitsObj = attrs.get( "axis_units" ); - if ( axisUnitsObj != null ) - { - axisUnits = convertToStringArray( axisUnitsObj ); - if ( axisUnits.length != ndim ) - { throw new IllegalArgumentException( - "Axis units dimensions " + axisUnits.length + " do not match roi min dimensions " + - roiMins.length ); } - } - - String positionAttr = ( String ) attrs.get( "position_attr" ); - if ( ndim != 0 && !positionAttr.equals( "position" ) ) - { throw new IllegalArgumentException( "Invalid position attribute: " + positionAttr ); } - - GeffAxis[] axes = new GeffAxis[ ndim ]; - for ( int i = 0; i < ndim; i++ ) - { - GeffAxis axis = new GeffAxis(); - axis.setName( axisNames != null ? axisNames[ i ] : null ); - axis.setType( axisNames[ i ] == GeffAxis.NAME_TIME ? GeffAxis.TYPE_TIME : GeffAxis.TYPE_SPACE ); - axis.setUnit( axisUnits != null ? axisUnits[ i ] : null ); - axis.setMin( roiMins != null ? roiMins[ i ] : null ); - axis.setMax( roiMaxs != null ? roiMaxs[ i ] : null ); - axes[ i ] = axis; - } - metadata.setGeffAxes( axes ); - } - else if ( geffVersion.startsWith( "0.2" ) || geffVersion.startsWith( "0.3" ) || geffVersion.startsWith( "0.4" ) ) - { - // For 0.2, 0.3, and 0.4, we expect a different structure - metadata.setDirected( ( Boolean ) attrs.get( "directed" ) ); - - // Read axes - List< GeffAxis > axes = new ArrayList<>(); - if ( attrs.containsKey( "axes" ) ) - { - Object axesObj = attrs.get( "axes" ); - if ( axesObj instanceof List ) - { - for ( Object axisObj : ( List< ? > ) axesObj ) - { - if ( axisObj instanceof Map ) - { - Map< ?, ? > axisMap = ( Map< ?, ? 
> ) axisObj; - String name = ( String ) axisMap.get( "name" ); - String type = ( String ) axisMap.get( "type" ); - String unit = ( String ) axisMap.get( "unit" ); - Double min = ( Double ) axisMap.get( "min" ); - Double max = ( Double ) axisMap.get( "max" ); - axes.add( new GeffAxis( name, type, unit, min, max ) ); - } - } - } - else - { - throw new IllegalArgumentException( "Invalid axes format: " + axesObj ); - } - } - metadata.setGeffAxes( axes.toArray( new GeffAxis[ 0 ] ) ); - } - - // Validate the loaded metadata - metadata.validate(); + final Boolean directed = reader.getAttribute( group, "geff/directed", Boolean.class ); + LOG.debug( "found geff/directed = {}", directed ); + if ( directed == null ) + { + throw new IllegalArgumentException( "required attribute 'geff/directed' is missing." ); + } - return metadata; - } + final List< GeffAxis > axes = reader.getAttribute( group, "geff/axes", + new TypeToken< List< GeffAxis > >() {}.getType() ); + LOG.debug( "found geff/axes = {}", axes ); - /** - * Write metadata to Zarr format at specified path - */ - public static void writeToZarr( GeffMetadata metadata, String zarrPath ) throws IOException - { - ZarrGroup group = ZarrGroup.create( zarrPath ); - metadata.writeToZarr( group ); - } + final GeffMetadata metadata = new GeffMetadata( geffVersion, directed, axes ); + metadata.validate(); - /** - * Write metadata to Zarr format - */ - public void writeToZarr( ZarrGroup group ) throws IOException - { - // Validate before writing - validate(); + return metadata; + } - if ( geffVersion == null ) + /** + * Write metadata to Zarr format at specified path + */ + public static void writeToZarr( final GeffMetadata metadata, final String zarrPath ) throws IOException + { + try ( final N5ZarrWriter writer = new N5ZarrWriter( zarrPath, new GsonBuilder().setPrettyPrinting(),true ) ) { - throw new IllegalArgumentException( "Geff version must be set before writing metadata." 
); + metadata.writeToN5( writer, "/" ); } + } - if ( geffVersion.startsWith( "0.2" ) || geffVersion.startsWith( "0.3" ) ) - { - // Create a TreeMap to ensure attributes are ordered alphabetically - // by key - java.util.Map< String, Object > attrs = new java.util.TreeMap<>(); - // Write required fields - attrs.put( "geff_version", geffVersion ); - attrs.put( "directed", directed ); - - if ( geffAxes != null ) - { - attrs.put( "position_attr", "position" ); - double[] roiMins = new double[ geffAxes.length ]; - double[] roiMaxs = new double[ geffAxes.length ]; - String[] axisNames = new String[ geffAxes.length ]; - String[] axisTypes = new String[ geffAxes.length ]; - String[] axisUnits = new String[ geffAxes.length ]; - for ( int i = 0; i < geffAxes.length; i++ ) - { - GeffAxis axis = geffAxes[ i ]; - if ( axis.getName() != null ) - { - axisNames[ i ] = axis.getName(); - } - if ( axis.getType() != null ) - { - axisTypes[ i ] = axis.getType(); - } - if ( axis.getUnit() != null ) - { - axisUnits[ i ] = axis.getUnit(); - } - if ( axis.getMin() != null ) - { - roiMins[ i ] = axis.getMin(); - } - if ( axis.getMax() != null ) - { - roiMaxs[ i ] = axis.getMax(); - } - } - - // Write optional fields - if ( roiMins != null ) - { - attrs.put( "roi_min", roiMins ); - } - if ( roiMaxs != null ) - { - attrs.put( "roi_max", roiMaxs ); - } - if ( axisNames != null ) - { - attrs.put( "axis_names", axisNames ); - } - // Always write axis_units, even if null - attrs.put( "axis_units", axisUnits ); - } - - // Write the attributes to the Zarr group - group.writeAttributes( attrs ); + public void writeToN5( final N5Writer writer, final String group ) + { + // Validate before writing + validate(); - System.out.println( "Written metadata attributes: " + attrs.keySet() ); - } - else if ( geffVersion.startsWith( "0.2" ) || geffVersion.startsWith( "0.3" ) || geffVersion.startsWith( "0.4" ) ) - { - java.util.Map< String, Object > rootAttrs = new java.util.TreeMap<>(); - java.util.Map< String, Object > attrs = new java.util.TreeMap<>(); - // Write required fields - attrs.put( "directed", directed ); - attrs.put( "geff_version", geffVersion ); - ArrayList< Map< String, Object > > axisMaps = new ArrayList<>(); - for ( GeffAxis axis : geffAxes ) - { - if ( axis.getName() == null || axis.getType() == null ) - { throw new IllegalArgumentException( - "Axis name and type must be set for all axes in version 0.2 and 0.3." 
); } - Map< String, Object > axisMap = new java.util.TreeMap<>(); - axisMap.put( "name", axis.getName() ); - axisMap.put( "type", axis.getType() ); - axisMap.put( "unit", axis.getUnit() ); - if ( axis.getMin() != null ) - { - axisMap.put( "min", axis.getMin() ); - } - if ( axis.getMax() != null ) - { - axisMap.put( "max", axis.getMax() ); - } - axisMaps.add( axisMap ); - } - attrs.put( "axes", axisMaps ); - rootAttrs.put( "geff", attrs ); - // Write the attributes to the Zarr group - group.writeAttributes( rootAttrs ); - System.out.println( "Written metadata attributes: " + rootAttrs.keySet() ); - } - - } + checkSupportedVersion( geffVersion ); - // Helper methods for type conversion - private static double[] convertToDoubleArray( Object obj ) - { - if ( obj instanceof double[] ) - { - return ( double[] ) obj; - } - else if ( obj instanceof java.util.ArrayList ) - { - @SuppressWarnings( "unchecked" ) - java.util.ArrayList< Object > list = ( java.util.ArrayList< Object > ) obj; - double[] result = new double[ list.size() ]; - for ( int i = 0; i < list.size(); i++ ) - { - if ( list.get( i ) instanceof Number ) - { - result[ i ] = ( ( Number ) list.get( i ) ).doubleValue(); - } - else - { - result[ i ] = Double.parseDouble( list.get( i ).toString() ); - } - } - return result; - } - else if ( obj instanceof Object[] ) - { - Object[] arr = ( Object[] ) obj; - double[] result = new double[ arr.length ]; - for ( int i = 0; i < arr.length; i++ ) - { - if ( arr[ i ] instanceof Number ) - { - result[ i ] = ( ( Number ) arr[ i ] ).doubleValue(); - } - else - { - result[ i ] = Double.parseDouble( arr[ i ].toString() ); - } - } - return result; - } - else if ( obj instanceof float[] ) - { - float[] floatArray = ( float[] ) obj; - double[] result = new double[ floatArray.length ]; - for ( int i = 0; i < floatArray.length; i++ ) - { - result[ i ] = floatArray[ i ]; - } - return result; - } - return null; - } + // required + LOG.debug( "writing geff/geff_version {}", getGeffVersion() ); + writer.setAttribute( group, "geff/geff_version", getGeffVersion() ); + LOG.debug( "writing geff/directed {}", isDirected() ); + writer.setAttribute( group, "geff/directed", isDirected() ); - private static String[] convertToStringArray( Object obj ) - { - if ( obj instanceof String[] ) - { - return ( String[] ) obj; - } - else if ( obj instanceof java.util.ArrayList ) - { - @SuppressWarnings( "unchecked" ) - java.util.ArrayList< Object > list = ( java.util.ArrayList< Object > ) obj; - String[] result = new String[ list.size() ]; - for ( int i = 0; i < list.size(); i++ ) - { - result[ i ] = list.get( i ) != null ? list.get( i ).toString() : null; - } - return result; - } - else if ( obj instanceof Object[] ) - { - Object[] arr = ( Object[] ) obj; - String[] result = new String[ arr.length ]; - for ( int i = 0; i < arr.length; i++ ) - { - result[ i ] = arr[ i ] != null ? 
arr[ i ].toString() : null; - } - return result; - } - return null; - } + // optional + final List< GeffAxis > axes = getGeffAxesList(); + if ( axes != null ) + { + LOG.debug( "writing geff/axes {}", axes ); + writer.setAttribute( group, "geff/axes", axes ); + } + } @Override public String toString() @@ -542,34 +273,18 @@ public String toString() geffVersion, directed, Arrays.toString( geffAxes ) ); } - @Override - public boolean equals( Object obj ) - { - if ( this == obj ) - return true; - if ( obj == null || getClass() != obj.getClass() ) - return false; - - GeffMetadata that = ( GeffMetadata ) obj; - - if ( directed != that.directed ) - return false; - if ( geffVersion != null ? !geffVersion.equals( that.geffVersion ) : that.geffVersion != null ) - return false; - for ( int i = 0; i < geffAxes.length; i++ ) - { - if ( !geffAxes[ i ].equals( that.geffAxes[ i ] ) ) - { return false; } - } - return true; - } - - @Override - public int hashCode() - { - int result = geffVersion != null ? geffVersion.hashCode() : 0; - result = 31 * result + ( directed ? 1 : 0 ); - result = 31 * result + Arrays.hashCode( geffAxes ); - return result; - } + @Override + public boolean equals( final Object o ) + { + if ( !( o instanceof GeffMetadata ) ) + return false; + GeffMetadata that = ( GeffMetadata ) o; + return directed == that.directed && Objects.equals( geffVersion, that.geffVersion ) && Objects.deepEquals( geffAxes, that.geffAxes ); + } + + @Override + public int hashCode() + { + return Objects.hash( geffVersion, directed, Arrays.hashCode( geffAxes ) ); + } } diff --git a/src/main/java/org/mastodon/geff/GeffNode.java b/src/main/java/org/mastodon/geff/GeffNode.java index 34bdb98..cbd1dc3 100644 --- a/src/main/java/org/mastodon/geff/GeffNode.java +++ b/src/main/java/org/mastodon/geff/GeffNode.java @@ -28,1116 +28,881 @@ */ package org.mastodon.geff; +import static org.mastodon.geff.GeffUtils.checkSupportedVersion; +import static org.mastodon.geff.GeffUtils.verifyLength; + import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; - +import java.util.function.Function; + +import org.janelia.saalfeldlab.n5.N5Reader; +import org.janelia.saalfeldlab.n5.N5URI; +import org.janelia.saalfeldlab.n5.N5Writer; +import org.janelia.saalfeldlab.n5.zarr.N5ZarrReader; +import org.janelia.saalfeldlab.n5.zarr.N5ZarrWriter; +import org.mastodon.geff.GeffUtils.FlattenedInts; import org.mastodon.geff.geom.GeffSerializableVertex; - -import com.bc.zarr.ArrayParams; -import com.bc.zarr.DataType; -import com.bc.zarr.ZarrArray; -import com.bc.zarr.ZarrGroup; - -import ucar.ma2.InvalidRangeException; +import org.mastodon.geff.GeffUtils.FlattenedDoubles; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; /** * Represents a node in the Geff (Graph Exchange Format for Features) format. * This class handles reading and writing node data from/to Zarr format. 
*/ -public class GeffNode implements ZarrEntity +public class GeffNode { - - // Node attributes - private int id; - - private int t; - - private double x; - - private double y; - - private double z; - - private double[] color; - - private int segmentId; - - private double radius; - - private double[] covariance2d; - - private double[] covariance3d; - - private int polygonStartIndex = -1; - - private double[] polygonX; - - private double[] polygonY; - - private static final double[] DEFAULT_COLOR = { 1.0, 1.0, 1.0, 1.0 }; // RGBA - - public static final double DEFAULT_RADIUS = 1.0; - - public static final double[] DEFAULT_COVARIANCE_2D = { 1.0, 0.0, 0.0, 1.0 }; - - public static final double[] DEFAULT_COVARIANCE_3D = { 1.0, 0.0, 0.0, 1.0, 0.0, 1.0 }; - - /** - * Default constructor - */ - public GeffNode() - {} - - /** - * Constructor with basic node parameters - * - * @param id - * The unique identifier for the node. - * @param timepoint - * The timepoint of the node. - * @param x - * The x-coordinate of the node. - * @param y - * The y-coordinate of the node. - * @param z - * The z-coordinate of the node. - * @param color - * The color of the node (RGBA). - * @param segmentId - * The segment ID the node belongs to. - * @param radius - * The radius of the node. - * @param covariance2d - * The 2D covariance matrix of the node. - * @param covariance3d - * The 3D covariance matrix of the node. - * @param polygonX - * The x-coordinates of the polygon vertices. - * @param polygonY - * The y-coordinates of the polygon vertices. - */ - public GeffNode( int id, int timepoint, double x, double y, double z, double[] color, int segmentId, double radius, - double[] covariance2d, double[] covariance3d, double[] polygonX, double[] polygonY ) - { - this.id = id; - this.t = timepoint; - this.x = x; - this.y = y; - this.z = z; - this.color = color != null ? color : DEFAULT_COLOR; - this.segmentId = segmentId; - this.radius = radius; - this.covariance2d = covariance2d != null ? covariance2d : DEFAULT_COVARIANCE_2D; - this.covariance3d = covariance3d != null ? covariance3d : DEFAULT_COVARIANCE_3D; - this.polygonX = polygonX != null ? polygonX : new double[ 0 ]; - this.polygonY = polygonY != null ? polygonY : new double[ 0 ]; - } - - /** - * Get the unique identifier of the node. - * - * @return The unique identifier of the node. - */ - public int getId() - { - return id; - } - - /** - * Set the unique identifier of the node. - * - * @param id - * The unique identifier to set. - */ - public void setId( int id ) - { - this.id = id; - } - - /** - * Get the timepoint of the node. - * - * @return The timepoint of the node. - */ - public int getT() - { - return t; - } - - /** - * Set the timepoint of the node. - * - * @param timepoint - * The timepoint to set. - */ - public void setT( int timepoint ) - { - this.t = timepoint; - } - - /** - * Get the x-coordinate of the node. - * - * @return The x-coordinate of the node. - */ - public double getX() - { - return x; - } - - /** - * Set the x-coordinate of the node. - * - * @param x - * The x-coordinate to set. - */ - public void setX( double x ) - { - this.x = x; - } - - /** - * Get the y-coordinate of the node. - * - * @return The y-coordinate of the node. - */ - public double getY() - { - return y; - } - - /** - * Set the y-coordinate of the node. - * - * @param y - * The y-coordinate to set. - */ - public void setY( double y ) - { - this.y = y; - } - - /** - * Get the z-coordinate of the node. - * - * @return The z-coordinate of the node. 
- */ - public double getZ() - { - return z; - } - - /** - * Set the z-coordinate of the node. - * - * @param z - * The z-coordinate to set. - */ - public void setZ( double z ) - { - this.z = z; - } - - /** - * Get the color of the node. - * - * @return The color of the node as an RGBA array. - */ - public double[] getColor() - { - return color; - } - - /** - * Set the color of the node. - * - * @param color - * The color to set as an RGBA array. - */ - public void setColor( double[] color ) - { - if ( color != null && color.length == 4 ) - { - this.color = color; - } - else - { - throw new IllegalArgumentException( "Color must be a 4-element array" ); - } - } - - /** - * Get the segment ID of the node. - * - * @return The segment ID of the node. - */ - public int getSegmentId() - { - return segmentId; - } - - /** - * Set the segment ID of the node. - * - * @param segmentId - * The segment ID to set. - */ - public void setSegmentId( int segmentId ) - { - this.segmentId = segmentId; - } - - /** - * Get the radius of the node. - * - * @return The radius of the node. - */ - public double getRadius() - { - return radius; - } - - /** - * Set the radius of the node. - * - * @param radius - * The radius to set. - */ - public void setRadius( double radius ) - { - this.radius = radius; - } - - /** - * Get the 2D covariance matrix of the node. - * - * @return The 2D covariance matrix as a 4-element array. - */ - public double[] getCovariance2d() - { - return covariance2d; - } - - /** - * Set the 2D covariance matrix of the node. - * - * @param covariance2d - * The 2D covariance matrix to set as a 4-element array. - * @throws IllegalArgumentException - * if the covariance2d array is not of length 4. - */ - public void setCovariance2d( double[] covariance2d ) - { - if ( covariance2d != null && covariance2d.length == 4 ) - { - this.covariance2d = covariance2d; - } - else - { - throw new IllegalArgumentException( "Covariance2D must be a 4-element array" ); - } - } - - /** - * Get the 3D covariance matrix of the node. - * - * @return The 3D covariance matrix as a 6-element array. - */ - public double[] getCovariance3d() - { - return covariance3d; - } - - /** - * Set the 3D covariance matrix of the node. - * - * @param covariance3d - * The 3D covariance matrix to set as a 6-element array. - * @throws IllegalArgumentException - * if the covariance3d array is not of length 6. - */ - public void setCovariance3d( double[] covariance3d ) - { - if ( covariance3d != null && covariance3d.length == 6 ) - { - this.covariance3d = covariance3d; - } - else - { - throw new IllegalArgumentException( "Covariance3D must be a 6-element array" ); - } - } - - /** - * Get the polygon offset for the serialized vertex array. - * - * @return The polygon offset. - */ - public int getPolygonStartIndex() - { - return polygonStartIndex; - } - - /** - * Set the polygon offset for the serialized vertex array. - * - * @param polygonOffset - * The polygon offset to set. - */ - public void setPolygonStartIndex( int polygonOffset ) - { - this.polygonStartIndex = polygonOffset; - } - - /** - * Get the slice information for polygon vertices as an array. - * - * @return An array containing the polygon startIndex and endIndex. - */ - public int[] getPolygonSliceAsArray() - { - if ( polygonX == null || polygonY == null ) - { - System.err.println( "Warning: Polygon is null, returning empty array." 
); - return new int[] { polygonStartIndex, 0 }; - } - if ( polygonStartIndex < 0 ) - throw new IllegalArgumentException( "Polygon startIndex is invalid: " + polygonStartIndex ); - return new int[] { polygonStartIndex, polygonStartIndex + polygonX.length }; - } - - /** - * Get the x-coordinates of the polygon vertices. - * - * @return The x-coordinates of the polygon vertices. - */ - public double[] getPolygonX() - { - return polygonX; - } - - /** - * Get the y-coordinates of the polygon vertices. - * - * @return The y-coordinates of the polygon vertices. - */ - public double[] getPolygonY() - { - return polygonY; - } - - /** - * Set the x-coordinates of the polygon vertices. - * - * @param polygonX - * The x-coordinates to set. - */ - public void setPolygonX( double[] polygonX ) - { - this.polygonX = polygonX != null ? polygonX : new double[ 0 ]; - } - - /** - * Set the y-coordinates of the polygon vertices. - * - * @param polygonY - * The y-coordinates to set. - */ - public void setPolygonY( double[] polygonY ) - { - this.polygonY = polygonY != null ? polygonY : new double[ 0 ]; - } - - /** - * Returns the position of the node as a 3D array. - * - * @deprecated Use {@link #getX()}, {@link #getY()}, {@link #getZ()} - * instead. - * @return The position of the node as a 3D array. - */ - @Deprecated - public double[] getPosition() - { - return new double[] { x, y, z }; - } - - /** - * Set the position of the node. - * - * @deprecated Use {@link #setX(double)}, {@link #setY(double)}, - * {@link #setZ(double)} instead. - * @param position - * The position of the node as a 3D array. - */ - @Deprecated - public void setPosition( double[] position ) - { - if ( position != null && position.length == 2 ) - { - this.x = position[ 0 ]; - this.y = position[ 1 ]; - this.z = 0.0; // Default Z to 0 - } - else if ( position != null && position.length == 3 ) - { - this.x = position[ 0 ]; - this.y = position[ 1 ]; - this.z = position[ 2 ]; - } - else - { - throw new IllegalArgumentException( "Position must be a 2D or 3D array" ); - } - } - - /** - * Builder for creating GeffNode instance. - * - * @return A new Builder instance for GeffNode. 
- */ - public static Builder builder() - { - return new Builder(); - } - - public static class Builder - { - private int id; - - private int timepoint; - - private double x; - - private double y; - - private double z; - - private double[] color = DEFAULT_COLOR; - - private int segmentId; - - private double radius = DEFAULT_RADIUS; - - private double[] covariance2d = DEFAULT_COVARIANCE_2D; - - private double[] covariance3d = DEFAULT_COVARIANCE_3D; - - private double[] polygonX; - - private double[] polygonY; - - public Builder id( int id ) - { - this.id = id; - return this; - } - - public Builder timepoint( int timepoint ) - { - this.timepoint = timepoint; - return this; - } - - public Builder x( double x ) - { - this.x = x; - return this; - } - - public Builder y( double y ) - { - this.y = y; - return this; - } - - public Builder z( double z ) - { - this.z = z; - return this; - } - - public Builder color( double[] color ) - { - if ( color != null && color.length == 4 ) - { - this.color = color; - } - else - { - throw new IllegalArgumentException( "Color must be a 4-element array" ); - } - return this; - } - - public Builder segmentId( int segmentId ) - { - this.segmentId = segmentId; - return this; - } - - public Builder radius( double radius ) - { - this.radius = radius; - return this; - } - - public Builder covariance2d( double[] covariance2d ) - { - if ( covariance2d != null && covariance2d.length == 4 ) - { - this.covariance2d = covariance2d; - } - else - { - throw new IllegalArgumentException( "Covariance2D must be a 4-element array" ); - } - return this; - } - - public Builder covariance3d( double[] covariance3d ) - { - if ( covariance3d != null && covariance3d.length == 6 ) - { - this.covariance3d = covariance3d; - } - else - { - throw new IllegalArgumentException( "Covariance3D must be a 6-element array" ); - } - return this; - } - - public Builder polygonX( double[] polygonX ) - { - this.polygonX = polygonX; - return this; - } - - public Builder polygonY( double[] polygonY ) - { - this.polygonY = polygonY; - return this; - } - - public GeffNode build() - { - return new GeffNode( id, timepoint, x, y, z, color, segmentId, radius, covariance2d, covariance3d, polygonX, polygonY ); - } - } - - /** - * Read nodes from Zarr format with default version and chunked structure - * - * @param zarrPath - * The path to the Zarr directory containing nodes. - * @return List of GeffNode objects read from the Zarr path. - */ - public static List< GeffNode > readFromZarr( String zarrPath ) throws IOException, InvalidRangeException - { - return readFromZarrWithChunks( zarrPath, Geff.VERSION ); - } - - /** - * Read nodes from Zarr format with specified version and chunked structure - * - * @param zarrPath - * The path to the Zarr directory containing nodes. - * @param geffVersion - * The version of the GEFF format to read. - * @return List of GeffNode objects read from the Zarr path. - */ - public static List< GeffNode > readFromZarr( String zarrPath, String geffVersion ) - throws IOException, InvalidRangeException - { - return readFromZarrWithChunks( zarrPath, geffVersion ); - } - - /** - * Read nodes from Zarr format with chunked structure. This method handles - * different Geff versions and reads node attributes accordingly. - * - * @param zarrPath - * The path to the Zarr directory containing nodes. - * @param geffVersion - * The version of the GEFF format to read. - * @return List of GeffNode objects read from the Zarr path. 
- */ - public static List< GeffNode > readFromZarrWithChunks( String zarrPath, String geffVersion ) - throws IOException, InvalidRangeException - { - List< GeffNode > nodes = new ArrayList<>(); - - ZarrGroup nodesGroup = ZarrGroup.open( zarrPath + "/nodes" ); - - System.out.println( - "Reading nodes from Zarr path: " + zarrPath + " with Geff version: " + geffVersion ); - - if ( geffVersion.startsWith( "0.2" ) || geffVersion.startsWith( "0.3" ) ) - { - // Read node IDs from chunks - int[] nodeIds = ZarrUtils.readChunkedIntArray( nodesGroup, "ids", "node IDs" ); - - // Read attributes - ZarrGroup attrsGroup = nodesGroup.openSubGroup( "attrs" ); - - // Read time points from chunks - int[] timepoints = ZarrUtils.readChunkedIntArray( attrsGroup, "t/values", "timepoints" ); - - // Read X coordinates from chunks - double[] xCoords = ZarrUtils.readChunkedDoubleArray( attrsGroup, "x/values", "X coordinates" ); - - // Read Y coordinates from chunks - double[] yCoords = ZarrUtils.readChunkedDoubleArray( attrsGroup, "y/values", "Y coordinates" ); - - // Read segment IDs from chunks - int[] segmentIds = new int[ 0 ]; - try - { - segmentIds = ZarrUtils.readChunkedIntArray( attrsGroup, "seg_id/values", "segment IDs" ); - } - catch ( Exception e ) - { - System.out.println( "Warning: Could not read segment IDs: " + e.getMessage() + " skipping..." ); - } - - // Read positions if available from chunks - double[][] positions = new double[ 0 ][]; - try - { - positions = ZarrUtils.readChunkedDoubleMatrix( attrsGroup, "position/values", "positions" ); - } - catch ( Exception e ) - { - // Position array might not exist or be in different format - System.out.println( "Warning: Could not read position array: " + e.getMessage() ); - } - - // Create node objects - for ( int i = 0; i < nodeIds.length; i++ ) - { - GeffNode node = new Builder() - .id( nodeIds[ i ] ) - .timepoint( i < timepoints.length ? timepoints[ i ] : -1 ) - .x( i < xCoords.length ? xCoords[ i ] : Double.NaN ) - .y( i < yCoords.length ? yCoords[ i ] : Double.NaN ) - .z( i < positions.length ? positions[ i ][ 0 ] : Double.NaN ) - .segmentId( i < segmentIds.length ? segmentIds[ i ] : -1 ) - .build(); - - nodes.add( node ); - } - } - else if ( geffVersion.startsWith( "0.2" ) || geffVersion.startsWith( "0.3" ) || - geffVersion.startsWith( "0.4" ) ) - { - // Read node IDs from chunks - int[] nodeIds = ZarrUtils.readChunkedIntArray( nodesGroup, "ids", "node IDs" ); - - // Read properties - ZarrGroup propsGroup = nodesGroup.openSubGroup( "props" ); - - // Read serialized properties - ZarrGroup serializedPropsGroup = nodesGroup.openSubGroup( "serialized_props" ); - - // Read time points from chunks - int[] timepoints = ZarrUtils.readChunkedIntArray( propsGroup, "t/values", "timepoints" ); - - // Read X coordinates from chunks - double[] xCoords = ZarrUtils.readChunkedDoubleArray( propsGroup, "x/values", "X coordinates" ); - - // Read Y coordinates from chunks - double[] yCoords = ZarrUtils.readChunkedDoubleArray( propsGroup, "y/values", "Y coordinates" ); - - // Read Z coordinates from chunks - double[] zCoords = new double[ 0 ]; - try - { - zCoords = ZarrUtils.readChunkedDoubleArray( propsGroup, "z/values", "Z coordinates" ); - } - catch ( Exception e ) - { - System.out.println( "Warning: Could not read Z coordinates: " + e.getMessage() + " skipping..." 
); - } - - // Read color from chunks - double[][] colors = new double[ 0 ][]; - try - { - colors = ZarrUtils.readChunkedDoubleMatrix( propsGroup, "color/values", "color" ); - } - catch ( Exception e ) - { - System.out.println( "Warning: Could not read color array: " + e.getMessage() + " skipping..." ); - } - - // Read track IDs from chunks - int[] trackIds = new int[ 0 ]; - try - { - trackIds = ZarrUtils.readChunkedIntArray( propsGroup, "track_id/values", "track IDs" ); - } - catch ( Exception e ) - { - System.out.println( "Warning: Could not read track IDs: " + e.getMessage() + " skipping..." ); - } - - // Read radius from chunks - double[] radii = new double[ 0 ]; - try - { - radii = ZarrUtils.readChunkedDoubleArray( propsGroup, "radius/values", "radius" ); - } - catch ( Exception e ) - { - System.out.println( "Warning: Could not read radius: " + e.getMessage() + " skipping..." ); - } - - // Read covariance2d from chunks - double[][] covariance2ds = new double[ 0 ][]; - try - { - covariance2ds = ZarrUtils.readChunkedDoubleMatrix( propsGroup, "covariance2d/values", - "covariance2d" ); - } - catch ( Exception e ) - { - System.out.println( "Warning: Could not read covariance2d: " + e.getMessage() + " skipping..." ); - } - - // Read covariance3d from chunks - double[][] covariance3ds = new double[ 0 ][]; - try - { - covariance3ds = ZarrUtils.readChunkedDoubleMatrix( propsGroup, "covariance3d/values", - "covariance3d" ); - } - catch ( Exception e ) - { - System.out.println( "Warning: Could not read covariance3d: " + e.getMessage() + " skipping..." ); - } - - // Read polygon from chunks - double[][] polygonsX = new double[ 0 ][]; - double[][] polygonsY = new double[ 0 ][]; - if ( geffVersion.startsWith( "0.4" ) ) - { - try - { - int[][] polygonSlices = ZarrUtils.readChunkedIntMatrix( serializedPropsGroup, "polygon/slices", "polygon slices" ); - // expected shape: [numVertices, 2] - double[][] polygonValues = ZarrUtils.readChunkedDoubleMatrix( serializedPropsGroup, "polygon/values", "polygon values" ); - polygonsX = new double[ polygonSlices.length ][]; - polygonsY = new double[ polygonSlices.length ][]; - for ( int i = 0; i < polygonSlices.length; i++ ) - { - int start = polygonSlices[ i ][ 0 ]; - int length = polygonSlices[ i ][ 1 ]; - if ( start >= 0 && start + length <= polygonValues.length ) - { - double[] xPoints = new double[ length ]; - double[] yPoints = new double[ length ]; - for ( int j = 0; j < length; j++ ) - { - xPoints[ j ] = polygonValues[ start + j ][ 0 ]; - yPoints[ j ] = polygonValues[ start + j ][ 1 ]; - } - polygonsX[ i ] = xPoints; - polygonsY[ i ] = yPoints; - } - else - { - System.out.println( "Warning: Invalid polygon slice at index " + i + ", skipping..." ); - } - } - } - catch ( Exception e ) - { - System.out.println( "Warning: Could not read polygon: " + e.getMessage() + " skipping..." ); - } - } - - // Create node objects - for ( int i = 0; i < nodeIds.length; i++ ) - { - GeffNode node = new Builder() - .id( nodeIds[ i ] ) - .timepoint( i < timepoints.length ? timepoints[ i ] : -1 ) - .x( i < xCoords.length ? xCoords[ i ] : Double.NaN ) - .y( i < yCoords.length ? yCoords[ i ] : Double.NaN ) - .z( i < zCoords.length ? zCoords[ i ] : Double.NaN ) - .color( i < colors.length ? colors[ i ] : DEFAULT_COLOR ) - .segmentId( i < trackIds.length ? trackIds[ i ] : -1 ) - .radius( i < radii.length ? radii[ i ] : Double.NaN ) - .covariance2d( i < covariance2ds.length ? covariance2ds[ i ] : DEFAULT_COVARIANCE_2D ) - .covariance3d( i < covariance3ds.length ? 
covariance3ds[ i ] : DEFAULT_COVARIANCE_3D ) - .polygonX( i < polygonsX.length ? polygonsX[ i ] : null ) - .polygonY( i < polygonsY.length ? polygonsY[ i ] : null ) - .build(); - - nodes.add( node ); - } - } - else - { - throw new IOException( "Unsupported Geff version: " + geffVersion ); - } - - return nodes; - } - - /** - * Write nodes to Zarr format with chunked structure - */ - public static void writeToZarr( List< GeffNode > nodes, String zarrPath ) throws IOException, InvalidRangeException - { - writeToZarr( nodes, zarrPath, ZarrUtils.DEFAULT_CHUNK_SIZE ); - } - - public static void writeToZarr( List< GeffNode > nodes, String zarrPath, String geffVersion ) - throws IOException, InvalidRangeException - { - if ( geffVersion == null || geffVersion.isEmpty() ) - { - geffVersion = Geff.VERSION; // Use default version if not specified - } - writeToZarr( nodes, zarrPath, ZarrUtils.DEFAULT_CHUNK_SIZE, geffVersion ); - } - - /** - * Write nodes to Zarr format with specified chunk size - */ - public static void writeToZarr( List< GeffNode > nodes, String zarrPath, int chunkSize ) - throws IOException, InvalidRangeException - { - writeToZarr( nodes, zarrPath, chunkSize, Geff.VERSION ); - } - - public static void writeToZarr( List< GeffNode > nodes, String zarrPath, int chunkSize, String geffVersion ) - throws IOException, InvalidRangeException - { - if ( nodes == null ) - { throw new IllegalArgumentException( "Nodes list cannot be null or empty" ); } - - if ( geffVersion == null || geffVersion.isEmpty() ) - { - geffVersion = Geff.VERSION; // Use default version if not specified - } - - System.out.println( - "Writing " + nodes.size() + " nodes to Zarr path: " + zarrPath + " with chunk size: " + chunkSize - + " to Geff version: " + geffVersion ); - - if ( geffVersion.startsWith( "0.2" ) || geffVersion.startsWith( "0.3" ) ) - { - // Create the main nodes group - ZarrGroup rootGroup = ZarrGroup.create( zarrPath ); - - // Create the main nodes group - ZarrGroup nodesGroup = rootGroup.createSubGroup( "nodes" ); - - // Create attrs subgroup for chunked storage - ZarrGroup attrsGroup = nodesGroup.createSubGroup( "attrs" ); - - // Check if any nodes have 3D positions - boolean hasPositions = nodes.stream() - .anyMatch( node -> node.getPosition() != null && node.getPosition().length >= 3 ); - - System.out.println( "Node analysis:" ); - System.out.println( "- Has 3D positions: " + hasPositions ); - System.out.println( "- Format: Chunked arrays with separate values subgroups" ); - - // Write node IDs in chunks - writeChunkedNodeIds( nodes, nodesGroup, chunkSize ); - - // Write timepoints in chunks - ZarrUtils.writeChunkedIntAttribute( nodes, attrsGroup, "t", chunkSize, GeffNode::getT ); - - // Write X coordinates in chunks - ZarrUtils.writeChunkedDoubleAttribute( nodes, attrsGroup, "x", chunkSize, GeffNode::getX ); - - // Write Y coordinates in chunks - ZarrUtils.writeChunkedDoubleAttribute( nodes, attrsGroup, "y", chunkSize, GeffNode::getY ); - - // Write segment IDs in chunks - ZarrUtils.writeChunkedIntAttribute( nodes, attrsGroup, "seg_id", chunkSize, GeffNode::getSegmentId ); - - // Write positions if available in chunks - if ( hasPositions ) - { - ZarrUtils.writeChunkedDoubleMatrix( nodes, attrsGroup, "position", chunkSize, GeffNode::getPosition, 3 ); - } - } - else if ( geffVersion.startsWith( "0.2" ) || geffVersion.startsWith( "0.3" ) || geffVersion.startsWith( "0.4" ) ) - { - // Create the main nodes group - ZarrGroup rootGroup = ZarrGroup.create( zarrPath ); - - // Create the main nodes group - 
ZarrGroup nodesGroup = rootGroup.createSubGroup( "nodes" ); - - // Create props subgroup for chunked storage - ZarrGroup propsGroup = nodesGroup.createSubGroup( "props" ); - - ZarrGroup serializedPropsGroup = nodesGroup.createSubGroup( "serialized_props" ); - - // Write node IDs in chunks - writeChunkedNodeIds( nodes, nodesGroup, chunkSize ); - - // Write timepoints in chunks - ZarrUtils.writeChunkedIntAttribute( nodes, propsGroup, "t/values", chunkSize, GeffNode::getT ); - - // Write X coordinates in chunks - ZarrUtils.writeChunkedDoubleAttribute( nodes, propsGroup, "x/values", chunkSize, GeffNode::getX ); - - // Write Y coordinates in chunks - ZarrUtils.writeChunkedDoubleAttribute( nodes, propsGroup, "y/values", chunkSize, GeffNode::getY ); - - // Write Z coordinates in chunks - ZarrUtils.writeChunkedDoubleAttribute( nodes, propsGroup, "z/values", chunkSize, GeffNode::getZ ); - - // Write color in chunks - ZarrUtils.writeChunkedDoubleMatrix( nodes, propsGroup, "color/values", chunkSize, GeffNode::getColor, 4 ); - - // Write segment IDs in chunks - ZarrUtils.writeChunkedIntAttribute( nodes, propsGroup, "track_id/values", chunkSize, GeffNode::getSegmentId ); - - // Write radius and covariance attributes if available - ZarrUtils.writeChunkedDoubleAttribute( nodes, propsGroup, "radius/values", chunkSize, GeffNode::getRadius ); - - // Write covariance2d in chunks - ZarrUtils.writeChunkedDoubleMatrix( nodes, propsGroup, "covariance2d/values", chunkSize, GeffNode::getCovariance2d, - 4 ); - - // Write covariance3d in chunks - ZarrUtils.writeChunkedDoubleMatrix( nodes, propsGroup, "covariance3d/values", chunkSize, GeffNode::getCovariance3d, - 6 ); - - if ( geffVersion.startsWith( "0.4" ) ) - { - // Write polygon slices and values if available - List< GeffSerializableVertex > geffVertices = new ArrayList<>(); - int polygonOffset = 0; - for ( GeffNode node : nodes ) - { - if ( node.polygonX == null || node.polygonY == null ) - throw new IllegalArgumentException( "Polygon coordinates cannot be null" ); - if ( node.getPolygonX().length != node.getPolygonY().length ) - throw new IllegalArgumentException( "Polygon X and Y coordinates must have the same length" ); - node.setPolygonStartIndex( polygonOffset ); - for ( int i = 0; i < node.getPolygonX().length; i++ ) - { - geffVertices.add( new GeffSerializableVertex( node.getPolygonX()[ i ], - node.getPolygonY()[ i ] ) ); - } - polygonOffset += node.getPolygonX().length; - } - ZarrUtils.writeChunkedIntMatrix( nodes, serializedPropsGroup, "polygon/slices", chunkSize, GeffNode::getPolygonSliceAsArray, 2 ); - ZarrUtils.writeChunkedDoubleMatrix( geffVertices, serializedPropsGroup, "polygon/values", chunkSize, GeffSerializableVertex::getCoordinates, 2 ); - } - - } - - System.out.println( "Successfully wrote nodes to Zarr format with chunked structure" ); - } - - /** - * Helper method to write chunked node IDs - */ - private static void writeChunkedNodeIds( List< GeffNode > nodes, ZarrGroup parentGroup, int chunkSize ) - throws IOException, InvalidRangeException - { - - int totalNodes = nodes.size(); - - // Create the ids subgroup - ZarrGroup idsGroup = parentGroup.createSubGroup( "ids" ); - - // Create a single ZarrArray for all IDs with proper chunking - ZarrArray idsArray = idsGroup.createArray( "", new ArrayParams() - .shape( totalNodes ) - .chunks( chunkSize ) - .dataType( DataType.i4 ) ); - - // Write data in chunks - int chunkIndex = 0; - for ( int startIdx = 0; startIdx < totalNodes; startIdx += chunkSize ) - { - int endIdx = Math.min( startIdx + 
chunkSize, totalNodes ); - int currentChunkSize = endIdx - startIdx; - - // Prepare chunk data array - int[] chunkData = new int[ currentChunkSize ]; - - // Fill chunk data array - for ( int i = 0; i < currentChunkSize; i++ ) - { - chunkData[ i ] = nodes.get( startIdx + i ).getId(); - } - - // Write chunk at specific offset - idsArray.write( chunkData, new int[] { currentChunkSize }, new int[] { startIdx } ); - - System.out.println( "- Wrote node IDs chunk " + chunkIndex + ": " + currentChunkSize + " nodes (indices " - + startIdx + "-" + ( endIdx - 1 ) + ")" ); - chunkIndex++; - } - } - - @Override - public String toString() - { - StringBuilder sb = new StringBuilder( "GeffNode{" ) - .append( "id=" ).append( id ) - .append( ", t=" ).append( t ) - .append( ", x=" ).append( String.format( "%.2f", x ) ) - .append( ", y=" ).append( String.format( "%.2f", y ) ) - .append( ", z=" ).append( String.format( "%.2f", z ) ) - .append( color != null ? ", color=" + java.util.Arrays.toString( color ) : "" ) - .append( ", segId=" ).append( segmentId ) - .append( "radius=" ).append( String.format( "%.2f", radius ) ) - .append( covariance2d != null ? ", covariance2d=" + java.util.Arrays.toString( covariance2d ) : "" ) - .append( covariance3d != null ? ", covariance3d=" + java.util.Arrays.toString( covariance3d ) : "" ) - .append( "}" ); - return sb.toString(); - } - - @Override - public boolean equals( Object obj ) - { - if ( this == obj ) - return true; - if ( obj == null || getClass() != obj.getClass() ) - return false; - - GeffNode geffNode = ( GeffNode ) obj; - return id == geffNode.id && - t == geffNode.t && - Double.compare( geffNode.x, x ) == 0 && - Double.compare( geffNode.y, y ) == 0 && - Double.compare( geffNode.z, z ) == 0 && - java.util.Arrays.equals( color, geffNode.color ) && - segmentId == geffNode.segmentId && - Double.compare( geffNode.radius, radius ) == 0 && - java.util.Arrays.equals( covariance2d, geffNode.covariance2d ) && - java.util.Arrays.equals( covariance3d, geffNode.covariance3d ) && - java.util.Arrays.equals( polygonX, geffNode.polygonX ) && - java.util.Arrays.equals( polygonY, geffNode.polygonY ); - } - - @Override - public int hashCode() - { - int result = id; - result = 31 * result + t; - result = 31 * result + Double.hashCode( x ); - result = 31 * result + Double.hashCode( y ); - result = 31 * result + Double.hashCode( z ); - result = 31 * result + ( color != null ? java.util.Arrays.hashCode( color ) : 0 ); - result = 31 * result + segmentId; - result = 31 * result + Double.hashCode( radius ); - result = 31 * result + ( covariance2d != null ? java.util.Arrays.hashCode( covariance2d ) : 0 ); - result = 31 * result + ( covariance3d != null ? 
java.util.Arrays.hashCode( covariance3d ) : 0 ); - return result; - } + private static final Logger LOG = LoggerFactory.getLogger( GeffNode.class ); + + // Node attributes + private int id; + + private int t; + + private double x; + + private double y; + + private double z; + + private double[] color; + + private int segmentId; + + private double radius; + + private double[] covariance2d; + + private double[] covariance3d; + + private double[] polygonX; + + private double[] polygonY; + + private static final double[] DEFAULT_COLOR = { 1.0, 1.0, 1.0, 1.0 }; // RGBA + + public static final double DEFAULT_RADIUS = 1.0; + + public static final double[] DEFAULT_COVARIANCE_2D = { 1.0, 0.0, 0.0, 1.0 }; + + public static final double[] DEFAULT_COVARIANCE_3D = { 1.0, 0.0, 0.0, 1.0, 0.0, 1.0 }; + + /** + * Default constructor + */ + public GeffNode() + {} + + /** + * Constructor with basic node parameters + * + * @param id + * The unique identifier for the node. + * @param timepoint + * The timepoint of the node. + * @param x + * The x-coordinate of the node. + * @param y + * The y-coordinate of the node. + * @param z + * The z-coordinate of the node. + * @param color + * The color of the node (RGBA). + * @param segmentId + * The segment ID the node belongs to. + * @param radius + * The radius of the node. + * @param covariance2d + * The 2D covariance matrix of the node. + * @param covariance3d + * The 3D covariance matrix of the node. + * @param polygonX + * The x-coordinates of the polygon vertices. + * @param polygonY + * The y-coordinates of the polygon vertices. + */ + public GeffNode( int id, int timepoint, double x, double y, double z, double[] color, int segmentId, double radius, + double[] covariance2d, double[] covariance3d, double[] polygonX, double[] polygonY ) + { + this.id = id; + this.t = timepoint; + this.x = x; + this.y = y; + this.z = z; + this.color = color != null ? color : DEFAULT_COLOR; + this.segmentId = segmentId; + this.radius = radius; + this.covariance2d = covariance2d != null ? covariance2d : DEFAULT_COVARIANCE_2D; + this.covariance3d = covariance3d != null ? covariance3d : DEFAULT_COVARIANCE_3D; + this.polygonX = polygonX != null ? polygonX : new double[ 0 ]; + this.polygonY = polygonY != null ? polygonY : new double[ 0 ]; + } + + /** + * Get the unique identifier of the node. + * + * @return The unique identifier of the node. + */ + public int getId() + { + return id; + } + + /** + * Set the unique identifier of the node. + * + * @param id + * The unique identifier to set. + */ + public void setId( int id ) + { + this.id = id; + } + + /** + * Get the timepoint of the node. + * + * @return The timepoint of the node. + */ + public int getT() + { + return t; + } + + /** + * Set the timepoint of the node. + * + * @param timepoint + * The timepoint to set. + */ + public void setT( int timepoint ) + { + this.t = timepoint; + } + + /** + * Get the x-coordinate of the node. + * + * @return The x-coordinate of the node. + */ + public double getX() + { + return x; + } + + /** + * Set the x-coordinate of the node. + * + * @param x + * The x-coordinate to set. + */ + public void setX( double x ) + { + this.x = x; + } + + /** + * Get the y-coordinate of the node. + * + * @return The y-coordinate of the node. + */ + public double getY() + { + return y; + } + + /** + * Set the y-coordinate of the node. + * + * @param y + * The y-coordinate to set. + */ + public void setY( double y ) + { + this.y = y; + } + + /** + * Get the z-coordinate of the node. 
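+     * If the geff store has no {@code /nodes/props/z/values} dataset,
+     * {@code readFromN5} leaves this value as {@link Double#NaN}.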
+ * + * @return The z-coordinate of the node. + */ + public double getZ() + { + return z; + } + + /** + * Set the z-coordinate of the node. + * + * @param z + * The z-coordinate to set. + */ + public void setZ( double z ) + { + this.z = z; + } + + /** + * Get the color of the node. + * + * @return The color of the node as an RGBA array. + */ + public double[] getColor() + { + return color; + } + + /** + * Set the color of the node. + * + * @param color + * The color to set as an RGBA array. + */ + public void setColor( double[] color ) + { + if ( color != null && color.length == 4 ) + { + this.color = color; + } + else + { + throw new IllegalArgumentException( "Color must be a 4-element array" ); + } + } + + /** + * Get the segment ID of the node. + * + * @return The segment ID of the node. + */ + public int getSegmentId() + { + return segmentId; + } + + /** + * Set the segment ID of the node. + * + * @param segmentId + * The segment ID to set. + */ + public void setSegmentId( int segmentId ) + { + this.segmentId = segmentId; + } + + /** + * Get the radius of the node. + * + * @return The radius of the node. + */ + public double getRadius() + { + return radius; + } + + /** + * Set the radius of the node. + * + * @param radius + * The radius to set. + */ + public void setRadius( double radius ) + { + this.radius = radius; + } + + /** + * Get the 2D covariance matrix of the node. + * + * @return The 2D covariance matrix as a 4-element array. + */ + public double[] getCovariance2d() + { + return covariance2d; + } + + /** + * Set the 2D covariance matrix of the node. + * + * @param covariance2d + * The 2D covariance matrix to set as a 4-element array. + * + * @throws IllegalArgumentException + * if the covariance2d array is not of length 4. + */ + public void setCovariance2d( double[] covariance2d ) + { + if ( covariance2d != null && covariance2d.length == 4 ) + { + this.covariance2d = covariance2d; + } + else + { + throw new IllegalArgumentException( "Covariance2D must be a 4-element array" ); + } + } + + /** + * Get the 3D covariance matrix of the node. + * + * @return The 3D covariance matrix as a 6-element array. + */ + public double[] getCovariance3d() + { + return covariance3d; + } + + /** + * Set the 3D covariance matrix of the node. + * + * @param covariance3d + * The 3D covariance matrix to set as a 6-element array. + * + * @throws IllegalArgumentException + * if the covariance3d array is not of length 6. + */ + public void setCovariance3d( double[] covariance3d ) + { + if ( covariance3d != null && covariance3d.length == 6 ) + { + this.covariance3d = covariance3d; + } + else + { + throw new IllegalArgumentException( "Covariance3D must be a 6-element array" ); + } + } + + /** + * Get the x-coordinates of the polygon vertices. + * + * @return The x-coordinates of the polygon vertices. + */ + public double[] getPolygonX() + { + return polygonX; + } + + /** + * Get the y-coordinates of the polygon vertices. + * + * @return The y-coordinates of the polygon vertices. + */ + public double[] getPolygonY() + { + return polygonY; + } + + /** + * Set the x-coordinates of the polygon vertices. + * + * @param polygonX + * The x-coordinates to set. + */ + public void setPolygonX( double[] polygonX ) + { + this.polygonX = polygonX != null ? polygonX : new double[ 0 ]; + } + + /** + * Set the y-coordinates of the polygon vertices. + * + * @param polygonY + * The y-coordinates to set. + */ + public void setPolygonY( double[] polygonY ) + { + this.polygonY = polygonY != null ? 
polygonY : new double[ 0 ]; + } + + /** + * Returns the position of the node as a 3D array. + * + * @return The position of the node as a 3D array. + * + * @deprecated Use {@link #getX()}, {@link #getY()}, {@link #getZ()} + * instead. + */ + @Deprecated + public double[] getPosition() + { + return new double[] { x, y, z }; + } + + /** + * Set the position of the node. + * + * @param position + * The position of the node as a 3D array. + * + * @deprecated Use {@link #setX(double)}, {@link #setY(double)}, + * {@link #setZ(double)} instead. + */ + @Deprecated + public void setPosition( double[] position ) + { + if ( position != null && position.length == 2 ) + { + this.x = position[ 0 ]; + this.y = position[ 1 ]; + this.z = 0.0; // Default Z to 0 + } + else if ( position != null && position.length == 3 ) + { + this.x = position[ 0 ]; + this.y = position[ 1 ]; + this.z = position[ 2 ]; + } + else + { + throw new IllegalArgumentException( "Position must be a 2D or 3D array" ); + } + } + + /** + * Builder for creating GeffNode instance. + * + * @return A new Builder instance for GeffNode. + */ + public static Builder builder() + { + return new Builder(); + } + + public static class Builder + { + private int id; + + private int timepoint; + + private double x; + + private double y; + + private double z; + + private double[] color = DEFAULT_COLOR; + + private int segmentId; + + private double radius = DEFAULT_RADIUS; + + private double[] covariance2d = DEFAULT_COVARIANCE_2D; + + private double[] covariance3d = DEFAULT_COVARIANCE_3D; + + private double[] polygonX; + + private double[] polygonY; + + public Builder id( int id ) + { + this.id = id; + return this; + } + + public Builder timepoint( int timepoint ) + { + this.timepoint = timepoint; + return this; + } + + public Builder x( double x ) + { + this.x = x; + return this; + } + + public Builder y( double y ) + { + this.y = y; + return this; + } + + public Builder z( double z ) + { + this.z = z; + return this; + } + + public Builder color( double[] color ) + { + if ( color != null && color.length == 4 ) + { + this.color = color; + } + else + { + throw new IllegalArgumentException( "Color must be a 4-element array" ); + } + return this; + } + + public Builder segmentId( int segmentId ) + { + this.segmentId = segmentId; + return this; + } + + public Builder radius( double radius ) + { + this.radius = radius; + return this; + } + + public Builder covariance2d( double[] covariance2d ) + { + if ( covariance2d != null && covariance2d.length == 4 ) + { + this.covariance2d = covariance2d; + } + else + { + throw new IllegalArgumentException( "Covariance2D must be a 4-element array" ); + } + return this; + } + + public Builder covariance3d( double[] covariance3d ) + { + if ( covariance3d != null && covariance3d.length == 6 ) + { + this.covariance3d = covariance3d; + } + else + { + throw new IllegalArgumentException( "Covariance3D must be a 6-element array" ); + } + return this; + } + + public Builder polygonX( double[] polygonX ) + { + this.polygonX = polygonX; + return this; + } + + public Builder polygonY( double[] polygonY ) + { + this.polygonY = polygonY; + return this; + } + + public GeffNode build() + { + return new GeffNode( id, timepoint, x, y, z, color, segmentId, radius, covariance2d, covariance3d, polygonX, polygonY ); + } + } + + /** + * Read nodes from Zarr format with default version and chunked structure + * + * @param zarrPath + * The path to the Zarr directory containing nodes. 
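+     *            <p>
+     *            A minimal usage sketch (illustrative only; the path below is
+     *            hypothetical and handling of the declared {@code IOException}
+     *            is left to the caller): <pre>{@code
+     * List< GeffNode > nodes = GeffNode.readFromZarr( "/data/sample.zarr/tracks" );
+     * nodes.forEach( System.out::println );
+     * }</pre>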
+ * + * @return List of GeffNode objects read from the Zarr path. + */ + public static List< GeffNode > readFromZarr( String zarrPath ) throws IOException + { + return readFromZarr( zarrPath, Geff.VERSION ); + } + + /** + * Read nodes from Zarr format with specified version and chunked structure + * + * @param zarrPath + * The path to the Zarr directory containing nodes. + * @param geffVersion + * The version of the GEFF format to read. + * + * @return List of GeffNode objects read from the Zarr path. + */ + public static List< GeffNode > readFromZarr( final String zarrPath, final String geffVersion ) + { + LOG.debug( "Reading nodes from Zarr path: " + zarrPath + " with Geff version: " + geffVersion ); + try ( final N5ZarrReader reader = new N5ZarrReader( zarrPath, true ) ) + { + return readFromN5( reader, "/", geffVersion ); + } + } + + public static List< GeffNode > readFromN5( final N5Reader reader, final String group, final String geffVersion ) + { + checkSupportedVersion( geffVersion ); + final String path = N5URI.normalizeGroupPath( group ); + + // Read node IDs from chunks + final int[] nodeIds = GeffUtils.readAsIntArray( reader, path + "/nodes/ids", "node IDs" ); + if ( nodeIds == null ) + { + throw new IllegalArgumentException( "required property '/nodes/ids' not found" ); + } + final int numNodes = nodeIds.length; + + // Read time points from chunks + final int[] timepoints = GeffUtils.readAsIntArray( reader, path + "/nodes/props/t/values", "timepoints" ); + verifyLength( timepoints, numNodes, "/nodes/props/t/values" ); + + // Read X coordinates from chunks + final double[] xCoords = GeffUtils.readAsDoubleArray( reader, path + "/nodes/props/x/values", "X coordinates" ); + verifyLength( xCoords, numNodes, "/nodes/props/x/values" ); + + // Read Y coordinates from chunks + final double[] yCoords = GeffUtils.readAsDoubleArray( reader, path + "/nodes/props/y/values", "Y coordinates" ); + verifyLength( yCoords, numNodes, "/nodes/props/y/values" ); + + // Read Z coordinates from chunks + final double[] zCoords = GeffUtils.readAsDoubleArray( reader, path + "/nodes/props/z/values", "Z coordinates" ); + verifyLength( zCoords, numNodes, "/nodes/props/z/values" ); + + // Read color from chunks + final FlattenedDoubles colors = GeffUtils.readAsDoubleMatrix( reader, path + "/nodes/props/color/values", "color" ); + verifyLength( colors, numNodes, "/nodes/props/color/values" ); + + // Read track IDs from chunks + final int[] trackIds = GeffUtils.readAsIntArray( reader, path + "/nodes/props/track_id/values", "track IDs" ); + verifyLength( trackIds, numNodes, "/nodes/props/track_id/values" ); + + // Read radius from chunks + double[] radius = GeffUtils.readAsDoubleArray( reader, path + "/nodes/props/radius/values", "radius" ); + verifyLength( radius, numNodes, "/nodes/props/radius/values" ); + + // Read covariance2d from chunks + final FlattenedDoubles covariance2ds = GeffUtils.readAsDoubleMatrix( reader, path + "/nodes/props/covariance2d/values", "covariance2d" ); + verifyLength( covariance2ds, numNodes, "/nodes/props/covariance2d/values" ); + + // Read covariance3d from chunks + final FlattenedDoubles covariance3ds = GeffUtils.readAsDoubleMatrix( reader, path + "/nodes/props/covariance3d/values", "covariance3d" ); + verifyLength( covariance3ds, numNodes, "/nodes/props/covariance3d/values" ); + + // Read polygon from chunks + double[][] polygonsX = null; + double[][] polygonsY = null; + if ( geffVersion.startsWith( "0.4" ) ) + { + try + { + final FlattenedInts polygonSlices = 
GeffUtils.readAsIntMatrix( reader, path + "/nodes/serialized_props/polygon/slices", "polygon slices" );
+                verifyLength( polygonSlices, numNodes, "/nodes/serialized_props/polygon/slices" );
+
+                final FlattenedDoubles polygonValues = GeffUtils.readAsDoubleMatrix( reader, path + "/nodes/serialized_props/polygon/values", "polygon values" );
+
+                polygonsX = new double[ numNodes ][];
+                polygonsY = new double[ numNodes ][];
+                for ( int i = 0; i < numNodes; i++ )
+                {
+                    int start = polygonSlices.at( i, 0 );
+                    int length = polygonSlices.at( i, 1 );
+                    // the last (row) dimension of the 2D values array is the total number of serialized vertices
+                    final int numVertices = polygonValues.size()[ 1 ];
+                    if ( start >= 0 && start + length <= numVertices )
+                    {
+                        final double[] xPoints = new double[ length ];
+                        final double[] yPoints = new double[ length ];
+                        for ( int j = 0; j < length; j++ )
+                        {
+                            xPoints[ j ] = polygonValues.at( start + j, 0 );
+                            yPoints[ j ] = polygonValues.at( start + j, 1 );
+                        }
+                        polygonsX[ i ] = xPoints;
+                        polygonsY[ i ] = yPoints;
+                    }
+                    else
+                    {
+                        LOG.warn( "Invalid polygon slice at index {}, skipping...", i );
+                    }
+                }
+            }
+            catch ( Exception e )
+            {
+                LOG.warn( "Could not read polygon: {}, skipping...", e.getMessage() );
+            }
+        }
+
+        // Create node objects
+        final List< GeffNode > nodes = new ArrayList<>( numNodes );
+        for ( int i = 0; i < numNodes; i++ )
+        {
+            final int id = nodeIds[ i ];
+            final int t = timepoints != null ? timepoints[ i ] : -1;
+            final double x = xCoords != null ? xCoords[ i ] : Double.NaN;
+            final double y = yCoords != null ? yCoords[ i ] : Double.NaN;
+            final double z = zCoords != null ? zCoords[ i ] : Double.NaN;
+            final double[] color = colors != null ? colors.rowAt( i ) : DEFAULT_COLOR;
+            final int segmentId = trackIds != null ? trackIds[ i ] : -1;
+            final double r = radius != null ? radius[ i ] : Double.NaN;
+            final double[] covariance2d = covariance2ds != null ? covariance2ds.rowAt( i ) : DEFAULT_COVARIANCE_2D;
+            final double[] covariance3d = covariance3ds != null ? covariance3ds.rowAt( i ) : DEFAULT_COVARIANCE_3D;
+            final double[] polygonX = polygonsX != null ? polygonsX[ i ] : null;
+            final double[] polygonY = polygonsY != null ?
polygonsY[ i ] : null; + final GeffNode node = new GeffNode( id, t, x, y, z, color, segmentId, r, covariance2d, covariance3d, polygonX, polygonY ); + nodes.add( node ); + } + return nodes; + } + + /** + * Write nodes to Zarr format with chunked structure + */ + public static void writeToZarr( List< GeffNode > nodes, String zarrPath ) + { + writeToZarr( nodes, zarrPath, GeffUtils.DEFAULT_CHUNK_SIZE ); + } + + /** + * Write nodes to Zarr format with specified chunk size + */ + public static void writeToZarr( List< GeffNode > nodes, String zarrPath, int chunkSize ) + { + writeToZarr( nodes, zarrPath, chunkSize, Geff.VERSION ); + } + + public static void writeToZarr( List< GeffNode > nodes, String zarrPath, String geffVersion ) + { + writeToZarr( nodes, zarrPath, GeffUtils.DEFAULT_CHUNK_SIZE, geffVersion ); + } + + public static void writeToZarr( List< GeffNode > nodes, String zarrPath, int chunkSize, String geffVersion ) + { + LOG.debug( "Writing {} nodes to Zarr path: {} with chunk size: {} to Geff version: {}", nodes.size(), zarrPath, chunkSize, geffVersion ); + try ( final N5ZarrWriter writer = new N5ZarrWriter( zarrPath, true ) ) + { + writeToN5( nodes, writer, "/", chunkSize, geffVersion ); + } + } + + public static void writeToN5( + final List< GeffNode > nodes, + final N5Writer writer, + final String group, + final int chunkSize, + String geffVersion ) + { + if ( nodes == null ) + throw new NullPointerException( "Nodes list cannot be null" ); + + if ( geffVersion == null || geffVersion.isEmpty() ) + { + geffVersion = Geff.VERSION; // Use default version if not specified + } + GeffUtils.checkSupportedVersion( geffVersion ); + + final String path = N5URI.normalizeGroupPath( group ); + + // Write node IDs in chunks + GeffUtils.writeIntArray( nodes, GeffNode::getId, writer, path + "/nodes/ids", chunkSize ); + + // Write timepoints in chunks + GeffUtils.writeIntArray( nodes, GeffNode::getT, writer, path + "/nodes/props/t/values", chunkSize ); + + // Write X coordinates in chunks + GeffUtils.writeDoubleArray( nodes, GeffNode::getX, writer, path + "/nodes/props/x/values", chunkSize ); + + // Write Y coordinates in chunks + GeffUtils.writeDoubleArray( nodes, GeffNode::getY, writer, path + "/nodes/props/y/values", chunkSize ); + + // Write Z coordinates in chunks + GeffUtils.writeDoubleArray( nodes, GeffNode::getZ, writer, path + "/nodes/props/z/values", chunkSize ); + + // Write color in chunks + GeffUtils.writeDoubleMatrix( nodes, 4, GeffNode::getColor, writer, path + "/nodes/props/color/values", chunkSize ); + + // Write segment IDs in chunks + GeffUtils.writeIntArray( nodes, GeffNode::getSegmentId, writer, path + "/nodes/props/track_id/values", chunkSize ); + + // Write radius and covariance attributes if available + GeffUtils.writeDoubleArray( nodes, GeffNode::getRadius, writer, path + "/nodes/props/radius/values", chunkSize ); + + // Write covariance2d in chunks + GeffUtils.writeDoubleMatrix( nodes, 4, GeffNode::getCovariance2d, writer, path + "/nodes/props/covariance2d/values", chunkSize ); + + // Write covariance3d in chunks + GeffUtils.writeDoubleMatrix( nodes, 6, GeffNode::getCovariance3d, writer, path + "/nodes/props/covariance3d/values", chunkSize ); + + if ( geffVersion.startsWith( "0.4" ) ) + { + // Write polygon slices and values if available + final List< GeffSerializableVertex > vertices = new ArrayList<>(); + final List< int[] > slices = new ArrayList<>(); + int polygonOffset = 0; + for ( final GeffNode node : nodes ) + { + if ( node.polygonX == null || node.polygonY == null ) 
+ throw new IllegalArgumentException( "Polygon coordinates cannot be null" ); + if ( node.getPolygonX().length != node.getPolygonY().length ) + throw new IllegalArgumentException( "Polygon X and Y coordinates must have the same length" ); + final int numVertices = node.getPolygonX().length; + for ( int j = 0; j < numVertices; j++ ) + vertices.add( new GeffSerializableVertex( + node.getPolygonX()[ j ], + node.getPolygonY()[ j ] ) ); + slices.add( new int[] { polygonOffset, numVertices } ); + polygonOffset += numVertices; + } + GeffUtils.writeIntMatrix( slices, 2, Function.identity(), writer, path + "/nodes/serialized_props/polygon/slices", chunkSize ); + GeffUtils.writeDoubleMatrix( vertices, 2, GeffSerializableVertex::getCoordinates, writer, path + "/nodes/serialized_props/polygon/values", chunkSize ); + } + + LOG.debug( "Successfully wrote nodes to Zarr format with chunked structure" ); + } + + @Override + public String toString() + { + StringBuilder sb = new StringBuilder( "GeffNode{" ) + .append( "id=" ).append( id ) + .append( ", t=" ).append( t ) + .append( ", x=" ).append( String.format( "%.2f", x ) ) + .append( ", y=" ).append( String.format( "%.2f", y ) ) + .append( ", z=" ).append( String.format( "%.2f", z ) ) + .append( color != null ? ", color=" + java.util.Arrays.toString( color ) : "" ) + .append( ", segId=" ).append( segmentId ) + .append( "radius=" ).append( String.format( "%.2f", radius ) ) + .append( covariance2d != null ? ", covariance2d=" + java.util.Arrays.toString( covariance2d ) : "" ) + .append( covariance3d != null ? ", covariance3d=" + java.util.Arrays.toString( covariance3d ) : "" ) + .append( "}" ); + return sb.toString(); + } + + @Override + public boolean equals( Object obj ) + { + if ( this == obj ) + return true; + if ( obj == null || getClass() != obj.getClass() ) + return false; + + GeffNode geffNode = ( GeffNode ) obj; + return id == geffNode.id && + t == geffNode.t && + Double.compare( geffNode.x, x ) == 0 && + Double.compare( geffNode.y, y ) == 0 && + Double.compare( geffNode.z, z ) == 0 && + java.util.Arrays.equals( color, geffNode.color ) && + segmentId == geffNode.segmentId && + Double.compare( geffNode.radius, radius ) == 0 && + java.util.Arrays.equals( covariance2d, geffNode.covariance2d ) && + java.util.Arrays.equals( covariance3d, geffNode.covariance3d ) && + java.util.Arrays.equals( polygonX, geffNode.polygonX ) && + java.util.Arrays.equals( polygonY, geffNode.polygonY ); + } + + @Override + public int hashCode() + { + int result = id; + result = 31 * result + t; + result = 31 * result + Double.hashCode( x ); + result = 31 * result + Double.hashCode( y ); + result = 31 * result + Double.hashCode( z ); + result = 31 * result + Arrays.hashCode( color ); + result = 31 * result + segmentId; + result = 31 * result + Double.hashCode( radius ); + result = 31 * result + Arrays.hashCode( covariance2d ); + result = 31 * result + Arrays.hashCode( covariance3d ); + return result; + } } diff --git a/src/main/java/org/mastodon/geff/GeffUtils.java b/src/main/java/org/mastodon/geff/GeffUtils.java new file mode 100644 index 0000000..daed636 --- /dev/null +++ b/src/main/java/org/mastodon/geff/GeffUtils.java @@ -0,0 +1,510 @@ +package org.mastodon.geff; + +import java.util.Arrays; +import java.util.List; +import java.util.function.Function; +import java.util.function.IntFunction; +import java.util.function.ToDoubleFunction; +import java.util.function.ToIntFunction; + +import org.janelia.saalfeldlab.n5.DataBlock; +import org.janelia.saalfeldlab.n5.DataType; +import 
org.janelia.saalfeldlab.n5.DatasetAttributes; +import org.janelia.saalfeldlab.n5.N5Exception; +import org.janelia.saalfeldlab.n5.N5Reader; +import org.janelia.saalfeldlab.n5.N5Writer; +import org.janelia.saalfeldlab.n5.blosc.BloscCompression; +import org.janelia.saalfeldlab.n5.zarr.N5ZarrReader; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import net.imglib2.FinalInterval; +import net.imglib2.Interval; +import net.imglib2.blocks.BlockInterval; +import net.imglib2.blocks.SubArrayCopy; +import net.imglib2.img.cell.CellGrid; +import net.imglib2.iterator.IntervalIterator; +import net.imglib2.util.Intervals; +import net.imglib2.util.Util; + +public class GeffUtils +{ + private static final Logger LOG = LoggerFactory.getLogger( GeffUtils.class ); + + public static void checkSupportedVersion( final String version ) throws IllegalArgumentException + { + if ( !( version.startsWith( "0.2" ) || version.startsWith( "0.3" ) || version.startsWith( "0.4" ) ) ) + { + throw new IllegalArgumentException( "geff_version " + version + " not supported." ); + } + } + + // Default chunk size if not specified + public static final int DEFAULT_CHUNK_SIZE = 1000; + + public static int getChunkSize( final String zarrPath ) + { + try ( final N5ZarrReader reader = new N5ZarrReader( zarrPath, true ) ) + { + final int[] chunkSize = reader.getDatasetAttributes( "/nodes/ids" ).getBlockSize(); + return chunkSize[ 0 ]; + } + catch ( final N5Exception.N5IOException e ) + { + // If the path doesn't exist, return a default chunk size + System.out.println( "Path doesn't exist, using default chunk size: " + e.getMessage() ); + return DEFAULT_CHUNK_SIZE; // Default chunk size + } + } + + public static < T > void writeIntArray( + final List< T > elements, + final ToIntFunction< T > extractor, + final N5Writer writer, + final String dataset, + final int chunkSize ) + { + final int size = elements.size(); + final int[] data = new int[ size ]; + Arrays.setAll(data, i -> extractor.applyAsInt(elements.get(i))); + final DatasetAttributes attributes = new DatasetAttributes( + new long[] { size }, + new int[] { chunkSize }, + DataType.INT32, + new BloscCompression() ); + writer.createDataset(dataset, attributes); + write( data, writer, dataset, attributes ); + } + + public static < T > void writeIntMatrix( + final List< T > elements, + final int numColumns, + final Function< T, int[] > extractor, + final N5Writer writer, + final String dataset, + final int chunkSize ) + { + writeIntMatrix( elements.size(), numColumns, + i -> extractor.apply( elements.get( i ) ), + writer, dataset, chunkSize ); + } + + /** + * @param extractor function from row index to int[] with column data + */ + public static void writeIntMatrix( + final int numRows, + final int numColumns, + final IntFunction< int[] > extractor, + final N5Writer writer, + final String dataset, + final int chunkSize ) + { + final int[] data = new int[ numColumns * numRows ]; + for ( int i = 0; i < numRows; ++i ) { + final int[] row = extractor.apply( i ); + if ( row == null || row.length < numColumns ) + continue; + System.arraycopy( row, 0, data, numColumns * i, numColumns ); + } + final DatasetAttributes attributes = new DatasetAttributes( + new long[] { numColumns, numRows }, + new int[] { numColumns, chunkSize }, + DataType.INT32, + new BloscCompression() ); + writer.createDataset(dataset, attributes); + write( data, writer, dataset, attributes ); + } + + public static < T > void writeDoubleArray( + final List< T > elements, + final ToDoubleFunction< T > extractor, 
+ final N5Writer writer, + final String dataset, + final int chunkSize ) + { + final int size = elements.size(); + final double[] data = new double[ size ]; + Arrays.setAll(data, i -> extractor.applyAsDouble(elements.get(i))); + final DatasetAttributes attributes = new DatasetAttributes( + new long[] { size }, + new int[] { chunkSize }, + DataType.FLOAT64, + new BloscCompression() ); + writer.createDataset(dataset, attributes); + write( data, writer, dataset, attributes ); + } + + public static < T > void writeDoubleMatrix( + final List< T > elements, + final int numColumns, + final Function< T, double[] > extractor, + final N5Writer writer, + final String dataset, + final int chunkSize ) + { + final int size = elements.size(); + final double[] data = new double[ numColumns * size ]; + for ( int i = 0; i < size; ++i ) { + final double[] row = extractor.apply( elements.get( i ) ); + if ( row == null || row.length < numColumns ) + continue; + System.arraycopy( row, 0, data, numColumns * i, numColumns ); + } + final DatasetAttributes attributes = new DatasetAttributes( + new long[] { numColumns, size }, + new int[] { numColumns, chunkSize }, + DataType.FLOAT64, + new BloscCompression() ); + writer.createDataset(dataset, attributes); + write( data, writer, dataset, attributes ); + } + + public static int[] readAsIntArray( final N5Reader reader, final String dataset, final String description ) + { + if ( !reader.datasetExists( dataset ) ) + { + LOG.debug( "No arrays found in group for " + description ); + return null; + } + if ( reader.getDatasetAttributes( dataset ).getNumDimensions() != 1 ) + { + throw new IllegalArgumentException( "Expected 1D array" ); + } + return convertToIntArray( readFully( reader, dataset ), description ); + } + + public static double[] readAsDoubleArray( final N5Reader reader, final String dataset, final String description ) + { + if ( !reader.datasetExists( dataset ) ) + { + LOG.debug( "No arrays found in group for " + description ); + return null; + } + if ( reader.getDatasetAttributes( dataset ).getNumDimensions() != 1 ) + { + throw new IllegalArgumentException( "Expected 1D array" ); + } + return convertToDoubleArray( readFully( reader, dataset ), description ); + } + + public static class FlattenedDoubles + { + private final double[] data; + + private final int[] size; + + FlattenedDoubles( final double[] data, final int[] size ) + { + this.data = data; + this.size = size; + } + + FlattenedDoubles( final double[] data, final long[] size ) + { + this( data, Util.long2int( size ) ); + } + + int[] size() + { + return size; + } + + double at( final int i0, final int i1 ) + { + assert size.length == 2; + return data[ i0 + size[ 0 ] * i1 ]; + } + + // TODO: remove until needed + double at( final int i0, final int i1, final int i2 ) + { + assert size.length == 3; + return data[ i0 + size[ 0 ] * ( i1 * i2 * size[ 1 ] ) ]; + } + + double[] rowAt( final int i1 ) + { + assert size.length == 2; + final double[] row = new double[ size[ 0 ] ]; + Arrays.setAll( row, i0 -> at( i0, i1 ) ); + return row; + } + } + + public static FlattenedDoubles readAsDoubleMatrix( final N5Reader reader, final String dataset, final String description ) + { + if ( !reader.datasetExists( dataset ) ) + { + LOG.debug( "No arrays found in group for " + description ); + return null; + } + final DatasetAttributes attributes = reader.getDatasetAttributes( dataset ); + if ( attributes.getNumDimensions() != 2 ) + { + throw new IllegalArgumentException( "Expected 2D array" ); + } + return new 
FlattenedDoubles( convertToDoubleArray( readFully( reader, dataset ), description ), attributes.getDimensions() ); + } + + public static class FlattenedInts + { + private final int[] data; + + private final int[] size; + + FlattenedInts( final int[] data, final int[] size ) + { + this.data = data; + this.size = size; + } + + FlattenedInts( final int[] data, final long[] size ) + { + this( data, Util.long2int( size ) ); + } + + int[] size() + { + return size; + } + + int at( final int i0, final int i1 ) + { + assert size.length == 2; + return data[ i0 + size[ 0 ] * i1 ]; + } + + int[] rowAt( final int i0 ) + { + assert size.length == 2; + final int[] row = new int[ size[ 1 ] ]; + Arrays.setAll( row, i1 -> at( i0, i1 ) ); + return row; + } + } + + public static FlattenedInts readAsIntMatrix( final N5Reader reader, final String dataset, final String description ) + { + if ( !reader.datasetExists( dataset ) ) + { + LOG.debug( "No arrays found in group for " + description ); + return null; + } + final DatasetAttributes attributes = reader.getDatasetAttributes( dataset ); + if ( attributes.getNumDimensions() != 2 ) + { + throw new IllegalArgumentException( "Expected 2D array" ); + } + return new FlattenedInts( convertToIntArray( readFully( reader, dataset ), description ), attributes.getDimensions() ); + } + + public static int[] convertToIntArray( final Object array, final String fieldName ) + { + if (array == null) + return null; + else if ( array instanceof int[] ) + return ( int[] ) array; + else if ( array instanceof long[] ) + return copyToIntArray( ( long[] ) array, a -> a.length, ( a, i ) -> ( int ) a[ i ] ); + else if ( array instanceof double[] ) + return copyToIntArray( ( double[] ) array, a -> a.length, ( a, i ) -> ( int ) a[ i ] ); + else if ( array instanceof float[] ) + return copyToIntArray( ( float[] ) array, a -> a.length, ( a, i ) -> ( int ) a[ i ] ); + else + throw new IllegalArgumentException( + "Unsupported data type for " + fieldName + ": " + + ( array != null ? array.getClass().getName() : "null" ) ); + } + + @FunctionalInterface + private interface IntValueAtIndex< T > + { + int apply( T array, int index ); + } + + private static < T > int[] copyToIntArray( final T array, final ToIntFunction< T > numElements, final IntValueAtIndex< T > elementAtIndex ) + { + final int[] ints = new int[ numElements.applyAsInt( array ) ]; + Arrays.setAll( ints, i -> elementAtIndex.apply( array, i ) ); + return ints; + } + + public static double[] convertToDoubleArray( final Object array, final String fieldName ) + { + if (array == null) + return null; + else if ( array instanceof double[] ) + return ( double[] ) array; + else if ( array instanceof int[] ) + return copyToDoubleArray( ( int[] ) array, a -> a.length, ( a, i ) -> a[ i ] ); + else if ( array instanceof long[] ) + return copyToDoubleArray( ( long[] ) array, a -> a.length, ( a, i ) -> a[ i ] ); + else if ( array instanceof float[] ) + return copyToDoubleArray( ( float[] ) array, a -> a.length, ( a, i ) -> a[ i ] ); + else + throw new IllegalArgumentException( + "Unsupported data type for " + fieldName + ": " + + ( array != null ? 
array.getClass().getName() : "null" ) ); + } + + @FunctionalInterface + private interface DoubleValueAtIndex< T > + { + double apply( T array, int index ); + } + + private static < T > double[] copyToDoubleArray( final T array, final ToIntFunction< T > numElements, final DoubleValueAtIndex< T > elementAtIndex ) + { + final double[] doubles = new double[ numElements.applyAsInt( array ) ]; + Arrays.setAll( doubles, i -> elementAtIndex.apply( array, i ) ); + return doubles; + } + + public static void verifyLength( final int[] array, final int expectedLength, final String name ) + { + if ( array != null && array.length != expectedLength ) + throw new IllegalArgumentException( "property " + name + " does not have expected length (" + array.length + " vs " + expectedLength + ")" ); + } + + public static void verifyLength( final double[] array, final int expectedLength, final String name ) + { + if ( array != null && array.length != expectedLength ) + throw new IllegalArgumentException( "property " + name + " does not have expected length (" + array.length + " vs " + expectedLength + ")" ); + } + + public static void verifyLength( final FlattenedDoubles array, final int expectedLength, final String name ) + { + if ( array != null && array.size()[ array.size().length - 1 ] != expectedLength ) + { + throw new IllegalArgumentException( "property " + name + " does not have expected length (" + array.size()[ array.size().length - 1 ] + " vs " + expectedLength + ")" ); + } + } + + public static void verifyLength( final FlattenedInts array, final int expectedLength, final String name ) + { + if ( array != null && array.size()[ array.size().length - 1 ] != expectedLength ) + { + throw new IllegalArgumentException( "property " + name + " does not have expected length (" + array.size()[ array.size().length - 1 ] + " vs " + expectedLength + ")" ); + } + } + + + // -- write dataset fully -- + + public static void write( + final Object src, + final N5Writer writer, + final String dataset, + final DatasetAttributes attributes ) + { + final int[] blockSize = attributes.getBlockSize(); + final long[] size = attributes.getDimensions(); + final int n = attributes.getNumDimensions(); + final DataType dataType = attributes.getDataType(); + + final CellGrid grid = new CellGrid( size, blockSize ); + + final int[] srcSize = Util.long2int( size ); + final long[] srcPos = new long[ n ]; + final int[] destSize = new int[ n ]; + final int[] destPos = new int[ n ]; + + final long[] gridPos = new long[ n ]; + final IntervalIterator gridIter = new IntervalIterator( grid.getGridDimensions() ); + while ( gridIter.hasNext() ) + { + gridIter.fwd(); + gridIter.localize( gridPos ); + grid.getCellDimensions( gridPos, srcPos, destSize ); + final DataBlock< ? > block = dataType.createDataBlock( destSize, gridPos ); + SubArrayCopy.copy( src, srcSize, Util.long2int( srcPos ), block.getData(), destSize, destPos, destSize ); + writer.writeBlock( dataset, attributes, block ); + } + } + + + // -- read dataset fully -- + + public static Object readFully( final N5Reader reader, final String dataset ) + { + final DatasetAttributes attributes = reader.getDatasetAttributes( dataset ); + final DataType dataType = attributes.getDataType(); + final int numElements = Util.safeInt( Intervals.numElements( attributes.getDimensions() ) ); + final Object dest = createArray( dataType ).apply( numElements ); + copy( dest, new FinalInterval( attributes.getDimensions() ), reader, dataset ); + return dest; + } + + private static IntFunction< ? 
> createArray( final DataType dataType ) + { + switch ( dataType ) + { + case INT8: + case UINT8: + return byte[]::new; + case INT16: + case UINT16: + return short[]::new; + case INT32: + case UINT32: + return int[]::new; + case INT64: + case UINT64: + return long[]::new; + case FLOAT32: + return float[]::new; + case FLOAT64: + return double[]::new; + case STRING: + return String[]::new; + case OBJECT: + return Object[]::new; + } + throw new IllegalArgumentException(); + } + + private static void copy( + final Object dest, + final Interval destInterval, + final N5Reader reader, + final String dataset ) + { + final DatasetAttributes attributes = reader.getDatasetAttributes( dataset ); + final int[] blockSize = attributes.getBlockSize(); + final int n = attributes.getNumDimensions(); + + final long[] gridMin = new long[ n ]; + final long[] gridSize = new long[ n ]; + for ( int d = 0; d < n; d++ ) + { + gridMin[ d ] = destInterval.min( d ) / blockSize[ d ]; + final long gridMax = destInterval.max( d ) / blockSize[ d ]; + gridSize[ d ] = gridMax + 1 - gridMin[ d ]; + } + + final long[] gridPos = new long[ n ]; + final long[] blockMin = new long[ n ]; + final int[] srcPos = new int[ n ]; + final int[] destSize = Util.long2int( destInterval.dimensionsAsLongArray() ); + final int[] destPos = new int[ n ]; + final IntervalIterator gridIter = new IntervalIterator( gridSize ); + while ( gridIter.hasNext() ) + { + gridIter.fwd(); + gridIter.localize( gridPos ); + Arrays.setAll( blockMin, d -> gridPos[ d ] * blockSize[ d ] ); + final DataBlock< ? > block = reader.readBlock( dataset, attributes, gridPos ); + final BlockInterval blockInterval = BlockInterval.wrap( blockMin, block.getSize() ); + final FinalInterval intersection = Intervals.intersect( blockInterval, destInterval ); + Arrays.setAll( srcPos, d -> ( int ) ( intersection.min( d ) - blockMin[ d ] ) ); + Arrays.setAll( destPos, d -> ( int ) ( intersection.min( d ) - destInterval.min( d ) ) ); + SubArrayCopy.copy( block.getData(), blockInterval.size(), srcPos, dest, destSize, destPos, Util.long2int( intersection.dimensionsAsLongArray() ) ); + } + } + + private GeffUtils() + { + // static utility methods. don't instantiate. + } +} diff --git a/src/main/java/org/mastodon/geff/ZarrEntity.java b/src/main/java/org/mastodon/geff/ZarrEntity.java deleted file mode 100644 index cd8b29d..0000000 --- a/src/main/java/org/mastodon/geff/ZarrEntity.java +++ /dev/null @@ -1,34 +0,0 @@ -/*- - * #%L - * geff-java - * %% - * Copyright (C) 2025 Ko Sugawara - * %% - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - * #L% - */ -package org.mastodon.geff; - -public interface ZarrEntity -{ - -} diff --git a/src/main/java/org/mastodon/geff/ZarrUtils.java b/src/main/java/org/mastodon/geff/ZarrUtils.java deleted file mode 100644 index 1f50c25..0000000 --- a/src/main/java/org/mastodon/geff/ZarrUtils.java +++ /dev/null @@ -1,806 +0,0 @@ -/*- - * #%L - * geff-java - * %% - * Copyright (C) 2025 Ko Sugawara - * %% - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. 
- * #L% - */ -package org.mastodon.geff; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.function.ToIntFunction; - -import org.mastodon.geff.function.ToDoubleArrayFunction; -import org.mastodon.geff.function.ToIntArrayFunction; - -import com.bc.zarr.ArrayParams; -import com.bc.zarr.DataType; -import com.bc.zarr.ZarrArray; -import com.bc.zarr.ZarrGroup; - -import ucar.ma2.InvalidRangeException; - -public class ZarrUtils -{ - - public static final int DEFAULT_CHUNK_SIZE = 1000; // Default chunk size if - // not specified - - public static ZarrGroup openSubGroups( ZarrGroup parentGroup, String subGroupName ) throws IOException - { - ZarrGroup subGroup = null; - for ( final String groupName : subGroupName.split( "/" ) ) - { - if ( subGroup == null ) - { - subGroup = parentGroup.openSubGroup( groupName ); - } - else - { - subGroup = subGroup.openSubGroup( groupName ); - } - } - return subGroup; - } - - public static ZarrGroup createSubGroups( ZarrGroup parentGroup, String subGroupName ) throws IOException - { - ZarrGroup subGroup = null; - for ( final String groupName : subGroupName.split( "/" ) ) - { - if ( subGroup == null ) - { - subGroup = parentGroup.createSubGroup( groupName ); - } - else - { - subGroup = subGroup.createSubGroup( groupName ); - } - } - return subGroup; - } - - /** - * Helper method to read chunked int arrays - */ - public static int[] readChunkedIntArray( final ZarrGroup group, final String arrayPath, final String description ) - throws IOException - { - if ( group.getArrayKeys() == null || group.getArrayKeys().isEmpty() ) - { - System.out.println( "No arrays found in group for " + description ); - return new int[ 0 ]; // Return empty array if no arrays found - } - try - { - // First try reading as a whole array - final ZarrArray array = group.openArray( arrayPath ); - final Object data = array.read(); - return convertToIntArray( data, description ); - } - catch ( final Exception e ) - { - - // Try reading individual chunks if whole array reading fails - final List< Integer > allData = new ArrayList<>(); - - // Look for numeric chunk keys (0, 1, 2, etc.) - final ZarrGroup arrayGroup = openSubGroups( group, arrayPath ); - - final String[] chunkKeys = arrayGroup.getArrayKeys().toArray( new String[ 0 ] ); - - for ( final String chunkKey : chunkKeys ) - { - try - { - if ( chunkKey.matches( "\\d+(\\.\\d+)?" 
) ) - { // numeric chunk key - final ZarrArray chunkArray = arrayGroup.openArray( chunkKey ); - final Object chunkData = chunkArray.read(); - final int[] chunkValues = convertToIntArray( chunkData, description + " chunk " + chunkKey ); - for ( final int value : chunkValues ) - { - allData.add( value ); - } - System.out - .println( "Read chunk " + chunkKey + " with " + chunkValues.length + " " + description ); - } - } - catch ( final Exception chunkException ) - { - System.err.println( "Could not read chunk " + chunkKey + " for " + description + ": " - + chunkException.getMessage() ); - } - } - - return allData.stream().mapToInt( Integer::intValue ).toArray(); - } - } - - /** - * Helper method to read chunked double arrays - */ - public static double[] readChunkedDoubleArray( final ZarrGroup group, final String arrayPath, final String description ) - throws IOException - { - if ( group.getArrayKeys() == null || group.getArrayKeys().isEmpty() ) - { - System.out.println( "No arrays found in group for " + description ); - return new double[ 0 ]; // Return empty array if no arrays found - } - try - { - // First try reading as a whole array - final ZarrArray array = group.openArray( arrayPath ); - final Object data = array.read(); - return convertToDoubleArray( data, description ); - } - catch ( final Exception e ) - { - - // Try reading individual chunks if whole array reading fails - final List< Double > allData = new ArrayList<>(); - - // Look for numeric chunk keys (0, 1, 2, etc.) - final ZarrGroup arrayGroup = openSubGroups( group, arrayPath ); - - final String[] chunkKeys = arrayGroup.getArrayKeys().toArray( new String[ 0 ] ); - - for ( final String chunkKey : chunkKeys ) - { - try - { - if ( chunkKey.matches( "\\d+(\\.\\d+)?" ) ) - { // numeric chunk key - final ZarrArray chunkArray = arrayGroup.openArray( chunkKey ); - final Object chunkData = chunkArray.read(); - final double[] chunkValues = convertToDoubleArray( chunkData, description + " chunk " + chunkKey ); - for ( final double value : chunkValues ) - { - allData.add( value ); - } - System.out - .println( "Read chunk " + chunkKey + " with " + chunkValues.length + " " + description ); - } - } - catch ( final Exception chunkException ) - { - System.err.println( "Could not read chunk " + chunkKey + " for " + description + ": " - + chunkException.getMessage() ); - } - } - - return allData.stream().mapToDouble( Double::doubleValue ).toArray(); - } - } - - /** - * Helper method to read chunked integer matrix - */ - public static int[][] readChunkedIntMatrix( final ZarrGroup group, final String arrayPath, final String description ) - throws IOException - { - if ( group.getArrayKeys() == null || group.getArrayKeys().isEmpty() ) - { - System.out.println( "No arrays found in group for " + description ); - return new int[ 0 ][]; // Return empty matrix if no arrays found - } - try - { - // First try reading as a whole array - final ZarrArray array = group.openArray( arrayPath ); - final Object data = array.read(); - return copyToIntMatrix( data, description, array.getShape() ); - } - catch ( final Exception e ) - { - - // Try reading individual chunks if whole array reading fails - final List< int[] > allData = new ArrayList<>(); - - // Look for numeric chunk keys (0, 1, 2, etc.) - final ZarrGroup arrayGroup = openSubGroups( group, arrayPath ); - - final String[] chunkKeys = arrayGroup.getArrayKeys().toArray( new String[ 0 ] ); - - for ( final String chunkKey : chunkKeys ) - { - try - { - if ( chunkKey.matches( "\\d+(\\.\\d+)?" 
) ) - { // numeric chunk key - final ZarrArray chunkArray = arrayGroup.openArray( chunkKey ); - final Object chunkData = chunkArray.read(); - final int[][] chunkMatrix = copyToIntMatrix( chunkData, description, chunkArray.getShape() ); - for ( final int[] row : chunkMatrix ) - { - allData.add( row ); - } - System.out.println( - "Read " + description + " chunk " + chunkKey + " with " + chunkMatrix.length ); - } - } - catch ( final Exception chunkException ) - { - System.err - .println( "Could not read " + description + " chunk " + chunkKey + ": " - + chunkException.getMessage() ); - } - } - - return allData.toArray( new int[ 0 ][] ); - } - } - - /** - * Helper method to read chunked double matrix - */ - public static double[][] readChunkedDoubleMatrix( final ZarrGroup group, final String arrayPath, final String description ) - throws IOException - { - if ( group.getArrayKeys() == null || group.getArrayKeys().isEmpty() ) - { - System.out.println( "No arrays found in group for " + description ); - return new double[ 0 ][]; // Return empty matrix if no arrays found - } - try - { - // First try reading as a whole array - final ZarrArray array = group.openArray( arrayPath ); - final Object data = array.read(); - return copyToDoubleMatrix( data, description, array.getShape() ); - } - catch ( final Exception e ) - { - - // Try reading individual chunks if whole array reading fails - final List< double[] > allData = new ArrayList<>(); - - // Look for numeric chunk keys (0, 1, 2, etc.) - final ZarrGroup arrayGroup = openSubGroups( group, arrayPath ); - - final String[] chunkKeys = arrayGroup.getArrayKeys().toArray( new String[ 0 ] ); - - for ( final String chunkKey : chunkKeys ) - { - try - { - if ( chunkKey.matches( "\\d+(\\.\\d+)?" ) ) - { // numeric chunk key - final ZarrArray chunkArray = arrayGroup.openArray( chunkKey ); - final Object chunkData = chunkArray.read(); - final double[][] chunkMatrix = copyToDoubleMatrix( chunkData, description, chunkArray.getShape() ); - for ( final double[] row : chunkMatrix ) - { - allData.add( row ); - } - System.out.println( - "Read " + description + " chunk " + chunkKey + " with " + chunkMatrix.length ); - } - } - catch ( final Exception chunkException ) - { - System.err - .println( "Could not read " + description + " chunk " + chunkKey + ": " - + chunkException.getMessage() ); - } - } - - return allData.toArray( new double[ 0 ][] ); - } - } - - public static int getChunkSize( final String zarrPath ) throws IOException, InvalidRangeException - { - try - { - final ZarrGroup group = ZarrGroup.open( zarrPath + "/nodes" ); - return group.openArray( "ids" ).getChunks()[ 0 ]; - } - catch ( final IOException e ) - { - // If the path doesn't exist, return a default chunk size - System.out.println( "Path doesn't exist, using default chunk size: " + e.getMessage() ); - return DEFAULT_CHUNK_SIZE; // Default chunk size - } - } - - /** - * Helper method to write chunked int attributes - */ - public static < T extends ZarrEntity > void writeChunkedIntAttribute( final List< T > nodes, final ZarrGroup attrsGroup, - final String subGroupName, - final int chunkSize, final ToIntFunction< T > extractor ) - throws IOException, InvalidRangeException - { - - final int totalNodes = nodes.size(); - - // Create the attribute subgroup - final ZarrGroup valuesGroup = createSubGroups( attrsGroup, subGroupName ); - - // Create a single ZarrArray for all values with proper chunking - final ZarrArray valuesArray = valuesGroup.createArray( "", new ArrayParams() - .shape( totalNodes ) - 
.chunks( chunkSize ) - .dataType( DataType.i4 ) ); - - // Write data in chunks - int chunkIndex = 0; - for ( int startIdx = 0; startIdx < totalNodes; startIdx += chunkSize ) - { - final int endIdx = Math.min( startIdx + chunkSize, totalNodes ); - final int currentChunkSize = endIdx - startIdx; - - // Prepare chunk data array - final int[] chunkData = new int[ currentChunkSize ]; - - // Fill chunk data array - for ( int i = 0; i < currentChunkSize; i++ ) - { - chunkData[ i ] = extractor.applyAsInt( nodes.get( startIdx + i ) ); - } - - // Write chunk at specific offset - valuesArray.write( chunkData, new int[] { currentChunkSize }, new int[] { startIdx } ); - - System.out.println( "- Wrote " + subGroupName + " chunk " + chunkIndex + ": " + currentChunkSize + " values" ); - chunkIndex++; - } - } - - /** - * Helper method to write chunked double attributes - */ - public static < T extends ZarrEntity > void writeChunkedDoubleAttribute( final List< T > nodes, final ZarrGroup attrsGroup, - final String subGroupName, - final int chunkSize, final java.util.function.ToDoubleFunction< T > extractor ) - throws IOException, InvalidRangeException - { - - final int totalNodes = nodes.size(); - - // Create the attribute subgroup - final ZarrGroup valuesGroup = createSubGroups( attrsGroup, subGroupName ); - - // Create a single ZarrArray for all values with proper chunking - final ZarrArray valuesArray = valuesGroup.createArray( "", new ArrayParams() - .shape( totalNodes ) - .chunks( chunkSize ) - .dataType( DataType.f8 ) ); - - // Write data in chunks - int chunkIndex = 0; - for ( int startIdx = 0; startIdx < totalNodes; startIdx += chunkSize ) - { - final int endIdx = Math.min( startIdx + chunkSize, totalNodes ); - final int currentChunkSize = endIdx - startIdx; - - // Prepare chunk data array - final double[] chunkData = new double[ currentChunkSize ]; - - // Fill chunk data array - for ( int i = 0; i < currentChunkSize; i++ ) - { - chunkData[ i ] = extractor.applyAsDouble( nodes.get( startIdx + i ) ); - } - - // Write chunk at specific offset - valuesArray.write( chunkData, new int[] { currentChunkSize }, new int[] { startIdx } ); - - System.out.println( "- Wrote " + subGroupName + " chunk " + chunkIndex + ": " + currentChunkSize + " values" ); - chunkIndex++; - } - } - - /** - * Helper method to write chunked integer matrices - */ - public static < T extends ZarrEntity > void writeChunkedIntMatrix( final List< T > nodes, final ZarrGroup attrsGroup, - final String subGroupName, - final int chunkSize, final ToIntArrayFunction< T > extractor, final int numColumns ) - throws IOException, InvalidRangeException - { - final int totalNodes = nodes.size(); - - // Create the attribute subgroup - final ZarrGroup valuesGroup = createSubGroups( attrsGroup, subGroupName ); - - // Create a single ZarrArray for all data with proper chunking - final ZarrArray array2d = valuesGroup.createArray( "", new ArrayParams() - .shape( totalNodes, numColumns ) - .chunks( new int[] { chunkSize, numColumns } ) - .dataType( DataType.i8 ) ); - - // Write data in chunks - int chunkIndex = 0; - for ( int startIdx = 0; startIdx < totalNodes; startIdx += chunkSize ) - { - final int endIdx = Math.min( startIdx + chunkSize, totalNodes ); - final int currentChunkSize = endIdx - startIdx; - - // Prepare chunk data array - final int[] chunkData = new int[ currentChunkSize * numColumns ]; - - // Fill chunk data array - for ( int i = 0; i < currentChunkSize; i++ ) - { - final T node = nodes.get( startIdx + i ); - final int[] values = 
extractor.applyAsIntArray( node ); - if ( values != null && values.length == numColumns ) - { - for ( int j = 0; j < numColumns; j++ ) - { - chunkData[ i * numColumns + j ] = values[ j ]; - } - } - else - { - for ( int j = 0; j < numColumns; j++ ) - { - chunkData[ i * numColumns + j ] = 0; // Default to zero - // if not set - } - } - } - - // Write chunk at specific offset - array2d.write( chunkData, new int[] { currentChunkSize, numColumns }, - new int[] { startIdx, 0 } ); - - System.out.println( "- Wrote " + subGroupName + " chunk " + chunkIndex + ": " + currentChunkSize + " values" ); - chunkIndex++; - } - } - - /** - * Helper method to write chunked double matrices - */ - public static < T extends ZarrEntity > void writeChunkedDoubleMatrix( final List< T > nodes, final ZarrGroup attrsGroup, - final String subGroupName, - final int chunkSize, final ToDoubleArrayFunction< T > extractor, final int numColumns ) - throws IOException, InvalidRangeException - { - final int totalNodes = nodes.size(); - - // Create the attribute subgroup - final ZarrGroup valuesGroup = createSubGroups( attrsGroup, subGroupName ); - - // Create a single ZarrArray for all data with proper chunking - final ZarrArray array2d = valuesGroup.createArray( "", new ArrayParams() - .shape( totalNodes, numColumns ) - .chunks( new int[] { chunkSize, numColumns } ) - .dataType( DataType.f4 ) ); - - // Write data in chunks - int chunkIndex = 0; - for ( int startIdx = 0; startIdx < totalNodes; startIdx += chunkSize ) - { - final int endIdx = Math.min( startIdx + chunkSize, totalNodes ); - final int currentChunkSize = endIdx - startIdx; - - // Prepare chunk data array - final double[] chunkData = new double[ currentChunkSize * numColumns ]; - - // Fill chunk data array - for ( int i = 0; i < currentChunkSize; i++ ) - { - final T node = nodes.get( startIdx + i ); - final double[] values = extractor.applyAsDoubleArray( node ); - if ( values != null && values.length == numColumns ) - { - for ( int j = 0; j < numColumns; j++ ) - { - chunkData[ i * numColumns + j ] = values[ j ]; - } - } - else - { - for ( int j = 0; j < numColumns; j++ ) - { - chunkData[ i * numColumns + j ] = 0.0; // Default to - // zero if not - // set - } - } - } - - // Write chunk at specific offset - array2d.write( chunkData, new int[] { currentChunkSize, numColumns }, - new int[] { startIdx, 0 } ); - - System.out.println( "- Wrote " + subGroupName + " chunk " + chunkIndex + ": " + currentChunkSize + " values" ); - chunkIndex++; - } - } - - // Helper methods for type conversion - public static int[] convertToIntArray( final Object data, final String fieldName ) - { - if ( data instanceof int[] ) - { - return ( int[] ) data; - } - else if ( data instanceof long[] ) - { - final long[] longArray = ( long[] ) data; - final int[] intArray = new int[ longArray.length ]; - for ( int i = 0; i < longArray.length; i++ ) - { - intArray[ i ] = ( int ) longArray[ i ]; - } - return intArray; - } - else if ( data instanceof double[] ) - { - final double[] doubleArray = ( double[] ) data; - final int[] intArray = new int[ doubleArray.length ]; - for ( int i = 0; i < doubleArray.length; i++ ) - { - intArray[ i ] = ( int ) doubleArray[ i ]; - } - return intArray; - } - else if ( data instanceof float[] ) - { - final float[] floatArray = ( float[] ) data; - final int[] intArray = new int[ floatArray.length ]; - for ( int i = 0; i < floatArray.length; i++ ) - { - intArray[ i ] = ( int ) floatArray[ i ]; - } - return intArray; - } - else - { - throw new IllegalArgumentException( - 
"Unsupported data type for " + fieldName + ": " + - ( data != null ? data.getClass().getName() : "null" ) ); - } - } - - public static double[] convertToDoubleArray( final Object data, final String fieldName ) - { - if ( data instanceof double[] ) - { - return ( double[] ) data; - } - else if ( data instanceof float[] ) - { - final float[] floatArray = ( float[] ) data; - final double[] doubleArray = new double[ floatArray.length ]; - for ( int i = 0; i < floatArray.length; i++ ) - { - doubleArray[ i ] = floatArray[ i ]; - } - return doubleArray; - } - else if ( data instanceof int[] ) - { - final int[] intArray = ( int[] ) data; - final double[] doubleArray = new double[ intArray.length ]; - for ( int i = 0; i < intArray.length; i++ ) - { - doubleArray[ i ] = intArray[ i ]; - } - return doubleArray; - } - else if ( data instanceof long[] ) - { - final long[] longArray = ( long[] ) data; - final double[] doubleArray = new double[ longArray.length ]; - for ( int i = 0; i < longArray.length; i++ ) - { - doubleArray[ i ] = longArray[ i ]; - } - return doubleArray; - } - else - { - throw new IllegalArgumentException( - "Unsupported data type for " + fieldName + ": " + - ( data != null ? data.getClass().getName() : "null" ) ); - } - } - - public static int[][] copyToIntMatrix( final Object data, final String description, final int[] shape ) - { - if ( shape.length != 2 ) - throw new IllegalArgumentException( "Shape must have exactly 2 dimensions for a matrix, but had " + shape.length ); - - final int N = shape[ 0 ]; - final int nel = shape[ 1 ]; - if ( data.getClass().isArray() ) - { - final Class< ? > componentType = data.getClass().getComponentType(); - if ( componentType.isPrimitive() ) - { - if ( componentType == int.class ) - { - final int[] arr = ( int[] ) data; - final int[][] matrix = new int[ N ][ nel ]; - for ( int i = 0; i < N; i++ ) - for ( int j = 0; j < nel; j++ ) - matrix[ i ][ j ] = arr[ i * nel + j ]; - - return matrix; - } - else if ( componentType == byte.class ) - { - final byte[] byteArray = ( byte[] ) data; - final int[][] matrix = new int[ N ][ nel ]; - for ( int i = 0; i < N; i++ ) - for ( int j = 0; j < nel; j++ ) - matrix[ i ][ j ] = byteArray[ i * nel + j ]; - - return matrix; - } - else if ( componentType == short.class ) - { - final short[] shortArray = ( short[] ) data; - final int[][] matrix = new int[ N ][ nel ]; - for ( int i = 0; i < N; i++ ) - for ( int j = 0; j < nel; j++ ) - matrix[ i ][ j ] = shortArray[ i * nel + j ]; - - return matrix; - } - else if ( componentType == long.class ) - { - final long[] longArray = ( long[] ) data; - final int[][] matrix = new int[ N ][ nel ]; - for ( int i = 0; i < N; i++ ) - for ( int j = 0; j < nel; j++ ) - matrix[ i ][ j ] = ( int ) longArray[ i * nel + j ]; - - return matrix; - } - else if ( componentType == float.class ) - { - final float[] floatArray = ( float[] ) data; - final int[][] matrix = new int[ N ][ nel ]; - for ( int i = 0; i < N; i++ ) - for ( int j = 0; j < nel; j++ ) - matrix[ i ][ j ] = ( int ) floatArray[ i * nel + j ]; - - return matrix; - } - else if ( componentType == double.class ) - { - final double[] doubleArray = ( double[] ) data; - final int[][] matrix = new int[ N ][ nel ]; - for ( int i = 0; i < N; i++ ) - for ( int j = 0; j < nel; j++ ) - matrix[ i ][ j ] = ( int ) doubleArray[ i * nel + j ]; - - return matrix; - } - else - { - throw new IllegalArgumentException( - "Unsupported primitive type for " + description + ": " + componentType.getName() ); - } - } - else - { - throw new 
IllegalArgumentException( "The array is not of a primitive type." ); - } - } - else - { - throw new IllegalArgumentException( "The object is not an array." ); - } - } - - public static double[][] copyToDoubleMatrix( final Object data, final String description, final int[] shape ) - { - if ( shape.length != 2 ) - throw new IllegalArgumentException( "Shape must have exactly 2 dimensions for a matrix, but had " + shape.length ); - - final int N = shape[ 0 ]; - final int nel = shape[ 1 ]; - if ( data.getClass().isArray() ) - { - final Class< ? > componentType = data.getClass().getComponentType(); - if ( componentType.isPrimitive() ) - { - if ( componentType == int.class ) - { - final int[] arr = ( int[] ) data; - final double[][] matrix = new double[ N ][ nel ]; - for ( int i = 0; i < N; i++ ) - for ( int j = 0; j < nel; j++ ) - matrix[ i ][ j ] = arr[ i * nel + j ]; - - return matrix; - } - else if ( componentType == byte.class ) - { - final byte[] byteArray = ( byte[] ) data; - final double[][] matrix = new double[ N ][ nel ]; - for ( int i = 0; i < N; i++ ) - for ( int j = 0; j < nel; j++ ) - matrix[ i ][ j ] = byteArray[ i * nel + j ]; - - return matrix; - } - else if ( componentType == short.class ) - { - final short[] shortArray = ( short[] ) data; - final double[][] matrix = new double[ N ][ nel ]; - for ( int i = 0; i < N; i++ ) - for ( int j = 0; j < nel; j++ ) - matrix[ i ][ j ] = shortArray[ i * nel + j ]; - - return matrix; - } - else if ( componentType == long.class ) - { - final long[] longArray = ( long[] ) data; - final double[][] matrix = new double[ N ][ nel ]; - for ( int i = 0; i < N; i++ ) - for ( int j = 0; j < nel; j++ ) - matrix[ i ][ j ] = longArray[ i * nel + j ]; - - return matrix; - } - else if ( componentType == float.class ) - { - final float[] floatArray = ( float[] ) data; - final double[][] matrix = new double[ N ][ nel ]; - for ( int i = 0; i < N; i++ ) - for ( int j = 0; j < nel; j++ ) - matrix[ i ][ j ] = floatArray[ i * nel + j ]; - - return matrix; - } - else if ( componentType == double.class ) - { - final double[] doubleArray = ( double[] ) data; - final double[][] matrix = new double[ N ][ nel ]; - for ( int i = 0; i < N; i++ ) - for ( int j = 0; j < nel; j++ ) - matrix[ i ][ j ] = doubleArray[ i * nel + j ]; - - return matrix; - } - else - { - throw new IllegalArgumentException( - "Unsupported primitive type for " + description + ": " + componentType.getName() ); - } - } - else - { - throw new IllegalArgumentException( "The array is not of a primitive type." ); - } - } - else - { - throw new IllegalArgumentException( "The object is not an array." ); - } - } -} diff --git a/src/main/java/org/mastodon/geff/function/ToDoubleArrayFunction.java b/src/main/java/org/mastodon/geff/function/ToDoubleArrayFunction.java deleted file mode 100644 index 59a92c2..0000000 --- a/src/main/java/org/mastodon/geff/function/ToDoubleArrayFunction.java +++ /dev/null @@ -1,59 +0,0 @@ -/*- - * #%L - * geff-java - * %% - * Copyright (C) 2025 Ko Sugawara - * %% - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. 
- * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - * #L% - */ -package org.mastodon.geff.function; - -import java.util.function.Function; - -/** - * Represents a function that produces a double-array result. This is the - * {@code double} array-producing primitive specialization for {@link Function}. - * - *
- * This is a functional interface whose - * functional method is {@link #applyAsDoubleArray(Object)}. - * - * @param - * the type of the input to the function - * - * @see Function - * @since 1.8 - */ -@FunctionalInterface -public interface ToDoubleArrayFunction< T > -{ - - /** - * Applies this function to the given argument. - * - * @param value - * the function argument - * @return the function result - */ - double[] applyAsDoubleArray( T value ); -} diff --git a/src/main/java/org/mastodon/geff/function/ToIntArrayFunction.java b/src/main/java/org/mastodon/geff/function/ToIntArrayFunction.java deleted file mode 100644 index 13b138c..0000000 --- a/src/main/java/org/mastodon/geff/function/ToIntArrayFunction.java +++ /dev/null @@ -1,59 +0,0 @@ -/*- - * #%L - * geff-java - * %% - * Copyright (C) 2025 Ko Sugawara - * %% - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, - * this list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the documentation - * and/or other materials provided with the distribution. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" - * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE - * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE - * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE - * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR - * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF - * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS - * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN - * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) - * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE - * POSSIBILITY OF SUCH DAMAGE. - * #L% - */ -package org.mastodon.geff.function; - -import java.util.function.Function; - -/** - * Represents a function that produces a int-array result. This is the - * {@code int} array-producing primitive specialization for {@link Function}. - * - *
- * This is a functional interface whose - * functional method is {@link #applyAsIntArray(Object)}. - * - * @param - * the type of the input to the function - * - * @see Function - * @since 1.8 - */ -@FunctionalInterface -public interface ToIntArrayFunction< T > -{ - - /** - * Applies this function to the given argument. - * - * @param value - * the function argument - * @return the function result - */ - int[] applyAsIntArray( T value ); -} diff --git a/src/main/java/org/mastodon/geff/geom/GeffSerializableVertex.java b/src/main/java/org/mastodon/geff/geom/GeffSerializableVertex.java index aa03bc0..7df9625 100644 --- a/src/main/java/org/mastodon/geff/geom/GeffSerializableVertex.java +++ b/src/main/java/org/mastodon/geff/geom/GeffSerializableVertex.java @@ -1,8 +1,6 @@ package org.mastodon.geff.geom; -import org.mastodon.geff.ZarrEntity; - -public class GeffSerializableVertex implements ZarrEntity +public class GeffSerializableVertex { final double x; @@ -18,5 +16,4 @@ public double[] getCoordinates() { return new double[] { x, y }; } - } diff --git a/src/test/java/org/mastodon/geff/ChunkedWriteTest.java b/src/test/java/org/mastodon/geff/ChunkedWriteTest.java index 65dda3e..f996643 100644 --- a/src/test/java/org/mastodon/geff/ChunkedWriteTest.java +++ b/src/test/java/org/mastodon/geff/ChunkedWriteTest.java @@ -6,13 +6,13 @@ * %% * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: - * + * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. 
- * + * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -28,8 +28,6 @@ */ package org.mastodon.geff; -import ucar.ma2.InvalidRangeException; - import java.io.IOException; import java.util.ArrayList; import java.util.List; @@ -60,7 +58,7 @@ public static void main( String[] args ) /** * Test writing nodes with chunked structure */ - private static void testNodeChunkedWriting() throws IOException, InvalidRangeException + private static void testNodeChunkedWriting() { System.out.println( "=== Testing Node Chunked Writing ===" ); @@ -92,7 +90,7 @@ private static void testNodeChunkedWriting() throws IOException, InvalidRangeExc /** * Test writing edges with chunked structure */ - private static void testEdgeChunkedWriting() throws IOException, InvalidRangeException + private static void testEdgeChunkedWriting() { System.out.println( "\n=== Testing Edge Chunked Writing ===" ); @@ -121,7 +119,7 @@ private static void testEdgeChunkedWriting() throws IOException, InvalidRangeExc /** * Test writing metadata with GEFF schema compliance */ - private static void testMetadataWriting() throws IOException, InvalidRangeException + private static void testMetadataWriting() throws IOException { System.out.println( "\n=== Testing Metadata Writing ===" ); diff --git a/src/test/java/org/mastodon/geff/GeffCreateTest.java b/src/test/java/org/mastodon/geff/GeffCreateTest.java index 4dc0cd9..9e3b206 100644 --- a/src/test/java/org/mastodon/geff/GeffCreateTest.java +++ b/src/test/java/org/mastodon/geff/GeffCreateTest.java @@ -6,13 +6,13 @@ * %% * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: - * + * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. - * + * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -32,12 +32,10 @@ import java.util.ArrayList; import java.util.List; -import ucar.ma2.InvalidRangeException; - public class GeffCreateTest { - public static void main( String[] args ) throws IOException, InvalidRangeException + public static void main( String[] args ) throws IOException { List< GeffNode > writeNodes = new ArrayList<>(); GeffNode node0 = new GeffNode.Builder() diff --git a/src/test/java/org/mastodon/geff/GeffTest.java b/src/test/java/org/mastodon/geff/GeffTest.java index ca7dc08..1766503 100644 --- a/src/test/java/org/mastodon/geff/GeffTest.java +++ b/src/test/java/org/mastodon/geff/GeffTest.java @@ -6,13 +6,13 @@ * %% * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: - * + * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. 
Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. - * + * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -178,7 +178,7 @@ void testMetadataValidation() // Test invalid metadata - create axes with invalid bounds GeffMetadata invalidMetadata = new GeffMetadata(); - invalidMetadata.setGeffVersion( "0.1" ); + invalidMetadata.setGeffVersion( "0.2" ); invalidMetadata.setDirected( false ); // Create invalid axes (min > max) @@ -279,9 +279,9 @@ void testVersionValidationEdgeCases() // Test various valid version formats String[] validVersions = { - "0.0", "0.1", "0.2", "0.3", "0.4", - "0.1.1", "0.2.0", "0.3.5", - "0.2.2.dev20", "0.1.0-alpha.1", "0.3.0-beta.2+build.123" + "0.2", "0.3", "0.4", + "0.2.0", "0.3.5", + "0.2.2.dev20", "0.2.0-alpha.1", "0.3.0-beta.2+build.123" }; for ( String version : validVersions ) @@ -293,7 +293,7 @@ void testVersionValidationEdgeCases() } // Test invalid versions - String[] invalidVersions = { "1.0", "invalid", "0.1..x" }; + String[] invalidVersions = { "1.0", "0.6", "invalid", "0.1..x" }; for ( String version : invalidVersions ) { diff --git a/src/test/java/org/mastodon/geff/VersionPatternTest.java b/src/test/java/org/mastodon/geff/VersionPatternTest.java index 0d556f2..04832e5 100644 --- a/src/test/java/org/mastodon/geff/VersionPatternTest.java +++ b/src/test/java/org/mastodon/geff/VersionPatternTest.java @@ -6,13 +6,13 @@ * %% * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: - * + * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. - * + * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE @@ -44,15 +44,15 @@ public void testValidVersionPatterns() { // Test cases for different version formats that should be accepted String[] validVersions = { - "0.1", // Basic major.minor - "0.1.1", // With patch version + "0.2", // Basic major.minor + "0.2.1", // With patch version "0.2.2", // Another patch version "0.2.2.dev20", // Development version "0.2.2.dev20+g611e7a2", // With git hash "0.2.2.dev20+g611e7a2.d20250719", // Full development version "0.3.0-alpha.1", // Alpha version - "0.1.0-beta.2+build.123", // Beta with build metadata - "0.0.5.rc1", // Release candidate + "0.2.0-beta.2+build.123", // Beta with build metadata + "0.2.5.rc1", // Release candidate }; for ( String version : validVersions )