----- first changed file (filename not shown) -----
@@ -9,7 +9,6 @@
import java.util.List;
import java.util.Map;

import org.janelia.saalfeldlab.n5.N5FSWriter;
import org.janelia.saalfeldlab.n5.N5Writer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -135,13 +134,13 @@ public NeuroglancerAttributes(final List<Double> stackResolutionValues,
/**
* Writes all attribute.json files required by neuroglancer to display the specified dataset.
*
* @param n5BasePath base path for n5.
* @param n5Writer N5 writer to use for writing the attributes.
* @param fullScaleDatasetPath path of the full scale data set.
*
* @throws IOException
* if the writes fail for any reason.
*/
public void write(final Path n5BasePath,
public void write(final N5Writer n5Writer,
final Path fullScaleDatasetPath)
throws IOException {

@@ -150,10 +149,8 @@ public void write(final Path n5BasePath,
final Path ngAttributesPath = isMultiScaleDataset ?
fullScaleDatasetPath.getParent() : fullScaleDatasetPath;

LOG.info("write: entry, n5BasePath={}, fullScaleDatasetPath={}, ngAttributesPath={}",
n5BasePath, fullScaleDatasetPath, ngAttributesPath);

final N5Writer n5Writer = new N5FSWriter(n5BasePath.toAbsolutePath().toString());
LOG.info("write: entry, n5Base={}, fullScaleDatasetPath={}, ngAttributesPath={}",
n5Writer.getURI(), fullScaleDatasetPath, ngAttributesPath);

// Neuroglancer recursively looks for attribute.json files from root path and stops at
// the first subdirectory without an attributes.json file.
@@ -164,7 +161,7 @@ public void write(final Path n5BasePath,
for (Path path = ngAttributesPath.getParent();
(path != null) && (! path.endsWith("/"));
path = path.getParent()) {
LOG.info("write: saving supported attribute to {}{}/attributes.json", n5BasePath, path);
LOG.info("write: saving supported attribute to {}{}/attributes.json", n5Writer.getURI(), path);
n5Writer.setAttribute(path.toString(), SUPPORTED_KEY, true);
}

@@ -180,15 +177,15 @@ public void write(final Path n5BasePath,
attributes.put("pixelResolution", pixelResolution);
attributes.put("translate", translate);

LOG.info("write: saving neuroglancer attributes to {}{}/attributes.json", n5BasePath, ngAttributesPath);
LOG.info("write: saving neuroglancer attributes to {}{}/attributes.json", n5Writer.getURI(), ngAttributesPath);
n5Writer.setAttributes(ngAttributesPath.toString(), attributes);

if (isMultiScaleDataset) {
for (int scaleLevel = 0; scaleLevel < scales.size(); scaleLevel++) {
writeScaleLevelTransformAttributes(scaleLevel,
scales.get(scaleLevel),
n5Writer,
n5BasePath,
n5Writer.getURI().toString(),
ngAttributesPath);
}
}
@@ -197,16 +194,18 @@ public void write(final Path n5BasePath,
private void writeScaleLevelTransformAttributes(final int scaleLevel,
final List<Integer> scaleLevelFactors,
final N5Writer n5Writer,
final Path n5BasePath,
final String n5Base,
final Path ngAttributesPath)
throws IOException {

final String scaleName = "s" + scaleLevel;
final Path scaleAttributesPath = Paths.get(ngAttributesPath.toString(), scaleName);

final Path scaleLevelDirectoryPath = Paths.get(n5BasePath.toString(), ngAttributesPath.toString(), scaleName);
if (! scaleLevelDirectoryPath.toFile().exists()) {
throw new IOException(scaleLevelDirectoryPath.toAbsolutePath() + " does not exist");
if (n5Base.startsWith("/") || n5Base.startsWith("\\")) {
final Path scaleLevelDirectoryPath = Paths.get(n5Base, ngAttributesPath.toString(), scaleName);
if (! scaleLevelDirectoryPath.toFile().exists()) {
throw new IOException(scaleLevelDirectoryPath.toAbsolutePath() + " does not exist");
}
}

final Map<String, Object> transformAttributes = new HashMap<>();
@@ -232,7 +231,7 @@ private void writeScaleLevelTransformAttributes(final int scaleLevel,
final Map<String, Object> attributes = new HashMap<>();
attributes.put("transform", transformAttributes);

LOG.info("writeScaleLevelTransformAttributes: saving {}{}/attributes.json", n5BasePath, scaleAttributesPath);
LOG.info("writeScaleLevelTransformAttributes: saving {}{}/attributes.json", n5Base, scaleAttributesPath);
n5Writer.setAttributes(scaleAttributesPath.toString(), attributes);
}
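
For orientation, a minimal caller of the refactored write(N5Writer, Path) signature could look like the sketch below. The container path and dataset path are placeholders, ngAttributes stands for an already constructed NeuroglancerAttributes instance, N5FSWriter is only one of the N5Writer implementations that can now be passed in, and exception handling is omitted.

import java.nio.file.Paths;
import org.janelia.saalfeldlab.n5.N5FSWriter;
import org.janelia.saalfeldlab.n5.N5Writer;

// Hypothetical caller: open a filesystem-backed writer and pass it to write(...)
// instead of handing over a base path. All paths here are placeholder examples.
try (final N5Writer n5Writer = new N5FSWriter("/tmp/example.n5")) {
    ngAttributes.write(n5Writer, Paths.get("/render/example_stack/s0"));
}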

----- next changed file (filename not shown) -----
@@ -132,7 +132,7 @@ private static void createDataSet(final List<CrossCorrelationWithNextRegionalDat
stackResolutionUnit,
rowCount,
columnCount);
ngAttributes.write(Paths.get(basePath), Paths.get(datasetName));
ngAttributes.write(n5Writer, Paths.get(datasetName));

final String dataSetPath = attributesPath.getParent().toString();
final Path ccDataPath = Paths.get(dataSetPath, "cc_regional_data.json.gz");
@@ -161,7 +161,7 @@ private static void createDataSet(final List<CrossCorrelationWithNextRegionalDat
Files.write(ngUrlPath, ngUrlString.getBytes());
LOG.info("createDataSet: neuroglancer URL written to {}", ngUrlPath);
} catch (final IOException e) {
LOG.warn("ignoring failure to write " + ngUrlPath, e);
LOG.warn("ignoring failure to write {}", ngUrlPath, e);
}
}

@@ -327,7 +327,7 @@ private static String buildNgLayerStringForLatestRenderExport(final StackId stac
exportPrefix, renderExportProjectDir);
}
} catch (final IOException e) {
LOG.warn("ignoring failure to list files in " + renderExportProjectDir, e);
LOG.warn("ignoring failure to list files in {}", renderExportProjectDir, e);
}
} else {
LOG.warn("buildNgLayerStringForLatestRenderExport: failed to find render export directory {}",
----- next changed file (filename not shown) -----
@@ -242,6 +242,8 @@ private void runForStack(
Arrays.asList(min[0], min[1], min[2]),
NeuroglancerAttributes.NumpyContiguousOrdering.FORTRAN);

ngAttributes.write(Paths.get(parameters.targetN5Path), Paths.get(targetDataset, "s0"));
try (final N5Writer n5 = new N5FSWriter(parameters.targetN5Path)) {
ngAttributes.write(n5, Paths.get(targetDataset, "s0"));
}
}
}
----- next changed file (filename not shown) -----
@@ -559,8 +559,7 @@ private void exportPreview(final JavaSparkContext sparkContext,
Arrays.asList(exportInfo.min[0], exportInfo.min[1], exportInfo.min[2]),
NeuroglancerAttributes.NumpyContiguousOrdering.FORTRAN);

ngAttributes.write(Paths.get(exportInfo.n5PathString),
Paths.get(exportInfo.fullScaleDatasetName));
ngAttributes.write(n5Supplier.get(), Paths.get(exportInfo.fullScaleDatasetName));
}

private static long findMinZToRender(final DatasetAttributes datasetAttributes,
----- next changed file (filename not shown) -----
@@ -38,10 +38,11 @@
import org.janelia.render.client.zspacing.ThicknessCorrectionData;
import org.janelia.saalfeldlab.n5.DataType;
import org.janelia.saalfeldlab.n5.GzipCompression;
import org.janelia.saalfeldlab.n5.N5FSWriter;
import org.janelia.saalfeldlab.n5.N5Writer;
import org.janelia.saalfeldlab.n5.imglib2.N5Utils;
import org.janelia.saalfeldlab.n5.spark.supplier.N5WriterSupplier;
import org.janelia.saalfeldlab.n5.universe.N5Factory;
import org.janelia.saalfeldlab.n5.universe.N5Factory.StorageFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -335,13 +336,12 @@ public void run()
}

int numberOfDownSampledDatasets = 0;
final N5WriterSupplier n5Supplier = new Util.N5PathSupplier(parameters.n5Path);
if (downsampleStack) {

LOG.info("run: downsample stack with factors {}", Arrays.toString(downsampleFactors));

// Now that the full resolution image is saved into n5, generate the scale pyramid
final N5WriterSupplier n5Supplier = new Util.N5PathSupplier(parameters.n5Path);

final List<String> downsampledDatasetPaths =
downsampleScalePyramid(sparkContext,
n5Supplier,
@@ -361,8 +361,7 @@ public void run()
Arrays.asList(min[0], min[1], min[2]),
NeuroglancerAttributes.NumpyContiguousOrdering.FORTRAN);

ngAttributes.write(Paths.get(parameters.n5Path),
Paths.get(fullScaleDatasetName));
ngAttributes.write(n5Supplier.get(), Paths.get(fullScaleDatasetName));

if (downsampleStackForReview) {

@@ -406,8 +405,7 @@ public void run()
Arrays.asList(min[0], min[1], min[2]),
NeuroglancerAttributes.NumpyContiguousOrdering.FORTRAN);

reviewNgAttributes.write(Paths.get(parameters.n5Path),
Paths.get(fullScaleReviewDatasetName));
reviewNgAttributes.write(n5ReviewSupplier.get(), Paths.get(fullScaleReviewDatasetName));
}

sparkContext.close();
@@ -475,7 +473,7 @@ public static void setupFullScaleExportN5(final Parameters parameters,
final int[] blockSize,
final DataType dataType) {

try (final N5Writer n5 = new N5FSWriter(parameters.n5Path)) {
try (final N5Writer n5 = new N5Factory().openWriter(StorageFormat.N5, parameters.n5Path)) {
n5.createDataset(fullScaleDatasetName,
dimensions,
blockSize,
@@ -494,7 +492,7 @@ public static void updateFullScaleExportAttributes(final Parameters parameters,

String exportAttributesDatasetName = fullScaleDatasetName;

try (final N5Writer n5 = new N5FSWriter(parameters.n5Path)) {
try (final N5Writer n5 = new N5Factory().openWriter(StorageFormat.N5, parameters.n5Path)) {
final Map<String, Object> export_attributes = new HashMap<>();
export_attributes.put("runTimestamp", new Date());
export_attributes.put("runParameters", parameters);
@@ -661,8 +659,9 @@ private static void saveRenderStack(final JavaSparkContext sc,
}
}

final N5Writer anotherN5Writer = new N5FSWriter(n5Path); // needed to prevent Spark serialization error
final N5Writer anotherN5Writer = new N5Factory().openWriter(StorageFormat.N5, n5Path); // needed to prevent Spark serialization error
N5Utils.saveNonEmptyBlock(block, anotherN5Writer, datasetName, gridBlock.gridPosition, new UnsignedByteType(0));
anotherN5Writer.close();
});
}

@@ -722,8 +721,9 @@ private static void save2DRenderStack(final JavaSparkContext sc,
out.next().set(in.next());
}

final N5Writer anotherN5Writer = new N5FSWriter(n5Path); // needed to prevent Spark serialization error
final N5Writer anotherN5Writer = new N5Factory().openWriter(StorageFormat.N5, n5Path); // needed to prevent Spark serialization error
N5Utils.saveNonEmptyBlock(block, anotherN5Writer, datasetName, gridBlock.gridPosition, new UnsignedByteType(0));
anotherN5Writer.close();
});

LOG.info("save2DRenderStack: exit");
----- next changed file (filename not shown) -----
@@ -2,9 +2,9 @@

import java.io.IOException;

import org.janelia.saalfeldlab.n5.N5FSWriter;
import org.janelia.saalfeldlab.n5.N5Writer;
import org.janelia.saalfeldlab.n5.spark.supplier.N5WriterSupplier;
import org.janelia.saalfeldlab.n5.universe.N5Factory;

/**
* Utilities for N5 operations.
@@ -22,7 +22,7 @@ public N5PathSupplier(final String path) {
@Override
public N5Writer get()
throws IOException {
return new N5FSWriter(path);
return new N5Factory().openWriter(N5Factory.StorageFormat.N5, path);
}
}
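
A brief, hypothetical usage sketch of the supplier follows; the container path and attribute names are placeholders. Passing StorageFormat.N5 keeps the container format explicit while N5Factory opens the writer from the given path, so the supplier is no longer tied to a local filesystem writer.

import java.io.IOException;
import org.janelia.saalfeldlab.n5.N5Writer;

// Hypothetical usage of Util.N5PathSupplier; the path and attribute key are placeholders.
final Util.N5PathSupplier n5Supplier = new Util.N5PathSupplier("/tmp/example.n5");
try (final N5Writer writer = n5Supplier.get()) {
    writer.createGroup("example_group");                    // create a group ...
    writer.setAttribute("example_group", "example", true);  // ... and attach a simple boolean attribute to it
} catch (final IOException e) {
    throw new RuntimeException("failed to write example attribute", e);
}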

----- next changed file (filename not shown) -----
@@ -150,21 +150,21 @@ public void testSetupFullScaleExportN5() throws Exception {
@Test
public void testNeuroglancerAttributes() throws Exception {

final Path n5Path = n5PathDirectory.toPath().toAbsolutePath();
final String n5Path = n5PathDirectory.getAbsolutePath();
final Path fullScaleDatasetPath = Paths.get("/render/test_stack/one_more_nested_dir/s0");
final String datasetName = fullScaleDatasetPath.toString();

final long[] dimensions = { 100L, 200L, 300L };
final int[] blockSize = { 10, 20, 30 };
try (final N5Writer n5Writer = new N5FSWriter(n5Path.toString())) {
try (final N5Writer n5Writer = new N5FSWriter(n5Path)) {

final DatasetAttributes datasetAttributes = new DatasetAttributes(dimensions,
blockSize,
DataType.UINT8,
new GzipCompression());
n5Writer.createDataset(datasetName, datasetAttributes);

final N5Reader n5Reader = new N5FSReader(n5Path.toString());
final N5Reader n5Reader = new N5FSReader(n5Path);
Assert.assertTrue("dataset " + datasetName + " is missing", n5Reader.datasetExists(datasetName));

final Map<String, Object> originalDatasetAttributes = datasetAttributes.asMap();
@@ -198,7 +197,7 @@ public void testNeuroglancerAttributes() throws Exception {
Arrays.asList(5L, 25L, 125L),
NeuroglancerAttributes.NumpyContiguousOrdering.C);

ngAttributes.write(n5Path, fullScaleDatasetPath);
ngAttributes.write(n5Writer, fullScaleDatasetPath);

final String testStackDatasetName = fullScaleDatasetPath.getParent().toString();

Expand Down