9 changes: 9 additions & 0 deletions pom.xml
@@ -141,6 +141,15 @@
-->
<n5-version>3.2.0</n5-version>

<!--
NOTE: The default n5-google-cloud version for scijava pom 39.0.0 is 4.1.1,
but we revert n5-google-cloud to 4.1.0 here because our n5-version is 3.2.0
(n5-google-cloud 4.1.1 requires n5 3.3.0). Keep the two versions in sync on future updates.
These versions are normally managed by the scijava parent pom; we override them here
because the hot-knife repo is behind (see comment above).
-->
<n5-google-cloud.version>4.1.0</n5-google-cloud.version>

<imglib2.version>7.1.4</imglib2.version>
<imglib2-algorithm.version>0.17.2</imglib2-algorithm.version>

4 changes: 2 additions & 2 deletions render-app/pom.xml
@@ -156,12 +156,12 @@

<dependency>
<groupId>org.janelia.saalfeldlab</groupId>
<artifactId>n5-imglib2</artifactId>
<artifactId>n5-hdf5</artifactId>
</dependency>

<dependency>
<groupId>org.janelia.saalfeldlab</groupId>
<artifactId>n5-hdf5</artifactId>
<artifactId>n5-universe</artifactId>
</dependency>

<!-- Use an older version of jackson to keep it consistent with the render-ws jackson-jaxrs-json-provider version. -->
@@ -9,8 +9,8 @@
import java.util.List;
import java.util.Map;

import org.janelia.saalfeldlab.n5.N5FSWriter;
import org.janelia.saalfeldlab.n5.N5Writer;
import org.janelia.saalfeldlab.n5.universe.N5Factory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -135,13 +135,13 @@ public NeuroglancerAttributes(final List<Double> stackResolutionValues,
/**
* Writes all attributes.json files required by Neuroglancer to display the specified dataset.
*
* @param n5BasePath base path for n5.
* @param n5Base base path or URL string for n5.
* @param fullScaleDatasetPath path of the full scale data set.
*
* @throws IOException
* if the writes fail for any reason.
*/
public void write(final Path n5BasePath,
public void write(final String n5Base,
final Path fullScaleDatasetPath)
throws IOException {

@@ -150,10 +150,10 @@ public void write(final Path n5BasePath,
final Path ngAttributesPath = isMultiScaleDataset ?
fullScaleDatasetPath.getParent() : fullScaleDatasetPath;

LOG.info("write: entry, n5BasePath={}, fullScaleDatasetPath={}, ngAttributesPath={}",
n5BasePath, fullScaleDatasetPath, ngAttributesPath);
LOG.info("write: entry, n5Base={}, fullScaleDatasetPath={}, ngAttributesPath={}",
n5Base, fullScaleDatasetPath, ngAttributesPath);

final N5Writer n5Writer = new N5FSWriter(n5BasePath.toAbsolutePath().toString());
final N5Writer n5Writer = new N5Factory().openWriter(N5Factory.StorageFormat.N5, n5Base); //new N5FSWriter(n5BasePath.toAbsolutePath().toString());

Neuroglancer recursively looks for attributes.json files from the root path and stops at
// the first subdirectory without an attributes.json file.
@@ -164,7 +164,7 @@ public void write(final Path n5BasePath,
for (Path path = ngAttributesPath.getParent();
(path != null) && (! path.endsWith("/"));
path = path.getParent()) {
LOG.info("write: saving supported attribute to {}{}/attributes.json", n5BasePath, path);
LOG.info("write: saving supported attribute to {}{}/attributes.json", n5Base, path);
n5Writer.setAttribute(path.toString(), SUPPORTED_KEY, true);
}

@@ -180,15 +180,15 @@ public void write(final Path n5BasePath,
attributes.put("pixelResolution", pixelResolution);
attributes.put("translate", translate);

LOG.info("write: saving neuroglancer attributes to {}{}/attributes.json", n5BasePath, ngAttributesPath);
LOG.info("write: saving neuroglancer attributes to {}{}/attributes.json", n5Base, ngAttributesPath);
n5Writer.setAttributes(ngAttributesPath.toString(), attributes);

if (isMultiScaleDataset) {
for (int scaleLevel = 0; scaleLevel < scales.size(); scaleLevel++) {
writeScaleLevelTransformAttributes(scaleLevel,
scales.get(scaleLevel),
n5Writer,
n5BasePath,
n5Base,
ngAttributesPath);
}
}
@@ -197,16 +197,18 @@ public void write(final Path n5BasePath,
private void writeScaleLevelTransformAttributes(final int scaleLevel,
final List<Integer> scaleLevelFactors,
final N5Writer n5Writer,
final Path n5BasePath,
final String n5Base,
final Path ngAttributesPath)
throws IOException {

final String scaleName = "s" + scaleLevel;
final Path scaleAttributesPath = Paths.get(ngAttributesPath.toString(), scaleName);

final Path scaleLevelDirectoryPath = Paths.get(n5BasePath.toString(), ngAttributesPath.toString(), scaleName);
if (! scaleLevelDirectoryPath.toFile().exists()) {
throw new IOException(scaleLevelDirectoryPath.toAbsolutePath() + " does not exist");
if (n5Base.startsWith("/") || n5Base.startsWith("\\")) {
final Path scaleLevelDirectoryPath = Paths.get(n5Base, ngAttributesPath.toString(), scaleName);
if (! scaleLevelDirectoryPath.toFile().exists()) {
throw new IOException(scaleLevelDirectoryPath.toAbsolutePath() + " does not exist");
}
}

final Map<String, Object> transformAttributes = new HashMap<>();
@@ -232,7 +234,7 @@ private void writeScaleLevelTransformAttributes(final int scaleLevel,
final Map<String, Object> attributes = new HashMap<>();
attributes.put("transform", transformAttributes);

LOG.info("writeScaleLevelTransformAttributes: saving {}{}/attributes.json", n5BasePath, scaleAttributesPath);
LOG.info("writeScaleLevelTransformAttributes: saving {}{}/attributes.json", n5Base, scaleAttributesPath);
n5Writer.setAttributes(scaleAttributesPath.toString(), attributes);
}
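For context (not part of the diff), a minimal sketch of the idea behind switching write(...) from a Path to a String base: N5Factory opens a plain directory the same way N5FSWriter did, and a URI base such as gs://example-bucket/export.n5 would be routed to the matching cloud backend instead. The container path and attribute key below are illustrative.

import org.janelia.saalfeldlab.n5.N5Writer;
import org.janelia.saalfeldlab.n5.universe.N5Factory;

public class N5BaseSketch {
    public static void main(final String[] args) throws Exception {
        // A local base behaves like the old N5FSWriter case (directory name is illustrative).
        try (final N5Writer writer = new N5Factory().openWriter(N5Factory.StorageFormat.N5, "/tmp/example-export.n5")) {
            writer.setAttribute("/", "neuroglancer", true); // illustrative attribute key
        }
        // A cloud base such as "gs://example-bucket/export.n5" goes through the same call,
        // assuming the n5-google-cloud backend pinned in the root pom is on the classpath.
    }
}

This is also why the existence check in writeScaleLevelTransformAttributes is now guarded by the startsWith("/") test: toFile() only makes sense when the base is a local filesystem path.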

7 changes: 7 additions & 0 deletions render-ws-java-client/pom.xml
@@ -74,6 +74,13 @@
<exclude>LICENSE</exclude>
<exclude>META-INF/*</exclude>
<exclude>META-INF/versions/**</exclude>
<exclude>canonical.json</exclude>
<exclude>cosem.json</exclude>
<exclude>n5-compression.jq</exclude>
<exclude>n5.jq</exclude>
<exclude>zyx.zattrs.json</exclude>
<exclude>META-INF/services/io.grpc.LoadBalancerProvider</exclude>
<exclude>META-INF/services/io.grpc.NameResolverProvider</exclude>
</excludes>
</filter>
<filter>
@@ -132,7 +132,7 @@ private static void createDataSet(final List<CrossCorrelationWithNextRegionalDat
stackResolutionUnit,
rowCount,
columnCount);
ngAttributes.write(Paths.get(basePath), Paths.get(datasetName));
ngAttributes.write(basePath, Paths.get(datasetName));

final String dataSetPath = attributesPath.getParent().toString();
final Path ccDataPath = Paths.get(dataSetPath, "cc_regional_data.json.gz");
@@ -132,7 +132,7 @@ public AffineAlignBlockWorker(

// NOTE: if you choose to stitch first, you need to pre-align; otherwise, it's OK to use the initial alignment for each tile
if (stitchFirst && parameters.preAlign() == PreAlign.NONE) {
throw new IllegalArgumentException("AffineBlockSolverSetup with --stitchFirst requires --preAlign to be TRANSLATION or RIGID");
throw new IllegalArgumentException("AffineBlockSolverSetup with --stitchFirst requires --preAlign to be TRANSLATION or RIGID or MULTI_SEM");
}

this.coreTileSpecIds = new HashSet<>(); // will be populated by call to assembleMatchData
@@ -922,6 +922,7 @@ protected void solve(
for (final Tile<?> tile : tileConfig.getTiles()) {
final AlignmentModel model = (AlignmentModel) tile.getModel();
model.setWeights(weights);
// TODO: per Tile sigmoidal weight regularization depending on numMatches * weights
}

final int numIterations = blockOptimizerIterations.get(k);
@@ -52,7 +52,7 @@ public IntensityTile(

for (int i = 0; i < N; i++) {
final Affine1D<?> model = modelSupplier.get();
this.subTiles.add(new Tile<>((Model) model));
this.subTiles.add(new Tile((Model) model));
}
}

@@ -242,6 +242,6 @@ private void runForStack(
Arrays.asList(min[0], min[1], min[2]),
NeuroglancerAttributes.NumpyContiguousOrdering.FORTRAN);

ngAttributes.write(Paths.get(parameters.targetN5Path), Paths.get(targetDataset, "s0"));
ngAttributes.write(parameters.targetN5Path, Paths.get(targetDataset, "s0"));
}
}
@@ -559,7 +559,7 @@ private void exportPreview(final JavaSparkContext sparkContext,
Arrays.asList(exportInfo.min[0], exportInfo.min[1], exportInfo.min[2]),
NeuroglancerAttributes.NumpyContiguousOrdering.FORTRAN);

ngAttributes.write(Paths.get(exportInfo.n5PathString),
ngAttributes.write(exportInfo.n5PathString,
Paths.get(exportInfo.fullScaleDatasetName));
}

@@ -38,10 +38,11 @@
import org.janelia.render.client.zspacing.ThicknessCorrectionData;
import org.janelia.saalfeldlab.n5.DataType;
import org.janelia.saalfeldlab.n5.GzipCompression;
import org.janelia.saalfeldlab.n5.N5FSWriter;
import org.janelia.saalfeldlab.n5.N5Writer;
import org.janelia.saalfeldlab.n5.imglib2.N5Utils;
import org.janelia.saalfeldlab.n5.spark.supplier.N5WriterSupplier;
import org.janelia.saalfeldlab.n5.universe.N5Factory;
import org.janelia.saalfeldlab.n5.universe.N5Factory.StorageFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -361,7 +362,7 @@ public void run()
Arrays.asList(min[0], min[1], min[2]),
NeuroglancerAttributes.NumpyContiguousOrdering.FORTRAN);

ngAttributes.write(Paths.get(parameters.n5Path),
ngAttributes.write(parameters.n5Path,
Paths.get(fullScaleDatasetName));

if (downsampleStackForReview) {
@@ -406,7 +407,7 @@ public void run()
Arrays.asList(min[0], min[1], min[2]),
NeuroglancerAttributes.NumpyContiguousOrdering.FORTRAN);

reviewNgAttributes.write(Paths.get(parameters.n5Path),
reviewNgAttributes.write(parameters.n5Path,
Paths.get(fullScaleReviewDatasetName));
}

@@ -475,7 +476,7 @@ public static void setupFullScaleExportN5(final Parameters parameters,
final int[] blockSize,
final DataType dataType) {

try (final N5Writer n5 = new N5FSWriter(parameters.n5Path)) {
try (final N5Writer n5 = new N5Factory().openWriter( StorageFormat.N5, parameters.n5Path )) /*new N5FSWriter(parameters.n5Path))*/ {
n5.createDataset(fullScaleDatasetName,
dimensions,
blockSize,
@@ -494,7 +495,7 @@ public static void updateFullScaleExportAttributes(final Parameters parameters,

String exportAttributesDatasetName = fullScaleDatasetName;

try (final N5Writer n5 = new N5FSWriter(parameters.n5Path)) {
try (final N5Writer n5 = new N5Factory().openWriter( StorageFormat.N5, parameters.n5Path )/*new N5FSWriter(parameters.n5Path)*/) {
final Map<String, Object> export_attributes = new HashMap<>();
export_attributes.put("runTimestamp", new Date());
export_attributes.put("runParameters", parameters);
@@ -661,8 +662,9 @@ private static void saveRenderStack(final JavaSparkContext sc,
}
}

final N5Writer anotherN5Writer = new N5FSWriter(n5Path); // needed to prevent Spark serialization error
final N5Writer anotherN5Writer = new N5Factory().openWriter( StorageFormat.N5, n5Path )/*new N5FSWriter(n5Path)*/; // needed to prevent Spark serialization error
N5Utils.saveNonEmptyBlock(block, anotherN5Writer, datasetName, gridBlock.gridPosition, new UnsignedByteType(0));
anotherN5Writer.close();
});
}

@@ -722,8 +724,9 @@ private static void save2DRenderStack(final JavaSparkContext sc,
out.next().set(in.next());
}

final N5Writer anotherN5Writer = new N5FSWriter(n5Path); // needed to prevent Spark serialization error
final N5Writer anotherN5Writer = new N5Factory().openWriter( StorageFormat.N5, n5Path ) /*new N5FSWriter(n5Path)*/; // needed to prevent Spark serialization error
N5Utils.saveNonEmptyBlock(block, anotherN5Writer, datasetName, gridBlock.gridPosition, new UnsignedByteType(0));
anotherN5Writer.close();
});

LOG.info("save2DRenderStack: exit");
@@ -2,9 +2,9 @@

import java.io.IOException;

import org.janelia.saalfeldlab.n5.N5FSWriter;
import org.janelia.saalfeldlab.n5.N5Writer;
import org.janelia.saalfeldlab.n5.spark.supplier.N5WriterSupplier;
import org.janelia.saalfeldlab.n5.universe.N5Factory;

/**
* Utilities for N5 operations.
@@ -22,7 +22,7 @@ public N5PathSupplier(final String path) {
@Override
public N5Writer get()
throws IOException {
return new N5FSWriter(path);
return new N5Factory().openWriter(N5Factory.StorageFormat.N5, path);
}
}
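A usage sketch for the reworked supplier (not part of the diff; it assumes N5PathSupplier is imported from its enclosing utility class, and the container path and attribute are illustrative): the supplier holds only a String, so it serializes cleanly to Spark executors, and every get() now goes through N5Factory rather than N5FSWriter.

import org.janelia.saalfeldlab.n5.N5Writer;
import org.janelia.saalfeldlab.n5.spark.supplier.N5WriterSupplier;

public class SupplierSketch {
    public static void main(final String[] args) throws Exception {
        // N5PathSupplier is the class updated above; import it from its enclosing utility class.
        final N5WriterSupplier writerSupplier = new N5PathSupplier("/tmp/example-export.n5"); // illustrative path
        try (final N5Writer writer = writerSupplier.get()) {
            writer.setAttribute("/", "createdBy", "SupplierSketch"); // illustrative attribute
        }
    }
}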

@@ -150,21 +150,21 @@ public void testSetupFullScaleExportN5() throws Exception {
@Test
public void testNeuroglancerAttributes() throws Exception {

final Path n5Path = n5PathDirectory.toPath().toAbsolutePath();
final String n5Path = n5PathDirectory.getAbsolutePath();
final Path fullScaleDatasetPath = Paths.get("/render/test_stack/one_more_nested_dir/s0");
final String datasetName = fullScaleDatasetPath.toString();

final long[] dimensions = { 100L, 200L, 300L };
final int[] blockSize = { 10, 20, 30 };
try (final N5Writer n5Writer = new N5FSWriter(n5Path.toString())) {
try (final N5Writer n5Writer = new N5FSWriter(n5Path)) {

final DatasetAttributes datasetAttributes = new DatasetAttributes(dimensions,
blockSize,
DataType.UINT8,
new GzipCompression());
n5Writer.createDataset(datasetName, datasetAttributes);

final N5Reader n5Reader = new N5FSReader(n5Path.toString());
final N5Reader n5Reader = new N5FSReader(n5Path);
Assert.assertTrue("dataset " + datasetName + " is missing", n5Reader.datasetExists(datasetName));

final Map<String, Object> originalDatasetAttributes = datasetAttributes.asMap();
8 changes: 8 additions & 0 deletions render-ws/pom.xml
@@ -67,11 +67,19 @@
<artifactId>render-app</artifactId>
<version>${project.version}</version>
<exclusions>

<!-- filter out the slf4j jar so that it does not conflict with the jetty version -->
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</exclusion>

<!-- filter out javax.annotation so that it does not conflict with the jboss version -->
<exclusion>
<groupId>javax.annotation</groupId>
<artifactId>javax.annotation-api</artifactId>
</exclusion>

</exclusions>
</dependency>
