T getAttribute(
		final String pathName,
		final String key,
		final Type type) throws N5Exception {

	// Normalize both the group path and the attribute path so cache keys are canonical.
	final String normalPathName = N5URI.normalizeGroupPath(pathName);
	final String normalizedAttributePath = N5URI.normalizeAttributePath(key);
	JsonElement attributes;
	if (cacheMeta()) {
		// Cached: serve the attribute JSON from the metadata cache.
		attributes = getCache().getAttributes(normalPathName, getAttributesKey());
	} else {
		// Uncached: read the attributes through the backing reader implementation.
		attributes = GsonKeyValueN5Reader.super.getAttributes(normalPathName);
	}

	try {
		return GsonUtils.readAttribute(attributes, normalizedAttributePath, type, getGson());
	} catch (JsonSyntaxException | NumberFormatException | ClassCastException e) {
		// Map JSON parsing / conversion failures into the N5 exception hierarchy.
		throw new N5Exception.N5ClassCastException(e);
	}
}
/**
 * Returns whether a group exists at the given path, consulting the metadata
 * cache when caching is enabled.
 */
@Override
default boolean exists(final String pathName) {

	final String normalizedPath = N5URI.normalizeGroupPath(pathName);
	return cacheMeta()
			? getCache().isGroup(normalizedPath, getAttributesKey())
			: existsFromContainer(normalizedPath, null);
}
/**
 * Checks existence directly against the backing container: with a cache key,
 * tests for the attributes file; without one, tests for the group directory.
 */
@Override
default boolean existsFromContainer(final String normalPathName, final String normalCacheKey) {

	final KeyValueAccess access = getKeyValueAccess();
	if (normalCacheKey != null)
		return access.isFile(access.compose(getURI(), normalPathName, normalCacheKey));

	return access.isDirectory(access.compose(getURI(), normalPathName));
}
/**
 * Returns whether a group exists at the given path, consulting the metadata
 * cache when caching is enabled.
 */
@Override
default boolean groupExists(final String pathName) {

	final String normalizedPath = N5URI.normalizeGroupPath(pathName);
	return cacheMeta()
			? getCache().isGroup(normalizedPath, null)
			: isGroupFromContainer(normalizedPath);
}
@Override
default boolean isGroupFromContainer(final String normalPathName) {

	// Bypass the cache and ask the backing container directly.
	return GsonKeyValueN5Reader.super.groupExists(normalPathName);
}
@Override
default boolean isGroupFromAttributes(final String normalCacheKey, final JsonElement attributes) {

	// Always true for this format: any node with readable attributes is
	// considered a group; no particular attribute keys are required.
	return true;
}
/**
 * Returns whether a dataset exists at the given path, consulting the metadata
 * cache when caching is enabled.
 */
@Override
default boolean datasetExists(final String pathName) throws N5IOException {

	final String normalizedPath = N5URI.normalizeGroupPath(pathName);
	if (!cacheMeta())
		return isDatasetFromContainer(normalizedPath);

	return getCache().isDataset(normalizedPath, getAttributesKey());
}
@Override
default boolean isDatasetFromContainer(final String normalPathName) throws N5IOException {

	// A path is a dataset iff its dataset attributes can be parsed from the container.
	return normalGetDatasetAttributes(normalPathName) != null;
}
@Override
default boolean isDatasetFromAttributes(final String normalCacheKey, final JsonElement attributes) {

	// A dataset is a group whose attributes parse into valid dataset attributes.
	return isGroupFromAttributes(normalCacheKey, attributes) && createDatasetAttributes(attributes) != null;
}
/**
 * Reads the attributes of a group or dataset, preferring the metadata cache
 * when caching is enabled.
 *
 * @param pathName
 *            group path
 * @return the attributes as a {@link JsonElement}
 * @throws N5IOException if an IO error occurs while reading the attributes
 */
@Override
default JsonElement getAttributes(final String pathName) throws N5IOException {

	final String normalizedPath = N5URI.normalizeGroupPath(pathName);
	return cacheMeta()
			? getCache().getAttributes(normalizedPath, getAttributesKey())
			: GsonKeyValueN5Reader.super.getAttributes(normalizedPath);
}
/**
 * Lists the children of the given group, preferring the metadata cache when
 * caching is enabled.
 */
@Override
default String[] list(final String pathName) throws N5IOException {

	final String normalizedPath = N5URI.normalizeGroupPath(pathName);
	return cacheMeta()
			? getCache().list(normalizedPath)
			: GsonKeyValueN5Reader.super.list(normalizedPath);
}
@Override
default String[] listFromContainer(final String normalPathName) {

	// This implementation does not use a cache key; it lists children
	// directly from the backing container.
	return GsonKeyValueN5Reader.super.list(normalPathName);
}
/**
 * Checks whether the given JSON contains the attributes required for a group
 * to be a dataset.
 *
 * @param attributes
 *            the attributes to inspect
 * @return {@code true} iff both {@link DatasetAttributes#DIMENSIONS_KEY} and
 *         {@link DatasetAttributes#DATA_TYPE_KEY} are present
 */
static boolean hasDatasetAttributes(final JsonElement attributes) {

	if (attributes == null || !attributes.isJsonObject())
		return false;

	final JsonObject obj = attributes.getAsJsonObject();
	return obj.has(DatasetAttributes.DIMENSIONS_KEY) && obj.has(DatasetAttributes.DATA_TYPE_KEY);
}
}
================================================
FILE: src/main/java/org/janelia/saalfeldlab/n5/CachedGsonKeyValueN5Writer.java
================================================
package org.janelia.saalfeldlab.n5;
import org.janelia.saalfeldlab.n5.N5Exception.N5IOException;
import com.google.gson.Gson;
import com.google.gson.JsonElement;
/**
* Cached default implementation of {@link N5Writer} with JSON attributes parsed
* with {@link Gson}.
*/
public interface CachedGsonKeyValueN5Writer extends CachedGsonKeyValueN5Reader, GsonKeyValueN5Writer {

	/**
	 * Writes the current N5 version to the container root if it differs from
	 * the version stored there.
	 *
	 * @param path
	 *            ignored; the version is always written to the root group "/"
	 * @throws N5Exception
	 *             if the stored version is incompatible with this library, or
	 *             if writing the attribute fails
	 */
	@Override
	default void setVersion(final String path) throws N5Exception {

		final Version version = getVersion();
		if (!VERSION.isCompatible(version))
			throw new N5IOException("Incompatible version " + version + " (this is " + VERSION + ").");

		// fix: removed a stray duplicate ';' after this statement
		if (!VERSION.equals(version))
			setAttribute("/", VERSION_KEY, VERSION.toString());
	}

	/**
	 * Creates a group at the given path and updates the metadata cache entries
	 * of the new group and all of its ancestors.
	 *
	 * @param path
	 *            the group path
	 * @throws N5Exception
	 *             if a dataset already exists at {@code path}, or if creating
	 *             the backing directories fails
	 */
	@Override
	default void createGroup(final String path) throws N5Exception {

		final String normalPath = N5URI.normalizeGroupPath(path);
		// avoid hitting the backend if this path is already a group according
		// to the cache; if a dataset is present instead, throw to avoid
		// overwriting / invalidating existing data
		if (groupExists(normalPath))
			return;
		else if (datasetExists(normalPath))
			throw new N5Exception("Can't make a group on existing dataset.");

		getKeyValueAccess().createDirectories(absoluteGroupPath(normalPath));
		if (cacheMeta()) {
			// fix: removed a call to getKeyValueAccess().parent(normalPath)
			// whose return value was discarded (pure path computation).

			// Walk from the root down to the new group. For every node on the
			// way, initialize/update its cache entry, and if the parent's
			// child list is cached, add the new child to it.
			String[] pathParts = getKeyValueAccess().components(normalPath);
			String parent = N5URI.normalizeGroupPath("/");
			if (pathParts.length == 0) {
				pathParts = new String[]{""};
			}
			for (final String child : pathParts) {
				final String childPath = parent.isEmpty() ? child : parent + "/" + child;
				getCache().initializeNonemptyCache(childPath, getAttributesKey());
				getCache().updateCacheInfo(childPath, getAttributesKey());

				// only add if the parent exists and has children cached already
				if (parent != null && !child.isEmpty())
					getCache().addChildIfPresent(parent, child);

				parent = childPath;
			}
		}
	}

	/**
	 * Writes the given attributes to the group, updating the cache via
	 * {@link #writeAndCacheAttributes}.
	 *
	 * @param normalGroupPath
	 *            the normalized group path
	 * @param attributes
	 *            the attributes to write
	 * @throws N5Exception
	 *             if writing fails
	 */
	@Override
	default void writeAttributes(
			final String normalGroupPath,
			final JsonElement attributes) throws N5Exception {

		writeAndCacheAttributes(normalGroupPath, attributes);
	}

	/**
	 * Writes attributes through the backing writer and, if metadata caching is
	 * enabled, stores a null-filtered copy in the cache.
	 *
	 * @param normalGroupPath
	 *            the normalized group path
	 * @param attributes
	 *            the attributes to write
	 * @throws N5Exception
	 *             if writing fails
	 */
	default void writeAndCacheAttributes(
			final String normalGroupPath,
			final JsonElement attributes) throws N5Exception {

		GsonKeyValueN5Writer.super.writeAttributes(normalGroupPath, attributes);
		if (cacheMeta()) {
			JsonElement nullRespectingAttributes = attributes;
			/*
			 * Gson only filters out nulls when you write the JsonElement. This
			 * means it doesn't filter them out when caching.
			 * To handle this, we explicitly write the existing JsonElement to
			 * a new JsonElement.
			 * The output is identical to the input if:
			 * - serializeNulls is true
			 * - no null values are present
			 * - caching is turned off
			 */
			if (!getGson().serializeNulls()) {
				nullRespectingAttributes = getGson().toJsonTree(attributes);
			}
			/* Update the cache, and write to the writer */
			getCache().updateCacheInfo(normalGroupPath, getAttributesKey(), nullRespectingAttributes);
		}
	}

	/**
	 * Removes the group or dataset at {@code path} (if present) and evicts it
	 * from the metadata cache.
	 *
	 * @param path
	 *            the group or dataset path
	 * @return always {@code true}; failures surface as exceptions
	 * @throws N5Exception
	 *             if deletion fails
	 */
	@Override
	default boolean remove(final String path) throws N5Exception {

		// This duplicates GsonKeyValueN5Writer.super.remove(path) so the
		// normalized path can be reused for the cache update below, avoiding
		// a second call to normalizeGroupPath.
		final String normalPath = N5URI.normalizeGroupPath(path);
		final String groupPath = absoluteGroupPath(normalPath);
		if (getKeyValueAccess().isDirectory(groupPath))
			getKeyValueAccess().delete(groupPath);

		if (cacheMeta()) {
			final String parentPath = getKeyValueAccess().parent(normalPath);
			getCache().removeCache(parentPath, normalPath);
		}

		/* an exception should have occurred if anything had failed midway */
		return true;
	}
}
================================================
FILE: src/main/java/org/janelia/saalfeldlab/n5/ChannelLock.java
================================================
package org.janelia.saalfeldlab.n5;
import java.io.Closeable;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.channels.FileChannel;
import java.nio.channels.FileLock;
import java.nio.channels.OverlappingFileLockException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
/**
 * Holds a channel and system-level file lock (exclusive for writing, shared
 * for reading) and keeps it open until this {@code ChannelLock} is {@link
 * #close() closed}.
 */
class ChannelLock implements Closeable {

	private final FileChannel channel;

	/**
	 * Hold a hard reference to the {@code FileLock} to make sure it is not
	 * prematurely released.
	 *
	 * NB: We do not call {@code lock.release()} in {@link #close}, because at
	 * this point the channel might be already closed (by an external writer).
	 * {@code lock.release()} will throw an exception if the channel is already
	 * closed. Instead, we just close the channel which will automatically
	 * release the lock.
	 */
	@SuppressWarnings({"unused", "FieldCanBeLocal"})
	private final FileLock lock;

	private ChannelLock(final FileChannel channel, final FileLock lock) {
		this.channel = channel;
		this.lock = lock;
	}

	@Override
	public void close() throws IOException {
		// NB: We do not call lock.release() here, because it may throw an
		// exception if the channel is already closed. Instead, we just close
		// the channel. This will automatically release the lock. (And it is ok
		// to close an already closed channel.)
		channel.close();
	}

	FileChannel getChannel() {
		return channel;
	}

	/**
	 * Create a {@link FileChannel} on the given {@code path} and lock it with a
	 * system-level {@link FileLock}. If there is an existing overlapping file
	 * lock, this method will block until the existing lock is released and the
	 * channel could be locked (by us).
	 *
	 * The {@code FileLock} is exclusive if the {@code path} is locked {@code
	 * forWriting}, and shared otherwise.
	 *
	 * If the {@code path} is locked {@code forWriting}, a non-existing file
	 * and the parent directories are created as needed.
	 *
	 * @throws IOException if an error occurs while opening the channel, or if
	 * the calling thread is interrupted while waiting for the {@code FileLock}.
	 */
	static ChannelLock lock(final Path path, final boolean forWriting, final LockingPolicy policy) throws IOException {
		final FileChannel channel = openFileChannel(path, forWriting);
		if (policy == LockingPolicy.UNSAFE) {
			// UNSAFE skips OS-level locking entirely.
			return new ChannelLock(channel, null);
		}
		try {
			while (true) {
				try {
					// shared lock for reading, exclusive lock for writing
					final FileLock lock = channel.lock(0, Long.MAX_VALUE, !forWriting);
					return new ChannelLock(channel, lock);
				} catch (final OverlappingFileLockException e) {
					// Another thread in this JVM holds an overlapping lock;
					// poll until it has been released.
					try {
						Thread.sleep(100);
					} catch (final InterruptedException ie) {
						Thread.currentThread().interrupt();
						throw new IOException("Interrupted while waiting for file lock", ie);
					}
				}
			}
		} catch (Exception e) {
			if (policy == LockingPolicy.STRICT) {
				closeQuietly(channel);
				throw e;
			} else {
				// Non-strict policies proceed best-effort without a lock.
				return new ChannelLock(channel, null);
			}
		}
	}

	/**
	 * Opens a file channel. If the channel is opened {@code forWriting},
	 * then this may create the file and the parent directories as needed.
	 *
	 * @throws IOException
	 *             if the channel cannot be opened
	 */
	private static FileChannel openFileChannel(final Path path, final boolean forWriting) throws IOException {
		if (forWriting) {
			final Path parent = path.getParent();
			if (parent != null) {
				Files.createDirectories(parent);
			}
			return FileChannel.open(path, StandardOpenOption.READ, StandardOpenOption.WRITE, StandardOpenOption.CREATE);
		} else {
			return FileChannel.open(path, StandardOpenOption.READ);
		}
	}

	/** Closes the channel, swallowing any exception (used on error paths). */
	private static void closeQuietly(final FileChannel fileChannel) {
		if (fileChannel != null) {
			try {
				fileChannel.close();
			} catch (final IOException | UncheckedIOException ignored) {
				// best effort: already on a failure path
			}
		}
	}
}
================================================
FILE: src/main/java/org/janelia/saalfeldlab/n5/Compression.java
================================================
package org.janelia.saalfeldlab.n5;
import java.io.Serializable;
import java.lang.annotation.ElementType;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import org.janelia.saalfeldlab.n5.codec.DataCodec;
import org.janelia.saalfeldlab.n5.codec.CodecInfo;
import org.janelia.saalfeldlab.n5.codec.DataCodecInfo;
import org.scijava.annotations.Indexable;
/**
 * Marker interface indicating that a {@link DataCodec} can be serialized as a
 * "compression" for the N5 format (using the N5 API).
 *
 * N5Readers and N5Writers for the N5 format can declare DataCodecs that
 * implement this interface so that the {@link CompressionAdapter} is used for
 * serialization.
 *
 * See also: an alternative method for serializing general {@link CodecInfo}s is
 * with the {@link NameConfigAdapter}. This interface remains for legacy
 * (de)serialization.
 *
 * @author Stephan Saalfeld
 */
public interface Compression extends Serializable, DataCodec, DataCodecInfo {

	/**
	 * Annotation for runtime discovery of compression schemes.
	 */
	@Retention(RetentionPolicy.RUNTIME)
	@Inherited
	@Target(ElementType.TYPE)
	@Indexable
	@interface CompressionType {

		String value();
	}

	/**
	 * Annotation for runtime discovery of compression schemes.
	 */
	@Retention(RetentionPolicy.RUNTIME)
	@Inherited
	@Target(ElementType.FIELD)
	@interface CompressionParameter {}

	/**
	 * Returns the type label declared by this compression's
	 * {@link CompressionType} annotation, or {@code null} if the implementing
	 * class carries no such annotation.
	 */
	default String getType() {

		final CompressionType annotation = getClass().getAnnotation(CompressionType.class);
		return annotation == null ? null : annotation.value();
	}

	@Override
	default DataCodec create() {

		// A Compression serves as its own codec instance.
		return this;
	}
}
================================================
FILE: src/main/java/org/janelia/saalfeldlab/n5/CompressionAdapter.java
================================================
package org.janelia.saalfeldlab.n5;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map.Entry;
import org.janelia.saalfeldlab.n5.Compression.CompressionParameter;
import org.janelia.saalfeldlab.n5.Compression.CompressionType;
import org.scijava.annotations.Index;
import org.scijava.annotations.IndexItem;
import com.google.gson.JsonDeserializationContext;
import com.google.gson.JsonDeserializer;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParseException;
import com.google.gson.JsonSerializationContext;
import com.google.gson.JsonSerializer;
/**
* Compression adapter, auto-discovers annotated compression implementations
* in the classpath.
*
* @author Stephan Saalfeld
*/
public class CompressionAdapter implements JsonDeserializer, JsonSerializer {
private static CompressionAdapter instance = null;
private final HashMap> compressionConstructors = new HashMap<>();
private final HashMap>> compressionParameters = new HashMap<>();
private static ArrayList getDeclaredFields(Class> clazz) {
final ArrayList fields = new ArrayList<>();
fields.addAll(Arrays.asList(clazz.getDeclaredFields()));
for (clazz = clazz.getSuperclass(); clazz != null; clazz = clazz.getSuperclass())
fields.addAll(Arrays.asList(clazz.getDeclaredFields()));
return fields;
}
@SuppressWarnings("unchecked")
public static synchronized void update(final boolean override) {
if (override || instance == null) {
final CompressionAdapter newInstance = new CompressionAdapter();
final ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
final Index annotationIndex = Index.load(CompressionType.class, classLoader);
for (final IndexItem item : annotationIndex) {
Class extends Compression> clazz;
try {
clazz = (Class extends Compression>)Class.forName(item.className());
final String type = clazz.getAnnotation(CompressionType.class).value();
final Constructor extends Compression> constructor = clazz.getDeclaredConstructor();
final HashMap> parameters = new HashMap<>();
final ArrayList fields = getDeclaredFields(clazz);
for (final Field field : fields) {
if (field.getAnnotation(CompressionParameter.class) != null) {
parameters.put(field.getName(), field.getType());
}
}
newInstance.compressionConstructors.put(type, constructor);
newInstance.compressionParameters.put(type, parameters);
} catch (final NoClassDefFoundError | ClassNotFoundException | NoSuchMethodException | ClassCastException
| UnsatisfiedLinkError e) {
System.err.println("Compression '" + item.className() + "' could not be registered");
}
}
instance = newInstance;
}
}
public static void update() {
update(false);
}
@Override
public JsonElement serialize(
final Compression compression,
final Type typeOfSrc,
final JsonSerializationContext context) {
final String type = compression.getType();
final Class extends Compression> clazz = compression.getClass();
final JsonObject json = new JsonObject();
json.addProperty("type", type);
final HashMap> parameterTypes = compressionParameters.get(type);
try {
for (final Entry> parameterType : parameterTypes.entrySet()) {
final String name = parameterType.getKey();
final Field field = clazz.getDeclaredField(name);
final boolean isAccessible = field.isAccessible();
field.setAccessible(true);
final Object value = field.get(compression);
field.setAccessible(isAccessible);
json.add(parameterType.getKey(), context.serialize(value));
}
} catch (NoSuchFieldException | SecurityException | IllegalArgumentException | IllegalAccessException e) {
e.printStackTrace(System.err);
return null;
}
return json;
}
@Override
public Compression deserialize(
final JsonElement json,
final Type typeOfT,
final JsonDeserializationContext context) throws JsonParseException {
final JsonObject jsonObject = json.getAsJsonObject();
final JsonElement jsonType = jsonObject.get("type");
if (jsonType == null)
return null;
final String type = jsonType.getAsString();
final Constructor extends Compression> constructor = compressionConstructors.get(type);
final Compression compression;
try {
compression = constructor.newInstance();
final HashMap> parameterTypes = compressionParameters.get(type);
for (final Entry> parameterType : parameterTypes.entrySet()) {
final String name = parameterType.getKey();
if (jsonObject.has(name)) {
final Object parameter = context.deserialize(jsonObject.get(name), parameterType.getValue());
ReflectionUtils.setFieldValue(compression, name, parameter);
}
}
} catch (InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException
| SecurityException | NoSuchFieldException e) {
e.printStackTrace(System.err);
return null;
}
return compression;
}
public static CompressionAdapter getJsonAdapter() {
if (instance == null)
update();
return instance;
}
}
================================================
FILE: src/main/java/org/janelia/saalfeldlab/n5/DataBlock.java
================================================
package org.janelia.saalfeldlab.n5;
/**
 * Interface for data blocks. A data block has data, a position on the block
 * grid, and a size.
 *
 * @param <T> type of the data contained in the DataBlock
 *
 * @author Stephan Saalfeld
 */
public interface DataBlock<T> {

	/**
	 * Returns the size of this data block.
	 *
	 * The size of a data block is expected to be smaller than or equal to the
	 * spacing of the block grid. The dimensionality of size is expected to be
	 * equal to the dimensionality of the dataset. Consistency is not enforced.
	 *
	 * @return size of the data block
	 */
	int[] getSize();

	/**
	 * Returns the position of this data block on the block grid relative to dataset.
	 *
	 * The dimensionality of the grid position is expected to be equal to the
	 * dimensionality of the dataset. Consistency is not enforced.
	 *
	 * @return position on the block grid
	 */
	long[] getGridPosition();

	/**
	 * Returns the data object held by this data block.
	 *
	 * @return data object
	 */
	T getData();

	/**
	 * Returns the number of elements in this {@link DataBlock}. This number is
	 * not necessarily equal {@link #getNumElements(int[])
	 * getNumElements(getSize())}.
	 *
	 * @return the number of elements
	 */
	int getNumElements();

	/**
	 * Returns the number of elements in a box of given size.
	 *
	 * @param size
	 *            the size
	 * @return the number of elements
	 */
	static int getNumElements(final int[] size) {

		int n = size[0];
		for (int i = 1; i < size.length; ++i)
			n *= size[i];
		return n;
	}

	/**
	 * Factory for creating {@code DataBlock}.
	 *
	 * @param <T>
	 *            type of the data contained in the DataBlock
	 */
	interface DataBlockFactory<T> {

		/**
		 * Create a new {@link DataBlock} with the given {@code blockSize}, {@code gridPosition}, and {@code data} content.
		 *
		 * @param blockSize
		 *            the block size
		 * @param gridPosition
		 *            the grid position
		 * @param data
		 *            the data object
		 *
		 * @return a new DataBlock
		 */
		DataBlock<T> createDataBlock(int[] blockSize, long[] gridPosition, T data);
	}
}
================================================
FILE: src/main/java/org/janelia/saalfeldlab/n5/DataType.java
================================================
package org.janelia.saalfeldlab.n5;
import java.lang.reflect.Type;
import com.google.gson.JsonDeserializationContext;
import com.google.gson.JsonDeserializer;
import com.google.gson.JsonElement;
import com.google.gson.JsonParseException;
import com.google.gson.JsonPrimitive;
import com.google.gson.JsonSerializationContext;
import com.google.gson.JsonSerializer;
/**
* Enumerates available data types.
*
* @author Stephan Saalfeld
*/
public enum DataType {
UINT8(
"uint8",
(blockSize, gridPosition, numElements) -> new ByteArrayDataBlock(
blockSize,
gridPosition,
new byte[numElements])),
UINT16(
"uint16",
(blockSize, gridPosition, numElements) -> new ShortArrayDataBlock(
blockSize,
gridPosition,
new short[numElements])),
UINT32(
"uint32",
(blockSize, gridPosition, numElements) -> new IntArrayDataBlock(
blockSize,
gridPosition,
new int[numElements])),
UINT64(
"uint64",
(blockSize, gridPosition, numElements) -> new LongArrayDataBlock(
blockSize,
gridPosition,
new long[numElements])),
INT8(
"int8",
(blockSize, gridPosition, numElements) -> new ByteArrayDataBlock(
blockSize,
gridPosition,
new byte[numElements])),
INT16(
"int16",
(blockSize, gridPosition, numElements) -> new ShortArrayDataBlock(
blockSize,
gridPosition,
new short[numElements])),
INT32(
"int32",
(blockSize, gridPosition, numElements) -> new IntArrayDataBlock(
blockSize,
gridPosition,
new int[numElements])),
INT64(
"int64",
(blockSize, gridPosition, numElements) -> new LongArrayDataBlock(
blockSize,
gridPosition,
new long[numElements])),
FLOAT32(
"float32",
(blockSize, gridPosition, numElements) -> new FloatArrayDataBlock(
blockSize,
gridPosition,
new float[numElements])),
FLOAT64(
"float64",
(blockSize, gridPosition, numElements) -> new DoubleArrayDataBlock(
blockSize,
gridPosition,
new double[numElements])),
STRING(
"string",
(blockSize, gridPosition, numElements) -> new StringDataBlock(
blockSize,
gridPosition,
new String[numElements])),
OBJECT(
"object",
(blockSize, gridPosition, numElements) -> new ByteArrayDataBlock(
blockSize,
gridPosition,
new byte[numElements]));
private final String label;
private final DataBlockFactory dataBlockFactory;
DataType(final String label, final DataBlockFactory dataBlockFactory) {
this.label = label;
this.dataBlockFactory = dataBlockFactory;
}
@Override
public String toString() {
return label;
}
public static DataType fromString(final String string) {
for (final DataType value : values())
if (value.toString().equals(string))
return value;
return null;
}
/**
* Factory for {@link DataBlock DataBlocks}.
*
* @param blockSize
* the block size
* @param gridPosition
* the grid position
* @param numElements
* the number of elements (not necessarily one element per block
* element)
* @return the data block
*/
public DataBlock> createDataBlock(final int[] blockSize, final long[] gridPosition, final int numElements) {
return dataBlockFactory.createDataBlock(blockSize, gridPosition, numElements);
}
/**
* Factory for {@link DataBlock DataBlocks} with one data element for each
* block element (e.g. pixel image).
*
* @param blockSize
* the block size
* @param gridPosition
* the grid position
* @return the data block
*/
public DataBlock> createDataBlock(final int[] blockSize, final long[] gridPosition) {
return dataBlockFactory.createDataBlock(blockSize, gridPosition, DataBlock.getNumElements(blockSize));
}
private interface DataBlockFactory {
DataBlock> createDataBlock(final int[] blockSize, final long[] gridPosition, final int numElements);
}
static public class JsonAdapter implements JsonDeserializer, JsonSerializer {
@Override
public DataType deserialize(
final JsonElement json,
final Type typeOfT,
final JsonDeserializationContext context) throws JsonParseException {
return DataType.fromString(json.getAsString());
}
@Override
public JsonElement serialize(
final DataType src,
final Type typeOfSrc,
final JsonSerializationContext context) {
return new JsonPrimitive(src.toString());
}
}
}
================================================
FILE: src/main/java/org/janelia/saalfeldlab/n5/DatasetAttributes.java
================================================
package org.janelia.saalfeldlab.n5;
import com.google.gson.JsonDeserializationContext;
import com.google.gson.JsonDeserializer;
import com.google.gson.JsonElement;
import com.google.gson.JsonNull;
import com.google.gson.JsonObject;
import com.google.gson.JsonParseException;
import com.google.gson.JsonSerializationContext;
import com.google.gson.JsonSerializer;
import org.janelia.saalfeldlab.n5.codec.BlockCodec;
import org.janelia.saalfeldlab.n5.codec.BlockCodecInfo;
import org.janelia.saalfeldlab.n5.codec.CodecInfo;
import org.janelia.saalfeldlab.n5.codec.N5BlockCodecInfo;
import org.janelia.saalfeldlab.n5.shard.DatasetAccess;
import org.janelia.saalfeldlab.n5.shard.DefaultDatasetAccess;
import org.janelia.saalfeldlab.n5.shard.ShardCodecInfo;
import org.janelia.saalfeldlab.n5.shard.Nesting.NestedGrid;
import java.io.Serializable;
import java.lang.reflect.Type;
import java.util.Arrays;
import java.util.HashMap;
import java.util.stream.Collectors;
import org.janelia.saalfeldlab.n5.codec.DataCodecInfo;
import org.janelia.saalfeldlab.n5.codec.DatasetCodec;
import org.janelia.saalfeldlab.n5.codec.DatasetCodecInfo;
/**
* Mandatory dataset attributes:
*
*
* - long[] : dimensions
* - int[] : blockSize
* - {@link DataType} : dataType
* - {@link CodecInfo}... : encode/decode routines
*
*
* @author Stephan Saalfeld
*/
public class DatasetAttributes implements Serializable {
private static final long serialVersionUID = -4521467080388947553L;
public static final String DIMENSIONS_KEY = "dimensions";
public static final String BLOCK_SIZE_KEY = "blockSize";
public static final String SHARD_SIZE_KEY = "shardSize";
public static final String DATA_TYPE_KEY = "dataType";
public static final String COMPRESSION_KEY = "compression";
public static final String CODEC_KEY = "codecs";
public static final String[] N5_DATASET_ATTRIBUTES = new String[]{
DIMENSIONS_KEY, BLOCK_SIZE_KEY, DATA_TYPE_KEY, COMPRESSION_KEY, CODEC_KEY
};
/* version 0 */
protected static final String compressionTypeKey = "compressionType";
private final long[] dimensions;
// number of samples per chunk per dimension
private final int[] chunkSize;
// number of samples per block per dimension
// identical to chunkSize for non-sharded datasets
private final int[] blockSize;
private final DataType dataType;
private final JsonElement defaultValue;
private final BlockCodecInfo blockCodecInfo;
private final DataCodecInfo[] dataCodecInfos;
private final DatasetCodecInfo[] datasetCodecInfos;
private transient final DatasetAccess> access;
public DatasetAttributes(
		final long[] dimensions,
		final int[] blockSize,
		final DataType dataType,
		final JsonElement defaultValue,
		final BlockCodecInfo blockCodecInfo,
		final DatasetCodecInfo[] datasetCodecInfos,
		final DataCodecInfo... dataCodecInfos) {

	// Primary constructor; all other constructors delegate here.
	this.dimensions = dimensions;
	this.dataType = dataType;
	this.blockSize = blockSize;
	// JsonNull stands in for "no default value" so the field is never null.
	this.defaultValue = defaultValue == null ? JsonNull.INSTANCE : defaultValue;
	this.blockCodecInfo = blockCodecInfo == null ? defaultBlockCodecInfo() : blockCodecInfo;
	this.datasetCodecInfos = datasetCodecInfos;
	// Drop null entries and RawCompression (treated as "no compression").
	if (dataCodecInfos == null)
		this.dataCodecInfos = new DataCodecInfo[0];
	else
		this.dataCodecInfos = Arrays.stream(dataCodecInfos)
				.filter(it -> it != null && !(it instanceof RawCompression))
				.toArray(DataCodecInfo[]::new);

	access = createDatasetAccess();
	// The chunk size is the inner-most (level 0) block size of the nested grid;
	// identical to blockSize for non-sharded datasets.
	chunkSize = access.getGrid().getBlockSize(0);
}
public DatasetAttributes(
		final long[] dimensions,
		final int[] outerBlockSize,
		final DataType dataType,
		final BlockCodecInfo blockCodecInfo,
		final DatasetCodecInfo[] datasetCodecInfos,
		final DataCodecInfo... dataCodecInfos) {

	// Delegates to the primary constructor with JsonNull as "no default value".
	this(dimensions, outerBlockSize, dataType, JsonNull.INSTANCE,
			blockCodecInfo, datasetCodecInfos, dataCodecInfos);
}

public DatasetAttributes(
		final long[] dimensions,
		final int[] outerBlockSize,
		final DataType dataType,
		final BlockCodecInfo blockCodecInfo,
		final DataCodecInfo... dataCodecInfos) {

	// Delegates with no dataset-level codecs.
	this(dimensions, outerBlockSize, dataType, blockCodecInfo, null, dataCodecInfos);
}
/**
 * Constructs a DatasetAttributes instance with specified dimensions, block size, data type,
 * and single compressor with default codec.
 *
 * @param dimensions the dimensions of the dataset
 * @param blockSize the size of the blocks in the dataset
 * @param dataType the data type of the dataset
 * @param dataCodecInfos the codecs used to encode/decode the data
 */
public DatasetAttributes(
		final long[] dimensions,
		final int[] blockSize,
		final DataType dataType,
		final DataCodecInfo... dataCodecInfos) {

	// null BlockCodecInfo selects the default block codec (see defaultBlockCodecInfo()).
	this(dimensions, blockSize, dataType, null, dataCodecInfos);
}

/**
 * Constructs a DatasetAttributes instance with specified dimensions, block size, data type,
 * and default codecs.
 *
 * @param dimensions the dimensions of the dataset
 * @param blockSize the size of the blocks in the dataset
 * @param dataType the data type of the dataset
 */
public DatasetAttributes(
		final long[] dimensions,
		final int[] blockSize,
		final DataType dataType) {

	this(dimensions, blockSize, dataType, new DataCodecInfo[0]);
}
// Builds the DatasetAccess for this dataset by unwrapping the (possibly
// nested) shard codec hierarchy into per-level block sizes and codecs.
// NOTE(review): generic type parameters appear to have been stripped from this
// file during extraction (e.g. "DatasetAccess>" and "BlockCodec>[]" below);
// restore them against the original source before compiling.
private DatasetAccess> createDatasetAccess() {

	final int m = nestingDepth(blockCodecInfo);
	// There are m codecs: 1 DataBlock codec, and m-1 shard codecs.
	// The inner-most codec (the DataBlock codec) is at index 0.
	final int[][] blockSizes = new int[m][];

	// NestedGrid validates block sizes, so instantiate it before creating the
	// blockCodecs: blockCodecInfo.create below could fail unexpectedly with
	// invalid blockSizes, so validate first.
	blockSizes[m - 1] = blockSize;
	BlockCodecInfo tmpInfo = blockCodecInfo;
	for (int l = m - 1; l > 0; --l) {
		final ShardCodecInfo info = (ShardCodecInfo)tmpInfo;
		blockSizes[l - 1] = info.getInnerBlockSize();
		tmpInfo = info.getInnerBlockCodecInfo();
	}

	BlockCodecInfo currentBlockCodecInfo = blockCodecInfo;
	DataCodecInfo[] currentDataCodecInfos = dataCodecInfos;
	DatasetCodecInfo[] datasetCodecInfos = this.datasetCodecInfos;
	final NestedGrid grid = new NestedGrid(blockSizes, dimensions);
	// Create codecs outer-to-inner; each shard level supplies the info for the
	// level below it.
	final BlockCodec>[] blockCodecs = new BlockCodec[m];
	for (int l = m - 1; l >= 0; --l) {
		blockCodecs[l] = currentBlockCodecInfo.create(dataType, blockSizes[l], currentDataCodecInfos);
		if (l > 0) {
			final ShardCodecInfo info = (ShardCodecInfo) currentBlockCodecInfo;
			currentBlockCodecInfo = info.getInnerBlockCodecInfo();
			currentDataCodecInfos = info.getInnerDataCodecInfos();
			if (info.getInnerDataCodecInfos() != null) {
				// Dataset codecs may be declared either inside or outside a
				// shard, but not both.
				if (datasetCodecInfos != null && datasetCodecInfos.length > 0) {
					throw new N5Exception.N5JsonParseException("Found DatasetCodecs both inside and outside of shards. Not handled");
				}
				else
					datasetCodecInfos = info.getInnerDatasetCodecInfos();
			}
		}
	}

	// add dataset codecs, wrapping the inner-most (DataBlock) codec
	blockCodecs[0] = blockCodecWithDatasetCodecs(this, blockCodecs[0], datasetCodecInfos);
	return new DefaultDatasetAccess<>(grid, blockCodecs);
}
// Wraps the given block codec with each dataset-level codec, in order.
// NOTE(review): generic type parameters appear stripped here (likely
// "BlockCodec<?>"); restore against the original source before compiling.
@SuppressWarnings("unchecked")
private static BlockCodec> blockCodecWithDatasetCodecs(final DatasetAttributes attributes, final BlockCodec> blockCodec,
		final DatasetCodecInfo[] datasetCodecInfos) {

	BlockCodec> result = blockCodec;
	if (datasetCodecInfos != null) {
		for (final DatasetCodecInfo info : datasetCodecInfos) {
			result = DatasetCodec.concatenate(info.create(attributes), (BlockCodec)result);
		}
	}
	return result;
}
/**
 * Returns the codec nesting depth: 1 for a plain block codec, plus one for
 * every enclosing shard level.
 */
private static int nestingDepth(BlockCodecInfo info) {
	// Iterative equivalent of walking the ShardCodecInfo chain recursively.
	int depth = 1;
	BlockCodecInfo current = info;
	while (current instanceof ShardCodecInfo) {
		++depth;
		current = ((ShardCodecInfo) current).getInnerBlockCodecInfo();
	}
	return depth;
}
/**
 * The block codec used when none is specified: the classic N5 block layout.
 * Subclasses may override to change the default.
 */
protected BlockCodecInfo defaultBlockCodecInfo() {
	return new N5BlockCodecInfo();
}
/** Returns the dataset dimensions. Note: returns the internal array, not a copy. */
public long[] getDimensions() {
	return dimensions;
}
/** Returns the number of dimensions of the dataset. */
public int getNumDimensions() {
	return dimensions.length;
}
/** Returns the chunk size (outermost write unit). Note: returns the internal array, not a copy. */
public int[] getChunkSize() {
	return chunkSize;
}
/** Returns the block size. Note: returns the internal array, not a copy. */
public int[] getBlockSize() {
	return blockSize;
}
/** Returns the default (fill) value as JSON, or null if none was set. */
public JsonElement getDefaultValue() {
	return defaultValue;
}
/** Returns true iff the outermost block codec is a shard codec. */
public boolean isSharded() {
	return blockCodecInfo instanceof ShardCodecInfo;
}
/**
 * Only used for deserialization for N5 backwards compatibility.
 * {@link Compression} is no longer a special case. Prefer to reference {@link #getDataCodecInfos()}.
 * Will return {@link RawCompression} if no compression is otherwise provided, for legacy compatibility.
 *
 * @deprecated in favor of {@link #getDataCodecInfos()}.
 *
 * @return compression CodecInfo, if one was present, or else RawCompression
 */
@Deprecated
public Compression getCompression() {

	return Arrays.stream(dataCodecInfos)
			.filter(it -> it instanceof Compression)
			.map(it -> (Compression)it)
			.findFirst()
			// orElseGet: only allocate the RawCompression fallback when no
			// Compression codec is present (orElse allocated unconditionally).
			.orElseGet(RawCompression::new);
}
/** Returns the data type of the dataset. */
public DataType getDataType() {
	return dataType;
}
/**
 * Get the {@link DatasetAccess} for this dataset.
 *
 * @return the {@code DatasetAccess} for this dataset
 */
protected DatasetAccess getDatasetAccess() {
	// Unchecked cast of the lazily/field-held access object.
	return (DatasetAccess) access;
}
/**
 * Returns the {@code NestedGrid} for this dataset, from which block and
 * shard sizes are accessible.
 *
 * @return the NestedGrid
 */
public NestedGrid getNestedBlockGrid() {
	return getDatasetAccess().getGrid();
}
/** Returns the (outermost) block codec info. */
public BlockCodecInfo getBlockCodecInfo() {
	return blockCodecInfo;
}
/** Returns the data codec infos. Note: returns the internal array, not a copy. */
public DataCodecInfo[] getDataCodecInfos() {
	return dataCodecInfos;
}
/** Returns the dataset codec infos. Note: returns the internal array, not a copy. */
public DatasetCodecInfo[] getDatasetCodecInfos() {
	return datasetCodecInfos;
}
/**
 * Returns the relative storage path for the block at the given grid position:
 * the coordinates joined by {@code '/'} (empty string for no coordinates).
 */
public String relativeBlockPath(long... position) {
	final StringBuilder sb = new StringBuilder();
	for (int d = 0; d < position.length; ++d) {
		if (d > 0)
			sb.append('/');
		sb.append(position[d]);
	}
	return sb.toString();
}
/**
 * Returns the legacy N5 metadata key/value view of these attributes
 * (dimensions, blockSize, dataType, compression).
 */
public HashMap asMap() {
	final HashMap map = new HashMap<>();
	map.put(DIMENSIONS_KEY, dimensions);
	// legacy N5 writes the chunk size under the blockSize key
	map.put(BLOCK_SIZE_KEY, chunkSize);
	map.put(DATA_TYPE_KEY, dataType);
	map.put(COMPRESSION_KEY, getCompression());
	return map;
}
/** Creates a {@link Builder} for a dataset with the given dimensions and data type. */
public static Builder builder(final long[] dimensions, final DataType dataType) {
	return new Builder(dimensions, dataType);
}
/** Creates a {@link Builder} initialized from existing attributes. */
public static Builder builder(final DatasetAttributes attributes) {
	return new Builder(attributes);
}
// Default block sizes by dimensionality; for >3 dimensions every dimension
// defaults to DEFAULT_ND_DIM_LEN.
private static final int[] DEFAULT_1D_BLOCK_SIZE = new int[]{65536};
private static final int[] DEFAULT_2D_BLOCK_SIZE = new int[]{512,512};
private static final int[] DEFAULT_3D_BLOCK_SIZE = new int[]{128,128,128};
private static final int DEFAULT_ND_DIM_LEN = 64;
/**
 * Returns a default block size for the given dataset dimensions: a
 * dimensionality-specific preset, with each dimension clamped so no block
 * extends beyond the dataset extent.
 */
protected static int[] defaultBlockSize(final long[] dimensions) {
	final int nd = dimensions.length;
	final int[] blockSize;
	switch (nd) {
	case 1:
		blockSize = DEFAULT_1D_BLOCK_SIZE.clone();
		break;
	case 2:
		blockSize = DEFAULT_2D_BLOCK_SIZE.clone();
		break;
	case 3:
		blockSize = DEFAULT_3D_BLOCK_SIZE.clone();
		break;
	default:
		blockSize = new int[nd];
		Arrays.fill(blockSize, DEFAULT_ND_DIM_LEN);
		break;
	}
	// Clamp each dimension of the block to the dataset extent.
	for (int d = 0; d < blockSize.length; d++)
		blockSize[d] = (int)Math.min(blockSize[d], dimensions[d]);
	return blockSize;
}
/**
 * Builder for {@link DatasetAttributes}. Dimensions and data type are fixed
 * at construction; block size and compression are optional.
 */
public static class Builder {

	private final long[] dimensions;
	private final DataType dataType;
	private int[] blockSize;
	private DataCodecInfo[] dataCodecInfos = new DataCodecInfo[0];

	/**
	 * Creates a builder with the given dimensions (copied) and data type.
	 * The block size defaults to {@link DatasetAttributes#defaultBlockSize}.
	 *
	 * @param dimensions the dataset dimensions
	 * @param dataType the data type
	 */
	public Builder(final long[] dimensions, final DataType dataType) {
		this.dimensions = dimensions.clone();
		this.dataType = dataType;
		this.blockSize = defaultBlockSize(dimensions);
	}

	/**
	 * Creates a builder initialized from existing attributes.
	 *
	 * @param attributes the attributes to copy from
	 */
	public Builder(final DatasetAttributes attributes) {
		// Defensive copies: the accessors return the attributes' internal
		// arrays. Clone them so the builder cannot alias the source state,
		// consistent with the other constructor and with blockSize(int[]).
		this.dimensions = attributes.getDimensions().clone();
		this.dataType = attributes.getDataType();
		this.blockSize = attributes.getBlockSize().clone();
		this.dataCodecInfos = attributes.getDataCodecInfos().clone();
	}

	/**
	 * Sets the block size (copied).
	 *
	 * @param blockSize the block size
	 * @return this builder
	 */
	public Builder blockSize(final int[] blockSize) {
		this.blockSize = blockSize.clone();
		return this;
	}

	/**
	 * Sets the compression codec. Has no effect if {@code compression} is
	 * null or {@link RawCompression}.
	 *
	 * @param compression the compression to use
	 * @return this builder
	 */
	public Builder compression(final Compression compression) {
		if (compression != null && !(compression instanceof RawCompression))
			this.dataCodecInfos = new DataCodecInfo[]{compression};
		return this;
	}

	/**
	 * Builds the attributes using the default N5 block codec.
	 *
	 * @return the new {@link DatasetAttributes}
	 */
	public DatasetAttributes build() {
		final int[] resolvedBlockSize = blockSize != null ? blockSize : defaultBlockSize(dimensions);
		return new DatasetAttributes(dimensions, resolvedBlockSize, dataType, new N5BlockCodecInfo(), null, dataCodecInfos);
	}
}
// Singleton Gson adapter. Eagerly initialized and final: the previous
// unsynchronized lazy initialization had a benign data race that could
// create multiple adapter instances under concurrent first use.
private static final DatasetAttributesAdapter adapter = new DatasetAttributesAdapter();

/**
 * Returns the shared JSON adapter for (de)serializing {@link DatasetAttributes}.
 *
 * @return the adapter singleton
 */
public static DatasetAttributesAdapter getJsonAdapter() {
	return adapter;
}
/**
 * Gson adapter for {@link DatasetAttributes}. Deserialization understands
 * three metadata flavors: a codec list, a legacy N5 compression object, and
 * the version-0 compression name string. Serialization writes only the
 * legacy N5 keys.
 */
public static class DatasetAttributesAdapter implements JsonSerializer, JsonDeserializer {

	/**
	 * Deserializes attributes from JSON. Returns null (rather than throwing)
	 * when {@code json} is not an object or lacks the required keys, so the
	 * caller can fall back to other handling.
	 */
	@Override public DatasetAttributes deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws JsonParseException {
		if (json == null || !json.isJsonObject())
			return null;
		final JsonObject obj = json.getAsJsonObject();
		// Required: dimensions, blockSize, dataType, and at least one codec/compression key.
		final boolean validKeySet = obj.has(DIMENSIONS_KEY)
				&& obj.has(BLOCK_SIZE_KEY)
				&& obj.has(DATA_TYPE_KEY)
				&& (obj.has(CODEC_KEY) || obj.has(COMPRESSION_KEY) || obj.has(compressionTypeKey));
		if (!validKeySet)
			return null;
		final long[] dimensions = context.deserialize(obj.get(DIMENSIONS_KEY), long[].class);
		final int[] blockSize = context.deserialize(obj.get(BLOCK_SIZE_KEY), int[].class);
		final DataType dataType = context.deserialize(obj.get(DATA_TYPE_KEY), DataType.class);
		final BlockCodecInfo blockCodecInfo;
		final DataCodecInfo[] dataCodecs;
		if (obj.has(CODEC_KEY)) {
			// Codec list: first entry is the block codec, the rest are data codecs.
			final CodecInfo[] codecs = context.deserialize(obj.get(CODEC_KEY), CodecInfo[].class);
			blockCodecInfo = (BlockCodecInfo)codecs[0];
			dataCodecs = new DataCodecInfo[codecs.length - 1];
			for (int i = 1; i < codecs.length; i++) {
				dataCodecs[i - 1] = (DataCodecInfo)codecs[i];
			}
		} else if (obj.has(COMPRESSION_KEY)) {
			// Legacy N5: a single "compression" object.
			final Compression compression = CompressionAdapter.getJsonAdapter().deserialize(obj.get(COMPRESSION_KEY), Compression.class, context);
			dataCodecs = new DataCodecInfo[]{compression};
			blockCodecInfo = new N5BlockCodecInfo();
		} else if (obj.has(compressionTypeKey)) {
			// Version-0 N5: compression identified by a plain name string.
			final Compression compression = getCompressionVersion0(obj.get(compressionTypeKey).getAsString());
			dataCodecs = new DataCodecInfo[]{compression};
			blockCodecInfo = new N5BlockCodecInfo();
		} else {
			return null;
		}
		return new DatasetAttributes(dimensions, blockSize, dataType, blockCodecInfo, dataCodecs);
	}

	//FIXME
	// this implements multi-codec serialization for N5. We probably don't want this now
	/**
	 * Serializes only the legacy N5 keys (dimensions, blockSize, dataType,
	 * compression); at most the first data codec is written.
	 */
	@Override public JsonElement serialize(DatasetAttributes src, Type typeOfSrc, JsonSerializationContext context) {
		final JsonObject obj = new JsonObject();
		obj.add(DIMENSIONS_KEY, context.serialize(src.dimensions));
		obj.add(BLOCK_SIZE_KEY, context.serialize(src.chunkSize));
		obj.add(DATA_TYPE_KEY, context.serialize(src.dataType));
		final DataCodecInfo[] codecs = src.dataCodecInfos;
		// length > 1 is actually invalid, but this is checked on construction
		if (codecs.length == 0)
			obj.add(COMPRESSION_KEY, context.serialize(new RawCompression()));
		else
			obj.add(COMPRESSION_KEY, context.serialize(codecs[0]));
		return obj;
	}

	// Maps version-0 compression names to Compression instances;
	// returns null for unrecognized names.
	private static Compression getCompressionVersion0(final String compressionVersion0Name) {
		switch (compressionVersion0Name) {
		case "raw":
			return new RawCompression();
		case "gzip":
			return new GzipCompression();
		case "bzip2":
			return new Bzip2Compression();
		case "lz4":
			return new Lz4Compression();
		case "xz":
			return new XzCompression();
		}
		return null;
	}
}
}
================================================
FILE: src/main/java/org/janelia/saalfeldlab/n5/DoubleArrayDataBlock.java
================================================
package org.janelia.saalfeldlab.n5;
/**
 * A {@code DataBlock} backed by a {@code double[]}; the number of elements is
 * the array length.
 */
public class DoubleArrayDataBlock extends AbstractDataBlock {

	public DoubleArrayDataBlock(final int[] size, final long[] gridPosition, final double[] data) {
		super(size, gridPosition, data, a -> a.length);
	}
}
================================================
FILE: src/main/java/org/janelia/saalfeldlab/n5/FileKeyLockManager.java
================================================
package org.janelia.saalfeldlab.n5;
import java.io.IOException;
import java.lang.ref.ReferenceQueue;
import java.lang.ref.WeakReference;
import java.nio.channels.FileLock;
import java.nio.file.Path;
import java.util.Collections;
import java.util.EnumMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import static org.janelia.saalfeldlab.n5.LockingPolicy.STRICT;
/**
* Provides thread-safe and process-safe read/write locking for filesystem paths.
* Uses thread locks for JVM coordination and file locks for inter-process coordination.
*/
class FileKeyLockManager {

	// One shared manager per LockingPolicy. The EnumMap is wrapped in a
	// synchronized view because computeIfAbsent may race across threads.
	private static final Map managers = Collections.synchronizedMap(new EnumMap<>(LockingPolicy.class));

	/** Returns the shared manager for the given policy, creating it on first use. */
	static FileKeyLockManager forPolicy(final LockingPolicy policy) {
		return managers.computeIfAbsent(policy, FileKeyLockManager::new);
	}

	/**
	 * @deprecated use {@link FileKeyLockManager#forPolicy(LockingPolicy)}
	 */
	@Deprecated
	static final FileKeyLockManager FILE_LOCK_MANAGER = forPolicy(STRICT);

	// OS-level locking policy applied to every lock obtained via this manager.
	private final LockingPolicy policy;

	/**
	 * Create a new {@link FileKeyLockManager} with the specified locking policy.
	 *
	 * The given locking {@link LockingPolicy policy} applies to OS-level locking.
	 * For both the {@code STRICT} and {@code PERMISSIVE} policy, a {@link
	 * FileLock} is obtained. If this fails, {@code STRICT} will throw an {@code
	 * IOException}. {@code PERMISSIVE} will proceed without locking. {@code
	 * UNSAFE} will not attempt OS-level locking, however will still manage
	 * mutual exclusion of readers and writers in the same JVM. Trying to lock
	 * the same path with different locking policies will throw an {@code
	 * IOException}.
	 *
	 * @param policy
	 *            the locking policy
	 */
	private FileKeyLockManager(final LockingPolicy policy) {
		this.policy = policy;
	}

	// Per-path lock state, keyed by absolute path string. Values are held
	// weakly so states for no-longer-used paths can be garbage-collected.
	private final ConcurrentHashMap locks = new ConcurrentHashMap<>();
	private final ReferenceQueue refQueue = new ReferenceQueue<>();

	// Weak reference that remembers its map key so the stale map entry can be
	// removed once the referent KeyLockState has been collected.
	private static class WeakValue extends WeakReference {
		final String key;
		WeakValue(
				final String key,
				final KeyLockState value,
				final ReferenceQueue queue) {
			super(value, queue);
			this.key = key;
		}
	}

	/**
	 * Remove entries from the cache whose references have been
	 * garbage-collected.
	 */
	private void cleanUp()
	{
		while (true) {
			final WeakValue ref = (WeakValue) refQueue.poll();
			if (ref == null)
				break;
			// two-arg remove: only remove if the map still holds this exact stale ref
			locks.remove(ref.key, ref);
		}
	}

	// Returns the live KeyLockState for the given path, creating and caching
	// one if no live state exists.
	private KeyLockState keyLockState(final Path path, final LockingPolicy policy) throws IOException {
		final String key = path.toAbsolutePath().toString();
		cleanUp();
		// Fast path: an existing, still-live state.
		final WeakValue existingRef = locks.get(key);
		KeyLockState state = existingRef == null ? null : existingRef.get();
		if (state != null) {
			return state;
		}
		final KeyLockState newState = new KeyLockState(path, policy);
		// Loop until a live state is obtained: a competing thread's state may
		// be collected between compute() and get(), in which case we retry.
		while (state == null) {
			final WeakValue ref = locks.compute(key,
					(k, v) -> (v != null && v.get() != null)
							? v
							: new WeakValue(k, newState, refQueue));
			state = ref.get();
		}
		return state;
	}

	/**
	 * Acquires a read lock for the specified key. Multiple threads can hold
	 * read locks for the same key simultaneously.
	 *
	 * The first reader will acquire a shared file lock. Subsequent readers
	 * only acquire the thread-level lock.
	 *
	 * @param path
	 *            the key (file path) to lock for reading
	 *
	 * @return a {@link LockedChannel} that must be closed when done
	 *
	 * @throws IOException
	 *             if acquiring the file lock fails
	 */
	public LockedFileChannel lockForReading(final Path path) throws IOException {
		return keyLockState(path, policy).acquireRead();
	}

	/**
	 * Acquires a write lock for the specified key. Only one thread can hold a
	 * write lock for a key at a time, and no readers can hold locks.
	 *
	 * @param path
	 *            the file path to lock for writing
	 *
	 * @return a {@link LockedChannel} that must be closed when done
	 *
	 * @throws IOException
	 *             if acquiring the file lock fails
	 */
	public LockedFileChannel lockForWriting(final Path path) throws IOException {
		return keyLockState(path, policy).acquireWrite();
	}

	/**
	 * Returns the number of keys currently being tracked.
	 *
	 * @return the number of keys with associated locks
	 */
	int size() {
		return locks.size();
	}
}
================================================
FILE: src/main/java/org/janelia/saalfeldlab/n5/FileSystemKeyValueAccess.java
================================================
package org.janelia.saalfeldlab.n5;
import java.io.File;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.ByteBuffer;
import java.nio.file.DirectoryNotEmptyException;
import java.nio.file.FileAlreadyExistsException;
import java.nio.file.FileSystemException;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.FileAttribute;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Iterator;
import java.util.stream.Stream;
import org.janelia.saalfeldlab.n5.N5Exception.N5IOException;
import org.janelia.saalfeldlab.n5.N5Exception.N5NoSuchKeyException;
import org.janelia.saalfeldlab.n5.readdata.LazyRead;
import org.janelia.saalfeldlab.n5.readdata.ReadData;
import org.janelia.saalfeldlab.n5.readdata.VolatileReadData;
/**
* Filesystem {@link KeyValueAccess}.
*
* @author Stephan Saalfeld
* @author Igor Pisarev
* @author Philipp Hanslovsky
*/
public class FileSystemKeyValueAccess implements KeyValueAccess {
private final FileKeyLockManager fileKeyLockManager;
public FileSystemKeyValueAccess() {
final LockingPolicy policy = LockingPolicy.fromString(System.getProperty("n5.ioPolicy", "permissive"));
this.fileKeyLockManager = FileKeyLockManager.forPolicy(policy);
}
private LockedFileChannel lockForReading(final Path path) throws N5IOException {
try {
return fileKeyLockManager.lockForReading(path);
} catch (final NoSuchFileException e) {
throw new N5NoSuchKeyException("No such file", e);
} catch (IOException | UncheckedIOException e) {
throw new N5IOException("Failed to lock file for reading: " + path, e);
}
}
private LockedFileChannel lockForWriting(final Path path) throws N5IOException {
try {
return fileKeyLockManager.lockForWriting(path);
} catch (final NoSuchFileException e) {
throw new N5NoSuchKeyException("No such file", e);
} catch (IOException | UncheckedIOException e) {
throw new N5IOException("Failed to lock file for writing: " + path, e);
}
}
@Override
public VolatileReadData createReadData(final String normalPath) {
return VolatileReadData.from(new FileLazyRead(Paths.get(normalPath)));
}
@Override
public void write(final String normalPath, final ReadData data) throws N5IOException {
final Path path = Paths.get(normalPath);
try (final LockedFileChannel channel = lockForWriting(path)) {
data.writeTo(channel.asOutputStream());
} catch (IOException e) {
throw new N5IOException(e);
}
}
@Override
public boolean isDirectory(final String normalPath) {
final Path path = Paths.get(normalPath);
return Files.isDirectory(path);
}
@Override
public boolean isFile(final String normalPath) {
final Path path = Paths.get(normalPath);
return Files.isRegularFile(path);
}
@Override
public boolean exists(final String normalPath) {
final Path path = Paths.get(normalPath);
return Files.exists(path);
}
@Override
public long size(final String normalPath) {
return size(Paths.get(normalPath));
}
private static long size(final Path path) {
try {
return Files.size(path);
} catch (NoSuchFileException e) {
throw new N5NoSuchKeyException("No such file", e);
} catch (IOException | UncheckedIOException e) {
throw new N5IOException(e);
}
}
@Override
public String[] listDirectories(final String normalPath) throws N5IOException {
final Path path = Paths.get(normalPath);
try (final Stream pathStream = Files.list(path)) {
return pathStream
.filter(Files::isDirectory)
.map(a -> path.relativize(a).toString())
.toArray(String[]::new);
} catch (NoSuchFileException e) {
throw new N5NoSuchKeyException("No such file", e);
} catch (IOException | UncheckedIOException e) {
throw new N5IOException("Failed to list directories", e);
}
}
@Override
public String[] list(final String normalPath) throws N5IOException {
final Path path = Paths.get(normalPath);
try (final Stream pathStream = Files.list(path)) {
return pathStream
.map(a -> path.relativize(a).toString())
.toArray(String[]::new);
} catch (NoSuchFileException e) {
throw new N5NoSuchKeyException("No such file", e);
} catch (IOException | UncheckedIOException e) {
throw new N5IOException("Failed to list files", e);
}
}
@Override
public String[] components(final String path) {
final Path fsPath = Paths.get(path);
final Path root = fsPath.getRoot();
final String separator = fsPath.getFileSystem().getSeparator();
final String[] components;
int o;
if (root == null) {
components = new String[fsPath.getNameCount()];
o = 0;
} else {
components = new String[fsPath.getNameCount() + 1];
components[0] = root.toString();
o = 1;
}
for (int i = o; i < components.length; ++i) {
String name = fsPath.getName(i - o).toString();
/* Preserve trailing slash on final component if present*/
if (i == components.length - 1) {
final String trailingSeparator = path.endsWith(separator) ? separator : path.endsWith("/") ? "/" : "";
name += trailingSeparator;
}
components[i] = name;
}
return components;
}
@Override
public String parent(final String path) {
final Path parent = Paths.get(path).getParent();
if (parent == null)
return null;
else
return parent.toString();
}
@Override
public String relativize(final String path, final String base) {
final Path basePath = Paths.get(base);
return basePath.relativize(Paths.get(path)).toString();
}
/**
* Returns a normalized path. It ensures correctness on both Unix and
* Windows,
* otherwise {@code pathName} is treated as UNC path on Windows, and
* {@code Paths.get(pathName, ...)} fails with {@code InvalidPathException}.
*
* @param path the path
* @return the normalized path, without leading slash
*/
@Override
public String normalize(final String path) {
return Paths.get(path).normalize().toString();
}
@Override
public URI uri(final String normalPath) throws URISyntaxException {
// normalize make absolute the scheme specific part only
try {
final URI normalUri = URI.create(normalPath);
if (normalUri.isAbsolute()) return normalUri.normalize();
} catch (final IllegalArgumentException e) {
return new File(normalPath).toURI().normalize();
}
return new File(normalPath).toURI().normalize();
}
@Override
public String compose(final String... components) {
if (components == null || components.length == 0)
return null;
if (components.length == 1)
return Paths.get(components[0]).toString();
return Paths.get(components[0], Arrays.copyOfRange(components, 1, components.length)).normalize().toString();
}
@Override
public String compose(URI uri, String... components) {
Path composedPath;
if (uri.isAbsolute())
composedPath = Paths.get(uri);
else
composedPath = Paths.get(uri.toString());
for (String component : components) {
if (component == null || component.isEmpty())
continue;
composedPath = composedPath.resolve(component);
}
return composedPath.toAbsolutePath().toString();
}
@Override
public void createDirectories(final String normalPath) throws N5IOException {
try {
createDirectories(Paths.get(normalPath));
} catch (NoSuchFileException e) {
throw new N5NoSuchKeyException("No such file", e);
} catch (IOException | UncheckedIOException e) {
throw new N5IOException("Failed to create directories", e);
}
}
@Override
public void delete(final String normalPath) throws N5IOException {
try {
final Path path = Paths.get(normalPath);
if (Files.isRegularFile(path))
try (final LockedFileChannel channel = lockForWriting(path)) {
Files.delete(path);
}
else {
try (final Stream pathStream = Files.walk(path)) {
for (final Iterator i = pathStream.sorted(Comparator.reverseOrder()).iterator(); i.hasNext();) {
final Path childPath = i.next();
if (Files.isRegularFile(childPath))
try (final LockedFileChannel channel = lockForWriting(childPath)) {
Files.delete(childPath);
}
else
tryDelete(childPath);
}
}
}
} catch (NoSuchFileException ignore) {
/* It doesn't exist; that's sufficient for us to not complain on a `delete` call */
} catch (IOException | UncheckedIOException e) {
throw new N5IOException("Failed to delete file at " + normalPath, e);
}
}
protected static void tryDelete(final Path path) throws IOException {
try {
Files.delete(path);
} catch (final DirectoryNotEmptyException e) {
/*
* Even though path is expected to be an empty directory, sometimes
* deletion fails on network filesystems when lock files are not
* cleared immediately after the leaves have been removed.
*/
try {
/* wait and reattempt */
Thread.sleep(100);
Files.delete(path);
} catch (final InterruptedException ex) {
e.printStackTrace();
Thread.currentThread().interrupt();
}
}
}
/**
* This is a copy of {@link Files#createDirectories(Path, FileAttribute...)}
* that follows symlinks.
*
* Workaround for https://bugs.openjdk.java.net/browse/JDK-8130464
*
* Creates a directory by creating all nonexistent parent directories first.
* Unlike the {@link Files#createDirectories} method, an exception
* is not thrown if the directory could not be created because it already
* exists.
*
*
* The {@code attrs} parameter is optional {@link FileAttribute
* file-attributes} to set atomically when creating the nonexistent
* directories. Each file attribute is identified by its {@link
* FileAttribute#name name}. If more than one attribute of the same name is
* included in the array then all but the last occurrence is ignored.
*
*
* If this method fails, then it may do so after creating some, but not
* all, of the parent directories.
*
* @param dir
* the directory to create
*
* @param attrs
* an optional list of file attributes to set atomically when
* creating the directory
*
* @return the directory
*
* @throws UnsupportedOperationException
* if the array contains an attribute that cannot be set
* atomically
* when creating the directory
* @throws FileAlreadyExistsException
* if {@code dir} exists but is not a directory (optional
* specific
* exception)
* @throws IOException
* if an I/O error occurs
* @throws SecurityException
* in the case of the default provider, and a security manager
* is
* installed, the {@link SecurityManager#checkWrite(String)
* checkWrite}
* method is invoked prior to attempting to create a directory
* and
* its {@link SecurityManager#checkRead(String) checkRead} is
* invoked for each parent directory that is checked. If {@code
* dir} is not an absolute path then its {@link Path#toAbsolutePath
* toAbsolutePath} may need to be invoked to get its absolute
* path.
* This may invoke the security manager's {@link
* SecurityManager#checkPropertyAccess(String)
* checkPropertyAccess}
* method to check access to the system property
* {@code user.dir}
*/
protected static Path createDirectories(Path dir, final FileAttribute>... attrs) throws IOException {
// attempt to create the directory
try {
createAndCheckIsDirectory(dir, attrs);
return dir;
} catch (final FileAlreadyExistsException x) {
// file exists and is not a directory
throw x;
} catch (final IOException x) {
// parent may not exist or other reason
}
SecurityException se = null;
try {
dir = dir.toAbsolutePath();
} catch (final SecurityException x) {
// don't have permission to get absolute path
se = x;
}
// find a descendant that exists
Path parent = dir.getParent();
while (parent != null) {
try {
parent.getFileSystem().provider().checkAccess(parent);
break;
} catch (final NoSuchFileException x) {
// does not exist
}
parent = parent.getParent();
}
if (parent == null) {
// unable to find existing parent
if (se == null) {
throw new FileSystemException(
dir.toString(),
null,
"Unable to determine if root directory exists");
} else {
throw se;
}
}
// create directories
Path child = parent;
for (final Path name : parent.relativize(dir)) {
child = child.resolve(name);
createAndCheckIsDirectory(child, attrs);
}
return dir;
}
/**
* This is a copy of a previous Files#createAndCheckIsDirectory(Path,
* FileAttribute...) method that follows symlinks.
*
* Workaround for https://bugs.openjdk.java.net/browse/JDK-8130464
*
* Used by createDirectories to attempt to create a directory. A no-op if the
* directory already exists.
*
* @param dir directory path
* @param attrs file attributes
* @throws IOException the exception
*/
protected static void createAndCheckIsDirectory(
final Path dir,
final FileAttribute>... attrs) throws IOException {
try {
Files.createDirectory(dir, attrs);
} catch (final FileAlreadyExistsException x) {
if (!Files.isDirectory(dir))
throw x;
}
}
/**
* Verify that the range {@code [offset, offset+length)} is fully contained in {@code [0, channelSize)}.
*
* @throws IndexOutOfBoundsException
* if range is not fully contained
*/
private static void validBounds(final long channelSize, final long offset, final long length) throws IndexOutOfBoundsException {
if (offset < 0)
throw new IndexOutOfBoundsException("offset must be > 0, but was: " + offset);
else if (channelSize > 0 && offset >= channelSize) // offset == 0 and channelSize == 0 is okay
throw new IndexOutOfBoundsException("offset (" + offset + ") must be less than channel size (" + channelSize + ")");
else if (length >= 0 && offset + length > channelSize)
throw new IndexOutOfBoundsException("offset + length (" + (offset + length) + ") must be less than channel size (" + channelSize + ")");
}
private class FileLazyRead implements LazyRead {
private final Path path;
private LockedFileChannel lock; // TODO rename
FileLazyRead(final Path path) {
this.path = path;
lock = lockForReading(path);
}
@Override
public long size() throws N5IOException {
if (lock == null) {
throw new N5IOException("FileLazyRead is already closed.");
}
try {
return Files.size(path);
} catch (NoSuchFileException e) {
throw new N5NoSuchKeyException("No such file", e);
} catch (IOException | UncheckedIOException e) {
throw new N5IOException(e);
}
}
@Override
public ReadData materialize(final long offset, final long length) {
if (lock == null) {
throw new N5IOException("FileLazyRead is already closed.");
}
try {
final long channelSize = lock.size();
validBounds(channelSize, offset, length);
final long size = length < 0 ? (channelSize - offset) : length;
if (size > Integer.MAX_VALUE) {
throw new IndexOutOfBoundsException("Attempt to materialize too large data");
}
final byte[] data = new byte[(int) size];
lock.read(ByteBuffer.wrap(data), offset);
return ReadData.from(data);
} catch (IOException | UncheckedIOException e) {
throw new N5Exception.N5IOException(e);
}
}
@Override
public void close() throws IOException {
if (lock != null) {
lock.close();
lock = null;
}
}
}
}
================================================
FILE: src/main/java/org/janelia/saalfeldlab/n5/FloatArrayDataBlock.java
================================================
package org.janelia.saalfeldlab.n5;
/**
 * A {@code DataBlock} backed by a {@code float[]}; the number of elements is
 * the array length.
 */
public class FloatArrayDataBlock extends AbstractDataBlock {

	public FloatArrayDataBlock(final int[] size, final long[] gridPosition, final float[] data) {
		super(size, gridPosition, data, a -> a.length);
	}
}
================================================
FILE: src/main/java/org/janelia/saalfeldlab/n5/FsIoPolicy.java
================================================
package org.janelia.saalfeldlab.n5;
import org.janelia.saalfeldlab.n5.readdata.LazyRead;
import org.janelia.saalfeldlab.n5.readdata.ReadData;
import org.janelia.saalfeldlab.n5.readdata.VolatileReadData;
import java.io.Closeable;
import java.io.IOException;
import java.io.OutputStream;
import java.io.UncheckedIOException;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.nio.channels.FileChannel;
import java.nio.file.*;
import static org.janelia.saalfeldlab.n5.FileKeyLockManager.FILE_LOCK_MANAGER;
public class FsIoPolicy {
static final IoPolicy atomicWithFallback = IoPolicy.withFallback(new Atomic(), new Unsafe());
/**
 * Verifies that {@code [offset, offset+length)} is fully contained in
 * {@code [0, channelSize)}; a negative {@code length} means "to the end of
 * the channel" and is not range-checked. Always returns {@code true} (throws
 * otherwise), so it can be used inside assertions or conditions.
 *
 * @throws N5Exception if the range is not fully contained
 */
private static boolean validBounds(long channelSize, long offset, long length) {

	// Fixed messages: the checks require offset >= 0 and offset + length <= channelSize,
	// but the previous messages claimed "> 0" and "less than" respectively.
	if (offset < 0)
		throw new N5Exception("offset must be >= 0, but was: " + offset);
	else if (channelSize > 0 && offset >= channelSize) // offset == 0 and channelSize == 0 is okay
		throw new N5Exception("offset (" + offset + ") must be less than channel size (" + channelSize + ")");
	else if (length >= 0 && offset + length > channelSize)
		throw new N5Exception("offset + length (" + (offset + length) + ") must not exceed channel size (" + channelSize + ")");
	return true;
}
/**
 * Opens a file channel. If the channel is opened {@code forWriting},
 * then this may create the file and the parent directories as needed.
 *
 * @param path the file to open
 * @param forWriting whether the channel must be writable (and creatable)
 * @return the opened channel (read-only, or read/write/create)
 * @throws IOException
 *             if the channel cannot be opened
 */
static FileChannel openFileChannel(final Path path, final boolean forWriting) throws IOException {
	if (forWriting) {
		final Path parent = path.getParent();
		/* if not null and not directory, it will call `createDirectories` but we expect it to throw an IOException */
		if (parent != null && !parent.toFile().isDirectory()) {
			Files.createDirectories(parent);
		}
		return FileChannel.open(path, StandardOpenOption.READ, StandardOpenOption.WRITE, StandardOpenOption.CREATE);
	} else {
		return FileChannel.open(path, StandardOpenOption.READ);
	}
}
/**
 * This method is necessary to handle the situation where writing is successful, but `close` fails on the file channel.
 * This has been observed to happen fairly consistently on MacOS when writing to a file mounted over SMB.
 *
 * Deliberately NOT try-with-resources: the close failure must be
 * distinguishable from a write failure, and ignored only after the data was
 * flushed and forced to disk.
 *
 * @param readData to write to the {@code Path}
 * @param path to write to
 * @throws IOException if writing failed.
 */
private static void writeToPathIgnoreCloseException(ReadData readData, Path path) throws IOException {
	FileChannel channel = openFileChannel(path, true);
	OutputStream os = Channels.newOutputStream(channel);
	try {
		readData.writeTo(os);
		os.flush();
		// force(true): ensure content and metadata reach the storage device
		channel.force(true);
	} catch (Throwable e) {
		os.close();
		channel.close();
		throw e;
	}
	/* if we get here, the write succeeded, and the os/channel may not be closed yet */
	try {
		os.close();
		channel.close();
	} catch (IOException | UncheckedIOException ignore) {
		/* Ignore; we know the data was written already. */
	}
}
/**
 * {@link IoPolicy} that performs no locking at all: reads and writes go
 * straight to the filesystem.
 */
public static class Unsafe implements IoPolicy {

	@Override
	public void write(String key, ReadData readData) throws IOException {
		final Path path = Paths.get(key);
		writeToPathIgnoreCloseException(readData, path);
	}

	@Override
	public VolatileReadData read(final String key) throws IOException {
		final Path path = Paths.get(key);
		// requireLock == false: no read lock is taken
		FileLazyRead fileLazyRead = new FileLazyRead(path, false);
		return VolatileReadData.from(fileLazyRead);
	}

	@Override
	public void delete(final String key) throws IOException {
		final Path path = Paths.get(key);
		Files.deleteIfExists(path);
	}
}
/**
 * {@link IoPolicy} that coordinates access through the shared
 * {@link FileKeyLockManager}: writes and deletes take an exclusive lock,
 * reads take a shared lock.
 */
public static class Atomic implements IoPolicy {

	@Override
	public void write(String key, ReadData readData) throws IOException {
		final Path path = Paths.get(key);
		try (LockedFileChannel channel = FILE_LOCK_MANAGER.lockForWriting(path)) {
			readData.writeTo(channel.asOutputStream());
		}
	}

	@Override
	public VolatileReadData read(String key) throws IOException {
		final Path path = Paths.get(key);
		// requireLock == true: a shared read lock is held until close
		FileLazyRead fileLazyRead = new FileLazyRead(path, true);
		return VolatileReadData.from(fileLazyRead);
	}

	@Override
	public void delete(final String key) throws IOException {
		final Path path = Paths.get(key);
		// BUG FIX: previously a non-regular-file path was deleted and then
		// fell through to lockForWriting(path), which re-created the key as
		// an empty file before deleting it again — and a missing key threw
		// NoSuchFileException. Handle the non-regular-file case fully here
		// and return; only regular files are deleted under an exclusive lock.
		if (!Files.isRegularFile(path)) {
			Files.deleteIfExists(path);
			return;
		}
		try (LockedFileChannel ignore = FILE_LOCK_MANAGER.lockForWriting(path)) {
			Files.delete(path);
		}
	}
}
/**
 * {@link LazyRead} backed by a file on disk, optionally holding a read lock
 * from {@code FILE_LOCK_MANAGER} for the lifetime of this object.
 */
static class FileLazyRead implements LazyRead {

	private static final Closeable NO_OP = () -> { };

	private final Path path;

	/* the held read lock; NO_OP when locking was not requested, null once closed */
	private Closeable lock;

	FileLazyRead(final Path path) throws IOException {

		this(path, true);
	}

	FileLazyRead(final Path path, final boolean requireLock ) throws IOException {

		this.path = path;
		if (requireLock)
			lock = FILE_LOCK_MANAGER.lockForReading(path);
		else
			lock = NO_OP;
	}

	/**
	 * @return the current size of the backing file in bytes
	 * @throws N5Exception.N5IOException if closed or the size cannot be read
	 */
	@Override
	public long size() throws N5Exception.N5IOException {

		if (lock == null) {
			throw new N5Exception.N5IOException("FileLazyRead is already closed.");
		}
		try {
			return Files.size(path);
		} catch (NoSuchFileException e) {
			throw new N5Exception.N5NoSuchKeyException("No such file", e);
		} catch (IOException | UncheckedIOException e) {
			throw new N5Exception.N5IOException(e);
		}
	}

	/**
	 * Reads {@code length} bytes starting at {@code offset} into memory.
	 * A negative {@code length} means "to the end of the file".
	 */
	@Override
	public ReadData materialize(final long offset, final long length) {

		if (lock == null) {
			throw new N5Exception.N5IOException("FileLazyRead is already closed.");
		}
		ReadData readData = null;
		try (final FileChannel channel = FileChannel.open(path, StandardOpenOption.READ)) {
			channel.position(offset);
			final long channelSize = channel.size();
			if (!validBounds(channelSize, offset, length)) {
				throw new IndexOutOfBoundsException();
			}
			final long size = length < 0 ? (channelSize - offset) : length;
			if (size > Integer.MAX_VALUE) {
				throw new IndexOutOfBoundsException("Attempt to materialize too large data");
			}
			final byte[] data = new byte[(int) size];
			final ByteBuffer buf = ByteBuffer.wrap(data);
			/* FileChannel.read may transfer fewer bytes than requested; a single
			 * read() could silently return a partially-filled buffer. Loop until
			 * the buffer is full (or fail on unexpected EOF). */
			while (buf.hasRemaining()) {
				if (channel.read(buf) < 0)
					throw new IndexOutOfBoundsException("Unexpected end of file: " + path);
			}
			readData = ReadData.from(data);
		} catch (final NoSuchFileException e) {
			throw new N5Exception.N5NoSuchKeyException("No such file", e);
		} catch (IOException | UncheckedIOException e) {
			/* Occasionally (frequently for some source remote mounted file systems) this can throw exceptions during
			 * `channel.close()` which is called automatically in the try-with-resources block. In this case, we have
			 * successfully read the data, and we can return it, and ignore the exception.
			 * */
			if (readData == null)
				throw new N5Exception.N5IOException(e);
		}
		return readData;
	}

	@Override
	public void close() throws IOException {

		if (lock != null) {
			lock.close();
			lock = null;
		}
	}
}
}
================================================
FILE: src/main/java/org/janelia/saalfeldlab/n5/GsonKeyValueN5Reader.java
================================================
package org.janelia.saalfeldlab.n5;
import java.io.InputStreamReader;
import java.io.UncheckedIOException;
import java.nio.charset.StandardCharsets;
import java.util.List;

import org.janelia.saalfeldlab.n5.N5Exception.N5IOException;
import org.janelia.saalfeldlab.n5.readdata.VolatileReadData;
import org.janelia.saalfeldlab.n5.shard.PositionValueAccess;

import com.google.gson.Gson;
import com.google.gson.JsonElement;
/**
* {@link N5Reader} implementation through {@link KeyValueAccess} with JSON
* attributes parsed with {@link Gson}.
*
*/
public interface GsonKeyValueN5Reader extends GsonN5Reader {
KeyValueAccess getKeyValueAccess();
default boolean groupExists(final String normalPath) {
return getKeyValueAccess().isDirectory(absoluteGroupPath(normalPath));
}
@Override
default boolean exists(final String pathName) {
final String normalPath = N5URI.normalizeGroupPath(pathName);
return groupExists(normalPath) || datasetExists(normalPath);
}
@Override
default boolean datasetExists(final String pathName) throws N5Exception {
// for n5, every dataset must be a group
return getDatasetAttributes(pathName) != null;
}
/**
* Reads or creates the attributes map of a group or dataset.
*
* @param pathName
* group path
* @return the attribute
* @throws N5Exception if the attributes cannot be read
*/
@Override
default JsonElement getAttributes(final String pathName) throws N5Exception {
final String groupPath = N5URI.normalizeGroupPath(pathName);
final String attributesPath = absoluteAttributesPath(groupPath);
try (final VolatileReadData readData = getKeyValueAccess().createReadData(attributesPath);) {
if (readData == null) {
return null;
}
return GsonUtils.readAttributes(new InputStreamReader(readData.inputStream()), getGson());
} catch (final N5Exception.N5NoSuchKeyException e) {
return null;
} catch (final UncheckedIOException | N5IOException e) {
throw new N5IOException("Failed to read attributes from dataset " + pathName, e);
}
}
@Override
default DataBlock readChunk(
final String pathName,
final DatasetAttributes datasetAttributes,
final long... gridPosition) throws N5Exception {
DatasetAttributes convertedDatasetAttributes = getConvertedDatasetAttributes(datasetAttributes);
try {
final PositionValueAccess posKva = PositionValueAccess.fromKva(getKeyValueAccess(), getURI(), N5URI.normalizeGroupPath(pathName),
convertedDatasetAttributes);
return convertedDatasetAttributes. getDatasetAccess().readChunk(posKva, gridPosition);
} catch (N5Exception.N5NoSuchKeyException e) {
return null;
}
}
@Override
default List> readChunks(
final String pathName,
final DatasetAttributes datasetAttributes,
final List blockPositions) throws N5Exception {
DatasetAttributes convertedDatasetAttributes = getConvertedDatasetAttributes(datasetAttributes);
final PositionValueAccess posKva = PositionValueAccess.fromKva(getKeyValueAccess(), getURI(), N5URI.normalizeGroupPath(pathName), convertedDatasetAttributes);
return convertedDatasetAttributes. getDatasetAccess().readChunks(posKva, blockPositions);
}
@Override
default DataBlock readBlock(
final String pathName,
final DatasetAttributes datasetAttributes,
final long... gridPosition) throws N5Exception {
final DatasetAttributes convertedDatasetAttributes = getConvertedDatasetAttributes(datasetAttributes);
final int shardLevel = convertedDatasetAttributes.getNestedBlockGrid().numLevels() - 1;
try {
final PositionValueAccess posKva = PositionValueAccess.fromKva(getKeyValueAccess(), getURI(), N5URI.normalizeGroupPath(pathName),
convertedDatasetAttributes);
return convertedDatasetAttributes. getDatasetAccess().readBlock(posKva, gridPosition, shardLevel);
} catch (N5Exception.N5NoSuchKeyException e) {
return null;
}
}
@Override
default String[] list(final String pathName) throws N5Exception {
return getKeyValueAccess().listDirectories(absoluteGroupPath(pathName));
}
/**
* Constructs the absolute path (in terms of this store) for the group or
* dataset.
*
* @param normalGroupPath
* normalized group path without leading slash
* @return the absolute path to the group
*/
default String absoluteGroupPath(final String normalGroupPath) {
return getKeyValueAccess().compose(getURI(), normalGroupPath);
}
/**
* Constructs the absolute path (in terms of this store) for the attributes
* file of a group or dataset.
*
* @param normalPath
* normalized group path without leading slash
* @return the absolute path to the attributes
*/
default String absoluteAttributesPath(final String normalPath) {
return getKeyValueAccess().compose(getURI(), normalPath, getAttributesKey());
}
@Override
default boolean blockExists(
final String pathName,
final DatasetAttributes datasetAttributes,
final long... gridPosition) throws N5Exception {
final String normalPath = N5URI.normalizeGroupPath(pathName);
final String blockPath = getKeyValueAccess().compose(getURI(), normalPath,
datasetAttributes.relativeBlockPath(gridPosition));
return getKeyValueAccess().isFile(blockPath);
}
}
================================================
FILE: src/main/java/org/janelia/saalfeldlab/n5/GsonKeyValueN5Writer.java
================================================
package org.janelia.saalfeldlab.n5;
import java.io.OutputStreamWriter;
import java.io.UncheckedIOException;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import com.google.gson.JsonSyntaxException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import org.janelia.saalfeldlab.n5.N5Exception.N5IOException;
import org.janelia.saalfeldlab.n5.readdata.ReadData;
import org.janelia.saalfeldlab.n5.shard.PositionValueAccess;
import com.google.gson.Gson;
import com.google.gson.JsonElement;
import com.google.gson.JsonNull;
import com.google.gson.JsonObject;
/**
* Default implementation of {@link N5Writer} with JSON attributes parsed with
* {@link Gson}.
*/
public interface GsonKeyValueN5Writer extends GsonN5Writer, GsonKeyValueN5Reader {

	/**
	 * TODO This overrides the version even if incompatible, check
	 * if this is the desired behavior or if it is always overridden, e.g. as by
	 * the caching version. If this is true, delete this implementation.
	 *
	 * @param path to the group to write the version into
	 */
	default void setVersion(final String path) {

		if (!VERSION.equals(getVersion()))
			setAttribute("/", VERSION_KEY, VERSION.toString());
	}

	/**
	 * Creates the container's base directory if necessary.
	 *
	 * @param keyValueAccess the backing key-value access
	 * @param basePath the path of the container root
	 * @return the normalized base path
	 * @throws N5IOException if the directories cannot be created
	 */
	static String initializeContainer(
			final KeyValueAccess keyValueAccess,
			final String basePath) throws N5IOException {

		final String normBasePath = keyValueAccess.normalize(basePath);
		keyValueAccess.createDirectories(normBasePath);
		return normBasePath;
	}

	@Override
	default void createGroup(final String path) throws N5Exception {

		final String normalPath = N5URI.normalizeGroupPath(path);
		getKeyValueAccess().createDirectories(absoluteGroupPath(normalPath));
	}

	/**
	 * Helper method that writes an attributes tree into the store
	 *
	 * TODO This method is not part of the public API and should be protected
	 * in Java versions greater than 8
	 *
	 * @param normalGroupPath
	 *            to write the attributes to
	 * @param attributes
	 *            to write
	 * @throws N5Exception
	 *             if unable to write the attributes at {@code normalGroupPath}
	 */
	default void writeAttributes(
			final String normalGroupPath,
			final JsonElement attributes) throws N5Exception {

		/* serialize lazily, directly into the store's output stream, as UTF-8 */
		final ReadData newAttributesReadData = ReadData.from(os -> {
			final OutputStreamWriter writer = new OutputStreamWriter(os, StandardCharsets.UTF_8);
			GsonUtils.writeAttributes(writer, attributes, getGson());
		});
		try {
			getKeyValueAccess().write(absoluteAttributesPath(normalGroupPath), newAttributesReadData);
		} catch (UncheckedIOException | N5IOException e) {
			throw new N5Exception.N5IOException("Failed to write attributes into " + normalGroupPath, e);
		}
	}

	@Override
	default void setAttributes(
			final String path,
			final JsonElement attributes) throws N5Exception {

		final String normalPath = N5URI.normalizeGroupPath(path);
		if (!exists(normalPath))
			throw new N5IOException(normalPath + " is not a group or dataset.");
		writeAttributes(normalPath, attributes);
	}

	/**
	 * Helper method that reads the existing map of attributes, JSON encodes,
	 * inserts and overrides the provided attributes, and writes them back into
	 * the attributes store.
	 *
	 * TODO This method is not part of the public API and should be protected
	 * in Java greater than 8
	 *
	 * @param normalGroupPath
	 *            to write the attributes to
	 * @param attributes
	 *            to write
	 * @throws N5Exception
	 *             if unable to read or write the attributes at
	 *             {@code normalGroupPath}
	 */
	default void writeAttributes(
			final String normalGroupPath,
			final Map<String, ?> attributes) throws N5Exception {

		if (attributes != null && !attributes.isEmpty()) {
			JsonElement root = getAttributes(normalGroupPath);
			/* start from the existing attribute object, or a fresh one */
			root = root != null && root.isJsonObject()
					? root.getAsJsonObject()
					: new JsonObject();
			root = GsonUtils.insertAttributes(root, attributes, getGson());
			writeAttributes(normalGroupPath, root);
		}
	}

	@Override
	default void setAttributes(
			final String path,
			final Map<String, ?> attributes) throws N5Exception {

		final String normalPath = N5URI.normalizeGroupPath(path);
		if (!exists(normalPath))
			throw new N5IOException(normalPath + " is not a group or dataset.");
		writeAttributes(normalPath, attributes);
	}

	@Override
	default boolean removeAttribute(final String groupPath, final String attributePath) throws N5Exception {

		final String normalPath = N5URI.normalizeGroupPath(groupPath);
		final String absoluteNormalPath = getKeyValueAccess().compose(getURI(), normalPath);
		final String normalKey = N5URI.normalizeAttributePath(attributePath);

		if (!getKeyValueAccess().isDirectory(absoluteNormalPath))
			return false;

		/* removing the root replaces the whole attribute tree with JSON null */
		if (attributePath.equals("/")) {
			setAttributes(normalPath, JsonNull.INSTANCE);
			return true;
		}

		final JsonElement attributes = getAttributes(normalPath);
		if (GsonUtils.removeAttribute(attributes, normalKey) != null) {
			setAttributes(normalPath, attributes);
			return true;
		}
		return false;
	}

	@Override
	default <T> T removeAttribute(final String pathName, final String key, final Class<T> cls) throws N5Exception {

		final String normalPath = N5URI.normalizeGroupPath(pathName);
		final String normalKey = N5URI.normalizeAttributePath(key);

		final JsonElement attributes = getAttributes(normalPath);
		final T obj;
		try {
			obj = GsonUtils.removeAttribute(attributes, normalKey, cls, getGson());
		} catch (JsonSyntaxException | NumberFormatException | ClassCastException e) {
			throw new N5Exception.N5ClassCastException(e);
		}
		/* only write back if something was actually removed */
		if (obj != null) {
			setAttributes(normalPath, attributes);
		}
		return obj;
	}

	@Override
	default boolean removeAttributes(final String pathName, final List<String> attributes) throws N5Exception {

		final String normalPath = N5URI.normalizeGroupPath(pathName);
		boolean removed = false;
		for (final String attribute : attributes) {
			final String normalKey = N5URI.normalizeAttributePath(attribute);
			removed |= removeAttribute(normalPath, normalKey);
		}
		return removed;
	}

	@Override
	default <T> void writeRegion(
			final String datasetPath,
			final DatasetAttributes datasetAttributes,
			final long[] min,
			final long[] size,
			final DataBlockSupplier<T> chunkSupplier,
			final boolean writeFully) throws N5Exception {

		final DatasetAttributes convertedDatasetAttributes = getConvertedDatasetAttributes(datasetAttributes);
		try {
			final PositionValueAccess posKva = PositionValueAccess.fromKva(getKeyValueAccess(), getURI(), N5URI.normalizeGroupPath(datasetPath), convertedDatasetAttributes);
			convertedDatasetAttributes.<T>getDatasetAccess().writeRegion(posKva, min, size, chunkSupplier, writeFully);
		} catch (final UncheckedIOException e) {
			throw new N5IOException(
					"Failed to write blocks into dataset " + datasetPath, e);
		}
	}

	@Override
	default <T> void writeRegion(
			final String datasetPath,
			final DatasetAttributes datasetAttributes,
			final long[] min,
			final long[] size,
			final DataBlockSupplier<T> chunkSupplier,
			final boolean writeFully,
			final ExecutorService exec) throws N5Exception, InterruptedException, ExecutionException {

		final DatasetAttributes convertedDatasetAttributes = getConvertedDatasetAttributes(datasetAttributes);
		try {
			final PositionValueAccess posKva = PositionValueAccess.fromKva(getKeyValueAccess(), getURI(), N5URI.normalizeGroupPath(datasetPath), convertedDatasetAttributes);
			convertedDatasetAttributes.<T>getDatasetAccess().writeRegion(posKva, min, size, chunkSupplier, writeFully, exec);
		} catch (final UncheckedIOException e) {
			throw new N5IOException(
					"Failed to write blocks into dataset " + datasetPath, e);
		}
	}

	@Override
	default <T> void writeChunks(
			final String datasetPath,
			final DatasetAttributes datasetAttributes,
			final DataBlock<T>... chunks) throws N5Exception {

		final DatasetAttributes convertedDatasetAttributes = getConvertedDatasetAttributes(datasetAttributes);
		try {
			final PositionValueAccess posKva = PositionValueAccess.fromKva(getKeyValueAccess(), getURI(), N5URI.normalizeGroupPath(datasetPath), convertedDatasetAttributes);
			convertedDatasetAttributes.<T>getDatasetAccess().writeChunks(posKva, Arrays.asList(chunks));
		} catch (final UncheckedIOException e) {
			throw new N5IOException(
					"Failed to write chunks into dataset " + datasetPath, e);
		}
	}

	@Override
	default <T> void writeChunk(
			final String path,
			final DatasetAttributes datasetAttributes,
			final DataBlock<T> chunk) throws N5Exception {

		final DatasetAttributes convertedDatasetAttributes = getConvertedDatasetAttributes(datasetAttributes);
		try {
			final PositionValueAccess posKva = PositionValueAccess.fromKva(getKeyValueAccess(), getURI(), N5URI.normalizeGroupPath(path), convertedDatasetAttributes);
			convertedDatasetAttributes.<T>getDatasetAccess().writeChunk(posKva, chunk);
		} catch (final UncheckedIOException e) {
			throw new N5IOException(
					"Failed to write chunk " + Arrays.toString(chunk.getGridPosition()) + " into dataset " + path,
					e);
		}
	}

	@Override
	default <T> void writeBlock(
			final String path,
			final DatasetAttributes datasetAttributes,
			final DataBlock<T> dataBlock) throws N5Exception {

		final DatasetAttributes convertedDatasetAttributes = getConvertedDatasetAttributes(datasetAttributes);
		/* blocks live at the innermost level of the (possibly sharded) block grid */
		final int shardLevel = convertedDatasetAttributes.getNestedBlockGrid().numLevels() - 1;
		try {
			final PositionValueAccess posKva = PositionValueAccess.fromKva(getKeyValueAccess(), getURI(), N5URI.normalizeGroupPath(path), convertedDatasetAttributes);
			convertedDatasetAttributes.<T>getDatasetAccess().writeBlock(posKva, dataBlock, shardLevel);
		} catch (final UncheckedIOException e) {
			throw new N5IOException(
					"Failed to write block " + Arrays.toString(dataBlock.getGridPosition()) + " into dataset " + path,
					e);
		}
	}

	@Override
	default boolean remove(final String path) throws N5Exception {

		final String normalPath = N5URI.normalizeGroupPath(path);
		final String groupPath = absoluteGroupPath(normalPath);
		if (getKeyValueAccess().isDirectory(groupPath))
			getKeyValueAccess().delete(groupPath);

		/* an IOException should have occurred if anything had failed midway */
		return true;
	}

	@Override
	default boolean deleteBlock(
			final String path,
			final DatasetAttributes datasetAttributes,
			final long... gridPosition) throws N5Exception {

		final PositionValueAccess posKva = PositionValueAccess.fromKva(getKeyValueAccess(), getURI(), N5URI.normalizeGroupPath(path), datasetAttributes);
		return posKva.remove(gridPosition);
	}

	@Override
	default boolean deleteChunk(
			final String path,
			final DatasetAttributes datasetAttributes,
			final long... gridPosition) throws N5Exception {

		final PositionValueAccess posKva = PositionValueAccess.fromKva(getKeyValueAccess(), getURI(), N5URI.normalizeGroupPath(path), datasetAttributes);
		return datasetAttributes.getDatasetAccess().deleteChunk(posKva, gridPosition);
	}

	@Override
	default boolean deleteChunks(
			final String path,
			final DatasetAttributes datasetAttributes,
			final List<long[]> gridPositions) throws N5Exception {

		final PositionValueAccess posKva = PositionValueAccess.fromKva(getKeyValueAccess(), getURI(), N5URI.normalizeGroupPath(path), datasetAttributes);
		return datasetAttributes.getDatasetAccess().deleteChunks(posKva, gridPositions);
	}
}
================================================
FILE: src/main/java/org/janelia/saalfeldlab/n5/GsonN5Reader.java
================================================
package org.janelia.saalfeldlab.n5;
import java.lang.reflect.Type;
import java.util.Map;
import com.google.gson.JsonDeserializationContext;
import com.google.gson.JsonParseException;
import com.google.gson.Gson;
import com.google.gson.JsonElement;
import com.google.gson.JsonSyntaxException;
/**
* {@link N5Reader} with JSON attributes parsed with {@link Gson}.
*
*/
public interface GsonN5Reader extends N5Reader {
Gson getGson();
/**
* Get the key for the {@link KeyValueAccess}, that is used for storing attributes.
* The N5 format uses "attributes.json".
*
* @return the attributes key
*/
String getAttributesKey();
@Override
default Map> listAttributes(final String pathName) throws N5Exception {
return GsonUtils.listAttributes(getAttributes(pathName));
}
@Override
default DatasetAttributes getDatasetAttributes(final String pathName) throws N5Exception {
final String normalPath = N5URI.normalizeGroupPath(pathName);
final JsonElement attributes = getAttributes(normalPath);
return createDatasetAttributes(attributes);
}
default DatasetAttributes createDatasetAttributes(final JsonElement attributes) {
final JsonDeserializationContext context = new JsonDeserializationContext() {
@Override public T deserialize(JsonElement json, Type typeOfT) throws JsonParseException {
return getGson().fromJson(json, typeOfT);
}
};
return DatasetAttributes.getJsonAdapter().deserialize(attributes, DatasetAttributes.class, context);
}
@Override
default T getAttribute(final String pathName, final String key, final Class clazz) throws N5Exception {
final String normalPathName = N5URI.normalizeGroupPath(pathName);
final String normalizedAttributePath = N5URI.normalizeAttributePath(key);
final JsonElement attributes = getAttributes(normalPathName);
try {
return GsonUtils.readAttribute(attributes, normalizedAttributePath, clazz, getGson());
} catch (JsonSyntaxException | NumberFormatException | ClassCastException e) {
throw new N5Exception.N5ClassCastException(e);
}
}
@Override
default T getAttribute(final String pathName, final String key, final Type type) throws N5Exception {
final String normalPathName = N5URI.normalizeGroupPath(pathName);
final String normalizedAttributePath = N5URI.normalizeAttributePath(key);
final JsonElement attributes = getAttributes(normalPathName);
try {
return GsonUtils.readAttribute(attributes, normalizedAttributePath, type, getGson());
} catch (JsonSyntaxException | NumberFormatException | ClassCastException e) {
throw new N5Exception.N5ClassCastException(e);
}
}
/**
* Reads or the attributes of a group or dataset.
*
* @param pathName
* group path
* @return the attributes identified by pathName
* @throws N5Exception if the attribute cannot be returned
*/
JsonElement getAttributes(final String pathName) throws N5Exception;
}
================================================
FILE: src/main/java/org/janelia/saalfeldlab/n5/GsonN5Writer.java
================================================
package org.janelia.saalfeldlab.n5;
import com.google.gson.Gson;
import com.google.gson.JsonElement;
/**
* {@link N5Writer} with JSON attributes parsed with {@link Gson}.
*
*/
public interface GsonN5Writer extends GsonN5Reader, N5Writer {

	/**
	 * Set the attributes of a group. The result of this method is equivalent
	 * to {@link N5Writer#setAttribute(String, String, Object) N5Writer#setAttribute(groupPath, "/", attributes)},
	 * i.e. the given tree replaces the group's entire attribute tree.
	 *
	 * @param groupPath
	 *            to write the attributes to
	 * @param attributes
	 *            to write
	 * @throws N5Exception if the attributes cannot be set
	 */
	void setAttributes(
			final String groupPath,
			final JsonElement attributes) throws N5Exception;
}
================================================
FILE: src/main/java/org/janelia/saalfeldlab/n5/GsonUtils.java
================================================
package org.janelia.saalfeldlab.n5;
import java.io.IOException;
import java.io.Reader;
import java.io.Writer;
import java.lang.reflect.Array;
import java.lang.reflect.Type;
import java.util.HashMap;
import java.util.Map;
import java.util.regex.Matcher;
import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonPrimitive;
import com.google.gson.JsonSyntaxException;
import com.google.gson.reflect.TypeToken;
import org.janelia.saalfeldlab.n5.N5Exception.N5JsonParseException;
/**
* Utility class for working with JSON.
*
* @author Stephan Saalfeld
*/
public interface GsonUtils {
/**
 * Reads the attributes json from a given {@link Reader}.
 *
 * @param reader
 *            the reader
 * @param gson
 *            to parse Json from the {@code reader}
 * @return the root {@link JsonElement} of the attributes; may be
 *         {@code null} when the reader yields no JSON content
 */
static JsonElement readAttributes(final Reader reader, final Gson gson) {

	return gson.fromJson(reader, JsonElement.class);
}
/**
 * Reads the attribute at {@code normalizedAttributePath} from {@code root}
 * and deserializes it as {@code cls}.
 *
 * @param root the root attribute tree
 * @param normalizedAttributePath normalized path to the attribute
 * @param cls class to deserialize the attribute as
 * @param gson used for deserialization
 * @param <T> the attribute type
 * @return the deserialized attribute, or {@code null} if absent
 */
static <T> T readAttribute(
		final JsonElement root,
		final String normalizedAttributePath,
		final Class<T> cls,
		final Gson gson) throws JsonSyntaxException, NumberFormatException, ClassCastException {

	return readAttribute(root, normalizedAttributePath, TypeToken.get(cls).getType(), gson);
}
/**
 * Reads the attribute at {@code normalizedAttributePath} from {@code root}
 * and deserializes it as the given {@link Type}.
 *
 * @param root the root attribute tree
 * @param normalizedAttributePath normalized path to the attribute
 * @param type type to deserialize the attribute as
 * @param gson used for deserialization
 * @param <T> the attribute type represented by {@code type}
 * @return the deserialized attribute, or {@code null} if absent
 */
static <T> T readAttribute(
		final JsonElement root,
		final String normalizedAttributePath,
		final Type type,
		final Gson gson) throws JsonSyntaxException, NumberFormatException, ClassCastException {

	final JsonElement attribute = getAttribute(root, normalizedAttributePath);
	return parseAttributeElement(attribute, gson, type);
}
/**
* Deserialize the {@code attribute} as {@link Type type} {@code T}.
*
* @param attribute
* to deserialize as {@link Type type}
* @param gson
* used to deserialize {@code attribute}
* @param type
* to deserialize {@code attribute} as
* @param
* return type represented by {@link Type type}
* @return the deserialized attribute object, or {@code null} if
* {@code attribute} cannot deserialize to {@code T}
*/
@SuppressWarnings("unchecked")
static T parseAttributeElement(final JsonElement attribute, final Gson gson, final Type type) throws JsonSyntaxException, NumberFormatException, ClassCastException {
if (attribute == null)
return null;
final Class> clazz = (type instanceof Class>) ? ((Class>)type) : null;
if (clazz != null && clazz.isAssignableFrom(HashMap.class)) {
final Type mapType = new TypeToken