diff --git a/README.md b/README.md
index cb3fd386..96521a61 100644
--- a/README.md
+++ b/README.md
@@ -31,7 +31,7 @@ See [WriteHdf5.java](jhdf/src/main/java/io/jhdf/examples/WriteHdf5.java) for a
 For more examples see package [io.jhdf.examples](jhdf/src/main/java/io/jhdf/examples)
 
 ## Why should I use jHDF?
-- Easy integration with JVM based projects. The library is available on [Maven Central](https://search.maven.org/search?q=g:%22io.jhdf%22%20AND%20a:%22jhdf%22), and [GitHub Packages](https://github.com/jamesmudd/jhdf/packages/), so using it should be as easy as adding any other dependency. To use the libraries supplied by the HDF Group you need to load native code, which means you need to handle this in your build, and it complicates distribution of your software on multiple platforms.
+- Easy integration with JVM based projects. The library is available on [Maven Central](https://central.sonatype.com/artifact/io.jhdf/jhdf), and [GitHub Packages](https://github.com/jamesmudd/jhdf/packages/), so using it should be as easy as adding any other dependency. To use the libraries supplied by the HDF Group you need to load native code, which means you need to handle this in your build, and it complicates distribution of your software on multiple platforms.
 - The API design intends to be familiar to Java programmers, so hopefully it works as you might expect. (If this is not the case, open an issue with suggestions for improvement)
 - No use of JNI, so you avoid all the issues associated with calling native code from the JVM.
 - Fully debug-able you can step fully through the library with a Java debugger.
diff --git a/jhdf/src/main/java/io/jhdf/api/dataset/ChunkedDataset.java b/jhdf/src/main/java/io/jhdf/api/dataset/ChunkedDataset.java
index 1887b055..99d6e562 100644
--- a/jhdf/src/main/java/io/jhdf/api/dataset/ChunkedDataset.java
+++ b/jhdf/src/main/java/io/jhdf/api/dataset/ChunkedDataset.java
@@ -37,4 +37,13 @@ public interface ChunkedDataset extends Dataset {
 	 */
 	ByteBuffer getRawChunkBuffer(int[] chunkOffset);
 
+	/**
+	 * Gets the decompressed byte array for the specified chunk.
+	 *
+	 * @param chunkOffset the offset of the required chunk
+	 * @return the decompressed byte array for this chunk
+	 * @throws HdfException If the chunk offset is not valid for this dataset
+	 */
+	byte[] getDecompressedChunk(int[] chunkOffset);
+
 }
diff --git a/jhdf/src/main/java/io/jhdf/dataset/chunked/ChunkedDatasetBase.java b/jhdf/src/main/java/io/jhdf/dataset/chunked/ChunkedDatasetBase.java
index dbebf256..e67e32fb 100644
--- a/jhdf/src/main/java/io/jhdf/dataset/chunked/ChunkedDatasetBase.java
+++ b/jhdf/src/main/java/io/jhdf/dataset/chunked/ChunkedDatasetBase.java
@@ -291,6 +291,16 @@ public ByteBuffer getRawChunkBuffer(int[] chunkOffset) {
 		return getDataBuffer(chunk);
 	}
 
+	@Override
+	public byte[] getDecompressedChunk(int[] chunkOffset) {
+		final Chunk chunk = getChunk(new ChunkOffset(chunkOffset));
+		if (chunk == null) {
+			throw new HdfException("No chunk with offset " + Arrays.toString(chunkOffset) +
+				" in dataset: " + getPath());
+		}
+		return decompressChunk(chunk);
+	}
+
 	private Collection<Chunk> getAllChunks() {
 		return getChunkLookup().values();
 	}
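
For context, a minimal usage sketch of the new `getDecompressedChunk` method added in this diff. The file name, dataset path, and chunk offset below are placeholders, and the example assumes the dataset is chunked and two-dimensional; it mirrors how the existing `getRawChunkBuffer` method is used.

```java
import io.jhdf.HdfFile;
import io.jhdf.api.dataset.ChunkedDataset;

import java.nio.file.Paths;

public class DecompressedChunkExample {
	public static void main(String[] args) {
		// "data.hdf5" and "/my/dataset" are illustrative placeholders
		try (HdfFile hdfFile = new HdfFile(Paths.get("data.hdf5"))) {
			// Cast assumes the dataset uses chunked layout
			ChunkedDataset dataset = (ChunkedDataset) hdfFile.getDatasetByPath("/my/dataset");

			// Fetch the chunk at the origin; the offset length must match
			// the dataset dimensionality
			byte[] decompressed = dataset.getDecompressedChunk(new int[]{0, 0});
			System.out.println("Decompressed chunk length: " + decompressed.length);
		}
	}
}
```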