Skip to content

Commit

Permalink
Update examples to use Paths (#391)
Browse files Browse the repository at this point in the history
* Update examples to use Paths
* Add examples of reading filters
  • Loading branch information
jamesmudd authored Jul 8, 2022
1 parent 1dd4a3b commit 284e1b0
Show file tree
Hide file tree
Showing 7 changed files with 104 additions and 24 deletions.
3 changes: 1 addition & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,7 @@ The intention is to make a clean Java API to access HDF5 data. Currently, the pr
Here is an example of reading a dataset with `jHDF` (see [ReadDataset.java](jhdf/src/main/java/io/jhdf/examples/ReadDataset.java))

```java
File file = new File("/path/to/file.hdf5");
try (HdfFile hdfFile = new HdfFile(file)) {
try (HdfFile hdfFile = new HdfFile(Paths.get("/path/to/file.hdf5"))) {
Dataset dataset = hdfFile.getDatasetByPath("/path/to/dataset");
// data will be a Java array with the dimensions of the HDF5 dataset
Object data = dataset.getData();
Expand Down
10 changes: 4 additions & 6 deletions jhdf/src/main/java/io/jhdf/examples/PrintTree.java
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
import io.jhdf.api.Node;
import org.apache.commons.lang3.StringUtils;

import java.io.File;
import java.nio.file.Paths;

/**
* An example of recursively parsing a HDF5 file tree and printing it to the
Expand All @@ -30,10 +30,8 @@ public class PrintTree {
 * @param args ["path/to/file.hdf5"]
*/
public static void main(String[] args) {
File file = new File(args[0]);
System.out.println(file.getName());

try (HdfFile hdfFile = new HdfFile(file)) {
try (HdfFile hdfFile = new HdfFile(Paths.get(args[0]))) {
System.out.println(hdfFile.getFile().getName()); //NOSONAR - sout in example
recursivePrintGroup(hdfFile, 0);
}
}
Expand All @@ -42,7 +40,7 @@ private static void recursivePrintGroup(Group group, int level) {
level++;
String indent = StringUtils.repeat(" ", level);
for (Node node : group) {
System.out.println(indent + node.getName());
System.out.println(indent + node.getName()); //NOSONAR - sout in example
if (node instanceof Group) {
recursivePrintGroup((Group) node, level);
}
Expand Down
10 changes: 4 additions & 6 deletions jhdf/src/main/java/io/jhdf/examples/RawChunkAccess.java
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,8 @@
import io.jhdf.api.dataset.ChunkedDataset;
import org.apache.commons.lang3.ArrayUtils;

import java.io.File;
import java.nio.ByteBuffer;
import java.nio.file.Paths;

/**
* Example application for raw chunk access from HDF5
Expand All @@ -24,21 +24,19 @@
*/
public class RawChunkAccess {
public static void main(String[] args) {
File file = new File(args[0]);

try (HdfFile hdfFile = new HdfFile(file)) {
try (HdfFile hdfFile = new HdfFile(Paths.get(args[0]))) {
Dataset dataset = hdfFile.getDatasetByPath(args[1]);
if (dataset instanceof ChunkedDataset) {
ChunkedDataset chunkedDataset = (ChunkedDataset) dataset;
int[] chunkOffset = new int[chunkedDataset.getChunkDimensions().length];
System.out.println("Chunk offset: " + ArrayUtils.toString(chunkOffset));
System.out.println("Chunk offset: " + ArrayUtils.toString(chunkOffset)); //NOSONAR - sout in example
// For the example just get the zero chunk but you can get any
ByteBuffer rawChunkBuffer = chunkedDataset.getRawChunkBuffer(chunkOffset);
// If you need the buffer just use it directly here, if you want the byte[]
byte[] byteArray = new byte[rawChunkBuffer.capacity()];
rawChunkBuffer.get(byteArray);
// Now you have the byte[] to use as you like
System.out.println("Raw bytes: " + ArrayUtils.toString(byteArray));
System.out.println("Raw bytes: " + ArrayUtils.toString(byteArray)); //NOSONAR - sout in example
} else {
throw new IllegalArgumentException("Dataset is not chunked");
}
Expand Down
8 changes: 3 additions & 5 deletions jhdf/src/main/java/io/jhdf/examples/ReadAttribute.java
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
import io.jhdf.api.Node;
import org.apache.commons.lang3.ArrayUtils;

import java.io.File;
import java.nio.file.Paths;

/**
* Example application for reading an attribute from HDF5
Expand All @@ -27,13 +27,11 @@ public class ReadAttribute {
* @param args ["path/to/file.hdf5", "path/to/node", "attributeName"]
*/
public static void main(String[] args) {
File file = new File(args[0]);

try (HdfFile hdfFile = new HdfFile(file)) {
try (HdfFile hdfFile = new HdfFile(Paths.get(args[0]))) {
Node node = hdfFile.getByPath(args[1]);
Attribute attribute = node.getAttribute(args[2]);
Object attributeData = attribute.getData();
System.out.println(ArrayUtils.toString(attributeData));
System.out.println(ArrayUtils.toString(attributeData)); //NOSONAR - sout in example
}
}
}
8 changes: 3 additions & 5 deletions jhdf/src/main/java/io/jhdf/examples/ReadDataset.java
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
import io.jhdf.api.Dataset;
import org.apache.commons.lang3.ArrayUtils;

import java.io.File;
import java.nio.file.Paths;

/**
* Example application for reading a dataset from HDF5
Expand All @@ -22,13 +22,11 @@
*/
public class ReadDataset {
public static void main(String[] args) {
File file = new File(args[0]);

try (HdfFile hdfFile = new HdfFile(file)) {
try (HdfFile hdfFile = new HdfFile(Paths.get(args[0]))) {
Dataset dataset = hdfFile.getDatasetByPath(args[1]);
// data will be a java array of the dimensions of the HDF5 dataset
Object data = dataset.getData();
System.out.println(ArrayUtils.toString(data));
System.out.println(ArrayUtils.toString(data)); //NOSONAR - sout in example
}
}
}
33 changes: 33 additions & 0 deletions jhdf/src/main/java/io/jhdf/examples/ReadFilters.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
/*
* This file is part of jHDF. A pure Java library for accessing HDF5 files.
*
* http://jhdf.io
*
* Copyright (c) 2022 James Mudd
*
* MIT License see 'LICENSE' file
*/
package io.jhdf.examples;

import io.jhdf.HdfFile;
import io.jhdf.api.Dataset;
import io.jhdf.filter.PipelineFilterWithData;

import java.nio.file.Paths;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;

/**
 * Example application showing how to inspect the filter pipeline applied to a dataset.
 */
public class ReadFilters {

	/**
	 * @param args ["path/to/file.hdf5", "path/to/dataset"]
	 */
	public static void main(String[] args) {
		try (HdfFile hdfFile = new HdfFile(Paths.get(args[0]))) {
			Dataset dataset = hdfFile.getDatasetByPath(args[1]);
			// Render each pipeline stage and chain them in application order
			String pipeline = dataset.getFilters().stream()
				.map(Objects::toString)
				.collect(Collectors.joining(" -> "));
			System.out.println(pipeline); //NOSONAR - sout in example
		}
	}
}
56 changes: 56 additions & 0 deletions jhdf/src/test/java/io/jhdf/examples/ReadFiltersTest.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
/*
* This file is part of jHDF. A pure Java library for accessing HDF5 files.
*
* http://jhdf.io
*
* Copyright (c) 2022 James Mudd
*
* MIT License see 'LICENSE' file
*/
package io.jhdf.examples;

import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Objects;
import java.util.stream.Stream;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;

/**
 * Tests {@link ReadFilters} by capturing stdout and checking the printed filter pipeline.
 */
class ReadFiltersTest {

	// Original stdout, captured at instance construction so it can be restored after each test
	private final PrintStream originalOut = System.out;
	private ByteArrayOutputStream outputStream;

	@BeforeEach
	void setUp() {
		// Redirect stdout so the example's println output can be asserted on
		outputStream = new ByteArrayOutputStream();
		System.setOut(new PrintStream(outputStream));
	}

	@AfterEach
	void tearDown() {
		// Restore stdout so later tests and test-runner logging are not silently captured
		System.setOut(originalOut);
	}

	static Stream<Arguments> getTests() {
		return Stream.of(
			Arguments.of("/hdf5/test_byteshuffle_compressed_datasets_latest.hdf5", "/float/float32", "shuffle (id=2) data=[4] -> deflate (id=1) data=[4]"),
			Arguments.of("/hdf5/bitshuffle_datasets.hdf5", "float32_bs8_comp2", "bitshuffle (id=32008) data=[0, 4, 4, 8, 2]")
		);
	}

	@ParameterizedTest
	@MethodSource("getTests")
	void testReadFilters(String resourcePath, String datasetPath, String expectedOutput) throws Exception {
		// Fail fast with a clear message if the test fixture is missing from the classpath
		URL resource = Objects.requireNonNull(ReadFiltersTest.class.getResource(resourcePath),
			"Test resource not found: " + resourcePath);
		Path absolutePath = Paths.get(resource.toURI()).toAbsolutePath();

		ReadFilters.main(new String[]{absolutePath.toString(), datasetPath});

		assertThat(outputStream.toString(), containsString(expectedOutput));
	}
}

0 comments on commit 284e1b0

Please sign in to comment.