Skip to content

Commit

Permalink
Merge pull request #578 from jamesmudd/impl-extra-writeable-methods
Browse files Browse the repository at this point in the history
Add implementation for more writable dataset methods
  • Loading branch information
jamesmudd authored May 20, 2024
2 parents a733638 + 59806cc commit 55ea788
Show file tree
Hide file tree
Showing 18 changed files with 122 additions and 59 deletions.
18 changes: 18 additions & 0 deletions jhdf/src/main/java/io/jhdf/Utils.java
Original file line number Diff line number Diff line change
Expand Up @@ -424,4 +424,22 @@ public static void writeIntToBits(int value, BitSet bits, int start, int length)
bits.set(start + i, bi.testBit(i));
}
}

/**
 * Flattens a (possibly multi-dimensional) array into a single {@code Object[]}
 * in depth-first, left-to-right order. Primitive elements are boxed by the
 * reflection API.
 *
 * @param data the array to flatten; must be an array (any dimensionality)
 * @return the flattened elements in traversal order
 * @throws IllegalArgumentException if {@code data} is not an array
 */
public static Object[] flatten(Object data) {
	List<Object> flattened = new ArrayList<>();
	// Explicit work stack instead of recursion. Children are pushed in
	// reverse index order so popping from the end of the list yields the
	// same left-to-right, depth-first order as a recursive traversal.
	List<Object> work = new ArrayList<>();
	for (int i = Array.getLength(data) - 1; i >= 0; i--) {
		work.add(Array.get(data, i));
	}
	while (!work.isEmpty()) {
		Object current = work.remove(work.size() - 1);
		if (current.getClass().isArray()) {
			for (int i = Array.getLength(current) - 1; i >= 0; i--) {
				work.add(Array.get(current, i));
			}
		} else {
			flattened.add(current);
		}
	}
	return flattened.toArray();
}
}
36 changes: 22 additions & 14 deletions jhdf/src/main/java/io/jhdf/WritableDatasetImpl.java
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
import io.jhdf.api.Group;
import io.jhdf.api.NodeType;
import io.jhdf.api.WritiableDataset;
import io.jhdf.exceptions.HdfWritingException;
import io.jhdf.exceptions.UnsupportedHdfException;
import io.jhdf.filter.PipelineFilterWithData;
import io.jhdf.object.datatype.DataType;
Expand All @@ -36,6 +37,7 @@
import java.util.Collections;
import java.util.List;

import static io.jhdf.Utils.flatten;
import static io.jhdf.Utils.stripLeadingIndex;

public class WritableDatasetImpl extends AbstractWritableNode implements WritiableDataset {
Expand All @@ -56,17 +58,18 @@ public WritableDatasetImpl(Object data, String name, Group parent) {

@Override
public long getSize() {
return 0;
return dataSpace.getTotalLength();
}

@Override
public long getSizeInBytes() {
return 0;
return getSize() * dataType.getSize();
}

@Override
public long getStorageInBytes() {
return 0;
// As there is no compression this is correct ATM
return getSizeInBytes();
}

@Override
Expand All @@ -76,12 +79,15 @@ public int[] getDimensions() {

@Override
public boolean isScalar() {
return false;
if (isEmpty()) {
return false;
}
return getDimensions().length == 0;
}

@Override
public boolean isEmpty() {
return false;
return data == null;
}

@Override
Expand All @@ -96,27 +102,28 @@ public boolean isVariableLength() {

@Override
public long[] getMaxSize() {
return new long[0];
return dataSpace.getMaxSizes();
}

@Override
public DataLayout getDataLayout() {
return null;
// ATM we only support contiguous
return DataLayout.CONTIGUOUS;
}

@Override
public Object getData() {
return null;
return data;
}

@Override
public Object getDataFlat() {
return null;
return flatten(data);
}

@Override
public Object getData(long[] sliceOffset, int[] sliceDimensions) {
return null;
throw new HdfWritingException("Slicing a writable dataset not supported");
}

@Override
Expand All @@ -136,6 +143,7 @@ public Object getFillValue() {

@Override
public List<PipelineFilterWithData> getFilters() {
// ATM no filters support
return Collections.emptyList();
}

Expand All @@ -151,22 +159,22 @@ public boolean isGroup() {

@Override
public File getFile() {
return null;
return getParent().getFile();
}

@Override
public Path getFileAsPath() {
return null;
return getParent().getFileAsPath();
}

@Override
public HdfFile getHdfFile() {
return null;
return getParent().getHdfFile();
}

@Override
public long getAddress() {
return 0;
throw new HdfWritingException("Address not known until written");
}

@Override
Expand Down
7 changes: 1 addition & 6 deletions jhdf/src/main/java/io/jhdf/dataset/DatasetBase.java
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,6 @@

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Arrays;

import static java.nio.ByteOrder.LITTLE_ENDIAN;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
Expand Down Expand Up @@ -92,11 +91,7 @@ public int[] getDimensions() {

@Override
public long[] getMaxSize() {
if (dataSpace.isMaxSizesPresent()) {
return dataSpace.getMaxSizes();
} else {
return Arrays.stream(getDimensions()).asLongStream().toArray();
}
return dataSpace.getMaxSizes();
}

@Override
Expand Down
8 changes: 5 additions & 3 deletions jhdf/src/main/java/io/jhdf/object/message/DataSpace.java
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
import org.apache.commons.lang3.ArrayUtils;

import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.BitSet;
import java.util.stream.IntStream;

Expand Down Expand Up @@ -64,7 +65,7 @@ private DataSpace(ByteBuffer bb, Superblock sb) {
maxSizes[i] = Utils.readBytesAsUnsignedLong(bb, sb.getSizeOfLengths());
}
} else {
maxSizes = ArrayUtils.EMPTY_LONG_ARRAY;
maxSizes = Arrays.stream(dimensions).asLongStream().toArray();
}

// Permutation indices - Note never implemented in HDF library!
Expand All @@ -83,10 +84,11 @@ public static DataSpace readDataSpace(ByteBuffer bb, Superblock sb) {
}

public static DataSpace fromObject(Object data) {
int[] dimensions1 = Utils.getDimensions(data);
return new DataSpace((byte) 2,
false,
Utils.getDimensions(data),
ArrayUtils.EMPTY_LONG_ARRAY,
dimensions1,
Arrays.stream(dimensions1).asLongStream().toArray(),
(byte) 1
);
}
Expand Down
23 changes: 1 addition & 22 deletions jhdf/src/test/java/io/jhdf/TestUtils.java
Original file line number Diff line number Diff line change
Expand Up @@ -11,11 +11,8 @@

import org.apache.commons.lang3.ArrayUtils;

import java.lang.reflect.Array;
import java.net.URL;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;

public final class TestUtils {

Expand All @@ -31,26 +28,8 @@ public static HdfFile loadTestHdfFile(String fileName) throws Exception {
return new HdfFile(Paths.get(url.toURI()));
}

public static Object[] flatten(Object data) {
List<Object> flat = new ArrayList<>();
flattenInternal(data, flat);
return flat.toArray();
}

private static void flattenInternal(Object data, List<Object> flat) {
int length = Array.getLength(data);
for (int i = 0; i < length; i++) {
Object element = Array.get(data, i);
if (element.getClass().isArray()) {
flattenInternal(element, flat);
} else {
flat.add(element);
}
}
}

public static String[] toStringArray(Object data) {
return ArrayUtils.toStringArray(flatten(data));
return ArrayUtils.toStringArray(Utils.flatten(data));
}

}
62 changes: 62 additions & 0 deletions jhdf/src/test/java/io/jhdf/WritableDatasetImplTest.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
/*
* This file is part of jHDF. A pure Java library for accessing HDF5 files.
*
* http://jhdf.io
*
* Copyright (c) 2024 James Mudd
*
* MIT License see 'LICENSE' file
*/
package io.jhdf;

import io.jhdf.api.NodeType;
import io.jhdf.exceptions.HdfException;
import io.jhdf.object.message.DataLayout;
import org.apache.commons.lang3.ArrayUtils;
import org.junit.jupiter.api.Test;

import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertThrows;

// Unit tests for WritableDatasetImpl — a dataset built from an in-memory Java
// array before being written to a file. The parent group is passed as null;
// these tests only exercise methods that do not touch the parent.
class WritableDatasetImplTest {

	// Size accounting: 3 int elements at 4 bytes each. Storage equals raw size
	// because writable datasets currently apply no compression/filters.
	@Test
	void testGettingSize() {
	int[] data = new int[] {1,2,3};
	WritableDatasetImpl writableDataset = new WritableDatasetImpl(data, "ints", null);
	assertThat(writableDataset.getSize()).isEqualTo(3);
	assertThat(writableDataset.getSizeInBytes()).isEqualTo(3 * 4);
	assertThat(writableDataset.getStorageInBytes()).isEqualTo(3 * 4);
	}
	// Data access: getData returns the backing array as-is, getDataFlat boxes
	// and flattens it, and slicing is unsupported for writable datasets.
	@Test
	void testGettingData() {
	int[][] data = new int[][] {{1,2,3}, {4,5,6}};
	WritableDatasetImpl writableDataset = new WritableDatasetImpl(data, "ints", null);
	assertThat(writableDataset.getData()).isEqualTo(new int[][] {{1,2,3}, {4,5,6}});
	assertThat(writableDataset.getDataFlat()).isEqualTo(ArrayUtils.toObject(new int[] {1,2,3, 4,5,6}));
	assertThat(writableDataset.getDimensions()).isEqualTo(new int[]{2,3});
	// Max size mirrors the current dimensions (dataset is not extendable)
	assertThat(writableDataset.getMaxSize()).isEqualTo(new long[]{2,3});
	assertThat(writableDataset.getJavaType()).isEqualTo(int.class);
	// Only contiguous layout is supported for writable datasets at present
	assertThat(writableDataset.getDataLayout()).isEqualTo(DataLayout.CONTIGUOUS);
	assertThrows(HdfException.class, () ->
	writableDataset.getData(new long[] {1, 2}, new int[] {1, 2}));
	assertThat(writableDataset.getFillValue()).isNull();
	assertThat(writableDataset.getFilters()).isEmpty();
	assertThat(writableDataset.getDataType()).isNotNull();
	}

	// Flag/type queries for a plain non-empty 2D int dataset: everything is
	// false and the node reports itself as a DATASET.
	@Test
	void testGettingFlags() {
	int[][] data = new int[][] {{1,2,3}, {4,5,6}};
	WritableDatasetImpl writableDataset = new WritableDatasetImpl(data, "ints", null);
	assertThat(writableDataset.isScalar()).isFalse();
	assertThat(writableDataset.isEmpty()).isFalse();
	assertThat(writableDataset.isLink()).isFalse();
	assertThat(writableDataset.isGroup()).isFalse();
	assertThat(writableDataset.isVariableLength()).isFalse();
	assertThat(writableDataset.isCompound()).isFalse();
	assertThat(writableDataset.isAttributeCreationOrderTracked()).isFalse();
	assertThat(writableDataset.getType()).isEqualTo(NodeType.DATASET);
	}

}
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@

import java.util.stream.Stream;

import static io.jhdf.TestUtils.flatten;
import static io.jhdf.Utils.flatten;
import static io.jhdf.TestUtils.loadTestHdfFile;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@
import java.util.Collection;
import java.util.List;

import static io.jhdf.TestUtils.flatten;
import static io.jhdf.Utils.flatten;
import static io.jhdf.TestUtils.loadTestHdfFile;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.arrayContaining;
Expand Down
2 changes: 1 addition & 1 deletion jhdf/src/test/java/io/jhdf/dataset/ChunkedDatasetTest.java
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
import java.util.Arrays;
import java.util.Collection;

import static io.jhdf.TestUtils.flatten;
import static io.jhdf.Utils.flatten;
import static io.jhdf.Utils.getDimensions;
import static io.jhdf.TestUtils.loadTestHdfFile;
import static org.hamcrest.MatcherAssert.assertThat;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@
import java.util.List;
import java.util.stream.Stream;

import static io.jhdf.TestUtils.flatten;
import static io.jhdf.Utils.flatten;
import static io.jhdf.TestUtils.loadTestHdfFile;
import static org.apache.commons.lang3.ArrayUtils.toObject;
import static org.hamcrest.CoreMatchers.equalTo;
Expand Down
2 changes: 1 addition & 1 deletion jhdf/src/test/java/io/jhdf/dataset/CompactDatasetTest.java
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@
import java.util.Collection;
import java.util.List;

import static io.jhdf.TestUtils.flatten;
import static io.jhdf.Utils.flatten;
import static io.jhdf.TestUtils.loadTestHdfFile;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.empty;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,6 @@
package io.jhdf.dataset;

import io.jhdf.HdfFile;
import io.jhdf.TestUtils;
import io.jhdf.Utils;
import io.jhdf.api.Dataset;
import org.junit.jupiter.api.AfterAll;
Expand Down Expand Up @@ -77,7 +76,7 @@ private Executable createTest(HdfFile hdfFile, String datasetPath, double expect
Dataset dataset = hdfFile.getDatasetByPath(datasetPath);
Object data = dataset.getData();
assertThat(Utils.getDimensions(data), is(equalTo(new int[]{7, 5})));
Object[] flatData = TestUtils.flatten(data);
Object[] flatData = Utils.flatten(data);
for (int i = 0; i < flatData.length; i++) {
// Do element comparison as there are all different primitive numeric types
// convert to double
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@
import java.util.Collection;
import java.util.Collections;

import static io.jhdf.TestUtils.flatten;
import static io.jhdf.Utils.flatten;
import static io.jhdf.Utils.getDimensions;
import static io.jhdf.TestUtils.loadTestHdfFile;
import static org.hamcrest.MatcherAssert.assertThat;
Expand Down
2 changes: 1 addition & 1 deletion jhdf/src/test/java/io/jhdf/dataset/EnumDatasetTest.java
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@
import java.util.List;
import java.util.stream.Stream;

import static io.jhdf.TestUtils.flatten;
import static io.jhdf.Utils.flatten;
import static io.jhdf.TestUtils.loadTestHdfFile;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.MatcherAssert.assertThat;
Expand Down
2 changes: 1 addition & 1 deletion jhdf/src/test/java/io/jhdf/dataset/OddDatasetTest.java
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
import java.util.Arrays;
import java.util.Collection;

import static io.jhdf.TestUtils.flatten;
import static io.jhdf.Utils.flatten;
import static io.jhdf.TestUtils.loadTestHdfFile;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
Expand Down
Loading

0 comments on commit 55ea788

Please sign in to comment.