Commit

Merge pull request #593 from jamesmudd/scalar-attributes
Filling out attribute support
jamesmudd authored Jul 20, 2024
2 parents 29384a1 + c28b354 commit bf85a6f
Showing 47 changed files with 1,229 additions and 244 deletions.
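
The changes shown below extend the writable API so that attribute values, including boxed scalars, can be written alongside datasets and groups. A minimal usage sketch, assuming the HdfFile.write(Path) / WritableHdfFile entry point of the jHDF writing API from this era; the file name and values are illustrative only:

import io.jhdf.HdfFile;
import io.jhdf.WritableHdfFile;
import io.jhdf.api.WritableGroup;
import io.jhdf.api.WritiableDataset;

import java.nio.file.Paths;

public class ScalarAttributeExample {
    public static void main(String[] args) {
        // Assumption: HdfFile.write(Path) returns an AutoCloseable WritableHdfFile in this version of jHDF
        try (WritableHdfFile hdfFile = HdfFile.write(Paths.get("attributes.hdf5"))) {
            WritableGroup group = hdfFile.putGroup("data");
            WritiableDataset dataset = group.putDataset("values", new int[] {1, 2, 3});
            // Scalar (boxed) and array attributes, both resolved by DataType.fromObject below
            dataset.putAttribute("fillValue", 42);                   // scalar int attribute
            group.putAttribute("scale", new double[] {0.5, 1.0});    // 1D double attribute
        }
    }
}
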
6 changes: 6 additions & 0 deletions jhdf/src/main/java/io/jhdf/AbstractWritableNode.java
@@ -14,11 +14,13 @@
import io.jhdf.api.Group;
import io.jhdf.api.WritableAttributeImpl;
import io.jhdf.api.WritableNode;
import org.apache.commons.lang3.StringUtils;

import java.io.File;
import java.nio.file.Path;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

public abstract class AbstractWritableNode implements WritableNode {
private final Group parent;
@@ -77,6 +79,10 @@ public HdfFile getHdfFile() {

@Override
public Attribute putAttribute(String name, Object data) {
if(StringUtils.isBlank(name)) {
throw new IllegalArgumentException("name cannot be null or blank");
}
Objects.requireNonNull(data, "Cannot write null attributes");
WritableAttributeImpl attribute = new WritableAttributeImpl(name, this, data);
return attributes.put(name, attribute);
}
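
The added guards reject blank names and null values before a WritableAttributeImpl is created. A standalone sketch of the same checks, using only commons-lang3 and java.util.Objects, so the behaviour can be exercised without opening a file:

import org.apache.commons.lang3.StringUtils;

import java.util.Objects;

public class PutAttributeValidation {
    // Mirrors the guard clauses added to AbstractWritableNode.putAttribute
    static void validate(String name, Object data) {
        if (StringUtils.isBlank(name)) {
            throw new IllegalArgumentException("name cannot be null or blank");
        }
        Objects.requireNonNull(data, "Cannot write null attributes");
    }

    public static void main(String[] args) {
        validate("fillValue", 42);        // passes
        try {
            validate("  ", 42);           // blank name -> IllegalArgumentException
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());
        }
        try {
            validate("fillValue", null);  // null data -> NullPointerException
        } catch (NullPointerException e) {
            System.out.println(e.getMessage());
        }
    }
}
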
5 changes: 2 additions & 3 deletions jhdf/src/main/java/io/jhdf/FractalHeap.java
@@ -9,6 +9,8 @@
*/
package io.jhdf;

import io.jhdf.btree.BTreeV2;
import io.jhdf.btree.record.HugeFractalHeapObjectUnfilteredRecord;
import io.jhdf.checksum.ChecksumUtils;
import io.jhdf.exceptions.HdfException;
import io.jhdf.exceptions.UnsupportedHdfException;
@@ -35,9 +37,6 @@
import static io.jhdf.Utils.readBytesAsUnsignedLong;
import static java.nio.ByteOrder.LITTLE_ENDIAN;

import io.jhdf.btree.BTreeV2;
import io.jhdf.btree.record.HugeFractalHeapObjectUnfilteredRecord;

/**
* Fractal heap implementation. Used for storing data which can be looked up via
* an ID.
2 changes: 1 addition & 1 deletion jhdf/src/main/java/io/jhdf/HdfFile.java
@@ -35,8 +35,8 @@
import java.net.URI;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.Files;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
32 changes: 23 additions & 9 deletions jhdf/src/main/java/io/jhdf/Utils.java
@@ -379,9 +379,9 @@ public static void setBit(byte[] bytes, int bit, boolean value) {
final int byteIndex = bit / 8;
final int bitInByte = bit % 8;
if(value) {
bytes[byteIndex] |= 1 << bitInByte;
bytes[byteIndex] |= (byte) (1 << bitInByte);
} else {
bytes[byteIndex] &= ~(1 << bitInByte);
bytes[byteIndex] &= (byte) ~(1 << bitInByte);
}
}

@@ -403,6 +403,16 @@ public static int[] getDimensions(Object data) {
return ArrayUtils.toPrimitive(dims.toArray(new Integer[0]));
}

public static Class<?> getType(Object obj) {
final Class<?> type;
if(obj.getClass().isArray()) {
type = getArrayType(obj);
} else {
type = obj.getClass();
}
return type;
}

public static Class<?> getArrayType(Object array) {
Object element = Array.get(array, 0);
if (element.getClass().isArray()) {
@@ -432,14 +442,18 @@ public static Object[] flatten(Object data) {
}

private static void flattenInternal(Object data, List<Object> flat) {
int length = Array.getLength(data);
for (int i = 0; i < length; i++) {
Object element = Array.get(data, i);
if (element.getClass().isArray()) {
flattenInternal(element, flat);
} else {
flat.add(element);
if (data.getClass().isArray()) {
int length = Array.getLength(data);
for (int i = 0; i < length; i++) {
Object element = Array.get(data, i);
if (element.getClass().isArray()) {
flattenInternal(element, flat);
} else {
flat.add(element);
}
}
} else {
flat.add(data);
}
}
}
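
With these changes, Utils.getType resolves both scalars and nested arrays to an element class, and flatten/flattenInternal treat a non-array argument as a single element — the piece that lets scalar attribute values share the array code path. A quick sketch of the expected behaviour, assuming io.jhdf.Utils from this commit is on the classpath; the printed values are indicative:

import io.jhdf.Utils;

import java.util.Arrays;

public class UtilsTypeExample {
    public static void main(String[] args) {
        // Scalars resolve to their own (boxed) class...
        System.out.println(Utils.getType(42));                            // class java.lang.Integer
        // ...while arrays resolve to their primitive element type
        System.out.println(Utils.getType(new int[][] {{1, 2}, {3, 4}}));  // int

        // flatten() now also accepts a scalar and yields a single-element array
        System.out.println(Arrays.toString(Utils.flatten(3.5)));                              // [3.5]
        System.out.println(Arrays.toString(Utils.flatten(new int[][] {{1, 2}, {3, 4}})));     // [1, 2, 3, 4]
    }
}
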
49 changes: 5 additions & 44 deletions jhdf/src/main/java/io/jhdf/WritableDatasetImpl.java
@@ -15,7 +15,6 @@
import io.jhdf.api.NodeType;
import io.jhdf.api.WritiableDataset;
import io.jhdf.exceptions.HdfWritingException;
import io.jhdf.exceptions.UnsupportedHdfException;
import io.jhdf.filter.PipelineFilterWithData;
import io.jhdf.object.datatype.DataType;
import io.jhdf.object.message.AttributeInfoMessage;
@@ -34,7 +33,6 @@
import java.io.File;
import java.lang.reflect.Array;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collections;
@@ -78,7 +76,7 @@ public long getStorageInBytes() {

@Override
public int[] getDimensions() {
return Utils.getDimensions(data);
return dataSpace.getDimensions();
}

@Override
@@ -232,52 +230,15 @@ public long write(HdfFileChannel hdfFileChannel, long position) {

private long writeData(HdfFileChannel hdfFileChannel, long dataAddress) {
logger.info("Writing data for dataset [{}] at position [{}]", getPath(), dataAddress);
Class<?> arrayType = Utils.getArrayType(this.data);
long totalBytes = dataSpace.getTotalLength() * dataType.getSize();

int[] dimensions = dataSpace.getDimensions();
int fastDimSize = dimensions[dimensions.length - 1];
ByteBuffer buffer = ByteBuffer.allocate(fastDimSize * dataType.getSize()).order(ByteOrder.nativeOrder());
hdfFileChannel.position(dataAddress);

// TODO move out into data types?
if(arrayType.equals(byte.class)) {
writeByteData(data, dimensions, buffer, hdfFileChannel);
} else if(arrayType.equals(int.class)) {
writeIntData(data, dimensions, buffer, hdfFileChannel);
} else if (arrayType.equals(double.class)) {
writeDoubleData(data, dimensions, buffer, hdfFileChannel);
} else {
throw new UnsupportedHdfException("Writing [" + arrayType.getSimpleName() + "] is not supported");
}
return totalBytes;
}
dataType.writeData(data, getDimensions(), hdfFileChannel);

private static void writeByteData(Object data, int[] dims, ByteBuffer buffer, HdfFileChannel hdfFileChannel) {
if (dims.length > 1) {
for (int i = 0; i < dims[0]; i++) {
Object newArray = Array.get(data, i);
writeByteData(newArray, stripLeadingIndex(dims), buffer, hdfFileChannel);
}
} else {
buffer.put((byte[]) data);
buffer.rewind();
hdfFileChannel.write(buffer);
buffer.clear();
}
}
private static void writeIntData(Object data, int[] dims, ByteBuffer buffer, HdfFileChannel hdfFileChannel) {
if (dims.length > 1) {
for (int i = 0; i < dims[0]; i++) {
Object newArray = Array.get(data, i);
writeIntData(newArray, stripLeadingIndex(dims), buffer, hdfFileChannel);
}
} else {
buffer.asIntBuffer().put((int[]) data);
hdfFileChannel.write(buffer);
buffer.clear();
}
return dataSpace.getTotalLength() * dataType.getSize();
}


private static void writeDoubleData(Object data, int[] dims, ByteBuffer buffer, HdfFileChannel hdfFileChannel) {
if (dims.length > 1) {
for (int i = 0; i < dims[0]; i++) {
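
The per-type loops (writeByteData, writeIntData, writeDoubleData) are removed from the dataset and replaced by a single dataType.writeData(data, getDimensions(), hdfFileChannel) call, so each DataType subclass owns its own serialisation. A rough sketch of the shape such an override could take for a 4-byte integer type — illustrative only, not the PR's actual FixedPoint implementation, and the class and method names here are hypothetical:

import io.jhdf.Utils;
import io.jhdf.storage.HdfFileChannel;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class FixedPointWriteSketch {
    // Hypothetical: roughly what a DataType.writeData(Object, int[], HdfFileChannel) override might do
    static void writeIntData(Object data, HdfFileChannel hdfFileChannel) {
        Object[] flat = Utils.flatten(data);                      // nested arrays or a scalar, flattened
        ByteBuffer buffer = ByteBuffer.allocate(flat.length * Integer.BYTES)
            .order(ByteOrder.nativeOrder());                      // mirrors the buffer set-up removed above
        for (Object element : flat) {
            buffer.putInt(((Number) element).intValue());
        }
        buffer.rewind();
        hdfFileChannel.write(buffer);                              // HdfFileChannel.write(ByteBuffer), as used previously
    }
}
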
9 changes: 8 additions & 1 deletion jhdf/src/main/java/io/jhdf/WritableGroupImpl.java
@@ -26,6 +26,7 @@
import io.jhdf.object.message.LinkMessage;
import io.jhdf.object.message.Message;
import io.jhdf.storage.HdfFileChannel;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -100,6 +101,9 @@ public boolean isAttributeCreationOrderTracked() {

@Override
public WritiableDataset putDataset(String name, Object data) {
if(StringUtils.isBlank(name)) {
throw new IllegalArgumentException("name cannot be null or blank");
}
WritableDatasetImpl writableDataset = new WritableDatasetImpl(data, name, this);
children.put(name, writableDataset);
logger.info("Added dataset [{}] to group [{}]", name, getPath());
@@ -108,6 +112,9 @@ public WritiableDataset putDataset(String name, Object data) {

@Override
public WritableGroup putGroup(String name) {
if(StringUtils.isBlank(name)) {
throw new IllegalArgumentException("name cannot be null or blank");
}
WritableGroupImpl newGroup = new WritableGroupImpl(this, name);
children.put(name, newGroup);
logger.info("Added group [{}] to group [{}]", name, getPath());
@@ -139,7 +146,7 @@ public long write(HdfFileChannel hdfFileChannel, long position) {
AttributeInfoMessage attributeInfoMessage = AttributeInfoMessage.create();
messages.add(attributeInfoMessage);
for (Map.Entry<String, Attribute> attribute : getAttributes().entrySet()) {
logger.info("Writing attribute [{}]", attribute.getKey());
logger.info("Writing attribute [{}] in group [{}]", attribute.getKey(), getName());
AttributeMessage attributeMessage = AttributeMessage.create(attribute.getKey(), attribute.getValue());
messages.add(attributeMessage);
}
1 change: 1 addition & 0 deletions jhdf/src/main/java/io/jhdf/dataset/DatasetReader.java
@@ -11,6 +11,7 @@

import io.jhdf.object.datatype.DataType;
import io.jhdf.storage.HdfBackingStorage;

import java.lang.reflect.Array;
import java.nio.ByteBuffer;

6 changes: 4 additions & 2 deletions jhdf/src/main/java/io/jhdf/object/datatype/ArrayDataType.java
@@ -9,12 +9,14 @@
*/
package io.jhdf.object.datatype;

import static io.jhdf.Utils.readBytesAsUnsignedInt;
import io.jhdf.dataset.DatasetReader;
import io.jhdf.storage.HdfBackingStorage;
import org.apache.commons.lang3.ArrayUtils;

import java.lang.reflect.Array;
import java.nio.ByteBuffer;
import org.apache.commons.lang3.ArrayUtils;

import static io.jhdf.Utils.readBytesAsUnsignedInt;

/**
* Class for reading array data type messages.
38 changes: 20 additions & 18 deletions jhdf/src/main/java/io/jhdf/object/datatype/DataType.java
@@ -14,6 +14,7 @@
import io.jhdf.exceptions.HdfException;
import io.jhdf.exceptions.UnsupportedHdfException;
import io.jhdf.storage.HdfBackingStorage;
import io.jhdf.storage.HdfFileChannel;

import java.nio.ByteBuffer;
import java.util.BitSet;
@@ -94,25 +95,22 @@ protected DataType(ByteBuffer bb) {
}

public static DataType fromObject(Object data) {
if (data.getClass().isArray()) {
Class<?> type = Utils.getArrayType(data);
if(type.equals(byte.class)) {
return new FixedPoint(1);
} else if (type.equals(short.class)) {
return new FixedPoint(2);
} else if (type.equals(int.class)) {
return new FixedPoint(4);
} else if (type.equals(long.class)) {
return new FixedPoint(8);
} else if (type.equals(float.class)) {
return FloatingPoint.FLOAT;
} else if (type.equals(double.class)) {
return FloatingPoint.DOUBLE;
}
throw new HdfException("Could not create DataType for: " + type);

final Class<?> type = Utils.getType(data);

if (type == byte.class || type == Byte.class) {
return new FixedPoint(1);
} else if (type == short.class || type == Short.class) {
return new FixedPoint(2);
} else if (type == int.class || type == Integer.class) {
return new FixedPoint(4);
} else if (type == long.class || type == Long.class) {
return new FixedPoint(8);
} else if (type == float.class || type == Float.class) {
return FloatingPoint.FLOAT;
} else if (type == double.class || type == Double.class) {
return FloatingPoint.DOUBLE;
} else {
throw new UnsupportedHdfException("Only arrays can be written at the moment");
throw new HdfException("Could not create DataType for: " + type);
}
}

@@ -170,4 +168,8 @@ protected BufferBuilder toBufferBuilder() {
.writeBitSet(classBits, 3)
.writeInt(getSize());
}

public void writeData(Object data, int[] dimensions, HdfFileChannel hdfFileChannel) {
throw new UnsupportedHdfException("Data type [" + getClass().getSimpleName() + "] does not support writing");
}
}
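
DataType.fromObject now maps boxed scalars as well as primitive arrays onto the matching HDF5 type, and the new writeData method gives subclasses a serialisation hook (the base class still throws UnsupportedHdfException). A quick check of the mapping, assuming the io.jhdf classes from this commit are on the classpath; the printed sizes follow from the constructors above:

import io.jhdf.object.datatype.DataType;

public class FromObjectExample {
    public static void main(String[] args) {
        System.out.println(DataType.fromObject(42).getSize());                        // 4 -> FixedPoint(4)
        System.out.println(DataType.fromObject(42L).getSize());                       // 8 -> FixedPoint(8)
        System.out.println(DataType.fromObject(new double[] {1.0, 2.0}).getSize());   // 8 -> FloatingPoint.DOUBLE
        // Element types without a mapping still raise HdfException
    }
}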