Skip to content

Commit

Permalink
Merge pull request #535 from jamesmudd/writting-datasets
Browse files Browse the repository at this point in the history
Add Writing datasets
  • Loading branch information
jamesmudd authored Jan 29, 2024
2 parents b173427 + 590ac07 commit 1392d9f
Show file tree
Hide file tree
Showing 19 changed files with 534 additions and 67 deletions.
6 changes: 3 additions & 3 deletions jhdf/src/main/java/io/jhdf/BufferBuilder.java
Original file line number Diff line number Diff line change
Expand Up @@ -104,12 +104,12 @@ public ByteBuffer build() {
}
}

public BufferBuilder writeBitSet(BitSet bitSet, int length) {
if(bitSet.toByteArray().length > length) {
public BufferBuilder writeBitSet(BitSet bitSet, int lengthBytes) {
if(bitSet.toByteArray().length > lengthBytes) {
throw new IllegalArgumentException("BitSet is longer than length provided");
}
try {
final byte[] bytes = Arrays.copyOf(bitSet.toByteArray(), length); // Ensure empty Bitset are not shortened
final byte[] bytes = Arrays.copyOf(bitSet.toByteArray(), lengthBytes); // Ensure empty Bitset are not shortened
dataOutputStream.write(bytes);
return this;
} catch (IOException e) {
Expand Down
20 changes: 20 additions & 0 deletions jhdf/src/main/java/io/jhdf/Utils.java
Original file line number Diff line number Diff line change
Expand Up @@ -94,6 +94,13 @@ public static void seekBufferToNextMultipleOfEight(ByteBuffer bb) {
bb.position(pos + (8 - (pos % 8)));
}

/**
 * Rounds a value up to the next multiple of eight. Used for aligning file
 * offsets and sizes to the 8-byte boundaries HDF5 requires.
 *
 * @param value the value to round up (typically a file offset or size)
 * @return the smallest multiple of 8 that is greater than or equal to value
 */
public static long nextMultipleOfEight(long value) {
	// Math.floorMod (unlike %) always returns a non-negative remainder, so
	// this rounds correctly even for negative values, where (value % 8)
	// would be <= 0 and the old "value + (8 - value % 8)" form overshot.
	return value + Math.floorMod(-value, 8L);
}

/**
* This reads the requested number of bytes from the buffer and returns the data
* as an unsigned <code>int</code>. After this call the buffer position will be
Expand Down Expand Up @@ -398,4 +405,17 @@ public static Class<?> getArrayType(Object array) {
return array.getClass().getComponentType();
}
}

/**
 * Writes the binary representation of a non-negative int value into a
 * {@link BitSet}, least-significant bit first, starting at the given offset.
 *
 * @param value  the non-negative value to encode
 * @param bits   the BitSet to write into
 * @param start  the index of the first bit to write
 * @param length the number of bits available for the value
 * @throws IllegalArgumentException if value is negative or needs more than
 *                                  length bits to represent
 */
public static void writeIntToBits(int value, BitSet bits, int start, int length) {
	if(value < 0) {
		throw new IllegalArgumentException("Value cannot be negative");
	}
	BigInteger bi = BigInteger.valueOf(value);
	// bitLength() is the number of bits in the minimal representation
	// (excluding sign), i.e. exactly the bits the value needs
	if(bi.bitLength() > length) {
		throw new IllegalArgumentException("Value [" + value + "] too high to convert to bits");
	}
	for (int i = 0; i < length; i++) {
		bits.set(start + i, bi.testBit(i)); // LSB first
	}
}
}
93 changes: 91 additions & 2 deletions jhdf/src/main/java/io/jhdf/WritableDatasetImpl.java
Original file line number Diff line number Diff line change
Expand Up @@ -13,25 +13,45 @@
import io.jhdf.api.Group;
import io.jhdf.api.NodeType;
import io.jhdf.api.WritiableDataset;
import io.jhdf.exceptions.UnsupportedHdfException;
import io.jhdf.filter.PipelineFilterWithData;
import io.jhdf.object.datatype.DataType;
import io.jhdf.object.message.DataLayout;
import io.jhdf.object.message.DataLayoutMessage.ContiguousDataLayoutMessage;
import io.jhdf.object.message.DataSpace;
import io.jhdf.object.message.DataSpaceMessage;
import io.jhdf.object.message.DataTypeMessage;
import io.jhdf.object.message.FillValueMessage;
import io.jhdf.object.message.Message;
import io.jhdf.storage.HdfFileChannel;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.lang.reflect.Array;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

import static io.jhdf.Utils.stripLeadingIndex;

public class WritableDatasetImpl extends AbstractWritableNode implements WritiableDataset {

private static final Logger logger = LoggerFactory.getLogger(WritableDatasetImpl.class);

private final Object data;
private final DataType dataType;

private final DataSpace dataSpace;

/**
 * Creates an in-memory writable dataset.
 *
 * @param data   the Java array holding the dataset's elements; the HDF5
 *               datatype and dataspace are derived from it
 * @param name   the dataset's name within its parent group
 * @param parent the group this dataset will be added to
 */
public WritableDatasetImpl(Object data, String name, Group parent) {
super(parent, name);
this.data = data;
// Derive HDF5 type and shape metadata from the supplied array up front
this.dataType = DataType.fromObject(data);
this.dataSpace = DataSpace.fromObject(data);
}

@Override
Expand Down Expand Up @@ -161,7 +181,76 @@ public boolean isAttributeCreationOrderTracked() {

@Override
/**
 * Writes this dataset (object header followed by the raw data) to the file.
 *
 * @param hdfFileChannel the channel to write to
 * @param position       the absolute file offset of the object header
 * @return the absolute file offset of the first byte after this dataset
 */
public long write(HdfFileChannel hdfFileChannel, long position) {
	logger.info("Writing dataset [{}] at position [{}]", getPath(), position);
	List<Message> messages = new ArrayList<>();
	messages.add(DataTypeMessage.create(this.dataType));
	messages.add(DataSpaceMessage.create(this.dataSpace));
	messages.add(FillValueMessage.NO_FILL);
	// The data address and size are only known once the header size is known,
	// so build the header first with a placeholder layout message
	ContiguousDataLayoutMessage placeholder = ContiguousDataLayoutMessage.create(Constants.UNDEFINED_ADDRESS, Constants.UNDEFINED_ADDRESS);
	messages.add(placeholder);

	ObjectHeader.ObjectHeaderV2 objectHeader = new ObjectHeader.ObjectHeaderV2(position, messages);
	int ohSize = objectHeader.toBuffer().limit();

	// The data is laid out contiguously, immediately after the object header
	long dataAddress = position + ohSize;
	long dataSize = writeData(hdfFileChannel, dataAddress);

	// Swap the placeholder for the real layout message and rebuild the header.
	// NOTE(review): this assumes the real message serializes to the same size
	// as the placeholder, so that dataAddress stays valid — confirm for all
	// superblock/offset-size combinations.
	messages.add(ContiguousDataLayoutMessage.create(dataAddress, dataSize));
	messages.remove(placeholder);

	objectHeader = new ObjectHeader.ObjectHeaderV2(position, messages);

	hdfFileChannel.write(objectHeader.toBuffer(), position);

	return dataAddress + dataSize;
}

/**
 * Writes the raw dataset elements contiguously starting at the given offset.
 * The data is streamed one innermost row at a time through a single reusable
 * buffer rather than materialising the whole dataset in memory.
 *
 * @param hdfFileChannel the channel to write to
 * @param dataAddress    the absolute file offset the data starts at
 * @return the total number of bytes the dataset occupies
 */
private long writeData(HdfFileChannel hdfFileChannel, long dataAddress) {
	logger.info("Writing data for dataset [{}] at position [{}]", getPath(), dataAddress);
	final Class<?> elementType = Utils.getArrayType(this.data);
	final int elementSize = dataType.getSize();
	final long totalBytes = dataSpace.getTotalLength() * elementSize;

	final int[] dims = dataSpace.getDimensions();
	// Buffer sized for one row of the fastest-varying (last) dimension
	final int rowLength = dims[dims.length - 1];
	final ByteBuffer rowBuffer = ByteBuffer.allocate(rowLength * elementSize).order(ByteOrder.nativeOrder());
	hdfFileChannel.position(dataAddress);

	// TODO move out into data types?
	if (elementType.equals(int.class)) {
		writeIntData(data, dims, rowBuffer, hdfFileChannel);
	} else if (elementType.equals(double.class)) {
		writeDoubleData(data, dims, rowBuffer, hdfFileChannel);
	} else {
		throw new UnsupportedHdfException("Writing [" + elementType.getSimpleName() + "] is not supported");
	}
	return totalBytes;
}

/**
 * Recursively walks a (possibly multi-dimensional) int array, writing one
 * innermost row at a time through the shared row buffer.
 */
private static void writeIntData(Object data, int[] dims, ByteBuffer buffer, HdfFileChannel hdfFileChannel) {
	if (dims.length <= 1) {
		// Innermost dimension: copy the row into the buffer and flush it
		buffer.asIntBuffer().put((int[]) data);
		hdfFileChannel.write(buffer);
		buffer.clear();
		return;
	}
	// Recurse into each sub-array along the leading dimension
	for (int i = 0; i < dims[0]; i++) {
		writeIntData(Array.get(data, i), stripLeadingIndex(dims), buffer, hdfFileChannel);
	}
}
/**
 * Recursively walks a (possibly multi-dimensional) double array, writing one
 * innermost row at a time through the shared row buffer.
 */
private static void writeDoubleData(Object data, int[] dims, ByteBuffer buffer, HdfFileChannel hdfFileChannel) {
	if (dims.length <= 1) {
		// Innermost dimension: copy the row into the buffer and flush it
		buffer.asDoubleBuffer().put((double[]) data);
		hdfFileChannel.write(buffer);
		buffer.clear();
		return;
	}
	// Recurse into each sub-array along the leading dimension
	for (int i = 0; i < dims[0]; i++) {
		writeDoubleData(Array.get(data, i), stripLeadingIndex(dims), buffer, hdfFileChannel);
	}
}
}
4 changes: 2 additions & 2 deletions jhdf/src/main/java/io/jhdf/WritableHdfFile.java
Original file line number Diff line number Diff line change
Expand Up @@ -50,9 +50,9 @@ public WritableHdfFile(Path path) {
logger.info("Writing HDF5 file to [{}]", path.toAbsolutePath());
this.path = path;
try {
this.fileChannel = FileChannel.open(path, StandardOpenOption.WRITE, StandardOpenOption.CREATE);
this.fileChannel = FileChannel.open(path, StandardOpenOption.READ, StandardOpenOption.WRITE, StandardOpenOption.CREATE);
} catch (IOException e) {
throw new HdfWritingException("Failed to ope file: " + path.toAbsolutePath(), e);
throw new HdfWritingException("Failed to open file: " + path.toAbsolutePath(), e);
}
this.superblock = new Superblock.SuperblockV2V3();
this.hdfFileChannel = new HdfFileChannel(this.fileChannel, this.superblock);
Expand Down
32 changes: 24 additions & 8 deletions jhdf/src/main/java/io/jhdf/object/datatype/DataType.java
Original file line number Diff line number Diff line change
Expand Up @@ -9,13 +9,13 @@
*/
package io.jhdf.object.datatype;

import io.jhdf.BufferBuilder;
import io.jhdf.Utils;
import io.jhdf.exceptions.HdfException;
import io.jhdf.exceptions.UnsupportedHdfException;
import io.jhdf.storage.HdfBackingStorage;

import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.BitSet;

public abstract class DataType {
Expand All @@ -25,11 +25,11 @@ public abstract class DataType {
private final int size; // In bytes
protected final BitSet classBits;

public DataType(int dataClass, int size) {
this.version = 0;
/**
 * Creates a new datatype for writing.
 *
 * @param dataClass the HDF5 datatype class id
 * @param size      the size of one element in bytes
 */
protected DataType(int dataClass, int size) {
	this.version = 1;
	this.dataClass = dataClass;
	this.size = size;
	// 24 bits covers the 3 class-bit-field bytes written by toBufferBuilder()
	this.classBits = new BitSet(24);
}

public static DataType readDataType(ByteBuffer bb) {
Expand All @@ -48,10 +48,11 @@ public static DataType readDataType(ByteBuffer bb) {
// Move the buffer back to the start of the data type message
bb.reset();

// TODO all class IDs
switch (dataClass) {
case 0: // Fixed point
case FixedPoint.CLASS_ID: // Fixed point
return new FixedPoint(bb);
case 1: // Floating point
case FloatingPoint.CLASS_ID: // Floating point
return new FloatingPoint(bb);
case 2: // Time
throw new UnsupportedHdfException("Time data type is not yet supported");
Expand Down Expand Up @@ -96,9 +97,9 @@ public static DataType fromObject(Object data) {
if (data.getClass().isArray()) {
Class<?> type = Utils.getArrayType(data);
if (type.equals(int.class)) {
return new FixedPoint((short) 4);
return new FixedPoint(4);
} else if (type.equals(double.class)) {
return new FloatingPoint(8);
return FloatingPoint.DOUBLE;
}
throw new HdfException("Error");

Expand Down Expand Up @@ -141,4 +142,19 @@ public int getSize() {
*/
public abstract Object fillData(ByteBuffer buffer, int[] dimensions, HdfBackingStorage hdfBackingStorage);

/**
 * Serializes this datatype to an HDF5 datatype message. Subclasses that
 * support writing override this; the base implementation rejects writing.
 * TODO could be abstract when there are more impls
 *
 * @return the serialized datatype message
 * @throws UnsupportedHdfException always, for datatypes without write support
 */
public ByteBuffer toBuffer() {
throw new UnsupportedHdfException("Data type [" + getClass().getSimpleName() + "] does not support writing");
}

/**
 * Builds the common 8-byte header shared by all datatype messages:
 * byte 0 holds the class (low 4 bits) and version (high 4 bits), bytes 1-3
 * hold the 24 class-specific bit-field bits, bytes 4-7 hold the element size.
 *
 * @return a builder positioned after the common header, ready for the
 *         subclass to append its class-specific properties
 */
protected BufferBuilder toBufferBuilder() {
BitSet classAndVersion = new BitSet(8);
// Class in bits 0-3, version in bits 4-7 (LSB first within the byte)
Utils.writeIntToBits(dataClass, classAndVersion, 0, 4);
Utils.writeIntToBits(version, classAndVersion, 4, 4);

return new BufferBuilder()
.writeBitSet(classAndVersion,1)
.writeBitSet(classBits, 3)
.writeInt(getSize());
}
}
38 changes: 28 additions & 10 deletions jhdf/src/main/java/io/jhdf/object/datatype/FixedPoint.java
Original file line number Diff line number Diff line change
Expand Up @@ -19,11 +19,16 @@
import java.nio.IntBuffer;
import java.nio.LongBuffer;
import java.nio.ShortBuffer;
import java.util.BitSet;

import static io.jhdf.Utils.stripLeadingIndex;

public class FixedPoint extends DataType implements OrderedDataType {
public class FixedPoint extends DataType implements OrderedDataType, WritableDataType {

public static final int CLASS_ID = 0;
private static final int ORDER_BIT = 0;
private static final int LOW_PADDING_BIT = 1;
private static final int HIGH_PADDING_BIT = 2;
private static final int SIGNED_BIT = 3;
private final ByteOrder order;
private final boolean lowPadding;
private final boolean highPadding;
Expand All @@ -34,25 +39,25 @@ public class FixedPoint extends DataType implements OrderedDataType {
public FixedPoint(ByteBuffer bb) {
super(bb);

if (classBits.get(0)) {
if (classBits.get(ORDER_BIT)) {
order = ByteOrder.BIG_ENDIAN;
} else {
order = ByteOrder.LITTLE_ENDIAN;
}

lowPadding = classBits.get(1);
highPadding = classBits.get(2);
signed = classBits.get(3);
lowPadding = classBits.get(LOW_PADDING_BIT);
highPadding = classBits.get(HIGH_PADDING_BIT);
signed = classBits.get(SIGNED_BIT);

bitOffset = bb.getShort();
bitPrecision = bb.getShort();
}

public FixedPoint(int bitPrecision) {
public FixedPoint(int bytePrecision) {
// TODO arg validation
super(1, bitPrecision * 8);
super(CLASS_ID, bytePrecision);
this.order = ByteOrder.nativeOrder();
this.bitPrecision = (short) bitPrecision;
this.bitPrecision = (short) (bytePrecision * 8);
this.lowPadding = false;
this.highPadding = false;
this.signed = true;
Expand Down Expand Up @@ -109,7 +114,7 @@ public Class<?> getJavaType() {
case 64:
return BigInteger.class;
default:
throw new HdfTypeException("Unsupported signed fixed point data type");
throw new HdfTypeException("Unsupported unsigned fixed point data type");
}
}
}
Expand Down Expand Up @@ -274,4 +279,17 @@ private static void fillDataUnsigned(Object data, int[] dims, LongBuffer buffer)
}
}
}

/**
 * Serializes this fixed-point datatype to an HDF5 datatype message.
 * Sets the class bit-field flags to reflect this instance's state before
 * delegating the common header to the superclass, then appends the
 * fixed-point properties (bit offset and bit precision).
 *
 * @return the serialized datatype message
 */
@Override
public ByteBuffer toBuffer() {
// Re-setting the flags on every call is safe: the values are idempotent
classBits.set(ORDER_BIT, order.equals(ByteOrder.BIG_ENDIAN));
classBits.set(LOW_PADDING_BIT, lowPadding);
classBits.set(HIGH_PADDING_BIT, highPadding);
classBits.set(SIGNED_BIT, signed);

return super.toBufferBuilder()
.writeShort(bitOffset)
.writeShort(bitPrecision)
.build();
}
}
Loading

0 comments on commit 1392d9f

Please sign in to comment.