diff --git a/jhdf/src/main/java/io/jhdf/object/datatype/BitField.java b/jhdf/src/main/java/io/jhdf/object/datatype/BitField.java
index 562b0cb5..643043ae 100644
--- a/jhdf/src/main/java/io/jhdf/object/datatype/BitField.java
+++ b/jhdf/src/main/java/io/jhdf/object/datatype/BitField.java
@@ -9,15 +9,28 @@
  */
 package io.jhdf.object.datatype;
 
+import io.jhdf.Utils;
+import io.jhdf.exceptions.UnsupportedHdfException;
 import io.jhdf.storage.HdfBackingStorage;
+import io.jhdf.storage.HdfFileChannel;
+import org.apache.commons.lang3.ArrayUtils;
 
 import java.lang.reflect.Array;
 import java.nio.ByteBuffer;
 import java.nio.ByteOrder;
+import java.util.Arrays;
+import java.util.Objects;
 
 import static io.jhdf.Utils.stripLeadingIndex;
 
 public class BitField extends DataType implements OrderedDataType {
+	public static final int CLASS_ID = 4;
+	private static final int ORDER_BIT = 0;
+	private static final int LOW_PADDING_BIT = 1;
+	private static final int HIGH_PADDING_BIT = 2;
+
+	public static final BitField INSTANCE = new BitField();
+
 	private final ByteOrder order;
 	private final boolean lowPadding;
 	private final boolean highPadding;
@@ -27,19 +43,29 @@ public class BitField extends DataType implements OrderedDataType {
 	public BitField(ByteBuffer bb) {
 		super(bb);
 
-		if (classBits.get(0)) {
+		if (classBits.get(ORDER_BIT)) {
 			order = ByteOrder.BIG_ENDIAN;
 		} else {
 			order = ByteOrder.LITTLE_ENDIAN;
 		}
 
-		lowPadding = classBits.get(1);
-		highPadding = classBits.get(2);
+		lowPadding = classBits.get(LOW_PADDING_BIT);
+		highPadding = classBits.get(HIGH_PADDING_BIT);
 
 		bitOffset = bb.getShort();
 		bitPrecision = bb.getShort();
 	}
 
+	private BitField() {
+		super(CLASS_ID, 1);
+
+		this.order = ByteOrder.nativeOrder();
+		this.bitPrecision = 8;
+		this.bitOffset = 0;
+		this.lowPadding = false;
+		this.highPadding = false;
+	}
+
 	@Override
 	public ByteOrder getByteOrder() {
 		return order;
@@ -80,11 +106,134 @@ private static void fillBitfieldData(Object data, int[] dims, ByteBuffer buffer)
 				fillBitfieldData(newArray, stripLeadingIndex(dims), buffer);
 			}
 		} else {
-			for (int i = 0; i < Array.getLength(data); i++) {
+			for (int i = 0; i < dims[0]; i++) {
 				Array.set(data, i, buffer.get() == 1);
 			}
 		}
 	}
 
+	@Override
+	public ByteBuffer toBuffer() {
+		classBits.set(ORDER_BIT, order.equals(ByteOrder.BIG_ENDIAN));
+		classBits.set(LOW_PADDING_BIT, lowPadding);
+		classBits.set(HIGH_PADDING_BIT, highPadding);
+
+		return  super.toBufferBuilder()
+			.writeShort(bitOffset)
+			.writeShort(bitPrecision)
+			.build();
+	}
+
+	@Override
+	public ByteBuffer encodeData(Object data) {
+		Objects.requireNonNull(data, "Cannot encode null");
+
+
+		if(data.getClass().isArray()) {
+			return encodeArrayData(data);
+		} else {
+			return encodeScalarData(data);
+		}
+	}
+
+
+	private ByteBuffer encodeScalarData(Object data) {
+		final ByteBuffer buffer = ByteBuffer.allocate(getSize()).order(order);
+		buffer.put(booleanToByte((Boolean) data));
+		return buffer;
+	}
+
+	private ByteBuffer encodeArrayData(Object data) {
+		final Class<?> type = Utils.getType(data);
+		final int[] dimensions = Utils.getDimensions(data);
+		final int totalElements = Arrays.stream(dimensions).reduce(1, Math::multiplyExact);
+		final ByteBuffer buffer = ByteBuffer.allocate(totalElements * getSize())
+			.order(order);
+		if(type == boolean.class) {
+			encodeBooleanData(data, dimensions, buffer, true);
+		} else if (type == Boolean.class) {
+			encodeBooleanData(data, dimensions, buffer, false);
+		} else {
+			throw new UnsupportedHdfException("Cant write type: " + type);
+		}
+		return buffer;
+	}
+
+	private static void encodeBooleanData(Object data, int[] dims, ByteBuffer buffer, boolean primitive) {
+		if (dims.length > 1) {
+			for (int i = 0; i < dims[0]; i++) {
+				Object newArray = Array.get(data, i);
+				encodeBooleanData(newArray, stripLeadingIndex(dims), buffer, primitive);
+			}
+		} else {
+			if(primitive) {
+				buffer.put(asByteArray((boolean[]) data));
+			} else {
+				buffer.put(asByteArray(ArrayUtils.toPrimitive((Boolean[]) data)));
+			}
+		}
+	}
+
+	private static byte[] asByteArray(boolean[] data) {
+		byte[] bytes = new byte[data.length];
+		for (int i = 0; i < data.length; i++) {
+			bytes[i] = booleanToByte(data[i]);
+		}
+		return bytes;
+	}
+
+	private static byte booleanToByte(boolean b) {
+		return b ? (byte) 1 : 0;
+	}
+
+	@Override
+	public void writeData(Object data, int[] dimensions, HdfFileChannel hdfFileChannel) {
+		if (data.getClass().isArray()) {
+			writeArrayData(data, dimensions, hdfFileChannel);
+		} else {
+			writeScalarData(data, hdfFileChannel);
+		}
+
+	}
+
+	private void writeScalarData(Object data, HdfFileChannel hdfFileChannel) {
+		ByteBuffer buffer = encodeScalarData(data);
+		buffer.rewind();
+		hdfFileChannel.write(buffer);
+	}
+
+	private void writeArrayData(Object data, int[] dimensions, HdfFileChannel hdfFileChannel) {
+		final Class<?> type = Utils.getType(data);
+		final int fastDimSize = dimensions[dimensions.length - 1];
+		// This buffer is reused
+		final ByteBuffer buffer = ByteBuffer.allocate(fastDimSize * getSize())
+			.order(order);
+		if (type == boolean.class) {
+			writeBooleanData(data, dimensions, buffer, hdfFileChannel, true);
+		} else if (type == Boolean.class) {
+			writeBooleanData(data, dimensions, buffer, hdfFileChannel, false);
+		} else {
+			throw new UnsupportedHdfException("Cant write type: " + type);
+		}
+	}
+
+
+	private static void writeBooleanData(Object data, int[] dims, ByteBuffer buffer, HdfFileChannel hdfFileChannel, boolean primitive) {
+		if (dims.length > 1) {
+			for (int i = 0; i < dims[0]; i++) {
+				Object newArray = Array.get(data, i);
+				writeBooleanData(newArray, stripLeadingIndex(dims), buffer, hdfFileChannel, primitive);
+			}
+		} else {
+			if(primitive) {
+				buffer.put(asByteArray((boolean[]) data));
+			} else {
+				buffer.put(asByteArray(ArrayUtils.toPrimitive((Boolean[]) data)));
+			}
+			buffer.rewind(); // Rewind before writing: the same buffer is reused for every row, not a fresh view
+			hdfFileChannel.write(buffer);
+			buffer.clear();
+		}
+	}
 
 }
diff --git a/jhdf/src/main/java/io/jhdf/object/datatype/DataType.java b/jhdf/src/main/java/io/jhdf/object/datatype/DataType.java
index 791eea44..f9af68be 100644
--- a/jhdf/src/main/java/io/jhdf/object/datatype/DataType.java
+++ b/jhdf/src/main/java/io/jhdf/object/datatype/DataType.java
@@ -59,7 +59,7 @@ public static DataType readDataType(ByteBuffer bb) {
 				throw new UnsupportedHdfException("Time data type is not yet supported");
 			case StringData.CLASS_ID: // String
 				return new StringData(bb);
-			case 4: // Bit field
+			case BitField.CLASS_ID: // Bit field
 				return new BitField(bb);
 			case 5: // Opaque
 				return new OpaqueDataType(bb);
@@ -111,6 +111,8 @@ public static DataType fromObject(Object data) {
 			return FloatingPoint.DOUBLE;
 		} else if (type == String.class) {
 			return StringData.create(data);
+		} else if (type == boolean.class || type == Boolean.class) {
+			return BitField.INSTANCE;
 		} else {
 			throw new HdfException("Could not create DataType for: " + type);
 		}
diff --git a/jhdf/src/test/java/io/jhdf/TestUtils.java b/jhdf/src/test/java/io/jhdf/TestUtils.java
index 3e763042..dc24bb35 100644
--- a/jhdf/src/test/java/io/jhdf/TestUtils.java
+++ b/jhdf/src/test/java/io/jhdf/TestUtils.java
@@ -13,6 +13,7 @@
 import io.jhdf.api.Dataset;
 import io.jhdf.api.Group;
 import io.jhdf.api.Node;
+import org.apache.commons.lang3.BooleanUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -54,6 +56,21 @@ public static String[] toStringArray(Object data) {
 			.toArray(String[]::new);
 	}
 
+	public static Boolean[] toBooleanArray(Object data) {
+		return Arrays.stream(Utils.flatten(data))
+			.map(el -> parseBoolean(el.toString()))
+			.toArray(Boolean[]::new);
+	}
+
+	private static Boolean parseBoolean(String str) {
+		Boolean aBoolean = BooleanUtils.toBooleanObject(str);
+		if(aBoolean != null) {
+			return aBoolean;
+		}
+		// Used for parsing h5dump output
+		return BooleanUtils.toBooleanObject(str, "0x01", "0x00", "null");
+	}
+
 	public static void compareGroups(Group group1, Group group2) {
 		logger.info("Comparing groups [{}]", group1.getPath());
 
@@ -79,8 +96,15 @@ private static void compareAttributes(Attribute attribute1, Attribute attribute2
 		assertThat(attribute1.getName(), is(equalTo(attribute2.getName())));
 		assertThat(attribute1.getDimensions(), is(equalTo(attribute2.getDimensions())));
 		assertThat(attribute1.getJavaType(), is(equalTo(attribute2.getJavaType())));
+		assertThat(attribute1.isScalar(), is(equalTo(attribute2.isScalar())));
+		assertThat(attribute1.isEmpty(), is(equalTo(attribute2.isEmpty())));
+
+
 		if(attribute1.getJavaType() == String.class) {
 			assertArrayEquals(toStringArray(attribute1.getData()), toStringArray(attribute2.getData()));
+		} else if (attribute1.getJavaType() == boolean.class ||
+					attribute1.getJavaType() == Boolean.class) {
+			assertArrayEquals(toBooleanArray(attribute1.getData()), toBooleanArray(attribute2.getData()));
 		} else {
 			assertArrayEquals(toDoubleArray(attribute1.getData()), toDoubleArray(attribute2.getData()), 0.002);
 		}
@@ -93,6 +117,9 @@ private static void compareDatasets(Dataset dataset1, Dataset dataset2) {
 		assertThat(dataset1.getJavaType(), is(equalTo(dataset2.getJavaType())));
 		if(dataset1.getJavaType() == String.class) {
 			assertArrayEquals(toStringArray(dataset1.getData()), toStringArray(dataset2.getData()));
+		} else if (dataset1.getJavaType() == boolean.class ||
+			dataset1.getJavaType() == Boolean.class) {
+			assertArrayEquals(toBooleanArray(dataset1.getData()), toBooleanArray(dataset2.getData()));
 		} else {
 			assertArrayEquals(toDoubleArray(dataset1.getData()), toDoubleArray(dataset2.getData()), 0.002);
 		}
diff --git a/jhdf/src/test/java/io/jhdf/h5dump/H5Dump.java b/jhdf/src/test/java/io/jhdf/h5dump/H5Dump.java
index b990fbfd..7172e01c 100644
--- a/jhdf/src/test/java/io/jhdf/h5dump/H5Dump.java
+++ b/jhdf/src/test/java/io/jhdf/h5dump/H5Dump.java
@@ -24,6 +24,7 @@
 import java.nio.file.Path;
 import java.util.concurrent.TimeUnit;
 
+import static io.jhdf.TestUtils.toBooleanArray;
 import static io.jhdf.TestUtils.toDoubleArray;
 import static io.jhdf.TestUtils.toStringArray;
 import static org.hamcrest.MatcherAssert.assertThat;
@@ -87,6 +88,9 @@ private static void compareAttributes(AttributeXml attributeXml, Attribute attri
 		assertThat(attributeXml.getDimensions(), is(equalTo(attribute.getDimensions())));
 		if(attribute.getJavaType() == String.class) {
 			assertArrayEquals(toStringArray(attributeXml.getData()), toStringArray(attribute.getData()));
+		} else if (attribute.getJavaType() == boolean.class ||
+			attribute.getJavaType() == Boolean.class) {
+			assertArrayEquals(toBooleanArray(attributeXml.getData()), toBooleanArray(attribute.getData()));
 		} else {
 			assertArrayEquals(toDoubleArray(attributeXml.getData()), toDoubleArray(attribute.getData()), 0.002);
 		}	}
@@ -97,6 +101,9 @@ private static void compareDatasets(DatasetXml datasetXml, Dataset dataset) {
 		assertThat(datasetXml.getDimensions(), is(equalTo(dataset.getDimensions())));
 		if(dataset.getJavaType() == String.class) {
 			assertArrayEquals(toStringArray(datasetXml.getData()), toStringArray(dataset.getData()));
+		} else if (dataset.getJavaType() == boolean.class ||
+				dataset.getJavaType() == Boolean.class) {
+			assertArrayEquals(toBooleanArray(datasetXml.getData()), toBooleanArray(dataset.getData()));
 		} else {
 			assertArrayEquals(toDoubleArray(datasetXml.getData()), toDoubleArray(dataset.getData()), 0.002);
 		}
diff --git a/jhdf/src/test/java/io/jhdf/writing/BooleanWritingTest.java b/jhdf/src/test/java/io/jhdf/writing/BooleanWritingTest.java
new file mode 100644
index 00000000..7100d2f0
--- /dev/null
+++ b/jhdf/src/test/java/io/jhdf/writing/BooleanWritingTest.java
@@ -0,0 +1,98 @@
+/*
+ * This file is part of jHDF. A pure Java library for accessing HDF5 files.
+ *
+ * https://jhdf.io
+ *
+ * Copyright (c) 2024 James Mudd
+ *
+ * MIT License see 'LICENSE' file
+ */
+
+package io.jhdf.writing;
+
+import io.jhdf.HdfFile;
+import io.jhdf.TestUtils;
+import io.jhdf.WritableHdfFile;
+import io.jhdf.api.Node;
+import io.jhdf.api.WritiableDataset;
+import io.jhdf.examples.TestAllFilesBase;
+import io.jhdf.h5dump.EnabledIfH5DumpAvailable;
+import io.jhdf.h5dump.H5Dump;
+import io.jhdf.h5dump.HDF5FileXml;
+import org.junit.jupiter.api.MethodOrderer;
+import org.junit.jupiter.api.Order;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.TestInstance;
+import org.junit.jupiter.api.TestMethodOrder;
+
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Map;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
+@TestInstance(TestInstance.Lifecycle.PER_CLASS)
+class BooleanWritingTest {
+
+	private Path tempFile;
+
+	@Test
+	@Order(1)
+	void writeBooleans() throws Exception {
+		tempFile = Files.createTempFile(this.getClass().getSimpleName(), ".hdf5");
+		WritableHdfFile writableHdfFile = HdfFile.write(tempFile);
+
+		WritiableDataset scalarTrueDataset = writableHdfFile.putDataset("scalarTrueBoolean", true);
+		WritiableDataset scalarFalseDataset = writableHdfFile.putDataset("scalarFalseBoolean", false);
+		scalarTrueDataset.putAttribute("scalarTrueAttribute", true);
+		scalarFalseDataset.putAttribute("scalarFalseAttribute", false);
+
+		WritiableDataset oneDBoolean = writableHdfFile.putDataset("1DBoolean", new boolean[]
+			{true, false, true, false, true, false});
+		oneDBoolean.putAttribute("1DBooleanAttr", new boolean[]
+			{true, false, true, false, true, false});
+
+		WritiableDataset oneDObjBoolean = writableHdfFile.putDataset("1DObjBoolean", new Boolean[]
+			{true, false, true, false, true, false});
+		oneDObjBoolean.putAttribute("1DObjBooleanAttr", new Boolean[]
+			{true, false, true, false, true, false});
+
+		WritiableDataset twoDBooleanDataset = writableHdfFile.putDataset("2DBoolean", new boolean[][]{
+			{true, false, true, false, true, false},
+			{false, true, false, true, false, true}});
+
+		twoDBooleanDataset.putAttribute("2DBooleanAttr", new boolean[][]{
+			{true, false, true, false, true, false},
+			{false, true, false, true, false, true}});
+
+		// Actually flush and write everything
+		writableHdfFile.close();
+
+		// Now read it back
+		try (HdfFile hdfFile = new HdfFile(tempFile)) {
+			Map<String, Node> datasets = hdfFile.getChildren();
+			assertThat(datasets).hasSize(5);
+
+			// Just check the whole file is readable
+			TestAllFilesBase.verifyAttributes(hdfFile);
+			TestAllFilesBase.recurseGroup(hdfFile);
+
+			TestUtils.compareGroups(writableHdfFile, hdfFile);
+		}
+	}
+
+	@Test
+	@Order(2)
+	@EnabledIfH5DumpAvailable
+	void readBooleanDatasetsWithH5Dump() throws Exception {
+		// Read with h5dump
+		HDF5FileXml hdf5FileXml = H5Dump.dumpAndParse(tempFile);
+
+		// Read with jhdf
+		try (HdfFile hdfFile = new HdfFile(tempFile)) {
+			// Compare
+			H5Dump.assetXmlAndHdfFileMatch(hdf5FileXml, hdfFile);
+		}
+	}
+}