Correctly set output_size when hidden_cell=None
drasmuss committed Apr 19, 2024
1 parent 4d7fe22 commit 1972b9f
Showing 3 changed files with 37 additions and 0 deletions.
CHANGES.rst: 8 additions & 0 deletions
@@ -24,6 +24,14 @@ Release history

*Compatible with TensorFlow 2.6 - 2.16*

**Added**

- Added an ``input_d`` parameter to ``LMUCell``. This only needs to be specified
when ``hidden_cell=None`` and ``input_to_hidden=True``; in that scenario it is
required in order to accurately set ``LMUCell.output_size``. (`#56`_)

.. _#56: https://github.com/nengo/keras-lmu/pull/56

0.7.0 (July 20, 2023)
=====================

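As context for the changelog entry above, here is a minimal sketch of the scenario it describes (the ``memory_d``/``order``/``theta`` values are illustrative, and the import style follows this commit's tests):

from keras_lmu import layers

# With hidden_cell=None and input_to_hidden=True, the input is folded into
# the output alongside the memory state (see the hidden_output_size logic
# in the diff below), so the output size cannot be computed without knowing
# the input dimensionality; input_d supplies it at construction time.
cell = layers.LMUCell(
    memory_d=1,
    order=4,
    theta=10,
    hidden_cell=None,
    input_to_hidden=True,
    input_d=32,
)
# LMUCell.output_size now comes out to memory_d * order + input_d
# (here 1 * 4 + 32 = 36).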
keras_lmu/layers.py: 23 additions & 0 deletions
@@ -101,6 +101,10 @@ class to create a recurrent Keras layer to process the whole sequence. Calling
    dropout : float
        Dropout rate on input connections.
    recurrent_dropout : float
        Dropout rate on ``memory_to_memory`` connection.
    input_d : Optional[int]
        Size of last axis on input signals. This only needs to be specified if
        ``hidden_cell=None`` and ``input_to_hidden=True``; otherwise the input
        dimensionality can be inferred dynamically.

References
----------
@@ -132,6 +136,7 @@ def __init__(
        bias_regularizer=None,
        dropout=0,
        recurrent_dropout=0,
        input_d=None,
        seed=None,
        **kwargs,
    ):
@@ -141,6 +146,7 @@
        self.order = order
        self._init_theta = theta
        self.hidden_cell = hidden_cell

        self.trainable_theta = trainable_theta
        self.hidden_to_memory = hidden_to_memory
        self.memory_to_memory = memory_to_memory
@@ -155,6 +161,7 @@
        self.bias_regularizer = bias_regularizer
        self.dropout = dropout
        self.recurrent_dropout = recurrent_dropout
        self.input_d = input_d
        self.seed = seed
        if tf_version >= version.parse("2.16.0"):
            self.seed_generator = keras.random.SeedGenerator(seed)
@@ -178,6 +185,15 @@
                )

            self.hidden_output_size = self.memory_d * self.order

            if self.input_to_hidden:
                if self.input_d is None:
                    raise ValueError(
                        "input_d must be specified when setting input_to_hidden=True "
                        "with hidden_cell=None"
                    )
                self.hidden_output_size += self.input_d

            self.hidden_state_size = []
        elif hasattr(self.hidden_cell, "state_size"):
            self.hidden_output_size = self.hidden_cell.output_size
@@ -272,6 +288,12 @@ def build(self, input_shape):

        super().build(input_shape)

        if self.input_d is not None and input_shape[-1] != self.input_d:
            raise ValueError(
                f"Input dimensionality ({input_shape[-1]}) does not match expected "
                f"dimensionality ({self.input_d})"
            )

        enc_d = input_shape[-1]
        if self.hidden_to_memory:
            enc_d += self.hidden_output_size
@@ -484,6 +506,7 @@ def get_config(self):
                "bias_regularizer": self.bias_regularizer,
                "dropout": self.dropout,
                "recurrent_dropout": self.recurrent_dropout,
                "input_d": self.input_d,
                "seed": self.seed,
            }
        )
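A sketch of how the new build-time check surfaces when the cell is wrapped in a standard Keras RNN layer (sizes are illustrative; the ``tf.keras`` import is an assumption based on the TensorFlow 2.6 - 2.16 compatibility note above):

from tensorflow import keras

from keras_lmu import layers

cell = layers.LMUCell(
    memory_d=1,
    order=16,
    theta=10,
    hidden_cell=None,
    input_to_hidden=True,
    input_d=32,
)
lmu = keras.layers.RNN(cell)

# OK: the last axis of the input (32) matches input_d.
outputs = lmu(keras.Input(shape=(None, 32)))

# A mismatched last axis, e.g. shape=(None, 64), would instead fail in
# build() with "Input dimensionality (64) does not match expected
# dimensionality (32)".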
keras_lmu/tests/test_layers.py: 6 additions & 0 deletions
@@ -308,6 +308,12 @@ def test_validation_errors():
    with pytest.raises(ValueError, match="Unrecognized conv mode"):
        layers.LMUFeedforward(1, 2, 3, None, conv_mode="raw_bad")

    with pytest.raises(ValueError, match="input_d must be specified"):
        layers.LMUCell(1, 2, 3, None, input_d=None, input_to_hidden=True)

    with pytest.raises(ValueError, match="does not match expected dimensionality"):
        layers.LMUCell(1, 2, 3, None, input_d=1).build((1, 1, 2))


@pytest.mark.parametrize(
    "should_use_feedforward, hidden_to_memory, memory_to_memory, trainable_theta",
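Finally, since ``input_d`` is now included in ``get_config`` (see the layers.py hunk above), it survives config round-trips; a quick sketch, using the same positional arguments as the tests:

from keras_lmu import layers

cell = layers.LMUCell(1, 2, 3, None, input_to_hidden=True, input_d=8)
assert cell.get_config()["input_d"] == 8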
