Skip to content

Commit

Permalink
Adjust a comment
Browse files · Browse the repository at this point in the history
  • Loading branch information
IvanUkhov committed Feb 6, 2024
1 parent abae8fd commit 49e7c55
Showing 1 changed file with 1 addition and 1 deletion.
2 changes: 1 addition & 1 deletion _posts/2024-01-31-gradient-accumulation.md
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ class Optimizer(tf.keras.optimizers.Adam):
# Compute a scaling factor that will reset the accumulated gradients at
# the beginning of each cycle and do nothing otherwise.
scale = 1 - tf.cast(self.iterations % self.accumulation == 0, tf.float32)
-    # Add the new gradients to the old ones after scaling.
+    # Add the new gradients to the old ones after scaling with averaging.
for gradient, increment in zip(self._gradients, gradients):
gradient.assign(scale * gradient + increment / self.accumulation)
# Apply the average accumulated gradients to the trainable variables.
Expand Down

0 comments on commit 49e7c55

Please sign in to comment.