From 6bbc28d123cdec20140331edc60df106d518a202 Mon Sep 17 00:00:00 2001
From: Milan Curcic
Date: Thu, 22 Jun 2023 11:27:03 -0400
Subject: [PATCH] Connect `flatten`, `conv2d`, and `maxpool2d` layers in backward pass (#142)

* Connect flatten, conv2d, and maxpool2d layers in backward pass

* Bump minor version
---
 fpm.toml                        |  2 +-
 src/nf/nf_network_submodule.f90 | 16 +++++++++++-----
 2 files changed, 12 insertions(+), 6 deletions(-)

diff --git a/fpm.toml b/fpm.toml
index 75362da..a02d33a 100644
--- a/fpm.toml
+++ b/fpm.toml
@@ -1,5 +1,5 @@
 name = "neural-fortran"
-version = "0.12.0"
+version = "0.13.0"
 license = "MIT"
 author = "Milan Curcic"
 maintainer = "milancurcic@hey.com"
diff --git a/src/nf/nf_network_submodule.f90 b/src/nf/nf_network_submodule.f90
index 6a0156d..cdd8cad 100644
--- a/src/nf/nf_network_submodule.f90
+++ b/src/nf/nf_network_submodule.f90
@@ -283,7 +283,6 @@ end function get_activation_by_name
   pure module subroutine backward(self, output)
     class(network), intent(in out) :: self
     real, intent(in) :: output(:)
-    real, allocatable :: gradient(:)
     integer :: n, num_layers

     num_layers = size(self % layers)
@@ -296,18 +295,25 @@ pure module subroutine backward(self, output)
         ! Output layer; apply the loss function
         select type(this_layer => self % layers(n) % p)
           type is(dense_layer)
-            gradient = quadratic_derivative(output, this_layer % output)
+            call self % layers(n) % backward( &
+              self % layers(n - 1), &
+              quadratic_derivative(output, this_layer % output) &
+            )
         end select
       else
         ! Hidden layer; take the gradient from the next layer
         select type(next_layer => self % layers(n + 1) % p)
           type is(dense_layer)
-            gradient = next_layer % gradient
+            call self % layers(n) % backward(self % layers(n - 1), next_layer % gradient)
+          type is(flatten_layer)
+            call self % layers(n) % backward(self % layers(n - 1), next_layer % gradient)
+          type is(conv2d_layer)
+            call self % layers(n) % backward(self % layers(n - 1), next_layer % gradient)
+          type is(maxpool2d_layer)
+            call self % layers(n) % backward(self % layers(n - 1), next_layer % gradient)
         end select
       end if

-      call self % layers(n) % backward(self % layers(n - 1), gradient)
-
     end do

   end subroutine backward
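
Note on what the second hunk enables: before this patch, the hidden-layer branch of `backward` only pulled a gradient from a following `dense_layer`, so gradients stopped at the first `flatten`, `conv2d`, or `maxpool2d` layer; now each layer's `backward` is called directly with the next layer's gradient regardless of which of these types follows it. Below is a minimal sketch of a network that exercises this path. It assumes the `nf` module exports the `network`, `input`, `conv2d`, `maxpool2d`, `flatten`, and `dense` constructors and the `forward`/`backward`/`update` methods as in the repository's CNN example around this version; exact names, defaults, and signatures may differ and are not part of this patch.

program cnn_backward_sketch
  ! Sketch only: builds a small CNN and runs one forward/backward/update
  ! step so the gradient flows through flatten, maxpool2d, and conv2d,
  ! which is the path connected by this patch.
  use nf, only: conv2d, dense, flatten, input, maxpool2d, network
  implicit none

  type(network) :: net
  real :: x(3, 32, 32)   ! one 3-channel input sample
  real :: y(10)          ! one-hot target vector

  net = network([ &
    input(3, 32, 32), &
    conv2d(filters=16, kernel_size=3), &
    maxpool2d(pool_size=2), &
    flatten(), &
    dense(10) &
  ])

  call random_number(x)
  y = 0
  y(1) = 1

  call net % forward(x)
  call net % backward(y)   ! now propagates past flatten, maxpool2d, conv2d
  call net % update(1e-3)  ! assumed signature: a single learning-rate argument

end program cnn_backward_sketch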