-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathLayer.py
51 lines (40 loc) · 1.67 KB
/
Layer.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
import numpy as np
class Layer:
    """
    A single fully-connected neural-network layer.

    Holds a weight matrix of shape (output_dim, input_dim), initialised
    uniformly in [-0.5, 0.5), and an activation object instantiated from
    the given activation class.

    Args:
        input_dim: the dimension of the input.
        output_dim: the dimension of the output.
        activation: the activation function (a class, not an object);
            instances must provide activate(x) and differentiate().
    """

    def __init__(self, input_dim, output_dim, activation):
        self.input_dim = input_dim
        self.output_dim = output_dim
        # Uniform init in [-0.5, 0.5); maps input vectors to output vectors.
        self.weights = np.random.rand(output_dim, input_dim) - 0.5
        self.activation = activation()
        # When True, backward() still propagates the error but skips the
        # weight update (useful for transfer learning / frozen layers).
        self.freeze = False

    def forward(self, input):
        """
        Do one forward pass for the layer and return the output.

        Args:
            input: the input vector; assumed to be a column vector of
                shape (input_dim, 1) — the weight update in backward()
                relies on input.T producing a row vector.

        Returns:
            the activated output of shape (output_dim, 1).
        """
        # Cache input and pre-activation for use in backward().
        self.input = input
        self.output = np.dot(self.weights, input)
        self.activated_output = self.activation.activate(self.output)
        return self.activated_output

    def backward(self, next_error, lr):
        """
        Do one backward pass; update the weights unless the layer is frozen.

        Args:
            next_error: the error propagated back from the next layer,
                shape (output_dim, 1).
            lr: the learning rate.

        Returns:
            the error of this layer to propagate to the previous layer,
            shape (input_dim, 1).
        """
        # delta = dL/dz for this layer: chain the incoming error through
        # this layer's activation derivative (differentiate() is assumed
        # to use state cached by the preceding activate() call).
        delta = self.activation.differentiate() * next_error
        # BUG FIX: the error sent to the previous layer must include this
        # layer's activation derivative (W.T @ delta). The original code
        # returned W.T @ next_error while using delta for the weight
        # gradient, which silently dropped the derivative factor from the
        # backpropagated error of every deeper layer.
        previous_layer_error = np.dot(self.weights.T, delta)
        if not self.freeze:
            # dL/dW = delta @ input.T (outer product for column vectors).
            self.weights -= lr * np.dot(delta, self.input.T)
        return previous_layer_error