/* activations_layer.c: element-wise activation layer (currently ReLU only). */
#include "activations_layer.h"
/* Allocate an activation layer of the given type (e.g. RELU).
 * aalloc is this repo's allocation helper, presumably a checked malloc. */
activations_layer* activations_alloc(int type) {
    activations_layer *layer = aalloc(sizeof(*layer));
    layer->cache = NULL;
    layer->type = type;
    return layer;
}
/* Release the layer and its cached input. matrix_free must tolerate NULL,
 * since the cache is NULL until the first forward pass. */
void activations_free(activations_layer *layer) {
    matrix_free(layer->cache);
    free(layer);
}
/* Apply the activation element-wise and return a freshly allocated output.
 * For ReLU the input is cached so the backward pass can recover the mask. */
matrix* activations_forward(activations_layer *layer, matrix *input) {
    matrix_free(layer->cache);
    layer->cache = NULL; /* avoid a dangling pointer for types that take no cache */
    matrix *out = matrix_alloc(input->rows, input->columns);
    int len = input->rows * input->columns;
    if (layer->type == RELU) {
        layer->cache = mat_copy(input);
    }
    for (int i = 0; i < len; i++) {
        out->data[i] = activate(input->data[i], layer->type);
    }
    return out;
}
/* Backpropagate through the activation: scales each upstream gradient in dout
 * by the local derivative. ReLU needs the cached forward input to know which
 * elements were positive. */
matrix* activations_backward(activations_layer *layer, matrix *dout) {
    matrix *out = matrix_alloc(dout->rows, dout->columns);
    int len = dout->rows * dout->columns;
    if (layer->type == RELU) {
        for (int i = 0; i < len; i++) {
            out->data[i] = del_activate(dout->data[i], layer->cache->data[i], layer->type);
        }
    } else {
        for (int i = 0; i < len; i++) {
            out->data[i] = del_activate(dout->data[i], 0, layer->type);
        }
    }
    return out;
}
/* ReLU: max(0, x). */
float _relu(float x) {
    return x > 0 ? x : 0;
}

/* ReLU derivative applied to an upstream gradient x: passes it through where
 * the cached forward input old_x was positive, zero elsewhere. */
float _del_relu(float x, float old_x) {
    return old_x > 0 ? x : 0;
}
/* Dispatch on the activation type; -1 signals an unsupported type. */
float activate(float x, int type) {
    if (type == RELU) {
        return _relu(x);
    }
    return -1;
}

float del_activate(float x, float x_old, int type) {
    if (type == RELU) {
        return _del_relu(x, x_old);
    }
    return -1;
}
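
/*
 * Hypothetical usage sketch (not part of the original file): one forward and
 * backward round-trip through the ReLU layer. It assumes the matrix type is
 * roughly { int rows; int columns; float *data; } and that matrix_alloc,
 * matrix_free, and mat_copy behave as their uses above imply. Compile with
 * -DACTIVATIONS_LAYER_DEMO to try it.
 */
#ifdef ACTIVATIONS_LAYER_DEMO
#include <stdio.h>

int main(void) {
    matrix *x = matrix_alloc(1, 4);
    float vals[4] = {-2.0f, -0.5f, 0.5f, 2.0f};
    for (int i = 0; i < 4; i++) x->data[i] = vals[i];

    activations_layer *layer = activations_alloc(RELU);

    matrix *y = activations_forward(layer, x);       /* y[i] = max(0, x[i]) */

    matrix *dy = matrix_alloc(1, 4);
    for (int i = 0; i < 4; i++) dy->data[i] = 1.0f;  /* upstream gradient of ones */
    matrix *dx = activations_backward(layer, dy);    /* dx[i] = dy[i] where x[i] > 0, else 0 */

    for (int i = 0; i < 4; i++) {
        printf("x=%5.2f  y=%5.2f  dx=%5.2f\n", x->data[i], y->data[i], dx->data[i]);
    }

    matrix_free(x);
    matrix_free(y);
    matrix_free(dy);
    matrix_free(dx);
    activations_free(layer);
    return 0;
}
#endif /* ACTIVATIONS_LAYER_DEMO */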