nn.py

"""A small neural-network library (Module / Neuron / Layer / MLP) built on core.Parameter."""
import random

from core import Parameter


class Module:
    """Base class: shared gradient bookkeeping for all network components."""

    def zero_grad(self):
        # Reset every parameter's gradient before the next backward pass.
        for p in self.parameters():
            p.grad = 0

    def parameters(self):
        return []


class Neuron(Module):
    """A single neuron: a weighted sum of inputs plus a bias, with an optional activation."""

    def __init__(self, number_of_inputs, act=None, dist=None):
        # Draw weights from the supplied distribution (e.g. random.gauss),
        # falling back to uniform(-1, 1) when no distribution is given.
        self.w = [
            Parameter(dist(0.0, 1.0) if dist is not None else random.uniform(-1, 1))
            for _ in range(number_of_inputs)
        ]
        self.b = Parameter(0)
        self.act = act

    def __call__(self, x):
        # Weighted sum of the inputs plus the bias; raw numbers are wrapped
        # in Parameter so they participate in the computation graph.
        out = sum(
            (
                wi * (xi if isinstance(xi, Parameter) else Parameter(xi))
                for wi, xi in zip(self.w, x)
            ),
            start=self.b,
        )
        # Dispatch to the requested activation; None or an unknown name
        # leaves the output linear. Activation names match Parameter methods.
        activations = {"relu", "tanh", "sigmoid", "lrelu", "c_softplus", "elu"}
        return getattr(out, self.act)() if self.act in activations else out

    def parameters(self):
        return self.w + [self.b]

    def __repr__(self):
        names = {
            "relu": "ReLU",
            "tanh": "Tanh",
            "sigmoid": "Sigmoid",
            "lrelu": "LReLU",
            "c_softplus": "c_softplus",
            "elu": "elu",
        }
        return f"{names.get(self.act, 'Linear')}_Neuron({len(self.w)})"


class Layer(Module):
    """A fully connected layer: `non` neurons, each reading all `nin` inputs."""

    def __init__(self, nin, non, act=None, dist=None):
        self.layer = [Neuron(nin, act, dist) for _ in range(non)]

    def __call__(self, x):
        out = [n(x) for n in self.layer]
        # Unwrap single-neuron layers so the value can be used directly.
        return out[0] if len(out) == 1 else out

    def parameters(self):
        return [param for neuron in self.layer for param in neuron.parameters()]

    def __repr__(self):
        return f"Layer of [{', '.join(str(n) for n in self.layer)}]"


class MLP(Module):
    """A multi-layer perceptron: a chain of fully connected layers."""

    def __init__(self, nin, nouts, act=None, dist=None):
        # A single string applies one activation to every layer; a list assigns
        # activations per layer, with missing entries defaulting to linear.
        sizes = [nin] + nouts
        self.layers = [
            Layer(
                sizes[i],
                sizes[i + 1],
                act=(
                    act
                    if act is None or isinstance(act, str)
                    else act[i] if i < len(act) else None
                ),
                dist=dist,
            )
            for i in range(len(nouts))
        ]

    def __call__(self, x):
        # Feed the input through each layer in turn.
        for layer in self.layers:
            x = layer(x)
        return x

    def parameters(self):
        return [param for layer in self.layers for param in layer.parameters()]

    def __repr__(self):
        return f"MLP of [{', '.join(str(layer) for layer in self.layers)}]"