
Commit

updated codebase (#6)
* first commit

* trigger CI

* fix BatchNorm

---------

Co-authored-by: wkcn <wkcn@live.cn>
muaddibusulll and wkcn authored Jun 18, 2024
1 parent 2ad17c2 commit 6fcef88
Showing 7 changed files with 55 additions and 34 deletions.
mobula/layers/BatchNorm.py: 6 additions & 1 deletion
@@ -2,11 +2,12 @@
 
 class BatchNorm(Layer):
     def __init__(self, model, *args, **kwargs):
-        Layer.__init__(self, model, *args, **kwargs)
+        super().__init__(model, *args, **kwargs)
         self.momentum = kwargs.get('momentum', 0.9)
         self.eps = kwargs.get('eps', 1e-5)
         self.use_global_stats = kwargs.get('use_global_stats', False)
         self.axis = kwargs.get('axis', 1)
+
     def reshape(self):
         assert 0 <= self.axis < self.X.ndim
         self.cshape = [1] * self.X.ndim
@@ -21,6 +22,7 @@ def reshape(self):
         # Current Mean
         self.moving_mean = np.zeros(self.cshape)
         self.moving_var = np.ones(self.cshape)
+
     def forward(self):
         if self.is_training() and not self.use_global_stats:
             # The mean and var of this batch
@@ -38,6 +40,7 @@ def forward(self):
         self.nx = (self.X - self.batch_mean) * self.nd
         # Scale and Shift
         self.Y = np.multiply(self.nx, self.W) + self.b
+
     def backward(self):
         # Compute self.dX, self.dW, self.db
         dnx = np.multiply(self.dY, self.W)
@@ -48,9 +51,11 @@ def backward(self):
         self.dX = dnx * self.nd + dvar * xsm * (2.0 / m) + dmean * (1.0 / m)
         self.dW = np.sum(self.dY * self.nx, self.valid_axes, keepdims = True)
         self.db = np.sum(self.dY, self.valid_axes, keepdims = True)
+
     @property
     def params(self):
         return [self.W, self.b, self.moving_mean, self.moving_var]
+
     @property
     def grads(self):
         return [self.dW, self.db]
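
The same constructor change recurs in every layer file in this commit: explicit parent calls like Layer.__init__(self, ...) are replaced with the zero-argument super().__init__(...), Python 3 only syntax that resolves the parent through the MRO instead of hard-coding its name. A minimal sketch of the equivalence (illustrative classes, not Mobula's actual Layer):

class Base:
    def __init__(self, model, *args, **kwargs):
        self.model = model

class OldStyle(Base):
    def __init__(self, model, *args, **kwargs):
        # Explicit parent call: works on Python 2, but hard-codes Base.
        Base.__init__(self, model, *args, **kwargs)

class NewStyle(Base):
    def __init__(self, model, *args, **kwargs):
        # Zero-argument super(): Python 3 only; follows the MRO, so the
        # base class can change without touching this line.
        super().__init__(model, *args, **kwargs)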
mobula/layers/ContrastiveLoss.py: 4 additions & 2 deletions
@@ -1,10 +1,11 @@
 from .LossLayer import *
 import numpy as np
 class ContrastiveLoss(LossLayer):
     def __init__(self, models, *args, **kwargs):
         # models = [X1, X2, sim]
-        LossLayer.__init__(self, models, *args, **kwargs)
+        super().__init__(models, *args, **kwargs)
         self.margin = kwargs.get("margin", 1.0)
+
     def forward(self):
         self.sim = (self.X[2] == 1).ravel()
         n = self.sim.shape[0]
@@ -14,6 +15,7 @@ def forward(self):
         df = (self.margin - self.dist).ravel()
         self.bdf = ((~self.sim) & (df > 0))
         self.Y = (np.sum(dist_sq[self.sim]) + np.sum(np.square(df[self.bdf]))) / (2.0 * n)
+
     def backward(self):
         n = self.sim.shape[0]
         dX = np.zeros(self.X[0].shape)
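
The visible hunks implement the margin-based contrastive loss (Hadsell et al., 2006): similar pairs (sim == 1) contribute their squared distance, dissimilar ones contribute max(0, margin - dist) squared. The dist/dist_sq computation sits in the truncated middle of the diff, so a standalone NumPy restatement has to assume the usual Euclidean definitions:

import numpy as np

def contrastive_loss(x1, x2, sim, margin=1.0):
    # sim holds 1 for similar pairs and 0 for dissimilar pairs.
    sim = (sim == 1).ravel()
    n = sim.shape[0]
    dist_sq = np.sum(np.square(x1 - x2), axis=1)  # assumed Euclidean, not shown in the diff
    dist = np.sqrt(dist_sq)
    df = np.maximum(margin - dist, 0.0)           # hinge on dissimilar pairs
    return (np.sum(dist_sq[sim]) + np.sum(np.square(df[~sim]))) / (2.0 * n)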
mobula/layers/CrossEntropy.py: 8 additions & 4 deletions
@@ -1,12 +1,16 @@
 from .LossLayer import *
 import numpy as np
+
 class CrossEntropy(LossLayer):
     def __init__(self, model, *args, **kwargs):
-        LossLayer.__init__(self, model, *args, **kwargs)
+        super().__init__(model, *args, **kwargs)
+
     def reshape(self):
         self.Y = 0.0
+
     def forward(self):
-        self.Y = np.mean(- np.multiply(self.label, np.log(self.X)) - \
-            np.multiply(1.0 - self.label, np.log(1.0 - self.X)))
+        self.Y = np.mean(- np.multiply(self.label, np.log(self.X + 1e-15)) - \
+            np.multiply(1.0 - self.label, np.log(1.0 - self.X + 1e-15)))
+
     def backward(self):
-        self.dX = (-self.label / self.X + (1.0 - self.label) / (1.0 - self.X)) * self.dY
+        self.dX = (-self.label / (self.X + 1e-15) + (1.0 - self.label) / (1.0 - self.X + 1e-15)) * self.dY
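
The 1e-15 guard is the functional change in this file: when a prediction saturates at exactly 0 or 1, np.log(X) yields -inf, 0 * -inf yields nan, and the whole mean degenerates to nan; the division in backward blows up the same way. A quick demonstration with made-up values:

import numpy as np

X = np.array([0.0, 0.5, 1.0])     # predictions saturated at both ends
label = np.array([0.0, 1.0, 1.0])

# Unguarded form: np.mean(-label * np.log(X) - (1 - label) * np.log(1 - X)) -> nan
eps = 1e-15
loss = np.mean(-label * np.log(X + eps) - (1.0 - label) * np.log(1.0 - X + eps))
print(loss)  # ~0.231, finite instead of nan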
mobula/layers/MSE.py: 5 additions & 1 deletion
@@ -1,12 +1,16 @@
 from .LossLayer import *
 import numpy as np
+
 class MSE(LossLayer):
     def __init__(self, model, *args, **kwargs):
-        LossLayer.__init__(self, model, *args, **kwargs)
+        super().__init__(model, *args, **kwargs)
+
     def reshape(self):
         self.Y = 0.0
+
     def forward(self):
         self.d = (self.X - self.label)
         self.Y = np.mean(np.square(self.d))
+
     def backward(self):
         self.dX = (2 * self.d) * self.dY
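
MSE is unchanged here apart from the constructor and blank lines. Note that forward() takes the mean of the squared error while backward() propagates 2 * d * dY with no explicit 1/N, so that factor presumably arrives through dY. For reference, the textbook quantities in standalone form (illustrative values):

import numpy as np

X = np.array([1.0, 2.0, 3.0])
label = np.array([1.5, 2.0, 2.0])

d = X - label
loss = np.mean(np.square(d))  # 0.4167
grad = 2.0 * d / d.size       # analytic d(loss)/dX, with the 1/N included
print(loss, grad)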
mobula/layers/Tanh.py: 5 additions & 1 deletion
@@ -1,11 +1,15 @@
 from .Layer import *
 import numpy as np
+
 class Tanh(Layer):
     def __init__(self, model, *args, **kwargs):
-        Layer.__init__(self, model, *args, **kwargs)
+        super().__init__(model, *args, **kwargs)
+
     def reshape(self):
         self.Y = np.zeros(self.X.shape)
+
     def forward(self):
         self.Y = 2.0 / (1.0 + np.exp(-2.0 * self.X)) - 1.0
+
     def backward(self):
         self.dX = np.multiply(self.dY, 1.0 - np.square(self.Y))
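
forward() computes tanh through the sigmoid identity tanh(x) = 2 * sigmoid(2x) - 1, and backward() applies the derivative 1 - tanh(x)^2 using the cached output Y. A quick numerical sanity check of both identities:

import numpy as np

x = np.linspace(-3.0, 3.0, 13)
y = 2.0 / (1.0 + np.exp(-2.0 * x)) - 1.0  # identity used in forward()
assert np.allclose(y, np.tanh(x))

dy = 1.0 - np.square(y)                   # derivative used in backward()
h = 1e-6
assert np.allclose(dy, (np.tanh(x + h) - np.tanh(x - h)) / (2 * h))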
requirements.txt: 2 additions & 0 deletions
@@ -0,0 +1,2 @@
+numpy
+numpy_groupies
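
numpy_groupies supplies vectorized group-by aggregation over NumPy arrays; its main entry point is aggregate(). Where Mobula uses it is not visible in this commit, but the core API looks like this (illustrative values):

import numpy as np
import numpy_groupies as npg

group_idx = np.array([0, 0, 1, 2, 2])
values = np.array([1.0, 2.0, 3.0, 4.0, 5.0])
print(npg.aggregate(group_idx, values, func='sum'))  # [3. 3. 9.]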
setup.py: 25 additions & 25 deletions
@@ -1,29 +1,29 @@
 from setuptools import setup, find_packages
 
 setup(
-    name = 'mobula',
-    version = '1.0.1',
-    description = 'A Lightweight & Flexible Deep Learning (Neural Network) Framework in Python',
-    author = 'wkcn',
-    author_email = 'wkcn@live.cn',
-    url = 'https://github.com/wkcn/mobula',
-    packages = find_packages(),
-    package_data = {
-        '' : ['*.md'],
-        'docs' : ['docs/*.md'],
-        'examples' : ['examples/*.py']
-    },
-    keywords = 'Deep Learning Framework in Python',
-    license = 'MIT',
-    classifiers = [
-        'Programming Language :: Python',
-        'Programming Language :: Python :: 2',
-        'Programming Language :: Python :: 3',
-        'Topic :: Scientific/Engineering :: Mathematics',
-        'License :: OSI Approved :: MIT License'
-    ],
-    install_requires = [
-        'numpy',
-        'numpy_groupies'
-    ]
+    name='mobula',
+    version='1.0.2',
+    description='A Lightweight & Flexible Deep Learning (Neural Network) Framework in Python',
+    author='wkcn',
+    author_email='wkcn@live.cn',
+    url='https://github.com/wkcn/mobula',
+    packages=find_packages(),
+    package_data={
+        '': ['*.md'],
+        'docs': ['docs/*.md'],
+        'examples': ['examples/*.py']
+    },
+    keywords='Deep Learning Framework in Python',
+    license='MIT',
+    classifiers=[
+        'Programming Language :: Python',
+        'Programming Language :: Python :: 3',
+        'Topic :: Scientific/Engineering :: Mathematics',
+        'License :: OSI Approved :: MIT License'
+    ],
+    install_requires=[
+        'numpy',
+        'numpy_groupies'
+    ],
+    python_requires='>=3.6',
 )
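
Beyond the PEP 8 cleanup (no spaces around = in keyword arguments), the substantive edits are the version bump to 1.0.2, the removal of the Python 2 classifier, and the new python_requires='>=3.6', consistent with the Python 3 only super() calls above. The metadata makes pip refuse installation on older interpreters; a runtime sketch of the same guard (not code from this repo):

import sys

# setup.py's python_requires='>=3.6' enforces this at install time.
if sys.version_info < (3, 6):
    raise RuntimeError("mobula 1.0.2 requires Python >= 3.6")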
