
Commit

Bug fixes to ensure backward compatibility
maroba committed Sep 11, 2018
1 parent cd9bc03 commit d4aa8bf
Showing 8 changed files with 84 additions and 82 deletions.
45 changes: 20 additions & 25 deletions examples/examples-non-uniform-grids.ipynb

Large diffs are not rendered by default.

58 changes: 20 additions & 38 deletions examples/examples-vector-calculus.ipynb
@@ -26,9 +26,7 @@
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"collapsed": true
},
"metadata": {},
"outputs": [],
"source": [
"import numpy as np\n",
@@ -51,9 +49,7 @@
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"collapsed": false
},
"metadata": {},
"outputs": [],
"source": [
"x, y, z = [np.linspace(0, 10, 100)] * 3\n",
@@ -72,9 +68,7 @@
{
"cell_type": "code",
"execution_count": 3,
"metadata": {
"collapsed": false
},
"metadata": {},
"outputs": [
{
"data": {
@@ -100,13 +94,11 @@
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {
"collapsed": true
},
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"grad = Gradient(h=[dx, dy, dz])\n",
"grad = Gradient(spac=[dx, dy, dz])\n",
"grad_f = grad(f)"
]
},
@@ -119,18 +111,16 @@
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {
"collapsed": false
},
"execution_count": 6,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"(3, 100, 100, 100)"
]
},
"execution_count": 5,
"execution_count": 6,
"metadata": {},
"output_type": "execute_result"
}
@@ -148,18 +138,16 @@
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {
"collapsed": false
},
"execution_count": 7,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"(100, 100, 100)"
]
},
"execution_count": 6,
"execution_count": 7,
"metadata": {},
"output_type": "execute_result"
}
@@ -184,18 +172,16 @@
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {
"collapsed": false
},
"execution_count": 8,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"(3, 100, 100, 100)"
]
},
"execution_count": 7,
"execution_count": 8,
"metadata": {},
"output_type": "execute_result"
}
@@ -214,24 +200,22 @@
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {
"collapsed": false
},
"execution_count": 10,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"(100, 100, 100)"
]
},
"execution_count": 8,
"execution_count": 10,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"div = Divergence(h=[dx, dy, dz])\n",
"div = Divergence(spac=[dx, dy, dz])\n",
"div_g = div(g)\n",
"div_g.shape"
]
@@ -246,9 +230,7 @@
{
"cell_type": "code",
"execution_count": 9,
"metadata": {
"collapsed": false
},
"metadata": {},
"outputs": [
{
"data": {
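For reference, the notebook cells above now construct the vector operators with the renamed spac keyword. A plain-script sketch of the same usage (the notebook's actual fields f and g are not visible in this diff, so the ones below are assumed):

import numpy as np
from findiff import Gradient, Divergence

# Grid setup as in the notebook cells above
x, y, z = [np.linspace(0, 10, 100)] * 3
dx, dy, dz = x[1] - x[0], y[1] - y[0], z[1] - z[0]
X, Y, Z = np.meshgrid(x, y, z, indexing='ij')

f = np.sin(X) * np.sin(Y) * np.sin(Z)   # assumed scalar field
g = np.array([f, 2 * f, 3 * f])         # assumed vector field

grad = Gradient(spac=[dx, dy, dz])      # was Gradient(h=[dx, dy, dz]) before this commit
grad_f = grad(f)                        # shape (3, 100, 100, 100)

div = Divergence(spac=[dx, dy, dz])     # was Divergence(h=[dx, dy, dz])
div_g = div(g)                          # shape (100, 100, 100)
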
2 changes: 1 addition & 1 deletion findiff/__init__.py
@@ -2,4 +2,4 @@
from .findiff import FinDiff, Coef, Identity, Coefficient
from .vector import Gradient, Divergence, Curl, Laplacian

__version__ = "0.6.0"
__version__ = "0.6.1"
29 changes: 19 additions & 10 deletions findiff/findiff.py
@@ -311,12 +311,17 @@ def __init__(self, *args):
tuples = self._convert_to_valid_tuple_list(args)
self.derivs = {}
self.spac = {}
self.coords = {}
for t in tuples:
axis, spac, order = t
axis, spac_or_coords, order = t
if axis in self.derivs:
raise ValueError("Derivative along axis %d specified more than once." % axis)
self.derivs[axis] = order
self.spac[axis] = spac

if hasattr(spac_or_coords, "__len__"):
self.coords[axis] = spac_or_coords
else:
self.spac[axis] = spac_or_coords

def axes(self):
return sorted(list(self.derivs.keys()))
@@ -329,13 +334,13 @@ def order(self, axis):
def apply(self, fd, u):

for axis, order in self.derivs.items():
if fd.is_uniform():
if self.spac:
u = fd.diff(u, self.spac[axis], order, axis, coefficients(order, fd.acc))
else:
coefs = []
for i in range(len(fd.coords[axis])):
coefs.append(coefficients_non_uni(order, fd.acc, fd.coords[axis], i))
u = fd.diff_non_uni(u, fd.coords[axis], axis, coefs)
for i in range(len(self.coords[axis])):
coefs.append(coefficients_non_uni(order, fd.acc, self.coords[axis], i))
u = fd.diff_non_uni(u, self.coords[axis], axis, coefs)

return u

@@ -368,10 +373,14 @@ def _assert_tuple_valid(self, t):

if len(t) > 3:
raise ValueError("Too many arguments in tuple.")
axis, h, order = t
axis, coords_or_spac, order = t
if not isinstance(axis, int) or axis < 0:
raise ValueError("Axis must be non-negative integer.")
if h <= 0:
raise ValueError("Spacing must be greater than zero.")
if not hasattr(coords_or_spac, "__len__"):
h = coords_or_spac
if h <= 0:
raise ValueError("Spacing must be greater than zero.")
if not isinstance(order, int) or order <= 0:
raise ValueError("Derivative order must be positive integer.")
raise ValueError("Derivative order must be positive integer.")


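With this change, a derivative tuple passed to FinDiff can carry either a scalar spacing or a full coordinate array for its axis; the tuple handling routes the value via hasattr(..., "__len__") into self.spac or self.coords, and apply() then picks the uniform or non-uniform stencil. A minimal sketch of the two call forms (grids below are chosen here for illustration):

import numpy as np
from findiff import FinDiff

# Uniform grid: pass a scalar spacing in the derivative tuple
x = np.linspace(0, 1, 101)
dx = x[1] - x[0]
f = np.sin(x)
d_dx = FinDiff(0, dx, 1)          # scalar -> stored as spacing
df_dx = d_dx(f)

# Non-uniform grid: pass the coordinate array itself
x_nu = np.r_[np.linspace(0, 0.5, 60, endpoint=False), np.linspace(0.5, 1, 41)]
d_dx_nu = FinDiff(0, x_nu, 1)     # array-like -> stored as coordinates
df_dx_nu = d_dx_nu(np.sin(x_nu))
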
10 changes: 7 additions & 3 deletions findiff/vector.py
@@ -25,15 +25,19 @@ def __init__(self, **kwargs):
"""

if "spac" in kwargs:
self.h = kwargs["spac"]
if "spac" in kwargs or "h" in kwargs: # necessary for backward compatibility 0.5.2 => 0.6
if "spac" in kwargs:
kw = "spac"
else:
kw = "h"
self.h = kwargs[kw]
self.ndims = len(self.h)
self.components = [FinDiff((k, self.h[k]), **kwargs) for k in range(self.ndims)]

if "coords" in kwargs:
coords = kwargs.pop("coords")
self.ndims = self.__get_dimension(coords)
self.components = [FinDiff((k, 1), coords=coords, **kwargs) for k in range(self.ndims)]
self.components = [FinDiff((k, coords[k], 1), **kwargs) for k in range(self.ndims)]

def __get_dimension(self, coords):
if isinstance(coords, np.ndarray):
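The net effect for the vector operators is that the pre-0.6 keyword h is accepted again alongside spac, and the non-uniform case now hands each coordinate array straight into the FinDiff tuple. A hedged sketch of the three constructor forms (the grid and field below are illustrative, not taken from the repository):

import numpy as np
from findiff import Gradient

x = y = z = np.linspace(0, 1, 30)
dx, dy, dz = x[1] - x[0], y[1] - y[0], z[1] - z[0]
X, Y, Z = np.meshgrid(x, y, z, indexing='ij')
f = np.sin(X) * np.sin(Y) * np.sin(Z)

grad_h = Gradient(h=[dx, dy, dz])         # 0.5.2-style keyword, accepted again by this fix
grad_spac = Gradient(spac=[dx, dy, dz])   # 0.6-style keyword
grad_coords = Gradient(coords=[x, y, z])  # non-uniform form, builds FinDiff((k, coords[k], 1))

# All three agree on a uniform grid up to discretization error
print(grad_h(f).shape, grad_spac(f).shape, grad_coords(f).shape)  # each (3, 30, 30, 30)
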
2 changes: 1 addition & 1 deletion setup.py
@@ -2,7 +2,7 @@

setup(
name='findiff',
version='0.6.0',
version='0.6.1',
description='A Python package for finite difference derivatives in any number of dimensions.',
long_description="""A Python package for finite difference derivatives in any number of dimensions.
8 changes: 4 additions & 4 deletions test/test_findiff.py
@@ -105,8 +105,8 @@ def test_non_uniform_3d(self):
X, Y, Z = np.meshgrid(x, y, z, indexing='ij')
f = np.exp(-X**2-Y**2-Z**2)

d_dy = FinDiff(1, 1, acc=4)
fy = d_dy(f, coords=[x, y, z])
d_dy = FinDiff(1, y, acc=4)
fy = d_dy(f)
fye = - 2 * Y * np.exp(-X**2-Y**2-Z**2)
assert_array_almost_equal(fy, fye, decimal=4)

@@ -116,7 +116,7 @@ def test_FinDiff_NonUni_2d(self):
X, Y = np.meshgrid(x, y, indexing='ij')
f = np.exp(-X**2-Y**2)

d_dx = FinDiff((0, 1), coords=[x, y])
d_dx = FinDiff((0, x, 1))
fx = d_dx(f)
fxe = - 2 * X * np.exp(-X**2-Y**2)
assert_array_almost_equal(fx, fxe, decimal=4)
@@ -128,7 +128,7 @@ def test_BasicFinDiffNonUni_3d(self):
X, Y, Z = np.meshgrid(x, y, z, indexing='ij')
f = np.exp(-X**2-Y**2-Z**2)

d_dy = FinDiff(1, 1, coords=[x, y, z], acc=4)
d_dy = FinDiff(1, y, acc=4)
fy = d_dy(f)
fye = - 2 * Y * np.exp(-X**2-Y**2-Z**2)
assert_array_almost_equal(fy, fye, decimal=4)
12 changes: 12 additions & 0 deletions test/test_vector.py
@@ -18,6 +18,18 @@ def test_3d_gradient_on_scalar_func(self):
grad_f = grad(f)
assert_array_almost_equal(grad_f, grad_f_ex)

def test_spacing_with_h(self):
axes, h, [X, Y, Z] = init_mesh(3, (50, 50, 50))
f = np.sin(X) * np.sin(Y) * np.sin(Z)
grad_f_ex = np.array([
np.cos(X) * np.sin(Y) * np.sin(Z),
np.sin(X) * np.cos(Y) * np.sin(Z),
np.sin(X) * np.sin(Y) * np.cos(Z),
])
grad = Gradient(h=h, acc=4)
grad_f = grad(f)
assert_array_almost_equal(grad_f, grad_f_ex)

def test_3d_gradient_on_scalar_func_non_uni(self):
axes, h, [X, Y, Z] = init_mesh(3, (50, 50, 50))
f = np.sin(X) * np.sin(Y) * np.sin(Z)