update marching_cube function and fix the morphofield bug
Yao-14 committed Jan 14, 2024
1 parent 0680171 commit edccbdd
Showing 7 changed files with 66 additions and 35 deletions.
22 changes: 15 additions & 7 deletions spateo/alignment/methods/morpho.py
@@ -274,7 +274,15 @@ def BA_align(
# Preprocessing
normalize_g = False if dissimilarity == "kl" else normalize_g
sampleA, sampleB = (sampleA, sampleB) if inplace else (sampleA.copy(), sampleB.copy())
- (nx, type_as, new_samples, exp_matrices, spatial_coords, normalize_scale, normalize_mean_list,) = align_preprocess(
+ (
+ nx,
+ type_as,
+ new_samples,
+ exp_matrices,
+ spatial_coords,
+ normalize_scale_list,
+ normalize_mean_list,
+ ) = align_preprocess(
samples=[sampleA, sampleB],
layer=layer,
genes=genes,
@@ -425,7 +433,7 @@ def BA_align(

for iter in iteration:
if iter_key_added is not None:
- iter_XAHat = XAHat * normalize_scale + normalize_mean_list[0] if normalize_c else XAHat
+ iter_XAHat = XAHat * normalize_scale_list[0] + normalize_mean_list[0] if normalize_c else XAHat
sampleB.uns[iter_key_added][key_added][iter] = nx.to_numpy(iter_XAHat)
sampleB.uns[iter_key_added]["sigma2"][iter] = nx.to_numpy(sigma2)
sampleB.uns[iter_key_added]["beta2"][iter] = nx.to_numpy(beta2)
@@ -634,10 +642,10 @@ def BA_align(
XAHat = XAHat * (area / area_after)

if normalize_c:
- XAHat = XAHat * normalize_scale + normalize_mean_list[0]
- RnA = RnA * normalize_scale + normalize_mean_list[0]
- optimal_RnA = optimal_RnA * normalize_scale + normalize_mean_list[0]
- coarse_alignment = coarse_alignment * normalize_scale + normalize_mean_list[0]
+ XAHat = XAHat * normalize_scale_list[0] + normalize_mean_list[0]
+ RnA = RnA * normalize_scale_list[0] + normalize_mean_list[0]
+ optimal_RnA = optimal_RnA * normalize_scale_list[0] + normalize_mean_list[0]
+ coarse_alignment = coarse_alignment * normalize_scale_list[0] + normalize_mean_list[0]

# Save aligned coordinates
sampleB.obsm["Nonrigid_align_spatial"] = nx.to_numpy(XAHat).copy()
@@ -656,7 +664,7 @@ def BA_align(
"beta": beta,
"Coff": nx.to_numpy(Coff),
"ctrl_pts": nx.to_numpy(ctrl_pts),
"normalize_scale": nx.to_numpy(normalize_scale) if normalize_c else None,
"normalize_scale": nx.to_numpy(normalize_scale_list[0]) if normalize_c else None,
"normalize_mean_list": [nx.to_numpy(normalize_mean) for normalize_mean in normalize_mean_list]
if normalize_c
else None,
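Note on this file's change: the shared normalize_scale scalar is replaced by normalize_scale_list, and the fixed sample's entry (index 0) is used wherever aligned coordinates are mapped back to the original frame. A minimal NumPy sketch of that de-normalization step, with hypothetical names standing in for XAHat, normalize_scale_list, and normalize_mean_list:

import numpy as np

def denormalize(coords_norm: np.ndarray, scale_list, mean_list, idx: int = 0) -> np.ndarray:
    # Map normalized coordinates back into the frame of sample `idx`
    # (index 0 is the fixed sample A in the diff above).
    return coords_norm * scale_list[idx] + mean_list[idx]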
30 changes: 21 additions & 9 deletions spateo/alignment/methods/morpho_sparse.py
@@ -164,7 +164,7 @@ def BA_align_sparse(
device: str = "cpu",
inplace: bool = True,
verbose: bool = True,
- nn_init: bool = True,
+ nn_init: bool = False,
partial_robust_level: float = 25,
use_label_prior: bool = False,
label_key: Optional[str] = "cluster",
@@ -178,7 +178,15 @@ def BA_align_sparse(
# Preprocessing and extract the spatial and expression information
normalize_g = False if dissimilarity == "kl" else normalize_g
sampleA, sampleB = (sampleA, sampleB) if inplace else (sampleA.copy(), sampleB.copy())
- (nx, type_as, new_samples, exp_matrices, spatial_coords, normalize_scale, normalize_mean_list,) = align_preprocess(
+ (
+ nx,
+ type_as,
+ new_samples,
+ exp_matrices,
+ spatial_coords,
+ normalize_scale_list,
+ normalize_mean_list,
+ ) = align_preprocess(
samples=[sampleA, sampleB],
layer=layer,
genes=genes,
@@ -321,7 +329,7 @@ def BA_align_sparse(
for iter in iteration:
# save intermediate results
if iter_key_added is not None:
- iter_XAHat = XAHat * normalize_scale + normalize_mean_list[0] if normalize_c else XAHat
+ iter_XAHat = XAHat * normalize_scale_list[0] + normalize_mean_list[0] if normalize_c else XAHat
sampleB.uns[iter_key_added][key_added][iter] = nx.to_numpy(iter_XAHat)
sampleB.uns[iter_key_added]["sigma2"][iter] = nx.to_numpy(sigma2)
sampleB.uns[iter_key_added]["beta2"][iter] = nx.to_numpy(beta2)
@@ -551,12 +559,14 @@ def BA_align_sparse(

# de-normalize
if normalize_c:
- XAHat = XAHat * normalize_scale + normalize_mean_list[0]
- RnA = RnA * normalize_scale + normalize_mean_list[0]
- optimal_RnA = optimal_RnA * normalize_scale + normalize_mean_list[0]
- coarse_alignment = coarse_alignment * normalize_scale + normalize_mean_list[0]
+ XAHat = XAHat * normalize_scale_list[0] + normalize_mean_list[0]
+ RnA = RnA * normalize_scale_list[0] + normalize_mean_list[0]
+ optimal_RnA = optimal_RnA * normalize_scale_list[0] + normalize_mean_list[0]
+ coarse_alignment = coarse_alignment * normalize_scale_list[0] + normalize_mean_list[0]
output_R = optimal_R
- output_t = optimal_t * normalize_scale + normalize_mean_list[0] - _dot(nx)(normalize_mean_list[1], optimal_R.T)
+ output_t = (
+ optimal_t * normalize_scale_list[0] + normalize_mean_list[0] - _dot(nx)(normalize_mean_list[1], optimal_R.T)
+ )

# Save aligned coordinates to adata
sampleB.obsm[key_added + "_nonrigid"] = nx.to_numpy(XAHat).copy()
@@ -567,7 +577,9 @@ def BA_align_sparse(
norm_dict = {
"mean_transformed": nx.to_numpy(normalize_mean_list[1]),
"mean_fixed": nx.to_numpy(normalize_mean_list[0]),
"scale": nx.to_numpy(normalize_scale),
"scale": nx.to_numpy(normalize_scale_list[1]),
"scale_transformed": nx.to_numpy(normalize_scale_list[1]),
"scale_fixed": nx.to_numpy(normalize_scale_list[0]),
}
sampleB.uns[vecfld_key_added] = {
"R": nx.to_numpy(R),
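Besides the same scale-list propagation (and flipping the default of nn_init to False), the sparse path now records per-sample scales in norm_dict: the legacy "scale" key is kept and points at the transformed sample's scale, alongside explicit "scale_transformed" and "scale_fixed" entries. A hedged sketch of applying the de-normalized rigid parameters computed above to raw coordinates; the function name is illustrative, and it assumes both samples share one normalization scale, which normalize_coords enforces for 2D input:

import numpy as np

def apply_rigid(coords: np.ndarray, output_R: np.ndarray, output_t: np.ndarray) -> np.ndarray:
    # Rigid alignment in the original (de-normalized) frame: y = x @ R.T + t,
    # with output_R / output_t as produced in the diff above.
    return coords @ output_R.T + output_t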
22 changes: 13 additions & 9 deletions spateo/alignment/methods/utils.py
@@ -127,7 +127,7 @@ def normalize_coords(
coords: Union[List[np.ndarray or torch.Tensor], np.ndarray, torch.Tensor],
nx: Union[ot.backend.TorchBackend, ot.backend.NumpyBackend] = ot.backend.NumpyBackend,
verbose: bool = True,
- ) -> Tuple[List[np.ndarray], float, List[np.ndarray]]:
+ ) -> Tuple[List[np.ndarray], List[np.ndarray], List[np.ndarray]]:
"""Normalize the spatial coordinate.
Args:
@@ -138,22 +138,26 @@ def normalize_coords(
if type(coords) in [np.ndarray, torch.Tensor]:
coords = [coords]

- normalize_scale = 0
+ # normalize_scale now becomes a list
+ normalize_scale_list = []
normalize_mean_list = []
for i in range(len(coords)):
normalize_mean = nx.einsum("ij->j", coords[i]) / coords[i].shape[0]
normalize_mean_list.append(normalize_mean)
coords[i] -= normalize_mean
- normalize_scale += nx.sqrt(nx.einsum("ij->", nx.einsum("ij,ij->ij", coords[i], coords[i])) / coords[i].shape[0])
+ normalize_scale = nx.sqrt(nx.einsum("ij->", nx.einsum("ij,ij->ij", coords[i], coords[i])) / coords[i].shape[0])
+ normalize_scale_list.append(normalize_scale)

- normalize_scale /= len(coords)
+ if coords[0].shape[1] == 2:
+ normalize_scale = nx.mean(normalize_scale_list)
+ normalize_scale_list = [normalize_scale] * len(coords)
for i in range(len(coords)):
- coords[i] /= normalize_scale
+ coords[i] /= normalize_scale_list[i]
if verbose:
lm.main_info(message=f"Coordinates normalization params:", indent_level=1)
lm.main_info(message=f"Scale: {normalize_scale}.", indent_level=2)
# lm.main_info(message=f"Mean: {normalize_mean_list}", indent_level=2)
- return coords, normalize_scale, normalize_mean_list
+ return coords, normalize_scale_list, normalize_mean_list


def normalize_exps(
@@ -258,9 +262,9 @@ def align_preprocess(
# coords_dims = np.unique(np.asarray([c.shape[1] for c in spatial_coords]))
assert len(coords_dims) == 1, "Spatial coordinate dimensions are different, please check again."

- normalize_scale, normalize_mean_list = None, None
+ normalize_scale_list, normalize_mean_list = None, None
if normalize_c:
- spatial_coords, normalize_scale, normalize_mean_list = normalize_coords(
+ spatial_coords, normalize_scale_list, normalize_mean_list = normalize_coords(
coords=spatial_coords, nx=nx, verbose=verbose
)
if normalize_g:
@@ -272,7 +276,7 @@ def align_preprocess(
new_samples,
exp_matrices,
spatial_coords,
- normalize_scale,
+ normalize_scale_list,
normalize_mean_list,
)

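The core behavioral change lives in normalize_coords: instead of accumulating a single averaged scale, each coordinate set now gets its own RMS scale, and for 2D data the per-set scales are averaged so every set shares one value. A standalone NumPy sketch of that logic under simplifying assumptions (the real function works through the ot.backend abstraction and normalizes in place):

import numpy as np

def normalize_coords_sketch(coords_list):
    # Center each coordinate set and scale by its own RMS distance from the centroid;
    # 2D sets end up sharing one averaged scale, as in the updated normalize_coords.
    centered, means, scales = [], [], []
    for coords in coords_list:
        coords = np.asarray(coords, dtype=float)
        mean = coords.mean(axis=0)
        coords = coords - mean
        centered.append(coords)
        means.append(mean)
        scales.append(np.sqrt((coords ** 2).sum() / coords.shape[0]))
    if centered[0].shape[1] == 2:
        scales = [float(np.mean(scales))] * len(centered)
    normalized = [c / s for c, s in zip(centered, scales)]
    return normalized, scales, means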
9 changes: 7 additions & 2 deletions spateo/tdr/models/models_individual/mesh.py
@@ -207,13 +207,18 @@ def construct_surface(
density_threshold=_cs_args["density_threshold"],
)
elif cs_method == "marching_cube":
_cs_args = {"levelset": 0, "mc_scale_factor": 1, "dist_sample_num": 100}
_cs_args = {"levelset": 0, "mc_scale_factor": 1, "dist_sample_num": None}
if not (cs_args is None):
_cs_args.update(cs_args)

from .mesh_methods import marching_cube_mesh

- surf = marching_cube_mesh(pc=cloud, levelset=_cs_args["levelset"], mc_scale_factor=_cs_args["mc_scale_factor"])
+ surf = marching_cube_mesh(
+ pc=cloud,
+ levelset=_cs_args["levelset"],
+ mc_scale_factor=_cs_args["mc_scale_factor"],
+ dist_sample_num=_cs_args["dist_sample_num"],
+ )

else:
raise ValueError(
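dist_sample_num now defaults to None in both construct_surface and marching_cube_mesh, so distance-based subsampling only happens when a caller asks for it. A hedged usage sketch for restoring the previous behavior; only cs_method and the cs_args keys come from the diff above, and the surrounding call is illustrative rather than the exact signature:

# Hypothetical call site; keyword names mirror the diff above.
cs_args = {
    "levelset": 0,
    "mc_scale_factor": 1,
    "dist_sample_num": 100,  # re-enable the old default explicitly
}
# surf = construct_surface(pc=cloud, cs_method="marching_cube", cs_args=cs_args)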
2 changes: 1 addition & 1 deletion spateo/tdr/models/models_individual/mesh_methods.py
@@ -117,7 +117,7 @@ def marching_cube_mesh(
pc: PolyData,
levelset: Union[int, float] = 0,
mc_scale_factor: Union[int, float] = 1.0,
- dist_sample_num: Optional[int] = 100,
+ dist_sample_num: Optional[int] = None,
):
"""
Computes a triangle mesh from a point cloud based on the marching cube algorithm.
12 changes: 7 additions & 5 deletions spateo/tdr/morphometrics/morphofield/gaussian_process.py
@@ -83,18 +83,20 @@ def _con_K_geodist(


def _gp_velocity(X: np.ndarray, vf_dict: dict) -> np.ndarray:
pre_scale = vf_dict["pre_norm_scale"]
norm_x = (X - vf_dict["norm_dict"]["mean_transformed"]) / vf_dict["norm_dict"]["scale"]
# pre_scale = vf_dict["pre_norm_scale"]
norm_x = (X - vf_dict["norm_dict"]["mean_transformed"]) / vf_dict["norm_dict"]["scale_transformed"]
if vf_dict["kernel_dict"]["dist"] == "cdist":
quary_kernel = _con_K(norm_x, vf_dict["X_ctrl"], vf_dict["beta"])
elif vf_dict["kernel_dict"]["dist"] == "geodist":
quary_kernel = _con_K_geodist(norm_x, vf_dict["kernel_dict"], vf_dict["beta"])
else:
raise ValueError(f"current only support cdist and geodist")
quary_velocities = np.dot(quary_kernel, vf_dict["C"])
- quary_velocities = quary_velocities * vf_dict["norm_dict"]["scale"]
- quary_velocities = quary_velocities + (pre_scale - 1) * X
- return quary_velocities / 10000
+ quary_rigid = np.dot(norm_x, vf_dict["R"].T) + vf_dict["t"]
+ quary_norm_x = quary_velocities + quary_rigid
+ quary_x = quary_norm_x * vf_dict["norm_dict"]["scale_fixed"] + vf_dict["norm_dict"]["mean_fixed"]
+ _velocities = quary_x - X
+ return _velocities / 10000


def morphofield_gp(
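This is the morphofield fix named in the commit message: _gp_velocity no longer rescales the kernel output and applies a stored pre_norm_scale. It now pushes each query point through the full learned map (nonrigid displacement plus the rigid R and t), de-normalizes with the fixed sample's scale and mean, and reports the displacement from the input. A NumPy restatement of the new flow; kernel_fn stands in for _con_K / _con_K_geodist and the argument names are illustrative:

import numpy as np

def gp_velocity_sketch(X, R, t, C, X_ctrl, kernel_fn, norm_dict):
    # Normalize query points into the transformed sample's frame.
    x_norm = (X - norm_dict["mean_transformed"]) / norm_dict["scale_transformed"]
    v_nonrigid = np.dot(kernel_fn(x_norm, X_ctrl), C)    # nonrigid displacement (normalized space)
    y_norm = v_nonrigid + np.dot(x_norm, R.T) + t        # compose with the rigid part
    Y = y_norm * norm_dict["scale_fixed"] + norm_dict["mean_fixed"]  # back to the fixed frame
    return (Y - X) / 10000                               # displacement reported as velocity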
4 changes: 2 additions & 2 deletions spateo/tdr/morphometrics/morphofield_dg/GPVectorField.py
@@ -155,8 +155,8 @@ def Jacobian_GP_gaussian_kernel(X: np.ndarray, vf_dict: dict, vectorize: bool =
"""
from ..morphofield.gaussian_process import _con_K, _con_K_geodist

pre_scale = vf_dict["pre_norm_scale"]
x_norm = (X - vf_dict["norm_dict"]["mean_transformed"]) / vf_dict["norm_dict"]["scale"]
pre_scale = vf_dict["norm_dict"]["scale_fixed"] / vf_dict["norm_dict"]["scale_transformed"]
x_norm = (X - vf_dict["norm_dict"]["mean_transformed"]) / vf_dict["norm_dict"]["scale_transformed"]
if x_norm.ndim == 1:
if vf_dict["kernel_dict"]["dist"] == "cdist":
K, D = _con_K(x_norm[None, :], vf_dict["X_ctrl"], vf_dict["beta"], return_d=True)
