-
Notifications
You must be signed in to change notification settings - Fork 16
/
scan2mesh_computations_metrical.py
279 lines (222 loc) · 10.9 KB
/
scan2mesh_computations_metrical.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
'''
Max-Planck-Gesellschaft zur Foerderung der Wissenschaften e.V. (MPG) is holder of all proprietary rights on this computer program.
Using this computer program means that you agree to the terms in the LICENSE file (https://ringnet.is.tue.mpg.de/license).
Any use not explicitly granted by the LICENSE is prohibited.
Copyright 2020 Max-Planck-Gesellschaft zur Foerderung der Wissenschaften e.V. (MPG). acting on behalf of its
Max Planck Institute for Intelligent Systems. All rights reserved.
More information about the NoW Challenge is available at https://ringnet.is.tue.mpg.de/challenge.
For comments or questions, please email us at ringnet@tue.mpg.de
'''
import numpy as np
from math import sqrt
import chumpy as ch
from psbody.mesh import Mesh
from smpl_webuser.posemapper import Rodrigues
from sbody.alignment.objectives import sample_from_mesh
from sbody.mesh_distance import ScanToMesh
from psbody.mesh.meshviewer import MeshViewer
from scipy.sparse.linalg import cg
def rigid_scan_2_mesh_alignment(scan, mesh, visualize=False):
    """
    Refine the rigid alignment of `mesh` onto `scan` by minimizing an
    unsigned scan-to-mesh distance with chumpy's dogleg optimizer.

    :param scan: Target scan (psbody Mesh-like object with vertices `v`).
    :param mesh: Mesh to be transformed (psbody Mesh with `v` and faces `f`).
    :param visualize: If True, open a MeshViewer and update it after each
        optimizer step via the `on_show` callback.
    :return: Tuple (s, R, t) of chumpy objects: scale, Rodrigues rotation,
        and translation such that the aligned mesh is s*(R @ v.T).T + t.

    NOTE(review): only `r` and `t` appear in `x0` below, so the scale `s`
    is never optimized and is returned as its initial value ch.ones(1) —
    presumably scale is handled by the preceding landmark-based Procrustes
    step; confirm with callers.
    """
    # Solver options for ch.minimize; the sparse solver caps CG at 2000 iters.
    options = {'sparse_solver': lambda A, x: cg(A, x, maxiter=2000)[0]}
    options['disp'] = 1.0
    options['delta_0'] = 0.1
    options['e_3'] = 1e-4
    # Free variables of the rigid transform (chumpy differentiable objects).
    s = ch.ones(1)
    r = ch.zeros(3)   # axis-angle rotation vector
    R = Rodrigues(r)  # 3x3 rotation matrix, differentiable w.r.t. r
    t = ch.zeros(3)
    # Transformed mesh vertices as a chumpy expression; updating r/t updates this.
    trafo_mesh = s*(R.dot(mesh.v.T)).T + t
    # Sample correspondences at scan vertices and build the distance objective.
    sampler = sample_from_mesh(scan, sample_type='vertices')
    s2m = ScanToMesh(scan, trafo_mesh, mesh.f, scan_sampler=sampler, signed=False, normalize=False)
    if visualize:
        #Visualization code
        mv = MeshViewer()
        mv.set_static_meshes([scan])
        tmp_mesh = Mesh(trafo_mesh.r, mesh.f)
        tmp_mesh.set_vertex_colors('light sky blue')
        mv.set_dynamic_meshes([tmp_mesh])
        # Called by the optimizer after each step: redraw the current alignment.
        def on_show(_):
            tmp_mesh = Mesh(trafo_mesh.r, mesh.f)
            tmp_mesh.set_vertex_colors('light sky blue')
            mv.set_dynamic_meshes([tmp_mesh])
    else:
        # No-op callback when visualization is disabled.
        def on_show(_):
            pass
    # Minimize the scan-to-mesh distance over rotation and translation only.
    ch.minimize(fun={'dist': s2m}, x0=[r, t], callback=on_show, options=options)
    return s,Rodrigues(r),t
def compute_mask(grundtruth_landmark_points):
    """
    Compute a circular face mask (center + radius) from 7 annotated landmarks.

    :param grundtruth_landmark_points: Landmarks of the ground truth scans
        (7 x 3; index 0 and 3 are the outer eye corners, 1 and 2 the inner
        eye corners, 4 the nose bottom).
    :return: Tuple (face_centre, mask_radius).
    """
    landmarks = np.asarray(grundtruth_landmark_points, dtype=np.float64)
    nose_bottom = landmarks[4]
    # Midpoint of the two inner eye corners approximates the nose bridge.
    nose_bridge = (landmarks[1] + landmarks[2]) / 2
    # Place the centre slightly above the nose bottom, towards the bridge.
    face_centre = nose_bottom + 0.3 * (nose_bridge - nose_bottom)
    # Radius derived from the outer-eye distance and the nose length.
    outer_eye_dist = np.linalg.norm(landmarks[0] - landmarks[3])
    nose_dist = np.linalg.norm(nose_bridge - nose_bottom)
    mask_radius = 1.4 * (outer_eye_dist + nose_dist) / 2
    return (face_centre, mask_radius)
def crop_face_scan(groundtruth_vertices, groundtruth_faces, grundtruth_landmark_points):
    """
    Crop a face scan to a circular area around the center of the face.

    :param groundtruth_vertices: An n x 3 numpy array of vertices from a ground truth scan.
    :param groundtruth_faces: Faces of the ground truth scan.
    :param grundtruth_landmark_points: A 7 x 3 list with annotations of the ground truth scan.
    :return: Cropped face scan as a Mesh.
    """
    # Mask region from the landmarks.
    centre, radius = compute_mask(grundtruth_landmark_points)
    # Keep only vertices inside the mask sphere.
    distances = np.linalg.norm(groundtruth_vertices - centre, axis=1)
    keep_ids = np.where(distances <= radius)[0]
    cropped_scan = Mesh(v=groundtruth_vertices, f=groundtruth_faces)
    cropped_scan.keep_vertices(keep_ids)
    return cropped_scan
def get_unit_factor(unit):
    """
    Return the factor that scales data given in `unit` to millimetres
    (the scans are in mm).

    :param unit: One of 'mm', 'cm', 'm'.
    :raises ValueError: If `unit` is not one of the supported units.
    """
    factors = {'mm': 1.0, 'cm': 10.0, 'm': 1000.0}
    if unit not in factors:
        raise ValueError('Unit %s not supported' % unit)
    return factors[unit]
def compute_rigid_alignment(masked_gt_scan, grundtruth_landmark_points,
        predicted_mesh_vertices, predicted_mesh_faces, predicted_mesh_landmark_points, predicted_mesh_unit='m', check_rigid_alignment=False):
    """
    Rigidly align the predicted mesh to the masked ground-truth scan.

    A first landmark-based Procrustes alignment brings the prediction into
    the scan's reference frame; a scan-to-mesh optimization then refines it.

    :param masked_gt_scan: Masked face area mesh.
    :param grundtruth_landmark_points: A 7 x 3 list with annotations of the ground truth scan.
    :param predicted_mesh_vertices: An m x 3 numpy array of vertices from a predicted mesh.
    :param predicted_mesh_faces: A k x 3 numpy array of vertex indices composing the predicted mesh.
    :param predicted_mesh_landmark_points: A 7 x 3 list containing the annotated 3D point locations in the predicted mesh.
    :param predicted_mesh_unit: Unit of the predicted mesh in ['m', 'cm', 'mm']; scans are in mm.
    :param check_rigid_alignment: If True, dump the scan, the raw prediction
        and the aligned prediction as OBJ files for visual inspection.
    :return: Tuple (predicted_mesh_vertices_aligned, masked_gt_scan).
    """
    # Factor converting the prediction's unit to mm.
    scale_factor = get_unit_factor(predicted_mesh_unit)
    grundtruth_landmark_points = np.array(grundtruth_landmark_points)
    predicted_mesh_landmark_points = np.array(predicted_mesh_landmark_points)
    # Landmark-based Procrustes alignment (prediction -> ground truth frame).
    d, Z, tform = procrustes(grundtruth_landmark_points, predicted_mesh_landmark_points, scaling=True, reflection='best')
    # Use tform to transform all vertices in predicted_mesh_vertices to the ground truth reference space.
    # NOTE(review): the Procrustes scale tform['scale'] is not applied here;
    # the unit conversion scale_factor is used instead (metrical evaluation) —
    # confirm this matches the intended metrical protocol.
    predicted_mesh_vertices_aligned = scale_factor*(tform['rotation'].T.dot(predicted_mesh_vertices.T).T) + tform['translation']
    # Refine the rigid alignment by minimizing the scan-to-mesh distance.
    s, R, t = rigid_scan_2_mesh_alignment(masked_gt_scan, Mesh(predicted_mesh_vertices_aligned, predicted_mesh_faces))
    predicted_mesh_vertices_aligned = s*(R.dot(predicted_mesh_vertices_aligned.T)).T + t
    if check_rigid_alignment:
        # Export meshes so the alignment can be inspected in an external viewer.
        # (A leftover `pdb.set_trace()` breakpoint was removed here — it halted
        # any non-interactive evaluation run.)
        masked_gt_scan.write_obj('gt_scan_val.obj')
        Mesh(predicted_mesh_vertices, predicted_mesh_faces).write_obj('predicted.obj')
        Mesh(predicted_mesh_vertices_aligned, predicted_mesh_faces).write_obj('predicted_aligned.obj')
    return (predicted_mesh_vertices_aligned, masked_gt_scan)
def compute_errors(groundtruth_vertices, groundtruth_faces, grundtruth_landmark_points, predicted_mesh_vertices,
        predicted_mesh_faces, predicted_mesh_landmark_points, predicted_mesh_unit, check_rigid_alignment=False):
    """
    Compute the reconstruction error between a predicted mesh and a ground truth scan.

    Pipeline: crop the scan to the face region, rigidly align the prediction
    to the cropped scan, then measure per-vertex scan-to-mesh distances.

    :param groundtruth_vertices: An n x 3 numpy array of vertices from a ground truth scan.
    :param groundtruth_faces: Faces of the ground truth scan.
    :param grundtruth_landmark_points: A 7 x 3 list with annotations of the ground truth scan.
    :param predicted_mesh_vertices: An m x 3 numpy array of vertices from a predicted mesh.
    :param predicted_mesh_faces: A k x 3 numpy array of vertex indices composing the predicted mesh.
    :param predicted_mesh_landmark_points: A 7 x 3 list containing the annotated 3D point locations in the predicted mesh.
    :param predicted_mesh_unit: Unit of measurement of the predicted meshes in ['m', 'cm', 'mm'].
    :param check_rigid_alignment: [optional] Also writes aligned reconstruction and cropped ground truth scan to disk.
    :return: A list of distances (errors).
    """
    # Restrict the ground truth to the circular face region.
    masked_gt_scan = crop_face_scan(groundtruth_vertices, groundtruth_faces, grundtruth_landmark_points)
    # Rigidly align the prediction with the cropped scan.
    aligned_vertices, masked_gt_scan = compute_rigid_alignment(
        masked_gt_scan,
        grundtruth_landmark_points,
        predicted_mesh_vertices,
        predicted_mesh_faces,
        predicted_mesh_landmark_points,
        predicted_mesh_unit,
        check_rigid_alignment,
    )
    # Distance from every scan vertex to the aligned predicted surface.
    sampler = sample_from_mesh(masked_gt_scan, sample_type='vertices')
    distances = ScanToMesh(masked_gt_scan, aligned_vertices, predicted_mesh_faces,
                           scan_sampler=sampler, signed=False, normalize=False)
    return distances.r
def procrustes(X, Y, scaling=True, reflection='best'):
    """
    Taken from https://github.com/patrikhuber/fg2018-competition
    A port of MATLAB's `procrustes` function to Numpy.
    Code from: https://stackoverflow.com/a/18927641.
    Procrustes analysis determines a linear transformation (translation,
    reflection, orthogonal rotation and scaling) of the points in Y to best
    conform them to the points in matrix X, using the sum of squared errors
    as the goodness of fit criterion.
    d, Z, [tform] = procrustes(X, Y)
    Inputs:
    ------------
    X, Y
        matrices of target and input coordinates. they must have equal
        numbers of points (rows), but Y may have fewer dimensions
        (columns) than X.
    scaling
        if False, the scaling component of the transformation is forced
        to 1
    reflection
        if 'best' (default), the transformation solution may or may not
        include a reflection component, depending on which fits the data
        best. setting reflection to True or False forces a solution with
        reflection or no reflection respectively.
    Outputs
    ------------
    d
        the residual sum of squared errors, normalized according to a
        measure of the scale of X, ((X - X.mean(0))**2).sum()
    Z
        the matrix of transformed Y-values
    tform
        a dict specifying the rotation, translation and scaling that
        maps Y --> X  (i.e. Z = tform['scale'] * Y.dot(tform['rotation'])
        + tform['translation'])
    """
    n, m = X.shape
    ny, my = Y.shape
    muX = X.mean(0)
    muY = Y.mean(0)
    X0 = X - muX
    Y0 = Y - muY
    ssX = (X0 ** 2.).sum()
    ssY = (Y0 ** 2.).sum()
    # centred Frobenius norm
    normX = np.sqrt(ssX)
    normY = np.sqrt(ssY)
    # scale to equal (unit) norm
    X0 /= normX
    Y0 /= normY
    if my < m:
        # Pad Y0 with zero columns so both point sets have m dimensions.
        # BUG FIX: np.zeros(n, m - my) passed `m - my` as the dtype argument
        # and raised a TypeError; the shape must be a tuple.
        Y0 = np.concatenate((Y0, np.zeros((n, m - my))), 1)
    # optimum rotation matrix of Y
    A = np.dot(X0.T, Y0)
    U, s, Vt = np.linalg.svd(A, full_matrices=False)
    V = Vt.T
    T = np.dot(V, U.T)
    # BUG FIX: `reflection is not 'best'` compared identity against a string
    # literal (SyntaxWarning, implementation-dependent); use != instead.
    if reflection != 'best':
        # does the current solution use a reflection?
        have_reflection = np.linalg.det(T) < 0
        # if that's not what was specified, force another reflection
        if reflection != have_reflection:
            V[:, -1] *= -1
            s[-1] *= -1
            T = np.dot(V, U.T)
    traceTA = s.sum()
    if scaling:
        # optimum scaling of Y
        b = traceTA * normX / normY
        # standarised distance between X and b*Y*T + c
        d = 1 - traceTA ** 2
        # transformed coords
        Z = normX * traceTA * np.dot(Y0, T) + muX
    else:
        b = 1
        d = 1 + ssY / ssX - 2 * traceTA * normY / normX
        Z = normY * np.dot(Y0, T) + muX
    # transformation matrix
    if my < m:
        T = T[:my, :]
    c = muX - b * np.dot(muY, T)
    # transformation values
    tform = {'rotation': T, 'scale': b, 'translation': c}
    return d, Z, tform