Refine registration
Input arguments
This script runs with python run_system.py [config] --refine. In [config], ["path_dataset"] should have a subfolder fragments that stores the fragments in .ply files and a pose graph in a .json file.
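For orientation, the sketch below lists the configuration entries this step reads, using only the keys that appear in the code on this page; the values are illustrative placeholders, not the defaults shipped with the examples.

# Hypothetical sketch of the config entries used by the refinement step (placeholder values).
config = {
    "path_dataset": "dataset/my_scene/",          # root folder containing fragments/
    "voxel_size": 0.05,                           # base voxel size, same unit as the point clouds
    "icp_method": "color",                        # "point_to_point", "point_to_plane", "color", or "generalized"
    "python_multi_threading": True,               # refine fragment pairs in parallel with joblib
    "template_global_posegraph": "global_registration.json",                      # placeholder file name
    "template_global_posegraph_optimized": "global_registration_optimized.json",  # placeholder file name
    "preference_loop_closure_registration": 2.0,  # loop-closure weight passed to pose graph optimization
}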
The main function runs local_refinement and optimize_posegraph_for_scene. The first function performs pairwise registration on the pairs detected by Register fragments. The second function performs multiway registration.
Fine-grained registration
# examples/python/reconstruction_system/refine_registration.py
def multiscale_icp(source,
                   target,
                   voxel_size,
                   max_iter,
                   config,
                   init_transformation=np.identity(4)):
    current_transformation = init_transformation
    for i, scale in enumerate(range(len(max_iter))):  # multi-scale approach
        iter = max_iter[scale]
        distance_threshold = config["voxel_size"] * 1.4
        print("voxel_size {}".format(voxel_size[scale]))
        source_down = source.voxel_down_sample(voxel_size[scale])
        target_down = target.voxel_down_sample(voxel_size[scale])
        if config["icp_method"] == "point_to_point":
            result_icp = o3d.pipelines.registration.registration_icp(
                source_down, target_down, distance_threshold,
                current_transformation,
                o3d.pipelines.registration.TransformationEstimationPointToPoint(),
                o3d.pipelines.registration.ICPConvergenceCriteria(
                    max_iteration=iter))
        else:
            source_down.estimate_normals(
                o3d.geometry.KDTreeSearchParamHybrid(
                    radius=voxel_size[scale] * 2.0, max_nn=30))
            target_down.estimate_normals(
                o3d.geometry.KDTreeSearchParamHybrid(
                    radius=voxel_size[scale] * 2.0, max_nn=30))
            if config["icp_method"] == "point_to_plane":
                result_icp = o3d.pipelines.registration.registration_icp(
                    source_down, target_down, distance_threshold,
                    current_transformation,
                    o3d.pipelines.registration.TransformationEstimationPointToPlane(),
                    o3d.pipelines.registration.ICPConvergenceCriteria(
                        max_iteration=iter))
            if config["icp_method"] == "color":
                result_icp = o3d.pipelines.registration.registration_colored_icp(
                    source_down, target_down, distance_threshold,
                    current_transformation,
                    o3d.pipelines.registration.TransformationEstimationForColoredICP(),
                    o3d.pipelines.registration.ICPConvergenceCriteria(
                        relative_fitness=1e-6,
                        relative_rmse=1e-6,
                        max_iteration=iter))
            if config["icp_method"] == "generalized":
                result_icp = o3d.pipelines.registration.registration_generalized_icp(
                    source_down, target_down, distance_threshold,
                    current_transformation,
                    o3d.pipelines.registration.TransformationEstimationForGeneralizedICP(),
                    o3d.pipelines.registration.ICPConvergenceCriteria(
                        relative_fitness=1e-6,
                        relative_rmse=1e-6,
                        max_iteration=iter))
        current_transformation = result_icp.transformation
        if i == len(max_iter) - 1:
            information_matrix = o3d.pipelines.registration.get_information_matrix_from_point_clouds(
                source_down, target_down, voxel_size[scale] * 1.4,
                result_icp.transformation)

    return (result_icp.transformation, information_matrix)
Several options are available for the fine-grained registration: point-to-point, point-to-plane, colored, and generalized ICP. The color option is recommended since it uses color information to prevent drift. See [Park2017] for details.
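As a usage illustration, multiscale_icp can be called directly on two fragment point clouds with a coarse-to-fine schedule. The three-level schedule, iteration counts, file names, and config values below are illustrative assumptions, not the exact values used by the helper functions in the script.

# Hypothetical usage sketch: refine a single fragment pair with multiscale_icp.
import numpy as np
import open3d as o3d

config = {"voxel_size": 0.05, "icp_method": "color"}              # illustrative values
source = o3d.io.read_point_cloud("fragments/fragment_000.ply")    # illustrative paths
target = o3d.io.read_point_cloud("fragments/fragment_003.ply")

voxel_size = config["voxel_size"]
(transformation, information) = multiscale_icp(
    source,
    target,
    voxel_size=[voxel_size, voxel_size / 2.0, voxel_size / 4.0],  # coarse-to-fine pyramid
    max_iter=[50, 30, 14],                                        # fewer iterations at finer scales
    config=config,
    init_transformation=np.identity(4))                           # or an edge pose from global registration
print(transformation)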
Multiway registration
# examples/python/reconstruction_system/refine_registration.py
def update_posegraph_for_scene(s, t, transformation, information, odometry,
                               pose_graph):
    if t == s + 1:  # odometry case
        odometry = np.dot(transformation, odometry)
        odometry_inv = np.linalg.inv(odometry)
        pose_graph.nodes.append(
            o3d.pipelines.registration.PoseGraphNode(odometry_inv))
        pose_graph.edges.append(
            o3d.pipelines.registration.PoseGraphEdge(s,
                                                     t,
                                                     transformation,
                                                     information,
                                                     uncertain=False))
    else:  # loop closure case
        pose_graph.edges.append(
            o3d.pipelines.registration.PoseGraphEdge(s,
                                                     t,
                                                     transformation,
                                                     information,
                                                     uncertain=True))
    return (odometry, pose_graph)
This script uses the technique demonstrated in the Multiway registration tutorial (/tutorial/pipelines/multiway_registration.ipynb). The function update_posegraph_for_scene builds a pose graph for multiway registration of all fragments. Each graph node represents a fragment and its pose, which transforms the geometry to the global space.
Once a pose graph is built, the function optimize_posegraph_for_scene is called for multiway registration.
# examples/python/reconstruction_system/optimize_posegraph.py
def optimize_posegraph_for_scene(path_dataset, config):
    pose_graph_name = join(path_dataset, config["template_global_posegraph"])
    pose_graph_optimized_name = join(
        path_dataset, config["template_global_posegraph_optimized"])
    run_posegraph_optimization(pose_graph_name, pose_graph_optimized_name,
            max_correspondence_distance=config["voxel_size"] * 1.4,
            preference_loop_closure=config["preference_loop_closure_registration"])
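run_posegraph_optimization (defined in the same file) wraps Open3D's global optimization. The sketch below shows roughly what that wrapper does; the Levenberg-Marquardt method and the edge_prune_threshold value are stated here as assumptions based on the defaults used throughout the example code.

# Sketch of the pose graph optimization performed by run_posegraph_optimization (assumed details).
import open3d as o3d

def run_posegraph_optimization_sketch(pose_graph_name, pose_graph_optimized_name,
                                      max_correspondence_distance,
                                      preference_loop_closure):
    pose_graph = o3d.io.read_pose_graph(pose_graph_name)
    method = o3d.pipelines.registration.GlobalOptimizationLevenbergMarquardt()
    criteria = o3d.pipelines.registration.GlobalOptimizationConvergenceCriteria()
    option = o3d.pipelines.registration.GlobalOptimizationOption(
        max_correspondence_distance=max_correspondence_distance,
        edge_prune_threshold=0.25,        # assumed pruning threshold for uncertain edges
        preference_loop_closure=preference_loop_closure,
        reference_node=0)                 # keep the first fragment fixed
    o3d.pipelines.registration.global_optimization(pose_graph, method, criteria,
                                                   option)
    o3d.io.write_pose_graph(pose_graph_optimized_name, pose_graph)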
Main registration loop
The function make_posegraph_for_refined_scene below calls all the functions introduced above.
# examples/python/reconstruction_system/refine_registration.py
def make_posegraph_for_refined_scene(ply_file_names, config):
    pose_graph = o3d.io.read_pose_graph(
        join(config["path_dataset"],
             config["template_global_posegraph_optimized"]))

    n_files = len(ply_file_names)
    matching_results = {}
    for edge in pose_graph.edges:
        s = edge.source_node_id
        t = edge.target_node_id

        transformation_init = edge.transformation
        matching_results[s * n_files + t] = \
            matching_result(s, t, transformation_init)

    if config["python_multi_threading"] == True:
        from joblib import Parallel, delayed
        import multiprocessing
        import subprocess
        MAX_THREAD = min(multiprocessing.cpu_count(),
                         max(len(pose_graph.edges), 1))
        results = Parallel(n_jobs=MAX_THREAD)(
            delayed(register_point_cloud_pair)(
                ply_file_names, matching_results[r].s, matching_results[r].t,
                matching_results[r].transformation, config)
            for r in matching_results)
        for i, r in enumerate(matching_results):
            matching_results[r].transformation = results[i][0]
            matching_results[r].information = results[i][1]
    else:
        for r in matching_results:
            (matching_results[r].transformation,
             matching_results[r].information) = \
                register_point_cloud_pair(ply_file_names,
                                          matching_results[r].s,
                                          matching_results[r].t,
                                          matching_results[r].transformation,
                                          config)

    pose_graph_new = o3d.pipelines.registration.PoseGraph()
    odometry = np.identity(4)
    pose_graph_new.nodes.append(
        o3d.pipelines.registration.PoseGraphNode(odometry))
    for r in matching_results:
        (odometry, pose_graph_new) = update_posegraph_for_scene(
            matching_results[r].s, matching_results[r].t,
            matching_results[r].transformation, matching_results[r].information,
            odometry, pose_graph_new)
    print(pose_graph_new)
The main workflow is: pairwise local refinement -> multiway registration.
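In the full script, the newly built pose graph is then saved and optimized once more before the scene is integrated. The snippet below is a hedged sketch of that follow-up; the template_refined_posegraph key and the optimize_posegraph_for_refined_scene helper are assumptions patterned on the functions shown above.

# Hypothetical follow-up to make_posegraph_for_refined_scene (assumed names).
o3d.io.write_pose_graph(
    join(config["path_dataset"], config["template_refined_posegraph"]),
    pose_graph_new)                                        # persist the refined pose graph
optimize_posegraph_for_refined_scene(config["path_dataset"], config)  # multiway registration, as in optimize_posegraph_for_scene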
Results
The pose graph optimization outputs the following messages:
[GlobalOptimizationLM] Optimizing PoseGraph having 14 nodes and 35 edges.
Line process weight : 789.730200
[Initial ] residual : 1.208286e+04, lambda : 1.706306e+01
[Iteration 00] residual : 2.410383e+03, valid edges : 22, time : 0.000 sec.
[Iteration 01] residual : 8.127856e+01, valid edges : 22, time : 0.000 sec.
[Iteration 02] residual : 8.031355e+01, valid edges : 22, time : 0.000 sec.
Delta.norm() < 1.000000e-06 * (x.norm() + 1.000000e-06)
[GlobalOptimizationLM] total time : 0.001 sec.
[GlobalOptimizationLM] Optimizing PoseGraph having 14 nodes and 35 edges.
Line process weight : 789.730200
[Initial ] residual : 8.031355e+01, lambda : 1.716826e+01
Delta.norm() < 1.000000e-06 * (x.norm() + 1.000000e-06)
[GlobalOptimizationLM] total time : 0.000 sec.
CompensateReferencePoseGraphNode : reference : 0
There are 14 fragments and 52 valid matching pairs between fragments. After 23 iterations, 11 edges are detected to be false positives. After they are pruned, pose graph optimization runs again to achieve tight alignment.
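To inspect the refined alignment, each fragment can be transformed by the pose stored in its graph node (as described in the multiway registration section above) and viewed as a single point cloud. The file paths below are illustrative.

# Hypothetical sketch: assemble the aligned fragments using the optimized node poses.
import open3d as o3d

pose_graph = o3d.io.read_pose_graph("scene/refined_registration_optimized.json")  # illustrative path
scene = o3d.geometry.PointCloud()
for i, node in enumerate(pose_graph.nodes):
    fragment = o3d.io.read_point_cloud("fragments/fragment_%03d.ply" % i)          # illustrative path
    scene += fragment.transform(node.pose)     # node.pose maps the fragment into the global frame
o3d.visualization.draw_geometries([scene])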