Point Cloud Library (PCL)  1.11.0
 All Classes Namespaces Functions Variables Typedefs Enumerations Enumerator Friends Modules Pages
hv_go.hpp
1 /*
2  * Software License Agreement (BSD License)
3  *
4  * Point Cloud Library (PCL) - www.pointclouds.org
5  * Copyright (c) 2012 Aitor Aldoma, Federico Tombari
6  *
7  * All rights reserved.
8  *
9  * Redistribution and use in source and binary forms, with or without
10  * modification, are permitted provided that the following conditions
11  * are met:
12  *
13  * * Redistributions of source code must retain the above copyright
14  * notice, this list of conditions and the following disclaimer.
15  * * Redistributions in binary form must reproduce the above
16  * copyright notice, this list of conditions and the following
17  * disclaimer in the documentation and/or other materials provided
18  * with the distribution.
19  * * Neither the name of Willow Garage, Inc. nor the names of its
20  * contributors may be used to endorse or promote products derived
21  * from this software without specific prior written permission.
22  *
23  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
24  * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
25  * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
26  * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
27  * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
28  * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
29  * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
30  * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
31  * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
32  * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
33  * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
34  * POSSIBILITY OF SUCH DAMAGE.
35  */
36 
37 #ifndef PCL_RECOGNITION_IMPL_HV_GO_HPP_
38 #define PCL_RECOGNITION_IMPL_HV_GO_HPP_
39 
#include <pcl/recognition/hv/hv_go.h>
#include <pcl/common/time.h>
#include <pcl/point_types.h>

#include <algorithm>
#include <cmath>
#include <memory>
#include <numeric>
46 
47 template<typename PointT, typename NormalT>
48 inline void extractEuclideanClustersSmooth(const typename pcl::PointCloud<PointT> &cloud, const typename pcl::PointCloud<NormalT> &normals, float tolerance,
49  const typename pcl::search::Search<PointT>::Ptr &tree, std::vector<pcl::PointIndices> &clusters, double eps_angle, float curvature_threshold,
50  unsigned int min_pts_per_cluster, unsigned int max_pts_per_cluster = (std::numeric_limits<int>::max) ())
51 {
52 
53  if (tree->getInputCloud ()->points.size () != cloud.points.size ())
54  {
55  PCL_ERROR("[pcl::extractEuclideanClusters] Tree built for a different point cloud dataset\n");
56  return;
57  }
58  if (cloud.points.size () != normals.points.size ())
59  {
60  PCL_ERROR("[pcl::extractEuclideanClusters] Number of points in the input point cloud different than normals!\n");
61  return;
62  }
63 
64  // Create a bool vector of processed point indices, and initialize it to false
65  std::vector<bool> processed (cloud.points.size (), false);
66 
67  std::vector<int> nn_indices;
68  std::vector<float> nn_distances;
69  // Process all points in the indices vector
70  int size = static_cast<int> (cloud.points.size ());
71  for (int i = 0; i < size; ++i)
72  {
73  if (processed[i])
74  continue;
75 
76  std::vector<unsigned int> seed_queue;
77  int sq_idx = 0;
78  seed_queue.push_back (i);
79 
80  processed[i] = true;
81 
82  while (sq_idx < static_cast<int> (seed_queue.size ()))
83  {
84 
85  if (normals.points[seed_queue[sq_idx]].curvature > curvature_threshold)
86  {
87  sq_idx++;
88  continue;
89  }
90 
91  // Search for sq_idx
92  if (!tree->radiusSearch (seed_queue[sq_idx], tolerance, nn_indices, nn_distances))
93  {
94  sq_idx++;
95  continue;
96  }
97 
98  for (std::size_t j = 1; j < nn_indices.size (); ++j) // nn_indices[0] should be sq_idx
99  {
100  if (processed[nn_indices[j]]) // Has this point been processed before ?
101  continue;
102 
103  if (normals.points[nn_indices[j]].curvature > curvature_threshold)
104  {
105  continue;
106  }
107 
108  //processed[nn_indices[j]] = true;
109  // [-1;1]
110 
111  double dot_p = normals.points[seed_queue[sq_idx]].normal[0] * normals.points[nn_indices[j]].normal[0]
112  + normals.points[seed_queue[sq_idx]].normal[1] * normals.points[nn_indices[j]].normal[1]
113  + normals.points[seed_queue[sq_idx]].normal[2] * normals.points[nn_indices[j]].normal[2];
114 
115  if (std::abs (std::acos (dot_p)) < eps_angle)
116  {
117  processed[nn_indices[j]] = true;
118  seed_queue.push_back (nn_indices[j]);
119  }
120  }
121 
122  sq_idx++;
123  }
124 
125  // If this queue is satisfactory, add to the clusters
126  if (seed_queue.size () >= min_pts_per_cluster && seed_queue.size () <= max_pts_per_cluster)
127  {
129  r.indices.resize (seed_queue.size ());
130  for (std::size_t j = 0; j < seed_queue.size (); ++j)
131  r.indices[j] = seed_queue[j];
132 
133  std::sort (r.indices.begin (), r.indices.end ());
134  r.indices.erase (std::unique (r.indices.begin (), r.indices.end ()), r.indices.end ());
135 
136  r.header = cloud.header;
137  clusters.push_back (r); // We could avoid a copy by working directly in the vector
138  }
139  }
140 }
141 
142 template<typename ModelT, typename SceneT>
143 mets::gol_type pcl::GlobalHypothesesVerification<ModelT, SceneT>::evaluateSolution(const std::vector<bool> & active, int changed)
144 {
145  float sign = 1.f;
146  //update explained_by_RM
147  if (active[changed])
148  {
149  //it has been activated
150  updateExplainedVector (recognition_models_[changed]->explained_, recognition_models_[changed]->explained_distances_, explained_by_RM_,
151  explained_by_RM_distance_weighted, 1.f);
152  updateUnexplainedVector (recognition_models_[changed]->unexplained_in_neighborhood, recognition_models_[changed]->unexplained_in_neighborhood_weights,
153  unexplained_by_RM_neighboorhods, recognition_models_[changed]->explained_, explained_by_RM_, 1.f);
154  updateCMDuplicity(recognition_models_[changed]->complete_cloud_occupancy_indices_, complete_cloud_occupancy_by_RM_, 1.f);
155  } else
156  {
157  //it has been deactivated
158  updateExplainedVector (recognition_models_[changed]->explained_, recognition_models_[changed]->explained_distances_, explained_by_RM_,
159  explained_by_RM_distance_weighted, -1.f);
160  updateUnexplainedVector (recognition_models_[changed]->unexplained_in_neighborhood, recognition_models_[changed]->unexplained_in_neighborhood_weights,
161  unexplained_by_RM_neighboorhods, recognition_models_[changed]->explained_, explained_by_RM_, -1.f);
162  updateCMDuplicity(recognition_models_[changed]->complete_cloud_occupancy_indices_, complete_cloud_occupancy_by_RM_, -1.f);
163  sign = -1.f;
164  }
165 
166  int duplicity = getDuplicity ();
167  float good_info = getExplainedValue ();
168 
169  float unexplained_info = getPreviousUnexplainedValue ();
170  float bad_info = static_cast<float> (getPreviousBadInfo ())
171  + (recognition_models_[changed]->outliers_weight_ * static_cast<float> (recognition_models_[changed]->bad_information_)) * sign;
172 
173  setPreviousBadInfo (bad_info);
174 
175  int n_active_hyp = 0;
176  for(const bool &i : active) {
177  if(i)
178  n_active_hyp++;
179  }
180 
181  float duplicity_cm = static_cast<float> (getDuplicityCM ()) * w_occupied_multiple_cm_;
182  return static_cast<mets::gol_type> ((good_info - bad_info - static_cast<float> (duplicity) - unexplained_info - duplicity_cm - static_cast<float> (n_active_hyp)) * -1.f); //return the dual to our max problem
183 }
184 
185 ///////////////////////////////////////////////////////////////////////////////////////////////////
186 template<typename ModelT, typename SceneT>
188 {
189  //clear stuff
190  recognition_models_.clear ();
191  unexplained_by_RM_neighboorhods.clear ();
192  explained_by_RM_distance_weighted.clear ();
193  explained_by_RM_.clear ();
194  mask_.clear ();
195  indices_.clear (),
196  complete_cloud_occupancy_by_RM_.clear ();
197 
198  // initialize mask to false
199  mask_.resize (complete_models_.size ());
200  for (std::size_t i = 0; i < complete_models_.size (); i++)
201  mask_[i] = false;
202 
203  indices_.resize (complete_models_.size ());
204 
205  NormalEstimator_ n3d;
206  scene_normals_.reset (new pcl::PointCloud<pcl::Normal> ());
207 
209  normals_tree->setInputCloud (scene_cloud_downsampled_);
210 
211  n3d.setRadiusSearch (radius_normals_);
212  n3d.setSearchMethod (normals_tree);
213  n3d.setInputCloud (scene_cloud_downsampled_);
214  n3d.compute (*scene_normals_);
215 
216  //check nans...
217  int j = 0;
218  for (std::size_t i = 0; i < scene_normals_->points.size (); ++i)
219  {
220  if (!std::isfinite (scene_normals_->points[i].normal_x) || !std::isfinite (scene_normals_->points[i].normal_y)
221  || !std::isfinite (scene_normals_->points[i].normal_z))
222  continue;
223 
224  scene_normals_->points[j] = scene_normals_->points[i];
225  scene_cloud_downsampled_->points[j] = scene_cloud_downsampled_->points[i];
226 
227  j++;
228  }
229 
230  scene_normals_->points.resize (j);
231  scene_normals_->width = j;
232  scene_normals_->height = 1;
233 
234  scene_cloud_downsampled_->points.resize (j);
235  scene_cloud_downsampled_->width = j;
236  scene_cloud_downsampled_->height = 1;
237 
238  explained_by_RM_.resize (scene_cloud_downsampled_->points.size (), 0);
239  explained_by_RM_distance_weighted.resize (scene_cloud_downsampled_->points.size (), 0.f);
240  unexplained_by_RM_neighboorhods.resize (scene_cloud_downsampled_->points.size (), 0.f);
241 
242  //compute segmentation of the scene if detect_clutter_
243  if (detect_clutter_)
244  {
245  //initialize kdtree for search
246  scene_downsampled_tree_.reset (new pcl::search::KdTree<SceneT>);
247  scene_downsampled_tree_->setInputCloud (scene_cloud_downsampled_);
248 
249  std::vector<pcl::PointIndices> clusters;
250  double eps_angle_threshold = 0.2;
251  int min_points = 20;
252  float curvature_threshold = 0.045f;
253 
254  extractEuclideanClustersSmooth<SceneT, pcl::Normal> (*scene_cloud_downsampled_, *scene_normals_, inliers_threshold_ * 2.f, scene_downsampled_tree_,
255  clusters, eps_angle_threshold, curvature_threshold, min_points);
256 
257  clusters_cloud_.reset (new pcl::PointCloud<pcl::PointXYZI>);
258  clusters_cloud_->points.resize (scene_cloud_downsampled_->points.size ());
259  clusters_cloud_->width = scene_cloud_downsampled_->width;
260  clusters_cloud_->height = 1;
261 
262  for (std::size_t i = 0; i < scene_cloud_downsampled_->points.size (); i++)
263  {
264  pcl::PointXYZI p;
265  p.getVector3fMap () = scene_cloud_downsampled_->points[i].getVector3fMap ();
266  p.intensity = 0.f;
267  clusters_cloud_->points[i] = p;
268  }
269 
270  float intens_incr = 100.f / static_cast<float> (clusters.size ());
271  float intens = intens_incr;
272  for (const auto &cluster : clusters)
273  {
274  for (const auto &vertex : cluster.indices)
275  {
276  clusters_cloud_->points[vertex].intensity = intens;
277  }
278 
279  intens += intens_incr;
280  }
281  }
282 
283  //compute cues
284  {
285  pcl::ScopeTime tcues ("Computing cues");
286  recognition_models_.resize (complete_models_.size ());
287  int valid = 0;
288  for (int i = 0; i < static_cast<int> (complete_models_.size ()); i++)
289  {
290  //create recognition model
291  recognition_models_[valid].reset (new RecognitionModel ());
292  if(addModel (visible_models_[i], complete_models_[i], recognition_models_[valid])) {
293  indices_[valid] = i;
294  valid++;
295  }
296  }
297 
298  recognition_models_.resize(valid);
299  indices_.resize(valid);
300  }
301 
302  //compute the bounding boxes for the models
303  ModelT min_pt_all, max_pt_all;
304  min_pt_all.x = min_pt_all.y = min_pt_all.z = std::numeric_limits<float>::max ();
305  max_pt_all.x = max_pt_all.y = max_pt_all.z = (std::numeric_limits<float>::max () - 0.001f) * -1;
306 
307  for (std::size_t i = 0; i < recognition_models_.size (); i++)
308  {
309  ModelT min_pt, max_pt;
310  pcl::getMinMax3D (*complete_models_[indices_[i]], min_pt, max_pt);
311  if (min_pt.x < min_pt_all.x)
312  min_pt_all.x = min_pt.x;
313 
314  if (min_pt.y < min_pt_all.y)
315  min_pt_all.y = min_pt.y;
316 
317  if (min_pt.z < min_pt_all.z)
318  min_pt_all.z = min_pt.z;
319 
320  if (max_pt.x > max_pt_all.x)
321  max_pt_all.x = max_pt.x;
322 
323  if (max_pt.y > max_pt_all.y)
324  max_pt_all.y = max_pt.y;
325 
326  if (max_pt.z > max_pt_all.z)
327  max_pt_all.z = max_pt.z;
328  }
329 
330  int size_x, size_y, size_z;
331  size_x = static_cast<int> (std::ceil (std::abs (max_pt_all.x - min_pt_all.x) / res_occupancy_grid_)) + 1;
332  size_y = static_cast<int> (std::ceil (std::abs (max_pt_all.y - min_pt_all.y) / res_occupancy_grid_)) + 1;
333  size_z = static_cast<int> (std::ceil (std::abs (max_pt_all.z - min_pt_all.z) / res_occupancy_grid_)) + 1;
334 
335  complete_cloud_occupancy_by_RM_.resize (size_x * size_y * size_z, 0);
336 
337  for (std::size_t i = 0; i < recognition_models_.size (); i++)
338  {
339 
340  std::map<int, bool> banned;
341  std::map<int, bool>::iterator banned_it;
342 
343  for (std::size_t j = 0; j < complete_models_[indices_[i]]->points.size (); j++)
344  {
345  int pos_x, pos_y, pos_z;
346  pos_x = static_cast<int> (std::floor ((complete_models_[indices_[i]]->points[j].x - min_pt_all.x) / res_occupancy_grid_));
347  pos_y = static_cast<int> (std::floor ((complete_models_[indices_[i]]->points[j].y - min_pt_all.y) / res_occupancy_grid_));
348  pos_z = static_cast<int> (std::floor ((complete_models_[indices_[i]]->points[j].z - min_pt_all.z) / res_occupancy_grid_));
349 
350  int idx = pos_z * size_x * size_y + pos_y * size_x + pos_x;
351  banned_it = banned.find (idx);
352  if (banned_it == banned.end ())
353  {
354  complete_cloud_occupancy_by_RM_[idx]++;
355  recognition_models_[i]->complete_cloud_occupancy_indices_.push_back (idx);
356  banned[idx] = true;
357  }
358  }
359  }
360 
361  {
362  pcl::ScopeTime tcues ("Computing clutter cues");
363 #pragma omp parallel for \
364  default(none) \
365  schedule(dynamic, 4) \
366  num_threads(omp_get_num_procs())
367  for (int j = 0; j < static_cast<int> (recognition_models_.size ()); j++)
368  computeClutterCue (recognition_models_[j]);
369  }
370 
371  cc_.clear ();
372  n_cc_ = 1;
373  cc_.resize (n_cc_);
374  for (std::size_t i = 0; i < recognition_models_.size (); i++)
375  cc_[0].push_back (static_cast<int> (i));
376 
377 }
378 
template<typename ModelT, typename SceneT>
void pcl::GlobalHypothesesVerification<ModelT, SceneT>::SAOptimize(std::vector<int> & cc_indices, std::vector<bool> & initial_solution)
{
  // Optimize the subset of hypotheses listed in cc_indices with simulated
  // annealing (mets library). initial_solution is read as the starting state
  // and overwritten with the best solution found.

  // Temporarily restrict recognition_models_ to this connected component; the
  // full list is restored before returning.
  std::vector<RecognitionModelPtr> recognition_models_copy;
  recognition_models_copy = recognition_models_;

  recognition_models_.clear ();

  for (const int &cc_index : cc_indices)
  {
    recognition_models_.push_back (recognition_models_copy[cc_index]);
  }

  // Accumulate explained / unexplained bookkeeping for the starting
  // configuration (all models of this component considered active).
  for (std::size_t j = 0; j < recognition_models_.size (); j++)
  {
    RecognitionModelPtr recog_model = recognition_models_[j];
    for (std::size_t i = 0; i < recog_model->explained_.size (); i++)
    {
      explained_by_RM_[recog_model->explained_[i]]++;
      explained_by_RM_distance_weighted[recog_model->explained_[i]] += recog_model->explained_distances_[i];
    }

    if (detect_clutter_)
    {
      for (std::size_t i = 0; i < recog_model->unexplained_in_neighborhood.size (); i++)
      {
        unexplained_by_RM_neighboorhods[recog_model->unexplained_in_neighborhood[i]] += recog_model->unexplained_in_neighborhood_weights[i];
      }
    }
  }

  // Total multiplicity of voxels occupied by more than one hypothesis.
  int occupied_multiple = 0;
  for(std::size_t i=0; i < complete_cloud_occupancy_by_RM_.size(); i++) {
    if(complete_cloud_occupancy_by_RM_[i] > 1) {
      occupied_multiple+=complete_cloud_occupancy_by_RM_[i];
    }
  }

  setPreviousDuplicityCM(occupied_multiple);
  //do optimization
  //Define model SAModel, initial solution is all models activated

  // Cost terms of the starting solution.
  int duplicity;
  float good_information_ = getTotalExplainedInformation (explained_by_RM_, explained_by_RM_distance_weighted, &duplicity);
  float bad_information_ = 0;
  float unexplained_in_neighboorhod = getUnexplainedInformationInNeighborhood (unexplained_by_RM_neighboorhods, explained_by_RM_);

  for (std::size_t i = 0; i < initial_solution.size (); i++)
  {
    if (initial_solution[i])
      bad_information_ += recognition_models_[i]->outliers_weight_ * static_cast<float> (recognition_models_[i]->bad_information_);
  }

  // Cache the terms so evaluateSolution() can update them incrementally as the
  // annealer toggles hypotheses.
  setPreviousExplainedValue (good_information_);
  setPreviousDuplicity (duplicity);
  setPreviousBadInfo (bad_information_);
  setPreviousUnexplainedValue (unexplained_in_neighboorhod);

  // Build the SA model; cost_ is the negated objective (mets minimizes).
  SAModel model;
  model.cost_ = static_cast<mets::gol_type> ((good_information_ - bad_information_
      - static_cast<float> (duplicity)
      - static_cast<float> (occupied_multiple) * w_occupied_multiple_cm_
      - static_cast<float> (recognition_models_.size ())
      - unexplained_in_neighboorhod) * -1.f);

  model.setSolution (initial_solution);
  model.setOptimizer (this);
  SAModel best (model);

  // Neighborhood moves over the component's hypotheses.
  move_manager neigh (static_cast<int> (cc_indices.size ()));

  mets::best_ever_solution best_recorder (best);
  mets::noimprove_termination_criteria noimprove (max_iterations_);
  mets::linear_cooling linear_cooling;
  mets::simulated_annealing<move_manager> sa (model, best_recorder, neigh, noimprove, linear_cooling, initial_temp_, 1e-7, 2);
  sa.setApplyAndEvaluate(true);

  {
    pcl::ScopeTime t ("SA search...");
    sa.search ();
  }

  // Copy the best solution found back into initial_solution (output).
  best_seen_ = static_cast<const SAModel&> (best_recorder.best_seen ());
  for (std::size_t i = 0; i < best_seen_.solution_.size (); i++)
  {
    initial_solution[i] = best_seen_.solution_[i];
  }

  // Restore the full model list.
  recognition_models_ = recognition_models_copy;
}
472 
473 ///////////////////////////////////////////////////////////////////////////////////////////////////
474 template<typename ModelT, typename SceneT>
476 {
477  initialize ();
478 
479  //for each connected component, find the optimal solution
480  for (int c = 0; c < n_cc_; c++)
481  {
482  //TODO: Check for trivial case...
483  //TODO: Check also the number of hypotheses and use exhaustive enumeration if smaller than 10
484  std::vector<bool> subsolution (cc_[c].size (), true);
485  SAOptimize (cc_[c], subsolution);
486  for (std::size_t i = 0; i < subsolution.size (); i++)
487  {
488  mask_[indices_[cc_[c][i]]] = (subsolution[i]);
489  }
490  }
491 }
492 
493 template<typename ModelT, typename SceneT>
495  typename pcl::PointCloud<ModelT>::ConstPtr & complete_model, RecognitionModelPtr & recog_model)
496 {
497  //voxelize model cloud
498  recog_model->cloud_.reset (new pcl::PointCloud<ModelT> ());
499  recog_model->complete_cloud_.reset (new pcl::PointCloud<ModelT> ());
500 
501  float size_model = resolution_;
502  pcl::VoxelGrid<ModelT> voxel_grid;
503  voxel_grid.setInputCloud (model);
504  voxel_grid.setLeafSize (size_model, size_model, size_model);
505  voxel_grid.filter (*(recog_model->cloud_));
506 
507  pcl::VoxelGrid<ModelT> voxel_grid2;
508  voxel_grid2.setInputCloud (complete_model);
509  voxel_grid2.setLeafSize (size_model, size_model, size_model);
510  voxel_grid2.filter (*(recog_model->complete_cloud_));
511 
512  {
513  //check nans...
514  int j = 0;
515  for (std::size_t i = 0; i < recog_model->cloud_->points.size (); ++i)
516  {
517  if (!std::isfinite (recog_model->cloud_->points[i].x) || !std::isfinite (recog_model->cloud_->points[i].y)
518  || !std::isfinite (recog_model->cloud_->points[i].z))
519  continue;
520 
521  recog_model->cloud_->points[j] = recog_model->cloud_->points[i];
522  j++;
523  }
524 
525  recog_model->cloud_->points.resize (j);
526  recog_model->cloud_->width = j;
527  recog_model->cloud_->height = 1;
528  }
529 
530  if (recog_model->cloud_->points.empty ())
531  {
532  PCL_WARN("The model cloud has no points..\n");
533  return false;
534  }
535 
536  //compute normals unless given (now do it always...)
539  recog_model->normals_.reset (new pcl::PointCloud<pcl::Normal> ());
540  normals_tree->setInputCloud (recog_model->cloud_);
541  n3d.setRadiusSearch (radius_normals_);
542  n3d.setSearchMethod (normals_tree);
543  n3d.setInputCloud ((recog_model->cloud_));
544  n3d.compute (*(recog_model->normals_));
545 
546  //check nans...
547  int j = 0;
548  for (std::size_t i = 0; i < recog_model->normals_->points.size (); ++i)
549  {
550  if (!std::isfinite (recog_model->normals_->points[i].normal_x) || !std::isfinite (recog_model->normals_->points[i].normal_y)
551  || !std::isfinite (recog_model->normals_->points[i].normal_z))
552  continue;
553 
554  recog_model->normals_->points[j] = recog_model->normals_->points[i];
555  recog_model->cloud_->points[j] = recog_model->cloud_->points[i];
556  j++;
557  }
558 
559  recog_model->normals_->points.resize (j);
560  recog_model->normals_->width = j;
561  recog_model->normals_->height = 1;
562 
563  recog_model->cloud_->points.resize (j);
564  recog_model->cloud_->width = j;
565  recog_model->cloud_->height = 1;
566 
567  std::vector<int> explained_indices;
568  std::vector<float> outliers_weight;
569  std::vector<float> explained_indices_distances;
570 
571  std::vector<int> nn_indices;
572  std::vector<float> nn_distances;
573 
574  std::map<int, std::shared_ptr<std::vector<std::pair<int, float>>>> model_explains_scene_points; //which point i from the scene is explained by a points j_k with dist d_k from the model
575 
576  outliers_weight.resize (recog_model->cloud_->points.size ());
577  recog_model->outlier_indices_.resize (recog_model->cloud_->points.size ());
578 
579  std::size_t o = 0;
580  for (std::size_t i = 0; i < recog_model->cloud_->points.size (); i++)
581  {
582  if (!scene_downsampled_tree_->radiusSearch (recog_model->cloud_->points[i], inliers_threshold_, nn_indices, nn_distances, std::numeric_limits<int>::max ()))
583  {
584  //outlier
585  outliers_weight[o] = regularizer_;
586  recog_model->outlier_indices_[o] = static_cast<int> (i);
587  o++;
588  } else
589  {
590  for (std::size_t k = 0; k < nn_distances.size (); k++)
591  {
592  std::pair<int, float> pair = std::make_pair (i, nn_distances[k]); //i is a index to a model point and then distance
593  auto it = model_explains_scene_points.find (nn_indices[k]);
594  if (it == model_explains_scene_points.end ())
595  {
596  std::shared_ptr<std::vector<std::pair<int, float>>> vec (new std::vector<std::pair<int, float>> ());
597  vec->push_back (pair);
598  model_explains_scene_points[nn_indices[k]] = vec;
599  } else
600  {
601  it->second->push_back (pair);
602  }
603  }
604  }
605  }
606 
607  outliers_weight.resize (o);
608  recog_model->outlier_indices_.resize (o);
609 
610  recog_model->outliers_weight_ = (std::accumulate (outliers_weight.begin (), outliers_weight.end (), 0.f) / static_cast<float> (outliers_weight.size ()));
611  if (outliers_weight.empty ())
612  recog_model->outliers_weight_ = 1.f;
613 
614  pcl::IndicesPtr indices_scene (new std::vector<int>);
615  //go through the map and keep the closest model point in case that several model points explain a scene point
616 
617  int p = 0;
618 
619  for (auto it = model_explains_scene_points.cbegin (); it != model_explains_scene_points.cend (); it++, p++)
620  {
621  std::size_t closest = 0;
622  float min_d = std::numeric_limits<float>::min ();
623  for (std::size_t i = 0; i < it->second->size (); i++)
624  {
625  if (it->second->at (i).second > min_d)
626  {
627  min_d = it->second->at (i).second;
628  closest = i;
629  }
630  }
631 
632  float d = it->second->at (closest).second;
633  float d_weight = -(d * d / (inliers_threshold_)) + 1;
634 
635  //it->first is index to scene point
636  //using normals to weight inliers
637  Eigen::Vector3f scene_p_normal = scene_normals_->points[it->first].getNormalVector3fMap ();
638  Eigen::Vector3f model_p_normal = recog_model->normals_->points[it->second->at (closest).first].getNormalVector3fMap ();
639  float dotp = scene_p_normal.dot (model_p_normal) * 1.f; //[-1,1] from antiparallel trough perpendicular to parallel
640 
641  if (dotp < 0.f)
642  dotp = 0.f;
643 
644  explained_indices.push_back (it->first);
645  explained_indices_distances.push_back (d_weight * dotp);
646 
647  }
648 
649  recog_model->bad_information_ = static_cast<int> (recog_model->outlier_indices_.size ());
650  recog_model->explained_ = explained_indices;
651  recog_model->explained_distances_ = explained_indices_distances;
652 
653  return true;
654 }
655 
template<typename ModelT, typename SceneT>
void pcl::GlobalHypothesesVerification<ModelT, SceneT>::computeClutterCue(RecognitionModelPtr & recog_model)
{
  // For one hypothesis, collect the scene points that lie in the neighborhood
  // of its explained points without being explained themselves, and assign
  // each such "clutter" point a weight. No-op when detect_clutter_ is false.
  if (detect_clutter_)
  {

    float rn_sqr = radius_neighborhood_GO_ * radius_neighborhood_GO_;
    std::vector<int> nn_indices;
    std::vector<float> nn_distances;

    std::vector < std::pair<int, int> > neighborhood_indices; //first is indices to scene point and second is indices to explained_ scene points
    for (int i = 0; i < static_cast<int> (recog_model->explained_.size ()); i++)
    {
      if (scene_downsampled_tree_->radiusSearch (scene_cloud_downsampled_->points[recog_model->explained_[i]], radius_neighborhood_GO_, nn_indices,
          nn_distances, std::numeric_limits<int>::max ()))
      {
        for (std::size_t k = 0; k < nn_distances.size (); k++)
        {
          // NOTE(review): this compares a scene-point index (nn_indices[k])
          // against the loop index into explained_ (i); it looks intended to
          // skip the query point itself — confirm the indexing is right.
          if (nn_indices[k] != i)
            neighborhood_indices.emplace_back (nn_indices[k], i);
        }
      }
    }

    //sort neighborhood indices by id
    std::sort (neighborhood_indices.begin (), neighborhood_indices.end (),
        [] (const auto& p1, const auto& p2) { return p1.first < p2.first; });

    //erase duplicated unexplained points
    neighborhood_indices.erase (
        std::unique (neighborhood_indices.begin (), neighborhood_indices.end (),
            [] (const auto& p1, const auto& p2) { return p1.first == p2.first; }), neighborhood_indices.end ());

    //sort explained points
    std::vector<int> exp_idces (recog_model->explained_);
    std::sort (exp_idces.begin (), exp_idces.end ());

    recog_model->unexplained_in_neighborhood.resize (neighborhood_indices.size ());
    recog_model->unexplained_in_neighborhood_weights.resize (neighborhood_indices.size ());

    // Merge-walk the two sorted lists: neighborhood points that also appear in
    // exp_idces are explained by this hypothesis and skipped; the rest are
    // recorded as unexplained-in-neighborhood with a weight.
    std::size_t p = 0;
    std::size_t j = 0;
    for (const auto &neighborhood_index : neighborhood_indices)
    {
      if ((j < exp_idces.size ()) && (neighborhood_index.first == exp_idces[j]))
      {
        //this index is explained by the hypothesis so ignore it, advance j
        j++;
      } else
      {
        //indices_in_nb[i] < exp_idces[j]
        //recog_model->unexplained_in_neighborhood.push_back(neighborhood_indices[i]);
        recog_model->unexplained_in_neighborhood[p] = neighborhood_index.first;

        // If the clutter point belongs to the same smooth scene cluster
        // (non-zero intensity label) as the explained point that pulled it in,
        // penalize with the fixed clutter regularizer.
        if (clusters_cloud_->points[recog_model->explained_[neighborhood_index.second]].intensity != 0.f
            && (clusters_cloud_->points[recog_model->explained_[neighborhood_index.second]].intensity
                == clusters_cloud_->points[neighborhood_index.first].intensity))
        {

          recog_model->unexplained_in_neighborhood_weights[p] = clutter_regularizer_;

        } else
        {
          //neighborhood_indices[i].first gives the index to the scene point and second to the explained scene point by the model causing this...
          //calculate weight of this clutter point based on the distance of the scene point and the model point causing it
          float d = static_cast<float> (pow (
              (scene_cloud_downsampled_->points[recog_model->explained_[neighborhood_index.second]].getVector3fMap ()
                  - scene_cloud_downsampled_->points[neighborhood_index.first].getVector3fMap ()).norm (), 2));
          float d_weight = -(d / rn_sqr) + 1; //points that are close have a strong weight*/

          //using normals to weight clutter points
          Eigen::Vector3f scene_p_normal = scene_normals_->points[neighborhood_index.first].getNormalVector3fMap ();
          Eigen::Vector3f model_p_normal = scene_normals_->points[recog_model->explained_[neighborhood_index.second]].getNormalVector3fMap ();
          float dotp = scene_p_normal.dot (model_p_normal); //[-1,1] from antiparallel trough perpendicular to parallel

          if (dotp < 0)
            dotp = 0.f;

          recog_model->unexplained_in_neighborhood_weights[p] = d_weight * dotp;
        }
        p++;
      }
    }

    // Shrink to the number of unexplained points actually recorded.
    recog_model->unexplained_in_neighborhood_weights.resize (p);
    recog_model->unexplained_in_neighborhood.resize (p);
  }
}
744 
745 #define PCL_INSTANTIATE_GoHV(T1,T2) template class PCL_EXPORTS pcl::GlobalHypothesesVerification<T1,T2>;
746 
747 #endif /* PCL_RECOGNITION_IMPL_HV_GO_HPP_ */
748 
shared_ptr< KdTree< PointT, Tree > > Ptr
Definition: kdtree.h:75
shared_ptr< Indices > IndicesPtr
Definition: pcl_base.h:61
Class to measure the time spent in a scope.
Definition: time.h:107
virtual int radiusSearch(const PointT &point, double radius, Indices &k_indices, std::vector< float > &k_sqr_distances, unsigned int max_nn=0) const =0
Search for all the nearest neighbors of the query point in a given radius.
void setRadiusSearch(double radius)
Set the sphere radius that is to be used for determining the nearest neighbors used for the feature e...
Definition: feature.h:201
void filter(PointCloud &output)
Calls the filtering method and returns the filtered dataset in output.
Definition: filter.h:124
VoxelGrid assembles a local 3D grid over a given PointCloud, and downsamples + filters the data...
Definition: voxel_grid.h:177
NormalEstimation estimates local surface properties (surface normals and curvatures)at each 3D point...
Definition: normal_3d.h:243
std::vector< PointT, Eigen::aligned_allocator< PointT > > points
The point data.
Definition: point_cloud.h:410
void getMinMax3D(const pcl::PointCloud< PointT > &cloud, PointT &min_pt, PointT &max_pt)
Get the minimum and maximum values on each of the 3 (x-y-z) dimensions in a given pointcloud...
Definition: common.hpp:242
::pcl::PCLHeader header
Definition: PointIndices.h:21
virtual PointCloudConstPtr getInputCloud() const
Get a pointer to the input point cloud dataset.
Definition: search.h:125
PointCloud represents the base class in PCL for storing collections of 3D points. ...
void setSearchMethod(const KdTreePtr &tree)
Provide a pointer to the search object.
Definition: feature.h:167
shared_ptr< const PointCloud< PointT > > ConstPtr
Definition: point_cloud.h:429
virtual void setInputCloud(const PointCloudConstPtr &cloud)
Provide a pointer to the input dataset.
Definition: pcl_base.hpp:65
shared_ptr< pcl::search::Search< PointT > > Ptr
Definition: search.h:81
void compute(PointCloudOut &output)
Base method for feature estimation for all points given in using th...
Definition: feature.hpp:193
A hypothesis verification method proposed in "A Global Hypotheses Verification Method for 3D Object R...
Definition: hv_go.h:36
void setInputCloud(const PointCloudConstPtr &cloud) override
Provide a pointer to the input dataset.
Definition: normal_3d.h:332
void setLeafSize(const Eigen::Vector4f &leaf_size)
Set the voxel grid leaf size.
Definition: voxel_grid.h:222
pcl::PCLHeader header
The point cloud header.
Definition: point_cloud.h:407