Point Cloud Library (PCL)  1.11.1-dev
hv_go.hpp
/*
 * Software License Agreement (BSD License)
 *
 * Point Cloud Library (PCL) - www.pointclouds.org
 * Copyright (c) 2012 Aitor Aldoma, Federico Tombari
 *
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 *  * Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *  * Redistributions in binary form must reproduce the above
 *    copyright notice, this list of conditions and the following
 *    disclaimer in the documentation and/or other materials provided
 *    with the distribution.
 *  * Neither the name of Willow Garage, Inc. nor the names of its
 *    contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef PCL_RECOGNITION_IMPL_HV_GO_HPP_
#define PCL_RECOGNITION_IMPL_HV_GO_HPP_

#include <pcl/recognition/hv/hv_go.h>
#include <pcl/common/common.h> // for getMinMax3D
#include <pcl/common/time.h>
#include <pcl/point_types.h>

#include <algorithm> // for std::sort, std::unique
#include <memory>
#include <numeric>

template<typename PointT, typename NormalT>
inline void extractEuclideanClustersSmooth(const typename pcl::PointCloud<PointT> &cloud, const typename pcl::PointCloud<NormalT> &normals, float tolerance,
    const typename pcl::search::Search<PointT>::Ptr &tree, std::vector<pcl::PointIndices> &clusters, double eps_angle, float curvature_threshold,
    unsigned int min_pts_per_cluster, unsigned int max_pts_per_cluster = (std::numeric_limits<int>::max) ())
{

  if (tree->getInputCloud ()->size () != cloud.size ())
  {
    PCL_ERROR("[pcl::extractEuclideanClusters] Tree built for a different point cloud dataset\n");
    return;
  }
  if (cloud.size () != normals.size ())
  {
    PCL_ERROR("[pcl::extractEuclideanClusters] Number of points in the input point cloud different than normals!\n");
    return;
  }

  // Create a bool vector of processed point indices, and initialize it to false
  std::vector<bool> processed (cloud.size (), false);

  std::vector<int> nn_indices;
  std::vector<float> nn_distances;
  // Process all points in the indices vector
  int size = static_cast<int> (cloud.size ());
  for (int i = 0; i < size; ++i)
  {
    if (processed[i])
      continue;

    std::vector<unsigned int> seed_queue;
    int sq_idx = 0;
    seed_queue.push_back (i);

    processed[i] = true;

    while (sq_idx < static_cast<int> (seed_queue.size ()))
    {

      if (normals[seed_queue[sq_idx]].curvature > curvature_threshold)
      {
        sq_idx++;
        continue;
      }

      // Search for sq_idx
      if (!tree->radiusSearch (seed_queue[sq_idx], tolerance, nn_indices, nn_distances))
      {
        sq_idx++;
        continue;
      }

      for (std::size_t j = 1; j < nn_indices.size (); ++j) // nn_indices[0] should be sq_idx
      {
        if (processed[nn_indices[j]]) // Has this point been processed before ?
          continue;

        if (normals[nn_indices[j]].curvature > curvature_threshold)
        {
          continue;
        }

        //processed[nn_indices[j]] = true;
        // [-1;1]

        double dot_p = normals[seed_queue[sq_idx]].normal[0] * normals[nn_indices[j]].normal[0]
            + normals[seed_queue[sq_idx]].normal[1] * normals[nn_indices[j]].normal[1]
            + normals[seed_queue[sq_idx]].normal[2] * normals[nn_indices[j]].normal[2];

        if (std::abs (std::acos (dot_p)) < eps_angle)
        {
          processed[nn_indices[j]] = true;
          seed_queue.push_back (nn_indices[j]);
        }
      }

      sq_idx++;
    }

    // If this queue is satisfactory, add to the clusters
    if (seed_queue.size () >= min_pts_per_cluster && seed_queue.size () <= max_pts_per_cluster)
    {
      pcl::PointIndices r;
      r.indices.resize (seed_queue.size ());
      for (std::size_t j = 0; j < seed_queue.size (); ++j)
        r.indices[j] = seed_queue[j];

      std::sort (r.indices.begin (), r.indices.end ());
      r.indices.erase (std::unique (r.indices.begin (), r.indices.end ()), r.indices.end ());

      r.header = cloud.header;
      clusters.push_back (r); // We could avoid a copy by working directly in the vector
    }
  }
}

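/*
 * Usage sketch for the helper above: a minimal, illustrative call on a downsampled cloud with
 * precomputed normals. The variables `scene` (pcl::PointCloud<pcl::PointXYZ>::Ptr) and
 * `scene_normals` (pcl::PointCloud<pcl::Normal>::Ptr) are placeholders, and the numeric values
 * mirror the ones used in initialize() below rather than recommended settings.
 *
 *   pcl::search::KdTree<pcl::PointXYZ>::Ptr tree (new pcl::search::KdTree<pcl::PointXYZ>);
 *   tree->setInputCloud (scene);
 *   std::vector<pcl::PointIndices> clusters;
 *   extractEuclideanClustersSmooth<pcl::PointXYZ, pcl::Normal> (*scene, *scene_normals,
 *       0.01f,            // tolerance: radius used for region growing
 *       tree, clusters,
 *       0.2,              // eps_angle: maximum angle between neighboring normals (radians)
 *       0.045f,           // curvature_threshold: skip high-curvature seeds and neighbors
 *       20);              // min_pts_per_cluster
 *   // clusters[k].indices then holds the point indices of the k-th smooth segment
 */
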
template<typename ModelT, typename SceneT>
mets::gol_type pcl::GlobalHypothesesVerification<ModelT, SceneT>::evaluateSolution(const std::vector<bool> & active, int changed)
{
  float sign = 1.f;
  //update explained_by_RM
  if (active[changed])
  {
    //it has been activated
    updateExplainedVector (recognition_models_[changed]->explained_, recognition_models_[changed]->explained_distances_, explained_by_RM_,
        explained_by_RM_distance_weighted, 1.f);
    updateUnexplainedVector (recognition_models_[changed]->unexplained_in_neighborhood, recognition_models_[changed]->unexplained_in_neighborhood_weights,
        unexplained_by_RM_neighboorhods, recognition_models_[changed]->explained_, explained_by_RM_, 1.f);
    updateCMDuplicity (recognition_models_[changed]->complete_cloud_occupancy_indices_, complete_cloud_occupancy_by_RM_, 1.f);
  } else
  {
    //it has been deactivated
    updateExplainedVector (recognition_models_[changed]->explained_, recognition_models_[changed]->explained_distances_, explained_by_RM_,
        explained_by_RM_distance_weighted, -1.f);
    updateUnexplainedVector (recognition_models_[changed]->unexplained_in_neighborhood, recognition_models_[changed]->unexplained_in_neighborhood_weights,
        unexplained_by_RM_neighboorhods, recognition_models_[changed]->explained_, explained_by_RM_, -1.f);
    updateCMDuplicity (recognition_models_[changed]->complete_cloud_occupancy_indices_, complete_cloud_occupancy_by_RM_, -1.f);
    sign = -1.f;
  }

  int duplicity = getDuplicity ();
  float good_info = getExplainedValue ();

  float unexplained_info = getPreviousUnexplainedValue ();
  float bad_info = static_cast<float> (getPreviousBadInfo ())
      + (recognition_models_[changed]->outliers_weight_ * static_cast<float> (recognition_models_[changed]->bad_information_)) * sign;

  setPreviousBadInfo (bad_info);

  int n_active_hyp = 0;
  for (const bool i : active) {
    if (i)
      n_active_hyp++;
  }

  float duplicity_cm = static_cast<float> (getDuplicityCM ()) * w_occupied_multiple_cm_;
  return static_cast<mets::gol_type> ((good_info - bad_info - static_cast<float> (duplicity) - unexplained_info - duplicity_cm - static_cast<float> (n_active_hyp)) * -1.f); //return the dual to our max problem
}

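/*
 * Restating the score evaluated above: after toggling hypothesis `changed`, the solution value
 * (before the final negation, which turns it into a cost for the minimizing optimizer) is
 *
 *   good_info - bad_info - duplicity - unexplained_info - duplicity_cm - n_active_hyp
 *
 * i.e. the reward for explained scene points minus penalties for model outliers, scene points
 * explained by more than one active hypothesis, clutter in the neighborhood of explained points,
 * full-model voxels occupied by several hypotheses (duplicity_cm already carries the
 * w_occupied_multiple_cm_ weight), and the number of active hypotheses itself.
 */
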
///////////////////////////////////////////////////////////////////////////////////////////////////
template<typename ModelT, typename SceneT>
void pcl::GlobalHypothesesVerification<ModelT, SceneT>::initialize()
{
  //clear stuff
  recognition_models_.clear ();
  unexplained_by_RM_neighboorhods.clear ();
  explained_by_RM_distance_weighted.clear ();
  explained_by_RM_.clear ();
  mask_.clear ();
  indices_.clear ();
  complete_cloud_occupancy_by_RM_.clear ();

  // initialize mask to false
  mask_.resize (complete_models_.size ());
  for (std::size_t i = 0; i < complete_models_.size (); i++)
    mask_[i] = false;

  indices_.resize (complete_models_.size ());

  NormalEstimator_ n3d;
  scene_normals_.reset (new pcl::PointCloud<pcl::Normal> ());

  typename pcl::search::KdTree<SceneT>::Ptr normals_tree (new pcl::search::KdTree<SceneT>);
  normals_tree->setInputCloud (scene_cloud_downsampled_);

  n3d.setRadiusSearch (radius_normals_);
  n3d.setSearchMethod (normals_tree);
  n3d.setInputCloud (scene_cloud_downsampled_);
  n3d.compute (*scene_normals_);

  //check nans...
  int j = 0;
  for (std::size_t i = 0; i < scene_normals_->size (); ++i)
  {
    if (!std::isfinite ((*scene_normals_)[i].normal_x) || !std::isfinite ((*scene_normals_)[i].normal_y)
        || !std::isfinite ((*scene_normals_)[i].normal_z))
      continue;

    (*scene_normals_)[j] = (*scene_normals_)[i];
    (*scene_cloud_downsampled_)[j] = (*scene_cloud_downsampled_)[i];

    j++;
  }

  scene_normals_->points.resize (j);
  scene_normals_->width = j;
  scene_normals_->height = 1;

  scene_cloud_downsampled_->points.resize (j);
  scene_cloud_downsampled_->width = j;
  scene_cloud_downsampled_->height = 1;

  explained_by_RM_.resize (scene_cloud_downsampled_->size (), 0);
  explained_by_RM_distance_weighted.resize (scene_cloud_downsampled_->size (), 0.f);
  unexplained_by_RM_neighboorhods.resize (scene_cloud_downsampled_->size (), 0.f);

  //compute segmentation of the scene if detect_clutter_
  if (detect_clutter_)
  {
    //initialize kdtree for search
    scene_downsampled_tree_.reset (new pcl::search::KdTree<SceneT>);
    scene_downsampled_tree_->setInputCloud (scene_cloud_downsampled_);

    std::vector<pcl::PointIndices> clusters;
    double eps_angle_threshold = 0.2;
    int min_points = 20;
    float curvature_threshold = 0.045f;

    extractEuclideanClustersSmooth<SceneT, pcl::Normal> (*scene_cloud_downsampled_, *scene_normals_, inliers_threshold_ * 2.f, scene_downsampled_tree_,
        clusters, eps_angle_threshold, curvature_threshold, min_points);

    clusters_cloud_.reset (new pcl::PointCloud<pcl::PointXYZI>);
    clusters_cloud_->points.resize (scene_cloud_downsampled_->size ());
    clusters_cloud_->width = scene_cloud_downsampled_->width;
    clusters_cloud_->height = 1;

    for (std::size_t i = 0; i < scene_cloud_downsampled_->size (); i++)
    {
      pcl::PointXYZI p;
      p.getVector3fMap () = (*scene_cloud_downsampled_)[i].getVector3fMap ();
      p.intensity = 0.f;
      (*clusters_cloud_)[i] = p;
    }

    float intens_incr = 100.f / static_cast<float> (clusters.size ());
    float intens = intens_incr;
    for (const auto &cluster : clusters)
    {
      for (const auto &vertex : cluster.indices)
      {
        (*clusters_cloud_)[vertex].intensity = intens;
      }

      intens += intens_incr;
    }
  }

  //compute cues
  {
    pcl::ScopeTime tcues ("Computing cues");
    recognition_models_.resize (complete_models_.size ());
    int valid = 0;
    for (int i = 0; i < static_cast<int> (complete_models_.size ()); i++)
    {
      //create recognition model
      recognition_models_[valid].reset (new RecognitionModel ());
      if (addModel (visible_models_[i], complete_models_[i], recognition_models_[valid])) {
        indices_[valid] = i;
        valid++;
      }
    }

    recognition_models_.resize (valid);
    indices_.resize (valid);
  }

  //compute the bounding boxes for the models
  ModelT min_pt_all, max_pt_all;
  min_pt_all.x = min_pt_all.y = min_pt_all.z = std::numeric_limits<float>::max ();
  max_pt_all.x = max_pt_all.y = max_pt_all.z = (std::numeric_limits<float>::max () - 0.001f) * -1;

  for (std::size_t i = 0; i < recognition_models_.size (); i++)
  {
    ModelT min_pt, max_pt;
    pcl::getMinMax3D (*complete_models_[indices_[i]], min_pt, max_pt);
    if (min_pt.x < min_pt_all.x)
      min_pt_all.x = min_pt.x;

    if (min_pt.y < min_pt_all.y)
      min_pt_all.y = min_pt.y;

    if (min_pt.z < min_pt_all.z)
      min_pt_all.z = min_pt.z;

    if (max_pt.x > max_pt_all.x)
      max_pt_all.x = max_pt.x;

    if (max_pt.y > max_pt_all.y)
      max_pt_all.y = max_pt.y;

    if (max_pt.z > max_pt_all.z)
      max_pt_all.z = max_pt.z;
  }

  int size_x, size_y, size_z;
  size_x = static_cast<int> (std::ceil (std::abs (max_pt_all.x - min_pt_all.x) / res_occupancy_grid_)) + 1;
  size_y = static_cast<int> (std::ceil (std::abs (max_pt_all.y - min_pt_all.y) / res_occupancy_grid_)) + 1;
  size_z = static_cast<int> (std::ceil (std::abs (max_pt_all.z - min_pt_all.z) / res_occupancy_grid_)) + 1;

  complete_cloud_occupancy_by_RM_.resize (size_x * size_y * size_z, 0);

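  // Fill the occupancy grid: each hypothesis marks every voxel touched by its complete (not just
  // visible) model cloud exactly once; the `banned` map prevents counting a voxel twice for the
  // same model. Voxels claimed by several hypotheses later feed the duplicity_cm penalty used in
  // SAOptimize() and evaluateSolution().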
  for (std::size_t i = 0; i < recognition_models_.size (); i++)
  {

    std::map<int, bool> banned;
    std::map<int, bool>::iterator banned_it;

    for (const auto& point: *complete_models_[indices_[i]])
    {
      const int pos_x = static_cast<int> (std::floor ((point.x - min_pt_all.x) / res_occupancy_grid_));
      const int pos_y = static_cast<int> (std::floor ((point.y - min_pt_all.y) / res_occupancy_grid_));
      const int pos_z = static_cast<int> (std::floor ((point.z - min_pt_all.z) / res_occupancy_grid_));

      const int idx = pos_z * size_x * size_y + pos_y * size_x + pos_x;
      banned_it = banned.find (idx);
      if (banned_it == banned.end ())
      {
        complete_cloud_occupancy_by_RM_[idx]++;
        recognition_models_[i]->complete_cloud_occupancy_indices_.push_back (idx);
        banned[idx] = true;
      }
    }
  }

  {
    pcl::ScopeTime tcues ("Computing clutter cues");
#pragma omp parallel for \
  default(none) \
  schedule(dynamic, 4) \
  num_threads(omp_get_num_procs())
    for (int j = 0; j < static_cast<int> (recognition_models_.size ()); j++)
      computeClutterCue (recognition_models_[j]);
  }

  cc_.clear ();
  n_cc_ = 1;
  cc_.resize (n_cc_);
  for (std::size_t i = 0; i < recognition_models_.size (); i++)
    cc_[0].push_back (static_cast<int> (i));

}

template<typename ModelT, typename SceneT>
void pcl::GlobalHypothesesVerification<ModelT, SceneT>::SAOptimize(std::vector<int> & cc_indices, std::vector<bool> & initial_solution)
{

  //temporary copy of recognition_models_
  std::vector<RecognitionModelPtr> recognition_models_copy;
  recognition_models_copy = recognition_models_;

  recognition_models_.clear ();

  for (const int &cc_index : cc_indices)
  {
    recognition_models_.push_back (recognition_models_copy[cc_index]);
  }

  for (std::size_t j = 0; j < recognition_models_.size (); j++)
  {
    RecognitionModelPtr recog_model = recognition_models_[j];
    for (std::size_t i = 0; i < recog_model->explained_.size (); i++)
    {
      explained_by_RM_[recog_model->explained_[i]]++;
      explained_by_RM_distance_weighted[recog_model->explained_[i]] += recog_model->explained_distances_[i];
    }

    if (detect_clutter_)
    {
      for (std::size_t i = 0; i < recog_model->unexplained_in_neighborhood.size (); i++)
      {
        unexplained_by_RM_neighboorhods[recog_model->unexplained_in_neighborhood[i]] += recog_model->unexplained_in_neighborhood_weights[i];
      }
    }
  }

  int occupied_multiple = 0;
  for (std::size_t i = 0; i < complete_cloud_occupancy_by_RM_.size (); i++) {
    if (complete_cloud_occupancy_by_RM_[i] > 1) {
      occupied_multiple += complete_cloud_occupancy_by_RM_[i];
    }
  }

  setPreviousDuplicityCM (occupied_multiple);
  //do optimization
  //Define model SAModel, initial solution is all models activated

  int duplicity;
  float good_information_ = getTotalExplainedInformation (explained_by_RM_, explained_by_RM_distance_weighted, &duplicity);
  float bad_information_ = 0;
  float unexplained_in_neighboorhod = getUnexplainedInformationInNeighborhood (unexplained_by_RM_neighboorhods, explained_by_RM_);

  for (std::size_t i = 0; i < initial_solution.size (); i++)
  {
    if (initial_solution[i])
      bad_information_ += recognition_models_[i]->outliers_weight_ * static_cast<float> (recognition_models_[i]->bad_information_);
  }

  setPreviousExplainedValue (good_information_);
  setPreviousDuplicity (duplicity);
  setPreviousBadInfo (bad_information_);
  setPreviousUnexplainedValue (unexplained_in_neighboorhod);

  SAModel model;
  model.cost_ = static_cast<mets::gol_type> ((good_information_ - bad_information_
      - static_cast<float> (duplicity)
      - static_cast<float> (occupied_multiple) * w_occupied_multiple_cm_
      - static_cast<float> (recognition_models_.size ())
      - unexplained_in_neighboorhod) * -1.f);

  model.setSolution (initial_solution);
  model.setOptimizer (this);
  SAModel best (model);

  move_manager neigh (static_cast<int> (cc_indices.size ()));

  mets::best_ever_solution best_recorder (best);
  mets::noimprove_termination_criteria noimprove (max_iterations_);
  mets::linear_cooling linear_cooling;
  mets::simulated_annealing<move_manager> sa (model, best_recorder, neigh, noimprove, linear_cooling, initial_temp_, 1e-7, 2);
  sa.setApplyAndEvaluate (true);

  {
    pcl::ScopeTime t ("SA search...");
    sa.search ();
  }

  best_seen_ = static_cast<const SAModel&> (best_recorder.best_seen ());
  for (std::size_t i = 0; i < best_seen_.solution_.size (); i++)
  {
    initial_solution[i] = best_seen_.solution_[i];
  }

  recognition_models_ = recognition_models_copy;

}

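/*
 * Note on the annealing setup above: the search starts from the all-active solution, records the
 * best solution ever visited (mets::best_ever_solution), cools linearly from initial_temp_, and
 * stops after max_iterations_ non-improving iterations (mets::noimprove_termination_criteria);
 * the recorded best activation mask is then copied back into initial_solution.
 */
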
///////////////////////////////////////////////////////////////////////////////////////////////////
template<typename ModelT, typename SceneT>
void pcl::GlobalHypothesesVerification<ModelT, SceneT>::verify()
{
  initialize ();

  //for each connected component, find the optimal solution
  for (int c = 0; c < n_cc_; c++)
  {
    //TODO: Check for trivial case...
    //TODO: Check also the number of hypotheses and use exhaustive enumeration if smaller than 10
    std::vector<bool> subsolution (cc_[c].size (), true);
    SAOptimize (cc_[c], subsolution);
    for (std::size_t i = 0; i < subsolution.size (); i++)
    {
      mask_[indices_[cc_[c][i]]] = (subsolution[i]);
    }
  }
}

template<typename ModelT, typename SceneT>
bool pcl::GlobalHypothesesVerification<ModelT, SceneT>::addModel(typename pcl::PointCloud<ModelT>::ConstPtr & model,
    typename pcl::PointCloud<ModelT>::ConstPtr & complete_model, RecognitionModelPtr & recog_model)
{
  //voxelize model cloud
  recog_model->cloud_.reset (new pcl::PointCloud<ModelT> ());
  recog_model->complete_cloud_.reset (new pcl::PointCloud<ModelT> ());

  float size_model = resolution_;
  pcl::VoxelGrid<ModelT> voxel_grid;
  voxel_grid.setInputCloud (model);
  voxel_grid.setLeafSize (size_model, size_model, size_model);
  voxel_grid.filter (*(recog_model->cloud_));

  pcl::VoxelGrid<ModelT> voxel_grid2;
  voxel_grid2.setInputCloud (complete_model);
  voxel_grid2.setLeafSize (size_model, size_model, size_model);
  voxel_grid2.filter (*(recog_model->complete_cloud_));

  {
    //check nans...
    int j = 0;
    for (auto& point: *(recog_model->cloud_))
    {
      if (!isXYZFinite (point))
        continue;

      (*recog_model->cloud_)[j] = point;
      j++;
    }

    recog_model->cloud_->points.resize (j);
    recog_model->cloud_->width = j;
    recog_model->cloud_->height = 1;
  }

  if (recog_model->cloud_->points.empty ())
  {
    PCL_WARN("The model cloud has no points..\n");
    return false;
  }

  //compute normals unless given (now do it always...)
  typename pcl::search::KdTree<ModelT>::Ptr normals_tree (new pcl::search::KdTree<ModelT>);
  pcl::NormalEstimation<ModelT, pcl::Normal> n3d;
  recog_model->normals_.reset (new pcl::PointCloud<pcl::Normal> ());
  normals_tree->setInputCloud (recog_model->cloud_);
  n3d.setRadiusSearch (radius_normals_);
  n3d.setSearchMethod (normals_tree);
  n3d.setInputCloud ((recog_model->cloud_));
  n3d.compute (*(recog_model->normals_));

  //check nans...
  int j = 0;
  for (std::size_t i = 0; i < recog_model->normals_->size (); ++i)
  {
    if (!isNormalFinite ((*recog_model->normals_)[i]))
      continue;

    (*recog_model->normals_)[j] = (*recog_model->normals_)[i];
    (*recog_model->cloud_)[j] = (*recog_model->cloud_)[i];
    j++;
  }

  recog_model->normals_->points.resize (j);
  recog_model->normals_->width = j;
  recog_model->normals_->height = 1;

  recog_model->cloud_->points.resize (j);
  recog_model->cloud_->width = j;
  recog_model->cloud_->height = 1;

  std::vector<int> explained_indices;
  std::vector<float> outliers_weight;
  std::vector<float> explained_indices_distances;

  std::vector<int> nn_indices;
  std::vector<float> nn_distances;

  std::map<int, std::shared_ptr<std::vector<std::pair<int, float>>>> model_explains_scene_points; //which scene point i is explained by which model points j_k at distance d_k

  outliers_weight.resize (recog_model->cloud_->size ());
  recog_model->outlier_indices_.resize (recog_model->cloud_->size ());

  std::size_t o = 0;
  for (std::size_t i = 0; i < recog_model->cloud_->size (); i++)
  {
    if (!scene_downsampled_tree_->radiusSearch ((*recog_model->cloud_)[i], inliers_threshold_, nn_indices, nn_distances, std::numeric_limits<int>::max ()))
    {
      //outlier
      outliers_weight[o] = regularizer_;
      recog_model->outlier_indices_[o] = static_cast<int> (i);
      o++;
    } else
    {
      for (std::size_t k = 0; k < nn_distances.size (); k++)
      {
        std::pair<int, float> pair = std::make_pair (i, nn_distances[k]); //i is an index to a model point, paired with its distance
        auto it = model_explains_scene_points.find (nn_indices[k]);
        if (it == model_explains_scene_points.end ())
        {
          std::shared_ptr<std::vector<std::pair<int, float>>> vec (new std::vector<std::pair<int, float>> ());
          vec->push_back (pair);
          model_explains_scene_points[nn_indices[k]] = vec;
        } else
        {
          it->second->push_back (pair);
        }
      }
    }
  }

  outliers_weight.resize (o);
  recog_model->outlier_indices_.resize (o);

  recog_model->outliers_weight_ = (std::accumulate (outliers_weight.begin (), outliers_weight.end (), 0.f) / static_cast<float> (outliers_weight.size ()));
  if (outliers_weight.empty ())
    recog_model->outliers_weight_ = 1.f;

  pcl::IndicesPtr indices_scene (new std::vector<int>);
  //go through the map and keep the closest model point in case that several model points explain a scene point

  int p = 0;

  for (auto it = model_explains_scene_points.cbegin (); it != model_explains_scene_points.cend (); it++, p++)
  {
    std::size_t closest = 0;
    float min_d = std::numeric_limits<float>::max ();
    for (std::size_t i = 0; i < it->second->size (); i++)
    {
      if (it->second->at (i).second < min_d)
      {
        min_d = it->second->at (i).second;
        closest = i;
      }
    }

    float d = it->second->at (closest).second;
    float d_weight = -(d * d / (inliers_threshold_)) + 1;

    //it->first is index to scene point
    //using normals to weight inliers
    Eigen::Vector3f scene_p_normal = (*scene_normals_)[it->first].getNormalVector3fMap ();
    Eigen::Vector3f model_p_normal =
        (*recog_model->normals_)[it->second->at (closest).first].getNormalVector3fMap ();
    float dotp = scene_p_normal.dot (model_p_normal) * 1.f; //[-1,1] from antiparallel through perpendicular to parallel

    if (dotp < 0.f)
      dotp = 0.f;

    explained_indices.push_back (it->first);
    explained_indices_distances.push_back (d_weight * dotp);

  }

  recog_model->bad_information_ = static_cast<int> (recog_model->outlier_indices_.size ());
  recog_model->explained_ = explained_indices;
  recog_model->explained_distances_ = explained_indices_distances;

  return true;
}

template<typename ModelT, typename SceneT>
void pcl::GlobalHypothesesVerification<ModelT, SceneT>::computeClutterCue(RecognitionModelPtr & recog_model)
{
  if (detect_clutter_)
  {

    float rn_sqr = radius_neighborhood_GO_ * radius_neighborhood_GO_;
    std::vector<int> nn_indices;
    std::vector<float> nn_distances;

    std::vector<std::pair<int, int>> neighborhood_indices; //first is the index of a scene point, second the index into explained_ of the scene point that pulled it in
    for (int i = 0; i < static_cast<int> (recog_model->explained_.size ()); i++)
    {
      if (scene_downsampled_tree_->radiusSearch ((*scene_cloud_downsampled_)[recog_model->explained_[i]], radius_neighborhood_GO_, nn_indices,
          nn_distances, std::numeric_limits<int>::max ()))
      {
        for (std::size_t k = 0; k < nn_distances.size (); k++)
        {
          if (nn_indices[k] != i)
            neighborhood_indices.emplace_back (nn_indices[k], i);
        }
      }
    }

    //sort neighborhood indices by id
    std::sort (neighborhood_indices.begin (), neighborhood_indices.end (),
        [] (const auto& p1, const auto& p2) { return p1.first < p2.first; });

    //erase duplicated unexplained points
    neighborhood_indices.erase (
        std::unique (neighborhood_indices.begin (), neighborhood_indices.end (),
            [] (const auto& p1, const auto& p2) { return p1.first == p2.first; }), neighborhood_indices.end ());

    //sort explained points
    std::vector<int> exp_idces (recog_model->explained_);
    std::sort (exp_idces.begin (), exp_idces.end ());

    recog_model->unexplained_in_neighborhood.resize (neighborhood_indices.size ());
    recog_model->unexplained_in_neighborhood_weights.resize (neighborhood_indices.size ());

    std::size_t p = 0;
    std::size_t j = 0;
    for (const auto &neighborhood_index : neighborhood_indices)
    {
      if ((j < exp_idces.size ()) && (neighborhood_index.first == exp_idces[j]))
      {
        //this index is explained by the hypothesis so ignore it, advance j
        j++;
      } else
      {
        //indices_in_nb[i] < exp_idces[j]
        //recog_model->unexplained_in_neighborhood.push_back(neighborhood_indices[i]);
        recog_model->unexplained_in_neighborhood[p] = neighborhood_index.first;

        if ((*clusters_cloud_)[recog_model->explained_[neighborhood_index.second]].intensity != 0.f
            && ((*clusters_cloud_)[recog_model->explained_[neighborhood_index.second]].intensity
                == (*clusters_cloud_)[neighborhood_index.first].intensity))
        {

          recog_model->unexplained_in_neighborhood_weights[p] = clutter_regularizer_;

        } else
        {
          //neighborhood_indices[i].first gives the index to the scene point and second to the explained scene point by the model causing this...
          //calculate weight of this clutter point based on the distance of the scene point and the model point causing it
          float d = static_cast<float> (pow (
              ((*scene_cloud_downsampled_)[recog_model->explained_[neighborhood_index.second]].getVector3fMap ()
                  - (*scene_cloud_downsampled_)[neighborhood_index.first].getVector3fMap ()).norm (), 2));
          float d_weight = -(d / rn_sqr) + 1; //points that are close have a strong weight

          //using normals to weight clutter points
          Eigen::Vector3f scene_p_normal = (*scene_normals_)[neighborhood_index.first].getNormalVector3fMap ();
          Eigen::Vector3f model_p_normal = (*scene_normals_)[recog_model->explained_[neighborhood_index.second]].getNormalVector3fMap ();
          float dotp = scene_p_normal.dot (model_p_normal); //[-1,1] from antiparallel through perpendicular to parallel

          if (dotp < 0)
            dotp = 0.f;

          recog_model->unexplained_in_neighborhood_weights[p] = d_weight * dotp;
        }
        p++;
      }
    }

    recog_model->unexplained_in_neighborhood_weights.resize (p);
    recog_model->unexplained_in_neighborhood.resize (p);
  }
}

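/*
 * Usage sketch: the pipeline above (initialize -> SAOptimize per connected component) is driven
 * through the public interface declared in hv_go.h / hypotheses_verification.h. A minimal example,
 * with setter names as used in PCL's global hypothesis verification tutorial and placeholder
 * variables (`scene`, `aligned_hypotheses`); consult those headers for the full set of parameters:
 *
 *   pcl::GlobalHypothesesVerification<pcl::PointXYZ, pcl::PointXYZ> go_hv;
 *   go_hv.setSceneCloud (scene);                 // pcl::PointCloud<pcl::PointXYZ>::Ptr
 *   go_hv.addModels (aligned_hypotheses, true);  // std::vector<pcl::PointCloud<pcl::PointXYZ>::ConstPtr>,
 *                                                // true enables occlusion reasoning
 *   go_hv.setInlierThreshold (0.005f);
 *   go_hv.setDetectClutter (true);
 *   go_hv.verify ();                             // runs initialize() and SAOptimize() above
 *
 *   std::vector<bool> hypotheses_mask;
 *   go_hv.getMask (hypotheses_mask);             // hypotheses_mask[i] == true -> hypothesis i kept
 */
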
#define PCL_INSTANTIATE_GoHV(T1,T2) template class PCL_EXPORTS pcl::GlobalHypothesesVerification<T1,T2>;

#endif /* PCL_RECOGNITION_IMPL_HV_GO_HPP_ */