Point Cloud Library (PCL) 1.14.1
integral_image_normal.h
/*
 * Software License Agreement (BSD License)
 *
 *  Point Cloud Library (PCL) - www.pointclouds.org
 *  Copyright (c) 2010-2011, Willow Garage, Inc.
 *  Copyright (c) 2012-, Open Perception, Inc.
 *
 *  All rights reserved.
 *
 *  Redistribution and use in source and binary forms, with or without
 *  modification, are permitted provided that the following conditions
 *  are met:
 *
 *   * Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 *   * Redistributions in binary form must reproduce the above
 *     copyright notice, this list of conditions and the following
 *     disclaimer in the documentation and/or other materials provided
 *     with the distribution.
 *   * Neither the name of the copyright holder(s) nor the names of its
 *     contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 *  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 *  "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 *  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 *  FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 *  COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 *  INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 *  BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 *  LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 *  CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 *  LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
 *  ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 *  POSSIBILITY OF SUCH DAMAGE.
 *
 */

#pragma once

#include <pcl/memory.h>
#include <pcl/pcl_macros.h>
#include <pcl/point_cloud.h>
#include <pcl/features/feature.h>
#include <pcl/features/integral_image2D.h>

namespace pcl
{
  /** \brief Surface normal estimation on organized data using integral images.
    *
    * For detailed information about this method see:
    *
    * S. Holzer, R. B. Rusu, M. Dixon, S. Gedikli, and N. Navab,
    * Adaptive Neighborhood Selection for Real-Time Surface Normal Estimation
    * from Organized Point Cloud Data Using Integral Images, IROS 2012.
    *
    * D. Holz, S. Holzer, R. B. Rusu, and S. Behnke (2011, July).
    * Real-Time Plane Segmentation using RGB-D Cameras. In Proceedings of
    * the 15th RoboCup International Symposium, Istanbul, Turkey.
    * http://www.ais.uni-bonn.de/~holz/papers/holz_2011_robocup.pdf
    *
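    * A minimal usage sketch (illustrative only; the PCD file name, the point
    * types, and the parameter values below are placeholders, not prescribed by
    * this header):
    * \code
    * #include <pcl/io/pcd_io.h>
    * #include <pcl/features/integral_image_normal.h>
    *
    * pcl::PointCloud<pcl::PointXYZ>::Ptr cloud (new pcl::PointCloud<pcl::PointXYZ>);
    * pcl::io::loadPCDFile ("organized_scene.pcd", *cloud); // must be an organized cloud (height > 1)
    *
    * pcl::IntegralImageNormalEstimation<pcl::PointXYZ, pcl::Normal> ne;
    * ne.setNormalEstimationMethod (ne.AVERAGE_3D_GRADIENT);
    * ne.setMaxDepthChangeFactor (0.02f);
    * ne.setNormalSmoothingSize (10.0f);
    * ne.setInputCloud (cloud);
    *
    * pcl::PointCloud<pcl::Normal> normals;
    * ne.compute (normals);
    * \endcode
    *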
    * \author Stefan Holzer
    */
  template <typename PointInT, typename PointOutT>
  class IntegralImageNormalEstimation: public Feature<PointInT, PointOutT>
  {
    using Feature<PointInT, PointOutT>::input_;
    using Feature<PointInT, PointOutT>::feature_name_;
    using Feature<PointInT, PointOutT>::tree_;
    using Feature<PointInT, PointOutT>::k_;
    using Feature<PointInT, PointOutT>::indices_;

    public:
      using Ptr = shared_ptr<IntegralImageNormalEstimation<PointInT, PointOutT> >;
      using ConstPtr = shared_ptr<const IntegralImageNormalEstimation<PointInT, PointOutT> >;

      /** \brief Different types of border handling. */
      enum BorderPolicy
      {
        BORDER_POLICY_IGNORE,
        BORDER_POLICY_MIRROR
      };

      /** \brief Different normal estimation methods.
        * <ul>
        *   <li><b>COVARIANCE_MATRIX</b> - creates 9 integral images to compute the normal for a specific point
        *   from the covariance matrix of its local neighborhood.</li>
        *   <li><b>AVERAGE_3D_GRADIENT</b> - creates 6 integral images to compute smoothed versions of
        *   horizontal and vertical 3D gradients and computes the normals using the cross-product between these
        *   two gradients.</li>
        *   <li><b>AVERAGE_DEPTH_CHANGE</b> - creates only a single integral image and computes the normals
        *   from the average depth changes.</li>
        * </ul>
        */
      enum NormalEstimationMethod
      {
        COVARIANCE_MATRIX,
        AVERAGE_3D_GRADIENT,
        AVERAGE_DEPTH_CHANGE,
        SIMPLE_3D_GRADIENT
      };

      using PointCloudIn = typename Feature<PointInT, PointOutT>::PointCloudIn;
      using PointCloudOut = typename Feature<PointInT, PointOutT>::PointCloudOut;

      /** \brief Constructor */
      IntegralImageNormalEstimation ()
        : normal_estimation_method_ (AVERAGE_3D_GRADIENT)
        , border_policy_ (BORDER_POLICY_IGNORE)
        , integral_image_DX_ (false)
        , integral_image_DY_ (false)
        , integral_image_depth_ (false)
        , integral_image_XYZ_ (true)
        , max_depth_change_factor_ (20.0f*0.001f)
      {
        feature_name_ = "IntegralImagesNormalEstimation";
        tree_.reset ();
        k_ = 1;
      }

      /** \brief Destructor */
      ~IntegralImageNormalEstimation () override;

      /** \brief Set the region size which is considered for normal estimation.
        * \param[in] width the width of the search rectangle
        * \param[in] height the height of the search rectangle
        */
      void
      setRectSize (const int width, const int height);

      /** \brief Sets the policy for handling borders.
        * \param[in] border_policy the border policy.
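        *
        * A minimal, illustrative call (the \c ne object is assumed to be an
        * IntegralImageNormalEstimation instance, as in the class-level example):
        * \code
        * ne.setBorderPolicy (ne.BORDER_POLICY_MIRROR); // also estimate normals close to the image border
        * \endcode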
        */
      void
      setBorderPolicy (const BorderPolicy border_policy)
      {
        border_policy_ = border_policy;
      }

      /** \brief Computes the normal at the specified position.
        * \param[in] pos_x x position (pixel)
        * \param[in] pos_y y position (pixel)
        * \param[in] point_index the position index of the point
        * \param[out] normal the output estimated normal
        */
      void
      computePointNormal (const int pos_x, const int pos_y, const unsigned point_index, PointOutT &normal);

      /** \brief Computes the normal at the specified position with mirroring for border handling.
        * \param[in] pos_x x position (pixel)
        * \param[in] pos_y y position (pixel)
        * \param[in] point_index the position index of the point
        * \param[out] normal the output estimated normal
        */
      void
      computePointNormalMirror (const int pos_x, const int pos_y, const unsigned point_index, PointOutT &normal);

      /** \brief Set the depth change threshold for computing object borders.
        * \param[in] max_depth_change_factor the depth change threshold for computing object borders based on
        * depth changes
        */
      void
      setMaxDepthChangeFactor (float max_depth_change_factor)
      {
        max_depth_change_factor_ = max_depth_change_factor;
      }

      /** \brief Set the normal smoothing size.
        * \param[in] normal_smoothing_size factor which influences the size of the area used to smooth normals
        * (depth dependent if useDepthDependentSmoothing is true)
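        *
        * A typical setup (illustrative; the \c ne object is an assumed
        * IntegralImageNormalEstimation instance):
        * \code
        * ne.setNormalSmoothingSize (10.0f);    // smooth over an area of roughly 10 pixels
        * ne.setDepthDependentSmoothing (true); // scale the smoothing area with distance
        * \endcode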
        */
      void
      setNormalSmoothingSize (float normal_smoothing_size)
      {
        if (normal_smoothing_size < 2.0f)
        {
          PCL_ERROR ("[pcl::%s::setNormalSmoothingSize] Invalid normal smoothing size given! (%g). Must be at least 2. Defaulting to %g.\n",
                     feature_name_.c_str (), normal_smoothing_size, normal_smoothing_size_);
          return;
        }
        normal_smoothing_size_ = normal_smoothing_size;
      }

      /** \brief Set the normal estimation method. The currently implemented algorithms are:
        * <ul>
        *   <li><b>COVARIANCE_MATRIX</b> - creates 9 integral images to compute the normal for a specific point
        *   from the covariance matrix of its local neighborhood.</li>
        *   <li><b>AVERAGE_3D_GRADIENT</b> - creates 6 integral images to compute smoothed versions of
        *   horizontal and vertical 3D gradients and computes the normals using the cross-product between these
        *   two gradients.</li>
        *   <li><b>AVERAGE_DEPTH_CHANGE</b> - creates only a single integral image and computes the normals
        *   from the average depth changes.</li>
        * </ul>
        * \param[in] normal_estimation_method the method used for normal estimation
        */
      void
      setNormalEstimationMethod (NormalEstimationMethod normal_estimation_method)
      {
        normal_estimation_method_ = normal_estimation_method;
      }

      /** \brief Set whether to use depth dependent smoothing or not.
        * \param[in] use_depth_dependent_smoothing decides whether the smoothing is depth dependent
        */
      void
      setDepthDependentSmoothing (bool use_depth_dependent_smoothing)
      {
        use_depth_dependent_smoothing_ = use_depth_dependent_smoothing;
      }

      /** \brief Provide a pointer to the input dataset (overwrites the PCLBase::setInputCloud method).
        * \param[in] cloud the const shared pointer to a PointCloud message
        */
      inline void
      setInputCloud (const typename PointCloudIn::ConstPtr &cloud) override
      {
        input_ = cloud;
        if (!cloud->isOrganized ())
        {
          PCL_ERROR ("[pcl::IntegralImageNormalEstimation::setInputCloud] Input dataset is not organized (height = 1).\n");
          return;
        }

        init_covariance_matrix_ = init_average_3d_gradient_ = init_depth_change_ = false;

        if (use_sensor_origin_)
        {
          vpx_ = input_->sensor_origin_.coeff (0);
          vpy_ = input_->sensor_origin_.coeff (1);
          vpz_ = input_->sensor_origin_.coeff (2);
        }

        // Initialize the correct data structure based on the normal estimation method chosen
        initData ();
      }

      /** \brief Returns a pointer to the distance map which was computed internally. */
      inline float*
      getDistanceMap ()
      {
        return (distance_map_);
      }

      /** \brief Set the viewpoint.
        * \param vpx the X coordinate of the viewpoint
        * \param vpy the Y coordinate of the viewpoint
        * \param vpz the Z coordinate of the viewpoint
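        *
        * Illustrative use (the \c ne object is an assumed IntegralImageNormalEstimation
        * instance; the coordinates are placeholder values):
        * \code
        * ne.setViewPoint (0.0f, 0.0f, 1.5f); // flip normals towards a viewpoint 1.5 m above the origin
        * // ...or fall back to the sensor origin stored in the input cloud:
        * ne.useSensorOriginAsViewPoint ();
        * \endcode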
        */
      inline void
      setViewPoint (float vpx, float vpy, float vpz)
      {
        vpx_ = vpx;
        vpy_ = vpy;
        vpz_ = vpz;
        use_sensor_origin_ = false;
      }

      /** \brief Get the viewpoint.
        * \param[out] vpx x-coordinate of the view point
        * \param[out] vpy y-coordinate of the view point
        * \param[out] vpz z-coordinate of the view point
        * \note this method returns the currently used viewpoint for normal flipping.
        * If the viewpoint is set manually using the setViewPoint method, this method will return the set view point coordinates.
        * If an input cloud is set, it will return the sensor origin; otherwise it will return the origin (0, 0, 0).
        */
      inline void
      getViewPoint (float &vpx, float &vpy, float &vpz)
      {
        vpx = vpx_;
        vpy = vpy_;
        vpz = vpz_;
      }

      /** \brief Sets the sensor origin of the input cloud as the viewpoint. After calling this method,
        * the normal estimation uses the sensor origin of the input cloud as the viewpoint.
        * To use a user-defined viewpoint instead, use the method setViewPoint.
        */
      inline void
      useSensorOriginAsViewPoint ()
      {
        use_sensor_origin_ = true;
        if (input_)
        {
          vpx_ = input_->sensor_origin_.coeff (0);
          vpy_ = input_->sensor_origin_.coeff (1);
          vpz_ = input_->sensor_origin_.coeff (2);
        }
        else
        {
          vpx_ = 0;
          vpy_ = 0;
          vpz_ = 0;
        }
      }

    protected:

      /** \brief Computes the normal for the complete cloud or only \a indices_ if provided.
        * \param[out] output the resultant normals
        */
      void
      computeFeature (PointCloudOut &output) override;

      /** \brief Computes the normal for the complete cloud.
        * \param[in] distance_map distance map
        * \param[in] bad_point constant given to invalid normal components
        * \param[out] output the resultant normals
        */
      void
      computeFeatureFull (const float* distance_map, const float& bad_point, PointCloudOut& output);

      /** \brief Computes the normal for part of the cloud specified by \a indices_.
        * \param[in] distance_map distance map
        * \param[in] bad_point constant given to invalid normal components
        * \param[out] output the resultant normals
        */
      void
      computeFeaturePart (const float* distance_map, const float& bad_point, PointCloudOut& output);

      /** \brief Initialize the data structures, based on the normal estimation method chosen. */
      void
      initData ();

    private:

      /** \brief Flip (in place) the estimated normal of a point towards a given viewpoint.
        * \param point a given point
        * \param vp_x the X coordinate of the viewpoint
        * \param vp_y the Y coordinate of the viewpoint
        * \param vp_z the Z coordinate of the viewpoint
        * \param nx the resultant X component of the plane normal
        * \param ny the resultant Y component of the plane normal
        * \param nz the resultant Z component of the plane normal
        * \ingroup features
        */
      inline void
      flipNormalTowardsViewpoint (const PointInT &point,
                                  float vp_x, float vp_y, float vp_z,
                                  float &nx, float &ny, float &nz)
      {
        // See if we need to flip any plane normals
        vp_x -= point.x;
        vp_y -= point.y;
        vp_z -= point.z;

        // Dot product between the (viewpoint - point) and the plane normal
        float cos_theta = (vp_x * nx + vp_y * ny + vp_z * nz);

        // Flip the plane normal
        if (cos_theta < 0)
        {
          nx *= -1;
          ny *= -1;
          nz *= -1;
        }
      }

      /** \brief The normal estimation method to use. Currently, 3 implementations are provided:
        *
        * - COVARIANCE_MATRIX
        * - AVERAGE_3D_GRADIENT
        * - AVERAGE_DEPTH_CHANGE
        */
      NormalEstimationMethod normal_estimation_method_;

      /** \brief The policy for handling borders. */
      BorderPolicy border_policy_;

      /** The width of the neighborhood region used for computing the normal. */
      int rect_width_{0};
      int rect_width_2_{0};
      int rect_width_4_{0};
      /** The height of the neighborhood region used for computing the normal. */
      int rect_height_{0};
      int rect_height_2_{0};
      int rect_height_4_{0};

      /** the threshold used to detect depth discontinuities */
      float distance_threshold_{0.0f};

      /** integral image in x-direction */
      IntegralImage2D<float, 3> integral_image_DX_;
      /** integral image in y-direction */
      IntegralImage2D<float, 3> integral_image_DY_;
      /** integral image */
      IntegralImage2D<float, 1> integral_image_depth_;
      /** integral image xyz */
      IntegralImage2D<float, 3> integral_image_XYZ_;

      /** derivatives in x-direction */
      float *diff_x_{nullptr};
      /** derivatives in y-direction */
      float *diff_y_{nullptr};

      /** depth data */
      float *depth_data_{nullptr};

      /** distance map */
      float *distance_map_{nullptr};

      /** \brief Smooth data based on depth (true/false). */
      bool use_depth_dependent_smoothing_{false};

      /** \brief Threshold for detecting depth discontinuities */
      float max_depth_change_factor_;

      /** \brief Factor which influences the size of the area used to smooth normals. */
      float normal_smoothing_size_{10.0f};

      /** \brief True when a dataset has been received and the covariance_matrix data has been initialized. */
      bool init_covariance_matrix_{false};

      /** \brief True when a dataset has been received and the average 3d gradient data has been initialized. */
      bool init_average_3d_gradient_{false};

      /** \brief True when a dataset has been received and the simple 3d gradient data has been initialized. */
      bool init_simple_3d_gradient_{false};

      /** \brief True when a dataset has been received and the depth change data has been initialized. */
      bool init_depth_change_{false};

      /** \brief Values describing the viewpoint ("pinhole" camera model assumed). For per point viewpoints, inherit
        * from NormalEstimation and provide your own computeFeature (). By default, the viewpoint is set to 0,0,0. */
      float vpx_{0.0f}, vpy_{0.0f}, vpz_{0.0f};

      /** whether the sensor origin of the input cloud or a user given viewpoint should be used. */
      bool use_sensor_origin_{true};

      /** \brief This method should get called before starting the actual computation. */
      bool
      initCompute () override;

      /** \brief Internal initialization method for COVARIANCE_MATRIX estimation. */
      void
      initCovarianceMatrixMethod ();

      /** \brief Internal initialization method for AVERAGE_3D_GRADIENT estimation. */
      void
      initAverage3DGradientMethod ();

      /** \brief Internal initialization method for AVERAGE_DEPTH_CHANGE estimation. */
      void
      initAverageDepthChangeMethod ();

      /** \brief Internal initialization method for SIMPLE_3D_GRADIENT estimation. */
      void
      initSimple3DGradientMethod ();

    public:
      PCL_MAKE_ALIGNED_OPERATOR_NEW
  };
}

#ifdef PCL_NO_PRECOMPILE
#include <pcl/features/impl/integral_image_normal.hpp>
#endif