Point Cloud Library (PCL)  1.4.0
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Defines
cvfh.hpp
Go to the documentation of this file.
00001 /*
00002  * Software License Agreement (BSD License)
00003  *
00004  *  Point Cloud Library (PCL) - www.pointclouds.org
00005  *  Copyright (c) 2010-2011, Willow Garage, Inc.
00006  *
00007  *  All rights reserved.
00008  *
00009  *  Redistribution and use in source and binary forms, with or without
00010  *  modification, are permitted provided that the following conditions
00011  *  are met:
00012  *
00013  *   * Redistributions of source code must retain the above copyright
00014  *     notice, this list of conditions and the following disclaimer.
00015  *   * Redistributions in binary form must reproduce the above
00016  *     copyright notice, this list of conditions and the following
00017  *     disclaimer in the documentation and/or other materials provided
00018  *     with the distribution.
00019  *   * Neither the name of Willow Garage, Inc. nor the names of its
00020  *     contributors may be used to endorse or promote products derived
00021  *     from this software without specific prior written permission.
00022  *
00023  *  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
00024  *  "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
00025  *  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
00026  *  FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
00027  *  COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
00028  *  INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
00029  *  BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
00030  *  LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
00031  *  CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
00032  *  LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
00033  *  ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
00034  *  POSSIBILITY OF SUCH DAMAGE.
00035  *
00036  * $Id: cvfh.hpp 3755 2011-12-31 23:45:30Z rusu $
00037  *
00038  */
00039 
00040 #ifndef PCL_FEATURES_IMPL_CVFH_H_
00041 #define PCL_FEATURES_IMPL_CVFH_H_
00042 
00043 #include "pcl/features/cvfh.h"
00044 #include "pcl/features/pfh.h"
00045 
00047 template<typename PointInT, typename PointNT, typename PointOutT> void
00048 pcl::CVFHEstimation<PointInT, PointNT, PointOutT>::extractEuclideanClustersSmooth (
00049     const pcl::PointCloud<pcl::PointNormal> &cloud,
00050     const pcl::PointCloud<pcl::PointNormal> &normals,
00051     float tolerance,
00052     const pcl::search::Search<pcl::PointNormal>::Ptr &tree,
00053     std::vector<pcl::PointIndices> &clusters,
00054     double eps_angle,
00055     unsigned int min_pts_per_cluster,
00056     unsigned int max_pts_per_cluster)
00057 {
00058   if (tree->getInputCloud ()->points.size () != cloud.points.size ())
00059   {
00060     PCL_ERROR ("[pcl::extractEuclideanClusters] Tree built for a different point cloud dataset (%lu) than the input cloud (%lu)!\n", (unsigned long)tree->getInputCloud ()->points.size (), (unsigned long)cloud.points.size ());
00061     return;
00062   }
00063   if (cloud.points.size () != normals.points.size ())
00064   {
00065     PCL_ERROR ("[pcl::extractEuclideanClusters] Number of points in the input point cloud (%lu) different than normals (%lu)!\n", (unsigned long)cloud.points.size (), (unsigned long)normals.points.size ());
00066     return;
00067   }
00068 
00069   // Create a bool vector of processed point indices, and initialize it to false
00070   std::vector<bool> processed (cloud.points.size (), false);
00071 
00072   std::vector<int> nn_indices;
00073   std::vector<float> nn_distances;
00074   // Process all points in the indices vector
00075   for (size_t i = 0; i < cloud.points.size (); ++i)
00076   {
00077     if (processed[i])
00078       continue;
00079 
00080     std::vector<unsigned int> seed_queue;
00081     int sq_idx = 0;
00082     seed_queue.push_back (i);
00083 
00084     processed[i] = true;
00085 
00086     while (sq_idx < (int)seed_queue.size ())
00087     {
00088       // Search for sq_idx
00089       if (!tree->radiusSearch (seed_queue[sq_idx], tolerance, nn_indices, nn_distances))
00090       {
00091         sq_idx++;
00092         continue;
00093       }
00094 
00095       for (size_t j = 1; j < nn_indices.size (); ++j) // nn_indices[0] should be sq_idx
00096       {
00097         if (processed[nn_indices[j]]) // Has this point been processed before ?
00098           continue;
00099 
00100         //processed[nn_indices[j]] = true;
00101         // [-1;1]
00102 
00103         double dot_p = normals.points[seed_queue[sq_idx]].normal[0] * normals.points[nn_indices[j]].normal[0]
00104                      + normals.points[seed_queue[sq_idx]].normal[1] * normals.points[nn_indices[j]].normal[1]
00105                      + normals.points[seed_queue[sq_idx]].normal[2] * normals.points[nn_indices[j]].normal[2];
00106 
00107         if (fabs (acos (dot_p)) < eps_angle)
00108         {
00109           processed[nn_indices[j]] = true;
00110           seed_queue.push_back (nn_indices[j]);
00111         }
00112       }
00113 
00114       sq_idx++;
00115     }
00116 
00117     // If this queue is satisfactory, add to the clusters
00118     if (seed_queue.size () >= min_pts_per_cluster && seed_queue.size () <= max_pts_per_cluster)
00119     {
00120       pcl::PointIndices r;
00121       r.indices.resize (seed_queue.size ());
00122       for (size_t j = 0; j < seed_queue.size (); ++j)
00123         r.indices[j] = seed_queue[j];
00124 
00125       std::sort (r.indices.begin (), r.indices.end ());
00126       r.indices.erase (std::unique (r.indices.begin (), r.indices.end ()), r.indices.end ());
00127 
00128       r.header = cloud.header;
00129       clusters.push_back (r); // We could avoid a copy by working directly in the vector
00130     }
00131   }
00132 }
00133 
00135 template<typename PointInT, typename PointNT, typename PointOutT> void
00136 pcl::CVFHEstimation<PointInT, PointNT, PointOutT>::filterNormalsWithHighCurvature (
00137     const pcl::PointCloud<PointNT> & cloud,
00138     std::vector<int> &indices_out,
00139     std::vector<int> &indices_in,
00140     float threshold)
00141 {
00142   indices_out.resize (cloud.points.size ());
00143   indices_in.resize (cloud.points.size ());
00144 
00145   size_t in, out;
00146   in = out = 0;
00147 
00148   for (size_t i = 0; i < cloud.points.size (); i++)
00149   {
00150     if (cloud.points[i].curvature > threshold)
00151     {
00152       indices_out[out] = i;
00153       out++;
00154     }
00155     else
00156     {
00157       indices_in[in] = i;
00158       in++;
00159     }
00160   }
00161 
00162   indices_out.resize (out);
00163   indices_in.resize (in);
00164 }
00165 
//////////////////////////////////////////////////////////////////////////////////////////////
/** \brief Estimate the Clustered Viewpoint Feature Histogram (CVFH) for the input cloud.
  *
  * Pipeline: (0) drop points whose normal curvature exceeds curv_threshold_;
  * (1a) re-estimate normals on the filtered cloud and grow smooth Euclidean clusters;
  * (1b) if clusters were found, compute one VFH signature per cluster using that
  * cluster's average normal and centroid; otherwise fall back to a single VFH
  * signature computed over all object points.
  *
  * \param output the resulting point cloud of VFH signatures (one per dominant
  *        cluster, or exactly one in the fallback case); emptied on input errors
  *
  * Requires \a normals_ and \a surface_ to be set and of equal size.
  * Side effects: appends to dominant_normals_ and centroids_dominant_orientations_
  * (the latter is cleared first; NOTE(review): dominant_normals_ is NOT cleared,
  * so repeated compute() calls accumulate normals — confirm intended).
  */
template<typename PointInT, typename PointNT, typename PointOutT> void
pcl::CVFHEstimation<PointInT, PointNT, PointOutT>::computeFeature (PointCloudOut &output)
{
  // Check if input was set
  if (!normals_)
  {
    PCL_ERROR ("[pcl::%s::computeFeature] No input dataset containing normals was given!\n", getClassName ().c_str ());
    output.width = output.height = 0;
    output.points.clear ();
    return;
  }
  // Normals must correspond 1:1 with the surface points
  if (normals_->points.size () != surface_->points.size ())
  {
    PCL_ERROR ("[pcl::%s::computeFeature] The number of points in the input dataset differs from the number of points in the dataset containing the normals!\n", getClassName ().c_str ());
    output.width = output.height = 0;
    output.points.clear ();
    return;
  }

  centroids_dominant_orientations_.clear ();

  // ---[ Step 0: remove normals with high curvature
  float curv_threshold = curv_threshold_;
  std::vector<int> indices_out;   // indices of high-curvature points (discarded)
  std::vector<int> indices_in;    // indices of low-curvature points (kept)
  filterNormalsWithHighCurvature (*normals_, indices_out, indices_in, curv_threshold);

  // Build a PointNormal cloud combining surface positions with their normals,
  // restricted to the low-curvature subset
  pcl::PointCloud<pcl::PointNormal>::Ptr filtered (new pcl::PointCloud<pcl::PointNormal> ());

  filtered->width = indices_in.size ();
  filtered->height = 1;
  filtered->points.resize (filtered->width);

  for (size_t i = 0; i < indices_in.size (); ++i)
  {
    filtered->points[i].x = surface_->points[indices_in[i]].x;
    filtered->points[i].y = surface_->points[indices_in[i]].y;
    filtered->points[i].z = surface_->points[indices_in[i]].z;

    filtered->points[i].normal[0] = normals_->points[indices_in[i]].normal[0];
    filtered->points[i].normal[1] = normals_->points[indices_in[i]].normal[1];
    filtered->points[i].normal[2] = normals_->points[indices_in[i]].normal[2];
  }

  // ---[ Step 1a : compute clustering
  // Cluster on the filtered subset only when enough points survive the curvature
  // filter; otherwise fall back to the full surface so clustering has enough data.
  // NOTE(review): `filtered` above is then redundant with this cloud in the first
  // branch — it is rebuilt identically here and `filtered` itself is never read.
  pcl::PointCloud<pcl::PointNormal>::Ptr normals_filtered_cloud (new pcl::PointCloud<pcl::PointNormal> ());
  if (indices_in.size () >= 100) //TODO: parameter
  {
    normals_filtered_cloud->width = indices_in.size ();
    normals_filtered_cloud->height = 1;
    normals_filtered_cloud->points.resize (normals_filtered_cloud->width);

    for (size_t i = 0; i < indices_in.size (); ++i)
    {
      normals_filtered_cloud->points[i].x = surface_->points[indices_in[i]].x;
      normals_filtered_cloud->points[i].y = surface_->points[indices_in[i]].y;
      normals_filtered_cloud->points[i].z = surface_->points[indices_in[i]].z;

      normals_filtered_cloud->points[i].normal[0] = normals_->points[indices_in[i]].normal[0];
      normals_filtered_cloud->points[i].normal[1] = normals_->points[indices_in[i]].normal[1];
      normals_filtered_cloud->points[i].normal[2] = normals_->points[indices_in[i]].normal[2];
    }
  }
  else
  {
    // Too few low-curvature points: use the whole surface instead
    normals_filtered_cloud->width = surface_->size ();
    normals_filtered_cloud->height = 1;
    normals_filtered_cloud->points.resize (normals_filtered_cloud->width);

    for (size_t i = 0; i < surface_->size (); ++i)
    {
      normals_filtered_cloud->points[i].x = surface_->points[i].x;
      normals_filtered_cloud->points[i].y = surface_->points[i].y;
      normals_filtered_cloud->points[i].z = surface_->points[i].z;

      normals_filtered_cloud->points[i].normal[0] = normals_->points[i].normal[0];
      normals_filtered_cloud->points[i].normal[1] = normals_->points[i].normal[1];
      normals_filtered_cloud->points[i].normal[2] = normals_->points[i].normal[2];
    }
  }

  //recompute normals normals and use them for clustering!
  // (in-place: n3d overwrites the normal fields of normals_filtered_cloud)
  KdTreePtr normals_tree_filtered (new pcl::search::KdTree<pcl::PointNormal> (false));
  normals_tree_filtered->setInputCloud (normals_filtered_cloud);

  NormalEstimator n3d;
  n3d.setRadiusSearch (radius_normals_);
  n3d.setSearchMethod (normals_tree_filtered);
  n3d.setInputCloud (normals_filtered_cloud);
  n3d.compute (*normals_filtered_cloud);

  // Grow smooth clusters over the (re-normal-ed) filtered cloud
  KdTreePtr normals_tree (new pcl::search::KdTree<pcl::PointNormal> (false));
  normals_tree->setInputCloud (normals_filtered_cloud);
  std::vector<pcl::PointIndices> clusters;

  extractEuclideanClustersSmooth (*normals_filtered_cloud, *normals_filtered_cloud, cluster_tolerance_, normals_tree,
                                  clusters, eps_angle_threshold_, min_points_);

  // Shared VFH estimator configuration for both branches below; the fallback
  // branch disables setUseGivenNormal before computing.
  VFHEstimator vfh;
  vfh.setInputCloud (surface_);
  vfh.setInputNormals (normals_);
  vfh.setSearchMethod (this->tree_);
  vfh.setUseGivenNormal (true);
  vfh.setUseGivenCentroid (true);
  vfh.setNormalizeBins (normalize_bins_);
  vfh.setNormalizeDistance (true);
  vfh.setFillSizeComponent (true);
  output.height = 1;

  // ---[ Step 1b : check if any dominant cluster was found
  if (clusters.size () > 0)
  { // ---[ Step 1b.1 : If yes, compute CVFH using the cluster information

    for (size_t i = 0; i < clusters.size (); ++i) //for each cluster
    {
      // Average the normals and positions of all points in the cluster
      Eigen::Vector4f avg_normal = Eigen::Vector4f::Zero ();
      Eigen::Vector4f avg_centroid = Eigen::Vector4f::Zero ();

      for (size_t j = 0; j < clusters[i].indices.size (); j++)
      {
        avg_normal += normals_filtered_cloud->points[clusters[i].indices[j]].getNormalVector4fMap ();
        avg_centroid += normals_filtered_cloud->points[clusters[i].indices[j]].getVector4fMap ();
      }

      avg_normal /= clusters[i].indices.size ();
      avg_centroid /= clusters[i].indices.size ();

      // NOTE(review): centroid_test is computed but never read — dead code candidate
      Eigen::Vector4f centroid_test;
      pcl::compute3DCentroid (*normals_filtered_cloud, centroid_test);
      avg_normal.normalize ();

      Eigen::Vector3f avg_norm (avg_normal[0], avg_normal[1], avg_normal[2]);
      Eigen::Vector3f avg_dominant_centroid (avg_centroid[0], avg_centroid[1], avg_centroid[2]);

      //append normal and centroid for the clusters
      dominant_normals_.push_back (avg_norm);
      centroids_dominant_orientations_.push_back (avg_dominant_centroid);
    }

    //compute modified VFH for all dominant clusters and add them to the list!
    output.points.resize (dominant_normals_.size ());
    output.width = dominant_normals_.size ();

    for (size_t i = 0; i < dominant_normals_.size (); ++i)
    {
      //configure VFH computation for CVFH: one signature per dominant orientation
      vfh.setNormalToUse (dominant_normals_[i]);
      vfh.setCentroidToUse (centroids_dominant_orientations_[i]);
      pcl::PointCloud<pcl::VFHSignature308> vfh_signature;
      vfh.compute (vfh_signature);
      output.points[i] = vfh_signature.points[0];
    }
  }
  else
  { // ---[ Step 1b.1 : If no, compute CVFH using all the object points
    Eigen::Vector4f avg_centroid;
    pcl::compute3DCentroid (*surface_, avg_centroid);
    Eigen::Vector3f cloud_centroid (avg_centroid[0], avg_centroid[1], avg_centroid[2]);
    centroids_dominant_orientations_.push_back (cloud_centroid);

    //configure VFH computation for CVFH using all object points
    vfh.setCentroidToUse (cloud_centroid);
    vfh.setUseGivenNormal (false);   // let VFH estimate the viewpoint normal itself

    pcl::PointCloud<pcl::VFHSignature308> vfh_signature;
    vfh.compute (vfh_signature);

    output.points.resize (1);
    output.width = 1;

    output.points[0] = vfh_signature.points[0];
  }
}
00340 
00341 #define PCL_INSTANTIATE_CVFHEstimation(T,NT,OutT) template class PCL_EXPORTS pcl::CVFHEstimation<T,NT,OutT>;
00342 
#endif    // PCL_FEATURES_IMPL_CVFH_H_
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Defines