+ add algorithm to estimate normals of points

wmayer 2016-03-09 14:46:19 +01:00
parent 9af03d9b68
commit 6da5c23990
3 changed files with 144 additions and 6 deletions

View File

@ -53,6 +53,24 @@
#include <pcl/point_types.h>
#endif
/*
Dependencies of the pcl components:
common: none
features: common, kdtree, octree, search, (range_image)
filters: common, kdtree, octree, sample_consensus, search
geometry: common
io: common, octree
kdtree: common
keypoints: common, features, filters, kdtree, octree, search, (range_image)
octree: common
recognition: common, features, search
registration: common, features, kdtree, sample_consensus
sample_consensus: common
search: common, kdtree, octree
segmentation: common, kdtree, octree, sample_consensus, search
surface: common, kdtree, octree, search
*/
using namespace Reen;
namespace Reen {
@ -95,6 +113,9 @@ public:
add_keyword_method("filterVoxelGrid",&Module::filterVoxelGrid,
"filterVoxelGrid(dim)."
);
add_keyword_method("normalEstimation",&Module::normalEstimation,
"normalEstimation(Points)."
);
#endif
#if defined(HAVE_PCL_SEGMENTATION)
add_keyword_method("regionGrowingSegmentation",&Module::regionGrowingSegmentation,
@ -589,6 +610,35 @@ Mesh.show(m)
return Py::asObject(new Points::PointsPy(points_sample));
}
#endif
#if defined(HAVE_PCL_FILTERS)
Py::Object normalEstimation(const Py::Tuple& args, const Py::Dict& kwds)
{
PyObject *pts;
int ksearch=0;
double searchRadius=0;
static char* kwds_normals[] = {"Points", "KSearch", "SearchRadius", NULL};
if (!PyArg_ParseTupleAndKeywords(args.ptr(), kwds.ptr(), "O!|id", kwds_normals,
&(Points::PointsPy::Type), &pts,
&ksearch, &searchRadius))
throw Py::Exception();
Points::PointKernel* points = static_cast<Points::PointsPy*>(pts)->getPointKernelPtr();
std::vector<Base::Vector3d> normals;
NormalEstimation estimate(*points);
estimate.setKSearch(ksearch);
estimate.setSearchRadius(searchRadius);
estimate.perform(normals);
Py::List list;
for (std::vector<Base::Vector3d>::iterator it = normals.begin(); it != normals.end(); ++it) {
list.append(Py::Vector(*it));
}
return list;
}
#endif
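As a usage illustration, not part of the commit itself: a minimal sketch of driving the new binding from the FreeCAD Python console. The import name ReverseEngineering, the App alias and the selected document object are assumptions here, not something this diff confirms.

# Hedged sketch: assumes the module is built with HAVE_PCL_FILTERS and is
# importable as ReverseEngineering, and that the active document object is a
# Points feature whose point kernel is exposed through its Points property.
import ReverseEngineering as Reen
pts = App.ActiveDocument.ActiveObject.Points
normals = Reen.normalEstimation(Points=pts, KSearch=20)   # plain list of Base.Vector

Note that NormalEstimation::perform() in Segmentation.cpp below still runs the cloud through a z pass-through filter with limits (0, 1.5), so the returned list can be shorter than the input point set.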
#if defined(HAVE_PCL_SEGMENTATION)
Py::Object regionGrowingSegmentation(const Py::Tuple& args, const Py::Dict& kwds)
{

View File

@ -27,16 +27,23 @@
#include <Mod/Points/App/Points.h>
#include <Base/Exception.h>
#if defined(HAVE_PCL_FILTERS)
#include <pcl/filters/extract_indices.h>
#include <pcl/filters/passthrough.h>
#include <pcl/features/normal_3d.h>
#endif
#if defined(HAVE_PCL_SAMPLE_CONSENSUS)
#include <pcl/sample_consensus/method_types.h>
#include <pcl/sample_consensus/model_types.h>
#endif
#if defined(HAVE_PCL_SEGMENTATION)
#include <pcl/ModelCoefficients.h>
#include <pcl/io/pcd_io.h>
#include <pcl/point_types.h>
#include <pcl/filters/extract_indices.h>
#include <pcl/filters/passthrough.h>
#include <pcl/features/normal_3d.h>
#include <pcl/sample_consensus/method_types.h>
#include <pcl/sample_consensus/model_types.h>
#include <pcl/segmentation/sac_segmentation.h>
#endif
using namespace std;
using namespace Reen;
@ -44,6 +51,7 @@ using pcl::PointXYZ;
using pcl::PointNormal;
using pcl::PointCloud;
#if defined(HAVE_PCL_SEGMENTATION)
Segmentation::Segmentation(const Points::PointKernel& pts, std::list<std::vector<int> >& clusters)
: myPoints(pts)
, myClusters(clusters)
@ -87,7 +95,7 @@ void Segmentation::perform(int ksearch)
// Estimate point normals
ne.setSearchMethod (tree);
ne.setInputCloud (cloud_filtered);
ne.setKSearch (50);
ne.setKSearch (ksearch);
ne.compute (*cloud_normals);
// Create the segmentation object for the planar model and set all the parameters
@ -146,3 +154,52 @@ void Segmentation::perform(int ksearch)
#endif // HAVE_PCL_SEGMENTATION
// ----------------------------------------------------------------------------
#if defined (HAVE_PCL_FILTERS)
NormalEstimation::NormalEstimation(const Points::PointKernel& pts)
: myPoints(pts)
, kSearch(0)
, searchRadius(0)
{
}
void NormalEstimation::perform(std::vector<Base::Vector3d>& normals)
{
// Copy the points
pcl::PointCloud<PointXYZ>::Ptr cloud (new pcl::PointCloud<PointXYZ>);
cloud->reserve(myPoints.size());
for (Points::PointKernel::const_iterator it = myPoints.begin(); it != myPoints.end(); ++it) {
cloud->push_back(pcl::PointXYZ(it->x, it->y, it->z));
}
cloud->width = int (cloud->points.size ());
cloud->height = 1;
// Build a passthrough filter to remove spurious NaNs
pcl::PointCloud<PointXYZ>::Ptr cloud_filtered (new pcl::PointCloud<PointXYZ>);
pcl::PassThrough<PointXYZ> pass;
pass.setInputCloud (cloud);
pass.setFilterFieldName ("z");
pass.setFilterLimits (0, 1.5);
pass.filter (*cloud_filtered);
// Estimate point normals
pcl::PointCloud<pcl::Normal>::Ptr cloud_normals (new pcl::PointCloud<pcl::Normal>);
pcl::search::KdTree<PointXYZ>::Ptr tree (new pcl::search::KdTree<PointXYZ> ());
pcl::NormalEstimation<PointXYZ, pcl::Normal> ne;
ne.setSearchMethod (tree);
ne.setInputCloud (cloud_filtered);
if (kSearch > 0)
ne.setKSearch (kSearch);
if (searchRadius > 0)
ne.setRadiusSearch (searchRadius);
ne.compute (*cloud_normals);
normals.reserve(cloud_normals->size());
for (pcl::PointCloud<pcl::Normal>::const_iterator it = cloud_normals->begin(); it != cloud_normals->end(); ++it) {
normals.push_back(Base::Vector3d(it->normal_x, it->normal_y, it->normal_z));
}
}
#endif // HAVE_PCL_FILTERS

View File

@ -46,6 +46,37 @@ private:
std::list<std::vector<int> >& myClusters;
};
class NormalEstimation
{
public:
NormalEstimation(const Points::PointKernel&);
/** \brief Set the number of k nearest neighbors to use for the feature estimation.
* \param[in] k the number of k-nearest neighbors
*/
inline void
setKSearch (int k) { kSearch = k; }
/** \brief Set the sphere radius that is to be used for determining the nearest neighbors used for the feature
* estimation.
* \param[in] radius the sphere radius used as the maximum distance to consider a point a neighbor
*/
inline void
setSearchRadius (double radius)
{
searchRadius = radius;
}
/** \brief Perform the normal estimation.
* \param[out] normals the estimated normals
*/
void perform(std::vector<Base::Vector3d>& normals);
private:
const Points::PointKernel& myPoints;
int kSearch;
double searchRadius;
};
} // namespace Reen
#endif // REEN_SEGMENTATION_H
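As the doxygen comments on NormalEstimation spell out, setKSearch and setSearchRadius select between a k-nearest-neighbor and a sphere-radius neighborhood. perform() only forwards non-zero values to PCL, and PCL's feature estimation expects exactly one of the two to be set, so a caller should pass either KSearch or SearchRadius to the Python binding, not both. Continuing the hedged sketch from above (Reen and pts are assumed names):

# Radius-based neighborhood instead of k nearest neighbors; the radius is
# expressed in the same units as the point coordinates.
normals = Reen.normalEstimation(Points=pts, SearchRadius=5.0)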