Commit f406bef6 authored by Nicolas Pope

Feature/registration

parent 7e1e2795
@@ -36,7 +36,7 @@ if(${CMAKE_VERSION} VERSION_GREATER "3.12.0")
endif()
if (WITH_PCL)
find_package( PCL QUIET COMPONENTS io common visualization registration )
find_package( PCL QUIET COMPONENTS io common visualization registration segmentation)
endif()
set(CMAKE_CXX_STANDARD 17) # For PCL/VTK https://github.com/PointCloudLibrary/pcl/issues/2686
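The added segmentation component is needed by the registration code introduced in this commit: fitPlane() in registration.cpp uses pcl::SACSegmentation, which lives in PCL's segmentation module, for RANSAC plane fitting.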
......
@@ -81,6 +81,15 @@
"depth": false,
"left": false,
"right": false
},
"registration": {
"reference-source" : "ftl://utu.fi/node1/rgb-d",
"calibration" : {
"run": false,
"iterations" : 10,
"delay" : 1000,
"patternsize" : [9, 6]
}
}
}
}
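The new registration block drives the extrinsic calibration added in this commit: reference-source names the input whose coordinate frame the other sources are registered into, run toggles the chessboard calibration pass, iterations is the number of successful pattern detections to accumulate, delay is the wait between captures in milliseconds, and patternsize gives the chessboard's inner-corner counts. A minimal sketch of reading the block with nlohmann::json; the helper and its fallback defaults are illustrative only, the commit itself indexes config[...] directly inside runRegistration():

#include <nlohmann/json.hpp>
#include <string>

// Hypothetical helper: pulls the calibration settings out of the parsed config.
void readRegistrationConfig(const nlohmann::json &config) {
    const auto &reg = config.at("registration");
    std::string ref_uri = reg.at("reference-source").get<std::string>();
    const auto &calib = reg.at("calibration");
    bool run        = calib.value("run", false);      // run the chessboard calibration?
    int  iterations = calib.value("iterations", 10);  // successful detections to collect
    int  delay      = calib.value("delay", 1000);     // wait between captures, in ms
    int  cols       = calib["patternsize"][0];        // inner corners per board row
    int  rows       = calib["patternsize"][1];        // inner corners per board column
    // ... hand the values to whatever performs the calibration
}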
......
@@ -3,6 +3,8 @@
#if defined HAVE_PCL
pcl::PointCloud<pcl::PointXYZRGB>::Ptr ftl::utility::matToPointXYZ(const cv::Mat &cvcloud, const cv::Mat &rgbimage) {
pcl::PointCloud<pcl::PointXYZRGB>::Ptr point_cloud_ptr(new pcl::PointCloud<pcl::PointXYZRGB>);
point_cloud_ptr->width = cvcloud.cols * cvcloud.rows;
point_cloud_ptr->height = 1;
for(int i=0;i<cvcloud.rows;i++) {
for(int j=0;j<cvcloud.cols;j++) {
@@ -27,8 +29,6 @@ pcl::PointCloud<pcl::PointXYZRGB>::Ptr ftl::utility::matToPointXYZ(const cv::Mat
point_cloud_ptr -> points.push_back(point);
}
}
point_cloud_ptr->width = cvcloud.cols;
point_cloud_ptr->height = cvcloud.rows;
return point_cloud_ptr;
}
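matToPointXYZ() now declares the output as an unorganized cloud up front (width = cols * rows, height = 1) instead of overwriting width and height after the loop, which had labelled the flat list of push_back'ed points as an organized cols x rows cloud. Since pcl::PointCloud::push_back keeps width equal to the point count and height at 1, the early assignment mostly documents intent. A natural follow-up, not part of the commit, would be to reserve the storage before the loop:

    point_cloud_ptr->points.reserve(static_cast<size_t>(cvcloud.rows) * cvcloud.cols);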
......
@@ -2,9 +2,9 @@
include_directories(${PROJECT_SOURCE_DIR}/reconstruct/include)
#include_directories(${PROJECT_BINARY_DIR})
set(REPSRC
src/main.cpp
src/registration.cpp
)
add_executable(ftl-reconstruct ${REPSRC})
......
#ifndef _FTL_RECONSTRUCT_REGISTRATION_HPP_
#define _FTL_RECONSTRUCT_REGISTRATION_HPP_
#include <ftl/config.h>
#include <opencv2/opencv.hpp>
#ifdef HAVE_PCL
#include <pcl/common/common_headers.h>
#include <pcl/point_cloud.h>
namespace ftl {
namespace registration {
/* Find transformation matrix for transforming clouds_source to clouds_target
* Assumes that corresponding points in clouds_source[i] and clouds_target[i] have the same indices
*/
Eigen::Matrix4f findTransformation( std::vector<pcl::PointCloud<pcl::PointXYZ>::Ptr> clouds_source,
std::vector<pcl::PointCloud<pcl::PointXYZ>::Ptr> clouds_target);
/* Convert chessboard corners found with OpenCV's findChessboardCorners to PCL point cloud.
*/
pcl::PointCloud<pcl::PointXYZ>::Ptr cornersToPointCloud(const std::vector<cv::Point2f> &corners, const cv::Mat &disp, const cv::Mat &Q);
/* Find chessboard corners from image.
*/
bool findChessboardCorners(cv::Mat &rgb, const cv::Mat &disp, const cv::Mat &Q, const cv::Size pattern_size, pcl::PointCloud<pcl::PointXYZ>::Ptr &out);
};
};
#endif // HAVE_PCL
#endif // _FTL_RECONSTRUCT_REGISTRATION_HPP_
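These three declarations are the whole registration API added by this commit: findChessboardCorners() extracts a PointXYZ cloud of pattern corners from one RGB/disparity frame, cornersToPointCloud() performs the sparse reprojection it relies on, and findTransformation() estimates the rigid transform mapping a source's accumulated pattern clouds onto the reference source's clouds. The implementations live in registration.cpp and the driver is runRegistration() in main.cpp, both further down in this diff.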
@@ -22,18 +22,23 @@
#include <ftl/display.hpp>
#include <nlohmann/json.hpp>
#include "opencv2/imgproc.hpp"
#include "opencv2/imgcodecs.hpp"
#include "opencv2/highgui.hpp"
#include "opencv2/core/utility.hpp"
#include <opencv2/imgproc.hpp>
#include <opencv2/imgcodecs.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/core/utility.hpp>
#include <ftl/utility/opencv_to_pcl.hpp>
#include <ftl/registration.hpp>
#ifdef WIN32
#pragma comment(lib, "Rpcrt4.lib")
#endif
//#include <pcl/point_cloud.h>
//#include <pcl/common/transforms.h>
//#include <pcl/filters/uniform_sampling.h>
#ifdef HAVE_PCL
#include <pcl/point_cloud.h>
#include <pcl/common/transforms.h>
#include <pcl/filters/uniform_sampling.h>
#endif
using ftl::net::Universe;
using ftl::Display;
@@ -47,68 +52,230 @@ using std::chrono::milliseconds;
using std::mutex;
using std::unique_lock;
using cv::Mat;
using std::vector;
#ifdef HAVE_PCL
using pcl::PointCloud;
using pcl::PointXYZ;
using pcl::PointXYZRGB;
#endif
using cv::Mat;
class SourceStereo {
class InputStereo {
private:
Mat rgb, disp;
Mat q;
mutex m;
/*
static pcl::PointCloud<pcl::PointXYZRGB>::Ptr _getPC(Mat rgb, Mat depth, Mat q) {
auto pc = matToPointXYZ(depth, rgb);
std::vector<int> indices;
pcl::removeNaNFromPointCloud(*pc, *pc, indices);
return pc;
}*/
string uri_;
Mat rgb_, disp_;
Mat q_;
std::mutex m_;
#ifdef HAVE_PCL
static PointCloud<PointXYZRGB>::Ptr _getPointCloud(Mat rgb, Mat disp, Mat q) {
cv::Mat_<cv::Vec3f> XYZ(disp.rows, disp.cols);
reprojectImageTo3D(disp, XYZ, q, false);
return ftl::utility::matToPointXYZ(XYZ, rgb);
}
#endif
public:
string uri;
InputStereo(string uri, const Mat Q): uri_(uri), q_(Q) {};
void recv(const vector<unsigned char> &jpg, const vector<unsigned char> &d) {
unique_lock<mutex> lk(m);
cv::imdecode(jpg, cv::IMREAD_COLOR, &rgb);
Mat(rgb.size(), CV_16UC1);
cv::imdecode(d, cv::IMREAD_UNCHANGED, &disp);
disp.convertTo(disp, CV_32FC1, 1.0f/16.0f);
void set(Mat &rgb, Mat &disp) {
unique_lock<mutex> lk(m_);
rgb_ = rgb;
disp_ = disp;
}
string getURI() { return uri_; }
Mat getQ() { return q_; }
/* thread unsafe, use lock() */
#ifdef HAVE_PCL
PointCloud<PointXYZRGB>::Ptr getPointCloud() { return _getPointCloud(rgb_, disp_, q_); }
#endif
Mat getRGB() { return rgb_; }
Mat getDisparity() { return disp_; }
/* Mat getDepth() {} */
/* Mat getLeftRGB() {} */
/* Mat getRightRGB() {} */
unique_lock<mutex> lock() { return unique_lock<mutex>(m_); } // use recursive mutex instead (and move locking to methods)?
};
#ifdef HAVE_PCL
#include <pcl/filters/uniform_sampling.h>
/*
class InputMerged {
private:
// todo: Abstract input systems, can also use other 3D inputs (like InputMerged)
vector<InputStereo> inputs_;
vector<Eigen::Matrix4f> T_;
void setQ(Mat &Q) { q = Q; }
Mat getQ() { return q; }
/*
pcl::PointCloud<pcl::PointXYZRGB>::Ptr getPC() {
return _getPC(rgb, getDepth(), q);
public:
PointCloud<PointXYZRGB>::Ptr getPointCloud() {
PointCloud<PointXYZRGB>::Ptr result(new PointCloud<PointXYZRGB>);
for (size_t i = 0; i < T_.size(); i++) {
inputs_[i].lock();
PointCloud<PointXYZRGB>::Ptr cloud = inputs_[i].getPointCloud();
// Documentation: Can be used with cloud_in equal to cloud_out
pcl::transformPointCloud(*cloud, *cloud, T_[i]);
*result += *cloud;
}
PointCloud<PointXYZRGB>::Ptr result_sampled(new PointCloud<PointXYZRGB>);
pcl::UniformSampling<PointXYZRGB> uniform_sampling;
uniform_sampling.setInputCloud(result);
uniform_sampling.setRadiusSearch(0.1f); // todo parametrize
uniform_sampling.filter(*result_sampled);
return result_sampled;
}
};
*/
std::map<string, Eigen::Matrix4f> loadRegistration() {
std::map<string, Eigen::Matrix4f> registration;
std::ifstream file(string(FTL_LOCAL_CONFIG_ROOT) + "/registration.json");
nlohmann::json load;
file >> load;
pcl::PointCloud<pcl::PointXYZRGB>::Ptr getPC(cv::Size size) {
Mat rgb_, depth_;
for (auto it = load.begin(); it != load.end(); ++it) {
Eigen::Matrix4f m;
auto data = m.data();
for (size_t i = 0; i < 16; i++) {
data[i] = it.value()[i];
}
cv::resize(rgb, rgb_, size);
cv::resize(getDepth(), depth_, size);
return _getPC(rgb_, depth_, q);
registration[it.key()] = m;
}
return registration;
}
void saveRegistration(std::map<string, Eigen::Matrix4f> registration) {
nlohmann::json save;
for (auto &item : registration) {
auto val = nlohmann::json::array();
for(size_t j = 0; j < 16; j++) { val.push_back((float) item.second.data()[j]); }
save[item.first] = val;
}
*/
std::ofstream file(string(FTL_LOCAL_CONFIG_ROOT) + "/registration.json");
file << save;
}
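loadRegistration() and saveRegistration() persist one 4x4 matrix per source URI in FTL_LOCAL_CONFIG_ROOT/registration.json. Eigen::Matrix4f uses column-major storage by default, so the 16-element array written from item.second.data() (and read back element by element in loadRegistration()) is in column-major order; both functions agree, so a round trip is lossless. A short usage fragment in terms of the names this commit introduces (not a standalone program):

    // Bring one input's cloud into the reference frame using a stored transform.
    std::map<string, Eigen::Matrix4f> registration = loadRegistration();
    Eigen::Matrix4f T = registration[input.getURI()];  // identity for the reference source
    pcl::transformPointCloud(*cloud, *cloud, T);       // PCL allows in-place transforms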
template <template<class> class Container>
std::map<string, Eigen::Matrix4f> runRegistration(Container<InputStereo> &inputs) {
Mat getDepth() {
cv::Mat_<cv::Vec3f> depth(disp.rows, disp.cols);
reprojectImageTo3D(disp, depth, q, false);
return depth;
std::map<string, Eigen::Matrix4f> registration;
// NOTE: uses config["registration"]
if (!config["registration"].is_object()) {
LOG(FATAL) << "Configuration missing \"registration\" entry!";
return registration;
}
Mat getRGB() {
return rgb;
int iter = (int) config["registration"]["calibration"]["iterations"];
int delay = (int) config["registration"]["calibration"]["delay"];
string ref_uri = (string) config["registration"]["reference-source"];
cv::Size pattern_size( (int) config["registration"]["calibration"]["patternsize"][0],
(int) config["registration"]["calibration"]["patternsize"][1]);
// config["registration"] done
size_t ref_i;
bool found = false;
for (size_t i = 0; i < inputs.size(); i++) {
if (inputs[i].getURI() == ref_uri) {
ref_i = i;
found = true;
break;
}
}
Mat getDisparity() {
return disp;
if (!found) { LOG(ERROR) << "Reference input not found!"; return registration; }
for (auto &input : inputs) {
cv::namedWindow("Registration: " + input.getURI(), cv::WINDOW_KEEPRATIO|cv::WINDOW_NORMAL);
}
unique_lock<mutex> lock() {return unique_lock<mutex>(m);} // use recursive mutex instead (and move locking to methods)?
};
// vector for every input: vector of point clouds of patterns
vector<vector<PointCloud<PointXYZ>::Ptr>> patterns(inputs.size());
while (iter > 0) {
bool retval = true; // set to false if pattern not found in one of the sources
vector<PointCloud<PointXYZ>::Ptr> patterns_iter(inputs.size());
for (size_t i = 0; i < inputs.size(); i++) {
InputStereo &input = inputs[i];
Mat rgb, disp, Q;
while(true) {
auto lk = input.lock();
rgb = input.getRGB();
disp = input.getDisparity();
Q = input.getQ();
lk.unlock();
// todo solve this somewhere else
if ((rgb.cols > 0) && (disp.cols > 0)) { break; }
else { sleep_for(milliseconds(500)); }
}
retval &= ftl::registration::findChessboardCorners(rgb, disp, Q, pattern_size, patterns_iter[i]);
cv::imshow("Registration: " + input.getURI(), rgb);
}
cv::waitKey(delay);
// every camera found the pattern
if (retval) {
for (size_t i = 0; i < patterns_iter.size(); i++) {
patterns[i].push_back(patterns_iter[i]);
}
iter--;
}
else { LOG(WARNING) << "Pattern not detected on all inputs";}
}
for (auto &input : inputs) { cv::destroyWindow("Registration: " + input.getURI()); }
for (size_t i = 0; i < inputs.size(); i++) {
Eigen::Matrix4f T;
if (i == ref_i) {
T.setIdentity();
}
else {
T = ftl::registration::findTransformation(patterns[i], patterns[ref_i]);
}
registration[inputs[i].getURI()] = T;
}
saveRegistration(registration);
return registration;
}
#endif
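runRegistration() opens one preview window per input, then repeatedly grabs RGB and disparity frames and runs the chessboard detector on every source; an iteration is only counted when all sources see the pattern in the same capture, which keeps the accumulated pattern clouds index-aligned across sources. After the requested number of iterations the reference source is assigned the identity matrix, every other source receives the transform estimated against the reference's pattern clouds, and the result is written out with saveRegistration().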
bool getCalibration(Universe &net, string src, Mat &Q) {
Q = Mat(cv::Size(4,4), CV_32F);
while(true) {
auto buf = net.findOne<vector<unsigned char>>((string) src +"/calibration");
if (buf) {
memcpy(Q.data, (*buf).data(), (*buf).size());
if (Q.step*Q.rows != (*buf).size()) {
LOG(ERROR) << "Corrupted calibration";
return false;
}
return true;
}
else {
LOG(INFO) << "Could not get calibration, retrying";
sleep_for(milliseconds(500));
}
}
}
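getCalibration() polls the remote node's <src>/calibration resource until it answers, then copies the raw bytes into a 4x4 CV_32F matrix Q, the disparity-to-depth reprojection matrix later passed to cv::reprojectImageTo3D() in InputStereo::_getPointCloud(). The Q.step*Q.rows comparison (64 bytes for a 4x4 float matrix) rejects truncated payloads, although the memcpy runs before that check, so an oversized payload would still overrun the buffer; checking the size first would be safer.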
static void run() {
Universe net(config["net"]);
@@ -121,71 +288,93 @@ static void run() {
return;
}
// todo: create display objects at the same time, store in pair/tuple?
std::deque<InputStereo> inputs;
std::vector<Display> displays;
// todo: networking and initialization for input should possibly be implemented in their own class
// with error handling etc.
//
std::deque<SourceStereo> sources; // mutex in SourceStereo
std::deque<Display> displays;
for (auto &src : config["sources"]) {
SourceStereo &in = sources.emplace_back();
Display &display = displays.emplace_back(Display(config["display"], src));
Mat Q;
if (!getCalibration(net, src, Q)) { continue; } // skips input if calibration bad
InputStereo &in = inputs.emplace_back(src, Q);
displays.emplace_back(config["display"], src);
LOG(INFO) << src << " loaded";
// get calibration parameters from nodes
while(true) {
auto buf = net.findOne<vector<unsigned char>>((string) src +"/calibration");
if (buf) {
Mat Q = Mat(cv::Size(4,4), CV_32F);
memcpy(Q.data, (*buf).data(), (*buf).size());
if (Q.step*Q.rows != (*buf).size()) LOG(ERROR) << "Corrupted calibration";
in.setQ(Q);
LOG(INFO) << "Calibration loaded for " << (string) src;
break;
}
else {
LOG(INFO) << "Could not get calibration, retrying";
sleep_for(milliseconds(500));
}
}
net.subscribe(src, [&in](const vector<unsigned char> &jpg, const vector<unsigned char> &d) {
in.recv(jpg, d);
Mat rgb, disp;
cv::imdecode(jpg, cv::IMREAD_COLOR, &rgb);
Mat(rgb.size(), CV_16UC1);
cv::imdecode(d, cv::IMREAD_UNCHANGED, &disp);
disp.convertTo(disp, CV_32FC1, 1.0f/16.0f);
in.set(rgb, disp);
});
}
// Displays and Inputs configured
// load point cloud transformations
#ifdef HAVE_PCL
std::map<string, Eigen::Matrix4f> registration;
if (config["registration"]["calibration"]["run"]) {
registration = runRegistration(inputs);
}
else {
registration = loadRegistration();
}
vector<Eigen::Matrix4f> T;
for (auto &input : inputs) { T.push_back(registration[input.getURI()]); }
//
vector<PointCloud<PointXYZRGB>::Ptr> clouds(inputs.size());
Display display_merged(config["display"], "Merged"); // todo
int active = displays.size();
while (active > 0) {
active = 0;
//std::vector<pcl::PointCloud<pcl::PointXYZRGB>::Ptr> clouds;
//pcl::PointCloud<pcl::PointXYZRGB>::Ptr pc(new pcl::PointCloud<pcl::PointXYZRGB>);
PointCloud<PointXYZRGB>::Ptr cloud(new PointCloud<PointXYZRGB>);
for (size_t i = 0; i < sources.size(); i++) {
for (size_t i = 0; i < inputs.size(); i++) {
Display &display = displays[i];
SourceStereo &source = sources[i];
InputStereo &input = inputs[i];
if (!display.active()) continue;
active += 1;
auto lk = source.lock();
Mat rgb = source.getRGB();
Mat disparity = source.getDisparity();
Mat q = source.getQ();
auto lk = input.lock();
//Mat rgb = input.getRGB();
//Mat disparity = input.getDisparity();
clouds[i] = input.getPointCloud();
lk.unlock();
display.render(rgb, disparity, q);
display.render(clouds[i]);
//display.render(rgb, disparity);
display.wait(50);
}
/*
pcl::transformPointCloud(*clouds[1], *clouds[1], T);
pcl::UniformSampling<pcl::PointXYZRGB> uniform_sampling;
uniform_sampling.setInputCloud(pc);
for (size_t i = 0; i < clouds.size(); i++) {
pcl::transformPointCloud(*clouds[i], *clouds[i], T[i]);
*cloud += *clouds[i];
}
pcl::UniformSampling<PointXYZRGB> uniform_sampling;
uniform_sampling.setInputCloud(cloud);
uniform_sampling.setRadiusSearch(0.1f);
uniform_sampling.filter(*pc);
*/
uniform_sampling.filter(*cloud);
display_merged.render(cloud);
display_merged.wait(50);
}
#endif
// TODO non-PCL (?)
}
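The reworked run() loop converts each active input's RGB/disparity pair directly into a point cloud, renders it in that input's own display, transforms it by the stored or freshly estimated matrix for that source, accumulates everything into a single cloud, downsamples the result with pcl::UniformSampling (search radius 0.1), and shows it in the new "Merged" display.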
int main(int argc, char **argv) {
ftl::configure(argc, argv, "representation"); // TODO(nick) change to "reconstruction"
run();
}
\ No newline at end of file
}
#include <ftl/registration.hpp>
#ifdef HAVE_PCL
#include <glog/logging.h>
#include <pcl/common/transforms.h>
#include <pcl/registration/transformation_estimation_svd.h>
#include <pcl/registration/transformation_estimation_svd_scale.h>
#include <pcl/segmentation/sac_segmentation.h>
#include <pcl/sample_consensus/method_types.h>
#include <pcl/sample_consensus/model_types.h>
#include <pcl/filters/project_inliers.h>
#include <pcl/ModelCoefficients.h>
#include <pcl/io/pcd_io.h>
#include <pcl/registration/transformation_validation.h>
#include <pcl/registration/transformation_validation_euclidean.h>
//#include <pcl/registration/icp_nl.h>
namespace ftl {
namespace registration {
using std::vector;
using pcl::PointCloud;
using pcl::PointXYZ;
using pcl::PointXYZRGB;
using cv::Mat;
// todo template: fitPlane<typename T>(PointCloud<T> cloud_in, PointCloud<T> cloud_out)
//
// Fit calibration pattern into plane using RANSAC + project points
//
void fitPlane(PointCloud<PointXYZ>::Ptr cloud_in, PointCloud<PointXYZ>::Ptr cloud_out) {
// TODO: include pattern in model (find best alignment of found points and return transformed reference?)
pcl::ModelCoefficients::Ptr coefficients(new pcl::ModelCoefficients);
pcl::PointIndices::Ptr inliers(new pcl::PointIndices);
// Estimate plane with RANSAC
pcl::SACSegmentation<PointXYZ> seg;
seg.setOptimizeCoefficients(true);
seg.setModelType(pcl::SACMODEL_PLANE);
seg.setMethodType(pcl::SAC_RANSAC);
seg.setDistanceThreshold(0.01);
seg.setInputCloud(cloud_in);
seg.segment(*inliers, *coefficients);
// Project points into plane
pcl::ProjectInliers<PointXYZ> proj;
proj.setModelType(pcl::SACMODEL_PLANE);
proj.setInputCloud(cloud_in);
proj.setModelCoefficients(coefficients);
proj.filter(*cloud_out);
}
//template<typename T = PointXYZ> typename
PointCloud<PointXYZ>::Ptr cornersToPointCloud(const vector<cv::Point2f> &corners, const Mat &disp, const Mat &Q) {
cv::Matx44d Q_;
Q.convertTo(Q_, CV_64F);
int corners_len = corners.size();
vector<cv::Vec3f> points(corners_len);
// Output point cloud
PointCloud<PointXYZ>::Ptr cloud(new PointCloud<PointXYZ>);
cloud->width = corners_len;
cloud->height = 1;
// Follows cv::reprojectImageTo3D(..)
// https://github.com/opencv/opencv/blob/371bba8f54560b374fbcd47e7e02f015ac4969ad/modules/calib3d/src/calibration.cpp#L2998
// documentation suggests using cv::perspectiveTransform(...) with sparse set of points
for (int i = 0; i < corners_len; i++) {
double x = corners[i].x;
double y = corners[i].y;
double d = disp.at<float>((int) y, (int) x); // todo: better estimation
cv::Vec4d homg_pt = Q_ * cv::Vec4d(x, y, d, 1.0);
cv::Vec3d p = cv::Vec3d(homg_pt.val) / homg_pt[3];
// no check since disparities assumed to be good in the calibration pattern
// if (fabs(d-minDisparity) <= FLT_EPSILON)
PointXYZ point;
point.x = p[0];
point.y = p[1];
point.z = p[2];
cloud->push_back(point);
}
return cloud;
}
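The loop mirrors what cv::reprojectImageTo3D() does for a dense disparity image, applied to the sparse corner set: each corner (x, y) with disparity d is lifted to the homogeneous vector (x, y, d, 1), multiplied by Q and dehomogenised. As the comment notes, OpenCV's documentation suggests cv::perspectiveTransform() for sparse points; a roughly equivalent sketch, not part of the commit and assuming the same corners, disp and Q inputs:

    // Sparse reprojection via cv::perspectiveTransform instead of the manual loop.
    std::vector<cv::Point3f> in, out;
    for (const auto &c : corners) {
        float d = disp.at<float>(static_cast<int>(c.y), static_cast<int>(c.x));
        in.emplace_back(c.x, c.y, d);      // (x, y, disparity)
    }
    cv::perspectiveTransform(in, out, Q);  // out[i] = (X, Y, Z) in the camera frame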
bool findChessboardCorners(Mat &rgb, const Mat &disp, const Mat &Q, const cv::Size pattern_size, PointCloud<PointXYZ>::Ptr &out) {
vector<cv::Point2f> corners(pattern_size.width * pattern_size.height);
bool retval = cv::findChessboardCorners(rgb, pattern_size, corners); // (todo) change to findChessboardCornersSB (OpenCV > 4)
// todo: verify that disparities are good
cv::drawChessboardCorners(rgb, pattern_size, Mat(corners), retval);
if (!retval) { return false; }
auto corners_cloud = cornersToPointCloud(corners, disp, Q);
if (out) { *out += *corners_cloud; } // if cloud is valid, add the points
else { out = corners_cloud; }
return true;
}
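pattern_size counts inner corners, so the [9, 6] value in the configuration above corresponds to a board of 10 x 7 squares. The detected corners are drawn onto rgb for the preview windows whether or not detection succeeds; on success they are reprojected with cornersToPointCloud() and either appended to an existing out cloud or assigned to out when it is a null pointer.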
Eigen::Matrix4f findTransformation(vector<PointCloud<PointXYZ>::Ptr> clouds_source, vector<PointCloud<PointXYZ>::Ptr> clouds_target) {
size_t n_clouds = clouds_source.size();
Eigen::Matrix4f T, T_tmp, T_new;
T.setIdentity();
if ((clouds_source.size() != clouds_target.size()) || (n_clouds == 0)) {
LOG(ERROR) << "Input vectors have invalid sizes: clouds_source " << clouds_source.size()
<< ", clouds_target " << clouds_target.size() << ", transformation cannot be estimated";
return T; // identity
}
// corresponding points have same indices (!!!)
int n_points = clouds_source[0]->width * clouds_source[0]->height;
vector<int> idx(n_points);
for (int i = 0; i < n_points; i++) { idx[i] = i; }
pcl::registration::TransformationValidationEuclidean<PointXYZ, PointXYZ> validate;
pcl::registration::TransformationEstimationSVD<PointXYZ,PointXYZ> svd;
double score_prev = std::numeric_limits<float>::max();
for (size_t i = 0; i < n_clouds; ++i) {
PointCloud<PointXYZ> source;
PointCloud<PointXYZ> target = *clouds_target[i];
pcl::transformPointCloud(*clouds_source[i], source, T);
svd.estimateRigidTransformation(source, idx, target, idx, T_new);
// calculate new transformation
T_tmp = T_new * T;
// score new transformation
double score = 0.0;
for (size_t j = 0; j < n_clouds; ++j) {
score += validate.validateTransformation(clouds_source[j], clouds_target[j], T);
}
score /= n_clouds;
// if score doesn't improve, do not use as T, otherwise update T and score
if (score < score_prev) {
T = T_tmp;
score_prev = score;
}
LOG(INFO) << "Validation score: " << score;
}
return T;
}
} // namespace registration
} // namespace ftl
#endif // HAVE_PCL
\ No newline at end of file
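findTransformation() refines its estimate incrementally: on each pass the source pattern clouds are first mapped through the current T, TransformationEstimationSVD computes a correction T_new from the index-aligned correspondences, and the candidate T_tmp = T_new * T is adopted only when the mean TransformationValidationEuclidean score over all pattern pairs improves (lower is better). Note that the validation call is given T rather than the candidate T_tmp, which looks unintentional given the surrounding comments. Because correspondences come purely from point order, the routine depends on every pattern cloud listing the chessboard corners in the same order, which holds as long as cv::findChessboardCorners detects each board in a consistent orientation.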
@@ -25,6 +25,14 @@ Display::Display(nlohmann::json &config, std::string name) : config_(config) {
pclviz_->setShowFPS(true);
pclviz_->initCameraParameters ();
pclviz_->registerPointPickingCallback(
[](const pcl::visualization::PointPickingEvent& event, void* viewer_void) {
if (event.getPointIndex () == -1) return;
float x, y, z;
event.getPoint(x, y, z);
LOG(INFO) << "( " << x << ", " << y << ", " << z << ")";
}, (void*) &pclviz_);
pclviz_->registerKeyboardCallback (
[](const pcl::visualization::KeyboardEvent &event, void* viewer_void) {
auto viewer = *static_cast<pcl::visualization::PCLVisualizer::Ptr*>(viewer_void);
@@ -162,13 +170,13 @@ bool Display::render(const cv::Mat &rgb, const cv::Mat &rgbr, const cv::Mat &dep
}
#if defined HAVE_PCL
bool Display::render(pcl::PointCloud<pcl::PointXYZRGB>::ConstPtr pc) {
bool Display::render(pcl::PointCloud<pcl::PointXYZRGB>::ConstPtr pc) {
pcl::visualization::PointCloudColorHandlerRGBField<pcl::PointXYZRGB> rgb(pc);
if (!pclviz_->updatePointCloud<pcl::PointXYZRGB> (pc, rgb, "reconstruction")) {
pclviz_->addPointCloud<pcl::PointXYZRGB> (pc, rgb, "reconstruction");
pclviz_->setCameraPosition(-878.0, -71.0, -2315.0, -0.1, -0.99, 0.068, 0.0, -1.0, 0.0);
pclviz_->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 2, "reconstruction");
}
pclviz_->setPointCloudRenderingProperties (pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 3, "reconstruction");
return true;
}
#endif // HAVE_PCL
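The display change registers a point-picking callback that logs the world coordinates of a clicked point, and moves setPointCloudRenderingProperties() outside the add/update branch so the point size (now 3 instead of 2) is applied on every frame rather than only when the cloud is first added.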
......