diff --git a/samples/tutorials/util/pc_viewer.cc b/samples/tutorials/util/pc_viewer.cc
index bec96dc..1ad50a1 100644
--- a/samples/tutorials/util/pc_viewer.cc
+++ b/samples/tutorials/util/pc_viewer.cc
@@ -84,6 +84,7 @@ void PCViewer::ConvertMatToPointCloud(
   for (int i = 0; i < xyz.rows; i++) {
     for (int j = 0; j < xyz.cols; j++) {
       auto &&p = xyz.at<cv::Point3f>(i, j);
+      if (std::abs(p.z) > 9999) continue;
       if (std::isfinite(p.x) && std::isfinite(p.y) && std::isfinite(p.z)) {
         // LOG(INFO) << "[" << i << "," << j << "] x: " << p.x << ", y: " << p.y
         //           << ", z: " << p.z;
diff --git a/src/mynteye/api/processor/disparity_processor.cc b/src/mynteye/api/processor/disparity_processor.cc
index c22232c..b662366 100644
--- a/src/mynteye/api/processor/disparity_processor.cc
+++ b/src/mynteye/api/processor/disparity_processor.cc
@@ -26,37 +26,35 @@ const char DisparityProcessor::NAME[] = "DisparityProcessor";
 DisparityProcessor::DisparityProcessor(std::int32_t proc_period)
     : Processor(std::move(proc_period)) {
   VLOG(2) << __func__ << ": proc_period=" << proc_period;
-  int sgbmWinSize = 3;
-  int numberOfDisparities = 64;
+
+  int blockSize_ = 15;       // 15
+  int numDisparities_ = 64;  // 64
 #ifdef WITH_OPENCV2
-  // StereoSGBM
-  //   http://docs.opencv.org/2.4/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html?#stereosgbm
-  sgbm_ = cv::Ptr<cv::StereoSGBM>(
-      new cv::StereoSGBM(
-          0,                               // minDisparity
-          numberOfDisparities,             // numDisparities
-          sgbmWinSize,                     // SADWindowSize
-          8 * sgbmWinSize * sgbmWinSize,   // P1
-          32 * sgbmWinSize * sgbmWinSize,  // P2
-          1,                               // disp12MaxDiff
-          63,                              // preFilterCap
-          10,                              // uniquenessRatio
-          100,                             // speckleWindowSize
-          32,                              // speckleRange
-          false));                         // fullDP
+  bm_ = cv::Ptr<cv::StereoBM>(
+      new cv::StereoBM(
+          cv::StereoBM::BASIC_PRESET,
+          numDisparities_,
+          blockSize_));
 #else
-  sgbm_ = cv::StereoSGBM::create(0, 16, 3);
-  sgbm_->setPreFilterCap(63);
-  sgbm_->setBlockSize(sgbmWinSize);
-  sgbm_->setP1(8 * sgbmWinSize * sgbmWinSize);
-  sgbm_->setP2(32 * sgbmWinSize * sgbmWinSize);
-  sgbm_->setMinDisparity(0);
-  sgbm_->setNumDisparities(numberOfDisparities);
-  sgbm_->setUniquenessRatio(10);
-  sgbm_->setSpeckleWindowSize(100);
-  sgbm_->setSpeckleRange(32);
-  sgbm_->setDisp12MaxDiff(1);
+  int minDisparity_ = 0;         // 0
+  int preFilterSize_ = 9;        // 9
+  int preFilterCap_ = 31;        // 31
+  int uniquenessRatio_ = 15;     // 15
+  int textureThreshold_ = 10;    // 10
+  int speckleWindowSize_ = 100;  // 100
+  int speckleRange_ = 4;         // 4
+
+  bm_ = cv::StereoBM::create(16, 9);
+  bm_->setBlockSize(blockSize_);
+  bm_->setMinDisparity(minDisparity_);
+  bm_->setNumDisparities(numDisparities_);
+  bm_->setPreFilterSize(preFilterSize_);
+  bm_->setPreFilterCap(preFilterCap_);
+  bm_->setUniquenessRatio(uniquenessRatio_);
+  bm_->setTextureThreshold(textureThreshold_);
+  bm_->setSpeckleWindowSize(speckleWindowSize_);
+  bm_->setSpeckleRange(speckleRange_);
 #endif
 }
@@ -80,27 +78,11 @@ bool DisparityProcessor::OnProcess(
   cv::Mat disparity;
 #ifdef WITH_OPENCV2
-  // StereoSGBM::operator()
-  //   http://docs.opencv.org/2.4/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html#stereosgbm-operator
-  // Output disparity map. It is a 16-bit signed single-channel image of the
-  // same size as the input image.
-  // It contains disparity values scaled by 16. So, to get the floating-point
-  // disparity map,
-  // you need to divide each disp element by 16.
-  (*sgbm_)(input->first, input->second, disparity);
+  (*bm_)(input->first, input->second, disparity);
 #else
-  // compute()
-  //   http://docs.opencv.org/master/d2/d6e/classcv_1_1StereoMatcher.html
-  // Output disparity map. It has the same size as the input images.
-  // Some algorithms, like StereoBM or StereoSGBM compute 16-bit fixed-point
-  // disparity map
-  // (where each disparity value has 4 fractional bits),
-  // whereas other algorithms output 32-bit floating-point disparity map.
-  sgbm_->compute(input->first, input->second, disparity);
+  bm_->compute(input->first, input->second, disparity);
 #endif
-  output->value = disparity / 16 + 1;
-  output->id = input->first_id;
-  output->data = input->first_data;
+  disparity.convertTo(output->value, CV_32F, 1./16);
   return true;
 }
diff --git a/src/mynteye/api/processor/disparity_processor.h b/src/mynteye/api/processor/disparity_processor.h
index 370dca3..0ec43b6 100644
--- a/src/mynteye/api/processor/disparity_processor.h
+++ b/src/mynteye/api/processor/disparity_processor.h
@@ -17,11 +17,13 @@
 #include <string>
 
+#include <opencv2/core/core.hpp>
+
 #include "mynteye/api/processor.h"
 
 namespace cv {
 
-class StereoSGBM;
+class StereoBM;
 
 }  // namespace cv
@@ -42,7 +44,7 @@ class DisparityProcessor : public Processor {
       Object *const in, Object *const out, Processor *const parent) override;
 
  private:
-  cv::Ptr<cv::StereoSGBM> sgbm_;
+  cv::Ptr<cv::StereoBM> bm_;
 };
 
 MYNTEYE_END_NAMESPACE
diff --git a/src/mynteye/api/processor/points_processor.cc b/src/mynteye/api/processor/points_processor.cc
index 7d2b6d4..9b40e85 100644
--- a/src/mynteye/api/processor/points_processor.cc
+++ b/src/mynteye/api/processor/points_processor.cc
@@ -45,9 +45,38 @@ bool PointsProcessor::OnProcess(
   MYNTEYE_UNUSED(parent)
   const ObjMat *input = Object::Cast<ObjMat>(in);
   ObjMat *output = Object::Cast<ObjMat>(out);
-  cv::reprojectImageTo3D(input->value, output->value, Q_, true);
-  output->id = input->id;
-  output->data = input->data;
+
+  cv::Mat disparity = input->value;
+  output->value.create(disparity.size(), CV_MAKETYPE(CV_32FC3, 3));
+  cv::Mat _3dImage = output->value;
+
+  const float bigZ = 10000.f;
+  cv::Matx44d Q;
+  Q_.convertTo(Q, CV_64F);
+
+  int x, cols = disparity.cols;
+  CV_Assert(cols >= 0);
+
+  double minDisparity = FLT_MAX;
+
+  cv::minMaxIdx(disparity, &minDisparity, 0, 0, 0);
+
+  for (int y = 0; y < disparity.rows; y++) {
+    float *sptr = disparity.ptr<float>(y);
+    cv::Vec3f *dptr = _3dImage.ptr<cv::Vec3f>(y);
+
+    for (x = 0; x < cols; x++) {
+      double d = sptr[x];
+      cv::Vec4d homg_pt = Q * cv::Vec4d(x, y, d, 1.0);
+      dptr[x] = cv::Vec3d(homg_pt.val);
+      dptr[x] /= homg_pt[3];
+
+      if (fabs(d - minDisparity) <= FLT_EPSILON) {
+        dptr[x][2] = bigZ;
+      }
+    }
+  }
+
   return true;
 }
diff --git a/src/mynteye/api/processor/rectify_processor.cc b/src/mynteye/api/processor/rectify_processor.cc
index 9c570f0..ab47402 100644
--- a/src/mynteye/api/processor/rectify_processor.cc
+++ b/src/mynteye/api/processor/rectify_processor.cc
@@ -72,12 +72,16 @@ void RectifyProcessor::InitParams(
       in_right.cy, 0, 0, 1);
   cv::Mat D1(1, 5, CV_64F, in_left.coeffs);
   cv::Mat D2(1, 5, CV_64F, in_right.coeffs);
+  /*
   cv::Mat R = (cv::Mat_<double>(3, 3) << ex_right_to_left.rotation[0][0],
       ex_right_to_left.rotation[0][1], ex_right_to_left.rotation[0][2],
       ex_right_to_left.rotation[1][0], ex_right_to_left.rotation[1][1],
       ex_right_to_left.rotation[1][2], ex_right_to_left.rotation[2][0],
       ex_right_to_left.rotation[2][1], ex_right_to_left.rotation[2][2]);
+  */
+  cv::Mat R =
+      (cv::Mat_<double>(3, 3) << 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0);
   cv::Mat T(3, 1, CV_64F, ex_right_to_left.translation);
   VLOG(2) << "InitParams size: " << size;
diff --git a/wrappers/ros/src/mynt_eye_ros_wrapper/src/wrapper_nodelet.cc b/wrappers/ros/src/mynt_eye_ros_wrapper/src/wrapper_nodelet.cc
index 189418e..81c9d24 100644
--- a/wrappers/ros/src/mynt_eye_ros_wrapper/src/wrapper_nodelet.cc
+++ b/wrappers/ros/src/mynt_eye_ros_wrapper/src/wrapper_nodelet.cc
@@ -409,6 +409,25 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
       } else {
         publishPoint(stream);
       }
+      api_->SetStreamCallback(
+          stream, [this, stream](const api::StreamData &data) {
+            // data.img is null, not hard timestamp
+            static std::size_t count = 0;
+            ++count;
+            publishCamera(stream, data, count, ros::Time::now());
+          });
+      is_published_[stream] = true;
+    }
+
+    if (points_publisher_.getNumSubscribers() > 0 &&
+        !is_published_[Stream::POINTS]) {
+      api_->SetStreamCallback(
+          Stream::POINTS, [this](const api::StreamData &data) {
+            static std::size_t count = 0;
+            ++count;
+            publishPoints(data, count, ros::Time::now());
+          });
+      is_published_[Stream::POINTS] = true;
     }
 
     if (!is_motion_published_) {