diff --git a/samples/api/camera.cc b/samples/api/camera.cc
index 4ba3938..29261f2 100644
--- a/samples/api/camera.cc
+++ b/samples/api/camera.cc
@@ -53,11 +53,13 @@ int main(int argc, char *argv[]) {
   api->EnableStreamData(Stream::LEFT_RECTIFIED);
   api->EnableStreamData(Stream::RIGHT_RECTIFIED);
+  api->EnableStreamData(Stream::DISPARITY_NORMALIZED);
 
   // Enable this will cache the motion datas until you get them.
   api->EnableMotionDatas();
 
   api->Start(Source::ALL);
   cv::namedWindow("frame");
+  cv::namedWindow("disparity");
 
   std::size_t motion_count = 0;
   auto &&time_beg = times::now();
@@ -68,8 +70,15 @@ int main(int argc, char *argv[]) {
     // auto &&right_data = api->GetStreamData(Stream::RIGHT);
     auto &&left_data = api->GetStreamData(Stream::LEFT_RECTIFIED);
     auto &&right_data = api->GetStreamData(Stream::RIGHT_RECTIFIED);
-    if (left_data.frame.empty() || right_data.frame.empty()) {
-      continue;
+    if (!left_data.frame.empty() && !right_data.frame.empty()) {
+      cv::Mat img;
+      cv::hconcat(left_data.frame, right_data.frame, img);
+      cv::imshow("frame", img);
+    }
+
+    auto &&disp_data = api->GetStreamData(Stream::DISPARITY_NORMALIZED);
+    if (!disp_data.frame.empty()) {
+      cv::imshow("disparity", disp_data.frame);
     }
 
     auto &&motion_datas = api->GetMotionDatas();
@@ -86,10 +95,6 @@
               << ", temperature: " << data.imu->temperature;
     }
 
-    cv::Mat img;
-    cv::hconcat(left_data.frame, right_data.frame, img);
-    cv::imshow("frame", img);
-
     char key = static_cast<char>(cv::waitKey(1));
    if (key == 27 || key == 'q' || key == 'Q') {  // ESC/Q
       break;
diff --git a/src/api/processor/disparity_normalized_processor.cc b/src/api/processor/disparity_normalized_processor.cc
index 57bdd63..65bc03f 100644
--- a/src/api/processor/disparity_normalized_processor.cc
+++ b/src/api/processor/disparity_normalized_processor.cc
@@ -1,5 +1,7 @@
 #include "api/processor/disparity_normalized_processor.h"
 
+#include <opencv2/imgproc/imgproc.hpp>
+
 #include <glog/logging.h>
 
 MYNTEYE_BEGIN_NAMESPACE
@@ -17,14 +19,15 @@ std::string DisparityNormalizedProcessor::Name() {
 }
 
 Object *DisparityNormalizedProcessor::OnCreateOutput() {
-  return nullptr;
+  return new ObjMat();
 }
 
 void DisparityNormalizedProcessor::OnProcess(
     Object *const in, Object *const out, Processor *const parent) {
-  UNUSED(in)
-  UNUSED(out)
   UNUSED(parent)
+  const ObjMat *input = Object::Cast<ObjMat>(in);
+  ObjMat *output = Object::Cast<ObjMat>(out);
+  cv::normalize(input->value, output->value, 0, 255, cv::NORM_MINMAX, CV_8UC1);
 }
 
 MYNTEYE_END_NAMESPACE
diff --git a/src/api/processor/disparity_processor.cc b/src/api/processor/disparity_processor.cc
index 642bc32..96ea67a 100644
--- a/src/api/processor/disparity_processor.cc
+++ b/src/api/processor/disparity_processor.cc
@@ -1,11 +1,45 @@
 #include "api/processor/disparity_processor.h"
 
+#include <opencv2/calib3d/calib3d.hpp>
+
 #include <glog/logging.h>
 
 MYNTEYE_BEGIN_NAMESPACE
 
 DisparityProcessor::DisparityProcessor() : Processor() {
   VLOG(2) << __func__;
+  int sgbmWinSize = 3;
+  int numberOfDisparities = 64;
+
+#ifdef USE_OPENCV2
+  // StereoSGBM
+  //   http://docs.opencv.org/2.4/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html?#stereosgbm
+  sgbm_ = cv::Ptr<cv::StereoSGBM>(
+      new cv::StereoSGBM(
+          0,                               // minDisparity
+          numberOfDisparities,             // numDisparities
+          sgbmWinSize,                     // SADWindowSize
+          8 * sgbmWinSize * sgbmWinSize,   // P1
+          32 * sgbmWinSize * sgbmWinSize,  // P2
+          1,                               // disp12MaxDiff
+          63,                              // preFilterCap
+          10,                              // uniquenessRatio
+          100,                             // speckleWindowSize
+          32,                              // speckleRange
+          false));                         // fullDP
+#else
+  sgbm_ = cv::StereoSGBM::create(0, 16, 3);
+  sgbm_->setPreFilterCap(63);
+  sgbm_->setBlockSize(sgbmWinSize);
+  sgbm_->setP1(8 * sgbmWinSize * sgbmWinSize);
+  sgbm_->setP2(32 * sgbmWinSize * sgbmWinSize);
+  sgbm_->setMinDisparity(0);
+  sgbm_->setNumDisparities(numberOfDisparities);
+  sgbm_->setUniquenessRatio(10);
+  sgbm_->setSpeckleWindowSize(100);
+  sgbm_->setSpeckleRange(32);
+  sgbm_->setDisp12MaxDiff(1);
+#endif
 }
 
 DisparityProcessor::~DisparityProcessor() {
@@ -17,14 +51,36 @@ std::string DisparityProcessor::Name() {
 }
 
 Object *DisparityProcessor::OnCreateOutput() {
-  return nullptr;
+  return new ObjMat();
 }
 
 void DisparityProcessor::OnProcess(
     Object *const in, Object *const out, Processor *const parent) {
-  UNUSED(in)
-  UNUSED(out)
   UNUSED(parent)
+  const ObjMat2 *input = Object::Cast<ObjMat2>(in);
+  ObjMat *output = Object::Cast<ObjMat>(out);
+
+  cv::Mat disparity;
+#ifdef USE_OPENCV2
+  // StereoSGBM::operator()
+  //   http://docs.opencv.org/2.4/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html#stereosgbm-operator
+  // Output disparity map. It is a 16-bit signed single-channel image of the
+  // same size as the input image.
+  // It contains disparity values scaled by 16. So, to get the floating-point
+  // disparity map,
+  // you need to divide each disp element by 16.
+  (*sgbm_)(input->first, input->second, disparity);
+#else
+  // compute()
+  //   http://docs.opencv.org/master/d2/d6e/classcv_1_1StereoMatcher.html
+  // Output disparity map. It has the same size as the input images.
+  // Some algorithms, like StereoBM or StereoSGBM compute 16-bit fixed-point
+  // disparity map
+  // (where each disparity value has 4 fractional bits),
+  // whereas other algorithms output 32-bit floating-point disparity map.
+  sgbm_->compute(input->first, input->second, disparity);
+#endif
+  output->value = disparity / 16 + 1;
 }
 
 MYNTEYE_END_NAMESPACE
diff --git a/src/api/processor/disparity_processor.h b/src/api/processor/disparity_processor.h
index b0e96d3..892c506 100644
--- a/src/api/processor/disparity_processor.h
+++ b/src/api/processor/disparity_processor.h
@@ -6,6 +6,12 @@
 
 #include "api/processor/processor.h"
 
+namespace cv {
+
+class StereoSGBM;
+
+} // namespace cv
+
 MYNTEYE_BEGIN_NAMESPACE
 
 class DisparityProcessor : public Processor {
@@ -21,6 +27,9 @@ class DisparityProcessor : public Processor {
   Object *OnCreateOutput() override;
   void OnProcess(
       Object *const in, Object *const out, Processor *const parent) override;
+
+ private:
+  cv::Ptr<cv::StereoSGBM> sgbm_;
 };
 
 MYNTEYE_END_NAMESPACE