From d453f98b6d8eef87703adc61b976ca98f0770f96 Mon Sep 17 00:00:00 2001
From: TinyOh
Date: Wed, 13 Mar 2019 14:04:24 +0800
Subject: [PATCH] fix(api): remove duplicate frames from device stream data
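
The device stream can hand back the same frame more than once. In
MODE_ON, s1s2Processor::GetStreamData() now remembers the timestamp of
the last image it returned (last_frame_id_cd) and returns an empty
api::StreamData whenever the incoming data carries no image, repeats
that timestamp, or has an empty frame. Since an empty result is now a
normal outcome, the tutorial samples guard cv::hconcat()/cv::imshow()
behind frame-empty checks. The WITHOUT_CLONE overrides for the
DepthProcessor/DepthProcessorOCV output connection and the
DisparityProcessor input connection are dropped, falling back to the
processors' default connection mode.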
---
 samples/tutorials/data/get_depth.cc           |  8 ++--
 samples/tutorials/data/get_disparity.cc       |  8 ++--
 samples/tutorials/data/get_imu.cc             | 46 ++++++++++---------
 .../tutorials/data/get_imu_correspondence.cc  |  4 ++
 samples/tutorials/data/get_points.cc          |  9 ++--
 samples/tutorials/data/get_stereo.cc          |  6 ++-
 src/mynteye/api/processor/depth_processor.h   |  3 --
 .../api/processor/depth_processor_ocv.h       |  3 --
 .../api/processor/disparity_processor.h       |  3 --
 .../api/processor/root_camera_processor.cc    | 10 +++-
 10 files changed, 56 insertions(+), 44 deletions(-)

diff --git a/samples/tutorials/data/get_depth.cc b/samples/tutorials/data/get_depth.cc
index 2f779b8..d1bbf8e 100644
--- a/samples/tutorials/data/get_depth.cc
+++ b/samples/tutorials/data/get_depth.cc
@@ -43,9 +43,11 @@ int main(int argc, char *argv[]) {
     auto &&left_data = api->GetStreamData(Stream::LEFT);
     auto &&right_data = api->GetStreamData(Stream::RIGHT);
 
-    cv::Mat img;
-    cv::hconcat(left_data.frame, right_data.frame, img);
-    cv::imshow("frame", img);
+    if (!left_data.frame.empty() && !right_data.frame.empty()) {
+      cv::Mat img;
+      cv::hconcat(left_data.frame, right_data.frame, img);
+      cv::imshow("frame", img);
+    }
 
     auto &&depth_data = api->GetStreamData(Stream::DEPTH);
     if (!depth_data.frame.empty()) {
diff --git a/samples/tutorials/data/get_disparity.cc b/samples/tutorials/data/get_disparity.cc
index a3ddaa6..b5f1fd5 100644
--- a/samples/tutorials/data/get_disparity.cc
+++ b/samples/tutorials/data/get_disparity.cc
@@ -43,9 +43,11 @@ int main(int argc, char *argv[]) {
     auto &&left_data = api->GetStreamData(Stream::LEFT);
     auto &&right_data = api->GetStreamData(Stream::RIGHT);
 
-    cv::Mat img;
-    cv::hconcat(left_data.frame, right_data.frame, img);
-    cv::imshow("frame", img);
+    if (!left_data.frame.empty() && !right_data.frame.empty()) {
+      cv::Mat img;
+      cv::hconcat(left_data.frame, right_data.frame, img);
+      cv::imshow("frame", img);
+    }
 
     // auto &&disp_data = api->GetStreamData(Stream::DISPARITY);
     // if (!disp_data.frame.empty()) {
diff --git a/samples/tutorials/data/get_imu.cc b/samples/tutorials/data/get_imu.cc
index c09d225..eca5b3d 100644
--- a/samples/tutorials/data/get_imu.cc
+++ b/samples/tutorials/data/get_imu.cc
@@ -44,30 +44,32 @@ int main(int argc, char *argv[]) {
     auto &&left_data = api->GetStreamData(Stream::LEFT);
     auto &&right_data = api->GetStreamData(Stream::RIGHT);
 
-    cv::Mat img;
-    cv::hconcat(left_data.frame, right_data.frame, img);
+    if (!left_data.frame.empty() && !right_data.frame.empty()) {
+      cv::Mat img;
+      cv::hconcat(left_data.frame, right_data.frame, img);
 
-    auto &&motion_datas = api->GetMotionDatas();
-    /*
-    for (auto &&data : motion_datas) {
-      LOG(INFO) << "Imu frame_id: " << data.imu->frame_id
-          << ", timestamp: " << data.imu->timestamp
-          << ", accel_x: " << data.imu->accel[0]
-          << ", accel_y: " << data.imu->accel[1]
-          << ", accel_z: " << data.imu->accel[2]
-          << ", gyro_x: " << data.imu->gyro[0]
-          << ", gyro_y: " << data.imu->gyro[1]
-          << ", gyro_z: " << data.imu->gyro[2]
-          << ", temperature: " << data.imu->temperature;
+      auto &&motion_datas = api->GetMotionDatas();
+      /*
+      for (auto &&data : motion_datas) {
+        LOG(INFO) << "Imu frame_id: " << data.imu->frame_id
+            << ", timestamp: " << data.imu->timestamp
+            << ", accel_x: " << data.imu->accel[0]
+            << ", accel_y: " << data.imu->accel[1]
+            << ", accel_z: " << data.imu->accel[2]
+            << ", gyro_x: " << data.imu->gyro[0]
+            << ", gyro_y: " << data.imu->gyro[1]
+            << ", gyro_z: " << data.imu->gyro[2]
+            << ", temperature: " << data.imu->temperature;
+      }
+      */
+
+      painter.DrawImgData(img, *left_data.img);
+      if (!motion_datas.empty()) {
+        painter.DrawImuData(img, *motion_datas[0].imu);
+      }
+
+      cv::imshow("frame", img);
     }
-    */
-
-    painter.DrawImgData(img, *left_data.img);
-    if (!motion_datas.empty()) {
-      painter.DrawImuData(img, *motion_datas[0].imu);
-    }
-
-    cv::imshow("frame", img);
 
     char key = static_cast<char>(cv::waitKey(1));
     if (key == 27 || key == 'q' || key == 'Q') {  // ESC/Q
diff --git a/samples/tutorials/data/get_imu_correspondence.cc b/samples/tutorials/data/get_imu_correspondence.cc
index 26a5870..a32a8df 100644
--- a/samples/tutorials/data/get_imu_correspondence.cc
+++ b/samples/tutorials/data/get_imu_correspondence.cc
@@ -61,6 +61,10 @@ int main(int argc, char *argv[]) {
               << ", diff_prev=" << (img_stamp - prev_img_stamp);
     prev_img_stamp = img_stamp;
 
+    // if (left_data.frame.empty() || right_data.frame.empty()) {
+    //   continue;
+    // }
+
     cv::Mat img;
     cv::hconcat(left_data.frame, right_data.frame, img);
 
diff --git a/samples/tutorials/data/get_points.cc b/samples/tutorials/data/get_points.cc
index e8693ac..b51c966 100644
--- a/samples/tutorials/data/get_points.cc
+++ b/samples/tutorials/data/get_points.cc
@@ -41,10 +41,11 @@ int main(int argc, char *argv[]) {
 
     auto &&left_data = api->GetStreamData(Stream::LEFT);
     auto &&right_data = api->GetStreamData(Stream::RIGHT);
-
-    cv::Mat img;
-    cv::hconcat(left_data.frame, right_data.frame, img);
-    cv::imshow("frame", img);
+    if (!left_data.frame.empty() && !right_data.frame.empty()) {
+      cv::Mat img;
+      cv::hconcat(left_data.frame, right_data.frame, img);
+      cv::imshow("frame", img);
+    }
 
     auto &&points_data = api->GetStreamData(Stream::POINTS);
     if (!points_data.frame.empty()) {
diff --git a/samples/tutorials/data/get_stereo.cc b/samples/tutorials/data/get_stereo.cc
index fc97bc4..099e628 100644
--- a/samples/tutorials/data/get_stereo.cc
+++ b/samples/tutorials/data/get_stereo.cc
@@ -37,8 +37,10 @@ int main(int argc, char *argv[]) {
     auto &&right_data = api->GetStreamData(Stream::RIGHT);
 
     cv::Mat img;
-    cv::hconcat(left_data.frame, right_data.frame, img);
-    cv::imshow("frame", img);
+    if (!left_data.frame.empty() && !right_data.frame.empty()) {
+      cv::hconcat(left_data.frame, right_data.frame, img);
+      cv::imshow("frame", img);
+    }
 
     char key = static_cast<char>(cv::waitKey(1));
     if (key == 27 || key == 'q' || key == 'Q') {  // ESC/Q
diff --git a/src/mynteye/api/processor/depth_processor.h b/src/mynteye/api/processor/depth_processor.h
index 0d57d04..456893c 100644
--- a/src/mynteye/api/processor/depth_processor.h
+++ b/src/mynteye/api/processor/depth_processor.h
@@ -34,9 +34,6 @@ class DepthProcessor : public Processor {
   std::string Name() override;
 
  protected:
-  inline Processor::process_type ProcessOutputConnection() override {
-    return Processor::WITHOUT_CLONE;
-  }
   inline Processor::process_type ProcessInputConnection() override {
     return Processor::WITHOUT_CLONE;
   }
diff --git a/src/mynteye/api/processor/depth_processor_ocv.h b/src/mynteye/api/processor/depth_processor_ocv.h
index 3c497e3..1618713 100644
--- a/src/mynteye/api/processor/depth_processor_ocv.h
+++ b/src/mynteye/api/processor/depth_processor_ocv.h
@@ -29,9 +29,6 @@ class DepthProcessorOCV : public Processor {
   virtual ~DepthProcessorOCV();
 
   std::string Name() override;
-  inline Processor::process_type ProcessOutputConnection() override {
-    return Processor::WITHOUT_CLONE;
-  }
 
  protected:
   Object *OnCreateOutput() override;
diff --git a/src/mynteye/api/processor/disparity_processor.h b/src/mynteye/api/processor/disparity_processor.h
index 5fd470d..fd6cc33 100644
--- a/src/mynteye/api/processor/disparity_processor.h
+++ b/src/mynteye/api/processor/disparity_processor.h
@@ -44,9 +44,6 @@ class DisparityProcessor : public Processor {
   inline Processor::process_type ProcessOutputConnection() override {
     return Processor::WITHOUT_CLONE;
   }
-  inline Processor::process_type ProcessInputConnection() override {
-    return Processor::WITHOUT_CLONE;
-  }
   Object *OnCreateOutput() override;
   bool OnProcess(
       Object *const in, Object *const out,
diff --git a/src/mynteye/api/processor/root_camera_processor.cc b/src/mynteye/api/processor/root_camera_processor.cc
index 2f8fe3c..ff22cb8 100644
--- a/src/mynteye/api/processor/root_camera_processor.cc
+++ b/src/mynteye/api/processor/root_camera_processor.cc
@@ -116,7 +116,15 @@ api::StreamData s1s2Processor::GetStreamData(const Stream &stream) {
     }
   }
   if (enable_mode == Synthetic::MODE_ON) {
-    return data2api(device_->GetStreamData(stream));
+    auto res = data2api(device_->GetStreamData(stream));
+    if (res.img == nullptr ||
+        res.img->timestamp == last_frame_id_cd ||
+        res.frame.empty()) {
+      return {};
+    }
+    last_frame_id_cd = res.img->timestamp;
+    return res;
+    // return data2api(device_->GetStreamData(stream));
   }
   LOG(ERROR) << "Failed to get device stream data of " << stream
              << ", unsupported or disabled";
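-- 
Reviewer note: after this patch, API::GetStreamData() may legitimately
return an empty api::StreamData when the device repeats a frame, so
callers must check the frame before using it. Below is a minimal
consumer sketch of that contract. It assumes the SDK's usual sample
scaffolding (API::Create(), Start()/Stop() with Source::VIDEO_STREAMING,
and the "mynteye/api/api.h" header), which the hunks above do not show.

  #include <opencv2/highgui/highgui.hpp>

  #include "mynteye/api/api.h"

  MYNTEYE_USE_NAMESPACE

  int main(int argc, char *argv[]) {
    auto &&api = API::Create(argc, argv);
    if (api == nullptr) return 1;

    api->Start(Source::VIDEO_STREAMING);
    cv::namedWindow("frame");

    while (true) {
      api->WaitForStreams();

      auto &&left_data = api->GetStreamData(Stream::LEFT);
      // Duplicate or missing frames now arrive as an empty StreamData:
      // skip them instead of passing an empty cv::Mat to imshow().
      if (!left_data.frame.empty()) {
        cv::imshow("frame", left_data.frame);
      }

      char key = static_cast<char>(cv::waitKey(1));
      if (key == 27 || key == 'q' || key == 'Q') {  // ESC/Q
        break;
      }
    }

    api->Stop(Source::VIDEO_STREAMING);
    return 0;
  }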