diff --git a/CMakeLists.txt b/CMakeLists.txt index cc2c310..a79a007 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -14,7 +14,7 @@ cmake_minimum_required(VERSION 3.0) -project(mynteye VERSION 2.3.0 LANGUAGES C CXX) +project(mynteye VERSION 2.3.2 LANGUAGES C CXX) include(cmake/Common.cmake) @@ -219,6 +219,9 @@ if(WITH_API) src/mynteye/api/processor/depth_processor_ocv.cc src/mynteye/api/processor/rectify_processor_ocv.cc src/mynteye/api/config.cc + src/mynteye/api/correspondence.cc + src/mynteye/api/version_checker.cc + src/mynteye/api/data_tools.cc ) if(WITH_CAM_MODELS) list(APPEND MYNTEYE_SRCS diff --git a/Makefile b/Makefile index e9773b5..f4057ff 100644 --- a/Makefile +++ b/Makefile @@ -34,12 +34,7 @@ SUDO ?= sudo CAM_MODELS ?= -CMAKE_BUILD_EXTRA_OPTIONS := -ifeq ($(CAM_MODELS),) - CMAKE_BUILD_EXTRA_OPTIONS := $(CMAKE_BUILD_EXTRA_OPTIONS) -DWITH_CAM_MODELS=OFF -else - CMAKE_BUILD_EXTRA_OPTIONS := $(CMAKE_BUILD_EXTRA_OPTIONS) -DWITH_CAM_MODELS=ON -endif +CMAKE_BUILD_EXTRA_OPTIONS := $(CMAKE_BUILD_EXTRA_OPTIONS) -DWITH_CAM_MODELS=ON .DEFAULT_GOAL := all @@ -106,7 +101,7 @@ init: build: @$(call echo,Make $@) ifeq ($(HOST_OS),Win) - @$(call cmake_build,./_build,..,-DCMAKE_INSTALL_PREFIX=$(MKFILE_DIR)/_install) + @$(call cmake_build,./_build,..,-DCMAKE_INSTALL_PREFIX=$(MKFILE_DIR)/_install $(CMAKE_BUILD_EXTRA_OPTIONS)) else @$(call cmake_build,./_build,..,$(CMAKE_BUILD_EXTRA_OPTIONS)) endif @@ -135,7 +130,7 @@ endif # install -install: build +install: uninstall build @$(call echo,Make $@) ifeq ($(HOST_OS),Win) ifneq ($(HOST_NAME),MinGW) @@ -176,7 +171,13 @@ samples: install tools: install @$(call echo,Make $@) +ifeq ($(HOST_OS),Mac) + $(error "Can't make tools on $(HOST_OS)") +else @$(call cmake_build,./tools/_build) +endif + + .PHONY: tools diff --git a/README.md b/README.md index e5b4fb6..2755f43 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # MYNT® EYE S SDK -[![](https://img.shields.io/badge/MYNT%20EYE%20S%20SDK-2.3.0-brightgreen.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK) +[![](https://img.shields.io/badge/MYNT%20EYE%20S%20SDK-2.3.2-brightgreen.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK) ## Overview @@ -17,11 +17,11 @@ Please follow the guide doc to install the SDK on different platforms. ## Documentations * [API Doc](https://github.com/slightech/MYNT-EYE-S-SDK/releases): API reference, some guides and data spec. 
- * en: [![](https://img.shields.io/badge/Download-PDF-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK/files/2764152/mynt-eye-s-sdk-apidoc-2.3.0-en.pdf) [![](https://img.shields.io/badge/Download-HTML-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK/files/2764156/mynt-eye-s-sdk-apidoc-2.3.0-en.zip) [![](https://img.shields.io/badge/Online-HTML-blue.svg?style=flat)](https://slightech.github.io/MYNT-EYE-S-SDK/) - * zh-Hans: [![](https://img.shields.io/badge/Download-PDF-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK/files/2764160/mynt-eye-s-sdk-apidoc-2.3.0-zh-Hans.pdf) [![](https://img.shields.io/badge/Download-HTML-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK/files/2764173/mynt-eye-s-sdk-apidoc-2.3.0-en.zip) [![](https://img.shields.io/badge/Online-HTML-blue.svg?style=flat)](http://doc.myntai.com/resource/api/mynt-eye-s-sdk-apidoc-2.3.0-zh-Hans/mynt-eye-s-sdk-apidoc-2.3.0-zh-Hans/index.html) + * en: [![](https://img.shields.io/badge/Download-PDF-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK/files/2913110/mynt-eye-s-sdk-apidoc-2.3.2-en.pdf) [![](https://img.shields.io/badge/Download-HTML-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK/files/2913111/mynt-eye-s-sdk-apidoc-2.3.2-en.zip) [![](https://img.shields.io/badge/Online-HTML-blue.svg?style=flat)](https://slightech.github.io/MYNT-EYE-S-SDK/) + * zh-Hans: [![](https://img.shields.io/badge/Download-PDF-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK/files/2913112/mynt-eye-s-sdk-apidoc-2.3.2-zh-Hans.pdf) [![](https://img.shields.io/badge/Download-HTML-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK/files/2913113/mynt-eye-s-sdk-apidoc-2.3.2-zh-Hans.zip) [![](https://img.shields.io/badge/Online-HTML-blue.svg?style=flat)](http://doc.myntai.com/resource/api/mynt-eye-s-sdk-apidoc-2.3.2-zh-Hans/mynt-eye-s-sdk-apidoc-2.3.2-zh-Hans/index.html) * [Guide Doc](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/releases): How to install and start using the SDK. 
- * en: [![](https://img.shields.io/badge/Download-PDF-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/files/2764143/mynt-eye-s-sdk-guide-2.3.0-en.pdf) [![](https://img.shields.io/badge/Download-HTML-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/files/2764145/mynt-eye-s-sdk-guide-2.3.0-en.zip) [![](https://img.shields.io/badge/Online-HTML-blue.svg?style=flat)](https://slightech.github.io/MYNT-EYE-S-SDK-Guide/) - * zh-Hans: [![](https://img.shields.io/badge/Download-PDF-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/files/2764150/mynt-eye-s-sdk-guide-2.3.0-zh-Hans.pdf) [![](https://img.shields.io/badge/Download-HTML-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/files/2764163/mynt-eye-s-sdk-guide-2.3.0-zh-Hans.zip) [![](https://img.shields.io/badge/Online-HTML-blue.svg?style=flat)](http://doc.myntai.com/resource/sdk/mynt-eye-s-sdk-guide-2.3.0-zh-Hans/mynt-eye-s-sdk-guide-2.3.0-zh-Hans/index.html) + * en: [![](https://img.shields.io/badge/Download-PDF-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/files/2913052/mynt-eye-s-sdk-guide-2.3.2-en.pdf) [![](https://img.shields.io/badge/Download-HTML-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/files/2913053/mynt-eye-s-sdk-guide-2.3.2-en.zip) [![](https://img.shields.io/badge/Online-HTML-blue.svg?style=flat)](https://slightech.github.io/MYNT-EYE-S-SDK-Guide/) + * zh-Hans: [![](https://img.shields.io/badge/Download-PDF-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/files/2913054/mynt-eye-s-sdk-guide-2.3.2-zh-Hans.pdf) [![](https://img.shields.io/badge/Download-HTML-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/files/2913056/mynt-eye-s-sdk-guide-2.3.2-zh-Hans.zip) [![](https://img.shields.io/badge/Online-HTML-blue.svg?style=flat)](http://doc.myntai.com/resource/sdk/mynt-eye-s-sdk-guide-2.3.2-zh-Hans/mynt-eye-s-sdk-guide-2.3.2-zh-Hans/index.html) > Supported languages: `en`, `zh-Hans`. diff --git a/doc/en/api.doxyfile b/doc/en/api.doxyfile index 9a6bb26..9400c74 100644 --- a/doc/en/api.doxyfile +++ b/doc/en/api.doxyfile @@ -38,7 +38,7 @@ PROJECT_NAME = "MYNT EYE S SDK" # could be handy for archiving the generated documentation or if some version # control system is used. -PROJECT_NUMBER = 2.3.0 +PROJECT_NUMBER = 2.3.2 # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a diff --git a/doc/en/spec_hardware_info.md b/doc/en/spec_hardware_info.md index 7ad0981..16bf998 100644 --- a/doc/en/spec_hardware_info.md +++ b/doc/en/spec_hardware_info.md @@ -12,3 +12,5 @@ | Lens type | lens_type | - | × | √ Get/Set | 4 | vendor(2),product(2); default: 0 | | IMU type | imu_type | - | × | √ Get/Set | 4 | vendor(2),product(2); default: 0 | | Nominal baseline | nominal_baseline | - | × | √ Get/Set | 2 | unit: mm; default: 0 | +| Auxiliary chip version | auxiliary_chip_version | - | × | √ Get | 2 | major,minor | +| isp version | isp_version | - | × | √ Get | 2 | major,minor | diff --git a/doc/zh-Hans/api.doxyfile b/doc/zh-Hans/api.doxyfile index 312be3e..31d9a19 100644 --- a/doc/zh-Hans/api.doxyfile +++ b/doc/zh-Hans/api.doxyfile @@ -38,7 +38,7 @@ PROJECT_NAME = "MYNT EYE S SDK" # could be handy for archiving the generated documentation or if some version # control system is used. 
-PROJECT_NUMBER = 2.3.0 +PROJECT_NUMBER = 2.3.2 # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a diff --git a/doc/zh-Hans/spec_hardware_info.md b/doc/zh-Hans/spec_hardware_info.md index e05f1aa..ba2dd41 100644 --- a/doc/zh-Hans/spec_hardware_info.md +++ b/doc/zh-Hans/spec_hardware_info.md @@ -12,6 +12,8 @@ | 镜头类型 | lens_type | - | × | √ Get/Set | 4 | vendor(2),product(2) ,未 Set 默认 0 | | IMU 类型 | imu_type | - | × | √ Get/Set | 4 | vendor(2),product(2) ,未 Set 默认 0 | | 基线长度 | nominal_baseline | - | × | √ Get/Set | 2 | 单位 mm ,未 set 默认 0 | +| 辅助芯片版本 | auxiliary_chip_version | - | × | √ Get | 2 | major,minor | +| ISP版本 | isp_version | - | × | √ Get | 2 | major,minor | * 描述符获取:指通用 USB 设备信息,可用工具查看。 * 拓展通道获取:指通过拓展通道(UVC Extension Unit)问硬件获取到的信息,需要读取。 diff --git a/include/mynteye/api/api.h b/include/mynteye/api/api.h index ea7a27f..699247c 100644 --- a/include/mynteye/api/api.h +++ b/include/mynteye/api/api.h @@ -30,6 +30,7 @@ MYNTEYE_BEGIN_NAMESPACE struct DeviceInfo; +class Correspondence; class Device; class Synthetic; @@ -91,6 +92,8 @@ class MYNTEYE_API API { using stream_callback_t = std::function; /** The api::MotionData callback. */ using motion_callback_t = std::function; + /** The enable/disable switch callback. */ + using stream_switch_callback_t = std::function; explicit API(std::shared_ptr device, CalibrationModel calib_model); virtual ~API(); @@ -184,7 +187,10 @@ class MYNTEYE_API API { * Get the device info. */ std::string GetInfo(const Info &info) const; - + /** + * Get the sdk version. + */ + std::string GetSDKVersion() const; /** * @deprecated Get the intrinsics (pinhole) of stream. */ @@ -280,11 +286,31 @@ class MYNTEYE_API API { * still support this stream. */ void EnableStreamData(const Stream &stream); + + /** + * Enable the data of stream. + * callback function will call before the father processor enable. + * when try_tag is true, the function will do nothing except callback. + */ + void EnableStreamData( + const Stream &stream, + stream_switch_callback_t callback, + bool try_tag = false); /** * Disable the data of stream. */ void DisableStreamData(const Stream &stream); + /** + * Disable the data of stream. + * callback function will call before the children processor disable. + * when try_tag is true, the function will do nothing except callback. + */ + void DisableStreamData( + const Stream &stream, + stream_switch_callback_t callback, + bool try_tag = false); + /** * Get the latest data of stream. */ @@ -305,6 +331,11 @@ class MYNTEYE_API API { */ std::vector GetMotionDatas(); + /** + * Enable motion datas with timestamp correspondence of some stream. + */ + void EnableTimestampCorrespondence(const Stream &stream); + /** * Enable the plugin. */ @@ -317,6 +348,10 @@ class MYNTEYE_API API { std::unique_ptr synthetic_; + std::unique_ptr correspondence_; + + motion_callback_t callback_; + void CheckImageParams(); }; diff --git a/include/mynteye/device/device.h b/include/mynteye/device/device.h index 30634b9..e188918 100644 --- a/include/mynteye/device/device.h +++ b/include/mynteye/device/device.h @@ -278,6 +278,10 @@ class MYNTEYE_API Device { */ std::vector GetStreamDatas(const Stream &stream); + /** + * Disable cache motion datas. + */ + void DisableMotionDatas(); /** * Enable cache motion datas. 
*/ diff --git a/include/mynteye/device/types.h b/include/mynteye/device/types.h index 7ad1b75..7e156b0 100644 --- a/include/mynteye/device/types.h +++ b/include/mynteye/device/types.h @@ -162,6 +162,8 @@ struct MYNTEYE_API DeviceInfo { Type lens_type; Type imu_type; std::uint16_t nominal_baseline; + Version auxiliary_chip_version; + Version isp_version; }; #undef MYNTEYE_PROPERTY diff --git a/include/mynteye/types.h b/include/mynteye/types.h index b95d228..adf52c8 100644 --- a/include/mynteye/types.h +++ b/include/mynteye/types.h @@ -122,6 +122,10 @@ enum class Info : std::uint8_t { IMU_TYPE, /** Nominal baseline */ NOMINAL_BASELINE, + /** Auxiliary chip version */ + AUXILIARY_CHIP_VERSION, + /** Isp version */ + ISP_VERSION, /** Last guard */ LAST }; diff --git a/samples/tutorials/CMakeLists.txt b/samples/tutorials/CMakeLists.txt index 911318b..60b01d2 100644 --- a/samples/tutorials/CMakeLists.txt +++ b/samples/tutorials/CMakeLists.txt @@ -105,6 +105,10 @@ if(PCL_FOUND) WITH_OPENCV WITH_PCL ) endif() +make_executable2(get_imu_correspondence + SRCS data/get_imu_correspondence.cc util/cv_painter.cc + WITH_OPENCV +) make_executable2(get_imu SRCS data/get_imu.cc util/cv_painter.cc WITH_OPENCV) make_executable2(get_from_callbacks SRCS data/get_from_callbacks.cc util/cv_painter.cc diff --git a/samples/tutorials/control/framerate.cc b/samples/tutorials/control/framerate.cc index d1226f5..41f1e5d 100644 --- a/samples/tutorials/control/framerate.cc +++ b/samples/tutorials/control/framerate.cc @@ -34,10 +34,6 @@ int main(int argc, char *argv[]) { // Set frame rate options for s1030 if (model == Model::STANDARD) { - // Attention: must set FRAME_RATE and IMU_FREQUENCY together, - // otherwise won't. - // succeed. - // FRAME_RATE values: 10, 15, 20, 25, 30, 35, 40, 45, 50, 55, 60 api->SetOptionValue(Option::FRAME_RATE, 25); // IMU_FREQUENCY values: 100, 200, 250, 333, 500 diff --git a/samples/tutorials/data/get_device_info.cc b/samples/tutorials/data/get_device_info.cc index ddb3bd9..298d72e 100644 --- a/samples/tutorials/data/get_device_info.cc +++ b/samples/tutorials/data/get_device_info.cc @@ -28,6 +28,9 @@ int main(int argc, char *argv[]) { LOG(INFO) << "Lens type: " << api->GetInfo(Info::LENS_TYPE); LOG(INFO) << "IMU type: " << api->GetInfo(Info::IMU_TYPE); LOG(INFO) << "Nominal baseline: " << api->GetInfo(Info::NOMINAL_BASELINE); + LOG(INFO) << "Auxiliary chip version: " + << api->GetInfo(Info::AUXILIARY_CHIP_VERSION); + LOG(INFO) << "Nominal baseline: " << api->GetInfo(Info::ISP_VERSION); return 0; } diff --git a/samples/tutorials/data/get_imu_correspondence.cc b/samples/tutorials/data/get_imu_correspondence.cc new file mode 100644 index 0000000..9cec957 --- /dev/null +++ b/samples/tutorials/data/get_imu_correspondence.cc @@ -0,0 +1,85 @@ +// Copyright 2018 Slightech Co., Ltd. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+#include + +#include "mynteye/logger.h" +#include "mynteye/api/api.h" + +#include "util/cv_painter.h" + +MYNTEYE_USE_NAMESPACE + +int main(int argc, char *argv[]) { + auto &&api = API::Create(argc, argv); + if (!api) return 1; + + bool ok; + auto &&request = api->SelectStreamRequest(&ok); + if (!ok) return 1; + api->ConfigStreamRequest(request); + + // Enable motion datas with timestamp correspondence of some stream + api->EnableTimestampCorrespondence(Stream::LEFT); + + api->Start(Source::ALL); + + CVPainter painter; + + cv::namedWindow("frame"); + + std::uint64_t prev_img_stamp = 0; + std::uint64_t prev_imu_stamp = 0; + while (true) { + api->WaitForStreams(); + + auto &&left_data = api->GetStreamData(Stream::LEFT); + auto &&right_data = api->GetStreamData(Stream::RIGHT); + + auto img_stamp = left_data.img->timestamp; + LOG(INFO) << "Img timestamp: " << img_stamp + << ", diff_prev=" << (img_stamp - prev_img_stamp); + prev_img_stamp = img_stamp; + + cv::Mat img; + cv::hconcat(left_data.frame, right_data.frame, img); + + auto &&motion_datas = api->GetMotionDatas(); + LOG(INFO) << "Imu count: " << motion_datas.size(); + for (auto &&data : motion_datas) { + auto imu_stamp = data.imu->timestamp; + LOG(INFO) << "Imu timestamp: " << imu_stamp + << ", diff_prev=" << (imu_stamp - prev_imu_stamp) + << ", diff_img=" << (1.f + imu_stamp - img_stamp); + prev_imu_stamp = imu_stamp; + } + LOG(INFO); + + /* + painter.DrawImgData(img, *left_data.img); + if (!motion_datas.empty()) { + painter.DrawImuData(img, *motion_datas[0].imu); + } + */ + + cv::imshow("frame", img); + + char key = static_cast(cv::waitKey(1)); + if (key == 27 || key == 'q' || key == 'Q') { // ESC/Q + break; + } + } + + api->Stop(Source::ALL); + return 0; +} diff --git a/src/mynteye/api/api.cc b/src/mynteye/api/api.cc index 18e80cd..3f522db 100644 --- a/src/mynteye/api/api.cc +++ b/src/mynteye/api/api.cc @@ -22,9 +22,11 @@ #include #include "mynteye/logger.h" +#include "mynteye/api/correspondence.h" #include "mynteye/api/dl.h" #include "mynteye/api/plugin.h" #include "mynteye/api/synthetic.h" +#include "mynteye/api/version_checker.h" #include "mynteye/device/device.h" #include "mynteye/device/utils.h" @@ -208,7 +210,7 @@ std::vector get_plugin_paths() { } // namespace API::API(std::shared_ptr device, CalibrationModel calib_model) - : device_(device) { + : device_(device), correspondence_(nullptr) { VLOG(2) << __func__; // std::dynamic_pointer_cast(device_); synthetic_.reset(new Synthetic(this, calib_model)); @@ -221,7 +223,10 @@ API::~API() { std::shared_ptr API::Create(int argc, char *argv[]) { auto &&device = device::select(); if (!device) return nullptr; - return Create(argc, argv, device); + auto api = Create(argc, argv, device); + if (api && checkFirmwareVersion(api)) + return api; + return nullptr; } std::shared_ptr API::Create( @@ -260,7 +265,7 @@ std::shared_ptr API::Create(const std::shared_ptr &device) { } } else { LOG(ERROR) <<"no device!"; - api = std::make_shared(device, CalibrationModel::UNKNOW); + return nullptr; } return api; } @@ -326,6 +331,20 @@ std::string API::GetInfo(const Info &info) const { return device_->GetInfo(info); } +std::string API::GetSDKVersion() const { + std::string info_path = + utils::get_sdk_install_dir(); + info_path.append(MYNTEYE_OS_SEP "share" \ + MYNTEYE_OS_SEP "mynteye" MYNTEYE_OS_SEP "build.info"); + + cv::FileStorage fs(info_path, cv::FileStorage::READ); + if (!fs.isOpened()) { + LOG(WARNING) << "build.info not found: " << info_path; + return "null"; + } + return fs["MYNTEYE_VERSION"]; +} + 
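(Editor's aside, not part of the patch.) GetSDKVersion() above resolves share/mynteye/build.info under the SDK install directory and reads its MYNTEYE_VERSION key through cv::FileStorage, returning "null" when the file is absent. A minimal caller sketch, assuming only the public API declared earlier in this patch:

    #include "mynteye/logger.h"
    #include "mynteye/api/api.h"

    MYNTEYE_USE_NAMESPACE

    int main(int argc, char *argv[]) {
      // Create() now returns nullptr if the firmware/SDK version check fails.
      auto &&api = API::Create(argc, argv);
      if (!api) return 1;
      // Reads MYNTEYE_VERSION from share/mynteye/build.info in the install dir.
      LOG(INFO) << "SDK version: " << api->GetSDKVersion();
      return 0;
    }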
IntrinsicsPinhole API::GetIntrinsics(const Stream &stream) const { auto in = GetIntrinsicsBase(stream); if (in->calib_model() == CalibrationModel::PINHOLE) { @@ -377,10 +396,15 @@ void API::SetStreamCallback(const Stream &stream, stream_callback_t callback) { } void API::SetMotionCallback(motion_callback_t callback) { - static auto callback_ = callback; + if (correspondence_) { + correspondence_->SetMotionCallback(callback); + return; + } + callback_ = callback; if (callback_) { - device_->SetMotionCallback( - [](const device::MotionData &data) { callback_({data.imu}); }, true); + device_->SetMotionCallback([this](const device::MotionData &data) { + callback_({data.imu}); + }, true); } else { device_->SetMotionCallback(nullptr); } @@ -435,7 +459,11 @@ void API::Stop(const Source &source) { } void API::WaitForStreams() { - synthetic_->WaitForStreams(); + if (correspondence_) { + correspondence_->WaitForStreams(); + } else { + synthetic_->WaitForStreams(); + } } void API::EnableStreamData(const Stream &stream) { @@ -446,24 +474,69 @@ void API::DisableStreamData(const Stream &stream) { synthetic_->DisableStreamData(stream); } +void API::EnableStreamData( + const Stream &stream, stream_switch_callback_t callback, + bool try_tag) { + synthetic_->EnableStreamData(stream, callback, try_tag); +} +void API::DisableStreamData( + const Stream &stream, stream_switch_callback_t callback, + bool try_tag) { + synthetic_->DisableStreamData(stream, callback, try_tag); +} + api::StreamData API::GetStreamData(const Stream &stream) { - return synthetic_->GetStreamData(stream); + if (correspondence_ && correspondence_->Watch(stream)) { + return correspondence_->GetStreamData(stream); + } else { + return synthetic_->GetStreamData(stream); + } } std::vector API::GetStreamDatas(const Stream &stream) { - return synthetic_->GetStreamDatas(stream); + if (correspondence_ && correspondence_->Watch(stream)) { + return correspondence_->GetStreamDatas(stream); + } else { + return synthetic_->GetStreamDatas(stream); + } } void API::EnableMotionDatas(std::size_t max_size) { + if (correspondence_) return; // not cache them device_->EnableMotionDatas(max_size); } std::vector API::GetMotionDatas() { - std::vector datas; - for (auto &&data : device_->GetMotionDatas()) { - datas.push_back({data.imu}); + if (correspondence_) { + return correspondence_->GetMotionDatas(); + } else { + std::vector datas; + for (auto &&data : device_->GetMotionDatas()) { + datas.push_back({data.imu}); + } + return datas; + } +} + +void API::EnableTimestampCorrespondence(const Stream &stream) { + if (correspondence_ == nullptr) { + correspondence_.reset(new Correspondence(device_, stream)); + { + device_->DisableMotionDatas(); + if (callback_) { + correspondence_->SetMotionCallback(callback_); + callback_ = nullptr; + } + } + using namespace std::placeholders; // NOLINT + device_->SetMotionCallback( + std::bind(&Correspondence::OnMotionDataCallback, + correspondence_.get(), _1), + true); + synthetic_->SetStreamDataListener( + std::bind(&Correspondence::OnStreamDataCallback, + correspondence_.get(), _1, _2)); } - return datas; } void API::EnablePlugin(const std::string &path) { diff --git a/src/mynteye/api/camodocal/include/camodocal/gpl/gpl.h b/src/mynteye/api/camodocal/include/camodocal/gpl/gpl.h index ecfaeb0..b5b555c 100644 --- a/src/mynteye/api/camodocal/include/camodocal/gpl/gpl.h +++ b/src/mynteye/api/camodocal/include/camodocal/gpl/gpl.h @@ -80,10 +80,6 @@ const T randomNormal(const T &sigma) { return x1 * w * sigma; } -unsigned long long 
timeInMicroseconds(void); // NOLINT - -double timeInSeconds(void); - void colorDepthImage( cv::Mat &imgDepth, cv::Mat &imgColoredDepth, float minRange, // NOLINT float maxRange); @@ -107,8 +103,6 @@ void LLtoUTM( void UTMtoLL( double utmNorthing, double utmEasting, const std::string &utmZone, // NOLINT double &latitude, double &longitude); // NOLINT - -long int timestampDiff(uint64_t t1, uint64_t t2); // NOLINT } #endif // SRC_MYNTEYE_API_CAMODOCAL_INCLUDE_CAMODOCAL_GPL_GPL_H_ diff --git a/src/mynteye/api/camodocal/src/camera_models/EquidistantCamera.cc b/src/mynteye/api/camodocal/src/camera_models/EquidistantCamera.cc index 5e9ce3b..34ff08f 100644 --- a/src/mynteye/api/camodocal/src/camera_models/EquidistantCamera.cc +++ b/src/mynteye/api/camodocal/src/camera_models/EquidistantCamera.cc @@ -276,7 +276,8 @@ void EquidistantCamera::estimateIntrinsics( double f0 = 0.0; for (size_t i = 0; i < imagePoints.size(); ++i) { std::vector center(boardSize.height); - double radius[boardSize.height]; // NOLINT + int arrayLength = boardSize.height; + double *radius = new double[arrayLength]; for (int r = 0; r < boardSize.height; ++r) { std::vector circle; for (int c = 0; c < boardSize.width; ++c) { @@ -320,6 +321,7 @@ void EquidistantCamera::estimateIntrinsics( } } } + delete[] radius; } if (f0 <= 0.0 && minReprojErr >= std::numeric_limits::max()) { diff --git a/src/mynteye/api/camodocal/src/gpl/gpl.cc b/src/mynteye/api/camodocal/src/gpl/gpl.cc index a318c69..bfeacb7 100644 --- a/src/mynteye/api/camodocal/src/gpl/gpl.cc +++ b/src/mynteye/api/camodocal/src/gpl/gpl.cc @@ -16,6 +16,7 @@ #include #ifdef _WIN32 #include +#define M_PI (3.14159265358979323846) #else #include #endif @@ -109,69 +110,8 @@ getFILETIMEoffset() { return (t); } -int clock_gettime(int X, struct timespec *tp) { - LARGE_INTEGER t; - FILETIME f; - double microseconds; - static LARGE_INTEGER offset; - static double frequencyToMicroseconds; - static int initialized = 0; - static BOOL usePerformanceCounter = 0; - - if (!initialized) { - LARGE_INTEGER performanceFrequency; - initialized = 1; - usePerformanceCounter = QueryPerformanceFrequency(&performanceFrequency); - if (usePerformanceCounter) { - QueryPerformanceCounter(&offset); - frequencyToMicroseconds = - static_cast(performanceFrequency.QuadPart / 1000000.); - } else { - offset = getFILETIMEoffset(); - frequencyToMicroseconds = 10.; - } - } - if (usePerformanceCounter) { - QueryPerformanceCounter(&t); - } else { - GetSystemTimeAsFileTime(&f); - t.QuadPart = f.dwHighDateTime; - t.QuadPart <<= 32; - t.QuadPart |= f.dwLowDateTime; - } - - t.QuadPart -= offset.QuadPart; - microseconds = static_cast(t.QuadPart / frequencyToMicroseconds); - t.QuadPart = microseconds; - tp->tv_sec = t.QuadPart / 1000000; - tp->tv_nsec = (t.QuadPart % 1000000) * 1000; - return (0); -} #endif -unsigned long long timeInMicroseconds(void) { // NOLINT - struct timespec tp; -#ifdef __APPLE__ - tp = orwl_gettime(); -#else - clock_gettime(CLOCK_REALTIME, &tp); -#endif - - return tp.tv_sec * 1000000 + tp.tv_nsec / 1000; -} - -double timeInSeconds(void) { - struct timespec tp; -#ifdef __APPLE__ - tp = orwl_gettime(); -#else - clock_gettime(CLOCK_REALTIME, &tp); -#endif - - return static_cast(tp.tv_sec) + - static_cast(tp.tv_nsec) / 1000000000.0; -} - float colormapAutumn[128][3] = { {1.0f, 0.f, 0.f}, {1.0f, 0.007874f, 0.f}, {1.0f, 0.015748f, 0.f}, {1.0f, 0.023622f, 0.f}, {1.0f, 0.031496f, 0.f}, {1.0f, 0.03937f, 0.f}, @@ -745,23 +685,4 @@ void UTMtoLL( longitude = LongOrigin + longitude / M_PI * 180.0; } -long int 
timestampDiff(uint64_t t1, uint64_t t2) { // NOLINT - if (t2 > t1) { - uint64_t d = t2 - t1; - - if (d > std::numeric_limits::max()) { // NOLINT - return std::numeric_limits::max(); // NOLINT - } else { - return d; - } - } else { - uint64_t d = t1 - t2; - - if (d > std::numeric_limits::max()) { // NOLINT - return std::numeric_limits::min(); // NOLINT - } else { - return -static_cast(d); // NOLINT - } - } -} } diff --git a/src/mynteye/api/correspondence.cc b/src/mynteye/api/correspondence.cc new file mode 100644 index 0000000..d4af6cd --- /dev/null +++ b/src/mynteye/api/correspondence.cc @@ -0,0 +1,277 @@ +// Copyright 2018 Slightech Co., Ltd. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +#include "mynteye/api/correspondence.h" + +#include "mynteye/device/device.h" +#include "mynteye/logger.h" + +MYNTEYE_BEGIN_NAMESPACE + +Correspondence::Correspondence(const std::shared_ptr &device, + const Stream &stream) + : device_(device), stream_(stream), ready_image_timestamp_(0) { + VLOG(2) << __func__; + // set matched stream to be watched too, + // aim to make stream and matched stream correspondence + if (stream_ == Stream::LEFT) { + stream_match_ = Stream::RIGHT; + } else if (stream_ == Stream::RIGHT) { + stream_match_ = Stream::LEFT; + } else if (stream_ == Stream::LEFT_RECTIFIED) { + stream_match_ = Stream::RIGHT_RECTIFIED; + } else if (stream_ == Stream::RIGHT_RECTIFIED) { + stream_match_ = Stream::LEFT_RECTIFIED; + } else { + stream_match_ = Stream::LAST; + } + EnableStreamMatch(); + + auto framerate = device_->GetOptionValue(Option::FRAME_RATE); + stream_interval_us_ = 1000000.f / framerate; + stream_interval_us_half_ = 0.5f * stream_interval_us_; + VLOG(2) << "framerate: " << framerate + << ", interval_us: " << stream_interval_us_; +} + +Correspondence::~Correspondence() { + VLOG(2) << __func__; +} + +bool Correspondence::Watch(const Stream &stream) const { + if (stream == stream_) return true; + if (stream_match_enabled_ && stream == stream_match_) return true; + return false; +} + +void Correspondence::OnStreamDataCallback( + const Stream &stream, const api::StreamData &data) { + if (!Watch(stream)) { + return; // unwatched + } + // LOG(INFO) << __func__ << ", " << stream + // << ", id: " << data.frame_id << ", stamp: " << data.img->timestamp; + // if (data.img == nullptr) { + // LOG(FATAL) << "stream data image info is empty!"; + // } + std::lock_guard _(mtx_stream_datas_); + if (stream == stream_) { + stream_datas_.push_back(std::move(data)); + } else if (/*stream_match_enabled_ && */stream == stream_match_) { + stream_datas_match_.push_back(std::move(data)); + } + NotifyStreamDataReady(); +} + +void Correspondence::OnMotionDataCallback(const device::MotionData &data) { + // LOG(INFO) << __func__ << ", id: " << data.imu->frame_id + // << ", stamp: " << data.imu->timestamp; + { + std::lock_guard _(mtx_motion_datas_); + motion_datas_.push_back(data); + } + if (motion_callback_) { + motion_callback_({data.imu}); + } +} + +void 
Correspondence::SetMotionCallback(API::motion_callback_t callback) { + // LOG(INFO) << __func__; + motion_callback_ = callback; +} + +void Correspondence::WaitForStreams() { + if (stream_ == Stream::LEFT || stream_ == Stream::RIGHT) { + // Wait native stream ready, avoid get these stream empty + // Todo: determine native stream according to device + WaitStreamDataReady(); + return; + } + device_->WaitForStreams(); +} + +api::StreamData Correspondence::GetStreamData(const Stream &stream) { + auto datas = GetStreamDatas(stream); + return datas.empty() ? api::StreamData{} : datas.back(); +} + +std::vector Correspondence::GetStreamDatas( + const Stream &stream) { + if (!Watch(stream)) { + LOG(ERROR) << "Get unwatched stream data of " << stream; + return {}; + } + + std::lock_guard _(mtx_stream_datas_); + static std::uint32_t stream_count_ = 0; + static std::uint32_t stream_match_count_ = 0; + + if (stream == stream_) { + auto datas = GetReadyStreamData(false); + + if (stream_count_ < 10) { + ++stream_count_; + } else { + // get stream, but not get matched stream, disable it + if (stream_match_count_ == 0) { + DisableStreamMatch(); + } + } + + return datas; + } else if (/*stream_match_enabled_ && */stream == stream_match_) { + auto datas = GetReadyStreamData(true); + + if (stream_match_count_ < 10) { + ++stream_match_count_; + } + + return datas; + } + + return {}; +} + +std::vector Correspondence::GetMotionDatas() { + return GetReadyMotionDatas(); +} + +void Correspondence::EnableStreamMatch() { + stream_match_enabled_ = true; +} + +void Correspondence::DisableStreamMatch() { + stream_match_enabled_ = false; + stream_datas_match_.clear(); +} + +void Correspondence::WaitStreamDataReady() { + std::unique_lock lock(mtx_stream_datas_); + auto ready = std::bind(&Correspondence::IsStreamDataReady, this); + bool ok = cond_stream_datas_.wait_for(lock, std::chrono::seconds(3), ready); + if (!ok) { + LOG(FATAL) << "Timeout waiting for key frames. Please use USB 3.0, and not " + "in virtual machine."; + } +} + +void Correspondence::NotifyStreamDataReady() { + cond_stream_datas_.notify_one(); +} + +bool Correspondence::IsStreamDataReady() { + if (stream_datas_.empty()) return false; + if (stream_match_enabled_) { + if (stream_datas_match_.empty()) return false; + } + if (motion_datas_.empty()) return false; + + std::uint64_t img_stamp = 0; + std::uint64_t img_macth_stamp = 0; + { + std::lock_guard _(mtx_stream_datas_); + auto data = stream_datas_.front(); + if (data.img == nullptr) { + LOG(FATAL) << "stream data image info is empty!"; + } + img_stamp = data.img->timestamp; + + if (stream_match_enabled_) { + img_macth_stamp = stream_datas_match_.front().img->timestamp; + } + } + std::uint64_t imu_stamp = 0; + { + std::lock_guard _(mtx_motion_datas_); + auto data = motion_datas_.back(); + if (data.imu == nullptr) { + LOG(FATAL) << "motion data imu info is empty!"; + } + imu_stamp = data.imu->timestamp; + } + + if (stream_match_enabled_) { + return img_stamp + stream_interval_us_half_ < imu_stamp + && img_macth_stamp + stream_interval_us_half_ < imu_stamp; + } else { + return img_stamp + stream_interval_us_half_ < imu_stamp; + } +} + +std::vector Correspondence::GetReadyStreamData(bool matched) { + std::uint64_t imu_stamp = 0; + { + std::lock_guard _(mtx_motion_datas_); + if (motion_datas_.empty()) { + LOG(WARNING) << "motion data is unexpected empty!" + "\n\n Please ensure Start(Source::MOTION_TRACKING) " + "or Start(Source::ALL)\n"; + std::lock_guard _(mtx_stream_datas_); + return std::move(matched ? 
stream_datas_match_ : stream_datas_); + } + imu_stamp = motion_datas_.back().imu->timestamp; + } + std::lock_guard _(mtx_stream_datas_); + + std::vector &datas = + matched ? stream_datas_match_ : stream_datas_; + + // LOG(INFO) << "datas.size: " << datas.size() << ", matched: " << matched; + std::vector result; + + for (auto it = datas.begin(); it != datas.end(); ) { + // LOG(INFO) << "data.id: " << it->frame_id; + auto img_stamp = it->img->timestamp; + if (img_stamp + stream_interval_us_half_ < imu_stamp) { + // LOG(INFO) << "data.id: " << it->frame_id << " > result"; + result.push_back(std::move(*it)); + it = datas.erase(it); + } else { + // ++it; + break; + } + } + // LOG(INFO) << "datas.size: " << datas.size() + // << ", result.size: " << result.size(); + + if (!matched && !result.empty()) { + // last match stream timestamp + ready_image_timestamp_ = result.back().img->timestamp; + } + return result; +} + +std::vector Correspondence::GetReadyMotionDatas() { + if (ready_image_timestamp_ == 0) return {}; + std::lock_guard _(mtx_motion_datas_); + + std::vector result; + + auto &&datas = motion_datas_; + for (auto it = datas.begin(); it != datas.end(); ) { + auto imu_stamp = it->imu->timestamp; + if (imu_stamp < ready_image_timestamp_ - stream_interval_us_half_) { + it = datas.erase(it); + } else if (imu_stamp > ready_image_timestamp_ + stream_interval_us_half_) { + // ++it; + break; + } else { + result.push_back({it->imu}); + it = datas.erase(it); + } + } + + return result; +} + +MYNTEYE_END_NAMESPACE diff --git a/src/mynteye/api/correspondence.h b/src/mynteye/api/correspondence.h new file mode 100644 index 0000000..df822e0 --- /dev/null +++ b/src/mynteye/api/correspondence.h @@ -0,0 +1,80 @@ +// Copyright 2018 Slightech Co., Ltd. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+#ifndef MYNTEYE_API_CONFIG_H_ +#define MYNTEYE_API_CONFIG_H_ +#pragma once + +#include +#include +#include +#include +#include + +#include "mynteye/api/api.h" +#include "mynteye/device/callbacks.h" + +MYNTEYE_BEGIN_NAMESPACE + +class Correspondence { + public: + Correspondence(const std::shared_ptr &device, const Stream &stream); + ~Correspondence(); + + bool Watch(const Stream &stream) const; + + void OnStreamDataCallback(const Stream &stream, const api::StreamData &data); + void OnMotionDataCallback(const device::MotionData &data); + + void SetMotionCallback(API::motion_callback_t callback); + + void WaitForStreams(); + api::StreamData GetStreamData(const Stream &stream); + std::vector GetStreamDatas(const Stream &stream); + std::vector GetMotionDatas(); + + private: + void EnableStreamMatch(); + void DisableStreamMatch(); + + void WaitStreamDataReady(); + void NotifyStreamDataReady(); + + bool IsStreamDataReady(); + + std::vector GetReadyStreamData(bool matched); + std::vector GetReadyMotionDatas(); + + std::shared_ptr device_; + Stream stream_; + Stream stream_match_; + std::atomic_bool stream_match_enabled_; + + float stream_interval_us_; + float stream_interval_us_half_; + + API::motion_callback_t motion_callback_; + std::vector motion_datas_; + std::recursive_mutex mtx_motion_datas_; + + std::vector stream_datas_; + std::vector stream_datas_match_; + std::recursive_mutex mtx_stream_datas_; + std::condition_variable_any cond_stream_datas_; + + std::uint64_t ready_image_timestamp_; +}; + +MYNTEYE_END_NAMESPACE + +#endif // MYNTEYE_API_CONFIG_H_ diff --git a/src/mynteye/api/data_tools.cc b/src/mynteye/api/data_tools.cc new file mode 100644 index 0000000..73f5fd7 --- /dev/null +++ b/src/mynteye/api/data_tools.cc @@ -0,0 +1,78 @@ +// Copyright 2018 Slightech Co., Ltd. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include +#include +#include +#include "mynteye/api/data_tools.h" +#include "mynteye/logger.h" + +MYNTEYE_BEGIN_NAMESPACE + +cv::Mat frame2mat(const std::shared_ptr &frame) { + if (frame->format() == Format::YUYV) { + cv::Mat img(frame->height(), frame->width(), CV_8UC2, frame->data()); + cv::cvtColor(img, img, cv::COLOR_YUV2BGR_YUY2); + return img; + } else if (frame->format() == Format::BGR888) { + cv::Mat img(frame->height(), frame->width(), CV_8UC3, frame->data()); + return img; + } else { // Format::GRAY + return cv::Mat(frame->height(), frame->width(), CV_8UC1, frame->data()); + } +} + +api::StreamData data2api(const device::StreamData &data) { + return {data.img, frame2mat(data.frame), data.frame, data.frame_id}; +} + +// ObjMat/ObjMat2 > api::StreamData + +api::StreamData obj_data_first(const ObjMat2 *obj) { + return {obj->first_data, obj->first, nullptr, obj->first_id}; +} + +api::StreamData obj_data_second(const ObjMat2 *obj) { + return {obj->second_data, obj->second, nullptr, obj->second_id}; +} + +api::StreamData obj_data(const ObjMat *obj) { + return {obj->data, obj->value, nullptr, obj->id}; +} + +api::StreamData obj_data_first(const std::shared_ptr &obj) { + return {obj->first_data, obj->first, nullptr, obj->first_id}; +} + +api::StreamData obj_data_second(const std::shared_ptr &obj) { + return {obj->second_data, obj->second, nullptr, obj->second_id}; +} + +api::StreamData obj_data(const std::shared_ptr &obj) { + return {obj->data, obj->value, nullptr, obj->id}; +} + +// api::StreamData > ObjMat/ObjMat2 + +ObjMat data_obj(const api::StreamData &data) { + return ObjMat{data.frame, data.frame_id, data.img}; +} + +ObjMat2 data_obj(const api::StreamData &first, const api::StreamData &second) { + return ObjMat2{ + first.frame, first.frame_id, first.img, + second.frame, second.frame_id, second.img}; +} + +MYNTEYE_END_NAMESPACE diff --git a/src/mynteye/api/data_tools.h b/src/mynteye/api/data_tools.h new file mode 100644 index 0000000..845919b --- /dev/null +++ b/src/mynteye/api/data_tools.h @@ -0,0 +1,33 @@ +// Copyright 2018 Slightech Co., Ltd. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+#ifndef MYNTEYE_API_DATA_TOOLS_H_ +#define MYNTEYE_API_DATA_TOOLS_H_ +#pragma once +#include +#include "mynteye/api/object.h" +#include "mynteye/api/api.h" +#include "mynteye/device/device.h" +MYNTEYE_BEGIN_NAMESPACE +cv::Mat frame2mat(const std::shared_ptr &frame); +api::StreamData data2api(const device::StreamData &data); +api::StreamData obj_data_first(const ObjMat2 *obj); +api::StreamData obj_data_second(const ObjMat2 *obj); +api::StreamData obj_data(const ObjMat *obj); +api::StreamData obj_data_first(const std::shared_ptr &obj); +api::StreamData obj_data_second(const std::shared_ptr &obj); +api::StreamData obj_data(const std::shared_ptr &obj); +ObjMat data_obj(const api::StreamData &data); +ObjMat2 data_obj(const api::StreamData &first, const api::StreamData &second); +MYNTEYE_END_NAMESPACE +#endif // MYNTEYE_API_DATA_TOOLS_H_ diff --git a/src/mynteye/api/processor.cc b/src/mynteye/api/processor.cc index 8d65b35..1f30e4c 100644 --- a/src/mynteye/api/processor.cc +++ b/src/mynteye/api/processor.cc @@ -19,6 +19,7 @@ #include "mynteye/logger.h" #include "mynteye/util/strings.h" #include "mynteye/util/times.h" +#include "mynteye/api/data_tools.h" MYNTEYE_BEGIN_NAMESPACE @@ -245,6 +246,74 @@ void Processor::Run() { VLOG(2) << Name() << " thread end"; } +api::StreamData Processor::GetStreamData(const Stream &stream) { + auto sum = getStreamsSum(); + auto &&out = GetOutput(); + Synthetic::Mode enable_mode = Synthetic::MODE_OFF; + auto streams = getTargetStreams(); + for (auto it_s : streams) { + if (it_s.stream == stream) { + enable_mode = it_s.enabled_mode_; + break; + } + } + if (enable_mode == Synthetic::MODE_ON) { + if (sum == 1) { + if (out != nullptr) { + auto &&output = Object::Cast(out); + if (output != nullptr) { + return obj_data(output); + } + VLOG(2) << "Rectify not ready now"; + } + } else if (sum == 2) { + static std::shared_ptr output = nullptr; + if (out != nullptr) { + output = Object::Cast(out); + } + auto streams = getTargetStreams(); + if (output != nullptr) { + int num = 0; + for (auto it : streams) { + if (it.stream == stream) { + if (num == 1) { + return obj_data_first(output); + } else { + return obj_data_second(output); + } + } + num++; + } + } + VLOG(2) << "Rectify not ready now"; + } else { + LOG(ERROR) << "error: invalid sum!"; + } + return {}; // frame.empty() == true + } + LOG(ERROR) << "Failed to get stream data of " << stream + << ", unsupported or disabled"; + return {}; // frame.empty() == true +} + +std::vector Processor::GetStreamDatas(const Stream &stream) { + Synthetic::Mode enable_mode = Synthetic::MODE_OFF; + auto streams = getTargetStreams(); + for (auto it_s : streams) { + if (it_s.stream == stream) { + enable_mode = it_s.enabled_mode_; + break; + } + } + if (enable_mode == Synthetic::MODE_ON) { + return {GetStreamData(stream)}; + } else { + LOG(ERROR) << "Failed to get stream data of " << stream + << ", unsupported or disabled"; + } + return {}; +} + void Processor::SetIdle(bool idle) { std::lock_guard lk(mtx_state_); idle_ = idle; diff --git a/src/mynteye/api/processor.h b/src/mynteye/api/processor.h index bf36792..6ac1644 100644 --- a/src/mynteye/api/processor.h +++ b/src/mynteye/api/processor.h @@ -66,6 +66,10 @@ class Processor : /** Returns dropped or not. */ bool Process(const Object &in); + virtual api::StreamData GetStreamData(const Stream &stream); + + virtual std::vector GetStreamDatas(const Stream &stream); + /** * Returns the last output. * @note Returns null if not output now. 
diff --git a/src/mynteye/api/processor/root_camera_processor.cc b/src/mynteye/api/processor/root_camera_processor.cc index d0e396a..a802b3a 100644 --- a/src/mynteye/api/processor/root_camera_processor.cc +++ b/src/mynteye/api/processor/root_camera_processor.cc @@ -14,15 +14,21 @@ #include "mynteye/api/processor/root_camera_processor.h" #include +#include #include #include #include "mynteye/logger.h" +#include "mynteye/api/synthetic.h" +#include "mynteye/device/device.h" +#include "mynteye/api/data_tools.h" MYNTEYE_BEGIN_NAMESPACE const char RootProcessor::NAME[] = "RootProcessor"; -RootProcessor::RootProcessor(std::int32_t proc_period) - : Processor(std::move(proc_period)) {} +RootProcessor::RootProcessor(std::shared_ptr device, + std::int32_t proc_period) + : Processor(std::move(proc_period)), + device_(device) {} RootProcessor::~RootProcessor() { VLOG(2) << __func__; } @@ -31,13 +37,114 @@ std::string RootProcessor::Name() { return NAME; } -Object *RootProcessor::OnCreateOutput() { +s1s2Processor::s1s2Processor(std::shared_ptr device, + std::int32_t proc_period) + : RootProcessor(device, std::move(proc_period)) {} +s1s2Processor::~s1s2Processor() { + VLOG(2) << __func__; +} + +Object *s1s2Processor::OnCreateOutput() { return new ObjMat2(); } -bool RootProcessor::OnProcess( +bool s1s2Processor::OnProcess( Object *const in, Object *const out, std::shared_ptr const parent) { + const ObjMat2 *input = Object::Cast(in); + ObjMat2 *output = Object::Cast(out); + output->second = input->second; + output->first = input->first; + output->first_id = input->first_id; + output->first_data = input->first_data; + output->second_id = input->second_id; + output->second_data = input->second_data; MYNTEYE_UNUSED(parent) return true; } + +void s1s2Processor::ProcessNativeStream( + const Stream &stream, const api::StreamData &data) { + std::unique_lock lk(mtx_left_right_ready_); + static api::StreamData left_data, right_data; + if (stream == Stream::LEFT) { + left_data = data; + } else if (stream == Stream::RIGHT) { + right_data = data; + } + if (left_data.img && right_data.img && + left_data.img->frame_id == right_data.img->frame_id) { + Process(data_obj(left_data, right_data)); + } + return; +} + +void s1s2Processor::StartVideoStreaming() { + Activate(); + auto streams = getTargetStreams(); + for (unsigned int j =0; j< streams.size(); j++) { + auto stream = streams[j].stream; + auto callback = streams[j].stream_callback; + target_streams_[j].enabled_mode_ = Synthetic::MODE_ON; + device_->SetStreamCallback( + stream, + [this, stream, callback](const device::StreamData &data) { + auto &&stream_data = data2api(data); + ProcessNativeStream(stream, stream_data); + // Need mutex if set callback after start + if (callback) { + callback(stream_data); + } + }, + true); + } + device_->Start(Source::VIDEO_STREAMING); +} + +void s1s2Processor::StopVideoStreaming() { + Deactivate(); + auto streams = getTargetStreams(); + for (unsigned int j =0; j< streams.size(); j++) { + auto stream = streams[j].stream; + target_streams_[j].enabled_mode_ = Synthetic::MODE_OFF; + device_->SetStreamCallback(stream, nullptr); + } + device_->Stop(Source::VIDEO_STREAMING); +} +api::StreamData s1s2Processor::GetStreamData(const Stream &stream) { + Synthetic::Mode enable_mode = Synthetic::MODE_OFF; + auto streams = getTargetStreams(); + for (auto it_s : streams) { + if (it_s.stream == stream) { + enable_mode = it_s.enabled_mode_; + break; + } + } + if (enable_mode == Synthetic::MODE_ON) { + return data2api(device_->GetStreamData(stream)); + } 
+ LOG(ERROR) << "Failed to get device stream data of " << stream + << ", unsupported or disabled"; + LOG(ERROR) << "Make sure you have enable " << stream; + return {}; +} + +std::vector s1s2Processor::GetStreamDatas( + const Stream &stream) { + Synthetic::Mode enable_mode = Synthetic::MODE_OFF; + auto streams = getTargetStreams(); + for (auto it_s : streams) { + if (it_s.stream == stream) { + enable_mode = it_s.enabled_mode_; + break; + } + } + if (enable_mode == Synthetic::MODE_ON) { + std::vector datas; + for (auto &&data : device_->GetStreamDatas(stream)) { + datas.push_back(data2api(data)); + } + return datas; + } +} + MYNTEYE_END_NAMESPACE diff --git a/src/mynteye/api/processor/root_camera_processor.h b/src/mynteye/api/processor/root_camera_processor.h index eaa3b6d..f7e1ed7 100644 --- a/src/mynteye/api/processor/root_camera_processor.h +++ b/src/mynteye/api/processor/root_camera_processor.h @@ -28,16 +28,42 @@ class RootProcessor : public Processor { public: static const char NAME[]; - explicit RootProcessor(std::int32_t proc_period = 0); + explicit RootProcessor(std::shared_ptr device, + std::int32_t proc_period = 0); virtual ~RootProcessor(); - std::string Name() override; + virtual std::string Name(); + virtual void StartVideoStreaming() = 0; + virtual void StopVideoStreaming() = 0; + virtual api::StreamData GetStreamData(const Stream &stream) = 0; + virtual std::vector GetStreamDatas(const Stream &stream) = 0; // NOLINT + protected: + virtual Object *OnCreateOutput() = 0; + virtual bool OnProcess( + Object *const in, Object *const out, + std::shared_ptr const parent) = 0; + std::shared_ptr device_; +}; + +class s1s2Processor : public RootProcessor { + public: + explicit s1s2Processor(std::shared_ptr device, + std::int32_t proc_period = 0); + virtual ~s1s2Processor(); + void StartVideoStreaming(); + void StopVideoStreaming(); + api::StreamData GetStreamData(const Stream &stream) override; + std::vector GetStreamDatas(const Stream &stream) override; // NOLINT protected: Object *OnCreateOutput() override; bool OnProcess( Object *const in, Object *const out, std::shared_ptr const parent) override; + private: + void ProcessNativeStream( + const Stream &stream, const api::StreamData &data); + std::mutex mtx_left_right_ready_; }; MYNTEYE_END_NAMESPACE diff --git a/src/mynteye/api/synthetic.cc b/src/mynteye/api/synthetic.cc index f2a6998..b9a5401 100644 --- a/src/mynteye/api/synthetic.cc +++ b/src/mynteye/api/synthetic.cc @@ -36,6 +36,7 @@ #include "mynteye/api/processor/rectify_processor.h" #endif #include "mynteye/device/device.h" +#include "mynteye/api/data_tools.h" #define RECTIFY_PROC_PERIOD 0 #define DISPARITY_PROC_PERIOD 0 @@ -46,36 +47,6 @@ MYNTEYE_BEGIN_NAMESPACE -namespace { - -cv::Mat frame2mat(const std::shared_ptr &frame) { - if (frame->format() == Format::YUYV) { - cv::Mat img(frame->height(), frame->width(), CV_8UC2, frame->data()); - cv::cvtColor(img, img, cv::COLOR_YUV2BGR_YUY2); - return img; - } else if (frame->format() == Format::BGR888) { - cv::Mat img(frame->height(), frame->width(), CV_8UC3, frame->data()); - return img; - } else { // Format::GRAY - return cv::Mat(frame->height(), frame->width(), CV_8UC1, frame->data()); - } -} - -api::StreamData data2api(const device::StreamData &data) { - return {data.img, frame2mat(data.frame), data.frame, data.frame_id}; -} - -void process_childs( - const std::shared_ptr &proc, const std::string &name, - const Object &obj) { - auto &&processor = find_processor(proc, name); - for (auto child : processor->GetChilds()) { - 
child->Process(obj); - } -} - -} // namespace - void Synthetic::InitCalibInfo() { if (calib_model_ == CalibrationModel::PINHOLE) { LOG(INFO) << "camera calib model: pinhole"; @@ -105,12 +76,12 @@ Synthetic::Synthetic(API *api, CalibrationModel calib_model) : api_(api), plugin_(nullptr), calib_model_(calib_model), - calib_default_tag_(false) { + calib_default_tag_(false), + stream_data_listener_(nullptr) { VLOG(2) << __func__; CHECK_NOTNULL(api_); InitCalibInfo(); InitProcessors(); - InitStreamSupports(); } Synthetic::~Synthetic() { @@ -121,6 +92,10 @@ Synthetic::~Synthetic() { } } +void Synthetic::SetStreamDataListener(stream_data_listener_t listener) { + stream_data_listener_ = listener; +} + void Synthetic::NotifyImageParamsChanged() { if (!calib_default_tag_) { intr_left_ = api_->GetIntrinsicsBase(Stream::LEFT); @@ -128,19 +103,18 @@ void Synthetic::NotifyImageParamsChanged() { extr_ = std::make_shared( api_->GetExtrinsics(Stream::LEFT, Stream::RIGHT)); } - if (calib_model_ == CalibrationModel::PINHOLE) { - auto &&processor = find_processor(processor_); - if (processor) processor->ReloadImageParams(intr_left_, intr_right_, extr_); + auto processor = getProcessorWithStream(Stream::LEFT_RECTIFIED); + + if (processor && calib_model_ == CalibrationModel::PINHOLE) { + auto proc = static_cast(&(*processor)); + proc->ReloadImageParams(intr_left_, intr_right_, extr_); #ifdef WITH_CAM_MODELS - } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) { - auto &&processor = find_processor(processor_); - if (processor) processor->ReloadImageParams(intr_left_, intr_right_, extr_); + } else if (processor && calib_model_ == CalibrationModel::KANNALA_BRANDT) { + auto proc = static_cast(&(*processor)); + proc->ReloadImageParams(intr_left_, intr_right_, extr_); #endif } else { - LOG(ERROR) << "Unknow calib model type in device: " - << calib_model_ << ", use default pinhole model"; - auto &&processor = find_processor(processor_); - if (processor) processor->ReloadImageParams(intr_left_, intr_right_, extr_); + LOG(ERROR) << "Unknow calib model type in device" << std::endl; } } @@ -195,17 +169,29 @@ bool Synthetic::checkControlDateWithStream(const Stream& stream) const { return false; } -void Synthetic::EnableStreamData(const Stream &stream) { +bool Synthetic::Supports(const Stream &stream) const { + return checkControlDateWithStream(stream); +} + +void Synthetic::EnableStreamData( + const Stream &stream, stream_switch_callback_t callback, + bool try_tag) { // Activate processors of synthetic stream auto processor = getProcessorWithStream(stream); iterate_processor_CtoP_before(processor, - [](std::shared_ptr proce){ + [callback, try_tag](std::shared_ptr proce){ + if (proce->Name() == "RootProcessor") { + return; + } auto streams = proce->getTargetStreams(); int act_tag = 0; for (unsigned int i = 0; i < proce->getStreamsSum() ; i++) { - if (proce->target_streams_[i].enabled_mode_ == MODE_LAST) { - act_tag++; - proce->target_streams_[i].enabled_mode_ = MODE_SYNTHETIC; + if (proce->target_streams_[i].enabled_mode_ == MODE_OFF) { + callback(proce->target_streams_[i].stream); + if (!try_tag) { + act_tag++; + proce->target_streams_[i].enabled_mode_ = MODE_ON; + } } } if (act_tag > 0 && !proce->IsActivated()) { @@ -214,30 +200,24 @@ void Synthetic::EnableStreamData(const Stream &stream) { } }); } - - -bool Synthetic::Supports(const Stream &stream) const { - return checkControlDateWithStream(stream); -} - -Synthetic::mode_t Synthetic::SupportsMode(const Stream &stream) const { - if 
(checkControlDateWithStream(stream)) { - auto data = getControlDateWithStream(stream); - return data.support_mode_; - } - return MODE_LAST; -} - -void Synthetic::DisableStreamData(const Stream &stream) { +void Synthetic::DisableStreamData( + const Stream &stream, stream_switch_callback_t callback, + bool try_tag) { auto processor = getProcessorWithStream(stream); iterate_processor_PtoC_before(processor, - [](std::shared_ptr proce){ + [callback, try_tag](std::shared_ptr proce){ + if (proce->Name() == "RootProcessor") { + return; + } auto streams = proce->getTargetStreams(); int act_tag = 0; for (unsigned int i = 0; i < proce->getStreamsSum() ; i++) { - if (proce->target_streams_[i].enabled_mode_ == MODE_SYNTHETIC) { - act_tag++; - proce->target_streams_[i].enabled_mode_ = MODE_LAST; + if (proce->target_streams_[i].enabled_mode_ == MODE_ON) { + callback(proce->target_streams_[i].stream); + if (!try_tag) { + act_tag++; + proce->target_streams_[i].enabled_mode_ = MODE_OFF; + } } } if (act_tag > 0 && proce->IsActivated()) { @@ -247,11 +227,24 @@ void Synthetic::DisableStreamData(const Stream &stream) { }); } +void Synthetic::EnableStreamData(const Stream &stream) { + EnableStreamData(stream, [](const Stream &stream){ + // std::cout << stream << "enabled in callback" << std::endl; + MYNTEYE_UNUSED(stream); + }, false); +} + +void Synthetic::DisableStreamData(const Stream &stream) { + DisableStreamData(stream, [](const Stream &stream){ + // std::cout << stream << "disabled in callback" << std::endl; + MYNTEYE_UNUSED(stream); + }, false); +} + bool Synthetic::IsStreamDataEnabled(const Stream &stream) const { if (checkControlDateWithStream(stream)) { auto data = getControlDateWithStream(stream); - return data.enabled_mode_ == MODE_SYNTHETIC || - data.enabled_mode_ == MODE_NATIVE; + return data.enabled_mode_ == MODE_ON; } return false; } @@ -279,42 +272,11 @@ bool Synthetic::HasStreamCallback(const Stream &stream) const { } void Synthetic::StartVideoStreaming() { - auto &&device = api_->device(); - for (unsigned int i =0; i< processors_.size(); i++) { - auto streams = processors_[i]->getTargetStreams(); - for (unsigned int j =0; j< streams.size(); j++) { - if (processors_[i]->target_streams_[j].support_mode_ == MODE_NATIVE) { - auto stream = processors_[i]->target_streams_[j].stream; - device->SetStreamCallback( - stream, - [this, stream](const device::StreamData &data) { - auto &&stream_data = data2api(data); - ProcessNativeStream(stream, stream_data); - // Need mutex if set callback after start - if (HasStreamCallback(stream)) { - auto data = getControlDateWithStream(stream); - data.stream_callback(stream_data); - } - }, - true); - } - } - } - device->Start(Source::VIDEO_STREAMING); + processor_->StartVideoStreaming(); } void Synthetic::StopVideoStreaming() { - auto &&device = api_->device(); - for (unsigned int i =0; i< processors_.size(); i++) { - auto streams = processors_[i]->getTargetStreams(); - for (unsigned int j =0; j< streams.size(); j++) { - if (processors_[i]->target_streams_[j].support_mode_ == MODE_NATIVE) { - auto stream = processors_[i]->target_streams_[j].stream; - device->SetStreamCallback(stream, nullptr); - } - } - } - device->Stop(Source::VIDEO_STREAMING); + processor_->StopVideoStreaming(); } void Synthetic::WaitForStreams() { @@ -322,75 +284,11 @@ void Synthetic::WaitForStreams() { } api::StreamData Synthetic::GetStreamData(const Stream &stream) { - auto &&mode = GetStreamEnabledMode(stream); - if (mode == MODE_NATIVE) { - auto &&device = api_->device(); - return 
data2api(device->GetStreamData(stream)); - } else if (mode == MODE_SYNTHETIC) { - auto processor = getProcessorWithStream(stream); - auto sum = processor->getStreamsSum(); - auto &&out = processor->GetOutput(); - static std::shared_ptr output = nullptr; - if (sum == 1) { - if (out != nullptr) { - auto &&output = Object::Cast(out); - if (output != nullptr) { - return {output->data, output->value, nullptr, output->id}; - } - VLOG(2) << "Rectify not ready now"; - } - } else if (sum == 2) { - if (out != nullptr) { - output = Object::Cast(out); - } - auto streams = processor->getTargetStreams(); - if (output != nullptr) { - int num = 0; - for (auto it : streams) { - if (it.stream == stream) { - if (num == 1) { - return {output->first_data, - output->first, - nullptr, - output->first_id}; - } else { - return {output->second_data, - output->second, - nullptr, - output->second_id}; - } - } - num++; - } - } - VLOG(2) << "Rectify not ready now"; - } else { - LOG(ERROR) << "error: invalid sum!"; - } - return {}; // frame.empty() == true - } else { - LOG(ERROR) << "Failed to get stream data of " << stream - << ", unsupported or disabled"; - return {}; // frame.empty() == true - } + return getProcessorWithStream(stream)->GetStreamData(stream); } std::vector Synthetic::GetStreamDatas(const Stream &stream) { - auto &&mode = GetStreamEnabledMode(stream); - if (mode == MODE_NATIVE) { - auto &&device = api_->device(); - std::vector datas; - for (auto &&data : device->GetStreamDatas(stream)) { - datas.push_back(data2api(data)); - } - return datas; - } else if (mode == MODE_SYNTHETIC) { - return {GetStreamData(stream)}; - } else { - LOG(ERROR) << "Failed to get stream data of " << stream - << ", unsupported or disabled"; - } - return {}; + return getProcessorWithStream(stream)->GetStreamDatas(stream); } void Synthetic::SetPlugin(std::shared_ptr plugin) { @@ -401,134 +299,87 @@ bool Synthetic::HasPlugin() const { return plugin_ != nullptr; } -void Synthetic::InitStreamSupports() { - auto &&device = api_->device(); - if (device->Supports(Stream::LEFT) && device->Supports(Stream::RIGHT)) { - auto processor = getProcessorWithStream(Stream::LEFT); - for (unsigned int i = 0; i< processor->target_streams_.size(); i++) { - if (processor->target_streams_[i].stream == Stream::LEFT) { - processor->target_streams_[i].support_mode_ = MODE_NATIVE; - } - if (processor->target_streams_[i].stream == Stream::RIGHT) { - processor->target_streams_[i].support_mode_ = MODE_NATIVE; - } - } - - std::vector stream_chain{ - Stream::LEFT_RECTIFIED, Stream::RIGHT_RECTIFIED, - Stream::DISPARITY, Stream::DISPARITY_NORMALIZED, - Stream::POINTS, Stream::DEPTH}; - for (auto &&stream : stream_chain) { - auto processor = getProcessorWithStream(stream); - for (unsigned int i = 0; i< processor->target_streams_.size(); i++) { - if (processor->target_streams_[i].stream == stream) { - if (device->Supports(stream)) { - processor->target_streams_[i].support_mode_ = MODE_NATIVE; - processor->target_streams_[i].enabled_mode_ = MODE_NATIVE; - } else { - processor->target_streams_[i].support_mode_ = MODE_SYNTHETIC; - } - } - } - } - } -} - Synthetic::mode_t Synthetic::GetStreamEnabledMode(const Stream &stream) const { if (checkControlDateWithStream(stream)) { auto data = getControlDateWithStream(stream); return data.enabled_mode_; } - return MODE_LAST; -} - -bool Synthetic::IsStreamEnabledNative(const Stream &stream) const { - return GetStreamEnabledMode(stream) == MODE_NATIVE; -} - -bool Synthetic::IsStreamEnabledSynthetic(const Stream &stream) const { 
- return GetStreamEnabledMode(stream) == MODE_SYNTHETIC; + return MODE_OFF; } void Synthetic::InitProcessors() { std::shared_ptr rectify_processor = nullptr; -#ifdef WITH_CAM_MODELS - std::shared_ptr rectify_processor_imp = nullptr; -#endif - cv::Mat Q; - if (calib_model_ == CalibrationModel::PINHOLE) { - auto &&rectify_processor_ocv = - std::make_shared(intr_left_, intr_right_, extr_, - RECTIFY_PROC_PERIOD); - rectify_processor = rectify_processor_ocv; - Q = rectify_processor_ocv->Q; -#ifdef WITH_CAM_MODELS - } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) { - rectify_processor_imp = - std::make_shared(intr_left_, intr_right_, extr_, - RECTIFY_PROC_PERIOD); - rectify_processor = rectify_processor_imp; -#endif - } else { - LOG(ERROR) << "Unknow calib model type in device: " - << calib_model_ << ", use default pinhole model"; - auto &&rectify_processor_ocv = - std::make_shared(intr_left_, intr_right_, extr_, - RECTIFY_PROC_PERIOD); - rectify_processor = rectify_processor_ocv; - } + std::shared_ptr points_processor = nullptr; + std::shared_ptr depth_processor = nullptr; + auto &&disparity_processor = std::make_shared(DisparityComputingMethod::SGBM, DISPARITY_PROC_PERIOD); auto &&disparitynormalized_processor = std::make_shared( DISPARITY_NORM_PROC_PERIOD); - std::shared_ptr points_processor = nullptr; - if (calib_model_ == CalibrationModel::PINHOLE) { + + auto root_processor = + std::make_shared(api_->device(), ROOT_PROC_PERIOD); + + if (calib_model_ == CalibrationModel::PINHOLE) { + // PINHOLE + auto &&rectify_processor_ocv = + std::make_shared(intr_left_, intr_right_, extr_, + RECTIFY_PROC_PERIOD); + rectify_processor = rectify_processor_ocv; points_processor = std::make_shared( - Q, POINTS_PROC_PERIOD); + rectify_processor_ocv->Q, POINTS_PROC_PERIOD); + depth_processor = std::make_shared(DEPTH_PROC_PERIOD); + + root_processor->AddChild(rectify_processor); + rectify_processor->AddChild(disparity_processor); + disparity_processor->AddChild(disparitynormalized_processor); + disparity_processor->AddChild(points_processor); + points_processor->AddChild(depth_processor); #ifdef WITH_CAM_MODELS } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) { + // KANNALA_BRANDT + auto rectify_processor_imp = + std::make_shared(intr_left_, intr_right_, extr_, + RECTIFY_PROC_PERIOD); + rectify_processor = rectify_processor_imp; points_processor = std::make_shared( rectify_processor_imp -> getCalibInfoPair(), POINTS_PROC_PERIOD); -#endif - } else { - points_processor = std::make_shared( - Q, POINTS_PROC_PERIOD); - } - std::shared_ptr depth_processor = nullptr; - if (calib_model_ == CalibrationModel::PINHOLE) { - depth_processor = std::make_shared(DEPTH_PROC_PERIOD); -#ifdef WITH_CAM_MODELS - } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) { depth_processor = std::make_shared( rectify_processor_imp -> getCalibInfoPair(), DEPTH_PROC_PERIOD); + + root_processor->AddChild(rectify_processor); + rectify_processor->AddChild(disparity_processor); + disparity_processor->AddChild(disparitynormalized_processor); + disparity_processor->AddChild(depth_processor); + depth_processor->AddChild(points_processor); #endif } else { - depth_processor = std::make_shared(DEPTH_PROC_PERIOD); + // UNKNOW + LOG(ERROR) << "Unknow calib model type in device: " + << calib_model_; + return; } - auto root_processor = - std::make_shared(ROOT_PROC_PERIOD); - root_processor->AddChild(rectify_processor); rectify_processor->addTargetStreams( - {Stream::LEFT_RECTIFIED, Mode::MODE_LAST, Mode::MODE_LAST, 
nullptr}); + {Stream::LEFT_RECTIFIED, Mode::MODE_OFF, nullptr}); rectify_processor->addTargetStreams( - {Stream::RIGHT_RECTIFIED, Mode::MODE_LAST, Mode::MODE_LAST, nullptr}); + {Stream::RIGHT_RECTIFIED, Mode::MODE_OFF, nullptr}); disparity_processor->addTargetStreams( - {Stream::DISPARITY, Mode::MODE_LAST, Mode::MODE_LAST, nullptr}); + {Stream::DISPARITY, Mode::MODE_OFF, nullptr}); disparitynormalized_processor->addTargetStreams( - {Stream::DISPARITY_NORMALIZED, Mode::MODE_LAST, Mode::MODE_LAST, nullptr}); + {Stream::DISPARITY_NORMALIZED, Mode::MODE_OFF, nullptr}); points_processor->addTargetStreams( - {Stream::POINTS, Mode::MODE_LAST, Mode::MODE_LAST, nullptr}); + {Stream::POINTS, Mode::MODE_OFF, nullptr}); depth_processor->addTargetStreams( - {Stream::DEPTH, Mode::MODE_LAST, Mode::MODE_LAST, nullptr}); + {Stream::DEPTH, Mode::MODE_OFF, nullptr}); root_processor->addTargetStreams( - {Stream::LEFT, Mode::MODE_NATIVE, Mode::MODE_NATIVE, nullptr}); + {Stream::LEFT, Mode::MODE_OFF, nullptr}); root_processor->addTargetStreams( - {Stream::RIGHT, Mode::MODE_NATIVE, Mode::MODE_NATIVE, nullptr}); + {Stream::RIGHT, Mode::MODE_OFF, nullptr}); processors_.push_back(root_processor); processors_.push_back(rectify_processor); @@ -537,6 +388,8 @@ void Synthetic::InitProcessors() { processors_.push_back(points_processor); processors_.push_back(depth_processor); using namespace std::placeholders; // NOLINT + root_processor->SetProcessCallback( + std::bind(&Synthetic::OnDeviceProcess, this, _1, _2, _3)); rectify_processor->SetProcessCallback( std::bind(&Synthetic::OnRectifyProcess, this, _1, _2, _3)); disparity_processor->SetProcessCallback( @@ -548,6 +401,8 @@ void Synthetic::InitProcessors() { depth_processor->SetProcessCallback( std::bind(&Synthetic::OnDepthProcess, this, _1, _2, _3)); + root_processor->SetPostProcessCallback( + std::bind(&Synthetic::OnDevicePostProcess, this, _1)); rectify_processor->SetPostProcessCallback( std::bind(&Synthetic::OnRectifyPostProcess, this, _1)); disparity_processor->SetPostProcessCallback( @@ -559,129 +414,14 @@ void Synthetic::InitProcessors() { depth_processor->SetPostProcessCallback( std::bind(&Synthetic::OnDepthPostProcess, this, _1)); - if (calib_model_ == CalibrationModel::PINHOLE) { - // PINHOLE - rectify_processor->AddChild(disparity_processor); - disparity_processor->AddChild(disparitynormalized_processor); - disparity_processor->AddChild(points_processor); - points_processor->AddChild(depth_processor); - } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) { - // KANNALA_BRANDT - rectify_processor->AddChild(disparity_processor); - disparity_processor->AddChild(disparitynormalized_processor); - disparity_processor->AddChild(depth_processor); - depth_processor->AddChild(points_processor); - } else { - // UNKNOW - LOG(ERROR) << "Unknow calib model type in device: " - << calib_model_; - } - - processor_ = rectify_processor; + processor_ = root_processor; } -void Synthetic::ProcessNativeStream( - const Stream &stream, const api::StreamData &data) { - if (stream == Stream::LEFT || stream == Stream::RIGHT) { - static api::StreamData left_data, right_data; - if (stream == Stream::LEFT) { - left_data = data; - } else if (stream == Stream::RIGHT) { - right_data = data; - } - if (left_data.img && right_data.img && - left_data.img->frame_id == right_data.img->frame_id) { - std::shared_ptr processor = nullptr; - if (calib_model_ == CalibrationModel::PINHOLE) { - processor = find_processor(processor_); -#ifdef WITH_CAM_MODELS - } else if (calib_model_ == 
CalibrationModel::KANNALA_BRANDT) { - processor = find_processor(processor_); -#endif - } else { - LOG(ERROR) << "Unknow calib model type in device: " - << calib_model_ << ", use default pinhole model"; - processor = find_processor(processor_); - } - processor->Process(ObjMat2{ - left_data.frame, left_data.frame_id, left_data.img, - right_data.frame, right_data.frame_id, right_data.img}); - } - return; - } - - if (stream == Stream::LEFT_RECTIFIED || stream == Stream::RIGHT_RECTIFIED) { - static api::StreamData left_rect_data, right_rect_data; - if (stream == Stream::LEFT_RECTIFIED) { - left_rect_data = data; - } else if (stream == Stream::RIGHT_RECTIFIED) { - right_rect_data = data; - } - if (left_rect_data.img && right_rect_data.img && - left_rect_data.img->frame_id == right_rect_data.img->frame_id) { - std::string name = RectifyProcessorOCV::NAME; - if (calib_model_ == CalibrationModel::PINHOLE) { - name = RectifyProcessorOCV::NAME; -#ifdef WITH_CAM_MODELS - } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) { - name = RectifyProcessor::NAME; -#endif - } - process_childs( - processor_, name, ObjMat2{ - left_rect_data.frame, left_rect_data.frame_id, left_rect_data.img, - right_rect_data.frame, right_rect_data.frame_id, - right_rect_data.img}); - } - return; - } - - switch (stream) { - case Stream::DISPARITY: { - process_childs(processor_, DisparityProcessor::NAME, - ObjMat{data.frame, data.frame_id, data.img}); - } break; - case Stream::DISPARITY_NORMALIZED: { - process_childs(processor_, DisparityNormalizedProcessor::NAME, - ObjMat{data.frame, data.frame_id, data.img}); - } break; - case Stream::POINTS: { - if (calib_model_ == CalibrationModel::PINHOLE) { - // PINHOLE - process_childs(processor_, PointsProcessorOCV::NAME, - ObjMat{data.frame, data.frame_id, data.img}); -#ifdef WITH_CAM_MODELS - } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) { - // KANNALA_BRANDT - process_childs(processor_, PointsProcessor::NAME, - ObjMat{data.frame, data.frame_id, data.img}); -#endif - } else { - // UNKNOW - LOG(ERROR) << "Unknow calib model type in device: " - << calib_model_; - } - } break; - case Stream::DEPTH: { - if (calib_model_ == CalibrationModel::PINHOLE) { - // PINHOLE - process_childs(processor_, DepthProcessorOCV::NAME, - ObjMat{data.frame, data.frame_id, data.img}); -#ifdef WITH_CAM_MODELS - } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) { - // KANNALA_BRANDT - process_childs(processor_, DepthProcessor::NAME, - ObjMat{data.frame, data.frame_id, data.img}); -#endif - } else { - // UNKNOW - LOG(ERROR) << "Unknow calib model type in device: " - << calib_model_; - } - } break; - default: - break; - } +bool Synthetic::OnDeviceProcess( + Object *const in, Object *const out, + std::shared_ptr const parent) { + MYNTEYE_UNUSED(parent) + return GetStreamEnabledMode(Stream::LEFT) != MODE_ON; } bool Synthetic::OnRectifyProcess( @@ -691,8 +431,8 @@ bool Synthetic::OnRectifyProcess( if (plugin_ && plugin_->OnRectifyProcess(in, out)) { return true; } - return GetStreamEnabledMode(Stream::LEFT_RECTIFIED) != MODE_SYNTHETIC; - // && GetStreamEnabledMode(Stream::RIGHT_RECTIFIED) != MODE_SYNTHETIC + return GetStreamEnabledMode(Stream::LEFT_RECTIFIED) != MODE_ON; + // && GetStreamEnabledMode(Stream::RIGHT_RECTIFIED) != MODE_ON } bool Synthetic::OnDisparityProcess( @@ -702,7 +442,7 @@ bool Synthetic::OnDisparityProcess( if (plugin_ && plugin_->OnDisparityProcess(in, out)) { return true; } - return GetStreamEnabledMode(Stream::DISPARITY) != MODE_SYNTHETIC; + return 
GetStreamEnabledMode(Stream::DISPARITY) != MODE_ON; } bool Synthetic::OnDisparityNormalizedProcess( @@ -712,7 +452,7 @@ bool Synthetic::OnDisparityNormalizedProcess( if (plugin_ && plugin_->OnDisparityNormalizedProcess(in, out)) { return true; } - return GetStreamEnabledMode(Stream::DISPARITY_NORMALIZED) != MODE_SYNTHETIC; + return GetStreamEnabledMode(Stream::DISPARITY_NORMALIZED) != MODE_ON; } bool Synthetic::OnPointsProcess( @@ -722,7 +462,7 @@ bool Synthetic::OnPointsProcess( if (plugin_ && plugin_->OnPointsProcess(in, out)) { return true; } - return GetStreamEnabledMode(Stream::POINTS) != MODE_SYNTHETIC; + return GetStreamEnabledMode(Stream::POINTS) != MODE_ON; } bool Synthetic::OnDepthProcess( @@ -732,56 +472,71 @@ bool Synthetic::OnDepthProcess( if (plugin_ && plugin_->OnDepthProcess(in, out)) { return true; } - return GetStreamEnabledMode(Stream::DEPTH) != MODE_SYNTHETIC; + return GetStreamEnabledMode(Stream::DEPTH) != MODE_ON; +} + +void Synthetic::OnDevicePostProcess(Object *const out) { + const ObjMat2 *output = Object::Cast(out); + NotifyStreamData(Stream::LEFT, obj_data_first(output)); + NotifyStreamData(Stream::RIGHT, obj_data_second(output)); + if (HasStreamCallback(Stream::LEFT)) { + auto data = getControlDateWithStream(Stream::LEFT); + data.stream_callback(obj_data_first(output)); + } + if (HasStreamCallback(Stream::RIGHT)) { + auto data = getControlDateWithStream(Stream::RIGHT); + if (data.stream_callback) + data.stream_callback(obj_data_second(output)); + } } void Synthetic::OnRectifyPostProcess(Object *const out) { const ObjMat2 *output = Object::Cast(out); + NotifyStreamData(Stream::LEFT_RECTIFIED, obj_data_first(output)); + NotifyStreamData(Stream::RIGHT_RECTIFIED, obj_data_second(output)); if (HasStreamCallback(Stream::LEFT_RECTIFIED)) { auto data = getControlDateWithStream(Stream::LEFT_RECTIFIED); - data.stream_callback( - {output->first_data, output->first, nullptr, output->first_id}); + data.stream_callback(obj_data_first(output)); } if (HasStreamCallback(Stream::RIGHT_RECTIFIED)) { auto data = getControlDateWithStream(Stream::RIGHT_RECTIFIED); - data.stream_callback( - {output->second_data, output->second, nullptr, output->second_id}); + data.stream_callback(obj_data_second(output)); } } void Synthetic::OnDisparityPostProcess(Object *const out) { const ObjMat *output = Object::Cast(out); + NotifyStreamData(Stream::DISPARITY, obj_data(output)); if (HasStreamCallback(Stream::DISPARITY)) { auto data = getControlDateWithStream(Stream::DISPARITY); - data.stream_callback( - {output->data, output->value, nullptr, output->id}); + data.stream_callback(obj_data(output)); } } void Synthetic::OnDisparityNormalizedPostProcess(Object *const out) { const ObjMat *output = Object::Cast(out); + NotifyStreamData(Stream::DISPARITY_NORMALIZED, obj_data(output)); if (HasStreamCallback(Stream::DISPARITY_NORMALIZED)) { auto data = getControlDateWithStream(Stream::DISPARITY_NORMALIZED); - data.stream_callback( - {output->data, output->value, nullptr, output->id}); + data.stream_callback(obj_data(output)); } } void Synthetic::OnPointsPostProcess(Object *const out) { const ObjMat *output = Object::Cast(out); + NotifyStreamData(Stream::POINTS, obj_data(output)); if (HasStreamCallback(Stream::POINTS)) { auto data = getControlDateWithStream(Stream::POINTS); - data.stream_callback( - {output->data, output->value, nullptr, output->id}); + data.stream_callback(obj_data(output)); } } void Synthetic::OnDepthPostProcess(Object *const out) { const ObjMat *output = Object::Cast(out); + 
NotifyStreamData(Stream::DEPTH, obj_data(output)); if (HasStreamCallback(Stream::DEPTH)) { auto data = getControlDateWithStream(Stream::DEPTH); - data.stream_callback( - {output->data, output->value, nullptr, output->id}); + data.stream_callback(obj_data(output)); } } @@ -796,4 +551,11 @@ void Synthetic::SetDisparityComputingMethodType( LOG(ERROR) << "ERROR: no suited processor for disparity computing."; } +void Synthetic::NotifyStreamData( + const Stream &stream, const api::StreamData &data) { + if (stream_data_listener_) { + stream_data_listener_(stream, data); + } +} + MYNTEYE_END_NAMESPACE diff --git a/src/mynteye/api/synthetic.h b/src/mynteye/api/synthetic.h index e39088b..dc44a4e 100644 --- a/src/mynteye/api/synthetic.h +++ b/src/mynteye/api/synthetic.h @@ -19,6 +19,7 @@ #include #include #include +#include #include "mynteye/api/api.h" #include "mynteye/api/config.h" @@ -28,22 +29,24 @@ MYNTEYE_BEGIN_NAMESPACE class API; class Plugin; class Processor; +class RootProcessor; struct Object; class Synthetic { public: using stream_callback_t = API::stream_callback_t; + using stream_data_listener_t = + std::function; + using stream_switch_callback_t = API::stream_switch_callback_t; typedef enum Mode { - MODE_NATIVE, // Native stream - MODE_SYNTHETIC, // Synthetic stream - MODE_LAST // Unsupported + MODE_ON, // On + MODE_OFF // Off } mode_t; struct stream_control_t { Stream stream; - mode_t support_mode_; mode_t enabled_mode_; stream_callback_t stream_callback; }; @@ -51,13 +54,19 @@ class Synthetic { explicit Synthetic(API *api, CalibrationModel calib_model); ~Synthetic(); + void SetStreamDataListener(stream_data_listener_t listener); + void NotifyImageParamsChanged(); bool Supports(const Stream &stream) const; - mode_t SupportsMode(const Stream &stream) const; void EnableStreamData(const Stream &stream); void DisableStreamData(const Stream &stream); + + void EnableStreamData( + const Stream &stream, stream_switch_callback_t callback, bool try_tag); + void DisableStreamData( + const Stream &stream, stream_switch_callback_t callback, bool try_tag); bool IsStreamDataEnabled(const Stream &stream) const; void SetStreamCallback(const Stream &stream, stream_callback_t callback); @@ -85,11 +94,8 @@ class Synthetic { private: void InitCalibInfo(); - void InitStreamSupports(); mode_t GetStreamEnabledMode(const Stream &stream) const; - bool IsStreamEnabledNative(const Stream &stream) const; - bool IsStreamEnabledSynthetic(const Stream &stream) const; void EnableStreamData(const Stream &stream, std::uint32_t depth); void DisableStreamData(const Stream &stream, std::uint32_t depth); @@ -101,8 +107,9 @@ class Synthetic { template bool DeactivateProcessor(bool tree = false); - void ProcessNativeStream(const Stream &stream, const api::StreamData &data); - + bool OnDeviceProcess( + Object *const in, Object *const out, + std::shared_ptr const parent); bool OnRectifyProcess( Object *const in, Object *const out, std::shared_ptr const parent); @@ -119,16 +126,19 @@ class Synthetic { Object *const in, Object *const out, std::shared_ptr const parent); + void OnDevicePostProcess(Object *const out); void OnRectifyPostProcess(Object *const out); void OnDisparityPostProcess(Object *const out); void OnDisparityNormalizedPostProcess(Object *const out); void OnPointsPostProcess(Object *const out); void OnDepthPostProcess(Object *const out); + void NotifyStreamData(const Stream &stream, const api::StreamData &data); + API *api_; - std::shared_ptr processor_; - + std::shared_ptr processor_; + std::vector> processors_; 
   std::shared_ptr<Plugin> plugin_;
 
   CalibrationModel calib_model_;
 
@@ -138,11 +148,11 @@ class Synthetic {
   std::shared_ptr<Extrinsics> extr_;
   bool calib_default_tag_;
 
-  std::vector<std::shared_ptr<Processor>> processors_;
+  stream_data_listener_t stream_data_listener_;
 };
 
 class SyntheticProcessorPart {
- private:
+ protected:
   inline std::vector<stream_control_t> getTargetStreams() {
     return target_streams_;
   }
diff --git a/src/mynteye/api/version_checker.cc b/src/mynteye/api/version_checker.cc
new file mode 100644
index 0000000..d958f35
--- /dev/null
+++ b/src/mynteye/api/version_checker.cc
@@ -0,0 +1,136 @@
+// Copyright 2018 Slightech Co., Ltd. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+#include "mynteye/api/version_checker.h"
+#include "mynteye/device/utils.h"
+#include "mynteye/logger.h"
+#include "mynteye/types.h"
+
+MYNTEYE_BEGIN_NAMESPACE
+
+typedef struct {
+  const std::string device_type;
+  const std::string sdk_version;
+  const std::string firmware_version;
+  const std::string status;
+}firmware_version_match_table_unit;
+
+const char* ERRO_DESCRIPTION_F =
+    "Please update the firmware at first";
+const char* ERRO_DESCRIPTION_S =
+    "Please update the SDK at first";
+const char* WARN_DESCRIPTION_F =
+    "We suggest that you should update the firmware";
+const char* WARN_DESCRIPTION_S =
+    "We suggest that you should update the SDK";
+const char* PASS_DESCRIPTION = "pass";
+
+/** firmware/sdk version matched table */
+/**----device type-----sdk version---firmware version-----pass tag-----*/
+static const firmware_version_match_table_unit FSVM_TABLE[] ={
+/** S1030 */
+  {"MYNT-EYE-S1030", ">2.3.0", ">2.2.0", PASS_DESCRIPTION},
+  {"MYNT-EYE-S1030", ">2.3.0", "2.2.0", WARN_DESCRIPTION_F},
+  {"MYNT-EYE-S1030", ">2.3.0", "<2.2.0", ERRO_DESCRIPTION_F},
+  {"MYNT-EYE-S1030", "<2.3.1", "<2.2.0", WARN_DESCRIPTION_S},
+/** S2100 */
+  {"MYNT-EYE-S2100", ">2.3.0", "1.0", PASS_DESCRIPTION},
+  {"MYNT-EYE-S2100", "<2.3.1", "1.0", ERRO_DESCRIPTION_S},
+/** S210A */
+  {"MYNT-EYE-S210A", ">2.3.0", "1.0", PASS_DESCRIPTION},
+  {"MYNT-EYE-S210A", "<2.3.1", "1.0", ERRO_DESCRIPTION_S},
+};
+
+void getVersion(const std::string &str, char *version) {
+  std::string st1("");
+  int j = 0;
+  for (size_t i = 0; i < str.size(); i++) {
+    if (str[i] == '.') {
+      version[j++] = atoi(st1.c_str());
+      st1 = "";
+    } else {
+      st1 += str[i];
+    }
+  }
+  version[j++] = atoi(st1.c_str());
+}
+
+bool conditionMatch(const std::string& condition, const std::string& target) {
+  char version[4] = {0};
+  char version_c[4] = {0};
+  getVersion(target, version);
+  int tag_c = 0;
+  std::string condition_c;
+  if (condition[0] == '>') {
+    tag_c = 1;
+    condition_c = condition.substr(1);
+  } else if (condition[0] == '<') {
+    tag_c = -1;
+    condition_c = condition.substr(1);
+  } else {
+    tag_c = 0;
+    condition_c = condition;
+  }
+  getVersion(condition_c, version_c);
+  int tag_big = memcmp(version, version_c, 4);
+  if (tag_big * tag_c > 0 || (tag_big == 0 && tag_c == 0)) return true;
+  return false;
+}
+
+enum STATUS_UNIT {
+  ST_PASS,
+  ST_ERRO_F,
+  ST_ERRO_S,
+  ST_NOT_PASS
+};
+
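For reference, the entries in FSVM_TABLE above are resolved by the two helpers just defined; a few worked examples (assuming getVersion() and conditionMatch() behave exactly as written in this file):

  conditionMatch(">2.3.0", "2.3.2")   // true:  2.3.2 is newer than 2.3.0
  conditionMatch("<2.2.0", "2.1.0")   // true:  2.1.0 is older than 2.2.0
  conditionMatch("1.0", "1.0")        // true:  versions are identical
  conditionMatch(">2.3.0", "2.3.0")   // false: ">" is strict, an equal version does not pass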
+STATUS_UNIT checkUnit(const std::string& sdkv,
+    const std::string& devn,
+    const std::string& firmv,
+    const firmware_version_match_table_unit& condition) {
+  if (condition.device_type == devn &&
+      conditionMatch(condition.sdk_version, sdkv) &&
+      conditionMatch(condition.firmware_version, firmv)) {
+    if (condition.status == ERRO_DESCRIPTION_F) return ST_ERRO_F;
+    if (condition.status == ERRO_DESCRIPTION_S) return ST_ERRO_S;
+    if (condition.status == WARN_DESCRIPTION_F ||
+        condition.status == WARN_DESCRIPTION_S) {
+      LOG(WARNING) << condition.status;
+    }
+    return ST_PASS;
+  }
+  return ST_NOT_PASS;
+}
+
+bool checkFirmwareVersion(const std::shared_ptr<API> api) {
+  auto sdkv = api->GetSDKVersion();
+  auto devn = api->GetInfo(Info::DEVICE_NAME);
+  auto firmv = api->GetInfo(Info::FIRMWARE_VERSION);
+
+  for (size_t i =0;
+      i < sizeof(FSVM_TABLE)/sizeof(firmware_version_match_table_unit);
+      i++) {
+    auto res = checkUnit(sdkv, devn, firmv, FSVM_TABLE[i]);
+    if (res == ST_PASS) {
+      return true;
+    } else if (res == ST_ERRO_S || res == ST_ERRO_F) {
+      LOG(ERROR) << FSVM_TABLE[i].status;
+      return false;
+    }
+  }
+  LOG(ERROR) << ERRO_DESCRIPTION_S;
+  return false;
+}
+
+MYNTEYE_END_NAMESPACE
+
diff --git a/src/mynteye/api/version_checker.h b/src/mynteye/api/version_checker.h
new file mode 100644
index 0000000..6921975
--- /dev/null
+++ b/src/mynteye/api/version_checker.h
@@ -0,0 +1,25 @@
+// Copyright 2018 Slightech Co., Ltd. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
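Intended use of this header is a single call right after the API object is created; a minimal sketch (illustrative only: it assumes the public API::Create factory, and treating a failed check as fatal is an application policy rather than something the header mandates):

  auto &&api = API::Create(argc, argv);
  if (!api || !checkFirmwareVersion(api)) return 1;  // firmware/SDK pair not supported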
+#ifndef MYNTEYE_API_VERSION_CHECKER_H_ +#define MYNTEYE_API_VERSION_CHECKER_H_ +#pragma once + +#include +#include "mynteye/api/api.h" + +MYNTEYE_BEGIN_NAMESPACE +bool checkFirmwareVersion(const std::shared_ptr api); +MYNTEYE_END_NAMESPACE + +#endif // MYNTEYE_API_VERSION_CHECKER_H_ diff --git a/src/mynteye/device/channel/channels.cc b/src/mynteye/device/channel/channels.cc index 0794aa7..a51555e 100644 --- a/src/mynteye/device/channel/channels.cc +++ b/src/mynteye/device/channel/channels.cc @@ -460,6 +460,7 @@ bool Channels::GetFiles( while (i < end) { std::uint8_t file_id = *(data + i); std::uint16_t file_size = bytes::_from_data(data + i + 1); + LOG(INFO) << "GetFiles:data_size : " << file_size; VLOG(2) << "GetFiles id: " << static_cast(file_id) << ", size: " << file_size; i += 3; diff --git a/src/mynteye/device/channel/file_channel.cc b/src/mynteye/device/channel/file_channel.cc index 5c24777..7a4526e 100644 --- a/src/mynteye/device/channel/file_channel.cc +++ b/src/mynteye/device/channel/file_channel.cc @@ -32,6 +32,7 @@ std::size_t FileChannel::GetDeviceInfoFromData( const std::uint8_t *data, const std::uint16_t &data_size, device_info_t *info) { auto n = dev_info_parser_->GetFromData(data, data_size, info); + LOG(INFO) << "GetDeviceInfoFromData:data_size : " << data_size; auto spec_version = info->spec_version; img_params_parser_->SetSpecVersion(spec_version); imu_params_parser_->SetSpecVersion(spec_version); @@ -113,6 +114,22 @@ std::size_t DeviceInfoParser::GetFromData( info->nominal_baseline = bytes::_from_data(data + i); i += 2; + if (info->spec_version >= Version(1, 2)) { + // auxiliary_chip_version, 2 + info->auxiliary_chip_version.set_major(data[i]); + info->auxiliary_chip_version.set_minor(data[i + 1]); + i += 2; + // isp_version, 2 + info->isp_version.set_major(data[i]); + info->isp_version.set_minor(data[i + 1]); + i += 2; + } else { + info->auxiliary_chip_version.set_major(0); + info->auxiliary_chip_version.set_minor(0); + info->isp_version.set_major(0); + info->isp_version.set_minor(0); + } + // get other infos according to spec_version MYNTEYE_UNUSED(data_size) @@ -155,6 +172,17 @@ std::size_t DeviceInfoParser::SetToData( bytes::_to_data(info->nominal_baseline, data + i); i += 2; + if (info->spec_version >= Version(1, 2)) { + // auxiliary_chip_version, 2 + data[i] = info->auxiliary_chip_version.major(); + data[i + 1] = info->auxiliary_chip_version.minor(); + i += 2; + // isp_version, 2 + data[i] = info->isp_version.major(); + data[i + 1] = info->isp_version.minor(); + i += 2; + } + // set other infos according to spec_version // others @@ -181,7 +209,7 @@ std::size_t ImgParamsParser::GetFromData( return GetFromData_v1_0(data, data_size, img_params); } // s210a old params - if (spec_version_ == Version(1, 1) && data_size == 404) { + if (spec_version_ >= Version(1, 1) && data_size == 404) { return GetFromData_v1_1(data, data_size, img_params); } // get img params with new version format @@ -406,7 +434,7 @@ std::size_t ImuParamsParser::GetFromData( return GetFromData_old(data, data_size, imu_params); } // s210a old params - if (spec_version_ == Version(1, 1) && data_size == 384) { + if (spec_version_ >= Version(1, 1) && data_size == 384) { return GetFromData_old(data, data_size, imu_params); } // get imu params with new version format diff --git a/src/mynteye/device/device.cc b/src/mynteye/device/device.cc index 344eea7..271be74 100644 --- a/src/mynteye/device/device.cc +++ b/src/mynteye/device/device.cc @@ -234,6 +234,10 @@ std::string Device::GetInfo(const Info &info) 
const { return device_info_->imu_type.to_string(); case Info::NOMINAL_BASELINE: return std::to_string(device_info_->nominal_baseline); + case Info::AUXILIARY_CHIP_VERSION: + return device_info_->auxiliary_chip_version.to_string(); + case Info::ISP_VERSION: + return device_info_->isp_version.to_string(); default: LOG(WARNING) << "Unknown device info"; return ""; @@ -349,6 +353,9 @@ OptionInfo Device::GetOptionInfo(const Option &option) const { std::int32_t Device::GetOptionValue(const Option &option) const { if (!Supports(option)) { + if (option == Option::FRAME_RATE) { + return GetStreamRequest().fps; + } LOG(WARNING) << "Unsupported option: " << option; return -1; } @@ -466,6 +473,11 @@ std::vector Device::GetStreamDatas(const Stream &stream) { return streams_->GetStreamDatas(stream); } +void Device::DisableMotionDatas() { + CHECK_NOTNULL(motions_); + motions_->DisableMotionDatas(); +} + void Device::EnableMotionDatas() { EnableMotionDatas(std::numeric_limits::max()); } diff --git a/src/mynteye/device/motions.cc b/src/mynteye/device/motions.cc index 71c40ab..d7ebf93 100644 --- a/src/mynteye/device/motions.cc +++ b/src/mynteye/device/motions.cc @@ -66,7 +66,10 @@ void Motions::SetMotionCallback(motion_callback_t callback) { std::lock_guard _(mtx_datas_); motion_data_t data = {imu}; - if (motion_datas_enabled_) { + if (motion_datas_enabled_ && motion_datas_max_size_ > 0) { + if (motion_datas_.size() >= motion_datas_max_size_) { + motion_datas_.erase(motion_datas_.begin()); + } motion_datas_.push_back(data); } @@ -98,13 +101,21 @@ void Motions::StopMotionTracking() { } } +void Motions::DisableMotionDatas() { + std::lock_guard _(mtx_datas_); + motion_datas_enabled_ = false; + motion_datas_max_size_ = 0; + motion_datas_.clear(); +} + void Motions::EnableMotionDatas(std::size_t max_size) { if (max_size <= 0) { LOG(WARNING) << "Could not enable motion datas with max_size <= 0"; return; } + std::lock_guard _(mtx_datas_); motion_datas_enabled_ = true; - motion_datas_max_size = max_size; + motion_datas_max_size_ = max_size; } Motions::motion_datas_t Motions::GetMotionDatas() { diff --git a/src/mynteye/device/motions.h b/src/mynteye/device/motions.h index 9bb50d3..8149a68 100644 --- a/src/mynteye/device/motions.h +++ b/src/mynteye/device/motions.h @@ -42,6 +42,7 @@ class Motions { void StartMotionTracking(); void StopMotionTracking(); + void DisableMotionDatas(); void EnableMotionDatas(std::size_t max_size); motion_datas_t GetMotionDatas(); @@ -52,7 +53,7 @@ class Motions { motion_datas_t motion_datas_; bool motion_datas_enabled_; - std::size_t motion_datas_max_size; + std::size_t motion_datas_max_size_; bool is_imu_tracking; diff --git a/src/mynteye/types.cc b/src/mynteye/types.cc index e723031..27b6beb 100644 --- a/src/mynteye/types.cc +++ b/src/mynteye/types.cc @@ -92,6 +92,8 @@ const char *to_string(const Info &value) { CASE(LENS_TYPE) CASE(IMU_TYPE) CASE(NOMINAL_BASELINE) + CASE(AUXILIARY_CHIP_VERSION) + CASE(ISP_VERSION) default: CHECK(is_valid(value)); return "Info::UNKNOWN"; diff --git a/test/types_test.cc b/test/types_test.cc index cc2dfa8..28a292c 100644 --- a/test/types_test.cc +++ b/test/types_test.cc @@ -53,6 +53,8 @@ TEST(Info, VerifyToString) { EXPECT_STREQ("Info::LENS_TYPE", to_string(Info::LENS_TYPE)); EXPECT_STREQ("Info::IMU_TYPE", to_string(Info::IMU_TYPE)); EXPECT_STREQ("Info::NOMINAL_BASELINE", to_string(Info::NOMINAL_BASELINE)); + EXPECT_STREQ("Info::AUXILIARY_CHIP_VERSION", to_string(Info::AUXILIARY_CHIP_VERSION)); + EXPECT_STREQ("Info::ISP_VERSION", 
to_string(Info::ISP_VERSION)); } TEST(Option, VerifyToString) { diff --git a/tools/writer/config/S210A/device.info b/tools/writer/config/S210A/device.info index 6bce65d..f887695 100644 --- a/tools/writer/config/S210A/device.info +++ b/tools/writer/config/S210A/device.info @@ -2,9 +2,11 @@ --- device_name: MYNT-EYE-S210A serial_number: "07C40D1C0009071F" -firmware_version: "0.1" +firmware_version: "1.1" hardware_version: "1.0" -spec_version: "1.1" -lens_type: "0000" -imu_type: "0000" +spec_version: "1.2" +lens_type: "0001" +imu_type: "0001" nominal_baseline: 0 +auxiliary_chip_version: "1.0" +isp_version: "1.0" diff --git a/tools/writer/device_writer.cc b/tools/writer/device_writer.cc index cb57712..96d9f77 100644 --- a/tools/writer/device_writer.cc +++ b/tools/writer/device_writer.cc @@ -52,7 +52,11 @@ bool DeviceWriter::WriteDeviceInfo(const dev_info_t &info) { << ", spec_version: " << dev_info->spec_version.to_string() << ", lens_type: " << dev_info->lens_type.to_string() << ", imu_type: " << dev_info->imu_type.to_string() - << ", nominal_baseline: " << dev_info->nominal_baseline << "}"; + << ", nominal_baseline: " << dev_info->nominal_baseline + << ", auxiliary_chip_version: " + << dev_info->auxiliary_chip_version.to_string() + << ", isp_version: " + << dev_info->isp_version.to_string()<< "}"; return true; } else { LOG(ERROR) << "Write device info failed"; @@ -215,6 +219,8 @@ bool DeviceWriter::SaveDeviceInfo( fs << "lens_type" << info.lens_type.to_string(); fs << "imu_type" << info.imu_type.to_string(); fs << "nominal_baseline" << info.nominal_baseline; + fs << "auxiliary_chip_version" << info.auxiliary_chip_version.to_string(); + fs << "isp_version" << info.isp_version.to_string(); // save other infos according to spec_version fs.release(); return true; diff --git a/wrappers/python/src/mynteye_py.cc b/wrappers/python/src/mynteye_py.cc index 5c23cf3..b27a044 100644 --- a/wrappers/python/src/mynteye_py.cc +++ b/wrappers/python/src/mynteye_py.cc @@ -104,7 +104,7 @@ struct MYNTEYE_API MotionData { ImuData imu; bool operator==(const MotionData &other) const { - return imu.timestamp == other.imu.timestamp; + return imu.timestamp == other.imu.timestamp; } }; @@ -247,6 +247,8 @@ BOOST_PYTHON_MODULE(mynteye_py) { .value("LENS_TYPE", Info::LENS_TYPE) .value("IMU_TYPE", Info::IMU_TYPE) .value("NOMINAL_BASELINE", Info::NOMINAL_BASELINE) + .value("AUXILIARY_CHIP_VERSION", Info::AUXILIARY_CHIP_VERSION) + .value("ISP_VERSION", Info::ISP_VERSION) #ifdef ENUM_EXPORT_VALUES .export_values() #endif diff --git a/wrappers/ros/src/mynt_eye_ros_wrapper/launch/mynteye.launch b/wrappers/ros/src/mynt_eye_ros_wrapper/launch/mynteye.launch index 8a571db..020e254 100644 --- a/wrappers/ros/src/mynt_eye_ros_wrapper/launch/mynteye.launch +++ b/wrappers/ros/src/mynt_eye_ros_wrapper/launch/mynteye.launch @@ -153,7 +153,7 @@ - + @@ -347,6 +347,10 @@ - 'image_transport/compressedDepth' - + + + - 'image_transport/compressedDepth' + + diff --git a/wrappers/ros/src/mynt_eye_ros_wrapper/scripts/get_device_info.py b/wrappers/ros/src/mynt_eye_ros_wrapper/scripts/get_device_info.py index db4989b..7aeeeed 100755 --- a/wrappers/ros/src/mynt_eye_ros_wrapper/scripts/get_device_info.py +++ b/wrappers/ros/src/mynt_eye_ros_wrapper/scripts/get_device_info.py @@ -54,6 +54,8 @@ def main(): 'LENS_TYPE': GetInfoRequest.LENS_TYPE, 'IMU_TYPE': GetInfoRequest.IMU_TYPE, 'NOMINAL_BASELINE': GetInfoRequest.NOMINAL_BASELINE, + 'AUXILIARY_CHIP_VERSION': GetInfoRequest.AUXILIARY_CHIP_VERSION, + 'ISP_VERSION': GetInfoRequest.ISP_VERSION, } for k, v 
in get_device_info(**keys).items(): print('{}: {}'.format(k, v)) diff --git a/wrappers/ros/src/mynt_eye_ros_wrapper/src/wrapper_nodelet.cc b/wrappers/ros/src/mynt_eye_ros_wrapper/src/wrapper_nodelet.cc index d9048da..abaecf0 100644 --- a/wrappers/ros/src/mynt_eye_ros_wrapper/src/wrapper_nodelet.cc +++ b/wrappers/ros/src/mynt_eye_ros_wrapper/src/wrapper_nodelet.cc @@ -315,7 +315,7 @@ class ROSWrapperNodelet : public nodelet::Nodelet { for (auto &&it = mono_topics.begin(); it != mono_topics.end(); ++it) { auto &&topic = mono_topics[it->first]; if (it->first == Stream::LEFT || it->first == Stream::RIGHT) { - mono_publishers_[it->first] = it_mynteye.advertiseCamera(topic, 1); + mono_publishers_[it->first] = it_mynteye.advertise(topic, 1); } NODELET_INFO_STREAM("Advertized on topic " << topic); } @@ -407,6 +407,12 @@ class ROSWrapperNodelet : public nodelet::Nodelet { case Request::NOMINAL_BASELINE: res.value = api_->GetInfo(Info::NOMINAL_BASELINE); break; + case Request::AUXILIARY_CHIP_VERSION: + res.value = api_->GetInfo(Info::AUXILIARY_CHIP_VERSION); + break; + case Request::ISP_VERSION: + res.value = api_->GetInfo(Info::ISP_VERSION); + break; case Request::IMG_INTRINSICS: { auto intri_left = api_->GetIntrinsicsBase(Stream::LEFT); @@ -554,86 +560,53 @@ class ROSWrapperNodelet : public nodelet::Nodelet { return true; } - void SetIsPublished(const Stream &stream) { - is_published_[stream] = false; - switch (stream) { - case Stream::LEFT_RECTIFIED: { - if (is_published_[Stream::RIGHT_RECTIFIED]) { - SetIsPublished(Stream::RIGHT_RECTIFIED); - } - if (is_published_[Stream::DISPARITY]) { - SetIsPublished(Stream::DISPARITY); - } - } break; - case Stream::RIGHT_RECTIFIED: { - if (is_published_[Stream::LEFT_RECTIFIED]) { - SetIsPublished(Stream::LEFT_RECTIFIED); - } - if (is_published_[Stream::DISPARITY]) { - SetIsPublished(Stream::DISPARITY); - } - } break; - case Stream::DISPARITY: { - if (is_published_[Stream::DISPARITY_NORMALIZED]) { - SetIsPublished(Stream::DISPARITY_NORMALIZED); - } - if (is_published_[Stream::POINTS]) { - SetIsPublished(Stream::POINTS); - } - } break; - case Stream::DISPARITY_NORMALIZED: { - } break; - case Stream::POINTS: { - if (is_published_[Stream::DEPTH]) { - SetIsPublished(Stream::DEPTH); - } - } break; - case Stream::DEPTH: { - } break; - default: - return; + void publishData( + const Stream &stream, const api::StreamData &data, std::uint32_t seq, + ros::Time stamp) { + if (stream == Stream::LEFT || stream == Stream::RIGHT) { + return; + } else if (stream == Stream::POINTS) { + publishPoints(data, seq, stamp); + } else { + publishCamera(stream, data, seq, stamp); } } - void publishPoint(const Stream &stream) { - auto &&points_num = points_publisher_.getNumSubscribers(); - if (points_num == 0 && is_published_[stream]) { - SetIsPublished(stream); - api_->DisableStreamData(stream); - } else if (points_num > 0 && !is_published_[Stream::POINTS]) { - api_->EnableStreamData(Stream::POINTS); - api_->SetStreamCallback( - Stream::POINTS, [this](const api::StreamData &data) { - // ros::Time stamp = hardTimeToSoftTime(data.img->timestamp); - ros::Time stamp = checkUpTimeStamp( - data.img->timestamp, Stream::POINTS); - static std::size_t count = 0; - ++count; - publishPoints(data, count, stamp); - }); - is_published_[Stream::POINTS] = true; + int getStreamSubscribers(const Stream &stream) { + if (stream == Stream::POINTS) { + return points_publisher_.getNumSubscribers(); } + auto pub = camera_publishers_[stream]; + if (pub) + return pub.getNumSubscribers(); + return -1; } void 
publishOthers(const Stream &stream) { - auto stream_num = camera_publishers_[stream].getNumSubscribers(); - if (stream_num == 0 && is_published_[stream]) { - // Stop computing when was not subcribed - SetIsPublished(stream); - api_->DisableStreamData(stream); - } else if (stream_num > 0 && !is_published_[stream]) { - // Start computing and publishing when was subcribed + if (getStreamSubscribers(stream) > 0 && !is_published_[stream]) { api_->EnableStreamData(stream); api_->SetStreamCallback( stream, [this, stream](const api::StreamData &data) { - // ros::Time stamp = hardTimeToSoftTime(data.img->timestamp); ros::Time stamp = checkUpTimeStamp( data.img->timestamp, stream); static std::size_t count = 0; ++count; - publishCamera(stream, data, count, stamp); + publishData(stream, data, count, stamp); }); is_published_[stream] = true; + return; + } + + int disable_tag = 0; + api_->DisableStreamData(stream, [&](const Stream &stream) { + disable_tag += getStreamSubscribers(stream); + }, true); + if (disable_tag == 0 && is_published_[stream]) { + api_->DisableStreamData(stream, [&](const Stream &stream) { + api_->SetStreamCallback(stream, nullptr); + is_published_[stream] = false; + }); + return; } } @@ -648,17 +621,6 @@ class ROSWrapperNodelet : public nodelet::Nodelet { // ros::Time stamp = hardTimeToSoftTime(data.img->timestamp); ros::Time stamp = checkUpTimeStamp( data.img->timestamp, Stream::LEFT); - - // static double img_time_prev = -1; - // NODELET_INFO_STREAM("ros_time_beg: " << FULL_PRECISION << - // ros_time_beg - // << ", img_time_elapsed: " << FULL_PRECISION - // << ((data.img->timestamp - img_time_beg) * 0.00001f) - // << ", img_time_diff: " << FULL_PRECISION - // << ((img_time_prev < 0) ? 0 - // : (data.img->timestamp - img_time_prev) * 0.01f) << " - // ms"); - // img_time_prev = data.img->timestamp; publishCamera(Stream::LEFT, data, left_count_, stamp); publishMono(Stream::LEFT, data, left_count_, stamp); NODELET_DEBUG_STREAM( @@ -698,14 +660,10 @@ class ROSWrapperNodelet : public nodelet::Nodelet { std::vector other_streams{ Stream::LEFT_RECTIFIED, Stream::RIGHT_RECTIFIED, Stream::DISPARITY, Stream::DISPARITY_NORMALIZED, - Stream::POINTS, Stream::DEPTH}; - + Stream::POINTS, Stream::DEPTH + }; for (auto &&stream : other_streams) { - if (stream != Stream::POINTS) { - publishOthers(stream); - } else { - publishPoint(stream); - } + publishOthers(stream); } if (!is_motion_published_) { @@ -822,9 +780,7 @@ class ROSWrapperNodelet : public nodelet::Nodelet { cv::cvtColor(data.frame, mono, CV_RGB2GRAY); auto &&msg = cv_bridge::CvImage(header, enc::MONO8, mono).toImageMsg(); pthread_mutex_unlock(&mutex_data_); - auto &&info = getCameraInfo(stream); - info->header.stamp = msg->header.stamp; - mono_publishers_[stream].publish(msg, info); + mono_publishers_[stream].publish(msg); } void publishPoints( @@ -1284,7 +1240,8 @@ class ROSWrapperNodelet : public nodelet::Nodelet { } for (int i = 0; i < p.rows; i++) { for (int j = 0; j < p.cols; j++) { - camera_info->P.at(i * p.cols + j) = p.at(i, j); + int scale = (i == 2 && j == 2)?1:1000; + camera_info->P.at(i * p.cols + j) = p.at(i, j) / scale; } } @@ -1510,7 +1467,7 @@ class ROSWrapperNodelet : public nodelet::Nodelet { std::map image_encodings_; // mono: LEFT, RIGHT - std::map mono_publishers_; + std::map mono_publishers_; // pointcloud: POINTS ros::Publisher points_publisher_; diff --git a/wrappers/ros/src/mynt_eye_ros_wrapper/srv/GetInfo.srv b/wrappers/ros/src/mynt_eye_ros_wrapper/srv/GetInfo.srv index 3e2efe3..a917450 100644 --- 
a/wrappers/ros/src/mynt_eye_ros_wrapper/srv/GetInfo.srv +++ b/wrappers/ros/src/mynt_eye_ros_wrapper/srv/GetInfo.srv @@ -6,10 +6,12 @@ uint32 SPEC_VERSION=4 uint32 LENS_TYPE=5 uint32 IMU_TYPE=6 uint32 NOMINAL_BASELINE=7 -uint32 IMG_INTRINSICS=8 -uint32 IMG_EXTRINSICS_RTOL=9 -uint32 IMU_INTRINSICS=10 -uint32 IMU_EXTRINSICS=11 +uint32 AUXILIARY_CHIP_VERSION=8 +uint32 ISP_VERSION=9 +uint32 IMG_INTRINSICS=10 +uint32 IMG_EXTRINSICS_RTOL=11 +uint32 IMU_INTRINSICS=12 +uint32 IMU_EXTRINSICS=13 uint32 key --- string value
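With the old MODE_NATIVE/MODE_SYNTHETIC split collapsed into MODE_ON/MODE_OFF, applications toggle synthetic streams through the switch-callback overloads introduced above. A minimal sketch of the intended call pattern (illustrative only: API::Create, Stream, Source, WaitForStreams, GetStreamData and the one-argument EnableStreamData are existing public API; the three-argument overloads are assumed to be forwarded from Synthetic to API in the same form the ROS wrapper uses, and the callback bodies are hypothetical):

#include <iostream>

#include "mynteye/api/api.h"

MYNTEYE_USE_NAMESPACE

int main(int argc, char *argv[]) {
  auto &&api = API::Create(argc, argv);
  if (!api) return 1;

  // Enabling DEPTH walks the processor chain and switches on every stream it
  // depends on (rectified images, disparity, points); the callback fires once
  // per stream that changes state.
  api->EnableStreamData(Stream::DEPTH, [](const Stream &stream) {
    std::cout << stream << " enabled" << std::endl;
  }, false);

  api->Start(Source::VIDEO_STREAMING);
  for (int i = 0; i < 30; ++i) {
    api->WaitForStreams();
    auto depth = api->GetStreamData(Stream::DEPTH);
    if (!depth.frame.empty()) {
      // consume depth.frame here
    }
  }
  api->Stop(Source::VIDEO_STREAMING);

  // With try_tag == true nothing is toggled; the callback only reports which
  // streams would be affected, the pattern the ROS wrapper uses to count
  // subscribers before really disabling a stream.
  api->DisableStreamData(Stream::DEPTH, [](const Stream &stream) {
    std::cout << stream << " would be disabled" << std::endl;
  }, true);
  return 0;
}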