Merge branch 'develop' of http://gitlab.mynt.com/mynteye/mynt-eye-sdk-2 into develop
commit b93026bdcf
@@ -14,7 +14,7 @@

 cmake_minimum_required(VERSION 3.0)

-project(mynteye VERSION 2.3.0 LANGUAGES C CXX)
+project(mynteye VERSION 2.3.2 LANGUAGES C CXX)

 include(cmake/Common.cmake)

@@ -219,6 +219,9 @@ if(WITH_API)
   src/mynteye/api/processor/depth_processor_ocv.cc
   src/mynteye/api/processor/rectify_processor_ocv.cc
   src/mynteye/api/config.cc
+  src/mynteye/api/correspondence.cc
+  src/mynteye/api/version_checker.cc
+  src/mynteye/api/data_tools.cc
 )
 if(WITH_CAM_MODELS)
   list(APPEND MYNTEYE_SRCS
Makefile

@@ -34,12 +34,7 @@ SUDO ?= sudo

 CAM_MODELS ?=

-CMAKE_BUILD_EXTRA_OPTIONS :=
-ifeq ($(CAM_MODELS),)
-CMAKE_BUILD_EXTRA_OPTIONS := $(CMAKE_BUILD_EXTRA_OPTIONS) -DWITH_CAM_MODELS=OFF
-else
 CMAKE_BUILD_EXTRA_OPTIONS := $(CMAKE_BUILD_EXTRA_OPTIONS) -DWITH_CAM_MODELS=ON
-endif

 .DEFAULT_GOAL := all

@@ -106,7 +101,7 @@ init:
 build:
 	@$(call echo,Make $@)
 ifeq ($(HOST_OS),Win)
-	@$(call cmake_build,./_build,..,-DCMAKE_INSTALL_PREFIX=$(MKFILE_DIR)/_install)
+	@$(call cmake_build,./_build,..,-DCMAKE_INSTALL_PREFIX=$(MKFILE_DIR)/_install $(CMAKE_BUILD_EXTRA_OPTIONS))
 else
 	@$(call cmake_build,./_build,..,$(CMAKE_BUILD_EXTRA_OPTIONS))
 endif

@@ -135,7 +130,7 @@ endif

 # install

-install: build
+install: uninstall build
 	@$(call echo,Make $@)
 ifeq ($(HOST_OS),Win)
 ifneq ($(HOST_NAME),MinGW)

@@ -176,7 +171,13 @@ samples: install

 tools: install
 	@$(call echo,Make $@)
+ifeq ($(HOST_OS),Mac)
+	$(error "Can't make tools on $(HOST_OS)")
+else
 	@$(call cmake_build,./tools/_build)
+endif


 .PHONY: tools

README.md

@@ -1,6 +1,6 @@
 # MYNT® EYE S SDK

-[![](https://img.shields.io/badge/MYNT%20EYE%20S%20SDK-2.3.0-brightgreen.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK)
+[![](https://img.shields.io/badge/MYNT%20EYE%20S%20SDK-2.3.2-brightgreen.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK)

 ## Overview

@@ -17,11 +17,11 @@ Please follow the guide doc to install the SDK on different platforms.
 ## Documentations

 * [API Doc](https://github.com/slightech/MYNT-EYE-S-SDK/releases): API reference, some guides and data spec.
-  * en: [![](https://img.shields.io/badge/Download-PDF-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK/files/2764152/mynt-eye-s-sdk-apidoc-2.3.0-en.pdf) [![](https://img.shields.io/badge/Download-HTML-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK/files/2764156/mynt-eye-s-sdk-apidoc-2.3.0-en.zip) [![](https://img.shields.io/badge/Online-HTML-blue.svg?style=flat)](https://slightech.github.io/MYNT-EYE-S-SDK/)
+  * en: [![](https://img.shields.io/badge/Download-PDF-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK/files/2913110/mynt-eye-s-sdk-apidoc-2.3.2-en.pdf) [![](https://img.shields.io/badge/Download-HTML-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK/files/2913111/mynt-eye-s-sdk-apidoc-2.3.2-en.zip) [![](https://img.shields.io/badge/Online-HTML-blue.svg?style=flat)](https://slightech.github.io/MYNT-EYE-S-SDK/)
-  * zh-Hans: [![](https://img.shields.io/badge/Download-PDF-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK/files/2764160/mynt-eye-s-sdk-apidoc-2.3.0-zh-Hans.pdf) [![](https://img.shields.io/badge/Download-HTML-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK/files/2764173/mynt-eye-s-sdk-apidoc-2.3.0-en.zip) [![](https://img.shields.io/badge/Online-HTML-blue.svg?style=flat)](http://doc.myntai.com/resource/api/mynt-eye-s-sdk-apidoc-2.3.0-zh-Hans/mynt-eye-s-sdk-apidoc-2.3.0-zh-Hans/index.html)
+  * zh-Hans: [![](https://img.shields.io/badge/Download-PDF-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK/files/2913112/mynt-eye-s-sdk-apidoc-2.3.2-zh-Hans.pdf) [![](https://img.shields.io/badge/Download-HTML-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK/files/2913113/mynt-eye-s-sdk-apidoc-2.3.2-zh-Hans.zip) [![](https://img.shields.io/badge/Online-HTML-blue.svg?style=flat)](http://doc.myntai.com/resource/api/mynt-eye-s-sdk-apidoc-2.3.2-zh-Hans/mynt-eye-s-sdk-apidoc-2.3.2-zh-Hans/index.html)
 * [Guide Doc](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/releases): How to install and start using the SDK.
-  * en: [![](https://img.shields.io/badge/Download-PDF-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/files/2764143/mynt-eye-s-sdk-guide-2.3.0-en.pdf) [![](https://img.shields.io/badge/Download-HTML-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/files/2764145/mynt-eye-s-sdk-guide-2.3.0-en.zip) [![](https://img.shields.io/badge/Online-HTML-blue.svg?style=flat)](https://slightech.github.io/MYNT-EYE-S-SDK-Guide/)
+  * en: [![](https://img.shields.io/badge/Download-PDF-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/files/2913052/mynt-eye-s-sdk-guide-2.3.2-en.pdf) [![](https://img.shields.io/badge/Download-HTML-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/files/2913053/mynt-eye-s-sdk-guide-2.3.2-en.zip) [![](https://img.shields.io/badge/Online-HTML-blue.svg?style=flat)](https://slightech.github.io/MYNT-EYE-S-SDK-Guide/)
-  * zh-Hans: [![](https://img.shields.io/badge/Download-PDF-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/files/2764150/mynt-eye-s-sdk-guide-2.3.0-zh-Hans.pdf) [![](https://img.shields.io/badge/Download-HTML-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/files/2764163/mynt-eye-s-sdk-guide-2.3.0-zh-Hans.zip) [![](https://img.shields.io/badge/Online-HTML-blue.svg?style=flat)](http://doc.myntai.com/resource/sdk/mynt-eye-s-sdk-guide-2.3.0-zh-Hans/mynt-eye-s-sdk-guide-2.3.0-zh-Hans/index.html)
+  * zh-Hans: [![](https://img.shields.io/badge/Download-PDF-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/files/2913054/mynt-eye-s-sdk-guide-2.3.2-zh-Hans.pdf) [![](https://img.shields.io/badge/Download-HTML-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/files/2913056/mynt-eye-s-sdk-guide-2.3.2-zh-Hans.zip) [![](https://img.shields.io/badge/Online-HTML-blue.svg?style=flat)](http://doc.myntai.com/resource/sdk/mynt-eye-s-sdk-guide-2.3.2-zh-Hans/mynt-eye-s-sdk-guide-2.3.2-zh-Hans/index.html)

 > Supported languages: `en`, `zh-Hans`.

@@ -38,7 +38,7 @@ PROJECT_NAME = "MYNT EYE S SDK"
 # could be handy for archiving the generated documentation or if some version
 # control system is used.

-PROJECT_NUMBER = 2.3.0
+PROJECT_NUMBER = 2.3.2

 # Using the PROJECT_BRIEF tag one can provide an optional one line description
 # for a project that appears at the top of each page and should give viewer a

@@ -12,3 +12,5 @@
 | Lens type | lens_type | - | × | √ Get/Set | 4 | vendor(2),product(2); default: 0 |
 | IMU type | imu_type | - | × | √ Get/Set | 4 | vendor(2),product(2); default: 0 |
 | Nominal baseline | nominal_baseline | - | × | √ Get/Set | 2 | unit: mm; default: 0 |
+| Auxiliary chip version | auxiliary_chip_version | - | × | √ Get | 2 | major,minor |
+| isp version | isp_version | - | × | √ Get | 2 | major,minor |

@@ -38,7 +38,7 @@ PROJECT_NAME = "MYNT EYE S SDK"
 # could be handy for archiving the generated documentation or if some version
 # control system is used.

-PROJECT_NUMBER = 2.3.0
+PROJECT_NUMBER = 2.3.2

 # Using the PROJECT_BRIEF tag one can provide an optional one line description
 # for a project that appears at the top of each page and should give viewer a

@@ -12,6 +12,8 @@
 | Lens type | lens_type | - | × | √ Get/Set | 4 | vendor(2),product(2); defaults to 0 if not set |
 | IMU type | imu_type | - | × | √ Get/Set | 4 | vendor(2),product(2); defaults to 0 if not set |
 | Nominal baseline | nominal_baseline | - | × | √ Get/Set | 2 | unit: mm; defaults to 0 if not set |
+| Auxiliary chip version | auxiliary_chip_version | - | × | √ Get | 2 | major,minor |
+| ISP version | isp_version | - | × | √ Get | 2 | major,minor |

 * Descriptor access: general USB device information, viewable with common tools.
 * Extension channel access: information read from the hardware through the extension channel (UVC Extension Unit).

@@ -30,6 +30,7 @@ MYNTEYE_BEGIN_NAMESPACE

 struct DeviceInfo;

+class Correspondence;
 class Device;
 class Synthetic;

@@ -91,6 +92,8 @@ class MYNTEYE_API API {
   using stream_callback_t = std::function<void(const api::StreamData &data)>;
   /** The api::MotionData callback. */
   using motion_callback_t = std::function<void(const api::MotionData &data)>;
+  /** The enable/disable switch callback. */
+  using stream_switch_callback_t = std::function<void(const Stream &stream)>;

   explicit API(std::shared_ptr<Device> device, CalibrationModel calib_model);
   virtual ~API();

@@ -184,7 +187,10 @@ class MYNTEYE_API API {
    * Get the device info.
    */
   std::string GetInfo(const Info &info) const;
+  /**
+   * Get the sdk version.
+   */
+  std::string GetSDKVersion() const;
   /**
    * @deprecated Get the intrinsics (pinhole) of stream.
    */

@@ -280,11 +286,31 @@ class MYNTEYE_API API {
    * still support this stream.
    */
   void EnableStreamData(const Stream &stream);

+  /**
+   * Enable the data of stream.
+   * callback function will call before the father processor enable.
+   * when try_tag is true, the function will do nothing except callback.
+   */
+  void EnableStreamData(
+      const Stream &stream,
+      stream_switch_callback_t callback,
+      bool try_tag = false);
   /**
    * Disable the data of stream.
    */
   void DisableStreamData(const Stream &stream);

+  /**
+   * Disable the data of stream.
+   * callback function will call before the children processor disable.
+   * when try_tag is true, the function will do nothing except callback.
+   */
+  void DisableStreamData(
+      const Stream &stream,
+      stream_switch_callback_t callback,
+      bool try_tag = false);

   /**
    * Get the latest data of stream.
    */

@@ -305,6 +331,11 @@ class MYNTEYE_API API {
    */
   std::vector<api::MotionData> GetMotionDatas();

+  /**
+   * Enable motion datas with timestamp correspondence of some stream.
+   */
+  void EnableTimestampCorrespondence(const Stream &stream);
+
   /**
    * Enable the plugin.
    */

@@ -317,6 +348,10 @@ class MYNTEYE_API API {

   std::unique_ptr<Synthetic> synthetic_;

+  std::unique_ptr<Correspondence> correspondence_;
+
+  motion_callback_t callback_;
+
   void CheckImageParams();
 };

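The snippet below is a minimal usage sketch, not part of the commit: it assumes the SDK's existing API::Create/Start flow and only strings together the members added above (GetSDKVersion, the EnableStreamData overload taking a stream_switch_callback_t, and EnableTimestampCorrespondence).

#include "mynteye/api/api.h"
#include "mynteye/logger.h"

MYNTEYE_USE_NAMESPACE

int main(int argc, char *argv[]) {
  auto &&api = API::Create(argc, argv);
  if (!api) return 1;

  LOG(INFO) << "SDK version: " << api->GetSDKVersion();

  // New overload: the callback fires before the parent processor is enabled;
  // with try_tag = true only the callback would run.
  api->EnableStreamData(
      Stream::LEFT_RECTIFIED,
      [](const Stream &stream) { LOG(INFO) << "enabling " << stream; },
      false);

  // Buffer IMU samples so they are handed out in correspondence with LEFT frames.
  api->EnableTimestampCorrespondence(Stream::LEFT);

  api->Start(Source::ALL);
  // ... grab stream/motion data as in the samples ...
  api->Stop(Source::ALL);
  return 0;
}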
@@ -278,6 +278,10 @@ class MYNTEYE_API Device {
    */
   std::vector<device::StreamData> GetStreamDatas(const Stream &stream);

+  /**
+   * Disable cache motion datas.
+   */
+  void DisableMotionDatas();
   /**
    * Enable cache motion datas.
    */

@@ -162,6 +162,8 @@ struct MYNTEYE_API DeviceInfo {
   Type lens_type;
   Type imu_type;
   std::uint16_t nominal_baseline;
+  Version auxiliary_chip_version;
+  Version isp_version;
 };

 #undef MYNTEYE_PROPERTY

@@ -122,6 +122,10 @@ enum class Info : std::uint8_t {
   IMU_TYPE,
   /** Nominal baseline */
   NOMINAL_BASELINE,
+  /** Auxiliary chip version */
+  AUXILIARY_CHIP_VERSION,
+  /** Isp version */
+  ISP_VERSION,
   /** Last guard */
   LAST
 };

@@ -105,6 +105,10 @@ if(PCL_FOUND)
     WITH_OPENCV WITH_PCL
   )
 endif()
+make_executable2(get_imu_correspondence
+  SRCS data/get_imu_correspondence.cc util/cv_painter.cc
+  WITH_OPENCV
+)
 make_executable2(get_imu SRCS data/get_imu.cc util/cv_painter.cc WITH_OPENCV)
 make_executable2(get_from_callbacks
   SRCS data/get_from_callbacks.cc util/cv_painter.cc

@@ -34,10 +34,6 @@ int main(int argc, char *argv[]) {

   // Set frame rate options for s1030
   if (model == Model::STANDARD) {
-    // Attention: must set FRAME_RATE and IMU_FREQUENCY together,
-    // otherwise won't.
-    // succeed.
-
     // FRAME_RATE values: 10, 15, 20, 25, 30, 35, 40, 45, 50, 55, 60
     api->SetOptionValue(Option::FRAME_RATE, 25);
     // IMU_FREQUENCY values: 100, 200, 250, 333, 500

@@ -28,6 +28,9 @@ int main(int argc, char *argv[]) {
   LOG(INFO) << "Lens type: " << api->GetInfo(Info::LENS_TYPE);
   LOG(INFO) << "IMU type: " << api->GetInfo(Info::IMU_TYPE);
   LOG(INFO) << "Nominal baseline: " << api->GetInfo(Info::NOMINAL_BASELINE);
+  LOG(INFO) << "Auxiliary chip version: "
+            << api->GetInfo(Info::AUXILIARY_CHIP_VERSION);
+  LOG(INFO) << "Nominal baseline: " << api->GetInfo(Info::ISP_VERSION);

   return 0;
 }

samples/tutorials/data/get_imu_correspondence.cc (new file, 85 lines)

@@ -0,0 +1,85 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <opencv2/highgui/highgui.hpp>

#include "mynteye/logger.h"
#include "mynteye/api/api.h"

#include "util/cv_painter.h"

MYNTEYE_USE_NAMESPACE

int main(int argc, char *argv[]) {
  auto &&api = API::Create(argc, argv);
  if (!api) return 1;

  bool ok;
  auto &&request = api->SelectStreamRequest(&ok);
  if (!ok) return 1;
  api->ConfigStreamRequest(request);

  // Enable motion datas with timestamp correspondence of some stream
  api->EnableTimestampCorrespondence(Stream::LEFT);

  api->Start(Source::ALL);

  CVPainter painter;

  cv::namedWindow("frame");

  std::uint64_t prev_img_stamp = 0;
  std::uint64_t prev_imu_stamp = 0;
  while (true) {
    api->WaitForStreams();

    auto &&left_data = api->GetStreamData(Stream::LEFT);
    auto &&right_data = api->GetStreamData(Stream::RIGHT);

    auto img_stamp = left_data.img->timestamp;
    LOG(INFO) << "Img timestamp: " << img_stamp
        << ", diff_prev=" << (img_stamp - prev_img_stamp);
    prev_img_stamp = img_stamp;

    cv::Mat img;
    cv::hconcat(left_data.frame, right_data.frame, img);

    auto &&motion_datas = api->GetMotionDatas();
    LOG(INFO) << "Imu count: " << motion_datas.size();
    for (auto &&data : motion_datas) {
      auto imu_stamp = data.imu->timestamp;
      LOG(INFO) << "Imu timestamp: " << imu_stamp
          << ", diff_prev=" << (imu_stamp - prev_imu_stamp)
          << ", diff_img=" << (1.f + imu_stamp - img_stamp);
      prev_imu_stamp = imu_stamp;
    }
    LOG(INFO);

    /*
    painter.DrawImgData(img, *left_data.img);
    if (!motion_datas.empty()) {
      painter.DrawImuData(img, *motion_datas[0].imu);
    }
    */

    cv::imshow("frame", img);

    char key = static_cast<char>(cv::waitKey(1));
    if (key == 27 || key == 'q' || key == 'Q') {  // ESC/Q
      break;
    }
  }

  api->Stop(Source::ALL);
  return 0;
}

@@ -22,9 +22,11 @@
 #include <thread>

 #include "mynteye/logger.h"
+#include "mynteye/api/correspondence.h"
 #include "mynteye/api/dl.h"
 #include "mynteye/api/plugin.h"
 #include "mynteye/api/synthetic.h"
+#include "mynteye/api/version_checker.h"
 #include "mynteye/device/device.h"
 #include "mynteye/device/utils.h"

@@ -208,7 +210,7 @@ std::vector<std::string> get_plugin_paths() {
 }  // namespace

 API::API(std::shared_ptr<Device> device, CalibrationModel calib_model)
-    : device_(device) {
+    : device_(device), correspondence_(nullptr) {
   VLOG(2) << __func__;
   // std::dynamic_pointer_cast<StandardDevice>(device_);
   synthetic_.reset(new Synthetic(this, calib_model));

@@ -221,7 +223,10 @@ API::~API() {
 std::shared_ptr<API> API::Create(int argc, char *argv[]) {
   auto &&device = device::select();
   if (!device) return nullptr;
-  return Create(argc, argv, device);
+  auto api = Create(argc, argv, device);
+  if (api && checkFirmwareVersion(api))
+    return api;
+  return nullptr;
 }

 std::shared_ptr<API> API::Create(

@@ -260,7 +265,7 @@ std::shared_ptr<API> API::Create(const std::shared_ptr<Device> &device) {
     }
   } else {
     LOG(ERROR) <<"no device!";
-    api = std::make_shared<API>(device, CalibrationModel::UNKNOW);
+    return nullptr;
   }
   return api;
 }

@@ -326,6 +331,20 @@ std::string API::GetInfo(const Info &info) const {
   return device_->GetInfo(info);
 }

+std::string API::GetSDKVersion() const {
+  std::string info_path =
+      utils::get_sdk_install_dir();
+  info_path.append(MYNTEYE_OS_SEP "share" \
+      MYNTEYE_OS_SEP "mynteye" MYNTEYE_OS_SEP "build.info");
+
+  cv::FileStorage fs(info_path, cv::FileStorage::READ);
+  if (!fs.isOpened()) {
+    LOG(WARNING) << "build.info not found: " << info_path;
+    return "null";
+  }
+  return fs["MYNTEYE_VERSION"];
+}
+
 IntrinsicsPinhole API::GetIntrinsics(const Stream &stream) const {
   auto in = GetIntrinsicsBase(stream);
   if (in->calib_model() == CalibrationModel::PINHOLE) {

@@ -377,10 +396,15 @@ void API::SetStreamCallback(const Stream &stream, stream_callback_t callback) {
 }

 void API::SetMotionCallback(motion_callback_t callback) {
-  static auto callback_ = callback;
+  if (correspondence_) {
+    correspondence_->SetMotionCallback(callback);
+    return;
+  }
+  callback_ = callback;
   if (callback_) {
-    device_->SetMotionCallback(
-        [](const device::MotionData &data) { callback_({data.imu}); }, true);
+    device_->SetMotionCallback([this](const device::MotionData &data) {
+      callback_({data.imu});
+    }, true);
   } else {
     device_->SetMotionCallback(nullptr);
   }

@@ -435,8 +459,12 @@ void API::Stop(const Source &source) {
 }

 void API::WaitForStreams() {
+  if (correspondence_) {
+    correspondence_->WaitForStreams();
+  } else {
     synthetic_->WaitForStreams();
   }
+}

 void API::EnableStreamData(const Stream &stream) {
   synthetic_->EnableStreamData(stream);

@@ -446,25 +474,70 @@ void API::DisableStreamData(const Stream &stream) {
   synthetic_->DisableStreamData(stream);
 }

+void API::EnableStreamData(
+    const Stream &stream, stream_switch_callback_t callback,
+    bool try_tag) {
+  synthetic_->EnableStreamData(stream, callback, try_tag);
+}
+void API::DisableStreamData(
+    const Stream &stream, stream_switch_callback_t callback,
+    bool try_tag) {
+  synthetic_->DisableStreamData(stream, callback, try_tag);
+}
+
 api::StreamData API::GetStreamData(const Stream &stream) {
+  if (correspondence_ && correspondence_->Watch(stream)) {
+    return correspondence_->GetStreamData(stream);
+  } else {
     return synthetic_->GetStreamData(stream);
   }
+}

 std::vector<api::StreamData> API::GetStreamDatas(const Stream &stream) {
+  if (correspondence_ && correspondence_->Watch(stream)) {
+    return correspondence_->GetStreamDatas(stream);
+  } else {
     return synthetic_->GetStreamDatas(stream);
   }
+}

 void API::EnableMotionDatas(std::size_t max_size) {
+  if (correspondence_) return;  // not cache them
   device_->EnableMotionDatas(max_size);
 }

 std::vector<api::MotionData> API::GetMotionDatas() {
+  if (correspondence_) {
+    return correspondence_->GetMotionDatas();
+  } else {
     std::vector<api::MotionData> datas;
     for (auto &&data : device_->GetMotionDatas()) {
       datas.push_back({data.imu});
     }
     return datas;
   }
+}
+
+void API::EnableTimestampCorrespondence(const Stream &stream) {
+  if (correspondence_ == nullptr) {
+    correspondence_.reset(new Correspondence(device_, stream));
+    {
+      device_->DisableMotionDatas();
+      if (callback_) {
+        correspondence_->SetMotionCallback(callback_);
+        callback_ = nullptr;
+      }
+    }
+    using namespace std::placeholders;  // NOLINT
+    device_->SetMotionCallback(
+        std::bind(&Correspondence::OnMotionDataCallback,
+            correspondence_.get(), _1),
+        true);
+    synthetic_->SetStreamDataListener(
+        std::bind(&Correspondence::OnStreamDataCallback,
+            correspondence_.get(), _1, _2));
+  }
+}

 void API::EnablePlugin(const std::string &path) {
   static DL dl;

@@ -80,10 +80,6 @@ const T randomNormal(const T &sigma) {
   return x1 * w * sigma;
 }

-unsigned long long timeInMicroseconds(void);  // NOLINT
-
-double timeInSeconds(void);
-
 void colorDepthImage(
     cv::Mat &imgDepth, cv::Mat &imgColoredDepth, float minRange,  // NOLINT
     float maxRange);

@@ -107,8 +103,6 @@ void LLtoUTM(
 void UTMtoLL(
     double utmNorthing, double utmEasting, const std::string &utmZone,  // NOLINT
     double &latitude, double &longitude);  // NOLINT
-
-long int timestampDiff(uint64_t t1, uint64_t t2);  // NOLINT
 }

 #endif  // SRC_MYNTEYE_API_CAMODOCAL_INCLUDE_CAMODOCAL_GPL_GPL_H_

@@ -276,7 +276,8 @@ void EquidistantCamera::estimateIntrinsics(
   double f0 = 0.0;
   for (size_t i = 0; i < imagePoints.size(); ++i) {
     std::vector<Eigen::Vector2d> center(boardSize.height);
-    double radius[boardSize.height];  // NOLINT
+    int arrayLength = boardSize.height;
+    double *radius = new double[arrayLength];
     for (int r = 0; r < boardSize.height; ++r) {
       std::vector<cv::Point2d> circle;
       for (int c = 0; c < boardSize.width; ++c) {

@@ -320,6 +321,7 @@ void EquidistantCamera::estimateIntrinsics(
       }
     }
   }
+    delete[] radius;
   }

   if (f0 <= 0.0 && minReprojErr >= std::numeric_limits<double>::max()) {

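For context: `double radius[boardSize.height]` is a variable-length array, which is not standard C++ (MSVC rejects it), hence the switch above to new[]/delete[]. For reference only, a std::vector gives equivalent storage without the manual delete; this is a sketch, not the project's code:

#include <iostream>
#include <vector>

int main() {
  const int board_height = 7;  // stands in for boardSize.height
  std::vector<double> radius(board_height, 0.0);  // freed automatically at scope exit
  radius[0] = 1.5;
  std::cout << radius.size() << std::endl;  // prints: 7
}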
@@ -16,6 +16,7 @@
 #include <set>
 #ifdef _WIN32
 #include <winsock.h>
+#define M_PI (3.14159265358979323846)
 #else
 #include <time.h>
 #endif

@@ -109,69 +110,8 @@ getFILETIMEoffset() {
   return (t);
 }

-int clock_gettime(int X, struct timespec *tp) {
-  LARGE_INTEGER t;
-  FILETIME f;
-  double microseconds;
-  static LARGE_INTEGER offset;
-  static double frequencyToMicroseconds;
-  static int initialized = 0;
-  static BOOL usePerformanceCounter = 0;
-
-  if (!initialized) {
-    LARGE_INTEGER performanceFrequency;
-    initialized = 1;
-    usePerformanceCounter = QueryPerformanceFrequency(&performanceFrequency);
-    if (usePerformanceCounter) {
-      QueryPerformanceCounter(&offset);
-      frequencyToMicroseconds =
-          static_cast<double>(performanceFrequency.QuadPart / 1000000.);
-    } else {
-      offset = getFILETIMEoffset();
-      frequencyToMicroseconds = 10.;
-    }
-  }
-  if (usePerformanceCounter) {
-    QueryPerformanceCounter(&t);
-  } else {
-    GetSystemTimeAsFileTime(&f);
-    t.QuadPart = f.dwHighDateTime;
-    t.QuadPart <<= 32;
-    t.QuadPart |= f.dwLowDateTime;
-  }
-
-  t.QuadPart -= offset.QuadPart;
-  microseconds = static_cast<double>(t.QuadPart / frequencyToMicroseconds);
-  t.QuadPart = microseconds;
-  tp->tv_sec = t.QuadPart / 1000000;
-  tp->tv_nsec = (t.QuadPart % 1000000) * 1000;
-  return (0);
-}
 #endif

-unsigned long long timeInMicroseconds(void) {  // NOLINT
-  struct timespec tp;
-#ifdef __APPLE__
-  tp = orwl_gettime();
-#else
-  clock_gettime(CLOCK_REALTIME, &tp);
-#endif
-
-  return tp.tv_sec * 1000000 + tp.tv_nsec / 1000;
-}
-
-double timeInSeconds(void) {
-  struct timespec tp;
-#ifdef __APPLE__
-  tp = orwl_gettime();
-#else
-  clock_gettime(CLOCK_REALTIME, &tp);
-#endif
-
-  return static_cast<double>(tp.tv_sec) +
-      static_cast<double>(tp.tv_nsec) / 1000000000.0;
-}
-
 float colormapAutumn[128][3] = {
     {1.0f, 0.f, 0.f}, {1.0f, 0.007874f, 0.f}, {1.0f, 0.015748f, 0.f},
     {1.0f, 0.023622f, 0.f}, {1.0f, 0.031496f, 0.f}, {1.0f, 0.03937f, 0.f},

@@ -745,23 +685,4 @@ void UTMtoLL(
   longitude = LongOrigin + longitude / M_PI * 180.0;
 }

-long int timestampDiff(uint64_t t1, uint64_t t2) {  // NOLINT
-  if (t2 > t1) {
-    uint64_t d = t2 - t1;
-
-    if (d > std::numeric_limits<long int>::max()) {  // NOLINT
-      return std::numeric_limits<long int>::max();  // NOLINT
-    } else {
-      return d;
-    }
-  } else {
-    uint64_t d = t1 - t2;
-
-    if (d > std::numeric_limits<long int>::max()) {  // NOLINT
-      return std::numeric_limits<long int>::min();  // NOLINT
-    } else {
-      return -static_cast<long int>(d);  // NOLINT
-    }
-  }
-}
 }

src/mynteye/api/correspondence.cc (new file, 277 lines)

@@ -0,0 +1,277 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/api/correspondence.h"

#include "mynteye/device/device.h"
#include "mynteye/logger.h"

MYNTEYE_BEGIN_NAMESPACE

Correspondence::Correspondence(const std::shared_ptr<Device> &device,
    const Stream &stream)
  : device_(device), stream_(stream), ready_image_timestamp_(0) {
  VLOG(2) << __func__;
  // set matched stream to be watched too,
  // aim to make stream and matched stream correspondence
  if (stream_ == Stream::LEFT) {
    stream_match_ = Stream::RIGHT;
  } else if (stream_ == Stream::RIGHT) {
    stream_match_ = Stream::LEFT;
  } else if (stream_ == Stream::LEFT_RECTIFIED) {
    stream_match_ = Stream::RIGHT_RECTIFIED;
  } else if (stream_ == Stream::RIGHT_RECTIFIED) {
    stream_match_ = Stream::LEFT_RECTIFIED;
  } else {
    stream_match_ = Stream::LAST;
  }
  EnableStreamMatch();

  auto framerate = device_->GetOptionValue(Option::FRAME_RATE);
  stream_interval_us_ = 1000000.f / framerate;
  stream_interval_us_half_ = 0.5f * stream_interval_us_;
  VLOG(2) << "framerate: " << framerate
      << ", interval_us: " << stream_interval_us_;
}

Correspondence::~Correspondence() {
  VLOG(2) << __func__;
}

bool Correspondence::Watch(const Stream &stream) const {
  if (stream == stream_) return true;
  if (stream_match_enabled_ && stream == stream_match_) return true;
  return false;
}

void Correspondence::OnStreamDataCallback(
    const Stream &stream, const api::StreamData &data) {
  if (!Watch(stream)) {
    return;  // unwatched
  }
  // LOG(INFO) << __func__ << ", " << stream
  //     << ", id: " << data.frame_id << ", stamp: " << data.img->timestamp;
  // if (data.img == nullptr) {
  //   LOG(FATAL) << "stream data image info is empty!";
  // }
  std::lock_guard<std::recursive_mutex> _(mtx_stream_datas_);
  if (stream == stream_) {
    stream_datas_.push_back(std::move(data));
  } else if (/*stream_match_enabled_ && */stream == stream_match_) {
    stream_datas_match_.push_back(std::move(data));
  }
  NotifyStreamDataReady();
}

void Correspondence::OnMotionDataCallback(const device::MotionData &data) {
  // LOG(INFO) << __func__ << ", id: " << data.imu->frame_id
  //     << ", stamp: " << data.imu->timestamp;
  {
    std::lock_guard<std::recursive_mutex> _(mtx_motion_datas_);
    motion_datas_.push_back(data);
  }
  if (motion_callback_) {
    motion_callback_({data.imu});
  }
}

void Correspondence::SetMotionCallback(API::motion_callback_t callback) {
  // LOG(INFO) << __func__;
  motion_callback_ = callback;
}

void Correspondence::WaitForStreams() {
  if (stream_ == Stream::LEFT || stream_ == Stream::RIGHT) {
    // Wait native stream ready, avoid get these stream empty
    // Todo: determine native stream according to device
    WaitStreamDataReady();
    return;
  }
  device_->WaitForStreams();
}

api::StreamData Correspondence::GetStreamData(const Stream &stream) {
  auto datas = GetStreamDatas(stream);
  return datas.empty() ? api::StreamData{} : datas.back();
}

std::vector<api::StreamData> Correspondence::GetStreamDatas(
    const Stream &stream) {
  if (!Watch(stream)) {
    LOG(ERROR) << "Get unwatched stream data of " << stream;
    return {};
  }

  std::lock_guard<std::recursive_mutex> _(mtx_stream_datas_);
  static std::uint32_t stream_count_ = 0;
  static std::uint32_t stream_match_count_ = 0;

  if (stream == stream_) {
    auto datas = GetReadyStreamData(false);

    if (stream_count_ < 10) {
      ++stream_count_;
    } else {
      // get stream, but not get matched stream, disable it
      if (stream_match_count_ == 0) {
        DisableStreamMatch();
      }
    }

    return datas;
  } else if (/*stream_match_enabled_ && */stream == stream_match_) {
    auto datas = GetReadyStreamData(true);

    if (stream_match_count_ < 10) {
      ++stream_match_count_;
    }

    return datas;
  }

  return {};
}

std::vector<api::MotionData> Correspondence::GetMotionDatas() {
  return GetReadyMotionDatas();
}

void Correspondence::EnableStreamMatch() {
  stream_match_enabled_ = true;
}

void Correspondence::DisableStreamMatch() {
  stream_match_enabled_ = false;
  stream_datas_match_.clear();
}

void Correspondence::WaitStreamDataReady() {
  std::unique_lock<std::recursive_mutex> lock(mtx_stream_datas_);
  auto ready = std::bind(&Correspondence::IsStreamDataReady, this);
  bool ok = cond_stream_datas_.wait_for(lock, std::chrono::seconds(3), ready);
  if (!ok) {
    LOG(FATAL) << "Timeout waiting for key frames. Please use USB 3.0, and not "
        "in virtual machine.";
  }
}

void Correspondence::NotifyStreamDataReady() {
  cond_stream_datas_.notify_one();
}

bool Correspondence::IsStreamDataReady() {
  if (stream_datas_.empty()) return false;
  if (stream_match_enabled_) {
    if (stream_datas_match_.empty()) return false;
  }
  if (motion_datas_.empty()) return false;

  std::uint64_t img_stamp = 0;
  std::uint64_t img_macth_stamp = 0;
  {
    std::lock_guard<std::recursive_mutex> _(mtx_stream_datas_);
    auto data = stream_datas_.front();
    if (data.img == nullptr) {
      LOG(FATAL) << "stream data image info is empty!";
    }
    img_stamp = data.img->timestamp;

    if (stream_match_enabled_) {
      img_macth_stamp = stream_datas_match_.front().img->timestamp;
    }
  }
  std::uint64_t imu_stamp = 0;
  {
    std::lock_guard<std::recursive_mutex> _(mtx_motion_datas_);
    auto data = motion_datas_.back();
    if (data.imu == nullptr) {
      LOG(FATAL) << "motion data imu info is empty!";
    }
    imu_stamp = data.imu->timestamp;
  }

  if (stream_match_enabled_) {
    return img_stamp + stream_interval_us_half_ < imu_stamp
        && img_macth_stamp + stream_interval_us_half_ < imu_stamp;
  } else {
    return img_stamp + stream_interval_us_half_ < imu_stamp;
  }
}

std::vector<api::StreamData> Correspondence::GetReadyStreamData(bool matched) {
  std::uint64_t imu_stamp = 0;
  {
    std::lock_guard<std::recursive_mutex> _(mtx_motion_datas_);
    if (motion_datas_.empty()) {
      LOG(WARNING) << "motion data is unexpected empty!"
          "\n\n Please ensure Start(Source::MOTION_TRACKING) "
          "or Start(Source::ALL)\n";
      std::lock_guard<std::recursive_mutex> _(mtx_stream_datas_);
      return std::move(matched ? stream_datas_match_ : stream_datas_);
    }
    imu_stamp = motion_datas_.back().imu->timestamp;
  }
  std::lock_guard<std::recursive_mutex> _(mtx_stream_datas_);

  std::vector<api::StreamData> &datas =
      matched ? stream_datas_match_ : stream_datas_;

  // LOG(INFO) << "datas.size: " << datas.size() << ", matched: " << matched;
  std::vector<api::StreamData> result;

  for (auto it = datas.begin(); it != datas.end(); ) {
    // LOG(INFO) << "data.id: " << it->frame_id;
    auto img_stamp = it->img->timestamp;
    if (img_stamp + stream_interval_us_half_ < imu_stamp) {
      // LOG(INFO) << "data.id: " << it->frame_id << " > result";
      result.push_back(std::move(*it));
      it = datas.erase(it);
    } else {
      // ++it;
      break;
    }
  }
  // LOG(INFO) << "datas.size: " << datas.size()
  //     << ", result.size: " << result.size();

  if (!matched && !result.empty()) {
    // last match stream timestamp
    ready_image_timestamp_ = result.back().img->timestamp;
  }
  return result;
}

std::vector<api::MotionData> Correspondence::GetReadyMotionDatas() {
  if (ready_image_timestamp_ == 0) return {};
  std::lock_guard<std::recursive_mutex> _(mtx_motion_datas_);

  std::vector<api::MotionData> result;

  auto &&datas = motion_datas_;
  for (auto it = datas.begin(); it != datas.end(); ) {
    auto imu_stamp = it->imu->timestamp;
    if (imu_stamp < ready_image_timestamp_ - stream_interval_us_half_) {
      it = datas.erase(it);
    } else if (imu_stamp > ready_image_timestamp_ + stream_interval_us_half_) {
      // ++it;
      break;
    } else {
      result.push_back({it->imu});
      it = datas.erase(it);
    }
  }

  return result;
}

MYNTEYE_END_NAMESPACE

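A quick way to read IsStreamDataReady() and GetReadyStreamData(): with Option::FRAME_RATE at 25 Hz the stream interval is 40 000 µs, so the half interval is 20 000 µs; a buffered frame is only released once an IMU sample newer than the frame timestamp plus that half interval has arrived, which guarantees the frame's ± half-interval window of IMU samples is complete. The snippet below merely restates that predicate with hypothetical numbers; none of its names are SDK symbols.

#include <cstdint>
#include <iostream>

int main() {
  const float frame_rate = 25.f;                      // Option::FRAME_RATE
  const float interval_us = 1000000.f / frame_rate;   // 40000 us between frames
  const float half_interval_us = 0.5f * interval_us;  // 20000 us

  const std::uint64_t img_stamp = 1000000;  // oldest buffered image timestamp
  const std::uint64_t imu_stamp = 1025000;  // newest buffered IMU timestamp

  // Same condition as IsStreamDataReady() for a single watched stream.
  const bool ready = img_stamp + half_interval_us < imu_stamp;
  std::cout << std::boolalpha << ready << std::endl;  // prints: true
}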
src/mynteye/api/correspondence.h (new file, 80 lines)

@@ -0,0 +1,80 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_API_CONFIG_H_
#define MYNTEYE_API_CONFIG_H_
#pragma once

#include <atomic>
#include <condition_variable>
#include <memory>
#include <mutex>
#include <vector>

#include "mynteye/api/api.h"
#include "mynteye/device/callbacks.h"

MYNTEYE_BEGIN_NAMESPACE

class Correspondence {
 public:
  Correspondence(const std::shared_ptr<Device> &device, const Stream &stream);
  ~Correspondence();

  bool Watch(const Stream &stream) const;

  void OnStreamDataCallback(const Stream &stream, const api::StreamData &data);
  void OnMotionDataCallback(const device::MotionData &data);

  void SetMotionCallback(API::motion_callback_t callback);

  void WaitForStreams();
  api::StreamData GetStreamData(const Stream &stream);
  std::vector<api::StreamData> GetStreamDatas(const Stream &stream);
  std::vector<api::MotionData> GetMotionDatas();

 private:
  void EnableStreamMatch();
  void DisableStreamMatch();

  void WaitStreamDataReady();
  void NotifyStreamDataReady();

  bool IsStreamDataReady();

  std::vector<api::StreamData> GetReadyStreamData(bool matched);
  std::vector<api::MotionData> GetReadyMotionDatas();

  std::shared_ptr<Device> device_;
  Stream stream_;
  Stream stream_match_;
  std::atomic_bool stream_match_enabled_;

  float stream_interval_us_;
  float stream_interval_us_half_;

  API::motion_callback_t motion_callback_;
  std::vector<device::MotionData> motion_datas_;
  std::recursive_mutex mtx_motion_datas_;

  std::vector<api::StreamData> stream_datas_;
  std::vector<api::StreamData> stream_datas_match_;
  std::recursive_mutex mtx_stream_datas_;
  std::condition_variable_any cond_stream_datas_;

  std::uint64_t ready_image_timestamp_;
};

MYNTEYE_END_NAMESPACE

#endif  // MYNTEYE_API_CONFIG_H_

src/mynteye/api/data_tools.cc (new file, 78 lines)

@@ -0,0 +1,78 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include <algorithm>
#include <functional>
#include <stdexcept>
#include "mynteye/api/data_tools.h"
#include "mynteye/logger.h"

MYNTEYE_BEGIN_NAMESPACE

cv::Mat frame2mat(const std::shared_ptr<device::Frame> &frame) {
  if (frame->format() == Format::YUYV) {
    cv::Mat img(frame->height(), frame->width(), CV_8UC2, frame->data());
    cv::cvtColor(img, img, cv::COLOR_YUV2BGR_YUY2);
    return img;
  } else if (frame->format() == Format::BGR888) {
    cv::Mat img(frame->height(), frame->width(), CV_8UC3, frame->data());
    return img;
  } else {  // Format::GRAY
    return cv::Mat(frame->height(), frame->width(), CV_8UC1, frame->data());
  }
}

api::StreamData data2api(const device::StreamData &data) {
  return {data.img, frame2mat(data.frame), data.frame, data.frame_id};
}

// ObjMat/ObjMat2 > api::StreamData

api::StreamData obj_data_first(const ObjMat2 *obj) {
  return {obj->first_data, obj->first, nullptr, obj->first_id};
}

api::StreamData obj_data_second(const ObjMat2 *obj) {
  return {obj->second_data, obj->second, nullptr, obj->second_id};
}

api::StreamData obj_data(const ObjMat *obj) {
  return {obj->data, obj->value, nullptr, obj->id};
}

api::StreamData obj_data_first(const std::shared_ptr<ObjMat2> &obj) {
  return {obj->first_data, obj->first, nullptr, obj->first_id};
}

api::StreamData obj_data_second(const std::shared_ptr<ObjMat2> &obj) {
  return {obj->second_data, obj->second, nullptr, obj->second_id};
}

api::StreamData obj_data(const std::shared_ptr<ObjMat> &obj) {
  return {obj->data, obj->value, nullptr, obj->id};
}

// api::StreamData > ObjMat/ObjMat2

ObjMat data_obj(const api::StreamData &data) {
  return ObjMat{data.frame, data.frame_id, data.img};
}

ObjMat2 data_obj(const api::StreamData &first, const api::StreamData &second) {
  return ObjMat2{
      first.frame, first.frame_id, first.img,
      second.frame, second.frame_id, second.img};
}

MYNTEYE_END_NAMESPACE

src/mynteye/api/data_tools.h (new file, 33 lines)

@@ -0,0 +1,33 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_API_DATA_TOOLS_H_
#define MYNTEYE_API_DATA_TOOLS_H_
#pragma once
#include <opencv2/imgproc/imgproc.hpp>
#include "mynteye/api/object.h"
#include "mynteye/api/api.h"
#include "mynteye/device/device.h"
MYNTEYE_BEGIN_NAMESPACE
cv::Mat frame2mat(const std::shared_ptr<device::Frame> &frame);
api::StreamData data2api(const device::StreamData &data);
api::StreamData obj_data_first(const ObjMat2 *obj);
api::StreamData obj_data_second(const ObjMat2 *obj);
api::StreamData obj_data(const ObjMat *obj);
api::StreamData obj_data_first(const std::shared_ptr<ObjMat2> &obj);
api::StreamData obj_data_second(const std::shared_ptr<ObjMat2> &obj);
api::StreamData obj_data(const std::shared_ptr<ObjMat> &obj);
ObjMat data_obj(const api::StreamData &data);
ObjMat2 data_obj(const api::StreamData &first, const api::StreamData &second);
MYNTEYE_END_NAMESPACE
#endif  // MYNTEYE_API_DATA_TOOLS_H_

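A small sketch of what these helpers are for (the function below is hypothetical, not in the commit): frame2mat() wraps a raw device frame in a cv::Mat (converting YUYV/BGR888 to BGR), and data2api() packages a device::StreamData into the api::StreamData type used by the public API.

#include <opencv2/highgui/highgui.hpp>

#include "mynteye/api/data_tools.h"

MYNTEYE_BEGIN_NAMESPACE

// Hypothetical helper: show one device-layer frame with OpenCV.
void show_device_frame(const device::StreamData &device_data) {
  api::StreamData data = data2api(device_data);
  if (!data.frame.empty()) {
    cv::imshow("frame", data.frame);
    cv::waitKey(1);
  }
}

MYNTEYE_END_NAMESPACE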
@@ -19,6 +19,7 @@
 #include "mynteye/logger.h"
 #include "mynteye/util/strings.h"
 #include "mynteye/util/times.h"
+#include "mynteye/api/data_tools.h"

 MYNTEYE_BEGIN_NAMESPACE

@@ -245,6 +246,74 @@ void Processor::Run() {
   VLOG(2) << Name() << " thread end";
 }

+api::StreamData Processor::GetStreamData(const Stream &stream) {
+  auto sum = getStreamsSum();
+  auto &&out = GetOutput();
+  Synthetic::Mode enable_mode = Synthetic::MODE_OFF;
+  auto streams = getTargetStreams();
+  for (auto it_s : streams) {
+    if (it_s.stream == stream) {
+      enable_mode = it_s.enabled_mode_;
+      break;
+    }
+  }
+  if (enable_mode == Synthetic::MODE_ON) {
+    if (sum == 1) {
+      if (out != nullptr) {
+        auto &&output = Object::Cast<ObjMat>(out);
+        if (output != nullptr) {
+          return obj_data(output);
+        }
+        VLOG(2) << "Rectify not ready now";
+      }
+    } else if (sum == 2) {
+      static std::shared_ptr<ObjMat2> output = nullptr;
+      if (out != nullptr) {
+        output = Object::Cast<ObjMat2>(out);
+      }
+      auto streams = getTargetStreams();
+      if (output != nullptr) {
+        int num = 0;
+        for (auto it : streams) {
+          if (it.stream == stream) {
+            if (num == 1) {
+              return obj_data_first(output);
+            } else {
+              return obj_data_second(output);
+            }
+          }
+          num++;
+        }
+      }
+      VLOG(2) << "Rectify not ready now";
+    } else {
+      LOG(ERROR) << "error: invalid sum!";
+    }
+    return {};  // frame.empty() == true
+  }
+  LOG(ERROR) << "Failed to get stream data of " << stream
+      << ", unsupported or disabled";
+  return {};  // frame.empty() == true
+}
+
+std::vector<api::StreamData> Processor::GetStreamDatas(const Stream &stream) {
+  Synthetic::Mode enable_mode = Synthetic::MODE_OFF;
+  auto streams = getTargetStreams();
+  for (auto it_s : streams) {
+    if (it_s.stream == stream) {
+      enable_mode = it_s.enabled_mode_;
+      break;
+    }
+  }
+  if (enable_mode == Synthetic::MODE_ON) {
+    return {GetStreamData(stream)};
+  } else {
+    LOG(ERROR) << "Failed to get stream data of " << stream
+        << ", unsupported or disabled";
+  }
+  return {};
+}
+
 void Processor::SetIdle(bool idle) {
   std::lock_guard<std::mutex> lk(mtx_state_);
   idle_ = idle;
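Explanatory note (not in the commit): a processor that produces a single target stream caches its last output as an ObjMat, while a two-stream processor caches an ObjMat2; GetStreamData() returns the half that matches the requested stream. A hedged caller-side sketch, assuming `proc` is any Processor whose stream is already MODE_ON:

  api::StreamData d = proc->GetStreamData(Stream::LEFT_RECTIFIED);
  if (d.frame.empty()) {
    VLOG(2) << "output not ready yet";  // same situation as "Rectify not ready now"
  }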
@@ -66,6 +66,10 @@ class Processor :
   /** Returns dropped or not. */
   bool Process(const Object &in);

+  virtual api::StreamData GetStreamData(const Stream &stream);
+
+  virtual std::vector<api::StreamData> GetStreamDatas(const Stream &stream);
+
   /**
    * Returns the last output.
    * @note Returns null if not output now.
@@ -14,15 +14,21 @@
 #include "mynteye/api/processor/root_camera_processor.h"

 #include <utility>
+#include <vector>

 #include <opencv2/calib3d/calib3d.hpp>
 #include <opencv2/imgproc/imgproc.hpp>
 #include "mynteye/logger.h"
+#include "mynteye/api/synthetic.h"
+#include "mynteye/device/device.h"
+#include "mynteye/api/data_tools.h"

 MYNTEYE_BEGIN_NAMESPACE
 const char RootProcessor::NAME[] = "RootProcessor";

-RootProcessor::RootProcessor(std::int32_t proc_period)
-    : Processor(std::move(proc_period)) {}
+RootProcessor::RootProcessor(std::shared_ptr<Device> device,
+    std::int32_t proc_period)
+    : Processor(std::move(proc_period)),
+      device_(device) {}
 RootProcessor::~RootProcessor() {
   VLOG(2) << __func__;
 }
@@ -31,13 +37,114 @@ std::string RootProcessor::Name() {
   return NAME;
 }

-Object *RootProcessor::OnCreateOutput() {
+s1s2Processor::s1s2Processor(std::shared_ptr<Device> device,
+    std::int32_t proc_period)
+    : RootProcessor(device, std::move(proc_period)) {}
+s1s2Processor::~s1s2Processor() {
+  VLOG(2) << __func__;
+}
+
+Object *s1s2Processor::OnCreateOutput() {
   return new ObjMat2();
 }
-bool RootProcessor::OnProcess(
+bool s1s2Processor::OnProcess(
     Object *const in, Object *const out,
     std::shared_ptr<Processor> const parent) {
+  const ObjMat2 *input = Object::Cast<ObjMat2>(in);
+  ObjMat2 *output = Object::Cast<ObjMat2>(out);
+  output->second = input->second;
+  output->first = input->first;
+  output->first_id = input->first_id;
+  output->first_data = input->first_data;
+  output->second_id = input->second_id;
+  output->second_data = input->second_data;
   MYNTEYE_UNUSED(parent)
   return true;
 }
+
+void s1s2Processor::ProcessNativeStream(
+    const Stream &stream, const api::StreamData &data) {
+  std::unique_lock<std::mutex> lk(mtx_left_right_ready_);
+  static api::StreamData left_data, right_data;
+  if (stream == Stream::LEFT) {
+    left_data = data;
+  } else if (stream == Stream::RIGHT) {
+    right_data = data;
+  }
+  if (left_data.img && right_data.img &&
+      left_data.img->frame_id == right_data.img->frame_id) {
+    Process(data_obj(left_data, right_data));
+  }
+  return;
+}
+
+void s1s2Processor::StartVideoStreaming() {
+  Activate();
+  auto streams = getTargetStreams();
+  for (unsigned int j = 0; j < streams.size(); j++) {
+    auto stream = streams[j].stream;
+    auto callback = streams[j].stream_callback;
+    target_streams_[j].enabled_mode_ = Synthetic::MODE_ON;
+    device_->SetStreamCallback(
+        stream,
+        [this, stream, callback](const device::StreamData &data) {
+          auto &&stream_data = data2api(data);
+          ProcessNativeStream(stream, stream_data);
+          // Need mutex if set callback after start
+          if (callback) {
+            callback(stream_data);
+          }
+        },
+        true);
+  }
+  device_->Start(Source::VIDEO_STREAMING);
+}
+
+void s1s2Processor::StopVideoStreaming() {
+  Deactivate();
+  auto streams = getTargetStreams();
+  for (unsigned int j = 0; j < streams.size(); j++) {
+    auto stream = streams[j].stream;
+    target_streams_[j].enabled_mode_ = Synthetic::MODE_OFF;
+    device_->SetStreamCallback(stream, nullptr);
+  }
+  device_->Stop(Source::VIDEO_STREAMING);
+}
+
+api::StreamData s1s2Processor::GetStreamData(const Stream &stream) {
+  Synthetic::Mode enable_mode = Synthetic::MODE_OFF;
+  auto streams = getTargetStreams();
+  for (auto it_s : streams) {
+    if (it_s.stream == stream) {
+      enable_mode = it_s.enabled_mode_;
+      break;
+    }
+  }
+  if (enable_mode == Synthetic::MODE_ON) {
+    return data2api(device_->GetStreamData(stream));
+  }
+  LOG(ERROR) << "Failed to get device stream data of " << stream
+      << ", unsupported or disabled";
+  LOG(ERROR) << "Make sure you have enable " << stream;
+  return {};
+}
+
+std::vector<api::StreamData> s1s2Processor::GetStreamDatas(
+    const Stream &stream) {
+  Synthetic::Mode enable_mode = Synthetic::MODE_OFF;
+  auto streams = getTargetStreams();
+  for (auto it_s : streams) {
+    if (it_s.stream == stream) {
+      enable_mode = it_s.enabled_mode_;
+      break;
+    }
+  }
+  if (enable_mode == Synthetic::MODE_ON) {
+    std::vector<api::StreamData> datas;
+    for (auto &&data : device_->GetStreamDatas(stream)) {
+      datas.push_back(data2api(data));
+    }
+    return datas;
+  }
+}
+
 MYNTEYE_END_NAMESPACE
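For orientation (condensed sketch, not a new hunk): the s1s2Processor now owns the device callbacks, so Synthetic only builds the tree and forwards streaming control. The lines below are excerpted from the InitProcessors()/StartVideoStreaming() hunks later in this commit; api_, ROOT_PROC_PERIOD and rectify_processor are as used there.

  auto root_processor =
      std::make_shared<s1s2Processor>(api_->device(), ROOT_PROC_PERIOD);
  root_processor->addTargetStreams({Stream::LEFT, Mode::MODE_OFF, nullptr});
  root_processor->addTargetStreams({Stream::RIGHT, Mode::MODE_OFF, nullptr});
  root_processor->AddChild(rectify_processor);
  processor_ = root_processor;
  // Synthetic::StartVideoStreaming() then reduces to:
  //   processor_->StartVideoStreaming();
  // which registers the LEFT/RIGHT device callbacks and starts
  // Source::VIDEO_STREAMING on the device.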
@@ -28,16 +28,42 @@ class RootProcessor : public Processor {
  public:
   static const char NAME[];

-  explicit RootProcessor(std::int32_t proc_period = 0);
+  explicit RootProcessor(std::shared_ptr<Device> device,
+      std::int32_t proc_period = 0);
   virtual ~RootProcessor();

-  std::string Name() override;
+  virtual std::string Name();

+  virtual void StartVideoStreaming() = 0;
+  virtual void StopVideoStreaming() = 0;
+  virtual api::StreamData GetStreamData(const Stream &stream) = 0;
+  virtual std::vector<api::StreamData> GetStreamDatas(const Stream &stream) = 0;  // NOLINT
+ protected:
+  virtual Object *OnCreateOutput() = 0;
+  virtual bool OnProcess(
+      Object *const in, Object *const out,
+      std::shared_ptr<Processor> const parent) = 0;
+  std::shared_ptr<Device> device_;
+};
+
+class s1s2Processor : public RootProcessor {
+ public:
+  explicit s1s2Processor(std::shared_ptr<Device> device,
+      std::int32_t proc_period = 0);
+  virtual ~s1s2Processor();
+  void StartVideoStreaming();
+  void StopVideoStreaming();
+  api::StreamData GetStreamData(const Stream &stream) override;
+  std::vector<api::StreamData> GetStreamDatas(const Stream &stream) override;  // NOLINT
  protected:
   Object *OnCreateOutput() override;
   bool OnProcess(
       Object *const in, Object *const out,
       std::shared_ptr<Processor> const parent) override;
+ private:
+  void ProcessNativeStream(
+      const Stream &stream, const api::StreamData &data);
+  std::mutex mtx_left_right_ready_;
 };

 MYNTEYE_END_NAMESPACE
@@ -36,6 +36,7 @@
 #include "mynteye/api/processor/rectify_processor.h"
 #endif
 #include "mynteye/device/device.h"
+#include "mynteye/api/data_tools.h"

 #define RECTIFY_PROC_PERIOD 0
 #define DISPARITY_PROC_PERIOD 0
@@ -46,36 +47,6 @@
 MYNTEYE_BEGIN_NAMESPACE

-namespace {
-
-cv::Mat frame2mat(const std::shared_ptr<device::Frame> &frame) {
-  if (frame->format() == Format::YUYV) {
-    cv::Mat img(frame->height(), frame->width(), CV_8UC2, frame->data());
-    cv::cvtColor(img, img, cv::COLOR_YUV2BGR_YUY2);
-    return img;
-  } else if (frame->format() == Format::BGR888) {
-    cv::Mat img(frame->height(), frame->width(), CV_8UC3, frame->data());
-    return img;
-  } else {  // Format::GRAY
-    return cv::Mat(frame->height(), frame->width(), CV_8UC1, frame->data());
-  }
-}
-
-api::StreamData data2api(const device::StreamData &data) {
-  return {data.img, frame2mat(data.frame), data.frame, data.frame_id};
-}
-
-void process_childs(
-    const std::shared_ptr<Processor> &proc, const std::string &name,
-    const Object &obj) {
-  auto &&processor = find_processor<Processor>(proc, name);
-  for (auto child : processor->GetChilds()) {
-    child->Process(obj);
-  }
-}
-
-}  // namespace
-
 void Synthetic::InitCalibInfo() {
   if (calib_model_ == CalibrationModel::PINHOLE) {
     LOG(INFO) << "camera calib model: pinhole";
@@ -105,12 +76,12 @@ Synthetic::Synthetic(API *api, CalibrationModel calib_model)
     : api_(api),
       plugin_(nullptr),
       calib_model_(calib_model),
-      calib_default_tag_(false) {
+      calib_default_tag_(false),
+      stream_data_listener_(nullptr) {
   VLOG(2) << __func__;
   CHECK_NOTNULL(api_);
   InitCalibInfo();
   InitProcessors();
-  InitStreamSupports();
 }

 Synthetic::~Synthetic() {
@@ -121,6 +92,10 @@ Synthetic::~Synthetic() {
   }
 }

+void Synthetic::SetStreamDataListener(stream_data_listener_t listener) {
+  stream_data_listener_ = listener;
+}
+
 void Synthetic::NotifyImageParamsChanged() {
   if (!calib_default_tag_) {
     intr_left_ = api_->GetIntrinsicsBase(Stream::LEFT);
@@ -128,19 +103,18 @@ void Synthetic::NotifyImageParamsChanged() {
     extr_ = std::make_shared<Extrinsics>(
         api_->GetExtrinsics(Stream::LEFT, Stream::RIGHT));
   }
-  if (calib_model_ == CalibrationModel::PINHOLE) {
-    auto &&processor = find_processor<RectifyProcessorOCV>(processor_);
-    if (processor) processor->ReloadImageParams(intr_left_, intr_right_, extr_);
+  auto processor = getProcessorWithStream(Stream::LEFT_RECTIFIED);
+  if (processor && calib_model_ == CalibrationModel::PINHOLE) {
+    auto proc = static_cast<RectifyProcessorOCV*>(&(*processor));
+    proc->ReloadImageParams(intr_left_, intr_right_, extr_);
 #ifdef WITH_CAM_MODELS
-  } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
-    auto &&processor = find_processor<RectifyProcessor>(processor_);
-    if (processor) processor->ReloadImageParams(intr_left_, intr_right_, extr_);
+  } else if (processor && calib_model_ == CalibrationModel::KANNALA_BRANDT) {
+    auto proc = static_cast<RectifyProcessor*>(&(*processor));
+    proc->ReloadImageParams(intr_left_, intr_right_, extr_);
 #endif
   } else {
-    LOG(ERROR) << "Unknow calib model type in device: "
-        << calib_model_ << ", use default pinhole model";
-    auto &&processor = find_processor<RectifyProcessorOCV>(processor_);
-    if (processor) processor->ReloadImageParams(intr_left_, intr_right_, extr_);
+    LOG(ERROR) << "Unknow calib model type in device" << std::endl;
   }
 }
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
void Synthetic::EnableStreamData(const Stream &stream) {
|
bool Synthetic::Supports(const Stream &stream) const {
|
||||||
|
return checkControlDateWithStream(stream);
|
||||||
|
}
|
||||||
|
|
||||||
|
void Synthetic::EnableStreamData(
|
||||||
|
const Stream &stream, stream_switch_callback_t callback,
|
||||||
|
bool try_tag) {
|
||||||
// Activate processors of synthetic stream
|
// Activate processors of synthetic stream
|
||||||
auto processor = getProcessorWithStream(stream);
|
auto processor = getProcessorWithStream(stream);
|
||||||
iterate_processor_CtoP_before(processor,
|
iterate_processor_CtoP_before(processor,
|
||||||
[](std::shared_ptr<Processor> proce){
|
[callback, try_tag](std::shared_ptr<Processor> proce){
|
||||||
|
if (proce->Name() == "RootProcessor") {
|
||||||
|
return;
|
||||||
|
}
|
||||||
auto streams = proce->getTargetStreams();
|
auto streams = proce->getTargetStreams();
|
||||||
int act_tag = 0;
|
int act_tag = 0;
|
||||||
for (unsigned int i = 0; i < proce->getStreamsSum() ; i++) {
|
for (unsigned int i = 0; i < proce->getStreamsSum() ; i++) {
|
||||||
if (proce->target_streams_[i].enabled_mode_ == MODE_LAST) {
|
if (proce->target_streams_[i].enabled_mode_ == MODE_OFF) {
|
||||||
|
callback(proce->target_streams_[i].stream);
|
||||||
|
if (!try_tag) {
|
||||||
act_tag++;
|
act_tag++;
|
||||||
proce->target_streams_[i].enabled_mode_ = MODE_SYNTHETIC;
|
proce->target_streams_[i].enabled_mode_ = MODE_ON;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (act_tag > 0 && !proce->IsActivated()) {
|
if (act_tag > 0 && !proce->IsActivated()) {
|
||||||
|
@ -214,30 +200,24 @@ void Synthetic::EnableStreamData(const Stream &stream) {
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
void Synthetic::DisableStreamData(
|
||||||
|
const Stream &stream, stream_switch_callback_t callback,
|
||||||
bool Synthetic::Supports(const Stream &stream) const {
|
bool try_tag) {
|
||||||
return checkControlDateWithStream(stream);
|
|
||||||
}
|
|
||||||
|
|
||||||
Synthetic::mode_t Synthetic::SupportsMode(const Stream &stream) const {
|
|
||||||
if (checkControlDateWithStream(stream)) {
|
|
||||||
auto data = getControlDateWithStream(stream);
|
|
||||||
return data.support_mode_;
|
|
||||||
}
|
|
||||||
return MODE_LAST;
|
|
||||||
}
|
|
||||||
|
|
||||||
void Synthetic::DisableStreamData(const Stream &stream) {
|
|
||||||
auto processor = getProcessorWithStream(stream);
|
auto processor = getProcessorWithStream(stream);
|
||||||
iterate_processor_PtoC_before(processor,
|
iterate_processor_PtoC_before(processor,
|
||||||
[](std::shared_ptr<Processor> proce){
|
[callback, try_tag](std::shared_ptr<Processor> proce){
|
||||||
|
if (proce->Name() == "RootProcessor") {
|
||||||
|
return;
|
||||||
|
}
|
||||||
auto streams = proce->getTargetStreams();
|
auto streams = proce->getTargetStreams();
|
||||||
int act_tag = 0;
|
int act_tag = 0;
|
||||||
for (unsigned int i = 0; i < proce->getStreamsSum() ; i++) {
|
for (unsigned int i = 0; i < proce->getStreamsSum() ; i++) {
|
||||||
if (proce->target_streams_[i].enabled_mode_ == MODE_SYNTHETIC) {
|
if (proce->target_streams_[i].enabled_mode_ == MODE_ON) {
|
||||||
|
callback(proce->target_streams_[i].stream);
|
||||||
|
if (!try_tag) {
|
||||||
act_tag++;
|
act_tag++;
|
||||||
proce->target_streams_[i].enabled_mode_ = MODE_LAST;
|
proce->target_streams_[i].enabled_mode_ = MODE_OFF;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (act_tag > 0 && proce->IsActivated()) {
|
if (act_tag > 0 && proce->IsActivated()) {
|
||||||
|
@ -247,11 +227,24 @@ void Synthetic::DisableStreamData(const Stream &stream) {
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
void Synthetic::EnableStreamData(const Stream &stream) {
|
||||||
|
EnableStreamData(stream, [](const Stream &stream){
|
||||||
|
// std::cout << stream << "enabled in callback" << std::endl;
|
||||||
|
MYNTEYE_UNUSED(stream);
|
||||||
|
}, false);
|
||||||
|
}
|
||||||
|
|
||||||
|
void Synthetic::DisableStreamData(const Stream &stream) {
|
||||||
|
DisableStreamData(stream, [](const Stream &stream){
|
||||||
|
// std::cout << stream << "disabled in callback" << std::endl;
|
||||||
|
MYNTEYE_UNUSED(stream);
|
||||||
|
}, false);
|
||||||
|
}
|
||||||
|
|
||||||
bool Synthetic::IsStreamDataEnabled(const Stream &stream) const {
|
bool Synthetic::IsStreamDataEnabled(const Stream &stream) const {
|
||||||
if (checkControlDateWithStream(stream)) {
|
if (checkControlDateWithStream(stream)) {
|
||||||
auto data = getControlDateWithStream(stream);
|
auto data = getControlDateWithStream(stream);
|
||||||
return data.enabled_mode_ == MODE_SYNTHETIC ||
|
return data.enabled_mode_ == MODE_ON;
|
||||||
data.enabled_mode_ == MODE_NATIVE;
|
|
||||||
}
|
}
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
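Usage sketch of the new switch-callback overload (illustrative only; `synthetic` stands for the API's Synthetic instance, which is an assumption of this note): the callback fires once for every upstream target stream found in MODE_OFF, and with try_tag == true nothing is actually switched, so it behaves as a dry run.

  synthetic->EnableStreamData(
      Stream::DEPTH,
      [](const Stream &stream) {
        LOG(INFO) << stream << " enabled in callback";
      },
      false /* try_tag: actually enable, not a dry run */);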
@@ -279,42 +272,11 @@ bool Synthetic::HasStreamCallback(const Stream &stream) const {
 }

 void Synthetic::StartVideoStreaming() {
-  auto &&device = api_->device();
-  for (unsigned int i = 0; i < processors_.size(); i++) {
-    auto streams = processors_[i]->getTargetStreams();
-    for (unsigned int j = 0; j < streams.size(); j++) {
-      if (processors_[i]->target_streams_[j].support_mode_ == MODE_NATIVE) {
-        auto stream = processors_[i]->target_streams_[j].stream;
-        device->SetStreamCallback(
-            stream,
-            [this, stream](const device::StreamData &data) {
-              auto &&stream_data = data2api(data);
-              ProcessNativeStream(stream, stream_data);
-              // Need mutex if set callback after start
-              if (HasStreamCallback(stream)) {
-                auto data = getControlDateWithStream(stream);
-                data.stream_callback(stream_data);
-              }
-            },
-            true);
-      }
-    }
-  }
-  device->Start(Source::VIDEO_STREAMING);
+  processor_->StartVideoStreaming();
 }

 void Synthetic::StopVideoStreaming() {
-  auto &&device = api_->device();
-  for (unsigned int i = 0; i < processors_.size(); i++) {
-    auto streams = processors_[i]->getTargetStreams();
-    for (unsigned int j = 0; j < streams.size(); j++) {
-      if (processors_[i]->target_streams_[j].support_mode_ == MODE_NATIVE) {
-        auto stream = processors_[i]->target_streams_[j].stream;
-        device->SetStreamCallback(stream, nullptr);
-      }
-    }
-  }
-  device->Stop(Source::VIDEO_STREAMING);
+  processor_->StopVideoStreaming();
 }

 void Synthetic::WaitForStreams() {
|
||||||
}
|
}
|
||||||
|
|
||||||
api::StreamData Synthetic::GetStreamData(const Stream &stream) {
|
api::StreamData Synthetic::GetStreamData(const Stream &stream) {
|
||||||
auto &&mode = GetStreamEnabledMode(stream);
|
return getProcessorWithStream(stream)->GetStreamData(stream);
|
||||||
if (mode == MODE_NATIVE) {
|
|
||||||
auto &&device = api_->device();
|
|
||||||
return data2api(device->GetStreamData(stream));
|
|
||||||
} else if (mode == MODE_SYNTHETIC) {
|
|
||||||
auto processor = getProcessorWithStream(stream);
|
|
||||||
auto sum = processor->getStreamsSum();
|
|
||||||
auto &&out = processor->GetOutput();
|
|
||||||
static std::shared_ptr<ObjMat2> output = nullptr;
|
|
||||||
if (sum == 1) {
|
|
||||||
if (out != nullptr) {
|
|
||||||
auto &&output = Object::Cast<ObjMat>(out);
|
|
||||||
if (output != nullptr) {
|
|
||||||
return {output->data, output->value, nullptr, output->id};
|
|
||||||
}
|
|
||||||
VLOG(2) << "Rectify not ready now";
|
|
||||||
}
|
|
||||||
} else if (sum == 2) {
|
|
||||||
if (out != nullptr) {
|
|
||||||
output = Object::Cast<ObjMat2>(out);
|
|
||||||
}
|
|
||||||
auto streams = processor->getTargetStreams();
|
|
||||||
if (output != nullptr) {
|
|
||||||
int num = 0;
|
|
||||||
for (auto it : streams) {
|
|
||||||
if (it.stream == stream) {
|
|
||||||
if (num == 1) {
|
|
||||||
return {output->first_data,
|
|
||||||
output->first,
|
|
||||||
nullptr,
|
|
||||||
output->first_id};
|
|
||||||
} else {
|
|
||||||
return {output->second_data,
|
|
||||||
output->second,
|
|
||||||
nullptr,
|
|
||||||
output->second_id};
|
|
||||||
}
|
|
||||||
}
|
|
||||||
num++;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
VLOG(2) << "Rectify not ready now";
|
|
||||||
} else {
|
|
||||||
LOG(ERROR) << "error: invalid sum!";
|
|
||||||
}
|
|
||||||
return {}; // frame.empty() == true
|
|
||||||
} else {
|
|
||||||
LOG(ERROR) << "Failed to get stream data of " << stream
|
|
||||||
<< ", unsupported or disabled";
|
|
||||||
return {}; // frame.empty() == true
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
std::vector<api::StreamData> Synthetic::GetStreamDatas(const Stream &stream) {
|
std::vector<api::StreamData> Synthetic::GetStreamDatas(const Stream &stream) {
|
||||||
auto &&mode = GetStreamEnabledMode(stream);
|
return getProcessorWithStream(stream)->GetStreamDatas(stream);
|
||||||
if (mode == MODE_NATIVE) {
|
|
||||||
auto &&device = api_->device();
|
|
||||||
std::vector<api::StreamData> datas;
|
|
||||||
for (auto &&data : device->GetStreamDatas(stream)) {
|
|
||||||
datas.push_back(data2api(data));
|
|
||||||
}
|
|
||||||
return datas;
|
|
||||||
} else if (mode == MODE_SYNTHETIC) {
|
|
||||||
return {GetStreamData(stream)};
|
|
||||||
} else {
|
|
||||||
LOG(ERROR) << "Failed to get stream data of " << stream
|
|
||||||
<< ", unsupported or disabled";
|
|
||||||
}
|
|
||||||
return {};
|
|
||||||
}
|
}
|
||||||
|
|
||||||
void Synthetic::SetPlugin(std::shared_ptr<Plugin> plugin) {
|
void Synthetic::SetPlugin(std::shared_ptr<Plugin> plugin) {
|
||||||
|
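For callers nothing changes (sketch, illustrative only; `api` is assumed to be a created API instance with the DEPTH stream enabled). Synthetic now simply forwards to the processor that owns the stream:

  // Call path after this commit:
  //   API::GetStreamData() -> Synthetic::GetStreamData()
  //     -> getProcessorWithStream(stream)->GetStreamData(stream)
  auto data = api->GetStreamData(Stream::DEPTH);
  if (!data.frame.empty()) {
    cv::imshow("depth", data.frame);  // needs <opencv2/highgui/highgui.hpp>
  }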
@@ -401,134 +299,87 @@ bool Synthetic::HasPlugin() const {
   return plugin_ != nullptr;
 }

-void Synthetic::InitStreamSupports() {
-  auto &&device = api_->device();
-  if (device->Supports(Stream::LEFT) && device->Supports(Stream::RIGHT)) {
-    auto processor = getProcessorWithStream(Stream::LEFT);
-    for (unsigned int i = 0; i < processor->target_streams_.size(); i++) {
-      if (processor->target_streams_[i].stream == Stream::LEFT) {
-        processor->target_streams_[i].support_mode_ = MODE_NATIVE;
-      }
-      if (processor->target_streams_[i].stream == Stream::RIGHT) {
-        processor->target_streams_[i].support_mode_ = MODE_NATIVE;
-      }
-    }
-
-    std::vector<Stream> stream_chain{
-        Stream::LEFT_RECTIFIED, Stream::RIGHT_RECTIFIED,
-        Stream::DISPARITY, Stream::DISPARITY_NORMALIZED,
-        Stream::POINTS, Stream::DEPTH};
-    for (auto &&stream : stream_chain) {
-      auto processor = getProcessorWithStream(stream);
-      for (unsigned int i = 0; i < processor->target_streams_.size(); i++) {
-        if (processor->target_streams_[i].stream == stream) {
-          if (device->Supports(stream)) {
-            processor->target_streams_[i].support_mode_ = MODE_NATIVE;
-            processor->target_streams_[i].enabled_mode_ = MODE_NATIVE;
-          } else {
-            processor->target_streams_[i].support_mode_ = MODE_SYNTHETIC;
-          }
-        }
-      }
-    }
-  }
-}
-
 Synthetic::mode_t Synthetic::GetStreamEnabledMode(const Stream &stream) const {
   if (checkControlDateWithStream(stream)) {
     auto data = getControlDateWithStream(stream);
     return data.enabled_mode_;
   }
-  return MODE_LAST;
-}
-
-bool Synthetic::IsStreamEnabledNative(const Stream &stream) const {
-  return GetStreamEnabledMode(stream) == MODE_NATIVE;
-}
-
-bool Synthetic::IsStreamEnabledSynthetic(const Stream &stream) const {
-  return GetStreamEnabledMode(stream) == MODE_SYNTHETIC;
+  return MODE_OFF;
 }

 void Synthetic::InitProcessors() {
   std::shared_ptr<Processor> rectify_processor = nullptr;
-#ifdef WITH_CAM_MODELS
-  std::shared_ptr<RectifyProcessor> rectify_processor_imp = nullptr;
-#endif
-  cv::Mat Q;
-  if (calib_model_ == CalibrationModel::PINHOLE) {
-    auto &&rectify_processor_ocv =
-        std::make_shared<RectifyProcessorOCV>(intr_left_, intr_right_, extr_,
-                                              RECTIFY_PROC_PERIOD);
-    rectify_processor = rectify_processor_ocv;
-    Q = rectify_processor_ocv->Q;
-#ifdef WITH_CAM_MODELS
-  } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
-    rectify_processor_imp =
-        std::make_shared<RectifyProcessor>(intr_left_, intr_right_, extr_,
-                                           RECTIFY_PROC_PERIOD);
-    rectify_processor = rectify_processor_imp;
-#endif
-  } else {
-    LOG(ERROR) << "Unknow calib model type in device: "
-        << calib_model_ << ", use default pinhole model";
-    auto &&rectify_processor_ocv =
-        std::make_shared<RectifyProcessorOCV>(intr_left_, intr_right_, extr_,
-                                              RECTIFY_PROC_PERIOD);
-    rectify_processor = rectify_processor_ocv;
-  }
+  std::shared_ptr<Processor> points_processor = nullptr;
+  std::shared_ptr<Processor> depth_processor = nullptr;
+
   auto &&disparity_processor =
       std::make_shared<DisparityProcessor>(DisparityComputingMethod::SGBM,
                                            DISPARITY_PROC_PERIOD);
   auto &&disparitynormalized_processor =
       std::make_shared<DisparityNormalizedProcessor>(
          DISPARITY_NORM_PROC_PERIOD);
-  std::shared_ptr<Processor> points_processor = nullptr;
+
+  auto root_processor =
+      std::make_shared<s1s2Processor>(api_->device(), ROOT_PROC_PERIOD);
+
   if (calib_model_ == CalibrationModel::PINHOLE) {
+    // PINHOLE
+    auto &&rectify_processor_ocv =
+        std::make_shared<RectifyProcessorOCV>(intr_left_, intr_right_, extr_,
+                                              RECTIFY_PROC_PERIOD);
+    rectify_processor = rectify_processor_ocv;
     points_processor = std::make_shared<PointsProcessorOCV>(
-        Q, POINTS_PROC_PERIOD);
+        rectify_processor_ocv->Q, POINTS_PROC_PERIOD);
+    depth_processor = std::make_shared<DepthProcessorOCV>(DEPTH_PROC_PERIOD);
+
+    root_processor->AddChild(rectify_processor);
+    rectify_processor->AddChild(disparity_processor);
+    disparity_processor->AddChild(disparitynormalized_processor);
+    disparity_processor->AddChild(points_processor);
+    points_processor->AddChild(depth_processor);
 #ifdef WITH_CAM_MODELS
   } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
+    // KANNALA_BRANDT
+    auto rectify_processor_imp =
+        std::make_shared<RectifyProcessor>(intr_left_, intr_right_, extr_,
+                                           RECTIFY_PROC_PERIOD);
+    rectify_processor = rectify_processor_imp;
     points_processor = std::make_shared<PointsProcessor>(
         rectify_processor_imp -> getCalibInfoPair(),
         POINTS_PROC_PERIOD);
-#endif
-  } else {
-    points_processor = std::make_shared<PointsProcessorOCV>(
-        Q, POINTS_PROC_PERIOD);
-  }
-  std::shared_ptr<Processor> depth_processor = nullptr;
-  if (calib_model_ == CalibrationModel::PINHOLE) {
-    depth_processor = std::make_shared<DepthProcessorOCV>(DEPTH_PROC_PERIOD);
-#ifdef WITH_CAM_MODELS
-  } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
     depth_processor = std::make_shared<DepthProcessor>(
         rectify_processor_imp -> getCalibInfoPair(),
         DEPTH_PROC_PERIOD);
+
+    root_processor->AddChild(rectify_processor);
+    rectify_processor->AddChild(disparity_processor);
+    disparity_processor->AddChild(disparitynormalized_processor);
+    disparity_processor->AddChild(depth_processor);
+    depth_processor->AddChild(points_processor);
 #endif
   } else {
-    depth_processor = std::make_shared<DepthProcessorOCV>(DEPTH_PROC_PERIOD);
+    // UNKNOW
+    LOG(ERROR) << "Unknow calib model type in device: "
+        << calib_model_;
+    return;
   }
-  auto root_processor =
-      std::make_shared<RootProcessor>(ROOT_PROC_PERIOD);
-  root_processor->AddChild(rectify_processor);

   rectify_processor->addTargetStreams(
-      {Stream::LEFT_RECTIFIED, Mode::MODE_LAST, Mode::MODE_LAST, nullptr});
+      {Stream::LEFT_RECTIFIED, Mode::MODE_OFF, nullptr});
   rectify_processor->addTargetStreams(
-      {Stream::RIGHT_RECTIFIED, Mode::MODE_LAST, Mode::MODE_LAST, nullptr});
+      {Stream::RIGHT_RECTIFIED, Mode::MODE_OFF, nullptr});
   disparity_processor->addTargetStreams(
-      {Stream::DISPARITY, Mode::MODE_LAST, Mode::MODE_LAST, nullptr});
+      {Stream::DISPARITY, Mode::MODE_OFF, nullptr});
   disparitynormalized_processor->addTargetStreams(
-      {Stream::DISPARITY_NORMALIZED, Mode::MODE_LAST, Mode::MODE_LAST, nullptr});
+      {Stream::DISPARITY_NORMALIZED, Mode::MODE_OFF, nullptr});
   points_processor->addTargetStreams(
-      {Stream::POINTS, Mode::MODE_LAST, Mode::MODE_LAST, nullptr});
+      {Stream::POINTS, Mode::MODE_OFF, nullptr});
   depth_processor->addTargetStreams(
-      {Stream::DEPTH, Mode::MODE_LAST, Mode::MODE_LAST, nullptr});
+      {Stream::DEPTH, Mode::MODE_OFF, nullptr});
   root_processor->addTargetStreams(
-      {Stream::LEFT, Mode::MODE_NATIVE, Mode::MODE_NATIVE, nullptr});
+      {Stream::LEFT, Mode::MODE_OFF, nullptr});
   root_processor->addTargetStreams(
-      {Stream::RIGHT, Mode::MODE_NATIVE, Mode::MODE_NATIVE, nullptr});
+      {Stream::RIGHT, Mode::MODE_OFF, nullptr});

   processors_.push_back(root_processor);
   processors_.push_back(rectify_processor);
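The resulting processing tree, summarised from the AddChild() wiring above (descriptive note, not a new hunk):

  // s1s2Processor (root, owns the device stream callbacks)
  //   └── RectifyProcessorOCV / RectifyProcessor
  //         └── DisparityProcessor
  //               ├── DisparityNormalizedProcessor
  //               ├── PointsProcessorOCV ── DepthProcessorOCV   (PINHOLE)
  //               └── DepthProcessor ────── PointsProcessor     (KANNALA_BRANDT)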
@@ -537,6 +388,8 @@ void Synthetic::InitProcessors() {
   processors_.push_back(points_processor);
   processors_.push_back(depth_processor);
   using namespace std::placeholders;  // NOLINT
+  root_processor->SetProcessCallback(
+      std::bind(&Synthetic::OnDeviceProcess, this, _1, _2, _3));
   rectify_processor->SetProcessCallback(
       std::bind(&Synthetic::OnRectifyProcess, this, _1, _2, _3));
   disparity_processor->SetProcessCallback(
@@ -548,6 +401,8 @@ void Synthetic::InitProcessors() {
   depth_processor->SetProcessCallback(
       std::bind(&Synthetic::OnDepthProcess, this, _1, _2, _3));

+  root_processor->SetPostProcessCallback(
+      std::bind(&Synthetic::OnDevicePostProcess, this, _1));
   rectify_processor->SetPostProcessCallback(
       std::bind(&Synthetic::OnRectifyPostProcess, this, _1));
   disparity_processor->SetPostProcessCallback(
@@ -559,129 +414,14 @@
   depth_processor->SetPostProcessCallback(
       std::bind(&Synthetic::OnDepthPostProcess, this, _1));

-  if (calib_model_ == CalibrationModel::PINHOLE) {
-    // PINHOLE
-    rectify_processor->AddChild(disparity_processor);
-    disparity_processor->AddChild(disparitynormalized_processor);
-    disparity_processor->AddChild(points_processor);
-    points_processor->AddChild(depth_processor);
-  } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
-    // KANNALA_BRANDT
-    rectify_processor->AddChild(disparity_processor);
-    disparity_processor->AddChild(disparitynormalized_processor);
-    disparity_processor->AddChild(depth_processor);
-    depth_processor->AddChild(points_processor);
-  } else {
-    // UNKNOW
-    LOG(ERROR) << "Unknow calib model type in device: "
-        << calib_model_;
-  }
+  processor_ = root_processor;
 }

-  processor_ = rectify_processor;
-}
-
-void Synthetic::ProcessNativeStream(
-    const Stream &stream, const api::StreamData &data) {
-  if (stream == Stream::LEFT || stream == Stream::RIGHT) {
-    static api::StreamData left_data, right_data;
-    if (stream == Stream::LEFT) {
-      left_data = data;
-    } else if (stream == Stream::RIGHT) {
-      right_data = data;
-    }
-    if (left_data.img && right_data.img &&
-        left_data.img->frame_id == right_data.img->frame_id) {
-      std::shared_ptr<Processor> processor = nullptr;
-      if (calib_model_ == CalibrationModel::PINHOLE) {
-        processor = find_processor<RectifyProcessorOCV>(processor_);
-#ifdef WITH_CAM_MODELS
-      } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
-        processor = find_processor<RectifyProcessor>(processor_);
-#endif
-      } else {
-        LOG(ERROR) << "Unknow calib model type in device: "
-            << calib_model_ << ", use default pinhole model";
-        processor = find_processor<RectifyProcessorOCV>(processor_);
-      }
-      processor->Process(ObjMat2{
-          left_data.frame, left_data.frame_id, left_data.img,
-          right_data.frame, right_data.frame_id, right_data.img});
-    }
-    return;
-  }
-
-  if (stream == Stream::LEFT_RECTIFIED || stream == Stream::RIGHT_RECTIFIED) {
-    static api::StreamData left_rect_data, right_rect_data;
-    if (stream == Stream::LEFT_RECTIFIED) {
-      left_rect_data = data;
-    } else if (stream == Stream::RIGHT_RECTIFIED) {
-      right_rect_data = data;
-    }
-    if (left_rect_data.img && right_rect_data.img &&
-        left_rect_data.img->frame_id == right_rect_data.img->frame_id) {
-      std::string name = RectifyProcessorOCV::NAME;
-      if (calib_model_ == CalibrationModel::PINHOLE) {
-        name = RectifyProcessorOCV::NAME;
-#ifdef WITH_CAM_MODELS
-      } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
-        name = RectifyProcessor::NAME;
-#endif
-      }
-      process_childs(
-          processor_, name, ObjMat2{
-            left_rect_data.frame, left_rect_data.frame_id, left_rect_data.img,
-            right_rect_data.frame, right_rect_data.frame_id,
-            right_rect_data.img});
-    }
-    return;
-  }
-
-  switch (stream) {
-    case Stream::DISPARITY: {
-      process_childs(processor_, DisparityProcessor::NAME,
-          ObjMat{data.frame, data.frame_id, data.img});
-    } break;
-    case Stream::DISPARITY_NORMALIZED: {
-      process_childs(processor_, DisparityNormalizedProcessor::NAME,
-          ObjMat{data.frame, data.frame_id, data.img});
-    } break;
-    case Stream::POINTS: {
-      if (calib_model_ == CalibrationModel::PINHOLE) {
-        // PINHOLE
-        process_childs(processor_, PointsProcessorOCV::NAME,
-            ObjMat{data.frame, data.frame_id, data.img});
-#ifdef WITH_CAM_MODELS
-      } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
-        // KANNALA_BRANDT
-        process_childs(processor_, PointsProcessor::NAME,
-            ObjMat{data.frame, data.frame_id, data.img});
-#endif
-      } else {
-        // UNKNOW
-        LOG(ERROR) << "Unknow calib model type in device: "
-            << calib_model_;
-      }
-    } break;
-    case Stream::DEPTH: {
-      if (calib_model_ == CalibrationModel::PINHOLE) {
-        // PINHOLE
-        process_childs(processor_, DepthProcessorOCV::NAME,
-            ObjMat{data.frame, data.frame_id, data.img});
-#ifdef WITH_CAM_MODELS
-      } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
-        // KANNALA_BRANDT
-        process_childs(processor_, DepthProcessor::NAME,
-            ObjMat{data.frame, data.frame_id, data.img});
-#endif
-      } else {
-        // UNKNOW
-        LOG(ERROR) << "Unknow calib model type in device: "
-            << calib_model_;
-      }
-    } break;
-    default:
-      break;
-  }
+bool Synthetic::OnDeviceProcess(
+    Object *const in, Object *const out,
+    std::shared_ptr<Processor> const parent) {
+  MYNTEYE_UNUSED(parent)
+  return GetStreamEnabledMode(Stream::LEFT) != MODE_ON;
 }

@@ -691,8 +431,8 @@ bool Synthetic::OnRectifyProcess(
   if (plugin_ && plugin_->OnRectifyProcess(in, out)) {
     return true;
   }
-  return GetStreamEnabledMode(Stream::LEFT_RECTIFIED) != MODE_SYNTHETIC;
-  // && GetStreamEnabledMode(Stream::RIGHT_RECTIFIED) != MODE_SYNTHETIC
+  return GetStreamEnabledMode(Stream::LEFT_RECTIFIED) != MODE_ON;
+  // && GetStreamEnabledMode(Stream::RIGHT_RECTIFIED) != MODE_ON
 }

@@ -702,7 +442,7 @@ bool Synthetic::OnDisparityProcess(
   if (plugin_ && plugin_->OnDisparityProcess(in, out)) {
     return true;
   }
-  return GetStreamEnabledMode(Stream::DISPARITY) != MODE_SYNTHETIC;
+  return GetStreamEnabledMode(Stream::DISPARITY) != MODE_ON;
 }

@@ -712,7 +452,7 @@ bool Synthetic::OnDisparityNormalizedProcess(
   if (plugin_ && plugin_->OnDisparityNormalizedProcess(in, out)) {
     return true;
   }
-  return GetStreamEnabledMode(Stream::DISPARITY_NORMALIZED) != MODE_SYNTHETIC;
+  return GetStreamEnabledMode(Stream::DISPARITY_NORMALIZED) != MODE_ON;
 }

@@ -722,7 +462,7 @@ bool Synthetic::OnPointsProcess(
   if (plugin_ && plugin_->OnPointsProcess(in, out)) {
     return true;
   }
-  return GetStreamEnabledMode(Stream::POINTS) != MODE_SYNTHETIC;
+  return GetStreamEnabledMode(Stream::POINTS) != MODE_ON;
 }

@@ -732,56 +472,71 @@ bool Synthetic::OnDepthProcess(
   if (plugin_ && plugin_->OnDepthProcess(in, out)) {
     return true;
   }
-  return GetStreamEnabledMode(Stream::DEPTH) != MODE_SYNTHETIC;
+  return GetStreamEnabledMode(Stream::DEPTH) != MODE_ON;
+}
+
+void Synthetic::OnDevicePostProcess(Object *const out) {
+  const ObjMat2 *output = Object::Cast<ObjMat2>(out);
+  NotifyStreamData(Stream::LEFT, obj_data_first(output));
+  NotifyStreamData(Stream::RIGHT, obj_data_second(output));
+  if (HasStreamCallback(Stream::LEFT)) {
+    auto data = getControlDateWithStream(Stream::LEFT);
+    data.stream_callback(obj_data_first(output));
+  }
+  if (HasStreamCallback(Stream::RIGHT)) {
+    auto data = getControlDateWithStream(Stream::RIGHT);
+    if (data.stream_callback)
+      data.stream_callback(obj_data_second(output));
+  }
 }

 void Synthetic::OnRectifyPostProcess(Object *const out) {
   const ObjMat2 *output = Object::Cast<ObjMat2>(out);
+  NotifyStreamData(Stream::LEFT_RECTIFIED, obj_data_first(output));
+  NotifyStreamData(Stream::RIGHT_RECTIFIED, obj_data_second(output));
   if (HasStreamCallback(Stream::LEFT_RECTIFIED)) {
     auto data = getControlDateWithStream(Stream::LEFT_RECTIFIED);
-    data.stream_callback(
-        {output->first_data, output->first, nullptr, output->first_id});
+    data.stream_callback(obj_data_first(output));
   }
   if (HasStreamCallback(Stream::RIGHT_RECTIFIED)) {
     auto data = getControlDateWithStream(Stream::RIGHT_RECTIFIED);
-    data.stream_callback(
-        {output->second_data, output->second, nullptr, output->second_id});
+    data.stream_callback(obj_data_second(output));
   }
 }

 void Synthetic::OnDisparityPostProcess(Object *const out) {
   const ObjMat *output = Object::Cast<ObjMat>(out);
+  NotifyStreamData(Stream::DISPARITY, obj_data(output));
   if (HasStreamCallback(Stream::DISPARITY)) {
     auto data = getControlDateWithStream(Stream::DISPARITY);
-    data.stream_callback(
-        {output->data, output->value, nullptr, output->id});
+    data.stream_callback(obj_data(output));
   }
 }

 void Synthetic::OnDisparityNormalizedPostProcess(Object *const out) {
   const ObjMat *output = Object::Cast<ObjMat>(out);
+  NotifyStreamData(Stream::DISPARITY_NORMALIZED, obj_data(output));
   if (HasStreamCallback(Stream::DISPARITY_NORMALIZED)) {
     auto data = getControlDateWithStream(Stream::DISPARITY_NORMALIZED);
-    data.stream_callback(
-        {output->data, output->value, nullptr, output->id});
+    data.stream_callback(obj_data(output));
   }
 }

 void Synthetic::OnPointsPostProcess(Object *const out) {
   const ObjMat *output = Object::Cast<ObjMat>(out);
+  NotifyStreamData(Stream::POINTS, obj_data(output));
   if (HasStreamCallback(Stream::POINTS)) {
     auto data = getControlDateWithStream(Stream::POINTS);
-    data.stream_callback(
-        {output->data, output->value, nullptr, output->id});
+    data.stream_callback(obj_data(output));
   }
 }

 void Synthetic::OnDepthPostProcess(Object *const out) {
   const ObjMat *output = Object::Cast<ObjMat>(out);
+  NotifyStreamData(Stream::DEPTH, obj_data(output));
   if (HasStreamCallback(Stream::DEPTH)) {
     auto data = getControlDateWithStream(Stream::DEPTH);
-    data.stream_callback(
-        {output->data, output->value, nullptr, output->id});
+    data.stream_callback(obj_data(output));
   }
 }

@@ -796,4 +551,11 @@ void Synthetic::SetDisparityComputingMethodType(
   LOG(ERROR) << "ERROR: no suited processor for disparity computing.";
 }

+void Synthetic::NotifyStreamData(
+    const Stream &stream, const api::StreamData &data) {
+  if (stream_data_listener_) {
+    stream_data_listener_(stream, data);
+  }
+}
+
 MYNTEYE_END_NAMESPACE
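A sketch of the new listener hook (illustrative only; `synthetic` stands for the API's Synthetic instance, which is an assumption of this note). NotifyStreamData() above invokes the listener for every post-processed stream:

  synthetic->SetStreamDataListener(
      [](const Stream &stream, const api::StreamData &data) {
        if (data.img) {
          LOG(INFO) << stream << " frame_id: " << data.img->frame_id;
        }
      });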
@ -19,6 +19,7 @@
|
||||||
#include <memory>
|
#include <memory>
|
||||||
#include <string>
|
#include <string>
|
||||||
#include <vector>
|
#include <vector>
|
||||||
|
#include <mutex>
|
||||||
|
|
||||||
#include "mynteye/api/api.h"
|
#include "mynteye/api/api.h"
|
||||||
#include "mynteye/api/config.h"
|
#include "mynteye/api/config.h"
|
||||||
|
@ -28,22 +29,24 @@ MYNTEYE_BEGIN_NAMESPACE
|
||||||
class API;
|
class API;
|
||||||
class Plugin;
|
class Plugin;
|
||||||
class Processor;
|
class Processor;
|
||||||
|
class RootProcessor;
|
||||||
|
|
||||||
struct Object;
|
struct Object;
|
||||||
|
|
||||||
class Synthetic {
|
class Synthetic {
|
||||||
public:
|
public:
|
||||||
using stream_callback_t = API::stream_callback_t;
|
using stream_callback_t = API::stream_callback_t;
|
||||||
|
using stream_data_listener_t =
|
||||||
|
std::function<void(const Stream &stream, const api::StreamData &data)>;
|
||||||
|
using stream_switch_callback_t = API::stream_switch_callback_t;
|
||||||
|
|
||||||
typedef enum Mode {
|
typedef enum Mode {
|
||||||
MODE_NATIVE, // Native stream
|
MODE_ON, // On
|
||||||
MODE_SYNTHETIC, // Synthetic stream
|
MODE_OFF // Off
|
||||||
MODE_LAST // Unsupported
|
|
||||||
} mode_t;
|
} mode_t;
|
||||||
|
|
||||||
struct stream_control_t {
|
struct stream_control_t {
|
||||||
Stream stream;
|
Stream stream;
|
||||||
mode_t support_mode_;
|
|
||||||
mode_t enabled_mode_;
|
mode_t enabled_mode_;
|
||||||
stream_callback_t stream_callback;
|
stream_callback_t stream_callback;
|
||||||
};
|
};
|
||||||
|
@ -51,13 +54,19 @@ class Synthetic {
|
||||||
explicit Synthetic(API *api, CalibrationModel calib_model);
|
explicit Synthetic(API *api, CalibrationModel calib_model);
|
||||||
~Synthetic();
|
~Synthetic();
|
||||||
|
|
||||||
|
void SetStreamDataListener(stream_data_listener_t listener);
|
||||||
|
|
||||||
void NotifyImageParamsChanged();
|
void NotifyImageParamsChanged();
|
||||||
|
|
||||||
bool Supports(const Stream &stream) const;
|
bool Supports(const Stream &stream) const;
|
||||||
mode_t SupportsMode(const Stream &stream) const;
|
|
||||||
|
|
||||||
void EnableStreamData(const Stream &stream);
|
void EnableStreamData(const Stream &stream);
|
||||||
void DisableStreamData(const Stream &stream);
|
void DisableStreamData(const Stream &stream);
|
||||||
|
|
||||||
|
void EnableStreamData(
|
||||||
|
const Stream &stream, stream_switch_callback_t callback, bool try_tag);
|
||||||
|
void DisableStreamData(
|
||||||
|
const Stream &stream, stream_switch_callback_t callback, bool try_tag);
|
||||||
bool IsStreamDataEnabled(const Stream &stream) const;
|
bool IsStreamDataEnabled(const Stream &stream) const;
|
||||||
|
|
||||||
void SetStreamCallback(const Stream &stream, stream_callback_t callback);
|
void SetStreamCallback(const Stream &stream, stream_callback_t callback);
|
||||||
|
@ -85,11 +94,8 @@ class Synthetic {
|
||||||
|
|
||||||
private:
|
private:
|
||||||
void InitCalibInfo();
|
void InitCalibInfo();
|
||||||
void InitStreamSupports();
|
|
||||||
|
|
||||||
mode_t GetStreamEnabledMode(const Stream &stream) const;
|
mode_t GetStreamEnabledMode(const Stream &stream) const;
|
||||||
bool IsStreamEnabledNative(const Stream &stream) const;
|
|
||||||
bool IsStreamEnabledSynthetic(const Stream &stream) const;
|
|
||||||
|
|
||||||
void EnableStreamData(const Stream &stream, std::uint32_t depth);
|
void EnableStreamData(const Stream &stream, std::uint32_t depth);
|
||||||
void DisableStreamData(const Stream &stream, std::uint32_t depth);
|
void DisableStreamData(const Stream &stream, std::uint32_t depth);
|
||||||
|
@ -101,8 +107,9 @@ class Synthetic {
|
||||||
template <class T>
|
template <class T>
|
||||||
bool DeactivateProcessor(bool tree = false);
|
bool DeactivateProcessor(bool tree = false);
|
||||||
|
|
||||||
void ProcessNativeStream(const Stream &stream, const api::StreamData &data);
|
bool OnDeviceProcess(
|
||||||
|
Object *const in, Object *const out,
|
||||||
|
std::shared_ptr<Processor> const parent);
|
||||||
bool OnRectifyProcess(
|
bool OnRectifyProcess(
|
||||||
Object *const in, Object *const out,
|
Object *const in, Object *const out,
|
||||||
std::shared_ptr<Processor> const parent);
|
std::shared_ptr<Processor> const parent);
|
||||||
|
@ -119,16 +126,19 @@ class Synthetic {
|
||||||
Object *const in, Object *const out,
|
Object *const in, Object *const out,
|
||||||
std::shared_ptr<Processor> const parent);
|
std::shared_ptr<Processor> const parent);
|
||||||
|
|
||||||
|
void OnDevicePostProcess(Object *const out);
|
||||||
void OnRectifyPostProcess(Object *const out);
|
void OnRectifyPostProcess(Object *const out);
|
||||||
void OnDisparityPostProcess(Object *const out);
|
void OnDisparityPostProcess(Object *const out);
|
||||||
void OnDisparityNormalizedPostProcess(Object *const out);
|
void OnDisparityNormalizedPostProcess(Object *const out);
|
||||||
void OnPointsPostProcess(Object *const out);
|
void OnPointsPostProcess(Object *const out);
|
||||||
void OnDepthPostProcess(Object *const out);
|
void OnDepthPostProcess(Object *const out);
|
||||||
|
|
||||||
|
void NotifyStreamData(const Stream &stream, const api::StreamData &data);
|
||||||
|
|
||||||
API *api_;
|
API *api_;
|
||||||
|
|
||||||
std::shared_ptr<Processor> processor_;
|
std::shared_ptr<RootProcessor> processor_;
|
||||||
|
std::vector<std::shared_ptr<Processor>> processors_;
|
||||||
std::shared_ptr<Plugin> plugin_;
|
std::shared_ptr<Plugin> plugin_;
|
||||||
|
|
||||||
CalibrationModel calib_model_;
|
CalibrationModel calib_model_;
|
||||||
|
@ -138,11 +148,11 @@ class Synthetic {
|
||||||
std::shared_ptr<Extrinsics> extr_;
|
std::shared_ptr<Extrinsics> extr_;
|
||||||
bool calib_default_tag_;
|
bool calib_default_tag_;
|
||||||
|
|
||||||
std::vector<std::shared_ptr<Processor>> processors_;
|
stream_data_listener_t stream_data_listener_;
|
||||||
};
|
};
|
||||||
|
|
||||||
class SyntheticProcessorPart {
|
class SyntheticProcessorPart {
|
||||||
private:
|
protected:
|
||||||
inline std::vector<Synthetic::stream_control_t> getTargetStreams() {
|
inline std::vector<Synthetic::stream_control_t> getTargetStreams() {
|
||||||
return target_streams_;
|
return target_streams_;
|
||||||
}
|
}
|
||||||
|
|
136
src/mynteye/api/version_checker.cc
Normal file
136
src/mynteye/api/version_checker.cc
Normal file
|
@ -0,0 +1,136 @@
+// Copyright 2018 Slightech Co., Ltd. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+#include "mynteye/api/version_checker.h"
+#include "mynteye/device/utils.h"
+#include "mynteye/logger.h"
+#include "mynteye/types.h"
+
+MYNTEYE_BEGIN_NAMESPACE
+
+typedef struct {
+  const std::string device_type;
+  const std::string sdk_version;
+  const std::string firmware_version;
+  const std::string status;
+} firmware_version_match_table_unit;
+
+const char* ERRO_DESCRIPTION_F =
+    "Please update the firmware at first";
+const char* ERRO_DESCRIPTION_S =
+    "Please update the SDK at first";
+const char* WARN_DESCRIPTION_F =
+    "We suggest that you should update the firmware";
+const char* WARN_DESCRIPTION_S =
+    "We suggest that you should update the SDK";
+const char* PASS_DESCRIPTION = "pass";
+
+/** firmware/sdk version matched table */
+/**----device type-----sdk version---firmware version-----pass tag-----*/
+static const firmware_version_match_table_unit FSVM_TABLE[] = {
+  /** S1030 */
+  {"MYNT-EYE-S1030", ">2.3.0", ">2.2.0", PASS_DESCRIPTION},
+  {"MYNT-EYE-S1030", ">2.3.0", "2.2.0",  WARN_DESCRIPTION_F},
+  {"MYNT-EYE-S1030", ">2.3.0", "<2.2.0", ERRO_DESCRIPTION_F},
+  {"MYNT-EYE-S1030", "<2.3.1", "<2.2.0", WARN_DESCRIPTION_S},
+  /** S2100 */
+  {"MYNT-EYE-S2100", ">2.3.0", "1.0", PASS_DESCRIPTION},
+  {"MYNT-EYE-S2100", "<2.3.1", "1.0", ERRO_DESCRIPTION_S},
+  /** S210A */
+  {"MYNT-EYE-S210A", ">2.3.0", "1.0", PASS_DESCRIPTION},
+  {"MYNT-EYE-S210A", "<2.3.1", "1.0", ERRO_DESCRIPTION_S},
+};
+
+void getVersion(const std::string &str, char *version) {
+  std::string st1("");
+  int j = 0;
+  for (size_t i = 0; i < str.size(); i++) {
+    if (str[i] == '.') {
+      version[j++] = atoi(st1.c_str());
+      st1 = "";
+    } else {
+      st1 += str[i];
+    }
+  }
+  version[j++] = atoi(st1.c_str());
+}
+
+bool conditionMatch(const std::string& condition, const std::string& target) {
+  char version[4] = {0};
+  char version_c[4] = {0};
+  getVersion(target, version);
+  int tag_c = 0;
+  std::string condition_c;
+  if (condition[0] == '>') {
+    tag_c = 1;
+    condition_c = condition.substr(1);
+  } else if (condition[0] == '<') {
+    tag_c = -1;
+    condition_c = condition.substr(1);
+  } else {
+    tag_c = 0;
+    condition_c = condition;
+  }
+  getVersion(condition_c, version_c);
+  int tag_big = memcmp(version, version_c, 4);
+  if (tag_big * tag_c > 0 || (tag_big == 0 && tag_c == 0)) return true;
+  return false;
+}
+
+enum STATUS_UNIT {
+  ST_PASS,
+  ST_ERRO_F,
+  ST_ERRO_S,
+  ST_NOT_PASS
+};
+
+STATUS_UNIT checkUnit(const std::string& sdkv,
+    const std::string& devn,
+    const std::string& firmv,
+    const firmware_version_match_table_unit& condition) {
+  if (condition.device_type == devn &&
+      conditionMatch(condition.sdk_version, sdkv) &&
+      conditionMatch(condition.firmware_version, firmv)) {
+    if (condition.status == ERRO_DESCRIPTION_F) return ST_ERRO_F;
+    if (condition.status == ERRO_DESCRIPTION_S) return ST_ERRO_S;
+    if (condition.status == WARN_DESCRIPTION_F ||
+        condition.status == WARN_DESCRIPTION_S) {
+      LOG(WARNING) << condition.status;
+    }
+    return ST_PASS;
+  }
+  return ST_NOT_PASS;
+}
+
+bool checkFirmwareVersion(const std::shared_ptr<API> api) {
+  auto sdkv = api->GetSDKVersion();
+  auto devn = api->GetInfo(Info::DEVICE_NAME);
+  auto firmv = api->GetInfo(Info::FIRMWARE_VERSION);
+
+  for (size_t i = 0;
+      i < sizeof(FSVM_TABLE)/sizeof(firmware_version_match_table_unit);
+      i++) {
+    auto res = checkUnit(sdkv, devn, firmv, FSVM_TABLE[i]);
+    if (res == ST_PASS) {
+      return true;
+    } else if (res == ST_ERRO_S || res == ST_ERRO_F) {
+      LOG(ERROR) << FSVM_TABLE[i].status;
+      return false;
+    }
+  }
+  LOG(ERROR) << ERRO_DESCRIPTION_S;
+  return false;
+}
+
+MYNTEYE_END_NAMESPACE
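A note on how the FSVM_TABLE entries above are evaluated: checkFirmwareVersion() walks the table top-down, and each ">x.y.z", "<x.y.z" or exact "x.y.z" condition is compared component-wise after packing the version string into bytes. Below is a minimal standalone sketch of that rule; the helper names PackVersion/Matches are illustrative only and are not SDK code.

```cpp
// Standalone sketch (not part of the SDK): how FSVM_TABLE conditions such as
// ">2.3.0" or "<2.2.0" are evaluated against a concrete version string.
#include <cstdlib>   // atoi
#include <cstring>   // memcmp
#include <iostream>
#include <string>

// Pack "major.minor.patch" into a 4-byte array so versions compare with memcmp.
static void PackVersion(const std::string &str, char *out) {
  std::string part;
  int j = 0;
  for (char c : str) {
    if (c == '.') { out[j++] = atoi(part.c_str()); part.clear(); }
    else { part += c; }
  }
  out[j] = atoi(part.c_str());
}

// True if `target` satisfies `condition` (">x.y.z", "<x.y.z" or exact match).
static bool Matches(const std::string &condition, const std::string &target) {
  char t[4] = {0}, c[4] = {0};
  int dir = 0;
  std::string cond = condition;
  if (cond[0] == '>') { dir = 1; cond = cond.substr(1); }
  else if (cond[0] == '<') { dir = -1; cond = cond.substr(1); }
  PackVersion(target, t);
  PackVersion(cond, c);
  int cmp = memcmp(t, c, 4);
  return (cmp * dir > 0) || (cmp == 0 && dir == 0);
}

int main() {
  std::cout << Matches(">2.3.0", "2.3.2") << "\n";  // 1: SDK 2.3.2 passes ">2.3.0"
  std::cout << Matches("<2.2.0", "2.1.0") << "\n";  // 1: old firmware matches "<2.2.0"
  std::cout << Matches(">2.2.0", "2.2.0") << "\n";  // 0: ">" is strict, equal fails
  return 0;
}
```

The strict ">" comparison is why the table needs a separate row for firmware "2.2.0": it fails ">2.2.0" and is caught by the exact-match WARN entry instead.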
src/mynteye/api/version_checker.h (new file, 25 lines)
@@ -0,0 +1,25 @@
+// Copyright 2018 Slightech Co., Ltd. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+#ifndef MYNTEYE_API_VERSION_CHECKER_H_
+#define MYNTEYE_API_VERSION_CHECKER_H_
+#pragma once
+
+#include <string>
+#include "mynteye/api/api.h"
+
+MYNTEYE_BEGIN_NAMESPACE
+bool checkFirmwareVersion(const std::shared_ptr<API> api);
+MYNTEYE_END_NAMESPACE
+
+#endif  // MYNTEYE_API_VERSION_CHECKER_H_
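The header above is the whole public surface of the checker. A minimal usage sketch follows; the API::Create() bootstrap and the include path (the header lives under src/mynteye/api/) are assumptions borrowed from the SDK samples, not guarantees of this commit.

```cpp
// Sketch only: bail out early when the firmware/SDK pair is known to be incompatible.
#include <memory>
#include "mynteye/api/api.h"
#include "mynteye/api/version_checker.h"  // assumption: header visible to the build

MYNTEYE_USE_NAMESPACE

int main(int argc, char *argv[]) {
  auto &&api = API::Create(argc, argv);  // assumption: sample-style bootstrap
  if (!api) return 1;
  if (!checkFirmwareVersion(api)) {
    // checkFirmwareVersion already logged which side (firmware or SDK) to update.
    return 1;
  }
  // ... configure streams and start the device as usual ...
  return 0;
}
```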
@@ -460,6 +460,7 @@ bool Channels::GetFiles(
   while (i < end) {
     std::uint8_t file_id = *(data + i);
     std::uint16_t file_size = bytes::_from_data<std::uint16_t>(data + i + 1);
+    LOG(INFO) << "GetFiles:data_size : " << file_size;
     VLOG(2) << "GetFiles id: " << static_cast<int>(file_id)
             << ", size: " << file_size;
     i += 3;
@@ -32,6 +32,7 @@ std::size_t FileChannel::GetDeviceInfoFromData(
     const std::uint8_t *data, const std::uint16_t &data_size,
     device_info_t *info) {
   auto n = dev_info_parser_->GetFromData(data, data_size, info);
+  LOG(INFO) << "GetDeviceInfoFromData:data_size : " << data_size;
   auto spec_version = info->spec_version;
   img_params_parser_->SetSpecVersion(spec_version);
   imu_params_parser_->SetSpecVersion(spec_version);
@@ -113,6 +114,22 @@ std::size_t DeviceInfoParser::GetFromData(
   info->nominal_baseline = bytes::_from_data<std::uint16_t>(data + i);
   i += 2;
 
+  if (info->spec_version >= Version(1, 2)) {
+    // auxiliary_chip_version, 2
+    info->auxiliary_chip_version.set_major(data[i]);
+    info->auxiliary_chip_version.set_minor(data[i + 1]);
+    i += 2;
+    // isp_version, 2
+    info->isp_version.set_major(data[i]);
+    info->isp_version.set_minor(data[i + 1]);
+    i += 2;
+  } else {
+    info->auxiliary_chip_version.set_major(0);
+    info->auxiliary_chip_version.set_minor(0);
+    info->isp_version.set_major(0);
+    info->isp_version.set_minor(0);
+  }
+
   // get other infos according to spec_version
 
   MYNTEYE_UNUSED(data_size)
@@ -155,6 +172,17 @@ std::size_t DeviceInfoParser::SetToData(
   bytes::_to_data(info->nominal_baseline, data + i);
   i += 2;
 
+  if (info->spec_version >= Version(1, 2)) {
+    // auxiliary_chip_version, 2
+    data[i] = info->auxiliary_chip_version.major();
+    data[i + 1] = info->auxiliary_chip_version.minor();
+    i += 2;
+    // isp_version, 2
+    data[i] = info->isp_version.major();
+    data[i + 1] = info->isp_version.minor();
+    i += 2;
+  }
+
   // set other infos according to spec_version
 
   // others
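For reference, the two hunks above agree on the extra payload written for spec_version 1.2 and newer: four bytes directly after nominal_baseline, consumed in the same order on read and write. A tiny sketch of that layout (the buffer values here are made up, not from a real device):

```cpp
// Sketch only: the four bytes appended under spec_version >= 1.2.
#include <cstdint>
#include <iostream>

int main() {
  // ... bytes before this point: name, serial, versions, ..., nominal_baseline
  const std::uint8_t extra[4] = {1, 0, 1, 0};  // hypothetical sample values
  std::cout << "auxiliary_chip_version: "
            << int(extra[0]) << "." << int(extra[1]) << "\n"   // major.minor
            << "isp_version: "
            << int(extra[2]) << "." << int(extra[3]) << "\n";  // major.minor
  return 0;
}
```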
@@ -181,7 +209,7 @@ std::size_t ImgParamsParser::GetFromData(
     return GetFromData_v1_0(data, data_size, img_params);
   }
   // s210a old params
-  if (spec_version_ == Version(1, 1) && data_size == 404) {
+  if (spec_version_ >= Version(1, 1) && data_size == 404) {
     return GetFromData_v1_1(data, data_size, img_params);
   }
   // get img params with new version format
@@ -406,7 +434,7 @@ std::size_t ImuParamsParser::GetFromData(
     return GetFromData_old(data, data_size, imu_params);
   }
   // s210a old params
-  if (spec_version_ == Version(1, 1) && data_size == 384) {
+  if (spec_version_ >= Version(1, 1) && data_size == 384) {
     return GetFromData_old(data, data_size, imu_params);
   }
   // get imu params with new version format
@@ -234,6 +234,10 @@ std::string Device::GetInfo(const Info &info) const {
       return device_info_->imu_type.to_string();
     case Info::NOMINAL_BASELINE:
       return std::to_string(device_info_->nominal_baseline);
+    case Info::AUXILIARY_CHIP_VERSION:
+      return device_info_->auxiliary_chip_version.to_string();
+    case Info::ISP_VERSION:
+      return device_info_->isp_version.to_string();
     default:
       LOG(WARNING) << "Unknown device info";
       return "";
@@ -349,6 +353,9 @@ OptionInfo Device::GetOptionInfo(const Option &option) const {
 
 std::int32_t Device::GetOptionValue(const Option &option) const {
   if (!Supports(option)) {
+    if (option == Option::FRAME_RATE) {
+      return GetStreamRequest().fps;
+    }
     LOG(WARNING) << "Unsupported option: " << option;
     return -1;
   }
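With this fallback, FRAME_RATE reads uniformly across models: devices that expose it as a real option answer from the option, while the others now answer with the fps of the selected stream request instead of logging a warning and returning -1. A sketch of the calling side; API::Create() and the presence of GetOptionValue() on the API layer are assumptions based on the SDK samples.

```cpp
// Sketch only: query the effective frame rate through the API layer.
#include <iostream>
#include "mynteye/api/api.h"

MYNTEYE_USE_NAMESPACE

int main(int argc, char *argv[]) {
  auto &&api = API::Create(argc, argv);  // assumption: sample-style bootstrap
  if (!api) return 1;
  // Reads Option::FRAME_RATE where supported; otherwise the device falls back
  // to GetStreamRequest().fps as added in the hunk above.
  std::cout << "frame rate: "
            << api->GetOptionValue(Option::FRAME_RATE) << std::endl;
  return 0;
}
```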
@@ -466,6 +473,11 @@ std::vector<device::StreamData> Device::GetStreamDatas(const Stream &stream) {
   return streams_->GetStreamDatas(stream);
 }
 
+void Device::DisableMotionDatas() {
+  CHECK_NOTNULL(motions_);
+  motions_->DisableMotionDatas();
+}
+
 void Device::EnableMotionDatas() {
   EnableMotionDatas(std::numeric_limits<std::size_t>::max());
 }
@@ -66,7 +66,10 @@ void Motions::SetMotionCallback(motion_callback_t callback) {
 
         std::lock_guard<std::mutex> _(mtx_datas_);
         motion_data_t data = {imu};
-        if (motion_datas_enabled_) {
+        if (motion_datas_enabled_ && motion_datas_max_size_ > 0) {
+          if (motion_datas_.size() >= motion_datas_max_size_) {
+            motion_datas_.erase(motion_datas_.begin());
+          }
           motion_datas_.push_back(data);
         }
 
@@ -98,13 +101,21 @@ void Motions::StopMotionTracking() {
   }
 }
 
+void Motions::DisableMotionDatas() {
+  std::lock_guard<std::mutex> _(mtx_datas_);
+  motion_datas_enabled_ = false;
+  motion_datas_max_size_ = 0;
+  motion_datas_.clear();
+}
+
 void Motions::EnableMotionDatas(std::size_t max_size) {
   if (max_size <= 0) {
     LOG(WARNING) << "Could not enable motion datas with max_size <= 0";
     return;
   }
+  std::lock_guard<std::mutex> _(mtx_datas_);
   motion_datas_enabled_ = true;
-  motion_datas_max_size = max_size;
+  motion_datas_max_size_ = max_size;
 }
 
 Motions::motion_datas_t Motions::GetMotionDatas() {
@@ -42,6 +42,7 @@ class Motions {
   void StartMotionTracking();
   void StopMotionTracking();
 
+  void DisableMotionDatas();
   void EnableMotionDatas(std::size_t max_size);
   motion_datas_t GetMotionDatas();
 
@@ -52,7 +53,7 @@ class Motions {
 
   motion_datas_t motion_datas_;
   bool motion_datas_enabled_;
-  std::size_t motion_datas_max_size;
+  std::size_t motion_datas_max_size_;
 
   bool is_imu_tracking;
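DisableMotionDatas() is the counterpart to the existing EnableMotionDatas(max_size): enabling with a bound keeps at most max_size IMU samples in the internal cache (the SetMotionCallback change above drops the oldest sample first), while disabling stops caching and clears whatever is buffered. A sketch against the Device layer; device::select() and the include paths follow the SDK's device samples and should be treated as assumptions here.

```cpp
// Sketch only: bounded IMU caching on the Device layer.
#include "mynteye/device/device.h"
#include "mynteye/device/utils.h"

MYNTEYE_USE_NAMESPACE

int main() {
  auto &&device = device::select();   // assumption: sample-style device picking
  if (!device) return 1;

  device->EnableMotionDatas(1000);    // cache at most 1000 motion samples
  // ... start capture and drain the cached samples periodically ...

  device->DisableMotionDatas();       // new in this change: stop caching, clear the buffer
  return 0;
}
```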
@@ -92,6 +92,8 @@ const char *to_string(const Info &value) {
     CASE(LENS_TYPE)
     CASE(IMU_TYPE)
     CASE(NOMINAL_BASELINE)
+    CASE(AUXILIARY_CHIP_VERSION)
+    CASE(ISP_VERSION)
     default:
       CHECK(is_valid(value));
       return "Info::UNKNOWN";
@@ -53,6 +53,8 @@ TEST(Info, VerifyToString) {
   EXPECT_STREQ("Info::LENS_TYPE", to_string(Info::LENS_TYPE));
   EXPECT_STREQ("Info::IMU_TYPE", to_string(Info::IMU_TYPE));
   EXPECT_STREQ("Info::NOMINAL_BASELINE", to_string(Info::NOMINAL_BASELINE));
+  EXPECT_STREQ("Info::AUXILIARY_CHIP_VERSION", to_string(Info::AUXILIARY_CHIP_VERSION));
+  EXPECT_STREQ("Info::ISP_VERSION", to_string(Info::ISP_VERSION));
 }
 
 TEST(Option, VerifyToString) {
@@ -2,9 +2,11 @@
 ---
 device_name: MYNT-EYE-S210A
 serial_number: "07C40D1C0009071F"
-firmware_version: "0.1"
+firmware_version: "1.1"
 hardware_version: "1.0"
-spec_version: "1.1"
+spec_version: "1.2"
-lens_type: "0000"
+lens_type: "0001"
-imu_type: "0000"
+imu_type: "0001"
 nominal_baseline: 0
+auxiliary_chip_version: "1.0"
+isp_version: "1.0"
@@ -52,7 +52,11 @@ bool DeviceWriter::WriteDeviceInfo(const dev_info_t &info) {
               << ", spec_version: " << dev_info->spec_version.to_string()
               << ", lens_type: " << dev_info->lens_type.to_string()
               << ", imu_type: " << dev_info->imu_type.to_string()
-              << ", nominal_baseline: " << dev_info->nominal_baseline << "}";
+              << ", nominal_baseline: " << dev_info->nominal_baseline
+              << ", auxiliary_chip_version: "
+              << dev_info->auxiliary_chip_version.to_string()
+              << ", isp_version: "
+              << dev_info->isp_version.to_string() << "}";
     return true;
   } else {
     LOG(ERROR) << "Write device info failed";
@@ -215,6 +219,8 @@ bool DeviceWriter::SaveDeviceInfo(
   fs << "lens_type" << info.lens_type.to_string();
   fs << "imu_type" << info.imu_type.to_string();
   fs << "nominal_baseline" << info.nominal_baseline;
+  fs << "auxiliary_chip_version" << info.auxiliary_chip_version.to_string();
+  fs << "isp_version" << info.isp_version.to_string();
   // save other infos according to spec_version
   fs.release();
   return true;
@@ -247,6 +247,8 @@ BOOST_PYTHON_MODULE(mynteye_py) {
       .value("LENS_TYPE", Info::LENS_TYPE)
       .value("IMU_TYPE", Info::IMU_TYPE)
       .value("NOMINAL_BASELINE", Info::NOMINAL_BASELINE)
+      .value("AUXILIARY_CHIP_VERSION", Info::AUXILIARY_CHIP_VERSION)
+      .value("ISP_VERSION", Info::ISP_VERSION)
 #ifdef ENUM_EXPORT_VALUES
       .export_values()
 #endif
@@ -153,7 +153,7 @@
   <!-- <arg name="standard2/ir_control" default="0" /> -->
 
   <!-- standard2/accel_range range: [6,48] -->
-  <arg name="standard2/accel_range" default="-1" />
+  <arg name="standard2/accel_range" default="24" />
   <!-- <arg name="standard2/accel_range" default="6" /> -->
 
   <!-- standard2/gyro_range range: [250,4000] -->
@@ -347,6 +347,10 @@
         - 'image_transport/compressedDepth'
       </rosparam>
     </group>
+    <group ns="$(arg depth_topic)">
+      <rosparam param="disable_pub_plugins">
+        - 'image_transport/compressedDepth'
+      </rosparam>
+    </group>
   </group> <!-- mynteye -->
 </launch>
@@ -54,6 +54,8 @@ def main():
     'LENS_TYPE': GetInfoRequest.LENS_TYPE,
     'IMU_TYPE': GetInfoRequest.IMU_TYPE,
     'NOMINAL_BASELINE': GetInfoRequest.NOMINAL_BASELINE,
+    'AUXILIARY_CHIP_VERSION': GetInfoRequest.AUXILIARY_CHIP_VERSION,
+    'ISP_VERSION': GetInfoRequest.ISP_VERSION,
   }
   for k, v in get_device_info(**keys).items():
     print('{}: {}'.format(k, v))
@@ -315,7 +315,7 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
     for (auto &&it = mono_topics.begin(); it != mono_topics.end(); ++it) {
       auto &&topic = mono_topics[it->first];
       if (it->first == Stream::LEFT || it->first == Stream::RIGHT) {
-        mono_publishers_[it->first] = it_mynteye.advertiseCamera(topic, 1);
+        mono_publishers_[it->first] = it_mynteye.advertise(topic, 1);
       }
       NODELET_INFO_STREAM("Advertized on topic " << topic);
     }
@@ -407,6 +407,12 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
       case Request::NOMINAL_BASELINE:
         res.value = api_->GetInfo(Info::NOMINAL_BASELINE);
         break;
+      case Request::AUXILIARY_CHIP_VERSION:
+        res.value = api_->GetInfo(Info::AUXILIARY_CHIP_VERSION);
+        break;
+      case Request::ISP_VERSION:
+        res.value = api_->GetInfo(Info::ISP_VERSION);
+        break;
       case Request::IMG_INTRINSICS:
       {
         auto intri_left = api_->GetIntrinsicsBase(Stream::LEFT);
@@ -554,86 +560,53 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
     return true;
   }
 
-  void SetIsPublished(const Stream &stream) {
-    is_published_[stream] = false;
-    switch (stream) {
-      case Stream::LEFT_RECTIFIED: {
-        if (is_published_[Stream::RIGHT_RECTIFIED]) {
-          SetIsPublished(Stream::RIGHT_RECTIFIED);
-        }
-        if (is_published_[Stream::DISPARITY]) {
-          SetIsPublished(Stream::DISPARITY);
-        }
-      } break;
-      case Stream::RIGHT_RECTIFIED: {
-        if (is_published_[Stream::LEFT_RECTIFIED]) {
-          SetIsPublished(Stream::LEFT_RECTIFIED);
-        }
-        if (is_published_[Stream::DISPARITY]) {
-          SetIsPublished(Stream::DISPARITY);
-        }
-      } break;
-      case Stream::DISPARITY: {
-        if (is_published_[Stream::DISPARITY_NORMALIZED]) {
-          SetIsPublished(Stream::DISPARITY_NORMALIZED);
-        }
-        if (is_published_[Stream::POINTS]) {
-          SetIsPublished(Stream::POINTS);
-        }
-      } break;
-      case Stream::DISPARITY_NORMALIZED: {
-      } break;
-      case Stream::POINTS: {
-        if (is_published_[Stream::DEPTH]) {
-          SetIsPublished(Stream::DEPTH);
-        }
-      } break;
-      case Stream::DEPTH: {
-      } break;
-      default:
+  void publishData(
+      const Stream &stream, const api::StreamData &data, std::uint32_t seq,
+      ros::Time stamp) {
+    if (stream == Stream::LEFT || stream == Stream::RIGHT) {
       return;
+    } else if (stream == Stream::POINTS) {
+      publishPoints(data, seq, stamp);
+    } else {
+      publishCamera(stream, data, seq, stamp);
     }
   }
 
-  void publishPoint(const Stream &stream) {
-    auto &&points_num = points_publisher_.getNumSubscribers();
-    if (points_num == 0 && is_published_[stream]) {
-      SetIsPublished(stream);
-      api_->DisableStreamData(stream);
-    } else if (points_num > 0 && !is_published_[Stream::POINTS]) {
-      api_->EnableStreamData(Stream::POINTS);
-      api_->SetStreamCallback(
-          Stream::POINTS, [this](const api::StreamData &data) {
-            // ros::Time stamp = hardTimeToSoftTime(data.img->timestamp);
-            ros::Time stamp = checkUpTimeStamp(
-                data.img->timestamp, Stream::POINTS);
-            static std::size_t count = 0;
-            ++count;
-            publishPoints(data, count, stamp);
-          });
-      is_published_[Stream::POINTS] = true;
+  int getStreamSubscribers(const Stream &stream) {
+    if (stream == Stream::POINTS) {
+      return points_publisher_.getNumSubscribers();
     }
+    auto pub = camera_publishers_[stream];
+    if (pub)
+      return pub.getNumSubscribers();
+    return -1;
   }
 
   void publishOthers(const Stream &stream) {
-    auto stream_num = camera_publishers_[stream].getNumSubscribers();
-    if (stream_num == 0 && is_published_[stream]) {
-      // Stop computing when was not subcribed
-      SetIsPublished(stream);
-      api_->DisableStreamData(stream);
-    } else if (stream_num > 0 && !is_published_[stream]) {
-      // Start computing and publishing when was subcribed
+    if (getStreamSubscribers(stream) > 0 && !is_published_[stream]) {
       api_->EnableStreamData(stream);
       api_->SetStreamCallback(
           stream, [this, stream](const api::StreamData &data) {
-            // ros::Time stamp = hardTimeToSoftTime(data.img->timestamp);
            ros::Time stamp = checkUpTimeStamp(
                data.img->timestamp, stream);
            static std::size_t count = 0;
            ++count;
-            publishCamera(stream, data, count, stamp);
+            publishData(stream, data, count, stamp);
          });
      is_published_[stream] = true;
+      return;
+    }
+
+    int disable_tag = 0;
+    api_->DisableStreamData(stream, [&](const Stream &stream) {
+      disable_tag += getStreamSubscribers(stream);
+    }, true);
+    if (disable_tag == 0 && is_published_[stream]) {
+      api_->DisableStreamData(stream, [&](const Stream &stream) {
+        api_->SetStreamCallback(stream, nullptr);
+        is_published_[stream] = false;
+      });
+      return;
    }
  }
@@ -648,17 +621,6 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
       // ros::Time stamp = hardTimeToSoftTime(data.img->timestamp);
       ros::Time stamp = checkUpTimeStamp(
           data.img->timestamp, Stream::LEFT);
 
-      // static double img_time_prev = -1;
-      // NODELET_INFO_STREAM("ros_time_beg: " << FULL_PRECISION <<
-      // ros_time_beg
-      //     << ", img_time_elapsed: " << FULL_PRECISION
-      //     << ((data.img->timestamp - img_time_beg) * 0.00001f)
-      //     << ", img_time_diff: " << FULL_PRECISION
-      //     << ((img_time_prev < 0) ? 0
-      //     : (data.img->timestamp - img_time_prev) * 0.01f) << "
-      //     ms");
-      // img_time_prev = data.img->timestamp;
       publishCamera(Stream::LEFT, data, left_count_, stamp);
       publishMono(Stream::LEFT, data, left_count_, stamp);
       NODELET_DEBUG_STREAM(
@@ -698,14 +660,10 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
     std::vector<Stream> other_streams{
         Stream::LEFT_RECTIFIED, Stream::RIGHT_RECTIFIED,
         Stream::DISPARITY, Stream::DISPARITY_NORMALIZED,
-        Stream::POINTS, Stream::DEPTH};
+        Stream::POINTS, Stream::DEPTH
+    };
     for (auto &&stream : other_streams) {
-      if (stream != Stream::POINTS) {
         publishOthers(stream);
-      } else {
-        publishPoint(stream);
-      }
     }
 
     if (!is_motion_published_) {
@@ -822,9 +780,7 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
     cv::cvtColor(data.frame, mono, CV_RGB2GRAY);
     auto &&msg = cv_bridge::CvImage(header, enc::MONO8, mono).toImageMsg();
     pthread_mutex_unlock(&mutex_data_);
-    auto &&info = getCameraInfo(stream);
-    info->header.stamp = msg->header.stamp;
-    mono_publishers_[stream].publish(msg, info);
+    mono_publishers_[stream].publish(msg);
   }
 
   void publishPoints(
@@ -1284,7 +1240,8 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
     }
     for (int i = 0; i < p.rows; i++) {
       for (int j = 0; j < p.cols; j++) {
-        camera_info->P.at(i * p.cols + j) = p.at<double>(i, j);
+        int scale = (i == 2 && j == 2)?1:1000;
+        camera_info->P.at(i * p.cols + j) = p.at<double>(i, j) / scale;
       }
     }
 
@@ -1510,7 +1467,7 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
   std::map<Stream, std::string> image_encodings_;
 
   // mono: LEFT, RIGHT
-  std::map<Stream, image_transport::CameraPublisher> mono_publishers_;
+  std::map<Stream, image_transport::Publisher> mono_publishers_;
 
   // pointcloud: POINTS
   ros::Publisher points_publisher_;
@@ -6,10 +6,12 @@ uint32 SPEC_VERSION=4
 uint32 LENS_TYPE=5
 uint32 IMU_TYPE=6
 uint32 NOMINAL_BASELINE=7
-uint32 IMG_INTRINSICS=8
-uint32 IMG_EXTRINSICS_RTOL=9
-uint32 IMU_INTRINSICS=10
-uint32 IMU_EXTRINSICS=11
+uint32 AUXILIARY_CHIP_VERSION=8
+uint32 ISP_VERSION=9
+uint32 IMG_INTRINSICS=10
+uint32 IMG_EXTRINSICS_RTOL=11
+uint32 IMU_INTRINSICS=12
+uint32 IMU_EXTRINSICS=13
 uint32 key
 ---
 string value