Compare commits

19 Commits

| Author | SHA1 | Date |
|---|---|---|
| | bbf789b174 | |
| | 66c9af58fc | |
| | 375acb534c | |
| | 28204d5c0a | |
| | c9483c2aca | |
| | 45b4fdeedf | |
| | 455b5b931e | |
| | 8ab9b42896 | |
| | 3258e969ee | |
| | cc74b2663b | |
| | e270e99340 | |
| | 0c92f770f2 | |
| | 5a9fffe3ed | |
| | 384ff028a8 | |
| | 0f9bdd5cf2 | |
| | 54165b1acb | |
| | 087ef28da1 | |
| | b2bd1f389d | |
| | 6a82b06b84 | |
Makefile (13 lines changed)

```diff
@@ -16,6 +16,12 @@ include CommonDefs.mk
 MKFILE_PATH := $(abspath $(lastword $(MAKEFILE_LIST)))
 MKFILE_DIR := $(patsubst %/,%,$(dir $(MKFILE_PATH)))
 
+# CMAKE_INSTALL_PREFIX:
+# https://cmake.org/cmake/help/latest/variable/CMAKE_INSTALL_PREFIX.html
+#
+# UNIX: /usr/local
+# Windows: c:/Program Files/${PROJECT_NAME}
+
 .DEFAULT_GOAL := help
 
 help:
@@ -83,7 +89,8 @@ init: submodules
 
 build: third_party
 	@$(call echo,Make $@)
-	@$(call cmake_build,./_build,..,-DCMAKE_INSTALL_PREFIX=$(MKFILE_DIR)/_install)
+	@$(call cmake_build,./_build,..)
+	@# @$(call cmake_build,./_build,..,-DCMAKE_INSTALL_PREFIX=$(MKFILE_DIR)/_install)
 
 .PHONY: build
 
@@ -115,10 +122,10 @@ ifeq ($(HOST_OS),Win)
 ifneq ($(HOST_NAME),MinGW)
 	@cd ./_build; msbuild.exe INSTALL.vcxproj /property:Configuration=Release
 else
-	@cd ./_build; make install
+	@cd ./_build; sudo make install
 endif
 else
-	@cd ./_build; make install
+	@cd ./_build; sudo make install
 endif
 
 .PHONY: install
```
README.md (15 lines changed)

```diff
@@ -1,6 +1,6 @@
 # MYNT® EYE SDK
 
-[](https://github.com/slightech/MYNT-EYE-SDK-2)
+[](https://github.com/slightech/MYNT-EYE-SDK-2)
 
 ## Overview
 
@@ -17,18 +17,19 @@ Please follow the guide doc to install the SDK on different platforms.
 ## Documentations
 
 * [API Doc](https://github.com/slightech/MYNT-EYE-SDK-2/releases): API reference, some guides and data spec.
-  * zh-Hans: [](https://github.com/slightech/MYNT-EYE-SDK-2/files/2083629/mynt-eye-sdk-apidoc-2.0.0-rc-zh-Hans.pdf) [](https://github.com/slightech/MYNT-EYE-SDK-2/files/2083631/mynt-eye-sdk-apidoc-2.0.0-rc-html-zh-Hans.zip) [](https://slightech.github.io/MYNT-EYE-SDK-2/)
+  * en: [](https://github.com/slightech/MYNT-EYE-SDK-2/files/2203869/mynt-eye-sdk-apidoc-2.0.1-rc0-en.pdf) [](https://github.com/slightech/MYNT-EYE-SDK-2/files/2203870/mynt-eye-sdk-apidoc-2.0.1-rc0-html-en.zip) [](https://slightech.github.io/MYNT-EYE-SDK-2/)
+  * zh-Hans: [](https://github.com/slightech/MYNT-EYE-SDK-2/files/2203872/mynt-eye-sdk-apidoc-2.0.1-rc0-zh-Hans.pdf) [](https://github.com/slightech/MYNT-EYE-SDK-2/files/2203874/mynt-eye-sdk-apidoc-2.0.1-rc0-html-zh-Hans.zip) [](http://doc.myntai.com/resource/api/mynt-eye-sdk-apidoc-2.0.1-rc0-html-zh-Hans/mynt-eye-sdk-apidoc-2.0.1-rc0-html-zh-Hans/index.html)
 * [Guide Doc](https://github.com/slightech/MYNT-EYE-SDK-2-Guide/releases): How to install and start using the SDK.
-  * zh-Hans: [](https://github.com/slightech/MYNT-EYE-SDK-2-Guide/files/2083740/mynt-eye-sdk-guide-2.0.0-rc-zh-Hans.pdf) [](https://github.com/slightech/MYNT-EYE-SDK-2-Guide/files/2083745/mynt-eye-sdk-guide-2.0.0-rc-html-zh-Hans.zip) [](https://slightech.github.io/MYNT-EYE-SDK-2-Guide/)
+  * en: [](https://github.com/slightech/MYNT-EYE-SDK-2-Guide/files/2203877/mynt-eye-sdk-guide-2.0.1-rc0-en.pdf) [](https://github.com/slightech/MYNT-EYE-SDK-2-Guide/files/2203879/mynt-eye-sdk-guide-2.0.1-rc0-html-en.zip) [](https://slightech.github.io/MYNT-EYE-SDK-2-Guide/)
+  * zh-Hans: [](https://github.com/slightech/MYNT-EYE-SDK-2-Guide/files/2203880/mynt-eye-sdk-guide-2.0.1-rc0-zh-Hans.pdf) [](https://github.com/slightech/MYNT-EYE-SDK-2-Guide/files/2203881/mynt-eye-sdk-guide-2.0.1-rc0-html-zh-Hans.zip) [](http://doc.myntai.com/resource/sdk/mynt-eye-sdk-guide-2.0.1-rc0-html-zh-Hans/mynt-eye-sdk-guide-2.0.1-rc0-html-zh-Hans/index.html)
 
-> Supported languages: `zh-Hans`.
+> Supported languages: `en`, `zh-Hans`.
 
 ## Firmwares
 
-[Google Drive]: https://drive.google.com/drive/folders/1tdFCcTBMNcImEGZ39tdOZmlX2SHKCr2f
-[百度网盘]: https://pan.baidu.com/s/1yPQDp2r0x4jvNwn2UjlMUQ
+[MYNTEYE_BOX]: http://doc.myntai.com/mynteye/s/download
 
-Get firmwares from our online disks: [Google Drive][], [百度网盘][]. The latest version is `2.0.0-rc`.
+Get firmwares from our online disks: [MYNTEYE_BOX][]. The latest version is `2.0.1`.
 
 ## Usage
 
```
```diff
@@ -38,7 +38,7 @@ PROJECT_NAME = "MYNT EYE SDK"
 # could be handy for archiving the generated documentation or if some version
 # control system is used.
 
-PROJECT_NUMBER = 2.0.1-rc0
+PROJECT_NUMBER = 2.0.1-rc1
 
 # Using the PROJECT_BRIEF tag one can provide an optional one line description
 # for a project that appears at the top of each page and should give viewer a
```
```diff
@@ -14,7 +14,7 @@ There are two control modes, one is through UVC standard protocol, the other is
 
 | Name | Field | Bytes | Default | Min | Max | Stored | Flash Address | Channel | Note |
 | :--- | :---- | :---- | :------ | :-- | :-- | :----- | :------------ | :------ | :----- |
-| Frame rate | frame_rate | 2 | 25 | 10 | 60 | √ | 0x21 | XU_CAM_CTRL | values: {10,15,20,25,30,35,40,45,50,55,60} |
+| Frame rate | frame_rate | 2 | 25 | 10 | 55 | √ | 0x21 | XU_CAM_CTRL | values: {10,15,20,25,30,35,40,45,50,55} |
 | IMU frequency | imu_frequency | 2 | 200 | 100 | 500 | √ | 0x23 | XU_CAM_CTRL | values: {100,200,250,333,500} |
 | Exposure mode | exposure_mode | 1 | 0 | 0 | 1 | √ | 0x0F | XU_CAM_CTRL | 0: enable auto-exposure; 1: manual-exposure |
 | Max gain | max_gain | 2 | 48 | 0 | 48 | √ | 0x1D | XU_CAM_CTRL | valid if auto-exposure |
```
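These options map onto the SDK's `Option` enum and are set at runtime with `SetOptionValue`; a minimal sketch mirroring the `ctrl_framerate` sample changed later in this compare (the header path and the `API::Create(argc, argv)` overload are assumptions, not shown in this diff):

```cpp
#include "mynteye/api.h"  // assumed header path

MYNTEYE_USE_NAMESPACE

int main(int argc, char *argv[]) {
  auto &&api = API::Create(argc, argv);  // assumed factory overload
  if (!api) return 1;

  // FRAME_RATE and IMU_FREQUENCY must be set together, as the samples note.
  api->SetOptionValue(Option::FRAME_RATE, 25);      // step 5; 10..55 after this change
  api->SetOptionValue(Option::IMU_FREQUENCY, 500);  // 100, 200, 250, 333, 500
  return 0;
}
```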
```diff
@@ -38,7 +38,7 @@ PROJECT_NAME = "MYNT EYE SDK"
 # could be handy for archiving the generated documentation or if some version
 # control system is used.
 
-PROJECT_NUMBER = 2.0.1-rc0
+PROJECT_NUMBER = 2.0.1-rc1
 
 # Using the PROJECT_BRIEF tag one can provide an optional one line description
 # for a project that appears at the top of each page and should give viewer a
```
```diff
@@ -16,7 +16,7 @@
 
 | Name | Field | Bytes | Default | Min | Max | Stored | Flash Address | Channel | Note |
 | :----- | :----- | :-------- | :-------- | :-------- | :-------- | :----------- | :----------- | :----------- | :----- |
-| Frame rate | frame_rate | 2 | 25 | 10 | 60 | √ | 0x21 | XU_CAM_CTRL | step is 5, i.e. valid values are {10,15,20,25,30,35,40,45,50,55,60} |
+| Frame rate | frame_rate | 2 | 25 | 10 | 55 | √ | 0x21 | XU_CAM_CTRL | step is 5, i.e. valid values are {10,15,20,25,30,35,40,45,50,55} |
 | IMU frequency | imu_frequency | 2 | 200 | 100 | 500 | √ | 0x23 | XU_CAM_CTRL | valid values are {100,200,250,333,500} |
 | Exposure mode | exposure_mode | 1 | 0 | 0 | 1 | √ | 0x0F | XU_CAM_CTRL | 0: enable auto-exposure; 1: disable |
 | Max gain | max_gain | 2 | 48 | 0 | 48 | √ | 0x1D | XU_CAM_CTRL | threshold that can be set when auto-exposure is enabled |
```
```diff
@@ -25,14 +25,30 @@ struct glog_init {
   glog_init(int argc, char *argv[]) {
     (void)argc;
 
-    // FLAGS_logtostderr = true;
-    FLAGS_alsologtostderr = true;
+    // Set whether log messages go to stderr instead of logfiles
+    FLAGS_logtostderr = true;
+
+    // Set whether log messages go to stderr in addition to logfiles.
+    // FLAGS_alsologtostderr = true;
+
+    // Set color messages logged to stderr (if supported by terminal).
     FLAGS_colorlogtostderr = true;
 
-    // FLAGS_log_dir = ".";
+    // Log suppression level: messages logged at a lower level than this
+    // are suppressed.
+    FLAGS_minloglevel = google::GLOG_INFO;
+
+    // If specified, logfiles are written into this directory instead of the
+    // default logging directory.
+    FLAGS_log_dir = ".";
+
+    // Sets the maximum log file size (in MB).
     FLAGS_max_log_size = 1024;
+
+    // Sets whether to avoid logging to the disk if the disk is full.
     FLAGS_stop_logging_if_full_disk = true;
 
+    // Show all VLOG(m) messages for m <= this.
     // FLAGS_v = 2;
 
     google::InitGoogleLogging(argv[0]);
```
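For reference, the flags touched above can also be exercised in a stand-alone program; a minimal sketch that mirrors the sample's settings (nothing here beyond standard glog and the values shown in the hunk):

```cpp
#include <glog/logging.h>

int main(int argc, char *argv[]) {
  (void)argc;
  FLAGS_logtostderr = true;            // stderr instead of logfiles
  FLAGS_colorlogtostderr = true;       // colored output if the terminal supports it
  FLAGS_minloglevel = google::GLOG_INFO;
  FLAGS_log_dir = ".";                 // only used when logging to files
  FLAGS_max_log_size = 1024;           // MB
  FLAGS_stop_logging_if_full_disk = true;
  google::InitGoogleLogging(argv[0]);

  LOG(INFO) << "configured like the sample's glog_init";
  VLOG(2) << "printed only when FLAGS_v >= 2";
  return 0;
}
```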
```diff
@@ -58,7 +58,7 @@ MYNTEYE_API_VERSION_CHECK( \
 # define MYNTEYE_USE_NAMESPACE
 #endif
 
-constexpr char MYNTEYE_SDK_ROOT_DIR[] = "@MYNTEYE_SDK_ROOT_DIR@";
-constexpr char MYNTEYE_SDK_INSTALL_DIR[] = "@MYNTEYE_SDK_INSTALL_DIR@";
+const char MYNTEYE_SDK_ROOT_DIR[] = "@MYNTEYE_SDK_ROOT_DIR@";
+const char MYNTEYE_SDK_INSTALL_DIR[] = "@MYNTEYE_SDK_INSTALL_DIR@";
 
 #endif  // MYNTEYE_MYNTEYE_H_
```
```diff
@@ -144,7 +144,7 @@ enum class Option : std::uint8_t {
   /**
   * Image frame rate, must set IMU_FREQUENCY together
   *
-  * values: {10,15,20,25,30,35,40,45,50,55,60}, default: 25
+  * values: {10,15,20,25,30,35,40,45,50,55}, default: 25
   */
  FRAME_RATE,
  /**
```
```diff
@@ -30,7 +30,7 @@ int main(int argc, char *argv[]) {
   // Attention: must set FRAME_RATE and IMU_FREQUENCY together, otherwise won't
   // succeed.
 
-  // FRAME_RATE values: 10, 15, 20, 25, 30, 35, 40, 45, 50, 55, 60
+  // FRAME_RATE values: 10, 15, 20, 25, 30, 35, 40, 45, 50, 55
   api->SetOptionValue(Option::FRAME_RATE, 25);
   // IMU_FREQUENCY values: 100, 200, 250, 333, 500
   api->SetOptionValue(Option::IMU_FREQUENCY, 500);
```
```diff
@@ -25,8 +25,8 @@ int main(int argc, char *argv[]) {
   LOG(INFO) << "Intrinsics left: {" << api->GetIntrinsics(Stream::LEFT) << "}";
   LOG(INFO) << "Intrinsics right: {" << api->GetIntrinsics(Stream::RIGHT)
             << "}";
-  LOG(INFO) << "Extrinsics left to right: {"
-            << api->GetExtrinsics(Stream::LEFT, Stream::RIGHT) << "}";
+  LOG(INFO) << "Extrinsics right to left: {"
+            << api->GetExtrinsics(Stream::RIGHT, Stream::LEFT) << "}";
 
   return 0;
 }
```
```diff
@@ -213,11 +213,11 @@ std::vector<std::string> get_plugin_paths() {
 API::API(std::shared_ptr<Device> device) : device_(device) {
   VLOG(2) << __func__;
   if (std::dynamic_pointer_cast<StandardDevice>(device_) != nullptr) {
-    bool in_l_ok, in_r_ok, ex_l2r_ok;
+    bool in_l_ok, in_r_ok, ex_r2l_ok;
     device_->GetIntrinsics(Stream::LEFT, &in_l_ok);
     device_->GetIntrinsics(Stream::RIGHT, &in_r_ok);
-    device_->GetExtrinsics(Stream::LEFT, Stream::RIGHT, &ex_l2r_ok);
-    if (!in_l_ok || !in_r_ok || !ex_l2r_ok) {
+    device_->GetExtrinsics(Stream::RIGHT, Stream::LEFT, &ex_r2l_ok);
+    if (!in_l_ok || !in_r_ok || !ex_r2l_ok) {
 #if defined(WITH_DEVICE_INFO_REQUIRED)
       LOG(FATAL)
 #else
```
```diff
@@ -19,6 +19,8 @@
 
 MYNTEYE_BEGIN_NAMESPACE
 
+const char DepthProcessor::NAME[] = "DepthProcessor";
+
 DepthProcessor::DepthProcessor(std::int32_t proc_period)
     : Processor(std::move(proc_period)) {
   VLOG(2) << __func__ << ": proc_period=" << proc_period;
```
```diff
@@ -23,7 +23,7 @@ MYNTEYE_BEGIN_NAMESPACE
 
 class DepthProcessor : public Processor {
  public:
-  static constexpr auto &&NAME = "DepthProcessor";
+  static const char NAME[];
 
   explicit DepthProcessor(std::int32_t proc_period = 0);
   virtual ~DepthProcessor();
```
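The same `NAME` change repeats for every processor below: the in-class `static constexpr auto &&NAME = "...";` becomes a `static const char NAME[];` declaration, with the string defined once in the matching `.cc` (see the paired hunks above and below). A plausible motivation, not stated in the diff, is to give `NAME` exactly one out-of-line definition with a stable address that can be used from any translation unit, instead of relying on in-class constexpr member rules. A minimal, illustrative sketch of the resulting pattern:

```cpp
#include <cstdio>

// Header side: declaration only (mirrors e.g. DepthProcessor::NAME).
class SomeProcessor {
 public:
  static const char NAME[];  // defined once in the .cc file
};

// Source side: the single definition.
const char SomeProcessor::NAME[] = "SomeProcessor";

int main() {
  // One well-defined array with one address; it can be passed around or
  // bound to references across translation units without surprises.
  std::printf("%s (%zu chars)\n", SomeProcessor::NAME,
              sizeof(SomeProcessor::NAME) - 1);
  return 0;
}
```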
```diff
@@ -21,6 +21,9 @@
 
 MYNTEYE_BEGIN_NAMESPACE
 
+const char DisparityNormalizedProcessor::NAME[] =
+    "DisparityNormalizedProcessor";
+
 DisparityNormalizedProcessor::DisparityNormalizedProcessor(
     std::int32_t proc_period)
     : Processor(std::move(proc_period)) {
```
```diff
@@ -23,7 +23,7 @@ MYNTEYE_BEGIN_NAMESPACE
 
 class DisparityNormalizedProcessor : public Processor {
  public:
-  static constexpr auto &&NAME = "DisparityNormalizedProcessor";
+  static const char NAME[];
 
   explicit DisparityNormalizedProcessor(std::int32_t proc_period = 0);
   virtual ~DisparityNormalizedProcessor();
```
```diff
@@ -21,6 +21,8 @@
 
 MYNTEYE_BEGIN_NAMESPACE
 
+const char DisparityProcessor::NAME[] = "DisparityProcessor";
+
 DisparityProcessor::DisparityProcessor(std::int32_t proc_period)
     : Processor(std::move(proc_period)) {
   VLOG(2) << __func__ << ": proc_period=" << proc_period;
```
```diff
@@ -29,7 +29,7 @@ MYNTEYE_BEGIN_NAMESPACE
 
 class DisparityProcessor : public Processor {
  public:
-  static constexpr auto &&NAME = "DisparityProcessor";
+  static const char NAME[];
 
   explicit DisparityProcessor(std::int32_t proc_period = 0);
   virtual ~DisparityProcessor();
```
```diff
@@ -21,6 +21,8 @@
 
 MYNTEYE_BEGIN_NAMESPACE
 
+const char PointsProcessor::NAME[] = "PointsProcessor";
+
 PointsProcessor::PointsProcessor(cv::Mat Q, std::int32_t proc_period)
     : Processor(std::move(proc_period)), Q_(std::move(Q)) {
   VLOG(2) << __func__ << ": proc_period=" << proc_period;
```
```diff
@@ -25,7 +25,7 @@ MYNTEYE_BEGIN_NAMESPACE
 
 class PointsProcessor : public Processor {
  public:
-  static constexpr auto &&NAME = "PointsProcessor";
+  static const char NAME[];
 
   explicit PointsProcessor(cv::Mat Q, std::int32_t proc_period = 0);
   virtual ~PointsProcessor();
```
```diff
@@ -24,13 +24,15 @@
 
 MYNTEYE_BEGIN_NAMESPACE
 
+const char RectifyProcessor::NAME[] = "RectifyProcessor";
+
 RectifyProcessor::RectifyProcessor(
     std::shared_ptr<Device> device, std::int32_t proc_period)
     : Processor(std::move(proc_period)) {
   VLOG(2) << __func__ << ": proc_period=" << proc_period;
   InitParams(
       device->GetIntrinsics(Stream::LEFT), device->GetIntrinsics(Stream::RIGHT),
-      device->GetExtrinsics(Stream::LEFT, Stream::RIGHT));
+      device->GetExtrinsics(Stream::RIGHT, Stream::LEFT));
 }
 
 RectifyProcessor::~RectifyProcessor() {
@@ -56,7 +58,7 @@ bool RectifyProcessor::OnProcess(
 }
 
 void RectifyProcessor::InitParams(
-    Intrinsics in_left, Intrinsics in_right, Extrinsics ex_left_to_right) {
+    Intrinsics in_left, Intrinsics in_right, Extrinsics ex_right_to_left) {
   cv::Size size{in_left.width, in_left.height};
 
   cv::Mat M1 =
@@ -68,12 +70,12 @@ void RectifyProcessor::InitParams(
   cv::Mat D1(1, 5, CV_64F, in_left.coeffs);
   cv::Mat D2(1, 5, CV_64F, in_right.coeffs);
   cv::Mat R =
-      (cv::Mat_<double>(3, 3) << ex_left_to_right.rotation[0][0],
-       ex_left_to_right.rotation[0][1], ex_left_to_right.rotation[0][2],
-       ex_left_to_right.rotation[1][0], ex_left_to_right.rotation[1][1],
-       ex_left_to_right.rotation[1][2], ex_left_to_right.rotation[2][0],
-       ex_left_to_right.rotation[2][1], ex_left_to_right.rotation[2][2]);
-  cv::Mat T(3, 1, CV_64F, ex_left_to_right.translation);
+      (cv::Mat_<double>(3, 3) << ex_right_to_left.rotation[0][0],
+       ex_right_to_left.rotation[0][1], ex_right_to_left.rotation[0][2],
+       ex_right_to_left.rotation[1][0], ex_right_to_left.rotation[1][1],
+       ex_right_to_left.rotation[1][2], ex_right_to_left.rotation[2][0],
+       ex_right_to_left.rotation[2][1], ex_right_to_left.rotation[2][2]);
+  cv::Mat T(3, 1, CV_64F, ex_right_to_left.translation);
 
   VLOG(2) << "InitParams size: " << size;
   VLOG(2) << "M1: " << M1;
```
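Several changes above flip the extrinsics direction, from `GetExtrinsics(Stream::LEFT, Stream::RIGHT)` to `GetExtrinsics(Stream::RIGHT, Stream::LEFT)`. The two transforms are inverses: if R, t map left-frame points to the right frame, then R' = R^T and t' = -R^T t map right to left. A small illustrative sketch using the same `rotation`/`translation` layout as the `Extrinsics` values consumed by `InitParams` above (the struct is re-declared here only for the example):

```cpp
#include <cstdio>

// Field layout inferred from the rotation[i][j] / translation[i] accesses above.
struct Extrinsics {
  double rotation[3][3];
  double translation[3];
};

// Invert a rigid transform: R' = R^T, t' = -R^T * t.
Extrinsics Inverse(const Extrinsics &ex) {
  Extrinsics inv{};
  for (int i = 0; i < 3; ++i)
    for (int j = 0; j < 3; ++j)
      inv.rotation[i][j] = ex.rotation[j][i];  // transpose
  for (int i = 0; i < 3; ++i)
    for (int j = 0; j < 3; ++j)
      inv.translation[i] -= inv.rotation[i][j] * ex.translation[j];
  return inv;
}

int main() {
  // Identity rotation with a 120 mm baseline along x: the opposite direction
  // only flips the sign of the translation.
  Extrinsics left_to_right{{{1, 0, 0}, {0, 1, 0}, {0, 0, 1}}, {0.12, 0, 0}};
  Extrinsics right_to_left = Inverse(left_to_right);
  std::printf("t = [%g, %g, %g]\n", right_to_left.translation[0],
              right_to_left.translation[1], right_to_left.translation[2]);
  return 0;
}
```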
```diff
@@ -29,7 +29,7 @@ class Device;
 
 class RectifyProcessor : public Processor {
  public:
-  static constexpr auto &&NAME = "RectifyProcessor";
+  static const char NAME[];
 
   RectifyProcessor(
       std::shared_ptr<Device> device, std::int32_t proc_period = 0);
@@ -47,7 +47,7 @@ class RectifyProcessor : public Processor {
 
  private:
   void InitParams(
-      Intrinsics in_left, Intrinsics in_right, Extrinsics ex_left_to_right);
+      Intrinsics in_left, Intrinsics in_right, Extrinsics ex_right_to_left);
 };
 
 MYNTEYE_END_NAMESPACE
```
```diff
@@ -565,11 +565,11 @@ void Device::ReadAllInfos() {
   if (img_params.ok) {
     SetIntrinsics(Stream::LEFT, img_params.in_left);
     SetIntrinsics(Stream::RIGHT, img_params.in_right);
-    SetExtrinsics(Stream::LEFT, Stream::RIGHT, img_params.ex_left_to_right);
+    SetExtrinsics(Stream::RIGHT, Stream::LEFT, img_params.ex_right_to_left);
     VLOG(2) << "Intrinsics left: {" << GetIntrinsics(Stream::LEFT) << "}";
     VLOG(2) << "Intrinsics right: {" << GetIntrinsics(Stream::RIGHT) << "}";
-    VLOG(2) << "Extrinsics left to right: {"
-            << GetExtrinsics(Stream::LEFT, Stream::RIGHT) << "}";
+    VLOG(2) << "Extrinsics right to left: {"
+            << GetExtrinsics(Stream::RIGHT, Stream::LEFT) << "}";
   } else {
     LOG(WARNING) << "Intrinsics & extrinsics not exist";
   }
```
```diff
@@ -224,7 +224,7 @@ void Channels::SetControlValue(const Option &option, std::int32_t value) {
     } break;
     case Option::FRAME_RATE: {
       if (!in_range() ||
-          !in_values({10, 15, 20, 25, 30, 35, 40, 45, 50, 55, 60}))
+          !in_values({10, 15, 20, 25, 30, 35, 40, 45, 50, 55}))
         break;
       XuCamCtrlSet(option, value);
     } break;
@@ -515,7 +515,7 @@ std::size_t from_data(
   std::size_t i = 0;
   i += from_data(&img_params->in_left, data + i, spec_version);
   i += from_data(&img_params->in_right, data + i, spec_version);
-  i += from_data(&img_params->ex_left_to_right, data + i, spec_version);
+  i += from_data(&img_params->ex_right_to_left, data + i, spec_version);
   return i;
 }
 
@@ -787,7 +787,7 @@ std::size_t to_data(
   std::size_t i = 3;  // skip id, size
   i += to_data(&img_params->in_left, data + i, spec_version);
   i += to_data(&img_params->in_right, data + i, spec_version);
-  i += to_data(&img_params->ex_left_to_right, data + i, spec_version);
+  i += to_data(&img_params->ex_right_to_left, data + i, spec_version);
   // others
   std::size_t size = i - 3;
   data[0] = Channels::FID_IMG_PARAMS;
```
```diff
@@ -72,7 +72,7 @@ class MYNTEYE_API Channels {
     bool ok;
     Intrinsics in_left;
     Intrinsics in_right;
-    Extrinsics ex_left_to_right;
+    Extrinsics ex_right_to_left;
   } img_params_t;
 
   typedef struct ImuParams {
```
```diff
@@ -100,9 +100,6 @@ float get_real_exposure_time(
     case 55:
       real_max = 16.325;
       break;
-    case 60:
-      real_max = 15;
-      break;
     default:
       LOG(ERROR) << "Invalid frame rate: " << frame_rate;
       return exposure_time;
```
third_party/glog (vendored, 2 lines changed)

Submodule third_party/glog updated: a97d6b0e1c...8d7a107d68
```diff
@@ -22,7 +22,7 @@ in_right:
    model: 0
    coeffs: [ -5.1012886039889305e-01, 3.8764476500996770e-01, 0., 0.,
        0. ]
-ex_left_to_right:
+ex_right_to_left:
    rotation: [ 9.9701893306553813e-01, -9.5378124886236681e-04,
        -7.7151392794850615e-02, 1.4493996762830500e-03,
        9.9997867219985104e-01, 6.3682325649414354e-03,
```
```diff
@@ -72,7 +72,7 @@ bool DeviceWriter::WriteImgParams(const img_params_t &params) {
     LOG(INFO) << "Write img params success";
     LOG(INFO) << "Intrinsics left: {" << params.in_left << "}";
     LOG(INFO) << "Intrinsics right: {" << params.in_right << "}";
-    LOG(INFO) << "Extrinsics left to right: {" << params.ex_left_to_right
+    LOG(INFO) << "Extrinsics right to left: {" << params.ex_right_to_left
               << "}";
     return true;
   } else {
@@ -185,8 +185,8 @@ bool DeviceWriter::SaveImgParams(
     return false;
   }
   fs << "in_left" << std::vector<Intrinsics>{params.in_left} << "in_right"
-     << std::vector<Intrinsics>{params.in_right} << "ex_left_to_right"
-     << params.ex_left_to_right;
+     << std::vector<Intrinsics>{params.in_right} << "ex_right_to_left"
+     << params.ex_right_to_left;
   fs.release();
   return true;
 }
@@ -213,7 +213,7 @@ void DeviceWriter::SaveAllInfos(const std::string &dir) {
   SaveImgParams(
       {false, device_->GetIntrinsics(Stream::LEFT),
        device_->GetIntrinsics(Stream::RIGHT),
-       device_->GetExtrinsics(Stream::LEFT, Stream::RIGHT)},
+       device_->GetExtrinsics(Stream::RIGHT, Stream::LEFT)},
       dir + OS_SEP "img.params");
   auto &&m_in = device_->GetMotionIntrinsics();
   SaveImuParams(
@@ -311,8 +311,8 @@ DeviceWriter::dev_info_t DeviceWriter::LoadDeviceInfo(
     LOG(FATAL) << "Failed to load file: " << filepath;
   }
   DeviceInfo info;
-  info.lens_type = Type(fs["lens_type"].string());
-  info.imu_type = Type(fs["imu_type"].string());
+  info.lens_type = Type(std::string(fs["lens_type"]));
+  info.imu_type = Type(std::string(fs["imu_type"]));
   fs["nominal_baseline"] >> info.nominal_baseline;
   fs.release();
   return info;
@@ -348,11 +348,11 @@ DeviceWriter::img_params_t DeviceWriter::LoadImgParams(
 
     to_intrinsics(w, h, m, M1, D1, &params.in_left);
     to_intrinsics(w, h, m, M2, D2, &params.in_right);
-    to_extrinsics(R, T, &params.ex_left_to_right);
+    to_extrinsics(R, T, &params.ex_right_to_left);
   } else {
     fs["in_left"][0] >> params.in_left;
     fs["in_right"][0] >> params.in_right;
-    fs["ex_left_to_right"] >> params.ex_left_to_right;
+    fs["ex_right_to_left"] >> params.ex_right_to_left;
   }
 
   fs.release();
```
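The img.params file is written and read back with `cv::FileStorage`, so the key rename from `ex_left_to_right` to `ex_right_to_left` affects both `SaveImgParams` and `LoadImgParams`. A minimal, self-contained sketch of the same round trip (the key name comes from the diff; everything else, including forcing YAML for the non-standard extension, is illustrative):

```cpp
#include <iostream>
#include <vector>
#include <opencv2/core.hpp>

int main() {
  // Write, as DeviceWriter::SaveImgParams does (reduced to the renamed key).
  {
    cv::FileStorage fs(
        "img.params", cv::FileStorage::WRITE | cv::FileStorage::FORMAT_YAML);
    fs << "ex_right_to_left" << "{"
       << "rotation" << std::vector<double>{1, 0, 0, 0, 1, 0, 0, 0, 1}
       << "translation" << std::vector<double>{-120.0, 0.0, 0.0}
       << "}";
  }
  // Read back, as DeviceWriter::LoadImgParams does.
  {
    cv::FileStorage fs("img.params", cv::FileStorage::READ);
    std::vector<double> t;
    fs["ex_right_to_left"]["translation"] >> t;
    std::cout << "tx: " << t.at(0) << std::endl;
  }
  return 0;
}
```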
```diff
@@ -99,7 +99,7 @@ def main():
     # set_option_value
 
     def set_rate(frame_rate=25, imu_frequency=500):  # pylint: disable=unused-variable
-        # FRAME_RATE values: 10, 15, 20, 25, 30, 35, 40, 45, 50, 55, 60
+        # FRAME_RATE values: 10, 15, 20, 25, 30, 35, 40, 45, 50, 55
         api.set_option_value(mynteye.FRAME_RATE, frame_rate)
         # IMU_FREQUENCY values: 100, 200, 250, 333, 500
         api.set_option_value(mynteye.IMU_FREQUENCY, imu_frequency)
```
```diff
@@ -54,7 +54,7 @@
   <arg name="contrast" default="-1" />
   <!-- <arg name="contrast" default="127" /> -->
 
-  <!-- frame_rate range: {10,15,20,25,30,35,40,45,50,55,60} -->
+  <!-- frame_rate range: {10,15,20,25,30,35,40,45,50,55} -->
   <arg name="frame_rate" default="-1" />
   <!-- <arg name="frame_rate" default="25" /> -->
 
```
```diff
@@ -61,7 +61,7 @@ Visualization Manager:
       Min Value: 0
       Name: Left
       Normalize Range: true
-      Queue Size: 2
+      Queue Size: 1
       Transport Hint: raw
       Unreliable: false
       Value: true
@@ -73,7 +73,7 @@ Visualization Manager:
       Min Value: 0
       Name: Right
       Normalize Range: true
-      Queue Size: 2
+      Queue Size: 1
      Transport Hint: raw
      Unreliable: false
      Value: true
@@ -87,7 +87,7 @@ Visualization Manager:
       Min Value: 0
       Name: LeftRect
       Normalize Range: true
-      Queue Size: 2
+      Queue Size: 1
       Transport Hint: raw
       Unreliable: false
       Value: true
@@ -99,7 +99,7 @@ Visualization Manager:
       Min Value: 0
       Name: RightRect
       Normalize Range: true
-      Queue Size: 2
+      Queue Size: 1
       Transport Hint: raw
       Unreliable: false
       Value: true
@@ -117,7 +117,7 @@ Visualization Manager:
       Min Value: 0
       Name: Disparity
       Normalize Range: true
-      Queue Size: 2
+      Queue Size: 1
       Transport Hint: raw
       Unreliable: false
       Value: true
@@ -129,7 +129,7 @@ Visualization Manager:
       Min Value: 0
       Name: DisparityNorm
       Normalize Range: true
-      Queue Size: 2
+      Queue Size: 1
       Transport Hint: raw
       Unreliable: false
       Value: true
@@ -143,7 +143,7 @@ Visualization Manager:
       Min Value: 0
       Name: Depth
       Normalize Range: true
-      Queue Size: 2
+      Queue Size: 1
       Transport Hint: raw
       Unreliable: false
       Value: true
@@ -167,7 +167,7 @@ Visualization Manager:
       Min Intensity: 0
       Name: Points
       Position Transformer: XYZ
-      Queue Size: 10
+      Queue Size: 1
       Selectable: true
       Size (Pixels): 3
       Size (m): 0.00999999978
```
```diff
@@ -23,6 +23,8 @@
 #include <tf/tf.h>
 #include <tf2_ros/static_transform_broadcaster.h>
 
+#include <opencv2/calib3d/calib3d.hpp>
+
 #include <mynt_eye_ros_wrapper/GetInfo.h>
 #include <mynt_eye_ros_wrapper/Temp.h>
 
@@ -108,7 +110,12 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
     for (auto &&it = stream_names.begin(); it != stream_names.end(); ++it) {
       stream_topics[it->first] = it->second;
       private_nh_.getParam(it->second + "_topic", stream_topics[it->first]);
+
+      // if published init
+      is_published_[it->first] = false;
     }
+    is_motion_published_ = false;
+    is_started_ = false;
 
     std::string imu_topic = "imu";
     std::string temp_topic = "temp";
@@ -164,24 +171,21 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
 
     for (auto &&it = stream_names.begin(); it != stream_names.end(); ++it) {
       auto &&topic = stream_topics[it->first];
-      if (it->first == Stream::LEFT || it->first == Stream::RIGHT) {  // camera
-        camera_publishers_[it->first] = it_mynteye.advertiseCamera(topic, 1);
-      } else if (it->first == Stream::POINTS) {  // pointcloud
+      if (it->first == Stream::POINTS) {  // pointcloud
         points_publisher_ = nh_.advertise<sensor_msgs::PointCloud2>(topic, 1);
-      } else {  // image
-        image_publishers_[it->first] = it_mynteye.advertise(topic, 1);
+      } else {  // camera
+        camera_publishers_[it->first] = it_mynteye.advertiseCamera(topic, 1);
       }
       NODELET_INFO_STREAM("Advertized on topic " << topic);
     }
 
     camera_encodings_ = {{Stream::LEFT, enc::MONO8},
-                         {Stream::RIGHT, enc::MONO8}};
-
-    image_encodings_ = {{Stream::LEFT_RECTIFIED, enc::MONO8},
-                        {Stream::RIGHT_RECTIFIED, enc::MONO8},
-                        {Stream::DISPARITY, enc::MONO8},  // float
-                        {Stream::DISPARITY_NORMALIZED, enc::MONO8},
-                        {Stream::DEPTH, enc::MONO16}};
+                         {Stream::RIGHT, enc::MONO8},
+                         {Stream::LEFT_RECTIFIED, enc::MONO8},
+                         {Stream::RIGHT_RECTIFIED, enc::MONO8},
+                         {Stream::DISPARITY, enc::MONO8},  // float
+                         {Stream::DISPARITY_NORMALIZED, enc::MONO8},
+                         {Stream::DEPTH, enc::MONO16}};
 
     pub_imu_ = nh_.advertise<sensor_msgs::Imu>(imu_topic, 1);
     NODELET_INFO_STREAM("Advertized on topic " << imu_topic);
@@ -192,9 +196,9 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
     // stream toggles
 
     for (auto &&it = stream_names.begin(); it != stream_names.end(); ++it) {
-      if (it->first == Stream::LEFT || it->first == Stream::RIGHT) {  // camera
-        continue;
-      } else {  // image, pointcloud
+      if (it->first == Stream::LEFT || it->first == Stream::RIGHT) {
+        continue;  // native streams
+      } else {
         if (!api_->Supports(it->first))
           continue;
         bool enabled = false;
@@ -214,7 +218,9 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
     NODELET_INFO_STREAM("Advertized service " << DEVICE_INFO_SERVICE);
 
     publishStaticTransforms();
-    publishTopics();
+    while (private_nh_.ok()) {
+      publishTopics();
+    }
   }
 
   bool getInfo(
```
```diff
@@ -254,100 +260,126 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
   }
 
   void publishTopics() {
-    api_->SetStreamCallback(
-        Stream::LEFT, [this](const api::StreamData &data) {
-          ros::Time stamp = hardTimeToSoftTime(data.img->timestamp);
+    if (camera_publishers_[Stream::LEFT].getNumSubscribers() > 0 &&
+        !is_published_[Stream::LEFT]) {
+      api_->SetStreamCallback(
+          Stream::LEFT, [this](const api::StreamData &data) {
+            ros::Time stamp = hardTimeToSoftTime(data.img->timestamp);
 
-          // static double img_time_prev = -1;
-          // NODELET_INFO_STREAM("ros_time_beg: " << FULL_PRECISION <<
-          // ros_time_beg
-          //     << ", img_time_elapsed: " << FULL_PRECISION
-          //     << ((data.img->timestamp - img_time_beg) * 0.00001f)
-          //     << ", img_time_diff: " << FULL_PRECISION
-          //     << ((img_time_prev < 0) ? 0
-          //     : (data.img->timestamp - img_time_prev) * 0.01f) << " ms");
-          // img_time_prev = data.img->timestamp;
+            // static double img_time_prev = -1;
+            // NODELET_INFO_STREAM("ros_time_beg: " << FULL_PRECISION <<
+            // ros_time_beg
+            //     << ", img_time_elapsed: " << FULL_PRECISION
+            //     << ((data.img->timestamp - img_time_beg) * 0.00001f)
+            //     << ", img_time_diff: " << FULL_PRECISION
+            //     << ((img_time_prev < 0) ? 0
+            //     : (data.img->timestamp - img_time_prev) * 0.01f) << "
+            // ms");
+            // img_time_prev = data.img->timestamp;
 
-          ++left_count_;
-          publishCamera(Stream::LEFT, data, left_count_, stamp);
-          NODELET_DEBUG_STREAM(
-              Stream::LEFT << ", count: " << left_count_
-                           << ", frame_id: " << data.img->frame_id
-                           << ", timestamp: " << data.img->timestamp
-                           << ", exposure_time: " << data.img->exposure_time);
-        });
+            ++left_count_;
+            publishCamera(Stream::LEFT, data, left_count_, stamp);
+            NODELET_DEBUG_STREAM(
+                Stream::LEFT << ", count: " << left_count_
+                             << ", frame_id: " << data.img->frame_id
+                             << ", timestamp: " << data.img->timestamp
+                             << ", exposure_time: " << data.img->exposure_time);
+          });
+      is_published_[Stream::LEFT] = true;
+    }
 
-    api_->SetStreamCallback(
-        Stream::RIGHT, [this](const api::StreamData &data) {
-          ros::Time stamp = hardTimeToSoftTime(data.img->timestamp);
+    if (camera_publishers_[Stream::RIGHT].getNumSubscribers() > 0 &&
+        !is_published_[Stream::RIGHT]) {
+      api_->SetStreamCallback(
+          Stream::RIGHT, [this](const api::StreamData &data) {
+            ros::Time stamp = hardTimeToSoftTime(data.img->timestamp);
 
-          ++right_count_;
-          publishCamera(Stream::RIGHT, data, right_count_, stamp);
-          NODELET_DEBUG_STREAM(
-              Stream::RIGHT << ", count: " << right_count_
-                            << ", frame_id: " << data.img->frame_id
-                            << ", timestamp: " << data.img->timestamp
-                            << ", exposure_time: " << data.img->exposure_time);
-        });
+            ++right_count_;
+            publishCamera(Stream::RIGHT, data, right_count_, stamp);
+            NODELET_DEBUG_STREAM(
+                Stream::RIGHT
+                << ", count: " << right_count_ << ", frame_id: "
+                << data.img->frame_id << ", timestamp: " << data.img->timestamp
+                << ", exposure_time: " << data.img->exposure_time);
+          });
+      is_published_[Stream::RIGHT] = true;
+    }
 
-    std::vector<Stream> image_streams{
+    std::vector<Stream> other_streams{
         Stream::LEFT_RECTIFIED, Stream::RIGHT_RECTIFIED, Stream::DISPARITY,
         Stream::DISPARITY_NORMALIZED, Stream::DEPTH};
 
-    for (auto &&stream : image_streams) {
+    for (auto &&stream : other_streams) {
+      if (camera_publishers_[stream].getNumSubscribers() == 0 &&
+          is_published_[stream]) {
+        continue;
+      }
       api_->SetStreamCallback(
           stream, [this, stream](const api::StreamData &data) {
             // data.img is null, not hard timestamp
             static std::size_t count = 0;
             ++count;
-            publishImage(stream, data, count, ros::Time::now());
+            publishCamera(stream, data, count, ros::Time::now());
           });
+      is_published_[stream] = true;
     }
 
-    api_->SetStreamCallback(
-        Stream::POINTS, [this](const api::StreamData &data) {
-          static std::size_t count = 0;
-          ++count;
-          publishPoints(data, count, ros::Time::now());
-        });
+    if (camera_publishers_[Stream::POINTS].getNumSubscribers() > 0 &&
+        !is_published_[Stream::POINTS]) {
+      api_->SetStreamCallback(
+          Stream::POINTS, [this](const api::StreamData &data) {
+            static std::size_t count = 0;
+            ++count;
+            publishPoints(data, count, ros::Time::now());
+          });
+      is_published_[Stream::POINTS] = true;
+    }
 
-    api_->SetMotionCallback([this](const api::MotionData &data) {
-      ros::Time stamp = hardTimeToSoftTime(data.imu->timestamp);
+    if (!is_motion_published_) {
+      api_->SetMotionCallback([this](const api::MotionData &data) {
+        ros::Time stamp = hardTimeToSoftTime(data.imu->timestamp);
 
-      // static double imu_time_prev = -1;
-      // NODELET_INFO_STREAM("ros_time_beg: " << FULL_PRECISION << ros_time_beg
-      //     << ", imu_time_elapsed: " << FULL_PRECISION
-      //     << ((data.imu->timestamp - imu_time_beg) * 0.00001f)
-      //     << ", imu_time_diff: " << FULL_PRECISION
-      //     << ((imu_time_prev < 0) ? 0
-      //     : (data.imu->timestamp - imu_time_prev) * 0.01f) << " ms");
-      // imu_time_prev = data.imu->timestamp;
+        // static double imu_time_prev = -1;
+        // NODELET_INFO_STREAM("ros_time_beg: " << FULL_PRECISION <<
+        // ros_time_beg
+        //     << ", imu_time_elapsed: " << FULL_PRECISION
+        //     << ((data.imu->timestamp - imu_time_beg) * 0.00001f)
+        //     << ", imu_time_diff: " << FULL_PRECISION
+        //     << ((imu_time_prev < 0) ? 0
+        //     : (data.imu->timestamp - imu_time_prev) * 0.01f) << " ms");
+        // imu_time_prev = data.imu->timestamp;
 
-      ++imu_count_;
-      publishImu(data, imu_count_, stamp);
-      publishTemp(data.imu->temperature, imu_count_, stamp);
-      NODELET_DEBUG_STREAM(
-          "Imu count: " << imu_count_ << ", frame_id: " << data.imu->frame_id
-                        << ", timestamp: " << data.imu->timestamp
-                        << ", accel_x: " << data.imu->accel[0]
-                        << ", accel_y: " << data.imu->accel[1]
-                        << ", accel_z: " << data.imu->accel[2]
-                        << ", gyro_x: " << data.imu->gyro[0]
-                        << ", gyro_y: " << data.imu->gyro[1]
-                        << ", gyro_z: " << data.imu->gyro[2]
-                        << ", temperature: " << data.imu->temperature);
-      // Sleep 1ms, otherwise publish may drop some datas.
-      ros::Duration(0.001).sleep();
-    });
+        ++imu_count_;
+        publishImu(data, imu_count_, stamp);
+        publishTemp(data.imu->temperature, imu_count_, stamp);
+        NODELET_DEBUG_STREAM(
+            "Imu count: " << imu_count_ << ", frame_id: " << data.imu->frame_id
+                          << ", timestamp: " << data.imu->timestamp
+                          << ", accel_x: " << data.imu->accel[0]
+                          << ", accel_y: " << data.imu->accel[1]
+                          << ", accel_z: " << data.imu->accel[2]
+                          << ", gyro_x: " << data.imu->gyro[0]
+                          << ", gyro_y: " << data.imu->gyro[1]
+                          << ", gyro_z: " << data.imu->gyro[2]
+                          << ", temperature: " << data.imu->temperature);
+        // Sleep 1ms, otherwise publish may drop some datas.
+        ros::Duration(0.001).sleep();
+      });
+      is_motion_published_ = true;
+    }
 
     time_beg_ = ros::Time::now().toSec();
-    api_->Start(Source::ALL);
+    if (!is_started_) {
+      api_->Start(Source::ALL);
+      is_started_ = true;
+    }
   }
 
   void publishCamera(
       const Stream &stream, const api::StreamData &data, std::uint32_t seq,
       ros::Time stamp) {
-    if (camera_publishers_[stream].getNumSubscribers() == 0)
-      return;
+    // if (camera_publishers_[stream].getNumSubscribers() == 0)
+    //   return;
     std_msgs::Header header;
     header.seq = seq;
     header.stamp = stamp;
```
```diff
@@ -363,6 +395,7 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
     camera_publishers_[stream].publish(msg, info);
   }
 
+  /*
   void publishImage(
       const Stream &stream, const api::StreamData &data, std::uint32_t seq,
       ros::Time stamp) {
@@ -380,11 +413,12 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
         cv_bridge::CvImage(header, image_encodings_[stream], img).toImageMsg();
     image_publishers_[stream].publish(msg);
   }
+  */
 
   void publishPoints(
       const api::StreamData &data, std::uint32_t seq, ros::Time stamp) {
-    if (points_publisher_.getNumSubscribers() == 0)
-      return;
+    // if (points_publisher_.getNumSubscribers() == 0)
+    //   return;
 
     auto &&in = api_->GetIntrinsics(Stream::LEFT);
 
```
```diff
@@ -534,6 +568,43 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
     }
 
     api_ = API::Create(device);
+
+    computeRectTransforms();
   }
 
+  void computeRectTransforms() {
+    ROS_ASSERT(api_);
+    auto &&in_left = api_->GetIntrinsics(Stream::LEFT);
+    auto &&in_right = api_->GetIntrinsics(Stream::RIGHT);
+    auto &&ex_right_to_left = api_->GetExtrinsics(Stream::RIGHT, Stream::LEFT);
+
+    cv::Size size{in_left.width, in_left.height};
+
+    cv::Mat M1 =
+        (cv::Mat_<double>(3, 3) << in_left.fx, 0, in_left.cx, 0, in_left.fy,
+         in_left.cy, 0, 0, 1);
+    cv::Mat M2 =
+        (cv::Mat_<double>(3, 3) << in_right.fx, 0, in_right.cx, 0, in_right.fy,
+         in_right.cy, 0, 0, 1);
+    cv::Mat D1(1, 5, CV_64F, in_left.coeffs);
+    cv::Mat D2(1, 5, CV_64F, in_right.coeffs);
+    cv::Mat R =
+        (cv::Mat_<double>(3, 3) << ex_right_to_left.rotation[0][0],
+         ex_right_to_left.rotation[0][1], ex_right_to_left.rotation[0][2],
+         ex_right_to_left.rotation[1][0], ex_right_to_left.rotation[1][1],
+         ex_right_to_left.rotation[1][2], ex_right_to_left.rotation[2][0],
+         ex_right_to_left.rotation[2][1], ex_right_to_left.rotation[2][2]);
+    cv::Mat T(3, 1, CV_64F, ex_right_to_left.translation);
+
+    cv::stereoRectify(
+        M1, D1, M2, D2, size, R, T, left_r_, right_r_, left_p_, right_p_, q_,
+        cv::CALIB_ZERO_DISPARITY, 0, size, &left_roi_, &right_roi_);
+
+    NODELET_DEBUG_STREAM("left_r: " << left_r_);
+    NODELET_DEBUG_STREAM("right_r: " << right_r_);
+    NODELET_DEBUG_STREAM("left_p: " << left_p_);
+    NODELET_DEBUG_STREAM("right_p: " << right_p_);
+    NODELET_DEBUG_STREAM("q: " << q_);
+  }
+
   sensor_msgs::CameraInfoPtr getCameraInfo(const Stream &stream) {
@@ -541,10 +612,16 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
       return camera_info_ptrs_[stream];
     }
 
     // http://docs.ros.org/kinetic/api/sensor_msgs/html/msg/CameraInfo.html
     sensor_msgs::CameraInfo *camera_info = new sensor_msgs::CameraInfo();
     camera_info_ptrs_[stream] = sensor_msgs::CameraInfoPtr(camera_info);
 
-    auto &&in = api_->GetIntrinsics(stream);
+    Intrinsics in;
+    if (stream == Stream::RIGHT || stream == Stream::RIGHT_RECTIFIED) {
+      in = api_->GetIntrinsics(Stream::RIGHT);
+    } else {
+      in = api_->GetIntrinsics(Stream::LEFT);
+    }
+
     camera_info->header.frame_id = frame_ids_[stream];
     camera_info->width = in.width;
```
```diff
@@ -562,26 +639,14 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
     //     [fx'  0  cx' Tx]
     // P = [ 0  fy' cy' Ty]
     //     [ 0   0   1   0]
-    camera_info->P.at(0) = camera_info->K.at(0);
-    camera_info->P.at(1) = 0;
-    camera_info->P.at(2) = camera_info->K.at(2);
-    camera_info->P.at(3) = 0;
-
-    camera_info->P.at(4) = 0;
-    camera_info->P.at(5) = camera_info->K.at(4);
-    camera_info->P.at(6) = camera_info->K.at(5);
-    camera_info->P.at(7) = 0;
-
-    camera_info->P.at(8) = 0;
-    camera_info->P.at(9) = 0;
-    camera_info->P.at(10) = 1;
-    camera_info->P.at(11) = 0;
-
-    if (stream == Stream::RIGHT) {
-      auto &&ex = api_->GetExtrinsics(stream, Stream::LEFT);
-      camera_info->P.at(3) = ex.translation[0];
-      camera_info->P.at(7) = ex.translation[1];
-      camera_info->P.at(11) = ex.translation[2];
+    cv::Mat p = left_p_;
+    if (stream == Stream::RIGHT || stream == Stream::RIGHT_RECTIFIED) {
+      p = right_p_;
     }
+    for (int i = 0; i < p.rows; i++) {
+      for (int j = 0; j < p.cols; j++) {
+        camera_info->P.at(i * p.cols + j) = p.at<double>(i, j);
+      }
+    }
 
     camera_info->distortion_model = "plumb_bob";
```
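With this change the ROS `CameraInfo.P` rows come straight from the `P1`/`P2` matrices that `cv::stereoRectify` produced in `computeRectTransforms()`. For a rectified pair, the right projection matrix encodes the baseline as P(0,3) = -fx' * B, so the baseline can be recovered from the stored `right_p_`; a small illustrative helper (the member name comes from this diff, everything else is assumed):

```cpp
#include <opencv2/calib3d/calib3d.hpp>

// Illustrative only: cv::stereoRectify outputs CV_64F matrices, and for a
// rectified pair P_right(0,3) = -fx' * baseline.
double BaselineFromRightP(const cv::Mat &right_p) {
  const double fx = right_p.at<double>(0, 0);
  const double tx = right_p.at<double>(0, 3);
  return -tx / fx;  // same units as the calibration translation
}
```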
```diff
@@ -759,16 +824,14 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
   ros::NodeHandle nh_;
   ros::NodeHandle private_nh_;
 
-  // camera: LEFT, RIGHT
+  // camera:
+  //   LEFT, RIGHT, LEFT_RECTIFIED, RIGHT_RECTIFIED,
+  //   DISPARITY, DISPARITY_NORMALIZED,
+  //   DEPTH
   std::map<Stream, image_transport::CameraPublisher> camera_publishers_;
   std::map<Stream, sensor_msgs::CameraInfoPtr> camera_info_ptrs_;
   std::map<Stream, std::string> camera_encodings_;
 
-  // image: LEFT_RECTIFIED, RIGHT_RECTIFIED, DISPARITY, DISPARITY_NORMALIZED,
-  //   DEPTH
-  std::map<Stream, image_transport::Publisher> image_publishers_;
-  std::map<Stream, std::string> image_encodings_;
-
   // pointcloud: POINTS
   ros::Publisher points_publisher_;
 
@@ -792,10 +855,18 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
 
   std::shared_ptr<API> api_;
 
+  // rectification transforms
+  cv::Mat left_r_, right_r_, left_p_, right_p_, q_;
+  cv::Rect left_roi_, right_roi_;
+
   double time_beg_ = -1;
   std::size_t left_count_ = 0;
   std::size_t right_count_ = 0;
   std::size_t imu_count_ = 0;
+
+  std::map<Stream, bool> is_published_;
+  bool is_motion_published_;
+  bool is_started_;
 };
 
 MYNTEYE_END_NAMESPACE
```