Compare commits

15 Commits

| Author | SHA1 | Date |
|---|---|---|
| | fde0b17c0b | |
| | 9455ed8a0f | |
| | 50509d539e | |
| | dd839a3cfd | |
| | 593abc1b3d | |
| | 47471ed2ea | |
| | baa7243792 | |
| | 615fe74965 | |
| | 29a9895969 | |
| | 140b4aec18 | |
| | 559b1e4d80 | |
| | c6525cca31 | |
| | 5bcf9ecf1f | |
| | 35d91c25b9 | |
| | 425af10e1f | |
@@ -14,7 +14,7 @@
 cmake_minimum_required(VERSION 3.0)

-project(mynteye VERSION 2.4.0 LANGUAGES C CXX)
+project(mynteye VERSION 2.4.1 LANGUAGES C CXX)

 include(cmake/Common.cmake)

@@ -1,6 +1,6 @@
 # MYNT® EYE S SDK

-[](https://github.com/slightech/MYNT-EYE-S-SDK)
+[](https://github.com/slightech/MYNT-EYE-S-SDK)

 ## Overview

@@ -38,7 +38,7 @@ PROJECT_NAME = "MYNT EYE S SDK"
 # could be handy for archiving the generated documentation or if some version
 # control system is used.

-PROJECT_NUMBER = 2.4.0
+PROJECT_NUMBER = 2.4.1

 # Using the PROJECT_BRIEF tag one can provide an optional one line description
 # for a project that appears at the top of each page and should give viewer a

@@ -24,7 +24,7 @@ copyright = '2019, MYNTAI'
 author = 'MYNTAI'

 # The short X.Y version
-version = '2.4.0'
+version = '2.4.1'
 # The full version, including alpha/beta/rc tags
 release = version

@@ -3,6 +3,11 @@
 Change Log
 ==========

+2019-08-17(v2.4.1)
+-------------------
+
+1. Optimize disparity calculation
+
 2019-08-09(v2.4.0)
 -------------------
@@ -225,8 +225,8 @@ enum class Option : std::uint8_t {
   /**
    * HDR mode
    * <p>
-   * 0: 10-bit<br>
-   * 1: 12-bit
+   * 0: normal<br>
+   * 1: WDR
    * </p>
    */
   HDR_MODE,

@@ -775,6 +775,7 @@ struct CameraROSMsgInfoPair {
   struct CameraROSMsgInfo left;
   struct CameraROSMsgInfo right;
   double T_mul_f = -1.f;
+  double cx1_minus_cx2 = 0.f;
   double R[9] = {0};
   double P[12] = {0};
 };
@@ -27,8 +27,14 @@ MYNTEYE_BEGIN_NAMESPACE
 const char DisparityProcessor::NAME[] = "DisparityProcessor";

 DisparityProcessor::DisparityProcessor(DisparityComputingMethod type,
+    std::shared_ptr<struct CameraROSMsgInfoPair> calib_infos,
     std::int32_t proc_period)
     : Processor(std::move(proc_period)), type_(type) {
+  if (calib_infos) {
+    cx1_minus_cx2_ = calib_infos->cx1_minus_cx2;
+  } else {
+    cx1_minus_cx2_ = 1.f;
+  }
   VLOG(2) << __func__ << ": proc_period=" << proc_period;
   int sgbmWinSize = 3;
   int numberOfDisparities = 64;

@@ -81,7 +87,7 @@ DisparityProcessor::DisparityProcessor(DisparityComputingMethod type,
   bm_matcher->setPreFilterCap(31);
   bm_matcher->setBlockSize(15);
   bm_matcher->setMinDisparity(0);
-  bm_matcher->setNumDisparities(64);
+  bm_matcher->setNumDisparities(128);
   bm_matcher->setUniquenessRatio(60);
   bm_matcher->setTextureThreshold(10);
   bm_matcher->setSpeckleWindowSize(100);
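The block-matching branch now searches twice the disparity range (128 levels instead of 64), which extends the usable near range at the cost of more matching work per pixel. As a self-contained sketch of the same configuration against OpenCV's cv::StereoBM API (illustrative only, assuming OpenCV 3.x/4.x; MakeBmMatcher is a hypothetical helper, not SDK code):

```cpp
#include <opencv2/calib3d.hpp>

// Sketch: the parameter set from the diff above, applied to cv::StereoBM.
cv::Ptr<cv::StereoBM> MakeBmMatcher() {
  auto bm = cv::StereoBM::create(128, 15);  // numDisparities, blockSize
  bm->setPreFilterCap(31);
  bm->setMinDisparity(0);
  bm->setNumDisparities(128);        // must stay a positive multiple of 16
  bm->setUniquenessRatio(60);
  bm->setTextureThreshold(10);
  bm->setSpeckleWindowSize(100);
  return bm;
}
```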
@@ -156,6 +162,7 @@ bool DisparityProcessor::OnProcess(
   } else if (type_ == DisparityComputingMethod::BM) {
     // LOG(ERROR) << "not supported in opencv 2.x";
     (*sgbm_matcher)(input->first, input->second, disparity);
+    disparity.convertTo(output->value, CV_32F, 1./16, 1);
     // cv::Mat tmp1, tmp2;
     // cv::cvtColor(input->first, tmp1, CV_RGB2GRAY);
     // cv::cvtColor(input->second, tmp2, CV_RGB2GRAY);

@@ -188,7 +195,7 @@ bool DisparityProcessor::OnProcess(
     sgbm_matcher->compute(input->first, input->second, disparity);
   }
 #endif
-  disparity.convertTo(output->value, CV_32F, 1./16, 1);
+  disparity.convertTo(output->value, CV_32F, 1./16, cx1_minus_cx2_);
   output->id = input->first_id;
   output->data = input->first_data;
   return true;
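OpenCV's stereo matchers return disparity in fixed point scaled by 16, and cv::Mat::convertTo(dst, type, alpha, beta) computes dst = alpha*src + beta per element, so passing cx1_minus_cx2_ as beta rescales the raw disparity to pixels and folds in the offset between the rectified principal points in one step. A rough sketch of how the adjusted disparity could feed a depth computation, under the assumption (suggested by its name) that T_mul_f is the baseline times the focal length; DisparityToDepth is a hypothetical helper for illustration, not an SDK function:

```cpp
#include <opencv2/core.hpp>

// Sketch: convert fixed-point disparity to float pixels plus the principal
// point offset, then apply Z = (T * f) / d for valid (positive) disparities.
cv::Mat DisparityToDepth(const cv::Mat& disp16, double T_mul_f,
                         double cx1_minus_cx2) {
  cv::Mat d;
  disp16.convertTo(d, CV_32F, 1. / 16, cx1_minus_cx2);  // d = raw/16 + offset
  cv::Mat depth(d.size(), CV_32F, cv::Scalar(0));
  for (int r = 0; r < d.rows; ++r) {
    for (int c = 0; c < d.cols; ++c) {
      const float dv = d.at<float>(r, c);
      if (dv > 0.f) depth.at<float>(r, c) = static_cast<float>(T_mul_f / dv);
    }
  }
  return depth;
}
```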
@@ -32,6 +32,7 @@ class DisparityProcessor : public Processor {
   static const char NAME[];

   explicit DisparityProcessor(DisparityComputingMethod type,
+      std::shared_ptr<struct CameraROSMsgInfoPair> calib_infos,
       std::int32_t proc_period = 0);
   virtual ~DisparityProcessor();

@@ -54,6 +55,7 @@ class DisparityProcessor : public Processor {
   cv::Ptr<cv::StereoSGBM> sgbm_matcher;
   cv::Ptr<cv::StereoBM> bm_matcher;
   DisparityComputingMethod type_;
+  double cx1_minus_cx2_;
 };

 MYNTEYE_END_NAMESPACE
@@ -42,6 +42,7 @@ void RectifyProcessor::stereoRectify(models::CameraPtr leftOdo,
     const CvMat* D1, const CvMat* D2, CvSize imageSize,
     const CvMat* matR, const CvMat* matT,
     CvMat* _R1, CvMat* _R2, CvMat* _P1, CvMat* _P2, double* T_mul_f,
+    double *cx1_min_cx2,
     int flags, double alpha, CvSize newImgSize) {
   // std::cout << _alpha << std::endl;
   alpha = _alpha;

@@ -218,6 +219,8 @@ void RectifyProcessor::stereoRectify(models::CameraPtr leftOdo,
     cvmSet(_P2, 0, 2, cx2);
     cvmSet(_P2, 1, 2, cy2);
     cvmSet(_P2, idx, 3, s*cvmGet(_P2, idx, 3));
+
+    *cx1_min_cx2 = -(cx1 - cx2);
   }
 }
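The rectification routine now reports the difference between the rectified principal points through the new out-parameter, stored with a leading minus sign: *cx1_min_cx2 = -(cx1 - cx2). The same quantity can be read back from the 3x4 projection matrices the function fills, since cx sits at row 0, column 2 of P1 and P2; a minimal sketch under that assumption (not SDK code):

```cpp
#include <opencv2/core.hpp>

// Sketch: recover the value written to cx1_min_cx2 from the rectified
// projection matrices P1/P2 (CV_64F, 3x4), matching the sign used above.
double Cx1MinusCx2(const cv::Mat& P1, const cv::Mat& P2) {
  const double cx1 = P1.at<double>(0, 2);
  const double cx2 = P2.at<double>(0, 2);
  return -(cx1 - cx2);
}
```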
@@ -295,12 +298,14 @@ std::shared_ptr<struct CameraROSMsgInfoPair> RectifyProcessor::stereoRectify(
   cv::Mat P1 = cv::Mat(3, 4, CV_64F);
   cv::Mat P2 = cv::Mat(3, 4, CV_64F);

-  CvMat c_R = cv_R, c_t = cv_t;
-  CvMat c_K1 = K1, c_K2 = K2, c_D1 = D1, c_D2 = D2;
-  CvMat c_R1 = R1, c_R2 = R2, c_P1 = P1, c_P2 = P2;
+  CvMat c_R = cvMat(cv_R), c_t = cvMat(cv_t);
+  CvMat c_K1 = cvMat(K1), c_K2 = cvMat(K2), c_D1 = cvMat(D1), c_D2 = cvMat(D2);
+  CvMat c_R1 = cvMat(R1), c_R2 = cvMat(R2), c_P1 = cvMat(P1), c_P2 = cvMat(P2);
   double T_mul_f;
+  double cx1_min_cx2;
   stereoRectify(leftOdo, rightOdo, &c_K1, &c_K2, &c_D1, &c_D2,
-      image_size1, &c_R, &c_t, &c_R1, &c_R2, &c_P1, &c_P2, &T_mul_f);
+      cvSize(image_size1.width, image_size1.height), &c_R, &c_t, &c_R1, &c_R2, &c_P1, &c_P2, &T_mul_f,
+      &cx1_min_cx2);

   VLOG(2) << "K1: " << K1 << std::endl;
   VLOG(2) << "D1: " << D1 << std::endl;
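The switch from plain assignments to explicit cvMat()/cvSize() wrappers matches OpenCV 4.x, where cv::Mat and cv::Size no longer convert implicitly to the legacy C structs CvMat and CvSize. A minimal sketch of the pattern (illustrative only; LegacyView is not SDK code):

```cpp
#include <opencv2/core.hpp>
#include <opencv2/core/types_c.h>

// Sketch: build legacy C headers over existing cv::Mat data so older
// cvmGet/cvmSet-based code keeps compiling against OpenCV 4.
void LegacyView(const cv::Mat& K) {
  CvMat c_K = cvMat(K);                  // header only, shares K's data
  CvSize size = cvSize(K.cols, K.rows);  // explicit size conversion
  (void)c_K;
  (void)size;
}
```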
@@ -334,6 +339,7 @@ std::shared_ptr<struct CameraROSMsgInfoPair> RectifyProcessor::stereoRectify(
   info_pair.left = calib_mat_data_left;
   info_pair.right = calib_mat_data_right;
   info_pair.T_mul_f = T_mul_f;
+  info_pair.cx1_minus_cx2 = cx1_min_cx2;
   for (std::size_t i = 0; i< 3 * 4; i++) {
     info_pair.P[i] = calib_mat_data_left.P[i];
   }

@@ -87,8 +87,9 @@ class RectifyProcessor : public Processor {
       const CvMat* D1, const CvMat* D2, CvSize imageSize,
       const CvMat* matR, const CvMat* matT,
       CvMat* _R1, CvMat* _R2, CvMat* _P1, CvMat* _P2, double* T_mul_f,
+      double *cx1_min_cx2,
       int flags = cv::CALIB_ZERO_DISPARITY, double alpha = -1,
-      CvSize newImgSize = cv::Size());
+      CvSize newImgSize = cvSize(0,0));

   Eigen::Matrix4d loadT(const mynteye::Extrinsics& in);
   void loadCameraMatrix(cv::Mat& K, cv::Mat& D,  // NOLINT
@@ -58,7 +58,7 @@ void Synthetic::InitCalibInfo() {
     intr_left_ = api_->GetIntrinsicsBase(Stream::LEFT);
     intr_right_ = api_->GetIntrinsicsBase(Stream::RIGHT);
     extr_ =  std::make_shared<Extrinsics>(
-        api_->GetExtrinsics(Stream::LEFT, Stream::RIGHT));
+        api_->GetExtrinsics(Stream::RIGHT, Stream::LEFT));
   }

 Synthetic::Synthetic(API *api, CalibrationModel calib_model)

@@ -91,7 +91,7 @@ void Synthetic::NotifyImageParamsChanged() {
     intr_left_ = api_->GetIntrinsicsBase(Stream::LEFT);
     intr_right_ = api_->GetIntrinsicsBase(Stream::RIGHT);
     extr_ =  std::make_shared<Extrinsics>(
-        api_->GetExtrinsics(Stream::LEFT, Stream::RIGHT));
+        api_->GetExtrinsics(Stream::RIGHT, Stream::LEFT));
   }
   auto processor = getProcessorWithStream(Stream::LEFT_RECTIFIED);
@@ -313,10 +313,8 @@ void Synthetic::InitProcessors() {
   std::shared_ptr<Processor> rectify_processor = nullptr;
   std::shared_ptr<Processor> points_processor = nullptr;
   std::shared_ptr<Processor> depth_processor = nullptr;
+  std::shared_ptr<Processor> disparity_processor = nullptr;

-  auto &&disparity_processor =
-      std::make_shared<DisparityProcessor>(DisparityComputingMethod::BM,
-          DISPARITY_PROC_PERIOD);
   auto &&disparitynormalized_processor =
      std::make_shared<DisparityNormalizedProcessor>(
         DISPARITY_NORM_PROC_PERIOD);

@@ -333,6 +331,10 @@ void Synthetic::InitProcessors() {
     points_processor = std::make_shared<PointsProcessorOCV>(
         rectify_processor_ocv->Q, POINTS_PROC_PERIOD);
     depth_processor = std::make_shared<DepthProcessorOCV>(DEPTH_PROC_PERIOD);
+    disparity_processor =
+        std::make_shared<DisparityProcessor>(DisparityComputingMethod::BM,
+            nullptr,
+            DISPARITY_PROC_PERIOD);

     root_processor->AddChild(rectify_processor);
     rectify_processor->AddChild(disparity_processor);

@@ -352,6 +354,10 @@ void Synthetic::InitProcessors() {
     depth_processor = std::make_shared<DepthProcessor>(
         rectify_processor_imp -> getCameraROSMsgInfoPair(),
         DEPTH_PROC_PERIOD);
+    disparity_processor =
+        std::make_shared<DisparityProcessor>(DisparityComputingMethod::BM,
+            rectify_processor_imp -> getCameraROSMsgInfoPair(),
+            DISPARITY_PROC_PERIOD);

     root_processor->AddChild(rectify_processor);
     rectify_processor->AddChild(disparity_processor);
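Both pipeline branches now create the disparity processor themselves, passing the rectifier's CameraROSMsgInfoPair where one exists and nullptr in the plain OpenCV branch, so the processor either picks up cx1_minus_cx2 from the calibration or falls back to its built-in default. A short sketch of that wiring, reusing the class names from the diff (kDisparityProcPeriod stands in for DISPARITY_PROC_PERIOD):

```cpp
#include <cstdint>
#include <memory>

// Sketch: construct and attach a DisparityProcessor with or without
// calibration; pass nullptr when no CameraROSMsgInfoPair is available.
void WireDisparity(const std::shared_ptr<Processor>& rectify_processor,
                   const std::shared_ptr<CameraROSMsgInfoPair>& calib,
                   std::int32_t kDisparityProcPeriod) {
  auto disparity_processor = std::make_shared<DisparityProcessor>(
      DisparityComputingMethod::BM, calib, kDisparityProcPeriod);
  rectify_processor->AddChild(disparity_processor);
}
```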
@@ -70,8 +70,8 @@ const std::map<Model, std::map<Capabilities, StreamRequests>>
     stream_requests_map = {
   {Model::STANDARD,
     {{Capabilities::STEREO, {
-      {752, 480, Format::YUYV, 0},
-      {376, 240, Format::YUYV, 0}}
+      {752, 480, Format::YUYV, 60},
+      {376, 240, Format::YUYV, 60}}
    }}
   },
   {Model::STANDARD2,
@@ -660,7 +660,7 @@ void Device::ReadAllInfos() {
       VLOG(2) << "Intrinsics left: {" << *GetIntrinsics(Stream::LEFT) << "}";
       VLOG(2) << "Intrinsics right: {" << *GetIntrinsics(Stream::RIGHT) << "}";
       VLOG(2) << "Extrinsics left to right: {"
-              << GetExtrinsics(Stream::LEFT, Stream::RIGHT) << "}";
+              << GetExtrinsics(Stream::RIGHT, Stream::LEFT) << "}";
       break;
     }
   }

@@ -709,11 +709,11 @@ void Device::UpdateStreamIntrinsics(
     if (ok) {
       SetIntrinsics(Stream::LEFT, img_params.in_left);
       SetIntrinsics(Stream::RIGHT, img_params.in_right);
-      SetExtrinsics(Stream::LEFT, Stream::RIGHT, img_params.ex_right_to_left);
+      SetExtrinsics(Stream::RIGHT, Stream::LEFT, img_params.ex_right_to_left);
       VLOG(2) << "Intrinsics left: {" << *GetIntrinsics(Stream::LEFT) << "}";
       VLOG(2) << "Intrinsics right: {" << *GetIntrinsics(Stream::RIGHT) << "}";
       VLOG(2) << "Extrinsics left to right: {"
-              << GetExtrinsics(Stream::LEFT, Stream::RIGHT) << "}";
+              << GetExtrinsics(Stream::RIGHT, Stream::LEFT) << "}";
       break;
     }
   }
@@ -69,8 +69,8 @@ enum class Option : int {
   IR_CONTROL,
   /**
    * HDR mode
-   * 0: 10-bit
-   * 1: 12-bit
+   * 0: normal
+   * 1: WDR
    */
   HDR_MODE,
   /**

@@ -69,8 +69,8 @@ public enum Option {
   IR_CONTROL,
   /**
    * HDR mode
-   * 0: 10-bit
-   * 1: 12-bit
+   * 0: normal
+   * 1: WDR
    */
   HDR_MODE,
   /**

@@ -161,8 +161,8 @@ option = enum {
   # range: [0,160], default: 0
   ir_control;
   # HDR mode
-  # 0: 10-bit
-  # 1: 12-bit
+  # 0: normal
+  # 1: WDR
   hdr_mode;

   # The range of accelerometer
@@ -38,7 +38,7 @@ standard/desired_brightness: -1
 standard/ir_control: 80
 # standard/ir_control: 0

-# standard/hdr_mode, 0: 10-bit, 1: 12-bit
+# standard/hdr_mode, 0: normal, 1: WDR
 standard/hdr_mode: -1
 # standard/hdr_mode: 0


@@ -37,7 +37,7 @@ standard2/exposure_mode: -1
 standard2/max_gain: -1
 # standard2/max_gain: 8

-# standard2/max_exposure_time range: [0,1000]
+# standard2/max_exposure_time range: [0,655]
 standard2/max_exposure_time: -1
 # standard2/max_exposure_time: 333

@@ -79,7 +79,7 @@ standard210a/exposure_mode: -1
 standard210a/max_gain: -1
 # standard210a/max_gain: 8

-# standard210a/max_exposure_time range: [0,1000]
+# standard210a/max_exposure_time range: [0,655]
 standard210a/max_exposure_time: -1
 # standard210a/max_exposure_time: 333

@@ -125,7 +125,7 @@ standard200b/exposure_mode: -1
 standard200b/max_gain: -1
 # standard200b/max_gain: 8

-# standard200b/max_exposure_time range: [0,1000]
+# standard200b/max_exposure_time range: [0,655]
 standard200b/max_exposure_time: -1
 # standard200b/max_exposure_time: 333


@@ -38,7 +38,7 @@ standard/desired_brightness: -1
 standard/ir_control: 80
 # standard/ir_control: 0

-# standard/hdr_mode, 0: 10-bit, 1: 12-bit
+# standard/hdr_mode, 0: normal, 1: WDR
 standard/hdr_mode: -1
 # standard/hdr_mode: 0


@@ -38,7 +38,7 @@ standard/desired_brightness: -1
 standard/ir_control: 80
 # standard/ir_control: 0

-# standard/hdr_mode, 0: 10-bit, 1: 12-bit
+# standard/hdr_mode, 0: normal, 1: WDR
 standard/hdr_mode: -1
 # standard/hdr_mode: 0

@@ -123,26 +123,6 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
     return ros::Time(soft_time_begin + time_sec_double);
   }

-  ros::Time imuHardTimeToSoftTime(std::uint64_t _hard_time) {
-    static bool isInited = false;
-    static double soft_time_begin(0);
-    static std::uint64_t hard_time_begin(0);
-
-    if (false == isInited) {
-      soft_time_begin = ros::Time::now().toSec();
-      hard_time_begin = _hard_time;
-      isInited = true;
-    }
-
-    std::uint64_t time_ns_detal = (_hard_time - hard_time_begin);
-    std::uint64_t time_ns_detal_s = time_ns_detal / 1000000;
-    std::uint64_t time_ns_detal_ns = time_ns_detal % 1000000;
-    double time_sec_double =
-        ros::Time(time_ns_detal_s, time_ns_detal_ns * 1000).toSec();
-
-    return ros::Time(soft_time_begin + time_sec_double);
-  }
-
   // ros::Time hardTimeToSoftTime(std::uint64_t _hard_time) {
   //   static bool isInited = false;
   //   static double soft_time_begin(0);
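The deleted imuHardTimeToSoftTime duplicated the mapping already provided by hardTimeToSoftTime: anchor the first hardware stamp to the wall clock, then offset later stamps by the elapsed hardware time (the /1000000 split in the removed body implies microsecond stamps). A condensed sketch of that mapping, illustrative rather than the wrapper's exact code:

```cpp
#include <cstdint>
#include <ros/time.h>

// Sketch: pin the first hardware stamp to ros::Time::now(), then convert the
// elapsed hardware time (assumed microseconds) into a wall-clock offset.
ros::Time HardToSoft(std::uint64_t hard_time) {
  static bool inited = false;
  static double soft_begin = 0.0;
  static std::uint64_t hard_begin = 0;
  if (!inited) {
    soft_begin = ros::Time::now().toSec();
    hard_begin = hard_time;
    inited = true;
  }
  const double elapsed_sec =
      static_cast<double>(hard_time - hard_begin) / 1e6;  // us -> s
  return ros::Time(soft_begin + elapsed_sec);
}
```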
@@ -200,7 +180,7 @@ class ROSWrapperNodelet : public nodelet::Nodelet {

     hard_time_now = _hard_time;

-    return imuHardTimeToSoftTime(
+    return hardTimeToSoftTime(
         acc * unit_hard_time + _hard_time);
   }


@@ -300,12 +280,12 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
       {Option::MAX_EXPOSURE_TIME, "standard2/max_exposure_time"},
       {Option::DESIRED_BRIGHTNESS, "standard2/desired_brightness"},
       {Option::MIN_EXPOSURE_TIME, "standard2/min_exposure_time"},
       {Option::IR_CONTROL, "STANDARD/ir_control"},
       {Option::ACCELEROMETER_RANGE, "standard2/accel_range"},
       {Option::GYROSCOPE_RANGE, "standard2/gyro_range"},
       {Option::ACCELEROMETER_LOW_PASS_FILTER, "standard2/accel_low_filter"},
       {Option::GYROSCOPE_LOW_PASS_FILTER, "standard2/gyro_low_filter"}};
   }

   // device options of standard
   if (model_ == Model::STANDARD) {
     option_names_ = {
@@ -324,20 +304,6 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
       {Option::GYROSCOPE_RANGE, "standard/gyro_range"}};
   }

-  // device options of standard200b
-  if (model_ == Model::STANDARD200B) {
-    option_names_ = {
-      {Option::BRIGHTNESS, "standard200b/brightness"},
-      {Option::EXPOSURE_MODE, "standard200b/exposure_mode"},
-      {Option::MAX_GAIN, "standard200b/max_gain"},
-      {Option::MAX_EXPOSURE_TIME, "standard200b/max_exposure_time"},
-      {Option::DESIRED_BRIGHTNESS, "standard200b/desired_brightness"},
-      {Option::MIN_EXPOSURE_TIME, "standard200b/min_exposure_time"},
-      {Option::ACCELEROMETER_RANGE, "standard200b/accel_range"},
-      {Option::GYROSCOPE_RANGE, "standard200b/gyro_range"},
-      {Option::ACCELEROMETER_LOW_PASS_FILTER, "standard200b/accel_low_filter"},
-      {Option::GYROSCOPE_LOW_PASS_FILTER, "standard200b/gyro_low_filter"}};
-  }
   for (auto &&it = option_names_.begin(); it != option_names_.end(); ++it) {
     if (!api_->Supports(it->first))
       continue;

@@ -364,9 +330,8 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
       NODELET_INFO_STREAM("Advertized on topic " << topic);
     }

-    // Only STANDARD2/STANDARD210A/STANDARD200B need publish mono_topics
-    if (model_ == Model::STANDARD2 ||
-        model_ == Model::STANDARD210A || model_ == Model::STANDARD200B) {
+    // Only STANDARD2/STANDARD210A need publish mono_topics
+    if (model_ == Model::STANDARD2 || model_ == Model::STANDARD210A) {
       for (auto &&it = mono_topics.begin(); it != mono_topics.end(); ++it) {
         auto &&topic = mono_topics[it->first];
         if (it->first == Stream::LEFT ||

@@ -381,8 +346,7 @@ class ROSWrapperNodelet : public nodelet::Nodelet {

     int depth_type = 0;
     private_nh_.getParamCached("depth_type", depth_type);
-    if (model_ == Model::STANDARD2 ||
-        model_ == Model::STANDARD210A || model_ == Model::STANDARD200B) {
+    if (model_ == Model::STANDARD2 || model_ == Model::STANDARD210A) {
       camera_encodings_ = {{Stream::LEFT, enc::BGR8},
                            {Stream::RIGHT, enc::BGR8},
                            {Stream::LEFT_RECTIFIED, enc::BGR8},
@@ -833,7 +797,6 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
     pthread_mutex_unlock(&mutex_data_);
     auto &&info = getCameraInfo(stream);
     info->header.stamp = msg->header.stamp;
     info->header.frame_id = frame_ids_[stream];
     camera_publishers_[stream].publish(msg, info);
   }


@@ -880,7 +843,7 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
     // return;

     auto &&in = api_->GetIntrinsicsBase(Stream::LEFT);
     in -> ResizeIntrinsics();

     sensor_msgs::PointCloud2 msg;
     msg.header.seq = seq;
     msg.header.stamp = stamp;

@@ -1112,8 +1075,7 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
     int request_index = 0;

     model_ = api_->GetModel();
-    if (model_ == Model::STANDARD2 ||
-        model_ == Model::STANDARD210A || model_ == Model::STANDARD200B) {
+    if (model_ == Model::STANDARD2 || model_ == Model::STANDARD210A) {
       private_nh_.getParamCached("standard2/request_index", request_index);
       switch (request_index) {
         case 0:
@@ -1140,7 +1102,7 @@ class ROSWrapperNodelet : public nodelet::Nodelet {

     std::int32_t process_mode = 0;
     if (model_ == Model::STANDARD2 ||
-        model_ == Model::STANDARD210A || model_ == Model::STANDARD200B) {
+        model_ == Model::STANDARD210A) {
       private_nh_.getParamCached("standard2/imu_process_mode", process_mode);
       api_->EnableProcessMode(process_mode);
     }
@@ -1218,15 +1180,61 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
     return nullptr;
   }

+  std::shared_ptr<IntrinsicsBase> getDefaultIntrinsics() {
+    auto res = std::make_shared<IntrinsicsPinhole>();
+    res->width = 640;
+    res->height = 400;
+    res->model = 0;
+    res->fx = 3.6220059643202876e+02;
+    res->fy = 3.6350065250745848e+02;
+    res->cx = 4.0658699068023441e+02;
+    res->cy = 2.3435161110061483e+02;
+    double codffs[5] = {
+      -2.5034765682756088e-01,
+      5.0579399202897619e-02,
+      -7.0536676161976066e-04,
+      -8.5255451307033846e-03,
+      0.
+    };
+    for (unsigned int i = 0; i < 5; i++) {
+      res->coeffs[i] = codffs[i];
+    }
+    return res;
+  }
+
+  std::shared_ptr<Extrinsics> getDefaultExtrinsics() {
+    auto res = std::make_shared<Extrinsics>();
+    double rotation[9] = {
+      9.9867908939669447e-01, -6.3445566137485428e-03, 5.0988459509619687e-02,
+      5.9890316389333252e-03, 9.9995670037792639e-01, 7.1224201868366971e-03,
+      -5.1031440326695092e-02, -6.8076406092671274e-03, 9.9867384471984544e-01
+    };
+    double translation[3] = {-1.2002489764113250e+02, -1.1782637409050747e+00,
+                             -5.2058205159996538e+00};
+    for (unsigned int i = 0; i < 3; i++) {
+      for (unsigned int j = 0; j < 3; j++) {
+        res->rotation[i][j] = rotation[i*3 + j];
+      }
+    }
+    for (unsigned int i = 0; i < 3; i++) {
+      res->translation[i] = translation[i];
+    }
+    return res;
+  }
+
   void publishMesh() {
     mesh_msg_.header.frame_id = base_frame_id_;
     mesh_msg_.header.stamp = ros::Time::now();
     mesh_msg_.type = visualization_msgs::Marker::MESH_RESOURCE;
+    geometry_msgs::Quaternion q;
+    double Pie = 3.1416;
+    q = tf::createQuaternionMsgFromRollPitchYaw(Pie/2, 0.0, Pie/2);

     // fill orientation
-    mesh_msg_.pose.orientation.x = 1;
-    mesh_msg_.pose.orientation.y = 1;
-    mesh_msg_.pose.orientation.z = 1;
-    mesh_msg_.pose.orientation.w = 1;
+    mesh_msg_.pose.orientation.x = q.x;
+    mesh_msg_.pose.orientation.y = q.y;
+    mesh_msg_.pose.orientation.z = q.z;
+    mesh_msg_.pose.orientation.w = q.w;

     // fill position
     mesh_msg_.pose.position.x = 0;
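The new defaults describe an IntrinsicsPinhole: image size, focal lengths fx/fy, principal point cx/cy, and five distortion coefficients, presumably in OpenCV's (k1, k2, p1, p2, k3) order given their count and magnitudes. For reference, ignoring distortion, those parameters project a camera-frame point as in the generic sketch below (not SDK code):

```cpp
// Sketch: ideal pinhole projection with the parameters carried by the
// default intrinsics above; distortion coefficients are ignored here.
struct PixelUV { double u; double v; };

PixelUV ProjectPinhole(double X, double Y, double Z,
                       double fx, double fy, double cx, double cy) {
  const double x = X / Z;  // normalized image coordinates
  const double y = Y / Z;
  return {fx * x + cx, fy * y + cy};
}
```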
@@ -1239,7 +1247,7 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
     mesh_msg_.scale.z = 0.003;

     mesh_msg_.action = visualization_msgs::Marker::ADD;
-    mesh_msg_.color.a = 1.0;  // Don't forget to set the alpha!
+    mesh_msg_.color.a = 0.5;  // Don't forget to set the alpha!
     mesh_msg_.color.r = 1.0;
     mesh_msg_.color.g = 1.0;
     mesh_msg_.color.b = 1.0;
@@ -1255,15 +1263,24 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
     ROS_ASSERT(api_);
     auto in_left_base = api_->GetIntrinsicsBase(Stream::LEFT);
     auto in_right_base = api_->GetIntrinsicsBase(Stream::RIGHT);
+    is_intrinsics_enable_ = in_left_base && in_right_base;
+    if (is_intrinsics_enable_) {
       if (in_left_base->calib_model() != CalibrationModel::PINHOLE ||
           in_right_base->calib_model() != CalibrationModel::PINHOLE) {
         return;
       }
+    } else {
+      in_left_base = getDefaultIntrinsics();
+      in_right_base = getDefaultIntrinsics();
+    }

     auto in_left = *std::dynamic_pointer_cast<IntrinsicsPinhole>(in_left_base);
     auto in_right = *std::dynamic_pointer_cast<IntrinsicsPinhole>(
        in_right_base);
     auto ex_right_to_left = api_->GetExtrinsics(Stream::RIGHT, Stream::LEFT);
+    if (!is_intrinsics_enable_) {
+      ex_right_to_left = *(getDefaultExtrinsics());
+    }

     cv::Size size{in_left.width, in_left.height};
     cv::Mat M1 =
@@ -1491,9 +1508,9 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
     l2i_msg.header.stamp = tf_stamp;
     l2i_msg.header.frame_id = frame_ids_[Stream::LEFT];
     l2i_msg.child_frame_id = imu_frame_id_;
-    l2i_msg.transform.translation.x = 0;
-    l2i_msg.transform.translation.y = 0;
-    l2i_msg.transform.translation.z = 0;
+    l2i_msg.transform.translation.x = l2i_ex.translation[0];
+    l2i_msg.transform.translation.y = l2i_ex.translation[1];
+    l2i_msg.transform.translation.z = l2i_ex.translation[2];
     if (l2i_ex.rotation[0][0] == 0 && l2i_ex.rotation[2][2] == 0) {
       l2i_msg.transform.rotation.x = 0;
       l2i_msg.transform.rotation.y = 0;
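The left-to-IMU transform now carries the real extrinsic translation instead of zeros, with a guard for an all-zero rotation matrix. The remainder of this hunk is not shown in the compare view; as a generic sketch (not the SDK's code), the rotation part of such a static transform is typically filled by converting the 3x3 extrinsic rotation into the quaternion the message expects:

```cpp
#include <tf/LinearMath/Matrix3x3.h>
#include <tf/LinearMath/Quaternion.h>

// Sketch: convert a row-major 3x3 rotation (e.g. Extrinsics::rotation) into a
// tf quaternion suitable for a geometry_msgs TransformStamped.
tf::Quaternion RotationToQuaternion(const double r[3][3]) {
  tf::Matrix3x3 m(r[0][0], r[0][1], r[0][2],
                  r[1][0], r[1][1], r[1][2],
                  r[2][0], r[2][1], r[2][2]);
  tf::Quaternion q;
  m.getRotation(q);
  return q;
}
```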