Merge branch 'develop' into feature/android

* develop: (28 commits)
  feat(*): add function to query some hardware info
  feat: forbid 2100/210A unless sdk updated to 2.3.1 and above
  refactor(synthetic): remove useless logic
  chore(readme): update readme
  docs(doxyfile): update version
  fix: change cmake version to 2.3.2
  feat(api): sdk/firmware version check
  feat(api): version check
  fix(wrapper): fix camera info repeat bug
  build(makefile): ensure uninstall before install
  fix(correspondence): also wait stream matched ready
  fix(record): shield disable logic temporarily
  chore(readme): update readme
  chore(readme): update readme
  chore(doc): update version
  fix(samples): delete useless comment
  fix(ros): fix camera info bug
  fix(correspondence): improve warning if not start motion tracking
  fix(correspondence): fix include header
  fix(ros): record close bug
  ...
John Zhao
2019-02-28 15:58:55 +08:00
36 changed files with 1169 additions and 304 deletions

View File

@@ -104,7 +104,7 @@ struct MYNTEYE_API MotionData {
ImuData imu;
bool operator==(const MotionData &other) const {
return imu.timestamp == other.imu.timestamp;
}
};
@@ -247,6 +247,8 @@ BOOST_PYTHON_MODULE(mynteye_py) {
.value("LENS_TYPE", Info::LENS_TYPE)
.value("IMU_TYPE", Info::IMU_TYPE)
.value("NOMINAL_BASELINE", Info::NOMINAL_BASELINE)
.value("AUXILIARY_CHIP_VERSION", Info::AUXILIARY_CHIP_VERSION)
.value("ISP_VERSION", Info::ISP_VERSION)
#ifdef ENUM_EXPORT_VALUES
.export_values()
#endif
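
These two enum values mirror the new Info::AUXILIARY_CHIP_VERSION and Info::ISP_VERSION entries exposed by the core SDK. A minimal C++ sketch of querying them directly through the API is below; the header path, the MYNTEYE_USE_NAMESPACE macro, and the API::Create(argc, argv) factory are assumptions based on the public SDK samples, not on this commit.

// Minimal sketch (not part of this commit): query the two new info fields.
// Assumes the header layout and API::Create factory of the public
// MYNT-EYE-S SDK samples.
#include <iostream>

#include "mynteye/api/api.h"

MYNTEYE_USE_NAMESPACE

int main(int argc, char *argv[]) {
  auto &&api = API::Create(argc, argv);
  if (!api) return 1;

  // GetInfo(Info) returns the value as a string, as the nodelet code below uses it.
  std::cout << "Auxiliary chip version: "
            << api->GetInfo(Info::AUXILIARY_CHIP_VERSION) << std::endl;
  std::cout << "ISP version: "
            << api->GetInfo(Info::ISP_VERSION) << std::endl;
  return 0;
}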

View File

@@ -153,7 +153,7 @@
<!-- <arg name="standard2/ir_control" default="0" /> -->
<!-- standard2/accel_range range: [6,48] -->
<arg name="standard2/accel_range" default="-1" />
<arg name="standard2/accel_range" default="24" />
<!-- <arg name="standard2/accel_range" default="6" /> -->
<!-- standard2/gyro_range range: [250,4000] -->
@@ -347,6 +347,10 @@
- 'image_transport/compressedDepth'
</rosparam>
</group>
<group ns="$(arg depth_topic)">
<rosparam param="disable_pub_plugins">
- 'image_transport/compressedDepth'
</rosparam>
</group>
</group> <!-- mynteye -->
</launch>

View File

@@ -54,6 +54,8 @@ def main():
'LENS_TYPE': GetInfoRequest.LENS_TYPE,
'IMU_TYPE': GetInfoRequest.IMU_TYPE,
'NOMINAL_BASELINE': GetInfoRequest.NOMINAL_BASELINE,
'AUXILIARY_CHIP_VERSION': GetInfoRequest.AUXILIARY_CHIP_VERSION,
'ISP_VERSION': GetInfoRequest.ISP_VERSION,
}
for k, v in get_device_info(**keys).items():
print('{}: {}'.format(k, v))

View File

@@ -315,7 +315,7 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
for (auto &&it = mono_topics.begin(); it != mono_topics.end(); ++it) {
auto &&topic = mono_topics[it->first];
if (it->first == Stream::LEFT || it->first == Stream::RIGHT) {
mono_publishers_[it->first] = it_mynteye.advertiseCamera(topic, 1);
mono_publishers_[it->first] = it_mynteye.advertise(topic, 1);
}
NODELET_INFO_STREAM("Advertized on topic " << topic);
}
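
The hunk above switches the LEFT/RIGHT mono topics from advertiseCamera (which pairs each image with a camera_info message) to the plain image_transport advertise/publish pair. A self-contained sketch of that plain-publisher pattern follows; the node name and topic name are made up for illustration and are not taken from this commit.

// Sketch of the plain image_transport publisher pattern used for the mono
// topics above (image only, no paired CameraInfo). Node and topic names are
// illustrative.
#include <image_transport/image_transport.h>
#include <ros/ros.h>
#include <sensor_msgs/Image.h>

int main(int argc, char *argv[]) {
  ros::init(argc, argv, "mono_pub_sketch");
  ros::NodeHandle nh;
  image_transport::ImageTransport it(nh);

  // advertise() returns image_transport::Publisher; advertiseCamera() would
  // instead return a CameraPublisher that also expects a CameraInfo.
  image_transport::Publisher pub = it.advertise("left/image_mono", 1);

  sensor_msgs::ImagePtr msg(new sensor_msgs::Image());
  ros::Rate rate(10);
  while (ros::ok()) {
    pub.publish(msg);  // publish the image only, no camera info
    ros::spinOnce();
    rate.sleep();
  }
  return 0;
}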
@@ -407,6 +407,12 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
case Request::NOMINAL_BASELINE:
res.value = api_->GetInfo(Info::NOMINAL_BASELINE);
break;
case Request::AUXILIARY_CHIP_VERSION:
res.value = api_->GetInfo(Info::AUXILIARY_CHIP_VERSION);
break;
case Request::ISP_VERSION:
res.value = api_->GetInfo(Info::ISP_VERSION);
break;
case Request::IMG_INTRINSICS:
{
auto intri_left = api_->GetIntrinsicsBase(Stream::LEFT);
@@ -554,75 +560,41 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
return true;
}
void SetIsPublished(const Stream &stream) {
is_published_[stream] = false;
switch (stream) {
case Stream::LEFT_RECTIFIED: {
if (is_published_[Stream::RIGHT_RECTIFIED]) {
SetIsPublished(Stream::RIGHT_RECTIFIED);
}
if (is_published_[Stream::DISPARITY]) {
SetIsPublished(Stream::DISPARITY);
}
} break;
case Stream::RIGHT_RECTIFIED: {
if (is_published_[Stream::LEFT_RECTIFIED]) {
SetIsPublished(Stream::LEFT_RECTIFIED);
}
if (is_published_[Stream::DISPARITY]) {
SetIsPublished(Stream::DISPARITY);
}
} break;
case Stream::DISPARITY: {
if (is_published_[Stream::DISPARITY_NORMALIZED]) {
SetIsPublished(Stream::DISPARITY_NORMALIZED);
}
if (is_published_[Stream::POINTS]) {
SetIsPublished(Stream::POINTS);
}
} break;
case Stream::DISPARITY_NORMALIZED: {
} break;
case Stream::POINTS: {
if (is_published_[Stream::DEPTH]) {
SetIsPublished(Stream::DEPTH);
}
} break;
case Stream::DEPTH: {
} break;
default:
return;
void publishData(
const Stream &stream, const api::StreamData &data, std::uint32_t seq,
ros::Time stamp) {
if (stream == Stream::LEFT || stream == Stream::RIGHT) {
return;
} else if (stream == Stream::POINTS) {
publishPoints(data, seq, stamp);
} else {
publishCamera(stream, data, seq, stamp);
}
}
void publishPoint(const Stream &stream) {
auto &&points_num = points_publisher_.getNumSubscribers();
if (points_num == 0 && is_published_[stream]) {
SetIsPublished(stream);
api_->DisableStreamData(stream);
} else if (points_num > 0 && !is_published_[Stream::POINTS]) {
api_->EnableStreamData(Stream::POINTS);
api_->SetStreamCallback(
Stream::POINTS, [this](const api::StreamData &data) {
// ros::Time stamp = hardTimeToSoftTime(data.img->timestamp);
ros::Time stamp = checkUpTimeStamp(
data.img->timestamp, Stream::POINTS);
static std::size_t count = 0;
++count;
publishPoints(data, count, stamp);
});
is_published_[Stream::POINTS] = true;
int getStreamSubscribers(const Stream &stream) {
if (stream == Stream::POINTS) {
return points_publisher_.getNumSubscribers();
}
auto pub = camera_publishers_[stream];
if (pub)
return pub.getNumSubscribers();
return -1;
}
void publishOthers(const Stream &stream) {
auto stream_num = camera_publishers_[stream].getNumSubscribers();
if (stream_num == 0 && is_published_[stream]) {
// Stop computing when the stream is not subscribed
SetIsPublished(stream);
api_->DisableStreamData(stream);
} else if (stream_num > 0 && !is_published_[stream]) {
// Start computing and publishing when the stream is subscribed
// std::cout << stream << "===============================" << std::endl;
// int enable_tag = 0;
// api_->EnableStreamData(stream, [&](const Stream &stream) {
// enable_tag += getStreamSubscribers(stream);
// std::cout << "EnableStreamData callback test"
// << stream << "|| Sum:"
// << getStreamSubscribers(stream) << std::endl;
// }, true);
if (getStreamSubscribers(stream) > 0 && !is_published_[stream]) {
// std::cout << stream
// <<" enableStreamData tag = 0 return" << std::endl;
// std::cout << "enable " << stream << std::endl;
api_->EnableStreamData(stream);
api_->SetStreamCallback(
stream, [this, stream](const api::StreamData &data) {
@@ -631,81 +603,121 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
data.img->timestamp, stream);
static std::size_t count = 0;
++count;
publishCamera(stream, data, count, stamp);
publishData(stream, data, count, stamp);
});
is_published_[stream] = true;
return;
}
int disable_tag = 0;
api_->DisableStreamData(stream, [&](const Stream &stream) {
disable_tag += getStreamSubscribers(stream);
// std::cout << "DisableStreamData callback test: "
// << stream << "|| Sum:"<< getStreamSubscribers(stream) << std::endl;
}, true);
if (disable_tag == 0 && is_published_[stream]) {
api_->DisableStreamData(stream, [&](const Stream &stream) {
// std::cout << "disable " << stream << std::endl;
api_->SetStreamCallback(stream, nullptr);
is_published_[stream] = false;
});
return;
}
}
void publishTopics() {
if ((camera_publishers_[Stream::LEFT].getNumSubscribers() > 0 ||
mono_publishers_[Stream::LEFT].getNumSubscribers() > 0) &&
!is_published_[Stream::LEFT]) {
api_->SetStreamCallback(
Stream::LEFT, [&](const api::StreamData &data) {
++left_count_;
if (left_count_ > 10) {
// ros::Time stamp = hardTimeToSoftTime(data.img->timestamp);
ros::Time stamp = checkUpTimeStamp(
data.img->timestamp, Stream::LEFT);
std::vector<Stream> all_streams{
Stream::RIGHT,
Stream::LEFT,
Stream::LEFT_RECTIFIED,
Stream::RIGHT_RECTIFIED,
Stream::DISPARITY,
Stream::DISPARITY_NORMALIZED,
Stream::POINTS,
Stream::DEPTH
};
// static double img_time_prev = -1;
// NODELET_INFO_STREAM("ros_time_beg: " << FULL_PRECISION <<
// ros_time_beg
// << ", img_time_elapsed: " << FULL_PRECISION
// << ((data.img->timestamp - img_time_beg) * 0.00001f)
// << ", img_time_diff: " << FULL_PRECISION
// << ((img_time_prev < 0) ? 0
// : (data.img->timestamp - img_time_prev) * 0.01f) << "
// ms");
// img_time_prev = data.img->timestamp;
publishCamera(Stream::LEFT, data, left_count_, stamp);
publishMono(Stream::LEFT, data, left_count_, stamp);
NODELET_DEBUG_STREAM(
Stream::LEFT << ", count: " << left_count_
<< ", frame_id: " << data.img->frame_id
<< ", timestamp: " << data.img->timestamp
<< ", exposure_time: " << data.img->exposure_time);
}
});
left_time_beg_ = ros::Time::now().toSec();
is_published_[Stream::LEFT] = true;
static int sum = 0;
int sum_c = 0;
for (auto &&stream : all_streams) {
sum_c += getStreamSubscribers(stream);
}
if ((camera_publishers_[Stream::RIGHT].getNumSubscribers() > 0 ||
mono_publishers_[Stream::RIGHT].getNumSubscribers() > 0) &&
!is_published_[Stream::RIGHT]) {
api_->SetStreamCallback(
Stream::RIGHT, [&](const api::StreamData &data) {
++right_count_;
if (right_count_ > 10) {
// ros::Time stamp = hardTimeToSoftTime(data.img->timestamp);
ros::Time stamp = checkUpTimeStamp(
data.img->timestamp, Stream::RIGHT);
publishCamera(Stream::RIGHT, data, right_count_, stamp);
publishMono(Stream::RIGHT, data, right_count_, stamp);
NODELET_DEBUG_STREAM(
Stream::RIGHT << ", count: " << right_count_
<< ", frame_id: " << data.img->frame_id
<< ", timestamp: " << data.img->timestamp
<< ", exposure_time: " << data.img->exposure_time);
}
});
right_time_beg_ = ros::Time::now().toSec();
is_published_[Stream::RIGHT] = true;
}
std::vector<Stream> other_streams{
Stream::LEFT_RECTIFIED, Stream::RIGHT_RECTIFIED,
Stream::DISPARITY, Stream::DISPARITY_NORMALIZED,
Stream::POINTS, Stream::DEPTH};
for (auto &&stream : other_streams) {
if (stream != Stream::POINTS) {
publishOthers(stream);
if (sum_c != sum) {
if (sum_c == 0) {
api_->Stop(Source::VIDEO_STREAMING);
for (auto &&stream : all_streams) {
is_published_[stream] = false;
}
api_->Start(Source::VIDEO_STREAMING);
} else {
publishPoint(stream);
if ((camera_publishers_[Stream::LEFT].getNumSubscribers() > 0 ||
mono_publishers_[Stream::LEFT].getNumSubscribers() > 0) &&
!is_published_[Stream::LEFT]) {
api_->SetStreamCallback(
Stream::LEFT, [&](const api::StreamData &data) {
++left_count_;
if (left_count_ > 10) {
// ros::Time stamp = hardTimeToSoftTime(data.img->timestamp);
ros::Time stamp = checkUpTimeStamp(
data.img->timestamp, Stream::LEFT);
// static double img_time_prev = -1;
// NODELET_INFO_STREAM("ros_time_beg: " << FULL_PRECISION <<
// ros_time_beg
// << ", img_time_elapsed: " << FULL_PRECISION
// << ((data.img->timestamp - img_time_beg) * 0.00001f)
// << ", img_time_diff: " << FULL_PRECISION
// << ((img_time_prev < 0) ? 0
// : (data.img->timestamp - img_time_prev) * 0.01f) << "
// ms");
// img_time_prev = data.img->timestamp;
publishCamera(Stream::LEFT, data, left_count_, stamp);
publishMono(Stream::LEFT, data, left_count_, stamp);
NODELET_DEBUG_STREAM(
Stream::LEFT << ", count: " << left_count_
<< ", frame_id: " << data.img->frame_id
<< ", timestamp: " << data.img->timestamp
<< ", exposure_time: " << data.img->exposure_time);
}
});
left_time_beg_ = ros::Time::now().toSec();
is_published_[Stream::LEFT] = true;
}
if ((camera_publishers_[Stream::RIGHT].getNumSubscribers() > 0 ||
mono_publishers_[Stream::RIGHT].getNumSubscribers() > 0) &&
!is_published_[Stream::RIGHT]) {
api_->SetStreamCallback(
Stream::RIGHT, [&](const api::StreamData &data) {
++right_count_;
if (right_count_ > 10) {
// ros::Time stamp = hardTimeToSoftTime(data.img->timestamp);
ros::Time stamp = checkUpTimeStamp(
data.img->timestamp, Stream::RIGHT);
publishCamera(Stream::RIGHT, data, right_count_, stamp);
publishMono(Stream::RIGHT, data, right_count_, stamp);
NODELET_DEBUG_STREAM(
Stream::RIGHT << ", count: " << right_count_
<< ", frame_id: " << data.img->frame_id
<< ", timestamp: " << data.img->timestamp
<< ", exposure_time: " << data.img->exposure_time);
}
});
right_time_beg_ = ros::Time::now().toSec();
is_published_[Stream::RIGHT] = true;
}
std::vector<Stream> other_streams{
Stream::LEFT_RECTIFIED, Stream::RIGHT_RECTIFIED,
Stream::DISPARITY, Stream::DISPARITY_NORMALIZED,
Stream::POINTS, Stream::DEPTH
};
for (auto &&stream : other_streams) {
publishOthers(stream);
}
}
sum = sum_c;
}
if (!is_motion_published_) {
@@ -822,9 +834,7 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
cv::cvtColor(data.frame, mono, CV_RGB2GRAY);
auto &&msg = cv_bridge::CvImage(header, enc::MONO8, mono).toImageMsg();
pthread_mutex_unlock(&mutex_data_);
auto &&info = getCameraInfo(stream);
info->header.stamp = msg->header.stamp;
mono_publishers_[stream].publish(msg, info);
mono_publishers_[stream].publish(msg);
}
void publishPoints(
@@ -1284,7 +1294,8 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
}
for (int i = 0; i < p.rows; i++) {
for (int j = 0; j < p.cols; j++) {
camera_info->P.at(i * p.cols + j) = p.at<double>(i, j);
int scale = (i == 2 && j == 2)?1:1000;
camera_info->P.at(i * p.cols + j) = p.at<double>(i, j) / scale;
}
}
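
The loop above now rescales every projection-matrix entry by 1000 except P(2,2), which is left at 1 so the homogeneous scale is preserved; the diff does not state the reason for the factor, so it is presumably a unit or fixed-point rescale of the stored calibration. A standalone sketch of the same rescale, with made-up input values, is below.

// Standalone illustration of the P-matrix rescale above: divide every entry
// by 1000 except the homogeneous P(2,2). Input values are made up.
#include <cstdio>

int main() {
  double p[3][4] = {
      {700.0, 0.0, 640.0, -84000.0},  // fx, 0, cx, -fx * baseline
      {0.0, 700.0, 400.0, 0.0},       // 0, fy, cy, 0
      {0.0, 0.0, 1.0, 0.0}};          // 0, 0, 1, 0
  for (int i = 0; i < 3; i++) {
    for (int j = 0; j < 4; j++) {
      int scale = (i == 2 && j == 2) ? 1 : 1000;
      p[i][j] /= scale;
    }
    std::printf("%g %g %g %g\n", p[i][0], p[i][1], p[i][2], p[i][3]);
  }
  return 0;
}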
@@ -1510,7 +1521,7 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
std::map<Stream, std::string> image_encodings_;
// mono: LEFT, RIGHT
std::map<Stream, image_transport::CameraPublisher> mono_publishers_;
std::map<Stream, image_transport::Publisher> mono_publishers_;
// pointcloud: POINTS
ros::Publisher points_publisher_;

View File

@@ -6,10 +6,12 @@ uint32 SPEC_VERSION=4
uint32 LENS_TYPE=5
uint32 IMU_TYPE=6
uint32 NOMINAL_BASELINE=7
uint32 IMG_INTRINSICS=8
uint32 IMG_EXTRINSICS_RTOL=9
uint32 IMU_INTRINSICS=10
uint32 IMU_EXTRINSICS=11
uint32 AUXILIARY_CHIP_VERSION=8
uint32 ISP_VERSION=9
uint32 IMG_INTRINSICS=10
uint32 IMG_EXTRINSICS_RTOL=11
uint32 IMU_INTRINSICS=12
uint32 IMU_EXTRINSICS=13
uint32 key
---
string value
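
Note that inserting AUXILIARY_CHIP_VERSION and ISP_VERSION shifts the numeric values of IMG_INTRINSICS and the later keys by two, so callers should use the generated named constants rather than hard-coded numbers. A minimal roscpp client sketch follows; the package name mynt_eye_ros_wrapper and the service name /mynteye/get_info are assumptions for illustration and are not confirmed by this diff.

// Minimal sketch of a roscpp client for the extended service. The package
// name (mynt_eye_ros_wrapper) and service name (/mynteye/get_info) are
// assumptions; use whatever the wrapper actually advertises.
#include <ros/ros.h>

#include <mynt_eye_ros_wrapper/GetInfo.h>

int main(int argc, char *argv[]) {
  ros::init(argc, argv, "get_info_sketch");
  ros::NodeHandle nh;
  ros::ServiceClient client =
      nh.serviceClient<mynt_eye_ros_wrapper::GetInfo>("/mynteye/get_info");

  mynt_eye_ros_wrapper::GetInfo srv;
  // Use the generated constants, not literal numbers: the values of
  // IMG_INTRINSICS and the keys after it changed in this commit.
  srv.request.key = mynt_eye_ros_wrapper::GetInfoRequest::ISP_VERSION;

  if (client.call(srv)) {
    ROS_INFO_STREAM("ISP_VERSION: " << srv.response.value);
  } else {
    ROS_ERROR("get_info call failed");
  }
  return 0;
}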