Add get stream data tutorials
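Adds data-stream tutorials under samples/tutorials/data: get_stereo, get_stereo_rectified, get_depth, get_disparity, get_points, get_img_params, and get_imu_params, plus a small PCL-based point-cloud viewer helper (pcviewer.h/.cc) used by get_points.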
42  samples/tutorials/data/get_depth.cc  Normal file
@@ -0,0 +1,42 @@
#include <opencv2/highgui/highgui.hpp>

#include <glog/logging.h>

#include "mynteye/api.h"

MYNTEYE_USE_NAMESPACE

int main(int argc, char *argv[]) {
  auto &&api = API::Create(argc, argv);

  api->EnableStreamData(Stream::DEPTH);

  api->Start(Source::VIDEO_STREAMING);

  cv::namedWindow("frame");
  cv::namedWindow("depth");

  while (true) {
    api->WaitForStreams();

    auto &&left_data = api->GetStreamData(Stream::LEFT);
    auto &&right_data = api->GetStreamData(Stream::RIGHT);

    cv::Mat img;
    cv::hconcat(left_data.frame, right_data.frame, img);
    cv::imshow("frame", img);

    auto &&depth_data = api->GetStreamData(Stream::DEPTH);
    if (!depth_data.frame.empty()) {
      cv::imshow("depth", depth_data.frame);  // CV_16UC1
    }

    char key = static_cast<char>(cv::waitKey(1));
    if (key == 27 || key == 'q' || key == 'Q') {  // ESC/Q
      break;
    }
  }

  api->Stop(Source::VIDEO_STREAMING);
  return 0;
}
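Note (not part of this commit): the depth frame is CV_16UC1, and cv::imshow scales 16-bit images by 1/256 for display, so the window can look mostly black. A minimal sketch of a friendlier visualization, assuming depth values in millimetres and an assumed 10 m clipping range:

  // Hypothetical display helper; the 10000 mm full-scale value is an assumption.
  cv::Mat depth8;
  depth_data.frame.convertTo(depth8, CV_8UC1, 255.0 / 10000.0);
  cv::imshow("depth", depth8);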
49  samples/tutorials/data/get_disparity.cc  Normal file
@@ -0,0 +1,49 @@
#include <opencv2/highgui/highgui.hpp>

#include <glog/logging.h>

#include "mynteye/api.h"

MYNTEYE_USE_NAMESPACE

int main(int argc, char *argv[]) {
  auto &&api = API::Create(argc, argv);

  // api->EnableStreamData(Stream::DISPARITY);
  api->EnableStreamData(Stream::DISPARITY_NORMALIZED);

  api->Start(Source::VIDEO_STREAMING);

  cv::namedWindow("frame");
  // cv::namedWindow("disparity");
  cv::namedWindow("disparity_normalized");

  while (true) {
    api->WaitForStreams();

    auto &&left_data = api->GetStreamData(Stream::LEFT);
    auto &&right_data = api->GetStreamData(Stream::RIGHT);

    cv::Mat img;
    cv::hconcat(left_data.frame, right_data.frame, img);
    cv::imshow("frame", img);

    // auto &&disp_data = api->GetStreamData(Stream::DISPARITY);
    // if (!disp_data.frame.empty()) {
    //   cv::imshow("disparity", disp_data.frame);
    // }

    auto &&disp_norm_data = api->GetStreamData(Stream::DISPARITY_NORMALIZED);
    if (!disp_norm_data.frame.empty()) {
      cv::imshow("disparity_normalized", disp_norm_data.frame);  // CV_8UC1
    }

    char key = static_cast<char>(cv::waitKey(1));
    if (key == 27 || key == 'q' || key == 'Q') {  // ESC/Q
      break;
    }
  }

  api->Stop(Source::VIDEO_STREAMING);
  return 0;
}
17  samples/tutorials/data/get_img_params.cc  Normal file
@@ -0,0 +1,17 @@
#include <glog/logging.h>

#include "mynteye/api.h"

MYNTEYE_USE_NAMESPACE

int main(int argc, char *argv[]) {
  auto &&api = API::Create(argc, argv);

  LOG(INFO) << "Intrinsics left: {" << api->GetIntrinsics(Stream::LEFT) << "}";
  LOG(INFO) << "Intrinsics right: {" << api->GetIntrinsics(Stream::RIGHT)
            << "}";
  LOG(INFO) << "Extrinsics left to right: {"
            << api->GetExtrinsics(Stream::LEFT, Stream::RIGHT) << "}";

  return 0;
}
15  samples/tutorials/data/get_imu_params.cc  Normal file
@@ -0,0 +1,15 @@
#include <glog/logging.h>

#include "mynteye/api.h"

MYNTEYE_USE_NAMESPACE

int main(int argc, char *argv[]) {
  auto &&api = API::Create(argc, argv);

  LOG(INFO) << "Motion intrinsics: {" << api->GetMotionIntrinsics() << "}";
  LOG(INFO) << "Motion extrinsics left to imu: {"
            << api->GetMotionExtrinsics(Stream::LEFT) << "}";

  return 0;
}
47  samples/tutorials/data/get_points.cc  Normal file
@@ -0,0 +1,47 @@
#include <opencv2/highgui/highgui.hpp>

#include <glog/logging.h>

#include "mynteye/api.h"

#include "data/pcviewer.h"

MYNTEYE_USE_NAMESPACE

int main(int argc, char *argv[]) {
  auto &&api = API::Create(argc, argv);

  api->EnableStreamData(Stream::POINTS);

  api->Start(Source::VIDEO_STREAMING);

  cv::namedWindow("frame");
  PCViewer pcviewer;

  while (true) {
    api->WaitForStreams();

    auto &&left_data = api->GetStreamData(Stream::LEFT);
    auto &&right_data = api->GetStreamData(Stream::RIGHT);

    cv::Mat img;
    cv::hconcat(left_data.frame, right_data.frame, img);
    cv::imshow("frame", img);

    auto &&points_data = api->GetStreamData(Stream::POINTS);
    if (!points_data.frame.empty()) {
      pcviewer.Draw(points_data.frame);
    }

    char key = static_cast<char>(cv::waitKey(1));
    if (key == 27 || key == 'q' || key == 'Q') {  // ESC/Q
      break;
    }
    if (pcviewer.WasDrew() && pcviewer.WasStopped()) {
      break;
    }
  }

  api->Stop(Source::VIDEO_STREAMING);
  return 0;
}
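Note: PCViewer consumes points_data.frame as a 3-channel float image, one cv::Point3f (x, y, z) per pixel; see ConvertMatToPointCloud in pcviewer.cc below.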
34  samples/tutorials/data/get_stereo.cc  Normal file
@@ -0,0 +1,34 @@
#include <opencv2/highgui/highgui.hpp>

#include <glog/logging.h>

#include "mynteye/api.h"

MYNTEYE_USE_NAMESPACE

int main(int argc, char *argv[]) {
  auto &&api = API::Create(argc, argv);

  api->Start(Source::VIDEO_STREAMING);

  cv::namedWindow("frame");

  while (true) {
    api->WaitForStreams();

    auto &&left_data = api->GetStreamData(Stream::LEFT);
    auto &&right_data = api->GetStreamData(Stream::RIGHT);

    cv::Mat img;
    cv::hconcat(left_data.frame, right_data.frame, img);
    cv::imshow("frame", img);

    char key = static_cast<char>(cv::waitKey(1));
    if (key == 27 || key == 'q' || key == 'Q') {  // ESC/Q
      break;
    }
  }

  api->Stop(Source::VIDEO_STREAMING);
  return 0;
}
39  samples/tutorials/data/get_stereo_rectified.cc  Normal file
@@ -0,0 +1,39 @@
#include <opencv2/highgui/highgui.hpp>

#include <glog/logging.h>

#include "mynteye/api.h"

MYNTEYE_USE_NAMESPACE

int main(int argc, char *argv[]) {
  auto &&api = API::Create(argc, argv);

  api->EnableStreamData(Stream::LEFT_RECTIFIED);
  api->EnableStreamData(Stream::RIGHT_RECTIFIED);

  api->Start(Source::VIDEO_STREAMING);

  cv::namedWindow("frame");

  while (true) {
    api->WaitForStreams();

    auto &&left_data = api->GetStreamData(Stream::LEFT_RECTIFIED);
    auto &&right_data = api->GetStreamData(Stream::RIGHT_RECTIFIED);

    if (!left_data.frame.empty() && !right_data.frame.empty()) {
      cv::Mat img;
      cv::hconcat(left_data.frame, right_data.frame, img);
      cv::imshow("frame", img);
    }

    char key = static_cast<char>(cv::waitKey(1));
    if (key == 27 || key == 'q' || key == 'Q') {  // ESC/Q
      break;
    }
  }

  api->Stop(Source::VIDEO_STREAMING);
  return 0;
}
89  samples/tutorials/data/pcviewer.cc  Normal file
@@ -0,0 +1,89 @@
#include "data/pcviewer.h"

#include <glog/logging.h>

// #include <pcl/common/common_headers.h>

#include <cmath>

std::shared_ptr<pcl::visualization::PCLVisualizer> CustomColorVis(
    pcl::PointCloud<pcl::PointXYZ>::ConstPtr pc) {
  // --------------------------------------------
  // -----Open 3D viewer and add point cloud-----
  // --------------------------------------------
  std::shared_ptr<pcl::visualization::PCLVisualizer> viewer(
      new pcl::visualization::PCLVisualizer("PointCloud Viewer"));
  viewer->setBackgroundColor(0, 0, 0);
  pcl::visualization::PointCloudColorHandlerCustom<pcl::PointXYZ> single_color(
      pc, 255, 255, 255);
  viewer->addPointCloud<pcl::PointXYZ>(pc, single_color, "point cloud");
  viewer->setPointCloudRenderingProperties(
      pcl::visualization::PCL_VISUALIZER_POINT_SIZE, 1, "point cloud");
  // viewer->addCoordinateSystem(1.0);
  viewer->addCoordinateSystem(1000.0);
  viewer->initCameraParameters();
  viewer->setCameraPosition(0, 0, -150, 0, 1, 0);
  return (viewer);
}

PCViewer::PCViewer() : viewer_(nullptr) {
  VLOG(2) << __func__;
}

PCViewer::~PCViewer() {
  VLOG(2) << __func__;
  if (viewer_) {
    // viewer_->saveCameraParameters("pcl_camera_params.txt");
    viewer_->close();
    viewer_ = nullptr;
  }
}

void PCViewer::Draw(const cv::Mat &xyz) {
  pcl::PointCloud<pcl::PointXYZ>::Ptr pc(new pcl::PointCloud<pcl::PointXYZ>);
  ConvertMatToPointCloud(xyz, pc);
  Draw(pc);
}

void PCViewer::Draw(pcl::PointCloud<pcl::PointXYZ>::ConstPtr pc) {
  if (viewer_ == nullptr) {
    viewer_ = CustomColorVis(pc);
  }
  viewer_->updatePointCloud(pc, "point cloud");
  viewer_->spinOnce();
}

bool PCViewer::WasDrew() const {
  return viewer_ != nullptr;
}

bool PCViewer::WasStopped() const {
  return viewer_ == nullptr || viewer_->wasStopped();
}

void PCViewer::ConvertMatToPointCloud(
    const cv::Mat &xyz, pcl::PointCloud<pcl::PointXYZ>::Ptr pc) {
  // cv::Mat channels[3];
  // cv::split(xyz, channels);
  // double min, max;
  // cv::minMaxLoc(channels[2], &min, &max);

  for (int i = 0; i < xyz.rows; i++) {
    for (int j = 0; j < xyz.cols; j++) {
      auto &&p = xyz.at<cv::Point3f>(i, j);
      if (std::isfinite(p.x) && std::isfinite(p.y) && std::isfinite(p.z)) {
        // LOG(INFO) << "[" << i << "," << j << "] x: " << p.x << ", y: " << p.y
        //           << ", z: " << p.z;
        pcl::PointXYZ point;
        point.x = p.x;
        point.y = p.y;
        point.z = p.z;
        // point.z = p.z - min;
        pc->points.push_back(point);
      }
    }
  }

  pc->width = static_cast<int>(pc->points.size());
  pc->height = 1;
}
30  samples/tutorials/data/pcviewer.h  Normal file
@@ -0,0 +1,30 @@
#ifndef MYNTEYE_TUTORIALS_PCVIEWER_H_  // NOLINT
#define MYNTEYE_TUTORIALS_PCVIEWER_H_
#pragma once

#include <opencv2/core/core.hpp>

#include <pcl/visualization/pcl_visualizer.h>

#include <memory>

class PCViewer {
 public:
  PCViewer();
  ~PCViewer();

  void Draw(const cv::Mat &xyz);

  void Draw(pcl::PointCloud<pcl::PointXYZ>::ConstPtr pc);

  bool WasDrew() const;
  bool WasStopped() const;

 private:
  void ConvertMatToPointCloud(
      const cv::Mat &xyz, pcl::PointCloud<pcl::PointXYZ>::Ptr pc);

  std::shared_ptr<pcl::visualization::PCLVisualizer> viewer_;
};

#endif  // MYNTEYE_TUTORIALS_PCVIEWER_H_ NOLINT