feat(*): add depth and disparity recording to the record sample

TinyO committed on 2019-10-22 11:29:22 +08:00
parent c045198991
commit 762841c73a
4 changed files with 41 additions and 85 deletions

View File

@@ -209,7 +209,6 @@ if(WITH_API)
)
make_executable2(get_imu SRCS get_imu.cc util_cv.cc WITH_OPENCV)
make_executable2(save_single_image SRCS save_single_image.cc WITH_OPENCV)
make_executable2(save_depth_and_disparity SRCS save_depth_and_disparity.cc WITH_OPENCV)
make_executable2(get_from_callbacks
SRCS get_from_callbacks.cc util_cv.cc
WITH_OPENCV

View File

@@ -161,6 +161,12 @@ Dataset::writer_t Dataset::GetStreamWriter(const Stream &stream) {
case Stream::RIGHT: {
writer->outdir = outdir_ + MYNTEYE_OS_SEP "right";
} break;
case Stream::DEPTH: {
writer->outdir = outdir_ + MYNTEYE_OS_SEP "depth";
} break;
case Stream::DISPARITY: {
writer->outdir = outdir_ + MYNTEYE_OS_SEP "disparity";
} break;
default:
LOG(FATAL) << "Unsupported stream: " << stream;
}
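
The new DEPTH and DISPARITY cases route each stream into its own subdirectory next to the existing left/right folders. A minimal standalone sketch of that mapping, assuming a plain '/' separator in place of MYNTEYE_OS_SEP and a free function instead of the Dataset::GetStreamWriter member:

// Sketch of the stream -> output-subdirectory mapping added above.
// Stream names mirror the diff; the free function and the '/' separator
// are illustrative assumptions.
#include <stdexcept>
#include <string>

enum class Stream { LEFT, RIGHT, DEPTH, DISPARITY };

std::string StreamOutdir(const std::string &outdir, const Stream &stream) {
  switch (stream) {
    case Stream::LEFT:      return outdir + "/left";
    case Stream::RIGHT:     return outdir + "/right";
    case Stream::DEPTH:     return outdir + "/depth";
    case Stream::DISPARITY: return outdir + "/disparity";
  }
  throw std::runtime_error("Unsupported stream");
}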

View File

@@ -26,19 +26,29 @@ MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
glog_init _(argc, argv);
auto &&device = device::select();
if (!device) return 1;
auto &&api = API::Create(argc, argv);
if (!api) return 1;
bool ok;
auto &&request = device::select_request(device, &ok);
if (!ok) return 1;
device->ConfigStreamRequest(request);
auto request = api->GetStreamRequest();
device->LogOptionInfos();
// struct StreamRequest {
// /** Stream width in pixels */
// std::uint16_t width;
// /** Stream height in pixels */
// std::uint16_t height;
// /** Stream pixel format */
// Format format;
// /** Stream frames per second */
// std::uint16_t fps;
// }
// Enabling this will cache motion data until you retrieve it.
device->EnableMotionDatas();
device->Start(Source::ALL);
request.fps = 10;
api->ConfigStreamRequest(request);
api->EnableMotionDatas();
api->EnableStreamData(Stream::DEPTH);
api->Start(Source::ALL);
const char *outdir;
if (argc >= 2) {
@@ -54,17 +64,19 @@ int main(int argc, char *argv[]) {
std::size_t imu_count = 0;
auto &&time_beg = times::now();
while (true) {
device->WaitForStreams();
api->WaitForStreams();
auto &&left_datas = device->GetStreamDatas(Stream::LEFT);
auto &&right_datas = device->GetStreamDatas(Stream::RIGHT);
auto &&left_datas = api->GetStreamDatas(Stream::LEFT);
auto &&right_datas = api->GetStreamDatas(Stream::RIGHT);
auto &&depth_datas = api->GetStreamDatas(Stream::DEPTH);
auto &&disparity_datas = api->GetStreamDatas(Stream::DISPARITY);
img_count += left_datas.size();
auto &&motion_datas = device->GetMotionDatas();
auto &&motion_datas = api->GetMotionDatas();
imu_count += motion_datas.size();
auto &&left_frame = left_datas.back().frame;
auto &&right_frame = right_datas.back().frame;
auto &&left_frame = left_datas.back().frame_raw;
auto &&right_frame = right_datas.back().frame_raw;
cv::Mat img;
@@ -105,6 +117,12 @@ int main(int argc, char *argv[]) {
for (auto &&right : right_datas) {
dataset.SaveStreamData(Stream::RIGHT, right);
}
for (auto &&depth : depth_datas) {
dataset.SaveStreamData(Stream::DEPTH, depth);
}
for (auto &&disparity : disparity_datas) {
dataset.SaveStreamData(Stream::DISPARITY, disparity);
}
for (auto &&motion : motion_datas) {
dataset.SaveMotionData(motion);
@@ -122,7 +140,7 @@ int main(int argc, char *argv[]) {
std::cout << " to " << outdir << std::endl;
auto &&time_end = times::now();
device->Stop(Source::ALL);
api->Stop(Source::ALL);
float elapsed_ms =
times::count<times::microseconds>(time_end - time_beg) * 0.001f;
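
Taken together, the reworked sample drives everything through the API layer: it requests a 10 fps stream configuration, enables cached motion data and the depth stream, and saves left, right, depth, and disparity frames plus IMU samples through the Dataset writer. A condensed sketch of that flow follows; the console output, OpenCV preview, and exit condition of the full sample are omitted, and the header path and Dataset constructor are assumptions:

// Condensed sketch of the reworked record sample (API calls as in the diff).
#include "mynteye/api/api.h"
#include "dataset/dataset.h"  // assumed path for the tools Dataset class

MYNTEYE_USE_NAMESPACE

int main(int argc, char *argv[]) {
  auto &&api = API::Create(argc, argv);
  if (!api) return 1;

  auto request = api->GetStreamRequest();
  request.fps = 10;                      // record at 10 fps
  api->ConfigStreamRequest(request);

  api->EnableMotionDatas();              // cache IMU data until it is fetched
  api->EnableStreamData(Stream::DEPTH);  // depth (and disparity) frames are read below
  api->Start(Source::ALL);

  tools::Dataset dataset("./dataset");   // outdir comes from argv in the full sample
  for (int i = 0; i < 100; ++i) {        // the full sample loops until its own stop condition
    api->WaitForStreams();
    for (auto &&left : api->GetStreamDatas(Stream::LEFT))
      dataset.SaveStreamData(Stream::LEFT, left);
    for (auto &&right : api->GetStreamDatas(Stream::RIGHT))
      dataset.SaveStreamData(Stream::RIGHT, right);
    for (auto &&depth : api->GetStreamDatas(Stream::DEPTH))
      dataset.SaveStreamData(Stream::DEPTH, depth);
    for (auto &&disparity : api->GetStreamDatas(Stream::DISPARITY))
      dataset.SaveStreamData(Stream::DISPARITY, disparity);
    for (auto &&motion : api->GetMotionDatas())
      dataset.SaveMotionData(motion);
  }

  api->Stop(Source::ALL);
  return 0;
}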

View File

@@ -1,67 +0,0 @@
#include <stdio.h>
#include <opencv2/highgui/highgui.hpp>
#include "mynteye/api/api.h"
MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api) return 1;
bool ok;
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
api->EnableStreamData(Stream::DEPTH);
api->Start(Source::VIDEO_STREAMING);
cv::namedWindow("disparity");
cv::namedWindow("depth_real");
std::int32_t count = 0;
std::cout << "Press 'Space' 's' 'S' to save image." << std::endl;
while (true) {
api->WaitForStreams();
auto &&depth_data = api->GetStreamData(Stream::DEPTH);
if (!depth_data.frame.empty()) {
cv::imshow("depth_real", depth_data.frame); // CV_16UC1
}
auto &&dis_data = api->GetStreamData(Stream::DISPARITY);
if (!dis_data.frame.empty()) {
cv::imshow("disparity", dis_data.frame); // CV_16UC1
}
char key = static_cast<char>(cv::waitKey(1));
if (key == 27 || key == 'q' || key == 'Q') { // ESC/Q
break;
} else {
if (!depth_data.frame.empty()) {
char d_name[20];
++count;
snprintf(d_name, sizeof(d_name), "depth_%d.jpg", count);
cv::imwrite(d_name, depth_data.frame);
std::cout << "Saved " << d_name << " to current directory" << std::endl;
}
if (!dis_data.frame.empty()) {
char dis_name[20];
++count;
snprintf(dis_name, sizeof(dis_name), "disparity_%d.jpg", count);
cv::imwrite(dis_name, dis_data.frame);
std::cout << "Saved " << dis_name << " to current directory" << std::endl;
}
}
}
api->Stop(Source::VIDEO_STREAMING);
return 0;
}
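
The standalone save_depth_and_disparity sample is removed, since the record sample above now writes depth and disparity frames as part of a dataset. For a one-off capture, the same API calls the deleted file used still apply; a minimal sketch, where the file names, retry count, and PNG output are illustrative (PNG is chosen because OpenCV's JPEG writer is 8-bit only while these frames are CV_16UC1):

// Sketch: grab one depth/disparity frame pair via the API and save it,
// mirroring the calls from the removed sample.
#include <opencv2/highgui/highgui.hpp>
#include "mynteye/api/api.h"

MYNTEYE_USE_NAMESPACE

int main(int argc, char *argv[]) {
  auto &&api = API::Create(argc, argv);
  if (!api) return 1;

  api->EnableStreamData(Stream::DEPTH);
  api->Start(Source::VIDEO_STREAMING);

  cv::Mat depth_img, disp_img;
  // Depth may take a few frames to become available; retry a bounded number of times.
  for (int i = 0; i < 30 && (depth_img.empty() || disp_img.empty()); ++i) {
    api->WaitForStreams();
    auto &&depth = api->GetStreamData(Stream::DEPTH);
    auto &&disparity = api->GetStreamData(Stream::DISPARITY);
    if (!depth.frame.empty()) depth_img = depth.frame;
    if (!disparity.frame.empty()) disp_img = disparity.frame;
  }
  if (!depth_img.empty()) cv::imwrite("depth_0.png", depth_img);
  if (!disp_img.empty()) cv::imwrite("disparity_0.png", disp_img);

  api->Stop(Source::VIDEO_STREAMING);
  return 0;
}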