Add frame id to synthetic streams

This commit is contained in:
John Zhao 2018-10-26 15:39:34 +08:00
parent 1529e77ffd
commit 08271be063
12 changed files with 74 additions and 24 deletions

View File

@ -135,6 +135,17 @@ endif
.PHONY: install
uninstall:
@$(call echo,Make $@)
ifeq ($(HOST_OS),Linux)
$(SUDO) rm -rf /usr/local/lib/libmynteye*
$(SUDO) rm -rf /usr/local/include/mynteye/
$(SUDO) rm -rf /usr/local/lib/cmake/mynteye/
$(SUDO) rm -rf /usr/local/share/mynteye/
endif
.PHONY: uninstall
# samples
samples: install

View File

@ -113,6 +113,8 @@ struct MYNTEYE_API StreamData {
std::shared_ptr<ImgData> img;
/** Frame. */
std::shared_ptr<Frame> frame;
/** Frame ID. */
std::uint16_t frame_id;
};
/**

View File

@ -15,6 +15,7 @@
#include <opencv2/imgproc/imgproc.hpp>
#include "mynteye/api.h"
// #include "mynteye/logger.h"
#include "util/cv_painter.h"
#include "util/pc_viewer.h"
@ -186,6 +187,8 @@ int main(int argc, char *argv[]) {
painter.DrawImgData(img, *left_data.img);
cv::imshow("frame", img);
// LOG(INFO) << "left id: " << left_data.frame_id
// << ", right id: " << right_data.frame_id;
auto &&disp_data = api->GetStreamData(Stream::DISPARITY_NORMALIZED);
auto &&depth_data = api->GetStreamData(Stream::DEPTH);
@ -204,6 +207,7 @@ int main(int argc, char *argv[]) {
depth_region.DrawRect(depth_frame);
cv::imshow("depth", depth_frame);
// LOG(INFO) << "depth id: " << disp_data.frame_id;
depth_region.ShowElems<ushort>(
depth_data.frame,
@ -223,6 +227,7 @@ int main(int argc, char *argv[]) {
auto &&points_data = api->GetStreamData(Stream::POINTS);
if (!points_data.frame.empty()) {
pcviewer.Update(points_data.frame);
// LOG(INFO) << "points id: " << points_data.frame_id;
}
char key = static_cast<char>(cv::waitKey(1));

View File

@ -49,6 +49,8 @@ struct MYNTEYE_API StreamData {
cv::Mat frame;
/** Raw frame. */
std::shared_ptr<device::Frame> frame_raw;
/** Frame ID. */
std::uint16_t frame_id;
bool operator==(const StreamData &other) const {
if (img && other.img) {

View File

@ -46,6 +46,7 @@ bool DepthProcessor::OnProcess(
cv::Mat channels[3 /*input->value.channels()*/];
cv::split(input->value, channels);
channels[2].convertTo(output->value, CV_16UC1);
output->id = input->id;
return true;
}

View File

@ -49,6 +49,7 @@ bool DisparityNormalizedProcessor::OnProcess(
ObjMat *output = Object::Cast<ObjMat>(out);
cv::normalize(input->value, output->value, 0, 255, cv::NORM_MINMAX, CV_8UC1);
// cv::normalize maybe return empty ==
output->id = input->id;
return !output->value.empty();
}

View File

@ -99,6 +99,7 @@ bool DisparityProcessor::OnProcess(
sgbm_->compute(input->first, input->second, disparity);
#endif
output->value = disparity / 16 + 1;
output->id = input->first_id;
return true;
}

View File

@ -56,14 +56,18 @@ struct MYNTEYE_API Object {
*/
struct MYNTEYE_API ObjMat : public Object {
ObjMat() = default;
- explicit ObjMat(const cv::Mat &value) : value(value) {}
+ ObjMat(const cv::Mat &value, std::uint16_t id)
+     : value(value), id(id) {}
/** The value */
cv::Mat value;
/** The id **/
std::uint16_t id;
Object *Clone() const {
ObjMat *mat = new ObjMat;
mat->value = value.clone();
mat->id = id;
return mat;
}
@ -77,19 +81,27 @@ struct MYNTEYE_API ObjMat : public Object {
*/
struct MYNTEYE_API ObjMat2 : public Object {
ObjMat2() = default;
- ObjMat2(const cv::Mat &first, const cv::Mat &second)
-     : first(first), second(second) {}
+ ObjMat2(const cv::Mat &first, std::uint16_t first_id,
+     const cv::Mat &second, std::uint16_t second_id)
+     : first(first), first_id(first_id),
+       second(second), second_id(second_id) {}
/** The first value */
cv::Mat first;
/** The first id **/
std::uint16_t first_id;
/** The second value */
cv::Mat second;
/** The second id **/
std::uint16_t second_id;
Object *Clone() const {
ObjMat2 *mat2 = new ObjMat2;
mat2->first = first.clone();
mat2->first_id = first_id;
mat2->second = second.clone();
mat2->second_id = second_id;
return mat2;
}

View File

@ -46,6 +46,7 @@ bool PointsProcessor::OnProcess(
const ObjMat *input = Object::Cast<ObjMat>(in);
ObjMat *output = Object::Cast<ObjMat>(out);
cv::reprojectImageTo3D(input->value, output->value, Q_, true);
output->id = input->id;
return true;
}

View File

@ -54,6 +54,8 @@ bool RectifyProcessor::OnProcess(
ObjMat2 *output = Object::Cast<ObjMat2>(out);
cv::remap(input->first, output->first, map11, map12, cv::INTER_LINEAR);
cv::remap(input->second, output->second, map21, map22, cv::INTER_LINEAR);
output->first_id = input->first_id;
output->second_id = input->second_id;
return true;
}

View File

@ -46,7 +46,7 @@ cv::Mat frame2mat(const std::shared_ptr<device::Frame> &frame) {
}
api::StreamData data2api(const device::StreamData &data) {
- return {data.img, frame2mat(data.frame), data.frame};
+ return {data.img, frame2mat(data.frame), data.frame, data.frame_id};
}
void process_childs(
@ -165,9 +165,9 @@ api::StreamData Synthetic::GetStreamData(const Stream &stream) {
}
if (output != nullptr) {
if (stream == Stream::LEFT_RECTIFIED) {
- return {nullptr, output->first, nullptr};
+ return {nullptr, output->first, nullptr, output->first_id};
} else {
- return {nullptr, output->second, nullptr};
+ return {nullptr, output->second, nullptr, output->second_id};
}
}
VLOG(2) << "Rectify not ready now";
@ -179,7 +179,7 @@ api::StreamData Synthetic::GetStreamData(const Stream &stream) {
auto &&out = processor->GetOutput();
if (out != nullptr) {
auto &&output = Object::Cast<ObjMat>(out);
- return {nullptr, output->value, nullptr};
+ return {nullptr, output->value, nullptr, output->id};
}
VLOG(2) << "Disparity not ready now";
} break;
@ -189,7 +189,7 @@ api::StreamData Synthetic::GetStreamData(const Stream &stream) {
auto &&out = processor->GetOutput();
if (out != nullptr) {
auto &&output = Object::Cast<ObjMat>(out);
- return {nullptr, output->value, nullptr};
+ return {nullptr, output->value, nullptr, output->id};
}
VLOG(2) << "Disparity normalized not ready now";
} break;
@ -198,7 +198,7 @@ api::StreamData Synthetic::GetStreamData(const Stream &stream) {
auto &&out = processor->GetOutput();
if (out != nullptr) {
auto &&output = Object::Cast<ObjMat>(out);
- return {nullptr, output->value, nullptr};
+ return {nullptr, output->value, nullptr, output->id};
}
VLOG(2) << "Points not ready now";
} break;
@ -207,7 +207,7 @@ api::StreamData Synthetic::GetStreamData(const Stream &stream) {
auto &&out = processor->GetOutput();
if (out != nullptr) {
auto &&output = Object::Cast<ObjMat>(out);
- return {nullptr, output->value, nullptr};
+ return {nullptr, output->value, nullptr, output->id};
}
VLOG(2) << "Depth not ready now";
} break;
@ -456,7 +456,8 @@ void Synthetic::ProcessNativeStream(
if (left_data.img && right_data.img &&
left_data.img->frame_id == right_data.img->frame_id) {
auto &&processor = find_processor<RectifyProcessor>(processor_);
- processor->Process(ObjMat2{left_data.frame, right_data.frame});
+ processor->Process(ObjMat2{left_data.frame, left_data.frame_id,
+     right_data.frame, right_data.frame_id});
}
return;
}
@ -472,24 +473,28 @@ void Synthetic::ProcessNativeStream(
left_rect_data.img->frame_id == right_rect_data.img->frame_id) {
process_childs(
processor_, RectifyProcessor::NAME,
- ObjMat2{left_rect_data.frame, right_rect_data.frame});
+ ObjMat2{left_rect_data.frame, left_rect_data.frame_id,
+     right_rect_data.frame, right_rect_data.frame_id});
}
return;
}
switch (stream) {
case Stream::DISPARITY: {
- process_childs(processor_, DisparityProcessor::NAME, ObjMat{data.frame});
+ process_childs(processor_, DisparityProcessor::NAME,
+     ObjMat{data.frame, data.frame_id});
} break;
case Stream::DISPARITY_NORMALIZED: {
- process_childs(
-     processor_, DisparityNormalizedProcessor::NAME, ObjMat{data.frame});
+ process_childs(processor_, DisparityNormalizedProcessor::NAME,
+     ObjMat{data.frame, data.frame_id});
} break;
case Stream::POINTS: {
- process_childs(processor_, PointsProcessor::NAME, ObjMat{data.frame});
+ process_childs(processor_, PointsProcessor::NAME,
+     ObjMat{data.frame, data.frame_id});
} break;
case Stream::DEPTH: {
- process_childs(processor_, DepthProcessor::NAME, ObjMat{data.frame});
+ process_childs(processor_, DepthProcessor::NAME,
+     ObjMat{data.frame, data.frame_id});
} break;
default:
break;
@ -546,18 +551,19 @@ void Synthetic::OnRectifyPostProcess(Object *const out) {
const ObjMat2 *output = Object::Cast<ObjMat2>(out);
if (HasStreamCallback(Stream::LEFT_RECTIFIED)) {
stream_callbacks_.at(Stream::LEFT_RECTIFIED)(
- {nullptr, output->first, nullptr});
+ {nullptr, output->first, nullptr, output->first_id});
}
if (HasStreamCallback(Stream::RIGHT_RECTIFIED)) {
stream_callbacks_.at(Stream::RIGHT_RECTIFIED)(
- {nullptr, output->second, nullptr});
+ {nullptr, output->second, nullptr, output->second_id});
}
}
void Synthetic::OnDisparityPostProcess(Object *const out) {
const ObjMat *output = Object::Cast<ObjMat>(out);
if (HasStreamCallback(Stream::DISPARITY)) {
- stream_callbacks_.at(Stream::DISPARITY)({nullptr, output->value, nullptr});
+ stream_callbacks_.at(Stream::DISPARITY)(
+     {nullptr, output->value, nullptr, output->id});
}
}
@ -565,21 +571,23 @@ void Synthetic::OnDisparityNormalizedPostProcess(Object *const out) {
const ObjMat *output = Object::Cast<ObjMat>(out);
if (HasStreamCallback(Stream::DISPARITY_NORMALIZED)) {
stream_callbacks_.at(Stream::DISPARITY_NORMALIZED)(
- {nullptr, output->value, nullptr});
+ {nullptr, output->value, nullptr, output->id});
}
}
void Synthetic::OnPointsPostProcess(Object *const out) {
const ObjMat *output = Object::Cast<ObjMat>(out);
if (HasStreamCallback(Stream::POINTS)) {
- stream_callbacks_.at(Stream::POINTS)({nullptr, output->value, nullptr});
+ stream_callbacks_.at(Stream::POINTS)(
+     {nullptr, output->value, nullptr, output->id});
}
}
void Synthetic::OnDepthPostProcess(Object *const out) {
const ObjMat *output = Object::Cast<ObjMat>(out);
if (HasStreamCallback(Stream::DEPTH)) {
- stream_callbacks_.at(Stream::DEPTH)({nullptr, output->value, nullptr});
+ stream_callbacks_.at(Stream::DEPTH)(
+     {nullptr, output->value, nullptr, output->id});
}
}

View File

@ -147,10 +147,12 @@ bool Streams::PushStream(const Capabilities &capability, const void *data) {
// unpack img data
if (unpack_img_data_map_[Stream::LEFT](
data, request, left_data.img.get())) {
left_data.frame_id = left_data.img->frame_id;
// alloc right
AllocStreamData(Stream::RIGHT, request, Format::GREY);
auto &&right_data = stream_datas_map_[Stream::RIGHT].back();
*right_data.img = *left_data.img;
right_data.frame_id = left_data.img->frame_id;
// unpack frame
unpack_img_pixels_map_[Stream::LEFT](
data, request, left_data.frame.get());
@ -267,13 +269,14 @@ void Streams::AllocStreamData(
// reuse the dropped data
data.img = datas.front().img;
data.frame = datas.front().frame;
data.frame_id = 0;
datas.erase(datas.begin());
VLOG(2) << "Stream data of " << stream << " is dropped as out of limits";
}
}
if (stream == Stream::LEFT || stream == Stream::RIGHT) {
- if(!data.img) {
+ if (!data.img) {
data.img = std::make_shared<ImgData>();
}
} else {
@ -283,6 +286,7 @@ void Streams::AllocStreamData(
data.frame = std::make_shared<frame_t>(
request.width, request.height, format, nullptr);
}
data.frame_id = 0;
stream_datas_map_[stream].push_back(data);
}