refactor(synthetic): remove useless logic

TinyOh 2019-03-01 16:08:15 +08:00
parent d1cd7fadc5
commit 71e3286014
10 changed files with 375 additions and 338 deletions


@@ -221,6 +221,7 @@ if(WITH_API)
src/mynteye/api/config.cc
src/mynteye/api/correspondence.cc
src/mynteye/api/version_checker.cc
+ src/mynteye/api/data_tools.cc
)
if(WITH_CAM_MODELS)
list(APPEND MYNTEYE_SRCS


@@ -25,6 +25,8 @@ int main(int argc, char *argv[]) {
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
+ api->EnableStreamData(Stream::LEFT);
+ api->EnableStreamData(Stream::RIGHT);
api->Start(Source::VIDEO_STREAMING);
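Note: with the automatic stream-support detection removed in this commit (see the deleted InitStreamSupports() below), samples have to enable the LEFT and RIGHT streams themselves before starting. A minimal sketch of the resulting call order, assuming the usual API::Create entry point used by the other samples:

// Hedged sketch only; API::Create(argc, argv) is assumed from the other samples.
auto api = API::Create(argc, argv);
bool ok;
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
api->EnableStreamData(Stream::LEFT);   // now required explicitly
api->EnableStreamData(Stream::RIGHT);
api->Start(Source::VIDEO_STREAMING);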


@@ -0,0 +1,78 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <algorithm>
#include <functional>
#include <stdexcept>
#include "mynteye/api/data_tools.h"
#include "mynteye/logger.h"
MYNTEYE_BEGIN_NAMESPACE
cv::Mat frame2mat(const std::shared_ptr<device::Frame> &frame) {
if (frame->format() == Format::YUYV) {
cv::Mat img(frame->height(), frame->width(), CV_8UC2, frame->data());
cv::cvtColor(img, img, cv::COLOR_YUV2BGR_YUY2);
return img;
} else if (frame->format() == Format::BGR888) {
cv::Mat img(frame->height(), frame->width(), CV_8UC3, frame->data());
return img;
} else { // Format::GRAY
return cv::Mat(frame->height(), frame->width(), CV_8UC1, frame->data());
}
}
api::StreamData data2api(const device::StreamData &data) {
return {data.img, frame2mat(data.frame), data.frame, data.frame_id};
}
// ObjMat/ObjMat2 > api::StreamData
api::StreamData obj_data_first(const ObjMat2 *obj) {
return {obj->first_data, obj->first, nullptr, obj->first_id};
}
api::StreamData obj_data_second(const ObjMat2 *obj) {
return {obj->second_data, obj->second, nullptr, obj->second_id};
}
api::StreamData obj_data(const ObjMat *obj) {
return {obj->data, obj->value, nullptr, obj->id};
}
api::StreamData obj_data_first(const std::shared_ptr<ObjMat2> &obj) {
return {obj->first_data, obj->first, nullptr, obj->first_id};
}
api::StreamData obj_data_second(const std::shared_ptr<ObjMat2> &obj) {
return {obj->second_data, obj->second, nullptr, obj->second_id};
}
api::StreamData obj_data(const std::shared_ptr<ObjMat> &obj) {
return {obj->data, obj->value, nullptr, obj->id};
}
// api::StreamData > ObjMat/ObjMat2
ObjMat data_obj(const api::StreamData &data) {
return ObjMat{data.frame, data.frame_id, data.img};
}
ObjMat2 data_obj(const api::StreamData &first, const api::StreamData &second) {
return ObjMat2{
first.frame, first.frame_id, first.img,
second.frame, second.frame_id, second.img};
}
MYNTEYE_END_NAMESPACE
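For orientation, a rough sketch of how these shared helpers chain together (the device accessor mirrors device_->GetStreamData() used later in this commit; everything else comes from the functions above):

// Hedged illustration only, not part of the commit.
device::StreamData raw = device->GetStreamData(Stream::LEFT);  // raw frame from the device layer
api::StreamData data = data2api(raw);    // frame2mat() attaches a cv::Mat view of the pixels
ObjMat obj = data_obj(data);             // repack for the processor pipeline
api::StreamData back = obj_data(&obj);   // and back; the raw frame pointer becomes nullptr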


@@ -0,0 +1,33 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_API_DATA_TOOLS_H_
#define MYNTEYE_API_DATA_TOOLS_H_
#pragma once
#include <opencv2/imgproc/imgproc.hpp>
#include "mynteye/api/object.h"
#include "mynteye/api/api.h"
#include "mynteye/device/device.h"
MYNTEYE_BEGIN_NAMESPACE
cv::Mat frame2mat(const std::shared_ptr<device::Frame> &frame);
api::StreamData data2api(const device::StreamData &data);
api::StreamData obj_data_first(const ObjMat2 *obj);
api::StreamData obj_data_second(const ObjMat2 *obj);
api::StreamData obj_data(const ObjMat *obj);
api::StreamData obj_data_first(const std::shared_ptr<ObjMat2> &obj);
api::StreamData obj_data_second(const std::shared_ptr<ObjMat2> &obj);
api::StreamData obj_data(const std::shared_ptr<ObjMat> &obj);
ObjMat data_obj(const api::StreamData &data);
ObjMat2 data_obj(const api::StreamData &first, const api::StreamData &second);
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_API_DATA_TOOLS_H_


@@ -19,6 +19,7 @@
#include "mynteye/logger.h"
#include "mynteye/util/strings.h"
#include "mynteye/util/times.h"
+ #include "mynteye/api/data_tools.h"
MYNTEYE_BEGIN_NAMESPACE
@@ -245,6 +246,74 @@ void Processor::Run() {
VLOG(2) << Name() << " thread end";
}
api::StreamData Processor::GetStreamData(const Stream &stream) {
auto sum = getStreamsSum();
auto &&out = GetOutput();
Synthetic::Mode enable_mode = Synthetic::MODE_LAST;
auto streams = getTargetStreams();
for (auto it_s : streams) {
if (it_s.stream == stream) {
enable_mode = it_s.enabled_mode_;
break;
}
}
if (enable_mode == Synthetic::MODE_SYNTHETIC) {
if (sum == 1) {
if (out != nullptr) {
auto &&output = Object::Cast<ObjMat>(out);
if (output != nullptr) {
return obj_data(output);
}
VLOG(2) << "Rectify not ready now";
}
} else if (sum == 2) {
static std::shared_ptr<ObjMat2> output = nullptr;
if (out != nullptr) {
output = Object::Cast<ObjMat2>(out);
}
auto streams = getTargetStreams();
if (output != nullptr) {
int num = 0;
for (auto it : streams) {
if (it.stream == stream) {
if (num == 1) {
return obj_data_first(output);
} else {
return obj_data_second(output);
}
}
num++;
}
}
VLOG(2) << "Rectify not ready now";
} else {
LOG(ERROR) << "error: invalid sum!";
}
return {}; // frame.empty() == true
}
LOG(ERROR) << "Failed to get stream data of " << stream
<< ", unsupported or disabled";
return {}; // frame.empty() == true
}
std::vector<api::StreamData> Processor::GetStreamDatas(const Stream &stream) {
Synthetic::Mode enable_mode = Synthetic::MODE_LAST;
auto streams = getTargetStreams();
for (auto it_s : streams) {
if (it_s.stream == stream) {
enable_mode = it_s.enabled_mode_;
break;
}
}
if (enable_mode == Synthetic::MODE_SYNTHETIC) {
return {GetStreamData(stream)};
} else {
LOG(ERROR) << "Failed to get stream data of " << stream
<< ", unsupported or disabled";
}
return {};
}
void Processor::SetIdle(bool idle) {
std::lock_guard<std::mutex> lk(mtx_state_);
idle_ = idle;
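A note on the new Processor::GetStreamData/GetStreamDatas above: they only serve streams whose enabled mode on this processor is MODE_SYNTHETIC, and they return an empty api::StreamData while the pipeline has not produced output yet. A small, hypothetical caller-side guard (field names follow data_tools.cc):

// Hedged sketch; `processor` is any Processor that owns the requested stream.
auto data = processor->GetStreamData(Stream::LEFT_RECTIFIED);
if (data.frame.empty()) {
  // stream not enabled as MODE_SYNTHETIC here, or no output yet ("Rectify not ready now")
  return;
}
// data.frame is the cv::Mat produced by the pipeline; use it from here.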


@@ -66,6 +66,10 @@ class Processor :
/** Returns dropped or not. */
bool Process(const Object &in);
+ virtual api::StreamData GetStreamData(const Stream &stream);
+ virtual std::vector<api::StreamData> GetStreamDatas(const Stream &stream);
/**
* Returns the last output.
* @note Returns null if not output now.


@@ -14,15 +14,21 @@
#include "mynteye/api/processor/root_camera_processor.h"
#include <utility>
+ #include <vector>
#include <opencv2/calib3d/calib3d.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include "mynteye/logger.h"
+ #include "mynteye/api/synthetic.h"
+ #include "mynteye/device/device.h"
+ #include "mynteye/api/data_tools.h"
MYNTEYE_BEGIN_NAMESPACE
const char RootProcessor::NAME[] = "RootProcessor";
- RootProcessor::RootProcessor(std::int32_t proc_period)
- : Processor(std::move(proc_period)) {}
+ RootProcessor::RootProcessor(std::shared_ptr<Device> device,
+ std::int32_t proc_period)
+ : Processor(std::move(proc_period)),
+ device_(device) {}
RootProcessor::~RootProcessor() {
VLOG(2) << __func__;
}
@@ -31,13 +37,110 @@ std::string RootProcessor::Name() {
return NAME;
}
- Object *RootProcessor::OnCreateOutput() {
+ s1s2Processor::s1s2Processor(std::shared_ptr<Device> device,
+ std::int32_t proc_period)
+ : RootProcessor(device, std::move(proc_period)) {}
+ s1s2Processor::~s1s2Processor() {
+ VLOG(2) << __func__;
+ }
+ Object *s1s2Processor::OnCreateOutput() {
return new ObjMat2();
}
- bool RootProcessor::OnProcess(
+ bool s1s2Processor::OnProcess(
Object *const in, Object *const out,
std::shared_ptr<Processor> const parent) {
+ const ObjMat2 *input = Object::Cast<ObjMat2>(in);
+ ObjMat2 *output = Object::Cast<ObjMat2>(out);
+ output->second = input->second;
+ output->first = input->first;
+ output->first_id = input->first_id;
+ output->first_data = input->first_data;
+ output->second_id = input->second_id;
+ output->second_data = input->second_data;
MYNTEYE_UNUSED(parent)
return true;
}
void s1s2Processor::ProcessNativeStream(
const Stream &stream, const api::StreamData &data) {
std::unique_lock<std::mutex> lk(mtx_left_right_ready_);
static api::StreamData left_data, right_data;
if (stream == Stream::LEFT) {
left_data = data;
} else if (stream == Stream::RIGHT) {
right_data = data;
}
if (left_data.img && right_data.img &&
left_data.img->frame_id == right_data.img->frame_id) {
Process(data_obj(left_data, right_data));
}
return;
}
void s1s2Processor::StartVideoStreaming() {
auto streams = getTargetStreams();
for (unsigned int j =0; j< streams.size(); j++) {
auto stream = streams[j].stream;
auto callback = streams[j].stream_callback;
device_->SetStreamCallback(
stream,
[this, stream, callback](const device::StreamData &data) {
auto &&stream_data = data2api(data);
ProcessNativeStream(stream, stream_data);
// Need mutex if set callback after start
if (callback) {
callback(stream_data);
}
},
true);
}
device_->Start(Source::VIDEO_STREAMING);
}
void s1s2Processor::StopVideoStreaming() {
auto streams = getTargetStreams();
for (unsigned int j =0; j< streams.size(); j++) {
auto stream = streams[j].stream;
device_->SetStreamCallback(stream, nullptr);
}
device_->Stop(Source::VIDEO_STREAMING);
}
api::StreamData s1s2Processor::GetStreamData(const Stream &stream) {
Synthetic::Mode enable_mode = Synthetic::MODE_LAST;
auto streams = getTargetStreams();
for (auto it_s : streams) {
if (it_s.stream == stream) {
enable_mode = it_s.enabled_mode_;
break;
}
}
if (enable_mode == Synthetic::MODE_SYNTHETIC) {
return data2api(device_->GetStreamData(stream));
}
LOG(ERROR) << "Failed to get device stream data of " << stream
<< ", unsupported or disabled";
return {};
}
std::vector<api::StreamData> s1s2Processor::GetStreamDatas(
const Stream &stream) {
Synthetic::Mode enable_mode = Synthetic::MODE_LAST;
auto streams = getTargetStreams();
for (auto it_s : streams) {
if (it_s.stream == stream) {
enable_mode = it_s.enabled_mode_;
break;
}
}
if (enable_mode == Synthetic::MODE_SYNTHETIC) {
std::vector<api::StreamData> datas;
for (auto &&data : device_->GetStreamDatas(stream)) {
datas.push_back(data2api(data));
}
return datas;
}
}
MYNTEYE_END_NAMESPACE
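The Synthetic layer (changed later in this commit) owns this root processor and drives it roughly as sketched below; the stream registration mirrors Synthetic::InitProcessors() and ROOT_PROC_PERIOD is the existing constant there:

// Hedged sketch of how s1s2Processor is wired up by Synthetic, not verbatim code.
auto root = std::make_shared<s1s2Processor>(api_->device(), ROOT_PROC_PERIOD);
root->addTargetStreams({Stream::LEFT, Mode::MODE_LAST, nullptr});
root->addTargetStreams({Stream::RIGHT, Mode::MODE_LAST, nullptr});
root->StartVideoStreaming();   // registers device callbacks, then starts Source::VIDEO_STREAMING
// ...fetch the latest frames via GetStreamData(Stream::LEFT) / GetStreamData(Stream::RIGHT)...
root->StopVideoStreaming();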


@@ -28,16 +28,42 @@ class RootProcessor : public Processor {
public:
static const char NAME[];
- explicit RootProcessor(std::int32_t proc_period = 0);
+ explicit RootProcessor(std::shared_ptr<Device> device,
+ std::int32_t proc_period = 0);
virtual ~RootProcessor();
- std::string Name() override;
+ virtual std::string Name();
+ virtual void StartVideoStreaming() = 0;
+ virtual void StopVideoStreaming() = 0;
+ virtual api::StreamData GetStreamData(const Stream &stream) = 0;
+ virtual std::vector<api::StreamData> GetStreamDatas(const Stream &stream) = 0;  // NOLINT
+ protected:
+ virtual Object *OnCreateOutput() = 0;
+ virtual bool OnProcess(
+ Object *const in, Object *const out,
+ std::shared_ptr<Processor> const parent) = 0;
+ std::shared_ptr<Device> device_;
+ };
+ class s1s2Processor : public RootProcessor {
+ public:
+ explicit s1s2Processor(std::shared_ptr<Device> device,
+ std::int32_t proc_period = 0);
+ virtual ~s1s2Processor();
+ void StartVideoStreaming();
+ void StopVideoStreaming();
+ api::StreamData GetStreamData(const Stream &stream) override;
+ std::vector<api::StreamData> GetStreamDatas(const Stream &stream) override;  // NOLINT
protected:
Object *OnCreateOutput() override;
bool OnProcess(
Object *const in, Object *const out,
std::shared_ptr<Processor> const parent) override;
+ private:
+ void ProcessNativeStream(
+ const Stream &stream, const api::StreamData &data);
+ std::mutex mtx_left_right_ready_;
};
MYNTEYE_END_NAMESPACE
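With this split, RootProcessor becomes a pure interface for the device-facing root of the pipeline, and s1s2Processor is its only concrete implementation in this commit. A hypothetical alternative root would have to provide the same hooks (sketch only; the class name is made up):

class FileRootProcessor : public RootProcessor {
 public:
  explicit FileRootProcessor(std::shared_ptr<Device> device, std::int32_t proc_period = 0)
      : RootProcessor(device, proc_period) {}
  void StartVideoStreaming() override;
  void StopVideoStreaming() override;
  api::StreamData GetStreamData(const Stream &stream) override;
  std::vector<api::StreamData> GetStreamDatas(const Stream &stream) override;  // NOLINT
 protected:
  Object *OnCreateOutput() override;
  bool OnProcess(Object *const in, Object *const out,
      std::shared_ptr<Processor> const parent) override;
};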


@@ -36,6 +36,7 @@
#include "mynteye/api/processor/rectify_processor.h"
#endif
#include "mynteye/device/device.h"
+ #include "mynteye/api/data_tools.h"
#define RECTIFY_PROC_PERIOD 0
#define DISPARITY_PROC_PERIOD 0
@@ -46,74 +47,6 @@
MYNTEYE_BEGIN_NAMESPACE
namespace {
cv::Mat frame2mat(const std::shared_ptr<device::Frame> &frame) {
if (frame->format() == Format::YUYV) {
cv::Mat img(frame->height(), frame->width(), CV_8UC2, frame->data());
cv::cvtColor(img, img, cv::COLOR_YUV2BGR_YUY2);
return img;
} else if (frame->format() == Format::BGR888) {
cv::Mat img(frame->height(), frame->width(), CV_8UC3, frame->data());
return img;
} else { // Format::GRAY
return cv::Mat(frame->height(), frame->width(), CV_8UC1, frame->data());
}
}
api::StreamData data2api(const device::StreamData &data) {
return {data.img, frame2mat(data.frame), data.frame, data.frame_id};
}
void process_childs(
const std::shared_ptr<Processor> &proc, const std::string &name,
const Object &obj) {
auto &&processor = find_processor<Processor>(proc, name);
for (auto child : processor->GetChilds()) {
child->Process(obj);
}
}
// ObjMat/ObjMat2 > api::StreamData
api::StreamData obj_data_first(const ObjMat2 *obj) {
return {obj->first_data, obj->first, nullptr, obj->first_id};
}
api::StreamData obj_data_second(const ObjMat2 *obj) {
return {obj->second_data, obj->second, nullptr, obj->second_id};
}
api::StreamData obj_data(const ObjMat *obj) {
return {obj->data, obj->value, nullptr, obj->id};
}
api::StreamData obj_data_first(const std::shared_ptr<ObjMat2> &obj) {
return {obj->first_data, obj->first, nullptr, obj->first_id};
}
api::StreamData obj_data_second(const std::shared_ptr<ObjMat2> &obj) {
return {obj->second_data, obj->second, nullptr, obj->second_id};
}
api::StreamData obj_data(const std::shared_ptr<ObjMat> &obj) {
return {obj->data, obj->value, nullptr, obj->id};
}
// api::StreamData > ObjMat/ObjMat2
ObjMat data_obj(const api::StreamData &data) {
return ObjMat{data.frame, data.frame_id, data.img};
}
ObjMat2 data_obj(const api::StreamData &first, const api::StreamData &second) {
return ObjMat2{
first.frame, first.frame_id, first.img,
second.frame, second.frame_id, second.img};
}
} // namespace
void Synthetic::InitCalibInfo() {
if (calib_model_ == CalibrationModel::PINHOLE) {
LOG(INFO) << "camera calib model: pinhole";
@@ -149,7 +82,6 @@ Synthetic::Synthetic(API *api, CalibrationModel calib_model)
CHECK_NOTNULL(api_);
InitCalibInfo();
InitProcessors();
- InitStreamSupports();
}
Synthetic::~Synthetic() {
@@ -171,19 +103,18 @@ void Synthetic::NotifyImageParamsChanged() {
extr_ = std::make_shared<Extrinsics>(
api_->GetExtrinsics(Stream::LEFT, Stream::RIGHT));
}
- if (calib_model_ == CalibrationModel::PINHOLE) {
- auto &&processor = find_processor<RectifyProcessorOCV>(processor_);
- if (processor) processor->ReloadImageParams(intr_left_, intr_right_, extr_);
+ auto processor = getProcessorWithStream(Stream::LEFT_RECTIFIED);
+ if (processor && calib_model_ == CalibrationModel::PINHOLE) {
+ auto proc = static_cast<RectifyProcessorOCV*>(&(*processor));
+ proc->ReloadImageParams(intr_left_, intr_right_, extr_);
#ifdef WITH_CAM_MODELS
- } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
- auto &&processor = find_processor<RectifyProcessor>(processor_);
- if (processor) processor->ReloadImageParams(intr_left_, intr_right_, extr_);
+ } else if (processor && calib_model_ == CalibrationModel::KANNALA_BRANDT) {
+ auto proc = static_cast<RectifyProcessor*>(&(*processor));
+ proc->ReloadImageParams(intr_left_, intr_right_, extr_);
#endif
} else {
- LOG(ERROR) << "Unknow calib model type in device: "
- << calib_model_ << ", use default pinhole model";
- auto &&processor = find_processor<RectifyProcessorOCV>(processor_);
- if (processor) processor->ReloadImageParams(intr_left_, intr_right_, extr_);
+ LOG(ERROR) << "Unknow calib model type in device" << std::endl;
}
}
@@ -242,14 +173,6 @@ bool Synthetic::Supports(const Stream &stream) const {
return checkControlDateWithStream(stream);
}
- Synthetic::mode_t Synthetic::SupportsMode(const Stream &stream) const {
- if (checkControlDateWithStream(stream)) {
- auto data = getControlDateWithStream(stream);
- return data.support_mode_;
- }
- return MODE_LAST;
- }
void Synthetic::EnableStreamData(
const Stream &stream, stream_switch_callback_t callback,
bool try_tag) {
@@ -344,42 +267,13 @@ bool Synthetic::HasStreamCallback(const Stream &stream) const {
}
void Synthetic::StartVideoStreaming() {
- auto &&device = api_->device();
- for (unsigned int i =0; i< processors_.size(); i++) {
- auto streams = processors_[i]->getTargetStreams();
- for (unsigned int j =0; j< streams.size(); j++) {
- if (processors_[i]->target_streams_[j].support_mode_ == MODE_NATIVE) {
- auto stream = processors_[i]->target_streams_[j].stream;
- device->SetStreamCallback(
- stream,
- [this, stream](const device::StreamData &data) {
- auto &&stream_data = data2api(data);
- ProcessNativeStream(stream, stream_data);
- // Need mutex if set callback after start
- if (HasStreamCallback(stream)) {
- auto data = getControlDateWithStream(stream);
- data.stream_callback(stream_data);
- }
- },
- true);
- }
- }
- }
- device->Start(Source::VIDEO_STREAMING);
+ auto processor_root = static_cast<RootProcessor*>(&(*processor_));
+ processor_root->StartVideoStreaming();
}
void Synthetic::StopVideoStreaming() {
- auto &&device = api_->device();
- for (unsigned int i =0; i< processors_.size(); i++) {
- auto streams = processors_[i]->getTargetStreams();
- for (unsigned int j =0; j< streams.size(); j++) {
- if (processors_[i]->target_streams_[j].support_mode_ == MODE_NATIVE) {
- auto stream = processors_[i]->target_streams_[j].stream;
- device->SetStreamCallback(stream, nullptr);
- }
- }
- }
- device->Stop(Source::VIDEO_STREAMING);
+ auto processor_root = static_cast<RootProcessor*>(&(*processor_));
+ processor_root->StopVideoStreaming();
}
void Synthetic::WaitForStreams() {
@@ -387,69 +281,11 @@ void Synthetic::WaitForStreams() {
}
api::StreamData Synthetic::GetStreamData(const Stream &stream) {
+ return getProcessorWithStream(stream)->GetStreamData(stream);
auto &&mode = GetStreamEnabledMode(stream);
if (mode == MODE_NATIVE) {
auto &&device = api_->device();
return data2api(device->GetStreamData(stream));
} else if (mode == MODE_SYNTHETIC) {
auto processor = getProcessorWithStream(stream);
auto sum = processor->getStreamsSum();
auto &&out = processor->GetOutput();
static std::shared_ptr<ObjMat2> output = nullptr;
if (sum == 1) {
if (out != nullptr) {
auto &&output = Object::Cast<ObjMat>(out);
if (output != nullptr) {
return obj_data(output);
}
VLOG(2) << "Rectify not ready now";
}
} else if (sum == 2) {
if (out != nullptr) {
output = Object::Cast<ObjMat2>(out);
}
auto streams = processor->getTargetStreams();
if (output != nullptr) {
int num = 0;
for (auto it : streams) {
if (it.stream == stream) {
if (num == 1) {
return obj_data_first(output);
} else {
return obj_data_second(output);
}
}
num++;
}
}
VLOG(2) << "Rectify not ready now";
} else {
LOG(ERROR) << "error: invalid sum!";
}
return {}; // frame.empty() == true
} else {
LOG(ERROR) << "Failed to get stream data of " << stream
<< ", unsupported or disabled";
return {}; // frame.empty() == true
}
}
std::vector<api::StreamData> Synthetic::GetStreamDatas(const Stream &stream) {
+ return getProcessorWithStream(stream)->GetStreamDatas(stream);
auto &&mode = GetStreamEnabledMode(stream);
if (mode == MODE_NATIVE) {
auto &&device = api_->device();
std::vector<api::StreamData> datas;
for (auto &&data : device->GetStreamDatas(stream)) {
datas.push_back(data2api(data));
}
return datas;
} else if (mode == MODE_SYNTHETIC) {
return {GetStreamData(stream)};
} else {
LOG(ERROR) << "Failed to get stream data of " << stream
<< ", unsupported or disabled";
}
return {};
}
void Synthetic::SetPlugin(std::shared_ptr<Plugin> plugin) {
@@ -460,39 +296,6 @@ bool Synthetic::HasPlugin() const {
return plugin_ != nullptr;
}
void Synthetic::InitStreamSupports() {
auto &&device = api_->device();
if (device->Supports(Stream::LEFT) && device->Supports(Stream::RIGHT)) {
auto processor = getProcessorWithStream(Stream::LEFT);
for (unsigned int i = 0; i< processor->target_streams_.size(); i++) {
if (processor->target_streams_[i].stream == Stream::LEFT) {
processor->target_streams_[i].support_mode_ = MODE_NATIVE;
}
if (processor->target_streams_[i].stream == Stream::RIGHT) {
processor->target_streams_[i].support_mode_ = MODE_NATIVE;
}
}
std::vector<Stream> stream_chain{
Stream::LEFT_RECTIFIED, Stream::RIGHT_RECTIFIED,
Stream::DISPARITY, Stream::DISPARITY_NORMALIZED,
Stream::POINTS, Stream::DEPTH};
for (auto &&stream : stream_chain) {
auto processor = getProcessorWithStream(stream);
for (unsigned int i = 0; i< processor->target_streams_.size(); i++) {
if (processor->target_streams_[i].stream == stream) {
if (device->Supports(stream)) {
processor->target_streams_[i].support_mode_ = MODE_NATIVE;
processor->target_streams_[i].enabled_mode_ = MODE_NATIVE;
} else {
processor->target_streams_[i].support_mode_ = MODE_SYNTHETIC;
}
}
}
}
}
}
Synthetic::mode_t Synthetic::GetStreamEnabledMode(const Stream &stream) const {
if (checkControlDateWithStream(stream)) {
auto data = getControlDateWithStream(stream);
@@ -501,14 +304,6 @@ Synthetic::mode_t Synthetic::GetStreamEnabledMode(const Stream &stream) const {
return MODE_LAST;
}
- bool Synthetic::IsStreamEnabledNative(const Stream &stream) const {
- return GetStreamEnabledMode(stream) == MODE_NATIVE;
- }
- bool Synthetic::IsStreamEnabledSynthetic(const Stream &stream) const {
- return GetStreamEnabledMode(stream) == MODE_SYNTHETIC;
- }
void Synthetic::InitProcessors() {
std::shared_ptr<Processor> rectify_processor = nullptr;
std::shared_ptr<Processor> points_processor = nullptr;
@@ -522,7 +317,7 @@ void Synthetic::InitProcessors() {
DISPARITY_NORM_PROC_PERIOD);
auto root_processor =
- std::make_shared<RootProcessor>(ROOT_PROC_PERIOD);
+ std::make_shared<s1s2Processor>(api_->device(), ROOT_PROC_PERIOD);
if (calib_model_ == CalibrationModel::PINHOLE) {
// PINHOLE
@@ -567,21 +362,21 @@
}
rectify_processor->addTargetStreams(
- {Stream::LEFT_RECTIFIED, Mode::MODE_LAST, Mode::MODE_LAST, nullptr});
+ {Stream::LEFT_RECTIFIED, Mode::MODE_LAST, nullptr});
rectify_processor->addTargetStreams(
- {Stream::RIGHT_RECTIFIED, Mode::MODE_LAST, Mode::MODE_LAST, nullptr});
+ {Stream::RIGHT_RECTIFIED, Mode::MODE_LAST, nullptr});
disparity_processor->addTargetStreams(
- {Stream::DISPARITY, Mode::MODE_LAST, Mode::MODE_LAST, nullptr});
+ {Stream::DISPARITY, Mode::MODE_LAST, nullptr});
disparitynormalized_processor->addTargetStreams(
- {Stream::DISPARITY_NORMALIZED, Mode::MODE_LAST, Mode::MODE_LAST, nullptr});
+ {Stream::DISPARITY_NORMALIZED, Mode::MODE_LAST, nullptr});
points_processor->addTargetStreams(
- {Stream::POINTS, Mode::MODE_LAST, Mode::MODE_LAST, nullptr});
+ {Stream::POINTS, Mode::MODE_LAST, nullptr});
depth_processor->addTargetStreams(
- {Stream::DEPTH, Mode::MODE_LAST, Mode::MODE_LAST, nullptr});
+ {Stream::DEPTH, Mode::MODE_LAST, nullptr});
root_processor->addTargetStreams(
- {Stream::LEFT, Mode::MODE_NATIVE, Mode::MODE_NATIVE, nullptr});
+ {Stream::LEFT, Mode::MODE_LAST, nullptr});
root_processor->addTargetStreams(
- {Stream::RIGHT, Mode::MODE_NATIVE, Mode::MODE_NATIVE, nullptr});
+ {Stream::RIGHT, Mode::MODE_LAST, nullptr});
processors_.push_back(root_processor);
processors_.push_back(rectify_processor);
@@ -590,6 +385,8 @@ void Synthetic::InitProcessors() {
processors_.push_back(points_processor);
processors_.push_back(depth_processor);
using namespace std::placeholders;  // NOLINT
+ root_processor->SetProcessCallback(
+ std::bind(&Synthetic::OnDeviceProcess, this, _1, _2, _3));
rectify_processor->SetProcessCallback(
std::bind(&Synthetic::OnRectifyProcess, this, _1, _2, _3));
disparity_processor->SetProcessCallback(
@@ -601,6 +398,8 @@
depth_processor->SetProcessCallback(
std::bind(&Synthetic::OnDepthProcess, this, _1, _2, _3));
+ root_processor->SetPostProcessCallback(
+ std::bind(&Synthetic::OnDevicePostProcess, this, _1));
rectify_processor->SetPostProcessCallback(
std::bind(&Synthetic::OnRectifyPostProcess, this, _1));
disparity_processor->SetPostProcessCallback(
@@ -615,100 +414,11 @@
processor_ = root_processor;
}
+ bool Synthetic::OnDeviceProcess(
+ Object *const in, Object *const out,
+ std::shared_ptr<Processor> const parent) {
+ MYNTEYE_UNUSED(parent)
+ return GetStreamEnabledMode(Stream::LEFT) != MODE_SYNTHETIC;
+ }
void Synthetic::ProcessNativeStream(
const Stream &stream, const api::StreamData &data) {
NotifyStreamData(stream, data);
if (stream == Stream::LEFT || stream == Stream::RIGHT) {
std::unique_lock<std::mutex> lk(mtx_left_right_ready_);
static api::StreamData left_data, right_data;
if (stream == Stream::LEFT) {
left_data = data;
} else if (stream == Stream::RIGHT) {
right_data = data;
}
if (left_data.img && right_data.img &&
left_data.img->frame_id == right_data.img->frame_id) {
std::shared_ptr<Processor> processor = nullptr;
if (calib_model_ == CalibrationModel::PINHOLE) {
processor = find_processor<RectifyProcessorOCV>(processor_);
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
processor = find_processor<RectifyProcessor>(processor_);
#endif
} else {
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_ << ", use default pinhole model";
processor = find_processor<RectifyProcessorOCV>(processor_);
}
processor->Process(data_obj(left_data, right_data));
}
return;
}
if (stream == Stream::LEFT_RECTIFIED || stream == Stream::RIGHT_RECTIFIED) {
static api::StreamData left_rect_data, right_rect_data;
if (stream == Stream::LEFT_RECTIFIED) {
left_rect_data = data;
} else if (stream == Stream::RIGHT_RECTIFIED) {
right_rect_data = data;
}
if (left_rect_data.img && right_rect_data.img &&
left_rect_data.img->frame_id == right_rect_data.img->frame_id) {
std::string name = RectifyProcessorOCV::NAME;
if (calib_model_ == CalibrationModel::PINHOLE) {
name = RectifyProcessorOCV::NAME;
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
name = RectifyProcessor::NAME;
#endif
}
process_childs(processor_, name,
data_obj(left_rect_data, right_rect_data));
}
return;
}
switch (stream) {
case Stream::DISPARITY: {
process_childs(processor_, DisparityProcessor::NAME, data_obj(data));
} break;
case Stream::DISPARITY_NORMALIZED: {
process_childs(processor_, DisparityNormalizedProcessor::NAME,
data_obj(data));
} break;
case Stream::POINTS: {
if (calib_model_ == CalibrationModel::PINHOLE) {
// PINHOLE
process_childs(processor_, PointsProcessorOCV::NAME, data_obj(data));
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
// KANNALA_BRANDT
process_childs(processor_, PointsProcessor::NAME, data_obj(data));
#endif
} else {
// UNKNOW
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_;
}
} break;
case Stream::DEPTH: {
if (calib_model_ == CalibrationModel::PINHOLE) {
// PINHOLE
process_childs(processor_, DepthProcessorOCV::NAME, data_obj(data));
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
// KANNALA_BRANDT
process_childs(processor_, DepthProcessor::NAME, data_obj(data));
#endif
} else {
// UNKNOW
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_;
}
} break;
default:
break;
}
}
bool Synthetic::OnRectifyProcess(
@@ -762,6 +472,20 @@ bool Synthetic::OnDepthProcess(
return GetStreamEnabledMode(Stream::DEPTH) != MODE_SYNTHETIC;
}
void Synthetic::OnDevicePostProcess(Object *const out) {
const ObjMat2 *output = Object::Cast<ObjMat2>(out);
NotifyStreamData(Stream::LEFT, obj_data_first(output));
NotifyStreamData(Stream::RIGHT, obj_data_second(output));
if (HasStreamCallback(Stream::LEFT)) {
auto data = getControlDateWithStream(Stream::LEFT);
data.stream_callback(obj_data_first(output));
}
if (HasStreamCallback(Stream::RIGHT)) {
auto data = getControlDateWithStream(Stream::RIGHT);
data.stream_callback(obj_data_second(output));
}
}
void Synthetic::OnRectifyPostProcess(Object *const out) {
const ObjMat2 *output = Object::Cast<ObjMat2>(out);
NotifyStreamData(Stream::LEFT_RECTIFIED, obj_data_first(output));


@@ -47,7 +47,6 @@ class Synthetic {
struct stream_control_t {
Stream stream;
- mode_t support_mode_;
mode_t enabled_mode_;
stream_callback_t stream_callback;
};
@@ -60,7 +59,6 @@
void NotifyImageParamsChanged();
bool Supports(const Stream &stream) const;
- mode_t SupportsMode(const Stream &stream) const;
void EnableStreamData(const Stream &stream);
void DisableStreamData(const Stream &stream);
@@ -96,11 +94,8 @@ class Synthetic {
private:
void InitCalibInfo();
- void InitStreamSupports();
mode_t GetStreamEnabledMode(const Stream &stream) const;
- bool IsStreamEnabledNative(const Stream &stream) const;
- bool IsStreamEnabledSynthetic(const Stream &stream) const;
void EnableStreamData(const Stream &stream, std::uint32_t depth);
void DisableStreamData(const Stream &stream, std::uint32_t depth);
@@ -112,8 +107,9 @@ class Synthetic {
template <class T>
bool DeactivateProcessor(bool tree = false);
- void ProcessNativeStream(const Stream &stream, const api::StreamData &data);
+ bool OnDeviceProcess(
+ Object *const in, Object *const out,
+ std::shared_ptr<Processor> const parent);
bool OnRectifyProcess(
Object *const in, Object *const out,
std::shared_ptr<Processor> const parent);
@@ -130,6 +126,7 @@
Object *const in, Object *const out,
std::shared_ptr<Processor> const parent);
+ void OnDevicePostProcess(Object *const out);
void OnRectifyPostProcess(Object *const out);
void OnDisparityPostProcess(Object *const out);
void OnDisparityNormalizedPostProcess(Object *const out);
@@ -158,7 +155,7 @@
};
class SyntheticProcessorPart {
- private:
+ protected:
inline std::vector<Synthetic::stream_control_t> getTargetStreams() {
return target_streams_;
}