feat(synthetic): add DisparityComputingMethod
parent 4011880148
commit 0b71d05813
@@ -24,7 +24,6 @@ option(WITH_API "Build with API layer, need OpenCV" ON)
option(WITH_DEVICE_INFO_REQUIRED "Build with device info required" ON)

option(WITH_CAM_MODELS "Build with more camera models, WITH_API must be ON" OFF)
option(WITH_BM_SOBEL_FILTER "Build with bm and sobel filter, need OpenCV contrib" OFF)

# 3rdparty components
@@ -224,6 +224,13 @@ class MYNTEYE_API API {
* Get the option value.
*/
std::int32_t GetOptionValue(const Option &option) const;

/**
* Set the disparity computing method.
*/
void SetDisparityComputingMethodType(
const DisparityComputingMethod &MethodType);

/**
* Set the option value.
*/
@@ -542,6 +542,20 @@ struct MYNTEYE_API Extrinsics {
MYNTEYE_API
std::ostream &operator<<(std::ostream &os, const Extrinsics &ex);

/**
* @ingroup disparity
* Camera disparity computing method type.
*/
enum class DisparityComputingMethod : std::uint8_t {
/** sgbm */
SGBM = 0,
/** bm */
BM = 1,
/** unknown */
UNKNOW
};

/**
* @defgroup datatypes Datatypes
* @brief Public data types.
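The new enum is the public switch between OpenCV's semi-global (SGBM) and plain (BM) block matchers. A minimal, hypothetical helper for wiring a command-line flag onto it might look as follows; MethodFromString is not part of the SDK, and the enum is repeated here only to keep the sketch self-contained (an application would include mynteye/types.h instead).

    // Hypothetical helper: map a flag value such as "--disparity=bm" onto the enum.
    #include <cstdint>
    #include <string>

    enum class DisparityComputingMethod : std::uint8_t { SGBM = 0, BM = 1, UNKNOW };

    inline DisparityComputingMethod MethodFromString(const std::string &name) {
      if (name == "sgbm") return DisparityComputingMethod::SGBM;  // semi-global block matching
      if (name == "bm") return DisparityComputingMethod::BM;      // plain block matching
      return DisparityComputingMethod::UNKNOW;                    // caller picks the fallback
    }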
@@ -29,6 +29,8 @@ int main(int argc, char *argv[]) {
// api->EnableStreamData(Stream::DISPARITY);
api->EnableStreamData(Stream::DISPARITY_NORMALIZED);

api->SetDisparityComputingMethodType(DisparityComputingMethod::BM);

api->Start(Source::VIDEO_STREAMING);

cv::namedWindow("frame");
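Because the method can also be changed while streaming, the sample could expose it on a key press. A hedged sketch of such an extension follows; the key bindings and the surrounding capture loop are assumptions, not part of this commit, and `api` is the instance created earlier in the sample.

    // Sketch: toggle the disparity back end from the keyboard inside the display loop.
    while (true) {
      // ... fetch and imshow the DISPARITY_NORMALIZED frame as in the sample ...
      char key = static_cast<char>(cv::waitKey(1));
      if (key == 's') {
        api->SetDisparityComputingMethodType(DisparityComputingMethod::SGBM);
      } else if (key == 'b') {
        api->SetDisparityComputingMethodType(DisparityComputingMethod::BM);
      } else if (key == 27 || key == 'q') {  // ESC or 'q' exits
        break;
      }
    }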
@@ -485,6 +485,11 @@ void API::EnablePlugin(const std::string &path) {
synthetic_->SetPlugin(plugin);
}

void API::SetDisparityComputingMethodType(
const DisparityComputingMethod &MethodType) {
synthetic_->SetDisparityComputingMethodType(MethodType);
}

std::shared_ptr<Device> API::device() {
return device_;
}
@@ -133,18 +133,36 @@ void iterate_processors_PtoC_before(
}

template <typename T>
void iterate_processors_CtoP_before(
void iterate_processor_PtoC_after(
T processor, std::function<void(std::shared_ptr<Processor>)> fn) {
if (processor->GetParent() != nullptr)
iterate_processors_CtoP_before(processor->GetParent(), fn);
fn(processor);
auto chids = processor->GetChilds();
for (auto it : chids) {
iterate_processor_PtoC_after(it, fn);
}
}
template <typename T>
void iterate_processor_PtoC_before(
T processor, std::function<void(std::shared_ptr<Processor>)> fn) {
auto chids = processor->GetChilds();
for (auto it : chids) {
iterate_processor_PtoC_before(it, fn);
}
fn(processor);
}
template <typename T>
void iterate_processors_CtoP_after(
void iterate_processor_CtoP_before(
T processor, std::function<void(std::shared_ptr<Processor>)> fn) {
if (processor->GetParent() != nullptr)
iterate_processor_CtoP_before(processor->GetParent(), fn);
fn(processor);
}
template <typename T>
void iterate_processor_CtoP_after(
T processor, std::function<void(std::shared_ptr<Processor>)> fn) {
fn(processor);
if (processor->GetParent() != nullptr)
iterate_processors_CtoP_after(processor->GetParent(), fn);
iterate_processor_CtoP_after(processor->GetParent(), fn);
}

MYNTEYE_END_NAMESPACE
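The renamed helpers give four traversal orders over the processor tree: parent-to-child or child-to-parent, with the callback fired before or after the recursive step. The Synthetic changes later in this diff rely on two of them: EnableStreamData walks from the root ancestor down to the requested processor (iterate_processor_CtoP_before), while DisableStreamData visits all children first and the processor itself last (iterate_processor_PtoC_before). A standalone sketch of those two orders, using a hypothetical Node type rather than the SDK's Processor:

    // Standalone illustration of the two traversal orders (Node is a hypothetical
    // stand-in for the SDK's Processor; this is not SDK code).
    #include <functional>
    #include <iostream>
    #include <memory>
    #include <string>
    #include <vector>

    struct Node {
      std::string name;
      Node *parent = nullptr;
      std::vector<std::shared_ptr<Node>> childs;
    };

    // Like iterate_processor_CtoP_before: ancestors first (root downwards), self last.
    void CtoPBefore(Node *n, std::function<void(Node *)> fn) {
      if (n->parent != nullptr) CtoPBefore(n->parent, fn);
      fn(n);
    }

    // Like iterate_processor_PtoC_before: children first (post-order), self last.
    void PtoCBefore(Node *n, std::function<void(Node *)> fn) {
      for (auto &child : n->childs) PtoCBefore(child.get(), fn);
      fn(n);
    }

    int main() {
      auto root = std::make_shared<Node>();
      auto rectify = std::make_shared<Node>();
      auto disparity = std::make_shared<Node>();
      root->name = "root"; rectify->name = "rectify"; disparity->name = "disparity";
      rectify->parent = root.get(); root->childs.push_back(rectify);
      disparity->parent = rectify.get(); rectify->childs.push_back(disparity);

      auto print = [](Node *n) { std::cout << n->name << " "; };
      CtoPBefore(disparity.get(), print);  // prints: root rectify disparity
      std::cout << "\n";
      PtoCBefore(root.get(), print);       // prints: disparity rectify root
      std::cout << "\n";
      return 0;
    }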
@@ -20,21 +20,16 @@
#include "mynteye/logger.h"

#define WITH_BM_SOBEL_FILTER

MYNTEYE_BEGIN_NAMESPACE

const char DisparityProcessor::NAME[] = "DisparityProcessor";

DisparityProcessor::DisparityProcessor(DisparityProcessorType type,
DisparityProcessor::DisparityProcessor(DisparityComputingMethod type,
std::int32_t proc_period)
: Processor(std::move(proc_period)), type_(type) {
VLOG(2) << __func__ << ": proc_period=" << proc_period;

if (type_ == DisparityProcessorType::SGBM) {
int sgbmWinSize = 3;
int numberOfDisparities = 64;

#ifdef WITH_OPENCV2
// StereoSGBM
// http://docs.opencv.org/2.4/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html?#stereosgbm
@@ -51,22 +46,6 @@ DisparityProcessor::DisparityProcessor(DisparityProcessorType type,
100, // speckleWindowSize
32, // speckleRange
false)); // fullDP
#else
sgbm_matcher = cv::StereoSGBM::create(0, 16, 3);
sgbm_matcher->setPreFilterCap(63);
sgbm_matcher->setBlockSize(sgbmWinSize);
sgbm_matcher->setP1(8 * sgbmWinSize * sgbmWinSize);
sgbm_matcher->setP2(32 * sgbmWinSize * sgbmWinSize);
sgbm_matcher->setMinDisparity(0);
sgbm_matcher->setNumDisparities(numberOfDisparities);
sgbm_matcher->setUniquenessRatio(10);
sgbm_matcher->setSpeckleWindowSize(100);
sgbm_matcher->setSpeckleRange(32);
sgbm_matcher->setDisp12MaxDiff(1);
#endif
#ifdef WITH_BM_SOBEL_FILTER
} else if (type_ == DisparityProcessorType::BM) {
#ifdef WITH_OPENCV2
LOG(ERROR) << "not supported in opencv 2.x";
// int bmWinSize = 3;
// // StereoBM
@@ -83,6 +62,18 @@ DisparityProcessor::DisparityProcessor(DisparityProcessorType type,
// 100,
// 4));
#else
sgbm_matcher = cv::StereoSGBM::create(0, 16, 3);
sgbm_matcher->setPreFilterCap(63);
sgbm_matcher->setBlockSize(sgbmWinSize);
sgbm_matcher->setP1(8 * sgbmWinSize * sgbmWinSize);
sgbm_matcher->setP2(32 * sgbmWinSize * sgbmWinSize);
sgbm_matcher->setMinDisparity(0);
sgbm_matcher->setNumDisparities(numberOfDisparities);
sgbm_matcher->setUniquenessRatio(10);
sgbm_matcher->setSpeckleWindowSize(100);
sgbm_matcher->setSpeckleRange(32);
sgbm_matcher->setDisp12MaxDiff(1);

bm_matcher = cv::StereoBM::create(0, 3);
bm_matcher->setPreFilterSize(9);
bm_matcher->setPreFilterCap(31);
@@ -95,48 +86,23 @@ DisparityProcessor::DisparityProcessor(DisparityProcessorType type,
bm_matcher->setSpeckleRange(4);
bm_matcher->setPreFilterType(cv::StereoBM::PREFILTER_XSOBEL);
#endif
#endif
} else {
LOG(ERROR) << "no enum DisparityProcessorType, use default sgbm";
int sgbmWinSize = 3;
int numberOfDisparities = 64;
NotifyComputingTypeChanged(type_);
}

#ifdef WITH_OPENCV2
// StereoSGBM
// http://docs.opencv.org/2.4/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html?#stereosgbm
sgbm_matcher = cv::Ptr<cv::StereoSGBM>(
new cv::StereoSGBM(
0, // minDisparity
numberOfDisparities, // numDisparities
sgbmWinSize, // SADWindowSize
8 * sgbmWinSize * sgbmWinSize, // P1
32 * sgbmWinSize * sgbmWinSize, // P2
1, // disp12MaxDiff
63, // preFilterCap
10, // uniquenessRatio
100, // speckleWindowSize
32, // speckleRange
false)); // fullDP
#else
sgbm_matcher = cv::StereoSGBM::create(0, 16, 3);
sgbm_matcher->setPreFilterCap(63);
sgbm_matcher->setBlockSize(sgbmWinSize);
sgbm_matcher->setP1(8 * sgbmWinSize * sgbmWinSize);
sgbm_matcher->setP2(32 * sgbmWinSize * sgbmWinSize);
sgbm_matcher->setMinDisparity(0);
sgbm_matcher->setNumDisparities(numberOfDisparities);
sgbm_matcher->setUniquenessRatio(10);
sgbm_matcher->setSpeckleWindowSize(100);
sgbm_matcher->setSpeckleRange(32);
sgbm_matcher->setDisp12MaxDiff(1);
#endif
}
void DisparityProcessor::NotifyComputingTypeChanged(
const DisparityComputingMethod &MethodType) {
type_ = MethodType;
}

DisparityProcessor::~DisparityProcessor() {
VLOG(2) << __func__;
}

void DisparityProcessor::SetDisparityComputingMethodType(
const DisparityComputingMethod &MethodType) {
NotifyComputingTypeChanged(MethodType);
}

std::string DisparityProcessor::Name() {
return NAME;
}
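For reference, the SGBM tuning above can be reproduced with plain OpenCV 3.x/4.x calls outside the SDK. A minimal sketch using the same parameter values; the image file names are placeholders, and the BM branch is omitted because part of its configuration falls in lines elided from this diff.

    // Sketch: build a StereoSGBM matcher with the constructor's tuning and
    // compute a disparity map from a rectified stereo pair (OpenCV 3.x/4.x API).
    #include <opencv2/calib3d.hpp>
    #include <opencv2/imgcodecs.hpp>

    int main() {
      const int sgbmWinSize = 3;
      const int numberOfDisparities = 64;

      cv::Ptr<cv::StereoSGBM> sgbm = cv::StereoSGBM::create(0, 16, 3);
      sgbm->setPreFilterCap(63);
      sgbm->setBlockSize(sgbmWinSize);
      sgbm->setP1(8 * sgbmWinSize * sgbmWinSize);
      sgbm->setP2(32 * sgbmWinSize * sgbmWinSize);
      sgbm->setMinDisparity(0);
      sgbm->setNumDisparities(numberOfDisparities);
      sgbm->setUniquenessRatio(10);
      sgbm->setSpeckleWindowSize(100);
      sgbm->setSpeckleRange(32);
      sgbm->setDisp12MaxDiff(1);

      // "left.png" / "right.png" are placeholder rectified grayscale images.
      cv::Mat left = cv::imread("left.png", cv::IMREAD_GRAYSCALE);
      cv::Mat right = cv::imread("right.png", cv::IMREAD_GRAYSCALE);
      cv::Mat disparity;
      sgbm->compute(left, right, disparity);  // CV_16S output, values scaled by 16
      return 0;
    }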
@@ -161,16 +127,14 @@ bool DisparityProcessor::OnProcess(
// It contains disparity values scaled by 16. So, to get the floating-point
// disparity map,
// you need to divide each disp element by 16.
if (type_ == DisparityProcessorType::SGBM) {
if (type_ == DisparityComputingMethod::SGBM) {
(*sgbm_matcher)(input->first, input->second, disparity);
#ifdef WITH_BM_SOBEL_FILTER
} else if (type_ == DisparityProcessorType::BM) {
} else if (type_ == DisparityComputingMethod::BM) {
LOG(ERROR) << "not supported in opencv 2.x";
// cv::Mat tmp1, tmp2;
// cv::cvtColor(input->first, tmp1, CV_RGB2GRAY);
// cv::cvtColor(input->second, tmp2, CV_RGB2GRAY);
// (*bm_matcher)(tmp1, tmp2, disparity);
#endif
}
#else
// compute()
@@ -180,10 +144,9 @@ bool DisparityProcessor::OnProcess(
// disparity map
// (where each disparity value has 4 fractional bits),
// whereas other algorithms output 32-bit floating-point disparity map.
if (type_ == DisparityProcessorType::SGBM) {
if (type_ == DisparityComputingMethod::SGBM) {
sgbm_matcher->compute(input->first, input->second, disparity);
#ifdef WITH_BM_SOBEL_FILTER
} else if (type_ == DisparityProcessorType::BM) {
} else if (type_ == DisparityComputingMethod::BM) {
cv::Mat tmp1, tmp2;
if (input->first.channels() == 1) {
// s1030
@@ -195,7 +158,6 @@ bool DisparityProcessor::OnProcess(
cv::cvtColor(input->second, tmp2, CV_RGB2GRAY);
}
bm_matcher->compute(tmp1, tmp2, disparity);
#endif
} else {
// default
sgbm_matcher->compute(input->first, input->second, disparity);
@@ -17,6 +17,7 @@
#include <string>
#include "mynteye/api/processor.h"
#include "mynteye/types.h"

namespace cv {
@@ -24,27 +25,20 @@ class StereoSGBM;
class StereoBM;
} // namespace cv

enum class DisparityProcessorType : std::uint8_t {
/** bm */
SGBM = 0,
/** sgbm */
BM = 1,
/** unknow */
UNKNOW
};

MYNTEYE_BEGIN_NAMESPACE

class DisparityProcessor : public Processor {
public:
static const char NAME[];

explicit DisparityProcessor(DisparityProcessorType type,
explicit DisparityProcessor(DisparityComputingMethod type,
std::int32_t proc_period = 0);
virtual ~DisparityProcessor();

std::string Name() override;
void SetDisparityComputingMethodType(
const DisparityComputingMethod &MethodType);
void NotifyComputingTypeChanged(const DisparityComputingMethod &MethodType);

protected:
Object *OnCreateOutput() override;
@@ -55,7 +49,7 @@ class DisparityProcessor : public Processor {
private:
cv::Ptr<cv::StereoSGBM> sgbm_matcher;
cv::Ptr<cv::StereoBM> bm_matcher;
DisparityProcessorType type_;
DisparityComputingMethod type_;
};

MYNTEYE_END_NAMESPACE
@@ -22,9 +22,7 @@ MYNTEYE_BEGIN_NAMESPACE
const char RootProcessor::NAME[] = "RootProcessor";

RootProcessor::RootProcessor(std::int32_t proc_period)
: Processor(std::move(proc_period)) {
// todo
}
: Processor(std::move(proc_period)) {}
RootProcessor::~RootProcessor() {
VLOG(2) << __func__;
}
@@ -40,14 +38,6 @@ bool RootProcessor::OnProcess(
Object *const in, Object *const out,
std::shared_ptr<Processor> const parent) {
MYNTEYE_UNUSED(parent)
// const ObjMat2 *input = Object::Cast<ObjMat2>(in);
// ObjMat2 *output = Object::Cast<ObjMat2>(out);
// cv::remap(input->first, output->first, map11, map12, cv::INTER_LINEAR);
// cv::remap(input->second, output->second, map21, map22, cv::INTER_LINEAR);
// output->first_id = input->first_id;
// output->first_data = input->first_data;
// output->second_id = input->second_id;
// output->second_data = input->second_data;
return true;
}
MYNTEYE_END_NAMESPACE
@@ -42,6 +42,7 @@
#define DISPARITY_NORM_PROC_PERIOD 0
#define POINTS_PROC_PERIOD 0
#define DEPTH_PROC_PERIOD 0
#define ROOT_PROC_PERIOD 0

MYNTEYE_BEGIN_NAMESPACE
@@ -183,21 +184,6 @@ void Synthetic::setControlDateCallbackWithStream(
LOG(ERROR) << "ERROR: no suited processor for stream "<< ctr_data.stream;
}

// void Synthetic::setControlDateModeWithStream(
// const struct stream_control_t& ctr_data) {
// for (auto &&it : processors_) {
// int i = 0;
// for (auto it_s : it->getTargetStreams()) {
// if (it_s.stream == ctr_data.stream) {
// it->target_streams_[i].mode = ctr_data.mode;
// return;
// }
// i++;
// }
// }
// LOG(ERROR) << "ERROR: no suited processor for stream "<< ctr_data.stream;
// }

bool Synthetic::checkControlDateWithStream(const Stream& stream) const {
for (auto &&it : processors_) {
for (auto it_s : it->getTargetStreams()) {
@@ -209,40 +195,25 @@ bool Synthetic::checkControlDateWithStream(const Stream& stream) const {
return false;
}

// bool Synthetic::Supports(const Stream &stream) const {
// return checkControlDateWithStream(stream);
// }

// Synthetic::status_mode_t Synthetic::GetStreamStatusMode(
// const Stream &stream) const {
// if (checkControlDateWithStream(stream)) {
// auto ctrData = getControlDateWithStream(stream);
// return ctrData.mode;
// } else {
// return MODE_STATUS_LAST;
// }
// }

// void Synthetic::EnableStreamData(const Stream &stream) {
// // Activate processors of synthetic stream
// auto processor = getProcessorWithStream(stream);
// iterate_processors_CtoP_before(processor,
// [](std::shared_ptr<Processor> proce){
// auto streams = proce->getTargetStreams();
// int act_tag = 0;
// for (unsigned int i = 0; i < proce->getStreamsSum() ; i++) {
// if (proce->target_streams_[i].mode == MODE_STATUS_DISABLE) {
// act_tag++;
// proce->target_streams_[i].mode = MODE_STATUS_ENABLE;
// }
// }
// if (act_tag > 0) {
// std::cout << proce->Name() << "active" << std::endl;
// proce->Activate();
// }
// // std::cout <<std::endl;
// });
// }
void Synthetic::EnableStreamData(const Stream &stream) {
// Activate processors of synthetic stream
auto processor = getProcessorWithStream(stream);
iterate_processor_CtoP_before(processor,
[](std::shared_ptr<Processor> proce){
auto streams = proce->getTargetStreams();
int act_tag = 0;
for (unsigned int i = 0; i < proce->getStreamsSum() ; i++) {
if (proce->target_streams_[i].enabled_mode_ == MODE_LAST) {
act_tag++;
proce->target_streams_[i].enabled_mode_ = MODE_SYNTHETIC;
}
}
if (act_tag > 0 && !proce->IsActivated()) {
// std::cout << proce->Name() << " Active now" << std::endl;
proce->Activate();
}
});
}

bool Synthetic::Supports(const Stream &stream) const {
@@ -257,12 +228,23 @@ Synthetic::mode_t Synthetic::SupportsMode(const Stream &stream) const {
return MODE_LAST;
}

void Synthetic::EnableStreamData(const Stream &stream) {
EnableStreamData(stream, 0);
}

void Synthetic::DisableStreamData(const Stream &stream) {
DisableStreamData(stream, 0);
auto processor = getProcessorWithStream(stream);
iterate_processor_PtoC_before(processor,
[](std::shared_ptr<Processor> proce){
auto streams = proce->getTargetStreams();
int act_tag = 0;
for (unsigned int i = 0; i < proce->getStreamsSum() ; i++) {
if (proce->target_streams_[i].enabled_mode_ == MODE_SYNTHETIC) {
act_tag++;
proce->target_streams_[i].enabled_mode_ = MODE_LAST;
}
}
if (act_tag > 0 && proce->IsActivated()) {
// std::cout << proce->Name() << "Deactive now" << std::endl;
proce->Deactivate();
}
});
}

bool Synthetic::IsStreamDataEnabled(const Stream &stream) const {
@@ -349,7 +331,6 @@ api::StreamData Synthetic::GetStreamData(const Stream &stream) {
auto sum = processor->getStreamsSum();
auto &&out = processor->GetOutput();
static std::shared_ptr<ObjMat2> output = nullptr;
std::cout << processor->Name() << stream <<std::endl;
if (sum == 1) {
if (out != nullptr) {
auto &&output = Object::Cast<ObjMat>(out);
@@ -469,205 +450,6 @@ bool Synthetic::IsStreamEnabledSynthetic(const Stream &stream) const {
return GetStreamEnabledMode(stream) == MODE_SYNTHETIC;
}

void Synthetic::EnableStreamData(const Stream &stream, std::uint32_t depth) {
if (IsStreamDataEnabled(stream))
return;
// Activate processors of synthetic stream

auto processor = getProcessorWithStream(stream);
for (unsigned int i = 0; i< processor->target_streams_.size(); i++) {
if (processor->target_streams_[i].stream == stream) {
processor->target_streams_[i].enabled_mode_ = MODE_SYNTHETIC;
}
}
switch (stream) {
case Stream::LEFT_RECTIFIED: {
if (!IsStreamDataEnabled(Stream::LEFT))
break;
if (calib_model_ == CalibrationModel::PINHOLE) {
CHECK(ActivateProcessor<RectifyProcessorOCV>());
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
CHECK(ActivateProcessor<RectifyProcessor>());
#endif
} else {
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_ << ", use default pinhole model";
CHECK(ActivateProcessor<RectifyProcessorOCV>());
}
} return;
case Stream::RIGHT_RECTIFIED: {
if (!IsStreamDataEnabled(Stream::RIGHT))
break;
if (calib_model_ == CalibrationModel::PINHOLE) {
CHECK(ActivateProcessor<RectifyProcessorOCV>());
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
CHECK(ActivateProcessor<RectifyProcessor>());
#endif
} else {
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_ << ", use default pinhole model";
CHECK(ActivateProcessor<RectifyProcessorOCV>());
}
} return;
case Stream::DISPARITY: {
EnableStreamData(Stream::LEFT_RECTIFIED, depth + 1);
EnableStreamData(Stream::RIGHT_RECTIFIED, depth + 1);
CHECK(ActivateProcessor<DisparityProcessor>());
} return;
case Stream::DISPARITY_NORMALIZED: {
EnableStreamData(Stream::DISPARITY, depth + 1);
CHECK(ActivateProcessor<DisparityNormalizedProcessor>());
} return;
case Stream::POINTS: {
if (calib_model_ == CalibrationModel::PINHOLE) {
EnableStreamData(Stream::DISPARITY, depth + 1);
CHECK(ActivateProcessor<PointsProcessorOCV>());
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
EnableStreamData(Stream::DEPTH, depth + 1);
CHECK(ActivateProcessor<PointsProcessor>());
#endif
} else {
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_;
}
} return;
case Stream::DEPTH: {
if (calib_model_ == CalibrationModel::PINHOLE) {
EnableStreamData(Stream::POINTS, depth + 1);
CHECK(ActivateProcessor<DepthProcessorOCV>());
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
EnableStreamData(Stream::DISPARITY, depth + 1);
CHECK(ActivateProcessor<DepthProcessor>());
#endif
} else {
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_;
}
} return;
default: break;
}
if (depth == 0) {
LOG(WARNING) << "Enable stream data of " << stream << " failed";
}
}

void Synthetic::DisableStreamData(const Stream &stream, std::uint32_t depth) {
if (!IsStreamDataEnabled(stream))
return;
// Deactivate processors of synthetic stream
auto data = getControlDateWithStream(stream);
if (data.enabled_mode_ != MODE_NATIVE) {
data.enabled_mode_ = MODE_LAST;
switch (stream) {
case Stream::LEFT_RECTIFIED: {
if (IsStreamEnabledSynthetic(Stream::DISPARITY)) {
DisableStreamData(Stream::DISPARITY, depth + 1);
}
if (IsStreamEnabledSynthetic(Stream::RIGHT_RECTIFIED)) {
DisableStreamData(Stream::RIGHT_RECTIFIED, depth + 1);
}
if (calib_model_ == CalibrationModel::PINHOLE) {
DeactivateProcessor<RectifyProcessorOCV>();
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
DeactivateProcessor<RectifyProcessor>();
#endif
} else {
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_ << ", use default pinhole model";
DeactivateProcessor<RectifyProcessorOCV>();
}
} break;
case Stream::RIGHT_RECTIFIED: {
if (IsStreamEnabledSynthetic(Stream::DISPARITY)) {
DisableStreamData(Stream::DISPARITY, depth + 1);
}
if (IsStreamEnabledSynthetic(Stream::LEFT_RECTIFIED)) {
DisableStreamData(Stream::LEFT_RECTIFIED, depth + 1);
}
if (calib_model_ == CalibrationModel::PINHOLE) {
DeactivateProcessor<RectifyProcessorOCV>();
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
DeactivateProcessor<RectifyProcessor>();
#endif
} else {
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_ << ", use default pinhole model";
DeactivateProcessor<RectifyProcessorOCV>();
}
} break;
case Stream::DISPARITY: {
if (calib_model_ == CalibrationModel::PINHOLE) {
if (IsStreamEnabledSynthetic(Stream::DISPARITY_NORMALIZED)) {
DisableStreamData(Stream::DISPARITY_NORMALIZED, depth + 1);
}
if (IsStreamEnabledSynthetic(Stream::POINTS)) {
DisableStreamData(Stream::POINTS, depth + 1);
}
DeactivateProcessor<DisparityProcessor>();
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
if (IsStreamEnabledSynthetic(Stream::DISPARITY_NORMALIZED)) {
DisableStreamData(Stream::DISPARITY_NORMALIZED, depth + 1);
}
if (IsStreamEnabledSynthetic(Stream::DEPTH)) {
DisableStreamData(Stream::DEPTH, depth + 1);
}
DeactivateProcessor<DisparityProcessor>();
#endif
} else {
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_;
}
} break;
case Stream::DISPARITY_NORMALIZED: {
DeactivateProcessor<DisparityNormalizedProcessor>();
} break;
case Stream::POINTS: {
if (calib_model_ == CalibrationModel::PINHOLE) {
if (IsStreamEnabledSynthetic(Stream::DEPTH)) {
DisableStreamData(Stream::DEPTH, depth + 1);
}
DeactivateProcessor<PointsProcessorOCV>();
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
DeactivateProcessor<PointsProcessor>();
#endif
} else {
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_;
}
} break;
case Stream::DEPTH: {
if (calib_model_ == CalibrationModel::PINHOLE) {
DeactivateProcessor<DepthProcessorOCV>();
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
if (IsStreamEnabledSynthetic(Stream::POINTS)) {
DisableStreamData(Stream::POINTS, depth + 1);
}
DeactivateProcessor<DepthProcessor>();
#endif
} else {
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_;
}
} break;
default: return;
}
if (depth > 0) {
LOG(WARNING) << "Disable synthetic stream data of " << stream << " too";
}
} else if (depth == 0) {
LOG(WARNING) << "Disable native stream data of " << stream << " failed";
}
}

void Synthetic::InitProcessors() {
std::shared_ptr<Processor> rectify_processor = nullptr;
#ifdef WITH_CAM_MODELS
@@ -696,7 +478,7 @@ void Synthetic::InitProcessors() {
rectify_processor = rectify_processor_ocv;
}
auto &&disparity_processor =
std::make_shared<DisparityProcessor>(DisparityProcessorType::SGBM,
std::make_shared<DisparityProcessor>(DisparityComputingMethod::SGBM,
DISPARITY_PROC_PERIOD);
auto &&disparitynormalized_processor =
std::make_shared<DisparityNormalizedProcessor>(
@@ -1003,4 +785,15 @@ void Synthetic::OnDepthPostProcess(Object *const out) {
}
}

void Synthetic::SetDisparityComputingMethodType(
const DisparityComputingMethod &MethodType) {
if (checkControlDateWithStream(Stream::LEFT_RECTIFIED)) {
auto processor = find_processor<DisparityProcessor>(processor_);
if (processor)
processor->SetDisparityComputingMethodType(MethodType);
return;
}
LOG(ERROR) << "ERROR: no suited processor for disparity computing.";
}

MYNTEYE_END_NAMESPACE
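The forwarding chain introduced by this commit is API::SetDisparityComputingMethodType -> Synthetic::SetDisparityComputingMethodType -> DisparityProcessor::SetDisparityComputingMethodType, guarded by checkControlDateWithStream(Stream::LEFT_RECTIFIED) so the error above is logged when no processor serves the rectified stream. The SDK's find_processor itself is not shown in this diff; the following is only an illustrative sketch of that kind of typed lookup over a processor tree, again using a hypothetical Node type rather than Processor.

    // Illustrative only: find the first node of a given type in a tree via
    // dynamic_pointer_cast. This is not the SDK's find_processor implementation.
    #include <memory>
    #include <vector>

    struct Node {
      virtual ~Node() = default;
      std::vector<std::shared_ptr<Node>> childs;
    };

    template <class T>
    std::shared_ptr<T> FindNode(const std::shared_ptr<Node> &root) {
      if (auto hit = std::dynamic_pointer_cast<T>(root)) return hit;
      for (auto &child : root->childs) {
        if (auto hit = FindNode<T>(child)) return hit;
      }
      return nullptr;
    }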
@@ -80,6 +80,8 @@ class Synthetic {
const struct stream_control_t& ctr_data);
bool checkControlDateWithStream(const Stream& stream) const;
std::shared_ptr<Processor> getProcessorWithStream(const Stream& stream);
void SetDisparityComputingMethodType(
const DisparityComputingMethod &MethoType);

private:
void InitCalibInfo();