Merge branch 'develop' of http://gitlab.mynt.com/mynteye/mynt-eye-sdk-2 into develop

This commit is contained in:
Osenberg 2019-01-09 16:00:51 +08:00
commit 292b8155b1
16 changed files with 486 additions and 205 deletions

View File

@ -94,6 +94,18 @@ if(OS_WIN)
) )
endif() endif()
# rpath
# Configure install-time RPATH so installed binaries can locate their
# shared-library dependencies without LD_LIBRARY_PATH / DYLD_LIBRARY_PATH.
set(CMAKE_MACOSX_RPATH 1)
set(MYNTEYE_CMAKE_RPATH "")
if(WITH_OPENCV)
# Embed OpenCV's library directory so the SDK resolves OpenCV at run time.
list(APPEND MYNTEYE_CMAKE_RPATH ${OpenCV_LIB_PATH})
endif()
if(MYNTEYE_CMAKE_RPATH)
message(STATUS "RPATH: ${MYNTEYE_CMAKE_RPATH}")
set(CMAKE_INSTALL_RPATH "${MYNTEYE_CMAKE_RPATH}")
endif()
# targets # targets
add_definitions(-DMYNTEYE_EXPORTS) add_definitions(-DMYNTEYE_EXPORTS)
@ -218,13 +230,19 @@ if(WITH_API)
src/mynteye/api/dl.cc src/mynteye/api/dl.cc
src/mynteye/api/processor.cc src/mynteye/api/processor.cc
src/mynteye/api/synthetic.cc src/mynteye/api/synthetic.cc
src/mynteye/api/processor/rectify_processor.cc
src/mynteye/api/processor/disparity_processor.cc src/mynteye/api/processor/disparity_processor.cc
src/mynteye/api/processor/disparity_normalized_processor.cc src/mynteye/api/processor/disparity_normalized_processor.cc
src/mynteye/api/processor/depth_processor.cc
src/mynteye/api/processor/points_processor.cc
src/mynteye/api/processor/points_processor_ocv.cc src/mynteye/api/processor/points_processor_ocv.cc
src/mynteye/api/processor/depth_processor_ocv.cc src/mynteye/api/processor/depth_processor_ocv.cc
src/mynteye/api/processor/rectify_processor_ocv.cc
)
endif()
if(WITH_CAM_MODELS)
list(APPEND MYNTEYE_SRCS
src/mynteye/api/processor/depth_processor.cc
src/mynteye/api/processor/points_processor.cc
src/mynteye/api/processor/rectify_processor.cc
) )
endif() endif()
if(NOT WITH_GLOG) if(NOT WITH_GLOG)

View File

@ -41,6 +41,25 @@ if(${__index} GREATER -1)
set(WITH_OPENCV_WORLD TRUE) set(WITH_OPENCV_WORLD TRUE)
endif() endif()
# Some OpenCV installs (e.g. distro packages) do not export OpenCV_LIB_PATH;
# infer it from the first include directory by rewriting ".../include..." to
# ".../lib" and probing for a core OpenCV library there.
if(NOT OpenCV_LIB_PATH)
list(LENGTH OpenCV_INCLUDE_DIRS __length)
if(${__length} GREATER 0)
list(GET OpenCV_INCLUDE_DIRS 0 __include_dir)
# Rewrite the include dir into the sibling lib dir (e.g. /usr/include -> /usr/lib).
string(REGEX REPLACE "include.*$" "lib" __lib_dir "${__include_dir}")
# Probe for a known OpenCV core library; the multiarch subdir covers
# Debian/Ubuntu x86_64 layouts. NOTE(review): other multiarch triplets
# (arm, aarch64) are not probed and fall back to __lib_dir — confirm.
find_library(__opencv_lib
NAMES opencv_core3 opencv_core opencv_world
PATHS "${__lib_dir}" "${__lib_dir}/x86_64-linux-gnu"
NO_DEFAULT_PATH)
#message(STATUS "__opencv_lib: ${__opencv_lib}")
if(__opencv_lib)
get_filename_component(OpenCV_LIB_PATH "${__opencv_lib}" DIRECTORY)
else()
# No library found: use the rewritten directory as a best-effort guess.
set(OpenCV_LIB_PATH "${__lib_dir}")
endif()
#message(STATUS "OpenCV_LIB_PATH: ${OpenCV_LIB_PATH}")
endif()
endif()
if(MSVC OR MSYS OR MINGW) if(MSVC OR MSYS OR MINGW)
get_filename_component(OpenCV_LIB_SEARCH_PATH "${OpenCV_LIB_PATH}/../bin" ABSOLUTE) get_filename_component(OpenCV_LIB_SEARCH_PATH "${OpenCV_LIB_PATH}/../bin" ABSOLUTE)
else() else()

View File

@ -41,13 +41,23 @@ Object *DepthProcessor::OnCreateOutput() {
bool DepthProcessor::OnProcess( bool DepthProcessor::OnProcess(
Object *const in, Object *const out, Processor *const parent) { Object *const in, Object *const out, Processor *const parent) {
MYNTEYE_UNUSED(parent) MYNTEYE_UNUSED(parent)
// const ObjMat *input = Object::Cast<ObjMat>(in); const ObjMat *input = Object::Cast<ObjMat>(in);
// ObjMat *output = Object::Cast<ObjMat>(out); ObjMat *output = Object::Cast<ObjMat>(out);
// cv::Mat channels[3 /*input->value.channels()*/]; int rows = input->value.rows;
// cv::split(input->value, channels); int cols = input->value.cols;
// channels[2].convertTo(output->value, CV_16UC1); float T = 0.08;
// output->id = input->id; float f = 0.01;
// output->data = input->data; cv::Mat depth_mat = cv::Mat::zeros(rows, cols, CV_32F);
for (int i = 0; i < rows; i++) {
for (int j = 0; j < cols; j++) {
float disparity_value = input->value.at<float>(i, j);
float depth = T * f / disparity_value;
depth_mat.at<float>(i, j) = depth;
}
}
output->value = depth_mat;
output->id = input->id;
output->data = input->data;
return true; return true;
} }

View File

@ -19,11 +19,6 @@
#include <opencv2/imgproc/imgproc.hpp> #include <opencv2/imgproc/imgproc.hpp>
#include "mynteye/logger.h" #include "mynteye/logger.h"
#include "mynteye/device/device.h" #include "mynteye/device/device.h"
// #define WITH_CAM_MODELS
#ifdef WITH_CAM_MODELS
#include <camodocal/camera_models/Camera.h> #include <camodocal/camera_models/Camera.h>
#include <camodocal/camera_models/CameraFactory.h> #include <camodocal/camera_models/CameraFactory.h>
#include <camodocal/camera_models/CataCamera.h> #include <camodocal/camera_models/CataCamera.h>
@ -344,130 +339,6 @@ struct camera_mat_info_pair stereoRectify(
return res; return res;
} }
#endif
MYNTEYE_BEGIN_NAMESPACE
// Processor name used for registration/lookup in the processing graph.
const char RectifyProcessor::NAME[] = "RectifyProcessor";
// Constructs the processor and immediately pulls calibration parameters
// from the device so the rectification maps are ready before processing.
// @param device       source device providing intrinsics/extrinsics.
// @param proc_period  minimum processing period forwarded to Processor.
RectifyProcessor::RectifyProcessor(
std::shared_ptr<Device> device, std::int32_t proc_period)
: Processor(std::move(proc_period)), device_(device) {
VLOG(2) << __func__ << ": proc_period=" << proc_period;
// Start as UNKNOW; NotifyImageParamsChanged() selects the real model.
calib_model = CalibrationModel::UNKNOW;
NotifyImageParamsChanged();
}
RectifyProcessor::~RectifyProcessor() {
VLOG(2) << __func__;
}
std::string RectifyProcessor::Name() {
return NAME;
}
// Re-reads intrinsics/extrinsics from the device and rebuilds the
// rectification parameters for the calibration model actually in use.
// KANNALA_BRANDT is only supported when WITH_CAM_MODELS is compiled in.
void RectifyProcessor::NotifyImageParamsChanged() {
auto in_left = device_->GetIntrinsics(Stream::LEFT);
auto in_right = device_->GetIntrinsics(Stream::RIGHT);
if (in_left->calib_model() == CalibrationModel::PINHOLE) {
InitParams(
*std::dynamic_pointer_cast<IntrinsicsPinhole>(in_left),
*std::dynamic_pointer_cast<IntrinsicsPinhole>(in_right),
device_->GetExtrinsics(Stream::RIGHT, Stream::LEFT));
} else if (in_left->calib_model() ==
CalibrationModel::KANNALA_BRANDT) {
#ifdef WITH_CAM_MODELS
InitParams(
*std::dynamic_pointer_cast<IntrinsicsEquidistant>(in_left),
*std::dynamic_pointer_cast<IntrinsicsEquidistant>(in_right),
device_->GetExtrinsics(Stream::RIGHT, Stream::LEFT));
#else
VLOG(2) << "calib model type KANNALA_BRANDT"
<< " is not been enabled.";
#endif
} else {
VLOG(2) << "calib model type "
<< in_left->calib_model()
<<" is not been enabled.";
}
}
// Output object is a stereo pair (left/right rectified images).
Object *RectifyProcessor::OnCreateOutput() {
return new ObjMat2();
}
// Rectifies the incoming stereo pair by remapping both images with the
// precomputed undistort/rectify maps (map11/map12 for the left image,
// map21/map22 for the right), propagating frame ids and image metadata.
// @param in     ObjMat2 with the raw left/right images.
// @param out    ObjMat2 receiving the rectified images.
// @param parent unused.
// @return true when the pair was rectified; false when no maps exist for
//         the current calibration model (KANNALA_BRANDT without
//         WITH_CAM_MODELS, or an UNKNOW model).
bool RectifyProcessor::OnProcess(
    Object *const in, Object *const out, Processor *const parent) {
  MYNTEYE_UNUSED(parent)
  if (calib_model == CalibrationModel::PINHOLE) {
    const ObjMat2 *input = Object::Cast<ObjMat2>(in);
    ObjMat2 *output = Object::Cast<ObjMat2>(out);
    cv::remap(input->first, output->first, map11, map12, cv::INTER_LINEAR);
    cv::remap(input->second, output->second, map21, map22, cv::INTER_LINEAR);
    output->first_id = input->first_id;
    output->first_data = input->first_data;
    output->second_id = input->second_id;
    output->second_data = input->second_data;
    return true;
  } else if (calib_model == CalibrationModel::KANNALA_BRANDT) {
#ifdef WITH_CAM_MODELS
    const ObjMat2 *input = Object::Cast<ObjMat2>(in);
    ObjMat2 *output = Object::Cast<ObjMat2>(out);
    cv::remap(input->first, output->first, map11, map12, cv::INTER_LINEAR);
    cv::remap(input->second, output->second, map21, map22, cv::INTER_LINEAR);
    output->first_id = input->first_id;
    output->first_data = input->first_data;
    output->second_id = input->second_id;
    output->second_data = input->second_data;
    return true;
#else
    return false;
#endif
  }
  // BUG FIX: the original fell off the end of this non-void function when
  // calib_model was neither PINHOLE nor KANNALA_BRANDT (e.g. UNKNOW),
  // which is undefined behavior. Report failure explicitly instead.
  return false;
}
// Builds the pinhole stereo-rectification parameters (R1/R2, P1/P2, Q) and
// the per-eye remap tables (map11/map12, map21/map22) from the device
// calibration.
// @param in_left/in_right   pinhole intrinsics of each eye (by value).
// @param ex_right_to_left   extrinsics rotating/translating right into left.
void RectifyProcessor::InitParams(
IntrinsicsPinhole in_left,
IntrinsicsPinhole in_right,
Extrinsics ex_right_to_left) {
calib_model = CalibrationModel::PINHOLE;
cv::Size size{in_left.width, in_left.height};
// Camera matrices from fx/fy/cx/cy.
cv::Mat M1 =
(cv::Mat_<double>(3, 3) << in_left.fx, 0, in_left.cx, 0, in_left.fy,
in_left.cy, 0, 0, 1);
cv::Mat M2 =
(cv::Mat_<double>(3, 3) << in_right.fx, 0, in_right.cx, 0, in_right.fy,
in_right.cy, 0, 0, 1);
// Distortion rows wrap the coeffs buffers without copying; assumes a
// 5-element (k1,k2,p1,p2,k3) layout — safe here because the wrapping
// Mats are only used within this function while the by-value params live.
cv::Mat D1(1, 5, CV_64F, in_left.coeffs);
cv::Mat D2(1, 5, CV_64F, in_right.coeffs);
cv::Mat R =
(cv::Mat_<double>(3, 3) << ex_right_to_left.rotation[0][0],
ex_right_to_left.rotation[0][1], ex_right_to_left.rotation[0][2],
ex_right_to_left.rotation[1][0], ex_right_to_left.rotation[1][1],
ex_right_to_left.rotation[1][2], ex_right_to_left.rotation[2][0],
ex_right_to_left.rotation[2][1], ex_right_to_left.rotation[2][2]);
cv::Mat T(3, 1, CV_64F, ex_right_to_left.translation);
VLOG(2) << "InitParams size: " << size;
VLOG(2) << "M1: " << M1;
VLOG(2) << "M2: " << M2;
VLOG(2) << "D1: " << D1;
VLOG(2) << "D2: " << D2;
VLOG(2) << "R: " << R;
VLOG(2) << "T: " << T;
cv::Rect left_roi, right_roi;
// CALIB_ZERO_DISPARITY keeps both principal points at the same pixel
// coordinates in the rectified views; alpha=0 crops to valid pixels only.
cv::stereoRectify(
M1, D1, M2, D2, size, R, T, R1, R2, P1, P2, Q, cv::CALIB_ZERO_DISPARITY,
0, size, &left_roi, &right_roi);
// CV_16SC2 maps: fixed-point representation, faster remap at runtime.
cv::initUndistortRectifyMap(M1, D1, R1, P1, size, CV_16SC2, map11, map12);
cv::initUndistortRectifyMap(M2, D2, R2, P2, size, CV_16SC2, map21, map22);
}
#ifdef WITH_CAM_MODELS
camodocal::CameraPtr generateCameraFromIntrinsicsEquidistant( camodocal::CameraPtr generateCameraFromIntrinsicsEquidistant(
const mynteye::IntrinsicsEquidistant & in) { const mynteye::IntrinsicsEquidistant & in) {
camodocal::EquidistantCameraPtr camera( camodocal::EquidistantCameraPtr camera(
@ -485,6 +356,8 @@ camodocal::CameraPtr generateCameraFromIntrinsicsEquidistant(
return camera; return camera;
} }
MYNTEYE_BEGIN_NAMESPACE
void RectifyProcessor::InitParams( void RectifyProcessor::InitParams(
IntrinsicsEquidistant in_left, IntrinsicsEquidistant in_left,
IntrinsicsEquidistant in_right, IntrinsicsEquidistant in_right,
@ -526,6 +399,50 @@ void RectifyProcessor::InitParams(
cv::Size(0, 0), right_center[0], cv::Size(0, 0), right_center[0],
right_center[1], rect_R_r); right_center[1], rect_R_r);
} }
#endif
// Processor name used for registration/lookup in the processing graph.
const char RectifyProcessor::NAME[] = "RectifyProcessor";
// Constructs the (camera-models based) rectify processor and immediately
// pulls calibration parameters from the device.
// @param device       source device providing intrinsics/extrinsics.
// @param proc_period  minimum processing period forwarded to Processor.
RectifyProcessor::RectifyProcessor(
std::shared_ptr<Device> device, std::int32_t proc_period)
: Processor(std::move(proc_period)), device_(device) {
VLOG(2) << __func__ << ": proc_period=" << proc_period;
// Start as UNKNOW; InitParams() sets the real model.
calib_model = CalibrationModel::UNKNOW;
NotifyImageParamsChanged();
}
RectifyProcessor::~RectifyProcessor() {
VLOG(2) << __func__;
}
std::string RectifyProcessor::Name() {
return NAME;
}
void RectifyProcessor::NotifyImageParamsChanged() {
auto in_left = device_->GetIntrinsics(Stream::LEFT);
auto in_right = device_->GetIntrinsics(Stream::RIGHT);
InitParams(
*std::dynamic_pointer_cast<IntrinsicsEquidistant>(in_left),
*std::dynamic_pointer_cast<IntrinsicsEquidistant>(in_right),
device_->GetExtrinsics(Stream::RIGHT, Stream::LEFT));
}
// Output object is a stereo pair (left/right rectified images).
Object *RectifyProcessor::OnCreateOutput() {
return new ObjMat2();
}
// Rectifies the incoming stereo pair with the precomputed remap tables
// (map11/map12 for left, map21/map22 for right), propagating frame ids
// and image metadata. Always reports success.
bool RectifyProcessor::OnProcess(
Object *const in, Object *const out, Processor *const parent) {
MYNTEYE_UNUSED(parent)
const ObjMat2 *input = Object::Cast<ObjMat2>(in);
ObjMat2 *output = Object::Cast<ObjMat2>(out);
cv::remap(input->first, output->first, map11, map12, cv::INTER_LINEAR);
cv::remap(input->second, output->second, map21, map22, cv::INTER_LINEAR);
output->first_id = input->first_id;
output->first_data = input->first_data;
output->second_id = input->second_id;
output->second_data = input->second_data;
return true;
}
MYNTEYE_END_NAMESPACE MYNTEYE_END_NAMESPACE

View File

@ -48,8 +48,6 @@ class RectifyProcessor : public Processor {
Object *const in, Object *const out, Processor *const parent) override; Object *const in, Object *const out, Processor *const parent) override;
private: private:
void InitParams(IntrinsicsPinhole in_left,
IntrinsicsPinhole in_right, Extrinsics ex_right_to_left);
void InitParams(IntrinsicsEquidistant in_left, void InitParams(IntrinsicsEquidistant in_left,
IntrinsicsEquidistant in_right, Extrinsics ex_right_to_left); IntrinsicsEquidistant in_right, Extrinsics ex_right_to_left);

View File

@ -0,0 +1,110 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/api/processor/rectify_processor_ocv.h"
#include <utility>
#include <opencv2/calib3d/calib3d.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include "mynteye/logger.h"
#include "mynteye/device/device.h"
MYNTEYE_BEGIN_NAMESPACE
// Processor name used for registration/lookup in the processing graph.
const char RectifyProcessorOCV::NAME[] = "RectifyProcessorOCV";
// Constructs the OpenCV-based (pinhole) rectify processor and immediately
// pulls calibration parameters from the device so the remap tables are
// ready before the first frame.
// @param device       source device providing intrinsics/extrinsics.
// @param proc_period  minimum processing period forwarded to Processor.
RectifyProcessorOCV::RectifyProcessorOCV(
std::shared_ptr<Device> device, std::int32_t proc_period)
: Processor(std::move(proc_period)), device_(device) {
VLOG(2) << __func__ << ": proc_period=" << proc_period;
// Start as UNKNOW; InitParams() sets PINHOLE once params are loaded.
calib_model = CalibrationModel::UNKNOW;
NotifyImageParamsChanged();
}
RectifyProcessorOCV::~RectifyProcessorOCV() {
VLOG(2) << __func__;
}
std::string RectifyProcessorOCV::Name() {
return NAME;
}
void RectifyProcessorOCV::NotifyImageParamsChanged() {
auto in_left = device_->GetIntrinsics(Stream::LEFT);
auto in_right = device_->GetIntrinsics(Stream::RIGHT);
InitParams(
*std::dynamic_pointer_cast<IntrinsicsPinhole>(in_left),
*std::dynamic_pointer_cast<IntrinsicsPinhole>(in_right),
device_->GetExtrinsics(Stream::RIGHT, Stream::LEFT));
}
// Output object is a stereo pair (left/right rectified images).
Object *RectifyProcessorOCV::OnCreateOutput() {
return new ObjMat2();
}
// Rectifies the incoming stereo pair with the precomputed remap tables
// (map11/map12 for left, map21/map22 for right), propagating frame ids
// and image metadata. Always reports success.
bool RectifyProcessorOCV::OnProcess(
Object *const in, Object *const out, Processor *const parent) {
MYNTEYE_UNUSED(parent)
const ObjMat2 *input = Object::Cast<ObjMat2>(in);
ObjMat2 *output = Object::Cast<ObjMat2>(out);
cv::remap(input->first, output->first, map11, map12, cv::INTER_LINEAR);
cv::remap(input->second, output->second, map21, map22, cv::INTER_LINEAR);
output->first_id = input->first_id;
output->first_data = input->first_data;
output->second_id = input->second_id;
output->second_data = input->second_data;
return true;
}
// Builds the pinhole stereo-rectification parameters (R1/R2, P1/P2, Q) and
// the per-eye remap tables (map11/map12, map21/map22) from the device
// calibration.
// @param in_left/in_right   pinhole intrinsics of each eye (by value).
// @param ex_right_to_left   extrinsics rotating/translating right into left.
void RectifyProcessorOCV::InitParams(
IntrinsicsPinhole in_left,
IntrinsicsPinhole in_right,
Extrinsics ex_right_to_left) {
calib_model = CalibrationModel::PINHOLE;
cv::Size size{in_left.width, in_left.height};
// Camera matrices from fx/fy/cx/cy.
cv::Mat M1 =
(cv::Mat_<double>(3, 3) << in_left.fx, 0, in_left.cx, 0, in_left.fy,
in_left.cy, 0, 0, 1);
cv::Mat M2 =
(cv::Mat_<double>(3, 3) << in_right.fx, 0, in_right.cx, 0, in_right.fy,
in_right.cy, 0, 0, 1);
// Distortion rows wrap the coeffs buffers without copying; assumes a
// 5-element (k1,k2,p1,p2,k3) layout — safe here because the wrapping
// Mats are only used within this function while the by-value params live.
cv::Mat D1(1, 5, CV_64F, in_left.coeffs);
cv::Mat D2(1, 5, CV_64F, in_right.coeffs);
cv::Mat R =
(cv::Mat_<double>(3, 3) << ex_right_to_left.rotation[0][0],
ex_right_to_left.rotation[0][1], ex_right_to_left.rotation[0][2],
ex_right_to_left.rotation[1][0], ex_right_to_left.rotation[1][1],
ex_right_to_left.rotation[1][2], ex_right_to_left.rotation[2][0],
ex_right_to_left.rotation[2][1], ex_right_to_left.rotation[2][2]);
cv::Mat T(3, 1, CV_64F, ex_right_to_left.translation);
VLOG(2) << "InitParams size: " << size;
VLOG(2) << "M1: " << M1;
VLOG(2) << "M2: " << M2;
VLOG(2) << "D1: " << D1;
VLOG(2) << "D2: " << D2;
VLOG(2) << "R: " << R;
VLOG(2) << "T: " << T;
cv::Rect left_roi, right_roi;
// CALIB_ZERO_DISPARITY keeps both principal points at the same pixel
// coordinates in the rectified views; alpha=0 crops to valid pixels only.
cv::stereoRectify(
M1, D1, M2, D2, size, R, T, R1, R2, P1, P2, Q, cv::CALIB_ZERO_DISPARITY,
0, size, &left_roi, &right_roi);
// CV_16SC2 maps: fixed-point representation, faster remap at runtime.
cv::initUndistortRectifyMap(M1, D1, R1, P1, size, CV_16SC2, map11, map12);
cv::initUndistortRectifyMap(M2, D2, R2, P2, size, CV_16SC2, map21, map22);
}
MYNTEYE_END_NAMESPACE

View File

@ -0,0 +1,60 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_API_PROCESSOR_RECTIFY_PROCESSOR_OCV_H_
#define MYNTEYE_API_PROCESSOR_RECTIFY_PROCESSOR_OCV_H_
#pragma once
#include <memory>
#include <string>
#include <opencv2/core/core.hpp>
#include "mynteye/types.h"
#include "mynteye/api/processor.h"
MYNTEYE_BEGIN_NAMESPACE
class Device;
// OpenCV-based stereo rectification processor for PINHOLE calibration
// models. Computes cv::stereoRectify parameters from the device's
// intrinsics/extrinsics and remaps incoming stereo pairs onto the
// rectified image planes.
class RectifyProcessorOCV : public Processor {
public:
static const char NAME[];
// @param device       source device providing intrinsics/extrinsics.
// @param proc_period  minimum processing period (0 = every frame).
RectifyProcessorOCV(
std::shared_ptr<Device> device, std::int32_t proc_period = 0);
virtual ~RectifyProcessorOCV();
std::string Name() override;
// Reloads calibration from the device and rebuilds the remap tables.
void NotifyImageParamsChanged();
// Rectification outputs (public so downstream processors can read Q,
// the disparity-to-depth reprojection matrix).
cv::Mat R1, P1, R2, P2, Q;
// Undistort/rectify remap tables: map11/map12 left, map21/map22 right.
cv::Mat map11, map12, map21, map22;
protected:
Object *OnCreateOutput() override;
bool OnProcess(
Object *const in, Object *const out, Processor *const parent) override;
private:
// Builds R1/P1/R2/P2/Q and the remap tables from pinhole calibration.
void InitParams(IntrinsicsPinhole in_left,
IntrinsicsPinhole in_right, Extrinsics ex_right_to_left);
std::shared_ptr<Device> device_;
CalibrationModel calib_model;
};
MYNTEYE_END_NAMESPACE
#endif  // MYNTEYE_API_PROCESSOR_RECTIFY_PROCESSOR_OCV_H_

View File

@ -23,13 +23,16 @@
#include "mynteye/api/object.h" #include "mynteye/api/object.h"
#include "mynteye/api/plugin.h" #include "mynteye/api/plugin.h"
#include "mynteye/api/processor.h" #include "mynteye/api/processor.h"
#include "mynteye/api/processor/depth_processor.h"
#include "mynteye/api/processor/disparity_normalized_processor.h" #include "mynteye/api/processor/disparity_normalized_processor.h"
#include "mynteye/api/processor/disparity_processor.h" #include "mynteye/api/processor/disparity_processor.h"
#include "mynteye/api/processor/points_processor.h" #include "mynteye/api/processor/rectify_processor_ocv.h"
#include "mynteye/api/processor/rectify_processor.h"
#include "mynteye/api/processor/depth_processor_ocv.h" #include "mynteye/api/processor/depth_processor_ocv.h"
#include "mynteye/api/processor/points_processor_ocv.h" #include "mynteye/api/processor/points_processor_ocv.h"
#ifdef WITH_CAM_MODELS
#include "mynteye/api/processor/depth_processor.h"
#include "mynteye/api/processor/points_processor.h"
#include "mynteye/api/processor/rectify_processor.h"
#endif
#include "mynteye/device/device.h" #include "mynteye/device/device.h"
#define RECTIFY_PROC_PERIOD 0 #define RECTIFY_PROC_PERIOD 0
@ -87,8 +90,20 @@ Synthetic::~Synthetic() {
} }
void Synthetic::NotifyImageParamsChanged() { void Synthetic::NotifyImageParamsChanged() {
if (calib_model_ == CalibrationModel::PINHOLE) {
auto &&processor = find_processor<RectifyProcessorOCV>(processor_);
if (processor) processor->NotifyImageParamsChanged();
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
auto &&processor = find_processor<RectifyProcessor>(processor_); auto &&processor = find_processor<RectifyProcessor>(processor_);
if (processor) processor->NotifyImageParamsChanged(); if (processor) processor->NotifyImageParamsChanged();
#endif
} else {
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_ << ", use default pinhole model";
auto &&processor = find_processor<RectifyProcessorOCV>(processor_);
if (processor) processor->NotifyImageParamsChanged();
}
} }
bool Synthetic::Supports(const Stream &stream) const { bool Synthetic::Supports(const Stream &stream) const {
@ -173,7 +188,18 @@ api::StreamData Synthetic::GetStreamData(const Stream &stream) {
} else if (mode == MODE_SYNTHETIC) { } else if (mode == MODE_SYNTHETIC) {
if (stream == Stream::LEFT_RECTIFIED || stream == Stream::RIGHT_RECTIFIED) { if (stream == Stream::LEFT_RECTIFIED || stream == Stream::RIGHT_RECTIFIED) {
static std::shared_ptr<ObjMat2> output = nullptr; static std::shared_ptr<ObjMat2> output = nullptr;
auto &&processor = find_processor<RectifyProcessor>(processor_); std::shared_ptr<Processor> processor = nullptr;
if (calib_model_ == CalibrationModel::PINHOLE) {
processor = find_processor<RectifyProcessorOCV>(processor_);
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
processor = find_processor<RectifyProcessor>(processor_);
#endif
} else {
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_ << ", use default pinhole model";
processor = find_processor<RectifyProcessorOCV>(processor_);
}
auto &&out = processor->GetOutput(); auto &&out = processor->GetOutput();
if (out != nullptr) { if (out != nullptr) {
// Obtain the output, out will be nullptr if get again immediately. // Obtain the output, out will be nullptr if get again immediately.
@ -219,6 +245,7 @@ api::StreamData Synthetic::GetStreamData(const Stream &stream) {
return {output->data, output->value, nullptr, output->id}; return {output->data, output->value, nullptr, output->id};
} }
VLOG(2) << "Points not ready now"; VLOG(2) << "Points not ready now";
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) { } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
auto &&processor = find_processor<PointsProcessor>(processor_); auto &&processor = find_processor<PointsProcessor>(processor_);
auto &&out = processor->GetOutput(); auto &&out = processor->GetOutput();
@ -227,6 +254,7 @@ api::StreamData Synthetic::GetStreamData(const Stream &stream) {
return {output->data, output->value, nullptr, output->id}; return {output->data, output->value, nullptr, output->id};
} }
VLOG(2) << "Points not ready now"; VLOG(2) << "Points not ready now";
#endif
} else { } else {
// UNKNOW // UNKNOW
LOG(ERROR) << "Unknow calib model type in device: " LOG(ERROR) << "Unknow calib model type in device: "
@ -242,6 +270,7 @@ api::StreamData Synthetic::GetStreamData(const Stream &stream) {
return {output->data, output->value, nullptr, output->id}; return {output->data, output->value, nullptr, output->id};
} }
VLOG(2) << "Depth not ready now"; VLOG(2) << "Depth not ready now";
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) { } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
auto &&processor = find_processor<DepthProcessor>(processor_); auto &&processor = find_processor<DepthProcessor>(processor_);
auto &&out = processor->GetOutput(); auto &&out = processor->GetOutput();
@ -250,6 +279,7 @@ api::StreamData Synthetic::GetStreamData(const Stream &stream) {
return {output->data, output->value, nullptr, output->id}; return {output->data, output->value, nullptr, output->id};
} }
VLOG(2) << "Depth not ready now"; VLOG(2) << "Depth not ready now";
#endif
} else { } else {
// UNKNOW // UNKNOW
LOG(ERROR) << "Unknow calib model type in device: " LOG(ERROR) << "Unknow calib model type in device: "
@ -346,13 +376,33 @@ void Synthetic::EnableStreamData(const Stream &stream, std::uint32_t depth) {
if (!IsStreamDataEnabled(Stream::LEFT)) if (!IsStreamDataEnabled(Stream::LEFT))
break; break;
stream_enabled_mode_[stream] = MODE_SYNTHETIC; stream_enabled_mode_[stream] = MODE_SYNTHETIC;
if (calib_model_ == CalibrationModel::PINHOLE) {
CHECK(ActivateProcessor<RectifyProcessorOCV>());
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
CHECK(ActivateProcessor<RectifyProcessor>()); CHECK(ActivateProcessor<RectifyProcessor>());
#endif
} else {
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_ << ", use default pinhole model";
CHECK(ActivateProcessor<RectifyProcessorOCV>());
}
} return; } return;
case Stream::RIGHT_RECTIFIED: { case Stream::RIGHT_RECTIFIED: {
if (!IsStreamDataEnabled(Stream::RIGHT)) if (!IsStreamDataEnabled(Stream::RIGHT))
break; break;
stream_enabled_mode_[stream] = MODE_SYNTHETIC; stream_enabled_mode_[stream] = MODE_SYNTHETIC;
if (calib_model_ == CalibrationModel::PINHOLE) {
CHECK(ActivateProcessor<RectifyProcessorOCV>());
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
CHECK(ActivateProcessor<RectifyProcessor>()); CHECK(ActivateProcessor<RectifyProcessor>());
#endif
} else {
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_ << ", use default pinhole model";
CHECK(ActivateProcessor<RectifyProcessorOCV>());
}
} return; } return;
case Stream::DISPARITY: { case Stream::DISPARITY: {
stream_enabled_mode_[stream] = MODE_SYNTHETIC; stream_enabled_mode_[stream] = MODE_SYNTHETIC;
@ -370,8 +420,10 @@ void Synthetic::EnableStreamData(const Stream &stream, std::uint32_t depth) {
EnableStreamData(Stream::DISPARITY, depth + 1); EnableStreamData(Stream::DISPARITY, depth + 1);
if (calib_model_ == CalibrationModel::PINHOLE) { if (calib_model_ == CalibrationModel::PINHOLE) {
CHECK(ActivateProcessor<PointsProcessorOCV>()); CHECK(ActivateProcessor<PointsProcessorOCV>());
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) { } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
CHECK(ActivateProcessor<PointsProcessor>()); CHECK(ActivateProcessor<PointsProcessor>());
#endif
} else { } else {
LOG(ERROR) << "Unknow calib model type in device: " LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_; << calib_model_;
@ -382,8 +434,10 @@ void Synthetic::EnableStreamData(const Stream &stream, std::uint32_t depth) {
EnableStreamData(Stream::POINTS, depth + 1); EnableStreamData(Stream::POINTS, depth + 1);
if (calib_model_ == CalibrationModel::PINHOLE) { if (calib_model_ == CalibrationModel::PINHOLE) {
CHECK(ActivateProcessor<DepthProcessorOCV>()); CHECK(ActivateProcessor<DepthProcessorOCV>());
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) { } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
CHECK(ActivateProcessor<DepthProcessor>()); CHECK(ActivateProcessor<DepthProcessor>());
#endif
} else { } else {
LOG(ERROR) << "Unknow calib model type in device: " LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_; << calib_model_;
@ -410,7 +464,17 @@ void Synthetic::DisableStreamData(const Stream &stream, std::uint32_t depth) {
if (IsStreamEnabledSynthetic(Stream::DISPARITY)) { if (IsStreamEnabledSynthetic(Stream::DISPARITY)) {
DisableStreamData(Stream::DISPARITY, depth + 1); DisableStreamData(Stream::DISPARITY, depth + 1);
} }
if (calib_model_ == CalibrationModel::PINHOLE) {
DeactivateProcessor<RectifyProcessorOCV>();
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
DeactivateProcessor<RectifyProcessor>(); DeactivateProcessor<RectifyProcessor>();
#endif
} else {
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_ << ", use default pinhole model";
DeactivateProcessor<RectifyProcessorOCV>();
}
} break; } break;
case Stream::RIGHT_RECTIFIED: { case Stream::RIGHT_RECTIFIED: {
if (IsStreamEnabledSynthetic(Stream::LEFT_RECTIFIED)) { if (IsStreamEnabledSynthetic(Stream::LEFT_RECTIFIED)) {
@ -419,7 +483,17 @@ void Synthetic::DisableStreamData(const Stream &stream, std::uint32_t depth) {
if (IsStreamEnabledSynthetic(Stream::DISPARITY)) { if (IsStreamEnabledSynthetic(Stream::DISPARITY)) {
DisableStreamData(Stream::DISPARITY, depth + 1); DisableStreamData(Stream::DISPARITY, depth + 1);
} }
if (calib_model_ == CalibrationModel::PINHOLE) {
DeactivateProcessor<RectifyProcessorOCV>();
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
DeactivateProcessor<RectifyProcessor>(); DeactivateProcessor<RectifyProcessor>();
#endif
} else {
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_ << ", use default pinhole model";
DeactivateProcessor<RectifyProcessorOCV>();
}
} break; } break;
case Stream::DISPARITY: { case Stream::DISPARITY: {
if (IsStreamEnabledSynthetic(Stream::DISPARITY_NORMALIZED)) { if (IsStreamEnabledSynthetic(Stream::DISPARITY_NORMALIZED)) {
@ -439,8 +513,10 @@ void Synthetic::DisableStreamData(const Stream &stream, std::uint32_t depth) {
DisableStreamData(Stream::DEPTH, depth + 1); DisableStreamData(Stream::DEPTH, depth + 1);
} }
DeactivateProcessor<PointsProcessorOCV>(); DeactivateProcessor<PointsProcessorOCV>();
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) { } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
DeactivateProcessor<PointsProcessor>(); DeactivateProcessor<PointsProcessor>();
#endif
} else { } else {
LOG(ERROR) << "Unknow calib model type in device: " LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_; << calib_model_;
@ -449,11 +525,13 @@ void Synthetic::DisableStreamData(const Stream &stream, std::uint32_t depth) {
case Stream::DEPTH: { case Stream::DEPTH: {
if (calib_model_ == CalibrationModel::PINHOLE) { if (calib_model_ == CalibrationModel::PINHOLE) {
DeactivateProcessor<DepthProcessorOCV>(); DeactivateProcessor<DepthProcessorOCV>();
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) { } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
if (IsStreamEnabledSynthetic(Stream::DEPTH)) { if (IsStreamEnabledSynthetic(Stream::DEPTH)) {
DisableStreamData(Stream::POINTS, depth + 1); DisableStreamData(Stream::POINTS, depth + 1);
} }
DeactivateProcessor<DepthProcessor>(); DeactivateProcessor<DepthProcessor>();
#endif
} else { } else {
LOG(ERROR) << "Unknow calib model type in device: " LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_; << calib_model_;
@ -470,8 +548,27 @@ void Synthetic::DisableStreamData(const Stream &stream, std::uint32_t depth) {
} }
void Synthetic::InitProcessors() { void Synthetic::InitProcessors() {
auto &&rectify_processor = std::shared_ptr<Processor> rectify_processor = nullptr;
cv::Mat Q;
if (calib_model_ == CalibrationModel::PINHOLE) {
auto &&rectify_processor_ocv =
std::make_shared<RectifyProcessorOCV>(api_->device(),
RECTIFY_PROC_PERIOD);
rectify_processor = rectify_processor_ocv;
Q = rectify_processor_ocv->Q;
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
rectify_processor =
std::make_shared<RectifyProcessor>(api_->device(), RECTIFY_PROC_PERIOD); std::make_shared<RectifyProcessor>(api_->device(), RECTIFY_PROC_PERIOD);
#endif
} else {
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_ << ", use default pinhole model";
auto &&rectify_processor_ocv =
std::make_shared<RectifyProcessorOCV>(api_->device(),
RECTIFY_PROC_PERIOD);
rectify_processor = rectify_processor_ocv;
}
auto &&disparity_processor = auto &&disparity_processor =
std::make_shared<DisparityProcessor>(DISPARITY_PROC_PERIOD); std::make_shared<DisparityProcessor>(DISPARITY_PROC_PERIOD);
auto &&disparitynormalized_processor = auto &&disparitynormalized_processor =
@ -479,33 +576,26 @@ void Synthetic::InitProcessors() {
DISPARITY_NORM_PROC_PERIOD); DISPARITY_NORM_PROC_PERIOD);
std::shared_ptr<Processor> points_processor = nullptr; std::shared_ptr<Processor> points_processor = nullptr;
if (calib_model_ == CalibrationModel::PINHOLE) { if (calib_model_ == CalibrationModel::PINHOLE) {
auto &&points_processor_pin = std::make_shared<PointsProcessorOCV>( points_processor = std::make_shared<PointsProcessorOCV>(
rectify_processor->Q, POINTS_PROC_PERIOD); Q, POINTS_PROC_PERIOD);
points_processor = points_processor_pin; #ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) { } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
auto &&points_processor_kan = std::make_shared<PointsProcessor>( points_processor = std::make_shared<PointsProcessor>(
POINTS_PROC_PERIOD); POINTS_PROC_PERIOD);
points_processor = points_processor_kan; #endif
} else { } else {
LOG(ERROR) << "Unknow calib model type in device: " points_processor = std::make_shared<PointsProcessorOCV>(
<< calib_model_ << ", use default pinhole model"; Q, POINTS_PROC_PERIOD);
auto &&points_processor_pin = std::make_shared<PointsProcessorOCV>(
rectify_processor->Q, POINTS_PROC_PERIOD);
points_processor = points_processor_pin;
} }
std::shared_ptr<Processor> depth_processor = nullptr; std::shared_ptr<Processor> depth_processor = nullptr;
if (calib_model_ == CalibrationModel::PINHOLE) { if (calib_model_ == CalibrationModel::PINHOLE) {
auto &&depth_processor_pin = depth_processor = std::make_shared<DepthProcessorOCV>(DEPTH_PROC_PERIOD);
std::make_shared<DepthProcessorOCV>(DEPTH_PROC_PERIOD); #ifdef WITH_CAM_MODELS
depth_processor = depth_processor_pin;
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) { } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
auto &&depth_processor_kan = depth_processor = std::make_shared<DepthProcessor>(DEPTH_PROC_PERIOD);
std::make_shared<DepthProcessor>(DEPTH_PROC_PERIOD); #endif
depth_processor = depth_processor_kan;
} else { } else {
auto &&depth_processor_pin = depth_processor = std::make_shared<DepthProcessorOCV>(DEPTH_PROC_PERIOD);
std::make_shared<DepthProcessorOCV>(DEPTH_PROC_PERIOD);
depth_processor = depth_processor_pin;
} }
using namespace std::placeholders; // NOLINT using namespace std::placeholders; // NOLINT
@ -563,7 +653,18 @@ void Synthetic::ProcessNativeStream(
} }
if (left_data.img && right_data.img && if (left_data.img && right_data.img &&
left_data.img->frame_id == right_data.img->frame_id) { left_data.img->frame_id == right_data.img->frame_id) {
auto &&processor = find_processor<RectifyProcessor>(processor_); std::shared_ptr<Processor> processor = nullptr;
if (calib_model_ == CalibrationModel::PINHOLE) {
processor = find_processor<RectifyProcessorOCV>(processor_);
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
processor = find_processor<RectifyProcessor>(processor_);
#endif
} else {
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_ << ", use default pinhole model";
processor = find_processor<RectifyProcessorOCV>(processor_);
}
processor->Process(ObjMat2{ processor->Process(ObjMat2{
left_data.frame, left_data.frame_id, left_data.img, left_data.frame, left_data.frame_id, left_data.img,
right_data.frame, right_data.frame_id, right_data.img}); right_data.frame, right_data.frame_id, right_data.img});
@ -580,8 +681,16 @@ void Synthetic::ProcessNativeStream(
} }
if (left_rect_data.img && right_rect_data.img && if (left_rect_data.img && right_rect_data.img &&
left_rect_data.img->frame_id == right_rect_data.img->frame_id) { left_rect_data.img->frame_id == right_rect_data.img->frame_id) {
std::string name = RectifyProcessorOCV::NAME;
if (calib_model_ == CalibrationModel::PINHOLE) {
name = RectifyProcessorOCV::NAME;
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
name = RectifyProcessor::NAME;
#endif
}
process_childs( process_childs(
processor_, RectifyProcessor::NAME, ObjMat2{ processor_, name, ObjMat2{
left_rect_data.frame, left_rect_data.frame_id, left_rect_data.img, left_rect_data.frame, left_rect_data.frame_id, left_rect_data.img,
right_rect_data.frame, right_rect_data.frame_id, right_rect_data.frame, right_rect_data.frame_id,
right_rect_data.img}); right_rect_data.img});
@ -603,10 +712,12 @@ void Synthetic::ProcessNativeStream(
// PINHOLE // PINHOLE
process_childs(processor_, PointsProcessorOCV::NAME, process_childs(processor_, PointsProcessorOCV::NAME,
ObjMat{data.frame, data.frame_id, data.img}); ObjMat{data.frame, data.frame_id, data.img});
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) { } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
// KANNALA_BRANDT // KANNALA_BRANDT
process_childs(processor_, PointsProcessor::NAME, process_childs(processor_, PointsProcessor::NAME,
ObjMat{data.frame, data.frame_id, data.img}); ObjMat{data.frame, data.frame_id, data.img});
#endif
} else { } else {
// UNKNOW // UNKNOW
LOG(ERROR) << "Unknow calib model type in device: " LOG(ERROR) << "Unknow calib model type in device: "
@ -618,10 +729,12 @@ void Synthetic::ProcessNativeStream(
// PINHOLE // PINHOLE
process_childs(processor_, DepthProcessorOCV::NAME, process_childs(processor_, DepthProcessorOCV::NAME,
ObjMat{data.frame, data.frame_id, data.img}); ObjMat{data.frame, data.frame_id, data.img});
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) { } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
// KANNALA_BRANDT // KANNALA_BRANDT
process_childs(processor_, DepthProcessor::NAME, process_childs(processor_, DepthProcessor::NAME,
ObjMat{data.frame, data.frame_id, data.img}); ObjMat{data.frame, data.frame_id, data.img});
#endif
} else { } else {
// UNKNOW // UNKNOW
LOG(ERROR) << "Unknow calib model type in device: " LOG(ERROR) << "Unknow calib model type in device: "

View File

@ -22,6 +22,7 @@
#include <string> #include <string>
#include <vector> #include <vector>
#include "mynteye/device/config.h"
#include "mynteye/logger.h" #include "mynteye/logger.h"
#include "mynteye/util/times.h" #include "mynteye/util/times.h"
@ -520,12 +521,33 @@ bool Channels::SetFiles(
} }
} }
if (img_params != nullptr) { if (img_params != nullptr) {
// remove not supported resolution
auto&& res = adapter_->GetResolutionSupports();
for (auto it = img_params->begin(); it != img_params->end(); ) {
if (res.find(it->first) == res.end()) {
LOG(WARNING) << "Image params of resolution "
<< it->first.width << "x" << it->first.height << " not supported";
it = img_params->erase(it);
} else {
++it;
}
}
if (img_params->empty()) {
std::ostringstream os;
os << "Image params resolution must be ";
for (auto&& r : res) {
os << r.width << "x" << r.height << " ";
}
LOG(WARNING) << os.str();
} else {
auto n = file_channel_.SetImgParamsToData(img_params, data + 3 + size); auto n = file_channel_.SetImgParamsToData(img_params, data + 3 + size);
if (n > 0) { if (n > 0) {
header[1] = true; header[1] = true;
size += n; size += n;
} }
} }
}
if (imu_params != nullptr) { if (imu_params != nullptr) {
auto n = file_channel_.SetImuParamsToData(imu_params, data + 3 + size); auto n = file_channel_.SetImuParamsToData(imu_params, data + 3 + size);
if (n > 0) { if (n > 0) {
@ -719,4 +741,31 @@ Channels::control_info_t Channels::XuControlInfo(Option option) const {
return {min, max, def}; return {min, max, def};
} }
// ChannelsAdapter
ChannelsAdapter::ChannelsAdapter(const Model &model)
: model_(model) {
}
ChannelsAdapter::~ChannelsAdapter() {
}
std::set<Option> ChannelsAdapter::GetOptionSupports() {
return option_supports_map.at(model_);
}
std::set<Resolution> ChannelsAdapter::GetResolutionSupports() {
std::set<Resolution> res;
auto requests_map = stream_requests_map.at(model_);
for (auto&& r_map : requests_map) {
if (r_map.first == Capabilities::STEREO ||
r_map.first == Capabilities::STEREO_COLOR) {
for (auto&& r : r_map.second) {
res.insert({r.width, r.height});
}
}
}
return res;
}
MYNTEYE_END_NAMESPACE MYNTEYE_END_NAMESPACE

View File

@ -136,9 +136,11 @@ class MYNTEYE_API Channels {
class ChannelsAdapter { class ChannelsAdapter {
public: public:
virtual ~ChannelsAdapter() {} explicit ChannelsAdapter(const Model &model);
virtual ~ChannelsAdapter();
virtual std::set<Option> GetOptionSupports() = 0; virtual std::set<Option> GetOptionSupports();
virtual std::set<Resolution> GetResolutionSupports();
virtual std::int32_t GetAccelRangeDefault() = 0; virtual std::int32_t GetAccelRangeDefault() = 0;
virtual std::vector<std::int32_t> GetAccelRangeValues() = 0; virtual std::vector<std::int32_t> GetAccelRangeValues() = 0;
@ -147,6 +149,9 @@ class ChannelsAdapter {
virtual std::vector<std::int32_t> GetGyroRangeValues() = 0; virtual std::vector<std::int32_t> GetGyroRangeValues() = 0;
virtual void GetImuResPacket(const std::uint8_t *data, ImuResPacket *res) = 0; virtual void GetImuResPacket(const std::uint8_t *data, ImuResPacket *res) = 0;
protected:
Model model_;
}; };
MYNTEYE_END_NAMESPACE MYNTEYE_END_NAMESPACE

View File

@ -13,7 +13,6 @@
// limitations under the License. // limitations under the License.
#include "mynteye/device/standard/channels_adapter_s.h" #include "mynteye/device/standard/channels_adapter_s.h"
#include "mynteye/device/config.h"
#include "mynteye/logger.h" #include "mynteye/logger.h"
MYNTEYE_BEGIN_NAMESPACE MYNTEYE_BEGIN_NAMESPACE
@ -95,16 +94,13 @@ void unpack_imu_res_packet(const std::uint8_t *data, ImuResPacket *res) {
} // namespace } // namespace
StandardChannelsAdapter::StandardChannelsAdapter() { StandardChannelsAdapter::StandardChannelsAdapter()
: ChannelsAdapter(Model::STANDARD) {
} }
StandardChannelsAdapter::~StandardChannelsAdapter() { StandardChannelsAdapter::~StandardChannelsAdapter() {
} }
std::set<Option> StandardChannelsAdapter::GetOptionSupports() {
return option_supports_map.at(Model::STANDARD);
}
std::int32_t StandardChannelsAdapter::GetAccelRangeDefault() { std::int32_t StandardChannelsAdapter::GetAccelRangeDefault() {
return 8; return 8;
} }

View File

@ -28,8 +28,6 @@ class StandardChannelsAdapter : public ChannelsAdapter {
StandardChannelsAdapter(); StandardChannelsAdapter();
virtual ~StandardChannelsAdapter(); virtual ~StandardChannelsAdapter();
std::set<Option> GetOptionSupports() override;
std::int32_t GetAccelRangeDefault() override; std::int32_t GetAccelRangeDefault() override;
std::vector<std::int32_t> GetAccelRangeValues() override; std::vector<std::int32_t> GetAccelRangeValues() override;

View File

@ -13,7 +13,6 @@
// limitations under the License. // limitations under the License.
#include "mynteye/device/standard2/channels_adapter_s2.h" #include "mynteye/device/standard2/channels_adapter_s2.h"
#include "mynteye/device/config.h"
#include "mynteye/logger.h" #include "mynteye/logger.h"
MYNTEYE_BEGIN_NAMESPACE MYNTEYE_BEGIN_NAMESPACE
@ -91,16 +90,13 @@ void unpack_imu_res_packet(const std::uint8_t *data, ImuResPacket *res) {
} // namespace } // namespace
Standard2ChannelsAdapter::Standard2ChannelsAdapter() { Standard2ChannelsAdapter::Standard2ChannelsAdapter()
: ChannelsAdapter(Model::STANDARD2) {
} }
Standard2ChannelsAdapter::~Standard2ChannelsAdapter() { Standard2ChannelsAdapter::~Standard2ChannelsAdapter() {
} }
std::set<Option> Standard2ChannelsAdapter::GetOptionSupports() {
return option_supports_map.at(Model::STANDARD2);
}
std::int32_t Standard2ChannelsAdapter::GetAccelRangeDefault() { std::int32_t Standard2ChannelsAdapter::GetAccelRangeDefault() {
return 12; return 12;
} }

View File

@ -28,8 +28,6 @@ class Standard2ChannelsAdapter : public ChannelsAdapter {
Standard2ChannelsAdapter(); Standard2ChannelsAdapter();
virtual ~Standard2ChannelsAdapter(); virtual ~Standard2ChannelsAdapter();
std::set<Option> GetOptionSupports() override;
std::int32_t GetAccelRangeDefault() override; std::int32_t GetAccelRangeDefault() override;
std::vector<std::int32_t> GetAccelRangeValues() override; std::vector<std::int32_t> GetAccelRangeValues() override;

View File

@ -13,7 +13,6 @@
// limitations under the License. // limitations under the License.
#include "mynteye/device/standard2/channels_adapter_s210a.h" #include "mynteye/device/standard2/channels_adapter_s210a.h"
#include "mynteye/device/config.h"
#include "mynteye/logger.h" #include "mynteye/logger.h"
MYNTEYE_BEGIN_NAMESPACE MYNTEYE_BEGIN_NAMESPACE
@ -91,16 +90,13 @@ void unpack_imu_res_packet(const std::uint8_t *data, ImuResPacket *res) {
} // namespace } // namespace
Standard210aChannelsAdapter::Standard210aChannelsAdapter() { Standard210aChannelsAdapter::Standard210aChannelsAdapter()
: ChannelsAdapter(Model::STANDARD210A) {
} }
Standard210aChannelsAdapter::~Standard210aChannelsAdapter() { Standard210aChannelsAdapter::~Standard210aChannelsAdapter() {
} }
std::set<Option> Standard210aChannelsAdapter::GetOptionSupports() {
return option_supports_map.at(Model::STANDARD210A);
}
std::int32_t Standard210aChannelsAdapter::GetAccelRangeDefault() { std::int32_t Standard210aChannelsAdapter::GetAccelRangeDefault() {
return 12; return 12;
} }

View File

@ -28,8 +28,6 @@ class Standard210aChannelsAdapter : public ChannelsAdapter {
Standard210aChannelsAdapter(); Standard210aChannelsAdapter();
virtual ~Standard210aChannelsAdapter(); virtual ~Standard210aChannelsAdapter();
std::set<Option> GetOptionSupports() override;
std::int32_t GetAccelRangeDefault() override; std::int32_t GetAccelRangeDefault() override;
std::vector<std::int32_t> GetAccelRangeValues() override; std::vector<std::int32_t> GetAccelRangeValues() override;