Merge branch 'develop' into feature/android

* develop: (28 commits)
  feat(*): add function to query some hardware info
  feat: forbid 2100/210A unless sdk is updated to 2.3.1 or above
  refactor(synthetic): remove useless logic
  chore(readme): update readme
  docs(doxyfile): update version
  fix: change cmake version to 2.3.2
  feat(api): sdk/firmware version check
  feat(api): version check
  fix(wrapper): fix camera info repeat bug
  build(makefile): ensure uninstall before install
  fix(correspondence): also wait stream matched ready
  fix(record): shield disable logic temporarily
  chore(readme): update readme
  chore(readme): update readme
  chore(doc): update version
  fix(samples): delete useless comment
  fix(ros): fix camera info bug
  fix(correspondence): improve warning if not start motion tracking
  fix(correspondence): fix include header
  fix(ros): record close bug
  ...
John Zhao
2019-02-28 15:58:55 +08:00
36 changed files with 1169 additions and 304 deletions

View File

@@ -22,9 +22,11 @@
#include <thread>
#include "mynteye/logger.h"
#include "mynteye/api/correspondence.h"
#include "mynteye/api/dl.h"
#include "mynteye/api/plugin.h"
#include "mynteye/api/synthetic.h"
#include "mynteye/api/version_checker.h"
#include "mynteye/device/device.h"
#include "mynteye/device/utils.h"
@@ -208,7 +210,7 @@ std::vector<std::string> get_plugin_paths() {
} // namespace
API::API(std::shared_ptr<Device> device, CalibrationModel calib_model)
: device_(device) {
: device_(device), correspondence_(nullptr) {
VLOG(2) << __func__;
// std::dynamic_pointer_cast<StandardDevice>(device_);
synthetic_.reset(new Synthetic(this, calib_model));
@@ -221,7 +223,10 @@ API::~API() {
std::shared_ptr<API> API::Create(int argc, char *argv[]) {
auto &&device = device::select();
if (!device) return nullptr;
return Create(argc, argv, device);
auto api = Create(argc, argv, device);
if (api && checkFirmwareVersion(api))
return api;
return nullptr;
}
std::shared_ptr<API> API::Create(
@@ -260,7 +265,7 @@ std::shared_ptr<API> API::Create(const std::shared_ptr<Device> &device) {
}
} else {
LOG(ERROR) <<"no device!";
api = std::make_shared<API>(device, CalibrationModel::UNKNOW);
return nullptr;
}
return api;
}
@@ -323,6 +328,20 @@ std::shared_ptr<DeviceInfo> API::GetInfo() const {
}
std::string API::GetInfo(const Info &info) const {
if (info == Info::SDK_VERSION) {
std::string info_path =
utils::get_sdk_install_dir();
info_path.append(MYNTEYE_OS_SEP "share" \
MYNTEYE_OS_SEP "mynteye" MYNTEYE_OS_SEP "build.info");
cv::FileStorage fs(info_path, cv::FileStorage::READ);
if (!fs.isOpened()) {
LOG(WARNING) << "build.info not found: " << info_path;
return "null";
}
return fs["MYNTEYE_VERSION"];
}
return device_->GetInfo(info);
}
@@ -377,10 +396,15 @@ void API::SetStreamCallback(const Stream &stream, stream_callback_t callback) {
}
void API::SetMotionCallback(motion_callback_t callback) {
static auto callback_ = callback;
if (correspondence_) {
correspondence_->SetMotionCallback(callback);
return;
}
callback_ = callback;
if (callback_) {
device_->SetMotionCallback(
[](const device::MotionData &data) { callback_({data.imu}); }, true);
device_->SetMotionCallback([this](const device::MotionData &data) {
callback_({data.imu});
}, true);
} else {
device_->SetMotionCallback(nullptr);
}
@@ -435,7 +459,11 @@ void API::Stop(const Source &source) {
}
void API::WaitForStreams() {
synthetic_->WaitForStreams();
if (correspondence_) {
correspondence_->WaitForStreams();
} else {
synthetic_->WaitForStreams();
}
}
void API::EnableStreamData(const Stream &stream) {
@@ -446,24 +474,69 @@ void API::DisableStreamData(const Stream &stream) {
synthetic_->DisableStreamData(stream);
}
void API::EnableStreamData(
const Stream &stream, stream_switch_callback_t callback,
bool try_tag) {
synthetic_->EnableStreamData(stream, callback, try_tag);
}
void API::DisableStreamData(
const Stream &stream, stream_switch_callback_t callback,
bool try_tag) {
synthetic_->DisableStreamData(stream, callback, try_tag);
}
api::StreamData API::GetStreamData(const Stream &stream) {
return synthetic_->GetStreamData(stream);
if (correspondence_ && correspondence_->Watch(stream)) {
return correspondence_->GetStreamData(stream);
} else {
return synthetic_->GetStreamData(stream);
}
}
std::vector<api::StreamData> API::GetStreamDatas(const Stream &stream) {
return synthetic_->GetStreamDatas(stream);
if (correspondence_ && correspondence_->Watch(stream)) {
return correspondence_->GetStreamDatas(stream);
} else {
return synthetic_->GetStreamDatas(stream);
}
}
void API::EnableMotionDatas(std::size_t max_size) {
if (correspondence_) return; // not cache them
device_->EnableMotionDatas(max_size);
}
std::vector<api::MotionData> API::GetMotionDatas() {
std::vector<api::MotionData> datas;
for (auto &&data : device_->GetMotionDatas()) {
datas.push_back({data.imu});
if (correspondence_) {
return correspondence_->GetMotionDatas();
} else {
std::vector<api::MotionData> datas;
for (auto &&data : device_->GetMotionDatas()) {
datas.push_back({data.imu});
}
return datas;
}
}
void API::EnableTimestampCorrespondence(const Stream &stream) {
if (correspondence_ == nullptr) {
correspondence_.reset(new Correspondence(device_, stream));
{
device_->DisableMotionDatas();
if (callback_) {
correspondence_->SetMotionCallback(callback_);
callback_ = nullptr;
}
}
using namespace std::placeholders; // NOLINT
device_->SetMotionCallback(
std::bind(&Correspondence::OnMotionDataCallback,
correspondence_.get(), _1),
true);
synthetic_->SetStreamDataListener(
std::bind(&Correspondence::OnStreamDataCallback,
correspondence_.get(), _1, _2));
}
return datas;
}
void API::EnablePlugin(const std::string &path) {
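Taken together, the api.cc changes above can be exercised roughly as follows. This is a hypothetical usage sketch, not part of the commit; it assumes the public API names shown in this diff (API::Create, EnableTimestampCorrespondence, Start/Stop, WaitForStreams, GetStreamData, GetMotionDatas) and the SDK's MYNTEYE_USE_NAMESPACE macro.

#include "mynteye/api/api.h"

MYNTEYE_USE_NAMESPACE

int main(int argc, char *argv[]) {
  // Create() now returns nullptr if the firmware/SDK version check fails.
  auto api = API::Create(argc, argv);
  if (!api) return 1;
  // Route image and IMU data through the new Correspondence matcher.
  api->EnableTimestampCorrespondence(Stream::LEFT);
  api->Start(Source::ALL);
  for (int i = 0; i < 100; ++i) {
    api->WaitForStreams();  // blocks until image and IMU data are matched
    auto left = api->GetStreamData(Stream::LEFT);
    auto imus = api->GetMotionDatas();  // IMU samples around the latest frame
    // ... consume left.frame and imus here ...
  }
  api->Stop(Source::ALL);
  return 0;
}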

View File

@@ -0,0 +1,277 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/api/correspondence.h"
#include "mynteye/device/device.h"
#include "mynteye/logger.h"
MYNTEYE_BEGIN_NAMESPACE
Correspondence::Correspondence(const std::shared_ptr<Device> &device,
const Stream &stream)
: device_(device), stream_(stream), ready_image_timestamp_(0) {
VLOG(2) << __func__;
// also watch the matched stream,
// so that the stream and its matched stream stay in correspondence
if (stream_ == Stream::LEFT) {
stream_match_ = Stream::RIGHT;
} else if (stream_ == Stream::RIGHT) {
stream_match_ = Stream::LEFT;
} else if (stream_ == Stream::LEFT_RECTIFIED) {
stream_match_ = Stream::RIGHT_RECTIFIED;
} else if (stream_ == Stream::RIGHT_RECTIFIED) {
stream_match_ = Stream::LEFT_RECTIFIED;
} else {
stream_match_ = Stream::LAST;
}
EnableStreamMatch();
auto framerate = device_->GetOptionValue(Option::FRAME_RATE);
stream_interval_us_ = 1000000.f / framerate;
stream_interval_us_half_ = 0.5f * stream_interval_us_;
VLOG(2) << "framerate: " << framerate
<< ", interval_us: " << stream_interval_us_;
}
Correspondence::~Correspondence() {
VLOG(2) << __func__;
}
bool Correspondence::Watch(const Stream &stream) const {
if (stream == stream_) return true;
if (stream_match_enabled_ && stream == stream_match_) return true;
return false;
}
void Correspondence::OnStreamDataCallback(
const Stream &stream, const api::StreamData &data) {
if (!Watch(stream)) {
return; // unwatched
}
// LOG(INFO) << __func__ << ", " << stream
// << ", id: " << data.frame_id << ", stamp: " << data.img->timestamp;
// if (data.img == nullptr) {
// LOG(FATAL) << "stream data image info is empty!";
// }
std::lock_guard<std::recursive_mutex> _(mtx_stream_datas_);
if (stream == stream_) {
stream_datas_.push_back(std::move(data));
} else if (/*stream_match_enabled_ && */stream == stream_match_) {
stream_datas_match_.push_back(std::move(data));
}
NotifyStreamDataReady();
}
void Correspondence::OnMotionDataCallback(const device::MotionData &data) {
// LOG(INFO) << __func__ << ", id: " << data.imu->frame_id
// << ", stamp: " << data.imu->timestamp;
{
std::lock_guard<std::recursive_mutex> _(mtx_motion_datas_);
motion_datas_.push_back(data);
}
if (motion_callback_) {
motion_callback_({data.imu});
}
}
void Correspondence::SetMotionCallback(API::motion_callback_t callback) {
// LOG(INFO) << __func__;
motion_callback_ = callback;
}
void Correspondence::WaitForStreams() {
if (stream_ == Stream::LEFT || stream_ == Stream::RIGHT) {
// Wait until the native streams are ready, to avoid returning empty stream data
// TODO: determine the native streams according to the device
WaitStreamDataReady();
return;
}
device_->WaitForStreams();
}
api::StreamData Correspondence::GetStreamData(const Stream &stream) {
auto datas = GetStreamDatas(stream);
return datas.empty() ? api::StreamData{} : datas.back();
}
std::vector<api::StreamData> Correspondence::GetStreamDatas(
const Stream &stream) {
if (!Watch(stream)) {
LOG(ERROR) << "Get unwatched stream data of " << stream;
return {};
}
std::lock_guard<std::recursive_mutex> _(mtx_stream_datas_);
static std::uint32_t stream_count_ = 0;
static std::uint32_t stream_match_count_ = 0;
if (stream == stream_) {
auto datas = GetReadyStreamData(false);
if (stream_count_ < 10) {
++stream_count_;
} else {
// the stream is being fetched but the matched stream never is; disable matching
if (stream_match_count_ == 0) {
DisableStreamMatch();
}
}
return datas;
} else if (/*stream_match_enabled_ && */stream == stream_match_) {
auto datas = GetReadyStreamData(true);
if (stream_match_count_ < 10) {
++stream_match_count_;
}
return datas;
}
return {};
}
std::vector<api::MotionData> Correspondence::GetMotionDatas() {
return GetReadyMotionDatas();
}
void Correspondence::EnableStreamMatch() {
stream_match_enabled_ = true;
}
void Correspondence::DisableStreamMatch() {
stream_match_enabled_ = false;
stream_datas_match_.clear();
}
void Correspondence::WaitStreamDataReady() {
std::unique_lock<std::recursive_mutex> lock(mtx_stream_datas_);
auto ready = std::bind(&Correspondence::IsStreamDataReady, this);
bool ok = cond_stream_datas_.wait_for(lock, std::chrono::seconds(3), ready);
if (!ok) {
LOG(FATAL) << "Timeout waiting for key frames. Please use USB 3.0, and not "
"in virtual machine.";
}
}
void Correspondence::NotifyStreamDataReady() {
cond_stream_datas_.notify_one();
}
bool Correspondence::IsStreamDataReady() {
if (stream_datas_.empty()) return false;
if (stream_match_enabled_) {
if (stream_datas_match_.empty()) return false;
}
if (motion_datas_.empty()) return false;
std::uint64_t img_stamp = 0;
std::uint64_t img_match_stamp = 0;
{
std::lock_guard<std::recursive_mutex> _(mtx_stream_datas_);
auto data = stream_datas_.front();
if (data.img == nullptr) {
LOG(FATAL) << "stream data image info is empty!";
}
img_stamp = data.img->timestamp;
if (stream_match_enabled_) {
img_match_stamp = stream_datas_match_.front().img->timestamp;
}
}
std::uint64_t imu_stamp = 0;
{
std::lock_guard<std::recursive_mutex> _(mtx_motion_datas_);
auto data = motion_datas_.back();
if (data.imu == nullptr) {
LOG(FATAL) << "motion data imu info is empty!";
}
imu_stamp = data.imu->timestamp;
}
if (stream_match_enabled_) {
return img_stamp + stream_interval_us_half_ < imu_stamp
&& img_match_stamp + stream_interval_us_half_ < imu_stamp;
} else {
return img_stamp + stream_interval_us_half_ < imu_stamp;
}
}
std::vector<api::StreamData> Correspondence::GetReadyStreamData(bool matched) {
std::uint64_t imu_stamp = 0;
{
std::lock_guard<std::recursive_mutex> _(mtx_motion_datas_);
if (motion_datas_.empty()) {
LOG(WARNING) << "motion data is unexpected empty!"
"\n\n Please ensure Start(Source::MOTION_TRACKING) "
"or Start(Source::ALL)\n";
std::lock_guard<std::recursive_mutex> _(mtx_stream_datas_);
return std::move(matched ? stream_datas_match_ : stream_datas_);
}
imu_stamp = motion_datas_.back().imu->timestamp;
}
std::lock_guard<std::recursive_mutex> _(mtx_stream_datas_);
std::vector<api::StreamData> &datas =
matched ? stream_datas_match_ : stream_datas_;
// LOG(INFO) << "datas.size: " << datas.size() << ", matched: " << matched;
std::vector<api::StreamData> result;
for (auto it = datas.begin(); it != datas.end(); ) {
// LOG(INFO) << "data.id: " << it->frame_id;
auto img_stamp = it->img->timestamp;
if (img_stamp + stream_interval_us_half_ < imu_stamp) {
// LOG(INFO) << "data.id: " << it->frame_id << " > result";
result.push_back(std::move(*it));
it = datas.erase(it);
} else {
// ++it;
break;
}
}
// LOG(INFO) << "datas.size: " << datas.size()
// << ", result.size: " << result.size();
if (!matched && !result.empty()) {
// record the latest ready image timestamp, used to window the motion data
ready_image_timestamp_ = result.back().img->timestamp;
}
return result;
}
std::vector<api::MotionData> Correspondence::GetReadyMotionDatas() {
if (ready_image_timestamp_ == 0) return {};
std::lock_guard<std::recursive_mutex> _(mtx_motion_datas_);
std::vector<api::MotionData> result;
auto &&datas = motion_datas_;
for (auto it = datas.begin(); it != datas.end(); ) {
auto imu_stamp = it->imu->timestamp;
if (imu_stamp < ready_image_timestamp_ - stream_interval_us_half_) {
it = datas.erase(it);
} else if (imu_stamp > ready_image_timestamp_ + stream_interval_us_half_) {
// ++it;
break;
} else {
result.push_back({it->imu});
it = datas.erase(it);
}
}
return result;
}
MYNTEYE_END_NAMESPACE
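A rough worked example of the matching window implemented above: with the FRAME_RATE option reporting 30 fps, stream_interval_us_ is about 33333 µs and the half-interval about 16667 µs. A frame stamped at 100000 µs is only released by GetReadyStreamData() once an IMU sample newer than 116667 µs has arrived, and GetReadyMotionDatas() then returns exactly the IMU samples whose timestamps fall inside [83333 µs, 116667 µs] around that frame, erasing older samples and leaving newer ones for the next frame.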

View File

@@ -0,0 +1,80 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_API_CORRESPONDENCE_H_
#define MYNTEYE_API_CORRESPONDENCE_H_
#pragma once
#include <atomic>
#include <condition_variable>
#include <memory>
#include <mutex>
#include <vector>
#include "mynteye/api/api.h"
#include "mynteye/device/callbacks.h"
MYNTEYE_BEGIN_NAMESPACE
class Correspondence {
public:
Correspondence(const std::shared_ptr<Device> &device, const Stream &stream);
~Correspondence();
bool Watch(const Stream &stream) const;
void OnStreamDataCallback(const Stream &stream, const api::StreamData &data);
void OnMotionDataCallback(const device::MotionData &data);
void SetMotionCallback(API::motion_callback_t callback);
void WaitForStreams();
api::StreamData GetStreamData(const Stream &stream);
std::vector<api::StreamData> GetStreamDatas(const Stream &stream);
std::vector<api::MotionData> GetMotionDatas();
private:
void EnableStreamMatch();
void DisableStreamMatch();
void WaitStreamDataReady();
void NotifyStreamDataReady();
bool IsStreamDataReady();
std::vector<api::StreamData> GetReadyStreamData(bool matched);
std::vector<api::MotionData> GetReadyMotionDatas();
std::shared_ptr<Device> device_;
Stream stream_;
Stream stream_match_;
std::atomic_bool stream_match_enabled_;
float stream_interval_us_;
float stream_interval_us_half_;
API::motion_callback_t motion_callback_;
std::vector<device::MotionData> motion_datas_;
std::recursive_mutex mtx_motion_datas_;
std::vector<api::StreamData> stream_datas_;
std::vector<api::StreamData> stream_datas_match_;
std::recursive_mutex mtx_stream_datas_;
std::condition_variable_any cond_stream_datas_;
std::uint64_t ready_image_timestamp_;
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_API_CORRESPONDENCE_H_

View File

@@ -74,6 +74,44 @@ void process_childs(
}
}
// ObjMat/ObjMat2 > api::StreamData
api::StreamData obj_data_first(const ObjMat2 *obj) {
return {obj->first_data, obj->first, nullptr, obj->first_id};
}
api::StreamData obj_data_second(const ObjMat2 *obj) {
return {obj->second_data, obj->second, nullptr, obj->second_id};
}
api::StreamData obj_data(const ObjMat *obj) {
return {obj->data, obj->value, nullptr, obj->id};
}
api::StreamData obj_data_first(const std::shared_ptr<ObjMat2> &obj) {
return {obj->first_data, obj->first, nullptr, obj->first_id};
}
api::StreamData obj_data_second(const std::shared_ptr<ObjMat2> &obj) {
return {obj->second_data, obj->second, nullptr, obj->second_id};
}
api::StreamData obj_data(const std::shared_ptr<ObjMat> &obj) {
return {obj->data, obj->value, nullptr, obj->id};
}
// api::StreamData > ObjMat/ObjMat2
ObjMat data_obj(const api::StreamData &data) {
return ObjMat{data.frame, data.frame_id, data.img};
}
ObjMat2 data_obj(const api::StreamData &first, const api::StreamData &second) {
return ObjMat2{
first.frame, first.frame_id, first.img,
second.frame, second.frame_id, second.img};
}
} // namespace
void Synthetic::InitCalibInfo() {
@@ -105,7 +143,8 @@ Synthetic::Synthetic(API *api, CalibrationModel calib_model)
: api_(api),
plugin_(nullptr),
calib_model_(calib_model),
calib_default_tag_(false) {
calib_default_tag_(false),
stream_data_listener_(nullptr) {
VLOG(2) << __func__;
CHECK_NOTNULL(api_);
InitCalibInfo();
@@ -121,6 +160,10 @@ Synthetic::~Synthetic() {
}
}
void Synthetic::SetStreamDataListener(stream_data_listener_t listener) {
stream_data_listener_ = listener;
}
void Synthetic::NotifyImageParamsChanged() {
if (!calib_default_tag_) {
intr_left_ = api_->GetIntrinsicsBase(Stream::LEFT);
@@ -195,27 +238,6 @@ bool Synthetic::checkControlDateWithStream(const Stream& stream) const {
return false;
}
void Synthetic::EnableStreamData(const Stream &stream) {
// Activate processors of synthetic stream
auto processor = getProcessorWithStream(stream);
iterate_processor_CtoP_before(processor,
[](std::shared_ptr<Processor> proce){
auto streams = proce->getTargetStreams();
int act_tag = 0;
for (unsigned int i = 0; i < proce->getStreamsSum() ; i++) {
if (proce->target_streams_[i].enabled_mode_ == MODE_LAST) {
act_tag++;
proce->target_streams_[i].enabled_mode_ = MODE_SYNTHETIC;
}
}
if (act_tag > 0 && !proce->IsActivated()) {
// std::cout << proce->Name() << " Active now" << std::endl;
proce->Activate();
}
});
}
bool Synthetic::Supports(const Stream &stream) const {
return checkControlDateWithStream(stream);
}
@@ -228,16 +250,45 @@ Synthetic::mode_t Synthetic::SupportsMode(const Stream &stream) const {
return MODE_LAST;
}
void Synthetic::DisableStreamData(const Stream &stream) {
void Synthetic::EnableStreamData(
const Stream &stream, stream_switch_callback_t callback,
bool try_tag) {
// Activate processors of synthetic stream
auto processor = getProcessorWithStream(stream);
iterate_processor_CtoP_before(processor,
[callback, try_tag](std::shared_ptr<Processor> proce){
auto streams = proce->getTargetStreams();
int act_tag = 0;
for (unsigned int i = 0; i < proce->getStreamsSum() ; i++) {
if (proce->target_streams_[i].enabled_mode_ == MODE_LAST) {
callback(proce->target_streams_[i].stream);
if (!try_tag) {
act_tag++;
proce->target_streams_[i].enabled_mode_ = MODE_SYNTHETIC;
}
}
}
if (act_tag > 0 && !proce->IsActivated()) {
// std::cout << proce->Name() << " Active now" << std::endl;
proce->Activate();
}
});
}
void Synthetic::DisableStreamData(
const Stream &stream, stream_switch_callback_t callback,
bool try_tag) {
auto processor = getProcessorWithStream(stream);
iterate_processor_PtoC_before(processor,
[](std::shared_ptr<Processor> proce){
[callback, try_tag](std::shared_ptr<Processor> proce){
auto streams = proce->getTargetStreams();
int act_tag = 0;
for (unsigned int i = 0; i < proce->getStreamsSum() ; i++) {
if (proce->target_streams_[i].enabled_mode_ == MODE_SYNTHETIC) {
act_tag++;
proce->target_streams_[i].enabled_mode_ = MODE_LAST;
callback(proce->target_streams_[i].stream);
if (!try_tag) {
act_tag++;
proce->target_streams_[i].enabled_mode_ = MODE_LAST;
}
}
}
if (act_tag > 0 && proce->IsActivated()) {
@@ -247,6 +298,20 @@ void Synthetic::DisableStreamData(const Stream &stream) {
});
}
void Synthetic::EnableStreamData(const Stream &stream) {
EnableStreamData(stream, [](const Stream &stream){
// std::cout << stream << "enabled in callback" << std::endl;
MYNTEYE_UNUSED(stream);
}, false);
}
void Synthetic::DisableStreamData(const Stream &stream) {
DisableStreamData(stream, [](const Stream &stream){
// std::cout << stream << "disabled in callback" << std::endl;
MYNTEYE_UNUSED(stream);
}, false);
}
bool Synthetic::IsStreamDataEnabled(const Stream &stream) const {
if (checkControlDateWithStream(stream)) {
auto data = getControlDateWithStream(stream);
@@ -335,7 +400,7 @@ api::StreamData Synthetic::GetStreamData(const Stream &stream) {
if (out != nullptr) {
auto &&output = Object::Cast<ObjMat>(out);
if (output != nullptr) {
return {output->data, output->value, nullptr, output->id};
return obj_data(output);
}
VLOG(2) << "Rectify not ready now";
}
@@ -349,15 +414,9 @@ api::StreamData Synthetic::GetStreamData(const Stream &stream) {
for (auto it : streams) {
if (it.stream == stream) {
if (num == 1) {
return {output->first_data,
output->first,
nullptr,
output->first_id};
return obj_data_first(output);
} else {
return {output->second_data,
output->second,
nullptr,
output->second_id};
return obj_data_second(output);
}
}
num++;
@@ -452,66 +511,60 @@ bool Synthetic::IsStreamEnabledSynthetic(const Stream &stream) const {
void Synthetic::InitProcessors() {
std::shared_ptr<Processor> rectify_processor = nullptr;
#ifdef WITH_CAM_MODELS
std::shared_ptr<RectifyProcessor> rectify_processor_imp = nullptr;
#endif
cv::Mat Q;
if (calib_model_ == CalibrationModel::PINHOLE) {
auto &&rectify_processor_ocv =
std::make_shared<RectifyProcessorOCV>(intr_left_, intr_right_, extr_,
RECTIFY_PROC_PERIOD);
Q = rectify_processor_ocv->Q;
rectify_processor = rectify_processor_ocv;
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
rectify_processor_imp =
std::make_shared<RectifyProcessor>(intr_left_, intr_right_, extr_,
RECTIFY_PROC_PERIOD);
rectify_processor = rectify_processor_imp;
#endif
} else {
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_ << ", use default pinhole model";
auto &&rectify_processor_ocv =
std::make_shared<RectifyProcessorOCV>(intr_left_, intr_right_, extr_,
RECTIFY_PROC_PERIOD);
rectify_processor = rectify_processor_ocv;
}
std::shared_ptr<Processor> points_processor = nullptr;
std::shared_ptr<Processor> depth_processor = nullptr;
auto &&disparity_processor =
std::make_shared<DisparityProcessor>(DisparityComputingMethod::SGBM,
DISPARITY_PROC_PERIOD);
auto &&disparitynormalized_processor =
std::make_shared<DisparityNormalizedProcessor>(
DISPARITY_NORM_PROC_PERIOD);
std::shared_ptr<Processor> points_processor = nullptr;
if (calib_model_ == CalibrationModel::PINHOLE) {
auto root_processor =
std::make_shared<RootProcessor>(ROOT_PROC_PERIOD);
if (calib_model_ == CalibrationModel::PINHOLE) {
// PINHOLE
auto &&rectify_processor_ocv =
std::make_shared<RectifyProcessorOCV>(intr_left_, intr_right_, extr_,
RECTIFY_PROC_PERIOD);
rectify_processor = rectify_processor_ocv;
points_processor = std::make_shared<PointsProcessorOCV>(
Q, POINTS_PROC_PERIOD);
rectify_processor_ocv->Q, POINTS_PROC_PERIOD);
depth_processor = std::make_shared<DepthProcessorOCV>(DEPTH_PROC_PERIOD);
root_processor->AddChild(rectify_processor);
rectify_processor->AddChild(disparity_processor);
disparity_processor->AddChild(disparitynormalized_processor);
disparity_processor->AddChild(points_processor);
points_processor->AddChild(depth_processor);
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
// KANNALA_BRANDT
auto rectify_processor_imp =
std::make_shared<RectifyProcessor>(intr_left_, intr_right_, extr_,
RECTIFY_PROC_PERIOD);
rectify_processor = rectify_processor_imp;
points_processor = std::make_shared<PointsProcessor>(
rectify_processor_imp -> getCalibInfoPair(),
POINTS_PROC_PERIOD);
#endif
} else {
points_processor = std::make_shared<PointsProcessorOCV>(
Q, POINTS_PROC_PERIOD);
}
std::shared_ptr<Processor> depth_processor = nullptr;
if (calib_model_ == CalibrationModel::PINHOLE) {
depth_processor = std::make_shared<DepthProcessorOCV>(DEPTH_PROC_PERIOD);
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
depth_processor = std::make_shared<DepthProcessor>(
rectify_processor_imp -> getCalibInfoPair(),
DEPTH_PROC_PERIOD);
root_processor->AddChild(rectify_processor);
rectify_processor->AddChild(disparity_processor);
disparity_processor->AddChild(disparitynormalized_processor);
disparity_processor->AddChild(depth_processor);
depth_processor->AddChild(points_processor);
#endif
} else {
depth_processor = std::make_shared<DepthProcessorOCV>(DEPTH_PROC_PERIOD);
// UNKNOW
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_;
return;
}
auto root_processor =
std::make_shared<RootProcessor>(ROOT_PROC_PERIOD);
root_processor->AddChild(rectify_processor);
rectify_processor->addTargetStreams(
{Stream::LEFT_RECTIFIED, Mode::MODE_LAST, Mode::MODE_LAST, nullptr});
@@ -559,30 +612,14 @@ void Synthetic::InitProcessors() {
depth_processor->SetPostProcessCallback(
std::bind(&Synthetic::OnDepthPostProcess, this, _1));
if (calib_model_ == CalibrationModel::PINHOLE) {
// PINHOLE
rectify_processor->AddChild(disparity_processor);
disparity_processor->AddChild(disparitynormalized_processor);
disparity_processor->AddChild(points_processor);
points_processor->AddChild(depth_processor);
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
// KANNALA_BRANDT
rectify_processor->AddChild(disparity_processor);
disparity_processor->AddChild(disparitynormalized_processor);
disparity_processor->AddChild(depth_processor);
depth_processor->AddChild(points_processor);
} else {
// UNKNOW
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_;
}
processor_ = rectify_processor;
processor_ = root_processor;
}
void Synthetic::ProcessNativeStream(
const Stream &stream, const api::StreamData &data) {
NotifyStreamData(stream, data);
if (stream == Stream::LEFT || stream == Stream::RIGHT) {
std::unique_lock<std::mutex> lk(mtx_left_right_ready_);
static api::StreamData left_data, right_data;
if (stream == Stream::LEFT) {
left_data = data;
@@ -603,9 +640,7 @@ void Synthetic::ProcessNativeStream(
<< calib_model_ << ", use default pinhole model";
processor = find_processor<RectifyProcessorOCV>(processor_);
}
processor->Process(ObjMat2{
left_data.frame, left_data.frame_id, left_data.img,
right_data.frame, right_data.frame_id, right_data.img});
processor->Process(data_obj(left_data, right_data));
}
return;
}
@@ -627,34 +662,28 @@ void Synthetic::ProcessNativeStream(
name = RectifyProcessor::NAME;
#endif
}
process_childs(
processor_, name, ObjMat2{
left_rect_data.frame, left_rect_data.frame_id, left_rect_data.img,
right_rect_data.frame, right_rect_data.frame_id,
right_rect_data.img});
process_childs(processor_, name,
data_obj(left_rect_data, right_rect_data));
}
return;
}
switch (stream) {
case Stream::DISPARITY: {
process_childs(processor_, DisparityProcessor::NAME,
ObjMat{data.frame, data.frame_id, data.img});
process_childs(processor_, DisparityProcessor::NAME, data_obj(data));
} break;
case Stream::DISPARITY_NORMALIZED: {
process_childs(processor_, DisparityNormalizedProcessor::NAME,
ObjMat{data.frame, data.frame_id, data.img});
data_obj(data));
} break;
case Stream::POINTS: {
if (calib_model_ == CalibrationModel::PINHOLE) {
// PINHOLE
process_childs(processor_, PointsProcessorOCV::NAME,
ObjMat{data.frame, data.frame_id, data.img});
process_childs(processor_, PointsProcessorOCV::NAME, data_obj(data));
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
// KANNALA_BRANDT
process_childs(processor_, PointsProcessor::NAME,
ObjMat{data.frame, data.frame_id, data.img});
process_childs(processor_, PointsProcessor::NAME, data_obj(data));
#endif
} else {
// UNKNOW
@@ -665,13 +694,11 @@ void Synthetic::ProcessNativeStream(
case Stream::DEPTH: {
if (calib_model_ == CalibrationModel::PINHOLE) {
// PINHOLE
process_childs(processor_, DepthProcessorOCV::NAME,
ObjMat{data.frame, data.frame_id, data.img});
process_childs(processor_, DepthProcessorOCV::NAME, data_obj(data));
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
// KANNALA_BRANDT
process_childs(processor_, DepthProcessor::NAME,
ObjMat{data.frame, data.frame_id, data.img});
process_childs(processor_, DepthProcessor::NAME, data_obj(data));
#endif
} else {
// UNKNOW
@@ -737,51 +764,51 @@ bool Synthetic::OnDepthProcess(
void Synthetic::OnRectifyPostProcess(Object *const out) {
const ObjMat2 *output = Object::Cast<ObjMat2>(out);
NotifyStreamData(Stream::LEFT_RECTIFIED, obj_data_first(output));
NotifyStreamData(Stream::RIGHT_RECTIFIED, obj_data_second(output));
if (HasStreamCallback(Stream::LEFT_RECTIFIED)) {
auto data = getControlDateWithStream(Stream::LEFT_RECTIFIED);
data.stream_callback(
{output->first_data, output->first, nullptr, output->first_id});
data.stream_callback(obj_data_first(output));
}
if (HasStreamCallback(Stream::RIGHT_RECTIFIED)) {
auto data = getControlDateWithStream(Stream::RIGHT_RECTIFIED);
data.stream_callback(
{output->second_data, output->second, nullptr, output->second_id});
data.stream_callback(obj_data_second(output));
}
}
void Synthetic::OnDisparityPostProcess(Object *const out) {
const ObjMat *output = Object::Cast<ObjMat>(out);
NotifyStreamData(Stream::DISPARITY, obj_data(output));
if (HasStreamCallback(Stream::DISPARITY)) {
auto data = getControlDateWithStream(Stream::DISPARITY);
data.stream_callback(
{output->data, output->value, nullptr, output->id});
data.stream_callback(obj_data(output));
}
}
void Synthetic::OnDisparityNormalizedPostProcess(Object *const out) {
const ObjMat *output = Object::Cast<ObjMat>(out);
NotifyStreamData(Stream::DISPARITY_NORMALIZED, obj_data(output));
if (HasStreamCallback(Stream::DISPARITY_NORMALIZED)) {
auto data = getControlDateWithStream(Stream::DISPARITY_NORMALIZED);
data.stream_callback(
{output->data, output->value, nullptr, output->id});
data.stream_callback(obj_data(output));
}
}
void Synthetic::OnPointsPostProcess(Object *const out) {
const ObjMat *output = Object::Cast<ObjMat>(out);
NotifyStreamData(Stream::POINTS, obj_data(output));
if (HasStreamCallback(Stream::POINTS)) {
auto data = getControlDateWithStream(Stream::POINTS);
data.stream_callback(
{output->data, output->value, nullptr, output->id});
data.stream_callback(obj_data(output));
}
}
void Synthetic::OnDepthPostProcess(Object *const out) {
const ObjMat *output = Object::Cast<ObjMat>(out);
NotifyStreamData(Stream::DEPTH, obj_data(output));
if (HasStreamCallback(Stream::DEPTH)) {
auto data = getControlDateWithStream(Stream::DEPTH);
data.stream_callback(
{output->data, output->value, nullptr, output->id});
data.stream_callback(obj_data(output));
}
}
@@ -796,4 +823,11 @@ void Synthetic::SetDisparityComputingMethodType(
LOG(ERROR) << "ERROR: no suited processor for disparity computing.";
}
void Synthetic::NotifyStreamData(
const Stream &stream, const api::StreamData &data) {
if (stream_data_listener_) {
stream_data_listener_(stream, data);
}
}
MYNTEYE_END_NAMESPACE
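The stream_data_listener_t hook added here is what Correspondence plugs into, but it can also be used on its own. A minimal hypothetical listener, assuming access to the Synthetic instance and the stream operator<< from mynteye/types.h:

synthetic->SetStreamDataListener(
    [](const Stream &stream, const api::StreamData &data) {
      VLOG(2) << "stream " << stream << ", frame id: " << data.frame_id;
    });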

View File

@@ -19,6 +19,7 @@
#include <memory>
#include <string>
#include <vector>
#include <mutex>
#include "mynteye/api/api.h"
#include "mynteye/api/config.h"
@@ -34,6 +35,9 @@ struct Object;
class Synthetic {
public:
using stream_callback_t = API::stream_callback_t;
using stream_data_listener_t =
std::function<void(const Stream &stream, const api::StreamData &data)>;
using stream_switch_callback_t = API::stream_switch_callback_t;
typedef enum Mode {
MODE_NATIVE, // Native stream
@@ -51,6 +55,8 @@ class Synthetic {
explicit Synthetic(API *api, CalibrationModel calib_model);
~Synthetic();
void SetStreamDataListener(stream_data_listener_t listener);
void NotifyImageParamsChanged();
bool Supports(const Stream &stream) const;
@@ -58,6 +64,11 @@ class Synthetic {
void EnableStreamData(const Stream &stream);
void DisableStreamData(const Stream &stream);
void EnableStreamData(
const Stream &stream, stream_switch_callback_t callback, bool try_tag);
void DisableStreamData(
const Stream &stream, stream_switch_callback_t callback, bool try_tag);
bool IsStreamDataEnabled(const Stream &stream) const;
void SetStreamCallback(const Stream &stream, stream_callback_t callback);
@@ -125,6 +136,8 @@ class Synthetic {
void OnPointsPostProcess(Object *const out);
void OnDepthPostProcess(Object *const out);
void NotifyStreamData(const Stream &stream, const api::StreamData &data);
API *api_;
std::shared_ptr<Processor> processor_;
@@ -132,6 +145,7 @@ class Synthetic {
std::shared_ptr<Plugin> plugin_;
CalibrationModel calib_model_;
std::mutex mtx_left_right_ready_;
std::shared_ptr<IntrinsicsBase> intr_left_;
std::shared_ptr<IntrinsicsBase> intr_right_;
@@ -139,6 +153,8 @@ class Synthetic {
bool calib_default_tag_;
std::vector<std::shared_ptr<Processor>> processors_;
stream_data_listener_t stream_data_listener_;
};
class SyntheticProcessorPart {

View File

@@ -0,0 +1,136 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/api/version_checker.h"
#include "mynteye/device/utils.h"
#include "mynteye/logger.h"
#include "mynteye/types.h"
MYNTEYE_BEGIN_NAMESPACE
typedef struct {
const std::string device_type;
const std::string sdk_version;
const std::string firmware_version;
const std::string status;
} firmware_version_match_table_unit;
const char* ERRO_DESCRIPTION_F =
"Please update the firmware first";
const char* ERRO_DESCRIPTION_S =
"Please update the SDK first";
const char* WARN_DESCRIPTION_F =
"We suggest that you update the firmware";
const char* WARN_DESCRIPTION_S =
"We suggest that you update the SDK";
const char* PASS_DESCRIPTION = "pass";
/** firmware/SDK version match table */
/**----device type-----sdk version---firmware version-----pass tag-----*/
static const firmware_version_match_table_unit FSVM_TABLE[] ={
/** S1030 */
{"MYNT-EYE-S1030", ">2.3.0", ">2.2.0", PASS_DESCRIPTION},
{"MYNT-EYE-S1030", ">2.3.0", "2.2.0", WARN_DESCRIPTION_F},
{"MYNT-EYE-S1030", ">2.3.0", "<2.2.0", ERRO_DESCRIPTION_F},
{"MYNT-EYE-S1030", "<2.3.1", "<2.2.0", WARN_DESCRIPTION_S},
/** S2100 */
{"MYNT-EYE-S2100", ">2.3.0", "1.0", PASS_DESCRIPTION},
{"MYNT-EYE-S2100", "<2.3.1", "1.0", ERRO_DESCRIPTION_S},
/** S210A */
{"MYNT-EYE-S210A", ">2.3.0", "1.0", PASS_DESCRIPTION},
{"MYNT-EYE-S210A", "<2.3.1", "1.0", ERRO_DESCRIPTION_S},
};
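// Parses a dotted version string such as "2.3.1" into numeric components
// (one component per byte, up to four), so versions can be compared with memcmp() below.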
void getVersion(const std::string &str, char *version) {
std::string st1("");
int j = 0;
for (size_t i = 0; i < str.size(); i++) {
if (str[i] == '.') {
version[j++] = atoi(st1.c_str());
st1 = "";
} else {
st1 += str[i];
}
}
version[j++] = atoi(st1.c_str());
}
bool conditionMatch(const std::string& condition, const std::string& target) {
char version[4] = {0};
char version_c[4] = {0};
getVersion(target, version);
int tag_c = 0;
std::string condition_c;
if (condition[0] == '>') {
tag_c = 1;
condition_c = condition.substr(1);
} else if (condition[0] == '<') {
tag_c = -1;
condition_c = condition.substr(1);
} else {
tag_c = 0;
condition_c = condition;
}
getVersion(condition_c, version_c);
int tag_big = memcmp(version, version_c, 4);
if (tag_big * tag_c > 0 || (tag_big == 0 && tag_c == 0)) return true;
return false;
}
enum STATUS_UNIT {
ST_PASS,
ST_ERRO_F,
ST_ERRO_S,
ST_NOT_PASS
};
STATUS_UNIT checkUnit(const std::string& sdkv,
const std::string& devn,
const std::string& firmv,
const firmware_version_match_table_unit& condition) {
if (condition.device_type == devn &&
conditionMatch(condition.sdk_version, sdkv) &&
conditionMatch(condition.firmware_version, firmv)) {
if (condition.status == ERRO_DESCRIPTION_F) return ST_ERRO_F;
if (condition.status == ERRO_DESCRIPTION_S) return ST_ERRO_S;
if (condition.status == WARN_DESCRIPTION_F ||
condition.status == WARN_DESCRIPTION_S) {
LOG(WARNING) << condition.status;
}
return ST_PASS;
}
return ST_NOT_PASS;
}
bool checkFirmwareVersion(const std::shared_ptr<API> api) {
auto sdkv = api->GetInfo(Info::SDK_VERSION);
auto devn = api->GetInfo(Info::DEVICE_NAME);
auto firmv = api->GetInfo(Info::FIRMWARE_VERSION);
for (size_t i =0;
i < sizeof(FSVM_TABLE)/sizeof(firmware_version_match_table_unit);
i++) {
auto res = checkUnit(sdkv, devn, firmv, FSVM_TABLE[i]);
if (res == ST_PASS) {
return true;
} else if (res == ST_ERRO_S || res == ST_ERRO_F) {
LOG(ERROR) << FSVM_TABLE[i].status;
return false;
}
}
LOG(ERROR) << ERRO_DESCRIPTION_S;
return false;
}
MYNTEYE_END_NAMESPACE
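To make the table matching concrete, here are a few hypothetical calls with the results that follow from conditionMatch() above; a leading '>' or '<' means strictly greater or strictly less, and no prefix means exact equality:

conditionMatch(">2.3.0", "2.3.1");  // true:  2.3.1 is strictly newer than 2.3.0
conditionMatch(">2.3.0", "2.3.0");  // false: strictly greater is required
conditionMatch("<2.3.1", "2.2.0");  // true
conditionMatch("1.0", "1.0");       // true:  exact match
// Example lookup: an S1030 with SDK 2.3.1 and firmware 2.2.0 misses the first
// table row (firmware not > 2.2.0) and matches the second, so
// checkFirmwareVersion() returns true but logs the firmware-update suggestion.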

View File

@@ -0,0 +1,25 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_API_VERSION_CHECKER_H_
#define MYNTEYE_API_VERSION_CHECKER_H_
#pragma once
#include <string>
#include "mynteye/api/api.h"
MYNTEYE_BEGIN_NAMESPACE
bool checkFirmwareVersion(const std::shared_ptr<API> api);
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_API_VERSION_CHECKER_H_

View File

@@ -460,6 +460,7 @@ bool Channels::GetFiles(
while (i < end) {
std::uint8_t file_id = *(data + i);
std::uint16_t file_size = bytes::_from_data<std::uint16_t>(data + i + 1);
LOG(INFO) << "GetFiles:data_size : " << file_size;
VLOG(2) << "GetFiles id: " << static_cast<int>(file_id)
<< ", size: " << file_size;
i += 3;

View File

@@ -32,6 +32,7 @@ std::size_t FileChannel::GetDeviceInfoFromData(
const std::uint8_t *data, const std::uint16_t &data_size,
device_info_t *info) {
auto n = dev_info_parser_->GetFromData(data, data_size, info);
LOG(INFO) << "GetDeviceInfoFromData:data_size : " << data_size;
auto spec_version = info->spec_version;
img_params_parser_->SetSpecVersion(spec_version);
imu_params_parser_->SetSpecVersion(spec_version);
@@ -113,6 +114,22 @@ std::size_t DeviceInfoParser::GetFromData(
info->nominal_baseline = bytes::_from_data<std::uint16_t>(data + i);
i += 2;
if (info->spec_version >= Version(1, 2)) {
// auxiliary_chip_version, 2
info->auxiliary_chip_version.set_major(data[i]);
info->auxiliary_chip_version.set_minor(data[i + 1]);
i += 2;
// isp_version, 2
info->isp_version.set_major(data[i]);
info->isp_version.set_minor(data[i + 1]);
i += 2;
} else {
info->auxiliary_chip_version.set_major(0);
info->auxiliary_chip_version.set_minor(0);
info->isp_version.set_major(0);
info->isp_version.set_minor(0);
}
// get other infos according to spec_version
MYNTEYE_UNUSED(data_size)
@@ -155,6 +172,17 @@ std::size_t DeviceInfoParser::SetToData(
bytes::_to_data(info->nominal_baseline, data + i);
i += 2;
if (info->spec_version >= Version(1, 2)) {
// auxiliary_chip_version, 2
data[i] = info->auxiliary_chip_version.major();
data[i + 1] = info->auxiliary_chip_version.minor();
i += 2;
// isp_version, 2
data[i] = info->isp_version.major();
data[i + 1] = info->isp_version.minor();
i += 2;
}
// set other infos according to spec_version
// others
@@ -181,7 +209,7 @@ std::size_t ImgParamsParser::GetFromData(
return GetFromData_v1_0(data, data_size, img_params);
}
// s210a old params
if (spec_version_ == Version(1, 1) && data_size == 404) {
if (spec_version_ >= Version(1, 1) && data_size == 404) {
return GetFromData_v1_1(data, data_size, img_params);
}
// get img params with new version format
@@ -406,7 +434,7 @@ std::size_t ImuParamsParser::GetFromData(
return GetFromData_old(data, data_size, imu_params);
}
// s210a old params
if (spec_version_ == Version(1, 1) && data_size == 384) {
if (spec_version_ >= Version(1, 1) && data_size == 384) {
return GetFromData_old(data, data_size, imu_params);
}
// get imu params with new version format

View File

@@ -234,6 +234,10 @@ std::string Device::GetInfo(const Info &info) const {
return device_info_->imu_type.to_string();
case Info::NOMINAL_BASELINE:
return std::to_string(device_info_->nominal_baseline);
case Info::AUXILIARY_CHIP_VERSION:
return device_info_->auxiliary_chip_version.to_string();
case Info::ISP_VERSION:
return device_info_->isp_version.to_string();
default:
LOG(WARNING) << "Unknown device info";
return "";
@@ -349,6 +353,9 @@ OptionInfo Device::GetOptionInfo(const Option &option) const {
std::int32_t Device::GetOptionValue(const Option &option) const {
if (!Supports(option)) {
if (option == Option::FRAME_RATE) {
return GetStreamRequest().fps;
}
LOG(WARNING) << "Unsupported option: " << option;
return -1;
}
@@ -466,6 +473,11 @@ std::vector<device::StreamData> Device::GetStreamDatas(const Stream &stream) {
return streams_->GetStreamDatas(stream);
}
void Device::DisableMotionDatas() {
CHECK_NOTNULL(motions_);
motions_->DisableMotionDatas();
}
void Device::EnableMotionDatas() {
EnableMotionDatas(std::numeric_limits<std::size_t>::max());
}
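The two new Info entries can be queried like the existing ones; a minimal hypothetical snippet, assuming a device is connected:

auto device = device::select();
if (device) {
  LOG(INFO) << "auxiliary chip: "
            << device->GetInfo(Info::AUXILIARY_CHIP_VERSION)
            << ", isp: " << device->GetInfo(Info::ISP_VERSION);
}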

View File

@@ -66,7 +66,10 @@ void Motions::SetMotionCallback(motion_callback_t callback) {
std::lock_guard<std::mutex> _(mtx_datas_);
motion_data_t data = {imu};
if (motion_datas_enabled_) {
if (motion_datas_enabled_ && motion_datas_max_size_ > 0) {
if (motion_datas_.size() >= motion_datas_max_size_) {
motion_datas_.erase(motion_datas_.begin());
}
motion_datas_.push_back(data);
}
@@ -98,13 +101,21 @@ void Motions::StopMotionTracking() {
}
}
void Motions::DisableMotionDatas() {
std::lock_guard<std::mutex> _(mtx_datas_);
motion_datas_enabled_ = false;
motion_datas_max_size_ = 0;
motion_datas_.clear();
}
void Motions::EnableMotionDatas(std::size_t max_size) {
if (max_size <= 0) {
LOG(WARNING) << "Could not enable motion datas with max_size <= 0";
return;
}
std::lock_guard<std::mutex> _(mtx_datas_);
motion_datas_enabled_ = true;
motion_datas_max_size = max_size;
motion_datas_max_size_ = max_size;
}
Motions::motion_datas_t Motions::GetMotionDatas() {

View File

@@ -42,6 +42,7 @@ class Motions {
void StartMotionTracking();
void StopMotionTracking();
void DisableMotionDatas();
void EnableMotionDatas(std::size_t max_size);
motion_datas_t GetMotionDatas();
@@ -52,7 +53,7 @@ class Motions {
motion_datas_t motion_datas_;
bool motion_datas_enabled_;
std::size_t motion_datas_max_size;
std::size_t motion_datas_max_size_;
bool is_imu_tracking;

View File

@@ -92,6 +92,8 @@ const char *to_string(const Info &value) {
CASE(LENS_TYPE)
CASE(IMU_TYPE)
CASE(NOMINAL_BASELINE)
CASE(AUXILIARY_CHIP_VERSION)
CASE(ISP_VERSION)
default:
CHECK(is_valid(value));
return "Info::UNKNOWN";