From 71e32860140e3dc9e25d8159c7b08d534a655bc2 Mon Sep 17 00:00:00 2001 From: TinyOh Date: Fri, 1 Mar 2019 16:08:15 +0800 Subject: [PATCH 01/10] refactor(synthetic): remove usless logic --- CMakeLists.txt | 1 + samples/tutorials/data/get_stereo.cc | 2 + src/mynteye/api/data_tools.cc | 78 ++++ src/mynteye/api/data_tools.h | 33 ++ src/mynteye/api/processor.cc | 69 ++++ src/mynteye/api/processor.h | 4 + .../api/processor/root_camera_processor.cc | 111 +++++- .../api/processor/root_camera_processor.h | 30 +- src/mynteye/api/synthetic.cc | 372 +++--------------- src/mynteye/api/synthetic.h | 13 +- 10 files changed, 375 insertions(+), 338 deletions(-) create mode 100644 src/mynteye/api/data_tools.cc create mode 100644 src/mynteye/api/data_tools.h diff --git a/CMakeLists.txt b/CMakeLists.txt index ace55a8..a79a007 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -221,6 +221,7 @@ if(WITH_API) src/mynteye/api/config.cc src/mynteye/api/correspondence.cc src/mynteye/api/version_checker.cc + src/mynteye/api/data_tools.cc ) if(WITH_CAM_MODELS) list(APPEND MYNTEYE_SRCS diff --git a/samples/tutorials/data/get_stereo.cc b/samples/tutorials/data/get_stereo.cc index 1dd024a..5f114c0 100644 --- a/samples/tutorials/data/get_stereo.cc +++ b/samples/tutorials/data/get_stereo.cc @@ -25,6 +25,8 @@ int main(int argc, char *argv[]) { auto &&request = api->SelectStreamRequest(&ok); if (!ok) return 1; api->ConfigStreamRequest(request); + api->EnableStreamData(Stream::LEFT); + api->EnableStreamData(Stream::RIGHT); api->Start(Source::VIDEO_STREAMING); diff --git a/src/mynteye/api/data_tools.cc b/src/mynteye/api/data_tools.cc new file mode 100644 index 0000000..73f5fd7 --- /dev/null +++ b/src/mynteye/api/data_tools.cc @@ -0,0 +1,78 @@ +// Copyright 2018 Slightech Co., Ltd. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include +#include +#include +#include "mynteye/api/data_tools.h" +#include "mynteye/logger.h" + +MYNTEYE_BEGIN_NAMESPACE + +cv::Mat frame2mat(const std::shared_ptr &frame) { + if (frame->format() == Format::YUYV) { + cv::Mat img(frame->height(), frame->width(), CV_8UC2, frame->data()); + cv::cvtColor(img, img, cv::COLOR_YUV2BGR_YUY2); + return img; + } else if (frame->format() == Format::BGR888) { + cv::Mat img(frame->height(), frame->width(), CV_8UC3, frame->data()); + return img; + } else { // Format::GRAY + return cv::Mat(frame->height(), frame->width(), CV_8UC1, frame->data()); + } +} + +api::StreamData data2api(const device::StreamData &data) { + return {data.img, frame2mat(data.frame), data.frame, data.frame_id}; +} + +// ObjMat/ObjMat2 > api::StreamData + +api::StreamData obj_data_first(const ObjMat2 *obj) { + return {obj->first_data, obj->first, nullptr, obj->first_id}; +} + +api::StreamData obj_data_second(const ObjMat2 *obj) { + return {obj->second_data, obj->second, nullptr, obj->second_id}; +} + +api::StreamData obj_data(const ObjMat *obj) { + return {obj->data, obj->value, nullptr, obj->id}; +} + +api::StreamData obj_data_first(const std::shared_ptr &obj) { + return {obj->first_data, obj->first, nullptr, obj->first_id}; +} + +api::StreamData obj_data_second(const std::shared_ptr &obj) { + return {obj->second_data, obj->second, nullptr, obj->second_id}; +} + +api::StreamData obj_data(const std::shared_ptr &obj) { + return {obj->data, obj->value, nullptr, obj->id}; +} + +// api::StreamData > ObjMat/ObjMat2 + +ObjMat data_obj(const api::StreamData &data) { + return ObjMat{data.frame, data.frame_id, data.img}; +} + +ObjMat2 data_obj(const api::StreamData &first, const api::StreamData &second) { + return ObjMat2{ + first.frame, first.frame_id, first.img, + second.frame, second.frame_id, second.img}; +} + +MYNTEYE_END_NAMESPACE diff --git a/src/mynteye/api/data_tools.h b/src/mynteye/api/data_tools.h new file mode 100644 index 0000000..845919b --- /dev/null +++ b/src/mynteye/api/data_tools.h @@ -0,0 +1,33 @@ +// Copyright 2018 Slightech Co., Ltd. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+#ifndef MYNTEYE_API_DATA_TOOLS_H_ +#define MYNTEYE_API_DATA_TOOLS_H_ +#pragma once +#include +#include "mynteye/api/object.h" +#include "mynteye/api/api.h" +#include "mynteye/device/device.h" +MYNTEYE_BEGIN_NAMESPACE +cv::Mat frame2mat(const std::shared_ptr &frame); +api::StreamData data2api(const device::StreamData &data); +api::StreamData obj_data_first(const ObjMat2 *obj); +api::StreamData obj_data_second(const ObjMat2 *obj); +api::StreamData obj_data(const ObjMat *obj); +api::StreamData obj_data_first(const std::shared_ptr &obj); +api::StreamData obj_data_second(const std::shared_ptr &obj); +api::StreamData obj_data(const std::shared_ptr &obj); +ObjMat data_obj(const api::StreamData &data); +ObjMat2 data_obj(const api::StreamData &first, const api::StreamData &second); +MYNTEYE_END_NAMESPACE +#endif // MYNTEYE_API_DATA_TOOLS_H_ diff --git a/src/mynteye/api/processor.cc b/src/mynteye/api/processor.cc index 8d65b35..93abed6 100644 --- a/src/mynteye/api/processor.cc +++ b/src/mynteye/api/processor.cc @@ -19,6 +19,7 @@ #include "mynteye/logger.h" #include "mynteye/util/strings.h" #include "mynteye/util/times.h" +#include "mynteye/api/data_tools.h" MYNTEYE_BEGIN_NAMESPACE @@ -245,6 +246,74 @@ void Processor::Run() { VLOG(2) << Name() << " thread end"; } +api::StreamData Processor::GetStreamData(const Stream &stream) { + auto sum = getStreamsSum(); + auto &&out = GetOutput(); + Synthetic::Mode enable_mode = Synthetic::MODE_LAST; + auto streams = getTargetStreams(); + for (auto it_s : streams) { + if (it_s.stream == stream) { + enable_mode = it_s.enabled_mode_; + break; + } + } + if (enable_mode == Synthetic::MODE_SYNTHETIC) { + if (sum == 1) { + if (out != nullptr) { + auto &&output = Object::Cast(out); + if (output != nullptr) { + return obj_data(output); + } + VLOG(2) << "Rectify not ready now"; + } + } else if (sum == 2) { + static std::shared_ptr output = nullptr; + if (out != nullptr) { + output = Object::Cast(out); + } + auto streams = getTargetStreams(); + if (output != nullptr) { + int num = 0; + for (auto it : streams) { + if (it.stream == stream) { + if (num == 1) { + return obj_data_first(output); + } else { + return obj_data_second(output); + } + } + num++; + } + } + VLOG(2) << "Rectify not ready now"; + } else { + LOG(ERROR) << "error: invalid sum!"; + } + return {}; // frame.empty() == true + } + LOG(ERROR) << "Failed to get stream data of " << stream + << ", unsupported or disabled"; + return {}; // frame.empty() == true +} + +std::vector Processor::GetStreamDatas(const Stream &stream) { + Synthetic::Mode enable_mode = Synthetic::MODE_LAST; + auto streams = getTargetStreams(); + for (auto it_s : streams) { + if (it_s.stream == stream) { + enable_mode = it_s.enabled_mode_; + break; + } + } + if (enable_mode == Synthetic::MODE_SYNTHETIC) { + return {GetStreamData(stream)}; + } else { + LOG(ERROR) << "Failed to get stream data of " << stream + << ", unsupported or disabled"; + } + return {}; +} + void Processor::SetIdle(bool idle) { std::lock_guard lk(mtx_state_); idle_ = idle; diff --git a/src/mynteye/api/processor.h b/src/mynteye/api/processor.h index bf36792..6ac1644 100644 --- a/src/mynteye/api/processor.h +++ b/src/mynteye/api/processor.h @@ -66,6 +66,10 @@ class Processor : /** Returns dropped or not. */ bool Process(const Object &in); + virtual api::StreamData GetStreamData(const Stream &stream); + + virtual std::vector GetStreamDatas(const Stream &stream); + /** * Returns the last output. * @note Returns null if not output now. 
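For context, the user-visible effect of this refactor shows up in the get_stereo.cc change above: LEFT/RIGHT are no longer switched on implicitly, so applications opt in with EnableStreamData() before Start(). Below is a minimal capture-loop sketch assembled only from calls that appear in this patch series (API::Create, SelectStreamRequest, ConfigStreamRequest, EnableStreamData, Start/Stop, WaitForStreams, GetStreamData); the display loop, window names and the ESC handling are illustrative additions, not part of the patch:

    #include <opencv2/highgui/highgui.hpp>
    #include "mynteye/api/api.h"

    MYNTEYE_USE_NAMESPACE

    int main(int argc, char *argv[]) {
      auto &&api = API::Create(argc, argv);
      if (!api) return 1;
      bool ok;
      auto &&request = api->SelectStreamRequest(&ok);
      if (!ok) return 1;
      api->ConfigStreamRequest(request);

      // After this refactor LEFT/RIGHT must be enabled explicitly,
      // otherwise GetStreamData() reports "unsupported or disabled".
      api->EnableStreamData(Stream::LEFT);
      api->EnableStreamData(Stream::RIGHT);

      api->Start(Source::VIDEO_STREAMING);
      while (true) {
        api->WaitForStreams();
        auto &&left = api->GetStreamData(Stream::LEFT);
        auto &&right = api->GetStreamData(Stream::RIGHT);
        if (!left.frame.empty() && !right.frame.empty()) {
          cv::imshow("left", left.frame);    // window names are illustrative
          cv::imshow("right", right.frame);
        }
        if (cv::waitKey(1) == 27) break;  // ESC quits the loop
      }
      api->Stop(Source::VIDEO_STREAMING);
      return 0;
    }
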
diff --git a/src/mynteye/api/processor/root_camera_processor.cc b/src/mynteye/api/processor/root_camera_processor.cc index d0e396a..0280b1a 100644 --- a/src/mynteye/api/processor/root_camera_processor.cc +++ b/src/mynteye/api/processor/root_camera_processor.cc @@ -14,15 +14,21 @@ #include "mynteye/api/processor/root_camera_processor.h" #include +#include #include #include #include "mynteye/logger.h" +#include "mynteye/api/synthetic.h" +#include "mynteye/device/device.h" +#include "mynteye/api/data_tools.h" MYNTEYE_BEGIN_NAMESPACE const char RootProcessor::NAME[] = "RootProcessor"; -RootProcessor::RootProcessor(std::int32_t proc_period) - : Processor(std::move(proc_period)) {} +RootProcessor::RootProcessor(std::shared_ptr device, + std::int32_t proc_period) + : Processor(std::move(proc_period)), + device_(device) {} RootProcessor::~RootProcessor() { VLOG(2) << __func__; } @@ -31,13 +37,110 @@ std::string RootProcessor::Name() { return NAME; } -Object *RootProcessor::OnCreateOutput() { +s1s2Processor::s1s2Processor(std::shared_ptr device, + std::int32_t proc_period) + : RootProcessor(device, std::move(proc_period)) {} +s1s2Processor::~s1s2Processor() { + VLOG(2) << __func__; +} + +Object *s1s2Processor::OnCreateOutput() { return new ObjMat2(); } -bool RootProcessor::OnProcess( +bool s1s2Processor::OnProcess( Object *const in, Object *const out, std::shared_ptr const parent) { + const ObjMat2 *input = Object::Cast(in); + ObjMat2 *output = Object::Cast(out); + output->second = input->second; + output->first = input->first; + output->first_id = input->first_id; + output->first_data = input->first_data; + output->second_id = input->second_id; + output->second_data = input->second_data; MYNTEYE_UNUSED(parent) return true; } + +void s1s2Processor::ProcessNativeStream( + const Stream &stream, const api::StreamData &data) { + std::unique_lock lk(mtx_left_right_ready_); + static api::StreamData left_data, right_data; + if (stream == Stream::LEFT) { + left_data = data; + } else if (stream == Stream::RIGHT) { + right_data = data; + } + if (left_data.img && right_data.img && + left_data.img->frame_id == right_data.img->frame_id) { + Process(data_obj(left_data, right_data)); + } + return; +} + +void s1s2Processor::StartVideoStreaming() { + auto streams = getTargetStreams(); + for (unsigned int j =0; j< streams.size(); j++) { + auto stream = streams[j].stream; + auto callback = streams[j].stream_callback; + device_->SetStreamCallback( + stream, + [this, stream, callback](const device::StreamData &data) { + auto &&stream_data = data2api(data); + ProcessNativeStream(stream, stream_data); + // Need mutex if set callback after start + if (callback) { + callback(stream_data); + } + }, + true); + } + device_->Start(Source::VIDEO_STREAMING); +} + +void s1s2Processor::StopVideoStreaming() { + auto streams = getTargetStreams(); + for (unsigned int j =0; j< streams.size(); j++) { + auto stream = streams[j].stream; + device_->SetStreamCallback(stream, nullptr); + } + device_->Stop(Source::VIDEO_STREAMING); +} + +api::StreamData s1s2Processor::GetStreamData(const Stream &stream) { + Synthetic::Mode enable_mode = Synthetic::MODE_LAST; + auto streams = getTargetStreams(); + for (auto it_s : streams) { + if (it_s.stream == stream) { + enable_mode = it_s.enabled_mode_; + break; + } + } + if (enable_mode == Synthetic::MODE_SYNTHETIC) { + return data2api(device_->GetStreamData(stream)); + } + LOG(ERROR) << "Failed to get device stream data of " << stream + << ", unsupported or disabled"; + return {}; +} + +std::vector 
s1s2Processor::GetStreamDatas( + const Stream &stream) { + Synthetic::Mode enable_mode = Synthetic::MODE_LAST; + auto streams = getTargetStreams(); + for (auto it_s : streams) { + if (it_s.stream == stream) { + enable_mode = it_s.enabled_mode_; + break; + } + } + if (enable_mode == Synthetic::MODE_SYNTHETIC) { + std::vector datas; + for (auto &&data : device_->GetStreamDatas(stream)) { + datas.push_back(data2api(data)); + } + return datas; + } +} + MYNTEYE_END_NAMESPACE diff --git a/src/mynteye/api/processor/root_camera_processor.h b/src/mynteye/api/processor/root_camera_processor.h index eaa3b6d..f7e1ed7 100644 --- a/src/mynteye/api/processor/root_camera_processor.h +++ b/src/mynteye/api/processor/root_camera_processor.h @@ -28,16 +28,42 @@ class RootProcessor : public Processor { public: static const char NAME[]; - explicit RootProcessor(std::int32_t proc_period = 0); + explicit RootProcessor(std::shared_ptr device, + std::int32_t proc_period = 0); virtual ~RootProcessor(); - std::string Name() override; + virtual std::string Name(); + virtual void StartVideoStreaming() = 0; + virtual void StopVideoStreaming() = 0; + virtual api::StreamData GetStreamData(const Stream &stream) = 0; + virtual std::vector GetStreamDatas(const Stream &stream) = 0; // NOLINT + protected: + virtual Object *OnCreateOutput() = 0; + virtual bool OnProcess( + Object *const in, Object *const out, + std::shared_ptr const parent) = 0; + std::shared_ptr device_; +}; + +class s1s2Processor : public RootProcessor { + public: + explicit s1s2Processor(std::shared_ptr device, + std::int32_t proc_period = 0); + virtual ~s1s2Processor(); + void StartVideoStreaming(); + void StopVideoStreaming(); + api::StreamData GetStreamData(const Stream &stream) override; + std::vector GetStreamDatas(const Stream &stream) override; // NOLINT protected: Object *OnCreateOutput() override; bool OnProcess( Object *const in, Object *const out, std::shared_ptr const parent) override; + private: + void ProcessNativeStream( + const Stream &stream, const api::StreamData &data); + std::mutex mtx_left_right_ready_; }; MYNTEYE_END_NAMESPACE diff --git a/src/mynteye/api/synthetic.cc b/src/mynteye/api/synthetic.cc index 654d30e..9f04ff0 100644 --- a/src/mynteye/api/synthetic.cc +++ b/src/mynteye/api/synthetic.cc @@ -36,6 +36,7 @@ #include "mynteye/api/processor/rectify_processor.h" #endif #include "mynteye/device/device.h" +#include "mynteye/api/data_tools.h" #define RECTIFY_PROC_PERIOD 0 #define DISPARITY_PROC_PERIOD 0 @@ -46,74 +47,6 @@ MYNTEYE_BEGIN_NAMESPACE -namespace { - -cv::Mat frame2mat(const std::shared_ptr &frame) { - if (frame->format() == Format::YUYV) { - cv::Mat img(frame->height(), frame->width(), CV_8UC2, frame->data()); - cv::cvtColor(img, img, cv::COLOR_YUV2BGR_YUY2); - return img; - } else if (frame->format() == Format::BGR888) { - cv::Mat img(frame->height(), frame->width(), CV_8UC3, frame->data()); - return img; - } else { // Format::GRAY - return cv::Mat(frame->height(), frame->width(), CV_8UC1, frame->data()); - } -} - -api::StreamData data2api(const device::StreamData &data) { - return {data.img, frame2mat(data.frame), data.frame, data.frame_id}; -} - -void process_childs( - const std::shared_ptr &proc, const std::string &name, - const Object &obj) { - auto &&processor = find_processor(proc, name); - for (auto child : processor->GetChilds()) { - child->Process(obj); - } -} - -// ObjMat/ObjMat2 > api::StreamData - -api::StreamData obj_data_first(const ObjMat2 *obj) { - return {obj->first_data, obj->first, nullptr, 
obj->first_id}; -} - -api::StreamData obj_data_second(const ObjMat2 *obj) { - return {obj->second_data, obj->second, nullptr, obj->second_id}; -} - -api::StreamData obj_data(const ObjMat *obj) { - return {obj->data, obj->value, nullptr, obj->id}; -} - -api::StreamData obj_data_first(const std::shared_ptr &obj) { - return {obj->first_data, obj->first, nullptr, obj->first_id}; -} - -api::StreamData obj_data_second(const std::shared_ptr &obj) { - return {obj->second_data, obj->second, nullptr, obj->second_id}; -} - -api::StreamData obj_data(const std::shared_ptr &obj) { - return {obj->data, obj->value, nullptr, obj->id}; -} - -// api::StreamData > ObjMat/ObjMat2 - -ObjMat data_obj(const api::StreamData &data) { - return ObjMat{data.frame, data.frame_id, data.img}; -} - -ObjMat2 data_obj(const api::StreamData &first, const api::StreamData &second) { - return ObjMat2{ - first.frame, first.frame_id, first.img, - second.frame, second.frame_id, second.img}; -} - -} // namespace - void Synthetic::InitCalibInfo() { if (calib_model_ == CalibrationModel::PINHOLE) { LOG(INFO) << "camera calib model: pinhole"; @@ -149,7 +82,6 @@ Synthetic::Synthetic(API *api, CalibrationModel calib_model) CHECK_NOTNULL(api_); InitCalibInfo(); InitProcessors(); - InitStreamSupports(); } Synthetic::~Synthetic() { @@ -171,19 +103,18 @@ void Synthetic::NotifyImageParamsChanged() { extr_ = std::make_shared( api_->GetExtrinsics(Stream::LEFT, Stream::RIGHT)); } - if (calib_model_ == CalibrationModel::PINHOLE) { - auto &&processor = find_processor(processor_); - if (processor) processor->ReloadImageParams(intr_left_, intr_right_, extr_); + auto processor = getProcessorWithStream(Stream::LEFT_RECTIFIED); + + if (processor && calib_model_ == CalibrationModel::PINHOLE) { + auto proc = static_cast(&(*processor)); + proc->ReloadImageParams(intr_left_, intr_right_, extr_); #ifdef WITH_CAM_MODELS - } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) { - auto &&processor = find_processor(processor_); - if (processor) processor->ReloadImageParams(intr_left_, intr_right_, extr_); + } else if (processor && calib_model_ == CalibrationModel::KANNALA_BRANDT) { + auto proc = static_cast(&(*processor)); + proc->ReloadImageParams(intr_left_, intr_right_, extr_); #endif } else { - LOG(ERROR) << "Unknow calib model type in device: " - << calib_model_ << ", use default pinhole model"; - auto &&processor = find_processor(processor_); - if (processor) processor->ReloadImageParams(intr_left_, intr_right_, extr_); + LOG(ERROR) << "Unknow calib model type in device" << std::endl; } } @@ -242,14 +173,6 @@ bool Synthetic::Supports(const Stream &stream) const { return checkControlDateWithStream(stream); } -Synthetic::mode_t Synthetic::SupportsMode(const Stream &stream) const { - if (checkControlDateWithStream(stream)) { - auto data = getControlDateWithStream(stream); - return data.support_mode_; - } - return MODE_LAST; -} - void Synthetic::EnableStreamData( const Stream &stream, stream_switch_callback_t callback, bool try_tag) { @@ -344,42 +267,13 @@ bool Synthetic::HasStreamCallback(const Stream &stream) const { } void Synthetic::StartVideoStreaming() { - auto &&device = api_->device(); - for (unsigned int i =0; i< processors_.size(); i++) { - auto streams = processors_[i]->getTargetStreams(); - for (unsigned int j =0; j< streams.size(); j++) { - if (processors_[i]->target_streams_[j].support_mode_ == MODE_NATIVE) { - auto stream = processors_[i]->target_streams_[j].stream; - device->SetStreamCallback( - stream, - [this, stream](const 
device::StreamData &data) { - auto &&stream_data = data2api(data); - ProcessNativeStream(stream, stream_data); - // Need mutex if set callback after start - if (HasStreamCallback(stream)) { - auto data = getControlDateWithStream(stream); - data.stream_callback(stream_data); - } - }, - true); - } - } - } - device->Start(Source::VIDEO_STREAMING); + auto processor_root = static_cast(&(*processor_)); + processor_root->StartVideoStreaming(); } void Synthetic::StopVideoStreaming() { - auto &&device = api_->device(); - for (unsigned int i =0; i< processors_.size(); i++) { - auto streams = processors_[i]->getTargetStreams(); - for (unsigned int j =0; j< streams.size(); j++) { - if (processors_[i]->target_streams_[j].support_mode_ == MODE_NATIVE) { - auto stream = processors_[i]->target_streams_[j].stream; - device->SetStreamCallback(stream, nullptr); - } - } - } - device->Stop(Source::VIDEO_STREAMING); + auto processor_root = static_cast(&(*processor_)); + processor_root->StopVideoStreaming(); } void Synthetic::WaitForStreams() { @@ -387,69 +281,11 @@ void Synthetic::WaitForStreams() { } api::StreamData Synthetic::GetStreamData(const Stream &stream) { - auto &&mode = GetStreamEnabledMode(stream); - if (mode == MODE_NATIVE) { - auto &&device = api_->device(); - return data2api(device->GetStreamData(stream)); - } else if (mode == MODE_SYNTHETIC) { - auto processor = getProcessorWithStream(stream); - auto sum = processor->getStreamsSum(); - auto &&out = processor->GetOutput(); - static std::shared_ptr output = nullptr; - if (sum == 1) { - if (out != nullptr) { - auto &&output = Object::Cast(out); - if (output != nullptr) { - return obj_data(output); - } - VLOG(2) << "Rectify not ready now"; - } - } else if (sum == 2) { - if (out != nullptr) { - output = Object::Cast(out); - } - auto streams = processor->getTargetStreams(); - if (output != nullptr) { - int num = 0; - for (auto it : streams) { - if (it.stream == stream) { - if (num == 1) { - return obj_data_first(output); - } else { - return obj_data_second(output); - } - } - num++; - } - } - VLOG(2) << "Rectify not ready now"; - } else { - LOG(ERROR) << "error: invalid sum!"; - } - return {}; // frame.empty() == true - } else { - LOG(ERROR) << "Failed to get stream data of " << stream - << ", unsupported or disabled"; - return {}; // frame.empty() == true - } + return getProcessorWithStream(stream)->GetStreamData(stream); } std::vector Synthetic::GetStreamDatas(const Stream &stream) { - auto &&mode = GetStreamEnabledMode(stream); - if (mode == MODE_NATIVE) { - auto &&device = api_->device(); - std::vector datas; - for (auto &&data : device->GetStreamDatas(stream)) { - datas.push_back(data2api(data)); - } - return datas; - } else if (mode == MODE_SYNTHETIC) { - return {GetStreamData(stream)}; - } else { - LOG(ERROR) << "Failed to get stream data of " << stream - << ", unsupported or disabled"; - } - return {}; + return getProcessorWithStream(stream)->GetStreamDatas(stream); } void Synthetic::SetPlugin(std::shared_ptr plugin) { @@ -460,39 +296,6 @@ bool Synthetic::HasPlugin() const { return plugin_ != nullptr; } -void Synthetic::InitStreamSupports() { - auto &&device = api_->device(); - if (device->Supports(Stream::LEFT) && device->Supports(Stream::RIGHT)) { - auto processor = getProcessorWithStream(Stream::LEFT); - for (unsigned int i = 0; i< processor->target_streams_.size(); i++) { - if (processor->target_streams_[i].stream == Stream::LEFT) { - processor->target_streams_[i].support_mode_ = MODE_NATIVE; - } - if (processor->target_streams_[i].stream 
== Stream::RIGHT) { - processor->target_streams_[i].support_mode_ = MODE_NATIVE; - } - } - - std::vector stream_chain{ - Stream::LEFT_RECTIFIED, Stream::RIGHT_RECTIFIED, - Stream::DISPARITY, Stream::DISPARITY_NORMALIZED, - Stream::POINTS, Stream::DEPTH}; - for (auto &&stream : stream_chain) { - auto processor = getProcessorWithStream(stream); - for (unsigned int i = 0; i< processor->target_streams_.size(); i++) { - if (processor->target_streams_[i].stream == stream) { - if (device->Supports(stream)) { - processor->target_streams_[i].support_mode_ = MODE_NATIVE; - processor->target_streams_[i].enabled_mode_ = MODE_NATIVE; - } else { - processor->target_streams_[i].support_mode_ = MODE_SYNTHETIC; - } - } - } - } - } -} - Synthetic::mode_t Synthetic::GetStreamEnabledMode(const Stream &stream) const { if (checkControlDateWithStream(stream)) { auto data = getControlDateWithStream(stream); @@ -501,14 +304,6 @@ Synthetic::mode_t Synthetic::GetStreamEnabledMode(const Stream &stream) const { return MODE_LAST; } -bool Synthetic::IsStreamEnabledNative(const Stream &stream) const { - return GetStreamEnabledMode(stream) == MODE_NATIVE; -} - -bool Synthetic::IsStreamEnabledSynthetic(const Stream &stream) const { - return GetStreamEnabledMode(stream) == MODE_SYNTHETIC; -} - void Synthetic::InitProcessors() { std::shared_ptr rectify_processor = nullptr; std::shared_ptr points_processor = nullptr; @@ -522,7 +317,7 @@ void Synthetic::InitProcessors() { DISPARITY_NORM_PROC_PERIOD); auto root_processor = - std::make_shared(ROOT_PROC_PERIOD); + std::make_shared(api_->device(), ROOT_PROC_PERIOD); if (calib_model_ == CalibrationModel::PINHOLE) { // PINHOLE @@ -567,21 +362,21 @@ void Synthetic::InitProcessors() { } rectify_processor->addTargetStreams( - {Stream::LEFT_RECTIFIED, Mode::MODE_LAST, Mode::MODE_LAST, nullptr}); + {Stream::LEFT_RECTIFIED, Mode::MODE_LAST, nullptr}); rectify_processor->addTargetStreams( - {Stream::RIGHT_RECTIFIED, Mode::MODE_LAST, Mode::MODE_LAST, nullptr}); + {Stream::RIGHT_RECTIFIED, Mode::MODE_LAST, nullptr}); disparity_processor->addTargetStreams( - {Stream::DISPARITY, Mode::MODE_LAST, Mode::MODE_LAST, nullptr}); + {Stream::DISPARITY, Mode::MODE_LAST, nullptr}); disparitynormalized_processor->addTargetStreams( - {Stream::DISPARITY_NORMALIZED, Mode::MODE_LAST, Mode::MODE_LAST, nullptr}); + {Stream::DISPARITY_NORMALIZED, Mode::MODE_LAST, nullptr}); points_processor->addTargetStreams( - {Stream::POINTS, Mode::MODE_LAST, Mode::MODE_LAST, nullptr}); + {Stream::POINTS, Mode::MODE_LAST, nullptr}); depth_processor->addTargetStreams( - {Stream::DEPTH, Mode::MODE_LAST, Mode::MODE_LAST, nullptr}); + {Stream::DEPTH, Mode::MODE_LAST, nullptr}); root_processor->addTargetStreams( - {Stream::LEFT, Mode::MODE_NATIVE, Mode::MODE_NATIVE, nullptr}); + {Stream::LEFT, Mode::MODE_LAST, nullptr}); root_processor->addTargetStreams( - {Stream::RIGHT, Mode::MODE_NATIVE, Mode::MODE_NATIVE, nullptr}); + {Stream::RIGHT, Mode::MODE_LAST, nullptr}); processors_.push_back(root_processor); processors_.push_back(rectify_processor); @@ -590,6 +385,8 @@ void Synthetic::InitProcessors() { processors_.push_back(points_processor); processors_.push_back(depth_processor); using namespace std::placeholders; // NOLINT + root_processor->SetProcessCallback( + std::bind(&Synthetic::OnDeviceProcess, this, _1, _2, _3)); rectify_processor->SetProcessCallback( std::bind(&Synthetic::OnRectifyProcess, this, _1, _2, _3)); disparity_processor->SetProcessCallback( @@ -601,6 +398,8 @@ void Synthetic::InitProcessors() { 
depth_processor->SetProcessCallback( std::bind(&Synthetic::OnDepthProcess, this, _1, _2, _3)); + root_processor->SetPostProcessCallback( + std::bind(&Synthetic::OnDevicePostProcess, this, _1)); rectify_processor->SetPostProcessCallback( std::bind(&Synthetic::OnRectifyPostProcess, this, _1)); disparity_processor->SetPostProcessCallback( @@ -615,100 +414,11 @@ void Synthetic::InitProcessors() { processor_ = root_processor; } -void Synthetic::ProcessNativeStream( - const Stream &stream, const api::StreamData &data) { - NotifyStreamData(stream, data); - if (stream == Stream::LEFT || stream == Stream::RIGHT) { - std::unique_lock lk(mtx_left_right_ready_); - static api::StreamData left_data, right_data; - if (stream == Stream::LEFT) { - left_data = data; - } else if (stream == Stream::RIGHT) { - right_data = data; - } - if (left_data.img && right_data.img && - left_data.img->frame_id == right_data.img->frame_id) { - std::shared_ptr processor = nullptr; - if (calib_model_ == CalibrationModel::PINHOLE) { - processor = find_processor(processor_); -#ifdef WITH_CAM_MODELS - } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) { - processor = find_processor(processor_); -#endif - } else { - LOG(ERROR) << "Unknow calib model type in device: " - << calib_model_ << ", use default pinhole model"; - processor = find_processor(processor_); - } - processor->Process(data_obj(left_data, right_data)); - } - return; - } - - if (stream == Stream::LEFT_RECTIFIED || stream == Stream::RIGHT_RECTIFIED) { - static api::StreamData left_rect_data, right_rect_data; - if (stream == Stream::LEFT_RECTIFIED) { - left_rect_data = data; - } else if (stream == Stream::RIGHT_RECTIFIED) { - right_rect_data = data; - } - if (left_rect_data.img && right_rect_data.img && - left_rect_data.img->frame_id == right_rect_data.img->frame_id) { - std::string name = RectifyProcessorOCV::NAME; - if (calib_model_ == CalibrationModel::PINHOLE) { - name = RectifyProcessorOCV::NAME; -#ifdef WITH_CAM_MODELS - } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) { - name = RectifyProcessor::NAME; -#endif - } - process_childs(processor_, name, - data_obj(left_rect_data, right_rect_data)); - } - return; - } - - switch (stream) { - case Stream::DISPARITY: { - process_childs(processor_, DisparityProcessor::NAME, data_obj(data)); - } break; - case Stream::DISPARITY_NORMALIZED: { - process_childs(processor_, DisparityNormalizedProcessor::NAME, - data_obj(data)); - } break; - case Stream::POINTS: { - if (calib_model_ == CalibrationModel::PINHOLE) { - // PINHOLE - process_childs(processor_, PointsProcessorOCV::NAME, data_obj(data)); -#ifdef WITH_CAM_MODELS - } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) { - // KANNALA_BRANDT - process_childs(processor_, PointsProcessor::NAME, data_obj(data)); -#endif - } else { - // UNKNOW - LOG(ERROR) << "Unknow calib model type in device: " - << calib_model_; - } - } break; - case Stream::DEPTH: { - if (calib_model_ == CalibrationModel::PINHOLE) { - // PINHOLE - process_childs(processor_, DepthProcessorOCV::NAME, data_obj(data)); -#ifdef WITH_CAM_MODELS - } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) { - // KANNALA_BRANDT - process_childs(processor_, DepthProcessor::NAME, data_obj(data)); -#endif - } else { - // UNKNOW - LOG(ERROR) << "Unknow calib model type in device: " - << calib_model_; - } - } break; - default: - break; - } +bool Synthetic::OnDeviceProcess( + Object *const in, Object *const out, + std::shared_ptr const parent) { + MYNTEYE_UNUSED(parent) + return 
GetStreamEnabledMode(Stream::LEFT) != MODE_SYNTHETIC; } bool Synthetic::OnRectifyProcess( @@ -762,6 +472,20 @@ bool Synthetic::OnDepthProcess( return GetStreamEnabledMode(Stream::DEPTH) != MODE_SYNTHETIC; } +void Synthetic::OnDevicePostProcess(Object *const out) { + const ObjMat2 *output = Object::Cast(out); + NotifyStreamData(Stream::LEFT, obj_data_first(output)); + NotifyStreamData(Stream::RIGHT, obj_data_second(output)); + if (HasStreamCallback(Stream::LEFT)) { + auto data = getControlDateWithStream(Stream::LEFT); + data.stream_callback(obj_data_first(output)); + } + if (HasStreamCallback(Stream::RIGHT)) { + auto data = getControlDateWithStream(Stream::RIGHT); + data.stream_callback(obj_data_second(output)); + } +} + void Synthetic::OnRectifyPostProcess(Object *const out) { const ObjMat2 *output = Object::Cast(out); NotifyStreamData(Stream::LEFT_RECTIFIED, obj_data_first(output)); diff --git a/src/mynteye/api/synthetic.h b/src/mynteye/api/synthetic.h index f1c3d1d..5476792 100644 --- a/src/mynteye/api/synthetic.h +++ b/src/mynteye/api/synthetic.h @@ -47,7 +47,6 @@ class Synthetic { struct stream_control_t { Stream stream; - mode_t support_mode_; mode_t enabled_mode_; stream_callback_t stream_callback; }; @@ -60,7 +59,6 @@ class Synthetic { void NotifyImageParamsChanged(); bool Supports(const Stream &stream) const; - mode_t SupportsMode(const Stream &stream) const; void EnableStreamData(const Stream &stream); void DisableStreamData(const Stream &stream); @@ -96,11 +94,8 @@ class Synthetic { private: void InitCalibInfo(); - void InitStreamSupports(); mode_t GetStreamEnabledMode(const Stream &stream) const; - bool IsStreamEnabledNative(const Stream &stream) const; - bool IsStreamEnabledSynthetic(const Stream &stream) const; void EnableStreamData(const Stream &stream, std::uint32_t depth); void DisableStreamData(const Stream &stream, std::uint32_t depth); @@ -112,8 +107,9 @@ class Synthetic { template bool DeactivateProcessor(bool tree = false); - void ProcessNativeStream(const Stream &stream, const api::StreamData &data); - + bool OnDeviceProcess( + Object *const in, Object *const out, + std::shared_ptr const parent); bool OnRectifyProcess( Object *const in, Object *const out, std::shared_ptr const parent); @@ -130,6 +126,7 @@ class Synthetic { Object *const in, Object *const out, std::shared_ptr const parent); + void OnDevicePostProcess(Object *const out); void OnRectifyPostProcess(Object *const out); void OnDisparityPostProcess(Object *const out); void OnDisparityNormalizedPostProcess(Object *const out); @@ -158,7 +155,7 @@ class Synthetic { }; class SyntheticProcessorPart { - private: + protected: inline std::vector getTargetStreams() { return target_streams_; } From 3b22fa3abc899cebfdefe74bc4e8e41cadfb3290 Mon Sep 17 00:00:00 2001 From: TinyOh Date: Fri, 1 Mar 2019 16:25:59 +0800 Subject: [PATCH 02/10] style(api): change sdk version api --- include/mynteye/api/api.h | 5 ++++- include/mynteye/types.h | 2 -- src/mynteye/api/api.cc | 28 ++++++++++++++-------------- src/mynteye/api/synthetic.cc | 4 ---- src/mynteye/api/version_checker.cc | 2 +- 5 files changed, 19 insertions(+), 22 deletions(-) diff --git a/include/mynteye/api/api.h b/include/mynteye/api/api.h index d369ecd..699247c 100644 --- a/include/mynteye/api/api.h +++ b/include/mynteye/api/api.h @@ -187,7 +187,10 @@ class MYNTEYE_API API { * Get the device info. */ std::string GetInfo(const Info &info) const; - + /** + * Get the sdk version. 
+ */ + std::string GetSDKVersion() const; /** * @deprecated Get the intrinsics (pinhole) of stream. */ diff --git a/include/mynteye/types.h b/include/mynteye/types.h index 5af327e..adf52c8 100644 --- a/include/mynteye/types.h +++ b/include/mynteye/types.h @@ -126,8 +126,6 @@ enum class Info : std::uint8_t { AUXILIARY_CHIP_VERSION, /** Isp version */ ISP_VERSION, - /** SDK version*/ - SDK_VERSION, /** Last guard */ LAST }; diff --git a/src/mynteye/api/api.cc b/src/mynteye/api/api.cc index 3f034be..3f522db 100644 --- a/src/mynteye/api/api.cc +++ b/src/mynteye/api/api.cc @@ -328,23 +328,23 @@ std::shared_ptr API::GetInfo() const { } std::string API::GetInfo(const Info &info) const { - if (info == Info::SDK_VERSION) { - std::string info_path = - utils::get_sdk_install_dir(); - info_path.append(MYNTEYE_OS_SEP "share" \ - MYNTEYE_OS_SEP "mynteye" MYNTEYE_OS_SEP "build.info"); - - cv::FileStorage fs(info_path, cv::FileStorage::READ); - if (!fs.isOpened()) { - LOG(WARNING) << "build.info not found: " << info_path; - return "null"; - } - return fs["MYNTEYE_VERSION"]; - } - return device_->GetInfo(info); } +std::string API::GetSDKVersion() const { + std::string info_path = + utils::get_sdk_install_dir(); + info_path.append(MYNTEYE_OS_SEP "share" \ + MYNTEYE_OS_SEP "mynteye" MYNTEYE_OS_SEP "build.info"); + + cv::FileStorage fs(info_path, cv::FileStorage::READ); + if (!fs.isOpened()) { + LOG(WARNING) << "build.info not found: " << info_path; + return "null"; + } + return fs["MYNTEYE_VERSION"]; +} + IntrinsicsPinhole API::GetIntrinsics(const Stream &stream) const { auto in = GetIntrinsicsBase(stream); if (in->calib_model() == CalibrationModel::PINHOLE) { diff --git a/src/mynteye/api/synthetic.cc b/src/mynteye/api/synthetic.cc index 9f04ff0..818dad0 100644 --- a/src/mynteye/api/synthetic.cc +++ b/src/mynteye/api/synthetic.cc @@ -385,8 +385,6 @@ void Synthetic::InitProcessors() { processors_.push_back(points_processor); processors_.push_back(depth_processor); using namespace std::placeholders; // NOLINT - root_processor->SetProcessCallback( - std::bind(&Synthetic::OnDeviceProcess, this, _1, _2, _3)); rectify_processor->SetProcessCallback( std::bind(&Synthetic::OnRectifyProcess, this, _1, _2, _3)); disparity_processor->SetProcessCallback( @@ -398,8 +396,6 @@ void Synthetic::InitProcessors() { depth_processor->SetProcessCallback( std::bind(&Synthetic::OnDepthProcess, this, _1, _2, _3)); - root_processor->SetPostProcessCallback( - std::bind(&Synthetic::OnDevicePostProcess, this, _1)); rectify_processor->SetPostProcessCallback( std::bind(&Synthetic::OnRectifyPostProcess, this, _1)); disparity_processor->SetPostProcessCallback( diff --git a/src/mynteye/api/version_checker.cc b/src/mynteye/api/version_checker.cc index 723bbf0..d958f35 100644 --- a/src/mynteye/api/version_checker.cc +++ b/src/mynteye/api/version_checker.cc @@ -113,7 +113,7 @@ STATUS_UNIT checkUnit(const std::string& sdkv, } bool checkFirmwareVersion(const std::shared_ptr api) { - auto sdkv = api->GetInfo(Info::SDK_VERSION); + auto sdkv = api->GetSDKVersion(); auto devn = api->GetInfo(Info::DEVICE_NAME); auto firmv = api->GetInfo(Info::FIRMWARE_VERSION); From 96a1b2a62dc8ef9652615c180dde7f4d22eb6a90 Mon Sep 17 00:00:00 2001 From: TinyOh Date: Sat, 2 Mar 2019 08:22:04 +0800 Subject: [PATCH 03/10] fix(samples): check sample enable code --- samples/tutorials/control/auto_exposure.cc | 2 ++ samples/tutorials/control/framerate.cc | 2 ++ samples/tutorials/control/imu_low_pass_filter.cc | 2 ++ samples/tutorials/control/imu_range.cc | 2 ++ 
samples/tutorials/control/infrared.cc | 2 ++ samples/tutorials/control/manual_exposure.cc | 2 ++ samples/tutorials/data/get_imu.cc | 2 ++ src/mynteye/api/synthetic.cc | 3 +-- 8 files changed, 15 insertions(+), 2 deletions(-) diff --git a/samples/tutorials/control/auto_exposure.cc b/samples/tutorials/control/auto_exposure.cc index cabe6ad..258504f 100644 --- a/samples/tutorials/control/auto_exposure.cc +++ b/samples/tutorials/control/auto_exposure.cc @@ -28,6 +28,8 @@ int main(int argc, char *argv[]) { bool ok; auto &&request = api->SelectStreamRequest(&ok); if (!ok) return 1; + api->EnableStreamData(Stream::LEFT); + api->EnableStreamData(Stream::RIGHT); api->ConfigStreamRequest(request); Model model = api->GetModel(); diff --git a/samples/tutorials/control/framerate.cc b/samples/tutorials/control/framerate.cc index 41f1e5d..faa1aec 100644 --- a/samples/tutorials/control/framerate.cc +++ b/samples/tutorials/control/framerate.cc @@ -28,6 +28,8 @@ int main(int argc, char *argv[]) { bool ok; auto &&request = api->SelectStreamRequest(&ok); if (!ok) return 1; + api->EnableStreamData(Stream::LEFT); + api->EnableStreamData(Stream::RIGHT); api->ConfigStreamRequest(request); Model model = api->GetModel(); diff --git a/samples/tutorials/control/imu_low_pass_filter.cc b/samples/tutorials/control/imu_low_pass_filter.cc index 88e71b8..f0119f2 100644 --- a/samples/tutorials/control/imu_low_pass_filter.cc +++ b/samples/tutorials/control/imu_low_pass_filter.cc @@ -28,6 +28,8 @@ int main(int argc, char *argv[]) { bool ok; auto &&request = api->SelectStreamRequest(&ok); if (!ok) return 1; + api->EnableStreamData(Stream::LEFT); + api->EnableStreamData(Stream::RIGHT); api->ConfigStreamRequest(request); Model model = api->GetModel(); diff --git a/samples/tutorials/control/imu_range.cc b/samples/tutorials/control/imu_range.cc index 1431b20..e6edecf 100644 --- a/samples/tutorials/control/imu_range.cc +++ b/samples/tutorials/control/imu_range.cc @@ -28,6 +28,8 @@ int main(int argc, char *argv[]) { bool ok; auto &&request = api->SelectStreamRequest(&ok); if (!ok) return 1; + api->EnableStreamData(Stream::LEFT); + api->EnableStreamData(Stream::RIGHT); api->ConfigStreamRequest(request); Model model = api->GetModel(); diff --git a/samples/tutorials/control/infrared.cc b/samples/tutorials/control/infrared.cc index c261e7a..b44d49d 100644 --- a/samples/tutorials/control/infrared.cc +++ b/samples/tutorials/control/infrared.cc @@ -25,6 +25,8 @@ int main(int argc, char *argv[]) { bool ok; auto &&request = api->SelectStreamRequest(&ok); if (!ok) return 1; + api->EnableStreamData(Stream::LEFT); + api->EnableStreamData(Stream::RIGHT); api->ConfigStreamRequest(request); Model model = api->GetModel(); diff --git a/samples/tutorials/control/manual_exposure.cc b/samples/tutorials/control/manual_exposure.cc index 2761f58..b1ca5c5 100644 --- a/samples/tutorials/control/manual_exposure.cc +++ b/samples/tutorials/control/manual_exposure.cc @@ -28,6 +28,8 @@ int main(int argc, char *argv[]) { bool ok; auto &&request = api->SelectStreamRequest(&ok); if (!ok) return 1; + api->EnableStreamData(Stream::LEFT); + api->EnableStreamData(Stream::RIGHT); api->ConfigStreamRequest(request); Model model = api->GetModel(); diff --git a/samples/tutorials/data/get_imu.cc b/samples/tutorials/data/get_imu.cc index c09d225..26d4653 100644 --- a/samples/tutorials/data/get_imu.cc +++ b/samples/tutorials/data/get_imu.cc @@ -28,6 +28,8 @@ int main(int argc, char *argv[]) { auto &&request = api->SelectStreamRequest(&ok); if (!ok) return 1; 
api->ConfigStreamRequest(request); + api->EnableStreamData(Stream::LEFT); + api->EnableStreamData(Stream::RIGHT); // Enable this will cache the motion datas until you get them. api->EnableMotionDatas(); diff --git a/src/mynteye/api/synthetic.cc b/src/mynteye/api/synthetic.cc index 818dad0..abb7c36 100644 --- a/src/mynteye/api/synthetic.cc +++ b/src/mynteye/api/synthetic.cc @@ -238,8 +238,7 @@ void Synthetic::DisableStreamData(const Stream &stream) { bool Synthetic::IsStreamDataEnabled(const Stream &stream) const { if (checkControlDateWithStream(stream)) { auto data = getControlDateWithStream(stream); - return data.enabled_mode_ == MODE_SYNTHETIC || - data.enabled_mode_ == MODE_NATIVE; + return data.enabled_mode_ == MODE_SYNTHETIC; } return false; } From 22bd0fab3adb0912ebc8e1ffbd6299c276a7f445 Mon Sep 17 00:00:00 2001 From: TinyOh Date: Sat, 2 Mar 2019 08:35:33 +0800 Subject: [PATCH 04/10] fix(api): LEFT/RIGHT callback fix --- samples/tutorials/data/get_imu_correspondence.cc | 2 ++ src/mynteye/api/synthetic.cc | 4 ++++ 2 files changed, 6 insertions(+) diff --git a/samples/tutorials/data/get_imu_correspondence.cc b/samples/tutorials/data/get_imu_correspondence.cc index 9cec957..def7ba8 100644 --- a/samples/tutorials/data/get_imu_correspondence.cc +++ b/samples/tutorials/data/get_imu_correspondence.cc @@ -30,6 +30,8 @@ int main(int argc, char *argv[]) { api->ConfigStreamRequest(request); // Enable motion datas with timestamp correspondence of some stream + api->EnableStreamData(Stream::LEFT); + api->EnableStreamData(Stream::RIGHT); api->EnableTimestampCorrespondence(Stream::LEFT); api->Start(Source::ALL); diff --git a/src/mynteye/api/synthetic.cc b/src/mynteye/api/synthetic.cc index abb7c36..f3ddd9f 100644 --- a/src/mynteye/api/synthetic.cc +++ b/src/mynteye/api/synthetic.cc @@ -384,6 +384,8 @@ void Synthetic::InitProcessors() { processors_.push_back(points_processor); processors_.push_back(depth_processor); using namespace std::placeholders; // NOLINT + root_processor->SetProcessCallback( + std::bind(&Synthetic::OnDeviceProcess, this, _1, _2, _3)); rectify_processor->SetProcessCallback( std::bind(&Synthetic::OnRectifyProcess, this, _1, _2, _3)); disparity_processor->SetProcessCallback( @@ -395,6 +397,8 @@ void Synthetic::InitProcessors() { depth_processor->SetProcessCallback( std::bind(&Synthetic::OnDepthProcess, this, _1, _2, _3)); + root_processor->SetPostProcessCallback( + std::bind(&Synthetic::OnDevicePostProcess, this, _1)); rectify_processor->SetPostProcessCallback( std::bind(&Synthetic::OnRectifyPostProcess, this, _1)); disparity_processor->SetPostProcessCallback( From 95d733b2b4d4296f37570108f500aefa62bb48b2 Mon Sep 17 00:00:00 2001 From: TinyOh Date: Sat, 2 Mar 2019 09:58:48 +0800 Subject: [PATCH 05/10] fix(ros): ros record -a --- src/mynteye/api/processor.cc | 8 +- .../api/processor/root_camera_processor.cc | 9 +- src/mynteye/api/synthetic.cc | 45 ++--- src/mynteye/api/synthetic.h | 6 +- .../src/wrapper_nodelet.cc | 154 ++++++------------ 5 files changed, 84 insertions(+), 138 deletions(-) diff --git a/src/mynteye/api/processor.cc b/src/mynteye/api/processor.cc index 93abed6..1f30e4c 100644 --- a/src/mynteye/api/processor.cc +++ b/src/mynteye/api/processor.cc @@ -249,7 +249,7 @@ void Processor::Run() { api::StreamData Processor::GetStreamData(const Stream &stream) { auto sum = getStreamsSum(); auto &&out = GetOutput(); - Synthetic::Mode enable_mode = Synthetic::MODE_LAST; + Synthetic::Mode enable_mode = Synthetic::MODE_OFF; auto streams = getTargetStreams(); for (auto it_s : 
streams) { if (it_s.stream == stream) { @@ -257,7 +257,7 @@ api::StreamData Processor::GetStreamData(const Stream &stream) { break; } } - if (enable_mode == Synthetic::MODE_SYNTHETIC) { + if (enable_mode == Synthetic::MODE_ON) { if (sum == 1) { if (out != nullptr) { auto &&output = Object::Cast(out); @@ -297,7 +297,7 @@ api::StreamData Processor::GetStreamData(const Stream &stream) { } std::vector Processor::GetStreamDatas(const Stream &stream) { - Synthetic::Mode enable_mode = Synthetic::MODE_LAST; + Synthetic::Mode enable_mode = Synthetic::MODE_OFF; auto streams = getTargetStreams(); for (auto it_s : streams) { if (it_s.stream == stream) { @@ -305,7 +305,7 @@ std::vector Processor::GetStreamDatas(const Stream &stream) { break; } } - if (enable_mode == Synthetic::MODE_SYNTHETIC) { + if (enable_mode == Synthetic::MODE_ON) { return {GetStreamData(stream)}; } else { LOG(ERROR) << "Failed to get stream data of " << stream diff --git a/src/mynteye/api/processor/root_camera_processor.cc b/src/mynteye/api/processor/root_camera_processor.cc index 0280b1a..75fca23 100644 --- a/src/mynteye/api/processor/root_camera_processor.cc +++ b/src/mynteye/api/processor/root_camera_processor.cc @@ -108,7 +108,7 @@ void s1s2Processor::StopVideoStreaming() { } api::StreamData s1s2Processor::GetStreamData(const Stream &stream) { - Synthetic::Mode enable_mode = Synthetic::MODE_LAST; + Synthetic::Mode enable_mode = Synthetic::MODE_OFF; auto streams = getTargetStreams(); for (auto it_s : streams) { if (it_s.stream == stream) { @@ -116,17 +116,18 @@ api::StreamData s1s2Processor::GetStreamData(const Stream &stream) { break; } } - if (enable_mode == Synthetic::MODE_SYNTHETIC) { + if (enable_mode == Synthetic::MODE_ON) { return data2api(device_->GetStreamData(stream)); } LOG(ERROR) << "Failed to get device stream data of " << stream << ", unsupported or disabled"; + LOG(ERROR) << "Make sure you have enable " << stream; return {}; } std::vector s1s2Processor::GetStreamDatas( const Stream &stream) { - Synthetic::Mode enable_mode = Synthetic::MODE_LAST; + Synthetic::Mode enable_mode = Synthetic::MODE_OFF; auto streams = getTargetStreams(); for (auto it_s : streams) { if (it_s.stream == stream) { @@ -134,7 +135,7 @@ std::vector s1s2Processor::GetStreamDatas( break; } } - if (enable_mode == Synthetic::MODE_SYNTHETIC) { + if (enable_mode == Synthetic::MODE_ON) { std::vector datas; for (auto &&data : device_->GetStreamDatas(stream)) { datas.push_back(data2api(data)); diff --git a/src/mynteye/api/synthetic.cc b/src/mynteye/api/synthetic.cc index f3ddd9f..5f85ea4 100644 --- a/src/mynteye/api/synthetic.cc +++ b/src/mynteye/api/synthetic.cc @@ -183,11 +183,11 @@ void Synthetic::EnableStreamData( auto streams = proce->getTargetStreams(); int act_tag = 0; for (unsigned int i = 0; i < proce->getStreamsSum() ; i++) { - if (proce->target_streams_[i].enabled_mode_ == MODE_LAST) { + if (proce->target_streams_[i].enabled_mode_ == MODE_OFF) { callback(proce->target_streams_[i].stream); if (!try_tag) { act_tag++; - proce->target_streams_[i].enabled_mode_ = MODE_SYNTHETIC; + proce->target_streams_[i].enabled_mode_ = MODE_ON; } } } @@ -206,11 +206,11 @@ void Synthetic::DisableStreamData( auto streams = proce->getTargetStreams(); int act_tag = 0; for (unsigned int i = 0; i < proce->getStreamsSum() ; i++) { - if (proce->target_streams_[i].enabled_mode_ == MODE_SYNTHETIC) { + if (proce->target_streams_[i].enabled_mode_ == MODE_ON) { callback(proce->target_streams_[i].stream); if (!try_tag) { act_tag++; - proce->target_streams_[i].enabled_mode_ 
= MODE_LAST; + proce->target_streams_[i].enabled_mode_ = MODE_OFF; } } } @@ -238,7 +238,7 @@ void Synthetic::DisableStreamData(const Stream &stream) { bool Synthetic::IsStreamDataEnabled(const Stream &stream) const { if (checkControlDateWithStream(stream)) { auto data = getControlDateWithStream(stream); - return data.enabled_mode_ == MODE_SYNTHETIC; + return data.enabled_mode_ == MODE_ON; } return false; } @@ -300,7 +300,7 @@ Synthetic::mode_t Synthetic::GetStreamEnabledMode(const Stream &stream) const { auto data = getControlDateWithStream(stream); return data.enabled_mode_; } - return MODE_LAST; + return MODE_OFF; } void Synthetic::InitProcessors() { @@ -361,21 +361,21 @@ void Synthetic::InitProcessors() { } rectify_processor->addTargetStreams( - {Stream::LEFT_RECTIFIED, Mode::MODE_LAST, nullptr}); + {Stream::LEFT_RECTIFIED, Mode::MODE_OFF, nullptr}); rectify_processor->addTargetStreams( - {Stream::RIGHT_RECTIFIED, Mode::MODE_LAST, nullptr}); + {Stream::RIGHT_RECTIFIED, Mode::MODE_OFF, nullptr}); disparity_processor->addTargetStreams( - {Stream::DISPARITY, Mode::MODE_LAST, nullptr}); + {Stream::DISPARITY, Mode::MODE_OFF, nullptr}); disparitynormalized_processor->addTargetStreams( - {Stream::DISPARITY_NORMALIZED, Mode::MODE_LAST, nullptr}); + {Stream::DISPARITY_NORMALIZED, Mode::MODE_OFF, nullptr}); points_processor->addTargetStreams( - {Stream::POINTS, Mode::MODE_LAST, nullptr}); + {Stream::POINTS, Mode::MODE_OFF, nullptr}); depth_processor->addTargetStreams( - {Stream::DEPTH, Mode::MODE_LAST, nullptr}); + {Stream::DEPTH, Mode::MODE_OFF, nullptr}); root_processor->addTargetStreams( - {Stream::LEFT, Mode::MODE_LAST, nullptr}); + {Stream::LEFT, Mode::MODE_OFF, nullptr}); root_processor->addTargetStreams( - {Stream::RIGHT, Mode::MODE_LAST, nullptr}); + {Stream::RIGHT, Mode::MODE_OFF, nullptr}); processors_.push_back(root_processor); processors_.push_back(rectify_processor); @@ -417,7 +417,7 @@ bool Synthetic::OnDeviceProcess( Object *const in, Object *const out, std::shared_ptr const parent) { MYNTEYE_UNUSED(parent) - return GetStreamEnabledMode(Stream::LEFT) != MODE_SYNTHETIC; + return GetStreamEnabledMode(Stream::LEFT) != MODE_ON; } bool Synthetic::OnRectifyProcess( @@ -427,8 +427,8 @@ bool Synthetic::OnRectifyProcess( if (plugin_ && plugin_->OnRectifyProcess(in, out)) { return true; } - return GetStreamEnabledMode(Stream::LEFT_RECTIFIED) != MODE_SYNTHETIC; - // && GetStreamEnabledMode(Stream::RIGHT_RECTIFIED) != MODE_SYNTHETIC + return GetStreamEnabledMode(Stream::LEFT_RECTIFIED) != MODE_ON; + // && GetStreamEnabledMode(Stream::RIGHT_RECTIFIED) != MODE_ON } bool Synthetic::OnDisparityProcess( @@ -438,7 +438,7 @@ bool Synthetic::OnDisparityProcess( if (plugin_ && plugin_->OnDisparityProcess(in, out)) { return true; } - return GetStreamEnabledMode(Stream::DISPARITY) != MODE_SYNTHETIC; + return GetStreamEnabledMode(Stream::DISPARITY) != MODE_ON; } bool Synthetic::OnDisparityNormalizedProcess( @@ -448,7 +448,7 @@ bool Synthetic::OnDisparityNormalizedProcess( if (plugin_ && plugin_->OnDisparityNormalizedProcess(in, out)) { return true; } - return GetStreamEnabledMode(Stream::DISPARITY_NORMALIZED) != MODE_SYNTHETIC; + return GetStreamEnabledMode(Stream::DISPARITY_NORMALIZED) != MODE_ON; } bool Synthetic::OnPointsProcess( @@ -458,7 +458,7 @@ bool Synthetic::OnPointsProcess( if (plugin_ && plugin_->OnPointsProcess(in, out)) { return true; } - return GetStreamEnabledMode(Stream::POINTS) != MODE_SYNTHETIC; + return GetStreamEnabledMode(Stream::POINTS) != MODE_ON; } bool Synthetic::OnDepthProcess( 
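The net effect of collapsing MODE_NATIVE/MODE_SYNTHETIC/MODE_LAST into MODE_ON/MODE_OFF is that every stream, including synthesized ones such as DEPTH, is gated purely by whether it has been enabled: the OnXxxProcess callbacks in the hunks above return true (skip) whenever their target stream is off. A hedged usage sketch for switching on a synthesized stream with a callback, using only calls shown elsewhere in this series (the callback body is illustrative):

    api->EnableStreamData(Stream::DEPTH);
    api->SetStreamCallback(
        Stream::DEPTH, [](const api::StreamData &data) {
          if (!data.frame.empty()) {
            // consume the depth image here
          }
        });
    api->Start(Source::VIDEO_STREAMING);
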
@@ -468,7 +468,7 @@ bool Synthetic::OnDepthProcess( if (plugin_ && plugin_->OnDepthProcess(in, out)) { return true; } - return GetStreamEnabledMode(Stream::DEPTH) != MODE_SYNTHETIC; + return GetStreamEnabledMode(Stream::DEPTH) != MODE_ON; } void Synthetic::OnDevicePostProcess(Object *const out) { @@ -481,7 +481,8 @@ void Synthetic::OnDevicePostProcess(Object *const out) { } if (HasStreamCallback(Stream::RIGHT)) { auto data = getControlDateWithStream(Stream::RIGHT); - data.stream_callback(obj_data_second(output)); + if (data.stream_callback) + data.stream_callback(obj_data_second(output)); } } diff --git a/src/mynteye/api/synthetic.h b/src/mynteye/api/synthetic.h index 5476792..993b7c6 100644 --- a/src/mynteye/api/synthetic.h +++ b/src/mynteye/api/synthetic.h @@ -40,9 +40,8 @@ class Synthetic { using stream_switch_callback_t = API::stream_switch_callback_t; typedef enum Mode { - MODE_NATIVE, // Native stream - MODE_SYNTHETIC, // Synthetic stream - MODE_LAST // Unsupported + MODE_ON, // On + MODE_OFF // Off } mode_t; struct stream_control_t { @@ -142,7 +141,6 @@ class Synthetic { std::shared_ptr plugin_; CalibrationModel calib_model_; - std::mutex mtx_left_right_ready_; std::shared_ptr intr_left_; std::shared_ptr intr_right_; diff --git a/wrappers/ros/src/mynt_eye_ros_wrapper/src/wrapper_nodelet.cc b/wrappers/ros/src/mynt_eye_ros_wrapper/src/wrapper_nodelet.cc index 75b381b..abaecf0 100644 --- a/wrappers/ros/src/mynt_eye_ros_wrapper/src/wrapper_nodelet.cc +++ b/wrappers/ros/src/mynt_eye_ros_wrapper/src/wrapper_nodelet.cc @@ -583,22 +583,10 @@ class ROSWrapperNodelet : public nodelet::Nodelet { } void publishOthers(const Stream &stream) { - // std::cout << stream << "===============================" << std::endl; - // int enable_tag = 0; - // api_->EnableStreamData(stream, [&](const Stream &stream) { - // enable_tag += getStreamSubscribers(stream); - // std::cout << "EnableStreamData callback test" - // << stream << "|| Sum:" - // << getStreamSubscribers(stream) << std::endl; - // }, true); if (getStreamSubscribers(stream) > 0 && !is_published_[stream]) { - // std::cout << stream - // <<" enableStreamData tag = 0 return" << std::endl; - // std::cout << "enable " << stream << std::endl; api_->EnableStreamData(stream); api_->SetStreamCallback( stream, [this, stream](const api::StreamData &data) { - // ros::Time stamp = hardTimeToSoftTime(data.img->timestamp); ros::Time stamp = checkUpTimeStamp( data.img->timestamp, stream); static std::size_t count = 0; @@ -612,12 +600,9 @@ class ROSWrapperNodelet : public nodelet::Nodelet { int disable_tag = 0; api_->DisableStreamData(stream, [&](const Stream &stream) { disable_tag += getStreamSubscribers(stream); - // std::cout << "DisableStreamData callback test: " - // << stream << "|| Sum:"<< getStreamSubscribers(stream) << std::endl; }, true); if (disable_tag == 0 && is_published_[stream]) { api_->DisableStreamData(stream, [&](const Stream &stream) { - // std::cout << "disable " << stream << std::endl; api_->SetStreamCallback(stream, nullptr); is_published_[stream] = false; }); @@ -626,98 +611,59 @@ class ROSWrapperNodelet : public nodelet::Nodelet { } void publishTopics() { - std::vector all_streams{ - Stream::RIGHT, - Stream::LEFT, - Stream::LEFT_RECTIFIED, - Stream::RIGHT_RECTIFIED, - Stream::DISPARITY, - Stream::DISPARITY_NORMALIZED, - Stream::POINTS, - Stream::DEPTH - }; - - static int sum = 0; - int sum_c = 0; - for (auto &&stream : all_streams) { - sum_c += getStreamSubscribers(stream); + if ((camera_publishers_[Stream::LEFT].getNumSubscribers() > 
0 || + mono_publishers_[Stream::LEFT].getNumSubscribers() > 0) && + !is_published_[Stream::LEFT]) { + api_->SetStreamCallback( + Stream::LEFT, [&](const api::StreamData &data) { + ++left_count_; + if (left_count_ > 10) { + // ros::Time stamp = hardTimeToSoftTime(data.img->timestamp); + ros::Time stamp = checkUpTimeStamp( + data.img->timestamp, Stream::LEFT); + publishCamera(Stream::LEFT, data, left_count_, stamp); + publishMono(Stream::LEFT, data, left_count_, stamp); + NODELET_DEBUG_STREAM( + Stream::LEFT << ", count: " << left_count_ + << ", frame_id: " << data.img->frame_id + << ", timestamp: " << data.img->timestamp + << ", exposure_time: " << data.img->exposure_time); + } + }); + left_time_beg_ = ros::Time::now().toSec(); + is_published_[Stream::LEFT] = true; } - if (sum_c != sum) { - if (sum_c == 0) { - api_->Stop(Source::VIDEO_STREAMING); - for (auto &&stream : all_streams) { - is_published_[stream] = false; - } - api_->Start(Source::VIDEO_STREAMING); - } else { - if ((camera_publishers_[Stream::LEFT].getNumSubscribers() > 0 || - mono_publishers_[Stream::LEFT].getNumSubscribers() > 0) && - !is_published_[Stream::LEFT]) { - api_->SetStreamCallback( - Stream::LEFT, [&](const api::StreamData &data) { - ++left_count_; - if (left_count_ > 10) { - // ros::Time stamp = hardTimeToSoftTime(data.img->timestamp); - ros::Time stamp = checkUpTimeStamp( - data.img->timestamp, Stream::LEFT); + if ((camera_publishers_[Stream::RIGHT].getNumSubscribers() > 0 || + mono_publishers_[Stream::RIGHT].getNumSubscribers() > 0) && + !is_published_[Stream::RIGHT]) { + api_->SetStreamCallback( + Stream::RIGHT, [&](const api::StreamData &data) { + ++right_count_; + if (right_count_ > 10) { + // ros::Time stamp = hardTimeToSoftTime(data.img->timestamp); + ros::Time stamp = checkUpTimeStamp( + data.img->timestamp, Stream::RIGHT); + publishCamera(Stream::RIGHT, data, right_count_, stamp); + publishMono(Stream::RIGHT, data, right_count_, stamp); + NODELET_DEBUG_STREAM( + Stream::RIGHT << ", count: " << right_count_ + << ", frame_id: " << data.img->frame_id + << ", timestamp: " << data.img->timestamp + << ", exposure_time: " << data.img->exposure_time); + } + }); + right_time_beg_ = ros::Time::now().toSec(); + is_published_[Stream::RIGHT] = true; + } - // static double img_time_prev = -1; - // NODELET_INFO_STREAM("ros_time_beg: " << FULL_PRECISION << - // ros_time_beg - // << ", img_time_elapsed: " << FULL_PRECISION - // << ((data.img->timestamp - img_time_beg) * 0.00001f) - // << ", img_time_diff: " << FULL_PRECISION - // << ((img_time_prev < 0) ? 
0 - // : (data.img->timestamp - img_time_prev) * 0.01f) << " - // ms"); - // img_time_prev = data.img->timestamp; - publishCamera(Stream::LEFT, data, left_count_, stamp); - publishMono(Stream::LEFT, data, left_count_, stamp); - NODELET_DEBUG_STREAM( - Stream::LEFT << ", count: " << left_count_ - << ", frame_id: " << data.img->frame_id - << ", timestamp: " << data.img->timestamp - << ", exposure_time: " << data.img->exposure_time); - } - }); - left_time_beg_ = ros::Time::now().toSec(); - is_published_[Stream::LEFT] = true; - } - - if ((camera_publishers_[Stream::RIGHT].getNumSubscribers() > 0 || - mono_publishers_[Stream::RIGHT].getNumSubscribers() > 0) && - !is_published_[Stream::RIGHT]) { - api_->SetStreamCallback( - Stream::RIGHT, [&](const api::StreamData &data) { - ++right_count_; - if (right_count_ > 10) { - // ros::Time stamp = hardTimeToSoftTime(data.img->timestamp); - ros::Time stamp = checkUpTimeStamp( - data.img->timestamp, Stream::RIGHT); - publishCamera(Stream::RIGHT, data, right_count_, stamp); - publishMono(Stream::RIGHT, data, right_count_, stamp); - NODELET_DEBUG_STREAM( - Stream::RIGHT << ", count: " << right_count_ - << ", frame_id: " << data.img->frame_id - << ", timestamp: " << data.img->timestamp - << ", exposure_time: " << data.img->exposure_time); - } - }); - right_time_beg_ = ros::Time::now().toSec(); - is_published_[Stream::RIGHT] = true; - } - - std::vector other_streams{ - Stream::LEFT_RECTIFIED, Stream::RIGHT_RECTIFIED, - Stream::DISPARITY, Stream::DISPARITY_NORMALIZED, - Stream::POINTS, Stream::DEPTH - }; - for (auto &&stream : other_streams) { - publishOthers(stream); - } - } - sum = sum_c; + std::vector other_streams{ + Stream::LEFT_RECTIFIED, Stream::RIGHT_RECTIFIED, + Stream::DISPARITY, Stream::DISPARITY_NORMALIZED, + Stream::POINTS, Stream::DEPTH + }; + for (auto &&stream : other_streams) { + publishOthers(stream); } if (!is_motion_published_) { From 469ffe507534caf48bfa4b12dbe613ef33230a30 Mon Sep 17 00:00:00 2001 From: TinyOh Date: Sat, 2 Mar 2019 10:08:45 +0800 Subject: [PATCH 06/10] refactor(synthetic): use RootProcessor as root node --- src/mynteye/api/synthetic.cc | 6 ++---- src/mynteye/api/synthetic.h | 3 ++- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/src/mynteye/api/synthetic.cc b/src/mynteye/api/synthetic.cc index 5f85ea4..c34fe4b 100644 --- a/src/mynteye/api/synthetic.cc +++ b/src/mynteye/api/synthetic.cc @@ -266,13 +266,11 @@ bool Synthetic::HasStreamCallback(const Stream &stream) const { } void Synthetic::StartVideoStreaming() { - auto processor_root = static_cast(&(*processor_)); - processor_root->StartVideoStreaming(); + processor_->StartVideoStreaming(); } void Synthetic::StopVideoStreaming() { - auto processor_root = static_cast(&(*processor_)); - processor_root->StopVideoStreaming(); + processor_->StopVideoStreaming(); } void Synthetic::WaitForStreams() { diff --git a/src/mynteye/api/synthetic.h b/src/mynteye/api/synthetic.h index 993b7c6..bb2d86c 100644 --- a/src/mynteye/api/synthetic.h +++ b/src/mynteye/api/synthetic.h @@ -29,6 +29,7 @@ MYNTEYE_BEGIN_NAMESPACE class API; class Plugin; class Processor; +class RootProcessor; struct Object; @@ -136,7 +137,7 @@ class Synthetic { API *api_; - std::shared_ptr processor_; + std::shared_ptr processor_; std::shared_ptr plugin_; From c9bfdbb4d642e24acdeedca195684267122dfec6 Mon Sep 17 00:00:00 2001 From: TinyOh Date: Sat, 2 Mar 2019 12:46:17 +0800 Subject: [PATCH 07/10] fix(api): remove enable LEFT/RIGHT stream code --- samples/tutorials/control/auto_exposure.cc | 2 -- 
samples/tutorials/control/framerate.cc | 2 -- samples/tutorials/control/imu_low_pass_filter.cc | 2 -- samples/tutorials/control/imu_range.cc | 2 -- samples/tutorials/control/infrared.cc | 2 -- samples/tutorials/control/manual_exposure.cc | 2 -- samples/tutorials/data/get_imu.cc | 2 -- samples/tutorials/data/get_imu_correspondence.cc | 2 -- samples/tutorials/data/get_stereo.cc | 2 -- src/mynteye/api/processor/root_camera_processor.cc | 5 ++++- src/mynteye/api/synthetic.cc | 6 ++++++ src/mynteye/api/synthetic.h | 4 +--- 12 files changed, 11 insertions(+), 22 deletions(-) diff --git a/samples/tutorials/control/auto_exposure.cc b/samples/tutorials/control/auto_exposure.cc index 258504f..cabe6ad 100644 --- a/samples/tutorials/control/auto_exposure.cc +++ b/samples/tutorials/control/auto_exposure.cc @@ -28,8 +28,6 @@ int main(int argc, char *argv[]) { bool ok; auto &&request = api->SelectStreamRequest(&ok); if (!ok) return 1; - api->EnableStreamData(Stream::LEFT); - api->EnableStreamData(Stream::RIGHT); api->ConfigStreamRequest(request); Model model = api->GetModel(); diff --git a/samples/tutorials/control/framerate.cc b/samples/tutorials/control/framerate.cc index faa1aec..41f1e5d 100644 --- a/samples/tutorials/control/framerate.cc +++ b/samples/tutorials/control/framerate.cc @@ -28,8 +28,6 @@ int main(int argc, char *argv[]) { bool ok; auto &&request = api->SelectStreamRequest(&ok); if (!ok) return 1; - api->EnableStreamData(Stream::LEFT); - api->EnableStreamData(Stream::RIGHT); api->ConfigStreamRequest(request); Model model = api->GetModel(); diff --git a/samples/tutorials/control/imu_low_pass_filter.cc b/samples/tutorials/control/imu_low_pass_filter.cc index f0119f2..88e71b8 100644 --- a/samples/tutorials/control/imu_low_pass_filter.cc +++ b/samples/tutorials/control/imu_low_pass_filter.cc @@ -28,8 +28,6 @@ int main(int argc, char *argv[]) { bool ok; auto &&request = api->SelectStreamRequest(&ok); if (!ok) return 1; - api->EnableStreamData(Stream::LEFT); - api->EnableStreamData(Stream::RIGHT); api->ConfigStreamRequest(request); Model model = api->GetModel(); diff --git a/samples/tutorials/control/imu_range.cc b/samples/tutorials/control/imu_range.cc index e6edecf..1431b20 100644 --- a/samples/tutorials/control/imu_range.cc +++ b/samples/tutorials/control/imu_range.cc @@ -28,8 +28,6 @@ int main(int argc, char *argv[]) { bool ok; auto &&request = api->SelectStreamRequest(&ok); if (!ok) return 1; - api->EnableStreamData(Stream::LEFT); - api->EnableStreamData(Stream::RIGHT); api->ConfigStreamRequest(request); Model model = api->GetModel(); diff --git a/samples/tutorials/control/infrared.cc b/samples/tutorials/control/infrared.cc index b44d49d..c261e7a 100644 --- a/samples/tutorials/control/infrared.cc +++ b/samples/tutorials/control/infrared.cc @@ -25,8 +25,6 @@ int main(int argc, char *argv[]) { bool ok; auto &&request = api->SelectStreamRequest(&ok); if (!ok) return 1; - api->EnableStreamData(Stream::LEFT); - api->EnableStreamData(Stream::RIGHT); api->ConfigStreamRequest(request); Model model = api->GetModel(); diff --git a/samples/tutorials/control/manual_exposure.cc b/samples/tutorials/control/manual_exposure.cc index b1ca5c5..2761f58 100644 --- a/samples/tutorials/control/manual_exposure.cc +++ b/samples/tutorials/control/manual_exposure.cc @@ -28,8 +28,6 @@ int main(int argc, char *argv[]) { bool ok; auto &&request = api->SelectStreamRequest(&ok); if (!ok) return 1; - api->EnableStreamData(Stream::LEFT); - api->EnableStreamData(Stream::RIGHT); api->ConfigStreamRequest(request); Model model 
= api->GetModel(); diff --git a/samples/tutorials/data/get_imu.cc b/samples/tutorials/data/get_imu.cc index 26d4653..c09d225 100644 --- a/samples/tutorials/data/get_imu.cc +++ b/samples/tutorials/data/get_imu.cc @@ -28,8 +28,6 @@ int main(int argc, char *argv[]) { auto &&request = api->SelectStreamRequest(&ok); if (!ok) return 1; api->ConfigStreamRequest(request); - api->EnableStreamData(Stream::LEFT); - api->EnableStreamData(Stream::RIGHT); // Enable this will cache the motion datas until you get them. api->EnableMotionDatas(); diff --git a/samples/tutorials/data/get_imu_correspondence.cc b/samples/tutorials/data/get_imu_correspondence.cc index def7ba8..9cec957 100644 --- a/samples/tutorials/data/get_imu_correspondence.cc +++ b/samples/tutorials/data/get_imu_correspondence.cc @@ -30,8 +30,6 @@ int main(int argc, char *argv[]) { api->ConfigStreamRequest(request); // Enable motion datas with timestamp correspondence of some stream - api->EnableStreamData(Stream::LEFT); - api->EnableStreamData(Stream::RIGHT); api->EnableTimestampCorrespondence(Stream::LEFT); api->Start(Source::ALL); diff --git a/samples/tutorials/data/get_stereo.cc b/samples/tutorials/data/get_stereo.cc index 5f114c0..1dd024a 100644 --- a/samples/tutorials/data/get_stereo.cc +++ b/samples/tutorials/data/get_stereo.cc @@ -25,8 +25,6 @@ int main(int argc, char *argv[]) { auto &&request = api->SelectStreamRequest(&ok); if (!ok) return 1; api->ConfigStreamRequest(request); - api->EnableStreamData(Stream::LEFT); - api->EnableStreamData(Stream::RIGHT); api->Start(Source::VIDEO_STREAMING); diff --git a/src/mynteye/api/processor/root_camera_processor.cc b/src/mynteye/api/processor/root_camera_processor.cc index 75fca23..a802b3a 100644 --- a/src/mynteye/api/processor/root_camera_processor.cc +++ b/src/mynteye/api/processor/root_camera_processor.cc @@ -79,10 +79,12 @@ void s1s2Processor::ProcessNativeStream( } void s1s2Processor::StartVideoStreaming() { + Activate(); auto streams = getTargetStreams(); for (unsigned int j =0; j< streams.size(); j++) { auto stream = streams[j].stream; auto callback = streams[j].stream_callback; + target_streams_[j].enabled_mode_ = Synthetic::MODE_ON; device_->SetStreamCallback( stream, [this, stream, callback](const device::StreamData &data) { @@ -99,14 +101,15 @@ void s1s2Processor::StartVideoStreaming() { } void s1s2Processor::StopVideoStreaming() { + Deactivate(); auto streams = getTargetStreams(); for (unsigned int j =0; j< streams.size(); j++) { auto stream = streams[j].stream; + target_streams_[j].enabled_mode_ = Synthetic::MODE_OFF; device_->SetStreamCallback(stream, nullptr); } device_->Stop(Source::VIDEO_STREAMING); } - api::StreamData s1s2Processor::GetStreamData(const Stream &stream) { Synthetic::Mode enable_mode = Synthetic::MODE_OFF; auto streams = getTargetStreams(); diff --git a/src/mynteye/api/synthetic.cc b/src/mynteye/api/synthetic.cc index c34fe4b..b9a5401 100644 --- a/src/mynteye/api/synthetic.cc +++ b/src/mynteye/api/synthetic.cc @@ -180,6 +180,9 @@ void Synthetic::EnableStreamData( auto processor = getProcessorWithStream(stream); iterate_processor_CtoP_before(processor, [callback, try_tag](std::shared_ptr proce){ + if (proce->Name() == "RootProcessor") { + return; + } auto streams = proce->getTargetStreams(); int act_tag = 0; for (unsigned int i = 0; i < proce->getStreamsSum() ; i++) { @@ -203,6 +206,9 @@ void Synthetic::DisableStreamData( auto processor = getProcessorWithStream(stream); iterate_processor_PtoC_before(processor, [callback, try_tag](std::shared_ptr proce){ + if 
(proce->Name() == "RootProcessor") { + return; + } auto streams = proce->getTargetStreams(); int act_tag = 0; for (unsigned int i = 0; i < proce->getStreamsSum() ; i++) { diff --git a/src/mynteye/api/synthetic.h b/src/mynteye/api/synthetic.h index bb2d86c..dc44a4e 100644 --- a/src/mynteye/api/synthetic.h +++ b/src/mynteye/api/synthetic.h @@ -138,7 +138,7 @@ class Synthetic { API *api_; std::shared_ptr<RootProcessor> processor_; - + std::vector<std::shared_ptr<Processor>> processors_; std::shared_ptr<Plugin> plugin_; CalibrationModel calib_model_; @@ -148,8 +148,6 @@ class Synthetic { std::shared_ptr<Extrinsics> extr_; bool calib_default_tag_; - std::vector<std::shared_ptr<Processor>> processors_; - stream_data_listener_t stream_data_listener_; }; From f744fa06d5aa190e975f57544bd986435f978236 Mon Sep 17 00:00:00 2001 From: TinyOh Date: Sat, 2 Mar 2019 16:13:00 +0800 Subject: [PATCH 08/10] refactor(synthetic): add a switch to disable cloning and avoid unnecessary copies --- src/mynteye/api/processor.cc | 32 ++++++++++++++----- src/mynteye/api/processor.h | 15 ++++++--- .../api/processor/depth_processor_ocv.h | 3 ++ .../disparity_normalized_processor.h | 6 ++++ src/mynteye/api/processor/points_processor.h | 3 ++ src/mynteye/api/processor/rectify_processor.h | 6 ++++ .../api/processor/rectify_processor_ocv.h | 7 ++++ .../api/processor/root_camera_processor.cc | 2 +- .../api/processor/root_camera_processor.h | 6 ++++ src/mynteye/api/synthetic.cc | 15 +++++---- 10 files changed, 75 insertions(+), 20 deletions(-) diff --git a/src/mynteye/api/processor.cc b/src/mynteye/api/processor.cc index 1f30e4c..175e0ba 100644 --- a/src/mynteye/api/processor.cc +++ b/src/mynteye/api/processor.cc @@ -42,9 +42,9 @@ Processor::Processor(std::int32_t proc_period) Processor::~Processor() { VLOG(2) << __func__; Deactivate(); - input_.reset(nullptr); - output_.reset(nullptr); - output_result_.reset(nullptr); + input_ = nullptr; + output_ = nullptr; + output_result_ = nullptr; childs_.clear(); } @@ -122,7 +122,7 @@ bool Processor::IsIdle() { return idle_; } -bool Processor::Process(const Object &in) { +bool Processor::Process(std::shared_ptr<Object> in) { if (!activated_) return false; if (!idle_) { @@ -132,13 +132,17 @@ bool Processor::Process(const Object &in) { return false; } } - if (!in.DecValidity()) { + if (in && !in->DecValidity()) { LOG(WARNING) << Name() << " process with invalid input"; return false; } { std::lock_guard<std::mutex> lk(mtx_input_ready_); - input_.reset(in.Clone()); + if (ProcessInputConnection() == WITH_CLONE) { + input_.reset(in->Clone()); + } else { + input_ = in; + } input_ready_ = true; } cond_input_ready_.notify_all(); @@ -229,12 +233,16 @@ void Processor::Run() { } { std::unique_lock<std::mutex> lk(mtx_result_); - output_result_.reset(output_->Clone()); + if (ProcessOutputConnection() == WITH_CLONE) { + output_result_.reset(output_->Clone()); + } else { + output_result_ = output_; + } } if (!childs_.empty()) { for (auto child : childs_) { - child->Process(*output_); + child->Process(output_); } } @@ -246,6 +254,14 @@ void Processor::Run() { VLOG(2) << Name() << " thread end"; } +Processor::process_type Processor::ProcessOutputConnection() { + return WITH_CLONE; +} + +Processor::process_type Processor::ProcessInputConnection() { + return WITH_CLONE; +} + api::StreamData Processor::GetStreamData(const Stream &stream) { auto sum = getStreamsSum(); auto &&out = GetOutput(); diff --git a/src/mynteye/api/processor.h b/src/mynteye/api/processor.h index 6ac1644..4cdaeac 100644 --- a/src/mynteye/api/processor.h +++ b/src/mynteye/api/processor.h @@ -64,7 +64,7 @@ class Processor : bool IsIdle(); /** Returns dropped or
not. */ - bool Process(const Object &in); + bool Process(std::shared_ptr in); virtual api::StreamData GetStreamData(const Stream &stream); @@ -83,6 +83,13 @@ class Processor : virtual bool OnProcess( Object *const in, Object *const out, std::shared_ptr const parent) = 0; + enum process_type{ + WITH_CLONE, + WITHOUT_CLONE + }; + + virtual process_type ProcessOutputConnection(); + virtual process_type ProcessInputConnection(); private: /** Run in standalone thread. */ @@ -102,10 +109,10 @@ class Processor : std::uint64_t dropped_count_; std::mutex mtx_state_; - std::unique_ptr input_; - std::unique_ptr output_; + std::shared_ptr input_; + std::shared_ptr output_; - std::unique_ptr output_result_; + std::shared_ptr output_result_; std::mutex mtx_result_; PreProcessCallback pre_callback_; diff --git a/src/mynteye/api/processor/depth_processor_ocv.h b/src/mynteye/api/processor/depth_processor_ocv.h index 1618713..fbe7cd5 100644 --- a/src/mynteye/api/processor/depth_processor_ocv.h +++ b/src/mynteye/api/processor/depth_processor_ocv.h @@ -31,6 +31,9 @@ class DepthProcessorOCV : public Processor { std::string Name() override; protected: + inline Processor::process_type ProcessOutputConnection() override { + return Processor::WITHOUT_CLONE; + } Object *OnCreateOutput() override; bool OnProcess( Object *const in, Object *const out, diff --git a/src/mynteye/api/processor/disparity_normalized_processor.h b/src/mynteye/api/processor/disparity_normalized_processor.h index 1b61420..9c9fb0b 100644 --- a/src/mynteye/api/processor/disparity_normalized_processor.h +++ b/src/mynteye/api/processor/disparity_normalized_processor.h @@ -31,6 +31,12 @@ class DisparityNormalizedProcessor : public Processor { std::string Name() override; protected: + inline Processor::process_type ProcessOutputConnection() override { + return Processor::WITHOUT_CLONE; + } + inline Processor::process_type ProcessInputConnection() override { + return Processor::WITHOUT_CLONE; + } Object *OnCreateOutput() override; bool OnProcess( Object *const in, Object *const out, diff --git a/src/mynteye/api/processor/points_processor.h b/src/mynteye/api/processor/points_processor.h index 5cdf958..b9d1ab9 100644 --- a/src/mynteye/api/processor/points_processor.h +++ b/src/mynteye/api/processor/points_processor.h @@ -36,6 +36,9 @@ class PointsProcessor : public Processor { std::string Name() override; protected: + inline Processor::process_type ProcessOutputConnection() override { + return Processor::WITHOUT_CLONE; + } Object *OnCreateOutput() override; bool OnProcess( Object *const in, Object *const out, diff --git a/src/mynteye/api/processor/rectify_processor.h b/src/mynteye/api/processor/rectify_processor.h index 029ed6d..3ddb504 100644 --- a/src/mynteye/api/processor/rectify_processor.h +++ b/src/mynteye/api/processor/rectify_processor.h @@ -79,6 +79,12 @@ class RectifyProcessor : public Processor { bool OnProcess( Object *const in, Object *const out, std::shared_ptr const parent) override; + inline Processor::process_type ProcessOutputConnection() override { + return Processor::WITHOUT_CLONE; + } + inline Processor::process_type ProcessInputConnection() override { + return Processor::WITHOUT_CLONE; + } private: void InitParams(IntrinsicsEquidistant in_left, diff --git a/src/mynteye/api/processor/rectify_processor_ocv.h b/src/mynteye/api/processor/rectify_processor_ocv.h index 9c450ec..19e1ca7 100644 --- a/src/mynteye/api/processor/rectify_processor_ocv.h +++ b/src/mynteye/api/processor/rectify_processor_ocv.h @@ -49,6 +49,13 @@ class 
RectifyProcessorOCV : public Processor { cv::Mat map11, map12, map21, map22; protected: + inline Processor::process_type ProcessOutputConnection() override { + return Processor::WITHOUT_CLONE; + } + inline Processor::process_type ProcessInputConnection() override { + return Processor::WITHOUT_CLONE; + } + Object *OnCreateOutput() override; bool OnProcess( Object *const in, Object *const out, diff --git a/src/mynteye/api/processor/root_camera_processor.cc b/src/mynteye/api/processor/root_camera_processor.cc index a802b3a..2d7212f 100644 --- a/src/mynteye/api/processor/root_camera_processor.cc +++ b/src/mynteye/api/processor/root_camera_processor.cc @@ -73,7 +73,7 @@ void s1s2Processor::ProcessNativeStream( } if (left_data.img && right_data.img && left_data.img->frame_id == right_data.img->frame_id) { - Process(data_obj(left_data, right_data)); + Process(std::make_shared(data_obj(left_data, right_data))); } return; } diff --git a/src/mynteye/api/processor/root_camera_processor.h b/src/mynteye/api/processor/root_camera_processor.h index f7e1ed7..469067a 100644 --- a/src/mynteye/api/processor/root_camera_processor.h +++ b/src/mynteye/api/processor/root_camera_processor.h @@ -56,6 +56,12 @@ class s1s2Processor : public RootProcessor { api::StreamData GetStreamData(const Stream &stream) override; std::vector GetStreamDatas(const Stream &stream) override; // NOLINT protected: + inline Processor::process_type ProcessOutputConnection() override { + return Processor::WITHOUT_CLONE; + } + inline Processor::process_type ProcessInputConnection() override { + return Processor::WITHOUT_CLONE; + } Object *OnCreateOutput() override; bool OnProcess( Object *const in, Object *const out, diff --git a/src/mynteye/api/synthetic.cc b/src/mynteye/api/synthetic.cc index b9a5401..26c8947 100644 --- a/src/mynteye/api/synthetic.cc +++ b/src/mynteye/api/synthetic.cc @@ -364,6 +364,10 @@ void Synthetic::InitProcessors() { return; } + root_processor->addTargetStreams( + {Stream::LEFT, Mode::MODE_OFF, nullptr}); + root_processor->addTargetStreams( + {Stream::RIGHT, Mode::MODE_OFF, nullptr}); rectify_processor->addTargetStreams( {Stream::LEFT_RECTIFIED, Mode::MODE_OFF, nullptr}); rectify_processor->addTargetStreams( @@ -376,10 +380,6 @@ void Synthetic::InitProcessors() { {Stream::POINTS, Mode::MODE_OFF, nullptr}); depth_processor->addTargetStreams( {Stream::DEPTH, Mode::MODE_OFF, nullptr}); - root_processor->addTargetStreams( - {Stream::LEFT, Mode::MODE_OFF, nullptr}); - root_processor->addTargetStreams( - {Stream::RIGHT, Mode::MODE_OFF, nullptr}); processors_.push_back(root_processor); processors_.push_back(rectify_processor); @@ -421,7 +421,8 @@ bool Synthetic::OnDeviceProcess( Object *const in, Object *const out, std::shared_ptr const parent) { MYNTEYE_UNUSED(parent) - return GetStreamEnabledMode(Stream::LEFT) != MODE_ON; + return GetStreamEnabledMode(Stream::LEFT) != MODE_ON + || GetStreamEnabledMode(Stream::RIGHT) != MODE_ON; } bool Synthetic::OnRectifyProcess( @@ -431,8 +432,8 @@ bool Synthetic::OnRectifyProcess( if (plugin_ && plugin_->OnRectifyProcess(in, out)) { return true; } - return GetStreamEnabledMode(Stream::LEFT_RECTIFIED) != MODE_ON; - // && GetStreamEnabledMode(Stream::RIGHT_RECTIFIED) != MODE_ON + return GetStreamEnabledMode(Stream::LEFT_RECTIFIED) != MODE_ON + && GetStreamEnabledMode(Stream::RIGHT_RECTIFIED) != MODE_ON; } bool Synthetic::OnDisparityProcess( From a5b337b50f7a920d4ff84d19c9b427dcbc2785dc Mon Sep 17 00:00:00 2001 From: TinyOh Date: Sat, 2 Mar 2019 10:31:15 +0800 Subject: [PATCH 09/10] 
fix: improve processor close order --- src/mynteye/api/synthetic.cc | 1 + 1 file changed, 1 insertion(+) diff --git a/src/mynteye/api/synthetic.cc b/src/mynteye/api/synthetic.cc index 26c8947..d5d9dbe 100644 --- a/src/mynteye/api/synthetic.cc +++ b/src/mynteye/api/synthetic.cc @@ -86,6 +86,7 @@ Synthetic::Synthetic(API *api, CalibrationModel calib_model) Synthetic::~Synthetic() { VLOG(2) << __func__; + processors_.clear(); if (processor_) { processor_->Deactivate(true); processor_ = nullptr; From 70e1e9e32ff8260d941d0963fd2cc4434696809b Mon Sep 17 00:00:00 2001 From: John Zhao Date: Sat, 2 Mar 2019 11:47:20 +0800 Subject: [PATCH 10/10] refactor(device): merge device_s210a to device_s2 --- CMakeLists.txt | 3 - src/mynteye/device/device.cc | 6 +- .../device/standard2/channels_adapter_s2.cc | 4 +- .../device/standard2/channels_adapter_s2.h | 2 +- .../standard2/channels_adapter_s210a.cc | 121 ------------ .../device/standard2/channels_adapter_s210a.h | 42 ---- src/mynteye/device/standard2/device_s2.cc | 10 +- src/mynteye/device/standard2/device_s2.h | 2 +- src/mynteye/device/standard2/device_s210a.cc | 45 ----- src/mynteye/device/standard2/device_s210a.h | 37 ---- .../device/standard2/streams_adapter_s2.cc | 70 ++++++- .../device/standard2/streams_adapter_s2.h | 5 +- .../device/standard2/streams_adapter_s210a.cc | 186 ------------------ .../device/standard2/streams_adapter_s210a.h | 42 ---- 14 files changed, 81 insertions(+), 494 deletions(-) delete mode 100644 src/mynteye/device/standard2/channels_adapter_s210a.cc delete mode 100644 src/mynteye/device/standard2/channels_adapter_s210a.h delete mode 100644 src/mynteye/device/standard2/device_s210a.cc delete mode 100644 src/mynteye/device/standard2/device_s210a.h delete mode 100644 src/mynteye/device/standard2/streams_adapter_s210a.cc delete mode 100644 src/mynteye/device/standard2/streams_adapter_s210a.h diff --git a/CMakeLists.txt b/CMakeLists.txt index a79a007..754fc12 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -199,9 +199,6 @@ set(MYNTEYE_SRCS src/mynteye/device/standard2/channels_adapter_s2.cc src/mynteye/device/standard2/device_s2.cc src/mynteye/device/standard2/streams_adapter_s2.cc - src/mynteye/device/standard2/channels_adapter_s210a.cc - src/mynteye/device/standard2/device_s210a.cc - src/mynteye/device/standard2/streams_adapter_s210a.cc src/mynteye/device/streams.cc src/mynteye/device/types.cc src/mynteye/device/utils.cc diff --git a/src/mynteye/device/device.cc b/src/mynteye/device/device.cc index 271be74..d98f915 100644 --- a/src/mynteye/device/device.cc +++ b/src/mynteye/device/device.cc @@ -26,7 +26,6 @@ #include "mynteye/device/motions.h" #include "mynteye/device/standard/device_s.h" #include "mynteye/device/standard2/device_s2.h" -#include "mynteye/device/standard2/device_s210a.h" #include "mynteye/device/streams.h" #include "mynteye/device/types.h" #include "mynteye/util/strings.h" @@ -104,7 +103,6 @@ std::shared_ptr Device::Create( if (name == "MYNTEYE") { return std::make_shared(device); } else if (strings::starts_with(name, "MYNT-EYE-")) { - // TODO(JohnZhao): Create different device by name, such as MYNT-EYE-S1000 std::string model_s = name.substr(9, 5); VLOG(2) << "MYNE EYE Model: " << model_s; DeviceModel model(model_s); @@ -113,9 +111,9 @@ std::shared_ptr Device::Create( return std::make_shared(device); } else if (model.generation == '2') { if (model.custom_code == '0') { - return std::make_shared(device); + return std::make_shared(Model::STANDARD2, device); } else if (model.custom_code == 'A') { - return 
std::make_shared(device); + return std::make_shared(Model::STANDARD210A, device); } else { LOG(FATAL) << "No such custom code now"; } diff --git a/src/mynteye/device/standard2/channels_adapter_s2.cc b/src/mynteye/device/standard2/channels_adapter_s2.cc index 99083c3..13efb89 100644 --- a/src/mynteye/device/standard2/channels_adapter_s2.cc +++ b/src/mynteye/device/standard2/channels_adapter_s2.cc @@ -90,8 +90,8 @@ void unpack_imu_res_packet(const std::uint8_t *data, ImuResPacket *res) { } // namespace -Standard2ChannelsAdapter::Standard2ChannelsAdapter() - : ChannelsAdapter(Model::STANDARD2) { +Standard2ChannelsAdapter::Standard2ChannelsAdapter(const Model &model) + : ChannelsAdapter(model) { } Standard2ChannelsAdapter::~Standard2ChannelsAdapter() { diff --git a/src/mynteye/device/standard2/channels_adapter_s2.h b/src/mynteye/device/standard2/channels_adapter_s2.h index 3aaeb96..7301be0 100644 --- a/src/mynteye/device/standard2/channels_adapter_s2.h +++ b/src/mynteye/device/standard2/channels_adapter_s2.h @@ -25,7 +25,7 @@ MYNTEYE_BEGIN_NAMESPACE class Standard2ChannelsAdapter : public ChannelsAdapter { public: - Standard2ChannelsAdapter(); + explicit Standard2ChannelsAdapter(const Model &model); virtual ~Standard2ChannelsAdapter(); std::int32_t GetAccelRangeDefault() override; diff --git a/src/mynteye/device/standard2/channels_adapter_s210a.cc b/src/mynteye/device/standard2/channels_adapter_s210a.cc deleted file mode 100644 index e345a0f..0000000 --- a/src/mynteye/device/standard2/channels_adapter_s210a.cc +++ /dev/null @@ -1,121 +0,0 @@ -// Copyright 2018 Slightech Co., Ltd. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -#include "mynteye/device/standard2/channels_adapter_s210a.h" - -#include "mynteye/logger.h" - -MYNTEYE_BEGIN_NAMESPACE - -namespace { - -#pragma pack(push, 1) -struct ImuData { - std::uint32_t frame_id; - std::uint64_t timestamp; - std::uint8_t flag; - std::int16_t temperature; - std::int16_t accel_or_gyro[3]; - - ImuData() = default; - explicit ImuData(const std::uint8_t *data) { - from_data(data); - } - - void from_data(const std::uint8_t *data) { - std::uint32_t timestamp_l; - std::uint32_t timestamp_h; - - frame_id = (*(data) << 24) | (*(data + 1) << 16) | (*(data + 2) << 8) | - *(data + 3); - timestamp_h = (*(data + 4) << 24) | (*(data + 5) << 16) | - (*(data + 6) << 8) | *(data + 7); - timestamp_l = (*(data + 8) << 24) | (*(data + 9) << 16) | - (*(data + 10) << 8) | *(data + 11); - timestamp = (static_cast(timestamp_h) << 32) | timestamp_l; - flag = *(data + 12); - temperature = (*(data + 13) << 8) | *(data + 14); - accel_or_gyro[0] = (*(data + 15) << 8) | *(data + 16); - accel_or_gyro[1] = (*(data + 17) << 8) | *(data + 18); - accel_or_gyro[2] = (*(data + 19) << 8) | *(data + 20); - } -}; -#pragma pack(pop) - -void unpack_imu_segment(const ImuData &imu, ImuSegment *seg) { - seg->frame_id = imu.frame_id; - seg->timestamp = imu.timestamp; - seg->flag = imu.flag; - seg->temperature = imu.temperature; - seg->accel[0] = (seg->flag == 1) ? 
imu.accel_or_gyro[0] : 0; - seg->accel[1] = (seg->flag == 1) ? imu.accel_or_gyro[1] : 0; - seg->accel[2] = (seg->flag == 1) ? imu.accel_or_gyro[2] : 0; - seg->gyro[0] = (seg->flag == 2) ? imu.accel_or_gyro[0] : 0; - seg->gyro[1] = (seg->flag == 2) ? imu.accel_or_gyro[1] : 0; - seg->gyro[2] = (seg->flag == 2) ? imu.accel_or_gyro[2] : 0; -} - -void unpack_imu_packet(const std::uint8_t *data, ImuPacket *pkg) { - std::size_t data_n = sizeof(ImuData); // 21 - for (std::size_t i = 0; i < pkg->count; i++) { - ImuSegment seg; - unpack_imu_segment(ImuData(data + data_n * i), &seg); - pkg->segments.push_back(seg); - } - pkg->serial_number = pkg->segments.back().frame_id; -} - -void unpack_imu_res_packet(const std::uint8_t *data, ImuResPacket *res) { - res->header = *data; - res->state = *(data + 1); - res->size = (*(data + 2) << 8) | *(data + 3); - - std::size_t data_n = sizeof(ImuData); // 21 - ImuPacket packet; - packet.count = res->size / data_n; - unpack_imu_packet(data + 4, &packet); - res->packets.push_back(packet); - res->checksum = *(data + 4 + res->size); -} - -} // namespace - -Standard210aChannelsAdapter::Standard210aChannelsAdapter() - : ChannelsAdapter(Model::STANDARD210A) { -} - -Standard210aChannelsAdapter::~Standard210aChannelsAdapter() { -} - -std::int32_t Standard210aChannelsAdapter::GetAccelRangeDefault() { - return 12; -} - -std::vector Standard210aChannelsAdapter::GetAccelRangeValues() { - return {6, 12, 24, 48}; -} - -std::int32_t Standard210aChannelsAdapter::GetGyroRangeDefault() { - return 1000; -} - -std::vector Standard210aChannelsAdapter::GetGyroRangeValues() { - return {250, 500, 1000, 2000, 4000}; -} - -void Standard210aChannelsAdapter::GetImuResPacket( - const std::uint8_t *data, ImuResPacket *res) { - unpack_imu_res_packet(data, res); -} - -MYNTEYE_END_NAMESPACE diff --git a/src/mynteye/device/standard2/channels_adapter_s210a.h b/src/mynteye/device/standard2/channels_adapter_s210a.h deleted file mode 100644 index b700814..0000000 --- a/src/mynteye/device/standard2/channels_adapter_s210a.h +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright 2018 Slightech Co., Ltd. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-#ifndef MYNTEYE_DEVICE_STANDARD2_CHANNELS_ADAPTER_S210A_H_ -#define MYNTEYE_DEVICE_STANDARD2_CHANNELS_ADAPTER_S210A_H_ -#pragma once - -#include -#include -#include - -#include "mynteye/device/channel/channels.h" - -MYNTEYE_BEGIN_NAMESPACE - -class Standard210aChannelsAdapter : public ChannelsAdapter { - public: - Standard210aChannelsAdapter(); - virtual ~Standard210aChannelsAdapter(); - - std::int32_t GetAccelRangeDefault() override; - std::vector GetAccelRangeValues() override; - - std::int32_t GetGyroRangeDefault() override; - std::vector GetGyroRangeValues() override; - - void GetImuResPacket(const std::uint8_t *data, ImuResPacket *res) override; -}; - -MYNTEYE_END_NAMESPACE - -#endif // MYNTEYE_DEVICE_STANDARD2_CHANNELS_ADAPTER_S210A_H_ diff --git a/src/mynteye/device/standard2/device_s2.cc b/src/mynteye/device/standard2/device_s2.cc index 5428dcd..050cd4c 100644 --- a/src/mynteye/device/standard2/device_s2.cc +++ b/src/mynteye/device/standard2/device_s2.cc @@ -20,11 +20,13 @@ MYNTEYE_BEGIN_NAMESPACE -Standard2Device::Standard2Device(std::shared_ptr device) - : Device(Model::STANDARD2, device, - std::make_shared(), - std::make_shared()) { +Standard2Device::Standard2Device(const Model &model, + std::shared_ptr device) + : Device(model, device, + std::make_shared(model), + std::make_shared(model)) { VLOG(2) << __func__; + CHECK(model == Model::STANDARD2 || model == Model::STANDARD210A); } Standard2Device::~Standard2Device() { diff --git a/src/mynteye/device/standard2/device_s2.h b/src/mynteye/device/standard2/device_s2.h index badf6ac..f2fc5f7 100644 --- a/src/mynteye/device/standard2/device_s2.h +++ b/src/mynteye/device/standard2/device_s2.h @@ -24,7 +24,7 @@ MYNTEYE_BEGIN_NAMESPACE class Standard2Device : public Device { public: - explicit Standard2Device(std::shared_ptr device); + Standard2Device(const Model &model, std::shared_ptr device); virtual ~Standard2Device(); Capabilities GetKeyStreamCapability() const override; diff --git a/src/mynteye/device/standard2/device_s210a.cc b/src/mynteye/device/standard2/device_s210a.cc deleted file mode 100644 index e717156..0000000 --- a/src/mynteye/device/standard2/device_s210a.cc +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright 2018 Slightech Co., Ltd. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-#include "mynteye/device/standard2/device_s210a.h" - -#include "mynteye/logger.h" -#include "mynteye/device/motions.h" -#include "mynteye/device/standard2/channels_adapter_s210a.h" -#include "mynteye/device/standard2/streams_adapter_s210a.h" - -MYNTEYE_BEGIN_NAMESPACE - -Standard210aDevice::Standard210aDevice(std::shared_ptr device) - : Device(Model::STANDARD210A, device, - std::make_shared(), - std::make_shared()) { - VLOG(2) << __func__; -} - -Standard210aDevice::~Standard210aDevice() { - VLOG(2) << __func__; -} - -Capabilities Standard210aDevice::GetKeyStreamCapability() const { - return Capabilities::STEREO_COLOR; -} - -void Standard210aDevice::OnStereoStreamUpdate() { - if (motion_tracking_) { - auto &&motions = this->motions(); - motions->DoMotionTrack(); - } -} - -MYNTEYE_END_NAMESPACE diff --git a/src/mynteye/device/standard2/device_s210a.h b/src/mynteye/device/standard2/device_s210a.h deleted file mode 100644 index aa55ac5..0000000 --- a/src/mynteye/device/standard2/device_s210a.h +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright 2018 Slightech Co., Ltd. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -#ifndef MYNTEYE_DEVICE_STANDARD2_DEVICE_S210A_H_ -#define MYNTEYE_DEVICE_STANDARD2_DEVICE_S210A_H_ -#pragma once - -#include -#include - -#include "mynteye/device/device.h" - -MYNTEYE_BEGIN_NAMESPACE - -class Standard210aDevice : public Device { - public: - explicit Standard210aDevice(std::shared_ptr device); - virtual ~Standard210aDevice(); - - Capabilities GetKeyStreamCapability() const override; - - void OnStereoStreamUpdate() override; -}; - -MYNTEYE_END_NAMESPACE - -#endif // MYNTEYE_DEVICE_STANDARD2_DEVICE_S210A_H_ diff --git a/src/mynteye/device/standard2/streams_adapter_s2.cc b/src/mynteye/device/standard2/streams_adapter_s2.cc index 83deb1c..1a2560b 100644 --- a/src/mynteye/device/standard2/streams_adapter_s2.cc +++ b/src/mynteye/device/standard2/streams_adapter_s2.cc @@ -143,7 +143,58 @@ bool unpack_stereo_img_data( } // namespace -Standard2StreamsAdapter::Standard2StreamsAdapter() { +namespace s210a { + +// image pixels + +bool unpack_left_img_pixels( + const void *data, const StreamRequest &request, Streams::frame_t *frame) { + CHECK_NOTNULL(frame); + CHECK_EQ(request.format, Format::BGR888); + CHECK_EQ(frame->format(), Format::BGR888); + auto data_new = reinterpret_cast(data); + std::size_t n = 3; + std::size_t w = frame->width(); + std::size_t h = frame->height(); + for (std::size_t i = 0; i < h; i++) { + for (std::size_t j = 0; j < w; j++) { + frame->data()[(i * w + j) * n] = + *(data_new + (2 * i * w + j) * n + 2); + frame->data()[(i * w + j) * n + 1] = + *(data_new + (2 * i * w + j) * n + 1); + frame->data()[(i * w + j) * n + 2] = + *(data_new + (2 * i * w + j) * n); + } + } + return true; +} + +bool unpack_right_img_pixels( + const void *data, const StreamRequest &request, Streams::frame_t *frame) { + CHECK_NOTNULL(frame); + CHECK_EQ(request.format, Format::BGR888); + CHECK_EQ(frame->format(), Format::BGR888); + auto data_new = 
reinterpret_cast(data); + std::size_t n = 3; + std::size_t w = frame->width(); + std::size_t h = frame->height(); + for (std::size_t i = 0; i < h; i++) { + for (std::size_t j = 0; j < w; j++) { + frame->data()[(i * w + j) * n] = + *(data_new + ((2 * i + 1) * w + j) * n + 2); + frame->data()[(i * w + j) * n + 1] = + *(data_new + ((2 * i + 1) * w + j) * n + 1); + frame->data()[(i * w + j) * n + 2] = + *(data_new + ((2 * i + 1) * w + j) * n); + } + } + return true; +} + +} // namespace s210a + +Standard2StreamsAdapter::Standard2StreamsAdapter(const Model &model) + : model_(model) { } Standard2StreamsAdapter::~Standard2StreamsAdapter() { @@ -167,10 +218,19 @@ Standard2StreamsAdapter::GetUnpackImgDataMap() { std::map Standard2StreamsAdapter::GetUnpackImgPixelsMap() { - return { - {Stream::LEFT, unpack_left_img_pixels}, - {Stream::RIGHT, unpack_right_img_pixels} - }; + switch (model_) { + case Model::STANDARD210A: + return { + {Stream::LEFT, s210a::unpack_left_img_pixels}, + {Stream::RIGHT, s210a::unpack_right_img_pixels} + }; + case Model::STANDARD2: + default: + return { + {Stream::LEFT, unpack_left_img_pixels}, + {Stream::RIGHT, unpack_right_img_pixels} + }; + } } MYNTEYE_END_NAMESPACE diff --git a/src/mynteye/device/standard2/streams_adapter_s2.h b/src/mynteye/device/standard2/streams_adapter_s2.h index e0585cb..94479d6 100644 --- a/src/mynteye/device/standard2/streams_adapter_s2.h +++ b/src/mynteye/device/standard2/streams_adapter_s2.h @@ -25,7 +25,7 @@ MYNTEYE_BEGIN_NAMESPACE class Standard2StreamsAdapter : public StreamsAdapter { public: - Standard2StreamsAdapter(); + explicit Standard2StreamsAdapter(const Model &model); virtual ~Standard2StreamsAdapter(); std::vector GetKeyStreams() override; @@ -35,6 +35,9 @@ class Standard2StreamsAdapter : public StreamsAdapter { GetUnpackImgDataMap() override; std::map GetUnpackImgPixelsMap() override; + + private: + Model model_; }; MYNTEYE_END_NAMESPACE diff --git a/src/mynteye/device/standard2/streams_adapter_s210a.cc b/src/mynteye/device/standard2/streams_adapter_s210a.cc deleted file mode 100644 index bbbfd4c..0000000 --- a/src/mynteye/device/standard2/streams_adapter_s210a.cc +++ /dev/null @@ -1,186 +0,0 @@ -// Copyright 2018 Slightech Co., Ltd. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-#include "mynteye/device/standard2/streams_adapter_s210a.h" - -#include - -#include "mynteye/logger.h" -#include "mynteye/device/types.h" - -MYNTEYE_BEGIN_NAMESPACE - -namespace { - -// image info - -#pragma pack(push, 1) -struct ImagePacket { - std::uint8_t header; - std::uint8_t size; - std::uint16_t frame_id; - std::uint64_t timestamp; - std::uint16_t exposure_time; - std::uint8_t checksum; - - ImagePacket() = default; - explicit ImagePacket(std::uint8_t *data) { - from_data(data); - } - - void from_data(std::uint8_t *data) { - std::uint32_t timestamp_l; - std::uint32_t timestamp_h; - - header = *data; - size = *(data + 1); - frame_id = (*(data + 2) << 8) | *(data + 3); - timestamp_h = (*(data + 4) << 24) | (*(data + 5) << 16) | - (*(data + 6) << 8) | *(data + 7); - timestamp_l = (*(data + 8) << 24) | (*(data + 9) << 16) | - (*(data + 10) << 8) | *(data + 11); - timestamp = (static_cast(timestamp_h) << 32) | timestamp_l; - exposure_time = (*(data + 12) << 8) | *(data + 13); - checksum = *(data + 14); - } -}; -#pragma pack(pop) - -// image pixels - -bool unpack_left_img_pixels( - const void *data, const StreamRequest &request, Streams::frame_t *frame) { - CHECK_NOTNULL(frame); - CHECK_EQ(request.format, Format::BGR888); - CHECK_EQ(frame->format(), Format::BGR888); - auto data_new = reinterpret_cast(data); - std::size_t n = 3; - std::size_t w = frame->width(); - std::size_t h = frame->height(); - for (std::size_t i = 0; i < h; i++) { - for (std::size_t j = 0; j < w; j++) { - frame->data()[(i * w + j) * n] = - *(data_new + (2 * i * w + j) * n + 2); - frame->data()[(i * w + j) * n + 1] = - *(data_new + (2 * i * w + j) * n + 1); - frame->data()[(i * w + j) * n + 2] = - *(data_new + (2 * i * w + j) * n); - } - } - return true; -} - -bool unpack_right_img_pixels( - const void *data, const StreamRequest &request, Streams::frame_t *frame) { - CHECK_NOTNULL(frame); - CHECK_EQ(request.format, Format::BGR888); - CHECK_EQ(frame->format(), Format::BGR888); - auto data_new = reinterpret_cast(data); - std::size_t n = 3; - std::size_t w = frame->width(); - std::size_t h = frame->height(); - for (std::size_t i = 0; i < h; i++) { - for (std::size_t j = 0; j < w; j++) { - frame->data()[(i * w + j) * n] = - *(data_new + ((2 * i + 1) * w + j) * n + 2); - frame->data()[(i * w + j) * n + 1] = - *(data_new + ((2 * i + 1) * w + j) * n + 1); - frame->data()[(i * w + j) * n + 2] = - *(data_new + ((2 * i + 1) * w + j) * n); - } - } - return true; -} - -bool unpack_stereo_img_data( - const void *data, const StreamRequest &request, ImgData *img) { - CHECK_NOTNULL(img); - - auto data_new = reinterpret_cast(data); - std::size_t data_n = - request.width * request.height * bytes_per_pixel(request.format); - auto data_end = data_new + data_n; - - std::size_t packet_n = sizeof(ImagePacket); - std::vector packet(packet_n); - std::reverse_copy(data_end - packet_n, data_end, packet.begin()); - - ImagePacket img_packet(packet.data()); - // LOG(INFO) << "ImagePacket: header=0x" << std::hex << - // static_cast(img_packet.header) - // << ", size=0x" << std::hex << static_cast(img_packet.size) - // << ", frame_id="<< std::dec << img_packet.frame_id - // << ", timestamp="<< std::dec << img_packet.timestamp - // << ", exposure_time="<< std::dec << img_packet.exposure_time - // << ", checksum=0x" << std::hex << static_cast(img_packet.checksum); - - if (img_packet.header != 0x3B) { - VLOG(2) << "Image packet header must be 0x3B, but 0x" << std::hex - << std::uppercase << std::setw(2) << std::setfill('0') - << 
static_cast(img_packet.header) << " now"; - return false; - } - - std::uint8_t checksum = 0; - for (std::size_t i = 2, n = packet_n - 2; i <= n; i++) { // content: [2,9] - checksum = (checksum ^ packet[i]); - } -/* - if (img_packet.checksum != checksum) { - VLOG(2) << "Image packet checksum should be 0x" << std::hex - << std::uppercase << std::setw(2) << std::setfill('0') - << static_cast(img_packet.checksum) << ", but 0x" - << std::setw(2) << std::setfill('0') << static_cast(checksum) - << " now"; - return false; - } -*/ - img->frame_id = img_packet.frame_id; - img->timestamp = img_packet.timestamp; - img->exposure_time = img_packet.exposure_time; - return true; -} - -} // namespace - -Standard210aStreamsAdapter::Standard210aStreamsAdapter() { -} - -Standard210aStreamsAdapter::~Standard210aStreamsAdapter() { -} - -std::vector Standard210aStreamsAdapter::GetKeyStreams() { - return {Stream::LEFT, Stream::RIGHT}; -} - -std::vector Standard210aStreamsAdapter::GetStreamCapabilities() { - return {Capabilities::STEREO_COLOR}; -} - -std::map -Standard210aStreamsAdapter::GetUnpackImgDataMap() { - return { - {Stream::LEFT, unpack_stereo_img_data}, - {Stream::RIGHT, unpack_stereo_img_data} - }; -} - -std::map -Standard210aStreamsAdapter::GetUnpackImgPixelsMap() { - return { - {Stream::LEFT, unpack_left_img_pixels}, - {Stream::RIGHT, unpack_right_img_pixels} - }; -} - -MYNTEYE_END_NAMESPACE diff --git a/src/mynteye/device/standard2/streams_adapter_s210a.h b/src/mynteye/device/standard2/streams_adapter_s210a.h deleted file mode 100644 index e98fe7e..0000000 --- a/src/mynteye/device/standard2/streams_adapter_s210a.h +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright 2018 Slightech Co., Ltd. All rights reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -#ifndef MYNTEYE_DEVICE_STANDARD2_STREAMS_ADAPTER_S210A_H_ -#define MYNTEYE_DEVICE_STANDARD2_STREAMS_ADAPTER_S210A_H_ -#pragma once - -#include -#include -#include - -#include "mynteye/device/streams.h" - -MYNTEYE_BEGIN_NAMESPACE - -class Standard210aStreamsAdapter : public StreamsAdapter { - public: - Standard210aStreamsAdapter(); - virtual ~Standard210aStreamsAdapter(); - - std::vector GetKeyStreams() override; - std::vector GetStreamCapabilities() override; - - std::map - GetUnpackImgDataMap() override; - std::map - GetUnpackImgPixelsMap() override; -}; - -MYNTEYE_END_NAMESPACE - -#endif // MYNTEYE_DEVICE_STANDARD2_STREAMS_ADAPTER_S210A_H_
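Note on the clone switch introduced in PATCH 08: ProcessInputConnection() and ProcessOutputConnection() default to WITH_CLONE, so existing processors keep deep-copying Objects. A processor that can safely share buffers overrides both hooks to return WITHOUT_CLONE, in which case Process() stores the incoming shared_ptr directly and Run() publishes output_ as output_result_ without calling Object::Clone(), as the rectify/disparity/points/depth processors above do. The sketch below shows the same pattern for a hypothetical downstream processor; the class name PassThroughProcessor, the header paths, the ObjMat output type, and the empty OnProcess() body are illustrative assumptions, not code from this series.

#include <cstdint>
#include <memory>
#include <string>

#include "mynteye/api/object.h"     // Object, ObjMat (assumed header path)
#include "mynteye/api/processor.h"

MYNTEYE_BEGIN_NAMESPACE

// Hypothetical processor that forwards shared Objects without cloning them.
class PassThroughProcessor : public Processor {
 public:
  explicit PassThroughProcessor(std::int32_t proc_period = 0)
      : Processor(proc_period) {}
  virtual ~PassThroughProcessor() {}

  std::string Name() override { return "PassThroughProcessor"; }

 protected:
  // Keep the upstream Object as input_ instead of cloning it in Process().
  inline Processor::process_type ProcessInputConnection() override {
    return Processor::WITHOUT_CLONE;
  }
  // Expose output_ directly as output_result_ instead of cloning it in Run().
  inline Processor::process_type ProcessOutputConnection() override {
    return Processor::WITHOUT_CLONE;
  }

  Object *OnCreateOutput() override { return new ObjMat(); }

  bool OnProcess(Object *const in, Object *const out,
      std::shared_ptr<Processor> const parent) override {
    MYNTEYE_UNUSED(parent)
    MYNTEYE_UNUSED(in)
    MYNTEYE_UNUSED(out)
    // A real processor would fill *out from *in here; with both hooks
    // returning WITHOUT_CLONE, neither side is deep-copied.
    return true;
  }
};

MYNTEYE_END_NAMESPACE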