Merge branch 'develop' into feature/android
* develop:
  refactor(device): merge device_s210a to device_s2
  fix: improve processor close order
  refactor(synthetic): make switch of disable copy and disable unnessary copy
  fix(api): remove enable LEFT/RIGHT stream code
  refactor(synthetic): use RootProcessor as root node
  fix(ros): ros record -a
  fix(api): LEFT/RIGHT callback fix
  fix(samples): check sample enable code
  style(api): change sdk version api
  refactor(synthetic): remove usless logic
commit d69263a2c7
@@ -199,9 +199,6 @@ set(MYNTEYE_SRCS
   src/mynteye/device/standard2/channels_adapter_s2.cc
   src/mynteye/device/standard2/device_s2.cc
   src/mynteye/device/standard2/streams_adapter_s2.cc
-  src/mynteye/device/standard2/channels_adapter_s210a.cc
-  src/mynteye/device/standard2/device_s210a.cc
-  src/mynteye/device/standard2/streams_adapter_s210a.cc
   src/mynteye/device/streams.cc
   src/mynteye/device/types.cc
   src/mynteye/device/utils.cc
@@ -221,6 +218,7 @@ if(WITH_API)
   src/mynteye/api/config.cc
   src/mynteye/api/correspondence.cc
   src/mynteye/api/version_checker.cc
+  src/mynteye/api/data_tools.cc
 )
 if(WITH_CAM_MODELS)
   list(APPEND MYNTEYE_SRCS
@@ -187,7 +187,10 @@ class MYNTEYE_API API {
    * Get the device info.
    */
   std::string GetInfo(const Info &info) const;
+  /**
+   * Get the sdk version.
+   */
+  std::string GetSDKVersion() const;
   /**
    * @deprecated Get the intrinsics (pinhole) of stream.
    */
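With this change API::GetInfo() no longer special-cases the SDK version (the Info::SDK_VERSION enumerator is removed below); callers use the dedicated GetSDKVersion() accessor instead. A minimal caller-side sketch, assuming the usual API::Create factory and the Info::FIRMWARE_VERSION enumerator from the existing SDK (neither is introduced by this commit):

    // Hypothetical migration sketch, not part of the diff.
    auto api = API::Create(argc, argv);
    std::string sdk = api->GetSDKVersion();                 // was: api->GetInfo(Info::SDK_VERSION)
    std::string fw = api->GetInfo(Info::FIRMWARE_VERSION);  // device-side info still goes through GetInfo()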
@@ -126,8 +126,6 @@ enum class Info : std::uint8_t {
   AUXILIARY_CHIP_VERSION,
   /** Isp version */
   ISP_VERSION,
-  /** SDK version*/
-  SDK_VERSION,
   /** Last guard */
   LAST
 };
@@ -328,7 +328,10 @@ std::shared_ptr<DeviceInfo> API::GetInfo() const {
 }
 
 std::string API::GetInfo(const Info &info) const {
-  if (info == Info::SDK_VERSION) {
+  return device_->GetInfo(info);
+}
+
+std::string API::GetSDKVersion() const {
   std::string info_path =
       utils::get_sdk_install_dir();
   info_path.append(MYNTEYE_OS_SEP "share" \
@@ -340,9 +343,6 @@ std::string API::GetInfo(const Info &info) const {
     return "null";
   }
   return fs["MYNTEYE_VERSION"];
-  }
-
-  return device_->GetInfo(info);
 }
 
 IntrinsicsPinhole API::GetIntrinsics(const Stream &stream) const {
src/mynteye/api/data_tools.cc (new file, 78 lines)
@@ -0,0 +1,78 @@
+// Copyright 2018 Slightech Co., Ltd. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+#include <algorithm>
+#include <functional>
+#include <stdexcept>
+#include "mynteye/api/data_tools.h"
+#include "mynteye/logger.h"
+
+MYNTEYE_BEGIN_NAMESPACE
+
+cv::Mat frame2mat(const std::shared_ptr<device::Frame> &frame) {
+  if (frame->format() == Format::YUYV) {
+    cv::Mat img(frame->height(), frame->width(), CV_8UC2, frame->data());
+    cv::cvtColor(img, img, cv::COLOR_YUV2BGR_YUY2);
+    return img;
+  } else if (frame->format() == Format::BGR888) {
+    cv::Mat img(frame->height(), frame->width(), CV_8UC3, frame->data());
+    return img;
+  } else {  // Format::GRAY
+    return cv::Mat(frame->height(), frame->width(), CV_8UC1, frame->data());
+  }
+}
+
+api::StreamData data2api(const device::StreamData &data) {
+  return {data.img, frame2mat(data.frame), data.frame, data.frame_id};
+}
+
+// ObjMat/ObjMat2 > api::StreamData
+
+api::StreamData obj_data_first(const ObjMat2 *obj) {
+  return {obj->first_data, obj->first, nullptr, obj->first_id};
+}
+
+api::StreamData obj_data_second(const ObjMat2 *obj) {
+  return {obj->second_data, obj->second, nullptr, obj->second_id};
+}
+
+api::StreamData obj_data(const ObjMat *obj) {
+  return {obj->data, obj->value, nullptr, obj->id};
+}
+
+api::StreamData obj_data_first(const std::shared_ptr<ObjMat2> &obj) {
+  return {obj->first_data, obj->first, nullptr, obj->first_id};
+}
+
+api::StreamData obj_data_second(const std::shared_ptr<ObjMat2> &obj) {
+  return {obj->second_data, obj->second, nullptr, obj->second_id};
+}
+
+api::StreamData obj_data(const std::shared_ptr<ObjMat> &obj) {
+  return {obj->data, obj->value, nullptr, obj->id};
+}
+
+// api::StreamData > ObjMat/ObjMat2
+
+ObjMat data_obj(const api::StreamData &data) {
+  return ObjMat{data.frame, data.frame_id, data.img};
+}
+
+ObjMat2 data_obj(const api::StreamData &first, const api::StreamData &second) {
+  return ObjMat2{
+      first.frame, first.frame_id, first.img,
+      second.frame, second.frame_id, second.img};
+}
+
+MYNTEYE_END_NAMESPACE
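These helpers centralize the device-to-API data conversions and the ObjMat/ObjMat2 packing that were previously private to synthetic.cc, so both Synthetic and the processors can share them. A rough usage sketch, assuming two hypothetical device::StreamData values already captured from the left and right streams:

    // Sketch only: left_raw and right_raw are assumed device::StreamData values.
    api::StreamData left = data2api(left_raw);      // wraps the raw frame as a cv::Mat plus img info
    api::StreamData right = data2api(right_raw);
    ObjMat2 pair = data_obj(left, right);            // pack a left/right pair for a Processor input
    api::StreamData again = obj_data_first(&pair);   // unpack the first (left) stream back out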
src/mynteye/api/data_tools.h (new file, 33 lines)
@@ -0,0 +1,33 @@
+// Copyright 2018 Slightech Co., Ltd. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+#ifndef MYNTEYE_API_DATA_TOOLS_H_
+#define MYNTEYE_API_DATA_TOOLS_H_
+#pragma once
+#include <opencv2/imgproc/imgproc.hpp>
+#include "mynteye/api/object.h"
+#include "mynteye/api/api.h"
+#include "mynteye/device/device.h"
+MYNTEYE_BEGIN_NAMESPACE
+cv::Mat frame2mat(const std::shared_ptr<device::Frame> &frame);
+api::StreamData data2api(const device::StreamData &data);
+api::StreamData obj_data_first(const ObjMat2 *obj);
+api::StreamData obj_data_second(const ObjMat2 *obj);
+api::StreamData obj_data(const ObjMat *obj);
+api::StreamData obj_data_first(const std::shared_ptr<ObjMat2> &obj);
+api::StreamData obj_data_second(const std::shared_ptr<ObjMat2> &obj);
+api::StreamData obj_data(const std::shared_ptr<ObjMat> &obj);
+ObjMat data_obj(const api::StreamData &data);
+ObjMat2 data_obj(const api::StreamData &first, const api::StreamData &second);
+MYNTEYE_END_NAMESPACE
+#endif  // MYNTEYE_API_DATA_TOOLS_H_
@@ -19,6 +19,7 @@
 #include "mynteye/logger.h"
 #include "mynteye/util/strings.h"
 #include "mynteye/util/times.h"
+#include "mynteye/api/data_tools.h"
 
 MYNTEYE_BEGIN_NAMESPACE
 
@@ -41,9 +42,9 @@ Processor::Processor(std::int32_t proc_period)
 Processor::~Processor() {
   VLOG(2) << __func__;
   Deactivate();
-  input_.reset(nullptr);
-  output_.reset(nullptr);
-  output_result_.reset(nullptr);
+  input_ = nullptr;
+  output_ = nullptr;
+  output_result_ = nullptr;
   childs_.clear();
 }
 
@@ -121,7 +122,7 @@ bool Processor::IsIdle() {
   return idle_;
 }
 
-bool Processor::Process(const Object &in) {
+bool Processor::Process(std::shared_ptr<Object> in) {
   if (!activated_)
     return false;
   if (!idle_) {
@@ -131,13 +132,17 @@ bool Processor::Process(const Object &in) {
       return false;
     }
   }
-  if (!in.DecValidity()) {
+  if (in && !in->DecValidity()) {
     LOG(WARNING) << Name() << " process with invalid input";
     return false;
   }
   {
     std::lock_guard<std::mutex> lk(mtx_input_ready_);
-    input_.reset(in.Clone());
+    if (ProcessInputConnection() == WITH_CLONE) {
+      input_.reset(in->Clone());
+    } else {
+      input_ = in;
+    }
     input_ready_ = true;
   }
   cond_input_ready_.notify_all();
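Processor::Process() now takes the input by std::shared_ptr, so a child that reports WITHOUT_CLONE (see the process_type switch added to processor.h below) shares the same Object with its parent instead of paying for an Object::Clone(). A condensed sketch of such a call, with root standing in for a hypothetical pointer to the root processor and left_data/right_data for a paired set of api::StreamData values:

    // Condensed from s1s2Processor::ProcessNativeStream() further down in this diff.
    auto input = std::make_shared<ObjMat2>(data_obj(left_data, right_data));
    root->Process(input);  // WITHOUT_CLONE children receive this same ObjMat2 instance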
@@ -228,12 +233,16 @@ void Processor::Run() {
     }
     {
       std::unique_lock<std::mutex> lk(mtx_result_);
+      if (ProcessOutputConnection() == WITH_CLONE) {
       output_result_.reset(output_->Clone());
+      } else {
+        output_result_ = output_;
+      }
     }
 
     if (!childs_.empty()) {
       for (auto child : childs_) {
-        child->Process(*output_);
+        child->Process(output_);
       }
     }
 
@@ -245,6 +254,82 @@ void Processor::Run() {
   VLOG(2) << Name() << " thread end";
 }
 
+Processor::process_type Processor::ProcessOutputConnection() {
+  return WITH_CLONE;
+}
+
+Processor::process_type Processor::ProcessInputConnection() {
+  return WITH_CLONE;
+}
+
+api::StreamData Processor::GetStreamData(const Stream &stream) {
+  auto sum = getStreamsSum();
+  auto &&out = GetOutput();
+  Synthetic::Mode enable_mode = Synthetic::MODE_OFF;
+  auto streams = getTargetStreams();
+  for (auto it_s : streams) {
+    if (it_s.stream == stream) {
+      enable_mode = it_s.enabled_mode_;
+      break;
+    }
+  }
+  if (enable_mode == Synthetic::MODE_ON) {
+    if (sum == 1) {
+      if (out != nullptr) {
+        auto &&output = Object::Cast<ObjMat>(out);
+        if (output != nullptr) {
+          return obj_data(output);
+        }
+        VLOG(2) << "Rectify not ready now";
+      }
+    } else if (sum == 2) {
+      static std::shared_ptr<ObjMat2> output = nullptr;
+      if (out != nullptr) {
+        output = Object::Cast<ObjMat2>(out);
+      }
+      auto streams = getTargetStreams();
+      if (output != nullptr) {
+        int num = 0;
+        for (auto it : streams) {
+          if (it.stream == stream) {
+            if (num == 1) {
+              return obj_data_first(output);
+            } else {
+              return obj_data_second(output);
+            }
+          }
+          num++;
+        }
+      }
+      VLOG(2) << "Rectify not ready now";
+    } else {
+      LOG(ERROR) << "error: invalid sum!";
+    }
+    return {};  // frame.empty() == true
+  }
+  LOG(ERROR) << "Failed to get stream data of " << stream
+             << ", unsupported or disabled";
+  return {};  // frame.empty() == true
+}
+
+std::vector<api::StreamData> Processor::GetStreamDatas(const Stream &stream) {
+  Synthetic::Mode enable_mode = Synthetic::MODE_OFF;
+  auto streams = getTargetStreams();
+  for (auto it_s : streams) {
+    if (it_s.stream == stream) {
+      enable_mode = it_s.enabled_mode_;
+      break;
+    }
+  }
+  if (enable_mode == Synthetic::MODE_ON) {
+    return {GetStreamData(stream)};
+  } else {
+    LOG(ERROR) << "Failed to get stream data of " << stream
+               << ", unsupported or disabled";
+  }
+  return {};
+}
+
 void Processor::SetIdle(bool idle) {
   std::lock_guard<std::mutex> lk(mtx_state_);
   idle_ = idle;
@@ -64,7 +64,11 @@ class Processor :
   bool IsIdle();
 
   /** Returns dropped or not. */
-  bool Process(const Object &in);
+  bool Process(std::shared_ptr<Object> in);
+
+  virtual api::StreamData GetStreamData(const Stream &stream);
+
+  virtual std::vector<api::StreamData> GetStreamDatas(const Stream &stream);
 
   /**
    * Returns the last output.
@@ -79,6 +83,13 @@ class Processor :
   virtual bool OnProcess(
       Object *const in, Object *const out,
       std::shared_ptr<Processor> const parent) = 0;
+  enum process_type {
+    WITH_CLONE,
+    WITHOUT_CLONE
+  };
+
+  virtual process_type ProcessOutputConnection();
+  virtual process_type ProcessInputConnection();
 
  private:
   /** Run in standalone thread. */
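The WITH_CLONE/WITHOUT_CLONE switch is what the "disable unnecessary copy" commit refers to: a processor whose stages do not need a private copy can override the two hooks and let Process()/Run() pass the shared Object straight through. A minimal hypothetical subclass (the class name is illustrative; the real overrides added by this commit live in the rectify, disparity, points, and depth processor headers below):

    class PassThroughProcessor : public Processor {
     protected:
      inline Processor::process_type ProcessInputConnection() override {
        return Processor::WITHOUT_CLONE;   // share the input Object, skip Clone()
      }
      inline Processor::process_type ProcessOutputConnection() override {
        return Processor::WITHOUT_CLONE;   // expose output_ directly as the result
      }
      // OnCreateOutput()/OnProcess() implemented as in the other processors.
    };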
@@ -98,10 +109,10 @@ class Processor :
   std::uint64_t dropped_count_;
   std::mutex mtx_state_;
 
-  std::unique_ptr<Object> input_;
-  std::unique_ptr<Object> output_;
+  std::shared_ptr<Object> input_;
+  std::shared_ptr<Object> output_;
 
-  std::unique_ptr<Object> output_result_;
+  std::shared_ptr<Object> output_result_;
   std::mutex mtx_result_;
 
   PreProcessCallback pre_callback_;
@@ -31,6 +31,9 @@ class DepthProcessorOCV : public Processor {
   std::string Name() override;
 
  protected:
+  inline Processor::process_type ProcessOutputConnection() override {
+    return Processor::WITHOUT_CLONE;
+  }
   Object *OnCreateOutput() override;
   bool OnProcess(
       Object *const in, Object *const out,
@@ -31,6 +31,12 @@ class DisparityNormalizedProcessor : public Processor {
   std::string Name() override;
 
  protected:
+  inline Processor::process_type ProcessOutputConnection() override {
+    return Processor::WITHOUT_CLONE;
+  }
+  inline Processor::process_type ProcessInputConnection() override {
+    return Processor::WITHOUT_CLONE;
+  }
   Object *OnCreateOutput() override;
   bool OnProcess(
       Object *const in, Object *const out,
@@ -36,6 +36,9 @@ class PointsProcessor : public Processor {
   std::string Name() override;
 
  protected:
+  inline Processor::process_type ProcessOutputConnection() override {
+    return Processor::WITHOUT_CLONE;
+  }
   Object *OnCreateOutput() override;
   bool OnProcess(
       Object *const in, Object *const out,
@@ -79,6 +79,12 @@ class RectifyProcessor : public Processor {
   bool OnProcess(
       Object *const in, Object *const out,
       std::shared_ptr<Processor> const parent) override;
+  inline Processor::process_type ProcessOutputConnection() override {
+    return Processor::WITHOUT_CLONE;
+  }
+  inline Processor::process_type ProcessInputConnection() override {
+    return Processor::WITHOUT_CLONE;
+  }
 
  private:
   void InitParams(IntrinsicsEquidistant in_left,
@@ -49,6 +49,13 @@ class RectifyProcessorOCV : public Processor {
   cv::Mat map11, map12, map21, map22;
 
  protected:
+  inline Processor::process_type ProcessOutputConnection() override {
+    return Processor::WITHOUT_CLONE;
+  }
+  inline Processor::process_type ProcessInputConnection() override {
+    return Processor::WITHOUT_CLONE;
+  }
+
   Object *OnCreateOutput() override;
   bool OnProcess(
       Object *const in, Object *const out,
@@ -14,15 +14,21 @@
 #include "mynteye/api/processor/root_camera_processor.h"
 
 #include <utility>
+#include <vector>
 
 #include <opencv2/calib3d/calib3d.hpp>
 #include <opencv2/imgproc/imgproc.hpp>
 #include "mynteye/logger.h"
+#include "mynteye/api/synthetic.h"
+#include "mynteye/device/device.h"
+#include "mynteye/api/data_tools.h"
 MYNTEYE_BEGIN_NAMESPACE
 const char RootProcessor::NAME[] = "RootProcessor";
 
-RootProcessor::RootProcessor(std::int32_t proc_period)
-    : Processor(std::move(proc_period)) {}
+RootProcessor::RootProcessor(std::shared_ptr<Device> device,
+    std::int32_t proc_period)
+    : Processor(std::move(proc_period)),
+      device_(device) {}
 RootProcessor::~RootProcessor() {
   VLOG(2) << __func__;
 }
@@ -31,13 +37,114 @@ std::string RootProcessor::Name() {
   return NAME;
 }
 
-Object *RootProcessor::OnCreateOutput() {
+s1s2Processor::s1s2Processor(std::shared_ptr<Device> device,
+    std::int32_t proc_period)
+    : RootProcessor(device, std::move(proc_period)) {}
+s1s2Processor::~s1s2Processor() {
+  VLOG(2) << __func__;
+}
+
+Object *s1s2Processor::OnCreateOutput() {
   return new ObjMat2();
 }
-bool RootProcessor::OnProcess(
+bool s1s2Processor::OnProcess(
     Object *const in, Object *const out,
     std::shared_ptr<Processor> const parent) {
+  const ObjMat2 *input = Object::Cast<ObjMat2>(in);
+  ObjMat2 *output = Object::Cast<ObjMat2>(out);
+  output->second = input->second;
+  output->first = input->first;
+  output->first_id = input->first_id;
+  output->first_data = input->first_data;
+  output->second_id = input->second_id;
+  output->second_data = input->second_data;
   MYNTEYE_UNUSED(parent)
   return true;
 }
+
+void s1s2Processor::ProcessNativeStream(
+    const Stream &stream, const api::StreamData &data) {
+  std::unique_lock<std::mutex> lk(mtx_left_right_ready_);
+  static api::StreamData left_data, right_data;
+  if (stream == Stream::LEFT) {
+    left_data = data;
+  } else if (stream == Stream::RIGHT) {
+    right_data = data;
+  }
+  if (left_data.img && right_data.img &&
+      left_data.img->frame_id == right_data.img->frame_id) {
+    Process(std::make_shared<ObjMat2>(data_obj(left_data, right_data)));
+  }
+  return;
+}
+
+void s1s2Processor::StartVideoStreaming() {
+  Activate();
+  auto streams = getTargetStreams();
+  for (unsigned int j = 0; j < streams.size(); j++) {
+    auto stream = streams[j].stream;
+    auto callback = streams[j].stream_callback;
+    target_streams_[j].enabled_mode_ = Synthetic::MODE_ON;
+    device_->SetStreamCallback(
+        stream,
+        [this, stream, callback](const device::StreamData &data) {
+          auto &&stream_data = data2api(data);
+          ProcessNativeStream(stream, stream_data);
+          // Need mutex if set callback after start
+          if (callback) {
+            callback(stream_data);
+          }
+        },
+        true);
+  }
+  device_->Start(Source::VIDEO_STREAMING);
+}
+
+void s1s2Processor::StopVideoStreaming() {
+  Deactivate();
+  auto streams = getTargetStreams();
+  for (unsigned int j = 0; j < streams.size(); j++) {
+    auto stream = streams[j].stream;
+    target_streams_[j].enabled_mode_ = Synthetic::MODE_OFF;
+    device_->SetStreamCallback(stream, nullptr);
+  }
+  device_->Stop(Source::VIDEO_STREAMING);
+}
+
+api::StreamData s1s2Processor::GetStreamData(const Stream &stream) {
+  Synthetic::Mode enable_mode = Synthetic::MODE_OFF;
+  auto streams = getTargetStreams();
+  for (auto it_s : streams) {
+    if (it_s.stream == stream) {
+      enable_mode = it_s.enabled_mode_;
+      break;
+    }
+  }
+  if (enable_mode == Synthetic::MODE_ON) {
+    return data2api(device_->GetStreamData(stream));
+  }
+  LOG(ERROR) << "Failed to get device stream data of " << stream
+             << ", unsupported or disabled";
+  LOG(ERROR) << "Make sure you have enable " << stream;
+  return {};
+}
+
+std::vector<api::StreamData> s1s2Processor::GetStreamDatas(
+    const Stream &stream) {
+  Synthetic::Mode enable_mode = Synthetic::MODE_OFF;
+  auto streams = getTargetStreams();
+  for (auto it_s : streams) {
+    if (it_s.stream == stream) {
+      enable_mode = it_s.enabled_mode_;
+      break;
+    }
+  }
+  if (enable_mode == Synthetic::MODE_ON) {
+    std::vector<api::StreamData> datas;
+    for (auto &&data : device_->GetStreamDatas(stream)) {
+      datas.push_back(data2api(data));
+    }
+    return datas;
+  }
+}
+
 MYNTEYE_END_NAMESPACE
@@ -28,16 +28,48 @@ class RootProcessor : public Processor {
  public:
   static const char NAME[];
 
-  explicit RootProcessor(std::int32_t proc_period = 0);
+  explicit RootProcessor(std::shared_ptr<Device> device,
+      std::int32_t proc_period = 0);
   virtual ~RootProcessor();
 
-  std::string Name() override;
+  virtual std::string Name();
 
+  virtual void StartVideoStreaming() = 0;
+  virtual void StopVideoStreaming() = 0;
+  virtual api::StreamData GetStreamData(const Stream &stream) = 0;
+  virtual std::vector<api::StreamData> GetStreamDatas(const Stream &stream) = 0;  // NOLINT
  protected:
+  virtual Object *OnCreateOutput() = 0;
+  virtual bool OnProcess(
+      Object *const in, Object *const out,
+      std::shared_ptr<Processor> const parent) = 0;
+  std::shared_ptr<Device> device_;
+};
+
+class s1s2Processor : public RootProcessor {
+ public:
+  explicit s1s2Processor(std::shared_ptr<Device> device,
+      std::int32_t proc_period = 0);
+  virtual ~s1s2Processor();
+  void StartVideoStreaming();
+  void StopVideoStreaming();
+  api::StreamData GetStreamData(const Stream &stream) override;
+  std::vector<api::StreamData> GetStreamDatas(const Stream &stream) override;  // NOLINT
+ protected:
+  inline Processor::process_type ProcessOutputConnection() override {
+    return Processor::WITHOUT_CLONE;
+  }
+  inline Processor::process_type ProcessInputConnection() override {
+    return Processor::WITHOUT_CLONE;
+  }
   Object *OnCreateOutput() override;
   bool OnProcess(
       Object *const in, Object *const out,
       std::shared_ptr<Processor> const parent) override;
+ private:
+  void ProcessNativeStream(
+      const Stream &stream, const api::StreamData &data);
+  std::mutex mtx_left_right_ready_;
 };
 
 MYNTEYE_END_NAMESPACE
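RootProcessor is now an abstract root node that owns the Device handle, and s1s2Processor is its concrete implementation for this pipeline: it registers the device stream callbacks, pairs LEFT/RIGHT frames by frame_id, and feeds them into the processing graph. A hedged construction sketch, mirroring what Synthetic::InitProcessors() does later in this diff (device is assumed to come from api_->device()):

    auto root = std::make_shared<s1s2Processor>(device, /*proc_period=*/0);
    root->StartVideoStreaming();                      // sets device callbacks, starts VIDEO_STREAMING
    auto left = root->GetStreamData(Stream::LEFT);    // native data, converted via data2api()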
@@ -36,6 +36,7 @@
 #include "mynteye/api/processor/rectify_processor.h"
 #endif
 #include "mynteye/device/device.h"
+#include "mynteye/api/data_tools.h"
 
 #define RECTIFY_PROC_PERIOD 0
 #define DISPARITY_PROC_PERIOD 0
@@ -46,74 +47,6 @@
 
 MYNTEYE_BEGIN_NAMESPACE
 
-namespace {
-
-cv::Mat frame2mat(const std::shared_ptr<device::Frame> &frame) {
-  if (frame->format() == Format::YUYV) {
-    cv::Mat img(frame->height(), frame->width(), CV_8UC2, frame->data());
-    cv::cvtColor(img, img, cv::COLOR_YUV2BGR_YUY2);
-    return img;
-  } else if (frame->format() == Format::BGR888) {
-    cv::Mat img(frame->height(), frame->width(), CV_8UC3, frame->data());
-    return img;
-  } else {  // Format::GRAY
-    return cv::Mat(frame->height(), frame->width(), CV_8UC1, frame->data());
-  }
-}
-
-api::StreamData data2api(const device::StreamData &data) {
-  return {data.img, frame2mat(data.frame), data.frame, data.frame_id};
-}
-
-void process_childs(
-    const std::shared_ptr<Processor> &proc, const std::string &name,
-    const Object &obj) {
-  auto &&processor = find_processor<Processor>(proc, name);
-  for (auto child : processor->GetChilds()) {
-    child->Process(obj);
-  }
-}
-
-// ObjMat/ObjMat2 > api::StreamData
-
-api::StreamData obj_data_first(const ObjMat2 *obj) {
-  return {obj->first_data, obj->first, nullptr, obj->first_id};
-}
-
-api::StreamData obj_data_second(const ObjMat2 *obj) {
-  return {obj->second_data, obj->second, nullptr, obj->second_id};
-}
-
-api::StreamData obj_data(const ObjMat *obj) {
-  return {obj->data, obj->value, nullptr, obj->id};
-}
-
-api::StreamData obj_data_first(const std::shared_ptr<ObjMat2> &obj) {
-  return {obj->first_data, obj->first, nullptr, obj->first_id};
-}
-
-api::StreamData obj_data_second(const std::shared_ptr<ObjMat2> &obj) {
-  return {obj->second_data, obj->second, nullptr, obj->second_id};
-}
-
-api::StreamData obj_data(const std::shared_ptr<ObjMat> &obj) {
-  return {obj->data, obj->value, nullptr, obj->id};
-}
-
-// api::StreamData > ObjMat/ObjMat2
-
-ObjMat data_obj(const api::StreamData &data) {
-  return ObjMat{data.frame, data.frame_id, data.img};
-}
-
-ObjMat2 data_obj(const api::StreamData &first, const api::StreamData &second) {
-  return ObjMat2{
-      first.frame, first.frame_id, first.img,
-      second.frame, second.frame_id, second.img};
-}
-
-} // namespace
-
 void Synthetic::InitCalibInfo() {
   if (calib_model_ == CalibrationModel::PINHOLE) {
     LOG(INFO) << "camera calib model: pinhole";
@@ -149,11 +82,11 @@ Synthetic::Synthetic(API *api, CalibrationModel calib_model)
   CHECK_NOTNULL(api_);
   InitCalibInfo();
   InitProcessors();
-  InitStreamSupports();
 }
 
 Synthetic::~Synthetic() {
   VLOG(2) << __func__;
+  processors_.clear();
   if (processor_) {
     processor_->Deactivate(true);
     processor_ = nullptr;
@@ -171,19 +104,18 @@ void Synthetic::NotifyImageParamsChanged() {
     extr_ = std::make_shared<Extrinsics>(
         api_->GetExtrinsics(Stream::LEFT, Stream::RIGHT));
   }
-  if (calib_model_ == CalibrationModel::PINHOLE) {
-    auto &&processor = find_processor<RectifyProcessorOCV>(processor_);
-    if (processor) processor->ReloadImageParams(intr_left_, intr_right_, extr_);
+  auto processor = getProcessorWithStream(Stream::LEFT_RECTIFIED);
+  if (processor && calib_model_ == CalibrationModel::PINHOLE) {
+    auto proc = static_cast<RectifyProcessorOCV*>(&(*processor));
+    proc->ReloadImageParams(intr_left_, intr_right_, extr_);
 #ifdef WITH_CAM_MODELS
-  } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
-    auto &&processor = find_processor<RectifyProcessor>(processor_);
-    if (processor) processor->ReloadImageParams(intr_left_, intr_right_, extr_);
+  } else if (processor && calib_model_ == CalibrationModel::KANNALA_BRANDT) {
+    auto proc = static_cast<RectifyProcessor*>(&(*processor));
+    proc->ReloadImageParams(intr_left_, intr_right_, extr_);
 #endif
   } else {
-    LOG(ERROR) << "Unknow calib model type in device: "
-               << calib_model_ << ", use default pinhole model";
-    auto &&processor = find_processor<RectifyProcessorOCV>(processor_);
-    if (processor) processor->ReloadImageParams(intr_left_, intr_right_, extr_);
+    LOG(ERROR) << "Unknow calib model type in device" << std::endl;
   }
 }
@@ -242,14 +174,6 @@ bool Synthetic::Supports(const Stream &stream) const {
   return checkControlDateWithStream(stream);
 }
 
-Synthetic::mode_t Synthetic::SupportsMode(const Stream &stream) const {
-  if (checkControlDateWithStream(stream)) {
-    auto data = getControlDateWithStream(stream);
-    return data.support_mode_;
-  }
-  return MODE_LAST;
-}
-
 void Synthetic::EnableStreamData(
     const Stream &stream, stream_switch_callback_t callback,
     bool try_tag) {
@@ -257,14 +181,17 @@ void Synthetic::EnableStreamData(
   auto processor = getProcessorWithStream(stream);
   iterate_processor_CtoP_before(processor,
       [callback, try_tag](std::shared_ptr<Processor> proce){
+        if (proce->Name() == "RootProcessor") {
+          return;
+        }
         auto streams = proce->getTargetStreams();
         int act_tag = 0;
         for (unsigned int i = 0; i < proce->getStreamsSum() ; i++) {
-          if (proce->target_streams_[i].enabled_mode_ == MODE_LAST) {
+          if (proce->target_streams_[i].enabled_mode_ == MODE_OFF) {
             callback(proce->target_streams_[i].stream);
             if (!try_tag) {
               act_tag++;
-              proce->target_streams_[i].enabled_mode_ = MODE_SYNTHETIC;
+              proce->target_streams_[i].enabled_mode_ = MODE_ON;
             }
           }
         }
@@ -280,14 +207,17 @@ void Synthetic::DisableStreamData(
   auto processor = getProcessorWithStream(stream);
   iterate_processor_PtoC_before(processor,
       [callback, try_tag](std::shared_ptr<Processor> proce){
+        if (proce->Name() == "RootProcessor") {
+          return;
+        }
         auto streams = proce->getTargetStreams();
         int act_tag = 0;
         for (unsigned int i = 0; i < proce->getStreamsSum() ; i++) {
-          if (proce->target_streams_[i].enabled_mode_ == MODE_SYNTHETIC) {
+          if (proce->target_streams_[i].enabled_mode_ == MODE_ON) {
             callback(proce->target_streams_[i].stream);
             if (!try_tag) {
               act_tag++;
-              proce->target_streams_[i].enabled_mode_ = MODE_LAST;
+              proce->target_streams_[i].enabled_mode_ = MODE_OFF;
             }
           }
         }
@@ -315,8 +245,7 @@ void Synthetic::DisableStreamData(const Stream &stream) {
 bool Synthetic::IsStreamDataEnabled(const Stream &stream) const {
   if (checkControlDateWithStream(stream)) {
     auto data = getControlDateWithStream(stream);
-    return data.enabled_mode_ == MODE_SYNTHETIC ||
-           data.enabled_mode_ == MODE_NATIVE;
+    return data.enabled_mode_ == MODE_ON;
   }
   return false;
 }
@@ -344,42 +273,11 @@ bool Synthetic::HasStreamCallback(const Stream &stream) const {
 }
 
 void Synthetic::StartVideoStreaming() {
-  auto &&device = api_->device();
-  for (unsigned int i =0; i< processors_.size(); i++) {
-    auto streams = processors_[i]->getTargetStreams();
-    for (unsigned int j =0; j< streams.size(); j++) {
-      if (processors_[i]->target_streams_[j].support_mode_ == MODE_NATIVE) {
-        auto stream = processors_[i]->target_streams_[j].stream;
-        device->SetStreamCallback(
-            stream,
-            [this, stream](const device::StreamData &data) {
-              auto &&stream_data = data2api(data);
-              ProcessNativeStream(stream, stream_data);
-              // Need mutex if set callback after start
-              if (HasStreamCallback(stream)) {
-                auto data = getControlDateWithStream(stream);
-                data.stream_callback(stream_data);
-              }
-            },
-            true);
-      }
-    }
-  }
-  device->Start(Source::VIDEO_STREAMING);
+  processor_->StartVideoStreaming();
 }
 
 void Synthetic::StopVideoStreaming() {
-  auto &&device = api_->device();
-  for (unsigned int i =0; i< processors_.size(); i++) {
-    auto streams = processors_[i]->getTargetStreams();
-    for (unsigned int j =0; j< streams.size(); j++) {
-      if (processors_[i]->target_streams_[j].support_mode_ == MODE_NATIVE) {
-        auto stream = processors_[i]->target_streams_[j].stream;
-        device->SetStreamCallback(stream, nullptr);
-      }
-    }
-  }
-  device->Stop(Source::VIDEO_STREAMING);
+  processor_->StopVideoStreaming();
 }
 
 void Synthetic::WaitForStreams() {
@@ -387,69 +285,11 @@ void Synthetic::WaitForStreams() {
 }
 
 api::StreamData Synthetic::GetStreamData(const Stream &stream) {
-  auto &&mode = GetStreamEnabledMode(stream);
-  if (mode == MODE_NATIVE) {
-    auto &&device = api_->device();
-    return data2api(device->GetStreamData(stream));
-  } else if (mode == MODE_SYNTHETIC) {
-    auto processor = getProcessorWithStream(stream);
-    auto sum = processor->getStreamsSum();
-    auto &&out = processor->GetOutput();
-    static std::shared_ptr<ObjMat2> output = nullptr;
-    if (sum == 1) {
-      if (out != nullptr) {
-        auto &&output = Object::Cast<ObjMat>(out);
-        if (output != nullptr) {
-          return obj_data(output);
-        }
-        VLOG(2) << "Rectify not ready now";
-      }
-    } else if (sum == 2) {
-      if (out != nullptr) {
-        output = Object::Cast<ObjMat2>(out);
-      }
-      auto streams = processor->getTargetStreams();
-      if (output != nullptr) {
-        int num = 0;
-        for (auto it : streams) {
-          if (it.stream == stream) {
-            if (num == 1) {
-              return obj_data_first(output);
-            } else {
-              return obj_data_second(output);
-            }
-          }
-          num++;
-        }
-      }
-      VLOG(2) << "Rectify not ready now";
-    } else {
-      LOG(ERROR) << "error: invalid sum!";
-    }
-    return {};  // frame.empty() == true
-  } else {
-    LOG(ERROR) << "Failed to get stream data of " << stream
-               << ", unsupported or disabled";
-    return {};  // frame.empty() == true
-  }
+  return getProcessorWithStream(stream)->GetStreamData(stream);
 }
 
 std::vector<api::StreamData> Synthetic::GetStreamDatas(const Stream &stream) {
-  auto &&mode = GetStreamEnabledMode(stream);
-  if (mode == MODE_NATIVE) {
-    auto &&device = api_->device();
-    std::vector<api::StreamData> datas;
-    for (auto &&data : device->GetStreamDatas(stream)) {
-      datas.push_back(data2api(data));
-    }
-    return datas;
-  } else if (mode == MODE_SYNTHETIC) {
-    return {GetStreamData(stream)};
-  } else {
-    LOG(ERROR) << "Failed to get stream data of " << stream
-               << ", unsupported or disabled";
-  }
-  return {};
+  return getProcessorWithStream(stream)->GetStreamDatas(stream);
 }
 
 void Synthetic::SetPlugin(std::shared_ptr<Plugin> plugin) {
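Synthetic no longer distinguishes native from synthetic streams here; it simply delegates to whichever processor owns the stream (the s1s2Processor root for LEFT/RIGHT, the rectify/disparity/points/depth processors for derived streams). Application code keeps the same shape; a hedged sketch assuming the usual enable-start-poll flow from the SDK samples, which is not part of this diff:

    api->EnableStreamData(Stream::DEPTH);
    api->Start(Source::VIDEO_STREAMING);
    api->WaitForStreams();
    auto depth = api->GetStreamData(Stream::DEPTH);  // now routed through the owning processor
    if (!depth.frame.empty()) { /* consume the cv::Mat */ }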
@@ -460,53 +300,12 @@ bool Synthetic::HasPlugin() const {
   return plugin_ != nullptr;
 }
 
-void Synthetic::InitStreamSupports() {
-  auto &&device = api_->device();
-  if (device->Supports(Stream::LEFT) && device->Supports(Stream::RIGHT)) {
-    auto processor = getProcessorWithStream(Stream::LEFT);
-    for (unsigned int i = 0; i< processor->target_streams_.size(); i++) {
-      if (processor->target_streams_[i].stream == Stream::LEFT) {
-        processor->target_streams_[i].support_mode_ = MODE_NATIVE;
-      }
-      if (processor->target_streams_[i].stream == Stream::RIGHT) {
-        processor->target_streams_[i].support_mode_ = MODE_NATIVE;
-      }
-    }
-
-    std::vector<Stream> stream_chain{
-        Stream::LEFT_RECTIFIED, Stream::RIGHT_RECTIFIED,
-        Stream::DISPARITY, Stream::DISPARITY_NORMALIZED,
-        Stream::POINTS, Stream::DEPTH};
-    for (auto &&stream : stream_chain) {
-      auto processor = getProcessorWithStream(stream);
-      for (unsigned int i = 0; i< processor->target_streams_.size(); i++) {
-        if (processor->target_streams_[i].stream == stream) {
-          if (device->Supports(stream)) {
-            processor->target_streams_[i].support_mode_ = MODE_NATIVE;
-            processor->target_streams_[i].enabled_mode_ = MODE_NATIVE;
-          } else {
-            processor->target_streams_[i].support_mode_ = MODE_SYNTHETIC;
-          }
-        }
-      }
-    }
-  }
-}
-
 Synthetic::mode_t Synthetic::GetStreamEnabledMode(const Stream &stream) const {
   if (checkControlDateWithStream(stream)) {
     auto data = getControlDateWithStream(stream);
     return data.enabled_mode_;
   }
-  return MODE_LAST;
-}
-
-bool Synthetic::IsStreamEnabledNative(const Stream &stream) const {
-  return GetStreamEnabledMode(stream) == MODE_NATIVE;
-}
-
-bool Synthetic::IsStreamEnabledSynthetic(const Stream &stream) const {
-  return GetStreamEnabledMode(stream) == MODE_SYNTHETIC;
+  return MODE_OFF;
 }
 
 void Synthetic::InitProcessors() {
@@ -522,7 +321,7 @@ void Synthetic::InitProcessors() {
       DISPARITY_NORM_PROC_PERIOD);
 
   auto root_processor =
-      std::make_shared<RootProcessor>(ROOT_PROC_PERIOD);
+      std::make_shared<s1s2Processor>(api_->device(), ROOT_PROC_PERIOD);
 
   if (calib_model_ == CalibrationModel::PINHOLE) {
     // PINHOLE
@@ -566,22 +365,22 @@ void Synthetic::InitProcessors() {
     return;
   }
 
+  root_processor->addTargetStreams(
+      {Stream::LEFT, Mode::MODE_OFF, nullptr});
+  root_processor->addTargetStreams(
+      {Stream::RIGHT, Mode::MODE_OFF, nullptr});
   rectify_processor->addTargetStreams(
-      {Stream::LEFT_RECTIFIED, Mode::MODE_LAST, Mode::MODE_LAST, nullptr});
+      {Stream::LEFT_RECTIFIED, Mode::MODE_OFF, nullptr});
   rectify_processor->addTargetStreams(
-      {Stream::RIGHT_RECTIFIED, Mode::MODE_LAST, Mode::MODE_LAST, nullptr});
+      {Stream::RIGHT_RECTIFIED, Mode::MODE_OFF, nullptr});
   disparity_processor->addTargetStreams(
-      {Stream::DISPARITY, Mode::MODE_LAST, Mode::MODE_LAST, nullptr});
+      {Stream::DISPARITY, Mode::MODE_OFF, nullptr});
   disparitynormalized_processor->addTargetStreams(
-      {Stream::DISPARITY_NORMALIZED, Mode::MODE_LAST, Mode::MODE_LAST, nullptr});
+      {Stream::DISPARITY_NORMALIZED, Mode::MODE_OFF, nullptr});
   points_processor->addTargetStreams(
-      {Stream::POINTS, Mode::MODE_LAST, Mode::MODE_LAST, nullptr});
+      {Stream::POINTS, Mode::MODE_OFF, nullptr});
   depth_processor->addTargetStreams(
-      {Stream::DEPTH, Mode::MODE_LAST, Mode::MODE_LAST, nullptr});
+      {Stream::DEPTH, Mode::MODE_OFF, nullptr});
-  root_processor->addTargetStreams(
-      {Stream::LEFT, Mode::MODE_NATIVE, Mode::MODE_NATIVE, nullptr});
-  root_processor->addTargetStreams(
-      {Stream::RIGHT, Mode::MODE_NATIVE, Mode::MODE_NATIVE, nullptr});
 
   processors_.push_back(root_processor);
   processors_.push_back(rectify_processor);
@@ -590,6 +389,8 @@ void Synthetic::InitProcessors() {
   processors_.push_back(points_processor);
   processors_.push_back(depth_processor);
   using namespace std::placeholders;  // NOLINT
+  root_processor->SetProcessCallback(
+      std::bind(&Synthetic::OnDeviceProcess, this, _1, _2, _3));
   rectify_processor->SetProcessCallback(
       std::bind(&Synthetic::OnRectifyProcess, this, _1, _2, _3));
   disparity_processor->SetProcessCallback(
@@ -601,6 +402,8 @@ void Synthetic::InitProcessors() {
   depth_processor->SetProcessCallback(
       std::bind(&Synthetic::OnDepthProcess, this, _1, _2, _3));
 
+  root_processor->SetPostProcessCallback(
+      std::bind(&Synthetic::OnDevicePostProcess, this, _1));
   rectify_processor->SetPostProcessCallback(
       std::bind(&Synthetic::OnRectifyPostProcess, this, _1));
   disparity_processor->SetPostProcessCallback(
@@ -615,100 +418,12 @@ void Synthetic::InitProcessors() {
   processor_ = root_processor;
 }
 
-void Synthetic::ProcessNativeStream(
-    const Stream &stream, const api::StreamData &data) {
-  NotifyStreamData(stream, data);
-  if (stream == Stream::LEFT || stream == Stream::RIGHT) {
-    std::unique_lock<std::mutex> lk(mtx_left_right_ready_);
-    static api::StreamData left_data, right_data;
-    if (stream == Stream::LEFT) {
-      left_data = data;
-    } else if (stream == Stream::RIGHT) {
-      right_data = data;
-    }
-    if (left_data.img && right_data.img &&
-        left_data.img->frame_id == right_data.img->frame_id) {
-      std::shared_ptr<Processor> processor = nullptr;
-      if (calib_model_ == CalibrationModel::PINHOLE) {
-        processor = find_processor<RectifyProcessorOCV>(processor_);
-#ifdef WITH_CAM_MODELS
-      } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
-        processor = find_processor<RectifyProcessor>(processor_);
-#endif
-      } else {
-        LOG(ERROR) << "Unknow calib model type in device: "
-                   << calib_model_ << ", use default pinhole model";
-        processor = find_processor<RectifyProcessorOCV>(processor_);
-      }
-      processor->Process(data_obj(left_data, right_data));
-    }
-    return;
-  }
-
-  if (stream == Stream::LEFT_RECTIFIED || stream == Stream::RIGHT_RECTIFIED) {
-    static api::StreamData left_rect_data, right_rect_data;
-    if (stream == Stream::LEFT_RECTIFIED) {
-      left_rect_data = data;
-    } else if (stream == Stream::RIGHT_RECTIFIED) {
-      right_rect_data = data;
-    }
-    if (left_rect_data.img && right_rect_data.img &&
-        left_rect_data.img->frame_id == right_rect_data.img->frame_id) {
-      std::string name = RectifyProcessorOCV::NAME;
-      if (calib_model_ == CalibrationModel::PINHOLE) {
-        name = RectifyProcessorOCV::NAME;
-#ifdef WITH_CAM_MODELS
-      } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
-        name = RectifyProcessor::NAME;
-#endif
-      }
-      process_childs(processor_, name,
-          data_obj(left_rect_data, right_rect_data));
-    }
-    return;
-  }
-
-  switch (stream) {
-    case Stream::DISPARITY: {
-      process_childs(processor_, DisparityProcessor::NAME, data_obj(data));
-    } break;
-    case Stream::DISPARITY_NORMALIZED: {
-      process_childs(processor_, DisparityNormalizedProcessor::NAME,
-          data_obj(data));
-    } break;
-    case Stream::POINTS: {
-      if (calib_model_ == CalibrationModel::PINHOLE) {
-        // PINHOLE
-        process_childs(processor_, PointsProcessorOCV::NAME, data_obj(data));
-#ifdef WITH_CAM_MODELS
-      } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
-        // KANNALA_BRANDT
-        process_childs(processor_, PointsProcessor::NAME, data_obj(data));
-#endif
-      } else {
-        // UNKNOW
-        LOG(ERROR) << "Unknow calib model type in device: "
-                   << calib_model_;
-      }
-    } break;
-    case Stream::DEPTH: {
-      if (calib_model_ == CalibrationModel::PINHOLE) {
-        // PINHOLE
-        process_childs(processor_, DepthProcessorOCV::NAME, data_obj(data));
-#ifdef WITH_CAM_MODELS
-      } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
-        // KANNALA_BRANDT
-        process_childs(processor_, DepthProcessor::NAME, data_obj(data));
-#endif
-      } else {
-        // UNKNOW
-        LOG(ERROR) << "Unknow calib model type in device: "
-                   << calib_model_;
-      }
-    } break;
-    default:
-      break;
-  }
+bool Synthetic::OnDeviceProcess(
+    Object *const in, Object *const out,
+    std::shared_ptr<Processor> const parent) {
+  MYNTEYE_UNUSED(parent)
+  return GetStreamEnabledMode(Stream::LEFT) != MODE_ON
+      || GetStreamEnabledMode(Stream::RIGHT) != MODE_ON;
 }
 
 bool Synthetic::OnRectifyProcess(
@@ -718,8 +433,8 @@ bool Synthetic::OnRectifyProcess(
   if (plugin_ && plugin_->OnRectifyProcess(in, out)) {
     return true;
   }
-  return GetStreamEnabledMode(Stream::LEFT_RECTIFIED) != MODE_SYNTHETIC;
-  // && GetStreamEnabledMode(Stream::RIGHT_RECTIFIED) != MODE_SYNTHETIC
+  return GetStreamEnabledMode(Stream::LEFT_RECTIFIED) != MODE_ON
+      && GetStreamEnabledMode(Stream::RIGHT_RECTIFIED) != MODE_ON;
 }
 
 bool Synthetic::OnDisparityProcess(
@@ -729,7 +444,7 @@ bool Synthetic::OnDisparityProcess(
   if (plugin_ && plugin_->OnDisparityProcess(in, out)) {
     return true;
   }
-  return GetStreamEnabledMode(Stream::DISPARITY) != MODE_SYNTHETIC;
+  return GetStreamEnabledMode(Stream::DISPARITY) != MODE_ON;
 }
 
 bool Synthetic::OnDisparityNormalizedProcess(
@@ -739,7 +454,7 @@ bool Synthetic::OnDisparityNormalizedProcess(
   if (plugin_ && plugin_->OnDisparityNormalizedProcess(in, out)) {
     return true;
   }
-  return GetStreamEnabledMode(Stream::DISPARITY_NORMALIZED) != MODE_SYNTHETIC;
+  return GetStreamEnabledMode(Stream::DISPARITY_NORMALIZED) != MODE_ON;
 }
 
 bool Synthetic::OnPointsProcess(
@@ -749,7 +464,7 @@ bool Synthetic::OnPointsProcess(
   if (plugin_ && plugin_->OnPointsProcess(in, out)) {
     return true;
   }
-  return GetStreamEnabledMode(Stream::POINTS) != MODE_SYNTHETIC;
+  return GetStreamEnabledMode(Stream::POINTS) != MODE_ON;
 }
 
 bool Synthetic::OnDepthProcess(
@ -759,7 +474,22 @@ bool Synthetic::OnDepthProcess(
|
||||||
if (plugin_ && plugin_->OnDepthProcess(in, out)) {
|
if (plugin_ && plugin_->OnDepthProcess(in, out)) {
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
return GetStreamEnabledMode(Stream::DEPTH) != MODE_SYNTHETIC;
|
return GetStreamEnabledMode(Stream::DEPTH) != MODE_ON;
|
||||||
|
}
|
||||||
|
|
||||||
|
void Synthetic::OnDevicePostProcess(Object *const out) {
|
||||||
|
const ObjMat2 *output = Object::Cast<ObjMat2>(out);
|
||||||
|
NotifyStreamData(Stream::LEFT, obj_data_first(output));
|
||||||
|
NotifyStreamData(Stream::RIGHT, obj_data_second(output));
|
||||||
|
if (HasStreamCallback(Stream::LEFT)) {
|
||||||
|
auto data = getControlDateWithStream(Stream::LEFT);
|
||||||
|
data.stream_callback(obj_data_first(output));
|
||||||
|
}
|
||||||
|
if (HasStreamCallback(Stream::RIGHT)) {
|
||||||
|
auto data = getControlDateWithStream(Stream::RIGHT);
|
||||||
|
if (data.stream_callback)
|
||||||
|
data.stream_callback(obj_data_second(output));
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
void Synthetic::OnRectifyPostProcess(Object *const out) {
|
void Synthetic::OnRectifyPostProcess(Object *const out) {
|
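The hunks above collapse the old MODE_NATIVE / MODE_SYNTHETIC / MODE_LAST distinction into a plain on/off switch (MODE_ON / MODE_OFF) and add OnDevicePostProcess() so the root device node forwards left/right frames to the registered stream callbacks. A rough usage sketch of where those callbacks come from, assuming the API wiring used by the ROS nodelet later in this diff (API::Create is taken from the SDK samples and its exact signature is an assumption; includes omitted):

    auto api = API::Create(argc, argv);   // assumed factory, as in the samples
    api->EnableStreamData(Stream::LEFT);  // switches the stream to MODE_ON
    api->SetStreamCallback(Stream::LEFT, [](const api::StreamData &data) {
      // OnDevicePostProcess() above hands the unpacked left frame to this callback.
    });
    api->Start(Source::VIDEO_STREAMING);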
@@ -29,6 +29,7 @@ MYNTEYE_BEGIN_NAMESPACE
 class API;
 class Plugin;
 class Processor;
+class RootProcessor;
 
 struct Object;
 
@@ -40,14 +41,12 @@ class Synthetic {
   using stream_switch_callback_t = API::stream_switch_callback_t;
 
   typedef enum Mode {
-    MODE_NATIVE,     // Native stream
-    MODE_SYNTHETIC,  // Synthetic stream
-    MODE_LAST        // Unsupported
+    MODE_ON,   // On
+    MODE_OFF   // Off
   } mode_t;
 
   struct stream_control_t {
     Stream stream;
-    mode_t support_mode_;
     mode_t enabled_mode_;
     stream_callback_t stream_callback;
   };
@@ -60,7 +59,6 @@ class Synthetic {
   void NotifyImageParamsChanged();
 
   bool Supports(const Stream &stream) const;
-  mode_t SupportsMode(const Stream &stream) const;
 
   void EnableStreamData(const Stream &stream);
   void DisableStreamData(const Stream &stream);
@@ -96,11 +94,8 @@ class Synthetic {
 
  private:
   void InitCalibInfo();
-  void InitStreamSupports();
 
   mode_t GetStreamEnabledMode(const Stream &stream) const;
-  bool IsStreamEnabledNative(const Stream &stream) const;
-  bool IsStreamEnabledSynthetic(const Stream &stream) const;
 
   void EnableStreamData(const Stream &stream, std::uint32_t depth);
   void DisableStreamData(const Stream &stream, std::uint32_t depth);
@@ -112,8 +107,9 @@ class Synthetic {
   template <class T>
   bool DeactivateProcessor(bool tree = false);
 
-  void ProcessNativeStream(const Stream &stream, const api::StreamData &data);
+  bool OnDeviceProcess(
+      Object *const in, Object *const out,
+      std::shared_ptr<Processor> const parent);
   bool OnRectifyProcess(
       Object *const in, Object *const out,
       std::shared_ptr<Processor> const parent);
@@ -130,6 +126,7 @@ class Synthetic {
       Object *const in, Object *const out,
       std::shared_ptr<Processor> const parent);
 
+  void OnDevicePostProcess(Object *const out);
   void OnRectifyPostProcess(Object *const out);
   void OnDisparityPostProcess(Object *const out);
   void OnDisparityNormalizedPostProcess(Object *const out);
@@ -140,25 +137,22 @@ class Synthetic {
 
   API *api_;
 
-  std::shared_ptr<Processor> processor_;
+  std::shared_ptr<RootProcessor> processor_;
+  std::vector<std::shared_ptr<Processor>> processors_;
   std::shared_ptr<Plugin> plugin_;
 
   CalibrationModel calib_model_;
-  std::mutex mtx_left_right_ready_;
 
   std::shared_ptr<IntrinsicsBase> intr_left_;
   std::shared_ptr<IntrinsicsBase> intr_right_;
   std::shared_ptr<Extrinsics> extr_;
   bool calib_default_tag_;
 
-  std::vector<std::shared_ptr<Processor>> processors_;
-
   stream_data_listener_t stream_data_listener_;
 };
 
 class SyntheticProcessorPart {
- private:
+ protected:
   inline std::vector<Synthetic::stream_control_t> getTargetStreams() {
     return target_streams_;
   }
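With this header change a stream is simply produced (MODE_ON) or not (MODE_OFF), the per-stream record keeps only the enabled mode plus the user callback, and the processing graph hangs off a RootProcessor. A minimal illustration of the reduced state, using only names visible in the hunk (direct field access is for illustration only; the real bookkeeping is internal to Synthetic):

    Synthetic::stream_control_t ctr;
    ctr.stream = Stream::DISPARITY;
    ctr.enabled_mode_ = Synthetic::MODE_OFF;  // not produced until enabled
    ctr.stream_callback = nullptr;
    // EnableStreamData(Stream::DISPARITY) is expected to flip this to MODE_ON
    // and activate the processors that the stream depends on.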
@@ -113,7 +113,7 @@ STATUS_UNIT checkUnit(const std::string& sdkv,
 }
 
 bool checkFirmwareVersion(const std::shared_ptr<API> api) {
-  auto sdkv = api->GetInfo(Info::SDK_VERSION);
+  auto sdkv = api->GetSDKVersion();
   auto devn = api->GetInfo(Info::DEVICE_NAME);
   auto firmv = api->GetInfo(Info::FIRMWARE_VERSION);
 
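Since SDK_VERSION no longer lives in the Info enum, the version string comes from the dedicated accessor instead. A small usage sketch (API::Create as in the SDK samples is an assumption; GetSDKVersion and GetInfo are the calls shown above):

    auto api = API::Create(argc, argv);  // assumed factory
    LOG(INFO) << "SDK version: " << api->GetSDKVersion();
    LOG(INFO) << "Device name: " << api->GetInfo(Info::DEVICE_NAME);
    LOG(INFO) << "Firmware version: " << api->GetInfo(Info::FIRMWARE_VERSION);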
@@ -26,7 +26,6 @@
 #include "mynteye/device/motions.h"
 #include "mynteye/device/standard/device_s.h"
 #include "mynteye/device/standard2/device_s2.h"
-#include "mynteye/device/standard2/device_s210a.h"
 #include "mynteye/device/streams.h"
 #include "mynteye/device/types.h"
 #include "mynteye/util/strings.h"
@@ -104,7 +103,6 @@ std::shared_ptr<Device> Device::Create(
   if (name == "MYNTEYE") {
     return std::make_shared<StandardDevice>(device);
   } else if (strings::starts_with(name, "MYNT-EYE-")) {
-    // TODO(JohnZhao): Create different device by name, such as MYNT-EYE-S1000
     std::string model_s = name.substr(9, 5);
     VLOG(2) << "MYNE EYE Model: " << model_s;
     DeviceModel model(model_s);
@@ -113,9 +111,9 @@ std::shared_ptr<Device> Device::Create(
       return std::make_shared<StandardDevice>(device);
     } else if (model.generation == '2') {
       if (model.custom_code == '0') {
-        return std::make_shared<Standard2Device>(device);
+        return std::make_shared<Standard2Device>(Model::STANDARD2, device);
       } else if (model.custom_code == 'A') {
-        return std::make_shared<Standard210aDevice>(device);
+        return std::make_shared<Standard2Device>(Model::STANDARD210A, device);
       } else {
         LOG(FATAL) << "No such custom code now";
       }
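Both generation-2 custom codes now map onto the single Standard2Device class and differ only in the Model tag that is forwarded to the adapters. A condensed sketch of the resulting dispatch (uvc_dev is a placeholder for the opened uvc::device handle; everything else is taken from the hunk above):

    std::shared_ptr<Device> dev;
    if (model.custom_code == '0') {
      dev = std::make_shared<Standard2Device>(Model::STANDARD2, uvc_dev);
    } else if (model.custom_code == 'A') {
      dev = std::make_shared<Standard2Device>(Model::STANDARD210A, uvc_dev);
    } else {
      LOG(FATAL) << "No such custom code now";
    }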
@@ -90,8 +90,8 @@ void unpack_imu_res_packet(const std::uint8_t *data, ImuResPacket *res) {
 
 }  // namespace
 
-Standard2ChannelsAdapter::Standard2ChannelsAdapter()
-  : ChannelsAdapter(Model::STANDARD2) {
+Standard2ChannelsAdapter::Standard2ChannelsAdapter(const Model &model)
+  : ChannelsAdapter(model) {
 }
 
 Standard2ChannelsAdapter::~Standard2ChannelsAdapter() {

@@ -25,7 +25,7 @@ MYNTEYE_BEGIN_NAMESPACE
 
 class Standard2ChannelsAdapter : public ChannelsAdapter {
  public:
-  Standard2ChannelsAdapter();
+  explicit Standard2ChannelsAdapter(const Model &model);
   virtual ~Standard2ChannelsAdapter();
 
   std::int32_t GetAccelRangeDefault() override;
@@ -1,121 +0,0 @@
-// Copyright 2018 Slightech Co., Ltd. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-#include "mynteye/device/standard2/channels_adapter_s210a.h"
-
-#include "mynteye/logger.h"
-
-MYNTEYE_BEGIN_NAMESPACE
-
-namespace {
-
-#pragma pack(push, 1)
-struct ImuData {
-  std::uint32_t frame_id;
-  std::uint64_t timestamp;
-  std::uint8_t flag;
-  std::int16_t temperature;
-  std::int16_t accel_or_gyro[3];
-
-  ImuData() = default;
-  explicit ImuData(const std::uint8_t *data) {
-    from_data(data);
-  }
-
-  void from_data(const std::uint8_t *data) {
-    std::uint32_t timestamp_l;
-    std::uint32_t timestamp_h;
-
-    frame_id = (*(data) << 24) | (*(data + 1) << 16) | (*(data + 2) << 8) |
-        *(data + 3);
-    timestamp_h = (*(data + 4) << 24) | (*(data + 5) << 16) |
-        (*(data + 6) << 8) | *(data + 7);
-    timestamp_l = (*(data + 8) << 24) | (*(data + 9) << 16) |
-        (*(data + 10) << 8) | *(data + 11);
-    timestamp = (static_cast<std::uint64_t>(timestamp_h) << 32) | timestamp_l;
-    flag = *(data + 12);
-    temperature = (*(data + 13) << 8) | *(data + 14);
-    accel_or_gyro[0] = (*(data + 15) << 8) | *(data + 16);
-    accel_or_gyro[1] = (*(data + 17) << 8) | *(data + 18);
-    accel_or_gyro[2] = (*(data + 19) << 8) | *(data + 20);
-  }
-};
-#pragma pack(pop)
-
-void unpack_imu_segment(const ImuData &imu, ImuSegment *seg) {
-  seg->frame_id = imu.frame_id;
-  seg->timestamp = imu.timestamp;
-  seg->flag = imu.flag;
-  seg->temperature = imu.temperature;
-  seg->accel[0] = (seg->flag == 1) ? imu.accel_or_gyro[0] : 0;
-  seg->accel[1] = (seg->flag == 1) ? imu.accel_or_gyro[1] : 0;
-  seg->accel[2] = (seg->flag == 1) ? imu.accel_or_gyro[2] : 0;
-  seg->gyro[0] = (seg->flag == 2) ? imu.accel_or_gyro[0] : 0;
-  seg->gyro[1] = (seg->flag == 2) ? imu.accel_or_gyro[1] : 0;
-  seg->gyro[2] = (seg->flag == 2) ? imu.accel_or_gyro[2] : 0;
-}
-
-void unpack_imu_packet(const std::uint8_t *data, ImuPacket *pkg) {
-  std::size_t data_n = sizeof(ImuData);  // 21
-  for (std::size_t i = 0; i < pkg->count; i++) {
-    ImuSegment seg;
-    unpack_imu_segment(ImuData(data + data_n * i), &seg);
-    pkg->segments.push_back(seg);
-  }
-  pkg->serial_number = pkg->segments.back().frame_id;
-}
-
-void unpack_imu_res_packet(const std::uint8_t *data, ImuResPacket *res) {
-  res->header = *data;
-  res->state = *(data + 1);
-  res->size = (*(data + 2) << 8) | *(data + 3);
-
-  std::size_t data_n = sizeof(ImuData);  // 21
-  ImuPacket packet;
-  packet.count = res->size / data_n;
-  unpack_imu_packet(data + 4, &packet);
-  res->packets.push_back(packet);
-  res->checksum = *(data + 4 + res->size);
-}
-
-}  // namespace
-
-Standard210aChannelsAdapter::Standard210aChannelsAdapter()
-  : ChannelsAdapter(Model::STANDARD210A) {
-}
-
-Standard210aChannelsAdapter::~Standard210aChannelsAdapter() {
-}
-
-std::int32_t Standard210aChannelsAdapter::GetAccelRangeDefault() {
-  return 12;
-}
-
-std::vector<std::int32_t> Standard210aChannelsAdapter::GetAccelRangeValues() {
-  return {6, 12, 24, 48};
-}
-
-std::int32_t Standard210aChannelsAdapter::GetGyroRangeDefault() {
-  return 1000;
-}
-
-std::vector<std::int32_t> Standard210aChannelsAdapter::GetGyroRangeValues() {
-  return {250, 500, 1000, 2000, 4000};
-}
-
-void Standard210aChannelsAdapter::GetImuResPacket(
-    const std::uint8_t *data, ImuResPacket *res) {
-  unpack_imu_res_packet(data, res);
-}
-
-MYNTEYE_END_NAMESPACE
@@ -1,42 +0,0 @@
-// Copyright 2018 Slightech Co., Ltd. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-#ifndef MYNTEYE_DEVICE_STANDARD2_CHANNELS_ADAPTER_S210A_H_
-#define MYNTEYE_DEVICE_STANDARD2_CHANNELS_ADAPTER_S210A_H_
-#pragma once
-
-#include <cstdint>
-#include <set>
-#include <vector>
-
-#include "mynteye/device/channel/channels.h"
-
-MYNTEYE_BEGIN_NAMESPACE
-
-class Standard210aChannelsAdapter : public ChannelsAdapter {
- public:
-  Standard210aChannelsAdapter();
-  virtual ~Standard210aChannelsAdapter();
-
-  std::int32_t GetAccelRangeDefault() override;
-  std::vector<std::int32_t> GetAccelRangeValues() override;
-
-  std::int32_t GetGyroRangeDefault() override;
-  std::vector<std::int32_t> GetGyroRangeValues() override;
-
-  void GetImuResPacket(const std::uint8_t *data, ImuResPacket *res) override;
-};
-
-MYNTEYE_END_NAMESPACE
-
-#endif  // MYNTEYE_DEVICE_STANDARD2_CHANNELS_ADAPTER_S210A_H_
@@ -20,11 +20,13 @@
 
 MYNTEYE_BEGIN_NAMESPACE
 
-Standard2Device::Standard2Device(std::shared_ptr<uvc::device> device)
-  : Device(Model::STANDARD2, device,
-        std::make_shared<Standard2StreamsAdapter>(),
-        std::make_shared<Standard2ChannelsAdapter>()) {
+Standard2Device::Standard2Device(const Model &model,
+    std::shared_ptr<uvc::device> device)
+  : Device(model, device,
+        std::make_shared<Standard2StreamsAdapter>(model),
+        std::make_shared<Standard2ChannelsAdapter>(model)) {
   VLOG(2) << __func__;
+  CHECK(model == Model::STANDARD2 || model == Model::STANDARD210A);
 }
 
 Standard2Device::~Standard2Device() {

@@ -24,7 +24,7 @@ MYNTEYE_BEGIN_NAMESPACE
 
 class Standard2Device : public Device {
  public:
-  explicit Standard2Device(std::shared_ptr<uvc::device> device);
+  Standard2Device(const Model &model, std::shared_ptr<uvc::device> device);
   virtual ~Standard2Device();
 
   Capabilities GetKeyStreamCapability() const override;
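The merged constructor makes the model a runtime property that is validated at construction time rather than being baked into separate classes. Illustration only (uvc_dev is again a placeholder for the opened uvc::device handle):

    // Either generation-2 tag is accepted; any other Model trips the CHECK above.
    auto dev = std::make_shared<Standard2Device>(Model::STANDARD210A, uvc_dev);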
@@ -1,45 +0,0 @@
-// Copyright 2018 Slightech Co., Ltd. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-#include "mynteye/device/standard2/device_s210a.h"
-
-#include "mynteye/logger.h"
-#include "mynteye/device/motions.h"
-#include "mynteye/device/standard2/channels_adapter_s210a.h"
-#include "mynteye/device/standard2/streams_adapter_s210a.h"
-
-MYNTEYE_BEGIN_NAMESPACE
-
-Standard210aDevice::Standard210aDevice(std::shared_ptr<uvc::device> device)
-  : Device(Model::STANDARD210A, device,
-        std::make_shared<Standard210aStreamsAdapter>(),
-        std::make_shared<Standard210aChannelsAdapter>()) {
-  VLOG(2) << __func__;
-}
-
-Standard210aDevice::~Standard210aDevice() {
-  VLOG(2) << __func__;
-}
-
-Capabilities Standard210aDevice::GetKeyStreamCapability() const {
-  return Capabilities::STEREO_COLOR;
-}
-
-void Standard210aDevice::OnStereoStreamUpdate() {
-  if (motion_tracking_) {
-    auto &&motions = this->motions();
-    motions->DoMotionTrack();
-  }
-}
-
-MYNTEYE_END_NAMESPACE
@@ -1,37 +0,0 @@
-// Copyright 2018 Slightech Co., Ltd. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-#ifndef MYNTEYE_DEVICE_STANDARD2_DEVICE_S210A_H_
-#define MYNTEYE_DEVICE_STANDARD2_DEVICE_S210A_H_
-#pragma once
-
-#include <memory>
-#include <vector>
-
-#include "mynteye/device/device.h"
-
-MYNTEYE_BEGIN_NAMESPACE
-
-class Standard210aDevice : public Device {
- public:
-  explicit Standard210aDevice(std::shared_ptr<uvc::device> device);
-  virtual ~Standard210aDevice();
-
-  Capabilities GetKeyStreamCapability() const override;
-
-  void OnStereoStreamUpdate() override;
-};
-
-MYNTEYE_END_NAMESPACE
-
-#endif  // MYNTEYE_DEVICE_STANDARD2_DEVICE_S210A_H_
@@ -143,7 +143,58 @@ bool unpack_stereo_img_data(
 
 }  // namespace
 
-Standard2StreamsAdapter::Standard2StreamsAdapter() {
+namespace s210a {
+
+// image pixels
+
+bool unpack_left_img_pixels(
+    const void *data, const StreamRequest &request, Streams::frame_t *frame) {
+  CHECK_NOTNULL(frame);
+  CHECK_EQ(request.format, Format::BGR888);
+  CHECK_EQ(frame->format(), Format::BGR888);
+  auto data_new = reinterpret_cast<const std::uint8_t *>(data);
+  std::size_t n = 3;
+  std::size_t w = frame->width();
+  std::size_t h = frame->height();
+  for (std::size_t i = 0; i < h; i++) {
+    for (std::size_t j = 0; j < w; j++) {
+      frame->data()[(i * w + j) * n] =
+          *(data_new + (2 * i * w + j) * n + 2);
+      frame->data()[(i * w + j) * n + 1] =
+          *(data_new + (2 * i * w + j) * n + 1);
+      frame->data()[(i * w + j) * n + 2] =
+          *(data_new + (2 * i * w + j) * n);
+    }
+  }
+  return true;
+}
+
+bool unpack_right_img_pixels(
+    const void *data, const StreamRequest &request, Streams::frame_t *frame) {
+  CHECK_NOTNULL(frame);
+  CHECK_EQ(request.format, Format::BGR888);
+  CHECK_EQ(frame->format(), Format::BGR888);
+  auto data_new = reinterpret_cast<const std::uint8_t *>(data);
+  std::size_t n = 3;
+  std::size_t w = frame->width();
+  std::size_t h = frame->height();
+  for (std::size_t i = 0; i < h; i++) {
+    for (std::size_t j = 0; j < w; j++) {
+      frame->data()[(i * w + j) * n] =
+          *(data_new + ((2 * i + 1) * w + j) * n + 2);
+      frame->data()[(i * w + j) * n + 1] =
+          *(data_new + ((2 * i + 1) * w + j) * n + 1);
+      frame->data()[(i * w + j) * n + 2] =
+          *(data_new + ((2 * i + 1) * w + j) * n);
+    }
+  }
+  return true;
+}
+
+}  // namespace s210a
+
+Standard2StreamsAdapter::Standard2StreamsAdapter(const Model &model)
+  : model_(model) {
 }
 
 Standard2StreamsAdapter::~Standard2StreamsAdapter() {
@@ -167,10 +218,19 @@ Standard2StreamsAdapter::GetUnpackImgDataMap() {
 
 std::map<Stream, Streams::unpack_img_pixels_t>
 Standard2StreamsAdapter::GetUnpackImgPixelsMap() {
+  switch (model_) {
+    case Model::STANDARD210A:
+      return {
+        {Stream::LEFT, s210a::unpack_left_img_pixels},
+        {Stream::RIGHT, s210a::unpack_right_img_pixels}
+      };
+    case Model::STANDARD2:
+    default:
       return {
         {Stream::LEFT, unpack_left_img_pixels},
         {Stream::RIGHT, unpack_right_img_pixels}
       };
+  }
 }
 
 MYNTEYE_END_NAMESPACE
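The s210a unpackers added above suggest that the S210A delivers both color images in one double-height, interleaved buffer: the left image reads even source rows (2*i), the right image reads odd rows (2*i+1), and the three bytes per pixel are reversed into BGR888 on the way out. A self-contained sketch of that indexing rule (deinterleave_row is a hypothetical helper, not part of the SDK):

    #include <cstddef>
    #include <cstdint>

    // Copy destination row i of one eye out of the row-interleaved source buffer.
    void deinterleave_row(const std::uint8_t *src, std::uint8_t *dst,
                          std::size_t w, std::size_t i, bool right) {
      const std::size_t n = 3;                               // bytes per pixel (BGR888)
      const std::size_t src_row = 2 * i + (right ? 1 : 0);   // even rows = left, odd = right
      for (std::size_t j = 0; j < w; ++j) {
        const std::uint8_t *p = src + (src_row * w + j) * n;
        dst[(i * w + j) * n + 0] = p[2];  // reverse the channel order into B, G, R
        dst[(i * w + j) * n + 1] = p[1];
        dst[(i * w + j) * n + 2] = p[0];
      }
    }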
@@ -25,7 +25,7 @@ MYNTEYE_BEGIN_NAMESPACE
 
 class Standard2StreamsAdapter : public StreamsAdapter {
  public:
-  Standard2StreamsAdapter();
+  explicit Standard2StreamsAdapter(const Model &model);
   virtual ~Standard2StreamsAdapter();
 
   std::vector<Stream> GetKeyStreams() override;
@@ -35,6 +35,9 @@ class Standard2StreamsAdapter : public StreamsAdapter {
   GetUnpackImgDataMap() override;
   std::map<Stream, Streams::unpack_img_pixels_t>
   GetUnpackImgPixelsMap() override;
 
+ private:
+  Model model_;
 };
 
 MYNTEYE_END_NAMESPACE
@@ -1,186 +0,0 @@
-// Copyright 2018 Slightech Co., Ltd. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-#include "mynteye/device/standard2/streams_adapter_s210a.h"
-
-#include <iomanip>
-
-#include "mynteye/logger.h"
-#include "mynteye/device/types.h"
-
-MYNTEYE_BEGIN_NAMESPACE
-
-namespace {
-
-// image info
-
-#pragma pack(push, 1)
-struct ImagePacket {
-  std::uint8_t header;
-  std::uint8_t size;
-  std::uint16_t frame_id;
-  std::uint64_t timestamp;
-  std::uint16_t exposure_time;
-  std::uint8_t checksum;
-
-  ImagePacket() = default;
-  explicit ImagePacket(std::uint8_t *data) {
-    from_data(data);
-  }
-
-  void from_data(std::uint8_t *data) {
-    std::uint32_t timestamp_l;
-    std::uint32_t timestamp_h;
-
-    header = *data;
-    size = *(data + 1);
-    frame_id = (*(data + 2) << 8) | *(data + 3);
-    timestamp_h = (*(data + 4) << 24) | (*(data + 5) << 16) |
-        (*(data + 6) << 8) | *(data + 7);
-    timestamp_l = (*(data + 8) << 24) | (*(data + 9) << 16) |
-        (*(data + 10) << 8) | *(data + 11);
-    timestamp = (static_cast<std::uint64_t>(timestamp_h) << 32) | timestamp_l;
-    exposure_time = (*(data + 12) << 8) | *(data + 13);
-    checksum = *(data + 14);
-  }
-};
-#pragma pack(pop)
-
-// image pixels
-
-bool unpack_left_img_pixels(
-    const void *data, const StreamRequest &request, Streams::frame_t *frame) {
-  CHECK_NOTNULL(frame);
-  CHECK_EQ(request.format, Format::BGR888);
-  CHECK_EQ(frame->format(), Format::BGR888);
-  auto data_new = reinterpret_cast<const std::uint8_t *>(data);
-  std::size_t n = 3;
-  std::size_t w = frame->width();
-  std::size_t h = frame->height();
-  for (std::size_t i = 0; i < h; i++) {
-    for (std::size_t j = 0; j < w; j++) {
-      frame->data()[(i * w + j) * n] =
-          *(data_new + (2 * i * w + j) * n + 2);
-      frame->data()[(i * w + j) * n + 1] =
-          *(data_new + (2 * i * w + j) * n + 1);
-      frame->data()[(i * w + j) * n + 2] =
-          *(data_new + (2 * i * w + j) * n);
-    }
-  }
-  return true;
-}
-
-bool unpack_right_img_pixels(
-    const void *data, const StreamRequest &request, Streams::frame_t *frame) {
-  CHECK_NOTNULL(frame);
-  CHECK_EQ(request.format, Format::BGR888);
-  CHECK_EQ(frame->format(), Format::BGR888);
-  auto data_new = reinterpret_cast<const std::uint8_t *>(data);
-  std::size_t n = 3;
-  std::size_t w = frame->width();
-  std::size_t h = frame->height();
-  for (std::size_t i = 0; i < h; i++) {
-    for (std::size_t j = 0; j < w; j++) {
-      frame->data()[(i * w + j) * n] =
-          *(data_new + ((2 * i + 1) * w + j) * n + 2);
-      frame->data()[(i * w + j) * n + 1] =
-          *(data_new + ((2 * i + 1) * w + j) * n + 1);
-      frame->data()[(i * w + j) * n + 2] =
-          *(data_new + ((2 * i + 1) * w + j) * n);
-    }
-  }
-  return true;
-}
-
-bool unpack_stereo_img_data(
-    const void *data, const StreamRequest &request, ImgData *img) {
-  CHECK_NOTNULL(img);
-
-  auto data_new = reinterpret_cast<const std::uint8_t *>(data);
-  std::size_t data_n =
-      request.width * request.height * bytes_per_pixel(request.format);
-  auto data_end = data_new + data_n;
-
-  std::size_t packet_n = sizeof(ImagePacket);
-  std::vector<std::uint8_t> packet(packet_n);
-  std::reverse_copy(data_end - packet_n, data_end, packet.begin());
-
-  ImagePacket img_packet(packet.data());
-  // LOG(INFO) << "ImagePacket: header=0x" << std::hex <<
-  //     static_cast<int>(img_packet.header)
-  //     << ", size=0x" << std::hex << static_cast<int>(img_packet.size)
-  //     << ", frame_id="<< std::dec << img_packet.frame_id
-  //     << ", timestamp="<< std::dec << img_packet.timestamp
-  //     << ", exposure_time="<< std::dec << img_packet.exposure_time
-  //     << ", checksum=0x" << std::hex << static_cast<int>(img_packet.checksum);
-
-  if (img_packet.header != 0x3B) {
-    VLOG(2) << "Image packet header must be 0x3B, but 0x" << std::hex
-        << std::uppercase << std::setw(2) << std::setfill('0')
-        << static_cast<int>(img_packet.header) << " now";
-    return false;
-  }
-
-  std::uint8_t checksum = 0;
-  for (std::size_t i = 2, n = packet_n - 2; i <= n; i++) {  // content: [2,9]
-    checksum = (checksum ^ packet[i]);
-  }
-  /*
-  if (img_packet.checksum != checksum) {
-    VLOG(2) << "Image packet checksum should be 0x" << std::hex
-        << std::uppercase << std::setw(2) << std::setfill('0')
-        << static_cast<int>(img_packet.checksum) << ", but 0x"
-        << std::setw(2) << std::setfill('0') << static_cast<int>(checksum)
-        << " now";
-    return false;
-  }
-  */
-  img->frame_id = img_packet.frame_id;
-  img->timestamp = img_packet.timestamp;
-  img->exposure_time = img_packet.exposure_time;
-  return true;
-}
-
-}  // namespace
-
-Standard210aStreamsAdapter::Standard210aStreamsAdapter() {
-}
-
-Standard210aStreamsAdapter::~Standard210aStreamsAdapter() {
-}
-
-std::vector<Stream> Standard210aStreamsAdapter::GetKeyStreams() {
-  return {Stream::LEFT, Stream::RIGHT};
-}
-
-std::vector<Capabilities> Standard210aStreamsAdapter::GetStreamCapabilities() {
-  return {Capabilities::STEREO_COLOR};
-}
-
-std::map<Stream, Streams::unpack_img_data_t>
-Standard210aStreamsAdapter::GetUnpackImgDataMap() {
-  return {
-    {Stream::LEFT, unpack_stereo_img_data},
-    {Stream::RIGHT, unpack_stereo_img_data}
-  };
-}
-
-std::map<Stream, Streams::unpack_img_pixels_t>
-Standard210aStreamsAdapter::GetUnpackImgPixelsMap() {
-  return {
-    {Stream::LEFT, unpack_left_img_pixels},
-    {Stream::RIGHT, unpack_right_img_pixels}
-  };
-}
-
-MYNTEYE_END_NAMESPACE
@@ -1,42 +0,0 @@
-// Copyright 2018 Slightech Co., Ltd. All rights reserved.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-#ifndef MYNTEYE_DEVICE_STANDARD2_STREAMS_ADAPTER_S210A_H_
-#define MYNTEYE_DEVICE_STANDARD2_STREAMS_ADAPTER_S210A_H_
-#pragma once
-
-#include <map>
-#include <memory>
-#include <vector>
-
-#include "mynteye/device/streams.h"
-
-MYNTEYE_BEGIN_NAMESPACE
-
-class Standard210aStreamsAdapter : public StreamsAdapter {
- public:
-  Standard210aStreamsAdapter();
-  virtual ~Standard210aStreamsAdapter();
-
-  std::vector<Stream> GetKeyStreams() override;
-  std::vector<Capabilities> GetStreamCapabilities() override;
-
-  std::map<Stream, Streams::unpack_img_data_t>
-  GetUnpackImgDataMap() override;
-  std::map<Stream, Streams::unpack_img_pixels_t>
-  GetUnpackImgPixelsMap() override;
-};
-
-MYNTEYE_END_NAMESPACE
-
-#endif  // MYNTEYE_DEVICE_STANDARD2_STREAMS_ADAPTER_S210A_H_
@@ -583,22 +583,10 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
   }
 
   void publishOthers(const Stream &stream) {
-    // std::cout << stream << "===============================" << std::endl;
-    // int enable_tag = 0;
-    // api_->EnableStreamData(stream, [&](const Stream &stream) {
-    //   enable_tag += getStreamSubscribers(stream);
-    //   std::cout << "EnableStreamData callback test"
-    //       << stream << "|| Sum:"
-    //       << getStreamSubscribers(stream) << std::endl;
-    // }, true);
     if (getStreamSubscribers(stream) > 0 && !is_published_[stream]) {
-      // std::cout << stream
-      //     <<" enableStreamData tag = 0 return" << std::endl;
-      // std::cout << "enable " << stream << std::endl;
       api_->EnableStreamData(stream);
       api_->SetStreamCallback(
           stream, [this, stream](const api::StreamData &data) {
-            // ros::Time stamp = hardTimeToSoftTime(data.img->timestamp);
             ros::Time stamp = checkUpTimeStamp(
                 data.img->timestamp, stream);
             static std::size_t count = 0;
@@ -612,12 +600,9 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
     int disable_tag = 0;
     api_->DisableStreamData(stream, [&](const Stream &stream) {
       disable_tag += getStreamSubscribers(stream);
-      // std::cout << "DisableStreamData callback test: "
-      //     << stream << "|| Sum:"<< getStreamSubscribers(stream) << std::endl;
     }, true);
     if (disable_tag == 0 && is_published_[stream]) {
       api_->DisableStreamData(stream, [&](const Stream &stream) {
-        // std::cout << "disable " << stream << std::endl;
         api_->SetStreamCallback(stream, nullptr);
         is_published_[stream] = false;
       });
@@ -626,31 +611,6 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
   }
 
   void publishTopics() {
-    std::vector<Stream> all_streams{
-      Stream::RIGHT,
-      Stream::LEFT,
-      Stream::LEFT_RECTIFIED,
-      Stream::RIGHT_RECTIFIED,
-      Stream::DISPARITY,
-      Stream::DISPARITY_NORMALIZED,
-      Stream::POINTS,
-      Stream::DEPTH
-    };
-
-    static int sum = 0;
-    int sum_c = 0;
-    for (auto &&stream : all_streams) {
-      sum_c += getStreamSubscribers(stream);
-    }
-
-    if (sum_c != sum) {
-      if (sum_c == 0) {
-        api_->Stop(Source::VIDEO_STREAMING);
-        for (auto &&stream : all_streams) {
-          is_published_[stream] = false;
-        }
-        api_->Start(Source::VIDEO_STREAMING);
-      } else {
     if ((camera_publishers_[Stream::LEFT].getNumSubscribers() > 0 ||
         mono_publishers_[Stream::LEFT].getNumSubscribers() > 0) &&
         !is_published_[Stream::LEFT]) {
@@ -661,17 +621,6 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
            // ros::Time stamp = hardTimeToSoftTime(data.img->timestamp);
            ros::Time stamp = checkUpTimeStamp(
                data.img->timestamp, Stream::LEFT);
-
-           // static double img_time_prev = -1;
-           // NODELET_INFO_STREAM("ros_time_beg: " << FULL_PRECISION <<
-           //     ros_time_beg
-           //     << ", img_time_elapsed: " << FULL_PRECISION
-           //     << ((data.img->timestamp - img_time_beg) * 0.00001f)
-           //     << ", img_time_diff: " << FULL_PRECISION
-           //     << ((img_time_prev < 0) ? 0
-           //     : (data.img->timestamp - img_time_prev) * 0.01f) << "
-           //     ms");
-           // img_time_prev = data.img->timestamp;
            publishCamera(Stream::LEFT, data, left_count_, stamp);
            publishMono(Stream::LEFT, data, left_count_, stamp);
            NODELET_DEBUG_STREAM(
@@ -716,9 +665,6 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
       for (auto &&stream : other_streams) {
        publishOthers(stream);
      }
-      }
-      sum = sum_c;
-    }
 
     if (!is_motion_published_) {
       api_->SetMotionCallback([this](const api::MotionData &data) {