feat(synthetic): add stream data listener

Author: John Zhao, 2019-02-21 22:21:17 +08:00
parent 0fb3610744
commit c6aa8d93ca
2 changed files with 84 additions and 41 deletions
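
Summary: Synthetic gains an optional per-frame listener. ProcessNativeStream and the rectify/disparity/points/depth post-process callbacks now forward every api::StreamData through NotifyStreamData, and the repeated ObjMat/ObjMat2 <-> api::StreamData conversions are factored into small helpers. The sketch below is not part of this commit: it assumes the SDK headers are included and that the caller already holds a valid Synthetic pointer (how that pointer is obtained is outside this diff); AttachFrameLogger is a hypothetical helper name.

#include <iostream>

// Hypothetical helper (not in the SDK): register a lightweight listener on an
// existing Synthetic instance using the setter added by this commit.
void AttachFrameLogger(Synthetic *synthetic) {
  synthetic->SetStreamDataListener(
      [](const Stream &stream, const api::StreamData &data) {
        // Invoked for native frames (ProcessNativeStream) and for synthesized
        // results (the OnRectify/OnDisparity/OnPoints/OnDepth post-process hooks).
        if (stream == Stream::DEPTH) {
          std::cout << "depth frame id: " << data.frame_id << std::endl;
        }
      });
}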

Changed file 1 of 2: Synthetic implementation

@@ -74,6 +74,44 @@ void process_childs(
   }
 }
+// ObjMat/ObjMat2 > api::StreamData
+api::StreamData obj_data_first(const ObjMat2 *obj) {
+  return {obj->first_data, obj->first, nullptr, obj->first_id};
+}
+api::StreamData obj_data_second(const ObjMat2 *obj) {
+  return {obj->second_data, obj->second, nullptr, obj->second_id};
+}
+api::StreamData obj_data(const ObjMat *obj) {
+  return {obj->data, obj->value, nullptr, obj->id};
+}
+api::StreamData obj_data_first(const std::shared_ptr<ObjMat2> &obj) {
+  return {obj->first_data, obj->first, nullptr, obj->first_id};
+}
+api::StreamData obj_data_second(const std::shared_ptr<ObjMat2> &obj) {
+  return {obj->second_data, obj->second, nullptr, obj->second_id};
+}
+api::StreamData obj_data(const std::shared_ptr<ObjMat> &obj) {
+  return {obj->data, obj->value, nullptr, obj->id};
+}
+// api::StreamData > ObjMat/ObjMat2
+ObjMat data_obj(const api::StreamData &data) {
+  return ObjMat{data.frame, data.frame_id, data.img};
+}
+ObjMat2 data_obj(const api::StreamData &first, const api::StreamData &second) {
+  return ObjMat2{
+      first.frame, first.frame_id, first.img,
+      second.frame, second.frame_id, second.img};
+}
 } // namespace
 void Synthetic::InitCalibInfo() {
@@ -105,7 +143,8 @@ Synthetic::Synthetic(API *api, CalibrationModel calib_model)
     : api_(api),
       plugin_(nullptr),
       calib_model_(calib_model),
-      calib_default_tag_(false) {
+      calib_default_tag_(false),
+      stream_data_listener_(nullptr) {
   VLOG(2) << __func__;
   CHECK_NOTNULL(api_);
   InitCalibInfo();
@@ -121,6 +160,10 @@ Synthetic::~Synthetic() {
   }
 }
+void Synthetic::SetStreamDataListener(stream_data_listener_t listener) {
+  stream_data_listener_ = listener;
+}
 void Synthetic::NotifyImageParamsChanged() {
   if (!calib_default_tag_) {
     intr_left_ = api_->GetIntrinsicsBase(Stream::LEFT);
@@ -335,7 +378,7 @@ api::StreamData Synthetic::GetStreamData(const Stream &stream) {
       if (out != nullptr) {
         auto &&output = Object::Cast<ObjMat>(out);
         if (output != nullptr) {
-          return {output->data, output->value, nullptr, output->id};
+          return obj_data(output);
         }
         VLOG(2) << "Rectify not ready now";
       }
@@ -349,15 +392,9 @@ api::StreamData Synthetic::GetStreamData(const Stream &stream) {
         for (auto it : streams) {
           if (it.stream == stream) {
             if (num == 1) {
-              return {output->first_data,
-                  output->first,
-                  nullptr,
-                  output->first_id};
+              return obj_data_first(output);
             } else {
-              return {output->second_data,
-                  output->second,
-                  nullptr,
-                  output->second_id};
+              return obj_data_second(output);
             }
           }
           num++;
@@ -582,6 +619,7 @@ void Synthetic::InitProcessors() {
 void Synthetic::ProcessNativeStream(
     const Stream &stream, const api::StreamData &data) {
+  NotifyStreamData(stream, data);
   if (stream == Stream::LEFT || stream == Stream::RIGHT) {
     std::unique_lock<std::mutex> lk(mtx_left_right_ready_);
     static api::StreamData left_data, right_data;
@@ -604,9 +642,7 @@ void Synthetic::ProcessNativeStream(
             << calib_model_ << ", use default pinhole model";
         processor = find_processor<RectifyProcessorOCV>(processor_);
       }
-      processor->Process(ObjMat2{
-          left_data.frame, left_data.frame_id, left_data.img,
-          right_data.frame, right_data.frame_id, right_data.img});
+      processor->Process(data_obj(left_data, right_data));
     }
     return;
   }
@@ -628,34 +664,28 @@ void Synthetic::ProcessNativeStream(
         name = RectifyProcessor::NAME;
 #endif
       }
-      process_childs(
-          processor_, name, ObjMat2{
-          left_rect_data.frame, left_rect_data.frame_id, left_rect_data.img,
-          right_rect_data.frame, right_rect_data.frame_id,
-          right_rect_data.img});
+      process_childs(processor_, name,
+          data_obj(left_rect_data, right_rect_data));
     }
     return;
   }
   switch (stream) {
     case Stream::DISPARITY: {
-      process_childs(processor_, DisparityProcessor::NAME,
-          ObjMat{data.frame, data.frame_id, data.img});
+      process_childs(processor_, DisparityProcessor::NAME, data_obj(data));
     } break;
     case Stream::DISPARITY_NORMALIZED: {
       process_childs(processor_, DisparityNormalizedProcessor::NAME,
-          ObjMat{data.frame, data.frame_id, data.img});
+          data_obj(data));
     } break;
     case Stream::POINTS: {
       if (calib_model_ == CalibrationModel::PINHOLE) {
         // PINHOLE
-        process_childs(processor_, PointsProcessorOCV::NAME,
-            ObjMat{data.frame, data.frame_id, data.img});
+        process_childs(processor_, PointsProcessorOCV::NAME, data_obj(data));
 #ifdef WITH_CAM_MODELS
       } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
         // KANNALA_BRANDT
-        process_childs(processor_, PointsProcessor::NAME,
-            ObjMat{data.frame, data.frame_id, data.img});
+        process_childs(processor_, PointsProcessor::NAME, data_obj(data));
 #endif
       } else {
         // UNKNOW
@@ -666,13 +696,11 @@ void Synthetic::ProcessNativeStream(
     case Stream::DEPTH: {
       if (calib_model_ == CalibrationModel::PINHOLE) {
         // PINHOLE
-        process_childs(processor_, DepthProcessorOCV::NAME,
-            ObjMat{data.frame, data.frame_id, data.img});
+        process_childs(processor_, DepthProcessorOCV::NAME, data_obj(data));
 #ifdef WITH_CAM_MODELS
       } else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
         // KANNALA_BRANDT
-        process_childs(processor_, DepthProcessor::NAME,
-            ObjMat{data.frame, data.frame_id, data.img});
+        process_childs(processor_, DepthProcessor::NAME, data_obj(data));
 #endif
       } else {
         // UNKNOW
@@ -738,51 +766,51 @@ bool Synthetic::OnDepthProcess(
 void Synthetic::OnRectifyPostProcess(Object *const out) {
   const ObjMat2 *output = Object::Cast<ObjMat2>(out);
+  NotifyStreamData(Stream::LEFT_RECTIFIED, obj_data_first(output));
+  NotifyStreamData(Stream::RIGHT_RECTIFIED, obj_data_second(output));
   if (HasStreamCallback(Stream::LEFT_RECTIFIED)) {
     auto data = getControlDateWithStream(Stream::LEFT_RECTIFIED);
-    data.stream_callback(
-        {output->first_data, output->first, nullptr, output->first_id});
+    data.stream_callback(obj_data_first(output));
   }
   if (HasStreamCallback(Stream::RIGHT_RECTIFIED)) {
     auto data = getControlDateWithStream(Stream::RIGHT_RECTIFIED);
-    data.stream_callback(
-        {output->second_data, output->second, nullptr, output->second_id});
+    data.stream_callback(obj_data_second(output));
   }
 }
 void Synthetic::OnDisparityPostProcess(Object *const out) {
   const ObjMat *output = Object::Cast<ObjMat>(out);
+  NotifyStreamData(Stream::DISPARITY, obj_data(output));
   if (HasStreamCallback(Stream::DISPARITY)) {
     auto data = getControlDateWithStream(Stream::DISPARITY);
-    data.stream_callback(
-        {output->data, output->value, nullptr, output->id});
+    data.stream_callback(obj_data(output));
   }
 }
 void Synthetic::OnDisparityNormalizedPostProcess(Object *const out) {
   const ObjMat *output = Object::Cast<ObjMat>(out);
+  NotifyStreamData(Stream::DISPARITY_NORMALIZED, obj_data(output));
   if (HasStreamCallback(Stream::DISPARITY_NORMALIZED)) {
     auto data = getControlDateWithStream(Stream::DISPARITY_NORMALIZED);
-    data.stream_callback(
-        {output->data, output->value, nullptr, output->id});
+    data.stream_callback(obj_data(output));
   }
 }
 void Synthetic::OnPointsPostProcess(Object *const out) {
   const ObjMat *output = Object::Cast<ObjMat>(out);
+  NotifyStreamData(Stream::POINTS, obj_data(output));
   if (HasStreamCallback(Stream::POINTS)) {
     auto data = getControlDateWithStream(Stream::POINTS);
-    data.stream_callback(
-        {output->data, output->value, nullptr, output->id});
+    data.stream_callback(obj_data(output));
   }
 }
 void Synthetic::OnDepthPostProcess(Object *const out) {
   const ObjMat *output = Object::Cast<ObjMat>(out);
+  NotifyStreamData(Stream::DEPTH, obj_data(output));
   if (HasStreamCallback(Stream::DEPTH)) {
     auto data = getControlDateWithStream(Stream::DEPTH);
-    data.stream_callback(
-        {output->data, output->value, nullptr, output->id});
+    data.stream_callback(obj_data(output));
   }
 }
@@ -797,4 +825,11 @@ void Synthetic::SetDisparityComputingMethodType(
   LOG(ERROR) << "ERROR: no suited processor for disparity computing.";
 }
+void Synthetic::NotifyStreamData(
+    const Stream &stream, const api::StreamData &data) {
+  if (stream_data_listener_) {
+    stream_data_listener_(stream, data);
+  }
+}
 MYNTEYE_END_NAMESPACE
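
Note on the call sites above: NotifyStreamData is invoked synchronously on the processing path (inside ProcessNativeStream and the post-process callbacks), so a heavy listener would stall the pipeline. Below is a sketch of one way to keep the listener cheap by only enqueuing; FrameQueue, AttachAsyncConsumer, and the consumer thread (not shown) are illustrative and not part of the SDK, and the sketch assumes copying api::StreamData is acceptable here.

#include <condition_variable>
#include <deque>
#include <mutex>
#include <utility>

// Illustrative only: hand frames off to a consumer thread so the listener
// returns immediately. A consumer (not shown) would wait on cv and drain items.
struct FrameQueue {
  std::mutex mtx;
  std::condition_variable cv;
  std::deque<std::pair<Stream, api::StreamData>> items;

  void Push(const Stream &stream, const api::StreamData &data) {
    {
      std::lock_guard<std::mutex> lock(mtx);
      items.emplace_back(stream, data);
    }
    cv.notify_one();
  }
};

// Hypothetical helper: the listener only copies the StreamData into the queue
// and notifies the consumer; no processing happens on the pipeline thread.
void AttachAsyncConsumer(Synthetic *synthetic, FrameQueue *queue) {
  synthetic->SetStreamDataListener(
      [queue](const Stream &stream, const api::StreamData &data) {
        queue->Push(stream, data);
      });
}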

Changed file 2 of 2: Synthetic header (class declaration)

@@ -35,6 +35,8 @@ struct Object;
 class Synthetic {
  public:
   using stream_callback_t = API::stream_callback_t;
+  using stream_data_listener_t =
+      std::function<void(const Stream &stream, const api::StreamData &data)>;
   typedef enum Mode {
     MODE_NATIVE, // Native stream
@@ -52,6 +54,8 @@ class Synthetic {
   explicit Synthetic(API *api, CalibrationModel calib_model);
   ~Synthetic();
+  void SetStreamDataListener(stream_data_listener_t listener);
   void NotifyImageParamsChanged();
   bool Supports(const Stream &stream) const;
@@ -126,6 +130,8 @@ class Synthetic {
   void OnPointsPostProcess(Object *const out);
   void OnDepthPostProcess(Object *const out);
+  void NotifyStreamData(const Stream &stream, const api::StreamData &data);
   API *api_;
   std::shared_ptr<Processor> processor_;
@@ -141,6 +147,8 @@ class Synthetic {
   bool calib_default_tag_;
   std::vector<std::shared_ptr<Processor>> processors_;
+  stream_data_listener_t stream_data_listener_;
 };
 class SyntheticProcessorPart {
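
Because the new stream_data_listener_t alias is a std::function (see the first header hunk), anything callable as (const Stream &, const api::StreamData &) can be registered: a capturing lambda, a free function, or a bound member function. A sketch of the member-function form; FrameRecorder and Attach are hypothetical names, and the SDK headers are assumed to be included.

#include <functional>

// Hypothetical consumer class; only the binding pattern matters here.
class FrameRecorder {
 public:
  void OnStreamData(const Stream &stream, const api::StreamData &data) {
    // ... inspect or persist the frame here ...
  }
};

void Attach(Synthetic *synthetic, FrameRecorder *recorder) {
  using std::placeholders::_1;
  using std::placeholders::_2;
  synthetic->SetStreamDataListener(
      std::bind(&FrameRecorder::OnStreamData, recorder, _1, _2));
}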