Merge branch 'devel-s210a-merge' into develop

John Zhao
2018-12-24 10:41:40 +08:00
89 changed files with 2987 additions and 1338 deletions

View File

@@ -26,7 +26,6 @@
#include "mynteye/api/plugin.h"
#include "mynteye/api/synthetic.h"
#include "mynteye/device/device.h"
#include "mynteye/device/device_s.h"
#include "mynteye/device/utils.h"
#if defined(WITH_FILESYSTEM) && defined(WITH_NATIVE_FILESYSTEM)
@@ -210,26 +209,7 @@ std::vector<std::string> get_plugin_paths() {
API::API(std::shared_ptr<Device> device) : device_(device) {
VLOG(2) << __func__;
if (std::dynamic_pointer_cast<StandardDevice>(device_) != nullptr) {
bool in_l_ok, in_r_ok, ex_r2l_ok;
device_->GetIntrinsics(Stream::LEFT, &in_l_ok);
device_->GetIntrinsics(Stream::RIGHT, &in_r_ok);
device_->GetExtrinsics(Stream::RIGHT, Stream::LEFT, &ex_r2l_ok);
if (!in_l_ok || !in_r_ok || !ex_r2l_ok) {
#if defined(WITH_DEVICE_INFO_REQUIRED)
LOG(FATAL)
#else
LOG(WARNING)
#endif
<< "Image params not found, but we need it to process the "
"images. Please `make tools` and use `img_params_writer` "
"to write the image params. If you update the SDK from "
"1.x, the `SN*.conf` is the file contains them. Besides, "
"you could also calibrate them by yourself. Read the guide "
"doc (https://github.com/slightech/MYNT-EYE-S-SDK-Guide) "
"to learn more.";
}
}
// std::dynamic_pointer_cast<StandardDevice>(device_);
synthetic_.reset(new Synthetic(this));
}
@@ -237,29 +217,19 @@ API::~API() {
VLOG(2) << __func__;
}
std::shared_ptr<API> API::Create() {
return Create(device::select());
}
std::shared_ptr<API> API::Create(std::shared_ptr<Device> device) {
if (!device)
return nullptr;
return std::make_shared<API>(device);
}
std::shared_ptr<API> API::Create(int argc, char *argv[]) {
static glog_init _(argc, argv);
auto &&device = device::select();
if (!device)
return nullptr;
return std::make_shared<API>(device);
if (!device) return nullptr;
return Create(argc, argv, device);
}
std::shared_ptr<API> API::Create(
int argc, char *argv[], std::shared_ptr<Device> device) {
int argc, char *argv[], const std::shared_ptr<Device> &device) {
static glog_init _(argc, argv);
if (!device)
return nullptr;
return Create(device);
}
std::shared_ptr<API> API::Create(const std::shared_ptr<Device> &device) {
return std::make_shared<API>(device);
}
@@ -283,6 +253,10 @@ bool API::Supports(const AddOns &addon) const {
return device_->Supports(addon);
}
StreamRequest API::SelectStreamRequest(bool *ok) const {
return device::select_request(device_, ok);
}
const std::vector<StreamRequest> &API::GetStreamRequests(
const Capabilities &capability) const {
return device_->GetStreamRequests(capability);
@@ -291,6 +265,29 @@ const std::vector<StreamRequest> &API::GetStreamRequests(
void API::ConfigStreamRequest(
const Capabilities &capability, const StreamRequest &request) {
device_->ConfigStreamRequest(capability, request);
synthetic_->NotifyImageParamsChanged();
}
const StreamRequest &API::GetStreamRequest(
const Capabilities &capability) const {
return device_->GetStreamRequest(capability);
}
const std::vector<StreamRequest> &API::GetStreamRequests() const {
return device_->GetStreamRequests();
}
void API::ConfigStreamRequest(const StreamRequest &request) {
device_->ConfigStreamRequest(request);
synthetic_->NotifyImageParamsChanged();
}
const StreamRequest &API::GetStreamRequest() const {
return device_->GetStreamRequest();
}
std::shared_ptr<DeviceInfo> API::GetInfo() const {
return device_->GetInfo();
}
std::string API::GetInfo(const Info &info) const {
@@ -450,4 +447,23 @@ std::shared_ptr<Device> API::device() {
return device_;
}
// TODO(Kalman): Call this function in the appropriate place
void API::CheckImageParams() {
if (device_ != nullptr) {
bool in_l_ok, in_r_ok, ex_l2r_ok;
device_->GetIntrinsics(Stream::LEFT, &in_l_ok);
device_->GetIntrinsics(Stream::RIGHT, &in_r_ok);
device_->GetExtrinsics(Stream::LEFT, Stream::RIGHT, &ex_l2r_ok);
if (!in_l_ok || !in_r_ok || !ex_l2r_ok) {
LOG(FATAL) << "Image params not found, but we need it to process the "
"images. Please `make tools` and use `img_params_writer` "
"to write the image params. If you update the SDK from "
"1.x, the `SN*.conf` is the file contains them. Besides, "
"you could also calibrate them by yourself. Read the guide "
"doc (https://github.com/slightech/MYNT-EYE-SDK-2-Guide) "
"to learn more.";
}
}
}
MYNTEYE_END_NAMESPACE
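
A minimal usage sketch of the reworked request API above (not part of the commit; Source::VIDEO_STREAMING, API::Start/Stop, API::WaitForStreams and API::GetStreamData are assumed from the SDK samples rather than shown in this diff):

#include "mynteye/api/api.h"

MYNTEYE_USE_NAMESPACE

int main(int argc, char *argv[]) {
  auto &&api = API::Create(argc, argv);
  if (!api) return 1;
  bool ok;
  auto &&request = api->SelectStreamRequest(&ok);  // wraps device::select_request()
  if (ok) api->ConfigStreamRequest(request);       // also triggers Synthetic::NotifyImageParamsChanged()
  api->Start(Source::VIDEO_STREAMING);
  api->WaitForStreams();
  auto &&left = api->GetStreamData(Stream::LEFT);  // left.frame holds the converted image
  (void)left;
  api->Stop(Source::VIDEO_STREAMING);
  return 0;
}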

View File

@@ -27,11 +27,9 @@ const char RectifyProcessor::NAME[] = "RectifyProcessor";
RectifyProcessor::RectifyProcessor(
std::shared_ptr<Device> device, std::int32_t proc_period)
: Processor(std::move(proc_period)) {
: Processor(std::move(proc_period)), device_(device) {
VLOG(2) << __func__ << ": proc_period=" << proc_period;
InitParams(
device->GetIntrinsics(Stream::LEFT), device->GetIntrinsics(Stream::RIGHT),
device->GetExtrinsics(Stream::RIGHT, Stream::LEFT));
NotifyImageParamsChanged();
}
RectifyProcessor::~RectifyProcessor() {
@@ -42,6 +40,13 @@ std::string RectifyProcessor::Name() {
return NAME;
}
void RectifyProcessor::NotifyImageParamsChanged() {
InitParams(
device_->GetIntrinsics(Stream::LEFT),
device_->GetIntrinsics(Stream::RIGHT),
device_->GetExtrinsics(Stream::RIGHT, Stream::LEFT));
}
Object *RectifyProcessor::OnCreateOutput() {
return new ObjMat2();
}

View File

@@ -37,6 +37,8 @@ class RectifyProcessor : public Processor {
std::string Name() override;
void NotifyImageParamsChanged();
cv::Mat R1, P1, R2, P2, Q;
cv::Mat map11, map12, map21, map22;
@@ -48,6 +50,8 @@ class RectifyProcessor : public Processor {
private:
void InitParams(
Intrinsics in_left, Intrinsics in_right, Extrinsics ex_right_to_left);
std::shared_ptr<Device> device_;
};
MYNTEYE_END_NAMESPACE

View File

@@ -17,6 +17,8 @@
#include <functional>
#include <stdexcept>
#include <opencv2/imgproc/imgproc.hpp>
#include "mynteye/logger.h"
#include "mynteye/api/object.h"
#include "mynteye/api/plugin.h"
@@ -39,9 +41,16 @@ MYNTEYE_BEGIN_NAMESPACE
namespace {
cv::Mat frame2mat(const std::shared_ptr<device::Frame> &frame) {
// TODO(JohnZhao) Support different format frame to cv::Mat
CHECK_EQ(frame->format(), Format::GREY);
return cv::Mat(frame->height(), frame->width(), CV_8UC1, frame->data());
if (frame->format() == Format::YUYV) {
cv::Mat img(frame->height(), frame->width(), CV_8UC2, frame->data());
cv::cvtColor(img, img, cv::COLOR_YUV2BGR_YUY2);
return img;
} else if (frame->format() == Format::BGR888) {
cv::Mat img(frame->height(), frame->width(), CV_8UC3, frame->data());
return img;
} else {  // Format::GREY
return cv::Mat(frame->height(), frame->width(), CV_8UC1, frame->data());
}
}
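
For quick reference, the format handling above maps device frames to OpenCV element types as follows; a small helper one could write against the same formats (a sketch, not part of the commit):

// Element type frame2mat() starts from for each device format.
inline int frame_mat_type(const Format &format) {
  switch (format) {
    case Format::YUYV:   return CV_8UC2;  // then converted to 3-channel BGR via COLOR_YUV2BGR_YUY2
    case Format::BGR888: return CV_8UC3;  // returned as-is
    default:             return CV_8UC1;  // Format::GREY fallback
  }
}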
api::StreamData data2api(const device::StreamData &data) {
@@ -74,6 +83,11 @@ Synthetic::~Synthetic() {
}
}
void Synthetic::NotifyImageParamsChanged() {
auto &&processor = find_processor<RectifyProcessor>(processor_);
if (processor) processor->NotifyImageParamsChanged();
}
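
Taken together with the api.cc and rectify_processor.cc changes above, the parameter-refresh chain introduced here looks like this (a sketch of the call sequence, read directly off the diffs):

// API::ConfigStreamRequest(capability, request)
//   -> Device::ConfigStreamRequest(...)              // stores the request, updates stream intrinsics
//   -> Synthetic::NotifyImageParamsChanged()
//        -> RectifyProcessor::NotifyImageParamsChanged()
//             -> InitParams(in_left, in_right, ex_right_to_left)  // rebuilds R1/P1/R2/P2/Q and the rectify maps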
bool Synthetic::Supports(const Stream &stream) const {
return stream_supports_mode_.find(stream) != stream_supports_mode_.end();
}
@@ -152,7 +166,7 @@ api::StreamData Synthetic::GetStreamData(const Stream &stream) {
auto &&mode = GetStreamEnabledMode(stream);
if (mode == MODE_NATIVE) {
auto &&device = api_->device();
return data2api(device->GetLatestStreamData(stream));
return data2api(device->GetStreamData(stream));
} else if (mode == MODE_SYNTHETIC) {
if (stream == Stream::LEFT_RECTIFIED || stream == Stream::RIGHT_RECTIFIED) {
static std::shared_ptr<ObjMat2> output = nullptr;
@@ -302,42 +316,35 @@ void Synthetic::EnableStreamData(const Stream &stream, std::uint32_t depth) {
break;
stream_enabled_mode_[stream] = MODE_SYNTHETIC;
CHECK(ActivateProcessor<RectifyProcessor>());
}
return;
} return;
case Stream::RIGHT_RECTIFIED: {
if (!IsStreamDataEnabled(Stream::RIGHT))
break;
stream_enabled_mode_[stream] = MODE_SYNTHETIC;
CHECK(ActivateProcessor<RectifyProcessor>());
}
return;
} return;
case Stream::DISPARITY: {
stream_enabled_mode_[stream] = MODE_SYNTHETIC;
EnableStreamData(Stream::LEFT_RECTIFIED, depth + 1);
EnableStreamData(Stream::RIGHT_RECTIFIED, depth + 1);
CHECK(ActivateProcessor<DisparityProcessor>());
}
return;
} return;
case Stream::DISPARITY_NORMALIZED: {
stream_enabled_mode_[stream] = MODE_SYNTHETIC;
EnableStreamData(Stream::DISPARITY, depth + 1);
CHECK(ActivateProcessor<DisparityNormalizedProcessor>());
}
return;
} return;
case Stream::POINTS: {
stream_enabled_mode_[stream] = MODE_SYNTHETIC;
EnableStreamData(Stream::DISPARITY, depth + 1);
CHECK(ActivateProcessor<PointsProcessor>());
}
return;
} return;
case Stream::DEPTH: {
stream_enabled_mode_[stream] = MODE_SYNTHETIC;
EnableStreamData(Stream::POINTS, depth + 1);
CHECK(ActivateProcessor<DepthProcessor>());
}
return;
default:
break;
} return;
default: break;
}
if (depth == 0) {
LOG(WARNING) << "Enable stream data of " << stream << " failed";
@@ -390,8 +397,7 @@ void Synthetic::DisableStreamData(const Stream &stream, std::uint32_t depth) {
case Stream::DEPTH: {
DeactivateProcessor<DepthProcessor>();
} break;
default:
return;
default: return;
}
if (depth > 0) {
LOG(WARNING) << "Disable synthetic stream data of " << stream << " too";

View File

@@ -43,6 +43,8 @@ class Synthetic {
explicit Synthetic(API *api);
~Synthetic();
void NotifyImageParamsChanged();
bool Supports(const Stream &stream) const;
mode_t SupportsMode(const Stream &stream) const;

View File

@@ -19,6 +19,7 @@
#include <iterator>
#include <sstream>
#include <stdexcept>
#include <vector>
#include "mynteye/logger.h"
#include "mynteye/util/strings.h"
@@ -30,12 +31,12 @@ MYNTEYE_BEGIN_NAMESPACE
namespace {
const uvc::xu mynteye_xu = {3,
2,
{0x947a6d9f,
0x8a2f,
0x418d,
{0x85, 0x9e, 0x6c, 0x9a, 0xa0, 0x38, 0x10, 0x14}}};
const uvc::xu mynteye_xu = {3, 2,
{
0x947a6d9f, 0x8a2f, 0x418d,
{0x85, 0x9e, 0x6c, 0x9a, 0xa0, 0x38, 0x10, 0x14}
}
};
int XuCamCtrlId(Option option) {
switch (option) {
@@ -63,12 +64,21 @@ int XuCamCtrlId(Option option) {
case Option::FRAME_RATE:
return 7;
break;
case Option::MIN_EXPOSURE_TIME:
return 8;
break;
case Option::ACCELEROMETER_RANGE:
return 9;
break;
case Option::GYROSCOPE_RANGE:
return 10;
break;
case Option::ACCELEROMETER_LOW_PASS_FILTER:
return 11;
break;
case Option::GYROSCOPE_LOW_PASS_FILTER:
return 12;
break;
default:
LOG(FATAL) << "No cam ctrl id for " << option;
}
@@ -92,7 +102,7 @@ void CheckSpecVersion(const Version *spec_version) {
LOG(FATAL) << "Spec version must be specified";
}
std::vector<std::string> spec_versions{"1.0"};
std::vector<std::string> spec_versions{"1.0", "1.1"};
for (auto &&spec_ver : spec_versions) {
if (*spec_version == Version(spec_ver)) {
return; // supported
@@ -109,12 +119,14 @@ void CheckSpecVersion(const Version *spec_version) {
} // namespace
Channels::Channels(std::shared_ptr<uvc::device> device)
: device_(device),
is_imu_tracking_(false),
imu_track_stop_(false),
imu_sn_(0),
imu_callback_(nullptr) {
Channels::Channels(const std::shared_ptr<uvc::device> &device,
const std::shared_ptr<ChannelsAdapter> &adapter)
: device_(device),
adapter_(adapter),
is_imu_tracking_(false),
imu_track_stop_(false),
imu_sn_(0),
imu_callback_(nullptr) {
VLOG(2) << __func__;
UpdateControlInfos();
}
@@ -124,6 +136,14 @@ Channels::~Channels() {
StopImuTracking();
}
std::int32_t Channels::GetAccelRangeDefault() {
return adapter_->GetAccelRangeDefault();
}
std::int32_t Channels::GetGyroRangeDefault() {
return adapter_->GetGyroRangeDefault();
}
void Channels::LogControlInfos() const {
for (auto &&it = control_infos_.begin(); it != control_infos_.end(); it++) {
LOG(INFO) << it->first << ": min=" << it->second.min
@@ -133,17 +153,23 @@ void Channels::LogControlInfos() const {
}
void Channels::UpdateControlInfos() {
for (auto &&option : std::vector<Option>{Option::GAIN, Option::BRIGHTNESS,
Option::CONTRAST}) {
control_infos_[option] = PuControlInfo(option);
auto &&supports = adapter_->GetOptionSupports();
for (auto &&option : std::vector<Option>{
Option::GAIN, Option::BRIGHTNESS, Option::CONTRAST}) {
if (supports.find(option) != supports.end())
control_infos_[option] = PuControlInfo(option);
}
for (auto &&option : std::vector<Option>{
Option::FRAME_RATE, Option::IMU_FREQUENCY, Option::EXPOSURE_MODE,
Option::MAX_GAIN, Option::MAX_EXPOSURE_TIME,
Option::DESIRED_BRIGHTNESS, Option::IR_CONTROL, Option::HDR_MODE,
Option::ACCELEROMETER_RANGE, Option::GYROSCOPE_RANGE}) {
control_infos_[option] = XuControlInfo(option);
Option::FRAME_RATE, Option::IMU_FREQUENCY,
Option::EXPOSURE_MODE, Option::MAX_GAIN,
Option::MAX_EXPOSURE_TIME, Option::MIN_EXPOSURE_TIME,
Option::DESIRED_BRIGHTNESS, Option::IR_CONTROL,
Option::HDR_MODE, Option::ACCELEROMETER_RANGE,
Option::GYROSCOPE_RANGE, Option::ACCELEROMETER_LOW_PASS_FILTER,
Option::GYROSCOPE_LOW_PASS_FILTER}) {
if (supports.find(option) != supports.end())
control_infos_[option] = XuControlInfo(option);
}
if (VLOG_IS_ON(2)) {
@@ -184,8 +210,11 @@ std::int32_t Channels::GetControlValue(const Option &option) const {
case Option::DESIRED_BRIGHTNESS:
case Option::IR_CONTROL:
case Option::HDR_MODE:
case Option::MIN_EXPOSURE_TIME:
case Option::ACCELEROMETER_RANGE:
case Option::GYROSCOPE_RANGE:
case Option::ACCELEROMETER_LOW_PASS_FILTER:
case Option::GYROSCOPE_LOW_PASS_FILTER:
return XuCamCtrlGet(option);
case Option::ZERO_DRIFT_CALIBRATION:
case Option::ERASE_CHIP:
@@ -242,12 +271,22 @@ void Channels::SetControlValue(const Option &option, std::int32_t value) {
XuCamCtrlSet(option, value);
} break;
case Option::ACCELEROMETER_RANGE: {
if (!in_range() || !in_values({4, 8, 16, 32}))
if (!in_range() || !in_values(adapter_->GetAccelRangeValues()))
break;
XuCamCtrlSet(option, value);
} break;
case Option::GYROSCOPE_RANGE: {
if (!in_range() || !in_values({500, 1000, 2000, 4000}))
if (!in_range() || !in_values(adapter_->GetGyroRangeValues()))
break;
XuCamCtrlSet(option, value);
} break;
case Option::ACCELEROMETER_LOW_PASS_FILTER: {
if (!in_range() || !in_values({0, 1, 2}))
break;
XuCamCtrlSet(option, value);
} break;
case Option::GYROSCOPE_LOW_PASS_FILTER: {
if (!in_range() || !in_values({23, 64}))
break;
XuCamCtrlSet(option, value);
} break;
@@ -256,7 +295,8 @@ void Channels::SetControlValue(const Option &option, std::int32_t value) {
case Option::MAX_EXPOSURE_TIME:
case Option::DESIRED_BRIGHTNESS:
case Option::IR_CONTROL:
case Option::HDR_MODE: {
case Option::HDR_MODE:
case Option::MIN_EXPOSURE_TIME: {
if (!in_range())
break;
XuCamCtrlSet(option, value);
@@ -287,8 +327,11 @@ bool Channels::RunControlAction(const Option &option) const {
case Option::DESIRED_BRIGHTNESS:
case Option::IR_CONTROL:
case Option::HDR_MODE:
case Option::MIN_EXPOSURE_TIME:
case Option::ACCELEROMETER_RANGE:
case Option::GYROSCOPE_RANGE:
case Option::ACCELEROMETER_LOW_PASS_FILTER:
case Option::GYROSCOPE_LOW_PASS_FILTER:
LOG(WARNING) << option << " run action useless";
return false;
default:
@@ -318,6 +361,10 @@ void Channels::DoImuTrack() {
return;
}
if (res_packet.packets.back().count == 0) {
return;
}
VLOG(2) << "Imu req sn: " << imu_sn_ << ", res count: " << []() {
std::size_t n = 0;
for (auto &&packet : res_packet.packets) {
@@ -383,174 +430,6 @@ void Channels::StopImuTracking() {
}
}
namespace {
template <typename T>
T _from_data(const std::uint8_t *data) {
std::size_t size = sizeof(T) / sizeof(std::uint8_t);
T value = 0;
for (std::size_t i = 0; i < size; i++) {
value |= data[i] << (8 * (size - i - 1));
}
return value;
}
template <>
double _from_data(const std::uint8_t *data) {
return *(reinterpret_cast<const double *>(data));
}
std::string _from_data(const std::uint8_t *data, std::size_t count) {
std::string s(reinterpret_cast<const char *>(data), count);
strings::trim(s);
return s;
}
std::size_t from_data(Channels::device_info_t *info, const std::uint8_t *data) {
std::size_t i = 4; // skip vid, pid
// name, 16
info->name = _from_data(data + i, 16);
i += 16;
// serial_number, 16
info->serial_number = _from_data(data + i, 16);
i += 16;
// firmware_version, 2
info->firmware_version.set_major(data[i]);
info->firmware_version.set_minor(data[i + 1]);
i += 2;
// hardware_version, 3
info->hardware_version.set_major(data[i]);
info->hardware_version.set_minor(data[i + 1]);
info->hardware_version.set_flag(std::bitset<8>(data[i + 2]));
i += 3;
// spec_version, 2
info->spec_version.set_major(data[i]);
info->spec_version.set_minor(data[i + 1]);
i += 2;
// lens_type, 4
info->lens_type.set_vendor(_from_data<std::uint16_t>(data + i));
info->lens_type.set_product(_from_data<std::uint16_t>(data + i + 2));
i += 4;
// imu_type, 4
info->imu_type.set_vendor(_from_data<std::uint16_t>(data + i));
info->imu_type.set_product(_from_data<std::uint16_t>(data + i + 2));
i += 4;
// nominal_baseline, 2
info->nominal_baseline = _from_data<std::uint16_t>(data + i);
i += 2;
return i;
}
std::size_t from_data(
Intrinsics *in, const std::uint8_t *data, const Version *spec_version) {
std::size_t i = 0;
// width, 2
in->width = _from_data<std::uint16_t>(data + i);
i += 2;
// height, 2
in->height = _from_data<std::uint16_t>(data + i);
i += 2;
// fx, 8
in->fx = _from_data<double>(data + i);
i += 8;
// fy, 8
in->fy = _from_data<double>(data + i);
i += 8;
// cx, 8
in->cx = _from_data<double>(data + i);
i += 8;
// cy, 8
in->cy = _from_data<double>(data + i);
i += 8;
// model, 1
in->model = data[i];
i += 1;
// coeffs, 40
for (std::size_t j = 0; j < 5; j++) {
in->coeffs[j] = _from_data<double>(data + i + j * 8);
}
i += 40;
MYNTEYE_UNUSED(spec_version)
return i;
}
std::size_t from_data(
ImuIntrinsics *in, const std::uint8_t *data, const Version *spec_version) {
std::size_t i = 0;
// scale
for (std::size_t j = 0; j < 3; j++) {
for (std::size_t k = 0; k < 3; k++) {
in->scale[j][k] = _from_data<double>(data + i + (j * 3 + k) * 8);
}
}
i += 72;
// drift
for (std::size_t j = 0; j < 3; j++) {
in->drift[j] = _from_data<double>(data + i + j * 8);
}
i += 24;
// noise
for (std::size_t j = 0; j < 3; j++) {
in->noise[j] = _from_data<double>(data + i + j * 8);
}
i += 24;
// bias
for (std::size_t j = 0; j < 3; j++) {
in->bias[j] = _from_data<double>(data + i + j * 8);
}
i += 24;
MYNTEYE_UNUSED(spec_version)
return i;
}
std::size_t from_data(
Extrinsics *ex, const std::uint8_t *data, const Version *spec_version) {
std::size_t i = 0;
// rotation
for (std::size_t j = 0; j < 3; j++) {
for (std::size_t k = 0; k < 3; k++) {
ex->rotation[j][k] = _from_data<double>(data + i + (j * 3 + k) * 8);
}
}
i += 72;
// translation
for (std::size_t j = 0; j < 3; j++) {
ex->translation[j] = _from_data<double>(data + i + j * 8);
}
i += 24;
MYNTEYE_UNUSED(spec_version)
return i;
}
std::size_t from_data(
Channels::img_params_t *img_params, const std::uint8_t *data,
const Version *spec_version) {
std::size_t i = 0;
i += from_data(&img_params->in_left, data + i, spec_version);
i += from_data(&img_params->in_right, data + i, spec_version);
i += from_data(&img_params->ex_right_to_left, data + i, spec_version);
return i;
}
std::size_t from_data(
Channels::imu_params_t *imu_params, const std::uint8_t *data,
const Version *spec_version) {
std::size_t i = 0;
i += from_data(&imu_params->in_accel, data + i, spec_version);
i += from_data(&imu_params->in_gyro, data + i, spec_version);
i += from_data(&imu_params->ex_left_to_imu, data + i, spec_version);
return i;
}
} // namespace
bool Channels::GetFiles(
device_info_t *info, img_params_t *img_params, imu_params_t *imu_params,
Version *spec_version) const {
@@ -578,7 +457,7 @@ bool Channels::GetFiles(
if (XuFileQuery(uvc::XU_QUERY_GET, 2000, data)) {
// header = std::bitset<8>(data[0]);
std::uint16_t size = _from_data<std::uint16_t>(data + 1);
std::uint16_t size = bytes::_from_data<std::uint16_t>(data + 1);
std::uint8_t checksum = data[3 + size];
VLOG(2) << "GetFiles data size: " << size << ", checksum: 0x" << std::hex
<< std::setw(2) << std::setfill('0') << static_cast<int>(checksum);
@@ -601,30 +480,34 @@ bool Channels::GetFiles(
std::size_t end = 3 + size;
while (i < end) {
std::uint8_t file_id = *(data + i);
std::uint16_t file_size = _from_data<std::uint16_t>(data + i + 1);
std::uint16_t file_size = bytes::_from_data<std::uint16_t>(data + i + 1);
VLOG(2) << "GetFiles id: " << static_cast<int>(file_id)
<< ", size: " << file_size;
i += 3;
switch (file_id) {
case FID_DEVICE_INFO: {
CHECK_EQ(from_data(info, data + i), file_size)
CHECK_EQ(bytes::from_data(info, data + i), file_size)
<< "The firmware not support getting device info, you could "
"upgrade to latest";
spec_ver = &info->spec_version;
CheckSpecVersion(spec_ver);
} break;
case FID_IMG_PARAMS: {
img_params->ok = file_size > 0;
if (img_params->ok) {
if (file_size > 0) {
CheckSpecVersion(spec_ver);
CHECK_EQ(from_data(img_params, data + i, spec_ver), file_size);
/*auto &&n = */adapter_->GetImgParamsFromData(
data + i, spec_ver, img_params);
// Considering the upgrade, comment this
// CHECK_EQ(n, file_size);
}
} break;
case FID_IMU_PARAMS: {
imu_params->ok = file_size > 0;
if (imu_params->ok) {
CheckSpecVersion(spec_ver);
CHECK_EQ(from_data(imu_params, data + i, spec_ver), file_size);
auto &&n = adapter_->GetImuParamsFromData(
data + i, spec_ver, imu_params);
CHECK_EQ(n, file_size);
}
} break;
default:
@@ -641,198 +524,6 @@ bool Channels::GetFiles(
}
}
namespace {
template <typename T>
std::size_t _to_data(T value, std::uint8_t *data) {
std::size_t size = sizeof(T) / sizeof(std::uint8_t);
for (std::size_t i = 0; i < size; i++) {
data[i] = static_cast<std::uint8_t>((value >> (8 * (size - i - 1))) & 0xFF);
}
return size;
}
template <>
std::size_t _to_data(double value, std::uint8_t *data) {
std::uint8_t *val = reinterpret_cast<std::uint8_t *>(&value);
std::copy(val, val + 8, data);
return 8;
}
std::size_t _to_data(std::string value, std::uint8_t *data, std::size_t count) {
std::copy(value.begin(), value.end(), data);
for (std::size_t i = value.size(); i < count; i++) {
data[i] = ' ';
}
return count;
}
std::size_t to_data(
const Channels::device_info_t *info, std::uint8_t *data,
const Version *spec_version) {
std::size_t i = 3; // skip id, size
i += 4; // skip vid, pid
// name, 16
_to_data(info->name, data + i, 16);
i += 16;
// serial_number, 16
_to_data(info->serial_number, data + i, 16);
i += 16;
// firmware_version, 2
data[i] = info->firmware_version.major();
data[i + 1] = info->firmware_version.minor();
i += 2;
// hardware_version, 3
data[i] = info->hardware_version.major();
data[i + 1] = info->hardware_version.minor();
data[i + 2] =
static_cast<std::uint8_t>(info->hardware_version.flag().to_ulong());
i += 3;
// spec_version, 2
data[i] = info->spec_version.major();
data[i + 1] = info->spec_version.minor();
i += 2;
// lens_type, 4
_to_data(info->lens_type.vendor(), data + i);
_to_data(info->lens_type.product(), data + i + 2);
i += 4;
// imu_type, 4
_to_data(info->imu_type.vendor(), data + i);
_to_data(info->imu_type.product(), data + i + 2);
i += 4;
// nominal_baseline, 2
_to_data(info->nominal_baseline, data + i);
i += 2;
MYNTEYE_UNUSED(spec_version)
// others
std::size_t size = i - 3;
data[0] = Channels::FID_DEVICE_INFO;
data[1] = static_cast<std::uint8_t>((size >> 8) & 0xFF);
data[2] = static_cast<std::uint8_t>(size & 0xFF);
return size + 3;
}
std::size_t to_data(
const Intrinsics *in, std::uint8_t *data, const Version *spec_version) {
std::size_t i = 0;
// width, 2
_to_data(in->width, data + i);
i += 2;
// height, 2
_to_data(in->height, data + i);
i += 2;
// fx, 8
_to_data(in->fx, data + i);
i += 8;
// fy, 8
_to_data(in->fy, data + i);
i += 8;
// cx, 8
_to_data(in->cx, data + i);
i += 8;
// cy, 8
_to_data(in->cy, data + i);
i += 8;
// model, 1
data[i] = in->model;
i += 1;
// coeffs, 40
for (std::size_t j = 0; j < 5; j++) {
_to_data(in->coeffs[j], data + i + j * 8);
}
i += 40;
MYNTEYE_UNUSED(spec_version)
return i;
}
std::size_t to_data(
const ImuIntrinsics *in, std::uint8_t *data, const Version *spec_version) {
std::size_t i = 0;
// scale
for (std::size_t j = 0; j < 3; j++) {
for (std::size_t k = 0; k < 3; k++) {
_to_data(in->scale[j][k], data + i + (j * 3 + k) * 8);
}
}
i += 72;
// drift
for (std::size_t j = 0; j < 3; j++) {
_to_data(in->drift[j], data + i + j * 8);
}
i += 24;
// noise
for (std::size_t j = 0; j < 3; j++) {
_to_data(in->noise[j], data + i + j * 8);
}
i += 24;
// bias
for (std::size_t j = 0; j < 3; j++) {
_to_data(in->bias[j], data + i + j * 8);
}
i += 24;
MYNTEYE_UNUSED(spec_version)
return i;
}
std::size_t to_data(
const Extrinsics *ex, std::uint8_t *data, const Version *spec_version) {
std::size_t i = 0;
// rotation
for (std::size_t j = 0; j < 3; j++) {
for (std::size_t k = 0; k < 3; k++) {
_to_data(ex->rotation[j][k], data + i + (j * 3 + k) * 8);
}
}
i += 72;
// translation
for (std::size_t j = 0; j < 3; j++) {
_to_data(ex->translation[j], data + i + j * 8);
}
i += 24;
MYNTEYE_UNUSED(spec_version)
return i;
}
std::size_t to_data(
const Channels::img_params_t *img_params, std::uint8_t *data,
const Version *spec_version) {
std::size_t i = 3; // skip id, size
i += to_data(&img_params->in_left, data + i, spec_version);
i += to_data(&img_params->in_right, data + i, spec_version);
i += to_data(&img_params->ex_right_to_left, data + i, spec_version);
// others
std::size_t size = i - 3;
data[0] = Channels::FID_IMG_PARAMS;
data[1] = static_cast<std::uint8_t>((size >> 8) & 0xFF);
data[2] = static_cast<std::uint8_t>(size & 0xFF);
return size + 3;
}
std::size_t to_data(
const Channels::imu_params_t *imu_params, std::uint8_t *data,
const Version *spec_version) {
std::size_t i = 3; // skip id, size
i += to_data(&imu_params->in_accel, data + i, spec_version);
i += to_data(&imu_params->in_gyro, data + i, spec_version);
i += to_data(&imu_params->ex_left_to_imu, data + i, spec_version);
// others
std::size_t size = i - 3;
data[0] = Channels::FID_IMU_PARAMS;
data[1] = static_cast<std::uint8_t>((size >> 8) & 0xFF);
data[2] = static_cast<std::uint8_t>(size & 0xFF);
return size + 3;
}
} // namespace
bool Channels::SetFiles(
device_info_t *info, img_params_t *img_params, imu_params_t *imu_params,
Version *spec_version) {
@@ -854,15 +545,15 @@ bool Channels::SetFiles(
std::uint16_t size = 0;
if (info != nullptr) {
header[0] = true;
size += to_data(info, data + 3 + size, spec_ver);
size += bytes::to_data(info, data + 3 + size, spec_ver);
}
if (img_params != nullptr) {
header[1] = true;
size += to_data(img_params, data + 3 + size, spec_ver);
size += adapter_->SetImgParamsToData(img_params, spec_ver, data + 3 + size);
}
if (imu_params != nullptr) {
header[2] = true;
size += to_data(imu_params, data + 3 + size, spec_ver);
size += adapter_->SetImuParamsToData(imu_params, spec_ver, data + 3 + size);
}
data[0] = static_cast<std::uint8_t>(header.to_ulong());
@@ -986,7 +677,7 @@ bool Channels::XuImuRead(ImuResPacket *res) const {
static std::uint8_t data[2000]{};
// std::fill(data, data + 2000, 0); // reset
if (XuControlQuery(CHANNEL_IMU_READ, uvc::XU_QUERY_GET, 2000, data)) {
res->from_data(data);
adapter_->GetImuResPacket(data, res);
if (res->header != 0x5B) {
LOG(WARNING) << "Imu response packet header must be 0x5B, but 0x"
@@ -1047,4 +738,309 @@ Channels::control_info_t Channels::XuControlInfo(Option option) const {
return {min, max, def};
}
std::size_t ChannelsAdapter::GetImuParamsFromData(
const std::uint8_t *data, const Version *version,
Channels::imu_params_t *imu_params) {
std::size_t i = 0;
i += bytes::from_data(&imu_params->in_accel, data + i, version);
i += bytes::from_data(&imu_params->in_gyro, data + i, version);
i += bytes::from_data(&imu_params->ex_left_to_imu, data + i, version);
return i;
}
std::size_t ChannelsAdapter::SetImuParamsToData(
const Channels::imu_params_t *imu_params, const Version *version,
std::uint8_t *data) {
std::size_t i = 3; // skip id, size
i += bytes::to_data(&imu_params->in_accel, data + i, version);
i += bytes::to_data(&imu_params->in_gyro, data + i, version);
i += bytes::to_data(&imu_params->ex_left_to_imu, data + i, version);
// others
std::size_t size = i - 3;
data[0] = Channels::FID_IMU_PARAMS;
data[1] = static_cast<std::uint8_t>((size >> 8) & 0xFF);
data[2] = static_cast<std::uint8_t>(size & 0xFF);
return size + 3;
}
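
All of the to_data/Set*ToData writers above and below share one record framing; read off the code, a record looks like this (sizes in bytes):

// [0]     file id: Channels::FID_DEVICE_INFO / FID_IMG_PARAMS / FID_IMU_PARAMS
// [1..2]  payload size, big-endian
// [3..]   payload produced by the bytes::to_data serializers
// the writer returns payload size + 3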
namespace bytes {
// from
std::string _from_data(const std::uint8_t *data, std::size_t count) {
std::string s(reinterpret_cast<const char *>(data), count);
strings::trim(s);
return s;
}
std::size_t from_data(Channels::device_info_t *info, const std::uint8_t *data) {
std::size_t i = 4; // skip vid, pid
// name, 16
info->name = _from_data(data + i, 16);
i += 16;
// serial_number, 16
info->serial_number = _from_data(data + i, 16);
i += 16;
// firmware_version, 2
info->firmware_version.set_major(data[i]);
info->firmware_version.set_minor(data[i + 1]);
i += 2;
// hardware_version, 3
info->hardware_version.set_major(data[i]);
info->hardware_version.set_minor(data[i + 1]);
info->hardware_version.set_flag(std::bitset<8>(data[i + 2]));
i += 3;
// spec_version, 2
info->spec_version.set_major(data[i]);
info->spec_version.set_minor(data[i + 1]);
i += 2;
// lens_type, 4
info->lens_type.set_vendor(_from_data<std::uint16_t>(data + i));
info->lens_type.set_product(_from_data<std::uint16_t>(data + i + 2));
i += 4;
// imu_type, 4
info->imu_type.set_vendor(_from_data<std::uint16_t>(data + i));
info->imu_type.set_product(_from_data<std::uint16_t>(data + i + 2));
i += 4;
// nominal_baseline, 2
info->nominal_baseline = _from_data<std::uint16_t>(data + i);
i += 2;
return i;
}
std::size_t from_data(Intrinsics *in, const std::uint8_t *data,
const Version *spec_version) {
std::size_t i = 0;
// width, 2
in->width = _from_data<std::uint16_t>(data + i);
i += 2;
// height, 2
in->height = _from_data<std::uint16_t>(data + i);
i += 2;
// fx, 8
in->fx = _from_data<double>(data + i);
i += 8;
// fy, 8
in->fy = _from_data<double>(data + i);
i += 8;
// cx, 8
in->cx = _from_data<double>(data + i);
i += 8;
// cy, 8
in->cy = _from_data<double>(data + i);
i += 8;
// model, 1
in->model = data[i];
i += 1;
// coeffs, 40
for (std::size_t j = 0; j < 5; j++) {
in->coeffs[j] = _from_data<double>(data + i + j * 8);
}
i += 40;
MYNTEYE_UNUSED(spec_version)
return i;
}
std::size_t from_data(ImuIntrinsics *in, const std::uint8_t *data,
const Version *spec_version) {
std::size_t i = 0;
// scale
for (std::size_t j = 0; j < 3; j++) {
for (std::size_t k = 0; k < 3; k++) {
in->scale[j][k] = _from_data<double>(data + i + (j * 3 + k) * 8);
}
}
i += 72;
// drift
for (std::size_t j = 0; j < 3; j++) {
in->drift[j] = _from_data<double>(data + i + j * 8);
}
i += 24;
// noise
for (std::size_t j = 0; j < 3; j++) {
in->noise[j] = _from_data<double>(data + i + j * 8);
}
i += 24;
// bias
for (std::size_t j = 0; j < 3; j++) {
in->bias[j] = _from_data<double>(data + i + j * 8);
}
i += 24;
MYNTEYE_UNUSED(spec_version)
return i;
}
std::size_t from_data(Extrinsics *ex, const std::uint8_t *data,
const Version *spec_version) {
std::size_t i = 0;
// rotation
for (std::size_t j = 0; j < 3; j++) {
for (std::size_t k = 0; k < 3; k++) {
ex->rotation[j][k] = _from_data<double>(data + i + (j * 3 + k) * 8);
}
}
i += 72;
// translation
for (std::size_t j = 0; j < 3; j++) {
ex->translation[j] = _from_data<double>(data + i + j * 8);
}
i += 24;
MYNTEYE_UNUSED(spec_version)
return i;
}
// to
std::size_t _to_data(std::string value, std::uint8_t *data, std::size_t count) {
std::copy(value.begin(), value.end(), data);
for (std::size_t i = value.size(); i < count; i++) {
data[i] = ' ';
}
return count;
}
std::size_t to_data(const Channels::device_info_t *info, std::uint8_t *data,
const Version *spec_version) {
std::size_t i = 3; // skip id, size
i += 4; // skip vid, pid
// name, 16
_to_data(info->name, data + i, 16);
i += 16;
// serial_number, 16
_to_data(info->serial_number, data + i, 16);
i += 16;
// firmware_version, 2
data[i] = info->firmware_version.major();
data[i + 1] = info->firmware_version.minor();
i += 2;
// hardware_version, 3
data[i] = info->hardware_version.major();
data[i + 1] = info->hardware_version.minor();
data[i + 2] =
static_cast<std::uint8_t>(info->hardware_version.flag().to_ulong());
i += 3;
// spec_version, 2
data[i] = info->spec_version.major();
data[i + 1] = info->spec_version.minor();
i += 2;
// lens_type, 4
_to_data(info->lens_type.vendor(), data + i);
_to_data(info->lens_type.product(), data + i + 2);
i += 4;
// imu_type, 4
_to_data(info->imu_type.vendor(), data + i);
_to_data(info->imu_type.product(), data + i + 2);
i += 4;
// nominal_baseline, 2
_to_data(info->nominal_baseline, data + i);
i += 2;
MYNTEYE_UNUSED(spec_version)
// others
std::size_t size = i - 3;
data[0] = Channels::FID_DEVICE_INFO;
data[1] = static_cast<std::uint8_t>((size >> 8) & 0xFF);
data[2] = static_cast<std::uint8_t>(size & 0xFF);
return size + 3;
}
std::size_t to_data(const Intrinsics *in, std::uint8_t *data,
const Version *spec_version) {
std::size_t i = 0;
// width, 2
_to_data(in->width, data + i);
i += 2;
// height, 2
_to_data(in->height, data + i);
i += 2;
// fx, 8
_to_data(in->fx, data + i);
i += 8;
// fy, 8
_to_data(in->fy, data + i);
i += 8;
// cx, 8
_to_data(in->cx, data + i);
i += 8;
// cy, 8
_to_data(in->cy, data + i);
i += 8;
// model, 1
data[i] = in->model;
i += 1;
// coeffs, 40
for (std::size_t j = 0; j < 5; j++) {
_to_data(in->coeffs[j], data + i + j * 8);
}
i += 40;
MYNTEYE_UNUSED(spec_version)
return i;
}
std::size_t to_data(const ImuIntrinsics *in, std::uint8_t *data,
const Version *spec_version) {
std::size_t i = 0;
// scale
for (std::size_t j = 0; j < 3; j++) {
for (std::size_t k = 0; k < 3; k++) {
_to_data(in->scale[j][k], data + i + (j * 3 + k) * 8);
}
}
i += 72;
// drift
for (std::size_t j = 0; j < 3; j++) {
_to_data(in->drift[j], data + i + j * 8);
}
i += 24;
// noise
for (std::size_t j = 0; j < 3; j++) {
_to_data(in->noise[j], data + i + j * 8);
}
i += 24;
// bias
for (std::size_t j = 0; j < 3; j++) {
_to_data(in->bias[j], data + i + j * 8);
}
i += 24;
MYNTEYE_UNUSED(spec_version)
return i;
}
std::size_t to_data(const Extrinsics *ex, std::uint8_t *data,
const Version *spec_version) {
std::size_t i = 0;
// rotation
for (std::size_t j = 0; j < 3; j++) {
for (std::size_t k = 0; k < 3; k++) {
_to_data(ex->rotation[j][k], data + i + (j * 3 + k) * 8);
}
}
i += 72;
// translation
for (std::size_t j = 0; j < 3; j++) {
_to_data(ex->translation[j], data + i + j * 8);
}
i += 24;
MYNTEYE_UNUSED(spec_version)
return i;
}
} // namespace bytes
MYNTEYE_END_NAMESPACE

View File

@@ -15,12 +15,17 @@
#define MYNTEYE_DEVICE_CHANNELS_H_
#pragma once
#include <algorithm>
#include <map>
#include <memory>
#include <set>
#include <string>
#include <thread>
#include <vector>
#include "mynteye/mynteye.h"
#include "mynteye/types.h"
#include "mynteye/device/device.h"
#include "mynteye/device/types.h"
#include "mynteye/uvc/uvc.h"
@@ -33,6 +38,8 @@ struct xu;
} // namespace uvc
class ChannelsAdapter;
class MYNTEYE_API Channels {
public:
typedef enum Channel {
@@ -67,23 +74,16 @@ class MYNTEYE_API Channels {
using device_info_t = DeviceInfo;
typedef struct ImgParams {
bool ok;
Intrinsics in_left;
Intrinsics in_right;
Extrinsics ex_right_to_left;
} img_params_t;
using img_params_t = std::map<Resolution, device::img_params_t>;
using imu_params_t = device::imu_params_t;
typedef struct ImuParams {
bool ok;
ImuIntrinsics in_accel;
ImuIntrinsics in_gyro;
Extrinsics ex_left_to_imu;
} imu_params_t;
explicit Channels(std::shared_ptr<uvc::device> device);
Channels(const std::shared_ptr<uvc::device> &device,
const std::shared_ptr<ChannelsAdapter> &adapter);
~Channels();
std::int32_t GetAccelRangeDefault();
std::int32_t GetGyroRangeDefault();
void LogControlInfos() const;
void UpdateControlInfos();
control_info_t GetControlInfo(const Option &option) const;
@@ -140,6 +140,7 @@ class MYNTEYE_API Channels {
control_info_t XuControlInfo(Option option) const;
std::shared_ptr<uvc::device> device_;
std::shared_ptr<ChannelsAdapter> adapter_;
std::map<Option, control_info_t> control_infos_;
@@ -151,6 +152,101 @@ class MYNTEYE_API Channels {
imu_callback_t imu_callback_;
};
class ChannelsAdapter {
public:
virtual ~ChannelsAdapter() {}
virtual std::set<Option> GetOptionSupports() = 0;
virtual std::int32_t GetAccelRangeDefault() = 0;
virtual std::vector<std::int32_t> GetAccelRangeValues() = 0;
virtual std::int32_t GetGyroRangeDefault() = 0;
virtual std::vector<std::int32_t> GetGyroRangeValues() = 0;
virtual void GetImuResPacket(const std::uint8_t *data, ImuResPacket *res) = 0;
virtual std::size_t GetImgParamsFromData(
const std::uint8_t *data, const Version *version,
Channels::img_params_t *img_params) = 0;
virtual std::size_t SetImgParamsToData(
const Channels::img_params_t *img_params, const Version *version,
std::uint8_t *data) = 0;
virtual std::size_t GetImuParamsFromData(
const std::uint8_t *data, const Version *version,
Channels::imu_params_t *imu_params);
virtual std::size_t SetImuParamsToData(
const Channels::imu_params_t *imu_params, const Version *version,
std::uint8_t *data);
};
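
Channels is now constructed with one of these adapters; a minimal wiring sketch (the uvc_device handle is a placeholder, and Standard2ChannelsAdapter is assumed to be the S210A counterpart of the StandardChannelsAdapter added in this commit):

// Given a uvc::device handle obtained from device enumeration:
//   auto channels = std::make_shared<Channels>(
//       uvc_device, std::make_shared<StandardChannelsAdapter>());
//   channels->GetAccelRangeDefault();  // 8 for Model::STANDARD
//   channels->GetGyroRangeDefault();   // 1000 for Model::STANDARD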
namespace bytes {
// from
template <typename T>
T _from_data(const std::uint8_t *data) {
std::size_t size = sizeof(T) / sizeof(std::uint8_t);
T value = 0;
for (std::size_t i = 0; i < size; i++) {
value |= data[i] << (8 * (size - i - 1));
}
return value;
}
template <>
inline double _from_data(const std::uint8_t *data) {
return *(reinterpret_cast<const double *>(data));
}
std::string _from_data(const std::uint8_t *data, std::size_t count);
std::size_t from_data(Channels::device_info_t *info, const std::uint8_t *data);
std::size_t from_data(Intrinsics *in, const std::uint8_t *data,
const Version *spec_version);
std::size_t from_data(ImuIntrinsics *in, const std::uint8_t *data,
const Version *spec_version);
std::size_t from_data(Extrinsics *ex, const std::uint8_t *data,
const Version *spec_version);
// to
template <typename T>
std::size_t _to_data(T value, std::uint8_t *data) {
std::size_t size = sizeof(T) / sizeof(std::uint8_t);
for (std::size_t i = 0; i < size; i++) {
data[i] = static_cast<std::uint8_t>((value >> (8 * (size - i - 1))) & 0xFF);
}
return size;
}
template <>
inline std::size_t _to_data(double value, std::uint8_t *data) {
std::uint8_t *val = reinterpret_cast<std::uint8_t *>(&value);
std::copy(val, val + 8, data);
return 8;
}
std::size_t _to_data(std::string value, std::uint8_t *data, std::size_t count);
std::size_t to_data(const Channels::device_info_t *info, std::uint8_t *data,
const Version *spec_version);
std::size_t to_data(const Intrinsics *in, std::uint8_t *data,
const Version *spec_version);
std::size_t to_data(const ImuIntrinsics *in, std::uint8_t *data,
const Version *spec_version);
std::size_t to_data(const Extrinsics *ex, std::uint8_t *data,
const Version *spec_version);
} // namespace bytes
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_DEVICE_CHANNELS_H_
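
The byte helpers declared above pack integral fields big-endian (most significant byte first) and copy doubles through their raw 8-byte representation; a small round-trip sketch:

std::uint8_t buf[8] = {};
bytes::_to_data<std::uint16_t>(0x1234, buf);      // buf[0] == 0x12, buf[1] == 0x34
auto v = bytes::_from_data<std::uint16_t>(buf);   // v == 0x1234
bytes::_to_data(3.5, buf);                        // double: 8 raw bytes, no byte swapping
double d = bytes::_from_data<double>(buf);        // d == 3.5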

View File

@@ -16,22 +16,55 @@
MYNTEYE_BEGIN_NAMESPACE
const std::map<Model, StreamSupports> stream_supports_map = {
{Model::STANDARD, {Stream::LEFT, Stream::RIGHT}}};
{Model::STANDARD, {Stream::LEFT, Stream::RIGHT}},
{Model::STANDARD2, {Stream::LEFT, Stream::RIGHT}}
};
const std::map<Model, CapabilitiesSupports> capabilities_supports_map = {
{Model::STANDARD, {Capabilities::STEREO, Capabilities::IMU}}};
{Model::STANDARD, {Capabilities::STEREO, Capabilities::IMU}},
{Model::STANDARD2, {Capabilities::STEREO_COLOR, Capabilities::IMU}}
};
const std::map<Model, OptionSupports> option_supports_map = {
{Model::STANDARD,
{Option::GAIN, Option::BRIGHTNESS, Option::CONTRAST, Option::FRAME_RATE,
Option::IMU_FREQUENCY, Option::EXPOSURE_MODE, Option::MAX_GAIN,
Option::MAX_EXPOSURE_TIME, Option::DESIRED_BRIGHTNESS, Option::IR_CONTROL,
Option::HDR_MODE, Option::ZERO_DRIFT_CALIBRATION, Option::ERASE_CHIP,
Option::ACCELEROMETER_RANGE, Option::GYROSCOPE_RANGE}}};
{Model::STANDARD, {
Option::GAIN, Option::BRIGHTNESS, Option::CONTRAST,
Option::FRAME_RATE, Option::IMU_FREQUENCY,
Option::EXPOSURE_MODE, Option::MAX_GAIN, Option::MAX_EXPOSURE_TIME,
Option::DESIRED_BRIGHTNESS,
Option::IR_CONTROL,
Option::HDR_MODE,
Option::ACCELEROMETER_RANGE, Option::GYROSCOPE_RANGE,
Option::ZERO_DRIFT_CALIBRATION,
Option::ERASE_CHIP}
},
{Model::STANDARD2, {
Option::BRIGHTNESS,
Option::EXPOSURE_MODE, Option::MAX_GAIN, Option::MAX_EXPOSURE_TIME,
Option::MIN_EXPOSURE_TIME, Option::DESIRED_BRIGHTNESS,
Option::ACCELEROMETER_RANGE, Option::GYROSCOPE_RANGE,
Option::ACCELEROMETER_LOW_PASS_FILTER, Option::GYROSCOPE_LOW_PASS_FILTER,
Option::ERASE_CHIP}
}
};
const std::map<Model, std::map<Capabilities, StreamRequests>>
stream_requests_map = {
{Model::STANDARD,
{{Capabilities::STEREO, {{752, 480, Format::YUYV, 25}}}}}};
stream_requests_map = {
{Model::STANDARD,
{{Capabilities::STEREO, {
{752, 480, Format::YUYV, 25}}
}}
},
{Model::STANDARD2,
{{Capabilities::STEREO_COLOR, {
{1280, 400, Format::BGR888, 10},
{1280, 400, Format::BGR888, 20},
{1280, 400, Format::BGR888, 30},
{1280, 400, Format::BGR888, 60},
{2560, 800, Format::BGR888, 10},
{2560, 800, Format::BGR888, 20},
{2560, 800, Format::BGR888, 30}}
}}
}
};
MYNTEYE_END_NAMESPACE

View File

@@ -23,8 +23,9 @@
#include "mynteye/device/async_callback.h"
#include "mynteye/device/channels.h"
#include "mynteye/device/config.h"
#include "mynteye/device/device_s.h"
#include "mynteye/device/motions.h"
#include "mynteye/device/standard/device_s.h"
#include "mynteye/device/standard2/device_s2.h"
#include "mynteye/device/streams.h"
#include "mynteye/device/types.h"
#include "mynteye/util/strings.h"
@@ -78,14 +79,17 @@ bool CheckSupports(
} // namespace
Device::Device(const Model &model, std::shared_ptr<uvc::device> device)
: video_streaming_(false),
motion_tracking_(false),
model_(model),
device_(device),
streams_(nullptr),
channels_(std::make_shared<Channels>(device)),
motions_(std::make_shared<Motions>(channels_)) {
Device::Device(const Model &model,
const std::shared_ptr<uvc::device> &device,
const std::shared_ptr<StreamsAdapter> &streams_adapter,
const std::shared_ptr<ChannelsAdapter> &channels_adapter)
: video_streaming_(false),
motion_tracking_(false),
model_(model),
device_(device),
streams_(std::make_shared<Streams>(streams_adapter)),
channels_(std::make_shared<Channels>(device_, channels_adapter)),
motions_(std::make_shared<Motions>(channels_)) {
VLOG(2) << __func__;
ReadAllInfos();
}
@@ -100,14 +104,20 @@ std::shared_ptr<Device> Device::Create(
return std::make_shared<StandardDevice>(device);
} else if (strings::starts_with(name, "MYNT-EYE-")) {
// TODO(JohnZhao): Create different device by name, such as MYNT-EYE-S1000
std::string model_s = name.substr(9);
std::string model_s = name.substr(9, 5);
VLOG(2) << "MYNE EYE Model: " << model_s;
DeviceModel model(model_s);
switch (model.type) {
case 'S':
return std::make_shared<StandardDevice>(device);
default:
LOG(FATAL) << "MYNT EYE model is not supported now";
if (model.type == 'S') {
switch (model.generation) {
case '1':
return std::make_shared<StandardDevice>(device);
case '2':
return std::make_shared<Standard2Device>(device);
default:
LOG(FATAL) << "No such generation now";
}
} else {
LOG(FATAL) << "MYNT EYE model is not supported now";
}
}
return nullptr;
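
How the reworked name parsing above resolves a device (the names are illustrative; DeviceModel is assumed to take the first character of model_s as type and the second as generation):

// "MYNT-EYE-S1030" -> substr(9, 5) == "S1030" -> type 'S', generation '1' -> StandardDevice
// "MYNT-EYE-S210A" -> substr(9, 5) == "S210A" -> type 'S', generation '2' -> Standard2Device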
@@ -166,6 +176,33 @@ void Device::ConfigStreamRequest(
return;
}
stream_config_requests_[capability] = request;
UpdateStreamIntrinsics(capability, request);
}
const StreamRequest &Device::GetStreamRequest(
const Capabilities &capability) const {
try {
return stream_config_requests_.at(capability);
} catch (const std::out_of_range &e) {
auto &&requests = GetStreamRequests(capability);
if (requests.size() >= 1) {
return requests[0];
} else {
LOG(FATAL) << "Please config the stream request of " << capability;
}
}
}
const std::vector<StreamRequest> &Device::GetStreamRequests() const {
return GetStreamRequests(GetKeyStreamCapability());
}
void Device::ConfigStreamRequest(const StreamRequest &request) {
ConfigStreamRequest(GetKeyStreamCapability(), request);
}
const StreamRequest &Device::GetStreamRequest() const {
return GetStreamRequest(GetKeyStreamCapability());
}
std::shared_ptr<DeviceInfo> Device::GetInfo() const {
@@ -400,6 +437,14 @@ void Device::WaitForStreams() {
streams_->WaitForStreams();
}
device::StreamData Device::GetStreamData(const Stream &stream) {
CHECK(video_streaming_);
CHECK_NOTNULL(streams_);
CheckSupports(this, stream);
std::lock_guard<std::mutex> _(mtx_streams_);
return streams_->GetLatestStreamData(stream);
}
std::vector<device::StreamData> Device::GetStreamDatas(const Stream &stream) {
CHECK(video_streaming_);
CHECK_NOTNULL(streams_);
@@ -408,14 +453,6 @@ std::vector<device::StreamData> Device::GetStreamDatas(const Stream &stream) {
return streams_->GetStreamDatas(stream);
}
device::StreamData Device::GetLatestStreamData(const Stream &stream) {
CHECK(video_streaming_);
CHECK_NOTNULL(streams_);
CheckSupports(this, stream);
std::lock_guard<std::mutex> _(mtx_streams_);
return streams_->GetLatestStreamData(stream);
}
void Device::EnableMotionDatas() {
EnableMotionDatas(std::numeric_limits<std::size_t>::max());
}
@@ -431,51 +468,33 @@ std::vector<device::MotionData> Device::GetMotionDatas() {
return motions_->GetMotionDatas();
}
const StreamRequest &Device::GetStreamRequest(const Capabilities &capability) {
try {
return stream_config_requests_.at(capability);
} catch (const std::out_of_range &e) {
auto &&requests = GetStreamRequests(capability);
if (requests.size() >= 1) {
VLOG(2) << "Select the first one stream request of " << capability;
return requests[0];
} else {
LOG(FATAL) << "Please config the stream request of " << capability;
}
}
}
void Device::StartVideoStreaming() {
if (video_streaming_) {
LOG(WARNING) << "Cannot start video streaming without first stopping it";
return;
}
streams_ = std::make_shared<Streams>(GetKeyStreams());
// if stream capabilities are supported with subdevices of device_
/*
Capabilities stream_capabilities[] = {
Capabilities::STEREO,
Capabilities::COLOR,
Capabilities::DEPTH,
Capabilities::POINTS,
Capabilities::FISHEYE,
Capabilities::INFRARED,
Capabilities::INFRARED2
};
Capabilities::STEREO, Capabilities::STEREO_COLOR,
Capabilities::COLOR, Capabilities::DEPTH,
Capabilities::POINTS, Capabilities::FISHEYE,
Capabilities::INFRARED, Capabilities::INFRARED2};
for (auto &&capability : stream_capabilities) {
}
*/
if (Supports(Capabilities::STEREO)) {
auto &&stream_cap = GetKeyStreamCapability();
if (Supports(stream_cap)) {
// do stream request selection if more than one request of each stream
auto &&stream_request = GetStreamRequest(Capabilities::STEREO);
auto &&stream_request = GetStreamRequest(stream_cap);
streams_->ConfigStream(stream_cap, stream_request);
streams_->ConfigStream(Capabilities::STEREO, stream_request);
uvc::set_device_mode(
*device_, stream_request.width, stream_request.height,
static_cast<int>(stream_request.format), stream_request.fps,
[this](const void *data, std::function<void()> continuation) {
[this, stream_cap](
const void *data, std::function<void()> continuation) {
// drop the first stereo stream data
static std::uint8_t drop_count = 1;
if (drop_count > 0) {
@@ -486,7 +505,7 @@ void Device::StartVideoStreaming() {
// auto &&time_beg = times::now();
{
std::lock_guard<std::mutex> _(mtx_streams_);
if (streams_->PushStream(Capabilities::STEREO, data)) {
if (streams_->PushStream(stream_cap, data)) {
CallbackPushedStreamData(Stream::LEFT);
CallbackPushedStreamData(Stream::RIGHT);
}
@@ -543,9 +562,9 @@ void Device::ReadAllInfos() {
device_info_ = std::make_shared<DeviceInfo>();
CHECK_NOTNULL(channels_);
Channels::img_params_t img_params;
Channels::imu_params_t imu_params;
if (!channels_->GetFiles(device_info_.get(), &img_params, &imu_params)) {
all_img_params_.clear();
Device::imu_params_t imu_params;
if (!channels_->GetFiles(device_info_.get(), &all_img_params_, &imu_params)) {
#if defined(WITH_DEVICE_INFO_REQUIRED)
LOG(FATAL)
#else
@@ -566,18 +585,28 @@ void Device::ReadAllInfos() {
<< ", nominal_baseline: " << device_info_->nominal_baseline << "}";
device_info_->name = uvc::get_name(*device_);
if (img_params.ok) {
SetIntrinsics(Stream::LEFT, img_params.in_left);
SetIntrinsics(Stream::RIGHT, img_params.in_right);
SetExtrinsics(Stream::RIGHT, Stream::LEFT, img_params.ex_right_to_left);
VLOG(2) << "Intrinsics left: {" << GetIntrinsics(Stream::LEFT) << "}";
VLOG(2) << "Intrinsics right: {" << GetIntrinsics(Stream::RIGHT) << "}";
VLOG(2) << "Extrinsics right to left: {"
<< GetExtrinsics(Stream::RIGHT, Stream::LEFT) << "}";
} else {
bool img_params_ok = false;
for (auto &&params : all_img_params_) {
auto &&img_params = params.second;
if (img_params.ok) {
img_params_ok = true;
SetIntrinsics(Stream::LEFT, img_params.in_left);
SetIntrinsics(Stream::RIGHT, img_params.in_right);
SetExtrinsics(Stream::LEFT, Stream::RIGHT, img_params.ex_right_to_left);
VLOG(2) << "Intrinsics left: {" << GetIntrinsics(Stream::LEFT) << "}";
VLOG(2) << "Intrinsics right: {" << GetIntrinsics(Stream::RIGHT) << "}";
VLOG(2) << "Extrinsics left to right: {"
<< GetExtrinsics(Stream::LEFT, Stream::RIGHT) << "}";
break;
}
}
if (!img_params_ok) {
LOG(WARNING) << "Intrinsics & extrinsics not exist";
}
if (imu_params.ok) {
imu_params_ = imu_params;
SetMotionIntrinsics({imu_params.in_accel, imu_params.in_gyro});
SetMotionExtrinsics(Stream::LEFT, imu_params.ex_left_to_imu);
VLOG(2) << "Motion intrinsics: {" << GetMotionIntrinsics() << "}";
@@ -588,6 +617,28 @@ void Device::ReadAllInfos() {
}
}
void Device::UpdateStreamIntrinsics(
const Capabilities &capability, const StreamRequest &request) {
if (capability != GetKeyStreamCapability()) {
return;
}
for (auto &&params : all_img_params_) {
auto &&img_res = params.first;
auto &&img_params = params.second;
if (img_params.ok && img_res == request.GetResolution()) {
SetIntrinsics(Stream::LEFT, img_params.in_left);
SetIntrinsics(Stream::RIGHT, img_params.in_right);
SetExtrinsics(Stream::LEFT, Stream::RIGHT, img_params.ex_right_to_left);
VLOG(2) << "Intrinsics left: {" << GetIntrinsics(Stream::LEFT) << "}";
VLOG(2) << "Intrinsics right: {" << GetIntrinsics(Stream::RIGHT) << "}";
VLOG(2) << "Extrinsics left to right: {"
<< GetExtrinsics(Stream::LEFT, Stream::RIGHT) << "}";
break;
}
}
}
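
With calibration stored per resolution, configuring a request now also selects the matching intrinsics; a sketch using one of the STANDARD2 requests from config.cc above:

// device->ConfigStreamRequest(Capabilities::STEREO_COLOR, {2560, 800, Format::BGR888, 30});
//   -> UpdateStreamIntrinsics(...) finds all_img_params_[{2560, 800}]
//   -> SetIntrinsics(Stream::LEFT/RIGHT) and SetExtrinsics(Stream::LEFT, Stream::RIGHT) for that resolution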
void Device::CallbackPushedStreamData(const Stream &stream) {
if (HasStreamCallback(stream)) {
auto &&datas = streams_->stream_datas(stream);

View File

@@ -36,11 +36,11 @@ void Motions::SetMotionCallback(motion_callback_t callback) {
if (motion_callback_) {
accel_range = channels_->GetControlValue(Option::ACCELEROMETER_RANGE);
if (accel_range == -1)
accel_range = 8;
accel_range = channels_->GetAccelRangeDefault();
gyro_range = channels_->GetControlValue(Option::GYROSCOPE_RANGE);
if (gyro_range == -1)
gyro_range = 1000;
gyro_range = channels_->GetGyroRangeDefault();
channels_->SetImuCallback([this](const ImuPacket &packet) {
if (!motion_callback_ && !motion_datas_enabled_) {
@@ -48,19 +48,21 @@ void Motions::SetMotionCallback(motion_callback_t callback) {
}
for (auto &&seg : packet.segments) {
auto &&imu = std::make_shared<ImuData>();
imu->frame_id = seg.frame_id;
// imu->frame_id = seg.frame_id;
// if (seg.offset < 0 &&
// static_cast<uint32_t>(-seg.offset) > packet.timestamp) {
// LOG(WARNING) << "Imu timestamp offset is incorrect";
// }
imu->timestamp = packet.timestamp + seg.offset;
imu->frame_id = seg.frame_id;
imu->timestamp = seg.timestamp;
imu->flag = seg.flag;
imu->temperature = seg.temperature / 326.8f + 25;
imu->accel[0] = seg.accel[0] * 1.f * accel_range / 0x10000;
imu->accel[1] = seg.accel[1] * 1.f * accel_range / 0x10000;
imu->accel[2] = seg.accel[2] * 1.f * accel_range / 0x10000;
imu->gyro[0] = seg.gyro[0] * 1.f * gyro_range / 0x10000;
imu->gyro[1] = seg.gyro[1] * 1.f * gyro_range / 0x10000;
imu->gyro[2] = seg.gyro[2] * 1.f * gyro_range / 0x10000;
imu->temperature = seg.temperature / 326.8f + 25;
std::lock_guard<std::mutex> _(mtx_datas_);
motion_data_t data = {imu};
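
Scale factors implied by the conversions above (the range values now default via the adapter): one raw LSB maps to accel_range / 0x10000 on the accelerometer axes and gyro_range / 0x10000 on the gyroscope axes, for example:

// gyro_range = 1000:  raw 32768 -> 32768 * 1000 / 0x10000 = 500
// accel_range = 8:    raw 16384 -> 16384 * 8 / 0x10000    = 2
// temperature:        raw / 326.8f + 25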

View File

@@ -58,8 +58,8 @@ class Motions {
std::mutex mtx_datas_;
int accel_range = 8;
int gyro_range = 1000;
int accel_range;
int gyro_range;
};
MYNTEYE_END_NAMESPACE

View File

@@ -0,0 +1,163 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/device/standard/channels_adapter_s.h"
#include "mynteye/device/config.h"
#include "mynteye/logger.h"
MYNTEYE_BEGIN_NAMESPACE
namespace {
#pragma pack(push, 1)
struct ImuData {
std::int16_t offset;
std::uint16_t frame_id;
std::int16_t accel[3];
std::int16_t temperature;
std::int16_t gyro[3];
ImuData() = default;
explicit ImuData(const std::uint8_t *data) {
from_data(data);
}
void from_data(const std::uint8_t *data) {
offset = (*(data) << 8) | *(data + 1);
frame_id = (*(data + 2) << 8) | *(data + 3);
accel[0] = (*(data + 4) << 8) | *(data + 5);
accel[1] = (*(data + 6) << 8) | *(data + 7);
accel[2] = (*(data + 8) << 8) | *(data + 9);
temperature = (*(data + 10) << 8) | *(data + 11);
gyro[0] = (*(data + 12) << 8) | *(data + 13);
gyro[1] = (*(data + 14) << 8) | *(data + 15);
gyro[2] = (*(data + 16) << 8) | *(data + 17);
}
};
#pragma pack(pop)
void unpack_imu_segment(const ImuData &imu, const std::uint32_t &timestamp,
ImuSegment *seg) {
seg->frame_id = static_cast<uint32_t>(imu.frame_id);
seg->timestamp = static_cast<uint64_t>(timestamp + imu.offset) * 10;
seg->flag = 0;
seg->temperature = imu.temperature;
seg->accel[0] = imu.accel[0];
seg->accel[1] = imu.accel[1];
seg->accel[2] = imu.accel[2];
seg->gyro[0] = imu.gyro[0];
seg->gyro[1] = imu.gyro[1];
seg->gyro[2] = imu.gyro[2];
}
void unpack_imu_packet(const std::uint8_t *data, ImuPacket *pkg) {
pkg->serial_number =
(*(data) << 24) | (*(data + 1) << 16) |
(*(data + 2) << 8) | *(data + 3);
std::uint32_t timestamp =
(*(data + 4) << 24) | (*(data + 5) << 16)|
(*(data + 6) << 8) | *(data + 7);
pkg->count = *(data + 8);
std::size_t data_n = sizeof(ImuData); // 18
for (std::size_t i = 0; i < pkg->count; i++) {
ImuSegment seg;
unpack_imu_segment(ImuData(data + 9 + (data_n * i)), timestamp, &seg);
pkg->segments.push_back(seg);
}
}
void unpack_imu_res_packet(const std::uint8_t *data, ImuResPacket *res) {
res->header = *data;
res->state = *(data + 1);
res->size = (*(data + 2) << 8) | *(data + 3);
std::size_t data_n = sizeof(ImuData); // 18
for (std::size_t i = 4; i < res->size;) {
ImuPacket packet;
unpack_imu_packet(data + i, &packet);
res->packets.push_back(packet);
i += 9 + (packet.count * data_n);
}
res->checksum = *(data + 4 + res->size);
}
} // namespace
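
For reference, the wire layout these unpackers decode (sizes in bytes; multi-byte fields are big-endian):

// ImuResPacket: [0] header (0x5B expected)  [1] state  [2..3] size  [4, 4+size) packets  [4+size] checksum
// ImuPacket:    [0..3] serial_number  [4..7] timestamp  [8] count  then count x 18-byte ImuData segments
// ImuData (18): offset(2) frame_id(2) accel[3](6) temperature(2) gyro[3](6)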
StandardChannelsAdapter::StandardChannelsAdapter() {
}
StandardChannelsAdapter::~StandardChannelsAdapter() {
}
std::set<Option> StandardChannelsAdapter::GetOptionSupports() {
return option_supports_map.at(Model::STANDARD);
}
std::int32_t StandardChannelsAdapter::GetAccelRangeDefault() {
return 8;
}
std::vector<std::int32_t> StandardChannelsAdapter::GetAccelRangeValues() {
return {4, 8, 16, 32};
}
std::int32_t StandardChannelsAdapter::GetGyroRangeDefault() {
return 1000;
}
std::vector<std::int32_t> StandardChannelsAdapter::GetGyroRangeValues() {
return {500, 1000, 2000, 4000};
}
void StandardChannelsAdapter::GetImuResPacket(
const std::uint8_t *data, ImuResPacket *res) {
unpack_imu_res_packet(data, res);
}
std::size_t StandardChannelsAdapter::GetImgParamsFromData(
const std::uint8_t *data, const Version *version,
Channels::img_params_t *img_params) {
std::size_t i = 0;
Intrinsics in_left, in_right;
Extrinsics ex_right_to_left;
i += bytes::from_data(&in_left, data + i, version);
i += bytes::from_data(&in_right, data + i, version);
i += bytes::from_data(&ex_right_to_left, data + i, version);
(*img_params)[{752, 480}] = {true, in_left, in_right, ex_right_to_left};
return i;
}
std::size_t StandardChannelsAdapter::SetImgParamsToData(
const Channels::img_params_t *img_params, const Version *version,
std::uint8_t *data) {
std::size_t i = 3; // skip id, size
auto &&params = (*img_params).at({752, 480});
i += bytes::to_data(&params.in_left, data + i, version);
i += bytes::to_data(&params.in_right, data + i, version);
i += bytes::to_data(&params.ex_right_to_left, data + i, version);
// others
std::size_t size = i - 3;
data[0] = Channels::FID_IMG_PARAMS;
data[1] = static_cast<std::uint8_t>((size >> 8) & 0xFF);
data[2] = static_cast<std::uint8_t>(size & 0xFF);
return size + 3;
}
MYNTEYE_END_NAMESPACE

View File

@@ -0,0 +1,51 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_DEVICE_STANDARD_CHANNELS_ADAPTER_S_H_
#define MYNTEYE_DEVICE_STANDARD_CHANNELS_ADAPTER_S_H_
#pragma once
#include <cstdint>
#include <set>
#include <vector>
#include "mynteye/device/channels.h"
MYNTEYE_BEGIN_NAMESPACE
class StandardChannelsAdapter : public ChannelsAdapter {
public:
StandardChannelsAdapter();
virtual ~StandardChannelsAdapter();
std::set<Option> GetOptionSupports() override;
std::int32_t GetAccelRangeDefault() override;
std::vector<std::int32_t> GetAccelRangeValues() override;
std::int32_t GetGyroRangeDefault() override;
std::vector<std::int32_t> GetGyroRangeValues() override;
void GetImuResPacket(const std::uint8_t *data, ImuResPacket *res) override;
std::size_t GetImgParamsFromData(
const std::uint8_t *data, const Version *version,
Channels::img_params_t *img_params) override;
std::size_t SetImgParamsToData(
const Channels::img_params_t *img_params, const Version *version,
std::uint8_t *data) override;
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_DEVICE_STANDARD_CHANNELS_ADAPTER_S_H_

View File

@@ -11,15 +11,19 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/device/device_s.h"
#include "mynteye/device/standard/device_s.h"
#include "mynteye/logger.h"
#include "mynteye/device/motions.h"
#include "mynteye/device/standard/channels_adapter_s.h"
#include "mynteye/device/standard/streams_adapter_s.h"
MYNTEYE_BEGIN_NAMESPACE
StandardDevice::StandardDevice(std::shared_ptr<uvc::device> device)
: Device(Model::STANDARD, device) {
: Device(Model::STANDARD, device,
std::make_shared<StandardStreamsAdapter>(),
std::make_shared<StandardChannelsAdapter>()) {
VLOG(2) << __func__;
}
@@ -27,8 +31,8 @@ StandardDevice::~StandardDevice() {
VLOG(2) << __func__;
}
std::vector<Stream> StandardDevice::GetKeyStreams() const {
return {Stream::LEFT, Stream::RIGHT};
Capabilities StandardDevice::GetKeyStreamCapability() const {
return Capabilities::STEREO;
}
void StandardDevice::OnStereoStreamUpdate() {

View File

@@ -11,8 +11,8 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_DEVICE_DEVICE_S_H_
#define MYNTEYE_DEVICE_DEVICE_S_H_
#ifndef MYNTEYE_DEVICE_STANDARD_DEVICE_S_H_
#define MYNTEYE_DEVICE_STANDARD_DEVICE_S_H_
#pragma once
#include <memory>
@@ -27,11 +27,11 @@ class StandardDevice : public Device {
explicit StandardDevice(std::shared_ptr<uvc::device> device);
virtual ~StandardDevice();
std::vector<Stream> GetKeyStreams() const override;
Capabilities GetKeyStreamCapability() const override;
void OnStereoStreamUpdate() override;
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_DEVICE_DEVICE_S_H_
#endif // MYNTEYE_DEVICE_STANDARD_DEVICE_S_H_

View File

@@ -0,0 +1,163 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/device/standard/streams_adapter_s.h"
#include <iomanip>
#include "mynteye/logger.h"
#include "mynteye/device/types.h"
MYNTEYE_BEGIN_NAMESPACE
namespace {
// image info
#pragma pack(push, 1)
struct ImagePacket {
std::uint8_t header;
std::uint8_t size;
std::uint16_t frame_id;
std::uint32_t timestamp;
std::uint16_t exposure_time;
std::uint8_t checksum;
ImagePacket() = default;
explicit ImagePacket(std::uint8_t *data) {
from_data(data);
}
void from_data(std::uint8_t *data) {
header = *data;
size = *(data + 1);
frame_id = (*(data + 2) << 8) | *(data + 3);
timestamp = (*(data + 4) << 24) | (*(data + 5) << 16) | (*(data + 6) << 8) |
*(data + 7);
exposure_time = (*(data + 8) << 8) | *(data + 9);
checksum = *(data + 10);
}
};
#pragma pack(pop)
bool unpack_stereo_img_data(
const void *data, const StreamRequest &request, ImgData *img) {
CHECK_NOTNULL(img);
auto data_new = reinterpret_cast<const std::uint8_t *>(data);
std::size_t data_n =
request.width * request.height * bytes_per_pixel(request.format);
auto data_end = data_new + data_n;
std::size_t packet_n = sizeof(ImagePacket);
std::vector<std::uint8_t> packet(packet_n);
std::reverse_copy(data_end - packet_n, data_end, packet.begin());
ImagePacket img_packet(packet.data());
// LOG(INFO) << "ImagePacket: header=0x" << std::hex <<
// static_cast<int>(img_packet.header)
// << ", size=0x" << std::hex << static_cast<int>(img_packet.size)
// << ", frame_id="<< std::dec << img_packet.frame_id
// << ", timestamp="<< std::dec << img_packet.timestamp
// << ", exposure_time="<< std::dec << img_packet.exposure_time
// << ", checksum=0x" << std::hex << static_cast<int>(img_packet.checksum);
if (img_packet.header != 0x3B) {
VLOG(2) << "Image packet header must be 0x3B, but 0x" << std::hex
<< std::uppercase << std::setw(2) << std::setfill('0')
<< static_cast<int>(img_packet.header) << " now";
return false;
}
std::uint8_t checksum = 0;
for (std::size_t i = 2, n = packet_n - 2; i <= n; i++) { // content: [2,9]
checksum = (checksum ^ packet[i]);
}
if (img_packet.checksum != checksum) {
VLOG(2) << "Image packet checksum should be 0x" << std::hex
<< std::uppercase << std::setw(2) << std::setfill('0')
<< static_cast<int>(img_packet.checksum) << ", but 0x"
<< std::setw(2) << std::setfill('0') << static_cast<int>(checksum)
<< " now";
return false;
}
img->frame_id = img_packet.frame_id;
// make timestamp unit from 10us to 1us
img->timestamp = static_cast<uint64_t>(img_packet.timestamp) * 10;
img->exposure_time = img_packet.exposure_time;
return true;
}
// image pixels
bool unpack_left_img_pixels(
const void *data, const StreamRequest &request, Streams::frame_t *frame) {
CHECK_NOTNULL(frame);
CHECK_EQ(request.format, Format::YUYV);
CHECK_EQ(frame->format(), Format::GREY);
auto data_new = reinterpret_cast<const std::uint8_t *>(data);
std::size_t n = frame->width() * frame->height();
for (std::size_t i = 0; i < n; i++) {
frame->data()[i] = *(data_new + (i * 2));
}
return true;
}
bool unpack_right_img_pixels(
const void *data, const StreamRequest &request, Streams::frame_t *frame) {
CHECK_NOTNULL(frame);
CHECK_EQ(request.format, Format::YUYV);
CHECK_EQ(frame->format(), Format::GREY);
auto data_new = reinterpret_cast<const std::uint8_t *>(data);
std::size_t n = frame->width() * frame->height();
for (std::size_t i = 0; i < n; i++) {
frame->data()[i] = *(data_new + (i * 2 + 1));
}
return true;
}
} // namespace
StandardStreamsAdapter::StandardStreamsAdapter() {
}
StandardStreamsAdapter::~StandardStreamsAdapter() {
}
std::vector<Stream> StandardStreamsAdapter::GetKeyStreams() {
return {Stream::LEFT, Stream::RIGHT};
}
std::vector<Capabilities> StandardStreamsAdapter::GetStreamCapabilities() {
return {Capabilities::STEREO};
}
std::map<Stream, Streams::unpack_img_data_t>
StandardStreamsAdapter::GetUnpackImgDataMap() {
return {
{Stream::LEFT, unpack_stereo_img_data},
{Stream::RIGHT, unpack_stereo_img_data}
};
}
std::map<Stream, Streams::unpack_img_pixels_t>
StandardStreamsAdapter::GetUnpackImgPixelsMap() {
return {
{Stream::LEFT, unpack_left_img_pixels},
{Stream::RIGHT, unpack_right_img_pixels}
};
}
MYNTEYE_END_NAMESPACE
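The two pixel unpackers above treat the YUYV payload as interleaved grey pairs: even bytes feed the left image and odd bytes the right one. A standalone sketch of the same split, with illustrative names:
void split_yuyv_stereo(const std::uint8_t *yuyv, std::size_t pixels,
                       std::uint8_t *left, std::uint8_t *right) {
  for (std::size_t i = 0; i < pixels; i++) {  // pixels = width * height
    left[i] = yuyv[i * 2];       // even byte -> left grey value
    right[i] = yuyv[i * 2 + 1];  // odd byte  -> right grey value
  }
}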

View File

@@ -0,0 +1,42 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_DEVICE_STANDARD_STREAMS_ADAPTER_S_H_
#define MYNTEYE_DEVICE_STANDARD_STREAMS_ADAPTER_S_H_
#pragma once
#include <map>
#include <memory>
#include <vector>
#include "mynteye/device/streams.h"
MYNTEYE_BEGIN_NAMESPACE
class StandardStreamsAdapter : public StreamsAdapter {
public:
StandardStreamsAdapter();
virtual ~StandardStreamsAdapter();
std::vector<Stream> GetKeyStreams() override;
std::vector<Capabilities> GetStreamCapabilities() override;
std::map<Stream, Streams::unpack_img_data_t>
GetUnpackImgDataMap() override;
std::map<Stream, Streams::unpack_img_pixels_t>
GetUnpackImgPixelsMap() override;
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_DEVICE_STANDARD_STREAMS_ADAPTER_S_H_

View File

@@ -0,0 +1,173 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/device/standard2/channels_adapter_s2.h"
#include "mynteye/device/config.h"
#include "mynteye/logger.h"
MYNTEYE_BEGIN_NAMESPACE
namespace {
#pragma pack(push, 1)
struct ImuData {
std::uint32_t frame_id;
std::uint64_t timestamp;
std::uint8_t flag;
std::int16_t temperature;
std::int16_t accel_or_gyro[3];
ImuData() = default;
explicit ImuData(const std::uint8_t *data) {
from_data(data);
}
void from_data(const std::uint8_t *data) {
std::uint32_t timestamp_l;
std::uint32_t timestamp_h;
frame_id = (*(data) << 24) | (*(data + 1) << 16) | (*(data + 2) << 8) |
*(data + 3);
timestamp_h = (*(data + 4) << 24) | (*(data + 5) << 16) |
(*(data + 6) << 8) | *(data + 7);
timestamp_l = (*(data + 8) << 24) | (*(data + 9) << 16) |
(*(data + 10) << 8) | *(data + 11);
timestamp = (static_cast<std::uint64_t>(timestamp_h) << 32) | timestamp_l;
flag = *(data + 12);
temperature = (*(data + 13) << 8) | *(data + 14);
accel_or_gyro[0] = (*(data + 15) << 8) | *(data + 16);
accel_or_gyro[1] = (*(data + 17) << 8) | *(data + 18);
accel_or_gyro[2] = (*(data + 19) << 8) | *(data + 20);
}
};
#pragma pack(pop)
void unpack_imu_segment(const ImuData &imu, ImuSegment *seg) {
seg->frame_id = imu.frame_id;
seg->timestamp = imu.timestamp;
seg->flag = imu.flag;
seg->temperature = imu.temperature;
seg->accel[0] = (seg->flag == 1) ? imu.accel_or_gyro[0] : 0;
seg->accel[1] = (seg->flag == 1) ? imu.accel_or_gyro[1] : 0;
seg->accel[2] = (seg->flag == 1) ? imu.accel_or_gyro[2] : 0;
seg->gyro[0] = (seg->flag == 2) ? imu.accel_or_gyro[0] : 0;
seg->gyro[1] = (seg->flag == 2) ? imu.accel_or_gyro[1] : 0;
seg->gyro[2] = (seg->flag == 2) ? imu.accel_or_gyro[2] : 0;
}
void unpack_imu_packet(const std::uint8_t *data, ImuPacket *pkg) {
std::size_t data_n = sizeof(ImuData); // 21
for (std::size_t i = 0; i < pkg->count; i++) {
ImuSegment seg;
unpack_imu_segment(ImuData(data + data_n * i), &seg);
pkg->segments.push_back(seg);
}
pkg->serial_number = pkg->segments.back().frame_id;
}
void unpack_imu_res_packet(const std::uint8_t *data, ImuResPacket *res) {
res->header = *data;
res->state = *(data + 1);
res->size = (*(data + 2) << 8) | *(data + 3);
std::size_t data_n = sizeof(ImuData); // 21
ImuPacket packet;
packet.count = res->size / data_n;
unpack_imu_packet(data + 4, &packet);
res->packets.push_back(packet);
res->checksum = *(data + 4 + res->size);
}
} // namespace
Standard2ChannelsAdapter::Standard2ChannelsAdapter() {
}
Standard2ChannelsAdapter::~Standard2ChannelsAdapter() {
}
std::set<Option> Standard2ChannelsAdapter::GetOptionSupports() {
return option_supports_map.at(Model::STANDARD2);
}
std::int32_t Standard2ChannelsAdapter::GetAccelRangeDefault() {
return 12;
}
std::vector<std::int32_t> Standard2ChannelsAdapter::GetAccelRangeValues() {
return {6, 12, 24, 48};
}
std::int32_t Standard2ChannelsAdapter::GetGyroRangeDefault() {
return 1000;
}
std::vector<std::int32_t> Standard2ChannelsAdapter::GetGyroRangeValues() {
return {250, 500, 1000, 2000, 4000};
}
void Standard2ChannelsAdapter::GetImuResPacket(
const std::uint8_t *data, ImuResPacket *res) {
unpack_imu_res_packet(data, res);
}
std::size_t Standard2ChannelsAdapter::GetImgParamsFromData(
const std::uint8_t *data, const Version *version,
Channels::img_params_t *img_params) {
std::size_t i = 0;
Intrinsics in_left, in_right;
Extrinsics ex_right_to_left;
i += bytes::from_data(&in_left, data + i, version);
i += bytes::from_data(&in_right, data + i, version);
(*img_params)[{1280, 400}] = {true, in_left, in_right, ex_right_to_left};
i += bytes::from_data(&in_left, data + i, version);
i += bytes::from_data(&in_right, data + i, version);
(*img_params)[{2560, 800}] = {true, in_left, in_right, ex_right_to_left};
i += bytes::from_data(&ex_right_to_left, data + i, version);
(*img_params)[{1280, 400}].ex_right_to_left = ex_right_to_left;
(*img_params)[{2560, 800}].ex_right_to_left = ex_right_to_left;
return i;
}
std::size_t Standard2ChannelsAdapter::SetImgParamsToData(
const Channels::img_params_t *img_params, const Version *version,
std::uint8_t *data) {
std::size_t i = 3; // skip id, size
{
auto &&params = (*img_params).at({1280, 400});
i += bytes::to_data(&params.in_left, data + i, version);
i += bytes::to_data(&params.in_right, data + i, version);
}
{
auto &&params = (*img_params).at({2560, 800});
i += bytes::to_data(&params.in_left, data + i, version);
i += bytes::to_data(&params.in_right, data + i, version);
i += bytes::to_data(&params.ex_right_to_left, data + i, version);
}
// others
std::size_t size = i - 3;
data[0] = Channels::FID_IMG_PARAMS;
data[1] = static_cast<std::uint8_t>((size >> 8) & 0xFF);
data[2] = static_cast<std::uint8_t>(size & 0xFF);
return size + 3;
}
MYNTEYE_END_NAMESPACE
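With the S2 layout above, each raw sample is 4 (frame_id) + 8 (timestamp) + 1 (flag) + 2 (temperature) + 3 * 2 (accel or gyro) = 21 bytes, and a segment carries either an accel sample (flag == 1) or a gyro sample (flag == 2), never both. A small sketch of the segment-count arithmetic used by unpack_imu_res_packet (the helper name is illustrative):
std::size_t imu_segment_count(std::uint16_t payload_size) {
  constexpr std::size_t kImuDataBytes = 21;  // sizeof(ImuData) under #pragma pack(1)
  return payload_size / kImuDataBytes;       // res->size / sizeof(ImuData)
}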

View File

@@ -0,0 +1,51 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_DEVICE_STANDARD2_CHANNELS_ADAPTER_S2_H_
#define MYNTEYE_DEVICE_STANDARD2_CHANNELS_ADAPTER_S2_H_
#pragma once
#include <cstdint>
#include <set>
#include <vector>
#include "mynteye/device/channels.h"
MYNTEYE_BEGIN_NAMESPACE
class Standard2ChannelsAdapter : public ChannelsAdapter {
public:
Standard2ChannelsAdapter();
virtual ~Standard2ChannelsAdapter();
std::set<Option> GetOptionSupports() override;
std::int32_t GetAccelRangeDefault() override;
std::vector<std::int32_t> GetAccelRangeValues() override;
std::int32_t GetGyroRangeDefault() override;
std::vector<std::int32_t> GetGyroRangeValues() override;
void GetImuResPacket(const std::uint8_t *data, ImuResPacket *res) override;
std::size_t GetImgParamsFromData(
const std::uint8_t *data, const Version *version,
Channels::img_params_t *img_params) override;
std::size_t SetImgParamsToData(
const Channels::img_params_t *img_params, const Version *version,
std::uint8_t *data) override;
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_DEVICE_STANDARD2_CHANNELS_ADAPTER_S2_H_

View File

@@ -0,0 +1,45 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/device/standard2/device_s2.h"
#include "mynteye/logger.h"
#include "mynteye/device/motions.h"
#include "mynteye/device/standard2/channels_adapter_s2.h"
#include "mynteye/device/standard2/streams_adapter_s2.h"
MYNTEYE_BEGIN_NAMESPACE
Standard2Device::Standard2Device(std::shared_ptr<uvc::device> device)
: Device(Model::STANDARD2, device,
std::make_shared<Standard2StreamsAdapter>(),
std::make_shared<Standard2ChannelsAdapter>()) {
VLOG(2) << __func__;
}
Standard2Device::~Standard2Device() {
VLOG(2) << __func__;
}
Capabilities Standard2Device::GetKeyStreamCapability() const {
return Capabilities::STEREO_COLOR;
}
void Standard2Device::OnStereoStreamUpdate() {
if (motion_tracking_) {
auto &&motions = this->motions();
motions->DoMotionTrack();
}
}
MYNTEYE_END_NAMESPACE

View File

@@ -0,0 +1,37 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_DEVICE_STANDARD2_DEVICE_S2_H_
#define MYNTEYE_DEVICE_STANDARD2_DEVICE_S2_H_
#pragma once
#include <memory>
#include <vector>
#include "mynteye/device/device.h"
MYNTEYE_BEGIN_NAMESPACE
class Standard2Device : public Device {
public:
explicit Standard2Device(std::shared_ptr<uvc::device> device);
virtual ~Standard2Device();
Capabilities GetKeyStreamCapability() const override;
void OnStereoStreamUpdate() override;
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_DEVICE_STANDARD2_DEVICE_S2_H_

View File

@@ -0,0 +1,186 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/device/standard2/streams_adapter_s2.h"
#include <iomanip>
#include "mynteye/logger.h"
#include "mynteye/device/types.h"
MYNTEYE_BEGIN_NAMESPACE
namespace {
// image info
#pragma pack(push, 1)
struct ImagePacket {
std::uint8_t header;
std::uint8_t size;
std::uint16_t frame_id;
std::uint64_t timestamp;
std::uint16_t exposure_time;
std::uint8_t checksum;
ImagePacket() = default;
explicit ImagePacket(std::uint8_t *data) {
from_data(data);
}
void from_data(std::uint8_t *data) {
std::uint32_t timestamp_l;
std::uint32_t timestamp_h;
header = *data;
size = *(data + 1);
frame_id = (*(data + 2) << 8) | *(data + 3);
timestamp_h = (*(data + 4) << 24) | (*(data + 5) << 16) |
(*(data + 6) << 8) | *(data + 7);
timestamp_l = (*(data + 8) << 24) | (*(data + 9) << 16) |
(*(data + 10) << 8) | *(data + 11);
timestamp = (static_cast<std::uint64_t>(timestamp_h) << 32) | timestamp_l;
exposure_time = (*(data + 12) << 8) | *(data + 13);
checksum = *(data + 14);
}
};
#pragma pack(pop)
// image pixels
bool unpack_left_img_pixels(
const void *data, const StreamRequest &request, Streams::frame_t *frame) {
CHECK_NOTNULL(frame);
CHECK_EQ(request.format, Format::BGR888);
CHECK_EQ(frame->format(), Format::BGR888);
auto data_new = reinterpret_cast<const std::uint8_t *>(data);
std::size_t n = 3;
std::size_t w = frame->width();
std::size_t h = frame->height();
for (std::size_t i = 0; i < h; i++) {
for (std::size_t j = 0; j < w; j++) {
frame->data()[(i * w + j) * n] =
*(data_new + (2 * i * w + j) * n + 2);
frame->data()[(i * w + j) * n + 1] =
*(data_new + (2 * i * w + j) * n + 1);
frame->data()[(i * w + j) * n + 2] =
*(data_new + (2 * i * w + j) * n);
}
}
return true;
}
bool unpack_right_img_pixels(
const void *data, const StreamRequest &request, Streams::frame_t *frame) {
CHECK_NOTNULL(frame);
CHECK_EQ(request.format, Format::BGR888);
CHECK_EQ(frame->format(), Format::BGR888);
auto data_new = reinterpret_cast<const std::uint8_t *>(data);
std::size_t n = 3;
std::size_t w = frame->width();
std::size_t h = frame->height();
for (std::size_t i = 0; i < h; i++) {
for (std::size_t j = 0; j < w; j++) {
frame->data()[(i * w + j) * n] =
*(data_new + ((2 * i + 1) * w + j) * n + 2);
frame->data()[(i * w + j) * n + 1] =
*(data_new + ((2 * i + 1) * w + j) * n + 1);
frame->data()[(i * w + j) * n + 2] =
*(data_new + ((2 * i + 1) * w + j) * n);
}
}
return true;
}
bool unpack_stereo_img_data(
const void *data, const StreamRequest &request, ImgData *img) {
CHECK_NOTNULL(img);
auto data_new = reinterpret_cast<const std::uint8_t *>(data);
std::size_t data_n =
request.width * request.height * bytes_per_pixel(request.format);
auto data_end = data_new + data_n;
std::size_t packet_n = sizeof(ImagePacket);
std::vector<std::uint8_t> packet(packet_n);
std::reverse_copy(data_end - packet_n, data_end, packet.begin());
ImagePacket img_packet(packet.data());
// LOG(INFO) << "ImagePacket: header=0x" << std::hex <<
// static_cast<int>(img_packet.header)
// << ", size=0x" << std::hex << static_cast<int>(img_packet.size)
// << ", frame_id="<< std::dec << img_packet.frame_id
// << ", timestamp="<< std::dec << img_packet.timestamp
// << ", exposure_time="<< std::dec << img_packet.exposure_time
// << ", checksum=0x" << std::hex << static_cast<int>(img_packet.checksum);
if (img_packet.header != 0x3B) {
VLOG(2) << "Image packet header must be 0x3B, but 0x" << std::hex
<< std::uppercase << std::setw(2) << std::setfill('0')
<< static_cast<int>(img_packet.header) << " now";
return false;
}
std::uint8_t checksum = 0;
  for (std::size_t i = 2, n = packet_n - 2; i <= n; i++) { // content: [2,13]
checksum = (checksum ^ packet[i]);
}
/*
if (img_packet.checksum != checksum) {
VLOG(2) << "Image packet checksum should be 0x" << std::hex
<< std::uppercase << std::setw(2) << std::setfill('0')
<< static_cast<int>(img_packet.checksum) << ", but 0x"
<< std::setw(2) << std::setfill('0') << static_cast<int>(checksum)
<< " now";
return false;
}
*/
img->frame_id = img_packet.frame_id;
img->timestamp = img_packet.timestamp;
img->exposure_time = img_packet.exposure_time;
return true;
}
} // namespace
Standard2StreamsAdapter::Standard2StreamsAdapter() {
}
Standard2StreamsAdapter::~Standard2StreamsAdapter() {
}
std::vector<Stream> Standard2StreamsAdapter::GetKeyStreams() {
return {Stream::LEFT, Stream::RIGHT};
}
std::vector<Capabilities> Standard2StreamsAdapter::GetStreamCapabilities() {
return {Capabilities::STEREO_COLOR};
}
std::map<Stream, Streams::unpack_img_data_t>
Standard2StreamsAdapter::GetUnpackImgDataMap() {
return {
{Stream::LEFT, unpack_stereo_img_data},
{Stream::RIGHT, unpack_stereo_img_data}
};
}
std::map<Stream, Streams::unpack_img_pixels_t>
Standard2StreamsAdapter::GetUnpackImgPixelsMap() {
return {
{Stream::LEFT, unpack_left_img_pixels},
{Stream::RIGHT, unpack_right_img_pixels}
};
}
MYNTEYE_END_NAMESPACE
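The STEREO_COLOR pixel unpackers above read each full-width BGR888 row as a left half and a right half and reverse the channel order while copying, which is why the per-eye frames are allocated at half the requested width. A self-contained sketch of that split with illustrative names (w and h are the per-eye dimensions):
void split_side_by_side_bgr(const std::uint8_t *src, std::size_t w, std::size_t h,
                            std::uint8_t *left, std::uint8_t *right) {
  for (std::size_t i = 0; i < h; i++) {      // source row i is 2 * w pixels wide
    for (std::size_t j = 0; j < w; j++) {
      const std::uint8_t *l = src + (i * 2 * w + j) * 3;      // left half of row i
      const std::uint8_t *r = src + (i * 2 * w + w + j) * 3;  // right half of row i
      for (int c = 0; c < 3; c++) {
        left[(i * w + j) * 3 + c] = l[2 - c];   // reverse channel order
        right[(i * w + j) * 3 + c] = r[2 - c];
      }
    }
  }
}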

View File

@@ -0,0 +1,42 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_DEVICE_STANDARD2_STREAMS_ADAPTER_S2_H_
#define MYNTEYE_DEVICE_STANDARD2_STREAMS_ADAPTER_S2_H_
#pragma once
#include <map>
#include <memory>
#include <vector>
#include "mynteye/device/streams.h"
MYNTEYE_BEGIN_NAMESPACE
class Standard2StreamsAdapter : public StreamsAdapter {
public:
Standard2StreamsAdapter();
virtual ~Standard2StreamsAdapter();
std::vector<Stream> GetKeyStreams() override;
std::vector<Capabilities> GetStreamCapabilities() override;
std::map<Stream, Streams::unpack_img_data_t>
GetUnpackImgDataMap() override;
std::map<Stream, Streams::unpack_img_pixels_t>
GetUnpackImgPixelsMap() override;
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_DEVICE_STANDARD2_STREAMS_ADAPTER_S2_H_

View File

@@ -15,7 +15,6 @@
#include <algorithm>
#include <chrono>
#include <iomanip>
#include <stdexcept>
#include "mynteye/logger.h"
@@ -23,97 +22,11 @@
MYNTEYE_BEGIN_NAMESPACE
namespace {
bool unpack_stereo_img_data(
const void *data, const StreamRequest &request, ImgData *img) {
CHECK_NOTNULL(img);
CHECK_EQ(request.format, Format::YUYV);
auto data_new = reinterpret_cast<const std::uint8_t *>(data);
std::size_t data_n =
request.width * request.height * bytes_per_pixel(request.format);
auto data_end = data_new + data_n;
std::size_t packet_n = sizeof(ImagePacket);
std::vector<std::uint8_t> packet(packet_n);
std::reverse_copy(data_end - packet_n, data_end, packet.begin());
ImagePacket img_packet(packet.data());
// LOG(INFO) << "ImagePacket: header=0x" << std::hex <<
// static_cast<int>(img_packet.header)
// << ", size=0x" << std::hex << static_cast<int>(img_packet.size)
// << ", frame_id="<< std::dec << img_packet.frame_id
// << ", timestamp="<< std::dec << img_packet.timestamp
// << ", exposure_time="<< std::dec << img_packet.exposure_time
// << ", checksum=0x" << std::hex << static_cast<int>(img_packet.checksum);
if (img_packet.header != 0x3B) {
VLOG(2) << "Image packet header must be 0x3B, but 0x" << std::hex
<< std::uppercase << std::setw(2) << std::setfill('0')
<< static_cast<int>(img_packet.header) << " now";
return false;
}
std::uint8_t checksum = 0;
for (std::size_t i = 2, n = packet_n - 2; i <= n; i++) { // content: [2,9]
checksum = (checksum ^ packet[i]);
}
if (img_packet.checksum != checksum) {
VLOG(2) << "Image packet checksum should be 0x" << std::hex
<< std::uppercase << std::setw(2) << std::setfill('0')
<< static_cast<int>(img_packet.checksum) << ", but 0x"
<< std::setw(2) << std::setfill('0') << static_cast<int>(checksum)
<< " now";
return false;
}
img->frame_id = img_packet.frame_id;
img->timestamp = img_packet.timestamp;
img->exposure_time = img_packet.exposure_time;
return true;
}
bool unpack_left_img_pixels(
const void *data, const StreamRequest &request, Streams::frame_t *frame) {
CHECK_NOTNULL(frame);
CHECK_EQ(request.format, Format::YUYV);
CHECK_EQ(frame->format(), Format::GREY);
auto data_new = reinterpret_cast<const std::uint8_t *>(data);
std::size_t n = frame->width() * frame->height();
for (std::size_t i = 0; i < n; i++) {
frame->data()[i] = *(data_new + (i * 2));
}
return true;
}
bool unpack_right_img_pixels(
const void *data, const StreamRequest &request, Streams::frame_t *frame) {
CHECK_NOTNULL(frame);
CHECK_EQ(request.format, Format::YUYV);
CHECK_EQ(frame->format(), Format::GREY);
auto data_new = reinterpret_cast<const std::uint8_t *>(data);
std::size_t n = frame->width() * frame->height();
for (std::size_t i = 0; i < n; i++) {
frame->data()[i] = *(data_new + (i * 2 + 1));
}
return true;
}
} // namespace
Streams::Streams(const std::vector<Stream> key_streams)
: key_streams_(key_streams),
stream_capabilities_(
{Capabilities::STEREO, Capabilities::COLOR, Capabilities::DEPTH,
Capabilities::POINTS, Capabilities::FISHEYE, Capabilities::INFRARED,
Capabilities::INFRARED2}),
unpack_img_data_map_(
{{Stream::LEFT, unpack_stereo_img_data},
{Stream::RIGHT, unpack_stereo_img_data}}),
unpack_img_pixels_map_(
{{Stream::LEFT, unpack_left_img_pixels},
{Stream::RIGHT, unpack_right_img_pixels}}) {
Streams::Streams(const std::shared_ptr<StreamsAdapter> &adapter)
: key_streams_(std::move(adapter->GetKeyStreams())),
stream_capabilities_(std::move(adapter->GetStreamCapabilities())),
unpack_img_data_map_(std::move(adapter->GetUnpackImgDataMap())),
unpack_img_pixels_map_(std::move(adapter->GetUnpackImgPixelsMap())) {
VLOG(2) << __func__;
}
@@ -139,16 +52,17 @@ bool Streams::PushStream(const Capabilities &capability, const void *data) {
auto &&request = GetStreamConfigRequest(capability);
bool pushed = false;
switch (capability) {
case Capabilities::STEREO: {
case Capabilities::STEREO:
case Capabilities::STEREO_COLOR: {
// alloc left
AllocStreamData(Stream::LEFT, request, Format::GREY);
AllocStreamData(capability, Stream::LEFT, request);
auto &&left_data = stream_datas_map_[Stream::LEFT].back();
// unpack img data
if (unpack_img_data_map_[Stream::LEFT](
data, request, left_data.img.get())) {
left_data.frame_id = left_data.img->frame_id;
// alloc right
AllocStreamData(Stream::RIGHT, request, Format::GREY);
AllocStreamData(capability, Stream::RIGHT, request);
auto &&right_data = stream_datas_map_[Stream::RIGHT].back();
*right_data.img = *left_data.img;
right_data.frame_id = left_data.img->frame_id;
@@ -252,12 +166,16 @@ bool Streams::HasStreamDatas(const Stream &stream) const {
!stream_datas_map_.at(stream).empty();
}
void Streams::AllocStreamData(
void Streams::AllocStreamData(const Capabilities &capability,
const Stream &stream, const StreamRequest &request) {
AllocStreamData(stream, request, request.format);
auto format = request.format;
if (capability == Capabilities::STEREO) {
format = Format::GREY;
}
AllocStreamData(capability, stream, request, format);
}
void Streams::AllocStreamData(
void Streams::AllocStreamData(const Capabilities &capability,
const Stream &stream, const StreamRequest &request, const Format &format) {
stream_data_t data;
@@ -282,8 +200,12 @@ void Streams::AllocStreamData(
data.img = nullptr;
}
if (!data.frame) {
data.frame = std::make_shared<frame_t>(
request.width, request.height, format, nullptr);
auto width = request.width;
if (capability == Capabilities::STEREO_COLOR) {
width /= 2; // split to half
}
data.frame =
std::make_shared<frame_t>(width, request.height, format, nullptr);
}
data.frame_id = 0;
stream_datas_map_[stream].push_back(data);
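The reworked AllocStreamData above encodes two per-eye rules: STEREO frames are allocated as GREY at the requested size, while STEREO_COLOR frames keep the requested format but halve the width before the left/right split. A condensed sketch of that decision (assumed free helpers, not part of the class):
std::size_t per_eye_width(const Capabilities &capability, const StreamRequest &request) {
  return (capability == Capabilities::STEREO_COLOR) ? request.width / 2 : request.width;
}
Format per_eye_format(const Capabilities &capability, const StreamRequest &request) {
  return (capability == Capabilities::STEREO) ? Format::GREY : request.format;
}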

View File

@@ -18,6 +18,7 @@
#include <condition_variable>
#include <functional>
#include <map>
#include <memory>
#include <mutex>
#include <vector>
@@ -27,6 +28,8 @@
MYNTEYE_BEGIN_NAMESPACE
class StreamsAdapter;
class Streams {
public:
using frame_t = device::Frame;
@@ -38,7 +41,7 @@ class Streams {
using unpack_img_pixels_t = std::function<bool(
const void *data, const StreamRequest &request, frame_t *frame)>;
explicit Streams(const std::vector<Stream> key_streams);
explicit Streams(const std::shared_ptr<StreamsAdapter> &adapter);
~Streams();
void ConfigStream(
@@ -65,8 +68,9 @@ class Streams {
bool HasStreamDatas(const Stream &stream) const;
void AllocStreamData(const Stream &stream, const StreamRequest &request);
void AllocStreamData(
void AllocStreamData(const Capabilities &capability,
const Stream &stream, const StreamRequest &request);
void AllocStreamData(const Capabilities &capability,
const Stream &stream, const StreamRequest &request, const Format &format);
void DiscardStreamData(const Stream &stream);
@@ -88,6 +92,19 @@ class Streams {
std::condition_variable cv_;
};
class StreamsAdapter {
public:
virtual ~StreamsAdapter() {}
virtual std::vector<Stream> GetKeyStreams() = 0;
virtual std::vector<Capabilities> GetStreamCapabilities() = 0;
virtual std::map<Stream, Streams::unpack_img_data_t>
GetUnpackImgDataMap() = 0;
virtual std::map<Stream, Streams::unpack_img_pixels_t>
GetUnpackImgPixelsMap() = 0;
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_DEVICE_STREAMS_H_

View File

@@ -16,7 +16,6 @@
#pragma once
#include <cstdint>
#include <array>
#include <bitset>
#include <string>
@@ -139,36 +138,6 @@ struct MYNTEYE_API DeviceInfo {
std::uint16_t nominal_baseline;
};
/**
* @ingroup datatypes
* Image packet.
*/
#pragma pack(push, 1)
struct ImagePacket {
std::uint8_t header;
std::uint8_t size;
std::uint16_t frame_id;
std::uint32_t timestamp;
std::uint16_t exposure_time;
std::uint8_t checksum;
ImagePacket() = default;
explicit ImagePacket(std::uint8_t *data) {
from_data(data);
}
void from_data(std::uint8_t *data) {
header = *data;
size = *(data + 1);
frame_id = (*(data + 2) << 8) | *(data + 3);
timestamp = (*(data + 4) << 24) | (*(data + 5) << 16) | (*(data + 6) << 8) |
*(data + 7);
exposure_time = (*(data + 8) << 8) | *(data + 9);
checksum = *(data + 10);
}
};
#pragma pack(pop)
/**
* @ingroup datatypes
* Imu request packet.
@@ -199,28 +168,12 @@ struct ImuReqPacket {
*/
#pragma pack(push, 1)
struct ImuSegment {
std::int16_t offset;
std::uint16_t frame_id;
std::int16_t accel[3];
std::uint32_t frame_id;
std::uint64_t timestamp;
std::uint8_t flag;
std::int16_t temperature;
std::int16_t accel[3];
std::int16_t gyro[3];
ImuSegment() = default;
explicit ImuSegment(std::uint8_t *data) {
from_data(data);
}
void from_data(std::uint8_t *data) {
offset = (*(data) << 8) | *(data + 1);
frame_id = (*(data + 2) << 8) | *(data + 3);
accel[0] = (*(data + 4) << 8) | *(data + 5);
accel[1] = (*(data + 6) << 8) | *(data + 7);
accel[2] = (*(data + 8) << 8) | *(data + 9);
temperature = (*(data + 10) << 8) | *(data + 11);
gyro[0] = (*(data + 12) << 8) | *(data + 13);
gyro[1] = (*(data + 14) << 8) | *(data + 15);
gyro[2] = (*(data + 16) << 8) | *(data + 17);
}
};
#pragma pack(pop)
@@ -230,28 +183,10 @@ struct ImuSegment {
*/
#pragma pack(push, 1)
struct ImuPacket {
std::uint32_t serial_number;
std::uint32_t timestamp;
std::uint8_t version;
std::uint8_t count;
std::uint32_t serial_number;
std::vector<ImuSegment> segments;
ImuPacket() = default;
explicit ImuPacket(std::uint8_t *data) {
from_data(data);
}
void from_data(std::uint8_t *data) {
serial_number = (*(data) << 24) | (*(data + 1) << 16) | (*(data + 2) << 8) |
*(data + 3);
timestamp = (*(data + 4) << 24) | (*(data + 5) << 16) | (*(data + 6) << 8) |
*(data + 7);
count = *(data + 8);
std::size_t seg_n = sizeof(ImuSegment); // 18
for (std::size_t i = 0; i < count; i++) {
segments.push_back(ImuSegment(data + 9 + (seg_n * i)));
}
}
};
#pragma pack(pop)
@@ -261,31 +196,12 @@ struct ImuPacket {
*/
#pragma pack(push, 1)
struct ImuResPacket {
std::uint8_t version;
std::uint8_t header;
std::uint8_t state;
std::uint16_t size;
std::vector<ImuPacket> packets;
std::uint8_t checksum;
ImuResPacket() = default;
explicit ImuResPacket(std::uint8_t *data) {
from_data(data);
}
void from_data(std::uint8_t *data) {
header = *data;
state = *(data + 1);
size = (*(data + 2) << 8) | *(data + 3);
std::size_t seg_n = sizeof(ImuSegment); // 18
for (std::size_t i = 4; i < size;) {
ImuPacket packet(data + i);
packets.push_back(packet);
i += 9 + (packet.count * seg_n);
}
checksum = *(data + 4 + size);
}
};
#pragma pack(pop)

View File

@@ -20,6 +20,8 @@
#include "mynteye/device/context.h"
#include "mynteye/device/device.h"
#include "mynteye/logger.h"
MYNTEYE_BEGIN_NAMESPACE
namespace device {
@@ -29,14 +31,14 @@ std::shared_ptr<Device> select() {
Context context;
auto &&devices = context.devices();
size_t n = devices.size();
std::size_t n = devices.size();
if (n <= 0) {
LOG(ERROR) << "No MYNT EYE devices :(";
return nullptr;
}
LOG(INFO) << "MYNT EYE devices:";
for (size_t i = 0; i < n; i++) {
for (std::size_t i = 0; i < n; i++) {
auto &&device = devices[i];
LOG(INFO) << " index: " << i
<< ", name: " << device->GetInfo(Info::DEVICE_NAME)
@@ -49,7 +51,7 @@ std::shared_ptr<Device> select() {
LOG(INFO) << "Only one MYNT EYE device, select index: 0";
} else {
while (true) {
size_t i;
std::size_t i;
LOG(INFO) << "There are " << n << " MYNT EYE devices, select index: ";
std::cin >> i;
if (i >= n) {
@@ -64,6 +66,42 @@ std::shared_ptr<Device> select() {
return device;
}
MYNTEYE_NAMESPACE::StreamRequest select_request(
const std::shared_ptr<Device> &device, bool *ok) {
auto &&requests = device->GetStreamRequests();
std::size_t n = requests.size();
if (n <= 0) {
LOG(ERROR) << "No MYNT EYE devices :(";
*ok = false;
return {};
}
LOG(INFO) << "MYNT EYE devices:";
for (std::size_t i = 0; i < n; i++) {
auto &&request = requests[i];
LOG(INFO) << " index: " << i
<< ", request: " << request;
}
if (n <= 1) {
LOG(INFO) << "Only one stream request, select index: 0";
*ok = true;
return requests[0];
} else {
while (true) {
std::size_t i;
LOG(INFO) << "There are " << n << " stream requests, select index: ";
std::cin >> i;
if (i >= n) {
LOG(WARNING) << "Index out of range :(";
continue;
}
*ok = true;
return requests[i];
}
}
}
} // namespace device
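A possible caller-side flow for the new select_request helper, matching the prompts it prints (the single-request ConfigStreamRequest call on the device is an assumption here, and error handling is illustrative only):
auto device = device::select();
if (device) {
  bool ok = false;
  auto request = device::select_request(device, &ok);
  if (ok) {
    device->ConfigStreamRequest(request);  // assumed overload taking the chosen request
  }
}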
namespace utils {

View File

@@ -29,6 +29,7 @@ const char *to_string(const Model &value) {
return "Model::" #X;
switch (value) {
CASE(STANDARD)
CASE(STANDARD2)
default:
CHECK(is_valid(value));
return "Model::UNKNOWN";
@@ -62,6 +63,7 @@ const char *to_string(const Capabilities &value) {
return "Capabilities::" #X;
switch (value) {
CASE(STEREO)
CASE(STEREO_COLOR)
CASE(COLOR)
CASE(DEPTH)
CASE(POINTS)
@@ -109,13 +111,16 @@ const char *to_string(const Option &value) {
CASE(EXPOSURE_MODE)
CASE(MAX_GAIN)
CASE(MAX_EXPOSURE_TIME)
CASE(MIN_EXPOSURE_TIME)
CASE(DESIRED_BRIGHTNESS)
CASE(IR_CONTROL)
CASE(HDR_MODE)
CASE(ZERO_DRIFT_CALIBRATION)
CASE(ERASE_CHIP)
CASE(ACCELEROMETER_RANGE)
CASE(GYROSCOPE_RANGE)
CASE(ACCELEROMETER_LOW_PASS_FILTER)
CASE(GYROSCOPE_LOW_PASS_FILTER)
CASE(ZERO_DRIFT_CALIBRATION)
CASE(ERASE_CHIP)
default:
CHECK(is_valid(value));
return "Option::UNKNOWN";
@@ -157,6 +162,7 @@ const char *to_string(const Format &value) {
switch (value) {
CASE(GREY)
CASE(YUYV)
CASE(BGR888)
default:
return "Format::UNKNOWN";
}
@@ -169,6 +175,8 @@ std::size_t bytes_per_pixel(const Format &value) {
return 1;
case Format::YUYV:
return 2;
case Format::BGR888:
return 3;
default:
LOG(FATAL) << "Unknown format";
}
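A quick sanity check for the new BGR888 entry: at 3 bytes per pixel, one 2560 x 800 STEREO_COLOR frame (the resolution registered by the S2 image-params adapter above) carries 2560 * 800 * 3 = 6,144,000 bytes of pixel data.
std::size_t frame_bytes = 2560 * 800 * bytes_per_pixel(Format::BGR888);  // 6,144,000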

View File

@@ -46,9 +46,10 @@ namespace uvc {
} while (0)
#define NO_DATA_MAX_COUNT 200
#define LIVING_MAX_COUNT 9000
int no_data_count = 0;
int living_count = 0;
/*
class device_error : public std::exception {
public:
@@ -394,6 +395,12 @@ struct device {
if (xioctl(fd, VIDIOC_QBUF, &buf) < 0)
throw_error("VIDIOC_QBUF");
});
if (living_count < LIVING_MAX_COUNT) {
living_count++;
} else {
living_count = 0;
// LOG(INFO) << "UVC pulse detection,Please ignore.";
}
}
no_data_count = 0;
@@ -402,7 +409,12 @@ struct device {
}
if (no_data_count > NO_DATA_MAX_COUNT) {
throw_error("v4l2 get stream time out!");
no_data_count = 0;
living_count = 0;
LOG(WARNING) << __func__
<< " failed: v4l2 get stream time out, Try to reboot!";
stop_capture();
start_capture();
}
}

View File

@@ -64,7 +64,8 @@ namespace uvc {
const std::map<uint32_t, uint32_t> fourcc_map = {
{ 0x56595559, 0x32595559 }, // 'VYUY' => '2YUY'
{ 0x59555956, 0x59555932 } // 'YUYV' => 'YUY2'
{ 0x59555956, 0x59555932 }, // 'YUYV' => 'YUY2'
    { 0x33524742, 0x14 }  // 'BGR3' => MFVideoFormat_RGB24
};
struct throw_error {
@@ -756,6 +757,7 @@ void set_device_mode(device &device, int width, int height, int fourcc, int fps,
check("IMFMediaType::GetGUID", media_type->GetGUID(MF_MT_SUBTYPE, &subtype));
if (subtype.Data1 != fourcc) continue;
check("MFSetAttributeRatio", MFSetAttributeRatio(media_type, MF_MT_FRAME_RATE, fps, 1));
check("MFGetAttributeRatio", MFGetAttributeRatio(media_type, MF_MT_FRAME_RATE, &uvc_fps_num, &uvc_fps_denom));
if (uvc_fps_denom == 0) continue;
//int uvc_fps = uvc_fps_num / uvc_fps_denom;