Merge branch 'devel-s210a-merge' into develop

This commit is contained in:
John Zhao 2018-12-24 10:41:40 +08:00
commit 7e42cd65b1
89 changed files with 2987 additions and 1338 deletions

11
.commitlintrc.js Normal file
View File

@ -0,0 +1,11 @@
// commitlint configuration: enforces Conventional Commits style commit messages.
// Severity levels per commitlint convention: 0 = disabled, 2 = error.
module.exports = {
// Inherit the standard conventional-commits rule set.
extends: ['@commitlint/config-conventional'],
rules: {
// Restrict the commit type to this fixed set; anything else is rejected.
'type-enum': [2, 'always', [
"feat", "fix", "docs", "style", "refactor", "perf", "test", "build", "ci", "chore", "revert"
]],
// A scope is mandatory, e.g. "fix(device): ...".
'scope-empty': [2, 'never'],
// Trailing period and subject casing checks are disabled.
'subject-full-stop': [0, 'never'],
'subject-case': [0, 'never']
}
};

3
.gitignore vendored
View File

@ -21,6 +21,9 @@ _output/
/*.nsi
/*.exe
/node_modules/
/package-lock.json
# ros
/wrappers/ros/build

View File

@ -106,7 +106,6 @@ set_outdir(
)
## main
if(WITH_GLOG)
add_executable(main src/main.cc)
target_link_libraries(main glog::glog)
@ -124,6 +123,11 @@ if(NOT WITH_GLOG AND NOT OS_WIN)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${__MINIGLOG_FLAGS}")
unset(__MINIGLOG_FLAGS)
endif()
if(NOT WITH_GLOG)
list(APPEND MYNTEYE_PUBLIC_H
${CMAKE_CURRENT_SOURCE_DIR}/include/mynteye/miniglog.h
)
endif()
if(OS_WIN)
set(UVC_SRC src/mynteye/uvc/win/uvc-wmf.cc)
@ -146,7 +150,7 @@ elseif(OS_MAC)
SET(OSX_EXTRA_LIBS ${VVUVCKIT_LIBRARY} ${USB_LIBRARY})
set(UVC_SRC src/mynteye/uvc/macosx/CameraEngine.cpp src/mynteye/uvc/macosx/AVfoundationCamera.mm src/mynteye/uvc/macosx/uvc-vvuvckit.cc )
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -framework CoreFoundation -framework AVFoundation -framework IOKit -framework AppKit -framework Cocoa -framework CoreMedia -framework CoreData -framework Foundation -framework CoreVideo ${__MACUVCLOG_FLAGS}")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -framework CoreFoundation -framework AVFoundation -framework IOKit -framework AppKit -framework Cocoa -framework CoreMedia -framework CoreData -framework Foundation -framework CoreVideo ${__MACUVCLOG_FLAGS}")
find_package(libuvc REQUIRED)
set(UVC_LIB ${libuvc_LIBRARIES})
@ -167,8 +171,13 @@ set(MYNTEYE_SRCS
src/mynteye/device/config.cc
src/mynteye/device/context.cc
src/mynteye/device/device.cc
src/mynteye/device/device_s.cc
src/mynteye/device/motions.cc
src/mynteye/device/standard/channels_adapter_s.cc
src/mynteye/device/standard/device_s.cc
src/mynteye/device/standard/streams_adapter_s.cc
src/mynteye/device/standard2/channels_adapter_s2.cc
src/mynteye/device/standard2/device_s2.cc
src/mynteye/device/standard2/streams_adapter_s2.cc
src/mynteye/device/streams.cc
src/mynteye/device/types.cc
src/mynteye/device/utils.cc

View File

@ -21,7 +21,6 @@ include(${CMAKE_CURRENT_LIST_DIR}/Utils.cmake)
# build components
option(WITH_API "Build with API layer, need OpenCV" ON)
option(WITH_DEVICE_INFO_REQUIRED "Build with device info required" ON)
# 3rdparty components
@ -32,7 +31,6 @@ option(WITH_BOOST "Include Boost support" ON)
# Ubuntu: `sudo apt-get install libgoogle-glog-dev`
option(WITH_GLOG "Include glog support" OFF)
# packages
if(WITH_API)

View File

@ -6,25 +6,22 @@
| 名称 | 字段 | 字节数 | 默认值 | 最小值 | 最大值 | 是否储存 | Flash 地址 | 说明 |
| :----- | :----- | :-------- | :-------- | :-------- | :-------- | :----------- | :----------- | :----- |
| 增益 | gain | 2 | 24 | 0 | 48 | √ | 0x12 | 关闭自动曝光,手动设定的参数 |
| 亮度 | brightness/exposure_time | 2 | 120 | 0 | 240 | √ | 0x14 | 关闭自动曝光,手动设定的参数 |
| 对比度 | contrast/black_level_calibration | 2 | 127 | 0 | 255 | √ | 0x10 | 关闭自动曝光,手动设定的参数 |
| 亮度 | brightness | 2 | 192 | 0 | 255 | √ | 0x14 | 关闭自动曝光,手动设定的参数 |
> UVC 标准协议实现的控制,有现成的 API 进行 Get & Set ,包括 Min, Max, Default 。
## 自定义协议
| 名称 | 字段 | 字节数 | 默认值 | 最小值 | 最大值 | 是否储存 | Flash 地址 | 所属通道 | 说明 |
| :----- | :----- | :-------- | :-------- | :-------- | :-------- | :----------- | :----------- | :----------- | :----- |
| 图像帧率 | frame_rate | 2 | 25 | 10 | 60 | √ | 0x21 | XU_CAM_CTRL | 步进为5即有效值为{10,15,20,25,30,35,40,45,50,55,60} |
| IMU 频率 | imu_frequency | 2 | 200 | 100 | 500 | √ | 0x23 | XU_CAM_CTRL | 有效值为{100,200,250,333,500} |
| 曝光模式 | exposure_mode | 1 | 0 | 0 | 1 | √ | 0x0F | XU_CAM_CTRL | 0开启自动曝光 1关闭 |
| 最大增益 | max_gain | 2 | 48 | 0 | 48 | √ | 0x1D | XU_CAM_CTRL | 开始自动曝光,可设定的阈值 |
| 最大曝光时间 | max_exposure_time | 2 | 240 | 0 | 240 | √ | 0x1B | XU_CAM_CTRL | 开始自动曝光,可设定的阈值 |
| 期望亮度 | desired_brightness | 2 | 192 | 0 | 255 | √ | 0x19 | XU_CAM_CTRL | 开始自动曝光,可设定的阈值 |
| IR 控制 | ir_control | 1 | 0 | 0 | 160 | × | - | XU_CAM_CTRL | |
| HDR 模式 | hdr_mode | 1 | 0 | 0 | 1 | √ | 0x1F | XU_CAM_CTRL | 0: 10-bit; 1: 12-bit |
| 零漂标定 | zero_drift_calibration | | - | - | - | × | - | XU_HALF_DUPLEX | |
| 擦除芯片 | erase_chip | | - | - | - | × | - | XU_HALF_DUPLEX | |
| 名称 | 字段 | 字节数 | 默认值 | 最小值 | 最大值 | 是否储存 | Flash 地址 | 所属通道 | 通道地址 | 说明 |
| :----- | :----- | :-------- | :-------- | :-------- | :-------- | :----------- | :----------- | :----------- | :----------- | :----- |
| 曝光模式 | exposure_mode | 1 | 0 | 0 | 1 | √ | 0x0F | XU_CAM_CTRL | 0x0100 | 0开启自动曝光 1关闭 |
| 最大增益 | max_gain | 2 | 8 | 0 | 255 | √ | 0x1D | XU_CAM_CTRL | 0x0100 | 开始自动曝光,可设定的阈值 |
| 最大曝光时间 | max_exposure_time | 2 | 333 | 0 | 1000 | √ | 0x1B | XU_CAM_CTRL | 0x0100 | 开始自动曝光,可设定的阈值 |
| 期望亮度 | desired_brightness | 2 | 122 | 1 | 255 | √ | 0x19 | XU_CAM_CTRL | 0x0100 | |
| 擦除芯片 | erase_chip | | - | - | - | × | - | XU_HALF_DUPLEX | 0x0200 | |
| 最小曝光时间 | min_exposure_time | 2 | 0 | 0 | 1000 | √ | - | XU_CAM_CTRL | 0x0100 | 开始自动曝光,可设定的阈值 |
| 加速度计量程 | accelerometer_range | 2 | 12 | 6 | 48 | √ | - | XU_CAM_CTRL | 0x0100 | |
| 陀螺仪量程 | gyroscope_range | 2 | 1000 | 250 | 4000 | √ | - | XU_CAM_CTRL | 0x0100 | |
| 加速度计低通滤波 | accelerometer_low_pass_filter | 2 | 2 | 0 | 2 | √ | - | XU_CAM_CTRL | 0x0100 | |
| 陀螺仪低通滤波 | gyroscope_low_pass_filter | 2 | 64 | 23 | 64 | √ | - | XU_CAM_CTRL | 0x0100 | |

View File

@ -49,5 +49,5 @@
| File | ID | Max Size |
| :--- | :- | :------- |
| 硬件信息 | 1 | 250 |
| 图像参数 | 2 | 250 |
| 图像参数 | 2 | 404 |
| IMU 参数 | 4 | 500 |

View File

@ -3,20 +3,19 @@
| 名称 | 字段 | 单位 | 字节数 | 说明 |
| :----- | :----- | :----- | :-------- | :----- |
| 帧 ID | frame_id | - | 2 | uint16_t; [0,65535] |
| 时间戳 | timestamp | 10 us | 4 | uint32_t |
| 曝光时间 | exposure_time | 10 us | 2 | uint16_t |
| 时间戳 | timestamp | 1 us | 8 | uint64_t |
| 曝光时间 | exposure_time | 1 us | 2 | uint16_t |
> 图像数据传输方式:倒序排在图像尾部。
## 图像数据包
| Name | Header | Size | Frame ID | Timestamp | Exposure Time | Checksum |
| :--- | :----- | :--- | :------- | :-------- | :------------ | :------- |
| 字节数 | 1 | 1 | 2 | 4 | 2 | 1 |
| 类型 | uint8_t | uint8_t | uint16_t | uint32_t | uint16_t | uint8_t |
| 描述 | 0x3B | 0x08 (数据内容大小) | 帧 ID | 时间戳 | 曝光时间 | 校验码(数据内容所有字节异或) |
| Name | Header | Size | FrameID | Timestamp | ExposureTime | Checksum |
| :--- | :----- | :--- | :------ | :-------- | :----------- | :------- |
| 字节数 | 1 | 1 | 2 | 8 | 2 | 1 |
| 类型 | uint8_t | uint8_t | uint16_t | uint64_t | uint16_t | uint8_t |
| 描述 | 0x3B | 0x10 (数据内容大小) | 帧 ID | 时间戳 | 曝光时间 | 校验码(数据内容所有字节异或) |
* 数据包校验不过,会丢弃该帧。
* 时间单位的精度为: 0.01 ms / 10 us 。
* 4 字节能表示的最大时间约是 11.9 小时,溢出后将重累计。
* 时间的单位精度为: 1 us 。
* 时间累计是从上电时开始,而不是从打开时开始。

View File

@ -10,7 +10,7 @@
## IMU 响应数据包
IMU 响应数据包里会包含个 IMU 包,而每个 IMU 包又带有多个 IMU 段。
IMU 响应数据包里会包含1个 IMU 包,而每个 IMU 包又带有多个 IMU 段。
| Name | Header | State | Size | IMU Packets | Checksum |
| :--- | :----- | :---- | :--- | :---------- | :------- |
@ -22,21 +22,21 @@ IMU 响应数据包里会包含多个 IMU 包,而每个 IMU 包又带有多个
IMU 包/小包,是一组 IMU 数据。
| Name | Serial Number | Timestamp | Count | IMU Datas |
| :--- | :------------ | :-------- | :---- | :-------- |
| 字节数 | 4 | 4 | 1 | ... |
| 类型 | uint32_t | uint32_t | uint8_t | - |
| 描述 | 序列号 | IMU 基准时间戳 | IMU 段数量 | 所包含的 IMU 段 |
| Name | Count | IMU Datas |
| :--- | :-----| :-------- |
| 字节数 | 2 | ... |
| 类型 | uint16_t | - |
| 描述 | IMU 段数量 | 所包含的 IMU 段 |
### IMU 段
| Name | Offset | Frame ID | Accelerometer | Temperature | Gyroscope |
| :--- | :----- | :------- | :------------ | :---------- | :-------- |
| 字节数 | 2 | 2 | 6 | 2 | 6 |
| 类型 | int16_t | uint16_t | int16_t * 3 | int16_t | int16_t * 3 |
| 描述 | 相对基准时间戳的偏移量 | 图像帧 ID | 加速度计 x y z 三轴的值 | IMU 的温度 | 陀螺仪 x y z 三轴的值 |
| Name | Serial Number | Timestamp | flag | Temperature | Accelerometer or Gyroscope |
| :--- | :------------ | :-------- | :----| :----------- | :------------------------- |
| 字节数 | 4 | 8 | 1 | 2 | 6 |
| 类型 | uint32_t | uint64_t | int8_t | int16_t | int16_t * 3 |
| 描述 | 序列号 | 时间戳 | 指定传感器类型 | IMU 的温度 | 加速度计或陀螺仪 x y z 三轴的值 |
* 加速度计和陀螺仪的计量值换算成物理值公式: **real = data * range / 0x10000**
* 加速度计量程默认值为 **8 g** ,陀螺仪量程默认值为 **1000 deg/s**
* 加速度计量程默认值为 **12 g** ,陀螺仪量程默认值为 **1000 deg/s**
* 温度计量值换算成物理值公式: **real = data / ratio + offset**
* ``ratio`` 默认值为 **326.8** ``offset`` 默认值为 **25℃**

View File

@ -28,6 +28,8 @@
MYNTEYE_BEGIN_NAMESPACE
struct DeviceInfo;
class Device;
class Synthetic;
@ -72,8 +74,7 @@ struct MYNTEYE_API MotionData {
bool operator==(const MotionData &other) const {
if (imu && other.imu) {
return imu->frame_id == other.imu->frame_id &&
imu->timestamp == other.imu->timestamp;
return imu->timestamp == other.imu->timestamp;
}
return false;
}
@ -94,18 +95,6 @@ class MYNTEYE_API API {
explicit API(std::shared_ptr<Device> device);
virtual ~API();
/**
* Create the API instance.
* @return the API instance.
* @note This will call device::select() to select a device.
*/
static std::shared_ptr<API> Create();
/**
* Create the API instance.
* @param device the selected device.
* @return the API instance.
*/
static std::shared_ptr<API> Create(std::shared_ptr<Device> device);
/**
* Create the API instance.
* @param argc the arg count.
@ -124,7 +113,13 @@ class MYNTEYE_API API {
* @note This will init glog with args.
*/
static std::shared_ptr<API> Create(
int argc, char *argv[], std::shared_ptr<Device> device);
int argc, char *argv[], const std::shared_ptr<Device> &device);
/**
* Create the API instance.
* @param device the selected device.
* @return the API instance.
*/
static std::shared_ptr<API> Create(const std::shared_ptr<Device> &device);
/**
* Get the model.
@ -148,6 +143,11 @@ class MYNTEYE_API API {
*/
bool Supports(const AddOns &addon) const;
/**
* Log all stream requests and prompt user to select one.
*/
StreamRequest SelectStreamRequest(bool *ok) const;
/**
* Get all stream requests of the capability.
*/
@ -158,7 +158,28 @@ class MYNTEYE_API API {
*/
void ConfigStreamRequest(
const Capabilities &capability, const StreamRequest &request);
/**
* Get the config stream requests of the capability.
*/
const StreamRequest &GetStreamRequest(const Capabilities &capability) const;
/**
* Get all stream requests of the key stream capability.
*/
const std::vector<StreamRequest> &GetStreamRequests() const;
/**
* Config the stream request to the key stream capability.
*/
void ConfigStreamRequest(const StreamRequest &request);
/**
* Get the config stream requests of the key stream capability.
*/
const StreamRequest &GetStreamRequest() const;
/**
* Get the device info.
*/
std::shared_ptr<DeviceInfo> GetInfo() const;
/**
* Get the device info.
*/
@ -279,6 +300,8 @@ class MYNTEYE_API API {
std::shared_ptr<Device> device_;
std::unique_ptr<Synthetic> synthetic_;
void CheckImageParams();
};
MYNTEYE_END_NAMESPACE

View File

@ -19,6 +19,7 @@
#include <algorithm>
#include <functional>
#include <map>
#include <memory>
#include <vector>
@ -29,6 +30,20 @@ MYNTEYE_BEGIN_NAMESPACE
namespace device {
typedef struct ImgParams {
bool ok;
Intrinsics in_left;
Intrinsics in_right;
Extrinsics ex_right_to_left;
} img_params_t;
typedef struct ImuParams {
bool ok;
ImuIntrinsics in_accel;
ImuIntrinsics in_gyro;
Extrinsics ex_left_to_imu;
} imu_params_t;
/**
* @ingroup datatypes
* Frame with raw data.

View File

@ -43,8 +43,10 @@ struct DeviceInfo;
class API;
class Channels;
class ChannelsAdapter;
class Motions;
class Streams;
class StreamsAdapter;
template <class Data>
class AsyncCallback;
@ -66,7 +68,16 @@ class MYNTEYE_API Device {
using stream_async_callback_ptr_t = std::shared_ptr<stream_async_callback_t>;
using motion_async_callback_ptr_t = std::shared_ptr<motion_async_callback_t>;
Device(const Model &model, std::shared_ptr<uvc::device> device);
using img_params_t = device::img_params_t;
using imu_params_t = device::imu_params_t;
protected:
Device(const Model &model,
const std::shared_ptr<uvc::device> &device,
const std::shared_ptr<StreamsAdapter> &streams_adapter,
const std::shared_ptr<ChannelsAdapter> &channels_adapter);
public:
virtual ~Device();
/**
@ -112,6 +123,23 @@ class MYNTEYE_API Device {
*/
void ConfigStreamRequest(
const Capabilities &capability, const StreamRequest &request);
/**
* Get the config stream requests of the capability.
*/
const StreamRequest &GetStreamRequest(const Capabilities &capability) const;
/**
* Get all stream requests of the key stream capability.
*/
const std::vector<StreamRequest> &GetStreamRequests() const;
/**
* Config the stream request to the key stream capability.
*/
void ConfigStreamRequest(const StreamRequest &request);
/**
* Get the config stream requests of the key stream capability.
*/
const StreamRequest &GetStreamRequest() const;
/**
* Get the device info.
@ -231,15 +259,15 @@ class MYNTEYE_API Device {
*/
void WaitForStreams();
/**
* Get the latest data of stream.
*/
device::StreamData GetStreamData(const Stream &stream);
/**
* Get the datas of stream.
* @note default cache 4 datas at most.
*/
std::vector<device::StreamData> GetStreamDatas(const Stream &stream);
/**
* Get the latest data of stream.
*/
device::StreamData GetLatestStreamData(const Stream &stream);
/**
* Enable cache motion datas.
@ -271,8 +299,6 @@ class MYNTEYE_API Device {
return motions_;
}
const StreamRequest &GetStreamRequest(const Capabilities &capability);
virtual void StartVideoStreaming();
virtual void StopVideoStreaming();
@ -281,7 +307,14 @@ class MYNTEYE_API Device {
virtual void OnStereoStreamUpdate();
virtual std::vector<Stream> GetKeyStreams() const = 0;
virtual Capabilities GetKeyStreamCapability() const = 0;
std::map<Resolution, device::img_params_t> GetImgParams() const {
return all_img_params_;
}
device::imu_params_t GetImuParams() const {
return imu_params_;
}
bool video_streaming_;
bool motion_tracking_;
@ -291,6 +324,9 @@ class MYNTEYE_API Device {
std::shared_ptr<uvc::device> device_;
std::shared_ptr<DeviceInfo> device_info_;
std::map<Resolution, device::img_params_t> all_img_params_;
device::imu_params_t imu_params_;
std::map<Stream, Intrinsics> stream_intrinsics_;
std::map<Stream, std::map<Stream, Extrinsics>> stream_from_extrinsics_;
@ -314,6 +350,8 @@ class MYNTEYE_API Device {
std::shared_ptr<Motions> motions_;
void ReadAllInfos();
void UpdateStreamIntrinsics(
const Capabilities &capability, const StreamRequest &request);
void CallbackPushedStreamData(const Stream &stream);
void CallbackMotionData(const device::MotionData &data);

View File

@ -19,6 +19,7 @@
#include <string>
#include "mynteye/mynteye.h"
#include "mynteye/types.h"
MYNTEYE_BEGIN_NAMESPACE
@ -39,6 +40,16 @@ namespace device {
*/
MYNTEYE_API std::shared_ptr<Device> select();
/**
* @ingroup utils
*
* List stream requests and prompt user to select one.
*
* @return the selected request.
*/
MYNTEYE_API MYNTEYE_NAMESPACE::StreamRequest select_request(
const std::shared_ptr<Device> &device, bool *ok);
} // namespace device
namespace utils {
@ -58,14 +69,14 @@ MYNTEYE_API float get_real_exposure_time(
/**
* @ingroup utils
*
*
* Get sdk root dir.
*/
MYNTEYE_API std::string get_sdk_root_dir();
/**
* @ingroup utils
*
*
* Get sdk install dir.
*/
MYNTEYE_API std::string get_sdk_install_dir();

View File

@ -37,6 +37,8 @@ MYNTEYE_BEGIN_NAMESPACE
enum class Model : std::uint8_t {
/** Standard */
STANDARD,
/** Standard 2 generation */
STANDARD2,
/** Last guard */
LAST
};
@ -74,6 +76,8 @@ enum class Stream : std::uint8_t {
enum class Capabilities : std::uint8_t {
/** Provides stereo stream */
STEREO,
/** Provide stereo color stream */
STEREO_COLOR,
/** Provides color stream */
COLOR,
/** Provides depth stream */
@ -141,6 +145,7 @@ enum class Option : std::uint8_t {
* range: [0,255], default: 127
*/
CONTRAST,
/**
* Image frame rate, must set IMU_FREQUENCY together
*
@ -153,6 +158,7 @@ enum class Option : std::uint8_t {
* values: {100,200,250,333,500}, default: 200
*/
IMU_FREQUENCY,
/**
* Exposure mode
*
@ -163,21 +169,31 @@ enum class Option : std::uint8_t {
/**
* Max gain, valid if auto-exposure
*
* range: [0,48], default: 48
* range of standard 1: [0,48], default: 48
* range of standard 2: [0,255], default: 8
*/
MAX_GAIN,
/**
* Max exposure time, valid if auto-exposure
*
* range: [0,240], default: 240
* range of standard 1: [0,240], default: 240
* range of standard 2: [0,1000], default: 333
*/
MAX_EXPOSURE_TIME,
/**
* min exposure time, valid if auto-exposure
*
* range: [0,1000], default: 0
*/
MIN_EXPOSURE_TIME,
/**
* Desired brightness, valid if auto-exposure
*
* range: [0,255], default: 192
* range of standard 1: [0,255], default: 192
* range of standard 2: [1,255], default: 122
*/
DESIRED_BRIGHTNESS,
/**
* IR control
*
@ -191,22 +207,39 @@ enum class Option : std::uint8_t {
* 1: 12-bit
*/
HDR_MODE,
/** Zero drift calibration */
ZERO_DRIFT_CALIBRATION,
/** Erase chip */
ERASE_CHIP,
/**
* The range of accelerometer
*
* values: {4,8,16,32}, default: 8
* value of standard 1: {4,8,16,32}, default: 8
* value of standard 2: {6,12,24,48}, default: 12
*/
ACCELEROMETER_RANGE,
/**
* The range of gyroscope
*
* values: {500,1000,2000,4000}, default: 1000
* value of standard 1: {500,1000,2000,4000}, default: 1000
* value of standard 2: {250,500,1000,2000,4000}, default: 1000
*/
GYROSCOPE_RANGE,
/**
* The parameter of accelerometer low pass filter
*
* values: {0,1,2}, default: 2
*/
ACCELEROMETER_LOW_PASS_FILTER,
/**
* The parameter of gyroscope low pass filter
*
* values: {23,64}, default: 64
*/
GYROSCOPE_LOW_PASS_FILTER,
/** Zero drift calibration */
ZERO_DRIFT_CALIBRATION,
/** Erase chip */
ERASE_CHIP,
/** Last guard */
LAST
};
@ -278,6 +311,8 @@ enum class Format : std::uint32_t {
GREY = MYNTEYE_FOURCC('G', 'R', 'E', 'Y'),
/** YUV 4:2:2, 16 bits per pixel */
YUYV = MYNTEYE_FOURCC('Y', 'U', 'Y', 'V'),
/** BGR 8:8:8, 24 bits per pixel */
BGR888 = MYNTEYE_FOURCC('B', 'G', 'R', '3'),
/** Last guard */
LAST
};
@ -292,6 +327,26 @@ inline std::ostream &operator<<(std::ostream &os, const Format &value) {
MYNTEYE_API std::size_t bytes_per_pixel(const Format &value);
/**
* Resolution.
*/
struct MYNTEYE_API Resolution {
/** Width */
std::uint16_t width;
/** Height */
std::uint16_t height;
bool operator==(const Resolution &other) const {
return width == other.width && height == other.height;
}
bool operator!=(const Resolution &other) const {
return !(*this == other);
}
bool operator<(const Resolution &other) const {
return (width * height) < (other.width * other.height);
}
};
/**
* Stream request.
*/
@ -302,9 +357,21 @@ struct MYNTEYE_API StreamRequest {
std::uint16_t height;
/** Stream pixel format */
Format format;
/** Stream frames per second (unused) */
/** Stream frames per second */
std::uint16_t fps;
StreamRequest() {}
StreamRequest(
std::uint16_t width, std::uint16_t height, Format format,
std::uint16_t fps)
: width(width), height(height), format(format), fps(fps) {}
StreamRequest(const Resolution &res, Format format, std::uint16_t fps)
: width(res.width), height(res.height), format(format), fps(fps) {}
Resolution GetResolution() const { return {width, height}; }
bool operator==(const StreamRequest &other) const {
return width == other.width && height == other.height &&
format == other.format && fps == other.fps;
@ -421,8 +488,8 @@ std::ostream &operator<<(std::ostream &os, const Extrinsics &ex);
struct MYNTEYE_API ImgData {
/** Image frame id */
std::uint16_t frame_id;
/** Image timestamp in 0.01ms */
std::uint32_t timestamp;
/** Image timestamp in 1us */
std::uint64_t timestamp;
/** Image exposure time, virtual value in [1, 480] */
std::uint16_t exposure_time;
@ -453,10 +520,18 @@ struct MYNTEYE_API ImgData {
* IMU data.
*/
struct MYNTEYE_API ImuData {
/** Image frame id */
std::uint16_t frame_id;
/** IMU timestamp in 0.01ms */
std::uint32_t timestamp;
/** IMU frame id */
std::uint32_t frame_id;
/**
* IMU accel or gyro flag
*
* 0: accel and gyro are both valid
* 1: accel is valid
* 2: gyro is valid
*/
std::uint8_t flag;
/** IMU timestamp in 1us */
std::uint64_t timestamp;
/** IMU accelerometer data for 3-axis: X, Y, Z. */
double accel[3];
/** IMU gyroscope data for 3-axis: X, Y, Z. */
@ -465,7 +540,7 @@ struct MYNTEYE_API ImuData {
double temperature;
void Reset() {
frame_id = 0;
flag = 0;
timestamp = 0;
std::fill(accel, accel + 3, 0);
std::fill(gyro, gyro + 3, 0);

View File

@ -13,6 +13,8 @@
# limitations under the License.
@PACKAGE_INIT@
set(mynteye_WITH_API @WITH_API@)
set(mynteye_WITH_GLOG @WITH_GLOG@)
set(mynteye_WITH_API @WITH_API@)
set(mynteye_WITH_GLOG @WITH_GLOG@)

37
package.json Normal file
View File

@ -0,0 +1,37 @@
{
"name": "mynt-eye-s-sdk",
"version": "1.0.0",
"description": "MYNT EYE S SDK",
"main": "index.js",
"directories": {
"doc": "doc",
"test": "test"
},
"dependencies": {},
"devDependencies": {
"@commitlint/cli": "^7.2.1",
"@commitlint/config-conventional": "^7.1.2",
"cz-conventional-changelog": "^2.1.0",
"husky": "^1.3.0"
},
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"repository": {
"type": "git",
"url": "https://github.com/slightech/MYNT-EYE-S-SDK.git"
},
"keywords": [],
"author": "",
"license": "Apache-2.0",
"config": {
"commitizen": {
"path": "./node_modules/cz-conventional-changelog"
}
},
"husky": {
"hooks": {
"commit-msg": "commitlint -E HUSKY_GIT_PARAMS"
}
}
}

View File

@ -53,6 +53,10 @@ message(STATUS "Found mynteye: ${mynteye_VERSION}")
include(${PRO_DIR}/cmake/DetectOpenCV.cmake)
if(mynteye_WITH_GLOG)
include(${PRO_DIR}/cmake/DetectGLog.cmake)
endif()
#LIST(APPEND CMAKE_MODULE_PATH ${PRO_DIR}/cmake)
# targets

View File

@ -21,12 +21,13 @@ MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api)
return 1;
if (!api) return 1;
bool ok;
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
// api->SetOptionValue(Option::FRAME_RATE, 25);
// api->SetOptionValue(Option::IMU_FREQUENCY, 500);
api->SetOptionValue(Option::IR_CONTROL, 80);
api->LogOptionInfos();
std::size_t left_count = 0;
@ -55,8 +56,7 @@ int main(int argc, char *argv[]) {
CHECK_NOTNULL(data.imu);
++imu_count;
VLOG(2) << "Imu count: " << imu_count;
VLOG(2) << " frame_id: " << data.imu->frame_id
<< ", timestamp: " << data.imu->timestamp
VLOG(2) << ", timestamp: " << data.imu->timestamp
<< ", accel_x: " << data.imu->accel[0]
<< ", accel_y: " << data.imu->accel[1]
<< ", accel_z: " << data.imu->accel[2]
@ -106,8 +106,7 @@ int main(int argc, char *argv[]) {
auto &&motion_datas = api->GetMotionDatas();
motion_count += motion_datas.size();
for (auto &&data : motion_datas) {
LOG(INFO) << "Imu frame_id: " << data.imu->frame_id
<< ", timestamp: " << data.imu->timestamp
LOG(INFO) << ", timestamp: " << data.imu->timestamp
<< ", accel_x: " << data.imu->accel[0]
<< ", accel_y: " << data.imu->accel[1]
<< ", accel_z: " << data.imu->accel[2]

View File

@ -144,8 +144,12 @@ MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api)
return 1;
if (!api) return 1;
bool ok;
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
api->SetOptionValue(Option::IR_CONTROL, 80);

View File

@ -14,6 +14,10 @@
get_filename_component(DIR_NAME ${CMAKE_CURRENT_LIST_DIR} NAME)
include_directories(
${PRO_DIR}/src
)
set_outdir(
"${OUT_DIR}/lib/${DIR_NAME}"
"${OUT_DIR}/lib/${DIR_NAME}"

View File

@ -25,35 +25,19 @@ int main(int argc, char *argv[]) {
glog_init _(argc, argv);
auto &&device = device::select();
if (!device)
return 1;
/*
{ // auto-exposure
device->SetOptionValue(Option::EXPOSURE_MODE, 0);
device->SetOptionValue(Option::MAX_GAIN, 40); // [0.48]
device->SetOptionValue(Option::MAX_EXPOSURE_TIME, 120); // [0,240]
device->SetOptionValue(Option::DESIRED_BRIGHTNESS, 200); // [0,255]
}
{ // manual-exposure
device->SetOptionValue(Option::EXPOSURE_MODE, 1);
device->SetOptionValue(Option::GAIN, 20); // [0.48]
device->SetOptionValue(Option::BRIGHTNESS, 20); // [0,240]
device->SetOptionValue(Option::CONTRAST, 20); // [0,255]
}
device->SetOptionValue(Option::IR_CONTROL, 80);
device->SetOptionValue(Option::FRAME_RATE, 25);
device->SetOptionValue(Option::IMU_FREQUENCY, 500);
*/
device->LogOptionInfos();
if (!device) return 1;
// device->RunOptionAction(Option::ZERO_DRIFT_CALIBRATION);
bool ok;
auto &&request = device::select_request(device, &ok);
if (!ok) return 1;
device->ConfigStreamRequest(request);
std::size_t left_count = 0;
device->SetStreamCallback(
Stream::LEFT, [&left_count](const device::StreamData &data) {
CHECK_NOTNULL(data.img);
++left_count;
VLOG(2) << Stream::LEFT << ", count: " << left_count;
VLOG(2) << Stream::LEFT << "count: " << left_count;
VLOG(2) << " frame_id: " << data.img->frame_id
<< ", timestamp: " << data.img->timestamp
<< ", exposure_time: " << data.img->exposure_time;
@ -63,19 +47,19 @@ int main(int argc, char *argv[]) {
Stream::RIGHT, [&right_count](const device::StreamData &data) {
CHECK_NOTNULL(data.img);
++right_count;
VLOG(2) << Stream::RIGHT << ", count: " << right_count;
VLOG(2) << Stream::RIGHT << "count: " << right_count;
VLOG(2) << " frame_id: " << data.img->frame_id
<< ", timestamp: " << data.img->timestamp
<< ", exposure_time: " << data.img->exposure_time;
});
std::size_t imu_count = 0;
device->SetMotionCallback([&imu_count](const device::MotionData &data) {
CHECK_NOTNULL(data.imu);
++imu_count;
VLOG(2) << "Imu count: " << imu_count;
VLOG(2) << " frame_id: " << data.imu->frame_id
<< ", timestamp: " << data.imu->timestamp
VLOG(2) << ", timestamp: " << data.imu->timestamp
<< ", accel_x: " << data.imu->accel[0]
<< ", accel_y: " << data.imu->accel[1]
<< ", accel_z: " << data.imu->accel[2]
@ -88,7 +72,6 @@ int main(int argc, char *argv[]) {
// Enable this will cache the motion datas until you get them.
device->EnableMotionDatas();
device->Start(Source::ALL);
cv::namedWindow("frame");
std::size_t motion_count = 0;
@ -96,14 +79,13 @@ int main(int argc, char *argv[]) {
while (true) {
device->WaitForStreams();
device::StreamData left_data = device->GetLatestStreamData(Stream::LEFT);
device::StreamData right_data = device->GetLatestStreamData(Stream::RIGHT);
device::StreamData left_data = device->GetStreamData(Stream::LEFT);
device::StreamData right_data = device->GetStreamData(Stream::RIGHT);
auto &&motion_datas = device->GetMotionDatas();
motion_count += motion_datas.size();
for (auto &&data : motion_datas) {
LOG(INFO) << "Imu frame_id: " << data.imu->frame_id
<< ", timestamp: " << data.imu->timestamp
LOG(INFO) << "timestamp: " << data.imu->timestamp
<< ", accel_x: " << data.imu->accel[0]
<< ", accel_y: " << data.imu->accel[1]
<< ", accel_z: " << data.imu->accel[2]
@ -113,15 +95,39 @@ int main(int argc, char *argv[]) {
<< ", temperature: " << data.imu->temperature;
}
cv::Mat left_img(
left_data.frame->height(), left_data.frame->width(), CV_8UC1,
left_data.frame->data());
cv::Mat right_img(
right_data.frame->height(), right_data.frame->width(), CV_8UC1,
right_data.frame->data());
cv::Mat img;
cv::hconcat(left_img, right_img, img);
// TODO(Kalman): Extract into public or internal method
if (left_data.frame->format() == Format::GREY) {
cv::Mat left_img(
left_data.frame->height(), left_data.frame->width(), CV_8UC1,
left_data.frame->data());
cv::Mat right_img(
right_data.frame->height(), right_data.frame->width(), CV_8UC1,
right_data.frame->data());
cv::hconcat(left_img, right_img, img);
} else if (left_data.frame->format() == Format::YUYV) {
cv::Mat left_img(
left_data.frame->height(), left_data.frame->width(), CV_8UC2,
left_data.frame->data());
cv::Mat right_img(
right_data.frame->height(), right_data.frame->width(), CV_8UC2,
right_data.frame->data());
cv::cvtColor(left_img, left_img, cv::COLOR_YUV2BGR_YUY2);
cv::cvtColor(right_img, right_img, cv::COLOR_YUV2BGR_YUY2);
cv::hconcat(left_img, right_img, img);
} else if (left_data.frame->format() == Format::BGR888) {
cv::Mat left_img(
left_data.frame->height(), left_data.frame->width(), CV_8UC3,
left_data.frame->data());
cv::Mat right_img(
right_data.frame->height(), right_data.frame->width(), CV_8UC3,
right_data.frame->data());
cv::hconcat(left_img, right_img, img);
} else {
return -1;
}
cv::imshow("frame", img);
char key = static_cast<char>(cv::waitKey(1));
@ -144,7 +150,7 @@ int main(int argc, char *argv[]) {
<< ", fps: " << (1000.f * right_count / elapsed_ms);
LOG(INFO) << "Imu count: " << imu_count
<< ", hz: " << (1000.f * imu_count / elapsed_ms);
// LOG(INFO) << "Motion count: " << motion_count
// << ", hz: " << (1000.f * motion_count / elapsed_ms);
LOG(INFO) << "Motion count: " << motion_count
<< ", hz: " << (1000.f * motion_count / elapsed_ms);
return 0;
}

View File

@ -16,6 +16,7 @@ get_filename_component(DIR_NAME ${CMAKE_CURRENT_LIST_DIR} NAME)
include_directories(
${CMAKE_CURRENT_SOURCE_DIR}
${PRO_DIR}/src
)
set_outdir(
@ -123,7 +124,6 @@ make_executable2(ctrl_manual_exposure
SRCS control/manual_exposure.cc util/cv_painter.cc
WITH_OPENCV
)
make_executable2(ctrl_infrared SRCS control/infrared.cc WITH_OPENCV)
# intermediate level

View File

@ -22,18 +22,24 @@ MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api)
return 1;
if (!api) return 1;
bool ok;
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
// auto-exposure: 0
api->SetOptionValue(Option::EXPOSURE_MODE, 0);
// max_gain: range [0,48], default 48
api->SetOptionValue(Option::MAX_GAIN, 48);
// max_exposure_time: range [0,240], default 240
api->SetOptionValue(Option::MAX_EXPOSURE_TIME, 240);
// desired_brightness: range [0,255], default 192
api->SetOptionValue(Option::DESIRED_BRIGHTNESS, 192);
// max_gain: range [0,255], default 8
api->SetOptionValue(Option::MAX_GAIN, 8);
// max_exposure_time: range [0,1000], default 333
api->SetOptionValue(Option::MAX_EXPOSURE_TIME, 333);
// desired_brightness: range [1,255], default 122
api->SetOptionValue(Option::DESIRED_BRIGHTNESS, 122);
// min_exposure_time: range [0,1000], default 0
api->SetOptionValue(Option::MIN_EXPOSURE_TIME, 0);
LOG(INFO) << "Enable auto-exposure";
LOG(INFO) << "Set MAX_GAIN to " << api->GetOptionValue(Option::MAX_GAIN);
@ -41,10 +47,12 @@ int main(int argc, char *argv[]) {
<< api->GetOptionValue(Option::MAX_EXPOSURE_TIME);
LOG(INFO) << "Set DESIRED_BRIGHTNESS to "
<< api->GetOptionValue(Option::DESIRED_BRIGHTNESS);
LOG(INFO) << "Set MIN_EXPOSURE_TIME to "
<< api->GetOptionValue(Option::MIN_EXPOSURE_TIME);
api->Start(Source::VIDEO_STREAMING);
CVPainter painter(api->GetOptionValue(Option::FRAME_RATE));
CVPainter painter(30);
cv::namedWindow("frame");

View File

@ -23,20 +23,12 @@ MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api)
return 1;
if (!api) return 1;
// Attention: must set FRAME_RATE and IMU_FREQUENCY together, otherwise won't
// succeed.
// FRAME_RATE values: 10, 15, 20, 25, 30, 35, 40, 45, 50, 55, 60
api->SetOptionValue(Option::FRAME_RATE, 25);
// IMU_FREQUENCY values: 100, 200, 250, 333, 500
api->SetOptionValue(Option::IMU_FREQUENCY, 500);
LOG(INFO) << "Set FRAME_RATE to " << api->GetOptionValue(Option::FRAME_RATE);
LOG(INFO) << "Set IMU_FREQUENCY to "
<< api->GetOptionValue(Option::IMU_FREQUENCY);
bool ok;
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
// Count img
std::atomic_uint img_count(0);
@ -46,13 +38,6 @@ int main(int argc, char *argv[]) {
++img_count;
});
// Count imu
std::atomic_uint imu_count(0);
api->SetMotionCallback([&imu_count](const api::MotionData &data) {
CHECK_NOTNULL(data.imu);
++imu_count;
});
api->Start(Source::ALL);
cv::namedWindow("frame");
@ -85,7 +70,5 @@ int main(int argc, char *argv[]) {
<< ", cost: " << elapsed_ms << "ms";
LOG(INFO) << "Img count: " << img_count
<< ", fps: " << (1000.f * img_count / elapsed_ms);
LOG(INFO) << "Imu count: " << imu_count
<< ", hz: " << (1000.f * imu_count / elapsed_ms);
return 0;
}

View File

@ -23,12 +23,16 @@ MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api)
return 1;
if (!api) return 1;
// ACCELEROMETER_RANGE values: 4, 8, 16, 32
api->SetOptionValue(Option::ACCELEROMETER_RANGE, 8);
// GYROSCOPE_RANGE values: 500, 1000, 2000, 4000
bool ok;
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
// ACCELEROMETER_RANGE values: 6, 12, 24, 32
api->SetOptionValue(Option::ACCELEROMETER_RANGE, 6);
// GYROSCOPE_RANGE values: 250, 500, 1000, 2000, 4000
api->SetOptionValue(Option::GYROSCOPE_RANGE, 1000);
LOG(INFO) << "Set ACCELEROMETER_RANGE to "

View File

@ -1,61 +0,0 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <opencv2/highgui/highgui.hpp>
#include "mynteye/logger.h"
#include "mynteye/api/api.h"
MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api)
return 1;
// Detect infrared add-ons
LOG(INFO) << "Support infrared: " << std::boolalpha
<< api->Supports(AddOns::INFRARED);
LOG(INFO) << "Support infrared2: " << std::boolalpha
<< api->Supports(AddOns::INFRARED2);
// Get infrared intensity range
auto &&info = api->GetOptionInfo(Option::IR_CONTROL);
LOG(INFO) << Option::IR_CONTROL << ": {" << info << "}";
// Set infrared intensity value
api->SetOptionValue(Option::IR_CONTROL, 80);
api->Start(Source::VIDEO_STREAMING);
cv::namedWindow("frame");
while (true) {
api->WaitForStreams();
auto &&left_data = api->GetStreamData(Stream::LEFT);
auto &&right_data = api->GetStreamData(Stream::RIGHT);
cv::Mat img;
cv::hconcat(left_data.frame, right_data.frame, img);
cv::imshow("frame", img);
char key = static_cast<char>(cv::waitKey(1));
if (key == 27 || key == 'q' || key == 'Q') { // ESC/Q
break;
}
}
api->Stop(Source::VIDEO_STREAMING);
return 0;
}

View File

@ -22,27 +22,25 @@ MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api)
return 1;
if (!api) return 1;
bool ok;
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
// manual-exposure: 1
api->SetOptionValue(Option::EXPOSURE_MODE, 1);
// gain: range [0,48], default 24
api->SetOptionValue(Option::GAIN, 24);
// brightness/exposure_time: range [0,240], default 120
api->SetOptionValue(Option::BRIGHTNESS, 120);
// contrast/black_level_calibration: range [0,255], default 127
api->SetOptionValue(Option::CONTRAST, 127);
LOG(INFO) << "Enable manual-exposure";
LOG(INFO) << "Set GAIN to " << api->GetOptionValue(Option::GAIN);
LOG(INFO) << "Set BRIGHTNESS to " << api->GetOptionValue(Option::BRIGHTNESS);
LOG(INFO) << "Set CONTRAST to " << api->GetOptionValue(Option::CONTRAST);
api->Start(Source::VIDEO_STREAMING);
CVPainter painter(api->GetOptionValue(Option::FRAME_RATE));
CVPainter painter(30);
cv::namedWindow("frame");

View File

@ -19,8 +19,12 @@ MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api)
return 1;
if (!api) return 1;
bool ok;
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
api->EnableStreamData(Stream::DEPTH);

View File

@ -18,8 +18,7 @@ MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api)
return 1;
if (!api) return 1;
LOG(INFO) << "Device name: " << api->GetInfo(Info::DEVICE_NAME);
LOG(INFO) << "Serial number: " << api->GetInfo(Info::SERIAL_NUMBER);

View File

@ -19,8 +19,12 @@ MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api)
return 1;
if (!api) return 1;
bool ok;
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
// api->EnableStreamData(Stream::DISPARITY);
api->EnableStreamData(Stream::DISPARITY_NORMALIZED);

View File

@ -27,8 +27,12 @@ MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api)
return 1;
if (!api) return 1;
bool ok;
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
// Attention: must not block the callbacks.

View File

@ -18,8 +18,12 @@ MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api)
return 1;
if (!api) return 1;
bool ok;
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
LOG(INFO) << "Intrinsics left: {" << api->GetIntrinsics(Stream::LEFT) << "}";
LOG(INFO) << "Intrinsics right: {" << api->GetIntrinsics(Stream::RIGHT)

View File

@ -22,8 +22,12 @@ MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api)
return 1;
if (!api) return 1;
bool ok;
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
// Enable this will cache the motion datas until you get them.
api->EnableMotionDatas();

View File

@ -18,8 +18,7 @@ MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api)
return 1;
if (!api) return 1;
LOG(INFO) << "Motion intrinsics: {" << api->GetMotionIntrinsics() << "}";
LOG(INFO) << "Motion extrinsics left to imu: {"

View File

@ -21,8 +21,12 @@ MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api)
return 1;
if (!api) return 1;
bool ok;
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
api->EnableStreamData(Stream::POINTS);

View File

@ -19,8 +19,12 @@ MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api)
return 1;
if (!api) return 1;
bool ok;
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
api->Start(Source::VIDEO_STREAMING);

View File

@ -19,8 +19,12 @@ MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api)
return 1;
if (!api) return 1;
bool ok;
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
api->EnableStreamData(Stream::LEFT_RECTIFIED);
api->EnableStreamData(Stream::RIGHT_RECTIFIED);

View File

@ -19,8 +19,12 @@ MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api)
return 1;
if (!api) return 1;
bool ok;
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
api->EnablePlugin("plugins/linux-x86_64/libplugin_g_cuda9.1_opencv3.4.0.so");

View File

@ -148,8 +148,12 @@ MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api)
return 1;
if (!api) return 1;
bool ok;
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
api->SetOptionValue(Option::IR_CONTROL, 80);

View File

@ -117,7 +117,7 @@ cv::Rect CVPainter::DrawImuData(
if (gravity == BOTTOM_LEFT || gravity == BOTTOM_RIGHT)
sign = -1;
Clear(ss) << "frame_id: " << data.frame_id << ", stamp: " << data.timestamp
Clear(ss) << "stamp: " << data.timestamp
<< ", temp: " << fmt_temp << data.temperature;
cv::Rect rect_i = DrawText(img, ss.str(), gravity, 5);

View File

@ -108,7 +108,7 @@ int main(int argc, char *argv[]) {
const auto frame_empty = [&frame]() { return frame == nullptr; };
uvc::set_device_mode(
*device, 752, 480, static_cast<int>(Format::YUYV), 25,
*device, 1280, 400, static_cast<int>(Format::BGR888), 20,
[&mtx, &cv, &frame, &frame_ready](
const void *data, std::function<void()> continuation) {
// reinterpret_cast<const std::uint8_t *>(data);
@ -143,8 +143,7 @@ int main(int argc, char *argv[]) {
}
// only lastest frame is valid
cv::Mat img(480, 752, CV_8UC2, const_cast<void *>(frame->data));
cv::cvtColor(img, img, cv::COLOR_YUV2BGR_YUY2);
cv::Mat img(400, 1280, CV_8UC3, const_cast<void *>(frame->data));
cv::imshow("frame", img);
frame = nullptr;

View File

@ -15,6 +15,7 @@
# _VERBOSE_=1
# _INIT_LINTER_=1
# _INIT_COMMITIZEN_=1
# _FORCE_INSRALL_=1
_INSTALL_OPTIONS_=$@

View File

@ -15,6 +15,7 @@
_INIT_BUILD_=1
# _INIT_LINTER_=1
# _INIT_COMMITIZEN_=1
# _FORCE_INSRALL_=1
# _INSTALL_OPTIONS_=-y
@ -146,6 +147,7 @@ else # unexpected
exit 1
fi
## init linter - optional
if [ -n "${_INIT_LINTER_}" ]; then
@ -217,3 +219,30 @@ else
fi
fi # _INIT_LINTER_
## init commitizen - optional
if [ -n "${_INIT_COMMITIZEN_}" ]; then
if _detect_cmd npm; then
_echo_d "npm install commitizen -g; npm install"
npm install commitizen -g; npm install
# if _detect_cmd node; then
# commitizen init cz-conventional-changelog --save-dev --save-exact
# npm install --save-dev @commitlint/{config-conventional,cli}
# npm install husky --save-dev
# else
# _echo_en "Skipped commitizen init, as node not found"
# fi
else
_echo_en "Skipped npm install packages, as npm not found"
_echo
_echo_e "Download Node.js from https://nodejs.org/, then add to \`~/.bashrc\`."
_echo
_echo_e " export PATH=\"/home/john/node-v10.14.2-linux-x64/bin:\$PATH\""
_echo
_echo_e "p.s. not \"apt-get install npm\", it's too old."
fi
fi # _INIT_COMMITIZEN_

View File

@ -16,12 +16,12 @@
#if defined(WIN32) || defined(_WIN32) || defined(__WIN32__)
#define GLOG_NO_ABBREVIATED_SEVERITIES
#endif
#include <glog/logging.h>
#ifdef HAVE_LIB_GFLAGS
#include <gflags/gflags.h>
#endif
#include "mynteye/logger.h"
#include "mynteye/mynteye.h"
int main(int /*argc*/, char *argv[]) {

View File

@ -26,7 +26,6 @@
#include "mynteye/api/plugin.h"
#include "mynteye/api/synthetic.h"
#include "mynteye/device/device.h"
#include "mynteye/device/device_s.h"
#include "mynteye/device/utils.h"
#if defined(WITH_FILESYSTEM) && defined(WITH_NATIVE_FILESYSTEM)
@ -210,26 +209,7 @@ std::vector<std::string> get_plugin_paths() {
API::API(std::shared_ptr<Device> device) : device_(device) {
VLOG(2) << __func__;
if (std::dynamic_pointer_cast<StandardDevice>(device_) != nullptr) {
bool in_l_ok, in_r_ok, ex_r2l_ok;
device_->GetIntrinsics(Stream::LEFT, &in_l_ok);
device_->GetIntrinsics(Stream::RIGHT, &in_r_ok);
device_->GetExtrinsics(Stream::RIGHT, Stream::LEFT, &ex_r2l_ok);
if (!in_l_ok || !in_r_ok || !ex_r2l_ok) {
#if defined(WITH_DEVICE_INFO_REQUIRED)
LOG(FATAL)
#else
LOG(WARNING)
#endif
<< "Image params not found, but we need it to process the "
"images. Please `make tools` and use `img_params_writer` "
"to write the image params. If you update the SDK from "
"1.x, the `SN*.conf` is the file contains them. Besides, "
"you could also calibrate them by yourself. Read the guide "
"doc (https://github.com/slightech/MYNT-EYE-S-SDK-Guide) "
"to learn more.";
}
}
// std::dynamic_pointer_cast<StandardDevice>(device_);
synthetic_.reset(new Synthetic(this));
}
@ -237,29 +217,19 @@ API::~API() {
VLOG(2) << __func__;
}
std::shared_ptr<API> API::Create() {
return Create(device::select());
}
std::shared_ptr<API> API::Create(std::shared_ptr<Device> device) {
if (!device)
return nullptr;
return std::make_shared<API>(device);
}
std::shared_ptr<API> API::Create(int argc, char *argv[]) {
static glog_init _(argc, argv);
auto &&device = device::select();
if (!device)
return nullptr;
return std::make_shared<API>(device);
if (!device) return nullptr;
return Create(argc, argv, device);
}
std::shared_ptr<API> API::Create(
int argc, char *argv[], std::shared_ptr<Device> device) {
int argc, char *argv[], const std::shared_ptr<Device> &device) {
static glog_init _(argc, argv);
if (!device)
return nullptr;
return Create(device);
}
std::shared_ptr<API> API::Create(const std::shared_ptr<Device> &device) {
return std::make_shared<API>(device);
}
@ -283,6 +253,10 @@ bool API::Supports(const AddOns &addon) const {
return device_->Supports(addon);
}
StreamRequest API::SelectStreamRequest(bool *ok) const {
return device::select_request(device_, ok);
}
const std::vector<StreamRequest> &API::GetStreamRequests(
const Capabilities &capability) const {
return device_->GetStreamRequests(capability);
@ -291,6 +265,29 @@ const std::vector<StreamRequest> &API::GetStreamRequests(
void API::ConfigStreamRequest(
const Capabilities &capability, const StreamRequest &request) {
device_->ConfigStreamRequest(capability, request);
synthetic_->NotifyImageParamsChanged();
}
const StreamRequest &API::GetStreamRequest(
const Capabilities &capability) const {
return device_->GetStreamRequest(capability);
}
const std::vector<StreamRequest> &API::GetStreamRequests() const {
return device_->GetStreamRequests();
}
void API::ConfigStreamRequest(const StreamRequest &request) {
device_->ConfigStreamRequest(request);
synthetic_->NotifyImageParamsChanged();
}
const StreamRequest &API::GetStreamRequest() const {
return device_->GetStreamRequest();
}
std::shared_ptr<DeviceInfo> API::GetInfo() const {
return device_->GetInfo();
}
std::string API::GetInfo(const Info &info) const {
@ -450,4 +447,23 @@ std::shared_ptr<Device> API::device() {
return device_;
}
// TODO(Kalman): Call this function in the appropriate place
void API::CheckImageParams() {
if (device_ != nullptr) {
bool in_l_ok, in_r_ok, ex_l2r_ok;
device_->GetIntrinsics(Stream::LEFT, &in_l_ok);
device_->GetIntrinsics(Stream::RIGHT, &in_r_ok);
device_->GetExtrinsics(Stream::LEFT, Stream::RIGHT, &ex_l2r_ok);
if (!in_l_ok || !in_r_ok || !ex_l2r_ok) {
LOG(FATAL) << "Image params not found, but we need it to process the "
"images. Please `make tools` and use `img_params_writer` "
"to write the image params. If you update the SDK from "
"1.x, the `SN*.conf` is the file contains them. Besides, "
"you could also calibrate them by yourself. Read the guide "
"doc (https://github.com/slightech/MYNT-EYE-SDK-2-Guide) "
"to learn more.";
}
}
}
MYNTEYE_END_NAMESPACE

View File

@ -27,11 +27,9 @@ const char RectifyProcessor::NAME[] = "RectifyProcessor";
RectifyProcessor::RectifyProcessor(
std::shared_ptr<Device> device, std::int32_t proc_period)
: Processor(std::move(proc_period)) {
: Processor(std::move(proc_period)), device_(device) {
VLOG(2) << __func__ << ": proc_period=" << proc_period;
InitParams(
device->GetIntrinsics(Stream::LEFT), device->GetIntrinsics(Stream::RIGHT),
device->GetExtrinsics(Stream::RIGHT, Stream::LEFT));
NotifyImageParamsChanged();
}
RectifyProcessor::~RectifyProcessor() {
@ -42,6 +40,13 @@ std::string RectifyProcessor::Name() {
return NAME;
}
void RectifyProcessor::NotifyImageParamsChanged() {
InitParams(
device_->GetIntrinsics(Stream::LEFT),
device_->GetIntrinsics(Stream::RIGHT),
device_->GetExtrinsics(Stream::RIGHT, Stream::LEFT));
}
Object *RectifyProcessor::OnCreateOutput() {
return new ObjMat2();
}

View File

@ -37,6 +37,8 @@ class RectifyProcessor : public Processor {
std::string Name() override;
void NotifyImageParamsChanged();
cv::Mat R1, P1, R2, P2, Q;
cv::Mat map11, map12, map21, map22;
@ -48,6 +50,8 @@ class RectifyProcessor : public Processor {
private:
void InitParams(
Intrinsics in_left, Intrinsics in_right, Extrinsics ex_right_to_left);
std::shared_ptr<Device> device_;
};
MYNTEYE_END_NAMESPACE

View File

@ -17,6 +17,8 @@
#include <functional>
#include <stdexcept>
#include <opencv2/imgproc/imgproc.hpp>
#include "mynteye/logger.h"
#include "mynteye/api/object.h"
#include "mynteye/api/plugin.h"
@ -39,9 +41,16 @@ MYNTEYE_BEGIN_NAMESPACE
namespace {
cv::Mat frame2mat(const std::shared_ptr<device::Frame> &frame) {
// TODO(JohnZhao) Support different format frame to cv::Mat
CHECK_EQ(frame->format(), Format::GREY);
return cv::Mat(frame->height(), frame->width(), CV_8UC1, frame->data());
if (frame->format() == Format::YUYV) {
cv::Mat img(frame->height(), frame->width(), CV_8UC2, frame->data());
cv::cvtColor(img, img, cv::COLOR_YUV2BGR_YUY2);
return img;
} else if (frame->format() == Format::BGR888) {
cv::Mat img(frame->height(), frame->width(), CV_8UC3, frame->data());
return img;
} else { // Format::GRAY
return cv::Mat(frame->height(), frame->width(), CV_8UC1, frame->data());
}
}
api::StreamData data2api(const device::StreamData &data) {
@ -74,6 +83,11 @@ Synthetic::~Synthetic() {
}
}
void Synthetic::NotifyImageParamsChanged() {
auto &&processor = find_processor<RectifyProcessor>(processor_);
if (processor) processor->NotifyImageParamsChanged();
}
bool Synthetic::Supports(const Stream &stream) const {
return stream_supports_mode_.find(stream) != stream_supports_mode_.end();
}
@ -152,7 +166,7 @@ api::StreamData Synthetic::GetStreamData(const Stream &stream) {
auto &&mode = GetStreamEnabledMode(stream);
if (mode == MODE_NATIVE) {
auto &&device = api_->device();
return data2api(device->GetLatestStreamData(stream));
return data2api(device->GetStreamData(stream));
} else if (mode == MODE_SYNTHETIC) {
if (stream == Stream::LEFT_RECTIFIED || stream == Stream::RIGHT_RECTIFIED) {
static std::shared_ptr<ObjMat2> output = nullptr;
@ -302,42 +316,35 @@ void Synthetic::EnableStreamData(const Stream &stream, std::uint32_t depth) {
break;
stream_enabled_mode_[stream] = MODE_SYNTHETIC;
CHECK(ActivateProcessor<RectifyProcessor>());
}
return;
} return;
case Stream::RIGHT_RECTIFIED: {
if (!IsStreamDataEnabled(Stream::RIGHT))
break;
stream_enabled_mode_[stream] = MODE_SYNTHETIC;
CHECK(ActivateProcessor<RectifyProcessor>());
}
return;
} return;
case Stream::DISPARITY: {
stream_enabled_mode_[stream] = MODE_SYNTHETIC;
EnableStreamData(Stream::LEFT_RECTIFIED, depth + 1);
EnableStreamData(Stream::RIGHT_RECTIFIED, depth + 1);
CHECK(ActivateProcessor<DisparityProcessor>());
}
return;
} return;
case Stream::DISPARITY_NORMALIZED: {
stream_enabled_mode_[stream] = MODE_SYNTHETIC;
EnableStreamData(Stream::DISPARITY, depth + 1);
CHECK(ActivateProcessor<DisparityNormalizedProcessor>());
}
return;
} return;
case Stream::POINTS: {
stream_enabled_mode_[stream] = MODE_SYNTHETIC;
EnableStreamData(Stream::DISPARITY, depth + 1);
CHECK(ActivateProcessor<PointsProcessor>());
}
return;
} return;
case Stream::DEPTH: {
stream_enabled_mode_[stream] = MODE_SYNTHETIC;
EnableStreamData(Stream::POINTS, depth + 1);
CHECK(ActivateProcessor<DepthProcessor>());
}
return;
default:
break;
} return;
default: break;
}
if (depth == 0) {
LOG(WARNING) << "Enable stream data of " << stream << " failed";
@ -390,8 +397,7 @@ void Synthetic::DisableStreamData(const Stream &stream, std::uint32_t depth) {
case Stream::DEPTH: {
DeactivateProcessor<DepthProcessor>();
} break;
default:
return;
default: return;
}
if (depth > 0) {
LOG(WARNING) << "Disable synthetic stream data of " << stream << " too";

View File

@ -43,6 +43,8 @@ class Synthetic {
explicit Synthetic(API *api);
~Synthetic();
void NotifyImageParamsChanged();
bool Supports(const Stream &stream) const;
mode_t SupportsMode(const Stream &stream) const;

View File

@ -19,6 +19,7 @@
#include <iterator>
#include <sstream>
#include <stdexcept>
#include <vector>
#include "mynteye/logger.h"
#include "mynteye/util/strings.h"
@ -30,12 +31,12 @@ MYNTEYE_BEGIN_NAMESPACE
namespace {
const uvc::xu mynteye_xu = {3,
2,
{0x947a6d9f,
0x8a2f,
0x418d,
{0x85, 0x9e, 0x6c, 0x9a, 0xa0, 0x38, 0x10, 0x14}}};
const uvc::xu mynteye_xu = {3, 2,
{
0x947a6d9f, 0x8a2f, 0x418d,
{0x85, 0x9e, 0x6c, 0x9a, 0xa0, 0x38, 0x10, 0x14}
}
};
int XuCamCtrlId(Option option) {
switch (option) {
@ -63,12 +64,21 @@ int XuCamCtrlId(Option option) {
case Option::FRAME_RATE:
return 7;
break;
case Option::MIN_EXPOSURE_TIME:
return 8;
break;
case Option::ACCELEROMETER_RANGE:
return 9;
break;
case Option::GYROSCOPE_RANGE:
return 10;
break;
case Option::ACCELEROMETER_LOW_PASS_FILTER:
return 11;
break;
case Option::GYROSCOPE_LOW_PASS_FILTER:
return 12;
break;
default:
LOG(FATAL) << "No cam ctrl id for " << option;
}
@ -92,7 +102,7 @@ void CheckSpecVersion(const Version *spec_version) {
LOG(FATAL) << "Spec version must be specified";
}
std::vector<std::string> spec_versions{"1.0"};
std::vector<std::string> spec_versions{"1.0", "1.1"};
for (auto &&spec_ver : spec_versions) {
if (*spec_version == Version(spec_ver)) {
return; // supported
@ -109,12 +119,14 @@ void CheckSpecVersion(const Version *spec_version) {
} // namespace
Channels::Channels(std::shared_ptr<uvc::device> device)
: device_(device),
is_imu_tracking_(false),
imu_track_stop_(false),
imu_sn_(0),
imu_callback_(nullptr) {
Channels::Channels(const std::shared_ptr<uvc::device> &device,
const std::shared_ptr<ChannelsAdapter> &adapter)
: device_(device),
adapter_(adapter),
is_imu_tracking_(false),
imu_track_stop_(false),
imu_sn_(0),
imu_callback_(nullptr) {
VLOG(2) << __func__;
UpdateControlInfos();
}
@ -124,6 +136,14 @@ Channels::~Channels() {
StopImuTracking();
}
std::int32_t Channels::GetAccelRangeDefault() {
return adapter_->GetAccelRangeDefault();
}
std::int32_t Channels::GetGyroRangeDefault() {
return adapter_->GetGyroRangeDefault();
}
void Channels::LogControlInfos() const {
for (auto &&it = control_infos_.begin(); it != control_infos_.end(); it++) {
LOG(INFO) << it->first << ": min=" << it->second.min
@ -133,17 +153,23 @@ void Channels::LogControlInfos() const {
}
void Channels::UpdateControlInfos() {
for (auto &&option : std::vector<Option>{Option::GAIN, Option::BRIGHTNESS,
Option::CONTRAST}) {
control_infos_[option] = PuControlInfo(option);
auto &&supports = adapter_->GetOptionSupports();
for (auto &&option : std::vector<Option>{
Option::GAIN, Option::BRIGHTNESS, Option::CONTRAST}) {
if (supports.find(option) != supports.end())
control_infos_[option] = PuControlInfo(option);
}
for (auto &&option : std::vector<Option>{
Option::FRAME_RATE, Option::IMU_FREQUENCY, Option::EXPOSURE_MODE,
Option::MAX_GAIN, Option::MAX_EXPOSURE_TIME,
Option::DESIRED_BRIGHTNESS, Option::IR_CONTROL, Option::HDR_MODE,
Option::ACCELEROMETER_RANGE, Option::GYROSCOPE_RANGE}) {
control_infos_[option] = XuControlInfo(option);
Option::FRAME_RATE, Option::IMU_FREQUENCY,
Option::EXPOSURE_MODE, Option::MAX_GAIN,
Option::MAX_EXPOSURE_TIME, Option::MIN_EXPOSURE_TIME,
Option::DESIRED_BRIGHTNESS, Option::IR_CONTROL,
Option::HDR_MODE, Option::ACCELEROMETER_RANGE,
Option::GYROSCOPE_RANGE, Option::ACCELEROMETER_LOW_PASS_FILTER,
Option::GYROSCOPE_LOW_PASS_FILTER}) {
if (supports.find(option) != supports.end())
control_infos_[option] = XuControlInfo(option);
}
if (VLOG_IS_ON(2)) {
@ -184,8 +210,11 @@ std::int32_t Channels::GetControlValue(const Option &option) const {
case Option::DESIRED_BRIGHTNESS:
case Option::IR_CONTROL:
case Option::HDR_MODE:
case Option::MIN_EXPOSURE_TIME:
case Option::ACCELEROMETER_RANGE:
case Option::GYROSCOPE_RANGE:
case Option::ACCELEROMETER_LOW_PASS_FILTER:
case Option::GYROSCOPE_LOW_PASS_FILTER:
return XuCamCtrlGet(option);
case Option::ZERO_DRIFT_CALIBRATION:
case Option::ERASE_CHIP:
@ -242,12 +271,22 @@ void Channels::SetControlValue(const Option &option, std::int32_t value) {
XuCamCtrlSet(option, value);
} break;
case Option::ACCELEROMETER_RANGE: {
if (!in_range() || !in_values({4, 8, 16, 32}))
if (!in_range() || !in_values(adapter_->GetAccelRangeValues()))
break;
XuCamCtrlSet(option, value);
} break;
case Option::GYROSCOPE_RANGE: {
if (!in_range() || !in_values({500, 1000, 2000, 4000}))
if (!in_range() || !in_values(adapter_->GetGyroRangeValues()))
break;
XuCamCtrlSet(option, value);
} break;
case Option::ACCELEROMETER_LOW_PASS_FILTER: {
if (!in_range() || !in_values({0, 1, 2}))
break;
XuCamCtrlSet(option, value);
} break;
case Option::GYROSCOPE_LOW_PASS_FILTER: {
if (!in_range() || !in_values({23, 64}))
break;
XuCamCtrlSet(option, value);
} break;
@ -256,7 +295,8 @@ void Channels::SetControlValue(const Option &option, std::int32_t value) {
case Option::MAX_EXPOSURE_TIME:
case Option::DESIRED_BRIGHTNESS:
case Option::IR_CONTROL:
case Option::HDR_MODE: {
case Option::HDR_MODE:
case Option::MIN_EXPOSURE_TIME: {
if (!in_range())
break;
XuCamCtrlSet(option, value);
@ -287,8 +327,11 @@ bool Channels::RunControlAction(const Option &option) const {
case Option::DESIRED_BRIGHTNESS:
case Option::IR_CONTROL:
case Option::HDR_MODE:
case Option::MIN_EXPOSURE_TIME:
case Option::ACCELEROMETER_RANGE:
case Option::GYROSCOPE_RANGE:
case Option::ACCELEROMETER_LOW_PASS_FILTER:
case Option::GYROSCOPE_LOW_PASS_FILTER:
LOG(WARNING) << option << " run action useless";
return false;
default:
@ -318,6 +361,10 @@ void Channels::DoImuTrack() {
return;
}
if (res_packet.packets.back().count == 0) {
return;
}
VLOG(2) << "Imu req sn: " << imu_sn_ << ", res count: " << []() {
std::size_t n = 0;
for (auto &&packet : res_packet.packets) {
@ -383,174 +430,6 @@ void Channels::StopImuTracking() {
}
}
namespace {
template <typename T>
T _from_data(const std::uint8_t *data) {
std::size_t size = sizeof(T) / sizeof(std::uint8_t);
T value = 0;
for (std::size_t i = 0; i < size; i++) {
value |= data[i] << (8 * (size - i - 1));
}
return value;
}
template <>
double _from_data(const std::uint8_t *data) {
return *(reinterpret_cast<const double *>(data));
}
std::string _from_data(const std::uint8_t *data, std::size_t count) {
std::string s(reinterpret_cast<const char *>(data), count);
strings::trim(s);
return s;
}
std::size_t from_data(Channels::device_info_t *info, const std::uint8_t *data) {
std::size_t i = 4; // skip vid, pid
// name, 16
info->name = _from_data(data + i, 16);
i += 16;
// serial_number, 16
info->serial_number = _from_data(data + i, 16);
i += 16;
// firmware_version, 2
info->firmware_version.set_major(data[i]);
info->firmware_version.set_minor(data[i + 1]);
i += 2;
// hardware_version, 3
info->hardware_version.set_major(data[i]);
info->hardware_version.set_minor(data[i + 1]);
info->hardware_version.set_flag(std::bitset<8>(data[i + 2]));
i += 3;
// spec_version, 2
info->spec_version.set_major(data[i]);
info->spec_version.set_minor(data[i + 1]);
i += 2;
// lens_type, 4
info->lens_type.set_vendor(_from_data<std::uint16_t>(data + i));
info->lens_type.set_product(_from_data<std::uint16_t>(data + i + 2));
i += 4;
// imu_type, 4
info->imu_type.set_vendor(_from_data<std::uint16_t>(data + i));
info->imu_type.set_product(_from_data<std::uint16_t>(data + i + 2));
i += 4;
// nominal_baseline, 2
info->nominal_baseline = _from_data<std::uint16_t>(data + i);
i += 2;
return i;
}
std::size_t from_data(
Intrinsics *in, const std::uint8_t *data, const Version *spec_version) {
std::size_t i = 0;
// width, 2
in->width = _from_data<std::uint16_t>(data + i);
i += 2;
// height, 2
in->height = _from_data<std::uint16_t>(data + i);
i += 2;
// fx, 8
in->fx = _from_data<double>(data + i);
i += 8;
// fy, 8
in->fy = _from_data<double>(data + i);
i += 8;
// cx, 8
in->cx = _from_data<double>(data + i);
i += 8;
// cy, 8
in->cy = _from_data<double>(data + i);
i += 8;
// model, 1
in->model = data[i];
i += 1;
// coeffs, 40
for (std::size_t j = 0; j < 5; j++) {
in->coeffs[j] = _from_data<double>(data + i + j * 8);
}
i += 40;
MYNTEYE_UNUSED(spec_version)
return i;
}
std::size_t from_data(
ImuIntrinsics *in, const std::uint8_t *data, const Version *spec_version) {
std::size_t i = 0;
// scale
for (std::size_t j = 0; j < 3; j++) {
for (std::size_t k = 0; k < 3; k++) {
in->scale[j][k] = _from_data<double>(data + i + (j * 3 + k) * 8);
}
}
i += 72;
// drift
for (std::size_t j = 0; j < 3; j++) {
in->drift[j] = _from_data<double>(data + i + j * 8);
}
i += 24;
// noise
for (std::size_t j = 0; j < 3; j++) {
in->noise[j] = _from_data<double>(data + i + j * 8);
}
i += 24;
// bias
for (std::size_t j = 0; j < 3; j++) {
in->bias[j] = _from_data<double>(data + i + j * 8);
}
i += 24;
MYNTEYE_UNUSED(spec_version)
return i;
}
std::size_t from_data(
Extrinsics *ex, const std::uint8_t *data, const Version *spec_version) {
std::size_t i = 0;
// rotation
for (std::size_t j = 0; j < 3; j++) {
for (std::size_t k = 0; k < 3; k++) {
ex->rotation[j][k] = _from_data<double>(data + i + (j * 3 + k) * 8);
}
}
i += 72;
// translation
for (std::size_t j = 0; j < 3; j++) {
ex->translation[j] = _from_data<double>(data + i + j * 8);
}
i += 24;
MYNTEYE_UNUSED(spec_version)
return i;
}
std::size_t from_data(
Channels::img_params_t *img_params, const std::uint8_t *data,
const Version *spec_version) {
std::size_t i = 0;
i += from_data(&img_params->in_left, data + i, spec_version);
i += from_data(&img_params->in_right, data + i, spec_version);
i += from_data(&img_params->ex_right_to_left, data + i, spec_version);
return i;
}
std::size_t from_data(
Channels::imu_params_t *imu_params, const std::uint8_t *data,
const Version *spec_version) {
std::size_t i = 0;
i += from_data(&imu_params->in_accel, data + i, spec_version);
i += from_data(&imu_params->in_gyro, data + i, spec_version);
i += from_data(&imu_params->ex_left_to_imu, data + i, spec_version);
return i;
}
} // namespace
bool Channels::GetFiles(
device_info_t *info, img_params_t *img_params, imu_params_t *imu_params,
Version *spec_version) const {
@ -578,7 +457,7 @@ bool Channels::GetFiles(
if (XuFileQuery(uvc::XU_QUERY_GET, 2000, data)) {
// header = std::bitset<8>(data[0]);
std::uint16_t size = _from_data<std::uint16_t>(data + 1);
std::uint16_t size = bytes::_from_data<std::uint16_t>(data + 1);
std::uint8_t checksum = data[3 + size];
VLOG(2) << "GetFiles data size: " << size << ", checksum: 0x" << std::hex
<< std::setw(2) << std::setfill('0') << static_cast<int>(checksum);
@ -601,30 +480,34 @@ bool Channels::GetFiles(
std::size_t end = 3 + size;
while (i < end) {
std::uint8_t file_id = *(data + i);
std::uint16_t file_size = _from_data<std::uint16_t>(data + i + 1);
std::uint16_t file_size = bytes::_from_data<std::uint16_t>(data + i + 1);
VLOG(2) << "GetFiles id: " << static_cast<int>(file_id)
<< ", size: " << file_size;
i += 3;
switch (file_id) {
case FID_DEVICE_INFO: {
CHECK_EQ(from_data(info, data + i), file_size)
CHECK_EQ(bytes::from_data(info, data + i), file_size)
<< "The firmware not support getting device info, you could "
"upgrade to latest";
spec_ver = &info->spec_version;
CheckSpecVersion(spec_ver);
} break;
case FID_IMG_PARAMS: {
img_params->ok = file_size > 0;
if (img_params->ok) {
if (file_size > 0) {
CheckSpecVersion(spec_ver);
CHECK_EQ(from_data(img_params, data + i, spec_ver), file_size);
/*auto &&n = */adapter_->GetImgParamsFromData(
data + i, spec_ver, img_params);
// Considering the upgrade, comment this
// CHECK_EQ(n, file_size);
}
} break;
case FID_IMU_PARAMS: {
imu_params->ok = file_size > 0;
if (imu_params->ok) {
CheckSpecVersion(spec_ver);
CHECK_EQ(from_data(imu_params, data + i, spec_ver), file_size);
auto &&n = adapter_->GetImuParamsFromData(
data + i, spec_ver, imu_params);
CHECK_EQ(n, file_size);
}
} break;
default:
@ -641,198 +524,6 @@ bool Channels::GetFiles(
}
}
namespace {
template <typename T>
std::size_t _to_data(T value, std::uint8_t *data) {
std::size_t size = sizeof(T) / sizeof(std::uint8_t);
for (std::size_t i = 0; i < size; i++) {
data[i] = static_cast<std::uint8_t>((value >> (8 * (size - i - 1))) & 0xFF);
}
return size;
}
template <>
std::size_t _to_data(double value, std::uint8_t *data) {
std::uint8_t *val = reinterpret_cast<std::uint8_t *>(&value);
std::copy(val, val + 8, data);
return 8;
}
std::size_t _to_data(std::string value, std::uint8_t *data, std::size_t count) {
std::copy(value.begin(), value.end(), data);
for (std::size_t i = value.size(); i < count; i++) {
data[i] = ' ';
}
return count;
}
// Serializes device info as a FID_DEVICE_INFO record: a 3-byte header
// (id + big-endian payload size, written last) followed by the payload.
// vid/pid are owned by the device and are left untouched (4 bytes skipped).
// Returns total bytes written (payload + 3).
std::size_t to_data(
    const Channels::device_info_t *info, std::uint8_t *data,
    const Version *spec_version) {
  std::size_t i = 3;  // skip id, size
  i += 4;  // skip vid, pid
  // name, 16
  _to_data(info->name, data + i, 16);
  i += 16;
  // serial_number, 16
  _to_data(info->serial_number, data + i, 16);
  i += 16;
  // firmware_version, 2
  data[i] = info->firmware_version.major();
  data[i + 1] = info->firmware_version.minor();
  i += 2;
  // hardware_version, 3 (major, minor, flag bits)
  data[i] = info->hardware_version.major();
  data[i + 1] = info->hardware_version.minor();
  data[i + 2] =
      static_cast<std::uint8_t>(info->hardware_version.flag().to_ulong());
  i += 3;
  // spec_version, 2
  data[i] = info->spec_version.major();
  data[i + 1] = info->spec_version.minor();
  i += 2;
  // lens_type, 4 (vendor + product, big-endian uint16 each)
  _to_data(info->lens_type.vendor(), data + i);
  _to_data(info->lens_type.product(), data + i + 2);
  i += 4;
  // imu_type, 4 (vendor + product, big-endian uint16 each)
  _to_data(info->imu_type.vendor(), data + i);
  _to_data(info->imu_type.product(), data + i + 2);
  i += 4;
  // nominal_baseline, 2
  _to_data(info->nominal_baseline, data + i);
  i += 2;
  MYNTEYE_UNUSED(spec_version)
  // others: back-fill the record header now that the payload size is known
  std::size_t size = i - 3;
  data[0] = Channels::FID_DEVICE_INFO;
  data[1] = static_cast<std::uint8_t>((size >> 8) & 0xFF);
  data[2] = static_cast<std::uint8_t>(size & 0xFF);
  return size + 3;
}
// Serializes camera intrinsics. Fixed layout (77 bytes):
// width(2) height(2) fx/fy/cx/cy(8 each, raw IEEE-754 doubles)
// model(1) coeffs(5 x 8). Returns bytes written.
std::size_t to_data(
    const Intrinsics *in, std::uint8_t *data, const Version *spec_version) {
  std::size_t i = 0;
  // width, 2
  _to_data(in->width, data + i);
  i += 2;
  // height, 2
  _to_data(in->height, data + i);
  i += 2;
  // fx, 8
  _to_data(in->fx, data + i);
  i += 8;
  // fy, 8
  _to_data(in->fy, data + i);
  i += 8;
  // cx, 8
  _to_data(in->cx, data + i);
  i += 8;
  // cy, 8
  _to_data(in->cy, data + i);
  i += 8;
  // model, 1
  data[i] = in->model;
  i += 1;
  // coeffs, 40 (5 distortion coefficients)
  for (std::size_t j = 0; j < 5; j++) {
    _to_data(in->coeffs[j], data + i + j * 8);
  }
  i += 40;
  MYNTEYE_UNUSED(spec_version)
  return i;
}
// Serializes IMU intrinsics: 3x3 scale matrix, then drift, noise and bias
// vectors; every element is an 8-byte raw double (144 bytes total).
std::size_t to_data(
    const ImuIntrinsics *in, std::uint8_t *data, const Version *spec_version) {
  std::size_t offset = 0;
  // scale, 3x3 row-major
  for (std::size_t r = 0; r < 3; r++) {
    for (std::size_t c = 0; c < 3; c++) {
      _to_data(in->scale[r][c], data + offset);
      offset += 8;
    }
  }
  // drift
  for (std::size_t k = 0; k < 3; k++) {
    _to_data(in->drift[k], data + offset);
    offset += 8;
  }
  // noise
  for (std::size_t k = 0; k < 3; k++) {
    _to_data(in->noise[k], data + offset);
    offset += 8;
  }
  // bias
  for (std::size_t k = 0; k < 3; k++) {
    _to_data(in->bias[k], data + offset);
    offset += 8;
  }
  MYNTEYE_UNUSED(spec_version)
  return offset;
}
// Serializes extrinsics: 3x3 rotation matrix then translation vector,
// every element an 8-byte raw double (96 bytes total).
std::size_t to_data(
    const Extrinsics *ex, std::uint8_t *data, const Version *spec_version) {
  std::size_t offset = 0;
  // rotation, row-major
  for (std::size_t r = 0; r < 3; r++) {
    for (std::size_t c = 0; c < 3; c++) {
      _to_data(ex->rotation[r][c], data + offset);
      offset += 8;
    }
  }
  // translation
  for (std::size_t k = 0; k < 3; k++) {
    _to_data(ex->translation[k], data + offset);
    offset += 8;
  }
  MYNTEYE_UNUSED(spec_version)
  return offset;
}
// Serializes stereo image calibration as a FID_IMG_PARAMS record:
// 3-byte header (id + big-endian payload size), then left intrinsics,
// right intrinsics and right-to-left extrinsics. Returns bytes written.
std::size_t to_data(
    const Channels::img_params_t *img_params, std::uint8_t *data,
    const Version *spec_version) {
  std::size_t i = 3;  // skip id, size
  i += to_data(&img_params->in_left, data + i, spec_version);
  i += to_data(&img_params->in_right, data + i, spec_version);
  i += to_data(&img_params->ex_right_to_left, data + i, spec_version);
  // others: back-fill the record header
  std::size_t size = i - 3;
  data[0] = Channels::FID_IMG_PARAMS;
  data[1] = static_cast<std::uint8_t>((size >> 8) & 0xFF);
  data[2] = static_cast<std::uint8_t>(size & 0xFF);
  return size + 3;
}
// Serializes IMU calibration as a FID_IMU_PARAMS record:
// 3-byte header (id + big-endian payload size), then accel intrinsics,
// gyro intrinsics and left-camera-to-IMU extrinsics. Returns bytes written.
std::size_t to_data(
    const Channels::imu_params_t *imu_params, std::uint8_t *data,
    const Version *spec_version) {
  std::size_t i = 3;  // skip id, size
  i += to_data(&imu_params->in_accel, data + i, spec_version);
  i += to_data(&imu_params->in_gyro, data + i, spec_version);
  i += to_data(&imu_params->ex_left_to_imu, data + i, spec_version);
  // others: back-fill the record header
  std::size_t size = i - 3;
  data[0] = Channels::FID_IMU_PARAMS;
  data[1] = static_cast<std::uint8_t>((size >> 8) & 0xFF);
  data[2] = static_cast<std::uint8_t>(size & 0xFF);
  return size + 3;
}
} // namespace
bool Channels::SetFiles(
device_info_t *info, img_params_t *img_params, imu_params_t *imu_params,
Version *spec_version) {
@ -854,15 +545,15 @@ bool Channels::SetFiles(
std::uint16_t size = 0;
if (info != nullptr) {
header[0] = true;
size += to_data(info, data + 3 + size, spec_ver);
size += bytes::to_data(info, data + 3 + size, spec_ver);
}
if (img_params != nullptr) {
header[1] = true;
size += to_data(img_params, data + 3 + size, spec_ver);
size += adapter_->SetImgParamsToData(img_params, spec_ver, data + 3 + size);
}
if (imu_params != nullptr) {
header[2] = true;
size += to_data(imu_params, data + 3 + size, spec_ver);
size += adapter_->SetImuParamsToData(imu_params, spec_ver, data + 3 + size);
}
data[0] = static_cast<std::uint8_t>(header.to_ulong());
@ -986,7 +677,7 @@ bool Channels::XuImuRead(ImuResPacket *res) const {
static std::uint8_t data[2000]{};
// std::fill(data, data + 2000, 0); // reset
if (XuControlQuery(CHANNEL_IMU_READ, uvc::XU_QUERY_GET, 2000, data)) {
res->from_data(data);
adapter_->GetImuResPacket(data, res);
if (res->header != 0x5B) {
LOG(WARNING) << "Imu response packet header must be 0x5B, but 0x"
@ -1047,4 +738,309 @@ Channels::control_info_t Channels::XuControlInfo(Option option) const {
return {min, max, def};
}
// Default IMU-calibration parser shared by all models: accel intrinsics,
// gyro intrinsics, then left-camera-to-IMU extrinsics, in that order.
// Returns the number of payload bytes consumed.
std::size_t ChannelsAdapter::GetImuParamsFromData(
    const std::uint8_t *data, const Version *version,
    Channels::imu_params_t *imu_params) {
  std::size_t offset = 0;
  offset += bytes::from_data(&imu_params->in_accel, data + offset, version);
  offset += bytes::from_data(&imu_params->in_gyro, data + offset, version);
  offset += bytes::from_data(&imu_params->ex_left_to_imu, data + offset,
      version);
  return offset;
}
// Default IMU-calibration writer: emits a FID_IMU_PARAMS record with a
// 3-byte header (id + big-endian payload size) followed by accel
// intrinsics, gyro intrinsics and left-to-IMU extrinsics.
// Returns total bytes written (payload + header).
std::size_t ChannelsAdapter::SetImuParamsToData(
    const Channels::imu_params_t *imu_params, const Version *version,
    std::uint8_t *data) {
  std::size_t end = 3;  // payload starts after id + size
  end += bytes::to_data(&imu_params->in_accel, data + end, version);
  end += bytes::to_data(&imu_params->in_gyro, data + end, version);
  end += bytes::to_data(&imu_params->ex_left_to_imu, data + end, version);
  // back-fill the record header now that the payload size is known
  const std::size_t payload = end - 3;
  data[0] = Channels::FID_IMU_PARAMS;
  data[1] = static_cast<std::uint8_t>((payload >> 8) & 0xFF);
  data[2] = static_cast<std::uint8_t>(payload & 0xFF);
  return payload + 3;
}
namespace bytes {
// from
// Decodes a fixed-width text field: reads `count` raw bytes and strips
// surrounding whitespace (fields are space-padded on write).
std::string _from_data(const std::uint8_t *data, std::size_t count) {
  std::string s(reinterpret_cast<const char *>(data), count);
  strings::trim(s);
  return s;
}
// Deserializes device info from a FID_DEVICE_INFO payload; mirrors
// bytes::to_data(device_info_t). Starts 4 bytes in because vid/pid are
// present on the wire but not stored in the struct.
// Returns the number of bytes consumed.
std::size_t from_data(Channels::device_info_t *info, const std::uint8_t *data) {
  std::size_t i = 4;  // skip vid, pid
  // name, 16
  info->name = _from_data(data + i, 16);
  i += 16;
  // serial_number, 16
  info->serial_number = _from_data(data + i, 16);
  i += 16;
  // firmware_version, 2
  info->firmware_version.set_major(data[i]);
  info->firmware_version.set_minor(data[i + 1]);
  i += 2;
  // hardware_version, 3 (major, minor, flag bits)
  info->hardware_version.set_major(data[i]);
  info->hardware_version.set_minor(data[i + 1]);
  info->hardware_version.set_flag(std::bitset<8>(data[i + 2]));
  i += 3;
  // spec_version, 2
  info->spec_version.set_major(data[i]);
  info->spec_version.set_minor(data[i + 1]);
  i += 2;
  // lens_type, 4 (vendor + product, big-endian uint16 each)
  info->lens_type.set_vendor(_from_data<std::uint16_t>(data + i));
  info->lens_type.set_product(_from_data<std::uint16_t>(data + i + 2));
  i += 4;
  // imu_type, 4 (vendor + product, big-endian uint16 each)
  info->imu_type.set_vendor(_from_data<std::uint16_t>(data + i));
  info->imu_type.set_product(_from_data<std::uint16_t>(data + i + 2));
  i += 4;
  // nominal_baseline, 2
  info->nominal_baseline = _from_data<std::uint16_t>(data + i);
  i += 2;
  return i;
}
// Deserializes camera intrinsics; mirrors bytes::to_data(Intrinsics).
// Fixed layout (77 bytes): width(2) height(2) fx/fy/cx/cy(8 each, raw
// IEEE-754 doubles) model(1) coeffs(5 x 8). Returns bytes consumed.
std::size_t from_data(Intrinsics *in, const std::uint8_t *data,
    const Version *spec_version) {
  std::size_t i = 0;
  // width, 2
  in->width = _from_data<std::uint16_t>(data + i);
  i += 2;
  // height, 2
  in->height = _from_data<std::uint16_t>(data + i);
  i += 2;
  // fx, 8
  in->fx = _from_data<double>(data + i);
  i += 8;
  // fy, 8
  in->fy = _from_data<double>(data + i);
  i += 8;
  // cx, 8
  in->cx = _from_data<double>(data + i);
  i += 8;
  // cy, 8
  in->cy = _from_data<double>(data + i);
  i += 8;
  // model, 1
  in->model = data[i];
  i += 1;
  // coeffs, 40 (5 distortion coefficients)
  for (std::size_t j = 0; j < 5; j++) {
    in->coeffs[j] = _from_data<double>(data + i + j * 8);
  }
  i += 40;
  MYNTEYE_UNUSED(spec_version)
  return i;
}
// Deserializes IMU intrinsics: 3x3 scale matrix, then drift, noise and
// bias vectors; every element an 8-byte raw double (144 bytes total).
std::size_t from_data(ImuIntrinsics *in, const std::uint8_t *data,
    const Version *spec_version) {
  std::size_t offset = 0;
  // scale, 3x3 row-major
  for (std::size_t r = 0; r < 3; r++) {
    for (std::size_t c = 0; c < 3; c++) {
      in->scale[r][c] = _from_data<double>(data + offset);
      offset += 8;
    }
  }
  // drift
  for (std::size_t k = 0; k < 3; k++) {
    in->drift[k] = _from_data<double>(data + offset);
    offset += 8;
  }
  // noise
  for (std::size_t k = 0; k < 3; k++) {
    in->noise[k] = _from_data<double>(data + offset);
    offset += 8;
  }
  // bias
  for (std::size_t k = 0; k < 3; k++) {
    in->bias[k] = _from_data<double>(data + offset);
    offset += 8;
  }
  MYNTEYE_UNUSED(spec_version)
  return offset;
}
// Deserializes extrinsics: 3x3 rotation matrix then translation vector,
// every element an 8-byte raw double (96 bytes total).
std::size_t from_data(Extrinsics *ex, const std::uint8_t *data,
    const Version *spec_version) {
  std::size_t offset = 0;
  // rotation, row-major
  for (std::size_t r = 0; r < 3; r++) {
    for (std::size_t c = 0; c < 3; c++) {
      ex->rotation[r][c] = _from_data<double>(data + offset);
      offset += 8;
    }
  }
  // translation
  for (std::size_t k = 0; k < 3; k++) {
    ex->translation[k] = _from_data<double>(data + offset);
    offset += 8;
  }
  MYNTEYE_UNUSED(spec_version)
  return offset;
}
// to
// Writes a fixed-width text field of exactly `count` bytes: the string's
// characters followed by space padding (trimmed again by _from_data).
// Fix: the original copied value.size() bytes unconditionally, so a string
// longer than `count` overran the destination field and whatever record
// followed it. The string is now truncated to the field width.
std::size_t _to_data(std::string value, std::uint8_t *data, std::size_t count) {
  const std::size_t n = std::min(value.size(), count);
  std::copy(value.begin(), value.begin() + n, data);
  for (std::size_t i = n; i < count; i++) {
    data[i] = ' ';
  }
  return count;
}
// Serializes device info as a FID_DEVICE_INFO record: a 3-byte header
// (id + big-endian payload size, written last) followed by the payload.
// vid/pid are owned by the device and are left untouched (4 bytes skipped).
// Returns total bytes written (payload + 3).
std::size_t to_data(const Channels::device_info_t *info, std::uint8_t *data,
    const Version *spec_version) {
  std::size_t i = 3;  // skip id, size
  i += 4;  // skip vid, pid
  // name, 16
  _to_data(info->name, data + i, 16);
  i += 16;
  // serial_number, 16
  _to_data(info->serial_number, data + i, 16);
  i += 16;
  // firmware_version, 2
  data[i] = info->firmware_version.major();
  data[i + 1] = info->firmware_version.minor();
  i += 2;
  // hardware_version, 3 (major, minor, flag bits)
  data[i] = info->hardware_version.major();
  data[i + 1] = info->hardware_version.minor();
  data[i + 2] =
      static_cast<std::uint8_t>(info->hardware_version.flag().to_ulong());
  i += 3;
  // spec_version, 2
  data[i] = info->spec_version.major();
  data[i + 1] = info->spec_version.minor();
  i += 2;
  // lens_type, 4 (vendor + product, big-endian uint16 each)
  _to_data(info->lens_type.vendor(), data + i);
  _to_data(info->lens_type.product(), data + i + 2);
  i += 4;
  // imu_type, 4 (vendor + product, big-endian uint16 each)
  _to_data(info->imu_type.vendor(), data + i);
  _to_data(info->imu_type.product(), data + i + 2);
  i += 4;
  // nominal_baseline, 2
  _to_data(info->nominal_baseline, data + i);
  i += 2;
  MYNTEYE_UNUSED(spec_version)
  // others: back-fill the record header now that the payload size is known
  std::size_t size = i - 3;
  data[0] = Channels::FID_DEVICE_INFO;
  data[1] = static_cast<std::uint8_t>((size >> 8) & 0xFF);
  data[2] = static_cast<std::uint8_t>(size & 0xFF);
  return size + 3;
}
// Serializes camera intrinsics; mirrors bytes::from_data(Intrinsics).
// Fixed layout (77 bytes): width(2) height(2) fx/fy/cx/cy(8 each, raw
// IEEE-754 doubles) model(1) coeffs(5 x 8). Returns bytes written.
std::size_t to_data(const Intrinsics *in, std::uint8_t *data,
    const Version *spec_version) {
  std::size_t i = 0;
  // width, 2
  _to_data(in->width, data + i);
  i += 2;
  // height, 2
  _to_data(in->height, data + i);
  i += 2;
  // fx, 8
  _to_data(in->fx, data + i);
  i += 8;
  // fy, 8
  _to_data(in->fy, data + i);
  i += 8;
  // cx, 8
  _to_data(in->cx, data + i);
  i += 8;
  // cy, 8
  _to_data(in->cy, data + i);
  i += 8;
  // model, 1
  data[i] = in->model;
  i += 1;
  // coeffs, 40 (5 distortion coefficients)
  for (std::size_t j = 0; j < 5; j++) {
    _to_data(in->coeffs[j], data + i + j * 8);
  }
  i += 40;
  MYNTEYE_UNUSED(spec_version)
  return i;
}
// Serializes IMU intrinsics: 3x3 scale matrix, then drift, noise and bias
// vectors; every element an 8-byte raw double (144 bytes total).
std::size_t to_data(const ImuIntrinsics *in, std::uint8_t *data,
    const Version *spec_version) {
  std::size_t offset = 0;
  // scale, 3x3 row-major
  for (std::size_t r = 0; r < 3; r++) {
    for (std::size_t c = 0; c < 3; c++) {
      _to_data(in->scale[r][c], data + offset);
      offset += 8;
    }
  }
  // drift
  for (std::size_t k = 0; k < 3; k++) {
    _to_data(in->drift[k], data + offset);
    offset += 8;
  }
  // noise
  for (std::size_t k = 0; k < 3; k++) {
    _to_data(in->noise[k], data + offset);
    offset += 8;
  }
  // bias
  for (std::size_t k = 0; k < 3; k++) {
    _to_data(in->bias[k], data + offset);
    offset += 8;
  }
  MYNTEYE_UNUSED(spec_version)
  return offset;
}
// Serializes extrinsics: 3x3 rotation matrix then translation vector,
// every element an 8-byte raw double (96 bytes total).
std::size_t to_data(const Extrinsics *ex, std::uint8_t *data,
    const Version *spec_version) {
  std::size_t offset = 0;
  // rotation, row-major
  for (std::size_t r = 0; r < 3; r++) {
    for (std::size_t c = 0; c < 3; c++) {
      _to_data(ex->rotation[r][c], data + offset);
      offset += 8;
    }
  }
  // translation
  for (std::size_t k = 0; k < 3; k++) {
    _to_data(ex->translation[k], data + offset);
    offset += 8;
  }
  MYNTEYE_UNUSED(spec_version)
  return offset;
}
} // namespace bytes
MYNTEYE_END_NAMESPACE

View File

@ -15,12 +15,17 @@
#define MYNTEYE_DEVICE_CHANNELS_H_
#pragma once
#include <algorithm>
#include <cstdint>
#include <cstring>
#include <map>
#include <memory>
#include <set>
#include <string>
#include <thread>
#include <vector>
#include "mynteye/mynteye.h"
#include "mynteye/types.h"
#include "mynteye/device/device.h"
#include "mynteye/device/types.h"
#include "mynteye/uvc/uvc.h"
@ -33,6 +38,8 @@ struct xu;
} // namespace uvc
class ChannelsAdapter;
class MYNTEYE_API Channels {
public:
typedef enum Channel {
@ -67,23 +74,16 @@ class MYNTEYE_API Channels {
using device_info_t = DeviceInfo;
typedef struct ImgParams {
bool ok;
Intrinsics in_left;
Intrinsics in_right;
Extrinsics ex_right_to_left;
} img_params_t;
using img_params_t = std::map<Resolution, device::img_params_t>;
using imu_params_t = device::imu_params_t;
typedef struct ImuParams {
bool ok;
ImuIntrinsics in_accel;
ImuIntrinsics in_gyro;
Extrinsics ex_left_to_imu;
} imu_params_t;
explicit Channels(std::shared_ptr<uvc::device> device);
Channels(const std::shared_ptr<uvc::device> &device,
const std::shared_ptr<ChannelsAdapter> &adapter);
~Channels();
std::int32_t GetAccelRangeDefault();
std::int32_t GetGyroRangeDefault();
void LogControlInfos() const;
void UpdateControlInfos();
control_info_t GetControlInfo(const Option &option) const;
@ -140,6 +140,7 @@ class MYNTEYE_API Channels {
control_info_t XuControlInfo(Option option) const;
std::shared_ptr<uvc::device> device_;
std::shared_ptr<ChannelsAdapter> adapter_;
std::map<Option, control_info_t> control_infos_;
@ -151,6 +152,101 @@ class MYNTEYE_API Channels {
imu_callback_t imu_callback_;
};
// Per-model extension point for Channels: concrete adapters (one per
// device generation) supply the supported option set, IMU range
// defaults, IMU packet decoding, and calibration (de)serialization.
class ChannelsAdapter {
 public:
  virtual ~ChannelsAdapter() {}

  // Options this model exposes through the control channels.
  virtual std::set<Option> GetOptionSupports() = 0;

  // Accelerometer range: fallback default and the selectable values.
  virtual std::int32_t GetAccelRangeDefault() = 0;
  virtual std::vector<std::int32_t> GetAccelRangeValues() = 0;

  // Gyroscope range: fallback default and the selectable values.
  virtual std::int32_t GetGyroRangeDefault() = 0;
  virtual std::vector<std::int32_t> GetGyroRangeValues() = 0;

  // Decodes a raw IMU-read buffer into an ImuResPacket (wire format is
  // model-specific).
  virtual void GetImuResPacket(const std::uint8_t *data, ImuResPacket *res) = 0;

  // Image calibration (de)serialization; layout is model-specific.
  virtual std::size_t GetImgParamsFromData(
      const std::uint8_t *data, const Version *version,
      Channels::img_params_t *img_params) = 0;
  virtual std::size_t SetImgParamsToData(
      const Channels::img_params_t *img_params, const Version *version,
      std::uint8_t *data) = 0;

  // IMU calibration (de)serialization; a shared default implementation
  // is provided, overridable per model.
  virtual std::size_t GetImuParamsFromData(
      const std::uint8_t *data, const Version *version,
      Channels::imu_params_t *imu_params);
  virtual std::size_t SetImuParamsToData(
      const Channels::imu_params_t *imu_params, const Version *version,
      std::uint8_t *data);
};
namespace bytes {
// from
// Reads an integral value stored big-endian (most significant byte first).
// Fix: each byte is widened to T before shifting. With the original
// `data[i] << shift` the operand was promoted to (signed) int, so for a
// 32-bit T a set high bit shifted into the sign position — undefined
// behavior before C++20 — and the result could sign-extend when OR-ed
// into a wider T.
template <typename T>
T _from_data(const std::uint8_t *data) {
  std::size_t size = sizeof(T) / sizeof(std::uint8_t);
  T value = 0;
  for (std::size_t i = 0; i < size; i++) {
    value |= static_cast<T>(data[i]) << (8 * (size - i - 1));
  }
  return value;
}
// Reads an 8-byte IEEE-754 double stored verbatim (not byte-swapped).
// Fix: use memcpy instead of dereferencing a reinterpret_cast'ed
// double pointer — `data` points into a byte buffer, so the cast
// violated strict aliasing and could be misaligned (UB on platforms
// with alignment-sensitive loads).
template <>
inline double _from_data(const std::uint8_t *data) {
  double value;
  std::memcpy(&value, data, sizeof(double));
  return value;
}
std::string _from_data(const std::uint8_t *data, std::size_t count);
std::size_t from_data(Channels::device_info_t *info, const std::uint8_t *data);
std::size_t from_data(Intrinsics *in, const std::uint8_t *data,
const Version *spec_version);
std::size_t from_data(ImuIntrinsics *in, const std::uint8_t *data,
const Version *spec_version);
std::size_t from_data(Extrinsics *ex, const std::uint8_t *data,
const Version *spec_version);
// to
// Writes an integral value big-endian (most significant byte first).
// Returns the number of bytes written, i.e. sizeof(T).
template <typename T>
std::size_t _to_data(T value, std::uint8_t *data) {
  constexpr std::size_t size = sizeof(T) / sizeof(std::uint8_t);
  std::size_t shift = 8 * size;
  for (std::size_t i = 0; i < size; i++) {
    shift -= 8;
    data[i] = static_cast<std::uint8_t>((value >> shift) & 0xFF);
  }
  return size;
}
// Doubles are not byte-swapped: the 8-byte in-memory IEEE-754
// representation is copied out verbatim.
template <>
inline std::size_t _to_data(double value, std::uint8_t *data) {
  const std::uint8_t *bytes = reinterpret_cast<const std::uint8_t *>(&value);
  std::copy(bytes, bytes + 8, data);
  return 8;
}
std::size_t _to_data(std::string value, std::uint8_t *data, std::size_t count);
std::size_t to_data(const Channels::device_info_t *info, std::uint8_t *data,
const Version *spec_version);
std::size_t to_data(const Intrinsics *in, std::uint8_t *data,
const Version *spec_version);
std::size_t to_data(const ImuIntrinsics *in, std::uint8_t *data,
const Version *spec_version);
std::size_t to_data(const Extrinsics *ex, std::uint8_t *data,
const Version *spec_version);
} // namespace bytes
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_DEVICE_CHANNELS_H_

View File

@ -16,22 +16,55 @@
MYNTEYE_BEGIN_NAMESPACE
const std::map<Model, StreamSupports> stream_supports_map = {
{Model::STANDARD, {Stream::LEFT, Stream::RIGHT}}};
{Model::STANDARD, {Stream::LEFT, Stream::RIGHT}},
{Model::STANDARD2, {Stream::LEFT, Stream::RIGHT}}
};
const std::map<Model, CapabilitiesSupports> capabilities_supports_map = {
{Model::STANDARD, {Capabilities::STEREO, Capabilities::IMU}}};
{Model::STANDARD, {Capabilities::STEREO, Capabilities::IMU}},
{Model::STANDARD2, {Capabilities::STEREO_COLOR, Capabilities::IMU}}
};
const std::map<Model, OptionSupports> option_supports_map = {
{Model::STANDARD,
{Option::GAIN, Option::BRIGHTNESS, Option::CONTRAST, Option::FRAME_RATE,
Option::IMU_FREQUENCY, Option::EXPOSURE_MODE, Option::MAX_GAIN,
Option::MAX_EXPOSURE_TIME, Option::DESIRED_BRIGHTNESS, Option::IR_CONTROL,
Option::HDR_MODE, Option::ZERO_DRIFT_CALIBRATION, Option::ERASE_CHIP,
Option::ACCELEROMETER_RANGE, Option::GYROSCOPE_RANGE}}};
{Model::STANDARD, {
Option::GAIN, Option::BRIGHTNESS, Option::CONTRAST,
Option::FRAME_RATE, Option::IMU_FREQUENCY,
Option::EXPOSURE_MODE, Option::MAX_GAIN, Option::MAX_EXPOSURE_TIME,
Option::DESIRED_BRIGHTNESS,
Option::IR_CONTROL,
Option::HDR_MODE,
Option::ACCELEROMETER_RANGE, Option::GYROSCOPE_RANGE,
Option::ZERO_DRIFT_CALIBRATION,
Option::ERASE_CHIP}
},
{Model::STANDARD2, {
Option::BRIGHTNESS,
Option::EXPOSURE_MODE, Option::MAX_GAIN, Option::MAX_EXPOSURE_TIME,
Option::MIN_EXPOSURE_TIME, Option::DESIRED_BRIGHTNESS,
Option::ACCELEROMETER_RANGE, Option::GYROSCOPE_RANGE,
Option::ACCELEROMETER_LOW_PASS_FILTER, Option::GYROSCOPE_LOW_PASS_FILTER,
Option::ERASE_CHIP}
}
};
const std::map<Model, std::map<Capabilities, StreamRequests>>
stream_requests_map = {
{Model::STANDARD,
{{Capabilities::STEREO, {{752, 480, Format::YUYV, 25}}}}}};
stream_requests_map = {
{Model::STANDARD,
{{Capabilities::STEREO, {
{752, 480, Format::YUYV, 25}}
}}
},
{Model::STANDARD2,
{{Capabilities::STEREO_COLOR, {
{1280, 400, Format::BGR888, 10},
{1280, 400, Format::BGR888, 20},
{1280, 400, Format::BGR888, 30},
{1280, 400, Format::BGR888, 60},
{2560, 800, Format::BGR888, 10},
{2560, 800, Format::BGR888, 20},
{2560, 800, Format::BGR888, 30}}
}}
}
};
MYNTEYE_END_NAMESPACE

View File

@ -23,8 +23,9 @@
#include "mynteye/device/async_callback.h"
#include "mynteye/device/channels.h"
#include "mynteye/device/config.h"
#include "mynteye/device/device_s.h"
#include "mynteye/device/motions.h"
#include "mynteye/device/standard/device_s.h"
#include "mynteye/device/standard2/device_s2.h"
#include "mynteye/device/streams.h"
#include "mynteye/device/types.h"
#include "mynteye/util/strings.h"
@ -78,14 +79,17 @@ bool CheckSupports(
} // namespace
Device::Device(const Model &model, std::shared_ptr<uvc::device> device)
: video_streaming_(false),
motion_tracking_(false),
model_(model),
device_(device),
streams_(nullptr),
channels_(std::make_shared<Channels>(device)),
motions_(std::make_shared<Motions>(channels_)) {
Device::Device(const Model &model,
const std::shared_ptr<uvc::device> &device,
const std::shared_ptr<StreamsAdapter> &streams_adapter,
const std::shared_ptr<ChannelsAdapter> &channels_adapter)
: video_streaming_(false),
motion_tracking_(false),
model_(model),
device_(device),
streams_(std::make_shared<Streams>(streams_adapter)),
channels_(std::make_shared<Channels>(device_, channels_adapter)),
motions_(std::make_shared<Motions>(channels_)) {
VLOG(2) << __func__;
ReadAllInfos();
}
@ -100,14 +104,20 @@ std::shared_ptr<Device> Device::Create(
return std::make_shared<StandardDevice>(device);
} else if (strings::starts_with(name, "MYNT-EYE-")) {
// TODO(JohnZhao): Create different device by name, such as MYNT-EYE-S1000
std::string model_s = name.substr(9);
std::string model_s = name.substr(9, 5);
VLOG(2) << "MYNE EYE Model: " << model_s;
DeviceModel model(model_s);
switch (model.type) {
case 'S':
return std::make_shared<StandardDevice>(device);
default:
LOG(FATAL) << "MYNT EYE model is not supported now";
if (model.type == 'S') {
switch (model.generation) {
case '1':
return std::make_shared<StandardDevice>(device);
case '2':
return std::make_shared<Standard2Device>(device);
default:
LOG(FATAL) << "No such generation now";
}
} else {
LOG(FATAL) << "MYNT EYE model is not supported now";
}
}
return nullptr;
@ -166,6 +176,33 @@ void Device::ConfigStreamRequest(
return;
}
stream_config_requests_[capability] = request;
UpdateStreamIntrinsics(capability, request);
}
// Returns the stream request configured for `capability`, falling back to
// the first request the device advertises when none was configured.
// Aborts (LOG(FATAL)) when the device advertises none at all.
const StreamRequest &Device::GetStreamRequest(
    const Capabilities &capability) const {
  try {
    return stream_config_requests_.at(capability);
  } catch (const std::out_of_range &e) {
    auto &&requests = GetStreamRequests(capability);
    if (requests.size() >= 1) {
      return requests[0];
    } else {
      // NOTE(review): LOG(FATAL) aborts at runtime, but the compiler still
      // sees a path falling off the end of a reference-returning function
      // (-Wreturn-type) — confirm this is intended.
      LOG(FATAL) << "Please config the stream request of " << capability;
    }
  }
}
// Convenience overloads that operate on the device's key stream
// capability (model-dependent; resolved by GetKeyStreamCapability()).
const std::vector<StreamRequest> &Device::GetStreamRequests() const {
  return GetStreamRequests(GetKeyStreamCapability());
}

void Device::ConfigStreamRequest(const StreamRequest &request) {
  ConfigStreamRequest(GetKeyStreamCapability(), request);
}

const StreamRequest &Device::GetStreamRequest() const {
  return GetStreamRequest(GetKeyStreamCapability());
}
std::shared_ptr<DeviceInfo> Device::GetInfo() const {
@ -400,6 +437,14 @@ void Device::WaitForStreams() {
streams_->WaitForStreams();
}
// Returns the most recent data of `stream`.
// Requires video streaming to be running and the stream to be supported;
// serialized against the capture callback via mtx_streams_.
device::StreamData Device::GetStreamData(const Stream &stream) {
  CHECK(video_streaming_);
  CHECK_NOTNULL(streams_);
  CheckSupports(this, stream);
  std::lock_guard<std::mutex> _(mtx_streams_);
  return streams_->GetLatestStreamData(stream);
}
std::vector<device::StreamData> Device::GetStreamDatas(const Stream &stream) {
CHECK(video_streaming_);
CHECK_NOTNULL(streams_);
@ -408,14 +453,6 @@ std::vector<device::StreamData> Device::GetStreamDatas(const Stream &stream) {
return streams_->GetStreamDatas(stream);
}
device::StreamData Device::GetLatestStreamData(const Stream &stream) {
CHECK(video_streaming_);
CHECK_NOTNULL(streams_);
CheckSupports(this, stream);
std::lock_guard<std::mutex> _(mtx_streams_);
return streams_->GetLatestStreamData(stream);
}
void Device::EnableMotionDatas() {
EnableMotionDatas(std::numeric_limits<std::size_t>::max());
}
@ -431,51 +468,33 @@ std::vector<device::MotionData> Device::GetMotionDatas() {
return motions_->GetMotionDatas();
}
const StreamRequest &Device::GetStreamRequest(const Capabilities &capability) {
try {
return stream_config_requests_.at(capability);
} catch (const std::out_of_range &e) {
auto &&requests = GetStreamRequests(capability);
if (requests.size() >= 1) {
VLOG(2) << "Select the first one stream request of " << capability;
return requests[0];
} else {
LOG(FATAL) << "Please config the stream request of " << capability;
}
}
}
void Device::StartVideoStreaming() {
if (video_streaming_) {
LOG(WARNING) << "Cannot start video streaming without first stopping it";
return;
}
streams_ = std::make_shared<Streams>(GetKeyStreams());
// if stream capabilities are supported with subdevices of device_
/*
Capabilities stream_capabilities[] = {
Capabilities::STEREO,
Capabilities::COLOR,
Capabilities::DEPTH,
Capabilities::POINTS,
Capabilities::FISHEYE,
Capabilities::INFRARED,
Capabilities::INFRARED2
};
Capabilities::STEREO, Capabilities::STEREO_COLOR,
Capabilities::COLOR, Capabilities::DEPTH,
Capabilities::POINTS, Capabilities::FISHEYE,
Capabilities::INFRARED, Capabilities::INFRARED2};
for (auto &&capability : stream_capabilities) {
}
*/
if (Supports(Capabilities::STEREO)) {
auto &&stream_cap = GetKeyStreamCapability();
if (Supports(stream_cap)) {
// do stream request selection if more than one request of each stream
auto &&stream_request = GetStreamRequest(Capabilities::STEREO);
auto &&stream_request = GetStreamRequest(stream_cap);
streams_->ConfigStream(stream_cap, stream_request);
streams_->ConfigStream(Capabilities::STEREO, stream_request);
uvc::set_device_mode(
*device_, stream_request.width, stream_request.height,
static_cast<int>(stream_request.format), stream_request.fps,
[this](const void *data, std::function<void()> continuation) {
[this, stream_cap](
const void *data, std::function<void()> continuation) {
// drop the first stereo stream data
static std::uint8_t drop_count = 1;
if (drop_count > 0) {
@ -486,7 +505,7 @@ void Device::StartVideoStreaming() {
// auto &&time_beg = times::now();
{
std::lock_guard<std::mutex> _(mtx_streams_);
if (streams_->PushStream(Capabilities::STEREO, data)) {
if (streams_->PushStream(stream_cap, data)) {
CallbackPushedStreamData(Stream::LEFT);
CallbackPushedStreamData(Stream::RIGHT);
}
@ -543,9 +562,9 @@ void Device::ReadAllInfos() {
device_info_ = std::make_shared<DeviceInfo>();
CHECK_NOTNULL(channels_);
Channels::img_params_t img_params;
Channels::imu_params_t imu_params;
if (!channels_->GetFiles(device_info_.get(), &img_params, &imu_params)) {
all_img_params_.clear();
Device::imu_params_t imu_params;
if (!channels_->GetFiles(device_info_.get(), &all_img_params_, &imu_params)) {
#if defined(WITH_DEVICE_INFO_REQUIRED)
LOG(FATAL)
#else
@ -566,18 +585,28 @@ void Device::ReadAllInfos() {
<< ", nominal_baseline: " << device_info_->nominal_baseline << "}";
device_info_->name = uvc::get_name(*device_);
if (img_params.ok) {
SetIntrinsics(Stream::LEFT, img_params.in_left);
SetIntrinsics(Stream::RIGHT, img_params.in_right);
SetExtrinsics(Stream::RIGHT, Stream::LEFT, img_params.ex_right_to_left);
VLOG(2) << "Intrinsics left: {" << GetIntrinsics(Stream::LEFT) << "}";
VLOG(2) << "Intrinsics right: {" << GetIntrinsics(Stream::RIGHT) << "}";
VLOG(2) << "Extrinsics right to left: {"
<< GetExtrinsics(Stream::RIGHT, Stream::LEFT) << "}";
} else {
bool img_params_ok = false;
for (auto &&params : all_img_params_) {
auto &&img_params = params.second;
if (img_params.ok) {
img_params_ok = true;
SetIntrinsics(Stream::LEFT, img_params.in_left);
SetIntrinsics(Stream::RIGHT, img_params.in_right);
SetExtrinsics(Stream::LEFT, Stream::RIGHT, img_params.ex_right_to_left);
VLOG(2) << "Intrinsics left: {" << GetIntrinsics(Stream::LEFT) << "}";
VLOG(2) << "Intrinsics right: {" << GetIntrinsics(Stream::RIGHT) << "}";
VLOG(2) << "Extrinsics left to right: {"
<< GetExtrinsics(Stream::LEFT, Stream::RIGHT) << "}";
break;
}
}
if (!img_params_ok) {
LOG(WARNING) << "Intrinsics & extrinsics not exist";
}
if (imu_params.ok) {
imu_params_ = imu_params;
SetMotionIntrinsics({imu_params.in_accel, imu_params.in_gyro});
SetMotionExtrinsics(Stream::LEFT, imu_params.ex_left_to_imu);
VLOG(2) << "Motion intrinsics: {" << GetMotionIntrinsics() << "}";
@ -588,6 +617,28 @@ void Device::ReadAllInfos() {
}
}
// Installs the calibration matching the resolution of a newly configured
// stream request. Only the key stream capability carries image
// calibration; requests for other capabilities are ignored.
void Device::UpdateStreamIntrinsics(
    const Capabilities &capability, const StreamRequest &request) {
  if (capability != GetKeyStreamCapability()) {
    return;
  }
  for (auto &&params : all_img_params_) {
    auto &&img_res = params.first;
    auto &&img_params = params.second;
    if (img_params.ok && img_res == request.GetResolution()) {
      SetIntrinsics(Stream::LEFT, img_params.in_left);
      SetIntrinsics(Stream::RIGHT, img_params.in_right);
      // NOTE(review): the field is named ex_right_to_left but is installed
      // as the LEFT -> RIGHT extrinsics here — confirm the intended
      // direction matches what GetExtrinsics callers expect.
      SetExtrinsics(Stream::LEFT, Stream::RIGHT, img_params.ex_right_to_left);
      VLOG(2) << "Intrinsics left: {" << GetIntrinsics(Stream::LEFT) << "}";
      VLOG(2) << "Intrinsics right: {" << GetIntrinsics(Stream::RIGHT) << "}";
      VLOG(2) << "Extrinsics left to right: {"
          << GetExtrinsics(Stream::LEFT, Stream::RIGHT) << "}";
      break;
    }
  }
}
void Device::CallbackPushedStreamData(const Stream &stream) {
if (HasStreamCallback(stream)) {
auto &&datas = streams_->stream_datas(stream);

View File

@ -36,11 +36,11 @@ void Motions::SetMotionCallback(motion_callback_t callback) {
if (motion_callback_) {
accel_range = channels_->GetControlValue(Option::ACCELEROMETER_RANGE);
if (accel_range == -1)
accel_range = 8;
accel_range = channels_->GetAccelRangeDefault();
gyro_range = channels_->GetControlValue(Option::GYROSCOPE_RANGE);
if (gyro_range == -1)
gyro_range = 1000;
gyro_range = channels_->GetGyroRangeDefault();
channels_->SetImuCallback([this](const ImuPacket &packet) {
if (!motion_callback_ && !motion_datas_enabled_) {
@ -48,19 +48,21 @@ void Motions::SetMotionCallback(motion_callback_t callback) {
}
for (auto &&seg : packet.segments) {
auto &&imu = std::make_shared<ImuData>();
imu->frame_id = seg.frame_id;
// imu->frame_id = seg.frame_id;
// if (seg.offset < 0 &&
// static_cast<uint32_t>(-seg.offset) > packet.timestamp) {
// LOG(WARNING) << "Imu timestamp offset is incorrect";
// }
imu->timestamp = packet.timestamp + seg.offset;
imu->frame_id = seg.frame_id;
imu->timestamp = seg.timestamp;
imu->flag = seg.flag;
imu->temperature = seg.temperature / 326.8f + 25;
imu->accel[0] = seg.accel[0] * 1.f * accel_range / 0x10000;
imu->accel[1] = seg.accel[1] * 1.f * accel_range / 0x10000;
imu->accel[2] = seg.accel[2] * 1.f * accel_range / 0x10000;
imu->gyro[0] = seg.gyro[0] * 1.f * gyro_range / 0x10000;
imu->gyro[1] = seg.gyro[1] * 1.f * gyro_range / 0x10000;
imu->gyro[2] = seg.gyro[2] * 1.f * gyro_range / 0x10000;
imu->temperature = seg.temperature / 326.8f + 25;
std::lock_guard<std::mutex> _(mtx_datas_);
motion_data_t data = {imu};

View File

@ -58,8 +58,8 @@ class Motions {
std::mutex mtx_datas_;
int accel_range = 8;
int gyro_range = 1000;
int accel_range;
int gyro_range;
};
MYNTEYE_END_NAMESPACE

View File

@ -0,0 +1,163 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/device/standard/channels_adapter_s.h"
#include "mynteye/device/config.h"
#include "mynteye/logger.h"
MYNTEYE_BEGIN_NAMESPACE
namespace {
#pragma pack(push, 1)
// Raw on-wire IMU sample for the STANDARD model: 18 bytes, packed
// (enclosed in #pragma pack(push, 1)). Every 16-bit field is stored
// big-endian and decoded byte-by-byte in from_data().
struct ImuData {
  std::int16_t offset;       // time offset relative to the packet timestamp
  std::uint16_t frame_id;
  std::int16_t accel[3];     // raw counts; scaled to g by Motions
  std::int16_t temperature;  // raw; Motions converts via /326.8 + 25
  std::int16_t gyro[3];      // raw counts; scaled to deg/s by Motions

  ImuData() = default;
  explicit ImuData(const std::uint8_t *data) {
    from_data(data);
  }

  // Decodes the 18-byte big-endian wire layout.
  void from_data(const std::uint8_t *data) {
    offset = (*(data) << 8) | *(data + 1);
    frame_id = (*(data + 2) << 8) | *(data + 3);
    accel[0] = (*(data + 4) << 8) | *(data + 5);
    accel[1] = (*(data + 6) << 8) | *(data + 7);
    accel[2] = (*(data + 8) << 8) | *(data + 9);
    temperature = (*(data + 10) << 8) | *(data + 11);
    gyro[0] = (*(data + 12) << 8) | *(data + 13);
    gyro[1] = (*(data + 14) << 8) | *(data + 15);
    gyro[2] = (*(data + 16) << 8) | *(data + 17);
  }
};
#pragma pack(pop)
// Converts a raw wire sample into an ImuSegment. The segment timestamp is
// the packet timestamp plus the per-sample offset, scaled by 10
// (presumably device ticks -> microseconds — TODO confirm the unit).
// NOTE(review): `timestamp + imu.offset` is evaluated in 32-bit before
// being widened, so a negative offset near timestamp 0 wraps — confirm.
void unpack_imu_segment(const ImuData &imu, const std::uint32_t &timestamp,
    ImuSegment *seg) {
  seg->frame_id = static_cast<uint32_t>(imu.frame_id);
  seg->timestamp = static_cast<uint64_t>(timestamp + imu.offset) * 10;
  seg->flag = 0;  // STANDARD samples have no flag bits; always 0
  seg->temperature = imu.temperature;
  seg->accel[0] = imu.accel[0];
  seg->accel[1] = imu.accel[1];
  seg->accel[2] = imu.accel[2];
  seg->gyro[0] = imu.gyro[0];
  seg->gyro[1] = imu.gyro[1];
  seg->gyro[2] = imu.gyro[2];
}
// Decodes one IMU packet: serial number (4 B, big-endian), timestamp
// (4 B, big-endian), sample count (1 B), then `count` 18-byte samples.
void unpack_imu_packet(const std::uint8_t *data, ImuPacket *pkg) {
  pkg->serial_number =
      (*(data) << 24) | (*(data + 1) << 16) |
      (*(data + 2) << 8) | *(data + 3);
  std::uint32_t timestamp =
      (*(data + 4) << 24) | (*(data + 5) << 16)|
      (*(data + 6) << 8) | *(data + 7);
  pkg->count = *(data + 8);

  std::size_t data_n = sizeof(ImuData);  // 18
  for (std::size_t i = 0; i < pkg->count; i++) {
    ImuSegment seg;
    unpack_imu_segment(ImuData(data + 9 + (data_n * i)), timestamp, &seg);
    pkg->segments.push_back(seg);
  }
}
// Decodes a full S1 IMU response: header, state, big-endian payload size,
// a run of variable-length IMU packets, and a trailing checksum byte.
void unpack_imu_res_packet(const std::uint8_t *data, ImuResPacket *res) {
  res->header = data[0];
  res->state = data[1];
  res->size = (data[2] << 8) | data[3];
  const std::size_t seg_size = sizeof(ImuData);  // 18
  std::size_t i = 4;
  while (i < res->size) {
    ImuPacket packet;
    unpack_imu_packet(data + i, &packet);
    res->packets.push_back(packet);
    // Advance past this packet: 9-byte header plus its samples.
    i += 9 + packet.count * seg_size;
  }
  res->checksum = data[4 + res->size];
}
} // namespace
StandardChannelsAdapter::StandardChannelsAdapter() {
}

StandardChannelsAdapter::~StandardChannelsAdapter() {
}

// Options supported by the Standard (S1) model, from the shared config table.
std::set<Option> StandardChannelsAdapter::GetOptionSupports() {
  return option_supports_map.at(Model::STANDARD);
}

// Default accelerometer range setting for S1.
std::int32_t StandardChannelsAdapter::GetAccelRangeDefault() {
  return 8;
}

// Accelerometer range values the S1 firmware accepts.
std::vector<std::int32_t> StandardChannelsAdapter::GetAccelRangeValues() {
  return {4, 8, 16, 32};
}

// Default gyroscope range setting for S1.
std::int32_t StandardChannelsAdapter::GetGyroRangeDefault() {
  return 1000;
}

// Gyroscope range values the S1 firmware accepts.
std::vector<std::int32_t> StandardChannelsAdapter::GetGyroRangeValues() {
  return {500, 1000, 2000, 4000};
}

// Decodes a raw S1 IMU response buffer into |res|.
void StandardChannelsAdapter::GetImuResPacket(
    const std::uint8_t *data, ImuResPacket *res) {
  unpack_imu_res_packet(data, res);
}
// Parses the S1 image-parameters blob: left intrinsics, right intrinsics,
// then right-to-left extrinsics, laid out consecutively, for the single
// supported resolution (752x480). Returns the number of bytes consumed.
std::size_t StandardChannelsAdapter::GetImgParamsFromData(
    const std::uint8_t *data, const Version *version,
    Channels::img_params_t *img_params) {
  std::size_t i = 0;
  Intrinsics in_left, in_right;
  Extrinsics ex_right_to_left;
  i += bytes::from_data(&in_left, data + i, version);
  i += bytes::from_data(&in_right, data + i, version);
  i += bytes::from_data(&ex_right_to_left, data + i, version);
  (*img_params)[{752, 480}] = {true, in_left, in_right, ex_right_to_left};
  return i;
}
// Serializes the 752x480 image parameters into |data|, prefixed with a
// 3-byte header: file id followed by a big-endian 16-bit payload size.
// Returns the total number of bytes written (header + payload).
std::size_t StandardChannelsAdapter::SetImgParamsToData(
    const Channels::img_params_t *img_params, const Version *version,
    std::uint8_t *data) {
  std::size_t i = 3;  // skip id, size
  auto &&params = (*img_params).at({752, 480});
  i += bytes::to_data(&params.in_left, data + i, version);
  i += bytes::to_data(&params.in_right, data + i, version);
  i += bytes::to_data(&params.ex_right_to_left, data + i, version);
  // others
  std::size_t size = i - 3;
  data[0] = Channels::FID_IMG_PARAMS;
  data[1] = static_cast<std::uint8_t>((size >> 8) & 0xFF);
  data[2] = static_cast<std::uint8_t>(size & 0xFF);
  return size + 3;
}
MYNTEYE_END_NAMESPACE

View File

@ -0,0 +1,51 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_DEVICE_STANDARD_CHANNELS_ADAPTER_S_H_
#define MYNTEYE_DEVICE_STANDARD_CHANNELS_ADAPTER_S_H_
#pragma once
#include <cstdint>
#include <set>
#include <vector>
#include "mynteye/device/channels.h"
MYNTEYE_BEGIN_NAMESPACE
// Channels adapter for the Standard (S1) model: exposes its supported
// options, IMU range choices, and the S1 wire formats for IMU response
// packets and image parameters.
class StandardChannelsAdapter : public ChannelsAdapter {
 public:
  StandardChannelsAdapter();
  virtual ~StandardChannelsAdapter();

  std::set<Option> GetOptionSupports() override;

  // IMU range defaults and permitted values for this model.
  std::int32_t GetAccelRangeDefault() override;
  std::vector<std::int32_t> GetAccelRangeValues() override;
  std::int32_t GetGyroRangeDefault() override;
  std::vector<std::int32_t> GetGyroRangeValues() override;

  // Decodes a raw S1 IMU response buffer into |res|.
  void GetImuResPacket(const std::uint8_t *data, ImuResPacket *res) override;

  // (De)serialization of image parameters; both return bytes processed.
  std::size_t GetImgParamsFromData(
      const std::uint8_t *data, const Version *version,
      Channels::img_params_t *img_params) override;
  std::size_t SetImgParamsToData(
      const Channels::img_params_t *img_params, const Version *version,
      std::uint8_t *data) override;
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_DEVICE_STANDARD_CHANNELS_ADAPTER_S_H_

View File

@ -11,15 +11,19 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/device/device_s.h"
#include "mynteye/device/standard/device_s.h"
#include "mynteye/logger.h"
#include "mynteye/device/motions.h"
#include "mynteye/device/standard/channels_adapter_s.h"
#include "mynteye/device/standard/streams_adapter_s.h"
MYNTEYE_BEGIN_NAMESPACE
StandardDevice::StandardDevice(std::shared_ptr<uvc::device> device)
: Device(Model::STANDARD, device) {
: Device(Model::STANDARD, device,
std::make_shared<StandardStreamsAdapter>(),
std::make_shared<StandardChannelsAdapter>()) {
VLOG(2) << __func__;
}
@ -27,8 +31,8 @@ StandardDevice::~StandardDevice() {
VLOG(2) << __func__;
}
std::vector<Stream> StandardDevice::GetKeyStreams() const {
return {Stream::LEFT, Stream::RIGHT};
Capabilities StandardDevice::GetKeyStreamCapability() const {
return Capabilities::STEREO;
}
void StandardDevice::OnStereoStreamUpdate() {

View File

@ -11,8 +11,8 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_DEVICE_DEVICE_S_H_
#define MYNTEYE_DEVICE_DEVICE_S_H_
#ifndef MYNTEYE_DEVICE_STANDARD_DEVICE_S_H_
#define MYNTEYE_DEVICE_STANDARD_DEVICE_S_H_
#pragma once
#include <memory>
@ -27,11 +27,11 @@ class StandardDevice : public Device {
explicit StandardDevice(std::shared_ptr<uvc::device> device);
virtual ~StandardDevice();
std::vector<Stream> GetKeyStreams() const override;
Capabilities GetKeyStreamCapability() const override;
void OnStereoStreamUpdate() override;
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_DEVICE_DEVICE_S_H_
#endif // MYNTEYE_DEVICE_STANDARD_DEVICE_S_H_

View File

@ -0,0 +1,163 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/device/standard/streams_adapter_s.h"
#include <iomanip>
#include "mynteye/logger.h"
#include "mynteye/device/types.h"
MYNTEYE_BEGIN_NAMESPACE
namespace {
// image info
#pragma pack(push, 1)
// Trailing image-info packet for the S1 protocol: 11 bytes, big-endian,
// appended to every stereo frame by the device.
struct ImagePacket {
  std::uint8_t header;
  std::uint8_t size;
  std::uint16_t frame_id;
  std::uint32_t timestamp;
  std::uint16_t exposure_time;
  std::uint8_t checksum;

  ImagePacket() = default;
  explicit ImagePacket(std::uint8_t *data) {
    from_data(data);
  }

  // Decode the 11-byte packet starting at |data|.
  void from_data(std::uint8_t *data) {
    header = data[0];
    size = data[1];
    frame_id = static_cast<std::uint16_t>((data[2] << 8) | data[3]);
    timestamp = (static_cast<std::uint32_t>(data[4]) << 24) |
                (static_cast<std::uint32_t>(data[5]) << 16) |
                (static_cast<std::uint32_t>(data[6]) << 8) |
                static_cast<std::uint32_t>(data[7]);
    exposure_time = static_cast<std::uint16_t>((data[8] << 8) | data[9]);
    checksum = data[10];
  }
};
#pragma pack(pop)
// Extracts the image-info packet that the device appends to the tail of
// every stereo frame (stored in reverse byte order). Validates the 0x3B
// header and the XOR checksum over the packet content bytes [2, 9], then
// fills |img| with frame id, timestamp (converted from 10 us units to
// 1 us) and exposure time. Returns false if the packet is invalid.
bool unpack_stereo_img_data(
    const void *data, const StreamRequest &request, ImgData *img) {
  CHECK_NOTNULL(img);
  auto data_new = reinterpret_cast<const std::uint8_t *>(data);
  std::size_t data_n =
      request.width * request.height * bytes_per_pixel(request.format);
  auto data_end = data_new + data_n;
  std::size_t packet_n = sizeof(ImagePacket);
  std::vector<std::uint8_t> packet(packet_n);
  // The packet bytes are reversed at the end of the frame; restore order.
  std::reverse_copy(data_end - packet_n, data_end, packet.begin());
  ImagePacket img_packet(packet.data());
  // LOG(INFO) << "ImagePacket: header=0x" << std::hex <<
  // static_cast<int>(img_packet.header)
  // << ", size=0x" << std::hex << static_cast<int>(img_packet.size)
  // << ", frame_id="<< std::dec << img_packet.frame_id
  // << ", timestamp="<< std::dec << img_packet.timestamp
  // << ", exposure_time="<< std::dec << img_packet.exposure_time
  // << ", checksum=0x" << std::hex << static_cast<int>(img_packet.checksum);
  if (img_packet.header != 0x3B) {
    VLOG(2) << "Image packet header must be 0x3B, but 0x" << std::hex
            << std::uppercase << std::setw(2) << std::setfill('0')
            << static_cast<int>(img_packet.header) << " now";
    return false;
  }
  std::uint8_t checksum = 0;
  for (std::size_t i = 2, n = packet_n - 2; i <= n; i++) {  // content: [2,9]
    checksum = (checksum ^ packet[i]);
  }
  if (img_packet.checksum != checksum) {
    VLOG(2) << "Image packet checksum should be 0x" << std::hex
            << std::uppercase << std::setw(2) << std::setfill('0')
            << static_cast<int>(img_packet.checksum) << ", but 0x"
            << std::setw(2) << std::setfill('0') << static_cast<int>(checksum)
            << " now";
    return false;
  }
  img->frame_id = img_packet.frame_id;
  // make timestamp unit from 10us to 1us
  img->timestamp = static_cast<uint64_t>(img_packet.timestamp) * 10;
  img->exposure_time = img_packet.exposure_time;
  return true;
}
// image pixels
// Extracts the left GREY image from a YUYV stereo stream: the left eye's
// luma occupies every even byte of the interleaved data.
bool unpack_left_img_pixels(
    const void *data, const StreamRequest &request, Streams::frame_t *frame) {
  CHECK_NOTNULL(frame);
  CHECK_EQ(request.format, Format::YUYV);
  CHECK_EQ(frame->format(), Format::GREY);
  auto src = reinterpret_cast<const std::uint8_t *>(data);
  auto dst = frame->data();
  const std::size_t pixels = frame->width() * frame->height();
  for (std::size_t i = 0; i < pixels; ++i) {
    dst[i] = src[i * 2];
  }
  return true;
}
// Extracts the right GREY image from a YUYV stereo stream: the right eye's
// luma occupies every odd byte of the interleaved data.
bool unpack_right_img_pixels(
    const void *data, const StreamRequest &request, Streams::frame_t *frame) {
  CHECK_NOTNULL(frame);
  CHECK_EQ(request.format, Format::YUYV);
  CHECK_EQ(frame->format(), Format::GREY);
  auto src = reinterpret_cast<const std::uint8_t *>(data);
  auto dst = frame->data();
  const std::size_t pixels = frame->width() * frame->height();
  for (std::size_t i = 0; i < pixels; ++i) {
    dst[i] = src[i * 2 + 1];
  }
  return true;
}
} // namespace
StandardStreamsAdapter::StandardStreamsAdapter() {
}

StandardStreamsAdapter::~StandardStreamsAdapter() {
}

// Streams the S1 model keys on for synchronization.
std::vector<Stream> StandardStreamsAdapter::GetKeyStreams() {
  return {Stream::LEFT, Stream::RIGHT};
}

// The S1 model exposes a single grey stereo capability.
std::vector<Capabilities> StandardStreamsAdapter::GetStreamCapabilities() {
  return {Capabilities::STEREO};
}

// Both eyes share the same trailing-info decoder.
std::map<Stream, Streams::unpack_img_data_t>
StandardStreamsAdapter::GetUnpackImgDataMap() {
  return {
    {Stream::LEFT, unpack_stereo_img_data},
    {Stream::RIGHT, unpack_stereo_img_data}
  };
}

// Per-eye pixel extractors (even/odd bytes of the YUYV stream).
std::map<Stream, Streams::unpack_img_pixels_t>
StandardStreamsAdapter::GetUnpackImgPixelsMap() {
  return {
    {Stream::LEFT, unpack_left_img_pixels},
    {Stream::RIGHT, unpack_right_img_pixels}
  };
}
MYNTEYE_END_NAMESPACE

View File

@ -0,0 +1,42 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_DEVICE_STANDARD_STREAMS_ADAPTER_S_H_
#define MYNTEYE_DEVICE_STANDARD_STREAMS_ADAPTER_S_H_
#pragma once
#include <map>
#include <memory>
#include <vector>
#include "mynteye/device/streams.h"
MYNTEYE_BEGIN_NAMESPACE
// Streams adapter for the Standard (S1) model: declares its key streams,
// stream capabilities, and the unpack functions for image info and pixels.
class StandardStreamsAdapter : public StreamsAdapter {
 public:
  StandardStreamsAdapter();
  virtual ~StandardStreamsAdapter();

  std::vector<Stream> GetKeyStreams() override;
  std::vector<Capabilities> GetStreamCapabilities() override;

  // Maps each stream to its image-info / pixel unpack routine.
  std::map<Stream, Streams::unpack_img_data_t>
  GetUnpackImgDataMap() override;
  std::map<Stream, Streams::unpack_img_pixels_t>
  GetUnpackImgPixelsMap() override;
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_DEVICE_STANDARD_STREAMS_ADAPTER_S_H_

View File

@ -0,0 +1,173 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/device/standard2/channels_adapter_s2.h"
#include "mynteye/device/config.h"
#include "mynteye/logger.h"
MYNTEYE_BEGIN_NAMESPACE
namespace {
#pragma pack(push, 1)
// Raw IMU sample layout for the Standard2 (S2) protocol: 21 bytes, all
// multi-byte fields big-endian. A sample carries either an accel or a
// gyro reading, selected by |flag| (see unpack_imu_segment).
struct ImuData {
  std::uint32_t frame_id;
  std::uint64_t timestamp;
  std::uint8_t flag;
  std::int16_t temperature;
  std::int16_t accel_or_gyro[3];

  ImuData() = default;
  explicit ImuData(const std::uint8_t *data) {
    from_data(data);
  }

  // Decode one 21-byte big-endian sample starting at |data|.
  void from_data(const std::uint8_t *data) {
    auto be16 = [data](int pos) {
      return static_cast<std::int16_t>((data[pos] << 8) | data[pos + 1]);
    };
    auto be32 = [data](int pos) {
      return (static_cast<std::uint32_t>(data[pos]) << 24) |
             (static_cast<std::uint32_t>(data[pos + 1]) << 16) |
             (static_cast<std::uint32_t>(data[pos + 2]) << 8) |
             static_cast<std::uint32_t>(data[pos + 3]);
    };
    frame_id = be32(0);
    // The 64-bit timestamp is transmitted as two big-endian 32-bit halves.
    timestamp = (static_cast<std::uint64_t>(be32(4)) << 32) | be32(8);
    flag = data[12];
    temperature = be16(13);
    for (int i = 0; i < 3; ++i) {
      accel_or_gyro[i] = be16(15 + 2 * i);
    }
  }
};
#pragma pack(pop)
// Converts a decoded S2 IMU sample into an ImuSegment. The sample holds a
// single reading: flag == 1 means accel, flag == 2 means gyro; the other
// triplet is zeroed.
void unpack_imu_segment(const ImuData &imu, ImuSegment *seg) {
  seg->frame_id = imu.frame_id;
  seg->timestamp = imu.timestamp;
  seg->flag = imu.flag;
  seg->temperature = imu.temperature;
  const bool is_accel = (imu.flag == 1);
  const bool is_gyro = (imu.flag == 2);
  for (int i = 0; i < 3; ++i) {
    seg->accel[i] = is_accel ? imu.accel_or_gyro[i] : 0;
    seg->gyro[i] = is_gyro ? imu.accel_or_gyro[i] : 0;
  }
}
// Decodes |pkg->count| consecutive 21-byte S2 IMU samples starting at
// |data| into segments, and mirrors the last segment's frame id into the
// packet serial number. |pkg->count| must be set by the caller.
void unpack_imu_packet(const std::uint8_t *data, ImuPacket *pkg) {
  std::size_t data_n = sizeof(ImuData);  // 21
  for (std::size_t i = 0; i < pkg->count; i++) {
    ImuSegment seg;
    unpack_imu_segment(ImuData(data + data_n * i), &seg);
    pkg->segments.push_back(seg);
  }
  // Fix: back() on an empty vector is undefined behavior; an empty or
  // undersized response (count == 0) previously crashed or corrupted here.
  if (!pkg->segments.empty()) {
    pkg->serial_number = pkg->segments.back().frame_id;
  }
}
// Decodes an S2 IMU response: a 4-byte prefix (header, state, big-endian
// payload size), a single run of 21-byte samples, and a trailing checksum
// byte. Unlike S1, the sample count is derived from the payload size and
// the whole payload forms one ImuPacket.
void unpack_imu_res_packet(const std::uint8_t *data, ImuResPacket *res) {
  res->header = *data;
  res->state = *(data + 1);
  res->size = (*(data + 2) << 8) | *(data + 3);
  std::size_t data_n = sizeof(ImuData);  // 21
  ImuPacket packet;
  packet.count = res->size / data_n;
  unpack_imu_packet(data + 4, &packet);
  res->packets.push_back(packet);
  res->checksum = *(data + 4 + res->size);
}
} // namespace
Standard2ChannelsAdapter::Standard2ChannelsAdapter() {
}

Standard2ChannelsAdapter::~Standard2ChannelsAdapter() {
}

// Options supported by the Standard2 (S2) model, from the shared config table.
std::set<Option> Standard2ChannelsAdapter::GetOptionSupports() {
  return option_supports_map.at(Model::STANDARD2);
}

// Default accelerometer range setting for S2.
std::int32_t Standard2ChannelsAdapter::GetAccelRangeDefault() {
  return 12;
}

// Accelerometer range values the S2 firmware accepts.
std::vector<std::int32_t> Standard2ChannelsAdapter::GetAccelRangeValues() {
  return {6, 12, 24, 48};
}

// Default gyroscope range setting for S2.
std::int32_t Standard2ChannelsAdapter::GetGyroRangeDefault() {
  return 1000;
}

// Gyroscope range values the S2 firmware accepts.
std::vector<std::int32_t> Standard2ChannelsAdapter::GetGyroRangeValues() {
  return {250, 500, 1000, 2000, 4000};
}

// Decodes a raw S2 IMU response buffer into |res|.
void Standard2ChannelsAdapter::GetImuResPacket(
    const std::uint8_t *data, ImuResPacket *res) {
  unpack_imu_res_packet(data, res);
}
// Parses the S2 image-parameters blob: left/right intrinsics for the
// 1280x400 pair, then left/right intrinsics for the 2560x800 pair, then a
// single right-to-left extrinsics record that is shared by both
// resolutions. Returns the number of bytes consumed.
std::size_t Standard2ChannelsAdapter::GetImgParamsFromData(
    const std::uint8_t *data, const Version *version,
    Channels::img_params_t *img_params) {
  std::size_t i = 0;
  Intrinsics in_left, in_right;
  Extrinsics ex_right_to_left;
  i += bytes::from_data(&in_left, data + i, version);
  i += bytes::from_data(&in_right, data + i, version);
  // ex_right_to_left is still default-constructed here; patched below.
  (*img_params)[{1280, 400}] = {true, in_left, in_right, ex_right_to_left};
  i += bytes::from_data(&in_left, data + i, version);
  i += bytes::from_data(&in_right, data + i, version);
  (*img_params)[{2560, 800}] = {true, in_left, in_right, ex_right_to_left};
  // The single extrinsics record applies to both resolutions.
  i += bytes::from_data(&ex_right_to_left, data + i, version);
  (*img_params)[{1280, 400}].ex_right_to_left = ex_right_to_left;
  (*img_params)[{2560, 800}].ex_right_to_left = ex_right_to_left;
  return i;
}
// Serializes both S2 resolutions' image parameters into |data|, prefixed
// with a 3-byte header (file id + big-endian 16-bit payload size). The
// shared extrinsics are written once, after the 2560x800 intrinsics,
// mirroring the layout GetImgParamsFromData expects. Returns the total
// number of bytes written (header + payload).
std::size_t Standard2ChannelsAdapter::SetImgParamsToData(
    const Channels::img_params_t *img_params, const Version *version,
    std::uint8_t *data) {
  std::size_t i = 3;  // skip id, size
  {
    auto &&params = (*img_params).at({1280, 400});
    i += bytes::to_data(&params.in_left, data + i, version);
    i += bytes::to_data(&params.in_right, data + i, version);
  }
  {
    auto &&params = (*img_params).at({2560, 800});
    i += bytes::to_data(&params.in_left, data + i, version);
    i += bytes::to_data(&params.in_right, data + i, version);
    i += bytes::to_data(&params.ex_right_to_left, data + i, version);
  }
  // others
  std::size_t size = i - 3;
  data[0] = Channels::FID_IMG_PARAMS;
  data[1] = static_cast<std::uint8_t>((size >> 8) & 0xFF);
  data[2] = static_cast<std::uint8_t>(size & 0xFF);
  return size + 3;
}
MYNTEYE_END_NAMESPACE

View File

@ -0,0 +1,51 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_DEVICE_STANDARD2_CHANNELS_ADAPTER_S2_H_
#define MYNTEYE_DEVICE_STANDARD2_CHANNELS_ADAPTER_S2_H_
#pragma once
#include <cstdint>
#include <set>
#include <vector>
#include "mynteye/device/channels.h"
MYNTEYE_BEGIN_NAMESPACE
// Channels adapter for the Standard2 (S2) model: exposes its supported
// options, IMU range choices, and the S2 wire formats for IMU response
// packets and image parameters (two resolutions sharing one extrinsics).
class Standard2ChannelsAdapter : public ChannelsAdapter {
 public:
  Standard2ChannelsAdapter();
  virtual ~Standard2ChannelsAdapter();

  std::set<Option> GetOptionSupports() override;

  // IMU range defaults and permitted values for this model.
  std::int32_t GetAccelRangeDefault() override;
  std::vector<std::int32_t> GetAccelRangeValues() override;
  std::int32_t GetGyroRangeDefault() override;
  std::vector<std::int32_t> GetGyroRangeValues() override;

  // Decodes a raw S2 IMU response buffer into |res|.
  void GetImuResPacket(const std::uint8_t *data, ImuResPacket *res) override;

  // (De)serialization of image parameters; both return bytes processed.
  std::size_t GetImgParamsFromData(
      const std::uint8_t *data, const Version *version,
      Channels::img_params_t *img_params) override;
  std::size_t SetImgParamsToData(
      const Channels::img_params_t *img_params, const Version *version,
      std::uint8_t *data) override;
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_DEVICE_STANDARD2_CHANNELS_ADAPTER_S2_H_

View File

@ -0,0 +1,45 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/device/standard2/device_s2.h"
#include "mynteye/logger.h"
#include "mynteye/device/motions.h"
#include "mynteye/device/standard2/channels_adapter_s2.h"
#include "mynteye/device/standard2/streams_adapter_s2.h"
MYNTEYE_BEGIN_NAMESPACE
// Constructs the S2 device with its model-specific streams/channels adapters.
Standard2Device::Standard2Device(std::shared_ptr<uvc::device> device)
  : Device(Model::STANDARD2, device,
        std::make_shared<Standard2StreamsAdapter>(),
        std::make_shared<Standard2ChannelsAdapter>()) {
  VLOG(2) << __func__;
}

Standard2Device::~Standard2Device() {
  VLOG(2) << __func__;
}

// The S2 key stream is the color stereo capability.
Capabilities Standard2Device::GetKeyStreamCapability() const {
  return Capabilities::STEREO_COLOR;
}

// Invoked on each stereo stream update; drives IMU motion tracking when
// it has been enabled.
void Standard2Device::OnStereoStreamUpdate() {
  if (motion_tracking_) {
    auto &&motions = this->motions();
    motions->DoMotionTrack();
  }
}
MYNTEYE_END_NAMESPACE

View File

@ -0,0 +1,37 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_DEVICE_STANDARD2_DEVICE_S2_H_
#define MYNTEYE_DEVICE_STANDARD2_DEVICE_S2_H_
#pragma once
#include <memory>
#include <vector>
#include "mynteye/device/device.h"
MYNTEYE_BEGIN_NAMESPACE
// Device implementation for the Standard2 (S2) model: wires up the S2
// streams/channels adapters and reports STEREO_COLOR as its key stream.
class Standard2Device : public Device {
 public:
  explicit Standard2Device(std::shared_ptr<uvc::device> device);
  virtual ~Standard2Device();

  Capabilities GetKeyStreamCapability() const override;
  void OnStereoStreamUpdate() override;
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_DEVICE_STANDARD2_DEVICE_S2_H_

View File

@ -0,0 +1,186 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/device/standard2/streams_adapter_s2.h"
#include <iomanip>
#include "mynteye/logger.h"
#include "mynteye/device/types.h"
MYNTEYE_BEGIN_NAMESPACE
namespace {
// image info
#pragma pack(push, 1)
// Trailing image-info packet for the S2 protocol: 15 bytes, big-endian,
// appended to every stereo frame. Unlike S1, the timestamp is 64-bit.
struct ImagePacket {
  std::uint8_t header;
  std::uint8_t size;
  std::uint16_t frame_id;
  std::uint64_t timestamp;
  std::uint16_t exposure_time;
  std::uint8_t checksum;

  ImagePacket() = default;
  explicit ImagePacket(std::uint8_t *data) {
    from_data(data);
  }

  // Decode the 15-byte packet starting at |data|.
  void from_data(std::uint8_t *data) {
    auto be32 = [data](int pos) {
      return (static_cast<std::uint32_t>(data[pos]) << 24) |
             (static_cast<std::uint32_t>(data[pos + 1]) << 16) |
             (static_cast<std::uint32_t>(data[pos + 2]) << 8) |
             static_cast<std::uint32_t>(data[pos + 3]);
    };
    header = data[0];
    size = data[1];
    frame_id = static_cast<std::uint16_t>((data[2] << 8) | data[3]);
    // The 64-bit timestamp is transmitted as two big-endian 32-bit halves.
    timestamp = (static_cast<std::uint64_t>(be32(4)) << 32) | be32(8);
    exposure_time = static_cast<std::uint16_t>((data[12] << 8) | data[13]);
    checksum = data[14];
  }
};
#pragma pack(pop)
// image pixels
// Extracts the left BGR888 image from a row-interleaved stereo frame:
// even source rows (2*i) belong to the left eye, so the output has half
// the source height. The three channels are reversed while copying
// (source byte 2 -> dest byte 0, etc.).
// NOTE(review): presumably this converts RGB source data to BGR output —
// confirm the camera's native channel order.
bool unpack_left_img_pixels(
    const void *data, const StreamRequest &request, Streams::frame_t *frame) {
  CHECK_NOTNULL(frame);
  CHECK_EQ(request.format, Format::BGR888);
  CHECK_EQ(frame->format(), Format::BGR888);
  auto data_new = reinterpret_cast<const std::uint8_t *>(data);
  std::size_t n = 3;  // bytes per pixel
  std::size_t w = frame->width();
  std::size_t h = frame->height();
  for (std::size_t i = 0; i < h; i++) {
    for (std::size_t j = 0; j < w; j++) {
      frame->data()[(i * w + j) * n] =
          *(data_new + (2 * i * w + j) * n + 2);
      frame->data()[(i * w + j) * n + 1] =
          *(data_new + (2 * i * w + j) * n + 1);
      frame->data()[(i * w + j) * n + 2] =
          *(data_new + (2 * i * w + j) * n);
    }
  }
  return true;
}
// Extracts the right BGR888 image from a row-interleaved stereo frame:
// odd source rows (2*i + 1) belong to the right eye, so the output has
// half the source height. The three channels are reversed while copying
// (source byte 2 -> dest byte 0, etc.).
// NOTE(review): presumably this converts RGB source data to BGR output —
// confirm the camera's native channel order.
bool unpack_right_img_pixels(
    const void *data, const StreamRequest &request, Streams::frame_t *frame) {
  CHECK_NOTNULL(frame);
  CHECK_EQ(request.format, Format::BGR888);
  CHECK_EQ(frame->format(), Format::BGR888);
  auto data_new = reinterpret_cast<const std::uint8_t *>(data);
  std::size_t n = 3;  // bytes per pixel
  std::size_t w = frame->width();
  std::size_t h = frame->height();
  for (std::size_t i = 0; i < h; i++) {
    for (std::size_t j = 0; j < w; j++) {
      frame->data()[(i * w + j) * n] =
          *(data_new + ((2 * i + 1) * w + j) * n + 2);
      frame->data()[(i * w + j) * n + 1] =
          *(data_new + ((2 * i + 1) * w + j) * n + 1);
      frame->data()[(i * w + j) * n + 2] =
          *(data_new + ((2 * i + 1) * w + j) * n);
    }
  }
  return true;
}
// Extracts the S2 image-info packet that the device appends to the tail
// of every stereo frame (stored in reverse byte order). Validates the
// 0x3B header and fills |img| with frame id, timestamp and exposure time.
// Returns false if the header is invalid.
// NOTE(review): the checksum comparison below is computed but the
// rejection branch is deliberately commented out — packets with a bad
// checksum are currently accepted. Confirm whether this is intentional.
bool unpack_stereo_img_data(
    const void *data, const StreamRequest &request, ImgData *img) {
  CHECK_NOTNULL(img);
  auto data_new = reinterpret_cast<const std::uint8_t *>(data);
  std::size_t data_n =
      request.width * request.height * bytes_per_pixel(request.format);
  auto data_end = data_new + data_n;
  std::size_t packet_n = sizeof(ImagePacket);
  std::vector<std::uint8_t> packet(packet_n);
  // The packet bytes are reversed at the end of the frame; restore order.
  std::reverse_copy(data_end - packet_n, data_end, packet.begin());
  ImagePacket img_packet(packet.data());
  // LOG(INFO) << "ImagePacket: header=0x" << std::hex <<
  // static_cast<int>(img_packet.header)
  // << ", size=0x" << std::hex << static_cast<int>(img_packet.size)
  // << ", frame_id="<< std::dec << img_packet.frame_id
  // << ", timestamp="<< std::dec << img_packet.timestamp
  // << ", exposure_time="<< std::dec << img_packet.exposure_time
  // << ", checksum=0x" << std::hex << static_cast<int>(img_packet.checksum);
  if (img_packet.header != 0x3B) {
    VLOG(2) << "Image packet header must be 0x3B, but 0x" << std::hex
            << std::uppercase << std::setw(2) << std::setfill('0')
            << static_cast<int>(img_packet.header) << " now";
    return false;
  }
  std::uint8_t checksum = 0;
  for (std::size_t i = 2, n = packet_n - 2; i <= n; i++) {  // content: [2,9]
    checksum = (checksum ^ packet[i]);
  }
  /*
  if (img_packet.checksum != checksum) {
    VLOG(2) << "Image packet checksum should be 0x" << std::hex
            << std::uppercase << std::setw(2) << std::setfill('0')
            << static_cast<int>(img_packet.checksum) << ", but 0x"
            << std::setw(2) << std::setfill('0') << static_cast<int>(checksum)
            << " now";
    return false;
  }
  */
  img->frame_id = img_packet.frame_id;
  img->timestamp = img_packet.timestamp;
  img->exposure_time = img_packet.exposure_time;
  return true;
}
} // namespace
Standard2StreamsAdapter::Standard2StreamsAdapter() {
}

Standard2StreamsAdapter::~Standard2StreamsAdapter() {
}

// Streams the S2 model keys on for synchronization.
std::vector<Stream> Standard2StreamsAdapter::GetKeyStreams() {
  return {Stream::LEFT, Stream::RIGHT};
}

// The S2 model exposes a single color stereo capability.
std::vector<Capabilities> Standard2StreamsAdapter::GetStreamCapabilities() {
  return {Capabilities::STEREO_COLOR};
}

// Both eyes share the same trailing-info decoder.
std::map<Stream, Streams::unpack_img_data_t>
Standard2StreamsAdapter::GetUnpackImgDataMap() {
  return {
    {Stream::LEFT, unpack_stereo_img_data},
    {Stream::RIGHT, unpack_stereo_img_data}
  };
}

// Per-eye pixel extractors (even/odd rows of the interleaved frame).
std::map<Stream, Streams::unpack_img_pixels_t>
Standard2StreamsAdapter::GetUnpackImgPixelsMap() {
  return {
    {Stream::LEFT, unpack_left_img_pixels},
    {Stream::RIGHT, unpack_right_img_pixels}
  };
}
MYNTEYE_END_NAMESPACE

View File

@ -0,0 +1,42 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_DEVICE_STANDARD2_STREAMS_ADAPTER_S2_H_
#define MYNTEYE_DEVICE_STANDARD2_STREAMS_ADAPTER_S2_H_
#pragma once
#include <map>
#include <memory>
#include <vector>
#include "mynteye/device/streams.h"
MYNTEYE_BEGIN_NAMESPACE
// Streams adapter for the Standard2 (S2) model: declares its key streams,
// stream capabilities, and the unpack functions for image info and pixels.
class Standard2StreamsAdapter : public StreamsAdapter {
 public:
  Standard2StreamsAdapter();
  virtual ~Standard2StreamsAdapter();

  std::vector<Stream> GetKeyStreams() override;
  std::vector<Capabilities> GetStreamCapabilities() override;

  // Maps each stream to its image-info / pixel unpack routine.
  std::map<Stream, Streams::unpack_img_data_t>
  GetUnpackImgDataMap() override;
  std::map<Stream, Streams::unpack_img_pixels_t>
  GetUnpackImgPixelsMap() override;
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_DEVICE_STANDARD2_STREAMS_ADAPTER_S2_H_

View File

@ -15,7 +15,6 @@
#include <algorithm>
#include <chrono>
#include <iomanip>
#include <stdexcept>
#include "mynteye/logger.h"
@ -23,97 +22,11 @@
MYNTEYE_BEGIN_NAMESPACE
namespace {
bool unpack_stereo_img_data(
const void *data, const StreamRequest &request, ImgData *img) {
CHECK_NOTNULL(img);
CHECK_EQ(request.format, Format::YUYV);
auto data_new = reinterpret_cast<const std::uint8_t *>(data);
std::size_t data_n =
request.width * request.height * bytes_per_pixel(request.format);
auto data_end = data_new + data_n;
std::size_t packet_n = sizeof(ImagePacket);
std::vector<std::uint8_t> packet(packet_n);
std::reverse_copy(data_end - packet_n, data_end, packet.begin());
ImagePacket img_packet(packet.data());
// LOG(INFO) << "ImagePacket: header=0x" << std::hex <<
// static_cast<int>(img_packet.header)
// << ", size=0x" << std::hex << static_cast<int>(img_packet.size)
// << ", frame_id="<< std::dec << img_packet.frame_id
// << ", timestamp="<< std::dec << img_packet.timestamp
// << ", exposure_time="<< std::dec << img_packet.exposure_time
// << ", checksum=0x" << std::hex << static_cast<int>(img_packet.checksum);
if (img_packet.header != 0x3B) {
VLOG(2) << "Image packet header must be 0x3B, but 0x" << std::hex
<< std::uppercase << std::setw(2) << std::setfill('0')
<< static_cast<int>(img_packet.header) << " now";
return false;
}
std::uint8_t checksum = 0;
for (std::size_t i = 2, n = packet_n - 2; i <= n; i++) { // content: [2,9]
checksum = (checksum ^ packet[i]);
}
if (img_packet.checksum != checksum) {
VLOG(2) << "Image packet checksum should be 0x" << std::hex
<< std::uppercase << std::setw(2) << std::setfill('0')
<< static_cast<int>(img_packet.checksum) << ", but 0x"
<< std::setw(2) << std::setfill('0') << static_cast<int>(checksum)
<< " now";
return false;
}
img->frame_id = img_packet.frame_id;
img->timestamp = img_packet.timestamp;
img->exposure_time = img_packet.exposure_time;
return true;
}
bool unpack_left_img_pixels(
const void *data, const StreamRequest &request, Streams::frame_t *frame) {
CHECK_NOTNULL(frame);
CHECK_EQ(request.format, Format::YUYV);
CHECK_EQ(frame->format(), Format::GREY);
auto data_new = reinterpret_cast<const std::uint8_t *>(data);
std::size_t n = frame->width() * frame->height();
for (std::size_t i = 0; i < n; i++) {
frame->data()[i] = *(data_new + (i * 2));
}
return true;
}
bool unpack_right_img_pixels(
const void *data, const StreamRequest &request, Streams::frame_t *frame) {
CHECK_NOTNULL(frame);
CHECK_EQ(request.format, Format::YUYV);
CHECK_EQ(frame->format(), Format::GREY);
auto data_new = reinterpret_cast<const std::uint8_t *>(data);
std::size_t n = frame->width() * frame->height();
for (std::size_t i = 0; i < n; i++) {
frame->data()[i] = *(data_new + (i * 2 + 1));
}
return true;
}
} // namespace
Streams::Streams(const std::vector<Stream> key_streams)
: key_streams_(key_streams),
stream_capabilities_(
{Capabilities::STEREO, Capabilities::COLOR, Capabilities::DEPTH,
Capabilities::POINTS, Capabilities::FISHEYE, Capabilities::INFRARED,
Capabilities::INFRARED2}),
unpack_img_data_map_(
{{Stream::LEFT, unpack_stereo_img_data},
{Stream::RIGHT, unpack_stereo_img_data}}),
unpack_img_pixels_map_(
{{Stream::LEFT, unpack_left_img_pixels},
{Stream::RIGHT, unpack_right_img_pixels}}) {
Streams::Streams(const std::shared_ptr<StreamsAdapter> &adapter)
: key_streams_(std::move(adapter->GetKeyStreams())),
stream_capabilities_(std::move(adapter->GetStreamCapabilities())),
unpack_img_data_map_(std::move(adapter->GetUnpackImgDataMap())),
unpack_img_pixels_map_(std::move(adapter->GetUnpackImgPixelsMap())) {
VLOG(2) << __func__;
}
@ -139,16 +52,17 @@ bool Streams::PushStream(const Capabilities &capability, const void *data) {
auto &&request = GetStreamConfigRequest(capability);
bool pushed = false;
switch (capability) {
case Capabilities::STEREO: {
case Capabilities::STEREO:
case Capabilities::STEREO_COLOR: {
// alloc left
AllocStreamData(Stream::LEFT, request, Format::GREY);
AllocStreamData(capability, Stream::LEFT, request);
auto &&left_data = stream_datas_map_[Stream::LEFT].back();
// unpack img data
if (unpack_img_data_map_[Stream::LEFT](
data, request, left_data.img.get())) {
left_data.frame_id = left_data.img->frame_id;
// alloc right
AllocStreamData(Stream::RIGHT, request, Format::GREY);
AllocStreamData(capability, Stream::RIGHT, request);
auto &&right_data = stream_datas_map_[Stream::RIGHT].back();
*right_data.img = *left_data.img;
right_data.frame_id = left_data.img->frame_id;
@ -252,12 +166,16 @@ bool Streams::HasStreamDatas(const Stream &stream) const {
!stream_datas_map_.at(stream).empty();
}
void Streams::AllocStreamData(
void Streams::AllocStreamData(const Capabilities &capability,
const Stream &stream, const StreamRequest &request) {
AllocStreamData(stream, request, request.format);
auto format = request.format;
if (capability == Capabilities::STEREO) {
format = Format::GREY;
}
AllocStreamData(capability, stream, request, format);
}
void Streams::AllocStreamData(
void Streams::AllocStreamData(const Capabilities &capability,
const Stream &stream, const StreamRequest &request, const Format &format) {
stream_data_t data;
@ -282,8 +200,12 @@ void Streams::AllocStreamData(
data.img = nullptr;
}
if (!data.frame) {
data.frame = std::make_shared<frame_t>(
request.width, request.height, format, nullptr);
auto width = request.width;
if (capability == Capabilities::STEREO_COLOR) {
width /= 2; // split to half
}
data.frame =
std::make_shared<frame_t>(width, request.height, format, nullptr);
}
data.frame_id = 0;
stream_datas_map_[stream].push_back(data);

View File

@ -18,6 +18,7 @@
#include <condition_variable>
#include <functional>
#include <map>
#include <memory>
#include <mutex>
#include <vector>
@ -27,6 +28,8 @@
MYNTEYE_BEGIN_NAMESPACE
class StreamsAdapter;
class Streams {
public:
using frame_t = device::Frame;
@ -38,7 +41,7 @@ class Streams {
using unpack_img_pixels_t = std::function<bool(
const void *data, const StreamRequest &request, frame_t *frame)>;
explicit Streams(const std::vector<Stream> key_streams);
explicit Streams(const std::shared_ptr<StreamsAdapter> &adapter);
~Streams();
void ConfigStream(
@ -65,8 +68,9 @@ class Streams {
bool HasStreamDatas(const Stream &stream) const;
void AllocStreamData(const Stream &stream, const StreamRequest &request);
void AllocStreamData(
void AllocStreamData(const Capabilities &capability,
const Stream &stream, const StreamRequest &request);
void AllocStreamData(const Capabilities &capability,
const Stream &stream, const StreamRequest &request, const Format &format);
void DiscardStreamData(const Stream &stream);
@ -88,6 +92,19 @@ class Streams {
std::condition_variable cv_;
};
class StreamsAdapter {
public:
virtual ~StreamsAdapter() {}
virtual std::vector<Stream> GetKeyStreams() = 0;
virtual std::vector<Capabilities> GetStreamCapabilities() = 0;
virtual std::map<Stream, Streams::unpack_img_data_t>
GetUnpackImgDataMap() = 0;
virtual std::map<Stream, Streams::unpack_img_pixels_t>
GetUnpackImgPixelsMap() = 0;
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_DEVICE_STREAMS_H_

View File

@ -16,7 +16,6 @@
#pragma once
#include <cstdint>
#include <array>
#include <bitset>
#include <string>
@ -139,36 +138,6 @@ struct MYNTEYE_API DeviceInfo {
std::uint16_t nominal_baseline;
};
/**
* @ingroup datatypes
* Image packet.
*/
#pragma pack(push, 1)
struct ImagePacket {
std::uint8_t header;
std::uint8_t size;
std::uint16_t frame_id;
std::uint32_t timestamp;
std::uint16_t exposure_time;
std::uint8_t checksum;
ImagePacket() = default;
explicit ImagePacket(std::uint8_t *data) {
from_data(data);
}
void from_data(std::uint8_t *data) {
header = *data;
size = *(data + 1);
frame_id = (*(data + 2) << 8) | *(data + 3);
timestamp = (*(data + 4) << 24) | (*(data + 5) << 16) | (*(data + 6) << 8) |
*(data + 7);
exposure_time = (*(data + 8) << 8) | *(data + 9);
checksum = *(data + 10);
}
};
#pragma pack(pop)
/**
* @ingroup datatypes
* Imu request packet.
@ -199,28 +168,12 @@ struct ImuReqPacket {
*/
#pragma pack(push, 1)
struct ImuSegment {
std::int16_t offset;
std::uint16_t frame_id;
std::int16_t accel[3];
std::uint32_t frame_id;
std::uint64_t timestamp;
std::uint8_t flag;
std::int16_t temperature;
std::int16_t accel[3];
std::int16_t gyro[3];
ImuSegment() = default;
explicit ImuSegment(std::uint8_t *data) {
from_data(data);
}
void from_data(std::uint8_t *data) {
offset = (*(data) << 8) | *(data + 1);
frame_id = (*(data + 2) << 8) | *(data + 3);
accel[0] = (*(data + 4) << 8) | *(data + 5);
accel[1] = (*(data + 6) << 8) | *(data + 7);
accel[2] = (*(data + 8) << 8) | *(data + 9);
temperature = (*(data + 10) << 8) | *(data + 11);
gyro[0] = (*(data + 12) << 8) | *(data + 13);
gyro[1] = (*(data + 14) << 8) | *(data + 15);
gyro[2] = (*(data + 16) << 8) | *(data + 17);
}
};
#pragma pack(pop)
@ -230,28 +183,10 @@ struct ImuSegment {
*/
#pragma pack(push, 1)
struct ImuPacket {
std::uint32_t serial_number;
std::uint32_t timestamp;
std::uint8_t version;
std::uint8_t count;
std::uint32_t serial_number;
std::vector<ImuSegment> segments;
ImuPacket() = default;
explicit ImuPacket(std::uint8_t *data) {
from_data(data);
}
void from_data(std::uint8_t *data) {
serial_number = (*(data) << 24) | (*(data + 1) << 16) | (*(data + 2) << 8) |
*(data + 3);
timestamp = (*(data + 4) << 24) | (*(data + 5) << 16) | (*(data + 6) << 8) |
*(data + 7);
count = *(data + 8);
std::size_t seg_n = sizeof(ImuSegment); // 18
for (std::size_t i = 0; i < count; i++) {
segments.push_back(ImuSegment(data + 9 + (seg_n * i)));
}
}
};
#pragma pack(pop)
@ -261,31 +196,12 @@ struct ImuPacket {
*/
#pragma pack(push, 1)
struct ImuResPacket {
std::uint8_t version;
std::uint8_t header;
std::uint8_t state;
std::uint16_t size;
std::vector<ImuPacket> packets;
std::uint8_t checksum;
ImuResPacket() = default;
explicit ImuResPacket(std::uint8_t *data) {
from_data(data);
}
void from_data(std::uint8_t *data) {
header = *data;
state = *(data + 1);
size = (*(data + 2) << 8) | *(data + 3);
std::size_t seg_n = sizeof(ImuSegment); // 18
for (std::size_t i = 4; i < size;) {
ImuPacket packet(data + i);
packets.push_back(packet);
i += 9 + (packet.count * seg_n);
}
checksum = *(data + 4 + size);
}
};
#pragma pack(pop)

View File

@ -20,6 +20,8 @@
#include "mynteye/device/context.h"
#include "mynteye/device/device.h"
#include "mynteye/logger.h"
MYNTEYE_BEGIN_NAMESPACE
namespace device {
@ -29,14 +31,14 @@ std::shared_ptr<Device> select() {
Context context;
auto &&devices = context.devices();
size_t n = devices.size();
std::size_t n = devices.size();
if (n <= 0) {
LOG(ERROR) << "No MYNT EYE devices :(";
return nullptr;
}
LOG(INFO) << "MYNT EYE devices:";
for (size_t i = 0; i < n; i++) {
for (std::size_t i = 0; i < n; i++) {
auto &&device = devices[i];
LOG(INFO) << " index: " << i
<< ", name: " << device->GetInfo(Info::DEVICE_NAME)
@ -49,7 +51,7 @@ std::shared_ptr<Device> select() {
LOG(INFO) << "Only one MYNT EYE device, select index: 0";
} else {
while (true) {
size_t i;
std::size_t i;
LOG(INFO) << "There are " << n << " MYNT EYE devices, select index: ";
std::cin >> i;
if (i >= n) {
@ -64,6 +66,42 @@ std::shared_ptr<Device> select() {
return device;
}
// Let the user pick one of the device's supported stream requests via stdin.
// Sets *ok to false and returns a default-constructed request when the device
// reports none; with exactly one request it is selected automatically;
// otherwise the user is prompted until a valid index is entered.
MYNTEYE_NAMESPACE::StreamRequest select_request(
    const std::shared_ptr<Device> &device, bool *ok) {
  auto &&requests = device->GetStreamRequests();
  std::size_t n = requests.size();
  if (n == 0) {
    // Fixed copy-pasted message: this function lists stream requests,
    // not devices.
    LOG(ERROR) << "No stream requests :(";
    *ok = false;
    return {};
  }
  LOG(INFO) << "MYNT EYE stream requests:";
  for (std::size_t i = 0; i < n; i++) {
    auto &&request = requests[i];
    LOG(INFO) << " index: " << i
              << ", request: " << request;
  }
  if (n <= 1) {
    LOG(INFO) << "Only one stream request, select index: 0";
    *ok = true;
    return requests[0];
  } else {
    while (true) {
      std::size_t i;
      LOG(INFO) << "There are " << n << " stream requests, select index: ";
      std::cin >> i;
      if (i >= n) {
        LOG(WARNING) << "Index out of range :(";
        continue;
      }
      *ok = true;
      return requests[i];
    }
  }
}
} // namespace device
namespace utils {

View File

@ -29,6 +29,7 @@ const char *to_string(const Model &value) {
return "Model::" #X;
switch (value) {
CASE(STANDARD)
CASE(STANDARD2)
default:
CHECK(is_valid(value));
return "Model::UNKNOWN";
@ -62,6 +63,7 @@ const char *to_string(const Capabilities &value) {
return "Capabilities::" #X;
switch (value) {
CASE(STEREO)
CASE(STEREO_COLOR)
CASE(COLOR)
CASE(DEPTH)
CASE(POINTS)
@ -109,13 +111,16 @@ const char *to_string(const Option &value) {
CASE(EXPOSURE_MODE)
CASE(MAX_GAIN)
CASE(MAX_EXPOSURE_TIME)
CASE(MIN_EXPOSURE_TIME)
CASE(DESIRED_BRIGHTNESS)
CASE(IR_CONTROL)
CASE(HDR_MODE)
CASE(ZERO_DRIFT_CALIBRATION)
CASE(ERASE_CHIP)
CASE(ACCELEROMETER_RANGE)
CASE(GYROSCOPE_RANGE)
CASE(ACCELEROMETER_LOW_PASS_FILTER)
CASE(GYROSCOPE_LOW_PASS_FILTER)
CASE(ZERO_DRIFT_CALIBRATION)
CASE(ERASE_CHIP)
default:
CHECK(is_valid(value));
return "Option::UNKNOWN";
@ -157,6 +162,7 @@ const char *to_string(const Format &value) {
switch (value) {
CASE(GREY)
CASE(YUYV)
CASE(BGR888)
default:
return "Format::UNKNOWN";
}
@ -169,6 +175,8 @@ std::size_t bytes_per_pixel(const Format &value) {
return 1;
case Format::YUYV:
return 2;
case Format::BGR888:
return 3;
default:
LOG(FATAL) << "Unknown format";
}

View File

@ -46,9 +46,10 @@ namespace uvc {
} while (0)
#define NO_DATA_MAX_COUNT 200
#define LIVING_MAX_COUNT 9000
int no_data_count = 0;
int living_count = 0;
/*
class device_error : public std::exception {
public:
@ -394,6 +395,12 @@ struct device {
if (xioctl(fd, VIDIOC_QBUF, &buf) < 0)
throw_error("VIDIOC_QBUF");
});
if (living_count < LIVING_MAX_COUNT) {
living_count++;
} else {
living_count = 0;
// LOG(INFO) << "UVC pulse detection,Please ignore.";
}
}
no_data_count = 0;
@ -402,7 +409,12 @@ struct device {
}
if (no_data_count > NO_DATA_MAX_COUNT) {
throw_error("v4l2 get stream time out!");
no_data_count = 0;
living_count = 0;
LOG(WARNING) << __func__
<< " failed: v4l2 get stream time out, Try to reboot!";
stop_capture();
start_capture();
}
}

View File

@ -64,7 +64,8 @@ namespace uvc {
const std::map<uint32_t, uint32_t> fourcc_map = {
{ 0x56595559, 0x32595559 }, // 'VYUY' => '2YUY'
{ 0x59555956, 0x59555932 } // 'YUYV' => 'YUY2'
{ 0x59555956, 0x59555932 }, // 'YUYV' => 'YUY2'
{ 0x33524742, 0x14 }
};
struct throw_error {
@ -756,6 +757,7 @@ void set_device_mode(device &device, int width, int height, int fourcc, int fps,
check("IMFMediaType::GetGUID", media_type->GetGUID(MF_MT_SUBTYPE, &subtype));
if (subtype.Data1 != fourcc) continue;
check("MFSetAttributeRatio", MFSetAttributeRatio(media_type, MF_MT_FRAME_RATE, fps, 1));
check("MFGetAttributeRatio", MFGetAttributeRatio(media_type, MF_MT_FRAME_RATE, &uvc_fps_num, &uvc_fps_denom));
if (uvc_fps_denom == 0) continue;
//int uvc_fps = uvc_fps_num / uvc_fps_denom;

View File

@ -97,10 +97,10 @@ class BinDataset(object):
if What.imu in result:
imu = result[What.imu]
np.array([(
imu.timestamp,
imu.timestamp, imu.flag,
imu.accel_x, imu.accel_y, imu.accel_z,
imu.gyro_x, imu.gyro_y, imu.gyro_z
)], dtype="f8, f8, f8, f8, f8, f8, f8").tofile(f_imu)
)], dtype="f8, i4, f8, f8, f8, f8, f8, f8").tofile(f_imu)
imu_count = imu_count + 1
has_imu = True
sys.stdout.write('\r img: {}, imu: {}'.format(img_count, imu_count))
@ -130,7 +130,7 @@ class BinDataset(object):
if self.has_imu:
imus = np.memmap(self._binimu, dtype=[
('t', 'f8'),
('t', 'f8'), ('flag', 'i4'),
('accel_x', 'f8'), ('accel_y', 'f8'), ('accel_z', 'f8'),
('gyro_x', 'f8'), ('gyro_y', 'f8'), ('gyro_z', 'f8'),
], mode='r')
@ -145,92 +145,111 @@ class BinDataset(object):
print(' img: {}, imu: {}'.format(period_img, period_imu))
imgs_t_diff = np.diff(imgs['t'])
imus_t_diff = np.diff(imus['t'])
# imus_t_diff = np.diff(imus['t'])
accel = imus[imus['flag'] == 1]
accel_t_diff = np.diff(accel['t'])
gyro = imus[imus['flag'] == 2]
gyro_t_diff = np.diff(gyro['t'])
print('\ncount')
print(' imgs: {}, imus: {}, accel: {}, gyro: {}'.format(
imgs.size, imus.size, accel.size, gyro.size))
print('\ndiff count')
print(' imgs: {}, imus: {}'.format(imgs['t'].size, imus['t'].size))
print(' imgs_t_diff: {}, imus_t_diff: {}'
.format(imgs_t_diff.size, imus_t_diff.size))
print(' imgs_t_diff: {}, accel_t_diff: {}, gyro_t_diff: {}'.format(
imgs_t_diff.size, accel_t_diff.size, gyro_t_diff.size))
print('\ndiff where (factor={})'.format(args.factor))
where = np.argwhere(imgs_t_diff > period_img * (1 + args.factor))
print(' imgs where diff > {}*{} ({})'.format(period_img,
1 + args.factor, where.size))
for x in where:
print(' {:8d}: {:.16f}'.format(x[0], imgs_t_diff[x][0]))
where = np.argwhere(imgs_t_diff < period_img * (1 - args.factor))
print(' imgs where diff < {}*{} ({})'.format(period_img,
1 - args.factor, where.size))
for x in where:
print(' {:8d}: {:.16f}'.format(x[0], imgs_t_diff[x][0]))
where = np.argwhere(imus_t_diff > period_imu * (1 + args.factor))
print(' imus where diff > {}*{} ({})'.format(period_imu,
1 + args.factor, where.size))
for x in where:
print(' {:8d}: {:.16f}'.format(x[0], imus_t_diff[x][0]))
where = np.argwhere(imus_t_diff < period_imu * (1 - args.factor))
print(' imus where diff < {}*{} ({})'.format(period_imu,
1 - args.factor, where.size))
for x in where:
print(' {:8d}: {:.16f}'.format(x[0], imus_t_diff[x][0]))
self._print_t_diff_where('imgs', imgs_t_diff, period_img, args.factor)
# self._print_t_diff_where('imus', imus_t_diff, period_imu, args.factor)
self._print_t_diff_where('accel', accel_t_diff, period_imu, args.factor)
self._print_t_diff_where('gyro', gyro_t_diff, period_imu, args.factor)
import pandas as pd
bins = imgs['t']
bins_n = imgs['t'].size
bins = pd.Series(data=bins).drop_duplicates(keep='first')
cats = pd.cut(imus['t'], bins)
print('\nimage timestamp duplicates: {}'.format(bins_n - bins.size))
self._plot(outdir, imgs_t_diff, imus_t_diff, cats.value_counts())
def _cut_by_imgs_t(imus_t):
cats = pd.cut(imus_t, bins)
return cats.value_counts()
def _plot(self, outdir, imgs_t_diff, imus_t_diff, imgs_t_imus):
self._plot(
outdir,
imgs_t_diff,
accel_t_diff,
_cut_by_imgs_t(
accel['t']),
gyro_t_diff,
_cut_by_imgs_t(
gyro['t']))
def _print_t_diff_where(self, name, t_diff, period, factor):
import numpy as np
where = np.argwhere(t_diff > period * (1 + factor))
print(' {} where diff > {}*{} ({})'.format(
name, period, 1 + factor, where.size))
for x in where:
print(' {:8d}: {:.16f}'.format(x[0], t_diff[x][0]))
where = np.argwhere(t_diff < period * (1 - factor))
print(' {} where diff < {}*{} ({})'.format(
name, period, 1 - factor, where.size))
for x in where:
print(' {:8d}: {:.16f}'.format(x[0], t_diff[x][0]))
def _plot(self, outdir, imgs_t_diff,
accel_t_diff, accel_counts, gyro_t_diff, gyro_counts):
import matplotlib.pyplot as plt
import numpy as np
fig_1 = plt.figure(1, [16, 6])
fig_1 = plt.figure(1, [16, 12])
fig_1.suptitle('Stamp Analytics')
fig_1.subplots_adjust(
left=0.1,
right=0.95,
top=0.85,
bottom=0.15,
wspace=0.4)
wspace=0.4,
hspace=0.4)
ax_imgs_t_diff = fig_1.add_subplot(131)
ax_imgs_t_diff = fig_1.add_subplot(231)
ax_imgs_t_diff.set_title('Image Timestamp Diff')
ax_imgs_t_diff.set_xlabel('diff index')
ax_imgs_t_diff.set_ylabel('diff (s)')
ax_imgs_t_diff.axis('auto')
ax_imus_t_diff = fig_1.add_subplot(132)
ax_imus_t_diff.set_title('Imu Timestamp Diff')
ax_imus_t_diff.set_xlabel('diff index')
ax_imus_t_diff.set_ylabel('diff (s)')
ax_imus_t_diff.axis('auto')
ax_imgs_t_imus = fig_1.add_subplot(133)
ax_imgs_t_imus.set_title('Imu Count Per Image Intervel')
ax_imgs_t_imus.set_xlabel('intervel index')
ax_imgs_t_imus.set_ylabel('imu count')
ax_imgs_t_imus.axis('auto')
ax_imgs_t_diff.set_xlim([0, imgs_t_diff.size])
ax_imgs_t_diff.plot(imgs_t_diff)
ax_imus_t_diff.set_xlim([0, imus_t_diff.size])
ax_imus_t_diff.plot(imus_t_diff)
def _plot_imus(name, t_diff, counts, pos_offset=0):
ax_imus_t_diff = fig_1.add_subplot(232 + pos_offset)
ax_imus_t_diff.set_title('{} Timestamp Diff'.format(name))
ax_imus_t_diff.set_xlabel('diff index')
ax_imus_t_diff.set_ylabel('diff (s)')
ax_imus_t_diff.axis('auto')
# print(imgs_t_imus.values)
# imgs_t_imus.plot(kind='line', ax=ax_imgs_t_imus)
data = imgs_t_imus.values
ax_imgs_t_imus.set_xlim([0, data.size])
ax_imgs_t_imus.set_ylim([np.min(data) - 1, np.max(data) + 1])
ax_imgs_t_imus.plot(data)
ax_imus_t_diff.set_xlim([0, t_diff.size - 1])
ax_imus_t_diff.plot(t_diff)
ax_imus_counts = fig_1.add_subplot(233 + pos_offset)
ax_imus_counts.set_title('{} Count Per Image Intervel'.format(name))
ax_imus_counts.set_xlabel('intervel index')
ax_imus_counts.set_ylabel('imu count')
ax_imus_counts.axis('auto')
# print(counts.values)
# counts.plot(kind='line', ax=ax_imus_counts)
data = counts.values
ax_imus_counts.set_xlim([0, data.size])
ax_imus_counts.set_ylim([np.min(data) - 1, np.max(data) + 1])
ax_imus_counts.plot(data)
_plot_imus('Accel', accel_t_diff, accel_counts)
_plot_imus('Gyro', gyro_t_diff, gyro_counts, 3)
if outdir:
figpath = os.path.join(outdir, RESULT_FIGURE)
@ -288,14 +307,14 @@ def _parse_args():
'--rate-img',
dest='rate_img',
metavar='RATE',
default=25,
default=60,
type=int,
help='the img rate (default: %(default)s)')
parser.add_argument(
'--rate-imu',
dest='rate_imu',
metavar='RATE',
default=500,
default=200,
type=int,
help='the imu rate (default: %(default)s)')
return parser.parse_args()

View File

@ -20,13 +20,17 @@ set_outdir(
"${OUT_DIR}/bin/${DIR_NAME}"
)
include_directories(
${PRO_DIR}/src
)
## record
make_executable(record
SRCS record.cc dataset.cc
LINK_LIBS mynteye ${OpenCV_LIBS}
DLL_SEARCH_PATHS ${PRO_DIR}/_install/bin ${OpenCV_LIB_SEARCH_PATH}
)
# make_executable(record
# SRCS record.cc dataset.cc
# LINK_LIBS mynteye ${OpenCV_LIBS}
# DLL_SEARCH_PATHS ${PRO_DIR}/_install/bin ${OpenCV_LIB_SEARCH_PATH}
#)
make_executable(record2
SRCS record2.cc dataset.cc

View File

@ -13,11 +13,8 @@
// limitations under the License.
#include "dataset/dataset.h"
#ifdef WITH_OPENCV2
#include <opencv2/highgui/highgui.hpp>
#else
#include <opencv2/imgcodecs/imgcodecs.hpp>
#endif
#include <opencv2/imgproc/imgproc.hpp>
#include <iomanip>
#include <limits>
@ -68,23 +65,52 @@ void Dataset::SaveStreamData(
std::stringstream ss;
ss << writer->outdir << MYNTEYE_OS_SEP << std::dec
<< std::setw(IMAGE_FILENAME_WIDTH) << std::setfill('0') << seq << ".png";
cv::Mat img(
data.frame->height(), data.frame->width(), CV_8UC1, data.frame->data());
cv::imwrite(ss.str(), img);
if (data.frame->format() == Format::GREY) {
cv::Mat img(
data.frame->height(), data.frame->width(), CV_8UC1,
data.frame->data());
cv::imwrite(ss.str(), img);
} else if (data.frame->format() == Format::YUYV) {
cv::Mat img(
data.frame->height(), data.frame->width(), CV_8UC2,
data.frame->data());
cv::cvtColor(img, img, cv::COLOR_YUV2BGR_YUY2);
cv::imwrite(ss.str(), img);
} else if (data.frame->format() == Format::BGR888) {
cv::Mat img(
data.frame->height(), data.frame->width(), CV_8UC3,
data.frame->data());
cv::imwrite(ss.str(), img);
} else {
cv::Mat img(
data.frame->height(), data.frame->width(), CV_8UC1,
data.frame->data());
cv::imwrite(ss.str(), img);
}
}
++stream_counts_[stream];
}
void Dataset::SaveMotionData(const device::MotionData &data) {
auto &&writer = GetMotionWriter();
// auto seq = data.imu->serial_number;
auto seq = motion_count_;
writer->ofs << seq << ", " << data.imu->frame_id << ", "
<< data.imu->timestamp << ", " << data.imu->accel[0] << ", "
<< data.imu->accel[1] << ", " << data.imu->accel[2] << ", "
<< data.imu->gyro[0] << ", " << data.imu->gyro[1] << ", "
<< data.imu->gyro[2] << ", " << data.imu->temperature
<< std::endl;
++motion_count_;
if (data.imu->flag == 1 || data.imu->flag == 2) {
writer->ofs << seq << ", " << static_cast<int>(data.imu->flag) << ", "
<< data.imu->timestamp << ", " << data.imu->accel[0] << ", "
<< data.imu->accel[1] << ", " << data.imu->accel[2] << ", "
<< data.imu->gyro[0] << ", " << data.imu->gyro[1] << ", "
<< data.imu->gyro[2] << ", " << data.imu->temperature
<< std::endl;
++motion_count_;
}
/*
if(motion_count_ != seq) {
LOG(INFO) << "motion_count_ != seq !" << " motion_count_: " << motion_count_
<< " seq: " << seq;
motion_count_ = seq;
}
*/
}
void Dataset::SaveStreamData(
@ -105,14 +131,24 @@ void Dataset::SaveStreamData(
void Dataset::SaveMotionData(const api::MotionData &data) {
auto &&writer = GetMotionWriter();
// auto seq = data.imu->serial_number;
auto seq = motion_count_;
writer->ofs << seq << ", " << data.imu->frame_id << ", "
<< data.imu->timestamp << ", " << data.imu->accel[0] << ", "
<< data.imu->accel[1] << ", " << data.imu->accel[2] << ", "
<< data.imu->gyro[0] << ", " << data.imu->gyro[1] << ", "
<< data.imu->gyro[2] << ", " << data.imu->temperature
<< std::endl;
++motion_count_;
if (data.imu->flag == 1 || data.imu->flag == 2) {
writer->ofs << seq << ", " << static_cast<int>(data.imu->flag) << ", "
<< data.imu->timestamp << ", " << data.imu->accel[0] << ", "
<< data.imu->accel[1] << ", " << data.imu->accel[2] << ", "
<< data.imu->gyro[0] << ", " << data.imu->gyro[1] << ", "
<< data.imu->gyro[2] << ", " << data.imu->temperature
<< std::endl;
++motion_count_;
}
/*
if(motion_count_ != seq) {
LOG(INFO) << "motion_count_ != seq !" << " motion_count_: " << motion_count_
<< " seq: " << seq;
motion_count_ = seq;
}
*/
}
Dataset::writer_t Dataset::GetStreamWriter(const Stream &stream) {
@ -151,13 +187,15 @@ Dataset::writer_t Dataset::GetMotionWriter() {
files::mkdir(writer->outdir);
writer->ofs.open(writer->outfile, std::ofstream::out);
writer->ofs << "seq, frame_id, timestamp, accel_x, accel_y, accel_z, "
writer->ofs << "seq, flag, timestamp, accel_x, accel_y, accel_z, "
"gyro_x, gyro_y, gyro_z, temperature"
<< std::endl;
writer->ofs << FULL_PRECISION;
motion_writer_ = writer;
motion_count_ = 0;
accel_count_ = 0;
gyro_count_ = 0;
}
return motion_writer_;
}

View File

@ -58,6 +58,8 @@ class Dataset {
std::map<Stream, std::size_t> stream_counts_;
std::size_t motion_count_;
std::size_t accel_count_;
std::size_t gyro_count_;
};
} // namespace tools

View File

@ -24,25 +24,13 @@ MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api)
return 1;
/*
{ // auto-exposure
api->SetOptionValue(Option::EXPOSURE_MODE, 0);
api->SetOptionValue(Option::MAX_GAIN, 40); // [0.48]
api->SetOptionValue(Option::MAX_EXPOSURE_TIME, 120); // [0,240]
api->SetOptionValue(Option::DESIRED_BRIGHTNESS, 200); // [0,255]
}
{ // manual-exposure
api->SetOptionValue(Option::EXPOSURE_MODE, 1);
api->SetOptionValue(Option::GAIN, 20); // [0.48]
api->SetOptionValue(Option::BRIGHTNESS, 20); // [0,240]
api->SetOptionValue(Option::CONTRAST, 20); // [0,255]
}
api->SetOptionValue(Option::IR_CONTROL, 80);
api->SetOptionValue(Option::FRAME_RATE, 25);
api->SetOptionValue(Option::IMU_FREQUENCY, 500);
*/
if (!api) return 1;
bool ok;
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
api->LogOptionInfos();
// Enable this will cache the motion datas until you get them.
@ -72,14 +60,42 @@ int main(int argc, char *argv[]) {
auto &&motion_datas = api->GetMotionDatas();
imu_count += motion_datas.size();
auto &&left_img = left_datas.back().frame;
auto &&right_img = right_datas.back().frame;
auto &&left_frame = left_datas.back().frame;
auto &&right_frame = right_datas.back().frame;
cv::Mat img;
cv::hconcat(left_img, right_img, img);
cv::imshow("frame", img);
{ // save
if (left_frame->format() == Format::GREY) {
cv::Mat left_img(
left_frame->height(), left_frame->width(), CV_8UC1,
left_frame->data());
cv::Mat right_img(
right_frame->height(), right_frame->width(), CV_8UC1,
right_frame->data());
cv::hconcat(left_img, right_img, img);
} else if (left_frame->format() == Format::YUYV) {
cv::Mat left_img(
left_frame->height(), left_frame->width(), CV_8UC2,
left_frame->data());
cv::Mat right_img(
right_frame->height(), right_frame->width(), CV_8UC2,
right_frame->data());
cv::cvtColor(left_img, left_img, cv::COLOR_YUV2BGR_YUY2);
cv::cvtColor(right_img, right_img, cv::COLOR_YUV2BGR_YUY2);
cv::hconcat(left_img, right_img, img);
} else if (left_frame->format() == Format::BGR888) {
cv::Mat left_img(
left_frame->height(), left_frame->width(), CV_8UC3,
left_frame->data());
cv::Mat right_img(
right_frame->height(), right_frame->width(), CV_8UC3,
right_frame->data());
cv::hconcat(left_img, right_img, img);
} else {
return -1;
}
cv::imshow("frame", img);
if (img_count > 10 && imu_count > 50) { // save
for (auto &&left : left_datas) {
dataset.SaveStreamData(Stream::LEFT, left);
}

View File

@ -27,25 +27,13 @@ int main(int argc, char *argv[]) {
glog_init _(argc, argv);
auto &&device = device::select();
if (!device)
return 1;
/*
{ // auto-exposure
device->SetOptionValue(Option::EXPOSURE_MODE, 0);
device->SetOptionValue(Option::MAX_GAIN, 40); // [0.48]
device->SetOptionValue(Option::MAX_EXPOSURE_TIME, 120); // [0,240]
device->SetOptionValue(Option::DESIRED_BRIGHTNESS, 200); // [0,255]
}
{ // manual-exposure
device->SetOptionValue(Option::EXPOSURE_MODE, 1);
device->SetOptionValue(Option::GAIN, 20); // [0.48]
device->SetOptionValue(Option::BRIGHTNESS, 20); // [0,240]
device->SetOptionValue(Option::CONTRAST, 20); // [0,255]
}
device->SetOptionValue(Option::IR_CONTROL, 80);
device->SetOptionValue(Option::FRAME_RATE, 25);
device->SetOptionValue(Option::IMU_FREQUENCY, 500);
*/
if (!device) return 1;
bool ok;
auto &&request = device::select_request(device, &ok);
if (!ok) return 1;
device->ConfigStreamRequest(request);
device->LogOptionInfos();
// Enable this will cache the motion datas until you get them.
@ -77,17 +65,40 @@ int main(int argc, char *argv[]) {
auto &&left_frame = left_datas.back().frame;
auto &&right_frame = right_datas.back().frame;
cv::Mat left_img(
left_frame->height(), left_frame->width(), CV_8UC1, left_frame->data());
cv::Mat right_img(
right_frame->height(), right_frame->width(), CV_8UC1,
right_frame->data());
cv::Mat img;
cv::hconcat(left_img, right_img, img);
cv::imshow("frame", img);
{ // save
if (left_frame->format() == Format::GREY) {
cv::Mat left_img(
left_frame->height(), left_frame->width(), CV_8UC1,
left_frame->data());
cv::Mat right_img(
right_frame->height(), right_frame->width(), CV_8UC1,
right_frame->data());
cv::hconcat(left_img, right_img, img);
} else if (left_frame->format() == Format::YUYV) {
cv::Mat left_img(
left_frame->height(), left_frame->width(), CV_8UC2,
left_frame->data());
cv::Mat right_img(
right_frame->height(), right_frame->width(), CV_8UC2,
right_frame->data());
cv::cvtColor(left_img, left_img, cv::COLOR_YUV2BGR_YUY2);
cv::cvtColor(right_img, right_img, cv::COLOR_YUV2BGR_YUY2);
cv::hconcat(left_img, right_img, img);
} else if (left_frame->format() == Format::BGR888) {
cv::Mat left_img(
left_frame->height(), left_frame->width(), CV_8UC3,
left_frame->data());
cv::Mat right_img(
right_frame->height(), right_frame->width(), CV_8UC3,
right_frame->data());
cv::hconcat(left_img, right_img, img);
} else {
return -1;
}
cv::imshow("frame", img);
if (img_count > 10 && imu_count > 50) { // save
for (auto &&left : left_datas) {
dataset.SaveStreamData(Stream::LEFT, left);
}
@ -123,4 +134,4 @@ int main(int argc, char *argv[]) {
LOG(INFO) << "Imu count: " << imu_count
<< ", hz: " << (1000.f * imu_count / elapsed_ms);
return 0;
}
}

View File

@ -81,6 +81,7 @@ class IMU(Data):
def __init__(self):
super(IMU, self).__init__()
self._flag = 0
self._accel_x = 0
self._accel_y = 0
self._accel_z = 0
@ -88,6 +89,14 @@ class IMU(Data):
self._gyro_y = 0
self._gyro_z = 0
@property
def flag(self):
return self._flag
@flag.setter
def flag(self, flag):
self._flag = flag
@property
def accel(self):
return self._accel_x, self._accel_y, self._accel_z
@ -339,9 +348,9 @@ class MYNTEYE(Dataset):
if index == -1:
sys.exit('Error: Dataset is unexpected format, timestamp not found')
# unit from 0.01ms to 1s
info.timebeg = float(first.split(',')[index].strip()) * 0.00001
info.timeend = float(last.split(',')[index].strip()) * 0.00001
# unit from 1us to 1s
info.timebeg = float(first.split(',')[index].strip()) * 0.000001
info.timeend = float(last.split(',')[index].strip()) * 0.000001
# print('time: [{}, {}]'.format(info.timebeg, info.timeend))
return info
@ -364,7 +373,7 @@ class MYNTEYE(Dataset):
for line in f:
values = [_.strip() for _ in line.split(',')]
img = Image()
img.timestamp = float(values[fields['timestamp']]) * 0.00001
img.timestamp = float(values[fields['timestamp']]) * 0.000001
yield {What.img_left: img}
if hit_img_right and self._info.has_img_right:
with open(self._info.img_right_txt) as f:
@ -372,7 +381,7 @@ class MYNTEYE(Dataset):
for line in f:
values = [_.strip() for _ in line.split(',')]
img = Image()
img.timestamp = float(values[fields['timestamp']]) * 0.00001
img.timestamp = float(values[fields['timestamp']]) * 0.000001
yield {What.img_right: img}
if (hit_imu or hit_temp) and self._info.has_imu:
with open(self._info.imu_txt) as f:
@ -380,7 +389,8 @@ class MYNTEYE(Dataset):
for line in f:
values = [_.strip() for _ in line.split(',')]
imu = IMU()
imu.timestamp = float(values[fields['timestamp']]) * 0.00001
imu.timestamp = float(values[fields['timestamp']]) * 0.000001
imu.flag = values[fields['flag']]
imu.accel_x = float(values[fields['accel_x']])
imu.accel_y = float(values[fields['accel_y']])
imu.accel_z = float(values[fields['accel_z']])

View File

@ -1,10 +1,10 @@
%YAML:1.0
---
device_name: MYNT-EYE-S1000
device_name: MYNT-EYE-S210A
serial_number: "0386322C0009070E"
firmware_version: "2.2"
hardware_version: "2.0"
spec_version: "1.0"
firmware_version: "1.0"
hardware_version: "1.0"
spec_version: "1.1"
lens_type: "0000"
imu_type: "0000"
nominal_baseline: 120

View File

@ -1,32 +1,53 @@
%YAML:1.0
---
in_left:
version: "1.1"
in_left_map:
-
width: 752
height: 480
fx: 3.6220059643202876e+02
fy: 3.6350065250745848e+02
cx: 4.0658699068023441e+02
cy: 2.3435161110061483e+02
width: 640
height: 400
fx: 1.9739641213416058e+02
fy: 1.9772337597617189e+02
cx: 3.2611983633916327e+02
cy: 1.9986969132833946e+02
model: 0
coeffs: [ -2.5034765682756088e-01, 5.0579399202897619e-02,
-7.0536676161976066e-04, -8.5255451307033846e-03, 0. ]
in_right:
coeffs: [ 1.2135236310725651e-01, -8.5442776049177036e-02,
2.4914898631983504e-03, -3.7752063658256863e-03, 0. ]
-
width: 752
height: 480
fx: 3.6514014888558478e+02
fy: 3.6513385298966961e+02
cx: 3.8932395100630907e+02
cy: 2.3495160212312547e+02
width: 1280
height: 800
fx: 1.9739641213416058e+02
fy: 1.9772337597617189e+02
cx: 3.2611983633916327e+02
cy: 1.9986969132833946e+02
model: 0
coeffs: [ -3.0377346762098512e-01, 7.9929693673999838e-02,
5.1547517530716883e-05, -6.7345903740579250e-04, 0. ]
coeffs: [ 1.2135236310725651e-01, -8.5442776049177036e-02,
2.4914898631983504e-03, -3.7752063658256863e-03, 0. ]
in_right_map:
-
width: 640
height: 400
fx: 2.0335498653655989e+02
fy: 2.0453858622699008e+02
cx: 3.1589962248180814e+02
cy: 2.1871688038954812e+02
model: 0
coeffs: [ 2.2904330559241560e-02, -2.9561990079971841e-02,
3.9725942760981507e-03, -3.9689073214945591e-03, 0. ]
-
width: 1280
height: 800
fx: 2.0335498653655989e+02
fy: 2.0453858622699008e+02
cx: 3.1589962248180814e+02
cy: 2.1871688038954812e+02
model: 0
coeffs: [ 2.2904330559241560e-02, -2.9561990079971841e-02,
3.9725942760981507e-03, -3.9689073214945591e-03, 0. ]
ex_right_to_left:
rotation: [ 9.9867908939669447e-01, -6.3445566137485428e-03,
5.0988459509619687e-02, 5.9890316389333252e-03,
9.9995670037792639e-01, 7.1224201868366971e-03,
-5.1031440326695092e-02, -6.8076406092671274e-03,
9.9867384471984544e-01 ]
translation: [ -1.2002489764113250e+02, -1.1782637409050747e+00,
-5.2058205159996538e+00 ]
rotation: [ 9.9998850083695123e-01, -1.9263678722299450e-03,
-4.3917309443490191e-03, 1.8166060642710027e-03,
9.9968925981619028e-01, -2.4861290203142431e-02,
4.4382582477776426e-03, 2.4853026274046636e-02,
9.9968126367795229e-01 ]
translation: [ -8.2270200890555529e+01, -1.9535144360069059e+00,
2.2588034344482368e+00 ]

View File

@ -13,6 +13,7 @@
// limitations under the License.
#include "writer/device_writer.h"
#include <map>
#include <vector>
#include <opencv2/core/core.hpp>
@ -69,9 +70,9 @@ bool DeviceWriter::WriteImgParams(const img_params_t &params) {
nullptr, const_cast<img_params_t *>(&params), nullptr,
&dev_info->spec_version)) {
LOG(INFO) << "Write img params success";
LOG(INFO) << "Intrinsics left: {" << params.in_left << "}";
LOG(INFO) << "Intrinsics right: {" << params.in_right << "}";
LOG(INFO) << "Extrinsics right to left: {" << params.ex_right_to_left
// LOG(INFO) << "Intrinsics left: {" << params.in_left << "}";
// LOG(INFO) << "Intrinsics right: {" << params.in_right << "}";
LOG(INFO) << "Extrinsics left to right: {" << params.ex_right_to_left
<< "}";
return true;
} else {
@ -125,6 +126,16 @@ cv::FileStorage &operator<<(
return fs;
}
cv::FileStorage &operator<<(
cv::FileStorage &fs, const std::map<Resolution, Intrinsics> &mapIn) {
fs << "[";
std::map<Resolution, Intrinsics>::const_iterator it;
for (it = mapIn.begin(); it != mapIn.end(); it++)
fs << (*it).second;
fs << "]";
return fs;
}
cv::FileStorage &operator<<(cv::FileStorage &fs, const ImuIntrinsics &in) {
std::vector<double> scales;
for (std::size_t i = 0; i < 3; i++) {
@ -176,18 +187,25 @@ bool DeviceWriter::SaveDeviceInfo(
}
bool DeviceWriter::SaveImgParams(
const img_params_t &params, const std::string &filepath) {
const dev_info_t &info, const img_params_t &params,
const std::string &filepath) {
using FileStorage = cv::FileStorage;
FileStorage fs(filepath, FileStorage::WRITE);
if (!fs.isOpened()) {
LOG(ERROR) << "Failed to save file: " << filepath;
return false;
}
fs << "in_left" << std::vector<Intrinsics>{params.in_left} << "in_right"
<< std::vector<Intrinsics>{params.in_right} << "ex_right_to_left"
<< params.ex_right_to_left;
fs.release();
return true;
if (params.in_left_map.size() == params.in_right_map.size()) {
fs << "version" << info.spec_version.to_string() << "in_left_map"
<< params.in_left_map << "in_right_map" << params.in_right_map
<< "ex_right_to_left" << params.ex_right_to_left;
fs.release();
return true;
} else {
fs.release();
return false;
}
}
bool DeviceWriter::SaveImuParams(
@ -210,9 +228,7 @@ void DeviceWriter::SaveAllInfos(const std::string &dir) {
}
SaveDeviceInfo(*device_->GetInfo(), dir + MYNTEYE_OS_SEP "device.info");
SaveImgParams(
{false, device_->GetIntrinsics(Stream::LEFT),
device_->GetIntrinsics(Stream::RIGHT),
device_->GetExtrinsics(Stream::RIGHT, Stream::LEFT)},
*device_->GetInfo(), device_->GetImgParams(),
dir + MYNTEYE_OS_SEP "img.params");
auto &&m_in = device_->GetMotionIntrinsics();
SaveImuParams(
@ -326,34 +342,51 @@ DeviceWriter::img_params_t DeviceWriter::LoadImgParams(
}
img_params_t params;
if (fs["in_left"].isNone()) {
std::uint16_t w = 752;
std::uint16_t h = 480;
std::uint8_t m = 0;
if (!fs["width"].isNone())
w = static_cast<int>(fs["width"]);
if (!fs["height"].isNone())
h = static_cast<int>(fs["height"]);
if (!fs["model"].isNone())
m = static_cast<int>(fs["model"]);
if (fs["version"].isNone()) {
if (fs["in_left"].isNone()) {
std::uint16_t w = 752;
std::uint16_t h = 480;
std::uint8_t m = 0;
if (!fs["width"].isNone())
w = static_cast<int>(fs["width"]);
if (!fs["height"].isNone())
h = static_cast<int>(fs["height"]);
if (!fs["model"].isNone())
m = static_cast<int>(fs["model"]);
cv::Mat M1, D1, M2, D2, R, T;
fs["M1"] >> M1;
fs["D1"] >> D1;
fs["M2"] >> M2;
fs["D2"] >> D2;
fs["R"] >> R;
fs["T"] >> T;
cv::Mat M1, D1, M2, D2, R, T;
fs["M1"] >> M1;
fs["D1"] >> D1;
fs["M2"] >> M2;
fs["D2"] >> D2;
fs["R"] >> R;
fs["T"] >> T;
to_intrinsics(w, h, m, M1, D1, &params.in_left);
to_intrinsics(w, h, m, M2, D2, &params.in_right);
to_extrinsics(R, T, &params.ex_right_to_left);
to_intrinsics(
w, h, m, M1, D1, &params.in_left_map[Resolution::RES_752x480]);
to_intrinsics(
w, h, m, M2, D2, &params.in_right_map[Resolution::RES_752x480]);
to_extrinsics(R, T, &params.ex_right_to_left);
} else {
fs["in_left"][0] >> params.in_left_map[Resolution::RES_752x480];
fs["in_right"][0] >> params.in_right_map[Resolution::RES_752x480];
fs["ex_right_to_left"] >> params.ex_right_to_left;
}
} else {
fs["in_left"][0] >> params.in_left;
fs["in_right"][0] >> params.in_right;
fs["ex_right_to_left"] >> params.ex_right_to_left;
// TODO(Kalman): Is there a more reasonable way?
if (static_cast<std::string>(fs["version"]) == "1.0") {
fs["in_left_map"][0] >> params.in_left_map[Resolution::RES_752x480];
fs["in_right_map"][0] >> params.in_right_map[Resolution::RES_752x480];
fs["ex_right_to_left"] >> params.ex_right_to_left;
}
if (static_cast<std::string>(fs["version"]) == "1.1") {
fs["in_left_map"][0] >> params.in_left_map[Resolution::RES_1280x400];
fs["in_left_map"][1] >> params.in_left_map[Resolution::RES_2560x800];
fs["in_right_map"][0] >> params.in_right_map[Resolution::RES_1280x400];
fs["in_right_map"][1] >> params.in_right_map[Resolution::RES_2560x800];
fs["ex_right_to_left"] >> params.ex_right_to_left;
}
}
fs.release();
return params;
}

View File

@ -31,8 +31,8 @@ namespace tools {
class DeviceWriter {
public:
using dev_info_t = DeviceInfo;
using img_params_t = Channels::img_params_t;
using imu_params_t = Channels::imu_params_t;
using img_params_t = device::img_params_t;
using imu_params_t = device::imu_params_t;
explicit DeviceWriter(std::shared_ptr<Device> device);
~DeviceWriter();
@ -47,7 +47,9 @@ class DeviceWriter {
bool WriteImuParams(const std::string &filepath);
bool SaveDeviceInfo(const dev_info_t &info, const std::string &filepath);
bool SaveImgParams(const img_params_t &params, const std::string &filepath);
bool SaveImgParams(
const dev_info_t &info, const img_params_t &params,
const std::string &filepath);
bool SaveImuParams(const imu_params_t &params, const std::string &filepath);
/** Save all infos of this device */

View File

@ -104,8 +104,7 @@ struct MYNTEYE_API MotionData {
ImuData imu;
bool operator==(const MotionData &other) const {
return imu.frame_id == other.imu.frame_id &&
imu.timestamp == other.imu.timestamp;
return imu.timestamp == other.imu.timestamp;
}
};
@ -364,7 +363,6 @@ BOOST_PYTHON_MODULE(mynteye_py) {
// bp::register_ptr_to_python<std::shared_ptr<ImgData>>();
bp::class_<ImuData>("ImuData")
.def_readonly("frame_id", &ImuData::frame_id)
.def_readonly("timestamp", &ImuData::timestamp)
.add_property(
"accel", +[](ImuData *o) { return array_ref<double>{o->accel}; })

View File

@ -103,6 +103,7 @@ add_compile_options(-std=c++11)
include_directories(
${catkin_INCLUDE_DIRS}
${SDK_DIR}/src
)
set(LINK_LIBS

View File

@ -13,6 +13,9 @@
<arg name="depth_topic" default="depth/image_raw" />
<arg name="points_topic" default="points/data_raw" />
<arg name="left_mono_topic" default="left/image_mono" />
<arg name="right_mono_topic" default="right/image_mono" />
<arg name="imu_topic" default="imu/data_raw" />
<arg name="temp_topic" default="temp/data_raw" />
@ -26,13 +29,34 @@
<arg name="points_frame_id" default="$(arg mynteye)_points_frame" />
<arg name="depth_frame_id" default="$(arg mynteye)_depth_frame" />
<arg name="imu_frame_id" default="$(arg mynteye)_imu_frame" />
<arg name="temp_frame_id" default="$(arg mynteye)_temp_frame" />
<arg name="gravity" default="9.8" />
<!-- stream toggles -->
<!-- Request index -->
<!-- MYNTEYE-S210A, Reslution: 1280x400, Format: BGR888, Fps: 10 -->
<arg name="index_s210a_0" default="0" />
<!-- MYNTEYE-S210A, Reslution: 1280x400, Format: BGR888, Fps: 20 -->
<arg name="index_s210a_1" default="1" />
<!-- MYNTEYE-S210A, Reslution: 1280x400, Format: BGR888, Fps: 30 -->
<arg name="index_s210a_2" default="2" />
<!-- MYNTEYE-S210A, Reslution: 1280x400, Format: BGR888, Fps: 60 -->
<arg name="index_s210a_3" default="3" />
<!-- MYNTEYE-S210A, Reslution: 2560x800, Format: BGR888, Fps: 10 -->
<arg name="index_s210a_4" default="4" />
<!-- MYNTEYE-S210A, Reslution: 2560x800, Format: BGR888, Fps: 20 -->
<arg name="index_s210a_5" default="5" />
<!-- MYNTEYE-S210A, Reslution: 2560x800, Format: BGR888, Fps: 30 -->
<arg name="index_s210a_6" default="6" />
<!-- MYNTEYE-S1030, Reslution: 752x480, Format: YUYV, Fps: 25 -->
<arg name="index_s1030_0" default="0" />
<arg name="request_index" default="$(arg index_s210a_2)" />
<arg name="enable_left_rect" default="false" />
<arg name="enable_right_rect" default="false" />
<arg name="enable_disparity" default="false" />
@ -42,58 +66,46 @@
<!-- device options, -1 will not set the value -->
<!-- gain range: [0,48] -->
<arg name="gain" default="-1" />
<!-- <arg name="gain" default="24" /> -->
<!-- brightness range: [0,240] -->
<arg name="brightness" default="-1" />
<!-- <arg name="brightness" default="120" /> -->
<!-- contrast range: [0,255] -->
<arg name="contrast" default="-1" />
<!-- <arg name="contrast" default="127" /> -->
<!-- frame_rate range: {10,15,20,25,30,35,40,45,50,55,60} -->
<arg name="frame_rate" default="-1" />
<!-- <arg name="frame_rate" default="25" /> -->
<!-- imu_frequency range: {100,200,250,333,500} -->
<arg name="imu_frequency" default="-1" />
<!-- <arg name="imu_frequency" default="200" /> -->
<!-- exposure_mode, 0: auto-exposure, 1: manual-exposure -->
<arg name="exposure_mode" default="-1" />
<!-- <arg name="exposure_mode" default="0" /> -->
<!-- max_gain range: [0,48] -->
<!-- max_gain range: [0,255] -->
<arg name="max_gain" default="-1" />
<!-- <arg name="max_gain" default="48" /> -->
<!-- <arg name="max_gain" default="8" /> -->
<!-- max_exposure_time range: [0,240] -->
<!-- max_exposure_time range: [0,1000] -->
<arg name="max_exposure_time" default="-1" />
<!-- <arg name="max_exposure_time" default="240" /> -->
<!-- <arg name="max_exposure_time" default="333" /> -->
<!-- desired_brightness range: [0,255] -->
<!-- desired_brightness range: [1,255] -->
<arg name="desired_brightness" default="-1" />
<!-- <arg name="desired_brightness" default="192" /> -->
<!-- <arg name="desired_brightness" default="122" /> -->
<!-- ir_control range: [0,160] -->
<arg name="ir_control" default="80" />
<!-- <arg name="ir_control" default="0" /> -->
<!-- min_exposure_time range: [0,1000] -->
<arg name="min_exposure_time" default="-1" />
<!-- <arg name="min_exposure_time" default="0" /> -->
<!-- hdr_mode, 0: 10-bit, 1: 12-bit -->
<arg name="hdr_mode" default="-1" />
<!-- <arg name="hdr_mode" default="0" /> -->
<!-- accel_range range: {4,8,16,32} -->
<!-- accel_range range: [6,48] -->
<arg name="accel_range" default="-1" />
<!-- <arg name="accel_range" default="8" /> -->
<!-- <arg name="accel_range" default="6" /> -->
<!-- gyro_range range: {500,1000,2000,4000} -->
<!-- gyro_range range: [250,4000] -->
<arg name="gyro_range" default="-1" />
<!-- <arg name="gyro_range" default="1000" /> -->
<!-- accel_low_filter range: [0,2] -->
<arg name="accel_low_filter" default="-1" />
<!-- <arg name="accel_low_filter" default="2" /> -->
<!-- gyro_low_filter range: [23,64] -->
<arg name="gyro_low_filter" default="-1" />
<!-- <arg name="gyro_low_filter" default="64" /> -->
<!-- Push down all topics/nodelets into "mynteye" namespace -->
<group ns="$(arg mynteye)">
@ -111,6 +123,9 @@
<param name="points_topic" value="$(arg points_topic)" />
<param name="depth_topic" value="$(arg depth_topic)" />
<param name="left_mono_topic" value="$(arg left_mono_topic)" />
<param name="right_mono_topic" value="$(arg right_mono_topic)" />
<param name="imu_topic" value="$(arg imu_topic)" />
<param name="temp_topic" value="$(arg temp_topic)" />
@ -124,7 +139,6 @@
<param name="points_frame_id" value="$(arg points_frame_id)" />
<param name="depth_frame_id" value="$(arg depth_frame_id)" />
<param name="imu_frame_id" value="$(arg imu_frame_id)" />
<param name="temp_frame_id" value="$(arg temp_frame_id)" />
<param name="gravity" value="$(arg gravity)" />
@ -138,22 +152,21 @@
<param name="enable_points" value="$(arg enable_points)" />
<param name="enable_depth" value="$(arg enable_depth)" />
<!-- stream request index -->
<param name="request_index" value="$(arg request_index)" />
<!-- device options -->
<param name="gain" value="$(arg gain)" />
<param name="brightness" value="$(arg brightness)" />
<param name="contrast" value="$(arg contrast)" />
<param name="frame_rate" value="$(arg frame_rate)" />
<param name="imu_frequency" value="$(arg imu_frequency)" />
<param name="exposure_mode" value="$(arg exposure_mode)" />
<param name="max_gain" value="$(arg max_gain)" />
<param name="max_exposure_time" value="$(arg max_exposure_time)" />
<param name="desired_brightness" value="$(arg desired_brightness)" />
<param name="ir_control" value="$(arg ir_control)" />
<param name="hdr_mode" value="$(arg hdr_mode)" />
<param name="min_exposure_time" value="$(arg max_exposure_time)" />
<param name="accel_range" value="$(arg accel_range)" />
<param name="gyro_range" value="$(arg gyro_range)" />
<param name="accel_low_filter" value="$(arg accel_low_filter)" />
<param name="gyro_low_filter" value="$(arg gyro_low_filter)" />
</node>
<!-- disable compressed depth plugin for image topics -->

View File

@ -7,6 +7,7 @@ Panels:
- /Global Options1
- /Status1
- /Images1
- /Images1/Gray1
- /Images1/Rectified1
- /Disparity1
Splitter Ratio: 0.5
@ -77,6 +78,34 @@ Visualization Manager:
Transport Hint: raw
Unreliable: false
Value: true
- Class: rviz/Group
Displays:
- Class: rviz/Image
Enabled: true
Image Topic: /mynteye/left/image_mono
Max Value: 1
Median window: 5
Min Value: 0
Name: LeftMono
Normalize Range: true
Queue Size: 2
Transport Hint: raw
Unreliable: false
Value: true
- Class: rviz/Image
Enabled: true
Image Topic: /mynteye/right/image_mono
Max Value: 1
Median window: 5
Min Value: 0
Name: RightMono
Normalize Range: true
Queue Size: 2
Transport Hint: raw
Unreliable: false
Value: true
Enabled: true
Name: Mono
- Class: rviz/Group
Displays:
- Class: rviz/Image

View File

@ -71,8 +71,15 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
<< (right_count_ / compute_time(time_end, right_time_beg_));
}
if (imu_time_beg_ != -1) {
LOG(INFO) << "Imu count: " << imu_count_ << ", hz: "
<< (imu_count_ / compute_time(time_end, imu_time_beg_));
if (publish_imu_by_sync_) {
LOG(INFO) << "imu_sync_count: " << imu_sync_count_ << ", hz: "
<< (imu_sync_count_ /
compute_time(time_end, imu_time_beg_));
} else {
LOG(INFO) << "Imu count: " << imu_count_ << ", hz: "
<< (imu_count_ /
compute_time(time_end, imu_time_beg_));
}
}
// ROS messages could not be reliably printed here, using glog instead :(
@ -92,15 +99,17 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
}
return ros::Time(
soft_time_begin + (_hard_time - hard_time_begin) * 0.00001f);
soft_time_begin + (_hard_time - hard_time_begin) * 0.000001f);
}
void onInit() override {
initDevice();
NODELET_FATAL_COND(api_ == nullptr, "No MYNT EYE device selected :(");
nh_ = getMTNodeHandle();
private_nh_ = getMTPrivateNodeHandle();
int request_index = 0;
private_nh_.getParam("request_index", request_index);
initDevice(request_index);
NODELET_FATAL_COND(api_ == nullptr, "No MYNT EYE device selected :(");
// node params
@ -125,6 +134,15 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
is_motion_published_ = false;
is_started_ = false;
std::map<Stream, std::string> mono_names{{Stream::LEFT, "left_mono"},
{Stream::RIGHT, "right_mono"}};
std::map<Stream, std::string> mono_topics{};
for (auto &&it = mono_names.begin(); it != mono_names.end(); ++it) {
mono_topics[it->first] = it->second;
private_nh_.getParam(it->second + "_topic", mono_topics[it->first]);
}
std::string imu_topic = "imu";
std::string temp_topic = "temp";
private_nh_.getParam("imu_topic", imu_topic);
@ -149,20 +167,16 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
// device options
std::map<Option, std::string> option_names = {
{Option::GAIN, "gain"},
{Option::BRIGHTNESS, "brightness"},
{Option::CONTRAST, "contrast"},
{Option::FRAME_RATE, "frame_rate"},
{Option::IMU_FREQUENCY, "imu_frequency"},
{Option::EXPOSURE_MODE, "exposure_mode"},
{Option::MAX_GAIN, "max_gain"},
{Option::MAX_EXPOSURE_TIME, "max_exposure_time"},
{Option::DESIRED_BRIGHTNESS, "desired_brightness"},
{Option::IR_CONTROL, "ir_control"},
{Option::HDR_MODE, "hdr_mode"},
{Option::MIN_EXPOSURE_TIME, "min_exposure_time"},
{Option::ACCELEROMETER_RANGE, "accel_range"},
{Option::GYROSCOPE_RANGE, "gyro_range"}
};
{Option::GYROSCOPE_RANGE, "gyro_range"},
{Option::ACCELEROMETER_LOW_PASS_FILTER, "accel_low_filter"},
{Option::GYROSCOPE_LOW_PASS_FILTER, "gyro_low_filter"}};
for (auto &&it = option_names.begin(); it != option_names.end(); ++it) {
if (!api_->Supports(it->first))
continue;
@ -174,7 +188,6 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
}
NODELET_INFO_STREAM(it->first << ": " << api_->GetOptionValue(it->first));
}
frame_rate_ = api_->GetOptionValue(Option::FRAME_RATE);
// publishers
@ -190,10 +203,18 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
NODELET_INFO_STREAM("Advertized on topic " << topic);
}
camera_encodings_ = {{Stream::LEFT, enc::MONO8},
{Stream::RIGHT, enc::MONO8},
{Stream::LEFT_RECTIFIED, enc::MONO8},
{Stream::RIGHT_RECTIFIED, enc::MONO8},
for (auto &&it = mono_topics.begin(); it != mono_topics.end(); ++it) {
auto &&topic = mono_topics[it->first];
if (it->first == Stream::LEFT || it->first == Stream::RIGHT) { // camera
mono_publishers_[it->first] = it_mynteye.advertiseCamera(topic, 1);
}
NODELET_INFO_STREAM("Advertized on topic " << topic);
}
camera_encodings_ = {{Stream::LEFT, enc::BGR8},
{Stream::RIGHT, enc::BGR8},
{Stream::LEFT_RECTIFIED, enc::BGR8},
{Stream::RIGHT_RECTIFIED, enc::BGR8},
{Stream::DISPARITY, enc::MONO8}, // float
{Stream::DISPARITY_NORMALIZED, enc::MONO8},
{Stream::DEPTH, enc::MONO16}};
@ -229,7 +250,7 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
NODELET_INFO_STREAM("Advertized service " << DEVICE_INFO_SERVICE);
publishStaticTransforms();
ros::Rate loop_rate(frame_rate_);
ros::Rate loop_rate(60);
while (private_nh_.ok()) {
publishTopics();
loop_rate.sleep();
@ -352,7 +373,8 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
}
void publishTopics() {
if (camera_publishers_[Stream::LEFT].getNumSubscribers() > 0 &&
if ((camera_publishers_[Stream::LEFT].getNumSubscribers() > 0 ||
mono_publishers_[Stream::LEFT].getNumSubscribers() > 0) &&
!is_published_[Stream::LEFT]) {
api_->SetStreamCallback(
Stream::LEFT, [this](const api::StreamData &data) {
@ -371,6 +393,7 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
++left_count_;
publishCamera(Stream::LEFT, data, left_count_, stamp);
publishMono(Stream::LEFT, data, left_count_, stamp);
NODELET_DEBUG_STREAM(
Stream::LEFT << ", count: " << left_count_
<< ", frame_id: " << data.img->frame_id
@ -381,7 +404,8 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
is_published_[Stream::LEFT] = true;
}
if (camera_publishers_[Stream::RIGHT].getNumSubscribers() > 0 &&
if ((camera_publishers_[Stream::RIGHT].getNumSubscribers() > 0 ||
mono_publishers_[Stream::RIGHT].getNumSubscribers() > 0) &&
!is_published_[Stream::RIGHT]) {
api_->SetStreamCallback(
Stream::RIGHT, [this](const api::StreamData &data) {
@ -389,6 +413,7 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
++right_count_;
publishCamera(Stream::RIGHT, data, right_count_, stamp);
publishMono(Stream::RIGHT, data, right_count_, stamp);
NODELET_DEBUG_STREAM(
Stream::RIGHT
<< ", count: " << right_count_ << ", frame_id: "
@ -414,33 +439,47 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
if (!is_motion_published_) {
api_->SetMotionCallback([this](const api::MotionData &data) {
ros::Time stamp = hardTimeToSoftTime(data.imu->timestamp);
ros::Time stamp = hardTimeToSoftTime(data.imu->timestamp);
// static double imu_time_prev = -1;
// NODELET_INFO_STREAM("ros_time_beg: " << FULL_PRECISION <<
// ros_time_beg
// << ", imu_time_elapsed: " << FULL_PRECISION
// << ((data.imu->timestamp - imu_time_beg) * 0.00001f)
// << ", imu_time_diff: " << FULL_PRECISION
// << ((imu_time_prev < 0) ? 0
// : (data.imu->timestamp - imu_time_prev) * 0.01f) << " ms");
// imu_time_prev = data.imu->timestamp;
// static double imu_time_prev = -1;
// NODELET_INFO_STREAM("ros_time_beg: " << FULL_PRECISION << ros_time_beg
// << ", imu_time_elapsed: " << FULL_PRECISION
// << ((data.imu->timestamp - imu_time_beg) * 0.00001f)
// << ", imu_time_diff: " << FULL_PRECISION
// << ((imu_time_prev < 0) ? 0
// : (data.imu->timestamp - imu_time_prev) * 0.01f) << " ms");
// imu_time_prev = data.imu->timestamp;
++imu_count_;
++imu_count_;
if (publish_imu_by_sync_) {
if (data.imu) {
if (data.imu->flag == 1) { // accelerometer
imu_accel_ = data.imu;
publishImuBySync(stamp);
} else if (data.imu->flag == 2) { // gyroscope
imu_gyro_ = data.imu;
publishImuBySync(stamp);
} else {
NODELET_WARN_STREAM("Imu type is unknown");
}
} else {
NODELET_WARN_STREAM("Motion data is empty");
}
} else {
publishImu(data, imu_count_, stamp);
publishTemp(data.imu->temperature, imu_count_, stamp);
NODELET_DEBUG_STREAM(
"Imu count: " << imu_count_ << ", frame_id: " << data.imu->frame_id
<< ", timestamp: " << data.imu->timestamp
<< ", accel_x: " << data.imu->accel[0]
<< ", accel_y: " << data.imu->accel[1]
<< ", accel_z: " << data.imu->accel[2]
<< ", gyro_x: " << data.imu->gyro[0]
<< ", gyro_y: " << data.imu->gyro[1]
<< ", gyro_z: " << data.imu->gyro[2]
<< ", temperature: " << data.imu->temperature);
// Sleep 1ms, otherwise publish may drop some datas.
ros::Duration(0.001).sleep();
}
NODELET_DEBUG_STREAM(
"Imu count: " << imu_count_ << ", timestamp: " << data.imu->timestamp
<< ", accel_x: " << data.imu->accel[0]
<< ", accel_y: " << data.imu->accel[1]
<< ", accel_z: " << data.imu->accel[2]
<< ", gyro_x: " << data.imu->gyro[0]
<< ", gyro_y: " << data.imu->gyro[1]
<< ", gyro_z: " << data.imu->gyro[2]
<< ", temperature: " << data.imu->temperature);
// Sleep 1ms, otherwise publish may drop some datas.
ros::Duration(0.001).sleep();
});
imu_time_beg_ = ros::Time::now().toSec();
is_motion_published_ = true;
@ -493,6 +532,23 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
}
*/
void publishMono(
const Stream &stream, const api::StreamData &data, std::uint32_t seq,
ros::Time stamp) {
if (mono_publishers_[stream].getNumSubscribers() == 0)
return;
std_msgs::Header header;
header.seq = seq;
header.stamp = stamp;
header.frame_id = frame_ids_[stream];
cv::Mat mono;
cv::cvtColor(data.frame, mono, CV_RGB2GRAY);
auto &&msg = cv_bridge::CvImage(header, enc::MONO8, mono).toImageMsg();
auto &&info = getCameraInfo(stream);
info->header.stamp = msg->header.stamp;
mono_publishers_[stream].publish(msg, info);
}
void publishPoints(
const api::StreamData &data, std::uint32_t seq, ros::Time stamp) {
// if (points_publisher_.getNumSubscribers() == 0)
@ -598,6 +654,59 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
pub_imu_.publish(msg);
}
void publishImuBySync(ros::Time stamp) {
if (imu_accel_ == nullptr || imu_gyro_ == nullptr) {
return;
}
sensor_msgs::Imu msg;
msg.header.seq = imu_sync_count_;
msg.header.stamp = stamp;
msg.header.frame_id = imu_frame_id_;
// acceleration should be in m/s^2 (not in g's)
msg.linear_acceleration.x = imu_accel_->accel[0] * gravity_;
msg.linear_acceleration.y = imu_accel_->accel[1] * gravity_;
msg.linear_acceleration.z = imu_accel_->accel[2] * gravity_;
msg.linear_acceleration_covariance[0] = 0;
msg.linear_acceleration_covariance[1] = 0;
msg.linear_acceleration_covariance[2] = 0;
msg.linear_acceleration_covariance[3] = 0;
msg.linear_acceleration_covariance[4] = 0;
msg.linear_acceleration_covariance[5] = 0;
msg.linear_acceleration_covariance[6] = 0;
msg.linear_acceleration_covariance[7] = 0;
msg.linear_acceleration_covariance[8] = 0;
// velocity should be in rad/sec
msg.angular_velocity.x = imu_gyro_->gyro[0] * M_PI / 180;
msg.angular_velocity.y = imu_gyro_->gyro[1] * M_PI / 180;
msg.angular_velocity.z = imu_gyro_->gyro[2] * M_PI / 180;
msg.angular_velocity_covariance[0] = 0;
msg.angular_velocity_covariance[1] = 0;
msg.angular_velocity_covariance[2] = 0;
msg.angular_velocity_covariance[3] = 0;
msg.angular_velocity_covariance[4] = 0;
msg.angular_velocity_covariance[5] = 0;
msg.angular_velocity_covariance[6] = 0;
msg.angular_velocity_covariance[7] = 0;
msg.angular_velocity_covariance[8] = 0;
pub_imu_.publish(msg);
publishTemp(imu_accel_->temperature, imu_sync_count_, stamp);
++imu_sync_count_;
imu_accel_ = nullptr;
imu_gyro_ = nullptr;
}
void publishTemp(float temperature, std::uint32_t seq, ros::Time stamp) {
if (pub_temp_.getNumSubscribers() == 0)
return;
@ -610,7 +719,7 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
}
private:
void initDevice() {
void initDevice(int request_index) {
NODELET_INFO_STREAM("Detecting MYNT EYE devices");
Context context;
@ -646,6 +755,22 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
}
api_ = API::Create(device);
auto &&requests = device->GetStreamRequests();
std::size_t m = requests.size();
NODELET_FATAL_COND(m <= 0, "No MYNT EYE devices :(");
if (m <= 1) {
NODELET_INFO_STREAM("Only one stream request, select index: 0");
api_->ConfigStreamRequest(requests[0]);
} else {
if (request_index >= m) {
NODELET_WARN_STREAM("Resquest_index out of range");
api_->ConfigStreamRequest(requests[0]);
} else {
NODELET_WARN_STREAM("request_index: " << request_index);
api_->ConfigStreamRequest(requests[request_index]);
}
}
computeRectTransforms();
}
@ -657,7 +782,6 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
auto &&ex_right_to_left = api_->GetExtrinsics(Stream::RIGHT, Stream::LEFT);
cv::Size size{in_left.width, in_left.height};
cv::Mat M1 =
(cv::Mat_<double>(3, 3) << in_left.fx, 0, in_left.cx, 0, in_left.fy,
in_left.cy, 0, 0, 1);
@ -910,6 +1034,14 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
std::map<Stream, sensor_msgs::CameraInfoPtr> camera_info_ptrs_;
std::map<Stream, std::string> camera_encodings_;
// image: LEFT_RECTIFIED, RIGHT_RECTIFIED, DISPARITY, DISPARITY_NORMALIZED,
// DEPTH
std::map<Stream, image_transport::Publisher> image_publishers_;
std::map<Stream, std::string> image_encodings_;
// mono: LEFT, RIGHT
std::map<Stream, image_transport::CameraPublisher> mono_publishers_;
// pointcloud: POINTS
ros::Publisher points_publisher_;
@ -944,7 +1076,10 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
std::size_t left_count_ = 0;
std::size_t right_count_ = 0;
std::size_t imu_count_ = 0;
std::size_t imu_sync_count_ = 0;
std::shared_ptr<ImuData> imu_accel_;
std::shared_ptr<ImuData> imu_gyro_;
bool publish_imu_by_sync_ = false;
std::map<Stream, bool> is_published_;
bool is_motion_published_;
bool is_started_;