Complete device & streams

This commit is contained in:
John Zhao 2018-04-08 22:35:49 +08:00
parent 81caf45dba
commit cfcb1ca531
15 changed files with 356 additions and 36 deletions

View File

@ -4,6 +4,7 @@
#include <cstdint>
#include <algorithm>
#include <iostream>
#include <type_traits>
@ -198,6 +199,8 @@ MYNTEYE_ENUM_HELPERS(Source)
* @brief Formats define how each stream can be encoded.
*/
enum class Format : std::uint32_t {
/** Greyscale, 8 bits per pixel */
GREY = MYNTEYE_FOURCC('G', 'R', 'E', 'Y'),
/** YUV 4:2:2, 16 bits per pixel */
YUYV = MYNTEYE_FOURCC('Y', 'U', 'Y', 'V'),
/** Last guard */
@ -224,7 +227,7 @@ struct MYNTEYE_API StreamRequest {
std::uint16_t height;
/** stream pixel format */
Format format;
/** frames per second */
/** frames per second (unused) */
std::uint16_t fps;
bool operator==(const StreamRequest &other) const {
@ -236,6 +239,8 @@ struct MYNTEYE_API StreamRequest {
}
};
std::ostream &operator<<(std::ostream &os, const StreamRequest &request);
/**
* @defgroup calibration Intrinsics & Extrinsics
* @brief Intrinsic and extrinsic properties.
@ -334,6 +339,12 @@ struct MYNTEYE_API ImgData {
std::uint32_t timestamp;
/** Image exposure time in 0.01ms */
std::uint16_t exposure_time;
/** Reset all image metadata fields to zero. */
void Reset() {
frame_id = 0;
timestamp = 0;
exposure_time = 0;
}
};
/**
@ -351,6 +362,14 @@ struct MYNTEYE_API ImuData {
double gyro[3];
/** IMU temperature */
double temperature;
/** Reset all IMU sample fields to zero. */
void Reset() {
frame_id = 0;
timestamp = 0;
// Zero all three axes of the accelerometer and gyroscope readings.
std::fill(accel, accel + 3, 0);
std::fill(gyro, gyro + 3, 0);
temperature = 0;
}
};
MYNTEYE_END_NAMESPACE

View File

@ -1,6 +1,10 @@
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include "glog_init.h" // NOLINT
#include "device/context.h"
#include "device/device.h"
MYNTEYE_USE_NAMESPACE
@ -8,6 +12,78 @@ int main(int argc, char *argv[]) {
glog_init _(argc, argv);
Context context;
auto &&devices = context.devices();
size_t n = devices.size();
LOG_IF(FATAL, n <= 0) << "No MYNT EYE devices :(";
LOG(INFO) << "MYNT EYE devices: ";
for (size_t i = 0; i < n; i++) {
auto &&device = devices[i];
auto &&name = device->GetInfo(Info::DEVICE_NAME);
LOG(INFO) << " index: " << i << ", name: " << name;
}
std::shared_ptr<Device> device = nullptr;
if (n <= 1) {
device = devices[0];
LOG(INFO) << "Only one MYNT EYE device, select index: 0";
} else {
while (true) {
size_t i;
LOG(INFO) << "There are " << n << " MYNT EYE devices, select index: ";
std::cin >> i;
if (i >= n) {
LOG(WARNING) << "Index out of range :(";
continue;
}
device = devices[i];
break;
}
}
std::size_t left_count = 0;
device->SetStreamCallback(
Stream::LEFT, [&left_count](const device::StreamData &data) {
++left_count;
VLOG(2) << Stream::LEFT << ", count: " << left_count;
UNUSED(data)
});
std::size_t right_count = 0;
device->SetStreamCallback(
Stream::RIGHT, [&right_count](const device::StreamData &data) {
++right_count;
VLOG(2) << Stream::RIGHT << ", count: " << right_count;
UNUSED(data)
});
device->Start(Source::ALL);
cv::namedWindow("frame");
while (true) {
device->WaitForStreams();
device::StreamData left_data = device->GetLatestStreamData(Stream::LEFT);
device::StreamData right_data = device->GetLatestStreamData(Stream::RIGHT);
cv::Mat left_img(
left_data.frame->height(), left_data.frame->width(), CV_8UC1,
left_data.frame->data());
cv::Mat right_img(
right_data.frame->height(), right_data.frame->width(), CV_8UC1,
right_data.frame->data());
cv::Mat img;
cv::hconcat(left_img, right_img, img);
cv::imshow("frame", img);
char key = static_cast<char>(cv::waitKey(1));
if (key == 27 || key == 'q' || key == 'Q') { // ESC/Q
break;
}
}
device->Stop(Source::ALL);
return 0;
}

View File

@ -14,7 +14,7 @@ struct glog_init {
FLAGS_max_log_size = 1024;
FLAGS_stop_logging_if_full_disk = true;
FLAGS_v = 2;
// FLAGS_v = 2;
google::InitGoogleLogging(argv[0]);

View File

@ -15,8 +15,8 @@ Context::Context() : context_(uvc::create_context()) {
auto vid = uvc::get_vendor_id(*device);
auto pid = uvc::get_product_id(*device);
// auto video_name = uvc::get_video_name(*device);
LOG(INFO) << "UVC device detected, name: " << name << ", vid: 0x"
<< std::hex << vid << ", pid: 0x" << std::hex << pid;
VLOG(2) << "UVC device detected, name: " << name << ", vid: 0x" << std::hex
<< vid << ", pid: 0x" << std::hex << pid;
if (vid == MYNTEYE_VID) {
auto d = Device::Create(name, device);
if (d) {

View File

@ -146,6 +146,18 @@ void Device::SetMotionCallback(motion_callback_t callback) {
motion_callback_ = callback;
}
// Returns true if a non-null callback is registered for the given stream.
//
// Uses map::find() instead of at() + catch(std::out_of_range): exceptions
// are not meant for ordinary control flow, and find() avoids the
// throw/unwind cost on the common "no callback registered" path.
bool Device::HasStreamCallback(const Stream &stream) const {
  auto it = stream_callbacks_.find(stream);
  return it != stream_callbacks_.end() && it->second != nullptr;
}
// Returns true if a motion (IMU) callback has been set.
bool Device::HasMotionCallback() const {
return motion_callback_ != nullptr;
}
void Device::Start(const Source &source) {
if (source == Source::VIDEO_STREAMING) {
StartVideoStreaming();
@ -172,9 +184,27 @@ void Device::Stop(const Source &source) {
}
}
// Block until stream data is available.
// Precondition: video streaming must be started (CHECK aborts otherwise).
void Device::WaitForStreams() {
CHECK(video_streaming_);
CHECK_NOTNULL(streams_);
streams_->WaitForStreams();
}
// Take all pending datas of a stream (delegates to Streams).
// Precondition: video streaming must be started (CHECK aborts otherwise).
std::vector<device::StreamData> Device::GetStreamDatas(const Stream &stream) {
CHECK(video_streaming_);
CHECK_NOTNULL(streams_);
return streams_->GetStreamDatas(stream);
}
// Return the newest pending data of a stream (delegates to Streams).
// Precondition: video streaming must be started (CHECK aborts otherwise).
device::StreamData Device::GetLatestStreamData(const Stream &stream) {
CHECK(video_streaming_);
CHECK_NOTNULL(streams_);
return streams_->GetLatestStreamData(stream);
}
const StreamRequest &Device::GetStreamRequest(const Capabilities &capability) {
try {
return stream_config_requests_[capability];
return stream_config_requests_.at(capability);
} catch (const std::out_of_range &e) {
auto &&requests = GetStreamRequests(capability);
if (requests.size() == 1) {
@ -192,7 +222,7 @@ void Device::StartVideoStreaming() {
return;
}
streams_ = std::make_shared<Streams>();
streams_ = std::make_shared<Streams>(GetKeyStreams());
// if stream capabilities are supported with subdevices of device_
/*
@ -211,13 +241,21 @@ void Device::StartVideoStreaming() {
if (Supports(Capabilities::STEREO)) {
// do stream request selection if more than one request of each stream
auto &&stream_request = GetStreamRequest(Capabilities::STEREO);
streams_->ConfigStream(Capabilities::STEREO, stream_request);
uvc::set_device_mode(
*device_, stream_request.width, stream_request.height,
static_cast<int>(stream_request.format), stream_request.fps,
[this](const void *data) {
streams_->PushStream(Capabilities::STEREO, data);
// ...
if (HasStreamCallback(Stream::LEFT)) {
auto &&stream_data = streams_->stream_datas(Stream::LEFT).back();
stream_callbacks_.at(Stream::LEFT)(stream_data);
}
if (HasStreamCallback(Stream::RIGHT)) {
auto &&stream_data = streams_->stream_datas(Stream::RIGHT).back();
stream_callbacks_.at(Stream::RIGHT)(stream_data);
}
});
} else {
LOG(FATAL) << "Not any stream capabilities are supported by this device";
@ -245,6 +283,7 @@ void Device::StartMotionTracking() {
return;
}
// TODO(JohnZhao)
motion_tracking_ = true;
}
void Device::StopMotionTracking() {
@ -253,10 +292,13 @@ void Device::StopMotionTracking() {
return;
}
// TODO(JohnZhao)
motion_tracking_ = false;
}
// Populate device_info_ from the UVC device.
// Only the name is filled for now; remaining fields stay default-constructed.
void Device::ReadDeviceInfo() {
// TODO(JohnZhao): Read device info
device_info_ = std::make_shared<DeviceInfo>();
device_info_->name = uvc::get_name(*device_);
}
void Device::WriteImgIntrinsics(const ImgIntrinsics &intrinsics) {

View File

@ -62,9 +62,17 @@ class Device {
void SetStreamCallback(const Stream &stream, stream_callback_t callback);
void SetMotionCallback(motion_callback_t callback);
bool HasStreamCallback(const Stream &stream) const;
bool HasMotionCallback() const;
virtual void Start(const Source &source);
virtual void Stop(const Source &source);
void WaitForStreams();
std::vector<device::StreamData> GetStreamDatas(const Stream &stream);
device::StreamData GetLatestStreamData(const Stream &stream);
protected:
std::shared_ptr<uvc::device> device() const {
return device_;
@ -82,6 +90,8 @@ class Device {
virtual void StartMotionTracking();
virtual void StopMotionTracking();
virtual std::vector<Stream> GetKeyStreams() const = 0;
bool video_streaming_;
bool motion_tracking_;

View File

@ -13,4 +13,8 @@ StandardDevice::~StandardDevice() {
VLOG(2) << __func__;
}
// Key streams are the ones WaitForStreams() synchronizes on.
std::vector<Stream> StandardDevice::GetKeyStreams() const {
return {Stream::LEFT, Stream::RIGHT};
}
MYNTEYE_END_NAMESPACE

View File

@ -3,6 +3,7 @@
#pragma once
#include <memory>
#include <vector>
#include "device/device.h"
@ -12,6 +13,8 @@ class StandardDevice : public Device {
public:
explicit StandardDevice(std::shared_ptr<uvc::device> device);
virtual ~StandardDevice();
std::vector<Stream> GetKeyStreams() const override;
};
MYNTEYE_END_NAMESPACE

View File

@ -3,6 +3,8 @@
#pragma once
#include <cstdint>
#include <algorithm>
#include <functional>
#include <memory>
#include <vector>
@ -25,8 +27,13 @@ class Frame {
std::uint16_t width, std::uint16_t height, Format format,
const void *data)
: width_(width), height_(height), format_(format) {
const std::uint8_t *bytes = static_cast<const std::uint8_t *>(data);
data_ = data_t(bytes, bytes + (width * height) * bytes_per_pixel(format));
std::size_t bytes_n = (width * height) * bytes_per_pixel(format);
if (data) {
const std::uint8_t *bytes = static_cast<const std::uint8_t *>(data);
data_ = data_t(bytes, bytes + bytes_n);
} else {
data_.assign(bytes_n, 0);
}
}
std::uint16_t width() const {
@ -41,8 +48,22 @@ class Frame {
return format_;
}
const data_t &data() const {
return data_;
std::uint8_t *data() {
return data_.data();
}
const std::uint8_t *data() const {
return data_.data();
}
std::size_t size() const {
return data_.size();
}
/** Returns a deep copy of this frame (its own buffer, same pixel bytes). */
Frame clone() const {
  // The constructor already copies the full pixel buffer when handed a
  // non-null data pointer, so delegate the deep copy to it.
  return Frame(width_, height_, format_, data_.data());
}
private:
@ -54,12 +75,12 @@ class Frame {
};
struct MYNTEYE_API StreamData {
ImgData img;
std::shared_ptr<ImgData> img;
std::shared_ptr<Frame> frame;
};
struct MYNTEYE_API MotionData {
ImuData imu;
std::shared_ptr<ImuData> imu;
};
using StreamCallback = std::function<void(const StreamData &data)>;

View File

@ -3,14 +3,57 @@
#include <glog/logging.h>
#include <algorithm>
#include <chrono>
MYNTEYE_BEGIN_NAMESPACE
Streams::Streams()
: stream_capabilities_(
namespace {
// Stub: image metadata (frame id, timestamp, exposure) is not yet
// extracted from the raw stereo packet; `img` is left untouched.
// NOTE(review): packet layout still to be defined — confirm before wiring up.
void unpack_stereo_img_data(
const void *data, const StreamRequest &request, ImgData &img) { // NOLINT
UNUSED(data)
UNUSED(request)
UNUSED(img)
}
void unpack_left_img_pixels(
const void *data, const StreamRequest &request,
Streams::frame_t &frame) { // NOLINT
CHECK_EQ(request.format, Format::YUYV);
CHECK_EQ(frame.format(), Format::GREY);
auto data_new = reinterpret_cast<const std::uint8_t *>(data);
std::size_t n = frame.width() * frame.height();
for (std::size_t i = 0; i < n; i++) {
frame.data()[i] = *(data_new + (i * 2));
}
}
void unpack_right_img_pixels(
const void *data, const StreamRequest &request,
Streams::frame_t &frame) { // NOLINT
CHECK_EQ(request.format, Format::YUYV);
CHECK_EQ(frame.format(), Format::GREY);
auto data_new = reinterpret_cast<const std::uint8_t *>(data);
std::size_t n = frame.width() * frame.height();
for (std::size_t i = 0; i < n; i++) {
frame.data()[i] = *(data_new + (i * 2 + 1));
}
}
} // namespace
// Construct the stream manager.
// @param key_streams streams WaitForStreams()/HasKeyStreamDatas() wait on.
Streams::Streams(const std::vector<Stream> key_streams)
: key_streams_(key_streams),
stream_capabilities_(
{Capabilities::STEREO, Capabilities::COLOR, Capabilities::DEPTH,
Capabilities::POINTS, Capabilities::FISHEYE, Capabilities::INFRARED,
Capabilities::INFRARED2}),
// Metadata unpackers: left/right share the same (stub) img unpacker.
unpack_img_data_map_(
{{Stream::LEFT, unpack_stereo_img_data},
{Stream::RIGHT, unpack_stereo_img_data}}),
// Pixel unpackers: de-interleave left/right from the packed frame.
unpack_img_pixels_map_(
{{Stream::LEFT, unpack_left_img_pixels},
{Stream::RIGHT, unpack_right_img_pixels}}) {
VLOG(2) << __func__;
}
@ -23,6 +66,7 @@ void Streams::ConfigStream(
if (!IsStreamCapability(capability)) {
LOG(FATAL) << "Cannot config stream without stream capability";
}
VLOG(2) << "Config stream request of " << capability << ", " << request;
stream_config_requests_[capability] = request;
}
@ -30,10 +74,56 @@ void Streams::PushStream(const Capabilities &capability, const void *data) {
if (!HasStreamConfigRequest(capability)) {
LOG(FATAL) << "Cannot push stream without stream config request";
}
auto frame =
std::make_shared<frame_t>(GetStreamConfigRequest(capability), data);
// stream_datas_map_[Stream::LEFT];
// stream_datas_map_[Stream::RIGHT];
std::unique_lock<std::mutex> lock(mtx_);
auto &&request = GetStreamConfigRequest(capability);
switch (capability) {
case Capabilities::STEREO: {
// alloc
AllocStreamData(Stream::LEFT, request, Format::GREY);
AllocStreamData(Stream::RIGHT, request, Format::GREY);
auto &&left_data = stream_datas_map_[Stream::LEFT].back();
auto &&right_data = stream_datas_map_[Stream::RIGHT].back();
// unpack img data
unpack_img_data_map_[Stream::LEFT](data, request, *left_data.img);
right_data.img = left_data.img;
// unpack frame
unpack_img_pixels_map_[Stream::LEFT](data, request, *left_data.frame);
unpack_img_pixels_map_[Stream::RIGHT](data, request, *right_data.frame);
} break;
default:
LOG(FATAL) << "Not supported " << capability << " now";
}
if (HasKeyStreamDatas())
cv_.notify_one();
}
void Streams::WaitForStreams() {
std::unique_lock<std::mutex> lock(mtx_);
auto ready = std::bind(&Streams::HasKeyStreamDatas, this);
if (!ready() && !cv_.wait_for(lock, std::chrono::seconds(2), ready)) {
LOG(FATAL) << "Timeout waiting for key frames";
}
}
// Take (move out) all pending datas of a stream; the pending list is
// cleared. Returns an empty vector (with a warning) if nothing is pending.
Streams::stream_datas_t Streams::GetStreamDatas(const Stream &stream) {
  // Lock BEFORE inspecting the map: PushStream() mutates stream_datas_map_
  // under mtx_ on the capture thread, so the original unlocked check raced.
  std::unique_lock<std::mutex> lock(mtx_);
  if (!HasStreamDatas(stream) || stream_datas_map_.at(stream).empty()) {
    LOG(WARNING) << "There are no stream datas of " << stream
                 << ", do you first call WaitForStreams?";
    return {};
  }
  stream_datas_t datas = stream_datas_map_.at(stream);
  stream_datas_map_[stream].clear();
  return datas;
}
// Return the newest pending data of a stream.
// Returns a default-constructed data (null img/frame) when nothing is
// pending: the original called back() on the possibly-empty vector
// returned by GetStreamDatas(), which is undefined behavior.
Streams::stream_data_t Streams::GetLatestStreamData(const Stream &stream) {
  auto &&datas = GetStreamDatas(stream);
  if (datas.empty()) {
    return {};
  }
  return datas.back();
}
// Read-only access to the pending datas of a stream.
// Throws std::out_of_range if the stream was never allocated.
// NOTE(review): does not take mtx_ — presumably only called from the
// capture callback thread (see Device::StartVideoStreaming); confirm.
const Streams::stream_datas_t &Streams::stream_datas(
const Stream &stream) const {
return stream_datas_map_.at(stream);
}
bool Streams::IsStreamCapability(const Capabilities &capability) const {
@ -56,8 +146,35 @@ bool Streams::HasStreamDatas(const Stream &stream) const {
return stream_datas_map_.find(stream) != stream_datas_map_.end();
}
Streams::stream_datas_t &Streams::GetStreamDatas(const Stream &stream) {
return stream_datas_map_[stream];
// Allocate stream data using the request's own pixel format.
void Streams::AllocStreamData(
const Stream &stream, const StreamRequest &request) {
AllocStreamData(stream, request, request.format);
}
void Streams::AllocStreamData(
const Stream &stream, const StreamRequest &request, const Format &format) {
static std::size_t stream_data_limits_max = 4;
stream_data_t data;
data.img = std::shared_ptr<ImgData>(new ImgData{0, 0, 0});
data.frame =
std::make_shared<frame_t>(request.width, request.height, format, nullptr);
stream_datas_map_[stream].push_back(data);
// If cached more then limits_max, drop the oldest one.
if (stream_datas_map_.at(stream).size() > stream_data_limits_max) {
auto &&datas = stream_datas_map_[stream];
datas.erase(datas.begin());
VLOG(2) << "Stream data of " << stream << " is dropped";
}
}
bool Streams::HasKeyStreamDatas() const {
for (auto &&s : key_streams_) {
if (!HasStreamDatas(s))
return false;
if (stream_datas_map_.at(s).empty())
return false;
}
return true;
}
MYNTEYE_END_NAMESPACE

View File

@ -2,7 +2,10 @@
#define MYNTEYE_INTERNAL_STREAMS_H_
#pragma once
#include <condition_variable>
#include <functional>
#include <map>
#include <mutex>
#include <vector>
#include "mynteye/mynteye.h"
@ -12,21 +15,19 @@
MYNTEYE_BEGIN_NAMESPACE
namespace streams {
// Placeholder for a future per-stream abstraction; intentionally empty.
class Stream {
public:
};
} // namespace streams
class Streams {
public:
using frame_t = device::Frame;
using stream_data_t = device::StreamData;
using stream_datas_t = std::vector<stream_data_t>;
Streams();
using unpack_img_data_t = std::function<void(
const void *data, const StreamRequest &request, ImgData &img)>; // NOLINT
using unpack_img_pixels_t = std::function<void(
const void *data, const StreamRequest &request,
frame_t &frame)>; // NOLINT
explicit Streams(const std::vector<Stream> key_streams);
~Streams();
void ConfigStream(
@ -34,10 +35,12 @@ class Streams {
void PushStream(const Capabilities &capability, const void *data);
// void WaitForStreams() const;
void WaitForStreams();
// std::vector<StreamData> GetStreamData(const Stream &stream) const;
// StreamData GetLatestStreamData(const Stream &stream) const;
stream_datas_t GetStreamDatas(const Stream &stream);
stream_data_t GetLatestStreamData(const Stream &stream);
const stream_datas_t &stream_datas(const Stream &stream) const;
private:
bool IsStreamCapability(const Capabilities &capability) const;
@ -47,12 +50,25 @@ class Streams {
const Capabilities &capability) const;
bool HasStreamDatas(const Stream &stream) const;
stream_datas_t &GetStreamDatas(const Stream &stream);
void AllocStreamData(const Stream &stream, const StreamRequest &request);
void AllocStreamData(
const Stream &stream, const StreamRequest &request, const Format &format);
bool HasKeyStreamDatas() const;
std::vector<Stream> key_streams_;
std::vector<Capabilities> stream_capabilities_;
std::map<Capabilities, StreamRequest> stream_config_requests_;
std::map<Stream, unpack_img_data_t> unpack_img_data_map_;
std::map<Stream, unpack_img_pixels_t> unpack_img_pixels_map_;
std::map<Stream, stream_datas_t> stream_datas_map_;
std::mutex mtx_;
std::condition_variable cv_;
};
MYNTEYE_END_NAMESPACE

View File

@ -32,6 +32,7 @@ class Version {
using size_t = std::size_t;
using value_t = std::uint8_t;
Version() = default;
Version(value_t major, value_t minor) : major_(major), minor_(minor) {}
explicit Version(const std::string &name)
: major_(parse_part(name, 0)), minor_(parse_part(name, 1)) {}
@ -79,6 +80,7 @@ class HardwareVersion : public Version {
public:
using flag_t = std::bitset<8>;
HardwareVersion() = default;
HardwareVersion(value_t major, value_t minor, value_t flag = 0)
: Version(major, minor), flag_(flag) {}
explicit HardwareVersion(const std::string &name, value_t flag = 0)
@ -95,6 +97,7 @@ class Type {
using size_t = std::size_t;
using value_t = std::uint16_t;
Type() = default;
Type(value_t vendor, value_t product) : vendor_(vendor), product_(product) {}
explicit Type(const std::string &name)
: vendor_(parse_part(name, 0, 2)), product_(parse_part(name, 2, 2)) {}

View File

@ -121,6 +121,7 @@ const char *to_string(const Format &value) {
case Format::X: \
return "Format::" #X;
switch (value) {
CASE(GREY)
CASE(YUYV)
default:
return "Format::UNKNOWN";
@ -130,6 +131,8 @@ const char *to_string(const Format &value) {
std::size_t bytes_per_pixel(const Format &value) {
switch (value) {
case Format::GREY:
return 1;
case Format::YUYV:
return 2;
default:
@ -137,4 +140,9 @@ std::size_t bytes_per_pixel(const Format &value) {
}
}
// Stream a human-readable summary of a StreamRequest.
std::ostream &operator<<(std::ostream &os, const StreamRequest &request) {
  os << "width: " << request.width;
  os << ", height: " << request.height;
  os << ", format: " << request.format;
  os << ", fps: " << request.fps;
  return os;
}
MYNTEYE_END_NAMESPACE

View File

@ -236,7 +236,7 @@ struct device {
fmt.fmt.pix.height = height;
fmt.fmt.pix.pixelformat = format;
fmt.fmt.pix.field = V4L2_FIELD_NONE;
// fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
// fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
if (xioctl(fd, VIDIOC_S_FMT, &fmt) < 0)
LOG_ERROR(FATAL, "VIDIOC_S_FMT");

View File

@ -69,5 +69,6 @@ TEST(Source, VerifyToString) {
}
// Verify Format enumerators stringify to their qualified names.
TEST(Format, VerifyToString) {
EXPECT_STREQ("Format::GREY", to_string(Format::GREY));
EXPECT_STREQ("Format::YUYV", to_string(Format::YUYV));
}