Add uvc streaming with v4l2

John Zhao 2018-03-25 23:03:04 +08:00
parent bd8e03da5b
commit d7271ffe07
5 changed files with 487 additions and 7 deletions
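For orientation, here is a minimal sketch (not part of the diff) of the streaming flow this commit introduces, condensed from the camera.cc changes below; device selection, error handling and display are omitted.

#include "mynteye/mynteye.h"
#include "uvc/uvc.h"

MYNTEYE_USE_NAMESPACE

int main() {
  auto context = uvc::create_context();
  auto devices = uvc::query_devices(context);  // enumerate /dev/video* nodes
  auto device = devices.front();               // assume the first is a MYNT EYE

  // Register a frame callback: 752x480 @ 25 fps; fourcc 0 as in camera.cc
  // (the V4L2 backend below currently forces V4L2_PIX_FMT_YUYV anyway).
  uvc::set_device_mode(*device, 752, 480, 0, 25, [](const void *data) {
    (void)data;  // `data` points into a V4L2 mmap buffer owned by the driver
  });

  uvc::start_streaming(*device, 0);  // spawns the poll() thread
  // ... consume frames ...
  uvc::stop_streaming(*device);
  return 0;
}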

View File

@ -31,6 +31,9 @@ message(STATUS "Found mynteye: ${mynteye_VERSION}")
LIST(APPEND CMAKE_MODULE_PATH ${PRO_DIR}/cmake)
find_package(OpenCV REQUIRED)
message(STATUS "Found OpenCV: ${OpenCV_VERSION}")
# targets
include(${PRO_DIR}/cmake/Common.cmake)
@ -49,7 +52,7 @@ include_directories(
## camera
add_executable(camera camera.cc)
target_link_libraries(camera mynteye)
target_link_libraries(camera mynteye ${OpenCV_LIBS})
if(OS_WIN)
target_compile_definitions(camera

View File

@ -1,5 +1,14 @@
#include <glog/logging.h>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <chrono>
#include <condition_variable>
#include <iomanip>
#include <iostream>
#include <mutex>
#include "mynteye/mynteye.h"
#include "uvc/uvc.h"
@ -13,7 +22,7 @@ struct glog_init {
FLAGS_max_log_size = 1024;
FLAGS_stop_logging_if_full_disk = true;
FLAGS_v = 2;
// FLAGS_v = 2;
google::InitGoogleLogging(argv[0]);
@ -26,19 +35,116 @@ struct glog_init {
}
};
struct frame {
const void *data = nullptr;
~frame() {
data = nullptr;
}
};
MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
glog_init _(argc, argv);
std::vector<std::shared_ptr<uvc::device>> mynteye_devices;
auto context = uvc::create_context();
auto devices = uvc::query_devices(context);
LOG_IF(FATAL, devices.size() <= 0) << "No devices :(";
for (auto &&device : devices) {
auto vid = uvc::get_vendor_id(*device);
auto pid = uvc::get_product_id(*device);
LOG(INFO) << "vid: " << vid << ", pid: " << pid;
// auto pid = uvc::get_product_id(*device);
// LOG(INFO) << "vid: " << vid << ", pid: " << pid;
if (vid == MYNTEYE_VID) {
mynteye_devices.push_back(device);
}
}
// std::string dashes(80, '-');
size_t n = mynteye_devices.size();
LOG_IF(FATAL, n <= 0) << "No MYNT EYE devices :(";
for (size_t i = 0; i < n; i++) {
auto device = mynteye_devices[i];
auto name = uvc::get_name(*device);
auto vid = uvc::get_vendor_id(*device);
auto pid = uvc::get_product_id(*device);
LOG(INFO) << i << " | name: " << name << ", vid: " << vid
<< ", pid: " << pid;
}
std::shared_ptr<uvc::device> device = nullptr;
if (n <= 1) {
device = mynteye_devices[0];
LOG(INFO) << "Only one MYNT EYE device, select index: 0";
} else {
while (true) {
size_t i;
LOG(INFO) << "There are " << n << " MYNT EYE devices, select index: ";
std::cin >> i;
if (i >= n) {
LOG(WARNING) << "Index out of range :(";
continue;
}
device = mynteye_devices[i];
break;
}
}
std::mutex mtx;
std::condition_variable cv;
std::vector<frame> frames;
const auto frame_ready = [&frames]() { return !frames.empty(); };
const auto frame_empty = [&frames]() { return frames.empty(); };
uvc::set_device_mode(
*device, 752, 480, 0, 25,
[&mtx, &cv, &frames, &frame_ready](const void *data) {
// reinterpret_cast<const std::uint8_t *>(data);
std::unique_lock<std::mutex> lock(mtx);
frame frame;
frame.data = data; // not copy
frames.push_back(frame);
if (frame_ready())
cv.notify_one();
});
cv::namedWindow("frame");
uvc::start_streaming(*device, 0);
double t, fps = 0;
while (true) {
t = static_cast<double>(cv::getTickCount());
std::unique_lock<std::mutex> lock(mtx);
if (frame_empty()) {
if (!cv.wait_for(lock, std::chrono::seconds(2), frame_ready))
throw std::runtime_error("Timeout waiting for frame.");
}
auto frame = frames.back(); // only last one is valid
cv::Mat img(480, 752, CV_8UC2, const_cast<void *>(frame.data));
cv::cvtColor(img, img, cv::COLOR_YUV2BGR_YUY2);
cv::imshow("frame", img);
frames.clear();
char key = static_cast<char>(cv::waitKey(1));
if (key == 27 || key == 'q' || key == 'Q') { // ESC/Q
break;
}
t = static_cast<double>(cv::getTickCount() - t);
fps = cv::getTickFrequency() / t;
}
UNUSED(fps)
uvc::stop_streaming(*device);
// cv::destroyAllWindows();
return 0;
}
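Note that the callback above only stores the raw pointer ("not copy") into a buffer that poll() re-queues with VIDIOC_QBUF right after the callback returns, so the pixels can be overwritten by a later capture before the main loop displays them. A hedged variant (not in this commit) that deep-copies each frame inside the callback could look like the following; frame_copy_queue is a hypothetical helper name.

#include <mutex>
#include <vector>
#include <opencv2/core/core.hpp>

// Hypothetical helper, not part of this commit: copy each incoming frame out
// of the driver-owned mmap buffer while still inside the callback.
struct frame_copy_queue {
  std::mutex mtx;
  std::vector<cv::Mat> frames;

  void on_frame(const void *data, int width, int height) {
    // Wrap the YUYV buffer without copying, then clone before it is re-queued.
    cv::Mat yuyv(height, width, CV_8UC2, const_cast<void *>(data));
    std::lock_guard<std::mutex> lock(mtx);
    frames.push_back(yuyv.clone());
  }
};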

src/types.h (new file, 26 lines)
View File

@ -0,0 +1,26 @@
#ifndef MYNTEYE_TYPES_H_ // NOLINT
#define MYNTEYE_TYPES_H_
#pragma once
#include "mynteye/mynteye.h"
MYNTEYE_BEGIN_NAMESPACE
template <class T>
class big_endian {
T be_value;
public:
operator T() const { // convert to T from big to little endian
T le_value = 0;
for (unsigned int i = 0; i < sizeof(T); ++i) {
reinterpret_cast<char *>(&le_value)[i] =
reinterpret_cast<const char *>(&be_value)[sizeof(T) - i - 1];
}
return le_value;
}
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_TYPES_H_ NOLINT
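A standalone sketch of what the conversion operator does (the template body is duplicated here only so the snippet compiles on its own): the wrapped bytes are taken to be big-endian and are reversed into host order on read.

#include <cstdint>
#include <cstring>
#include <iostream>

template <class T>
class big_endian {
  T be_value;

 public:
  operator T() const {
    T le_value = 0;
    for (unsigned int i = 0; i < sizeof(T); ++i)
      reinterpret_cast<char *>(&le_value)[i] =
          reinterpret_cast<const char *>(&be_value)[sizeof(T) - i - 1];
    return le_value;
  }
};

int main() {
  std::uint32_t raw = 0x12345678;      // bytes in memory on a little-endian host: 78 56 34 12
  big_endian<std::uint32_t> be;
  std::memcpy(&be, &raw, sizeof(be));  // reinterpret those bytes as a big-endian value
  std::uint32_t host = be;             // conversion operator reverses the byte order
  std::cout << std::hex << host << "\n";  // prints 78563412
  return 0;
}

This is how set_device_mode in the V4L2 backend reads its fourcc argument: the cast to const big_endian<int> & reinterprets the caller's bytes, and the conversion swaps them before they are stored in device::format.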

View File

@ -3,18 +3,68 @@
#include <dirent.h>
#include <errno.h>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <linux/usb/video.h>
#include <linux/uvcvideo.h>
#include <linux/videodev2.h>
#include <glog/logging.h>
#include <chrono>
#include <fstream>
#include <string>
#include <thread>
#include "types.h" // NOLINT
MYNTEYE_BEGIN_NAMESPACE
namespace uvc {
#define LOG_ERROR(severity, str) \
do { \
LOG(severity) << str << " error " << errno << ", " << strerror(errno); \
} while (0)
/*
struct throw_error {
throw_error() {}
explicit throw_error(const std::string &s) {
ss << s;
}
~throw_error() {
throw std::runtime_error(ss.str());
}
template<class T>
throw_error &operator<<(const T &val) {
ss << val;
return *this;
}
std::ostringstream ss;
};
*/
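// Retry ioctl() as long as it is interrupted by a signal (EINTR).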
static int xioctl(int fh, int request, void *arg) {
int r;
do {
r = ioctl(fh, request, arg);
} while (r < 0 && errno == EINTR);
return r;
}
struct buffer {
void *start;
size_t length;
};
struct context {
context() {
VLOG(2) << __func__;
@ -34,6 +84,15 @@ struct device {
int vid, pid, mi; // Vendor ID, product ID, and multiple interface index
int fd = -1; // File descriptor for this device
int width, height, format, fps;
video_channel_callback callback = nullptr;
bool is_capturing = false;
std::vector<buffer> buffers;
std::thread thread;
volatile bool stop = false;
device(std::shared_ptr<context> parent, const std::string &name)
: parent(parent), dev_name("/dev/" + name) {
VLOG(2) << __func__ << ": " << dev_name;
@ -90,13 +149,241 @@ struct device {
LOG(FATAL) << "Cannot open '" << dev_name << "': " << errno << ", "
<< strerror(errno);
}
v4l2_capability cap = {};
if (xioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
if (errno == EINVAL)
LOG(FATAL) << dev_name << " is not a V4L2 device";
else
LOG_ERROR(FATAL, "VIDIOC_QUERYCAP");
}
if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
LOG(FATAL) << dev_name + " is not a video capture device";
if (!(cap.capabilities & V4L2_CAP_STREAMING))
LOG(FATAL) << dev_name + " does not support streaming I/O";
// Select video input, video standard and tune here.
v4l2_cropcap cropcap = {};
cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (xioctl(fd, VIDIOC_CROPCAP, &cropcap) == 0) {
v4l2_crop crop = {};
crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
crop.c = cropcap.defrect; // reset to default
if (xioctl(fd, VIDIOC_S_CROP, &crop) < 0) {
switch (errno) {
case EINVAL:
break; // Cropping not supported
default:
break; // Errors ignored
}
}
} else {
} // Errors ignored
}
~device() {
VLOG(2) << __func__;
if (fd != -1) {
LOG_IF(WARNING, close(fd) < 0) << "close error " << errno << ", "
<< strerror(errno);
stop_streaming();
if (fd != -1 && close(fd) < 0) {
LOG_ERROR(WARNING, "close");
}
}
void get_control(
const extension_unit &xu, uint8_t control, void *data,
size_t size) const {
uvc_xu_control_query q = {static_cast<uint8_t>(xu.unit), control,
UVC_GET_CUR, static_cast<uint16_t>(size),
reinterpret_cast<uint8_t *>(data)};
if (xioctl(fd, UVCIOC_CTRL_QUERY, &q) < 0) {
LOG_ERROR(FATAL, "UVCIOC_CTRL_QUERY:UVC_GET_CUR");
}
}
void set_control(
const extension_unit &xu, uint8_t control, void *data,
size_t size) const {
uvc_xu_control_query q = {static_cast<uint8_t>(xu.unit), control,
UVC_SET_CUR, static_cast<uint16_t>(size),
reinterpret_cast<uint8_t *>(data)};
if (xioctl(fd, UVCIOC_CTRL_QUERY, &q) < 0) {
LOG_ERROR(FATAL, "UVCIOC_CTRL_QUERY:UVC_SET_CUR");
}
}
void set_format(
int width, int height, int fourcc, int fps,
video_channel_callback callback) {
this->width = width;
this->height = height;
this->format = fourcc;
this->fps = fps;
this->callback = callback;
}
void start_capture() {
if (is_capturing) {
LOG(WARNING) << "start capture failed: is capturing already";
return;
}
v4l2_format fmt = {};
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmt.fmt.pix.width = width;
fmt.fmt.pix.height = height;
// fmt.fmt.pix.pixelformat = format;
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
fmt.fmt.pix.field = V4L2_FIELD_NONE;
// fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
if (xioctl(fd, VIDIOC_S_FMT, &fmt) < 0)
LOG_ERROR(FATAL, "VIDIOC_S_FMT");
v4l2_streamparm parm = {};
parm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (xioctl(fd, VIDIOC_G_PARM, &parm) < 0)
LOG_ERROR(FATAL, "VIDIOC_G_PARM");
parm.parm.capture.timeperframe.numerator = 1;
parm.parm.capture.timeperframe.denominator = fps;
if (xioctl(fd, VIDIOC_S_PARM, &parm) < 0)
LOG_ERROR(FATAL, "VIDIOC_S_PARM");
// Init memory mapped IO
v4l2_requestbuffers req = {};
req.count = 4;
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory = V4L2_MEMORY_MMAP;
if (xioctl(fd, VIDIOC_REQBUFS, &req) < 0) {
if (errno == EINVAL)
LOG(FATAL) << dev_name << " does not support memory mapping";
else
LOG_ERROR(FATAL, "VIDIOC_REQBUFS");
}
if (req.count < 2) {
LOG(FATAL) << "Insufficient buffer memory on " << dev_name;
}
buffers.resize(req.count);
for (size_t i = 0; i < buffers.size(); ++i) {
v4l2_buffer buf = {};
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = i;
if (xioctl(fd, VIDIOC_QUERYBUF, &buf) < 0)
LOG_ERROR(FATAL, "VIDIOC_QUERYBUF");
buffers[i].length = buf.length;
buffers[i].start = mmap(
NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd,
buf.m.offset);
if (buffers[i].start == MAP_FAILED)
LOG_ERROR(FATAL, "mmap");
}
// Start capturing
for (size_t i = 0; i < buffers.size(); ++i) {
v4l2_buffer buf = {};
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = i;
if (xioctl(fd, VIDIOC_QBUF, &buf) < 0)
LOG_ERROR(FATAL, "VIDIOC_QBUF");
}
v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
// The device may not be ready right after the buffers are queued, so retry
// VIDIOC_STREAMON a few times before treating failure as fatal.
for (int i = 0; i < 10; ++i) {
if (xioctl(fd, VIDIOC_STREAMON, &type) == 0)
break;
std::this_thread::sleep_for(std::chrono::milliseconds(100));
}
if (xioctl(fd, VIDIOC_STREAMON, &type) < 0)
LOG_ERROR(FATAL, "VIDIOC_STREAMON");
is_capturing = true;
}
void stop_capture() {
if (!is_capturing)
return;
// Stop streaming
v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (xioctl(fd, VIDIOC_STREAMOFF, &type) < 0)
LOG_ERROR(WARNING, "VIDIOC_STREAMOFF");
for (size_t i = 0; i < buffers.size(); i++) {
if (munmap(buffers[i].start, buffers[i].length) < 0)
LOG_ERROR(WARNING, "munmap");
}
// Close memory mapped IO
struct v4l2_requestbuffers req = {};
req.count = 0;
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory = V4L2_MEMORY_MMAP;
if (xioctl(fd, VIDIOC_REQBUFS, &req) < 0) {
if (errno == EINVAL)
LOG(ERROR) << dev_name << " does not support memory mapping";
else
LOG_ERROR(WARNING, "VIDIOC_REQBUFS");
}
callback = nullptr;
is_capturing = false;
}
void poll() {
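// select() below uses a 10 ms timeout, so this returns regularly and the
// streaming thread in start_streaming() can re-check the volatile stop flag.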
fd_set fds;
FD_ZERO(&fds);
FD_SET(fd, &fds);
struct timeval tv = {0, 10000};
if (select(fd + 1, &fds, NULL, NULL, &tv) < 0) {
if (errno == EINTR)
return;
LOG_ERROR(FATAL, "select");
}
if (FD_ISSET(fd, &fds)) {
v4l2_buffer buf = {};
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
if (xioctl(fd, VIDIOC_DQBUF, &buf) < 0) {
if (errno == EAGAIN)
return;
LOG_ERROR(FATAL, "VIDIOC_DQBUF");
}
if (callback) {
callback(buffers[buf.index].start);
}
if (xioctl(fd, VIDIOC_QBUF, &buf) < 0)
LOG_ERROR(FATAL, "VIDIOC_QBUF");
}
}
void start_streaming() {
if (!callback) {
LOG(WARNING) << __func__ << " failed: video_channel_callback is empty";
return;
}
start_capture();
thread = std::thread([this]() {
while (!stop)
poll();
});
}
void stop_streaming() {
if (thread.joinable()) {
stop = true;
thread.join();
stop = false;
stop_capture();
}
}
};
@ -139,6 +426,10 @@ std::vector<std::shared_ptr<device>> query_devices(
return devices;
}
std::string get_name(const device &device) {
return device.dev_name;
}
int get_vendor_id(const device &device) {
return device.vid;
}
@ -147,6 +438,36 @@ int get_product_id(const device &device) {
return device.pid;
}
void get_control(
const device &device, const extension_unit &xu, uint8_t ctrl, void *data,
int len) {
device.get_control(xu, ctrl, data, len);
}
void set_control(
const device &device, const extension_unit &xu, uint8_t ctrl, void *data,
int len) {
device.set_control(xu, ctrl, data, len);
}
void set_device_mode(
device &device, int width, int height, uint32_t fourcc, int fps, // NOLINT
video_channel_callback callback) {
device.set_format(
width, height, (const big_endian<int> &)fourcc, fps, callback);
}
void start_streaming(device &device, int /*num_transfer_bufs*/) { // NOLINT
device.start_streaming();
}
void stop_streaming(device &device) { // NOLINT
device.stop_streaming();
}
} // namespace uvc
MYNTEYE_END_NAMESPACE
// Video4Linux (V4L) driver-specific documentation
// https://linuxtv.org/downloads/v4l-dvb-apis/v4l-drivers/index.html

View File

@ -2,7 +2,9 @@
#define MYNTEYE_UVC_H_
#pragma once
#include <functional>
#include <memory>
#include <string>
#include <vector>
#include "mynteye/mynteye.h"
@ -14,6 +16,10 @@ MYNTEYE_BEGIN_NAMESPACE
namespace uvc {
struct extension_unit {
int unit;
};
struct context; // Opaque type representing access to the underlying UVC
// implementation
struct device; // Opaque type representing access to a specific UVC device
@ -24,9 +30,27 @@ std::vector<std::shared_ptr<device>> query_devices(
std::shared_ptr<context> context);
// Static device properties
std::string get_name(const device &device);
int get_vendor_id(const device &device);
int get_product_id(const device &device);
// Access XU controls
void get_control(
const device &device, const extension_unit &xu, uint8_t ctrl, void *data,
int len);
void set_control(
const device &device, const extension_unit &xu, uint8_t ctrl, void *data,
int len);
// Control streaming
typedef std::function<void(const void *frame)> video_channel_callback;
void set_device_mode(
device &device, int width, int height, uint32_t fourcc, int fps, // NOLINT
video_channel_callback callback);
void start_streaming(device &device, int num_transfer_bufs); // NOLINT
void stop_streaming(device &device); // NOLINT
} // namespace uvc
MYNTEYE_END_NAMESPACE
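A small hedged sketch of how the new XU control entry points could be called; the unit number, control selector and payload size here are placeholders for illustration, not real MYNT EYE values.

#include <cstdint>
#include "uvc/uvc.h"

MYNTEYE_USE_NAMESPACE

void query_vendor_control(const uvc::device &dev) {
  uvc::extension_unit xu{3};       // placeholder XU unit id
  const std::uint8_t ctrl = 0x01;  // placeholder control selector
  std::uint8_t data[2] = {0, 0};   // placeholder payload
  uvc::get_control(dev, xu, ctrl, data, sizeof(data));  // UVC_GET_CUR under the hood
  uvc::set_control(dev, xu, ctrl, data, sizeof(data));  // UVC_SET_CUR under the hood
}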