Rearrange include and src

John Zhao
2018-10-27 21:24:04 +08:00
parent 08271be063
commit 972ab79a76
72 changed files with 280 additions and 260 deletions

src/mynteye/api/api.cc Normal file (452 lines)

@@ -0,0 +1,452 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/api/api.h"
#ifdef WITH_BOOST_FILESYSTEM
#include <boost/filesystem.hpp>
#include <boost/range/iterator_range.hpp>
#endif
#include <algorithm>
#include <thread>
#include "mynteye/logger.h"
#include "mynteye/api/dl.h"
#include "mynteye/api/plugin.h"
#include "mynteye/api/synthetic.h"
#include "mynteye/device/device.h"
#include "mynteye/device/device_s.h"
#include "mynteye/device/utils.h"
#if defined(WITH_FILESYSTEM) && defined(WITH_NATIVE_FILESYSTEM)
#if defined(MYNTEYE_OS_WIN)
#include <windows.h>
#endif
#endif
MYNTEYE_BEGIN_NAMESPACE
namespace {
#if defined(WITH_FILESYSTEM)
#if defined(WITH_BOOST_FILESYSTEM)
namespace fs = boost::filesystem;
bool file_exists(const fs::path &p) {
try {
fs::file_status s = fs::status(p);
return fs::exists(s) && fs::is_regular_file(s);
} catch (fs::filesystem_error &e) {
LOG(ERROR) << e.what();
return false;
}
}
bool dir_exists(const fs::path &p) {
try {
fs::file_status s = fs::status(p);
return fs::exists(s) && fs::is_directory(s);
} catch (fs::filesystem_error &e) {
LOG(ERROR) << e.what();
return false;
}
}
#elif defined(WITH_NATIVE_FILESYSTEM)
#if defined(MYNTEYE_OS_WIN)
bool file_exists(const std::string &p) {
DWORD attrs = GetFileAttributes(p.c_str());
return (attrs != INVALID_FILE_ATTRIBUTES) &&
!(attrs & FILE_ATTRIBUTE_DIRECTORY);
}
bool dir_exists(const std::string &p) {
DWORD attrs = GetFileAttributes(p.c_str());
return (attrs != INVALID_FILE_ATTRIBUTES) &&
(attrs & FILE_ATTRIBUTE_DIRECTORY);
}
#else
#error "Unsupported native filesystem"
#endif
#endif
std::vector<std::string> get_plugin_paths() {
std::string info_path(MYNTEYE_SDK_INSTALL_DIR);
info_path.append(MYNTEYE_OS_SEP "share" MYNTEYE_OS_SEP "mynteye" MYNTEYE_OS_SEP "build.info");
cv::FileStorage fs(info_path, cv::FileStorage::READ);
if (!fs.isOpened()) {
LOG(WARNING) << "build.info not found: " << info_path;
return {};
}
auto to_lower = [](std::string &s) { // NOLINT
std::transform(s.begin(), s.end(), s.begin(), ::tolower);
};
std::string host_os = fs["HOST_OS"];
to_lower(host_os);
std::string host_name = fs["HOST_NAME"];
to_lower(host_name);
std::string host_arch = fs["HOST_ARCH"];
to_lower(host_arch);
std::string host_compiler = fs["HOST_COMPILER"];
to_lower(host_compiler);
// std::string compiler_version = fs["COMPILER_VERSION"];
int compiler_version_major = fs["COMPILER_VERSION_MAJOR"];
// int compiler_version_minor = fs["COMPILER_VERSION_MINOR"];
// int compiler_version_patch = fs["COMPILER_VERSION_PATCH"];
// int compiler_version_tweak = fs["COMPILER_VERSION_TWEAK"];
std::string cuda_version = fs["CUDA_VERSION"];
// int cuda_version_major = fs["CUDA_VERSION_MAJOR"];
// int cuda_version_minor = fs["CUDA_VERSION_MINOR"];
// std::string cuda_version_string = fs["CUDA_VERSION_STRING"];
std::string opencv_version = fs["OpenCV_VERSION"];
// int opencv_version_major = fs["OpenCV_VERSION_MAJOR"];
// int opencv_version_minor = fs["OpenCV_VERSION_MINOR"];
// int opencv_version_patch = fs["OpenCV_VERSION_PATCH"];
// int opencv_version_tweak = fs["OpenCV_VERSION_TWEAK"];
// std::string opencv_version_status = fs["OpenCV_VERSION_STATUS"];
std::string opencv_with_world = fs["OpenCV_WITH_WORLD"];
to_lower(opencv_with_world);
std::string mynteye_version = fs["MYNTEYE_VERSION"];
// int mynteye_version_major = fs["MYNTEYE_VERSION_MAJOR"];
// int mynteye_version_minor = fs["MYNTEYE_VERSION_MINOR"];
// int mynteye_version_patch = fs["MYNTEYE_VERSION_PATCH"];
// int mynteye_version_tweak = fs["MYNTEYE_VERSION_TWEAK"];
fs.release();
std::string lib_prefix;
std::string lib_suffix;
if (host_os == "linux") {
if (host_compiler != "gnu" || compiler_version_major < 5)
return {};
lib_prefix = "lib";
lib_suffix = ".so";
} else if (host_os == "win") {
lib_prefix = "";
lib_suffix = ".dll";
} else if (host_os == "mac") {
lib_prefix = "lib";
lib_suffix = ".dylib";
} else {
return {};
}
std::vector<std::string> names;
{
std::vector<std::string> prefixes{
// lib_prefix + "plugin_b_ocl" + ocl_version,
lib_prefix + "plugin_g_cuda" + cuda_version,
};
std::string opencv_name("_opencv" + opencv_version);
if (opencv_with_world == "true") {
opencv_name.append("-world");
}
for (auto &&prefix : prefixes) {
names.push_back(prefix + opencv_name + "_mynteye" + mynteye_version);
names.push_back(prefix + opencv_name);
names.push_back(prefix);
}
for (auto &&name : names) {
name.append(lib_suffix);
}
}
std::vector<std::string> paths;
std::vector<std::string> plats;
if (host_name != host_os) {
plats.push_back(host_name + "-" + host_arch);
}
plats.push_back(host_os + "-" + host_arch);
std::vector<std::string> dirs{MYNTEYE_SDK_ROOT_DIR, MYNTEYE_SDK_INSTALL_DIR};
for (auto &&plat : plats) {
for (auto &&dir : dirs) {
auto &&plat_dir = dir + MYNTEYE_OS_SEP "plugins" + MYNTEYE_OS_SEP + plat;
// VLOG(2) << "plat_dir: " << plat_dir;
if (!dir_exists(plat_dir))
continue;
for (auto &&name : names) {
// VLOG(2) << " name: " << name;
auto &&path = plat_dir + MYNTEYE_OS_SEP + name;
if (!file_exists(path))
continue;
paths.push_back(path);
}
}
}
return paths;
}
#endif
} // namespace
API::API(std::shared_ptr<Device> device) : device_(device) {
VLOG(2) << __func__;
if (std::dynamic_pointer_cast<StandardDevice>(device_) != nullptr) {
bool in_l_ok, in_r_ok, ex_r2l_ok;
device_->GetIntrinsics(Stream::LEFT, &in_l_ok);
device_->GetIntrinsics(Stream::RIGHT, &in_r_ok);
device_->GetExtrinsics(Stream::RIGHT, Stream::LEFT, &ex_r2l_ok);
if (!in_l_ok || !in_r_ok || !ex_r2l_ok) {
#if defined(WITH_DEVICE_INFO_REQUIRED)
LOG(FATAL)
#else
LOG(WARNING)
#endif
<< "Image params not found, but we need it to process the "
"images. Please `make tools` and use `img_params_writer` "
"to write the image params. If you update the SDK from "
"1.x, the `SN*.conf` is the file contains them. Besides, "
"you could also calibrate them by yourself. Read the guide "
"doc (https://github.com/slightech/MYNT-EYE-SDK-2-Guide) "
"to learn more.";
}
}
synthetic_.reset(new Synthetic(this));
}
API::~API() {
VLOG(2) << __func__;
}
std::shared_ptr<API> API::Create() {
return Create(device::select());
}
std::shared_ptr<API> API::Create(std::shared_ptr<Device> device) {
if (!device)
return nullptr;
return std::make_shared<API>(device);
}
std::shared_ptr<API> API::Create(int argc, char *argv[]) {
static glog_init _(argc, argv);
auto &&device = device::select();
if (!device)
return nullptr;
return std::make_shared<API>(device);
}
std::shared_ptr<API> API::Create(
int argc, char *argv[], std::shared_ptr<Device> device) {
static glog_init _(argc, argv);
if (!device)
return nullptr;
return std::make_shared<API>(device);
}
Model API::GetModel() const {
return device_->GetModel();
}
bool API::Supports(const Stream &stream) const {
return synthetic_->Supports(stream);
}
bool API::Supports(const Capabilities &capability) const {
return device_->Supports(capability);
}
bool API::Supports(const Option &option) const {
return device_->Supports(option);
}
bool API::Supports(const AddOns &addon) const {
return device_->Supports(addon);
}
const std::vector<StreamRequest> &API::GetStreamRequests(
const Capabilities &capability) const {
return device_->GetStreamRequests(capability);
}
void API::ConfigStreamRequest(
const Capabilities &capability, const StreamRequest &request) {
device_->ConfigStreamRequest(capability, request);
}
std::string API::GetInfo(const Info &info) const {
return device_->GetInfo(info);
}
Intrinsics API::GetIntrinsics(const Stream &stream) const {
return device_->GetIntrinsics(stream);
}
Extrinsics API::GetExtrinsics(const Stream &from, const Stream &to) const {
return device_->GetExtrinsics(from, to);
}
MotionIntrinsics API::GetMotionIntrinsics() const {
return device_->GetMotionIntrinsics();
}
Extrinsics API::GetMotionExtrinsics(const Stream &from) const {
return device_->GetMotionExtrinsics(from);
}
void API::LogOptionInfos() const {
device_->LogOptionInfos();
}
OptionInfo API::GetOptionInfo(const Option &option) const {
return device_->GetOptionInfo(option);
}
std::int32_t API::GetOptionValue(const Option &option) const {
return device_->GetOptionValue(option);
}
void API::SetOptionValue(const Option &option, std::int32_t value) {
device_->SetOptionValue(option, value);
}
bool API::RunOptionAction(const Option &option) const {
return device_->RunOptionAction(option);
}
void API::SetStreamCallback(const Stream &stream, stream_callback_t callback) {
synthetic_->SetStreamCallback(stream, callback);
}
void API::SetMotionCallback(motion_callback_t callback) {
static motion_callback_t callback_ = nullptr;
callback_ = callback;  // assign on every call so the callback can be replaced or cleared
if (callback_) {
device_->SetMotionCallback(
[](const device::MotionData &data) { callback_({data.imu}); }, true);
} else {
device_->SetMotionCallback(nullptr);
}
}
bool API::HasStreamCallback(const Stream &stream) const {
return synthetic_->HasStreamCallback(stream);
}
bool API::HasMotionCallback() const {
return device_->HasMotionCallback();
}
void API::Start(const Source &source) {
if (source == Source::VIDEO_STREAMING) {
#ifdef WITH_FILESYSTEM
if (!synthetic_->HasPlugin()) {
try {
auto &&plugin_paths = get_plugin_paths();
if (plugin_paths.size() > 0) {
EnablePlugin(plugin_paths[0]);
}
} catch (...) {
LOG(WARNING) << "Incorrect yaml format: build.info";
}
}
#endif
synthetic_->StartVideoStreaming();
} else if (source == Source::MOTION_TRACKING) {
device_->StartMotionTracking();
} else if (source == Source::ALL) {
Start(Source::VIDEO_STREAMING);
Start(Source::MOTION_TRACKING);
} else {
LOG(ERROR) << "Unsupported source :(";
}
}
void API::Stop(const Source &source) {
if (source == Source::VIDEO_STREAMING) {
synthetic_->StopVideoStreaming();
} else if (source == Source::MOTION_TRACKING) {
device_->StopMotionTracking();
} else if (source == Source::ALL) {
Stop(Source::MOTION_TRACKING);
// Must stop motion tracking before video streaming and sleep a moment here
std::this_thread::sleep_for(std::chrono::milliseconds(10));
Stop(Source::VIDEO_STREAMING);
} else {
LOG(ERROR) << "Unsupported source :(";
}
}
void API::WaitForStreams() {
synthetic_->WaitForStreams();
}
void API::EnableStreamData(const Stream &stream) {
synthetic_->EnableStreamData(stream);
}
void API::DisableStreamData(const Stream &stream) {
synthetic_->DisableStreamData(stream);
}
api::StreamData API::GetStreamData(const Stream &stream) {
return synthetic_->GetStreamData(stream);
}
std::vector<api::StreamData> API::GetStreamDatas(const Stream &stream) {
return synthetic_->GetStreamDatas(stream);
}
void API::EnableMotionDatas(std::size_t max_size) {
device_->EnableMotionDatas(max_size);
}
std::vector<api::MotionData> API::GetMotionDatas() {
std::vector<api::MotionData> datas;
for (auto &&data : device_->GetMotionDatas()) {
datas.push_back({data.imu});
}
return datas;
}
void API::EnablePlugin(const std::string &path) {
static DL dl;
CHECK(dl.Open(path.c_str())) << "Open plugin failed: " << path;
plugin_version_code_t *plugin_version_code =
dl.Sym<plugin_version_code_t>("plugin_version_code");
LOG(INFO) << "Enable plugin success";
LOG(INFO) << " version code: " << plugin_version_code();
LOG(INFO) << " path: " << path;
plugin_create_t *plugin_create = dl.Sym<plugin_create_t>("plugin_create");
plugin_destroy_t *plugin_destroy = dl.Sym<plugin_destroy_t>("plugin_destroy");
std::shared_ptr<Plugin> plugin(plugin_create(), plugin_destroy);
plugin->OnCreate(this);
synthetic_->SetPlugin(plugin);
}
std::shared_ptr<Device> API::device() {
return device_;
}
MYNTEYE_END_NAMESPACE
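For reference, a minimal consumer of the API above could look like the following sketch. It is illustrative only and not part of this commit; it assumes the members shown above are declared in the public header "mynteye/api/api.h" and that the convenience macro MYNTEYE_USE_NAMESPACE comes from "mynteye/mynteye.h".

// Minimal usage sketch (illustrative, not part of this commit).
#include "mynteye/api/api.h"

MYNTEYE_USE_NAMESPACE

int main(int argc, char *argv[]) {
  // Create(argc, argv) initializes logging and selects a device.
  auto &&api = API::Create(argc, argv);
  if (!api) return 1;
  // Enabling a synthetic stream activates the processors it depends on.
  api->EnableStreamData(Stream::DISPARITY_NORMALIZED);
  api->Start(Source::VIDEO_STREAMING);
  for (int i = 0; i < 100; ++i) {
    api->WaitForStreams();
    auto &&data = api->GetStreamData(Stream::DISPARITY_NORMALIZED);
    MYNTEYE_UNUSED(data)  // carries the cv::Mat produced by the pipeline
  }
  api->Stop(Source::VIDEO_STREAMING);
  return 0;
}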

src/mynteye/api/dl.cc Normal file (129 lines)

@@ -0,0 +1,129 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/api/dl.h"
#include "mynteye/logger.h"
MYNTEYE_BEGIN_NAMESPACE
#if defined(MYNTEYE_OS_WIN) && !defined(MYNTEYE_OS_MINGW) && \
!defined(MYNTEYE_OS_CYGWIN)
namespace {
// How to get the error message from the error code returned by GetLastError()?
// https://stackoverflow.com/questions/9272415/how-to-convert-dword-to-char
std::string GetLastErrorAsString() {
DWORD dw = ::GetLastError();
if (dw == 0)
return std::string();
LPSTR lpMsgBuf;
size_t size = FormatMessageA(
FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_FROM_SYSTEM |
FORMAT_MESSAGE_IGNORE_INSERTS,
NULL, dw, MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT), (LPSTR)&lpMsgBuf, 0,
NULL);
std::string message(lpMsgBuf, size);
LocalFree(lpMsgBuf);
return message;
}
} // namespace
#endif
DL::DL() : handle(nullptr) {
VLOG(2) << __func__;
}
DL::DL(const char *filename) : handle(nullptr) {
VLOG(2) << __func__;
Open(filename);
}
DL::~DL() {
VLOG(2) << __func__;
Close();
}
bool DL::Open(const char *filename) {
if (handle != nullptr) {
VLOG(2) << "Already opened, do nothing";
// Close();
return false;
}
#if defined(MYNTEYE_OS_WIN) && !defined(MYNTEYE_OS_MINGW) && \
!defined(MYNTEYE_OS_CYGWIN)
handle = LoadLibraryEx(filename, nullptr, 0);
#else
handle = dlopen(filename, RTLD_LAZY);
#endif
if (handle == nullptr) {
VLOG(2) << "Open library failed: " << filename;
return false;
} else {
return true;
}
}
bool DL::IsOpened() {
return handle != nullptr;
}
void *DL::Sym(const char *symbol) {
if (handle == nullptr) {
VLOG(2) << "Not opened, do nothing";
return nullptr;
}
#if defined(MYNTEYE_OS_WIN) && !defined(MYNTEYE_OS_MINGW) && !defined(MYNTEYE_OS_CYGWIN)
void *f = GetProcAddress(handle, symbol);
if (f == nullptr) {
VLOG(2) << "Load symbol failed: " << symbol;
}
#else
dlerror(); // reset errors
void *f = dlsym(handle, symbol);
const char *error = dlerror();
if (error != nullptr) {
VLOG(2) << "Load symbol failed: " << symbol;
f = nullptr;
}
#endif
return f;
}
int DL::Close() {
int ret = 0;
if (handle == nullptr) {
VLOG(2) << "Not opened, do nothing";
} else {
#if defined(MYNTEYE_OS_WIN) && !defined(MYNTEYE_OS_MINGW) && !defined(MYNTEYE_OS_CYGWIN)
ret = FreeLibrary(handle) ? 0 : 1;
#else
ret = dlclose(handle);
#endif
handle = nullptr;
}
return ret;
}
const char *DL::Error() {
#if defined(MYNTEYE_OS_WIN) && !defined(MYNTEYE_OS_MINGW) && !defined(MYNTEYE_OS_CYGWIN)
// Copy into a static buffer; returning c_str() of a temporary would dangle.
static std::string error_message;
error_message = GetLastErrorAsString();
return error_message.c_str();
#else
return dlerror();
#endif
}
MYNTEYE_END_NAMESPACE

src/mynteye/api/dl.h Normal file (71 lines)

@@ -0,0 +1,71 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_API_DL_H_
#define MYNTEYE_API_DL_H_
#pragma once
#include "mynteye/mynteye.h"
#if defined(MYNTEYE_OS_WIN) && !defined(MYNTEYE_OS_MINGW) && \
!defined(MYNTEYE_OS_CYGWIN)
#include <Windows.h>
#else
#include <dlfcn.h>
#endif
MYNTEYE_BEGIN_NAMESPACE
#if defined(MYNTEYE_OS_WIN) && !defined(MYNTEYE_OS_MINGW) && \
!defined(MYNTEYE_OS_CYGWIN)
using DLLIB = HMODULE;
#else
using DLLIB = void *;
#endif
// Dynamic loading
// https://en.wikipedia.org/wiki/Dynamic_loading
// C++ dlopen mini HOWTO
// http://tldp.org/HOWTO/C++-dlopen/
class MYNTEYE_API DL {
public:
DL();
explicit DL(const char *filename);
~DL();
bool Open(const char *filename);
bool IsOpened();
void *Sym(const char *symbol);
template <typename Func>
Func *Sym(const char *symbol);
int Close();
const char *Error();
private:
DLLIB handle;
};
template <typename Func>
Func *DL::Sym(const char *symbol) {
void *f = Sym(symbol);
return reinterpret_cast<Func *>(f);
}
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_API_DL_H_
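As a usage note, the wrapper can be exercised as in the sketch below; the library path and exported symbol are hypothetical and only illustrate the typed Sym<>() lookup used by API::EnablePlugin() above. MYNTEYE_USE_NAMESPACE is assumed to come from "mynteye/mynteye.h".

// Hypothetical usage sketch of DL; the library and symbol names are made up.
#include "mynteye/api/dl.h"

MYNTEYE_USE_NAMESPACE

int load_and_call() {
  DL dl;
  if (!dl.Open("libplugin_sample.so"))  // hypothetical library path
    return -1;
  using add_fn = int(int, int);
  add_fn *add = dl.Sym<add_fn>("add");  // hypothetical exported symbol
  int result = (add != nullptr) ? add(1, 2) : -1;
  dl.Close();
  return result;
}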

src/mynteye/api/processor.cc Normal file (247 lines)

@@ -0,0 +1,247 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/api/processor.h"
#include <exception>
#include <utility>
#include "mynteye/logger.h"
#include "mynteye/util/strings.h"
#include "mynteye/util/times.h"
MYNTEYE_BEGIN_NAMESPACE
Processor::Processor(std::int32_t proc_period)
: proc_period_(std::move(proc_period)),
activated_(false),
input_ready_(false),
idle_(true),
dropped_count_(0),
input_(nullptr),
output_(nullptr),
output_result_(nullptr),
pre_callback_(nullptr),
post_callback_(nullptr),
callback_(nullptr),
parent_(nullptr) {
VLOG(2) << __func__;
}
Processor::~Processor() {
VLOG(2) << __func__;
Deactivate();
input_.reset(nullptr);
output_.reset(nullptr);
output_result_.reset(nullptr);
childs_.clear();
}
std::string Processor::Name() {
return "Processor";
}
void Processor::AddChild(const std::shared_ptr<Processor> &child) {
child->parent_ = this;
childs_.push_back(child);
}
void Processor::RemoveChild(const std::shared_ptr<Processor> &child) {
childs_.remove(child);
}
std::list<std::shared_ptr<Processor>> Processor::GetChilds() {
return childs_;
}
void Processor::SetPreProcessCallback(PreProcessCallback callback) {
pre_callback_ = std::move(callback);
}
void Processor::SetPostProcessCallback(PostProcessCallback callback) {
post_callback_ = std::move(callback);
}
void Processor::SetProcessCallback(ProcessCallback callback) {
callback_ = std::move(callback);
}
void Processor::Activate(bool parents) {
if (activated_)
return;
if (parents) {
// Activate all parents
Processor *parent = parent_;
while (parent != nullptr) {
parent->Activate();
parent = parent->parent_;
}
}
activated_ = true;
thread_ = std::thread(&Processor::Run, this);
// thread_.detach();
}
void Processor::Deactivate(bool childs) {
if (!activated_)
return;
if (childs) {
// Deactivate all childs
iterate_processors(GetChilds(), [](std::shared_ptr<Processor> proc) {
proc->Deactivate();
});
}
activated_ = false;
{
std::lock_guard<std::mutex> lk(mtx_input_ready_);
input_ready_ = true;
}
cond_input_ready_.notify_all();
thread_.join();
}
bool Processor::IsActivated() {
return activated_;
}
bool Processor::IsIdle() {
std::lock_guard<std::mutex> lk(mtx_state_);
return idle_;
}
bool Processor::Process(const Object &in) {
if (!activated_)
return false;
if (!idle_) {
std::lock_guard<std::mutex> lk(mtx_state_);
if (!idle_) {
++dropped_count_;
return false;
}
}
if (!in.DecValidity()) {
LOG(WARNING) << Name() << " process with invalid input";
return false;
}
{
std::lock_guard<std::mutex> lk(mtx_input_ready_);
input_.reset(in.Clone());
input_ready_ = true;
}
cond_input_ready_.notify_all();
return true;
}
std::shared_ptr<Object> Processor::GetOutput() {
std::lock_guard<std::mutex> lk(mtx_result_);
return std::shared_ptr<Object>(std::move(output_result_));
}
std::uint64_t Processor::GetDroppedCount() {
std::lock_guard<std::mutex> lk(mtx_state_);
return dropped_count_;
}
void Processor::Run() {
VLOG(2) << Name() << " thread start";
auto sleep = [this](const times::system_clock::time_point &time_beg) {
if (proc_period_ > 0) {
static times::system_clock::time_point time_prev = time_beg;
auto &&time_elapsed_ms =
times::count<times::milliseconds>(times::now() - time_prev);
time_prev = time_beg;
if (time_elapsed_ms < proc_period_) {
VLOG(2) << Name() << " process cost "
<< times::count<times::milliseconds>(times::now() - time_beg)
<< " ms, sleep " << (proc_period_ - time_elapsed_ms) << " ms";
std::this_thread::sleep_for(
std::chrono::milliseconds(proc_period_ - time_elapsed_ms));
return;
}
}
VLOG(2) << Name() << " process cost "
<< times::count<times::milliseconds>(times::now() - time_beg)
<< " ms";
};
while (true) {
std::unique_lock<std::mutex> lk(mtx_input_ready_);
cond_input_ready_.wait(lk, [this] { return input_ready_; });
auto &&time_beg = times::now();
if (!activated_) {
SetIdle(true);
input_ready_ = false;
break;
}
SetIdle(false);
if (!output_) {
output_.reset(OnCreateOutput());
}
if (pre_callback_) {
pre_callback_(input_.get());
}
bool ok = false;
try {
if (callback_) {
if (callback_(input_.get(), output_.get(), parent_)) {
ok = true;
} else {
ok = OnProcess(input_.get(), output_.get(), parent_);
}
} else {
ok = OnProcess(input_.get(), output_.get(), parent_);
}
// CV_Assert(false);
} catch (const std::exception &e) {
std::string msg(e.what());
strings::rtrim(msg);
LOG(ERROR) << Name() << " process error \"" << msg << "\"";
}
if (!ok) {
VLOG(2) << Name() << " process failed";
continue;
}
if (post_callback_) {
post_callback_(output_.get());
}
{
std::unique_lock<std::mutex> lk(mtx_result_);
output_result_.reset(output_->Clone());
}
if (!childs_.empty()) {
for (auto child : childs_) {
child->Process(*output_);
}
}
SetIdle(true);
input_ready_ = false;
sleep(time_beg);
}
VLOG(2) << Name() << " thread end";
}
void Processor::SetIdle(bool idle) {
std::lock_guard<std::mutex> lk(mtx_state_);
idle_ = idle;
}
MYNTEYE_END_NAMESPACE

src/mynteye/api/processor.h Normal file (121 lines)

@@ -0,0 +1,121 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_API_PROCESSOR_H_
#define MYNTEYE_API_PROCESSOR_H_
#pragma once
#include <condition_variable>
#include <cstdint>
#include <functional>
#include <list>
#include <memory>
#include <mutex>
#include <string>
#include <thread>
#include "mynteye/mynteye.h"
#include "mynteye/api/object.h"
MYNTEYE_BEGIN_NAMESPACE
class Processor /*: public std::enable_shared_from_this<Processor>*/ {
public:
using PreProcessCallback = std::function<void(Object *const)>;
using PostProcessCallback = std::function<void(Object *const)>;
using ProcessCallback = std::function<bool(
Object *const in, Object *const out, Processor *const parent)>;
explicit Processor(std::int32_t proc_period = 0);
virtual ~Processor();
virtual std::string Name();
void AddChild(const std::shared_ptr<Processor> &child);
void RemoveChild(const std::shared_ptr<Processor> &child);
std::list<std::shared_ptr<Processor>> GetChilds();
void SetPreProcessCallback(PreProcessCallback callback);
void SetPostProcessCallback(PostProcessCallback callback);
void SetProcessCallback(ProcessCallback callback);
void Activate(bool parents = false);
void Deactivate(bool childs = false);
bool IsActivated();
bool IsIdle();
/** Returns dropped or not. */
bool Process(const Object &in);
/**
* Returns the last output.
* @note Returns null if not output now.
*/
std::shared_ptr<Object> GetOutput();
std::uint64_t GetDroppedCount();
protected:
virtual Object *OnCreateOutput() = 0;
virtual bool OnProcess(
Object *const in, Object *const out, Processor *const parent) = 0;
private:
/** Run in standalone thread. */
void Run();
void SetIdle(bool idle);
std::int32_t proc_period_;
bool activated_;
bool input_ready_;
std::mutex mtx_input_ready_;
std::condition_variable cond_input_ready_;
bool idle_;
std::uint64_t dropped_count_;
std::mutex mtx_state_;
std::unique_ptr<Object> input_;
std::unique_ptr<Object> output_;
std::unique_ptr<Object> output_result_;
std::mutex mtx_result_;
PreProcessCallback pre_callback_;
PostProcessCallback post_callback_;
ProcessCallback callback_;
Processor *parent_;
std::list<std::shared_ptr<Processor>> childs_;
std::thread thread_;
};
template <typename T>
void iterate_processors(
const T &processors, std::function<void(std::shared_ptr<Processor>)> fn) {
for (auto &&proc : processors) {
fn(proc);
iterate_processors(proc->GetChilds(), fn);
}
}
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_API_PROCESSOR_H_
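To illustrate the contract above, a hypothetical single-stage processor could be written as follows; ObjMat (a cv::Mat value plus an id) is assumed from "mynteye/api/object.h", matching how it is used elsewhere in this commit.

// Hypothetical example processor; a sketch, not part of the SDK.
#include <string>
#include <opencv2/imgproc/imgproc.hpp>
#include "mynteye/api/object.h"
#include "mynteye/api/processor.h"

MYNTEYE_BEGIN_NAMESPACE

class BlurProcessor : public Processor {
 public:
  std::string Name() override { return "BlurProcessor"; }

 protected:
  Object *OnCreateOutput() override { return new ObjMat(); }
  bool OnProcess(Object *const in, Object *const out,
                 Processor *const parent) override {
    MYNTEYE_UNUSED(parent)
    const ObjMat *input = Object::Cast<ObjMat>(in);
    ObjMat *output = Object::Cast<ObjMat>(out);
    cv::blur(input->value, output->value, cv::Size(3, 3));
    output->id = input->id;
    return true;
  }
};

MYNTEYE_END_NAMESPACE

Once constructed as a std::shared_ptr, Activate() starts the worker thread, Process() hands it a cloned input, and GetOutput() returns the most recent result (or nullptr if none is ready yet), which is how Synthetic drives the built-in processors below.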

src/mynteye/api/processor/depth_processor.cc Normal file (53 lines)

@@ -0,0 +1,53 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/api/processor/depth_processor.h"
#include <utility>
#include "mynteye/logger.h"
MYNTEYE_BEGIN_NAMESPACE
const char DepthProcessor::NAME[] = "DepthProcessor";
DepthProcessor::DepthProcessor(std::int32_t proc_period)
: Processor(std::move(proc_period)) {
VLOG(2) << __func__ << ": proc_period=" << proc_period;
}
DepthProcessor::~DepthProcessor() {
VLOG(2) << __func__;
}
std::string DepthProcessor::Name() {
return NAME;
}
Object *DepthProcessor::OnCreateOutput() {
return new ObjMat();
}
bool DepthProcessor::OnProcess(
Object *const in, Object *const out, Processor *const parent) {
MYNTEYE_UNUSED(parent)
const ObjMat *input = Object::Cast<ObjMat>(in);
ObjMat *output = Object::Cast<ObjMat>(out);
cv::Mat channels[3 /*input->value.channels()*/];
cv::split(input->value, channels);
channels[2].convertTo(output->value, CV_16UC1);
output->id = input->id;
return true;
}
MYNTEYE_END_NAMESPACE
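For illustration only, one value of the CV_16UC1 depth image produced above can be read as below. Interpreting the value as millimetres is an assumption about the device scale, not something this file establishes.

// Illustrative sketch: sample one depth value from DepthProcessor's output.
#include <cstdint>
#include <opencv2/core/core.hpp>

inline std::uint16_t depth_at(const cv::Mat &depth, int y, int x) {
  return depth.at<std::uint16_t>(y, x);  // CV_16UC1, likely millimetres
}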

src/mynteye/api/processor/depth_processor.h Normal file (41 lines)

@@ -0,0 +1,41 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_API_PROCESSOR_DEPTH_PROCESSOR_H_
#define MYNTEYE_API_PROCESSOR_DEPTH_PROCESSOR_H_
#pragma once
#include <string>
#include "mynteye/api/processor.h"
MYNTEYE_BEGIN_NAMESPACE
class DepthProcessor : public Processor {
public:
static const char NAME[];
explicit DepthProcessor(std::int32_t proc_period = 0);
virtual ~DepthProcessor();
std::string Name() override;
protected:
Object *OnCreateOutput() override;
bool OnProcess(
Object *const in, Object *const out, Processor *const parent) override;
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_API_PROCESSOR_DEPTH_PROCESSOR_H_

src/mynteye/api/processor/disparity_normalized_processor.cc Normal file (56 lines)

@@ -0,0 +1,56 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/api/processor/disparity_normalized_processor.h"
#include <utility>
#include <opencv2/imgproc/imgproc.hpp>
#include "mynteye/logger.h"
MYNTEYE_BEGIN_NAMESPACE
const char DisparityNormalizedProcessor::NAME[] =
"DisparityNormalizedProcessor";
DisparityNormalizedProcessor::DisparityNormalizedProcessor(
std::int32_t proc_period)
: Processor(std::move(proc_period)) {
VLOG(2) << __func__ << ": proc_period=" << proc_period;
}
DisparityNormalizedProcessor::~DisparityNormalizedProcessor() {
VLOG(2) << __func__;
}
std::string DisparityNormalizedProcessor::Name() {
return NAME;
}
Object *DisparityNormalizedProcessor::OnCreateOutput() {
return new ObjMat();
}
bool DisparityNormalizedProcessor::OnProcess(
Object *const in, Object *const out, Processor *const parent) {
MYNTEYE_UNUSED(parent)
const ObjMat *input = Object::Cast<ObjMat>(in);
ObjMat *output = Object::Cast<ObjMat>(out);
cv::normalize(input->value, output->value, 0, 255, cv::NORM_MINMAX, CV_8UC1);
// cv::normalize may produce an empty mat; report success only if non-empty.
output->id = input->id;
return !output->value.empty();
}
MYNTEYE_END_NAMESPACE
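On the consumer side, the normalized result obtained from Processor::GetOutput() could be displayed as in this sketch; the window name is arbitrary and MYNTEYE_USE_NAMESPACE is assumed from "mynteye/mynteye.h".

// Illustrative sketch: display the CV_8UC1 normalized disparity.
#include <memory>
#include <opencv2/highgui/highgui.hpp>
#include "mynteye/api/object.h"

MYNTEYE_USE_NAMESPACE

void show_disparity_normalized(const std::shared_ptr<Object> &out) {
  if (out == nullptr) return;  // no output produced yet
  auto &&mat = Object::Cast<ObjMat>(out);
  cv::imshow("disparity_normalized", mat->value);  // values in 0..255
  cv::waitKey(1);
}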

src/mynteye/api/processor/disparity_normalized_processor.h Normal file (41 lines)

@@ -0,0 +1,41 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_API_PROCESSOR_DISPARITY_NORMALIZED_PROCESSOR_H_
#define MYNTEYE_API_PROCESSOR_DISPARITY_NORMALIZED_PROCESSOR_H_
#pragma once
#include <string>
#include "mynteye/api/processor.h"
MYNTEYE_BEGIN_NAMESPACE
class DisparityNormalizedProcessor : public Processor {
public:
static const char NAME[];
explicit DisparityNormalizedProcessor(std::int32_t proc_period = 0);
virtual ~DisparityNormalizedProcessor();
std::string Name() override;
protected:
Object *OnCreateOutput() override;
bool OnProcess(
Object *const in, Object *const out, Processor *const parent) override;
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_API_PROCESSOR_DISPARITY_NORMALIZED_PROCESSOR_H_

src/mynteye/api/processor/disparity_processor.cc Normal file (106 lines)

@@ -0,0 +1,106 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/api/processor/disparity_processor.h"
#include <utility>
#include <opencv2/calib3d/calib3d.hpp>
#include "mynteye/logger.h"
MYNTEYE_BEGIN_NAMESPACE
const char DisparityProcessor::NAME[] = "DisparityProcessor";
DisparityProcessor::DisparityProcessor(std::int32_t proc_period)
: Processor(std::move(proc_period)) {
VLOG(2) << __func__ << ": proc_period=" << proc_period;
int sgbmWinSize = 3;
int numberOfDisparities = 64;
#ifdef USE_OPENCV2
// StereoSGBM
// http://docs.opencv.org/2.4/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html?#stereosgbm
sgbm_ = cv::Ptr<cv::StereoSGBM>(
new cv::StereoSGBM(
0, // minDisparity
numberOfDisparities, // numDisparities
sgbmWinSize, // SADWindowSize
8 * sgbmWinSize * sgbmWinSize, // P1
32 * sgbmWinSize * sgbmWinSize, // P2
1, // disp12MaxDiff
63, // preFilterCap
10, // uniquenessRatio
100, // speckleWindowSize
32, // speckleRange
false)); // fullDP
#else
sgbm_ = cv::StereoSGBM::create(0, 16, 3);
sgbm_->setPreFilterCap(63);
sgbm_->setBlockSize(sgbmWinSize);
sgbm_->setP1(8 * sgbmWinSize * sgbmWinSize);
sgbm_->setP2(32 * sgbmWinSize * sgbmWinSize);
sgbm_->setMinDisparity(0);
sgbm_->setNumDisparities(numberOfDisparities);
sgbm_->setUniquenessRatio(10);
sgbm_->setSpeckleWindowSize(100);
sgbm_->setSpeckleRange(32);
sgbm_->setDisp12MaxDiff(1);
#endif
}
DisparityProcessor::~DisparityProcessor() {
VLOG(2) << __func__;
}
std::string DisparityProcessor::Name() {
return NAME;
}
Object *DisparityProcessor::OnCreateOutput() {
return new ObjMat();
}
bool DisparityProcessor::OnProcess(
Object *const in, Object *const out, Processor *const parent) {
MYNTEYE_UNUSED(parent)
const ObjMat2 *input = Object::Cast<ObjMat2>(in);
ObjMat *output = Object::Cast<ObjMat>(out);
cv::Mat disparity;
#ifdef USE_OPENCV2
// StereoSGBM::operator()
// http://docs.opencv.org/2.4/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html#stereosgbm-operator
// Output disparity map. It is a 16-bit signed single-channel image of the
// same size as the input image.
// It contains disparity values scaled by 16. So, to get the floating-point
// disparity map,
// you need to divide each disp element by 16.
(*sgbm_)(input->first, input->second, disparity);
#else
// compute()
// http://docs.opencv.org/master/d2/d6e/classcv_1_1StereoMatcher.html
// Output disparity map. It has the same size as the input images.
// Some algorithms, like StereoBM or StereoSGBM compute 16-bit fixed-point
// disparity map
// (where each disparity value has 4 fractional bits),
// whereas other algorithms output 32-bit floating-point disparity map.
sgbm_->compute(input->first, input->second, disparity);
#endif
output->value = disparity / 16 + 1;
output->id = input->first_id;
return true;
}
MYNTEYE_END_NAMESPACE
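As the comments above note, the raw matcher output is 16-bit fixed point with 4 fractional bits. The pipeline keeps the integer form (disparity / 16 + 1); a floating-point disparity map could be recovered with a sketch like this, shown for illustration only.

// Illustrative sketch: convert StereoSGBM's CV_16S disparity (scaled by 16)
// to a floating-point disparity map.
#include <opencv2/core/core.hpp>

inline cv::Mat to_float_disparity(const cv::Mat &disparity_s16) {
  cv::Mat disparity_f32;
  disparity_s16.convertTo(disparity_f32, CV_32F, 1.0 / 16);
  return disparity_f32;
}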

src/mynteye/api/processor/disparity_processor.h Normal file (50 lines)

@@ -0,0 +1,50 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_API_PROCESSOR_DISPARITY_PROCESSOR_H_
#define MYNTEYE_API_PROCESSOR_DISPARITY_PROCESSOR_H_
#pragma once
#include <string>
#include "mynteye/api/processor.h"
namespace cv {
class StereoSGBM;
} // namespace cv
MYNTEYE_BEGIN_NAMESPACE
class DisparityProcessor : public Processor {
public:
static const char NAME[];
explicit DisparityProcessor(std::int32_t proc_period = 0);
virtual ~DisparityProcessor();
std::string Name() override;
protected:
Object *OnCreateOutput() override;
bool OnProcess(
Object *const in, Object *const out, Processor *const parent) override;
private:
cv::Ptr<cv::StereoSGBM> sgbm_;
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_API_PROCESSOR_DISPARITY_PROCESSOR_H_

src/mynteye/api/processor/points_processor.cc Normal file (53 lines)

@@ -0,0 +1,53 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/api/processor/points_processor.h"
#include <utility>
#include <opencv2/calib3d/calib3d.hpp>
#include "mynteye/logger.h"
MYNTEYE_BEGIN_NAMESPACE
const char PointsProcessor::NAME[] = "PointsProcessor";
PointsProcessor::PointsProcessor(cv::Mat Q, std::int32_t proc_period)
: Processor(std::move(proc_period)), Q_(std::move(Q)) {
VLOG(2) << __func__ << ": proc_period=" << proc_period;
}
PointsProcessor::~PointsProcessor() {
VLOG(2) << __func__;
}
std::string PointsProcessor::Name() {
return NAME;
}
Object *PointsProcessor::OnCreateOutput() {
return new ObjMat();
}
bool PointsProcessor::OnProcess(
Object *const in, Object *const out, Processor *const parent) {
MYNTEYE_UNUSED(parent)
const ObjMat *input = Object::Cast<ObjMat>(in);
ObjMat *output = Object::Cast<ObjMat>(out);
cv::reprojectImageTo3D(input->value, output->value, Q_, true);
output->id = input->id;
return true;
}
MYNTEYE_END_NAMESPACE
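For reference, cv::reprojectImageTo3D writes a CV_32FC3 image by default, so each pixel of the output above is a cv::Vec3f (X, Y, Z) in the rectified left camera frame set up by RectifyProcessor. A single point can be read as in this sketch.

// Illustrative sketch: read one 3D point from PointsProcessor's output.
#include <opencv2/core/core.hpp>

inline cv::Vec3f point_at(const cv::Mat &points, int y, int x) {
  return points.at<cv::Vec3f>(y, x);
}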

src/mynteye/api/processor/points_processor.h Normal file (46 lines)

@@ -0,0 +1,46 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_API_PROCESSOR_POINTS_PROCESSOR_H_
#define MYNTEYE_API_PROCESSOR_POINTS_PROCESSOR_H_
#pragma once
#include <string>
#include <opencv2/core/core.hpp>
#include "mynteye/api/processor.h"
MYNTEYE_BEGIN_NAMESPACE
class PointsProcessor : public Processor {
public:
static const char NAME[];
explicit PointsProcessor(cv::Mat Q, std::int32_t proc_period = 0);
virtual ~PointsProcessor();
std::string Name() override;
protected:
Object *OnCreateOutput() override;
bool OnProcess(
Object *const in, Object *const out, Processor *const parent) override;
private:
cv::Mat Q_;
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_API_PROCESSOR_POINTS_PROCESSOR_H_

src/mynteye/api/processor/rectify_processor.cc Normal file (98 lines)

@@ -0,0 +1,98 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/api/processor/rectify_processor.h"
#include <utility>
#include <opencv2/calib3d/calib3d.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include "mynteye/logger.h"
#include "mynteye/device/device.h"
MYNTEYE_BEGIN_NAMESPACE
const char RectifyProcessor::NAME[] = "RectifyProcessor";
RectifyProcessor::RectifyProcessor(
std::shared_ptr<Device> device, std::int32_t proc_period)
: Processor(std::move(proc_period)) {
VLOG(2) << __func__ << ": proc_period=" << proc_period;
InitParams(
device->GetIntrinsics(Stream::LEFT), device->GetIntrinsics(Stream::RIGHT),
device->GetExtrinsics(Stream::RIGHT, Stream::LEFT));
}
RectifyProcessor::~RectifyProcessor() {
VLOG(2) << __func__;
}
std::string RectifyProcessor::Name() {
return NAME;
}
Object *RectifyProcessor::OnCreateOutput() {
return new ObjMat2();
}
bool RectifyProcessor::OnProcess(
Object *const in, Object *const out, Processor *const parent) {
MYNTEYE_UNUSED(parent)
const ObjMat2 *input = Object::Cast<ObjMat2>(in);
ObjMat2 *output = Object::Cast<ObjMat2>(out);
cv::remap(input->first, output->first, map11, map12, cv::INTER_LINEAR);
cv::remap(input->second, output->second, map21, map22, cv::INTER_LINEAR);
output->first_id = input->first_id;
output->second_id = input->second_id;
return true;
}
void RectifyProcessor::InitParams(
Intrinsics in_left, Intrinsics in_right, Extrinsics ex_right_to_left) {
cv::Size size{in_left.width, in_left.height};
cv::Mat M1 =
(cv::Mat_<double>(3, 3) << in_left.fx, 0, in_left.cx, 0, in_left.fy,
in_left.cy, 0, 0, 1);
cv::Mat M2 =
(cv::Mat_<double>(3, 3) << in_right.fx, 0, in_right.cx, 0, in_right.fy,
in_right.cy, 0, 0, 1);
cv::Mat D1(1, 5, CV_64F, in_left.coeffs);
cv::Mat D2(1, 5, CV_64F, in_right.coeffs);
cv::Mat R =
(cv::Mat_<double>(3, 3) << ex_right_to_left.rotation[0][0],
ex_right_to_left.rotation[0][1], ex_right_to_left.rotation[0][2],
ex_right_to_left.rotation[1][0], ex_right_to_left.rotation[1][1],
ex_right_to_left.rotation[1][2], ex_right_to_left.rotation[2][0],
ex_right_to_left.rotation[2][1], ex_right_to_left.rotation[2][2]);
cv::Mat T(3, 1, CV_64F, ex_right_to_left.translation);
VLOG(2) << "InitParams size: " << size;
VLOG(2) << "M1: " << M1;
VLOG(2) << "M2: " << M2;
VLOG(2) << "D1: " << D1;
VLOG(2) << "D2: " << D2;
VLOG(2) << "R: " << R;
VLOG(2) << "T: " << T;
cv::Rect left_roi, right_roi;
cv::stereoRectify(
M1, D1, M2, D2, size, R, T, R1, R2, P1, P2, Q, cv::CALIB_ZERO_DISPARITY,
0, size, &left_roi, &right_roi);
cv::initUndistortRectifyMap(M1, D1, R1, P1, size, CV_16SC2, map11, map12);
cv::initUndistortRectifyMap(M2, D2, R2, P2, size, CV_16SC2, map21, map22);
}
MYNTEYE_END_NAMESPACE

src/mynteye/api/processor/rectify_processor.h Normal file (55 lines)

@@ -0,0 +1,55 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_API_PROCESSOR_RECTIFY_PROCESSOR_H_
#define MYNTEYE_API_PROCESSOR_RECTIFY_PROCESSOR_H_
#pragma once
#include <memory>
#include <string>
#include <opencv2/core/core.hpp>
#include "mynteye/types.h"
#include "mynteye/api/processor.h"
MYNTEYE_BEGIN_NAMESPACE
class Device;
class RectifyProcessor : public Processor {
public:
static const char NAME[];
RectifyProcessor(
std::shared_ptr<Device> device, std::int32_t proc_period = 0);
virtual ~RectifyProcessor();
std::string Name() override;
cv::Mat R1, P1, R2, P2, Q;
cv::Mat map11, map12, map21, map22;
protected:
Object *OnCreateOutput() override;
bool OnProcess(
Object *const in, Object *const out, Processor *const parent) override;
private:
void InitParams(
Intrinsics in_left, Intrinsics in_right, Extrinsics ex_right_to_left);
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_API_PROCESSOR_RECTIFY_PROCESSOR_H_

src/mynteye/api/synthetic.cc Normal file (593 lines)

@@ -0,0 +1,593 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/api/synthetic.h"
#include <algorithm>
#include <functional>
#include <stdexcept>
#include "mynteye/logger.h"
#include "mynteye/api/object.h"
#include "mynteye/api/plugin.h"
#include "mynteye/api/processor.h"
#include "mynteye/api/processor/depth_processor.h"
#include "mynteye/api/processor/disparity_normalized_processor.h"
#include "mynteye/api/processor/disparity_processor.h"
#include "mynteye/api/processor/points_processor.h"
#include "mynteye/api/processor/rectify_processor.h"
#include "mynteye/device/device.h"
#define RECTIFY_PROC_PERIOD 0
#define DISPARITY_PROC_PERIOD 0
#define DISPARITY_NORM_PROC_PERIOD 0
#define POINTS_PROC_PERIOD 0
#define DEPTH_PROC_PERIOD 0
MYNTEYE_BEGIN_NAMESPACE
namespace {
cv::Mat frame2mat(const std::shared_ptr<device::Frame> &frame) {
// TODO(JohnZhao) Support converting frames of other formats to cv::Mat
CHECK_EQ(frame->format(), Format::GREY);
return cv::Mat(frame->height(), frame->width(), CV_8UC1, frame->data());
}
api::StreamData data2api(const device::StreamData &data) {
return {data.img, frame2mat(data.frame), data.frame, data.frame_id};
}
void process_childs(
const std::shared_ptr<Processor> &proc, const std::string &name,
const Object &obj) {
auto &&processor = find_processor<Processor>(proc, name);
for (auto child : processor->GetChilds()) {
child->Process(obj);
}
}
} // namespace
Synthetic::Synthetic(API *api) : api_(api), plugin_(nullptr) {
VLOG(2) << __func__;
CHECK_NOTNULL(api_);
InitStreamSupports();
InitProcessors();
}
Synthetic::~Synthetic() {
VLOG(2) << __func__;
if (processor_) {
processor_->Deactivate(true);
processor_ = nullptr;
}
}
bool Synthetic::Supports(const Stream &stream) const {
return stream_supports_mode_.find(stream) != stream_supports_mode_.end();
}
Synthetic::mode_t Synthetic::SupportsMode(const Stream &stream) const {
try {
return stream_supports_mode_.at(stream);
} catch (const std::out_of_range &e) {
return MODE_LAST;
}
}
void Synthetic::EnableStreamData(const Stream &stream) {
EnableStreamData(stream, 0);
}
void Synthetic::DisableStreamData(const Stream &stream) {
DisableStreamData(stream, 0);
}
bool Synthetic::IsStreamDataEnabled(const Stream &stream) const {
return stream_enabled_mode_.find(stream) != stream_enabled_mode_.end();
}
void Synthetic::SetStreamCallback(
const Stream &stream, stream_callback_t callback) {
if (callback == nullptr) {
stream_callbacks_.erase(stream);
} else {
stream_callbacks_[stream] = callback;
}
}
bool Synthetic::HasStreamCallback(const Stream &stream) const {
return stream_callbacks_.find(stream) != stream_callbacks_.end();
}
void Synthetic::StartVideoStreaming() {
auto &&device = api_->device();
for (auto &&it = stream_supports_mode_.begin();
it != stream_supports_mode_.end(); it++) {
if (it->second == MODE_NATIVE) {
auto &&stream = it->first;
device->SetStreamCallback(
stream,
[this, stream](const device::StreamData &data) {
auto &&stream_data = data2api(data);
ProcessNativeStream(stream, stream_data);
// A mutex would be needed if the callback could be set after start
if (HasStreamCallback(stream)) {
stream_callbacks_.at(stream)(stream_data);
}
},
true);
}
}
device->Start(Source::VIDEO_STREAMING);
}
void Synthetic::StopVideoStreaming() {
auto &&device = api_->device();
for (auto &&it = stream_supports_mode_.begin();
it != stream_supports_mode_.end(); it++) {
if (it->second == MODE_NATIVE) {
device->SetStreamCallback(it->first, nullptr);
}
}
device->Stop(Source::VIDEO_STREAMING);
}
void Synthetic::WaitForStreams() {
api_->device()->WaitForStreams();
}
api::StreamData Synthetic::GetStreamData(const Stream &stream) {
auto &&mode = GetStreamEnabledMode(stream);
if (mode == MODE_NATIVE) {
auto &&device = api_->device();
return data2api(device->GetLatestStreamData(stream));
} else if (mode == MODE_SYNTHETIC) {
if (stream == Stream::LEFT_RECTIFIED || stream == Stream::RIGHT_RECTIFIED) {
static std::shared_ptr<ObjMat2> output = nullptr;
auto &&processor = find_processor<RectifyProcessor>(processor_);
auto &&out = processor->GetOutput();
if (out != nullptr) {
// Cache the output, as GetOutput() returns nullptr if called again immediately.
output = Object::Cast<ObjMat2>(out);
}
if (output != nullptr) {
if (stream == Stream::LEFT_RECTIFIED) {
return {nullptr, output->first, nullptr, output->first_id};
} else {
return {nullptr, output->second, nullptr, output->second_id};
}
}
VLOG(2) << "Rectify not ready now";
return {};
}
switch (stream) {
case Stream::DISPARITY: {
auto &&processor = find_processor<DisparityProcessor>(processor_);
auto &&out = processor->GetOutput();
if (out != nullptr) {
auto &&output = Object::Cast<ObjMat>(out);
return {nullptr, output->value, nullptr, output->id};
}
VLOG(2) << "Disparity not ready now";
} break;
case Stream::DISPARITY_NORMALIZED: {
auto &&processor =
find_processor<DisparityNormalizedProcessor>(processor_);
auto &&out = processor->GetOutput();
if (out != nullptr) {
auto &&output = Object::Cast<ObjMat>(out);
return {nullptr, output->value, nullptr, output->id};
}
VLOG(2) << "Disparity normalized not ready now";
} break;
case Stream::POINTS: {
auto &&processor = find_processor<PointsProcessor>(processor_);
auto &&out = processor->GetOutput();
if (out != nullptr) {
auto &&output = Object::Cast<ObjMat>(out);
return {nullptr, output->value, nullptr, output->id};
}
VLOG(2) << "Points not ready now";
} break;
case Stream::DEPTH: {
auto &&processor = find_processor<DepthProcessor>(processor_);
auto &&out = processor->GetOutput();
if (out != nullptr) {
auto &&output = Object::Cast<ObjMat>(out);
return {nullptr, output->value, nullptr, output->id};
}
VLOG(2) << "Depth not ready now";
} break;
default:
break;
}
return {}; // frame.empty() == true
} else {
LOG(ERROR) << "Failed to get stream data of " << stream
<< ", unsupported or disabled";
return {}; // frame.empty() == true
}
}
std::vector<api::StreamData> Synthetic::GetStreamDatas(const Stream &stream) {
auto &&mode = GetStreamEnabledMode(stream);
if (mode == MODE_NATIVE) {
auto &&device = api_->device();
std::vector<api::StreamData> datas;
for (auto &&data : device->GetStreamDatas(stream)) {
datas.push_back(data2api(data));
}
return datas;
} else if (mode == MODE_SYNTHETIC) {
return {GetStreamData(stream)};
} else {
LOG(ERROR) << "Failed to get stream data of " << stream
<< ", unsupported or disabled";
}
return {};
}
void Synthetic::SetPlugin(std::shared_ptr<Plugin> plugin) {
plugin_ = plugin;
}
bool Synthetic::HasPlugin() const {
return plugin_ != nullptr;
}
void Synthetic::InitStreamSupports() {
auto &&device = api_->device();
if (device->Supports(Stream::LEFT) && device->Supports(Stream::RIGHT)) {
stream_supports_mode_[Stream::LEFT] = MODE_NATIVE;
stream_supports_mode_[Stream::RIGHT] = MODE_NATIVE;
std::vector<Stream> stream_chain{
Stream::LEFT_RECTIFIED, Stream::RIGHT_RECTIFIED,
Stream::DISPARITY, Stream::DISPARITY_NORMALIZED,
Stream::POINTS, Stream::DEPTH};
for (auto &&stream : stream_chain) {
if (device->Supports(stream)) {
stream_supports_mode_[stream] = MODE_NATIVE;
} else {
stream_supports_mode_[stream] = MODE_SYNTHETIC;
}
}
}
// Enabled native streams by default
for (auto &&it = stream_supports_mode_.begin();
it != stream_supports_mode_.end(); it++) {
if (it->second == MODE_NATIVE) {
stream_enabled_mode_[it->first] = MODE_NATIVE;
}
}
}
Synthetic::mode_t Synthetic::GetStreamEnabledMode(const Stream &stream) const {
try {
return stream_enabled_mode_.at(stream);
} catch (const std::out_of_range &e) {
return MODE_LAST;
}
}
bool Synthetic::IsStreamEnabledNative(const Stream &stream) const {
return GetStreamEnabledMode(stream) == MODE_NATIVE;
}
bool Synthetic::IsStreamEnabledSynthetic(const Stream &stream) const {
return GetStreamEnabledMode(stream) == MODE_SYNTHETIC;
}
void Synthetic::EnableStreamData(const Stream &stream, std::uint32_t depth) {
if (IsStreamDataEnabled(stream))
return;
// Activate processors of synthetic stream
switch (stream) {
case Stream::LEFT_RECTIFIED: {
if (!IsStreamDataEnabled(Stream::LEFT))
break;
stream_enabled_mode_[stream] = MODE_SYNTHETIC;
CHECK(ActivateProcessor<RectifyProcessor>());
}
return;
case Stream::RIGHT_RECTIFIED: {
if (!IsStreamDataEnabled(Stream::RIGHT))
break;
stream_enabled_mode_[stream] = MODE_SYNTHETIC;
CHECK(ActivateProcessor<RectifyProcessor>());
}
return;
case Stream::DISPARITY: {
stream_enabled_mode_[stream] = MODE_SYNTHETIC;
EnableStreamData(Stream::LEFT_RECTIFIED, depth + 1);
EnableStreamData(Stream::RIGHT_RECTIFIED, depth + 1);
CHECK(ActivateProcessor<DisparityProcessor>());
}
return;
case Stream::DISPARITY_NORMALIZED: {
stream_enabled_mode_[stream] = MODE_SYNTHETIC;
EnableStreamData(Stream::DISPARITY, depth + 1);
CHECK(ActivateProcessor<DisparityNormalizedProcessor>());
}
return;
case Stream::POINTS: {
stream_enabled_mode_[stream] = MODE_SYNTHETIC;
EnableStreamData(Stream::DISPARITY, depth + 1);
CHECK(ActivateProcessor<PointsProcessor>());
}
return;
case Stream::DEPTH: {
stream_enabled_mode_[stream] = MODE_SYNTHETIC;
EnableStreamData(Stream::POINTS, depth + 1);
CHECK(ActivateProcessor<DepthProcessor>());
}
return;
default:
break;
}
if (depth == 0) {
LOG(WARNING) << "Enable stream data of " << stream << " failed";
}
}
void Synthetic::DisableStreamData(const Stream &stream, std::uint32_t depth) {
if (!IsStreamDataEnabled(stream))
return;
// Deactivate processors of synthetic stream
if (stream_enabled_mode_[stream] != MODE_NATIVE) {
stream_enabled_mode_.erase(stream);
switch (stream) {
case Stream::LEFT_RECTIFIED: {
if (IsStreamEnabledSynthetic(Stream::RIGHT_RECTIFIED)) {
DisableStreamData(Stream::RIGHT_RECTIFIED, depth + 1);
}
if (IsStreamEnabledSynthetic(Stream::DISPARITY)) {
DisableStreamData(Stream::DISPARITY, depth + 1);
}
DeactivateProcessor<RectifyProcessor>();
} break;
case Stream::RIGHT_RECTIFIED: {
if (IsStreamEnabledSynthetic(Stream::LEFT_RECTIFIED)) {
DisableStreamData(Stream::LEFT_RECTIFIED, depth + 1);
}
if (IsStreamEnabledSynthetic(Stream::DISPARITY)) {
DisableStreamData(Stream::DISPARITY, depth + 1);
}
DeactivateProcessor<RectifyProcessor>();
} break;
case Stream::DISPARITY: {
if (IsStreamEnabledSynthetic(Stream::DISPARITY_NORMALIZED)) {
DisableStreamData(Stream::DISPARITY_NORMALIZED, depth + 1);
}
if (IsStreamEnabledSynthetic(Stream::POINTS)) {
DisableStreamData(Stream::POINTS, depth + 1);
}
DeactivateProcessor<DisparityProcessor>();
} break;
case Stream::DISPARITY_NORMALIZED: {
DeactivateProcessor<DisparityNormalizedProcessor>();
} break;
case Stream::POINTS: {
if (IsStreamEnabledSynthetic(Stream::DEPTH)) {
DisableStreamData(Stream::DEPTH, depth + 1);
}
DeactivateProcessor<PointsProcessor>();
} break;
case Stream::DEPTH: {
DeactivateProcessor<DepthProcessor>();
} break;
default:
return;
}
if (depth > 0) {
LOG(WARNING) << "Disable synthetic stream data of " << stream << " too";
}
} else if (depth == 0) {
LOG(WARNING) << "Disable native stream data of " << stream << " failed";
}
}
void Synthetic::InitProcessors() {
auto &&rectify_processor =
std::make_shared<RectifyProcessor>(api_->device(), RECTIFY_PROC_PERIOD);
auto &&disparity_processor =
std::make_shared<DisparityProcessor>(DISPARITY_PROC_PERIOD);
auto &&disparitynormalized_processor =
std::make_shared<DisparityNormalizedProcessor>(
DISPARITY_NORM_PROC_PERIOD);
auto &&points_processor = std::make_shared<PointsProcessor>(
rectify_processor->Q, POINTS_PROC_PERIOD);
auto &&depth_processor = std::make_shared<DepthProcessor>(DEPTH_PROC_PERIOD);
using namespace std::placeholders; // NOLINT
rectify_processor->SetProcessCallback(
std::bind(&Synthetic::OnRectifyProcess, this, _1, _2, _3));
disparity_processor->SetProcessCallback(
std::bind(&Synthetic::OnDisparityProcess, this, _1, _2, _3));
disparitynormalized_processor->SetProcessCallback(
std::bind(&Synthetic::OnDisparityNormalizedProcess, this, _1, _2, _3));
points_processor->SetProcessCallback(
std::bind(&Synthetic::OnPointsProcess, this, _1, _2, _3));
depth_processor->SetProcessCallback(
std::bind(&Synthetic::OnDepthProcess, this, _1, _2, _3));
rectify_processor->SetPostProcessCallback(
std::bind(&Synthetic::OnRectifyPostProcess, this, _1));
disparity_processor->SetPostProcessCallback(
std::bind(&Synthetic::OnDisparityPostProcess, this, _1));
disparitynormalized_processor->SetPostProcessCallback(
std::bind(&Synthetic::OnDisparityNormalizedPostProcess, this, _1));
points_processor->SetPostProcessCallback(
std::bind(&Synthetic::OnPointsPostProcess, this, _1));
depth_processor->SetPostProcessCallback(
std::bind(&Synthetic::OnDepthPostProcess, this, _1));
rectify_processor->AddChild(disparity_processor);
disparity_processor->AddChild(disparitynormalized_processor);
disparity_processor->AddChild(points_processor);
points_processor->AddChild(depth_processor);
processor_ = rectify_processor;
}
void Synthetic::ProcessNativeStream(
const Stream &stream, const api::StreamData &data) {
if (stream == Stream::LEFT || stream == Stream::RIGHT) {
static api::StreamData left_data, right_data;
if (stream == Stream::LEFT) {
left_data = data;
} else if (stream == Stream::RIGHT) {
right_data = data;
}
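    // Feed the rectify processor once both native frames with the same
    // frame_id have arrived.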
if (left_data.img && right_data.img &&
left_data.img->frame_id == right_data.img->frame_id) {
auto &&processor = find_processor<RectifyProcessor>(processor_);
processor->Process(ObjMat2{left_data.frame, left_data.frame_id,
right_data.frame, right_data.frame_id});
}
return;
}
if (stream == Stream::LEFT_RECTIFIED || stream == Stream::RIGHT_RECTIFIED) {
static api::StreamData left_rect_data, right_rect_data;
if (stream == Stream::LEFT_RECTIFIED) {
left_rect_data = data;
} else if (stream == Stream::RIGHT_RECTIFIED) {
right_rect_data = data;
}
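    // The pair is already rectified natively, so skip the rectify processor
    // and feed its children directly once both frame_ids match.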
if (left_rect_data.img && right_rect_data.img &&
left_rect_data.img->frame_id == right_rect_data.img->frame_id) {
process_childs(
processor_, RectifyProcessor::NAME,
ObjMat2{left_rect_data.frame, left_rect_data.frame_id,
right_rect_data.frame, right_rect_data.frame_id});
}
return;
}
switch (stream) {
case Stream::DISPARITY: {
process_childs(processor_, DisparityProcessor::NAME,
ObjMat{data.frame, data.frame_id});
} break;
case Stream::DISPARITY_NORMALIZED: {
process_childs(processor_, DisparityNormalizedProcessor::NAME,
ObjMat{data.frame, data.frame_id});
} break;
case Stream::POINTS: {
process_childs(processor_, PointsProcessor::NAME,
ObjMat{data.frame, data.frame_id});
} break;
case Stream::DEPTH: {
process_childs(processor_, DepthProcessor::NAME,
ObjMat{data.frame, data.frame_id});
} break;
default:
break;
}
}
bool Synthetic::OnRectifyProcess(
Object *const in, Object *const out, Processor *const parent) {
MYNTEYE_UNUSED(parent)
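  // Presumably, returning true tells the processor to skip its own
  // processing: either a plugin already produced the output, or the
  // rectified stream is fed natively rather than synthesized here.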
if (plugin_ && plugin_->OnRectifyProcess(in, out)) {
return true;
}
return GetStreamEnabledMode(Stream::LEFT_RECTIFIED) != MODE_SYNTHETIC;
// && GetStreamEnabledMode(Stream::RIGHT_RECTIFIED) != MODE_SYNTHETIC
}
bool Synthetic::OnDisparityProcess(
Object *const in, Object *const out, Processor *const parent) {
MYNTEYE_UNUSED(parent)
if (plugin_ && plugin_->OnDisparityProcess(in, out)) {
return true;
}
return GetStreamEnabledMode(Stream::DISPARITY) != MODE_SYNTHETIC;
}
bool Synthetic::OnDisparityNormalizedProcess(
Object *const in, Object *const out, Processor *const parent) {
MYNTEYE_UNUSED(parent)
if (plugin_ && plugin_->OnDisparityNormalizedProcess(in, out)) {
return true;
}
return GetStreamEnabledMode(Stream::DISPARITY_NORMALIZED) != MODE_SYNTHETIC;
}
bool Synthetic::OnPointsProcess(
Object *const in, Object *const out, Processor *const parent) {
MYNTEYE_UNUSED(parent)
if (plugin_ && plugin_->OnPointsProcess(in, out)) {
return true;
}
return GetStreamEnabledMode(Stream::POINTS) != MODE_SYNTHETIC;
}
bool Synthetic::OnDepthProcess(
Object *const in, Object *const out, Processor *const parent) {
MYNTEYE_UNUSED(parent)
if (plugin_ && plugin_->OnDepthProcess(in, out)) {
return true;
}
return GetStreamEnabledMode(Stream::DEPTH) != MODE_SYNTHETIC;
}
void Synthetic::OnRectifyPostProcess(Object *const out) {
const ObjMat2 *output = Object::Cast<ObjMat2>(out);
if (HasStreamCallback(Stream::LEFT_RECTIFIED)) {
stream_callbacks_.at(Stream::LEFT_RECTIFIED)(
{nullptr, output->first, nullptr, output->first_id});
}
if (HasStreamCallback(Stream::RIGHT_RECTIFIED)) {
stream_callbacks_.at(Stream::RIGHT_RECTIFIED)(
{nullptr, output->second, nullptr, output->second_id});
}
}
void Synthetic::OnDisparityPostProcess(Object *const out) {
const ObjMat *output = Object::Cast<ObjMat>(out);
if (HasStreamCallback(Stream::DISPARITY)) {
stream_callbacks_.at(Stream::DISPARITY)(
{nullptr, output->value, nullptr, output->id});
}
}
void Synthetic::OnDisparityNormalizedPostProcess(Object *const out) {
const ObjMat *output = Object::Cast<ObjMat>(out);
if (HasStreamCallback(Stream::DISPARITY_NORMALIZED)) {
stream_callbacks_.at(Stream::DISPARITY_NORMALIZED)(
{nullptr, output->value, nullptr, output->id});
}
}
void Synthetic::OnPointsPostProcess(Object *const out) {
const ObjMat *output = Object::Cast<ObjMat>(out);
if (HasStreamCallback(Stream::POINTS)) {
stream_callbacks_.at(Stream::POINTS)(
{nullptr, output->value, nullptr, output->id});
}
}
void Synthetic::OnDepthPostProcess(Object *const out) {
const ObjMat *output = Object::Cast<ObjMat>(out);
if (HasStreamCallback(Stream::DEPTH)) {
stream_callbacks_.at(Stream::DEPTH)(
{nullptr, output->value, nullptr, output->id});
}
}
MYNTEYE_END_NAMESPACE

172
src/mynteye/api/synthetic.h Normal file
View File

@@ -0,0 +1,172 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_API_SYNTHETIC_H_
#define MYNTEYE_API_SYNTHETIC_H_
#pragma once
#include <map>
#include <memory>
#include <string>
#include <vector>
#include "mynteye/api/api.h"
MYNTEYE_BEGIN_NAMESPACE
class API;
class Plugin;
class Processor;
struct Object;
class Synthetic {
public:
using stream_callback_t = API::stream_callback_t;
typedef enum Mode {
MODE_NATIVE, // Native stream
MODE_SYNTHETIC, // Synthetic stream
MODE_LAST // Unsupported
} mode_t;
explicit Synthetic(API *api);
~Synthetic();
bool Supports(const Stream &stream) const;
mode_t SupportsMode(const Stream &stream) const;
void EnableStreamData(const Stream &stream);
void DisableStreamData(const Stream &stream);
bool IsStreamDataEnabled(const Stream &stream) const;
void SetStreamCallback(const Stream &stream, stream_callback_t callback);
bool HasStreamCallback(const Stream &stream) const;
void StartVideoStreaming();
void StopVideoStreaming();
void WaitForStreams();
api::StreamData GetStreamData(const Stream &stream);
std::vector<api::StreamData> GetStreamDatas(const Stream &stream);
void SetPlugin(std::shared_ptr<Plugin> plugin);
bool HasPlugin() const;
private:
void InitStreamSupports();
mode_t GetStreamEnabledMode(const Stream &stream) const;
bool IsStreamEnabledNative(const Stream &stream) const;
bool IsStreamEnabledSynthetic(const Stream &stream) const;
void EnableStreamData(const Stream &stream, std::uint32_t depth);
void DisableStreamData(const Stream &stream, std::uint32_t depth);
void InitProcessors();
template <class T>
bool ActivateProcessor(bool tree = false);
template <class T>
bool DeactivateProcessor(bool tree = false);
void ProcessNativeStream(const Stream &stream, const api::StreamData &data);
bool OnRectifyProcess(
Object *const in, Object *const out, Processor *const parent);
bool OnDisparityProcess(
Object *const in, Object *const out, Processor *const parent);
bool OnDisparityNormalizedProcess(
Object *const in, Object *const out, Processor *const parent);
bool OnPointsProcess(
Object *const in, Object *const out, Processor *const parent);
bool OnDepthProcess(
Object *const in, Object *const out, Processor *const parent);
void OnRectifyPostProcess(Object *const out);
void OnDisparityPostProcess(Object *const out);
void OnDisparityNormalizedPostProcess(Object *const out);
void OnPointsPostProcess(Object *const out);
void OnDepthPostProcess(Object *const out);
API *api_;
std::map<Stream, mode_t> stream_supports_mode_;
std::map<Stream, mode_t> stream_enabled_mode_;
std::map<Stream, stream_callback_t> stream_callbacks_;
std::shared_ptr<Processor> processor_;
std::shared_ptr<Plugin> plugin_;
};
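// find_processor: depth-first search of the processor tree for the node whose
// Name() matches, returned as a std::shared_ptr<T> (nullptr if not found).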
template <class T, class P>
std::shared_ptr<T> find_processor(const P &processor) {
return find_processor<T>(processor, T::NAME);
}
template <class T, class P>
std::shared_ptr<T> find_processor(const P &processor, const std::string &name) {
if (processor->Name() == name) {
return std::dynamic_pointer_cast<T>(processor);
}
auto &&childs = processor->GetChilds();
return find_processor<T>(std::begin(childs), std::end(childs), name);
}
template <class T, class InputIt>
std::shared_ptr<T> find_processor(
InputIt first, InputIt last, const std::string &name) {
if (first == last)
return nullptr;
for (auto it = first; it != last; ++it) {
if ((*it)->Name() == name) {
return std::dynamic_pointer_cast<T>(*it);
}
}
for (auto it = first; it != last; ++it) {
auto &&childs = (*it)->GetChilds();
if (childs.empty())
continue;
auto &&result =
find_processor<T>(std::begin(childs), std::end(childs), name);
if (result == nullptr)
continue;
return result;
}
return nullptr;
}
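// ActivateProcessor activates the processor of type T found in the tree
// (optionally together with its parents); DeactivateProcessor deactivates it
// (optionally together with its children).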
template <class T>
bool Synthetic::ActivateProcessor(bool parents) {
auto &&processor = find_processor<T>(processor_);
if (processor == nullptr)
return false;
processor->Activate(parents);
return true;
}
template <class T>
bool Synthetic::DeactivateProcessor(bool childs) {
auto &&processor = find_processor<T>(processor_);
if (processor == nullptr)
return false;
processor->Deactivate(childs);
return true;
}
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_API_SYNTHETIC_H_

View File

@@ -0,0 +1,62 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_DEVICE_ASYNC_CALLBACK_H_
#define MYNTEYE_DEVICE_ASYNC_CALLBACK_H_
#pragma once
#include <condition_variable>
#include <functional>
#include <mutex>
#include <string>
#include <thread>
#include <vector>
#include "mynteye/mynteye.h"
MYNTEYE_BEGIN_NAMESPACE
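// AsyncCallback decouples producers from a user callback by buffering data and
// invoking the callback on a dedicated worker thread (see
// async_callback_impl.h).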
template <class Data>
class AsyncCallback {
public:
using callback_t = std::function<void(Data data)>;
AsyncCallback(
std::string name, callback_t callback, std::size_t max_data_size = 0);
~AsyncCallback();
void PushData(Data data);
private:
void Run();
std::string name_;
callback_t callback_;
std::mutex mtx_;
std::condition_variable cv_;
bool running_;
std::thread thread_;
std::uint32_t count_;
std::vector<Data> datas_;
std::size_t max_data_size_;
};
MYNTEYE_END_NAMESPACE
#include "mynteye/device/async_callback_impl.h"
#endif // MYNTEYE_DEVICE_ASYNC_CALLBACK_H_

View File

@@ -0,0 +1,92 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_DEVICE_ASYNC_CALLBACK_IMPL_H_
#define MYNTEYE_DEVICE_ASYNC_CALLBACK_IMPL_H_
#pragma once
#include <string>
#include <utility>
#include "mynteye/logger.h"
MYNTEYE_BEGIN_NAMESPACE
template <class Data>
AsyncCallback<Data>::AsyncCallback(
std::string name, callback_t callback, std::size_t max_data_size)
: name_(std::move(name)),
callback_(std::move(callback)),
count_(0),
max_data_size_(max_data_size) {
VLOG(2) << __func__;
running_ = true;
thread_ = std::thread(&AsyncCallback<Data>::Run, this);
}
template <class Data>
AsyncCallback<Data>::~AsyncCallback() {
VLOG(2) << __func__;
{
std::lock_guard<std::mutex> _(mtx_);
running_ = false;
++count_;
}
cv_.notify_one();
if (thread_.joinable()) {
thread_.join();
}
}
template <class Data>
void AsyncCallback<Data>::PushData(Data data) {
std::lock_guard<std::mutex> _(mtx_);
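  // max_data_size_ == 0 keeps only the latest datum; otherwise the buffer is
  // bounded and the oldest datum is evicted first.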
if (max_data_size_ <= 0) {
datas_.clear();
} else if (max_data_size_ == datas_.size()) { // >= 1
datas_.erase(datas_.begin());
}
datas_.push_back(data);
++count_;
cv_.notify_one();
}
template <class Data>
void AsyncCallback<Data>::Run() {
VLOG(2) << "AsyncCallback(" << name_ << ") thread start";
while (true) {
std::unique_lock<std::mutex> lock(mtx_);
cv_.wait(lock, [this] { return count_ > 0; });
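    // Woken either by PushData() or by the destructor, which bumps count_
    // with running_ == false to unblock this wait.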
if (!running_)
break;
if (callback_) {
for (auto &&data : datas_) {
callback_(data);
}
}
if (VLOG_IS_ON(2) && count_ > datas_.size()) {
VLOG(2) << "AsyncCallback(" << name_ << ") dropped "
<< (count_ - datas_.size());
}
count_ = 0;
datas_.clear();
}
VLOG(2) << "AsyncCallback(" << name_ << ") thread end";
}
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_DEVICE_ASYNC_CALLBACK_IMPL_H_

File diff suppressed because it is too large

View File

@@ -0,0 +1,156 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_DEVICE_CHANNELS_H_
#define MYNTEYE_DEVICE_CHANNELS_H_
#pragma once
#include <map>
#include <memory>
#include <thread>
#include "mynteye/mynteye.h"
#include "mynteye/types.h"
#include "mynteye/device/types.h"
#include "mynteye/uvc/uvc.h"
MYNTEYE_BEGIN_NAMESPACE
namespace uvc {
struct device;
struct xu;
} // namespace uvc
class MYNTEYE_API Channels {
public:
typedef enum Channel {
CHANNEL_CAM_CTRL = 1,
CHANNEL_HALF_DUPLEX = 2,
CHANNEL_IMU_WRITE = 3,
CHANNEL_IMU_READ = 4,
CHANNEL_FILE = 5,
CHANNEL_LAST
} channel_t;
typedef struct ControlInfo {
std::int32_t min;
std::int32_t max;
std::int32_t def;
} control_info_t;
typedef enum XuCmd {
XU_CMD_ZDC = 0xE6, // zero drift calibration
XU_CMD_ERASE = 0xDE, // erase chip
XU_CMD_LAST
} xu_cmd_t;
typedef enum FileId {
FID_DEVICE_INFO = 1, // device info
FID_IMG_PARAMS = 2, // image intrinsics & extrinsics
FID_IMU_PARAMS = 4, // imu intrinsics & extrinsics
FID_LAST,
} file_id_t;
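  // Note: the file ids are powers of two, presumably so they can be combined
  // as a bitmask when selecting which files to read or write.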
using imu_callback_t = std::function<void(const ImuPacket &packet)>;
using device_info_t = DeviceInfo;
typedef struct ImgParams {
bool ok;
Intrinsics in_left;
Intrinsics in_right;
Extrinsics ex_right_to_left;
} img_params_t;
typedef struct ImuParams {
bool ok;
ImuIntrinsics in_accel;
ImuIntrinsics in_gyro;
Extrinsics ex_left_to_imu;
} imu_params_t;
explicit Channels(std::shared_ptr<uvc::device> device);
~Channels();
void LogControlInfos() const;
void UpdateControlInfos();
control_info_t GetControlInfo(const Option &option) const;
std::int32_t GetControlValue(const Option &option) const;
void SetControlValue(const Option &option, std::int32_t value);
bool RunControlAction(const Option &option) const;
void SetImuCallback(imu_callback_t callback);
void DoImuTrack();
void StartImuTracking(imu_callback_t callback = nullptr);
void StopImuTracking();
bool GetFiles(
device_info_t *info, img_params_t *img_params, imu_params_t *imu_params,
Version *spec_version = nullptr) const;
bool SetFiles(
device_info_t *info, img_params_t *img_params, imu_params_t *imu_params,
Version *spec_version = nullptr);
private:
bool PuControlRange(
Option option, int32_t *min, int32_t *max, int32_t *def) const;
bool PuControlQuery(Option option, uvc::pu_query query, int32_t *value) const;
bool XuControlRange(
channel_t channel, uint8_t id, int32_t *min, int32_t *max,
int32_t *def) const;
bool XuControlRange(
const uvc::xu &xu, uint8_t selector, uint8_t id, int32_t *min,
int32_t *max, int32_t *def) const;
bool XuControlQuery(
channel_t channel, uvc::xu_query query, uint16_t size,
uint8_t *data) const;
bool XuControlQuery(
const uvc::xu &xu, uint8_t selector, uvc::xu_query query, uint16_t size,
uint8_t *data) const;
bool XuCamCtrlQuery(uvc::xu_query query, uint16_t size, uint8_t *data) const;
std::int32_t XuCamCtrlGet(Option option) const;
void XuCamCtrlSet(Option option, std::int32_t value) const;
bool XuHalfDuplexSet(Option option, xu_cmd_t cmd) const;
bool XuImuWrite(const ImuReqPacket &req) const;
bool XuImuRead(ImuResPacket *res) const;
bool XuFileQuery(uvc::xu_query query, uint16_t size, uint8_t *data) const;
control_info_t PuControlInfo(Option option) const;
control_info_t XuControlInfo(Option option) const;
std::shared_ptr<uvc::device> device_;
std::map<Option, control_info_t> control_infos_;
bool is_imu_tracking_;
std::thread imu_track_thread_;
volatile bool imu_track_stop_;
std::uint32_t imu_sn_;
imu_callback_t imu_callback_;
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_DEVICE_CHANNELS_H_

View File

@@ -0,0 +1,36 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/device/config.h"
MYNTEYE_BEGIN_NAMESPACE
const std::map<Model, StreamSupports> stream_supports_map = {
{Model::STANDARD, {Stream::LEFT, Stream::RIGHT}}};
const std::map<Model, CapabilitiesSupports> capabilities_supports_map = {
{Model::STANDARD, {Capabilities::STEREO, Capabilities::IMU}}};
const std::map<Model, OptionSupports> option_supports_map = {
{Model::STANDARD,
{Option::GAIN, Option::BRIGHTNESS, Option::CONTRAST, Option::FRAME_RATE,
Option::IMU_FREQUENCY, Option::EXPOSURE_MODE, Option::MAX_GAIN,
Option::MAX_EXPOSURE_TIME, Option::DESIRED_BRIGHTNESS, Option::IR_CONTROL,
Option::HDR_MODE, Option::ZERO_DRIFT_CALIBRATION, Option::ERASE_CHIP}}};
const std::map<Model, std::map<Capabilities, StreamRequests>>
stream_requests_map = {
{Model::STANDARD,
{{Capabilities::STEREO, {{752, 480, Format::YUYV, 25}}}}}};
MYNTEYE_END_NAMESPACE

View File

@@ -0,0 +1,42 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_DEVICE_CONFIG_H_
#define MYNTEYE_DEVICE_CONFIG_H_
#pragma once
#include <map>
#include <set>
#include <vector>
#include "mynteye/mynteye.h"
#include "mynteye/types.h"
MYNTEYE_BEGIN_NAMESPACE
using StreamSupports = std::set<Stream>;
using CapabilitiesSupports = std::set<Capabilities>;
using OptionSupports = std::set<Option>;
extern const std::map<Model, StreamSupports> stream_supports_map;
extern const std::map<Model, CapabilitiesSupports> capabilities_supports_map;
extern const std::map<Model, OptionSupports> option_supports_map;
using StreamRequests = std::vector<StreamRequest>;
extern const std::map<Model, std::map<Capabilities, StreamRequests>>
stream_requests_map;
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_DEVICE_CONFIG_H_

View File

@@ -0,0 +1,48 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/device/context.h"
#include "mynteye/logger.h"
#include "mynteye/device/device.h"
#include "mynteye/uvc/uvc.h"
MYNTEYE_BEGIN_NAMESPACE
Context::Context() : context_(uvc::create_context()) {
VLOG(2) << __func__;
for (auto &&device : uvc::query_devices(context_)) {
auto name = uvc::get_name(*device);
auto vid = uvc::get_vendor_id(*device);
auto pid = uvc::get_product_id(*device);
// auto video_name = uvc::get_video_name(*device);
VLOG(2) << "UVC device detected, name: " << name << ", vid: 0x" << std::hex
<< vid << ", pid: 0x" << std::hex << pid;
if (vid == MYNTEYE_VID) {
auto d = Device::Create(name, device);
if (d) {
devices_.push_back(d);
} else {
LOG(ERROR) << "Device is not supported by MYNT EYE.";
}
}
}
}
Context::~Context() {
VLOG(2) << __func__;
}
MYNTEYE_END_NAMESPACE

View File

@@ -0,0 +1,609 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/device/device.h"
#include <algorithm>
#include <chrono>
#include <iterator>
#include <sstream>
#include <stdexcept>
#include <thread>
#include <utility>
#include "mynteye/logger.h"
#include "mynteye/device/async_callback.h"
#include "mynteye/device/channels.h"
#include "mynteye/device/config.h"
#include "mynteye/device/device_s.h"
#include "mynteye/device/motions.h"
#include "mynteye/device/streams.h"
#include "mynteye/device/types.h"
#include "mynteye/util/strings.h"
#include "mynteye/util/times.h"
#include "mynteye/uvc/uvc.h"
MYNTEYE_BEGIN_NAMESPACE
namespace {
struct DeviceModel {
char type;
std::uint8_t generation;
std::uint8_t baseline_code;
std::uint8_t hardware_code;
std::uint8_t custom_code;
bool ir_fixed;
DeviceModel() = default;
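  // Parse a model string laid out as
  // [type][generation][baseline][hardware][custom], optionally followed by an
  // "-IR" suffix (e.g. a hypothetical "S1030-IR").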
explicit DeviceModel(std::string model) {
CHECK_GE(model.size(), 5);
type = model[0];
generation = model[1];
baseline_code = model[2];
hardware_code = model[3];
custom_code = model[4];
ir_fixed = (model.size() == 8) && model.substr(5) == "-IR";
}
};
bool CheckSupports(
const Device *const device, const Stream &stream, bool fatal = true) {
if (device->Supports(stream)) {
return true;
} else {
auto &&supports = stream_supports_map.at(device->GetModel());
std::ostringstream ss;
std::copy(
supports.begin(), supports.end(),
std::ostream_iterator<Stream>(ss, ", "));
if (fatal) {
LOG(FATAL) << "Unsupported stream: " << stream
<< ". Please use these: " << ss.str();
} else {
LOG(WARNING) << "Unsupported stream: " << stream
<< ". Please use these: " << ss.str();
}
return false;
}
}
} // namespace
Device::Device(const Model &model, std::shared_ptr<uvc::device> device)
: video_streaming_(false),
motion_tracking_(false),
model_(model),
device_(device),
streams_(nullptr),
channels_(std::make_shared<Channels>(device)),
motions_(std::make_shared<Motions>(channels_)) {
VLOG(2) << __func__;
ReadAllInfos();
}
Device::~Device() {
VLOG(2) << __func__;
}
std::shared_ptr<Device> Device::Create(
const std::string &name, std::shared_ptr<uvc::device> device) {
if (name == "MYNTEYE") {
return std::make_shared<StandardDevice>(device);
} else if (strings::starts_with(name, "MYNT-EYE-")) {
// TODO(JohnZhao): Create different device by name, such as MYNT-EYE-S1000
std::string model_s = name.substr(9);
VLOG(2) << "MYNE EYE Model: " << model_s;
DeviceModel model(model_s);
switch (model.type) {
case 'S':
return std::make_shared<StandardDevice>(device);
default:
LOG(FATAL) << "MYNT EYE model is not supported now";
}
}
return nullptr;
}
bool Device::Supports(const Stream &stream) const {
auto &&supports = stream_supports_map.at(model_);
return supports.find(stream) != supports.end();
}
bool Device::Supports(const Capabilities &capability) const {
auto &&supports = capabilities_supports_map.at(model_);
return supports.find(capability) != supports.end();
}
bool Device::Supports(const Option &option) const {
auto &&supports = option_supports_map.at(model_);
return supports.find(option) != supports.end();
}
bool Device::Supports(const AddOns &addon) const {
CHECK_NOTNULL(device_info_);
auto &&hw_flag = device_info_->hardware_version.flag();
switch (addon) {
case AddOns::INFRARED:
return hw_flag[0];
case AddOns::INFRARED2:
return hw_flag[1];
default:
LOG(WARNING) << "Unknown add-on";
return false;
}
}
const std::vector<StreamRequest> &Device::GetStreamRequests(
const Capabilities &capability) const {
if (!Supports(capability)) {
LOG(FATAL) << "Unsupported capability: " << capability;
}
try {
auto &&cap_requests = stream_requests_map.at(model_);
return cap_requests.at(capability);
} catch (const std::out_of_range &e) {
LOG(FATAL) << "Stream request of " << capability << " of " << model_
<< " not found";
}
}
void Device::ConfigStreamRequest(
const Capabilities &capability, const StreamRequest &request) {
auto &&requests = GetStreamRequests(capability);
if (std::find(requests.cbegin(), requests.cend(), request) ==
requests.cend()) {
LOG(WARNING) << "Config stream request of " << capability
<< " is not accpected";
return;
}
stream_config_requests_[capability] = request;
}
std::shared_ptr<DeviceInfo> Device::GetInfo() const {
return device_info_;
}
std::string Device::GetInfo(const Info &info) const {
CHECK_NOTNULL(device_info_);
switch (info) {
case Info::DEVICE_NAME:
return device_info_->name;
case Info::SERIAL_NUMBER:
return device_info_->serial_number;
case Info::FIRMWARE_VERSION:
return device_info_->firmware_version.to_string();
case Info::HARDWARE_VERSION:
return device_info_->hardware_version.to_string();
case Info::SPEC_VERSION:
return device_info_->spec_version.to_string();
case Info::LENS_TYPE:
return device_info_->lens_type.to_string();
case Info::IMU_TYPE:
return device_info_->imu_type.to_string();
case Info::NOMINAL_BASELINE:
return std::to_string(device_info_->nominal_baseline);
default:
LOG(WARNING) << "Unknown device info";
return "";
}
}
Intrinsics Device::GetIntrinsics(const Stream &stream) const {
bool ok;
return GetIntrinsics(stream, &ok);
}
Extrinsics Device::GetExtrinsics(const Stream &from, const Stream &to) const {
bool ok;
return GetExtrinsics(from, to, &ok);
}
MotionIntrinsics Device::GetMotionIntrinsics() const {
bool ok;
return GetMotionIntrinsics(&ok);
}
Extrinsics Device::GetMotionExtrinsics(const Stream &from) const {
bool ok;
return GetMotionExtrinsics(from, &ok);
}
Intrinsics Device::GetIntrinsics(const Stream &stream, bool *ok) const {
try {
*ok = true;
return stream_intrinsics_.at(stream);
} catch (const std::out_of_range &e) {
*ok = false;
LOG(WARNING) << "Intrinsics of " << stream << " not found";
return {};
}
}
Extrinsics Device::GetExtrinsics(
const Stream &from, const Stream &to, bool *ok) const {
try {
*ok = true;
return stream_from_extrinsics_.at(from).at(to);
} catch (const std::out_of_range &e) {
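    // Fall back to the inverse of the reverse-direction extrinsics.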
try {
*ok = true;
return stream_from_extrinsics_.at(to).at(from).Inverse();
} catch (const std::out_of_range &e) {
*ok = false;
LOG(WARNING) << "Extrinsics from " << from << " to " << to
<< " not found";
return {};
}
}
}
MotionIntrinsics Device::GetMotionIntrinsics(bool *ok) const {
if (motion_intrinsics_) {
*ok = true;
return *motion_intrinsics_;
} else {
*ok = false;
VLOG(2) << "Motion intrinsics not found";
return {};
}
}
Extrinsics Device::GetMotionExtrinsics(const Stream &from, bool *ok) const {
try {
*ok = true;
return motion_from_extrinsics_.at(from);
} catch (const std::out_of_range &e) {
*ok = false;
VLOG(2) << "Motion extrinsics from " << from << " not found";
return {};
}
}
void Device::SetIntrinsics(const Stream &stream, const Intrinsics &in) {
stream_intrinsics_[stream] = in;
}
void Device::SetExtrinsics(
const Stream &from, const Stream &to, const Extrinsics &ex) {
stream_from_extrinsics_[from][to] = ex;
}
void Device::SetMotionIntrinsics(const MotionIntrinsics &in) {
if (motion_intrinsics_ == nullptr) {
motion_intrinsics_ = std::make_shared<MotionIntrinsics>();
}
*motion_intrinsics_ = in;
}
void Device::SetMotionExtrinsics(const Stream &from, const Extrinsics &ex) {
motion_from_extrinsics_[from] = ex;
}
void Device::LogOptionInfos() const {
channels_->LogControlInfos();
}
OptionInfo Device::GetOptionInfo(const Option &option) const {
if (!Supports(option)) {
LOG(WARNING) << "Unsupported option: " << option;
return {0, 0, 0};
}
auto &&info = channels_->GetControlInfo(option);
return {info.min, info.max, info.def};
}
std::int32_t Device::GetOptionValue(const Option &option) const {
if (!Supports(option)) {
LOG(WARNING) << "Unsupported option: " << option;
return -1;
}
return channels_->GetControlValue(option);
}
void Device::SetOptionValue(const Option &option, std::int32_t value) {
if (!Supports(option)) {
LOG(WARNING) << "Unsupported option: " << option;
return;
}
channels_->SetControlValue(option, value);
}
bool Device::RunOptionAction(const Option &option) const {
if (!Supports(option)) {
LOG(WARNING) << "Unsupported option: " << option;
return false;
}
return channels_->RunControlAction(option);
}
void Device::SetStreamCallback(
const Stream &stream, stream_callback_t callback, bool async) {
if (!CheckSupports(this, stream, false)) {
return;
}
if (callback) {
stream_callbacks_[stream] = callback;
if (async)
stream_async_callbacks_[stream] =
std::make_shared<stream_async_callback_t>(
              to_string(stream), callback);  // default: keep latest only
} else {
stream_callbacks_.erase(stream);
stream_async_callbacks_.erase(stream);
}
}
void Device::SetMotionCallback(motion_callback_t callback, bool async) {
motion_callback_ = callback;
if (callback) {
if (async)
motion_async_callback_ =
std::make_shared<motion_async_callback_t>("motion", callback, 1000);
// will drop old motion datas after callback cost > 2 s (1000 / 500 Hz)
} else {
motion_async_callback_ = nullptr;
}
}
bool Device::HasStreamCallback(const Stream &stream) const {
try {
return stream_callbacks_.at(stream) != nullptr;
} catch (const std::out_of_range &e) {
return false;
}
}
bool Device::HasMotionCallback() const {
return motion_callback_ != nullptr;
}
void Device::Start(const Source &source) {
if (source == Source::VIDEO_STREAMING) {
StartVideoStreaming();
} else if (source == Source::MOTION_TRACKING) {
StartMotionTracking();
} else if (source == Source::ALL) {
Start(Source::VIDEO_STREAMING);
Start(Source::MOTION_TRACKING);
} else {
LOG(ERROR) << "Unsupported source :(";
}
}
void Device::Stop(const Source &source) {
if (source == Source::VIDEO_STREAMING) {
StopVideoStreaming();
} else if (source == Source::MOTION_TRACKING) {
StopMotionTracking();
} else if (source == Source::ALL) {
Stop(Source::MOTION_TRACKING);
// Must stop motion tracking before video streaming and sleep a moment here
std::this_thread::sleep_for(std::chrono::milliseconds(10));
Stop(Source::VIDEO_STREAMING);
} else {
LOG(ERROR) << "Unsupported source :(";
}
}
void Device::WaitForStreams() {
CHECK(video_streaming_);
CHECK_NOTNULL(streams_);
streams_->WaitForStreams();
}
std::vector<device::StreamData> Device::GetStreamDatas(const Stream &stream) {
CHECK(video_streaming_);
CHECK_NOTNULL(streams_);
CheckSupports(this, stream);
std::lock_guard<std::mutex> _(mtx_streams_);
return streams_->GetStreamDatas(stream);
}
device::StreamData Device::GetLatestStreamData(const Stream &stream) {
CHECK(video_streaming_);
CHECK_NOTNULL(streams_);
CheckSupports(this, stream);
std::lock_guard<std::mutex> _(mtx_streams_);
return streams_->GetLatestStreamData(stream);
}
void Device::EnableMotionDatas(std::size_t max_size) {
CHECK_NOTNULL(motions_);
motions_->EnableMotionDatas(max_size);
}
std::vector<device::MotionData> Device::GetMotionDatas() {
CHECK(motion_tracking_);
CHECK_NOTNULL(motions_);
return motions_->GetMotionDatas();
}
const StreamRequest &Device::GetStreamRequest(const Capabilities &capability) {
try {
return stream_config_requests_.at(capability);
} catch (const std::out_of_range &e) {
auto &&requests = GetStreamRequests(capability);
if (requests.size() >= 1) {
VLOG(2) << "Select the first one stream request of " << capability;
return requests[0];
} else {
LOG(FATAL) << "Please config the stream request of " << capability;
}
}
}
void Device::StartVideoStreaming() {
if (video_streaming_) {
LOG(WARNING) << "Cannot start video streaming without first stopping it";
return;
}
streams_ = std::make_shared<Streams>(GetKeyStreams());
// if stream capabilities are supported with subdevices of device_
/*
Capabilities stream_capabilities[] = {
Capabilities::STEREO,
Capabilities::COLOR,
Capabilities::DEPTH,
Capabilities::POINTS,
Capabilities::FISHEYE,
Capabilities::INFRARED,
Capabilities::INFRARED2
};
for (auto &&capability : stream_capabilities) {
}
*/
if (Supports(Capabilities::STEREO)) {
// do stream request selection if more than one request of each stream
auto &&stream_request = GetStreamRequest(Capabilities::STEREO);
streams_->ConfigStream(Capabilities::STEREO, stream_request);
uvc::set_device_mode(
*device_, stream_request.width, stream_request.height,
static_cast<int>(stream_request.format), stream_request.fps,
[this](const void *data, std::function<void()> continuation) {
// drop the first stereo stream data
static std::uint8_t drop_count = 1;
if (drop_count > 0) {
--drop_count;
continuation();
return;
}
// auto &&time_beg = times::now();
{
std::lock_guard<std::mutex> _(mtx_streams_);
if (streams_->PushStream(Capabilities::STEREO, data)) {
CallbackPushedStreamData(Stream::LEFT);
CallbackPushedStreamData(Stream::RIGHT);
}
}
continuation();
OnStereoStreamUpdate();
// VLOG(2) << "Stereo video callback cost "
// << times::count<times::milliseconds>(times::now() - time_beg)
// << " ms";
});
} else {
LOG(FATAL) << "Not any stream capabilities are supported by this device";
}
uvc::start_streaming(*device_, 0);
video_streaming_ = true;
}
void Device::StopVideoStreaming() {
if (!video_streaming_) {
LOG(WARNING) << "Cannot stop video streaming without first starting it";
return;
}
stop_streaming(*device_);
video_streaming_ = false;
}
void Device::StartMotionTracking() {
if (!Supports(Capabilities::IMU)) {
LOG(FATAL) << "IMU capability is not supported by this device";
}
if (motion_tracking_) {
LOG(WARNING) << "Cannot start motion tracking without first stopping it";
return;
}
motions_->SetMotionCallback(
std::bind(&Device::CallbackMotionData, this, std::placeholders::_1));
// motions_->StartMotionTracking();
motion_tracking_ = true;
}
void Device::StopMotionTracking() {
if (!motion_tracking_) {
LOG(WARNING) << "Cannot stop motion tracking without first starting it";
return;
}
// motions_->StopMotionTracking();
motion_tracking_ = false;
}
void Device::OnStereoStreamUpdate() {}
void Device::ReadAllInfos() {
device_info_ = std::make_shared<DeviceInfo>();
CHECK_NOTNULL(channels_);
Channels::img_params_t img_params;
Channels::imu_params_t imu_params;
if (!channels_->GetFiles(device_info_.get(), &img_params, &imu_params)) {
#if defined(WITH_DEVICE_INFO_REQUIRED)
LOG(FATAL)
#else
LOG(WARNING)
#endif
<< "Read device infos failed. Please upgrade your firmware to the "
"latest version.";
}
VLOG(2) << "Device info: {name: " << device_info_->name
<< ", serial_number: " << device_info_->serial_number
<< ", firmware_version: "
<< device_info_->firmware_version.to_string()
<< ", hardware_version: "
<< device_info_->hardware_version.to_string()
<< ", spec_version: " << device_info_->spec_version.to_string()
<< ", lens_type: " << device_info_->lens_type.to_string()
<< ", imu_type: " << device_info_->imu_type.to_string()
<< ", nominal_baseline: " << device_info_->nominal_baseline << "}";
device_info_->name = uvc::get_name(*device_);
if (img_params.ok) {
SetIntrinsics(Stream::LEFT, img_params.in_left);
SetIntrinsics(Stream::RIGHT, img_params.in_right);
SetExtrinsics(Stream::RIGHT, Stream::LEFT, img_params.ex_right_to_left);
VLOG(2) << "Intrinsics left: {" << GetIntrinsics(Stream::LEFT) << "}";
VLOG(2) << "Intrinsics right: {" << GetIntrinsics(Stream::RIGHT) << "}";
VLOG(2) << "Extrinsics right to left: {"
<< GetExtrinsics(Stream::RIGHT, Stream::LEFT) << "}";
} else {
LOG(WARNING) << "Intrinsics & extrinsics not exist";
}
if (imu_params.ok) {
SetMotionIntrinsics({imu_params.in_accel, imu_params.in_gyro});
SetMotionExtrinsics(Stream::LEFT, imu_params.ex_left_to_imu);
VLOG(2) << "Motion intrinsics: {" << GetMotionIntrinsics() << "}";
VLOG(2) << "Motion extrinsics left to imu: {"
<< GetMotionExtrinsics(Stream::LEFT) << "}";
} else {
VLOG(2) << "Motion intrinsics & extrinsics not exist";
}
}
void Device::CallbackPushedStreamData(const Stream &stream) {
if (HasStreamCallback(stream)) {
auto &&datas = streams_->stream_datas(stream);
// if (datas.size() > 0) {}
auto &&data = datas.back();
if (stream_async_callbacks_.find(stream) != stream_async_callbacks_.end()) {
stream_async_callbacks_.at(stream)->PushData(data);
} else {
stream_callbacks_.at(stream)(data);
}
}
}
void Device::CallbackMotionData(const device::MotionData &data) {
if (HasMotionCallback()) {
if (motion_async_callback_) {
motion_async_callback_->PushData(data);
} else {
motion_callback_(data);
}
}
}
MYNTEYE_END_NAMESPACE

View File

@@ -0,0 +1,41 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/device/device_s.h"
#include "mynteye/logger.h"
#include "mynteye/device/motions.h"
MYNTEYE_BEGIN_NAMESPACE
StandardDevice::StandardDevice(std::shared_ptr<uvc::device> device)
: Device(Model::STANDARD, device) {
VLOG(2) << __func__;
}
StandardDevice::~StandardDevice() {
VLOG(2) << __func__;
}
std::vector<Stream> StandardDevice::GetKeyStreams() const {
return {Stream::LEFT, Stream::RIGHT};
}
void StandardDevice::OnStereoStreamUpdate() {
if (motion_tracking_) {
auto &&motions = this->motions();
motions->DoMotionTrack();
}
}
MYNTEYE_END_NAMESPACE

View File

@@ -0,0 +1,37 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_DEVICE_DEVICE_S_H_
#define MYNTEYE_DEVICE_DEVICE_S_H_
#pragma once
#include <memory>
#include <vector>
#include "mynteye/device/device.h"
MYNTEYE_BEGIN_NAMESPACE
class StandardDevice : public Device {
public:
explicit StandardDevice(std::shared_ptr<uvc::device> device);
virtual ~StandardDevice();
std::vector<Stream> GetKeyStreams() const override;
void OnStereoStreamUpdate() override;
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_DEVICE_DEVICE_S_H_

View File

@@ -0,0 +1,110 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/device/motions.h"
#include "mynteye/logger.h"
#include "mynteye/device/channels.h"
MYNTEYE_BEGIN_NAMESPACE
Motions::Motions(std::shared_ptr<Channels> channels)
: channels_(channels),
motion_callback_(nullptr),
motion_datas_enabled_(false),
is_imu_tracking(false) {
CHECK_NOTNULL(channels_);
VLOG(2) << __func__;
}
Motions::~Motions() {
VLOG(2) << __func__;
}
void Motions::SetMotionCallback(motion_callback_t callback) {
motion_callback_ = callback;
if (motion_callback_) {
channels_->SetImuCallback([this](const ImuPacket &packet) {
if (!motion_callback_ && !motion_datas_enabled_) {
        LOG(WARNING) << "Imu data dropped: motion callback and motion datas "
                        "are both disabled";
return;
}
for (auto &&seg : packet.segments) {
auto &&imu = std::make_shared<ImuData>();
imu->frame_id = seg.frame_id;
// if (seg.offset < 0 &&
// static_cast<uint32_t>(-seg.offset) > packet.timestamp) {
// LOG(WARNING) << "Imu timestamp offset is incorrect";
// }
imu->timestamp = packet.timestamp + seg.offset;
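        // Convert raw 16-bit samples to physical units (accel in g, gyro in
        // deg/s, temperature in deg C); the scale factors suggest a +/-4 g
        // accel range and a +/-500 deg/s gyro range.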
imu->accel[0] = seg.accel[0] * 8.f / 0x10000;
imu->accel[1] = seg.accel[1] * 8.f / 0x10000;
imu->accel[2] = seg.accel[2] * 8.f / 0x10000;
imu->gyro[0] = seg.gyro[0] * 1000.f / 0x10000;
imu->gyro[1] = seg.gyro[1] * 1000.f / 0x10000;
imu->gyro[2] = seg.gyro[2] * 1000.f / 0x10000;
imu->temperature = seg.temperature / 326.8f + 25;
std::lock_guard<std::mutex> _(mtx_datas_);
motion_data_t data = {imu};
motion_datas_.push_back(data);
motion_callback_(data);
}
});
} else {
channels_->SetImuCallback(nullptr);
}
}
void Motions::DoMotionTrack() {
channels_->DoImuTrack();
}
void Motions::StartMotionTracking() {
if (!is_imu_tracking) {
channels_->StartImuTracking();
is_imu_tracking = true;
} else {
LOG(WARNING) << "Imu is tracking already";
}
}
void Motions::StopMotionTracking() {
if (is_imu_tracking) {
channels_->StopImuTracking();
is_imu_tracking = false;
}
}
void Motions::EnableMotionDatas(std::size_t max_size) {
if (max_size <= 0) {
LOG(WARNING) << "Could not enable motion datas with max_size <= 0";
return;
}
motion_datas_enabled_ = true;
motion_datas_max_size = max_size;
}
Motions::motion_datas_t Motions::GetMotionDatas() {
if (!motion_datas_enabled_) {
LOG(FATAL) << "Must enable motion datas before getting them, or you set "
"motion callback instead";
}
std::lock_guard<std::mutex> _(mtx_datas_);
motion_datas_t datas = motion_datas_;
motion_datas_.clear();
return datas;
}
MYNTEYE_END_NAMESPACE

View File

@@ -0,0 +1,64 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_DEVICE_MOTIONS_H_
#define MYNTEYE_DEVICE_MOTIONS_H_
#pragma once
#include <memory>
#include <mutex>
#include <vector>
#include "mynteye/mynteye.h"
#include "mynteye/device/callbacks.h"
MYNTEYE_BEGIN_NAMESPACE
class Channels;
class Motions {
public:
using motion_data_t = device::MotionData;
using motion_datas_t = std::vector<motion_data_t>;
using motion_callback_t = device::MotionCallback;
explicit Motions(std::shared_ptr<Channels> channels);
~Motions();
void SetMotionCallback(motion_callback_t callback);
void DoMotionTrack();
void StartMotionTracking();
void StopMotionTracking();
void EnableMotionDatas(std::size_t max_size);
motion_datas_t GetMotionDatas();
private:
std::shared_ptr<Channels> channels_;
motion_callback_t motion_callback_;
motion_datas_t motion_datas_;
bool motion_datas_enabled_;
std::size_t motion_datas_max_size;
bool is_imu_tracking;
std::mutex mtx_datas_;
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_DEVICE_MOTIONS_H_

View File

@@ -0,0 +1,310 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/device/streams.h"
#include <algorithm>
#include <chrono>
#include <iomanip>
#include <stdexcept>
#include "mynteye/logger.h"
#include "mynteye/device/types.h"
MYNTEYE_BEGIN_NAMESPACE
namespace {
bool unpack_stereo_img_data(
const void *data, const StreamRequest &request, ImgData *img) {
CHECK_NOTNULL(img);
CHECK_EQ(request.format, Format::YUYV);
auto data_new = reinterpret_cast<const std::uint8_t *>(data);
std::size_t data_n =
request.width * request.height * bytes_per_pixel(request.format);
auto data_end = data_new + data_n;
std::size_t packet_n = sizeof(ImagePacket);
std::vector<std::uint8_t> packet(packet_n);
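  // The ImagePacket is appended, byte-reversed, at the tail of the frame
  // buffer; restore its original byte order before parsing it.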
std::reverse_copy(data_end - packet_n, data_end, packet.begin());
ImagePacket img_packet(packet.data());
// LOG(INFO) << "ImagePacket: header=0x" << std::hex <<
// static_cast<int>(img_packet.header)
// << ", size=0x" << std::hex << static_cast<int>(img_packet.size)
// << ", frame_id="<< std::dec << img_packet.frame_id
// << ", timestamp="<< std::dec << img_packet.timestamp
// << ", exposure_time="<< std::dec << img_packet.exposure_time
// << ", checksum=0x" << std::hex << static_cast<int>(img_packet.checksum);
if (img_packet.header != 0x3B) {
VLOG(2) << "Image packet header must be 0x3B, but 0x" << std::hex
<< std::uppercase << std::setw(2) << std::setfill('0')
<< static_cast<int>(img_packet.header) << " now";
return false;
}
std::uint8_t checksum = 0;
for (std::size_t i = 2, n = packet_n - 2; i <= n; i++) { // content: [2,9]
checksum = (checksum ^ packet[i]);
}
if (img_packet.checksum != checksum) {
VLOG(2) << "Image packet checksum should be 0x" << std::hex
<< std::uppercase << std::setw(2) << std::setfill('0')
<< static_cast<int>(img_packet.checksum) << ", but 0x"
<< std::setw(2) << std::setfill('0') << static_cast<int>(checksum)
<< " now";
return false;
}
img->frame_id = img_packet.frame_id;
img->timestamp = img_packet.timestamp;
img->exposure_time = img_packet.exposure_time;
return true;
}
bool unpack_left_img_pixels(
const void *data, const StreamRequest &request, Streams::frame_t *frame) {
CHECK_NOTNULL(frame);
CHECK_EQ(request.format, Format::YUYV);
CHECK_EQ(frame->format(), Format::GREY);
auto data_new = reinterpret_cast<const std::uint8_t *>(data);
std::size_t n = frame->width() * frame->height();
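  // The stereo pair is byte-interleaved in the YUYV buffer: even bytes carry
  // the left image, odd bytes the right image.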
for (std::size_t i = 0; i < n; i++) {
frame->data()[i] = *(data_new + (i * 2));
}
return true;
}
bool unpack_right_img_pixels(
const void *data, const StreamRequest &request, Streams::frame_t *frame) {
CHECK_NOTNULL(frame);
CHECK_EQ(request.format, Format::YUYV);
CHECK_EQ(frame->format(), Format::GREY);
auto data_new = reinterpret_cast<const std::uint8_t *>(data);
std::size_t n = frame->width() * frame->height();
for (std::size_t i = 0; i < n; i++) {
frame->data()[i] = *(data_new + (i * 2 + 1));
}
return true;
}
} // namespace
Streams::Streams(const std::vector<Stream> key_streams)
: key_streams_(key_streams),
stream_capabilities_(
{Capabilities::STEREO, Capabilities::COLOR, Capabilities::DEPTH,
Capabilities::POINTS, Capabilities::FISHEYE, Capabilities::INFRARED,
Capabilities::INFRARED2}),
unpack_img_data_map_(
{{Stream::LEFT, unpack_stereo_img_data},
{Stream::RIGHT, unpack_stereo_img_data}}),
unpack_img_pixels_map_(
{{Stream::LEFT, unpack_left_img_pixels},
{Stream::RIGHT, unpack_right_img_pixels}}) {
VLOG(2) << __func__;
}
Streams::~Streams() {
VLOG(2) << __func__;
}
void Streams::ConfigStream(
const Capabilities &capability, const StreamRequest &request) {
if (!IsStreamCapability(capability)) {
LOG(ERROR) << "Cannot config stream without stream capability";
return;
}
VLOG(2) << "Config stream request of " << capability << ", " << request;
stream_config_requests_[capability] = request;
}
bool Streams::PushStream(const Capabilities &capability, const void *data) {
if (!HasStreamConfigRequest(capability)) {
LOG(FATAL) << "Cannot push stream without stream config request";
}
std::unique_lock<std::mutex> lock(mtx_);
auto &&request = GetStreamConfigRequest(capability);
bool pushed = false;
switch (capability) {
case Capabilities::STEREO: {
// alloc left
AllocStreamData(Stream::LEFT, request, Format::GREY);
auto &&left_data = stream_datas_map_[Stream::LEFT].back();
// unpack img data
if (unpack_img_data_map_[Stream::LEFT](
data, request, left_data.img.get())) {
left_data.frame_id = left_data.img->frame_id;
// alloc right
AllocStreamData(Stream::RIGHT, request, Format::GREY);
auto &&right_data = stream_datas_map_[Stream::RIGHT].back();
*right_data.img = *left_data.img;
right_data.frame_id = left_data.img->frame_id;
// unpack frame
unpack_img_pixels_map_[Stream::LEFT](
data, request, left_data.frame.get());
unpack_img_pixels_map_[Stream::RIGHT](
data, request, right_data.frame.get());
pushed = true;
} else {
// discard left
DiscardStreamData(Stream::LEFT);
VLOG(2) << "Image packet is unaccepted, frame dropped";
pushed = false;
}
} break;
default:
LOG(FATAL) << "Not supported " << capability << " now";
}
if (HasKeyStreamDatas())
cv_.notify_one();
return pushed;
}
void Streams::WaitForStreams() {
std::unique_lock<std::mutex> lock(mtx_);
auto ready = std::bind(&Streams::HasKeyStreamDatas, this);
if (!ready() && !cv_.wait_for(lock, std::chrono::seconds(2), ready)) {
LOG(FATAL) << "Timeout waiting for key frames. Please use USB 3.0, and not "
"in virtual machine.";
}
}
void Streams::ConfigStreamLimits(
const Stream &stream, std::size_t max_data_size) {
CHECK_GT(max_data_size, 0);
stream_limits_map_[stream] = max_data_size;
}
std::size_t Streams::GetStreamDataMaxSize(const Stream &stream) const {
try {
return stream_limits_map_.at(stream);
} catch (const std::out_of_range &e) {
return 4; // default stream data max size
}
}
Streams::stream_datas_t Streams::GetStreamDatas(const Stream &stream) {
std::unique_lock<std::mutex> lock(mtx_);
if (!HasStreamDatas(stream)) {
LOG(WARNING) << "There are no stream datas of " << stream
<< ". Did you call WaitForStreams() before this?";
return {};
}
auto datas = stream_datas_map_.at(stream);
stream_datas_map_[stream].clear();
return datas;
}
Streams::stream_data_t Streams::GetLatestStreamData(const Stream &stream) {
std::unique_lock<std::mutex> lock(mtx_);
if (!HasStreamDatas(stream)) {
LOG(WARNING) << "There are no stream datas of " << stream
<< ". Did you call WaitForStreams() before this?";
return {};
}
auto data = stream_datas_map_.at(stream).back();
stream_datas_map_[stream].clear();
return data;
}
const Streams::stream_datas_t &Streams::stream_datas(const Stream &stream) {
std::unique_lock<std::mutex> lock(mtx_);
try {
return stream_datas_map_.at(stream);
} catch (const std::out_of_range &e) {
// Add empty vector of this stream key
stream_datas_map_[stream] = {};
return stream_datas_map_.at(stream);
}
}
bool Streams::IsStreamCapability(const Capabilities &capability) const {
return std::find(
stream_capabilities_.begin(), stream_capabilities_.end(),
capability) != stream_capabilities_.end();
}
bool Streams::HasStreamConfigRequest(const Capabilities &capability) const {
return stream_config_requests_.find(capability) !=
stream_config_requests_.end();
}
const StreamRequest &Streams::GetStreamConfigRequest(
const Capabilities &capability) const {
return stream_config_requests_.at(capability);
}
bool Streams::HasStreamDatas(const Stream &stream) const {
return stream_datas_map_.find(stream) != stream_datas_map_.end() &&
!stream_datas_map_.at(stream).empty();
}
void Streams::AllocStreamData(
const Stream &stream, const StreamRequest &request) {
AllocStreamData(stream, request, request.format);
}
void Streams::AllocStreamData(
const Stream &stream, const StreamRequest &request, const Format &format) {
stream_data_t data;
if (HasStreamDatas(stream)) {
    // If the number of cached datas reaches the limit, drop the oldest one.
if (stream_datas_map_.at(stream).size() == GetStreamDataMaxSize(stream)) {
auto &&datas = stream_datas_map_[stream];
// reuse the dropped data
data.img = datas.front().img;
data.frame = datas.front().frame;
data.frame_id = 0;
datas.erase(datas.begin());
VLOG(2) << "Stream data of " << stream << " is dropped as out of limits";
}
}
if (stream == Stream::LEFT || stream == Stream::RIGHT) {
if (!data.img) {
data.img = std::make_shared<ImgData>();
}
} else {
data.img = nullptr;
}
if (!data.frame) {
data.frame = std::make_shared<frame_t>(
request.width, request.height, format, nullptr);
}
data.frame_id = 0;
stream_datas_map_[stream].push_back(data);
}
void Streams::DiscardStreamData(const Stream &stream) {
  // Must discard after alloc, otherwise .at() would throw std::out_of_range
  // when the key does not exist yet.
if (stream_datas_map_.at(stream).size() > 0) {
auto &&datas = stream_datas_map_[stream];
datas.pop_back();
} else {
VLOG(2) << "Stream data of " << stream << " is empty, could not discard";
}
}
bool Streams::HasKeyStreamDatas() const {
for (auto &&s : key_streams_) {
if (!HasStreamDatas(s))
return false;
}
return true;
}
MYNTEYE_END_NAMESPACE

View File

@@ -0,0 +1,93 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_DEVICE_STREAMS_H_
#define MYNTEYE_DEVICE_STREAMS_H_
#pragma once
#include <condition_variable>
#include <functional>
#include <map>
#include <mutex>
#include <vector>
#include "mynteye/mynteye.h"
#include "mynteye/types.h"
#include "mynteye/device/callbacks.h"
MYNTEYE_BEGIN_NAMESPACE
class Streams {
public:
using frame_t = device::Frame;
using stream_data_t = device::StreamData;
using stream_datas_t = std::vector<stream_data_t>;
using unpack_img_data_t = std::function<bool(
const void *data, const StreamRequest &request, ImgData *img)>;
using unpack_img_pixels_t = std::function<bool(
const void *data, const StreamRequest &request, frame_t *frame)>;
explicit Streams(const std::vector<Stream> key_streams);
~Streams();
void ConfigStream(
const Capabilities &capability, const StreamRequest &request);
bool PushStream(const Capabilities &capability, const void *data);
void WaitForStreams();
void ConfigStreamLimits(const Stream &stream, std::size_t max_data_size);
std::size_t GetStreamDataMaxSize(const Stream &stream) const;
stream_datas_t GetStreamDatas(const Stream &stream);
stream_data_t GetLatestStreamData(const Stream &stream);
const stream_datas_t &stream_datas(const Stream &stream);
private:
bool IsStreamCapability(const Capabilities &capability) const;
bool HasStreamConfigRequest(const Capabilities &capability) const;
const StreamRequest &GetStreamConfigRequest(
const Capabilities &capability) const;
bool HasStreamDatas(const Stream &stream) const;
void AllocStreamData(const Stream &stream, const StreamRequest &request);
void AllocStreamData(
const Stream &stream, const StreamRequest &request, const Format &format);
void DiscardStreamData(const Stream &stream);
bool HasKeyStreamDatas() const;
std::vector<Stream> key_streams_;
std::vector<Capabilities> stream_capabilities_;
std::map<Capabilities, StreamRequest> stream_config_requests_;
std::map<Stream, unpack_img_data_t> unpack_img_data_map_;
std::map<Stream, unpack_img_pixels_t> unpack_img_pixels_map_;
std::map<Stream, std::size_t> stream_limits_map_;
std::map<Stream, stream_datas_t> stream_datas_map_;
std::mutex mtx_;
std::condition_variable cv_;
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_DEVICE_STREAMS_H_

61
src/mynteye/device/types.cc Normal file
View File

@@ -0,0 +1,61 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/device/types.h"
#include <algorithm>
#include <iomanip>
#include <sstream>
#include "mynteye/logger.h"
MYNTEYE_BEGIN_NAMESPACE
std::string Version::to_string() const {
std::stringstream s;
s << static_cast<int>(major_) << "." << static_cast<int>(minor_);
return s.str();
}
std::vector<std::string> Version::split(const std::string &s) {
std::vector<std::string> result;
auto e = s.end();
auto i = s.begin();
while (i != e) {
i = std::find_if_not(i, e, [](char c) { return c == '.'; });
if (i == e)
break;
auto j = std::find(i, e, '.');
result.emplace_back(i, j);
i = j;
}
return result;
}
Version::value_t Version::parse_part(const std::string &name, size_t part) {
return std::stoi(split(name)[part]);
}
std::string Type::to_string() const {
std::stringstream s;
s << std::hex << std::uppercase << std::setfill('0') << std::setw(2)
<< vendor_ << std::setfill('0') << std::setw(2) << product_;
return s.str();
}
Type::value_t Type::parse_part(
const std::string &name, size_t pos, size_t count) {
return std::stoi(name.substr(pos, count), 0, 16);
}
MYNTEYE_END_NAMESPACE
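A short usage sketch for the Version and Type helpers above, assuming the SDK namespace is mynteye and the headers are on the include path:

// Parse, compare and print version/type values (assumptions noted above).
#include <iostream>
#include "mynteye/device/types.h"

int main() {
  mynteye::Version fw("2.4");       // parsed via split() / parse_part()
  mynteye::Version required(2, 0);
  std::cout << "firmware " << fw.to_string()
            << (fw >= required ? " is" : " is not")
            << " at least " << required.to_string() << std::endl;

  mynteye::Type lens("0001");       // vendor 0x00, product 0x01
  std::cout << "lens type: " << lens.to_string() << std::endl;  // "0001"
  return 0;
}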

296
src/mynteye/device/types.h Normal file
View File

@@ -0,0 +1,296 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_DEVICE_TYPES_H_
#define MYNTEYE_DEVICE_TYPES_H_
#pragma once
#include <cstdint>
#include <array>
#include <bitset>
#include <string>
#include <vector>
#include "mynteye/mynteye.h"
MYNTEYE_BEGIN_NAMESPACE
#define MYNTEYE_PROPERTY(TYPE, NAME) \
public: \
void set_##NAME(TYPE NAME) { \
NAME##_ = NAME; \
} \
TYPE NAME() const { \
return NAME##_; \
} \
\
private: \
TYPE NAME##_;
/**
* Version.
*/
class MYNTEYE_API Version {
public:
using size_t = std::size_t;
using value_t = std::uint8_t;
Version() = default;
Version(value_t major, value_t minor) : major_(major), minor_(minor) {}
explicit Version(const std::string &name)
: major_(parse_part(name, 0)), minor_(parse_part(name, 1)) {}
virtual ~Version() {}
bool operator==(const Version &other) const {
return major_ == other.major_ && minor_ == other.minor_;
}
bool operator<=(const Version &other) const {
if (major_ < other.major_)
return true;
if (major_ > other.major_)
return false;
return minor_ <= other.minor_;
}
bool operator!=(const Version &other) const {
return !(*this == other);
}
bool operator<(const Version &other) const {
return !(*this == other) && (*this <= other);
}
bool operator>(const Version &other) const {
return !(*this <= other);
}
bool operator>=(const Version &other) const {
return (*this == other) || (*this > other);
}
bool is_between(const Version &from, const Version &until) {
return (from <= *this) && (*this <= until);
}
std::string to_string() const;
static std::vector<std::string> split(const std::string &s);
static value_t parse_part(const std::string &name, size_t part);
MYNTEYE_PROPERTY(value_t, major)
MYNTEYE_PROPERTY(value_t, minor)
};
/**
* Hardware version.
*/
class MYNTEYE_API HardwareVersion : public Version {
public:
using flag_t = std::bitset<8>;
HardwareVersion() = default;
HardwareVersion(value_t major, value_t minor, value_t flag = 0)
: Version(major, minor), flag_(flag) {}
explicit HardwareVersion(const std::string &name, value_t flag = 0)
: Version(parse_part(name, 0), parse_part(name, 1)), flag_(flag) {}
MYNTEYE_PROPERTY(flag_t, flag)
};
/**
* Type.
*/
class MYNTEYE_API Type {
public:
using size_t = std::size_t;
using value_t = std::uint16_t;
Type() = default;
Type(value_t vendor, value_t product) : vendor_(vendor), product_(product) {}
explicit Type(const std::string &name)
: vendor_(parse_part(name, 0, 2)), product_(parse_part(name, 2, 2)) {}
virtual ~Type() {}
std::string to_string() const;
static value_t parse_part(const std::string &name, size_t pos, size_t count);
MYNTEYE_PROPERTY(value_t, vendor)
MYNTEYE_PROPERTY(value_t, product)
};
/**
* @ingroup datatypes
 * Device information.
*/
struct MYNTEYE_API DeviceInfo {
std::string name;
std::string serial_number;
Version firmware_version;
HardwareVersion hardware_version;
Version spec_version;
Type lens_type;
Type imu_type;
std::uint16_t nominal_baseline;
};
/**
* @ingroup datatypes
* Image packet.
*/
#pragma pack(push, 1)
struct ImagePacket {
std::uint8_t header;
std::uint8_t size;
std::uint16_t frame_id;
std::uint32_t timestamp;
std::uint16_t exposure_time;
std::uint8_t checksum;
ImagePacket() = default;
explicit ImagePacket(std::uint8_t *data) {
from_data(data);
}
void from_data(std::uint8_t *data) {
header = *data;
size = *(data + 1);
frame_id = (*(data + 2) << 8) | *(data + 3);
timestamp = (*(data + 4) << 24) | (*(data + 5) << 16) | (*(data + 6) << 8) |
*(data + 7);
exposure_time = (*(data + 8) << 8) | *(data + 9);
checksum = *(data + 10);
}
};
#pragma pack(pop)
/**
* @ingroup datatypes
* Imu request packet.
*/
#pragma pack(push, 1)
struct ImuReqPacket {
std::uint8_t header;
std::uint32_t serial_number;
ImuReqPacket() = default;
explicit ImuReqPacket(std::uint32_t serial_number)
: ImuReqPacket(0x5A, serial_number) {}
ImuReqPacket(std::uint8_t header, std::uint32_t serial_number)
: header(header), serial_number(serial_number) {}
std::array<std::uint8_t, 5> to_data() const {
return {{header, static_cast<std::uint8_t>((serial_number >> 24) & 0xFF),
static_cast<std::uint8_t>((serial_number >> 16) & 0xFF),
static_cast<std::uint8_t>((serial_number >> 8) & 0xFF),
static_cast<std::uint8_t>(serial_number & 0xFF)}};
}
};
#pragma pack(pop)
/**
* @ingroup datatypes
* Imu segment.
*/
#pragma pack(push, 1)
struct ImuSegment {
std::int16_t offset;
std::uint16_t frame_id;
std::int16_t accel[3];
std::int16_t temperature;
std::int16_t gyro[3];
ImuSegment() = default;
explicit ImuSegment(std::uint8_t *data) {
from_data(data);
}
void from_data(std::uint8_t *data) {
offset = (*(data) << 8) | *(data + 1);
frame_id = (*(data + 2) << 8) | *(data + 3);
accel[0] = (*(data + 4) << 8) | *(data + 5);
accel[1] = (*(data + 6) << 8) | *(data + 7);
accel[2] = (*(data + 8) << 8) | *(data + 9);
temperature = (*(data + 10) << 8) | *(data + 11);
gyro[0] = (*(data + 12) << 8) | *(data + 13);
gyro[1] = (*(data + 14) << 8) | *(data + 15);
gyro[2] = (*(data + 16) << 8) | *(data + 17);
}
};
#pragma pack(pop)
/**
* @ingroup datatypes
* Imu packet.
*/
#pragma pack(push, 1)
struct ImuPacket {
std::uint32_t serial_number;
std::uint32_t timestamp;
std::uint8_t count;
std::vector<ImuSegment> segments;
ImuPacket() = default;
explicit ImuPacket(std::uint8_t *data) {
from_data(data);
}
void from_data(std::uint8_t *data) {
serial_number = (*(data) << 24) | (*(data + 1) << 16) | (*(data + 2) << 8) |
*(data + 3);
timestamp = (*(data + 4) << 24) | (*(data + 5) << 16) | (*(data + 6) << 8) |
*(data + 7);
count = *(data + 8);
std::size_t seg_n = sizeof(ImuSegment); // 18
for (std::size_t i = 0; i < count; i++) {
segments.push_back(ImuSegment(data + 9 + (seg_n * i)));
}
}
};
#pragma pack(pop)
/**
* @ingroup datatypes
* Imu response packet.
*/
#pragma pack(push, 1)
struct ImuResPacket {
std::uint8_t header;
std::uint8_t state;
std::uint16_t size;
std::vector<ImuPacket> packets;
std::uint8_t checksum;
ImuResPacket() = default;
explicit ImuResPacket(std::uint8_t *data) {
from_data(data);
}
void from_data(std::uint8_t *data) {
header = *data;
state = *(data + 1);
size = (*(data + 2) << 8) | *(data + 3);
std::size_t seg_n = sizeof(ImuSegment); // 18
for (std::size_t i = 4; i < size;) {
ImuPacket packet(data + i);
packets.push_back(packet);
i += 9 + (packet.count * seg_n);
}
checksum = *(data + 4 + size);
}
};
#pragma pack(pop)
#undef MYNTEYE_PROPERTY
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_DEVICE_TYPES_H_
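The packet structs above unpack big-endian fields byte by byte. A sketch decoding an ImagePacket from a raw 11-byte buffer, assuming the SDK namespace is mynteye:

// Decode an ImagePacket the same way ImagePacket::from_data() does.
#include <cstdint>
#include <iostream>
#include "mynteye/device/types.h"

int main() {
  std::uint8_t raw[11] = {
      0x3B,                    // header
      0x08,                    // size
      0x00, 0x2A,              // frame_id = 42 (big-endian)
      0x00, 0x00, 0x03, 0xE8,  // timestamp = 1000
      0x00, 0x64,              // exposure_time = 100
      0xFF};                   // checksum
  mynteye::ImagePacket packet(raw);
  std::cout << "frame_id: " << packet.frame_id
            << ", timestamp: " << packet.timestamp
            << ", exposure: " << packet.exposure_time << std::endl;
  return 0;
}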

115
src/mynteye/device/utils.cc Normal file
View File

@@ -0,0 +1,115 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/device/utils.h"
#include "mynteye/logger.h"
#include "mynteye/device/context.h"
#include "mynteye/device/device.h"
MYNTEYE_BEGIN_NAMESPACE
namespace device {
std::shared_ptr<Device> select() {
LOG(INFO) << "Detecting MYNT EYE devices";
Context context;
auto &&devices = context.devices();
size_t n = devices.size();
if (n <= 0) {
LOG(ERROR) << "No MYNT EYE devices :(";
return nullptr;
}
LOG(INFO) << "MYNT EYE devices:";
for (size_t i = 0; i < n; i++) {
auto &&device = devices[i];
LOG(INFO) << " index: " << i
<< ", name: " << device->GetInfo(Info::DEVICE_NAME)
<< ", sn: " << device->GetInfo(Info::SERIAL_NUMBER);
}
std::shared_ptr<Device> device = nullptr;
if (n <= 1) {
device = devices[0];
LOG(INFO) << "Only one MYNT EYE device, select index: 0";
} else {
while (true) {
size_t i;
LOG(INFO) << "There are " << n << " MYNT EYE devices, select index: ";
std::cin >> i;
if (i >= n) {
LOG(WARNING) << "Index out of range :(";
continue;
}
device = devices[i];
break;
}
}
return device;
}
} // namespace device
namespace utils {
float get_real_exposure_time(
std::int32_t frame_rate, std::uint16_t exposure_time) {
float real_max = 0;
  switch (frame_rate) {
    case 10:
    case 15:
    case 20:
    case 25:
    case 30:
    case 35:
    case 40:
    case 45:
      real_max = 18;
      break;
    case 50:
      real_max = 17;
      break;
    case 55:
      real_max = 16.325;
      break;
    case 60:
      real_max = 15;
      break;
    default:
      LOG(ERROR) << "Invalid frame rate: " << frame_rate;
      return exposure_time;
  }
return exposure_time * real_max / 480.f;
}
} // namespace utils
MYNTEYE_END_NAMESPACE
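get_real_exposure_time() scales the raw exposure value by real_max / 480, where real_max depends on the frame rate (18 for 10-45 fps, 17 at 50, 16.325 at 55, 15 at 60). A usage sketch, assuming the SDK namespace is mynteye:

// Convert a raw exposure value for a given frame rate.
#include <iostream>
#include "mynteye/device/utils.h"

int main() {
  // At 30 fps the scale is 18/480, so a raw value of 240 maps to 9.
  float real = mynteye::utils::get_real_exposure_time(30, 240);
  std::cout << "real exposure time: " << real << std::endl;  // 9
  return 0;
}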

41
src/mynteye/miniglog.cc Normal file
View File

@@ -0,0 +1,41 @@
// Ceres Solver - A fast non-linear least squares minimizer
// Copyright 2012 Google Inc. All rights reserved.
// http://code.google.com/p/ceres-solver/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
// * Neither the name of Google Inc. nor the names of its contributors may be
// used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
// Author: keir@google.com (Keir Mierle)
#include "mynteye/miniglog.h"
namespace google {
// This is the set of log sinks. This must be in a separate library to ensure
// that there is only one instance of this across the entire program.
std::set<google::LogSink *> log_sinks_global;
int log_severity_global(INFO);
} // namespace google

View File

@@ -0,0 +1,3 @@
miniglog:
* https://github.com/arpg/miniglog
* https://github.com/tzutalin/miniglog

246
src/mynteye/types.cc Normal file
View File

@@ -0,0 +1,246 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/types.h"
#include <iomanip>
#include <limits>
#include "mynteye/logger.h"
#define FULL_PRECISION \
std::fixed << std::setprecision(std::numeric_limits<double>::max_digits10)
MYNTEYE_BEGIN_NAMESPACE
const char *to_string(const Model &value) {
#define CASE(X) \
case Model::X: \
return "Model::" #X;
switch (value) {
CASE(STANDARD)
default:
CHECK(is_valid(value));
return "Model::UNKNOWN";
}
#undef CASE
}
const char *to_string(const Stream &value) {
#define CASE(X) \
case Stream::X: \
return "Stream::" #X;
switch (value) {
CASE(LEFT)
CASE(RIGHT)
CASE(LEFT_RECTIFIED)
CASE(RIGHT_RECTIFIED)
CASE(DISPARITY)
CASE(DISPARITY_NORMALIZED)
CASE(DEPTH)
CASE(POINTS)
default:
CHECK(is_valid(value));
return "Stream::UNKNOWN";
}
#undef CASE
}
const char *to_string(const Capabilities &value) {
#define CASE(X) \
case Capabilities::X: \
return "Capabilities::" #X;
switch (value) {
CASE(STEREO)
CASE(COLOR)
CASE(DEPTH)
CASE(POINTS)
CASE(FISHEYE)
CASE(INFRARED)
CASE(INFRARED2)
CASE(IMU)
default:
CHECK(is_valid(value));
return "Capabilities::UNKNOWN";
}
#undef CASE
}
const char *to_string(const Info &value) {
#define CASE(X) \
case Info::X: \
return "Info::" #X;
switch (value) {
CASE(DEVICE_NAME)
CASE(SERIAL_NUMBER)
CASE(FIRMWARE_VERSION)
CASE(HARDWARE_VERSION)
CASE(SPEC_VERSION)
CASE(LENS_TYPE)
CASE(IMU_TYPE)
CASE(NOMINAL_BASELINE)
default:
CHECK(is_valid(value));
return "Info::UNKNOWN";
}
#undef CASE
}
const char *to_string(const Option &value) {
#define CASE(X) \
case Option::X: \
return "Option::" #X;
switch (value) {
CASE(GAIN)
CASE(BRIGHTNESS)
CASE(CONTRAST)
CASE(FRAME_RATE)
CASE(IMU_FREQUENCY)
CASE(EXPOSURE_MODE)
CASE(MAX_GAIN)
CASE(MAX_EXPOSURE_TIME)
CASE(DESIRED_BRIGHTNESS)
CASE(IR_CONTROL)
CASE(HDR_MODE)
CASE(ZERO_DRIFT_CALIBRATION)
CASE(ERASE_CHIP)
default:
CHECK(is_valid(value));
return "Option::UNKNOWN";
}
#undef CASE
}
const char *to_string(const Source &value) {
#define CASE(X) \
case Source::X: \
return "Source::" #X;
switch (value) {
CASE(VIDEO_STREAMING)
CASE(MOTION_TRACKING)
CASE(ALL)
default:
return "Source::UNKNOWN";
}
#undef CASE
}
const char *to_string(const AddOns &value) {
#define CASE(X) \
case AddOns::X: \
return "AddOns::" #X;
switch (value) {
CASE(INFRARED)
CASE(INFRARED2)
default:
return "AddOns::UNKNOWN";
}
#undef CASE
}
const char *to_string(const Format &value) {
#define CASE(X) \
case Format::X: \
return "Format::" #X;
switch (value) {
CASE(GREY)
CASE(YUYV)
default:
return "Format::UNKNOWN";
}
#undef CASE
}
std::size_t bytes_per_pixel(const Format &value) {
switch (value) {
case Format::GREY:
return 1;
case Format::YUYV:
return 2;
default:
LOG(FATAL) << "Unknown format";
}
}
std::ostream &operator<<(std::ostream &os, const StreamRequest &request) {
return os << "width: " << request.width << ", height: " << request.height
<< ", format: " << request.format << ", fps: " << request.fps;
}
std::ostream &operator<<(std::ostream &os, const Intrinsics &in) {
os << FULL_PRECISION << "width: " << in.width << ", height: " << in.height
<< ", fx: " << in.fx << ", fy: " << in.fy << ", cx: " << in.cx
<< ", cy: " << in.cy << ", model: " << static_cast<int>(in.model)
<< ", coeffs: [";
for (int i = 0; i <= 3; i++)
os << in.coeffs[i] << ", ";
return os << in.coeffs[4] << "]";
}
std::ostream &operator<<(std::ostream &os, const ImuIntrinsics &in) {
os << FULL_PRECISION << "scale: [";
for (int i = 0; i <= 2; i++)
os << in.scale[0][i] << ", ";
for (int i = 0; i <= 2; i++)
os << in.scale[1][i] << ", ";
for (int i = 0; i <= 1; i++)
os << in.scale[2][i] << ", ";
os << in.scale[2][2] << "]";
os << ", drift: [";
for (int i = 0; i <= 1; i++)
os << in.drift[i] << ", ";
os << in.drift[2] << "]";
os << ", noise: [";
for (int i = 0; i <= 1; i++)
os << in.noise[i] << ", ";
os << in.noise[2] << "]";
os << ", bias: [";
for (int i = 0; i <= 1; i++)
os << in.bias[i] << ", ";
os << in.bias[2] << "]";
return os;
}
std::ostream &operator<<(std::ostream &os, const MotionIntrinsics &in) {
return os << FULL_PRECISION << "accel: {" << in.accel << "}, gyro: {"
<< in.gyro << "}";
}
std::ostream &operator<<(std::ostream &os, const Extrinsics &ex) {
os << FULL_PRECISION << "rotation: [";
for (int i = 0; i <= 2; i++)
os << ex.rotation[0][i] << ", ";
for (int i = 0; i <= 2; i++)
os << ex.rotation[1][i] << ", ";
for (int i = 0; i <= 1; i++)
os << ex.rotation[2][i] << ", ";
os << ex.rotation[2][2] << "]";
os << ", translation: [";
for (int i = 0; i <= 1; i++)
os << ex.translation[i] << ", ";
os << ex.translation[2] << "]";
return os;
}
std::ostream &operator<<(std::ostream &os, const OptionInfo &info) {
return os << "min: " << info.min << ", max: " << info.max
<< ", def: " << info.def;
}
MYNTEYE_END_NAMESPACE
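A sketch exercising the to_string() overloads and bytes_per_pixel() above, assuming the SDK namespace is mynteye:

// Print enum names and the per-pixel size of a format.
#include <iostream>
#include "mynteye/types.h"

int main() {
  std::cout << mynteye::to_string(mynteye::Stream::LEFT) << std::endl;  // Stream::LEFT
  std::cout << mynteye::to_string(mynteye::Format::YUYV) << std::endl;  // Format::YUYV
  std::cout << "bytes per pixel: "
            << mynteye::bytes_per_pixel(mynteye::Format::YUYV) << std::endl;  // 2
  return 0;
}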

72
src/mynteye/util/files.cc Normal file
View File

@@ -0,0 +1,72 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/util/files.h"
#include "mynteye/logger.h"
#if defined(MYNTEYE_OS_WIN) && !defined(MYNTEYE_OS_MINGW) && \
!defined(MYNTEYE_OS_CYGWIN)
#include <direct.h>
#else
#include <sys/stat.h>
#endif
#include "mynteye/util/strings.h"
MYNTEYE_BEGIN_NAMESPACE
namespace files {
bool _mkdir(const std::string &path) {
#if defined(MYNTEYE_OS_MINGW) || defined(MYNTEYE_OS_CYGWIN)
const int status = ::mkdir(path.c_str());
#elif defined(MYNTEYE_OS_WIN)
const int status = ::_mkdir(path.c_str());
#else
const int status =
::mkdir(path.c_str(), S_IRWXU | S_IRWXG | S_IROTH | S_IXOTH);
#endif
if (status != 0 && errno != EEXIST) {
VLOG(2) << "Create directory failed (status " << status
<< "), path: " << path;
return false;
}
if (errno == EEXIST) {
VLOG(2) << "Create directory needless (already exist), path: " << path;
return true;
} else {
VLOG(2) << "Create directory success, path: " << path;
return true;
}
}
bool mkdir(const std::string &path) {
auto &&dirs = strings::split(path, MYNTEYE_OS_SEP);
auto &&size = dirs.size();
if (size <= 0)
return false;
std::string p{dirs[0]};
if (!_mkdir(p))
return false;
for (std::size_t i = 1; i < size; i++) {
p.append(MYNTEYE_OS_SEP).append(dirs[i]);
if (!_mkdir(p))
return false;
}
return true;
}
} // namespace files
MYNTEYE_END_NAMESPACE
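files::mkdir() splits the path on MYNTEYE_OS_SEP and creates each component in turn, so nested directories can be made in one call. A usage sketch, assuming the SDK namespace is mynteye and that the MYNTEYE_OS_SEP macro is visible through the included header:

// Create "output", "output/logs" and "output/logs/run1" if they are missing.
#include <iostream>
#include "mynteye/util/files.h"

int main() {
  bool ok = mynteye::files::mkdir(
      "output" MYNTEYE_OS_SEP "logs" MYNTEYE_OS_SEP "run1");
  std::cout << (ok ? "created" : "failed") << std::endl;
  return 0;
}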

112
src/mynteye/util/strings.cc Normal file
View File

@@ -0,0 +1,112 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/util/strings.h"
#include <algorithm>
#include <cctype>
#include <exception>
#include <locale>
MYNTEYE_BEGIN_NAMESPACE
namespace strings {
namespace {
// The most elegant way to iterate the words of a string
// https://stackoverflow.com/questions/236129/the-most-elegant-way-to-iterate-the-words-of-a-string
template <class ContainerT>
void tokenize(
const std::string &str, ContainerT &tokens, // NOLINT
const std::string &delimiters = " ", bool trimEmpty = false) {
std::string::size_type pos, lastPos = 0, length = str.length();
using value_type = typename ContainerT::value_type;
using size_type = typename ContainerT::size_type;
while (lastPos < length + 1) {
pos = str.find_first_of(delimiters, lastPos);
if (pos == std::string::npos) {
pos = length;
}
if (pos != lastPos || !trimEmpty) {
tokens.push_back(
value_type(str.data() + lastPos, (size_type)pos - lastPos));
}
lastPos = pos + 1;
}
}
} // namespace
int hex2int(const std::string &text) {
try {
return std::stoi(text, nullptr, 16);
} catch (const std::exception &e) {
    throw strings_error("strings conversion error");
}
}
bool starts_with(const std::string &text, const std::string &prefix) {
return text.compare(0, prefix.length(), prefix) == 0;
}
bool ends_with(const std::string &text, const std::string &suffix) {
if (suffix.length() > text.length())
return false;
return text.compare(
text.length() - suffix.length(), suffix.length(), suffix) == 0;
}
std::vector<std::string> split(
const std::string &text, const std::string &delimiters) {
std::vector<std::string> tokens;
tokenize(text, tokens, delimiters);
return tokens;
}
// What's the best way to trim std::string?
// https://stackoverflow.com/questions/216823/whats-the-best-way-to-trim-stdstring
void ltrim(std::string &s) { // NOLINT
s.erase(s.begin(), std::find_if(s.begin(), s.end(), [](int ch) {
return !std::isspace(ch);
}));
}
void rtrim(std::string &s) { // NOLINT
s.erase(
std::find_if(
s.rbegin(), s.rend(), [](int ch) { return !std::isspace(ch); })
.base(),
s.end());
}
void trim(std::string &s) { // NOLINT
ltrim(s);
rtrim(s);
}
std::string trim_copy(const std::string &text) {
std::string s = text;
trim(s);
return s;
}
} // namespace strings
MYNTEYE_END_NAMESPACE
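A quick sketch of the string helpers above, assuming the SDK namespace is mynteye:

// split(), trim_copy() and hex2int() in use.
#include <iostream>
#include "mynteye/util/strings.h"

int main() {
  for (auto &&part : mynteye::strings::split("a/b/c", "/"))
    std::cout << part << " ";                                          // a b c
  std::cout << std::endl;
  std::cout << mynteye::strings::trim_copy("  hello  ") << std::endl;  // "hello"
  std::cout << mynteye::strings::hex2int("1A") << std::endl;           // 26
  return 0;
}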

10
src/mynteye/uvc/README.md Normal file
View File

@@ -0,0 +1,10 @@
## `uvc-v4l2.cc`
* [Linux Media Subsystem Documentation](https://www.kernel.org/doc/html/latest/media/index.html)
* [The Linux USB Video Class (UVC) driver](https://www.kernel.org/doc/html/latest/media/v4l-drivers/uvcvideo.html)
## `uvc-wmf.cc`
* [Media Foundation](https://msdn.microsoft.com/en-us/library/ms694197(VS.85).aspx)
* [USB Video Class Driver](https://docs.microsoft.com/en-us/windows-hardware/drivers/stream/usb-video-class-driver)

View File

@@ -0,0 +1,200 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/uvc/uvc.h"
#include <libuvc/libuvc.h>
#include "mynteye/logger.h"
// #define ENABLE_DEBUG_SPAM
MYNTEYE_BEGIN_NAMESPACE
namespace uvc {
static void check(const char *call, uvc_error_t status) {
LOG_IF(FATAL, status < 0)
<< call << "(...) returned " << uvc_strerror(status);
}
#define CALL_UVC(name, ...) check(#name, name(__VA_ARGS__))
struct context {
uvc_context_t *ctx;
context() : ctx(nullptr) {
VLOG(2) << __func__;
CALL_UVC(uvc_init, &ctx, nullptr);
}
~context() {
VLOG(2) << __func__;
if (ctx)
uvc_exit(ctx);
}
};
struct device {
const std::shared_ptr<context> parent;
uvc_device_t *uvcdevice = nullptr;
uvc_device_handle_t *handle = nullptr;
int vid, pid;
device(std::shared_ptr<context> parent, uvc_device_t *uvcdevice)
: parent(parent), uvcdevice(uvcdevice) {
VLOG(2) << __func__;
open();
uvc_device_descriptor_t *desc;
CALL_UVC(uvc_get_device_descriptor, uvcdevice, &desc);
vid = desc->idVendor;
pid = desc->idProduct;
uvc_free_device_descriptor(desc);
}
~device() {
VLOG(2) << __func__;
if (handle)
uvc_close(handle);
if (uvcdevice)
uvc_unref_device(uvcdevice);
}
void open() {
if (!handle)
CALL_UVC(uvc_open, uvcdevice, &handle);
}
};
std::shared_ptr<context> create_context() {
return std::make_shared<context>();
}
std::vector<std::shared_ptr<device>> query_devices(
std::shared_ptr<context> context) {
std::vector<std::shared_ptr<device>> devices;
uvc_device_t **list;
CALL_UVC(uvc_get_device_list, context->ctx, &list);
for (auto it = list; *it; ++it) {
try {
auto dev = std::make_shared<device>(context, *it);
devices.push_back(dev);
} catch (std::runtime_error &e) {
LOG(WARNING) << "usb:" << static_cast<int>(uvc_get_bus_number(*it)) << ':'
<< static_cast<int>(uvc_get_device_address(*it)) << ": "
<< e.what();
}
}
uvc_free_device_list(list, 1);
return devices;
}
int get_vendor_id(const device &device) {
return device.vid;
}
int get_product_id(const device &device) {
return device.pid;
}
std::string get_name(const device &device) {
// TODO(JohnZhao)
MYNTEYE_UNUSED(device)
return "";
}
std::string get_video_name(const device &device) {
// TODO(JohnZhao)
MYNTEYE_UNUSED(device)
return "";
}
bool pu_control_range(
const device &device, Option option, int32_t *min, int32_t *max,
int32_t *def) {
// TODO(JohnZhao)
MYNTEYE_UNUSED(device)
MYNTEYE_UNUSED(option)
MYNTEYE_UNUSED(min)
MYNTEYE_UNUSED(max)
MYNTEYE_UNUSED(def)
return false;
}
bool pu_control_query(
const device &device, Option option, pu_query query, int32_t *value) {
// TODO(JohnZhao)
MYNTEYE_UNUSED(device)
MYNTEYE_UNUSED(option)
MYNTEYE_UNUSED(query)
MYNTEYE_UNUSED(value)
return false;
}
bool xu_control_range(
const device &device, const xu &xu, uint8_t selector, uint8_t id, int32_t *min,
int32_t *max, int32_t *def) {
// TODO(JohnZhao)
MYNTEYE_UNUSED(device)
MYNTEYE_UNUSED(xu)
MYNTEYE_UNUSED(selector)
MYNTEYE_UNUSED(id)
MYNTEYE_UNUSED(min)
MYNTEYE_UNUSED(max)
MYNTEYE_UNUSED(def)
return false;
}
bool xu_control_query(
const device &device, const xu &xu, uint8_t selector, xu_query query,
uint16_t size, uint8_t *data) {
// TODO(JohnZhao)
MYNTEYE_UNUSED(device)
MYNTEYE_UNUSED(xu)
MYNTEYE_UNUSED(selector)
MYNTEYE_UNUSED(query)
MYNTEYE_UNUSED(size)
MYNTEYE_UNUSED(data)
return false;
}
void set_device_mode(
device &device, int width, int height, int fourcc, int fps, // NOLINT
video_channel_callback callback) {
// TODO(JohnZhao)
MYNTEYE_UNUSED(device)
MYNTEYE_UNUSED(width)
MYNTEYE_UNUSED(height)
MYNTEYE_UNUSED(fourcc)
MYNTEYE_UNUSED(fps)
MYNTEYE_UNUSED(callback)
}
void start_streaming(device &device, int num_transfer_bufs) { // NOLINT
// TODO(JohnZhao)
MYNTEYE_UNUSED(device)
MYNTEYE_UNUSED(num_transfer_bufs)
}
void stop_streaming(device &device) { // NOLINT
// TODO(JohnZhao)
MYNTEYE_UNUSED(device)
}
} // namespace uvc
MYNTEYE_END_NAMESPACE
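A sketch enumerating cameras through the uvc wrapper above, assuming the SDK namespace is mynteye and that mynteye/uvc/uvc.h declares these functions:

// List vendor/product IDs of the attached UVC devices.
#include <iomanip>
#include <iostream>
#include "mynteye/uvc/uvc.h"

int main() {
  auto context = mynteye::uvc::create_context();
  for (auto &&device : mynteye::uvc::query_devices(context)) {
    std::cout << "vid=0x" << std::hex << std::setw(4) << std::setfill('0')
              << mynteye::uvc::get_vendor_id(*device)
              << ", pid=0x" << std::setw(4) << std::setfill('0')
              << mynteye::uvc::get_product_id(*device) << std::endl;
  }
  return 0;
}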

603
src/mynteye/uvc/uvc-v4l2.cc Executable file
View File

@@ -0,0 +1,603 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/uvc/uvc.h"
#include <dirent.h>
#include <errno.h>
#include <fcntl.h>
#include <string.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <linux/usb/video.h>
#include <linux/uvcvideo.h>
#include <linux/videodev2.h>
#include <chrono>
#include <fstream>
#include <sstream>
#include <string>
#include <thread>
#include "mynteye/logger.h"
MYNTEYE_BEGIN_NAMESPACE
namespace uvc {
#define LOG_ERROR(severity, str) \
do { \
LOG(severity) << str << " error " << errno << ", " << strerror(errno); \
} while (0)
#define NO_DATA_MAX_COUNT 200
int no_data_count = 0;
/*
class device_error : public std::exception {
public:
explicit device_error(const std::string &what_arg) noexcept
: what_message_(std::move(what_arg)) {}
explicit device_error(const char *what_arg) noexcept
: what_message_(std::move(what_arg)) {}
const char *what() const noexcept {
return what_message_.c_str();
}
private:
std::string what_message_;
};
*/
struct throw_error {
throw_error() = default;
explicit throw_error(const std::string &s) {
ss << s;
}
~throw_error() noexcept(false) {
throw std::runtime_error(ss.str());
// throw device_error(ss.str());
}
template<class T>
throw_error &operator<<(const T &val) {
ss << val;
return *this;
}
std::ostringstream ss;
};
static int xioctl(int fh, int request, void *arg) {
int r;
do {
r = ioctl(fh, request, arg);
} while (r < 0 && errno == EINTR);
return r;
}
struct buffer {
void *start;
size_t length;
};
struct context {
context() {
VLOG(2) << __func__;
}
~context() {
VLOG(2) << __func__;
}
};
struct device {
const std::shared_ptr<context> parent;
std::string dev_name; // Device name (typically of the form /dev/video*)
std::string name; // Device description name
int vid, pid, mi; // Vendor ID, product ID, and multiple interface index
int fd = -1; // File descriptor for this device
int width, height, format, fps;
video_channel_callback callback = nullptr;
bool is_capturing = false;
std::vector<buffer> buffers;
std::thread thread;
volatile bool stop = false;
device(std::shared_ptr<context> parent, const std::string &name)
: parent(parent), dev_name("/dev/" + name) {
VLOG(2) << __func__ << ": " << dev_name;
struct stat st;
if (stat(dev_name.c_str(), &st) < 0) { // file status
throw_error() << "Cannot identify '" << dev_name << "': " << errno << ", "
<< strerror(errno);
}
if (!S_ISCHR(st.st_mode)) { // character device?
throw_error() << dev_name << " is no device";
}
if (!(std::ifstream("/sys/class/video4linux/" + name + "/name") >>
this->name))
throw_error() << "Failed to read name";
std::string modalias;
if (!(std::ifstream(
"/sys/class/video4linux/" + name + "/device/modalias") >>
modalias))
throw_error() << "Failed to read modalias";
if (modalias.size() < 14 || modalias.substr(0, 5) != "usb:v" ||
modalias[9] != 'p')
throw_error() << "Not a usb format modalias";
if (!(std::istringstream(modalias.substr(5, 4)) >> std::hex >> vid))
throw_error() << "Failed to read vendor ID";
if (!(std::istringstream(modalias.substr(10, 4)) >> std::hex >> pid))
throw_error() << "Failed to read product ID";
if (!(std::ifstream(
"/sys/class/video4linux/" + name + "/device/bInterfaceNumber") >>
std::hex >> mi))
throw_error() << "Failed to read interface number";
fd = open(dev_name.c_str(), O_RDWR | O_NONBLOCK, 0);
if (fd < 0) {
throw_error() << "Cannot open '" << dev_name << "': " << errno << ", "
<< strerror(errno);
}
v4l2_capability cap;
if (xioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
if (errno == EINVAL)
throw_error() << dev_name << " is no V4L2 device";
else
throw_error() << "VIDIOC_QUERYCAP error " << errno << ", "
<< strerror(errno);
}
if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
throw_error() << dev_name + " is no video capture device";
if (!(cap.capabilities & V4L2_CAP_STREAMING))
throw_error() << dev_name + " does not support streaming I/O";
// Select video input, video standard and tune here.
v4l2_cropcap cropcap;
cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (xioctl(fd, VIDIOC_CROPCAP, &cropcap) == 0) {
v4l2_crop crop;
crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
crop.c = cropcap.defrect; // reset to default
if (xioctl(fd, VIDIOC_S_CROP, &crop) < 0) {
switch (errno) {
case EINVAL:
break; // Cropping not supported
default:
break; // Errors ignored
}
}
} else {
} // Errors ignored
}
~device() {
VLOG(2) << __func__;
stop_streaming();
no_data_count = 0;
if (fd != -1 && close(fd) < 0) {
LOG_ERROR(WARNING, "close");
}
}
bool pu_control_range(
uint32_t id, int32_t *min, int32_t *max, int32_t *def) const {
struct v4l2_queryctrl query;
query.id = id;
if (xioctl(fd, VIDIOC_QUERYCTRL, &query) < 0) {
LOG_ERROR(WARNING, "pu_control_range failed");
return false;
}
if (min)
*min = query.minimum;
if (max)
*max = query.maximum;
if (def)
*def = query.default_value;
return true;
}
bool pu_control_query(uint32_t id, int query, int32_t *value) const {
CHECK_NOTNULL(value);
struct v4l2_control control = {id, *value};
if (xioctl(fd, query, &control) < 0) {
LOG_ERROR(WARNING, "pu_control_query failed");
return false;
}
*value = control.value;
return true;
}
bool xu_control_query(
const xu &xu, uint8_t selector, uint8_t query, uint16_t size,
uint8_t *data) const {
CHECK_NOTNULL(data);
uvc_xu_control_query q = {xu.unit, selector, query, size, data};
if (xioctl(fd, UVCIOC_CTRL_QUERY, &q) < 0) {
LOG_ERROR(WARNING, "xu_control_query failed");
return false;
}
return true;
}
void set_format(
int width, int height, int fourcc, int fps,
video_channel_callback callback) {
this->width = width;
this->height = height;
this->format = fourcc;
this->fps = fps;
this->callback = callback;
}
void start_capture() {
if (is_capturing) {
LOG(WARNING) << "Start capture failed, is capturing already";
return;
}
v4l2_format fmt;
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmt.fmt.pix.width = width;
fmt.fmt.pix.height = height;
fmt.fmt.pix.pixelformat = format;
fmt.fmt.pix.field = V4L2_FIELD_NONE;
// fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
if (xioctl(fd, VIDIOC_S_FMT, &fmt) < 0)
LOG_ERROR(FATAL, "VIDIOC_S_FMT");
v4l2_streamparm parm;
parm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (xioctl(fd, VIDIOC_G_PARM, &parm) < 0)
LOG_ERROR(FATAL, "VIDIOC_G_PARM");
parm.parm.capture.timeperframe.numerator = 1;
parm.parm.capture.timeperframe.denominator = fps;
if (xioctl(fd, VIDIOC_S_PARM, &parm) < 0)
LOG_ERROR(FATAL, "VIDIOC_S_PARM");
// Init memory mapped IO
v4l2_requestbuffers req;
req.count = 24;
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory = V4L2_MEMORY_MMAP;
if (xioctl(fd, VIDIOC_REQBUFS, &req) < 0) {
if (errno == EINVAL)
LOG(FATAL) << dev_name << " does not support memory mapping";
else
LOG_ERROR(FATAL, "VIDIOC_REQBUFS");
}
if (req.count < 2) {
LOG(FATAL) << "Insufficient buffer memory on " << dev_name;
}
buffers.resize(req.count);
for (size_t i = 0; i < buffers.size(); ++i) {
v4l2_buffer buf;
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = i;
if (xioctl(fd, VIDIOC_QUERYBUF, &buf) < 0)
LOG_ERROR(FATAL, "VIDIOC_QUERYBUF");
buffers[i].length = buf.length;
buffers[i].start = mmap(
NULL, buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, fd,
buf.m.offset);
if (buffers[i].start == MAP_FAILED)
LOG_ERROR(FATAL, "mmap");
}
// Start capturing
for (size_t i = 0; i < buffers.size(); ++i) {
v4l2_buffer buf;
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = i;
if (xioctl(fd, VIDIOC_QBUF, &buf) < 0)
LOG_ERROR(FATAL, "VIDIOC_QBUF");
}
v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
for (int i = 0; i < 10; ++i) {
if (xioctl(fd, VIDIOC_STREAMON, &type) < 0) {
std::this_thread::sleep_for(std::chrono::milliseconds(100));
}
}
if (xioctl(fd, VIDIOC_STREAMON, &type) < 0)
LOG_ERROR(FATAL, "VIDIOC_STREAMON");
is_capturing = true;
}
void stop_capture() {
if (!is_capturing)
return;
    // Stop streaming
v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (xioctl(fd, VIDIOC_STREAMOFF, &type) < 0)
LOG_ERROR(WARNING, "VIDIOC_STREAMOFF");
for (size_t i = 0; i < buffers.size(); i++) {
if (munmap(buffers[i].start, buffers[i].length) < 0)
LOG_ERROR(WARNING, "munmap");
}
// Close memory mapped IO
struct v4l2_requestbuffers req;
req.count = 0;
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory = V4L2_MEMORY_MMAP;
if (xioctl(fd, VIDIOC_REQBUFS, &req) < 0) {
if (errno == EINVAL)
LOG(ERROR) << dev_name << " does not support memory mapping";
else
LOG_ERROR(WARNING, "VIDIOC_REQBUFS");
}
is_capturing = false;
}
void poll() {
fd_set fds;
FD_ZERO(&fds);
FD_SET(fd, &fds);
struct timeval tv = {0, 10000};
if (select(fd + 1, &fds, NULL, NULL, &tv) < 0) {
if (errno == EINTR)
return;
LOG_ERROR(FATAL, "select");
}
if (FD_ISSET(fd, &fds)) {
v4l2_buffer buf;
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
if (xioctl(fd, VIDIOC_DQBUF, &buf) < 0) {
if (errno == EAGAIN)
return;
LOG_ERROR(FATAL, "VIDIOC_DQBUF");
}
if (callback) {
callback(buffers[buf.index].start, [buf, this]() mutable {
if (xioctl(fd, VIDIOC_QBUF, &buf) < 0)
throw_error("VIDIOC_QBUF");
});
}
no_data_count = 0;
} else {
no_data_count++;
}
if (no_data_count > NO_DATA_MAX_COUNT) {
throw_error("v4l2 get stream time out!");
}
}
void start_streaming() {
if (!callback) {
LOG(WARNING) << __func__ << " failed: video_channel_callback is empty";
return;
}
start_capture();
thread = std::thread([this]() {
while (!stop)
poll();
});
}
void stop_streaming() {
if (thread.joinable()) {
stop = true;
thread.join();
stop = false;
stop_capture();
}
}
};
std::shared_ptr<context> create_context() {
return std::make_shared<context>();
}
std::vector<std::shared_ptr<device>> query_devices(
std::shared_ptr<context> context) {
std::vector<std::shared_ptr<device>> devices;
DIR *dir = opendir("/sys/class/video4linux");
if (!dir)
LOG(FATAL) << "Cannot access /sys/class/video4linux";
while (dirent *entry = readdir(dir)) {
std::string name = entry->d_name;
if (name == "." || name == "..")
continue;
// Resolve a pathname to ignore virtual video devices
std::string path = "/sys/class/video4linux/" + name;
char buff[PATH_MAX];
ssize_t len = ::readlink(path.c_str(), buff, sizeof(buff) - 1);
if (len != -1) {
buff[len] = '\0';
std::string real_path = std::string(buff);
if (real_path.find("virtual") != std::string::npos)
continue;
}
try {
devices.push_back(std::make_shared<device>(context, name));
} catch (const std::exception &e) {
VLOG(2) << "Not a USB video device: " << e.what();
}
}
closedir(dir);
return devices;
}
std::string get_name(const device &device) {
return device.name;
}
int get_vendor_id(const device &device) {
return device.vid;
}
int get_product_id(const device &device) {
return device.pid;
}
std::string get_video_name(const device &device) {
return device.dev_name;
}
static uint32_t get_cid(Option option) {
switch (option) {
case Option::GAIN:
return V4L2_CID_GAIN;
case Option::BRIGHTNESS:
return V4L2_CID_BRIGHTNESS;
case Option::CONTRAST:
return V4L2_CID_CONTRAST;
default:
LOG(FATAL) << "No v4l2 cid for " << option;
}
}
bool pu_control_range(
const device &device, Option option, int32_t *min, int32_t *max,
int32_t *def) {
return device.pu_control_range(get_cid(option), min, max, def);
}
bool pu_control_query(
const device &device, Option option, pu_query query, int32_t *value) {
int code;
switch (query) {
case PU_QUERY_SET:
code = VIDIOC_S_CTRL;
break;
case PU_QUERY_GET:
code = VIDIOC_G_CTRL;
break;
default:
LOG(ERROR) << "pu_control_query request code is unaccepted";
return false;
}
return device.pu_control_query(get_cid(option), code, value);
}
bool xu_control_range(
const device &device, const xu &xu, uint8_t selector, uint8_t id,
int32_t *min, int32_t *max, int32_t *def) {
bool ret = true;
std::uint8_t data[3]{static_cast<uint8_t>(id | 0x80), 0, 0};
if (!xu_control_query(device, xu, selector, XU_QUERY_SET, 3, data)) {
LOG(WARNING) << "xu_control_range query failed";
ret = false;
}
if (xu_control_query(device, xu, selector, XU_QUERY_MIN, 3, data)) {
*min = (data[1] << 8) | (data[2]);
} else {
LOG(WARNING) << "xu_control_range query min failed";
ret = false;
}
if (xu_control_query(device, xu, selector, XU_QUERY_MAX, 3, data)) {
*max = (data[1] << 8) | (data[2]);
} else {
LOG(WARNING) << "xu_control_range query max failed";
ret = false;
}
if (xu_control_query(device, xu, selector, XU_QUERY_DEF, 3, data)) {
*def = (data[1] << 8) | (data[2]);
} else {
LOG(WARNING) << "xu_control_range query def failed";
ret = false;
}
return ret;
}
bool xu_control_query(
const device &device, const xu &xu, uint8_t selector, xu_query query,
uint16_t size, uint8_t *data) {
uint8_t code;
switch (query) {
case XU_QUERY_SET:
code = UVC_SET_CUR;
break;
case XU_QUERY_GET:
code = UVC_GET_CUR;
break;
case XU_QUERY_MIN:
code = UVC_GET_MIN;
break;
case XU_QUERY_MAX:
code = UVC_GET_MAX;
break;
case XU_QUERY_DEF:
code = UVC_GET_DEF;
break;
default:
LOG(ERROR) << "xu_control_query request code is unaccepted";
return false;
}
return device.xu_control_query(xu, selector, code, size, data);
}
void set_device_mode(
device &device, int width, int height, int fourcc, int fps, // NOLINT
video_channel_callback callback) {
device.set_format(width, height, fourcc, fps, callback);
}
void start_streaming(device &device, int /*num_transfer_bufs*/) { // NOLINT
device.start_streaming();
}
void stop_streaming(device &device) { // NOLINT
device.stop_streaming();
}
} // namespace uvc
MYNTEYE_END_NAMESPACE
// Video4Linux (V4L) driver-specific documentation
// https://linuxtv.org/downloads/v4l-dvb-apis/v4l-drivers/index.html
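A sketch querying a processing-unit control through the backend above (pu_control_range and pu_control_query), assuming the SDK namespace is mynteye and that these functions and the pu_query constants are declared in mynteye/uvc/uvc.h:

// Read the brightness range and current value of the first device.
#include <cstdint>
#include <iostream>
#include "mynteye/types.h"
#include "mynteye/uvc/uvc.h"

int main() {
  auto context = mynteye::uvc::create_context();
  auto devices = mynteye::uvc::query_devices(context);
  if (devices.empty()) {
    std::cerr << "no uvc device found" << std::endl;
    return 1;
  }
  auto &&device = *devices.front();

  std::int32_t min = 0, max = 0, def = 0;
  if (mynteye::uvc::pu_control_range(
          device, mynteye::Option::BRIGHTNESS, &min, &max, &def)) {
    std::cout << "brightness range: [" << min << ", " << max
              << "], default: " << def << std::endl;
  }

  std::int32_t value = 0;
  if (mynteye::uvc::pu_control_query(
          device, mynteye::Option::BRIGHTNESS, mynteye::uvc::PU_QUERY_GET,
          &value)) {
    std::cout << "current brightness: " << value << std::endl;
  }
  return 0;
}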

782
src/mynteye/uvc/uvc-wmf.cc Normal file
View File

@@ -0,0 +1,782 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/uvc/uvc.h"
#include <windows.h>
#include <usbioctl.h>
#include <Shlwapi.h> // For QISearch, etc.
#include <mfapi.h> // For MFStartup, etc.
#include <mfidl.h> // For MF_DEVSOURCE_*, etc.
#include <mfreadwrite.h> // MFCreateSourceReaderFromMediaSource
#include <mferror.h>
#pragma comment(lib, "Shlwapi.lib")
#pragma comment(lib, "mf.lib")
#pragma comment(lib, "mfplat.lib")
#pragma comment(lib, "mfreadwrite.lib")
#pragma comment(lib, "mfuuid.lib")
#pragma comment(lib, "setupapi.lib")
#pragma comment(lib, "winusb.lib")
#include <uuids.h>
#include <vidcap.h>
#include <ksmedia.h>
#include <ksproxy.h>
#include <Cfgmgr32.h>
#pragma comment(lib, "cfgmgr32.lib")
#include <SetupAPI.h>
#include <WinUsb.h>
#include <algorithm>
#include <chrono>
#include <functional>
#include <map>
#include <regex>
#include <sstream>
#include <thread>
#include <strsafe.h>
#include "mynteye/logger.h"
#define VLOG_INFO VLOG(2)
// #define VLOG_INFO LOG(INFO)
MYNTEYE_BEGIN_NAMESPACE
namespace uvc {
const std::map<uint32_t, uint32_t> fourcc_map = {
{ 0x56595559, 0x32595559 }, // 'VYUY' => '2YUY'
{ 0x59555956, 0x59555932 } // 'YUYV' => 'YUY2'
};
struct throw_error {
throw_error() = default;
explicit throw_error(const std::string &s) {
ss << s;
}
~throw_error() noexcept(false) {
throw std::runtime_error(ss.str());
}
template<class T>
throw_error &operator<<(const T &val) {
ss << val;
return *this;
}
std::ostringstream ss;
};
static void check(const char *call, HRESULT hr) {
if (FAILED(hr)) {
throw_error() << call << "(...) returned 0x" << std::hex
<< static_cast<uint32_t>(hr);
} else {
    // VLOG_INFO << call << " SUCCEEDED";
}
}
template<class T> class com_ptr {
T *p;
void ref(T *new_p) {
if (p == new_p) return;
unref();
p = new_p;
if (p) p->AddRef();
}
void unref() {
if (p) {
p->Release();
p = nullptr;
}
}
public:
com_ptr() : p() {}
com_ptr(T *p) : com_ptr() {
ref(p);
}
com_ptr(const com_ptr &r) : com_ptr(r.p) {}
~com_ptr() {
unref();
}
operator T *() const {
return p;
}
T &operator*() const {
return *p;
}
T *operator->() const {
return p;
}
T **operator&() {
unref();
return &p;
}
com_ptr &operator=(const com_ptr &r) {
ref(r.p);
return *this;
}
};
static std::string win_to_utf(const WCHAR *s) {
int len = WideCharToMultiByte(CP_UTF8, 0, s, -1, nullptr, 0, NULL, NULL);
if (len == 0) throw_error() << "WideCharToMultiByte(...) returned 0 and GetLastError() is " << GetLastError();
std::string buffer(len - 1, ' ');
len = WideCharToMultiByte(CP_UTF8, 0, s, -1, &buffer[0], (int)buffer.size()+1, NULL, NULL);
if (len == 0) throw_error() << "WideCharToMultiByte(...) returned 0 and GetLastError() is " << GetLastError();
return buffer;
}
std::vector<std::string> tokenize(std::string string, char separator) {
std::vector<std::string> tokens;
std::string::size_type i1 = 0;
while (true) {
auto i2 = string.find(separator, i1);
if (i2 == std::string::npos) {
tokens.push_back(string.substr(i1));
return tokens;
}
tokens.push_back(string.substr(i1, i2-i1));
i1 = i2 + 1;
}
}
/*
static void print_guid(const char *call, int i, GUID guid) {
std::ostringstream ss;
ss << call << "(" << i << ") = ";
ss << "Data1: " << std::hex << guid.Data1 << ", ";
ss << "Data2: " << std::hex << guid.Data2 << ", ";
ss << "Data3: " << std::hex << guid.Data3 << ", ";
ss << "Data4: [ ";
for (int j = 0; j < 8; j++) {
ss << std::hex << (int)guid.Data4[j] << " ";
}
ss << "]";
LOG(INFO) << ss.str();
}
*/
bool parse_usb_path(int &vid, int &pid, int &mi, std::string &unique_id, const std::string &path) {
auto name = path;
std::transform(begin(name), end(name), begin(name), ::tolower);
auto tokens = tokenize(name, '#');
if (tokens.size() < 1 || tokens[0] != R"(\\?\usb)") return false; // Not a USB device
if (tokens.size() < 3) {
LOG(ERROR) << "malformed usb device path: " << name;
return false;
}
auto ids = tokenize(tokens[1], '&');
if (ids[0].size() != 8 || ids[0].substr(0,4) != "vid_" || !(std::istringstream(ids[0].substr(4,4)) >> std::hex >> vid)) {
LOG(ERROR) << "malformed vid string: " << tokens[1];
return false;
}
if (ids[1].size() != 8 || ids[1].substr(0,4) != "pid_" || !(std::istringstream(ids[1].substr(4,4)) >> std::hex >> pid)) {
LOG(ERROR) << "malformed pid string: " << tokens[1];
return false;
}
if (ids[2].size() != 5 || ids[2].substr(0,3) != "mi_" || !(std::istringstream(ids[2].substr(3,2)) >> mi)) {
LOG(ERROR) << "malformed mi string: " << tokens[1];
return false;
}
ids = tokenize(tokens[2], '&');
if (ids.size() < 2) {
LOG(ERROR) << "malformed id string: " << tokens[2];
return false;
}
unique_id = ids[1];
return true;
}
bool parse_usb_path_from_device_id(int &vid, int &pid, int &mi, std::string &unique_id, const std::string &device_id) {
auto name = device_id;
std::transform(begin(name), end(name), begin(name), ::tolower);
auto tokens = tokenize(name, '\\');
if (tokens.size() < 1 || tokens[0] != R"(usb)") return false; // Not a USB device
auto ids = tokenize(tokens[1], '&');
if (ids[0].size() != 8 || ids[0].substr(0, 4) != "vid_" || !(std::istringstream(ids[0].substr(4, 4)) >> std::hex >> vid)) {
LOG(ERROR) << "malformed vid string: " << tokens[1];
return false;
}
if (ids[1].size() != 8 || ids[1].substr(0, 4) != "pid_" || !(std::istringstream(ids[1].substr(4, 4)) >> std::hex >> pid)) {
LOG(ERROR) << "malformed pid string: " << tokens[1];
return false;
}
if (ids[2].size() != 5 || ids[2].substr(0, 3) != "mi_" || !(std::istringstream(ids[2].substr(3, 2)) >> mi)) {
LOG(ERROR) << "malformed mi string: " << tokens[1];
return false;
}
ids = tokenize(tokens[2], '&');
if (ids.size() < 2) {
LOG(ERROR) << "malformed id string: " + tokens[2];
return false;
}
unique_id = ids[1];
return true;
}
struct context {
context() {
CoInitializeEx(NULL, COINIT_APARTMENTTHREADED);
std::this_thread::sleep_for(std::chrono::milliseconds(100));
MFStartup(MF_VERSION, MFSTARTUP_NOSOCKET);
}
~context() {
MFShutdown();
CoUninitialize();
}
};
class reader_callback : public IMFSourceReaderCallback {
std::weak_ptr<device> owner; // The device holds a reference to us, so use
// weak_ptr to prevent a cycle
ULONG ref_count;
volatile bool streaming = false;
public:
reader_callback(std::weak_ptr<device> owner) : owner(owner), ref_count() {}
bool is_streaming() const {
return streaming;
}
void on_start() {
streaming = true;
}
#pragma warning( push )
#pragma warning( disable: 4838 )
// Implement IUnknown
HRESULT STDMETHODCALLTYPE QueryInterface(REFIID riid, void **ppvObject) override {
static const QITAB table[] = {QITABENT(reader_callback, IUnknown), QITABENT(reader_callback, IMFSourceReaderCallback), {0}};
return QISearch(this, table, riid, ppvObject);
}
#pragma warning( pop )
ULONG STDMETHODCALLTYPE AddRef() override { return InterlockedIncrement(&ref_count); }
ULONG STDMETHODCALLTYPE Release() override {
ULONG count = InterlockedDecrement(&ref_count);
if (count == 0) delete this;
return count;
}
// Implement IMFSourceReaderCallback
HRESULT STDMETHODCALLTYPE OnReadSample(HRESULT hrStatus, DWORD dwStreamIndex, DWORD dwStreamFlags, LONGLONG llTimestamp, IMFSample *sample) override;
HRESULT STDMETHODCALLTYPE OnFlush(DWORD dwStreamIndex) override { streaming = false; return S_OK; }
HRESULT STDMETHODCALLTYPE OnEvent(DWORD dwStreamIndex, IMFMediaEvent *pEvent) override { return S_OK; }
};
struct device {
const std::shared_ptr<context> parent;
int vid, pid;
std::string unique_id;
std::string name;
com_ptr<reader_callback> reader_callback;
com_ptr<IMFActivate> mf_activate;
com_ptr<IMFMediaSource> mf_media_source;
com_ptr<IAMCameraControl> am_camera_control;
com_ptr<IAMVideoProcAmp> am_video_proc_amp;
std::map<int, com_ptr<IKsControl>> ks_controls;
com_ptr<IMFSourceReader> mf_source_reader;
video_channel_callback callback = nullptr;
device(std::shared_ptr<context> parent, int vid, int pid, std::string unique_id, std::string name)
: parent(move(parent)), vid(vid), pid(pid), unique_id(move(unique_id)), name(name) {
}
~device() {
stop_streaming();
}
IKsControl *get_ks_control(const uvc::xu &xu) {
auto it = ks_controls.find(xu.node);
if (it != end(ks_controls)) return it->second;
get_media_source();
// Attempt to retrieve IKsControl
com_ptr<IKsTopologyInfo> ks_topology_info = NULL;
check("QueryInterface", mf_media_source->QueryInterface(__uuidof(IKsTopologyInfo), (void **)&ks_topology_info));
GUID node_type;
/*
DWORD numberOfNodes;
check("get_NumNodes", ks_topology_info->get_NumNodes(&numberOfNodes));
for (int i = 0; i < numberOfNodes; i++) {
check("get_NodeType", ks_topology_info->get_NodeType(i, &node_type));
print_guid("node_type", i, node_type);
}
*/
check("get_NodeType", ks_topology_info->get_NodeType(xu.node, &node_type));
const GUID KSNODETYPE_DEV_SPECIFIC_LOCAL{0x941C7AC0L, 0xC559, 0x11D0, {0x8A, 0x2B, 0x00, 0xA0, 0xC9, 0x25, 0x5A, 0xC1}};
if (node_type != KSNODETYPE_DEV_SPECIFIC_LOCAL) throw_error() << "Invalid extension unit node ID: " << xu.node;
com_ptr<IUnknown> unknown;
check("CreateNodeInstance", ks_topology_info->CreateNodeInstance(xu.node, IID_IUnknown, (LPVOID *)&unknown));
com_ptr<IKsControl> ks_control;
check("QueryInterface", unknown->QueryInterface(__uuidof(IKsControl), (void **)&ks_control));
VLOG_INFO << "Obtained KS control node : " << xu.node;
return ks_controls[xu.node] = ks_control;
}
void start_streaming() {
if (mf_source_reader) {
reader_callback->on_start();
check("IMFSourceReader::ReadSample", mf_source_reader->ReadSample(MF_SOURCE_READER_FIRST_VIDEO_STREAM, 0, NULL, NULL, NULL, NULL));
}
}
void stop_streaming() {
if (mf_source_reader) mf_source_reader->Flush(MF_SOURCE_READER_FIRST_VIDEO_STREAM);
while (true) {
bool is_streaming = reader_callback->is_streaming();
if (is_streaming) std::this_thread::sleep_for(std::chrono::milliseconds(10));
else break;
}
mf_source_reader = nullptr;
am_camera_control = nullptr;
am_video_proc_amp = nullptr;
ks_controls.clear();
if (mf_media_source) {
mf_media_source = nullptr;
check("IMFActivate::ShutdownObject", mf_activate->ShutdownObject());
}
callback = {};
}
com_ptr<IMFMediaSource> get_media_source() {
if (!mf_media_source) {
check("IMFActivate::ActivateObject", mf_activate->ActivateObject(__uuidof(IMFMediaSource), (void **)&mf_media_source));
if (mf_media_source) {
check("IMFMediaSource::QueryInterface", mf_media_source->QueryInterface(__uuidof(IAMCameraControl), (void **)&am_camera_control));
        mf_media_source->QueryInterface(__uuidof(IAMVideoProcAmp), (void **)&am_video_proc_amp);  // optional interface; failure is ignored
} else throw_error() << "Invalid media source";
}
return mf_media_source;
}
};
HRESULT reader_callback::OnReadSample(HRESULT hrStatus, DWORD dwStreamIndex, DWORD dwStreamFlags, LONGLONG llTimestamp, IMFSample *sample) {
if (auto owner_ptr = owner.lock()) {
if (sample) {
com_ptr<IMFMediaBuffer> buffer = NULL;
if (SUCCEEDED(sample->GetBufferByIndex(0, &buffer))) {
BYTE *byte_buffer;
DWORD max_length, current_length;
if (SUCCEEDED(buffer->Lock(&byte_buffer, &max_length, &current_length))) {
auto continuation = [buffer, this]() {
buffer->Unlock();
};
owner_ptr->callback(byte_buffer, continuation);
}
}
}
if (auto owner_ptr_new = owner.lock()) {
auto hr = owner_ptr_new->mf_source_reader->ReadSample(MF_SOURCE_READER_FIRST_VIDEO_STREAM, 0, NULL, NULL, NULL, NULL);
switch (hr) {
case S_OK: break;
case MF_E_INVALIDREQUEST: LOG(ERROR) << "ReadSample returned MF_E_INVALIDREQUEST"; break;
case MF_E_INVALIDSTREAMNUMBER: LOG(ERROR) << "ReadSample returned MF_E_INVALIDSTREAMNUMBER"; break;
case MF_E_NOTACCEPTING: LOG(ERROR) << "ReadSample returned MF_E_NOTACCEPTING"; break;
case E_INVALIDARG: LOG(ERROR) << "ReadSample returned E_INVALIDARG"; break;
case MF_E_VIDEO_RECORDING_DEVICE_INVALIDATED: LOG(ERROR) << "ReadSample returned MF_E_VIDEO_RECORDING_DEVICE_INVALIDATED"; break;
default: LOG(ERROR) << "ReadSample returned HRESULT " << std::hex << (uint32_t)hr; break;
}
if (hr != S_OK) streaming = false;
}
}
return S_OK;
}
std::shared_ptr<context> create_context() {
return std::make_shared<context>();
}
std::vector<std::shared_ptr<device>> query_devices(std::shared_ptr<context> context) {
IMFAttributes *pAttributes = NULL;
check("MFCreateAttributes", MFCreateAttributes(&pAttributes, 1));
check("IMFAttributes::SetGUID", pAttributes->SetGUID(
MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE, MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_GUID));
IMFActivate **ppDevices;
UINT32 numDevices;
check("MFEnumDeviceSources", MFEnumDeviceSources(pAttributes, &ppDevices, &numDevices));
std::vector<std::shared_ptr<device>> devices;
for (UINT32 i = 0; i < numDevices; ++i) {
com_ptr<IMFActivate> pDevice;
*&pDevice = ppDevices[i];
WCHAR *wchar_dev_name = NULL;
WCHAR *wchar_name = NULL;
UINT32 length;
pDevice->GetAllocatedString(
MF_DEVSOURCE_ATTRIBUTE_SOURCE_TYPE_VIDCAP_SYMBOLIC_LINK, &wchar_dev_name,
&length);
auto dev_name = win_to_utf(wchar_dev_name);
CoTaskMemFree(wchar_dev_name);
pDevice->GetAllocatedString(MF_DEVSOURCE_ATTRIBUTE_FRIENDLY_NAME, &wchar_name, &length);
auto name = win_to_utf(wchar_name); // Device description name
CoTaskMemFree(wchar_name);
int vid, pid, mi;
std::string unique_id;
if (!parse_usb_path(vid, pid, mi, unique_id, dev_name)) continue;
std::shared_ptr<device> dev;
for (auto & d : devices) {
if (d->vid == vid && d->pid == pid && d->unique_id == unique_id)
dev = d;
}
if (!dev) {
try {
dev = std::make_shared<device>(context, vid, pid, unique_id, name);
devices.push_back(dev);
} catch (const std::exception &e) {
VLOG_INFO << "Not a USB video device: " << e.what();
continue;  // skip interfaces that cannot be opened as UVC devices
}
}
dev->reader_callback = new reader_callback(dev);
dev->mf_activate = pDevice;
dev->vid = vid;
dev->pid = pid;
}
CoTaskMemFree(ppDevices);
return devices;
}
int get_vendor_id(const device &device) {
return device.vid;
}
int get_product_id(const device &device) {
return device.pid;
}
std::string get_name(const device &device) {
return device.name;
}
std::string get_video_name(const device &device) {
return device.name;
}
static long get_cid(Option option) {
switch (option) {
case Option::GAIN:
return VideoProcAmp_Gain;
case Option::BRIGHTNESS:
return VideoProcAmp_Brightness;
case Option::CONTRAST:
return VideoProcAmp_Contrast;
default:
LOG(FATAL) << "No VideoProcAmp cid for " << option;
return 0;  // unreachable; keeps compilers from warning about a missing return
}
}
bool pu_control_range(
const device &device, Option option, int32_t *min, int32_t *max,
int32_t *def) {
VLOG_INFO << __func__ << " " << option;
const_cast<uvc::device &>(device).get_media_source();
long minVal = 0, maxVal = 0, steppingDelta = 0, defVal = 0, capsFlag = 0;
check("IAMVideoProcAmp::GetRange",
const_cast<uvc::device &>(device).am_video_proc_amp->GetRange(
get_cid(option), &minVal, &maxVal, &steppingDelta, &defVal, &capsFlag));
if (min) *min = static_cast<int>(minVal);
if (max) *max = static_cast<int>(maxVal);
if (def) *def = static_cast<int>(defVal);
// Log the local values so that null out-pointers are never dereferenced.
VLOG_INFO << __func__ << " " << option <<
": min=" << minVal << ", max=" << maxVal << ", def=" << defVal;
return true;
}
static void pu_control_get(const device &device, long property, int32_t *value) {
long data, flags = 0;
check("IAMVideoProcAmp::Get",
const_cast<uvc::device &>(device).am_video_proc_amp->Get(
property, &data, &flags));
*value = data;
}
static void pu_control_set(const device &device, long property, int32_t *value) {
long data = *value;
check("IAMVideoProcAmp::Set",
const_cast<uvc::device &>(device).am_video_proc_amp->Set(
property, data, VideoProcAmp_Flags_Auto));
}
bool pu_control_query(
const device &device, Option option, pu_query query, int32_t *value) {
CHECK_NOTNULL(value);
const_cast<uvc::device &>(device).get_media_source();
switch (query) {
case PU_QUERY_SET:
VLOG_INFO << "pu_control_set " << option << ": " << *value;
pu_control_set(device, get_cid(option), value);
VLOG_INFO << "pu_control_set " << option << " done";
return true;
case PU_QUERY_GET:
VLOG_INFO << "pu_control_get " << option;
pu_control_get(device, get_cid(option), value);
VLOG_INFO << "pu_control_get " << option << ": " << *value;
return true;
default:
LOG(ERROR) << "pu_control_query request code is unaccepted";
return false;
}
}
static std::string to_string(uint16_t size, uint8_t *data) {
std::ostringstream ss;
for (uint8_t *beg = data, *end = data + size; beg != end; beg++) {
ss << "0x" << std::hex << static_cast<int>(*beg) << ",";
}
return ss.str();
}
/*
static std::vector<BYTE> xu_control_desc(const device &device, const xu &xu, ULONG id, ULONG flags) {
auto ks_control = const_cast<uvc::device &>(device).get_ks_control(xu);
KSP_NODE node;
memset(&node, 0, sizeof(KSP_NODE));
node.Property.Set = reinterpret_cast<const GUID &>(xu.id);
node.Property.Id = id;
node.Property.Flags = flags;
node.NodeId = xu.node;
KSPROPERTY_DESCRIPTION description;
ULONG bytes_received = 0;
check("IKsControl::KsProperty", ks_control->KsProperty(
(PKSPROPERTY)&node,
sizeof(node),
&description,
sizeof(KSPROPERTY_DESCRIPTION),
&bytes_received));
ULONG size = description.DescriptionSize;
std::vector<BYTE> buffer(size);
check("IKsControl::KsProperty", ks_control->KsProperty(
(PKSPROPERTY)&node,
sizeof(node),
buffer.data(),
size,
&bytes_received));
if (bytes_received != size) { throw_error() << "wrong data"; }
// VLOG_INFO << "buffer size=" << size << ", data=["
// << to_string(size, buffer.data()) << "]";
return buffer;
}
bool xu_control_range(
const device &device, const xu &xu, uint8_t selector, int32_t *min,
int32_t *max, int32_t *def) {
VLOG_INFO << __func__ << " " << static_cast<int>(selector);
size_t prop_header_size = sizeof(KSPROPERTY_MEMBERSHEADER) + sizeof(KSPROPERTY_DESCRIPTION);
// get step, min and max values
{
auto &&buffer = xu_control_desc(device, xu, selector,
KSPROPERTY_TYPE_BASICSUPPORT | KSPROPERTY_TYPE_TOPOLOGY);
BYTE *values = buffer.data() + prop_header_size;
// size_t size = buffer.size() - prop_header_size;
// VLOG_INFO << "values size: " << size << ", data=["
// << to_string(size, values) << "]";
*min = (values[1] << 8) | (values[2]);
values += 3;
*max = (values[1] << 8) | (values[2]);
// values += 3;
// *step = (values[1] << 8) | (values[2]);
}
// get def value
{
auto &&buffer = xu_control_desc(device, xu, selector,
KSPROPERTY_TYPE_DEFAULTVALUES | KSPROPERTY_TYPE_TOPOLOGY);
BYTE *values = buffer.data() + prop_header_size;
// size_t size = buffer.size() - prop_header_size;
// VLOG_INFO << "values size: " << size << ", data=["
// << to_string(size, values) << "]";
*def = (values[1] << 8) | (values[2]);
}
VLOG_INFO << __func__ << " " << static_cast<int>(selector)
<< ": min=" << *min << ", max=" << *max << ", def=" << *def;
return true;
}
*/
static void xu_control_get(const device &device, const xu &xu, uint8_t selector,
uint16_t size, uint8_t *data) {
VLOG_INFO << __func__ << " " << static_cast<int>(selector);
auto &&ks_control = const_cast<uvc::device &>(device).get_ks_control(xu);
KSP_NODE node;
memset(&node, 0, sizeof(KSP_NODE));
node.Property.Set = reinterpret_cast<const GUID &>(xu.id);
node.Property.Id = selector;
node.Property.Flags = KSPROPERTY_TYPE_GET | KSPROPERTY_TYPE_TOPOLOGY;
node.NodeId = xu.node;
ULONG bytes_received = 0;
check("IKsControl::KsProperty", ks_control->KsProperty(
(PKSPROPERTY)&node, sizeof(node), data, size, &bytes_received));
if (bytes_received != size)
throw_error() << "xu_control_get did not return enough data";
VLOG_INFO << __func__ << " " << static_cast<int>(selector)
<< ": size=" << size << ", data=[" << to_string(size, data) << "]";
}
static void xu_control_set(const device &device, const xu &xu, uint8_t selector,
uint16_t size, uint8_t *data) {
VLOG_INFO << __func__ << " " << static_cast<int>(selector)
<< ": size=" << size << ", data=[" << to_string(size, data) << "]";
auto &&ks_control = const_cast<uvc::device &>(device).get_ks_control(xu);
KSP_NODE node;
memset(&node, 0, sizeof(KSP_NODE));
node.Property.Set = reinterpret_cast<const GUID &>(xu.id);
node.Property.Id = selector;
node.Property.Flags = KSPROPERTY_TYPE_SET | KSPROPERTY_TYPE_TOPOLOGY;
node.NodeId = xu.node;
ULONG bytes_received = 0;
check("IKsControl::KsProperty", ks_control->KsProperty(
(PKSPROPERTY)&node, sizeof(node), data, size, &bytes_received));
VLOG_INFO << __func__ << " " << static_cast<int>(selector) << " done";
}
static int32_t xu_control_range_basic(const device &device, const xu &xu, uint8_t selector, uint8_t id) {
std::uint8_t data[3]{id, 0, 0};
xu_control_set(device, xu, selector, 3, data);
xu_control_get(device, xu, selector, 3, data);
return (data[1] << 8) | (data[2]);
}
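// The firmware appears to report range values through the same XU selector:
// writing id | 0x90, id | 0xa0 or id | 0xc0 and reading the selector back
// yields the minimum, maximum and default value respectively, encoded
// big-endian in bytes 1..2 of the 3-byte payload. This is inferred from the
// requests below rather than from a published specification.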
bool xu_control_range(
const device &device, const xu &xu, uint8_t selector, uint8_t id,
int32_t *min, int32_t *max, int32_t *def) {
VLOG_INFO << __func__ << " " << static_cast<int>(selector);
*min = xu_control_range_basic(
device, xu, selector, static_cast<uint8_t>(id | 0x90));
*max = xu_control_range_basic(
device, xu, selector, static_cast<uint8_t>(id | 0xa0));
*def = xu_control_range_basic(
device, xu, selector, static_cast<uint8_t>(id | 0xc0));
return true;
}
bool xu_control_query(
const device &device, const xu &xu, uint8_t selector, xu_query query,
uint16_t size, uint8_t *data) {
CHECK_NOTNULL(data);
switch (query) {
case XU_QUERY_SET:
xu_control_set(device, xu, selector, size, data);
return true;
case XU_QUERY_GET:
xu_control_get(device, xu, selector, size, data);
return true;
default:
LOG(ERROR) << "xu_control_query request code is unaccepted";
return false;
}
}
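// set_device_mode below walks the device's native media types and selects the
// first one whose frame size and subtype FOURCC match the request; the frame
// rate reported by the driver is not compared against the fps argument. The
// comparison is against GUID Data1, so a caller-supplied FOURCC is expected in
// the same packed little-endian form (or one that fourcc_map translates into
// it). A hedged sketch, assuming the camera offers a YUY2 mode (resolution and
// rate are placeholders):
//
//   const int fourcc_yuy2 = 'Y' | ('U' << 8) | ('Y' << 16) | ('2' << 24);
//   uvc::set_device_mode(*device, 752, 480, fourcc_yuy2, 25, callback);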
void set_device_mode(device &device, int width, int height, int fourcc, int fps, video_channel_callback callback) {
if (!device.mf_source_reader) {
com_ptr<IMFAttributes> pAttributes;
check("MFCreateAttributes", MFCreateAttributes(&pAttributes, 1));
check("IMFAttributes::SetUnknown", pAttributes->SetUnknown(MF_SOURCE_READER_ASYNC_CALLBACK, static_cast<IUnknown *>(device.reader_callback)));
check("MFCreateSourceReaderFromMediaSource", MFCreateSourceReaderFromMediaSource(device.get_media_source(), pAttributes, &device.mf_source_reader));
}
if (fourcc_map.count(fourcc)) fourcc = fourcc_map.at(fourcc);
for (DWORD j = 0; ; j++) {
com_ptr<IMFMediaType> media_type;
HRESULT hr = device.mf_source_reader->GetNativeMediaType((DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, j, &media_type);
if (hr == MF_E_NO_MORE_TYPES) break;
check("IMFSourceReader::GetNativeMediaType", hr);
UINT32 uvc_width, uvc_height, uvc_fps_num, uvc_fps_denom;
GUID subtype;
check("MFGetAttributeSize", MFGetAttributeSize(media_type, MF_MT_FRAME_SIZE, &uvc_width, &uvc_height));
if (uvc_width != width || uvc_height != height) continue;
check("IMFMediaType::GetGUID", media_type->GetGUID(MF_MT_SUBTYPE, &subtype));
if (subtype.Data1 != fourcc) continue;
check("MFGetAttributeRatio", MFGetAttributeRatio(media_type, MF_MT_FRAME_RATE, &uvc_fps_num, &uvc_fps_denom));
if (uvc_fps_denom == 0) continue;
//int uvc_fps = uvc_fps_num / uvc_fps_denom;
//LOG(INFO) << "uvc_fps: " << uvc_fps;
check("IMFSourceReader::SetCurrentMediaType", device.mf_source_reader->SetCurrentMediaType((DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, NULL, media_type));
device.callback = callback;
return;
}
throw_error() << "no matching media type for pixel format " << std::hex << fourcc;
}
void start_streaming(device &device, int num_transfer_bufs) {
device.start_streaming();
}
void stop_streaming(device &device) {
device.stop_streaming();
}
} // namespace uvc
MYNTEYE_END_NAMESPACE

109
src/mynteye/uvc/uvc.h Normal file
View File

@@ -0,0 +1,109 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_UVC_UVC_H_
#define MYNTEYE_UVC_UVC_H_
#pragma once
#include <functional>
#include <memory>
#include <string>
#include <vector>
#include "mynteye/mynteye.h"
#include "mynteye/types.h"
#define MYNTEYE_VID 0x04B4
#define MYNTEYE_PID 0x00F9
MYNTEYE_BEGIN_NAMESPACE
namespace uvc {
typedef enum pu_query {
PU_QUERY_SET, // Set the value of a control
PU_QUERY_GET, // Get the value of a control
PU_QUERY_LAST
} pu_query;
struct MYNTEYE_API guid {
uint32_t data1;
uint16_t data2, data3;
uint8_t data4[8];
};
// Extension Unit
struct MYNTEYE_API xu {
uint8_t unit;
int node;
guid id;
};
typedef enum xu_query {
XU_QUERY_SET, // Set current value of the control
XU_QUERY_GET, // Get current value of the control
XU_QUERY_MIN, // Get min value of the control
XU_QUERY_MAX, // Get max value of the control
XU_QUERY_DEF, // Get default value of the control
XU_QUERY_LAST
} xu_query;
// Opaque type representing access to the underlying UVC implementation
struct context;
// Opaque type representing access to a specific UVC device
struct device;
// Enumerate devices
MYNTEYE_API std::shared_ptr<context> create_context();
MYNTEYE_API std::vector<std::shared_ptr<device>> query_devices(
std::shared_ptr<context> context);
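// A minimal enumeration sketch (includes and error handling omitted); the
// returned list may be empty when no UVC camera is attached:
//
//   auto ctx = uvc::create_context();
//   for (auto &&dev : uvc::query_devices(ctx)) {
//     printf("%04x:%04x %s\n", uvc::get_vendor_id(*dev),
//            uvc::get_product_id(*dev), uvc::get_name(*dev).c_str());
//   }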
// Static device properties
MYNTEYE_API std::string get_name(const device &device);
MYNTEYE_API int get_vendor_id(const device &device);
MYNTEYE_API int get_product_id(const device &device);
MYNTEYE_API std::string get_video_name(const device &device);
// Access PU (Processing Unit) controls
inline bool is_pu_control(Option option) {
return option >= Option::GAIN && option <= Option::CONTRAST;
}
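// Note: this assumes GAIN, BRIGHTNESS and CONTRAST are declared contiguously
// in the Option enum; other options are expected to be reached through the
// extension-unit API below.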
MYNTEYE_API bool pu_control_range(
const device &device, Option option, int32_t *min, int32_t *max,
int32_t *def);
MYNTEYE_API bool pu_control_query(
const device &device, Option option, pu_query query, int32_t *value);
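// Usage sketch for a processing-unit control, assuming the camera exposes
// GAIN (values are clamped to the reported range before being written):
//
//   int32_t min = 0, max = 0, def = 0, value = 0;
//   uvc::pu_control_range(*dev, Option::GAIN, &min, &max, &def);
//   uvc::pu_control_query(*dev, Option::GAIN, uvc::PU_QUERY_GET, &value);
//   value = std::min(std::max(value + 1, min), max);
//   uvc::pu_control_query(*dev, Option::GAIN, uvc::PU_QUERY_SET, &value);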
// Access XU (Extension Unit) controls
MYNTEYE_API bool xu_control_range(
const device &device, const xu &xu, uint8_t selector, uint8_t id,
int32_t *min, int32_t *max, int32_t *def);
MYNTEYE_API bool xu_control_query( // XU_QUERY_SET, XU_QUERY_GET
const device &device, const xu &xu, uint8_t selector, xu_query query,
uint16_t size, uint8_t *data);
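// Usage sketch for an extension-unit control. The unit, node, GUID, selector
// and id below are placeholders; the real values are firmware specific:
//
//   uvc::xu my_xu{3, 2, {0x12345678, 0x1234, 0x1234,
//                        {0, 1, 2, 3, 4, 5, 6, 7}}};
//   int32_t min = 0, max = 0, def = 0;
//   uvc::xu_control_range(*dev, my_xu, 1, 0, &min, &max, &def);
//   uint8_t data[3]{};
//   uvc::xu_control_query(*dev, my_xu, 1, uvc::XU_QUERY_GET, 3, data);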
// Control streaming
typedef std::function<void(const void *frame,
std::function<void()> continuation)> video_channel_callback;
MYNTEYE_API void set_device_mode(
device &device, int width, int height, int fourcc, int fps, // NOLINT
video_channel_callback callback);
MYNTEYE_API void start_streaming(device &device, int num_transfer_bufs); // NOLINT
MYNTEYE_API void stop_streaming(device &device); // NOLINT
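// Streaming sketch: the frame pointer handed to the callback is only valid
// until the continuation is invoked, so copy or consume the data first.
// Resolution, FOURCC and frame rate are placeholders and must match a mode
// the camera actually offers:
//
//   uvc::set_device_mode(*dev, 752, 480, fourcc, 25,
//       [](const void *frame, std::function<void()> continuation) {
//         // ... copy or process `frame` here ...
//         continuation();  // releases the underlying driver buffer
//       });
//   uvc::start_streaming(*dev, 4);
//   // ... run for a while ...
//   uvc::stop_streaming(*dev);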
} // namespace uvc
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_UVC_UVC_H_