3 Commits

Author SHA1 Message Date
kalman
9810bba236 Update mynteye_image_pipeline 2018-12-18 20:09:34 +08:00
kalman
9c540db149 Merge branch 'devel-s210a' into devel-s210a-image-pipeline 2018-12-18 10:56:52 +08:00
kalman
f96e89b941 Add mynteye_image_pipeline 2018-12-06 11:19:32 +08:00
1031 changed files with 30520 additions and 11333 deletions

View File

@@ -1,11 +0,0 @@
module.exports = {
extends: ['@commitlint/config-conventional'],
rules: {
'type-enum': [2, 'always', [
"feat", "fix", "docs", "style", "refactor", "perf", "test", "build", "ci", "chore", "revert"
]],
'scope-empty': [2, 'never'],
'subject-full-stop': [0, 'never'],
'subject-case': [0, 'never']
}
};

4
.gitignore vendored
View File

@@ -16,15 +16,11 @@ _output/
/plugins/
/3rdparty/ceres/
/3rdparty/opencv/
/pkginfo.sh
/*.nsi
/*.exe
/node_modules/
/package-lock.json
# ros
/wrappers/ros/build

View File

@@ -1,427 +0,0 @@
// Ceres Solver - A fast non-linear least squares minimizer
// Copyright 2015 Google Inc. All rights reserved.
// http://ceres-solver.org/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
// * Neither the name of Google Inc. nor the names of its contributors may be
// used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
// Author: settinger@google.com (Scott Ettinger)
// mierle@gmail.com (Keir Mierle)
//
// Simplified Glog style logging with Android support. Supported macros in
// decreasing severity level per line:
//
// VLOG(2), VLOG(N)
// VLOG(1),
// LOG(INFO), VLOG(0), LG
// LOG(WARNING),
// LOG(ERROR),
// LOG(FATAL),
//
// With VLOG(n), the output is directed to one of the 5 Android log levels:
//
// 2 - Verbose
// 1 - Debug
// 0 - Info
// -1 - Warning
// -2 - Error
// -3 - Fatal
//
// Any logging of level 2 and above is directed to the Verbose level. All
// Android log output is tagged with the string "native".
//
// If the symbol ANDROID is not defined, all output goes to std::cerr.
// This allows code to be built on a different system for debug.
//
// Portions of this code are taken from the GLOG package. This code is only a
// small subset of the GLOG functionality. Notable differences from GLOG
// behavior include lack of support for displaying unprintable characters and
// lack of stack trace information upon failure of the CHECK macros. On
// non-Android systems, log output goes to std::cerr and is not written to a
// file.
//
// CHECK macros are defined to test for conditions within code. Any CHECK that
// fails will log the failure and terminate the application.
// e.g. CHECK_GE(3, 2) will pass while CHECK_GE(3, 4) will fail after logging
// "Check failed 3 >= 4".
//
// The following CHECK macros are defined:
//
// CHECK(condition) - fails if condition is false and logs condition.
// CHECK_NOTNULL(variable) - fails if the variable is NULL.
//
// The following binary check macros are also defined :
//
// Macro Operator equivalent
// -------------------- -------------------
// CHECK_EQ(val1, val2) val1 == val2
// CHECK_NE(val1, val2) val1 != val2
// CHECK_GT(val1, val2) val1 > val2
// CHECK_GE(val1, val2) val1 >= val2
// CHECK_LT(val1, val2) val1 < val2
// CHECK_LE(val1, val2) val1 <= val2
//
// Debug only versions of all of the check macros are also defined. These
// macros generate no code in a release build, but avoid unused variable
// warnings / errors.
//
// To use the debug only versions, prepend a D to the normal check macros, e.g.
// DCHECK_EQ(a, b).
#ifndef CERCES_INTERNAL_MINIGLOG_GLOG_LOGGING_H_
#define CERCES_INTERNAL_MINIGLOG_GLOG_LOGGING_H_
#ifdef ANDROID
# include <android/log.h>
#endif // ANDROID
#include <algorithm>
#include <ctime>
#include <fstream>
#include <iostream>
#include <set>
#include <sstream>
#include <string>
#include <vector>
// For appropriate definition of CERES_EXPORT macro.
// #include "ceres/internal/port.h"
// #include "ceres/internal/disable_warnings.h"
#include "mynteye/mynteye.h"
// Log severity level constants.
// Negative values are increasingly severe; VLOG verbosity levels extend
// upward from INFO (0). These map onto the Android log levels documented
// in the header comment above.
const int FATAL = -3;
// NOTE(review): a plain `ERROR` identifier can collide with the ERROR macro
// pulled in by <windows.h> — verify on Windows builds that include it.
const int ERROR = -2;
const int WARNING = -1;
const int INFO = 0;
// ------------------------- Glog compatibility ------------------------------
namespace google {

typedef int LogSeverity;
const int INFO = ::INFO;
const int WARNING = ::WARNING;
const int ERROR = ::ERROR;
const int FATAL = ::FATAL;

// Sink class used for integration with mock and test functions. If sinks are
// added, all log output is also sent to each sink through the send function.
// In this implementation, WaitTillSent() is called immediately after the send.
// This implementation is not thread safe.
class MYNTEYE_API LogSink {
 public:
  virtual ~LogSink() {}

  // Called once per log message.
  // @param severity       numeric severity (FATAL=-3 .. INFO=0, VLOG > 0)
  // @param full_filename  path of the file that emitted the message
  // @param base_filename  file name with leading directories stripped
  // @param line           source line of the log statement
  // @param tm_time        wall-clock time when the message was dispatched
  // @param message        message text (use message_len; not NUL-delimited)
  // @param message_len    length of message in bytes
  virtual void send(LogSeverity severity,
                    const char* full_filename,
                    const char* base_filename,
                    int line,
                    const struct tm* tm_time,
                    const char* message,
                    size_t message_len) = 0;

  // Blocks until the message passed to send() has been fully handled.
  virtual void WaitTillSent() = 0;
};

// Global set of log sinks. The actual object is defined in logging.cc.
extern MYNTEYE_API std::set<LogSink *> log_sinks_global;

// No-op, kept for source compatibility with glog. The parameter is now
// const char* to match glog's InitGoogleLogging(const char* argv0);
// existing callers passing char* still compile (backward compatible).
inline void InitGoogleLogging(const char *argv) {
  // Do nothing; this is ignored.
  (void)argv;
}

// Note: the Log sink functions are not thread safe.
inline void AddLogSink(LogSink *sink) {
  // TODO(settinger): Add locks for thread safety.
  log_sinks_global.insert(sink);
}

inline void RemoveLogSink(LogSink *sink) {
  log_sinks_global.erase(sink);
}

}  // namespace google
// ---------------------------- Logger Class --------------------------------
// Class created for each use of the logging macros.
// The logger acts as a stream and routes the final stream contents to the
// Android logcat output at the proper filter level. If ANDROID is not
// defined, output is directed to std::cerr. This class should not
// be directly instantiated in code, rather it should be invoked through the
// use of the log macros LG, LOG, or VLOG.
class MYNTEYE_API MessageLogger {
 public:
  // @param file      source file of the log statement (__FILE__)
  // @param line      source line of the log statement (__LINE__)
  // @param tag       Android logcat tag (the macros pass "native")
  // @param severity  numeric severity: FATAL=-3 .. INFO=0, VLOG levels > 0
  MessageLogger(const char *file, int line, const char *tag, int severity)
      : file_(file), line_(line), tag_(tag), severity_(severity) {
    // Pre-pend the stream with the file and line number.
    StripBasename(std::string(file), &filename_only_);
    stream_ << filename_only_ << ":" << line << " ";
  }
  // Output the contents of the stream to the proper channel on destruction.
  // NOTE: for FATAL severity this destructor does not return (abort() at the
  // end), so the order here — device log, then sinks, then abort — matters.
  ~MessageLogger() {
    stream_ << "\n";
#ifdef ANDROID
    // Severity-to-Android-level table, indexed by (severity - FATAL).
    static const int android_log_levels[] = {
        ANDROID_LOG_FATAL,    // LOG(FATAL)
        ANDROID_LOG_ERROR,    // LOG(ERROR)
        ANDROID_LOG_WARN,     // LOG(WARNING)
        ANDROID_LOG_INFO,     // LOG(INFO), LG, VLOG(0)
        ANDROID_LOG_DEBUG,    // VLOG(1)
        ANDROID_LOG_VERBOSE,  // VLOG(2) .. VLOG(N)
    };
    // Bound the logging level: clamp severity_ to [FATAL, kMaxVerboseLevel],
    // then shift so FATAL maps to index 0 of the table above.
    const int kMaxVerboseLevel = 2;
    int android_level_index = std::min(std::max(FATAL, severity_),
                                       kMaxVerboseLevel) - FATAL;
    int android_log_level = android_log_levels[android_level_index];
    // Output the log string the Android log at the appropriate level.
    __android_log_write(android_log_level, tag_.c_str(), stream_.str().c_str());
    // Indicate termination if needed.
    if (severity_ == FATAL) {
      __android_log_write(ANDROID_LOG_FATAL,
                          tag_.c_str(),
                          "terminating.\n");
    }
#else
    // If not building on Android, log all output to std::cerr.
    std::cerr << stream_.str();
#endif // ANDROID
    LogToSinks(severity_);
    WaitForSinks();
    // Android logging at level FATAL does not terminate execution, so abort()
    // is still required to stop the program.
    if (severity_ == FATAL) {
      abort();
    }
  }
  // Return the stream associated with the logger object.
  std::stringstream &stream() { return stream_; }

 private:
  // Forward the finished message, plus the current wall-clock time, to every
  // registered google::LogSink.
  void LogToSinks(int severity) {
    time_t rawtime;
    time (&rawtime);
    struct tm* timeinfo;
#if defined(WIN32) || defined(_WIN32) || defined(__WIN32__)
    // On Windows, use secure localtime_s not localtime.
    struct tm windows_timeinfo;
    timeinfo = &windows_timeinfo;
    localtime_s(timeinfo, &rawtime);
#else
    // NOTE(review): localtime() is not thread-safe; consistent with the sink
    // functions above, which are documented as not thread safe.
    timeinfo = localtime(&rawtime);
#endif
    std::set<google::LogSink*>::iterator iter;
    // Send the log message to all sinks.
    for (iter = google::log_sinks_global.begin();
         iter != google::log_sinks_global.end(); ++iter) {
      (*iter)->send(severity, file_.c_str(), filename_only_.c_str(), line_,
                    timeinfo, stream_.str().c_str(), stream_.str().size());
    }
  }
  // Wait until every sink has finished handling the message. Cheap here,
  // since this implementation's WaitTillSent() is called right after send.
  void WaitForSinks() {
    // TODO(settinger): Add locks for thread safety.
    std::set<google::LogSink *>::iterator iter;
    // Call WaitTillSent() for all sinks.
    for (iter = google::log_sinks_global.begin();
         iter != google::log_sinks_global.end(); ++iter) {
      (*iter)->WaitTillSent();
    }
  }
  // Copy the final path component of full_path into *filename.
  void StripBasename(const std::string &full_path, std::string *filename) {
    // TODO(settinger): Add support for OSs with different path separators.
    const char kSeparator = '/';
    size_t pos = full_path.rfind(kSeparator);
    if (pos != std::string::npos) {
      *filename = full_path.substr(pos + 1, std::string::npos);
    } else {
      *filename = full_path;
    }
  }

  std::string file_;           // full path passed to the constructor
  std::string filename_only_;  // file_ with directories stripped
  int line_;                   // source line of the log statement
  std::string tag_;            // Android logcat tag
  std::stringstream stream_;   // accumulates the message text
  int severity_;               // numeric severity (see constants at top)
};
// ---------------------- Logging Macro definitions --------------------------
// Helper type whose only job is to swallow a stream expression inside the
// conditional logging macros, so that statements such as
//   !(cond) ? (void) 0 : LoggerVoidify() & logger.stream()
// have type void and produce no "value computed is not used" or
// "statement has no effect" warnings.
class MYNTEYE_API LoggerVoidify {
 public:
  LoggerVoidify() { }
  // operator& binds tighter than ?: but looser than <<, which is exactly
  // what lets it absorb a fully built stream expression.
  void operator&(const std::ostream &) { }
};
// Log only if condition is met. Otherwise evaluates to void.
// The whole expression has type void either way; the MessageLogger temporary
// (and therefore the actual output) is only created when the condition holds.
#define LOG_IF(severity, condition) \
  !(condition) ? (void) 0 : LoggerVoidify() & \
    MessageLogger((char *)__FILE__, __LINE__, "native", severity).stream()
// Log only if condition is NOT met. Otherwise evaluates to void.
#define LOG_IF_FALSE(severity, condition) LOG_IF(severity, !(condition))
// LG is a convenient shortcut for LOG(INFO). Its use is in new
// google3 code is discouraged and the following shortcut exists for
// backward compatibility with existing code.
// When MAX_LOG_LEVEL is defined, LOG/VLOG/LG are statically gated on it so
// that messages above the configured level evaluate to a void no-op.
#ifdef MAX_LOG_LEVEL
# define LOG(n) LOG_IF(n, n <= MAX_LOG_LEVEL)
# define VLOG(n) LOG_IF(n, n <= MAX_LOG_LEVEL)
# define LG LOG_IF(INFO, INFO <= MAX_LOG_LEVEL)
# define VLOG_IF(n, condition) LOG_IF(n, (n <= MAX_LOG_LEVEL) && condition)
#else
# define LOG(n) MessageLogger((char *)__FILE__, __LINE__, "native", n).stream() // NOLINT
# define VLOG(n) MessageLogger((char *)__FILE__, __LINE__, "native", n).stream() // NOLINT
# define LG MessageLogger((char *)__FILE__, __LINE__, "native", INFO).stream() // NOLINT
# define VLOG_IF(n, condition) LOG_IF(n, condition)
#endif
// Currently, VLOG is always on for levels below MAX_LOG_LEVEL.
#ifndef MAX_LOG_LEVEL
# define VLOG_IS_ON(x) (1)
#else
# define VLOG_IS_ON(x) (x <= MAX_LOG_LEVEL)
#endif
// DLOG behaves like LOG in debug builds; in release builds (NDEBUG) the
// stream operands land in the dead branch of a ternary, so no logging
// happens but no unused-variable warnings are produced either.
#ifndef NDEBUG
# define DLOG LOG
#else
# define DLOG(severity) true ? (void) 0 : LoggerVoidify() & \
  MessageLogger((char *)__FILE__, __LINE__, "native", severity).stream()
#endif
// Log a message at FATAL severity and terminate. Termination happens in
// MessageLogger's destructor, which calls abort() for FATAL severity.
//
// @param file     file name reported in the log line (the caller's file)
// @param line     line number reported in the log line (the caller's line)
// @param message  payload streamed after the "file:line " prefix
template<class T>
void LogMessageFatal(const char *file, int line, const T &message) {
  // Bug fix: the original ignored its file/line parameters and logged this
  // header's own __FILE__/__LINE__, so CHECK_NOTNULL failures reported the
  // wrong source location. Use the caller-supplied values instead.
  MessageLogger(file, line, "native", FATAL).stream()
      << message;
}
// ---------------------------- CHECK macros ---------------------------------
// Check for a given boolean condition. On failure, logs
// "Check failed: <condition>" plus anything streamed onto the macro, then
// terminates (FATAL severity aborts in MessageLogger's destructor).
#define CHECK(condition) LOG_IF_FALSE(FATAL, condition) \
  << "Check failed: " #condition " "
#ifndef NDEBUG
// Debug only version of CHECK
# define DCHECK(condition) LOG_IF_FALSE(FATAL, condition) \
  << "Check failed: " #condition " "
#else
// Optimized version - generates no code.
// NOTE(review): the bare `if (false)` can capture a following `else` when
// this macro is used unbraced inside an if/else — confirm callers use braces.
# define DCHECK(condition) if (false) LOG_IF_FALSE(FATAL, condition) \
  << "Check failed: " #condition " "
#endif // NDEBUG
// ------------------------- CHECK_OP macros ---------------------------------
// Generic binary operator check macro. This should not be directly invoked,
// instead use the binary comparison macros defined below.
// Both operands are parenthesized, so expressions with lower precedence than
// `op` are safe to pass.
#define CHECK_OP(val1, val2, op) LOG_IF_FALSE(FATAL, ((val1) op (val2))) \
  << "Check failed: " #val1 " " #op " " #val2 " "
// Check_op macro definitions
#define CHECK_EQ(val1, val2) CHECK_OP(val1, val2, ==)
#define CHECK_NE(val1, val2) CHECK_OP(val1, val2, !=)
#define CHECK_LE(val1, val2) CHECK_OP(val1, val2, <=)
#define CHECK_LT(val1, val2) CHECK_OP(val1, val2, <)
#define CHECK_GE(val1, val2) CHECK_OP(val1, val2, >=)
#define CHECK_GT(val1, val2) CHECK_OP(val1, val2, >)
#ifndef NDEBUG
// Debug only versions of CHECK_OP macros.
# define DCHECK_EQ(val1, val2) CHECK_OP(val1, val2, ==)
# define DCHECK_NE(val1, val2) CHECK_OP(val1, val2, !=)
# define DCHECK_LE(val1, val2) CHECK_OP(val1, val2, <=)
# define DCHECK_LT(val1, val2) CHECK_OP(val1, val2, <)
# define DCHECK_GE(val1, val2) CHECK_OP(val1, val2, >=)
# define DCHECK_GT(val1, val2) CHECK_OP(val1, val2, >)
#else
// These versions generate no code in optimized mode.
// NOTE(review): same dangling-else caveat as DCHECK above.
# define DCHECK_EQ(val1, val2) if (false) CHECK_OP(val1, val2, ==)
# define DCHECK_NE(val1, val2) if (false) CHECK_OP(val1, val2, !=)
# define DCHECK_LE(val1, val2) if (false) CHECK_OP(val1, val2, <=)
# define DCHECK_LT(val1, val2) if (false) CHECK_OP(val1, val2, <)
# define DCHECK_GE(val1, val2) if (false) CHECK_OP(val1, val2, >=)
# define DCHECK_GT(val1, val2) if (false) CHECK_OP(val1, val2, >)
#endif // NDEBUG
// ---------------------------CHECK_NOTNULL macros ---------------------------
// Helpers for CHECK_NOTNULL(). Two are necessary to support both raw pointers
// and smart pointers.
//
// Shared implementation: when `value` compares equal to NULL, fatally logs
// `names` (typically "'expr' Must be non NULL") against the caller-supplied
// file/line; otherwise hands the value straight back so CHECK_NOTNULL can be
// used inline inside expressions.
template <typename T>
T& CheckNotNullCommon(const char *file, int line, const char *names, T& value) {
  if (value == NULL)
    LogMessageFatal(file, line, std::string(names));
  return value;
}
// Raw-pointer overload: yields the pointer unchanged when it is non-NULL.
template <typename T>
T* CheckNotNull(const char *file, int line, const char *names, T* ptr) {
  return CheckNotNullCommon(file, line, names, ptr);
}

// Reference overload (covers smart pointers): yields the object unchanged.
template <typename T>
T& CheckNotNull(const char *file, int line, const char *names, T& ref) {
  return CheckNotNullCommon(file, line, names, ref);
}
// Check that a pointer is not null. Evaluates to (val) itself, so it can be
// used inline, e.g. `auto *p = CHECK_NOTNULL(get());`.
#define CHECK_NOTNULL(val) \
  CheckNotNull(__FILE__, __LINE__, "'" #val "' Must be non NULL", (val))
#ifndef NDEBUG
// Debug only version of CHECK_NOTNULL
#define DCHECK_NOTNULL(val) \
  CheckNotNull(__FILE__, __LINE__, "'" #val "' Must be non NULL", (val))
#else
// Optimized version - generates no code.
// NOTE(review): the bare `if (false)` can capture a following `else` when
// used unbraced inside an if/else — same caveat as DCHECK above.
#define DCHECK_NOTNULL(val) if (false)\
  CheckNotNull(__FILE__, __LINE__, "'" #val "' Must be non NULL", (val))
#endif // NDEBUG
// #include "ceres/internal/reenable_warnings.h"
#endif // CERCES_INTERNAL_MINIGLOG_GLOG_LOGGING_H_

View File

@@ -14,7 +14,7 @@
cmake_minimum_required(VERSION 3.0)
project(mynteye VERSION 2.3.0 LANGUAGES C CXX)
project(mynteye VERSION 2.2.2 LANGUAGES C CXX)
include(cmake/Common.cmake)
@@ -55,7 +55,7 @@ macro(target_link_threads NAME)
target_compile_options(PUBLIC ${NAME} "-pthread")
endif()
if(CMAKE_THREAD_LIBS_INIT)
target_link_libraries(${NAME} PUBLIC "${CMAKE_THREAD_LIBS_INIT}")
target_link_libraries(${NAME} "${CMAKE_THREAD_LIBS_INIT}")
endif()
endmacro()
@@ -94,18 +94,6 @@ if(OS_WIN)
)
endif()
# rpath
set(CMAKE_MACOSX_RPATH 1)
set(MYNTEYE_CMAKE_RPATH "")
if(WITH_OPENCV)
list(APPEND MYNTEYE_CMAKE_RPATH ${OpenCV_LIB_PATH})
endif()
if(MYNTEYE_CMAKE_RPATH)
message(STATUS "RPATH: ${MYNTEYE_CMAKE_RPATH}")
set(CMAKE_INSTALL_RPATH "${MYNTEYE_CMAKE_RPATH}")
endif()
# targets
add_definitions(-DMYNTEYE_EXPORTS)
@@ -118,7 +106,6 @@ set_outdir(
)
## main
if(WITH_GLOG)
add_executable(main src/main.cc)
target_link_libraries(main glog::glog)
@@ -128,43 +115,6 @@ if(WITH_GLOG)
)
endif()
## camodocal
if(WITH_CAM_MODELS)
set(EIGEN_INCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/3rdparty/eigen3)
LIST(APPEND CMAKE_PREFIX_PATH ${CMAKE_CURRENT_SOURCE_DIR}/3rdparty/ceres/share/Ceres)
find_package(Ceres REQUIRED)
message(STATUS "CERES_INCLUDE_DIRS: ${CERES_INCLUDE_DIRS}")
message(STATUS "CERES_LIBRARIES: ${CERES_LIBRARIES}")
message(STATUS "EIGEN_INCLUDE_DIRS: ${EIGEN_INCLUDE_DIRS}")
include_directories(
${CERES_INCLUDE_DIRS}
${EIGEN_INCLUDE_DIRS}
src/mynteye/api/camodocal/include
)
add_library(camodocal STATIC
src/mynteye/api/camodocal/src/chessboard/Chessboard.cc
src/mynteye/api/camodocal/src/calib/CameraCalibration.cc
src/mynteye/api/camodocal/src/calib/StereoCameraCalibration.cc
src/mynteye/api/camodocal/src/camera_models/Camera.cc
src/mynteye/api/camodocal/src/camera_models/CameraFactory.cc
src/mynteye/api/camodocal/src/camera_models/CostFunctionFactory.cc
src/mynteye/api/camodocal/src/camera_models/PinholeCamera.cc
src/mynteye/api/camodocal/src/camera_models/CataCamera.cc
src/mynteye/api/camodocal/src/camera_models/EquidistantCamera.cc
src/mynteye/api/camodocal/src/camera_models/ScaramuzzaCamera.cc
src/mynteye/api/camodocal/src/sparse_graph/Transform.cc
src/mynteye/api/camodocal/src/gpl/gpl.cc
src/mynteye/api/camodocal/src/gpl/EigenQuaternionParameterization.cc
)
target_link_libraries(camodocal ${CERES_LIBRARIES})
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fPIC")
endif()
## libmynteye
if(NOT WITH_GLOG AND NOT OS_WIN)
@@ -182,7 +132,15 @@ endif()
if(OS_WIN)
set(UVC_SRC src/mynteye/uvc/win/uvc-wmf.cc)
elseif(OS_MAC)
add_compile_options(-x objective-c++ -Wno-unused-command-line-argument -Wno-missing-method-return-type -Wno-sign-compare)
add_compile_options(-x objective-c++)
## INCLUDE_DIRECTORIES(src/mynteye/uvc/macosx)
## INCLUDE_DIRECTORIES(src/mynteye/uvc/macosx/VVUVCKit)
## aux_source_directory(src/mynteye/uvc/macosx/VVUVCKit/ MAC_VVUVCKIT_SRC_LIST)
## aux_source_directory(src/mynteye/uvc/macosx/USBBusProber/ MAC_USBBUSPROBER_SRC_LIST)
## add_library(usbBusProber SHARED ${MAC_USBBUSPROBER_SRC_LIST})
## set_target_properties(usbBusProber PROPERTIES FRAMEWORK TRUE )
## add_library(vvuvckit SHARED ${MAC_VVUVCKIT_SRC_LIST})
## set_target_properties(vvuvckit PROPERTIES FRAMEWORK TRUE )
INCLUDE_DIRECTORIES(src/mynteye/uvc/macosx/USBBusProber.framework/Headers)
INCLUDE_DIRECTORIES(src/mynteye/uvc/macosx/VVUVCKit.framework/Headers)
@@ -192,7 +150,12 @@ elseif(OS_MAC)
SET(OSX_EXTRA_LIBS ${VVUVCKIT_LIBRARY} ${USB_LIBRARY})
set(UVC_SRC src/mynteye/uvc/macosx/CameraEngine.cpp src/mynteye/uvc/macosx/AVfoundationCamera.mm src/mynteye/uvc/macosx/uvc-vvuvckit.cc )
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -framework CoreFoundation -framework AVFoundation -framework IOKit -framework AppKit -framework Cocoa -framework CoreMedia -framework CoreData -framework Foundation -framework CoreVideo ${__MACUVCLOG_FLAGS}")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -framework CoreFoundation -framework AVFoundation -framework IOKit -framework AppKit -framework Cocoa -framework CoreMedia -framework CoreData -framework Foundation -framework CoreVideo ${__MACUVCLOG_FLAGS}")
find_package(libuvc REQUIRED)
set(UVC_LIB ${libuvc_LIBRARIES})
include_directories(${libuvc_INCLUDE_DIRS})
elseif(OS_LINUX)
set(UVC_SRC src/mynteye/uvc/linux/uvc-v4l2.cc)
else()
@@ -204,22 +167,12 @@ set(MYNTEYE_SRCS
src/mynteye/types.cc
src/mynteye/util/files.cc
src/mynteye/util/strings.cc
src/mynteye/device/channel/bytes.cc
src/mynteye/device/channel/channels.cc
src/mynteye/device/channel/file_channel.cc
src/mynteye/device/channels.cc
src/mynteye/device/config.cc
src/mynteye/device/context.cc
src/mynteye/device/device.cc
src/mynteye/device/device_s.cc
src/mynteye/device/motions.cc
src/mynteye/device/standard/channels_adapter_s.cc
src/mynteye/device/standard/device_s.cc
src/mynteye/device/standard/streams_adapter_s.cc
src/mynteye/device/standard2/channels_adapter_s2.cc
src/mynteye/device/standard2/device_s2.cc
src/mynteye/device/standard2/streams_adapter_s2.cc
src/mynteye/device/standard2/channels_adapter_s210a.cc
src/mynteye/device/standard2/device_s210a.cc
src/mynteye/device/standard2/streams_adapter_s210a.cc
src/mynteye/device/streams.cc
src/mynteye/device/types.cc
src/mynteye/device/utils.cc
@@ -230,23 +183,14 @@ if(WITH_API)
src/mynteye/api/dl.cc
src/mynteye/api/processor.cc
src/mynteye/api/synthetic.cc
src/mynteye/api/processor/rectify_processor.cc
src/mynteye/api/processor/disparity_processor.cc
src/mynteye/api/processor/disparity_normalized_processor.cc
src/mynteye/api/processor/points_processor_ocv.cc
src/mynteye/api/processor/depth_processor_ocv.cc
src/mynteye/api/processor/rectify_processor_ocv.cc
src/mynteye/api/config.cc
src/mynteye/api/processor/depth_processor.cc
src/mynteye/api/processor/points_processor.cc
)
if(WITH_CAM_MODELS)
list(APPEND MYNTEYE_SRCS
src/mynteye/api/processor/depth_processor.cc
src/mynteye/api/processor/points_processor.cc
src/mynteye/api/processor/rectify_processor.cc
)
endif()
endif()
if(NOT WITH_GLOG AND NOT WITH_CAM_MODELS)
if(NOT WITH_GLOG)
list(APPEND MYNTEYE_SRCS src/mynteye/miniglog.cc)
endif()
@@ -260,16 +204,12 @@ endif()
if(WITH_GLOG)
list(APPEND MYNTEYE_LINKLIBS glog::glog)
endif()
#message(STATUS "MYNTEYE_LINKLIBS: ${MYNTEYE_LINKLIBS}")
add_library(${MYNTEYE_NAME} SHARED ${MYNTEYE_SRCS})
target_link_libraries(${MYNTEYE_NAME} PUBLIC ${MYNTEYE_LINKLIBS})
target_link_libraries(${MYNTEYE_NAME} ${MYNTEYE_LINKLIBS})
if(OS_MAC)
target_link_libraries(${MYNTEYE_NAME} PUBLIC ${OSX_EXTRA_LIBS} )
endif()
if(WITH_CAM_MODELS)
target_link_libraries(${MYNTEYE_NAME} PRIVATE camodocal)
target_link_libraries( ${MYNTEYE_NAME} ${OSX_EXTRA_LIBS} )
endif()
target_link_threads(${MYNTEYE_NAME})
@@ -309,7 +249,6 @@ install(FILES
${CMAKE_CURRENT_SOURCE_DIR}/include/mynteye/device/callbacks.h
${CMAKE_CURRENT_SOURCE_DIR}/include/mynteye/device/context.h
${CMAKE_CURRENT_SOURCE_DIR}/include/mynteye/device/device.h
${CMAKE_CURRENT_SOURCE_DIR}/include/mynteye/device/types.h
${CMAKE_CURRENT_SOURCE_DIR}/include/mynteye/device/utils.h
DESTINATION ${MYNTEYE_CMAKE_INCLUDE_DIR}/device
)
@@ -333,12 +272,6 @@ if(NOT WITH_GLOG)
DESTINATION ${MYNTEYE_CMAKE_INCLUDE_DIR}
)
endif()
if(WITH_CAM_MODELS)
install(FILES
${CMAKE_CURRENT_SOURCE_DIR}/3rdparty/glog/logging.h
DESTINATION ${MYNTEYE_CMAKE_INCLUDE_DIR}/glog
)
endif()
install(FILES
${CMAKE_CURRENT_SOURCE_DIR}/include/deprecated/mynteye/callbacks.h

View File

@@ -22,25 +22,8 @@ MKFILE_DIR := $(patsubst %/,%,$(dir $(MKFILE_PATH)))
# UNIX: /usr/local
# Windows: c:/Program Files/${PROJECT_NAME}
# Options
#
# SUDO: sudo command
# CAM_MODELS: cmake build with -DWITH_CAM_MODELS=ON
#
# e.g. make [TARGET] SUDO=
# e.g. make [TARGET] CAM_MODELS=1
SUDO ?= sudo
CAM_MODELS ?=
CMAKE_BUILD_EXTRA_OPTIONS :=
ifeq ($(CAM_MODELS),)
CMAKE_BUILD_EXTRA_OPTIONS := $(CMAKE_BUILD_EXTRA_OPTIONS) -DWITH_CAM_MODELS=OFF
else
CMAKE_BUILD_EXTRA_OPTIONS := $(CMAKE_BUILD_EXTRA_OPTIONS) -DWITH_CAM_MODELS=ON
endif
.DEFAULT_GOAL := all
help:
@@ -93,18 +76,6 @@ submodules:
.PHONY: submodules
# 3rdparty
ceres:
@$(call echo,Make $@)
@$(call cmake_build,./3rdparty/ceres-solver-1.11.0/_build,.., \
-DCMAKE_INSTALL_PREFIX=$(MKFILE_DIR)/3rdparty/ceres \
-DGFLAGS_PREFER_EXPORTED_GFLAGS_CMAKE_CONFIGURATION=OFF \
-DMINIGLOG=ON)
@cd ./3rdparty/ceres-solver-1.11.0/_build; make install
.PHONY: ceres
# init
init:
@@ -117,13 +88,10 @@ init:
build:
@$(call echo,Make $@)
ifneq ($(CAM_MODELS),)
@$(MAKE) ceres
endif
ifeq ($(HOST_OS),Win)
@$(call cmake_build,./_build,..,-DCMAKE_INSTALL_PREFIX=$(MKFILE_DIR)/_install)
else
@$(call cmake_build,./_build,..,$(CMAKE_BUILD_EXTRA_OPTIONS))
@$(call cmake_build,./_build,..)
endif
.PHONY: build
@@ -301,8 +269,8 @@ cleanlog:
@$(call rm_f,*FATAL*)
cleanall: clean cleandoc
@$(call rm,./3rdparty/ceres-solver-1.11.0/_build/)
@$(call rm,./test/gtest/_build/)
@$(call rm,./third_party/glog/_build/)
@$(FIND) . -type f -name ".DS_Store" -print0 | xargs -0 rm -f
@$(call rm,./$(PBCVT_DIR)/)
@$(call rm,./$(NPCV_DIR)/)
@@ -328,7 +296,6 @@ host:
@echo LDD: $(LDD)
@echo CMAKE: $(CMAKE)
@echo PKGNAME: $(PKGNAME)
@echo CMAKE_BUILD_EXTRA_OPTIONS: $(CMAKE_BUILD_EXTRA_OPTIONS)
.PHONY: host

View File

@@ -1,6 +1,6 @@
# MYNT® EYE S SDK
[![](https://img.shields.io/badge/MYNT%20EYE%20S%20SDK-2.3.0-brightgreen.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK)
[![](https://img.shields.io/badge/MYNT%20EYE%20S%20SDK-2.2.2-brightgreen.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK)
## Overview
@@ -17,11 +17,11 @@ Please follow the guide doc to install the SDK on different platforms.
## Documentations
* [API Doc](https://github.com/slightech/MYNT-EYE-S-SDK/releases): API reference, some guides and data spec.
* en: [![](https://img.shields.io/badge/Download-PDF-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK/files/2764152/mynt-eye-s-sdk-apidoc-2.3.0-en.pdf) [![](https://img.shields.io/badge/Download-HTML-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK/files/2764156/mynt-eye-s-sdk-apidoc-2.3.0-en.zip) [![](https://img.shields.io/badge/Online-HTML-blue.svg?style=flat)](https://slightech.github.io/MYNT-EYE-S-SDK/)
* zh-Hans: [![](https://img.shields.io/badge/Download-PDF-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK/files/2764160/mynt-eye-s-sdk-apidoc-2.3.0-zh-Hans.pdf) [![](https://img.shields.io/badge/Download-HTML-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK/files/2764173/mynt-eye-s-sdk-apidoc-2.3.0-en.zip) [![](https://img.shields.io/badge/Online-HTML-blue.svg?style=flat)](http://doc.myntai.com/resource/api/mynt-eye-s-sdk-apidoc-2.3.0-zh-Hans/mynt-eye-s-sdk-apidoc-2.3.0-zh-Hans/index.html)
* en: [![](https://img.shields.io/badge/Download-PDF-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK/files/2683636/mynt-eye-s-sdk-apidoc-2.2.2-en.pdf) [![](https://img.shields.io/badge/Download-HTML-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK/files/2683637/mynt-eye-s-sdk-apidoc-2.2.2-en.zip) [![](https://img.shields.io/badge/Online-HTML-blue.svg?style=flat)](https://slightech.github.io/MYNT-EYE-S-SDK/)
* zh-Hans: [![](https://img.shields.io/badge/Download-PDF-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK/files/2683638/mynt-eye-s-sdk-apidoc-2.2.2-zh-Hans.pdf) [![](https://img.shields.io/badge/Download-HTML-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK/files/2683639/mynt-eye-s-sdk-apidoc-2.2.2-zh-Hans.zip) [![](https://img.shields.io/badge/Online-HTML-blue.svg?style=flat)](http://doc.myntai.com/resource/api/mynt-eye-s-sdk-apidoc-2.2.2-zh-Hans/mynt-eye-s-sdk-apidoc-2.2.2-zh-Hans/index.html)
* [Guide Doc](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/releases): How to install and start using the SDK.
* en: [![](https://img.shields.io/badge/Download-PDF-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/files/2764143/mynt-eye-s-sdk-guide-2.3.0-en.pdf) [![](https://img.shields.io/badge/Download-HTML-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/files/2764145/mynt-eye-s-sdk-guide-2.3.0-en.zip) [![](https://img.shields.io/badge/Online-HTML-blue.svg?style=flat)](https://slightech.github.io/MYNT-EYE-S-SDK-Guide/)
* zh-Hans: [![](https://img.shields.io/badge/Download-PDF-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/files/2764150/mynt-eye-s-sdk-guide-2.3.0-zh-Hans.pdf) [![](https://img.shields.io/badge/Download-HTML-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/files/2764163/mynt-eye-s-sdk-guide-2.3.0-zh-Hans.zip) [![](https://img.shields.io/badge/Online-HTML-blue.svg?style=flat)](http://doc.myntai.com/resource/sdk/mynt-eye-s-sdk-guide-2.3.0-zh-Hans/mynt-eye-s-sdk-guide-2.3.0-zh-Hans/index.html)
* en: [![](https://img.shields.io/badge/Download-PDF-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/files/2683625/mynt-eye-s-sdk-guide-2.2.2-en.pdf) [![](https://img.shields.io/badge/Download-HTML-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/files/2683626/mynt-eye-s-sdk-guide-2.2.2-en.zip) [![](https://img.shields.io/badge/Online-HTML-blue.svg?style=flat)](https://slightech.github.io/MYNT-EYE-S-SDK-Guide/)
* zh-Hans: [![](https://img.shields.io/badge/Download-PDF-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/files/2683627/mynt-eye-s-sdk-guide-2.2.2-zh-Hans.pdf) [![](https://img.shields.io/badge/Download-HTML-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/files/2683628/mynt-eye-s-sdk-guide-2.2.2-zh-Hans.zip) [![](https://img.shields.io/badge/Online-HTML-blue.svg?style=flat)](http://doc.myntai.com/resource/sdk/mynt-eye-s-sdk-guide-2.2.2-zh-Hans/mynt-eye-s-sdk-guide-2.2.2-zh-Hans/index.html)
> Supported languages: `en`, `zh-Hans`.
@@ -29,7 +29,7 @@ Please follow the guide doc to install the SDK on different platforms.
[MYNTEYE_BOX]: http://doc.myntai.com/mynteye/s/download
Get firmwares from our online disks: [MYNTEYE_BOX][].
Get firmwares from our online disks: [MYNTEYE_BOX][]. The latest version is `2.2.2`.
## Usage

26
cmake/DetectGLog.cmake Normal file
View File

@@ -0,0 +1,26 @@
# Copyright 2018 Slightech Co., Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Process this module at most once per configure run.
include(${CMAKE_CURRENT_LIST_DIR}/IncludeGuard.cmake)
cmake_include_guard()
# __pro_dir is the project root (the parent of this cmake/ directory).
get_filename_component(__pro_dir ${CMAKE_CURRENT_LIST_DIR} DIRECTORY)
# Prefer the in-tree glog build, if present, over any system installation.
LIST(APPEND CMAKE_PREFIX_PATH ${__pro_dir}/third_party/glog/_build)
find_package(glog REQUIRED)
if(glog_FOUND)
  add_definitions(-DWITH_GLOG)
endif()
unset(__pro_dir)

View File

@@ -26,13 +26,14 @@ if(OpenCV_FOUND)
#message(STATUS "Found OpenCV: ${OpenCV_VERSION}")
set(WITH_OPENCV TRUE)
add_definitions(-DWITH_OPENCV)
if(OpenCV_VERSION VERSION_LESS 3.0)
set(WITH_OPENCV2 TRUE)
add_definitions(-DWITH_OPENCV2)
elseif(OpenCV_VERSION VERSION_LESS 4.0)
set(WITH_OPENCV3 TRUE)
add_definitions(-DWITH_OPENCV3)
else()
set(WITH_OPENCV4 TRUE)
add_definitions(-DWITH_OPENCV4)
endif()
list(FIND OpenCV_LIBS "opencv_world" __index)
@@ -40,35 +41,12 @@ if(${__index} GREATER -1)
set(WITH_OPENCV_WORLD TRUE)
endif()
if(NOT OpenCV_LIB_PATH)
list(LENGTH OpenCV_INCLUDE_DIRS __length)
if(${__length} GREATER 0)
list(GET OpenCV_INCLUDE_DIRS 0 __include_dir)
string(REGEX REPLACE "include.*$" "lib" __lib_dir "${__include_dir}")
find_library(__opencv_lib
NAMES opencv_core3 opencv_core opencv_world
PATHS "${__lib_dir}" "${__lib_dir}/x86_64-linux-gnu"
NO_DEFAULT_PATH)
#message(STATUS "__opencv_lib: ${__opencv_lib}")
if(__opencv_lib)
get_filename_component(OpenCV_LIB_PATH "${__opencv_lib}" DIRECTORY)
else()
set(OpenCV_LIB_PATH "${__lib_dir}")
endif()
#message(STATUS "OpenCV_LIB_PATH: ${OpenCV_LIB_PATH}")
endif()
endif()
if(MSVC OR MSYS OR MINGW)
get_filename_component(OpenCV_LIB_SEARCH_PATH "${OpenCV_LIB_PATH}/../bin" ABSOLUTE)
else()
set(OpenCV_LIB_SEARCH_PATH "${OpenCV_LIB_PATH}")
endif()
include_directories(
${OpenCV_INCLUDE_DIRS}
)
else()
set(WITH_OPENCV FALSE)

View File

@@ -23,9 +23,6 @@ include(${CMAKE_CURRENT_LIST_DIR}/Utils.cmake)
option(WITH_API "Build with API layer, need OpenCV" ON)
option(WITH_DEVICE_INFO_REQUIRED "Build with device info required" ON)
option(WITH_CAM_MODELS "Build with more camera models, WITH_API must be ON" OFF)
option(WITH_BM_SOBEL_FILTER "Build with bm and sobel filter, need OpenCV contronb" OFF)
# 3rdparty components
option(WITH_BOOST "Include Boost support" ON)
@@ -38,17 +35,20 @@ option(WITH_GLOG "Include glog support" OFF)
if(WITH_API)
include(${CMAKE_CURRENT_LIST_DIR}/DetectOpenCV.cmake)
else()
# Disable WITH_CAM_MODELS if WITH_API is OFF
set(WITH_CAM_MODELS OFF)
endif()
if(WITH_DEVICE_INFO_REQUIRED)
add_definitions(-DWITH_DEVICE_INFO_REQUIRED)
endif()
if(WITH_BOOST)
find_package(Boost QUIET COMPONENTS filesystem)
find_package(Boost COMPONENTS filesystem)
if(Boost_FOUND)
set(Boost_VERSION_STRING "${Boost_MAJOR_VERSION}.${Boost_MINOR_VERSION}.${Boost_SUBMINOR_VERSION}")
set(WITH_FILESYSTEM TRUE)
set(WITH_BOOST_FILESYSTEM TRUE)
add_definitions(-DWITH_FILESYSTEM)
add_definitions(-DWITH_BOOST_FILESYSTEM)
endif()
endif()
@@ -56,11 +56,13 @@ if(NOT WITH_FILESYSTEM)
if(MSVC OR MSYS OR MINGW) # win
set(WITH_FILESYSTEM TRUE)
set(WITH_NATIVE_FILESYSTEM TRUE)
add_definitions(-DWITH_FILESYSTEM)
add_definitions(-DWITH_NATIVE_FILESYSTEM)
endif()
endif()
if(WITH_GLOG)
find_package(glog REQUIRED)
include(${CMAKE_CURRENT_LIST_DIR}/DetectGLog.cmake)
endif()
find_package(CUDA QUIET)
@@ -118,8 +120,6 @@ endif()
status(" WITH_DEVICE_INFO_REQUIRED: ${WITH_DEVICE_INFO_REQUIRED}")
status(" WITH_CAM_MODELS: ${WITH_CAM_MODELS}")
status(" WITH_BOOST: ${WITH_BOOST}")
if(WITH_BOOST)
if(Boost_FOUND)

View File

@@ -38,7 +38,7 @@ PROJECT_NAME = "MYNT EYE S SDK"
# could be handy for archiving the generated documentation or if some version
# control system is used.
PROJECT_NUMBER = 2.3.0
PROJECT_NUMBER = 2.2.2
# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a

View File

@@ -38,7 +38,7 @@ PROJECT_NAME = "MYNT EYE S SDK"
# could be handy for archiving the generated documentation or if some version
# control system is used.
PROJECT_NUMBER = 2.3.0
PROJECT_NUMBER = 2.2.2
# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a

View File

@@ -28,8 +28,6 @@
MYNTEYE_BEGIN_NAMESPACE
struct DeviceInfo;
class Device;
class Synthetic;
@@ -92,9 +90,28 @@ class MYNTEYE_API API {
/** The api::MotionData callback. */
using motion_callback_t = std::function<void(const api::MotionData &data)>;
explicit API(std::shared_ptr<Device> device, CalibrationModel calib_model);
explicit API(std::shared_ptr<Device> device);
virtual ~API();
/**
* Create the API instance.
* @return the API instance.
* @note This will call device::select() to select a device.
*/
static std::shared_ptr<API> Create(Resolution res);
/**
* Create the API instance.
* @param device the selected device.
* @return the API instance.
*/
static std::shared_ptr<API> Create(
std::shared_ptr<Device> device, Resolution res);
/**
* Create the API instance.
* @param device the selected device.
* @return the API instance.
*/
static std::shared_ptr<API> Create(std::shared_ptr<Device> device);
/**
* Create the API instance.
* @param argc the arg count.
@@ -113,13 +130,7 @@ class MYNTEYE_API API {
* @note This will init glog with args.
*/
static std::shared_ptr<API> Create(
int argc, char *argv[], const std::shared_ptr<Device> &device);
/**
* Create the API instance.
* @param device the selected device.
* @return the API instance.
*/
static std::shared_ptr<API> Create(const std::shared_ptr<Device> &device);
int argc, char *argv[], std::shared_ptr<Device> device);
/**
* Get the model.
@@ -144,10 +155,9 @@ class MYNTEYE_API API {
bool Supports(const AddOns &addon) const;
/**
* Log all stream requests and prompt user to select one.
* set the stream request.
*/
StreamRequest SelectStreamRequest(bool *ok) const;
void SetStreamRequest(const Format &format, const FrameRate &rate);
/**
* Get all stream requests of the capability.
*/
@@ -158,46 +168,16 @@ class MYNTEYE_API API {
*/
void ConfigStreamRequest(
const Capabilities &capability, const StreamRequest &request);
/**
* Get the config stream requests of the capability.
*/
const StreamRequest &GetStreamRequest(const Capabilities &capability) const;
/**
* Get all stream requests of the key stream capability.
*/
const std::vector<StreamRequest> &GetStreamRequests() const;
/**
* Config the stream request to the key stream capability.
*/
void ConfigStreamRequest(const StreamRequest &request);
/**
* Get the config stream requests of the key stream capability.
*/
const StreamRequest &GetStreamRequest() const;
/**
* Get the device info.
*/
std::shared_ptr<DeviceInfo> GetInfo() const;
/**
* Get the device info.
*/
std::string GetInfo(const Info &info) const;
/**
* @deprecated Get the intrinsics (pinhole) of stream.
*/
IntrinsicsPinhole GetIntrinsics(const Stream &stream) const;
/**
* Get the intrinsics of stream.
*/
template <typename T>
T GetIntrinsics(const Stream &stream) const;
/**
* Get the intrinsics base of stream.
*/
std::shared_ptr<IntrinsicsBase> GetIntrinsicsBase(const Stream &stream) const;
Intrinsics GetIntrinsics(const Stream &stream) const;
/**
* Get the extrinsics from one stream to another.
*/
@@ -234,6 +214,11 @@ class MYNTEYE_API API {
*/
bool RunOptionAction(const Option &option) const;
/**
* Init device resolution.
*/
void InitResolution(const Resolution &res);
/**
* Set the callback of stream.
*/
@@ -313,12 +298,6 @@ class MYNTEYE_API API {
void CheckImageParams();
};
template <typename T>
T API::GetIntrinsics(const Stream &stream) const {
auto in = GetIntrinsicsBase(stream);
return *std::dynamic_pointer_cast<T>(in);
}
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_API_API_H_

View File

@@ -30,6 +30,20 @@ MYNTEYE_BEGIN_NAMESPACE
namespace device {
typedef struct ImgParams {
bool ok;
std::map<Resolution, Intrinsics> in_left_map;
std::map<Resolution, Intrinsics> in_right_map;
Extrinsics ex_right_to_left;
} img_params_t;
typedef struct ImuParams {
bool ok;
ImuIntrinsics in_accel;
ImuIntrinsics in_gyro;
Extrinsics ex_left_to_imu;
} imu_params_t;
/**
* @ingroup datatypes
* Frame with raw data.

View File

@@ -22,8 +22,8 @@
#include <vector>
#include "mynteye/mynteye.h"
#include "mynteye/types.h"
#include "mynteye/device/callbacks.h"
#include "mynteye/device/types.h"
MYNTEYE_BEGIN_NAMESPACE
@@ -43,10 +43,8 @@ struct DeviceInfo;
class API;
class Channels;
class ChannelsAdapter;
class Motions;
class Streams;
class StreamsAdapter;
template <class Data>
class AsyncCallback;
@@ -69,16 +67,9 @@ class MYNTEYE_API Device {
using motion_async_callback_ptr_t = std::shared_ptr<motion_async_callback_t>;
using img_params_t = device::img_params_t;
using img_params_map_t = std::map<Resolution, img_params_t>;
using imu_params_t = device::imu_params_t;
protected:
Device(const Model &model,
const std::shared_ptr<uvc::device> &device,
const std::shared_ptr<StreamsAdapter> &streams_adapter,
const std::shared_ptr<ChannelsAdapter> &channels_adapter);
public:
Device(const Model &model, std::shared_ptr<uvc::device> device);
virtual ~Device();
/**
@@ -113,7 +104,14 @@ class MYNTEYE_API Device {
* Supports the addon or not.
*/
bool Supports(const AddOns &addon) const;
/**
* Init device resolution.
*/
void InitResolution(const Resolution &res);
/**
* set the stream request.
*/
void SetStreamRequest(const Format &format, const FrameRate &rate);
/**
* Get all stream requests of the capability.
*/
@@ -124,23 +122,6 @@ class MYNTEYE_API Device {
*/
void ConfigStreamRequest(
const Capabilities &capability, const StreamRequest &request);
/**
* Get the config stream requests of the capability.
*/
const StreamRequest &GetStreamRequest(const Capabilities &capability) const;
/**
* Get all stream requests of the key stream capability.
*/
const std::vector<StreamRequest> &GetStreamRequests() const;
/**
* Config the stream request to the key stream capability.
*/
void ConfigStreamRequest(const StreamRequest &request);
/**
* Get the config stream requests of the key stream capability.
*/
const StreamRequest &GetStreamRequest() const;
/**
* Get the device info.
@@ -154,7 +135,7 @@ class MYNTEYE_API Device {
/**
* Get the intrinsics of stream.
*/
std::shared_ptr<IntrinsicsBase> GetIntrinsics(const Stream &stream) const;
Intrinsics GetIntrinsics(const Stream &stream) const;
/**
* Get the extrinsics from one stream to another.
*/
@@ -171,8 +152,7 @@ class MYNTEYE_API Device {
/**
* Get the intrinsics of stream.
*/
std::shared_ptr<IntrinsicsBase> GetIntrinsics(
const Stream &stream, bool *ok) const;
Intrinsics GetIntrinsics(const Stream &stream, bool *ok) const;
/**
* Get the extrinsics from one stream to another.
*/
@@ -190,8 +170,7 @@ class MYNTEYE_API Device {
/**
* Set the intrinsics of stream.
*/
void SetIntrinsics(const Stream &stream,
const std::shared_ptr<IntrinsicsBase> &in);
void SetIntrinsics(const Stream &stream, const Intrinsics &in);
/**
* Set the extrinsics from one stream to another.
*/
@@ -262,21 +241,15 @@ class MYNTEYE_API Device {
*/
void WaitForStreams();
/**
* Get the latest data of stream.
*/
device::StreamData GetStreamData(const Stream &stream);
/**
* @deprecated Replaced by GetStreamData(const Stream &stream)
*/
device::StreamData GetLatestStreamData(const Stream &stream);
/**
* Get the datas of stream.
* @note default cache 4 datas at most.
*/
std::vector<device::StreamData> GetStreamDatas(const Stream &stream);
/**
* Get the latest data of stream.
*/
device::StreamData GetLatestStreamData(const Stream &stream);
/**
* Enable cache motion datas.
@@ -290,6 +263,10 @@ class MYNTEYE_API Device {
* Get the motion datas.
*/
std::vector<device::MotionData> GetMotionDatas();
/**
* Get the device img params
*/
img_params_t GetImgParams();
protected:
std::shared_ptr<uvc::device> device() const {
@@ -308,6 +285,8 @@ class MYNTEYE_API Device {
return motions_;
}
const StreamRequest &GetStreamRequest(const Capabilities &capability);
virtual void StartVideoStreaming();
virtual void StopVideoStreaming();
@@ -316,32 +295,25 @@ class MYNTEYE_API Device {
virtual void OnStereoStreamUpdate();
virtual Capabilities GetKeyStreamCapability() const = 0;
img_params_map_t GetImgParams() const {
return all_img_params_;
}
imu_params_t GetImuParams() const {
return imu_params_;
}
virtual std::vector<Stream> GetKeyStreams() const = 0;
bool video_streaming_;
bool motion_tracking_;
private:
Model model_;
Resolution res_ = Resolution::RES_752x480;
StreamRequest request_;
std::shared_ptr<uvc::device> device_;
std::shared_ptr<DeviceInfo> device_info_;
img_params_map_t all_img_params_;
imu_params_t imu_params_;
std::map<Stream, std::shared_ptr<IntrinsicsBase>> stream_intrinsics_;
std::map<Stream, Intrinsics> stream_intrinsics_;
std::map<Stream, std::map<Stream, Extrinsics>> stream_from_extrinsics_;
std::shared_ptr<MotionIntrinsics> motion_intrinsics_;
std::map<Stream, Extrinsics> motion_from_extrinsics_;
img_params_t img_params_;
stream_callbacks_t stream_callbacks_;
motion_callback_t motion_callback_;
@@ -359,17 +331,12 @@ class MYNTEYE_API Device {
std::shared_ptr<Motions> motions_;
void ReadAllInfos();
void UpdateStreamIntrinsics(
const Capabilities &capability, const StreamRequest &request);
void ConfigIntrinsics(const Resolution &res);
void CallbackPushedStreamData(const Stream &stream);
void CallbackMotionData(const device::MotionData &data);
bool GetFiles(
DeviceInfo *info, img_params_map_t *img_params, imu_params_t *imu_params);
bool SetFiles(
DeviceInfo *info, img_params_map_t *img_params, imu_params_t *imu_params);
friend API;
friend tools::DeviceWriter;
};

View File

@@ -1,171 +0,0 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_DEVICE_TYPES_H_
#define MYNTEYE_DEVICE_TYPES_H_
#pragma once
#include <cstdint>
#include <array>
#include <bitset>
#include <memory>
#include <string>
#include <vector>
#include "mynteye/mynteye.h"
#include "mynteye/types.h"
MYNTEYE_BEGIN_NAMESPACE
namespace device {

/**
 * Image (camera) calibration parameters.
 *
 * Holds the stream intrinsics for both cameras and the extrinsics
 * relating them. Filled when reading parameters from the device
 * (see Device::GetFiles / SetFiles elsewhere in this library).
 */
typedef struct ImgParams {
  bool ok;              // whether the parameters are valid/available
  std::string version;  // parameters format version string
  std::shared_ptr<IntrinsicsBase> in_left;   // left stream intrinsics
  std::shared_ptr<IntrinsicsBase> in_right;  // right stream intrinsics
  Extrinsics ex_right_to_left;  // extrinsics from the right to the left stream
} img_params_t;

/**
 * IMU calibration parameters.
 */
typedef struct ImuParams {
  bool ok;              // whether the parameters are valid/available
  std::string version;  // parameters format version string
  ImuIntrinsics in_accel;  // accelerometer intrinsics
  ImuIntrinsics in_gyro;   // gyroscope intrinsics
  Extrinsics ex_left_to_imu;  // extrinsics from the left stream to the IMU
} imu_params_t;

} // namespace device
// Declares a private data member `NAME_` of type TYPE together with a public
// getter `NAME()` and setter `set_NAME()`.
// NOTE: the expansion ends in a `private:` section, so the enclosing class is
// left with private access after each use of this macro.
#define MYNTEYE_PROPERTY(TYPE, NAME) \
 public: \
  void set_##NAME(TYPE NAME) { \
    NAME##_ = NAME; \
  } \
  TYPE NAME() const { \
    return NAME##_; \
  } \
 \
 private: \
  TYPE NAME##_;
/**
* Version.
*/
class MYNTEYE_API Version {
public:
using size_t = std::size_t;
using value_t = std::uint8_t;
Version() = default;
Version(value_t major, value_t minor) : major_(major), minor_(minor) {}
explicit Version(const std::string &name)
: major_(parse_part(name, 0)), minor_(parse_part(name, 1)) {}
virtual ~Version() {}
bool empty() const {
return major_ == 0 && minor_ == 0;
}
bool operator==(const Version &other) const {
return major_ == other.major_ && minor_ == other.minor_;
}
bool operator<=(const Version &other) const {
if (major_ < other.major_)
return true;
if (major_ > other.major_)
return false;
return minor_ <= other.minor_;
}
bool operator!=(const Version &other) const {
return !(*this == other);
}
bool operator<(const Version &other) const {
return !(*this == other) && (*this <= other);
}
bool operator>(const Version &other) const {
return !(*this <= other);
}
bool operator>=(const Version &other) const {
return (*this == other) || (*this > other);
}
bool is_between(const Version &from, const Version &until) {
return (from <= *this) && (*this <= until);
}
std::string to_string() const;
static std::vector<std::string> split(const std::string &s);
static value_t parse_part(const std::string &name, size_t part);
MYNTEYE_PROPERTY(value_t, major)
MYNTEYE_PROPERTY(value_t, minor)
};
/**
 * Hardware version.
 *
 * Extends Version with an 8-bit flag field.
 */
class MYNTEYE_API HardwareVersion : public Version {
 public:
  using flag_t = std::bitset<8>;

  HardwareVersion() = default;
  HardwareVersion(value_t major, value_t minor, value_t flag = 0)
      : Version(major, minor), flag_(flag) {}
  /** Parse "major.minor" from name; `flag` is stored as given. */
  explicit HardwareVersion(const std::string &name, value_t flag = 0)
      : Version(parse_part(name, 0), parse_part(name, 1)), flag_(flag) {}

  MYNTEYE_PROPERTY(flag_t, flag)
};
/**
 * Type.
 *
 * A vendor/product identifier pair, each a 16-bit value.
 */
class MYNTEYE_API Type {
 public:
  using size_t = std::size_t;
  using value_t = std::uint16_t;

  Type() = default;
  Type(value_t vendor, value_t product) : vendor_(vendor), product_(product) {}
  /** Parse from a name string: 2 chars at pos 0 -> vendor, 2 at pos 2 -> product. */
  explicit Type(const std::string &name)
      : vendor_(parse_part(name, 0, 2)), product_(parse_part(name, 2, 2)) {}
  virtual ~Type() {}

  /** Render as a string (defined out of line). */
  std::string to_string() const;

  /** Parse `count` characters of `name` starting at `pos` into a value. */
  static value_t parse_part(const std::string &name, size_t pos, size_t count);

  MYNTEYE_PROPERTY(value_t, vendor)
  MYNTEYE_PROPERTY(value_t, product)
};
/**
 * @ingroup datatypes
 * Device information.
 */
struct MYNTEYE_API DeviceInfo {
  std::string name;           // device name
  std::string serial_number;  // device serial number
  Version firmware_version;   // firmware version (major.minor)
  HardwareVersion hardware_version;  // hardware version, with flag bits
  Version spec_version;       // spec version (major.minor)
  Type lens_type;             // lens type (vendor/product pair)
  Type imu_type;              // IMU type (vendor/product pair)
  std::uint16_t nominal_baseline;  // nominal baseline; units not shown here — TODO confirm
};
#undef MYNTEYE_PROPERTY
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_DEVICE_TYPES_H_

View File

@@ -19,7 +19,6 @@
#include <string>
#include "mynteye/mynteye.h"
#include "mynteye/types.h"
MYNTEYE_BEGIN_NAMESPACE
@@ -40,16 +39,6 @@ namespace device {
*/
MYNTEYE_API std::shared_ptr<Device> select();
/**
* @ingroup utils
*
* List stream requests and prompt user to select one.
*
* @return the selected request.
*/
MYNTEYE_API MYNTEYE_NAMESPACE::StreamRequest select_request(
const std::shared_ptr<Device> &device, bool *ok);
} // namespace device
namespace utils {
@@ -69,14 +58,14 @@ MYNTEYE_API float get_real_exposure_time(
/**
* @ingroup utils
*
*
* Get sdk root dir.
*/
MYNTEYE_API std::string get_sdk_root_dir();
/**
* @ingroup utils
*
*
* Get sdk install dir.
*/
MYNTEYE_API std::string get_sdk_install_dir();

View File

@@ -74,23 +74,10 @@ struct glog_init {
}
};
#include "mynteye/mynteye.h"
#ifdef WITH_CAM_MODELS
#define MAX_LOG_LEVEL google::INFO
// include ceres miniglog
#include "glog/logging.h"
#else
#define MYNTEYE_MAX_LOG_LEVEL google::INFO
#include "mynteye/miniglog.h"
#endif
#endif
#endif // MYNTEYE_LOGGER_H_

View File

@@ -68,20 +68,4 @@ void UNUSED(T &&...) {}
MYNTEYE_END_NAMESPACE
#cmakedefine WITH_API
#cmakedefine WITH_DEVICE_INFO_REQUIRED
#cmakedefine WITH_CAM_MODELS
#cmakedefine WITH_BM_SOBEL_FILTER
#cmakedefine WITH_OPENCV
#cmakedefine WITH_OPENCV2
#cmakedefine WITH_OPENCV3
#cmakedefine WITH_OPENCV4
#cmakedefine WITH_OPENCV_WORLD
#cmakedefine WITH_GLOG
#cmakedefine WITH_FILESYSTEM
#cmakedefine WITH_BOOST_FILESYSTEM
#cmakedefine WITH_NATIVE_FILESYSTEM
#endif // MYNTEYE_MYNTEYE_H_

View File

@@ -15,8 +15,6 @@
#define MYNTEYE_TYPES_H_
#pragma once
#include <memory.h>
#include <cstdint>
#include <algorithm>
@@ -39,10 +37,6 @@ MYNTEYE_BEGIN_NAMESPACE
enum class Model : std::uint8_t {
/** Standard */
STANDARD,
/** Standard 2 */
STANDARD2,
/** Standard 210a */
STANDARD210A,
/** Last guard */
LAST
};
@@ -80,10 +74,10 @@ enum class Stream : std::uint8_t {
enum class Capabilities : std::uint8_t {
/** Provides stereo stream */
STEREO,
/** Provide stereo color stream */
STEREO_COLOR,
/** Provides color stream */
COLOR,
/** Provide stereo color stream */
STEREO_COLOR,
/** Provides depth stream */
DEPTH,
/** Provides point cloud stream */
@@ -149,7 +143,6 @@ enum class Option : std::uint8_t {
* range: [0,255], default: 127
*/
CONTRAST,
/**
* Image frame rate, must set IMU_FREQUENCY together
*
@@ -162,7 +155,6 @@ enum class Option : std::uint8_t {
* values: {100,200,250,333,500}, default: 200
*/
IMU_FREQUENCY,
/**
* Exposure mode
*
@@ -173,31 +165,21 @@ enum class Option : std::uint8_t {
/**
* Max gain, valid if auto-exposure
*
* range of standard 1: [0,48], default: 48
* range of standard 2: [0,255], default: 8
* range: [0,255], default: 8
*/
MAX_GAIN,
/**
* Max exposure time, valid if auto-exposure
*
* range of standard 1: [0,240], default: 240
* range of standard 2: [0,1000], default: 333
* range: [0,1000], default: 333
*/
MAX_EXPOSURE_TIME,
/**
* min exposure time, valid if auto-exposure
*
* range: [0,1000], default: 0
*/
MIN_EXPOSURE_TIME,
/**
* Desired brightness, valid if auto-exposure
*
* range of standard 1: [0,255], default: 192
* range of standard 2: [1,255], default: 122
* range: [1,255], default: 122
*/
DESIRED_BRIGHTNESS,
/**
* IR control
*
@@ -211,19 +193,26 @@ enum class Option : std::uint8_t {
* 1: 12-bit
*/
HDR_MODE,
/** Zero drift calibration */
ZERO_DRIFT_CALIBRATION,
/** Erase chip */
ERASE_CHIP,
/**
* min exposure time, valid if auto-exposure
*
* range: [0,1000], default: 0
*/
MIN_EXPOSURE_TIME,
/**
* The range of accelerometer
*
* value of standard 1: {4,8,16,32}, default: 8
* value of standard 2: {6,12,24,48}, default: 12
* values: {6,12,24,48}, default: 6
*/
ACCELEROMETER_RANGE,
/**
* The range of gyroscope
*
* value of standard 1: {500,1000,2000,4000}, default: 1000
* value of standard 2: {250,500,1000,2000,4000}, default: 1000
* values: {250,500,1000,2000,4000}, default: 1000
*/
GYROSCOPE_RANGE,
/**
@@ -238,12 +227,6 @@ enum class Option : std::uint8_t {
* values: {23,64}, default: 64
*/
GYROSCOPE_LOW_PASS_FILTER,
/** Zero drift calibration */
ZERO_DRIFT_CALIBRATION,
/** Erase chip */
ERASE_CHIP,
/** Last guard */
LAST
};
@@ -276,6 +259,40 @@ enum class AddOns : std::uint8_t {
LAST
};
/**
* @ingroup enumerations
* @brief Camera supported resolution.
*/
enum class Resolution : std::uint8_t {
/** 752x480 */
RES_752x480,
/** 1280x400 */
RES_1280x400,
/** 2560x800 */
RES_2560x800,
/** Last guard */
LAST
};
/**
* @ingroup enumerations
* @brief Camera supported frame rate.
*/
enum class FrameRate : std::uint8_t {
/** 10 fps */
RATE_10_FPS = 10,
/** 20 fps */
RATE_20_FPS = 20,
/** 20 fps */
RATE_25_FPS = 25,
/** 30 fps */
RATE_30_FPS = 30,
/** 60 fps */
RATE_60_FPS = 60,
/** Last guard */
LAST
};
#define MYNTEYE_ENUM_HELPERS(TYPE) \
MYNTEYE_API const char *to_string(const TYPE &value); \
inline bool is_valid(const TYPE &value) { \
@@ -299,6 +316,8 @@ MYNTEYE_ENUM_HELPERS(Info)
MYNTEYE_ENUM_HELPERS(Option)
MYNTEYE_ENUM_HELPERS(Source)
MYNTEYE_ENUM_HELPERS(AddOns)
MYNTEYE_ENUM_HELPERS(Resolution)
MYNTEYE_ENUM_HELPERS(FrameRate)
#undef MYNTEYE_ENUM_HELPERS
@@ -317,8 +336,6 @@ enum class Format : std::uint32_t {
YUYV = MYNTEYE_FOURCC('Y', 'U', 'Y', 'V'),
/** BGR 8:8:8, 24 bits per pixel */
BGR888 = MYNTEYE_FOURCC('B', 'G', 'R', '3'),
/** RGB 8:8:8, 24 bits per pixel */
RGB888 = MYNTEYE_FOURCC('R', 'G', 'B', '3'),
/** Last guard */
LAST
};
@@ -333,26 +350,6 @@ inline std::ostream &operator<<(std::ostream &os, const Format &value) {
MYNTEYE_API std::size_t bytes_per_pixel(const Format &value);
/**
* Resolution.
*/
struct MYNTEYE_API Resolution {
/** Width */
std::uint16_t width;
/** Height */
std::uint16_t height;
bool operator==(const Resolution &other) const {
return width == other.width && height == other.height;
}
bool operator!=(const Resolution &other) const {
return !(*this == other);
}
bool operator<(const Resolution &other) const {
return (width * height) < (other.width * other.height);
}
};
/**
* Stream request.
*/
@@ -363,7 +360,7 @@ struct MYNTEYE_API StreamRequest {
std::uint16_t height;
/** Stream pixel format */
Format format;
/** Stream frames per second */
/** Stream frames per second (unused) */
std::uint16_t fps;
StreamRequest() {}
@@ -373,10 +370,25 @@ struct MYNTEYE_API StreamRequest {
std::uint16_t fps)
: width(width), height(height), format(format), fps(fps) {}
StreamRequest(const Resolution &res, Format format, std::uint16_t fps)
: width(res.width), height(res.height), format(format), fps(fps) {}
StreamRequest(Resolution res, Format format, FrameRate rate)
: format(format) {
fps = static_cast<uint16_t>(rate);
Resolution GetResolution() const { return {width, height}; }
switch (res) {
case Resolution::RES_752x480:
width = 480, height = 752;
break;
case Resolution::RES_1280x400:
width = 1280, height = 400;
break;
case Resolution::RES_2560x800:
width = 2560, height = 800;
break;
default:
width = 480, height = 752;
break;
}
}
bool operator==(const StreamRequest &other) const {
return width == other.width && height == other.height &&
@@ -397,54 +409,13 @@ std::ostream &operator<<(std::ostream &os, const StreamRequest &request);
/**
* @ingroup calibration
* Camera calibration model.
* Stream intrinsics,
*/
enum class CalibrationModel : std::uint8_t {
/** Pinhole */
PINHOLE = 0,
/** Equidistant: KANNALA_BRANDT */
KANNALA_BRANDT = 1,
/** Unknow */
UNKNOW
};
MYNTEYE_API const char *to_string(const CalibrationModel &model);
inline std::ostream &operator<<(std::ostream &os,
const CalibrationModel &model) {
return os << to_string(model);
}
struct MYNTEYE_API IntrinsicsBase {
IntrinsicsBase() {
calib_model_ = CalibrationModel::UNKNOW;
}
virtual ~IntrinsicsBase() {}
/** The calibration model */
CalibrationModel calib_model() const {
return calib_model_;
}
struct MYNTEYE_API Intrinsics {
/** The width of the image in pixels */
std::uint16_t width;
/** The height of the image in pixels */
std::uint16_t height;
protected:
CalibrationModel calib_model_;
};
MYNTEYE_API
std::ostream &operator<<(std::ostream &os, const IntrinsicsBase &in);
/**
* @ingroup calibration
* Stream intrinsics (Pinhole)
*/
struct MYNTEYE_API IntrinsicsPinhole : public IntrinsicsBase {
IntrinsicsPinhole() {
calib_model_ = CalibrationModel::PINHOLE;
}
/** The focal length of the image plane, as a multiple of pixel width */
double fx;
/** The focal length of the image plane, as a multiple of pixel height */
@@ -453,34 +424,14 @@ struct MYNTEYE_API IntrinsicsPinhole : public IntrinsicsBase {
double cx;
/** The vertical coordinate of the principal point of the image */
double cy;
/** @deprecated Replaced by calib_model_. The distortion model of the image */
/** The distortion model of the image */
std::uint8_t model;
/** The distortion coefficients: k1,k2,p1,p2,k3 */
double coeffs[5];
};
MYNTEYE_API
std::ostream &operator<<(std::ostream &os, const IntrinsicsPinhole &in);
/**
* @deprecated Replaced by IntrinsicsPinhole.
*/
using Intrinsics = IntrinsicsPinhole;
/**
* @ingroup calibration
* Stream intrinsics (Equidistant: KANNALA_BRANDT)
*/
struct MYNTEYE_API IntrinsicsEquidistant : public IntrinsicsBase {
IntrinsicsEquidistant() {
calib_model_ = CalibrationModel::KANNALA_BRANDT;
}
/** The distortion coefficients: k2,k3,k4,k5,mu,mv,u0,v0 */
double coeffs[8];
};
MYNTEYE_API
std::ostream &operator<<(std::ostream &os, const IntrinsicsEquidistant &in);
std::ostream &operator<<(std::ostream &os, const Intrinsics &in);
/**
* @ingroup calibration
@@ -498,6 +449,7 @@ struct MYNTEYE_API ImuIntrinsics {
double scale[3][3];
/* Zero-drift: X, Y, Z */
double drift[3];
/** Noise density variances */
double noise[3];
/** Random walk variances */
@@ -586,15 +538,9 @@ struct MYNTEYE_API ImgData {
* IMU data.
*/
struct MYNTEYE_API ImuData {
/** IMU frame id */
std::uint32_t frame_id;
/**
* IMU accel or gyro flag
*
* 0: accel and gyro are both valid
* 1: accel is valid
* 2: gyro is valid
*/
/** Imu serial number */
std::uint32_t serial_number;
/** accel or gyro flag:1 for accel,2 for gyro,3 for both */
std::uint8_t flag;
/** IMU timestamp in 1us */
std::uint64_t timestamp;

View File

@@ -13,9 +13,10 @@
# limitations under the License.
@PACKAGE_INIT@
set(mynteye_WITH_API @WITH_API@)
set(mynteye_WITH_GLOG @WITH_GLOG@)
set(mynteye_WITH_API @WITH_API@)
set(mynteye_WITH_GLOG @WITH_GLOG@)
set(mynteye_WITH_CAM_MODELS @WITH_CAM_MODELS@)
include("${CMAKE_CURRENT_LIST_DIR}/mynteye-targets.cmake")

View File

@@ -1,37 +0,0 @@
{
"name": "mynt-eye-s-sdk",
"version": "1.0.0",
"description": "MYNT EYE S SDK",
"main": "index.js",
"directories": {
"doc": "doc",
"test": "test"
},
"dependencies": {},
"devDependencies": {
"@commitlint/cli": "^7.2.1",
"@commitlint/config-conventional": "^7.1.2",
"cz-conventional-changelog": "^2.1.0",
"husky": "^1.3.0"
},
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"repository": {
"type": "git",
"url": "https://github.com/slightech/MYNT-EYE-S-SDK.git"
},
"keywords": [],
"author": "",
"license": "Apache-2.0",
"config": {
"commitizen": {
"path": "./node_modules/cz-conventional-changelog"
}
},
"husky": {
"hooks": {
"commit-msg": "commitlint -E HUSKY_GIT_PARAMS"
}
}
}

View File

@@ -63,24 +63,22 @@ endif()
set(OUT_DIR "${CMAKE_CURRENT_SOURCE_DIR}/_output")
if(OS_MAC)
# samples above api layer
if(WITH_API)
add_subdirectory(api)
endif()
# samples above device layer
add_subdirectory(device)
# samples above uvc layer
add_subdirectory(uvc)
else()
# samples above device layer
#add_subdirectory(uvc)
add_subdirectory(device)
# tutorials
# tutorials
if(WITH_API)
add_subdirectory(tutorials)
endif()
# samples above api layer
if(WITH_API)
add_subdirectory(api)
endif()
if(WITH_API)
add_subdirectory(tutorials)
endif()

View File

@@ -21,13 +21,9 @@ MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api) return 1;
bool ok;
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
if (!api)
return 1;
api->SetStreamRequest(Format::BGR888, FrameRate::RATE_30_FPS);
api->LogOptionInfos();
std::size_t left_count = 0;

View File

@@ -144,12 +144,8 @@ MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api) return 1;
bool ok;
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
if (!api)
return 1;
api->SetOptionValue(Option::IR_CONTROL, 80);

View File

@@ -25,14 +25,12 @@ int main(int argc, char *argv[]) {
glog_init _(argc, argv);
auto &&device = device::select();
if (!device) return 1;
bool ok;
auto &&request = device::select_request(device, &ok);
if (!ok) return 1;
device->ConfigStreamRequest(request);
if (!device)
return 1;
std::size_t left_count = 0;
device->InitResolution(Resolution::RES_1280x400);
device->SetStreamRequest(Format::BGR888, FrameRate::RATE_30_FPS);
device->SetStreamCallback(
Stream::LEFT, [&left_count](const device::StreamData &data) {
CHECK_NOTNULL(data.img);
@@ -79,8 +77,8 @@ int main(int argc, char *argv[]) {
while (true) {
device->WaitForStreams();
device::StreamData left_data = device->GetStreamData(Stream::LEFT);
device::StreamData right_data = device->GetStreamData(Stream::RIGHT);
device::StreamData left_data = device->GetLatestStreamData(Stream::LEFT);
device::StreamData right_data = device->GetLatestStreamData(Stream::RIGHT);
auto &&motion_datas = device->GetMotionDatas();
motion_count += motion_datas.size();
@@ -123,6 +121,8 @@ int main(int argc, char *argv[]) {
cv::Mat right_img(
right_data.frame->height(), right_data.frame->width(), CV_8UC3,
right_data.frame->data());
cv::cvtColor(left_img, left_img, CV_BGR2RGB);
cv::cvtColor(right_img, right_img, CV_BGR2RGB);
cv::hconcat(left_img, right_img, img);
} else {
return -1;

View File

@@ -115,9 +115,7 @@ make_executable2(get_with_plugin SRCS data/get_with_plugin.cc WITH_OPENCV)
## control
make_executable2(ctrl_framerate SRCS control/framerate.cc WITH_OPENCV)
make_executable2(ctrl_imu_low_pass_filter SRCS control/imu_low_pass_filter.cc WITH_OPENCV)
make_executable2(ctrl_imu_range SRCS control/imu_range.cc WITH_OPENCV)
make_executable2(ctrl_infrared SRCS control/infrared.cc WITH_OPENCV)
make_executable2(ctrl_auto_exposure
SRCS control/auto_exposure.cc util/cv_painter.cc
WITH_OPENCV
@@ -127,6 +125,10 @@ make_executable2(ctrl_manual_exposure
WITH_OPENCV
)
# intermediate level
make_executable2(get_all_device_info SRCS intermediate/get_all_device_info.cc WITH_OPENCV)
if(PCL_FOUND)
if(OpenCV_VERSION VERSION_LESS 4.0)

View File

@@ -22,71 +22,34 @@ MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api) return 1;
if (!api)
return 1;
api->SetStreamRequest(Format::BGR888, FrameRate::RATE_30_FPS);
std::int32_t frame_rate = 0;
bool ok;
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
// auto-exposure: 0
api->SetOptionValue(Option::EXPOSURE_MODE, 0);
Model model = api->GetModel();
// max_gain: range [0,255], default 8
api->SetOptionValue(Option::MAX_GAIN, 8);
// max_exposure_time: range [0,1000], default 333
api->SetOptionValue(Option::MAX_EXPOSURE_TIME, 333);
// desired_brightness: range [1,255], default 122
api->SetOptionValue(Option::DESIRED_BRIGHTNESS, 122);
// min_exposure_time: range [0,1000], default 0
api->SetOptionValue(Option::MIN_EXPOSURE_TIME, 0);
// Set auto exposure options fo s1030
if (model == Model::STANDARD) {
// auto-exposure: 0
api->SetOptionValue(Option::EXPOSURE_MODE, 0);
// max_gain: range [0,48], default 48
api->SetOptionValue(Option::MAX_GAIN, 48);
// max_exposure_time: range [0,240], default 240
api->SetOptionValue(Option::MAX_EXPOSURE_TIME, 240);
// desired_brightness: range [0,255], default 192
api->SetOptionValue(Option::DESIRED_BRIGHTNESS, 192);
frame_rate = api->GetOptionValue(Option::FRAME_RATE);
LOG(INFO) << "Enable auto-exposure";
LOG(INFO) << "Set EXPOSURE_MODE to "
<< api->GetOptionValue(Option::EXPOSURE_MODE);
LOG(INFO) << "Set EXPOSURE_MODE to "
<< api->GetOptionValue(Option::EXPOSURE_MODE);
LOG(INFO) << "Set MAX_GAIN to " << api->GetOptionValue(Option::MAX_GAIN);
LOG(INFO) << "Set MAX_EXPOSURE_TIME to "
<< api->GetOptionValue(Option::MAX_EXPOSURE_TIME);
LOG(INFO) << "Set DESIRED_BRIGHTNESS to "
<< api->GetOptionValue(Option::DESIRED_BRIGHTNESS);
}
// Set auto exposure options fo S2000/S2100/S210A
if (model == Model::STANDARD2 || model == Model::STANDARD210A) {
// auto-exposure: 0
api->SetOptionValue(Option::EXPOSURE_MODE, 0);
// max_gain: range [0,255], default 8
api->SetOptionValue(Option::MAX_GAIN, 8);
// max_exposure_time: range [0,1000], default 333
api->SetOptionValue(Option::MAX_EXPOSURE_TIME, 333);
// desired_brightness: range [1,255], default 122
api->SetOptionValue(Option::DESIRED_BRIGHTNESS, 122);
// min_exposure_time: range [0,1000], default 0
api->SetOptionValue(Option::MIN_EXPOSURE_TIME, 0);
LOG(INFO) << "Enable auto-exposure";
LOG(INFO) << "Set EXPOSURE_MODE to "
<< api->GetOptionValue(Option::EXPOSURE_MODE);
LOG(INFO) << "Set MAX_GAIN to " << api->GetOptionValue(Option::MAX_GAIN);
LOG(INFO) << "Set MAX_EXPOSURE_TIME to "
<< api->GetOptionValue(Option::MAX_EXPOSURE_TIME);
LOG(INFO) << "Set DESIRED_BRIGHTNESS to "
<< api->GetOptionValue(Option::DESIRED_BRIGHTNESS);
LOG(INFO) << "Set MIN_EXPOSURE_TIME to "
<< api->GetOptionValue(Option::MIN_EXPOSURE_TIME);
}
LOG(INFO) << "Enable auto-exposure";
LOG(INFO) << "Set MAX_GAIN to " << api->GetOptionValue(Option::MAX_GAIN);
LOG(INFO) << "Set MAX_EXPOSURE_TIME to "
<< api->GetOptionValue(Option::MAX_EXPOSURE_TIME);
LOG(INFO) << "Set DESIRED_BRIGHTNESS to "
<< api->GetOptionValue(Option::DESIRED_BRIGHTNESS);
LOG(INFO) << "Set MIN_EXPOSURE_TIME to "
<< api->GetOptionValue(Option::MIN_EXPOSURE_TIME);
api->Start(Source::VIDEO_STREAMING);
CVPainter painter(frame_rate);
CVPainter painter(30);
cv::namedWindow("frame");

View File

@@ -22,38 +22,11 @@
MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api) return 1;
bool ok;
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
Model model = api->GetModel();
// Set frame rate options for s1030
if (model == Model::STANDARD) {
// Attention: must set FRAME_RATE and IMU_FREQUENCY together,
// otherwise won't.
// succeed.
// FRAME_RATE values: 10, 15, 20, 25, 30, 35, 40, 45, 50, 55, 60
api->SetOptionValue(Option::FRAME_RATE, 25);
// IMU_FREQUENCY values: 100, 200, 250, 333, 500
api->SetOptionValue(Option::IMU_FREQUENCY, 500);
LOG(INFO) << "Set FRAME_RATE to "
<< api->GetOptionValue(Option::FRAME_RATE);
LOG(INFO) << "Set IMU_FREQUENCY to "
<< api->GetOptionValue(Option::IMU_FREQUENCY);
}
// You should set frame rate for S2000/S2100/S210A by 'SelectStreamRequest()'
if (model == Model::STANDARD2 || model == Model::STANDARD210A) {
LOG(INFO) << "Please set frame rate by 'SelectStreamRequest()'";
}
auto &&api = API::Create(Resolution::RES_1280x400);
// auto &&api = API::Create(argc, argv);
if (!api)
return 1;
api->SetStreamRequest(Format::BGR888, FrameRate::RATE_30_FPS);
// Count img
std::atomic_uint img_count(0);
api->SetStreamCallback(
@@ -62,13 +35,6 @@ int main(int argc, char *argv[]) {
++img_count;
});
// Count imu
std::atomic_uint imu_count(0);
api->SetMotionCallback([&imu_count](const api::MotionData &data) {
CHECK_NOTNULL(data.imu);
++imu_count;
});
api->Start(Source::ALL);
cv::namedWindow("frame");
@@ -101,7 +67,5 @@ int main(int argc, char *argv[]) {
<< ", cost: " << elapsed_ms << "ms";
LOG(INFO) << "Img count: " << img_count
<< ", fps: " << (1000.f * img_count / elapsed_ms);
LOG(INFO) << "Imu count: " << imu_count
<< ", hz: " << (1000.f * imu_count / elapsed_ms);
return 0;
}

View File

@@ -1,104 +0,0 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <atomic>
#include <opencv2/highgui/highgui.hpp>
#include "mynteye/logger.h"
#include "mynteye/api/api.h"
#include "mynteye/util/times.h"
MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api) return 1;
bool ok;
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
Model model = api->GetModel();
// MYNTEYE-S1030 don't support this option
if (model == Model::STANDARD) {
LOG(INFO) << "Sorry,MYNTEYE-S1030 don't support imu low pass settings";
return 0;
}
// Set imu low pass filter for S2000/S2100/S210A
if (model == Model::STANDARD2 || model == Model::STANDARD210A) {
// ACCELEROMETER_RANGE values: 0, 1, 2
api->SetOptionValue(Option::ACCELEROMETER_LOW_PASS_FILTER, 2);
// GYROSCOPE_RANGE values: 23, 64
api->SetOptionValue(Option::GYROSCOPE_LOW_PASS_FILTER, 64);
LOG(INFO) << "Set ACCELEROMETER_LOW_PASS_FILTER to "
<< api->GetOptionValue(Option::ACCELEROMETER_LOW_PASS_FILTER);
LOG(INFO) << "Set GYROSCOPE_LOW_PASS_FILTER to "
<< api->GetOptionValue(Option::GYROSCOPE_LOW_PASS_FILTER);
}
// Count img
std::atomic_uint img_count(0);
api->SetStreamCallback(
Stream::LEFT, [&img_count](const api::StreamData &data) {
CHECK_NOTNULL(data.img);
++img_count;
});
// Count imu
std::atomic_uint imu_count(0);
api->SetMotionCallback([&imu_count](const api::MotionData &data) {
CHECK_NOTNULL(data.imu);
++imu_count;
});
api->Start(Source::ALL);
cv::namedWindow("frame");
auto &&time_beg = times::now();
while (true) {
api->WaitForStreams();
auto &&left_data = api->GetStreamData(Stream::LEFT);
auto &&right_data = api->GetStreamData(Stream::RIGHT);
cv::Mat img;
cv::hconcat(left_data.frame, right_data.frame, img);
cv::imshow("frame", img);
char key = static_cast<char>(cv::waitKey(1));
if (key == 27 || key == 'q' || key == 'Q') { // ESC/Q
break;
}
}
auto &&time_end = times::now();
api->Stop(Source::ALL);
// Calculate img fps and imu hz
float elapsed_ms =
times::count<times::microseconds>(time_end - time_beg) * 0.001f;
LOG(INFO) << "Time beg: " << times::to_local_string(time_beg)
<< ", end: " << times::to_local_string(time_end)
<< ", cost: " << elapsed_ms << "ms";
LOG(INFO) << "Img count: " << img_count
<< ", fps: " << (1000.f * img_count / elapsed_ms);
LOG(INFO) << "Imu count: " << imu_count
<< ", hz: " << (1000.f * imu_count / elapsed_ms);
return 0;
}

View File

@@ -23,30 +23,13 @@ MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api) return 1;
if (!api)
return 1;
bool ok;
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
Model model = api->GetModel();
// Set imu range for S1030
if (model == Model::STANDARD) {
// ACCELEROMETER_RANGE values: 4, 8, 16, 32
api->SetOptionValue(Option::ACCELEROMETER_RANGE, 8);
// GYROSCOPE_RANGE values: 500, 1000, 2000, 4000
api->SetOptionValue(Option::GYROSCOPE_RANGE, 1000);
}
// Set imu range for S2000/S2100/S210A
if (model == Model::STANDARD2 || model == Model::STANDARD210A) {
// ACCELEROMETER_RANGE values: 6, 12, 24, 32
api->SetOptionValue(Option::ACCELEROMETER_RANGE, 6);
// GYROSCOPE_RANGE values: 250, 500, 1000, 2000, 4000
api->SetOptionValue(Option::GYROSCOPE_RANGE, 1000);
}
// ACCELEROMETER_RANGE values: 6, 12, 24, 32
api->SetOptionValue(Option::ACCELEROMETER_RANGE, 6);
// GYROSCOPE_RANGE values: 250, 500, 1000, 2000, 4000
api->SetOptionValue(Option::GYROSCOPE_RANGE, 1000);
LOG(INFO) << "Set ACCELEROMETER_RANGE to "
<< api->GetOptionValue(Option::ACCELEROMETER_RANGE);

View File

@@ -1,68 +0,0 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <opencv2/highgui/highgui.hpp>
#include "mynteye/logger.h"
#include "mynteye/api/api.h"
MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api) return 1;
bool ok;
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
Model model = api->GetModel();
if (model == Model::STANDARD || model == Model::STANDARD2) {
// ir control: range [0,160], default 0
api->SetOptionValue(Option::IR_CONTROL, 80);
LOG(INFO) << "Set IR_CONTROL to "
<< api->GetOptionValue(Option::IR_CONTROL);
}
// MYNTEYE-S210A don't support this option
if (model == Model::STANDARD210A) {
LOG(INFO) << "Sorry,MYNTEYE-S210A don't support ir control";
return 0;
}
api->Start(Source::VIDEO_STREAMING);
cv::namedWindow("frame");
while (true) {
api->WaitForStreams();
auto &&left_data = api->GetStreamData(Stream::LEFT);
auto &&right_data = api->GetStreamData(Stream::RIGHT);
cv::Mat img;
cv::hconcat(left_data.frame, right_data.frame, img);
cv::imshow("frame", img);
char key = static_cast<char>(cv::waitKey(1));
if (key == 27 || key == 'q' || key == 'Q') { // ESC/Q
break;
}
}
api->Stop(Source::VIDEO_STREAMING);
return 0;
}

View File

@@ -22,56 +22,22 @@ MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api) return 1;
if (!api)
return 1;
api->SetStreamRequest(Format::BGR888, FrameRate::RATE_30_FPS);
std::int32_t frame_rate = 0;
bool ok;
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
// manual-exposure: 1
api->SetOptionValue(Option::EXPOSURE_MODE, 1);
Model model = api->GetModel();
// brightness/exposure_time: range [0,240], default 120
api->SetOptionValue(Option::BRIGHTNESS, 120);
// Set manual exposure options fo s1030
if (model == Model::STANDARD) {
// manual-exposure: 1
api->SetOptionValue(Option::EXPOSURE_MODE, 1);
// gain: range [0,48], default 24
api->SetOptionValue(Option::GAIN, 24);
// brightness/exposure_time: range [0,240], default 120
api->SetOptionValue(Option::BRIGHTNESS, 120);
// contrast/black_level_calibration: range [0,255], default 127
api->SetOptionValue(Option::CONTRAST, 127);
frame_rate = api->GetOptionValue(Option::FRAME_RATE);
LOG(INFO) << "Enable manual-exposure";
LOG(INFO) << "Set EXPOSURE_MODE to "
<< api->GetOptionValue(Option::EXPOSURE_MODE);
LOG(INFO) << "Set GAIN to " << api->GetOptionValue(Option::GAIN);
LOG(INFO) << "Set BRIGHTNESS to "
<< api->GetOptionValue(Option::BRIGHTNESS);
LOG(INFO) << "Set CONTRAST to " << api->GetOptionValue(Option::CONTRAST);
}
// Set manual exposure options fo S2000/S2100/S210A
if (model == Model::STANDARD2 || model == Model::STANDARD210A) {
// manual-exposure: 1
api->SetOptionValue(Option::EXPOSURE_MODE, 1);
// brightness/exposure_time: range [0,240], default 120
api->SetOptionValue(Option::BRIGHTNESS, 120);
LOG(INFO) << "Enable manual-exposure";
LOG(INFO) << "Set EXPOSURE_MODE to "
<< api->GetOptionValue(Option::EXPOSURE_MODE);
LOG(INFO) << "Set BRIGHTNESS to "
<< api->GetOptionValue(Option::BRIGHTNESS);
}
LOG(INFO) << "Enable manual-exposure";
LOG(INFO) << "Set BRIGHTNESS to " << api->GetOptionValue(Option::BRIGHTNESS);
api->Start(Source::VIDEO_STREAMING);
CVPainter painter(frame_rate);
CVPainter painter(30);
cv::namedWindow("frame");

View File

@@ -19,12 +19,8 @@ MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api) return 1;
bool ok;
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
if (!api)
return 1;
api->EnableStreamData(Stream::DEPTH);

View File

@@ -18,7 +18,8 @@ MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api) return 1;
if (!api)
return 1;
LOG(INFO) << "Device name: " << api->GetInfo(Info::DEVICE_NAME);
LOG(INFO) << "Serial number: " << api->GetInfo(Info::SERIAL_NUMBER);

View File

@@ -19,12 +19,8 @@ MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api) return 1;
bool ok;
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
if (!api)
return 1;
// api->EnableStreamData(Stream::DISPARITY);
api->EnableStreamData(Stream::DISPARITY_NORMALIZED);

View File

@@ -27,12 +27,8 @@ MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api) return 1;
bool ok;
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
if (!api)
return 1;
// Attention: must not block the callbacks.

View File

@@ -13,22 +13,16 @@
// limitations under the License.
#include "mynteye/logger.h"
#include "mynteye/api/api.h"
#include "mynteye/types.h"
MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api) return 1;
bool ok;
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
LOG(INFO) << "Intrinsics left: {" << *api->GetIntrinsicsBase(Stream::LEFT)
<< "}";
LOG(INFO) << "Intrinsics right: {" << *api->GetIntrinsicsBase(Stream::RIGHT)
if (!api)
return 1;
api->SetStreamRequest(Format::BGR888, FrameRate::RATE_30_FPS);
LOG(INFO) << "Intrinsics left: {" << api->GetIntrinsics(Stream::LEFT) << "}";
LOG(INFO) << "Intrinsics right: {" << api->GetIntrinsics(Stream::RIGHT)
<< "}";
LOG(INFO) << "Extrinsics right to left: {"
<< api->GetExtrinsics(Stream::RIGHT, Stream::LEFT) << "}";

View File

@@ -22,12 +22,8 @@ MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api) return 1;
bool ok;
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
if (!api)
return 1;
// Enable this will cache the motion datas until you get them.
api->EnableMotionDatas();

View File

@@ -18,7 +18,8 @@ MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api) return 1;
if (!api)
return 1;
LOG(INFO) << "Motion intrinsics: {" << api->GetMotionIntrinsics() << "}";
LOG(INFO) << "Motion extrinsics left to imu: {"

View File

@@ -21,12 +21,8 @@ MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api) return 1;
bool ok;
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
if (!api)
return 1;
api->EnableStreamData(Stream::POINTS);

View File

@@ -19,12 +19,8 @@ MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api) return 1;
bool ok;
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
if (!api)
return 1;
api->Start(Source::VIDEO_STREAMING);

View File

@@ -19,12 +19,8 @@ MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api) return 1;
bool ok;
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
if (!api)
return 1;
api->EnableStreamData(Stream::LEFT_RECTIFIED);
api->EnableStreamData(Stream::RIGHT_RECTIFIED);

View File

@@ -19,12 +19,8 @@ MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api) return 1;
bool ok;
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
if (!api)
return 1;
api->EnablePlugin("plugins/linux-x86_64/libplugin_g_cuda9.1_opencv3.4.0.so");

View File

@@ -0,0 +1,50 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/logger.h"
#include "mynteye/device/context.h"
#include "mynteye/device/device.h"
MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
glog_init _(argc, argv);
LOG(INFO) << "Detecting MYNT EYE devices";
Context context;
auto &&devices = context.devices();
size_t n = devices.size();
if (n <= 0) {
LOG(ERROR) << "No MYNT EYE devices :(";
return 1;
}
for (size_t i = 0; i < n; i++) {
auto &&device = devices[i];
LOG(INFO) << "MYNT EYE device index: " << i;
LOG(INFO) << " Device name: " << device->GetInfo(Info::DEVICE_NAME);
LOG(INFO) << " Serial number: " << device->GetInfo(Info::SERIAL_NUMBER);
LOG(INFO) << " Firmware version: "
<< device->GetInfo(Info::FIRMWARE_VERSION);
LOG(INFO) << " Hardware version: "
<< device->GetInfo(Info::HARDWARE_VERSION);
LOG(INFO) << " Spec version: " << device->GetInfo(Info::SPEC_VERSION);
LOG(INFO) << " Lens type: " << device->GetInfo(Info::LENS_TYPE);
LOG(INFO) << " IMU type: " << device->GetInfo(Info::IMU_TYPE);
LOG(INFO) << " Nominal baseline: "
<< device->GetInfo(Info::NOMINAL_BASELINE);
}
return 0;
}

View File

@@ -148,17 +148,12 @@ MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api) return 1;
bool ok;
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
if (!api)
return 1;
api->SetOptionValue(Option::IR_CONTROL, 80);
api->EnableStreamData(Stream::DISPARITY_NORMALIZED);
api->EnableStreamData(Stream::POINTS);
api->EnableStreamData(Stream::DEPTH);
api->Start(Source::VIDEO_STREAMING);

View File

@@ -82,7 +82,7 @@ cv::Rect CVPainter::DrawImgData(
std::ostringstream ss;
ss << "frame_id: " << data.frame_id;
ss << ", stamp: " << fmt_time << (0.001f * data.timestamp); // ms
ss << ", stamp: " << fmt_time << (0.01f * data.timestamp); // ms
ss << ", expo: ";
if (frame_rate_ == 0) {
ss << data.exposure_time;

View File

@@ -108,11 +108,7 @@ int main(int argc, char *argv[]) {
const auto frame_empty = [&frame]() { return frame == nullptr; };
uvc::set_device_mode(
#ifdef MYNTEYE_OS_MAC
*device, 752, 480, static_cast<int>(Format::YUYV), 25,
#else
*device, 1280, 400, static_cast<int>(Format::BGR888), 20,
#endif
[&mtx, &cv, &frame, &frame_ready](
const void *data, std::function<void()> continuation) {
// reinterpret_cast<const std::uint8_t *>(data);
@@ -147,12 +143,8 @@ int main(int argc, char *argv[]) {
}
// only lastest frame is valid
#ifdef MYNTEYE_OS_MAC
cv::Mat img(480, 752, CV_8UC2, const_cast<void *>(frame->data));
cv::cvtColor(img, img, cv::COLOR_YUV2BGR_YUY2);
#else
cv::Mat img(400, 1280, CV_8UC3, const_cast<void *>(frame->data));
#endif
cv::cvtColor(img, img, CV_BGR2RGB);
cv::imshow("frame", img);
frame = nullptr;

View File

@@ -15,7 +15,6 @@
# _VERBOSE_=1
# _INIT_LINTER_=1
# _INIT_COMMITIZEN_=1
# _FORCE_INSRALL_=1
_INSTALL_OPTIONS_=$@

View File

@@ -15,7 +15,6 @@
_INIT_BUILD_=1
# _INIT_LINTER_=1
# _INIT_COMMITIZEN_=1
# _FORCE_INSRALL_=1
# _INSTALL_OPTIONS_=-y
@@ -147,7 +146,6 @@ else # unexpected
exit 1
fi
## init linter - optional
if [ -n "${_INIT_LINTER_}" ]; then
@@ -219,30 +217,3 @@ else
fi
fi # _INIT_LINTER_
## init commitizen - optional
if [ -n "${_INIT_COMMITIZEN_}" ]; then
if _detect_cmd npm; then
_echo_d "npm install commitizen -g; npm install"
npm install commitizen -g; npm install
# if _detect_cmd node; then
# commitizen init cz-conventional-changelog --save-dev --save-exact
# npm install --save-dev @commitlint/{config-conventional,cli}
# npm install husky --save-dev
# else
# _echo_en "Skipped commitizen init, as node not found"
# fi
else
_echo_en "Skipped npm install packages, as npm not found"
_echo
_echo_e "Download Node.js from https://nodejs.org/, then add to \`~/.bashrc\`."
_echo
_echo_e " export PATH=\"/home/john/node-v10.14.2-linux-x64/bin:\$PATH\""
_echo
_echo_e "p.s. not \"apt-get install npm\", it's too old."
fi
fi # _INIT_COMMITIZEN_

View File

@@ -26,6 +26,7 @@
#include "mynteye/api/plugin.h"
#include "mynteye/api/synthetic.h"
#include "mynteye/device/device.h"
#include "mynteye/device/device_s.h"
#include "mynteye/device/utils.h"
#if defined(WITH_FILESYSTEM) && defined(WITH_NATIVE_FILESYSTEM)
@@ -207,62 +208,63 @@ std::vector<std::string> get_plugin_paths() {
} // namespace
API::API(std::shared_ptr<Device> device, CalibrationModel calib_model)
: device_(device) {
API::API(std::shared_ptr<Device> device) : device_(device) {
VLOG(2) << __func__;
// std::dynamic_pointer_cast<StandardDevice>(device_);
synthetic_.reset(new Synthetic(this, calib_model));
std::dynamic_pointer_cast<StandardDevice>(device_);
synthetic_.reset(new Synthetic(this));
}
API::~API() {
VLOG(2) << __func__;
}
std::shared_ptr<API> API::Create(int argc, char *argv[]) {
std::shared_ptr<API> API::Create(Resolution res) {
auto &&device = device::select();
if (!device) return nullptr;
return Create(argc, argv, device);
if (!device)
return nullptr;
device->InitResolution(res);
return std::make_shared<API>(device);
}
std::shared_ptr<API> API::Create(
int argc, char *argv[], const std::shared_ptr<Device> &device) {
static glog_init _(argc, argv);
return Create(device);
std::shared_ptr<Device> device, Resolution res) {
if (!device)
return nullptr;
device->InitResolution(res);
return std::make_shared<API>(device);
}
std::shared_ptr<API> API::Create(const std::shared_ptr<Device> &device) {
std::shared_ptr<API> api = nullptr;
if (device != nullptr) {
bool in_l_ok, in_r_ok;
auto left_intr = device->GetIntrinsics(Stream::LEFT, &in_l_ok);
auto right_intr = device->GetIntrinsics(Stream::RIGHT, &in_r_ok);
if (!in_l_ok || !in_r_ok) {
LOG(ERROR) << "Image params not found, but we need it to process the "
"images. Please `make tools` and use `img_params_writer` "
"to write the image params. If you update the SDK from "
"1.x, the `SN*.conf` is the file contains them. Besides, "
"you could also calibrate them by yourself. Read the guide "
"doc (https://github.com/slightech/MYNT-EYE-SDK-2-Guide) "
"to learn more.";
LOG(WARNING) << "use pinhole as default";
api = std::make_shared<API>(device, CalibrationModel::UNKNOW);
return api;
} else {
if (left_intr->calib_model() != right_intr->calib_model()) {
LOG(ERROR) << "left camera and right camera use different calib models!";
LOG(WARNING) << "use pinhole as default";
api = std::make_shared<API>(device, CalibrationModel::UNKNOW);
return api;
} else {
api = std::make_shared<API>(device, left_intr->calib_model());
return api;
}
// TODO(Kalman): Compatible with two generation
std::shared_ptr<API> API::Create(std::shared_ptr<Device> device) {
return Create(device, Resolution::RES_2560x800);
}
std::shared_ptr<API> API::Create(int argc, char *argv[]) {
auto &&device = device::select();
return Create(argc, argv, device);
}
// TODO(Kalman): Compatible with two generation
std::shared_ptr<API> API::Create(
int argc, char *argv[], std::shared_ptr<Device> device) {
static glog_init _(argc, argv);
int index = 0;
if (argc >= 2) {
try {
index = std::stoi(argv[1]);
} catch (...) {
LOG(WARNING) << "Unexpected index.";
}
} else {
LOG(ERROR) <<"no device!";
api = std::make_shared<API>(device, CalibrationModel::UNKNOW);
}
return api;
if (!device)
return nullptr;
if (index == 0)
device->InitResolution(Resolution::RES_1280x400);
else if (index == 1)
device->InitResolution(Resolution::RES_2560x800);
else
device->InitResolution(Resolution::RES_1280x400);
return std::make_shared<API>(device);
}
Model API::GetModel() const {
@@ -285,8 +287,12 @@ bool API::Supports(const AddOns &addon) const {
return device_->Supports(addon);
}
StreamRequest API::SelectStreamRequest(bool *ok) const {
return device::select_request(device_, ok);
void API::InitResolution(const Resolution &res) {
return device_->InitResolution(res);
}
void API::SetStreamRequest(const Format &format, const FrameRate &rate) {
device_->SetStreamRequest(format, rate);
}
const std::vector<StreamRequest> &API::GetStreamRequests(
@@ -297,46 +303,13 @@ const std::vector<StreamRequest> &API::GetStreamRequests(
void API::ConfigStreamRequest(
const Capabilities &capability, const StreamRequest &request) {
device_->ConfigStreamRequest(capability, request);
synthetic_->NotifyImageParamsChanged();
}
const StreamRequest &API::GetStreamRequest(
const Capabilities &capability) const {
return device_->GetStreamRequest(capability);
}
const std::vector<StreamRequest> &API::GetStreamRequests() const {
return device_->GetStreamRequests();
}
void API::ConfigStreamRequest(const StreamRequest &request) {
device_->ConfigStreamRequest(request);
synthetic_->NotifyImageParamsChanged();
}
const StreamRequest &API::GetStreamRequest() const {
return device_->GetStreamRequest();
}
std::shared_ptr<DeviceInfo> API::GetInfo() const {
return device_->GetInfo();
}
std::string API::GetInfo(const Info &info) const {
return device_->GetInfo(info);
}
IntrinsicsPinhole API::GetIntrinsics(const Stream &stream) const {
auto in = GetIntrinsicsBase(stream);
if (in->calib_model() == CalibrationModel::PINHOLE) {
return *std::dynamic_pointer_cast<IntrinsicsPinhole>(in);
}
throw std::runtime_error("Intrinsics is not pinhole model"
", please use GetIntrinsicsBase() or GetIntrinsics<T>() instead.");
}
std::shared_ptr<IntrinsicsBase> API::GetIntrinsicsBase(
const Stream &stream) const {
Intrinsics API::GetIntrinsics(const Stream &stream) const {
return device_->GetIntrinsics(stream);
}

View File

@@ -1,78 +0,0 @@
#ifndef CAMERACALIBRATION_H
#define CAMERACALIBRATION_H
#include <opencv2/core/core.hpp>
#include "camodocal/camera_models/Camera.h"
namespace camodocal {
class CameraCalibration {
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
CameraCalibration();
CameraCalibration(
Camera::ModelType modelType, const std::string &cameraName,
const cv::Size &imageSize, const cv::Size &boardSize, float squareSize);
void clear(void);
void addChessboardData(const std::vector<cv::Point2f> &corners);
bool calibrate(void);
int sampleCount(void) const;
std::vector<std::vector<cv::Point2f> > &imagePoints(void);
const std::vector<std::vector<cv::Point2f> > &imagePoints(void) const;
std::vector<std::vector<cv::Point3f> > &scenePoints(void);
const std::vector<std::vector<cv::Point3f> > &scenePoints(void) const;
CameraPtr &camera(void);
const CameraConstPtr camera(void) const;
Eigen::Matrix2d &measurementCovariance(void);
const Eigen::Matrix2d &measurementCovariance(void) const;
cv::Mat &cameraPoses(void);
const cv::Mat &cameraPoses(void) const;
void drawResults(std::vector<cv::Mat> &images) const;
void writeParams(const std::string &filename) const;
bool writeChessboardData(const std::string &filename) const;
bool readChessboardData(const std::string &filename);
void setVerbose(bool verbose);
private:
bool calibrateHelper(
CameraPtr &camera, std::vector<cv::Mat> &rvecs,
std::vector<cv::Mat> &tvecs) const;
void optimize(
CameraPtr &camera, std::vector<cv::Mat> &rvecs,
std::vector<cv::Mat> &tvecs) const;
template <typename T>
void readData(std::ifstream &ifs, T &data) const;
template <typename T>
void writeData(std::ofstream &ofs, T data) const;
cv::Size m_boardSize;
float m_squareSize;
CameraPtr m_camera;
cv::Mat m_cameraPoses;
std::vector<std::vector<cv::Point2f> > m_imagePoints;
std::vector<std::vector<cv::Point3f> > m_scenePoints;
Eigen::Matrix2d m_measurementCovariance;
bool m_verbose;
};
}
#endif

View File

@@ -1,53 +0,0 @@
#ifndef STEREOCAMERACALIBRATION_H
#define STEREOCAMERACALIBRATION_H
#include "CameraCalibration.h"
namespace camodocal {
class StereoCameraCalibration {
public:
StereoCameraCalibration(
Camera::ModelType modelType, const std::string &cameraLeftName,
const std::string &cameraRightName, const cv::Size &imageSize,
const cv::Size &boardSize, float squareSize);
void clear(void);
void addChessboardData(
const std::vector<cv::Point2f> &cornersLeft,
const std::vector<cv::Point2f> &cornersRight);
bool calibrate(void);
int sampleCount(void) const;
const std::vector<std::vector<cv::Point2f> > &imagePointsLeft(void) const;
const std::vector<std::vector<cv::Point2f> > &imagePointsRight(void) const;
const std::vector<std::vector<cv::Point3f> > &scenePoints(void) const;
CameraPtr &cameraLeft(void);
const CameraConstPtr cameraLeft(void) const;
CameraPtr &cameraRight(void);
const CameraConstPtr cameraRight(void) const;
void drawResults(
std::vector<cv::Mat> &imagesLeft,
std::vector<cv::Mat> &imagesRight) const;
void writeParams(const std::string &directory) const;
void setVerbose(bool verbose);
private:
CameraCalibration m_calibLeft;
CameraCalibration m_calibRight;
Eigen::Quaterniond m_q;
Eigen::Vector3d m_t;
bool m_verbose;
std::vector<double> stereo_error;
};
}
#endif

View File

@@ -1,29 +0,0 @@
#ifndef CAMERAFACTORY_H
#define CAMERAFACTORY_H
#include <boost/shared_ptr.hpp>
#include <opencv2/core/core.hpp>
#include "camodocal/camera_models/Camera.h"
namespace camodocal {
class CameraFactory {
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
CameraFactory();
static boost::shared_ptr<CameraFactory> instance(void);
CameraPtr generateCamera(
Camera::ModelType modelType, const std::string &cameraName,
cv::Size imageSize) const;
CameraPtr generateCameraFromYamlFile(const std::string &filename);
private:
static boost::shared_ptr<CameraFactory> m_instance;
};
}
#endif

View File

@@ -1,85 +0,0 @@
#ifndef CHESSBOARD_H
#define CHESSBOARD_H
#include <boost/shared_ptr.hpp>
#include <opencv2/core/core.hpp>
namespace camodocal {
// forward declarations
class ChessboardCorner;
typedef boost::shared_ptr<ChessboardCorner> ChessboardCornerPtr;
class ChessboardQuad;
typedef boost::shared_ptr<ChessboardQuad> ChessboardQuadPtr;
class Chessboard {
public:
Chessboard(cv::Size boardSize, cv::Mat &image);
void findCorners(bool useOpenCV = false);
const std::vector<cv::Point2f> &getCorners(void) const;
bool cornersFound(void) const;
const cv::Mat &getImage(void) const;
const cv::Mat &getSketch(void) const;
private:
bool findChessboardCorners(
const cv::Mat &image, const cv::Size &patternSize,
std::vector<cv::Point2f> &corners, int flags, bool useOpenCV);
bool findChessboardCornersImproved(
const cv::Mat &image, const cv::Size &patternSize,
std::vector<cv::Point2f> &corners, int flags);
void cleanFoundConnectedQuads(
std::vector<ChessboardQuadPtr> &quadGroup, cv::Size patternSize);
void findConnectedQuads(
std::vector<ChessboardQuadPtr> &quads,
std::vector<ChessboardQuadPtr> &group, int group_idx, int dilation);
// int checkQuadGroup(std::vector<ChessboardQuadPtr>& quadGroup,
// std::vector<ChessboardCornerPtr>& outCorners,
// cv::Size patternSize);
void labelQuadGroup(
std::vector<ChessboardQuadPtr> &quad_group, cv::Size patternSize,
bool firstRun);
void findQuadNeighbors(std::vector<ChessboardQuadPtr> &quads, int dilation);
int augmentBestRun(
std::vector<ChessboardQuadPtr> &candidateQuads, int candidateDilation,
std::vector<ChessboardQuadPtr> &existingQuads, int existingDilation);
void generateQuads(
std::vector<ChessboardQuadPtr> &quads, cv::Mat &image, int flags,
int dilation, bool firstRun);
bool checkQuadGroup(
std::vector<ChessboardQuadPtr> &quads,
std::vector<ChessboardCornerPtr> &corners, cv::Size patternSize);
void getQuadrangleHypotheses(
const std::vector<std::vector<cv::Point> > &contours,
std::vector<std::pair<float, int> > &quads, int classId) const;
bool checkChessboard(const cv::Mat &image, cv::Size patternSize) const;
bool checkBoardMonotony(
std::vector<ChessboardCornerPtr> &corners, cv::Size patternSize);
bool matchCorners(
ChessboardQuadPtr &quad1, int corner1, ChessboardQuadPtr &quad2,
int corner2) const;
cv::Mat mImage;
cv::Mat mSketch;
std::vector<cv::Point2f> mCorners;
cv::Size mBoardSize;
bool mCornersFound;
};
}
#endif

View File

@@ -1,39 +0,0 @@
#ifndef CHESSBOARDCORNER_H
#define CHESSBOARDCORNER_H
#include <boost/shared_ptr.hpp>
#include <opencv2/core/core.hpp>
namespace camodocal {
class ChessboardCorner;
typedef boost::shared_ptr<ChessboardCorner> ChessboardCornerPtr;
// A single detected chessboard corner. Each corner stores its pixel
// position plus links to up to four adjacent corners in the grid.
class ChessboardCorner {
 public:
  ChessboardCorner() : row(0), column(0), needsNeighbor(true), count(0) {}

  // Mean Euclidean distance from this corner to its populated
  // neighbor slots. On return, `n` holds how many neighbors exist.
  float meanDist(int &n) const {
    float total = 0;
    n = 0;
    for (int k = 0; k < 4; ++k) {
      if (!neighbors[k].get()) {
        continue;  // empty neighbor slot
      }
      float dx = neighbors[k]->pt.x - pt.x;
      float dy = neighbors[k]->pt.y - pt.y;
      total += sqrt(dx * dx + dy * dy);
      n++;
    }
    // max(n, 1) guards against division by zero when isolated.
    return total / std::max(n, 1);
  }

  cv::Point2f pt;                    // X and Y pixel coordinates
  int row;                           // Row of the corner in the found pattern
  int column;                        // Column of the corner in the found pattern
  bool needsNeighbor;                // Does the corner still require a neighbor?
  int count;                         // Number of populated neighbor slots
  ChessboardCornerPtr neighbors[4];  // Pointers to adjacent corners
};
}
#endif

View File

@@ -1,27 +0,0 @@
#ifndef CHESSBOARDQUAD_H
#define CHESSBOARDQUAD_H
#include <boost/shared_ptr.hpp>
#include "camodocal/chessboard/ChessboardCorner.h"
namespace camodocal {
class ChessboardQuad;
typedef boost::shared_ptr<ChessboardQuad> ChessboardQuadPtr;
// A quadrangle candidate found on the chessboard: four corners plus
// links to adjacent quads, used while grouping and labeling quads.
class ChessboardQuad {
 public:
  ChessboardQuad() {
    count = 0;
    group_idx = -1;     // -1 = not yet assigned to a group
    edge_len = FLT_MAX;
    labeled = false;
  }
  int count;                       // Number of quad neighbors
  int group_idx;                   // Quad group ID
  float edge_len;                  // Smallest squared side length
  ChessboardCornerPtr corners[4];  // Corners of this quad
  ChessboardQuadPtr neighbors[4];  // Adjacent quads
  bool labeled;                    // Has this quad been labeled?
};
}
#endif

View File

@@ -1,336 +0,0 @@
/* dynamo:- Event driven molecular dynamics simulator
http://www.marcusbannerman.co.uk/dynamo
Copyright (C) 2011 Marcus N Campbell Bannerman <m.bannerman@gmail.com>
This program is free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License
version 3 as published by the Free Software Foundation.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#pragma once
#include <boost/numeric/ublas/lu.hpp>
#include <boost/numeric/ublas/matrix.hpp>
#include <boost/numeric/ublas/triangular.hpp>
#include <boost/numeric/ublas/vector.hpp>
#include <boost/numeric/ublas/vector_proxy.hpp>
#include <exception>
namespace ublas = boost::numeric::ublas;
// Cubic (or piecewise-linear) spline interpolator over (x, y) samples.
// Privately inherits the sample container so every mutation can flip
// `_valid` and force a re-fit on the next evaluation.
class Spline : private std::vector<std::pair<double, double> > {
 public:
  // The boundary conditions available
  enum BC_type { FIXED_1ST_DERIV_BC, FIXED_2ND_DERIV_BC, PARABOLIC_RUNOUT_BC };
  enum Spline_type { LINEAR, CUBIC };
  // Constructor takes the boundary conditions as arguments, this
  // sets the first derivative (gradient) at the lower and upper
  // end points
  Spline()
      : _valid(false),
        _BCLow(FIXED_2ND_DERIV_BC),
        _BCHigh(FIXED_2ND_DERIV_BC),
        _BCLowVal(0),
        _BCHighVal(0),
        _type(CUBIC) {}
  typedef std::vector<std::pair<double, double> > base;
  typedef base::const_iterator const_iterator;
  // Standard STL read-only container stuff
  const_iterator begin() const {
    return base::begin();
  }
  const_iterator end() const {
    return base::end();
  }
  // Drops both the sample points and any fitted coefficients.
  void clear() {
    _valid = false;
    base::clear();
    _data.clear();
  }
  size_t size() const {
    return base::size();
  }
  size_t max_size() const {
    return base::max_size();
  }
  size_t capacity() const {
    return base::capacity();
  }
  bool empty() const {
    return base::empty();
  }
  // Add a point to the spline, and invalidate it so its
  // recalculated on the next access
  inline void addPoint(double x, double y) {
    _valid = false;
    base::push_back(std::pair<double, double>(x, y));
  }
  // Reset the boundary conditions
  inline void setLowBC(BC_type BC, double val = 0) {
    _BCLow = BC;
    _BCLowVal = val;
    _valid = false;
  }
  inline void setHighBC(BC_type BC, double val = 0) {
    _BCHigh = BC;
    _BCHighVal = val;
    _valid = false;
  }
  void setType(Spline_type type) {
    _type = type;
    _valid = false;
  }
  // Check if the spline has been calculated, then generate the
  // spline interpolated value
  double operator()(double xval) {
    if (!_valid)
      generate();
    // Special cases when we're outside the range of the spline points
    if (xval <= x(0))
      return lowCalc(xval);
    if (xval >= x(size() - 1))
      return highCalc(xval);
    // Check all intervals except the last one
    for (std::vector<SplineData>::const_iterator iPtr = _data.begin();
         iPtr != _data.end() - 1; ++iPtr)
      if ((xval >= iPtr->x) && (xval <= (iPtr + 1)->x))
        return splineCalc(iPtr, xval);
    return splineCalc(_data.end() - 1, xval);
  }

 private:
  ///////PRIVATE DATA MEMBERS
  // Per-interval polynomial a*lx^3 + b*lx^2 + c*lx + d with lx = xval - x.
  struct SplineData {
    double x, a, b, c, d;
  };
  // vector of calculated spline data
  std::vector<SplineData> _data;
  // Second derivative at each point
  ublas::vector<double> _ddy;
  // Tracks whether the spline parameters have been calculated for
  // the current set of points
  bool _valid;
  // The boundary conditions
  BC_type _BCLow, _BCHigh;
  // The values of the boundary conditions
  double _BCLowVal, _BCHighVal;
  Spline_type _type;
  ///////PRIVATE FUNCTIONS
  // Function to calculate the value of a given spline at a point xval
  // (Horner evaluation of the interval's cubic).
  inline double splineCalc(
      std::vector<SplineData>::const_iterator i, double xval) {
    const double lx = xval - i->x;
    return ((i->a * lx + i->b) * lx + i->c) * lx + i->d;
  }
  // Extrapolate below the first sample point.
  inline double lowCalc(double xval) {
    const double lx = xval - x(0);
    // NOTE(review): the LINEAR branch uses _BCHighVal rather than the
    // slope of the first segment — looks suspicious; kept as-is, confirm
    // against upstream dynamo source.
    if (_type == LINEAR)
      return lx * _BCHighVal + y(0);
    // NOTE(review): _data[1] is read here, which assumes at least three
    // sample points; with exactly two this indexes past _data — verify.
    const double firstDeriv =
        (y(1) - y(0)) / h(0) - 2 * h(0) * (_data[0].b + 2 * _data[1].b) / 6;
    switch (_BCLow) {
      case FIXED_1ST_DERIV_BC:
        return lx * _BCLowVal + y(0);
      case FIXED_2ND_DERIV_BC:
        return lx * lx * _BCLowVal + firstDeriv * lx + y(0);
      case PARABOLIC_RUNOUT_BC:
        return lx * lx * _ddy[0] + lx * firstDeriv + y(0);
    }
    throw std::runtime_error("Unknown BC");
  }
  // Extrapolate above the last sample point.
  inline double highCalc(double xval) {
    const double lx = xval - x(size() - 1);
    if (_type == LINEAR)
      return lx * _BCHighVal + y(size() - 1);
    const double firstDeriv =
        2 * h(size() - 2) * (_ddy[size() - 2] + 2 * _ddy[size() - 1]) / 6 +
        (y(size() - 1) - y(size() - 2)) / h(size() - 2);
    switch (_BCHigh) {
      case FIXED_1ST_DERIV_BC:
        return lx * _BCHighVal + y(size() - 1);
      case FIXED_2ND_DERIV_BC:
        return lx * lx * _BCHighVal + firstDeriv * lx + y(size() - 1);
      case PARABOLIC_RUNOUT_BC:
        return lx * lx * _ddy[size() - 1] + lx * firstDeriv + y(size() - 1);
    }
    throw std::runtime_error("Unknown BC");
  }
  // These just provide access to the point data in a clean way
  inline double x(size_t i) const {
    return operator[](i).first;
  }
  inline double y(size_t i) const {
    return operator[](i).second;
  }
  // Width of interval i: x(i+1) - x(i).
  inline double h(size_t i) const {
    return x(i + 1) - x(i);
  }
  // Invert a arbitrary matrix using the boost ublas library
  // (A is taken by value because lu_factorize destroys it).
  template <class T>
  bool InvertMatrix(ublas::matrix<T> A, ublas::matrix<T> &inverse) {
    using namespace ublas;
    // create a permutation matrix for the LU-factorization
    permutation_matrix<std::size_t> pm(A.size1());
    // perform LU-factorization
    int res = lu_factorize(A, pm);
    if (res != 0)
      return false;
    // create identity matrix of "inverse"
    inverse.assign(ublas::identity_matrix<T>(A.size1()));
    // backsubstitute to get the inverse
    lu_substitute(A, pm, inverse);
    return true;
  }
  // This function will recalculate the spline parameters and store
  // them in _data, ready for spline interpolation
  void generate() {
    if (size() < 2)
      throw std::runtime_error("Spline requires at least 2 points");
    // If any spline points are at the same x location, we have to
    // just slightly seperate them
    {
      bool testPassed(false);
      while (!testPassed) {
        testPassed = true;
        std::sort(base::begin(), base::end());
        for (base::iterator iPtr = base::begin(); iPtr != base::end() - 1;
             ++iPtr)
          if (iPtr->first == (iPtr + 1)->first) {
            // Nudge the duplicate upward by ~10 ulps (or an absolute
            // epsilon when the coordinate is exactly zero), then re-sort.
            if ((iPtr + 1)->first != 0)
              (iPtr + 1)->first += (iPtr + 1)->first *
                  std::numeric_limits<double>::epsilon() * 10;
            else
              (iPtr + 1)->first = std::numeric_limits<double>::epsilon() * 10;
            testPassed = false;
            break;
          }
      }
    }
    const size_t e = size() - 1;
    switch (_type) {
      case LINEAR: {
        // One straight segment per interval: slope c, offset d.
        _data.resize(e);
        for (size_t i(0); i < e; ++i) {
          _data[i].x = x(i);
          _data[i].a = 0;
          _data[i].b = 0;
          _data[i].c = (y(i + 1) - y(i)) / (x(i + 1) - x(i));
          _data[i].d = y(i);
        }
        break;
      }
      case CUBIC: {
        // Build the tridiagonal system for the second derivatives.
        // NOTE(review): A is filled as A(column, row) — i.e. transposed
        // relative to the usual convention; the prod(C, AInv) below
        // compensates for this. Confirm before touching either side.
        ublas::matrix<double> A(size(), size());
        for (size_t yv(0); yv <= e; ++yv)
          for (size_t xv(0); xv <= e; ++xv)
            A(xv, yv) = 0;
        for (size_t i(1); i < e; ++i) {
          A(i - 1, i) = h(i - 1);
          A(i, i) = 2 * (h(i - 1) + h(i));
          A(i + 1, i) = h(i);
        }
        ublas::vector<double> C(size());
        for (size_t xv(0); xv <= e; ++xv)
          C(xv) = 0;
        for (size_t i(1); i < e; ++i)
          C(i) = 6 * ((y(i + 1) - y(i)) / h(i) - (y(i) - y(i - 1)) / h(i - 1));
        // Boundary conditions
        switch (_BCLow) {
          case FIXED_1ST_DERIV_BC:
            C(0) = 6 * ((y(1) - y(0)) / h(0) - _BCLowVal);
            A(0, 0) = 2 * h(0);
            A(1, 0) = h(0);
            break;
          case FIXED_2ND_DERIV_BC:
            C(0) = _BCLowVal;
            A(0, 0) = 1;
            break;
          case PARABOLIC_RUNOUT_BC:
            C(0) = 0;
            A(0, 0) = 1;
            A(1, 0) = -1;
            break;
        }
        switch (_BCHigh) {
          case FIXED_1ST_DERIV_BC:
            C(e) = 6 * (_BCHighVal - (y(e) - y(e - 1)) / h(e - 1));
            A(e, e) = 2 * h(e - 1);
            A(e - 1, e) = h(e - 1);
            break;
          case FIXED_2ND_DERIV_BC:
            C(e) = _BCHighVal;
            A(e, e) = 1;
            break;
          case PARABOLIC_RUNOUT_BC:
            C(e) = 0;
            A(e, e) = 1;
            A(e - 1, e) = -1;
            break;
        }
        // Solve for the second derivatives by explicit inversion.
        // NOTE(review): InvertMatrix's failure return is ignored here; a
        // singular system would leave AInv as the identity — verify.
        ublas::matrix<double> AInv(size(), size());
        InvertMatrix(A, AInv);
        _ddy = ublas::prod(C, AInv);
        // Convert second derivatives into per-interval cubic coefficients.
        _data.resize(size() - 1);
        for (size_t i(0); i < e; ++i) {
          _data[i].x = x(i);
          _data[i].a = (_ddy(i + 1) - _ddy(i)) / (6 * h(i));
          _data[i].b = _ddy(i) / 2;
          _data[i].c = (y(i + 1) - y(i)) / h(i) - _ddy(i + 1) * h(i) / 6 -
              _ddy(i) * h(i) / 3;
          _data[i].d = y(i);
        }
      }
    }
    _valid = true;
  }
};

File diff suppressed because it is too large Load Diff

View File

@@ -1,64 +0,0 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/api/config.h"
MYNTEYE_BEGIN_NAMESPACE
/**
* default intrinsics
*/
/**
 * Builds the fallback pinhole intrinsics (640x400, pre-measured focal
 * lengths, principal point and distortion coefficients) used when no
 * calibration is read from the device.
 */
std::shared_ptr<IntrinsicsBase> getDefaultIntrinsics() {
  auto in = std::make_shared<IntrinsicsPinhole>();
  in->width = 640;
  in->height = 400;
  in->model = 0;
  in->fx = 3.6220059643202876e+02;
  in->fy = 3.6350065250745848e+02;
  in->cx = 4.0658699068023441e+02;
  in->cy = 2.3435161110061483e+02;
  // Distortion coefficients: k1, k2, p1, p2, k3.
  const double distortion[5] = {
      -2.5034765682756088e-01, 5.0579399202897619e-02,
      -7.0536676161976066e-04, -8.5255451307033846e-03, 0.};
  for (unsigned int k = 0; k < 5; k++) {
    in->coeffs[k] = distortion[k];
  }
  return in;
}
/**
 * Builds the fallback left-to-right extrinsics (rotation matrix and
 * translation vector) used when no calibration is read from the device.
 */
std::shared_ptr<Extrinsics> getDefaultExtrinsics() {
  auto ex = std::make_shared<Extrinsics>();
  // Row-major 3x3 rotation.
  const double R[9] = {
      9.9867908939669447e-01, -6.3445566137485428e-03, 5.0988459509619687e-02,
      5.9890316389333252e-03, 9.9995670037792639e-01, 7.1224201868366971e-03,
      -5.1031440326695092e-02, -6.8076406092671274e-03, 9.9867384471984544e-01
  };
  const double t[3] = {-1.2002489764113250e+02, -1.1782637409050747e+00,
                       -5.2058205159996538e+00};
  // Unpack the flat rotation array into the 3x3 member.
  for (unsigned int k = 0; k < 9; k++) {
    ex->rotation[k / 3][k % 3] = R[k];
  }
  for (unsigned int k = 0; k < 3; k++) {
    ex->translation[k] = t[k];
  }
  return ex;
}
MYNTEYE_END_NAMESPACE

View File

@@ -21,15 +21,9 @@ MYNTEYE_BEGIN_NAMESPACE
const char DepthProcessor::NAME[] = "DepthProcessor";
const int DISPARITY_MIN = 0;
const int DISPARITY_MAX = 64;
DepthProcessor::DepthProcessor(
std::shared_ptr<struct camera_calib_info_pair> calib_infos,
std::int32_t proc_period)
: Processor(std::move(proc_period)),
calib_infos_(calib_infos) {
VLOG(2) << __func__;
DepthProcessor::DepthProcessor(std::int32_t proc_period)
: Processor(std::move(proc_period)) {
VLOG(2) << __func__ << ": proc_period=" << proc_period;
}
DepthProcessor::~DepthProcessor() {
@@ -49,21 +43,9 @@ bool DepthProcessor::OnProcess(
MYNTEYE_UNUSED(parent)
const ObjMat *input = Object::Cast<ObjMat>(in);
ObjMat *output = Object::Cast<ObjMat>(out);
int rows = input->value.rows;
int cols = input->value.cols;
// std::cout << calib_infos_->T_mul_f << std::endl;
// 0.0793434
cv::Mat depth_mat = cv::Mat::zeros(rows, cols, CV_16U);
for (int i = 0; i < rows; i++) {
for (int j = 0; j < cols; j++) {
float disparity_value = input->value.at<float>(i, j);
if (disparity_value < DISPARITY_MAX && disparity_value > DISPARITY_MIN) {
float depth = calib_infos_->T_mul_f / disparity_value;
depth_mat.at<ushort>(i, j) = depth;
}
}
}
output->value = depth_mat;
cv::Mat channels[3 /*input->value.channels()*/];
cv::split(input->value, channels);
channels[2].convertTo(output->value, CV_16UC1);
output->id = input->id;
output->data = input->data;
return true;

View File

@@ -18,7 +18,6 @@
#include <string>
#include "mynteye/api/processor.h"
#include "mynteye/api/processor/rectify_processor.h"
MYNTEYE_BEGIN_NAMESPACE
@@ -26,9 +25,7 @@ class DepthProcessor : public Processor {
public:
static const char NAME[];
explicit DepthProcessor(
std::shared_ptr<struct camera_calib_info_pair> calib_infos,
std::int32_t proc_period = 0);
explicit DepthProcessor(std::int32_t proc_period = 0);
virtual ~DepthProcessor();
std::string Name() override;
@@ -37,8 +34,6 @@ class DepthProcessor : public Processor {
Object *OnCreateOutput() override;
bool OnProcess(
Object *const in, Object *const out, Processor *const parent) override;
private:
std::shared_ptr<struct camera_calib_info_pair> calib_infos_;
};
MYNTEYE_END_NAMESPACE

View File

@@ -1,54 +0,0 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/api/processor/depth_processor_ocv.h"
#include <utility>
#include "mynteye/logger.h"
MYNTEYE_BEGIN_NAMESPACE
const char DepthProcessorOCV::NAME[] = "DepthProcessorOCV";
// Constructs the OpenCV depth stage.
// proc_period: minimum interval between processed frames (project
// convention; 0 presumably means "process every frame" — confirm in
// Processor). std::int32_t is trivially copyable, so it is passed
// through directly: std::move() on an arithmetic type is a no-op copy
// and reading the "moved-from" value in the log below was misleading.
DepthProcessorOCV::DepthProcessorOCV(std::int32_t proc_period)
    : Processor(proc_period) {
  VLOG(2) << __func__ << ": proc_period=" << proc_period;
}
// Nothing to release explicitly; the log line exists only for
// lifecycle tracing at verbose level 2.
DepthProcessorOCV::~DepthProcessorOCV() {
  VLOG(2) << __func__;
}
// Identifier of this pipeline stage ("DepthProcessorOCV").
std::string DepthProcessorOCV::Name() {
  return std::string(NAME);
}
// Allocates the mat container that OnProcess() fills.
// NOTE(review): returned raw pointer is presumably owned by the
// Processor pipeline — confirm against mynteye/api/processor.h.
Object *DepthProcessorOCV::OnCreateOutput() {
  return new ObjMat();
}
// Extracts the depth plane (channel 2) of the 3-channel input mat and
// converts it to a CV_16UC1 image, propagating frame id/data.
// Returns false (frame dropped) when the input is not 3-channel.
bool DepthProcessorOCV::OnProcess(
    Object *const in, Object *const out, Processor *const parent) {
  MYNTEYE_UNUSED(parent)
  const ObjMat *input = Object::Cast<ObjMat>(in);
  ObjMat *output = Object::Cast<ObjMat>(out);
  // The fixed-size array below only holds 3 planes: splitting a mat
  // with more channels would overrun the stack buffer, and fewer would
  // leave channels[2] empty. Fail fast instead of corrupting memory.
  if (input->value.channels() != 3) {
    LOG(ERROR) << NAME << ": expected 3-channel input, got "
               << input->value.channels();
    return false;
  }
  cv::Mat channels[3];
  cv::split(input->value, channels);
  // Channel 2 carries depth; emit it as 16-bit unsigned.
  channels[2].convertTo(output->value, CV_16UC1);
  output->id = input->id;
  output->data = input->data;
  return true;
}
MYNTEYE_END_NAMESPACE

View File

@@ -1,41 +0,0 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_API_PROCESSOR_DEPTH_PROCESSOR_OCV_H_
#define MYNTEYE_API_PROCESSOR_DEPTH_PROCESSOR_OCV_H_
#pragma once
#include <string>
#include "mynteye/api/processor.h"
MYNTEYE_BEGIN_NAMESPACE
// Pipeline stage that extracts the depth plane from a 3-channel input
// mat and emits it as a CV_16UC1 image (see the matching .cc, which
// splits channels and converts channel 2).
class DepthProcessorOCV : public Processor {
 public:
  static const char NAME[];  // stage identifier returned by Name()
  // proc_period: processing interval forwarded to Processor;
  // 0 presumably means "process every frame" — TODO confirm.
  explicit DepthProcessorOCV(std::int32_t proc_period = 0);
  virtual ~DepthProcessorOCV();
  std::string Name() override;

 protected:
  // Allocates the ObjMat that OnProcess() fills.
  Object *OnCreateOutput() override;
  // Converts the disparity/points input into a depth image.
  bool OnProcess(
      Object *const in, Object *const out, Processor *const parent) override;
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_API_PROCESSOR_DEPTH_PROCESSOR_OCV_H_

View File

@@ -16,124 +16,48 @@
#include <utility>
#include <opencv2/calib3d/calib3d.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#ifdef WITH_BM_SOBEL_FILTER
#include <opencv2/ximgproc/disparity_filter.hpp>
#endif
#include "mynteye/logger.h"
#define WITH_BM_SOBEL_FILTER
MYNTEYE_BEGIN_NAMESPACE
const char DisparityProcessor::NAME[] = "DisparityProcessor";
DisparityProcessor::DisparityProcessor(DisparityProcessorType type,
std::int32_t proc_period)
: Processor(std::move(proc_period)), type_(type) {
DisparityProcessor::DisparityProcessor(std::int32_t proc_period)
: Processor(std::move(proc_period)) {
VLOG(2) << __func__ << ": proc_period=" << proc_period;
if (type_ == DisparityProcessorType::SGBM) {
int sgbmWinSize = 3;
int numberOfDisparities = 64;
int sgbmWinSize = 3;
int numberOfDisparities = 64;
#ifdef WITH_OPENCV2
// StereoSGBM
// http://docs.opencv.org/2.4/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html?#stereosgbm
sgbm_matcher = cv::Ptr<cv::StereoSGBM>(
new cv::StereoSGBM(
0, // minDisparity
numberOfDisparities, // numDisparities
sgbmWinSize, // SADWindowSize
8 * sgbmWinSize * sgbmWinSize, // P1
32 * sgbmWinSize * sgbmWinSize, // P2
1, // disp12MaxDiff
63, // preFilterCap
10, // uniquenessRatio
100, // speckleWindowSize
32, // speckleRange
false)); // fullDP
// StereoSGBM
// http://docs.opencv.org/2.4/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html?#stereosgbm
sgbm_ = cv::Ptr<cv::StereoSGBM>(
new cv::StereoSGBM(
0, // minDisparity
numberOfDisparities, // numDisparities
sgbmWinSize, // SADWindowSize
8 * sgbmWinSize * sgbmWinSize, // P1
32 * sgbmWinSize * sgbmWinSize, // P2
1, // disp12MaxDiff
63, // preFilterCap
10, // uniquenessRatio
100, // speckleWindowSize
32, // speckleRange
false)); // fullDP
#else
sgbm_matcher = cv::StereoSGBM::create(0, 16, 3);
sgbm_matcher->setPreFilterCap(63);
sgbm_matcher->setBlockSize(sgbmWinSize);
sgbm_matcher->setP1(8 * sgbmWinSize * sgbmWinSize);
sgbm_matcher->setP2(32 * sgbmWinSize * sgbmWinSize);
sgbm_matcher->setMinDisparity(0);
sgbm_matcher->setNumDisparities(numberOfDisparities);
sgbm_matcher->setUniquenessRatio(10);
sgbm_matcher->setSpeckleWindowSize(100);
sgbm_matcher->setSpeckleRange(32);
sgbm_matcher->setDisp12MaxDiff(1);
sgbm_ = cv::StereoSGBM::create(0, 16, 3);
sgbm_->setPreFilterCap(63);
sgbm_->setBlockSize(sgbmWinSize);
sgbm_->setP1(8 * sgbmWinSize * sgbmWinSize);
sgbm_->setP2(32 * sgbmWinSize * sgbmWinSize);
sgbm_->setMinDisparity(0);
sgbm_->setNumDisparities(numberOfDisparities);
sgbm_->setUniquenessRatio(10);
sgbm_->setSpeckleWindowSize(100);
sgbm_->setSpeckleRange(32);
sgbm_->setDisp12MaxDiff(1);
#endif
#ifdef WITH_BM_SOBEL_FILTER
} else if (type_ == DisparityProcessorType::BM) {
int bmWinSize = 3;
#ifdef WITH_OPENCV2
int bmWinSize = 3;
// StereoBM
// https://docs.opencv.org/2.4/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html#stereobm-stereobm
bm_matcher = cv::Ptr<cv::StereoBM>(new cv::StereoBM(
int 0,
64,
100,
8 * bmWinSize * bmWinSize,
32 * bmWinSize * bmWinSize,
int -1,
int 31,
15,
100,
4));
#else
bm_matcher = cv::StereoBM::create(0, 3);
bm_matcher->setPreFilterSize(9);
bm_matcher->setPreFilterCap(31);
bm_matcher->setBlockSize(bmWinSize);
bm_matcher->setMinDisparity(0);
bm_matcher->setNumDisparities(64);
bm_matcher->setUniquenessRatio(15);
bm_matcher->setTextureThreshold(10);
bm_matcher->setSpeckleWindowSize(100);
bm_matcher->setSpeckleRange(4);
bm_matcher->setPreFilterType(cv::StereoBM::PREFILTER_XSOBEL);
#endif
#endif
} else {
LOG(ERROR) << "no enum DisparityProcessorType,use default sgbm";
int sgbmWinSize = 3;
int numberOfDisparities = 64;
#ifdef WITH_OPENCV2
// StereoSGBM
// http://docs.opencv.org/2.4/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html?#stereosgbm
sgbm_matcher = cv::Ptr<cv::StereoSGBM>(
new cv::StereoSGBM(
0, // minDisparity
numberOfDisparities, // numDisparities
sgbmWinSize, // SADWindowSize
8 * sgbmWinSize * sgbmWinSize, // P1
32 * sgbmWinSize * sgbmWinSize, // P2
1, // disp12MaxDiff
63, // preFilterCap
10, // uniquenessRatio
100, // speckleWindowSize
32, // speckleRange
false)); // fullDP
#else
sgbm_matcher = cv::StereoSGBM::create(0, 16, 3);
sgbm_matcher->setPreFilterCap(63);
sgbm_matcher->setBlockSize(sgbmWinSize);
sgbm_matcher->setP1(8 * sgbmWinSize * sgbmWinSize);
sgbm_matcher->setP2(32 * sgbmWinSize * sgbmWinSize);
sgbm_matcher->setMinDisparity(0);
sgbm_matcher->setNumDisparities(numberOfDisparities);
sgbm_matcher->setUniquenessRatio(10);
sgbm_matcher->setSpeckleWindowSize(100);
sgbm_matcher->setSpeckleRange(32);
sgbm_matcher->setDisp12MaxDiff(1);
#endif
}
}
DisparityProcessor::~DisparityProcessor() {
@@ -163,16 +87,7 @@ bool DisparityProcessor::OnProcess(
// It contains disparity values scaled by 16. So, to get the floating-point
// disparity map,
// you need to divide each disp element by 16.
if (type_ == DisparityProcessorType::SGBM) {
(*sgbm_matcher)(input->first, input->second, disparity);
#ifdef WITH_BM_SOBEL_FILTER
} else if (type_ == DisparityProcessorType::BM) {
cv::Mat tmp1, tmp2;
cv::cvtColor(input->first, tmp1, CV_RGB2GRAY);
cv::cvtColor(input->second, tmp2, CV_RGB2GRAY);
(*bm_matcher)(tmp1, tmp2, disparity);
#endif
}
(*sgbm_)(input->first, input->second, disparity);
#else
// compute()
// http://docs.opencv.org/master/d2/d6e/classcv_1_1StereoMatcher.html
@@ -181,24 +96,7 @@ bool DisparityProcessor::OnProcess(
// disparity map
// (where each disparity value has 4 fractional bits),
// whereas other algorithms output 32-bit floating-point disparity map.
if (type_ == DisparityProcessorType::SGBM) {
sgbm_matcher->compute(input->first, input->second, disparity);
#ifdef WITH_BM_SOBEL_FILTER
} else if (type_ == DisparityProcessorType::BM) {
cv::Mat tmp1, tmp2;
if (input->first.channels() == 1) {
// s1030
} else if (input->first.channels() == 3) {
// s210
cv::cvtColor(input->first, tmp1, CV_RGB2GRAY);
cv::cvtColor(input->second, tmp2, CV_RGB2GRAY);
}
bm_matcher->compute(tmp1, tmp2, disparity);
#endif
} else {
// default
sgbm_matcher->compute(input->first, input->second, disparity);
}
sgbm_->compute(input->first, input->second, disparity);
#endif
disparity.convertTo(output->value, CV_32F, 1./16, 1);
output->id = input->first_id;

View File

@@ -16,32 +16,22 @@
#pragma once
#include <string>
#include "mynteye/api/processor.h"
namespace cv {
class StereoSGBM;
class StereoBM;
} // namespace cv
enum class DisparityProcessorType : std::uint8_t {
/** bm */
SGBM = 0,
/** sgbm */
BM = 1,
/** unknow */
UNKNOW
};
MYNTEYE_BEGIN_NAMESPACE
class DisparityProcessor : public Processor {
public:
static const char NAME[];
explicit DisparityProcessor(DisparityProcessorType type,
std::int32_t proc_period = 0);
explicit DisparityProcessor(std::int32_t proc_period = 0);
virtual ~DisparityProcessor();
std::string Name() override;
@@ -52,9 +42,7 @@ class DisparityProcessor : public Processor {
Object *const in, Object *const out, Processor *const parent) override;
private:
cv::Ptr<cv::StereoSGBM> sgbm_matcher;
cv::Ptr<cv::StereoBM> bm_matcher;
DisparityProcessorType type_;
cv::Ptr<cv::StereoSGBM> sgbm_;
};
MYNTEYE_END_NAMESPACE

View File

@@ -14,8 +14,6 @@
#include "mynteye/api/processor/points_processor.h"
#include <utility>
#include <vector>
#include <limits>
#include <opencv2/calib3d/calib3d.hpp>
@@ -23,41 +21,11 @@
MYNTEYE_BEGIN_NAMESPACE
namespace {
// Encapsulate differences between processing float and uint16_t depths
template<typename T> struct DepthTraits {};
template<>
struct DepthTraits<uint16_t> {
static inline bool valid(uint16_t depth) { return depth != 0; }
static inline float toMeters(uint16_t depth) { return depth * 0.001f; } // originally mm
static inline uint16_t fromMeters(float depth) { return (depth * 1000.0f) + 0.5f; }
static inline void initializeBuffer(std::vector<uint16_t>& buffer) {} // Do nothing - already zero-filled
};
template<>
struct DepthTraits<float> {
static inline bool valid(float depth) { return std::isfinite(depth); }
static inline float toMeters(float depth) { return depth; }
static inline float fromMeters(float depth) { return depth; }
static inline void initializeBuffer(std::vector<uint8_t>& buffer) {
float* start = reinterpret_cast<float*>(&buffer[0]);
float* end = reinterpret_cast<float*>(&buffer[0] + buffer.size());
std::fill(start, end, std::numeric_limits<float>::quiet_NaN());
}
};
}; // namespace
const char PointsProcessor::NAME[] = "PointsProcessor";
PointsProcessor::PointsProcessor(
std::shared_ptr<struct camera_calib_info_pair> calib_infos,
std::int32_t proc_period)
: Processor(std::move(proc_period)),
calib_infos_(calib_infos) {
VLOG(2) << __func__;
PointsProcessor::PointsProcessor(cv::Mat Q, std::int32_t proc_period)
: Processor(std::move(proc_period)), Q_(std::move(Q)) {
VLOG(2) << __func__ << ": proc_period=" << proc_period;
}
PointsProcessor::~PointsProcessor() {
@@ -73,44 +41,11 @@ Object *PointsProcessor::OnCreateOutput() {
}
bool PointsProcessor::OnProcess(
Object *const in, Object *const out, Processor *const parent) {
Object *const in, Object *const out, Processor *const parent) {
MYNTEYE_UNUSED(parent)
float fx = calib_infos_->left.K[0];
float fy = calib_infos_->left.K[4];
float cx = calib_infos_->left.K[2];
float cy = calib_infos_->left.K[5];
// Use correct principal point from calibration
float center_x = cx;
float center_y = cy;
// Combine unit conversion (if necessary) with scaling by focal length for computing (X,Y)
double unit_scaling = DepthTraits<float>::toMeters(static_cast<float>(1));
float constant_x = unit_scaling / fx;
float constant_y = unit_scaling / fy;
// float bad_point = std::numeric_limits<float>::quiet_NaN();
const ObjMat *input = Object::Cast<ObjMat>(in);
ObjMat *output = Object::Cast<ObjMat>(out);
output->value.create(input->value.size(), CV_MAKETYPE(CV_32F, 3));
int height = static_cast<int>(output->value.rows);
int width = static_cast<int>(output->value.cols);
for (int v = 0; v < height; ++v) {
cv::Vec3f *dptr = output->value.ptr<cv::Vec3f>(v);
for (int u = 0; u < width; ++u) {
float depth = input->value.at<uint16_t>(v, u);
// Missing points denoted by NaNs
if (!DepthTraits<uint16_t>::valid(depth)) {
continue;
}
dptr[u][0] = (u - center_x) * depth * constant_x ;
dptr[u][1] = (v - center_y) * depth * constant_y ;
dptr[u][2] = depth ;
}
}
cv::reprojectImageTo3D(input->value, output->value, Q_, true);
output->id = input->id;
output->data = input->data;
return true;

View File

@@ -20,7 +20,6 @@
#include <opencv2/core/core.hpp>
#include "mynteye/api/processor.h"
#include "mynteye/api/processor/rectify_processor.h"
MYNTEYE_BEGIN_NAMESPACE
@@ -28,9 +27,7 @@ class PointsProcessor : public Processor {
public:
static const char NAME[];
explicit PointsProcessor(
std::shared_ptr<struct camera_calib_info_pair> calib_infos,
std::int32_t proc_period = 0);
explicit PointsProcessor(cv::Mat Q, std::int32_t proc_period = 0);
virtual ~PointsProcessor();
std::string Name() override;
@@ -41,7 +38,7 @@ class PointsProcessor : public Processor {
Object *const in, Object *const out, Processor *const parent) override;
private:
std::shared_ptr<struct camera_calib_info_pair> calib_infos_;
cv::Mat Q_;
};
MYNTEYE_END_NAMESPACE

View File

@@ -1,54 +0,0 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/api/processor/points_processor_ocv.h"
#include <utility>
#include <opencv2/calib3d/calib3d.hpp>
#include "mynteye/logger.h"
MYNTEYE_BEGIN_NAMESPACE
const char PointsProcessorOCV::NAME[] = "PointsProcessorOCV";
// Constructs the OpenCV points stage.
// Q: 4x4 disparity-to-depth reprojection matrix (moved into Q_).
// proc_period: processing interval forwarded to Processor. It is a
// trivially copyable int, so std::move() on it was a no-op copy that
// also made the subsequent log read look like use-after-move; pass it
// through directly instead.
PointsProcessorOCV::PointsProcessorOCV(cv::Mat Q, std::int32_t proc_period)
    : Processor(proc_period), Q_(std::move(Q)) {
  VLOG(2) << __func__ << ": proc_period=" << proc_period;
}
// Nothing to release explicitly; the log line exists only for
// lifecycle tracing at verbose level 2.
PointsProcessorOCV::~PointsProcessorOCV() {
  VLOG(2) << __func__;
}
// Identifier of this pipeline stage ("PointsProcessorOCV").
std::string PointsProcessorOCV::Name() {
  return std::string(NAME);
}
// Allocates the mat container that OnProcess() fills.
// NOTE(review): returned raw pointer is presumably owned by the
// Processor pipeline — confirm against mynteye/api/processor.h.
Object *PointsProcessorOCV::OnCreateOutput() {
  return new ObjMat();
}
// Reprojects the incoming disparity image to a 3D point cloud using
// the stored Q matrix, propagating frame id/data unchanged.
bool PointsProcessorOCV::OnProcess(
    Object *const in, Object *const out, Processor *const parent) {
  MYNTEYE_UNUSED(parent)
  const ObjMat *disp = Object::Cast<ObjMat>(in);
  ObjMat *points = Object::Cast<ObjMat>(out);
  // handleMissingValues=true maps unknown disparities to a large Z.
  cv::reprojectImageTo3D(disp->value, points->value, Q_, true);
  points->id = disp->id;
  points->data = disp->data;
  return true;
}
MYNTEYE_END_NAMESPACE

View File

@@ -17,381 +17,21 @@
#include <opencv2/calib3d/calib3d.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include "mynteye/logger.h"
#include "mynteye/device/device.h"
MYNTEYE_BEGIN_NAMESPACE
// Returns R unchanged unless its rotation angle is within ~1 degree of
// 180deg; in that case the angle-axis representation is wrapped back
// past pi along the same axis before being converted back to a matrix
// (presumably to sidestep the Rodrigues ambiguity near pi — confirm).
cv::Mat RectifyProcessor::rectifyrad(const cv::Mat &R) {
  // Angle-axis form of the rotation.
  cv::Mat axis_angle;
  cv::Rodrigues(R, axis_angle);
  const double angle = cv::norm(axis_angle);
  // Threshold 3.1241 rad ~= 179 deg (pi/180 = x/179 ==> x = 3.1241).
  if (angle < 3.1241) {
    return R.clone();
  }
  cv::Mat axis;
  cv::normalize(axis_angle, axis);
  cv::Mat wrapped = axis * (3.1415926 - angle);
  cv::Mat rectified;
  cv::Rodrigues(wrapped, rectified);
  return rectified.clone();
}
void RectifyProcessor::stereoRectify(camodocal::CameraPtr leftOdo,
camodocal::CameraPtr rightOdo, const CvMat* K1, const CvMat* K2,
const CvMat* D1, const CvMat* D2, CvSize imageSize,
const CvMat* matR, const CvMat* matT,
CvMat* _R1, CvMat* _R2, CvMat* _P1, CvMat* _P2, double* T_mul_f,
int flags, double alpha, CvSize newImgSize) {
double _om[3], _t[3] = {0}, _uu[3]={0, 0, 0}, _r_r[3][3], _pp[3][4];
double _ww[3], _wr[3][3], _z[3] = {0, 0, 0}, _ri[3][3], _w3[3];
cv::Rect_<float> inner1, inner2, outer1, outer2;
CvMat om = cvMat(3, 1, CV_64F, _om);
CvMat t = cvMat(3, 1, CV_64F, _t);
CvMat uu = cvMat(3, 1, CV_64F, _uu);
CvMat r_r = cvMat(3, 3, CV_64F, _r_r);
CvMat pp = cvMat(3, 4, CV_64F, _pp);
CvMat ww = cvMat(3, 1, CV_64F, _ww); // temps
CvMat w3 = cvMat(3, 1, CV_64F, _w3); // temps
CvMat wR = cvMat(3, 3, CV_64F, _wr);
CvMat Z = cvMat(3, 1, CV_64F, _z);
CvMat Ri = cvMat(3, 3, CV_64F, _ri);
double nx = imageSize.width, ny = imageSize.height;
int i, k;
double nt, nw;
if ( matR->rows == 3 && matR->cols == 3)
cvRodrigues2(matR, &om); // get vector rotation
else
cvConvert(matR, &om); // it's already a rotation vector
cvConvertScale(&om, &om, -0.5); // get average rotation
cvRodrigues2(&om, &r_r); // rotate cameras to same orientation by averaging
cvMatMul(&r_r, matT, &t);
int idx = fabs(_t[0]) > fabs(_t[1]) ? 0 : 1;
// if idx == 0
// e1 = T / ||T||
// e2 = e1 x [0,0,1]
// if idx == 1
// e2 = T / ||T||
// e1 = e2 x [0,0,1]
// e3 = e1 x e2
_uu[2] = 1;
cvCrossProduct(&uu, &t, &ww);
nt = cvNorm(&t, 0, CV_L2);
nw = cvNorm(&ww, 0, CV_L2);
cvConvertScale(&ww, &ww, 1 / nw);
cvCrossProduct(&t, &ww, &w3);
nw = cvNorm(&w3, 0, CV_L2);
cvConvertScale(&w3, &w3, 1 / nw);
_uu[2] = 0;
for (i = 0; i < 3; ++i) {
_wr[idx][i] = -_t[i] / nt;
_wr[idx ^ 1][i] = -_ww[i];
_wr[2][i] = _w3[i] * (1 - 2 * idx); // if idx == 1 -> opposite direction
}
// apply to both views
cvGEMM(&wR, &r_r, 1, 0, 0, &Ri, CV_GEMM_B_T);
cvConvert(&Ri, _R1);
cvGEMM(&wR, &r_r, 1, 0, 0, &Ri, 0);
cvConvert(&Ri, _R2);
cvMatMul(&Ri, matT, &t);
// calculate projection/camera matrices
// these contain the relevant rectified image internal params (fx, fy=fx, cx, cy)
double fc_new = DBL_MAX;
CvPoint2D64f cc_new[2] = {{0, 0}, {0, 0}};
newImgSize = newImgSize.width * newImgSize.height != 0 ? newImgSize : imageSize;
const double ratio_x = (double)newImgSize.width / imageSize.width / 2;
const double ratio_y = (double)newImgSize.height / imageSize.height / 2;
const double ratio = idx == 1 ? ratio_x : ratio_y;
fc_new = (cvmGet(K1, idx ^ 1, idx ^ 1) + cvmGet(K2, idx ^ 1, idx ^ 1)) * ratio;
for (k = 0; k < 2; k++) {
CvPoint2D32f _pts[4];
CvPoint3D32f _pts_3[4];
CvMat pts = cvMat(1, 4, CV_32FC2, _pts);
CvMat pts_3 = cvMat(1, 4, CV_32FC3, _pts_3);
Eigen::Vector2d a;
Eigen::Vector3d b;
for (i = 0; i < 4; i++) {
int j = (i < 2) ? 0 : 1;
a.x() = (float)((i % 2)*(nx));
a.y() = (float)(j*(ny));
if (0 == k) {
leftOdo->liftProjective(a, b);
} else {
rightOdo->liftProjective(a, b);
}
_pts[i].x = b.x()/b.z();
_pts[i].y = b.y()/b.z();
}
cvConvertPointsHomogeneous(&pts, &pts_3);
// Change camera matrix to have cc=[0,0] and fc = fc_new
double _a_tmp[3][3];
CvMat A_tmp = cvMat(3, 3, CV_64F, _a_tmp);
_a_tmp[0][0] = fc_new;
_a_tmp[1][1] = fc_new;
_a_tmp[0][2] = 0.0;
_a_tmp[1][2] = 0.0;
cvProjectPoints2(&pts_3, k == 0 ? _R1 : _R2, &Z, &A_tmp, 0, &pts);
CvScalar avg = cvAvg(&pts);
cc_new[k].x = (nx)/2 - avg.val[0];
cc_new[k].y = (ny)/2 - avg.val[1];
}
if (flags & cv::CALIB_ZERO_DISPARITY) {
cc_new[0].x = cc_new[1].x = (cc_new[0].x + cc_new[1].x)*0.5;
cc_new[0].y = cc_new[1].y = (cc_new[0].y + cc_new[1].y)*0.5;
} else if (idx == 0) {
// horizontal stereo
cc_new[0].y = cc_new[1].y = (cc_new[0].y + cc_new[1].y)*0.5;
} else {
// vertical stereo
cc_new[0].x = cc_new[1].x = (cc_new[0].x + cc_new[1].x)*0.5;
}
cvZero(&pp);
_pp[0][0] = _pp[1][1] = fc_new;
_pp[0][2] = cc_new[0].x;
_pp[1][2] = cc_new[0].y;
_pp[2][2] = 1;
cvConvert(&pp, _P1);
_pp[0][2] = cc_new[1].x;
_pp[1][2] = cc_new[1].y;
_pp[idx][3] = _t[idx]*fc_new; // baseline * focal length
*T_mul_f = 0. - _t[idx] * fc_new;
cvConvert(&pp, _P2);
alpha = MIN(alpha, 1.);
{
newImgSize = newImgSize.width*newImgSize.height != 0 ? newImgSize : imageSize;
double cx1_0 = cc_new[0].x;
double cy1_0 = cc_new[0].y;
double cx2_0 = cc_new[1].x;
double cy2_0 = cc_new[1].y;
double cx1 = newImgSize.width*cx1_0/imageSize.width;
double cy1 = newImgSize.height*cy1_0/imageSize.height;
double cx2 = newImgSize.width*cx2_0/imageSize.width;
double cy2 = newImgSize.height*cy2_0/imageSize.height;
double s = 1.;
fc_new *= s;
cc_new[0] = cvPoint2D64f(cx1, cy1);
cc_new[1] = cvPoint2D64f(cx2, cy2);
cvmSet(_P1, 0, 0, fc_new);
cvmSet(_P1, 1, 1, fc_new);
cvmSet(_P1, 0, 2, cx1);
cvmSet(_P1, 1, 2, cy1);
cvmSet(_P2, 0, 0, fc_new);
cvmSet(_P2, 1, 1, fc_new);
cvmSet(_P2, 0, 2, cx2);
cvmSet(_P2, 1, 2, cy2);
cvmSet(_P2, idx, 3, s*cvmGet(_P2, idx, 3));
}
}
// Build a homogeneous 4x4 transform from the device extrinsics.
// The 3x3 rotation is round-tripped through a normalized quaternion,
// which re-orthonormalizes it before it is placed in the result.
Eigen::Matrix4d RectifyProcessor::loadT(const mynteye::Extrinsics& in) {
  Eigen::Matrix3d rot;
  for (int r = 0; r < 3; ++r) {
    for (int c = 0; c < 3; ++c) {
      rot(r, c) = in.rotation[r][c];
    }
  }
  // Normalize via quaternion to guard against a slightly
  // non-orthonormal calibration matrix.
  Eigen::Quaterniond quat(rot);
  quat.normalize();
  Eigen::Matrix4d transform = Eigen::Matrix4d::Identity();
  transform.topLeftCorner<3, 3>() = quat.toRotationMatrix();
  transform(0, 3) = in.translation[0];
  transform(1, 3) = in.translation[1];
  transform(2, 3) = in.translation[2];
  return transform;
}
// Expose a calib_data struct's arrays as OpenCV matrices.
// NOTE(review): K and D are cv::Mat headers that ALIAS calib_data.K /
// calib_data.D — no copy is made. The caller must keep calib_data alive
// (and unmoved) for as long as K/D are used; confirm at call sites.
void RectifyProcessor::loadCameraMatrix(cv::Mat& K, cv::Mat& D,  // NOLINT
    cv::Size& image_size,  // NOLINT
    struct camera_calib_info& calib_data) {  // NOLINT
  // 3x3 camera matrix view over calib_data.K (row-major doubles).
  K = cv::Mat(3, 3, CV_64F, calib_data.K);
  // Equidistant model carries 4 distortion coefficients.
  std::size_t d_length = 4;
  D = cv::Mat(1, d_length, CV_64F, calib_data.D);
  image_size = cv::Size(calib_data.width, calib_data.height);
}
// Translate equidistant (KANNALA_BRANDT) intrinsics into the flat
// calibration structure used by the rectification pipeline.
// coeffs[0..3] are the distortion terms; coeffs[4..7] are the
// projection terms (mu, mv, u0, v0) placed into the 3x3 K matrix.
struct camera_calib_info RectifyProcessor::getCalibMatData(
    const mynteye::IntrinsicsEquidistant& in) {
  struct camera_calib_info info;
  info.distortion_model = "KANNALA_BRANDT";
  info.width = in.width;
  info.height = in.height;
  info.D[0] = in.coeffs[0];
  info.D[1] = in.coeffs[1];
  info.D[2] = in.coeffs[2];
  info.D[3] = in.coeffs[3];
  // Row-major 3x3 K: [mu 0 u0; 0 mv v0; 0 0 1]; the remaining
  // entries stay at their zero defaults.
  info.K[0] = in.coeffs[4];  // mu
  info.K[4] = in.coeffs[5];  // mv
  info.K[2] = in.coeffs[6];  // u0
  info.K[5] = in.coeffs[7];  // v0
  info.K[8] = 1;
  return info;
}
// Full rectification pipeline for an equidistant stereo pair:
// converts the extrinsics to R/t, builds K/D from the intrinsics, runs
// the private legacy-C-API stereoRectify overload, and packs the
// resulting R1/R2/P1/P2 plus T_mul_f into a camera_calib_info_pair.
std::shared_ptr<struct camera_calib_info_pair> RectifyProcessor::stereoRectify(
    camodocal::CameraPtr leftOdo,
    camodocal::CameraPtr rightOdo,
    mynteye::IntrinsicsEquidistant in_left,
    mynteye::IntrinsicsEquidistant in_right,
    mynteye::Extrinsics ex_right_to_left) {
  // Right-to-left homogeneous transform, split into rotation and
  // translation for the C-API call.
  Eigen::Matrix4d T = loadT(ex_right_to_left);
  Eigen::Matrix3d R = T.topLeftCorner<3, 3>();
  Eigen::Vector3d t = T.topRightCorner<3, 1>();
  cv::Mat cv_R, cv_t;
  cv::eigen2cv(R, cv_R);
  cv::eigen2cv(t, cv_t);
  cv::Mat K1, D1, K2, D2;
  cv::Size image_size1, image_size2;
  struct camera_calib_info calib_mat_data_left = getCalibMatData(in_left);
  struct camera_calib_info calib_mat_data_right = getCalibMatData(in_right);
  // K*/D* are aliases into the calib_mat_data_* structs above, which
  // stay alive until the end of this function.
  loadCameraMatrix(K1, D1, image_size1, calib_mat_data_left);
  loadCameraMatrix(K2, D2, image_size2, calib_mat_data_right);
  cv::Mat R1 = cv::Mat(cv::Size(3, 3), CV_64F);
  cv::Mat R2 = cv::Mat(cv::Size(3, 3), CV_64F);
  cv::Mat P1 = cv::Mat(3, 4, CV_64F);
  cv::Mat P2 = cv::Mat(3, 4, CV_64F);
  // Legacy CvMat headers share the cv::Mat buffers (no copies); the
  // private overload writes its results through c_R1/c_R2/c_P1/c_P2.
  CvMat c_R = cv_R, c_t = cv_t;
  CvMat c_K1 = K1, c_K2 = K2, c_D1 = D1, c_D2 = D2;
  CvMat c_R1 = R1, c_R2 = R2, c_P1 = P1, c_P2 = P2;
  double T_mul_f;
  stereoRectify(leftOdo, rightOdo, &c_K1, &c_K2, &c_D1, &c_D2,
      image_size1, &c_R, &c_t, &c_R1, &c_R2, &c_P1, &c_P2, &T_mul_f);
  // std::cout << "K1: " << K1 << std::endl;
  // std::cout << "D1: " << D1 << std::endl;
  // std::cout << "K2: " << K2 << std::endl;
  // std::cout << "D2: " << D2 << std::endl;
  // std::cout << "R: " << cv_R << std::endl;
  // std::cout << "t: " << cv_t << std::endl;
  // std::cout << "R1: " << R1 << std::endl;
  // std::cout << "R2: " << R2 << std::endl;
  // std::cout << "P1: " << P1 << std::endl;
  // std::cout << "P2: " << P2 << std::endl;
  R1 = rectifyrad(R1);
  R2 = rectifyrad(R2);
  // Copy the 3x4 projection matrices back into the structs (row-major).
  for (std::size_t i = 0; i < 3; i++) {
    for (std::size_t j = 0; j < 4; j++) {
      calib_mat_data_left.P[i*4 + j] = P1.at<double>(i, j);
      calib_mat_data_right.P[i*4 + j] = P2.at<double>(i, j);
    }
  }
  // Copy the 3x3 rectifying rotations back into the structs.
  for (std::size_t i = 0; i < 3; i++) {
    for (std::size_t j = 0; j < 3; j++) {
      calib_mat_data_left.R[i*3 + j] = R1.at<double>(i, j);
      calib_mat_data_right.R[i*3 +j] = R2.at<double>(i, j);
    }
  }
  struct camera_calib_info_pair res =
      {calib_mat_data_left, calib_mat_data_right, T_mul_f};
  return std::make_shared<struct camera_calib_info_pair>(res);
}
// Construct a camodocal equidistant (Kannala-Brandt) camera model from
// MYNT EYE equidistant intrinsics; the 8 coefficients are forwarded
// verbatim to the EquidistantCamera constructor.
camodocal::CameraPtr RectifyProcessor::generateCameraFromIntrinsicsEquidistant(
    const mynteye::IntrinsicsEquidistant & in) {
  const auto& c = in.coeffs;
  return camodocal::EquidistantCameraPtr(
      new camodocal::EquidistantCamera(
          "KANNALA_BRANDT", in.width, in.height,
          c[0], c[1], c[2], c[3], c[4], c[5], c[6], c[7]));
}
// Build rectification data and undistortion maps for an equidistant
// (KANNALA_BRANDT) stereo pair.
//
// Fix: the original body called stereoRectify() TWICE with identical
// arguments and assigned *calib_infos both times — the second call was
// pure duplicated work; a single call is sufficient.
void RectifyProcessor::InitParams(
    IntrinsicsEquidistant in_left,
    IntrinsicsEquidistant in_right,
    Extrinsics ex_right_to_left) {
  calib_model = CalibrationModel::KANNALA_BRANDT;
  camodocal::CameraPtr camera_odo_ptr_left =
      generateCameraFromIntrinsicsEquidistant(in_left);
  camodocal::CameraPtr camera_odo_ptr_right =
      generateCameraFromIntrinsicsEquidistant(in_right);
  auto calib_infos_temp =
      stereoRectify(camera_odo_ptr_left,
          camera_odo_ptr_right,
          in_left,
          in_right,
          ex_right_to_left);
  *calib_infos = *calib_infos_temp;
  // Rectifying rotations as CV_32F matrices, the type expected by
  // initUndistortRectifyMap below.
  cv::Mat rect_R_l =
      cv::Mat::eye(3, 3, CV_32F), rect_R_r = cv::Mat::eye(3, 3, CV_32F);
  for (size_t i = 0; i < 3; i++) {
    for (size_t j = 0; j < 3; j++) {
      rect_R_l.at<float>(i, j) = calib_infos->left.R[i*3+j];
      rect_R_r.at<float>(i, j) = calib_infos->right.R[i*3+j];
    }
  }
  // P is a row-major 3x4 projection matrix: P[0]=fx, P[5]=fy,
  // P[2]=cx, P[6]=cy.
  double left_f[] =
      {calib_infos->left.P[0], calib_infos->left.P[5]};
  double left_center[] =
      {calib_infos->left.P[2], calib_infos->left.P[6]};
  double right_f[] =
      {calib_infos->right.P[0], calib_infos->right.P[5]};
  double right_center[] =
      {calib_infos->right.P[2], calib_infos->right.P[6]};
  // Precompute the per-camera remap tables (map11/12 left, map21/22
  // right) used at processing time.
  camera_odo_ptr_left->initUndistortRectifyMap(
      map11, map12, left_f[0], left_f[1],
      cv::Size(0, 0), left_center[0],
      left_center[1], rect_R_l);
  camera_odo_ptr_right->initUndistortRectifyMap(
      map21, map22, right_f[0], right_f[1],
      cv::Size(0, 0), right_center[0],
      right_center[1], rect_R_r);
}
const char RectifyProcessor::NAME[] = "RectifyProcessor";
// Construct the equidistant rectifier from explicit calibration data.
//
// Fix: the original span was a corrupted interleave of two constructor
// versions (leftover lines from a removed Device-based overload were
// spliced into the body, leaving it non-compilable). This restores the
// intrinsics/extrinsics-based constructor matching the declaration:
// allocate the calib-info pair, then run InitParams.
// NOTE(review): dynamic_pointer_cast is dereferenced without a null
// check — non-equidistant intrinsics would crash; confirm callers.
RectifyProcessor::RectifyProcessor(
    std::shared_ptr<IntrinsicsBase> intr_left,
    std::shared_ptr<IntrinsicsBase> intr_right,
    std::shared_ptr<Extrinsics> extr,
    std::int32_t proc_period)
    : Processor(std::move(proc_period)),
      calib_model(CalibrationModel::UNKNOW) {
  VLOG(2) << __func__ << ": proc_period=" << proc_period;
  calib_infos = std::make_shared<struct camera_calib_info_pair>();
  InitParams(
      *std::dynamic_pointer_cast<IntrinsicsEquidistant>(intr_left),
      *std::dynamic_pointer_cast<IntrinsicsEquidistant>(intr_right),
      *extr);
}
RectifyProcessor::~RectifyProcessor() {
@@ -402,16 +42,6 @@ std::string RectifyProcessor::Name() {
return NAME;
}
// Re-run the rectification setup with new calibration parameters
// (e.g. after the device's image params change).
// NOTE(review): dynamic_pointer_cast yields null for non-equidistant
// intrinsics and the dereference would crash — confirm callers always
// pass IntrinsicsEquidistant here.
void RectifyProcessor::ReloadImageParams(
    std::shared_ptr<IntrinsicsBase> intr_left,
    std::shared_ptr<IntrinsicsBase> intr_right,
    std::shared_ptr<Extrinsics> extr) {
  InitParams(
      *std::dynamic_pointer_cast<IntrinsicsEquidistant>(intr_left),
      *std::dynamic_pointer_cast<IntrinsicsEquidistant>(intr_right),
      *extr);
}
// Allocate this processor's output container (a two-image object for
// the rectified left/right pair). Ownership passes to the framework.
Object *RectifyProcessor::OnCreateOutput() {
  return new ObjMat2();
}
@@ -430,4 +60,41 @@ bool RectifyProcessor::OnProcess(
return true;
}
// Pinhole-model rectification setup: builds camera matrices from
// fx/fy/cx/cy, runs cv::stereoRectify with CALIB_ZERO_DISPARITY, and
// precomputes the fixed-point (CV_16SC2) remap tables used later to
// rectify incoming frames.
void RectifyProcessor::InitParams(
    Intrinsics in_left, Intrinsics in_right, Extrinsics ex_right_to_left) {
  cv::Size size{in_left.width, in_left.height};
  // 3x3 camera matrices K = [fx 0 cx; 0 fy cy; 0 0 1].
  cv::Mat M1 =
      (cv::Mat_<double>(3, 3) << in_left.fx, 0, in_left.cx, 0, in_left.fy,
       in_left.cy, 0, 0, 1);
  cv::Mat M2 =
      (cv::Mat_<double>(3, 3) << in_right.fx, 0, in_right.cx, 0, in_right.fy,
       in_right.cy, 0, 0, 1);
  // D1/D2 and T wrap the by-value parameters' arrays directly (no
  // copy); they are only used inside this function, so that is safe.
  cv::Mat D1(1, 5, CV_64F, in_left.coeffs);
  cv::Mat D2(1, 5, CV_64F, in_right.coeffs);
  cv::Mat R =
      (cv::Mat_<double>(3, 3) << ex_right_to_left.rotation[0][0],
       ex_right_to_left.rotation[0][1], ex_right_to_left.rotation[0][2],
       ex_right_to_left.rotation[1][0], ex_right_to_left.rotation[1][1],
       ex_right_to_left.rotation[1][2], ex_right_to_left.rotation[2][0],
       ex_right_to_left.rotation[2][1], ex_right_to_left.rotation[2][2]);
  cv::Mat T(3, 1, CV_64F, ex_right_to_left.translation);
  VLOG(2) << "InitParams size: " << size;
  VLOG(2) << "M1: " << M1;
  VLOG(2) << "M2: " << M2;
  VLOG(2) << "D1: " << D1;
  VLOG(2) << "D2: " << D2;
  VLOG(2) << "R: " << R;
  VLOG(2) << "T: " << T;
  cv::Rect left_roi, right_roi;
  // alpha = 0: keep only valid (fully undistorted) pixels.
  cv::stereoRectify(
      M1, D1, M2, D2, size, R, T, R1, R2, P1, P2, Q, cv::CALIB_ZERO_DISPARITY,
      0, size, &left_roi, &right_roi);
  cv::initUndistortRectifyMap(M1, D1, R1, P1, size, CV_16SC2, map11, map12);
  cv::initUndistortRectifyMap(M2, D2, R2, P2, size, CV_16SC2, map21, map22);
}
MYNTEYE_END_NAMESPACE

View File

@@ -22,31 +22,9 @@
#include "mynteye/types.h"
#include "mynteye/api/processor.h"
#include "mynteye/device/device.h"
#include <camodocal/camera_models/EquidistantCamera.h>
#include <opencv2/core/eigen.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/opencv.hpp>
#include <opencv2/core/core.hpp>
MYNTEYE_BEGIN_NAMESPACE
// Per-camera calibration data in a flat, row-major layout (similar in
// spirit to ROS sensor_msgs/CameraInfo fields of the same names).
struct camera_calib_info {
  unsigned int height = 0;
  unsigned int width = 0;
  std::string distortion_model = "null";
  double D[4] = {0};   // distortion coefficients
  double K[9] = {0};   // 3x3 camera matrix, row-major
  double R[9] = {0};   // 3x3 rectifying rotation, row-major
  double P[12] = {0};  // 3x4 projection matrix, row-major
};

// Rectification result for a stereo pair. T_mul_f is the (negated)
// baseline component multiplied by the rectified focal length, as
// computed in stereoRectify.
struct camera_calib_info_pair {
  struct camera_calib_info left;
  struct camera_calib_info right;
  double T_mul_f;
};
class Device;
class RectifyProcessor : public Processor {
@@ -54,65 +32,22 @@ class RectifyProcessor : public Processor {
static const char NAME[];
RectifyProcessor(
std::shared_ptr<IntrinsicsBase> intr_left,
std::shared_ptr<IntrinsicsBase> intr_right,
std::shared_ptr<Extrinsics> extr,
std::int32_t proc_period = 0);
std::shared_ptr<Device> device, std::int32_t proc_period = 0);
virtual ~RectifyProcessor();
std::string Name() override;
void ReloadImageParams(
std::shared_ptr<IntrinsicsBase> intr_left,
std::shared_ptr<IntrinsicsBase> intr_right,
std::shared_ptr<Extrinsics> extr);
cv::Mat R1, P1, R2, P2, Q;
cv::Mat map11, map12, map21, map22;
inline std::shared_ptr<struct camera_calib_info_pair> getCalibInfoPair() {
return calib_infos;
}
protected:
Object *OnCreateOutput() override;
bool OnProcess(
Object *const in, Object *const out, Processor *const parent) override;
private:
void InitParams(IntrinsicsEquidistant in_left,
IntrinsicsEquidistant in_right, Extrinsics ex_right_to_left);
cv::Mat rectifyrad(const cv::Mat& R);
void stereoRectify(camodocal::CameraPtr leftOdo,
camodocal::CameraPtr rightOdo, const CvMat* K1, const CvMat* K2,
const CvMat* D1, const CvMat* D2, CvSize imageSize,
const CvMat* matR, const CvMat* matT,
CvMat* _R1, CvMat* _R2, CvMat* _P1, CvMat* _P2, double* T_mul_f,
int flags = cv::CALIB_ZERO_DISPARITY, double alpha = -1,
CvSize newImgSize = cv::Size());
Eigen::Matrix4d loadT(const mynteye::Extrinsics& in);
void loadCameraMatrix(cv::Mat& K, cv::Mat& D, // NOLINT
cv::Size& image_size, // NOLINT
struct camera_calib_info& calib_data); // NOLINT
struct camera_calib_info getCalibMatData(
const mynteye::IntrinsicsEquidistant& in);
std::shared_ptr<struct camera_calib_info_pair> stereoRectify(
camodocal::CameraPtr leftOdo,
camodocal::CameraPtr rightOdo,
mynteye::IntrinsicsEquidistant in_left,
mynteye::IntrinsicsEquidistant in_right,
mynteye::Extrinsics ex_right_to_left);
camodocal::CameraPtr generateCameraFromIntrinsicsEquidistant(
const mynteye::IntrinsicsEquidistant & in);
CalibrationModel calib_model;
std::shared_ptr<struct camera_calib_info_pair> calib_infos;
void InitParams(
Intrinsics in_left, Intrinsics in_right, Extrinsics ex_right_to_left);
};
MYNTEYE_END_NAMESPACE

View File

@@ -1,117 +0,0 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/api/processor/rectify_processor_ocv.h"
#include <utility>
#include <opencv2/calib3d/calib3d.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include "mynteye/logger.h"
#include "mynteye/device/device.h"
MYNTEYE_BEGIN_NAMESPACE
const char RectifyProcessorOCV::NAME[] = "RectifyProcessorOCV";
// Construct the OpenCV pinhole rectifier from explicit left/right
// intrinsics and the right-to-left extrinsics; InitParams fills the
// rectification matrices and remap tables immediately.
// NOTE(review): dynamic_pointer_cast is dereferenced without a null
// check — passing non-pinhole intrinsics would crash; confirm callers
// guarantee IntrinsicsPinhole here.
RectifyProcessorOCV::RectifyProcessorOCV(
    std::shared_ptr<IntrinsicsBase> intr_left,
    std::shared_ptr<IntrinsicsBase> intr_right,
    std::shared_ptr<Extrinsics> extr,
    std::int32_t proc_period)
    : Processor(std::move(proc_period)),
      calib_model(CalibrationModel::UNKNOW) {
  VLOG(2) << __func__ << ": proc_period=" << proc_period;
  InitParams(
      *std::dynamic_pointer_cast<IntrinsicsPinhole>(intr_left),
      *std::dynamic_pointer_cast<IntrinsicsPinhole>(intr_right),
      *extr);
}
RectifyProcessorOCV::~RectifyProcessorOCV() {
  VLOG(2) << __func__;
}

// Processor identifier used for lookup and logging.
std::string RectifyProcessorOCV::Name() {
  return NAME;
}
// Recompute rectification matrices and remap tables with new
// calibration parameters.
// NOTE(review): same unchecked dynamic_pointer_cast dereference as the
// constructor — confirm callers always pass IntrinsicsPinhole.
void RectifyProcessorOCV::ReloadImageParams(
    std::shared_ptr<IntrinsicsBase> intr_left,
    std::shared_ptr<IntrinsicsBase> intr_right,
    std::shared_ptr<Extrinsics> extr) {
  InitParams(
      *std::dynamic_pointer_cast<IntrinsicsPinhole>(intr_left),
      *std::dynamic_pointer_cast<IntrinsicsPinhole>(intr_right),
      *extr);
}
// Allocate this processor's output container (a two-image object for
// the rectified left/right pair). Ownership passes to the framework.
Object *RectifyProcessorOCV::OnCreateOutput() {
  return new ObjMat2();
}
// Rectify one stereo frame pair: remap both images through the lookup
// tables precomputed in InitParams and forward the frame ids/metadata
// unchanged. Returns true to signal the output is valid.
bool RectifyProcessorOCV::OnProcess(
    Object *const in, Object *const out, Processor *const parent) {
  MYNTEYE_UNUSED(parent)
  const ObjMat2 *input = Object::Cast<ObjMat2>(in);
  ObjMat2 *output = Object::Cast<ObjMat2>(out);
  // map11/12 = left camera, map21/22 = right camera.
  cv::remap(input->first, output->first, map11, map12, cv::INTER_LINEAR);
  cv::remap(input->second, output->second, map21, map22, cv::INTER_LINEAR);
  output->first_id = input->first_id;
  output->first_data = input->first_data;
  output->second_id = input->second_id;
  output->second_data = input->second_data;
  return true;
}
// Pinhole rectification setup: build camera matrices from fx/fy/cx/cy,
// run cv::stereoRectify with CALIB_ZERO_DISPARITY, and precompute the
// fixed-point (CV_16SC2) remap tables consumed by OnProcess.
void RectifyProcessorOCV::InitParams(
    IntrinsicsPinhole in_left,
    IntrinsicsPinhole in_right,
    Extrinsics ex_right_to_left) {
  calib_model = CalibrationModel::PINHOLE;
  cv::Size size{in_left.width, in_left.height};
  // 3x3 camera matrices K = [fx 0 cx; 0 fy cy; 0 0 1].
  cv::Mat M1 =
      (cv::Mat_<double>(3, 3) << in_left.fx, 0, in_left.cx, 0, in_left.fy,
       in_left.cy, 0, 0, 1);
  cv::Mat M2 =
      (cv::Mat_<double>(3, 3) << in_right.fx, 0, in_right.cx, 0, in_right.fy,
       in_right.cy, 0, 0, 1);
  // D1/D2 and T wrap the by-value parameters' arrays directly (no
  // copy); they are only used inside this function, so that is safe.
  cv::Mat D1(1, 5, CV_64F, in_left.coeffs);
  cv::Mat D2(1, 5, CV_64F, in_right.coeffs);
  cv::Mat R =
      (cv::Mat_<double>(3, 3) << ex_right_to_left.rotation[0][0],
       ex_right_to_left.rotation[0][1], ex_right_to_left.rotation[0][2],
       ex_right_to_left.rotation[1][0], ex_right_to_left.rotation[1][1],
       ex_right_to_left.rotation[1][2], ex_right_to_left.rotation[2][0],
       ex_right_to_left.rotation[2][1], ex_right_to_left.rotation[2][2]);
  cv::Mat T(3, 1, CV_64F, ex_right_to_left.translation);
  VLOG(2) << "InitParams size: " << size;
  VLOG(2) << "M1: " << M1;
  VLOG(2) << "M2: " << M2;
  VLOG(2) << "D1: " << D1;
  VLOG(2) << "D2: " << D2;
  VLOG(2) << "R: " << R;
  VLOG(2) << "T: " << T;
  cv::Rect left_roi, right_roi;
  // alpha = 0: keep only valid (fully undistorted) pixels.
  cv::stereoRectify(
      M1, D1, M2, D2, size, R, T, R1, R2, P1, P2, Q, cv::CALIB_ZERO_DISPARITY,
      0, size, &left_roi, &right_roi);
  cv::initUndistortRectifyMap(M1, D1, R1, P1, size, CV_16SC2, map11, map12);
  cv::initUndistortRectifyMap(M2, D2, R2, P2, size, CV_16SC2, map21, map22);
}
MYNTEYE_END_NAMESPACE

View File

@@ -1,65 +0,0 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_API_PROCESSOR_RECTIFY_PROCESSOR_OCV_H_
#define MYNTEYE_API_PROCESSOR_RECTIFY_PROCESSOR_OCV_H_
#pragma once
#include <memory>
#include <string>
#include <opencv2/core/core.hpp>
#include "mynteye/types.h"
#include "mynteye/api/processor.h"
MYNTEYE_BEGIN_NAMESPACE
class Device;
// OpenCV-based stereo rectifier for the pinhole camera model: computes
// R1/P1/R2/P2/Q with cv::stereoRectify and remaps incoming left/right
// frames through the precomputed undistortion maps.
class RectifyProcessorOCV : public Processor {
 public:
  static const char NAME[];

  // Builds rectification data from explicit calibration; proc_period
  // is forwarded to the base Processor.
  RectifyProcessorOCV(
      std::shared_ptr<IntrinsicsBase> intr_left,
      std::shared_ptr<IntrinsicsBase> intr_right,
      std::shared_ptr<Extrinsics> extr,
      std::int32_t proc_period = 0);
  virtual ~RectifyProcessorOCV();

  std::string Name() override;

  // Recompute rectification with new calibration parameters.
  void ReloadImageParams(
      std::shared_ptr<IntrinsicsBase> intr_left,
      std::shared_ptr<IntrinsicsBase> intr_right,
      std::shared_ptr<Extrinsics> extr);

  // Outputs of cv::stereoRectify (rotations, projections, disparity-
  // to-depth matrix Q).
  cv::Mat R1, P1, R2, P2, Q;
  // Undistort/rectify lookup maps: map11/12 left, map21/22 right.
  cv::Mat map11, map12, map21, map22;

 protected:
  Object *OnCreateOutput() override;
  bool OnProcess(
      Object *const in, Object *const out, Processor *const parent) override;

 private:
  void InitParams(IntrinsicsPinhole in_left,
      IntrinsicsPinhole in_right, Extrinsics ex_right_to_left);

  CalibrationModel calib_model;
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_API_PROCESSOR_RECTIFY_PROCESSOR_OCV_H_

View File

@@ -23,17 +23,11 @@
#include "mynteye/api/object.h"
#include "mynteye/api/plugin.h"
#include "mynteye/api/processor.h"
#include "mynteye/api/processor/depth_processor.h"
#include "mynteye/api/processor/disparity_normalized_processor.h"
#include "mynteye/api/processor/disparity_processor.h"
#include "mynteye/api/processor/rectify_processor_ocv.h"
#include "mynteye/api/processor/depth_processor_ocv.h"
#include "mynteye/api/processor/points_processor_ocv.h"
#include "mynteye/api/config.h"
#ifdef WITH_CAM_MODELS
#include "mynteye/api/processor/depth_processor.h"
#include "mynteye/api/processor/points_processor.h"
#include "mynteye/api/processor/rectify_processor.h"
#endif
#include "mynteye/device/device.h"
#define RECTIFY_PROC_PERIOD 0
@@ -53,8 +47,9 @@ cv::Mat frame2mat(const std::shared_ptr<device::Frame> &frame) {
return img;
} else if (frame->format() == Format::BGR888) {
cv::Mat img(frame->height(), frame->width(), CV_8UC3, frame->data());
cv::cvtColor(img, img, CV_BGR2RGB);
return img;
} else { // Format::GRAY
} else {
return cv::Mat(frame->height(), frame->width(), CV_8UC1, frame->data());
}
}
@@ -74,39 +69,9 @@ void process_childs(
} // namespace
void Synthetic::InitCalibInfo() {
if (calib_model_ == CalibrationModel::PINHOLE) {
LOG(INFO) << "camera calib model: pinhole";
intr_left_ = api_->GetIntrinsicsBase(Stream::LEFT);
intr_right_ = api_->GetIntrinsicsBase(Stream::RIGHT);
extr_ = std::make_shared<Extrinsics>(
api_->GetExtrinsics(Stream::LEFT, Stream::RIGHT));
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
LOG(INFO) << "camera calib model: kannala_brandt";
intr_left_ = api_->GetIntrinsicsBase(Stream::LEFT);
intr_right_ = api_->GetIntrinsicsBase(Stream::RIGHT);
extr_ = std::make_shared<Extrinsics>(
api_->GetExtrinsics(Stream::LEFT, Stream::RIGHT));
#endif
} else {
calib_default_tag_ = true;
calib_model_ = CalibrationModel::PINHOLE;
LOG(INFO) << "camera calib model: unknow ,use default pinhole data";
intr_left_ = getDefaultIntrinsics();
intr_right_ = getDefaultIntrinsics();
extr_ = getDefaultExtrinsics();
}
}
Synthetic::Synthetic(API *api, CalibrationModel calib_model)
: api_(api),
plugin_(nullptr),
calib_model_(calib_model),
calib_default_tag_(false) {
Synthetic::Synthetic(API *api) : api_(api), plugin_(nullptr) {
VLOG(2) << __func__;
CHECK_NOTNULL(api_);
InitCalibInfo();
InitStreamSupports();
InitProcessors();
}
@@ -119,29 +84,6 @@ Synthetic::~Synthetic() {
}
}
void Synthetic::NotifyImageParamsChanged() {
if (!calib_default_tag_) {
intr_left_ = api_->GetIntrinsicsBase(Stream::LEFT);
intr_right_ = api_->GetIntrinsicsBase(Stream::RIGHT);
extr_ = std::make_shared<Extrinsics>(
api_->GetExtrinsics(Stream::LEFT, Stream::RIGHT));
}
if (calib_model_ == CalibrationModel::PINHOLE) {
auto &&processor = find_processor<RectifyProcessorOCV>(processor_);
if (processor) processor->ReloadImageParams(intr_left_, intr_right_, extr_);
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
auto &&processor = find_processor<RectifyProcessor>(processor_);
if (processor) processor->ReloadImageParams(intr_left_, intr_right_, extr_);
#endif
} else {
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_ << ", use default pinhole model";
auto &&processor = find_processor<RectifyProcessorOCV>(processor_);
if (processor) processor->ReloadImageParams(intr_left_, intr_right_, extr_);
}
}
bool Synthetic::Supports(const Stream &stream) const {
return stream_supports_mode_.find(stream) != stream_supports_mode_.end();
}
@@ -220,22 +162,11 @@ api::StreamData Synthetic::GetStreamData(const Stream &stream) {
auto &&mode = GetStreamEnabledMode(stream);
if (mode == MODE_NATIVE) {
auto &&device = api_->device();
return data2api(device->GetStreamData(stream));
return data2api(device->GetLatestStreamData(stream));
} else if (mode == MODE_SYNTHETIC) {
if (stream == Stream::LEFT_RECTIFIED || stream == Stream::RIGHT_RECTIFIED) {
static std::shared_ptr<ObjMat2> output = nullptr;
std::shared_ptr<Processor> processor = nullptr;
if (calib_model_ == CalibrationModel::PINHOLE) {
processor = find_processor<RectifyProcessorOCV>(processor_);
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
processor = find_processor<RectifyProcessor>(processor_);
#endif
} else {
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_ << ", use default pinhole model";
processor = find_processor<RectifyProcessorOCV>(processor_);
}
auto &&processor = find_processor<RectifyProcessor>(processor_);
auto &&out = processor->GetOutput();
if (out != nullptr) {
// Obtain the output, out will be nullptr if get again immediately.
@@ -273,54 +204,22 @@ api::StreamData Synthetic::GetStreamData(const Stream &stream) {
VLOG(2) << "Disparity normalized not ready now";
} break;
case Stream::POINTS: {
if (calib_model_ == CalibrationModel::PINHOLE) {
auto &&processor = find_processor<PointsProcessorOCV>(processor_);
auto &&out = processor->GetOutput();
if (out != nullptr) {
auto &&output = Object::Cast<ObjMat>(out);
return {output->data, output->value, nullptr, output->id};
}
VLOG(2) << "Points not ready now";
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
auto &&processor = find_processor<PointsProcessor>(processor_);
auto &&out = processor->GetOutput();
if (out != nullptr) {
auto &&output = Object::Cast<ObjMat>(out);
return {output->data, output->value, nullptr, output->id};
}
VLOG(2) << "Points not ready now";
#endif
} else {
// UNKNOW
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_;
auto &&processor = find_processor<PointsProcessor>(processor_);
auto &&out = processor->GetOutput();
if (out != nullptr) {
auto &&output = Object::Cast<ObjMat>(out);
return {output->data, output->value, nullptr, output->id};
}
VLOG(2) << "Points not ready now";
} break;
case Stream::DEPTH: {
if (calib_model_ == CalibrationModel::PINHOLE) {
auto &&processor = find_processor<DepthProcessorOCV>(processor_);
auto &&out = processor->GetOutput();
if (out != nullptr) {
auto &&output = Object::Cast<ObjMat>(out);
return {output->data, output->value, nullptr, output->id};
}
VLOG(2) << "Depth not ready now";
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
auto &&processor = find_processor<DepthProcessor>(processor_);
auto &&out = processor->GetOutput();
if (out != nullptr) {
auto &&output = Object::Cast<ObjMat>(out);
return {output->data, output->value, nullptr, output->id};
}
VLOG(2) << "Depth not ready now";
#endif
} else {
// UNKNOW
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_;
auto &&processor = find_processor<DepthProcessor>(processor_);
auto &&out = processor->GetOutput();
if (out != nullptr) {
auto &&output = Object::Cast<ObjMat>(out);
return {output->data, output->value, nullptr, output->id};
}
VLOG(2) << "Depth not ready now";
} break;
default:
break;
@@ -412,76 +311,43 @@ void Synthetic::EnableStreamData(const Stream &stream, std::uint32_t depth) {
if (!IsStreamDataEnabled(Stream::LEFT))
break;
stream_enabled_mode_[stream] = MODE_SYNTHETIC;
if (calib_model_ == CalibrationModel::PINHOLE) {
CHECK(ActivateProcessor<RectifyProcessorOCV>());
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
CHECK(ActivateProcessor<RectifyProcessor>());
#endif
} else {
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_ << ", use default pinhole model";
CHECK(ActivateProcessor<RectifyProcessorOCV>());
}
} return;
CHECK(ActivateProcessor<RectifyProcessor>());
}
return;
case Stream::RIGHT_RECTIFIED: {
if (!IsStreamDataEnabled(Stream::RIGHT))
break;
stream_enabled_mode_[stream] = MODE_SYNTHETIC;
if (calib_model_ == CalibrationModel::PINHOLE) {
CHECK(ActivateProcessor<RectifyProcessorOCV>());
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
CHECK(ActivateProcessor<RectifyProcessor>());
#endif
} else {
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_ << ", use default pinhole model";
CHECK(ActivateProcessor<RectifyProcessorOCV>());
}
} return;
CHECK(ActivateProcessor<RectifyProcessor>());
}
return;
case Stream::DISPARITY: {
stream_enabled_mode_[stream] = MODE_SYNTHETIC;
EnableStreamData(Stream::LEFT_RECTIFIED, depth + 1);
EnableStreamData(Stream::RIGHT_RECTIFIED, depth + 1);
CHECK(ActivateProcessor<DisparityProcessor>());
} return;
}
return;
case Stream::DISPARITY_NORMALIZED: {
stream_enabled_mode_[stream] = MODE_SYNTHETIC;
EnableStreamData(Stream::DISPARITY, depth + 1);
CHECK(ActivateProcessor<DisparityNormalizedProcessor>());
} return;
}
return;
case Stream::POINTS: {
stream_enabled_mode_[stream] = MODE_SYNTHETIC;
if (calib_model_ == CalibrationModel::PINHOLE) {
EnableStreamData(Stream::DISPARITY, depth + 1);
CHECK(ActivateProcessor<PointsProcessorOCV>());
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
EnableStreamData(Stream::DEPTH, depth + 1);
CHECK(ActivateProcessor<PointsProcessor>());
#endif
} else {
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_;
}
} return;
EnableStreamData(Stream::DISPARITY, depth + 1);
CHECK(ActivateProcessor<PointsProcessor>());
}
return;
case Stream::DEPTH: {
stream_enabled_mode_[stream] = MODE_SYNTHETIC;
if (calib_model_ == CalibrationModel::PINHOLE) {
EnableStreamData(Stream::POINTS, depth + 1);
CHECK(ActivateProcessor<DepthProcessorOCV>());
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
EnableStreamData(Stream::DISPARITY, depth + 1);
CHECK(ActivateProcessor<DepthProcessor>());
#endif
} else {
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_;
}
} return;
default: break;
EnableStreamData(Stream::POINTS, depth + 1);
CHECK(ActivateProcessor<DepthProcessor>());
}
return;
default:
break;
}
if (depth == 0) {
LOG(WARNING) << "Enable stream data of " << stream << " failed";
@@ -496,101 +362,46 @@ void Synthetic::DisableStreamData(const Stream &stream, std::uint32_t depth) {
stream_enabled_mode_.erase(stream);
switch (stream) {
case Stream::LEFT_RECTIFIED: {
if (IsStreamEnabledSynthetic(Stream::DISPARITY)) {
DisableStreamData(Stream::DISPARITY, depth + 1);
}
if (IsStreamEnabledSynthetic(Stream::RIGHT_RECTIFIED)) {
DisableStreamData(Stream::RIGHT_RECTIFIED, depth + 1);
}
if (calib_model_ == CalibrationModel::PINHOLE) {
DeactivateProcessor<RectifyProcessorOCV>();
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
DeactivateProcessor<RectifyProcessor>();
#endif
} else {
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_ << ", use default pinhole model";
DeactivateProcessor<RectifyProcessorOCV>();
}
} break;
case Stream::RIGHT_RECTIFIED: {
if (IsStreamEnabledSynthetic(Stream::DISPARITY)) {
DisableStreamData(Stream::DISPARITY, depth + 1);
}
DeactivateProcessor<RectifyProcessor>();
} break;
case Stream::RIGHT_RECTIFIED: {
if (IsStreamEnabledSynthetic(Stream::LEFT_RECTIFIED)) {
DisableStreamData(Stream::LEFT_RECTIFIED, depth + 1);
}
if (calib_model_ == CalibrationModel::PINHOLE) {
DeactivateProcessor<RectifyProcessorOCV>();
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
DeactivateProcessor<RectifyProcessor>();
#endif
} else {
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_ << ", use default pinhole model";
DeactivateProcessor<RectifyProcessorOCV>();
if (IsStreamEnabledSynthetic(Stream::DISPARITY)) {
DisableStreamData(Stream::DISPARITY, depth + 1);
}
DeactivateProcessor<RectifyProcessor>();
} break;
case Stream::DISPARITY: {
if (calib_model_ == CalibrationModel::PINHOLE) {
if (IsStreamEnabledSynthetic(Stream::DISPARITY_NORMALIZED)) {
DisableStreamData(Stream::DISPARITY_NORMALIZED, depth + 1);
}
if (IsStreamEnabledSynthetic(Stream::POINTS)) {
DisableStreamData(Stream::POINTS, depth + 1);
}
DeactivateProcessor<DisparityProcessor>();
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
if (IsStreamEnabledSynthetic(Stream::DISPARITY_NORMALIZED)) {
DisableStreamData(Stream::DISPARITY_NORMALIZED, depth + 1);
}
if (IsStreamEnabledSynthetic(Stream::DEPTH)) {
DisableStreamData(Stream::DEPTH, depth + 1);
}
DeactivateProcessor<DisparityProcessor>();
#endif
} else {
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_;
if (IsStreamEnabledSynthetic(Stream::DISPARITY_NORMALIZED)) {
DisableStreamData(Stream::DISPARITY_NORMALIZED, depth + 1);
}
if (IsStreamEnabledSynthetic(Stream::POINTS)) {
DisableStreamData(Stream::POINTS, depth + 1);
}
DeactivateProcessor<DisparityProcessor>();
} break;
case Stream::DISPARITY_NORMALIZED: {
DeactivateProcessor<DisparityNormalizedProcessor>();
} break;
case Stream::POINTS: {
if (calib_model_ == CalibrationModel::PINHOLE) {
if (IsStreamEnabledSynthetic(Stream::DEPTH)) {
DisableStreamData(Stream::DEPTH, depth + 1);
}
DeactivateProcessor<PointsProcessorOCV>();
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
DeactivateProcessor<PointsProcessor>();
#endif
} else {
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_;
if (IsStreamEnabledSynthetic(Stream::DEPTH)) {
DisableStreamData(Stream::DEPTH, depth + 1);
}
DeactivateProcessor<PointsProcessor>();
} break;
case Stream::DEPTH: {
if (calib_model_ == CalibrationModel::PINHOLE) {
DeactivateProcessor<DepthProcessorOCV>();
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
if (IsStreamEnabledSynthetic(Stream::POINTS)) {
DisableStreamData(Stream::POINTS, depth + 1);
}
DeactivateProcessor<DepthProcessor>();
#endif
} else {
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_;
}
DeactivateProcessor<DepthProcessor>();
} break;
default: return;
default:
return;
}
if (depth > 0) {
LOG(WARNING) << "Disable synthetic stream data of " << stream << " too";
@@ -601,64 +412,16 @@ void Synthetic::DisableStreamData(const Stream &stream, std::uint32_t depth) {
}
void Synthetic::InitProcessors() {
std::shared_ptr<Processor> rectify_processor = nullptr;
#ifdef WITH_CAM_MODELS
std::shared_ptr<RectifyProcessor> rectify_processor_imp = nullptr;
#endif
cv::Mat Q;
if (calib_model_ == CalibrationModel::PINHOLE) {
auto &&rectify_processor_ocv =
std::make_shared<RectifyProcessorOCV>(intr_left_, intr_right_, extr_,
RECTIFY_PROC_PERIOD);
rectify_processor = rectify_processor_ocv;
Q = rectify_processor_ocv->Q;
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
rectify_processor_imp =
std::make_shared<RectifyProcessor>(intr_left_, intr_right_, extr_,
RECTIFY_PROC_PERIOD);
rectify_processor = rectify_processor_imp;
#endif
} else {
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_ << ", use default pinhole model";
auto &&rectify_processor_ocv =
std::make_shared<RectifyProcessorOCV>(intr_left_, intr_right_, extr_,
RECTIFY_PROC_PERIOD);
rectify_processor = rectify_processor_ocv;
}
auto &&rectify_processor =
std::make_shared<RectifyProcessor>(api_->device(), RECTIFY_PROC_PERIOD);
auto &&disparity_processor =
std::make_shared<DisparityProcessor>(DisparityProcessorType::SGBM,
DISPARITY_PROC_PERIOD);
std::make_shared<DisparityProcessor>(DISPARITY_PROC_PERIOD);
auto &&disparitynormalized_processor =
std::make_shared<DisparityNormalizedProcessor>(
DISPARITY_NORM_PROC_PERIOD);
std::shared_ptr<Processor> points_processor = nullptr;
if (calib_model_ == CalibrationModel::PINHOLE) {
points_processor = std::make_shared<PointsProcessorOCV>(
Q, POINTS_PROC_PERIOD);
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
points_processor = std::make_shared<PointsProcessor>(
rectify_processor_imp -> getCalibInfoPair(),
POINTS_PROC_PERIOD);
#endif
} else {
points_processor = std::make_shared<PointsProcessorOCV>(
Q, POINTS_PROC_PERIOD);
}
std::shared_ptr<Processor> depth_processor = nullptr;
if (calib_model_ == CalibrationModel::PINHOLE) {
depth_processor = std::make_shared<DepthProcessorOCV>(DEPTH_PROC_PERIOD);
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
depth_processor = std::make_shared<DepthProcessor>(
rectify_processor_imp -> getCalibInfoPair(),
DEPTH_PROC_PERIOD);
#endif
} else {
depth_processor = std::make_shared<DepthProcessorOCV>(DEPTH_PROC_PERIOD);
}
auto &&points_processor = std::make_shared<PointsProcessor>(
rectify_processor->Q, POINTS_PROC_PERIOD);
auto &&depth_processor = std::make_shared<DepthProcessor>(DEPTH_PROC_PERIOD);
using namespace std::placeholders; // NOLINT
rectify_processor->SetProcessCallback(
@@ -683,23 +446,10 @@ void Synthetic::InitProcessors() {
depth_processor->SetPostProcessCallback(
std::bind(&Synthetic::OnDepthPostProcess, this, _1));
if (calib_model_ == CalibrationModel::PINHOLE) {
// PINHOLE
rectify_processor->AddChild(disparity_processor);
disparity_processor->AddChild(disparitynormalized_processor);
disparity_processor->AddChild(points_processor);
points_processor->AddChild(depth_processor);
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
// KANNALA_BRANDT
rectify_processor->AddChild(disparity_processor);
disparity_processor->AddChild(disparitynormalized_processor);
disparity_processor->AddChild(depth_processor);
depth_processor->AddChild(points_processor);
} else {
// UNKNOW
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_;
}
rectify_processor->AddChild(disparity_processor);
disparity_processor->AddChild(disparitynormalized_processor);
disparity_processor->AddChild(points_processor);
points_processor->AddChild(depth_processor);
processor_ = rectify_processor;
}
@@ -715,18 +465,7 @@ void Synthetic::ProcessNativeStream(
}
if (left_data.img && right_data.img &&
left_data.img->frame_id == right_data.img->frame_id) {
std::shared_ptr<Processor> processor = nullptr;
if (calib_model_ == CalibrationModel::PINHOLE) {
processor = find_processor<RectifyProcessorOCV>(processor_);
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
processor = find_processor<RectifyProcessor>(processor_);
#endif
} else {
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_ << ", use default pinhole model";
processor = find_processor<RectifyProcessorOCV>(processor_);
}
auto &&processor = find_processor<RectifyProcessor>(processor_);
processor->Process(ObjMat2{
left_data.frame, left_data.frame_id, left_data.img,
right_data.frame, right_data.frame_id, right_data.img});
@@ -743,16 +482,8 @@ void Synthetic::ProcessNativeStream(
}
if (left_rect_data.img && right_rect_data.img &&
left_rect_data.img->frame_id == right_rect_data.img->frame_id) {
std::string name = RectifyProcessorOCV::NAME;
if (calib_model_ == CalibrationModel::PINHOLE) {
name = RectifyProcessorOCV::NAME;
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
name = RectifyProcessor::NAME;
#endif
}
process_childs(
processor_, name, ObjMat2{
processor_, RectifyProcessor::NAME, ObjMat2{
left_rect_data.frame, left_rect_data.frame_id, left_rect_data.img,
right_rect_data.frame, right_rect_data.frame_id,
right_rect_data.img});
@@ -770,38 +501,12 @@ void Synthetic::ProcessNativeStream(
ObjMat{data.frame, data.frame_id, data.img});
} break;
case Stream::POINTS: {
if (calib_model_ == CalibrationModel::PINHOLE) {
// PINHOLE
process_childs(processor_, PointsProcessorOCV::NAME,
ObjMat{data.frame, data.frame_id, data.img});
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
// KANNALA_BRANDT
process_childs(processor_, PointsProcessor::NAME,
ObjMat{data.frame, data.frame_id, data.img});
#endif
} else {
// UNKNOW
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_;
}
process_childs(processor_, PointsProcessor::NAME,
ObjMat{data.frame, data.frame_id, data.img});
} break;
case Stream::DEPTH: {
if (calib_model_ == CalibrationModel::PINHOLE) {
// PINHOLE
process_childs(processor_, DepthProcessorOCV::NAME,
ObjMat{data.frame, data.frame_id, data.img});
#ifdef WITH_CAM_MODELS
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
// KANNALA_BRANDT
process_childs(processor_, DepthProcessor::NAME,
ObjMat{data.frame, data.frame_id, data.img});
#endif
} else {
// UNKNOW
LOG(ERROR) << "Unknow calib model type in device: "
<< calib_model_;
}
process_childs(processor_, DepthProcessor::NAME,
ObjMat{data.frame, data.frame_id, data.img});
} break;
default:
break;

View File

@@ -21,7 +21,6 @@
#include <vector>
#include "mynteye/api/api.h"
#include "mynteye/api/config.h"
MYNTEYE_BEGIN_NAMESPACE
@@ -41,11 +40,9 @@ class Synthetic {
MODE_LAST // Unsupported
} mode_t;
explicit Synthetic(API *api, CalibrationModel calib_model);
explicit Synthetic(API *api);
~Synthetic();
void NotifyImageParamsChanged();
bool Supports(const Stream &stream) const;
mode_t SupportsMode(const Stream &stream) const;
@@ -68,7 +65,6 @@ class Synthetic {
bool HasPlugin() const;
private:
void InitCalibInfo();
void InitStreamSupports();
mode_t GetStreamEnabledMode(const Stream &stream) const;
@@ -114,13 +110,6 @@ class Synthetic {
std::shared_ptr<Processor> processor_;
std::shared_ptr<Plugin> plugin_;
CalibrationModel calib_model_;
std::shared_ptr<IntrinsicsBase> intr_left_;
std::shared_ptr<IntrinsicsBase> intr_right_;
std::shared_ptr<Extrinsics> extr_;
bool calib_default_tag_;
};
template <class T, class P>

View File

@@ -1,289 +0,0 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/device/channel/bytes.h"
#include "mynteye/logger.h"
#include "mynteye/util/strings.h"
MYNTEYE_BEGIN_NAMESPACE
namespace bytes {
// from
// Reads `count` raw bytes as text and strips surrounding whitespace
// (fixed-width fields are space-padded on the device).
std::string _from_data(const std::uint8_t *data, std::size_t count) {
  std::string text(reinterpret_cast<const char *>(data), count);
  strings::trim(text);
  return text;
}
// from types
// Dispatches intrinsics deserialization on the runtime calibration model.
// Returns the number of bytes consumed from `data`.
std::size_t from_data(IntrinsicsBase *in, const std::uint8_t *data,
    bool get_size) {
  switch (in->calib_model()) {
    case CalibrationModel::PINHOLE:
      return from_data(dynamic_cast<IntrinsicsPinhole *>(in), data,
          get_size);
    case CalibrationModel::KANNALA_BRANDT:
      return from_data(dynamic_cast<IntrinsicsEquidistant *>(in), data,
          get_size);
    default:
      LOG(FATAL) << "Unknown calib model: " << in->calib_model();
      // Not reached (LOG(FATAL) aborts), but keeps the function from
      // falling off the end of a non-void return path (-Wreturn-type / UB).
      return 0;
  }
}
// Deserializes pinhole intrinsics from the calibration byte stream.
// Layout: [width u16][height u16] (only when get_size) | fx f64 | fy f64 |
// cx f64 | cy f64 | [model u8] (only when get_size) | coeffs 5*f64.
// Integers are big-endian (see _from_data<T>); doubles are raw 8-byte values.
// Returns the number of bytes consumed.
std::size_t from_data(IntrinsicsPinhole *in, const std::uint8_t *data,
    bool get_size) {
  std::size_t i = 0;
  if (get_size) {
    // width, 2
    in->width = _from_data<std::uint16_t>(data + i);
    i += 2;
    // height, 2
    in->height = _from_data<std::uint16_t>(data + i);
    i += 2;
  }
  // fx, 8
  in->fx = _from_data<double>(data + i);
  i += 8;
  // fy, 8
  in->fy = _from_data<double>(data + i);
  i += 8;
  // cx, 8
  in->cx = _from_data<double>(data + i);
  i += 8;
  // cy, 8
  in->cy = _from_data<double>(data + i);
  i += 8;
  if (get_size) {
    // model, 1
    in->model = data[i];
    i += 1;
  }
  // coeffs, 40 (5 doubles; presumably k1,k2,p1,p2,k3 — confirm against types.h)
  for (std::size_t j = 0; j < 5; j++) {
    in->coeffs[j] = _from_data<double>(data + i + j * 8);
  }
  i += 40;
  return i;
}
// Deserializes equidistant (KANNALA_BRANDT) intrinsics.
// Layout: optional [width u16][height u16] (when get_size), then eight
// 8-byte distortion coefficients. Returns the number of bytes consumed.
std::size_t from_data(IntrinsicsEquidistant *in, const std::uint8_t *data,
    bool get_size) {
  std::size_t offset = 0;
  if (get_size) {
    in->width = _from_data<std::uint16_t>(data + offset);
    offset += 2;
    in->height = _from_data<std::uint16_t>(data + offset);
    offset += 2;
  }
  for (std::size_t idx = 0; idx < 8; ++idx) {
    in->coeffs[idx] = _from_data<double>(data + offset);
    offset += 8;
  }
  return offset;
}
// Deserializes IMU intrinsics: a 3x3 scale matrix (row-major) followed by
// drift, noise and bias vectors (3 doubles each), 8 bytes per value.
// Returns the number of bytes consumed (72 + 3 * 24 = 144).
std::size_t from_data(ImuIntrinsics *in, const std::uint8_t *data) {
  std::size_t i = 0;
  // scale, 3x3 doubles (72 bytes), row-major
  for (std::size_t j = 0; j < 3; j++) {
    for (std::size_t k = 0; k < 3; k++) {
      in->scale[j][k] = _from_data<double>(data + i + (j * 3 + k) * 8);
    }
  }
  i += 72;
  // drift, 3 doubles (24 bytes)
  for (std::size_t j = 0; j < 3; j++) {
    in->drift[j] = _from_data<double>(data + i + j * 8);
  }
  i += 24;
  // noise, 3 doubles (24 bytes)
  for (std::size_t j = 0; j < 3; j++) {
    in->noise[j] = _from_data<double>(data + i + j * 8);
  }
  i += 24;
  // bias, 3 doubles (24 bytes)
  for (std::size_t j = 0; j < 3; j++) {
    in->bias[j] = _from_data<double>(data + i + j * 8);
  }
  i += 24;
  return i;
}
// Deserializes an extrinsic transform: 3x3 rotation matrix (row-major)
// followed by a 3-element translation, 8 bytes per double.
// Returns the number of bytes consumed (72 + 24 = 96).
std::size_t from_data(Extrinsics *ex, const std::uint8_t *data) {
  std::size_t offset = 0;
  for (std::size_t row = 0; row < 3; ++row) {
    for (std::size_t col = 0; col < 3; ++col) {
      ex->rotation[row][col] = _from_data<double>(data + offset);
      offset += 8;
    }
  }
  for (std::size_t axis = 0; axis < 3; ++axis) {
    ex->translation[axis] = _from_data<double>(data + offset);
    offset += 8;
  }
  return offset;
}
// to
// Writes `value` into a fixed-width field of `count` bytes, padding the
// remainder with spaces. Strings longer than `count` are truncated — the
// original copied value.size() bytes and could overrun the field.
// Returns `count` (bytes written).
std::size_t _to_data(std::string value, std::uint8_t *data, std::size_t count) {
  const std::size_t n = std::min(value.size(), count);
  std::copy_n(value.begin(), n, data);
  std::fill(data + n, data + count, ' ');
  return count;
}
// to types
// Dispatches intrinsics serialization on the runtime calibration model.
// Returns the number of bytes written to `data`.
std::size_t to_data(const IntrinsicsBase *in, std::uint8_t *data,
    bool set_size) {
  switch (in->calib_model()) {
    case CalibrationModel::PINHOLE:
      return to_data(dynamic_cast<const IntrinsicsPinhole *>(in), data,
          set_size);
    case CalibrationModel::KANNALA_BRANDT:
      return to_data(dynamic_cast<const IntrinsicsEquidistant *>(in), data,
          set_size);
    default:
      LOG(FATAL) << "Unknown calib model: " << in->calib_model();
      // Not reached (LOG(FATAL) aborts), but keeps the function from
      // falling off the end of a non-void return path (-Wreturn-type / UB).
      return 0;
  }
}
// Serializes pinhole intrinsics; exact mirror of the corresponding
// from_data() layout: [width u16][height u16] (only when set_size) |
// fx/fy/cx/cy f64 | [model u8] (only when set_size) | coeffs 5*f64.
// Returns the number of bytes written.
std::size_t to_data(const IntrinsicsPinhole *in, std::uint8_t *data,
    bool set_size) {
  std::size_t i = 0;
  if (set_size) {
    // width, 2
    _to_data(in->width, data + i);
    i += 2;
    // height, 2
    _to_data(in->height, data + i);
    i += 2;
  }
  // fx, 8
  _to_data(in->fx, data + i);
  i += 8;
  // fy, 8
  _to_data(in->fy, data + i);
  i += 8;
  // cx, 8
  _to_data(in->cx, data + i);
  i += 8;
  // cy, 8
  _to_data(in->cy, data + i);
  i += 8;
  if (set_size) {
    // model, 1
    data[i] = in->model;
    i += 1;
  }
  // coeffs, 40 (5 doubles)
  for (std::size_t j = 0; j < 5; j++) {
    _to_data(in->coeffs[j], data + i + j * 8);
  }
  i += 40;
  return i;
}
// Serializes equidistant (KANNALA_BRANDT) intrinsics; mirror of the
// corresponding from_data() layout: optional [width u16][height u16]
// (when set_size), then eight 8-byte coefficients.
// Returns the number of bytes written.
std::size_t to_data(const IntrinsicsEquidistant *in, std::uint8_t *data,
    bool set_size) {
  std::size_t i = 0;
  if (set_size) {
    // width, 2
    _to_data(in->width, data + i);
    i += 2;
    // height, 2
    _to_data(in->height, data + i);
    i += 2;
  }
  // coeffs, 64 (8 doubles)
  for (std::size_t j = 0; j < 8; j++) {
    _to_data(in->coeffs[j], data + i + j * 8);
  }
  i += 64;
  return i;
}
// Serializes IMU intrinsics; mirror of the corresponding from_data():
// 3x3 scale matrix (row-major), then drift, noise and bias vectors,
// 8 bytes per double. Returns the number of bytes written (144).
std::size_t to_data(const ImuIntrinsics *in, std::uint8_t *data) {
  std::size_t i = 0;
  // scale, 3x3 doubles (72 bytes), row-major
  for (std::size_t j = 0; j < 3; j++) {
    for (std::size_t k = 0; k < 3; k++) {
      _to_data(in->scale[j][k], data + i + (j * 3 + k) * 8);
    }
  }
  i += 72;
  // drift, 3 doubles (24 bytes)
  for (std::size_t j = 0; j < 3; j++) {
    _to_data(in->drift[j], data + i + j * 8);
  }
  i += 24;
  // noise, 3 doubles (24 bytes)
  for (std::size_t j = 0; j < 3; j++) {
    _to_data(in->noise[j], data + i + j * 8);
  }
  i += 24;
  // bias, 3 doubles (24 bytes)
  for (std::size_t j = 0; j < 3; j++) {
    _to_data(in->bias[j], data + i + j * 8);
  }
  i += 24;
  return i;
}
// Serializes an extrinsic transform; mirror of the corresponding
// from_data(): 3x3 rotation (row-major) then 3-element translation,
// 8 bytes per double. Returns the number of bytes written (96).
std::size_t to_data(const Extrinsics *ex, std::uint8_t *data) {
  std::size_t i = 0;
  // rotation, 3x3 doubles (72 bytes), row-major
  for (std::size_t j = 0; j < 3; j++) {
    for (std::size_t k = 0; k < 3; k++) {
      _to_data(ex->rotation[j][k], data + i + (j * 3 + k) * 8);
    }
  }
  i += 72;
  // translation, 3 doubles (24 bytes)
  for (std::size_t j = 0; j < 3; j++) {
    _to_data(ex->translation[j], data + i + j * 8);
  }
  i += 24;
  return i;
}
} // namespace bytes
MYNTEYE_END_NAMESPACE

View File

@@ -1,99 +0,0 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_DEVICE_CHANNEL_BYTES_H_
#define MYNTEYE_DEVICE_CHANNEL_BYTES_H_
#pragma once
#include <algorithm>
#include <cstdint>
#include <cstring>
#include <string>

#include "mynteye/mynteye.h"
#include "mynteye/types.h"
#include "mynteye/device/channel/def.h"
#include "mynteye/device/types.h"
MYNTEYE_BEGIN_NAMESPACE
namespace bytes {
// from
// Decodes an integral value stored big-endian in `data`.
template <typename T>
T _from_data(const std::uint8_t *data) {
  std::size_t size = sizeof(T) / sizeof(std::uint8_t);
  T value = 0;
  for (std::size_t i = 0; i < size; i++) {
    // Widen to T before shifting: the int-promoted byte shifted by >= 32
    // bits (any 8-byte T) is undefined behavior and can sign-extend wrongly.
    value |= static_cast<T>(data[i]) << (8 * (size - i - 1));
  }
  return value;
}

// Doubles are stored in their native in-memory representation; use memcpy
// instead of a type-punned reinterpret_cast, which violates strict aliasing
// and may fault on alignment-strict targets.
template <>
inline double _from_data(const std::uint8_t *data) {
  double value;
  std::memcpy(&value, data, sizeof(value));
  return value;
}
std::string _from_data(const std::uint8_t *data, std::size_t count);
// from types
std::size_t from_data(IntrinsicsBase *in, const std::uint8_t *data,
bool get_size);
std::size_t from_data(IntrinsicsPinhole *in, const std::uint8_t *data,
bool get_size);
std::size_t from_data(IntrinsicsEquidistant *in, const std::uint8_t *data,
bool get_size);
std::size_t from_data(ImuIntrinsics *in, const std::uint8_t *data);
std::size_t from_data(Extrinsics *ex, const std::uint8_t *data);
// to
// Encodes an integral value as big-endian bytes; returns bytes written.
template <typename T>
std::size_t _to_data(T value, std::uint8_t *data) {
  constexpr std::size_t kSize = sizeof(T) / sizeof(std::uint8_t);
  for (std::size_t idx = 0; idx < kSize; ++idx) {
    const std::size_t shift = 8 * (kSize - idx - 1);
    data[idx] = static_cast<std::uint8_t>((value >> shift) & 0xFF);
  }
  return kSize;
}

// Doubles are written verbatim in their native in-memory representation
// (reading an object through an unsigned-char pointer is well-defined).
template <>
inline std::size_t _to_data(double value, std::uint8_t *data) {
  const std::uint8_t *raw = reinterpret_cast<const std::uint8_t *>(&value);
  std::copy(raw, raw + sizeof(double), data);
  return sizeof(double);
}
std::size_t _to_data(std::string value, std::uint8_t *data, std::size_t count);
// to types
std::size_t to_data(const IntrinsicsBase *in, std::uint8_t *data,
bool set_size);
std::size_t to_data(const IntrinsicsPinhole *in, std::uint8_t *data,
bool set_size);
std::size_t to_data(const IntrinsicsEquidistant *in, std::uint8_t *data,
bool set_size);
std::size_t to_data(const ImuIntrinsics *in, std::uint8_t *data);
std::size_t to_data(const Extrinsics *ex, std::uint8_t *data);
} // namespace bytes
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_DEVICE_CHANNEL_BYTES_H_

View File

@@ -1,111 +0,0 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_DEVICE_CHANNEL_DEF_H_
#define MYNTEYE_DEVICE_CHANNEL_DEF_H_
#pragma once
#include <array>
#include <cstdint>
#include <vector>
#include "mynteye/mynteye.h"
MYNTEYE_BEGIN_NAMESPACE
// Communication channels used to talk to the device. The numeric values are
// part of the device protocol — do not renumber.
typedef enum Channel {
  CHANNEL_CAM_CTRL = 1,     // camera control
  CHANNEL_HALF_DUPLEX = 2,  // half-duplex command channel
  CHANNEL_IMU_WRITE = 3,    // imu request
  CHANNEL_IMU_READ = 4,     // imu response
  CHANNEL_FILE = 5,         // calibration/parameter file transfer
  CHANNEL_LAST              // sentinel, not a real channel
} channel_t;
// Ids of the parameter files stored on the device. Values (1, 2, 4) look
// like bit flags — presumably combinable when requesting several files at
// once; confirm against the channel protocol before relying on that.
typedef enum FileId {
  FID_DEVICE_INFO = 1,  // device info
  FID_IMG_PARAMS = 2,   // image intrinsics & extrinsics
  FID_IMU_PARAMS = 4,   // imu intrinsics & extrinsics
  FID_LAST,             // sentinel, not a real file id
} file_id_t;
/**
 * @ingroup datatypes
 * Imu request packet.
 *
 * Wire format: one header byte (0x5A by default) followed by the
 * serial number in big-endian byte order — 5 bytes total.
 */
#pragma pack(push, 1)
struct ImuReqPacket {
  std::uint8_t header;
  std::uint32_t serial_number;

  ImuReqPacket() = default;
  explicit ImuReqPacket(std::uint32_t serial_number)
      : ImuReqPacket(0x5A, serial_number) {}
  ImuReqPacket(std::uint8_t header, std::uint32_t serial_number)
      : header(header), serial_number(serial_number) {}

  /** Serializes the packet into its 5-byte wire representation. */
  std::array<std::uint8_t, 5> to_data() const {
    std::array<std::uint8_t, 5> bytes{};
    bytes[0] = header;
    bytes[1] = static_cast<std::uint8_t>((serial_number >> 24) & 0xFF);
    bytes[2] = static_cast<std::uint8_t>((serial_number >> 16) & 0xFF);
    bytes[3] = static_cast<std::uint8_t>((serial_number >> 8) & 0xFF);
    bytes[4] = static_cast<std::uint8_t>(serial_number & 0xFF);
    return bytes;
  }
};
#pragma pack(pop)
/**
 * @ingroup datatypes
 * Imu segment.
 *
 * One raw IMU sample as delivered by the device; packed to 1-byte
 * alignment to match the wire layout.
 */
#pragma pack(push, 1)
struct ImuSegment {
  std::uint32_t frame_id;
  std::uint64_t timestamp;   // device timestamp; units/epoch not defined here
  std::uint8_t flag;         // sample type flag; semantics set by the protocol
  std::int16_t temperature;  // raw sensor value; scaling applied elsewhere
  std::int16_t accel[3];     // raw accelerometer samples (presumably x,y,z)
  std::int16_t gyro[3];      // raw gyroscope samples (presumably x,y,z)
};
#pragma pack(pop)
/**
 * @ingroup datatypes
 * Imu packet.
 *
 * A group of IMU segments sharing one serial number.
 * NOTE(review): std::vector is a non-trivial member inside a
 * #pragma pack(1) region — packing affects member offsets only; this
 * struct is not itself a byte-for-byte wire image.
 */
#pragma pack(push, 1)
struct ImuPacket {
  std::uint8_t version;
  std::uint8_t count;  // presumably the number of segments; confirm
  std::uint32_t serial_number;
  std::vector<ImuSegment> segments;
};
#pragma pack(pop)
/**
 * @ingroup datatypes
 * Imu response packet.
 *
 * Top-level response holding one or more ImuPackets.
 * NOTE(review): contains std::vector inside #pragma pack(1) — see ImuPacket;
 * not a byte-for-byte wire image.
 */
#pragma pack(push, 1)
struct ImuResPacket {
  std::uint8_t version;
  std::uint8_t header;
  std::uint8_t state;     // device-reported status; semantics set by protocol
  std::uint16_t size;     // payload size as reported by the device
  std::vector<ImuPacket> packets;
  std::uint8_t checksum;  // checksum over the payload; algorithm not shown here
};
#pragma pack(pop)
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_DEVICE_CHANNEL_DEF_H_

View File

@@ -1,497 +0,0 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/device/channel/file_channel.h"
#include "mynteye/logger.h"
MYNTEYE_BEGIN_NAMESPACE
// FileChannel
// Creates the per-file-id parsers used to (de)serialize device files.
FileChannel::FileChannel()
  : dev_info_parser_(std::make_shared<DeviceInfoParser>()),
    img_params_parser_(std::make_shared<ImgParamsParser>()),
    imu_params_parser_(std::make_shared<ImuParamsParser>()) {
}
// No explicit cleanup needed: the parsers are owned by shared_ptr members.
FileChannel::~FileChannel() {
}
// Parses the device-info file and, as a side effect, propagates its
// spec_version to the image/imu parsers so they can pick the right layout
// for subsequent files. Returns the number of bytes consumed.
std::size_t FileChannel::GetDeviceInfoFromData(
    const std::uint8_t *data, const std::uint16_t &data_size,
    device_info_t *info) {
  auto n = dev_info_parser_->GetFromData(data, data_size, info);
  auto spec_version = info->spec_version;
  img_params_parser_->SetSpecVersion(spec_version);
  imu_params_parser_->SetSpecVersion(spec_version);
  return n;
}

// Serializes device info; also seeds the image/imu parsers with the
// spec_version being written so later Set*ToData calls stay consistent.
// Returns the number of bytes written.
std::size_t FileChannel::SetDeviceInfoToData(
    const device_info_t *info, std::uint8_t *data) {
  auto spec_version = info->spec_version;
  img_params_parser_->SetSpecVersion(spec_version);
  imu_params_parser_->SetSpecVersion(spec_version);
  return dev_info_parser_->SetToData(info, data);
}

// Delegates image-params parsing; returns the number of bytes consumed.
std::size_t FileChannel::GetImgParamsFromData(
    const std::uint8_t *data, const std::uint16_t &data_size,
    img_params_t *img_params) {
  CHECK_NOTNULL(img_params_parser_);
  return img_params_parser_->GetFromData(data, data_size, img_params);
}

// Delegates image-params serialization; returns the number of bytes written.
std::size_t FileChannel::SetImgParamsToData(
    const img_params_t *img_params, std::uint8_t *data) {
  CHECK_NOTNULL(img_params_parser_);
  return img_params_parser_->SetToData(img_params, data);
}
std::size_t FileChannel::GetImuParamsFromData(
const std::uint8_t *data, const std::uint16_t &data_size,
imu_params_t *imu_params) {
return imu_params_parser_->GetFromData(data, data_size, imu_params);
}
std::size_t FileChannel::SetImuParamsToData(
const imu_params_t *imu_params, std::uint8_t *data) {
return imu_params_parser_->SetToData(imu_params, data);
}
// DeviceInfoParser
DeviceInfoParser::DeviceInfoParser() {
}

DeviceInfoParser::~DeviceInfoParser() {
}

// Parses the device-info file body. Fixed layout: 4 reserved bytes
// (vid, pid), then name(16) | serial(16) | fw(2) | hw(3) | spec(2) |
// lens_type(4) | imu_type(4) | nominal_baseline(2). Multi-byte integers
// are big-endian; strings are space-padded and trimmed on read.
// Returns the number of bytes consumed.
std::size_t DeviceInfoParser::GetFromData(
    const std::uint8_t *data, const std::uint16_t &data_size,
    device_info_t *info) const {
  std::size_t i = 4;  // skip vid, pid
  // name, 16
  info->name = bytes::_from_data(data + i, 16);
  i += 16;
  // serial_number, 16
  info->serial_number = bytes::_from_data(data + i, 16);
  i += 16;
  // firmware_version, 2
  info->firmware_version.set_major(data[i]);
  info->firmware_version.set_minor(data[i + 1]);
  i += 2;
  // hardware_version, 3
  info->hardware_version.set_major(data[i]);
  info->hardware_version.set_minor(data[i + 1]);
  info->hardware_version.set_flag(std::bitset<8>(data[i + 2]));
  i += 3;
  // spec_version, 2
  info->spec_version.set_major(data[i]);
  info->spec_version.set_minor(data[i + 1]);
  i += 2;
  // lens_type, 4
  info->lens_type.set_vendor(bytes::_from_data<std::uint16_t>(data + i));
  info->lens_type.set_product(bytes::_from_data<std::uint16_t>(data + i + 2));
  i += 4;
  // imu_type, 4
  info->imu_type.set_vendor(bytes::_from_data<std::uint16_t>(data + i));
  info->imu_type.set_product(bytes::_from_data<std::uint16_t>(data + i + 2));
  i += 4;
  // nominal_baseline, 2
  info->nominal_baseline = bytes::_from_data<std::uint16_t>(data + i);
  i += 2;
  // get other infos according to spec_version
  MYNTEYE_UNUSED(data_size)
  return i;
}
// Serializes device info; mirror of GetFromData(). Reserves data[0..2] for
// the file-id + 16-bit payload size trailer that is filled in at the end,
// plus 4 bytes for vid/pid. Returns the total bytes written (payload + 3).
std::size_t DeviceInfoParser::SetToData(
    const device_info_t *info, std::uint8_t *data) const {
  std::size_t i = 3;  // skip id, size
  i += 4;  // skip vid, pid
  // name, 16
  bytes::_to_data(info->name, data + i, 16);
  i += 16;
  // serial_number, 16
  bytes::_to_data(info->serial_number, data + i, 16);
  i += 16;
  // firmware_version, 2
  data[i] = info->firmware_version.major();
  data[i + 1] = info->firmware_version.minor();
  i += 2;
  // hardware_version, 3
  data[i] = info->hardware_version.major();
  data[i + 1] = info->hardware_version.minor();
  data[i + 2] =
      static_cast<std::uint8_t>(info->hardware_version.flag().to_ulong());
  i += 3;
  // spec_version, 2
  data[i] = info->spec_version.major();
  data[i + 1] = info->spec_version.minor();
  i += 2;
  // lens_type, 4
  bytes::_to_data(info->lens_type.vendor(), data + i);
  bytes::_to_data(info->lens_type.product(), data + i + 2);
  i += 4;
  // imu_type, 4
  bytes::_to_data(info->imu_type.vendor(), data + i);
  bytes::_to_data(info->imu_type.product(), data + i + 2);
  i += 4;
  // nominal_baseline, 2
  bytes::_to_data(info->nominal_baseline, data + i);
  i += 2;
  // set other infos according to spec_version
  // others: write the FID_DEVICE_INFO header (id + big-endian payload size)
  std::size_t size = i - 3;
  data[0] = FID_DEVICE_INFO;
  data[1] = static_cast<std::uint8_t>((size >> 8) & 0xFF);
  data[2] = static_cast<std::uint8_t>(size & 0xFF);
  return size + 3;
}
// ImgParamsParser
ImgParamsParser::ImgParamsParser() {
}

ImgParamsParser::~ImgParamsParser() {
}

// Dispatches to the right decoder based on the device spec version and the
// raw payload size — the legacy formats carried no version field of their
// own, so the exact byte count (250 / 404) is used to recognize them.
// Returns the number of bytes consumed.
std::size_t ImgParamsParser::GetFromData(
    const std::uint8_t *data, const std::uint16_t &data_size,
    img_params_t *img_params) const {
  // s1030 old params
  if (spec_version_ == Version(1, 0) && data_size == 250) {
    return GetFromData_v1_0(data, data_size, img_params);
  }
  // s210a old params
  if (spec_version_ == Version(1, 1) && data_size == 404) {
    return GetFromData_v1_1(data, data_size, img_params);
  }
  // get img params with new version format
  return GetFromData_new(data, data_size, img_params);
}

// Writes are always emitted in the current versioned format, regardless of
// the format the params were originally read in.
std::size_t ImgParamsParser::SetToData(
    const img_params_t *img_params, std::uint8_t *data) const {
  // always set img params with new version format
  return SetToData_new(img_params, data);
}
// Decodes the legacy S1030 (spec v1.0) layout: left/right pinhole
// intrinsics followed by one right-to-left extrinsic transform, stored for
// the single 752x480 resolution. Returns the number of bytes consumed.
std::size_t ImgParamsParser::GetFromData_v1_0(
    const std::uint8_t *data, const std::uint16_t &data_size,
    img_params_t *img_params) const {
  std::size_t i = 0;
  auto in_left = std::make_shared<IntrinsicsPinhole>();
  auto in_right = std::make_shared<IntrinsicsPinhole>();
  Extrinsics ex_right_to_left;
  i += bytes::from_data(in_left.get(), data + i, true);
  i += bytes::from_data(in_right.get(), data + i, true);
  i += bytes::from_data(&ex_right_to_left, data + i);
  (*img_params)[{752, 480}] = {true, spec_version_.to_string(),
      in_left, in_right, ex_right_to_left};
  MYNTEYE_UNUSED(data_size)
  return i;
}

// Encodes the legacy v1.0 layout; mirror of GetFromData_v1_0 plus the
// FID_IMG_PARAMS id + big-endian 16-bit size header at data[0..2].
// NOTE: writes normally go through SetToData_new (see SetToData).
std::size_t ImgParamsParser::SetToData_v1_0(
    const img_params_t *img_params, std::uint8_t *data) const {
  std::size_t i = 3;  // skip id, size
  auto params = (*img_params).at({752, 480});
  i += bytes::to_data(params.in_left.get(), data + i, true);
  i += bytes::to_data(params.in_right.get(), data + i, true);
  i += bytes::to_data(&params.ex_right_to_left, data + i);
  // others
  std::size_t size = i - 3;
  data[0] = FID_IMG_PARAMS;
  data[1] = static_cast<std::uint8_t>((size >> 8) & 0xFF);
  data[2] = static_cast<std::uint8_t>(size & 0xFF);
  return size + 3;
}
// Decodes the legacy S210A (spec v1.1) layout: pinhole intrinsics for the
// 1280x400 pair, then the 2560x800 pair, then a single right-to-left
// extrinsic transform shared by both resolutions.
// Returns the number of bytes consumed.
std::size_t ImgParamsParser::GetFromData_v1_1(
    const std::uint8_t *data, const std::uint16_t &data_size,
    img_params_t *img_params) const {
  std::size_t i = 0;
  Extrinsics ex_right_to_left;
  {
    auto in_left = std::make_shared<IntrinsicsPinhole>();
    auto in_right = std::make_shared<IntrinsicsPinhole>();
    i += bytes::from_data(in_left.get(), data + i, true);
    i += bytes::from_data(in_right.get(), data + i, true);
    (*img_params)[{1280, 400}] = {true, spec_version_.to_string(),
        in_left, in_right, ex_right_to_left};
  }
  {
    auto in_left = std::make_shared<IntrinsicsPinhole>();
    auto in_right = std::make_shared<IntrinsicsPinhole>();
    i += bytes::from_data(in_left.get(), data + i, true);
    i += bytes::from_data(in_right.get(), data + i, true);
    (*img_params)[{2560, 800}] = {true, spec_version_.to_string(),
        in_left, in_right, ex_right_to_left};
  }
  {
    // The extrinsics come last; patch the entries inserted above, which
    // were built with a default-constructed transform.
    i += bytes::from_data(&ex_right_to_left, data + i);
    (*img_params)[{1280, 400}].ex_right_to_left = ex_right_to_left;
    (*img_params)[{2560, 800}].ex_right_to_left = ex_right_to_left;
  }
  MYNTEYE_UNUSED(data_size)
  return i;
}

// Encodes the legacy v1.1 layout; mirror of GetFromData_v1_1 plus the
// FID_IMG_PARAMS id + big-endian 16-bit size header at data[0..2].
// NOTE: writes normally go through SetToData_new (see SetToData).
std::size_t ImgParamsParser::SetToData_v1_1(
    const img_params_t *img_params, std::uint8_t *data) const {
  std::size_t i = 3;  // skip id, size
  {
    auto params = (*img_params).at({1280, 400});
    i += bytes::to_data(params.in_left.get(), data + i, true);
    i += bytes::to_data(params.in_right.get(), data + i, true);
  }
  {
    auto params = (*img_params).at({2560, 800});
    i += bytes::to_data(params.in_left.get(), data + i, true);
    i += bytes::to_data(params.in_right.get(), data + i, true);
    i += bytes::to_data(&params.ex_right_to_left, data + i);
  }
  // others
  std::size_t size = i - 3;
  data[0] = FID_IMG_PARAMS;
  data[1] = static_cast<std::uint8_t>((size >> 8) & 0xFF);
  data[2] = static_cast<std::uint8_t>(size & 0xFF);
  return size + 3;
}
// Decodes the versioned (v1.2) format: a 2-byte version header, then a
// sequence of per-resolution records, each [calib_model u8][width u16]
// [height u16] followed by left/right intrinsics (without embedded size)
// and a right-to-left extrinsic transform. Records are consumed until
// data_size is exhausted. Unknown versions or calib models abort via
// LOG(FATAL). Returns the number of bytes consumed.
std::size_t ImgParamsParser::GetFromData_new(
    const std::uint8_t *data, const std::uint16_t &data_size,
    img_params_t *img_params) const {
  std::size_t i = 0;
  // version, 2
  Version version(data[i], data[i + 1]);
  i += 2;
  // get img params according to version
  if (version == Version(1, 2)) {  // v1.2
    for (; i < data_size;) {
      // calib_model, 1
      auto model = data[i];
      auto calib_model = static_cast<CalibrationModel>(model);
      i += 1;
      // width, 2
      auto width = bytes::_from_data<std::uint16_t>(data + i);
      i += 2;
      // height, 2
      auto height = bytes::_from_data<std::uint16_t>(data + i);
      i += 2;
      // calib_params: instantiate the intrinsics type matching the model
      std::shared_ptr<IntrinsicsBase> in_left, in_right;
      Extrinsics ex_right_to_left;
      switch (calib_model) {
        case CalibrationModel::PINHOLE: {
          auto in_left_p = std::make_shared<IntrinsicsPinhole>();
          in_left_p->model = model;
          in_left = in_left_p;
          auto in_right_p = std::make_shared<IntrinsicsPinhole>();
          in_right_p->model = model;
          in_right = in_right_p;
        } break;
        case CalibrationModel::KANNALA_BRANDT: {
          in_left = std::make_shared<IntrinsicsEquidistant>();
          in_right = std::make_shared<IntrinsicsEquidistant>();
        } break;
        default:
          LOG(FATAL) << "Could not get img params as unknown calib model"
              ", please use latest SDK.";
      }
      // false: width/height/model live in the record header, not the body
      i += bytes::from_data(in_left.get(), data + i, false);
      i += bytes::from_data(in_right.get(), data + i, false);
      i += bytes::from_data(&ex_right_to_left, data + i);
      in_left->width = width;
      in_left->height = height;
      in_right->width = width;
      in_right->height = height;
      (*img_params)[{width, height}] = {true, version.to_string(),
          in_left, in_right, ex_right_to_left};
    }
  } else {
    LOG(FATAL) << "Could not get img params of version "
        << version.to_string() << ", please use latest SDK.";
  }
  MYNTEYE_UNUSED(data_size)
  return i;
}
// Encodes all stored resolutions in the current versioned (v1.2) format;
// mirror of GetFromData_new plus the FID_IMG_PARAMS id + big-endian 16-bit
// size header at data[0..2]. Aborts via LOG(FATAL) if the stored params
// claim a newer version than this SDK writes. Returns total bytes written,
// or 0 when there is nothing to write.
std::size_t ImgParamsParser::SetToData_new(
    const img_params_t *img_params, std::uint8_t *data) const {
  if (img_params->empty()) {
    return 0;
  }
  Version version_new(1, 2);  // new version
  Version version_raw(img_params->begin()->second.version);
  std::size_t i = 3;  // skip id, size
  // version, 2
  data[i] = version_new.major();
  data[i + 1] = version_new.minor();
  i += 2;
  // set img params with new version format
  if (version_raw <= version_new) {
    for (auto &&entry : *img_params) {
      auto &&params = entry.second;
      // calib_model, 1
      data[i] = static_cast<std::uint8_t>(params.in_left->calib_model());
      i += 1;
      // width, 2
      bytes::_to_data(params.in_left->width, data + i);
      i += 2;
      // height, 2
      bytes::_to_data(params.in_left->height, data + i);
      i += 2;
      // calib_params (false: size/model already written in the record header)
      i += bytes::to_data(params.in_left.get(), data + i, false);
      i += bytes::to_data(params.in_right.get(), data + i, false);
      i += bytes::to_data(&params.ex_right_to_left, data + i);
    }
  } else {
    LOG(FATAL) << "Could not set img params of version "
        << version_raw.to_string() << ", please use latest SDK.";
  }
  // others
  std::size_t size = i - 3;
  data[0] = FID_IMG_PARAMS;
  data[1] = static_cast<std::uint8_t>((size >> 8) & 0xFF);
  data[2] = static_cast<std::uint8_t>(size & 0xFF);
  return size + 3;
}
// ImuParamsParser
// Default-constructed parser; spec_version_ is set later via SetSpecVersion().
ImuParamsParser::ImuParamsParser() {
}
// Trivial destructor; the parser owns no resources.
ImuParamsParser::~ImuParamsParser() {
}
// Parse imu params from the device's file-channel blob, dispatching on
// the on-chip format.
//
// Legacy firmwares (spec 1.0 on S1030, spec 1.1 on S210A) store imu
// params as a fixed 384-byte blob without a version prefix; any other
// combination is expected to carry the newer versioned layout.
//
// @param data       raw bytes read from the device
// @param data_size  number of valid bytes in `data`
// @param imu_params output params, filled from `data`
// @return number of bytes consumed
std::size_t ImuParamsParser::GetFromData(
    const std::uint8_t *data, const std::uint16_t &data_size,
    imu_params_t *imu_params) const {
  // s1030 (spec 1.0) and s210a (spec 1.1) old params share the same
  // fixed-size unversioned layout, so the two duplicated checks are merged.
  if ((spec_version_ == Version(1, 0) || spec_version_ == Version(1, 1)) &&
      data_size == 384) {
    return GetFromData_old(data, data_size, imu_params);
  }
  // otherwise get imu params with the new version format
  return GetFromData_new(data, data_size, imu_params);
}
// Serialize imu params; always written in the new versioned format so the
// on-chip data is readable regardless of the device's original spec version.
std::size_t ImuParamsParser::SetToData(
    const imu_params_t *imu_params, std::uint8_t *data) const {
  // always set imu params with new version format
  return SetToData_new(imu_params, data);
}
// Parse the legacy fixed-layout (unversioned) imu params blob:
// accel intrinsics, gyro intrinsics, then left-to-imu extrinsics.
// Returns bytes consumed.
std::size_t ImuParamsParser::GetFromData_old(
    const std::uint8_t *data, const std::uint16_t &data_size,
    imu_params_t *imu_params) const {
  std::size_t offset = 0;
  offset += bytes::from_data(&imu_params->in_accel, data + offset);
  offset += bytes::from_data(&imu_params->in_gyro, data + offset);
  offset += bytes::from_data(&imu_params->ex_left_to_imu, data + offset);
  // legacy blobs carry no version field; tag with the device's spec version
  imu_params->version = spec_version_.to_string();
  MYNTEYE_UNUSED(data_size)
  return offset;
}
// Serialize imu params in the legacy (unversioned) layout:
// [id(1)][size(2)][accel][gyro][left-to-imu extrinsics].
// Returns total bytes written (payload + 3-byte header).
std::size_t ImuParamsParser::SetToData_old(
    const imu_params_t *imu_params, std::uint8_t *data) const {
  std::size_t offset = 3;  // header (id + 2-byte size) is written last
  offset += bytes::to_data(&imu_params->in_accel, data + offset);
  offset += bytes::to_data(&imu_params->in_gyro, data + offset);
  offset += bytes::to_data(&imu_params->ex_left_to_imu, data + offset);
  // backfill the header now that the payload size is known
  const std::size_t payload = offset - 3;
  data[0] = FID_IMU_PARAMS;
  data[1] = static_cast<std::uint8_t>((payload >> 8) & 0xFF);
  data[2] = static_cast<std::uint8_t>(payload & 0xFF);
  return payload + 3;
}
std::size_t ImuParamsParser::GetFromData_new(
const std::uint8_t *data, const std::uint16_t &data_size,
imu_params_t *imu_params) const {
std::size_t i = 0;
// version, 2
Version version(data[i], data[i + 1]);
imu_params->version = version.to_string();
i += 2;
// get imu params according to version
if (version == Version(1, 2)) { // v1.2
i += bytes::from_data(&imu_params->in_accel, data + i);
i += bytes::from_data(&imu_params->in_gyro, data + i);
i += bytes::from_data(&imu_params->ex_left_to_imu, data + i);
} else {
LOG(FATAL) << "Could not get imu params of version "
<< version.to_string() << ", please use latest SDK.";
}
MYNTEYE_UNUSED(data_size)
return i;
}
std::size_t ImuParamsParser::SetToData_new(
const imu_params_t *imu_params, std::uint8_t *data) const {
std::size_t i = 3; // skip id, size
Version version_new(1, 2); // new version
Version version_raw(imu_params->version);
// version, 2
data[i] = version_new.major();
data[i + 1] = version_new.minor();
i += 2;
// set imu params with new version format
if (version_raw <= version_new) {
i += bytes::to_data(&imu_params->in_accel, data + i);
i += bytes::to_data(&imu_params->in_gyro, data + i);
i += bytes::to_data(&imu_params->ex_left_to_imu, data + i);
} else {
LOG(FATAL) << "Could not set imu params of version "
<< version_raw.to_string() << ", please use latest SDK.";
}
// others
std::size_t size = i - 3;
data[0] = FID_IMU_PARAMS;
data[1] = static_cast<std::uint8_t>((size >> 8) & 0xFF);
data[2] = static_cast<std::uint8_t>(size & 0xFF);
return size + 3;
}
MYNTEYE_END_NAMESPACE

View File

@@ -1,153 +0,0 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_DEVICE_CHANNEL_FILE_CHANNEL_H_
#define MYNTEYE_DEVICE_CHANNEL_FILE_CHANNEL_H_
#pragma once
#include <map>
#include <memory>
#include "mynteye/mynteye.h"
#include "mynteye/device/device.h"
#include "mynteye/device/channel/bytes.h"
MYNTEYE_BEGIN_NAMESPACE
class DeviceInfoParser;
class ImgParamsParser;
class ImuParamsParser;
// Facade over the per-section parsers for the device's on-chip file data:
// device info, image calibration params and imu calibration params.
class FileChannel {
 public:
  using device_info_t = DeviceInfo;
  using img_params_t = std::map<Resolution, device::img_params_t>;
  using imu_params_t = device::imu_params_t;
  FileChannel();
  ~FileChannel();
  // Decode device info from raw file data; returns bytes consumed.
  std::size_t GetDeviceInfoFromData(
      const std::uint8_t *data, const std::uint16_t &data_size,
      device_info_t *info);
  // Encode device info into `data`; returns bytes written.
  std::size_t SetDeviceInfoToData(
      const device_info_t *info, std::uint8_t *data);
  // Decode image params (per resolution) from raw file data.
  std::size_t GetImgParamsFromData(
      const std::uint8_t *data, const std::uint16_t &data_size,
      img_params_t *img_params);
  // Encode image params into `data`; returns bytes written.
  std::size_t SetImgParamsToData(
      const img_params_t *img_params, std::uint8_t *data);
  // Decode imu params from raw file data.
  std::size_t GetImuParamsFromData(
      const std::uint8_t *data, const std::uint16_t &data_size,
      imu_params_t *imu_params);
  // Encode imu params into `data`; returns bytes written.
  std::size_t SetImuParamsToData(
      const imu_params_t *imu_params, std::uint8_t *data);
 private:
  std::shared_ptr<DeviceInfoParser> dev_info_parser_;
  std::shared_ptr<ImgParamsParser> img_params_parser_;
  std::shared_ptr<ImuParamsParser> imu_params_parser_;
};
// Parses/serializes the device-info section of the on-chip file data.
class DeviceInfoParser {
 public:
  using device_info_t = FileChannel::device_info_t;
  DeviceInfoParser();
  ~DeviceInfoParser();
  // Decode device info from `data`; returns bytes consumed.
  std::size_t GetFromData(
      const std::uint8_t *data, const std::uint16_t &data_size,
      device_info_t *info) const;
  // Encode device info into `data`; returns bytes written.
  std::size_t SetToData(
      const device_info_t *info, std::uint8_t *data) const;
};
// Parses/serializes the image-calibration section. The on-chip format
// evolved with the device spec version (v1.0, v1.1, then a versioned
// "new" layout); spec_version_ selects the legacy decoder on read.
class ImgParamsParser {
 public:
  using img_params_t = FileChannel::img_params_t;
  ImgParamsParser();
  ~ImgParamsParser();
  // Device spec version, used to pick the legacy format when parsing.
  void SetSpecVersion(const Version& spec_version) {
    spec_version_ = spec_version;
  }
  // Dispatching entry points; return bytes consumed/written.
  std::size_t GetFromData(
      const std::uint8_t *data, const std::uint16_t &data_size,
      img_params_t *img_params) const;
  std::size_t SetToData(
      const img_params_t *img_params, std::uint8_t *data) const;
  // Spec v1.0 layout.
  std::size_t GetFromData_v1_0(
      const std::uint8_t *data, const std::uint16_t &data_size,
      img_params_t *img_params) const;
  std::size_t SetToData_v1_0(
      const img_params_t *img_params, std::uint8_t *data) const;
  // Spec v1.1 layout.
  std::size_t GetFromData_v1_1(
      const std::uint8_t *data, const std::uint16_t &data_size,
      img_params_t *img_params) const;
  std::size_t SetToData_v1_1(
      const img_params_t *img_params, std::uint8_t *data) const;
  // Current self-describing (versioned) layout.
  std::size_t GetFromData_new(
      const std::uint8_t *data, const std::uint16_t &data_size,
      img_params_t *img_params) const;
  std::size_t SetToData_new(
      const img_params_t *img_params, std::uint8_t *data) const;
 private:
  Version spec_version_;
};
// Parses/serializes the imu-calibration section. Old firmwares store a
// fixed unversioned blob; newer ones use a versioned layout.
class ImuParamsParser {
 public:
  using imu_params_t = FileChannel::imu_params_t;
  ImuParamsParser();
  ~ImuParamsParser();
  // Device spec version, used to recognize the legacy format when parsing.
  void SetSpecVersion(const Version& spec_version) {
    spec_version_ = spec_version;
  }
  // Dispatching entry points; return bytes consumed/written.
  std::size_t GetFromData(
      const std::uint8_t *data, const std::uint16_t &data_size,
      imu_params_t *imu_params) const;
  std::size_t SetToData(
      const imu_params_t *imu_params, std::uint8_t *data) const;
  // Legacy fixed-layout (unversioned) format.
  std::size_t GetFromData_old(
      const std::uint8_t *data, const std::uint16_t &data_size,
      imu_params_t *imu_params) const;
  std::size_t SetToData_old(
      const imu_params_t *imu_params, std::uint8_t *data) const;
  // Current versioned format.
  std::size_t GetFromData_new(
      const std::uint8_t *data, const std::uint16_t &data_size,
      imu_params_t *imu_params) const;
  std::size_t SetToData_new(
      const imu_params_t *imu_params, std::uint8_t *data) const;
 private:
  Version spec_version_;
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_DEVICE_CHANNEL_FILE_CHANNEL_H_

View File

@@ -11,7 +11,7 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/device/channel/channels.h"
#include "mynteye/device/channels.h"
#include <bitset>
#include <chrono>
@@ -19,25 +19,25 @@
#include <iterator>
#include <sstream>
#include <stdexcept>
#include <string>
#include <vector>
#include "mynteye/device/config.h"
#include "mynteye/logger.h"
#include "mynteye/util/strings.h"
#include "mynteye/util/times.h"
#include "mynteye/logger.h"
#define IMU_TRACK_PERIOD 25 // ms
MYNTEYE_BEGIN_NAMESPACE
namespace {
const uvc::xu mynteye_xu = {3, 2,
{
0x947a6d9f, 0x8a2f, 0x418d,
{0x85, 0x9e, 0x6c, 0x9a, 0xa0, 0x38, 0x10, 0x14}
}
};
const uvc::xu mynteye_xu = {3,
2,
{0x947a6d9f,
0x8a2f,
0x418d,
{0x85, 0x9e, 0x6c, 0x9a, 0xa0, 0x38, 0x10, 0x14}}};
int XuCamCtrlId(Option option) {
switch (option) {
@@ -98,16 +98,34 @@ int XuHalfDuplexId(Option option) {
}
}
// Abort (LOG(FATAL)) unless *spec_version is one of the supported specs
// (currently 1.0 and 1.1); also aborts on a null pointer.
void CheckSpecVersion(const Version *spec_version) {
  if (spec_version == nullptr) {
    LOG(FATAL) << "Spec version must be specified";
  }
  std::vector<std::string> spec_versions{"1.0", "1.1"};
  for (auto &&spec_ver : spec_versions) {
    if (*spec_version == Version(spec_ver)) {
      return;  // supported
    }
  }
  // build "1.0,1.1," for the error message
  std::ostringstream ss;
  std::copy(
      spec_versions.begin(), spec_versions.end(),
      std::ostream_iterator<std::string>(ss, ","));
  LOG(FATAL) << "Spec version " << spec_version->to_string()
             << " not supported, must in [" << ss.str() << "]";
}
} // namespace
Channels::Channels(const std::shared_ptr<uvc::device> &device,
const std::shared_ptr<ChannelsAdapter> &adapter)
: device_(device),
adapter_(adapter),
is_imu_tracking_(false),
imu_track_stop_(false),
imu_sn_(0),
imu_callback_(nullptr) {
Channels::Channels(std::shared_ptr<uvc::device> device)
: device_(device),
is_imu_tracking_(false),
imu_track_stop_(false),
imu_sn_(0),
imu_callback_(nullptr) {
VLOG(2) << __func__;
UpdateControlInfos();
}
@@ -117,14 +135,6 @@ Channels::~Channels() {
StopImuTracking();
}
std::int32_t Channels::GetAccelRangeDefault() {
return adapter_->GetAccelRangeDefault();
}
std::int32_t Channels::GetGyroRangeDefault() {
return adapter_->GetGyroRangeDefault();
}
void Channels::LogControlInfos() const {
for (auto &&it = control_infos_.begin(); it != control_infos_.end(); it++) {
LOG(INFO) << it->first << ": min=" << it->second.min
@@ -133,33 +143,29 @@ void Channels::LogControlInfos() const {
}
}
// TODO(Kalman): Compatible with two generation
void Channels::UpdateControlInfos() {
auto &&supports = adapter_->GetOptionSupports();
for (auto &&option : std::vector<Option>{
Option::GAIN, Option::BRIGHTNESS, Option::CONTRAST}) {
if (supports.find(option) != supports.end())
control_infos_[option] = PuControlInfo(option);
}
for (auto &&option : std::vector<Option>{
Option::FRAME_RATE, Option::IMU_FREQUENCY,
Option::EXPOSURE_MODE, Option::MAX_GAIN,
Option::MAX_EXPOSURE_TIME, Option::MIN_EXPOSURE_TIME,
Option::DESIRED_BRIGHTNESS, Option::IR_CONTROL,
Option::HDR_MODE, Option::ACCELEROMETER_RANGE,
Option::GYROSCOPE_RANGE, Option::ACCELEROMETER_LOW_PASS_FILTER,
Option::GYROSCOPE_LOW_PASS_FILTER}) {
if (supports.find(option) != supports.end())
control_infos_[option] = XuControlInfo(option);
}
if (VLOG_IS_ON(2)) {
for (auto &&it = control_infos_.begin(); it != control_infos_.end(); it++) {
VLOG(2) << it->first << ": min=" << it->second.min
<< ", max=" << it->second.max << ", def=" << it->second.def
<< ", cur=" << GetControlValue(it->first);
for (auto &&option : std::vector<Option>{Option::BRIGHTNESS}) {
control_infos_[option] = PuControlInfo(option);
}
for (auto &&option : std::vector<Option>{
Option::EXPOSURE_MODE, Option::DESIRED_BRIGHTNESS,
Option::MAX_GAIN, Option::MAX_EXPOSURE_TIME,
Option::MIN_EXPOSURE_TIME, Option::ACCELEROMETER_RANGE,
Option::GYROSCOPE_RANGE, Option::ACCELEROMETER_LOW_PASS_FILTER,
Option::GYROSCOPE_LOW_PASS_FILTER}) {
control_infos_[option] = XuControlInfo(option);
}
if (VLOG_IS_ON(2)) {
for (auto &&it = control_infos_.begin(); it != control_infos_.end();
it++) {
VLOG(2) << it->first << ": min=" << it->second.min
<< ", max=" << it->second.max << ", def=" << it->second.def
<< ", cur=" << GetControlValue(it->first);
}
}
}
}
Channels::control_info_t Channels::GetControlInfo(const Option &option) const {
@@ -252,12 +258,12 @@ void Channels::SetControlValue(const Option &option, std::int32_t value) {
XuCamCtrlSet(option, value);
} break;
case Option::ACCELEROMETER_RANGE: {
if (!in_range() || !in_values(adapter_->GetAccelRangeValues()))
if (!in_range() || !in_values({6, 12, 24, 48}))
break;
XuCamCtrlSet(option, value);
} break;
case Option::GYROSCOPE_RANGE: {
if (!in_range() || !in_values(adapter_->GetGyroRangeValues()))
if (!in_range() || !in_values({250, 500, 1000, 2000, 4000}))
break;
XuCamCtrlSet(option, value);
} break;
@@ -354,7 +360,7 @@ void Channels::DoImuTrack() {
return n;
}();
auto &&sn = res_packet.packets.back().serial_number;
auto &&sn = res_packet.packets.back().segments.back().serial_number;
if (imu_sn_ == sn) {
VLOG(2) << "New imu not ready, dropped";
return;
@@ -411,8 +417,203 @@ void Channels::StopImuTracking() {
}
}
namespace {
// Decode a big-endian unsigned integer of type T from `data`.
// The byte is widened to T before shifting so that types wider than int
// (e.g. std::uint64_t) are not truncated by implicit int promotion.
template <typename T>
T _from_data(const std::uint8_t *data) {
  std::size_t size = sizeof(T) / sizeof(std::uint8_t);
  T value = 0;
  for (std::size_t i = 0; i < size; i++) {
    value |= static_cast<T>(data[i]) << (8 * (size - i - 1));
  }
  return value;
}
// Decode a native-endian double from `data`.
// Copies the bytes into a local double instead of dereferencing
// reinterpret_cast<const double *>(data), which violated strict aliasing
// and assumed `data` is suitably aligned for double.
template <>
double _from_data(const std::uint8_t *data) {
  double value;
  std::copy(data, data + sizeof(double),
            reinterpret_cast<std::uint8_t *>(&value));
  return value;
}
// Read a fixed-width `count`-byte field as text, stripping the space
// padding with strings::trim.
std::string _from_data(const std::uint8_t *data, std::size_t count) {
  std::string s(reinterpret_cast<const char *>(data), count);
  strings::trim(s);
  return s;
}
// Decode DeviceInfo from its fixed on-chip layout (field widths inline);
// returns bytes consumed. The leading vid/pid (4 bytes) are skipped.
std::size_t from_data(Channels::device_info_t *info, const std::uint8_t *data) {
  std::size_t i = 4;  // skip vid, pid
  // name, 16
  info->name = _from_data(data + i, 16);
  i += 16;
  // serial_number, 16
  info->serial_number = _from_data(data + i, 16);
  i += 16;
  // firmware_version, 2
  info->firmware_version.set_major(data[i]);
  info->firmware_version.set_minor(data[i + 1]);
  i += 2;
  // hardware_version, 3
  info->hardware_version.set_major(data[i]);
  info->hardware_version.set_minor(data[i + 1]);
  info->hardware_version.set_flag(std::bitset<8>(data[i + 2]));
  i += 3;
  // spec_version, 2
  info->spec_version.set_major(data[i]);
  info->spec_version.set_minor(data[i + 1]);
  i += 2;
  // lens_type, 4
  info->lens_type.set_vendor(_from_data<std::uint16_t>(data + i));
  info->lens_type.set_product(_from_data<std::uint16_t>(data + i + 2));
  i += 4;
  // imu_type, 4
  info->imu_type.set_vendor(_from_data<std::uint16_t>(data + i));
  info->imu_type.set_product(_from_data<std::uint16_t>(data + i + 2));
  i += 4;
  // nominal_baseline, 2
  info->nominal_baseline = _from_data<std::uint16_t>(data + i);
  i += 2;
  return i;
}
// Decode camera intrinsics (spec 1.x layout): width, height, fx, fy, cx,
// cy, model byte, then 5 distortion coeffs. Returns bytes consumed.
std::size_t from_data(
    Intrinsics *in, const std::uint8_t *data, const Version *spec_version) {
  std::size_t i = 0;
  // width, 2
  in->width = _from_data<std::uint16_t>(data + i);
  i += 2;
  // height, 2
  in->height = _from_data<std::uint16_t>(data + i);
  i += 2;
  // fx, 8
  in->fx = _from_data<double>(data + i);
  i += 8;
  // fy, 8
  in->fy = _from_data<double>(data + i);
  i += 8;
  // cx, 8
  in->cx = _from_data<double>(data + i);
  i += 8;
  // cy, 8
  in->cy = _from_data<double>(data + i);
  i += 8;
  // model, 1
  in->model = data[i];
  i += 1;
  // coeffs, 40
  for (std::size_t j = 0; j < 5; j++) {
    in->coeffs[j] = _from_data<double>(data + i + j * 8);
  }
  i += 40;
  MYNTEYE_UNUSED(spec_version)
  return i;
}
// Decode imu intrinsics: 3x3 scale matrix, then drift/noise/bias vectors
// (3 doubles each). Returns bytes consumed (144).
std::size_t from_data(
    ImuIntrinsics *in, const std::uint8_t *data, const Version *spec_version) {
  std::size_t i = 0;
  // scale
  for (std::size_t j = 0; j < 3; j++) {
    for (std::size_t k = 0; k < 3; k++) {
      in->scale[j][k] = _from_data<double>(data + i + (j * 3 + k) * 8);
    }
  }
  i += 72;
  // drift
  for (std::size_t j = 0; j < 3; j++) {
    in->drift[j] = _from_data<double>(data + i + j * 8);
  }
  i += 24;
  // noise
  for (std::size_t j = 0; j < 3; j++) {
    in->noise[j] = _from_data<double>(data + i + j * 8);
  }
  i += 24;
  // bias
  for (std::size_t j = 0; j < 3; j++) {
    in->bias[j] = _from_data<double>(data + i + j * 8);
  }
  i += 24;
  MYNTEYE_UNUSED(spec_version)
  return i;
}
// Decode extrinsics: 3x3 rotation matrix then translation vector
// (doubles). Returns bytes consumed (96).
std::size_t from_data(
    Extrinsics *ex, const std::uint8_t *data, const Version *spec_version) {
  std::size_t i = 0;
  // rotation
  for (std::size_t j = 0; j < 3; j++) {
    for (std::size_t k = 0; k < 3; k++) {
      ex->rotation[j][k] = _from_data<double>(data + i + (j * 3 + k) * 8);
    }
  }
  i += 72;
  // translation
  for (std::size_t j = 0; j < 3; j++) {
    ex->translation[j] = _from_data<double>(data + i + j * 8);
  }
  i += 24;
  MYNTEYE_UNUSED(spec_version)
  return i;
}
// TODO(Kalman): Is there a more elegant way?
// Decode image params; the set of resolutions present depends on the
// spec version (1.0: 752x480; 1.1: 1280x400 and 2560x800), followed by
// the right-to-left extrinsics. Returns bytes consumed.
std::size_t from_data(
    device::img_params_t *img_params, const std::uint8_t *data,
    const Version *spec_version) {
  std::size_t i = 0;
  if (spec_version->major() == 1) {
    if (spec_version->minor() == 0) {
      i += from_data(
          &img_params->in_left_map[Resolution::RES_752x480], data + i,
          spec_version);
      i += from_data(
          &img_params->in_right_map[Resolution::RES_752x480], data + i,
          spec_version);
    }
    if (spec_version->minor() == 1) {
      i += from_data(
          &img_params->in_left_map[Resolution::RES_1280x400], data + i,
          spec_version);
      i += from_data(
          &img_params->in_right_map[Resolution::RES_1280x400], data + i,
          spec_version);
      i += from_data(
          &img_params->in_left_map[Resolution::RES_2560x800], data + i,
          spec_version);
      i += from_data(
          &img_params->in_right_map[Resolution::RES_2560x800], data + i,
          spec_version);
    }
  }
  i += from_data(&img_params->ex_right_to_left, data + i, spec_version);
  return i;
}
// Decode imu params: accel intrinsics, gyro intrinsics, then
// left-to-imu extrinsics. Returns bytes consumed.
std::size_t from_data(
    device::imu_params_t *imu_params, const std::uint8_t *data,
    const Version *spec_version) {
  std::size_t i = 0;
  i += from_data(&imu_params->in_accel, data + i, spec_version);
  i += from_data(&imu_params->in_gyro, data + i, spec_version);
  i += from_data(&imu_params->ex_left_to_imu, data + i, spec_version);
  return i;
}
} // namespace
bool Channels::GetFiles(
device_info_t *info, img_params_t *img_params, imu_params_t *imu_params) {
device_info_t *info, img_params_t *img_params, imu_params_t *imu_params,
Version *spec_version) const {
if (info == nullptr && img_params == nullptr && imu_params == nullptr) {
LOG(WARNING) << "Files are not provided to get";
return false;
@@ -437,7 +638,7 @@ bool Channels::GetFiles(
if (XuFileQuery(uvc::XU_QUERY_GET, 2000, data)) {
// header = std::bitset<8>(data[0]);
std::uint16_t size = bytes::_from_data<std::uint16_t>(data + 1);
std::uint16_t size = _from_data<std::uint16_t>(data + 1);
std::uint8_t checksum = data[3 + size];
VLOG(2) << "GetFiles data size: " << size << ", checksum: 0x" << std::hex
<< std::setw(2) << std::setfill('0') << static_cast<int>(checksum);
@@ -455,35 +656,37 @@ bool Channels::GetFiles(
return false;
}
Version *spec_ver = spec_version;
std::size_t i = 3;
std::size_t end = 3 + size;
while (i < end) {
std::uint8_t file_id = *(data + i);
std::uint16_t file_size = bytes::_from_data<std::uint16_t>(data + i + 1);
std::uint16_t file_size = _from_data<std::uint16_t>(data + i + 1);
VLOG(2) << "GetFiles id: " << static_cast<int>(file_id)
<< ", size: " << file_size;
i += 3;
switch (file_id) {
case FID_DEVICE_INFO: {
auto &&n = file_channel_.GetDeviceInfoFromData(
data + i, file_size, info);
CHECK_EQ(n, file_size)
CHECK_EQ(from_data(info, data + i), file_size)
<< "The firmware not support getting device info, you could "
"upgrade to latest";
spec_ver = &info->spec_version;
CheckSpecVersion(spec_ver);
} break;
case FID_IMG_PARAMS: {
if (file_size > 0) {
auto &&n = file_channel_.GetImgParamsFromData(
data + i, file_size, img_params);
CHECK_EQ(n, file_size);
img_params->ok = file_size > 0;
if (img_params->ok) {
CheckSpecVersion(spec_ver);
from_data(img_params, data + i, spec_ver);
// Considering the upgrade, comment this
// CHECK_EQ(from_data(img_params, data + i, spec_ver), file_size);
}
} break;
case FID_IMU_PARAMS: {
imu_params->ok = file_size > 0;
if (imu_params->ok) {
auto &&n = file_channel_.GetImuParamsFromData(
data + i, file_size, imu_params);
CHECK_EQ(n, file_size);
CheckSpecVersion(spec_ver);
CHECK_EQ(from_data(imu_params, data + i, spec_ver), file_size);
}
} break;
default:
@@ -500,12 +703,236 @@ bool Channels::GetFiles(
}
}
namespace {
// Encode `value` into `data` in big-endian byte order; returns the
// number of bytes written (sizeof(T)).
template <typename T>
std::size_t _to_data(T value, std::uint8_t *data) {
  const std::size_t size = sizeof(T) / sizeof(std::uint8_t);
  for (std::size_t k = 0; k < size; k++) {
    const std::size_t shift = 8 * (size - k - 1);
    data[k] = static_cast<std::uint8_t>((value >> shift) & 0xFF);
  }
  return size;
}
// Encode a double by copying its native 8-byte object representation.
template <>
std::size_t _to_data(double value, std::uint8_t *data) {
  const std::uint8_t *raw = reinterpret_cast<const std::uint8_t *>(&value);
  std::copy(raw, raw + 8, data);
  return 8;
}
// Write `value` into a fixed-width field of `count` bytes, left-justified
// and space-padded. Input longer than `count` is now truncated to the
// field width; the original copied the whole string and overran the field.
std::size_t _to_data(std::string value, std::uint8_t *data, std::size_t count) {
  const std::size_t n = std::min(value.size(), count);
  std::copy(value.begin(), value.begin() + n, data);
  std::fill(data + n, data + count, ' ');
  return count;
}
// Encode DeviceInfo into `data` after a 3-byte header slot; the header
// (file id + 2-byte payload size) is backfilled at the end. Returns
// total bytes written including the header.
std::size_t to_data(
    const Channels::device_info_t *info, std::uint8_t *data,
    const Version *spec_version) {
  std::size_t i = 3;  // skip id, size
  i += 4;  // skip vid, pid
  // name, 16
  _to_data(info->name, data + i, 16);
  i += 16;
  // serial_number, 16
  _to_data(info->serial_number, data + i, 16);
  i += 16;
  // firmware_version, 2
  data[i] = info->firmware_version.major();
  data[i + 1] = info->firmware_version.minor();
  i += 2;
  // hardware_version, 3
  data[i] = info->hardware_version.major();
  data[i + 1] = info->hardware_version.minor();
  data[i + 2] =
      static_cast<std::uint8_t>(info->hardware_version.flag().to_ulong());
  i += 3;
  // spec_version, 2
  data[i] = info->spec_version.major();
  data[i + 1] = info->spec_version.minor();
  i += 2;
  // lens_type, 4
  _to_data(info->lens_type.vendor(), data + i);
  _to_data(info->lens_type.product(), data + i + 2);
  i += 4;
  // imu_type, 4
  _to_data(info->imu_type.vendor(), data + i);
  _to_data(info->imu_type.product(), data + i + 2);
  i += 4;
  // nominal_baseline, 2
  _to_data(info->nominal_baseline, data + i);
  i += 2;
  MYNTEYE_UNUSED(spec_version)
  // others
  std::size_t size = i - 3;
  data[0] = Channels::FID_DEVICE_INFO;
  data[1] = static_cast<std::uint8_t>((size >> 8) & 0xFF);
  data[2] = static_cast<std::uint8_t>(size & 0xFF);
  return size + 3;
}
// Encode camera intrinsics (mirror of the from_data layout): width,
// height, fx, fy, cx, cy, model byte, 5 coeffs. Returns bytes written.
std::size_t to_data(
    const Intrinsics *in, std::uint8_t *data, const Version *spec_version) {
  std::size_t i = 0;
  // width, 2
  _to_data(in->width, data + i);
  i += 2;
  // height, 2
  _to_data(in->height, data + i);
  i += 2;
  // fx, 8
  _to_data(in->fx, data + i);
  i += 8;
  // fy, 8
  _to_data(in->fy, data + i);
  i += 8;
  // cx, 8
  _to_data(in->cx, data + i);
  i += 8;
  // cy, 8
  _to_data(in->cy, data + i);
  i += 8;
  // model, 1
  data[i] = in->model;
  i += 1;
  // coeffs, 40
  for (std::size_t j = 0; j < 5; j++) {
    _to_data(in->coeffs[j], data + i + j * 8);
  }
  i += 40;
  MYNTEYE_UNUSED(spec_version)
  return i;
}
// Encode imu intrinsics: 3x3 scale matrix, then drift/noise/bias vectors
// (3 doubles each). Returns bytes written (144).
std::size_t to_data(
    const ImuIntrinsics *in, std::uint8_t *data, const Version *spec_version) {
  std::size_t i = 0;
  // scale
  for (std::size_t j = 0; j < 3; j++) {
    for (std::size_t k = 0; k < 3; k++) {
      _to_data(in->scale[j][k], data + i + (j * 3 + k) * 8);
    }
  }
  i += 72;
  // drift
  for (std::size_t j = 0; j < 3; j++) {
    _to_data(in->drift[j], data + i + j * 8);
  }
  i += 24;
  // noise
  for (std::size_t j = 0; j < 3; j++) {
    _to_data(in->noise[j], data + i + j * 8);
  }
  i += 24;
  // bias
  for (std::size_t j = 0; j < 3; j++) {
    _to_data(in->bias[j], data + i + j * 8);
  }
  i += 24;
  MYNTEYE_UNUSED(spec_version)
  return i;
}
// Encode extrinsics: 3x3 rotation matrix then translation vector.
// Returns bytes written (96).
std::size_t to_data(
    const Extrinsics *ex, std::uint8_t *data, const Version *spec_version) {
  std::size_t i = 0;
  // rotation
  for (std::size_t j = 0; j < 3; j++) {
    for (std::size_t k = 0; k < 3; k++) {
      _to_data(ex->rotation[j][k], data + i + (j * 3 + k) * 8);
    }
  }
  i += 72;
  // translation
  for (std::size_t j = 0; j < 3; j++) {
    _to_data(ex->translation[j], data + i + j * 8);
  }
  i += 24;
  MYNTEYE_UNUSED(spec_version)
  return i;
}
// Encode image params for the resolutions mandated by the spec version
// (1.0: 752x480; 1.1: 1280x400 and 2560x800), then the right-to-left
// extrinsics, and backfill the 3-byte header (file id + payload size).
// NOTE(review): .at() throws if a required resolution is missing from
// the maps — presumably guaranteed by the caller; verify upstream.
std::size_t to_data(
    const device::img_params_t *img_params, std::uint8_t *data,
    const Version *spec_version) {
  std::size_t i = 3;  // skip id, size
  if (spec_version->major() == 1) {
    if (spec_version->minor() == 0) {
      i += to_data(
          &img_params->in_left_map.at(Resolution::RES_752x480), data + i,
          spec_version);
      i += to_data(
          &img_params->in_right_map.at(Resolution::RES_752x480), data + i,
          spec_version);
    }
    if (spec_version->minor() == 1) {
      i += to_data(
          &img_params->in_left_map.at(Resolution::RES_1280x400), data + i,
          spec_version);
      i += to_data(
          &img_params->in_right_map.at(Resolution::RES_1280x400), data + i,
          spec_version);
      i += to_data(
          &img_params->in_left_map.at(Resolution::RES_2560x800), data + i,
          spec_version);
      i += to_data(
          &img_params->in_right_map.at(Resolution::RES_2560x800), data + i,
          spec_version);
    }
  }
  i += to_data(&img_params->ex_right_to_left, data + i, spec_version);
  // others
  std::size_t size = i - 3;
  data[0] = Channels::FID_IMG_PARAMS;
  data[1] = static_cast<std::uint8_t>((size >> 8) & 0xFF);
  data[2] = static_cast<std::uint8_t>(size & 0xFF);
  return size + 3;
}
// Encode imu params (accel, gyro, left-to-imu extrinsics) and backfill
// the 3-byte header. Returns total bytes written including the header.
std::size_t to_data(
    const Channels::imu_params_t *imu_params, std::uint8_t *data,
    const Version *spec_version) {
  std::size_t i = 3;  // skip id, size
  i += to_data(&imu_params->in_accel, data + i, spec_version);
  i += to_data(&imu_params->in_gyro, data + i, spec_version);
  i += to_data(&imu_params->ex_left_to_imu, data + i, spec_version);
  // others
  std::size_t size = i - 3;
  data[0] = Channels::FID_IMU_PARAMS;
  data[1] = static_cast<std::uint8_t>((size >> 8) & 0xFF);
  data[2] = static_cast<std::uint8_t>(size & 0xFF);
  return size + 3;
}
} // namespace
bool Channels::SetFiles(
device_info_t *info, img_params_t *img_params, imu_params_t *imu_params) {
device_info_t *info, img_params_t *img_params, imu_params_t *imu_params,
Version *spec_version) {
if (info == nullptr && img_params == nullptr && imu_params == nullptr) {
LOG(WARNING) << "Files are not provided to set";
return false;
}
Version *spec_ver = spec_version;
if (spec_ver == nullptr && info != nullptr) {
spec_ver = &info->spec_version;
}
CheckSpecVersion(spec_ver);
std::uint8_t data[2000]{};
@@ -514,49 +941,16 @@ bool Channels::SetFiles(
std::uint16_t size = 0;
if (info != nullptr) {
auto n = file_channel_.SetDeviceInfoToData(info, data + 3 + size);
if (n > 0) {
header[0] = true;
size += n;
}
header[0] = true;
size += to_data(info, data + 3 + size, spec_ver);
}
if (img_params != nullptr) {
// remove not supported resolution
auto&& res = adapter_->GetResolutionSupports();
for (auto it = img_params->begin(); it != img_params->end(); ) {
if (res.find(it->first) == res.end()) {
LOG(WARNING) << "Image params of resolution "
<< it->first.width << "x" << it->first.height << " not supported";
it = img_params->erase(it);
} else {
++it;
}
}
if (img_params->empty()) {
std::ostringstream os;
os << "Image params resolution must be ";
for (auto&& r : res) {
os << r.width << "x" << r.height << " ";
}
LOG(WARNING) << os.str();
} else {
auto n = file_channel_.SetImgParamsToData(img_params, data + 3 + size);
if (n > 0) {
header[1] = true;
size += n;
}
}
header[1] = true;
size += to_data(img_params, data + 3 + size, spec_ver);
}
if (imu_params != nullptr) {
auto n = file_channel_.SetImuParamsToData(imu_params, data + 3 + size);
if (n > 0) {
header[2] = true;
size += n;
}
}
if (size + 3 > 2000) {
LOG(FATAL) << "SetFiles failed, data is too large: " << (size + 3);
header[2] = true;
size += to_data(imu_params, data + 3 + size, spec_ver);
}
data[0] = static_cast<std::uint8_t>(header.to_ulong());
@@ -680,7 +1074,7 @@ bool Channels::XuImuRead(ImuResPacket *res) const {
static std::uint8_t data[2000]{};
// std::fill(data, data + 2000, 0); // reset
if (XuControlQuery(CHANNEL_IMU_READ, uvc::XU_QUERY_GET, 2000, data)) {
adapter_->GetImuResPacket(data, res);
res->from_data(data);
if (res->header != 0x5B) {
LOG(WARNING) << "Imu response packet header must be 0x5B, but 0x"
@@ -741,34 +1135,4 @@ Channels::control_info_t Channels::XuControlInfo(Option option) const {
return {min, max, def};
}
// ChannelsAdapter
// Adapter binding a Channels instance to a concrete device model.
ChannelsAdapter::ChannelsAdapter(const Model &model)
    : model_(model) {
}
ChannelsAdapter::~ChannelsAdapter() {
}
// Options supported by this model, from the static support map.
std::set<Option> ChannelsAdapter::GetOptionSupports() {
  return option_supports_map.at(model_);
}
// Resolutions supported by this model, derived from its stream requests.
// For STEREO_COLOR the request width covers both eyes in one frame
// (presumably side-by-side), so the per-eye width is request width / 2.
std::set<Resolution> ChannelsAdapter::GetResolutionSupports() {
  std::set<Resolution> res;
  auto requests_map = stream_requests_map.at(model_);
  for (auto&& r_map : requests_map) {
    if (r_map.first == Capabilities::STEREO) {
      for (auto&& r : r_map.second) {
        res.insert({r.width, r.height});
      }
    } else if (r_map.first == Capabilities::STEREO_COLOR) {
      for (auto&& r : r_map.second) {
        res.insert({static_cast<std::uint16_t>(r.width / 2), r.height});
      }
    }
  }
  return res;
}
MYNTEYE_END_NAMESPACE

View File

@@ -11,19 +11,16 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_DEVICE_CHANNEL_CHANNELS_H_
#define MYNTEYE_DEVICE_CHANNEL_CHANNELS_H_
#ifndef MYNTEYE_DEVICE_CHANNELS_H_
#define MYNTEYE_DEVICE_CHANNELS_H_
#pragma once
#include <map>
#include <memory>
#include <set>
#include <thread>
#include <vector>
#include "mynteye/mynteye.h"
#include "mynteye/device/channel/def.h"
#include "mynteye/device/channel/file_channel.h"
#include "mynteye/types.h"
#include "mynteye/device/device.h"
#include "mynteye/device/types.h"
#include "mynteye/uvc/uvc.h"
@@ -37,10 +34,17 @@ struct xu;
} // namespace uvc
class ChannelsAdapter;
class MYNTEYE_API Channels {
public:
typedef enum Channel {
CHANNEL_CAM_CTRL = 1,
CHANNEL_HALF_DUPLEX = 2,
CHANNEL_IMU_WRITE = 3,
CHANNEL_IMU_READ = 4,
CHANNEL_FILE = 5,
CHANNEL_LAST
} channel_t;
typedef struct ControlInfo {
std::int32_t min;
std::int32_t max;
@@ -53,19 +57,37 @@ class MYNTEYE_API Channels {
XU_CMD_LAST
} xu_cmd_t;
typedef enum FileId {
FID_DEVICE_INFO = 1, // device info
FID_IMG_PARAMS = 2, // image intrinsics & extrinsics
FID_IMU_PARAMS = 4, // imu intrinsics & extrinsics
FID_LAST,
} file_id_t;
using imu_callback_t = std::function<void(const ImuPacket &packet)>;
using device_info_t = FileChannel::device_info_t;
using img_params_t = FileChannel::img_params_t;
using imu_params_t = FileChannel::imu_params_t;
using device_info_t = DeviceInfo;
Channels(const std::shared_ptr<uvc::device> &device,
const std::shared_ptr<ChannelsAdapter> &adapter);
using imu_params_t = device::imu_params_t;
using img_params_t = device::img_params_t;
/*
typedef struct ImgParams {
bool ok;
std::map<Resolution, Intrinsics> in_left_map;
std::map<Resolution, Intrinsics> in_right_map;
Extrinsics ex_right_to_left;
} img_params_t;
typedef struct ImuParams {
bool ok;
ImuIntrinsics in_accel;
ImuIntrinsics in_gyro;
Extrinsics ex_left_to_imu;
} imu_params_t;
*/
explicit Channels(std::shared_ptr<uvc::device> device);
~Channels();
std::int32_t GetAccelRangeDefault();
std::int32_t GetGyroRangeDefault();
void LogControlInfos() const;
void UpdateControlInfos();
control_info_t GetControlInfo(const Option &option) const;
@@ -82,9 +104,11 @@ class MYNTEYE_API Channels {
void StopImuTracking();
bool GetFiles(
device_info_t *info, img_params_t *img_params, imu_params_t *imu_params);
device_info_t *info, img_params_t *img_params, imu_params_t *imu_params,
Version *spec_version = nullptr) const;
bool SetFiles(
device_info_t *info, img_params_t *img_params, imu_params_t *imu_params);
device_info_t *info, img_params_t *img_params, imu_params_t *imu_params,
Version *spec_version = nullptr);
private:
bool PuControlRange(
@@ -120,9 +144,6 @@ class MYNTEYE_API Channels {
control_info_t XuControlInfo(Option option) const;
std::shared_ptr<uvc::device> device_;
std::shared_ptr<ChannelsAdapter> adapter_;
FileChannel file_channel_;
std::map<Option, control_info_t> control_infos_;
@@ -134,26 +155,6 @@ class MYNTEYE_API Channels {
imu_callback_t imu_callback_;
};
class ChannelsAdapter {
public:
explicit ChannelsAdapter(const Model &model);
virtual ~ChannelsAdapter();
virtual std::set<Option> GetOptionSupports();
virtual std::set<Resolution> GetResolutionSupports();
virtual std::int32_t GetAccelRangeDefault() = 0;
virtual std::vector<std::int32_t> GetAccelRangeValues() = 0;
virtual std::int32_t GetGyroRangeDefault() = 0;
virtual std::vector<std::int32_t> GetGyroRangeValues() = 0;
virtual void GetImuResPacket(const std::uint8_t *data, ImuResPacket *res) = 0;
protected:
Model model_;
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_DEVICE_CHANNEL_CHANNELS_H_
#endif // MYNTEYE_DEVICE_CHANNELS_H_

View File

@@ -16,76 +16,39 @@
MYNTEYE_BEGIN_NAMESPACE
const std::map<Model, StreamSupports> stream_supports_map = {
{Model::STANDARD, {Stream::LEFT, Stream::RIGHT}},
{Model::STANDARD2, {Stream::LEFT, Stream::RIGHT}},
{Model::STANDARD210A, {Stream::LEFT, Stream::RIGHT}}
};
{Model::STANDARD, {Stream::LEFT, Stream::RIGHT}}};
const std::map<Model, CapabilitiesSupports> capabilities_supports_map = {
{Model::STANDARD, {Capabilities::STEREO, Capabilities::IMU}},
{Model::STANDARD2, {Capabilities::STEREO_COLOR, Capabilities::IMU}},
{Model::STANDARD210A, {Capabilities::STEREO_COLOR, Capabilities::IMU}}
};
{Model::STANDARD, {Capabilities::STEREO_COLOR, Capabilities::IMU}}};
// TODO(Kalman): Compatible with two generation
const std::map<Model, OptionSupports> option_supports_map = {
{Model::STANDARD, {
Option::GAIN, Option::BRIGHTNESS, Option::CONTRAST,
Option::FRAME_RATE, Option::IMU_FREQUENCY,
Option::EXPOSURE_MODE, Option::MAX_GAIN, Option::MAX_EXPOSURE_TIME,
Option::DESIRED_BRIGHTNESS,
Option::IR_CONTROL,
Option::HDR_MODE,
Option::ACCELEROMETER_RANGE, Option::GYROSCOPE_RANGE,
Option::ZERO_DRIFT_CALIBRATION,
Option::ERASE_CHIP}
},
{Model::STANDARD2, {
Option::BRIGHTNESS,
Option::EXPOSURE_MODE, Option::MAX_GAIN, Option::MAX_EXPOSURE_TIME,
Option::IR_CONTROL, Option::MIN_EXPOSURE_TIME,
Option::DESIRED_BRIGHTNESS, Option::ACCELEROMETER_RANGE,
Option::GYROSCOPE_RANGE, Option::ACCELEROMETER_LOW_PASS_FILTER,
Option::GYROSCOPE_LOW_PASS_FILTER, Option::ERASE_CHIP}
},
{Model::STANDARD210A, {
Option::BRIGHTNESS,
Option::EXPOSURE_MODE, Option::MAX_GAIN, Option::MAX_EXPOSURE_TIME,
Option::MIN_EXPOSURE_TIME, Option::DESIRED_BRIGHTNESS,
Option::ACCELEROMETER_RANGE, Option::GYROSCOPE_RANGE,
Option::ACCELEROMETER_LOW_PASS_FILTER, Option::GYROSCOPE_LOW_PASS_FILTER,
Option::ERASE_CHIP}
}
};
{Model::STANDARD,
{Option::BRIGHTNESS, Option::EXPOSURE_MODE, Option::MAX_GAIN,
Option::MAX_EXPOSURE_TIME, Option::DESIRED_BRIGHTNESS,
Option::MIN_EXPOSURE_TIME, Option::ERASE_CHIP,
Option::ACCELEROMETER_RANGE, Option::GYROSCOPE_RANGE,
Option::ACCELEROMETER_LOW_PASS_FILTER,
Option::GYROSCOPE_LOW_PASS_FILTER}}};
const std::map<Model, std::map<Capabilities, StreamRequests>>
stream_requests_map = {
{Model::STANDARD,
{{Capabilities::STEREO, {
{752, 480, Format::YUYV, 60}}
}}
},
{Model::STANDARD2,
{{Capabilities::STEREO_COLOR, {
{1280, 400, Format::YUYV, 10},
{1280, 400, Format::YUYV, 20},
{1280, 400, Format::YUYV, 30},
{1280, 400, Format::YUYV, 60},
{2560, 800, Format::YUYV, 10},
{2560, 800, Format::YUYV, 20},
{2560, 800, Format::YUYV, 30}}
}}
},
{Model::STANDARD210A,
{{Capabilities::STEREO_COLOR, {
{1280, 400, Format::BGR888, 10},
{1280, 400, Format::BGR888, 20},
{1280, 400, Format::BGR888, 30},
{1280, 400, Format::BGR888, 60},
{2560, 800, Format::BGR888, 10},
{2560, 800, Format::BGR888, 20},
{2560, 800, Format::BGR888, 30}}
}}
}
};
stream_requests_map = {
{Model::STANDARD,
{{Capabilities::STEREO, {{480, 752, Format::YUYV, 25}}},
{Capabilities::STEREO_COLOR,
{// {1280, 400, Format::YUYV, 10},
// {1280, 400, Format::YUYV, 20},
// {1280, 400, Format::YUYV, 30},
// {1280, 400, Format::YUYV, 60},
// {2560, 800, Format::YUYV, 10},
// {2560, 800, Format::YUYV, 20},
// {2560, 800, Format::YUYV, 30},
{1280, 400, Format::BGR888, 10},
{1280, 400, Format::BGR888, 20},
{1280, 400, Format::BGR888, 30},
{1280, 400, Format::BGR888, 60},
{2560, 800, Format::BGR888, 10},
{2560, 800, Format::BGR888, 20},
{2560, 800, Format::BGR888, 30}}}}}};
MYNTEYE_END_NAMESPACE

View File

@@ -27,13 +27,18 @@ MYNTEYE_BEGIN_NAMESPACE
using StreamSupports = std::set<Stream>;
using CapabilitiesSupports = std::set<Capabilities>;
using OptionSupports = std::set<Option>;
using ResolutionSupports = std::set<Resolution>;
using FrameRateSupports = std::set<FrameRate>;
extern const std::map<Model, StreamSupports> stream_supports_map;
extern const std::map<Model, CapabilitiesSupports> capabilities_supports_map;
extern const std::map<Model, OptionSupports> option_supports_map;
extern const std::map<Model, ResolutionSupports> resolution_supports_map;
using StreamRequests = std::vector<StreamRequest>;
extern const std::map<Model, std::map<Resolution, FrameRateSupports>>
framerate_Supports_supports_map;
extern const std::map<Model, std::map<Capabilities, StreamRequests>>
stream_requests_map;

View File

@@ -21,12 +21,10 @@
#include "mynteye/logger.h"
#include "mynteye/device/async_callback.h"
#include "mynteye/device/channel/channels.h"
#include "mynteye/device/channels.h"
#include "mynteye/device/config.h"
#include "mynteye/device/device_s.h"
#include "mynteye/device/motions.h"
#include "mynteye/device/standard/device_s.h"
#include "mynteye/device/standard2/device_s2.h"
#include "mynteye/device/standard2/device_s210a.h"
#include "mynteye/device/streams.h"
#include "mynteye/device/types.h"
#include "mynteye/util/strings.h"
@@ -80,17 +78,14 @@ bool CheckSupports(
} // namespace
Device::Device(const Model &model,
const std::shared_ptr<uvc::device> &device,
const std::shared_ptr<StreamsAdapter> &streams_adapter,
const std::shared_ptr<ChannelsAdapter> &channels_adapter)
: video_streaming_(false),
motion_tracking_(false),
model_(model),
device_(device),
streams_(std::make_shared<Streams>(streams_adapter)),
channels_(std::make_shared<Channels>(device_, channels_adapter)),
motions_(std::make_shared<Motions>(channels_)) {
Device::Device(const Model &model, std::shared_ptr<uvc::device> device)
: video_streaming_(false),
motion_tracking_(false),
model_(model),
device_(device),
streams_(nullptr),
channels_(std::make_shared<Channels>(device)),
motions_(std::make_shared<Motions>(channels_)) {
VLOG(2) << __func__;
ReadAllInfos();
}
@@ -109,18 +104,13 @@ std::shared_ptr<Device> Device::Create(
VLOG(2) << "MYNE EYE Model: " << model_s;
DeviceModel model(model_s);
if (model.type == 'S') {
if (model.generation == '1') {
return std::make_shared<StandardDevice>(device);
} else if (model.generation == '2') {
if (model.custom_code == '0') {
return std::make_shared<Standard2Device>(device);
} else if (model.custom_code == 'A') {
return std::make_shared<Standard210aDevice>(device);
} else {
LOG(FATAL) << "No such custom code now";
}
} else {
LOG(FATAL) << "No such generation now";
switch (model.generation) {
case '1':
return std::make_shared<StandardDevice>(device);
case '2':
return std::make_shared<StandardDevice>(device);
default:
LOG(FATAL) << "No such generation now";
}
} else {
LOG(FATAL) << "MYNT EYE model is not supported now";
@@ -182,33 +172,6 @@ void Device::ConfigStreamRequest(
return;
}
stream_config_requests_[capability] = request;
UpdateStreamIntrinsics(capability, request);
}
const StreamRequest &Device::GetStreamRequest(
const Capabilities &capability) const {
try {
return stream_config_requests_.at(capability);
} catch (const std::out_of_range &e) {
auto &&requests = GetStreamRequests(capability);
if (requests.size() >= 1) {
return requests[0];
} else {
LOG(FATAL) << "Please config the stream request of " << capability;
}
}
}
const std::vector<StreamRequest> &Device::GetStreamRequests() const {
return GetStreamRequests(GetKeyStreamCapability());
}
void Device::ConfigStreamRequest(const StreamRequest &request) {
ConfigStreamRequest(GetKeyStreamCapability(), request);
}
const StreamRequest &Device::GetStreamRequest() const {
return GetStreamRequest(GetKeyStreamCapability());
}
std::shared_ptr<DeviceInfo> Device::GetInfo() const {
@@ -240,8 +203,7 @@ std::string Device::GetInfo(const Info &info) const {
}
}
std::shared_ptr<IntrinsicsBase> Device::GetIntrinsics(
const Stream &stream) const {
Intrinsics Device::GetIntrinsics(const Stream &stream) const {
bool ok;
return GetIntrinsics(stream, &ok);
}
@@ -261,8 +223,7 @@ Extrinsics Device::GetMotionExtrinsics(const Stream &from) const {
return GetMotionExtrinsics(from, &ok);
}
std::shared_ptr<IntrinsicsBase> Device::GetIntrinsics(
const Stream &stream, bool *ok) const {
Intrinsics Device::GetIntrinsics(const Stream &stream, bool *ok) const {
try {
*ok = true;
return stream_intrinsics_.at(stream);
@@ -313,8 +274,7 @@ Extrinsics Device::GetMotionExtrinsics(const Stream &from, bool *ok) const {
}
}
void Device::SetIntrinsics(const Stream &stream,
const std::shared_ptr<IntrinsicsBase> &in) {
void Device::SetIntrinsics(const Stream &stream, const Intrinsics &in) {
stream_intrinsics_[stream] = in;
}
@@ -446,18 +406,6 @@ void Device::WaitForStreams() {
streams_->WaitForStreams();
}
device::StreamData Device::GetStreamData(const Stream &stream) {
CHECK(video_streaming_);
CHECK_NOTNULL(streams_);
CheckSupports(this, stream);
std::lock_guard<std::mutex> _(mtx_streams_);
return streams_->GetLatestStreamData(stream);
}
device::StreamData Device::GetLatestStreamData(const Stream &stream) {
return GetStreamData(stream);
}
std::vector<device::StreamData> Device::GetStreamDatas(const Stream &stream) {
CHECK(video_streaming_);
CHECK_NOTNULL(streams_);
@@ -466,6 +414,14 @@ std::vector<device::StreamData> Device::GetStreamDatas(const Stream &stream) {
return streams_->GetStreamDatas(stream);
}
device::StreamData Device::GetLatestStreamData(const Stream &stream) {
CHECK(video_streaming_);
CHECK_NOTNULL(streams_);
CheckSupports(this, stream);
std::lock_guard<std::mutex> _(mtx_streams_);
return streams_->GetLatestStreamData(stream);
}
void Device::EnableMotionDatas() {
EnableMotionDatas(std::numeric_limits<std::size_t>::max());
}
@@ -481,58 +437,84 @@ std::vector<device::MotionData> Device::GetMotionDatas() {
return motions_->GetMotionDatas();
}
void Device::InitResolution(const Resolution &res) {
res_ = res;
ConfigIntrinsics(res_);
}
void Device::SetStreamRequest(const Format &format, const FrameRate &rate) {
StreamRequest request(res_, format, rate);
request_ = request;
}
const StreamRequest &Device::GetStreamRequest(const Capabilities &capability) {
try {
return stream_config_requests_.at(capability);
} catch (const std::out_of_range &e) {
auto &&requests = GetStreamRequests(capability);
if (requests.size() >= 1) {
for (auto &&request : requests) {
if (request == request_)
return request;
}
return requests[0];
} else {
LOG(FATAL) << "Please config the stream request of " << capability;
}
}
}
void Device::StartVideoStreaming() {
if (video_streaming_) {
LOG(WARNING) << "Cannot start video streaming without first stopping it";
return;
}
streams_ = std::make_shared<Streams>(GetKeyStreams());
// if stream capabilities are supported with subdevices of device_
/*
Capabilities stream_capabilities[] = {
Capabilities::STEREO, Capabilities::STEREO_COLOR,
Capabilities::COLOR, Capabilities::DEPTH,
Capabilities::STEREO, Capabilities::COLOR,
Capabilities::STEREO_COLOR, Capabilities::DEPTH,
Capabilities::POINTS, Capabilities::FISHEYE,
Capabilities::INFRARED, Capabilities::INFRARED2};
for (auto &&capability : stream_capabilities) {
}
*/
auto &&stream_cap = GetKeyStreamCapability();
if (Supports(stream_cap)) {
// do stream request selection if more than one request of each stream
auto &&stream_request = GetStreamRequest(stream_cap);
streams_->ConfigStream(stream_cap, stream_request);
if (Supports(capability)) {
// do stream request selection if more than one request of each stream
auto &&stream_request = GetStreamRequest(capability);
uvc::set_device_mode(
*device_, stream_request.width, stream_request.height,
static_cast<int>(stream_request.format), stream_request.fps,
[this, stream_cap](
const void *data, std::function<void()> continuation) {
// drop the first stereo stream data
static std::uint8_t drop_count = 1;
if (drop_count > 0) {
--drop_count;
continuation();
return;
}
// auto &&time_beg = times::now();
{
std::lock_guard<std::mutex> _(mtx_streams_);
if (streams_->PushStream(stream_cap, data)) {
CallbackPushedStreamData(Stream::LEFT);
CallbackPushedStreamData(Stream::RIGHT);
streams_->ConfigStream(capability, stream_request);
uvc::set_device_mode(
*device_, stream_request.width, stream_request.height,
static_cast<int>(stream_request.format), stream_request.fps,
[this, capability](
const void *data, std::function<void()> continuation) {
// drop the first stereo stream data
static std::uint8_t drop_count = 1;
if (drop_count > 0) {
--drop_count;
continuation();
return;
}
}
continuation();
OnStereoStreamUpdate();
// VLOG(2) << "Stereo video callback cost "
// << times::count<times::milliseconds>(times::now() - time_beg)
// << " ms";
});
} else {
LOG(FATAL) << "Not any stream capabilities are supported by this device";
// auto &&time_beg = times::now();
{
std::lock_guard<std::mutex> _(mtx_streams_);
if (streams_->PushStream(capability, data)) {
CallbackPushedStreamData(Stream::LEFT);
CallbackPushedStreamData(Stream::RIGHT);
}
}
continuation();
OnStereoStreamUpdate();
// VLOG(2) << "Stereo video callback cost "
// << times::count<times::milliseconds>(times::now() - time_beg)
// << " ms";
});
} else {
// LOG(FATAL) << "Not any stream capabilities are supported by this
// device";
}
}
uvc::start_streaming(*device_, 0);
video_streaming_ = true;
}
@@ -575,9 +557,8 @@ void Device::ReadAllInfos() {
device_info_ = std::make_shared<DeviceInfo>();
CHECK_NOTNULL(channels_);
all_img_params_.clear();
Device::imu_params_t imu_params;
if (!channels_->GetFiles(device_info_.get(), &all_img_params_, &imu_params)) {
if (!channels_->GetFiles(device_info_.get(), &img_params_, &imu_params)) {
#if defined(WITH_DEVICE_INFO_REQUIRED)
LOG(FATAL)
#else
@@ -598,66 +579,30 @@ void Device::ReadAllInfos() {
<< ", nominal_baseline: " << device_info_->nominal_baseline << "}";
device_info_->name = uvc::get_name(*device_);
bool img_params_ok = false;
for (auto &&params : all_img_params_) {
auto &&img_params = params.second;
if (img_params.ok) {
img_params_ok = true;
SetIntrinsics(Stream::LEFT, img_params.in_left);
SetIntrinsics(Stream::RIGHT, img_params.in_right);
SetExtrinsics(Stream::LEFT, Stream::RIGHT, img_params.ex_right_to_left);
VLOG(2) << "Intrinsics left: {" << GetIntrinsics(Stream::LEFT) << "}";
VLOG(2) << "Intrinsics right: {" << GetIntrinsics(Stream::RIGHT) << "}";
VLOG(2) << "Extrinsics left to right: {"
<< GetExtrinsics(Stream::LEFT, Stream::RIGHT) << "}";
break;
}
}
if (!img_params_ok) {
if (img_params_.ok) {
SetExtrinsics(Stream::LEFT, Stream::RIGHT, img_params_.ex_right_to_left);
VLOG(2) << "Extrinsics left to right: {"
<< GetExtrinsics(Stream::LEFT, Stream::RIGHT) << "}";
} else {
LOG(WARNING) << "Intrinsics & extrinsics not exist";
}
if (imu_params.ok) {
imu_params_ = imu_params;
SetMotionIntrinsics({imu_params.in_accel, imu_params.in_gyro});
SetMotionExtrinsics(Stream::LEFT, imu_params.ex_left_to_imu);
VLOG(2) << "Motion intrinsics: {" << GetMotionIntrinsics() << "}";
VLOG(2) << "Motion extrinsics left to imu: {"
<< GetMotionExtrinsics(Stream::LEFT) << "}";
} else {
imu_params_.ok = false;
VLOG(2) << "Motion intrinsics & extrinsics not exist";
}
}
void Device::UpdateStreamIntrinsics(
const Capabilities &capability, const StreamRequest &request) {
if (capability != GetKeyStreamCapability()) {
return;
}
for (auto &&params : all_img_params_) {
auto &&img_res = params.first;
auto &&img_params = params.second;
bool ok = false;
if (capability == Capabilities::STEREO_COLOR) {
ok = img_params.ok &&
img_res.height == request.GetResolution().height &&
img_res.width == request.GetResolution().width / 2;
} else if (capability == Capabilities::STEREO) {
ok = img_params.ok && img_res == request.GetResolution();
}
if (ok) {
SetIntrinsics(Stream::LEFT, img_params.in_left);
SetIntrinsics(Stream::RIGHT, img_params.in_right);
SetExtrinsics(Stream::LEFT, Stream::RIGHT, img_params.ex_right_to_left);
VLOG(2) << "Intrinsics left: {" << GetIntrinsics(Stream::LEFT) << "}";
VLOG(2) << "Intrinsics right: {" << GetIntrinsics(Stream::RIGHT) << "}";
VLOG(2) << "Extrinsics left to right: {"
<< GetExtrinsics(Stream::LEFT, Stream::RIGHT) << "}";
break;
}
void Device::ConfigIntrinsics(const Resolution &res) {
if (img_params_.ok) {
SetIntrinsics(Stream::LEFT, img_params_.in_left_map[res]);
SetIntrinsics(Stream::RIGHT, img_params_.in_right_map[res]);
VLOG(2) << "Intrinsics left: {" << GetIntrinsics(Stream::LEFT) << "}";
VLOG(2) << "Intrinsics right: {" << GetIntrinsics(Stream::RIGHT) << "}";
}
}
@@ -684,14 +629,8 @@ void Device::CallbackMotionData(const device::MotionData &data) {
}
}
bool Device::GetFiles(
DeviceInfo *info, img_params_map_t *img_params, imu_params_t *imu_params) {
return channels_->GetFiles(info, img_params, imu_params);
}
bool Device::SetFiles(
DeviceInfo *info, img_params_map_t *img_params, imu_params_t *imu_params) {
return channels_->SetFiles(info, img_params, imu_params);
Device::img_params_t Device::GetImgParams() {
return img_params_;
}
MYNTEYE_END_NAMESPACE

View File

@@ -11,19 +11,15 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/device/standard/device_s.h"
#include "mynteye/device/device_s.h"
#include "mynteye/logger.h"
#include "mynteye/device/motions.h"
#include "mynteye/device/standard/channels_adapter_s.h"
#include "mynteye/device/standard/streams_adapter_s.h"
MYNTEYE_BEGIN_NAMESPACE
StandardDevice::StandardDevice(std::shared_ptr<uvc::device> device)
: Device(Model::STANDARD, device,
std::make_shared<StandardStreamsAdapter>(),
std::make_shared<StandardChannelsAdapter>()) {
: Device(Model::STANDARD, device) {
VLOG(2) << __func__;
}
@@ -31,8 +27,8 @@ StandardDevice::~StandardDevice() {
VLOG(2) << __func__;
}
Capabilities StandardDevice::GetKeyStreamCapability() const {
return Capabilities::STEREO;
std::vector<Stream> StandardDevice::GetKeyStreams() const {
return {Stream::LEFT, Stream::RIGHT};
}
void StandardDevice::OnStereoStreamUpdate() {

View File

@@ -11,8 +11,8 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_DEVICE_STANDARD_DEVICE_S_H_
#define MYNTEYE_DEVICE_STANDARD_DEVICE_S_H_
#ifndef MYNTEYE_DEVICE_DEVICE_S_H_
#define MYNTEYE_DEVICE_DEVICE_S_H_
#pragma once
#include <memory>
@@ -27,11 +27,11 @@ class StandardDevice : public Device {
explicit StandardDevice(std::shared_ptr<uvc::device> device);
virtual ~StandardDevice();
Capabilities GetKeyStreamCapability() const override;
std::vector<Stream> GetKeyStreams() const override;
void OnStereoStreamUpdate() override;
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_DEVICE_STANDARD_DEVICE_S_H_
#endif // MYNTEYE_DEVICE_DEVICE_S_H_

View File

@@ -14,7 +14,7 @@
#include "mynteye/device/motions.h"
#include "mynteye/logger.h"
#include "mynteye/device/channel/channels.h"
#include "mynteye/device/channels.h"
MYNTEYE_BEGIN_NAMESPACE
@@ -36,11 +36,11 @@ void Motions::SetMotionCallback(motion_callback_t callback) {
if (motion_callback_) {
accel_range = channels_->GetControlValue(Option::ACCELEROMETER_RANGE);
if (accel_range == -1)
accel_range = channels_->GetAccelRangeDefault();
accel_range = 12;
gyro_range = channels_->GetControlValue(Option::GYROSCOPE_RANGE);
if (gyro_range == -1)
gyro_range = channels_->GetGyroRangeDefault();
gyro_range = 1000;
channels_->SetImuCallback([this](const ImuPacket &packet) {
if (!motion_callback_ && !motion_datas_enabled_) {
@@ -53,16 +53,28 @@ void Motions::SetMotionCallback(motion_callback_t callback) {
// static_cast<uint32_t>(-seg.offset) > packet.timestamp) {
// LOG(WARNING) << "Imu timestamp offset is incorrect";
// }
imu->frame_id = seg.frame_id;
imu->serial_number = seg.serial_number;
imu->timestamp = seg.timestamp;
imu->flag = seg.flag;
imu->temperature = seg.temperature / 326.8f + 25;
imu->accel[0] = seg.accel[0] * 1.f * accel_range / 0x10000;
imu->accel[1] = seg.accel[1] * 1.f * accel_range / 0x10000;
imu->accel[2] = seg.accel[2] * 1.f * accel_range / 0x10000;
imu->gyro[0] = seg.gyro[0] * 1.f * gyro_range / 0x10000;
imu->gyro[1] = seg.gyro[1] * 1.f * gyro_range / 0x10000;
imu->gyro[2] = seg.gyro[2] * 1.f * gyro_range / 0x10000;
if (imu->flag == 1) {
imu->accel[0] = seg.accel_or_gyro[0] * 1.f * accel_range / 0x10000;
imu->accel[1] = seg.accel_or_gyro[1] * 1.f * accel_range / 0x10000;
imu->accel[2] = seg.accel_or_gyro[2] * 1.f * accel_range / 0x10000;
imu->gyro[0] = 0;
imu->gyro[1] = 0;
imu->gyro[2] = 0;
} else if (imu->flag == 2) {
imu->accel[0] = 0;
imu->accel[1] = 0;
imu->accel[2] = 0;
imu->gyro[0] = seg.accel_or_gyro[0] * 1.f * gyro_range / 0x10000;
imu->gyro[1] = seg.accel_or_gyro[1] * 1.f * gyro_range / 0x10000;
imu->gyro[2] = seg.accel_or_gyro[2] * 1.f * gyro_range / 0x10000;
} else {
imu->Reset();
}
std::lock_guard<std::mutex> _(mtx_datas_);
motion_data_t data = {imu};

View File

@@ -58,8 +58,8 @@ class Motions {
std::mutex mtx_datas_;
int accel_range;
int gyro_range;
int accel_range = 12;
int gyro_range = 1000;
};
MYNTEYE_END_NAMESPACE

View File

@@ -1,125 +0,0 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/device/standard/channels_adapter_s.h"
#include "mynteye/logger.h"
MYNTEYE_BEGIN_NAMESPACE
namespace {
#pragma pack(push, 1)
struct ImuData {
std::int16_t offset;
std::uint16_t frame_id;
std::int16_t accel[3];
std::int16_t temperature;
std::int16_t gyro[3];
ImuData() = default;
explicit ImuData(const std::uint8_t *data) {
from_data(data);
}
void from_data(const std::uint8_t *data) {
offset = (*(data) << 8) | *(data + 1);
frame_id = (*(data + 2) << 8) | *(data + 3);
accel[0] = (*(data + 4) << 8) | *(data + 5);
accel[1] = (*(data + 6) << 8) | *(data + 7);
accel[2] = (*(data + 8) << 8) | *(data + 9);
temperature = (*(data + 10) << 8) | *(data + 11);
gyro[0] = (*(data + 12) << 8) | *(data + 13);
gyro[1] = (*(data + 14) << 8) | *(data + 15);
gyro[2] = (*(data + 16) << 8) | *(data + 17);
}
};
#pragma pack(pop)
void unpack_imu_segment(const ImuData &imu, const std::uint32_t &timestamp,
ImuSegment *seg) {
seg->frame_id = static_cast<uint32_t>(imu.frame_id);
seg->timestamp = static_cast<uint64_t>(timestamp + imu.offset) * 10;
seg->flag = 0;
seg->temperature = imu.temperature;
seg->accel[0] = imu.accel[0];
seg->accel[1] = imu.accel[1];
seg->accel[2] = imu.accel[2];
seg->gyro[0] = imu.gyro[0];
seg->gyro[1] = imu.gyro[1];
seg->gyro[2] = imu.gyro[2];
}
void unpack_imu_packet(const std::uint8_t *data, ImuPacket *pkg) {
pkg->serial_number =
(*(data) << 24) | (*(data + 1) << 16) |
(*(data + 2) << 8) | *(data + 3);
std::uint32_t timestamp =
(*(data + 4) << 24) | (*(data + 5) << 16)|
(*(data + 6) << 8) | *(data + 7);
pkg->count = *(data + 8);
std::size_t data_n = sizeof(ImuData); // 18
for (std::size_t i = 0; i < pkg->count; i++) {
ImuSegment seg;
unpack_imu_segment(ImuData(data + 9 + (data_n * i)), timestamp, &seg);
pkg->segments.push_back(seg);
}
}
void unpack_imu_res_packet(const std::uint8_t *data, ImuResPacket *res) {
res->header = *data;
res->state = *(data + 1);
res->size = (*(data + 2) << 8) | *(data + 3);
std::size_t data_n = sizeof(ImuData); // 18
for (std::size_t i = 4; i < res->size;) {
ImuPacket packet;
unpack_imu_packet(data + i, &packet);
res->packets.push_back(packet);
i += 9 + (packet.count * data_n);
}
res->checksum = *(data + 4 + res->size);
}
} // namespace
StandardChannelsAdapter::StandardChannelsAdapter()
: ChannelsAdapter(Model::STANDARD) {
}
StandardChannelsAdapter::~StandardChannelsAdapter() {
}
std::int32_t StandardChannelsAdapter::GetAccelRangeDefault() {
return 8;
}
std::vector<std::int32_t> StandardChannelsAdapter::GetAccelRangeValues() {
return {4, 8, 16, 32};
}
std::int32_t StandardChannelsAdapter::GetGyroRangeDefault() {
return 1000;
}
std::vector<std::int32_t> StandardChannelsAdapter::GetGyroRangeValues() {
return {500, 1000, 2000, 4000};
}
void StandardChannelsAdapter::GetImuResPacket(
const std::uint8_t *data, ImuResPacket *res) {
unpack_imu_res_packet(data, res);
}
MYNTEYE_END_NAMESPACE

View File

@@ -1,42 +0,0 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_DEVICE_STANDARD_CHANNELS_ADAPTER_S_H_
#define MYNTEYE_DEVICE_STANDARD_CHANNELS_ADAPTER_S_H_
#pragma once
#include <cstdint>
#include <set>
#include <vector>
#include "mynteye/device/channel/channels.h"
MYNTEYE_BEGIN_NAMESPACE
class StandardChannelsAdapter : public ChannelsAdapter {
public:
StandardChannelsAdapter();
virtual ~StandardChannelsAdapter();
std::int32_t GetAccelRangeDefault() override;
std::vector<std::int32_t> GetAccelRangeValues() override;
std::int32_t GetGyroRangeDefault() override;
std::vector<std::int32_t> GetGyroRangeValues() override;
void GetImuResPacket(const std::uint8_t *data, ImuResPacket *res) override;
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_DEVICE_STANDARD_CHANNELS_ADAPTER_S_H_

View File

@@ -1,163 +0,0 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/device/standard/streams_adapter_s.h"
#include <iomanip>
#include "mynteye/logger.h"
#include "mynteye/device/types.h"
MYNTEYE_BEGIN_NAMESPACE
namespace {
// image info
#pragma pack(push, 1)
struct ImagePacket {
std::uint8_t header;
std::uint8_t size;
std::uint16_t frame_id;
std::uint32_t timestamp;
std::uint16_t exposure_time;
std::uint8_t checksum;
ImagePacket() = default;
explicit ImagePacket(std::uint8_t *data) {
from_data(data);
}
void from_data(std::uint8_t *data) {
header = *data;
size = *(data + 1);
frame_id = (*(data + 2) << 8) | *(data + 3);
timestamp = (*(data + 4) << 24) | (*(data + 5) << 16) | (*(data + 6) << 8) |
*(data + 7);
exposure_time = (*(data + 8) << 8) | *(data + 9);
checksum = *(data + 10);
}
};
#pragma pack(pop)
bool unpack_stereo_img_data(
const void *data, const StreamRequest &request, ImgData *img) {
CHECK_NOTNULL(img);
auto data_new = reinterpret_cast<const std::uint8_t *>(data);
std::size_t data_n =
request.width * request.height * bytes_per_pixel(request.format);
auto data_end = data_new + data_n;
std::size_t packet_n = sizeof(ImagePacket);
std::vector<std::uint8_t> packet(packet_n);
std::reverse_copy(data_end - packet_n, data_end, packet.begin());
ImagePacket img_packet(packet.data());
// LOG(INFO) << "ImagePacket: header=0x" << std::hex <<
// static_cast<int>(img_packet.header)
// << ", size=0x" << std::hex << static_cast<int>(img_packet.size)
// << ", frame_id="<< std::dec << img_packet.frame_id
// << ", timestamp="<< std::dec << img_packet.timestamp
// << ", exposure_time="<< std::dec << img_packet.exposure_time
// << ", checksum=0x" << std::hex << static_cast<int>(img_packet.checksum);
if (img_packet.header != 0x3B) {
VLOG(2) << "Image packet header must be 0x3B, but 0x" << std::hex
<< std::uppercase << std::setw(2) << std::setfill('0')
<< static_cast<int>(img_packet.header) << " now";
return false;
}
std::uint8_t checksum = 0;
for (std::size_t i = 2, n = packet_n - 2; i <= n; i++) { // content: [2,9]
checksum = (checksum ^ packet[i]);
}
if (img_packet.checksum != checksum) {
VLOG(2) << "Image packet checksum should be 0x" << std::hex
<< std::uppercase << std::setw(2) << std::setfill('0')
<< static_cast<int>(img_packet.checksum) << ", but 0x"
<< std::setw(2) << std::setfill('0') << static_cast<int>(checksum)
<< " now";
return false;
}
img->frame_id = img_packet.frame_id;
// make timestamp unit from 10us to 1us
img->timestamp = static_cast<uint64_t>(img_packet.timestamp) * 10;
img->exposure_time = img_packet.exposure_time;
return true;
}
// image pixels
bool unpack_left_img_pixels(
const void *data, const StreamRequest &request, Streams::frame_t *frame) {
CHECK_NOTNULL(frame);
CHECK_EQ(request.format, Format::YUYV);
CHECK_EQ(frame->format(), Format::GREY);
auto data_new = reinterpret_cast<const std::uint8_t *>(data);
std::size_t n = frame->width() * frame->height();
for (std::size_t i = 0; i < n; i++) {
frame->data()[i] = *(data_new + (i * 2));
}
return true;
}
bool unpack_right_img_pixels(
const void *data, const StreamRequest &request, Streams::frame_t *frame) {
CHECK_NOTNULL(frame);
CHECK_EQ(request.format, Format::YUYV);
CHECK_EQ(frame->format(), Format::GREY);
auto data_new = reinterpret_cast<const std::uint8_t *>(data);
std::size_t n = frame->width() * frame->height();
for (std::size_t i = 0; i < n; i++) {
frame->data()[i] = *(data_new + (i * 2 + 1));
}
return true;
}
} // namespace
StandardStreamsAdapter::StandardStreamsAdapter() {
}
StandardStreamsAdapter::~StandardStreamsAdapter() {
}
std::vector<Stream> StandardStreamsAdapter::GetKeyStreams() {
return {Stream::LEFT, Stream::RIGHT};
}
std::vector<Capabilities> StandardStreamsAdapter::GetStreamCapabilities() {
return {Capabilities::STEREO};
}
std::map<Stream, Streams::unpack_img_data_t>
StandardStreamsAdapter::GetUnpackImgDataMap() {
return {
{Stream::LEFT, unpack_stereo_img_data},
{Stream::RIGHT, unpack_stereo_img_data}
};
}
std::map<Stream, Streams::unpack_img_pixels_t>
StandardStreamsAdapter::GetUnpackImgPixelsMap() {
return {
{Stream::LEFT, unpack_left_img_pixels},
{Stream::RIGHT, unpack_right_img_pixels}
};
}
MYNTEYE_END_NAMESPACE

View File

@@ -1,42 +0,0 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_DEVICE_STANDARD_STREAMS_ADAPTER_S_H_
#define MYNTEYE_DEVICE_STANDARD_STREAMS_ADAPTER_S_H_
#pragma once
#include <map>
#include <memory>
#include <vector>
#include "mynteye/device/streams.h"
MYNTEYE_BEGIN_NAMESPACE
class StandardStreamsAdapter : public StreamsAdapter {
public:
StandardStreamsAdapter();
virtual ~StandardStreamsAdapter();
std::vector<Stream> GetKeyStreams() override;
std::vector<Capabilities> GetStreamCapabilities() override;
std::map<Stream, Streams::unpack_img_data_t>
GetUnpackImgDataMap() override;
std::map<Stream, Streams::unpack_img_pixels_t>
GetUnpackImgPixelsMap() override;
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_DEVICE_STANDARD_STREAMS_ADAPTER_S_H_

View File

@@ -1,121 +0,0 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/device/standard2/channels_adapter_s2.h"
#include "mynteye/logger.h"
MYNTEYE_BEGIN_NAMESPACE
namespace {
#pragma pack(push, 1)
// One raw IMU sample exactly as laid out on the wire: 21 packed bytes,
// all multi-byte fields big-endian.
struct ImuData {
  std::uint32_t frame_id;
  std::uint64_t timestamp;
  std::uint8_t flag;
  std::int16_t temperature;
  std::int16_t accel_or_gyro[3];

  ImuData() = default;

  explicit ImuData(const std::uint8_t *data) {
    from_data(data);
  }

  // Reads a big-endian 16-bit value starting at p.
  static std::uint16_t read_be16(const std::uint8_t *p) {
    return static_cast<std::uint16_t>((p[0] << 8) | p[1]);
  }

  // Reads a big-endian 32-bit value starting at p.
  static std::uint32_t read_be32(const std::uint8_t *p) {
    return (static_cast<std::uint32_t>(p[0]) << 24) |
           (static_cast<std::uint32_t>(p[1]) << 16) |
           (static_cast<std::uint32_t>(p[2]) << 8) |
           static_cast<std::uint32_t>(p[3]);
  }

  // Decodes one sample from its 21-byte wire representation.
  void from_data(const std::uint8_t *data) {
    frame_id = read_be32(data);
    timestamp = (static_cast<std::uint64_t>(read_be32(data + 4)) << 32) |
                read_be32(data + 8);
    flag = data[12];
    temperature = static_cast<std::int16_t>(read_be16(data + 13));
    accel_or_gyro[0] = static_cast<std::int16_t>(read_be16(data + 15));
    accel_or_gyro[1] = static_cast<std::int16_t>(read_be16(data + 17));
    accel_or_gyro[2] = static_cast<std::int16_t>(read_be16(data + 19));
  }
};
#pragma pack(pop)
// Converts a decoded wire sample into an ImuSegment. The flag selects
// which sensor the 3-axis payload belongs to (1 = accel, 2 = gyro);
// the other sensor's axes are zeroed.
void unpack_imu_segment(const ImuData &imu, ImuSegment *seg) {
  seg->frame_id = imu.frame_id;
  seg->timestamp = imu.timestamp;
  seg->flag = imu.flag;
  seg->temperature = imu.temperature;
  const bool is_accel = (imu.flag == 1);
  const bool is_gyro = (imu.flag == 2);
  for (int axis = 0; axis < 3; axis++) {
    seg->accel[axis] = is_accel ? imu.accel_or_gyro[axis] : 0;
    seg->gyro[axis] = is_gyro ? imu.accel_or_gyro[axis] : 0;
  }
}
// Unpacks pkg->count consecutive ImuData samples from data into
// pkg->segments; the packet serial number follows the last sample's
// frame id.
void unpack_imu_packet(const std::uint8_t *data, ImuPacket *pkg) {
  const std::size_t data_n = sizeof(ImuData);  // 21
  for (std::size_t i = 0; i < pkg->count; i++) {
    ImuSegment seg;
    unpack_imu_segment(ImuData(data + data_n * i), &seg);
    pkg->segments.push_back(seg);
  }
  // Fix: calling back() on an empty vector is undefined behavior, which
  // happened whenever count was 0 (response payload shorter than one
  // 21-byte sample).
  if (!pkg->segments.empty()) {
    pkg->serial_number = pkg->segments.back().frame_id;
  }
}
// Unpacks a full IMU response buffer. Wire layout as read below:
//   [0]          header
//   [1]          state
//   [2..3]       payload size, big-endian
//   [4..4+size)  payload: consecutive 21-byte ImuData samples
//   [4+size]     checksum
// All samples are gathered into one ImuPacket appended to res->packets.
void unpack_imu_res_packet(const std::uint8_t *data, ImuResPacket *res) {
  res->header = *data;
  res->state = *(data + 1);
  res->size = (*(data + 2) << 8) | *(data + 3);
  std::size_t data_n = sizeof(ImuData);  // 21
  ImuPacket packet;
  // Samples that don't fill a whole 21-byte record are ignored.
  packet.count = res->size / data_n;
  unpack_imu_packet(data + 4, &packet);
  res->packets.push_back(packet);
  res->checksum = *(data + 4 + res->size);
}
} // namespace
// Binds this channels adapter to the STANDARD2 model.
Standard2ChannelsAdapter::Standard2ChannelsAdapter()
    : ChannelsAdapter(Model::STANDARD2) {
}

Standard2ChannelsAdapter::~Standard2ChannelsAdapter() {
}
// Default accelerometer range option for STANDARD2.
std::int32_t Standard2ChannelsAdapter::GetAccelRangeDefault() {
  constexpr std::int32_t kDefaultAccelRange = 12;
  return kDefaultAccelRange;
}

// Accelerometer range options selectable on STANDARD2.
std::vector<std::int32_t> Standard2ChannelsAdapter::GetAccelRangeValues() {
  return std::vector<std::int32_t>{6, 12, 24, 48};
}

// Default gyroscope range option for STANDARD2.
std::int32_t Standard2ChannelsAdapter::GetGyroRangeDefault() {
  constexpr std::int32_t kDefaultGyroRange = 1000;
  return kDefaultGyroRange;
}

// Gyroscope range options selectable on STANDARD2.
std::vector<std::int32_t> Standard2ChannelsAdapter::GetGyroRangeValues() {
  return std::vector<std::int32_t>{250, 500, 1000, 2000, 4000};
}
// Decodes a raw IMU response buffer into res; delegates to the
// file-local unpack_imu_res_packet above.
void Standard2ChannelsAdapter::GetImuResPacket(
    const std::uint8_t *data, ImuResPacket *res) {
  unpack_imu_res_packet(data, res);
}
MYNTEYE_END_NAMESPACE

View File

@@ -1,42 +0,0 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_DEVICE_STANDARD2_CHANNELS_ADAPTER_S2_H_
#define MYNTEYE_DEVICE_STANDARD2_CHANNELS_ADAPTER_S2_H_
#pragma once
#include <cstdint>
#include <set>
#include <vector>
#include "mynteye/device/channel/channels.h"
MYNTEYE_BEGIN_NAMESPACE
// Channels adapter for the STANDARD2 model: supplies the IMU range
// options and the IMU response unpacking specific to this device.
class Standard2ChannelsAdapter : public ChannelsAdapter {
 public:
  Standard2ChannelsAdapter();
  virtual ~Standard2ChannelsAdapter();
  // Default accelerometer range value.
  std::int32_t GetAccelRangeDefault() override;
  // Accelerometer range values selectable on this model.
  std::vector<std::int32_t> GetAccelRangeValues() override;
  // Default gyroscope range value.
  std::int32_t GetGyroRangeDefault() override;
  // Gyroscope range values selectable on this model.
  std::vector<std::int32_t> GetGyroRangeValues() override;
  // Decodes a raw IMU response buffer into res.
  void GetImuResPacket(const std::uint8_t *data, ImuResPacket *res) override;
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_DEVICE_STANDARD2_CHANNELS_ADAPTER_S2_H_

View File

@@ -1,121 +0,0 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/device/standard2/channels_adapter_s210a.h"
#include "mynteye/logger.h"
MYNTEYE_BEGIN_NAMESPACE
namespace {
#pragma pack(push, 1)
// One raw IMU sample exactly as laid out on the wire: 21 packed bytes,
// all multi-byte fields big-endian.
struct ImuData {
  std::uint32_t frame_id;
  std::uint64_t timestamp;
  std::uint8_t flag;
  std::int16_t temperature;
  std::int16_t accel_or_gyro[3];

  ImuData() = default;

  explicit ImuData(const std::uint8_t *data) {
    from_data(data);
  }

  // Reads a big-endian 16-bit value starting at p.
  static std::uint16_t read_be16(const std::uint8_t *p) {
    return static_cast<std::uint16_t>((p[0] << 8) | p[1]);
  }

  // Reads a big-endian 32-bit value starting at p.
  static std::uint32_t read_be32(const std::uint8_t *p) {
    return (static_cast<std::uint32_t>(p[0]) << 24) |
           (static_cast<std::uint32_t>(p[1]) << 16) |
           (static_cast<std::uint32_t>(p[2]) << 8) |
           static_cast<std::uint32_t>(p[3]);
  }

  // Decodes one sample from its 21-byte wire representation.
  void from_data(const std::uint8_t *data) {
    frame_id = read_be32(data);
    timestamp = (static_cast<std::uint64_t>(read_be32(data + 4)) << 32) |
                read_be32(data + 8);
    flag = data[12];
    temperature = static_cast<std::int16_t>(read_be16(data + 13));
    accel_or_gyro[0] = static_cast<std::int16_t>(read_be16(data + 15));
    accel_or_gyro[1] = static_cast<std::int16_t>(read_be16(data + 17));
    accel_or_gyro[2] = static_cast<std::int16_t>(read_be16(data + 19));
  }
};
#pragma pack(pop)
// Converts a decoded wire sample into an ImuSegment. The flag selects
// which sensor the 3-axis payload belongs to (1 = accel, 2 = gyro);
// the other sensor's axes are zeroed.
void unpack_imu_segment(const ImuData &imu, ImuSegment *seg) {
  seg->frame_id = imu.frame_id;
  seg->timestamp = imu.timestamp;
  seg->flag = imu.flag;
  seg->temperature = imu.temperature;
  const bool is_accel = (imu.flag == 1);
  const bool is_gyro = (imu.flag == 2);
  for (int axis = 0; axis < 3; axis++) {
    seg->accel[axis] = is_accel ? imu.accel_or_gyro[axis] : 0;
    seg->gyro[axis] = is_gyro ? imu.accel_or_gyro[axis] : 0;
  }
}
// Unpacks pkg->count consecutive ImuData samples from data into
// pkg->segments; the packet serial number follows the last sample's
// frame id.
void unpack_imu_packet(const std::uint8_t *data, ImuPacket *pkg) {
  const std::size_t data_n = sizeof(ImuData);  // 21
  for (std::size_t i = 0; i < pkg->count; i++) {
    ImuSegment seg;
    unpack_imu_segment(ImuData(data + data_n * i), &seg);
    pkg->segments.push_back(seg);
  }
  // Fix: calling back() on an empty vector is undefined behavior, which
  // happened whenever count was 0 (response payload shorter than one
  // 21-byte sample).
  if (!pkg->segments.empty()) {
    pkg->serial_number = pkg->segments.back().frame_id;
  }
}
// Unpacks a full IMU response buffer. Wire layout as read below:
//   [0]          header
//   [1]          state
//   [2..3]       payload size, big-endian
//   [4..4+size)  payload: consecutive 21-byte ImuData samples
//   [4+size]     checksum
// All samples are gathered into one ImuPacket appended to res->packets.
void unpack_imu_res_packet(const std::uint8_t *data, ImuResPacket *res) {
  res->header = *data;
  res->state = *(data + 1);
  res->size = (*(data + 2) << 8) | *(data + 3);
  std::size_t data_n = sizeof(ImuData);  // 21
  ImuPacket packet;
  // Samples that don't fill a whole 21-byte record are ignored.
  packet.count = res->size / data_n;
  unpack_imu_packet(data + 4, &packet);
  res->packets.push_back(packet);
  res->checksum = *(data + 4 + res->size);
}
} // namespace
// Binds this channels adapter to the STANDARD210A model.
Standard210aChannelsAdapter::Standard210aChannelsAdapter()
    : ChannelsAdapter(Model::STANDARD210A) {
}

Standard210aChannelsAdapter::~Standard210aChannelsAdapter() {
}
// Default accelerometer range option for STANDARD210A.
std::int32_t Standard210aChannelsAdapter::GetAccelRangeDefault() {
  constexpr std::int32_t kDefaultAccelRange = 12;
  return kDefaultAccelRange;
}

// Accelerometer range options selectable on STANDARD210A.
std::vector<std::int32_t> Standard210aChannelsAdapter::GetAccelRangeValues() {
  return std::vector<std::int32_t>{6, 12, 24, 48};
}

// Default gyroscope range option for STANDARD210A.
std::int32_t Standard210aChannelsAdapter::GetGyroRangeDefault() {
  constexpr std::int32_t kDefaultGyroRange = 1000;
  return kDefaultGyroRange;
}

// Gyroscope range options selectable on STANDARD210A.
std::vector<std::int32_t> Standard210aChannelsAdapter::GetGyroRangeValues() {
  return std::vector<std::int32_t>{250, 500, 1000, 2000, 4000};
}
// Decodes a raw IMU response buffer into res; delegates to the
// file-local unpack_imu_res_packet above.
void Standard210aChannelsAdapter::GetImuResPacket(
    const std::uint8_t *data, ImuResPacket *res) {
  unpack_imu_res_packet(data, res);
}
MYNTEYE_END_NAMESPACE

View File

@@ -1,42 +0,0 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_DEVICE_STANDARD2_CHANNELS_ADAPTER_S210A_H_
#define MYNTEYE_DEVICE_STANDARD2_CHANNELS_ADAPTER_S210A_H_
#pragma once
#include <cstdint>
#include <set>
#include <vector>
#include "mynteye/device/channel/channels.h"
MYNTEYE_BEGIN_NAMESPACE
// Channels adapter for the STANDARD210A model: supplies the IMU range
// options and the IMU response unpacking specific to this device.
class Standard210aChannelsAdapter : public ChannelsAdapter {
 public:
  Standard210aChannelsAdapter();
  virtual ~Standard210aChannelsAdapter();
  // Default accelerometer range value.
  std::int32_t GetAccelRangeDefault() override;
  // Accelerometer range values selectable on this model.
  std::vector<std::int32_t> GetAccelRangeValues() override;
  // Default gyroscope range value.
  std::int32_t GetGyroRangeDefault() override;
  // Gyroscope range values selectable on this model.
  std::vector<std::int32_t> GetGyroRangeValues() override;
  // Decodes a raw IMU response buffer into res.
  void GetImuResPacket(const std::uint8_t *data, ImuResPacket *res) override;
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_DEVICE_STANDARD2_CHANNELS_ADAPTER_S210A_H_

View File

@@ -1,45 +0,0 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/device/standard2/device_s2.h"
#include "mynteye/logger.h"
#include "mynteye/device/motions.h"
#include "mynteye/device/standard2/channels_adapter_s2.h"
#include "mynteye/device/standard2/streams_adapter_s2.h"
MYNTEYE_BEGIN_NAMESPACE
// Constructs a STANDARD2 device over the given UVC handle, wiring in
// the model-specific stream and channel adapters.
Standard2Device::Standard2Device(std::shared_ptr<uvc::device> device)
    : Device(Model::STANDARD2, device,
             std::make_shared<Standard2StreamsAdapter>(),
             std::make_shared<Standard2ChannelsAdapter>()) {
  VLOG(2) << __func__;
}

Standard2Device::~Standard2Device() {
  VLOG(2) << __func__;
}
// The capability that serves as this model's key stream.
Capabilities Standard2Device::GetKeyStreamCapability() const {
  return Capabilities::STEREO_COLOR;
}

// Hook invoked on every stereo stream update; advances motion tracking
// when it has been enabled.
void Standard2Device::OnStereoStreamUpdate() {
  if (!motion_tracking_) return;
  this->motions()->DoMotionTrack();
}
MYNTEYE_END_NAMESPACE

View File

@@ -1,37 +0,0 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_DEVICE_STANDARD2_DEVICE_S2_H_
#define MYNTEYE_DEVICE_STANDARD2_DEVICE_S2_H_
#pragma once
#include <memory>
#include <vector>
#include "mynteye/device/device.h"
MYNTEYE_BEGIN_NAMESPACE
// Device implementation for the STANDARD2 model.
class Standard2Device : public Device {
 public:
  explicit Standard2Device(std::shared_ptr<uvc::device> device);
  virtual ~Standard2Device();
  // The stream capability used as the key stream for this model.
  Capabilities GetKeyStreamCapability() const override;
  // Hook invoked on stereo stream updates; drives motion tracking.
  void OnStereoStreamUpdate() override;
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_DEVICE_STANDARD2_DEVICE_S2_H_

View File

@@ -1,45 +0,0 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/device/standard2/device_s210a.h"
#include "mynteye/logger.h"
#include "mynteye/device/motions.h"
#include "mynteye/device/standard2/channels_adapter_s210a.h"
#include "mynteye/device/standard2/streams_adapter_s210a.h"
MYNTEYE_BEGIN_NAMESPACE
// Constructs a STANDARD210A device over the given UVC handle, wiring in
// the model-specific stream and channel adapters.
Standard210aDevice::Standard210aDevice(std::shared_ptr<uvc::device> device)
    : Device(Model::STANDARD210A, device,
             std::make_shared<Standard210aStreamsAdapter>(),
             std::make_shared<Standard210aChannelsAdapter>()) {
  VLOG(2) << __func__;
}

Standard210aDevice::~Standard210aDevice() {
  VLOG(2) << __func__;
}
// The capability that serves as this model's key stream.
Capabilities Standard210aDevice::GetKeyStreamCapability() const {
  return Capabilities::STEREO_COLOR;
}

// Hook invoked on every stereo stream update; advances motion tracking
// when it has been enabled.
void Standard210aDevice::OnStereoStreamUpdate() {
  if (!motion_tracking_) return;
  this->motions()->DoMotionTrack();
}
MYNTEYE_END_NAMESPACE

Some files were not shown because too many files have changed in this diff Show More