Compare commits
132 Commits
SHA1:
f9fa5f8823 15464160f6 3b8f2d7706 8b07e18c23 8400966a5c 2462c1b37a d1e856a749 a75d064a2a c37bbfb0bd 56a92df24f
957b460671 21c72ce3b7 0061061130 b9a8eea7be 3bef119492 e0d61a9c48 5f63df740d 2d1989fc2b fd551de04b 5deefc3af4
cdbe614072 ccf7e7cb08 272ba40b68 ceb98aa2d2 e1c6ad86ec 620b834b6c a6f7112f82 b3e41de62c edb18a90b8 e91a2663e2
dacad9483e 3f80c8bbff 189a2bd6b7 e325706c9d 3abb4dc153 dfc0b5eaab 8f50f8d214 cda6024a3e 6b561733c0 5a6eabcc5f
0025a555ba 3c4f8b986b ecb8cd640a f4007aa78c 8f255ec291 d453f98b6d af8e188ae1 dfea14aba3 fd2e6c4708 f08dda33b5
849e8c80dd ff2598120a e85321130e dc0a259007 ea5d40ff20 d261bad291 4d29d18035 6075cd61eb 7322ade75b 1d81ea4ac6
ce7071db2c b48cc6d98e 3490f0ca44 1f51ab8fa2 3cc158231a c2173f9f8e ad64476ee6 3c687d43ca be15d6a6d9 dfb2be71a6
4ac353c8c1 764ccba041 1bc0e47cd1 8b8e9bd5fd e3de24dca1 34b079f9ad d69263a2c7 70e1e9e32f f7df7db10b a5b337b50f
f744fa06d5 c9bfdbb4d6 469ffe5075 95d733b2b4 22bd0fab3a 96a1b2a62d b93026bdcf 3b22fa3abc 2896e00acf 71e3286014
dc4a21d635 d1cd7fadc5 da9417852a f525c2063e 8f096612a3 6a0ce86594 559a812c65 c24e2806b3 c35d338248 455bfc72b2
852d8d8faf d09f037aee b2bd90192d 6953758101 262f64715d b6d4037357 30ed3ed5e2 5b5ded25c9 01abf3d346 05d46111b7
44bff1992a 34f3e08b20 c5bf93dfbd 4b8b7f707c b35d55309d 9068b0dd53 5322fc38f4 6988ae803b 74b71dc161 cabaeb4794
1f7621debd f98fe1b4ec 5eb3174edb d6ff3470f1 d673a151c9 d531112075 5105b5ea82 a8796478e6 d41f4cbcbc 5d5b5cf476
9aefabf76b 829d37f620
@@ -14,7 +14,7 @@

cmake_minimum_required(VERSION 3.0)

project(mynteye VERSION 2.3.1 LANGUAGES C CXX)
project(mynteye VERSION 2.3.4 LANGUAGES C CXX)

include(cmake/Common.cmake)

@@ -199,9 +199,6 @@ set(MYNTEYE_SRCS
src/mynteye/device/standard2/channels_adapter_s2.cc
src/mynteye/device/standard2/device_s2.cc
src/mynteye/device/standard2/streams_adapter_s2.cc
src/mynteye/device/standard2/channels_adapter_s210a.cc
src/mynteye/device/standard2/device_s210a.cc
src/mynteye/device/standard2/streams_adapter_s210a.cc
src/mynteye/device/streams.cc
src/mynteye/device/types.cc
src/mynteye/device/utils.cc
@@ -220,6 +217,8 @@ if(WITH_API)
src/mynteye/api/processor/rectify_processor_ocv.cc
src/mynteye/api/config.cc
src/mynteye/api/correspondence.cc
src/mynteye/api/version_checker.cc
src/mynteye/api/data_tools.cc
)
if(WITH_CAM_MODELS)
list(APPEND MYNTEYE_SRCS
Makefile

@@ -130,7 +130,7 @@ endif

# install

install: build
install: uninstall build
@$(call echo,Make $@)
ifeq ($(HOST_OS),Win)
ifneq ($(HOST_NAME),MinGW)
@@ -1,6 +1,6 @@
# MYNT® EYE S SDK

[](https://github.com/slightech/MYNT-EYE-S-SDK)
[](https://github.com/slightech/MYNT-EYE-S-SDK)

## Overview

@@ -17,11 +17,8 @@ Please follow the guide doc to install the SDK on different platforms.
## Documentations

* [API Doc](https://github.com/slightech/MYNT-EYE-S-SDK/releases): API reference, some guides and data spec.
* en: [](https://github.com/slightech/MYNT-EYE-S-SDK/files/2893965/mynt-eye-s-sdk-apidoc-2.3.1-en.pdf) [](https://github.com/slightech/MYNT-EYE-S-SDK/files/2893979/mynt-eye-s-sdk-apidoc-2.3.1-en.zip) [](https://slightech.github.io/MYNT-EYE-S-SDK/)
* zh-Hans: [](https://github.com/slightech/MYNT-EYE-S-SDK/files/2893985/mynt-eye-s-sdk-apidoc-2.3.1-zh-Hans.pdf) [](https://github.com/slightech/MYNT-EYE-S-SDK/files/2893986/mynt-eye-s-sdk-apidoc-2.3.1-zh-Hans.zip) [](http://doc.myntai.com/resource/api/mynt-eye-s-sdk-apidoc-2.3.1-zh-Hans/mynt-eye-s-sdk-apidoc-2.3.1-zh-Hans/index.html)
* [Guide Doc](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/releases): How to install and start using the SDK.
* en: [](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/files/2893975/mynt-eye-s-sdk-guide-2.3.1-en.pdf) [](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/files/2893996/mynt-eye-s-sdk-guide-2.3.1-en.zip) [](https://slightech.github.io/MYNT-EYE-S-SDK-Guide/)
* zh-Hans: [](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/files/2893997/mynt-eye-s-sdk-guide-2.3.1-zh-Hans.pdf) [](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/files/2893997/mynt-eye-s-sdk-guide-2.3.1-zh-Hans.pdf) [](http://doc.myntai.com/resource/sdk/mynt-eye-s-sdk-guide-2.3.1-zh-Hans/mynt-eye-s-sdk-guide-2.3.1-zh-Hans/index.html)
* en: [](https://github.com/slightech/MYNT-EYE-S-SDK/files/2981536/mynteye-s-sdk-docs-en-2.3.4.pdf) [](https://github.com/slightech/MYNT-EYE-S-SDK/files/2981540/mynt-eye-s-sdk-docs-2.3.4-en.zip) [](https://slightech.github.io/MYNT-EYE-S-SDK/)
* zh-Hans: []() [](https://github.com/slightech/MYNT-EYE-S-SDK/files/2981541/mynt-eye-s-sdk-docs-2.3.4-zh-Hans.zip) [](http://doc.myntai.com/resource/api/mynt-eye-s-sdk-docs-2.3.4-zh-Hans/mynt-eye-s-sdk-docs-2.3.4-zh-Hans/index.html)
> Supported languages: `en`, `zh-Hans`.
@@ -38,7 +38,7 @@ PROJECT_NAME = "MYNT EYE S SDK"
# could be handy for archiving the generated documentation or if some version
# control system is used.

PROJECT_NUMBER = 2.3.1
PROJECT_NUMBER = 2.3.4

# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a
@@ -12,3 +12,5 @@
| Lens type | lens_type | - | × | √ Get/Set | 4 | vendor(2),product(2); default: 0 |
| IMU type | imu_type | - | × | √ Get/Set | 4 | vendor(2),product(2); default: 0 |
| Nominal baseline | nominal_baseline | - | × | √ Get/Set | 2 | unit: mm; default: 0 |
| Auxiliary chip version | auxiliary_chip_version | - | × | √ Get | 2 | major,minor |
| ISP version | isp_version | - | × | √ Get | 2 | major,minor |
@@ -38,7 +38,7 @@ PROJECT_NAME = "MYNT EYE S SDK"
# could be handy for archiving the generated documentation or if some version
# control system is used.

PROJECT_NUMBER = 2.3.1
PROJECT_NUMBER = 2.3.4

# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a
@@ -12,6 +12,8 @@
| Lens type | lens_type | - | × | √ Get/Set | 4 | vendor(2),product(2); defaults to 0 if not set |
| IMU type | imu_type | - | × | √ Get/Set | 4 | vendor(2),product(2); defaults to 0 if not set |
| Nominal baseline | nominal_baseline | - | × | √ Get/Set | 2 | unit: mm; defaults to 0 if not set |
| Auxiliary chip version | auxiliary_chip_version | - | × | √ Get | 2 | major,minor |
| ISP version | isp_version | - | × | √ Get | 2 | major,minor |

* Descriptor access: general USB device information that can be viewed with common tools.
* Extension channel access: information queried from the hardware through the extension channel (UVC Extension Unit); it has to be read out explicitly.
@@ -187,7 +187,10 @@ class MYNTEYE_API API {
* Get the device info.
*/
std::string GetInfo(const Info &info) const;

/**
* Get the sdk version.
*/
std::string GetSDKVersion() const;
/**
* @deprecated Get the intrinsics (pinhole) of stream.
*/
@@ -234,6 +237,11 @@ class MYNTEYE_API API {
void SetDisparityComputingMethodType(
const DisparityComputingMethod &MethodType);

/**
* Set if the duplicate frames is enable.
*/
void setDuplicate(bool isEnable);

/**
* Set the option value.
*/
@@ -331,7 +339,8 @@ class MYNTEYE_API API {
/**
* Enable motion datas with timestamp correspondence of some stream.
*/
void EnableTimestampCorrespondence(const Stream &stream);
void EnableTimestampCorrespondence(const Stream &stream,
bool keep_accel_then_gyro = true);

/**
* Enable the plugin.
@@ -340,6 +349,11 @@ class MYNTEYE_API API {

std::shared_ptr<Device> device();

/** Enable process mode, e.g. imu assembly, temp_drift */
void EnableProcessMode(const ProcessMode& mode);
/** Enable process mode, e.g. imu assembly, temp_drift */
void EnableProcessMode(const std::int32_t& mode);

private:
std::shared_ptr<Device> device_;
@@ -295,6 +295,11 @@ class MYNTEYE_API Device {
*/
std::vector<device::MotionData> GetMotionDatas();

/** Enable process mode, e.g. imu assembly, temp_drift */
void EnableProcessMode(const ProcessMode& mode);
/** Enable process mode, e.g. imu assembly, temp_drift */
void EnableProcessMode(const std::int32_t& mode);

protected:
std::shared_ptr<uvc::device> device() const {
return device_;
@@ -162,6 +162,8 @@ struct MYNTEYE_API DeviceInfo {
Type lens_type;
Type imu_type;
std::uint16_t nominal_baseline;
Version auxiliary_chip_version;
Version isp_version;
};

#undef MYNTEYE_PROPERTY
@@ -77,6 +77,7 @@ struct glog_init {
#include "mynteye/mynteye.h"

#define MYNTEYE_MAX_LOG_LEVEL google::INFO
// #define MYNTEYE_MAX_LOG_LEVEL 2

#include "mynteye/miniglog.h"
@@ -157,9 +157,6 @@ class MYNTEYE_API LogSink {
// Global set of log sinks. The actual object is defined in logging.cc.
MYNTEYE_API extern std::set<LogSink *> log_sinks_global;

// Added by chachi - a runtime global maximum log level. Defined in logging.cc
MYNTEYE_API extern int log_severity_global;

inline void InitGoogleLogging(char */*argv*/) {
// Do nothing; this is ignored.
}
@@ -315,8 +312,7 @@ class MYNTEYE_API LoggerVoidify {

// Log only if condition is met. Otherwise evaluates to void.
#define LOG_IF(severity, condition) \
(static_cast<int>(severity) > google::log_severity_global || !(condition)) ? \
(void) 0 : LoggerVoidify() & \
!(condition) ? (void) 0 : LoggerVoidify() & \
MessageLogger((char *)__FILE__, __LINE__, "native", severity).stream()

// Log only if condition is NOT met. Otherwise evaluates to void.
@@ -27,9 +27,9 @@
# endif
#endif

#define MYNTEYE_API_VERSION_MAJOR @PROJECT_VERSION_MAJOR@
#define MYNTEYE_API_VERSION_MINOR @PROJECT_VERSION_MINOR@
#define MYNTEYE_API_VERSION_PATCH @PROJECT_VERSION_PATCH@
#define MYNTEYE_API_VERSION_MAJOR @mynteye_VERSION_MAJOR@
#define MYNTEYE_API_VERSION_MINOR @mynteye_VERSION_MINOR@
#define MYNTEYE_API_VERSION_PATCH @mynteye_VERSION_PATCH@

/* MYNTEYE_API_VERSION is (major << 16) + (minor << 8) + patch */
#define MYNTEYE_API_VERSION \
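The comment above states how the three components are packed into one integer. A minimal standalone sketch of the same packing (the `pack_version` helper is illustrative only; the real header builds the value from the configure-time `@mynteye_VERSION_*@` substitutions):

```cpp
#include <cstdint>

// Same packing as the MYNTEYE_API_VERSION comment: (major << 16) + (minor << 8) + patch.
constexpr std::uint32_t pack_version(std::uint32_t major,
                                     std::uint32_t minor,
                                     std::uint32_t patch) {
  return (major << 16) + (minor << 8) + patch;
}

// The 2.3.4 release of this compare packs to 0x020304.
static_assert(pack_version(2, 3, 4) == 0x020304, "2.3.4 packs to 0x020304");
```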
@@ -122,6 +122,10 @@ enum class Info : std::uint8_t {
IMU_TYPE,
/** Nominal baseline */
NOMINAL_BASELINE,
/** Auxiliary chip version */
AUXILIARY_CHIP_VERSION,
/** Isp version */
ISP_VERSION,
/** Last guard */
LAST
};
@@ -325,6 +329,30 @@ enum class Format : std::uint32_t {

#undef MYNTEYE_FOURCC

/**
* @ingroup enumerations
* @brief Process modes.
*/
enum class ProcessMode : std::int32_t {
PROC_NONE = 0,
PROC_IMU_ASSEMBLY = 1,
PROC_IMU_TEMP_DRIFT = 2,
PROC_IMU_ALL = PROC_IMU_ASSEMBLY | PROC_IMU_TEMP_DRIFT
};

inline
std::int32_t operator&(const std::int32_t& lhs, const ProcessMode& rhs) {
return lhs & static_cast<std::int32_t>(rhs);
}
inline
std::int32_t operator&(const ProcessMode& lhs, const std::int32_t& rhs) {
return static_cast<std::int32_t>(lhs) & rhs;
}
inline
std::int32_t operator&(const ProcessMode& lhs, const ProcessMode& rhs) {
return static_cast<std::int32_t>(lhs) & static_cast<std::int32_t>(rhs);
}

MYNTEYE_API const char *to_string(const Format &value);

inline std::ostream &operator<<(std::ostream &os, const Format &value) {
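The new ProcessMode values are plain bit masks, and the hunk above adds the `operator&` overloads used to test them. A minimal usage sketch, assuming `api` comes from `API::Create()` as in the samples (the helper function name is hypothetical):

```cpp
#include <cstdint>
#include <memory>

#include "mynteye/api/api.h"

MYNTEYE_USE_NAMESPACE

// Enable both IMU corrections, then test one bit with the operator& overloads above.
void enable_imu_processing(const std::shared_ptr<API> &api) {
  api->EnableProcessMode(ProcessMode::PROC_IMU_ALL);

  std::int32_t mode = static_cast<std::int32_t>(ProcessMode::PROC_IMU_ALL);
  if (mode & ProcessMode::PROC_IMU_TEMP_DRIFT) {
    // temperature-drift compensation is part of the requested mode
  }
}
```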
@@ -420,6 +448,7 @@ struct MYNTEYE_API IntrinsicsBase {
calib_model_ = CalibrationModel::UNKNOW;
}
virtual ~IntrinsicsBase() {}
virtual void ResizeIntrinsics() {}

/** The calibration model */
CalibrationModel calib_model() const {
@@ -429,7 +458,8 @@ struct MYNTEYE_API IntrinsicsBase {
std::uint16_t width;
/** The height of the image in pixels */
std::uint16_t height;

/** Resize scale */
double resize_scale = 1.0;
protected:
CalibrationModel calib_model_;
};
@@ -445,6 +475,15 @@ struct MYNTEYE_API IntrinsicsPinhole : public IntrinsicsBase {
IntrinsicsPinhole() {
calib_model_ = CalibrationModel::PINHOLE;
}
void ResizeIntrinsics() {
width = static_cast<std::uint16_t>(width * resize_scale);
height = static_cast<std::uint16_t>(height * resize_scale);
fx *= resize_scale;
fy *= resize_scale;
cx *= resize_scale;
cy *= resize_scale;
resize_scale = 1.0;
}
/** The focal length of the image plane, as a multiple of pixel width */
double fx;
/** The focal length of the image plane, as a multiple of pixel height */
@@ -477,6 +516,15 @@ struct MYNTEYE_API IntrinsicsEquidistant : public IntrinsicsBase {
}
/** The distortion coefficients: k2,k3,k4,k5,mu,mv,u0,v0 */
double coeffs[8];
void ResizeIntrinsics() {
width = static_cast<std::uint16_t>(width * resize_scale);
height = static_cast<std::uint16_t>(height * resize_scale);
coeffs[4] *= resize_scale;
coeffs[5] *= resize_scale;
coeffs[6] *= resize_scale;
coeffs[7] *= resize_scale;
resize_scale = 1.0;
}
};

MYNTEYE_API
@@ -496,12 +544,24 @@ struct MYNTEYE_API ImuIntrinsics {
* \endcode
*/
double scale[3][3];
/** Assembly error [3][3] */
double assembly[3][3];
/* Zero-drift: X, Y, Z */
double drift[3];
/** Noise density variances */
double noise[3];
/** Random walk variances */
double bias[3];

/** Temperature drift
* \code
* 0 - Constant value
* 1 - Slope
* \endcode
*/
double x[2];
double y[2];
double z[2];
};

MYNTEYE_API
@@ -11,7 +11,9 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <string>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>

#include "mynteye/api/api.h"
@@ -26,22 +28,24 @@ int main(int argc, char *argv[]) {
if (!ok) return 1;
api->ConfigStreamRequest(request);

api->SetDisparityComputingMethodType(DisparityComputingMethod::BM);
api->EnableStreamData(Stream::DEPTH);

api->Start(Source::VIDEO_STREAMING);

cv::namedWindow("frame");
cv::namedWindow("depth");

while (true) {
api->WaitForStreams();

auto &&left_data = api->GetStreamData(Stream::LEFT);
auto &&right_data = api->GetStreamData(Stream::RIGHT);

if (!left_data.frame.empty() && !right_data.frame.empty()) {
cv::Mat img;
cv::hconcat(left_data.frame, right_data.frame, img);
cv::imshow("frame", img);
}

auto &&depth_data = api->GetStreamData(Stream::DEPTH);
if (!depth_data.frame.empty()) {
@@ -28,6 +28,9 @@ int main(int argc, char *argv[]) {
LOG(INFO) << "Lens type: " << api->GetInfo(Info::LENS_TYPE);
LOG(INFO) << "IMU type: " << api->GetInfo(Info::IMU_TYPE);
LOG(INFO) << "Nominal baseline: " << api->GetInfo(Info::NOMINAL_BASELINE);
LOG(INFO) << "Auxiliary chip version: "
<< api->GetInfo(Info::AUXILIARY_CHIP_VERSION);
LOG(INFO) << "Isp version: " << api->GetInfo(Info::ISP_VERSION);

return 0;
}
@@ -43,9 +43,11 @@ int main(int argc, char *argv[]) {
auto &&left_data = api->GetStreamData(Stream::LEFT);
auto &&right_data = api->GetStreamData(Stream::RIGHT);

if (!left_data.frame.empty() && !right_data.frame.empty()) {
cv::Mat img;
cv::hconcat(left_data.frame, right_data.frame, img);
cv::imshow("frame", img);
}

// auto &&disp_data = api->GetStreamData(Stream::DISPARITY);
// if (!disp_data.frame.empty()) {
@@ -25,11 +25,22 @@ int main(int argc, char *argv[]) {
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);

LOG(INFO) << "Intrinsics left: {" << *api->GetIntrinsicsBase(Stream::LEFT)
<< "}";
LOG(INFO) << "Intrinsics right: {" << *api->GetIntrinsicsBase(Stream::RIGHT)
<< "}";
auto in_left = api->GetIntrinsicsBase(Stream::LEFT);
auto in_right = api->GetIntrinsicsBase(Stream::RIGHT);
if (in_left->calib_model() == CalibrationModel::PINHOLE) {
in_left = std::dynamic_pointer_cast<IntrinsicsPinhole>(in_left);
in_right = std::dynamic_pointer_cast<IntrinsicsPinhole>(in_right);
} else if (in_left->calib_model() == CalibrationModel::KANNALA_BRANDT) {
in_left = std::dynamic_pointer_cast<IntrinsicsEquidistant>(in_left);
in_right = std::dynamic_pointer_cast<IntrinsicsEquidistant>(in_right);
} else {
LOG(INFO) << "UNKNOW CALIB MODEL.";
return 0;
}
in_left -> ResizeIntrinsics();
in_right -> ResizeIntrinsics();
LOG(INFO) << "Intrinsics left: {" << *in_left << "}";
LOG(INFO) << "Intrinsics right: {" << *in_right << "}";
LOG(INFO) << "Extrinsics right to left: {"
<< api->GetExtrinsics(Stream::RIGHT, Stream::LEFT) << "}";
@@ -44,6 +44,7 @@ int main(int argc, char *argv[]) {
auto &&left_data = api->GetStreamData(Stream::LEFT);
auto &&right_data = api->GetStreamData(Stream::RIGHT);

if (!left_data.frame.empty() && !right_data.frame.empty()) {
cv::Mat img;
cv::hconcat(left_data.frame, right_data.frame, img);

@@ -63,11 +64,12 @@ int main(int argc, char *argv[]) {
*/

painter.DrawImgData(img, *left_data.img);
if (!motion_datas.empty()) {
if (!motion_datas.empty() && motion_datas.size() > 0) {
painter.DrawImuData(img, *motion_datas[0].imu);
}

cv::imshow("frame", img);
}

char key = static_cast<char>(cv::waitKey(1));
if (key == 27 || key == 'q' || key == 'Q') {  // ESC/Q
@@ -18,6 +18,10 @@

#include "util/cv_painter.h"

// #define CHECK_ACCEL_THEN_GYRO
#define SEQ_FIRST 1  // accel
#define SEQ_SECOND 2  // gyro

MYNTEYE_USE_NAMESPACE

int main(int argc, char *argv[]) {
@@ -40,12 +44,18 @@ int main(int argc, char *argv[]) {

std::uint64_t prev_img_stamp = 0;
std::uint64_t prev_imu_stamp = 0;
#ifdef CHECK_ACCEL_THEN_GYRO
std::uint8_t prev_imu_flag = 0;
std::uint64_t imu_count = 0;
std::uint64_t imu_disorder_count = 0;
bool exit = false;
#endif
while (true) {
api->WaitForStreams();

auto &&left_data = api->GetStreamData(Stream::LEFT);
auto &&right_data = api->GetStreamData(Stream::RIGHT);

if (!left_data.frame.empty() || !right_data.frame.empty()) {
auto img_stamp = left_data.img->timestamp;
LOG(INFO) << "Img timestamp: " << img_stamp
<< ", diff_prev=" << (img_stamp - prev_img_stamp);
@@ -56,23 +66,66 @@ int main(int argc, char *argv[]) {

auto &&motion_datas = api->GetMotionDatas();
LOG(INFO) << "Imu count: " << motion_datas.size();
for (auto &&data : motion_datas) {
if (motion_datas.size() == 0) {
continue;
}
for (size_t i = 0, n = motion_datas.size() - 1; i <= n; ++i) {
auto data = motion_datas[i];
auto imu_flag = data.imu->flag;
auto imu_stamp = data.imu->timestamp;
LOG(INFO) << "Imu timestamp: " << imu_stamp

std::stringstream ss;
if (imu_flag == 0) {  // accel + gyro
ss << "Imu";
} else if (imu_flag == 1) {  // accel
ss << "Accel";
} else if (imu_flag == 2) {  // gyro
ss << "Gyro";
}
ss << " timestamp: " << imu_stamp
<< ", diff_prev=" << (imu_stamp - prev_imu_stamp)
<< ", diff_img=" << (1.f + imu_stamp - img_stamp);
<< ", diff_img=" << (1.0f + imu_stamp - img_stamp);
#ifdef CHECK_ACCEL_THEN_GYRO
if (imu_flag > 0) {
bool ok = false;
if (i == 0) {  // first
ok = (imu_flag == SEQ_FIRST);
} else if (i == n) {  // last
ok = (imu_flag == SEQ_SECOND);
} else {
if (imu_flag == SEQ_FIRST) {
ok = (prev_imu_flag == SEQ_SECOND);
} else if (imu_flag == SEQ_SECOND) {
ok = (prev_imu_flag == SEQ_FIRST);
}
}
ss << (ok ? " ✓" : " x");
if (!ok) ++imu_disorder_count;
prev_imu_flag = imu_flag;
if (!exit) {
if (!ok) exit = true;
}
}
#endif
LOG(INFO) << ss.str();

prev_imu_stamp = imu_stamp;
}
LOG(INFO);
#ifdef CHECK_ACCEL_THEN_GYRO
imu_count += motion_datas.size();
if (exit) break;
#endif

/*
painter.DrawImgData(img, *left_data.img);
if (!motion_datas.empty()) {
if (!motion_datas.empty() && motion_datas.size() > 0) {
painter.DrawImuData(img, *motion_datas[0].imu);
}
*/

cv::imshow("frame", img);
}

char key = static_cast<char>(cv::waitKey(1));
if (key == 27 || key == 'q' || key == 'Q') {  // ESC/Q
@@ -81,5 +134,14 @@ int main(int argc, char *argv[]) {
}

api->Stop(Source::ALL);

#ifdef CHECK_ACCEL_THEN_GYRO
if (imu_disorder_count > 0) {
LOG(INFO) << "accel_then_gyro, disorder_count: " << imu_disorder_count
<< "/" << imu_count;
} else {
LOG(INFO) << "accel_then_gyro, ok";
}
#endif
return 0;
}
@@ -28,6 +28,8 @@ int main(int argc, char *argv[]) {
if (!ok) return 1;
api->ConfigStreamRequest(request);

api->SetDisparityComputingMethodType(DisparityComputingMethod::BM);

api->EnableStreamData(Stream::POINTS);

api->Start(Source::VIDEO_STREAMING);
@@ -40,10 +42,11 @@ int main(int argc, char *argv[]) {

auto &&left_data = api->GetStreamData(Stream::LEFT);
auto &&right_data = api->GetStreamData(Stream::RIGHT);

if (!left_data.frame.empty() && !right_data.frame.empty()) {
cv::Mat img;
cv::hconcat(left_data.frame, right_data.frame, img);
cv::imshow("frame", img);
}

auto &&points_data = api->GetStreamData(Stream::POINTS);
if (!points_data.frame.empty()) {
@@ -25,7 +25,7 @@ int main(int argc, char *argv[]) {
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);

api->SetDisparityComputingMethodType(DisparityComputingMethod::BM);
api->Start(Source::VIDEO_STREAMING);

cv::namedWindow("frame");
@@ -37,8 +37,10 @@ int main(int argc, char *argv[]) {
auto &&right_data = api->GetStreamData(Stream::RIGHT);

cv::Mat img;
if (!left_data.frame.empty() && !right_data.frame.empty()) {
cv::hconcat(left_data.frame, right_data.frame, img);
cv::imshow("frame", img);
}

char key = static_cast<char>(cv::waitKey(1));
if (key == 27 || key == 'q' || key == 'Q') {  // ESC/Q
@@ -25,6 +25,7 @@ int main(int argc, char *argv[]) {
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
api->SetDisparityComputingMethodType(DisparityComputingMethod::BM);

api->EnableStreamData(Stream::LEFT_RECTIFIED);
api->EnableStreamData(Stream::RIGHT_RECTIFIED);
@@ -26,6 +26,8 @@ int main(int argc, char *argv[]) {
if (!ok) return 1;
api->ConfigStreamRequest(request);

api->setDuplicate(true);

api->EnablePlugin("plugins/linux-x86_64/libplugin_g_cuda9.1_opencv3.4.0.so");

api->EnableStreamData(Stream::DISPARITY_NORMALIZED);
@@ -41,6 +43,7 @@ int main(int argc, char *argv[]) {
auto &&left_data = api->GetStreamData(Stream::LEFT);
auto &&right_data = api->GetStreamData(Stream::RIGHT);

if (!left_data.frame.empty() && !right_data.frame.empty()) {
cv::Mat img;
cv::hconcat(left_data.frame, right_data.frame, img);
cv::imshow("frame", img);
@@ -49,6 +52,7 @@ int main(int argc, char *argv[]) {
if (!disp_data.frame.empty()) {
cv::imshow("disparity", disp_data.frame);
}
}

char key = static_cast<char>(cv::waitKey(1));
if (key == 27 || key == 'q' || key == 'Q') {  // ESC/Q
@@ -156,7 +156,7 @@ int main(int argc, char *argv[]) {
api->ConfigStreamRequest(request);

api->SetOptionValue(Option::IR_CONTROL, 80);

api->SetDisparityComputingMethodType(DisparityComputingMethod::BM);
api->EnableStreamData(Stream::DISPARITY_NORMALIZED);
api->EnableStreamData(Stream::POINTS);
api->EnableStreamData(Stream::DEPTH);
@@ -26,6 +26,7 @@
#include "mynteye/api/dl.h"
#include "mynteye/api/plugin.h"
#include "mynteye/api/synthetic.h"
#include "mynteye/api/version_checker.h"
#include "mynteye/device/device.h"
#include "mynteye/device/utils.h"
@@ -222,7 +223,10 @@ API::~API() {
std::shared_ptr<API> API::Create(int argc, char *argv[]) {
auto &&device = device::select();
if (!device) return nullptr;
return Create(argc, argv, device);
auto api = Create(argc, argv, device);
if (api && checkFirmwareVersion(api))
return api;
return nullptr;
}

std::shared_ptr<API> API::Create(
@@ -261,7 +265,7 @@ std::shared_ptr<API> API::Create(const std::shared_ptr<Device> &device) {
}
} else {
LOG(ERROR) <<"no device!";
api = std::make_shared<API>(device, CalibrationModel::UNKNOW);
return nullptr;
}
return api;
}
@@ -327,6 +331,20 @@ std::string API::GetInfo(const Info &info) const {
return device_->GetInfo(info);
}

std::string API::GetSDKVersion() const {
std::string info_path =
utils::get_sdk_install_dir();
info_path.append(MYNTEYE_OS_SEP "share" \
MYNTEYE_OS_SEP "mynteye" MYNTEYE_OS_SEP "build.info");

cv::FileStorage fs(info_path, cv::FileStorage::READ);
if (!fs.isOpened()) {
LOG(WARNING) << "build.info not found: " << info_path;
return "null";
}
return fs["MYNTEYE_VERSION"];
}

IntrinsicsPinhole API::GetIntrinsics(const Stream &stream) const {
auto in = GetIntrinsicsBase(stream);
if (in->calib_model() == CalibrationModel::PINHOLE) {
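A minimal caller for the new GetSDKVersion(), modeled on the samples above; it is a sketch, not part of this change, and assumes the usual sample includes:

```cpp
#include "mynteye/api/api.h"
#include "mynteye/logger.h"

MYNTEYE_USE_NAMESPACE

int main(int argc, char *argv[]) {
  // API::Create() now also runs checkFirmwareVersion() and returns nullptr on mismatch.
  auto &&api = API::Create(argc, argv);
  if (!api) return 1;

  LOG(INFO) << "SDK version: " << api->GetSDKVersion();
  return 0;
}
```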
@@ -500,9 +518,11 @@ std::vector<api::MotionData> API::GetMotionDatas() {
}
}

void API::EnableTimestampCorrespondence(const Stream &stream) {
void API::EnableTimestampCorrespondence(const Stream &stream,
bool keep_accel_then_gyro) {
if (correspondence_ == nullptr) {
correspondence_.reset(new Correspondence(device_, stream));
correspondence_->KeepAccelThenGyro(keep_accel_then_gyro);
{
device_->DisableMotionDatas();
if (callback_) {
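A usage sketch for the extended signature, assuming `api` was created as in the samples; `Start(Source::ALL)` and the bounded loop are assumptions for illustration, not taken from this diff:

```cpp
// Keep the accel-then-gyro ordering introduced by this change
// (the second argument defaults to true).
api->EnableTimestampCorrespondence(Stream::LEFT, true);
api->Start(Source::ALL);

for (int i = 0; i < 100; ++i) {
  api->WaitForStreams();
  auto &&left = api->GetStreamData(Stream::LEFT);
  auto &&imus = api->GetMotionDatas();  // motion data aligned to the LEFT frames
  LOG(INFO) << "has image: " << (left.img != nullptr)
            << ", imu count: " << imus.size();
}

api->Stop(Source::ALL);
```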
@@ -540,6 +560,10 @@ void API::EnablePlugin(const std::string &path) {
synthetic_->SetPlugin(plugin);
}

void API::setDuplicate(bool isEnable) {
synthetic_->setDuplicate(isEnable);
}

void API::SetDisparityComputingMethodType(
const DisparityComputingMethod &MethodType) {
synthetic_->SetDisparityComputingMethodType(MethodType);
@@ -568,4 +592,12 @@ void API::CheckImageParams() {
}
}

void API::EnableProcessMode(const ProcessMode& mode) {
EnableProcessMode(static_cast<std::int32_t>(mode));
}

void API::EnableProcessMode(const std::int32_t& mode) {
device_->EnableProcessMode(mode);
}

MYNTEYE_END_NAMESPACE
@@ -16,11 +16,15 @@
#include "mynteye/device/device.h"
#include "mynteye/logger.h"

#define MYNTEYE_IMU_SEQ_FIRST 1  // accel
#define MYNTEYE_IMU_SEQ_SECOND 2  // gyro

MYNTEYE_BEGIN_NAMESPACE

Correspondence::Correspondence(const std::shared_ptr<Device> &device,
const Stream &stream)
: device_(device), stream_(stream), ready_image_timestamp_(0) {
: device_(device), stream_(stream), ready_image_timestamp_(0),
keep_accel_then_gyro_(false) {
VLOG(2) << __func__;
// set matched stream to be watched too,
// aim to make stream and matched stream correspondence
@@ -54,6 +58,10 @@ bool Correspondence::Watch(const Stream &stream) const {
return false;
}

void Correspondence::KeepAccelThenGyro(bool enabled) {
keep_accel_then_gyro_ = enabled;
}

void Correspondence::OnStreamDataCallback(
const Stream &stream, const api::StreamData &data) {
if (!Watch(stream)) {
@@ -143,7 +151,27 @@ std::vector<api::StreamData> Correspondence::GetStreamDatas(
}

std::vector<api::MotionData> Correspondence::GetMotionDatas() {
return GetReadyMotionDatas();
auto &&datas = GetReadyMotionDatas();
/*
for (auto data : datas) {
auto imu_flag = data.imu->flag;
auto imu_stamp = data.imu->timestamp;
std::stringstream ss;
if (imu_flag == 0) {  // accel + gyro
ss << "Imu";
} else if (imu_flag == 1) {  // accel
ss << "Accel";
} else if (imu_flag == 2) {  // gyro
ss << "Gyro";
}
ss << " timestamp: " << imu_stamp;
LOG(INFO) << ss.str();
}
*/
if (keep_accel_then_gyro_ && device_->GetModel() != Model::STANDARD) {
KeepAccelThenGyro(datas);  // only s2 need do this
}
return datas;
}

void Correspondence::EnableStreamMatch() {
@@ -171,9 +199,13 @@ void Correspondence::NotifyStreamDataReady() {

bool Correspondence::IsStreamDataReady() {
if (stream_datas_.empty()) return false;
if (stream_match_enabled_) {
if (stream_datas_match_.empty()) return false;
}
if (motion_datas_.empty()) return false;

std::uint64_t img_stamp = 0;
std::uint64_t img_macth_stamp = 0;
{
std::lock_guard<std::recursive_mutex> _(mtx_stream_datas_);
auto data = stream_datas_.front();
@@ -181,6 +213,10 @@ bool Correspondence::IsStreamDataReady() {
LOG(FATAL) << "stream data image info is empty!";
}
img_stamp = data.img->timestamp;

if (stream_match_enabled_) {
img_macth_stamp = stream_datas_match_.front().img->timestamp;
}
}
std::uint64_t imu_stamp = 0;
{
@@ -192,7 +228,12 @@ bool Correspondence::IsStreamDataReady() {
imu_stamp = data.imu->timestamp;
}

if (stream_match_enabled_) {
return img_stamp + stream_interval_us_half_ < imu_stamp
&& img_macth_stamp + stream_interval_us_half_ < imu_stamp;
} else {
return img_stamp + stream_interval_us_half_ < imu_stamp;
}
}

std::vector<api::StreamData> Correspondence::GetReadyStreamData(bool matched) {
@@ -261,4 +302,66 @@ std::vector<api::MotionData> Correspondence::GetReadyMotionDatas() {
return result;
}

void Correspondence::KeepAccelThenGyro(std::vector<api::MotionData> &datas) {
if (datas.size() == 0) return;

static std::shared_ptr<ImuData> last_imu = nullptr;

// process last imu
if (datas[0].imu->flag == MYNTEYE_IMU_SEQ_SECOND) {
if (last_imu && last_imu->flag == MYNTEYE_IMU_SEQ_FIRST) {
datas.insert(datas.begin(), {last_imu});
}
}
last_imu = nullptr;

// if only one
if (datas.size() == 1) {
last_imu = datas[0].imu;
datas.clear();
return;
}

std::uint8_t prev_flag = 0;
for (auto it = datas.begin(); it != datas.end(); ) {
auto flag = it->imu->flag;
if (flag == 0) {
++it;  // unexpected, keep it
continue;
}

bool is_first = (it == datas.begin());
bool is_last = (it == datas.end() - 1);
bool ok = false;
if (is_first) {
ok = (flag == MYNTEYE_IMU_SEQ_FIRST);
} else {
if (flag == MYNTEYE_IMU_SEQ_FIRST) {
ok = (prev_flag == MYNTEYE_IMU_SEQ_SECOND);
} else if (flag == MYNTEYE_IMU_SEQ_SECOND) {
ok = (prev_flag == MYNTEYE_IMU_SEQ_FIRST);
}
}

if (ok) {
prev_flag = flag;
++it;
} else {
if (is_last) {
// if tail not ok, retain last imu
last_imu = it->imu;
}
it = datas.erase(it);
}
}

// if tail is not second
if (datas.size() > 0) {
auto it = datas.end() - 1;
if (it->imu->flag != MYNTEYE_IMU_SEQ_SECOND) {
datas.erase(it);
}
}
}

MYNTEYE_END_NAMESPACE
@@ -15,6 +15,7 @@
#define MYNTEYE_API_CONFIG_H_
#pragma once

#include <atomic>
#include <condition_variable>
#include <memory>
#include <mutex>
@@ -31,6 +32,7 @@ class Correspondence {
~Correspondence();

bool Watch(const Stream &stream) const;
void KeepAccelThenGyro(bool enabled);

void OnStreamDataCallback(const Stream &stream, const api::StreamData &data);
void OnMotionDataCallback(const device::MotionData &data);
@@ -54,10 +56,12 @@ class Correspondence {
std::vector<api::StreamData> GetReadyStreamData(bool matched);
std::vector<api::MotionData> GetReadyMotionDatas();

void KeepAccelThenGyro(std::vector<api::MotionData> &datas);  // NOLINT

std::shared_ptr<Device> device_;
Stream stream_;
Stream stream_match_;
bool stream_match_enabled_;
std::atomic_bool stream_match_enabled_;

float stream_interval_us_;
float stream_interval_us_half_;
@@ -72,6 +76,8 @@ class Correspondence {
std::condition_variable_any cond_stream_datas_;

std::uint64_t ready_image_timestamp_;

bool keep_accel_then_gyro_;
};

MYNTEYE_END_NAMESPACE
src/mynteye/api/data_tools.cc (new file, 78 lines)
@@ -0,0 +1,78 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include <algorithm>
#include <functional>
#include <stdexcept>
#include "mynteye/api/data_tools.h"
#include "mynteye/logger.h"

MYNTEYE_BEGIN_NAMESPACE

cv::Mat frame2mat(const std::shared_ptr<device::Frame> &frame) {
if (frame->format() == Format::YUYV) {
cv::Mat img(frame->height(), frame->width(), CV_8UC2, frame->data());
cv::cvtColor(img, img, cv::COLOR_YUV2BGR_YUY2);
return img;
} else if (frame->format() == Format::BGR888) {
cv::Mat img(frame->height(), frame->width(), CV_8UC3, frame->data());
return img;
} else {  // Format::GRAY
return cv::Mat(frame->height(), frame->width(), CV_8UC1, frame->data());
}
}

api::StreamData data2api(const device::StreamData &data) {
return {data.img, frame2mat(data.frame), data.frame, data.frame_id};
}

// ObjMat/ObjMat2 > api::StreamData

api::StreamData obj_data_first(const ObjMat2 *obj) {
return {obj->first_data, obj->first, nullptr, obj->first_id};
}

api::StreamData obj_data_second(const ObjMat2 *obj) {
return {obj->second_data, obj->second, nullptr, obj->second_id};
}

api::StreamData obj_data(const ObjMat *obj) {
return {obj->data, obj->value, nullptr, obj->id};
}

api::StreamData obj_data_first(const std::shared_ptr<ObjMat2> &obj) {
return {obj->first_data, obj->first, nullptr, obj->first_id};
}

api::StreamData obj_data_second(const std::shared_ptr<ObjMat2> &obj) {
return {obj->second_data, obj->second, nullptr, obj->second_id};
}

api::StreamData obj_data(const std::shared_ptr<ObjMat> &obj) {
return {obj->data, obj->value, nullptr, obj->id};
}

// api::StreamData > ObjMat/ObjMat2

ObjMat data_obj(const api::StreamData &data) {
return ObjMat{data.frame, data.frame_id, data.img};
}

ObjMat2 data_obj(const api::StreamData &first, const api::StreamData &second) {
return ObjMat2{
first.frame, first.frame_id, first.img,
second.frame, second.frame_id, second.img};
}

MYNTEYE_END_NAMESPACE
src/mynteye/api/data_tools.h (new file, 33 lines)
@@ -0,0 +1,33 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_API_DATA_TOOLS_H_
#define MYNTEYE_API_DATA_TOOLS_H_
#pragma once
#include <opencv2/imgproc/imgproc.hpp>
#include "mynteye/api/object.h"
#include "mynteye/api/api.h"
#include "mynteye/device/device.h"
MYNTEYE_BEGIN_NAMESPACE
cv::Mat frame2mat(const std::shared_ptr<device::Frame> &frame);
api::StreamData data2api(const device::StreamData &data);
api::StreamData obj_data_first(const ObjMat2 *obj);
api::StreamData obj_data_second(const ObjMat2 *obj);
api::StreamData obj_data(const ObjMat *obj);
api::StreamData obj_data_first(const std::shared_ptr<ObjMat2> &obj);
api::StreamData obj_data_second(const std::shared_ptr<ObjMat2> &obj);
api::StreamData obj_data(const std::shared_ptr<ObjMat> &obj);
ObjMat data_obj(const api::StreamData &data);
ObjMat2 data_obj(const api::StreamData &first, const api::StreamData &second);
MYNTEYE_END_NAMESPACE
#endif  // MYNTEYE_API_DATA_TOOLS_H_
@@ -19,11 +19,15 @@
#include "mynteye/logger.h"
#include "mynteye/util/strings.h"
#include "mynteye/util/times.h"
#include "mynteye/api/data_tools.h"

MYNTEYE_BEGIN_NAMESPACE

Processor::Processor(std::int32_t proc_period)
: proc_period_(std::move(proc_period)),
: last_frame_id_cd(0),
last_frame_id_cd_vice(0),
is_enable_cd(false),
proc_period_(std::move(proc_period)),
activated_(false),
input_ready_(false),
idle_(true),
@@ -41,9 +45,9 @@ Processor::Processor(std::int32_t proc_period)
Processor::~Processor() {
VLOG(2) << __func__;
Deactivate();
input_.reset(nullptr);
output_.reset(nullptr);
output_result_.reset(nullptr);
input_ = nullptr;
output_ = nullptr;
output_result_ = nullptr;
childs_.clear();
}
@@ -121,7 +125,7 @@ bool Processor::IsIdle() {
return idle_;
}

bool Processor::Process(const Object &in) {
bool Processor::Process(std::shared_ptr<Object> in) {
if (!activated_)
return false;
if (!idle_) {
@@ -131,13 +135,17 @@
return false;
}
}
if (!in.DecValidity()) {
if (in && !in->DecValidity()) {
LOG(WARNING) << Name() << " process with invalid input";
return false;
}
{
std::lock_guard<std::mutex> lk(mtx_input_ready_);
input_.reset(in.Clone());
if (ProcessInputConnection() == WITH_CLONE) {
input_.reset(in->Clone());
} else {
input_ = in;
}
input_ready_ = true;
}
cond_input_ready_.notify_all();
@@ -228,12 +236,16 @@ void Processor::Run() {
}
{
std::unique_lock<std::mutex> lk(mtx_result_);
if (ProcessOutputConnection() == WITH_CLONE) {
output_result_.reset(output_->Clone());
} else {
output_result_ = output_;
}
}

if (!childs_.empty()) {
for (auto child : childs_) {
child->Process(*output_);
child->Process(output_);
}
}
@@ -245,6 +257,106 @@ void Processor::Run() {
VLOG(2) << Name() << " thread end";
}

Processor::process_type Processor::ProcessOutputConnection() {
return WITH_CLONE;
}

Processor::process_type Processor::ProcessInputConnection() {
return WITH_CLONE;
}

api::StreamData Processor::GetStreamData(const Stream &stream) {
auto sum = getStreamsSum();
auto &&out = GetOutput();
Synthetic::Mode enable_mode = Synthetic::MODE_OFF;
auto streams = getTargetStreams();
for (auto it_s : streams) {
if (it_s.stream == stream) {
enable_mode = it_s.enabled_mode_;
break;
}
}
if (enable_mode == Synthetic::MODE_ON) {
if (sum == 1) {
if (out != nullptr) {
auto output = Object::Cast<ObjMat>(out);
if (output != nullptr) {
if (!is_enable_cd) {
if (output->data &&
last_frame_id_cd == output->data->frame_id) {
// cut the duplicate frame.
return {};
}
last_frame_id_cd = output->data->frame_id;
}
return obj_data(output);
}
VLOG(2) << "Frame not ready now";
}
} else if (sum == 2) {
static std::shared_ptr<ObjMat2> output = nullptr;
if (out != nullptr) {
output = Object::Cast<ObjMat2>(out);
}
auto streams = getTargetStreams();
if (output != nullptr) {
int num = 0;
for (auto it : streams) {
if (it.stream == stream) {
if (num == 1) {
if (!is_enable_cd) {
if (output->first_data &&
last_frame_id_cd == output->first_data->frame_id) {
// cut the duplicate frame.
return {};
}
last_frame_id_cd = output->first_data->frame_id;
}
return obj_data_first(output);
} else {
// last_frame_id_cd = output->second_data->frame_id;
if (!is_enable_cd) {
if (output->second_data &&
last_frame_id_cd_vice == output->second_data->frame_id) {
return {};
}
last_frame_id_cd_vice = output->second_data->frame_id;
}
return obj_data_second(output);
}
}
num++;
}
}
VLOG(2) << "Frame not ready now";
} else {
LOG(ERROR) << "error: invalid sum!";
}
return {};  // frame.empty() == true
}
LOG(ERROR) << "Failed to get stream data of " << stream
<< ", unsupported or disabled";
return {};  // frame.empty() == true
}

std::vector<api::StreamData> Processor::GetStreamDatas(const Stream &stream) {
Synthetic::Mode enable_mode = Synthetic::MODE_OFF;
auto streams = getTargetStreams();
for (auto it_s : streams) {
if (it_s.stream == stream) {
enable_mode = it_s.enabled_mode_;
break;
}
}
if (enable_mode == Synthetic::MODE_ON) {
return {GetStreamData(stream)};
} else {
LOG(ERROR) << "Failed to get stream data of " << stream
<< ", unsupported or disabled";
}
return {};
}

void Processor::SetIdle(bool idle) {
std::lock_guard<std::mutex> lk(mtx_state_);
idle_ = idle;
@@ -64,7 +64,11 @@ class Processor :
bool IsIdle();

/** Returns dropped or not. */
bool Process(const Object &in);
bool Process(std::shared_ptr<Object> in);

virtual api::StreamData GetStreamData(const Stream &stream);

virtual std::vector<api::StreamData> GetStreamDatas(const Stream &stream);

/**
* Returns the last output.
@@ -73,13 +77,25 @@ class Processor :
std::shared_ptr<Object> GetOutput();

std::uint64_t GetDroppedCount();
inline void setDupEnable(bool isEnable) {
is_enable_cd = isEnable;
}

protected:
virtual Object *OnCreateOutput() = 0;
virtual bool OnProcess(
Object *const in, Object *const out,
std::shared_ptr<Processor> const parent) = 0;
enum process_type{
WITH_CLONE,
WITHOUT_CLONE
};

virtual process_type ProcessOutputConnection();
virtual process_type ProcessInputConnection();
std::uint16_t last_frame_id_cd;
std::uint16_t last_frame_id_cd_vice;
bool is_enable_cd;
private:
/** Run in standalone thread. */
void Run();
@@ -98,10 +114,10 @@ class Processor :
std::uint64_t dropped_count_;
std::mutex mtx_state_;

std::unique_ptr<Object> input_;
std::unique_ptr<Object> output_;
std::shared_ptr<Object> input_;
std::shared_ptr<Object> output_;

std::unique_ptr<Object> output_result_;
std::shared_ptr<Object> output_result_;
std::mutex mtx_result_;

PreProcessCallback pre_callback_;
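The new process_type hook lets a concrete processor opt out of the default WITH_CLONE behaviour, which is what the commented-out overrides in the processor headers below hint at. A hypothetical sketch, not part of this change (class name, include paths, and ObjMat member copy are assumptions based on the code in this compare):

```cpp
#include <memory>
#include <string>

#include "mynteye/api/processor.h"  // assumed include path
#include "mynteye/api/object.h"     // assumed include path

MYNTEYE_BEGIN_NAMESPACE

// Hypothetical subclass: forward the shared_ptr<Object> without a deep copy.
class PassThroughProcessor : public Processor {
 public:
  using Processor::Processor;
  std::string Name() override { return "PassThroughProcessor"; }

 protected:
  process_type ProcessInputConnection() override { return WITHOUT_CLONE; }
  process_type ProcessOutputConnection() override { return WITHOUT_CLONE; }

  Object *OnCreateOutput() override { return new ObjMat(); }

  bool OnProcess(Object *const in, Object *const out,
      std::shared_ptr<Processor> const parent) override {
    const ObjMat *input = Object::Cast<ObjMat>(in);
    ObjMat *output = Object::Cast<ObjMat>(out);
    output->value = input->value;  // cv::Mat header copy, no pixel clone
    output->id = input->id;
    output->data = input->data;
    MYNTEYE_UNUSED(parent)
    return true;
  }
};

MYNTEYE_END_NAMESPACE
```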
@@ -34,6 +34,9 @@ class DepthProcessor : public Processor {
std::string Name() override;

protected:
// inline Processor::process_type ProcessInputConnection() override {
//   return Processor::WITHOUT_CLONE;
// }
Object *OnCreateOutput() override;
bool OnProcess(
Object *const in, Object *const out,
@@ -31,6 +31,12 @@ class DisparityNormalizedProcessor : public Processor {
std::string Name() override;

protected:
// inline Processor::process_type ProcessOutputConnection() override {
//   return Processor::WITHOUT_CLONE;
// }
// inline Processor::process_type ProcessInputConnection() override {
//   return Processor::WITHOUT_CLONE;
// }
Object *OnCreateOutput() override;
bool OnProcess(
Object *const in, Object *const out,
@@ -80,7 +80,7 @@ DisparityProcessor::DisparityProcessor(DisparityComputingMethod type,
bm_matcher->setBlockSize(15);
bm_matcher->setMinDisparity(0);
bm_matcher->setNumDisparities(64);
bm_matcher->setUniquenessRatio(15);
bm_matcher->setUniquenessRatio(60);
bm_matcher->setTextureThreshold(10);
bm_matcher->setSpeckleWindowSize(100);
bm_matcher->setSpeckleRange(4);
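For reference, the same block-matcher tuning expressed as a standalone OpenCV sketch (only the uniqueness ratio changes in this diff, 15 to 60; the factory function name here is illustrative):

```cpp
#include <opencv2/calib3d/calib3d.hpp>

// Builds a StereoBM configured like the DisparityProcessor above.
cv::Ptr<cv::StereoBM> make_bm_matcher() {
  auto bm = cv::StereoBM::create(64, 15);  // numDisparities, blockSize
  bm->setMinDisparity(0);
  bm->setUniquenessRatio(60);              // raised from 15 by this change
  bm->setTextureThreshold(10);
  bm->setSpeckleWindowSize(100);
  bm->setSpeckleRange(4);
  return bm;
}
```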
@@ -41,6 +41,9 @@ class DisparityProcessor : public Processor {
void NotifyComputingTypeChanged(const DisparityComputingMethod &MethodType);

protected:
// inline Processor::process_type ProcessOutputConnection() override {
//   return Processor::WITHOUT_CLONE;
// }
Object *OnCreateOutput() override;
bool OnProcess(
Object *const in, Object *const out,
@@ -105,6 +105,9 @@ bool PointsProcessor::OnProcess(

// Missing points denoted by NaNs
if (!DepthTraits<uint16_t>::valid(depth)) {
dptr[u][0] = 0;
dptr[u][1] = 0;
dptr[u][2] = 0;
continue;
}
dptr[u][0] = (u - center_x) * depth * constant_x ;
@@ -36,6 +36,9 @@ class PointsProcessor : public Processor {
std::string Name() override;

protected:
// inline Processor::process_type ProcessOutputConnection() override {
//   return Processor::WITHOUT_CLONE;
// }
Object *OnCreateOutput() override;
bool OnProcess(
Object *const in, Object *const out,
@@ -37,6 +37,12 @@ class PointsProcessorOCV : public Processor {
bool OnProcess(
Object *const in, Object *const out,
std::shared_ptr<Processor> const parent) override;
// inline Processor::process_type ProcessOutputConnection() override {
//   return Processor::WITHOUT_CLONE;
// }
// inline Processor::process_type ProcessInputConnection() override {
//   return Processor::WITHOUT_CLONE;
// }

private:
cv::Mat Q_;
@@ -335,6 +335,8 @@ void RectifyProcessor::InitParams(
IntrinsicsEquidistant in_right,
Extrinsics ex_right_to_left) {
calib_model = CalibrationModel::KANNALA_BRANDT;
in_left.ResizeIntrinsics();
in_right.ResizeIntrinsics();
camodocal::CameraPtr camera_odo_ptr_left =
generateCameraFromIntrinsicsEquidistant(in_left);
camodocal::CameraPtr camera_odo_ptr_right =
@@ -79,6 +79,12 @@ class RectifyProcessor : public Processor {
bool OnProcess(
Object *const in, Object *const out,
std::shared_ptr<Processor> const parent) override;
// inline Processor::process_type ProcessOutputConnection() override {
//   return Processor::WITHOUT_CLONE;
// }
// inline Processor::process_type ProcessInputConnection() override {
//   return Processor::WITHOUT_CLONE;
// }

private:
void InitParams(IntrinsicsEquidistant in_left,
@@ -80,6 +80,8 @@ void RectifyProcessorOCV::InitParams(
IntrinsicsPinhole in_right,
Extrinsics ex_right_to_left) {
calib_model = CalibrationModel::PINHOLE;
in_left.ResizeIntrinsics();
in_right.ResizeIntrinsics();
cv::Size size{in_left.width, in_left.height};

cv::Mat M1 =
@@ -49,6 +49,13 @@ class RectifyProcessorOCV : public Processor {
cv::Mat map11, map12, map21, map22;

protected:
// inline Processor::process_type ProcessOutputConnection() override {
//   return Processor::WITHOUT_CLONE;
// }
// inline Processor::process_type ProcessInputConnection() override {
//   return Processor::WITHOUT_CLONE;
// }

Object *OnCreateOutput() override;
bool OnProcess(
Object *const in, Object *const out,
@@ -14,15 +14,21 @@
#include "mynteye/api/processor/root_camera_processor.h"

#include <utility>
#include <vector>

#include <opencv2/calib3d/calib3d.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include "mynteye/logger.h"
#include "mynteye/api/synthetic.h"
#include "mynteye/device/device.h"
#include "mynteye/api/data_tools.h"
MYNTEYE_BEGIN_NAMESPACE
const char RootProcessor::NAME[] = "RootProcessor";

RootProcessor::RootProcessor(std::int32_t proc_period)
: Processor(std::move(proc_period)) {}
RootProcessor::RootProcessor(std::shared_ptr<Device> device,
std::int32_t proc_period)
: Processor(std::move(proc_period)),
device_(device) {}
RootProcessor::~RootProcessor() {
VLOG(2) << __func__;
}
@@ -31,13 +37,118 @@ std::string RootProcessor::Name() {
return NAME;
}

Object *RootProcessor::OnCreateOutput() {
s1s2Processor::s1s2Processor(std::shared_ptr<Device> device,
std::int32_t proc_period)
: RootProcessor(device, std::move(proc_period)) {}
s1s2Processor::~s1s2Processor() {
VLOG(2) << __func__;
}

Object *s1s2Processor::OnCreateOutput() {
return new ObjMat2();
}
bool RootProcessor::OnProcess(
bool s1s2Processor::OnProcess(
Object *const in, Object *const out,
std::shared_ptr<Processor> const parent) {
const ObjMat2 *input = Object::Cast<ObjMat2>(in);
ObjMat2 *output = Object::Cast<ObjMat2>(out);
output->second = input->second;
output->first = input->first;
output->first_id = input->first_id;
output->first_data = input->first_data;
output->second_id = input->second_id;
output->second_data = input->second_data;
MYNTEYE_UNUSED(parent)
return true;
}

void s1s2Processor::ProcessNativeStream(
const Stream &stream, const api::StreamData &data) {
std::unique_lock<std::mutex> lk(mtx_left_right_ready_);
static api::StreamData left_data, right_data;
if (stream == Stream::LEFT) {
left_data = data;
} else if (stream == Stream::RIGHT) {
right_data = data;
}
if (left_data.img && right_data.img &&
left_data.img->frame_id == right_data.img->frame_id) {
Process(std::make_shared<ObjMat2>(data_obj(left_data, right_data)));
}
return;
}

void s1s2Processor::StartVideoStreaming() {
Activate();
auto streams = getTargetStreams();
for (unsigned int j =0; j< streams.size(); j++) {
auto stream = streams[j].stream;
auto callback = streams[j].stream_callback;
target_streams_[j].enabled_mode_ = Synthetic::MODE_ON;
device_->SetStreamCallback(
stream,
[this, stream, callback](const device::StreamData &data) {
auto &&stream_data = data2api(data);
ProcessNativeStream(stream, stream_data);
},
true);
}
device_->Start(Source::VIDEO_STREAMING);
}

void s1s2Processor::StopVideoStreaming() {
Deactivate();
auto streams = getTargetStreams();
for (unsigned int j =0; j< streams.size(); j++) {
auto stream = streams[j].stream;
target_streams_[j].enabled_mode_ = Synthetic::MODE_OFF;
device_->SetStreamCallback(stream, nullptr);
}
device_->Stop(Source::VIDEO_STREAMING);
}
api::StreamData s1s2Processor::GetStreamData(const Stream &stream) {
Synthetic::Mode enable_mode = Synthetic::MODE_OFF;
auto streams = getTargetStreams();
for (auto it_s : streams) {
if (it_s.stream == stream) {
enable_mode = it_s.enabled_mode_;
break;
}
}
if (enable_mode == Synthetic::MODE_ON) {
auto res = data2api(device_->GetStreamData(stream));
if (res.img == nullptr ||
res.img->timestamp == last_frame_id_cd ||
res.frame.empty()) {
return {};
}
last_frame_id_cd = res.img->timestamp;
return res;
// return data2api(device_->GetStreamData(stream));
}
LOG(ERROR) << "Failed to get device stream data of " << stream
<< ", unsupported or disabled";
LOG(ERROR) << "Make sure you have enable " << stream;
return {};
}

std::vector<api::StreamData> s1s2Processor::GetStreamDatas(
const Stream &stream) {
Synthetic::Mode enable_mode = Synthetic::MODE_OFF;
auto streams = getTargetStreams();
for (auto it_s : streams) {
if (it_s.stream == stream) {
enable_mode = it_s.enabled_mode_;
break;
}
}
if (enable_mode == Synthetic::MODE_ON) {
std::vector<api::StreamData> datas;
for (auto &&data : device_->GetStreamDatas(stream)) {
datas.push_back(data2api(data));
}
return datas;
}
}

MYNTEYE_END_NAMESPACE
@@ -28,16 +28,48 @@ class RootProcessor : public Processor {
 public:
  static const char NAME[];

  explicit RootProcessor(std::int32_t proc_period = 0);
  explicit RootProcessor(std::shared_ptr<Device> device,
      std::int32_t proc_period = 0);
  virtual ~RootProcessor();

  std::string Name() override;
  virtual std::string Name();

  virtual void StartVideoStreaming() = 0;
  virtual void StopVideoStreaming() = 0;
  virtual api::StreamData GetStreamData(const Stream &stream) = 0;
  virtual std::vector<api::StreamData> GetStreamDatas(const Stream &stream) = 0;  // NOLINT
 protected:
  virtual Object *OnCreateOutput() = 0;
  virtual bool OnProcess(
      Object *const in, Object *const out,
      std::shared_ptr<Processor> const parent) = 0;
  std::shared_ptr<Device> device_;
};

class s1s2Processor : public RootProcessor {
 public:
  explicit s1s2Processor(std::shared_ptr<Device> device,
      std::int32_t proc_period = 0);
  virtual ~s1s2Processor();
  void StartVideoStreaming();
  void StopVideoStreaming();
  api::StreamData GetStreamData(const Stream &stream) override;
  std::vector<api::StreamData> GetStreamDatas(const Stream &stream) override;  // NOLINT
 protected:
  // inline Processor::process_type ProcessOutputConnection() override {
  //   return Processor::WITHOUT_CLONE;
  // }
  // inline Processor::process_type ProcessInputConnection() override {
  //   return Processor::WITHOUT_CLONE;
  // }
  Object *OnCreateOutput() override;
  bool OnProcess(
      Object *const in, Object *const out,
      std::shared_ptr<Processor> const parent) override;
 private:
  void ProcessNativeStream(
      const Stream &stream, const api::StreamData &data);
  std::mutex mtx_left_right_ready_;
};

MYNTEYE_END_NAMESPACE
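The hunks above split RootProcessor into an abstract source node and a device-backed s1s2Processor. A minimal sketch of how a caller could drive the new interface (device creation and the addTargetStreams() bookkeeping normally done by Synthetic::InitProcessors are assumed to have happened elsewhere; names follow the diff):

// Sketch only, not the SDK's own wiring.
#include <memory>
#include "mynteye/api/processor/root_camera_processor.h"

void RunPipelineOnce(const std::shared_ptr<Device> &device) {
  auto root = std::make_shared<s1s2Processor>(device, 0 /* proc_period */);
  // target streams (LEFT/RIGHT) are assumed to be registered before this point
  root->StartVideoStreaming();   // sets LEFT/RIGHT callbacks, starts Source::VIDEO_STREAMING
  auto left = root->GetStreamData(Stream::LEFT);    // empty if disabled or frame already consumed
  auto right = root->GetStreamData(Stream::RIGHT);
  root->StopVideoStreaming();    // clears callbacks, stops the device
}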
@@ -36,6 +36,7 @@
|
||||
#include "mynteye/api/processor/rectify_processor.h"
|
||||
#endif
|
||||
#include "mynteye/device/device.h"
|
||||
#include "mynteye/api/data_tools.h"
|
||||
|
||||
#define RECTIFY_PROC_PERIOD 0
|
||||
#define DISPARITY_PROC_PERIOD 0
|
||||
@@ -46,74 +47,6 @@
|
||||
|
||||
MYNTEYE_BEGIN_NAMESPACE
|
||||
|
||||
namespace {
|
||||
|
||||
cv::Mat frame2mat(const std::shared_ptr<device::Frame> &frame) {
|
||||
if (frame->format() == Format::YUYV) {
|
||||
cv::Mat img(frame->height(), frame->width(), CV_8UC2, frame->data());
|
||||
cv::cvtColor(img, img, cv::COLOR_YUV2BGR_YUY2);
|
||||
return img;
|
||||
} else if (frame->format() == Format::BGR888) {
|
||||
cv::Mat img(frame->height(), frame->width(), CV_8UC3, frame->data());
|
||||
return img;
|
||||
} else { // Format::GRAY
|
||||
return cv::Mat(frame->height(), frame->width(), CV_8UC1, frame->data());
|
||||
}
|
||||
}
|
||||
|
||||
api::StreamData data2api(const device::StreamData &data) {
|
||||
return {data.img, frame2mat(data.frame), data.frame, data.frame_id};
|
||||
}
|
||||
|
||||
void process_childs(
|
||||
const std::shared_ptr<Processor> &proc, const std::string &name,
|
||||
const Object &obj) {
|
||||
auto &&processor = find_processor<Processor>(proc, name);
|
||||
for (auto child : processor->GetChilds()) {
|
||||
child->Process(obj);
|
||||
}
|
||||
}
|
||||
|
||||
// ObjMat/ObjMat2 > api::StreamData
|
||||
|
||||
api::StreamData obj_data_first(const ObjMat2 *obj) {
|
||||
return {obj->first_data, obj->first, nullptr, obj->first_id};
|
||||
}
|
||||
|
||||
api::StreamData obj_data_second(const ObjMat2 *obj) {
|
||||
return {obj->second_data, obj->second, nullptr, obj->second_id};
|
||||
}
|
||||
|
||||
api::StreamData obj_data(const ObjMat *obj) {
|
||||
return {obj->data, obj->value, nullptr, obj->id};
|
||||
}
|
||||
|
||||
api::StreamData obj_data_first(const std::shared_ptr<ObjMat2> &obj) {
|
||||
return {obj->first_data, obj->first, nullptr, obj->first_id};
|
||||
}
|
||||
|
||||
api::StreamData obj_data_second(const std::shared_ptr<ObjMat2> &obj) {
|
||||
return {obj->second_data, obj->second, nullptr, obj->second_id};
|
||||
}
|
||||
|
||||
api::StreamData obj_data(const std::shared_ptr<ObjMat> &obj) {
|
||||
return {obj->data, obj->value, nullptr, obj->id};
|
||||
}
|
||||
|
||||
// api::StreamData > ObjMat/ObjMat2
|
||||
|
||||
ObjMat data_obj(const api::StreamData &data) {
|
||||
return ObjMat{data.frame, data.frame_id, data.img};
|
||||
}
|
||||
|
||||
ObjMat2 data_obj(const api::StreamData &first, const api::StreamData &second) {
|
||||
return ObjMat2{
|
||||
first.frame, first.frame_id, first.img,
|
||||
second.frame, second.frame_id, second.img};
|
||||
}
|
||||
|
||||
} // namespace
|
||||
|
||||
void Synthetic::InitCalibInfo() {
|
||||
if (calib_model_ == CalibrationModel::PINHOLE) {
|
||||
LOG(INFO) << "camera calib model: pinhole";
|
||||
@@ -149,11 +82,11 @@ Synthetic::Synthetic(API *api, CalibrationModel calib_model)
|
||||
CHECK_NOTNULL(api_);
|
||||
InitCalibInfo();
|
||||
InitProcessors();
|
||||
InitStreamSupports();
|
||||
}
|
||||
|
||||
Synthetic::~Synthetic() {
|
||||
VLOG(2) << __func__;
|
||||
processors_.clear();
|
||||
if (processor_) {
|
||||
processor_->Deactivate(true);
|
||||
processor_ = nullptr;
|
||||
@@ -171,19 +104,18 @@ void Synthetic::NotifyImageParamsChanged() {
|
||||
extr_ = std::make_shared<Extrinsics>(
|
||||
api_->GetExtrinsics(Stream::LEFT, Stream::RIGHT));
|
||||
}
|
||||
if (calib_model_ == CalibrationModel::PINHOLE) {
|
||||
auto &&processor = find_processor<RectifyProcessorOCV>(processor_);
|
||||
if (processor) processor->ReloadImageParams(intr_left_, intr_right_, extr_);
|
||||
auto processor = getProcessorWithStream(Stream::LEFT_RECTIFIED);
|
||||
|
||||
if (processor && calib_model_ == CalibrationModel::PINHOLE) {
|
||||
auto proc = static_cast<RectifyProcessorOCV*>(&(*processor));
|
||||
proc->ReloadImageParams(intr_left_, intr_right_, extr_);
|
||||
#ifdef WITH_CAM_MODELS
|
||||
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
|
||||
auto &&processor = find_processor<RectifyProcessor>(processor_);
|
||||
if (processor) processor->ReloadImageParams(intr_left_, intr_right_, extr_);
|
||||
} else if (processor && calib_model_ == CalibrationModel::KANNALA_BRANDT) {
|
||||
auto proc = static_cast<RectifyProcessor*>(&(*processor));
|
||||
proc->ReloadImageParams(intr_left_, intr_right_, extr_);
|
||||
#endif
|
||||
} else {
|
||||
LOG(ERROR) << "Unknow calib model type in device: "
|
||||
<< calib_model_ << ", use default pinhole model";
|
||||
auto &&processor = find_processor<RectifyProcessorOCV>(processor_);
|
||||
if (processor) processor->ReloadImageParams(intr_left_, intr_right_, extr_);
|
||||
LOG(ERROR) << "Unknow calib model type in device" << std::endl;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -242,12 +174,10 @@ bool Synthetic::Supports(const Stream &stream) const {
|
||||
return checkControlDateWithStream(stream);
|
||||
}
|
||||
|
||||
Synthetic::mode_t Synthetic::SupportsMode(const Stream &stream) const {
|
||||
if (checkControlDateWithStream(stream)) {
|
||||
auto data = getControlDateWithStream(stream);
|
||||
return data.support_mode_;
|
||||
void Synthetic::setDuplicate(bool isEnable) {
|
||||
for (auto it : processors_) {
|
||||
it->setDupEnable(isEnable);
|
||||
}
|
||||
return MODE_LAST;
|
||||
}
|
||||
|
||||
void Synthetic::EnableStreamData(
|
||||
@@ -257,14 +187,17 @@ void Synthetic::EnableStreamData(
|
||||
auto processor = getProcessorWithStream(stream);
|
||||
iterate_processor_CtoP_before(processor,
|
||||
[callback, try_tag](std::shared_ptr<Processor> proce){
|
||||
if (proce->Name() == "RootProcessor") {
|
||||
return;
|
||||
}
|
||||
auto streams = proce->getTargetStreams();
|
||||
int act_tag = 0;
|
||||
for (unsigned int i = 0; i < proce->getStreamsSum() ; i++) {
|
||||
if (proce->target_streams_[i].enabled_mode_ == MODE_LAST) {
|
||||
if (proce->target_streams_[i].enabled_mode_ == MODE_OFF) {
|
||||
callback(proce->target_streams_[i].stream);
|
||||
if (!try_tag) {
|
||||
act_tag++;
|
||||
proce->target_streams_[i].enabled_mode_ = MODE_SYNTHETIC;
|
||||
proce->target_streams_[i].enabled_mode_ = MODE_ON;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -280,14 +213,17 @@ void Synthetic::DisableStreamData(
|
||||
auto processor = getProcessorWithStream(stream);
|
||||
iterate_processor_PtoC_before(processor,
|
||||
[callback, try_tag](std::shared_ptr<Processor> proce){
|
||||
if (proce->Name() == "RootProcessor") {
|
||||
return;
|
||||
}
|
||||
auto streams = proce->getTargetStreams();
|
||||
int act_tag = 0;
|
||||
for (unsigned int i = 0; i < proce->getStreamsSum() ; i++) {
|
||||
if (proce->target_streams_[i].enabled_mode_ == MODE_SYNTHETIC) {
|
||||
if (proce->target_streams_[i].enabled_mode_ == MODE_ON) {
|
||||
callback(proce->target_streams_[i].stream);
|
||||
if (!try_tag) {
|
||||
act_tag++;
|
||||
proce->target_streams_[i].enabled_mode_ = MODE_LAST;
|
||||
proce->target_streams_[i].enabled_mode_ = MODE_OFF;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -315,8 +251,7 @@ void Synthetic::DisableStreamData(const Stream &stream) {
|
||||
bool Synthetic::IsStreamDataEnabled(const Stream &stream) const {
|
||||
if (checkControlDateWithStream(stream)) {
|
||||
auto data = getControlDateWithStream(stream);
|
||||
return data.enabled_mode_ == MODE_SYNTHETIC ||
|
||||
data.enabled_mode_ == MODE_NATIVE;
|
||||
return data.enabled_mode_ == MODE_ON;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
@@ -344,42 +279,11 @@ bool Synthetic::HasStreamCallback(const Stream &stream) const {
|
||||
}
|
||||
|
||||
void Synthetic::StartVideoStreaming() {
|
||||
auto &&device = api_->device();
|
||||
for (unsigned int i =0; i< processors_.size(); i++) {
|
||||
auto streams = processors_[i]->getTargetStreams();
|
||||
for (unsigned int j =0; j< streams.size(); j++) {
|
||||
if (processors_[i]->target_streams_[j].support_mode_ == MODE_NATIVE) {
|
||||
auto stream = processors_[i]->target_streams_[j].stream;
|
||||
device->SetStreamCallback(
|
||||
stream,
|
||||
[this, stream](const device::StreamData &data) {
|
||||
auto &&stream_data = data2api(data);
|
||||
ProcessNativeStream(stream, stream_data);
|
||||
// Need mutex if set callback after start
|
||||
if (HasStreamCallback(stream)) {
|
||||
auto data = getControlDateWithStream(stream);
|
||||
data.stream_callback(stream_data);
|
||||
}
|
||||
},
|
||||
true);
|
||||
}
|
||||
}
|
||||
}
|
||||
device->Start(Source::VIDEO_STREAMING);
|
||||
processor_->StartVideoStreaming();
|
||||
}
|
||||
|
||||
void Synthetic::StopVideoStreaming() {
|
||||
auto &&device = api_->device();
|
||||
for (unsigned int i =0; i< processors_.size(); i++) {
|
||||
auto streams = processors_[i]->getTargetStreams();
|
||||
for (unsigned int j =0; j< streams.size(); j++) {
|
||||
if (processors_[i]->target_streams_[j].support_mode_ == MODE_NATIVE) {
|
||||
auto stream = processors_[i]->target_streams_[j].stream;
|
||||
device->SetStreamCallback(stream, nullptr);
|
||||
}
|
||||
}
|
||||
}
|
||||
device->Stop(Source::VIDEO_STREAMING);
|
||||
processor_->StopVideoStreaming();
|
||||
}
|
||||
|
||||
void Synthetic::WaitForStreams() {
|
||||
@@ -387,69 +291,11 @@ void Synthetic::WaitForStreams() {
|
||||
}
|
||||
|
||||
api::StreamData Synthetic::GetStreamData(const Stream &stream) {
|
||||
auto &&mode = GetStreamEnabledMode(stream);
|
||||
if (mode == MODE_NATIVE) {
|
||||
auto &&device = api_->device();
|
||||
return data2api(device->GetStreamData(stream));
|
||||
} else if (mode == MODE_SYNTHETIC) {
|
||||
auto processor = getProcessorWithStream(stream);
|
||||
auto sum = processor->getStreamsSum();
|
||||
auto &&out = processor->GetOutput();
|
||||
static std::shared_ptr<ObjMat2> output = nullptr;
|
||||
if (sum == 1) {
|
||||
if (out != nullptr) {
|
||||
auto &&output = Object::Cast<ObjMat>(out);
|
||||
if (output != nullptr) {
|
||||
return obj_data(output);
|
||||
}
|
||||
VLOG(2) << "Rectify not ready now";
|
||||
}
|
||||
} else if (sum == 2) {
|
||||
if (out != nullptr) {
|
||||
output = Object::Cast<ObjMat2>(out);
|
||||
}
|
||||
auto streams = processor->getTargetStreams();
|
||||
if (output != nullptr) {
|
||||
int num = 0;
|
||||
for (auto it : streams) {
|
||||
if (it.stream == stream) {
|
||||
if (num == 1) {
|
||||
return obj_data_first(output);
|
||||
} else {
|
||||
return obj_data_second(output);
|
||||
}
|
||||
}
|
||||
num++;
|
||||
}
|
||||
}
|
||||
VLOG(2) << "Rectify not ready now";
|
||||
} else {
|
||||
LOG(ERROR) << "error: invalid sum!";
|
||||
}
|
||||
return {}; // frame.empty() == true
|
||||
} else {
|
||||
LOG(ERROR) << "Failed to get stream data of " << stream
|
||||
<< ", unsupported or disabled";
|
||||
return {}; // frame.empty() == true
|
||||
}
|
||||
return getProcessorWithStream(stream)->GetStreamData(stream);
|
||||
}
|
||||
|
||||
std::vector<api::StreamData> Synthetic::GetStreamDatas(const Stream &stream) {
|
||||
auto &&mode = GetStreamEnabledMode(stream);
|
||||
if (mode == MODE_NATIVE) {
|
||||
auto &&device = api_->device();
|
||||
std::vector<api::StreamData> datas;
|
||||
for (auto &&data : device->GetStreamDatas(stream)) {
|
||||
datas.push_back(data2api(data));
|
||||
}
|
||||
return datas;
|
||||
} else if (mode == MODE_SYNTHETIC) {
|
||||
return {GetStreamData(stream)};
|
||||
} else {
|
||||
LOG(ERROR) << "Failed to get stream data of " << stream
|
||||
<< ", unsupported or disabled";
|
||||
}
|
||||
return {};
|
||||
return getProcessorWithStream(stream)->GetStreamDatas(stream);
|
||||
}
|
||||
|
||||
void Synthetic::SetPlugin(std::shared_ptr<Plugin> plugin) {
|
||||
@@ -460,134 +306,87 @@ bool Synthetic::HasPlugin() const {
|
||||
return plugin_ != nullptr;
|
||||
}
|
||||
|
||||
void Synthetic::InitStreamSupports() {
|
||||
auto &&device = api_->device();
|
||||
if (device->Supports(Stream::LEFT) && device->Supports(Stream::RIGHT)) {
|
||||
auto processor = getProcessorWithStream(Stream::LEFT);
|
||||
for (unsigned int i = 0; i< processor->target_streams_.size(); i++) {
|
||||
if (processor->target_streams_[i].stream == Stream::LEFT) {
|
||||
processor->target_streams_[i].support_mode_ = MODE_NATIVE;
|
||||
}
|
||||
if (processor->target_streams_[i].stream == Stream::RIGHT) {
|
||||
processor->target_streams_[i].support_mode_ = MODE_NATIVE;
|
||||
}
|
||||
}
|
||||
|
||||
std::vector<Stream> stream_chain{
|
||||
Stream::LEFT_RECTIFIED, Stream::RIGHT_RECTIFIED,
|
||||
Stream::DISPARITY, Stream::DISPARITY_NORMALIZED,
|
||||
Stream::POINTS, Stream::DEPTH};
|
||||
for (auto &&stream : stream_chain) {
|
||||
auto processor = getProcessorWithStream(stream);
|
||||
for (unsigned int i = 0; i< processor->target_streams_.size(); i++) {
|
||||
if (processor->target_streams_[i].stream == stream) {
|
||||
if (device->Supports(stream)) {
|
||||
processor->target_streams_[i].support_mode_ = MODE_NATIVE;
|
||||
processor->target_streams_[i].enabled_mode_ = MODE_NATIVE;
|
||||
} else {
|
||||
processor->target_streams_[i].support_mode_ = MODE_SYNTHETIC;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Synthetic::mode_t Synthetic::GetStreamEnabledMode(const Stream &stream) const {
|
||||
if (checkControlDateWithStream(stream)) {
|
||||
auto data = getControlDateWithStream(stream);
|
||||
return data.enabled_mode_;
|
||||
}
|
||||
return MODE_LAST;
|
||||
}
|
||||
|
||||
bool Synthetic::IsStreamEnabledNative(const Stream &stream) const {
|
||||
return GetStreamEnabledMode(stream) == MODE_NATIVE;
|
||||
}
|
||||
|
||||
bool Synthetic::IsStreamEnabledSynthetic(const Stream &stream) const {
|
||||
return GetStreamEnabledMode(stream) == MODE_SYNTHETIC;
|
||||
return MODE_OFF;
|
||||
}
|
||||
|
||||
void Synthetic::InitProcessors() {
|
||||
std::shared_ptr<Processor> rectify_processor = nullptr;
|
||||
#ifdef WITH_CAM_MODELS
|
||||
std::shared_ptr<RectifyProcessor> rectify_processor_imp = nullptr;
|
||||
#endif
|
||||
cv::Mat Q;
|
||||
if (calib_model_ == CalibrationModel::PINHOLE) {
|
||||
auto &&rectify_processor_ocv =
|
||||
std::make_shared<RectifyProcessorOCV>(intr_left_, intr_right_, extr_,
|
||||
RECTIFY_PROC_PERIOD);
|
||||
Q = rectify_processor_ocv->Q;
|
||||
rectify_processor = rectify_processor_ocv;
|
||||
#ifdef WITH_CAM_MODELS
|
||||
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
|
||||
rectify_processor_imp =
|
||||
std::make_shared<RectifyProcessor>(intr_left_, intr_right_, extr_,
|
||||
RECTIFY_PROC_PERIOD);
|
||||
rectify_processor = rectify_processor_imp;
|
||||
#endif
|
||||
} else {
|
||||
LOG(ERROR) << "Unknow calib model type in device: "
|
||||
<< calib_model_ << ", use default pinhole model";
|
||||
auto &&rectify_processor_ocv =
|
||||
std::make_shared<RectifyProcessorOCV>(intr_left_, intr_right_, extr_,
|
||||
RECTIFY_PROC_PERIOD);
|
||||
rectify_processor = rectify_processor_ocv;
|
||||
}
|
||||
std::shared_ptr<Processor> points_processor = nullptr;
|
||||
std::shared_ptr<Processor> depth_processor = nullptr;
|
||||
|
||||
auto &&disparity_processor =
|
||||
std::make_shared<DisparityProcessor>(DisparityComputingMethod::SGBM,
|
||||
DISPARITY_PROC_PERIOD);
|
||||
auto &&disparitynormalized_processor =
|
||||
std::make_shared<DisparityNormalizedProcessor>(
|
||||
DISPARITY_NORM_PROC_PERIOD);
|
||||
std::shared_ptr<Processor> points_processor = nullptr;
|
||||
|
||||
auto root_processor =
|
||||
std::make_shared<s1s2Processor>(api_->device(), ROOT_PROC_PERIOD);
|
||||
|
||||
if (calib_model_ == CalibrationModel::PINHOLE) {
|
||||
// PINHOLE
|
||||
auto &&rectify_processor_ocv =
|
||||
std::make_shared<RectifyProcessorOCV>(intr_left_, intr_right_, extr_,
|
||||
RECTIFY_PROC_PERIOD);
|
||||
rectify_processor = rectify_processor_ocv;
|
||||
points_processor = std::make_shared<PointsProcessorOCV>(
|
||||
Q, POINTS_PROC_PERIOD);
|
||||
rectify_processor_ocv->Q, POINTS_PROC_PERIOD);
|
||||
depth_processor = std::make_shared<DepthProcessorOCV>(DEPTH_PROC_PERIOD);
|
||||
|
||||
root_processor->AddChild(rectify_processor);
|
||||
rectify_processor->AddChild(disparity_processor);
|
||||
disparity_processor->AddChild(disparitynormalized_processor);
|
||||
disparity_processor->AddChild(points_processor);
|
||||
points_processor->AddChild(depth_processor);
|
||||
#ifdef WITH_CAM_MODELS
|
||||
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
|
||||
// KANNALA_BRANDT
|
||||
auto rectify_processor_imp =
|
||||
std::make_shared<RectifyProcessor>(intr_left_, intr_right_, extr_,
|
||||
RECTIFY_PROC_PERIOD);
|
||||
rectify_processor = rectify_processor_imp;
|
||||
points_processor = std::make_shared<PointsProcessor>(
|
||||
rectify_processor_imp -> getCalibInfoPair(),
|
||||
POINTS_PROC_PERIOD);
|
||||
#endif
|
||||
} else {
|
||||
points_processor = std::make_shared<PointsProcessorOCV>(
|
||||
Q, POINTS_PROC_PERIOD);
|
||||
}
|
||||
std::shared_ptr<Processor> depth_processor = nullptr;
|
||||
if (calib_model_ == CalibrationModel::PINHOLE) {
|
||||
depth_processor = std::make_shared<DepthProcessorOCV>(DEPTH_PROC_PERIOD);
|
||||
#ifdef WITH_CAM_MODELS
|
||||
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
|
||||
depth_processor = std::make_shared<DepthProcessor>(
|
||||
rectify_processor_imp -> getCalibInfoPair(),
|
||||
DEPTH_PROC_PERIOD);
|
||||
|
||||
root_processor->AddChild(rectify_processor);
|
||||
rectify_processor->AddChild(disparity_processor);
|
||||
disparity_processor->AddChild(disparitynormalized_processor);
|
||||
disparity_processor->AddChild(depth_processor);
|
||||
depth_processor->AddChild(points_processor);
|
||||
#endif
|
||||
} else {
|
||||
depth_processor = std::make_shared<DepthProcessorOCV>(DEPTH_PROC_PERIOD);
|
||||
// UNKNOWN
LOG(ERROR) << "Unknown calib model type in device: "
           << calib_model_;
|
||||
return;
|
||||
}
|
||||
auto root_processor =
|
||||
std::make_shared<RootProcessor>(ROOT_PROC_PERIOD);
|
||||
root_processor->AddChild(rectify_processor);
|
||||
|
||||
root_processor->addTargetStreams(
|
||||
{Stream::LEFT, Mode::MODE_OFF, nullptr});
|
||||
root_processor->addTargetStreams(
|
||||
{Stream::RIGHT, Mode::MODE_OFF, nullptr});
|
||||
rectify_processor->addTargetStreams(
|
||||
{Stream::LEFT_RECTIFIED, Mode::MODE_LAST, Mode::MODE_LAST, nullptr});
|
||||
{Stream::LEFT_RECTIFIED, Mode::MODE_OFF, nullptr});
|
||||
rectify_processor->addTargetStreams(
|
||||
{Stream::RIGHT_RECTIFIED, Mode::MODE_LAST, Mode::MODE_LAST, nullptr});
|
||||
{Stream::RIGHT_RECTIFIED, Mode::MODE_OFF, nullptr});
|
||||
disparity_processor->addTargetStreams(
|
||||
{Stream::DISPARITY, Mode::MODE_LAST, Mode::MODE_LAST, nullptr});
|
||||
{Stream::DISPARITY, Mode::MODE_OFF, nullptr});
|
||||
disparitynormalized_processor->addTargetStreams(
|
||||
{Stream::DISPARITY_NORMALIZED, Mode::MODE_LAST, Mode::MODE_LAST, nullptr});
|
||||
{Stream::DISPARITY_NORMALIZED, Mode::MODE_OFF, nullptr});
|
||||
points_processor->addTargetStreams(
|
||||
{Stream::POINTS, Mode::MODE_LAST, Mode::MODE_LAST, nullptr});
|
||||
{Stream::POINTS, Mode::MODE_OFF, nullptr});
|
||||
depth_processor->addTargetStreams(
|
||||
{Stream::DEPTH, Mode::MODE_LAST, Mode::MODE_LAST, nullptr});
|
||||
root_processor->addTargetStreams(
|
||||
{Stream::LEFT, Mode::MODE_NATIVE, Mode::MODE_NATIVE, nullptr});
|
||||
root_processor->addTargetStreams(
|
||||
{Stream::RIGHT, Mode::MODE_NATIVE, Mode::MODE_NATIVE, nullptr});
|
||||
{Stream::DEPTH, Mode::MODE_OFF, nullptr});
|
||||
|
||||
processors_.push_back(root_processor);
|
||||
processors_.push_back(rectify_processor);
|
||||
@@ -596,6 +395,8 @@ void Synthetic::InitProcessors() {
|
||||
processors_.push_back(points_processor);
|
||||
processors_.push_back(depth_processor);
|
||||
using namespace std::placeholders; // NOLINT
|
||||
root_processor->SetProcessCallback(
|
||||
std::bind(&Synthetic::OnDeviceProcess, this, _1, _2, _3));
|
||||
rectify_processor->SetProcessCallback(
|
||||
std::bind(&Synthetic::OnRectifyProcess, this, _1, _2, _3));
|
||||
disparity_processor->SetProcessCallback(
|
||||
@@ -607,6 +408,8 @@ void Synthetic::InitProcessors() {
|
||||
depth_processor->SetProcessCallback(
|
||||
std::bind(&Synthetic::OnDepthProcess, this, _1, _2, _3));
|
||||
|
||||
root_processor->SetPostProcessCallback(
|
||||
std::bind(&Synthetic::OnDevicePostProcess, this, _1));
|
||||
rectify_processor->SetPostProcessCallback(
|
||||
std::bind(&Synthetic::OnRectifyPostProcess, this, _1));
|
||||
disparity_processor->SetPostProcessCallback(
|
||||
@@ -618,121 +421,15 @@ void Synthetic::InitProcessors() {
|
||||
depth_processor->SetPostProcessCallback(
|
||||
std::bind(&Synthetic::OnDepthPostProcess, this, _1));
|
||||
|
||||
if (calib_model_ == CalibrationModel::PINHOLE) {
|
||||
// PINHOLE
|
||||
rectify_processor->AddChild(disparity_processor);
|
||||
disparity_processor->AddChild(disparitynormalized_processor);
|
||||
disparity_processor->AddChild(points_processor);
|
||||
points_processor->AddChild(depth_processor);
|
||||
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
|
||||
// KANNALA_BRANDT
|
||||
rectify_processor->AddChild(disparity_processor);
|
||||
disparity_processor->AddChild(disparitynormalized_processor);
|
||||
disparity_processor->AddChild(depth_processor);
|
||||
depth_processor->AddChild(points_processor);
|
||||
} else {
|
||||
// UNKNOWN
LOG(ERROR) << "Unknown calib model type in device: "
           << calib_model_;
|
||||
}
|
||||
|
||||
processor_ = rectify_processor;
|
||||
processor_ = root_processor;
|
||||
}
|
||||
|
||||
void Synthetic::ProcessNativeStream(
|
||||
const Stream &stream, const api::StreamData &data) {
|
||||
NotifyStreamData(stream, data);
|
||||
if (stream == Stream::LEFT || stream == Stream::RIGHT) {
|
||||
std::unique_lock<std::mutex> lk(mtx_left_right_ready_);
|
||||
static api::StreamData left_data, right_data;
|
||||
if (stream == Stream::LEFT) {
|
||||
left_data = data;
|
||||
} else if (stream == Stream::RIGHT) {
|
||||
right_data = data;
|
||||
}
|
||||
if (left_data.img && right_data.img &&
|
||||
left_data.img->frame_id == right_data.img->frame_id) {
|
||||
std::shared_ptr<Processor> processor = nullptr;
|
||||
if (calib_model_ == CalibrationModel::PINHOLE) {
|
||||
processor = find_processor<RectifyProcessorOCV>(processor_);
|
||||
#ifdef WITH_CAM_MODELS
|
||||
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
|
||||
processor = find_processor<RectifyProcessor>(processor_);
|
||||
#endif
|
||||
} else {
|
||||
LOG(ERROR) << "Unknow calib model type in device: "
|
||||
<< calib_model_ << ", use default pinhole model";
|
||||
processor = find_processor<RectifyProcessorOCV>(processor_);
|
||||
}
|
||||
processor->Process(data_obj(left_data, right_data));
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (stream == Stream::LEFT_RECTIFIED || stream == Stream::RIGHT_RECTIFIED) {
|
||||
static api::StreamData left_rect_data, right_rect_data;
|
||||
if (stream == Stream::LEFT_RECTIFIED) {
|
||||
left_rect_data = data;
|
||||
} else if (stream == Stream::RIGHT_RECTIFIED) {
|
||||
right_rect_data = data;
|
||||
}
|
||||
if (left_rect_data.img && right_rect_data.img &&
|
||||
left_rect_data.img->frame_id == right_rect_data.img->frame_id) {
|
||||
std::string name = RectifyProcessorOCV::NAME;
|
||||
if (calib_model_ == CalibrationModel::PINHOLE) {
|
||||
name = RectifyProcessorOCV::NAME;
|
||||
#ifdef WITH_CAM_MODELS
|
||||
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
|
||||
name = RectifyProcessor::NAME;
|
||||
#endif
|
||||
}
|
||||
process_childs(processor_, name,
|
||||
data_obj(left_rect_data, right_rect_data));
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
switch (stream) {
|
||||
case Stream::DISPARITY: {
|
||||
process_childs(processor_, DisparityProcessor::NAME, data_obj(data));
|
||||
} break;
|
||||
case Stream::DISPARITY_NORMALIZED: {
|
||||
process_childs(processor_, DisparityNormalizedProcessor::NAME,
|
||||
data_obj(data));
|
||||
} break;
|
||||
case Stream::POINTS: {
|
||||
if (calib_model_ == CalibrationModel::PINHOLE) {
|
||||
// PINHOLE
|
||||
process_childs(processor_, PointsProcessorOCV::NAME, data_obj(data));
|
||||
#ifdef WITH_CAM_MODELS
|
||||
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
|
||||
// KANNALA_BRANDT
|
||||
process_childs(processor_, PointsProcessor::NAME, data_obj(data));
|
||||
#endif
|
||||
} else {
|
||||
// UNKNOWN
LOG(ERROR) << "Unknown calib model type in device: "
           << calib_model_;
|
||||
}
|
||||
} break;
|
||||
case Stream::DEPTH: {
|
||||
if (calib_model_ == CalibrationModel::PINHOLE) {
|
||||
// PINHOLE
|
||||
process_childs(processor_, DepthProcessorOCV::NAME, data_obj(data));
|
||||
#ifdef WITH_CAM_MODELS
|
||||
} else if (calib_model_ == CalibrationModel::KANNALA_BRANDT) {
|
||||
// KANNALA_BRANDT
|
||||
process_childs(processor_, DepthProcessor::NAME, data_obj(data));
|
||||
#endif
|
||||
} else {
|
||||
// UNKNOWN
LOG(ERROR) << "Unknown calib model type in device: "
           << calib_model_;
|
||||
}
|
||||
} break;
|
||||
default:
|
||||
break;
|
||||
}
|
||||
bool Synthetic::OnDeviceProcess(
|
||||
Object *const in, Object *const out,
|
||||
std::shared_ptr<Processor> const parent) {
|
||||
MYNTEYE_UNUSED(parent)
|
||||
return GetStreamEnabledMode(Stream::LEFT) != MODE_ON
|
||||
|| GetStreamEnabledMode(Stream::RIGHT) != MODE_ON;
|
||||
}
|
||||
|
||||
bool Synthetic::OnRectifyProcess(
|
||||
@@ -742,8 +439,8 @@ bool Synthetic::OnRectifyProcess(
|
||||
if (plugin_ && plugin_->OnRectifyProcess(in, out)) {
|
||||
return true;
|
||||
}
|
||||
return GetStreamEnabledMode(Stream::LEFT_RECTIFIED) != MODE_SYNTHETIC;
|
||||
// && GetStreamEnabledMode(Stream::RIGHT_RECTIFIED) != MODE_SYNTHETIC
|
||||
return GetStreamEnabledMode(Stream::LEFT_RECTIFIED) != MODE_ON
|
||||
&& GetStreamEnabledMode(Stream::RIGHT_RECTIFIED) != MODE_ON;
|
||||
}
|
||||
|
||||
bool Synthetic::OnDisparityProcess(
|
||||
@@ -753,7 +450,7 @@ bool Synthetic::OnDisparityProcess(
|
||||
if (plugin_ && plugin_->OnDisparityProcess(in, out)) {
|
||||
return true;
|
||||
}
|
||||
return GetStreamEnabledMode(Stream::DISPARITY) != MODE_SYNTHETIC;
|
||||
return GetStreamEnabledMode(Stream::DISPARITY) != MODE_ON;
|
||||
}
|
||||
|
||||
bool Synthetic::OnDisparityNormalizedProcess(
|
||||
@@ -763,7 +460,7 @@ bool Synthetic::OnDisparityNormalizedProcess(
|
||||
if (plugin_ && plugin_->OnDisparityNormalizedProcess(in, out)) {
|
||||
return true;
|
||||
}
|
||||
return GetStreamEnabledMode(Stream::DISPARITY_NORMALIZED) != MODE_SYNTHETIC;
|
||||
return GetStreamEnabledMode(Stream::DISPARITY_NORMALIZED) != MODE_ON;
|
||||
}
|
||||
|
||||
bool Synthetic::OnPointsProcess(
|
||||
@@ -773,7 +470,7 @@ bool Synthetic::OnPointsProcess(
|
||||
if (plugin_ && plugin_->OnPointsProcess(in, out)) {
|
||||
return true;
|
||||
}
|
||||
return GetStreamEnabledMode(Stream::POINTS) != MODE_SYNTHETIC;
|
||||
return GetStreamEnabledMode(Stream::POINTS) != MODE_ON;
|
||||
}
|
||||
|
||||
bool Synthetic::OnDepthProcess(
|
||||
@@ -783,7 +480,22 @@ bool Synthetic::OnDepthProcess(
|
||||
if (plugin_ && plugin_->OnDepthProcess(in, out)) {
|
||||
return true;
|
||||
}
|
||||
return GetStreamEnabledMode(Stream::DEPTH) != MODE_SYNTHETIC;
|
||||
return GetStreamEnabledMode(Stream::DEPTH) != MODE_ON;
|
||||
}
|
||||
|
||||
void Synthetic::OnDevicePostProcess(Object *const out) {
|
||||
const ObjMat2 *output = Object::Cast<ObjMat2>(out);
|
||||
NotifyStreamData(Stream::LEFT, obj_data_first(output));
|
||||
NotifyStreamData(Stream::RIGHT, obj_data_second(output));
|
||||
if (HasStreamCallback(Stream::LEFT)) {
|
||||
auto data = getControlDateWithStream(Stream::LEFT);
|
||||
data.stream_callback(obj_data_first(output));
|
||||
}
|
||||
if (HasStreamCallback(Stream::RIGHT)) {
|
||||
auto data = getControlDateWithStream(Stream::RIGHT);
|
||||
if (data.stream_callback)
|
||||
data.stream_callback(obj_data_second(output));
|
||||
}
|
||||
}
|
||||
|
||||
void Synthetic::OnRectifyPostProcess(Object *const out) {
|
||||
|
||||
@@ -29,6 +29,7 @@ MYNTEYE_BEGIN_NAMESPACE
|
||||
class API;
|
||||
class Plugin;
|
||||
class Processor;
|
||||
class RootProcessor;
|
||||
|
||||
struct Object;
|
||||
|
||||
@@ -40,14 +41,12 @@ class Synthetic {
  using stream_switch_callback_t = API::stream_switch_callback_t;

  typedef enum Mode {
    MODE_NATIVE,     // Native stream
    MODE_SYNTHETIC,  // Synthetic stream
    MODE_LAST        // Unsupported
    MODE_ON,   // On
    MODE_OFF   // Off
  } mode_t;

  struct stream_control_t {
    Stream stream;
    mode_t support_mode_;
    mode_t enabled_mode_;
    stream_callback_t stream_callback;
  };
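The three-state mode collapses to a plain on/off switch here; whether a stream is produced natively or synthesized is no longer encoded in the enum. A rough mapping of the old states onto the new ones, as implied by the rest of this diff (my reading, not an authoritative table):

// Old Synthetic::Mode                  New Synthetic::Mode
// MODE_NATIVE     (device stream)   -> MODE_ON   (stream enabled)
// MODE_SYNTHETIC  (computed stream) -> MODE_ON   (stream enabled)
// MODE_LAST       (unsupported)     -> MODE_OFF  (stream disabled)
// EnableStreamData/DisableStreamData and the OnXxxProcess gates below simply
// flip enabled_mode_ between MODE_OFF and MODE_ON.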
@@ -60,7 +59,6 @@ class Synthetic {
|
||||
void NotifyImageParamsChanged();
|
||||
|
||||
bool Supports(const Stream &stream) const;
|
||||
mode_t SupportsMode(const Stream &stream) const;
|
||||
|
||||
void EnableStreamData(const Stream &stream);
|
||||
void DisableStreamData(const Stream &stream);
|
||||
@@ -85,6 +83,8 @@ class Synthetic {
|
||||
void SetPlugin(std::shared_ptr<Plugin> plugin);
|
||||
bool HasPlugin() const;
|
||||
|
||||
void setDuplicate(bool isEnable);
|
||||
|
||||
const struct stream_control_t getControlDateWithStream(
|
||||
const Stream& stream) const;
|
||||
void setControlDateCallbackWithStream(
|
||||
@@ -96,11 +96,8 @@ class Synthetic {
|
||||
|
||||
private:
|
||||
void InitCalibInfo();
|
||||
void InitStreamSupports();
|
||||
|
||||
mode_t GetStreamEnabledMode(const Stream &stream) const;
|
||||
bool IsStreamEnabledNative(const Stream &stream) const;
|
||||
bool IsStreamEnabledSynthetic(const Stream &stream) const;
|
||||
|
||||
void EnableStreamData(const Stream &stream, std::uint32_t depth);
|
||||
void DisableStreamData(const Stream &stream, std::uint32_t depth);
|
||||
@@ -112,8 +109,9 @@ class Synthetic {
|
||||
template <class T>
|
||||
bool DeactivateProcessor(bool tree = false);
|
||||
|
||||
void ProcessNativeStream(const Stream &stream, const api::StreamData &data);
|
||||
|
||||
bool OnDeviceProcess(
|
||||
Object *const in, Object *const out,
|
||||
std::shared_ptr<Processor> const parent);
|
||||
bool OnRectifyProcess(
|
||||
Object *const in, Object *const out,
|
||||
std::shared_ptr<Processor> const parent);
|
||||
@@ -130,6 +128,7 @@ class Synthetic {
|
||||
Object *const in, Object *const out,
|
||||
std::shared_ptr<Processor> const parent);
|
||||
|
||||
void OnDevicePostProcess(Object *const out);
|
||||
void OnRectifyPostProcess(Object *const out);
|
||||
void OnDisparityPostProcess(Object *const out);
|
||||
void OnDisparityNormalizedPostProcess(Object *const out);
|
||||
@@ -140,25 +139,22 @@ class Synthetic {
|
||||
|
||||
API *api_;
|
||||
|
||||
std::shared_ptr<Processor> processor_;
|
||||
|
||||
std::shared_ptr<RootProcessor> processor_;
|
||||
std::vector<std::shared_ptr<Processor>> processors_;
|
||||
std::shared_ptr<Plugin> plugin_;
|
||||
|
||||
CalibrationModel calib_model_;
|
||||
std::mutex mtx_left_right_ready_;
|
||||
|
||||
std::shared_ptr<IntrinsicsBase> intr_left_;
|
||||
std::shared_ptr<IntrinsicsBase> intr_right_;
|
||||
std::shared_ptr<Extrinsics> extr_;
|
||||
bool calib_default_tag_;
|
||||
|
||||
std::vector<std::shared_ptr<Processor>> processors_;
|
||||
|
||||
stream_data_listener_t stream_data_listener_;
|
||||
};
|
||||
|
||||
class SyntheticProcessorPart {
|
||||
private:
|
||||
protected:
|
||||
inline std::vector<Synthetic::stream_control_t> getTargetStreams() {
|
||||
return target_streams_;
|
||||
}
|
||||
|
||||
159  src/mynteye/api/version_checker.cc  (new file)
@@ -0,0 +1,159 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/api/version_checker.h"
#include "mynteye/device/utils.h"
#include "mynteye/logger.h"
#include "mynteye/types.h"

MYNTEYE_BEGIN_NAMESPACE

typedef struct {
  const std::string device_type;
  const std::string sdk_version;
  const std::string firmware_version;
  const std::string status;
} firmware_version_match_table_unit;

const char* ERRO_DESCRIPTION_F =
    "Please update the firmware at first";
const char* ERRO_DESCRIPTION_S =
    "Please update the SDK at first";
const char* WARN_DESCRIPTION_F =
    "We suggest that you update the firmware";
const char* WARN_DESCRIPTION_S =
    "We suggest that you update the SDK";
const char* PASS_DESCRIPTION = "pass";
const char* PASS_OUTOF_TABLE_WARNING = "You're using a custom mynteye device";

/** firmware/sdk version match table */
/**----device type-----sdk version---firmware version-----pass tag-----*/
static const firmware_version_match_table_unit FSVM_TABLE[] = {
  /** S1030 */
  {"MYNT-EYE-S1030", ">2.3.3", ">2.3.0", PASS_DESCRIPTION},
  {"MYNT-EYE-S1030", ">2.3.3", "<2.4.0", WARN_DESCRIPTION_F},
  {"MYNT-EYE-S1030", ">0.0.0", ">2.2.0", PASS_DESCRIPTION},
  {"MYNT-EYE-S1030", ">0.0.0", "<2.3.0", WARN_DESCRIPTION_F},
  /** S2100 */
  {"MYNT-EYE-S2100", ">2.3.3", ">1.0", PASS_DESCRIPTION},
  {"MYNT-EYE-S2100", ">2.3.3", "<1.1", WARN_DESCRIPTION_F},
  {"MYNT-EYE-S2100", ">2.3.0", ">0.9", PASS_DESCRIPTION},
  {"MYNT-EYE-S2100", ">0.0.0", ">0.9", WARN_DESCRIPTION_S},
  {"MYNT-EYE-S2100", ">0.0.0", "<1.0", WARN_DESCRIPTION_F},
  /** S210A */
  {"MYNT-EYE-S210A", ">2.3.3", ">1.0", PASS_DESCRIPTION},
  {"MYNT-EYE-S210A", ">2.3.3", "<1.1", WARN_DESCRIPTION_F},
  {"MYNT-EYE-S210A", ">2.3.0", ">0.9", PASS_DESCRIPTION},
  {"MYNT-EYE-S210A", ">0.0.0", ">0.9", WARN_DESCRIPTION_S},
  {"MYNT-EYE-S210A", ">0.0.0", "<1.0", WARN_DESCRIPTION_F},
};

void getVersion(const std::string &str, char *version) {
  std::string st1("");
  int j = 0;
  for (size_t i = 0; i < str.size(); i++) {
    if (str[i] == '.') {
      version[j++] = atoi(st1.c_str());
      st1 = "";
    } else {
      st1 += str[i];
    }
  }
  version[j++] = atoi(st1.c_str());
}

bool conditionMatch(const std::string& condition, const std::string& target) {
  char version[4] = {0};
  char version_c[4] = {0};
  getVersion(target, version);
  int tag_c = 0;
  std::string condition_c;
  if (condition[0] == '>') {
    tag_c = 1;
    condition_c = condition.substr(1);
  } else if (condition[0] == '<') {
    tag_c = -1;
    condition_c = condition.substr(1);
  } else {
    tag_c = 0;
    condition_c = condition;
  }
  getVersion(condition_c, version_c);
  int tag_big = memcmp(version, version_c, 4);
  if (tag_big * tag_c > 0 || (tag_big == 0 && tag_c == 0)) return true;
  return false;
}

enum STATUS_UNIT {
  ST_PASS,
  ST_ERRO_F,
  ST_ERRO_S,
  ST_NOT_PASS
};

STATUS_UNIT checkUnit(const std::string& sdkv,
    const std::string& devn,
    const std::string& firmv,
    const firmware_version_match_table_unit& condition) {
  if (condition.device_type == devn &&
      conditionMatch(condition.sdk_version, sdkv) &&
      conditionMatch(condition.firmware_version, firmv)) {
    if (condition.status == ERRO_DESCRIPTION_F) return ST_ERRO_F;
    if (condition.status == ERRO_DESCRIPTION_S) return ST_ERRO_S;
    if (condition.status == WARN_DESCRIPTION_F ||
        condition.status == WARN_DESCRIPTION_S) {
      LOG(WARNING) << condition.status;
    }
    return ST_PASS;
  }
  return ST_NOT_PASS;
}

bool checkIfDeviceInTable(const std::string& devn) {
  for (size_t i = 0;
      i < sizeof(FSVM_TABLE)/sizeof(firmware_version_match_table_unit);
      i++) {
    if (FSVM_TABLE[i].device_type == devn) {
      return true;
    }
  }
  return false;
}

bool checkFirmwareVersion(const std::shared_ptr<API> api) {
  auto sdkv = api->GetSDKVersion();
  auto devn = api->GetInfo(Info::DEVICE_NAME);
  auto firmv = api->GetInfo(Info::FIRMWARE_VERSION);

  if (!checkIfDeviceInTable(devn)) {
    LOG(WARNING) << PASS_OUTOF_TABLE_WARNING;
    return true;
  }

  for (size_t i = 0;
      i < sizeof(FSVM_TABLE)/sizeof(firmware_version_match_table_unit);
      i++) {
    auto res = checkUnit(sdkv, devn, firmv, FSVM_TABLE[i]);
    if (res == ST_PASS) {
      return true;
    } else if (res == ST_ERRO_S || res == ST_ERRO_F) {
      LOG(ERROR) << FSVM_TABLE[i].status;
      return false;
    }
  }
  LOG(ERROR) << ERRO_DESCRIPTION_S;
  return false;
}

MYNTEYE_END_NAMESPACE
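For context, a hedged sketch of how the new check is meant to be called, plus one table row resolved by hand (API::Create(argc, argv) is the SDK's usual entry point; the sample version numbers are made up):

// Sketch only: refuse to run when the table above reports an ERRO_* status.
#include "mynteye/api/api.h"
#include "mynteye/api/version_checker.h"

int main(int argc, char *argv[]) {
  auto api = API::Create(argc, argv);
  if (!api) return 1;
  if (!checkFirmwareVersion(api)) return 1;  // ERRO_DESCRIPTION_F / _S already logged
  // ... normal streaming code ...
  return 0;
}

// One row by hand: conditionMatch(">2.3.0", "2.3.4")
//   getVersion("2.3.4") -> {2, 3, 4, 0}; getVersion("2.3.0") -> {2, 3, 0, 0}
//   memcmp(...) > 0 and tag_c = +1, so tag_big * tag_c > 0 -> match.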
@@ -11,27 +11,15 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_DEVICE_STANDARD2_DEVICE_S210A_H_
#define MYNTEYE_DEVICE_STANDARD2_DEVICE_S210A_H_
#ifndef MYNTEYE_API_VERSION_CHECKER_H_
#define MYNTEYE_API_VERSION_CHECKER_H_
#pragma once

#include <memory>
#include <vector>

#include "mynteye/device/device.h"
#include <string>
#include "mynteye/api/api.h"

MYNTEYE_BEGIN_NAMESPACE

class Standard210aDevice : public Device {
 public:
  explicit Standard210aDevice(std::shared_ptr<uvc::device> device);
  virtual ~Standard210aDevice();

  Capabilities GetKeyStreamCapability() const override;

  void OnStereoStreamUpdate() override;
};

bool checkFirmwareVersion(const std::shared_ptr<API> api);
MYNTEYE_END_NAMESPACE

#endif  // MYNTEYE_DEVICE_STANDARD2_DEVICE_S210A_H_
#endif  // MYNTEYE_API_VERSION_CHECKER_H_
@@ -103,7 +103,8 @@ std::size_t from_data(IntrinsicsEquidistant *in, const std::uint8_t *data,
  return i;
}

std::size_t from_data(ImuIntrinsics *in, const std::uint8_t *data) {
std::size_t from_data(ImuIntrinsics *in, const std::uint8_t *data,
    bool get_size) {
  std::size_t i = 0;

  // scale
@@ -113,6 +114,15 @@ std::size_t from_data(ImuIntrinsics *in, const std::uint8_t *data) {
    }
  }
  i += 72;
  if (get_size) {
    // assembly
    for (std::size_t j = 0; j < 3; j++) {
      for (std::size_t k = 0; k < 3; k++) {
        in->assembly[j][k] = _from_data<double>(data + i + (j * 3 + k) * 8);
      }
    }
    i += 72;
  }
  // drift
  for (std::size_t j = 0; j < 3; j++) {
    in->drift[j] = _from_data<double>(data + i + j * 8);
@@ -128,6 +138,24 @@ std::size_t from_data(ImuIntrinsics *in, const std::uint8_t *data) {
    in->bias[j] = _from_data<double>(data + i + j * 8);
  }
  i += 24;
  if (get_size) {
    // temperature drift
    // x
    for (std::size_t j = 0; j < 2; j++) {
      in->x[j] = _from_data<double>(data + i + j * 8);
    }
    i += 16;
    // y
    for (std::size_t j = 0; j < 2; j++) {
      in->y[j] = _from_data<double>(data + i + j * 8);
    }
    i += 16;
    // z
    for (std::size_t j = 0; j < 2; j++) {
      in->z[j] = _from_data<double>(data + i + j * 8);
    }
    i += 16;
  }

  return i;
}
@@ -236,7 +264,8 @@ std::size_t to_data(const IntrinsicsEquidistant *in, std::uint8_t *data,
  return i;
}

std::size_t to_data(const ImuIntrinsics *in, std::uint8_t *data) {
std::size_t to_data(const ImuIntrinsics *in, std::uint8_t *data,
    bool set_size) {
  std::size_t i = 0;

  // scale
@@ -246,6 +275,15 @@ std::size_t to_data(const ImuIntrinsics *in, std::uint8_t *data) {
    }
  }
  i += 72;
  if (set_size) {
    // assembly
    for (std::size_t j = 0; j < 3; j++) {
      for (std::size_t k = 0; k < 3; k++) {
        _to_data(in->assembly[j][k], data + i + (j * 3 + k) * 8);
      }
    }
    i += 72;
  }
  // drift
  for (std::size_t j = 0; j < 3; j++) {
    _to_data(in->drift[j], data + i + j * 8);
@@ -261,6 +299,24 @@ std::size_t to_data(const ImuIntrinsics *in, std::uint8_t *data) {
    _to_data(in->bias[j], data + i + j * 8);
  }
  i += 24;
  if (set_size) {
    // temperature drift
    // x
    for (std::size_t j = 0; j < 2; j++) {
      _to_data<double>(in->x[j], data + i + j * 8);
    }
    i += 16;
    // y
    for (std::size_t j = 0; j < 2; j++) {
      _to_data<double>(in->y[j], data + i + j * 8);
    }
    i += 16;
    // z
    for (std::size_t j = 0; j < 2; j++) {
      _to_data<double>(in->z[j], data + i + j * 8);
    }
    i += 16;
  }

  return i;
}
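The get_size/set_size flag decides whether the extended IMU calibration fields are part of the serialized blob. Tallying the hunks above (a back-of-the-envelope check, not an official layout spec):

// Extra payload when get_size / set_size is true, per ImuIntrinsics:
//   assembly            3 x 3 doubles          = 72 bytes
//   temperature drift   x, y, z: 3 x 2 doubles = 48 bytes
//   total               120 bytes on top of the old layout,
// which is why the old-format parsers below pass false and the
// v1.2 parsers pass true.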
@@ -55,7 +55,8 @@ std::size_t from_data(IntrinsicsPinhole *in, const std::uint8_t *data,
std::size_t from_data(IntrinsicsEquidistant *in, const std::uint8_t *data,
    bool get_size);

std::size_t from_data(ImuIntrinsics *in, const std::uint8_t *data);
std::size_t from_data(ImuIntrinsics *in, const std::uint8_t *data,
    bool get_size);

std::size_t from_data(Extrinsics *ex, const std::uint8_t *data);

@@ -88,7 +89,8 @@ std::size_t to_data(const IntrinsicsPinhole *in, std::uint8_t *data,
std::size_t to_data(const IntrinsicsEquidistant *in, std::uint8_t *data,
    bool set_size);

std::size_t to_data(const ImuIntrinsics *in, std::uint8_t *data);
std::size_t to_data(const ImuIntrinsics *in, std::uint8_t *data,
    bool set_size);

std::size_t to_data(const Extrinsics *ex, std::uint8_t *data);
@@ -475,7 +475,9 @@ bool Channels::GetFiles(
      if (file_size > 0) {
        auto &&n = file_channel_.GetImgParamsFromData(
            data + i, file_size, img_params);
        CHECK_EQ(n, file_size);
        CHECK_EQ(n, file_size)
            << "The firmware does not support getting device info; please "
               "upgrade to the latest version";
      }
    } break;
    case FID_IMU_PARAMS: {
@@ -483,7 +485,13 @@ bool Channels::GetFiles(
      if (imu_params->ok) {
        auto &&n = file_channel_.GetImuParamsFromData(
            data + i, file_size, imu_params);
        CHECK_EQ(n, file_size);
        // TODO(Kalman): Use CHECK_EQ
        if (n != file_size) {
          LOG(WARNING) << "Wrong size of imu params";
        }
        // CHECK_EQ(n, file_size)
        //     << "The firmware does not support getting device info; please "
        //        "upgrade to the latest version";
      }
    } break;
    default:
@@ -113,6 +113,22 @@ std::size_t DeviceInfoParser::GetFromData(
  info->nominal_baseline = bytes::_from_data<std::uint16_t>(data + i);
  i += 2;

  if (info->spec_version >= Version(1, 2)) {
    // auxiliary_chip_version, 2
    info->auxiliary_chip_version.set_major(data[i]);
    info->auxiliary_chip_version.set_minor(data[i + 1]);
    i += 2;
    // isp_version, 2
    info->isp_version.set_major(data[i]);
    info->isp_version.set_minor(data[i + 1]);
    i += 2;
  } else {
    info->auxiliary_chip_version.set_major(0);
    info->auxiliary_chip_version.set_minor(0);
    info->isp_version.set_major(0);
    info->isp_version.set_minor(0);
  }

  // get other infos according to spec_version

  MYNTEYE_UNUSED(data_size)
@@ -155,6 +171,17 @@ std::size_t DeviceInfoParser::SetToData(
  bytes::_to_data(info->nominal_baseline, data + i);
  i += 2;

  if (info->spec_version >= Version(1, 2)) {
    // auxiliary_chip_version, 2
    data[i] = info->auxiliary_chip_version.major();
    data[i + 1] = info->auxiliary_chip_version.minor();
    i += 2;
    // isp_version, 2
    data[i] = info->isp_version.major();
    data[i + 1] = info->isp_version.minor();
    i += 2;
  }

  // set other infos according to spec_version

  // others
|
||||
return GetFromData_v1_0(data, data_size, img_params);
|
||||
}
|
||||
// s210a old params
|
||||
if (spec_version_ == Version(1, 1) && data_size == 404) {
|
||||
if (spec_version_ >= Version(1, 1) && data_size == 404) {
|
||||
return GetFromData_v1_1(data, data_size, img_params);
|
||||
}
|
||||
// get img params with new version format
|
||||
@@ -406,7 +433,7 @@ std::size_t ImuParamsParser::GetFromData(
|
||||
return GetFromData_old(data, data_size, imu_params);
|
||||
}
|
||||
// s210a old params
|
||||
if (spec_version_ == Version(1, 1) && data_size == 384) {
|
||||
if (spec_version_ >= Version(1, 1) && data_size == 384) {
|
||||
return GetFromData_old(data, data_size, imu_params);
|
||||
}
|
||||
// get imu params with new version format
|
||||
@@ -415,16 +442,19 @@ std::size_t ImuParamsParser::GetFromData(
|
||||
|
||||
std::size_t ImuParamsParser::SetToData(
|
||||
const imu_params_t *imu_params, std::uint8_t *data) const {
|
||||
// always set imu params with new version format
|
||||
if (spec_version_ >= Version(1, 2)) {
|
||||
return SetToData_new(imu_params, data);
|
||||
} else {
|
||||
return SetToData_old(imu_params, data);
|
||||
}
|
||||
}
|
||||
|
||||
std::size_t ImuParamsParser::GetFromData_old(
|
||||
const std::uint8_t *data, const std::uint16_t &data_size,
|
||||
imu_params_t *imu_params) const {
|
||||
std::size_t i = 0;
|
||||
i += bytes::from_data(&imu_params->in_accel, data + i);
|
||||
i += bytes::from_data(&imu_params->in_gyro, data + i);
|
||||
i += bytes::from_data(&imu_params->in_accel, data + i, false);
|
||||
i += bytes::from_data(&imu_params->in_gyro, data + i, false);
|
||||
i += bytes::from_data(&imu_params->ex_left_to_imu, data + i);
|
||||
imu_params->version = spec_version_.to_string();
|
||||
MYNTEYE_UNUSED(data_size)
|
||||
@@ -434,8 +464,8 @@ std::size_t ImuParamsParser::GetFromData_old(
|
||||
std::size_t ImuParamsParser::SetToData_old(
|
||||
const imu_params_t *imu_params, std::uint8_t *data) const {
|
||||
std::size_t i = 3; // skip id, size
|
||||
i += bytes::to_data(&imu_params->in_accel, data + i);
|
||||
i += bytes::to_data(&imu_params->in_gyro, data + i);
|
||||
i += bytes::to_data(&imu_params->in_accel, data + i, false);
|
||||
i += bytes::to_data(&imu_params->in_gyro, data + i, false);
|
||||
i += bytes::to_data(&imu_params->ex_left_to_imu, data + i);
|
||||
// others
|
||||
std::size_t size = i - 3;
|
||||
@@ -455,8 +485,8 @@ std::size_t ImuParamsParser::GetFromData_new(
|
||||
i += 2;
|
||||
// get imu params according to version
|
||||
if (version == Version(1, 2)) { // v1.2
|
||||
i += bytes::from_data(&imu_params->in_accel, data + i);
|
||||
i += bytes::from_data(&imu_params->in_gyro, data + i);
|
||||
i += bytes::from_data(&imu_params->in_accel, data + i, true);
|
||||
i += bytes::from_data(&imu_params->in_gyro, data + i, true);
|
||||
i += bytes::from_data(&imu_params->ex_left_to_imu, data + i);
|
||||
} else {
|
||||
LOG(FATAL) << "Could not get imu params of version "
|
||||
@@ -479,8 +509,8 @@ std::size_t ImuParamsParser::SetToData_new(
|
||||
i += 2;
|
||||
// set imu params with new version format
|
||||
if (version_raw <= version_new) {
|
||||
i += bytes::to_data(&imu_params->in_accel, data + i);
|
||||
i += bytes::to_data(&imu_params->in_gyro, data + i);
|
||||
i += bytes::to_data(&imu_params->in_accel, data + i, true);
|
||||
i += bytes::to_data(&imu_params->in_gyro, data + i, true);
|
||||
i += bytes::to_data(&imu_params->ex_left_to_imu, data + i);
|
||||
} else {
|
||||
LOG(FATAL) << "Could not set imu params of version "
|
||||
|
||||
@@ -61,7 +61,8 @@ const std::map<Model, std::map<Capabilities, StreamRequests>>
    stream_requests_map = {
  {Model::STANDARD,
    {{Capabilities::STEREO, {
      {752, 480, Format::YUYV, 60}}
      {752, 480, Format::YUYV, 60},
      {376, 240, Format::YUYV, 60}}
    }}
  },
  {Model::STANDARD2,
@@ -26,7 +26,6 @@
|
||||
#include "mynteye/device/motions.h"
|
||||
#include "mynteye/device/standard/device_s.h"
|
||||
#include "mynteye/device/standard2/device_s2.h"
|
||||
#include "mynteye/device/standard2/device_s210a.h"
|
||||
#include "mynteye/device/streams.h"
|
||||
#include "mynteye/device/types.h"
|
||||
#include "mynteye/util/strings.h"
|
||||
@@ -104,7 +103,6 @@ std::shared_ptr<Device> Device::Create(
|
||||
if (name == "MYNTEYE") {
|
||||
return std::make_shared<StandardDevice>(device);
|
||||
} else if (strings::starts_with(name, "MYNT-EYE-")) {
|
||||
// TODO(JohnZhao): Create different device by name, such as MYNT-EYE-S1000
|
||||
std::string model_s = name.substr(9, 5);
|
||||
VLOG(2) << "MYNE EYE Model: " << model_s;
|
||||
DeviceModel model(model_s);
|
||||
@@ -113,9 +111,9 @@ std::shared_ptr<Device> Device::Create(
|
||||
return std::make_shared<StandardDevice>(device);
|
||||
} else if (model.generation == '2') {
|
||||
if (model.custom_code == '0') {
|
||||
return std::make_shared<Standard2Device>(device);
|
||||
return std::make_shared<Standard2Device>(Model::STANDARD2, device);
|
||||
} else if (model.custom_code == 'A') {
|
||||
return std::make_shared<Standard210aDevice>(device);
|
||||
return std::make_shared<Standard2Device>(Model::STANDARD210A, device);
|
||||
} else {
|
||||
LOG(FATAL) << "No such custom code now";
|
||||
}
|
||||
@@ -234,6 +232,10 @@ std::string Device::GetInfo(const Info &info) const {
|
||||
return device_info_->imu_type.to_string();
|
||||
case Info::NOMINAL_BASELINE:
|
||||
return std::to_string(device_info_->nominal_baseline);
|
||||
case Info::AUXILIARY_CHIP_VERSION:
|
||||
return device_info_->auxiliary_chip_version.to_string();
|
||||
case Info::ISP_VERSION:
|
||||
return device_info_->isp_version.to_string();
|
||||
default:
|
||||
LOG(WARNING) << "Unknown device info";
|
||||
return "";
|
||||
@@ -328,6 +330,7 @@ void Device::SetMotionIntrinsics(const MotionIntrinsics &in) {
|
||||
motion_intrinsics_ = std::make_shared<MotionIntrinsics>();
|
||||
}
|
||||
*motion_intrinsics_ = in;
|
||||
motions_->SetMotionIntrinsics(motion_intrinsics_);
|
||||
}
|
||||
|
||||
void Device::SetMotionExtrinsics(const Stream &from, const Extrinsics &ex) {
|
||||
@@ -607,6 +610,8 @@ void Device::ReadAllInfos() {
|
||||
|
||||
device_info_->name = uvc::get_name(*device_);
|
||||
|
||||
motions_->SetDeviceInfo(device_info_);
|
||||
|
||||
bool img_params_ok = false;
|
||||
for (auto &¶ms : all_img_params_) {
|
||||
auto &&img_params = params.second;
|
||||
@@ -614,9 +619,9 @@ void Device::ReadAllInfos() {
|
||||
img_params_ok = true;
|
||||
SetIntrinsics(Stream::LEFT, img_params.in_left);
|
||||
SetIntrinsics(Stream::RIGHT, img_params.in_right);
|
||||
SetExtrinsics(Stream::LEFT, Stream::RIGHT, img_params.ex_right_to_left);
|
||||
VLOG(2) << "Intrinsics left: {" << GetIntrinsics(Stream::LEFT) << "}";
|
||||
VLOG(2) << "Intrinsics right: {" << GetIntrinsics(Stream::RIGHT) << "}";
|
||||
SetExtrinsics(Stream::RIGHT, Stream::LEFT, img_params.ex_right_to_left);
|
||||
VLOG(2) << "Intrinsics left: {" << *GetIntrinsics(Stream::LEFT) << "}";
|
||||
VLOG(2) << "Intrinsics right: {" << *GetIntrinsics(Stream::RIGHT) << "}";
|
||||
VLOG(2) << "Extrinsics left to right: {"
|
||||
<< GetExtrinsics(Stream::LEFT, Stream::RIGHT) << "}";
|
||||
break;
|
||||
@@ -654,14 +659,25 @@ void Device::UpdateStreamIntrinsics(
img_res.height == request.GetResolution().height &&
img_res.width == request.GetResolution().width / 2;
} else if (capability == Capabilities::STEREO) {
ok = img_params.ok && img_res == request.GetResolution();
if (img_res == request.GetResolution()) {
ok = img_params.ok;
} else if (request.GetResolution().height / img_res.height ==
request.GetResolution().width / img_res.width) {
double scale = static_cast<double> (
1.0 * request.GetResolution().height / img_res.height);
img_params.in_left->resize_scale = scale;
img_params.in_right->resize_scale = scale;
ok = img_params.ok;
} else {
ok = false;
}
}
if (ok) {
SetIntrinsics(Stream::LEFT, img_params.in_left);
SetIntrinsics(Stream::RIGHT, img_params.in_right);
SetExtrinsics(Stream::LEFT, Stream::RIGHT, img_params.ex_right_to_left);
VLOG(2) << "Intrinsics left: {" << GetIntrinsics(Stream::LEFT) << "}";
VLOG(2) << "Intrinsics right: {" << GetIntrinsics(Stream::RIGHT) << "}";
VLOG(2) << "Intrinsics left: {" << *GetIntrinsics(Stream::LEFT) << "}";
VLOG(2) << "Intrinsics right: {" << *GetIntrinsics(Stream::RIGHT) << "}";
VLOG(2) << "Extrinsics left to right: {"
<< GetExtrinsics(Stream::LEFT, Stream::RIGHT) << "}";
break;
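The reworked STEREO branch accepts a requested resolution whose width and height ratios to the calibrated one agree (an integer-division check) and records the height ratio in resize_scale instead of rejecting the request. A small standalone sketch of that check, using plain structs rather than the SDK types, just to make the arithmetic concrete:

  #include <iostream>

  struct Res { int width, height; };

  // Mirrors the branch above: matching ratios -> scale factor, otherwise reject.
  bool compute_resize_scale(const Res &calib, const Res &request, double *scale) {
    if (request.width == calib.width && request.height == calib.height) {
      *scale = 1.0;
      return true;
    }
    if (request.height / calib.height == request.width / calib.width) {
      *scale = 1.0 * request.height / calib.height;
      return true;
    }
    return false;
  }

  int main() {
    double scale = 0.0;
    // e.g. calibration stored at 1280x400, stream requested at 2560x800
    if (compute_resize_scale({1280, 400}, {2560, 800}, &scale))
      std::cout << "resize_scale = " << scale << std::endl;  // prints 2
    return 0;
  }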
@@ -702,4 +718,12 @@ bool Device::SetFiles(
return channels_->SetFiles(info, img_params, imu_params);
}

void Device::EnableProcessMode(const ProcessMode& mode) {
EnableProcessMode(static_cast<std::int32_t>(mode));
}

void Device::EnableProcessMode(const std::int32_t& mode) {
motions_->EnableProcessMode(mode);
}

MYNTEYE_END_NAMESPACE
@@ -18,11 +18,39 @@

MYNTEYE_BEGIN_NAMESPACE

namespace {

void matrix_3x1(const double (*src1)[3], const double (*src2)[1],
double (*dst)[1]) {
for (int i = 0; i < 3; i++) {
for (int j = 0; j < 1; j++) {
for (int k = 0; k < 3; k++) {
dst[i][j] += src1[i][k] * src2[k][j];
}
}
}
}

void matrix_3x3(const double (*src1)[3], const double (*src2)[3],
double (*dst)[3]) {
for (int i = 0; i < 3; i++) {
for (int j = 0; j < 3; j++) {
for (int k = 0; k < 3; k++) {
dst[i][j] += src1[i][k] * src2[k][j];
}
}
}
}

} // namespace
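Both helpers accumulate into dst with +=, so the destination has to be zero-initialized by the caller, which is exactly what ProcImuAssembly below does with double dst[3][3] = {0}. A tiny self-contained check of the 3x3 by 3x1 product using the same array-pointer signatures:

  #include <cassert>

  // Same shape as the anonymous-namespace helper above; dst must start at zero
  // because the loops only ever add into it.
  static void matrix_3x1(const double (*a)[3], const double (*b)[1], double (*dst)[1]) {
    for (int i = 0; i < 3; i++)
      for (int k = 0; k < 3; k++)
        dst[i][0] += a[i][k] * b[k][0];
  }

  int main() {
    const double scale[3][3] = {{2, 0, 0}, {0, 2, 0}, {0, 0, 2}};
    const double accel[3][1] = {{1}, {2}, {3}};
    double out[3][1] = {{0}, {0}, {0}};  // zero-init is required
    matrix_3x1(scale, accel, out);
    assert(out[0][0] == 2 && out[1][0] == 4 && out[2][0] == 6);
    return 0;
  }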

Motions::Motions(std::shared_ptr<Channels> channels)
: channels_(channels),
motion_callback_(nullptr),
motion_datas_enabled_(false),
is_imu_tracking(false) {
is_imu_tracking(false),
proc_mode_(static_cast<const std::int32_t>(ProcessMode::PROC_NONE)),
motion_intrinsics_(nullptr) {
CHECK_NOTNULL(channels_);
VLOG(2) << __func__;
}
@@ -64,6 +92,17 @@ void Motions::SetMotionCallback(motion_callback_t callback) {
imu->gyro[1] = seg.gyro[1] * 1.f * gyro_range / 0x10000;
imu->gyro[2] = seg.gyro[2] * 1.f * gyro_range / 0x10000;

bool proc_assembly = ((proc_mode_ & ProcessMode::PROC_IMU_ASSEMBLY) > 0);
bool proc_temp_drift = ((proc_mode_ & ProcessMode::PROC_IMU_TEMP_DRIFT) > 0);
if (proc_assembly && proc_temp_drift) {
ProcImuTempDrift(imu);
ProcImuAssembly(imu);
} else if (proc_assembly) {
ProcImuAssembly(imu);
} else if (proc_temp_drift) {
ProcImuTempDrift(imu);
}

std::lock_guard<std::mutex> _(mtx_datas_);
motion_data_t data = {imu};
if (motion_datas_enabled_ && motion_datas_max_size_ > 0) {
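proc_mode_ is treated as a bit mask: assembly correction and temperature-drift correction can be enabled independently, and when both bits are set the drift compensation runs before the assembly rotation. A hedged sketch of how a caller would request both corrections through the Device::EnableProcessMode overloads shown earlier; the header path is an assumption, and the SDK may also provide a combined convenience value:

  #include <cstdint>
  #include <memory>
  #include "mynteye/device/device.h"  // assumed public device header path

  // Ask for both IMU corrections at once. The casts keep the OR valid whether
  // ProcessMode is a scoped or unscoped enum.
  void enable_full_imu_processing(
      const std::shared_ptr<MYNTEYE_NAMESPACE::Device> &device) {
    using MYNTEYE_NAMESPACE::ProcessMode;
    std::int32_t mode =
        static_cast<std::int32_t>(ProcessMode::PROC_IMU_ASSEMBLY) |
        static_cast<std::int32_t>(ProcessMode::PROC_IMU_TEMP_DRIFT);
    device->EnableProcessMode(mode);  // forwarded to Motions::EnableProcessMode
  }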
@@ -129,4 +168,82 @@ Motions::motion_datas_t Motions::GetMotionDatas() {
return datas;
}

void Motions::ProcImuAssembly(std::shared_ptr<ImuData> data) const {
if (nullptr == motion_intrinsics_ ||
IsNullAssemblyOrTempDrift())
return;

double dst[3][3] = {0};
if (data->flag == 1) {
matrix_3x3(motion_intrinsics_->accel.scale,
motion_intrinsics_->accel.assembly, dst);
double s[3][1] = {0};
double d[3][1] = {0};
for (int i = 0; i < 3; i++) {
s[i][0] = data->accel[i];
}
matrix_3x1(dst, s, d);
for (int i = 0; i < 3; i++) {
data->accel[i] = d[i][0];
}
} else if (data->flag == 2) {
matrix_3x3(motion_intrinsics_->gyro.scale,
motion_intrinsics_->gyro.assembly, dst);
double s[3][1] = {0};
double d[3][1] = {0};
for (int i = 0; i < 3; i++) {
s[i][0] = data->gyro[i];
}
matrix_3x1(dst, s, d);
for (int i = 0; i < 3; i++) {
data->gyro[i] = d[i][0];
}
}
}
void Motions::ProcImuTempDrift(std::shared_ptr<ImuData> data) const {
if (nullptr == motion_intrinsics_ ||
IsNullAssemblyOrTempDrift())
return;

double temp = data->temperature;
if (data->flag == 1) {
data->accel[0] -= motion_intrinsics_->accel.x[1] * temp
+ motion_intrinsics_->accel.x[0];
data->accel[1] -= motion_intrinsics_->accel.y[1] * temp
+ motion_intrinsics_->accel.y[0];
data->accel[2] -= motion_intrinsics_->accel.z[1] * temp
+ motion_intrinsics_->accel.z[0];
} else if (data->flag == 2) {
data->gyro[0] -= motion_intrinsics_->gyro.x[1] * temp
+ motion_intrinsics_->gyro.x[0];
data->gyro[1] -= motion_intrinsics_->gyro.y[1] * temp
+ motion_intrinsics_->gyro.y[0];
data->gyro[2] -= motion_intrinsics_->gyro.z[1] * temp
+ motion_intrinsics_->gyro.z[0];
}
}
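The drift model above is a per-axis linear function of temperature: each x/y/z pair holds an intercept and a slope (the same pairs the new imu.params fields carry further down), and the corrected sample is the raw value minus (slope * T + intercept). A standalone sketch of that arithmetic with made-up placeholder coefficients:

  #include <cassert>

  // corrected = raw - (slope * temperature + intercept), one pair per axis,
  // matching the shape of the accel/gyro x/y/z arrays written to imu.params.
  static double temp_drift_correct(double raw, double temperature,
                                   const double coeff[2] /* {intercept, slope} */) {
    return raw - (coeff[1] * temperature + coeff[0]);
  }

  int main() {
    const double x_coeff[2] = {0.02, 0.001};  // placeholder calibration values
    double corrected = temp_drift_correct(9.81, 35.0, x_coeff);
    assert(corrected == 9.81 - (0.001 * 35.0 + 0.02));
    return 0;
  }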

void Motions::SetMotionIntrinsics(const std::shared_ptr<MotionIntrinsics>& in) {
motion_intrinsics_ = in;
}

void Motions::EnableProcessMode(const std::int32_t& mode) {
proc_mode_ = mode;
}

bool Motions::IsNullAssemblyOrTempDrift() const {
if (!device_info_)
return true;

if (device_info_->spec_version >= Version(1, 2))
return false;

return true;
}

void Motions::SetDeviceInfo(const std::shared_ptr<DeviceInfo>& in) {
device_info_ = in;
}

MYNTEYE_END_NAMESPACE
@@ -21,6 +21,7 @@

#include "mynteye/mynteye.h"
#include "mynteye/device/callbacks.h"
#include "mynteye/device/types.h"

MYNTEYE_BEGIN_NAMESPACE

@@ -46,7 +47,16 @@ class Motions {
void EnableMotionDatas(std::size_t max_size);
motion_datas_t GetMotionDatas();

void SetMotionIntrinsics(const std::shared_ptr<MotionIntrinsics>& in);
void EnableProcessMode(const std::int32_t& mode);

void SetDeviceInfo(const std::shared_ptr<DeviceInfo>& in);

private:
void ProcImuAssembly(std::shared_ptr<ImuData> data) const;
void ProcImuTempDrift(std::shared_ptr<ImuData> data) const;
bool IsNullAssemblyOrTempDrift() const;

std::shared_ptr<Channels> channels_;

motion_callback_t motion_callback_;
@@ -61,6 +71,10 @@ class Motions {

int accel_range;
int gyro_range;

std::int32_t proc_mode_;
std::shared_ptr<MotionIntrinsics> motion_intrinsics_;
std::shared_ptr<DeviceInfo> device_info_;
};

MYNTEYE_END_NAMESPACE
@@ -90,8 +90,8 @@ void unpack_imu_res_packet(const std::uint8_t *data, ImuResPacket *res) {
|
||||
|
||||
} // namespace
|
||||
|
||||
Standard2ChannelsAdapter::Standard2ChannelsAdapter()
|
||||
: ChannelsAdapter(Model::STANDARD2) {
|
||||
Standard2ChannelsAdapter::Standard2ChannelsAdapter(const Model &model)
|
||||
: ChannelsAdapter(model) {
|
||||
}
|
||||
|
||||
Standard2ChannelsAdapter::~Standard2ChannelsAdapter() {
|
||||
|
||||
@@ -25,7 +25,7 @@ MYNTEYE_BEGIN_NAMESPACE
|
||||
|
||||
class Standard2ChannelsAdapter : public ChannelsAdapter {
|
||||
public:
|
||||
Standard2ChannelsAdapter();
|
||||
explicit Standard2ChannelsAdapter(const Model &model);
|
||||
virtual ~Standard2ChannelsAdapter();
|
||||
|
||||
std::int32_t GetAccelRangeDefault() override;
|
||||
|
||||
@@ -1,121 +0,0 @@
|
||||
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
#include "mynteye/device/standard2/channels_adapter_s210a.h"
|
||||
|
||||
#include "mynteye/logger.h"
|
||||
|
||||
MYNTEYE_BEGIN_NAMESPACE
|
||||
|
||||
namespace {
|
||||
|
||||
#pragma pack(push, 1)
|
||||
struct ImuData {
|
||||
std::uint32_t frame_id;
|
||||
std::uint64_t timestamp;
|
||||
std::uint8_t flag;
|
||||
std::int16_t temperature;
|
||||
std::int16_t accel_or_gyro[3];
|
||||
|
||||
ImuData() = default;
|
||||
explicit ImuData(const std::uint8_t *data) {
|
||||
from_data(data);
|
||||
}
|
||||
|
||||
void from_data(const std::uint8_t *data) {
|
||||
std::uint32_t timestamp_l;
|
||||
std::uint32_t timestamp_h;
|
||||
|
||||
frame_id = (*(data) << 24) | (*(data + 1) << 16) | (*(data + 2) << 8) |
|
||||
*(data + 3);
|
||||
timestamp_h = (*(data + 4) << 24) | (*(data + 5) << 16) |
|
||||
(*(data + 6) << 8) | *(data + 7);
|
||||
timestamp_l = (*(data + 8) << 24) | (*(data + 9) << 16) |
|
||||
(*(data + 10) << 8) | *(data + 11);
|
||||
timestamp = (static_cast<std::uint64_t>(timestamp_h) << 32) | timestamp_l;
|
||||
flag = *(data + 12);
|
||||
temperature = (*(data + 13) << 8) | *(data + 14);
|
||||
accel_or_gyro[0] = (*(data + 15) << 8) | *(data + 16);
|
||||
accel_or_gyro[1] = (*(data + 17) << 8) | *(data + 18);
|
||||
accel_or_gyro[2] = (*(data + 19) << 8) | *(data + 20);
|
||||
}
|
||||
};
|
||||
#pragma pack(pop)
|
||||
|
||||
void unpack_imu_segment(const ImuData &imu, ImuSegment *seg) {
|
||||
seg->frame_id = imu.frame_id;
|
||||
seg->timestamp = imu.timestamp;
|
||||
seg->flag = imu.flag;
|
||||
seg->temperature = imu.temperature;
|
||||
seg->accel[0] = (seg->flag == 1) ? imu.accel_or_gyro[0] : 0;
|
||||
seg->accel[1] = (seg->flag == 1) ? imu.accel_or_gyro[1] : 0;
|
||||
seg->accel[2] = (seg->flag == 1) ? imu.accel_or_gyro[2] : 0;
|
||||
seg->gyro[0] = (seg->flag == 2) ? imu.accel_or_gyro[0] : 0;
|
||||
seg->gyro[1] = (seg->flag == 2) ? imu.accel_or_gyro[1] : 0;
|
||||
seg->gyro[2] = (seg->flag == 2) ? imu.accel_or_gyro[2] : 0;
|
||||
}
|
||||
|
||||
void unpack_imu_packet(const std::uint8_t *data, ImuPacket *pkg) {
|
||||
std::size_t data_n = sizeof(ImuData); // 21
|
||||
for (std::size_t i = 0; i < pkg->count; i++) {
|
||||
ImuSegment seg;
|
||||
unpack_imu_segment(ImuData(data + data_n * i), &seg);
|
||||
pkg->segments.push_back(seg);
|
||||
}
|
||||
pkg->serial_number = pkg->segments.back().frame_id;
|
||||
}
|
||||
|
||||
void unpack_imu_res_packet(const std::uint8_t *data, ImuResPacket *res) {
|
||||
res->header = *data;
|
||||
res->state = *(data + 1);
|
||||
res->size = (*(data + 2) << 8) | *(data + 3);
|
||||
|
||||
std::size_t data_n = sizeof(ImuData); // 21
|
||||
ImuPacket packet;
|
||||
packet.count = res->size / data_n;
|
||||
unpack_imu_packet(data + 4, &packet);
|
||||
res->packets.push_back(packet);
|
||||
res->checksum = *(data + 4 + res->size);
|
||||
}
|
||||
|
||||
} // namespace
|
||||
|
||||
Standard210aChannelsAdapter::Standard210aChannelsAdapter()
|
||||
: ChannelsAdapter(Model::STANDARD210A) {
|
||||
}
|
||||
|
||||
Standard210aChannelsAdapter::~Standard210aChannelsAdapter() {
|
||||
}
|
||||
|
||||
std::int32_t Standard210aChannelsAdapter::GetAccelRangeDefault() {
|
||||
return 12;
|
||||
}
|
||||
|
||||
std::vector<std::int32_t> Standard210aChannelsAdapter::GetAccelRangeValues() {
|
||||
return {6, 12, 24, 48};
|
||||
}
|
||||
|
||||
std::int32_t Standard210aChannelsAdapter::GetGyroRangeDefault() {
|
||||
return 1000;
|
||||
}
|
||||
|
||||
std::vector<std::int32_t> Standard210aChannelsAdapter::GetGyroRangeValues() {
|
||||
return {250, 500, 1000, 2000, 4000};
|
||||
}
|
||||
|
||||
void Standard210aChannelsAdapter::GetImuResPacket(
|
||||
const std::uint8_t *data, ImuResPacket *res) {
|
||||
unpack_imu_res_packet(data, res);
|
||||
}
|
||||
|
||||
MYNTEYE_END_NAMESPACE
|
||||
@@ -1,42 +0,0 @@
|
||||
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
#ifndef MYNTEYE_DEVICE_STANDARD2_CHANNELS_ADAPTER_S210A_H_
|
||||
#define MYNTEYE_DEVICE_STANDARD2_CHANNELS_ADAPTER_S210A_H_
|
||||
#pragma once
|
||||
|
||||
#include <cstdint>
|
||||
#include <set>
|
||||
#include <vector>
|
||||
|
||||
#include "mynteye/device/channel/channels.h"
|
||||
|
||||
MYNTEYE_BEGIN_NAMESPACE
|
||||
|
||||
class Standard210aChannelsAdapter : public ChannelsAdapter {
|
||||
public:
|
||||
Standard210aChannelsAdapter();
|
||||
virtual ~Standard210aChannelsAdapter();
|
||||
|
||||
std::int32_t GetAccelRangeDefault() override;
|
||||
std::vector<std::int32_t> GetAccelRangeValues() override;
|
||||
|
||||
std::int32_t GetGyroRangeDefault() override;
|
||||
std::vector<std::int32_t> GetGyroRangeValues() override;
|
||||
|
||||
void GetImuResPacket(const std::uint8_t *data, ImuResPacket *res) override;
|
||||
};
|
||||
|
||||
MYNTEYE_END_NAMESPACE
|
||||
|
||||
#endif // MYNTEYE_DEVICE_STANDARD2_CHANNELS_ADAPTER_S210A_H_
|
||||
@@ -20,11 +20,13 @@

MYNTEYE_BEGIN_NAMESPACE

Standard2Device::Standard2Device(std::shared_ptr<uvc::device> device)
: Device(Model::STANDARD2, device,
std::make_shared<Standard2StreamsAdapter>(),
std::make_shared<Standard2ChannelsAdapter>()) {
Standard2Device::Standard2Device(const Model &model,
std::shared_ptr<uvc::device> device)
: Device(model, device,
std::make_shared<Standard2StreamsAdapter>(model),
std::make_shared<Standard2ChannelsAdapter>(model)) {
VLOG(2) << __func__;
CHECK(model == Model::STANDARD2 || model == Model::STANDARD210A);
}

Standard2Device::~Standard2Device() {
@@ -24,7 +24,7 @@ MYNTEYE_BEGIN_NAMESPACE

class Standard2Device : public Device {
public:
explicit Standard2Device(std::shared_ptr<uvc::device> device);
Standard2Device(const Model &model, std::shared_ptr<uvc::device> device);
virtual ~Standard2Device();

Capabilities GetKeyStreamCapability() const override;
@@ -1,45 +0,0 @@
|
||||
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
#include "mynteye/device/standard2/device_s210a.h"
|
||||
|
||||
#include "mynteye/logger.h"
|
||||
#include "mynteye/device/motions.h"
|
||||
#include "mynteye/device/standard2/channels_adapter_s210a.h"
|
||||
#include "mynteye/device/standard2/streams_adapter_s210a.h"
|
||||
|
||||
MYNTEYE_BEGIN_NAMESPACE
|
||||
|
||||
Standard210aDevice::Standard210aDevice(std::shared_ptr<uvc::device> device)
|
||||
: Device(Model::STANDARD210A, device,
|
||||
std::make_shared<Standard210aStreamsAdapter>(),
|
||||
std::make_shared<Standard210aChannelsAdapter>()) {
|
||||
VLOG(2) << __func__;
|
||||
}
|
||||
|
||||
Standard210aDevice::~Standard210aDevice() {
|
||||
VLOG(2) << __func__;
|
||||
}
|
||||
|
||||
Capabilities Standard210aDevice::GetKeyStreamCapability() const {
|
||||
return Capabilities::STEREO_COLOR;
|
||||
}
|
||||
|
||||
void Standard210aDevice::OnStereoStreamUpdate() {
|
||||
if (motion_tracking_) {
|
||||
auto &&motions = this->motions();
|
||||
motions->DoMotionTrack();
|
||||
}
|
||||
}
|
||||
|
||||
MYNTEYE_END_NAMESPACE
|
||||
@@ -120,12 +120,12 @@ bool unpack_stereo_img_data(
<< static_cast<int>(img_packet.header) << " now";
return false;
}

/*
std::uint8_t checksum = 0;
for (std::size_t i = 2, n = packet_n - 2; i <= n; i++) { // content: [2,9]
checksum = (checksum ^ packet[i]);
}
/*

if (img_packet.checksum != checksum) {
VLOG(2) << "Image packet checksum should be 0x" << std::hex
<< std::uppercase << std::setw(2) << std::setfill('0')
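What this hunk appears to change is only where the stray /* sits: previously it commented out the whole checksum block, now it follows the XOR loop, so the checksum is computed again while the mismatch check below stays disabled. The checksum itself is just a byte-wise XOR over the packet payload; a tiny standalone version with dummy bytes:

  #include <cassert>
  #include <cstdint>
  #include <vector>

  // XOR of packet[2] .. packet[packet_n - 2], matching the loop above.
  static std::uint8_t xor_checksum(const std::vector<std::uint8_t> &packet) {
    std::uint8_t checksum = 0;
    for (std::size_t i = 2, n = packet.size() - 2; i <= n; i++)
      checksum = static_cast<std::uint8_t>(checksum ^ packet[i]);
    return checksum;
  }

  int main() {
    std::vector<std::uint8_t> packet = {0x3B, 0x0A, 0x01, 0x02, 0x04, 0xFF};
    // bytes [2, 4]: 0x01 ^ 0x02 ^ 0x04 = 0x07; the final byte would be the stored checksum
    assert(xor_checksum(packet) == 0x07);
    return 0;
  }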
@@ -143,7 +143,58 @@ bool unpack_stereo_img_data(
|
||||
|
||||
} // namespace
|
||||
|
||||
Standard2StreamsAdapter::Standard2StreamsAdapter() {
|
||||
namespace s210a {
|
||||
|
||||
// image pixels
|
||||
|
||||
bool unpack_left_img_pixels(
|
||||
const void *data, const StreamRequest &request, Streams::frame_t *frame) {
|
||||
CHECK_NOTNULL(frame);
|
||||
CHECK_EQ(request.format, Format::BGR888);
|
||||
CHECK_EQ(frame->format(), Format::BGR888);
|
||||
auto data_new = reinterpret_cast<const std::uint8_t *>(data);
|
||||
std::size_t n = 3;
|
||||
std::size_t w = frame->width();
|
||||
std::size_t h = frame->height();
|
||||
for (std::size_t i = 0; i < h; i++) {
|
||||
for (std::size_t j = 0; j < w; j++) {
|
||||
frame->data()[(i * w + j) * n] =
|
||||
*(data_new + (2 * i * w + j) * n + 2);
|
||||
frame->data()[(i * w + j) * n + 1] =
|
||||
*(data_new + (2 * i * w + j) * n + 1);
|
||||
frame->data()[(i * w + j) * n + 2] =
|
||||
*(data_new + (2 * i * w + j) * n);
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
bool unpack_right_img_pixels(
|
||||
const void *data, const StreamRequest &request, Streams::frame_t *frame) {
|
||||
CHECK_NOTNULL(frame);
|
||||
CHECK_EQ(request.format, Format::BGR888);
|
||||
CHECK_EQ(frame->format(), Format::BGR888);
|
||||
auto data_new = reinterpret_cast<const std::uint8_t *>(data);
|
||||
std::size_t n = 3;
|
||||
std::size_t w = frame->width();
|
||||
std::size_t h = frame->height();
|
||||
for (std::size_t i = 0; i < h; i++) {
|
||||
for (std::size_t j = 0; j < w; j++) {
|
||||
frame->data()[(i * w + j) * n] =
|
||||
*(data_new + ((2 * i + 1) * w + j) * n + 2);
|
||||
frame->data()[(i * w + j) * n + 1] =
|
||||
*(data_new + ((2 * i + 1) * w + j) * n + 1);
|
||||
frame->data()[(i * w + j) * n + 2] =
|
||||
*(data_new + ((2 * i + 1) * w + j) * n);
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
} // namespace s210a
|
||||
|
||||
Standard2StreamsAdapter::Standard2StreamsAdapter(const Model &model)
|
||||
: model_(model) {
|
||||
}
|
||||
|
||||
Standard2StreamsAdapter::~Standard2StreamsAdapter() {
|
||||
@@ -167,10 +218,19 @@ Standard2StreamsAdapter::GetUnpackImgDataMap() {

std::map<Stream, Streams::unpack_img_pixels_t>
Standard2StreamsAdapter::GetUnpackImgPixelsMap() {
switch (model_) {
case Model::STANDARD210A:
return {
{Stream::LEFT, s210a::unpack_left_img_pixels},
{Stream::RIGHT, s210a::unpack_right_img_pixels}
};
case Model::STANDARD2:
default:
return {
{Stream::LEFT, unpack_left_img_pixels},
{Stream::RIGHT, unpack_right_img_pixels}
};
}
}

MYNTEYE_END_NAMESPACE
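For the STANDARD210A entries registered in the switch above, the s210a unpackers split the incoming double-height buffer into left and right BGR888 frames: even source rows feed the left image, odd rows the right, and the byte order of each pixel is reversed while copying. A compact sketch of that row de-interleave on a bare byte buffer, with toy sizes in place of the real UVC frame dimensions:

  #include <cassert>
  #include <cstdint>

  // src holds 2*h rows of w 3-byte pixels, rows interleaved L,R,L,R...; the
  // channel order is reversed while copying, mirroring the s210a unpackers.
  static void deinterleave(const std::uint8_t *src, std::size_t w, std::size_t h,
                           std::uint8_t *left, std::uint8_t *right) {
    const std::size_t n = 3;  // bytes per pixel
    for (std::size_t i = 0; i < h; i++) {
      for (std::size_t j = 0; j < w; j++) {
        for (std::size_t c = 0; c < n; c++) {
          left[(i * w + j) * n + c] = src[((2 * i) * w + j) * n + (n - 1 - c)];
          right[(i * w + j) * n + c] = src[((2 * i + 1) * w + j) * n + (n - 1 - c)];
        }
      }
    }
  }

  int main() {
    // 1x1 output frames from a 2-row interleaved source: row 0 -> left, row 1 -> right.
    const std::uint8_t src[] = {1, 2, 3, 4, 5, 6};
    std::uint8_t left[3], right[3];
    deinterleave(src, 1, 1, left, right);
    assert(left[0] == 3 && left[2] == 1);   // channels reversed
    assert(right[0] == 6 && right[2] == 4);
    return 0;
  }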
@@ -25,7 +25,7 @@ MYNTEYE_BEGIN_NAMESPACE
|
||||
|
||||
class Standard2StreamsAdapter : public StreamsAdapter {
|
||||
public:
|
||||
Standard2StreamsAdapter();
|
||||
explicit Standard2StreamsAdapter(const Model &model);
|
||||
virtual ~Standard2StreamsAdapter();
|
||||
|
||||
std::vector<Stream> GetKeyStreams() override;
|
||||
@@ -35,6 +35,9 @@ class Standard2StreamsAdapter : public StreamsAdapter {
|
||||
GetUnpackImgDataMap() override;
|
||||
std::map<Stream, Streams::unpack_img_pixels_t>
|
||||
GetUnpackImgPixelsMap() override;
|
||||
|
||||
private:
|
||||
Model model_;
|
||||
};
|
||||
|
||||
MYNTEYE_END_NAMESPACE
|
||||
|
||||
@@ -1,186 +0,0 @@
|
||||
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
#include "mynteye/device/standard2/streams_adapter_s210a.h"
|
||||
|
||||
#include <iomanip>
|
||||
|
||||
#include "mynteye/logger.h"
|
||||
#include "mynteye/device/types.h"
|
||||
|
||||
MYNTEYE_BEGIN_NAMESPACE
|
||||
|
||||
namespace {
|
||||
|
||||
// image info
|
||||
|
||||
#pragma pack(push, 1)
|
||||
struct ImagePacket {
|
||||
std::uint8_t header;
|
||||
std::uint8_t size;
|
||||
std::uint16_t frame_id;
|
||||
std::uint64_t timestamp;
|
||||
std::uint16_t exposure_time;
|
||||
std::uint8_t checksum;
|
||||
|
||||
ImagePacket() = default;
|
||||
explicit ImagePacket(std::uint8_t *data) {
|
||||
from_data(data);
|
||||
}
|
||||
|
||||
void from_data(std::uint8_t *data) {
|
||||
std::uint32_t timestamp_l;
|
||||
std::uint32_t timestamp_h;
|
||||
|
||||
header = *data;
|
||||
size = *(data + 1);
|
||||
frame_id = (*(data + 2) << 8) | *(data + 3);
|
||||
timestamp_h = (*(data + 4) << 24) | (*(data + 5) << 16) |
|
||||
(*(data + 6) << 8) | *(data + 7);
|
||||
timestamp_l = (*(data + 8) << 24) | (*(data + 9) << 16) |
|
||||
(*(data + 10) << 8) | *(data + 11);
|
||||
timestamp = (static_cast<std::uint64_t>(timestamp_h) << 32) | timestamp_l;
|
||||
exposure_time = (*(data + 12) << 8) | *(data + 13);
|
||||
checksum = *(data + 14);
|
||||
}
|
||||
};
|
||||
#pragma pack(pop)
|
||||
|
||||
// image pixels
|
||||
|
||||
bool unpack_left_img_pixels(
|
||||
const void *data, const StreamRequest &request, Streams::frame_t *frame) {
|
||||
CHECK_NOTNULL(frame);
|
||||
CHECK_EQ(request.format, Format::BGR888);
|
||||
CHECK_EQ(frame->format(), Format::BGR888);
|
||||
auto data_new = reinterpret_cast<const std::uint8_t *>(data);
|
||||
std::size_t n = 3;
|
||||
std::size_t w = frame->width();
|
||||
std::size_t h = frame->height();
|
||||
for (std::size_t i = 0; i < h; i++) {
|
||||
for (std::size_t j = 0; j < w; j++) {
|
||||
frame->data()[(i * w + j) * n] =
|
||||
*(data_new + (2 * i * w + j) * n + 2);
|
||||
frame->data()[(i * w + j) * n + 1] =
|
||||
*(data_new + (2 * i * w + j) * n + 1);
|
||||
frame->data()[(i * w + j) * n + 2] =
|
||||
*(data_new + (2 * i * w + j) * n);
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
bool unpack_right_img_pixels(
|
||||
const void *data, const StreamRequest &request, Streams::frame_t *frame) {
|
||||
CHECK_NOTNULL(frame);
|
||||
CHECK_EQ(request.format, Format::BGR888);
|
||||
CHECK_EQ(frame->format(), Format::BGR888);
|
||||
auto data_new = reinterpret_cast<const std::uint8_t *>(data);
|
||||
std::size_t n = 3;
|
||||
std::size_t w = frame->width();
|
||||
std::size_t h = frame->height();
|
||||
for (std::size_t i = 0; i < h; i++) {
|
||||
for (std::size_t j = 0; j < w; j++) {
|
||||
frame->data()[(i * w + j) * n] =
|
||||
*(data_new + ((2 * i + 1) * w + j) * n + 2);
|
||||
frame->data()[(i * w + j) * n + 1] =
|
||||
*(data_new + ((2 * i + 1) * w + j) * n + 1);
|
||||
frame->data()[(i * w + j) * n + 2] =
|
||||
*(data_new + ((2 * i + 1) * w + j) * n);
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
bool unpack_stereo_img_data(
|
||||
const void *data, const StreamRequest &request, ImgData *img) {
|
||||
CHECK_NOTNULL(img);
|
||||
|
||||
auto data_new = reinterpret_cast<const std::uint8_t *>(data);
|
||||
std::size_t data_n =
|
||||
request.width * request.height * bytes_per_pixel(request.format);
|
||||
auto data_end = data_new + data_n;
|
||||
|
||||
std::size_t packet_n = sizeof(ImagePacket);
|
||||
std::vector<std::uint8_t> packet(packet_n);
|
||||
std::reverse_copy(data_end - packet_n, data_end, packet.begin());
|
||||
|
||||
ImagePacket img_packet(packet.data());
|
||||
// LOG(INFO) << "ImagePacket: header=0x" << std::hex <<
|
||||
// static_cast<int>(img_packet.header)
|
||||
// << ", size=0x" << std::hex << static_cast<int>(img_packet.size)
|
||||
// << ", frame_id="<< std::dec << img_packet.frame_id
|
||||
// << ", timestamp="<< std::dec << img_packet.timestamp
|
||||
// << ", exposure_time="<< std::dec << img_packet.exposure_time
|
||||
// << ", checksum=0x" << std::hex << static_cast<int>(img_packet.checksum);
|
||||
|
||||
if (img_packet.header != 0x3B) {
|
||||
VLOG(2) << "Image packet header must be 0x3B, but 0x" << std::hex
|
||||
<< std::uppercase << std::setw(2) << std::setfill('0')
|
||||
<< static_cast<int>(img_packet.header) << " now";
|
||||
return false;
|
||||
}
|
||||
|
||||
std::uint8_t checksum = 0;
|
||||
for (std::size_t i = 2, n = packet_n - 2; i <= n; i++) { // content: [2,9]
|
||||
checksum = (checksum ^ packet[i]);
|
||||
}
|
||||
/*
|
||||
if (img_packet.checksum != checksum) {
|
||||
VLOG(2) << "Image packet checksum should be 0x" << std::hex
|
||||
<< std::uppercase << std::setw(2) << std::setfill('0')
|
||||
<< static_cast<int>(img_packet.checksum) << ", but 0x"
|
||||
<< std::setw(2) << std::setfill('0') << static_cast<int>(checksum)
|
||||
<< " now";
|
||||
return false;
|
||||
}
|
||||
*/
|
||||
img->frame_id = img_packet.frame_id;
|
||||
img->timestamp = img_packet.timestamp;
|
||||
img->exposure_time = img_packet.exposure_time;
|
||||
return true;
|
||||
}
|
||||
|
||||
} // namespace
|
||||
|
||||
Standard210aStreamsAdapter::Standard210aStreamsAdapter() {
|
||||
}
|
||||
|
||||
Standard210aStreamsAdapter::~Standard210aStreamsAdapter() {
|
||||
}
|
||||
|
||||
std::vector<Stream> Standard210aStreamsAdapter::GetKeyStreams() {
|
||||
return {Stream::LEFT, Stream::RIGHT};
|
||||
}
|
||||
|
||||
std::vector<Capabilities> Standard210aStreamsAdapter::GetStreamCapabilities() {
|
||||
return {Capabilities::STEREO_COLOR};
|
||||
}
|
||||
|
||||
std::map<Stream, Streams::unpack_img_data_t>
|
||||
Standard210aStreamsAdapter::GetUnpackImgDataMap() {
|
||||
return {
|
||||
{Stream::LEFT, unpack_stereo_img_data},
|
||||
{Stream::RIGHT, unpack_stereo_img_data}
|
||||
};
|
||||
}
|
||||
|
||||
std::map<Stream, Streams::unpack_img_pixels_t>
|
||||
Standard210aStreamsAdapter::GetUnpackImgPixelsMap() {
|
||||
return {
|
||||
{Stream::LEFT, unpack_left_img_pixels},
|
||||
{Stream::RIGHT, unpack_right_img_pixels}
|
||||
};
|
||||
}
|
||||
|
||||
MYNTEYE_END_NAMESPACE
|
||||
@@ -1,42 +0,0 @@
|
||||
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
#ifndef MYNTEYE_DEVICE_STANDARD2_STREAMS_ADAPTER_S210A_H_
|
||||
#define MYNTEYE_DEVICE_STANDARD2_STREAMS_ADAPTER_S210A_H_
|
||||
#pragma once
|
||||
|
||||
#include <map>
|
||||
#include <memory>
|
||||
#include <vector>
|
||||
|
||||
#include "mynteye/device/streams.h"
|
||||
|
||||
MYNTEYE_BEGIN_NAMESPACE
|
||||
|
||||
class Standard210aStreamsAdapter : public StreamsAdapter {
|
||||
public:
|
||||
Standard210aStreamsAdapter();
|
||||
virtual ~Standard210aStreamsAdapter();
|
||||
|
||||
std::vector<Stream> GetKeyStreams() override;
|
||||
std::vector<Capabilities> GetStreamCapabilities() override;
|
||||
|
||||
std::map<Stream, Streams::unpack_img_data_t>
|
||||
GetUnpackImgDataMap() override;
|
||||
std::map<Stream, Streams::unpack_img_pixels_t>
|
||||
GetUnpackImgPixelsMap() override;
|
||||
};
|
||||
|
||||
MYNTEYE_END_NAMESPACE
|
||||
|
||||
#endif // MYNTEYE_DEVICE_STANDARD2_STREAMS_ADAPTER_S210A_H_
|
||||
@@ -42,7 +42,8 @@ std::shared_ptr<Device> select() {
|
||||
auto &&device = devices[i];
|
||||
LOG(INFO) << " index: " << i
|
||||
<< ", name: " << device->GetInfo(Info::DEVICE_NAME)
|
||||
<< ", sn: " << device->GetInfo(Info::SERIAL_NUMBER);
|
||||
<< ", sn: " << device->GetInfo(Info::SERIAL_NUMBER)
|
||||
<< ", firmware: " << device->GetInfo(Info::FIRMWARE_VERSION);
|
||||
}
|
||||
|
||||
std::shared_ptr<Device> device = nullptr;
|
||||
@@ -70,13 +71,18 @@ MYNTEYE_NAMESPACE::StreamRequest select_request(
|
||||
const std::shared_ptr<Device> &device, bool *ok) {
|
||||
auto &&requests = device->GetStreamRequests();
|
||||
std::size_t n = requests.size();
|
||||
// TODO(Kalman): Get request size by uvc enum
|
||||
if (device->GetModel() == Model::STANDARD &&
|
||||
device->GetInfo()->firmware_version < Version(2, 4)) {
|
||||
n -= 1;
|
||||
}
|
||||
if (n <= 0) {
|
||||
LOG(ERROR) << "No MYNT EYE devices :(";
|
||||
*ok = false;
|
||||
return {};
|
||||
}
|
||||
|
||||
LOG(INFO) << "MYNT EYE devices:";
|
||||
LOG(INFO) << "MYNT EYE requests:";
|
||||
for (std::size_t i = 0; i < n; i++) {
|
||||
auto &&request = requests[i];
|
||||
LOG(INFO) << " index: " << i
|
||||
|
||||
@@ -36,6 +36,4 @@ namespace google {
|
||||
// that there is only one instance of this across the entire program.
|
||||
std::set<google::LogSink *> log_sinks_global;
|
||||
|
||||
int log_severity_global(INFO);
|
||||
|
||||
} // namespace google
|
||||
|
||||
@@ -92,6 +92,8 @@ const char *to_string(const Info &value) {
|
||||
CASE(LENS_TYPE)
|
||||
CASE(IMU_TYPE)
|
||||
CASE(NOMINAL_BASELINE)
|
||||
CASE(AUXILIARY_CHIP_VERSION)
|
||||
CASE(ISP_VERSION)
|
||||
default:
|
||||
CHECK(is_valid(value));
|
||||
return "Info::UNKNOWN";
|
||||
@@ -164,6 +166,7 @@ const char *to_string(const Format &value) {
|
||||
CASE(GREY)
|
||||
CASE(YUYV)
|
||||
CASE(BGR888)
|
||||
CASE(RGB888)
|
||||
default:
|
||||
return "Format::UNKNOWN";
|
||||
}
|
||||
@@ -243,6 +246,15 @@ std::ostream &operator<<(std::ostream &os, const ImuIntrinsics &in) {
|
||||
os << in.scale[2][i] << ", ";
|
||||
os << in.scale[2][2] << "]";
|
||||
|
||||
os << ", assembly: [";
|
||||
for (int i = 0; i <= 2; i++)
|
||||
os << in.assembly[0][i] << ", ";
|
||||
for (int i = 0; i <= 2; i++)
|
||||
os << in.assembly[1][i] << ", ";
|
||||
for (int i = 0; i <= 2; i++)
|
||||
os << in.assembly[2][i] << ", ";
|
||||
os << in.assembly[2][2] << "]";
|
||||
|
||||
os << ", drift: [";
|
||||
for (int i = 0; i <= 1; i++)
|
||||
os << in.drift[i] << ", ";
|
||||
@@ -258,6 +270,21 @@ std::ostream &operator<<(std::ostream &os, const ImuIntrinsics &in) {
|
||||
os << in.bias[i] << ", ";
|
||||
os << in.bias[2] << "]";
|
||||
|
||||
os << ", x: [";
|
||||
for (int i = 0; i <= 0; i++)
|
||||
os << in.x[i] << ", ";
|
||||
os << in.x[1] << "]";
|
||||
|
||||
os << ", y: [";
|
||||
for (int i = 0; i <= 0; i++)
|
||||
os << in.y[i] << ", ";
|
||||
os << in.y[1] << "]";
|
||||
|
||||
os << ", z: [";
|
||||
for (int i = 0; i <= 0; i++)
|
||||
os << in.z[i] << ", ";
|
||||
os << in.z[1] << "]";
|
||||
|
||||
return os;
|
||||
}
|
||||
|
||||
|
||||
@@ -330,6 +330,9 @@ struct device {
|
||||
for (int i = 0; i < 10; ++i) {
|
||||
if (xioctl(fd, VIDIOC_STREAMON, &type) < 0) {
|
||||
std::this_thread::sleep_for(std::chrono::milliseconds(100));
|
||||
} else {
|
||||
is_capturing = true;
|
||||
return;
|
||||
}
|
||||
}
|
||||
if (xioctl(fd, VIDIOC_STREAMON, &type) < 0)
|
||||
|
||||
@@ -53,6 +53,8 @@ TEST(Info, VerifyToString) {
|
||||
EXPECT_STREQ("Info::LENS_TYPE", to_string(Info::LENS_TYPE));
|
||||
EXPECT_STREQ("Info::IMU_TYPE", to_string(Info::IMU_TYPE));
|
||||
EXPECT_STREQ("Info::NOMINAL_BASELINE", to_string(Info::NOMINAL_BASELINE));
|
||||
EXPECT_STREQ("Info::AUXILIARY_CHIP_VERSION", to_string(Info::AUXILIARY_CHIP_VERSION));
|
||||
EXPECT_STREQ("Info::ISP_VERSION", to_string(Info::ISP_VERSION));
|
||||
}
|
||||
|
||||
TEST(Option, VerifyToString) {
|
||||
|
||||
@@ -3,18 +3,26 @@
|
||||
version: "1.2"
|
||||
in_accel:
|
||||
scale: [ 1., 0., 0., 0., 1., 0., 0., 0., 1. ]
|
||||
assembly: [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ]
|
||||
drift: [ 0., 0., 0. ]
|
||||
noise: [ 1.6925432397973516e-02, 1.6735310195561025e-02,
|
||||
1.7452487504590969e-02 ]
|
||||
bias: [ 1.9031356589714596e-04, 1.6996777864587261e-04,
|
||||
5.4490537096493644e-04 ]
|
||||
x: [ 0.0, 0.0 ]
|
||||
y: [ 0.0, 0.0 ]
|
||||
z: [ 0.0, 0.0 ]
|
||||
in_gyro:
|
||||
scale: [ 1., 0., 0., 0., 1., 0., 0., 0., 1. ]
|
||||
assembly: [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ]
|
||||
drift: [ 0., 0., 0. ]
|
||||
noise: [ 1.0848026158819934e-03, 1.2466367883501759e-03,
|
||||
1.1003229919806443e-03 ]
|
||||
bias: [ 2.3404834136742844e-05, 2.3596771567764949e-05,
|
||||
1.4970418056326829e-05 ]
|
||||
x: [ 0.0, 0.0 ]
|
||||
y: [ 0.0, 0.0 ]
|
||||
z: [ 0.0, 0.0 ]
|
||||
ex_left_to_imu:
|
||||
rotation: [ -6.4662000000000001e-03, -9.9994994000000004e-01,
|
||||
-7.6356499999999999e-03, 9.9997908999999996e-01,
|
||||
|
||||
@@ -2,9 +2,11 @@
|
||||
---
|
||||
device_name: MYNT-EYE-S210A
|
||||
serial_number: "07C40D1C0009071F"
|
||||
firmware_version: "0.1"
|
||||
firmware_version: "1.1"
|
||||
hardware_version: "1.0"
|
||||
spec_version: "1.1"
|
||||
lens_type: "0000"
|
||||
imu_type: "0000"
|
||||
nominal_baseline: 0
|
||||
spec_version: "1.2"
|
||||
lens_type: "0001"
|
||||
imu_type: "0001"
|
||||
nominal_baseline: 80
|
||||
auxiliary_chip_version: "1.0"
|
||||
isp_version: "1.0"
|
||||
|
||||
@@ -3,18 +3,26 @@
|
||||
version: "1.2"
|
||||
in_accel:
|
||||
scale: [ 1., 0., 0., 0., 1., 0., 0., 0., 1. ]
|
||||
assembly: [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ]
|
||||
drift: [ 0., 0., 0. ]
|
||||
noise: [ 1.6925432397973516e-02, 1.6735310195561025e-02,
|
||||
1.7452487504590969e-02 ]
|
||||
bias: [ 1.9031356589714596e-04, 1.6996777864587261e-04,
|
||||
5.4490537096493644e-04 ]
|
||||
x: [ 0.0, 0.0 ]
|
||||
y: [ 0.0, 0.0 ]
|
||||
z: [ 0.0, 0.0 ]
|
||||
in_gyro:
|
||||
scale: [ 1., 0., 0., 0., 1., 0., 0., 0., 1. ]
|
||||
assembly: [ 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 ]
|
||||
drift: [ 0., 0., 0. ]
|
||||
noise: [ 1.0848026158819934e-03, 1.2466367883501759e-03,
|
||||
1.1003229919806443e-03 ]
|
||||
bias: [ 2.3404834136742844e-05, 2.3596771567764949e-05,
|
||||
1.4970418056326829e-05 ]
|
||||
x: [ 0.0, 0.0 ]
|
||||
y: [ 0.0, 0.0 ]
|
||||
z: [ 0.0, 0.0 ]
|
||||
ex_left_to_imu:
|
||||
rotation: [ -6.4662000000000001e-03, -9.9994994000000004e-01,
|
||||
-7.6356499999999999e-03, 9.9997908999999996e-01,
|
||||
|
||||
@@ -52,7 +52,11 @@ bool DeviceWriter::WriteDeviceInfo(const dev_info_t &info) {
|
||||
<< ", spec_version: " << dev_info->spec_version.to_string()
|
||||
<< ", lens_type: " << dev_info->lens_type.to_string()
|
||||
<< ", imu_type: " << dev_info->imu_type.to_string()
|
||||
<< ", nominal_baseline: " << dev_info->nominal_baseline << "}";
|
||||
<< ", nominal_baseline: " << dev_info->nominal_baseline
|
||||
<< ", auxiliary_chip_version: "
|
||||
<< dev_info->auxiliary_chip_version.to_string()
|
||||
<< ", isp_version: "
|
||||
<< dev_info->isp_version.to_string()<< "}";
|
||||
return true;
|
||||
} else {
|
||||
LOG(ERROR) << "Write device info failed";
|
||||
@@ -153,11 +157,20 @@ cv::FileStorage &operator<<(cv::FileStorage &fs, const ImuIntrinsics &in) {
|
||||
scales.push_back(in.scale[i][j]);
|
||||
}
|
||||
}
|
||||
std::vector<double> assembly;
|
||||
for (std::size_t i = 0; i < 3; i++) {
|
||||
for (std::size_t j = 0; j < 3; j++) {
|
||||
assembly.push_back(in.assembly[i][j]);
|
||||
}
|
||||
}
|
||||
fs << "{"
|
||||
<< "scale" << scales << "drift"
|
||||
<< "scale" << scales << "assembly" << assembly << "drift"
|
||||
<< std::vector<double>(in.drift, in.drift + 3) << "noise"
|
||||
<< std::vector<double>(in.noise, in.noise + 3) << "bias"
|
||||
<< std::vector<double>(in.bias, in.bias + 3) << "}";
|
||||
<< std::vector<double>(in.bias, in.bias + 3) << "x"
|
||||
<< std::vector<double>(in.x, in.x + 2) << "y"
|
||||
<< std::vector<double>(in.y, in.y + 2) << "z"
|
||||
<< std::vector<double>(in.z, in.z + 2) << "}";
|
||||
return fs;
|
||||
}
|
||||
|
||||
@@ -215,6 +228,8 @@ bool DeviceWriter::SaveDeviceInfo(
|
||||
fs << "lens_type" << info.lens_type.to_string();
|
||||
fs << "imu_type" << info.imu_type.to_string();
|
||||
fs << "nominal_baseline" << info.nominal_baseline;
|
||||
fs << "auxiliary_chip_version" << info.auxiliary_chip_version.to_string();
|
||||
fs << "isp_version" << info.isp_version.to_string();
|
||||
// save other infos according to spec_version
|
||||
fs.release();
|
||||
return true;
|
||||
@@ -337,6 +352,11 @@ void operator>>(const cv::FileNode &n, ImuIntrinsics &in) {
|
||||
in.scale[i][j] = n["scale"][3 * i + j];
|
||||
}
|
||||
}
|
||||
for (std::size_t i = 0; i < 3; i++) {
|
||||
for (std::size_t j = 0; j < 3; j++) {
|
||||
in.assembly[i][j] = n["assembly"][3 * i + j];
|
||||
}
|
||||
}
|
||||
for (std::size_t i = 0; i < 3; i++) {
|
||||
in.drift[i] = n["drift"][i];
|
||||
}
|
||||
@@ -346,6 +366,15 @@ void operator>>(const cv::FileNode &n, ImuIntrinsics &in) {
|
||||
for (std::size_t i = 0; i < 3; i++) {
|
||||
in.bias[i] = n["bias"][i];
|
||||
}
|
||||
for (std::size_t i = 0; i < 2; i++) {
|
||||
in.x[i] = n["x"][i];
|
||||
}
|
||||
for (std::size_t i = 0; i < 2; i++) {
|
||||
in.y[i] = n["y"][i];
|
||||
}
|
||||
for (std::size_t i = 0; i < 2; i++) {
|
||||
in.z[i] = n["z"][i];
|
||||
}
|
||||
}
|
||||
|
||||
void operator>>(const cv::FileNode &n, Extrinsics &ex) {
|
||||
|
||||
8 wrappers/android/mynteye/.gitignore vendored Normal file
@@ -0,0 +1,8 @@
|
||||
*.iml
|
||||
.gradle
|
||||
/local.properties
|
||||
/.idea/
|
||||
.DS_Store
|
||||
/build
|
||||
/captures
|
||||
.externalNativeBuild
|
||||
11 wrappers/android/mynteye/README.md Normal file
@@ -0,0 +1,11 @@
# MYNT® EYE Android Wrapper

## Prerequisites

The Android device needs to be rooted and must support the [USB3 OTG](https://en.wikipedia.org/wiki/USB_On-The-Go) feature.

## Build & Run

1. Download and install [Android Studio](https://developer.android.com/studio/index.html)
2. Start Android Studio and [download the NDK and build tools](https://developer.android.com/studio/projects/add-native-code)
3. Open this project using `Open an existing Android Studio project`
1 wrappers/android/mynteye/app/.gitignore vendored Normal file
@@ -0,0 +1 @@
|
||||
/build
|
||||
45 wrappers/android/mynteye/app/build.gradle Normal file
@@ -0,0 +1,45 @@
|
||||
apply plugin: 'com.android.application'
|
||||
|
||||
android {
|
||||
compileSdkVersion xversions.compileSdk
|
||||
defaultConfig {
|
||||
applicationId "com.slightech.mynteye.demo"
|
||||
minSdkVersion xversions.minSdk
|
||||
targetSdkVersion xversions.targetSdk
|
||||
versionCode 1
|
||||
versionName "1.0"
|
||||
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
|
||||
ndk {
|
||||
abiFilters xabis
|
||||
}
|
||||
}
|
||||
buildTypes {
|
||||
release {
|
||||
minifyEnabled false
|
||||
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
|
||||
}
|
||||
}
|
||||
compileOptions {
|
||||
sourceCompatibility JavaVersion.VERSION_1_8
|
||||
targetCompatibility JavaVersion.VERSION_1_8
|
||||
}
|
||||
}
|
||||
|
||||
dependencies {
|
||||
implementation fileTree(dir: 'libs', include: ['*.jar'])
|
||||
|
||||
implementation 'androidx.appcompat:appcompat:1.1.0-alpha01'
|
||||
implementation 'androidx.constraintlayout:constraintlayout:2.0.0-alpha3'
|
||||
|
||||
implementation 'com.jakewharton.timber:timber:4.7.1'
|
||||
|
||||
implementation 'com.jakewharton:butterknife:10.0.0'
|
||||
annotationProcessor 'com.jakewharton:butterknife-compiler:10.0.0'
|
||||
|
||||
implementation project(':libmynteye')
|
||||
implementation project(':libshell')
|
||||
|
||||
testImplementation 'junit:junit:4.12'
|
||||
androidTestImplementation 'androidx.test:runner:1.1.1'
|
||||
androidTestImplementation 'androidx.test.espresso:espresso-core:3.1.1'
|
||||
}
|
||||
21 wrappers/android/mynteye/app/proguard-rules.pro vendored Normal file
@@ -0,0 +1,21 @@
|
||||
# Add project specific ProGuard rules here.
|
||||
# You can control the set of applied configuration files using the
|
||||
# proguardFiles setting in build.gradle.
|
||||
#
|
||||
# For more details, see
|
||||
# http://developer.android.com/guide/developing/tools/proguard.html
|
||||
|
||||
# If your project uses WebView with JS, uncomment the following
|
||||
# and specify the fully qualified class name to the JavaScript interface
|
||||
# class:
|
||||
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
|
||||
# public *;
|
||||
#}
|
||||
|
||||
# Uncomment this to preserve the line number information for
|
||||
# debugging stack traces.
|
||||
#-keepattributes SourceFile,LineNumberTable
|
||||
|
||||
# If you keep the line number information, uncomment this to
|
||||
# hide the original source file name.
|
||||
#-renamesourcefileattribute SourceFile
|
||||
@@ -0,0 +1,26 @@
|
||||
package com.slightech.mynteye.demo;
|
||||
|
||||
import android.content.Context;
|
||||
import androidx.test.InstrumentationRegistry;
|
||||
import androidx.test.runner.AndroidJUnit4;
|
||||
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
/**
|
||||
* Instrumented test, which will execute on an Android device.
|
||||
*
|
||||
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
|
||||
*/
|
||||
@RunWith(AndroidJUnit4.class)
|
||||
public class ExampleInstrumentedTest {
|
||||
@Test
|
||||
public void useAppContext() {
|
||||
// Context of the app under test.
|
||||
Context appContext = InstrumentationRegistry.getTargetContext();
|
||||
|
||||
assertEquals("com.slightech.mynteye.demo", appContext.getPackageName());
|
||||
}
|
||||
}
|
||||
26 wrappers/android/mynteye/app/src/main/AndroidManifest.xml Normal file
@@ -0,0 +1,26 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
|
||||
xmlns:tools="http://schemas.android.com/tools"
|
||||
package="com.slightech.mynteye.demo">
|
||||
|
||||
<uses-permission android:name="android.permission.CAMERA" />
|
||||
<uses-permission android:name="android.permission.INTERNET" />
|
||||
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
|
||||
|
||||
<application
|
||||
android:allowBackup="false"
|
||||
android:label="@string/app_name"
|
||||
android:name=".MyApplication"
|
||||
android:supportsRtl="true"
|
||||
android:theme="@style/AppTheme"
|
||||
tools:ignore="GoogleAppIndexingWarning">
|
||||
<activity android:name=".ui.MainActivity"
|
||||
android:screenOrientation="landscape">
|
||||
<intent-filter>
|
||||
<action android:name="android.intent.action.MAIN"/>
|
||||
<category android:name="android.intent.category.LAUNCHER"/>
|
||||
</intent-filter>
|
||||
</activity>
|
||||
</application>
|
||||
|
||||
</manifest>
|
||||
@@ -0,0 +1,35 @@
|
||||
package com.slightech.mynteye.demo;
|
||||
|
||||
import android.app.Application;
|
||||
import timber.log.Timber;
|
||||
|
||||
//import com.stericson.RootShell.RootShell;
|
||||
|
||||
public class MyApplication extends Application {
|
||||
|
||||
static {
|
||||
try {
|
||||
System.loadLibrary("mynteye_jni");
|
||||
} catch (UnsatisfiedLinkError e) {
|
||||
System.err.println("mynteye_jni library failed to load.\n" + e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override public void onCreate() {
|
||||
super.onCreate();
|
||||
Timber.plant(new Timber.DebugTree());
|
||||
//RootShell.debugMode = true;
|
||||
}
|
||||
|
||||
@Override public void onLowMemory() {
|
||||
super.onLowMemory();
|
||||
}
|
||||
|
||||
@Override public void onTrimMemory(int level) {
|
||||
super.onTrimMemory(level);
|
||||
}
|
||||
|
||||
@Override public void onTerminate() {
|
||||
super.onTerminate();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,213 @@
|
||||
package com.slightech.mynteye.demo.camera;
|
||||
|
||||
import android.os.Handler;
|
||||
import android.os.HandlerThread;
|
||||
import com.slightech.mynteye.Device;
|
||||
import com.slightech.mynteye.DeviceUsbInfo;
|
||||
import com.slightech.mynteye.Info;
|
||||
import com.slightech.mynteye.MotionData;
|
||||
import com.slightech.mynteye.MotionIntrinsics;
|
||||
import com.slightech.mynteye.Option;
|
||||
import com.slightech.mynteye.Source;
|
||||
import com.slightech.mynteye.Stream;
|
||||
import com.slightech.mynteye.StreamData;
|
||||
import com.slightech.mynteye.StreamRequest;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Map;
|
||||
import timber.log.Timber;
|
||||
|
||||
public final class Mynteye implements Runnable {
|
||||
|
||||
private Device mDevice;
|
||||
|
||||
private HandlerThread mBackgroundThread;
|
||||
private Handler mBackgroundHandler;
|
||||
|
||||
private boolean mOpened;
|
||||
private boolean mImuEnabled;
|
||||
|
||||
public interface OnStreamDataReceiveListener {
|
||||
void onStreamDataReceive(Stream stream, StreamData data, Handler handler);
|
||||
void onStreamLeftReceive(StreamData data, Handler handler);
|
||||
void onStreamRightReceive(StreamData data, Handler handler);
|
||||
}
|
||||
|
||||
public interface OnMotionDataReceiveListener {
|
||||
void onMotionDataReceive(ArrayList<MotionData> datas, Handler handler);
|
||||
}
|
||||
|
||||
private OnStreamDataReceiveListener mOnStreamDataReceiveListener;
|
||||
private OnMotionDataReceiveListener mOnMotionDataReceiveListener;
|
||||
|
||||
private StreamRequest mStreamRequest;
|
||||
|
||||
public Mynteye(DeviceUsbInfo info) {
|
||||
mDevice = Device.create(info);
|
||||
mOpened = false;
|
||||
mImuEnabled = false;
|
||||
}
|
||||
|
||||
public void setOnStreamDataReceiveListener(OnStreamDataReceiveListener l) {
|
||||
mOnStreamDataReceiveListener = l;
|
||||
}
|
||||
|
||||
public void setOnMotionDataReceiveListener(OnMotionDataReceiveListener l) {
|
||||
mOnMotionDataReceiveListener = l;
|
||||
}
|
||||
|
||||
public ArrayList<StreamRequest> getStreamRequests() {
|
||||
return mDevice.getStreamRequests();
|
||||
}
|
||||
|
||||
public String getDeviceInfos() {
|
||||
StringBuffer sb = new StringBuffer();
|
||||
for (Info info : Info.values()) {
|
||||
sb.append(info.toString());
|
||||
sb.append(": ");
|
||||
sb.append(mDevice.getInfo(info));
|
||||
sb.append('\n');
|
||||
}
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
public String getImageParams() {
|
||||
StringBuffer sb = new StringBuffer();
|
||||
sb.append(Stream.LEFT).append('\n').append(mDevice.getIntrinsics(Stream.LEFT));
|
||||
sb.append("\n\n");
|
||||
sb.append(Stream.RIGHT).append('\n').append(mDevice.getIntrinsics(Stream.RIGHT));
|
||||
sb.append("\n\n");
|
||||
sb.append(Stream.LEFT).append(" > ").append(Stream.RIGHT);
|
||||
sb.append('\n');
|
||||
sb.append(mDevice.getExtrinsics(Stream.LEFT, Stream.RIGHT));
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
public String getImuParams() {
|
||||
StringBuffer sb = new StringBuffer();
|
||||
MotionIntrinsics in = mDevice.getMotionIntrinsics();
|
||||
sb.append("Accel\n").append(in.getAccel());
|
||||
sb.append("\n\n");
|
||||
sb.append("Gyro\n").append(in.getGyro());
|
||||
sb.append("\n\n");
|
||||
sb.append("Imu > ").append(Stream.LEFT).append('\n')
|
||||
.append(mDevice.getMotionExtrinsics(Stream.LEFT));
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
public String getOptionInfos() {
|
||||
StringBuffer sb = new StringBuffer();
|
||||
for (Option op : Option.values()) {
|
||||
if (!mDevice.supportsOption(op)) {
|
||||
continue;
|
||||
}
|
||||
sb.append(op.toString());
|
||||
sb.append(": ");
|
||||
sb.append(mDevice.getOptionValue(op));
|
||||
sb.append("\n ");
|
||||
sb.append(mDevice.getOptionInfo(op));
|
||||
sb.append('\n');
|
||||
}
|
||||
return sb.toString();
|
||||
}
|
||||
|
||||
public boolean isOpened() {
|
||||
return mOpened;
|
||||
}
|
||||
|
||||
public boolean isImuEnabled() {
|
||||
return mImuEnabled;
|
||||
}
|
||||
|
||||
public void setImuEnabled(boolean enabled) {
|
||||
mImuEnabled = enabled;
|
||||
if (mOpened) {
|
||||
Timber.w("Will enable imu when open next time");
|
||||
}
|
||||
}
|
||||
|
||||
public void open() {
|
||||
if (mOpened) return;
|
||||
if (mStreamRequest == null) {
|
||||
Timber.w("Should open with stream request");
|
||||
return;
|
||||
}
|
||||
open(mStreamRequest);
|
||||
}
|
||||
|
||||
public void open(StreamRequest request) {
|
||||
if (mOpened) return;
|
||||
mOpened = true;
|
||||
mStreamRequest = request;
|
||||
|
||||
startBackgroundThread();
|
||||
|
||||
mDevice.configStreamRequest(request);
|
||||
if (mImuEnabled) {
|
||||
mDevice.enableMotionDatas(Integer.MAX_VALUE);
|
||||
mDevice.start(Source.ALL);
|
||||
} else {
|
||||
mDevice.start(Source.VIDEO_STREAMING);
|
||||
}
|
||||
|
||||
mBackgroundHandler.post(this);
|
||||
}
|
||||
|
||||
public void close() {
|
||||
if (!mOpened) return;
|
||||
mOpened = false;
|
||||
stopBackgroundThread();
|
||||
mDevice.stop(Source.ALL);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void run() {
|
||||
//Timber.i("wait streams");
|
||||
mDevice.waitForStreams();
|
||||
|
||||
//Timber.i("get streams");
|
||||
{
|
||||
StreamData data = mDevice.getStreamData(Stream.LEFT);
|
||||
if (mOnStreamDataReceiveListener != null) {
|
||||
mOnStreamDataReceiveListener.onStreamDataReceive(Stream.LEFT, data, mBackgroundHandler);
|
||||
mOnStreamDataReceiveListener.onStreamLeftReceive(data, mBackgroundHandler);
|
||||
}
|
||||
}
|
||||
{
|
||||
StreamData data = mDevice.getStreamData(Stream.RIGHT);
|
||||
if (mOnStreamDataReceiveListener != null) {
|
||||
mOnStreamDataReceiveListener.onStreamDataReceive(Stream.RIGHT, data, mBackgroundHandler);
|
||||
mOnStreamDataReceiveListener.onStreamRightReceive(data, mBackgroundHandler);
|
||||
}
|
||||
}
|
||||
|
||||
//Timber.i("get motions");
|
||||
if (mImuEnabled) {
|
||||
ArrayList<MotionData> datas = mDevice.getMotionDatas();
|
||||
if (mOnMotionDataReceiveListener != null) {
|
||||
mOnMotionDataReceiveListener.onMotionDataReceive(datas, mBackgroundHandler);
|
||||
}
|
||||
}
|
||||
|
||||
if (mOpened) mBackgroundHandler.post(this);
|
||||
}
|
||||
|
||||
private void startBackgroundThread() {
|
||||
mBackgroundThread = new HandlerThread("MynteyeBackground");
|
||||
mBackgroundThread.start();
|
||||
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
|
||||
}
|
||||
|
||||
private void stopBackgroundThread() {
|
||||
mBackgroundThread.quitSafely();
|
||||
//mBackgroundThread.interrupt();
|
||||
try {
|
||||
mBackgroundHandler.removeCallbacksAndMessages(null);
|
||||
mBackgroundThread.join();
|
||||
mBackgroundThread = null;
|
||||
mBackgroundHandler = null;
|
||||
} catch (InterruptedException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,63 @@
|
||||
package com.slightech.mynteye.demo.ui;
|
||||
|
||||
import android.annotation.SuppressLint;
|
||||
import android.content.pm.PackageManager;
|
||||
import android.os.Bundle;
|
||||
import android.widget.Toast;
|
||||
import androidx.annotation.NonNull;
|
||||
import androidx.annotation.Nullable;
|
||||
import androidx.appcompat.app.AppCompatActivity;
|
||||
import androidx.core.app.ActivityCompat;
|
||||
import androidx.core.content.ContextCompat;
|
||||
|
||||
import static android.Manifest.permission.CAMERA;
|
||||
import static android.Manifest.permission.WRITE_EXTERNAL_STORAGE;
|
||||
|
||||
@SuppressLint("Registered")
|
||||
public class BaseActivity extends AppCompatActivity {
|
||||
|
||||
private final int REQ_PERMISSIONS = 1;
|
||||
|
||||
@Override
|
||||
protected void onCreate(@Nullable Bundle savedInstanceState) {
|
||||
super.onCreate(savedInstanceState);
|
||||
requestPermissions();
|
||||
}
|
||||
|
||||
private void requestPermissions() {
|
||||
final String[] permissions = new String[]{WRITE_EXTERNAL_STORAGE, CAMERA};
|
||||
|
||||
boolean granted = true;
|
||||
for (String permission : permissions) {
|
||||
if (ContextCompat.checkSelfPermission(this, permission)
|
||||
!= PackageManager.PERMISSION_GRANTED) {
|
||||
granted = false;
|
||||
}
|
||||
}
|
||||
if (granted) return;
|
||||
|
||||
ActivityCompat.requestPermissions(this, permissions, REQ_PERMISSIONS);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions,
|
||||
@NonNull int[] grantResults) {
|
||||
if (requestCode == REQ_PERMISSIONS) {
|
||||
boolean granted = true;
|
||||
if (grantResults.length < 1) {
|
||||
granted = false;
|
||||
} else {
|
||||
for (int result : grantResults) {
|
||||
if (result != PackageManager.PERMISSION_GRANTED) {
|
||||
granted = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!granted) {
|
||||
Toast.makeText(this, "Permission denied :(", Toast.LENGTH_LONG).show();
|
||||
}
|
||||
} else {
|
||||
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,323 @@
package com.slightech.mynteye.demo.ui;

import android.graphics.Bitmap;
import android.hardware.usb.UsbDevice;
import android.os.Bundle;
import android.os.Handler;
import android.text.TextUtils;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.ArrayAdapter;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
import androidx.appcompat.app.AlertDialog;
import butterknife.BindView;
import butterknife.ButterKnife;
import com.slightech.mynteye.DeviceUsbInfo;
import com.slightech.mynteye.Frame;
import com.slightech.mynteye.ImuData;
import com.slightech.mynteye.MotionData;
import com.slightech.mynteye.Stream;
import com.slightech.mynteye.StreamData;
import com.slightech.mynteye.StreamRequest;
import com.slightech.mynteye.demo.R;
import com.slightech.mynteye.demo.camera.Mynteye;
import com.slightech.mynteye.usb.CameraDialog;
import com.slightech.mynteye.usb.USBMonitor;
import com.slightech.mynteye.usb.USBMonitor.OnDeviceConnectListener;
import com.slightech.mynteye.usb.USBMonitor.UsbControlBlock;
import com.slightech.mynteye.util.BitmapUtils;
import java.util.ArrayList;
import java.util.Locale;
import timber.log.Timber;

public class MainActivity extends BaseActivity implements CameraDialog.CameraDialogParent,
    Mynteye.OnStreamDataReceiveListener, Mynteye.OnMotionDataReceiveListener {

  @BindView(R.id.text) TextView mTextView;
  @BindView(R.id.image_left) ImageView mLeftImageView;
  @BindView(R.id.image_right) ImageView mRightImageView;

  private USBMonitor mUSBMonitor;

  private Mynteye mMynteye;
  private Bitmap mLeftBitmap, mRightBitmap;

  private boolean mImuEnabled;

  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);
    ButterKnife.bind(this);
    mUSBMonitor = new USBMonitor(this, mOnDeviceConnectListener);
  }

  @Override
  protected void onStart() {
    super.onStart();
    mUSBMonitor.register();
    if (mMynteye == null) {
      //actionOpen();
    }
  }

  @Override
  protected void onStop() {
    super.onStop();
    if (mUSBMonitor != null) {
      mUSBMonitor.unregister();
    }
  }

  @Override
  protected void onDestroy() {
    if (mMynteye != null) {
      mMynteye.close();
      mMynteye = null;
    }
    if (mUSBMonitor != null) {
      mUSBMonitor.destroy();
      mUSBMonitor = null;
    }
    super.onDestroy();
  }

  private final OnDeviceConnectListener mOnDeviceConnectListener = new OnDeviceConnectListener() {

    @Override
    public void onAttach(final UsbDevice device) {
      toast("USB_DEVICE_ATTACHED");
    }

    @Override
    public void onConnect(final UsbDevice device, final UsbControlBlock ctrlBlock, final boolean createNew) {
      toast(String.format(Locale.getDefault(), "CONNECT, %s: %s", ctrlBlock.getProductName(), ctrlBlock.getSerial()));
      openDevice(new DeviceUsbInfo(
          ctrlBlock.getVenderId(),
          ctrlBlock.getProductId(),
          ctrlBlock.getFileDescriptor(),
          ctrlBlock.getBusNum(),
          ctrlBlock.getDevNum(),
          getUSBFSName(ctrlBlock),
          ctrlBlock.getProductName(),
          ctrlBlock.getSerial()));
    }

    @Override
    public void onDisconnect(final UsbDevice device, final UsbControlBlock ctrlBlock) {
      toast(String.format(Locale.getDefault(), "DISCONNECT, %s: %s", ctrlBlock.getProductName(), ctrlBlock.getSerial()));
    }

    @Override
    public void onDetach(final UsbDevice device) {
      toast("USB_DEVICE_DETACHED");
    }

    @Override
    public void onCancel(final UsbDevice device) {
    }

    private static final String DEFAULT_USBFS = "/dev/bus/usb";

    private final String getUSBFSName(final UsbControlBlock ctrlBlock) {
      String result = null;
      final String name = ctrlBlock.getDeviceName();
      final String[] v = !TextUtils.isEmpty(name) ? name.split("/") : null;
      if ((v != null) && (v.length > 2)) {
        final StringBuilder sb = new StringBuilder(v[0]);
        for (int i = 1; i < v.length - 2; i++)
          sb.append("/").append(v[i]);
        result = sb.toString();
      }
      if (TextUtils.isEmpty(result)) {
        Timber.w("failed to get USBFS path, try to use default path: %s", name);
        result = DEFAULT_USBFS;
      }
      return result;
    }
  };

  @Override
  public boolean onCreateOptionsMenu(Menu menu) {
    getMenuInflater().inflate(R.menu.menu_main, menu);
    return true;
  }

  @Override
  public boolean onPrepareOptionsMenu(Menu menu) {
    if (mMynteye == null) {
      menu.findItem(R.id.action_open).setVisible(true);
      menu.findItem(R.id.action_close).setVisible(false);
    } else {
      menu.findItem(R.id.action_open).setVisible(!mMynteye.isOpened());
      menu.findItem(R.id.action_close).setVisible(mMynteye.isOpened());
    }
    menu.findItem(R.id.check_imu_data).setChecked(mImuEnabled);
    boolean featuresUsable = mMynteye != null && mMynteye.isOpened();
    menu.findItem(R.id.show_device_infos).setEnabled(featuresUsable);
    menu.findItem(R.id.show_image_params).setEnabled(featuresUsable);
    menu.findItem(R.id.show_imu_params).setEnabled(featuresUsable);
    menu.findItem(R.id.show_option_infos).setEnabled(featuresUsable);
    return super.onPrepareOptionsMenu(menu);
  }

  @Override
  public boolean onOptionsItemSelected(MenuItem item) {
    switch (item.getItemId()) {
      case R.id.action_open:
        actionOpen();
        return true;
      case R.id.action_close:
        actionClose();
        return true;
      case R.id.check_imu_data:
        mImuEnabled = !mImuEnabled;
        item.setChecked(mImuEnabled);
        return true;
      case R.id.show_device_infos:
        alert(R.string.device_infos, mMynteye.getDeviceInfos());
        return true;
      case R.id.show_image_params:
        alert(R.string.image_params, mMynteye.getImageParams());
        return true;
      case R.id.show_imu_params:
        alert(R.string.imu_params, mMynteye.getImuParams());
        return true;
      case R.id.show_option_infos:
        alert(R.string.option_infos, mMynteye.getOptionInfos());
        return true;
      default:
        return super.onOptionsItemSelected(item);
    }
  }

  private void actionOpen() {
    mTextView.setText("");
    if (mMynteye == null) {
      CameraDialog.showDialog(this);
    } else {
      mMynteye.setImuEnabled(mImuEnabled);
      mMynteye.open();
    }
  }

  private void actionClose() {
    if (mMynteye != null) {
      mMynteye.close();
      mMynteye = null;
    }
    invalidateOptionsMenu();
  }

  private void openDevice(DeviceUsbInfo info) {
    mMynteye = new Mynteye(info);
    ArrayList<StreamRequest> requests = mMynteye.getStreamRequests();
    if (requests.isEmpty()) {
      alert("Warning", "There are no streams to request :(");
      mMynteye = null;
    } else {
      ArrayList<String> items = new ArrayList<>();
      for (StreamRequest req : requests) {
        items.add(req.toString());
      }

      AlertDialog dialog = new AlertDialog.Builder(this)
          .setTitle("StreamRequests")
          .create();
      ListView listView = new ListView(this);
      listView.setAdapter(new ArrayAdapter<>(this, android.R.layout.simple_list_item_1, items));
      listView.setOnItemClickListener((parent, view, position, id) -> {
        dialog.dismiss();
        mMynteye.setOnStreamDataReceiveListener(this);
        mMynteye.setOnMotionDataReceiveListener(this);
        mMynteye.setImuEnabled(mImuEnabled);
        mMynteye.open(requests.get(position));
        invalidateOptionsMenu();
      });
      dialog.setOnCancelListener(dlg -> {
        mMynteye = null;
      });
      dialog.setView(listView);
      dialog.show();
    }
  }

  @Override
  public USBMonitor getUSBMonitor() {
    return mUSBMonitor;
  }

  @Override
  public void onDialogResult(boolean canceled) {
  }

  @Override
  public void onStreamDataReceive(Stream stream, StreamData data, Handler handler) {
  }

  @Override
  public void onStreamLeftReceive(StreamData data, Handler handler) {
    //Timber.i("onStreamLeftReceive");
    Frame frame = data.frame();
    if (mLeftBitmap == null) {
      mLeftBitmap = Bitmap.createBitmap(frame.width(), frame.height(), Bitmap.Config.ARGB_8888);
    }
    BitmapUtils.copyPixels(frame, mLeftBitmap);
    mLeftImageView.post(() -> mLeftImageView.setImageBitmap(mLeftBitmap));
  }

  @Override
  public void onStreamRightReceive(StreamData data, Handler handler) {
    //Timber.i("onStreamRightReceive");
    Frame frame = data.frame();
    if (mRightBitmap == null) {
      mRightBitmap = Bitmap.createBitmap(frame.width(), frame.height(), Bitmap.Config.ARGB_8888);
    }
    BitmapUtils.copyPixels(frame, mRightBitmap);
    mRightImageView.post(() -> mRightImageView.setImageBitmap(mRightBitmap));
  }

  @Override
  public void onMotionDataReceive(ArrayList<MotionData> datas, Handler handler) {
    if (datas.isEmpty()) return;
    ImuData data = datas.get(0).imu();
    mTextView.post(() -> {
      StringBuffer sb = new StringBuffer();
      final int flag = data.getFlag();
      if (flag == 0) { // accel & gyro
        sb.append("Accel: ").append(data.getAccel());
        sb.append("\nGyro: ").append(data.getGyro());
      } else if (flag == 1) { // accel
        sb.append("Accel: ").append(data.getAccel());
        sb.append("\nGyro: -");
      } else if (flag == 2) { // gyro
        sb.append("Accel: -");
        sb.append("\nGyro: ").append(data.getGyro());
      }
      mTextView.setText(sb.toString());
    });
  }

  private void toast(int textId) {
    toast(getString(textId));
  }

  private void toast(CharSequence text) {
    Toast.makeText(this, text, Toast.LENGTH_LONG).show();
  }

  private void alert(int titleId, CharSequence message) {
    alert(getString(titleId), message);
  }

  private void alert(CharSequence title, CharSequence message) {
    new AlertDialog.Builder(this)
        .setTitle(title)
        .setMessage(message)
        .setPositiveButton(android.R.string.ok, null)
        .show();
  }
}
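Stripped of the menu and dialog plumbing, the open/close sequence MainActivity performs reduces to a few calls on the Mynteye wrapper. The sketch below is a hypothetical helper, not part of the sample: it only uses methods that appear above (getStreamRequests, setOnStreamDataReceiveListener, setOnMotionDataReceiveListener, setImuEnabled, open, close), and it assumes the DeviceUsbInfo was already built from USBMonitor's onConnect callback, exactly as openDevice(...) does.

package com.slightech.mynteye.demo.camera;

import com.slightech.mynteye.DeviceUsbInfo;
import com.slightech.mynteye.StreamRequest;
import java.util.ArrayList;

// Hypothetical helper (invented class name) condensing MainActivity's flow.
public final class MynteyeQuickStart {

  // Opens the first available stream request; `info` is assumed to come from
  // USBMonitor's onConnect callback, as in openDevice(...) above.
  public static Mynteye openFirstRequest(DeviceUsbInfo info,
      Mynteye.OnStreamDataReceiveListener streamListener,
      Mynteye.OnMotionDataReceiveListener motionListener) {
    Mynteye mynteye = new Mynteye(info);
    ArrayList<StreamRequest> requests = mynteye.getStreamRequests();
    if (requests.isEmpty()) return null;      // nothing to stream
    mynteye.setOnStreamDataReceiveListener(streamListener);
    mynteye.setOnMotionDataReceiveListener(motionListener);
    mynteye.setImuEnabled(true);              // also deliver MotionData batches
    mynteye.open(requests.get(0));            // MainActivity lets the user pick one instead
    return mynteye;
  }

  // Release the device when done (MainActivity does this in actionClose()
  // and onDestroy()).
  public static void shutdown(Mynteye mynteye) {
    if (mynteye != null) mynteye.close();
  }
}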
@@ -0,0 +1,76 @@
package com.slightech.mynteye.demo.util;

import com.stericson.RootShell.RootShell;
import com.stericson.RootShell.exceptions.RootDeniedException;
import com.stericson.RootShell.execution.Command;
import com.stericson.RootShell.execution.Shell;
import java.io.IOException;
import java.util.concurrent.TimeoutException;
import timber.log.Timber;

public final class RootUtils {

  public interface OnRequestAccessibleListener {
    void onRequestAccessible(boolean ok);
  }

  public static boolean isRooted() {
    if (!RootShell.isRootAvailable()) {
      Timber.e("Root not found");
      return false;
    }

    try {
      RootShell.getShell(true);
    } catch (IOException e) {
      e.printStackTrace();
      return false;
    } catch (TimeoutException e) {
      Timber.e("TIMEOUT EXCEPTION!");
      e.printStackTrace();
      return false;
    } catch (RootDeniedException e) {
      Timber.e("ROOT DENIED EXCEPTION!");
      e.printStackTrace();
      return false;
    }

    try {
      if (!RootShell.isAccessGiven()) {
        Timber.e("ERROR: No root access to this device.");
        return false;
      }
    } catch (Exception e) {
      Timber.e("ERROR: could not determine root access to this device.");
      return false;
    }

    return true;
  }

  public static void requestAccessible(OnRequestAccessibleListener l) {
    try {
      Shell sh = RootShell.getShell(true);
      sh.add(new Command(1, "chmod 666 /dev/video*") {
        @Override
        public void commandOutput(int id, String line) {
          Timber.d("commandOutput: %s", line);
          super.commandOutput(id, line);
        }
        @Override
        public void commandTerminated(int id, String reason) {
          Timber.d("commandTerminated: %s", reason);
        }
        @Override
        public void commandCompleted(int id, int exitcode) {
          Timber.d("commandCompleted: %s", ((exitcode == 0) ? "ok" : "fail"));
          if (l != null) l.onRequestAccessible(exitcode == 0);
        }
      });
      sh.close();
    } catch (Exception e) {
      e.printStackTrace();
    }
  }

}
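On rooted devices, the chmod issued by RootUtils relaxes the /dev/video* nodes so an ordinary app process can reach the UVC camera directly. A hedged usage sketch follows; the call site and class name are assumptions for illustration, and only isRooted() and requestAccessible(...) from the class above are used.

package com.slightech.mynteye.demo;

import com.slightech.mynteye.demo.util.RootUtils;
import timber.log.Timber;

// Hypothetical call site (invented class name): check for root and relax
// /dev/video* permissions before opening the USB camera.
public final class RootAccessCheck {

  public static void prepareVideoNodes(Runnable onReady) {
    if (!RootUtils.isRooted()) {
      Timber.w("Device is not rooted; /dev/video* may stay inaccessible");
      return;
    }
    RootUtils.requestAccessible(ok -> {
      Timber.d("chmod 666 /dev/video*: %s", ok ? "ok" : "fail");
      if (ok && onReady != null) onReady.run();
    });
  }
}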
@@ -0,0 +1,58 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout
    xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:app="http://schemas.android.com/apk/res-auto"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:orientation="horizontal"
    tools:context=".ui.MainActivity"
    >

  <TextView
      android:id="@+id/text"
      android:layout_width="match_parent"
      android:layout_height="wrap_content"
      android:layout_marginEnd="8dp"
      android:layout_marginStart="8dp"
      android:text="@string/tip_open"
      app:layout_constraintEnd_toEndOf="parent"
      app:layout_constraintStart_toStartOf="parent"
      app:layout_constraintTop_toTopOf="parent"
      />

  <androidx.constraintlayout.widget.ConstraintLayout
      android:id="@+id/layout_image"
      android:layout_width="0dp"
      android:layout_height="0dp"
      app:layout_constraintBottom_toBottomOf="parent"
      app:layout_constraintEnd_toEndOf="parent"
      app:layout_constraintStart_toStartOf="parent"
      app:layout_constraintTop_toTopOf="parent"
      >

    <ImageView
        android:id="@+id/image_left"
        android:layout_width="0dp"
        android:layout_height="0dp"
        app:layout_constraintBottom_toBottomOf="parent"
        app:layout_constraintEnd_toStartOf="@id/image_right"
        app:layout_constraintStart_toStartOf="parent"
        app:layout_constraintTop_toTopOf="parent"
        app:layout_constraintVertical_weight="1"
        />

    <ImageView
        android:id="@+id/image_right"
        android:layout_width="0dp"
        android:layout_height="0dp"
        app:layout_constraintBottom_toBottomOf="parent"
        app:layout_constraintEnd_toEndOf="parent"
        app:layout_constraintStart_toEndOf="@id/image_left"
        app:layout_constraintTop_toTopOf="parent"
        app:layout_constraintVertical_weight="1"
        />

  </androidx.constraintlayout.widget.ConstraintLayout>

</androidx.constraintlayout.widget.ConstraintLayout>
36  wrappers/android/mynteye/app/src/main/res/menu/menu_main.xml  Normal file
@@ -0,0 +1,36 @@
<?xml version="1.0" encoding="utf-8"?>
<menu xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:app="http://schemas.android.com/apk/res-auto">
  <item
      android:id="@+id/action_open"
      android:title="@string/open"
      app:showAsAction="ifRoom|withText" />
  <item
      android:id="@+id/action_close"
      android:title="@string/close"
      app:showAsAction="ifRoom|withText" />

  <item
      android:id="@+id/check_imu_data"
      android:title="@string/imu_data"
      android:checkable="true"
      android:checked="false"
      app:showAsAction="never" />

  <item
      android:id="@+id/show_device_infos"
      android:title="@string/device_infos"
      app:showAsAction="never" />
  <item
      android:id="@+id/show_image_params"
      android:title="@string/image_params"
      app:showAsAction="never" />
  <item
      android:id="@+id/show_imu_params"
      android:title="@string/imu_params"
      app:showAsAction="never" />
  <item
      android:id="@+id/show_option_infos"
      android:title="@string/option_infos"
      app:showAsAction="never" />
</menu>
@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
  <color name="colorPrimary">#008577</color>
  <color name="colorPrimaryDark">#00574B</color>
  <color name="colorAccent">#D81B60</color>
</resources>
15  wrappers/android/mynteye/app/src/main/res/values/strings.xml  Normal file
@@ -0,0 +1,15 @@
<resources>
  <string name="app_name">MYNTEYE-S SDK Sample</string>

  <string name="tip_open">Please \"Open\" the camera!</string>

  <string name="open">Open</string>
  <string name="close">Close</string>

  <string name="imu_data">Imu Data</string>

  <string name="device_infos">Device Infos</string>
  <string name="image_params">Image Params</string>
  <string name="imu_params">Imu Params</string>
  <string name="option_infos">Option Infos</string>
</resources>
11  wrappers/android/mynteye/app/src/main/res/values/styles.xml  Normal file
@@ -0,0 +1,11 @@
<resources>

  <!-- Base application theme. -->
  <style name="AppTheme" parent="Theme.AppCompat.Light.DarkActionBar">
    <!-- Customize your theme here. -->
    <item name="colorPrimary">@color/colorPrimary</item>
    <item name="colorPrimaryDark">@color/colorPrimaryDark</item>
    <item name="colorAccent">@color/colorAccent</item>
  </style>

</resources>
@@ -0,0 +1,17 @@
package com.slightech.mynteye.demo;

import org.junit.Test;

import static org.junit.Assert.*;

/**
 * Example local unit test, which will execute on the development machine (host).
 *
 * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
 */
public class ExampleUnitTest {
  @Test
  public void addition_isCorrect() {
    assertEquals(4, 2 + 2);
  }
}
27  wrappers/android/mynteye/build.gradle  Normal file
@@ -0,0 +1,27 @@
// Top-level build file where you can add configuration options common to all sub-projects/modules.

buildscript {
    apply from: rootProject.file('gradle/dependencies.gradle')

    repositories {
        google()
        jcenter()
    }
    dependencies {
        classpath 'com.android.tools.build:gradle:3.3.1'

        // NOTE: Do not place your application dependencies here; they belong
        // in the individual module build.gradle files
    }
}

allprojects {
    repositories {
        google()
        jcenter()
    }
}

task clean(type: Delete) {
    delete rootProject.buildDir
}
17  wrappers/android/mynteye/gradle.properties  Normal file
@@ -0,0 +1,17 @@
# Project-wide Gradle settings.
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
android.enableJetifier=true
android.useAndroidX=true
org.gradle.jvmargs=-Xmx1536m
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. More details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true
10  wrappers/android/mynteye/gradle/dependencies.gradle  Normal file
@@ -0,0 +1,10 @@
ext {
    xversions = [
        'compileSdk': 28,
        'minSdk': 24,
        'targetSdk': 28,
    ]

    xabis = ['arm64-v8a', 'armeabi-v7a'] as String[]
    //xabis = ['arm64-v8a', 'armeabi-v7a', 'x86', 'x86_64'] as String[]
}
BIN  wrappers/android/mynteye/gradle/wrapper/gradle-wrapper.jar  vendored  Normal file
Binary file not shown.
6  wrappers/android/mynteye/gradle/wrapper/gradle-wrapper.properties  vendored  Normal file
@@ -0,0 +1,6 @@
#Tue Jan 15 14:54:17 CST 2019
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-4.10.1-all.zip
Some files were not shown because too many files have changed in this diff.