Merge branch 'develop' of http://gitlab.mynt.com/mynteye/mynt-eye-s-sdk into develop

This commit is contained in:
Osenberg 2019-03-15 14:31:12 +08:00
commit 3f80c8bbff
210 changed files with 15818 additions and 643 deletions

View File

@ -14,7 +14,7 @@
cmake_minimum_required(VERSION 3.0)
project(mynteye VERSION 2.3.2 LANGUAGES C CXX)
project(mynteye VERSION 2.3.3 LANGUAGES C CXX)
include(cmake/Common.cmake)
@ -199,9 +199,6 @@ set(MYNTEYE_SRCS
src/mynteye/device/standard2/channels_adapter_s2.cc
src/mynteye/device/standard2/device_s2.cc
src/mynteye/device/standard2/streams_adapter_s2.cc
src/mynteye/device/standard2/channels_adapter_s210a.cc
src/mynteye/device/standard2/device_s210a.cc
src/mynteye/device/standard2/streams_adapter_s210a.cc
src/mynteye/device/streams.cc
src/mynteye/device/types.cc
src/mynteye/device/utils.cc

View File

@ -1,6 +1,6 @@
# MYNT® EYE S SDK
[![](https://img.shields.io/badge/MYNT%20EYE%20S%20SDK-2.3.2-brightgreen.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK)
[![](https://img.shields.io/badge/MYNT%20EYE%20S%20SDK-2.3.3-brightgreen.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK)
## Overview
@ -17,11 +17,11 @@ Please follow the guide doc to install the SDK on different platforms.
## Documentations
* [API Doc](https://github.com/slightech/MYNT-EYE-S-SDK/releases): API reference, some guides and data spec.
* en: [![](https://img.shields.io/badge/Download-PDF-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK/files/2913110/mynt-eye-s-sdk-apidoc-2.3.2-en.pdf) [![](https://img.shields.io/badge/Download-HTML-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK/files/2913111/mynt-eye-s-sdk-apidoc-2.3.2-en.zip) [![](https://img.shields.io/badge/Online-HTML-blue.svg?style=flat)](https://slightech.github.io/MYNT-EYE-S-SDK/)
* zh-Hans: [![](https://img.shields.io/badge/Download-PDF-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK/files/2913112/mynt-eye-s-sdk-apidoc-2.3.2-zh-Hans.pdf) [![](https://img.shields.io/badge/Download-HTML-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK/files/2913113/mynt-eye-s-sdk-apidoc-2.3.2-zh-Hans.zip) [![](https://img.shields.io/badge/Online-HTML-blue.svg?style=flat)](http://doc.myntai.com/resource/api/mynt-eye-s-sdk-apidoc-2.3.2-zh-Hans/mynt-eye-s-sdk-apidoc-2.3.2-zh-Hans/index.html)
* en: [![](https://img.shields.io/badge/Download-PDF-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK/files/2939071/mynt-eye-s-sdk-apidoc-2.3.3-en.pdf) [![](https://img.shields.io/badge/Download-HTML-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK/files/2939073/mynt-eye-s-sdk-apidoc-2.3.3-en.zip) [![](https://img.shields.io/badge/Online-HTML-blue.svg?style=flat)](https://slightech.github.io/MYNT-EYE-S-SDK/)
* zh-Hans: [![](https://img.shields.io/badge/Download-PDF-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK/files/2939074/mynt-eye-s-sdk-apidoc-2.3.3-zh-Hans.pdf) [![](https://img.shields.io/badge/Download-HTML-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK/files/2939076/mynt-eye-s-sdk-apidoc-2.3.3-zh-Hans.zip) [![](https://img.shields.io/badge/Online-HTML-blue.svg?style=flat)](http://doc.myntai.com/resource/api/mynt-eye-s-sdk-apidoc-2.3.3-zh-Hans/mynt-eye-s-sdk-apidoc-2.3.3-zh-Hans/index.html)
* [Guide Doc](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/releases): How to install and start using the SDK.
* en: [![](https://img.shields.io/badge/Download-PDF-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/files/2913052/mynt-eye-s-sdk-guide-2.3.2-en.pdf) [![](https://img.shields.io/badge/Download-HTML-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/files/2913053/mynt-eye-s-sdk-guide-2.3.2-en.zip) [![](https://img.shields.io/badge/Online-HTML-blue.svg?style=flat)](https://slightech.github.io/MYNT-EYE-S-SDK-Guide/)
* zh-Hans: [![](https://img.shields.io/badge/Download-PDF-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/files/2913054/mynt-eye-s-sdk-guide-2.3.2-zh-Hans.pdf) [![](https://img.shields.io/badge/Download-HTML-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/files/2913056/mynt-eye-s-sdk-guide-2.3.2-zh-Hans.zip) [![](https://img.shields.io/badge/Online-HTML-blue.svg?style=flat)](http://doc.myntai.com/resource/sdk/mynt-eye-s-sdk-guide-2.3.2-zh-Hans/mynt-eye-s-sdk-guide-2.3.2-zh-Hans/index.html)
* en: [![](https://img.shields.io/badge/Download-PDF-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/files/2938988/mynt-eye-s-sdk-guide-2.3.3-en.pdf) [![](https://img.shields.io/badge/Download-HTML-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/files/2938990/mynt-eye-s-sdk-guide-2.3.3-en.zip) [![](https://img.shields.io/badge/Online-HTML-blue.svg?style=flat)](https://slightech.github.io/MYNT-EYE-S-SDK-Guide/)
* zh-Hans: [![](https://img.shields.io/badge/Download-PDF-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/files/2938991/mynt-eye-s-sdk-guide-2.3.3-zh-Hans.pdf) [![](https://img.shields.io/badge/Download-HTML-blue.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK-Guide/files/2938992/mynt-eye-s-sdk-guide-2.3.3-zh-Hans.zip) [![](https://img.shields.io/badge/Online-HTML-blue.svg?style=flat)](http://doc.myntai.com/resource/sdk/mynt-eye-s-sdk-guide-2.3.3-zh-Hans/mynt-eye-s-sdk-guide-2.3.3-zh-Hans/index.html)
> Supported languages: `en`, `zh-Hans`.

View File

@ -38,7 +38,7 @@ PROJECT_NAME = "MYNT EYE S SDK"
# could be handy for archiving the generated documentation or if some version
# control system is used.
PROJECT_NUMBER = 2.3.2
PROJECT_NUMBER = 2.3.3
# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a

View File

@ -38,7 +38,7 @@ PROJECT_NAME = "MYNT EYE S SDK"
# could be handy for archiving the generated documentation or if some version
# control system is used.
PROJECT_NUMBER = 2.3.2
PROJECT_NUMBER = 2.3.3
# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a

View File

@ -237,6 +237,11 @@ class MYNTEYE_API API {
void SetDisparityComputingMethodType(
const DisparityComputingMethod &MethodType);
/**
* Set if the duplicate frames is enable.
*/
void setDuplicate(bool isEnable);
/**
* Set the option value.
*/
@ -334,7 +339,8 @@ class MYNTEYE_API API {
/**
* Enable motion datas with timestamp correspondence of some stream.
*/
void EnableTimestampCorrespondence(const Stream &stream);
void EnableTimestampCorrespondence(const Stream &stream,
bool keep_accel_then_gyro = true);
/**
* Enable the plugin.

View File

@ -77,6 +77,7 @@ struct glog_init {
#include "mynteye/mynteye.h"
#define MYNTEYE_MAX_LOG_LEVEL google::INFO
// #define MYNTEYE_MAX_LOG_LEVEL 2
#include "mynteye/miniglog.h"

View File

@ -157,9 +157,6 @@ class MYNTEYE_API LogSink {
// Global set of log sinks. The actual object is defined in logging.cc.
MYNTEYE_API extern std::set<LogSink *> log_sinks_global;
// Added by chachi - a runtime global maximum log level. Defined in logging.cc
MYNTEYE_API extern int log_severity_global;
inline void InitGoogleLogging(char */*argv*/) {
// Do nothing; this is ignored.
}
@ -315,9 +312,8 @@ class MYNTEYE_API LoggerVoidify {
// Log only if condition is met. Otherwise evaluates to void.
#define LOG_IF(severity, condition) \
(static_cast<int>(severity) > google::log_severity_global || !(condition)) ? \
(void) 0 : LoggerVoidify() & \
MessageLogger((char *)__FILE__, __LINE__, "native", severity).stream()
!(condition) ? (void) 0 : LoggerVoidify() & \
MessageLogger((char *)__FILE__, __LINE__, "native", severity).stream()
// Log only if condition is NOT met. Otherwise evaluates to void.
#define LOG_IF_FALSE(severity, condition) LOG_IF(severity, !(condition))

View File

@ -27,9 +27,9 @@
# endif
#endif
#define MYNTEYE_API_VERSION_MAJOR @PROJECT_VERSION_MAJOR@
#define MYNTEYE_API_VERSION_MINOR @PROJECT_VERSION_MINOR@
#define MYNTEYE_API_VERSION_PATCH @PROJECT_VERSION_PATCH@
#define MYNTEYE_API_VERSION_MAJOR @mynteye_VERSION_MAJOR@
#define MYNTEYE_API_VERSION_MINOR @mynteye_VERSION_MINOR@
#define MYNTEYE_API_VERSION_PATCH @mynteye_VERSION_PATCH@
/* MYNTEYE_API_VERSION is (major << 16) + (minor << 8) + patch */
#define MYNTEYE_API_VERSION \

View File

@ -11,7 +11,9 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <string>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include "mynteye/api/api.h"
@ -26,22 +28,24 @@ int main(int argc, char *argv[]) {
if (!ok) return 1;
api->ConfigStreamRequest(request);
api->SetDisparityComputingMethodType(DisparityComputingMethod::BM);
api->EnableStreamData(Stream::DEPTH);
api->Start(Source::VIDEO_STREAMING);
cv::namedWindow("frame");
cv::namedWindow("depth");
while (true) {
api->WaitForStreams();
auto &&left_data = api->GetStreamData(Stream::LEFT);
auto &&right_data = api->GetStreamData(Stream::RIGHT);
cv::Mat img;
cv::hconcat(left_data.frame, right_data.frame, img);
cv::imshow("frame", img);
if (!left_data.frame.empty() && !right_data.frame.empty()) {
cv::Mat img;
cv::hconcat(left_data.frame, right_data.frame, img);
cv::imshow("frame", img);
}
auto &&depth_data = api->GetStreamData(Stream::DEPTH);
if (!depth_data.frame.empty()) {

View File

@ -34,8 +34,8 @@ int main(int argc, char *argv[]) {
api->Start(Source::VIDEO_STREAMING);
cv::namedWindow("frame");
// cv::namedWindow("disparity");
cv::namedWindow("disparity_normalized");
cv::namedWindow("disparity");
// cv::namedWindow("disparity_normalized");
while (true) {
api->WaitForStreams();
@ -43,20 +43,22 @@ int main(int argc, char *argv[]) {
auto &&left_data = api->GetStreamData(Stream::LEFT);
auto &&right_data = api->GetStreamData(Stream::RIGHT);
cv::Mat img;
cv::hconcat(left_data.frame, right_data.frame, img);
cv::imshow("frame", img);
// auto &&disp_data = api->GetStreamData(Stream::DISPARITY);
// if (!disp_data.frame.empty()) {
// cv::imshow("disparity", disp_data.frame);
// }
auto &&disp_norm_data = api->GetStreamData(Stream::DISPARITY_NORMALIZED);
if (!disp_norm_data.frame.empty()) {
cv::imshow("disparity_normalized", disp_norm_data.frame); // CV_8UC1
if (!left_data.frame.empty() && !right_data.frame.empty()) {
cv::Mat img;
cv::hconcat(left_data.frame, right_data.frame, img);
cv::imshow("frame", img);
}
auto &&disp_data = api->GetStreamData(Stream::DISPARITY);
if (!disp_data.frame.empty()) {
cv::imshow("disparity", disp_data.frame);
}
// auto &&disp_norm_data = api->GetStreamData(Stream::DISPARITY_NORMALIZED);
// if (!disp_norm_data.frame.empty()) {
// cv::imshow("disparity_normalized", disp_norm_data.frame); // CV_8UC1
// }
char key = static_cast<char>(cv::waitKey(1));
if (key == 27 || key == 'q' || key == 'Q') { // ESC/Q
break;

View File

@ -44,30 +44,32 @@ int main(int argc, char *argv[]) {
auto &&left_data = api->GetStreamData(Stream::LEFT);
auto &&right_data = api->GetStreamData(Stream::RIGHT);
cv::Mat img;
cv::hconcat(left_data.frame, right_data.frame, img);
if (!left_data.frame.empty() && !right_data.frame.empty()) {
cv::Mat img;
cv::hconcat(left_data.frame, right_data.frame, img);
auto &&motion_datas = api->GetMotionDatas();
/*
for (auto &&data : motion_datas) {
LOG(INFO) << "Imu frame_id: " << data.imu->frame_id
<< ", timestamp: " << data.imu->timestamp
<< ", accel_x: " << data.imu->accel[0]
<< ", accel_y: " << data.imu->accel[1]
<< ", accel_z: " << data.imu->accel[2]
<< ", gyro_x: " << data.imu->gyro[0]
<< ", gyro_y: " << data.imu->gyro[1]
<< ", gyro_z: " << data.imu->gyro[2]
<< ", temperature: " << data.imu->temperature;
auto &&motion_datas = api->GetMotionDatas();
/*
for (auto &&data : motion_datas) {
LOG(INFO) << "Imu frame_id: " << data.imu->frame_id
<< ", timestamp: " << data.imu->timestamp
<< ", accel_x: " << data.imu->accel[0]
<< ", accel_y: " << data.imu->accel[1]
<< ", accel_z: " << data.imu->accel[2]
<< ", gyro_x: " << data.imu->gyro[0]
<< ", gyro_y: " << data.imu->gyro[1]
<< ", gyro_z: " << data.imu->gyro[2]
<< ", temperature: " << data.imu->temperature;
}
*/
painter.DrawImgData(img, *left_data.img);
if (!motion_datas.empty()) {
painter.DrawImuData(img, *motion_datas[0].imu);
}
cv::imshow("frame", img);
}
*/
painter.DrawImgData(img, *left_data.img);
if (!motion_datas.empty()) {
painter.DrawImuData(img, *motion_datas[0].imu);
}
cv::imshow("frame", img);
char key = static_cast<char>(cv::waitKey(1));
if (key == 27 || key == 'q' || key == 'Q') { // ESC/Q

View File

@ -18,6 +18,10 @@
#include "util/cv_painter.h"
// #define CHECK_ACCEL_THEN_GYRO
#define SEQ_FIRST 1 // accel
#define SEQ_SECOND 2 // gyro
MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
@ -40,39 +44,88 @@ int main(int argc, char *argv[]) {
std::uint64_t prev_img_stamp = 0;
std::uint64_t prev_imu_stamp = 0;
#ifdef CHECK_ACCEL_THEN_GYRO
std::uint8_t prev_imu_flag = 0;
std::uint64_t imu_count = 0;
std::uint64_t imu_disorder_count = 0;
bool exit = false;
#endif
while (true) {
api->WaitForStreams();
auto &&left_data = api->GetStreamData(Stream::LEFT);
auto &&right_data = api->GetStreamData(Stream::RIGHT);
if (!left_data.frame.empty() || !right_data.frame.empty()) {
auto img_stamp = left_data.img->timestamp;
LOG(INFO) << "Img timestamp: " << img_stamp
<< ", diff_prev=" << (img_stamp - prev_img_stamp);
prev_img_stamp = img_stamp;
auto img_stamp = left_data.img->timestamp;
LOG(INFO) << "Img timestamp: " << img_stamp
<< ", diff_prev=" << (img_stamp - prev_img_stamp);
prev_img_stamp = img_stamp;
cv::Mat img;
cv::hconcat(left_data.frame, right_data.frame, img);
cv::Mat img;
cv::hconcat(left_data.frame, right_data.frame, img);
auto &&motion_datas = api->GetMotionDatas();
LOG(INFO) << "Imu count: " << motion_datas.size();
if (motion_datas.size() == 0) {
continue;
}
for (size_t i = 0, n = motion_datas.size() - 1; i <= n; ++i) {
auto data = motion_datas[i];
auto imu_flag = data.imu->flag;
auto imu_stamp = data.imu->timestamp;
auto &&motion_datas = api->GetMotionDatas();
LOG(INFO) << "Imu count: " << motion_datas.size();
for (auto &&data : motion_datas) {
auto imu_stamp = data.imu->timestamp;
LOG(INFO) << "Imu timestamp: " << imu_stamp
<< ", diff_prev=" << (imu_stamp - prev_imu_stamp)
<< ", diff_img=" << (1.f + imu_stamp - img_stamp);
prev_imu_stamp = imu_stamp;
std::stringstream ss;
if (imu_flag == 0) { // accel + gyro
ss << "Imu";
} else if (imu_flag == 1) { // accel
ss << "Accel";
} else if (imu_flag == 2) { // gyro
ss << "Gyro";
}
ss << " timestamp: " << imu_stamp
<< ", diff_prev=" << (imu_stamp - prev_imu_stamp)
<< ", diff_img=" << (1.0f + imu_stamp - img_stamp);
#ifdef CHECK_ACCEL_THEN_GYRO
if (imu_flag > 0) {
bool ok = false;
if (i == 0) { // first
ok = (imu_flag == SEQ_FIRST);
} else if (i == n) { // last
ok = (imu_flag == SEQ_SECOND);
} else {
if (imu_flag == SEQ_FIRST) {
ok = (prev_imu_flag == SEQ_SECOND);
} else if (imu_flag == SEQ_SECOND) {
ok = (prev_imu_flag == SEQ_FIRST);
}
}
ss << (ok ? "" : " x");
if (!ok) ++imu_disorder_count;
prev_imu_flag = imu_flag;
if (!exit) {
if (!ok) exit = true;
}
}
#endif
LOG(INFO) << ss.str();
prev_imu_stamp = imu_stamp;
}
LOG(INFO);
#ifdef CHECK_ACCEL_THEN_GYRO
imu_count += motion_datas.size();
if (exit) break;
#endif
/*
painter.DrawImgData(img, *left_data.img);
if (!motion_datas.empty()) {
painter.DrawImuData(img, *motion_datas[0].imu);
}
*/
cv::imshow("frame", img);
}
LOG(INFO);
/*
painter.DrawImgData(img, *left_data.img);
if (!motion_datas.empty()) {
painter.DrawImuData(img, *motion_datas[0].imu);
}
*/
cv::imshow("frame", img);
char key = static_cast<char>(cv::waitKey(1));
if (key == 27 || key == 'q' || key == 'Q') { // ESC/Q
@ -81,5 +134,14 @@ int main(int argc, char *argv[]) {
}
api->Stop(Source::ALL);
#ifdef CHECK_ACCEL_THEN_GYRO
if (imu_disorder_count > 0) {
LOG(INFO) << "accel_then_gyro, disorder_count: " << imu_disorder_count
<< "/" << imu_count;
} else {
LOG(INFO) << "accel_then_gyro, ok";
}
#endif
return 0;
}

View File

@ -27,6 +27,7 @@ int main(int argc, char *argv[]) {
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
api->SetDisparityComputingMethodType(DisparityComputingMethod::BM);
api->EnableStreamData(Stream::POINTS);
@ -40,10 +41,11 @@ int main(int argc, char *argv[]) {
auto &&left_data = api->GetStreamData(Stream::LEFT);
auto &&right_data = api->GetStreamData(Stream::RIGHT);
cv::Mat img;
cv::hconcat(left_data.frame, right_data.frame, img);
cv::imshow("frame", img);
if (!left_data.frame.empty() && !right_data.frame.empty()) {
cv::Mat img;
cv::hconcat(left_data.frame, right_data.frame, img);
cv::imshow("frame", img);
}
auto &&points_data = api->GetStreamData(Stream::POINTS);
if (!points_data.frame.empty()) {

View File

@ -25,7 +25,7 @@ int main(int argc, char *argv[]) {
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
api->SetDisparityComputingMethodType(DisparityComputingMethod::BM);
api->Start(Source::VIDEO_STREAMING);
cv::namedWindow("frame");
@ -37,8 +37,10 @@ int main(int argc, char *argv[]) {
auto &&right_data = api->GetStreamData(Stream::RIGHT);
cv::Mat img;
cv::hconcat(left_data.frame, right_data.frame, img);
cv::imshow("frame", img);
if (!left_data.frame.empty() && !right_data.frame.empty()) {
cv::hconcat(left_data.frame, right_data.frame, img);
cv::imshow("frame", img);
}
char key = static_cast<char>(cv::waitKey(1));
if (key == 27 || key == 'q' || key == 'Q') { // ESC/Q

View File

@ -25,6 +25,7 @@ int main(int argc, char *argv[]) {
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
api->SetDisparityComputingMethodType(DisparityComputingMethod::BM);
api->EnableStreamData(Stream::LEFT_RECTIFIED);
api->EnableStreamData(Stream::RIGHT_RECTIFIED);

View File

@ -26,6 +26,8 @@ int main(int argc, char *argv[]) {
if (!ok) return 1;
api->ConfigStreamRequest(request);
api->setDuplicate(true);
api->EnablePlugin("plugins/linux-x86_64/libplugin_g_cuda9.1_opencv3.4.0.so");
api->EnableStreamData(Stream::DISPARITY_NORMALIZED);
@ -41,13 +43,15 @@ int main(int argc, char *argv[]) {
auto &&left_data = api->GetStreamData(Stream::LEFT);
auto &&right_data = api->GetStreamData(Stream::RIGHT);
cv::Mat img;
cv::hconcat(left_data.frame, right_data.frame, img);
cv::imshow("frame", img);
if (!left_data.frame.empty() && !right_data.frame.empty()) {
cv::Mat img;
cv::hconcat(left_data.frame, right_data.frame, img);
cv::imshow("frame", img);
auto &&disp_data = api->GetStreamData(Stream::DISPARITY_NORMALIZED);
if (!disp_data.frame.empty()) {
cv::imshow("disparity", disp_data.frame);
auto &&disp_data = api->GetStreamData(Stream::DISPARITY_NORMALIZED);
if (!disp_data.frame.empty()) {
cv::imshow("disparity", disp_data.frame);
}
}
char key = static_cast<char>(cv::waitKey(1));

View File

@ -154,6 +154,7 @@ int main(int argc, char *argv[]) {
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
api->SetDisparityComputingMethodType(DisparityComputingMethod::BM);
api->SetOptionValue(Option::IR_CONTROL, 80);

View File

@ -518,9 +518,11 @@ std::vector<api::MotionData> API::GetMotionDatas() {
}
}
void API::EnableTimestampCorrespondence(const Stream &stream) {
void API::EnableTimestampCorrespondence(const Stream &stream,
bool keep_accel_then_gyro) {
if (correspondence_ == nullptr) {
correspondence_.reset(new Correspondence(device_, stream));
correspondence_->KeepAccelThenGyro(keep_accel_then_gyro);
{
device_->DisableMotionDatas();
if (callback_) {
@ -558,6 +560,10 @@ void API::EnablePlugin(const std::string &path) {
synthetic_->SetPlugin(plugin);
}
void API::setDuplicate(bool isEnable) {
synthetic_->setDuplicate(isEnable);
}
void API::SetDisparityComputingMethodType(
const DisparityComputingMethod &MethodType) {
synthetic_->SetDisparityComputingMethodType(MethodType);

View File

@ -16,11 +16,15 @@
#include "mynteye/device/device.h"
#include "mynteye/logger.h"
#define MYNTEYE_IMU_SEQ_FIRST 1 // accel
#define MYNTEYE_IMU_SEQ_SECOND 2 // gyro
MYNTEYE_BEGIN_NAMESPACE
Correspondence::Correspondence(const std::shared_ptr<Device> &device,
const Stream &stream)
: device_(device), stream_(stream), ready_image_timestamp_(0) {
: device_(device), stream_(stream), ready_image_timestamp_(0),
keep_accel_then_gyro_(false) {
VLOG(2) << __func__;
// set matched stream to be watched too,
// aim to make stream and matched stream correspondence
@ -54,6 +58,10 @@ bool Correspondence::Watch(const Stream &stream) const {
return false;
}
void Correspondence::KeepAccelThenGyro(bool enabled) {
keep_accel_then_gyro_ = enabled;
}
void Correspondence::OnStreamDataCallback(
const Stream &stream, const api::StreamData &data) {
if (!Watch(stream)) {
@ -143,7 +151,27 @@ std::vector<api::StreamData> Correspondence::GetStreamDatas(
}
std::vector<api::MotionData> Correspondence::GetMotionDatas() {
return GetReadyMotionDatas();
auto &&datas = GetReadyMotionDatas();
/*
for (auto data : datas) {
auto imu_flag = data.imu->flag;
auto imu_stamp = data.imu->timestamp;
std::stringstream ss;
if (imu_flag == 0) { // accel + gyro
ss << "Imu";
} else if (imu_flag == 1) { // accel
ss << "Accel";
} else if (imu_flag == 2) { // gyro
ss << "Gyro";
}
ss << " timestamp: " << imu_stamp;
LOG(INFO) << ss.str();
}
*/
if (keep_accel_then_gyro_ && device_->GetModel() != Model::STANDARD) {
KeepAccelThenGyro(datas); // only s2 need do this
}
return datas;
}
void Correspondence::EnableStreamMatch() {
@ -274,4 +302,66 @@ std::vector<api::MotionData> Correspondence::GetReadyMotionDatas() {
return result;
}
void Correspondence::KeepAccelThenGyro(std::vector<api::MotionData> &datas) {
if (datas.size() == 0) return;
static std::shared_ptr<ImuData> last_imu = nullptr;
// process last imu
if (datas[0].imu->flag == MYNTEYE_IMU_SEQ_SECOND) {
if (last_imu && last_imu->flag == MYNTEYE_IMU_SEQ_FIRST) {
datas.insert(datas.begin(), {last_imu});
}
}
last_imu = nullptr;
// if only one
if (datas.size() == 1) {
last_imu = datas[0].imu;
datas.clear();
return;
}
std::uint8_t prev_flag = 0;
for (auto it = datas.begin(); it != datas.end(); ) {
auto flag = it->imu->flag;
if (flag == 0) {
++it; // unexpected, keep it
continue;
}
bool is_first = (it == datas.begin());
bool is_last = (it == datas.end() - 1);
bool ok = false;
if (is_first) {
ok = (flag == MYNTEYE_IMU_SEQ_FIRST);
} else {
if (flag == MYNTEYE_IMU_SEQ_FIRST) {
ok = (prev_flag == MYNTEYE_IMU_SEQ_SECOND);
} else if (flag == MYNTEYE_IMU_SEQ_SECOND) {
ok = (prev_flag == MYNTEYE_IMU_SEQ_FIRST);
}
}
if (ok) {
prev_flag = flag;
++it;
} else {
if (is_last) {
// if tail not ok, retain last imu
last_imu = it->imu;
}
it = datas.erase(it);
}
}
// if tail is not second
if (datas.size() > 0) {
auto it = datas.end() - 1;
if (it->imu->flag != MYNTEYE_IMU_SEQ_SECOND) {
datas.erase(it);
}
}
}
MYNTEYE_END_NAMESPACE

View File

@ -32,6 +32,7 @@ class Correspondence {
~Correspondence();
bool Watch(const Stream &stream) const;
void KeepAccelThenGyro(bool enabled);
void OnStreamDataCallback(const Stream &stream, const api::StreamData &data);
void OnMotionDataCallback(const device::MotionData &data);
@ -55,6 +56,8 @@ class Correspondence {
std::vector<api::StreamData> GetReadyStreamData(bool matched);
std::vector<api::MotionData> GetReadyMotionDatas();
void KeepAccelThenGyro(std::vector<api::MotionData> &datas); // NOLINT
std::shared_ptr<Device> device_;
Stream stream_;
Stream stream_match_;
@ -73,6 +76,8 @@ class Correspondence {
std::condition_variable_any cond_stream_datas_;
std::uint64_t ready_image_timestamp_;
bool keep_accel_then_gyro_;
};
MYNTEYE_END_NAMESPACE

View File

@ -24,7 +24,10 @@
MYNTEYE_BEGIN_NAMESPACE
Processor::Processor(std::int32_t proc_period)
: proc_period_(std::move(proc_period)),
: last_frame_id_cd(0),
last_frame_id_cd_vice(0),
is_enable_cd(false),
proc_period_(std::move(proc_period)),
activated_(false),
input_ready_(false),
idle_(true),
@ -42,9 +45,9 @@ Processor::Processor(std::int32_t proc_period)
Processor::~Processor() {
VLOG(2) << __func__;
Deactivate();
input_.reset(nullptr);
output_.reset(nullptr);
output_result_.reset(nullptr);
input_ = nullptr;
output_ = nullptr;
output_result_ = nullptr;
childs_.clear();
}
@ -122,7 +125,7 @@ bool Processor::IsIdle() {
return idle_;
}
bool Processor::Process(const Object &in) {
bool Processor::Process(std::shared_ptr<Object> in) {
if (!activated_)
return false;
if (!idle_) {
@ -132,13 +135,17 @@ bool Processor::Process(const Object &in) {
return false;
}
}
if (!in.DecValidity()) {
if (in && !in->DecValidity()) {
LOG(WARNING) << Name() << " process with invalid input";
return false;
}
{
std::lock_guard<std::mutex> lk(mtx_input_ready_);
input_.reset(in.Clone());
if (ProcessInputConnection() == WITH_CLONE) {
input_.reset(in->Clone());
} else {
input_ = in;
}
input_ready_ = true;
}
cond_input_ready_.notify_all();
@ -229,12 +236,16 @@ void Processor::Run() {
}
{
std::unique_lock<std::mutex> lk(mtx_result_);
output_result_.reset(output_->Clone());
if (ProcessOutputConnection() == WITH_CLONE) {
output_result_.reset(output_->Clone());
} else {
output_result_ = output_;
}
}
if (!childs_.empty()) {
for (auto child : childs_) {
child->Process(*output_);
child->Process(output_);
}
}
@ -246,6 +257,14 @@ void Processor::Run() {
VLOG(2) << Name() << " thread end";
}
Processor::process_type Processor::ProcessOutputConnection() {
return WITH_CLONE;
}
Processor::process_type Processor::ProcessInputConnection() {
return WITH_CLONE;
}
api::StreamData Processor::GetStreamData(const Stream &stream) {
auto sum = getStreamsSum();
auto &&out = GetOutput();
@ -260,11 +279,19 @@ api::StreamData Processor::GetStreamData(const Stream &stream) {
if (enable_mode == Synthetic::MODE_ON) {
if (sum == 1) {
if (out != nullptr) {
auto &&output = Object::Cast<ObjMat>(out);
auto output = Object::Cast<ObjMat>(out);
if (output != nullptr) {
if (!is_enable_cd) {
if (output->data &&
last_frame_id_cd == output->data->frame_id) {
// cut the duplicate frame.
return {};
}
last_frame_id_cd = output->data->frame_id;
}
return obj_data(output);
}
VLOG(2) << "Rectify not ready now";
VLOG(2) << "Frame not ready now";
}
} else if (sum == 2) {
static std::shared_ptr<ObjMat2> output = nullptr;
@ -277,15 +304,31 @@ api::StreamData Processor::GetStreamData(const Stream &stream) {
for (auto it : streams) {
if (it.stream == stream) {
if (num == 1) {
if (!is_enable_cd) {
if (output->first_data &&
last_frame_id_cd == output->first_data->frame_id) {
// cut the duplicate frame.
return {};
}
last_frame_id_cd = output->first_data->frame_id;
}
return obj_data_first(output);
} else {
// last_frame_id_cd = output->second_data->frame_id;
if (!is_enable_cd) {
if (output->second_data &&
last_frame_id_cd_vice == output->second_data->frame_id) {
return {};
}
last_frame_id_cd_vice = output->second_data->frame_id;
}
return obj_data_second(output);
}
}
num++;
}
}
VLOG(2) << "Rectify not ready now";
VLOG(2) << "Frame not ready now";
} else {
LOG(ERROR) << "error: invalid sum!";
}

View File

@ -64,7 +64,7 @@ class Processor :
bool IsIdle();
/** Returns dropped or not. */
bool Process(const Object &in);
bool Process(std::shared_ptr<Object> in);
virtual api::StreamData GetStreamData(const Stream &stream);
@ -77,13 +77,25 @@ class Processor :
std::shared_ptr<Object> GetOutput();
std::uint64_t GetDroppedCount();
inline void setDupEnable(bool isEnable) {
is_enable_cd = isEnable;
}
protected:
virtual Object *OnCreateOutput() = 0;
virtual bool OnProcess(
Object *const in, Object *const out,
std::shared_ptr<Processor> const parent) = 0;
enum process_type{
WITH_CLONE,
WITHOUT_CLONE
};
virtual process_type ProcessOutputConnection();
virtual process_type ProcessInputConnection();
std::uint16_t last_frame_id_cd;
std::uint16_t last_frame_id_cd_vice;
bool is_enable_cd;
private:
/** Run in standalone thread. */
void Run();
@ -102,10 +114,10 @@ class Processor :
std::uint64_t dropped_count_;
std::mutex mtx_state_;
std::unique_ptr<Object> input_;
std::unique_ptr<Object> output_;
std::shared_ptr<Object> input_;
std::shared_ptr<Object> output_;
std::unique_ptr<Object> output_result_;
std::shared_ptr<Object> output_result_;
std::mutex mtx_result_;
PreProcessCallback pre_callback_;

View File

@ -34,6 +34,9 @@ class DepthProcessor : public Processor {
std::string Name() override;
protected:
// inline Processor::process_type ProcessInputConnection() override {
// return Processor::WITHOUT_CLONE;
// }
Object *OnCreateOutput() override;
bool OnProcess(
Object *const in, Object *const out,

View File

@ -31,6 +31,12 @@ class DisparityNormalizedProcessor : public Processor {
std::string Name() override;
protected:
// inline Processor::process_type ProcessOutputConnection() override {
// return Processor::WITHOUT_CLONE;
// }
// inline Processor::process_type ProcessInputConnection() override {
// return Processor::WITHOUT_CLONE;
// }
Object *OnCreateOutput() override;
bool OnProcess(
Object *const in, Object *const out,

View File

@ -41,6 +41,9 @@ class DisparityProcessor : public Processor {
void NotifyComputingTypeChanged(const DisparityComputingMethod &MethodType);
protected:
// inline Processor::process_type ProcessOutputConnection() override {
// return Processor::WITHOUT_CLONE;
// }
Object *OnCreateOutput() override;
bool OnProcess(
Object *const in, Object *const out,

View File

@ -36,6 +36,9 @@ class PointsProcessor : public Processor {
std::string Name() override;
protected:
// inline Processor::process_type ProcessOutputConnection() override {
// return Processor::WITHOUT_CLONE;
// }
Object *OnCreateOutput() override;
bool OnProcess(
Object *const in, Object *const out,

View File

@ -37,6 +37,12 @@ class PointsProcessorOCV : public Processor {
bool OnProcess(
Object *const in, Object *const out,
std::shared_ptr<Processor> const parent) override;
// inline Processor::process_type ProcessOutputConnection() override {
// return Processor::WITHOUT_CLONE;
// }
// inline Processor::process_type ProcessInputConnection() override {
// return Processor::WITHOUT_CLONE;
// }
private:
cv::Mat Q_;

View File

@ -79,6 +79,12 @@ class RectifyProcessor : public Processor {
bool OnProcess(
Object *const in, Object *const out,
std::shared_ptr<Processor> const parent) override;
// inline Processor::process_type ProcessOutputConnection() override {
// return Processor::WITHOUT_CLONE;
// }
// inline Processor::process_type ProcessInputConnection() override {
// return Processor::WITHOUT_CLONE;
// }
private:
void InitParams(IntrinsicsEquidistant in_left,

View File

@ -49,6 +49,13 @@ class RectifyProcessorOCV : public Processor {
cv::Mat map11, map12, map21, map22;
protected:
// inline Processor::process_type ProcessOutputConnection() override {
// return Processor::WITHOUT_CLONE;
// }
// inline Processor::process_type ProcessInputConnection() override {
// return Processor::WITHOUT_CLONE;
// }
Object *OnCreateOutput() override;
bool OnProcess(
Object *const in, Object *const out,

View File

@ -73,7 +73,7 @@ void s1s2Processor::ProcessNativeStream(
}
if (left_data.img && right_data.img &&
left_data.img->frame_id == right_data.img->frame_id) {
Process(data_obj(left_data, right_data));
Process(std::make_shared<ObjMat2>(data_obj(left_data, right_data)));
}
return;
}
@ -90,10 +90,6 @@ void s1s2Processor::StartVideoStreaming() {
[this, stream, callback](const device::StreamData &data) {
auto &&stream_data = data2api(data);
ProcessNativeStream(stream, stream_data);
// Need mutex if set callback after start
if (callback) {
callback(stream_data);
}
},
true);
}
@ -120,7 +116,15 @@ api::StreamData s1s2Processor::GetStreamData(const Stream &stream) {
}
}
if (enable_mode == Synthetic::MODE_ON) {
return data2api(device_->GetStreamData(stream));
auto res = data2api(device_->GetStreamData(stream));
if (res.img == nullptr ||
res.img->timestamp == last_frame_id_cd ||
res.frame.empty()) {
return {};
}
last_frame_id_cd = res.img->timestamp;
return res;
// return data2api(device_->GetStreamData(stream));
}
LOG(ERROR) << "Failed to get device stream data of " << stream
<< ", unsupported or disabled";

View File

@ -56,6 +56,12 @@ class s1s2Processor : public RootProcessor {
api::StreamData GetStreamData(const Stream &stream) override;
std::vector<api::StreamData> GetStreamDatas(const Stream &stream) override; // NOLINT
protected:
// inline Processor::process_type ProcessOutputConnection() override {
// return Processor::WITHOUT_CLONE;
// }
// inline Processor::process_type ProcessInputConnection() override {
// return Processor::WITHOUT_CLONE;
// }
Object *OnCreateOutput() override;
bool OnProcess(
Object *const in, Object *const out,

View File

@ -86,6 +86,7 @@ Synthetic::Synthetic(API *api, CalibrationModel calib_model)
Synthetic::~Synthetic() {
VLOG(2) << __func__;
processors_.clear();
if (processor_) {
processor_->Deactivate(true);
processor_ = nullptr;
@ -173,6 +174,12 @@ bool Synthetic::Supports(const Stream &stream) const {
return checkControlDateWithStream(stream);
}
void Synthetic::setDuplicate(bool isEnable) {
for (auto it : processors_) {
it->setDupEnable(isEnable);
}
}
void Synthetic::EnableStreamData(
const Stream &stream, stream_switch_callback_t callback,
bool try_tag) {
@ -364,6 +371,10 @@ void Synthetic::InitProcessors() {
return;
}
root_processor->addTargetStreams(
{Stream::LEFT, Mode::MODE_OFF, nullptr});
root_processor->addTargetStreams(
{Stream::RIGHT, Mode::MODE_OFF, nullptr});
rectify_processor->addTargetStreams(
{Stream::LEFT_RECTIFIED, Mode::MODE_OFF, nullptr});
rectify_processor->addTargetStreams(
@ -376,10 +387,6 @@ void Synthetic::InitProcessors() {
{Stream::POINTS, Mode::MODE_OFF, nullptr});
depth_processor->addTargetStreams(
{Stream::DEPTH, Mode::MODE_OFF, nullptr});
root_processor->addTargetStreams(
{Stream::LEFT, Mode::MODE_OFF, nullptr});
root_processor->addTargetStreams(
{Stream::RIGHT, Mode::MODE_OFF, nullptr});
processors_.push_back(root_processor);
processors_.push_back(rectify_processor);
@ -421,7 +428,8 @@ bool Synthetic::OnDeviceProcess(
Object *const in, Object *const out,
std::shared_ptr<Processor> const parent) {
MYNTEYE_UNUSED(parent)
return GetStreamEnabledMode(Stream::LEFT) != MODE_ON;
return GetStreamEnabledMode(Stream::LEFT) != MODE_ON
|| GetStreamEnabledMode(Stream::RIGHT) != MODE_ON;
}
bool Synthetic::OnRectifyProcess(
@ -431,8 +439,8 @@ bool Synthetic::OnRectifyProcess(
if (plugin_ && plugin_->OnRectifyProcess(in, out)) {
return true;
}
return GetStreamEnabledMode(Stream::LEFT_RECTIFIED) != MODE_ON;
// && GetStreamEnabledMode(Stream::RIGHT_RECTIFIED) != MODE_ON
return GetStreamEnabledMode(Stream::LEFT_RECTIFIED) != MODE_ON
&& GetStreamEnabledMode(Stream::RIGHT_RECTIFIED) != MODE_ON;
}
bool Synthetic::OnDisparityProcess(

View File

@ -83,6 +83,8 @@ class Synthetic {
void SetPlugin(std::shared_ptr<Plugin> plugin);
bool HasPlugin() const;
void setDuplicate(bool isEnable);
const struct stream_control_t getControlDateWithStream(
const Stream& stream) const;
void setControlDateCallbackWithStream(

View File

@ -34,21 +34,28 @@ const char* WARN_DESCRIPTION_F =
const char* WARN_DESCRIPTION_S =
"We suggest that you should update the SDK";
const char* PASS_DESCRIPTION = "pass";
const char* PASS_OUTOF_TABLE_WARNING = "You're using a custom mynteye device";
/** firmware/sdk version matched table */
/**----device type-----sdk version---firmware version-----pass tag-----*/
static const firmware_version_match_table_unit FSVM_TABLE[] ={
/** S1030 */
{"MYNT-EYE-S1030", ">2.3.0", ">2.2.0", PASS_DESCRIPTION},
{"MYNT-EYE-S1030", ">2.3.0", "2.2.0", WARN_DESCRIPTION_F},
{"MYNT-EYE-S1030", ">2.3.0", "<2.2.0", ERRO_DESCRIPTION_F},
{"MYNT-EYE-S1030", "<2.3.1", "<2.2.0", WARN_DESCRIPTION_S},
{"MYNT-EYE-S1030", ">2.3.3", ">2.3.0", PASS_DESCRIPTION},
{"MYNT-EYE-S1030", ">2.3.3", "<2.4.0", WARN_DESCRIPTION_F},
{"MYNT-EYE-S1030", ">0.0.0", ">2.2.0", PASS_DESCRIPTION},
{"MYNT-EYE-S1030", ">0.0.0", "<2.3.0", WARN_DESCRIPTION_F},
/** S2100 */
{"MYNT-EYE-S2100", ">2.3.0", "1.0", PASS_DESCRIPTION},
{"MYNT-EYE-S2100", "<2.3.1", "1.0", ERRO_DESCRIPTION_S},
{"MYNT-EYE-S2100", ">2.3.3", ">1.0", PASS_DESCRIPTION},
{"MYNT-EYE-S2100", ">2.3.3", "<1.1", WARN_DESCRIPTION_F},
{"MYNT-EYE-S2100", ">2.3.0", ">0.9", PASS_DESCRIPTION},
{"MYNT-EYE-S2100", ">0.0.0", ">0.9", WARN_DESCRIPTION_S},
{"MYNT-EYE-S2100", ">0.0.0", "<1.0", WARN_DESCRIPTION_F},
/** S210A */
{"MYNT-EYE-S210A", ">2.3.0", "1.0", PASS_DESCRIPTION},
{"MYNT-EYE-S210A", "<2.3.1", "1.0", ERRO_DESCRIPTION_S},
{"MYNT-EYE-S210A", ">2.3.3", ">1.0", PASS_DESCRIPTION},
{"MYNT-EYE-S210A", ">2.3.3", "<1.1", WARN_DESCRIPTION_F},
{"MYNT-EYE-S210A", ">2.3.0", ">0.9", PASS_DESCRIPTION},
{"MYNT-EYE-S210A", ">0.0.0", ">0.9", WARN_DESCRIPTION_S},
{"MYNT-EYE-S210A", ">0.0.0", "<1.0", WARN_DESCRIPTION_F},
};
void getVersion(const std::string &str, char *version) {
@ -112,11 +119,27 @@ STATUS_UNIT checkUnit(const std::string& sdkv,
return ST_NOT_PASS;
}
// Returns true if the given device name has at least one entry in the
// firmware/SDK version match table (FSVM_TABLE), i.e. it is a device
// model this SDK knows how to version-check.
//
// @param devn device name reported by the device, e.g. "MYNT-EYE-S1030".
bool checkIfDeviceInTable(const std::string& devn) {
  // Range-for over the static array replaces the manual
  // sizeof(FSVM_TABLE)/sizeof(unit) index loop; behavior is identical.
  for (const auto &unit : FSVM_TABLE) {
    if (unit.device_type == devn) {
      return true;
    }
  }
  return false;
}
bool checkFirmwareVersion(const std::shared_ptr<API> api) {
auto sdkv = api->GetSDKVersion();
auto devn = api->GetInfo(Info::DEVICE_NAME);
auto firmv = api->GetInfo(Info::FIRMWARE_VERSION);
if (!checkIfDeviceInTable(devn)) {
LOG(WARNING) << PASS_OUTOF_TABLE_WARNING;
return true;
}
for (size_t i =0;
i < sizeof(FSVM_TABLE)/sizeof(firmware_version_match_table_unit);
i++) {

View File

@ -460,7 +460,6 @@ bool Channels::GetFiles(
while (i < end) {
std::uint8_t file_id = *(data + i);
std::uint16_t file_size = bytes::_from_data<std::uint16_t>(data + i + 1);
LOG(INFO) << "GetFiles:data_size : " << file_size;
VLOG(2) << "GetFiles id: " << static_cast<int>(file_id)
<< ", size: " << file_size;
i += 3;

View File

@ -32,7 +32,6 @@ std::size_t FileChannel::GetDeviceInfoFromData(
const std::uint8_t *data, const std::uint16_t &data_size,
device_info_t *info) {
auto n = dev_info_parser_->GetFromData(data, data_size, info);
LOG(INFO) << "GetDeviceInfoFromData:data_size : " << data_size;
auto spec_version = info->spec_version;
img_params_parser_->SetSpecVersion(spec_version);
imu_params_parser_->SetSpecVersion(spec_version);

View File

@ -61,7 +61,8 @@ const std::map<Model, std::map<Capabilities, StreamRequests>>
stream_requests_map = {
{Model::STANDARD,
{{Capabilities::STEREO, {
{752, 480, Format::YUYV, 60}}
{752, 480, Format::YUYV, 60},
{376, 240, Format::YUYV, 60}}
}}
},
{Model::STANDARD2,

View File

@ -26,7 +26,6 @@
#include "mynteye/device/motions.h"
#include "mynteye/device/standard/device_s.h"
#include "mynteye/device/standard2/device_s2.h"
#include "mynteye/device/standard2/device_s210a.h"
#include "mynteye/device/streams.h"
#include "mynteye/device/types.h"
#include "mynteye/util/strings.h"
@ -104,7 +103,6 @@ std::shared_ptr<Device> Device::Create(
if (name == "MYNTEYE") {
return std::make_shared<StandardDevice>(device);
} else if (strings::starts_with(name, "MYNT-EYE-")) {
// TODO(JohnZhao): Create different device by name, such as MYNT-EYE-S1000
std::string model_s = name.substr(9, 5);
VLOG(2) << "MYNE EYE Model: " << model_s;
DeviceModel model(model_s);
@ -113,9 +111,9 @@ std::shared_ptr<Device> Device::Create(
return std::make_shared<StandardDevice>(device);
} else if (model.generation == '2') {
if (model.custom_code == '0') {
return std::make_shared<Standard2Device>(device);
return std::make_shared<Standard2Device>(Model::STANDARD2, device);
} else if (model.custom_code == 'A') {
return std::make_shared<Standard210aDevice>(device);
return std::make_shared<Standard2Device>(Model::STANDARD210A, device);
} else {
LOG(FATAL) << "No such custom code now";
}
@ -618,9 +616,9 @@ void Device::ReadAllInfos() {
img_params_ok = true;
SetIntrinsics(Stream::LEFT, img_params.in_left);
SetIntrinsics(Stream::RIGHT, img_params.in_right);
SetExtrinsics(Stream::LEFT, Stream::RIGHT, img_params.ex_right_to_left);
VLOG(2) << "Intrinsics left: {" << GetIntrinsics(Stream::LEFT) << "}";
VLOG(2) << "Intrinsics right: {" << GetIntrinsics(Stream::RIGHT) << "}";
SetExtrinsics(Stream::RIGHT, Stream::LEFT, img_params.ex_right_to_left);
VLOG(2) << "Intrinsics left: {" << *GetIntrinsics(Stream::LEFT) << "}";
VLOG(2) << "Intrinsics right: {" << *GetIntrinsics(Stream::RIGHT) << "}";
VLOG(2) << "Extrinsics left to right: {"
<< GetExtrinsics(Stream::LEFT, Stream::RIGHT) << "}";
break;
@ -664,8 +662,8 @@ void Device::UpdateStreamIntrinsics(
SetIntrinsics(Stream::LEFT, img_params.in_left);
SetIntrinsics(Stream::RIGHT, img_params.in_right);
SetExtrinsics(Stream::LEFT, Stream::RIGHT, img_params.ex_right_to_left);
VLOG(2) << "Intrinsics left: {" << GetIntrinsics(Stream::LEFT) << "}";
VLOG(2) << "Intrinsics right: {" << GetIntrinsics(Stream::RIGHT) << "}";
VLOG(2) << "Intrinsics left: {" << *GetIntrinsics(Stream::LEFT) << "}";
VLOG(2) << "Intrinsics right: {" << *GetIntrinsics(Stream::RIGHT) << "}";
VLOG(2) << "Extrinsics left to right: {"
<< GetExtrinsics(Stream::LEFT, Stream::RIGHT) << "}";
break;

View File

@ -90,8 +90,8 @@ void unpack_imu_res_packet(const std::uint8_t *data, ImuResPacket *res) {
} // namespace
Standard2ChannelsAdapter::Standard2ChannelsAdapter()
: ChannelsAdapter(Model::STANDARD2) {
Standard2ChannelsAdapter::Standard2ChannelsAdapter(const Model &model)
: ChannelsAdapter(model) {
}
Standard2ChannelsAdapter::~Standard2ChannelsAdapter() {

View File

@ -25,7 +25,7 @@ MYNTEYE_BEGIN_NAMESPACE
class Standard2ChannelsAdapter : public ChannelsAdapter {
public:
Standard2ChannelsAdapter();
explicit Standard2ChannelsAdapter(const Model &model);
virtual ~Standard2ChannelsAdapter();
std::int32_t GetAccelRangeDefault() override;

View File

@ -1,121 +0,0 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/device/standard2/channels_adapter_s210a.h"
#include "mynteye/logger.h"
MYNTEYE_BEGIN_NAMESPACE
namespace {
#pragma pack(push, 1)
// Raw IMU sample exactly as laid out in the device payload: 21 packed
// bytes, multi-byte fields stored big-endian on the wire.
struct ImuData {
  std::uint32_t frame_id;         // rolling frame counter
  std::uint64_t timestamp;        // device timestamp (64-bit)
  std::uint8_t flag;              // sample kind: 1 = accel, 2 = gyro
                                  // (see unpack_imu_segment)
  std::int16_t temperature;       // raw temperature reading
  std::int16_t accel_or_gyro[3];  // x/y/z of whichever sensor `flag` selects

  ImuData() = default;
  explicit ImuData(const std::uint8_t *data) {
    from_data(data);
  }

  // Decode the packed big-endian wire layout into the struct fields.
  void from_data(const std::uint8_t *data) {
    std::uint32_t timestamp_l;
    std::uint32_t timestamp_h;
    // bytes [0,3]: frame id
    frame_id = (*(data) << 24) | (*(data + 1) << 16) | (*(data + 2) << 8) |
        *(data + 3);
    // bytes [4,11]: 64-bit timestamp carried as high/low 32-bit halves
    timestamp_h = (*(data + 4) << 24) | (*(data + 5) << 16) |
        (*(data + 6) << 8) | *(data + 7);
    timestamp_l = (*(data + 8) << 24) | (*(data + 9) << 16) |
        (*(data + 10) << 8) | *(data + 11);
    timestamp = (static_cast<std::uint64_t>(timestamp_h) << 32) | timestamp_l;
    flag = *(data + 12);                               // byte 12
    temperature = (*(data + 13) << 8) | *(data + 14);  // bytes [13,14]
    accel_or_gyro[0] = (*(data + 15) << 8) | *(data + 16);
    accel_or_gyro[1] = (*(data + 17) << 8) | *(data + 18);
    accel_or_gyro[2] = (*(data + 19) << 8) | *(data + 20);
  }
};
#pragma pack(pop)
// Convert a raw ImuData sample into an ImuSegment. The flag selects the
// sensor the x/y/z values belong to (1 => accel, 2 => gyro); the other
// sensor's axes are zeroed.
void unpack_imu_segment(const ImuData &imu, ImuSegment *seg) {
  seg->frame_id = imu.frame_id;
  seg->timestamp = imu.timestamp;
  seg->flag = imu.flag;
  seg->temperature = imu.temperature;
  const bool is_accel = (imu.flag == 1);
  const bool is_gyro = (imu.flag == 2);
  for (int axis = 0; axis < 3; ++axis) {
    seg->accel[axis] = is_accel ? imu.accel_or_gyro[axis] : 0;
    seg->gyro[axis] = is_gyro ? imu.accel_or_gyro[axis] : 0;
  }
}
// Decode `pkg->count` consecutive 21-byte ImuData records from `data`
// into pkg->segments, then stamp the packet's serial number with the
// last segment's frame id.
// NOTE(review): assumes pkg->count >= 1 — segments.back() on an empty
// vector would be undefined behavior; the caller (unpack_imu_res_packet)
// sets count before calling. Confirm no zero-size payloads occur.
void unpack_imu_packet(const std::uint8_t *data, ImuPacket *pkg) {
  std::size_t data_n = sizeof(ImuData);  // 21
  for (std::size_t i = 0; i < pkg->count; i++) {
    ImuSegment seg;
    unpack_imu_segment(ImuData(data + data_n * i), &seg);
    pkg->segments.push_back(seg);
  }
  pkg->serial_number = pkg->segments.back().frame_id;
}
// Parse a full IMU response buffer:
//   byte 0: header, byte 1: state, bytes [2,3]: payload size (big-endian),
//   bytes [4, 4+size): ImuData records, byte 4+size: checksum.
// The payload is decoded into a single ImuPacket appended to res->packets.
// NOTE(review): the checksum byte is stored but not validated here.
void unpack_imu_res_packet(const std::uint8_t *data, ImuResPacket *res) {
  res->header = *data;
  res->state = *(data + 1);
  res->size = (*(data + 2) << 8) | *(data + 3);
  std::size_t data_n = sizeof(ImuData);  // 21 bytes per record
  ImuPacket packet;
  packet.count = res->size / data_n;  // whole records in the payload
  unpack_imu_packet(data + 4, &packet);
  res->packets.push_back(packet);
  res->checksum = *(data + 4 + res->size);
}
} // namespace
Standard210aChannelsAdapter::Standard210aChannelsAdapter()
: ChannelsAdapter(Model::STANDARD210A) {
}
Standard210aChannelsAdapter::~Standard210aChannelsAdapter() {
}
std::int32_t Standard210aChannelsAdapter::GetAccelRangeDefault() {
return 12;
}
std::vector<std::int32_t> Standard210aChannelsAdapter::GetAccelRangeValues() {
return {6, 12, 24, 48};
}
std::int32_t Standard210aChannelsAdapter::GetGyroRangeDefault() {
return 1000;
}
std::vector<std::int32_t> Standard210aChannelsAdapter::GetGyroRangeValues() {
return {250, 500, 1000, 2000, 4000};
}
void Standard210aChannelsAdapter::GetImuResPacket(
const std::uint8_t *data, ImuResPacket *res) {
unpack_imu_res_packet(data, res);
}
MYNTEYE_END_NAMESPACE

View File

@ -1,42 +0,0 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_DEVICE_STANDARD2_CHANNELS_ADAPTER_S210A_H_
#define MYNTEYE_DEVICE_STANDARD2_CHANNELS_ADAPTER_S210A_H_
#pragma once
#include <cstdint>
#include <set>
#include <vector>
#include "mynteye/device/channel/channels.h"
MYNTEYE_BEGIN_NAMESPACE
class Standard210aChannelsAdapter : public ChannelsAdapter {
public:
Standard210aChannelsAdapter();
virtual ~Standard210aChannelsAdapter();
std::int32_t GetAccelRangeDefault() override;
std::vector<std::int32_t> GetAccelRangeValues() override;
std::int32_t GetGyroRangeDefault() override;
std::vector<std::int32_t> GetGyroRangeValues() override;
void GetImuResPacket(const std::uint8_t *data, ImuResPacket *res) override;
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_DEVICE_STANDARD2_CHANNELS_ADAPTER_S210A_H_

View File

@ -20,11 +20,13 @@
MYNTEYE_BEGIN_NAMESPACE
Standard2Device::Standard2Device(std::shared_ptr<uvc::device> device)
: Device(Model::STANDARD2, device,
std::make_shared<Standard2StreamsAdapter>(),
std::make_shared<Standard2ChannelsAdapter>()) {
// Construct a device of the Standard2 family. Both the S2100
// (Model::STANDARD2) and the S210A (Model::STANDARD210A) now share this
// implementation; the model is forwarded to the streams/channels adapters,
// which select model-specific unpacking.
//
// @param model  must be Model::STANDARD2 or Model::STANDARD210A (CHECKed).
// @param device underlying UVC device handle.
Standard2Device::Standard2Device(const Model &model,
    std::shared_ptr<uvc::device> device)
  : Device(model, device,
        std::make_shared<Standard2StreamsAdapter>(model),
        std::make_shared<Standard2ChannelsAdapter>(model)) {
  VLOG(2) << __func__;
  CHECK(model == Model::STANDARD2 || model == Model::STANDARD210A);
}
Standard2Device::~Standard2Device() {

View File

@ -24,7 +24,7 @@ MYNTEYE_BEGIN_NAMESPACE
class Standard2Device : public Device {
public:
explicit Standard2Device(std::shared_ptr<uvc::device> device);
Standard2Device(const Model &model, std::shared_ptr<uvc::device> device);
virtual ~Standard2Device();
Capabilities GetKeyStreamCapability() const override;

View File

@ -1,45 +0,0 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/device/standard2/device_s210a.h"
#include "mynteye/logger.h"
#include "mynteye/device/motions.h"
#include "mynteye/device/standard2/channels_adapter_s210a.h"
#include "mynteye/device/standard2/streams_adapter_s210a.h"
MYNTEYE_BEGIN_NAMESPACE
Standard210aDevice::Standard210aDevice(std::shared_ptr<uvc::device> device)
: Device(Model::STANDARD210A, device,
std::make_shared<Standard210aStreamsAdapter>(),
std::make_shared<Standard210aChannelsAdapter>()) {
VLOG(2) << __func__;
}
Standard210aDevice::~Standard210aDevice() {
VLOG(2) << __func__;
}
Capabilities Standard210aDevice::GetKeyStreamCapability() const {
return Capabilities::STEREO_COLOR;
}
void Standard210aDevice::OnStereoStreamUpdate() {
if (motion_tracking_) {
auto &&motions = this->motions();
motions->DoMotionTrack();
}
}
MYNTEYE_END_NAMESPACE

View File

@ -1,37 +0,0 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_DEVICE_STANDARD2_DEVICE_S210A_H_
#define MYNTEYE_DEVICE_STANDARD2_DEVICE_S210A_H_
#pragma once
#include <memory>
#include <vector>
#include "mynteye/device/device.h"
MYNTEYE_BEGIN_NAMESPACE
class Standard210aDevice : public Device {
public:
explicit Standard210aDevice(std::shared_ptr<uvc::device> device);
virtual ~Standard210aDevice();
Capabilities GetKeyStreamCapability() const override;
void OnStereoStreamUpdate() override;
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_DEVICE_STANDARD2_DEVICE_S210A_H_

View File

@ -120,12 +120,12 @@ bool unpack_stereo_img_data(
<< static_cast<int>(img_packet.header) << " now";
return false;
}
/*
std::uint8_t checksum = 0;
for (std::size_t i = 2, n = packet_n - 2; i <= n; i++) { // content: [2,9]
checksum = (checksum ^ packet[i]);
}
/*
if (img_packet.checksum != checksum) {
VLOG(2) << "Image packet checksum should be 0x" << std::hex
<< std::uppercase << std::setw(2) << std::setfill('0')
@ -143,7 +143,58 @@ bool unpack_stereo_img_data(
} // namespace
Standard2StreamsAdapter::Standard2StreamsAdapter() {
namespace s210a {
// image pixels
// Copy the left BGR888 view out of a row-interleaved source buffer.
// Per the indexing, the source holds 2*h rows of w pixels and the left
// image is taken from the even rows; the first and third channel bytes
// are swapped while copying (source read at +2,+1,+0 into dest +0,+1,+2).
bool unpack_left_img_pixels(
    const void *data, const StreamRequest &request, Streams::frame_t *frame) {
  CHECK_NOTNULL(frame);
  CHECK_EQ(request.format, Format::BGR888);
  CHECK_EQ(frame->format(), Format::BGR888);
  auto src = reinterpret_cast<const std::uint8_t *>(data);
  const std::size_t bpp = 3;  // bytes per BGR888 pixel
  const std::size_t w = frame->width();
  const std::size_t h = frame->height();
  for (std::size_t row = 0; row < h; row++) {
    const std::uint8_t *src_row = src + (2 * row) * w * bpp;  // even rows
    std::uint8_t *dst_row = frame->data() + row * w * bpp;
    for (std::size_t col = 0; col < w; col++) {
      const std::uint8_t *sp = src_row + col * bpp;
      std::uint8_t *dp = dst_row + col * bpp;
      dp[0] = sp[2];  // channel order reversed
      dp[1] = sp[1];
      dp[2] = sp[0];
    }
  }
  return true;
}
// Copy the right BGR888 view out of a row-interleaved source buffer.
// Mirrors unpack_left_img_pixels but reads the odd source rows; the first
// and third channel bytes are swapped while copying.
bool unpack_right_img_pixels(
    const void *data, const StreamRequest &request, Streams::frame_t *frame) {
  CHECK_NOTNULL(frame);
  CHECK_EQ(request.format, Format::BGR888);
  CHECK_EQ(frame->format(), Format::BGR888);
  auto src = reinterpret_cast<const std::uint8_t *>(data);
  const std::size_t bpp = 3;  // bytes per BGR888 pixel
  const std::size_t w = frame->width();
  const std::size_t h = frame->height();
  for (std::size_t row = 0; row < h; row++) {
    const std::uint8_t *src_row = src + (2 * row + 1) * w * bpp;  // odd rows
    std::uint8_t *dst_row = frame->data() + row * w * bpp;
    for (std::size_t col = 0; col < w; col++) {
      const std::uint8_t *sp = src_row + col * bpp;
      std::uint8_t *dp = dst_row + col * bpp;
      dp[0] = sp[2];  // channel order reversed
      dp[1] = sp[1];
      dp[2] = sp[0];
    }
  }
  return true;
}
} // namespace s210a
Standard2StreamsAdapter::Standard2StreamsAdapter(const Model &model)
: model_(model) {
}
Standard2StreamsAdapter::~Standard2StreamsAdapter() {
@ -167,10 +218,19 @@ Standard2StreamsAdapter::GetUnpackImgDataMap() {
std::map<Stream, Streams::unpack_img_pixels_t>
Standard2StreamsAdapter::GetUnpackImgPixelsMap() {
return {
{Stream::LEFT, unpack_left_img_pixels},
{Stream::RIGHT, unpack_right_img_pixels}
};
switch (model_) {
case Model::STANDARD210A:
return {
{Stream::LEFT, s210a::unpack_left_img_pixels},
{Stream::RIGHT, s210a::unpack_right_img_pixels}
};
case Model::STANDARD2:
default:
return {
{Stream::LEFT, unpack_left_img_pixels},
{Stream::RIGHT, unpack_right_img_pixels}
};
}
}
MYNTEYE_END_NAMESPACE

View File

@ -25,7 +25,7 @@ MYNTEYE_BEGIN_NAMESPACE
class Standard2StreamsAdapter : public StreamsAdapter {
public:
Standard2StreamsAdapter();
explicit Standard2StreamsAdapter(const Model &model);
virtual ~Standard2StreamsAdapter();
std::vector<Stream> GetKeyStreams() override;
@ -35,6 +35,9 @@ class Standard2StreamsAdapter : public StreamsAdapter {
GetUnpackImgDataMap() override;
std::map<Stream, Streams::unpack_img_pixels_t>
GetUnpackImgPixelsMap() override;
private:
Model model_;
};
MYNTEYE_END_NAMESPACE

View File

@ -1,186 +0,0 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "mynteye/device/standard2/streams_adapter_s210a.h"
#include <iomanip>
#include "mynteye/logger.h"
#include "mynteye/device/types.h"
MYNTEYE_BEGIN_NAMESPACE
namespace {
// image info
#pragma pack(push, 1)
// Trailing image-info packet embedded at the end of each frame buffer
// (15 packed bytes; multi-byte fields big-endian on the wire).
struct ImagePacket {
  std::uint8_t header;         // expected 0x3B (see unpack_stereo_img_data)
  std::uint8_t size;
  std::uint16_t frame_id;
  std::uint64_t timestamp;
  std::uint16_t exposure_time;
  std::uint8_t checksum;

  ImagePacket() = default;
  explicit ImagePacket(std::uint8_t *data) {
    from_data(data);
  }

  // Decode the packed big-endian wire layout into the struct fields.
  void from_data(std::uint8_t *data) {
    std::uint32_t timestamp_l;
    std::uint32_t timestamp_h;
    header = *data;
    size = *(data + 1);
    frame_id = (*(data + 2) << 8) | *(data + 3);
    // 64-bit timestamp carried as two big-endian 32-bit halves
    timestamp_h = (*(data + 4) << 24) | (*(data + 5) << 16) |
        (*(data + 6) << 8) | *(data + 7);
    timestamp_l = (*(data + 8) << 24) | (*(data + 9) << 16) |
        (*(data + 10) << 8) | *(data + 11);
    timestamp = (static_cast<std::uint64_t>(timestamp_h) << 32) | timestamp_l;
    exposure_time = (*(data + 12) << 8) | *(data + 13);
    checksum = *(data + 14);
  }
};
#pragma pack(pop)
// image pixels
// Copy the left view out of a row-interleaved BGR888 source buffer.
// Per the indexing, the source holds 2*h rows of w pixels and the left
// image is taken from the even rows (2*i); the first and third channel
// bytes are swapped while copying (source +2,+1,+0 into dest +0,+1,+2).
bool unpack_left_img_pixels(
    const void *data, const StreamRequest &request, Streams::frame_t *frame) {
  CHECK_NOTNULL(frame);
  CHECK_EQ(request.format, Format::BGR888);
  CHECK_EQ(frame->format(), Format::BGR888);
  auto data_new = reinterpret_cast<const std::uint8_t *>(data);
  std::size_t n = 3;  // bytes per BGR888 pixel
  std::size_t w = frame->width();
  std::size_t h = frame->height();
  for (std::size_t i = 0; i < h; i++) {
    for (std::size_t j = 0; j < w; j++) {
      frame->data()[(i * w + j) * n] =
          *(data_new + (2 * i * w + j) * n + 2);
      frame->data()[(i * w + j) * n + 1] =
          *(data_new + (2 * i * w + j) * n + 1);
      frame->data()[(i * w + j) * n + 2] =
          *(data_new + (2 * i * w + j) * n);
    }
  }
  return true;
}
// Copy the right view out of a row-interleaved BGR888 source buffer.
// Mirrors unpack_left_img_pixels but reads the odd source rows (2*i + 1);
// the first and third channel bytes are swapped while copying.
bool unpack_right_img_pixels(
    const void *data, const StreamRequest &request, Streams::frame_t *frame) {
  CHECK_NOTNULL(frame);
  CHECK_EQ(request.format, Format::BGR888);
  CHECK_EQ(frame->format(), Format::BGR888);
  auto data_new = reinterpret_cast<const std::uint8_t *>(data);
  std::size_t n = 3;  // bytes per BGR888 pixel
  std::size_t w = frame->width();
  std::size_t h = frame->height();
  for (std::size_t i = 0; i < h; i++) {
    for (std::size_t j = 0; j < w; j++) {
      frame->data()[(i * w + j) * n] =
          *(data_new + ((2 * i + 1) * w + j) * n + 2);
      frame->data()[(i * w + j) * n + 1] =
          *(data_new + ((2 * i + 1) * w + j) * n + 1);
      frame->data()[(i * w + j) * n + 2] =
          *(data_new + ((2 * i + 1) * w + j) * n);
    }
  }
  return true;
}
// Extract the trailing ImagePacket metadata (frame id, timestamp,
// exposure time) from the end of a raw frame buffer into `img`.
// The packet occupies the last sizeof(ImagePacket) bytes of the frame and
// is stored in reverse byte order on the wire, hence the reverse_copy
// before decoding. Returns false if the header byte is not 0x3B.
bool unpack_stereo_img_data(
    const void *data, const StreamRequest &request, ImgData *img) {
  CHECK_NOTNULL(img);
  auto data_new = reinterpret_cast<const std::uint8_t *>(data);
  // Total frame size in bytes; the packet sits in the last packet_n bytes.
  std::size_t data_n =
      request.width * request.height * bytes_per_pixel(request.format);
  auto data_end = data_new + data_n;
  std::size_t packet_n = sizeof(ImagePacket);
  std::vector<std::uint8_t> packet(packet_n);
  std::reverse_copy(data_end - packet_n, data_end, packet.begin());
  ImagePacket img_packet(packet.data());
  // LOG(INFO) << "ImagePacket: header=0x" << std::hex <<
  // static_cast<int>(img_packet.header)
  // << ", size=0x" << std::hex << static_cast<int>(img_packet.size)
  // << ", frame_id="<< std::dec << img_packet.frame_id
  // << ", timestamp="<< std::dec << img_packet.timestamp
  // << ", exposure_time="<< std::dec << img_packet.exposure_time
  // << ", checksum=0x" << std::hex << static_cast<int>(img_packet.checksum);
  if (img_packet.header != 0x3B) {
    VLOG(2) << "Image packet header must be 0x3B, but 0x" << std::hex
        << std::uppercase << std::setw(2) << std::setfill('0')
        << static_cast<int>(img_packet.header) << " now";
    return false;
  }
  // NOTE(review): the checksum is computed but the verification below is
  // deliberately commented out, so mismatches are currently ignored.
  std::uint8_t checksum = 0;
  for (std::size_t i = 2, n = packet_n - 2; i <= n; i++) {  // content: [2,9]
    checksum = (checksum ^ packet[i]);
  }
  /*
  if (img_packet.checksum != checksum) {
    VLOG(2) << "Image packet checksum should be 0x" << std::hex
        << std::uppercase << std::setw(2) << std::setfill('0')
        << static_cast<int>(img_packet.checksum) << ", but 0x"
        << std::setw(2) << std::setfill('0') << static_cast<int>(checksum)
        << " now";
    return false;
  }
  */
  img->frame_id = img_packet.frame_id;
  img->timestamp = img_packet.timestamp;
  img->exposure_time = img_packet.exposure_time;
  return true;
}
} // namespace
Standard210aStreamsAdapter::Standard210aStreamsAdapter() {
}
Standard210aStreamsAdapter::~Standard210aStreamsAdapter() {
}
std::vector<Stream> Standard210aStreamsAdapter::GetKeyStreams() {
return {Stream::LEFT, Stream::RIGHT};
}
std::vector<Capabilities> Standard210aStreamsAdapter::GetStreamCapabilities() {
return {Capabilities::STEREO_COLOR};
}
std::map<Stream, Streams::unpack_img_data_t>
Standard210aStreamsAdapter::GetUnpackImgDataMap() {
return {
{Stream::LEFT, unpack_stereo_img_data},
{Stream::RIGHT, unpack_stereo_img_data}
};
}
std::map<Stream, Streams::unpack_img_pixels_t>
Standard210aStreamsAdapter::GetUnpackImgPixelsMap() {
return {
{Stream::LEFT, unpack_left_img_pixels},
{Stream::RIGHT, unpack_right_img_pixels}
};
}
MYNTEYE_END_NAMESPACE

View File

@ -1,42 +0,0 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef MYNTEYE_DEVICE_STANDARD2_STREAMS_ADAPTER_S210A_H_
#define MYNTEYE_DEVICE_STANDARD2_STREAMS_ADAPTER_S210A_H_
#pragma once
#include <map>
#include <memory>
#include <vector>
#include "mynteye/device/streams.h"
MYNTEYE_BEGIN_NAMESPACE
class Standard210aStreamsAdapter : public StreamsAdapter {
public:
Standard210aStreamsAdapter();
virtual ~Standard210aStreamsAdapter();
std::vector<Stream> GetKeyStreams() override;
std::vector<Capabilities> GetStreamCapabilities() override;
std::map<Stream, Streams::unpack_img_data_t>
GetUnpackImgDataMap() override;
std::map<Stream, Streams::unpack_img_pixels_t>
GetUnpackImgPixelsMap() override;
};
MYNTEYE_END_NAMESPACE
#endif // MYNTEYE_DEVICE_STANDARD2_STREAMS_ADAPTER_S210A_H_

View File

@ -36,6 +36,4 @@ namespace google {
// that there is only one instance of this across the entire program.
std::set<google::LogSink *> log_sinks_global;
int log_severity_global(INFO);
} // namespace google

View File

@ -166,6 +166,7 @@ const char *to_string(const Format &value) {
CASE(GREY)
CASE(YUYV)
CASE(BGR888)
CASE(RGB888)
default:
return "Format::UNKNOWN";
}

View File

@ -330,6 +330,9 @@ struct device {
for (int i = 0; i < 10; ++i) {
if (xioctl(fd, VIDIOC_STREAMON, &type) < 0) {
std::this_thread::sleep_for(std::chrono::milliseconds(100));
} else {
is_capturing = true;
return;
}
}
if (xioctl(fd, VIDIOC_STREAMON, &type) < 0)

8
wrappers/android/mynteye/.gitignore vendored Normal file
View File

@ -0,0 +1,8 @@
*.iml
.gradle
/local.properties
/.idea/
.DS_Store
/build
/captures
.externalNativeBuild

View File

@ -0,0 +1,11 @@
# MYNT® EYE Android Wrapper
## Prerequisites
The Android device needs to be rooted and must support the [USB3 OTG](https://en.wikipedia.org/wiki/USB_On-The-Go) feature.
## Build & Run
1. Download and install [Android Studio](https://developer.android.com/studio/index.html)
2. Start Android Studio and [download the NDK and build tools](https://developer.android.com/studio/projects/add-native-code)
3. Open this project using `Open an existing Android Studio project`

View File

@ -0,0 +1 @@
/build

View File

@ -0,0 +1,45 @@
apply plugin: 'com.android.application'
android {
compileSdkVersion xversions.compileSdk
defaultConfig {
applicationId "com.slightech.mynteye.demo"
minSdkVersion xversions.minSdk
targetSdkVersion xversions.targetSdk
versionCode 1
versionName "1.0"
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
ndk {
abiFilters xabis
}
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
}
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
}
dependencies {
implementation fileTree(dir: 'libs', include: ['*.jar'])
implementation 'androidx.appcompat:appcompat:1.1.0-alpha01'
implementation 'androidx.constraintlayout:constraintlayout:2.0.0-alpha3'
implementation 'com.jakewharton.timber:timber:4.7.1'
implementation 'com.jakewharton:butterknife:10.0.0'
annotationProcessor 'com.jakewharton:butterknife-compiler:10.0.0'
implementation project(':libmynteye')
implementation project(':libshell')
testImplementation 'junit:junit:4.12'
androidTestImplementation 'androidx.test:runner:1.1.1'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.1.1'
}

View File

@ -0,0 +1,21 @@
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile

View File

@ -0,0 +1,26 @@
package com.slightech.mynteye.demo;

import android.content.Context;
import androidx.test.InstrumentationRegistry;
import androidx.test.runner.AndroidJUnit4;
import org.junit.Test;
import org.junit.runner.RunWith;

import static org.junit.Assert.*;

/**
 * Instrumented test, which will execute on an Android device.
 *
 * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
 */
@RunWith(AndroidJUnit4.class)
public class ExampleInstrumentedTest {
  /** Sanity check: the instrumentation target is the demo application package. */
  @Test
  public void useAppContext() {
    // Context of the app under test.
    // NOTE(review): InstrumentationRegistry.getTargetContext() is deprecated in newer
    // androidx.test releases; consider ApplicationProvider.getApplicationContext().
    Context appContext = InstrumentationRegistry.getTargetContext();
    assertEquals("com.slightech.mynteye.demo", appContext.getPackageName());
  }
}

View File

@ -0,0 +1,26 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Demo app manifest: camera/network/storage permissions plus a single
     landscape launcher activity. -->
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    package="com.slightech.mynteye.demo">

    <!-- CAMERA is needed for the MYNT EYE device; WRITE_EXTERNAL_STORAGE is
         requested at runtime by BaseActivity. -->
    <uses-permission android:name="android.permission.CAMERA" />
    <uses-permission android:name="android.permission.INTERNET" />
    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />

    <application
        android:allowBackup="false"
        android:label="@string/app_name"
        android:name=".MyApplication"
        android:supportsRtl="true"
        android:theme="@style/AppTheme"
        tools:ignore="GoogleAppIndexingWarning">
        <!-- Fixed landscape: the left/right camera images are laid out side by side. -->
        <activity android:name=".ui.MainActivity"
            android:screenOrientation="landscape">
            <intent-filter>
                <action android:name="android.intent.action.MAIN"/>
                <category android:name="android.intent.category.LAUNCHER"/>
            </intent-filter>
        </activity>
    </application>

</manifest>

View File

@ -0,0 +1,35 @@
package com.slightech.mynteye.demo;

import android.app.Application;

import timber.log.Timber;

//import com.stericson.RootShell.RootShell;

/**
 * Application entry point: loads the native MYNT EYE JNI library once per
 * process and installs the Timber debug logger.
 */
public class MyApplication extends Application {

  static {
    // Load the JNI bridge eagerly; if this fails the app keeps running but any
    // later native call will throw UnsatisfiedLinkError.
    try {
      System.loadLibrary("mynteye_jni");
    } catch (UnsatisfiedLinkError e) {
      System.err.println("mynteye_jni library failed to load.\n" + e);
    }
  }

  @Override public void onCreate() {
    super.onCreate();
    Timber.plant(new Timber.DebugTree());
    //RootShell.debugMode = true;
  }

  // NOTE(review): super-only onLowMemory/onTrimMemory/onTerminate overrides were
  // removed — they added nothing beyond the framework defaults.
}

View File

@ -0,0 +1,213 @@
package com.slightech.mynteye.demo.camera;

import android.os.Handler;
import android.os.HandlerThread;

import com.slightech.mynteye.Device;
import com.slightech.mynteye.DeviceUsbInfo;
import com.slightech.mynteye.Info;
import com.slightech.mynteye.MotionData;
import com.slightech.mynteye.MotionIntrinsics;
import com.slightech.mynteye.Option;
import com.slightech.mynteye.Source;
import com.slightech.mynteye.Stream;
import com.slightech.mynteye.StreamData;
import com.slightech.mynteye.StreamRequest;

import java.util.ArrayList;
import java.util.Map;

import timber.log.Timber;

/**
 * Wrapper around a MYNT EYE {@link Device} that pulls stream and motion data in
 * a loop on a dedicated background thread and forwards them to listeners.
 *
 * <p>All listener callbacks run on the background thread (the handler for that
 * thread is passed along so callers can post follow-up work to it).
 */
public final class Mynteye implements Runnable {

  private Device mDevice;
  private HandlerThread mBackgroundThread;
  private Handler mBackgroundHandler;

  private boolean mOpened;
  private boolean mImuEnabled;

  /** Receives left/right image data; invoked on the background thread. */
  public interface OnStreamDataReceiveListener {
    void onStreamDataReceive(Stream stream, StreamData data, Handler handler);
    void onStreamLeftReceive(StreamData data, Handler handler);
    void onStreamRightReceive(StreamData data, Handler handler);
  }

  /** Receives IMU samples; invoked on the background thread. */
  public interface OnMotionDataReceiveListener {
    void onMotionDataReceive(ArrayList<MotionData> datas, Handler handler);
  }

  private OnStreamDataReceiveListener mOnStreamDataReceiveListener;
  private OnMotionDataReceiveListener mOnMotionDataReceiveListener;

  // Remembered so a later parameterless open() can reuse the last request.
  private StreamRequest mStreamRequest;

  public Mynteye(DeviceUsbInfo info) {
    mDevice = Device.create(info);
    mOpened = false;
    mImuEnabled = false;
  }

  public void setOnStreamDataReceiveListener(OnStreamDataReceiveListener l) {
    mOnStreamDataReceiveListener = l;
  }

  public void setOnMotionDataReceiveListener(OnMotionDataReceiveListener l) {
    mOnMotionDataReceiveListener = l;
  }

  /** @return the stream configurations supported by the device. */
  public ArrayList<StreamRequest> getStreamRequests() {
    return mDevice.getStreamRequests();
  }

  /** @return one "Info: value" line per {@link Info} field. */
  public String getDeviceInfos() {
    StringBuilder sb = new StringBuilder();
    for (Info info : Info.values()) {
      sb.append(info.toString());
      sb.append(": ");
      sb.append(mDevice.getInfo(info));
      sb.append('\n');
    }
    return sb.toString();
  }

  /** @return left/right intrinsics and the left-to-right extrinsics, as text. */
  public String getImageParams() {
    StringBuilder sb = new StringBuilder();
    sb.append(Stream.LEFT).append('\n').append(mDevice.getIntrinsics(Stream.LEFT));
    sb.append("\n\n");
    sb.append(Stream.RIGHT).append('\n').append(mDevice.getIntrinsics(Stream.RIGHT));
    sb.append("\n\n");
    sb.append(Stream.LEFT).append(" > ").append(Stream.RIGHT);
    sb.append('\n');
    sb.append(mDevice.getExtrinsics(Stream.LEFT, Stream.RIGHT));
    return sb.toString();
  }

  /** @return IMU intrinsics and the imu-to-left-camera extrinsics, as text. */
  public String getImuParams() {
    StringBuilder sb = new StringBuilder();
    MotionIntrinsics in = mDevice.getMotionIntrinsics();
    sb.append("Accel\n").append(in.getAccel());
    sb.append("\n\n");
    sb.append("Gyro\n").append(in.getGyro());
    sb.append("\n\n");
    sb.append("Imu > ").append(Stream.LEFT).append('\n')
        .append(mDevice.getMotionExtrinsics(Stream.LEFT));
    return sb.toString();
  }

  /** @return current value and info for every option the device supports. */
  public String getOptionInfos() {
    StringBuilder sb = new StringBuilder();
    for (Option op : Option.values()) {
      if (!mDevice.supportsOption(op)) {
        continue;
      }
      sb.append(op.toString());
      sb.append(": ");
      sb.append(mDevice.getOptionValue(op));
      sb.append("\n  ");
      sb.append(mDevice.getOptionInfo(op));
      sb.append('\n');
    }
    return sb.toString();
  }

  public boolean isOpened() {
    return mOpened;
  }

  public boolean isImuEnabled() {
    return mImuEnabled;
  }

  /**
   * Sets whether the IMU source is started on the next open(); has no effect on
   * an already-open session.
   */
  public void setImuEnabled(boolean enabled) {
    mImuEnabled = enabled;
    if (mOpened) {
      Timber.w("Will enable imu when open next time");
    }
  }

  /** Re-opens with the stream request used last time; no-op if never configured. */
  public void open() {
    if (mOpened) return;
    if (mStreamRequest == null) {
      Timber.w("Should open with stream request");
      return;
    }
    open(mStreamRequest);
  }

  /** Configures the device with {@code request}, starts it, and begins the poll loop. */
  public void open(StreamRequest request) {
    if (mOpened) return;
    mOpened = true;
    mStreamRequest = request;
    startBackgroundThread();
    mDevice.configStreamRequest(request);
    if (mImuEnabled) {
      mDevice.enableMotionDatas(Integer.MAX_VALUE);
      mDevice.start(Source.ALL);
    } else {
      mDevice.start(Source.VIDEO_STREAMING);
    }
    mBackgroundHandler.post(this);
  }

  /** Stops the poll loop and the device; safe to call when not open. */
  public void close() {
    if (!mOpened) return;
    mOpened = false;
    stopBackgroundThread();
    mDevice.stop(Source.ALL);
  }

  /** One poll iteration: wait for frames, dispatch them, then re-post while open. */
  @Override
  public void run() {
    //Timber.i("wait streams");
    mDevice.waitForStreams();
    //Timber.i("get streams");
    {
      StreamData data = mDevice.getStreamData(Stream.LEFT);
      if (mOnStreamDataReceiveListener != null) {
        mOnStreamDataReceiveListener.onStreamDataReceive(Stream.LEFT, data, mBackgroundHandler);
        mOnStreamDataReceiveListener.onStreamLeftReceive(data, mBackgroundHandler);
      }
    }
    {
      StreamData data = mDevice.getStreamData(Stream.RIGHT);
      if (mOnStreamDataReceiveListener != null) {
        mOnStreamDataReceiveListener.onStreamDataReceive(Stream.RIGHT, data, mBackgroundHandler);
        mOnStreamDataReceiveListener.onStreamRightReceive(data, mBackgroundHandler);
      }
    }
    //Timber.i("get motions");
    if (mImuEnabled) {
      ArrayList<MotionData> datas = mDevice.getMotionDatas();
      if (mOnMotionDataReceiveListener != null) {
        mOnMotionDataReceiveListener.onMotionDataReceive(datas, mBackgroundHandler);
      }
    }
    if (mOpened) mBackgroundHandler.post(this);
  }

  private void startBackgroundThread() {
    mBackgroundThread = new HandlerThread("MynteyeBackground");
    mBackgroundThread.start();
    mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
  }

  private void stopBackgroundThread() {
    mBackgroundThread.quitSafely();
    //mBackgroundThread.interrupt();
    try {
      mBackgroundHandler.removeCallbacksAndMessages(null);
      mBackgroundThread.join();
    } catch (InterruptedException e) {
      e.printStackTrace();
    } finally {
      // Drop the references even if the join was interrupted, so a later
      // open() always creates a fresh thread/handler pair.
      mBackgroundThread = null;
      mBackgroundHandler = null;
    }
  }
}

View File

@ -0,0 +1,63 @@
package com.slightech.mynteye.demo.ui;

import android.annotation.SuppressLint;
import android.content.pm.PackageManager;
import android.os.Bundle;
import android.widget.Toast;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;

import static android.Manifest.permission.CAMERA;
import static android.Manifest.permission.WRITE_EXTERNAL_STORAGE;

/**
 * Base activity that requests the storage and camera runtime permissions on
 * creation and toasts if any of them is denied.
 */
@SuppressLint("Registered")
public class BaseActivity extends AppCompatActivity {

  /** Request code identifying our runtime-permission request. */
  private static final int REQ_PERMISSIONS = 1;

  @Override
  protected void onCreate(@Nullable Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    requestPermissions();
  }

  /** Requests WRITE_EXTERNAL_STORAGE and CAMERA unless both are already granted. */
  private void requestPermissions() {
    final String[] permissions = new String[]{WRITE_EXTERNAL_STORAGE, CAMERA};
    boolean granted = true;
    for (String permission : permissions) {
      if (ContextCompat.checkSelfPermission(this, permission)
          != PackageManager.PERMISSION_GRANTED) {
        granted = false;
        break;  // one missing permission is enough to trigger the request
      }
    }
    if (granted) return;
    ActivityCompat.requestPermissions(this, permissions, REQ_PERMISSIONS);
  }

  @Override
  public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions,
      @NonNull int[] grantResults) {
    if (requestCode == REQ_PERMISSIONS) {
      boolean granted = true;
      if (grantResults.length < 1) {
        // An empty result means the request was interrupted; treat as denied.
        granted = false;
      } else {
        for (int result : grantResults) {
          if (result != PackageManager.PERMISSION_GRANTED) {
            granted = false;
            break;
          }
        }
      }
      if (!granted) {
        Toast.makeText(this, "Permission denied :(", Toast.LENGTH_LONG).show();
      }
    } else {
      super.onRequestPermissionsResult(requestCode, permissions, grantResults);
    }
  }
}

View File

@ -0,0 +1,323 @@
package com.slightech.mynteye.demo.ui;

import android.graphics.Bitmap;
import android.hardware.usb.UsbDevice;
import android.os.Bundle;
import android.os.Handler;
import android.text.TextUtils;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.ArrayAdapter;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;

import androidx.appcompat.app.AlertDialog;

import butterknife.BindView;
import butterknife.ButterKnife;

import com.slightech.mynteye.DeviceUsbInfo;
import com.slightech.mynteye.Frame;
import com.slightech.mynteye.ImuData;
import com.slightech.mynteye.MotionData;
import com.slightech.mynteye.Stream;
import com.slightech.mynteye.StreamData;
import com.slightech.mynteye.StreamRequest;
import com.slightech.mynteye.demo.R;
import com.slightech.mynteye.demo.camera.Mynteye;
import com.slightech.mynteye.usb.CameraDialog;
import com.slightech.mynteye.usb.USBMonitor;
import com.slightech.mynteye.usb.USBMonitor.OnDeviceConnectListener;
import com.slightech.mynteye.usb.USBMonitor.UsbControlBlock;
import com.slightech.mynteye.util.BitmapUtils;

import java.util.ArrayList;
import java.util.Locale;

import timber.log.Timber;

/**
 * Main screen: monitors USB attach/detach events, lets the user pick a MYNT EYE
 * stream request, and renders the left/right images plus the latest IMU sample.
 *
 * <p>Stream/motion callbacks arrive on the {@link Mynteye} background thread;
 * all view updates are posted back to the UI thread via {@code View.post}.
 */
public class MainActivity extends BaseActivity implements CameraDialog.CameraDialogParent,
    Mynteye.OnStreamDataReceiveListener, Mynteye.OnMotionDataReceiveListener {

  @BindView(R.id.text) TextView mTextView;
  @BindView(R.id.image_left) ImageView mLeftImageView;
  @BindView(R.id.image_right) ImageView mRightImageView;

  private USBMonitor mUSBMonitor;
  private Mynteye mMynteye;

  // Reused bitmaps the frame pixels are copied into (created lazily on first frame).
  private Bitmap mLeftBitmap, mRightBitmap;

  private boolean mImuEnabled;

  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);
    ButterKnife.bind(this);
    mUSBMonitor = new USBMonitor(this, mOnDeviceConnectListener);
  }

  @Override
  protected void onStart() {
    super.onStart();
    mUSBMonitor.register();
    if (mMynteye == null) {
      //actionOpen();
    }
  }

  @Override
  protected void onStop() {
    super.onStop();
    if (mUSBMonitor != null) {
      mUSBMonitor.unregister();
    }
  }

  @Override
  protected void onDestroy() {
    if (mMynteye != null) {
      mMynteye.close();
      mMynteye = null;
    }
    if (mUSBMonitor != null) {
      mUSBMonitor.destroy();
      mUSBMonitor = null;
    }
    super.onDestroy();
  }

  /** USB lifecycle callbacks; opens the device once a connection is granted. */
  private final OnDeviceConnectListener mOnDeviceConnectListener = new OnDeviceConnectListener() {

    @Override
    public void onAttach(final UsbDevice device) {
      toast("USB_DEVICE_ATTACHED");
    }

    @Override
    public void onConnect(final UsbDevice device, final UsbControlBlock ctrlBlock, final boolean createNew) {
      toast(String.format(Locale.getDefault(), "CONNECT, %s: %s", ctrlBlock.getProductName(), ctrlBlock.getSerial()));
      openDevice(new DeviceUsbInfo(
          ctrlBlock.getVenderId(),
          ctrlBlock.getProductId(),
          ctrlBlock.getFileDescriptor(),
          ctrlBlock.getBusNum(),
          ctrlBlock.getDevNum(),
          getUSBFSName(ctrlBlock),
          ctrlBlock.getProductName(),
          ctrlBlock.getSerial()));
    }

    @Override
    public void onDisconnect(final UsbDevice device, final UsbControlBlock ctrlBlock) {
      toast(String.format(Locale.getDefault(), "DISCONNECT, %s: %s", ctrlBlock.getProductName(), ctrlBlock.getSerial()));
    }

    @Override
    public void onDetach(final UsbDevice device) {
      toast("USB_DEVICE_DETACHED");
    }

    @Override
    public void onCancel(final UsbDevice device) {
    }

    private static final String DEFAULT_USBFS = "/dev/bus/usb";

    /**
     * Derives the usbfs root from the device node path (e.g. "/dev/bus/usb/001/002"
     * -> "/dev/bus/usb"); falls back to DEFAULT_USBFS when the path is unusable.
     */
    private final String getUSBFSName(final UsbControlBlock ctrlBlock) {
      String result = null;
      final String name = ctrlBlock.getDeviceName();
      final String[] v = !TextUtils.isEmpty(name) ? name.split("/") : null;
      if ((v != null) && (v.length > 2)) {
        final StringBuilder sb = new StringBuilder(v[0]);
        for (int i = 1; i < v.length - 2; i++)
          sb.append("/").append(v[i]);
        result = sb.toString();
      }
      if (TextUtils.isEmpty(result)) {
        Timber.w("failed to get USBFS path, try to use default path: %s", name);
        result = DEFAULT_USBFS;
      }
      return result;
    }
  };

  @Override
  public boolean onCreateOptionsMenu(Menu menu) {
    getMenuInflater().inflate(R.menu.menu_main, menu);
    return true;
  }

  /** Toggles open/close visibility and enables the info items only while opened. */
  @Override
  public boolean onPrepareOptionsMenu(Menu menu) {
    if (mMynteye == null) {
      menu.findItem(R.id.action_open).setVisible(true);
      menu.findItem(R.id.action_close).setVisible(false);
    } else {
      menu.findItem(R.id.action_open).setVisible(!mMynteye.isOpened());
      menu.findItem(R.id.action_close).setVisible(mMynteye.isOpened());
    }
    menu.findItem(R.id.check_imu_data).setChecked(mImuEnabled);
    boolean featuresUsable = mMynteye != null && mMynteye.isOpened();
    menu.findItem(R.id.show_device_infos).setEnabled(featuresUsable);
    menu.findItem(R.id.show_image_params).setEnabled(featuresUsable);
    menu.findItem(R.id.show_imu_params).setEnabled(featuresUsable);
    menu.findItem(R.id.show_option_infos).setEnabled(featuresUsable);
    return super.onPrepareOptionsMenu(menu);
  }

  @Override
  public boolean onOptionsItemSelected(MenuItem item) {
    switch (item.getItemId()) {
      case R.id.action_open:
        actionOpen();
        return true;
      case R.id.action_close:
        actionClose();
        return true;
      case R.id.check_imu_data:
        mImuEnabled = !mImuEnabled;
        item.setChecked(mImuEnabled);
        return true;
      case R.id.show_device_infos:
        alert(R.string.device_infos, mMynteye.getDeviceInfos());
        return true;
      case R.id.show_image_params:
        alert(R.string.image_params, mMynteye.getImageParams());
        return true;
      case R.id.show_imu_params:
        alert(R.string.imu_params, mMynteye.getImuParams());
        return true;
      case R.id.show_option_infos:
        alert(R.string.option_infos, mMynteye.getOptionInfos());
        return true;
      default:
        return super.onOptionsItemSelected(item);
    }
  }

  /** First open shows the USB picker dialog; later opens reuse the device. */
  private void actionOpen() {
    mTextView.setText("");
    if (mMynteye == null) {
      CameraDialog.showDialog(this);
    } else {
      mMynteye.setImuEnabled(mImuEnabled);
      mMynteye.open();
    }
  }

  private void actionClose() {
    if (mMynteye != null) {
      mMynteye.close();
      mMynteye = null;
    }
    invalidateOptionsMenu();
  }

  /** Lists the device's stream requests and opens the one the user selects. */
  private void openDevice(DeviceUsbInfo info) {
    mMynteye = new Mynteye(info);
    ArrayList<StreamRequest> requests = mMynteye.getStreamRequests();
    if (requests.isEmpty()) {
      alert("Warning", "There are no streams to request :(");
      mMynteye = null;
    } else {
      ArrayList<String> items = new ArrayList<>();
      for (StreamRequest req : requests) {
        items.add(req.toString());
      }
      AlertDialog dialog = new AlertDialog.Builder(this)
          .setTitle("StreamRequests")
          .create();
      ListView listView = new ListView(this);
      listView.setAdapter(new ArrayAdapter<>(this, android.R.layout.simple_list_item_1, items));
      listView.setOnItemClickListener((parent, view, position, id) -> {
        dialog.dismiss();
        mMynteye.setOnStreamDataReceiveListener(this);
        mMynteye.setOnMotionDataReceiveListener(this);
        mMynteye.setImuEnabled(mImuEnabled);
        mMynteye.open(requests.get(position));
        invalidateOptionsMenu();
      });
      dialog.setOnCancelListener(dlg -> {
        // User backed out without choosing a request; forget the device.
        mMynteye = null;
      });
      dialog.setView(listView);
      dialog.show();
    }
  }

  @Override
  public USBMonitor getUSBMonitor() {
    return mUSBMonitor;
  }

  @Override
  public void onDialogResult(boolean canceled) {
  }

  @Override
  public void onStreamDataReceive(Stream stream, StreamData data, Handler handler) {
  }

  @Override
  public void onStreamLeftReceive(StreamData data, Handler handler) {
    //Timber.i("onStreamLeftReceive");
    Frame frame = data.frame();
    if (mLeftBitmap == null) {
      mLeftBitmap = Bitmap.createBitmap(frame.width(), frame.height(), Bitmap.Config.ARGB_8888);
    }
    BitmapUtils.copyPixels(frame, mLeftBitmap);
    mLeftImageView.post(() -> mLeftImageView.setImageBitmap(mLeftBitmap));
  }

  @Override
  public void onStreamRightReceive(StreamData data, Handler handler) {
    //Timber.i("onStreamRightReceive");
    Frame frame = data.frame();
    if (mRightBitmap == null) {
      mRightBitmap = Bitmap.createBitmap(frame.width(), frame.height(), Bitmap.Config.ARGB_8888);
    }
    BitmapUtils.copyPixels(frame, mRightBitmap);
    mRightImageView.post(() -> mRightImageView.setImageBitmap(mRightBitmap));
  }

  /** Renders the first IMU sample of the batch into the status text view. */
  @Override
  public void onMotionDataReceive(ArrayList<MotionData> datas, Handler handler) {
    if (datas.isEmpty()) return;
    ImuData data = datas.get(0).imu();
    mTextView.post(() -> {
      StringBuilder sb = new StringBuilder();
      final int flag = data.getFlag();
      if (flag == 0) { // accel & gyro
        sb.append("Accel: ").append(data.getAccel());
        sb.append("\nGyro: ").append(data.getGyro());
      } else if (flag == 1) { // accel
        sb.append("Accel: ").append(data.getAccel());
        sb.append("\nGyro: -");
      } else if (flag == 2) { // gyro
        sb.append("Accel: -");
        sb.append("\nGyro: ").append(data.getGyro());
      }
      mTextView.setText(sb.toString());
    });
  }

  private void toast(int textId) {
    toast(getString(textId));
  }

  private void toast(CharSequence text) {
    Toast.makeText(this, text, Toast.LENGTH_LONG).show();
  }

  private void alert(int titleId, CharSequence message) {
    alert(getString(titleId), message);
  }

  private void alert(CharSequence title, CharSequence message) {
    new AlertDialog.Builder(this)
        .setTitle(title)
        .setMessage(message)
        .setPositiveButton(android.R.string.ok, null)
        .show();
  }
}

View File

@ -0,0 +1,76 @@
package com.slightech.mynteye.demo.util;

import com.stericson.RootShell.RootShell;
import com.stericson.RootShell.exceptions.RootDeniedException;
import com.stericson.RootShell.execution.Command;
import com.stericson.RootShell.execution.Shell;

import java.io.IOException;
import java.util.concurrent.TimeoutException;

import timber.log.Timber;

/**
 * Helpers for checking root availability and making the video device nodes
 * world-readable/writable via a root shell.
 */
public final class RootUtils {

  /** Notified with the result of {@link #requestAccessible}. */
  public interface OnRequestAccessibleListener {
    void onRequestAccessible(boolean ok);
  }

  /**
   * Returns true when root is available, a root shell can be obtained, and
   * access is actually granted; logs and returns false otherwise.
   */
  public static boolean isRooted() {
    if (!RootShell.isRootAvailable()) {
      Timber.e("Root not found");
      return false;
    }
    try {
      RootShell.getShell(true);
    } catch (IOException e) {
      e.printStackTrace();
      return false;
    } catch (TimeoutException e) {
      Timber.e("TIMEOUT EXCEPTION!");
      e.printStackTrace();
      return false;
    } catch (RootDeniedException e) {
      Timber.e("ROOT DENIED EXCEPTION!");
      e.printStackTrace();
      return false;
    }
    try {
      if (!RootShell.isAccessGiven()) {
        Timber.e("ERROR: No root access to this device.");
        return false;
      }
    } catch (Exception e) {
      Timber.e("ERROR: could not determine root access to this device.");
      return false;
    }
    return true;
  }

  /**
   * Runs "chmod 666 /dev/video*" in a root shell so the camera nodes become
   * accessible without root, and reports success via {@code l}.
   */
  public static void requestAccessible(OnRequestAccessibleListener l) {
    try {
      Shell sh = RootShell.getShell(true);
      sh.add(new Command(1, "chmod 666 /dev/video*") {
        @Override
        public void commandOutput(int id, String line) {
          Timber.d("commandOutput: %s", line);
          super.commandOutput(id, line);
        }

        @Override
        public void commandTerminated(int id, String reason) {
          Timber.d("commandTerminated: %s", reason);
        }

        @Override
        public void commandCompleted(int id, int exitcode) {
          Timber.d("commandCompleted: %s", ((exitcode == 0) ? "ok" : "fail"));
          if (l != null) l.onRequestAccessible(exitcode == 0);
        }
      });
      // NOTE(review): close() is called right after add(); this assumes the
      // RootShell Shell drains queued commands before closing — verify against
      // the RootShell library's Shell.close() semantics.
      sh.close();
    } catch (Exception e) {
      e.printStackTrace();
    }
  }
}

View File

@ -0,0 +1,58 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Main screen: a status text line pinned to the top, over a container that
     shows the left and right camera images side by side with equal widths. -->
<androidx.constraintlayout.widget.ConstraintLayout
    xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:app="http://schemas.android.com/apk/res-auto"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:orientation="horizontal"
    tools:context=".ui.MainActivity"
    >

    <!-- Status / IMU readout, updated from MainActivity. -->
    <TextView
        android:id="@+id/text"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:layout_marginEnd="8dp"
        android:layout_marginStart="8dp"
        android:text="@string/tip_open"
        app:layout_constraintEnd_toEndOf="parent"
        app:layout_constraintStart_toStartOf="parent"
        app:layout_constraintTop_toTopOf="parent"
        />

    <androidx.constraintlayout.widget.ConstraintLayout
        android:id="@+id/layout_image"
        android:layout_width="0dp"
        android:layout_height="0dp"
        app:layout_constraintBottom_toBottomOf="parent"
        app:layout_constraintEnd_toEndOf="parent"
        app:layout_constraintStart_toStartOf="parent"
        app:layout_constraintTop_toTopOf="parent"
        >

        <!-- Left camera image (chained with image_right, equal weight). -->
        <ImageView
            android:id="@+id/image_left"
            android:layout_width="0dp"
            android:layout_height="0dp"
            app:layout_constraintBottom_toBottomOf="parent"
            app:layout_constraintEnd_toStartOf="@id/image_right"
            app:layout_constraintStart_toStartOf="parent"
            app:layout_constraintTop_toTopOf="parent"
            app:layout_constraintVertical_weight="1"
            />

        <!-- Right camera image. -->
        <ImageView
            android:id="@+id/image_right"
            android:layout_width="0dp"
            android:layout_height="0dp"
            app:layout_constraintBottom_toBottomOf="parent"
            app:layout_constraintEnd_toEndOf="parent"
            app:layout_constraintStart_toEndOf="@id/image_left"
            app:layout_constraintTop_toTopOf="parent"
            app:layout_constraintVertical_weight="1"
            />

    </androidx.constraintlayout.widget.ConstraintLayout>

</androidx.constraintlayout.widget.ConstraintLayout>

View File

@ -0,0 +1,36 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Options menu for MainActivity: open/close actions, an IMU toggle, and
     the four device-information dialogs (enabled only while opened). -->
<menu xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:app="http://schemas.android.com/apk/res-auto">
    <item
        android:id="@+id/action_open"
        android:title="@string/open"
        app:showAsAction="ifRoom|withText" />
    <item
        android:id="@+id/action_close"
        android:title="@string/close"
        app:showAsAction="ifRoom|withText" />
    <!-- Checkable toggle; checked state mirrors MainActivity.mImuEnabled. -->
    <item
        android:id="@+id/check_imu_data"
        android:title="@string/imu_data"
        android:checkable="true"
        android:checked="false"
        app:showAsAction="never" />
    <item
        android:id="@+id/show_device_infos"
        android:title="@string/device_infos"
        app:showAsAction="never" />
    <item
        android:id="@+id/show_image_params"
        android:title="@string/image_params"
        app:showAsAction="never" />
    <item
        android:id="@+id/show_imu_params"
        android:title="@string/imu_params"
        app:showAsAction="never" />
    <item
        android:id="@+id/show_option_infos"
        android:title="@string/option_infos"
        app:showAsAction="never" />
</menu>

View File

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Theme palette referenced by styles.xml. -->
<resources>
    <color name="colorPrimary">#008577</color>
    <color name="colorPrimaryDark">#00574B</color>
    <color name="colorAccent">#D81B60</color>
</resources>

View File

@ -0,0 +1,15 @@
<resources>
    <!-- User-visible strings; the names below are referenced from menus,
         layouts, and MainActivity dialog titles. -->
    <string name="app_name">MYNTEYE-S SDK Sample</string>
    <string name="tip_open">Please \"Open\" the camera!</string>
    <string name="open">Open</string>
    <string name="close">Close</string>
    <string name="imu_data">Imu Data</string>
    <string name="device_infos">Device Infos</string>
    <string name="image_params">Image Params</string>
    <string name="imu_params">Imu Params</string>
    <string name="option_infos">Option Infos</string>
</resources>

View File

@ -0,0 +1,11 @@
<resources>

    <!-- Base application theme (colors defined in colors.xml). -->
    <style name="AppTheme" parent="Theme.AppCompat.Light.DarkActionBar">
        <!-- Customize your theme here. -->
        <item name="colorPrimary">@color/colorPrimary</item>
        <item name="colorPrimaryDark">@color/colorPrimaryDark</item>
        <item name="colorAccent">@color/colorAccent</item>
    </style>

</resources>

View File

@ -0,0 +1,17 @@
package com.slightech.mynteye.demo;

import org.junit.Test;

import static org.junit.Assert.*;

/**
 * Example local unit test, which will execute on the development machine (host).
 *
 * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
 */
public class ExampleUnitTest {
  /** Template test kept from the Android Studio project scaffold. */
  @Test
  public void addition_isCorrect() {
    assertEquals(4, 2 + 2);
  }
}

View File

@ -0,0 +1,27 @@
// Top-level build file where you can add configuration options common to all sub-projects/modules.

buildscript {
    // Shared version/ABI constants ('xversions', 'xabis') used by the modules.
    apply from: rootProject.file('gradle/dependencies.gradle')
    repositories {
        google()
        jcenter()
    }
    dependencies {
        classpath 'com.android.tools.build:gradle:3.3.1'

        // NOTE: Do not place your application dependencies here; they belong
        // in the individual module build.gradle files
    }
}

allprojects {
    repositories {
        google()
        jcenter()
    }
}

// Removes the root build directory on './gradlew clean'.
task clean(type: Delete) {
    delete rootProject.buildDir
}

View File

@ -0,0 +1,17 @@
# Project-wide Gradle settings.
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
android.enableJetifier=true
android.useAndroidX=true
org.gradle.jvmargs=-Xmx1536m
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. More details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true

View File

@ -0,0 +1,10 @@
// Shared SDK-version and ABI settings consumed by the module build scripts.
ext {
    xversions = [
        'compileSdk': 28,
        'minSdk': 24,
        'targetSdk': 28,
    ]
    // Native ABIs packaged into the APK.
    xabis = ['arm64-v8a', 'armeabi-v7a'] as String[]
    //xabis = ['arm64-v8a', 'armeabi-v7a', 'x86', 'x86_64'] as String[]
}

Binary file not shown.

View File

@ -0,0 +1,6 @@
#Tue Jan 15 14:54:17 CST 2019
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-4.10.1-all.zip

172
wrappers/android/mynteye/gradlew vendored Executable file
View File

@ -0,0 +1,172 @@
#!/usr/bin/env sh
# NOTE(review): vendored Gradle wrapper start-up script, generated by Gradle.
# Avoid hand-editing; regenerate with the Gradle 'wrapper' task instead.

##############################################################################
##
##  Gradle start up script for UN*X
##
##############################################################################

# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
    ls=`ls -ld "$PRG"`
    link=`expr "$ls" : '.*-> \(.*\)$'`
    if expr "$link" : '/.*' > /dev/null; then
        PRG="$link"
    else
        PRG=`dirname "$PRG"`"/$link"
    fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null

APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`

# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""

# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"

warn () {
    echo "$*"
}

die () {
    echo
    echo "$*"
    echo
    exit 1
}

# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
  CYGWIN* )
    cygwin=true
    ;;
  Darwin* )
    darwin=true
    ;;
  MINGW* )
    msys=true
    ;;
  NONSTOP* )
    nonstop=true
    ;;
esac

CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar

# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
    if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
        # IBM's JDK on AIX uses strange locations for the executables
        JAVACMD="$JAVA_HOME/jre/sh/java"
    else
        JAVACMD="$JAVA_HOME/bin/java"
    fi
    if [ ! -x "$JAVACMD" ] ; then
        die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
    fi
else
    JAVACMD="java"
    which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi

# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
    MAX_FD_LIMIT=`ulimit -H -n`
    if [ $? -eq 0 ] ; then
        if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
            MAX_FD="$MAX_FD_LIMIT"
        fi
        ulimit -n $MAX_FD
        if [ $? -ne 0 ] ; then
            warn "Could not set maximum file descriptor limit: $MAX_FD"
        fi
    else
        warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
    fi
fi

# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
    GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi

# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
    APP_HOME=`cygpath --path --mixed "$APP_HOME"`
    CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
    JAVACMD=`cygpath --unix "$JAVACMD"`

    # We build the pattern for arguments to be converted via cygpath
    ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
    SEP=""
    for dir in $ROOTDIRSRAW ; do
        ROOTDIRS="$ROOTDIRS$SEP$dir"
        SEP="|"
    done
    OURCYGPATTERN="(^($ROOTDIRS))"
    # Add a user-defined pattern to the cygpath arguments
    if [ "$GRADLE_CYGPATTERN" != "" ] ; then
        OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
    fi
    # Now convert the arguments - kludge to limit ourselves to /bin/sh
    i=0
    for arg in "$@" ; do
        CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
        CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
        if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
            eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
        else
            eval `echo args$i`="\"$arg\""
        fi
        i=$((i+1))
    done
    case $i in
        (0) set -- ;;
        (1) set -- "$args0" ;;
        (2) set -- "$args0" "$args1" ;;
        (3) set -- "$args0" "$args1" "$args2" ;;
        (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
        (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
        (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
        (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
        (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
        (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
    esac
fi

# Escape application args
save () {
    for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
    echo " "
}
APP_ARGS=$(save "$@")

# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"

# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
    cd "$(dirname "$0")"
fi

exec "$JAVACMD" "$@"

84
wrappers/android/mynteye/gradlew.bat vendored Normal file
View File

@ -0,0 +1,84 @@
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem  Gradle startup script for Windows
@rem
@rem  NOTE(review): vendored Gradle wrapper script, generated by Gradle.
@rem  Avoid hand-editing; regenerate with the Gradle 'wrapper' task instead.
@rem
@rem ##########################################################################

@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal

set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%

@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=

@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome

set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init

echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe

if exist "%JAVA_EXE%" goto init

echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.

goto fail

:init
@rem Get command-line arguments, handling Windows variants

if not "%OS%" == "Windows_NT" goto win9xME_args

:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2

:win9xME_args_slurp
if "x%~1" == "x" goto execute

set CMD_LINE_ARGS=%*

:execute
@rem Setup the command line

set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar

@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%

:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd

:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1

:mainEnd
if "%OS%"=="Windows_NT" endlocal

:omega

View File

@ -0,0 +1 @@
/build

View File

@ -0,0 +1,122 @@
# For more information about using CMake with Android Studio, read the
# documentation: https://d.android.com/studio/projects/add-native-code.html

# Sets the minimum version of CMake required to build the native library.
cmake_minimum_required(VERSION 3.4.1)

# Root of the SDK checkout, four directory levels above this module.
# (Fixed the misspelled variable name MYNTETE_ROOT -> MYNTEYE_ROOT;
# the variable is only used within this file.)
get_filename_component(MYNTEYE_ROOT "${PROJECT_SOURCE_DIR}/../../../.." ABSOLUTE)
message(STATUS "MYNTEYE_ROOT: ${MYNTEYE_ROOT}")

get_filename_component(PRO_ROOT "${PROJECT_SOURCE_DIR}/.." ABSOLUTE)
message(STATUS "PRO_ROOT: ${PRO_ROOT}")

set(LIB_ROOT "${PROJECT_SOURCE_DIR}")
message(STATUS "LIB_ROOT: ${LIB_ROOT}")

# Locate Djinni: -DDJINNI_DIR on the command line wins, then the DJINNI_DIR
# environment variable, then the copy bundled under third_party.
if(NOT DJINNI_DIR)
  if(DEFINED ENV{DJINNI_DIR})
    set(DJINNI_DIR $ENV{DJINNI_DIR})
  else()
    set(DJINNI_DIR "${PRO_ROOT}/third_party/djinni")
  endif()
endif()

# libs

## log
find_library(log-lib log)

## jnigraphics
find_library(jnigraphics-lib jnigraphics)

## djinni_jni: Djinni's JNI support library, built from source.
include_directories(
  ${DJINNI_DIR}/support-lib/jni
)
add_library(djinni_jni STATIC
  ${DJINNI_DIR}/support-lib/jni/djinni_support.cpp
)

## mynteye_internal: prebuilt per-ABI shared library shipped in jniLibs.
add_library(mynteye_internal SHARED IMPORTED)
set_target_properties(mynteye_internal PROPERTIES
  IMPORTED_LOCATION "${LIB_ROOT}/src/main/jniLibs/${ANDROID_ABI}/libmynteye_internal.so"
)

# targets

## libmynteye
add_definitions(-DMYNTEYE_EXPORTS)

set(MYNTEYE_NAMESPACE "mynteye")
#message(STATUS "Namespace: ${MYNTEYE_NAMESPACE}")
configure_file(
  ${MYNTEYE_ROOT}/include/mynteye/mynteye.h.in
  include/mynteye/mynteye.h @ONLY
)

set(MYNTEYE_SRCS
  #${MYNTEYE_ROOT}/src/mynteye/uvc/linux/uvc-v4l2.cc
  ${MYNTEYE_ROOT}/src/mynteye/types.cc
  ${MYNTEYE_ROOT}/src/mynteye/util/files.cc
  ${MYNTEYE_ROOT}/src/mynteye/util/strings.cc
  ${MYNTEYE_ROOT}/src/mynteye/device/channel/bytes.cc
  ${MYNTEYE_ROOT}/src/mynteye/device/channel/channels.cc
  ${MYNTEYE_ROOT}/src/mynteye/device/channel/file_channel.cc
  ${MYNTEYE_ROOT}/src/mynteye/device/config.cc
  ${MYNTEYE_ROOT}/src/mynteye/device/context.cc
  ${MYNTEYE_ROOT}/src/mynteye/device/device.cc
  ${MYNTEYE_ROOT}/src/mynteye/device/motions.cc
  ${MYNTEYE_ROOT}/src/mynteye/device/standard/channels_adapter_s.cc
  ${MYNTEYE_ROOT}/src/mynteye/device/standard/device_s.cc
  ${MYNTEYE_ROOT}/src/mynteye/device/standard/streams_adapter_s.cc
  ${MYNTEYE_ROOT}/src/mynteye/device/standard2/channels_adapter_s2.cc
  ${MYNTEYE_ROOT}/src/mynteye/device/standard2/device_s2.cc
  ${MYNTEYE_ROOT}/src/mynteye/device/standard2/streams_adapter_s2.cc
  ${MYNTEYE_ROOT}/src/mynteye/device/streams.cc
  ${MYNTEYE_ROOT}/src/mynteye/device/types.cc
  ${MYNTEYE_ROOT}/src/mynteye/device/utils.cc
)
# Android builds use miniglog instead of a system glog.
list(APPEND MYNTEYE_SRCS ${MYNTEYE_ROOT}/src/mynteye/miniglog.cc)

add_library(mynteye STATIC ${MYNTEYE_SRCS})
target_link_libraries(mynteye ${log-lib})
target_include_directories(mynteye PUBLIC
  "$<BUILD_INTERFACE:${MYNTEYE_ROOT}/include>"
  "$<BUILD_INTERFACE:${MYNTEYE_ROOT}/src>"
  "$<BUILD_INTERFACE:${CMAKE_CURRENT_BINARY_DIR}/include>"
  "$<INSTALL_INTERFACE:include>"
)

## libmynteye_jni: the JNI bridge loaded by the Java side.
set(CPP_DIR "${PROJECT_SOURCE_DIR}/src/main/cpp")
include_directories(
  ${CPP_DIR}/mynteye/cpp
  ${CPP_DIR}/mynteye/impl
  ${CPP_DIR}/mynteye/jni
)
set(MYNTEYE_JNI_SRCS "")
foreach(__dir cpp impl jni)
  file(GLOB __srcs "${CPP_DIR}/mynteye/${__dir}/*.cpp")
  list(APPEND MYNTEYE_JNI_SRCS ${__srcs})
endforeach()
#message(STATUS "MYNTEYE_JNI_SRCS: ${MYNTEYE_JNI_SRCS}")
add_library(mynteye_jni SHARED
  ${DJINNI_DIR}/support-lib/jni/djinni_main.cpp
  ${CPP_DIR}/mynteye/impl/util/jni_util.cpp
  ${MYNTEYE_JNI_SRCS}
)
target_link_libraries(mynteye_jni ${log-lib} ${jnigraphics-lib} djinni_jni mynteye mynteye_internal)

View File

@ -0,0 +1,53 @@
apply plugin: 'com.android.library'

android {
    // NOTE(review): xversions and xabis are not defined in this file — they
    // are presumably project-level ext properties; confirm against the root
    // build.gradle before building.
    compileSdkVersion xversions.compileSdk

    defaultConfig {
        minSdkVersion xversions.minSdk
        targetSdkVersion xversions.targetSdk
        versionCode 1
        versionName "1.0"

        testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"

        externalNativeBuild {
            // https://developer.android.com/ndk/guides/cmake
            cmake {
                cppFlags "-std=c++11 -frtti -fexceptions"
            }
        }

        ndk {
            // ABIs to build; should match the prebuilt jniLibs — confirm.
            abiFilters xabis
        }
    }

    buildTypes {
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
        }
    }

    externalNativeBuild {
        cmake {
            path "CMakeLists.txt"
        }
    }

    compileOptions {
        sourceCompatibility JavaVersion.VERSION_1_8
        targetCompatibility JavaVersion.VERSION_1_8
    }
}

dependencies {
    implementation fileTree(dir: 'libs', include: ['*.jar'])
    implementation 'androidx.annotation:annotation:1.0.1'
    testImplementation 'junit:junit:4.12'
    androidTestImplementation 'androidx.test:runner:1.1.1'
    androidTestImplementation 'androidx.test.espresso:espresso-core:3.1.1'
}

View File

@ -0,0 +1,21 @@
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile

View File

@ -0,0 +1,26 @@
package com.slightech.mynteye;

import android.content.Context;

import androidx.test.InstrumentationRegistry;
import androidx.test.runner.AndroidJUnit4;

import org.junit.Test;
import org.junit.runner.RunWith;

import static org.junit.Assert.*;

/**
 * Instrumented test, which will execute on an Android device.
 *
 * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
 */
@RunWith(AndroidJUnit4.class)
public class ExampleInstrumentedTest {

    /** Sanity-checks that the instrumentation context resolves as expected. */
    @Test
    public void useAppContext() {
        // Context of the app under test.
        // NOTE(review): InstrumentationRegistry.getTargetContext() is the
        // pre-AndroidX-core API; newer androidx.test versions deprecate it in
        // favor of ApplicationProvider.getApplicationContext() — consider
        // migrating when the test dependencies are upgraded.
        Context appContext = InstrumentationRegistry.getTargetContext();

        // For a library module the target package is the test APK's id,
        // hence the ".test" suffix — confirm it matches the androidTest
        // applicationId if this assertion ever fails.
        assertEquals("com.slightech.mynteye.test", appContext.getPackageName());
    }
}

View File

@ -0,0 +1,2 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.slightech.mynteye"/>

View File

@ -0,0 +1,28 @@
// AUTOGENERATED FILE - DO NOT MODIFY!
// This file generated by Djinni from mynteye_types.djinni
// NOTE(review): regenerate from mynteye_types.djinni instead of editing by hand.

#pragma once

#include <functional>

namespace mynteye_jni {

/** Hardware add-ons a device may expose; see Device::SupportsAddon. */
enum class Addon : int {
  /** Infrared */
  INFRARED,
  /** Second infrared */
  INFRARED2,
};

}  // namespace mynteye_jni

namespace std {

// Hash support so the enum can key unordered containers.
template <>
struct hash<::mynteye_jni::Addon> {
  size_t operator()(::mynteye_jni::Addon type) const {
    return std::hash<int>()(static_cast<int>(type));
  }
};

}  // namespace std

View File

@ -0,0 +1,30 @@
// AUTOGENERATED FILE - DO NOT MODIFY!
// This file generated by Djinni from mynteye_types.djinni
// NOTE(review): regenerate from mynteye_types.djinni instead of editing by hand.

#pragma once

#include <functional>

namespace mynteye_jni {

/** Camera calibration model of a stream; see Intrinsics::calib_model. */
enum class CalibrationModel : int {
  /** Pinhole */
  PINHOLE,
  /** Equidistant: KANNALA_BRANDT */
  KANNALA_BRANDT,
  /** Unknown (identifier spelled UNKNOW by the generator; kept for compatibility) */
  UNKNOW,
};

}  // namespace mynteye_jni

namespace std {

// Hash support so the enum can key unordered containers.
template <>
struct hash<::mynteye_jni::CalibrationModel> {
  size_t operator()(::mynteye_jni::CalibrationModel type) const {
    return std::hash<int>()(static_cast<int>(type));
  }
};

}  // namespace std

View File

@ -0,0 +1,42 @@
// AUTOGENERATED FILE - DO NOT MODIFY!
// This file generated by Djinni from mynteye_types.djinni
// NOTE(review): regenerate from mynteye_types.djinni instead of editing by hand.

#pragma once

#include <functional>

namespace mynteye_jni {

/** Capabilities a device may provide; see Device::SupportsCapability. */
enum class Capability : int {
  /** Provides stereo stream */
  STEREO,
  /** Provides stereo color stream */
  STEREO_COLOR,
  /** Provides color stream */
  COLOR,
  /** Provides depth stream */
  DEPTH,
  /** Provides point cloud stream */
  POINTS,
  /** Provides fisheye stream */
  FISHEYE,
  /** Provides infrared stream */
  INFRARED,
  /** Provides second infrared stream */
  INFRARED2,
  /** Provides IMU (accelerometer, gyroscope) data */
  IMU,
};

}  // namespace mynteye_jni

namespace std {

// Hash support so the enum can key unordered containers.
template <>
struct hash<::mynteye_jni::Capability> {
  size_t operator()(::mynteye_jni::Capability type) const {
    return std::hash<int>()(static_cast<int>(type));
  }
};

}  // namespace std

View File

@ -0,0 +1,106 @@
// AUTOGENERATED FILE - DO NOT MODIFY!
// This file generated by Djinni from mynteye.djinni
// NOTE(review): regenerate from mynteye.djinni instead of editing by hand.

#pragma once

#include "addon.hpp"
#include "capability.hpp"
#include "device_usb_info.hpp"
#include "extrinsics.hpp"
#include "info.hpp"
#include "intrinsics.hpp"
#include "model.hpp"
#include "motion_data.hpp"
#include "motion_intrinsics.hpp"
#include "option.hpp"
#include "option_info.hpp"
#include "source.hpp"
#include "stream.hpp"
#include "stream_data.hpp"
#include "stream_request.hpp"
#include <cstdint>
#include <memory>
#include <string>
#include <vector>

namespace mynteye_jni {

/** Device class to communicate with MYNT® EYE device */
class Device {
public:
    virtual ~Device() {}

    /** Create the device instance */
    static std::shared_ptr<Device> Create(const ::mynteye_jni::DeviceUsbInfo & info);

    /** Get the model */
    virtual ::mynteye_jni::Model GetModel() = 0;

    /** Supports the stream or not */
    virtual bool SupportsStream(::mynteye_jni::Stream stream) = 0;

    /** Supports the capability or not */
    virtual bool SupportsCapability(::mynteye_jni::Capability capabilities) = 0;

    /** Supports the option or not */
    virtual bool SupportsOption(::mynteye_jni::Option option) = 0;

    /** Supports the addon or not */
    virtual bool SupportsAddon(::mynteye_jni::Addon addon) = 0;

    /** Get all stream requests */
    virtual std::vector<::mynteye_jni::StreamRequest> GetStreamRequests() = 0;

    /** Config the stream request */
    virtual void ConfigStreamRequest(const ::mynteye_jni::StreamRequest & request) = 0;

    /** Get the device info */
    virtual std::string GetInfo(::mynteye_jni::Info info) = 0;

    /** Get the intrinsics of stream */
    virtual ::mynteye_jni::Intrinsics GetIntrinsics(::mynteye_jni::Stream stream) = 0;

    /** Get the extrinsics of stream */
    virtual ::mynteye_jni::Extrinsics GetExtrinsics(::mynteye_jni::Stream from, ::mynteye_jni::Stream to) = 0;

    /** Get the intrinsics of motion */
    virtual ::mynteye_jni::MotionIntrinsics GetMotionIntrinsics() = 0;

    /** Get the extrinsics from one stream to motion */
    virtual ::mynteye_jni::Extrinsics GetMotionExtrinsics(::mynteye_jni::Stream from) = 0;

    /** Get the option info */
    virtual ::mynteye_jni::OptionInfo GetOptionInfo(::mynteye_jni::Option option) = 0;

    /** Get the option value */
    virtual int32_t GetOptionValue(::mynteye_jni::Option option) = 0;

    /** Set the option value */
    virtual void SetOptionValue(::mynteye_jni::Option option, int32_t value) = 0;

    /** Run the action associated with the option */
    virtual bool RunOptionAction(::mynteye_jni::Option option) = 0;

    /** Start capturing the source */
    virtual void Start(::mynteye_jni::Source source) = 0;

    /** Stop capturing the source */
    virtual void Stop(::mynteye_jni::Source source) = 0;

    /** Wait the streams are ready */
    virtual void WaitForStreams() = 0;

    /** Get the latest data of stream */
    virtual std::shared_ptr<::mynteye_jni::StreamData> GetStreamData(::mynteye_jni::Stream stream) = 0;

    /** Get the data items of the stream */
    virtual std::vector<std::shared_ptr<::mynteye_jni::StreamData>> GetStreamDatas(::mynteye_jni::Stream stream) = 0;

    /** Enable cache motion datas until get them, otherwise using callback instead */
    virtual void EnableMotionDatas(int32_t max_size) = 0;

    /** Get the motion datas */
    virtual std::vector<std::shared_ptr<::mynteye_jni::MotionData>> GetMotionDatas() = 0;
};

}  // namespace mynteye_jni

View File

@ -0,0 +1,50 @@
// AUTOGENERATED FILE - DO NOT MODIFY!
// This file generated by Djinni from mynteye_types.djinni
// NOTE(review): regenerate from mynteye_types.djinni instead of editing by hand.

#pragma once

#include <cstdint>
#include <string>
#include <utility>

namespace mynteye_jni {

/** Device USB information */
struct DeviceUsbInfo final {
  /** Vendor id */
  int32_t vid;
  /** Product id */
  int32_t pid;
  /** File descriptor */
  int32_t fd;
  /** Bus number */
  int32_t bus_num;
  /** Dev number */
  int32_t dev_num;
  /** Usb file system path */
  std::string usb_fs;
  /** Product name */
  std::string name;
  /** Serial number */
  std::string serial;

  // By-value parameters moved into members (generator pattern; moves on the
  // int32_t fields are no-ops but harmless).
  DeviceUsbInfo(int32_t vid_,
                int32_t pid_,
                int32_t fd_,
                int32_t bus_num_,
                int32_t dev_num_,
                std::string usb_fs_,
                std::string name_,
                std::string serial_)
  : vid(std::move(vid_))
  , pid(std::move(pid_))
  , fd(std::move(fd_))
  , bus_num(std::move(bus_num_))
  , dev_num(std::move(dev_num_))
  , usb_fs(std::move(usb_fs_))
  , name(std::move(name_))
  , serial(std::move(serial_))
  {}
};

}  // namespace mynteye_jni

View File

@ -0,0 +1,25 @@
// AUTOGENERATED FILE - DO NOT MODIFY!
// This file generated by Djinni from mynteye_types.djinni
// NOTE(review): regenerate from mynteye_types.djinni instead of editing by hand.

#pragma once

#include <utility>
#include <vector>

namespace mynteye_jni {

/** Extrinsics, represent how the different datas are connected */
struct Extrinsics final {
  /** Rotation matrix, 3x3 (row-major — TODO confirm against producer) */
  std::vector<double> rotation;
  /** Translation vector, 1x3 */
  std::vector<double> translation;

  Extrinsics(std::vector<double> rotation_,
             std::vector<double> translation_)
  : rotation(std::move(rotation_))
  , translation(std::move(translation_))
  {}
};

}  // namespace mynteye_jni

View File

@ -0,0 +1,32 @@
// AUTOGENERATED FILE - DO NOT MODIFY!
// This file generated by Djinni from mynteye_types.djinni
// NOTE(review): regenerate from mynteye_types.djinni instead of editing by hand.

#pragma once

#include <functional>

namespace mynteye_jni {

/** Pixel format of a frame; see Frame::Format. */
enum class Format : int {
  /** Greyscale, 8 bits per pixel */
  GREY,
  /** YUV 4:2:2, 16 bits per pixel */
  YUYV,
  /** BGR 8:8:8, 24 bits per pixel */
  BGR888,
  /** RGB 8:8:8, 24 bits per pixel */
  RGB888,
};

}  // namespace mynteye_jni

namespace std {

// Hash support so the enum can key unordered containers.
template <>
struct hash<::mynteye_jni::Format> {
  size_t operator()(::mynteye_jni::Format type) const {
    return std::hash<int>()(static_cast<int>(type));
  }
};

}  // namespace std

View File

@ -0,0 +1,34 @@
// AUTOGENERATED FILE - DO NOT MODIFY!
// This file generated by Djinni from mynteye_types.djinni
// NOTE(review): regenerate from mynteye_types.djinni instead of editing by hand.

#pragma once

#include <cstdint>
#include <vector>

namespace mynteye_jni {

enum class Format;

/** Frame with raw data */
class Frame {
public:
    virtual ~Frame() {}

    /** Get the width */
    virtual int32_t Width() = 0;

    /** Get the height */
    virtual int32_t Height() = 0;

    /** Get the pixel format */
    // Return type is fully qualified because the method name shadows the
    // Format enum inside this class scope.
    virtual ::mynteye_jni::Format Format() = 0;

    /** Get the size */
    virtual int32_t Size() = 0;

    /** Get the data */
    virtual std::vector<uint8_t> Data() = 0;
};

}  // namespace mynteye_jni

View File

@ -0,0 +1,29 @@
// AUTOGENERATED FILE - DO NOT MODIFY!
// This file generated by Djinni from mynteye_types.djinni
// NOTE(review): regenerate from mynteye_types.djinni instead of editing by hand.

#pragma once

#include <cstdint>
#include <utility>

namespace mynteye_jni {

/** Image data */
struct ImgData final {
  /** Image frame id */
  int64_t frame_id;
  /** Image timestamp in 1us */
  int64_t timestamp;
  /** Image exposure time, virtual value in [1, 480] */
  int64_t exposure_time;

  ImgData(int64_t frame_id_,
          int64_t timestamp_,
          int64_t exposure_time_)
  : frame_id(std::move(frame_id_))
  , timestamp(std::move(timestamp_))
  , exposure_time(std::move(exposure_time_))
  {}
};

}  // namespace mynteye_jni

View File

@ -0,0 +1,47 @@
// AUTOGENERATED FILE - DO NOT MODIFY!
// This file generated by Djinni from mynteye_types.djinni
// NOTE(review): regenerate from mynteye_types.djinni instead of editing by hand.

#pragma once

#include <cstdint>
#include <utility>
#include <vector>

namespace mynteye_jni {

/** IMU data */
struct ImuData final {
  /** IMU frame id */
  int64_t frame_id;
  /**
   * IMU accel or gyro flag
   * 0: accel and gyro are both valid
   * 1: accel is valid
   * 2: gyro is valid
   */
  int32_t flag;
  /** IMU timestamp in 1us */
  int64_t timestamp;
  /** IMU accelerometer data for 3-axis: X, Y, Z. */
  std::vector<double> accel;
  /** IMU gyroscope data for 3-axis: X, Y, Z. */
  std::vector<double> gyro;
  /** IMU temperature */
  double temperature;

  ImuData(int64_t frame_id_,
          int32_t flag_,
          int64_t timestamp_,
          std::vector<double> accel_,
          std::vector<double> gyro_,
          double temperature_)
  : frame_id(std::move(frame_id_))
  , flag(std::move(flag_))
  , timestamp(std::move(timestamp_))
  , accel(std::move(accel_))
  , gyro(std::move(gyro_))
  , temperature(std::move(temperature_))
  {}
};

}  // namespace mynteye_jni

View File

@ -0,0 +1,38 @@
// AUTOGENERATED FILE - DO NOT MODIFY!
// This file generated by Djinni from mynteye_types.djinni
// NOTE(review): regenerate from mynteye_types.djinni instead of editing by hand.

#pragma once

#include <utility>
#include <vector>

namespace mynteye_jni {

/** IMU intrinsics: scale, drift and variances */
struct ImuIntrinsics final {
  /**
   * Scale matrix 3x3
   *   Scale X     cross axis  cross axis
   *   cross axis  Scale Y     cross axis
   *   cross axis  cross axis  Scale Z
   */
  std::vector<double> scale;
  /** Zero-drift: X, Y, Z 1x3 */
  std::vector<double> drift;
  /** Noise density variances 1x3 */
  std::vector<double> noise;
  /** Random walk variances 1x3 */
  std::vector<double> bias;

  ImuIntrinsics(std::vector<double> scale_,
                std::vector<double> drift_,
                std::vector<double> noise_,
                std::vector<double> bias_)
  : scale(std::move(scale_))
  , drift(std::move(drift_))
  , noise(std::move(noise_))
  , bias(std::move(bias_))
  {}
};

}  // namespace mynteye_jni

View File

@ -0,0 +1,40 @@
// AUTOGENERATED FILE - DO NOT MODIFY!
// This file generated by Djinni from mynteye_types.djinni
// NOTE(review): regenerate from mynteye_types.djinni instead of editing by hand.

#pragma once

#include <functional>

namespace mynteye_jni {

/** Device information fields; see Device::GetInfo. */
enum class Info : int {
  /** Device name */
  DEVICE_NAME,
  /** Serial number */
  SERIAL_NUMBER,
  /** Firmware version */
  FIRMWARE_VERSION,
  /** Hardware version */
  HARDWARE_VERSION,
  /** Spec version */
  SPEC_VERSION,
  /** Lens type */
  LENS_TYPE,
  /** IMU type */
  IMU_TYPE,
  /** Nominal baseline */
  NOMINAL_BASELINE,
};

}  // namespace mynteye_jni

namespace std {

// Hash support so the enum can key unordered containers.
template <>
struct hash<::mynteye_jni::Info> {
  size_t operator()(::mynteye_jni::Info type) const {
    return std::hash<int>()(static_cast<int>(type));
  }
};

}  // namespace std

View File

@ -0,0 +1,55 @@
// AUTOGENERATED FILE - DO NOT MODIFY!
// This file generated by Djinni from mynteye_types.djinni
// NOTE(review): regenerate from mynteye_types.djinni instead of editing by hand.

#pragma once

#include "calibration_model.hpp"
#include <cstdint>
#include <utility>
#include <vector>

namespace mynteye_jni {

/** Stream intrinsics */
struct Intrinsics final {
  /** The calibration model */
  CalibrationModel calib_model;
  /** The width of the image in pixels */
  int32_t width;
  /** The height of the image in pixels */
  int32_t height;
  /** The focal length of the image plane, as a multiple of pixel width (pinhole) */
  double fx;
  /** The focal length of the image plane, as a multiple of pixel height (pinhole) */
  double fy;
  /** The horizontal coordinate of the principal point of the image (pinhole) */
  double cx;
  /** The vertical coordinate of the principal point of the image (pinhole) */
  double cy;
  /**
   * The distortion coefficients; interpretation depends on calib_model:
   *   pinhole: k1,k2,p1,p2,k3
   *   kannala_brandt: k2,k3,k4,k5,mu,mv,u0,v0
   */
  std::vector<double> coeffs;

  Intrinsics(CalibrationModel calib_model_,
             int32_t width_,
             int32_t height_,
             double fx_,
             double fy_,
             double cx_,
             double cy_,
             std::vector<double> coeffs_)
  : calib_model(std::move(calib_model_))
  , width(std::move(width_))
  , height(std::move(height_))
  , fx(std::move(fx_))
  , fy(std::move(fy_))
  , cx(std::move(cx_))
  , cy(std::move(cy_))
  , coeffs(std::move(coeffs_))
  {}
};

}  // namespace mynteye_jni

View File

@ -0,0 +1,30 @@
// AUTOGENERATED FILE - DO NOT MODIFY!
// This file generated by Djinni from mynteye_types.djinni
// NOTE(review): regenerate from mynteye_types.djinni instead of editing by hand.

#pragma once

#include <functional>

namespace mynteye_jni {

/** Device model; see Device::GetModel. */
enum class Model : int {
  /** Standard */
  STANDARD,
  /** Standard 2 */
  STANDARD2,
  /** Standard 210a */
  STANDARD210A,
};

}  // namespace mynteye_jni

namespace std {

// Hash support so the enum can key unordered containers.
template <>
struct hash<::mynteye_jni::Model> {
  size_t operator()(::mynteye_jni::Model type) const {
    return std::hash<int>()(static_cast<int>(type));
  }
};

}  // namespace std

View File

@ -0,0 +1,18 @@
// AUTOGENERATED FILE - DO NOT MODIFY!
// This file generated by Djinni from mynteye_types.djinni
// NOTE(review): regenerate from mynteye_types.djinni instead of editing by hand.

#pragma once

namespace mynteye_jni {

struct ImuData;

/** Device motion data */
class MotionData {
public:
    virtual ~MotionData() {}

    /** The IMU sample carried by this motion data */
    virtual ImuData Imu() = 0;
};

}  // namespace mynteye_jni

View File

@ -0,0 +1,25 @@
// AUTOGENERATED FILE - DO NOT MODIFY!
// This file generated by Djinni from mynteye_types.djinni
// NOTE(review): regenerate from mynteye_types.djinni instead of editing by hand.

#pragma once

#include "imu_intrinsics.hpp"
#include <utility>

namespace mynteye_jni {

/** Motion intrinsics, including accelerometer and gyroscope */
struct MotionIntrinsics final {
  /** Accelerometer intrinsics */
  ImuIntrinsics accel;
  /** Gyroscope intrinsics */
  ImuIntrinsics gyro;

  MotionIntrinsics(ImuIntrinsics accel_,
                   ImuIntrinsics gyro_)
  : accel(std::move(accel_))
  , gyro(std::move(gyro_))
  {}
};

}  // namespace mynteye_jni

Some files were not shown because too many files have changed in this diff Show More