Merge branch 'develop' into rmEigen

This commit is contained in:
Messier 2019-09-05 17:04:19 +08:00
commit d158907f49
88 changed files with 330 additions and 2568 deletions

6
Jenkinsfile vendored
View File

@ -80,20 +80,22 @@ pipeline {
post {
always {
echo 'This will always run'
/*
dingTalk accessToken: '7dca6ae9b1b159b8b4b375e858b71f2e6cec8f73fa20d07552d09791261b2344',
imageUrl: 'http://icon-park.com/imagefiles/loading7_gray.gif',
message: '开始构建',
jenkinsUrl: "${JENKINS_URL}"
*/
}
success {
echo 'This will run only if successful'
/*
dingTalk accessToken: '7dca6ae9b1b159b8b4b375e858b71f2e6cec8f73fa20d07552d09791261b2344',
imageUrl: 'http://icons.iconarchive.com/icons/paomedia/small-n-flat/1024/sign-check-icon.png',
message: '构建成功',
jenkinsUrl: "${JENKINS_URL}"
*/
}
failure {
echo 'This will run only if failed'

View File

@ -43,7 +43,6 @@ help:
@echo " make install install project"
@echo " make test build test and run"
@echo " make samples build samples"
@echo " make tools build tools"
@echo " make pkg package sdk"
@echo " make ros build ros wrapper"
@echo " make py build python wrapper"
@ -51,7 +50,7 @@ help:
.PHONY: help
all: init samples tools ros
all: init samples ros
.PHONY: all
@ -159,20 +158,6 @@ samples: install
.PHONY: samples
# tools
tools: install
@$(call echo,Make $@)
ifeq ($(HOST_OS),Mac)
$(error "Can't make tools on $(HOST_OS)")
else
@$(call cmake_build,./tools/_build)
endif
.PHONY: tools
# pkg
pkg: clean
@ -262,8 +247,6 @@ clean:
@$(call rm,./_install/)
@$(call rm,./samples/_build/)
@$(call rm,./samples/_output/)
@$(call rm,./tools/_build/)
@$(call rm,./tools/_output/)
@$(call rm,./test/_build/)
@$(call rm,./test/_output/)
@$(MAKE) cleanlog

View File

@ -53,7 +53,7 @@ Init project, build samples and run someone.
```bash
make init
make samples
./samples/_output/bin/device/camera_d
./samples/_output/bin/camera_with_junior_device_api
```
## Mirrors

View File

@ -38,7 +38,7 @@ PROJECT_NAME = "MYNT EYE S SDK"
# could be handy for archiving the generated documentation or if some version
# control system is used.
PROJECT_NUMBER = 2.4.1
PROJECT_NUMBER = 2.4.2
# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a

Binary file not shown.

After

Width:  |  Height:  |  Size: 718 KiB

View File

@ -3,7 +3,7 @@
Update Auxiliary Chip Firmware
==================================
Update auxiliary chip (Only Support S2100/S210A)
Update auxiliary chip (Only Support S21XX)
------------------------------------------------
* Plug in the MYNT® EYE camera into a USB3.0 port

View File

@ -8,3 +8,4 @@ IMU Coordinate System
imu_coord_s1030
imu_coord_s2100
imu_coord_s2110

View File

@ -0,0 +1,8 @@
.. _product_imu_coord_s2110:
S2110 Coordinate System
====================================
IMU coordinate system is right-handed, the axis directions are as follows:
.. image:: ../../images/product/mynteye_s2110_imu_coord.jpg

View File

@ -9,3 +9,4 @@ Product Specification
spec_s1030
spec_s1030_ir
spec_s2100
spec_s2110

View File

@ -8,3 +8,4 @@ Product Surface
surface_s1030
surface_s2100
surface_s2110

View File

@ -41,7 +41,7 @@ Product Specification
Synchronization Precision <1ms (up to 0.02ms)
-------------------------- -----------------------------------------------------
IMU Frequency 200Hz
-------------------------- ---------------------------------------
-------------------------- -----------------------------------------------------
Output data format YUYV
-------------------------- -----------------------------------------------------
Data transfer Interface USB3.0
@ -64,7 +64,7 @@ Software
SDK http://www.myntai.com/dev/mynteye
---------------- ---------------------------------------------------------------------------------
Support ORB_SLAM2、OKVIS、Vins-Mono、Vins-Fusion、VIORB
================ ==================================================================================
================ =================================================================================

View File

@ -0,0 +1,105 @@
.. _mynteye_spec_s2110:
S2110-95/Color Product Specification
========================================
Product Specification
-----------------------
========================== =====================================================
Model S2110-95/Color
-------------------------- -----------------------------------------------------
Size PCB dimension:17.74x100mm
Total dimension:125x47x26.6mm
-------------------------- -----------------------------------------------------
Frame Rate 1280x400\@10/20/30/60fps 2560x800\@10/20/30fps
-------------------------- -----------------------------------------------------
Resolution 1280x400; 2560x800
-------------------------- -----------------------------------------------------
Depth Resolution Based on CPU/GPU Up to 1280*400\@60FPS
-------------------------- -----------------------------------------------------
Pixel Size 3.0*3.0μm
-------------------------- -----------------------------------------------------
Baseline 80.0mm
-------------------------- -----------------------------------------------------
Visual Angle D:112° H:95° V:50°
-------------------------- -----------------------------------------------------
Focal Length 2.63mm
-------------------------- -----------------------------------------------------
IR Support No
-------------------------- -----------------------------------------------------
Color Mode Color
-------------------------- -----------------------------------------------------
Depth Working Distance 0.60-7m+
-------------------------- -----------------------------------------------------
Scanning Mode Global Shutter
-------------------------- -----------------------------------------------------
Power 1.1W\@5V DC from USB
-------------------------- -----------------------------------------------------
Synchronization Precision <1ms (up to 0.02ms)
-------------------------- -----------------------------------------------------
IMU Frequency 200Hz
-------------------------- -----------------------------------------------------
Output data format YUYV
-------------------------- -----------------------------------------------------
Data transfer Interface USB3.0
-------------------------- -----------------------------------------------------
Time Sync interface DF50A
-------------------------- -----------------------------------------------------
Weight 100.8g
-------------------------- -----------------------------------------------------
UVC MODE Yes
========================== =====================================================
Software
--------
================ =================================================================================
Support system Windows 10、Ubuntu 14.04/16.04/18.04、ROS indigo/kinetic/melodic、Android 7.0+
---------------- ---------------------------------------------------------------------------------
SDK http://www.myntai.com/dev/mynteye
---------------- ---------------------------------------------------------------------------------
Support ORB_SLAM2、OKVIS、Vins-Mono、Vins-Fusion、VIORB
================ =================================================================================
Work Environment
-----------------
========================= ==============================
Operating Temperature -15°C~55°C
------------------------- ------------------------------
Storage Temperature -20°C~75°C
------------------------- ------------------------------
Humidity 0% to 95% non-condensing
========================= ==============================
Package
---------
=================== =======================================
Package Contents MYNT EYE x1 USB Micro-B Cable x1
=================== =======================================
Warranty
----------
==================== ============================================
Product Warranty 12 Months Limited Manufacturer's Warranty
==================== ============================================
Accuracy
---------
============================ ============================================
Depth Distance Deviation Less than 4%
============================ ============================================

View File

@ -0,0 +1,15 @@
.. _mynteye_surface_s2110:
S2110 Size and Structure
================================
============= ==============
Shell(mm) PCBA board(mm)
============= ==============
125x47x40 100x15
============= ==============
.. image:: ../../images/product/mynteye_s2_surface_zh-Hans.jpg
A. Camera: please pay attention to protect the camera sensor lenses, to avoid imaging quality degradation.
B. USB Micro-B interface and set screw holes: during usage, plug in the USB Micro-B cable and secure it by fastening the set screws to avoid damage to the interface and to ensure stability in connection.

View File

@ -8,6 +8,5 @@ SDK
sdk_install
data/contents
control/contents
tools/contents
project/contents
sdk_changelog

View File

@ -13,7 +13,7 @@ For mynteye s1030, the settings available for adjustment during auto exposure ar
* ``Option::MAX_EXPOSURE_TIME`` Maximum exposure time.
* ``Option::DESIRED_BRIGHTNESS`` Expected brightness.
For mynteye s2100/s210a, the settings available for adjustment during auto exposure are:
For mynteye s21XX, the settings available for adjustment during auto exposure are:
* ``Option::MAX_GAIN`` Maximum gain.
* ``Option::MAX_EXPOSURE_TIME`` Maximum exposure time.
@ -45,7 +45,7 @@ s1030
LOG(INFO) << "Set DESIRED_BRIGHTNESS to "
<< api->GetOptionValue(Option::DESIRED_BRIGHTNESS);
s2100/s210a
s21XX
.. code-block:: c++
@ -86,7 +86,7 @@ s1030
.. code-block:: bash
$ ./samples/_output/bin/tutorials/ctrl_auto_exposure
$ ./samples/_output/bin/ctrl_auto_exposure
I0513 14:07:57.963943 31845 utils.cc:26] Detecting MYNT EYE devices
I0513 14:07:58.457536 31845 utils.cc:33] MYNT EYE devices:
I0513 14:07:58.457563 31845 utils.cc:37] index: 0, name: MYNT-EYE-S1000
@ -97,11 +97,11 @@ s1030
I0513 14:07:58.521375 31845 auto_exposure.cc:41] Set DESIRED_BRIGHTNESS to 192
s2100/s210a
s21XX
.. code-block:: bash
$ ./samples/_output/bin/tutorials/ctrl_auto_exposure
$ ./samples/_output/bin/ctrl_auto_exposure
I/utils.cc:30 Detecting MYNT EYE devices
I/utils.cc:40 MYNT EYE devices:
I/utils.cc:43 index: 0, name: MYNT-EYE-S210A, sn: 07C41A190009071F
@ -126,4 +126,4 @@ s2100/s210a
The sample program displays an image with a real exposure time in the upper left corner, in milliseconds.
Complete code examples, see `auto_exposure.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/tutorials/control/auto_exposure.cc>`_ .
Complete code examples, see `ctrl_auto_exposure.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/ctrl_auto_exposure.cc>`_ .

View File

@ -12,7 +12,7 @@ For mynteye s1030, to set the image frame rate and IMU frequency, set ``Option::
* The effective fps of the image: 10, 15, 20, 25, 30, 35, 40, 45, 50, 55, 60.
* The effective frequency of IMU: 100, 200, 250, 333, 500.
For mynteye s2100/s210a, the image frame rate should be selected when running the sample, and the frame rate and resolution are combined as follows:
For mynteye s21XX, the image frame rate should be selected when running the sample, and the frame rate and resolution are combined as follows:
.. code-block:: bash
@ -45,7 +45,7 @@ s1030
LOG(INFO) << "Set IMU_FREQUENCY to "
<< api->GetOptionValue(Option::IMU_FREQUENCY);
s2100/s210a
s21XX
.. code-block:: c++
@ -67,7 +67,7 @@ s1030
.. code-block:: bash
$ ./samples/_output/bin/tutorials/ctrl_framerate
$ ./samples/_output/bin/ctrl_framerate
I0513 14:05:57.218222 31813 utils.cc:26] Detecting MYNT EYE devices
I0513 14:05:57.899404 31813 utils.cc:33] MYNT EYE devices:
I0513 14:05:57.899430 31813 utils.cc:37] index: 0, name: MYNT-EYE-S1000
@ -78,11 +78,11 @@ s1030
I0513 14:06:21.702388 31813 framerate.cc:85] Img count: 573, fps: 24.6122
I0513 14:06:21.702404 31813 framerate.cc:87] Imu count: 11509, hz: 494.348
s2100/s210a
s21XX
.. code-block:: bash
$ ./samples/_output/bin/tutorials/ctrl_framerate
$ ./samples/_output/bin/ctrl_framerate
I/utils.cc:30 Detecting MYNT EYE devices
I/utils.cc:40 MYNT EYE devices:
I/utils.cc:43 index: 0, name: MYNT-EYE-S210A, sn: 07C41A190009071F
@ -105,4 +105,4 @@ s2100/s210a
After the sample program finishes running with ``ESC/Q``, it will output the calculated value of the frame rate of image & IMU frequency.
Complete code samples, please see `framerate.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/tutorials/control/framerate.cc>`_ .
Complete code samples, please see `ctrl_framerate.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/ctrl_framerate.cc>`_ .

View File

@ -9,11 +9,11 @@ To set the IIC address, set ``Option::IIC_ADDRESS_SETTING``.
.. Attention::
Only support S210A/2100
Only support S21XX
Reference Code:
s210a/s2100
s21XX
.. code-block:: c++
@ -33,11 +33,11 @@ s210a/s2100
Reference running results on Linux:
s210a/s2100
s21XX
.. code-block:: bash
$ ./samples/_output/bin/tutorials/ctrl_iic_address
$ ./samples/_output/bin/ctrl_iic_address
I/utils.cc:30 Detecting MYNT EYE devices
I/utils.cc:40 MYNT EYE devices:
I/utils.cc:43 index: 0, name: MYNT-EYE-S210A, sn: 07C41A190009071F
@ -55,4 +55,4 @@ s210a/s2100
I/imu_range.cc:51 Set iic address to 0x31
After the sample program finishes running with ``ESC/Q``.
Complete code samples, please see `iic_address.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/tutorials/control/iic_address.cc>`_ .
Complete code samples, please see `ctrl_iic_address.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/ctrl_iic_address.cc>`_ .

View File

@ -37,7 +37,7 @@ Reference running results on Linux:
.. code-block:: bash
$ ./samples/_output/bin/tutorials/ctrl_imu_low_pass_filter
$ ./samples/_output/bin/ctrl_imu_low_pass_filter
I/utils.cc:30 Detecting MYNT EYE devices
I/utils.cc:40 MYNT EYE devices:
I/utils.cc:43 index: 0, name: MYNT-EYE-S210A, sn: 07C41A190009071F
@ -60,4 +60,4 @@ Reference running results on Linux:
After the sample program finishes running with ``ESC/Q``, the low-pass filter of imu setting is complete. The ranges will be kept inside the hardware and not affected by power off.
Complete code samples, please see `imu_low_pass_filter.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/tutorials/control/imu_low_pass_filter.cc>`_
Complete code samples, please see `ctrl_imu_low_pass_filter.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/ctrl_imu_low_pass_filter.cc>`_

View File

@ -13,7 +13,7 @@ To set the range of accelerometer and gyroscope, set ``Option::ACCELEROMETER_RAN
* The effective range of accelerometer(unit:g): 4, 8, 16, 32.
* Gyroscope Range Valid value (unit: DEG/S): 500, 1000, 2000, 4000.
For mynteye s2100/s210a, the available settings are:
For mynteye s21XX, the available settings are:
* The effective range of accelerometer(unit:g): 6, 12, 24, 48.
* The effective range of gyroscope(unit:deg/s): 250, 500, 1000, 2000, 4000.
@ -38,7 +38,7 @@ s1030
LOG(INFO) << "Set GYROSCOPE_RANGE to "
<< api->GetOptionValue(Option::GYROSCOPE_RANGE);
s2100/s210a
s21XX
.. code-block:: c++
@ -67,7 +67,7 @@ s1030
.. code-block:: bash
$ ./samples/_output/bin/tutorials/ctrl_imu_range
$ ./samples/_output/bin/ctrl_imu_range
I/utils.cc:28 Detecting MYNT EYE devices
I/utils.cc:38 MYNT EYE devices:
I/utils.cc:41 index: 0, name: MYNT-EYE-S1030, sn: 4B4C1F1100090712
@ -78,11 +78,11 @@ s1030
I/imu_range.cc:84 Img count: 363, fps: 25.0967
I/imu_range.cc:86 Imu count: 2825, hz: 195.312
s2100/s210a
s21XX
.. code-block:: bash
$ ./samples/_output/bin/tutorials/ctrl_imu_range
$ ./samples/_output/bin/ctrl_imu_range
I/utils.cc:30 Detecting MYNT EYE devices
I/utils.cc:40 MYNT EYE devices:
I/utils.cc:43 index: 0, name: MYNT-EYE-S210A, sn: 07C41A190009071F
@ -105,4 +105,4 @@ s2100/s210a
After the sample program finishes running with ``ESC/Q``, the ranges of imu setting is complete. The ranges will be kept inside the hardware and not affected by power off.
Complete code samples, please see `imu_range.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/tutorials/control/imu_range.cc>`_.
Complete code samples, please see `ctrl_imu_range.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/ctrl_imu_range.cc>`_.

View File

@ -8,7 +8,7 @@ Using the ``SetOptionValue()`` function of the API, you can set various control
Enabling IR is setting ``Option::IR_CONTROL`` greater than 0. The greater the value, the greater the IR's intensity.
.. Attention::
* mynteye s2100/s210a doesn't support this feature.
* mynteye s21XX doesn't support this feature.
Reference Code:
@ -33,7 +33,7 @@ Reference running results on Linux:
.. code-block:: bash
$ ./samples/_output/bin/tutorials/ctrl_infrared
$ ./samples/_output/bin/ctrl_infrared
I0504 16:16:28.016624 25848 utils.cc:13] Detecting MYNT EYE devices
I0504 16:16:28.512462 25848 utils.cc:20] MYNT EYE devices:
I0504 16:16:28.512473 25848 utils.cc:24] index: 0, name: MYNT-EYE-S1000
@ -49,6 +49,6 @@ At this point, if the image is displayed, you can see IR speckle on the image, a
.. attention::
The hardware will not record the IR value after being turned off. In order to keep IR enabled, you must set the IR value after turning on the device.
The hardware will not record the IR value after being turned off and will reset to 0. In order to keep IR enabled, you must set the IR value after turning on the device.
Complete code samples, see `infrared.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/tutorials/control/infrared.cc>`_ .
Complete code samples, see `ctrl_infrared.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/ctrl_infrared.cc>`_ .

View File

@ -13,7 +13,7 @@ For mynteye s1030, during manual exposure, the settings available for adjustment
* ``Option::BRIGHTNESS`` Brightness (Exposure time).
* ``Option::CONTRAST`` Contrast (Black level calibration).
For mynteye s2100/s210a, during manual exposure, the settings available for adjustment are:
For mynteye s21XX, during manual exposure, the settings available for adjustment are:
* ``Option::BRIGHTNESS`` Brightness (Exposure time).
@ -41,7 +41,7 @@ s1030
LOG(INFO) << "Set BRIGHTNESS to " << api->GetOptionValue(Option::BRIGHTNESS);
LOG(INFO) << "Set CONTRAST to " << api->GetOptionValue(Option::CONTRAST);
s2100/s210a
s21XX
.. code-block:: c++
@ -71,7 +71,7 @@ s1030
.. code-block:: bash
$ ./samples/_output/bin/tutorials/ctrl_manual_exposure
$ ./samples/_output/bin/ctrl_manual_exposure
I0513 14:09:17.104431 31908 utils.cc:26] Detecting MYNT EYE devices
I0513 14:09:17.501519 31908 utils.cc:33] MYNT EYE devices:
I0513 14:09:17.501551 31908 utils.cc:37] index: 0, name: MYNT-EYE-S1000
@ -81,11 +81,11 @@ s1030
I0513 14:09:17.552958 31908 manual_exposure.cc:39] Set BRIGHTNESS to 120
I0513 14:09:17.552963 31908 manual_exposure.cc:40] Set CONTRAST to 116
s2100/s210a
s21XX
.. code-block:: bash
$ ./samples/_output/bin/tutorials/ctrl_manual_exposure
$ ./samples/_output/bin/ctrl_manual_exposure
I/utils.cc:30 Detecting MYNT EYE devices
I/utils.cc:40 MYNT EYE devices:
I/utils.cc:43 index: 0, name: MYNT-EYE-S210A, sn: 07C41A190009071F
@ -107,4 +107,4 @@ s2100/s210a
The sample program displays an image with a real exposure time in the upper left corner, in milliseconds.
Complete code samples, see `manual_exposure.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/tutorials/control/manual_exposure.cc>`_ .
Complete code samples, see `ctrl_manual_exposure.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/ctrl_manual_exposure.cc>`_ .

View File

@ -17,4 +17,7 @@ SDK Data Samples
get_imu_correspondence
get_from_callbacks
get_with_plugin
save_params
save_single_image
write_img_params
write_imu_params

View File

@ -47,8 +47,8 @@ Reference code snippet:
The above code uses OpenCV to display the image. When the display window is selected, pressing ``ESC/Q`` will end the program.
Complete code examples, see `get_depth.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/tutorials/data/get_depth.cc>`_ .
Complete code examples, see `get_depth.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/get_depth.cc>`_ .
.. tip::
Preview the value of a region of the depth image, see `get_depth_with_region.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/api/get_depth_with_region.cc>`_ .
Preview the value of a region of the depth image, see `get_depth_with_region.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/get_depth_with_region.cc>`_ .

View File

@ -24,7 +24,7 @@ Reference result on Linux:
.. code-block:: bash
$ ./samples/_output/bin/tutorials/get_device_info
$ ./samples/_output/bin/get_device_info
I0503 16:40:21.109391 32106 utils.cc:13] Detecting MYNT EYE devices
I0503 16:40:21.604116 32106 utils.cc:20] MYNT EYE devices:
I0503 16:40:21.604127 32106 utils.cc:24] index: 0, name: MYNT-EYE-S1000
@ -38,4 +38,4 @@ Reference result on Linux:
I0503 16:40:21.615164 32106 get_device_info.cc:16] IMU type: 0000
I0503 16:40:21.615171 32106 get_device_info.cc:17] Nominal baseline: 120
Complete code examples, see `get_device_info.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/tutorials/data/get_device_info.cc>`_ .
Complete code examples, see `get_device_info.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/get_device_info.cc>`_ .

View File

@ -59,4 +59,4 @@ Reference code snippet:
The above code uses OpenCV to display the image. Select the display window, press ``ESC/Q`` to exit in the program.
Complete code examples, see `get_disparity.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/tutorials/data/get_disparity.cc>`_ .
Complete code examples, see `get_disparity.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/get_disparity.cc>`_ .

View File

@ -116,4 +116,4 @@ Reference code snippet:
OpenCV is used to display images and data above. When the window is selected, pressing ``ESC/Q`` will exit program.
Complete code examples, see `get_from_callbacks.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/tutorials/data/get_from_callbacks.cc>`_ .
Complete code examples, see `get_from_callbacks.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/get_from_callbacks.cc>`_ .

View File

@ -7,9 +7,13 @@ Use ``GetIntrinsics()`` & ``GetExtrinsics()`` to get image calibration parameter
.. tip::
The detailed meaning of parameters can reference the files in ``tools/writer/config`` , of these
the image calibration parameters of S2100/S210A are in ``tools/writer/config/S210A``
the image calibration parameters of S21XX are in ``tools/writer/config/S21XX``
the image calibration parameters of S1030 are in ``tools/writer/config/S1030``
Note
Camera Intrinsics/Extrinsics, please ref to: ros `CameraInfo <http://docs.ros.org/melodic/api/sensor_msgs/html/msg/CameraInfo.html>`_.
Reference code snippet:
.. code-block:: c++
@ -27,7 +31,7 @@ Reference result on Linux:
.. code-block:: bash
$ ./samples/_output/bin/tutorials/get_img_params
$ ./samples/_output/bin/get_img_params
I0510 15:00:22.643263 6980 utils.cc:26] Detecting MYNT EYE devices
I0510 15:00:23.138811 6980 utils.cc:33] MYNT EYE devices:
I0510 15:00:23.138849 6980 utils.cc:37] index: 0, name: MYNT-EYE-S1000
@ -36,4 +40,4 @@ Reference result on Linux:
I0510 15:00:23.210551 6980 get_img_params.cc:24] Intrinsics right: {width: 752, height: 480, fx: 736.38305001095545776, fy: 723.50066150722432212, cx: 456.68367112303980093, cy: 250.70083335536796199, model: 0, coeffs: [-0.51012886039889305, 0.38764476500996770, 0.00000000000000000, 0.00000000000000000, 0.00000000000000000]}
I0510 15:00:23.210577 6980 get_img_params.cc:26] Extrinsics left to right: {rotation: [0.99701893306553813, -0.00095378124886237, -0.07715139279485062, 0.00144939967628305, 0.99997867219985104, 0.00636823256494144, 0.07714367342455503, -0.00646107164115277, 0.99699905125522237], translation: [-118.88991734400046596, -0.04560580387053091, -3.95313736911933855]}
Complete code examples, see `get_img_params.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/tutorials/data/get_img_params.cc>`_ .
Complete code examples, see `get_img_params.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/get_img_params.cc>`_ .

View File

@ -47,8 +47,13 @@ Sample code snippet:
*/
painter.DrawImgData(img, *left_data.img);
if (!motion_datas.empty()) {
painter.DrawImuData(img, *motion_datas[0].imu);
static std::vector<api::MotionData> motion_datas_s = motion_datas;
if (!motion_datas.empty() && motion_datas.size() > 0) {
motion_datas_s = motion_datas;
}
if (!motion_datas_s.empty() && motion_datas_s.size() > 0) {
painter.DrawImuData(img, *motion_datas_s[0].imu);
}
cv::imshow("frame", img);
@ -63,4 +68,4 @@ Sample code snippet:
OpenCV is used to display image and data. When window is selected, press ``ESC/Q`` to exit program.
Complete code examples, see `get_imu.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/tutorials/data/get_imu.cc>`_ .
Complete code examples, see `get_imu.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/get_imu.cc>`_ .

View File

@ -58,7 +58,7 @@ Reference result on Linux:
.. code-block:: bash
$ ./samples/_output/bin/tutorials/get_imu_correspondence
$ ./samples/_output/bin/get_imu_correspondence
I/utils.cc:30 Detecting MYNT EYE devices
I/utils.cc:40 MYNT EYE devices:
I/utils.cc:43 index: 0, name: MYNT-EYE-S1030, sn: 0281351000090807
@ -106,4 +106,4 @@ Reference result on Linux:
I/get_imu_correspondence.cc:61 Imu timestamp: 171419650, diff_prev=2020, diff_img=16608
I/get_imu_correspondence.cc:61 Imu timestamp: 171421660, diff_prev=2010, diff_img=18624
Complete code examples, see `get_imu_correspondence.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/tutorials/data/get_imu_correspondence.cc>`_ .
Complete code examples, see `get_imu_correspondence.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/get_imu_correspondence.cc>`_ .

View File

@ -15,4 +15,4 @@ Reference commands:
LOG(INFO) << "Motion extrinsics left to imu: {"
<< api->GetMotionExtrinsics(Stream::LEFT) << "}";
Complete code examples, see `get_imu_params.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/tutorials/data/get_imu_params.cc>`_ .
Complete code examples, see `get_imu_params.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/get_imu_params.cc>`_ .

View File

@ -7,7 +7,6 @@ Point images belongs to upper layer of synthetic data. To get this kind of data
For detail process description, please see :ref:`get_stereo` :ref:`get_stereo_rectified` .
It is recommended to use plugin to calculate depth: the depth map will be better with a higher frame rate. Please see :ref:`get_with_plugin` for detail.
Sample code snippet:
@ -50,7 +49,7 @@ Sample code snippet:
`PCL <https://github.com/PointCloudLibrary/pcl>`_ is used to display point images above. Program will close when point image window is closed.
Complete code examples, see `get_points.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/tutorials/data/get_points.cc>`_ .
Complete code examples, see `get_depth_and_points.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/get_depth_and_points.cc>`_ .
.. attention::

View File

@ -37,4 +37,4 @@ Reference commands:
The above code uses OpenCV to display the image. When the display window is selected, pressing ``ESC/Q`` will end the program.
Complete code examples, see `get_stereo.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/tutorials/data/get_stereo.cc>`_ .
Complete code examples, see `get_stereo.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/get_stereo.cc>`_ .

View File

@ -48,4 +48,4 @@ Reference code snippet:
OpenCV is used to display the image above. Select the display window, press ``ESC/Q`` to exit the program.
Complete code examples, see `get_stereo_rectified.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/tutorials/data/get_stereo_rectified.cc>`_ .
Complete code examples, see `get_stereo_rectified.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/get_stereo_rectified.cc>`_ .

View File

@ -83,7 +83,7 @@ In addition, the following command can be executed to check whether the dependen
If the plugin's dependent library is not found, it will report an error \"Open plugin failed\" when loading.
Complete code sample, see `get_with_plugin.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/tutorials/data/get_with_plugin.cc>`_ .
Complete code sample, see `get_with_plugin.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/get_with_plugin.cc>`_ .
.. tip::

View File

@ -3,22 +3,22 @@
Save Device Information And Parameters
======================================
The SDK provides a tool ``save_all_infos`` for saving information and parameters. For more information, please read `tools/README.md <https://github.com/slightech/MYNT-EYE-S-SDK/tree/master/tools>`_ .
The SDK provides a tool ``save_all_infos`` for saving information and parameters.
Reference commands:
.. code-block:: bash
./tools/_output/bin/writer/save_all_infos
./samples/_output/bin/save_all_infos
# Windows
.\tools\_output\bin\writer\save_all_infos.bat
.\samples\_output\bin\save_all_infos.bat
Reference result on Linux:
.. code-block:: bash
$ ./tools/_output/bin/writer/save_all_infos
$ ./samples/_output/bin/save_all_infos
I0512 21:40:08.687088 4092 utils.cc:26] Detecting MYNT EYE devices
I0512 21:40:09.366693 4092 utils.cc:33] MYNT EYE devices:
I0512 21:40:09.366734 4092 utils.cc:37] index: 0, name: MYNT-EYE-S1000

View File

@ -49,4 +49,4 @@ Reference commands:
The above code uses OpenCV to display the image. When the display window is selected, pressing ``ESC/Q`` will end the program.
Complete code examples, see `save_single_image.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/tutorials/data/save_single_image.cc>`_ .
Complete code examples, see `save_single_image.cc <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/save_single_image.cc>`_ .

View File

@ -3,7 +3,7 @@
Write Image Parameters
=======================
The SDK provides a tool ``img_params_writer`` for writing image parameters. For details, read `tools/README.md <https://github.com/slightech/MYNT-EYE-S-SDK/tree/master/tools>`_ .
The SDK provides a tool ``write_img_params`` for writing image parameters.
For getting image parameters, please read :ref:`get_img_params`. This is used to calculate the deviation.
@ -11,21 +11,21 @@ Reference commands:
.. code-block:: bash
./tools/_output/bin/writer/img_params_writer tools/writer/config/img.params
./samples/_output/bin/write_img_params samples/config/img.params
# Windows
.\tools\_output\bin\writer\img_params_writer.bat tools\writer\config\img.params
.\samples\_output\bin\write_img_params.bat samples\config\img.params
.. warning::
Please don't override parameters, you can use ``save_all_infos`` to backup parameters.
And, `tools/writer/config/S1030/img.params.pinhole <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/tools/writer/config/S1030/img.params.pinhole>`_ is the path of S1030 pinhole parameters file. If you calibrated parameters yourself, you can edit it and run previous commands to write them into the devices.
And, `samples/config/S1030/img.params.pinhole <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/config/S1030/img.params.pinhole>`_ is the path of S1030 pinhole parameters file. If you calibrated parameters yourself, you can edit it and run previous commands to write them into the devices.
.. tip::
The image calibration parameters of S2100/S210A are in ``tools/writer/config/S210A``
The image calibration parameters of S1030 are in ``tools/writer/config/S1030``
The image calibration parameters of S21XX are in ``samples/config/S21XX``
The image calibration parameters of S1030 are in ``samples/config/S1030``
.. tip::

View File

@ -3,7 +3,7 @@
Write IMU Parameters
=====================
SDK provides the tool ``imu_params_writer`` to write IMU parameters. For deltail, please read `tools/README.md <https://github.com/slightech/MYNT-EYE-S-SDK/tree/master/tools>`_ .
SDK provides the tool ``write_imu_params`` to write IMU parameters.
Information about how to get IMU parameters, please read :ref:`get_imu_params` .
@ -11,12 +11,12 @@ Reference commands:
.. code-block:: bash
./tools/_output/bin/writer/imu_params_writer tools/writer/config/imu.params
./samples/_output/bin/write_imu_params samples/config/imu.params
# Windows
.\tools\_output\bin\writer\imu_params_writer.bat tools\writer\config\imu.params
.\samples\_output\bin\write_imu_params.bat samples\config\imu.params
The path of parameters file can be found in `tools/writer/config/img.params <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/tools/writer/config/img.params>`_ . If you calibrated the parameters yourself, you can edit the file and run above commands to write them into the device.
The path of the parameters file can be found in `samples/config/imu.params <https://github.com/slightech/MYNT-EYE-S-SDK/blob/master/samples/config>`_ . If you calibrated the parameters yourself, you can edit the file and run the above commands to write them into the device.
.. warning::

View File

@ -5,7 +5,7 @@ How to use SDK with CMake
This tutorial will create a project with CMake to start using SDK.
You could find the project demo in ``<sdk>/platforms/projects/cmake directory`` .
You could find the project demo in ``<sdk>/samples/simple_demo/project_cmake directory`` .
Preparation
-----------

View File

@ -5,7 +5,7 @@ How to use SDK with Visual Studio 2017
This tutorial will create a project with Visual Studio 2017 to start using SDK.
You could find the project demo in ``<sdk>/platforms/projects/vs2017`` directory.
You could find the project demo in ``<sdk>/samples/simple_demo/project_vs2017`` directory.
Preparation
------------

View File

@ -1,58 +0,0 @@
.. _analytics_dataset:
Recording Data Sets
====================
The SDK provides the tool ``record`` for recording data sets. Tool details can be seen in `tools/README.md <https://github.com/slightech/MYNT-EYE-S-SDK/tree/master/tools>`_ .
Reference run command:
.. code-block:: bash
./tools/_output/bin/dataset/record2
# Windows
.\tools\_output\bin\dataset\record2.bat
Reference run results on Linux:
.. code-block:: bash
$ ./tools/_output/bin/dataset/record
I0513 21:28:57.128947 11487 utils.cc:26] Detecting MYNT EYE devices
I0513 21:28:57.807116 11487 utils.cc:33] MYNT EYE devices:
I0513 21:28:57.807155 11487 utils.cc:37] index: 0, name: MYNT-EYE-S1000
I0513 21:28:57.807163 11487 utils.cc:43] Only one MYNT EYE device, select index: 0
I0513 21:28:57.808437 11487 channels.cc:114] Option::GAIN: min=0, max=48, def=24, cur=24
I0513 21:28:57.809999 11487 channels.cc:114] Option::BRIGHTNESS: min=0, max=240, def=120, cur=120
I0513 21:28:57.818678 11487 channels.cc:114] Option::CONTRAST: min=0, max=255, def=127, cur=127
I0513 21:28:57.831529 11487 channels.cc:114] Option::FRAME_RATE: min=10, max=60, def=25, cur=25
I0513 21:28:57.848914 11487 channels.cc:114] Option::IMU_FREQUENCY: min=100, max=500, def=200, cur=500
I0513 21:28:57.865185 11487 channels.cc:114] Option::EXPOSURE_MODE: min=0, max=1, def=0, cur=0
I0513 21:28:57.881434 11487 channels.cc:114] Option::MAX_GAIN: min=0, max=48, def=48, cur=48
I0513 21:28:57.897598 11487 channels.cc:114] Option::MAX_EXPOSURE_TIME: min=0, max=240, def=240, cur=240
I0513 21:28:57.913918 11487 channels.cc:114] Option::DESIRED_BRIGHTNESS: min=0, max=255, def=192, cur=192
I0513 21:28:57.930177 11487 channels.cc:114] Option::IR_CONTROL: min=0, max=160, def=0, cur=0
I0513 21:28:57.946341 11487 channels.cc:114] Option::HDR_MODE: min=0, max=1, def=0, cur=0
Saved 1007 imgs, 20040 imus to ./dataset
I0513 21:29:38.608772 11487 record.cc:118] Time beg: 2018-05-13 21:28:58.255395, end: 2018-05-13 21:29:38.578696, cost: 40323.3ms
I0513 21:29:38.608853 11487 record.cc:121] Img count: 1007, fps: 24.9732
I0513 21:29:38.608873 11487 record.cc:123] Imu count: 20040, hz: 496.983
Results are saved into ``<workdir>/dataset`` by default. You can also add a parameter to select another directory to save to.
Record contents:
.. code-block:: none
<workdir>/
└─dataset/
├─left/
│ ├─stream.txt # Image information
│ ├─000000.png # Image index 0
│ └─...
├─right/
│ ├─stream.txt # Image information
│ ├─000000.png # Image index 0
│ └─...
└─motion.txt # IMU information

View File

@ -1,38 +0,0 @@
.. _analytics_imu:
Analyzing IMU
==============
The SDK provides the script ``imu_analytics.py`` for IMU analysis. The tool details can be seen in `tools/README.md <https://github.com/slightech/MYNT-EYE-S-SDK/tree/master/tools>`_ .
Refer to run commands and results on Linux:
.. code-block:: bash
$ python tools/analytics/imu_analytics.py -i dataset -c tools/config/mynteye/mynteye_config.yaml -al=-1.2,1.2 -gl= -gdu=d -gsu=d -kl=
imu analytics ...
input: dataset
outdir: dataset
gyro_limits: None
accel_limits: [(-1.2, 1.2), (-1.2, 1.2), (-1.2, 1.2), (-1.2, 1.2)]
time_unit: None
time_limits: None
auto: False
gyro_show_unit: d
gyro_data_unit: d
temp_limits: None
open dataset ...
imu: 20040, temp: 20040
timebeg: 4.384450, timeend: 44.615550, duration: 40.231100
save figure to:
dataset/imu_analytics.png
imu analytics done
The analysis result graph will be saved in the data set directory, as follows:
.. image:: ../../../images/sdk/tools/imu_analytics.png
In addition, the script specific options can be executed ``-h``:
.. code-block:: bash
$ python tools/analytics/imu_analytics.py -h

View File

@ -1,55 +0,0 @@
.. _analytics_stamp:
Analyze Time Stamps
====================
SDK provides a script for timestamp analysis ``stamp_analytics.py`` . Tool details are visible in `tools/README.md <https://github.com/slightech/MYNT-EYE-S-SDK/tree/master/tools>`_ .
Reference run commands and results on Linux:
.. code-block:: bash
$ python tools/analytics/stamp_analytics.py -i dataset -c tools/config/mynteye/mynteye_config.yaml
stamp analytics ...
input: dataset
outdir: dataset
open dataset ...
save to binary files ...
binimg: dataset/stamp_analytics_img.bin
binimu: dataset/stamp_analytics_imu.bin
img: 1007, imu: 20040
rate (Hz)
img: 25, imu: 500
sample period (s)
img: 0.04, imu: 0.002
diff count
imgs: 1007, imus: 20040
imgs_t_diff: 1006, imus_t_diff: 20039
diff where (factor=0.1)
imgs where diff > 0.04*1.1 (0)
imgs where diff < 0.04*0.9 (0)
imus where diff > 0.002*1.1 (0)
imus where diff < 0.002*0.9 (0)
image timestamp duplicates: 0
save figure to:
dataset/stamp_analytics.png
stamp analytics done
The analysis result graph will be saved in the dataset directory, as follows:
.. image:: ../../../images/sdk/tools/stamp_analytics.png
In addition, the script specific options can be executed ``-h`` to understand:
.. code-block:: bash
$ python tools/analytics/stamp_analytics.py -h
.. tip::
Suggestion: when recording data sets, comment out the image display (``cv::imshow()``) in ``record.cc`` and the image writing (``cv::imwrite()``) in ``dataset.cc``. Because these operations are time-consuming, they can cause images to be discarded — in other words, consumption can't keep up with production, so some images are dropped. ``GetStreamDatas()`` used in ``record.cc`` only caches the latest 4 images.

View File

@ -1,9 +0,0 @@
SDK Tools
==================
.. toctree::
:maxdepth: 2
analytics_dataset
analytics_imu
analytics_stamp

View File

@ -1,269 +0,0 @@
.. _calibration_tool:
Calibration Tool Manual
=======================
Introduction
--------
1.1 Support Platform
--------
Currently the calibration tool only supports Ubuntu 16.04 LTS, but it supports both the official and the ROS-provided versions of the OpenCV dependency.
==================== ==================== ======================
Platform Architecture Different dependence
==================== ==================== ======================
Ubuntu 16.04 LTS x64(amd64) libopencv-dev
Ubuntu 16.04 LTS x64(amd64) ros-kinetic-opencv3
==================== ==================== ======================
1.2 Tools description
--------
Deb/ppa installation package is available on Ubuntu. The architecture, dependencies, and versions will be distinguished from the name:
* mynteye-s-calibrator-opencv-official-1.0.0_amd64.deb
* mynteye-s-calibrator-opencv-ros-kinetic-1.0.0_amd64.deb
======================= ==================== ============================================================
Dependency identifier Dependency package Detailed description
======================= ==================== ============================================================
opencv-official libopencv-dev https://packages.ubuntu.com/xenial/libopencv-dev
opencv-ros-kinetic ros-kinetic-opencv3 http://wiki.ros.org/opencv3
======================= ==================== ============================================================
1.3 Deb Toolkit Get
--------
==================== ========================================================================
Method of Obtaining Get address
==================== ========================================================================
Baidu Cloud https://pan.baidu.com/s/19rW0fPKUlQj6eldZpZFoAA Extraction code: a6ps
Google Drive https://drive.google.com/open?id=1RsV2WEKAsfxbn-Z5nGjk5g3ml1UDEsDc
==================== ========================================================================
Installation
--------
2.1 Installation Preparation
--------
* Ubuntu 16.04 LTS environment, x64 architecture
* Deb package for the calibration tool, select OpenCV dependencies as needed
(this step is not required for PPA installation)
2.2 Install ppa Package
--------
.. code-block:: bash
$ sudo add-apt-repository ppa:slightech/mynt-eye-s-sdk
$ sudo apt-get update
$ sudo apt-get install mynteye-s-calibrator
$ sudo ln -sf /opt/myntai/mynteye-s-calibrator/mynteye-s-calibrator /usr/local/bin/mynteye-s-calibrator
2.3 Install deb Package
--------
Install the deb package with sudo dpkg -i:
.. code-block:: bash
$ sudo dpkg -i mynteye-s-calibrator-opencv-official-1.0.0_amd64.deb
...
(Reading database ... 359020 files and directories currently installed.)
Preparing to unpack mynteye-s-calibrator-opencv-official-1.0.0_amd64.deb ...
Unpacking mynteye-s-calibrator (1.0.0) over (1.0.0) ...
Setting up mynteye-s-calibrator (1.0.0) ...
If you encounter an error that the dependency package is not installed, for example:
.. code-block:: bash
$ sudo dpkg -i mynteye-s-calibrator-opencv-official-1.0.0_amd64.deb
Selecting previously unselected package mynteye-s-calibrator.
(Reading database ... 358987 files and directories currently installed.)
Preparing to unpack mynteye-s-calibrator-opencv-official-1.0.0_amd64.deb ...
Unpacking mynteye-s-calibrator (1.0.0) ...
dpkg: dependency problems prevent configuration of mynteye-s-calibrator:
mynteye-s-calibrator depends on libatlas-base-dev; however:
Package libatlas-base-dev is not installed.
dpkg: error processing package mynteye-s-calibrator (--install):
dependency problems - leaving unconfigured
Errors were encountered while processing:
mynteye-s-calibrator
You can continue with sudo apt-get -f install to finish the installation:
.. code-block:: bash
$ sudo apt-get -f install
Reading package lists... Done
Building dependency tree
Reading state information... Done
Correcting dependencies... Done
The following additional packages will be installed:
libatlas-base-dev
Suggested packages:
libblas-doc liblapack-doc
The following NEW packages will be installed:
libatlas-base-dev
0 upgraded, 1 newly installed, 0 to remove and 0 not upgraded.
1 not fully installed or removed.
Need to get 3,596 kB of archives.
After this operation, 30.8 MB of additional disk space will be used.
Do you want to continue? [Y/n]
Get:1 http://cn.archive.ubuntu.com/ubuntu xenial/universe amd64 libatlas-base-dev amd64 3.10.2-9 [3,596 kB]
Fetched 3,596 kB in 3s (1,013 kB/s)
Selecting previously unselected package libatlas-base-dev.
(Reading database ... 358993 files and directories currently installed.)
Preparing to unpack .../libatlas-base-dev_3.10.2-9_amd64.deb ...
Unpacking libatlas-base-dev (3.10.2-9) ...
Setting up libatlas-base-dev (3.10.2-9) ...
update-alternatives: using /usr/lib/atlas-base/atlas/libblas.so to provide /usr/lib/libblas.so (libblas.so) in auto mode
update-alternatives: using /usr/lib/atlas-base/atlas/liblapack.so to provide /usr/lib/liblapack.so (liblapack.so) in auto mode
Setting up mynteye-s-calibrator (1.0.0) ...
How To Use
--------
3.1 Preparation For Use
--------
* MYNT EYE S Camera
* Checkerboard
* Evenly illuminated scene
3.2 Use Command
--------
* After installing the calibration tool, you can run the `mynteye-s-calibrator` command directly on the terminal to calibrate. -h can see its options:
.. code-block:: bash
$ mynteye-s-calibrator -h
Usage: mynteye-s-calibrator [options]
help: mynteye-s-calibrator -h
calibrate: mynteye-s-calibrator -x 11 -y 7 -s 0.036
Calibrate MYNT EYE S device.
Options:
-h, --help show this help message and exit
-x WIDTH, --width=WIDTH The chessboard width, default: 11
-y HEIGHT, --height=HEIGHT The chessboard height, default: 7
-s METERS, --square=METERS The chessboard square size in meters, default: 0.036
-n NUMBER, --number=NUMBER The number of images to use for calibration, default: 11
-p PATH, --path=PATH The path to save the result, default: folder name using device's SN
* -x -y -s Used to set the width, height, and grid size of the calibration plate. Width and height refer to the number of black and white intersections in the horizontal and vertical directions of the checkerboard. Square size in meters.
3.3 Steps For Usage
--------
* First, connect the MYNT EYE S camera.
* Then, run the mynteye-s-calibrator <calibration board parameter> command in the terminal.
.. image:: ../../images/tools/calibration001.png
:width: 60%
* Follow the prompts to select an index for the camera's resolution; image calibration will be performed at this resolution.
* The S1030 camera only needs to calibrate the 752*480 resolution. The S2100 camera needs to calibrate the 2560*800 and 1280*400 resolutions.
* As far as possible, let the calibration board cover the left and right eye images of the camera,
  and also cover the surrounding areas (where distortion is largest). The calibration tool will automatically
  select qualified images for the calibration calculation and will indicate on the terminal how many have been selected.
Reference acquisition image, as follows:
.. image:: ../../images/tools/calibration002.png
:width: 60%
.. image:: ../../images/tools/calibration003.png
:width: 60%
.. image:: ../../images/tools/calibration004.png
:width: 60%
.. image:: ../../images/tools/calibration005.png
:width: 60%
.. image:: ../../images/tools/calibration006.png
:width: 60%
* Note: p_x, p_y, size, skew respectively indicate the scale of the calibration plate on the x-axis, y-axis,
zoom, and tilt when the image is acquired. Make a point for reference.
* Once the required number of calibration images has been acquired,
  the calibration calculation will be performed. The output is as follows:
.. image:: ../../images/tools/calibration007.png
:width: 60%
* 1. The terminal will print out the left and right purpose calibration results.
* 2. The calibration results will be written into the files in <SN number> directory.
a) camera_left.yaml: Left eye parameter
b) camera_right.yaml: Right eye parameter
c) extrinsics.yaml: Binocular external parameter
d) img.params.equidistant: Camera parameters, which can be used for S SDK writing
e) stereo_reprojection_error.yaml: Reprojection error
* Finally, you will also be asked if you want to write to the camera device. Enter or `y` to confirm
.. image:: ../../images/tools/calibration008.png
:width: 60%
* After writing to the device, you will be prompted with "Write to device done".
3.4 Calibration result
--------
Calibration result: it is desirable to have a reprojection error of 0.2 or less. If it exceeds 1, recalibration is needed.
The reprojection error is visible in the output after calibration completes ("Final reprojection error: 0.201
pixels"), or see the calibration result file "stereo_reprojection_error.yaml".

View File

@ -1,8 +0,0 @@
.. _tools:
TOOLS SUPPORT
==============
.. toctree::
calibration_tool

View File

@ -54,7 +54,7 @@ The ROS file is structured like follows:
│ ├─config/
│ │ ├─device/
│ │ ├─standard.yaml # S1030
│ │ └─standard2.yaml # S2100/S210A
│ │ └─standard2.yaml # S21XX
│ │ ├─laserscan/
│ │ ├─process/
│ │ └─...
@ -72,14 +72,14 @@ The ROS file is structured like follows:
│ └─package.xml
└─README.md
In ``mynteye.launch``, you can configure the topics and frame_ids, decide which data to enable, ``standard.yaml`` (standard2.yaml is S2100/S210A config file) can set parameters for device. Please set ``gravity`` to the local gravity acceleration.
In ``mynteye.launch``, you can configure the topics and frame_ids, decide which data to enable, ``standard.yaml`` (standard2.yaml is S21XX config file) can set parameters for device. Please set ``gravity`` to the local gravity acceleration.
standard.yaml/standard2.yaml:
.. code-block:: xml
# s2100/s210a modify frame/resolution
# s21XX modify frame/resolution
standard2/request_index: 2
# s1030 modify frame/imu hz

View File

@ -242,4 +242,48 @@ if(WITH_API)
endif()
endif()
## device_writer
add_library(device_writer STATIC device_writer.cc)
target_link_libraries(device_writer mynteye ${OpenCV_LIBS})
## device_info_writer
make_executable(write_device_info
SRCS write_device_info.cc
LINK_LIBS device_writer
DLL_SEARCH_PATHS ${PRO_DIR}/_install/bin ${OpenCV_LIB_SEARCH_PATH}
)
## img_params_writer
make_executable(write_img_params
SRCS write_img_params.cc
LINK_LIBS device_writer
DLL_SEARCH_PATHS ${PRO_DIR}/_install/bin ${OpenCV_LIB_SEARCH_PATH}
)
## imu_params_writer
make_executable(write_imu_params
SRCS write_imu_params.cc
LINK_LIBS device_writer
DLL_SEARCH_PATHS ${PRO_DIR}/_install/bin ${OpenCV_LIB_SEARCH_PATH}
)
## save_all_infos
make_executable(save_all_infos
SRCS save_all_infos.cc
LINK_LIBS device_writer
DLL_SEARCH_PATHS ${PRO_DIR}/_install/bin ${OpenCV_LIB_SEARCH_PATH}
)
## record
make_executable(record
SRCS record.cc dataset.cc
LINK_LIBS mynteye ${OpenCV_LIBS}
DLL_SEARCH_PATHS ${PRO_DIR}/_install/bin ${OpenCV_LIB_SEARCH_PATH}
)

View File

@ -11,7 +11,7 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "dataset/dataset.h"
#include "dataset.h"
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>

View File

@ -11,7 +11,7 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "writer/device_writer.h"
#include "device_writer.h"
#include <vector>

View File

@ -19,7 +19,7 @@
#include "mynteye/device/utils.h"
#include "mynteye/util/times.h"
#include "dataset/dataset.h"
#include "dataset.h"
MYNTEYE_USE_NAMESPACE

View File

@ -15,7 +15,7 @@
#include "mynteye/device/device.h"
#include "mynteye/device/utils.h"
#include "writer/device_writer.h"
#include "device_writer.h"
MYNTEYE_USE_NAMESPACE

View File

@ -15,7 +15,7 @@
#include "mynteye/device/device.h"
#include "mynteye/device/utils.h"
#include "writer/device_writer.h"
#include "device_writer.h"
MYNTEYE_USE_NAMESPACE

View File

@ -15,7 +15,7 @@
#include "mynteye/device/device.h"
#include "mynteye/device/utils.h"
#include "writer/device_writer.h"
#include "device_writer.h"
MYNTEYE_USE_NAMESPACE

View File

@ -15,7 +15,7 @@
#include "mynteye/device/device.h"
#include "mynteye/device/utils.h"
#include "writer/device_writer.h"
#include "device_writer.h"
MYNTEYE_USE_NAMESPACE

View File

@ -26,9 +26,13 @@ int DISPARITY_MAX = 64;
DepthProcessor::DepthProcessor(
std::shared_ptr<struct CameraROSMsgInfoPair> calib_infos,
std::shared_ptr<int> min_disp,
std::shared_ptr<int> max_disp,
std::int32_t proc_period)
: Processor(std::move(proc_period)),
calib_infos_(calib_infos) {
calib_infos_(calib_infos),
min_disp_(min_disp),
max_disp_(max_disp) {
VLOG(2) << __func__;
}
@ -54,11 +58,13 @@ bool DepthProcessor::OnProcess(
int cols = input->value.cols;
// std::cout << calib_infos_->T_mul_f << std::endl;
// 0.0793434
cv::Mat depth_mat = cv::Mat::zeros(rows, cols, CV_16U);
for (int i = 0; i < rows; i++) {
for (int j = 0; j < cols; j++) {
float disparity_value = input->value.at<float>(i, j);
if (disparity_value < DISPARITY_MAX && disparity_value > DISPARITY_MIN) {
if (disparity_value < (max_disp_ ? *max_disp_ : DISPARITY_MAX) &&
disparity_value > (min_disp_ ? *min_disp_ : DISPARITY_MIN)) {
float depth = calib_infos_->T_mul_f / disparity_value;
depth_mat.at<ushort>(i, j) = depth;
}

View File

@ -16,6 +16,7 @@
#pragma once
#include <memory>
#include <string>
#include <memory>
#include "mynteye/api/processor.h"
#include "mynteye/api/processor/rectify_processor.h"
@ -29,6 +30,8 @@ class DepthProcessor : public Processor {
explicit DepthProcessor(
std::shared_ptr<struct CameraROSMsgInfoPair> calib_infos,
std::shared_ptr<int> min_disp = nullptr,
std::shared_ptr<int> max_disp = nullptr,
std::int32_t proc_period = 0);
virtual ~DepthProcessor();
@ -44,6 +47,8 @@ class DepthProcessor : public Processor {
std::shared_ptr<Processor> const parent) override;
private:
std::shared_ptr<struct CameraROSMsgInfoPair> calib_infos_;
std::shared_ptr<int> min_disp_;
std::shared_ptr<int> max_disp_;
};
MYNTEYE_END_NAMESPACE

View File

@ -81,7 +81,10 @@ DisparityProcessor::DisparityProcessor(DisparityComputingMethod type,
sgbm_matcher->setSpeckleWindowSize(100);
sgbm_matcher->setSpeckleRange(32);
sgbm_matcher->setDisp12MaxDiff(1);
disparity_min_sgbm_ptr =
std::make_shared<int>(sgbm_matcher->getMinDisparity());
disparity_max_sgbm_ptr =
std::make_shared<int>(sgbm_matcher->getNumDisparities());
bm_matcher = cv::StereoBM::create(0, 3);
bm_matcher->setPreFilterSize(9);
bm_matcher->setPreFilterCap(31);
@ -93,8 +96,10 @@ DisparityProcessor::DisparityProcessor(DisparityComputingMethod type,
bm_matcher->setSpeckleWindowSize(100);
bm_matcher->setSpeckleRange(4);
bm_matcher->setPreFilterType(cv::StereoBM::PREFILTER_XSOBEL);
// DISPARITY_MIN = bm_matcher->getMinDisparity();
// DISPARITY_MAX = bm_matcher->getMinDisparity();
disparity_min_bm_ptr =
std::make_shared<int>(bm_matcher->getMinDisparity());
disparity_max_bm_ptr =
std::make_shared<int>(bm_matcher->getNumDisparities());
#endif
NotifyComputingTypeChanged(type_);
}
@ -123,11 +128,15 @@ bool DisparityProcessor::ConfigFromFile(const std::string& config_file) {
cv::FileNode node_sgbm = fsSettings["SGBM"];
if (node_sgbm.type() == cv::FileNode::MAP) {
sgbm_matcher->read(node_sgbm);
*disparity_min_sgbm_ptr = sgbm_matcher->getMinDisparity();
*disparity_max_sgbm_ptr = sgbm_matcher->getNumDisparities();
}
cv::FileNode node_bm = fsSettings["BM"];
if (node_bm.type() == cv::FileNode::MAP) {
bm_matcher->read(node_bm);
*disparity_min_bm_ptr = bm_matcher->getMinDisparity();
*disparity_max_bm_ptr = bm_matcher->getNumDisparities();
}
return true;
#else

View File

@ -16,6 +16,7 @@
#pragma once
#include <memory>
#include <string>
#include <memory>
#include "mynteye/api/processor.h"
#include "mynteye/types.h"
@ -41,6 +42,20 @@ class DisparityProcessor : public Processor {
const DisparityComputingMethod &MethodType);
void NotifyComputingTypeChanged(const DisparityComputingMethod &MethodType);
bool ConfigFromFile(const std::string& config);
std::shared_ptr<int> GetMinDisparity() {
if (type_ == DisparityComputingMethod::BM) {
return disparity_min_bm_ptr;
} else {
return disparity_min_sgbm_ptr;
}
}
std::shared_ptr<int> GetMaxDisparity() {
if (type_ == DisparityComputingMethod::BM) {
return disparity_max_bm_ptr;
} else {
return disparity_max_sgbm_ptr;
}
}
protected:
// inline Processor::process_type ProcessOutputConnection() override {
@ -56,6 +71,10 @@ class DisparityProcessor : public Processor {
cv::Ptr<cv::StereoBM> bm_matcher;
DisparityComputingMethod type_;
double cx1_minus_cx2_;
std::shared_ptr<int> disparity_min_bm_ptr;
std::shared_ptr<int> disparity_max_bm_ptr;
std::shared_ptr<int> disparity_min_sgbm_ptr;
std::shared_ptr<int> disparity_max_sgbm_ptr;
};
MYNTEYE_END_NAMESPACE

View File

@ -330,11 +330,11 @@ void Synthetic::InitProcessors() {
rectify_processor = rectify_processor_ocv;
points_processor = std::make_shared<PointsProcessorOCV>(
rectify_processor_ocv->Q, POINTS_PROC_PERIOD);
depth_processor = std::make_shared<DepthProcessorOCV>(DEPTH_PROC_PERIOD);
disparity_processor =
std::make_shared<DisparityProcessor>(DisparityComputingMethod::BM,
nullptr,
DISPARITY_PROC_PERIOD);
depth_processor = std::make_shared<DepthProcessorOCV>(DEPTH_PROC_PERIOD);
root_processor->AddChild(rectify_processor);
rectify_processor->AddChild(disparity_processor);
@ -351,13 +351,16 @@ void Synthetic::InitProcessors() {
points_processor = std::make_shared<PointsProcessor>(
rectify_processor_imp -> getCameraROSMsgInfoPair(),
POINTS_PROC_PERIOD);
depth_processor = std::make_shared<DepthProcessor>(
rectify_processor_imp -> getCameraROSMsgInfoPair(),
DEPTH_PROC_PERIOD);
disparity_processor =
auto disparity_processor_imp =
std::make_shared<DisparityProcessor>(DisparityComputingMethod::BM,
rectify_processor_imp -> getCameraROSMsgInfoPair(),
DISPARITY_PROC_PERIOD);
depth_processor = std::make_shared<DepthProcessor>(
rectify_processor_imp -> getCameraROSMsgInfoPair(),
disparity_processor_imp->GetMinDisparity(),
disparity_processor_imp->GetMaxDisparity(),
DEPTH_PROC_PERIOD);
disparity_processor = disparity_processor_imp;
root_processor->AddChild(rectify_processor);
rectify_processor->AddChild(disparity_processor);

View File

@ -1,72 +0,0 @@
# Copyright 2018 Slightech Co., Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
cmake_minimum_required(VERSION 3.0)
project(mynteye_tools VERSION 2.0.0 LANGUAGES C CXX)
get_filename_component(PRO_DIR ${PROJECT_SOURCE_DIR} DIRECTORY)
include(${PRO_DIR}/cmake/Common.cmake)
include(${PRO_DIR}/cmake/Utils.cmake)
# flags
if(OS_WIN)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wall")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall")
else()
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wall -Wextra")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Wextra")
endif()
include(${PRO_DIR}/cmake/DetectCXX11.cmake)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -march=native")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -march=native")
string(STRIP "${CMAKE_C_FLAGS}" CMAKE_C_FLAGS)
string(STRIP "${CMAKE_CXX_FLAGS}" CMAKE_CXX_FLAGS)
message(STATUS "C_FLAGS: ${CMAKE_C_FLAGS}")
message(STATUS "CXX_FLAGS: ${CMAKE_CXX_FLAGS}")
# packages
LIST(APPEND CMAKE_PREFIX_PATH ${PRO_DIR}/_install/lib/cmake)
find_package(mynteye REQUIRED)
message(STATUS "Found mynteye: ${mynteye_VERSION}")
include(${PRO_DIR}/cmake/DetectOpenCV.cmake)
if(mynteye_WITH_GLOG)
include(${PRO_DIR}/cmake/DetectGLog.cmake)
endif()
#LIST(APPEND CMAKE_MODULE_PATH ${PRO_DIR}/cmake)
# targets
set(OUT_DIR "${CMAKE_CURRENT_SOURCE_DIR}/_output")
include_directories(
${CMAKE_CURRENT_SOURCE_DIR}
)
# dataset
add_subdirectory(dataset)
# writer
add_subdirectory(writer)

View File

@ -1,135 +0,0 @@
# Tools for MYNT® EYE cameras
## Prerequisites
[OpenCV](https://opencv.org/),
```bash
# Linux, macOS
export OpenCV_DIR=~/opencv
# Windows
set OpenCV_DIR=C:\opencv
```
Python packages,
```bash
cd <sdk>/tools/
sudo pip install -r requirements.txt
```
[ROS](http://www.ros.org/) if using rosbag.
## Build
```bash
cd <sdk>
make tools
```
---
## Record data (mynteye dataset)
```bash
./tools/_output/bin/dataset/record
# Windows
.\tools\_output\bin\dataset\record.bat
```
## Analytics data (mynteye dataset)
### imu_analytics.py
```bash
python tools/analytics/imu_analytics.py -i dataset -c tools/config/mynteye/mynteye_config.yaml \
-al=-1.2,1.2 -gl= -gdu=d -gsu=d -kl=
```
### stamp_analytics.py
```bash
python tools/analytics/stamp_analytics.py -i dataset -c tools/config/mynteye/mynteye_config.yaml
```
---
## Record data (rosbag)
```bash
cd <sdk>
make ros
```
```bash
source wrappers/ros/devel/setup.bash
roslaunch mynt_eye_ros_wrapper mynteye.launch
```
```bash
rosbag record -O mynteye.bag /mynteye/left/image_raw /mynteye/imu/data_raw /mynteye/temperature/data_raw
```
## Analytics data (rosbag)
### imu_analytics.py
```bash
python tools/analytics/imu_analytics.py -i mynteye.bag
```
### stamp_analytics.py
```bash
python tools/analytics/stamp_analytics.py -i mynteye.bag
```
---
## Writer
### device_info_writer.cc
```bash
./tools/_output/bin/writer/device_info_writer tools/writer/config/device.info
# Windows
.\tools\_output\bin\writer\device_info_writer.bat tools\writer\config\device.info
```
### img_params_writer.cc
```bash
./tools/_output/bin/writer/img_params_writer tools/writer/config/img.params
# Windows
.\tools\_output\bin\writer\img_params_writer.bat tools\writer\config\img.params
```
### imu_params_writer.cc
```bash
./tools/_output/bin/writer/imu_params_writer tools/writer/config/imu.params
# Windows
.\tools\_output\bin\writer\imu_params_writer.bat tools\writer\config\imu.params
```
### save_all_infos.cc
```bash
./tools/_output/bin/writer/save_all_infos
# Windows
.\tools\_output\bin\writer\save_all_infos.bat
```
---
## Checksum
```bash
./tools/checksum/md5sum.sh <file or directory>
```

View File

@ -1,671 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2018 Slightech Co., Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=missing-docstring
from __future__ import print_function
import os
import sys
TOOLBOX_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(os.path.join(TOOLBOX_DIR, 'internal'))
# pylint: disable=import-error,wrong-import-position
from data import DataError, Dataset, ROSBag, MYNTEYE, What
TIME_SCALE_FACTORS = {
's': 1.,
'm': 1. / 60,
'h': 1. / 3600
}
ANGLE_DEGREES = 'd'
ANGLE_RADIANS = 'r'
ANGLE_UNITS = (ANGLE_DEGREES, ANGLE_RADIANS)
BIN_CONFIG_NAME = 'imu_analytics_bin.cfg'
BIN_IMU_NAME = 'imu_analytics_imu.bin'
BIN_TEMP_NAME = 'imu_analytics_temp.bin'
class RawDataset(Dataset):
    """Dataset wrapper that loads all IMU/temperature samples into memory.

    Suitable for small recordings; BinDataset below handles large ones.
    """

    def __init__(self, path, dataset_creator):
        super(RawDataset, self).__init__(path)
        # Factory that opens the underlying dataset (rosbag or mynteye).
        self.dataset_creator = dataset_creator
        self._digest()

    def _digest(self):
        # Collect every imu/temp sample up front and remember which
        # data kinds are actually present.
        dataset = self.dataset_creator(self.path)
        results = dataset.collect(What.imu, What.temp)
        self._dataset = dataset
        self._results = results
        self._has_imu = What.imu in results.keys()
        self._has_temp = What.temp in results.keys()
        print(' ' + ', '.join('{}: {}'.format(k, len(v))
                              for k, v in results.items()))

    @staticmethod
    def _hypot(*args):
        # Euclidean norm of an arbitrary number of components.
        from math import sqrt
        return sqrt(sum(x ** 2 for x in args))

    def plot(self, t_scale_factor, gryo_converter,
             ax_accel_x, ax_accel_y, ax_accel_z, ax_accel,
             ax_gyro_x, ax_gyro_y, ax_gyro_z, ax_temp):
        """Plot accel/gyro/temp curves onto the provided matplotlib axes.

        t_scale_factor scales seconds into the display time unit;
        gryo_converter(x, degrees_fn, radians_fn) converts gyro units.
        """
        results = self._results
        if self._has_imu:
            # Times are plotted relative to the first imu sample.
            imu_t_beg = results[What.imu][0].timestamp
            imu_ts = [(imu.timestamp - imu_t_beg) * t_scale_factor
                      for imu in results[What.imu]]
            ax_accel_x.plot(imu_ts, [imu.accel_x for imu in results[What.imu]])
            ax_accel_y.plot(imu_ts, [imu.accel_y for imu in results[What.imu]])
            ax_accel_z.plot(imu_ts, [imu.accel_z for imu in results[What.imu]])
            import math
            # Bind scalar math.degrees/math.radians for per-sample conversion.
            my_gryo_converter = \
                lambda x: gryo_converter(x, math.degrees, math.radians)
            ax_gyro_x.plot(imu_ts, [my_gryo_converter(imu.gyro_x)
                                    for imu in results[What.imu]])
            ax_gyro_y.plot(imu_ts, [my_gryo_converter(imu.gyro_y)
                                    for imu in results[What.imu]])
            ax_gyro_z.plot(imu_ts, [my_gryo_converter(imu.gyro_z)
                                    for imu in results[What.imu]])
            ax_accel.plot(imu_ts, [self._hypot(imu.accel_x, imu.accel_y, imu.accel_z)
                                   for imu in results[What.imu]])
        if self._has_temp:
            temp_t_beg = results[What.temp][0].timestamp
            temp_ts = [(temp.timestamp - temp_t_beg) * t_scale_factor
                       for temp in results[What.temp]]
            ax_temp.plot(temp_ts, [temp.value for temp in results[What.temp]])

    # Streaming access is not supported once samples are collected; the
    # base-class API is stubbed out explicitly.
    def generate(self, *what):  # pylint: disable=unused-argument
        raise DataError('DataError: method not implemented')

    def iterate(self, action, *what):  # pylint: disable=unused-argument
        raise DataError('DataError: method not implemented')

    def collect(self, *what):  # pylint: disable=unused-argument
        raise DataError('DataError: method not implemented')

    @property
    def timebeg(self):
        # First sample time (seconds) of the underlying dataset.
        return self._dataset.timebeg

    @property
    def timeend(self):
        # Last sample time (seconds) of the underlying dataset.
        return self._dataset.timeend

    @property
    def duration(self):
        return self._dataset.duration

    @property
    def has_imu(self):
        return self._has_imu

    @property
    def has_temp(self):
        return self._has_temp
class BinDataset(RawDataset):
  """
  Binary memory-mapped files of large dataset.

  References:
    https://stackoverflow.com/questions/5854515/large-plot-20-million-samples-gigabytes-of-data
    https://stackoverflow.com/questions/1053928/very-large-matrices-using-python-and-numpy
  """

  # def __init__(self, path, dataset_creator):
  #   super(BinDataset, self).__init__(path, dataset_creator)

  def _digest(self):
    # Reuse a previous conversion if its config file exists; otherwise
    # (or if the user declines) convert the dataset to binaries.
    bindir = os.path.splitext(self.path)[0]
    bincfg = os.path.join(bindir, BIN_CONFIG_NAME)
    if os.path.isfile(bincfg):
      with open(bincfg, 'r') as f_cfg:
        import yaml
        # NOTE(review): yaml.load without an explicit Loader is unsafe on
        # untrusted input; consider yaml.safe_load.
        cfg = yaml.load(f_cfg)
        self._info = cfg['info']
        self._binimu = os.path.join(bindir, cfg['bins']['imu'])
        self._bintemp = os.path.join(bindir, cfg['bins']['temp'])
      print('find binary files ...')
      print(' binimu: {}'.format(self._binimu))
      print(' bintemp: {}'.format(self._bintemp))
      print(' bincfg: {}'.format(bincfg))
      if self._exists():
        while True:
          sys.stdout.write('Do you want to use it directly? [Y/n] ')
          choice = raw_input().lower()  # raw_input: Python 2 builtin
          if choice == '' or choice == 'y':
            return
          elif choice == 'n':
            break
          else:
            print('Please respond with \'y\' or \'n\'.')
    self._convert()

  def _exists(self):
    # True if either binary file from a previous conversion is present.
    return os.path.isfile(self._binimu) or os.path.isfile(self._bintemp)

  def _convert(self):
    # Stream the dataset once, dumping imu/temp samples as packed f8
    # records, then write a yaml config describing the conversion.
    import numpy as np
    dataset = self.dataset_creator(self.path)
    bindir = os.path.splitext(self.path)[0]
    if not os.path.exists(bindir):
      os.makedirs(bindir)
    binimu = os.path.join(bindir, BIN_IMU_NAME)
    bintemp = os.path.join(bindir, BIN_TEMP_NAME)
    bincfg = os.path.join(bindir, BIN_CONFIG_NAME)
    print('save to binary files ...')
    print(' binimu: {}'.format(binimu))
    print(' bintemp: {}'.format(bintemp))
    print(' bincfg: {}'.format(bincfg))
    has_imu = False
    has_temp = False
    with open(binimu, 'wb') as f_imu, open(bintemp, 'wb') as f_temp:
      imu_t_beg = -1
      imu_count = 0
      temp_t_beg = -1
      temp_count = 0
      for result in dataset.generate(What.imu, What.temp):
        if What.imu in result:
          imu = result[What.imu]
          if imu_t_beg == -1:
            # First imu sample defines t=0 for the imu file.
            imu_t_beg = imu.timestamp
          np.array([(
              (imu.timestamp - imu_t_beg),
              imu.accel_x, imu.accel_y, imu.accel_z,
              self._hypot(imu.accel_x, imu.accel_y, imu.accel_z),
              imu.gyro_x, imu.gyro_y, imu.gyro_z
          )], dtype="f8, f8, f8, f8, f8, f8, f8, f8").tofile(f_imu)
          imu_count = imu_count + 1
          has_imu = True
        if What.temp in result:
          temp = result[What.temp]
          if temp_t_beg == -1:
            temp_t_beg = temp.timestamp
          np.array([(
              (temp.timestamp - temp_t_beg),
              temp.value
          )], dtype="f8, f8").tofile(f_temp)
          temp_count = temp_count + 1
          has_temp = True
        sys.stdout.write('\r imu: {}, temp: {}'.format(imu_count, temp_count))
    sys.stdout.write('\n')
    # pylint: disable=attribute-defined-outside-init
    self._info = {
        'timebeg': dataset.timebeg,
        'timeend': dataset.timeend,
        'duration': dataset.duration,
        'has_imu': has_imu,
        'has_temp': has_temp
    }
    self._binimu = binimu
    self._bintemp = bintemp
    with open(bincfg, 'w') as f_cfg:
      import yaml
      yaml.dump({'info': self._info, 'bins': {
          'imu': BIN_IMU_NAME,
          'temp': BIN_TEMP_NAME
      }}, f_cfg, default_flow_style=False)

  def plot(self, t_scale_factor, gryo_converter,
           ax_accel_x, ax_accel_y, ax_accel_z, ax_accel,
           ax_gyro_x, ax_gyro_y, ax_gyro_z, ax_temp):
    """Plot from the binary files via numpy.memmap to keep memory low."""
    import numpy as np
    if self.has_imu:
      # Field layout must match the dtype written in _convert().
      imus = np.memmap(self._binimu, dtype=[
          ('t', 'f8'),
          ('accel_x', 'f8'), ('accel_y', 'f8'), ('accel_z', 'f8'),
          ('accel', 'f8'),
          ('gyro_x', 'f8'), ('gyro_y', 'f8'), ('gyro_z', 'f8'),
      ], mode='r')
      imus_t = imus['t'] * t_scale_factor
      ax_accel_x.plot(imus_t, imus['accel_x'])
      ax_accel_y.plot(imus_t, imus['accel_y'])
      ax_accel_z.plot(imus_t, imus['accel_z'])
      ax_accel.plot(imus_t, imus['accel'])
      # Vectorized unit conversion via numpy degrees/radians.
      my_gryo_converter = \
          lambda x: gryo_converter(x, np.degrees, np.radians)
      ax_gyro_x.plot(imus_t, my_gryo_converter(imus['gyro_x']))
      ax_gyro_y.plot(imus_t, my_gryo_converter(imus['gyro_y']))
      ax_gyro_z.plot(imus_t, my_gryo_converter(imus['gyro_z']))
    if self.has_temp:
      temps = np.memmap(self._bintemp, dtype=[
          ('t', 'f8'), ('value', 'f8')
      ], mode='r')
      temps_t = temps['t'] * t_scale_factor
      ax_temp.plot(temps_t, temps['value'])

  # Metadata comes from the conversion-time info dict, not the dataset.
  @property
  def timebeg(self):
    return self._info['timebeg']

  @property
  def timeend(self):
    return self._info['timeend']

  @property
  def duration(self):
    return self._info['duration']

  @property
  def has_imu(self):
    return self._info['has_imu']

  @property
  def has_temp(self):
    return self._info['has_temp']
def analyze(dataset, profile):
    """Build a 2x4 figure of accel/gyro/temp plots and show/save it.

    dataset: RawDataset or BinDataset providing plot() and metadata.
    profile: object with time/accel/gyro/temp limit options and outdir.
    """
    # Pick a time unit automatically unless one was requested.
    if not profile.time_unit:
        if dataset.duration > 3600:
            time_unit = 'h'
        elif dataset.duration > 60:
            time_unit = 'm'
        else:
            time_unit = 's'
    else:
        time_unit = profile.time_unit
    t_name = 'time ({})'.format(time_unit)
    t_scale_factor = TIME_SCALE_FACTORS[time_unit]
    time_limits = profile.time_limits
    if not time_limits:
        # Default to the full recording span in the display unit.
        time_limits = [0, dataset.duration * t_scale_factor]
    accel_limits = profile.accel_limits
    gyro_limits = profile.gyro_limits
    temp_limits = profile.temp_limits
    auto = profile.auto
    import matplotlib.pyplot as plt
    fig_1 = plt.figure(1, [16, 12])
    fig_1.suptitle('IMU Analytics')
    fig_1.subplots_adjust(wspace=0.4, hspace=0.2)
    # Top row: accel x/y/z and accel magnitude (subplots 241-244).
    ax_accel_x = fig_1.add_subplot(241)
    ax_accel_x.set_title('accel_x')
    ax_accel_x.set_xlabel(t_name)
    ax_accel_x.set_ylabel('accel_x (m/s^2)')
    ax_accel_x.axis('auto')
    ax_accel_x.set_xlim(time_limits)
    if not auto and accel_limits and accel_limits[0]:
        ax_accel_x.set_ylim(accel_limits[0])
    ax_accel_y = fig_1.add_subplot(242)
    ax_accel_y.set_title('accel_y')
    ax_accel_y.set_xlabel(t_name)
    ax_accel_y.set_ylabel('accel_y (m/s^2)')
    ax_accel_y.axis('auto')
    ax_accel_y.set_xlim(time_limits)
    if not auto and accel_limits and accel_limits[1]:
        ax_accel_y.set_ylim(accel_limits[1])
    ax_accel_z = fig_1.add_subplot(243)
    ax_accel_z.set_title('accel_z')
    ax_accel_z.set_xlabel(t_name)
    ax_accel_z.set_ylabel('accel_z (m/s^2)')
    ax_accel_z.axis('auto')
    ax_accel_z.set_xlim(time_limits)
    if not auto and accel_limits and accel_limits[2]:
        ax_accel_z.set_ylim(accel_limits[2])
    ax_accel = fig_1.add_subplot(244)
    ax_accel.set_title('accel hypot(x,y,z)')
    ax_accel.set_xlabel(t_name)
    ax_accel.set_ylabel('accel (m/s^2)')
    ax_accel.axis('auto')
    ax_accel.set_xlim(time_limits)
    if not auto and accel_limits and accel_limits[3]:
        ax_accel.set_ylim(accel_limits[3])
    # Bottom row: gyro x/y/z and temperature (subplots 245-248).
    ax_gyro_ylabels = {
        ANGLE_DEGREES: 'deg/sec',
        ANGLE_RADIANS: 'rad/sec'
    }
    ax_gyro_ylabel = ax_gyro_ylabels[profile.gyro_show_unit]
    ax_gyro_x = fig_1.add_subplot(245)
    ax_gyro_x.set_title('gyro_x')
    ax_gyro_x.set_xlabel(t_name)
    ax_gyro_x.set_ylabel('gyro_x ({})'.format(ax_gyro_ylabel))
    ax_gyro_x.axis('auto')
    ax_gyro_x.set_xlim(time_limits)
    if not auto and gyro_limits and gyro_limits[0]:
        ax_gyro_x.set_ylim(gyro_limits[0])
    ax_gyro_y = fig_1.add_subplot(246)
    ax_gyro_y.set_title('gyro_y')
    ax_gyro_y.set_xlabel(t_name)
    ax_gyro_y.set_ylabel('gyro_y ({})'.format(ax_gyro_ylabel))
    ax_gyro_y.axis('auto')
    ax_gyro_y.set_xlim(time_limits)
    if not auto and gyro_limits and gyro_limits[1]:
        ax_gyro_y.set_ylim(gyro_limits[1])
    ax_gyro_z = fig_1.add_subplot(247)
    ax_gyro_z.set_title('gyro_z')
    ax_gyro_z.set_xlabel(t_name)
    ax_gyro_z.set_ylabel('gyro_z ({})'.format(ax_gyro_ylabel))
    ax_gyro_z.axis('auto')
    ax_gyro_z.set_xlim(time_limits)
    if not auto and gyro_limits and gyro_limits[2]:
        ax_gyro_z.set_ylim(gyro_limits[2])
    ax_temp = None
    if dataset.has_temp:
        ax_temp = fig_1.add_subplot(248)
        ax_temp.set_title('temperature')
        ax_temp.set_xlabel(t_name)
        ax_temp.set_ylabel('temperature (degree Celsius)')
        ax_temp.axis('auto')
        ax_temp.set_xlim(time_limits)
        if not auto and temp_limits:
            ax_temp.set_ylim(temp_limits)

    def gryo_converter(x, degrees, radians):
        # Convert gyro values from data unit to display unit; the caller
        # supplies degrees/radians callables (math.* or numpy.*).
        if profile.gyro_show_unit == profile.gyro_data_unit:
            return x
        if profile.gyro_show_unit == ANGLE_DEGREES and \
                profile.gyro_data_unit == ANGLE_RADIANS:
            return degrees(x)
        if profile.gyro_show_unit == ANGLE_RADIANS and \
                profile.gyro_data_unit == ANGLE_DEGREES:
            return radians(x)
        sys.exit('Error: gryo_converter wrong logic')
    dataset.plot(t_scale_factor, gryo_converter,
                 ax_accel_x, ax_accel_y, ax_accel_z, ax_accel,
                 ax_gyro_x, ax_gyro_y, ax_gyro_z, ax_temp)
    outdir = profile.outdir
    if outdir:
        figpath = os.path.join(outdir, 'imu_analytics.png')
        print('save figure to:\n {}'.format(figpath))
        if not os.path.exists(outdir):
            os.makedirs(outdir)
        fig_1.savefig(figpath, dpi=100)
    plt.show()
def _parse_args():
    """Parse command line options for the imu analytics tool."""
    def limits_type(string, num=1):
        # Parse 'min,max[:min,max...]' into one (min, max) pair, or a list
        # of num pairs; an empty slot means auto-scale for that axis.
        if not string:
            return None
        if num < 1:
            sys.exit('Error: limits_type must be greater than one pair')
        pairs = string.split(':')
        pairs_len = len(pairs)
        if pairs_len == 1:
            values = pairs[0].split(',')
            if len(values) != 2:
                sys.exit('Error: limits_type must be two values'
                         ' as \'min,max\' for each pair')
            results = (float(values[0]), float(values[1]))
            if num > 1:
                # Broadcast the single pair to every requested slot.
                return [results for i in xrange(num)]  # xrange: Python 2
            else:
                return results
        elif pairs_len == num:
            results = []
            for i in xrange(num):
                if pairs[i]:
                    values = pairs[i].split(',')
                    if len(values) != 2:
                        sys.exit('Error: limits_type must be two values'
                                 ' as \'min,max\' for each pair')
                    results.append((float(values[0]), float(values[1])))
                else:
                    results.append(None)
            return results
        else:
            sys.exit('Error: limits_type must one or {:d} pairs'.format(num))
    from functools import partial
    import argparse
    parser = argparse.ArgumentParser(
        prog=os.path.basename(__file__),
        formatter_class=argparse.RawTextHelpFormatter,
        description='usage examples:'
        '\n python %(prog)s -i DATASET'
        '\n python %(prog)s -i DATASET -al=-10,10'
        '\n python %(prog)s -i DATASET -al=-5,5::5,15: -gl=-0.1,0.1:: -kl=')
    parser.add_argument(
        '-i',
        '--input',
        dest='input',
        metavar='DATASET',
        required=True,
        help='the input dataset path')
    parser.add_argument(
        '-o',
        '--outdir',
        dest='outdir',
        metavar='OUTDIR',
        help='the output directory')
    parser.add_argument(
        '-c',
        '--config',
        dest='config',
        metavar='CONFIG',
        help='yaml config file about input dataset')
    parser.add_argument(
        '-tu',
        '--time-unit',
        dest='time_unit',
        metavar='s|m|h',
        help='the time unit (seconds, minutes or hours)')
    parser.add_argument(
        '-gdu',
        '--gyro-data-unit',
        dest='gyro_data_unit',
        metavar='r|d',
        default='r',
        help='the gyro data unit (radians or degrees, default: %(default)s)')
    parser.add_argument(
        '-gsu',
        '--gyro-show-unit',
        dest='gyro_show_unit',
        metavar='r|d',
        help='the gyro show unit (radians or degrees, '
        'default: same as gyro data unit)')
    parser.add_argument(
        '-tl',
        '--time-limits',
        dest='time_limits',
        metavar='min,max',
        type=limits_type,
        help='the time limits, in time unit')
    parser.add_argument(
        '-al',
        '--accel-limits',
        dest='accel_limits',
        metavar='min,max [min,max:...]',
        default='-10,10',
        type=partial(limits_type, num=4),
        help='the accel limits (default: %(default)s)'
        '\n or 4 limits of accel_x,y,z,accel like \'min,max:...\'')
    parser.add_argument(
        '-gl',
        '--gyro-limits',
        dest='gyro_limits',
        metavar='min,max [min,max:...]',
        default='-0.02,0.02',
        type=partial(limits_type, num=3),
        help='the gyro limits (default: %(default)s)'
        '\n or 3 limits of gyro_x,y,z like \'min,max:...\'')
    parser.add_argument(
        '-kl',
        '--temp-limits',
        dest='temp_limits',
        metavar='min,max',
        default='-20,80',
        type=limits_type,
        help='the temperature limits (default: %(default)s)')
    parser.add_argument(
        '-l',
        '--limits',
        dest='all_limits',
        metavar='min,max [min,max:...]',
        # nargs='+',
        type=partial(limits_type, num=8),
        help='the all limits, absent one will auto scale'
        '\n accel_x,y,z,accel,gyro_x,y,z,temp like \'min,max:...\'')
    parser.add_argument(
        '-a',
        '--auto',
        dest='auto',
        action='store_true',
        help='make all limits auto scale to data limits, except the time')
    parser.add_argument(
        '-b',
        '--binary',
        dest='binary',
        action='store_true',
        help='save large dataset to binary files'
        ', and plot them with numpy.memmap()')
    return parser.parse_args()
def _dict2obj(d):
from collections import namedtuple
return namedtuple('X', d.keys())(*d.values())
def _main():
    """Entry point: build a profile from CLI args, open the dataset, analyze."""
    args = _parse_args()
    # print(args)
    dataset_path = args.input
    if not dataset_path or not os.path.exists(dataset_path):
        sys.exit('Error: the dataset path not exists, %s' % dataset_path)
    dataset_path = os.path.normpath(dataset_path)
    outdir = args.outdir
    if not outdir:
        # Default output directory: dataset path without its extension.
        outdir = os.path.splitext(dataset_path)[0]
    else:
        outdir = os.path.abspath(outdir)
    print('imu analytics ...')
    print(' input: %s' % dataset_path)
    print(' outdir: %s' % outdir)
    profile = {
        'auto': False,
        'time_unit': None,
        'gyro_data_unit': None,
        'gyro_show_unit': None,
        'time_limits': None,
        'accel_limits': None,
        'gyro_limits': None,
        'temp_limits': None
    }
    profile['auto'] = args.auto
    if args.time_unit:
        if args.time_unit not in TIME_SCALE_FACTORS.keys():
            sys.exit('Error: the time unit must be \'s|m|h\'')
        else:
            profile['time_unit'] = args.time_unit
    if args.gyro_data_unit:
        if args.gyro_data_unit not in ANGLE_UNITS:
            sys.exit('Error: the gyro unit must be \'r|d\'')
        else:
            profile['gyro_data_unit'] = args.gyro_data_unit
    else:
        profile['gyro_data_unit'] = ANGLE_RADIANS
    if args.gyro_show_unit:
        if args.gyro_show_unit not in ANGLE_UNITS:
            sys.exit('Error: the gyro unit must be \'r|d\'')
        else:
            profile['gyro_show_unit'] = args.gyro_show_unit
    else:
        # Default to showing gyro in the same unit the data is stored in.
        profile['gyro_show_unit'] = profile['gyro_data_unit']
    if args.time_limits:
        if not args.time_unit:
            sys.exit('Error: the time unit must be set')
        profile['time_limits'] = args.time_limits
    if args.all_limits:
        # -l overrides the individual accel/gyro/temp limit options.
        profile['accel_limits'] = args.all_limits[:4]
        profile['gyro_limits'] = args.all_limits[4:7]
        profile['temp_limits'] = args.all_limits[7]
    else:
        profile['accel_limits'] = args.accel_limits
        profile['gyro_limits'] = args.gyro_limits
        profile['temp_limits'] = args.temp_limits
    for k, v in profile.items():
        print(' {}: {}'.format(k, v))

    def dataset_creator(path):
        # Open the dataset described by the optional yaml config, falling
        # back to default rosbag topics.
        print('open dataset ...')
        if args.config:
            import yaml
            # NOTE(review): file() is a Python 2 builtin; yaml.load is used
            # without an explicit Loader here.
            config = yaml.load(file(args.config, 'r'))
            model = config['dataset']
            if model == 'rosbag':
                dataset = ROSBag(path, **config['rosbag'])
            elif model == 'mynteye':
                dataset = MYNTEYE(path)
            else:
                sys.exit('Error: dataset model not supported {}'.format(model))
        else:
            dataset = ROSBag(
                path,
                topic_imu='/mynteye/imu/data_raw',
                topic_temp='/mynteye/temp/data_raw')
        return dataset
    if args.binary:
        dataset = BinDataset(dataset_path, dataset_creator)
    else:
        dataset = RawDataset(dataset_path, dataset_creator)
    print(' timebeg: {:f}, timeend: {:f}, duration: {:f}'.format(
        dataset.timebeg, dataset.timeend, dataset.duration))
    profile['outdir'] = outdir
    analyze(dataset, _dict2obj(profile))
    print('imu analytics done')
if __name__ == '__main__':
_main()

View File

@ -1,370 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2018 Slightech Co., Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=missing-docstring
from __future__ import print_function
import os
import sys
TOOLBOX_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(os.path.join(TOOLBOX_DIR, 'internal'))
# pylint: disable=import-error,wrong-import-position
from data import ROSBag, MYNTEYE, What
# Angle unit tags (kept for parity with the imu analytics tool).
ANGLE_DEGREES = 'd'
ANGLE_RADIANS = 'r'
ANGLE_UNITS = (ANGLE_DEGREES, ANGLE_RADIANS)
# Binary cache file names and the output figure name.
BIN_IMG_NAME = 'stamp_analytics_img.bin'
BIN_IMU_NAME = 'stamp_analytics_imu.bin'
RESULT_FIGURE = 'stamp_analytics.png'
# IMU sample flag values: which channels one record carries.
IMU_ALL = 0
IMU_ACCEL = 1
IMU_GYRO = 2
class BinDataset(object):
    """Converts a dataset to flat binary files and analyzes stamp regularity."""

    def __init__(self, path, dataset_creator):
        self.path = path
        # Factory that opens the underlying dataset (rosbag or mynteye).
        self.dataset_creator = dataset_creator
        self._digest()

    def _digest(self):
        # Reuse a previous conversion when both binary files exist and the
        # user confirms; otherwise convert the dataset again.
        bindir = os.path.splitext(self.path)[0]
        binimg = os.path.join(bindir, BIN_IMG_NAME)
        binimu = os.path.join(bindir, BIN_IMU_NAME)
        if os.path.isfile(binimg) and os.path.isfile(binimu):
            print('find binary files ...')
            print(' binimg: {}'.format(binimg))
            print(' binimu: {}'.format(binimu))
            while True:
                sys.stdout.write('Do you want to use it directly? [Y/n] ')
                choice = raw_input().lower()  # raw_input: Python 2 builtin
                if choice == '' or choice == 'y':
                    self._binimg = binimg
                    self._binimu = binimu
                    self._has_img = True
                    self._has_imu = True
                    return
                elif choice == 'n':
                    break
                else:
                    print('Please respond with \'y\' or \'n\'.')
        self._convert()

    def _convert(self):
        # Stream the dataset once, dumping left-image stamps and imu
        # samples (stamp, flag, accel xyz, gyro xyz) as packed records.
        import numpy as np
        dataset = self.dataset_creator(self.path)
        bindir = os.path.splitext(self.path)[0]
        if not os.path.exists(bindir):
            os.makedirs(bindir)
        binimg = os.path.join(bindir, BIN_IMG_NAME)
        binimu = os.path.join(bindir, BIN_IMU_NAME)
        print('save to binary files ...')
        print(' binimg: {}'.format(binimg))
        print(' binimu: {}'.format(binimu))
        has_img = False
        has_imu = False
        with open(binimg, 'wb') as f_img, open(binimu, 'wb') as f_imu:
            img_count = 0
            imu_count = 0
            for result in dataset.generate(What.img_left, What.imu):
                if What.img_left in result:
                    img = result[What.img_left]
                    np.array([(
                        img.timestamp
                    )], dtype="f8").tofile(f_img)
                    img_count = img_count + 1
                    has_img = True
                if What.imu in result:
                    imu = result[What.imu]
                    np.array([(
                        imu.timestamp, imu.flag,
                        imu.accel_x, imu.accel_y, imu.accel_z,
                        imu.gyro_x, imu.gyro_y, imu.gyro_z
                    )], dtype="f8, i4, f8, f8, f8, f8, f8, f8").tofile(f_imu)
                    imu_count = imu_count + 1
                    has_imu = True
                sys.stdout.write('\r img: {}, imu: {}'.format(img_count, imu_count))
        sys.stdout.write('\n')
        # pylint: disable=attribute-defined-outside-init
        self._binimg = binimg
        self._binimu = binimu
        self._has_img = has_img
        self._has_imu = has_imu

    def stamp_analytics(self, args):
        """Check image/imu timestamp regularity against the nominal rates."""
        outdir = args.outdir
        import numpy as np
        if self.has_img:
            # pd.cut fails on readonly arrays
            # https://github.com/pandas-dev/pandas/issues/18773
            # imgs = np.memmap(self._binimg, dtype=[
            #     ('t', 'f8')
            # ], mode='r')
            imgs = np.fromfile(self._binimg, dtype=[
                ('t', 'f8')
            ])
        else:
            sys.exit("Error: there are no imgs.")
        if self.has_imu:
            imus = np.memmap(self._binimu, dtype=[
                ('t', 'f8'), ('flag', 'i4'),
                ('accel_x', 'f8'), ('accel_y', 'f8'), ('accel_z', 'f8'),
                ('gyro_x', 'f8'), ('gyro_y', 'f8'), ('gyro_z', 'f8'),
            ], mode='r')
        else:
            sys.exit("Error: there are no imus.")
        # Expected inter-sample periods from the nominal rates.
        period_img = 1. / args.rate_img
        period_imu = 1. / args.rate_imu
        print('\nrate (Hz)')
        print(' img: {}, imu: {}'.format(args.rate_img, args.rate_imu))
        print('sample period (s)')
        print(' img: {}, imu: {}'.format(period_img, period_imu))
        imgs_t_diff = np.diff(imgs['t'])
        # imus_t_diff = np.diff(imus['t'])
        # Split imu records by flag into accel and gyro streams.
        accel = imus[(imus['flag'] == IMU_ALL) | (imus['flag'] == IMU_ACCEL)]
        accel_t_diff = np.diff(accel['t'])
        gyro = imus[(imus['flag'] == IMU_ALL) | (imus['flag'] == IMU_GYRO)]
        gyro_t_diff = np.diff(gyro['t'])
        print('\ncount')
        print(' imgs: {}, imus: {}, accel: {}, gyro: {}'.format(
            imgs.size, imus.size, accel.size, gyro.size))
        print('\ndiff count')
        print(' imgs_t_diff: {}, accel_t_diff: {}, gyro_t_diff: {}'.format(
            imgs_t_diff.size, accel_t_diff.size, gyro_t_diff.size))
        print('\ndiff where (factor={})'.format(args.factor))
        self._print_t_diff_where('imgs', imgs_t_diff, period_img, args.factor)
        # self._print_t_diff_where('imus', imus_t_diff, period_imu, args.factor)
        self._print_t_diff_where('accel', accel_t_diff, period_imu, args.factor)
        self._print_t_diff_where('gyro', gyro_t_diff, period_imu, args.factor)
        import pandas as pd
        bins = imgs['t']
        bins_n = imgs['t'].size
        bins = pd.Series(data=bins).drop_duplicates(keep='first')
        print('\nimage timestamp duplicates: {}'.format(bins_n - bins.size))

        def _cut_by_imgs_t(imus_t):
            # Count imu samples falling in each image-stamp interval.
            cats = pd.cut(imus_t, bins)
            return cats.value_counts()
        self._plot(
            outdir,
            imgs_t_diff,
            accel_t_diff,
            _cut_by_imgs_t(
                accel['t']),
            gyro_t_diff,
            _cut_by_imgs_t(
                gyro['t']))

    def _print_t_diff_where(self, name, t_diff, period, factor):
        # Report indices whose diff deviates from the expected period by
        # more than the given factor, in both directions.
        import numpy as np
        where = np.argwhere(t_diff > period * (1 + factor))
        print(' {} where diff > {}*{} ({})'.format(
            name, period, 1 + factor, where.size))
        for x in where:
            print(' {:8d}: {:.16f}'.format(x[0], t_diff[x][0]))
        where = np.argwhere(t_diff < period * (1 - factor))
        print(' {} where diff < {}*{} ({})'.format(
            name, period, 1 - factor, where.size))
        for x in where:
            print(' {:8d}: {:.16f}'.format(x[0], t_diff[x][0]))

    def _plot(self, outdir, imgs_t_diff,
              accel_t_diff, accel_counts, gyro_t_diff, gyro_counts):
        # Six-panel figure: image diff, accel/gyro diffs, per-image counts.
        import matplotlib.pyplot as plt
        import numpy as np
        fig_1 = plt.figure(1, [16, 12])
        fig_1.suptitle('Stamp Analytics')
        fig_1.subplots_adjust(
            left=0.1,
            right=0.95,
            top=0.85,
            bottom=0.15,
            wspace=0.4,
            hspace=0.4)
        ax_imgs_t_diff = fig_1.add_subplot(231)
        ax_imgs_t_diff.set_title('Image Timestamp Diff')
        ax_imgs_t_diff.set_xlabel('diff index')
        ax_imgs_t_diff.set_ylabel('diff (s)')
        ax_imgs_t_diff.axis('auto')
        ax_imgs_t_diff.set_xlim([0, imgs_t_diff.size])
        ax_imgs_t_diff.plot(imgs_t_diff)

        def _plot_imus(name, t_diff, counts, pos_offset=0):
            # Draw one diff panel and one count panel for an imu stream.
            ax_imus_t_diff = fig_1.add_subplot(232 + pos_offset)
            ax_imus_t_diff.set_title('{} Timestamp Diff'.format(name))
            ax_imus_t_diff.set_xlabel('diff index')
            ax_imus_t_diff.set_ylabel('diff (s)')
            ax_imus_t_diff.axis('auto')
            ax_imus_t_diff.set_xlim([0, t_diff.size - 1])
            ax_imus_t_diff.plot(t_diff)
            ax_imus_counts = fig_1.add_subplot(233 + pos_offset)
            ax_imus_counts.set_title('{} Count Per Image Intervel'.format(name))
            ax_imus_counts.set_xlabel('intervel index')
            ax_imus_counts.set_ylabel('imu count')
            ax_imus_counts.axis('auto')
            # print(counts.values)
            # counts.plot(kind='line', ax=ax_imus_counts)
            data = counts.values
            ax_imus_counts.set_xlim([0, data.size])
            ax_imus_counts.set_ylim([np.min(data) - 1, np.max(data) + 1])
            ax_imus_counts.plot(data)
        _plot_imus('Accel', accel_t_diff, accel_counts)
        _plot_imus('Gyro', gyro_t_diff, gyro_counts, 3)
        if outdir:
            figpath = os.path.join(outdir, RESULT_FIGURE)
            print('\nsave figure to:\n {}'.format(figpath))
            if not os.path.exists(outdir):
                os.makedirs(outdir)
            fig_1.savefig(figpath, dpi=100)
        plt.show()

    @property
    def has_img(self):
        # True once _digest/_convert confirmed image stamps are available.
        return self._has_img

    @property
    def has_imu(self):
        return self._has_imu
def _parse_args():
    """Parse command line options for the stamp analytics tool."""
    import argparse
    parser = argparse.ArgumentParser(
        prog=os.path.basename(__file__),
        formatter_class=argparse.RawTextHelpFormatter,
        description='usage examples:'
        '\n python %(prog)s -i DATASET')
    parser.add_argument(
        '-i',
        '--input',
        dest='input',
        metavar='DATASET',
        required=True,
        help='the input dataset path')
    parser.add_argument(
        '-o',
        '--outdir',
        dest='outdir',
        metavar='OUTDIR',
        help='the output directory')
    parser.add_argument(
        '-c',
        '--config',
        dest='config',
        metavar='CONFIG',
        help='yaml config file about input dataset')
    parser.add_argument(
        '-f',
        '--factor',
        dest='factor',
        metavar='FACTOR',
        default=0.1,
        type=float,
        help='the wave factor (default: %(default)s)')
    parser.add_argument(
        '--rate-img',
        dest='rate_img',
        metavar='RATE',
        default=60,
        type=int,
        help='the img rate (default: %(default)s)')
    parser.add_argument(
        '--rate-imu',
        dest='rate_imu',
        metavar='RATE',
        default=200,
        type=int,
        help='the imu rate (default: %(default)s)')
    return parser.parse_args()
def _main():
    """Entry point: resolve paths, open the dataset, run stamp analytics."""
    args = _parse_args()
    dataset_path = args.input
    if not dataset_path or not os.path.exists(dataset_path):
        sys.exit('Error: the dataset path not exists, %s' % dataset_path)
    dataset_path = os.path.normpath(dataset_path)
    outdir = args.outdir
    if not args.outdir:
        # Default output directory: dataset path without its extension.
        outdir = os.path.splitext(dataset_path)[0]
    else:
        outdir = os.path.abspath(outdir)
    args.outdir = outdir
    print('stamp analytics ...')
    print(' input: %s' % dataset_path)
    print(' outdir: %s' % outdir)

    def dataset_creator(path):
        # Open the dataset described by the optional yaml config, falling
        # back to default rosbag topics.
        print('open dataset ...')
        if args.config:
            import yaml
            # NOTE(review): file() is a Python 2 builtin; yaml.load is used
            # without an explicit Loader here.
            config = yaml.load(file(args.config, 'r'))
            model = config['dataset']
            if model == 'rosbag':
                dataset = ROSBag(path, **config['rosbag'])
            elif model == 'mynteye':
                dataset = MYNTEYE(path)
            else:
                sys.exit('Error: dataset model not supported {}'.format(model))
        else:
            dataset = ROSBag(path,
                             topic_img_left='/mynteye/left/image_raw',
                             topic_imu='/mynteye/imu/data_raw')
        return dataset
    dataset = BinDataset(dataset_path, dataset_creator)
    dataset.stamp_analytics(args)
    print('stamp analytics done')
if __name__ == '__main__':
_main()

View File

@ -1,78 +0,0 @@
#!/usr/bin/env bash
# Copyright 2018 Slightech Co., Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Resolve the repository layout relative to this script.
BASE_DIR=$(cd "$(dirname "$0")" && pwd)
ROOT_DIR=$(realpath "$BASE_DIR/../..")
SCRIPTS_DIR="$ROOT_DIR/scripts"

source "$SCRIPTS_DIR/common/echo.sh"
source "$SCRIPTS_DIR/common/host.sh"

# Pick an md5 implementation per host OS (set by host.sh).
if [ "$HOST_OS" = "Linux" ]; then
  _md5sum() { md5sum "$1"; }
elif [ "$HOST_OS" = "Mac" ]; then
  _md5sum() { md5 -q "$1"; }
elif [ "$HOST_OS" = "Win" ]; then
  _md5sum() { certutil -hashfile "$1" MD5; }
else  # unexpected
  _echo_e "Unknown host os :("
  exit 1
fi

PYTHON="python"
if [ "$HOST_OS" = "Win" ]; then
  # default python on MSYS
  PYTHON="python2"
fi

# Print a human-readable size of the file given as $1, using an inline
# Python heredoc (the path is passed via the PYTHON_ARG env var).
_get_size() {
  PYTHON_ARG="$1" $PYTHON - <<EOF
import math
from os.path import getsize
def convert_size(size_bytes):
    if size_bytes == 0:
        return "0B"
    size_name = ("B", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB")
    i = int(math.floor(math.log(size_bytes, 1024)))
    p = math.pow(1024, i)
    s = round(size_bytes / p, 2)
    return "%s %s" % (s, size_name[i])
import os
print(convert_size(getsize(os.environ["PYTHON_ARG"])))
EOF
}

# Print path, size and md5 checksum of the file given as $1.
_print_info() {
  file="$1"
  _echo "File: $file"
  _echo "Size: `_get_size "$file"`"
  _echo "MD5: `_md5sum "$file"`"
  _echo
}

# Require a file or directory argument.
if [ $# -eq 0 ]; then
  _echo_e "Usage: ./tools/checksum/md5sum.sh <file or directory>"
  exit 1
fi

if [ -d "$1" ]; then
  # Directory: checksum every regular file beneath it.
  find "$1" -type f | while read -r f; do
    _print_info "$f"
  done
else
  _print_info "$1"
fi

View File

@ -1,9 +0,0 @@
# dataset model: rosbag, mynteye
dataset: "mynteye"
# rosbag config (only used when dataset is "rosbag"): topics to read
rosbag:
  topic_img_left: "/mynteye/left/image_raw"
  topic_img_right: "/mynteye/right/image_raw"
  topic_imu: "/mynteye/imu/data_raw"
  topic_temp: "/mynteye/temp/data_raw"

View File

@ -1,39 +0,0 @@
# Copyright 2018 Slightech Co., Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Tool sub-build: group this tool's outputs under per-directory folders.
get_filename_component(DIR_NAME ${CMAKE_CURRENT_LIST_DIR} NAME)

# set_outdir(<archive> <library> <runtime>) is a project-provided helper.
set_outdir(
  "${OUT_DIR}/lib/${DIR_NAME}"
  "${OUT_DIR}/lib/${DIR_NAME}"
  "${OUT_DIR}/bin/${DIR_NAME}"
)

# NOTE(review): directory-scoped include; prefer target_include_directories
# on the executable target if make_executable exposes one.
include_directories(
  ${PRO_DIR}/src
)

## record
# make_executable(record
#   SRCS record.cc dataset.cc
#   LINK_LIBS mynteye ${OpenCV_LIBS}
#   DLL_SEARCH_PATHS ${PRO_DIR}/_install/bin ${OpenCV_LIB_SEARCH_PATH}
#)

# make_executable is a project-provided macro wrapping add_executable,
# linking, and (on Windows) DLL search-path setup.
make_executable(record2
  SRCS record2.cc dataset.cc
  LINK_LIBS mynteye ${OpenCV_LIBS}
  DLL_SEARCH_PATHS ${PRO_DIR}/_install/bin ${OpenCV_LIB_SEARCH_PATH}
)

View File

@ -1,134 +0,0 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include "mynteye/logger.h"
#include "mynteye/api/api.h"
#include "mynteye/util/times.h"
#include "dataset/dataset.h"
MYNTEYE_USE_NAMESPACE
// Record tool entry point: grabs stereo frames and IMU samples from the
// device, shows a side-by-side preview, and saves everything to a dataset
// directory until ESC/Q is pressed.
int main(int argc, char *argv[]) {
  auto &&api = API::Create(argc, argv);
  if (!api) return 1;

  bool ok;
  auto &&request = api->SelectStreamRequest(&ok);
  if (!ok) return 1;
  api->ConfigStreamRequest(request);

  api->LogOptionInfos();

  // Enable this will cache the motion datas until you get them.
  api->EnableMotionDatas();
  api->Start(Source::ALL);

  // Output directory: first CLI argument, or ./dataset by default.
  const char *outdir;
  if (argc >= 2) {
    outdir = argv[1];
  } else {
    outdir = "./dataset";
  }
  tools::Dataset dataset(outdir);

  cv::namedWindow("frame");

  std::size_t img_count = 0;
  std::size_t imu_count = 0;
  auto &&time_beg = times::now();
  while (true) {
    api->WaitForStreams();

    auto &&left_datas = api->GetStreamDatas(Stream::LEFT);
    auto &&right_datas = api->GetStreamDatas(Stream::RIGHT);
    img_count += left_datas.size();

    auto &&motion_datas = api->GetMotionDatas();
    imu_count += motion_datas.size();

    // Preview only the most recent pair.
    auto &&left_frame = left_datas.back().frame;
    auto &&right_frame = right_datas.back().frame;

    cv::Mat img;

    // Build the side-by-side preview; handling depends on pixel format.
    if (left_frame->format() == Format::GREY) {
      cv::Mat left_img(
          left_frame->height(), left_frame->width(), CV_8UC1,
          left_frame->data());
      cv::Mat right_img(
          right_frame->height(), right_frame->width(), CV_8UC1,
          right_frame->data());
      cv::hconcat(left_img, right_img, img);
    } else if (left_frame->format() == Format::YUYV) {
      cv::Mat left_img(
          left_frame->height(), left_frame->width(), CV_8UC2,
          left_frame->data());
      cv::Mat right_img(
          right_frame->height(), right_frame->width(), CV_8UC2,
          right_frame->data());
      cv::cvtColor(left_img, left_img, cv::COLOR_YUV2BGR_YUY2);
      cv::cvtColor(right_img, right_img, cv::COLOR_YUV2BGR_YUY2);
      cv::hconcat(left_img, right_img, img);
    } else if (left_frame->format() == Format::BGR888) {
      cv::Mat left_img(
          left_frame->height(), left_frame->width(), CV_8UC3,
          left_frame->data());
      cv::Mat right_img(
          right_frame->height(), right_frame->width(), CV_8UC3,
          right_frame->data());
      cv::hconcat(left_img, right_img, img);
    } else {
      // Unsupported pixel format.
      return -1;
    }

    cv::imshow("frame", img);

    // Skip the first few frames/samples before starting to save.
    if (img_count > 10 && imu_count > 50) {  // save
      for (auto &&left : left_datas) {
        dataset.SaveStreamData(Stream::LEFT, left);
      }
      for (auto &&right : right_datas) {
        dataset.SaveStreamData(Stream::RIGHT, right);
      }
      for (auto &&motion : motion_datas) {
        dataset.SaveMotionData(motion);
      }
      std::cout << "\rSaved " << img_count << " imgs"
                << ", " << imu_count << " imus" << std::flush;
    }

    char key = static_cast<char>(cv::waitKey(1));
    if (key == 27 || key == 'q' || key == 'Q') {  // ESC/Q
      break;
    }
  }
  std::cout << " to " << outdir << std::endl;
  auto &&time_end = times::now();

  api->Stop(Source::ALL);

  // Report elapsed time and effective image/imu rates.
  float elapsed_ms =
      times::count<times::microseconds>(time_end - time_beg) * 0.001f;
  LOG(INFO) << "Time beg: " << times::to_local_string(time_beg)
            << ", end: " << times::to_local_string(time_end)
            << ", cost: " << elapsed_ms << "ms";
  LOG(INFO) << "Img count: " << img_count
            << ", fps: " << (1000.f * img_count / elapsed_ms);
  LOG(INFO) << "Imu count: " << imu_count
            << ", hz: " << (1000.f * imu_count / elapsed_ms);
  return 0;
}

View File

@ -1,442 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2018 Slightech Co., Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=missing-docstring
from __future__ import print_function
def isiter_not_str(obj):
  """Return True if obj is iterable but is not a text/bytes type.

  Strings are iterable in Python 3 (and lack ``__iter__`` in Python 2), so
  they are excluded explicitly to keep "splat the result" logic in
  Dataset.iterate from exploding a string into characters.

  The original referenced ``basestring`` directly, which raises NameError on
  Python 3; the fallback below keeps the same behavior on both versions.
  """
  try:
    string_types = basestring  # Python 2
  except NameError:
    string_types = (str, bytes)  # Python 3: basestring was removed
  return hasattr(obj, '__iter__') and not isinstance(obj, string_types)
class What(object):
  """Names of the data kinds a Dataset can produce.

  The values double as selectors for ``Dataset.generate(*what)`` and as the
  keys of the dicts that ``generate`` yields.
  """
  img_left = "img_left"
  img_right = "img_right"
  imu = "imu"
  temp = "temp"
class DataError(Exception):
  """Raised when a dataset is missing, malformed, or a method is abstract."""

  def __init__(self, message):
    # Base Exception is initialized without arguments, matching the original
    # behavior (str(exc) stays empty); the text lives on .message only.
    Exception.__init__(self)
    self.message = message
class Data(object):
  """Base record type: anything carrying a single timestamp."""

  def __init__(self):
    self._timestamp = 0

  @property
  def timestamp(self):
    """Sample time; the unit/epoch is defined by the producing dataset."""
    return self._timestamp

  @timestamp.setter
  def timestamp(self, stamp):
    self._timestamp = stamp

  def __str__(self):
    return "timestamp: %f" % self._timestamp
class Image(Data):
  """Timestamped image sample.

  ``data`` holds the pixel payload (None until filled in by a reader);
  width/height are fixed at 0 because frame geometry is not parsed here.
  """

  def __init__(self):
    super(Image, self).__init__()
    self._data = None

  @property
  def data(self):
    return self._data

  @data.setter
  def data(self, value):
    self._data = value

  @property
  def width(self):
    # Not tracked by this reader.
    return 0

  @property
  def height(self):
    # Not tracked by this reader.
    return 0
class IMU(Data):
  """Timestamped IMU sample: 3-axis accelerometer + 3-axis gyroscope."""

  def __init__(self):
    super(IMU, self).__init__()
    self._flag = 0
    self._accel_x = 0
    self._accel_y = 0
    self._accel_z = 0
    self._gyro_x = 0
    self._gyro_y = 0
    self._gyro_z = 0

  @property
  def flag(self):
    # Raw sample flag as recorded by the producer.
    return self._flag

  @flag.setter
  def flag(self, value):
    self._flag = value

  @property
  def accel(self):
    """Accelerometer reading as an (x, y, z) tuple."""
    return self._accel_x, self._accel_y, self._accel_z

  @property
  def accel_x(self):
    return self._accel_x

  @accel_x.setter
  def accel_x(self, value):
    self._accel_x = value

  @property
  def accel_y(self):
    return self._accel_y

  @accel_y.setter
  def accel_y(self, value):
    self._accel_y = value

  @property
  def accel_z(self):
    return self._accel_z

  @accel_z.setter
  def accel_z(self, value):
    self._accel_z = value

  @property
  def gyro(self):
    """Gyroscope reading as an (x, y, z) tuple."""
    return self._gyro_x, self._gyro_y, self._gyro_z

  @property
  def gyro_x(self):
    return self._gyro_x

  @gyro_x.setter
  def gyro_x(self, value):
    self._gyro_x = value

  @property
  def gyro_y(self):
    return self._gyro_y

  @gyro_y.setter
  def gyro_y(self, value):
    self._gyro_y = value

  @property
  def gyro_z(self):
    return self._gyro_z

  @gyro_z.setter
  def gyro_z(self, value):
    self._gyro_z = value

  def __str__(self):
    text = super(IMU, self).__str__()
    text += "\naccel: {:f}, {:f}, {:f}".format(*self.accel)
    text += "\ngyro: {:f}, {:f}, {:f}".format(*self.gyro)
    return text
class Temp(Data):
  """Timestamped temperature sample."""

  def __init__(self):
    super(Temp, self).__init__()
    self._value = 0

  @property
  def value(self):
    """Temperature reading (unit as recorded by the producer)."""
    return self._value

  @value.setter
  def value(self, temp):
    self._value = temp

  def __str__(self):
    return "{}\ntemp: {:f}".format(super(Temp, self).__str__(), self.value)
class Dataset(object):
  """Abstract data source.

  Subclasses implement ``generate(*what)`` as a generator yielding either
  dicts keyed by ``What.*`` values, non-string iterables, or single values.
  """

  def __init__(self, path):
    self.path = path

  def generate(self, *what):  # pylint: disable=unused-argument
    """Yield the requested data kinds; must be overridden by subclasses."""
    raise DataError('DataError: method not implemented')

  def iterate(self, action, *what):
    """Apply ``action`` to every generated result.

    dict results are splatted as keyword arguments, non-string iterables as
    positional arguments, and anything else is passed as a single argument.
    """
    for item in self.generate(*what):
      if isinstance(item, dict):
        action(**item)
      elif isiter_not_str(item):
        action(*item)
      else:
        action(item)

  def collect(self, *what):
    """Gather generated dict results into ``{what_key: [values...]}``."""
    collected = {}
    for item in self.generate(*what):
      for key in item.keys():
        if key in what:
          collected.setdefault(key, []).append(item[key])
    return collected

  @property
  def timebeg(self):
    raise DataError('DataError: method not implemented')

  @property
  def timeend(self):
    raise DataError('DataError: method not implemented')

  @property
  def duration(self):
    raise DataError('DataError: method not implemented')
class ROSBag(Dataset):
  """Dataset backed by a ROS bag file.

  Keyword config selects the topics to read:
    topic_img_left, topic_img_right, topic_imu, topic_temp
  Any topic left unset (None) is simply never matched.
  """

  def __init__(self, path, **config):
    super(ROSBag, self).__init__(path)
    # dict.get defaults to None, replacing the verbose "x if x in config"
    # chains with identical behavior.
    self.topic_img_left = config.get('topic_img_left')
    self.topic_img_right = config.get('topic_img_right')
    self.topic_imu = config.get('topic_imu')
    self.topic_temp = config.get('topic_temp')
    import yaml
    from rosbag.bag import Bag
    # pylint: disable=protected-access
    # safe_load: the bag info string is plain YAML; yaml.load without an
    # explicit Loader is deprecated and can construct arbitrary objects.
    self._info = yaml.safe_load(Bag(self.path, 'r')._get_yaml_info())

  def generate(self, *what):
    """Yield a {What.*: Data} dict for each matching message in the bag."""
    import rosbag
    hit_img_left = What.img_left in what
    hit_img_right = What.img_right in what
    hit_imu = What.imu in what
    hit_temp = What.temp in what
    try:
      # pylint: disable=unused-variable
      for topic, msg, t in rosbag.Bag(self.path).read_messages():
        result = {}
        # NOTE(review): assumes every message type on the bag carries a
        # header stamp -- confirm for the temperature topic's message type.
        stamp = msg.header.stamp.to_sec()
        if hit_img_left and topic == self.topic_img_left:
          img = Image()
          img.timestamp = stamp
          # pylint: disable=fixme
          # TODO: data with cv_bridge
          result[What.img_left] = img
        elif hit_img_right and topic == self.topic_img_right:
          img = Image()
          img.timestamp = stamp
          # TODO: data with cv_bridge
          result[What.img_right] = img
        elif hit_imu and topic == self.topic_imu:
          imu = IMU()
          imu.timestamp = stamp
          imu.accel_x = msg.linear_acceleration.x
          imu.accel_y = msg.linear_acceleration.y
          imu.accel_z = msg.linear_acceleration.z
          imu.gyro_x = msg.angular_velocity.x
          imu.gyro_y = msg.angular_velocity.y
          imu.gyro_z = msg.angular_velocity.z
          result[What.imu] = imu
        elif hit_temp and topic == self.topic_temp:
          temp = Temp()
          temp.timestamp = stamp
          temp.value = msg.data
          result[What.temp] = temp
        else:
          # raise DataError('DataError: not proper topics in the rosbag')
          continue
        yield result
    finally:
      pass

  @property
  def info(self):
    """Parsed bag metadata (dict produced by rosbag's YAML info)."""
    return self._info

  @property
  def timebeg(self):
    return self._info['start']

  @property
  def timeend(self):
    return self._info['end']

  @property
  def duration(self):
    return self._info['duration']
class MYNTEYE(Dataset):
  """Dataset recorded by the SDK's record sample.

  Expected layout under ``path``:
    left/stream.txt, right/stream.txt, motion.txt
  each a CSV whose header row names a ``timestamp`` column in microseconds.
  """
  # pylint: disable=no-member

  def __init__(self, path):
    super(MYNTEYE, self).__init__(path)
    self._info = self._get_info()

  def _get_info(self):
    """Probe which files exist and read the first/last timestamps.

    Returns an ad-hoc info object with has_* flags, file paths, and
    timebeg/timeend in seconds. Exits the process if the dataset layout
    is empty or the timestamp column is missing.
    """
    import os
    import sys
    from os import path
    info = type('', (), {})()
    info.img_left_dir = path.join(self.path, 'left')
    info.img_left_txt = path.join(info.img_left_dir, 'stream.txt')
    info.has_img_left = path.isfile(info.img_left_txt)
    info.img_right_dir = path.join(self.path, 'right')
    info.img_right_txt = path.join(info.img_right_dir, 'stream.txt')
    info.has_img_right = path.isfile(info.img_right_txt)
    info.imu_txt = path.join(self.path, 'motion.txt')
    info.has_imu = path.isfile(info.imu_txt)
    if info.has_img_left:
      info_txt = info.img_left_txt
    elif info.has_img_right:
      info_txt = info.img_right_txt
    elif info.has_imu:
      # Fixed: previously used info.img_left_txt here, which does not exist
      # when only motion.txt is present, so IMU-only datasets failed to open.
      info_txt = info.imu_txt
    else:
      sys.exit('Error: Dataset is empty or unexpected format')
    with open(info_txt, 'rb') as f:
      # NOTE(review): the file is opened binary but split with str
      # separators -- Python-2 style; would need decoding on Python 3.
      fields = [_.strip() for _ in f.readline().split(',')]
      first = f.readline()
      # Walk backwards from EOF to the start of the last full line.
      f.seek(-2, os.SEEK_END)
      while f.read(1) != b'\n':
        f.seek(-2, os.SEEK_CUR)
      last = f.readline()
    index = -1
    for i, field in enumerate(fields):
      if field == 'timestamp':
        index = i
        break
    if index == -1:
      sys.exit('Error: Dataset is unexpected format, timestamp not found')
    # unit from 1us to 1s
    info.timebeg = float(first.split(',')[index].strip()) * 0.000001
    info.timeend = float(last.split(',')[index].strip()) * 0.000001
    # print('time: [{}, {}]'.format(info.timebeg, info.timeend))
    return info

  def generate(self, *what):
    """Yield Image/IMU/Temp records for the requested ``What.*`` kinds."""
    hit_img_left = What.img_left in what
    hit_img_right = What.img_right in what
    hit_imu = What.imu in what
    hit_temp = What.temp in what

    def get_fields(f):
      # Map column name -> column index from a CSV header line.
      fields = {}
      for i, field in enumerate(_.strip() for _ in f.readline().split(',')):
        fields[field] = i
      return fields

    if hit_img_left and self._info.has_img_left:
      with open(self._info.img_left_txt) as f:
        fields = get_fields(f)
        for line in f:
          values = [_.strip() for _ in line.split(',')]
          img = Image()
          img.timestamp = float(values[fields['timestamp']]) * 0.000001
          yield {What.img_left: img}
    if hit_img_right and self._info.has_img_right:
      with open(self._info.img_right_txt) as f:
        fields = get_fields(f)
        for line in f:
          values = [_.strip() for _ in line.split(',')]
          img = Image()
          img.timestamp = float(values[fields['timestamp']]) * 0.000001
          yield {What.img_right: img}
    if (hit_imu or hit_temp) and self._info.has_imu:
      with open(self._info.imu_txt) as f:
        fields = get_fields(f)
        for line in f:
          values = [_.strip() for _ in line.split(',')]
          imu = IMU()
          imu.timestamp = float(values[fields['timestamp']]) * 0.000001
          imu.flag = values[fields['flag']]
          imu.accel_x = float(values[fields['accel_x']])
          imu.accel_y = float(values[fields['accel_y']])
          imu.accel_z = float(values[fields['accel_z']])
          imu.gyro_x = float(values[fields['gyro_x']])
          imu.gyro_y = float(values[fields['gyro_y']])
          imu.gyro_z = float(values[fields['gyro_z']])
          # Temperature rides along in the same motion.txt rows.
          temp = Temp()
          temp.timestamp = imu.timestamp
          temp.value = float(values[fields['temperature']])
          yield {What.imu: imu, What.temp: temp}

  @property
  def timebeg(self):
    return self._info.timebeg

  @property
  def timeend(self):
    return self._info.timeend

  @property
  def duration(self):
    return self.timeend - self.timebeg
if __name__ == '__main__':
  # Minimal smoke demo of the Dataset generate/iterate protocol.

  class DataA(Dataset):
    # Yields plain scalar values.
    def generate(self, *what):
      yield 'a'
      yield 'b'

  class DataB(Dataset):
    # Yields tuples, which iterate() splats as positional arguments.
    def generate(self, *what):
      yield 'a1', 'a2', 'a3'
      yield 'b1', 'b2', 'b3'

  print('DataA, generate')
  for item in DataA('path').generate("what"):
    print(item)
  print('\nDataA, iterate')
  DataA('path').iterate(print, "what")

  print('\nDataB, generate')
  for item in DataB('path').generate("what"):
    print(', '.join(item))
  print('\nDataB, iterate')
  DataB('path').iterate(lambda *args: print(', '.join(args)), "what")

@ -1 +0,0 @@
Subproject commit 1c7828bedb786c9f4fcf6f31f619bfb71d7b1cd3

View File

@ -1,4 +0,0 @@
matplotlib>=1.5.1
numpy>=1.11.0
pandas>=0.22.0
PyYAML>=3.11

View File

@ -1,58 +0,0 @@
# Copyright 2018 Slightech Co., Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
get_filename_component(DIR_NAME ${CMAKE_CURRENT_LIST_DIR} NAME)

# Route this directory's artifacts under <out>/{lib,bin}/<dir-name>.
set_outdir(
  "${OUT_DIR}/lib/${DIR_NAME}"
  "${OUT_DIR}/lib/${DIR_NAME}"
  "${OUT_DIR}/bin/${DIR_NAME}"
)

## device_writer

# Shared helper library linked into every writer tool below.
# NOTE(review): keyword-less target_link_libraries kept deliberately --
# switching this one call to PRIVATE/PUBLIC would error if any other file
# links this target with the plain signature; audit before modernizing.
add_library(device_writer STATIC device_writer.cc)
target_link_libraries(device_writer mynteye ${OpenCV_LIBS})

## writer tools

# Each tool is a single-source executable with identical link libraries and
# DLL search paths, so declare them in one loop instead of four copy-pasted
# make_executable() stanzas (same expansion, less drift risk).
foreach(writer_tool
    device_info_writer
    img_params_writer
    imu_params_writer
    save_all_infos)
  make_executable(${writer_tool}
    SRCS ${writer_tool}.cc
    LINK_LIBS device_writer
    DLL_SEARCH_PATHS ${PRO_DIR}/_install/bin ${OpenCV_LIB_SEARCH_PATH}
  )
endforeach()