40 Commits
2.3.6 ... 2.3.8

Author SHA1 Message Date
harjeb
9b346ff33a docs(*): update docs v2.3.8 2019-05-22 11:18:25 +08:00
TinyO
36c5491c93 fix(imu params default): value default fix. 3 2019-05-21 18:11:36 +08:00
TinyO
e32a323e57 fix(imu params default): value default fix. 2 2019-05-21 18:09:37 +08:00
TinyO
7e07b7772e fix(imu params default): value default fix. 2019-05-21 17:55:02 +08:00
TinyO
0d0bb1d2ad fix(ros): rosbag record -a bug 2. 2019-05-21 17:38:35 +08:00
TinyO
380e19fa94 fix(ros): rosbag record -a bug. 2019-05-21 17:25:48 +08:00
TinyO
535778ef76 fix(*): cmake sample windows platform complie. 2019-05-21 11:35:11 +08:00
TinyO
c23b0309a9 docs(*): update changelog 2019-05-20 15:01:25 +08:00
TinyO
80c1e84941 docs(*): update changelog 2019-05-20 14:51:39 +08:00
TinyO
a9966b19f9 chore(*): update version 2019-05-20 14:48:19 +08:00
TinyO
8251d1066d fix: vins-fusion time error with hight fps. 2019-05-16 17:14:02 +08:00
TinyO
f3a14f8254 fix: remove osstream output base P/R. 2019-05-15 11:33:30 +08:00
TinyO
ad0774f466 fix: left right rect image swap 2019-05-10 15:57:38 +08:00
TinyO
74ce809acf feat: add 3rd easylogging++ 2019-05-09 09:12:38 +08:00
TinyO
c81f51fdee fix: camera info width and height 2019-05-05 10:30:05 +08:00
TinyOh
65e7a4b288 fix(sample): add .cc head 2019-04-26 10:11:45 +08:00
harjeb
37255b0e1b Merge branch 'develop' of http://gitlab.mynt.com/mynteye/mynt-eye-s-sdk into develop 2019-04-25 11:23:30 +08:00
harjeb
9c8a1420bc docs(*): update docs 2019-04-25 11:23:12 +08:00
TinyOh
3c1a1d3356 Merge branch 'develop' of http://gitlab.mynt.com/mynteye/mynt-eye-s-sdk into develop 2019-04-23 16:20:33 +08:00
TinyOh
deb146f786 feat: add cmake project 2019-04-23 16:19:52 +08:00
harjeb
20fc6a75b6 docs(*): update requirements 2019-04-23 15:54:52 +08:00
John Zhao
7ef64208be Merge branch 'release/2.3.7' into develop
* release/2.3.7:
  docs(*): update changelog
  chore(*): update version
  docs(*): update ORB build
  docs(*): update slam
2019-04-19 14:52:16 +08:00
harjeb
0267f4e4b3 docs(*): update changelog 2019-04-19 14:48:03 +08:00
John Zhao
8446083bf4 chore(*): update version 2019-04-19 14:38:57 +08:00
harjeb
b3acbfe8aa docs(*): update ORB build 2019-04-19 14:22:07 +08:00
TinyOh
c279d97261 fix(ros): use 2 param for ros::time 2019-04-19 10:41:58 +08:00
harjeb
d988fce95a docs(*): update slam 2019-04-19 05:19:55 +08:00
TinyOh
0c0c299c84 fix(slam): rect gray topic sub logic 2019-04-15 13:35:05 +08:00
TinyOh
6dab54117d Merge branch 'develop' of http://gitlab.mynt.com/mynteye/mynt-eye-s-sdk into develop 2019-04-15 13:13:50 +08:00
TinyOh
966511edf1 feat(slam): add rect gray topic 2019-04-15 13:13:27 +08:00
John Zhao
5fb5a876b8 Merge branch 'hotfix/docs' into develop
* hotfix/docs:
  docs(sdk): update exe version
2019-04-15 10:14:53 +08:00
John Zhao
dd81c782b4 Merge branch 'hotfix/ros_imu_timestamp' into develop
* hotfix/ros_imu_timestamp:
  chore(*): update version
2019-04-15 10:05:28 +08:00
John Zhao
3383d43360 Merge branch 'hotfix/ros_imu_timestamp' into develop
* hotfix/ros_imu_timestamp:
  fix(ros): ros error enum error
  fix: 14.04 complie error
  feat(doc): supported set address for 2100
  feat(src): supported set iic address for s2100
  fix(ros): fixed bug of imu align for part of device

# Conflicts:
#	wrappers/ros/src/mynt_eye_ros_wrapper/src/wrapper_nodelet.cc
2019-04-15 09:51:58 +08:00
TinyOh
1a080d438f fix: 14.04 complie error 2019-04-12 14:42:08 +08:00
TinyOh
089c1b1b0c fix: change fps show to right target 2019-04-10 11:40:14 +08:00
TinyOh
7e588d5e24 feat: add fps show in console 2019-04-10 11:31:13 +08:00
TinyOh
4be2d6ae49 feat: add open without select sample 2019-04-04 14:19:17 +08:00
TinyOh
28e089c412 fix(ros): respawn delay change to 10 for avatar 2019-04-04 11:09:09 +08:00
TinyOh
af2327cd92 Merge branch 'develop' of http://gitlab.mynt.com/mynteye/mynt-eye-s-sdk into develop 2019-04-04 09:23:14 +08:00
TinyOh
e28e34eafa fix(api): use BM as default ,only use sgbm when get_disparity 2019-04-04 09:22:26 +08:00
43 changed files with 9892 additions and 436 deletions

7
3rdparty/easyloggingpp/.gitignore vendored Normal file
View File

@@ -0,0 +1,7 @@
build-*
*.pro.user
.DS_Store
release.info
bin/*
logs/*
experiments/*

0
3rdparty/easyloggingpp/CMakeLists.txt vendored Normal file
View File

25
3rdparty/easyloggingpp/LICENSE vendored Normal file
View File

@@ -0,0 +1,25 @@
The MIT License (MIT)
Copyright (c) 2012-2018 Zuhd Web Services
Copyright (c) 2012-2018 @abumusamq
https://github.com/zuhd-org/
https://zuhd.org
https://muflihun.com
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

1539
3rdparty/easyloggingpp/README.md vendored Normal file

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

4569
3rdparty/easyloggingpp/src/easylogging++.h vendored Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -14,7 +14,7 @@
cmake_minimum_required(VERSION 3.0)
project(mynteye VERSION 2.3.6 LANGUAGES C CXX)
project(mynteye VERSION 2.3.8 LANGUAGES C CXX)
include(cmake/Common.cmake)

View File

@@ -1,6 +1,6 @@
# MYNT® EYE S SDK
[![](https://img.shields.io/badge/MYNT%20EYE%20S%20SDK-2.3.6-brightgreen.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK)
[![](https://img.shields.io/badge/MYNT%20EYE%20S%20SDK-2.3.8-brightgreen.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK)
## Overview

View File

@@ -38,7 +38,7 @@ PROJECT_NAME = "MYNT EYE S SDK"
# could be handy for archiving the generated documentation or if some version
# control system is used.
PROJECT_NUMBER = 2.3.5
PROJECT_NUMBER = 2.3.8
# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a

View File

@@ -24,7 +24,7 @@ copyright = '2018, MYNTAI'
author = 'MYNTAI'
# The short X.Y version
version = '2.3.6'
version = '2.3.8'
# The full version, including alpha/beta/rc tags
release = version

View File

@@ -1,5 +1,5 @@
breathe>=4.11.1
restructuredtext-lint>=1.1.3
Sphinx>=1.8.1
breathe>=4.13
restructuredtext-lint>=1.3.0
Sphinx>=2.0.1
sphinx-intl>=0.9.11
sphinx-rtd-theme>=0.4.2

View File

@@ -15,7 +15,7 @@ To set the range of accelerometer and gyroscope, set ``Option::ACCELEROMETER_RAN
For mynteye s2100/s210a, the available settings are:
* The effective range of accelerometer(unit:g): 6, 12, 24, 32.
* The effective range of accelerometer(unit:g): 6, 12, 24, 48.
* The effective range of gyroscope(unit:deg/s): 250, 500, 1000, 2000, 4000.
Reference Code:
@@ -50,7 +50,7 @@ s2100/s210a
if (!ok) return 1;
api->ConfigStreamRequest(request);
// ACCELEROMETER_RANGE values: 6, 12, 24, 32
// ACCELEROMETER_RANGE values: 6, 12, 24, 48
api->SetOptionValue(Option::ACCELEROMETER_RANGE, 6);
// GYROSCOPE_RANGE values: 250, 500, 1000, 2000, 4000
api->SetOptionValue(Option::GYROSCOPE_RANGE, 1000);

View File

@@ -1,6 +1,6 @@
.. _firmware_stm_update:
How to upgrade the auxiliary chip
How to update the Auxiliary Chip
==================================
Update auxiliary chip(Only Support S2100/S210A)

View File

@@ -1,9 +1,9 @@
.. _firmware_upgrade:
How to upgrade the firmware
============================
How to update Main Processing Chip
====================================
Please use the MYNT EYE TOOL to upgrade the firmware.
Please use the MYNT EYE TOOL to update main processing chip.
You can download the firmware and MYNT EYE TOOL installation package in the ``Firmwares`` folder of `MYNTEYE_BOX(Download Link) <http://www.myntai.com/mynteye/s/download>`_ . The file structure is as follows:

View File

@@ -3,6 +3,18 @@
Changelog
=========
2019-05-20(v2.3.8)
-------------------
1. Improve VINS-Fusion supporting
2. Improve VINS-MONO supporting
3. Fix left/right rect image order error
2019-04-19(v2.3.7)
-------------------
1. Improve VINS-Fusion supporting
2. Improve ORB-SLAM2 supporting
2019-04-15(v2.3.6)
-------------------
@@ -15,7 +27,7 @@ Changelog
1. Improve camera info.
2. Modify SGBM/BM parameters by yaml file.
2. Modify image algorithm parameters by yaml file.
3. Add opening multi devices launch file in ROS.
@@ -31,9 +43,9 @@ Changelog
2019-03-18(v2.3.4)
-------------------
1. Add API to get subsidiary chip&ISP's version(Depend on S2100/S210A 1.1 firmware & 1.0 subsidiary chip firmware).
1. Add API to get auxiliary chip&ISP's version(Depend on S2100/S210A 1.1 firmware & 1.0 subsidiary chip firmware).
2. Fix point fragment issue in BM algorithm.
2. Fix point fragment issue in image algorithm.
3. Add 376*240 resolution support to S1030(Depend on 2.4.0 firmware of S1030).

View File

@@ -8,7 +8,7 @@ SDK is built on CMake and can be used cross multiple platforms such as "Linux, W
These are the platforms that can be used:
* Windows 10
* Ubuntu 18.04 / 16.04 / 14.04
* Ubuntu 18.04.1 / 16.04.6 / 14.04.5
* Jetson TX1/TX2 / Xavier
* firefly RK3399

View File

@@ -5,7 +5,6 @@ Open Source project Support
.. toctree::
how_to_use_kalibr
vins
vins_fusion
orb_slam2

View File

@@ -1,300 +0,0 @@
.. _how_to_use_kalibr:
How to calibrate MYNTEYE by kalibr
===================================
Target
------------
* Calibrate the pose relationship between left and right camera
* Calibrate the pose relationship left camera between and IMU
Preparation
------------
* **Install kalibr**:Refer to `kalibr wiki <https://github.com/ethz-asl/kalibr/wiki/installation>`_ and follow the steps to install
* **Calibration board** kalibr supports ``chessbord`` , ``circlegrid`` , ``aprilgrid`` ,choose ``aprilgrid`` here Calibration board file can be directly `download <https://github.com/ethz-asl/kalibr/wiki/downloads>`_ , Or you can also generate calibration board by Kalibr tool.
.. code-block:: bash
$ kalibr_create_target_pdf --type 'apriltag' --nx 6 --ny 6 --tsize 0.08 --tspace 0.3
View parameters' meanings with the kalibr_create_target_pdf command:
.. code-block:: bash
$ kalibr_create_target_pdf --h
usage:
Example Aprilgrid:
kalibr_create_target_pdf --type apriltag --nx 6 --ny 6 --tsize 0.08 --tspace 0.3
Example Checkerboard:
kalibr_create_target_pdf --type checkerboard --nx 6 --ny 6 -csx 0.05 --csy 0.1
Generate PDFs of calibration patterns.
optional arguments:
-h, --help show this help message and exit
Output options:
output Output filename
--eps Also output an EPS file
Generic grid options:
--type GRIDTYPE The grid pattern type. ('apriltag' or 'checkerboard')
--nx N_COLS The number of tags in x direction (default: 6)
--ny N_ROWS The number of tags in y direction (default: 7)
Apriltag arguments:
--tsize TSIZE The size of one tag [m] (default: 0.08)
--tspace TAGSPACING The space between the tags in fraction of the edge size
[0..1] (default: 0.3)
--tfam TAGFAMILIY Familiy of April tags ['t16h5', 't25h9', 't25h7',
't36h11'] (default: t36h11)
Checkerboard arguments:
--csx CHESSSZX The size of one chessboard square in x direction [m]
(default: 0.05)
--csy CHESSSZY The size of one chessboard square in y direction [m]
(default: 0.05)
* **Calibrate the intrinsic IMU parameters** kalibr requires imu data to be calibrated with intrinsic parameters by default. The intrinsic parameter calibration tool uses `imu-tk <https://github.com/Kyle-ak/imu_tk.git>`_ .
* **Count imu data parameter**
* noise density
* bias random walk
Using Allan analyzing tool `imu_utils <https://github.com/gaowenliang/imu_utils>`_, We can get the characteristics of above imu data,and to format the output as ``imu.yaml``
.. code-block:: bash
#Accelerometers
accelerometer_noise_density: 0.02680146180736048 #Noise density (continuous-time)
accelerometer_random_walk: 0.0026296086159332804 #Bias random walk
#Gyroscopes
gyroscope_noise_density: 0.008882328296710996 #Noise density (continuous-time)
gyroscope_random_walk: 0.00037956578292701033 #Bias random walk
rostopic: /mynteye/imu/data_raw #the IMU ROS topic
update_rate: 200.0 #Hz (for discretization of the values above)
Calibrate the pose relationship between left and right camera
--------------------------------------------------------------
* Collect calibration images: kalibr supports the collection of the required calibration images through two ways:by ``rosbag`` or collect offline images . Using ``rosbag`` here for convenience,Reference `link <https://github.com/ethz-asl/kalibr/wiki/bag-format>`_ for collecting images.
* Method of collecting images by ``rosbag`` :fix mynteye camera,move ``aprilgrid`` calibration board in the camera field of view.
* To increase the calibration time,try to use image acquisition data with lower frame rate,kalibr recommends using ``4Hz`` frame rate,here uses ``10hz`` .
* MYNTEYE S series camera offers images at least 10Hz,You can use `topic_tools <http://wiki.ros.org/topic_tools/throttle>`_ to modify frequency,because using 10Hz requires more calibration time.
* Record ``static.bag`` : After fix the mynteye camera,start `wrapper <https://github.com/slightech/MYNT-EYE-S-SDK>`_, record the topic of the left and right images to ``static.bag`` .
.. code-block:: bash
$ source wrappers/ros/devel/setup.bash
$ roslaunch mynt_eye_ros_wrapper display.launch
$ cd ~
$ mkdir -p bag
$ cd bag
$ rosbag record -O static_10hz /mynteye/left/image_raw /mynteye/right/image_raw #we recommend using 10Hz; you can also use topic_tools to publish at 4Hz.
* kalibr calibration:
.. code-block:: bash
$ kalibr_calibrate_cameras --target aprilgrid.yaml --bag ~/bag/static_10hz.bag --models pinhole-radtan pinhole-radtan --topics /mynteye/left/image_raw /mynteye/right/image_raw
View parameters' meanings by kalibr_calibrate_cameras command:
.. code-block:: bash
$ kalibr_calibrate_cameras --h
Calibrate the intrinsics and extrinsics of a camera system with non-shared
overlapping field of view.
usage:
Example usage to calibrate a camera system with two cameras using an aprilgrid.
cam0: omnidirection model with radial-tangential distortion
cam1: pinhole model with equidistant distortion
kalibr_calibrate_cameras --models omni-radtan pinhole-equi --target aprilgrid.yaml \
--bag MYROSBAG.bag --topics /cam0/image_raw /cam1/image_raw
example aprilgrid.yaml:
target_type: 'aprilgrid'
tagCols: 6
tagRows: 6
tagSize: 0.088 #m
tagSpacing: 0.3 #percent of tagSize
optional arguments:
-h, --help show this help message and exit
--models MODELS [MODELS ...]
The camera model ['pinhole-radtan', 'pinhole-equi',
'omni-radtan', 'pinhole-fov'] to estimate
Data source:
--bag BAGFILE The bag file with the data
--topics TOPICS [TOPICS ...]
The list of image topics
--bag-from-to bag_from_to bag_from_to
Use the bag data starting from up to this time [s]
Calibration target configuration:
--target TARGETYAML Calibration target configuration as yaml file
Image synchronization:
--approx-sync MAX_DELTA_APPROXSYNC
Time tolerance for approximate image synchronization
[s] (default: 0.02)
Calibrator settings:
--qr-tol QRTOL The tolerance on the factors of the QR decomposition
(default: 0.02)
--mi-tol MITOL The tolerance on the mutual information for adding an
image. Higher means fewer images will be added. Use -1
to force all images. (default: 0.2)
--no-shuffle Do not shuffle the dataset processing order
Outlier filtering options:
--no-outliers-removal
Disable corner outlier filtering
--no-final-filtering Disable filtering after all views have been processed.
--min-views-outlier MINVIEWOUTLIER
Number of raw views to initialize statistics (default:
20)
--use-blakezisserman Enable the Blake-Zisserman m-estimator
--plot-outliers Plot the detect outliers during extraction (this could
be slow)
Output options:
--verbose Enable (really) verbose output (disables plots)
--show-extraction Show the calibration target extraction. (disables
plots)
--plot Plot during calibration (this could be slow).
--dont-show-report Do not show the report on screen after calibration.
Output the following three files after finish calibration:
* ``camchain-homezhangsbagstatic_10hz.yaml``
* ``report-cam-homezhangsbagstatic_10hz.pdf``
* ``results-cam-homezhangsbagstatic_10hz.txt``
.. tip::
If you use camera parameters in Vins,it would be better to choose the pinhole-equi model or the omni-radtan model.If you use camera parameters in Maplab,please choose pinhole-equi model
Calibrate the pose relationship between camera and IMU coordinate system
-------------------------------------------------------------------------
* **Collect calibration data**: as when calibrating the camera pose relationship, Kalibr supports two ways to collect data; we still use ``rosbag`` here.
* Method of collecting images: fix the ``aprilgrid`` calibration board, move the camera
* Make sure that the data collected is good:the brightness of the calibration board should be appropriate,too bright or too dark can't guarantee the quality of data,meanwhile do not shake too fast to avoid blurring of the image.
* Set the imu publishing frequency to 200Hz,image to 20Hz(recommended by kalibr)
* Fully motivate each axis of the imu,for example ,3 actions on each axis,then in the \"8-shaped\" motion
* Record camera and imu as ``dynamic.bag``.
.. code-block:: bash
$ roslaunch mynt_eye_ros_wrapper display.launch
$ cd bag
$ rosbag record -O dynamic /mynteye/left/image_raw /mynteye/right/image_raw /mynteye/imu/data_raw #remember set image hz to 20hz, imu hz to 200hz
* kalibr calibration:
.. code-block:: bash
$ kalibr_calibrate_imu_camera --cam camchain-homezhangsbagstatic_10hz.yaml --target aprilgrid.yaml --imu imu.yaml --time-calibration --bag ~/bag/dynamic.bag
View the parameters' meanings by kalibr_calibrate_imu_camera command
.. code-block:: bash
$ kalibr_calibrate_imu_camera --h
Calibrate the spatial and temporal parameters of an IMU to a camera chain.
usage:
Example usage to calibrate a camera system against an IMU using an aprilgrid
with temporal calibration enabled.
kalibr_calibrate_imu_camera --bag MYROSBAG.bag --cam camchain.yaml --imu imu.yaml \
--target aprilgrid.yaml --time-calibration
camchain.yaml: is the camera-system calibration output of the multiple-camera
calibratin tool (kalibr_calibrate_cameras)
example aprilgrid.yaml: | example imu.yaml: (ADIS16448)
target_type: 'aprilgrid' | accelerometer_noise_density: 0.006
tagCols: 6 | accelerometer_random_walk: 0.0002
tagRows: 6 | gyroscope_noise_density: 0.0004
tagSize: 0.088 | gyroscope_random_walk: 4.0e-06
tagSpacing: 0.3 | update_rate: 200.0
optional arguments:
-h, --help show this help message and exit
Dataset source:
--bag BAGFILE Ros bag file containing image and imu data (rostopics
specified in the yamls)
--bag-from-to bag_from_to bag_from_to
Use the bag data starting from up to this time [s]
--perform-synchronization
Perform a clock synchronization according to 'Clock
synchronization algorithms for network measurements'
by Zhang et al. (2002).
Camera system configuration:
--cams CHAIN_YAML Camera system configuration as yaml file
--recompute-camera-chain-extrinsics
Recompute the camera chain extrinsics. This option is
exclusively recommended for debugging purposes in
order to identify problems with the camera chain
extrinsics.
--reprojection-sigma REPROJECTION_SIGMA
Standard deviation of the distribution of reprojected
corner points [px]. (default: 1.0)
IMU configuration:
--imu IMU_YAMLS [IMU_YAMLS ...]
Yaml files holding the IMU noise parameters. The first
IMU will be the reference IMU.
--imu-delay-by-correlation
Estimate the delay between multiple IMUs by
correlation. By default, no temporal calibration
between IMUs will be performed.
--imu-models IMU_MODELS [IMU_MODELS ...]
The IMU models to estimate. Currently supported are
'calibrated', 'scale-misalignment' and 'scale-
misalignment-size-effect'.
Calibration target:
--target TARGET_YAML Calibration target configuration as yaml file
Optimization options:
--time-calibration Enable the temporal calibration
--max-iter MAX_ITER Max. iterations (default: 30)
--recover-covariance Recover the covariance of the design variables.
--timeoffset-padding TIMEOFFSET_PADDING
Maximum range in which the timeoffset may change
during estimation [s] (default: 0.01)
Output options:
--show-extraction Show the calibration target extraction. (disables
plots)
--extraction-stepping
Show each image during calibration target extraction
(disables plots)
--verbose Verbose output (disables plots)
--dont-show-report Do not show the report on screen after calibration.
Output the following 4 files after finishing calibration:
* ``camchain-imucam-homezhangsbagdynamic.yaml``
* ``imu-homezhangsbagdynamatic.yaml``
* ``report-imucam-homezhangsbagdynamic.pdf``
* ``results-imucam-homezhangsbagdynamic.yaml``

View File

@@ -9,41 +9,38 @@ If you wanna run ORB_SLAM2 with MYNT EYE camera, please follow the steps:
1. Download `MYNT-EYE-S-SDK <https://github.com/slightech/MYNT-EYE-S-SDK.git>`_ and follow steps to install.
2. Follow the normal procedure to install ORB_SLAM2.
3. Update ``distortion_parameters`` and ``projection_parameters`` to ``<ORB_SLAM2>/config/mynteye_*.yaml``.
4. Run examples by MYNT® EYE.
3. Run examples by MYNT® EYE.
Binocular camera sample
------------------------
* Calibrate a stereo camera with `ROS-StereoCalibration <http://wiki.ros.org/camera_calibration/Tutorials/StereoCalibration>`_ or OpenCV, and then update parameters to ``<ORB_SLAM2>/config/mynteye_s_stereo.yaml``.
* Execute ``build.sh``:
Prerequisites
--------------
.. code-block:: bash
chmod +x build.sh
./build.sh
* Run stereo sample using the follow type:
.. code-block:: bash
./Examples/Stereo/stereo_mynt_s ./Vocabulary/ORBvoc.txt ./config/mynteye_s_stereo.yaml true /mynteye/left/image_raw /mynteye/right/image_raw
sudo apt-get -y install libglew-dev cmake
cd ~
git clone https://github.com/stevenlovegrove/Pangolin.git
cd Pangolin
mkdir build
cd build
cmake ..
cmake --build .
sudo make install
Building the nodes for mono and stereo (ROS)
--------------------------------------------
* Add the path including ``Examples/ROS/ORB_SLAM2`` to the ``ROS_PACKAGE_PATH`` environment variable. Open ``.bashrc`` file and add at the end the following line. Replace ``PATH`` by the folder where you cloned ORB_SLAM2:
* Add the path including ``Examples/ROS/ORB_SLAM2`` to the ``ROS_PACKAGE_PATH`` environment variable. Open ``.bashrc`` file and add at the end the following line.
.. code-block:: bash
export ROS_PACKAGE_PATH=${ROS_PACKAGE_PATH}:PATH/ORB_SLAM2/Examples/ROS
export ROS_PACKAGE_PATH=${ROS_PACKAGE_PATH}:~/catkin_ws/src/MYNT-EYE-ORB-SLAM2-Sample
* Execute `build_ros.sh`:
.. code-block:: bash
chmod +x build.sh
./build.sh
chmod +x build_ros.sh
./build_ros.sh
@@ -51,8 +48,6 @@ Building the nodes for mono and stereo (ROS)
Stereo_ROS Example
~~~~~~~~~~~~~~~~~~~
* Reference ``Get camera calibration parameters`` in :ref:`slam_okvis` to get ``distortion_parameters`` and ``projection_parameters`` , and update ``<ORB_SLAM2>/config/mynteye_s_stereo.yaml`` .
* Launch ORB_SLAM2 ``Stereo_ROS``
1. Launch mynteye node
@@ -68,4 +63,4 @@ Stereo_ROS Example
.. code-block:: bash
rosrun ORB_SLAM2 mynteye_s_stereo ./Vocabulary/ORBvoc.txt ./config/mynteye_s_stereo.yaml true /mynteye/left/image_raw /mynteye/right/image_raw
rosrun ORB_SLAM2 mynteye_s_stereo ./Vocabulary/ORBvoc.txt ./config/mynteye_s_stereo.yaml false /mynteye/left_rect/image_rect /mynteye/right_rect/image_rect

View File

@@ -9,8 +9,7 @@ If you wanna run VINS-Mono with MYNT EYE camera, please follow the steps:
1. Download `MYNT-EYE-S-SDK <https://github.com/slightech/MYNT-EYE-S-SDK.git>`_ and install mynt_eye_ros_wrapper.
2. Follow the normal procedure to install VINS-Mono.
3. Update ``distortion_parameters`` and ``projection_parameters`` to `here <https://github.com/slightech/MYNT-EYE-VINS-Sample/blob/mynteye/config/mynteye/mynteye_s_config.yaml>`_ .
4. Run mynt_eye_ros_wrapper and VINS-Mono.
3. Run mynt_eye_ros_wrapper and VINS-Mono.
Install ROS Kinetic conveniently (if already installed, please ignore)
----------------------------------------------------------------------
@@ -21,35 +20,40 @@ Install ROS Kinetic conveniently (if already installed, please ignore)
wget https://raw.githubusercontent.com/oroca/oroca-ros-pkg/master/ros_install.sh && \
chmod 755 ./ros_install.sh && bash ./ros_install.sh catkin_ws kinetic
Install Docker
---------------
.. code-block:: bash
sudo apt-get update
sudo apt-get install \
apt-transport-https \
ca-certificates \
curl \
gnupg-agent \
software-properties-common
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
sudo add-apt-repository \
"deb [arch=amd64] https://download.docker.com/linux/ubuntu \
$(lsb_release -cs) \
stable"
sudo apt-get update
sudo apt-get install docker-ce docker-ce-cli containerd.io
Then add your account to ``docker`` group by ``sudo usermod -aG docker $YOUR_USER_NAME`` . Relaunch the terminal or logout and re-login if you get ``Permission denied`` error.
To compile with docker, we recommend that you use more than 16G RAM, or ensure that the RAM plus virtual memory space is greater than 16G.
Install MYNT-EYE-VINS-Sample
------------------------------
.. code-block:: bash
mkdir -p ~/catkin_ws/src
cd ~/catkin_ws/src
git clone -b mynteye https://github.com/slightech/MYNT-EYE-VINS-Sample.git
cd ..
catkin_make
source devel/setup.bash
echo "source ~/catkin_ws/devel/setup.bash" >> ~/.bashrc
source ~/.bashrc
git clone -b docker_feat https://github.com/slightech/MYNT-EYE-VINS-Sample.git
cd MYNT-EYE-VINS-Sample/docker
make build
Get image calibration parameters
---------------------------------
Use MYNT® EYE's left eye camera and IMU. With the `MYNT-EYE-S-SDK <https://github.com/slightech/MYNT-EYE-S-SDK.git>`_ API ``GetIntrinsics()`` function and ``GetExtrinsics()`` function, you can get the image calibration parameters of the current working device:
.. code-block:: bash
cd MYNT-EYE-S-SDK
./samples/_output/bin/tutorials/get_img_params
After running the above type, pinhole's ``distortion_parameters`` and ``projection_parameters`` is obtained , and then update to `here <https://github.com/slightech/MYNT-EYE-VINS-Sample/blob/mynteye-s/config/mynteye/mynteye_config.yaml>`_ .
.. tip::
You can get the camera model of device when get camera calibration parameters, if model is equidistant you need calibrate pinhole model by yourself or reference :ref:`write_img_params` to write a default pinhole config file to your device.
Note that the docker building process may take a while depends on your network and machine. After VINS-Mono successfully started, open another terminal and play your bag file, then you should be able to see the result. If you need modify the code, simply run ``./run.sh LAUNCH_FILE_NAME`` after your changes.
Run VINS-Mono with MYNT® EYE
-----------------------------
@@ -60,15 +64,12 @@ Run VINS-Mono with MYNT® EYE
cd (local path of MYNT-EYE-S-SDK)
source ./wrappers/ros/devel/setup.bash
roslaunch mynt_eye_ros_wrapper mynteye.launch
roslaunch mynt_eye_ros_wrapper vins_mono.launch
2. Open another terminal and run vins
.. code-block:: bash
cd ~/catkin_ws
roslaunch vins_estimator mynteye_s.launch
.. note::
If you want to use a fish-eye camera model, please click `here <https://github.com/slightech/MYNT-EYE-VINS-Sample/tree/mynteye-s/calibration_images>`_ .
cd path/to/VINS-Mono/docker
./run.sh mynteye_s.launch
# ./run.sh mynteye_s2100.launch # mono with s2100

View File

@@ -11,12 +11,38 @@ If you wanna run VINS-Fusion with MYNT EYE camera, please follow the steps:
2. Follow the normal procedure to install VINS-Fusion.
3. Run mynt_eye_ros_wrapper and VINS-Fusion.
Install ROS Kinetic conveniently (if already installed, please ignore)
----------------------------------------------------------------------
Prerequisites
--------------
.. code-block:: bash
1. Install Ubuntu 64-bit 16.04 or 18.04. ROS Kinetic or Melodic.(if already installed, please ignore). `ROS Installation <http://wiki.ros.org/ROS/Installation>`_
2. Install `Ceres <http://ceres-solver.org/installation.html>`_
cd ~
wget https://raw.githubusercontent.com/oroca/oroca-ros-pkg/master/ros_install.sh && \
chmod 755 ./ros_install.sh && bash ./ros_install.sh catkin_ws kinetic
Install Docker
---------------
.. code-block:: bash
sudo apt-get update
sudo apt-get install \
apt-transport-https \
ca-certificates \
curl \
gnupg-agent \
software-properties-common
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
sudo add-apt-repository \
"deb [arch=amd64] https://download.docker.com/linux/ubuntu \
$(lsb_release -cs) \
stable"
sudo apt-get update
sudo apt-get install docker-ce docker-ce-cli containerd.io
Then add your account to ``docker`` group by ``sudo usermod -aG docker $YOUR_USER_NAME`` . Relaunch the terminal or logout and re-login if you get ``Permission denied`` error.
To compile with docker, we recommend that you use more than 16G RAM, or ensure that the RAM plus virtual memory space is greater than 16G.
Install MYNT-EYE-VINS-FUSION-Samples
@@ -24,14 +50,11 @@ Install MYNT-EYE-VINS-FUSION-Samples
.. code-block:: bash
mkdir -p ~/catkin_ws/src
cd ~/catkin_ws/src
git clone -b mynteye https://github.com/slightech/MYNT-EYE-VINS-FUSION-Samples.git
cd ..
catkin_make
source ~/catkin_ws/devel/setup.bash
git clone https://github.com/slightech/MYNT-EYE-VINS-FUSION-Samples.git
cd MYNT-EYE-VINS-FUSION-Samples/docker
make build
(if you fail in this step, try to find another computer with clean system or reinstall Ubuntu and ROS)
Note that the docker building process may take a while depending on your network and machine. After VINS-Fusion has successfully started, open another terminal and play your bag file, then you should be able to see the result. If you need to modify the code, simply run ``./run.sh LAUNCH_FILE_NAME`` after your changes.
Run VINS-FUSION with MYNT® EYE
-------------------------------
@@ -42,14 +65,13 @@ Run VINS-FUSION with MYNT® EYE
cd (local path of MYNT-EYE-S-SDK)
source ./wrappers/ros/devel/setup.bash
roslaunch mynt_eye_ros_wrapper mynteye.launch
roslaunch mynt_eye_ros_wrapper vins_fusion.launch
2. Open another terminal and run vins
.. code-block:: bash
cd ~/catkin_ws
roslaunch vins mynteye-s-mono-imu.launch # mono+imu fusion
# roslaunch vins mynteye-s-stereo.launch # Stereo fusion / Stereo+imu fusion
# roslaunch vins mynteye-avarta-mono-imu.launch # mono+imu fusion with mynteye-avarta
# roslaunch vins mynteye-avarta-stereo.launch # Stereo fusion / Stereo+imu fusion with mynteye-avarta
cd path/to/this_repo/docker
./run.sh mynteye-s/mynt_stereo_imu_config.yaml # Stereo fusion
# ./run.sh mynteye-s2100/mynt_stereo_config.yaml # Stereo fusion with mynteye-s2100
# ./run.sh mynteye-s2100/mynt_stereo_imu_config.yaml # Stereo+imu fusion with mynteye-s2100

View File

@@ -51,9 +51,17 @@ The ROS file is structured like follows:
<sdk>/wrappers/ros/
├─src/
│ └─mynt_eye_ros_wrapper/
│ ├─config/
│ │ ├─device/
│ │ ├─standard.yaml # S1030
│ │ └─standard2.yaml # S2100/S210A
│ │ ├─laserscan/
│ │ ├─process/
│ │ └─...
│ ├─launch/
│ │ ├─display.launch
│ │ └─mynteye.launch
│ │ └─...
│ ├─msg/
│ ├─rviz/
│ ├─src/
@@ -64,35 +72,41 @@ The ROS file is structured like follows:
│ └─package.xml
└─README.md
In ``mynteye.launch``, you can configure the topics and frame_ids, decide which data to enable, and set the control options. Please set ``gravity`` to the local gravity acceleration.
In ``mynteye.launch``, you can configure the topics and frame_ids, and decide which data to enable. Device parameters can be set in ``standard.yaml`` (or ``standard2.yaml``, the config file for S2100/S210A). Please set ``gravity`` to the local gravity acceleration.
standard.yaml/standard2.yaml:
.. code-block:: xml
# s2100/s210a modify frame/resolution
<arg name="request_index" default="$(arg index_s2_2)" />
standard2/request_index: 2
# s1030 modify frame/imu hz
<!-- standard/frame_rate range: {10,15,20,25,30,35,40,45,50,55,60} -->
<arg name="standard/frame_rate" default="-1" />
<!-- <arg name="standard/frame_rate" default="25" /> -->
# standard/frame_rate range: {10,15,20,25,30,35,40,45,50,55,60}
standard/frame_rate: -1
# standard/frame_rate: 25
<!-- standard/imu_frequency range: {100,200,250,333,500} -->
<arg name="standard/imu_frequency" default="-1" />
<!-- <arg name="standard/imu_frequency" default="200" /> -->
# standard/imu_frequency range: {100,200,250,333,500}
standard/imu_frequency: -1
# standard/imu_frequency: 200
...
# s2100 modify brightness
<!-- standard2/brightness range: [0,240] -->
<arg name="standard2/brightness" default="-1" />
<!-- <arg name="standard2/brightness" default="120" /> -->
# standard2/brightness range: [0,240]
standard2/brightness: -1
# standard2/brightness: 120
...
# s210a modify brightness
<!-- standard210a/brightness range: [0,240] -->
<arg name="standard210a/brightness" default="-1" />
<!-- <arg name="standard210a/brightness" default="120" /> -->
# standard210a/brightness range: [0,240]
standard210a/brightness: -1
# standard210a/brightness: 120
...
mynteye.launch:
.. code-block:: xml
<arg name="gravity" default="9.8" />

View File

@@ -0,0 +1,74 @@
cmake_minimum_required(VERSION 3.0)
project(mynteye_demo VERSION 1.0.0 LANGUAGES C CXX)
# flags
# NOTE: -Wall/-O3/-std=c++11/-march=native are GCC/Clang-style options, so only
# apply them on non-MSVC compilers (this project explicitly supports MSVC below).
# The C++ standard flag must NOT go into CMAKE_C_FLAGS: the C compiler rejects
# (or at best warns about) -std=c++11.
if(NOT MSVC)
  set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wall -O3 -march=native")
  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -O3 -std=c++11 -march=native")
endif()
# packages
if(MSVC)
  # On Windows, locate the SDK and its bundled OpenCV via MYNTEYES_SDK_ROOT.
  set(SDK_ROOT "$ENV{MYNTEYES_SDK_ROOT}")
  if(SDK_ROOT)
    message(STATUS "MYNTEYES_SDK_ROOT: ${SDK_ROOT}")
    list(APPEND CMAKE_PREFIX_PATH
      "${SDK_ROOT}/lib/cmake"
      "${SDK_ROOT}/3rdparty/opencv/build"
    )
  else()
    message(FATAL_ERROR "MYNTEYES_SDK_ROOT not found, please install SDK firstly")
  endif()
endif()
## mynteye
find_package(mynteye REQUIRED)
message(STATUS "Found mynteye: ${mynteye_VERSION}")
# When SDK build with OpenCV, we can add WITH_OPENCV macro to enable some
# features depending on OpenCV, such as ToMat().
if(mynteye_WITH_OPENCV)
  add_definitions(-DWITH_OPENCV)
endif()
## OpenCV
# Set where to find OpenCV
#set(OpenCV_DIR "/usr/share/OpenCV")
# When SDK build with OpenCV, we must find the same version here.
find_package(OpenCV REQUIRED)
message(STATUS "Found OpenCV: ${OpenCV_VERSION}")
# targets
include_directories(
  ${OpenCV_INCLUDE_DIRS}
)
## mynteye_demo
add_executable(mynteye_demo mynteye_demo.cc)
target_link_libraries(mynteye_demo mynteye ${OpenCV_LIBS})
# Build
# mkdir _build
# cd _build
#
# # win
# cmake -G "Visual Studio 15 2017 Win64" ..
# msbuild.exe ALL_BUILD.vcxproj /property:Configuration=Release
#
# .\Release\mynteye_demo.exe
#
# # unix
# cmake ..
# make
#
# ./mynteye_demo

View File

@@ -0,0 +1,61 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <stdio.h>
#include <opencv2/highgui/highgui.hpp>
#include "mynteye/api/api.h"
MYNTEYE_USE_NAMESPACE
int main(int argc, char const *argv[]) {
  // Create the API with no command-line options; bail out if no device found.
  auto &&api = API::Create(0, nullptr);
  if (!api) return 1;

  // Let the user pick a stream request (resolution/format/fps) interactively.
  bool ok;
  auto &&request = api->SelectStreamRequest(&ok);
  if (!ok) return 1;
  api->ConfigStreamRequest(request);

  api->Start(Source::VIDEO_STREAMING);

  double fps = 0.0;
  double t = 0.01;  // previous-frame timestamp; the first fps sample is bogus
  // Use printf consistently — the file does not include <iostream>, so the
  // original std::cout usage relied on a transitive include.
  printf("fps:\n");
  cv::namedWindow("frame");
  while (true) {
    api->WaitForStreams();
    auto &&left_data = api->GetStreamData(Stream::LEFT);
    auto &&right_data = api->GetStreamData(Stream::RIGHT);
    cv::Mat img;
    if (!left_data.frame.empty() && !right_data.frame.empty()) {
      double t_c = cv::getTickCount() / cv::getTickFrequency();
      fps = 1.0 / (t_c - t);
      // Backspace over the previous reading and overwrite it in place.
      printf("\b\b\b\b\b\b\b\b\b%.2f", fps);
      fflush(stdout);  // no trailing newline, so flush or nothing shows up
      t = t_c;
      cv::hconcat(left_data.frame, right_data.frame, img);
      cv::imshow("frame", img);
    }
    char key = static_cast<char>(cv::waitKey(1));
    if (key == 27 || key == 'q' || key == 'Q') {  // ESC/Q to quit
      break;
    }
  }
  api->Stop(Source::VIDEO_STREAMING);
  return 0;
}

View File

@@ -99,6 +99,8 @@ make_executable2(get_stereo SRCS data/get_stereo.cc WITH_OPENCV)
make_executable2(get_stereo_rectified SRCS data/get_stereo_rectified.cc WITH_OPENCV)
make_executable2(get_disparity SRCS data/get_disparity.cc WITH_OPENCV)
make_executable2(get_depth SRCS data/get_depth.cc WITH_OPENCV)
make_executable2(get_data_without_select SRCS data/get_data_without_select.cc WITH_OPENCV)
if(PCL_FOUND)
make_executable2(get_points
SRCS data/get_points.cc util/pc_viewer.cc

View File

@@ -42,7 +42,7 @@ int main(int argc, char *argv[]) {
// Set imu range for S2000/S2100/S210A
if (model == Model::STANDARD2 || model == Model::STANDARD210A) {
// ACCELEROMETER_RANGE values: 6, 12, 24, 32
// ACCELEROMETER_RANGE values: 6, 12, 24, 48
api->SetOptionValue(Option::ACCELEROMETER_RANGE, 6);
// GYROSCOPE_RANGE values: 250, 500, 1000, 2000, 4000
api->SetOptionValue(Option::GYROSCOPE_RANGE, 1000);

View File

@@ -0,0 +1,74 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <string>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include "mynteye/api/api.h"
MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api) return 1;
auto request = api->GetStreamRequest();
// struct StreamRequest {
// /** Stream width in pixels */
// std::uint16_t width;
// /** Stream height in pixels */
// std::uint16_t height;
// /** Stream pixel format */
// Format format;
// /** Stream frames per second */
// std::uint16_t fps;
// }
request.fps = 10;
api->ConfigStreamRequest(request);
api->EnableStreamData(Stream::DEPTH);
api->Start(Source::VIDEO_STREAMING);
cv::namedWindow("frame");
cv::namedWindow("depth");
while (true) {
api->WaitForStreams();
auto &&left_data = api->GetStreamData(Stream::LEFT);
auto &&right_data = api->GetStreamData(Stream::RIGHT);
if (!left_data.frame.empty() && !right_data.frame.empty()) {
cv::Mat img;
cv::hconcat(left_data.frame, right_data.frame, img);
cv::imshow("frame", img);
}
// this code is for real depth data
auto &&depth_data = api->GetStreamData(Stream::DEPTH);
if (!depth_data.frame.empty()) {
cv::imshow("depth", depth_data.frame); // CV_16UC1
}
char key = static_cast<char>(cv::waitKey(1));
if (key == 27 || key == 'q' || key == 'Q') { // ESC/Q
break;
}
}
api->Stop(Source::VIDEO_STREAMING);
return 0;
}

View File

@@ -11,6 +11,7 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <stdio.h>
#include <string>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
@@ -28,7 +29,10 @@ int main(int argc, char *argv[]) {
if (!ok) return 1;
api->ConfigStreamRequest(request);
api->SetDisparityComputingMethodType(DisparityComputingMethod::BM);
double fps;
double t = 0.01;
std::cout << "depth fps:" << std::endl;
api->EnableStreamData(Stream::DEPTH);
api->EnableStreamData(Stream::DISPARITY_NORMALIZED);
@@ -52,6 +56,10 @@ int main(int argc, char *argv[]) {
// this code is for real depth data
auto &&depth_data = api->GetStreamData(Stream::DEPTH);
if (!depth_data.frame.empty()) {
double t_c = cv::getTickCount() / cv::getTickFrequency();
fps = 1.0/(t_c - t);
printf("\b\b\b\b\b\b\b\b\b%.2f", fps);
t = t_c;
cv::imshow("depth_real", depth_data.frame); // CV_16UC1
}

View File

@@ -11,6 +11,7 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <stdio.h>
#include <opencv2/highgui/highgui.hpp>
#include "mynteye/api/api.h"
@@ -27,7 +28,7 @@ int main(int argc, char *argv[]) {
if (!ok) return 1;
api->ConfigStreamRequest(request);
// api->EnableStreamData(Stream::DISPARITY);
api->SetDisparityComputingMethodType(DisparityComputingMethod::SGBM);
api->EnableStreamData(Stream::DISPARITY_NORMALIZED);
if (argc == 2) {
@@ -45,14 +46,16 @@ int main(int argc, char *argv[]) {
}
}
api->SetDisparityComputingMethodType(DisparityComputingMethod::BM);
api->Start(Source::VIDEO_STREAMING);
cv::namedWindow("frame");
// cv::namedWindow("disparity");
cv::namedWindow("disparity_normalized");
double fps;
double t = 0.01;
std::cout << "disparity fps:" << std::endl;
while (true) {
api->WaitForStreams();
@@ -72,6 +75,10 @@ int main(int argc, char *argv[]) {
auto &&disp_norm_data = api->GetStreamData(Stream::DISPARITY_NORMALIZED);
if (!disp_norm_data.frame.empty()) {
double t_c = cv::getTickCount() / cv::getTickFrequency();
fps = 1.0/(t_c - t);
printf("\b\b\b\b\b\b\b\b\b%.2f", fps);
t = t_c;
cv::imshow("disparity_normalized", disp_norm_data.frame); // CV_8UC1
}

View File

@@ -11,6 +11,7 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <stdio.h>
#include <opencv2/highgui/highgui.hpp>
#include "mynteye/api/api.h"
@@ -28,12 +29,14 @@ int main(int argc, char *argv[]) {
if (!ok) return 1;
api->ConfigStreamRequest(request);
api->SetDisparityComputingMethodType(DisparityComputingMethod::BM);
api->EnableStreamData(Stream::POINTS);
api->Start(Source::VIDEO_STREAMING);
double fps;
double t = 0.01;
std::cout << "points cloud fps:" << std::endl;
cv::namedWindow("frame");
PCViewer pcviewer;
@@ -50,6 +53,10 @@ int main(int argc, char *argv[]) {
auto &&points_data = api->GetStreamData(Stream::POINTS);
if (!points_data.frame.empty()) {
double t_c = cv::getTickCount() / cv::getTickFrequency();
fps = 1.0/(t_c - t);
printf("\b\b\b\b\b\b\b\b\b%.2f", fps);
t = t_c;
pcviewer.Update(points_data.frame);
}

View File

@@ -11,6 +11,7 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <stdio.h>
#include <opencv2/highgui/highgui.hpp>
#include "mynteye/api/api.h"
@@ -25,9 +26,12 @@ int main(int argc, char *argv[]) {
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
api->SetDisparityComputingMethodType(DisparityComputingMethod::BM);
api->Start(Source::VIDEO_STREAMING);
double fps;
double t = 0.01;
std::cout << "fps:" << std::endl;
cv::namedWindow("frame");
while (true) {
@@ -38,6 +42,10 @@ int main(int argc, char *argv[]) {
cv::Mat img;
if (!left_data.frame.empty() && !right_data.frame.empty()) {
double t_c = cv::getTickCount() / cv::getTickFrequency();
fps = 1.0/(t_c - t);
printf("\b\b\b\b\b\b\b\b\b%.2f", fps);
t = t_c;
cv::hconcat(left_data.frame, right_data.frame, img);
cv::imshow("frame", img);
}

View File

@@ -11,6 +11,7 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <stdio.h>
#include <opencv2/highgui/highgui.hpp>
#include "mynteye/api/api.h"
@@ -25,13 +26,16 @@ int main(int argc, char *argv[]) {
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
api->SetDisparityComputingMethodType(DisparityComputingMethod::BM);
api->EnableStreamData(Stream::LEFT_RECTIFIED);
api->EnableStreamData(Stream::RIGHT_RECTIFIED);
api->Start(Source::VIDEO_STREAMING);
double fps;
double t = 0.01;
std::cout << "fps:" << std::endl;
cv::namedWindow("frame");
while (true) {
@@ -41,6 +45,11 @@ int main(int argc, char *argv[]) {
auto &&right_data = api->GetStreamData(Stream::RIGHT_RECTIFIED);
if (!left_data.frame.empty() && !right_data.frame.empty()) {
double t_c = cv::getTickCount() / cv::getTickFrequency();
fps = 1.0/(t_c - t);
printf("\b\b\b\b\b\b\b\b\b%.2f", fps);
t = t_c;
cv::Mat img;
cv::hconcat(left_data.frame, right_data.frame, img);
cv::imshow("frame", img);

View File

@@ -11,6 +11,7 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <stdio.h>
#include <opencv2/highgui/highgui.hpp>
#include "mynteye/api/api.h"
@@ -28,7 +29,7 @@ int main(int argc, char *argv[]) {
api->setDuplicate(true);
api->EnablePlugin("plugins/linux-x86_64/libplugin_g_cuda9.1_opencv3.4.0.so");
api->EnablePlugin("plugins/libplugin_g_cuda9.2_opencv3.3.1.so");
api->EnableStreamData(Stream::DISPARITY_NORMALIZED);
@@ -36,7 +37,9 @@ int main(int argc, char *argv[]) {
cv::namedWindow("frame");
cv::namedWindow("disparity");
double fps;
double t = 0.01;
std::cout << "fps:" << std::endl;
while (true) {
api->WaitForStreams();
@@ -44,6 +47,10 @@ int main(int argc, char *argv[]) {
auto &&right_data = api->GetStreamData(Stream::RIGHT);
if (!left_data.frame.empty() && !right_data.frame.empty()) {
double t_c = cv::getTickCount() / cv::getTickFrequency();
fps = 1.0/(t_c - t);
printf("\b\b\b\b\b\b\b\b\b%.2f", fps);
t = t_c;
cv::Mat img;
cv::hconcat(left_data.frame, right_data.frame, img);
cv::imshow("frame", img);

View File

@@ -300,10 +300,10 @@ api::StreamData Processor::GetStreamData(const Stream &stream) {
}
auto streams = getTargetStreams();
if (output != nullptr) {
int num = 0;
for (auto it : streams) {
if (it.stream == stream) {
if (num == 1) {
if (it.stream == Stream::LEFT ||
it.stream == Stream::LEFT_RECTIFIED) {
if (!is_enable_cd) {
if (output->first_data &&
last_frame_id_cd == output->first_data->frame_id) {
@@ -313,7 +313,8 @@ api::StreamData Processor::GetStreamData(const Stream &stream) {
last_frame_id_cd = output->first_data->frame_id;
}
return obj_data_first(output);
} else {
} else if (it.stream == Stream::RIGHT ||
it.stream == Stream::RIGHT_RECTIFIED) {
// last_frame_id_cd = output->second_data->frame_id;
if (!is_enable_cd) {
if (output->second_data &&
@@ -325,7 +326,6 @@ api::StreamData Processor::GetStreamData(const Stream &stream) {
return obj_data_second(output);
}
}
num++;
}
}
VLOG(2) << "Frame not ready now";

View File

@@ -326,7 +326,7 @@ void Synthetic::InitProcessors() {
std::shared_ptr<Processor> depth_processor = nullptr;
auto &&disparity_processor =
std::make_shared<DisparityProcessor>(DisparityComputingMethod::SGBM,
std::make_shared<DisparityProcessor>(DisparityComputingMethod::BM,
DISPARITY_PROC_PERIOD);
auto &&disparitynormalized_processor =
std::make_shared<DisparityNormalizedProcessor>(

View File

@@ -122,6 +122,13 @@ std::size_t from_data(ImuIntrinsics *in, const std::uint8_t *data,
}
}
i += 72;
} else {
// assembly
for (std::size_t j = 0; j < 3; j++) {
for (std::size_t k = 0; k < 3; k++) {
in->assembly[j][k] = 0.0;
}
}
}
// drift
for (std::size_t j = 0; j < 3; j++) {
@@ -155,6 +162,20 @@ std::size_t from_data(ImuIntrinsics *in, const std::uint8_t *data,
in->z[j] = _from_data<double>(data + i + j * 8);
}
i += 16;
} else {
// temperature drift
// x
for (std::size_t j = 0; j < 2; j++) {
in->x[j] = 0.0;
}
// y
for (std::size_t j = 0; j < 2; j++) {
in->y[j] = 0.0;
}
// z
for (std::size_t j = 0; j < 2; j++) {
in->z[j] = 0.0;
}
}
return i;

View File

@@ -340,14 +340,6 @@ std::ostream &operator<<(std::ostream &os, const CameraROSMsgInfo &info) {
std::ostream &operator<<(std::ostream &os, const CameraROSMsgInfoPair &info) {
os << "left:\n" << info.left << std::endl;
os << "right:\n" << info.right << std::endl;
os << "base R:";
for (size_t i = 0; i < 9; i++)
os << info.R[i] << ",";
os << std::endl << "base P:";
for (size_t i = 0; i < 12; i++)
os << info.P[i] << ",";
os << std::endl;
return os;
}

View File

@@ -19,4 +19,4 @@
standard2/request_index: 1
# standard/frame_rate range: {10,15,20,25,30,35,40,45,50,55,60}
standard/frame_rate: 20
standard/frame_rate: 15

View File

@@ -48,7 +48,7 @@
<group ns="$(arg mynteye)">
<!-- mynteye_wrapper_node -->
<node name="mynteye_wrapper_node" pkg="mynt_eye_ros_wrapper" type="mynteye_wrapper_node" output="screen" respawn="true" respawn_delay="5">
<node name="mynteye_wrapper_node" pkg="mynt_eye_ros_wrapper" type="mynteye_wrapper_node" output="screen" respawn="true" respawn_delay="10">
<!-- node params -->
@@ -103,6 +103,16 @@
- 'image_transport/compressedDepth'
</rosparam>
</group>
<group ns="/mynteye/right_rect_mono">
<rosparam param="disable_pub_plugins">
- 'image_transport/compressedDepth'
</rosparam>
</group>
<group ns="/mynteye/left_rect_mono">
<rosparam param="disable_pub_plugins">
- 'image_transport/compressedDepth'
</rosparam>
</group>
<group ns="$(arg left_mono_topic)">
<rosparam param="disable_pub_plugins">
- 'image_transport/compressedDepth'

View File

@@ -0,0 +1,129 @@
<?xml version="1.0"?>
<!-- Launch the MYNT EYE ROS wrapper with topics/frames set up for VINS-Fusion. -->
<launch>
<arg name="mynteye" default="mynteye" />
<!-- node params -->
<!-- Published image/imu topic names (relative to the "mynteye" namespace). -->
<arg name="left_topic" default="left/image_raw" />
<arg name="right_topic" default="right/image_raw" />
<arg name="left_rect_topic" default="left_rect/image_rect" />
<arg name="right_rect_topic" default="right_rect/image_rect" />
<arg name="disparity_topic" default="disparity/image_raw" />
<arg name="disparity_norm_topic" default="disparity/image_norm" />
<arg name="depth_topic" default="depth/image_raw" />
<arg name="points_topic" default="points/data_raw" />
<arg name="left_mono_topic" default="left/image_mono" />
<arg name="right_mono_topic" default="right/image_mono" />
<arg name="imu_topic" default="imu/data_raw" />
<arg name="temperature_topic" default="temperature/data_raw" />
<!-- TF frame ids for each published stream. -->
<arg name="base_frame_id" default="$(arg mynteye)_link" />
<arg name="left_frame_id" default="$(arg mynteye)_left_frame" />
<arg name="right_frame_id" default="$(arg mynteye)_right_frame" />
<arg name="left_rect_frame_id" default="$(arg mynteye)_left_rect_frame" />
<arg name="right_rect_frame_id" default="$(arg mynteye)_right_rect_frame" />
<arg name="disparity_frame_id" default="$(arg mynteye)_disparity_frame" />
<arg name="disparity_norm_frame_id" default="$(arg mynteye)_disparity_norm_frame" />
<arg name="points_frame_id" default="$(arg mynteye)_points_frame" />
<arg name="depth_frame_id" default="$(arg mynteye)_depth_frame" />
<arg name="temperature_frame_id" default="$(arg mynteye)_temperature_frame" />
<!-- Local gravity acceleration (m/s^2); set to your location's value. -->
<arg name="gravity" default="9.8" />
<arg name="mesh_file" default="S1030-0315.obj" />
<!-- Push down all topics/nodelets into "mynteye" namespace -->
<group ns="$(arg mynteye)">
<!-- mynteye_wrapper_node -->
<!-- respawn=true restarts the node (after respawn_delay seconds) if it dies. -->
<node name="mynteye_wrapper_node" pkg="mynt_eye_ros_wrapper" type="mynteye_wrapper_node" output="screen" respawn="true" respawn_delay="5">
<!-- node params -->
<param name="left_topic" value="$(arg left_topic)" />
<param name="right_topic" value="$(arg right_topic)" />
<param name="left_rect_topic" value="$(arg left_rect_topic)" />
<param name="right_rect_topic" value="$(arg right_rect_topic)" />
<param name="disparity_topic" value="$(arg disparity_topic)" />
<param name="disparity_norm_topic" value="$(arg disparity_norm_topic)" />
<param name="points_topic" value="$(arg points_topic)" />
<param name="depth_topic" value="$(arg depth_topic)" />
<param name="mesh_file" value="$(arg mesh_file)" />
<param name="left_mono_topic" value="$(arg left_mono_topic)" />
<param name="right_mono_topic" value="$(arg right_mono_topic)" />
<param name="imu_topic" value="$(arg imu_topic)" />
<param name="temperature_topic" value="$(arg temperature_topic)" />
<param name="base_frame_id" value="$(arg base_frame_id)" />
<param name="left_frame_id" value="$(arg left_frame_id)" />
<param name="right_frame_id" value="$(arg right_frame_id)" />
<param name="left_rect_frame_id" value="$(arg left_rect_frame_id)" />
<param name="right_rect_frame_id" value="$(arg right_rect_frame_id)" />
<param name="disparity_frame_id" value="$(arg disparity_frame_id)" />
<param name="disparity_norm_frame_id" value="$(arg disparity_norm_frame_id)" />
<param name="points_frame_id" value="$(arg points_frame_id)" />
<param name="depth_frame_id" value="$(arg depth_frame_id)" />
<param name="temperature_frame_id" value="$(arg temperature_frame_id)" />
<!-- Device/process/slam parameter files loaded from the package's config/ dir. -->
<rosparam file="$(find mynt_eye_ros_wrapper)/config/device/standard.yaml" command="load" />
<rosparam file="$(find mynt_eye_ros_wrapper)/config/device/standard2.yaml" command="load" />
<rosparam file="$(find mynt_eye_ros_wrapper)/config/process/process_config.yaml" command="load" />
<rosparam file="$(find mynt_eye_ros_wrapper)/config/slam/vins_fusion.yaml" command="load" />
<param name="gravity" value="$(arg gravity)" />
</node>
<!-- disable compressed depth plugin for image topics -->
<group ns="$(arg left_topic)">
<rosparam param="disable_pub_plugins">
- 'image_transport/compressedDepth'
</rosparam>
</group>
<group ns="$(arg left_rect_topic)">
<rosparam param="disable_pub_plugins">
- 'image_transport/compressedDepth'
</rosparam>
</group>
<group ns="$(arg left_mono_topic)">
<rosparam param="disable_pub_plugins">
- 'image_transport/compressedDepth'
</rosparam>
</group>
<group ns="$(arg right_topic)">
<rosparam param="disable_pub_plugins">
- 'image_transport/compressedDepth'
</rosparam>
</group>
<group ns="$(arg right_mono_topic)">
<rosparam param="disable_pub_plugins">
- 'image_transport/compressedDepth'
</rosparam>
</group>
<group ns="$(arg right_rect_topic)">
<rosparam param="disable_pub_plugins">
- 'image_transport/compressedDepth'
</rosparam>
</group>
<group ns="$(arg disparity_topic)">
<rosparam param="disable_pub_plugins">
- 'image_transport/compressedDepth'
</rosparam>
</group>
<group ns="$(arg disparity_norm_topic)">
<rosparam param="disable_pub_plugins">
- 'image_transport/compressedDepth'
</rosparam>
</group>
<group ns="$(arg depth_topic)">
<rosparam param="disable_pub_plugins">
- 'image_transport/compressedDepth'
</rosparam>
</group>
</group> <!-- mynteye -->
</launch>

View File

@@ -103,6 +103,16 @@
- 'image_transport/compressedDepth'
</rosparam>
</group>
<group ns="/mynteye/right_rect_mono">
<rosparam param="disable_pub_plugins">
- 'image_transport/compressedDepth'
</rosparam>
</group>
<group ns="/mynteye/left_rect_mono">
<rosparam param="disable_pub_plugins">
- 'image_transport/compressedDepth'
</rosparam>
</group>
<group ns="$(arg left_mono_topic)">
<rosparam param="disable_pub_plugins">
- 'image_transport/compressedDepth'

View File

@@ -103,6 +103,16 @@
- 'image_transport/compressedDepth'
</rosparam>
</group>
<group ns="/mynteye/right_rect_mono">
<rosparam param="disable_pub_plugins">
- 'image_transport/compressedDepth'
</rosparam>
</group>
<group ns="/mynteye/left_rect_mono">
<rosparam param="disable_pub_plugins">
- 'image_transport/compressedDepth'
</rosparam>
</group>
<group ns="$(arg left_mono_topic)">
<rosparam param="disable_pub_plugins">
- 'image_transport/compressedDepth'

View File

@@ -104,7 +104,7 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
ros::Time hardTimeToSoftTime(std::uint64_t _hard_time) {
static bool isInited = false;
static double soft_time_begin(0);
static std::uint32_t soft_time_begin(0);
static std::uint64_t hard_time_begin(0);
if (false == isInited) {
@@ -113,11 +113,31 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
isInited = true;
}
std::uint64_t time_ns_detal = (_hard_time - hard_time_begin);
std::uint64_t time_ns_detal_s = time_ns_detal / 1000000;
std::uint64_t time_ns_detal_ns = time_ns_detal % 1000000;
return ros::Time(
static_cast<double>(soft_time_begin +
static_cast<double>(_hard_time - hard_time_begin) * 0.000001f));
soft_time_begin + time_ns_detal_s,
time_ns_detal_ns * 1000);
}
// ros::Time hardTimeToSoftTime(std::uint64_t _hard_time) {
// static bool isInited = false;
// static double soft_time_begin(0);
// static std::uint64_t hard_time_begin(0);
// if (false == isInited) {
// soft_time_begin = ros::Time::now().toSec();
// hard_time_begin = _hard_time;
// isInited = true;
// }
// return ros::Time(
// static_cast<double>(soft_time_begin +
// static_cast<double>(_hard_time - hard_time_begin) * 0.000001f));
// }
inline bool is_overflow(std::uint64_t now,
std::uint64_t pre) {
static std::uint64_t unit =
@@ -204,7 +224,9 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
is_started_ = false;
std::map<Stream, std::string> mono_names{{Stream::LEFT, "left_mono"},
{Stream::RIGHT, "right_mono"}};
{Stream::RIGHT, "right_mono"},
{Stream::LEFT_RECTIFIED, "left_rect_mono"},
{Stream::RIGHT_RECTIFIED, "right_rect_mono"}};
std::map<Stream, std::string> mono_topics{};
for (auto &&it = mono_names.begin(); it != mono_names.end(); ++it) {
@@ -319,7 +341,10 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
if (model_ == Model::STANDARD2 || model_ == Model::STANDARD210A) {
for (auto &&it = mono_topics.begin(); it != mono_topics.end(); ++it) {
auto &&topic = mono_topics[it->first];
if (it->first == Stream::LEFT || it->first == Stream::RIGHT) {
if (it->first == Stream::LEFT ||
it->first == Stream::RIGHT ||
it->first == Stream::RIGHT_RECTIFIED ||
it->first == Stream::LEFT_RECTIFIED) {
mono_publishers_[it->first] = it_mynteye.advertise(topic, 1);
}
NODELET_INFO_STREAM("Advertized on topic " << topic);
@@ -594,6 +619,10 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
return;
} else if (stream == Stream::POINTS) {
publishPoints(data, seq, stamp);
} else if (stream == Stream::LEFT_RECTIFIED ||
stream == Stream::RIGHT_RECTIFIED) {
publishMono(stream, data, seq, stamp);
publishCamera(stream, data, seq, stamp);
} else {
publishCamera(stream, data, seq, stamp);
}
@@ -610,7 +639,8 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
}
void publishOthers(const Stream &stream) {
if (getStreamSubscribers(stream) > 0 && !is_published_[stream]) {
if ((getStreamSubscribers(stream) > 0 && !is_published_[stream]) ||
mono_publishers_[stream].getNumSubscribers() > 0) {
api_->EnableStreamData(stream);
api_->SetStreamCallback(
stream, [this, stream](const api::StreamData &data) {
@@ -1292,6 +1322,8 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
sensor_msgs::CameraInfo *camera_info = new sensor_msgs::CameraInfo();
camera_info_ptrs_[stream] = sensor_msgs::CameraInfoPtr(camera_info);
auto info_pair = api_->GetCameraROSMsgInfoPair();
camera_info->width = info_pair->left.width;
camera_info->height = info_pair->left.height;
if (is_intrinsics_enable_) {
if (stream == Stream::RIGHT ||
stream == Stream::RIGHT_RECTIFIED) {