24 Commits
2.3.5 ... 2.3.7

Author SHA1 Message Date
harjeb
0267f4e4b3 docs(*): update changelog 2019-04-19 14:48:03 +08:00
John Zhao
8446083bf4 chore(*): update version 2019-04-19 14:38:57 +08:00
harjeb
b3acbfe8aa docs(*): update ORB build 2019-04-19 14:22:07 +08:00
harjeb
d988fce95a docs(*): update slam 2019-04-19 05:19:55 +08:00
TinyOh
0c0c299c84 fix(slam): rect gray topic sub logic 2019-04-15 13:35:05 +08:00
TinyOh
6dab54117d Merge branch 'develop' of http://gitlab.mynt.com/mynteye/mynt-eye-s-sdk into develop 2019-04-15 13:13:50 +08:00
TinyOh
966511edf1 feat(slam): add rect gray topic 2019-04-15 13:13:27 +08:00
John Zhao
5fb5a876b8 Merge branch 'hotfix/docs' into develop
* hotfix/docs:
  docs(sdk): update exe version
2019-04-15 10:14:53 +08:00
John Zhao
4613d78d8c docs(sdk): update exe version 2019-04-15 10:13:58 +08:00
John Zhao
dd81c782b4 Merge branch 'hotfix/ros_imu_timestamp' into develop
* hotfix/ros_imu_timestamp:
  chore(*): update version
2019-04-15 10:05:28 +08:00
John Zhao
0d5cd2df76 chore(*): update version 2019-04-15 10:05:00 +08:00
John Zhao
3383d43360 Merge branch 'hotfix/ros_imu_timestamp' into develop
* hotfix/ros_imu_timestamp:
  fix(ros): ros error enum error
  fix: 14.04 complie error
  feat(doc): supported set address for 2100
  feat(src): supported set iic address for s2100
  fix(ros): fixed bug of imu align for part of device

# Conflicts:
#	wrappers/ros/src/mynt_eye_ros_wrapper/src/wrapper_nodelet.cc
2019-04-15 09:51:58 +08:00
TinyOh
b3f6c82f8d fix(ros): ros error enum error 2019-04-12 15:41:25 +08:00
TinyOh
2d8a66eb47 fix: 14.04 complie error 2019-04-12 15:18:02 +08:00
TinyOh
1a080d438f fix: 14.04 complie error 2019-04-12 14:42:08 +08:00
Osenberg
c18a23b059 feat(doc): supported set address for 2100 2019-04-12 14:11:19 +08:00
Osenberg
05dc3e99ef feat(src): supported set iic address for s2100 2019-04-12 11:31:53 +08:00
Osenberg
9a13909de2 fix(ros): fixed bug of imu align for part of device
2019-04-11 18:15:58 +08:00
TinyOh
089c1b1b0c fix: change fps show to right target 2019-04-10 11:40:14 +08:00
TinyOh
7e588d5e24 feat: add fps show in console 2019-04-10 11:31:13 +08:00
TinyOh
4be2d6ae49 feat: add open without select sample 2019-04-04 14:19:17 +08:00
TinyOh
28e089c412 fix(ros): respawn delay change to 10 for avatar 2019-04-04 11:09:09 +08:00
TinyOh
af2327cd92 Merge branch 'develop' of http://gitlab.mynt.com/mynteye/mynt-eye-s-sdk into develop 2019-04-04 09:23:14 +08:00
TinyOh
e28e34eafa fix(api): use BM as default ,only use sgbm when get_disparity 2019-04-04 09:22:26 +08:00
24 changed files with 339 additions and 498 deletions

View File

@@ -14,7 +14,7 @@
cmake_minimum_required(VERSION 3.0)
project(mynteye VERSION 2.3.5 LANGUAGES C CXX)
project(mynteye VERSION 2.3.7 LANGUAGES C CXX)
include(cmake/Common.cmake)

View File

@@ -1,6 +1,6 @@
# MYNT® EYE S SDK
[![](https://img.shields.io/badge/MYNT%20EYE%20S%20SDK-2.3.5-brightgreen.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK)
[![](https://img.shields.io/badge/MYNT%20EYE%20S%20SDK-2.3.7-brightgreen.svg?style=flat)](https://github.com/slightech/MYNT-EYE-S-SDK)
## Overview

View File

@@ -24,7 +24,7 @@ copyright = '2018, MYNTAI'
author = 'MYNTAI'
# The short X.Y version
version = '2.3.5'
version = '2.3.7'
# The full version, including alpha/beta/rc tags
release = version

View File

@@ -9,11 +9,11 @@ To set the IIC address, set ``Option::IIC_ADDRESS_SETTING``.
.. Attention::
Only support S210A
Only support S210A/S2100
Reference Code:
s210a
s210a/s2100
.. code-block:: c++
@@ -24,7 +24,7 @@ s210a
if (!ok) return 1;
api->ConfigStreamRequest(request);
Model model = api->GetModel();
if (model == Model::STANDARD210A) {
if (model == Model::STANDARD210A || model == Model::STANDARD2) {
api->SetOptionValue(Option::IIC_ADDRESS_SETTING, 0x31);
LOG(INFO) << "Set iic address to " << std::hex << "0x"
<< api->GetOptionValue(Option::IIC_ADDRESS_SETTING);
@@ -33,7 +33,7 @@ s210a
Reference running results on Linux:
s210a
s210a/s2100
.. code-block:: bash

View File

@@ -3,6 +3,19 @@
Changelog
=========
2019-04-19(v2.3.7)
-------------------
1. Improve VINS-Fusion support
2. Improve ORB-SLAM2 support
2019-04-15(v2.3.6)
-------------------
1. Fix imu align bug of ros wrapper
2. Fix 14.04 compile error of ros wrapper
3. Support setting IIC address for S2100
2019-04-01(v2.3.5)
-------------------
@@ -21,11 +34,9 @@ Changelog
7. Modify default orientation of point in ROS.
2019-03-18(v2.3.4)
-------------------
1. Add API to get subsidiary chip & ISP version (depends on S2100/S210A 1.1 firmware & 1.0 subsidiary chip firmware).
2. Fix point fragment issue in BM algorithm.

View File

@@ -26,7 +26,7 @@ Download and install SDK
.. tip::
Download here: mynteye-s-2.3.4-win-x64-opencv-3.4.3.exe `Google Drive <https://drive.google.com/open?id=1PYC_5Mh2pzLFVXkYlkllEzPnr50EbKht>`_ `Baidu Pan <https://pan.baidu.com/s/1s4KIcuYkO5i_9E1pG5blQA>`_ .
Download here: mynteye-s-2.3.6-win-x64-opencv-3.4.3.exe `Google Drive <https://drive.google.com/open?id=1PYC_5Mh2pzLFVXkYlkllEzPnr50EbKht>`_ `Baidu Pan <https://pan.baidu.com/s/1s4KIcuYkO5i_9E1pG5blQA>`_ .
After you install the win pack of SDK, there will be a shortcut to the SDK root directory on your desktop.

View File

@@ -5,7 +5,6 @@ Open Source project Support
.. toctree::
how_to_use_kalibr
vins
vins_fusion
orb_slam2

View File

@@ -1,300 +0,0 @@
.. _how_to_use_kalibr:
How to calibrate MYNTEYE by kalibr
===================================
Target
------------
* Calibrate the pose relationship between the left and right cameras
* Calibrate the pose relationship between the left camera and the IMU
Preparation
------------
* **Install kalibr**: refer to the `kalibr wiki <https://github.com/ethz-asl/kalibr/wiki/installation>`_ and follow the steps to install.
* **Calibration board**: kalibr supports ``checkerboard``, ``circlegrid``, and ``aprilgrid``; we choose ``aprilgrid`` here. The calibration board file can be `downloaded <https://github.com/ethz-asl/kalibr/wiki/downloads>`_ directly, or you can generate one with the kalibr tool.
.. code-block:: bash
$ kalibr_create_target_pdf --type 'apriltag' --nx 6 --ny 6 --tsize 0.08 --tspace 0.3
View the parameters' meanings with the ``kalibr_create_target_pdf`` command:
.. code-block:: bash
$ kalibr_create_target_pdf --h
usage:
Example Aprilgrid:
kalibr_create_target_pdf --type apriltag --nx 6 --ny 6 --tsize 0.08 --tspace 0.3
Example Checkerboard:
kalibr_create_target_pdf --type checkerboard --nx 6 --ny 6 -csx 0.05 --csy 0.1
Generate PDFs of calibration patterns.
optional arguments:
-h, --help show this help message and exit
Output options:
output Output filename
--eps Also output an EPS file
Generic grid options:
--type GRIDTYPE The grid pattern type. ('apriltag' or 'checkerboard')
--nx N_COLS The number of tags in x direction (default: 6)
--ny N_ROWS The number of tags in y direction (default: 7)
Apriltag arguments:
--tsize TSIZE The size of one tag [m] (default: 0.08)
--tspace TAGSPACING The space between the tags in fraction of the edge size
[0..1] (default: 0.3)
--tfam TAGFAMILIY Familiy of April tags ['t16h5', 't25h9', 't25h7',
't36h11'] (default: t36h11)
Checkerboard arguments:
--csx CHESSSZX The size of one chessboard square in x direction [m]
(default: 0.05)
--csy CHESSSZY The size of one chessboard square in y direction [m]
(default: 0.05)
* **Calibrate the intrinsic IMU parameters**: kalibr requires the IMU data to be corrected with intrinsic parameters by default. The intrinsic calibration tool used is `imu-tk <https://github.com/Kyle-ak/imu_tk.git>`_ .
* **Measure the IMU noise parameters**
* noise density
* bias random walk
Using the Allan analysis tool `imu_utils <https://github.com/gaowenliang/imu_utils>`_, we can obtain these characteristics of the IMU data and format the output as ``imu.yaml``:
.. code-block:: yaml
#Accelerometers
accelerometer_noise_density: 0.02680146180736048 #Noise density (continuous-time)
accelerometer_random_walk: 0.0026296086159332804 #Bias random walk
#Gyroscopes
gyroscope_noise_density: 0.008882328296710996 #Noise density (continuous-time)
gyroscope_random_walk: 0.00037956578292701033 #Bias random walk
rostopic: /mynteye/imu/data_raw #the IMU ROS topic
update_rate: 200.0 #Hz (for discretization of the values above)
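For reference, the two numbers per sensor parameterize the standard continuous-time IMU noise model that kalibr assumes (stated here as a reminder); for the gyroscope:

.. math::

   \tilde{\omega}(t) = \omega(t) + b(t) + n(t), \qquad
   n(t) \sim \mathcal{N}(0, \sigma_g^2), \qquad
   \dot{b}(t) = \sigma_{bg}\, w(t)

where :math:`\sigma_g` is the noise density, :math:`\sigma_{bg}` is the bias random walk, and :math:`w(t)` is unit white noise; the accelerometer model is analogous.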
Calibrate the pose relationship between left and right camera
--------------------------------------------------------------
* Collect calibration images: kalibr supports collecting the required calibration images in two ways: via ``rosbag`` or as offline images. We use ``rosbag`` here for convenience; see this `link <https://github.com/ethz-asl/kalibr/wiki/bag-format>`_ for how to collect images.
* To collect images via ``rosbag``: fix the mynteye camera and move the ``aprilgrid`` calibration board within the camera's field of view.
* To keep the calibration time reasonable, use image data with a lower frame rate; kalibr recommends ``4Hz``, and we use ``10Hz`` here.
* MYNTEYE S series cameras publish images at 10Hz or higher. Since 10Hz already lengthens the calibration, you can use `topic_tools <http://wiki.ros.org/topic_tools/throttle>`_ to lower the frequency.
* Record ``static.bag``: after fixing the mynteye camera, start the `wrapper <https://github.com/slightech/MYNT-EYE-S-SDK>`_ and record the left and right image topics to ``static.bag``.
.. code-block:: bash
$ source wrappers/ros/devel/setup.bash
$ roslaunch mynt_eye_ros_wrapper display.launch
$ cd ~
$ mkdir -p bag
$ cd bag
$ rosbag record -O static_10hz /mynteye/left/image_raw /mynteye/right/image_raw # 10Hz recommended; you can also use topic_tools to republish at 4Hz.
* kalibr calibration:
.. code-block:: bash
$ kalibr_calibrate_cameras --target aprilgrid.yaml --bag ~/bag/static_10hz.bag --models pinhole-radtan pinhole-radtan --topics /mynteye/left/image_raw /mynteye/right/image_raw
View the parameters' meanings with the ``kalibr_calibrate_cameras`` command:
.. code-block:: bash
$ kalibr_calibrate_cameras --h
Calibrate the intrinsics and extrinsics of a camera system with non-shared
overlapping field of view.
usage:
Example usage to calibrate a camera system with two cameras using an aprilgrid.
cam0: omnidirection model with radial-tangential distortion
cam1: pinhole model with equidistant distortion
kalibr_calibrate_cameras --models omni-radtan pinhole-equi --target aprilgrid.yaml \
--bag MYROSBAG.bag --topics /cam0/image_raw /cam1/image_raw
example aprilgrid.yaml:
target_type: 'aprilgrid'
tagCols: 6
tagRows: 6
tagSize: 0.088 #m
tagSpacing: 0.3 #percent of tagSize
optional arguments:
-h, --help show this help message and exit
--models MODELS [MODELS ...]
The camera model ['pinhole-radtan', 'pinhole-equi',
'omni-radtan', 'pinhole-fov'] to estimate
Data source:
--bag BAGFILE The bag file with the data
--topics TOPICS [TOPICS ...]
The list of image topics
--bag-from-to bag_from_to bag_from_to
Use the bag data starting from up to this time [s]
Calibration target configuration:
--target TARGETYAML Calibration target configuration as yaml file
Image synchronization:
--approx-sync MAX_DELTA_APPROXSYNC
Time tolerance for approximate image synchronization
[s] (default: 0.02)
Calibrator settings:
--qr-tol QRTOL The tolerance on the factors of the QR decomposition
(default: 0.02)
--mi-tol MITOL The tolerance on the mutual information for adding an
image. Higher means fewer images will be added. Use -1
to force all images. (default: 0.2)
--no-shuffle Do not shuffle the dataset processing order
Outlier filtering options:
--no-outliers-removal
Disable corner outlier filtering
--no-final-filtering Disable filtering after all views have been processed.
--min-views-outlier MINVIEWOUTLIER
Number of raw views to initialize statistics (default:
20)
--use-blakezisserman Enable the Blake-Zisserman m-estimator
--plot-outliers Plot the detect outliers during extraction (this could
be slow)
Output options:
--verbose Enable (really) verbose output (disables plots)
--show-extraction Show the calibration target extraction. (disables
plots)
--plot Plot during calibration (this could be slow).
--dont-show-report Do not show the report on screen after calibration.
The following three files are output after calibration finishes:
* ``camchain-homezhangsbagstatic_10hz.yaml``
* ``report-cam-homezhangsbagstatic_10hz.pdf``
* ``results-cam-homezhangsbagstatic_10hz.txt``
.. tip::
If you use the camera parameters with VINS, it is better to choose the pinhole-equi or omni-radtan model. If you use them with Maplab, please choose the pinhole-equi model.
Calibrate the pose relationship between camera and IMU coordinate system
-------------------------------------------------------------------------
* **Collect calibration data**: as with the camera pose calibration, kalibr supports two ways to collect data; we still use ``rosbag`` here.
* To collect images: fix the ``aprilgrid`` calibration board and move the camera.
* Make sure the collected data is good: the brightness of the calibration board should be appropriate (too bright or too dark degrades the data quality), and do not move too fast, to avoid image blur.
* Set the IMU publishing frequency to 200Hz and the image frequency to 20Hz (recommended by kalibr).
* Fully excite each axis of the IMU, for example 3 motions on each axis, followed by a figure-eight motion.
* Record camera and imu as ``dynamic.bag``.
.. code-block:: bash
$ roslaunch mynt_eye_ros_wrapper display.launch
$ cd bag
$ rosbag record -O dynamic /mynteye/left/image_raw /mynteye/right/image_raw /mynteye/imu/data_raw # remember to set the image rate to 20Hz and the IMU rate to 200Hz
* kalibr calibration:
.. code-block:: bash
$ kalibr_calibrate_imu_camera --cam camchain-homezhangsbagstatic_10hz.yaml --target aprilgrid.yaml --imu imu.yaml --time-calibration --bag ~/bag/dynamic.bag
View the parameters' meanings with the ``kalibr_calibrate_imu_camera`` command:
.. code-block:: bash
$ kalibr_calibrate_imu_camera --h
Calibrate the spatial and temporal parameters of an IMU to a camera chain.
usage:
Example usage to calibrate a camera system against an IMU using an aprilgrid
with temporal calibration enabled.
kalibr_calibrate_imu_camera --bag MYROSBAG.bag --cam camchain.yaml --imu imu.yaml \
--target aprilgrid.yaml --time-calibration
camchain.yaml: is the camera-system calibration output of the multiple-camera
calibratin tool (kalibr_calibrate_cameras)
example aprilgrid.yaml: | example imu.yaml: (ADIS16448)
target_type: 'aprilgrid' | accelerometer_noise_density: 0.006
tagCols: 6 | accelerometer_random_walk: 0.0002
tagRows: 6 | gyroscope_noise_density: 0.0004
tagSize: 0.088 | gyroscope_random_walk: 4.0e-06
tagSpacing: 0.3 | update_rate: 200.0
optional arguments:
-h, --help show this help message and exit
Dataset source:
--bag BAGFILE Ros bag file containing image and imu data (rostopics
specified in the yamls)
--bag-from-to bag_from_to bag_from_to
Use the bag data starting from up to this time [s]
--perform-synchronization
Perform a clock synchronization according to 'Clock
synchronization algorithms for network measurements'
by Zhang et al. (2002).
Camera system configuration:
--cams CHAIN_YAML Camera system configuration as yaml file
--recompute-camera-chain-extrinsics
Recompute the camera chain extrinsics. This option is
exclusively recommended for debugging purposes in
order to identify problems with the camera chain
extrinsics.
--reprojection-sigma REPROJECTION_SIGMA
Standard deviation of the distribution of reprojected
corner points [px]. (default: 1.0)
IMU configuration:
--imu IMU_YAMLS [IMU_YAMLS ...]
Yaml files holding the IMU noise parameters. The first
IMU will be the reference IMU.
--imu-delay-by-correlation
Estimate the delay between multiple IMUs by
correlation. By default, no temporal calibration
between IMUs will be performed.
--imu-models IMU_MODELS [IMU_MODELS ...]
The IMU models to estimate. Currently supported are
'calibrated', 'scale-misalignment' and 'scale-
misalignment-size-effect'.
Calibration target:
--target TARGET_YAML Calibration target configuration as yaml file
Optimization options:
--time-calibration Enable the temporal calibration
--max-iter MAX_ITER Max. iterations (default: 30)
--recover-covariance Recover the covariance of the design variables.
--timeoffset-padding TIMEOFFSET_PADDING
Maximum range in which the timeoffset may change
during estimation [s] (default: 0.01)
Output options:
--show-extraction Show the calibration target extraction. (disables
plots)
--extraction-stepping
Show each image during calibration target extraction
(disables plots)
--verbose Verbose output (disables plots)
--dont-show-report Do not show the report on screen after calibration.
The following four files are output after calibration finishes:
* ``camchain-imucam-homezhangsbagdynamic.yaml``
* ``imu-homezhangsbagdynamic.yaml``
* ``report-imucam-homezhangsbagdynamic.pdf``
* ``results-imucam-homezhangsbagdynamic.yaml``
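The key quantity in ``camchain-imucam-homezhangsbagdynamic.yaml`` is ``T_cam_imu``; as a reminder of kalibr's convention (stated here for reference), it maps points from the IMU frame into the camera frame:

.. math::

   {}^{C}\mathbf{p} = T_{C,I}\, {}^{I}\mathbf{p}

together with ``timeshift_cam_imu``, the estimated time offset between the camera and IMU clocks.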

View File

@@ -9,41 +9,38 @@ If you wanna run ORB_SLAM2 with MYNT EYE camera, please follow the steps:
1. Download `MYNT-EYE-S-SDK <https://github.com/slightech/MYNT-EYE-S-SDK.git>`_ and follow steps to install.
2. Follow the normal procedure to install ORB_SLAM2.
3. Update ``distortion_parameters`` and ``projection_parameters`` to ``<ORB_SLAM2>/config/mynteye_*.yaml``.
4. Run examples by MYNT® EYE.
3. Run examples by MYNT® EYE.
Binocular camera sample
------------------------
* Calibrate a stereo camera with `ROS-StereoCalibration <http://wiki.ros.org/camera_calibration/Tutorials/StereoCalibration>`_ or OpenCV, and then update parameters to ``<ORB_SLAM2>/config/mynteye_s_stereo.yaml``.
* Execute ``build.sh``:
Prerequisites
--------------
.. code-block:: bash
chmod +x build.sh
./build.sh
* Run the stereo sample with the following command:
.. code-block:: bash
./Examples/Stereo/stereo_mynt_s ./Vocabulary/ORBvoc.txt ./config/mynteye_s_stereo.yaml true /mynteye/left/image_raw /mynteye/right/image_raw
sudo apt-get -y install libglew-dev cmake
cd ~
git clone https://github.com/stevenlovegrove/Pangolin.git
cd Pangolin
mkdir build
cd build
cmake ..
cmake --build .
sudo make install
Building the nodes for mono and stereo (ROS)
--------------------------------------------
* Add the path including ``Examples/ROS/ORB_SLAM2`` to the ``ROS_PACKAGE_PATH`` environment variable. Open ``.bashrc`` file and add at the end the following line. Replace ``PATH`` by the folder where you cloned ORB_SLAM2:
* Add the path including ``Examples/ROS/ORB_SLAM2`` to the ``ROS_PACKAGE_PATH`` environment variable. Open ``.bashrc`` file and add at the end the following line.
.. code-block:: bash
export ROS_PACKAGE_PATH=${ROS_PACKAGE_PATH}:PATH/ORB_SLAM2/Examples/ROS
export ROS_PACKAGE_PATH=${ROS_PACKAGE_PATH}:~/catkin_ws/src/MYNT-EYE-ORB-SLAM2-Sample
* Execute `build_ros.sh`:
.. code-block:: bash
chmod +x build.sh
./build.sh
chmod +x build_ros.sh
./build_ros.sh
@@ -51,8 +48,6 @@ Building the nodes for mono and stereo (ROS)
Stereo_ROS Example
~~~~~~~~~~~~~~~~~~~
* Reference ``Get camera calibration parameters`` in :ref:`slam_okvis` to get ``distortion_parameters`` and ``projection_parameters`` , and update ``<ORB_SLAM2>/config/mynteye_s_stereo.yaml`` .
* Launch ORB_SLAM2 ``Stereo_ROS``
1. Launch mynteye node
@@ -68,4 +63,4 @@ Stereo_ROS Example
.. code-block:: bash
rosrun ORB_SLAM2 mynteye_s_stereo ./Vocabulary/ORBvoc.txt ./config/mynteye_s_stereo.yaml true /mynteye/left/image_raw /mynteye/right/image_raw
rosrun ORB_SLAM2 mynteye_s_stereo ./Vocabulary/ORBvoc.txt ./config/mynteye_s_stereo.yaml false /mynteye/left_rect/image_rect /mynteye/right_rect/image_rect

View File

@@ -9,8 +9,7 @@ If you wanna run VINS-Mono with MYNT EYE camera, please follow the steps:
1. Download `MYNT-EYE-S-SDK <https://github.com/slightech/MYNT-EYE-S-SDK.git>`_ and install mynt_eye_ros_wrapper.
2. Follow the normal procedure to install VINS-Mono.
3. Update ``distortion_parameters`` and ``projection_parameters`` to `here <https://github.com/slightech/MYNT-EYE-VINS-Sample/blob/mynteye/config/mynteye/mynteye_s_config.yaml>`_ .
4. Run mynt_eye_ros_wrapper and VINS-Mono.
3. Run mynt_eye_ros_wrapper and VINS-Mono.
Install ROS Kinetic conveniently (if already installed, please ignore)
----------------------------------------------------------------------
@@ -21,6 +20,22 @@ Install ROS Kinetic conveniently (if already installed, please ignore)
wget https://raw.githubusercontent.com/oroca/oroca-ros-pkg/master/ros_install.sh && \
chmod 755 ./ros_install.sh && bash ./ros_install.sh catkin_ws kinetic
Install Ceres
--------------
.. code-block:: bash
cd ~
git clone https://ceres-solver.googlesource.com/ceres-solver
sudo apt-get -y install cmake libgoogle-glog-dev libatlas-base-dev libeigen3-dev libsuitesparse-dev
sudo add-apt-repository ppa:bzindovic/suitesparse-bugfix-1319687
sudo apt-get update && sudo apt-get install libsuitesparse-dev
mkdir ceres-bin
cd ceres-bin
cmake ../ceres-solver
make -j3
sudo make install
Install MYNT-EYE-VINS-Sample
------------------------------
@@ -28,28 +43,14 @@ Install MYNT-EYE-VINS-Sample
mkdir -p ~/catkin_ws/src
cd ~/catkin_ws/src
git clone -b mynteye https://github.com/slightech/MYNT-EYE-VINS-Sample.git
git clone https://github.com/slightech/MYNT-EYE-VINS-Sample.git
cd ..
catkin_make
source devel/setup.bash
echo "source ~/catkin_ws/devel/setup.bash" >> ~/.bashrc
source ~/.bashrc
Get image calibration parameters
---------------------------------
Use the MYNT® EYE's left camera and IMU. With the `MYNT-EYE-S-SDK <https://github.com/slightech/MYNT-EYE-S-SDK.git>`_ API's ``GetIntrinsics()`` and ``GetExtrinsics()`` functions, you can get the image calibration parameters of the current working device:
.. code-block:: bash
cd MYNT-EYE-S-SDK
./samples/_output/bin/tutorials/get_img_params
After running the above command, the pinhole ``distortion_parameters`` and ``projection_parameters`` are obtained; then update them `here <https://github.com/slightech/MYNT-EYE-VINS-Sample/blob/mynteye-s/config/mynteye/mynteye_config.yaml>`_ .
.. tip::
You can see the camera model of the device when getting the calibration parameters; if the model is equidistant, you need to calibrate a pinhole model yourself or follow :ref:`write_img_params` to write a default pinhole config file to your device.
(If this step fails, try another computer with a clean system, or reinstall Ubuntu and ROS.)
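For reference, a minimal sketch of what the ``get_img_params`` sample does with these calls, assuming the accessor shapes used by the SDK samples (``GetIntrinsicsBase()``/``GetExtrinsics()``; the exact printing is illustrative):

.. code-block:: c++

   #include <iostream>
   #include "mynteye/api/api.h"

   MYNTEYE_USE_NAMESPACE

   int main(int argc, char *argv[]) {
     auto &&api = API::Create(argc, argv);
     if (!api) return 1;
     // Left camera intrinsics: fx/fy/cx/cy plus the distortion model.
     std::cout << "Left intrinsics: {"
               << *api->GetIntrinsicsBase(Stream::LEFT) << "}" << std::endl;
     // Rotation and translation from the left to the right camera.
     std::cout << "Left-to-right extrinsics: {"
               << api->GetExtrinsics(Stream::LEFT, Stream::RIGHT) << "}"
               << std::endl;
     return 0;
   }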
Run VINS-Mono with MYNT® EYE
-----------------------------
@@ -68,7 +69,3 @@ Run VINS-Mono with MYNT® EYE
cd ~/catkin_ws
roslaunch vins_estimator mynteye_s.launch
.. note::
If you want to use a fish-eye camera model, please click `here <https://github.com/slightech/MYNT-EYE-VINS-Sample/tree/mynteye-s/calibration_images>`_ .

View File

@@ -11,13 +11,30 @@ If you wanna run VINS-Fusion with MYNT EYE camera, please follow the steps:
2. Follow the normal procedure to install VINS-Fusion.
3. Run mynt_eye_ros_wrapper and VINS-Fusion.
Install ROS Kinetic conveniently (if already installed, please ignore)
----------------------------------------------------------------------
Prerequisites
.. code-block:: bash
cd ~
wget https://raw.githubusercontent.com/oroca/oroca-ros-pkg/master/ros_install.sh && \
chmod 755 ./ros_install.sh && bash ./ros_install.sh catkin_ws kinetic
Install Ceres
--------------
1. Install Ubuntu 64-bit 16.04 or 18.04, with ROS Kinetic or Melodic (if already installed, please ignore). `ROS Installation <http://wiki.ros.org/ROS/Installation>`_
2. Install `Ceres <http://ceres-solver.org/installation.html>`_
.. code-block:: bash
cd ~
git clone https://ceres-solver.googlesource.com/ceres-solver
sudo apt-get -y install cmake libgoogle-glog-dev libatlas-base-dev libeigen3-dev libsuitesparse-dev
sudo add-apt-repository ppa:bzindovic/suitesparse-bugfix-1319687
sudo apt-get update && sudo apt-get install libsuitesparse-dev
mkdir ceres-bin
cd ceres-bin
cmake ../ceres-solver
make -j3
sudo make install
Install MYNT-EYE-VINS-FUSION-Samples
-------------------------------------
@@ -26,7 +43,7 @@ Install MYNT-EYE-VINS-FUSION-Samples
mkdir -p ~/catkin_ws/src
cd ~/catkin_ws/src
git clone -b mynteye https://github.com/slightech/MYNT-EYE-VINS-FUSION-Samples.git
git clone https://github.com/slightech/MYNT-EYE-VINS-FUSION-Samples.git
cd ..
catkin_make
source ~/catkin_ws/devel/setup.bash
@@ -42,14 +59,15 @@ Run VINS-FUSION with MYNT® EYE
cd (local path of MYNT-EYE-S-SDK)
source ./wrappers/ros/devel/setup.bash
roslaunch mynt_eye_ros_wrapper mynteye.launch
roslaunch mynt_eye_ros_wrapper vins_fusion.launch
2. Open another terminal and run vins
.. code-block:: bash
cd ~/catkin_ws
roslaunch vins mynteye-s-mono-imu.launch # mono+imu fusion
# roslaunch vins mynteye-s-stereo.launch # Stereo fusion / Stereo+imu fusion
# roslaunch vins mynteye-avarta-mono-imu.launch # mono+imu fusion with mynteye-avarta
# roslaunch vins mynteye-avarta-stereo.launch # Stereo fusion / Stereo+imu fusion with mynteye-avarta
cd ~/catkin_ws/src
source ./devel/setup.bash
roslaunch vins mynteye-s-stereo.launch # Stereo fusion / Stereo+imu fusion
# roslaunch vins mynteye-s-mono-imu.launch # mono+imu fusion
# roslaunch vins mynteye-s2100-mono-imu.launch # mono+imu fusion with mynteye-s2100
# roslaunch vins mynteye-s2100-stereo.launch # Stereo fusion / Stereo+imu fusion with mynteye-s2100

View File

@@ -99,6 +99,8 @@ make_executable2(get_stereo SRCS data/get_stereo.cc WITH_OPENCV)
make_executable2(get_stereo_rectified SRCS data/get_stereo_rectified.cc WITH_OPENCV)
make_executable2(get_disparity SRCS data/get_disparity.cc WITH_OPENCV)
make_executable2(get_depth SRCS data/get_depth.cc WITH_OPENCV)
make_executable2(get_data_without_select SRCS data/get_data_without_select.cc WITH_OPENCV)
if(PCL_FOUND)
make_executable2(get_points
SRCS data/get_points.cc util/pc_viewer.cc

View File

@@ -28,15 +28,15 @@ int main(int argc, char *argv[]) {
api->ConfigStreamRequest(request);
Model model = api->GetModel();
if (model == Model::STANDARD210A) {
if (model == Model::STANDARD210A || model == Model::STANDARD2) {
api->SetOptionValue(Option::IIC_ADDRESS_SETTING, 0x31);
LOG(INFO) << "Set iic address to " << std::hex << "0x"
<< api->GetOptionValue(Option::IIC_ADDRESS_SETTING);
}
// MYNTEYE-S1030/S2100 don't support this option
if (model == Model::STANDARD2 || model == Model::STANDARD) {
LOG(INFO) << "Sorry,MYNTEYE-S1030/S2100 don't support iic address setting";
// MYNTEYE-S1030 don't support this option
if (model == Model::STANDARD) {
LOG(INFO) << "Sorry,MYNTEYE-S1030 don't support iic address setting";
return 0;
}

View File

@@ -0,0 +1,74 @@
// Copyright 2018 Slightech Co., Ltd. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <string>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include "mynteye/api/api.h"
MYNTEYE_USE_NAMESPACE
int main(int argc, char *argv[]) {
auto &&api = API::Create(argc, argv);
if (!api) return 1;
auto request = api->GetStreamRequest();
// struct StreamRequest {
// /** Stream width in pixels */
// std::uint16_t width;
// /** Stream height in pixels */
// std::uint16_t height;
// /** Stream pixel format */
// Format format;
// /** Stream frames per second */
// std::uint16_t fps;
// }
request.fps = 10;
api->ConfigStreamRequest(request);
api->EnableStreamData(Stream::DEPTH);
api->Start(Source::VIDEO_STREAMING);
cv::namedWindow("frame");
cv::namedWindow("depth");
while (true) {
api->WaitForStreams();
auto &&left_data = api->GetStreamData(Stream::LEFT);
auto &&right_data = api->GetStreamData(Stream::RIGHT);
if (!left_data.frame.empty() && !right_data.frame.empty()) {
cv::Mat img;
cv::hconcat(left_data.frame, right_data.frame, img);
cv::imshow("frame", img);
}
// this code is for real depth data
auto &&depth_data = api->GetStreamData(Stream::DEPTH);
if (!depth_data.frame.empty()) {
cv::imshow("depth", depth_data.frame); // CV_16UC1
}
char key = static_cast<char>(cv::waitKey(1));
if (key == 27 || key == 'q' || key == 'Q') { // ESC/Q
break;
}
}
api->Stop(Source::VIDEO_STREAMING);
return 0;
}

View File

@@ -11,6 +11,7 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <stdio.h>
#include <string>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
@@ -28,7 +29,10 @@ int main(int argc, char *argv[]) {
if (!ok) return 1;
api->ConfigStreamRequest(request);
api->SetDisparityComputingMethodType(DisparityComputingMethod::BM);
double fps;
double t = 0.01;
std::cout << "depth fps:" << std::endl;
api->EnableStreamData(Stream::DEPTH);
api->EnableStreamData(Stream::DISPARITY_NORMALIZED);
@@ -52,6 +56,10 @@ int main(int argc, char *argv[]) {
// this code is for real depth data
auto &&depth_data = api->GetStreamData(Stream::DEPTH);
if (!depth_data.frame.empty()) {
double t_c = cv::getTickCount() / cv::getTickFrequency();
fps = 1.0/(t_c - t);
printf("\b\b\b\b\b\b\b\b\b%.2f", fps);
t = t_c;
cv::imshow("depth_real", depth_data.frame); // CV_16UC1
}

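The FPS display added here, and repeated in the samples below, follows one pattern: take the current tick time, derive the instantaneous rate from the gap to the previous frame, and overwrite the printed value with backspaces. A standalone sketch of just that pattern (the timing loop is a hypothetical stand-in for api->WaitForStreams()):

#include <chrono>
#include <cstdio>
#include <thread>
#include <opencv2/core/core.hpp>

int main() {
  double t = 0.01;  // time of the previous frame, in seconds
  std::printf("fps:\n");
  for (int i = 0; i < 50; ++i) {
    // Stand-in for api->WaitForStreams() at roughly 30 Hz.
    std::this_thread::sleep_for(std::chrono::milliseconds(33));
    double t_c = cv::getTickCount() / cv::getTickFrequency();  // now, seconds
    double fps = 1.0 / (t_c - t);  // instantaneous rate from the frame gap
    std::printf("\b\b\b\b\b\b\b\b\b%.2f", fps);  // rewrite the value in place
    std::fflush(stdout);
    t = t_c;
  }
  std::printf("\n");
  return 0;
}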
View File

@@ -11,6 +11,7 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <stdio.h>
#include <opencv2/highgui/highgui.hpp>
#include "mynteye/api/api.h"
@@ -27,7 +28,7 @@ int main(int argc, char *argv[]) {
if (!ok) return 1;
api->ConfigStreamRequest(request);
// api->EnableStreamData(Stream::DISPARITY);
api->SetDisparityComputingMethodType(DisparityComputingMethod::SGBM);
api->EnableStreamData(Stream::DISPARITY_NORMALIZED);
if (argc == 2) {
@@ -45,14 +46,16 @@ int main(int argc, char *argv[]) {
}
}
api->SetDisparityComputingMethodType(DisparityComputingMethod::BM);
api->Start(Source::VIDEO_STREAMING);
cv::namedWindow("frame");
// cv::namedWindow("disparity");
cv::namedWindow("disparity_normalized");
double fps;
double t = 0.01;
std::cout << "disparity fps:" << std::endl;
while (true) {
api->WaitForStreams();
@@ -72,6 +75,10 @@ int main(int argc, char *argv[]) {
auto &&disp_norm_data = api->GetStreamData(Stream::DISPARITY_NORMALIZED);
if (!disp_norm_data.frame.empty()) {
double t_c = cv::getTickCount() / cv::getTickFrequency();
fps = 1.0/(t_c - t);
printf("\b\b\b\b\b\b\b\b\b%.2f", fps);
t = t_c;
cv::imshow("disparity_normalized", disp_norm_data.frame); // CV_8UC1
}

View File

@@ -11,6 +11,7 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <stdio.h>
#include <opencv2/highgui/highgui.hpp>
#include "mynteye/api/api.h"
@@ -28,12 +29,14 @@ int main(int argc, char *argv[]) {
if (!ok) return 1;
api->ConfigStreamRequest(request);
api->SetDisparityComputingMethodType(DisparityComputingMethod::BM);
api->EnableStreamData(Stream::POINTS);
api->Start(Source::VIDEO_STREAMING);
double fps;
double t = 0.01;
std::cout << "points cloud fps:" << std::endl;
cv::namedWindow("frame");
PCViewer pcviewer;
@@ -50,6 +53,10 @@ int main(int argc, char *argv[]) {
auto &&points_data = api->GetStreamData(Stream::POINTS);
if (!points_data.frame.empty()) {
double t_c = cv::getTickCount() / cv::getTickFrequency();
fps = 1.0/(t_c - t);
printf("\b\b\b\b\b\b\b\b\b%.2f", fps);
t = t_c;
pcviewer.Update(points_data.frame);
}

View File

@@ -11,6 +11,7 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <stdio.h>
#include <opencv2/highgui/highgui.hpp>
#include "mynteye/api/api.h"
@@ -25,9 +26,12 @@ int main(int argc, char *argv[]) {
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
api->SetDisparityComputingMethodType(DisparityComputingMethod::BM);
api->Start(Source::VIDEO_STREAMING);
double fps;
double t = 0.01;
std::cout << "fps:" << std::endl;
cv::namedWindow("frame");
while (true) {
@@ -38,6 +42,10 @@ int main(int argc, char *argv[]) {
cv::Mat img;
if (!left_data.frame.empty() && !right_data.frame.empty()) {
double t_c = cv::getTickCount() / cv::getTickFrequency();
fps = 1.0/(t_c - t);
printf("\b\b\b\b\b\b\b\b\b%.2f", fps);
t = t_c;
cv::hconcat(left_data.frame, right_data.frame, img);
cv::imshow("frame", img);
}

View File

@@ -11,6 +11,7 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <stdio.h>
#include <opencv2/highgui/highgui.hpp>
#include "mynteye/api/api.h"
@@ -25,13 +26,16 @@ int main(int argc, char *argv[]) {
auto &&request = api->SelectStreamRequest(&ok);
if (!ok) return 1;
api->ConfigStreamRequest(request);
api->SetDisparityComputingMethodType(DisparityComputingMethod::BM);
api->EnableStreamData(Stream::LEFT_RECTIFIED);
api->EnableStreamData(Stream::RIGHT_RECTIFIED);
api->Start(Source::VIDEO_STREAMING);
double fps;
double t = 0.01;
std::cout << "fps:" << std::endl;
cv::namedWindow("frame");
while (true) {
@@ -41,6 +45,11 @@ int main(int argc, char *argv[]) {
auto &&right_data = api->GetStreamData(Stream::RIGHT_RECTIFIED);
if (!left_data.frame.empty() && !right_data.frame.empty()) {
double t_c = cv::getTickCount() / cv::getTickFrequency();
fps = 1.0/(t_c - t);
printf("\b\b\b\b\b\b\b\b\b%.2f", fps);
t = t_c;
cv::Mat img;
cv::hconcat(left_data.frame, right_data.frame, img);
cv::imshow("frame", img);

View File

@@ -11,6 +11,7 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <stdio.h>
#include <opencv2/highgui/highgui.hpp>
#include "mynteye/api/api.h"
@@ -28,7 +29,7 @@ int main(int argc, char *argv[]) {
api->setDuplicate(true);
api->EnablePlugin("plugins/linux-x86_64/libplugin_g_cuda9.1_opencv3.4.0.so");
api->EnablePlugin("plugins/libplugin_g_cuda9.2_opencv3.3.1.so");
api->EnableStreamData(Stream::DISPARITY_NORMALIZED);
@@ -36,7 +37,9 @@ int main(int argc, char *argv[]) {
cv::namedWindow("frame");
cv::namedWindow("disparity");
double fps;
double t = 0.01;
std::cout << "fps:" << std::endl;
while (true) {
api->WaitForStreams();
@@ -44,6 +47,10 @@ int main(int argc, char *argv[]) {
auto &&right_data = api->GetStreamData(Stream::RIGHT);
if (!left_data.frame.empty() && !right_data.frame.empty()) {
double t_c = cv::getTickCount() / cv::getTickFrequency();
fps = 1.0/(t_c - t);
printf("\b\b\b\b\b\b\b\b\b%.2f", fps);
t = t_c;
cv::Mat img;
cv::hconcat(left_data.frame, right_data.frame, img);
cv::imshow("frame", img);

View File

@@ -326,7 +326,7 @@ void Synthetic::InitProcessors() {
std::shared_ptr<Processor> depth_processor = nullptr;
auto &&disparity_processor =
std::make_shared<DisparityProcessor>(DisparityComputingMethod::SGBM,
std::make_shared<DisparityProcessor>(DisparityComputingMethod::BM,
DISPARITY_PROC_PERIOD);
auto &&disparitynormalized_processor =
std::make_shared<DisparityNormalizedProcessor>(

View File

@@ -45,7 +45,8 @@ const std::map<Model, OptionSupports> option_supports_map = {
Option::IR_CONTROL, Option::MIN_EXPOSURE_TIME,
Option::DESIRED_BRIGHTNESS, Option::ACCELEROMETER_RANGE,
Option::GYROSCOPE_RANGE, Option::ACCELEROMETER_LOW_PASS_FILTER,
Option::GYROSCOPE_LOW_PASS_FILTER, Option::ERASE_CHIP}
Option::GYROSCOPE_LOW_PASS_FILTER, Option::IIC_ADDRESS_SETTING,
Option::ERASE_CHIP}
},
{Model::STANDARD210A, {
Option::BRIGHTNESS,

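With ``Option::IIC_ADDRESS_SETTING`` now in the STANDARD2 (S2100) support map, callers can probe support instead of hard-coding model checks. A sketch under the assumption that the SDK's ``API::Supports(Option)`` capability check covers this option:

#include "mynteye/api/api.h"

MYNTEYE_USE_NAMESPACE

int main(int argc, char *argv[]) {
  auto &&api = API::Create(argc, argv);
  if (!api) return 1;
  // Probe the support map rather than enumerating models by hand.
  if (api->Supports(Option::IIC_ADDRESS_SETTING)) {
    api->SetOptionValue(Option::IIC_ADDRESS_SETTING, 0x31);
  }
  return 0;
}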
View File

@@ -48,7 +48,7 @@
<group ns="$(arg mynteye)">
<!-- mynteye_wrapper_node -->
<node name="mynteye_wrapper_node" pkg="mynt_eye_ros_wrapper" type="mynteye_wrapper_node" output="screen" respawn="true" respawn_delay="5">
<node name="mynteye_wrapper_node" pkg="mynt_eye_ros_wrapper" type="mynteye_wrapper_node" output="screen" respawn="true" respawn_delay="10">
<!-- node params -->

View File

@@ -46,6 +46,8 @@ using namespace configuru; // NOLINT
#define FULL_PRECISION \
std::fixed << std::setprecision(std::numeric_limits<double>::max_digits10)
static const std::size_t MAXSIZE = 4;
MYNTEYE_BEGIN_NAMESPACE
namespace enc = sensor_msgs::image_encodings;
@@ -202,7 +204,9 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
is_started_ = false;
std::map<Stream, std::string> mono_names{{Stream::LEFT, "left_mono"},
{Stream::RIGHT, "right_mono"}};
{Stream::RIGHT, "right_mono"},
{Stream::LEFT_RECTIFIED, "left_rect_mono"},
{Stream::RIGHT_RECTIFIED, "right_rect_mono"}};
std::map<Stream, std::string> mono_topics{};
for (auto &&it = mono_names.begin(); it != mono_names.end(); ++it) {
@@ -317,7 +321,10 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
if (model_ == Model::STANDARD2 || model_ == Model::STANDARD210A) {
for (auto &&it = mono_topics.begin(); it != mono_topics.end(); ++it) {
auto &&topic = mono_topics[it->first];
if (it->first == Stream::LEFT || it->first == Stream::RIGHT) {
if (it->first == Stream::LEFT ||
it->first == Stream::RIGHT ||
it->first == Stream::RIGHT_RECTIFIED ||
it->first == Stream::LEFT_RECTIFIED) {
mono_publishers_[it->first] = it_mynteye.advertise(topic, 1);
}
NODELET_INFO_STREAM("Advertized on topic " << topic);
@@ -592,6 +599,10 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
return;
} else if (stream == Stream::POINTS) {
publishPoints(data, seq, stamp);
} else if (stream == Stream::LEFT_RECTIFIED ||
stream == Stream::RIGHT_RECTIFIED) {
publishMono(stream, data, seq, stamp);
publishCamera(stream, data, seq, stamp);
} else {
publishCamera(stream, data, seq, stamp);
}
@@ -608,7 +619,8 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
}
void publishOthers(const Stream &stream) {
if (getStreamSubscribers(stream) > 0 && !is_published_[stream]) {
if ((getStreamSubscribers(stream) > 0 && !is_published_[stream]) ||
mono_publishers_[stream].getNumSubscribers() > 0) {
api_->EnableStreamData(stream);
api_->SetStreamCallback(
stream, [this, stream](const api::StreamData &data) {
@@ -918,121 +930,111 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
}
void timestampAlign() {
static bool get_first_acc = false;
static bool get_first_gyro = false;
static bool has_gyro = false;
static ImuData acc;
static ImuData gyro;
if (!get_first_acc && imu_accel_ != nullptr) {
acc = *imu_accel_;
imu_accel_ = nullptr;
get_first_acc = true;
return;
}
if (!get_first_gyro && imu_gyro_ != nullptr) {
gyro = *imu_gyro_;
imu_gyro_ = nullptr;
get_first_gyro = true;
return;
}
static std::vector<ImuData> acc_buf;
static std::vector<ImuData> gyro_buf;
if (imu_accel_ != nullptr) {
if (!has_gyro) {
acc = *imu_accel_;
imu_accel_ = nullptr;
return;
}
if (acc.timestamp <= gyro.timestamp) {
ImuData acc_temp;
acc_temp = *imu_accel_;
acc_temp.timestamp = gyro.timestamp;
double k = static_cast<double>(imu_accel_->timestamp - acc.timestamp);
k = static_cast<double>(gyro.timestamp - acc.timestamp) / k;
acc_temp.accel[0] = acc.accel[0] +
(imu_accel_->accel[0] - acc.accel[0]) * k;
acc_temp.accel[1] = acc.accel[1] +
(imu_accel_->accel[1] - acc.accel[1]) * k;
acc_temp.accel[2] = acc.accel[2] +
(imu_accel_->accel[2] - acc.accel[2]) * k;
acc = *imu_accel_;
*imu_accel_ = acc_temp;
imu_gyro_.reset(new ImuData(gyro));
has_gyro = false;
return;
} else {
acc = *imu_accel_;
imu_accel_ = nullptr;
return;
}
acc_buf.push_back(*imu_accel_);
}
if (imu_gyro_ != nullptr) {
has_gyro = true;
gyro = *imu_gyro_;
imu_gyro_ = nullptr;
gyro_buf.push_back(*imu_gyro_);
}
imu_accel_ = nullptr;
imu_gyro_ = nullptr;
imu_align_.clear();
if (acc_buf.empty() || gyro_buf.empty()) {
return;
}
ImuData imu_temp;
auto itg = gyro_buf.end();
auto ita = acc_buf.end();
for (auto it_gyro = gyro_buf.begin();
it_gyro != gyro_buf.end(); it_gyro++) {
for (auto it_acc = acc_buf.begin();
it_acc+1 != acc_buf.end(); it_acc++) {
if (it_gyro->timestamp >= it_acc->timestamp
&& it_gyro->timestamp <= (it_acc+1)->timestamp) {
double k = static_cast<double>((it_acc+1)->timestamp - it_acc->timestamp);
k = static_cast<double>(it_gyro->timestamp - it_acc->timestamp) / k;
imu_temp = *it_gyro;
imu_temp.accel[0] = it_acc->accel[0] + ((it_acc+1)->accel[0] - it_acc->accel[0]) * k;
imu_temp.accel[1] = it_acc->accel[1] + ((it_acc+1)->accel[1] - it_acc->accel[1]) * k;
imu_temp.accel[2] = it_acc->accel[2] + ((it_acc+1)->accel[2] - it_acc->accel[2]) * k;
imu_align_.push_back(imu_temp);
itg = it_gyro;
ita = it_acc;
}
}
}
if (itg != gyro_buf.end()) {
gyro_buf.erase(gyro_buf.begin(), itg + 1);
}
if (ita != acc_buf.end()) {
acc_buf.erase(acc_buf.begin(), ita);
}
}
void publishImuBySync() {
timestampAlign();
if (imu_accel_ == nullptr || imu_gyro_ == nullptr) {
return;
for (int i = 0; i < imu_align_.size(); i++) {
sensor_msgs::Imu msg;
msg.header.seq = imu_sync_count_;
ros::Time stamp = checkUpImuTimeStamp(imu_align_[i].timestamp);
msg.header.stamp = stamp;
msg.header.frame_id = imu_frame_id_;
// acceleration should be in m/s^2 (not in g's)
msg.linear_acceleration.x = imu_align_[i].accel[0] * gravity_;
msg.linear_acceleration.y = imu_align_[i].accel[1] * gravity_;
msg.linear_acceleration.z = imu_align_[i].accel[2] * gravity_;
msg.linear_acceleration_covariance[0] = 0;
msg.linear_acceleration_covariance[1] = 0;
msg.linear_acceleration_covariance[2] = 0;
msg.linear_acceleration_covariance[3] = 0;
msg.linear_acceleration_covariance[4] = 0;
msg.linear_acceleration_covariance[5] = 0;
msg.linear_acceleration_covariance[6] = 0;
msg.linear_acceleration_covariance[7] = 0;
msg.linear_acceleration_covariance[8] = 0;
// velocity should be in rad/sec
msg.angular_velocity.x = imu_align_[i].gyro[0] * M_PI / 180;
msg.angular_velocity.y = imu_align_[i].gyro[1] * M_PI / 180;
msg.angular_velocity.z = imu_align_[i].gyro[2] * M_PI / 180;
msg.angular_velocity_covariance[0] = 0;
msg.angular_velocity_covariance[1] = 0;
msg.angular_velocity_covariance[2] = 0;
msg.angular_velocity_covariance[3] = 0;
msg.angular_velocity_covariance[4] = 0;
msg.angular_velocity_covariance[5] = 0;
msg.angular_velocity_covariance[6] = 0;
msg.angular_velocity_covariance[7] = 0;
msg.angular_velocity_covariance[8] = 0;
pub_imu_.publish(msg);
publishTemperature(imu_align_[i].temperature, imu_sync_count_, stamp);
++imu_sync_count_;
}
sensor_msgs::Imu msg;
ros::Time stamp = checkUpImuTimeStamp(imu_accel_->timestamp);
msg.header.seq = imu_sync_count_;
msg.header.stamp = stamp;
msg.header.frame_id = imu_frame_id_;
// acceleration should be in m/s^2 (not in g's)
msg.linear_acceleration.x = imu_accel_->accel[0] * gravity_;
msg.linear_acceleration.y = imu_accel_->accel[1] * gravity_;
msg.linear_acceleration.z = imu_accel_->accel[2] * gravity_;
msg.linear_acceleration_covariance[0] = 0;
msg.linear_acceleration_covariance[1] = 0;
msg.linear_acceleration_covariance[2] = 0;
msg.linear_acceleration_covariance[3] = 0;
msg.linear_acceleration_covariance[4] = 0;
msg.linear_acceleration_covariance[5] = 0;
msg.linear_acceleration_covariance[6] = 0;
msg.linear_acceleration_covariance[7] = 0;
msg.linear_acceleration_covariance[8] = 0;
// velocity should be in rad/sec
msg.angular_velocity.x = imu_gyro_->gyro[0] * M_PI / 180;
msg.angular_velocity.y = imu_gyro_->gyro[1] * M_PI / 180;
msg.angular_velocity.z = imu_gyro_->gyro[2] * M_PI / 180;
msg.angular_velocity_covariance[0] = 0;
msg.angular_velocity_covariance[1] = 0;
msg.angular_velocity_covariance[2] = 0;
msg.angular_velocity_covariance[3] = 0;
msg.angular_velocity_covariance[4] = 0;
msg.angular_velocity_covariance[5] = 0;
msg.angular_velocity_covariance[6] = 0;
msg.angular_velocity_covariance[7] = 0;
msg.angular_velocity_covariance[8] = 0;
pub_imu_.publish(msg);
publishTemperature(imu_accel_->temperature, imu_sync_count_, stamp);
++imu_sync_count_;
imu_accel_ = nullptr;
imu_gyro_ = nullptr;
}
void publishTemperature(
@@ -1304,14 +1306,12 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
if (stream == Stream::RIGHT ||
stream == Stream::RIGHT_RECTIFIED) {
if (info_pair->right.distortion_model == "KANNALA_BRANDT") {
camera_info->distortion_model =
sensor_msgs::distortion_models::EQUIDISTANT;
camera_info->distortion_model = "KANNALA_BRANDT";
for (size_t i; i < 4; i++) {
camera_info->D.push_back(info_pair->right.D[i]);
}
} else if (info_pair->right.distortion_model == "PINHOLE") {
camera_info->distortion_model =
sensor_msgs::distortion_models::PLUMB_BOB;
camera_info->distortion_model = "plumb_bob";
for (size_t i; i < 5; i++) {
camera_info->D.push_back(info_pair->right.D[i]);
}
@@ -1331,21 +1331,18 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
bool is_laserscan = false;
private_nh_.getParamCached("is_laserscan", is_laserscan);
if (!is_laserscan) {
camera_info->distortion_model =
sensor_msgs::distortion_models::EQUIDISTANT;
camera_info->distortion_model = "KANNALA_BRANDT";
for (size_t i; i < 4; i++) {
camera_info->D.push_back(info_pair->left.D[i]);
}
} else {
camera_info->distortion_model =
sensor_msgs::distortion_models::PLUMB_BOB;
camera_info->distortion_model = "KANNALA_BRANDT";
for (size_t i; i < 4; i++) {
camera_info->D.push_back(0.0);
}
}
} else if (info_pair->left.distortion_model == "PINHOLE") {
camera_info->distortion_model =
sensor_msgs::distortion_models::PLUMB_BOB;
camera_info->distortion_model = "plumb_bob";
for (size_t i; i < 5; i++) {
camera_info->D.push_back(info_pair->left.D[i]);
}
@@ -1586,6 +1583,7 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
bool is_started_;
int frame_rate_;
bool is_intrinsics_enable_;
std::vector<ImuData> imu_align_;
};
MYNTEYE_END_NAMESPACE
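The reworked timestampAlign() buffers accelerometer and gyroscope samples and, for each gyro sample bracketed by two accel samples, linearly interpolates the accelerometer to the gyro timestamp with k = (t_gyro - t_acc0) / (t_acc1 - t_acc0). A self-contained sketch of that core idea (the Sample type here is hypothetical, not the SDK's ImuData):

#include <cstdint>
#include <cstdio>
#include <vector>

// Hypothetical stand-in for the SDK's ImuData: a timestamp plus a 3-axis value.
struct Sample {
  std::uint64_t timestamp;
  double v[3];
};

// Linearly interpolate the bracketing accel pair at the gyro timestamp.
Sample InterpolateAccel(const Sample &a0, const Sample &a1,
                        std::uint64_t t_gyro) {
  double k = static_cast<double>(t_gyro - a0.timestamp) /
             static_cast<double>(a1.timestamp - a0.timestamp);
  Sample out{t_gyro, {0, 0, 0}};
  for (int i = 0; i < 3; ++i)
    out.v[i] = a0.v[i] + (a1.v[i] - a0.v[i]) * k;
  return out;
}

int main() {
  std::vector<Sample> acc{{0, {0.0, 0.0, 9.8}}, {10, {0.2, 0.0, 9.8}}};
  std::vector<Sample> gyro{{4, {0.01, 0.02, 0.03}}};
  for (const auto &g : gyro) {
    // Only gyro samples that fall between two accel samples are aligned;
    // the rest stay buffered, as in the nodelet's acc_buf/gyro_buf.
    if (g.timestamp >= acc.front().timestamp &&
        g.timestamp <= acc.back().timestamp) {
      Sample aligned = InterpolateAccel(acc.front(), acc.back(), g.timestamp);
      std::printf("t=%llu accel.x=%.3f\n",
                  static_cast<unsigned long long>(aligned.timestamp),
                  aligned.v[0]);
    }
  }
  return 0;
}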