Merge branch 'develop' into feature/android
* develop:
  fix opencv 4.x compile
  feat(intri): ros img prams out put in srv
  fix opencv 4.x compile
  fix(complie): depend error
  fix(style): complie warning
  fix(ros): use bm as ros default
  fix(disparity): opencv2.0 logic
  fix(default): default ros launch use bm
  fix(discut): eigen 3rd path
  fix(discut): eigen path fix
  fix(discut): fix discut boost & add ros disparity_type_
  fix(ros): delete useless code
  fix(ros): Align imu time stamp
  style(camodocal): camodocal style
commit b35d55309d
@@ -131,11 +131,11 @@ endif()
## camodocal

if(WITH_CAM_MODELS)
-set(EIGEN_INCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/3rdparty/eigen3)
+set(EIGEN_INCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/3rdparty)
-message(STATUS "EIGEN_INCLUDE_DIRS: ${EIGEN_INCLUDE_DIRS}")
+message(STATUS "EIGEN_INCLUDE_DIR: ${EIGEN_INCLUDE_DIR}")

include_directories(
-${EIGEN_INCLUDE_DIRS}
+${EIGEN_INCLUDE_DIR}
src/mynteye/api/camodocal/include
)

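The CMake hunk above repoints the vendored Eigen path at the parent 3rdparty directory instead of 3rdparty/eigen3 ("fix(discut): eigen 3rd path"). That matches the include style used by the camodocal sources later in this diff, which spell out the eigen3/ prefix themselves. A minimal sketch, assuming the 3rdparty/eigen3 layout implied by the new setting:

// Sketch only: with EIGEN_INCLUDE_DIR set to <source>/3rdparty, the explicit
// "eigen3/" prefix below resolves to <source>/3rdparty/eigen3/Eigen/Dense,
// which is how the camodocal headers in this diff include Eigen.
#include "eigen3/Eigen/Dense"

int main() {
  Eigen::Vector2d p(1.0, 2.0);  // quick check that the headers resolve
  return p.size() == 2 ? 0 : 1;
}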
@@ -30,12 +30,8 @@ make_executable(camera_a

## get_depth_with_region

-if(OpenCV_VERSION VERSION_LESS 4.0)

make_executable(get_depth_with_region
SRCS get_depth_with_region.cc
LINK_LIBS mynteye ${OpenCV_LIBS}
DLL_SEARCH_PATHS ${PRO_DIR}/_install/bin ${OpenCV_LIB_SEARCH_PATH}
)

-endif()

@@ -30,17 +30,17 @@ class DepthRegion {
*/
void OnMouse(const int &event, const int &x, const int &y, const int &flags) {
MYNTEYE_UNUSED(flags)
-if (event != CV_EVENT_MOUSEMOVE && event != CV_EVENT_LBUTTONDOWN) {
+if (event != cv::EVENT_MOUSEMOVE && event != cv::EVENT_LBUTTONDOWN) {
return;
}
show_ = true;

-if (event == CV_EVENT_MOUSEMOVE) {
+if (event == cv::EVENT_MOUSEMOVE) {
if (!selected_) {
point_.x = x;
point_.y = y;
}
-} else if (event == CV_EVENT_LBUTTONDOWN) {
+} else if (event == cv::EVENT_LBUTTONDOWN) {
if (selected_) {
if (x >= static_cast<int>(point_.x - n_) &&
x <= static_cast<int>(point_.x + n_) &&

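This sample, and the point-cloud variant further down, switch from the C-style CV_EVENT_* macros to the namespaced cv::EVENT_* values; this is what "fix opencv 4.x compile" refers to, since the old macros are no longer pulled in by the default OpenCV 4 headers while the cv:: enums work on 3.x and 4.x alike. A minimal sketch, not taken from this repo, of the same constants in a plain mouse callback:

// Minimal sketch (assumes OpenCV >= 3): the namespaced event constants the
// hunk above switches to, used with the standard cv::setMouseCallback hook.
#include <opencv2/highgui/highgui.hpp>

static void OnMouse(int event, int x, int y, int /*flags*/, void * /*userdata*/) {
  if (event != cv::EVENT_MOUSEMOVE && event != cv::EVENT_LBUTTONDOWN) {
    return;  // same filtering as the sample's DepthRegion::OnMouse
  }
  // ... track (x, y) here ...
}

int main() {
  cv::namedWindow("frame");
  cv::setMouseCallback("frame", OnMouse);
  cv::waitKey(0);
  return 0;
}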
@@ -129,8 +129,6 @@ make_executable2(ctrl_manual_exposure

if(PCL_FOUND)

-if(OpenCV_VERSION VERSION_LESS 4.0)

make_executable2(get_depth_and_points
SRCS intermediate/get_depth_and_points.cc util/cv_painter.cc util/pc_viewer.cc
WITH_OPENCV WITH_PCL
@@ -138,6 +136,4 @@ make_executable2(get_depth_and_points

endif()

-endif()

# advanced level

@@ -34,17 +34,17 @@ class DepthRegion {
*/
void OnMouse(const int &event, const int &x, const int &y, const int &flags) {
MYNTEYE_UNUSED(flags)
-if (event != CV_EVENT_MOUSEMOVE && event != CV_EVENT_LBUTTONDOWN) {
+if (event != cv::EVENT_MOUSEMOVE && event != cv::EVENT_LBUTTONDOWN) {
return;
}
show_ = true;

-if (event == CV_EVENT_MOUSEMOVE) {
+if (event == cv::EVENT_MOUSEMOVE) {
if (!selected_) {
point_.x = x;
point_.y = y;
}
-} else if (event == CV_EVENT_LBUTTONDOWN) {
+} else if (event == cv::EVENT_LBUTTONDOWN) {
if (selected_) {
if (x >= static_cast<int>(point_.x - n_) &&
x <= static_cast<int>(point_.x + n_) &&

@@ -1,10 +1,23 @@
-#ifndef CAMERA_H
-#define CAMERA_H
+// Copyright 2018 Slightech Co., Ltd. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+#ifndef SRC_MYNTEYE_API_CAMODOCAL_INCLUDE_CAMODOCAL_CAMERA_MODELS_CAMERA_H_
+#define SRC_MYNTEYE_API_CAMODOCAL_INCLUDE_CAMODOCAL_CAMERA_MODELS_CAMERA_H_

-#include <boost/shared_ptr.hpp>
+#include <vector>
+#include <memory>
#include "eigen3/Eigen/Dense"
#include <opencv2/core/core.hpp>
-#include <vector>

namespace camodocal {

@@ -16,7 +29,7 @@ class Camera {
class Parameters {
public:
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
-Parameters(ModelType modelType);
+explicit Parameters(ModelType modelType);

Parameters(
ModelType modelType, const std::string &cameraName, int w, int h);
@@ -33,9 +46,6 @@ class Camera {

int nIntrinsics(void) const;

-virtual bool readFromYamlFile(const std::string &filename) = 0;
-virtual void writeToYamlFile(const std::string &filename) const = 0;

protected:
ModelType m_modelType;
int m_nIntrinsics;
@@ -58,22 +68,17 @@ class Camera {
const std::vector<std::vector<cv::Point2f> > &imagePoints) = 0;
virtual void estimateExtrinsics(
const std::vector<cv::Point3f> &objectPoints,
-const std::vector<cv::Point2f> &imagePoints, cv::Mat &rvec,
+const std::vector<cv::Point2f> &imagePoints,
-cv::Mat &tvec) const;
+cv::Mat &rvec, cv::Mat &tvec) const; // NOLINT

-// Lift points from the image plane to the sphere
-virtual void liftSphere(
-const Eigen::Vector2d &p, Eigen::Vector3d &P) const = 0;
-//%output P

// Lift points from the image plane to the projective space
virtual void liftProjective(
-const Eigen::Vector2d &p, Eigen::Vector3d &P) const = 0;
+const Eigen::Vector2d &p, Eigen::Vector3d &P) const = 0; // NOLINT
// %output P

// Projects 3D points to the image plane (Pi function)
virtual void spaceToPlane(
-const Eigen::Vector3d &P, Eigen::Vector2d &p) const = 0;
+const Eigen::Vector3d &P, Eigen::Vector2d &p) const = 0; // NOLINT
// %output p

// Projects 3D points to the image plane (Pi function)
@@ -83,25 +88,19 @@ class Camera {
// %output p
// %output J

-virtual void undistToPlane(
-const Eigen::Vector2d &p_u, Eigen::Vector2d &p) const = 0;
-//%output p

// virtual void initUndistortMap(cv::Mat& map1, cv::Mat& map2, double fScale =
// 1.0) const = 0;
virtual cv::Mat initUndistortRectifyMap(
-cv::Mat &map1, cv::Mat &map2, float fx = -1.0f, float fy = -1.0f,
+cv::Mat &map1, cv::Mat &map2, float fx = -1.0f, float fy = -1.0f, // NOLINT
cv::Size imageSize = cv::Size(0, 0), float cx = -1.0f, float cy = -1.0f,
cv::Mat rmat = cv::Mat::eye(3, 3, CV_32F)) const = 0;

virtual int parameterCount(void) const = 0;

virtual void readParameters(const std::vector<double> &parameters) = 0;
-virtual void writeParameters(std::vector<double> &parameters) const = 0;
+virtual void writeParameters(std::vector<double> &parameters) const = 0; // NOLINT

-virtual void writeParametersToYamlFile(const std::string &filename) const = 0;
+virtual std::string parametersToString(void) const = 0; // NOLINT

-virtual std::string parametersToString(void) const = 0;

/**
* \brief Calculates the reprojection distance between points
@@ -125,14 +124,15 @@ class Camera {

void projectPoints(
const std::vector<cv::Point3f> &objectPoints, const cv::Mat &rvec,
-const cv::Mat &tvec, std::vector<cv::Point2f> &imagePoints) const;
+const cv::Mat &tvec, std::vector<cv::Point2f> &imagePoints) const; // NOLINT

protected:
cv::Mat m_mask;
};

-typedef boost::shared_ptr<Camera> CameraPtr;
+typedef std::shared_ptr<Camera> CameraPtr;
-typedef boost::shared_ptr<const Camera> CameraConstPtr;
+typedef std::shared_ptr<const Camera> CameraConstPtr;
}

-#endif
+#endif // SRC_MYNTEYE_API_CAMODOCAL_INCLUDE_CAMODOCAL_CAMERA_MODELS_CAMERA_H_

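Besides the license header and the cpplint-style include guard, the CameraPtr/CameraConstPtr typedefs above move from boost::shared_ptr to std::shared_ptr (the "fix discut boost" commit in this merge). Call sites keep compiling because only the underlying alias changes; a small sketch, not the SDK's code, of what that looks like downstream:

// Sketch only: with the new typedefs, camera handles are plain C++11 shared
// pointers, so construction goes through std::make_shared and consumers of
// camodocal no longer need any Boost headers.
#include <memory>

struct Camera { virtual ~Camera() = default; };        // stand-in for camodocal::Camera
typedef std::shared_ptr<Camera> CameraPtr;              // as in the new Camera.h
typedef std::shared_ptr<const Camera> CameraConstPtr;

int main() {
  CameraPtr cam = std::make_shared<Camera>();
  CameraConstPtr view = cam;  // non-const -> const conversion still implicit
  return cam && view ? 0 : 1;
}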
@@ -1,10 +1,22 @@
-#ifndef EQUIDISTANTCAMERA_H
-#define EQUIDISTANTCAMERA_H
+// Copyright 2018 Slightech Co., Ltd. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+#ifndef SRC_MYNTEYE_API_CAMODOCAL_INCLUDE_CAMODOCAL_CAMERA_MODELS_EQUIDISTANTCAMERA_H_
+#define SRC_MYNTEYE_API_CAMODOCAL_INCLUDE_CAMODOCAL_CAMERA_MODELS_EQUIDISTANTCAMERA_H_

-#include <opencv2/core/core.hpp>
#include <string>

#include "Camera.h"
+#include <opencv2/core/core.hpp>

namespace camodocal {

@@ -59,9 +71,6 @@ class EquidistantCamera : public Camera {
double u0(void) const;
double v0(void) const;

-bool readFromYamlFile(const std::string &filename);
-void writeToYamlFile(const std::string &filename) const;

Parameters &operator=(const Parameters &other);
friend std::ostream &operator<<(
std::ostream &out, const Parameters &params);
@@ -91,7 +100,7 @@ class EquidistantCamera : public Camera {
/**
* \brief Constructor from the projection model parameters
*/
-EquidistantCamera(const Parameters &params);
+explicit EquidistantCamera(const Parameters &params);

Camera::ModelType modelType(void) const;
const std::string &cameraName(void) const;
@@ -103,38 +112,31 @@ class EquidistantCamera : public Camera {
const std::vector<std::vector<cv::Point3f> > &objectPoints,
const std::vector<std::vector<cv::Point2f> > &imagePoints);

-// Lift points from the image plane to the sphere
-virtual void liftSphere(const Eigen::Vector2d &p, Eigen::Vector3d &P) const;
-//%output P

// Lift points from the image plane to the projective space
-void liftProjective(const Eigen::Vector2d &p, Eigen::Vector3d &P) const;
+void liftProjective(const Eigen::Vector2d &p, Eigen::Vector3d &P) const; // NOLINT
// %output P

// Projects 3D points to the image plane (Pi function)
-void spaceToPlane(const Eigen::Vector3d &P, Eigen::Vector2d &p) const;
+void spaceToPlane(const Eigen::Vector3d &P, Eigen::Vector2d &p) const; // NOLINT
// %output p

// Projects 3D points to the image plane (Pi function)
// and calculates jacobian
void spaceToPlane(
-const Eigen::Vector3d &P, Eigen::Vector2d &p,
+const Eigen::Vector3d &P, Eigen::Vector2d &p, // NOLINT
-Eigen::Matrix<double, 2, 3> &J) const;
+Eigen::Matrix<double, 2, 3> &J) const; // NOLINT
// %output p
// %output J

-void undistToPlane(const Eigen::Vector2d &p_u, Eigen::Vector2d &p) const;
-//%output p

template <typename T>
static void spaceToPlane(
const T *const params, const T *const q, const T *const t,
-const Eigen::Matrix<T, 3, 1> &P, Eigen::Matrix<T, 2, 1> &p);
+const Eigen::Matrix<T, 3, 1> &P, Eigen::Matrix<T, 2, 1> &p); // NOLINT

void initUndistortMap(
-cv::Mat &map1, cv::Mat &map2, double fScale = 1.0) const;
+cv::Mat &map1, cv::Mat &map2, double fScale = 1.0) const; // NOLINT
cv::Mat initUndistortRectifyMap(
-cv::Mat &map1, cv::Mat &map2, float fx = -1.0f, float fy = -1.0f,
+cv::Mat &map1, cv::Mat &map2, float fx = -1.0f, float fy = -1.0f, // NOLINT
cv::Size imageSize = cv::Size(0, 0), float cx = -1.0f, float cy = -1.0f,
cv::Mat rmat = cv::Mat::eye(3, 3, CV_32F)) const;

@@ -144,9 +146,7 @@ class EquidistantCamera : public Camera {
void setParameters(const Parameters &parameters);

void readParameters(const std::vector<double> &parameterVec);
-void writeParameters(std::vector<double> &parameterVec) const;
+void writeParameters(std::vector<double> &parameterVec) const; // NOLINT

-void writeParametersToYamlFile(const std::string &filename) const;

std::string parametersToString(void) const;

@@ -156,18 +156,18 @@ class EquidistantCamera : public Camera {

void fitOddPoly(
const std::vector<double> &x, const std::vector<double> &y, int n,
-std::vector<double> &coeffs) const;
+std::vector<double> &coeffs) const; // NOLINT

void backprojectSymmetric(
-const Eigen::Vector2d &p_u, double &theta, double &phi) const;
+const Eigen::Vector2d &p_u, double &theta, double &phi) const; // NOLINT

Parameters mParameters;

double m_inv_K11, m_inv_K13, m_inv_K22, m_inv_K23;
};

-typedef boost::shared_ptr<EquidistantCamera> EquidistantCameraPtr;
+typedef std::shared_ptr<EquidistantCamera> EquidistantCameraPtr;
-typedef boost::shared_ptr<const EquidistantCamera> EquidistantCameraConstPtr;
+typedef std::shared_ptr<const EquidistantCamera> EquidistantCameraConstPtr;

template <typename T>
T EquidistantCamera::r(T k2, T k3, T k4, T k5, T theta) {
@@ -227,4 +227,5 @@ void EquidistantCamera::spaceToPlane(
}
}

-#endif
+#endif // SRC_MYNTEYE_API_CAMODOCAL_INCLUDE_CAMODOCAL_CAMERA_MODELS_EQUIDISTANTCAMERA_H_

@@ -1,6 +1,20 @@
-#ifndef GPL_H
-#define GPL_H
+// Copyright 2018 Slightech Co., Ltd. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+#ifndef SRC_MYNTEYE_API_CAMODOCAL_INCLUDE_CAMODOCAL_GPL_GPL_H_
+#define SRC_MYNTEYE_API_CAMODOCAL_INCLUDE_CAMODOCAL_GPL_GPL_H_

+#include <vector>
#include <algorithm>
#include <cmath>
#include <opencv2/core/core.hpp>
@@ -48,7 +62,7 @@ const T cube(const T &x) {

template <class T>
const T random(const T &a, const T &b) {
-return static_cast<double>(rand()) / RAND_MAX * (b - a) + a;
+return static_cast<double>(rand()) / RAND_MAX * (b - a) + a; // NOLINT
}

template <class T>
@@ -66,35 +80,36 @@ const T randomNormal(const T &sigma) {
return x1 * w * sigma;
}

-unsigned long long timeInMicroseconds(void);
+unsigned long long timeInMicroseconds(void); // NOLINT

double timeInSeconds(void);

void colorDepthImage(
-cv::Mat &imgDepth, cv::Mat &imgColoredDepth, float minRange,
+cv::Mat &imgDepth, cv::Mat &imgColoredDepth, float minRange, // NOLINT
float maxRange);

bool colormap(
-const std::string &name, unsigned char idx, float &r, float &g, float &b);
+const std::string &name, unsigned char idx, float &r, float &g, float &b); // NOLINT

std::vector<cv::Point2i> bresLine(int x0, int y0, int x1, int y1);
std::vector<cv::Point2i> bresCircle(int x0, int y0, int r);

void fitCircle(
-const std::vector<cv::Point2d> &points, double &centerX, double &centerY,
+const std::vector<cv::Point2d> &points, double &centerX, double &centerY, // NOLINT
-double &radius);
+double &radius); // NOLINT

std::vector<cv::Point2d> intersectCircles(
double x1, double y1, double r1, double x2, double y2, double r2);

void LLtoUTM(
-double latitude, double longitude, double &utmNorthing, double &utmEasting,
+double latitude, double longitude, double &utmNorthing, double &utmEasting, // NOLINT
-std::string &utmZone);
+std::string &utmZone); // NOLINT
void UTMtoLL(
-double utmNorthing, double utmEasting, const std::string &utmZone,
+double utmNorthing, double utmEasting, const std::string &utmZone, // NOLINT
-double &latitude, double &longitude);
+double &latitude, double &longitude); // NOLINT

-long int timestampDiff(uint64_t t1, uint64_t t2);
+long int timestampDiff(uint64_t t1, uint64_t t2); // NOLINT
}

-#endif
+#endif // SRC_MYNTEYE_API_CAMODOCAL_INCLUDE_CAMODOCAL_GPL_GPL_H_

@@ -1,6 +1,18 @@
-#include "camodocal/camera_models/Camera.h"
+// Copyright 2018 Slightech Co., Ltd. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
#include <opencv2/calib3d/calib3d.hpp>
+#include "camodocal/camera_models/Camera.h"

namespace camodocal {

@@ -1,15 +1,28 @@
-#include "camodocal/camera_models/EquidistantCamera.h"
+// Copyright 2018 Slightech Co., Ltd. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+#include <iomanip>
+#include <iostream>
#include <cstdint>
#include <cmath>
#include <cstdio>
+#include "camodocal/camera_models/EquidistantCamera.h"
#include "eigen3/Eigen/Dense"
-#include <iomanip>
-#include <iostream>
#include <opencv2/calib3d/calib3d.hpp>
#include <opencv2/core/eigen.hpp>
#include <opencv2/imgproc/imgproc.hpp>

#include "camodocal/gpl/gpl.h"
+// #include "mynteye/logger.h"

namespace camodocal {
#define PI M_PI
@@ -19,13 +32,20 @@ float ApproxAtan2(float y, float x)
const float n1 = 0.97239411f;
const float n2 = -0.19194795f;
float result = 0.0f;
-if (x != 0.0f)
-{
-const union { float flVal; std::uint32_t nVal; } tYSign = { y };
-const union { float flVal; std::uint32_t nVal; } tXSign = { x };
-if (fabsf(x) >= fabsf(y))
-{
-union { float flVal; std::uint32_t nVal; } tOffset = { PI };
+if (x != 0.0f) {
+const union {
+float flVal;
+std::uint32_t nVal;
+} tYSign = { y };
+const union {
+float flVal;
+std::uint32_t nVal;
+} tXSign = { x };
+if (fabsf(x) >= fabsf(y)) {
+union {
+float flVal;
+std::uint32_t nVal;
+} tOffset = { PI };
// Add or subtract PI based on y's sign.
tOffset.nVal |= tYSign.nVal & 0x80000000u;
// No offset if x is positive, so multiply by 0 or based on x's sign.
@@ -33,23 +53,20 @@ float ApproxAtan2(float y, float x)
result = tOffset.flVal;
const float z = y / x;
result += (n1 + n2 * z * z) * z;
-}
-else // Use atan(y/x) = pi/2 - atan(x/y) if |y/x| > 1.
-{
-union { float flVal; std::uint32_t nVal; } tOffset = { PI_2 };
+} else {  // Use atan(y/x) = pi/2 - atan(x/y) if |y/x| > 1.
+union {
+float flVal;
+std::uint32_t nVal;
+} tOffset = { PI_2 };
// Add or subtract PI/2 based on y's sign.
tOffset.nVal |= tYSign.nVal & 0x80000000u;
result = tOffset.flVal;
const float z = x / y;
result -= (n1 + n2 * z * z) * z;
}
-}
-else if (y > 0.0f)
-{
+} else if (y > 0.0f) {
result = PI_2;
-}
-else if (y < 0.0f)
-{
+} else if (y < 0.0f) {
result = -PI_2;
}
return result;
@@ -143,60 +160,6 @@ double EquidistantCamera::Parameters::v0(void) const {
return m_v0;
}

-bool EquidistantCamera::Parameters::readFromYamlFile(
-const std::string &filename) {
-cv::FileStorage fs(filename, cv::FileStorage::READ);
-
-if (!fs.isOpened()) {
-return false;
-}
-
-if (!fs["model_type"].isNone()) {
-std::string sModelType;
-fs["model_type"] >> sModelType;
-
-if (sModelType.compare("KANNALA_BRANDT") != 0) {
-return false;
-}
-}
-
-m_modelType = KANNALA_BRANDT;
-fs["camera_name"] >> m_cameraName;
-m_imageWidth = static_cast<int>(fs["image_width"]);
-m_imageHeight = static_cast<int>(fs["image_height"]);
-
-cv::FileNode n = fs["projection_parameters"];
-m_k2 = static_cast<double>(n["k2"]);
-m_k3 = static_cast<double>(n["k3"]);
-m_k4 = static_cast<double>(n["k4"]);
-m_k5 = static_cast<double>(n["k5"]);
-m_mu = static_cast<double>(n["mu"]);
-m_mv = static_cast<double>(n["mv"]);
-m_u0 = static_cast<double>(n["u0"]);
-m_v0 = static_cast<double>(n["v0"]);
-
-return true;
-}
-
-void EquidistantCamera::Parameters::writeToYamlFile(
-const std::string &filename) const {
-cv::FileStorage fs(filename, cv::FileStorage::WRITE);
-
-fs << "model_type"
-<< "KANNALA_BRANDT";
-fs << "camera_name" << m_cameraName;
-fs << "image_width" << m_imageWidth;
-fs << "image_height" << m_imageHeight;
-
-// projection: k2, k3, k4, k5, mu, mv, u0, v0
-fs << "projection_parameters";
-fs << "{"
-<< "k2" << m_k2 << "k3" << m_k3 << "k4" << m_k4 << "k5" << m_k5 << "mu"
-<< m_mu << "mv" << m_mv << "u0" << m_u0 << "v0" << m_v0 << "}";
-
-fs.release();
-}
-
EquidistantCamera::Parameters &EquidistantCamera::Parameters::operator=(
const EquidistantCamera::Parameters &other) {
if (this != &other) {
@@ -313,7 +276,7 @@ void EquidistantCamera::estimateIntrinsics(
double f0 = 0.0;
for (size_t i = 0; i < imagePoints.size(); ++i) {
std::vector<Eigen::Vector2d> center(boardSize.height);
-double radius[boardSize.height];
+double radius[boardSize.height]; // NOLINT
for (int r = 0; r < boardSize.height; ++r) {
std::vector<cv::Point2d> circle;
for (int c = 0; c < boardSize.width; ++c) {
@@ -373,17 +336,6 @@ void EquidistantCamera::estimateIntrinsics(
setParameters(params);
}

-/**
-* \brief Lifts a point from the image plane to the unit sphere
-*
-* \param p image coordinates
-* \param P coordinates of the point on the sphere
-*/
-void EquidistantCamera::liftSphere(
-const Eigen::Vector2d &p, Eigen::Vector3d &P) const {
-liftProjective(p, P);
-}
-
/**
* \brief Lifts a point from the image plane to its projective ray
*
@@ -453,33 +405,6 @@ void EquidistantCamera::spaceToPlane(
mParameters.mv() * p_u(1) + mParameters.v0();
}

-/**
-* \brief Projects an undistorted 2D point p_u to the image plane
-*
-* \param p_u 2D point coordinates
-* \return image point coordinates
-*/
-void EquidistantCamera::undistToPlane(
-const Eigen::Vector2d &p_u, Eigen::Vector2d &p) const {
-// Eigen::Vector2d p_d;
-//
-// if (m_noDistortion)
-// {
-// p_d = p_u;
-// }
-// else
-// {
-// // Apply distortion
-// Eigen::Vector2d d_u;
-// distortion(p_u, d_u);
-// p_d = p_u + d_u;
-// }
-//
-// // Apply generalised projection matrix
-// p << mParameters.gamma1() * p_d(0) + mParameters.u0(),
-// mParameters.gamma2() * p_d(1) + mParameters.v0();
-}
-
void EquidistantCamera::initUndistortMap(
cv::Mat &map1, cv::Mat &map2, double fScale) const {
cv::Size imageSize(mParameters.imageWidth(), mParameters.imageHeight());
@@ -582,7 +507,7 @@ void EquidistantCamera::setParameters(

void EquidistantCamera::readParameters(
const std::vector<double> &parameterVec) {
-if (parameterVec.size() != parameterCount()) {
+if (parameterVec.size() != static_cast<unsigned int>(parameterCount())) {
return;
}

@@ -613,11 +538,6 @@ void EquidistantCamera::writeParameters(
parameterVec.at(7) = mParameters.v0();
}

-void EquidistantCamera::writeParametersToYamlFile(
-const std::string &filename) const {
-mParameters.writeToYamlFile(filename);
-}
-
std::string EquidistantCamera::parametersToString(void) const {
std::ostringstream oss;
oss << mParameters;
@@ -730,4 +650,4 @@ void EquidistantCamera::backprojectSymmetric(
}
}
}
-}
+}  // namespace camodocal

@@ -1,3 +1,16 @@
+// Copyright 2018 Slightech Co., Ltd. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
#include "camodocal/gpl/gpl.h"

#include <set>
@@ -112,15 +125,15 @@ int clock_gettime(int X, struct timespec *tp) {
if (usePerformanceCounter) {
QueryPerformanceCounter(&offset);
frequencyToMicroseconds =
-(double)performanceFrequency.QuadPart / 1000000.;
+static_cast<double>(performanceFrequency.QuadPart / 1000000.);
} else {
offset = getFILETIMEoffset();
frequencyToMicroseconds = 10.;
}
}
-if (usePerformanceCounter)
+if (usePerformanceCounter) {
QueryPerformanceCounter(&t);
-else {
+} else {
GetSystemTimeAsFileTime(&f);
t.QuadPart = f.dwHighDateTime;
t.QuadPart <<= 32;
@@ -128,7 +141,7 @@ int clock_gettime(int X, struct timespec *tp) {
}

t.QuadPart -= offset.QuadPart;
-microseconds = (double)t.QuadPart / frequencyToMicroseconds;
+microseconds = static_cast<double>(t.QuadPart / frequencyToMicroseconds);
t.QuadPart = microseconds;
tp->tv_sec = t.QuadPart / 1000000;
tp->tv_nsec = (t.QuadPart % 1000000) * 1000;
@@ -136,7 +149,7 @@ int clock_gettime(int X, struct timespec *tp) {
}
#endif

-unsigned long long timeInMicroseconds(void) {
+unsigned long long timeInMicroseconds(void) { // NOLINT
struct timespec tp;
#ifdef __APPLE__
tp = orwl_gettime();
@@ -271,7 +284,7 @@ float colormapJet[128][3] = {
{0.53125f, 0.0f, 0.0f}, {0.5f, 0.0f, 0.0f}};

void colorDepthImage(
-cv::Mat &imgDepth, cv::Mat &imgColoredDepth, float minRange,
+cv::Mat &imgDepth, cv::Mat &imgColoredDepth, float minRange, // NOLINT
float maxRange) {
imgColoredDepth = cv::Mat::zeros(imgDepth.size(), CV_8UC3);

@@ -295,7 +308,7 @@ void colorDepthImage(
}

bool colormap(
-const std::string &name, unsigned char idx, float &r, float &g, float &b) {
+const std::string &name, unsigned char idx, float &r, float &g, float &b) { // NOLINT
if (name.compare("jet") == 0) {
float *color = colormapJet[idx];

@@ -434,8 +447,8 @@ std::vector<cv::Point2i> bresCircle(int x0, int y0, int r) {
}

void fitCircle(
-const std::vector<cv::Point2d> &points, double &centerX, double &centerY,
+const std::vector<cv::Point2d> &points, double &centerX, double &centerY, // NOLINT
-double &radius) {
+double &radius) { // NOLINT
// D. Umbach, and K. Jones, A Few Methods for Fitting Circles to Data,
// IEEE Transactions on Instrumentation and Measurement, 2000
// We use the modified least squares method.
@@ -573,8 +586,8 @@ char UTMLetterDesignator(double latitude) {
}

void LLtoUTM(
-double latitude, double longitude, double &utmNorthing, double &utmEasting,
+double latitude, double longitude, double &utmNorthing, double &utmEasting, // NOLINT
-std::string &utmZone) {
+std::string &utmZone) { // NOLINT
// converts lat/long to UTM coords. Equations from USGS Bulletin 1532
// East Longitudes are positive, West longitudes are negative.
// North latitudes are positive, South latitudes are negative
@@ -659,7 +672,7 @@ void LLtoUTM(

void UTMtoLL(
double utmNorthing, double utmEasting, const std::string &utmZone,
-double &latitude, double &longitude) {
+double &latitude, double &longitude) { // NOLINT
// converts UTM coords to lat/long. Equations from USGS Bulletin 1532
// East Longitudes are positive, West longitudes are negative.
// North latitudes are positive, South latitudes are negative
@@ -732,22 +745,22 @@ void UTMtoLL(
longitude = LongOrigin + longitude / M_PI * 180.0;
}

-long int timestampDiff(uint64_t t1, uint64_t t2) {
+long int timestampDiff(uint64_t t1, uint64_t t2) { // NOLINT
if (t2 > t1) {
uint64_t d = t2 - t1;

-if (d > std::numeric_limits<long int>::max()) {
+if (d > std::numeric_limits<long int>::max()) { // NOLINT
-return std::numeric_limits<long int>::max();
+return std::numeric_limits<long int>::max(); // NOLINT
} else {
return d;
}
} else {
uint64_t d = t1 - t2;

-if (d > std::numeric_limits<long int>::max()) {
+if (d > std::numeric_limits<long int>::max()) { // NOLINT
-return std::numeric_limits<long int>::min();
+return std::numeric_limits<long int>::min(); // NOLINT
} else {
-return -static_cast<long int>(d);
+return -static_cast<long int>(d); // NOLINT
}
}
}

@@ -46,7 +46,7 @@ DisparityProcessor::DisparityProcessor(DisparityComputingMethod type,
100, // speckleWindowSize
32, // speckleRange
false)); // fullDP
-LOG(ERROR) << "not supported in opencv 2.x";
+LOG(ERROR) << "BM not supported in opencv 2.x, use sgbm";
// int bmWinSize = 3;
// // StereoBM
// // https://docs.opencv.org/2.4/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html#stereobm-stereobm
@@ -130,7 +130,8 @@ bool DisparityProcessor::OnProcess(
if (type_ == DisparityComputingMethod::SGBM) {
(*sgbm_matcher)(input->first, input->second, disparity);
} else if (type_ == DisparityComputingMethod::BM) {
-LOG(ERROR) << "not supported in opencv 2.x";
+// LOG(ERROR) << "not supported in opencv 2.x";
+(*sgbm_matcher)(input->first, input->second, disparity);
// cv::Mat tmp1, tmp2;
// cv::cvtColor(input->first, tmp1, CV_RGB2GRAY);
// cv::cvtColor(input->second, tmp2, CV_RGB2GRAY);
@@ -154,8 +155,8 @@ bool DisparityProcessor::OnProcess(
tmp2 = input->second;
} else if (input->first.channels() >= 3) {
// s210
-cv::cvtColor(input->first, tmp1, CV_RGB2GRAY);
+cv::cvtColor(input->first, tmp1, cv::COLOR_RGB2GRAY);
-cv::cvtColor(input->second, tmp2, CV_RGB2GRAY);
+cv::cvtColor(input->second, tmp2, cv::COLOR_RGB2GRAY);
}
bm_matcher->compute(tmp1, tmp2, disparity);
} else {

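Two things change in the disparity processor above: under OpenCV 2.x the BM branch now quietly falls back to the SGBM matcher instead of only logging an error ("fix(disparity): opencv2.0 logic"), and the remaining color conversions use the namespaced cv::COLOR_RGB2GRAY constant required for OpenCV 4. A minimal sketch, not the SDK's code, of the BM path those conversions feed on newer OpenCV:

// Minimal sketch (assumes OpenCV >= 3): grayscale conversion with the
// namespaced constant, then block matching, mirroring the pattern the hunk
// above moves toward.
#include <opencv2/calib3d/calib3d.hpp>
#include <opencv2/imgproc/imgproc.hpp>

cv::Mat ComputeBmDisparity(const cv::Mat &left_rgb, const cv::Mat &right_rgb) {
  cv::Mat left_gray, right_gray, disparity;
  cv::cvtColor(left_rgb, left_gray, cv::COLOR_RGB2GRAY);    // was CV_RGB2GRAY
  cv::cvtColor(right_rgb, right_gray, cv::COLOR_RGB2GRAY);
  cv::Ptr<cv::StereoBM> bm = cv::StereoBM::create(64, 21);  // numDisparities, blockSize
  bm->compute(left_gray, right_gray, disparity);
  return disparity;
}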
@@ -56,6 +56,11 @@

<arg name="request_index" default="$(arg index_s2_2)" />

+<!-- disparity computing method type -->
+<arg name="sgbm" default="0" />
+<arg name="bm" default="1" />
+<arg name="disparity_computing_method" default="$(arg bm)" />
+
<arg name="enable_left_rect" default="false" />
<arg name="enable_right_rect" default="false" />
<arg name="enable_disparity" default="false" />
@@ -242,6 +247,8 @@

<param name="gravity" value="$(arg gravity)" />

+<param name="disparity_computing_method" value="$(arg disparity_computing_method)" />
+
<!-- stream toggles -->

<param name="enable_left_rect" value="$(arg enable_left_rect)" />

wrappers/ros/src/mynt_eye_ros_wrapper/src/configuru.hpp (new file, 3653 lines)
File diff suppressed because it is too large.

@@ -37,6 +37,9 @@
#include "mynteye/api/api.h"
#include "mynteye/device/context.h"
#include "mynteye/device/device.h"
+#define CONFIGURU_IMPLEMENTATION 1
+#include "configuru.hpp"
+using namespace configuru; // NOLINT

#define FULL_PRECISION \
std::fixed << std::setprecision(std::numeric_limits<double>::max_digits10)
@@ -226,6 +229,12 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
gravity_ = 9.8;
private_nh_.getParam("gravity", gravity_);

+int tmp_disparity_type_ = 0;
+disparity_type_ = DisparityComputingMethod::BM;
+private_nh_.getParam("disparity_computing_method", tmp_disparity_type_);
+disparity_type_ = (DisparityComputingMethod)tmp_disparity_type_;
+api_->SetDisparityComputingMethodType(disparity_type_);
+
// device options of standard210a
if (model_ == Model::STANDARD210A) {
option_names_ = {

@@ -398,6 +407,115 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
case Request::NOMINAL_BASELINE:
res.value = api_->GetInfo(Info::NOMINAL_BASELINE);
break;
+case Request::IMG_INTRINSICS:
+{
+auto intri_left = api_->GetIntrinsicsBase(Stream::LEFT);
+auto calib_model = intri_left->calib_model();
+if (calib_model == CalibrationModel::PINHOLE) {
+auto intri_left =
+api_->GetIntrinsics<IntrinsicsPinhole>(Stream::LEFT);
+auto intri_right =
+api_->GetIntrinsics<IntrinsicsPinhole>(Stream::RIGHT);
+Config intrinsics{
+{"calib_model", "pinhole"},
+{"left", {
+{"width", intri_left.width},
+{"height", intri_left.height},
+{"fx", intri_left.fx},
+{"fy", intri_left.fy},
+{"cx", intri_left.cx},
+{"cy", intri_left.cy},
+{"model", intri_left.model},
+{"coeffs", Config::array(
+{intri_left.coeffs[0],
+intri_left.coeffs[1],
+intri_left.coeffs[2],
+intri_left.coeffs[3],
+intri_left.coeffs[4]})}
+}},
+{"right", {
+{"width", intri_right.width},
+{"height", intri_right.height},
+{"fx", intri_right.fx},
+{"fy", intri_right.fy},
+{"cx", intri_right.cx},
+{"cy", intri_right.cy},
+{"model", intri_right.model},
+{"coeffs", Config::array(
+{intri_right.coeffs[0],
+intri_right.coeffs[1],
+intri_right.coeffs[2],
+intri_right.coeffs[3],
+intri_right.coeffs[4]})}
+}}
+};
+std::string json = dump_string(intrinsics, configuru::JSON);
+res.value = json;
+} else if (calib_model == CalibrationModel::KANNALA_BRANDT) {
+auto intri_left =
+api_->GetIntrinsics<IntrinsicsEquidistant>(Stream::LEFT);
+auto intri_right =
+api_->GetIntrinsics<IntrinsicsEquidistant>(Stream::RIGHT);
+Config intrinsics{
+{"calib_model", "kannala_brandt"},
+{"left", {
+{"width", intri_left.width},
+{"height", intri_left.height},
+{"coeffs", Config::array(
+{intri_left.coeffs[0],
+intri_left.coeffs[1],
+intri_left.coeffs[2],
+intri_left.coeffs[3],
+intri_left.coeffs[4],
+intri_left.coeffs[5],
+intri_left.coeffs[6],
+intri_left.coeffs[7]})
+}
+}},
+{"right", {
+{"width", intri_right.width},
+{"height", intri_right.height},
+{"coeffs", Config::array(
+{intri_right.coeffs[0],
+intri_right.coeffs[1],
+intri_right.coeffs[2],
+intri_right.coeffs[3],
+intri_right.coeffs[4],
+intri_right.coeffs[5],
+intri_right.coeffs[6],
+intri_right.coeffs[7]})
+}
+}}
+};
+std::string json = dump_string(intrinsics, configuru::JSON);
+res.value = json;
+} else {
+NODELET_INFO_STREAM("INVALID CALIB MODEL:" << calib_model);
+Config intrinsics{};
+std::string json = dump_string(intrinsics, configuru::JSON);
+res.value = json;
+}
+}
+break;
+case Request::IMG_EXTRINSICS_RTOL:
+{
+auto extri = api_->GetExtrinsics(Stream::RIGHT, Stream::LEFT);
+Config extrinsics{
+{"rotation", Config::array({extri.rotation[0][0], extri.rotation[0][1], extri.rotation[0][2], // NOLINT
+extri.rotation[1][0], extri.rotation[1][1], extri.rotation[1][2], // NOLINT
+extri.rotation[2][0], extri.rotation[2][1], extri.rotation[2][2]})}, // NOLINT
+{"translation", Config::array({extri.translation[0], extri.translation[1], extri.translation[2]})} // NOLINT
+};
+std::string json = dump_string(extrinsics, configuru::JSON);
+res.value = json;
+}
+break;
+case Request::IMU_INTRINSICS:
+res.value = "TODO";
+break;
+case Request::IMU_EXTRINSICS:
+res.value = "TODO";
+break;
default:
NODELET_WARN_STREAM("Info of key " << req.key << " not exist");
return false;

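The new IMG_INTRINSICS and IMG_EXTRINSICS_RTOL cases above serialize the camera calibration with the bundled configuru library and return it as a JSON string in the service response ("feat(intri): ros img prams out put in srv"). A hedged client-side sketch follows; only the new request keys come from this diff, while the service name ("get_info") and the generated srv header path are assumptions based on the wrapper's package layout:

// Hedged sketch. Assumes the wrapper advertises the service as "get_info"
// and that the srv generates mynt_eye_ros_wrapper::GetInfo; adjust both if
// the actual names differ.
#include <ros/ros.h>
#include <mynt_eye_ros_wrapper/GetInfo.h>

int main(int argc, char **argv) {
  ros::init(argc, argv, "get_img_intrinsics");
  ros::NodeHandle nh;
  auto client = nh.serviceClient<mynt_eye_ros_wrapper::GetInfo>("get_info");
  mynt_eye_ros_wrapper::GetInfo srv;
  srv.request.key = mynt_eye_ros_wrapper::GetInfo::Request::IMG_INTRINSICS;  // key 8 per the srv diff
  if (client.call(srv)) {
    ROS_INFO_STREAM("intrinsics json: " << srv.response.value);  // JSON string built by the nodelet
  }
  return 0;
}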
@@ -783,7 +901,72 @@ class ROSWrapperNodelet : public nodelet::Nodelet {
pub_imu_.publish(msg);
}

+void timestampAlign() {
+static bool get_first_acc = false;
+static bool get_first_gyro = false;
+static bool has_gyro = false;
+static ImuData acc;
+static ImuData gyro;
+
+if (!get_first_acc && imu_accel_ != nullptr) {
+acc = *imu_accel_;
+imu_accel_ = nullptr;
+get_first_acc = true;
+return;
+}
+
+if (!get_first_gyro && imu_gyro_ != nullptr) {
+gyro = *imu_gyro_;
+imu_gyro_ = nullptr;
+get_first_gyro = true;
+return;
+}
+
+if (imu_accel_ != nullptr) {
+if (!has_gyro) {
+acc = *imu_accel_;
+imu_accel_ = nullptr;
+return;
+}
+
+if (acc.timestamp <= gyro.timestamp) {
+ImuData acc_temp;
+acc_temp = *imu_accel_;
+acc_temp.timestamp = gyro.timestamp;
+
+double k = static_cast<double>(imu_accel_->timestamp - acc.timestamp);
+k = static_cast<double>(gyro.timestamp - acc.timestamp) / k;
+
+acc_temp.accel[0] = acc.accel[0] +
+(imu_accel_->accel[0] - acc.accel[0]) * k;
+acc_temp.accel[1] = acc.accel[1] +
+(imu_accel_->accel[1] - acc.accel[1]) * k;
+acc_temp.accel[2] = acc.accel[2] +
+(imu_accel_->accel[2] - acc.accel[2]) * k;
+
+acc = *imu_accel_;
+*imu_accel_ = acc_temp;
+imu_gyro_.reset(new ImuData(gyro));
+has_gyro = false;
+return;
+} else {
+acc = *imu_accel_;
+imu_accel_ = nullptr;
+return;
+}
+}
+
+if (imu_gyro_ != nullptr) {
+has_gyro = true;
+gyro = *imu_gyro_;
+imu_gyro_ = nullptr;
+return;
+}
+}
+
void publishImuBySync(ros::Time stamp) {
+timestampAlign();
+
if (imu_accel_ == nullptr || imu_gyro_ == nullptr) {
return;
}

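The timestampAlign() helper added above implements the "Align imu time stamp" commit: it buffers the latest accelerometer and gyroscope samples and, when the pending gyro timestamp falls between two accel samples, re-timestamps the accel reading onto the gyro timestamp by linear interpolation with k = (t_gyro - t_prev_acc) / (t_cur_acc - t_prev_acc), so publishImuBySync() always publishes a pair sharing one time stamp. A standalone sketch of just the interpolation step, with types simplified relative to the SDK's ImuData:

// Standalone sketch of the blend used by timestampAlign(); not the wrapper's
// own types, only the same arithmetic.
#include <cstdint>

struct ImuSample { std::uint64_t timestamp; double accel[3]; };

ImuSample InterpolateAccel(const ImuSample &prev, const ImuSample &cur,
                           std::uint64_t gyro_timestamp) {
  double k = static_cast<double>(cur.timestamp - prev.timestamp);
  k = static_cast<double>(gyro_timestamp - prev.timestamp) / k;  // 0..1 blend factor
  ImuSample out = cur;
  out.timestamp = gyro_timestamp;
  for (int i = 0; i < 3; ++i) {
    out.accel[i] = prev.accel[i] + (cur.accel[i] - prev.accel[i]) * k;
  }
  return out;
}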
@@ -1317,6 +1500,8 @@ class ROSWrapperNodelet : public nodelet::Nodelet {

double gravity_;

+// disparity type
+DisparityComputingMethod disparity_type_;
// api

std::shared_ptr<API> api_;

@@ -6,7 +6,10 @@ uint32 SPEC_VERSION=4
uint32 LENS_TYPE=5
uint32 IMU_TYPE=6
uint32 NOMINAL_BASELINE=7
+uint32 IMG_INTRINSICS=8
+uint32 IMG_EXTRINSICS_RTOL=9
+uint32 IMU_INTRINSICS=10
+uint32 IMU_EXTRINSICS=11
uint32 key
---
string value