diff --git a/CMakeLists.txt b/CMakeLists.txt
index b3a0284b..176ed10e 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -35,9 +35,6 @@ checkPackage("OPENCV_IMGPROC" "OpenCV imgproc not found, install it from the tut
find_package(CUDA)
checkPackage("CUDA" "CUDA not found, install it from:\n https://developer.nvidia.com/cuda-downloads")
-find_package(PCL)
-checkPackage("PCL" "PCL not found, try to install it with:\n sudo apt-get install libpcl1 ros-$(rosversion -d)-pcl-ros")
-
find_package(OpenMP)
checkPackage("OpenMP" "OpenMP not found, please install it to improve performances: 'sudo apt install libomp-dev'")
if (OPENMP_FOUND)
@@ -54,7 +51,6 @@ find_package(catkin COMPONENTS
stereo_msgs
dynamic_reconfigure
tf2_ros
- pcl_conversions
nodelet
tf2_geometry_msgs
message_generation
@@ -67,7 +63,6 @@ checkPackage("sensor_msgs" "")
checkPackage("stereo_msgs" "")
checkPackage("dynamic_reconfigure" "")
checkPackage("tf2_ros" "")
-checkPackage("pcl_conversions" "")
checkPackage("nodelet" "")
checkPackage("tf2_geometry_msgs" "")
checkPackage("message_generation" "")
@@ -95,14 +90,14 @@ catkin_package(
dynamic_reconfigure
tf2_ros
tf2_geometry_msgs
- pcl_conversions
message_runtime
)
###############################################################################
# SOURCES
-set(TOOLS_SRC ${CMAKE_CURRENT_SOURCE_DIR}/src/tools/src/sl_tools.cpp)
+set(TOOLS_SRC
+ ${CMAKE_CURRENT_SOURCE_DIR}/src/tools/src/sl_tools.cpp )
set(NODE_SRC ${CMAKE_CURRENT_SOURCE_DIR}/src/zed_wrapper_node.cpp)
set(NODELET_SRC ${CMAKE_CURRENT_SOURCE_DIR}/src/nodelet/src/zed_wrapper_nodelet.cpp)
@@ -117,7 +112,6 @@ include_directories(
${CUDA_INCLUDE_DIRS}
${ZED_INCLUDE_DIRS}
${OpenCV_INCLUDE_DIRS}
- ${PCL_INCLUDE_DIRS}
${CMAKE_CURRENT_SOURCE_DIR}/src/tools/include
${CMAKE_CURRENT_SOURCE_DIR}/src/nodelet/include
)
@@ -125,7 +119,6 @@ include_directories(
link_directories(${ZED_LIBRARY_DIR})
link_directories(${CUDA_LIBRARY_DIRS})
link_directories(${OpenCV_LIBRARY_DIRS})
-link_directories(${PCL_LIBRARY_DIRS})
###############################################################################
@@ -139,13 +132,12 @@ endif()
add_definitions(-std=c++11)
-list(REMOVE_ITEM PCL_LIBRARIES "vtkproj4") ## if using from pcl package (pcl/vtk bug)
set(LINK_LIBRARIES
${catkin_LIBRARIES}
${ZED_LIBRARIES}
${CUDA_LIBRARIES} ${CUDA_NPP_LIBRARIES_ZED}
${OpenCV_LIBS}
- ${PCL_LIBRARIES})
+ )
add_library(ZEDWrapper ${TOOLS_SRC} ${NODELET_SRC})
target_link_libraries(ZEDWrapper ${LINK_LIBRARIES})
diff --git a/README.md b/README.md
index d28f879d..b91b049c 100644
--- a/README.md
+++ b/README.md
@@ -13,7 +13,6 @@ This package lets you use the ZED stereo camera with ROS. It outputs the camera
- Ubuntu 16.04
- [ZED SDK **≥ 2.3**](https://www.stereolabs.com/developers/) and its dependency [CUDA](https://developer.nvidia.com/cuda-downloads)
- [ROS Kinetic](http://wiki.ros.org/kinetic/Installation/Ubuntu)
-- [Point Cloud Library (PCL)](https://github.com/PointCloudLibrary/pcl)
### Build the program
diff --git a/launch/zed_camera.launch b/launch/zed_camera.launch
index 153242e6..0a88818b 100644
--- a/launch/zed_camera.launch
+++ b/launch/zed_camera.launch
@@ -65,6 +65,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
diff --git a/launch/zed_camera_nodelet.launch b/launch/zed_camera_nodelet.launch
index 33ec5688..d6cb8c4d 100644
--- a/launch/zed_camera_nodelet.launch
+++ b/launch/zed_camera_nodelet.launch
@@ -50,6 +50,8 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
@@ -61,6 +63,9 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+
diff --git a/package.xml b/package.xml
index 09fd218c..9ab9bace 100644
--- a/package.xml
+++ b/package.xml
@@ -20,7 +20,6 @@
opencv3
image_transport
dynamic_reconfigure
- pcl_conversions
nodelet
urdf
diff --git a/rviz/zedm.rviz b/rviz/zedm.rviz
index 69a40a70..7742c0fe 100644
--- a/rviz/zedm.rviz
+++ b/rviz/zedm.rviz
@@ -10,7 +10,7 @@ Panels:
- /DepthCloud1/Occlusion Compensation1
- /Odometry1/Covariance1/Position1
Splitter Ratio: 0.5
- Tree Height: 773
+ Tree Height: 451
- Class: rviz/Selection
Name: Selection
- Class: rviz/Tool Properties
@@ -171,7 +171,7 @@ Visualization Manager:
Use rainbow: true
Value: true
- Class: rviz/Camera
- Enabled: false
+ Enabled: true
Image Rendering: background and overlay
Image Topic: /zed/rgb/image_rect_color
Name: Camera
@@ -179,7 +179,7 @@ Visualization Manager:
Queue Size: 2
Transport Hint: raw
Unreliable: true
- Value: false
+ Value: true
Visibility:
DepthCloud: true
Grid: true
@@ -217,7 +217,7 @@ Visualization Manager:
Size (m): 0.00999999978
Style: Points
Topic: /zed/point_cloud/cloud_registered
- Unreliable: false
+ Unreliable: true
Use Fixed Frame: true
Use rainbow: true
Value: false
@@ -346,7 +346,7 @@ Window Geometry:
Height: 1056
Hide Left Dock: false
Hide Right Dock: false
- QMainWindow State: 000000ff00000000fd00000004000000000000016a00000394fc020000000efb0000001200530065006c0065006300740069006f006e00000001e10000009b0000006100fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c006100790073010000002800000394000000d700fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000a0049006d00610067006501000002400000010f0000000000000000fb0000000a0049006d006100670065000000028b000000c40000000000000000fb0000000a0049006d006100670065010000028a000000c50000000000000000fb0000000a0049006d00610067006500000002b30000009c0000000000000000fb0000000a0049006d006100670065010000028b000000c40000000000000000fb0000000c00430061006d00650072006100000002800000013c0000001600ffffff000000010000010f00000394fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073010000002800000394000000ad00fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e100000197000000030000073f00000040fc0100000002fb0000000800540069006d006501000000000000073f0000030000fffffffb0000000800540069006d00650100000000000004500000000000000000000004ba0000039400000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000
+ QMainWindow State: 000000ff00000000fd00000004000000000000016a00000394fc020000000efb0000001200530065006c0065006300740069006f006e00000001e10000009b0000006100fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c006100790073010000002800000252000000d700fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000a0049006d00610067006501000002400000010f0000000000000000fb0000000a0049006d006100670065000000028b000000c40000000000000000fb0000000a0049006d006100670065010000028a000000c50000000000000000fb0000000a0049006d00610067006500000002b30000009c0000000000000000fb0000000a0049006d006100670065010000028b000000c40000000000000000fb0000000c00430061006d00650072006101000002800000013c0000001600ffffff000000010000010f00000394fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073010000002800000394000000ad00fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e100000197000000030000073f00000040fc0100000002fb0000000800540069006d006501000000000000073f0000030000fffffffb0000000800540069006d00650100000000000004500000000000000000000004ba0000039400000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000
Selection:
collapsed: false
Time:
diff --git a/src/nodelet/include/zed_wrapper_nodelet.hpp b/src/nodelet/include/zed_wrapper_nodelet.hpp
index 69769453..e1f987ff 100644
--- a/src/nodelet/include/zed_wrapper_nodelet.hpp
+++ b/src/nodelet/include/zed_wrapper_nodelet.hpp
@@ -35,328 +35,340 @@
#include
#include
#include
+#include
#include
#include
#include
#include
-#include
-
#include
#include
+#include
+#include
using namespace std;
namespace zed_wrapper {
-class ZEDWrapperNodelet : public nodelet::Nodelet {
-
-public:
- /* \brief Default constructor
- */
- ZEDWrapperNodelet();
-
- /* \brief \ref ZEDWrapperNodelet destructor
- */
- virtual ~ZEDWrapperNodelet();
-
- /* \brief Image to ros message conversion
- * \param img : the image to publish
- * \param encodingType : the sensor_msgs::image_encodings encoding type
- * \param frameId : the id of the reference frame of the image
- * \param t : the ros::Time to stamp the image
- */
- static sensor_msgs::ImagePtr imageToROSmsg(cv::Mat img, const std::string encodingType, std::string frameId, ros::Time t);
-
-private:
- /* \brief Initialization function called by the Nodelet base class
- */
- virtual void onInit();
-
- /* \brief ZED camera polling thread function
- */
- void device_poll();
-
-protected:
-
- /* \brief Publish the pose of the camera in "Map" frame with a ros Publisher
- * \param poseBaseTransform : Transformation representing the camera pose from odom frame to map frame
- * \param t : the ros::Time to stamp the image
- */
- void publishPose(tf2::Transform poseBaseTransform, ros::Time t);
-
- /* \brief Publish the pose of the camera in "Odom" frame with a ros Publisher
- * \param odom_base_transform : Transformation representing the camera pose from base frame to odom frame
- * \param t : the ros::Time to stamp the image
- */
- void publishOdom(tf2::Transform baseToOdomTransform, ros::Time t);
-
- /* \brief Publish the pose of the camera in "Map" frame as a transformation
- * \param base_transform : Transformation representing the camera pose from odom frame to map frame
- * \param t : the ros::Time to stamp the image
- */
- void publishPoseFrame(tf2::Transform baseTransform, ros::Time t);
-
- /* \brief Publish the odometry of the camera in "Odom" frame as a transformation
- * \param base_transform : Transformation representing the camera pose from base frame to odom frame
- * \param t : the ros::Time to stamp the image
- */
- void publishOdomFrame(tf2::Transform baseToOdomTransform, ros::Time t);
-
- /* \brief Publish the pose of the imu in "Odom" frame as a transformation
- * \param base_transform : Transformation representing the imu pose from base frame to odom frame
- * \param t : the ros::Time to stamp the image
- */
- void publishImuFrame(tf2::Transform baseTransform);
-
- /* \brief Publish a cv::Mat image with a ros Publisher
- * \param img : the image to publish
- * \param pub_img : the publisher object to use (different image publishers exist)
- * \param img_frame_id : the id of the reference frame of the image (different image frames exist)
- * \param t : the ros::Time to stamp the image
- */
- void publishImage(cv::Mat img, image_transport::Publisher &pubImg, string imgFrameId, ros::Time t);
-
- /* \brief Publish a cv::Mat depth image with a ros Publisher
- * \param depth : the depth image to publish
- * \param t : the ros::Time to stamp the depth image
- */
- void publishDepth(cv::Mat depth, ros::Time t);
-
- /* \brief Publish a cv::Mat confidence image with a ros Publisher
- * \param conf : the confidence image to publish
- * \param t : the ros::Time to stamp the depth image
- */
- void publishConf(cv::Mat conf, ros::Time t);
-
- /* \brief Publish a pointCloud with a ros Publisher
- * \param width : the width of the point cloud
- * \param height : the height of the point cloud
- */
- void publishPointCloud(int width, int height);
-
- /* \brief Publish the informations of a camera with a ros Publisher
- * \param cam_info_msg : the information message to publish
- * \param pub_cam_info : the publisher object to use
- * \param t : the ros::Time to stamp the message
- */
- void publishCamInfo(sensor_msgs::CameraInfoPtr camInfoMsg, ros::Publisher pubCamInfo, ros::Time t);
-
- /* \brief Publish a cv::Mat disparity image with a ros Publisher
- * \param disparity : the disparity image to publish
- * \param t : the ros::Time to stamp the depth image
- */
- void publishDisparity(cv::Mat disparity, ros::Time t);
-
- /* \brief Get the information of the ZED cameras and store them in an information message
- * \param zed : the sl::zed::Camera* pointer to an instance
- * \param left_cam_info_msg : the information message to fill with the left camera informations
- * \param right_cam_info_msg : the information message to fill with the right camera informations
- * \param left_frame_id : the id of the reference frame of the left camera
- * \param right_frame_id : the id of the reference frame of the right camera
- */
- void fillCamInfo(sl::Camera& zed, sensor_msgs::CameraInfoPtr leftCamInfoMsg, sensor_msgs::CameraInfoPtr rightCamInfoMsg,
- string leftFrameId, string rightFrameId, bool rawParam = false);
-
- /* \brief Callback to handle dynamic reconfigure events in ROS
- */
- void dynamicReconfCallback(zed_wrapper::ZedConfig &config, uint32_t level);
-
- /* \brief Callback to publish IMU raw data with a ROS publisher.
- * \param e : the ros::TimerEvent binded to the callback
- */
- void imuPubCallback(const ros::TimerEvent & e);
-
- /* \brief Service callback to reset_tracking service
- * Tracking pose is reinitialized to the value available in the ROS Param server
- */
- bool on_reset_tracking(zed_wrapper::reset_tracking::Request &req,
- zed_wrapper::reset_tracking::Response &res);
-
- /* \brief Service callback to reset_odometry service
- * Odometry is reset to clear drift and odometry frame gets the latest pose
- * from ZED tracking.
- */
- bool on_reset_odometry(zed_wrapper::reset_odometry::Request &req,
- zed_wrapper::reset_odometry::Response &res);
-
- /* \brief Service callback to set_pose service
- * Tracking pose is set to the new values
- */
- bool on_set_pose(zed_wrapper::set_initial_pose::Request &req,
- zed_wrapper::set_initial_pose::Response &res);
-
- /* \brief Utility to initialize the pose variables
- */
- void set_pose( float xt, float yt, float zt, float rr, float pr, float yr);
-
- /* \bried Start tracking loading the parameters from param server
- */
- void start_tracking();
-
- /* \bried Check if FPS and Resolution chosen by user are correct.
- * Modifies FPS to match correct value.
- */
- void checkResolFps();
-
-private:
- // SDK version
- int verMajor;
- int verMinor;
- int verSubMinor;
-
- // ROS
- ros::NodeHandle nh;
- ros::NodeHandle nhNs;
- boost::shared_ptr devicePollThread;
-
- // Publishers
- image_transport::Publisher pubRgb;
- image_transport::Publisher pubRawRgb;
- image_transport::Publisher pubLeft;
- image_transport::Publisher pubRawLeft;
- image_transport::Publisher pubRight;
- image_transport::Publisher pubRawRight;
- image_transport::Publisher pubDepth;
- image_transport::Publisher pubConfImg;
- ros::Publisher pubConfMap;
- ros::Publisher pubDisparity;
- ros::Publisher pubCloud;
- ros::Publisher pubRgbCamInfo;
- ros::Publisher pubLeftCamInfo;
- ros::Publisher pubRightCamInfo;
- ros::Publisher pubRgbCamInfoRaw;
- ros::Publisher pubLeftCamInfoRaw;
- ros::Publisher pubRightCamInfoRaw;
- ros::Publisher pubDepthCamInfo;
- ros::Publisher pubPose;
- ros::Publisher pubOdom;
- ros::Publisher pubImu;
- ros::Publisher pubImuRaw;
- ros::Timer pubImuTimer;
-
- // Service
- bool trackingActivated;
- ros::ServiceServer srvSetInitPose;
- ros::ServiceServer srvResetOdometry;
- ros::ServiceServer srvResetTracking;
-
- // Camera info
- sensor_msgs::CameraInfoPtr rgbCamInfoMsg;
- sensor_msgs::CameraInfoPtr leftCamInfoMsg;
- sensor_msgs::CameraInfoPtr rightCamInfoMsg;
- sensor_msgs::CameraInfoPtr rgbCamInfoRawMsg;
- sensor_msgs::CameraInfoPtr leftCamInfoRawMsg;
- sensor_msgs::CameraInfoPtr rightCamInfoRawMsg;
- sensor_msgs::CameraInfoPtr depthCamInfoMsg;
-
- // tf
- tf2_ros::TransformBroadcaster transformPoseBroadcaster;
- tf2_ros::TransformBroadcaster transformOdomBroadcaster;
- tf2_ros::TransformBroadcaster transformImuBroadcaster;
-
- std::string rgbFrameId;
- std::string rgbOptFrameId;
-
- std::string depthFrameId;
- std::string depthOptFrameId;
-
- std::string disparityFrameId;
- std::string disparityOptFrameId;
-
- std::string confidenceFrameId;
- std::string confidenceOptFrameId;
-
- std::string cloudFrameId;
-
- std::string mapFrameId;
- std::string odometryFrameId;
- std::string baseFrameId;
- std::string rightCamFrameId;
- std::string rightCamOptFrameId;
- std::string leftCamFrameId;
- std::string leftCamOptFrameId;
- std::string imuFrameId;
-
- // initialization Transform listener
- boost::shared_ptr tfBuffer;
- boost::shared_ptr tfListener;
- bool publishTf;
- bool cameraFlip;
-
- // Launch file parameters
- int resolution;
- int frameRate;
- int quality;
- int sensingMode;
- int gpuId;
- int zedId;
- int depthStabilization;
- std::string odometryDb;
- std::string svoFilepath;
- double imuPubRate;
- bool verbose;
-
- // IMU time
- ros::Time imuTime;
-
- bool poseSmoothing;
- bool spatialMemory;
- bool initOdomWithPose;
-
- //Tracking variables
- sl::Transform initialPoseSl;
- std::vector initialTrackPose;
-
- tf2::Transform odomToMapTransform;
- tf2::Transform baseToOdomTransform;
-
- // zed object
- sl::InitParameters param;
- sl::Camera zed;
- unsigned int serial_number;
- int userCamModel; // Camera model set by ROS Param
- sl::MODEL realCamModel; // Camera model requested to SDK
-
- // flags
- double matResizeFactor;
- int confidence;
- int exposure;
- int gain;
- bool autoExposure;
- bool triggerAutoExposure;
- bool computeDepth;
- bool grabbing = false;
- int openniDepthMode = 0; // 16 bit UC data in mm else 32F in m, for more info http://www.ros.org/reps/rep-0118.html
-
- // Frame and Mat
- int camWidth;
- int camHeight;
- int matWidth;
- int matHeight;
- cv::Mat leftImRGB;
- cv::Mat rightImRGB;
- cv::Mat confImRGB;
- cv::Mat confMapFloat;
-
- // Mutex
- std::mutex dataMutex;
-
- // Point cloud variables
- sl::Mat cloud;
- string pointCloudFrameId = "";
- ros::Time pointCloudTime;
-
- // Dynamic reconfigure
- boost::shared_ptr> server;
-
- // Coordinate Changing indices and signs
- unsigned int xIdx, yIdx, zIdx;
- int xSign, ySign, zSign;
-
-}; // class ZEDROSWrapperNodelet
+ class ZEDWrapperNodelet : public nodelet::Nodelet {
+
+ public:
+ /* \brief Default constructor
+ */
+ ZEDWrapperNodelet();
+
+ /* \brief \ref ZEDWrapperNodelet destructor
+ */
+ virtual ~ZEDWrapperNodelet();
+
+ /* \brief Image to ros message conversion
+ * \param img : the image to publish
+ * \param encodingType : the sensor_msgs::image_encodings encoding type
+ * \param frameId : the id of the reference frame of the image
+ * \param t : the ros::Time to stamp the image
+ */
+ static sensor_msgs::ImagePtr imageToROSmsg(cv::Mat img, const std::string encodingType, std::string frameId, ros::Time t);
+
+ private:
+ /* \brief Initialization function called by the Nodelet base class
+ */
+ virtual void onInit();
+
+ /* \brief ZED camera polling thread function
+ */
+ void device_poll();
+
+ /* \brief Point cloud publishing thread function
+ */
+ void pointcloud_thread();
+
+ protected:
+
+ /* \brief Publish the pose of the camera in "Map" frame with a ros Publisher
+ * \param poseBaseTransform : Transformation representing the camera pose from odom frame to map frame
+ * \param t : the ros::Time to stamp the image
+ */
+ void publishPose(tf2::Transform poseBaseTransform, ros::Time t);
+
+ /* \brief Publish the pose of the camera in "Odom" frame with a ros Publisher
+ * \param odom_base_transform : Transformation representing the camera pose from base frame to odom frame
+ * \param t : the ros::Time to stamp the image
+ */
+ void publishOdom(tf2::Transform baseToOdomTransform, ros::Time t);
+
+ /* \brief Publish the pose of the camera in "Map" frame as a transformation
+ * \param base_transform : Transformation representing the camera pose from odom frame to map frame
+ * \param t : the ros::Time to stamp the image
+ */
+ void publishPoseFrame(tf2::Transform baseTransform, ros::Time t);
+
+ /* \brief Publish the odometry of the camera in "Odom" frame as a transformation
+ * \param base_transform : Transformation representing the camera pose from base frame to odom frame
+ * \param t : the ros::Time to stamp the image
+ */
+ void publishOdomFrame(tf2::Transform baseToOdomTransform, ros::Time t);
+
+ /* \brief Publish the pose of the imu in "Odom" frame as a transformation
+ * \param base_transform : Transformation representing the imu pose from base frame to odom frame
+ * \note takes no timestamp parameter (the signature has a single transform argument)
+ */
+ void publishImuFrame(tf2::Transform baseTransform);
+
+ /* \brief Publish a cv::Mat image with a ros Publisher
+ * \param img : the image to publish
+ * \param pub_img : the publisher object to use (different image publishers exist)
+ * \param img_frame_id : the id of the reference frame of the image (different image frames exist)
+ * \param t : the ros::Time to stamp the image
+ */
+ void publishImage(cv::Mat img, image_transport::Publisher& pubImg, string imgFrameId, ros::Time t);
+
+ /* \brief Publish a cv::Mat depth image with a ros Publisher
+ * \param depth : the depth image to publish
+ * \param t : the ros::Time to stamp the depth image
+ */
+ void publishDepth(cv::Mat depth, ros::Time t);
+
+ /* \brief Publish a cv::Mat confidence image with a ros Publisher
+ * \param conf : the confidence image to publish
+ * \param t : the ros::Time to stamp the depth image
+ */
+ void publishConf(cv::Mat conf, ros::Time t);
+
+ /* \brief Publish a pointCloud with a ros Publisher
+ * \param width : the width of the point cloud
+ * \param height : the height of the point cloud
+ */
+ void publishPointCloud();
+
+ /* \brief Publish the information of a camera with a ros Publisher
+ * \param cam_info_msg : the information message to publish
+ * \param pub_cam_info : the publisher object to use
+ * \param t : the ros::Time to stamp the message
+ */
+ void publishCamInfo(sensor_msgs::CameraInfoPtr camInfoMsg, ros::Publisher pubCamInfo, ros::Time t);
+
+ /* \brief Publish a cv::Mat disparity image with a ros Publisher
+ * \param disparity : the disparity image to publish
+ * \param t : the ros::Time to stamp the depth image
+ */
+ void publishDisparity(cv::Mat disparity, ros::Time t);
+
+ /* \brief Get the information of the ZED cameras and store them in an information message
+ * \param zed : the sl::zed::Camera* pointer to an instance
+ * \param left_cam_info_msg : the information message to fill with the left camera information
+ * \param right_cam_info_msg : the information message to fill with the right camera information
+ * \param left_frame_id : the id of the reference frame of the left camera
+ * \param right_frame_id : the id of the reference frame of the right camera
+ */
+ void fillCamInfo(sl::Camera& zed, sensor_msgs::CameraInfoPtr leftCamInfoMsg, sensor_msgs::CameraInfoPtr rightCamInfoMsg,
+ string leftFrameId, string rightFrameId, bool rawParam = false);
+
+ /* \brief Callback to handle dynamic reconfigure events in ROS
+ */
+ void dynamicReconfCallback(zed_wrapper::ZedConfig& config, uint32_t level);
+
+ /* \brief Callback to publish IMU raw data with a ROS publisher.
+ * \param e : the ros::TimerEvent bound to the callback
+ */
+ void imuPubCallback(const ros::TimerEvent& e);
+
+ /* \brief Service callback to reset_tracking service
+ * Tracking pose is reinitialized to the value available in the ROS Param server
+ */
+ bool on_reset_tracking(zed_wrapper::reset_tracking::Request& req,
+ zed_wrapper::reset_tracking::Response& res);
+
+ /* \brief Service callback to reset_odometry service
+ * Odometry is reset to clear drift and odometry frame gets the latest pose
+ * from ZED tracking.
+ */
+ bool on_reset_odometry(zed_wrapper::reset_odometry::Request& req,
+ zed_wrapper::reset_odometry::Response& res);
+
+ /* \brief Service callback to set_pose service
+ * Tracking pose is set to the new values
+ */
+ bool on_set_pose(zed_wrapper::set_initial_pose::Request& req,
+ zed_wrapper::set_initial_pose::Response& res);
+
+ /* \brief Utility to initialize the pose variables
+ */
+ void set_pose(float xt, float yt, float zt, float rr, float pr, float yr);
+
+ /* \brief Start tracking, loading the parameters from the param server
+ */
+ void start_tracking();
+
+ /* \brief Check if the FPS and Resolution chosen by the user are valid.
+ * Modifies FPS to match a correct value.
+ */
+ void checkResolFps();
+
+ private:
+ // SDK version
+ int verMajor;
+ int verMinor;
+ int verSubMinor;
+
+ // ROS
+ ros::NodeHandle nh;
+ ros::NodeHandle nhNs;
+ std::thread devicePollThread;
+ std::thread mPcThread; // Point Cloud thread
+
+ bool mStopNode;
+
+ // Publishers
+ image_transport::Publisher pubRgb;
+ image_transport::Publisher pubRawRgb;
+ image_transport::Publisher pubLeft;
+ image_transport::Publisher pubRawLeft;
+ image_transport::Publisher pubRight;
+ image_transport::Publisher pubRawRight;
+ image_transport::Publisher pubDepth;
+ image_transport::Publisher pubConfImg;
+ ros::Publisher pubConfMap;
+ ros::Publisher pubDisparity;
+ ros::Publisher pubCloud;
+ ros::Publisher pubRgbCamInfo;
+ ros::Publisher pubLeftCamInfo;
+ ros::Publisher pubRightCamInfo;
+ ros::Publisher pubRgbCamInfoRaw;
+ ros::Publisher pubLeftCamInfoRaw;
+ ros::Publisher pubRightCamInfoRaw;
+ ros::Publisher pubDepthCamInfo;
+ ros::Publisher pubPose;
+ ros::Publisher pubOdom;
+ ros::Publisher pubImu;
+ ros::Publisher pubImuRaw;
+ ros::Timer pubImuTimer;
+
+ // Service
+ bool trackingActivated;
+ ros::ServiceServer srvSetInitPose;
+ ros::ServiceServer srvResetOdometry;
+ ros::ServiceServer srvResetTracking;
+
+ // Camera info
+ sensor_msgs::CameraInfoPtr rgbCamInfoMsg;
+ sensor_msgs::CameraInfoPtr leftCamInfoMsg;
+ sensor_msgs::CameraInfoPtr rightCamInfoMsg;
+ sensor_msgs::CameraInfoPtr rgbCamInfoRawMsg;
+ sensor_msgs::CameraInfoPtr leftCamInfoRawMsg;
+ sensor_msgs::CameraInfoPtr rightCamInfoRawMsg;
+ sensor_msgs::CameraInfoPtr depthCamInfoMsg;
+
+ // tf
+ tf2_ros::TransformBroadcaster transformPoseBroadcaster;
+ tf2_ros::TransformBroadcaster transformOdomBroadcaster;
+ tf2_ros::TransformBroadcaster transformImuBroadcaster;
+
+ std::string rgbFrameId;
+ std::string rgbOptFrameId;
+
+ std::string depthFrameId;
+ std::string depthOptFrameId;
+
+ std::string disparityFrameId;
+ std::string disparityOptFrameId;
+
+ std::string confidenceFrameId;
+ std::string confidenceOptFrameId;
+
+ std::string cloudFrameId;
+
+ std::string mapFrameId;
+ std::string odometryFrameId;
+ std::string baseFrameId;
+ std::string rightCamFrameId;
+ std::string rightCamOptFrameId;
+ std::string leftCamFrameId;
+ std::string leftCamOptFrameId;
+ std::string imuFrameId;
+
+ // initialization Transform listener
+ std::shared_ptr tfBuffer;
+ std::shared_ptr tfListener;
+ bool publishTf;
+ bool cameraFlip;
+
+ // Launch file parameters
+ int resolution;
+ int frameRate;
+ int quality;
+ int sensingMode;
+ int gpuId;
+ int zedId;
+ int depthStabilization;
+ std::string odometryDb;
+ std::string svoFilepath;
+ double imuPubRate;
+ bool verbose;
+
+ // IMU time
+ ros::Time imuTime;
+
+ bool poseSmoothing;
+ bool spatialMemory;
+ bool initOdomWithPose;
+
+ //Tracking variables
+ sl::Transform initialPoseSl;
+ std::vector initialTrackPose;
+
+ tf2::Transform odomToMapTransform;
+ tf2::Transform baseToOdomTransform;
+
+ // zed object
+ sl::InitParameters param;
+ sl::Camera zed;
+ unsigned int serial_number;
+ int userCamModel; // Camera model set by ROS Param
+ sl::MODEL realCamModel; // Camera model requested to SDK
+
+ // flags
+ double matResizeFactor;
+ int confidence;
+ int exposure;
+ int gain;
+ bool autoExposure;
+ bool triggerAutoExposure;
+ bool computeDepth;
+ bool grabbing = false;
+ int openniDepthMode = 0; // 16 bit UC data in mm else 32F in m, for more info http://www.ros.org/reps/rep-0118.html
+
+ // Frame and Mat
+ int camWidth;
+ int camHeight;
+ int matWidth;
+ int matHeight;
+ cv::Mat leftImRGB;
+ cv::Mat rightImRGB;
+ cv::Mat confImRGB;
+ cv::Mat confMapFloat;
+
+ // Mutex
+ std::mutex dataMutex;
+ std::mutex mPcMutex;
+ std::condition_variable mPcDataReadyCondVar;
+ bool mPcDataReady;
+
+ // Point cloud variables
+ sl::Mat cloud;
+ sensor_msgs::PointCloud2 mPointcloudMsg;
+ string pointCloudFrameId = "";
+ ros::Time pointCloudTime;
+
+ // Dynamic reconfigure
+ boost::shared_ptr> server;
+
+ // Coordinate Changing indices and signs
+ unsigned int xIdx, yIdx, zIdx;
+ int xSign, ySign, zSign;
+
+ }; // class ZEDROSWrapperNodelet
} // namespace
#include
diff --git a/src/nodelet/src/zed_wrapper_nodelet.cpp b/src/nodelet/src/zed_wrapper_nodelet.cpp
index 8c02b21f..9b8af78f 100644
--- a/src/nodelet/src/zed_wrapper_nodelet.cpp
+++ b/src/nodelet/src/zed_wrapper_nodelet.cpp
@@ -27,1595 +27,1669 @@
#endif
#include
-#include
#include
#include
#include
+#include
#include
+
#include
// >>>>> Backward compatibility
#define COORDINATE_SYSTEM_IMAGE static_cast(0)
#define COORDINATE_SYSTEM_RIGHT_HANDED_Z_UP \
- static_cast(3)
+ static_cast(3)
#define COORDINATE_SYSTEM_RIGHT_HANDED_Z_UP_X_FWD \
- static_cast(5)
+ static_cast(5)
// <<<<< Backward compatibility
using namespace std;
namespace zed_wrapper {
-ZEDWrapperNodelet::ZEDWrapperNodelet() : Nodelet() {}
+ ZEDWrapperNodelet::ZEDWrapperNodelet() : Nodelet() {}
+
+ ZEDWrapperNodelet::~ZEDWrapperNodelet() {
+ if (devicePollThread.joinable()) {
+ devicePollThread.join();
+ }
+
+ if (mPcThread.joinable()) {
+ mPcThread.join();
+ }
+ }
-ZEDWrapperNodelet::~ZEDWrapperNodelet() { devicePollThread.get()->join(); }
+ void ZEDWrapperNodelet::onInit() {
-void ZEDWrapperNodelet::onInit() {
+ mStopNode = false;
+ mPcDataReady = false;
#ifndef NDEBUG
- if (ros::console::set_logger_level(ROSCONSOLE_DEFAULT_NAME,
- ros::console::levels::Debug)) {
- ros::console::notifyLoggerLevelsChanged();
- }
+ if (ros::console::set_logger_level(ROSCONSOLE_DEFAULT_NAME,
+ ros::console::levels::Debug)) {
+ ros::console::notifyLoggerLevelsChanged();
+ }
#endif
- // Launch file parameters
- resolution = sl::RESOLUTION_HD720;
- quality = sl::DEPTH_MODE_PERFORMANCE;
- sensingMode = sl::SENSING_MODE_STANDARD;
- frameRate = 30;
- gpuId = -1;
- zedId = 0;
- serial_number = 0;
- odometryDb = "";
- imuPubRate = 100.0;
- initialTrackPose.resize(6);
- for (int i = 0; i < 6; i++) {
- initialTrackPose[i] = 0.0f;
- }
- matResizeFactor = 1.0;
-
- verbose = true;
-
- nh = getMTNodeHandle();
- nhNs = getMTPrivateNodeHandle();
-
- // Set default coordinate frames
- nhNs.param("pose_frame", mapFrameId, "pose_frame");
- nhNs.param("odometry_frame", odometryFrameId, "odometry_frame");
- nhNs.param("base_frame", baseFrameId, "base_frame");
- nhNs.param("imu_frame", imuFrameId, "imu_link");
-
- nhNs.param("left_camera_frame", leftCamFrameId,
- "left_camera_frame");
- nhNs.param("left_camera_optical_frame", leftCamOptFrameId,
- "left_camera_optical_frame");
-
- nhNs.param("right_camera_frame", rightCamFrameId,
- "right_camera_frame");
- nhNs.param("right_camera_optical_frame", rightCamOptFrameId,
- "right_camera_optical_frame");
-
- depthFrameId = leftCamFrameId;
- depthOptFrameId = leftCamOptFrameId;
-
- // Note: Depth image frame id must match color image frame id
- cloudFrameId = depthOptFrameId;
- rgbFrameId = depthFrameId;
- rgbOptFrameId = cloudFrameId;
-
- disparityFrameId = depthFrameId;
- disparityOptFrameId = depthOptFrameId;
-
- confidenceFrameId = depthFrameId;
- confidenceOptFrameId = depthOptFrameId;
-
- // Get parameters from launch file
- nhNs.getParam("resolution", resolution);
- nhNs.getParam("frame_rate", frameRate);
-
- checkResolFps();
-
- nhNs.getParam("verbose", verbose);
- nhNs.getParam("quality", quality);
- nhNs.getParam("sensing_mode", sensingMode);
- nhNs.getParam("openni_depth_mode", openniDepthMode);
- nhNs.getParam("gpu_id", gpuId);
- nhNs.getParam("zed_id", zedId);
- nhNs.getParam("depth_stabilization", depthStabilization);
- int tmp_sn = 0;
- nhNs.getParam("serial_number", tmp_sn);
- if (tmp_sn > 0)
- serial_number = tmp_sn;
- nhNs.getParam("camera_model", userCamModel);
-
- // Publish odometry tf
- nhNs.param("publish_tf", publishTf, true);
-
- nhNs.param("camera_flip", cameraFlip, false);
-
- if (serial_number > 0)
- NODELET_INFO_STREAM("SN : " << serial_number);
-
- // Print order frames
- NODELET_INFO_STREAM("pose_frame \t\t -> " << mapFrameId);
- NODELET_INFO_STREAM("odometry_frame \t\t -> " << odometryFrameId);
- NODELET_INFO_STREAM("base_frame \t\t -> " << baseFrameId);
- NODELET_INFO_STREAM("imu_link \t\t -> " << imuFrameId);
- NODELET_INFO_STREAM("left_camera_frame \t -> " << leftCamFrameId);
- NODELET_INFO_STREAM("left_camera_optical_frame -> " << leftCamOptFrameId);
- NODELET_INFO_STREAM("right_camera_frame \t -> " << rightCamFrameId);
- NODELET_INFO_STREAM("right_camera_optical_frame -> " << rightCamOptFrameId);
- NODELET_INFO_STREAM("depth_frame \t\t -> " << depthFrameId);
- NODELET_INFO_STREAM("depth_optical_frame \t -> " << depthOptFrameId);
- NODELET_INFO_STREAM("disparity_frame \t -> " << disparityFrameId);
- NODELET_INFO_STREAM("disparity_optical_frame -> " << disparityOptFrameId);
-
- // Status of map TF
- NODELET_INFO_STREAM("Publish " << mapFrameId << " ["
- << (publishTf ? "TRUE" : "FALSE") << "]");
- NODELET_INFO_STREAM("Camera Flip [" << (cameraFlip ? "TRUE" : "FALSE") << "]");
-
- // Status of odometry TF
- // NODELET_INFO_STREAM("Publish " << odometry_frame_id << " [" << (publish_tf
- // ? "TRUE" : "FALSE") << "]");
-
- std::string img_topic = "image_rect_color";
- std::string img_raw_topic = "image_raw_color";
-
- // Set the default topic names
- string left_topic = "left/" + img_topic;
- string left_raw_topic = "left/" + img_raw_topic;
- string left_cam_info_topic = "left/camera_info";
- string left_cam_info_raw_topic = "left/camera_info_raw";
-
- string right_topic = "right/" + img_topic;
- string right_raw_topic = "right/" + img_raw_topic;
- string right_cam_info_topic = "right/camera_info";
- string right_cam_info_raw_topic = "right/camera_info_raw";
-
- string rgb_topic = "rgb/" + img_topic;
- string rgb_raw_topic = "rgb/" + img_raw_topic;
- string rgb_cam_info_topic = "rgb/camera_info";
- string rgb_cam_info_raw_topic = "rgb/camera_info_raw";
-
- string depth_topic = "depth/";
- if (openniDepthMode) {
- NODELET_INFO_STREAM("Openni depth mode activated");
- depth_topic += "depth_raw_registered";
- } else {
- depth_topic += "depth_registered";
- }
-
- string depth_cam_info_topic = "depth/camera_info";
-
- string disparity_topic = "disparity/disparity_image";
-
- string point_cloud_topic = "point_cloud/cloud_registered";
-
- string conf_img_topic = "confidence/confidence_image";
- string conf_map_topic = "confidence/confidence_map";
-
- string pose_topic = "map";
- string odometry_topic = "odom";
- string imu_topic = "imu/data";
- string imu_topic_raw = "imu/data_raw";
-
- nhNs.getParam("rgb_topic", rgb_topic);
- nhNs.getParam("rgb_raw_topic", rgb_raw_topic);
- nhNs.getParam("rgb_cam_info_topic", rgb_cam_info_topic);
- nhNs.getParam("rgb_cam_info_raw_topic", rgb_cam_info_raw_topic);
-
- nhNs.getParam("left_topic", left_topic);
- nhNs.getParam("left_raw_topic", left_raw_topic);
- nhNs.getParam("left_cam_info_topic", left_cam_info_topic);
- nhNs.getParam("left_cam_info_raw_topic", left_cam_info_raw_topic);
-
- nhNs.getParam("right_topic", right_topic);
- nhNs.getParam("right_raw_topic", right_raw_topic);
- nhNs.getParam("right_cam_info_topic", right_cam_info_topic);
- nhNs.getParam("right_cam_info_raw_topic", right_cam_info_raw_topic);
-
- nhNs.getParam("depth_topic", depth_topic);
- nhNs.getParam("depth_cam_info_topic", depth_cam_info_topic);
-
- nhNs.getParam("disparity_topic", disparity_topic);
-
- nhNs.getParam("confidence_img_topic", conf_img_topic);
- nhNs.getParam("confidence_map_topic", conf_map_topic);
-
- nhNs.getParam("point_cloud_topic", point_cloud_topic);
-
- nhNs.getParam("pose_topic", pose_topic);
- nhNs.getParam("odometry_topic", odometry_topic);
-
- nhNs.getParam("imu_topic", imu_topic);
- nhNs.getParam("imu_topic_raw", imu_topic_raw);
- nhNs.getParam("imu_pub_rate", imuPubRate);
-
- // Create camera info
- sensor_msgs::CameraInfoPtr rgb_cam_info_ms_g(new sensor_msgs::CameraInfo());
- sensor_msgs::CameraInfoPtr left_cam_info_msg_(new sensor_msgs::CameraInfo());
- sensor_msgs::CameraInfoPtr right_cam_info_msg_(new sensor_msgs::CameraInfo());
- sensor_msgs::CameraInfoPtr rgb_cam_info_raw_msg_(
- new sensor_msgs::CameraInfo());
- sensor_msgs::CameraInfoPtr left_cam_info_raw_ms_g(
- new sensor_msgs::CameraInfo());
- sensor_msgs::CameraInfoPtr right_cam_info_raw_msg_(
- new sensor_msgs::CameraInfo());
- sensor_msgs::CameraInfoPtr depth_cam_info_msg_(new sensor_msgs::CameraInfo());
-
- rgbCamInfoMsg = rgb_cam_info_ms_g;
- leftCamInfoMsg = left_cam_info_msg_;
- rightCamInfoMsg = right_cam_info_msg_;
- rgbCamInfoRawMsg = rgb_cam_info_raw_msg_;
- leftCamInfoRawMsg = left_cam_info_raw_ms_g;
- rightCamInfoRawMsg = right_cam_info_raw_msg_;
- depthCamInfoMsg = depth_cam_info_msg_;
-
- // SVO
- nhNs.param("svo_filepath", svoFilepath, std::string());
-
- // Initialize tf2 transformation
- nhNs.getParam("initial_tracking_pose", initialTrackPose);
- set_pose(initialTrackPose[0], initialTrackPose[1], initialTrackPose[2],
- initialTrackPose[3], initialTrackPose[4], initialTrackPose[5]);
-
- // Initialization transformation listener
- tfBuffer.reset(new tf2_ros::Buffer);
- tfListener.reset(new tf2_ros::TransformListener(*tfBuffer));
-
- // Try to initialize the ZED
- if (!svoFilepath.empty())
- param.svo_input_filename = svoFilepath.c_str();
- else {
- param.camera_fps = frameRate;
- param.camera_resolution = static_cast(resolution);
- if (serial_number == 0)
- param.camera_linux_id = zedId;
- else {
- bool waiting_for_camera = true;
- while (waiting_for_camera) {
-
- if (!nhNs.ok()) {
- zed.close();
- return;
+ // Launch file parameters
+ resolution = sl::RESOLUTION_HD720;
+ quality = sl::DEPTH_MODE_PERFORMANCE;
+ sensingMode = sl::SENSING_MODE_STANDARD;
+ frameRate = 30;
+ gpuId = -1;
+ zedId = 0;
+ serial_number = 0;
+ odometryDb = "";
+ imuPubRate = 100.0;
+ initialTrackPose.resize(6);
+ for (int i = 0; i < 6; i++) {
+ initialTrackPose[i] = 0.0f;
+ }
+ matResizeFactor = 1.0;
+
+ verbose = true;
+
+ nh = getMTNodeHandle();
+ nhNs = getMTPrivateNodeHandle();
+
+ // Set default coordinate frames
+ nhNs.param("pose_frame", mapFrameId, "pose_frame");
+ nhNs.param("odometry_frame", odometryFrameId, "odometry_frame");
+ nhNs.param("base_frame", baseFrameId, "base_frame");
+ nhNs.param("imu_frame", imuFrameId, "imu_link");
+
+ nhNs.param("left_camera_frame", leftCamFrameId,
+ "left_camera_frame");
+ nhNs.param("left_camera_optical_frame", leftCamOptFrameId,
+ "left_camera_optical_frame");
+
+ nhNs.param("right_camera_frame", rightCamFrameId,
+ "right_camera_frame");
+ nhNs.param("right_camera_optical_frame", rightCamOptFrameId,
+ "right_camera_optical_frame");
+
+ depthFrameId = leftCamFrameId;
+ depthOptFrameId = leftCamOptFrameId;
+
+ // Note: Depth image frame id must match color image frame id
+ cloudFrameId = depthOptFrameId;
+ rgbFrameId = depthFrameId;
+ rgbOptFrameId = cloudFrameId;
+
+ disparityFrameId = depthFrameId;
+ disparityOptFrameId = depthOptFrameId;
+
+ confidenceFrameId = depthFrameId;
+ confidenceOptFrameId = depthOptFrameId;
+
+ // Get parameters from launch file
+ nhNs.getParam("resolution", resolution);
+ nhNs.getParam("frame_rate", frameRate);
+
+ checkResolFps();
+
+ nhNs.getParam("verbose", verbose);
+ nhNs.getParam("quality", quality);
+ nhNs.getParam("sensing_mode", sensingMode);
+ nhNs.getParam("openni_depth_mode", openniDepthMode);
+ nhNs.getParam("gpu_id", gpuId);
+ nhNs.getParam("zed_id", zedId);
+ nhNs.getParam("depth_stabilization", depthStabilization);
+ int tmp_sn = 0;
+ nhNs.getParam("serial_number", tmp_sn);
+ if (tmp_sn > 0) {
+ serial_number = tmp_sn;
+ }
+ nhNs.getParam("camera_model", userCamModel);
+
+ // Publish odometry tf
+ nhNs.param("publish_tf", publishTf, true);
+
+ nhNs.param("camera_flip", cameraFlip, false);
+
+ if (serial_number > 0) {
+ NODELET_INFO_STREAM("SN : " << serial_number);
}
- sl::DeviceProperties prop = sl_tools::getZEDFromSN(serial_number);
- if (prop.id < -1 ||
- prop.camera_state == sl::CAMERA_STATE::CAMERA_STATE_NOT_AVAILABLE) {
- std::string msg = "ZED SN" + to_string(serial_number) +
- " not detected ! Please connect this ZED";
- NODELET_INFO_STREAM(msg.c_str());
- std::this_thread::sleep_for(std::chrono::milliseconds(2000));
+ // Print order frames
+ NODELET_INFO_STREAM("pose_frame \t\t -> " << mapFrameId);
+ NODELET_INFO_STREAM("odometry_frame \t\t -> " << odometryFrameId);
+ NODELET_INFO_STREAM("base_frame \t\t -> " << baseFrameId);
+ NODELET_INFO_STREAM("imu_link \t\t -> " << imuFrameId);
+ NODELET_INFO_STREAM("left_camera_frame \t -> " << leftCamFrameId);
+ NODELET_INFO_STREAM("left_camera_optical_frame -> " << leftCamOptFrameId);
+ NODELET_INFO_STREAM("right_camera_frame \t -> " << rightCamFrameId);
+ NODELET_INFO_STREAM("right_camera_optical_frame -> " << rightCamOptFrameId);
+ NODELET_INFO_STREAM("depth_frame \t\t -> " << depthFrameId);
+ NODELET_INFO_STREAM("depth_optical_frame \t -> " << depthOptFrameId);
+ NODELET_INFO_STREAM("disparity_frame \t -> " << disparityFrameId);
+ NODELET_INFO_STREAM("disparity_optical_frame -> " << disparityOptFrameId);
+
+ // Status of map TF
+ NODELET_INFO_STREAM("Publish " << mapFrameId << " ["
+ << (publishTf ? "TRUE" : "FALSE") << "]");
+ NODELET_INFO_STREAM("Camera Flip [" << (cameraFlip ? "TRUE" : "FALSE") << "]");
+
+ // Status of odometry TF
+ // NODELET_INFO_STREAM("Publish " << odometry_frame_id << " [" << (publish_tf
+ // ? "TRUE" : "FALSE") << "]");
+
+ std::string img_topic = "image_rect_color";
+ std::string img_raw_topic = "image_raw_color";
+
+ // Set the default topic names
+ string left_topic = "left/" + img_topic;
+ string left_raw_topic = "left/" + img_raw_topic;
+ string left_cam_info_topic = "left/camera_info";
+ string left_cam_info_raw_topic = "left/camera_info_raw";
+
+ string right_topic = "right/" + img_topic;
+ string right_raw_topic = "right/" + img_raw_topic;
+ string right_cam_info_topic = "right/camera_info";
+ string right_cam_info_raw_topic = "right/camera_info_raw";
+
+ string rgb_topic = "rgb/" + img_topic;
+ string rgb_raw_topic = "rgb/" + img_raw_topic;
+ string rgb_cam_info_topic = "rgb/camera_info";
+ string rgb_cam_info_raw_topic = "rgb/camera_info_raw";
+
+ string depth_topic = "depth/";
+ if (openniDepthMode) {
+ NODELET_INFO_STREAM("Openni depth mode activated");
+ depth_topic += "depth_raw_registered";
} else {
- waiting_for_camera = false;
- param.camera_linux_id = prop.id;
+ depth_topic += "depth_registered";
}
- }
- }
- }
-
- std::string ver = sl_tools::getSDKVersion(verMajor, verMinor, verSubMinor);
- NODELET_INFO_STREAM("SDK version : " << ver);
-
- if (verMajor < 2) {
- NODELET_WARN_STREAM("Please consider to upgrade to latest SDK version to "
- "get better performances");
- param.coordinate_system = COORDINATE_SYSTEM_IMAGE;
- NODELET_INFO_STREAM("Camera coordinate system : COORDINATE_SYSTEM_IMAGE");
- xIdx = 2;
- yIdx = 0;
- zIdx = 1;
- xSign = 1;
- ySign = -1;
- zSign = -1;
- } else if (verMajor == 2 && verMinor < 5) {
- NODELET_WARN_STREAM("Please consider to upgrade to latest SDK version to "
- "get latest features");
- param.coordinate_system = COORDINATE_SYSTEM_RIGHT_HANDED_Z_UP;
- NODELET_INFO_STREAM(
- "Camera coordinate system : COORDINATE_SYSTEM_RIGHT_HANDED_Z_UP");
- xIdx = 1;
- yIdx = 0;
- zIdx = 2;
- xSign = 1;
- ySign = -1;
- zSign = 1;
- } else {
- param.coordinate_system = COORDINATE_SYSTEM_RIGHT_HANDED_Z_UP_X_FWD;
- NODELET_INFO_STREAM(
- "Camera coordinate system : COORDINATE_SYSTEM_RIGHT_HANDED_Z_UP_X_FWD");
- xIdx = 0;
- yIdx = 1;
- zIdx = 2;
- xSign = 1;
- ySign = 1;
- zSign = 1;
- }
-
- param.coordinate_units = sl::UNIT_METER;
-
- param.depth_mode = static_cast(quality);
- param.sdk_verbose = verbose;
- param.sdk_gpu_id = gpuId;
- param.depth_stabilization = depthStabilization;
- param.camera_image_flip = cameraFlip;
-
- sl::ERROR_CODE err = sl::ERROR_CODE_CAMERA_NOT_DETECTED;
- while (err != sl::SUCCESS) {
- err = zed.open(param);
- NODELET_INFO_STREAM(toString(err));
- std::this_thread::sleep_for(std::chrono::milliseconds(2000));
-
- if (!nhNs.ok()) {
- zed.close();
- return;
+
+ string depth_cam_info_topic = "depth/camera_info";
+
+ string disparity_topic = "disparity/disparity_image";
+
+ string point_cloud_topic = "point_cloud/cloud_registered";
+
+ string conf_img_topic = "confidence/confidence_image";
+ string conf_map_topic = "confidence/confidence_map";
+
+ string pose_topic = "map";
+ string odometry_topic = "odom";
+ string imu_topic = "imu/data";
+ string imu_topic_raw = "imu/data_raw";
+
+ nhNs.getParam("rgb_topic", rgb_topic);
+ nhNs.getParam("rgb_raw_topic", rgb_raw_topic);
+ nhNs.getParam("rgb_cam_info_topic", rgb_cam_info_topic);
+ nhNs.getParam("rgb_cam_info_raw_topic", rgb_cam_info_raw_topic);
+
+ nhNs.getParam("left_topic", left_topic);
+ nhNs.getParam("left_raw_topic", left_raw_topic);
+ nhNs.getParam("left_cam_info_topic", left_cam_info_topic);
+ nhNs.getParam("left_cam_info_raw_topic", left_cam_info_raw_topic);
+
+ nhNs.getParam("right_topic", right_topic);
+ nhNs.getParam("right_raw_topic", right_raw_topic);
+ nhNs.getParam("right_cam_info_topic", right_cam_info_topic);
+ nhNs.getParam("right_cam_info_raw_topic", right_cam_info_raw_topic);
+
+ nhNs.getParam("depth_topic", depth_topic);
+ nhNs.getParam("depth_cam_info_topic", depth_cam_info_topic);
+
+ nhNs.getParam("disparity_topic", disparity_topic);
+
+ nhNs.getParam("confidence_img_topic", conf_img_topic);
+ nhNs.getParam("confidence_map_topic", conf_map_topic);
+
+ nhNs.getParam("point_cloud_topic", point_cloud_topic);
+
+ nhNs.getParam("pose_topic", pose_topic);
+ nhNs.getParam("odometry_topic", odometry_topic);
+
+ nhNs.getParam("imu_topic", imu_topic);
+ nhNs.getParam("imu_topic_raw", imu_topic_raw);
+ nhNs.getParam("imu_pub_rate", imuPubRate);
+
+ // Create camera info
+ sensor_msgs::CameraInfoPtr rgb_cam_info_ms_g(new sensor_msgs::CameraInfo());
+ sensor_msgs::CameraInfoPtr left_cam_info_msg_(new sensor_msgs::CameraInfo());
+ sensor_msgs::CameraInfoPtr right_cam_info_msg_(new sensor_msgs::CameraInfo());
+ sensor_msgs::CameraInfoPtr rgb_cam_info_raw_msg_(
+ new sensor_msgs::CameraInfo());
+ sensor_msgs::CameraInfoPtr left_cam_info_raw_ms_g(
+ new sensor_msgs::CameraInfo());
+ sensor_msgs::CameraInfoPtr right_cam_info_raw_msg_(
+ new sensor_msgs::CameraInfo());
+ sensor_msgs::CameraInfoPtr depth_cam_info_msg_(new sensor_msgs::CameraInfo());
+
+ rgbCamInfoMsg = rgb_cam_info_ms_g;
+ leftCamInfoMsg = left_cam_info_msg_;
+ rightCamInfoMsg = right_cam_info_msg_;
+ rgbCamInfoRawMsg = rgb_cam_info_raw_msg_;
+ leftCamInfoRawMsg = left_cam_info_raw_ms_g;
+ rightCamInfoRawMsg = right_cam_info_raw_msg_;
+ depthCamInfoMsg = depth_cam_info_msg_;
+
+ // SVO
+ nhNs.param("svo_filepath", svoFilepath, std::string());
+
+ // Initialize tf2 transformation
+ nhNs.getParam("initial_tracking_pose", initialTrackPose);
+ set_pose(initialTrackPose[0], initialTrackPose[1], initialTrackPose[2],
+ initialTrackPose[3], initialTrackPose[4], initialTrackPose[5]);
+
+ // Initialization transformation listener
+ tfBuffer.reset(new tf2_ros::Buffer);
+ tfListener.reset(new tf2_ros::TransformListener(*tfBuffer));
+
+ // Try to initialize the ZED
+ if (!svoFilepath.empty()) {
+ param.svo_input_filename = svoFilepath.c_str();
+ } else {
+ param.camera_fps = frameRate;
+ param.camera_resolution = static_cast(resolution);
+ if (serial_number == 0) {
+ param.camera_linux_id = zedId;
+ } else {
+ bool waiting_for_camera = true;
+ while (waiting_for_camera) {
+
+ if (!nhNs.ok()) {
+ mStopNode = true; // Stops other threads
+ zed.close();
+ NODELET_DEBUG("ZED pool thread finished");
+ return;
+ }
+
+ sl::DeviceProperties prop = sl_tools::getZEDFromSN(serial_number);
+ if (prop.id < -1 ||
+ prop.camera_state == sl::CAMERA_STATE::CAMERA_STATE_NOT_AVAILABLE) {
+ std::string msg = "ZED SN" + to_string(serial_number) +
+ " not detected ! Please connect this ZED";
+ NODELET_INFO_STREAM(msg.c_str());
+ std::this_thread::sleep_for(std::chrono::milliseconds(2000));
+ } else {
+ waiting_for_camera = false;
+ param.camera_linux_id = prop.id;
+ }
+ }
+ }
+ }
+
+ std::string ver = sl_tools::getSDKVersion(verMajor, verMinor, verSubMinor);
+ NODELET_INFO_STREAM("SDK version : " << ver);
+
+ if (verMajor < 2) {
+ NODELET_WARN_STREAM("Please consider to upgrade to latest SDK version to "
+ "get better performances");
+ param.coordinate_system = COORDINATE_SYSTEM_IMAGE;
+ NODELET_INFO_STREAM("Camera coordinate system : COORDINATE_SYSTEM_IMAGE");
+ xIdx = 2;
+ yIdx = 0;
+ zIdx = 1;
+ xSign = 1;
+ ySign = -1;
+ zSign = -1;
+ } else if (verMajor == 2 && verMinor < 5) {
+ NODELET_WARN_STREAM("Please consider to upgrade to latest SDK version to "
+ "get latest features");
+ param.coordinate_system = COORDINATE_SYSTEM_RIGHT_HANDED_Z_UP;
+ NODELET_INFO_STREAM(
+ "Camera coordinate system : COORDINATE_SYSTEM_RIGHT_HANDED_Z_UP");
+ xIdx = 1;
+ yIdx = 0;
+ zIdx = 2;
+ xSign = 1;
+ ySign = -1;
+ zSign = 1;
+ } else {
+ param.coordinate_system = COORDINATE_SYSTEM_RIGHT_HANDED_Z_UP_X_FWD;
+ NODELET_INFO_STREAM(
+ "Camera coordinate system : COORDINATE_SYSTEM_RIGHT_HANDED_Z_UP_X_FWD");
+ xIdx = 0;
+ yIdx = 1;
+ zIdx = 2;
+ xSign = 1;
+ ySign = 1;
+ zSign = 1;
+ }
+
+ param.coordinate_units = sl::UNIT_METER;
+
+ param.depth_mode = static_cast(quality);
+ param.sdk_verbose = verbose;
+ param.sdk_gpu_id = gpuId;
+ param.depth_stabilization = depthStabilization;
+ param.camera_image_flip = cameraFlip;
+
+ sl::ERROR_CODE err = sl::ERROR_CODE_CAMERA_NOT_DETECTED;
+ while (err != sl::SUCCESS) {
+ err = zed.open(param);
+ NODELET_INFO_STREAM(toString(err));
+ std::this_thread::sleep_for(std::chrono::milliseconds(2000));
+
+ if (!nhNs.ok()) {
+ mStopNode = true; // Stops other threads
+ zed.close();
+ NODELET_DEBUG("ZED pool thread finished");
+ return;
+ }
+ }
+
+ realCamModel = zed.getCameraInformation().camera_model;
+
+ std::string camModelStr = "LAST";
+ if (realCamModel == sl::MODEL_ZED) {
+ camModelStr = "ZED";
+ if (userCamModel != 0) {
+ NODELET_WARN("Camera model does not match user parameter. Please modify "
+ "the value of the parameter 'camera_model' to 0");
+ }
+ } else if (realCamModel == sl::MODEL_ZED_M) {
+ camModelStr = "ZED M";
+ if (userCamModel != 1) {
+ NODELET_WARN("Camera model does not match user parameter. Please modify "
+ "the value of the parameter 'camera_model' to 1");
+ }
+ }
+
+ NODELET_INFO_STREAM("CAMERA MODEL : " << realCamModel);
+
+ serial_number = zed.getCameraInformation().serial_number;
+
+ // Dynamic Reconfigure parameters
+ server =
+ boost::make_shared>();
+ dynamic_reconfigure::Server::CallbackType f;
+ f = boost::bind(&ZEDWrapperNodelet::dynamicReconfCallback, this, _1, _2);
+ server->setCallback(f);
+
+ nhNs.getParam("mat_resize_factor", matResizeFactor);
+
+ if (matResizeFactor < 0.1) {
+ matResizeFactor = 0.1;
+ NODELET_WARN_STREAM(
+ "Minimum allowed values for 'mat_resize_factor' is 0.1");
+ }
+ if (matResizeFactor > 1.0) {
+ matResizeFactor = 1.0;
+ NODELET_WARN_STREAM(
+ "Maximum allowed values for 'mat_resize_factor' is 1.0");
+ }
+
+ nhNs.getParam("confidence", confidence);
+ nhNs.getParam("exposure", exposure);
+ nhNs.getParam("gain", gain);
+ nhNs.getParam("auto_exposure", autoExposure);
+ if (autoExposure) {
+ triggerAutoExposure = true;
+ }
+
+ // Create all the publishers
+ // Image publishers
+ image_transport::ImageTransport it_zed(nh);
+ pubRgb = it_zed.advertise(rgb_topic, 1); // rgb
+ NODELET_INFO_STREAM("Advertized on topic " << rgb_topic);
+ pubRawRgb = it_zed.advertise(rgb_raw_topic, 1); // rgb raw
+ NODELET_INFO_STREAM("Advertized on topic " << rgb_raw_topic);
+ pubLeft = it_zed.advertise(left_topic, 1); // left
+ NODELET_INFO_STREAM("Advertized on topic " << left_topic);
+ pubRawLeft = it_zed.advertise(left_raw_topic, 1); // left raw
+ NODELET_INFO_STREAM("Advertized on topic " << left_raw_topic);
+ pubRight = it_zed.advertise(right_topic, 1); // right
+ NODELET_INFO_STREAM("Advertized on topic " << right_topic);
+ pubRawRight = it_zed.advertise(right_raw_topic, 1); // right raw
+ NODELET_INFO_STREAM("Advertized on topic " << right_raw_topic);
+ pubDepth = it_zed.advertise(depth_topic, 1); // depth
+ NODELET_INFO_STREAM("Advertized on topic " << depth_topic);
+ pubConfImg = it_zed.advertise(conf_img_topic, 1); // confidence image
+ NODELET_INFO_STREAM("Advertized on topic " << conf_img_topic);
+
+ // Confidence Map publisher
+ pubConfMap =
+ nh.advertise(conf_map_topic, 1); // confidence map
+ NODELET_INFO_STREAM("Advertized on topic " << conf_map_topic);
+
+ // Disparity publisher
+ pubDisparity = nh.advertise(disparity_topic, 1);
+ NODELET_INFO_STREAM("Advertized on topic " << disparity_topic);
+
+ // PointCloud publisher
+ pubCloud = nh.advertise(point_cloud_topic, 1);
+ NODELET_INFO_STREAM("Advertized on topic " << point_cloud_topic);
+
+ // Camera info publishers
+ pubRgbCamInfo =
+ nh.advertise(rgb_cam_info_topic, 1); // rgb
+ NODELET_INFO_STREAM("Advertized on topic " << rgb_cam_info_topic);
+ pubLeftCamInfo =
+ nh.advertise(left_cam_info_topic, 1); // left
+ NODELET_INFO_STREAM("Advertized on topic " << left_cam_info_topic);
+ pubRightCamInfo =
+ nh.advertise(right_cam_info_topic, 1); // right
+ NODELET_INFO_STREAM("Advertized on topic " << right_cam_info_topic);
+ pubDepthCamInfo =
+ nh.advertise(depth_cam_info_topic, 1); // depth
+ NODELET_INFO_STREAM("Advertized on topic " << depth_cam_info_topic);
+ // Raw
+ pubRgbCamInfoRaw = nh.advertise(
+ rgb_cam_info_raw_topic, 1); // raw rgb
+ NODELET_INFO_STREAM("Advertized on topic " << rgb_cam_info_raw_topic);
+ pubLeftCamInfoRaw = nh.advertise(
+ left_cam_info_raw_topic, 1); // raw left
+ NODELET_INFO_STREAM("Advertized on topic " << left_cam_info_raw_topic);
+ pubRightCamInfoRaw = nh.advertise(
+ right_cam_info_raw_topic, 1); // raw right
+ NODELET_INFO_STREAM("Advertized on topic " << right_cam_info_raw_topic);
+
+ // Odometry and Map publisher
+ pubPose = nh.advertise(pose_topic, 1);
+ NODELET_INFO_STREAM("Advertized on topic " << pose_topic);
+ pubOdom = nh.advertise(odometry_topic, 1);
+ NODELET_INFO_STREAM("Advertized on topic " << odometry_topic);
+
+ // Imu publisher
+ if (imuPubRate > 0 && realCamModel == sl::MODEL_ZED_M) {
+ pubImu = nh.advertise(imu_topic, 500);
+ NODELET_INFO_STREAM("Advertized on topic " << imu_topic << " @ "
+ << imuPubRate << " Hz");
+
+ pubImuRaw = nh.advertise(imu_topic_raw, 500);
+ NODELET_INFO_STREAM("Advertized on topic " << imu_topic_raw << " @ "
+ << imuPubRate << " Hz");
+
+ imuTime = ros::Time::now();
+ pubImuTimer = nhNs.createTimer(ros::Duration(1.0 / imuPubRate),
+ &ZEDWrapperNodelet::imuPubCallback, this);
+ } else if (imuPubRate > 0 && realCamModel == sl::MODEL_ZED) {
+ NODELET_WARN_STREAM(
+ "'imu_pub_rate' set to "
+ << imuPubRate << " Hz"
+ << " but ZED camera model does not support IMU data publishing.");
+ }
+
+ // Service
+ srvSetInitPose = nh.advertiseService("set_initial_pose",
+ &ZEDWrapperNodelet::on_set_pose, this);
+ srvResetOdometry = nh.advertiseService(
+ "reset_odometry", &ZEDWrapperNodelet::on_reset_odometry, this);
+ srvResetTracking = nh.advertiseService(
+ "reset_tracking", &ZEDWrapperNodelet::on_reset_tracking, this);
+
+ // Start Pointcloud thread
+ mPcThread = std::thread(&ZEDWrapperNodelet::pointcloud_thread, this);
+
+ // Start device poll thread
+ devicePollThread = std::thread(&ZEDWrapperNodelet::device_poll, this);
+
+
}
- }
- realCamModel = zed.getCameraInformation().camera_model;
+ void ZEDWrapperNodelet::checkResolFps() {
+ switch (resolution) {
+ case sl::RESOLUTION_HD2K:
+ if (frameRate != 15) {
+ NODELET_WARN_STREAM("Wrong FrameRate ("
+ << frameRate
+ << ") for the resolution HD2K. Set to 15 FPS.");
+ frameRate = 15;
+ }
+ break;
+
+ case sl::RESOLUTION_HD1080:
+ if (frameRate == 15 || frameRate == 30) {
+ break;
+ }
- std::string camModelStr = "LAST";
- if (realCamModel == sl::MODEL_ZED) {
- camModelStr = "ZED";
- if (userCamModel != 0) {
- NODELET_WARN("Camera model does not match user parameter. Please modify "
- "the value of the parameter 'camera_model' to 0");
+ if (frameRate > 15 && frameRate < 30) {
+ NODELET_WARN_STREAM("Wrong FrameRate ("
+ << frameRate
+ << ") for the resolution HD1080. Set to 15 FPS.");
+ frameRate = 15;
+ } else if (frameRate > 30) {
+ NODELET_WARN_STREAM("Wrong FrameRate ("
+ << frameRate
+ << ") for the resolution HD1080. Set to 30 FPS.");
+ frameRate = 30;
+ } else {
+ NODELET_WARN_STREAM("Wrong FrameRate ("
+ << frameRate
+ << ") for the resolution HD1080. Set to 15 FPS.");
+ frameRate = 15;
+ }
+ break;
+
+ case sl::RESOLUTION_HD720:
+ if (frameRate == 15 || frameRate == 30 || frameRate == 60) {
+ break;
+ }
+
+ if (frameRate > 15 && frameRate < 30) {
+ NODELET_WARN_STREAM("Wrong FrameRate ("
+ << frameRate
+ << ") for the resolution HD720. Set to 15 FPS.");
+ frameRate = 15;
+ } else if (frameRate > 30 && frameRate < 60) {
+ NODELET_WARN_STREAM("Wrong FrameRate ("
+ << frameRate
+ << ") for the resolution HD720. Set to 30 FPS.");
+ frameRate = 30;
+ } else if (frameRate > 60) {
+ NODELET_WARN_STREAM("Wrong FrameRate ("
+ << frameRate
+ << ") for the resolution HD720. Set to 60 FPS.");
+ frameRate = 60;
+ } else {
+ NODELET_WARN_STREAM("Wrong FrameRate ("
+ << frameRate
+ << ") for the resolution HD720. Set to 15 FPS.");
+ frameRate = 15;
+ }
+ break;
+
+ case sl::RESOLUTION_VGA:
+ if (frameRate == 15 || frameRate == 30 || frameRate == 60 ||
+ frameRate == 100) {
+ break;
+ }
+
+ if (frameRate > 15 && frameRate < 30) {
+ NODELET_WARN_STREAM("Wrong FrameRate ("
+ << frameRate
+ << ") for the resolution VGA. Set to 15 FPS.");
+ frameRate = 15;
+ } else if (frameRate > 30 && frameRate < 60) {
+ NODELET_WARN_STREAM("Wrong FrameRate ("
+ << frameRate
+ << ") for the resolution VGA. Set to 30 FPS.");
+ frameRate = 30;
+ } else if (frameRate > 60 && frameRate < 100) {
+ NODELET_WARN_STREAM("Wrong FrameRate ("
+ << frameRate
+ << ") for the resolution VGA. Set to 60 FPS.");
+ frameRate = 60;
+ } else if (frameRate > 100) {
+ NODELET_WARN_STREAM("Wrong FrameRate ("
+ << frameRate
+ << ") for the resolution VGA. Set to 100 FPS.");
+ frameRate = 100;
+ } else {
+ NODELET_WARN_STREAM("Wrong FrameRate ("
+ << frameRate
+ << ") for the resolution VGA. Set to 15 FPS.");
+ frameRate = 15;
+ }
+ break;
+
+ default:
+ NODELET_WARN_STREAM("Invalid resolution. Set to HD720 @ 30 FPS");
+ resolution = 2;
+ frameRate = 30;
+ }
+ }
+
+ sensor_msgs::ImagePtr
+ ZEDWrapperNodelet::imageToROSmsg(cv::Mat img, const std::string encodingType,
+ std::string frameId, ros::Time t) {
+ sensor_msgs::ImagePtr ptr = boost::make_shared();
+ sensor_msgs::Image& imgMessage = *ptr;
+ imgMessage.header.stamp = t;
+ imgMessage.header.frame_id = frameId;
+ imgMessage.height = img.rows;
+ imgMessage.width = img.cols;
+ imgMessage.encoding = encodingType;
+ int num = 1; // for endianness detection
+ imgMessage.is_bigendian = !(*(char*)&num == 1);
+ imgMessage.step = img.cols * img.elemSize();
+ size_t size = imgMessage.step * img.rows;
+ imgMessage.data.resize(size);
+
+ if (img.isContinuous()) {
+ memcpy((char*)(&imgMessage.data[0]), img.data, size);
+ } else {
+ uchar* opencvData = img.data;
+ uchar* rosData = (uchar*)(&imgMessage.data[0]);
+
+ #pragma omp parallel for
+ for (unsigned int i = 0; i < img.rows; i++) {
+ memcpy(rosData, opencvData, imgMessage.step);
+ rosData += imgMessage.step;
+ opencvData += img.step;
+ }
+ }
+ return ptr;
}
- } else if (realCamModel == sl::MODEL_ZED_M) {
- camModelStr = "ZED M";
- if (userCamModel != 1) {
- NODELET_WARN("Camera model does not match user parameter. Please modify "
- "the value of the parameter 'camera_model' to 1");
+
+ void ZEDWrapperNodelet::set_pose(float xt, float yt, float zt, float rr,
+ float pr, float yr) {
+ // ROS pose
+ tf2::Quaternion q;
+ q.setRPY(rr, pr, yr);
+
+ tf2::Vector3 orig(xt, yt, zt);
+
+ baseToOdomTransform.setOrigin(orig);
+ baseToOdomTransform.setRotation(q);
+
+ odomToMapTransform.setIdentity();
+
+ // SL pose
+ sl::float4 q_vec;
+ q_vec[0] = q.x();
+ q_vec[1] = q.y();
+ q_vec[2] = q.z();
+ q_vec[3] = q.w();
+
+ sl::Orientation r(q_vec);
+
+ initialPoseSl.setTranslation(sl::Translation(xt, yt, zt));
+ initialPoseSl.setOrientation(r);
}
- }
-
- NODELET_INFO_STREAM("CAMERA MODEL : " << realCamModel);
-
- serial_number = zed.getCameraInformation().serial_number;
-
- // Dynamic Reconfigure parameters
- server =
- boost::make_shared>();
- dynamic_reconfigure::Server::CallbackType f;
- f = boost::bind(&ZEDWrapperNodelet::dynamicReconfCallback, this, _1, _2);
- server->setCallback(f);
-
- nhNs.getParam("mat_resize_factor", matResizeFactor);
-
- if (matResizeFactor < 0.1) {
- matResizeFactor = 0.1;
- NODELET_WARN_STREAM(
- "Minimum allowed values for 'mat_resize_factor' is 0.1");
- }
- if (matResizeFactor > 1.0) {
- matResizeFactor = 1.0;
- NODELET_WARN_STREAM(
- "Maximum allowed values for 'mat_resize_factor' is 1.0");
- }
-
- nhNs.getParam("confidence", confidence);
- nhNs.getParam("exposure", exposure);
- nhNs.getParam("gain", gain);
- nhNs.getParam("auto_exposure", autoExposure);
- if (autoExposure)
- triggerAutoExposure = true;
-
- // Create all the publishers
- // Image publishers
- image_transport::ImageTransport it_zed(nh);
- pubRgb = it_zed.advertise(rgb_topic, 1); // rgb
- NODELET_INFO_STREAM("Advertized on topic " << rgb_topic);
- pubRawRgb = it_zed.advertise(rgb_raw_topic, 1); // rgb raw
- NODELET_INFO_STREAM("Advertized on topic " << rgb_raw_topic);
- pubLeft = it_zed.advertise(left_topic, 1); // left
- NODELET_INFO_STREAM("Advertized on topic " << left_topic);
- pubRawLeft = it_zed.advertise(left_raw_topic, 1); // left raw
- NODELET_INFO_STREAM("Advertized on topic " << left_raw_topic);
- pubRight = it_zed.advertise(right_topic, 1); // right
- NODELET_INFO_STREAM("Advertized on topic " << right_topic);
- pubRawRight = it_zed.advertise(right_raw_topic, 1); // right raw
- NODELET_INFO_STREAM("Advertized on topic " << right_raw_topic);
- pubDepth = it_zed.advertise(depth_topic, 1); // depth
- NODELET_INFO_STREAM("Advertized on topic " << depth_topic);
- pubConfImg = it_zed.advertise(conf_img_topic, 1); // confidence image
- NODELET_INFO_STREAM("Advertized on topic " << conf_img_topic);
-
- // Confidence Map publisher
- pubConfMap =
- nh.advertise(conf_map_topic, 1); // confidence map
- NODELET_INFO_STREAM("Advertized on topic " << conf_map_topic);
-
- // Disparity publisher
- pubDisparity = nh.advertise(disparity_topic, 1);
- NODELET_INFO_STREAM("Advertized on topic " << disparity_topic);
-
- // PointCloud publisher
- pubCloud = nh.advertise(point_cloud_topic, 1);
- NODELET_INFO_STREAM("Advertized on topic " << point_cloud_topic);
-
- // Camera info publishers
- pubRgbCamInfo =
- nh.advertise(rgb_cam_info_topic, 1); // rgb
- NODELET_INFO_STREAM("Advertized on topic " << rgb_cam_info_topic);
- pubLeftCamInfo =
- nh.advertise(left_cam_info_topic, 1); // left
- NODELET_INFO_STREAM("Advertized on topic " << left_cam_info_topic);
- pubRightCamInfo =
- nh.advertise(right_cam_info_topic, 1); // right
- NODELET_INFO_STREAM("Advertized on topic " << right_cam_info_topic);
- pubDepthCamInfo =
- nh.advertise(depth_cam_info_topic, 1); // depth
- NODELET_INFO_STREAM("Advertized on topic " << depth_cam_info_topic);
- // Raw
- pubRgbCamInfoRaw = nh.advertise(
- rgb_cam_info_raw_topic, 1); // raw rgb
- NODELET_INFO_STREAM("Advertized on topic " << rgb_cam_info_raw_topic);
- pubLeftCamInfoRaw = nh.advertise(
- left_cam_info_raw_topic, 1); // raw left
- NODELET_INFO_STREAM("Advertized on topic " << left_cam_info_raw_topic);
- pubRightCamInfoRaw = nh.advertise(
- right_cam_info_raw_topic, 1); // raw right
- NODELET_INFO_STREAM("Advertized on topic " << right_cam_info_raw_topic);
-
- // Odometry and Map publisher
- pubPose = nh.advertise(pose_topic, 1);
- NODELET_INFO_STREAM("Advertized on topic " << pose_topic);
- pubOdom = nh.advertise(odometry_topic, 1);
- NODELET_INFO_STREAM("Advertized on topic " << odometry_topic);
-
- // Imu publisher
- if (imuPubRate > 0 && realCamModel == sl::MODEL_ZED_M) {
- pubImu = nh.advertise(imu_topic, 500);
- NODELET_INFO_STREAM("Advertized on topic " << imu_topic << " @ "
- << imuPubRate << " Hz");
-
- pubImuRaw = nh.advertise(imu_topic_raw, 500);
- NODELET_INFO_STREAM("Advertized on topic " << imu_topic_raw << " @ "
- << imuPubRate << " Hz");
-
- imuTime = ros::Time::now();
- pubImuTimer = nhNs.createTimer(ros::Duration(1.0 / imuPubRate),
- &ZEDWrapperNodelet::imuPubCallback, this);
- } else if (imuPubRate > 0 && realCamModel == sl::MODEL_ZED) {
- NODELET_WARN_STREAM(
- "'imu_pub_rate' set to "
- << imuPubRate << " Hz"
- << " but ZED camera model does not support IMU data publishing.");
- }
-
- // Service
- srvSetInitPose = nh.advertiseService("set_initial_pose",
- &ZEDWrapperNodelet::on_set_pose, this);
- srvResetOdometry = nh.advertiseService(
- "reset_odometry", &ZEDWrapperNodelet::on_reset_odometry, this);
- srvResetTracking = nh.advertiseService(
- "reset_tracking", &ZEDWrapperNodelet::on_reset_tracking, this);
-
- // Start pool thread
- devicePollThread = boost::shared_ptr(
- new boost::thread(boost::bind(&ZEDWrapperNodelet::device_poll, this)));
-}
-
-void ZEDWrapperNodelet::checkResolFps() {
- switch (resolution) {
- case sl::RESOLUTION_HD2K:
- if (frameRate != 15) {
- NODELET_WARN_STREAM("Wrong FrameRate ("
- << frameRate
- << ") for the resolution HD2K. Set to 15 FPS.");
- frameRate = 15;
+
+ bool ZEDWrapperNodelet::on_set_pose(
+ zed_wrapper::set_initial_pose::Request& req,
+ zed_wrapper::set_initial_pose::Response& res) {
+ initialTrackPose.resize(6);
+
+ initialTrackPose[0] = req.x;
+ initialTrackPose[1] = req.y;
+ initialTrackPose[2] = req.z;
+ initialTrackPose[3] = req.R;
+ initialTrackPose[4] = req.P;
+ initialTrackPose[5] = req.Y;
+
+ set_pose(initialTrackPose[0], initialTrackPose[1], initialTrackPose[2],
+ initialTrackPose[3], initialTrackPose[4], initialTrackPose[5]);
+
+ if (trackingActivated) {
+ zed.resetTracking(initialPoseSl);
+ }
+
+ res.done = true;
+
+ return true;
}
- break;
- case sl::RESOLUTION_HD1080:
- if (frameRate == 15 || frameRate == 30) {
- break;
+ bool ZEDWrapperNodelet::on_reset_tracking(
+ zed_wrapper::reset_tracking::Request& req,
+ zed_wrapper::reset_tracking::Response& res) {
+ if (!trackingActivated) {
+ res.reset_done = false;
+ return false;
+ }
+
+ nhNs.getParam("initial_tracking_pose", initialTrackPose);
+
+ if (initialTrackPose.size() != 6) {
+ NODELET_WARN_STREAM("Invalid Initial Pose size (" << initialTrackPose.size()
+ << "). Using Identity");
+ initialPoseSl.setIdentity();
+ odomToMapTransform.setIdentity();
+ baseToOdomTransform.setIdentity();
+ } else {
+ set_pose(initialTrackPose[0], initialTrackPose[1], initialTrackPose[2],
+ initialTrackPose[3], initialTrackPose[4], initialTrackPose[5]);
+ }
+
+ zed.resetTracking(initialPoseSl);
+
+ return true;
}
- if (frameRate > 15 && frameRate < 30) {
- NODELET_WARN_STREAM("Wrong FrameRate ("
- << frameRate
- << ") for the resolution HD1080. Set to 15 FPS.");
- frameRate = 15;
- } else if (frameRate > 30) {
- NODELET_WARN_STREAM("Wrong FrameRate ("
- << frameRate
- << ") for the resolution HD1080. Set to 30 FPS.");
- frameRate = 30;
- } else {
- NODELET_WARN_STREAM("Wrong FrameRate ("
- << frameRate
- << ") for the resolution HD1080. Set to 15 FPS.");
- frameRate = 15;
+ bool ZEDWrapperNodelet::on_reset_odometry(
+ zed_wrapper::reset_odometry::Request& req,
+ zed_wrapper::reset_odometry::Response& res) {
+ initOdomWithPose = true;
+
+ res.reset_done = true;
+
+ return true;
}
- break;
- case sl::RESOLUTION_HD720:
- if (frameRate == 15 || frameRate == 30 || frameRate == 60) {
- break;
+ void ZEDWrapperNodelet::start_tracking() {
+ NODELET_INFO_STREAM("Starting Tracking");
+
+ nhNs.getParam("odometry_DB", odometryDb);
+ nhNs.getParam("pose_smoothing", poseSmoothing);
+ NODELET_INFO_STREAM("Pose Smoothing : " << poseSmoothing);
+ nhNs.getParam("spatial_memory", spatialMemory);
+ NODELET_INFO_STREAM("Spatial Memory : " << spatialMemory);
+ if (realCamModel == sl::MODEL_ZED_M) {
+ nhNs.getParam("init_odom_with_imu", initOdomWithPose);
+ NODELET_INFO_STREAM(
+ "Init Odometry with first IMU data : " << initOdomWithPose);
+ } else {
+ initOdomWithPose = false;
+ }
+
+ if (initialTrackPose.size() != 6) {
+ NODELET_WARN_STREAM("Invalid Initial Pose size (" << initialTrackPose.size()
+ << "). Using Identity");
+ initialPoseSl.setIdentity();
+ odomToMapTransform.setIdentity();
+ baseToOdomTransform.setIdentity(); // fix: was a duplicated odomToMapTransform.setIdentity()
+ } else {
+ set_pose(initialTrackPose[0], initialTrackPose[1], initialTrackPose[2],
+ initialTrackPose[3], initialTrackPose[4], initialTrackPose[5]);
+ }
+
+ if (odometryDb != "" && !sl_tools::file_exist(odometryDb)) {
+ odometryDb = "";
+ NODELET_WARN("odometry_DB path doesn't exist or is unreachable.");
+ }
+
+ // Tracking parameters
+ sl::TrackingParameters trackParams;
+ trackParams.area_file_path = odometryDb.c_str();
+ trackParams.enable_pose_smoothing = poseSmoothing;
+ trackParams.enable_spatial_memory = spatialMemory;
+
+ trackParams.initial_world_transform = initialPoseSl;
+
+ zed.enableTracking(trackParams);
+ trackingActivated = true;
+ }
- if (frameRate > 15 && frameRate < 30) {
- NODELET_WARN_STREAM("Wrong FrameRate ("
- << frameRate
- << ") for the resolution HD720. Set to 15 FPS.");
- frameRate = 15;
- } else if (frameRate > 30 && frameRate < 60) {
- NODELET_WARN_STREAM("Wrong FrameRate ("
- << frameRate
- << ") for the resolution HD720. Set to 30 FPS.");
- frameRate = 30;
- } else if (frameRate > 60) {
- NODELET_WARN_STREAM("Wrong FrameRate ("
- << frameRate
- << ") for the resolution HD720. Set to 60 FPS.");
- frameRate = 60;
- } else {
- NODELET_WARN_STREAM("Wrong FrameRate ("
- << frameRate
- << ") for the resolution HD720. Set to 15 FPS.");
- frameRate = 15;
+ void ZEDWrapperNodelet::publishOdom(tf2::Transform odom_base_transform,
+ ros::Time t) {
+ nav_msgs::Odometry odom;
+ odom.header.stamp = t;
+ odom.header.frame_id = odometryFrameId; // odom_frame
+ odom.child_frame_id = baseFrameId; // base_frame
+ // conversion from Transform to message
+ geometry_msgs::Transform base2 = tf2::toMsg(odom_base_transform);
+ // Add all value in odometry message
+ odom.pose.pose.position.x = base2.translation.x;
+ odom.pose.pose.position.y = base2.translation.y;
+ odom.pose.pose.position.z = base2.translation.z;
+ odom.pose.pose.orientation.x = base2.rotation.x;
+ odom.pose.pose.orientation.y = base2.rotation.y;
+ odom.pose.pose.orientation.z = base2.rotation.z;
+ odom.pose.pose.orientation.w = base2.rotation.w;
+ // Publish odometry message
+ pubOdom.publish(odom);
}
- break;
- case sl::RESOLUTION_VGA:
- if (frameRate == 15 || frameRate == 30 || frameRate == 60 ||
- frameRate == 100) {
- break;
+ void ZEDWrapperNodelet::publishOdomFrame(tf2::Transform baseTransform,
+ ros::Time t) {
+ geometry_msgs::TransformStamped transformStamped;
+ transformStamped.header.stamp = t;
+ transformStamped.header.frame_id = odometryFrameId;
+ transformStamped.child_frame_id = baseFrameId;
+ // conversion from Transform to message
+ transformStamped.transform = tf2::toMsg(baseTransform);
+ // Publish transformation
+ transformOdomBroadcaster.sendTransform(transformStamped);
}
- if (frameRate > 15 && frameRate < 30) {
- NODELET_WARN_STREAM("Wrong FrameRate ("
- << frameRate
- << ") for the resolution VGA. Set to 15 FPS.");
- frameRate = 15;
- } else if (frameRate > 30 && frameRate < 60) {
- NODELET_WARN_STREAM("Wrong FrameRate ("
- << frameRate
- << ") for the resolution VGA. Set to 30 FPS.");
- frameRate = 30;
- } else if (frameRate > 60 && frameRate < 100) {
- NODELET_WARN_STREAM("Wrong FrameRate ("
- << frameRate
- << ") for the resolution VGA. Set to 60 FPS.");
- frameRate = 60;
- } else if (frameRate > 100) {
- NODELET_WARN_STREAM("Wrong FrameRate ("
- << frameRate
- << ") for the resolution VGA. Set to 100 FPS.");
- frameRate = 100;
- } else {
- NODELET_WARN_STREAM("Wrong FrameRate ("
- << frameRate
- << ") for the resolution VGA. Set to 15 FPS.");
- frameRate = 15;
+ void ZEDWrapperNodelet::publishPose(tf2::Transform poseBaseTransform,
+ ros::Time t) {
+ geometry_msgs::PoseStamped pose;
+ pose.header.stamp = t;
+ pose.header.frame_id = mapFrameId; // map_frame
+ // conversion from Transform to message
+ geometry_msgs::Transform base2 = tf2::toMsg(poseBaseTransform);
+ // Add all value in Pose message
+ pose.pose.position.x = base2.translation.x;
+ pose.pose.position.y = base2.translation.y;
+ pose.pose.position.z = base2.translation.z;
+ pose.pose.orientation.x = base2.rotation.x;
+ pose.pose.orientation.y = base2.rotation.y;
+ pose.pose.orientation.z = base2.rotation.z;
+ pose.pose.orientation.w = base2.rotation.w;
+ // Publish pose message
+ pubPose.publish(pose);
}
- break;
-
- default:
- NODELET_WARN_STREAM("Invalid resolution. Set to HD720 @ 30 FPS");
- resolution = 2;
- frameRate = 30;
- }
-}
-
-sensor_msgs::ImagePtr
-ZEDWrapperNodelet::imageToROSmsg(cv::Mat img, const std::string encodingType,
- std::string frameId, ros::Time t) {
- sensor_msgs::ImagePtr ptr = boost::make_shared();
- sensor_msgs::Image &imgMessage = *ptr;
- imgMessage.header.stamp = t;
- imgMessage.header.frame_id = frameId;
- imgMessage.height = img.rows;
- imgMessage.width = img.cols;
- imgMessage.encoding = encodingType;
- int num = 1; // for endianness detection
- imgMessage.is_bigendian = !(*(char *)&num == 1);
- imgMessage.step = img.cols * img.elemSize();
- size_t size = imgMessage.step * img.rows;
- imgMessage.data.resize(size);
-
- if (img.isContinuous())
- memcpy((char *)(&imgMessage.data[0]), img.data, size);
- else {
- uchar *opencvData = img.data;
- uchar *rosData = (uchar *)(&imgMessage.data[0]);
-
-#pragma omp parallel for
- for (unsigned int i = 0; i < img.rows; i++) {
- memcpy(rosData, opencvData, imgMessage.step);
- rosData += imgMessage.step;
- opencvData += img.step;
+
+ void ZEDWrapperNodelet::publishPoseFrame(tf2::Transform baseTransform,
+ ros::Time t) {
+ geometry_msgs::TransformStamped transformStamped;
+ transformStamped.header.stamp = t;
+ transformStamped.header.frame_id = mapFrameId;
+ transformStamped.child_frame_id = odometryFrameId;
+ // conversion from Transform to message
+ transformStamped.transform = tf2::toMsg(baseTransform);
+ // Publish transformation
+ transformPoseBroadcaster.sendTransform(transformStamped);
}
- }
- return ptr;
-}
-
-void ZEDWrapperNodelet::set_pose(float xt, float yt, float zt, float rr,
- float pr, float yr) {
- // ROS pose
- tf2::Quaternion q;
- q.setRPY(rr, pr, yr);
-
- tf2::Vector3 orig(xt, yt, zt);
-
- baseToOdomTransform.setOrigin(orig);
- baseToOdomTransform.setRotation(q);
-
- odomToMapTransform.setIdentity();
-
- // SL pose
- sl::float4 q_vec;
- q_vec[0] = q.x();
- q_vec[1] = q.y();
- q_vec[2] = q.z();
- q_vec[3] = q.w();
-
- sl::Orientation r(q_vec);
-
- initialPoseSl.setTranslation(sl::Translation(xt, yt, zt));
- initialPoseSl.setOrientation(r);
-}
-
-bool ZEDWrapperNodelet::on_set_pose(
- zed_wrapper::set_initial_pose::Request &req,
- zed_wrapper::set_initial_pose::Response &res) {
- initialTrackPose.resize(6);
-
- initialTrackPose[0] = req.x;
- initialTrackPose[1] = req.y;
- initialTrackPose[2] = req.z;
- initialTrackPose[3] = req.R;
- initialTrackPose[4] = req.P;
- initialTrackPose[5] = req.Y;
-
- set_pose(initialTrackPose[0], initialTrackPose[1], initialTrackPose[2],
- initialTrackPose[3], initialTrackPose[4], initialTrackPose[5]);
-
- if (trackingActivated) {
- zed.resetTracking(initialPoseSl);
- }
-
- res.done = true;
-
- return true;
-}
-
-bool ZEDWrapperNodelet::on_reset_tracking(
- zed_wrapper::reset_tracking::Request &req,
- zed_wrapper::reset_tracking::Response &res) {
- if (!trackingActivated) {
- res.reset_done = false;
- return false;
- }
-
- nhNs.getParam("initial_tracking_pose", initialTrackPose);
-
- if (initialTrackPose.size() != 6) {
- NODELET_WARN_STREAM("Invalid Initial Pose size (" << initialTrackPose.size()
- << "). Using Identity");
- initialPoseSl.setIdentity();
- odomToMapTransform.setIdentity();
- baseToOdomTransform.setIdentity();
- } else {
- set_pose(initialTrackPose[0], initialTrackPose[1], initialTrackPose[2],
- initialTrackPose[3], initialTrackPose[4], initialTrackPose[5]);
- }
-
- zed.resetTracking(initialPoseSl);
-
- return true;
-}
-
-bool ZEDWrapperNodelet::on_reset_odometry(
- zed_wrapper::reset_odometry::Request &req,
- zed_wrapper::reset_odometry::Response &res) {
- initOdomWithPose = true;
-
- res.reset_done = true;
-
- return true;
-}
-
-void ZEDWrapperNodelet::start_tracking() {
- NODELET_INFO_STREAM("Starting Tracking");
-
- nhNs.getParam("odometry_DB", odometryDb);
- nhNs.getParam("pose_smoothing", poseSmoothing);
- NODELET_INFO_STREAM("Pose Smoothing : " << poseSmoothing);
- nhNs.getParam("spatial_memory", spatialMemory);
- NODELET_INFO_STREAM("Spatial Memory : " << spatialMemory);
- if (realCamModel == sl::MODEL_ZED_M) {
- nhNs.getParam("init_odom_with_imu", initOdomWithPose);
- NODELET_INFO_STREAM(
- "Init Odometry with first IMU data : " << initOdomWithPose);
- } else {
- initOdomWithPose = false;
- }
-
- if (initialTrackPose.size() != 6) {
- NODELET_WARN_STREAM("Invalid Initial Pose size (" << initialTrackPose.size()
- << "). Using Identity");
- initialPoseSl.setIdentity();
- odomToMapTransform.setIdentity();
- odomToMapTransform.setIdentity();
- } else {
- set_pose(initialTrackPose[0], initialTrackPose[1], initialTrackPose[2],
- initialTrackPose[3], initialTrackPose[4], initialTrackPose[5]);
- }
-
- if (odometryDb != "" && !sl_tools::file_exist(odometryDb)) {
- odometryDb = "";
- NODELET_WARN("odometry_DB path doesn't exist or is unreachable.");
- }
-
- // Tracking parameters
- sl::TrackingParameters trackParams;
- trackParams.area_file_path = odometryDb.c_str();
- trackParams.enable_pose_smoothing = poseSmoothing;
- trackParams.enable_spatial_memory = spatialMemory;
-
- trackParams.initial_world_transform = initialPoseSl;
-
- zed.enableTracking(trackParams);
- trackingActivated = true;
-}
-
-void ZEDWrapperNodelet::publishOdom(tf2::Transform odom_base_transform,
- ros::Time t) {
- nav_msgs::Odometry odom;
- odom.header.stamp = t;
- odom.header.frame_id = odometryFrameId; // odom_frame
- odom.child_frame_id = baseFrameId; // base_frame
- // conversion from Tranform to message
- geometry_msgs::Transform base2 = tf2::toMsg(odom_base_transform);
- // Add all value in odometry message
- odom.pose.pose.position.x = base2.translation.x;
- odom.pose.pose.position.y = base2.translation.y;
- odom.pose.pose.position.z = base2.translation.z;
- odom.pose.pose.orientation.x = base2.rotation.x;
- odom.pose.pose.orientation.y = base2.rotation.y;
- odom.pose.pose.orientation.z = base2.rotation.z;
- odom.pose.pose.orientation.w = base2.rotation.w;
- // Publish odometry message
- pubOdom.publish(odom);
-}
-
-void ZEDWrapperNodelet::publishOdomFrame(tf2::Transform baseTransform,
- ros::Time t) {
- geometry_msgs::TransformStamped transformStamped;
- transformStamped.header.stamp = t;
- transformStamped.header.frame_id = odometryFrameId;
- transformStamped.child_frame_id = baseFrameId;
- // conversion from Tranform to message
- transformStamped.transform = tf2::toMsg(baseTransform);
- // Publish transformation
- transformOdomBroadcaster.sendTransform(transformStamped);
-}
-
-void ZEDWrapperNodelet::publishPose(tf2::Transform poseBaseTransform,
- ros::Time t) {
- geometry_msgs::PoseStamped pose;
- pose.header.stamp = t;
- pose.header.frame_id = mapFrameId; // map_frame
- // conversion from Tranform to message
- geometry_msgs::Transform base2 = tf2::toMsg(poseBaseTransform);
- // Add all value in Pose message
- pose.pose.position.x = base2.translation.x;
- pose.pose.position.y = base2.translation.y;
- pose.pose.position.z = base2.translation.z;
- pose.pose.orientation.x = base2.rotation.x;
- pose.pose.orientation.y = base2.rotation.y;
- pose.pose.orientation.z = base2.rotation.z;
- pose.pose.orientation.w = base2.rotation.w;
- // Publish odometry message
- pubPose.publish(pose);
-}
-
-void ZEDWrapperNodelet::publishPoseFrame(tf2::Transform baseTransform,
- ros::Time t) {
- geometry_msgs::TransformStamped transformStamped;
- transformStamped.header.stamp = t;
- transformStamped.header.frame_id = mapFrameId;
- transformStamped.child_frame_id = odometryFrameId;
- // conversion from Tranform to message
- transformStamped.transform = tf2::toMsg(baseTransform);
- // Publish transformation
- transformPoseBroadcaster.sendTransform(transformStamped);
-}
-
-void ZEDWrapperNodelet::publishImuFrame(tf2::Transform baseTransform) {
- geometry_msgs::TransformStamped transformStamped;
- transformStamped.header.stamp = imuTime;
- transformStamped.header.frame_id = baseFrameId;
- transformStamped.child_frame_id = imuFrameId;
- // conversion from Tranform to message
- transformStamped.transform = tf2::toMsg(baseTransform);
- // Publish transformation
- transformImuBroadcaster.sendTransform(transformStamped);
-}
-
-void ZEDWrapperNodelet::publishImage(cv::Mat img,
- image_transport::Publisher &pubImg,
- string imgFrameId, ros::Time t) {
- pubImg.publish(
- imageToROSmsg(img, sensor_msgs::image_encodings::BGR8, imgFrameId, t));
-}
-
-void ZEDWrapperNodelet::publishDepth(cv::Mat depth, ros::Time t) {
- string encoding;
- if (openniDepthMode) {
- depth *= 1000.0f;
- depth.convertTo(depth, CV_16UC1); // in mm, rounded
- encoding = sensor_msgs::image_encodings::TYPE_16UC1;
- } else {
- encoding = sensor_msgs::image_encodings::TYPE_32FC1;
- }
-
- pubDepth.publish(imageToROSmsg(depth, encoding, depthOptFrameId, t));
-}
-
-void ZEDWrapperNodelet::publishDisparity(cv::Mat disparity, ros::Time t) {
- sl::CameraInformation zedParam =
- zed.getCameraInformation(sl::Resolution(matWidth, matHeight));
- sensor_msgs::ImagePtr disparity_image = imageToROSmsg(
- disparity, sensor_msgs::image_encodings::TYPE_32FC1, disparityFrameId, t);
-
- stereo_msgs::DisparityImage msg;
- msg.image = *disparity_image;
- msg.header = msg.image.header;
- msg.f = zedParam.calibration_parameters.left_cam.fx;
- msg.T = zedParam.calibration_parameters.T.x;
- msg.min_disparity = msg.f * msg.T / zed.getDepthMaxRangeValue();
- msg.max_disparity = msg.f * msg.T / zed.getDepthMinRangeValue();
- pubDisparity.publish(msg);
-}
-
-void ZEDWrapperNodelet::publishPointCloud(int width, int height) {
- pcl::PointCloud point_cloud;
- point_cloud.width = width;
- point_cloud.height = height;
- int size = width * height;
- point_cloud.points.resize(size);
-
- sl::Vector4 *cpu_cloud = cloud.getPtr();
-
-#pragma omp parallel for
- for (int i = 0; i < size; i++) {
- // COORDINATE_SYSTEM_RIGHT_HANDED_Z_UP_X_FWD
- point_cloud.points[i].x = xSign * cpu_cloud[i][xIdx];
- point_cloud.points[i].y = ySign * cpu_cloud[i][yIdx];
- point_cloud.points[i].z = zSign * cpu_cloud[i][zIdx];
- point_cloud.points[i].rgb = cpu_cloud[i][3];
- }
-
- sensor_msgs::PointCloud2 output;
- pcl::toROSMsg(point_cloud,
- output); // Convert the point cloud to a ROS message
- output.header.frame_id =
- pointCloudFrameId; // Set the header values of the ROS message
- output.header.stamp = pointCloudTime;
- output.height = height;
- output.width = width;
- output.is_bigendian = false;
- output.is_dense = false;
- pubCloud.publish(output);
-}
-
-void ZEDWrapperNodelet::publishCamInfo(sensor_msgs::CameraInfoPtr camInfoMsg,
- ros::Publisher pubCamInfo, ros::Time t) {
- static int seq = 0;
- camInfoMsg->header.stamp = t;
- camInfoMsg->header.seq = seq;
- pubCamInfo.publish(camInfoMsg);
- seq++;
-}
-
-void ZEDWrapperNodelet::fillCamInfo(
- sl::Camera &zed, sensor_msgs::CameraInfoPtr left_cam_info_msg,
- sensor_msgs::CameraInfoPtr right_cam_info_msg, string leftFrameId,
- string rightFrameId, bool rawParam /*= false*/) {
- sl::CalibrationParameters zedParam;
-
- if (rawParam)
- zedParam = zed.getCameraInformation(sl::Resolution(matWidth, matHeight))
- .calibration_parameters_raw;
- else
- zedParam = zed.getCameraInformation(sl::Resolution(matWidth, matHeight))
- .calibration_parameters;
-
- float baseline = zedParam.T[0];
-
- left_cam_info_msg->distortion_model =
- sensor_msgs::distortion_models::PLUMB_BOB;
- right_cam_info_msg->distortion_model =
- sensor_msgs::distortion_models::PLUMB_BOB;
-
- left_cam_info_msg->D.resize(5);
- right_cam_info_msg->D.resize(5);
- left_cam_info_msg->D[0] = zedParam.left_cam.disto[0]; // k1
- left_cam_info_msg->D[1] = zedParam.left_cam.disto[1]; // k2
- left_cam_info_msg->D[2] = zedParam.left_cam.disto[4]; // k3
- left_cam_info_msg->D[3] = zedParam.left_cam.disto[2]; // p1
- left_cam_info_msg->D[4] = zedParam.left_cam.disto[3]; // p2
- right_cam_info_msg->D[0] = zedParam.right_cam.disto[0]; // k1
- right_cam_info_msg->D[1] = zedParam.right_cam.disto[1]; // k2
- right_cam_info_msg->D[2] = zedParam.right_cam.disto[4]; // k3
- right_cam_info_msg->D[3] = zedParam.right_cam.disto[2]; // p1
- right_cam_info_msg->D[4] = zedParam.right_cam.disto[3]; // p2
-
- left_cam_info_msg->K.fill(0.0);
- right_cam_info_msg->K.fill(0.0);
- left_cam_info_msg->K[0] = zedParam.left_cam.fx;
- left_cam_info_msg->K[2] = zedParam.left_cam.cx;
- left_cam_info_msg->K[4] = zedParam.left_cam.fy;
- left_cam_info_msg->K[5] = zedParam.left_cam.cy;
- left_cam_info_msg->K[8] = 1.0;
- right_cam_info_msg->K[0] = zedParam.right_cam.fx;
- right_cam_info_msg->K[2] = zedParam.right_cam.cx;
- right_cam_info_msg->K[4] = zedParam.right_cam.fy;
- right_cam_info_msg->K[5] = zedParam.right_cam.cy;
- right_cam_info_msg->K[8] = 1.0;
-
- left_cam_info_msg->R.fill(0.0);
- right_cam_info_msg->R.fill(0.0);
- for (int i = 0; i < 3; i++) {
- // identity
- right_cam_info_msg->R[i + i * 3] = 1;
- left_cam_info_msg->R[i + i * 3] = 1;
- }
-
- if (rawParam) {
- cv::Mat R_ = sl_tools::convertRodrigues(zedParam.R);
- float *p = (float *)R_.data;
- for (int i = 0; i < 9; i++)
- right_cam_info_msg->R[i] = p[i];
- }
-
- left_cam_info_msg->P.fill(0.0);
- right_cam_info_msg->P.fill(0.0);
- left_cam_info_msg->P[0] = zedParam.left_cam.fx;
- left_cam_info_msg->P[2] = zedParam.left_cam.cx;
- left_cam_info_msg->P[5] = zedParam.left_cam.fy;
- left_cam_info_msg->P[6] = zedParam.left_cam.cy;
- left_cam_info_msg->P[10] = 1.0;
- // http://docs.ros.org/api/sensor_msgs/html/msg/CameraInfo.html
- right_cam_info_msg->P[3] = (-1 * zedParam.left_cam.fx * baseline);
-
- right_cam_info_msg->P[0] = zedParam.right_cam.fx;
- right_cam_info_msg->P[2] = zedParam.right_cam.cx;
- right_cam_info_msg->P[5] = zedParam.right_cam.fy;
- right_cam_info_msg->P[6] = zedParam.right_cam.cy;
- right_cam_info_msg->P[10] = 1.0;
-
- left_cam_info_msg->width = right_cam_info_msg->width = matWidth;
- left_cam_info_msg->height = right_cam_info_msg->height = matHeight;
-
- left_cam_info_msg->header.frame_id = leftFrameId;
- right_cam_info_msg->header.frame_id = rightFrameId;
-}
-
-void ZEDWrapperNodelet::dynamicReconfCallback(zed_wrapper::ZedConfig &config,
- uint32_t level) {
- switch (level) {
- case 0:
- confidence = config.confidence;
- NODELET_INFO("Reconfigure confidence : %d", confidence);
- break;
- case 1:
- exposure = config.exposure;
- NODELET_INFO("Reconfigure exposure : %d", exposure);
- break;
- case 2:
- gain = config.gain;
- NODELET_INFO("Reconfigure gain : %d", gain);
- break;
- case 3:
- autoExposure = config.auto_exposure;
- if (autoExposure)
- triggerAutoExposure = true;
- NODELET_INFO("Reconfigure auto control of exposure and gain : %s",
- autoExposure ? "Enable" : "Disable");
- break;
- case 4:
- matResizeFactor = config.mat_resize_factor;
- NODELET_INFO("Reconfigure mat_resize_factor: %g", matResizeFactor);
-
- dataMutex.lock();
-
- matWidth = static_cast(camWidth * matResizeFactor);
- matHeight = static_cast(camHeight * matResizeFactor);
- NODELET_DEBUG_STREAM("Data Mat size : " << matWidth << "x" << matHeight);
-
- // Update Camera Info
- fillCamInfo(zed, leftCamInfoMsg, rightCamInfoMsg, leftCamOptFrameId,
- rightCamOptFrameId);
- fillCamInfo(zed, leftCamInfoRawMsg, rightCamInfoRawMsg, leftCamOptFrameId,
- rightCamOptFrameId, true);
- rgbCamInfoMsg = depthCamInfoMsg = leftCamInfoMsg; // the reference camera is
- // the Left one (next to
- // the ZED logo)
- rgbCamInfoRawMsg = leftCamInfoRawMsg;
-
- dataMutex.unlock();
-
- break;
- }
-}
-
-void ZEDWrapperNodelet::imuPubCallback(const ros::TimerEvent &e) {
- int imu_SubNumber = pubImu.getNumSubscribers();
- int imu_RawSubNumber = pubImuRaw.getNumSubscribers();
-
- if (imu_SubNumber < 1 && imu_RawSubNumber < 1) {
- return;
- }
-
- ros::Time t =
- sl_tools::slTime2Ros(zed.getTimestamp(sl::TIME_REFERENCE_IMAGE));
-
- sl::IMUData imu_data;
- zed.getIMUData(imu_data, sl::TIME_REFERENCE_CURRENT);
-
- if (imu_SubNumber > 0) {
- sensor_msgs::Imu imu_msg;
- imu_msg.header.stamp = imuTime; // t;
- imu_msg.header.frame_id = imuFrameId;
-
- imu_msg.orientation.x = xSign * imu_data.getOrientation()[xIdx];
- imu_msg.orientation.y = ySign * imu_data.getOrientation()[yIdx];
- imu_msg.orientation.z = zSign * imu_data.getOrientation()[zIdx];
- imu_msg.orientation.w = imu_data.getOrientation()[3];
-
- imu_msg.angular_velocity.x = xSign * imu_data.angular_velocity[xIdx];
- imu_msg.angular_velocity.y = ySign * imu_data.angular_velocity[yIdx];
- imu_msg.angular_velocity.z = zSign * imu_data.angular_velocity[zIdx];
-
- imu_msg.linear_acceleration.x = xSign * imu_data.linear_acceleration[xIdx];
- imu_msg.linear_acceleration.y = ySign * imu_data.linear_acceleration[yIdx];
- imu_msg.linear_acceleration.z = zSign * imu_data.linear_acceleration[zIdx];
-
- for (int i = 0; i < 3; i += 3) {
- imu_msg.orientation_covariance[i * 3 + 0] =
- imu_data.orientation_covariance.r[i * 3 + xIdx];
- imu_msg.orientation_covariance[i * 3 + 1] =
- imu_data.orientation_covariance.r[i * 3 + yIdx];
- imu_msg.orientation_covariance[i * 3 + 2] =
- imu_data.orientation_covariance.r[i * 3 + zIdx];
-
- imu_msg.linear_acceleration_covariance[i * 3 + 0] =
- imu_data.linear_acceleration_convariance.r[i * 3 + xIdx];
- imu_msg.linear_acceleration_covariance[i * 3 + 1] =
- imu_data.linear_acceleration_convariance.r[i * 3 + yIdx];
- imu_msg.linear_acceleration_covariance[i * 3 + 2] =
- imu_data.linear_acceleration_convariance.r[i * 3 + zIdx];
-
- imu_msg.angular_velocity_covariance[i * 3 + 0] =
- imu_data.angular_velocity_convariance.r[i * 3 + xIdx];
- imu_msg.angular_velocity_covariance[i * 3 + 1] =
- imu_data.angular_velocity_convariance.r[i * 3 + yIdx];
- imu_msg.angular_velocity_covariance[i * 3 + 2] =
- imu_data.angular_velocity_convariance.r[i * 3 + zIdx];
+
+ void ZEDWrapperNodelet::publishImuFrame(tf2::Transform baseTransform) {
+ geometry_msgs::TransformStamped transformStamped;
+ transformStamped.header.stamp = imuTime;
+ transformStamped.header.frame_id = baseFrameId;
+ transformStamped.child_frame_id = imuFrameId;
+ // conversion from Transform to message
+ transformStamped.transform = tf2::toMsg(baseTransform);
+ // Publish transformation
+ transformImuBroadcaster.sendTransform(transformStamped);
}
- pubImu.publish(imu_msg);
- }
-
- if (imu_RawSubNumber > 0) {
- sensor_msgs::Imu imu_raw_msg;
- imu_raw_msg.header.stamp = imuTime; // t;
- imu_raw_msg.header.frame_id = imuFrameId;
-
- imu_raw_msg.angular_velocity.x = xSign * imu_data.angular_velocity[xIdx];
- imu_raw_msg.angular_velocity.y = ySign * imu_data.angular_velocity[yIdx];
- imu_raw_msg.angular_velocity.z = zSign * imu_data.angular_velocity[zIdx];
-
- imu_raw_msg.linear_acceleration.x =
- xSign * imu_data.linear_acceleration[xIdx];
- imu_raw_msg.linear_acceleration.y =
- ySign * imu_data.linear_acceleration[yIdx];
- imu_raw_msg.linear_acceleration.z =
- zSign * imu_data.linear_acceleration[zIdx];
-
- for (int i = 0; i < 3; i += 3) {
- imu_raw_msg.linear_acceleration_covariance[i * 3 + 0] =
- imu_data.linear_acceleration_convariance.r[i * 3 + xIdx];
- imu_raw_msg.linear_acceleration_covariance[i * 3 + 1] =
- imu_data.linear_acceleration_convariance.r[i * 3 + yIdx];
- imu_raw_msg.linear_acceleration_covariance[i * 3 + 2] =
- imu_data.linear_acceleration_convariance.r[i * 3 + zIdx];
-
- imu_raw_msg.angular_velocity_covariance[i * 3 + 0] =
- imu_data.angular_velocity_convariance.r[i * 3 + xIdx];
- imu_raw_msg.angular_velocity_covariance[i * 3 + 1] =
- imu_data.angular_velocity_convariance.r[i * 3 + yIdx];
- imu_raw_msg.angular_velocity_covariance[i * 3 + 2] =
- imu_data.angular_velocity_convariance.r[i * 3 + zIdx];
+ void ZEDWrapperNodelet::publishImage(cv::Mat img,
+ image_transport::Publisher& pubImg,
+ string imgFrameId, ros::Time t) {
+ pubImg.publish(
+ imageToROSmsg(img, sensor_msgs::image_encodings::BGR8, imgFrameId, t));
}
- imu_raw_msg.orientation_covariance[0] =
- -1; // Orientation data is not available in "data_raw" -> See ROS REP145
- // http://www.ros.org/reps/rep-0145.html#topics
+ void ZEDWrapperNodelet::publishDepth(cv::Mat depth, ros::Time t) {
+ string encoding;
+ if (openniDepthMode) {
+ depth *= 1000.0f;
+ depth.convertTo(depth, CV_16UC1); // in mm, rounded
+ encoding = sensor_msgs::image_encodings::TYPE_16UC1;
+ } else {
+ encoding = sensor_msgs::image_encodings::TYPE_32FC1;
+ }
- pubImuRaw.publish(imu_raw_msg);
- }
-
- // Publish IMU tf only if enabled
- if (publishTf) {
- // Camera to map transform from TF buffer
- tf2::Transform base_to_map;
- // Look up the transformation from base frame to map link
- try {
- // Save the transformation from base to frame
- geometry_msgs::TransformStamped b2m =
- tfBuffer->lookupTransform(mapFrameId, baseFrameId, ros::Time(0));
- // Get the TF2 transformation
- tf2::fromMsg(b2m.transform, base_to_map);
- } catch (tf2::TransformException &ex) {
- NODELET_WARN_THROTTLE(
- 10.0, "The tf from '%s' to '%s' does not seem to be available. "
- "IMU TF not published!",
- baseFrameId.c_str(), mapFrameId.c_str());
- NODELET_DEBUG_THROTTLE(1.0, "Transform error: %s", ex.what());
- return;
+ pubDepth.publish(imageToROSmsg(depth, encoding, depthOptFrameId, t));
}
- // IMU Quaternion in Map frame
- tf2::Quaternion imu_q;
- imu_q.setX(xSign * imu_data.getOrientation()[xIdx]);
- imu_q.setY(ySign * imu_data.getOrientation()[yIdx]);
- imu_q.setZ(zSign * imu_data.getOrientation()[zIdx]);
- imu_q.setW(imu_data.getOrientation()[3]);
-
- // Pose Quaternion from ZED Camera
- tf2::Quaternion map_q = base_to_map.getRotation();
-
- // Difference between IMU and ZED Quaternion
- tf2::Quaternion delta_q = imu_q * map_q.inverse();
-
- tf2::Transform imu_pose;
- imu_pose.setIdentity();
- imu_pose.setRotation(delta_q);
-
- // Note, the frame is published, but its values will only change if someone
- // has subscribed to IMU
- publishImuFrame(imu_pose); // publish the imu Frame
- }
-}
-
-void ZEDWrapperNodelet::device_poll() {
- ros::Rate loop_rate(frameRate);
-
- ros::Time old_t =
- sl_tools::slTime2Ros(zed.getTimestamp(sl::TIME_REFERENCE_CURRENT));
- imuTime = old_t;
-
- sl::ERROR_CODE grab_status;
- trackingActivated = false;
-
- // Get the parameters of the ZED images
- camWidth = zed.getResolution().width;
- camHeight = zed.getResolution().height;
- NODELET_DEBUG_STREAM("Camera Frame size : " << camWidth << "x" << camHeight);
-
- matWidth = static_cast<int>(camWidth * matResizeFactor);
- matHeight = static_cast<int>(camHeight * matResizeFactor);
- NODELET_DEBUG_STREAM("Data Mat size : " << matWidth << "x" << matHeight);
-
- cv::Size cvSize(matWidth, matWidth);
- leftImRGB = cv::Mat(cvSize, CV_8UC3);
- rightImRGB = cv::Mat(cvSize, CV_8UC3);
- confImRGB = cv::Mat(cvSize, CV_8UC3);
- confMapFloat = cv::Mat(cvSize, CV_32FC1);
-
- // Create and fill the camera information messages
- fillCamInfo(zed, leftCamInfoMsg, rightCamInfoMsg, leftCamOptFrameId,
- rightCamOptFrameId);
- fillCamInfo(zed, leftCamInfoRawMsg, rightCamInfoRawMsg, leftCamOptFrameId,
- rightCamOptFrameId, true);
- rgbCamInfoMsg = depthCamInfoMsg = leftCamInfoMsg; // the reference camera is
- // the Left one (next to the
- // ZED logo)
- rgbCamInfoRawMsg = leftCamInfoRawMsg;
-
- sl::RuntimeParameters runParams;
- runParams.sensing_mode = static_cast<sl::SENSING_MODE>(sensingMode);
-
- sl::Mat leftZEDMat, rightZEDMat, depthZEDMat, disparityZEDMat, confImgZEDMat,
- confMapZEDMat;
- // Main loop
- while (nhNs.ok()) {
- // Check for subscribers
- int rgb_SubNumber = pubRgb.getNumSubscribers();
- int rgb_raw_SubNumber = pubRawRgb.getNumSubscribers();
- int left_SubNumber = pubLeft.getNumSubscribers();
- int left_raw_SubNumber = pubRawLeft.getNumSubscribers();
- int right_SubNumber = pubRight.getNumSubscribers();
- int right_raw_SubNumber = pubRawRight.getNumSubscribers();
- int depth_SubNumber = pubDepth.getNumSubscribers();
- int disparity_SubNumber = pubDisparity.getNumSubscribers();
- int cloud_SubNumber = pubCloud.getNumSubscribers();
- int pose_SubNumber = pubPose.getNumSubscribers();
- int odom_SubNumber = pubOdom.getNumSubscribers();
- int conf_img_SubNumber = pubConfImg.getNumSubscribers();
- int conf_map_SubNumber = pubConfMap.getNumSubscribers();
- int imu_SubNumber = pubImu.getNumSubscribers();
- int imu_RawSubNumber = pubImuRaw.getNumSubscribers();
- bool runLoop = (rgb_SubNumber + rgb_raw_SubNumber + left_SubNumber +
- left_raw_SubNumber + right_SubNumber + right_raw_SubNumber +
- depth_SubNumber + disparity_SubNumber + cloud_SubNumber +
- pose_SubNumber + odom_SubNumber + conf_img_SubNumber +
- conf_map_SubNumber + imu_SubNumber + imu_RawSubNumber) > 0;
-
- runParams.enable_point_cloud = false;
- if (cloud_SubNumber > 0)
- runParams.enable_point_cloud = true;
- // Run the loop only if there is some subscribers
- if (runLoop) {
- if ((depthStabilization || pose_SubNumber > 0 || odom_SubNumber > 0 ||
- cloud_SubNumber > 0 || depth_SubNumber > 0) &&
- !trackingActivated) { // Start the tracking
- start_tracking();
- } else if (!depthStabilization && pose_SubNumber == 0 &&
- odom_SubNumber == 0 &&
- trackingActivated) { // Stop the tracking
- zed.disableTracking();
- trackingActivated = false;
- }
- computeDepth = (depth_SubNumber + disparity_SubNumber + cloud_SubNumber +
- pose_SubNumber + odom_SubNumber + conf_img_SubNumber +
- conf_map_SubNumber) > 0; // Detect if one of the
- // subscriber need to have the
- // depth information
-
- // Timestamp
- ros::Time t =
- sl_tools::slTime2Ros(zed.getTimestamp(sl::TIME_REFERENCE_IMAGE));
-
- grabbing = true;
- if (computeDepth) {
- int actual_confidence = zed.getConfidenceThreshold();
- if (actual_confidence != confidence)
- zed.setConfidenceThreshold(confidence);
- runParams.enable_depth = true; // Ask to compute the depth
- } else
- runParams.enable_depth = false;
-
- grab_status = zed.grab(runParams); // Ask to not compute the depth
-
- grabbing = false;
-
- // cout << toString(grab_status) << endl;
- if (grab_status !=
- sl::ERROR_CODE::SUCCESS) { // Detect if a error occurred (for example:
- // the zed have been disconnected) and
- // re-initialize the ZED
-
- if (grab_status == sl::ERROR_CODE_NOT_A_NEW_FRAME) {
- NODELET_DEBUG_THROTTLE(1.0, "Wait for a new image to proceed");
- } else
- NODELET_INFO_STREAM_ONCE(toString(grab_status));
-
- std::this_thread::sleep_for(std::chrono::milliseconds(2));
-
- if ((t - old_t).toSec() > 5) {
- zed.close();
-
- NODELET_INFO("Re-opening the ZED");
- sl::ERROR_CODE err = sl::ERROR_CODE_CAMERA_NOT_DETECTED;
- while (err != sl::SUCCESS) {
- if (!nhNs.ok()) {
- zed.close();
- return;
- }
+ void ZEDWrapperNodelet::publishDisparity(cv::Mat disparity, ros::Time t) {
+ sl::CameraInformation zedParam =
+ zed.getCameraInformation(sl::Resolution(matWidth, matHeight));
+ sensor_msgs::ImagePtr disparity_image = imageToROSmsg(
+ disparity, sensor_msgs::image_encodings::TYPE_32FC1, disparityFrameId, t);
+
+ stereo_msgs::DisparityImage msg;
+ msg.image = *disparity_image;
+ msg.header = msg.image.header;
+ msg.f = zedParam.calibration_parameters.left_cam.fx;
+ msg.T = zedParam.calibration_parameters.T.x;
+ msg.min_disparity = msg.f * msg.T / zed.getDepthMaxRangeValue();
+ msg.max_disparity = msg.f * msg.T / zed.getDepthMinRangeValue();
+ pubDisparity.publish(msg);
+ }
- int id = sl_tools::checkCameraReady(serial_number);
- if (id > 0) {
- param.camera_linux_id = id;
- err = zed.open(param); // Try to initialize the ZED
- NODELET_INFO_STREAM(toString(err));
- } else
- NODELET_INFO("Waiting for the ZED to be re-connected");
- std::this_thread::sleep_for(std::chrono::milliseconds(2000));
- }
- trackingActivated = false;
- if (depthStabilization || pose_SubNumber > 0 ||
- odom_SubNumber > 0) { // Start the tracking
- start_tracking();
- }
+ void ZEDWrapperNodelet::pointcloud_thread() {
+ std::unique_lock<std::mutex> lock(mPcMutex);
+ while (!mStopNode) {
+ while (!mPcDataReady) { // loop to avoid spurious wakeups
+ if (mPcDataReadyCondVar.wait_for(lock, std::chrono::milliseconds(500)) == std::cv_status::timeout) {
+ // Check thread stopping
+ if (mStopNode) {
+ return;
+ } else {
+ continue;
+ }
+ }
+
+ publishPointCloud();
+ mPcDataReady = false;
+ }
}
- continue;
- }
-
- // Time update
- old_t =
- sl_tools::slTime2Ros(zed.getTimestamp(sl::TIME_REFERENCE_CURRENT));
-
- if (autoExposure) {
- // getCameraSettings() can't check status of auto exposure
- // triggerAutoExposure is used to execute setCameraSettings() only once
- if (triggerAutoExposure) {
- zed.setCameraSettings(sl::CAMERA_SETTINGS_EXPOSURE, 0, true);
- triggerAutoExposure = false;
+
+ NODELET_DEBUG("Pointcloud thread finished");
+ }
+
+ void ZEDWrapperNodelet::publishPointCloud() {
+ // Initialize Point Cloud message
+ // https://github.com/ros/common_msgs/blob/jade-devel/sensor_msgs/include/sensor_msgs/point_cloud2_iterator.h
+ int ptsCount = matWidth * matHeight;
+ mPointcloudMsg.header.stamp = pointCloudTime;
+ if (mPointcloudMsg.width != matWidth || mPointcloudMsg.height != matHeight) {
+ mPointcloudMsg.header.frame_id = pointCloudFrameId; // Set the header values of the ROS message
+ mPointcloudMsg.is_bigendian = false;
+ mPointcloudMsg.is_dense = false;
+
+ sensor_msgs::PointCloud2Modifier modifier(mPointcloudMsg);
+ modifier.setPointCloud2Fields(4,
+ "x", 1, sensor_msgs::PointField::FLOAT32,
+ "y", 1, sensor_msgs::PointField::FLOAT32,
+ "z", 1, sensor_msgs::PointField::FLOAT32,
+ "rgb", 1, sensor_msgs::PointField::FLOAT32);
+
+ modifier.resize(ptsCount);
}
- } else {
- int actual_exposure =
- zed.getCameraSettings(sl::CAMERA_SETTINGS_EXPOSURE);
- if (actual_exposure != exposure)
- zed.setCameraSettings(sl::CAMERA_SETTINGS_EXPOSURE, exposure);
-
- int actual_gain = zed.getCameraSettings(sl::CAMERA_SETTINGS_GAIN);
- if (actual_gain != gain)
- zed.setCameraSettings(sl::CAMERA_SETTINGS_GAIN, gain);
- }
-
- dataMutex.lock();
-
- // Publish the left == rgb image if someone has subscribed to
- if (left_SubNumber > 0 || rgb_SubNumber > 0) {
- // Retrieve RGBA Left image
- zed.retrieveImage(leftZEDMat, sl::VIEW_LEFT, sl::MEM_CPU, matWidth,
- matHeight);
- cv::cvtColor(sl_tools::toCVMat(leftZEDMat), leftImRGB, CV_RGBA2RGB);
- if (left_SubNumber > 0) {
- publishCamInfo(leftCamInfoMsg, pubLeftCamInfo, t);
- publishImage(leftImRGB, pubLeft, leftCamOptFrameId, t);
+
+ sl::Vector4<float>* cpu_cloud = cloud.getPtr<sl::float4>();
+
+ // Data copy
+ float* ptCloudPtr = (float*)(&mPointcloudMsg.data[0]);
+
+ #pragma omp parallel for
+ for (size_t i = 0; i < ptsCount; ++i) {
+ ptCloudPtr[i * 4 + 0] = xSign * cpu_cloud[i][xIdx];
+ ptCloudPtr[i * 4 + 1] = ySign * cpu_cloud[i][yIdx];
+ ptCloudPtr[i * 4 + 2] = zSign * cpu_cloud[i][zIdx];
+ ptCloudPtr[i * 4 + 3] = cpu_cloud[i][3];
}
- if (rgb_SubNumber > 0) {
- publishCamInfo(rgbCamInfoMsg, pubRgbCamInfo, t);
- publishImage(leftImRGB, pubRgb, depthOptFrameId,
- t); // rgb is the left image
+
+ // Pointcloud publishing
+ pubCloud.publish(mPointcloudMsg);
+ }
+
+ void ZEDWrapperNodelet::publishCamInfo(sensor_msgs::CameraInfoPtr camInfoMsg,
+ ros::Publisher pubCamInfo, ros::Time t) {
+ static int seq = 0;
+ camInfoMsg->header.stamp = t;
+ camInfoMsg->header.seq = seq;
+ pubCamInfo.publish(camInfoMsg);
+ seq++;
+ }
+
+ void ZEDWrapperNodelet::fillCamInfo(
+ sl::Camera& zed, sensor_msgs::CameraInfoPtr left_cam_info_msg,
+ sensor_msgs::CameraInfoPtr right_cam_info_msg, string leftFrameId,
+ string rightFrameId, bool rawParam /*= false*/) {
+ sl::CalibrationParameters zedParam;
+
+ if (rawParam)
+ zedParam = zed.getCameraInformation(sl::Resolution(matWidth, matHeight))
+ .calibration_parameters_raw;
+ else
+ zedParam = zed.getCameraInformation(sl::Resolution(matWidth, matHeight))
+ .calibration_parameters;
+
+ float baseline = zedParam.T[0];
+
+ left_cam_info_msg->distortion_model =
+ sensor_msgs::distortion_models::PLUMB_BOB;
+ right_cam_info_msg->distortion_model =
+ sensor_msgs::distortion_models::PLUMB_BOB;
+
+ left_cam_info_msg->D.resize(5);
+ right_cam_info_msg->D.resize(5);
+ left_cam_info_msg->D[0] = zedParam.left_cam.disto[0]; // k1
+ left_cam_info_msg->D[1] = zedParam.left_cam.disto[1]; // k2
+ left_cam_info_msg->D[2] = zedParam.left_cam.disto[4]; // k3
+ left_cam_info_msg->D[3] = zedParam.left_cam.disto[2]; // p1
+ left_cam_info_msg->D[4] = zedParam.left_cam.disto[3]; // p2
+ right_cam_info_msg->D[0] = zedParam.right_cam.disto[0]; // k1
+ right_cam_info_msg->D[1] = zedParam.right_cam.disto[1]; // k2
+ right_cam_info_msg->D[2] = zedParam.right_cam.disto[4]; // k3
+ right_cam_info_msg->D[3] = zedParam.right_cam.disto[2]; // p1
+ right_cam_info_msg->D[4] = zedParam.right_cam.disto[3]; // p2
+
+ left_cam_info_msg->K.fill(0.0);
+ right_cam_info_msg->K.fill(0.0);
+ left_cam_info_msg->K[0] = zedParam.left_cam.fx;
+ left_cam_info_msg->K[2] = zedParam.left_cam.cx;
+ left_cam_info_msg->K[4] = zedParam.left_cam.fy;
+ left_cam_info_msg->K[5] = zedParam.left_cam.cy;
+ left_cam_info_msg->K[8] = 1.0;
+ right_cam_info_msg->K[0] = zedParam.right_cam.fx;
+ right_cam_info_msg->K[2] = zedParam.right_cam.cx;
+ right_cam_info_msg->K[4] = zedParam.right_cam.fy;
+ right_cam_info_msg->K[5] = zedParam.right_cam.cy;
+ right_cam_info_msg->K[8] = 1.0;
+
+ left_cam_info_msg->R.fill(0.0);
+ right_cam_info_msg->R.fill(0.0);
+ for (int i = 0; i < 3; i++) {
+ // identity
+ right_cam_info_msg->R[i + i * 3] = 1;
+ left_cam_info_msg->R[i + i * 3] = 1;
}
- }
-
- // Publish the left_raw == rgb_raw image if someone has subscribed to
- if (left_raw_SubNumber > 0 || rgb_raw_SubNumber > 0) {
- // Retrieve RGBA Left image
- zed.retrieveImage(leftZEDMat, sl::VIEW_LEFT_UNRECTIFIED, sl::MEM_CPU,
- matWidth, matHeight);
- cv::cvtColor(sl_tools::toCVMat(leftZEDMat), leftImRGB, CV_RGBA2RGB);
- if (left_raw_SubNumber > 0) {
- publishCamInfo(leftCamInfoRawMsg, pubLeftCamInfoRaw, t);
- publishImage(leftImRGB, pubRawLeft, leftCamOptFrameId, t);
+
+ if (rawParam) {
+ cv::Mat R_ = sl_tools::convertRodrigues(zedParam.R);
+ float* p = (float*)R_.data;
+ for (int i = 0; i < 9; i++) {
+ right_cam_info_msg->R[i] = p[i];
+ }
}
- if (rgb_raw_SubNumber > 0) {
- publishCamInfo(rgbCamInfoRawMsg, pubRgbCamInfoRaw, t);
- publishImage(leftImRGB, pubRawRgb, depthOptFrameId, t);
+
+ left_cam_info_msg->P.fill(0.0);
+ right_cam_info_msg->P.fill(0.0);
+ left_cam_info_msg->P[0] = zedParam.left_cam.fx;
+ left_cam_info_msg->P[2] = zedParam.left_cam.cx;
+ left_cam_info_msg->P[5] = zedParam.left_cam.fy;
+ left_cam_info_msg->P[6] = zedParam.left_cam.cy;
+ left_cam_info_msg->P[10] = 1.0;
+ // http://docs.ros.org/api/sensor_msgs/html/msg/CameraInfo.html
+ right_cam_info_msg->P[3] = (-1 * zedParam.left_cam.fx * baseline);
+
+ right_cam_info_msg->P[0] = zedParam.right_cam.fx;
+ right_cam_info_msg->P[2] = zedParam.right_cam.cx;
+ right_cam_info_msg->P[5] = zedParam.right_cam.fy;
+ right_cam_info_msg->P[6] = zedParam.right_cam.cy;
+ right_cam_info_msg->P[10] = 1.0;
+
+ left_cam_info_msg->width = right_cam_info_msg->width = matWidth;
+ left_cam_info_msg->height = right_cam_info_msg->height = matHeight;
+
+ left_cam_info_msg->header.frame_id = leftFrameId;
+ right_cam_info_msg->header.frame_id = rightFrameId;
+ }
+
+ void ZEDWrapperNodelet::dynamicReconfCallback(zed_wrapper::ZedConfig& config,
+ uint32_t level) {
+ switch (level) {
+ case 0:
+ confidence = config.confidence;
+ NODELET_INFO("Reconfigure confidence : %d", confidence);
+ break;
+ case 1:
+ exposure = config.exposure;
+ NODELET_INFO("Reconfigure exposure : %d", exposure);
+ break;
+ case 2:
+ gain = config.gain;
+ NODELET_INFO("Reconfigure gain : %d", gain);
+ break;
+ case 3:
+ autoExposure = config.auto_exposure;
+ if (autoExposure) {
+ triggerAutoExposure = true;
+ }
+ NODELET_INFO("Reconfigure auto control of exposure and gain : %s",
+ autoExposure ? "Enable" : "Disable");
+ break;
+ case 4:
+ matResizeFactor = config.mat_resize_factor;
+ NODELET_INFO("Reconfigure mat_resize_factor: %g", matResizeFactor);
+
+ dataMutex.lock();
+
+ matWidth = static_cast<int>(camWidth * matResizeFactor);
+ matHeight = static_cast<int>(camHeight * matResizeFactor);
+ NODELET_DEBUG_STREAM("Data Mat size : " << matWidth << "x" << matHeight);
+
+ // Update Camera Info
+ fillCamInfo(zed, leftCamInfoMsg, rightCamInfoMsg, leftCamOptFrameId,
+ rightCamOptFrameId);
+ fillCamInfo(zed, leftCamInfoRawMsg, rightCamInfoRawMsg, leftCamOptFrameId,
+ rightCamOptFrameId, true);
+ rgbCamInfoMsg = depthCamInfoMsg = leftCamInfoMsg; // the reference camera is
+ // the Left one (next to
+ // the ZED logo)
+ rgbCamInfoRawMsg = leftCamInfoRawMsg;
+
+ dataMutex.unlock();
+
+ break;
}
- }
-
- // Publish the right image if someone has subscribed to
- if (right_SubNumber > 0) {
- // Retrieve RGBA Right image
- zed.retrieveImage(rightZEDMat, sl::VIEW_RIGHT, sl::MEM_CPU, matWidth,
- matHeight);
- cv::cvtColor(sl_tools::toCVMat(rightZEDMat), rightImRGB, CV_RGBA2RGB);
- publishCamInfo(rightCamInfoMsg, pubRightCamInfo, t);
- publishImage(rightImRGB, pubRight, rightCamOptFrameId, t);
- }
-
- // Publish the right image if someone has subscribed to
- if (right_raw_SubNumber > 0) {
- // Retrieve RGBA Right image
- zed.retrieveImage(rightZEDMat, sl::VIEW_RIGHT_UNRECTIFIED, sl::MEM_CPU,
- matWidth, matHeight);
- cv::cvtColor(sl_tools::toCVMat(rightZEDMat), rightImRGB, CV_RGBA2RGB);
- publishCamInfo(rightCamInfoRawMsg, pubRightCamInfoRaw, t);
- publishImage(rightImRGB, pubRawRight, rightCamOptFrameId, t);
- }
-
- // Publish the depth image if someone has subscribed to
- if (depth_SubNumber > 0 || disparity_SubNumber > 0) {
- zed.retrieveMeasure(depthZEDMat, sl::MEASURE_DEPTH, sl::MEM_CPU,
- matWidth, matHeight);
- publishCamInfo(depthCamInfoMsg, pubDepthCamInfo, t);
- publishDepth(sl_tools::toCVMat(depthZEDMat), t); // in meters
- }
-
- // Publish the disparity image if someone has subscribed to
- if (disparity_SubNumber > 0) {
- zed.retrieveMeasure(disparityZEDMat, sl::MEASURE_DISPARITY, sl::MEM_CPU,
- matWidth, matHeight);
- // Need to flip sign, but cause of this is not sure
- cv::Mat disparity = sl_tools::toCVMat(disparityZEDMat) * -1.0;
- publishDisparity(disparity, t);
- }
-
- // Publish the confidence image if someone has subscribed to
- if (conf_img_SubNumber > 0) {
- zed.retrieveImage(confImgZEDMat, sl::VIEW_CONFIDENCE, sl::MEM_CPU,
- matWidth, matHeight);
- cv::cvtColor(sl_tools::toCVMat(confImgZEDMat), confImRGB, CV_RGBA2RGB);
- publishImage(confImRGB, pubConfImg, confidenceOptFrameId, t);
- }
-
- // Publish the confidence map if someone has subscribed to
- if (conf_map_SubNumber > 0) {
- zed.retrieveMeasure(confMapZEDMat, sl::MEASURE_CONFIDENCE, sl::MEM_CPU,
- matWidth, matHeight);
- confMapFloat = sl_tools::toCVMat(confMapZEDMat);
- pubConfMap.publish(imageToROSmsg(
- confMapFloat, sensor_msgs::image_encodings::TYPE_32FC1,
- confidenceOptFrameId, t));
- }
-
- // Publish the point cloud if someone has subscribed to
- if (cloud_SubNumber > 0) {
- // Run the point cloud conversion asynchronously to avoid slowing down
- // all the program
- // Retrieve raw pointCloud data
- zed.retrieveMeasure(cloud, sl::MEASURE_XYZBGRA, sl::MEM_CPU, matWidth,
- matHeight);
- pointCloudFrameId = depthFrameId;
- pointCloudTime = t;
- publishPointCloud(matWidth, matHeight);
- }
-
- dataMutex.unlock();
-
- // Transform from base to sensor
- tf2::Transform sensor_to_base_transf;
- // Look up the transformation from base frame to camera link
- try {
- // Save the transformation from base to frame
- geometry_msgs::TransformStamped s2b =
- tfBuffer->lookupTransform(baseFrameId, depthFrameId, t);
- // Get the TF2 transformation
- tf2::fromMsg(s2b.transform, sensor_to_base_transf);
-
- } catch (tf2::TransformException &ex) {
- NODELET_WARN_THROTTLE(
- 10.0, "The tf from '%s' to '%s' does not seem to be available, "
- "will assume it as identity!",
- depthFrameId.c_str(), baseFrameId.c_str());
- NODELET_DEBUG("Transform error: %s", ex.what());
- sensor_to_base_transf.setIdentity();
- }
-
- // Publish the odometry if someone has subscribed to
- if (pose_SubNumber > 0 || odom_SubNumber > 0 || cloud_SubNumber > 0 ||
- depth_SubNumber > 0 || imu_SubNumber > 0 || imu_RawSubNumber > 0) {
- if (!initOdomWithPose) {
- sl::Pose deltaOdom;
- zed.getPosition(deltaOdom, sl::REFERENCE_FRAME_CAMERA);
-
- // Transform ZED delta odom pose in TF2 Transformation
- geometry_msgs::Transform deltaTransf;
-
- sl::Translation translation = deltaOdom.getTranslation();
- sl::Orientation quat = deltaOdom.getOrientation();
-
- deltaTransf.translation.x = xSign * translation(xIdx);
- deltaTransf.translation.y = ySign * translation(yIdx);
- deltaTransf.translation.z = zSign * translation(zIdx);
-
- deltaTransf.rotation.x = xSign * quat(xIdx);
- deltaTransf.rotation.y = ySign * quat(yIdx);
- deltaTransf.rotation.z = zSign * quat(zIdx);
- deltaTransf.rotation.w = quat(3);
-
- tf2::Transform deltaOdomTf;
- tf2::fromMsg(deltaTransf, deltaOdomTf);
-
- // delta odom from sensor to base frame
- tf2::Transform deltaOdomTf_base = sensor_to_base_transf *
- deltaOdomTf *
- sensor_to_base_transf.inverse();
-
- // Propagate Odom transform in time
- baseToOdomTransform = baseToOdomTransform * deltaOdomTf_base;
-
- // Publish odometry message
- publishOdom(baseToOdomTransform, t);
+ }
+
+ void ZEDWrapperNodelet::imuPubCallback(const ros::TimerEvent& e) {
+ int imu_SubNumber = pubImu.getNumSubscribers();
+ int imu_RawSubNumber = pubImuRaw.getNumSubscribers();
+
+ if (imu_SubNumber < 1 && imu_RawSubNumber < 1) {
+ return;
}
- }
- // Publish the zed camera pose if someone has subscribed to
- if (pose_SubNumber > 0 || odom_SubNumber > 0 || cloud_SubNumber > 0 ||
- depth_SubNumber > 0 || imu_SubNumber > 0 || imu_RawSubNumber > 0) {
- sl::Pose zed_pose; // Sensor to Map transform
- zed.getPosition(zed_pose, sl::REFERENCE_FRAME_WORLD);
+ ros::Time t =
+ sl_tools::slTime2Ros(zed.getTimestamp(sl::TIME_REFERENCE_IMAGE));
+
+ sl::IMUData imu_data;
+ zed.getIMUData(imu_data, sl::TIME_REFERENCE_CURRENT);
+
+ if (imu_SubNumber > 0) {
+ sensor_msgs::Imu imu_msg;
+ imu_msg.header.stamp = imuTime; // t;
+ imu_msg.header.frame_id = imuFrameId;
+
+ imu_msg.orientation.x = xSign * imu_data.getOrientation()[xIdx];
+ imu_msg.orientation.y = ySign * imu_data.getOrientation()[yIdx];
+ imu_msg.orientation.z = zSign * imu_data.getOrientation()[zIdx];
+ imu_msg.orientation.w = imu_data.getOrientation()[3];
+
+ imu_msg.angular_velocity.x = xSign * imu_data.angular_velocity[xIdx];
+ imu_msg.angular_velocity.y = ySign * imu_data.angular_velocity[yIdx];
+ imu_msg.angular_velocity.z = zSign * imu_data.angular_velocity[zIdx];
+
+ imu_msg.linear_acceleration.x = xSign * imu_data.linear_acceleration[xIdx];
+ imu_msg.linear_acceleration.y = ySign * imu_data.linear_acceleration[yIdx];
+ imu_msg.linear_acceleration.z = zSign * imu_data.linear_acceleration[zIdx];
+
+ for (int i = 0; i < 3; i += 3) {
+ imu_msg.orientation_covariance[i * 3 + 0] =
+ imu_data.orientation_covariance.r[i * 3 + xIdx];
+ imu_msg.orientation_covariance[i * 3 + 1] =
+ imu_data.orientation_covariance.r[i * 3 + yIdx];
+ imu_msg.orientation_covariance[i * 3 + 2] =
+ imu_data.orientation_covariance.r[i * 3 + zIdx];
+
+ imu_msg.linear_acceleration_covariance[i * 3 + 0] =
+ imu_data.linear_acceleration_convariance.r[i * 3 + xIdx];
+ imu_msg.linear_acceleration_covariance[i * 3 + 1] =
+ imu_data.linear_acceleration_convariance.r[i * 3 + yIdx];
+ imu_msg.linear_acceleration_covariance[i * 3 + 2] =
+ imu_data.linear_acceleration_convariance.r[i * 3 + zIdx];
+
+ imu_msg.angular_velocity_covariance[i * 3 + 0] =
+ imu_data.angular_velocity_convariance.r[i * 3 + xIdx];
+ imu_msg.angular_velocity_covariance[i * 3 + 1] =
+ imu_data.angular_velocity_convariance.r[i * 3 + yIdx];
+ imu_msg.angular_velocity_covariance[i * 3 + 2] =
+ imu_data.angular_velocity_convariance.r[i * 3 + zIdx];
+ }
- // Transform ZED pose in TF2 Transformation
- geometry_msgs::Transform sens2mapTransf;
+ pubImu.publish(imu_msg);
+ }
- sl::Translation translation = zed_pose.getTranslation();
- sl::Orientation quat = zed_pose.getOrientation();
+ if (imu_RawSubNumber > 0) {
+ sensor_msgs::Imu imu_raw_msg;
+ imu_raw_msg.header.stamp = imuTime; // t;
+ imu_raw_msg.header.frame_id = imuFrameId;
+
+ imu_raw_msg.angular_velocity.x = xSign * imu_data.angular_velocity[xIdx];
+ imu_raw_msg.angular_velocity.y = ySign * imu_data.angular_velocity[yIdx];
+ imu_raw_msg.angular_velocity.z = zSign * imu_data.angular_velocity[zIdx];
+
+ imu_raw_msg.linear_acceleration.x =
+ xSign * imu_data.linear_acceleration[xIdx];
+ imu_raw_msg.linear_acceleration.y =
+ ySign * imu_data.linear_acceleration[yIdx];
+ imu_raw_msg.linear_acceleration.z =
+ zSign * imu_data.linear_acceleration[zIdx];
+
+ for (int i = 0; i < 3; i += 3) {
+ imu_raw_msg.linear_acceleration_covariance[i * 3 + 0] =
+ imu_data.linear_acceleration_convariance.r[i * 3 + xIdx];
+ imu_raw_msg.linear_acceleration_covariance[i * 3 + 1] =
+ imu_data.linear_acceleration_convariance.r[i * 3 + yIdx];
+ imu_raw_msg.linear_acceleration_covariance[i * 3 + 2] =
+ imu_data.linear_acceleration_convariance.r[i * 3 + zIdx];
+
+ imu_raw_msg.angular_velocity_covariance[i * 3 + 0] =
+ imu_data.angular_velocity_convariance.r[i * 3 + xIdx];
+ imu_raw_msg.angular_velocity_covariance[i * 3 + 1] =
+ imu_data.angular_velocity_convariance.r[i * 3 + yIdx];
+ imu_raw_msg.angular_velocity_covariance[i * 3 + 2] =
+ imu_data.angular_velocity_convariance.r[i * 3 + zIdx];
+ }
- sens2mapTransf.translation.x = xSign * translation(xIdx);
- sens2mapTransf.translation.y = ySign * translation(yIdx);
- sens2mapTransf.translation.z = zSign * translation(zIdx);
+ imu_raw_msg.orientation_covariance[0] =
+ -1; // Orientation data is not available in "data_raw" -> See ROS REP145
+ // http://www.ros.org/reps/rep-0145.html#topics
- sens2mapTransf.rotation.x = xSign * quat(xIdx);
- sens2mapTransf.rotation.y = ySign * quat(yIdx);
- sens2mapTransf.rotation.z = zSign * quat(zIdx);
- sens2mapTransf.rotation.w = quat(3);
+ pubImuRaw.publish(imu_raw_msg);
+ }
- tf2::Transform sens_to_map_transf;
- tf2::fromMsg(sens2mapTransf, sens_to_map_transf);
+ // Publish IMU tf only if enabled
+ if (publishTf) {
+ // Camera to map transform from TF buffer
+ tf2::Transform base_to_map;
+ // Look up the transformation from base frame to map link
+ try {
+ // Save the transformation from base to frame
+ geometry_msgs::TransformStamped b2m =
+ tfBuffer->lookupTransform(mapFrameId, baseFrameId, ros::Time(0));
+ // Get the TF2 transformation
+ tf2::fromMsg(b2m.transform, base_to_map);
+ } catch (tf2::TransformException& ex) {
+ NODELET_WARN_THROTTLE(
+ 10.0, "The tf from '%s' to '%s' does not seem to be available. "
+ "IMU TF not published!",
+ baseFrameId.c_str(), mapFrameId.c_str());
+ NODELET_DEBUG_THROTTLE(1.0, "Transform error: %s", ex.what());
+ return;
+ }
- // Transformation from camera sensor to base frame
- tf2::Transform base_to_map_transform = sensor_to_base_transf *
- sens_to_map_transf *
- sensor_to_base_transf.inverse();
+ // IMU Quaternion in Map frame
+ tf2::Quaternion imu_q;
+ imu_q.setX(xSign * imu_data.getOrientation()[xIdx]);
+ imu_q.setY(ySign * imu_data.getOrientation()[yIdx]);
+ imu_q.setZ(zSign * imu_data.getOrientation()[zIdx]);
+ imu_q.setW(imu_data.getOrientation()[3]);
- if (initOdomWithPose) {
- // Propagate Odom transform in time
- baseToOdomTransform = base_to_map_transform;
- base_to_map_transform.setIdentity();
+ // Pose Quaternion from ZED Camera
+ tf2::Quaternion map_q = base_to_map.getRotation();
- if (odom_SubNumber > 0) {
- // Publish odometry message
- publishOdom(baseToOdomTransform, t);
- }
+ // Difference between IMU and ZED Quaternion
+ tf2::Quaternion delta_q = imu_q * map_q.inverse();
- initOdomWithPose = false;
- } else {
- // Transformation from map to odometry frame
- odomToMapTransform =
- base_to_map_transform * baseToOdomTransform.inverse();
- }
+ tf2::Transform imu_pose;
+ imu_pose.setIdentity();
+ imu_pose.setRotation(delta_q);
- // Publish Pose message
- publishPose(odomToMapTransform, t);
- }
-
- // Publish pose tf only if enabled
- if (publishTf) {
- // Note, the frame is published, but its values will only change if
- // someone has subscribed to odom
- publishOdomFrame(baseToOdomTransform,
- t); // publish the base Frame in odometry frame
- // Note, the frame is published, but its values will only change if
- // someone has subscribed to map
- publishPoseFrame(odomToMapTransform,
- t); // publish the odometry Frame in map frame
-
- imuTime = t;
- }
-
- static int rateWarnCount = 0;
- if (!loop_rate.sleep()) {
- rateWarnCount++;
-
- if (rateWarnCount == 10) {
- NODELET_DEBUG_THROTTLE(
- 1.0,
- "Working thread is not synchronized with the Camera frame rate");
- NODELET_DEBUG_STREAM_THROTTLE(
- 1.0, "Expected cycle time: " << loop_rate.expectedCycleTime()
- << " - Real cycle time: "
- << loop_rate.cycleTime());
- NODELET_WARN_THROTTLE(10.0, "Elaboration takes longer than requested "
- "by the FPS rate. Please consider to "
- "lower the 'frame_rate' setting.");
+ // Note, the frame is published, but its values will only change if someone
+ // has subscribed to IMU
+ publishImuFrame(imu_pose); // publish the imu Frame
}
- } else {
- rateWarnCount = 0;
- }
- } else {
- NODELET_DEBUG_THROTTLE(1.0, "No topics subscribed by users");
-
- // Publish odometry tf only if enabled
- if (publishTf) {
- ros::Time t =
- sl_tools::slTime2Ros(zed.getTimestamp(sl::TIME_REFERENCE_CURRENT));
- publishOdomFrame(baseToOdomTransform,
- t); // publish the base Frame in odometry frame
- publishPoseFrame(odomToMapTransform,
- t); // publish the odometry Frame in map frame
- }
- std::this_thread::sleep_for(
- std::chrono::milliseconds(10)); // No subscribers, we just wait
-
- loop_rate.reset();
}
- } // while loop
- zed.close();
-}
+ void ZEDWrapperNodelet::device_poll() {
+ ros::Rate loop_rate(frameRate);
+
+ ros::Time old_t =
+ sl_tools::slTime2Ros(zed.getTimestamp(sl::TIME_REFERENCE_CURRENT));
+ imuTime = old_t;
+
+ sl::ERROR_CODE grab_status;
+ trackingActivated = false;
+
+ // Get the parameters of the ZED images
+ camWidth = zed.getResolution().width;
+ camHeight = zed.getResolution().height;
+ NODELET_DEBUG_STREAM("Camera Frame size : " << camWidth << "x" << camHeight);
+
+ matWidth = static_cast<int>(camWidth * matResizeFactor);
+ matHeight = static_cast<int>(camHeight * matResizeFactor);
+ NODELET_DEBUG_STREAM("Data Mat size : " << matWidth << "x" << matHeight);
+
+ cv::Size cvSize(matWidth, matWidth);
+ leftImRGB = cv::Mat(cvSize, CV_8UC3);
+ rightImRGB = cv::Mat(cvSize, CV_8UC3);
+ confImRGB = cv::Mat(cvSize, CV_8UC3);
+ confMapFloat = cv::Mat(cvSize, CV_32FC1);
+
+ // Create and fill the camera information messages
+ fillCamInfo(zed, leftCamInfoMsg, rightCamInfoMsg, leftCamOptFrameId,
+ rightCamOptFrameId);
+ fillCamInfo(zed, leftCamInfoRawMsg, rightCamInfoRawMsg, leftCamOptFrameId,
+ rightCamOptFrameId, true);
+ rgbCamInfoMsg = depthCamInfoMsg = leftCamInfoMsg; // the reference camera is
+ // the Left one (next to the
+ // ZED logo)
+ rgbCamInfoRawMsg = leftCamInfoRawMsg;
+
+ sl::RuntimeParameters runParams;
+ runParams.sensing_mode = static_cast<sl::SENSING_MODE>(sensingMode);
+
+ sl::Mat leftZEDMat, rightZEDMat, depthZEDMat, disparityZEDMat, confImgZEDMat,
+ confMapZEDMat;
+ // Main loop
+ while (nhNs.ok()) {
+ // Check for subscribers
+ int rgb_SubNumber = pubRgb.getNumSubscribers();
+ int rgb_raw_SubNumber = pubRawRgb.getNumSubscribers();
+ int left_SubNumber = pubLeft.getNumSubscribers();
+ int left_raw_SubNumber = pubRawLeft.getNumSubscribers();
+ int right_SubNumber = pubRight.getNumSubscribers();
+ int right_raw_SubNumber = pubRawRight.getNumSubscribers();
+ int depth_SubNumber = pubDepth.getNumSubscribers();
+ int disparity_SubNumber = pubDisparity.getNumSubscribers();
+ int cloud_SubNumber = pubCloud.getNumSubscribers();
+ int pose_SubNumber = pubPose.getNumSubscribers();
+ int odom_SubNumber = pubOdom.getNumSubscribers();
+ int conf_img_SubNumber = pubConfImg.getNumSubscribers();
+ int conf_map_SubNumber = pubConfMap.getNumSubscribers();
+ int imu_SubNumber = pubImu.getNumSubscribers();
+ int imu_RawSubNumber = pubImuRaw.getNumSubscribers();
+ bool runLoop = (rgb_SubNumber + rgb_raw_SubNumber + left_SubNumber +
+ left_raw_SubNumber + right_SubNumber + right_raw_SubNumber +
+ depth_SubNumber + disparity_SubNumber + cloud_SubNumber +
+ pose_SubNumber + odom_SubNumber + conf_img_SubNumber +
+ conf_map_SubNumber + imu_SubNumber + imu_RawSubNumber) > 0;
+
+ runParams.enable_point_cloud = false;
+ if (cloud_SubNumber > 0) {
+ runParams.enable_point_cloud = true;
+ }
+ // Run the loop only if there is some subscribers
+ if (runLoop) {
+ if ((depthStabilization || pose_SubNumber > 0 || odom_SubNumber > 0 ||
+ cloud_SubNumber > 0 || depth_SubNumber > 0) &&
+ !trackingActivated) { // Start the tracking
+ start_tracking();
+ } else if (!depthStabilization && pose_SubNumber == 0 &&
+ odom_SubNumber == 0 &&
+ trackingActivated) { // Stop the tracking
+ zed.disableTracking();
+ trackingActivated = false;
+ }
+ computeDepth = (depth_SubNumber + disparity_SubNumber + cloud_SubNumber +
+ pose_SubNumber + odom_SubNumber + conf_img_SubNumber +
+ conf_map_SubNumber) > 0; // Detect if one of the
+ // subscriber need to have the
+ // depth information
+
+ // Timestamp
+ ros::Time t =
+ sl_tools::slTime2Ros(zed.getTimestamp(sl::TIME_REFERENCE_IMAGE));
+
+ grabbing = true;
+ if (computeDepth) {
+ int actual_confidence = zed.getConfidenceThreshold();
+ if (actual_confidence != confidence) {
+ zed.setConfidenceThreshold(confidence);
+ }
+ runParams.enable_depth = true; // Ask to compute the depth
+ } else {
+ runParams.enable_depth = false;
+ }
+
+ grab_status = zed.grab(runParams); // Ask to not compute the depth
+
+ grabbing = false;
+
+ // cout << toString(grab_status) << endl;
+ if (grab_status !=
+ sl::ERROR_CODE::SUCCESS) { // Detect if a error occurred (for example:
+ // the zed have been disconnected) and
+ // re-initialize the ZED
+
+ if (grab_status == sl::ERROR_CODE_NOT_A_NEW_FRAME) {
+ NODELET_DEBUG_THROTTLE(1.0, "Wait for a new image to proceed");
+ } else {
+ NODELET_INFO_STREAM_ONCE(toString(grab_status));
+ }
+
+ std::this_thread::sleep_for(std::chrono::milliseconds(2));
+
+ if ((t - old_t).toSec() > 5) {
+ zed.close();
+
+ NODELET_INFO("Re-opening the ZED");
+ sl::ERROR_CODE err = sl::ERROR_CODE_CAMERA_NOT_DETECTED;
+ while (err != sl::SUCCESS) {
+ if (!nhNs.ok()) {
+ mStopNode = true;
+ zed.close();
+ NODELET_DEBUG("ZED pool thread finished");
+ return;
+ }
+
+ int id = sl_tools::checkCameraReady(serial_number);
+ if (id > 0) {
+ param.camera_linux_id = id;
+ err = zed.open(param); // Try to initialize the ZED
+ NODELET_INFO_STREAM(toString(err));
+ } else {
+ NODELET_INFO("Waiting for the ZED to be re-connected");
+ }
+ std::this_thread::sleep_for(std::chrono::milliseconds(2000));
+ }
+ trackingActivated = false;
+ if (depthStabilization || pose_SubNumber > 0 ||
+ odom_SubNumber > 0) { // Start the tracking
+ start_tracking();
+ }
+ }
+ continue;
+ }
+
+ // Time update
+ old_t =
+ sl_tools::slTime2Ros(zed.getTimestamp(sl::TIME_REFERENCE_CURRENT));
+
+ if (autoExposure) {
+ // getCameraSettings() can't check status of auto exposure
+ // triggerAutoExposure is used to execute setCameraSettings() only once
+ if (triggerAutoExposure) {
+ zed.setCameraSettings(sl::CAMERA_SETTINGS_EXPOSURE, 0, true);
+ triggerAutoExposure = false;
+ }
+ } else {
+ int actual_exposure =
+ zed.getCameraSettings(sl::CAMERA_SETTINGS_EXPOSURE);
+ if (actual_exposure != exposure) {
+ zed.setCameraSettings(sl::CAMERA_SETTINGS_EXPOSURE, exposure);
+ }
+
+ int actual_gain = zed.getCameraSettings(sl::CAMERA_SETTINGS_GAIN);
+ if (actual_gain != gain) {
+ zed.setCameraSettings(sl::CAMERA_SETTINGS_GAIN, gain);
+ }
+ }
+
+ dataMutex.lock();
+
+ // Publish the left == rgb image if someone has subscribed to
+ if (left_SubNumber > 0 || rgb_SubNumber > 0) {
+ // Retrieve RGBA Left image
+ zed.retrieveImage(leftZEDMat, sl::VIEW_LEFT, sl::MEM_CPU, matWidth,
+ matHeight);
+ cv::cvtColor(sl_tools::toCVMat(leftZEDMat), leftImRGB, CV_RGBA2RGB);
+ if (left_SubNumber > 0) {
+ publishCamInfo(leftCamInfoMsg, pubLeftCamInfo, t);
+ publishImage(leftImRGB, pubLeft, leftCamOptFrameId, t);
+ }
+ if (rgb_SubNumber > 0) {
+ publishCamInfo(rgbCamInfoMsg, pubRgbCamInfo, t);
+ publishImage(leftImRGB, pubRgb, depthOptFrameId,
+ t); // rgb is the left image
+ }
+ }
+
+ // Publish the left_raw == rgb_raw image if someone has subscribed to
+ if (left_raw_SubNumber > 0 || rgb_raw_SubNumber > 0) {
+ // Retrieve RGBA Left image
+ zed.retrieveImage(leftZEDMat, sl::VIEW_LEFT_UNRECTIFIED, sl::MEM_CPU,
+ matWidth, matHeight);
+ cv::cvtColor(sl_tools::toCVMat(leftZEDMat), leftImRGB, CV_RGBA2RGB);
+ if (left_raw_SubNumber > 0) {
+ publishCamInfo(leftCamInfoRawMsg, pubLeftCamInfoRaw, t);
+ publishImage(leftImRGB, pubRawLeft, leftCamOptFrameId, t);
+ }
+ if (rgb_raw_SubNumber > 0) {
+ publishCamInfo(rgbCamInfoRawMsg, pubRgbCamInfoRaw, t);
+ publishImage(leftImRGB, pubRawRgb, depthOptFrameId, t);
+ }
+ }
+
+ // Publish the right image if someone has subscribed to
+ if (right_SubNumber > 0) {
+ // Retrieve RGBA Right image
+ zed.retrieveImage(rightZEDMat, sl::VIEW_RIGHT, sl::MEM_CPU, matWidth,
+ matHeight);
+ cv::cvtColor(sl_tools::toCVMat(rightZEDMat), rightImRGB, CV_RGBA2RGB);
+ publishCamInfo(rightCamInfoMsg, pubRightCamInfo, t);
+ publishImage(rightImRGB, pubRight, rightCamOptFrameId, t);
+ }
+
+    // Publish the right raw image if someone has subscribed to
+ if (right_raw_SubNumber > 0) {
+ // Retrieve RGBA Right image
+ zed.retrieveImage(rightZEDMat, sl::VIEW_RIGHT_UNRECTIFIED, sl::MEM_CPU,
+ matWidth, matHeight);
+ cv::cvtColor(sl_tools::toCVMat(rightZEDMat), rightImRGB, CV_RGBA2RGB);
+ publishCamInfo(rightCamInfoRawMsg, pubRightCamInfoRaw, t);
+ publishImage(rightImRGB, pubRawRight, rightCamOptFrameId, t);
+ }
+
+ // Publish the depth image if someone has subscribed to
+ if (depth_SubNumber > 0 || disparity_SubNumber > 0) {
+ zed.retrieveMeasure(depthZEDMat, sl::MEASURE_DEPTH, sl::MEM_CPU,
+ matWidth, matHeight);
+ publishCamInfo(depthCamInfoMsg, pubDepthCamInfo, t);
+ publishDepth(sl_tools::toCVMat(depthZEDMat), t); // in meters
+ }
+
+ // Publish the disparity image if someone has subscribed to
+ if (disparity_SubNumber > 0) {
+ zed.retrieveMeasure(disparityZEDMat, sl::MEASURE_DISPARITY, sl::MEM_CPU,
+ matWidth, matHeight);
+      // The disparity sign must be flipped; the cause of this is not fully understood
+ cv::Mat disparity = sl_tools::toCVMat(disparityZEDMat) * -1.0;
+ publishDisparity(disparity, t);
+ }
+
+ // Publish the confidence image if someone has subscribed to
+ if (conf_img_SubNumber > 0) {
+ zed.retrieveImage(confImgZEDMat, sl::VIEW_CONFIDENCE, sl::MEM_CPU,
+ matWidth, matHeight);
+ cv::cvtColor(sl_tools::toCVMat(confImgZEDMat), confImRGB, CV_RGBA2RGB);
+ publishImage(confImRGB, pubConfImg, confidenceOptFrameId, t);
+ }
+
+ // Publish the confidence map if someone has subscribed to
+ if (conf_map_SubNumber > 0) {
+ zed.retrieveMeasure(confMapZEDMat, sl::MEASURE_CONFIDENCE, sl::MEM_CPU,
+ matWidth, matHeight);
+ confMapFloat = sl_tools::toCVMat(confMapZEDMat);
+ pubConfMap.publish(imageToROSmsg(
+ confMapFloat, sensor_msgs::image_encodings::TYPE_32FC1,
+ confidenceOptFrameId, t));
+ }
+
+ // Publish the point cloud if someone has subscribed to
+ if (cloud_SubNumber > 0) {
+ // Run the point cloud conversion asynchronously to avoid slowing down
+ // all the program
+ // Retrieve raw pointCloud data if latest Pointcloud is ready
+ std::unique_lock lock(mPcMutex, std::defer_lock);
+ if (lock.try_lock()) {
+ zed.retrieveMeasure(cloud, sl::MEASURE_XYZBGRA, sl::MEM_CPU, matWidth, matHeight);
+
+ pointCloudFrameId = depthFrameId;
+ pointCloudTime = t;
+
+ // Signal Pointcloud thread that a new pointcloud is ready
+ mPcDataReady = true;
+
+ mPcDataReadyCondVar.notify_one();
+ }
+ }
+
+ dataMutex.unlock();
+
+ // Transform from base to sensor
+ tf2::Transform sensor_to_base_transf;
+ // Look up the transformation from base frame to camera link
+ try {
+ // Save the transformation from base to frame
+ geometry_msgs::TransformStamped s2b =
+ tfBuffer->lookupTransform(baseFrameId, depthFrameId, t);
+ // Get the TF2 transformation
+ tf2::fromMsg(s2b.transform, sensor_to_base_transf);
+
+ } catch (tf2::TransformException& ex) {
+ NODELET_WARN_THROTTLE(
+ 10.0, "The tf from '%s' to '%s' does not seem to be available, "
+ "will assume it as identity!",
+ depthFrameId.c_str(), baseFrameId.c_str());
+ NODELET_DEBUG("Transform error: %s", ex.what());
+ sensor_to_base_transf.setIdentity();
+ }
+
+ // Publish the odometry if someone has subscribed to
+ if (pose_SubNumber > 0 || odom_SubNumber > 0 || cloud_SubNumber > 0 ||
+ depth_SubNumber >
+
+
+ 0 || imu_SubNumber > 0 || imu_RawSubNumber > 0) {
+ if (!initOdomWithPose) {
+ sl::Pose deltaOdom;
+ zed.getPosition(deltaOdom, sl::REFERENCE_FRAME_CAMERA);
+
+ // Transform ZED delta odom pose in TF2 Transformation
+ geometry_msgs::Transform deltaTransf;
+
+ sl::Translation translation = deltaOdom.getTranslation();
+ sl::Orientation quat = deltaOdom.getOrientation();
+
+ deltaTransf.translation.x = xSign * translation(xIdx);
+ deltaTransf.translation.y = ySign * translation(yIdx);
+ deltaTransf.translation.z = zSign * translation(zIdx);
+
+ deltaTransf.rotation.x = xSign * quat(xIdx);
+ deltaTransf.rotation.y = ySign * quat(yIdx);
+ deltaTransf.rotation.z = zSign * quat(zIdx);
+ deltaTransf.rotation.w = quat(3);
+
+ tf2::Transform deltaOdomTf;
+ tf2::fromMsg(deltaTransf, deltaOdomTf);
+
+ // delta odom from sensor to base frame
+ tf2::Transform deltaOdomTf_base = sensor_to_base_transf *
+ deltaOdomTf *
+ sensor_to_base_transf.inverse();
+
+ // Propagate Odom transform in time
+ baseToOdomTransform = baseToOdomTransform * deltaOdomTf_base;
+
+ // Publish odometry message
+ publishOdom(baseToOdomTransform, t);
+ }
+ }
+
+ // Publish the zed camera pose if someone has subscribed to
+ if (pose_SubNumber > 0 || odom_SubNumber > 0 || cloud_SubNumber > 0 ||
+ depth_SubNumber > 0 || imu_SubNumber > 0 || imu_RawSubNumber > 0) {
+ sl::Pose zed_pose; // Sensor to Map transform
+ zed.getPosition(zed_pose, sl::REFERENCE_FRAME_WORLD);
+
+ // Transform ZED pose in TF2 Transformation
+ geometry_msgs::Transform sens2mapTransf;
+
+ sl::Translation translation = zed_pose.getTranslation();
+ sl::Orientation quat = zed_pose.getOrientation();
+
+ sens2mapTransf.translation.x = xSign * translation(xIdx);
+ sens2mapTransf.translation.y = ySign * translation(yIdx);
+ sens2mapTransf.translation.z = zSign * translation(zIdx);
+
+ sens2mapTransf.rotation.x = xSign * quat(xIdx);
+ sens2mapTransf.rotation.y = ySign * quat(yIdx);
+ sens2mapTransf.rotation.z = zSign * quat(zIdx);
+ sens2mapTransf.rotation.w = quat(3);
+
+ tf2::Transform sens_to_map_transf;
+ tf2::fromMsg(sens2mapTransf, sens_to_map_transf);
+
+ // Transformation from camera sensor to base frame
+ tf2::Transform base_to_map_transform = sensor_to_base_transf *
+ sens_to_map_transf *
+ sensor_to_base_transf.inverse();
+
+ if (initOdomWithPose) {
+ // Propagate Odom transform in time
+ baseToOdomTransform = base_to_map_transform;
+ base_to_map_transform.setIdentity();
+
+ if (odom_SubNumber > 0) {
+ // Publish odometry message
+ publishOdom(baseToOdomTransform, t);
+ }
+
+ initOdomWithPose = false;
+ } else {
+ // Transformation from map to odometry frame
+ odomToMapTransform =
+ base_to_map_transform * baseToOdomTransform.inverse();
+ }
+
+ // Publish Pose message
+ publishPose(odomToMapTransform, t);
+ }
+
+ // Publish pose tf only if enabled
+ if (publishTf) {
+ // Note, the frame is published, but its values will only change if
+ // someone has subscribed to odom
+ publishOdomFrame(baseToOdomTransform,
+ t); // publish the base Frame in odometry frame
+ // Note, the frame is published, but its values will only change if
+ // someone has subscribed to map
+ publishPoseFrame(odomToMapTransform,
+ t); // publish the odometry Frame in map frame
+
+ imuTime = t;
+ }
+
+ static int rateWarnCount = 0;
+ if (!loop_rate.sleep()) {
+ rateWarnCount++;
+
+ if (rateWarnCount == 10) {
+ NODELET_DEBUG_THROTTLE(
+ 1.0,
+ "Working thread is not synchronized with the Camera frame rate");
+ NODELET_DEBUG_STREAM_THROTTLE(
+ 1.0, "Expected cycle time: " << loop_rate.expectedCycleTime()
+ << " - Real cycle time: "
+ << loop_rate.cycleTime());
+ NODELET_WARN_THROTTLE(10.0, "Elaboration takes longer than requested "
+ "by the FPS rate. Please consider to "
+ "lower the 'frame_rate' setting.");
+ }
+ } else {
+ rateWarnCount = 0;
+ }
+ } else {
+ NODELET_DEBUG_THROTTLE(1.0, "No topics subscribed by users");
+
+ // Publish odometry tf only if enabled
+ if (publishTf) {
+ ros::Time t =
+ sl_tools::slTime2Ros(zed.getTimestamp(sl::TIME_REFERENCE_CURRENT));
+ publishOdomFrame(baseToOdomTransform,
+ t); // publish the base Frame in odometry frame
+ publishPoseFrame(odomToMapTransform,
+ t); // publish the odometry Frame in map frame
+ }
+ std::this_thread::sleep_for(
+ std::chrono::milliseconds(10)); // No subscribers, we just wait
+
+ loop_rate.reset();
+ }
+ } // while loop
+
+ mStopNode = true; // Stops other threads
+ zed.close();
+
+ NODELET_DEBUG("ZED pool thread finished");
+ }
} // namespace
diff --git a/src/tools/include/sl_tools.h b/src/tools/include/sl_tools.h
index 8f6d308f..ba6470ce 100644
--- a/src/tools/include/sl_tools.h
+++ b/src/tools/include/sl_tools.h
@@ -41,7 +41,7 @@ namespace sl_tools {
/* \brief Convert an sl:Mat to a cv::Mat
* \param mat : the sl::Mat to convert
*/
- cv::Mat toCVMat(sl::Mat &mat);
+ cv::Mat toCVMat(sl::Mat& mat);
cv::Mat convertRodrigues(sl::float3 r);
@@ -54,13 +54,25 @@ namespace sl_tools {
* \param major : major value for version
* \param minor : minor value for version
* \param sub_minor _ sub_minor value for version
- */
- std::string getSDKVersion( int& major, int& minor, int& sub_minor);
+ */
+ std::string getSDKVersion(int& major, int& minor, int& sub_minor);
/* \brief Convert StereoLabs timestamp to ROS timestamp
* \param t : Stereolabs timestamp to be converted
*/
ros::Time slTime2Ros(sl::timeStamp t);
+
+    inline sl::uchar4 depackColor4(float colorIn) { // Unpack a float-encoded RGBA color into 4 bytes
+        sl::uchar4 out;
+        uint32_t color_uint = *(uint32_t*) & colorIn; // reinterpret the float's bit pattern as a packed 32-bit color
+        unsigned char* color_uchar = (unsigned char*) &color_uint; // view the packed value as 4 color bytes
+        for (int c = 0; c < 3; c++) { // copy the 3 color channels (alpha handled below)
+            out[c] = static_cast<unsigned char>(color_uchar[c]);
+        }
+        out.w = 255; // force fully-opaque alpha
+        return out;
+    }
+
} // namespace