diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..6732e720 --- /dev/null +++ b/.gitignore @@ -0,0 +1,39 @@ +# C++ objects and libs + +*.slo +*.lo +*.o +*.a +*.la +*.lai +*.so +*.dll +*.dylib + +# Qt-es + +/.qmake.cache +/.qmake.stash +*.pro.user +*.pro.user.* +*.qbs.user +*.qbs.user.* +*.moc +moc_*.cpp +moc_*.h +qrc_*.cpp +ui_*.h +Makefile* +*build-* + +# QtCreator + +*.autosave + +# QtCreator Qml +*.qmlproject.user +*.qmlproject.user.* + +# QtCreator CMake +CMakeLists.txt.user* + diff --git a/LICENSE b/LICENSE index e4e98760..827507c6 100644 --- a/LICENSE +++ b/LICENSE @@ -1,4 +1,4 @@ -Copyright (c) 2017, Stereolabs +Copyright (c) 2018, Stereolabs All rights reserved. Redistribution and use in source and binary forms, with or without diff --git a/README.md b/README.md index 97df7155..ef9653e5 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # Stereolabs ZED Camera - ROS Integration -This package lets you use the ZED stereo camera with ROS. It outputs the camera left and right images, depth map, point cloud, odometry information and supports the use of multiple ZED cameras. +This package lets you use the ZED stereo camera with ROS. It outputs the camera left and right images, depth map, point cloud, pose information and supports the use of multiple ZED cameras. ## Getting started @@ -13,7 +13,6 @@ This package lets you use the ZED stereo camera with ROS. It outputs the camera - Ubuntu 16.04 - [ZED SDK **≥ 2.3**](https://www.stereolabs.com/developers/) and its dependency [CUDA](https://developer.nvidia.com/cuda-downloads) - [ROS Kinetic](http://wiki.ros.org/kinetic/Installation/Ubuntu) -- [Point Cloud Library (PCL)](https://github.com/PointCloudLibrary/pcl) ### Build the program The zed_ros_wrapper is a catkin package.
It depends on the following ROS package - roscpp - rosconsole - sensor_msgs - - opencv + - stereo_msgs + - opencv3 - image_transport - dynamic_reconfigure - urdf - Open a terminal and build the package: cd ~/catkin_ws/src @@ -41,21 +40,29 @@ Open a terminal and build the package: ### Run the program -To launch the wrapper along with an Rviz preview, open a terminal and launch: +To launch the wrapper [along with an Rviz preview](./zed_display_rviz), open a terminal and launch: - roslaunch zed_wrapper display.launch # by default open a ZED + $ roslaunch zed_display_rviz display.launch # by default open a ZED or - roslaunch zed_wrapper display_zedm.launch # open a ZED Mini + $ roslaunch zed_display_rviz display_zedm.launch # open a ZED Mini To launch the wrapper without Rviz, use: - roslaunch zed_wrapper zed.launch + $ roslaunch zed_wrapper zed.launch To select the ZED from its serial number - roslaunch zed_wrapper zed.launch serial_number:=1010 #replace 1010 with the actual SN + $ roslaunch zed_wrapper zed.launch serial_number:=1010 #replace 1010 with the actual SN + +### Modules + +Alongside the wrapper itself and the Rviz display, a few other modules are provided to interface the ZED with other ROS packages : + +- [RTAB-Map](http://introlab.github.io/rtabmap/) : See [zed_rtabmap_example](./zed_rtabmap_example) +- ROS Nodelet, `depthimage_to_laserscan` : See [zed_nodelet_example](./zed_nodelet_example) + [More](https://www.stereolabs.com/documentation/guides/using-zed-with-ros/introduction.html) diff --git a/launch/zed.launch b/launch/zed.launch deleted file mode 100644 index e7c83a5e..00000000 --- a/launch/zed.launch +++ /dev/null @@ -1,34 +0,0 @@ - - - - - - - - - - - - - - - - - - diff --git a/launch/zed_camera.launch b/launch/zed_camera.launch deleted file mode 100644 index 386257d0..00000000 --- a/launch/zed_camera.launch +++ /dev/null @@ -1,102 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/launch/zed_multi_cam.launch b/launch/zed_multi_cam.launch deleted file mode 100644 index 9b0d0c83..00000000 --- a/launch/zed_multi_cam.launch +++ /dev/null @@ -1,45 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/launch/zed_multi_gpu.launch b/launch/zed_multi_gpu.launch deleted file mode 100644 index 83b3375c..00000000 --- a/launch/zed_multi_gpu.launch +++ /dev/null @@ -1,49 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/package.xml b/package.xml deleted file mode 100644 index cb581d5e..00000000 --- a/package.xml +++ /dev/null @@ -1,41 +0,0 @@ - - - zed_wrapper - 1.0.0 - zed_wrapper is a ROS wrapper for the ZED library - for interfacing with the ZED camera. - -STEREOLABS - BSD - - catkin - - tf2_ros - tf2_geometry_msgs - nav_msgs - roscpp - rosconsole - sensor_msgs - opencv - image_transport - dynamic_reconfigure - pcl_conversions - urdf - nodelet - - tf2_ros - tf2_geometry_msgs - nav_msgs - roscpp - rosconsole - sensor_msgs - opencv - image_transport - dynamic_reconfigure - pcl_conversions - nodelet - robot_state_publisher - - - - diff --git a/src/zed_wrapper_nodelet.cpp b/src/zed_wrapper_nodelet.cpp deleted file mode 100644 index 170fcdd5..00000000 --- a/src/zed_wrapper_nodelet.cpp +++ /dev/null @@ -1,995 +0,0 @@ -/////////////////////////////////////////////////////////////////////////// -// -// Copyright (c) 2017, STEREOLABS. -// -// All rights reserved. 
-// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -// -/////////////////////////////////////////////////////////////////////////// - - - - -/**************************************************************************************************** - ** This sample is a wrapper for the ZED library in order to use the ZED Camera with ROS. ** - ** A set of parameters can be specified in the launch file. ** - ****************************************************************************************************/ - -#include -#include -#include -#include -#include -#include -#include -#include - -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include -#include - -#include -#include -#include -#include - -#include - -#include -#include -#include -#include - -#include - -using namespace std; - -namespace zed_wrapper { - - int checkCameraReady(unsigned int serial_number) { - int id = -1; - auto f = sl::Camera::getDeviceList(); - for (auto &it : f) - if (it.serial_number == serial_number && it.camera_state == sl::CAMERA_STATE::CAMERA_STATE_AVAILABLE) - id = it.id; - return id; - } - - sl::DeviceProperties getZEDFromSN(unsigned int serial_number) { - sl::DeviceProperties prop; - auto f = sl::Camera::getDeviceList(); - for (auto &it : f) { - if (it.serial_number == serial_number && it.camera_state == sl::CAMERA_STATE::CAMERA_STATE_AVAILABLE) - prop = it; - } - return prop; - } - - class ZEDWrapperNodelet : public nodelet::Nodelet { - ros::NodeHandle nh; - ros::NodeHandle nh_ns; - boost::shared_ptr device_poll_thread; - image_transport::Publisher pub_rgb; - image_transport::Publisher pub_raw_rgb; - image_transport::Publisher pub_left; - image_transport::Publisher pub_raw_left; - image_transport::Publisher pub_right; - image_transport::Publisher pub_raw_right; - image_transport::Publisher pub_depth; - ros::Publisher pub_cloud; - ros::Publisher pub_rgb_cam_info; - ros::Publisher pub_left_cam_info; - ros::Publisher pub_right_cam_info; - ros::Publisher pub_rgb_cam_info_raw; - ros::Publisher pub_left_cam_info_raw; - ros::Publisher pub_right_cam_info_raw; - ros::Publisher pub_depth_cam_info; - ros::Publisher pub_odom; - - // tf - tf2_ros::TransformBroadcaster transform_odom_broadcaster; - std::string left_frame_id; - std::string right_frame_id; - std::string rgb_frame_id; - std::string depth_frame_id; - std::string cloud_frame_id; - std::string odometry_frame_id; - std::string base_frame_id; - std::string camera_frame_id; - // initialization Transform listener - boost::shared_ptr tfBuffer; - boost::shared_ptr tf_listener; - bool publish_tf; - - // Launch file parameters - int resolution; - int quality; - int sensing_mode; - int rate; - int gpu_id; - int zed_id; - int 
depth_stabilization; - std::string odometry_DB; - std::string svo_filepath; - - //Tracking variables - sl::Pose pose; - tf2::Transform base_transform; - - // zed object - sl::InitParameters param; - sl::Camera zed; - unsigned int serial_number; - - // flags - int confidence; - int exposure; - int gain; - bool autoExposure; - bool triggerAutoExposure; - bool computeDepth; - bool grabbing = false; - int openniDepthMode = 0; // 16 bit UC data in mm else 32F in m, for more info http://www.ros.org/reps/rep-0118.html - - // Point cloud variables - sl::Mat cloud; - string point_cloud_frame_id = ""; - ros::Time point_cloud_time; - - ~ZEDWrapperNodelet() { - device_poll_thread.get()->join(); - } - - /* \brief Convert an sl:Mat to a cv::Mat - * \param mat : the sl::Mat to convert - */ - cv::Mat toCVMat(sl::Mat &mat) { - if (mat.getMemoryType() == sl::MEM_GPU) - mat.updateCPUfromGPU(); - - int cvType; - switch (mat.getDataType()) { - case sl::MAT_TYPE_32F_C1: - cvType = CV_32FC1; - break; - case sl::MAT_TYPE_32F_C2: - cvType = CV_32FC2; - break; - case sl::MAT_TYPE_32F_C3: - cvType = CV_32FC3; - break; - case sl::MAT_TYPE_32F_C4: - cvType = CV_32FC4; - break; - case sl::MAT_TYPE_8U_C1: - cvType = CV_8UC1; - break; - case sl::MAT_TYPE_8U_C2: - cvType = CV_8UC2; - break; - case sl::MAT_TYPE_8U_C3: - cvType = CV_8UC3; - break; - case sl::MAT_TYPE_8U_C4: - cvType = CV_8UC4; - break; - } - return cv::Mat((int) mat.getHeight(), (int) mat.getWidth(), cvType, mat.getPtr(sl::MEM_CPU), mat.getStepBytes(sl::MEM_CPU)); - } - - cv::Mat convertRodrigues(sl::float3 r) { - double theta = sqrt(r.x * r.x + r.y * r.y + r.z * r.z); - cv::Mat R = cv::Mat::eye(3, 3, CV_32F); - - if (theta < DBL_EPSILON) { - return R; - } else { - double c = cos(theta); - double s = sin(theta); - double c1 = 1. - c; - double itheta = theta ? 1. 
/ theta : 0.; - - r *= itheta; - - cv::Mat rrt = cv::Mat::eye(3, 3, CV_32F); - float* p = (float*) rrt.data; - p[0] = r.x * r.x; - p[1] = r.x * r.y; - p[2] = r.x * r.z; - p[3] = r.x * r.y; - p[4] = r.y * r.y; - p[5] = r.y * r.z; - p[6] = r.x * r.z; - p[7] = r.y * r.z; - p[8] = r.z * r.z; - - cv::Mat r_x = cv::Mat::eye(3, 3, CV_32F); - p = (float*) r_x.data; - p[0] = 0; - p[1] = -r.z; - p[2] = r.y; - p[3] = r.z; - p[4] = 0; - p[5] = -r.x; - p[6] = -r.y; - p[7] = r.x; - p[8] = 0; - - // R = cos(theta)*I + (1 - cos(theta))*r*rT + sin(theta)*[r_x] - R = c * cv::Mat::eye(3, 3, CV_32F) + c1 * rrt + s*r_x; - } - return R; - } - - /* \brief Test if a file exist - * \param name : the path to the file - */ - bool file_exist(const std::string& name) { - struct stat buffer; - return (stat(name.c_str(), &buffer) == 0); - } - - /* \brief Image to ros message conversion - * \param img : the image to publish - * \param encodingType : the sensor_msgs::image_encodings encoding type - * \param frameId : the id of the reference frame of the image - * \param t : the ros::Time to stamp the image - */ - sensor_msgs::ImagePtr imageToROSmsg(cv::Mat img, const std::string encodingType, std::string frameId, ros::Time t) { - sensor_msgs::ImagePtr ptr = boost::make_shared(); - sensor_msgs::Image& imgMessage = *ptr; - imgMessage.header.stamp = t; - imgMessage.header.frame_id = frameId; - imgMessage.height = img.rows; - imgMessage.width = img.cols; - imgMessage.encoding = encodingType; - int num = 1; //for endianness detection - imgMessage.is_bigendian = !(*(char *) &num == 1); - imgMessage.step = img.cols * img.elemSize(); - size_t size = imgMessage.step * img.rows; - imgMessage.data.resize(size); - - if (img.isContinuous()) - memcpy((char*) (&imgMessage.data[0]), img.data, size); - else { - uchar* opencvData = img.data; - uchar* rosData = (uchar*) (&imgMessage.data[0]); - for (unsigned int i = 0; i < img.rows; i++) { - memcpy(rosData, opencvData, imgMessage.step); - rosData += imgMessage.step; - opencvData += img.step; - } - } - return ptr; - } - - /* \brief Publish the pose of the camera with a ros Publisher - * \param base_transform : Transformation representing the camera pose from base frame - * \param pub_odom : the publisher object to use - * \param odom_frame_id : the id of the reference frame of the pose - * \param t : the ros::Time to stamp the image - */ - void publishOdom(tf2::Transform base_transform, ros::Publisher &pub_odom, string odom_frame_id, ros::Time t) { - nav_msgs::Odometry odom; - odom.header.stamp = t; - odom.header.frame_id = odom_frame_id; // odom_frame - odom.child_frame_id = base_frame_id; // base_frame - // conversion from Tranform to message - geometry_msgs::Transform base2 = tf2::toMsg(base_transform); - // Add all value in odometry message - odom.pose.pose.position.x = base2.translation.x; - odom.pose.pose.position.y = base2.translation.y; - odom.pose.pose.position.z = base2.translation.z; - odom.pose.pose.orientation.x = base2.rotation.x; - odom.pose.pose.orientation.y = base2.rotation.y; - odom.pose.pose.orientation.z = base2.rotation.z; - odom.pose.pose.orientation.w = base2.rotation.w; - // Publish odometry message - pub_odom.publish(odom); - } - - /* \brief Publish the pose of the camera as a transformation - * \param base_transform : Transformation representing the camera pose from base frame - * \param trans_br : the TransformBroadcaster object to use - * \param odometry_transform_frame_id : the id of the transformation - * \param t : the ros::Time to stamp the image - */ - void 
publishTrackedFrame(tf2::Transform base_transform, tf2_ros::TransformBroadcaster &trans_br, string odometry_transform_frame_id, ros::Time t) { - geometry_msgs::TransformStamped transformStamped; - transformStamped.header.stamp = ros::Time::now(); - transformStamped.header.frame_id = odometry_frame_id; - transformStamped.child_frame_id = odometry_transform_frame_id; - // conversion from Tranform to message - transformStamped.transform = tf2::toMsg(base_transform); - // Publish transformation - trans_br.sendTransform(transformStamped); - } - - /* \brief Publish a cv::Mat image with a ros Publisher - * \param img : the image to publish - * \param pub_img : the publisher object to use - * \param img_frame_id : the id of the reference frame of the image - * \param t : the ros::Time to stamp the image - */ - void publishImage(cv::Mat img, image_transport::Publisher &pub_img, string img_frame_id, ros::Time t) { - pub_img.publish(imageToROSmsg(img, sensor_msgs::image_encodings::BGR8, img_frame_id, t)); - } - - /* \brief Publish a cv::Mat depth image with a ros Publisher - * \param depth : the depth image to publish - * \param pub_depth : the publisher object to use - * \param depth_frame_id : the id of the reference frame of the depth image - * \param t : the ros::Time to stamp the depth image - */ - void publishDepth(cv::Mat depth, image_transport::Publisher &pub_depth, string depth_frame_id, ros::Time t) { - string encoding; - if (openniDepthMode) { - depth *= 1000.0f; - depth.convertTo(depth, CV_16UC1); // in mm, rounded - encoding = sensor_msgs::image_encodings::TYPE_16UC1; - } else { - encoding = sensor_msgs::image_encodings::TYPE_32FC1; - } - pub_depth.publish(imageToROSmsg(depth, encoding, depth_frame_id, t)); - } - - /* \brief Publish a pointCloud with a ros Publisher - * \param width : the width of the point cloud - * \param height : the height of the point cloud - * \param pub_cloud : the publisher object to use - */ - void publishPointCloud(int width, int height, ros::Publisher &pub_cloud) { - pcl::PointCloud point_cloud; - point_cloud.width = width; - point_cloud.height = height; - int size = width*height; - point_cloud.points.resize(size); - - sl::Vector4* cpu_cloud = cloud.getPtr(); - for (int i = 0; i < size; i++) { - point_cloud.points[i].x = cpu_cloud[i][2]; - point_cloud.points[i].y = -cpu_cloud[i][0]; - point_cloud.points[i].z = -cpu_cloud[i][1]; - point_cloud.points[i].rgb = cpu_cloud[i][3]; - } - - sensor_msgs::PointCloud2 output; - pcl::toROSMsg(point_cloud, output); // Convert the point cloud to a ROS message - output.header.frame_id = point_cloud_frame_id; // Set the header values of the ROS message - output.header.stamp = point_cloud_time; - output.height = height; - output.width = width; - output.is_bigendian = false; - output.is_dense = false; - pub_cloud.publish(output); - } - - /* \brief Publish the informations of a camera with a ros Publisher - * \param cam_info_msg : the information message to publish - * \param pub_cam_info : the publisher object to use - * \param t : the ros::Time to stamp the message - */ - void publishCamInfo(sensor_msgs::CameraInfoPtr cam_info_msg, ros::Publisher pub_cam_info, ros::Time t) { - static int seq = 0; - cam_info_msg->header.stamp = t; - cam_info_msg->header.seq = seq; - pub_cam_info.publish(cam_info_msg); - seq++; - } - - /* \brief Get the information of the ZED cameras and store them in an information message - * \param zed : the sl::zed::Camera* pointer to an instance - * \param left_cam_info_msg : the information message to fill 
with the left camera informations - * \param right_cam_info_msg : the information message to fill with the right camera informations - * \param left_frame_id : the id of the reference frame of the left camera - * \param right_frame_id : the id of the reference frame of the right camera - */ - void fillCamInfo(sl::Camera& zed, sensor_msgs::CameraInfoPtr left_cam_info_msg, sensor_msgs::CameraInfoPtr right_cam_info_msg, - string left_frame_id, string right_frame_id, bool raw_param = false) { - - int width = zed.getResolution().width; - int height = zed.getResolution().height; - - sl::CalibrationParameters zedParam; - - if (raw_param) zedParam = zed.getCameraInformation().calibration_parameters_raw; - else zedParam = zed.getCameraInformation().calibration_parameters; - - float baseline = zedParam.T[0]; - - left_cam_info_msg->distortion_model = sensor_msgs::distortion_models::PLUMB_BOB; - right_cam_info_msg->distortion_model = sensor_msgs::distortion_models::PLUMB_BOB; - - left_cam_info_msg->D.resize(5); - right_cam_info_msg->D.resize(5); - left_cam_info_msg->D[0] = zedParam.left_cam.disto[0]; // k1 - left_cam_info_msg->D[1] = zedParam.left_cam.disto[1]; // k2 - left_cam_info_msg->D[2] = zedParam.left_cam.disto[4]; // k3 - left_cam_info_msg->D[3] = zedParam.left_cam.disto[2]; // p1 - left_cam_info_msg->D[4] = zedParam.left_cam.disto[3]; // p2 - right_cam_info_msg->D[0] = zedParam.right_cam.disto[0]; // k1 - right_cam_info_msg->D[1] = zedParam.right_cam.disto[1]; // k2 - right_cam_info_msg->D[2] = zedParam.right_cam.disto[4]; // k3 - right_cam_info_msg->D[3] = zedParam.right_cam.disto[2]; // p1 - right_cam_info_msg->D[4] = zedParam.right_cam.disto[3]; // p2 - - left_cam_info_msg->K.fill(0.0); - right_cam_info_msg->K.fill(0.0); - left_cam_info_msg->K[0] = zedParam.left_cam.fx; - left_cam_info_msg->K[2] = zedParam.left_cam.cx; - left_cam_info_msg->K[4] = zedParam.left_cam.fy; - left_cam_info_msg->K[5] = zedParam.left_cam.cy; - left_cam_info_msg->K[8] = right_cam_info_msg->K[8] = 1.0; - right_cam_info_msg->K[0] = zedParam.right_cam.fx; - right_cam_info_msg->K[2] = zedParam.right_cam.cx; - right_cam_info_msg->K[4] = zedParam.right_cam.fy; - right_cam_info_msg->K[5] = zedParam.right_cam.cy; - - left_cam_info_msg->R.fill(0.0); - right_cam_info_msg->R.fill(0.0); - for (int i = 0; i < 3; i++) { - // identity - right_cam_info_msg->R[i + i * 3] = 1; - left_cam_info_msg->R[i + i * 3] = 1; - } - - if (raw_param) { - cv::Mat R_ = convertRodrigues(zedParam.R); - float* p = (float*) R_.data; - for (int i = 0; i < 9; i++) - right_cam_info_msg->R[i] = p[i]; - } - - left_cam_info_msg->P.fill(0.0); - right_cam_info_msg->P.fill(0.0); - left_cam_info_msg->P[0] = zedParam.left_cam.fx; - left_cam_info_msg->P[2] = zedParam.left_cam.cx; - left_cam_info_msg->P[5] = zedParam.left_cam.fy; - left_cam_info_msg->P[6] = zedParam.left_cam.cy; - left_cam_info_msg->P[10] = right_cam_info_msg->P[10] = 1.0; - //http://docs.ros.org/api/sensor_msgs/html/msg/CameraInfo.html - right_cam_info_msg->P[3] = (-1 * zedParam.left_cam.fx * baseline); - - right_cam_info_msg->P[0] = zedParam.right_cam.fx; - right_cam_info_msg->P[2] = zedParam.right_cam.cx; - right_cam_info_msg->P[5] = zedParam.right_cam.fy; - right_cam_info_msg->P[6] = zedParam.right_cam.cy; - - left_cam_info_msg->width = right_cam_info_msg->width = width; - left_cam_info_msg->height = right_cam_info_msg->height = height; - - left_cam_info_msg->header.frame_id = left_frame_id; - right_cam_info_msg->header.frame_id = right_frame_id; - } - - void callback(zed_wrapper::ZedConfig 
&config, uint32_t level) { - switch (level) { - case 0: - NODELET_INFO("Reconfigure confidence : %d", config.confidence); - confidence = config.confidence; - break; - case 1: - NODELET_INFO("Reconfigure exposure : %d", config.exposure); - exposure = config.exposure; - break; - case 2: - NODELET_INFO("Reconfigure gain : %d", config.gain); - gain = config.gain; - break; - case 3: - NODELET_INFO("Reconfigure auto control of exposure and gain : %s", config.auto_exposure ? "Enable" : "Disable"); - autoExposure = config.auto_exposure; - if (autoExposure) - triggerAutoExposure = true; - break; - } - } - - void device_poll() { - ros::Rate loop_rate(rate); - ros::Time old_t = ros::Time::now(); - sl::ERROR_CODE grab_status; - bool tracking_activated = false; - - // Get the parameters of the ZED images - int width = zed.getResolution().width; - int height = zed.getResolution().height; - NODELET_DEBUG_STREAM("Image size : " << width << "x" << height); - - cv::Size cvSize(width, height); - cv::Mat leftImRGB(cvSize, CV_8UC3); - cv::Mat rightImRGB(cvSize, CV_8UC3); - - // Create and fill the camera information messages - sensor_msgs::CameraInfoPtr rgb_cam_info_msg(new sensor_msgs::CameraInfo()); - sensor_msgs::CameraInfoPtr left_cam_info_msg(new sensor_msgs::CameraInfo()); - sensor_msgs::CameraInfoPtr right_cam_info_msg(new sensor_msgs::CameraInfo()); - sensor_msgs::CameraInfoPtr rgb_cam_info_raw_msg(new sensor_msgs::CameraInfo()); - sensor_msgs::CameraInfoPtr left_cam_info_raw_msg(new sensor_msgs::CameraInfo()); - sensor_msgs::CameraInfoPtr right_cam_info_raw_msg(new sensor_msgs::CameraInfo()); - sensor_msgs::CameraInfoPtr depth_cam_info_msg(new sensor_msgs::CameraInfo()); - fillCamInfo(zed, left_cam_info_msg, right_cam_info_msg, left_frame_id, right_frame_id); - fillCamInfo(zed, left_cam_info_raw_msg, right_cam_info_raw_msg, left_frame_id, right_frame_id, true); - rgb_cam_info_msg = depth_cam_info_msg = left_cam_info_msg; // the reference camera is the Left one (next to the ZED logo) - rgb_cam_info_raw_msg = left_cam_info_raw_msg; - - - sl::RuntimeParameters runParams; - runParams.sensing_mode = static_cast (sensing_mode); - - sl::TrackingParameters trackParams; - trackParams.area_file_path = odometry_DB.c_str(); - - - sl::Mat leftZEDMat, rightZEDMat, depthZEDMat; - // Main loop - while (nh_ns.ok()) { - // Check for subscribers - int rgb_SubNumber = pub_rgb.getNumSubscribers(); - int rgb_raw_SubNumber = pub_raw_rgb.getNumSubscribers(); - int left_SubNumber = pub_left.getNumSubscribers(); - int left_raw_SubNumber = pub_raw_left.getNumSubscribers(); - int right_SubNumber = pub_right.getNumSubscribers(); - int right_raw_SubNumber = pub_raw_right.getNumSubscribers(); - int depth_SubNumber = pub_depth.getNumSubscribers(); - int cloud_SubNumber = pub_cloud.getNumSubscribers(); - int odom_SubNumber = pub_odom.getNumSubscribers(); - bool runLoop = (rgb_SubNumber + rgb_raw_SubNumber + left_SubNumber + left_raw_SubNumber + right_SubNumber + right_raw_SubNumber + depth_SubNumber + cloud_SubNumber + odom_SubNumber) > 0; - - runParams.enable_point_cloud = false; - if (cloud_SubNumber > 0) - runParams.enable_point_cloud = true; - // Run the loop only if there is some subscribers - if (runLoop) { - if ((depth_stabilization || odom_SubNumber > 0) && !tracking_activated) { //Start the tracking - if (odometry_DB != "" && !file_exist(odometry_DB)) { - odometry_DB = ""; - NODELET_WARN("odometry_DB path doesn't exist or is unreachable."); - } - zed.enableTracking(trackParams); - tracking_activated = true; - } else if 
(!depth_stabilization && odom_SubNumber == 0 && tracking_activated) { //Stop the tracking - zed.disableTracking(); - tracking_activated = false; - } - computeDepth = (depth_SubNumber + cloud_SubNumber + odom_SubNumber) > 0; // Detect if one of the subscriber need to have the depth information - ros::Time t = ros::Time::now(); // Get current time - - grabbing = true; - if (computeDepth) { - int actual_confidence = zed.getConfidenceThreshold(); - if (actual_confidence != confidence) - zed.setConfidenceThreshold(confidence); - runParams.enable_depth = true; // Ask to compute the depth - } else - runParams.enable_depth = false; - - grab_status = zed.grab(runParams); // Ask to not compute the depth - - grabbing = false; - - //cout << toString(grab_status) << endl; - if (grab_status != sl::ERROR_CODE::SUCCESS) { // Detect if a error occurred (for example: the zed have been disconnected) and re-initialize the ZED - - if (grab_status == sl::ERROR_CODE_NOT_A_NEW_FRAME) { - NODELET_DEBUG("Wait for a new image to proceed"); - } else NODELET_INFO_ONCE(toString(grab_status)); - - std::this_thread::sleep_for(std::chrono::milliseconds(2)); - - if ((t - old_t).toSec() > 5) { - zed.close(); - - NODELET_INFO("Re-opening the ZED"); - sl::ERROR_CODE err = sl::ERROR_CODE_CAMERA_NOT_DETECTED; - while (err != sl::SUCCESS) { - int id = checkCameraReady(serial_number); - if (id > 0) { - param.camera_linux_id = id; - err = zed.open(param); // Try to initialize the ZED - NODELET_INFO_STREAM(toString(err)); - } else NODELET_INFO("Waiting for the ZED to be re-connected"); - std::this_thread::sleep_for(std::chrono::milliseconds(2000)); - } - tracking_activated = false; - if (depth_stabilization || odom_SubNumber > 0) { //Start the tracking - if (odometry_DB != "" && !file_exist(odometry_DB)) { - odometry_DB = ""; - NODELET_WARN("odometry_DB path doesn't exist or is unreachable."); - } - zed.enableTracking(trackParams); - tracking_activated = true; - } - } - continue; - } - - old_t = ros::Time::now(); - - if (autoExposure) { - // getCameraSettings() can't check status of auto exposure - // triggerAutoExposure is used to execute setCameraSettings() only once - if (triggerAutoExposure) { - zed.setCameraSettings(sl::CAMERA_SETTINGS_EXPOSURE, 0, true); - triggerAutoExposure = false; - } - } else { - int actual_exposure = zed.getCameraSettings(sl::CAMERA_SETTINGS_EXPOSURE); - if (actual_exposure != exposure) - zed.setCameraSettings(sl::CAMERA_SETTINGS_EXPOSURE, exposure); - - int actual_gain = zed.getCameraSettings(sl::CAMERA_SETTINGS_GAIN); - if (actual_gain != gain) - zed.setCameraSettings(sl::CAMERA_SETTINGS_GAIN, gain); - } - - // Publish the left == rgb image if someone has subscribed to - if (left_SubNumber > 0 || rgb_SubNumber > 0) { - // Retrieve RGBA Left image - zed.retrieveImage(leftZEDMat, sl::VIEW_LEFT); - cv::cvtColor(toCVMat(leftZEDMat), leftImRGB, CV_RGBA2RGB); - if (left_SubNumber > 0) { - publishCamInfo(left_cam_info_msg, pub_left_cam_info, t); - publishImage(leftImRGB, pub_left, left_frame_id, t); - } - if (rgb_SubNumber > 0) { - publishCamInfo(rgb_cam_info_msg, pub_rgb_cam_info, t); - publishImage(leftImRGB, pub_rgb, rgb_frame_id, t); // rgb is the left image - } - } - - // Publish the left_raw == rgb_raw image if someone has subscribed to - if (left_raw_SubNumber > 0 || rgb_raw_SubNumber > 0) { - // Retrieve RGBA Left image - zed.retrieveImage(leftZEDMat, sl::VIEW_LEFT_UNRECTIFIED); - cv::cvtColor(toCVMat(leftZEDMat), leftImRGB, CV_RGBA2RGB); - if (left_raw_SubNumber > 0) { - 
publishCamInfo(left_cam_info_raw_msg, pub_left_cam_info_raw, t); - publishImage(leftImRGB, pub_raw_left, left_frame_id, t); - } - if (rgb_raw_SubNumber > 0) { - publishCamInfo(rgb_cam_info_raw_msg, pub_rgb_cam_info_raw, t); - publishImage(leftImRGB, pub_raw_rgb, rgb_frame_id, t); - } - } - - // Publish the right image if someone has subscribed to - if (right_SubNumber > 0) { - // Retrieve RGBA Right image - zed.retrieveImage(rightZEDMat, sl::VIEW_RIGHT); - cv::cvtColor(toCVMat(rightZEDMat), rightImRGB, CV_RGBA2RGB); - publishCamInfo(right_cam_info_msg, pub_right_cam_info, t); - publishImage(rightImRGB, pub_right, right_frame_id, t); - } - - // Publish the right image if someone has subscribed to - if (right_raw_SubNumber > 0) { - // Retrieve RGBA Right image - zed.retrieveImage(rightZEDMat, sl::VIEW_RIGHT_UNRECTIFIED); - cv::cvtColor(toCVMat(rightZEDMat), rightImRGB, CV_RGBA2RGB); - publishCamInfo(right_cam_info_raw_msg, pub_right_cam_info_raw, t); - publishImage(rightImRGB, pub_raw_right, right_frame_id, t); - } - - // Publish the depth image if someone has subscribed to - if (depth_SubNumber > 0) { - zed.retrieveMeasure(depthZEDMat, sl::MEASURE_DEPTH); - publishCamInfo(depth_cam_info_msg, pub_depth_cam_info, t); - publishDepth(toCVMat(depthZEDMat), pub_depth, depth_frame_id, t); // in meters - } - - // Publish the point cloud if someone has subscribed to - if (cloud_SubNumber > 0) { - // Run the point cloud conversion asynchronously to avoid slowing down all the program - // Retrieve raw pointCloud data - zed.retrieveMeasure(cloud, sl::MEASURE_XYZBGRA); - point_cloud_frame_id = cloud_frame_id; - point_cloud_time = t; - publishPointCloud(width, height, pub_cloud); - } - - // Transform from base to sensor - tf2::Transform base_to_sensor; - // Look up the transformation from base frame to camera link - try { - // Save the transformation from base to frame - geometry_msgs::TransformStamped b2s = tfBuffer->lookupTransform(base_frame_id, camera_frame_id, t); - // Get the TF2 transformation - tf2::fromMsg(b2s.transform, base_to_sensor); - - } catch (tf2::TransformException &ex) { - ROS_WARN_THROTTLE(10.0, "The tf from '%s' to '%s' does not seem to be available, " - "will assume it as identity!", - base_frame_id.c_str(), - camera_frame_id.c_str()); - ROS_DEBUG("Transform error: %s", ex.what()); - base_to_sensor.setIdentity(); - } - - // Publish the odometry if someone has subscribed to - if (odom_SubNumber > 0) { - zed.getPosition(pose); - // Transform ZED pose in TF2 Transformation - tf2::Transform camera_transform; - geometry_msgs::Transform c2s; - sl::Translation translation = pose.getTranslation(); - c2s.translation.x = translation(2); - c2s.translation.y = -translation(0); - c2s.translation.z = -translation(1); - sl::Orientation quat = pose.getOrientation(); - c2s.rotation.x = quat(2); - c2s.rotation.y = -quat(0); - c2s.rotation.z = -quat(1); - c2s.rotation.w = quat(3); - tf2::fromMsg(c2s, camera_transform); - // Transformation from camera sensor to base frame - base_transform = base_to_sensor * camera_transform * base_to_sensor.inverse(); - // Publish odometry message - publishOdom(base_transform, pub_odom, odometry_frame_id, t); - } - - // Publish odometry tf only if enabled - if (publish_tf) { - //Note, the frame is published, but its values will only change if someone has subscribed to odom - publishTrackedFrame(base_transform, transform_odom_broadcaster, base_frame_id, t); //publish the tracked Frame - } - - loop_rate.sleep(); - } else { - // Publish odometry tf only if enabled - if 
(publish_tf) { - publishTrackedFrame(base_transform, transform_odom_broadcaster, base_frame_id, ros::Time::now()); //publish the tracked Frame before the sleep - } - std::this_thread::sleep_for(std::chrono::milliseconds(10)); // No subscribers, we just wait - } - } // while loop - zed.close(); - } - - boost::shared_ptr> server; - - void onInit() { - // Launch file parameters - resolution = sl::RESOLUTION_HD720; - quality = sl::DEPTH_MODE_PERFORMANCE; - sensing_mode = sl::SENSING_MODE_STANDARD; - rate = 30; - gpu_id = -1; - zed_id = 0; - serial_number = 0; - odometry_DB = ""; - - nh = getMTNodeHandle(); - nh_ns = getMTPrivateNodeHandle(); - - // Set default coordinate frames - // If unknown left and right frames are set in the same camera coordinate frame - nh_ns.param("odometry_frame", odometry_frame_id, "odometry_frame"); - nh_ns.param("base_frame", base_frame_id, "base_frame"); - nh_ns.param("camera_frame", camera_frame_id, "camera_frame"); - nh_ns.param("depth_frame", depth_frame_id, "depth_frame"); - - // Get parameters from launch file - nh_ns.getParam("resolution", resolution); - nh_ns.getParam("quality", quality); - nh_ns.getParam("sensing_mode", sensing_mode); - nh_ns.getParam("frame_rate", rate); - nh_ns.getParam("odometry_DB", odometry_DB); - nh_ns.getParam("openni_depth_mode", openniDepthMode); - nh_ns.getParam("gpu_id", gpu_id); - nh_ns.getParam("zed_id", zed_id); - nh_ns.getParam("depth_stabilization", depth_stabilization); - int tmp_sn = 0; - nh_ns.getParam("serial_number", tmp_sn); - if (tmp_sn > 0) serial_number = tmp_sn; - - // Publish odometry tf - nh_ns.param("publish_tf", publish_tf, true); - - if (serial_number > 0) - ROS_INFO_STREAM("SN : " << serial_number); - - // Print order frames - ROS_INFO_STREAM("odometry_frame: " << odometry_frame_id); - ROS_INFO_STREAM("base_frame: " << base_frame_id); - ROS_INFO_STREAM("camera_frame: " << camera_frame_id); - ROS_INFO_STREAM("depth_frame: " << depth_frame_id); - // Status of odometry TF - ROS_INFO_STREAM("Publish " << odometry_frame_id << " [" << (publish_tf ? 
"TRUE" : "FALSE") << "]"); - - std::string img_topic = "image_rect_color"; - std::string img_raw_topic = "image_raw_color"; - - // Set the default topic names - string left_topic = "left/" + img_topic; - string left_raw_topic = "left/" + img_raw_topic; - string left_cam_info_topic = "left/camera_info"; - string left_cam_info_raw_topic = "left/camera_info_raw"; - left_frame_id = camera_frame_id; - - string right_topic = "right/" + img_topic; - string right_raw_topic = "right/" + img_raw_topic; - string right_cam_info_topic = "right/camera_info"; - string right_cam_info_raw_topic = "right/camera_info_raw"; - right_frame_id = camera_frame_id; - - string rgb_topic = "rgb/" + img_topic; - string rgb_raw_topic = "rgb/" + img_raw_topic; - string rgb_cam_info_topic = "rgb/camera_info"; - string rgb_cam_info_raw_topic = "rgb/camera_info_raw"; - rgb_frame_id = depth_frame_id; - - string depth_topic = "depth/"; - if (openniDepthMode) { - NODELET_INFO_STREAM("Openni depth mode activated"); - depth_topic += "depth_raw_registered"; - } else { - depth_topic += "depth_registered"; - } - - string depth_cam_info_topic = "depth/camera_info"; - - string point_cloud_topic = "point_cloud/cloud_registered"; - cloud_frame_id = camera_frame_id; - - string odometry_topic = "odom"; - - nh_ns.getParam("rgb_topic", rgb_topic); - nh_ns.getParam("rgb_raw_topic", rgb_raw_topic); - nh_ns.getParam("rgb_cam_info_topic", rgb_cam_info_topic); - nh_ns.getParam("rgb_cam_info_raw_topic", rgb_cam_info_raw_topic); - - nh_ns.getParam("left_topic", left_topic); - nh_ns.getParam("left_raw_topic", left_raw_topic); - nh_ns.getParam("left_cam_info_topic", left_cam_info_topic); - nh_ns.getParam("left_cam_info_raw_topic", left_cam_info_raw_topic); - - nh_ns.getParam("right_topic", right_topic); - nh_ns.getParam("right_raw_topic", right_raw_topic); - nh_ns.getParam("right_cam_info_topic", right_cam_info_topic); - nh_ns.getParam("right_cam_info_raw_topic", right_cam_info_raw_topic); - - nh_ns.getParam("depth_topic", depth_topic); - nh_ns.getParam("depth_cam_info_topic", depth_cam_info_topic); - - nh_ns.getParam("point_cloud_topic", point_cloud_topic); - - nh_ns.getParam("odometry_topic", odometry_topic); - - nh_ns.param("svo_filepath", svo_filepath, std::string()); - - - // Initialization transformation listener - tfBuffer.reset(new tf2_ros::Buffer); - tf_listener.reset(new tf2_ros::TransformListener(*tfBuffer)); - - // Initialize tf2 transformation - base_transform.setIdentity(); - - // Try to initialize the ZED - if (!svo_filepath.empty()) - param.svo_input_filename = svo_filepath.c_str(); - else { - param.camera_fps = rate; - param.camera_resolution = static_cast (resolution); - if (serial_number == 0) - param.camera_linux_id = zed_id; - else { - bool waiting_for_camera = true; - while (waiting_for_camera) { - sl::DeviceProperties prop = getZEDFromSN(serial_number); - if (prop.id < -1 || prop.camera_state == sl::CAMERA_STATE::CAMERA_STATE_NOT_AVAILABLE) { - std::string msg = "ZED SN" + to_string(serial_number) + " not detected ! 
Please connect this ZED"; - NODELET_INFO(msg.c_str()); - std::this_thread::sleep_for(std::chrono::milliseconds(2000)); - } else { - waiting_for_camera = false; - param.camera_linux_id = prop.id; - } - } - } - } - - param.coordinate_units = sl::UNIT_METER; - param.coordinate_system = sl::COORDINATE_SYSTEM_IMAGE; - param.depth_mode = static_cast (quality); - param.sdk_verbose = true; - param.sdk_gpu_id = gpu_id; - param.depth_stabilization = depth_stabilization; - - sl::ERROR_CODE err = sl::ERROR_CODE_CAMERA_NOT_DETECTED; - while (err != sl::SUCCESS) { - err = zed.open(param); - NODELET_INFO_STREAM(toString(err)); - std::this_thread::sleep_for(std::chrono::milliseconds(2000)); - } - - serial_number = zed.getCameraInformation().serial_number; - - //Reconfigure parameters - server = boost::make_shared> (); - dynamic_reconfigure::Server::CallbackType f; - f = boost::bind(&ZEDWrapperNodelet::callback, this, _1, _2); - server->setCallback(f); - - nh_ns.getParam("confidence", confidence); - nh_ns.getParam("exposure", exposure); - nh_ns.getParam("gain", gain); - nh_ns.getParam("auto_exposure", autoExposure); - if (autoExposure) - triggerAutoExposure = true; - - // Create all the publishers - // Image publishers - image_transport::ImageTransport it_zed(nh); - pub_rgb = it_zed.advertise(rgb_topic, 1); //rgb - NODELET_INFO_STREAM("Advertized on topic " << rgb_topic); - pub_raw_rgb = it_zed.advertise(rgb_raw_topic, 1); //rgb raw - NODELET_INFO_STREAM("Advertized on topic " << rgb_raw_topic); - pub_left = it_zed.advertise(left_topic, 1); //left - NODELET_INFO_STREAM("Advertized on topic " << left_topic); - pub_raw_left = it_zed.advertise(left_raw_topic, 1); //left raw - NODELET_INFO_STREAM("Advertized on topic " << left_raw_topic); - pub_right = it_zed.advertise(right_topic, 1); //right - NODELET_INFO_STREAM("Advertized on topic " << right_topic); - pub_raw_right = it_zed.advertise(right_raw_topic, 1); //right raw - NODELET_INFO_STREAM("Advertized on topic " << right_raw_topic); - pub_depth = it_zed.advertise(depth_topic, 1); //depth - NODELET_INFO_STREAM("Advertized on topic " << depth_topic); - - //PointCloud publisher - pub_cloud = nh.advertise (point_cloud_topic, 1); - NODELET_INFO_STREAM("Advertized on topic " << point_cloud_topic); - - // Camera info publishers - pub_rgb_cam_info = nh.advertise(rgb_cam_info_topic, 1); //rgb - NODELET_INFO_STREAM("Advertized on topic " << rgb_cam_info_topic); - pub_left_cam_info = nh.advertise(left_cam_info_topic, 1); //left - NODELET_INFO_STREAM("Advertized on topic " << left_cam_info_topic); - pub_right_cam_info = nh.advertise(right_cam_info_topic, 1); //right - NODELET_INFO_STREAM("Advertized on topic " << right_cam_info_topic); - pub_depth_cam_info = nh.advertise(depth_cam_info_topic, 1); //depth - NODELET_INFO_STREAM("Advertized on topic " << depth_cam_info_topic); - // Raw - pub_rgb_cam_info_raw = nh.advertise(rgb_cam_info_raw_topic, 1); // raw rgb - NODELET_INFO_STREAM("Advertized on topic " << rgb_cam_info_raw_topic); - pub_left_cam_info_raw = nh.advertise(left_cam_info_raw_topic, 1); // raw left - NODELET_INFO_STREAM("Advertized on topic " << left_cam_info_raw_topic); - pub_right_cam_info_raw = nh.advertise(right_cam_info_raw_topic, 1); // raw right - NODELET_INFO_STREAM("Advertized on topic " << right_cam_info_raw_topic); - - //Odometry publisher - pub_odom = nh.advertise(odometry_topic, 1); - NODELET_INFO_STREAM("Advertized on topic " << odometry_topic); - - device_poll_thread = boost::shared_ptr (new 
boost::thread(boost::bind(&ZEDWrapperNodelet::device_poll, this))); - } - }; // class ZEDROSWrapperNodelet -} // namespace - -#include -PLUGINLIB_EXPORT_CLASS(zed_wrapper::ZEDWrapperNodelet, nodelet::Nodelet); diff --git a/zed_display_rviz/CMakeLists.txt b/zed_display_rviz/CMakeLists.txt new file mode 100644 index 00000000..55b64682 --- /dev/null +++ b/zed_display_rviz/CMakeLists.txt @@ -0,0 +1,19 @@ +cmake_minimum_required(VERSION 2.8.7) + +project(zed_display_rviz) + +find_package(catkin REQUIRED) + +catkin_package() + +#Add all files in subdirectories of the project in +# a dummy_target so qtcreator have access to all files +FILE(GLOB_RECURSE extra_files ${CMAKE_SOURCE_DIR}/*) +add_custom_target(dummy_${PROJECT_NAME} SOURCES ${extra_files}) + +############################################################################### +# INSTALL + +install(DIRECTORY + launch + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}) diff --git a/zed_display_rviz/README.md b/zed_display_rviz/README.md new file mode 100644 index 00000000..54eb06b2 --- /dev/null +++ b/zed_display_rviz/README.md @@ -0,0 +1,21 @@ +# Stereolabs ZED Camera - ROS Display example + +This package lets you visualize in the RViz application all the possible information that can be acquired using a ZED camera. + +![ZED rendering on Rviz](images/depthcloud-RGB.png) + +### Run the program + +If you own a ZED camera launch: + + $ roslaunch zed_display display.launch + +![ZED rendering on Rviz](images/ZED-Rviz.png) + +If you own a ZED Mini camera launch: + + $ roslaunch zed_display display_zedm.launch + +![ZED rendering on Rviz](images/ZEDM-Rviz.png) + +[More](https://www.stereolabs.com/documentation/guides/using-zed-with-ros/introduction.html) diff --git a/zed_display_rviz/images/ZED-Rviz.png b/zed_display_rviz/images/ZED-Rviz.png new file mode 100644 index 00000000..d9fe4cb2 Binary files /dev/null and b/zed_display_rviz/images/ZED-Rviz.png differ diff --git a/zed_display_rviz/images/ZEDM-Rviz.png b/zed_display_rviz/images/ZEDM-Rviz.png new file mode 100644 index 00000000..46dab14a Binary files /dev/null and b/zed_display_rviz/images/ZEDM-Rviz.png differ diff --git a/zed_display_rviz/images/depthcloud-RGB.png b/zed_display_rviz/images/depthcloud-RGB.png new file mode 100644 index 00000000..abb1822d Binary files /dev/null and b/zed_display_rviz/images/depthcloud-RGB.png differ diff --git a/launch/display.launch b/zed_display_rviz/launch/display.launch similarity index 87% rename from launch/display.launch rename to zed_display_rviz/launch/display.launch index 7cc41c58..e27842c8 100644 --- a/launch/display.launch +++ b/zed_display_rviz/launch/display.launch @@ -1,6 +1,6 @@ - + diff --git a/launch/display_zedm.launch b/zed_display_rviz/launch/display_zedm.launch similarity index 60% rename from launch/display_zedm.launch rename to zed_display_rviz/launch/display_zedm.launch index b57075cf..2a5e50c2 100644 --- a/launch/display_zedm.launch +++ b/zed_display_rviz/launch/display_zedm.launch @@ -1,6 +1,6 @@ + - - - - + + + + + - - + + diff --git a/zed_display_rviz/package.xml b/zed_display_rviz/package.xml new file mode 100644 index 00000000..468d24b1 --- /dev/null +++ b/zed_display_rviz/package.xml @@ -0,0 +1,16 @@ + + + zed_display_rviz + 1.0.0 + + "zed_display" is a ROS package to visualize in Rviz the information from the "zed_wrapper" node + + STEREOLABS + BSD + + catkin + + rviz + rviz_imu_plugin + zed_wrapper + diff --git a/rviz/zed.rviz b/zed_display_rviz/rviz/zed.rviz similarity index 77% rename from rviz/zed.rviz rename to 
zed_display_rviz/rviz/zed.rviz index fcfc814b..931d66c1 100644 --- a/rviz/zed.rviz +++ b/zed_display_rviz/rviz/zed.rviz @@ -5,10 +5,13 @@ Panels: Property Tree Widget: Expanded: - /RobotModel1/Links1 + - /TF1 - /TF1/Frames1 + - /Odometry1 - /Odometry1/Shape1 + - /Pose1 Splitter Ratio: 0.5 - Tree Height: 281 + Tree Height: 452 - Class: rviz/Selection Name: Selection - Class: rviz/Tool Properties @@ -27,7 +30,7 @@ Panels: Experimental: false Name: Time SyncMode: 0 - SyncSource: Camera + SyncSource: DepthCloud Visualization Manager: Class: "" Displays: @@ -59,25 +62,27 @@ Visualization Manager: Expand Link Details: false Expand Tree: false Link Tree Style: Links in Alphabetic Order - zed_center: + zed_camera_center: Alpha: 1 Show Axes: false Show Trail: false Value: true - zed_depth_camera: + zed_left_camera_frame: Alpha: 1 Show Axes: false Show Trail: false - zed_left_camera: + zed_left_camera_optical_frame: Alpha: 1 Show Axes: false Show Trail: false - Value: true - zed_right_camera: + zed_right_camera_frame: + Alpha: 1 + Show Axes: false + Show Trail: false + zed_right_camera_optical_frame: Alpha: 1 Show Axes: false Show Trail: false - Value: true Name: RobotModel Robot Description: zed/zed_description TF Prefix: "" @@ -90,64 +95,36 @@ Visualization Manager: Frames: All Enabled: false map: + Value: true + odom: + Value: true + zed_camera_center: + Value: true + zed_left_camera_frame: Value: false - zed_center: + zed_left_camera_optical_frame: Value: false - zed_depth_camera: + zed_right_camera_frame: Value: false - zed_left_camera: - Value: true - zed_right_camera: + zed_right_camera_optical_frame: Value: false Marker Scale: 1 Name: TF - Show Arrows: false + Show Arrows: true Show Axes: true Show Names: true Tree: map: - zed_center: - zed_left_camera: - zed_depth_camera: - {} - zed_right_camera: - {} + odom: + zed_camera_center: + zed_left_camera_frame: + zed_left_camera_optical_frame: + {} + zed_right_camera_frame: + zed_right_camera_optical_frame: + {} Update Interval: 0 Value: true - - Angle Tolerance: 0 - Class: rviz/Odometry - Covariance: - Orientation: - Alpha: 0.5 - Color: 255; 255; 127 - Color Style: Unique - Frame: Local - Offset: 1 - Scale: 1 - Value: true - Position: - Alpha: 0.300000012 - Color: 204; 51; 204 - Scale: 1 - Value: true - Value: true - Enabled: true - Keep: 1 - Name: Odometry - Position Tolerance: 0 - Shape: - Alpha: 0 - Axes Length: 1 - Axes Radius: 0.100000001 - Color: 255; 25; 0 - Head Length: 0.200000003 - Head Radius: 0.100000001 - Shaft Length: 0.300000012 - Shaft Radius: 0.0199999996 - Value: Arrow - Topic: /zed/odom - Unreliable: false - Value: true - Alpha: 1 Auto Size: Auto Size Factor: 1 @@ -167,7 +144,7 @@ Visualization Manager: Decay Time: 0 Depth Map Topic: /zed/depth/depth_registered Depth Map Transport Hint: raw - Enabled: false + Enabled: true Invert Rainbow: false Max Color: 255; 255; 255 Max Intensity: 4096 @@ -180,31 +157,12 @@ Visualization Manager: Position Transformer: XYZ Queue Size: 5 Selectable: true - Size (Pixels): 3 - Style: Flat Squares + Size (Pixels): 1 + Style: Points Topic Filter: true Use Fixed Frame: true Use rainbow: true - Value: false - - Class: rviz/Camera - Enabled: true - Image Rendering: background and overlay - Image Topic: /zed/rgb/image_rect_color - Name: Camera - Overlay Alpha: 0.5 - Queue Size: 2 - Transport Hint: raw - Unreliable: false Value: true - Visibility: - DepthCloud: true - Grid: true - Odometry: true - PointCloud2: true - RobotModel: true - TF: true - Value: false - Zoom Factor: 1 - Alpha: 1 Autocompute 
Intensity Bounds: true Autocompute Value Bounds: @@ -225,19 +183,90 @@ Visualization Manager: Min Intensity: 0 Name: PointCloud2 Position Transformer: XYZ - Queue Size: 10 + Queue Size: 5 Selectable: true - Size (Pixels): 3 + Size (Pixels): 1 Size (m): 0.00999999978 - Style: Flat Squares + Style: Points Topic: /zed/point_cloud/cloud_registered Unreliable: false Use Fixed Frame: true Use rainbow: true Value: false + - Class: rviz/Camera + Enabled: true + Image Rendering: background and overlay + Image Topic: /zed/rgb/image_rect_color + Name: Camera + Overlay Alpha: 0.5 + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + Visibility: + Camera: true + DepthCloud: true + Grid: true + Odometry: true + PointCloud2: true + Pose: true + RobotModel: true + TF: true + Value: false + Zoom Factor: 1 + - Angle Tolerance: 0 + Class: rviz/Odometry + Covariance: + Orientation: + Alpha: 0.5 + Color: 255; 255; 127 + Color Style: Unique + Frame: Local + Offset: 1 + Scale: 1 + Value: true + Position: + Alpha: 0.300000012 + Color: 204; 51; 204 + Scale: 1 + Value: true + Value: true + Enabled: true + Keep: 1 + Name: Odometry + Position Tolerance: 0 + Shape: + Alpha: 1 + Axes Length: 1 + Axes Radius: 0.100000001 + Color: 25; 255; 0 + Head Length: 0.100000001 + Head Radius: 0.100000001 + Shaft Length: 0.300000012 + Shaft Radius: 0.0199999996 + Value: Arrow + Topic: /zed/odom + Unreliable: false + Value: true + - Alpha: 1 + Axes Length: 1 + Axes Radius: 0.100000001 + Class: rviz/Pose + Color: 255; 25; 0 + Enabled: true + Head Length: 0.100000001 + Head Radius: 0.100000001 + Name: Pose + Shaft Length: 0.300000012 + Shaft Radius: 0.0199999996 + Shape: Arrow + Topic: /zed/map + Unreliable: false + Value: true Enabled: true Global Options: Background Color: 48; 48; 48 + Default Light: true Fixed Frame: map Frame Rate: 30 Name: root @@ -259,35 +288,35 @@ Visualization Manager: Views: Current: Class: rviz/Orbit - Distance: 3.33887124 + Distance: 7.54095364 Enable Stereo Rendering: Stereo Eye Separation: 0.0599999987 Stereo Focal Distance: 1 Swap Stereo Eyes: false Value: false Focal Point: - X: 0.00562699512 - Y: -0.0610139929 - Z: -0.0259050336 + X: 0.946748376 + Y: 0.731980622 + Z: 0.698233485 Focal Shape Fixed Size: false Focal Shape Size: 0.0500000007 Invert Z Axis: false Name: Current View Near Clip Distance: 0.00999999978 - Pitch: 0.310398251 + Pitch: 0.170398071 Target Frame: zed_left_camera Value: Orbit (rviz) - Yaw: 3.44725299 + Yaw: 2.91542745 Saved: ~ Window Geometry: Camera: collapsed: false Displays: collapsed: false - Height: 793 + Height: 1056 Hide Left Dock: false Hide Right Dock: true - QMainWindow State: 
000000ff00000000fd00000004000000000000016a0000028ffc020000000efb0000001200530065006c0065006300740069006f006e00000001e10000009b0000006400fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c0061007900730100000028000001a8000000dd00fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000a0049006d00610067006501000002400000010f0000000000000000fb0000000a0049006d006100670065000000028b000000c40000000000000000fb0000000a0049006d006100670065010000028a000000c50000000000000000fb0000000a0049006d00610067006500000002b30000009c0000000000000000fb0000000a0049006d006100670065010000028b000000c40000000000000000fb0000000c00430061006d00650072006101000001d6000000e10000001600ffffff000000010000010f00000327fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073000000002800000327000000b000fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e100000197000000030000045d0000003efc0100000002fb0000000800540069006d006501000000000000045d0000030000fffffffb0000000800540069006d00650100000000000004500000000000000000000002ed0000028f00000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000 + QMainWindow State: 
000000ff00000000fd00000004000000000000016a00000396fc020000000efb0000001200530065006c0065006300740069006f006e00000001e10000009b0000006100fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c006100790073010000002800000253000000d700fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000a0049006d00610067006501000002400000010f0000000000000000fb0000000a0049006d006100670065000000028b000000c40000000000000000fb0000000a0049006d006100670065010000028a000000c50000000000000000fb0000000a0049006d00610067006500000002b30000009c0000000000000000fb0000000a0049006d006100670065010000028b000000c40000000000000000fb0000000c00430061006d00650072006101000002810000013d0000001600ffffff000000010000010f00000327fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073000000002800000327000000ad00fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e100000197000000030000073f0000003efc0100000002fb0000000800540069006d006501000000000000073f0000030000fffffffb0000000800540069006d00650100000000000004500000000000000000000005cf0000039600000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000 Selection: collapsed: false Time: @@ -296,6 +325,6 @@ Window Geometry: collapsed: false Views: collapsed: true - Width: 1117 - X: 1474 - Y: 432 + Width: 1855 + X: 65 + Y: 24 diff --git a/rviz/zedm.rviz b/zed_display_rviz/rviz/zedm.rviz similarity index 72% rename from rviz/zedm.rviz rename to zed_display_rviz/rviz/zedm.rviz index 33158d5c..69fa257d 100644 --- a/rviz/zedm.rviz +++ b/zed_display_rviz/rviz/zedm.rviz @@ -4,11 +4,13 @@ Panels: Name: Displays Property Tree Widget: Expanded: + - /Global Options1 - /RobotModel1/Links1 - - /Odometry1/Shape1 - - /Imu1 + - /TF1/Frames1 + - /DepthCloud1/Occlusion Compensation1 + - /Odometry1/Covariance1/Position1 Splitter Ratio: 0.5 - Tree Height: 492 + Tree Height: 393 - Class: rviz/Selection Name: Selection - Class: rviz/Tool Properties @@ -27,7 +29,7 @@ Panels: Experimental: false Name: Time SyncMode: 0 - SyncSource: Camera + SyncSource: DepthCloud Visualization Manager: Class: "" Displays: @@ -59,24 +61,28 @@ Visualization Manager: Expand Link Details: false Expand Tree: false Link Tree Style: Links in Alphabetic Order - zed_center: + imu_link: + Alpha: 1 + Show Axes: false + Show Trail: false + zed_camera_center: Alpha: 1 Show Axes: false Show Trail: false Value: true - zed_depth_camera: + zed_left_camera_frame: Alpha: 1 Show Axes: false Show Trail: false - zed_imu: + zed_left_camera_optical_frame: Alpha: 1 Show Axes: false Show Trail: false - zed_left_camera: + zed_right_camera_frame: Alpha: 1 Show Axes: false Show Trail: false - zed_right_camera: + zed_right_camera_optical_frame: Alpha: 1 Show Axes: false Show Trail: false @@ -88,72 +94,44 @@ Visualization Manager: Visual Enabled: true - Class: rviz/TF Enabled: true - Frame Timeout: 15 + Frame 
Timeout: 5 Frames: All Enabled: false - map: + imu_link: Value: false - zed_center: + map: + Value: true + odom: + Value: true + zed_camera_center: + Value: true + zed_left_camera_frame: Value: false - zed_depth_camera: + zed_left_camera_optical_frame: Value: false - zed_imu: + zed_right_camera_frame: Value: false - zed_left_camera: - Value: true - zed_right_camera: + zed_right_camera_optical_frame: Value: false - Marker Scale: 1 + Marker Scale: 0.5 Name: TF - Show Arrows: false + Show Arrows: true Show Axes: true Show Names: true Tree: map: - zed_center: - zed_imu: - {} - zed_left_camera: - zed_depth_camera: + odom: + zed_camera_center: + imu_link: {} - zed_right_camera: - {} + zed_left_camera_frame: + zed_left_camera_optical_frame: + {} + zed_right_camera_frame: + zed_right_camera_optical_frame: + {} Update Interval: 0 Value: true - - Angle Tolerance: 0 - Class: rviz/Odometry - Covariance: - Orientation: - Alpha: 0.5 - Color: 255; 255; 127 - Color Style: Unique - Frame: Local - Offset: 1 - Scale: 1 - Value: true - Position: - Alpha: 0.300000012 - Color: 204; 51; 204 - Scale: 1 - Value: true - Value: true - Enabled: true - Keep: 1 - Name: Odometry - Position Tolerance: 0 - Shape: - Alpha: 0 - Axes Length: 1 - Axes Radius: 0.100000001 - Color: 255; 25; 0 - Head Length: 0.200000003 - Head Radius: 0.100000001 - Shaft Length: 0.300000012 - Shaft Radius: 0.0199999996 - Value: Arrow - Topic: /zed/odom - Unreliable: false - Value: true - Alpha: 1 Auto Size: Auto Size Factor: 1 @@ -167,13 +145,13 @@ Visualization Manager: Channel Name: intensity Class: rviz/DepthCloud Color: 255; 255; 255 - Color Image Topic: /zed/rgb/image_raw_color + Color Image Topic: /zed/rgb/image_rect_color Color Transformer: RGB8 Color Transport Hint: raw Decay Time: 0 Depth Map Topic: /zed/depth/depth_registered Depth Map Transport Hint: raw - Enabled: false + Enabled: true Invert Rainbow: false Max Color: 255; 255; 255 Max Intensity: 4096 @@ -184,14 +162,14 @@ Visualization Manager: Occlusion Time-Out: 30 Value: false Position Transformer: XYZ - Queue Size: 5 + Queue Size: 1 Selectable: true - Size (Pixels): 3 - Style: Flat Squares + Size (Pixels): 1 + Style: Points Topic Filter: true Use Fixed Frame: true Use rainbow: true - Value: false + Value: true - Class: rviz/Camera Enabled: true Image Rendering: background and overlay @@ -200,7 +178,7 @@ Visualization Manager: Overlay Alpha: 0.5 Queue Size: 2 Transport Hint: raw - Unreliable: false + Unreliable: true Value: true Visibility: DepthCloud: true @@ -208,6 +186,7 @@ Visualization Manager: Imu: true Odometry: true PointCloud2: true + Pose: true RobotModel: true TF: true Value: false @@ -215,10 +194,10 @@ Visualization Manager: - Alpha: 1 Autocompute Intensity Bounds: true Autocompute Value Bounds: - Max Value: 10 - Min Value: -10 + Max Value: 0.240854248 + Min Value: -1.55063653 Value: true - Axis: Z + Axis: X Channel Name: intensity Class: rviz/PointCloud2 Color: 255; 255; 255 @@ -232,11 +211,11 @@ Visualization Manager: Min Intensity: 0 Name: PointCloud2 Position Transformer: XYZ - Queue Size: 10 - Selectable: true - Size (Pixels): 3 + Queue Size: 1 + Selectable: false + Size (Pixels): 1 Size (m): 0.00999999978 - Style: Flat Squares + Style: Points Topic: /zed/point_cloud/cloud_registered Unreliable: false Use Fixed Frame: true @@ -246,7 +225,7 @@ Visualization Manager: Acc. vector alpha: 1 Acc. vector color: 255; 0; 0 Acc. 
vector scale: 1 - Derotate acceleration: true + Derotate acceleration: false Enable acceleration: false Axes properties: Axes scale: 1 @@ -261,15 +240,65 @@ Visualization Manager: Class: rviz_imu_plugin/Imu Enabled: true Name: Imu - Topic: /zed/imu_raw + Topic: /zed/imu/data Unreliable: false Value: true fixed_frame_orientation: true + - Alpha: 1 + Axes Length: 1 + Axes Radius: 0.100000001 + Class: rviz/Pose + Color: 0; 255; 0 + Enabled: true + Head Length: 0.100000001 + Head Radius: 0.0500000007 + Name: Pose + Shaft Length: 0.300000012 + Shaft Radius: 0.0199999996 + Shape: Arrow + Topic: /zed/map + Unreliable: false + Value: true + - Angle Tolerance: 0 + Class: rviz/Odometry + Covariance: + Orientation: + Alpha: 0.5 + Color: 255; 255; 127 + Color Style: Unique + Frame: Local + Offset: 1 + Scale: 1 + Value: true + Position: + Alpha: 0.300000012 + Color: 204; 51; 204 + Scale: 1 + Value: true + Value: true + Enabled: true + Keep: 1 + Name: Odometry + Position Tolerance: 0 + Shape: + Alpha: 1 + Axes Length: 0.100000001 + Axes Radius: 0.00999999978 + Color: 255; 25; 0 + Head Length: 0.100000001 + Head Radius: 0.0500000007 + Shaft Length: 0.300000012 + Shaft Radius: 0.0199999996 + Value: Arrow + Topic: /zed/odom + Unreliable: false + Value: true Enabled: true Global Options: Background Color: 48; 48; 48 + Default Light: true Fixed Frame: map - Frame Rate: 30 + Frame Rate: 60 Name: root Tools: - Class: rviz/Interact @@ -289,35 +318,35 @@ Visualization Manager: Views: Current: Class: rviz/Orbit - Distance: 2.76508617 + Distance: 2.1419313 Enable Stereo Rendering: Stereo Eye Separation: 0.0599999987 Stereo Focal Distance: 1 Swap Stereo Eyes: false Value: false Focal Point: - X: -0.178178757 - Y: -0.160461873 - Z: 0.0447466969 + X: 0.546764851 + Y: -0.0699039996 + Z: 0.35577035 Focal Shape Fixed Size: false Focal Shape Size: 0.0500000007 Invert Z Axis: false Name: Current View Near Clip Distance: 0.00999999978 - Pitch: 0.664797008 - Target Frame: zed_left_camera + Pitch: 0.30479762 + Target Frame: Value: Orbit (rviz) - Yaw: 2.39227414 + Yaw: 2.96410108 Saved: ~ Window Geometry: Camera: collapsed: false Displays: collapsed: false - Height: 1154 + Height: 1056 Hide Left Dock: false - Hide Right Dock: true - QMainWindow State: 
000000ff00000000fd00000004000000000000016a000003d7fc020000000efb0000001200530065006c0065006300740069006f006e00000001e10000009b0000006000fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c00610079007301000000440000027e000000d400fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000a0049006d00610067006501000002400000010f0000000000000000fb0000000a0049006d006100670065000000028b000000c40000000000000000fb0000000a0049006d006100670065010000028a000000c50000000000000000fb0000000a0049006d00610067006500000002b30000009c0000000000000000fb0000000a0049006d006100670065010000028b000000c40000000000000000fb0000000c00430061006d00650072006101000002c8000001530000001800ffffff000000010000010f00000327fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073000000002800000327000000b000fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e100000197000000030000078000000040fc0100000002fb0000000800540069006d00650100000000000007800000027100fffffffb0000000800540069006d0065010000000000000450000000000000000000000610000003d700000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000 + Hide Right Dock: false + QMainWindow State: 
000000ff00000000fd00000004000000000000016a00000394fc020000000efb0000001200530065006c0065006300740069006f006e00000001e10000009b0000006100fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c006100790073010000002800000218000000d700fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000a0049006d00610067006501000002400000010f0000000000000000fb0000000a0049006d006100670065000000028b000000c40000000000000000fb0000000a0049006d006100670065010000028a000000c50000000000000000fb0000000a0049006d00610067006500000002b30000009c0000000000000000fb0000000a0049006d006100670065010000028b000000c40000000000000000fb0000000c00430061006d0065007200610100000246000001760000001600ffffff000000010000010f00000394fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073010000002800000394000000ad00fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e100000197000000030000073f00000040fc0100000002fb0000000800540069006d006501000000000000073f0000030000fffffffb0000000800540069006d00650100000000000004500000000000000000000004ba0000039400000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000 Selection: collapsed: false Time: @@ -325,7 +354,7 @@ Window Geometry: Tool Properties: collapsed: false Views: - collapsed: true - Width: 1920 - X: 1920 - Y: 360 + collapsed: false + Width: 1855 + X: 65 + Y: 24 diff --git a/zed_nodelet_example/CMakeLists.txt b/zed_nodelet_example/CMakeLists.txt new file mode 100644 index 00000000..94500898 --- /dev/null +++ b/zed_nodelet_example/CMakeLists.txt @@ -0,0 +1,19 @@ +cmake_minimum_required(VERSION 2.8.7) + +project(zed_nodelet_example) + +find_package(catkin REQUIRED) + +catkin_package() + +#Add all files in subdirectories of the project in +# a dummy_target so qtcreator have access to all files +FILE(GLOB_RECURSE extra_files ${CMAKE_SOURCE_DIR}/*) +add_custom_target(dummy_${PROJECT_NAME} SOURCES ${extra_files}) + +############################################################################### +# INSTALL + +install(DIRECTORY + launch + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}) diff --git a/zed_nodelet_example/README.md b/zed_nodelet_example/README.md new file mode 100644 index 00000000..bc1e07aa --- /dev/null +++ b/zed_nodelet_example/README.md @@ -0,0 +1,22 @@ +# Stereolabs ZED Camera - ROS Nodelet example + +`zed_nodelet_example` is a ROS package to illustrate how to load the `ZEDWrapperNodelet` with an external nodelet manager and use the intraprocess communication to generate a virtual laser scan thanks to the nodelet `depthimage_to_laserscan` + +### Run the program + +To launch the wrapper nodelet along with the `depthimage_to_laserscan` nodelet, open a terminal and launch: + +`$ roslaunch zed_nodelet_example zed_nodelet_laserscan.launch` + +**Note**: Remember to change the parameter `camera_model` to `0` if you are using a **ZED** 
or to `1` if you are using a **ZED Mini** + +## Visualization +To visualize the result of the process open Rviz, add a `LaserScan` visualization and set `/zed/scan` as `topic` parameter + +Virtual 2D laser scan rendered in Rviz. You can see the projection of the virtual laser scan on the RGB image on the left +![Virtual laser scan rendered in Rviz](images/laserscan.png) + +Virtual 2D laser scan rendered in Rviz over the 3D depth cloud. You can see the projection of the virtual laser scan on the RGB image on the left +![Virtual laser scan rendered in Rviz on the Depthcloud](images/laserscan-depthcloud.png) + +[More](https://www.stereolabs.com/documentation/guides/using-zed-with-ros/introduction.html) diff --git a/zed_nodelet_example/images/laserscan-depthcloud.png b/zed_nodelet_example/images/laserscan-depthcloud.png new file mode 100644 index 00000000..712a91c1 Binary files /dev/null and b/zed_nodelet_example/images/laserscan-depthcloud.png differ diff --git a/zed_nodelet_example/images/laserscan.png b/zed_nodelet_example/images/laserscan.png new file mode 100644 index 00000000..96d24b54 Binary files /dev/null and b/zed_nodelet_example/images/laserscan.png differ diff --git a/zed_nodelet_example/launch/zed_nodelet_laserscan.launch b/zed_nodelet_example/launch/zed_nodelet_laserscan.launch new file mode 100644 index 00000000..3fae64c7 --- /dev/null +++ b/zed_nodelet_example/launch/zed_nodelet_laserscan.launch @@ -0,0 +1,64 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/zed_nodelet_example/package.xml b/zed_nodelet_example/package.xml new file mode 100644 index 00000000..75908476 --- /dev/null +++ b/zed_nodelet_example/package.xml @@ -0,0 +1,16 @@ + + + zed_nodelet_example + 1.0.0 + + "zed_nodelet_example" is a ROS package to illustrate how to load the ZEDWrapperNodelet with an external nodelet manager and use the intraprocess communication. 
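For a quick check of the virtual laser scan described above, a minimal C++ subscriber can print the range measured straight ahead. This is only a sketch: it assumes the standard ROS Kinetic C++ API and the `/zed/scan` topic named in the README; the node and package names are illustrative, not part of this patch.

    #include <ros/ros.h>
    #include <sensor_msgs/LaserScan.h>

    // Minimal consumer of the virtual laser scan produced by depthimage_to_laserscan.
    // Prints the range at the centre of the scan (straight ahead of the camera).
    void scanCallback(const sensor_msgs::LaserScan::ConstPtr& scan) {
        if (scan->ranges.empty()) return;
        ROS_INFO("Range straight ahead: %.2f m", scan->ranges[scan->ranges.size() / 2]);
    }

    int main(int argc, char** argv) {
        ros::init(argc, argv, "zed_scan_listener"); // node name is illustrative
        ros::NodeHandle nh;
        ros::Subscriber sub = nh.subscribe("/zed/scan", 1, scanCallback);
        ros::spin();
        return 0;
    }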
+ + STEREOLABS + BSD + + catkin + + depthimage_to_laserscan + nodelet + zed_wrapper + diff --git a/zed_rtabmap_example/CMakeLists.txt b/zed_rtabmap_example/CMakeLists.txt new file mode 100644 index 00000000..6fea9cd2 --- /dev/null +++ b/zed_rtabmap_example/CMakeLists.txt @@ -0,0 +1,19 @@ +cmake_minimum_required(VERSION 2.8.7) + +project(zed_rtabmap_example) + +find_package(catkin REQUIRED) + +catkin_package() + +#Add all files in subdirectories of the project in +# a dummy_target so qtcreator have access to all files +FILE(GLOB_RECURSE extra_files ${CMAKE_SOURCE_DIR}/*) +add_custom_target(dummy_${PROJECT_NAME} SOURCES ${extra_files}) + +############################################################################### +# INSTALL + +install(DIRECTORY + launch + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION}) diff --git a/zed_rtabmap_example/README.md b/zed_rtabmap_example/README.md new file mode 100644 index 00000000..d83e15c1 --- /dev/null +++ b/zed_rtabmap_example/README.md @@ -0,0 +1,14 @@ +# Stereolabs ZED Camera - RTAB-map example + +This package shows how to use the ZED Wrapper with [RTAB-map](http://introlab.github.io/rtabmap/) + +### Run the program + +To launch the example, open a terminal and launch: + + $ roslaunch zed_rtabmap_example zed_rtabmap.launch + +Example of indoor 3D mapping using RTAB-map and ZED + +![Example of indoor 3D mapping](images/rtab-map.png) + diff --git a/zed_rtabmap_example/images/rtab-map.png b/zed_rtabmap_example/images/rtab-map.png new file mode 100644 index 00000000..566e45a3 Binary files /dev/null and b/zed_rtabmap_example/images/rtab-map.png differ diff --git a/zed_rtabmap_example/launch/zed_rtabmap.launch b/zed_rtabmap_example/launch/zed_rtabmap.launch new file mode 100644 index 00000000..efc2ad5a --- /dev/null +++ b/zed_rtabmap_example/launch/zed_rtabmap.launch @@ -0,0 +1,67 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/zed_rtabmap_example/package.xml b/zed_rtabmap_example/package.xml new file mode 100644 index 00000000..822cb27a --- /dev/null +++ b/zed_rtabmap_example/package.xml @@ -0,0 +1,16 @@ + + + zed_rtabmap_example + 1.0.0 + + "zed_rtabmap_example" is a ROS package to show how to use the ROS wrapper with "RTAB-MAP" + + STEREOLABS + BSD + + catkin + + rtabmap + rtabmap_ros + zed_wrapper + diff --git a/zed_wrapper/.gitignore b/zed_wrapper/.gitignore new file mode 100644 index 00000000..7a8a3cd7 --- /dev/null +++ b/zed_wrapper/.gitignore @@ -0,0 +1,41 @@ +# C++ objects and libs + +*.slo +*.lo +*.o +*.a +*.la +*.lai +*.so +*.dll +*.dylib + +# Qt-es + +/.qmake.cache +/.qmake.stash +*.txt.user +*.txt.user.* +*.pro.user +*.pro.user.* +*.qbs.user +*.qbs.user.* +*.moc +moc_*.cpp +moc_*.h +qrc_*.cpp +ui_*.h +Makefile* +*build-* + +# QtCreator + +*.autosave + +# QtCtreator Qml +*.qmlproject.user +*.qmlproject.user.* + +# QtCtreator CMake +CMakeLists.txt.user* + diff --git a/CMakeLists.txt b/zed_wrapper/CMakeLists.txt similarity index 75% rename from CMakeLists.txt rename to zed_wrapper/CMakeLists.txt index c677419f..176ed10e 100644 --- a/CMakeLists.txt +++ b/zed_wrapper/CMakeLists.txt @@ -35,30 +35,45 @@ checkPackage("OPENCV_IMGPROC" "OpenCV imgproc not found, install it from the tut find_package(CUDA) checkPackage("CUDA" "CUDA not found, install it from:\n https://developer.nvidia.com/cuda-downloads") -find_package(PCL) -checkPackage("PCL" "PCL not found, try to install it with:\n sudo apt-get install libpcl1 ros-$(rosversion -d)-pcl-ros") +find_package(OpenMP) +checkPackage("OpenMP" 
"OpenMP not found, please install it to improve performances: 'sudo apt install libomp-dev'") +if (OPENMP_FOUND) + set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${OpenMP_C_FLAGS}") + set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${OpenMP_CXX_FLAGS}") + set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${OpenMP_EXE_LINKER_FLAGS}") +endif() find_package(catkin COMPONENTS image_transport roscpp rosconsole sensor_msgs + stereo_msgs dynamic_reconfigure tf2_ros - pcl_conversions nodelet tf2_geometry_msgs + message_generation ) checkPackage("image_transport" "") checkPackage("roscpp" "") checkPackage("rosconsole" "") checkPackage("sensor_msgs" "") +checkPackage("stereo_msgs" "") checkPackage("dynamic_reconfigure" "") checkPackage("tf2_ros" "") -checkPackage("pcl_conversions" "") checkPackage("nodelet" "") checkPackage("tf2_geometry_msgs" "") +checkPackage("message_generation" "") + +add_service_files( FILES + set_initial_pose.srv + reset_odometry.srv + reset_tracking.srv + ) + +generate_messages() generate_dynamic_reconfigure_options( cfg/Zed.cfg @@ -69,36 +84,47 @@ catkin_package( roscpp rosconsole sensor_msgs - opencv + stereo_msgs + opencv3 image_transport dynamic_reconfigure tf2_ros tf2_geometry_msgs - pcl_conversions + message_runtime ) + +############################################################################### +# SOURCES + +set(TOOLS_SRC + ${CMAKE_CURRENT_SOURCE_DIR}/src/tools/src/sl_tools.cpp ) +set(NODE_SRC ${CMAKE_CURRENT_SOURCE_DIR}/src/zed_wrapper_node.cpp) +set(NODELET_SRC ${CMAKE_CURRENT_SOURCE_DIR}/src/nodelet/src/zed_wrapper_nodelet.cpp) + +############################################################################### + ############################################################################### # INCLUDES # Specify locations of header files. 
include_directories( - ${catkin_INCLUDE_DIRS} - ${CUDA_INCLUDE_DIRS} - ${ZED_INCLUDE_DIRS} - ${OpenCV_INCLUDE_DIRS} - ${PCL_INCLUDE_DIRS} + ${catkin_INCLUDE_DIRS} + ${CUDA_INCLUDE_DIRS} + ${ZED_INCLUDE_DIRS} + ${OpenCV_INCLUDE_DIRS} + ${CMAKE_CURRENT_SOURCE_DIR}/src/tools/include + ${CMAKE_CURRENT_SOURCE_DIR}/src/nodelet/include ) link_directories(${ZED_LIBRARY_DIR}) link_directories(${CUDA_LIBRARY_DIRS}) link_directories(${OpenCV_LIBRARY_DIRS}) -link_directories(${PCL_LIBRARY_DIRS}) ############################################################################### ############################################################################### # EXECUTABLE - if(NOT DEFINED CUDA_NPP_LIBRARIES_ZED) #To deal with cuda 9 nppi libs and previous versions of ZED SDK set(CUDA_NPP_LIBRARIES_ZED ${CUDA_npp_LIBRARY} ${CUDA_npps_LIBRARY} ${CUDA_nppi_LIBRARY}) @@ -106,19 +132,18 @@ endif() add_definitions(-std=c++11) -list(REMOVE_ITEM PCL_LIBRARIES "vtkproj4") ## if using from pcl package (pcl/vtk bug) set(LINK_LIBRARIES ${catkin_LIBRARIES} ${ZED_LIBRARIES} ${CUDA_LIBRARIES} ${CUDA_NPP_LIBRARIES_ZED} ${OpenCV_LIBS} - ${PCL_LIBRARIES}) + ) -add_library(ZEDWrapper src/zed_wrapper_nodelet.cpp) +add_library(ZEDWrapper ${TOOLS_SRC} ${NODELET_SRC}) target_link_libraries(ZEDWrapper ${LINK_LIBRARIES}) add_dependencies(ZEDWrapper ${PROJECT_NAME}_gencfg) -add_executable(zed_wrapper_node src/zed_wrapper_node.cpp) +add_executable(zed_wrapper_node ${NODE_SRC}) target_link_libraries(zed_wrapper_node ZEDWrapper ${LINK_LIBRARIES}) add_dependencies(zed_wrapper_node ${PROJECT_NAME}_gencfg) diff --git a/zed_wrapper/README.md b/zed_wrapper/README.md new file mode 100644 index 00000000..046e86de --- /dev/null +++ b/zed_wrapper/README.md @@ -0,0 +1,58 @@ +# Stereolabs ZED Camera - ROS Integration + +This package lets you use the ZED stereo camera with ROS. It outputs the camera left and right images, depth map, point cloud, pose information and supports the use of multiple ZED cameras. + +## Getting started + +- First, download the latest version of the ZED SDK on [stereolabs.com](https://www.stereolabs.com/developers/) +- [Install](#build-the-program) the ZED ROS wrapper. +- For more information, check out our [ROS documentation](https://www.stereolabs.com/documentation/guides/using-zed-with-ros/introduction.html) or our [ROS wiki](http://wiki.ros.org/zed-ros-wrapper). If you want to customize the wrapper, check the [ZED API documentation](https://www.stereolabs.com/developers/documentation/API/). + +### Prerequisites + +- Ubuntu 16.04 +- [ZED SDK **≥ 2.3**](https://www.stereolabs.com/developers/) and its dependency [CUDA](https://developer.nvidia.com/cuda-downloads) +- [ROS Kinetic](http://wiki.ros.org/kinetic/Installation/Ubuntu) + +### Build the program + +The zed_ros_wrapper is a catkin package. 
It depends on the following ROS packages: + + - tf2_ros + - tf2_geometry_msgs + - nav_msgs + - roscpp + - rosconsole + - sensor_msgs + - stereo_msgs + - opencv3 + - image_transport + - dynamic_reconfigure + - urdf + + +Open a terminal and build the package: + + cd ~/catkin_ws/src + git clone https://github.com/stereolabs/zed-ros-wrapper.git + cd ../ + catkin_make + source ./devel/setup.bash + +### Run the program + +To launch ZED node, use: + + $ roslaunch zed_wrapper zed.launch + +**Note**: Remember to change the parameter `camera_model` to `0` if you are using a **ZED** or to `1` if you are using a **ZED Mini** + + To select the ZED from its serial number + + $ roslaunch zed_wrapper zed.launch serial_number:=1010 + +**Note**: replace 1010 with the actual SN + +If you want to use the `ZEDWrapperNodelet` with an external nodelet manager follow the `zed_nodelet_example` approach + +[More](https://www.stereolabs.com/documentation/guides/using-zed-with-ros/introduction.html) diff --git a/cfg/Zed.cfg b/zed_wrapper/cfg/Zed.cfg similarity index 85% rename from cfg/Zed.cfg rename to zed_wrapper/cfg/Zed.cfg index 465a7b4f..816893f5 100755 --- a/cfg/Zed.cfg +++ b/zed_wrapper/cfg/Zed.cfg @@ -9,5 +9,6 @@ gen.add("confidence", int_t, 0, "Confidence threshold, the lower the better", 80 gen.add("exposure", int_t, 1, "Exposure value when manual controlled", 100, 0, 100); gen.add("gain", int_t, 2, "Gain value when manual controlled", 50, 0, 100); gen.add("auto_exposure", bool_t, 3, "Enable/Disable auto control of exposure and gain", True); +gen.add("mat_resize_factor", double_t, 4, "Image/Measures resize factor", 1.0, 0.1, 1.0); exit(gen.generate(PACKAGE, "zed_wrapper", "Zed")) diff --git a/launch/README.md b/zed_wrapper/launch/README.md similarity index 100% rename from launch/README.md rename to zed_wrapper/launch/README.md diff --git a/zed_wrapper/launch/zed.launch b/zed_wrapper/launch/zed.launch new file mode 100644 index 00000000..92b060c9 --- /dev/null +++ b/zed_wrapper/launch/zed.launch @@ -0,0 +1,48 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/zed_wrapper/launch/zed_camera.launch b/zed_wrapper/launch/zed_camera.launch new file mode 100644 index 00000000..af2968e7 --- /dev/null +++ b/zed_wrapper/launch/zed_camera.launch @@ -0,0 +1,151 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + $(arg initial_pose) + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/zed_wrapper/launch/zed_camera_nodelet.launch b/zed_wrapper/launch/zed_camera_nodelet.launch new file mode 100644 index 00000000..8e0c5866 --- /dev/null +++ b/zed_wrapper/launch/zed_camera_nodelet.launch @@ -0,0 +1,152 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + $(arg initial_pose) + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/zed_wrapper/launch/zed_multi_cam.launch b/zed_wrapper/launch/zed_multi_cam.launch new file mode 100644 index 00000000..7c32ddbd --- /dev/null +++ b/zed_wrapper/launch/zed_multi_cam.launch @@ -0,0 +1,64 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/zed_wrapper/launch/zed_multi_gpu.launch b/zed_wrapper/launch/zed_multi_gpu.launch new file mode 100644 index 
00000000..8f7f639f --- /dev/null +++ b/zed_wrapper/launch/zed_multi_gpu.launch @@ -0,0 +1,69 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/nodelet_plugins.xml b/zed_wrapper/nodelet_plugins.xml similarity index 100% rename from nodelet_plugins.xml rename to zed_wrapper/nodelet_plugins.xml diff --git a/zed_wrapper/package.xml b/zed_wrapper/package.xml new file mode 100644 index 00000000..9ab9bace --- /dev/null +++ b/zed_wrapper/package.xml @@ -0,0 +1,33 @@ + + + zed_wrapper + 1.0.0 + zed_wrapper is a ROS wrapper for the ZED library + for interfacing with the ZED camera. + +STEREOLABS + BSD + + catkin + + tf2_ros + tf2_geometry_msgs + nav_msgs + roscpp + rosconsole + sensor_msgs + stereo_msgs + opencv3 + image_transport + dynamic_reconfigure + nodelet + + urdf + message_generation + robot_state_publisher + message_runtime + + + + + diff --git a/records/record_depth.sh b/zed_wrapper/records/record_depth.sh similarity index 100% rename from records/record_depth.sh rename to zed_wrapper/records/record_depth.sh diff --git a/records/record_stereo.sh b/zed_wrapper/records/record_stereo.sh similarity index 100% rename from records/record_stereo.sh rename to zed_wrapper/records/record_stereo.sh diff --git a/zed_wrapper/src/nodelet/include/zed_wrapper_nodelet.hpp b/zed_wrapper/src/nodelet/include/zed_wrapper_nodelet.hpp new file mode 100644 index 00000000..e1f987ff --- /dev/null +++ b/zed_wrapper/src/nodelet/include/zed_wrapper_nodelet.hpp @@ -0,0 +1,377 @@ +#ifndef ZED_WRAPPER_NODELET_H +#define ZED_WRAPPER_NODELET_H + +/////////////////////////////////////////////////////////////////////////// +// +// Copyright (c) 2018, STEREOLABS. +// +// All rights reserved. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +/////////////////////////////////////////////////////////////////////////// + +/**************************************************************************************************** + ** This sample is a wrapper for the ZED library in order to use the ZED Camera with ROS. ** + ** A set of parameters can be specified in the launch file. 
** + ****************************************************************************************************/ + +#include + +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +#include + +#include +#include +#include + +using namespace std; + +namespace zed_wrapper { + + class ZEDWrapperNodelet : public nodelet::Nodelet { + + public: + /* \brief Default constructor + */ + ZEDWrapperNodelet(); + + /* \brief \ref ZEDWrapperNodelet destructor + */ + virtual ~ZEDWrapperNodelet(); + + /* \brief Image to ros message conversion + * \param img : the image to publish + * \param encodingType : the sensor_msgs::image_encodings encoding type + * \param frameId : the id of the reference frame of the image + * \param t : the ros::Time to stamp the image + */ + static sensor_msgs::ImagePtr imageToROSmsg(cv::Mat img, const std::string encodingType, std::string frameId, ros::Time t); + + private: + /* \brief Initialization function called by the Nodelet base class + */ + virtual void onInit(); + + /* \brief ZED camera polling thread function + */ + void device_poll(); + + /* \brief Pointcloud publishing function + */ + void pointcloud_thread(); + + protected: + + /* \brief Publish the pose of the camera in "Map" frame with a ros Publisher + * \param poseBaseTransform : Transformation representing the camera pose from odom frame to map frame + * \param t : the ros::Time to stamp the image + */ + void publishPose(tf2::Transform poseBaseTransform, ros::Time t); + + /* \brief Publish the pose of the camera in "Odom" frame with a ros Publisher + * \param odom_base_transform : Transformation representing the camera pose from base frame to odom frame + * \param t : the ros::Time to stamp the image + */ + void publishOdom(tf2::Transform baseToOdomTransform, ros::Time t); + + /* \brief Publish the pose of the camera in "Map" frame as a transformation + * \param base_transform : Transformation representing the camera pose from odom frame to map frame + * \param t : the ros::Time to stamp the image + */ + void publishPoseFrame(tf2::Transform baseTransform, ros::Time t); + + /* \brief Publish the odometry of the camera in "Odom" frame as a transformation + * \param base_transform : Transformation representing the camera pose from base frame to odom frame + * \param t : the ros::Time to stamp the image + */ + void publishOdomFrame(tf2::Transform baseToOdomTransform, ros::Time t); + + /* \brief Publish the pose of the imu in "Odom" frame as a transformation + * \param base_transform : Transformation representing the imu pose from base frame to odom frame + * \param t : the ros::Time to stamp the image + */ + void publishImuFrame(tf2::Transform baseTransform); + + /* \brief Publish a cv::Mat image with a ros Publisher + * \param img : the image to publish + * \param pub_img : the publisher object to use (different image publishers exist) + * \param img_frame_id : the id of the reference frame of the image (different image frames exist) + * \param t : the ros::Time to stamp the image + */ + void publishImage(cv::Mat img, image_transport::Publisher& pubImg, string imgFrameId, ros::Time t); + + /* \brief Publish a cv::Mat depth image with a ros Publisher + * \param depth : the depth image to publish + * \param t : the ros::Time to stamp the depth image + */ + void publishDepth(cv::Mat depth, ros::Time t); + + /* \brief Publish a cv::Mat confidence image with a ros Publisher + * \param conf : the confidence image to publish + * \param t : the ros::Time to 
stamp the depth image + */ + void publishConf(cv::Mat conf, ros::Time t); + + /* \brief Publish a pointCloud with a ros Publisher + * \param width : the width of the point cloud + * \param height : the height of the point cloud + */ + void publishPointCloud(); + + /* \brief Publish the informations of a camera with a ros Publisher + * \param cam_info_msg : the information message to publish + * \param pub_cam_info : the publisher object to use + * \param t : the ros::Time to stamp the message + */ + void publishCamInfo(sensor_msgs::CameraInfoPtr camInfoMsg, ros::Publisher pubCamInfo, ros::Time t); + + /* \brief Publish a cv::Mat disparity image with a ros Publisher + * \param disparity : the disparity image to publish + * \param t : the ros::Time to stamp the depth image + */ + void publishDisparity(cv::Mat disparity, ros::Time t); + + /* \brief Get the information of the ZED cameras and store them in an information message + * \param zed : the sl::zed::Camera* pointer to an instance + * \param left_cam_info_msg : the information message to fill with the left camera informations + * \param right_cam_info_msg : the information message to fill with the right camera informations + * \param left_frame_id : the id of the reference frame of the left camera + * \param right_frame_id : the id of the reference frame of the right camera + */ + void fillCamInfo(sl::Camera& zed, sensor_msgs::CameraInfoPtr leftCamInfoMsg, sensor_msgs::CameraInfoPtr rightCamInfoMsg, + string leftFrameId, string rightFrameId, bool rawParam = false); + + /* \brief Callback to handle dynamic reconfigure events in ROS + */ + void dynamicReconfCallback(zed_wrapper::ZedConfig& config, uint32_t level); + + /* \brief Callback to publish IMU raw data with a ROS publisher. + * \param e : the ros::TimerEvent binded to the callback + */ + void imuPubCallback(const ros::TimerEvent& e); + + /* \brief Service callback to reset_tracking service + * Tracking pose is reinitialized to the value available in the ROS Param server + */ + bool on_reset_tracking(zed_wrapper::reset_tracking::Request& req, + zed_wrapper::reset_tracking::Response& res); + + /* \brief Service callback to reset_odometry service + * Odometry is reset to clear drift and odometry frame gets the latest pose + * from ZED tracking. + */ + bool on_reset_odometry(zed_wrapper::reset_odometry::Request& req, + zed_wrapper::reset_odometry::Response& res); + + /* \brief Service callback to set_pose service + * Tracking pose is set to the new values + */ + bool on_set_pose(zed_wrapper::set_initial_pose::Request& req, + zed_wrapper::set_initial_pose::Response& res); + + /* \brief Utility to initialize the pose variables + */ + void set_pose(float xt, float yt, float zt, float rr, float pr, float yr); + + /* \bried Start tracking loading the parameters from param server + */ + void start_tracking(); + + /* \bried Check if FPS and Resolution chosen by user are correct. + * Modifies FPS to match correct value. 
+ */ + void checkResolFps(); + + private: + // SDK version + int verMajor; + int verMinor; + int verSubMinor; + + // ROS + ros::NodeHandle nh; + ros::NodeHandle nhNs; + std::thread devicePollThread; + std::thread mPcThread; // Point Cloud thread + + bool mStopNode; + + // Publishers + image_transport::Publisher pubRgb; + image_transport::Publisher pubRawRgb; + image_transport::Publisher pubLeft; + image_transport::Publisher pubRawLeft; + image_transport::Publisher pubRight; + image_transport::Publisher pubRawRight; + image_transport::Publisher pubDepth; + image_transport::Publisher pubConfImg; + ros::Publisher pubConfMap; + ros::Publisher pubDisparity; + ros::Publisher pubCloud; + ros::Publisher pubRgbCamInfo; + ros::Publisher pubLeftCamInfo; + ros::Publisher pubRightCamInfo; + ros::Publisher pubRgbCamInfoRaw; + ros::Publisher pubLeftCamInfoRaw; + ros::Publisher pubRightCamInfoRaw; + ros::Publisher pubDepthCamInfo; + ros::Publisher pubPose; + ros::Publisher pubOdom; + ros::Publisher pubImu; + ros::Publisher pubImuRaw; + ros::Timer pubImuTimer; + + // Service + bool trackingActivated; + ros::ServiceServer srvSetInitPose; + ros::ServiceServer srvResetOdometry; + ros::ServiceServer srvResetTracking; + + // Camera info + sensor_msgs::CameraInfoPtr rgbCamInfoMsg; + sensor_msgs::CameraInfoPtr leftCamInfoMsg; + sensor_msgs::CameraInfoPtr rightCamInfoMsg; + sensor_msgs::CameraInfoPtr rgbCamInfoRawMsg; + sensor_msgs::CameraInfoPtr leftCamInfoRawMsg; + sensor_msgs::CameraInfoPtr rightCamInfoRawMsg; + sensor_msgs::CameraInfoPtr depthCamInfoMsg; + + // tf + tf2_ros::TransformBroadcaster transformPoseBroadcaster; + tf2_ros::TransformBroadcaster transformOdomBroadcaster; + tf2_ros::TransformBroadcaster transformImuBroadcaster; + + std::string rgbFrameId; + std::string rgbOptFrameId; + + std::string depthFrameId; + std::string depthOptFrameId; + + std::string disparityFrameId; + std::string disparityOptFrameId; + + std::string confidenceFrameId; + std::string confidenceOptFrameId; + + std::string cloudFrameId; + + std::string mapFrameId; + std::string odometryFrameId; + std::string baseFrameId; + std::string rightCamFrameId; + std::string rightCamOptFrameId; + std::string leftCamFrameId; + std::string leftCamOptFrameId; + std::string imuFrameId; + + // initialization Transform listener + std::shared_ptr tfBuffer; + std::shared_ptr tfListener; + bool publishTf; + bool cameraFlip; + + // Launch file parameters + int resolution; + int frameRate; + int quality; + int sensingMode; + int gpuId; + int zedId; + int depthStabilization; + std::string odometryDb; + std::string svoFilepath; + double imuPubRate; + bool verbose; + + // IMU time + ros::Time imuTime; + + bool poseSmoothing; + bool spatialMemory; + bool initOdomWithPose; + + //Tracking variables + sl::Transform initialPoseSl; + std::vector initialTrackPose; + + tf2::Transform odomToMapTransform; + tf2::Transform baseToOdomTransform; + + // zed object + sl::InitParameters param; + sl::Camera zed; + unsigned int serial_number; + int userCamModel; // Camera model set by ROS Param + sl::MODEL realCamModel; // Camera model requested to SDK + + // flags + double matResizeFactor; + int confidence; + int exposure; + int gain; + bool autoExposure; + bool triggerAutoExposure; + bool computeDepth; + bool grabbing = false; + int openniDepthMode = 0; // 16 bit UC data in mm else 32F in m, for more info http://www.ros.org/reps/rep-0118.html + + // Frame and Mat + int camWidth; + int camHeight; + int matWidth; + int matHeight; + cv::Mat leftImRGB; + cv::Mat rightImRGB; 
+ cv::Mat confImRGB; + cv::Mat confMapFloat; + + // Mutex + std::mutex dataMutex; + std::mutex mPcMutex; + std::condition_variable mPcDataReadyCondVar; + bool mPcDataReady; + + // Point cloud variables + sl::Mat cloud; + sensor_msgs::PointCloud2 mPointcloudMsg; + string pointCloudFrameId = ""; + ros::Time pointCloudTime; + + // Dynamic reconfigure + boost::shared_ptr> server; + + // Coordinate Changing indices and signs + unsigned int xIdx, yIdx, zIdx; + int xSign, ySign, zSign; + + }; // class ZEDROSWrapperNodelet +} // namespace + +#include +PLUGINLIB_EXPORT_CLASS(zed_wrapper::ZEDWrapperNodelet, nodelet::Nodelet) + +#endif // ZED_WRAPPER_NODELET_H diff --git a/zed_wrapper/src/nodelet/src/zed_wrapper_nodelet.cpp b/zed_wrapper/src/nodelet/src/zed_wrapper_nodelet.cpp new file mode 100644 index 00000000..7afdbe1b --- /dev/null +++ b/zed_wrapper/src/nodelet/src/zed_wrapper_nodelet.cpp @@ -0,0 +1,1700 @@ +/////////////////////////////////////////////////////////////////////////// +// +// Copyright (c) 2018, STEREOLABS. +// +// All rights reserved. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+// +/////////////////////////////////////////////////////////////////////////// +#include "zed_wrapper_nodelet.hpp" +#include "sl_tools.h" + +#include + +#ifndef NDEBUG +#include +#endif + +#include +#include +#include +#include +#include +#include + +#include + +// >>>>> Backward compatibility +#define COORDINATE_SYSTEM_IMAGE static_cast(0) +#define COORDINATE_SYSTEM_RIGHT_HANDED_Z_UP \ + static_cast(3) +#define COORDINATE_SYSTEM_RIGHT_HANDED_Z_UP_X_FWD \ + static_cast(5) +// <<<<< Backward compatibility + +using namespace std; + +namespace zed_wrapper { + + ZEDWrapperNodelet::ZEDWrapperNodelet() : Nodelet() {} + + ZEDWrapperNodelet::~ZEDWrapperNodelet() { + if (devicePollThread.joinable()) { + devicePollThread.join(); + } + + if (mPcThread.joinable()) { + mPcThread.join(); + } + } + + void ZEDWrapperNodelet::onInit() { + + mStopNode = false; + mPcDataReady = false; + +#ifndef NDEBUG + if (ros::console::set_logger_level(ROSCONSOLE_DEFAULT_NAME, + ros::console::levels::Debug)) { + ros::console::notifyLoggerLevelsChanged(); + } +#endif + + // Launch file parameters + resolution = sl::RESOLUTION_HD720; + quality = sl::DEPTH_MODE_PERFORMANCE; + sensingMode = sl::SENSING_MODE_STANDARD; + frameRate = 30; + gpuId = -1; + zedId = 0; + serial_number = 0; + odometryDb = ""; + imuPubRate = 100.0; + initialTrackPose.resize(6); + for (int i = 0; i < 6; i++) { + initialTrackPose[i] = 0.0f; + } + matResizeFactor = 1.0; + + verbose = true; + + nh = getMTNodeHandle(); + nhNs = getMTPrivateNodeHandle(); + + // Set default coordinate frames + nhNs.param("pose_frame", mapFrameId, "pose_frame"); + nhNs.param("odometry_frame", odometryFrameId, "odometry_frame"); + nhNs.param("base_frame", baseFrameId, "base_frame"); + nhNs.param("imu_frame", imuFrameId, "imu_link"); + + nhNs.param("left_camera_frame", leftCamFrameId, + "left_camera_frame"); + nhNs.param("left_camera_optical_frame", leftCamOptFrameId, + "left_camera_optical_frame"); + + nhNs.param("right_camera_frame", rightCamFrameId, + "right_camera_frame"); + nhNs.param("right_camera_optical_frame", rightCamOptFrameId, + "right_camera_optical_frame"); + + depthFrameId = leftCamFrameId; + depthOptFrameId = leftCamOptFrameId; + + // Note: Depth image frame id must match color image frame id + cloudFrameId = depthOptFrameId; + rgbFrameId = depthFrameId; + rgbOptFrameId = cloudFrameId; + + disparityFrameId = depthFrameId; + disparityOptFrameId = depthOptFrameId; + + confidenceFrameId = depthFrameId; + confidenceOptFrameId = depthOptFrameId; + + // Get parameters from launch file + nhNs.getParam("resolution", resolution); + nhNs.getParam("frame_rate", frameRate); + + checkResolFps(); + + nhNs.getParam("verbose", verbose); + nhNs.getParam("quality", quality); + nhNs.getParam("sensing_mode", sensingMode); + nhNs.getParam("openni_depth_mode", openniDepthMode); + nhNs.getParam("gpu_id", gpuId); + nhNs.getParam("zed_id", zedId); + nhNs.getParam("depth_stabilization", depthStabilization); + int tmp_sn = 0; + nhNs.getParam("serial_number", tmp_sn); + if (tmp_sn > 0) { + serial_number = tmp_sn; + } + nhNs.getParam("camera_model", userCamModel); + + // Publish odometry tf + nhNs.param("publish_tf", publishTf, true); + + nhNs.param("camera_flip", cameraFlip, false); + + if (serial_number > 0) { + NODELET_INFO_STREAM("SN : " << serial_number); + } + + // Print order frames + NODELET_INFO_STREAM("pose_frame \t\t -> " << mapFrameId); + NODELET_INFO_STREAM("odometry_frame \t\t -> " << odometryFrameId); + NODELET_INFO_STREAM("base_frame \t\t -> " << baseFrameId); + 
NODELET_INFO_STREAM("imu_link \t\t -> " << imuFrameId); + NODELET_INFO_STREAM("left_camera_frame \t -> " << leftCamFrameId); + NODELET_INFO_STREAM("left_camera_optical_frame -> " << leftCamOptFrameId); + NODELET_INFO_STREAM("right_camera_frame \t -> " << rightCamFrameId); + NODELET_INFO_STREAM("right_camera_optical_frame -> " << rightCamOptFrameId); + NODELET_INFO_STREAM("depth_frame \t\t -> " << depthFrameId); + NODELET_INFO_STREAM("depth_optical_frame \t -> " << depthOptFrameId); + NODELET_INFO_STREAM("disparity_frame \t -> " << disparityFrameId); + NODELET_INFO_STREAM("disparity_optical_frame -> " << disparityOptFrameId); + + // Status of map TF + NODELET_INFO_STREAM("Publish " << mapFrameId << " [" + << (publishTf ? "TRUE" : "FALSE") << "]"); + NODELET_INFO_STREAM("Camera Flip [" << (cameraFlip ? "TRUE" : "FALSE") << "]"); + + // Status of odometry TF + // NODELET_INFO_STREAM("Publish " << odometry_frame_id << " [" << (publish_tf + // ? "TRUE" : "FALSE") << "]"); + + std::string img_topic = "image_rect_color"; + std::string img_raw_topic = "image_raw_color"; + + // Set the default topic names + string left_topic = "left/" + img_topic; + string left_raw_topic = "left/" + img_raw_topic; + string left_cam_info_topic = "left/camera_info"; + string left_cam_info_raw_topic = "left/camera_info_raw"; + + string right_topic = "right/" + img_topic; + string right_raw_topic = "right/" + img_raw_topic; + string right_cam_info_topic = "right/camera_info"; + string right_cam_info_raw_topic = "right/camera_info_raw"; + + string rgb_topic = "rgb/" + img_topic; + string rgb_raw_topic = "rgb/" + img_raw_topic; + string rgb_cam_info_topic = "rgb/camera_info"; + string rgb_cam_info_raw_topic = "rgb/camera_info_raw"; + + string depth_topic = "depth/"; + if (openniDepthMode) { + NODELET_INFO_STREAM("Openni depth mode activated"); + depth_topic += "depth_raw_registered"; + } else { + depth_topic += "depth_registered"; + } + + string depth_cam_info_topic = "depth/camera_info"; + + string disparity_topic = "disparity/disparity_image"; + + string point_cloud_topic = "point_cloud/cloud_registered"; + + string conf_img_topic = "confidence/confidence_image"; + string conf_map_topic = "confidence/confidence_map"; + + string pose_topic = "map"; + string odometry_topic = "odom"; + string imu_topic = "imu/data"; + string imu_topic_raw = "imu/data_raw"; + + nhNs.getParam("rgb_topic", rgb_topic); + nhNs.getParam("rgb_raw_topic", rgb_raw_topic); + nhNs.getParam("rgb_cam_info_topic", rgb_cam_info_topic); + nhNs.getParam("rgb_cam_info_raw_topic", rgb_cam_info_raw_topic); + + nhNs.getParam("left_topic", left_topic); + nhNs.getParam("left_raw_topic", left_raw_topic); + nhNs.getParam("left_cam_info_topic", left_cam_info_topic); + nhNs.getParam("left_cam_info_raw_topic", left_cam_info_raw_topic); + + nhNs.getParam("right_topic", right_topic); + nhNs.getParam("right_raw_topic", right_raw_topic); + nhNs.getParam("right_cam_info_topic", right_cam_info_topic); + nhNs.getParam("right_cam_info_raw_topic", right_cam_info_raw_topic); + + nhNs.getParam("depth_topic", depth_topic); + nhNs.getParam("depth_cam_info_topic", depth_cam_info_topic); + + nhNs.getParam("disparity_topic", disparity_topic); + + nhNs.getParam("confidence_img_topic", conf_img_topic); + nhNs.getParam("confidence_map_topic", conf_map_topic); + + nhNs.getParam("point_cloud_topic", point_cloud_topic); + + nhNs.getParam("pose_topic", pose_topic); + nhNs.getParam("odometry_topic", odometry_topic); + + nhNs.getParam("imu_topic", imu_topic); + 
nhNs.getParam("imu_topic_raw", imu_topic_raw); + nhNs.getParam("imu_pub_rate", imuPubRate); + + // Create camera info + sensor_msgs::CameraInfoPtr rgb_cam_info_ms_g(new sensor_msgs::CameraInfo()); + sensor_msgs::CameraInfoPtr left_cam_info_msg_(new sensor_msgs::CameraInfo()); + sensor_msgs::CameraInfoPtr right_cam_info_msg_(new sensor_msgs::CameraInfo()); + sensor_msgs::CameraInfoPtr rgb_cam_info_raw_msg_( + new sensor_msgs::CameraInfo()); + sensor_msgs::CameraInfoPtr left_cam_info_raw_ms_g( + new sensor_msgs::CameraInfo()); + sensor_msgs::CameraInfoPtr right_cam_info_raw_msg_( + new sensor_msgs::CameraInfo()); + sensor_msgs::CameraInfoPtr depth_cam_info_msg_(new sensor_msgs::CameraInfo()); + + rgbCamInfoMsg = rgb_cam_info_ms_g; + leftCamInfoMsg = left_cam_info_msg_; + rightCamInfoMsg = right_cam_info_msg_; + rgbCamInfoRawMsg = rgb_cam_info_raw_msg_; + leftCamInfoRawMsg = left_cam_info_raw_ms_g; + rightCamInfoRawMsg = right_cam_info_raw_msg_; + depthCamInfoMsg = depth_cam_info_msg_; + + // SVO + nhNs.param("svo_filepath", svoFilepath, std::string()); + + // Initialize tf2 transformation + nhNs.getParam("initial_tracking_pose", initialTrackPose); + set_pose(initialTrackPose[0], initialTrackPose[1], initialTrackPose[2], + initialTrackPose[3], initialTrackPose[4], initialTrackPose[5]); + + // Initialization transformation listener + tfBuffer.reset(new tf2_ros::Buffer); + tfListener.reset(new tf2_ros::TransformListener(*tfBuffer)); + + // Try to initialize the ZED + if (!svoFilepath.empty()) { + param.svo_input_filename = svoFilepath.c_str(); + } else { + param.camera_fps = frameRate; + param.camera_resolution = static_cast(resolution); + if (serial_number == 0) { + param.camera_linux_id = zedId; + } else { + bool waiting_for_camera = true; + while (waiting_for_camera) { + + if (!nhNs.ok()) { + mStopNode = true; // Stops other threads + zed.close(); + NODELET_DEBUG("ZED pool thread finished"); + return; + } + + sl::DeviceProperties prop = sl_tools::getZEDFromSN(serial_number); + if (prop.id < -1 || + prop.camera_state == sl::CAMERA_STATE::CAMERA_STATE_NOT_AVAILABLE) { + std::string msg = "ZED SN" + to_string(serial_number) + + " not detected ! 
Please connect this ZED"; + NODELET_INFO_STREAM(msg.c_str()); + std::this_thread::sleep_for(std::chrono::milliseconds(2000)); + } else { + waiting_for_camera = false; + param.camera_linux_id = prop.id; + } + } + } + } + + std::string ver = sl_tools::getSDKVersion(verMajor, verMinor, verSubMinor); + NODELET_INFO_STREAM("SDK version : " << ver); + + if (verMajor < 2) { + NODELET_WARN_STREAM("Please consider to upgrade to latest SDK version to " + "get better performances"); + param.coordinate_system = COORDINATE_SYSTEM_IMAGE; + NODELET_INFO_STREAM("Camera coordinate system : COORDINATE_SYSTEM_IMAGE"); + xIdx = 2; + yIdx = 0; + zIdx = 1; + xSign = 1; + ySign = -1; + zSign = -1; + } else if (verMajor == 2 && verMinor < 5) { + NODELET_WARN_STREAM("Please consider to upgrade to latest SDK version to " + "get latest features"); + param.coordinate_system = COORDINATE_SYSTEM_RIGHT_HANDED_Z_UP; + NODELET_INFO_STREAM( + "Camera coordinate system : COORDINATE_SYSTEM_RIGHT_HANDED_Z_UP"); + xIdx = 1; + yIdx = 0; + zIdx = 2; + xSign = 1; + ySign = -1; + zSign = 1; + } else { + param.coordinate_system = COORDINATE_SYSTEM_RIGHT_HANDED_Z_UP_X_FWD; + NODELET_INFO_STREAM( + "Camera coordinate system : COORDINATE_SYSTEM_RIGHT_HANDED_Z_UP_X_FWD"); + xIdx = 0; + yIdx = 1; + zIdx = 2; + xSign = 1; + ySign = 1; + zSign = 1; + } + + param.coordinate_units = sl::UNIT_METER; + + param.depth_mode = static_cast(quality); + param.sdk_verbose = verbose; + param.sdk_gpu_id = gpuId; + param.depth_stabilization = depthStabilization; + param.camera_image_flip = cameraFlip; + + sl::ERROR_CODE err = sl::ERROR_CODE_CAMERA_NOT_DETECTED; + while (err != sl::SUCCESS) { + err = zed.open(param); + NODELET_INFO_STREAM(toString(err)); + std::this_thread::sleep_for(std::chrono::milliseconds(2000)); + + if (!nhNs.ok()) { + mStopNode = true; // Stops other threads + zed.close(); + NODELET_DEBUG("ZED pool thread finished"); + return; + } + } + + realCamModel = zed.getCameraInformation().camera_model; + + std::string camModelStr = "LAST"; + if (realCamModel == sl::MODEL_ZED) { + camModelStr = "ZED"; + if (userCamModel != 0) { + NODELET_WARN("Camera model does not match user parameter. Please modify " + "the value of the parameter 'camera_model' to 0"); + } + } else if (realCamModel == sl::MODEL_ZED_M) { + camModelStr = "ZED M"; + if (userCamModel != 1) { + NODELET_WARN("Camera model does not match user parameter. 
Please modify " + "the value of the parameter 'camera_model' to 1"); + } + } + + NODELET_INFO_STREAM("CAMERA MODEL : " << realCamModel); + + serial_number = zed.getCameraInformation().serial_number; + + // Dynamic Reconfigure parameters + server = + boost::make_shared>(); + dynamic_reconfigure::Server::CallbackType f; + f = boost::bind(&ZEDWrapperNodelet::dynamicReconfCallback, this, _1, _2); + server->setCallback(f); + + nhNs.getParam("mat_resize_factor", matResizeFactor); + + if (matResizeFactor < 0.1) { + matResizeFactor = 0.1; + NODELET_WARN_STREAM( + "Minimum allowed values for 'mat_resize_factor' is 0.1"); + } + if (matResizeFactor > 1.0) { + matResizeFactor = 1.0; + NODELET_WARN_STREAM( + "Maximum allowed values for 'mat_resize_factor' is 1.0"); + } + + nhNs.getParam("confidence", confidence); + nhNs.getParam("exposure", exposure); + nhNs.getParam("gain", gain); + nhNs.getParam("auto_exposure", autoExposure); + if (autoExposure) { + triggerAutoExposure = true; + } + + // Create all the publishers + // Image publishers + image_transport::ImageTransport it_zed(nh); + pubRgb = it_zed.advertise(rgb_topic, 1); // rgb + NODELET_INFO_STREAM("Advertized on topic " << rgb_topic); + pubRawRgb = it_zed.advertise(rgb_raw_topic, 1); // rgb raw + NODELET_INFO_STREAM("Advertized on topic " << rgb_raw_topic); + pubLeft = it_zed.advertise(left_topic, 1); // left + NODELET_INFO_STREAM("Advertized on topic " << left_topic); + pubRawLeft = it_zed.advertise(left_raw_topic, 1); // left raw + NODELET_INFO_STREAM("Advertized on topic " << left_raw_topic); + pubRight = it_zed.advertise(right_topic, 1); // right + NODELET_INFO_STREAM("Advertized on topic " << right_topic); + pubRawRight = it_zed.advertise(right_raw_topic, 1); // right raw + NODELET_INFO_STREAM("Advertized on topic " << right_raw_topic); + pubDepth = it_zed.advertise(depth_topic, 1); // depth + NODELET_INFO_STREAM("Advertized on topic " << depth_topic); + pubConfImg = it_zed.advertise(conf_img_topic, 1); // confidence image + NODELET_INFO_STREAM("Advertized on topic " << conf_img_topic); + + // Confidence Map publisher + pubConfMap = + nh.advertise(conf_map_topic, 1); // confidence map + NODELET_INFO_STREAM("Advertized on topic " << conf_map_topic); + + // Disparity publisher + pubDisparity = nh.advertise(disparity_topic, 1); + NODELET_INFO_STREAM("Advertized on topic " << disparity_topic); + + // PointCloud publisher + pubCloud = nh.advertise(point_cloud_topic, 1); + NODELET_INFO_STREAM("Advertized on topic " << point_cloud_topic); + + // Camera info publishers + pubRgbCamInfo = + nh.advertise(rgb_cam_info_topic, 1); // rgb + NODELET_INFO_STREAM("Advertized on topic " << rgb_cam_info_topic); + pubLeftCamInfo = + nh.advertise(left_cam_info_topic, 1); // left + NODELET_INFO_STREAM("Advertized on topic " << left_cam_info_topic); + pubRightCamInfo = + nh.advertise(right_cam_info_topic, 1); // right + NODELET_INFO_STREAM("Advertized on topic " << right_cam_info_topic); + pubDepthCamInfo = + nh.advertise(depth_cam_info_topic, 1); // depth + NODELET_INFO_STREAM("Advertized on topic " << depth_cam_info_topic); + // Raw + pubRgbCamInfoRaw = nh.advertise( + rgb_cam_info_raw_topic, 1); // raw rgb + NODELET_INFO_STREAM("Advertized on topic " << rgb_cam_info_raw_topic); + pubLeftCamInfoRaw = nh.advertise( + left_cam_info_raw_topic, 1); // raw left + NODELET_INFO_STREAM("Advertized on topic " << left_cam_info_raw_topic); + pubRightCamInfoRaw = nh.advertise( + right_cam_info_raw_topic, 1); // raw right + NODELET_INFO_STREAM("Advertized on topic " << 
right_cam_info_raw_topic); + + // Odometry and Map publisher + pubPose = nh.advertise(pose_topic, 1); + NODELET_INFO_STREAM("Advertized on topic " << pose_topic); + pubOdom = nh.advertise(odometry_topic, 1); + NODELET_INFO_STREAM("Advertized on topic " << odometry_topic); + + // Imu publisher + if (imuPubRate > 0 && realCamModel == sl::MODEL_ZED_M) { + pubImu = nh.advertise(imu_topic, 500); + NODELET_INFO_STREAM("Advertized on topic " << imu_topic << " @ " + << imuPubRate << " Hz"); + + pubImuRaw = nh.advertise(imu_topic_raw, 500); + NODELET_INFO_STREAM("Advertized on topic " << imu_topic_raw << " @ " + << imuPubRate << " Hz"); + + imuTime = ros::Time::now(); + pubImuTimer = nhNs.createTimer(ros::Duration(1.0 / imuPubRate), + &ZEDWrapperNodelet::imuPubCallback, this); + } else if (imuPubRate > 0 && realCamModel == sl::MODEL_ZED) { + NODELET_WARN_STREAM( + "'imu_pub_rate' set to " + << imuPubRate << " Hz" + << " but ZED camera model does not support IMU data publishing."); + } + + // Service + srvSetInitPose = nh.advertiseService("set_initial_pose", + &ZEDWrapperNodelet::on_set_pose, this); + srvResetOdometry = nh.advertiseService( + "reset_odometry", &ZEDWrapperNodelet::on_reset_odometry, this); + srvResetTracking = nh.advertiseService( + "reset_tracking", &ZEDWrapperNodelet::on_reset_tracking, this); + + // Start Pointcloud thread + mPcThread = std::thread(&ZEDWrapperNodelet::pointcloud_thread, this); + + // Start pool thread + devicePollThread = std::thread(&ZEDWrapperNodelet::device_poll, this); + + + } + + void ZEDWrapperNodelet::checkResolFps() { + switch (resolution) { + case sl::RESOLUTION_HD2K: + if (frameRate != 15) { + NODELET_WARN_STREAM("Wrong FrameRate (" + << frameRate + << ") for the resolution HD2K. Set to 15 FPS."); + frameRate = 15; + } + break; + + case sl::RESOLUTION_HD1080: + if (frameRate == 15 || frameRate == 30) { + break; + } + + if (frameRate > 15 && frameRate < 30) { + NODELET_WARN_STREAM("Wrong FrameRate (" + << frameRate + << ") for the resolution HD1080. Set to 15 FPS."); + frameRate = 15; + } else if (frameRate > 30) { + NODELET_WARN_STREAM("Wrong FrameRate (" + << frameRate + << ") for the resolution HD1080. Set to 30 FPS."); + frameRate = 30; + } else { + NODELET_WARN_STREAM("Wrong FrameRate (" + << frameRate + << ") for the resolution HD1080. Set to 15 FPS."); + frameRate = 15; + } + break; + + case sl::RESOLUTION_HD720: + if (frameRate == 15 || frameRate == 30 || frameRate == 60) { + break; + } + + if (frameRate > 15 && frameRate < 30) { + NODELET_WARN_STREAM("Wrong FrameRate (" + << frameRate + << ") for the resolution HD720. Set to 15 FPS."); + frameRate = 15; + } else if (frameRate > 30 && frameRate < 60) { + NODELET_WARN_STREAM("Wrong FrameRate (" + << frameRate + << ") for the resolution HD720. Set to 30 FPS."); + frameRate = 30; + } else if (frameRate > 60) { + NODELET_WARN_STREAM("Wrong FrameRate (" + << frameRate + << ") for the resolution HD720. Set to 60 FPS."); + frameRate = 60; + } else { + NODELET_WARN_STREAM("Wrong FrameRate (" + << frameRate + << ") for the resolution HD720. Set to 15 FPS."); + frameRate = 15; + } + break; + + case sl::RESOLUTION_VGA: + if (frameRate == 15 || frameRate == 30 || frameRate == 60 || + frameRate == 100) { + break; + } + + if (frameRate > 15 && frameRate < 30) { + NODELET_WARN_STREAM("Wrong FrameRate (" + << frameRate + << ") for the resolution VGA. 
Set to 15 FPS."); + frameRate = 15; + } else if (frameRate > 30 && frameRate < 60) { + NODELET_WARN_STREAM("Wrong FrameRate (" + << frameRate + << ") for the resolution VGA. Set to 30 FPS."); + frameRate = 30; + } else if (frameRate > 60 && frameRate < 100) { + NODELET_WARN_STREAM("Wrong FrameRate (" + << frameRate + << ") for the resolution VGA. Set to 60 FPS."); + frameRate = 60; + } else if (frameRate > 100) { + NODELET_WARN_STREAM("Wrong FrameRate (" + << frameRate + << ") for the resolution VGA. Set to 100 FPS."); + frameRate = 100; + } else { + NODELET_WARN_STREAM("Wrong FrameRate (" + << frameRate + << ") for the resolution VGA. Set to 15 FPS."); + frameRate = 15; + } + break; + + default: + NODELET_WARN_STREAM("Invalid resolution. Set to HD720 @ 30 FPS"); + resolution = 2; + frameRate = 30; + } + } + + sensor_msgs::ImagePtr + ZEDWrapperNodelet::imageToROSmsg(cv::Mat img, const std::string encodingType, + std::string frameId, ros::Time t) { + sensor_msgs::ImagePtr ptr = boost::make_shared(); + sensor_msgs::Image& imgMessage = *ptr; + imgMessage.header.stamp = t; + imgMessage.header.frame_id = frameId; + imgMessage.height = img.rows; + imgMessage.width = img.cols; + imgMessage.encoding = encodingType; + int num = 1; // for endianness detection + imgMessage.is_bigendian = !(*(char*)&num == 1); + imgMessage.step = img.cols * img.elemSize(); + size_t size = imgMessage.step * img.rows; + imgMessage.data.resize(size); + + if (img.isContinuous()) { + memcpy((char*)(&imgMessage.data[0]), img.data, size); + } else { + uchar* opencvData = img.data; + uchar* rosData = (uchar*)(&imgMessage.data[0]); + + #pragma omp parallel for + for (unsigned int i = 0; i < img.rows; i++) { + memcpy(rosData, opencvData, imgMessage.step); + rosData += imgMessage.step; + opencvData += img.step; + } + } + return ptr; + } + + void ZEDWrapperNodelet::set_pose(float xt, float yt, float zt, float rr, + float pr, float yr) { + // ROS pose + tf2::Quaternion q; + q.setRPY(rr, pr, yr); + + tf2::Vector3 orig(xt, yt, zt); + + baseToOdomTransform.setOrigin(orig); + baseToOdomTransform.setRotation(q); + + odomToMapTransform.setIdentity(); + + // SL pose + sl::float4 q_vec; + q_vec[0] = q.x(); + q_vec[1] = q.y(); + q_vec[2] = q.z(); + q_vec[3] = q.w(); + + sl::Orientation r(q_vec); + + initialPoseSl.setTranslation(sl::Translation(xt, yt, zt)); + initialPoseSl.setOrientation(r); + } + + bool ZEDWrapperNodelet::on_set_pose( + zed_wrapper::set_initial_pose::Request& req, + zed_wrapper::set_initial_pose::Response& res) { + initialTrackPose.resize(6); + + initialTrackPose[0] = req.x; + initialTrackPose[1] = req.y; + initialTrackPose[2] = req.z; + initialTrackPose[3] = req.R; + initialTrackPose[4] = req.P; + initialTrackPose[5] = req.Y; + + set_pose(initialTrackPose[0], initialTrackPose[1], initialTrackPose[2], + initialTrackPose[3], initialTrackPose[4], initialTrackPose[5]); + + if (trackingActivated) { + zed.resetTracking(initialPoseSl); + } + + res.done = true; + + return true; + } + + bool ZEDWrapperNodelet::on_reset_tracking( + zed_wrapper::reset_tracking::Request& req, + zed_wrapper::reset_tracking::Response& res) { + if (!trackingActivated) { + res.reset_done = false; + return false; + } + + nhNs.getParam("initial_tracking_pose", initialTrackPose); + + if (initialTrackPose.size() != 6) { + NODELET_WARN_STREAM("Invalid Initial Pose size (" << initialTrackPose.size() + << "). 
Using Identity"); + initialPoseSl.setIdentity(); + odomToMapTransform.setIdentity(); + baseToOdomTransform.setIdentity(); + } else { + set_pose(initialTrackPose[0], initialTrackPose[1], initialTrackPose[2], + initialTrackPose[3], initialTrackPose[4], initialTrackPose[5]); + } + + zed.resetTracking(initialPoseSl); + + return true; + } + + bool ZEDWrapperNodelet::on_reset_odometry( + zed_wrapper::reset_odometry::Request& req, + zed_wrapper::reset_odometry::Response& res) { + initOdomWithPose = true; + + res.reset_done = true; + + return true; + } + + void ZEDWrapperNodelet::start_tracking() { + NODELET_INFO_STREAM("Starting Tracking"); + + nhNs.getParam("odometry_DB", odometryDb); + nhNs.getParam("pose_smoothing", poseSmoothing); + NODELET_INFO_STREAM("Pose Smoothing : " << poseSmoothing); + nhNs.getParam("spatial_memory", spatialMemory); + NODELET_INFO_STREAM("Spatial Memory : " << spatialMemory); + if (realCamModel == sl::MODEL_ZED_M) { + nhNs.getParam("init_odom_with_imu", initOdomWithPose); + NODELET_INFO_STREAM( + "Init Odometry with first IMU data : " << initOdomWithPose); + } else { + initOdomWithPose = false; + } + + if (initialTrackPose.size() != 6) { + NODELET_WARN_STREAM("Invalid Initial Pose size (" << initialTrackPose.size() + << "). Using Identity"); + initialPoseSl.setIdentity(); + odomToMapTransform.setIdentity(); + odomToMapTransform.setIdentity(); + } else { + set_pose(initialTrackPose[0], initialTrackPose[1], initialTrackPose[2], + initialTrackPose[3], initialTrackPose[4], initialTrackPose[5]); + } + + if (odometryDb != "" && !sl_tools::file_exist(odometryDb)) { + odometryDb = ""; + NODELET_WARN("odometry_DB path doesn't exist or is unreachable."); + } + + // Tracking parameters + sl::TrackingParameters trackParams; + trackParams.area_file_path = odometryDb.c_str(); + trackParams.enable_pose_smoothing = poseSmoothing; + trackParams.enable_spatial_memory = spatialMemory; + + trackParams.initial_world_transform = initialPoseSl; + + zed.enableTracking(trackParams); + trackingActivated = true; + } + + void ZEDWrapperNodelet::publishOdom(tf2::Transform odom_base_transform, + ros::Time t) { + nav_msgs::Odometry odom; + odom.header.stamp = t; + odom.header.frame_id = odometryFrameId; // odom_frame + odom.child_frame_id = baseFrameId; // base_frame + // conversion from Tranform to message + geometry_msgs::Transform base2 = tf2::toMsg(odom_base_transform); + // Add all value in odometry message + odom.pose.pose.position.x = base2.translation.x; + odom.pose.pose.position.y = base2.translation.y; + odom.pose.pose.position.z = base2.translation.z; + odom.pose.pose.orientation.x = base2.rotation.x; + odom.pose.pose.orientation.y = base2.rotation.y; + odom.pose.pose.orientation.z = base2.rotation.z; + odom.pose.pose.orientation.w = base2.rotation.w; + // Publish odometry message + pubOdom.publish(odom); + } + + void ZEDWrapperNodelet::publishOdomFrame(tf2::Transform baseTransform, + ros::Time t) { + geometry_msgs::TransformStamped transformStamped; + transformStamped.header.stamp = t; + transformStamped.header.frame_id = odometryFrameId; + transformStamped.child_frame_id = baseFrameId; + // conversion from Tranform to message + transformStamped.transform = tf2::toMsg(baseTransform); + // Publish transformation + transformOdomBroadcaster.sendTransform(transformStamped); + } + + void ZEDWrapperNodelet::publishPose(tf2::Transform poseBaseTransform, + ros::Time t) { + geometry_msgs::PoseStamped pose; + pose.header.stamp = t; + pose.header.frame_id = mapFrameId; // map_frame + // conversion 
from Tranform to message + geometry_msgs::Transform base2 = tf2::toMsg(poseBaseTransform); + // Add all value in Pose message + pose.pose.position.x = base2.translation.x; + pose.pose.position.y = base2.translation.y; + pose.pose.position.z = base2.translation.z; + pose.pose.orientation.x = base2.rotation.x; + pose.pose.orientation.y = base2.rotation.y; + pose.pose.orientation.z = base2.rotation.z; + pose.pose.orientation.w = base2.rotation.w; + // Publish odometry message + pubPose.publish(pose); + } + + void ZEDWrapperNodelet::publishPoseFrame(tf2::Transform baseTransform, + ros::Time t) { + geometry_msgs::TransformStamped transformStamped; + transformStamped.header.stamp = t; + transformStamped.header.frame_id = mapFrameId; + transformStamped.child_frame_id = odometryFrameId; + // conversion from Tranform to message + transformStamped.transform = tf2::toMsg(baseTransform); + // Publish transformation + transformPoseBroadcaster.sendTransform(transformStamped); + } + + void ZEDWrapperNodelet::publishImuFrame(tf2::Transform baseTransform) { + geometry_msgs::TransformStamped transformStamped; + transformStamped.header.stamp = imuTime; + transformStamped.header.frame_id = baseFrameId; + transformStamped.child_frame_id = imuFrameId; + // conversion from Tranform to message + transformStamped.transform = tf2::toMsg(baseTransform); + // Publish transformation + transformImuBroadcaster.sendTransform(transformStamped); + } + + void ZEDWrapperNodelet::publishImage(cv::Mat img, + image_transport::Publisher& pubImg, + string imgFrameId, ros::Time t) { + pubImg.publish( + imageToROSmsg(img, sensor_msgs::image_encodings::BGR8, imgFrameId, t)); + } + + void ZEDWrapperNodelet::publishDepth(cv::Mat depth, ros::Time t) { + string encoding; + if (openniDepthMode) { + depth *= 1000.0f; + depth.convertTo(depth, CV_16UC1); // in mm, rounded + encoding = sensor_msgs::image_encodings::TYPE_16UC1; + } else { + encoding = sensor_msgs::image_encodings::TYPE_32FC1; + } + + pubDepth.publish(imageToROSmsg(depth, encoding, depthOptFrameId, t)); + } + + void ZEDWrapperNodelet::publishDisparity(cv::Mat disparity, ros::Time t) { + sl::CameraInformation zedParam = + zed.getCameraInformation(sl::Resolution(matWidth, matHeight)); + sensor_msgs::ImagePtr disparity_image = imageToROSmsg( + disparity, sensor_msgs::image_encodings::TYPE_32FC1, disparityFrameId, t); + + stereo_msgs::DisparityImage msg; + msg.image = *disparity_image; + msg.header = msg.image.header; + msg.f = zedParam.calibration_parameters.left_cam.fx; + msg.T = zedParam.calibration_parameters.T.x; + msg.min_disparity = msg.f * msg.T / zed.getDepthMaxRangeValue(); + msg.max_disparity = msg.f * msg.T / zed.getDepthMinRangeValue(); + pubDisparity.publish(msg); + } + + void ZEDWrapperNodelet::pointcloud_thread() { + std::unique_lock lock(mPcMutex); + while (!mStopNode) { + while (!mPcDataReady) { // loop to avoid spurious wakeups + if (mPcDataReadyCondVar.wait_for(lock, std::chrono::milliseconds(500)) == std::cv_status::timeout) { + // Check thread stopping + if (mStopNode) { + return; + } else { + continue; + } + } + + publishPointCloud(); + mPcDataReady = false; + } + } + + NODELET_DEBUG("Pointcloud thread finished"); + } + + void ZEDWrapperNodelet::publishPointCloud() { + // Initialize Point Cloud message + // https://github.com/ros/common_msgs/blob/jade-devel/sensor_msgs/include/sensor_msgs/point_cloud2_iterator.h + int ptsCount = matWidth * matHeight; + mPointcloudMsg.header.stamp = pointCloudTime; + + if (mPointcloudMsg.width != matWidth || 
mPointcloudMsg.height != matHeight) { + mPointcloudMsg.header.frame_id = pointCloudFrameId; // Set the header values of the ROS message + + mPointcloudMsg.is_bigendian = false; + mPointcloudMsg.is_dense = false; + + sensor_msgs::PointCloud2Modifier modifier(mPointcloudMsg); + modifier.setPointCloud2Fields(4, + "x", 1, sensor_msgs::PointField::FLOAT32, + "y", 1, sensor_msgs::PointField::FLOAT32, + "z", 1, sensor_msgs::PointField::FLOAT32, + "rgb", 1, sensor_msgs::PointField::FLOAT32); + + modifier.resize(ptsCount); + + mPointcloudMsg.width = matWidth; + mPointcloudMsg.height = matHeight; + } + + sl::Vector4* cpu_cloud = cloud.getPtr(); + + // Data copy + float* ptCloudPtr = (float*)(&mPointcloudMsg.data[0]); + + #pragma omp parallel for + for (size_t i = 0; i < ptsCount; ++i) { + ptCloudPtr[i * 4 + 0] = xSign * cpu_cloud[i][xIdx]; + ptCloudPtr[i * 4 + 1] = ySign * cpu_cloud[i][yIdx]; + ptCloudPtr[i * 4 + 2] = zSign * cpu_cloud[i][zIdx]; + ptCloudPtr[i * 4 + 3] = cpu_cloud[i][3]; + } + + // Pointcloud publishing + pubCloud.publish(mPointcloudMsg); + } + + void ZEDWrapperNodelet::publishCamInfo(sensor_msgs::CameraInfoPtr camInfoMsg, + ros::Publisher pubCamInfo, ros::Time t) { + static int seq = 0; + camInfoMsg->header.stamp = t; + camInfoMsg->header.seq = seq; + pubCamInfo.publish(camInfoMsg); + seq++; + } + + void ZEDWrapperNodelet::fillCamInfo( + sl::Camera& zed, sensor_msgs::CameraInfoPtr left_cam_info_msg, + sensor_msgs::CameraInfoPtr right_cam_info_msg, string leftFrameId, + string rightFrameId, bool rawParam /*= false*/) { + sl::CalibrationParameters zedParam; + + if (rawParam) + zedParam = zed.getCameraInformation(sl::Resolution(matWidth, matHeight)) + .calibration_parameters_raw; + else + zedParam = zed.getCameraInformation(sl::Resolution(matWidth, matHeight)) + .calibration_parameters; + + float baseline = zedParam.T[0]; + + left_cam_info_msg->distortion_model = + sensor_msgs::distortion_models::PLUMB_BOB; + right_cam_info_msg->distortion_model = + sensor_msgs::distortion_models::PLUMB_BOB; + + left_cam_info_msg->D.resize(5); + right_cam_info_msg->D.resize(5); + left_cam_info_msg->D[0] = zedParam.left_cam.disto[0]; // k1 + left_cam_info_msg->D[1] = zedParam.left_cam.disto[1]; // k2 + left_cam_info_msg->D[2] = zedParam.left_cam.disto[4]; // k3 + left_cam_info_msg->D[3] = zedParam.left_cam.disto[2]; // p1 + left_cam_info_msg->D[4] = zedParam.left_cam.disto[3]; // p2 + right_cam_info_msg->D[0] = zedParam.right_cam.disto[0]; // k1 + right_cam_info_msg->D[1] = zedParam.right_cam.disto[1]; // k2 + right_cam_info_msg->D[2] = zedParam.right_cam.disto[4]; // k3 + right_cam_info_msg->D[3] = zedParam.right_cam.disto[2]; // p1 + right_cam_info_msg->D[4] = zedParam.right_cam.disto[3]; // p2 + + left_cam_info_msg->K.fill(0.0); + right_cam_info_msg->K.fill(0.0); + left_cam_info_msg->K[0] = zedParam.left_cam.fx; + left_cam_info_msg->K[2] = zedParam.left_cam.cx; + left_cam_info_msg->K[4] = zedParam.left_cam.fy; + left_cam_info_msg->K[5] = zedParam.left_cam.cy; + left_cam_info_msg->K[8] = 1.0; + right_cam_info_msg->K[0] = zedParam.right_cam.fx; + right_cam_info_msg->K[2] = zedParam.right_cam.cx; + right_cam_info_msg->K[4] = zedParam.right_cam.fy; + right_cam_info_msg->K[5] = zedParam.right_cam.cy; + right_cam_info_msg->K[8] = 1.0; + + left_cam_info_msg->R.fill(0.0); + right_cam_info_msg->R.fill(0.0); + for (int i = 0; i < 3; i++) { + // identity + right_cam_info_msg->R[i + i * 3] = 1; + left_cam_info_msg->R[i + i * 3] = 1; + } + + if (rawParam) { + cv::Mat R_ = sl_tools::convertRodrigues(zedParam.R); 
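+ // When publishing the unrectified (raw) calibration, the rotation between
+ // the two sensors (zedParam.R, a Rodrigues vector) is converted above to a
+ // 3x3 matrix and copied row-major into the right camera's CameraInfo R field.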
+ float* p = (float*)R_.data; + for (int i = 0; i < 9; i++) { + right_cam_info_msg->R[i] = p[i]; + } + } + + left_cam_info_msg->P.fill(0.0); + right_cam_info_msg->P.fill(0.0); + left_cam_info_msg->P[0] = zedParam.left_cam.fx; + left_cam_info_msg->P[2] = zedParam.left_cam.cx; + left_cam_info_msg->P[5] = zedParam.left_cam.fy; + left_cam_info_msg->P[6] = zedParam.left_cam.cy; + left_cam_info_msg->P[10] = 1.0; + // http://docs.ros.org/api/sensor_msgs/html/msg/CameraInfo.html + right_cam_info_msg->P[3] = (-1 * zedParam.left_cam.fx * baseline); + + right_cam_info_msg->P[0] = zedParam.right_cam.fx; + right_cam_info_msg->P[2] = zedParam.right_cam.cx; + right_cam_info_msg->P[5] = zedParam.right_cam.fy; + right_cam_info_msg->P[6] = zedParam.right_cam.cy; + right_cam_info_msg->P[10] = 1.0; + + left_cam_info_msg->width = right_cam_info_msg->width = matWidth; + left_cam_info_msg->height = right_cam_info_msg->height = matHeight; + + left_cam_info_msg->header.frame_id = leftFrameId; + right_cam_info_msg->header.frame_id = rightFrameId; + } + + void ZEDWrapperNodelet::dynamicReconfCallback(zed_wrapper::ZedConfig& config, + uint32_t level) { + switch (level) { + case 0: + confidence = config.confidence; + NODELET_INFO("Reconfigure confidence : %d", confidence); + break; + case 1: + exposure = config.exposure; + NODELET_INFO("Reconfigure exposure : %d", exposure); + break; + case 2: + gain = config.gain; + NODELET_INFO("Reconfigure gain : %d", gain); + break; + case 3: + autoExposure = config.auto_exposure; + if (autoExposure) { + triggerAutoExposure = true; + } + NODELET_INFO("Reconfigure auto control of exposure and gain : %s", + autoExposure ? "Enable" : "Disable"); + break; + case 4: + matResizeFactor = config.mat_resize_factor; + NODELET_INFO("Reconfigure mat_resize_factor: %g", matResizeFactor); + + dataMutex.lock(); + + matWidth = static_cast(camWidth * matResizeFactor); + matHeight = static_cast(camHeight * matResizeFactor); + NODELET_DEBUG_STREAM("Data Mat size : " << matWidth << "x" << matHeight); + + // Update Camera Info + fillCamInfo(zed, leftCamInfoMsg, rightCamInfoMsg, leftCamOptFrameId, + rightCamOptFrameId); + fillCamInfo(zed, leftCamInfoRawMsg, rightCamInfoRawMsg, leftCamOptFrameId, + rightCamOptFrameId, true); + rgbCamInfoMsg = depthCamInfoMsg = leftCamInfoMsg; // the reference camera is + // the Left one (next to + // the ZED logo) + rgbCamInfoRawMsg = leftCamInfoRawMsg; + + dataMutex.unlock(); + + break; + } + } + + void ZEDWrapperNodelet::imuPubCallback(const ros::TimerEvent& e) { + int imu_SubNumber = pubImu.getNumSubscribers(); + int imu_RawSubNumber = pubImuRaw.getNumSubscribers(); + + if (imu_SubNumber < 1 && imu_RawSubNumber < 1) { + return; + } + + ros::Time t = + sl_tools::slTime2Ros(zed.getTimestamp(sl::TIME_REFERENCE_IMAGE)); + + sl::IMUData imu_data; + zed.getIMUData(imu_data, sl::TIME_REFERENCE_CURRENT); + + if (imu_SubNumber > 0) { + sensor_msgs::Imu imu_msg; + imu_msg.header.stamp = imuTime; // t; + imu_msg.header.frame_id = imuFrameId; + + imu_msg.orientation.x = xSign * imu_data.getOrientation()[xIdx]; + imu_msg.orientation.y = ySign * imu_data.getOrientation()[yIdx]; + imu_msg.orientation.z = zSign * imu_data.getOrientation()[zIdx]; + imu_msg.orientation.w = imu_data.getOrientation()[3]; + + imu_msg.angular_velocity.x = xSign * imu_data.angular_velocity[xIdx]; + imu_msg.angular_velocity.y = ySign * imu_data.angular_velocity[yIdx]; + imu_msg.angular_velocity.z = zSign * imu_data.angular_velocity[zIdx]; + + imu_msg.linear_acceleration.x = xSign * 
imu_data.linear_acceleration[xIdx]; + imu_msg.linear_acceleration.y = ySign * imu_data.linear_acceleration[yIdx]; + imu_msg.linear_acceleration.z = zSign * imu_data.linear_acceleration[zIdx]; + + for (int i = 0; i < 3; i += 3) { + imu_msg.orientation_covariance[i * 3 + 0] = + imu_data.orientation_covariance.r[i * 3 + xIdx]; + imu_msg.orientation_covariance[i * 3 + 1] = + imu_data.orientation_covariance.r[i * 3 + yIdx]; + imu_msg.orientation_covariance[i * 3 + 2] = + imu_data.orientation_covariance.r[i * 3 + zIdx]; + + imu_msg.linear_acceleration_covariance[i * 3 + 0] = + imu_data.linear_acceleration_convariance.r[i * 3 + xIdx]; + imu_msg.linear_acceleration_covariance[i * 3 + 1] = + imu_data.linear_acceleration_convariance.r[i * 3 + yIdx]; + imu_msg.linear_acceleration_covariance[i * 3 + 2] = + imu_data.linear_acceleration_convariance.r[i * 3 + zIdx]; + + imu_msg.angular_velocity_covariance[i * 3 + 0] = + imu_data.angular_velocity_convariance.r[i * 3 + xIdx]; + imu_msg.angular_velocity_covariance[i * 3 + 1] = + imu_data.angular_velocity_convariance.r[i * 3 + yIdx]; + imu_msg.angular_velocity_covariance[i * 3 + 2] = + imu_data.angular_velocity_convariance.r[i * 3 + zIdx]; + } + + pubImu.publish(imu_msg); + } + + if (imu_RawSubNumber > 0) { + sensor_msgs::Imu imu_raw_msg; + imu_raw_msg.header.stamp = imuTime; // t; + imu_raw_msg.header.frame_id = imuFrameId; + + imu_raw_msg.angular_velocity.x = xSign * imu_data.angular_velocity[xIdx]; + imu_raw_msg.angular_velocity.y = ySign * imu_data.angular_velocity[yIdx]; + imu_raw_msg.angular_velocity.z = zSign * imu_data.angular_velocity[zIdx]; + + imu_raw_msg.linear_acceleration.x = + xSign * imu_data.linear_acceleration[xIdx]; + imu_raw_msg.linear_acceleration.y = + ySign * imu_data.linear_acceleration[yIdx]; + imu_raw_msg.linear_acceleration.z = + zSign * imu_data.linear_acceleration[zIdx]; + + for (int i = 0; i < 3; i += 3) { + imu_raw_msg.linear_acceleration_covariance[i * 3 + 0] = + imu_data.linear_acceleration_convariance.r[i * 3 + xIdx]; + imu_raw_msg.linear_acceleration_covariance[i * 3 + 1] = + imu_data.linear_acceleration_convariance.r[i * 3 + yIdx]; + imu_raw_msg.linear_acceleration_covariance[i * 3 + 2] = + imu_data.linear_acceleration_convariance.r[i * 3 + zIdx]; + + imu_raw_msg.angular_velocity_covariance[i * 3 + 0] = + imu_data.angular_velocity_convariance.r[i * 3 + xIdx]; + imu_raw_msg.angular_velocity_covariance[i * 3 + 1] = + imu_data.angular_velocity_convariance.r[i * 3 + yIdx]; + imu_raw_msg.angular_velocity_covariance[i * 3 + 2] = + imu_data.angular_velocity_convariance.r[i * 3 + zIdx]; + } + + imu_raw_msg.orientation_covariance[0] = + -1; // Orientation data is not available in "data_raw" -> See ROS REP145 + // http://www.ros.org/reps/rep-0145.html#topics + + pubImuRaw.publish(imu_raw_msg); + } + + // Publish IMU tf only if enabled + if (publishTf) { + // Camera to map transform from TF buffer + tf2::Transform base_to_map; + // Look up the transformation from base frame to map link + try { + // Save the transformation from base to frame + geometry_msgs::TransformStamped b2m = + tfBuffer->lookupTransform(mapFrameId, baseFrameId, ros::Time(0)); + // Get the TF2 transformation + tf2::fromMsg(b2m.transform, base_to_map); + } catch (tf2::TransformException& ex) { + NODELET_WARN_THROTTLE( + 10.0, "The tf from '%s' to '%s' does not seem to be available. 
" + "IMU TF not published!", + baseFrameId.c_str(), mapFrameId.c_str()); + NODELET_DEBUG_THROTTLE(1.0, "Transform error: %s", ex.what()); + return; + } + + // IMU Quaternion in Map frame + tf2::Quaternion imu_q; + imu_q.setX(xSign * imu_data.getOrientation()[xIdx]); + imu_q.setY(ySign * imu_data.getOrientation()[yIdx]); + imu_q.setZ(zSign * imu_data.getOrientation()[zIdx]); + imu_q.setW(imu_data.getOrientation()[3]); + + // Pose Quaternion from ZED Camera + tf2::Quaternion map_q = base_to_map.getRotation(); + + // Difference between IMU and ZED Quaternion + tf2::Quaternion delta_q = imu_q * map_q.inverse(); + + tf2::Transform imu_pose; + imu_pose.setIdentity(); + imu_pose.setRotation(delta_q); + + // Note, the frame is published, but its values will only change if someone + // has subscribed to IMU + publishImuFrame(imu_pose); // publish the imu Frame + } + } + + void ZEDWrapperNodelet::device_poll() { + ros::Rate loop_rate(frameRate); + + ros::Time old_t = + sl_tools::slTime2Ros(zed.getTimestamp(sl::TIME_REFERENCE_CURRENT)); + imuTime = old_t; + + sl::ERROR_CODE grab_status; + trackingActivated = false; + + // Get the parameters of the ZED images + camWidth = zed.getResolution().width; + camHeight = zed.getResolution().height; + NODELET_DEBUG_STREAM("Camera Frame size : " << camWidth << "x" << camHeight); + + matWidth = static_cast(camWidth * matResizeFactor); + matHeight = static_cast(camHeight * matResizeFactor); + NODELET_DEBUG_STREAM("Data Mat size : " << matWidth << "x" << matHeight); + + cv::Size cvSize(matWidth, matWidth); + leftImRGB = cv::Mat(cvSize, CV_8UC3); + rightImRGB = cv::Mat(cvSize, CV_8UC3); + confImRGB = cv::Mat(cvSize, CV_8UC3); + confMapFloat = cv::Mat(cvSize, CV_32FC1); + + // Create and fill the camera information messages + fillCamInfo(zed, leftCamInfoMsg, rightCamInfoMsg, leftCamOptFrameId, + rightCamOptFrameId); + fillCamInfo(zed, leftCamInfoRawMsg, rightCamInfoRawMsg, leftCamOptFrameId, + rightCamOptFrameId, true); + rgbCamInfoMsg = depthCamInfoMsg = leftCamInfoMsg; // the reference camera is + // the Left one (next to the + // ZED logo) + rgbCamInfoRawMsg = leftCamInfoRawMsg; + + sl::RuntimeParameters runParams; + runParams.sensing_mode = static_cast(sensingMode); + + sl::Mat leftZEDMat, rightZEDMat, depthZEDMat, disparityZEDMat, confImgZEDMat, + confMapZEDMat; + // Main loop + while (nhNs.ok()) { + // Check for subscribers + int rgb_SubNumber = pubRgb.getNumSubscribers(); + int rgb_raw_SubNumber = pubRawRgb.getNumSubscribers(); + int left_SubNumber = pubLeft.getNumSubscribers(); + int left_raw_SubNumber = pubRawLeft.getNumSubscribers(); + int right_SubNumber = pubRight.getNumSubscribers(); + int right_raw_SubNumber = pubRawRight.getNumSubscribers(); + int depth_SubNumber = pubDepth.getNumSubscribers(); + int disparity_SubNumber = pubDisparity.getNumSubscribers(); + int cloud_SubNumber = pubCloud.getNumSubscribers(); + int pose_SubNumber = pubPose.getNumSubscribers(); + int odom_SubNumber = pubOdom.getNumSubscribers(); + int conf_img_SubNumber = pubConfImg.getNumSubscribers(); + int conf_map_SubNumber = pubConfMap.getNumSubscribers(); + int imu_SubNumber = pubImu.getNumSubscribers(); + int imu_RawSubNumber = pubImuRaw.getNumSubscribers(); + bool runLoop = (rgb_SubNumber + rgb_raw_SubNumber + left_SubNumber + + left_raw_SubNumber + right_SubNumber + right_raw_SubNumber + + depth_SubNumber + disparity_SubNumber + cloud_SubNumber + + pose_SubNumber + odom_SubNumber + conf_img_SubNumber + + conf_map_SubNumber + imu_SubNumber + imu_RawSubNumber) > 0; + + 
runParams.enable_point_cloud = false; + if (cloud_SubNumber > 0) { + runParams.enable_point_cloud = true; + } + // Run the loop only if there is some subscribers + if (runLoop) { + if ((depthStabilization || pose_SubNumber > 0 || odom_SubNumber > 0 || + cloud_SubNumber > 0 || depth_SubNumber > 0) && + !trackingActivated) { // Start the tracking + start_tracking(); + } else if (!depthStabilization && pose_SubNumber == 0 && + odom_SubNumber == 0 && + trackingActivated) { // Stop the tracking + zed.disableTracking(); + trackingActivated = false; + } + computeDepth = (depth_SubNumber + disparity_SubNumber + cloud_SubNumber + + pose_SubNumber + odom_SubNumber + conf_img_SubNumber + + conf_map_SubNumber) > 0; // Detect if one of the + // subscriber need to have the + // depth information + + // Timestamp + ros::Time t = + sl_tools::slTime2Ros(zed.getTimestamp(sl::TIME_REFERENCE_IMAGE)); + + grabbing = true; + if (computeDepth) { + int actual_confidence = zed.getConfidenceThreshold(); + if (actual_confidence != confidence) { + zed.setConfidenceThreshold(confidence); + } + runParams.enable_depth = true; // Ask to compute the depth + } else { + runParams.enable_depth = false; + } + + grab_status = zed.grab(runParams); // Ask to not compute the depth + + grabbing = false; + + // cout << toString(grab_status) << endl; + if (grab_status != + sl::ERROR_CODE::SUCCESS) { // Detect if a error occurred (for example: + // the zed have been disconnected) and + // re-initialize the ZED + + if (grab_status == sl::ERROR_CODE_NOT_A_NEW_FRAME) { + NODELET_DEBUG_THROTTLE(1.0, "Wait for a new image to proceed"); + } else { + NODELET_INFO_STREAM_ONCE(toString(grab_status)); + } + + std::this_thread::sleep_for(std::chrono::milliseconds(2)); + + if ((t - old_t).toSec() > 5) { + zed.close(); + + NODELET_INFO("Re-opening the ZED"); + sl::ERROR_CODE err = sl::ERROR_CODE_CAMERA_NOT_DETECTED; + while (err != sl::SUCCESS) { + if (!nhNs.ok()) { + mStopNode = true; + zed.close(); + NODELET_DEBUG("ZED pool thread finished"); + return; + } + + int id = sl_tools::checkCameraReady(serial_number); + if (id > 0) { + param.camera_linux_id = id; + err = zed.open(param); // Try to initialize the ZED + NODELET_INFO_STREAM(toString(err)); + } else { + NODELET_INFO("Waiting for the ZED to be re-connected"); + } + std::this_thread::sleep_for(std::chrono::milliseconds(2000)); + } + trackingActivated = false; + if (depthStabilization || pose_SubNumber > 0 || + odom_SubNumber > 0) { // Start the tracking + start_tracking(); + } + } + continue; + } + + // Time update + old_t = + sl_tools::slTime2Ros(zed.getTimestamp(sl::TIME_REFERENCE_CURRENT)); + + if (autoExposure) { + // getCameraSettings() can't check status of auto exposure + // triggerAutoExposure is used to execute setCameraSettings() only once + if (triggerAutoExposure) { + zed.setCameraSettings(sl::CAMERA_SETTINGS_EXPOSURE, 0, true); + triggerAutoExposure = false; + } + } else { + int actual_exposure = + zed.getCameraSettings(sl::CAMERA_SETTINGS_EXPOSURE); + if (actual_exposure != exposure) { + zed.setCameraSettings(sl::CAMERA_SETTINGS_EXPOSURE, exposure); + } + + int actual_gain = zed.getCameraSettings(sl::CAMERA_SETTINGS_GAIN); + if (actual_gain != gain) { + zed.setCameraSettings(sl::CAMERA_SETTINGS_GAIN, gain); + } + } + + dataMutex.lock(); + + // Publish the left == rgb image if someone has subscribed to + if (left_SubNumber > 0 || rgb_SubNumber > 0) { + // Retrieve RGBA Left image + zed.retrieveImage(leftZEDMat, sl::VIEW_LEFT, sl::MEM_CPU, matWidth, + matHeight); + 
cv::cvtColor(sl_tools::toCVMat(leftZEDMat), leftImRGB, CV_RGBA2RGB); + if (left_SubNumber > 0) { + publishCamInfo(leftCamInfoMsg, pubLeftCamInfo, t); + publishImage(leftImRGB, pubLeft, leftCamOptFrameId, t); + } + if (rgb_SubNumber > 0) { + publishCamInfo(rgbCamInfoMsg, pubRgbCamInfo, t); + publishImage(leftImRGB, pubRgb, depthOptFrameId, + t); // rgb is the left image + } + } + + // Publish the left_raw == rgb_raw image if someone has subscribed to + if (left_raw_SubNumber > 0 || rgb_raw_SubNumber > 0) { + // Retrieve RGBA Left image + zed.retrieveImage(leftZEDMat, sl::VIEW_LEFT_UNRECTIFIED, sl::MEM_CPU, + matWidth, matHeight); + cv::cvtColor(sl_tools::toCVMat(leftZEDMat), leftImRGB, CV_RGBA2RGB); + if (left_raw_SubNumber > 0) { + publishCamInfo(leftCamInfoRawMsg, pubLeftCamInfoRaw, t); + publishImage(leftImRGB, pubRawLeft, leftCamOptFrameId, t); + } + if (rgb_raw_SubNumber > 0) { + publishCamInfo(rgbCamInfoRawMsg, pubRgbCamInfoRaw, t); + publishImage(leftImRGB, pubRawRgb, depthOptFrameId, t); + } + } + + // Publish the right image if someone has subscribed to + if (right_SubNumber > 0) { + // Retrieve RGBA Right image + zed.retrieveImage(rightZEDMat, sl::VIEW_RIGHT, sl::MEM_CPU, matWidth, + matHeight); + cv::cvtColor(sl_tools::toCVMat(rightZEDMat), rightImRGB, CV_RGBA2RGB); + publishCamInfo(rightCamInfoMsg, pubRightCamInfo, t); + publishImage(rightImRGB, pubRight, rightCamOptFrameId, t); + } + + // Publish the right image if someone has subscribed to + if (right_raw_SubNumber > 0) { + // Retrieve RGBA Right image + zed.retrieveImage(rightZEDMat, sl::VIEW_RIGHT_UNRECTIFIED, sl::MEM_CPU, + matWidth, matHeight); + cv::cvtColor(sl_tools::toCVMat(rightZEDMat), rightImRGB, CV_RGBA2RGB); + publishCamInfo(rightCamInfoRawMsg, pubRightCamInfoRaw, t); + publishImage(rightImRGB, pubRawRight, rightCamOptFrameId, t); + } + + // Publish the depth image if someone has subscribed to + if (depth_SubNumber > 0 || disparity_SubNumber > 0) { + zed.retrieveMeasure(depthZEDMat, sl::MEASURE_DEPTH, sl::MEM_CPU, + matWidth, matHeight); + publishCamInfo(depthCamInfoMsg, pubDepthCamInfo, t); + publishDepth(sl_tools::toCVMat(depthZEDMat), t); // in meters + } + + // Publish the disparity image if someone has subscribed to + if (disparity_SubNumber > 0) { + zed.retrieveMeasure(disparityZEDMat, sl::MEASURE_DISPARITY, sl::MEM_CPU, + matWidth, matHeight); + // Need to flip sign, but cause of this is not sure + cv::Mat disparity = sl_tools::toCVMat(disparityZEDMat) * -1.0; + publishDisparity(disparity, t); + } + + // Publish the confidence image if someone has subscribed to + if (conf_img_SubNumber > 0) { + zed.retrieveImage(confImgZEDMat, sl::VIEW_CONFIDENCE, sl::MEM_CPU, + matWidth, matHeight); + cv::cvtColor(sl_tools::toCVMat(confImgZEDMat), confImRGB, CV_RGBA2RGB); + publishImage(confImRGB, pubConfImg, confidenceOptFrameId, t); + } + + // Publish the confidence map if someone has subscribed to + if (conf_map_SubNumber > 0) { + zed.retrieveMeasure(confMapZEDMat, sl::MEASURE_CONFIDENCE, sl::MEM_CPU, + matWidth, matHeight); + confMapFloat = sl_tools::toCVMat(confMapZEDMat); + pubConfMap.publish(imageToROSmsg( + confMapFloat, sensor_msgs::image_encodings::TYPE_32FC1, + confidenceOptFrameId, t)); + } + + // Publish the point cloud if someone has subscribed to + if (cloud_SubNumber > 0) { + // Run the point cloud conversion asynchronously to avoid slowing down + // all the program + // Retrieve raw pointCloud data if latest Pointcloud is ready + std::unique_lock lock(mPcMutex, std::defer_lock); + if (lock.try_lock()) { + 
zed.retrieveMeasure(cloud, sl::MEASURE_XYZBGRA, sl::MEM_CPU, matWidth, matHeight); + + pointCloudFrameId = depthFrameId; + pointCloudTime = t; + + // Signal Pointcloud thread that a new pointcloud is ready + mPcDataReady = true; + + mPcDataReadyCondVar.notify_one(); + } + } + + dataMutex.unlock(); + + // Transform from base to sensor + tf2::Transform sensor_to_base_transf; + // Look up the transformation from base frame to camera link + try { + // Save the transformation from base to frame + geometry_msgs::TransformStamped s2b = + tfBuffer->lookupTransform(baseFrameId, depthFrameId, t); + // Get the TF2 transformation + tf2::fromMsg(s2b.transform, sensor_to_base_transf); + + } catch (tf2::TransformException& ex) { + NODELET_WARN_THROTTLE( + 10.0, "The tf from '%s' to '%s' does not seem to be available, " + "will assume it as identity!", + depthFrameId.c_str(), baseFrameId.c_str()); + NODELET_DEBUG("Transform error: %s", ex.what()); + sensor_to_base_transf.setIdentity(); + } + + // Publish the odometry if someone has subscribed to + if (pose_SubNumber > 0 || odom_SubNumber > 0 || cloud_SubNumber > 0 || + depth_SubNumber > + + + 0 || imu_SubNumber > 0 || imu_RawSubNumber > 0) { + if (!initOdomWithPose) { + sl::Pose deltaOdom; + zed.getPosition(deltaOdom, sl::REFERENCE_FRAME_CAMERA); + + // Transform ZED delta odom pose in TF2 Transformation + geometry_msgs::Transform deltaTransf; + + sl::Translation translation = deltaOdom.getTranslation(); + sl::Orientation quat = deltaOdom.getOrientation(); + + deltaTransf.translation.x = xSign * translation(xIdx); + deltaTransf.translation.y = ySign * translation(yIdx); + deltaTransf.translation.z = zSign * translation(zIdx); + + deltaTransf.rotation.x = xSign * quat(xIdx); + deltaTransf.rotation.y = ySign * quat(yIdx); + deltaTransf.rotation.z = zSign * quat(zIdx); + deltaTransf.rotation.w = quat(3); + + tf2::Transform deltaOdomTf; + tf2::fromMsg(deltaTransf, deltaOdomTf); + + // delta odom from sensor to base frame + tf2::Transform deltaOdomTf_base = sensor_to_base_transf * + deltaOdomTf * + sensor_to_base_transf.inverse(); + + // Propagate Odom transform in time + baseToOdomTransform = baseToOdomTransform * deltaOdomTf_base; + + // Publish odometry message + publishOdom(baseToOdomTransform, t); + } + } + + // Publish the zed camera pose if someone has subscribed to + if (pose_SubNumber > 0 || odom_SubNumber > 0 || cloud_SubNumber > 0 || + depth_SubNumber > 0 || imu_SubNumber > 0 || imu_RawSubNumber > 0) { + sl::Pose zed_pose; // Sensor to Map transform + zed.getPosition(zed_pose, sl::REFERENCE_FRAME_WORLD); + + // Transform ZED pose in TF2 Transformation + geometry_msgs::Transform sens2mapTransf; + + sl::Translation translation = zed_pose.getTranslation(); + sl::Orientation quat = zed_pose.getOrientation(); + + sens2mapTransf.translation.x = xSign * translation(xIdx); + sens2mapTransf.translation.y = ySign * translation(yIdx); + sens2mapTransf.translation.z = zSign * translation(zIdx); + + sens2mapTransf.rotation.x = xSign * quat(xIdx); + sens2mapTransf.rotation.y = ySign * quat(yIdx); + sens2mapTransf.rotation.z = zSign * quat(zIdx); + sens2mapTransf.rotation.w = quat(3); + + tf2::Transform sens_to_map_transf; + tf2::fromMsg(sens2mapTransf, sens_to_map_transf); + + // Transformation from camera sensor to base frame + tf2::Transform base_to_map_transform = sensor_to_base_transf * + sens_to_map_transf * + sensor_to_base_transf.inverse(); + + if (initOdomWithPose) { + // Propagate Odom transform in time + baseToOdomTransform = base_to_map_transform; + 
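+ // The first valid pose seeds the odometry: base->odom takes the full
+ // base->map transform, so the map and odom frames initially coincide.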
base_to_map_transform.setIdentity(); + + if (odom_SubNumber > 0) { + // Publish odometry message + publishOdom(baseToOdomTransform, t); + } + + initOdomWithPose = false; + } else { + // Transformation from map to odometry frame + odomToMapTransform = + base_to_map_transform * baseToOdomTransform.inverse(); + } + + // Publish Pose message + publishPose(odomToMapTransform, t); + } + + // Publish pose tf only if enabled + if (publishTf) { + // Note, the frame is published, but its values will only change if + // someone has subscribed to odom + publishOdomFrame(baseToOdomTransform, + t); // publish the base Frame in odometry frame + // Note, the frame is published, but its values will only change if + // someone has subscribed to map + publishPoseFrame(odomToMapTransform, + t); // publish the odometry Frame in map frame + + imuTime = t; + } + + static int rateWarnCount = 0; + if (!loop_rate.sleep()) { + rateWarnCount++; + + if (rateWarnCount == 10) { + NODELET_DEBUG_THROTTLE( + 1.0, + "Working thread is not synchronized with the Camera frame rate"); + NODELET_DEBUG_STREAM_THROTTLE( + 1.0, "Expected cycle time: " << loop_rate.expectedCycleTime() + << " - Real cycle time: " + << loop_rate.cycleTime()); + NODELET_WARN_THROTTLE(10.0, "Elaboration takes longer than requested " + "by the FPS rate. Please consider to " + "lower the 'frame_rate' setting."); + } + } else { + rateWarnCount = 0; + } + } else { + NODELET_DEBUG_THROTTLE(1.0, "No topics subscribed by users"); + + // Publish odometry tf only if enabled + if (publishTf) { + ros::Time t = + sl_tools::slTime2Ros(zed.getTimestamp(sl::TIME_REFERENCE_CURRENT)); + publishOdomFrame(baseToOdomTransform, + t); // publish the base Frame in odometry frame + publishPoseFrame(odomToMapTransform, + t); // publish the odometry Frame in map frame + } + std::this_thread::sleep_for( + std::chrono::milliseconds(10)); // No subscribers, we just wait + + loop_rate.reset(); + } + } // while loop + + mStopNode = true; // Stops other threads + zed.close(); + + NODELET_DEBUG("ZED pool thread finished"); + } + +} // namespace diff --git a/zed_wrapper/src/tools/include/sl_tools.h b/zed_wrapper/src/tools/include/sl_tools.h new file mode 100644 index 00000000..ba6470ce --- /dev/null +++ b/zed_wrapper/src/tools/include/sl_tools.h @@ -0,0 +1,79 @@ +#ifndef SL_TOOLS_H +#define SL_TOOLS_H + +/////////////////////////////////////////////////////////////////////////// +// +// Copyright (c) 2018, STEREOLABS. +// +// All rights reserved. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+// +/////////////////////////////////////////////////////////////////////////// + +#include +#include +#include +#include + +namespace sl_tools { + + /* \brief Check if a ZED camera is ready + * \param serial_number : the serial number of the camera to be checked + */ + int checkCameraReady(unsigned int serial_number); + + /* \brief Get ZED camera properties + * \param serial_number : the serial number of the camera + */ + sl::DeviceProperties getZEDFromSN(unsigned int serial_number); + + /* \brief Convert an sl:Mat to a cv::Mat + * \param mat : the sl::Mat to convert + */ + cv::Mat toCVMat(sl::Mat& mat); + + cv::Mat convertRodrigues(sl::float3 r); + + /* \brief Test if a file exist + * \param name : the path to the file + */ + bool file_exist(const std::string& name); + + /* \brief Get Stereolabs SDK version + * \param major : major value for version + * \param minor : minor value for version + * \param sub_minor _ sub_minor value for version + */ + std::string getSDKVersion(int& major, int& minor, int& sub_minor); + + /* \brief Convert StereoLabs timestamp to ROS timestamp + * \param t : Stereolabs timestamp to be converted + */ + ros::Time slTime2Ros(sl::timeStamp t); + + inline sl::uchar4 depackColor4(float colorIn) { + sl::uchar4 out; + uint32_t color_uint = *(uint32_t*) & colorIn; + unsigned char* color_uchar = (unsigned char*) &color_uint; + for (int c = 0; c < 3; c++) { + out[c] = static_cast(color_uchar[c]); + } + out.w = 255; + return out; + } + + +} // namespace + +#endif // SL_TOOLS_H diff --git a/zed_wrapper/src/tools/src/sl_tools.cpp b/zed_wrapper/src/tools/src/sl_tools.cpp new file mode 100644 index 00000000..488511d6 --- /dev/null +++ b/zed_wrapper/src/tools/src/sl_tools.cpp @@ -0,0 +1,167 @@ +/////////////////////////////////////////////////////////////////////////// +// +// Copyright (c) 2018, STEREOLABS. +// +// All rights reserved. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+// +/////////////////////////////////////////////////////////////////////////// + +#include "sl_tools.h" + +#include +#include +#include + +namespace sl_tools { + + int checkCameraReady(unsigned int serial_number) { + int id = -1; + auto f = sl::Camera::getDeviceList(); + for (auto &it : f) + if (it.serial_number == serial_number && it.camera_state == sl::CAMERA_STATE::CAMERA_STATE_AVAILABLE) + id = it.id; + return id; + } + + sl::DeviceProperties getZEDFromSN(unsigned int serial_number) { + sl::DeviceProperties prop; + auto f = sl::Camera::getDeviceList(); + for (auto &it : f) { + if (it.serial_number == serial_number && it.camera_state == sl::CAMERA_STATE::CAMERA_STATE_AVAILABLE) + prop = it; + } + return prop; + } + + cv::Mat toCVMat(sl::Mat &mat) { + if (mat.getMemoryType() == sl::MEM_GPU) + mat.updateCPUfromGPU(); + + int cvType; + switch (mat.getDataType()) { + case sl::MAT_TYPE_32F_C1: + cvType = CV_32FC1; + break; + case sl::MAT_TYPE_32F_C2: + cvType = CV_32FC2; + break; + case sl::MAT_TYPE_32F_C3: + cvType = CV_32FC3; + break; + case sl::MAT_TYPE_32F_C4: + cvType = CV_32FC4; + break; + case sl::MAT_TYPE_8U_C1: + cvType = CV_8UC1; + break; + case sl::MAT_TYPE_8U_C2: + cvType = CV_8UC2; + break; + case sl::MAT_TYPE_8U_C3: + cvType = CV_8UC3; + break; + case sl::MAT_TYPE_8U_C4: + cvType = CV_8UC4; + break; + } + return cv::Mat((int) mat.getHeight(), (int) mat.getWidth(), cvType, mat.getPtr(sl::MEM_CPU), mat.getStepBytes(sl::MEM_CPU)); + } + + cv::Mat convertRodrigues(sl::float3 r) { + double theta = sqrt(r.x * r.x + r.y * r.y + r.z * r.z); + cv::Mat R = cv::Mat::eye(3, 3, CV_32F); + + if (theta < DBL_EPSILON) { + return R; + } else { + double c = cos(theta); + double s = sin(theta); + double c1 = 1. - c; + double itheta = theta ? 1. / theta : 0.; + + r *= itheta; + + cv::Mat rrt = cv::Mat::eye(3, 3, CV_32F); + float* p = (float*) rrt.data; + p[0] = r.x * r.x; + p[1] = r.x * r.y; + p[2] = r.x * r.z; + p[3] = r.x * r.y; + p[4] = r.y * r.y; + p[5] = r.y * r.z; + p[6] = r.x * r.z; + p[7] = r.y * r.z; + p[8] = r.z * r.z; + + cv::Mat r_x = cv::Mat::eye(3, 3, CV_32F); + p = (float*) r_x.data; + p[0] = 0; + p[1] = -r.z; + p[2] = r.y; + p[3] = r.z; + p[4] = 0; + p[5] = -r.x; + p[6] = -r.y; + p[7] = r.x; + p[8] = 0; + + // R = cos(theta)*I + (1 - cos(theta))*r*rT + sin(theta)*[r_x] + R = c * cv::Mat::eye(3, 3, CV_32F) + c1 * rrt + s*r_x; + } + return R; + } + + bool file_exist(const std::string& name) { + struct stat buffer; + return (stat(name.c_str(), &buffer) == 0); + } + + std::string getSDKVersion( int& major, int& minor, int& sub_minor) { + std::string ver = sl::Camera::getSDKVersion().c_str(); + + std::vector strings; + std::istringstream f(ver); + std::string s; + + while (getline(f, s, '.')) { + strings.push_back(s); + } + + major = 0; + minor = 0; + sub_minor = 0; + + switch( strings.size() ) + { + case 3: + sub_minor = std::stoi(strings[2]); + + case 2: + minor = std::stoi(strings[1]); + + case 1: + major = std::stoi(strings[0]); + } + + return ver; + } + + ros::Time slTime2Ros(sl::timeStamp t) { + uint32_t sec = static_cast(t/1000000000); + uint32_t nsec = static_cast(t%1000000000); + return ros::Time(sec, nsec); + } + +} // namespace diff --git a/src/zed_wrapper_node.cpp b/zed_wrapper/src/zed_wrapper_node.cpp similarity index 62% rename from src/zed_wrapper_node.cpp rename to zed_wrapper/src/zed_wrapper_node.cpp index d8078de0..c82e4743 100644 --- a/src/zed_wrapper_node.cpp +++ b/zed_wrapper/src/zed_wrapper_node.cpp @@ -1,6 +1,7 @@ 
-/////////////////////////////////////////////////////////////////////////// +// ///////////////////////////////////////////////////////////////////////// + // -// Copyright (c) 2017, STEREOLABS. +// Copyright (c) 2018, STEREOLABS. // // All rights reserved. // @@ -16,22 +17,22 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // -/////////////////////////////////////////////////////////////////////////// +// ///////////////////////////////////////////////////////////////////////// -#include #include +#include -int main(int argc, char** argv) { - ros::init(argc, argv, "zed_wrapper_node"); +int main(int argc, char **argv) { + ros::init(argc, argv, "zed_wrapper_node"); - nodelet::Loader nodelet; - nodelet::M_string remap(ros::names::getRemappings()); - nodelet::V_string nargv; - nodelet.load(ros::this_node::getName(), - "zed_wrapper/ZEDWrapperNodelet", - remap, nargv); + // ZED Nodelet + nodelet::Loader nodelet; + nodelet::M_string remap(ros::names::getRemappings()); + nodelet::V_string nargv; + nodelet.load(ros::this_node::getName(), "zed_wrapper/ZEDWrapperNodelet", + remap, nargv); - ros::spin(); + ros::spin(); - return 0; + return 0; } diff --git a/zed_wrapper/srv/reset_odometry.srv b/zed_wrapper/srv/reset_odometry.srv new file mode 100644 index 00000000..931e58c3 --- /dev/null +++ b/zed_wrapper/srv/reset_odometry.srv @@ -0,0 +1,2 @@ +--- +bool reset_done \ No newline at end of file diff --git a/zed_wrapper/srv/reset_tracking.srv b/zed_wrapper/srv/reset_tracking.srv new file mode 100644 index 00000000..931e58c3 --- /dev/null +++ b/zed_wrapper/srv/reset_tracking.srv @@ -0,0 +1,2 @@ +--- +bool reset_done \ No newline at end of file diff --git a/zed_wrapper/srv/set_initial_pose.srv b/zed_wrapper/srv/set_initial_pose.srv new file mode 100644 index 00000000..60c0008e --- /dev/null +++ b/zed_wrapper/srv/set_initial_pose.srv @@ -0,0 +1,10 @@ +# Translation XYZ [meters] +float32 x +float32 y +float32 z +# Orientation RPY [radians] +float32 R +float32 P +float32 Y +--- +bool done diff --git a/urdf/ZED.stl b/zed_wrapper/urdf/ZED.stl similarity index 100% rename from urdf/ZED.stl rename to zed_wrapper/urdf/ZED.stl diff --git a/urdf/ZEDM.stl b/zed_wrapper/urdf/ZEDM.stl similarity index 100% rename from urdf/ZEDM.stl rename to zed_wrapper/urdf/ZEDM.stl diff --git a/urdf/zed.urdf b/zed_wrapper/urdf/zed.urdf similarity index 53% rename from urdf/zed.urdf rename to zed_wrapper/urdf/zed.urdf index 8f301db0..94d99ee0 100644 --- a/urdf/zed.urdf +++ b/zed_wrapper/urdf/zed.urdf @@ -1,6 +1,6 @@ - - - - - - - - - - - - + + @@ -40,34 +30,41 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - - - - - - - - - - - - + + - - + + - - - - + + + + + + + + + + + - - + + + + + + + + + + + + diff --git a/urdf/zedm.urdf b/zed_wrapper/urdf/zedm.urdf similarity index 52% rename from urdf/zedm.urdf rename to zed_wrapper/urdf/zedm.urdf index 8cf584b5..0185e81f 100644 --- a/urdf/zedm.urdf +++ b/zed_wrapper/urdf/zedm.urdf @@ -1,6 +1,6 @@ - - - - + + @@ -32,26 +30,50 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - - - - + + - - + + - - - - + + + + + + + + + + + - - + + + + + + + + + + + + + + + + + + + + +
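The new "set_initial_pose", "reset_odometry" and "reset_tracking" services defined above can be called from any ROS node. Below is a minimal client sketch (not part of this changeset) for "set_initial_pose"; the "/zed/set_initial_pose" service name and the client node name are assumptions that depend on the namespace chosen at launch.

#include <ros/ros.h>
#include <zed_wrapper/set_initial_pose.h>

int main(int argc, char** argv) {
    ros::init(argc, argv, "zed_set_pose_client");
    ros::NodeHandle nh;

    // Service name assumes the wrapper runs under the "zed" namespace
    ros::ServiceClient client =
        nh.serviceClient<zed_wrapper::set_initial_pose>("/zed/set_initial_pose");

    zed_wrapper::set_initial_pose srv;
    srv.request.x = 0.0f; // translation XYZ [meters]
    srv.request.y = 0.0f;
    srv.request.z = 0.0f;
    srv.request.R = 0.0f; // orientation RPY [radians]
    srv.request.P = 0.0f;
    srv.request.Y = 0.0f;

    if (client.call(srv) && srv.response.done) {
        ROS_INFO("Initial pose set");
    } else {
        ROS_ERROR("Failed to call set_initial_pose");
    }
    return 0;
}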