From 6eee00b7b03aa7394f139b95f46f1252e7577c1d Mon Sep 17 00:00:00 2001 From: Walter Lucetti Date: Fri, 8 Sep 2023 14:03:18 +0200 Subject: [PATCH 01/21] Add support for "grab_compute_capping_fps" - `pub_frame_rate` now controls the `InitParameters::grab_compute_capping_fps` parameter of the ZED SDK instead of controlling the frequency of a parallel thread. It's not a Dynamic parameter anymore. - Change `general/camera_flip` parameter to string: 'AUTO', 'ON', 'OFF' - Change 'general/verbose' from bool to integer --- CHANGELOG.rst | 7 ++ zed_nodelets/cfg/Zed.cfg | 1 - .../include/zed_wrapper_nodelet.hpp | 21 +++--- .../zed_nodelet/src/zed_wrapper_nodelet.cpp | 68 ++++++++----------- zed_wrapper/cfg/Zed.cfg | 1 - zed_wrapper/params/common.yaml | 6 +- zed_wrapper/params/zed.yaml | 2 +- zed_wrapper/params/zed2.yaml | 2 +- zed_wrapper/params/zed2i.yaml | 6 +- zed_wrapper/params/zedm.yaml | 2 +- 10 files changed, 52 insertions(+), 64 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index e27eac74..7a6fa878 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,6 +1,13 @@ CHANGELOG ========= +2023-09-07 +---------- +- `pub_frame_rate` now controls the `InitParameters::grab_compute_capping_fps` parameter of the ZED SDK instead of controlling the frequency of a parallel thread. It's not a Dynamic parameter anymore. +- Change `general/camera_flip` parameter to string: 'AUTO', 'ON', 'OFF' +- Change 'general/verbose' from bool to integer + + 2023-04-27 ---------- - Add 1080p for ZED X and ZED X Mini diff --git a/zed_nodelets/cfg/Zed.cfg b/zed_nodelets/cfg/Zed.cfg index be86bed4..fe41f8cd 100755 --- a/zed_nodelets/cfg/Zed.cfg +++ b/zed_nodelets/cfg/Zed.cfg @@ -6,7 +6,6 @@ from dynamic_reconfigure.parameter_generator_catkin import * gen = ParameterGenerator() group_general = gen.add_group("general") -group_general.add("pub_frame_rate", double_t, 0, "Video and Depth data frequency", 15.0, 0.1, 60.0); group_depth = gen.add_group("depth") group_depth.add("depth_confidence", int_t, 1, "Depth confidence threshold", 50, 1, 100) diff --git a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp index c1160508..52adb949 100644 --- a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp +++ b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp @@ -190,6 +190,11 @@ class ZEDWrapperNodelet : public nodelet::Nodelet { */ void callback_pubFusedPointCloud(const ros::TimerEvent& e); + /*! + * @brief Publish Color and Depth images + */ + void pubVideoDepth(); + /*! \brief Publish the informations of a camera with a ros Publisher * \param cam_info_msg : the information message to publish * \param pub_cam_info : the publisher object to use @@ -245,11 +250,6 @@ class ZEDWrapperNodelet : public nodelet::Nodelet { */ void callback_dynamicReconf(zed_nodelets::ZedConfig& config, uint32_t level); - /*! \brief Callback to publish Video and Depth data - * \param e : the ros::TimerEvent binded to the callback - */ - void callback_pubVideoDepth(const ros::TimerEvent& e); - /*! \brief Callback to publish Path data with a ROS publisher. 
* \param e : the ros::TimerEvent binded to the callback */ @@ -467,7 +467,6 @@ class ZEDWrapperNodelet : public nodelet::Nodelet { // Timers ros::Timer mPathTimer; ros::Timer mFusedPcTimer; - ros::Timer mVideoDepthTimer; // Services ros::ServiceServer mSrvSetInitPose; @@ -538,13 +537,14 @@ class ZEDWrapperNodelet : public nodelet::Nodelet { bool mPublishTf; bool mPublishMapTf; bool mPublishImuTf; - bool mCameraFlip; + sl::FLIP_MODE mCameraFlip; bool mCameraSelfCalib; // Launch file parameters std::string mCameraName; sl::RESOLUTION mCamResol; int mCamFrameRate; + double mVideoDepthFreq = 15.; sl::DEPTH_MODE mDepthMode; int mGpuId; int mZedId; @@ -557,7 +557,7 @@ class ZEDWrapperNodelet : public nodelet::Nodelet { double mSensPubRate = 400.0; double mPathPubRate; int mPathMaxCount; - bool mVerbose; + int mVerbose; bool mSvoMode = false; double mCamMinDepth; double mCamMaxDepth; @@ -634,8 +634,7 @@ class ZEDWrapperNodelet : public nodelet::Nodelet { int mCamDepthConfidence = 50; int mCamDepthTextureConf = 100; - double mPointCloudFreq = 15.; - double mVideoDepthFreq = 15.; + double mPointCloudFreq = 15.; double mCamImageResizeFactor = 1.0; double mCamDepthResizeFactor = 1.0; @@ -677,8 +676,6 @@ class ZEDWrapperNodelet : public nodelet::Nodelet { std::mutex mObjDetMutex; std::condition_variable mPcDataReadyCondVar; bool mPcDataReady; - std::condition_variable mRgbDepthDataRetrievedCondVar; - bool mRgbDepthDataRetrieved; // Point cloud variables sl::Mat mCloud; diff --git a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp index d1dfc4de..6586f55d 100644 --- a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp +++ b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp @@ -71,11 +71,10 @@ void ZEDWrapperNodelet::onInit() { // Node handlers mNh = getMTNodeHandle(); - mNhNs = getMTPrivateNodeHandle(); + mNhNs = getMTPrivateNodeHandle(); mStopNode = false; mPcDataReady = false; - mRgbDepthDataRetrieved = true; #ifndef NDEBUG @@ -196,6 +195,7 @@ void ZEDWrapperNodelet::onInit() mSvoMode = true; } else { mZedParams.camera_fps = mCamFrameRate; + mZedParams.grab_compute_capping_fps = static_cast(mVideoDepthFreq); mZedParams.camera_resolution = mCamResol; if (mZedSerialNumber == 0) { @@ -415,7 +415,7 @@ void ZEDWrapperNodelet::onInit() NODELET_INFO_STREAM("Advertised on topic " << mPubConfMap.getTopic()); // Disparity publisher - mPubDisparity = mNhNs.advertise(disparityTopic, static_cast(mVideoDepthFreq)); + mPubDisparity = mNhNs.advertise(disparityTopic, 1); NODELET_INFO_STREAM("Advertised on topic " << mPubDisparity.getTopic()); // PointCloud publishers @@ -565,9 +565,6 @@ void ZEDWrapperNodelet::onInit() // Start pool thread mDevicePollThread = std::thread(&ZEDWrapperNodelet::device_poll_thread_func, this); - // Start data publishing timer - mVideoDepthTimer = mNhNs.createTimer(ros::Duration(1.0 / mVideoDepthFreq), &ZEDWrapperNodelet::callback_pubVideoDepth, this); - // Start Sensors thread mSensThread = std::thread(&ZEDWrapperNodelet::sensors_thread_func, this); } @@ -614,6 +611,12 @@ void ZEDWrapperNodelet::readParameters() mNhNs.getParam("general/grab_frame_rate", mCamFrameRate); checkResolFps(); NODELET_INFO_STREAM(" * Camera Grab Framerate\t-> " << mCamFrameRate); + mNhNs.getParam("general/pub_frame_rate", mVideoDepthFreq); + if(mVideoDepthFreq>mCamFrameRate) { + mVideoDepthFreq = mCamFrameRate; + add warning + } + NODELET_INFO_STREAM(" * Grab compute capping FPS\t-> " << mVideoDepthFreq << " Hz"); 
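// ---------------------------------------------------------------------------
// Not part of the patch: a worked example of the new capping behavior,
// assuming the ZED SDK 4.x semantics of InitParameters::grab_compute_capping_fps.
// With
//
//   general/grab_frame_rate: 30  ->  mZedParams.camera_fps = 30
//   general/pub_frame_rate:  15  ->  mZedParams.grab_compute_capping_fps = 15
//
// the sensor keeps capturing at 30 FPS (so short exposure times are preserved),
// while sl::Camera::grab() is throttled to ~15 Hz; video/depth topics are then
// published at ~15 Hz directly from the grab loop instead of from the removed
// mVideoDepthTimer callback.
// ---------------------------------------------------------------------------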
mNhNs.getParam("general/gpu_id", mGpuId); NODELET_INFO_STREAM(" * Gpu ID\t\t\t-> " << mGpuId); @@ -621,8 +624,16 @@ void ZEDWrapperNodelet::readParameters() NODELET_INFO_STREAM(" * Camera ID\t\t\t-> " << mZedId); mNhNs.getParam("general/verbose", mVerbose); NODELET_INFO_STREAM(" * Verbose\t\t\t-> " << (mVerbose ? "ENABLED" : "DISABLED")); - mNhNs.param("general/camera_flip", mCameraFlip, false); - NODELET_INFO_STREAM(" * Camera Flip\t\t\t-> " << (mCameraFlip ? "ENABLED" : "DISABLED")); + std::string flip_str; + mNhNs.getParam("general/camera_flip", flip_str); + if(flip_str=="ON") { + mCameraFlip = sl::FLIP_MODE::ON; + } else if(flip_str=="OFF") { + mCameraFlip = sl::FLIP_MODE::OFF; + } else { + mCameraFlip = sl::FLIP_MODE::AUTO; + } + NODELET_INFO_STREAM(" * Camera Flip\t\t\t-> " << sl::toString(mCameraFlip).c_str()); mNhNs.param("general/self_calib", mCameraSelfCalib, true); NODELET_INFO_STREAM(" * Self calibration\t\t-> " << (mCameraSelfCalib ? "ENABLED" : "DISABLED")); @@ -882,10 +893,7 @@ void ZEDWrapperNodelet::readParameters() mNhNs.getParam("depth_confidence", mCamDepthConfidence); NODELET_INFO_STREAM(" * [DYN] Depth confidence\t-> " << mCamDepthConfidence); mNhNs.getParam("depth_texture_conf", mCamDepthTextureConf); - NODELET_INFO_STREAM(" * [DYN] Depth texture conf.\t-> " << mCamDepthTextureConf); - - mNhNs.getParam("pub_frame_rate", mVideoDepthFreq); - NODELET_INFO_STREAM(" * [DYN] pub_frame_rate\t\t-> " << mVideoDepthFreq << " Hz"); + NODELET_INFO_STREAM(" * [DYN] Depth texture conf.\t-> " << mCamDepthTextureConf); mNhNs.getParam("point_cloud_freq", mPointCloudFreq); NODELET_INFO_STREAM(" * [DYN] point_cloud_freq\t-> " << mPointCloudFreq << " Hz"); mNhNs.getParam("brightness", mCamBrightness); @@ -2264,7 +2272,6 @@ void ZEDWrapperNodelet::updateDynamicReconfigure() config.gamma = mCamGamma; config.whitebalance_temperature = mCamWB / 100; config.point_cloud_freq = mPointCloudFreq; - config.pub_frame_rate = mVideoDepthFreq; mDynParMutex.unlock(); mDynServerMutex.lock(); @@ -2282,25 +2289,7 @@ void ZEDWrapperNodelet::callback_dynamicReconf(zed_nodelets::ZedConfig& config, mDynParMutex.lock(); DynParams param = static_cast(level); - switch (param) { - case DATAPUB_FREQ: - if (config.pub_frame_rate > mCamFrameRate) { - mVideoDepthFreq = mCamFrameRate; - NODELET_WARN_STREAM("'pub_frame_rate' cannot be major than camera grabbing framerate. Set to " - << mVideoDepthFreq); - - mUpdateDynParams = true; - } else { - mVideoDepthFreq = config.pub_frame_rate; - NODELET_INFO("Reconfigure Video and Depth pub. 
frequency: %g", mVideoDepthFreq); - } - - mVideoDepthTimer.setPeriod(ros::Duration(1.0 / mVideoDepthFreq)); - - mDynParMutex.unlock(); - // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); - break; - + switch (param) { case CONFIDENCE: mCamDepthConfidence = config.depth_confidence; NODELET_INFO("Reconfigure confidence threshold: %d", mCamDepthConfidence); @@ -2436,7 +2425,7 @@ void ZEDWrapperNodelet::callback_dynamicReconf(zed_nodelets::ZedConfig& config, } } -void ZEDWrapperNodelet::callback_pubVideoDepth(const ros::TimerEvent& e) +void ZEDWrapperNodelet::pubVideoDepth() { static sl::Timestamp lastZedTs = 0; // Used to calculate stable publish frequency @@ -2472,8 +2461,6 @@ void ZEDWrapperNodelet::callback_pubVideoDepth(const ros::TimerEvent& e) sl::Timestamp ts_depth = 0; // used to check RGB/Depth sync sl::Timestamp grab_ts = 0; - mCamDataMutex.lock(); - // ----> Retrieve all required image data if (rgbSubnumber + leftSubnumber + stereoSubNumber > 0) { mZed.retrieveImage(mat_left, sl::VIEW::LEFT, sl::MEM::CPU, mMatResolVideo); @@ -2566,8 +2553,6 @@ void ZEDWrapperNodelet::callback_pubVideoDepth(const ros::TimerEvent& e) } // <---- Publish sensor data if sync is required by user or SVO - mCamDataMutex.unlock(); - // ----> Notify grab thread that all data are synchronized and a new grab can be done // mRgbDepthDataRetrievedCondVar.notify_one(); // mRgbDepthDataRetrieved = true; @@ -3278,10 +3263,8 @@ void ZEDWrapperNodelet::device_poll_thread_func() std::chrono::steady_clock::time_point start_elab = std::chrono::steady_clock::now(); - mCamDataMutex.lock(); - mRgbDepthDataRetrieved = false; + // ZED Grab mGrabStatus = mZed.grab(runParams); - mCamDataMutex.unlock(); // cout << toString(grab_status) << endl; if (mGrabStatus != sl::ERROR_CODE::SUCCESS) { @@ -3342,6 +3325,9 @@ void ZEDWrapperNodelet::device_poll_thread_func() continue; } + // Publish Color and Depth images + pubVideoDepth(); + mFrameCount++; // SVO recording @@ -3950,7 +3936,7 @@ void ZEDWrapperNodelet::callback_updateDiagnostic(diagnostic_updater::Diagnostic if (mObjDetRunning) { double freq = 1000. / mObjDetPeriodMean_msec->getMean(); - double freq_perc = 100. * freq / mCamFrameRate; + double freq_perc = 100. 
* freq / mVideoDepthFreq; stat.addf("Object detection", "Mean Frequency: %.3f Hz (%.1f%%)", freq, freq_perc); } else { stat.add("Object Detection", "INACTIVE"); diff --git a/zed_wrapper/cfg/Zed.cfg b/zed_wrapper/cfg/Zed.cfg index 051c419e..9a11d9fe 100755 --- a/zed_wrapper/cfg/Zed.cfg +++ b/zed_wrapper/cfg/Zed.cfg @@ -6,7 +6,6 @@ from dynamic_reconfigure.parameter_generator_catkin import * gen = ParameterGenerator() group_general = gen.add_group("general") -group_general.add("pub_frame_rate", double_t, 0, "Video and Depth data frequency", 15.0, 0.1, 60.0); group_depth = gen.add_group("depth") group_depth.add("depth_confidence", int_t, 1, "Depth confidence threshold", 50, 1, 100) diff --git a/zed_wrapper/params/common.yaml b/zed_wrapper/params/common.yaml index de5160e0..11e25c6e 100644 --- a/zed_wrapper/params/common.yaml +++ b/zed_wrapper/params/common.yaml @@ -16,7 +16,6 @@ auto_whitebalance: true # Dynamic whitebalance_temperature: 42 # Dynamic - works only if `auto_whitebalance` is false depth_confidence: 30 # Dynamic depth_texture_conf: 100 # Dynamic -pub_frame_rate: 15.0 # Dynamic - frequency of publishing of video and depth data point_cloud_freq: 10.0 # Dynamic - frequency of the pointcloud publishing (equal or less to `grab_frame_rate` value) general: @@ -25,10 +24,11 @@ general: serial_number: 0 gpu_id: -1 base_frame: 'base_link' # must be equal to the frame_id used in the URDF file - verbose: false # Enable info message by the ZED SDK + verbose: 1 # Set verbose level of the ZED SDK svo_compression: 2 # `0`: LOSSLESS, `1`: AVCHD, `2`: HEVC self_calib: true # enable/disable self calibration at starting - camera_flip: false + camera_flip: 'AUTO' # camera flip mode: 'OFF', 'ON', 'AUTO' + pub_frame_rate: 15.0 # frequency of publishing of video and depth data (see SDK API "InitParameters::grab_compute_capping_fps") video: img_downsample_factor: 0.5 # Resample factor for images [0.01,1.0] The SDK works with native image sizes, but publishes rescaled image. 
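A minimal sketch (not part of the patch) of how the reorganized 'general' parameters above are expected to reach the ZED SDK when the camera is opened. The camera_fps and grab_compute_capping_fps assignments mirror PATCH 01; the camera_image_flip and sdk_verbose field names and the float cast are assumptions based on the ZED SDK 4.x InitParameters API.

    // Sketch only: mapping of the new 'general' parameters onto sl::InitParameters.
    sl::InitParameters init_params;
    init_params.camera_fps = mCamFrameRate;              // general/grab_frame_rate
    init_params.grab_compute_capping_fps =
        static_cast<float>(mVideoDepthFreq);             // general/pub_frame_rate
    init_params.camera_image_flip = mCameraFlip;         // general/camera_flip: 'AUTO'/'ON'/'OFF' parsed to sl::FLIP_MODE
    init_params.sdk_verbose = mVerbose;                  // general/verbose: integer verbosity level (0 = silent)
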
diff --git a/zed_wrapper/params/zed.yaml b/zed_wrapper/params/zed.yaml index 5a9a35f1..06cc54bd 100644 --- a/zed_wrapper/params/zed.yaml +++ b/zed_wrapper/params/zed.yaml @@ -5,7 +5,7 @@ general: camera_model: 'zed' resolution: 3 # '0': HD2K, '1': HD1080, '3': HD720, '5': VGA, '6': AUTO - grab_frame_rate: 15 # Frequency of frame grabbing for internal SDK operations + grab_frame_rate: 30 # Frequency of frame grabbing for internal SDK operations depth: min_depth: 0.3 # Min: 0.3, Max: 3.0 - Default 0.7 - Note: reducing this value wil require more computational power and GPU memory diff --git a/zed_wrapper/params/zed2.yaml b/zed_wrapper/params/zed2.yaml index 7f6ea2ce..e467b390 100644 --- a/zed_wrapper/params/zed2.yaml +++ b/zed_wrapper/params/zed2.yaml @@ -5,7 +5,7 @@ general: camera_model: 'zed2' resolution: 3 # '0': HD2K, '1': HD1080, '3': HD720, '5': VGA, '6': AUTO - grab_frame_rate: 15 # Frequency of frame grabbing for internal SDK operations + grab_frame_rate: 30 # Frequency of frame grabbing for internal SDK operations depth: min_depth: 0.3 # Min: 0.2, Max: 3.0 - Default 0.7 - Note: reducing this value wil require more computational power and GPU memory diff --git a/zed_wrapper/params/zed2i.yaml b/zed_wrapper/params/zed2i.yaml index 5e490e76..b50091ca 100644 --- a/zed_wrapper/params/zed2i.yaml +++ b/zed_wrapper/params/zed2i.yaml @@ -5,8 +5,8 @@ general: camera_model: 'zed2i' resolution: 3 # '0': HD2K, '1': HD1080, '3': HD720, '5': VGA, '6': AUTO - grab_frame_rate: 15 # Frequency of frame grabbing for internal SDK operations + grab_frame_rate: 30 # Frequency of frame grabbing for internal SDK operations depth: - min_depth: 0.2 # Min: 0.2, Max: 3.0 - Default 0.7 - Note: reducing this value wil require more computational power and GPU memory - max_depth: 10.0 # Max: 40.0 + min_depth: 0.3 # Min: 0.3, Max: 3.0 - Default 0.7 - Note: reducing this value wil require more computational power and GPU memory + max_depth: 10.0 # Max: 40.0 diff --git a/zed_wrapper/params/zedm.yaml b/zed_wrapper/params/zedm.yaml index 88902df1..48102e8a 100644 --- a/zed_wrapper/params/zedm.yaml +++ b/zed_wrapper/params/zedm.yaml @@ -5,7 +5,7 @@ general: camera_model: 'zedm' resolution: 3 # '0': HD2K, '1': HD1080, '3': HD720, '5': VGA, '6': AUTO - grab_frame_rate: 15 # Frequency of frame grabbing for internal SDK operations + grab_frame_rate: 30 # Frequency of frame grabbing for internal SDK operations depth: min_depth: 0.1 # Min: 0.1, Max: 3.0 - Default 0.3 - Note: reducing this value wil require more computational power and GPU memory From cdc46b203a9470e3aecf1acfa78fd73098ba7f0f Mon Sep 17 00:00:00 2001 From: Walter Lucetti Date: Mon, 11 Sep 2023 14:47:45 +0200 Subject: [PATCH 02/21] Add Positional Tracking parameters * Add `pos_tracking/set_gravity_as_origin` parameter * Add `pos_tracking/pos_tracking_mode` parameter. --- CHANGELOG.rst | 9 +- .../include/zed_wrapper_nodelet.hpp | 4 + .../zed_nodelet/src/zed_wrapper_nodelet.cpp | 91 ++++++++++++------- zed_wrapper/params/common.yaml | 2 + 4 files changed, 70 insertions(+), 36 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 17e333d8..3cb064f1 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,11 +1,18 @@ CHANGELOG ========= +2023-09-08 +---------- +- Add `pos_tracking/set_gravity_as_origin` parameter. If 'true' align the positional tracking world to imu gravity measurement. Keep the yaw from the user initial pose. +- Add `pos_tracking/pos_tracking_mode` parameter. 
Matches the ZED SDK setting: 'QUALITY', 'STANDARD' +- Fix the warning `Elaboration takes longer [...]` + + 2023-09-07 ---------- - `pub_frame_rate` now controls the `InitParameters::grab_compute_capping_fps` parameter of the ZED SDK instead of controlling the frequency of a parallel thread. It's not a Dynamic parameter anymore. - Change `general/camera_flip` parameter to string: 'AUTO', 'ON', 'OFF' -- Change 'general/verbose' from bool to integer +- Change `general/verbose` from bool to integer v4.0.5 ------ diff --git a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp index 52adb949..11404241 100644 --- a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp +++ b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp @@ -565,12 +565,16 @@ class ZEDWrapperNodelet : public nodelet::Nodelet { // Positional tracking bool mPosTrackingEnabled = false; + sl::POSITIONAL_TRACKING_MODE mPosTrkMode = sl::POSITIONAL_TRACKING_MODE::QUALITY; bool mPosTrackingActivated = false; bool mPosTrackingReady = false; bool mTwoDMode = false; double mFixedZValue = 0.0; bool mFloorAlignment = false; bool mImuFusion = true; + bool mSetGravityAsOrigin = false; + + // Flags bool mGrabActive = false; // Indicate if camera grabbing is active (at least one topic subscribed) sl::ERROR_CODE mConnStatus; sl::ERROR_CODE mGrabStatus; diff --git a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp index 6586f55d..7e784037 100644 --- a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp +++ b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp @@ -614,7 +614,7 @@ void ZEDWrapperNodelet::readParameters() mNhNs.getParam("general/pub_frame_rate", mVideoDepthFreq); if(mVideoDepthFreq>mCamFrameRate) { mVideoDepthFreq = mCamFrameRate; - add warning + NODELET_WARN_STREAM("'general/pub_frame_rate' cannot be bigger than 'general/grab_frame_rate'. Forced to the correct value: " << mVideoDepthFreq); } NODELET_INFO_STREAM(" * Grab compute capping FPS\t-> " << mVideoDepthFreq << " Hz"); @@ -673,10 +673,27 @@ void ZEDWrapperNodelet::readParameters() // <----- Depth NODELET_INFO_STREAM("*** POSITIONAL TRACKING PARAMETERS ***"); - // ----> Tracking mNhNs.param("pos_tracking/pos_tracking_enabled", mPosTrackingEnabled, true); NODELET_INFO_STREAM(" * Positional tracking\t\t-> " << (mPosTrackingEnabled ? "ENABLED" : "DISABLED")); + + std::string pos_trk_mode; + mNhNs.getParam("pos_tracking/pos_tracking_mode", pos_trk_mode); + if (pos_trk_mode == "QUALITY") { + mPosTrkMode = sl::POSITIONAL_TRACKING_MODE::QUALITY; + } else if (pos_trk_mode == "STANDARD") { + mPosTrkMode = sl::POSITIONAL_TRACKING_MODE::STANDARD; + } else { + NODELET_WARN_STREAM( + "'pos_tracking/pos_tracking_mode' not valid ('" << pos_trk_mode << + "'). Using default value."); + mPosTrkMode = sl::POSITIONAL_TRACKING_MODE::QUALITY; + } + NODELET_INFO_STREAM( " * Positional tracking mode: " << sl::toString(mPosTrkMode).c_str()); + + mNhNs.getParam("pos_tracking/set_gravity_as_origin", mSetGravityAsOrigin); + NODELET_INFO_STREAM(" * Set gravity as origin\t\t-> " << (mSetGravityAsOrigin ? 
"ENABLED" : "DISABLED")); + mNhNs.getParam("pos_tracking/path_pub_rate", mPathPubRate); NODELET_INFO_STREAM(" * Path rate\t\t\t-> " << mPathPubRate << " Hz"); mNhNs.getParam("pos_tracking/path_max_count", mPathMaxCount); @@ -893,9 +910,16 @@ void ZEDWrapperNodelet::readParameters() mNhNs.getParam("depth_confidence", mCamDepthConfidence); NODELET_INFO_STREAM(" * [DYN] Depth confidence\t-> " << mCamDepthConfidence); mNhNs.getParam("depth_texture_conf", mCamDepthTextureConf); - NODELET_INFO_STREAM(" * [DYN] Depth texture conf.\t-> " << mCamDepthTextureConf); + NODELET_INFO_STREAM(" * [DYN] Depth texture conf.\t-> " << mCamDepthTextureConf); + mNhNs.getParam("point_cloud_freq", mPointCloudFreq); + if(mPointCloudFreq>mVideoDepthFreq) { + mPointCloudFreq=mVideoDepthFreq; + NODELET_WARN_STREAM("'point_cloud_freq' cannot be major than 'general/pub_frame_rate'. Set to " + << mPointCloudFreq); + } NODELET_INFO_STREAM(" * [DYN] point_cloud_freq\t-> " << mPointCloudFreq << " Hz"); + mNhNs.getParam("brightness", mCamBrightness); NODELET_INFO_STREAM(" * [DYN] brightness\t\t-> " << mCamBrightness); mNhNs.getParam("contrast", mCamContrast); @@ -1601,6 +1625,8 @@ void ZEDWrapperNodelet::start_pos_tracking() posTrackParams.enable_imu_fusion = mImuFusion; posTrackParams.set_as_static = false; + posTrackParams.set_gravity_as_origin = mSetGravityAsOrigin; + posTrackParams.mode = mPosTrkMode; sl::ERROR_CODE err = mZed.enablePositionalTracking(posTrackParams); @@ -2305,9 +2331,9 @@ void ZEDWrapperNodelet::callback_dynamicReconf(zed_nodelets::ZedConfig& config, break; case POINTCLOUD_FREQ: - if (config.point_cloud_freq > mCamFrameRate) { - mPointCloudFreq = mCamFrameRate; - NODELET_WARN_STREAM("'point_cloud_freq' cannot be major than camera grabbing framerate. Set to " + if (config.point_cloud_freq > mVideoDepthFreq) { + mPointCloudFreq = mVideoDepthFreq; + NODELET_WARN_STREAM("'point_cloud_freq' cannot be major than Video/Depth publishing framerate. 
Set to " << mPointCloudFreq); mUpdateDynParams = true; @@ -3142,7 +3168,7 @@ void ZEDWrapperNodelet::publishSensData(ros::Time t) void ZEDWrapperNodelet::device_poll_thread_func() { - ros::Rate loop_rate(mCamFrameRate); + ros::Rate loop_rate(mVideoDepthFreq); mRecording = false; @@ -3158,7 +3184,6 @@ void ZEDWrapperNodelet::device_poll_thread_func() } else { mFrameTimestamp = sl_tools::slTime2Ros(mZed.getTimestamp(sl::TIME_REFERENCE::CURRENT)); } - mPrevFrameTimestamp = mFrameTimestamp; mPosTrackingActivated = false; @@ -3325,12 +3350,22 @@ void ZEDWrapperNodelet::device_poll_thread_func() continue; } + mFrameCount++; + + // ----> Timestamp + if (mSvoMode) { + mFrameTimestamp = ros::Time::now(); + } else { + mFrameTimestamp = sl_tools::slTime2Ros(mZed.getTimestamp(sl::TIME_REFERENCE::IMAGE)); + } + mPrevFrameTimestamp = mFrameTimestamp; + ros::Time stamp = mFrameTimestamp; // Fix processing Timestamp + // <---- Timestamp + // Publish Color and Depth images pubVideoDepth(); - mFrameCount++; - - // SVO recording + // ----> SVO recording mRecMutex.lock(); if (mRecording) { @@ -3342,13 +3377,10 @@ void ZEDWrapperNodelet::device_poll_thread_func() mDiagUpdater.force_update(); } - mRecMutex.unlock(); + // <---- SVO recording - // Timestamp - mPrevFrameTimestamp = mFrameTimestamp; - - // Publish freq calculation + // ----> Publish freq calculation static std::chrono::steady_clock::time_point last_time = std::chrono::steady_clock::now(); std::chrono::steady_clock::time_point now = std::chrono::steady_clock::now(); @@ -3356,17 +3388,8 @@ void ZEDWrapperNodelet::device_poll_thread_func() last_time = now; mGrabPeriodMean_usec->addValue(elapsed_usec); - // NODELET_INFO_STREAM("Grab time: " << elapsed_usec / 1000 << " msec"); - - // Timestamp - if (mSvoMode) { - mFrameTimestamp = ros::Time::now(); - } else { - mFrameTimestamp = sl_tools::slTime2Ros(mZed.getTimestamp(sl::TIME_REFERENCE::IMAGE)); - } - - ros::Time stamp = mFrameTimestamp; // Timestamp + // <---- Publish freq calculation // ----> Camera Settings int value; @@ -3808,16 +3831,14 @@ void ZEDWrapperNodelet::device_poll_thread_func() static int count_warn = 0; if (!loop_rate.sleep()) { - if (mean_elab_sec > (1. / mCamFrameRate)) { + if (mean_elab_sec > (1. / mVideoDepthFreq)) { if (++count_warn > 10) { - NODELET_DEBUG_THROTTLE(1.0, "Working thread is not synchronized with the Camera frame rate"); + NODELET_DEBUG_THROTTLE(1.0, "Working thread is not synchronized with the Camera grab rate"); NODELET_DEBUG_STREAM_THROTTLE(1.0, "Expected cycle time: " << loop_rate.expectedCycleTime() << " - Real cycle time: " << mean_elab_sec); - NODELET_WARN_STREAM_THROTTLE(10.0, "Elaboration takes longer (" << mean_elab_sec << " sec) than requested " - "by the FPS rate (" + NODELET_WARN_STREAM_THROTTLE(10.0, "Elaboration takes longer (" << mean_elab_sec << " sec) than requested by the FPS rate (" << loop_rate.expectedCycleTime() << " sec). 
Please consider to " - "lower the 'frame_rate' setting or to " - "reduce the power requirements reducing " - "the resolutions."); + "lower the 'general/pub_frame_rate' setting or to " + "reduce the power requirements by reducing the camera resolutions."); } loop_rate.reset(); @@ -3871,7 +3892,7 @@ void ZEDWrapperNodelet::device_poll_thread_func() mRecording = false; mZed.disableRecording(); } - mStopNode = true; + mZed.close(); NODELET_DEBUG("ZED pool thread finished"); @@ -3893,7 +3914,7 @@ void ZEDWrapperNodelet::callback_updateDiagnostic(diagnostic_updater::Diagnostic stat.addf("Capture", "Mean Frequency: %.1f Hz (%.1f%%)", freq, freq_perc); stat.addf("General Processing", "Mean Time: %.3f sec (Max. %.3f sec)", mElabPeriodMean_sec->getMean(), - 1. / mCamFrameRate); + 1. / mVideoDepthFreq); if (mPublishingData) { freq = 1. / mVideoDepthPeriodMean_sec->getMean(); diff --git a/zed_wrapper/params/common.yaml b/zed_wrapper/params/common.yaml index 11e25c6e..8b166b89 100644 --- a/zed_wrapper/params/common.yaml +++ b/zed_wrapper/params/common.yaml @@ -41,6 +41,8 @@ depth: pos_tracking: pos_tracking_enabled: true # True to enable positional tracking from start + pos_tracking_mode: 'STANDARD' # Matches the ZED SDK setting: 'QUALITY', 'STANDARD' + set_gravity_as_origin: true # If 'true' align the positional tracking world to imu gravity measurement. Keep the yaw from the user initial pose. imu_fusion: true # enable/disable IMU fusion. When set to false, only the optical odometry will be used. publish_tf: true # publish `odom -> base_link` TF publish_map_tf: true # publish `map -> odom` TF From 88c001cb1849040adaa96f88ef641b1c15ff58f7 Mon Sep 17 00:00:00 2001 From: Walter Lucetti Date: Mon, 11 Sep 2023 16:08:10 +0200 Subject: [PATCH 03/21] Improve NO DEPTH mode --- CHANGELOG.rst | 5 + .../include/zed_wrapper_nodelet.hpp | 5 +- .../zed_nodelet/src/zed_wrapper_nodelet.cpp | 540 ++++++++++-------- zed_wrapper/params/common.yaml | 2 +- 4 files changed, 310 insertions(+), 242 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 3cb064f1..8da8a49b 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,6 +1,11 @@ CHANGELOG ========= +2023-09-10 +---------- +- Change the parameter 'depth/quality' to 'depth/depth_mode' and replace numeric values with strings +- Improve the behavior of the "NO DEPTH" mode + 2023-09-08 ---------- - Add `pos_tracking/set_gravity_as_origin` parameter. If 'true' align the positional tracking world to imu gravity measurement. Keep the yaw from the user initial pose. 
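A minimal sketch (not part of the patch) of the pattern the hunks below apply: the "NO DEPTH" behavior is obtained by deriving a single flag from the new 'depth/depth_mode' string and gating every depth-dependent publisher, service and thread on it. Member names mirror the wrapper code; this is an illustration of the pattern, not the exact diff.

    // Gating pattern applied throughout readParameters()/onInit() in PATCH 03.
    mDepthDisabled = (mDepthMode == sl::DEPTH_MODE::NONE);  // set while parsing 'depth/depth_mode'

    if (!mDepthDisabled) {
      // Depth-dependent interfaces are created only when a depth mode is active.
      mPubDepth = it_zed.advertiseCamera(depth_topic_root, 1);
      mSrvResetTracking = mNhNs.advertiseService("reset_tracking", &ZEDWrapperNodelet::on_reset_tracking, this);
      mPcThread = std::thread(&ZEDWrapperNodelet::pointcloud_thread_func, this);
    }
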
diff --git a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp index 11404241..1242d945 100644 --- a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp +++ b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp @@ -545,7 +545,7 @@ class ZEDWrapperNodelet : public nodelet::Nodelet { sl::RESOLUTION mCamResol; int mCamFrameRate; double mVideoDepthFreq = 15.; - sl::DEPTH_MODE mDepthMode; + sl::DEPTH_MODE mDepthMode = sl::DEPTH_MODE::ULTRA; int mGpuId; int mZedId; int mDepthStabilization; @@ -573,7 +573,7 @@ class ZEDWrapperNodelet : public nodelet::Nodelet { bool mFloorAlignment = false; bool mImuFusion = true; bool mSetGravityAsOrigin = false; - + // Flags bool mGrabActive = false; // Indicate if camera grabbing is active (at least one topic subscribed) sl::ERROR_CODE mConnStatus; @@ -581,6 +581,7 @@ class ZEDWrapperNodelet : public nodelet::Nodelet { sl::POSITIONAL_TRACKING_STATE mPosTrackingStatus; bool mSensPublishing = false; bool mPcPublishing = false; + bool mDepthDisabled = false; // Last frame time ros::Time mPrevFrameTimestamp; diff --git a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp index 7e784037..d4bc0d56 100644 --- a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp +++ b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp @@ -401,72 +401,75 @@ void ZEDWrapperNodelet::onInit() NODELET_INFO_STREAM("Advertised on topic " << mPubRawRightGray.getTopic()); NODELET_INFO_STREAM("Advertised on topic " << mPubRawRightGray.getInfoTopic()); - mPubDepth = it_zed.advertiseCamera(depth_topic_root, 1); // depth - NODELET_INFO_STREAM("Advertised on topic " << mPubDepth.getTopic()); - NODELET_INFO_STREAM("Advertised on topic " << mPubDepth.getInfoTopic()); - mPubStereo = it_zed.advertise(stereo_topic, 1); NODELET_INFO_STREAM("Advertised on topic " << mPubStereo.getTopic()); mPubRawStereo = it_zed.advertise(stereo_raw_topic, 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubRawStereo.getTopic()); - - // Confidence Map publisher - mPubConfMap = mNhNs.advertise(conf_map_topic, 1); // confidence map - NODELET_INFO_STREAM("Advertised on topic " << mPubConfMap.getTopic()); - - // Disparity publisher - mPubDisparity = mNhNs.advertise(disparityTopic, 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubDisparity.getTopic()); - - // PointCloud publishers - mPubCloud = mNhNs.advertise(pointcloud_topic, 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubCloud.getTopic()); - - if (mMappingEnabled) { - mPubFusedCloud = mNhNs.advertise(pointcloud_fused_topic, 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubFusedCloud.getTopic() << " @ " << mFusedPcPubFreq << " Hz"); - } - - // Object detection publishers - if (mObjDetEnabled) { - mPubObjDet = mNhNs.advertise(object_det_topic, 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubObjDet.getTopic()); - } + NODELET_INFO_STREAM("Advertised on topic " << mPubRawStereo.getTopic()); + + // Detected planes publisher + mPubPlane = mNhNs.advertise(plane_topic, 1); + + if (!mDepthDisabled) { + mPubDepth = it_zed.advertiseCamera(depth_topic_root, 1); // depth + NODELET_INFO_STREAM("Advertised on topic " << mPubDepth.getTopic()); + NODELET_INFO_STREAM("Advertised on topic " << mPubDepth.getInfoTopic()); + + // Confidence Map publisher + mPubConfMap = mNhNs.advertise(conf_map_topic, 1); // confidence map + NODELET_INFO_STREAM("Advertised on topic " << mPubConfMap.getTopic()); + 
+ // Disparity publisher + mPubDisparity = mNhNs.advertise(disparityTopic, 1); + NODELET_INFO_STREAM("Advertised on topic " << mPubDisparity.getTopic()); + + // PointCloud publishers + mPubCloud = mNhNs.advertise(pointcloud_topic, 1); + NODELET_INFO_STREAM("Advertised on topic " << mPubCloud.getTopic()); + - // Odometry and Pose publisher - mPubPose = mNhNs.advertise(poseTopic, 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubPose.getTopic()); + if (mMappingEnabled) { + mPubFusedCloud = mNhNs.advertise(pointcloud_fused_topic, 1); + NODELET_INFO_STREAM("Advertised on topic " << mPubFusedCloud.getTopic() << " @ " << mFusedPcPubFreq << " Hz"); + } - mPubPoseCov = mNhNs.advertise(pose_cov_topic, 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubPoseCov.getTopic()); + // Object detection publishers + if (mObjDetEnabled) { + mPubObjDet = mNhNs.advertise(object_det_topic, 1); + NODELET_INFO_STREAM("Advertised on topic " << mPubObjDet.getTopic()); + } - mPubOdom = mNhNs.advertise(odometryTopic, 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubOdom.getTopic()); + // Odometry and Pose publisher + mPubPose = mNhNs.advertise(poseTopic, 1); + NODELET_INFO_STREAM("Advertised on topic " << mPubPose.getTopic()); - // Rviz markers publisher - mPubMarker = mNhNs.advertise(marker_topic, 10, true); + mPubPoseCov = mNhNs.advertise(pose_cov_topic, 1); + NODELET_INFO_STREAM("Advertised on topic " << mPubPoseCov.getTopic()); - // Detected planes publisher - mPubPlane = mNhNs.advertise(plane_topic, 1); + mPubOdom = mNhNs.advertise(odometryTopic, 1); + NODELET_INFO_STREAM("Advertised on topic " << mPubOdom.getTopic()); - // Camera Path - if (mPathPubRate > 0) { - mPubOdomPath = mNhNs.advertise(odom_path_topic, 1, true); - NODELET_INFO_STREAM("Advertised on topic " << mPubOdomPath.getTopic()); - mPubMapPath = mNhNs.advertise(map_path_topic, 1, true); - NODELET_INFO_STREAM("Advertised on topic " << mPubMapPath.getTopic()); + // Rviz markers publisher + mPubMarker = mNhNs.advertise(marker_topic, 10, true); + + // Camera Path + if (mPathPubRate > 0) { + mPubOdomPath = mNhNs.advertise(odom_path_topic, 1, true); + NODELET_INFO_STREAM("Advertised on topic " << mPubOdomPath.getTopic()); + mPubMapPath = mNhNs.advertise(map_path_topic, 1, true); + NODELET_INFO_STREAM("Advertised on topic " << mPubMapPath.getTopic()); - mPathTimer = mNhNs.createTimer(ros::Duration(1.0 / mPathPubRate), &ZEDWrapperNodelet::callback_pubPath, this); + mPathTimer = mNhNs.createTimer(ros::Duration(1.0 / mPathPubRate), &ZEDWrapperNodelet::callback_pubPath, this); - if (mPathMaxCount != -1) { - NODELET_DEBUG_STREAM("Path vectors reserved " << mPathMaxCount << " poses."); - mOdomPath.reserve(mPathMaxCount); - mMapPath.reserve(mPathMaxCount); + if (mPathMaxCount != -1) { + NODELET_DEBUG_STREAM("Path vectors reserved " << mPathMaxCount << " poses."); + mOdomPath.reserve(mPathMaxCount); + mMapPath.reserve(mPathMaxCount); - NODELET_DEBUG_STREAM("Path vector sizes: " << mOdomPath.size() << " " << mMapPath.size()); + NODELET_DEBUG_STREAM("Path vector sizes: " << mOdomPath.size() << " " << mMapPath.size()); + } + } else { + NODELET_INFO_STREAM("Path topics not published -> mPathPubRate: " << mPathPubRate); } - } else { - NODELET_INFO_STREAM("Path topics not published -> mPathPubRate: " << mPathPubRate); } // Sensor publishers @@ -535,13 +538,24 @@ void ZEDWrapperNodelet::onInit() // Subscribers mClickedPtSub = mNhNs.subscribe(mClickedPtTopic, 10, &ZEDWrapperNodelet::clickedPtCallback, this); - NODELET_INFO_STREAM("Subscribed to topic " << 
mClickedPtTopic.c_str()); // Services - mSrvSetInitPose = mNhNs.advertiseService("set_pose", &ZEDWrapperNodelet::on_set_pose, this); - mSrvResetOdometry = mNhNs.advertiseService("reset_odometry", &ZEDWrapperNodelet::on_reset_odometry, this); - mSrvResetTracking = mNhNs.advertiseService("reset_tracking", &ZEDWrapperNodelet::on_reset_tracking, this); + if (!mDepthDisabled) { + mSrvSetInitPose = mNhNs.advertiseService("set_pose", &ZEDWrapperNodelet::on_set_pose, this); + mSrvResetOdometry = mNhNs.advertiseService("reset_odometry", &ZEDWrapperNodelet::on_reset_odometry, this); + mSrvResetTracking = mNhNs.advertiseService("reset_tracking", &ZEDWrapperNodelet::on_reset_tracking, this); + + mSrvSaveAreaMemory = mNhNs.advertiseService("save_area_memory", &ZEDWrapperNodelet::on_save_area_memory, this); + + mSrvStartMapping = mNhNs.advertiseService("start_3d_mapping", &ZEDWrapperNodelet::on_start_3d_mapping, this); + mSrvStopMapping = mNhNs.advertiseService("stop_3d_mapping", &ZEDWrapperNodelet::on_stop_3d_mapping, this); + mSrvSave3dMap = mNhNs.advertiseService("save_3d_map", &ZEDWrapperNodelet::on_save_3d_map, this); + + mSrvStartObjDet = mNhNs.advertiseService("start_object_detection", &ZEDWrapperNodelet::on_start_object_detection, this); + mSrvStopObjDet = mNhNs.advertiseService("stop_object_detection", &ZEDWrapperNodelet::on_stop_object_detection, this); + } + mSrvSvoStartRecording = mNhNs.advertiseService("start_svo_recording", &ZEDWrapperNodelet::on_start_svo_recording, this); mSrvSvoStopRecording = mNhNs.advertiseService("stop_svo_recording", &ZEDWrapperNodelet::on_stop_svo_recording, this); @@ -549,32 +563,25 @@ void ZEDWrapperNodelet::onInit() mSrvToggleLed = mNhNs.advertiseService("toggle_led", &ZEDWrapperNodelet::on_toggle_led, this); mSrvSvoStartStream = mNhNs.advertiseService("start_remote_stream", &ZEDWrapperNodelet::on_start_remote_stream, this); mSrvSvoStopStream = mNhNs.advertiseService("stop_remote_stream", &ZEDWrapperNodelet::on_stop_remote_stream, this); - - mSrvStartMapping = mNhNs.advertiseService("start_3d_mapping", &ZEDWrapperNodelet::on_start_3d_mapping, this); - mSrvStopMapping = mNhNs.advertiseService("stop_3d_mapping", &ZEDWrapperNodelet::on_stop_3d_mapping, this); - mSrvSave3dMap = mNhNs.advertiseService("save_3d_map", &ZEDWrapperNodelet::on_save_3d_map, this); - - mSrvStartObjDet = mNhNs.advertiseService("start_object_detection", &ZEDWrapperNodelet::on_start_object_detection, this); - mSrvStopObjDet = mNhNs.advertiseService("stop_object_detection", &ZEDWrapperNodelet::on_stop_object_detection, this); - - mSrvSaveAreaMemory = mNhNs.advertiseService("save_area_memory", &ZEDWrapperNodelet::on_save_area_memory, this); - - // Start Pointcloud thread - mPcThread = std::thread(&ZEDWrapperNodelet::pointcloud_thread_func, this); + + if (!mDepthDisabled) { + // Start Pointcloud thread + mPcThread = std::thread(&ZEDWrapperNodelet::pointcloud_thread_func, this); + } // Start pool thread mDevicePollThread = std::thread(&ZEDWrapperNodelet::device_poll_thread_func, this); // Start Sensors thread mSensThread = std::thread(&ZEDWrapperNodelet::sensors_thread_func, this); + + NODELET_INFO("ZED Node started"); } void ZEDWrapperNodelet::readParameters() { - NODELET_INFO_STREAM("*** GENERAL PARAMETERS ***"); - // ----> General - // Get parameters from param files + NODELET_INFO_STREAM("*** GENERAL PARAMETERS ***"); std::string camera_model; mNhNs.getParam("general/camera_model", camera_model); @@ -656,146 +663,182 @@ void ZEDWrapperNodelet::readParameters() NODELET_INFO_STREAM("*** DEPTH 
PARAMETERS ***"); // -----> Depth - int depth_mode; - mNhNs.getParam("depth/quality", depth_mode); - mDepthMode = static_cast(depth_mode); - NODELET_INFO_STREAM(" * Depth quality\t\t-> " << sl::toString(mDepthMode).c_str()); - mNhNs.getParam("depth/openni_depth_mode", mOpenniDepthMode); - NODELET_INFO_STREAM(" * OpenNI mode\t\t\t-> " << (mOpenniDepthMode ? "ENABLED" : "DISABLED")); - mNhNs.getParam("depth/depth_stabilization", mDepthStabilization); - NODELET_INFO_STREAM(" * Depth Stabilization\t\t-> " << mDepthStabilization ); - mNhNs.getParam("depth/min_depth", mCamMinDepth); - NODELET_INFO_STREAM(" * Minimum depth\t\t-> " << mCamMinDepth << " m"); - mNhNs.getParam("depth/max_depth", mCamMaxDepth); - NODELET_INFO_STREAM(" * Maximum depth\t\t-> " << mCamMaxDepth << " m"); - mNhNs.getParam("depth/depth_downsample_factor", mCamDepthResizeFactor); - NODELET_INFO_STREAM(" * Depth resample factor\t-> " << mCamDepthResizeFactor); - // <----- Depth + std::string depth_mode_str = sl::toString(mDepthMode).c_str(); + mNhNs.getParam("depth/depth_mode", depth_mode_str); - NODELET_INFO_STREAM("*** POSITIONAL TRACKING PARAMETERS ***"); - // ----> Tracking - mNhNs.param("pos_tracking/pos_tracking_enabled", mPosTrackingEnabled, true); - NODELET_INFO_STREAM(" * Positional tracking\t\t-> " << (mPosTrackingEnabled ? "ENABLED" : "DISABLED")); - - std::string pos_trk_mode; - mNhNs.getParam("pos_tracking/pos_tracking_mode", pos_trk_mode); - if (pos_trk_mode == "QUALITY") { - mPosTrkMode = sl::POSITIONAL_TRACKING_MODE::QUALITY; - } else if (pos_trk_mode == "STANDARD") { - mPosTrkMode = sl::POSITIONAL_TRACKING_MODE::STANDARD; - } else { - NODELET_WARN_STREAM( - "'pos_tracking/pos_tracking_mode' not valid ('" << pos_trk_mode << - "'). Using default value."); - mPosTrkMode = sl::POSITIONAL_TRACKING_MODE::QUALITY; + bool matched = false; + for (int mode = static_cast(sl::DEPTH_MODE::NONE); + mode < static_cast(sl::DEPTH_MODE::LAST); ++mode) + { + std::string test_str = sl::toString(static_cast(mode)).c_str(); + std::replace(test_str.begin(), test_str.end(), ' ', '_'); // Replace spaces with underscores to match the YAML setting + if (test_str == depth_mode_str) { + matched = true; + mDepthMode = static_cast(mode); + break; + } } - NODELET_INFO_STREAM( " * Positional tracking mode: " << sl::toString(mPosTrkMode).c_str()); - mNhNs.getParam("pos_tracking/set_gravity_as_origin", mSetGravityAsOrigin); - NODELET_INFO_STREAM(" * Set gravity as origin\t\t-> " << (mSetGravityAsOrigin ? "ENABLED" : "DISABLED")); - - mNhNs.getParam("pos_tracking/path_pub_rate", mPathPubRate); - NODELET_INFO_STREAM(" * Path rate\t\t\t-> " << mPathPubRate << " Hz"); - mNhNs.getParam("pos_tracking/path_max_count", mPathMaxCount); - NODELET_INFO_STREAM(" * Path history size\t\t-> " << (mPathMaxCount == -1) ? std::string("INFINITE") : std::to_string(mPathMaxCount)); + if (!matched) { + ROS_WARN_STREAM( + "The parameter 'depth.depth_mode' contains a not valid string. 
Please check it in 'common.yaml'."); + ROS_WARN("Using default DEPTH_MODE."); + mDepthMode = sl::DEPTH_MODE::ULTRA; + } - if (mPathMaxCount < 2 && mPathMaxCount != -1) { - mPathMaxCount = 2; + if (mDepthMode == sl::DEPTH_MODE::NONE) { + mDepthDisabled = true; + mDepthStabilization = 0; + ROS_INFO_STREAM(" * Depth mode: " << sl::toString(mDepthMode).c_str() << " - DEPTH DISABLED"); + } else { + mDepthDisabled = false; + ROS_INFO_STREAM(" * Depth mode: " << sl::toString( + mDepthMode).c_str() << " [" << static_cast(mDepthMode) << "]"); + } + + if(!mDepthDisabled) { + mNhNs.getParam("depth/openni_depth_mode", mOpenniDepthMode); + NODELET_INFO_STREAM(" * OpenNI mode\t\t\t-> " << (mOpenniDepthMode ? "ENABLED" : "DISABLED")); + mNhNs.getParam("depth/depth_stabilization", mDepthStabilization); + NODELET_INFO_STREAM(" * Depth Stabilization\t\t-> " << mDepthStabilization ); + mNhNs.getParam("depth/min_depth", mCamMinDepth); + NODELET_INFO_STREAM(" * Minimum depth\t\t-> " << mCamMinDepth << " m"); + mNhNs.getParam("depth/max_depth", mCamMaxDepth); + NODELET_INFO_STREAM(" * Maximum depth\t\t-> " << mCamMaxDepth << " m"); + mNhNs.getParam("depth/depth_downsample_factor", mCamDepthResizeFactor); + NODELET_INFO_STREAM(" * Depth resample factor\t-> " << mCamDepthResizeFactor); } + // <----- Depth + + // ----> Positional Tracking + if(!mDepthDisabled) { + NODELET_INFO_STREAM("*** POSITIONAL TRACKING PARAMETERS ***"); + + mNhNs.param("pos_tracking/pos_tracking_enabled", mPosTrackingEnabled, true); + NODELET_INFO_STREAM(" * Positional tracking\t\t-> " << (mPosTrackingEnabled ? "ENABLED" : "DISABLED")); + + std::string pos_trk_mode; + mNhNs.getParam("pos_tracking/pos_tracking_mode", pos_trk_mode); + if (pos_trk_mode == "QUALITY") { + mPosTrkMode = sl::POSITIONAL_TRACKING_MODE::QUALITY; + } else if (pos_trk_mode == "STANDARD") { + mPosTrkMode = sl::POSITIONAL_TRACKING_MODE::STANDARD; + } else { + NODELET_WARN_STREAM( + "'pos_tracking/pos_tracking_mode' not valid ('" << pos_trk_mode << + "'). Using default value."); + mPosTrkMode = sl::POSITIONAL_TRACKING_MODE::QUALITY; + } + NODELET_INFO_STREAM( " * Positional tracking mode: " << sl::toString(mPosTrkMode).c_str()); - mNhNs.getParam("pos_tracking/initial_base_pose", mInitialBasePose); + mNhNs.getParam("pos_tracking/set_gravity_as_origin", mSetGravityAsOrigin); + NODELET_INFO_STREAM(" * Set gravity as origin\t\t-> " << (mSetGravityAsOrigin ? "ENABLED" : "DISABLED")); - mNhNs.getParam("pos_tracking/area_memory_db_path", mAreaMemDbPath); - mAreaMemDbPath = sl_tools::resolveFilePath(mAreaMemDbPath); - NODELET_INFO_STREAM(" * Odometry DB path\t\t-> " << mAreaMemDbPath.c_str()); + mNhNs.getParam("pos_tracking/path_pub_rate", mPathPubRate); + NODELET_INFO_STREAM(" * Path rate\t\t\t-> " << mPathPubRate << " Hz"); + mNhNs.getParam("pos_tracking/path_max_count", mPathMaxCount); + NODELET_INFO_STREAM(" * Path history size\t\t-> " << (mPathMaxCount == -1) ? std::string("INFINITE") : std::to_string(mPathMaxCount)); - mNhNs.param("pos_tracking/save_area_memory_db_on_exit", mSaveAreaMapOnClosing, false); - NODELET_INFO_STREAM(" * Save Area Memory on closing\t-> " << (mSaveAreaMapOnClosing ? "ENABLED" : "DISABLED")); - mNhNs.param("pos_tracking/area_memory", mAreaMemory, false); - NODELET_INFO_STREAM(" * Area Memory\t\t\t-> " << (mAreaMemory ? "ENABLED" : "DISABLED")); - mNhNs.param("pos_tracking/imu_fusion", mImuFusion, true); - NODELET_INFO_STREAM(" * IMU Fusion\t\t\t-> " << (mImuFusion ? 
"ENABLED" : "DISABLED")); - mNhNs.param("pos_tracking/floor_alignment", mFloorAlignment, false); - NODELET_INFO_STREAM(" * Floor alignment\t\t-> " << (mFloorAlignment ? "ENABLED" : "DISABLED")); - mNhNs.param("pos_tracking/init_odom_with_first_valid_pose", mInitOdomWithPose, true); - NODELET_INFO_STREAM(" * Init Odometry with first valid pose data -> " - << (mInitOdomWithPose ? "ENABLED" : "DISABLED")); - mNhNs.param("pos_tracking/two_d_mode", mTwoDMode, false); - NODELET_INFO_STREAM(" * Two D mode\t\t\t-> " << (mTwoDMode ? "ENABLED" : "DISABLED")); - mNhNs.param("pos_tracking/fixed_z_value", mFixedZValue, 0.0); + if (mPathMaxCount < 2 && mPathMaxCount != -1) { + mPathMaxCount = 2; + } - if (mTwoDMode) { - NODELET_INFO_STREAM(" * Fixed Z value\t\t-> " << mFixedZValue); - } - // <---- Tracking + mNhNs.getParam("pos_tracking/initial_base_pose", mInitialBasePose); + + mNhNs.getParam("pos_tracking/area_memory_db_path", mAreaMemDbPath); + mAreaMemDbPath = sl_tools::resolveFilePath(mAreaMemDbPath); + NODELET_INFO_STREAM(" * Odometry DB path\t\t-> " << mAreaMemDbPath.c_str()); + + mNhNs.param("pos_tracking/save_area_memory_db_on_exit", mSaveAreaMapOnClosing, false); + NODELET_INFO_STREAM(" * Save Area Memory on closing\t-> " << (mSaveAreaMapOnClosing ? "ENABLED" : "DISABLED")); + mNhNs.param("pos_tracking/area_memory", mAreaMemory, false); + NODELET_INFO_STREAM(" * Area Memory\t\t\t-> " << (mAreaMemory ? "ENABLED" : "DISABLED")); + mNhNs.param("pos_tracking/imu_fusion", mImuFusion, true); + NODELET_INFO_STREAM(" * IMU Fusion\t\t\t-> " << (mImuFusion ? "ENABLED" : "DISABLED")); + mNhNs.param("pos_tracking/floor_alignment", mFloorAlignment, false); + NODELET_INFO_STREAM(" * Floor alignment\t\t-> " << (mFloorAlignment ? "ENABLED" : "DISABLED")); + mNhNs.param("pos_tracking/init_odom_with_first_valid_pose", mInitOdomWithPose, true); + NODELET_INFO_STREAM(" * Init Odometry with first valid pose data -> " + << (mInitOdomWithPose ? "ENABLED" : "DISABLED")); + mNhNs.param("pos_tracking/two_d_mode", mTwoDMode, false); + NODELET_INFO_STREAM(" * Two D mode\t\t\t-> " << (mTwoDMode ? "ENABLED" : "DISABLED")); + mNhNs.param("pos_tracking/fixed_z_value", mFixedZValue, 0.0); - NODELET_INFO_STREAM("*** MAPPING PARAMETERS ***"); + if (mTwoDMode) { + NODELET_INFO_STREAM(" * Fixed Z value\t\t-> " << mFixedZValue); + } + } + // <---- Positional Tracking // ----> Mapping - mNhNs.param("mapping/mapping_enabled", mMappingEnabled, false); + NODELET_INFO_STREAM("*** MAPPING PARAMETERS ***"); + if(!mDepthDisabled) { + mNhNs.param("mapping/mapping_enabled", mMappingEnabled, false); - if (mMappingEnabled) { - NODELET_INFO_STREAM(" * Mapping\t\t\t-> ENABLED"); + if (mMappingEnabled) { + NODELET_INFO_STREAM(" * Mapping\t\t\t-> ENABLED"); - mNhNs.getParam("mapping/resolution", mMappingRes); - NODELET_INFO_STREAM(" * Mapping resolution\t\t-> " << mMappingRes << " m"); + mNhNs.getParam("mapping/resolution", mMappingRes); + NODELET_INFO_STREAM(" * Mapping resolution\t\t-> " << mMappingRes << " m"); - mNhNs.getParam("mapping/max_mapping_range", mMaxMappingRange); - NODELET_INFO_STREAM(" * Mapping max range\t\t-> " << mMaxMappingRange << " m" - << ((mMaxMappingRange < 0.0) ? " [AUTO]" : "")); + mNhNs.getParam("mapping/max_mapping_range", mMaxMappingRange); + NODELET_INFO_STREAM(" * Mapping max range\t\t-> " << mMaxMappingRange << " m" + << ((mMaxMappingRange < 0.0) ? 
" [AUTO]" : "")); - mNhNs.getParam("mapping/fused_pointcloud_freq", mFusedPcPubFreq); - NODELET_INFO_STREAM(" * Fused point cloud freq:\t-> " << mFusedPcPubFreq << " Hz"); - } else { - NODELET_INFO_STREAM(" * Mapping\t\t\t-> DISABLED"); + mNhNs.getParam("mapping/fused_pointcloud_freq", mFusedPcPubFreq); + NODELET_INFO_STREAM(" * Fused point cloud freq:\t-> " << mFusedPcPubFreq << " Hz"); + } else { + NODELET_INFO_STREAM(" * Mapping\t\t\t-> DISABLED"); + } } - mNhNs.param("mapping/clicked_point_topic", mClickedPtTopic, std::string("/clicked_point")); NODELET_INFO_STREAM(" * Clicked point topic\t\t-> " << mClickedPtTopic.c_str()); // <---- Mapping - NODELET_INFO_STREAM("*** OBJECT DETECTION PARAMETERS ***"); - // ----> Object Detection - mNhNs.param("object_detection/od_enabled", mObjDetEnabled, false); - - if (mObjDetEnabled) { - NODELET_INFO_STREAM(" * Object Detection\t\t-> ENABLED"); - mNhNs.getParam("object_detection/confidence_threshold", mObjDetConfidence); - NODELET_INFO_STREAM(" * Object confidence\t\t-> " << mObjDetConfidence); - mNhNs.getParam("object_detection/object_tracking_enabled", mObjDetTracking); - NODELET_INFO_STREAM(" * Object tracking\t\t-> " << (mObjDetTracking ? "ENABLED" : "DISABLED")); - mNhNs.getParam("object_detection/max_range", mObjDetMaxRange); - if (mObjDetMaxRange > mCamMaxDepth) { - NODELET_WARN("Detection max range cannot be major than depth max range. Automatically fixed."); - mObjDetMaxRange = mCamMaxDepth; - } - NODELET_INFO_STREAM(" * Detection max range\t\t-> " << mObjDetMaxRange); + if(!mDepthDisabled) { + NODELET_INFO_STREAM("*** OBJECT DETECTION PARAMETERS ***"); + + mNhNs.param("object_detection/od_enabled", mObjDetEnabled, false); + + if (mObjDetEnabled) { + NODELET_INFO_STREAM(" * Object Detection\t\t-> ENABLED"); + mNhNs.getParam("object_detection/confidence_threshold", mObjDetConfidence); + NODELET_INFO_STREAM(" * Object confidence\t\t-> " << mObjDetConfidence); + mNhNs.getParam("object_detection/object_tracking_enabled", mObjDetTracking); + NODELET_INFO_STREAM(" * Object tracking\t\t-> " << (mObjDetTracking ? "ENABLED" : "DISABLED")); + mNhNs.getParam("object_detection/max_range", mObjDetMaxRange); + if (mObjDetMaxRange > mCamMaxDepth) { + NODELET_WARN("Detection max range cannot be major than depth max range. Automatically fixed."); + mObjDetMaxRange = mCamMaxDepth; + } + NODELET_INFO_STREAM(" * Detection max range\t\t-> " << mObjDetMaxRange); - int model; - mNhNs.getParam("object_detection/model", model); - if (model < 0 || model >= static_cast(sl::OBJECT_DETECTION_MODEL::LAST)) { - NODELET_WARN("Detection model not valid. Forced to the default value"); - model = static_cast(mObjDetModel); + int model; + mNhNs.getParam("object_detection/model", model); + if (model < 0 || model >= static_cast(sl::OBJECT_DETECTION_MODEL::LAST)) { + NODELET_WARN("Detection model not valid. Forced to the default value"); + model = static_cast(mObjDetModel); + } + mObjDetModel = static_cast(model); + + NODELET_INFO_STREAM(" * Detection model\t\t-> " << sl::toString(mObjDetModel)); + + mNhNs.getParam("object_detection/mc_people", mObjDetPeopleEnable); + NODELET_INFO_STREAM(" * Detect people\t\t-> " << (mObjDetPeopleEnable ? "ENABLED" : "DISABLED")); + mNhNs.getParam("object_detection/mc_vehicle", mObjDetVehiclesEnable); + NODELET_INFO_STREAM(" * Detect vehicles\t\t-> " << (mObjDetVehiclesEnable ? "ENABLED" : "DISABLED")); + mNhNs.getParam("object_detection/mc_bag", mObjDetBagsEnable); + NODELET_INFO_STREAM(" * Detect bags\t\t\t-> " << (mObjDetBagsEnable ? 
"ENABLED" : "DISABLED")); + mNhNs.getParam("object_detection/mc_animal", mObjDetAnimalsEnable); + NODELET_INFO_STREAM(" * Detect animals\t\t-> " << (mObjDetAnimalsEnable ? "ENABLED" : "DISABLED")); + mNhNs.getParam("object_detection/mc_electronics", mObjDetElectronicsEnable); + NODELET_INFO_STREAM(" * Detect electronics\t\t-> " << (mObjDetElectronicsEnable ? "ENABLED" : "DISABLED")); + mNhNs.getParam("object_detection/mc_fruit_vegetable", mObjDetFruitsEnable); + NODELET_INFO_STREAM(" * Detect fruit and vegetables\t-> " << (mObjDetFruitsEnable ? "ENABLED" : "DISABLED")); + mNhNs.getParam("object_detection/mc_sport", mObjDetSportsEnable); + NODELET_INFO_STREAM(" * Detect sport-related objects\t-> " << (mObjDetSportsEnable ? "ENABLED" : "DISABLED")); } - mObjDetModel = static_cast(model); - - NODELET_INFO_STREAM(" * Detection model\t\t-> " << sl::toString(mObjDetModel)); - - mNhNs.getParam("object_detection/mc_people", mObjDetPeopleEnable); - NODELET_INFO_STREAM(" * Detect people\t\t-> " << (mObjDetPeopleEnable ? "ENABLED" : "DISABLED")); - mNhNs.getParam("object_detection/mc_vehicle", mObjDetVehiclesEnable); - NODELET_INFO_STREAM(" * Detect vehicles\t\t-> " << (mObjDetVehiclesEnable ? "ENABLED" : "DISABLED")); - mNhNs.getParam("object_detection/mc_bag", mObjDetBagsEnable); - NODELET_INFO_STREAM(" * Detect bags\t\t\t-> " << (mObjDetBagsEnable ? "ENABLED" : "DISABLED")); - mNhNs.getParam("object_detection/mc_animal", mObjDetAnimalsEnable); - NODELET_INFO_STREAM(" * Detect animals\t\t-> " << (mObjDetAnimalsEnable ? "ENABLED" : "DISABLED")); - mNhNs.getParam("object_detection/mc_electronics", mObjDetElectronicsEnable); - NODELET_INFO_STREAM(" * Detect electronics\t\t-> " << (mObjDetElectronicsEnable ? "ENABLED" : "DISABLED")); - mNhNs.getParam("object_detection/mc_fruit_vegetable", mObjDetFruitsEnable); - NODELET_INFO_STREAM(" * Detect fruit and vegetables\t-> " << (mObjDetFruitsEnable ? "ENABLED" : "DISABLED")); - mNhNs.getParam("object_detection/mc_sport", mObjDetSportsEnable); - NODELET_INFO_STREAM(" * Detect sport-related objects\t-> " << (mObjDetSportsEnable ? 
"ENABLED" : "DISABLED")); } // <---- Object Detection @@ -877,49 +920,40 @@ void ZEDWrapperNodelet::readParameters() mConfidenceOptFrameId = mDepthOptFrameId; // Print TF frames - NODELET_INFO_STREAM(" * map_frame\t\t\t-> " << mMapFrameId); - NODELET_INFO_STREAM(" * odometry_frame\t\t-> " << mOdometryFrameId); - NODELET_INFO_STREAM(" * base_frame\t\t\t-> " << mBaseFrameId); NODELET_INFO_STREAM(" * camera_frame\t\t\t-> " << mCameraFrameId); NODELET_INFO_STREAM(" * imu_link\t\t\t-> " << mImuFrameId); NODELET_INFO_STREAM(" * left_camera_frame\t\t-> " << mLeftCamFrameId); NODELET_INFO_STREAM(" * left_camera_optical_frame\t-> " << mLeftCamOptFrameId); NODELET_INFO_STREAM(" * right_camera_frame\t\t-> " << mRightCamFrameId); NODELET_INFO_STREAM(" * right_camera_optical_frame\t-> " << mRightCamOptFrameId); - NODELET_INFO_STREAM(" * depth_frame\t\t\t-> " << mDepthFrameId); - NODELET_INFO_STREAM(" * depth_optical_frame\t\t-> " << mDepthOptFrameId); - NODELET_INFO_STREAM(" * disparity_frame\t\t-> " << mDisparityFrameId); - NODELET_INFO_STREAM(" * disparity_optical_frame\t-> " << mDisparityOptFrameId); - NODELET_INFO_STREAM(" * confidence_frame\t\t-> " << mConfidenceFrameId); - NODELET_INFO_STREAM(" * confidence_optical_frame\t-> " << mConfidenceOptFrameId); + + if(!mDepthDisabled) { + NODELET_INFO_STREAM(" * map_frame\t\t\t-> " << mMapFrameId); + NODELET_INFO_STREAM(" * odometry_frame\t\t-> " << mOdometryFrameId); + NODELET_INFO_STREAM(" * base_frame\t\t\t-> " << mBaseFrameId); + NODELET_INFO_STREAM(" * depth_frame\t\t\t-> " << mDepthFrameId); + NODELET_INFO_STREAM(" * depth_optical_frame\t\t-> " << mDepthOptFrameId); + NODELET_INFO_STREAM(" * disparity_frame\t\t-> " << mDisparityFrameId); + NODELET_INFO_STREAM(" * disparity_optical_frame\t-> " << mDisparityOptFrameId); + NODELET_INFO_STREAM(" * confidence_frame\t\t-> " << mConfidenceFrameId); + NODELET_INFO_STREAM(" * confidence_optical_frame\t-> " << mConfidenceOptFrameId); + } // <---- Coordinate frames // ----> TF broadcasting - mNhNs.param("pos_tracking/publish_tf", mPublishTf, true); - NODELET_INFO_STREAM(" * Broadcast odometry TF\t-> " << (mPublishTf ? "ENABLED" : "DISABLED")); - mNhNs.param("pos_tracking/publish_map_tf", mPublishMapTf, true); - NODELET_INFO_STREAM(" * Broadcast map pose TF\t-> " - << (mPublishTf ? (mPublishMapTf ? "ENABLED" : "DISABLED") : "DISABLED")); - mNhNs.param("sensors/publish_imu_tf", mPublishImuTf, true); - NODELET_INFO_STREAM(" * Broadcast IMU pose TF\t-> " << (mPublishImuTf ? "ENABLED" : "DISABLED")); - // <---- TF broadcasting - - NODELET_INFO_STREAM("*** DYNAMIC PARAMETERS (Init. values) ***"); - - // ----> Dynamic - mNhNs.getParam("depth_confidence", mCamDepthConfidence); - NODELET_INFO_STREAM(" * [DYN] Depth confidence\t-> " << mCamDepthConfidence); - mNhNs.getParam("depth_texture_conf", mCamDepthTextureConf); - NODELET_INFO_STREAM(" * [DYN] Depth texture conf.\t-> " << mCamDepthTextureConf); - - mNhNs.getParam("point_cloud_freq", mPointCloudFreq); - if(mPointCloudFreq>mVideoDepthFreq) { - mPointCloudFreq=mVideoDepthFreq; - NODELET_WARN_STREAM("'point_cloud_freq' cannot be major than 'general/pub_frame_rate'. Set to " - << mPointCloudFreq); + if(!mDepthDisabled) { + mNhNs.param("pos_tracking/publish_tf", mPublishTf, true); + NODELET_INFO_STREAM(" * Broadcast odometry TF\t-> " << (mPublishTf ? "ENABLED" : "DISABLED")); + mNhNs.param("pos_tracking/publish_map_tf", mPublishMapTf, true); + NODELET_INFO_STREAM(" * Broadcast map pose TF\t-> " + << (mPublishTf ? (mPublishMapTf ? 
"ENABLED" : "DISABLED") : "DISABLED")); + mNhNs.param("sensors/publish_imu_tf", mPublishImuTf, true); + NODELET_INFO_STREAM(" * Broadcast IMU pose TF\t-> " << (mPublishImuTf ? "ENABLED" : "DISABLED")); } - NODELET_INFO_STREAM(" * [DYN] point_cloud_freq\t-> " << mPointCloudFreq << " Hz"); + // <---- TF broadcasting + // ----> Dynamic + NODELET_INFO_STREAM("*** DYNAMIC PARAMETERS (Init. values) ***"); mNhNs.getParam("brightness", mCamBrightness); NODELET_INFO_STREAM(" * [DYN] brightness\t\t-> " << mCamBrightness); mNhNs.getParam("contrast", mCamContrast); @@ -953,6 +987,21 @@ void ZEDWrapperNodelet::readParameters() if (mCamAutoWB) { mTriggerAutoWB = true; } + + if(!mDepthDisabled) { + mNhNs.getParam("depth_confidence", mCamDepthConfidence); + NODELET_INFO_STREAM(" * [DYN] Depth confidence\t-> " << mCamDepthConfidence); + mNhNs.getParam("depth_texture_conf", mCamDepthTextureConf); + NODELET_INFO_STREAM(" * [DYN] Depth texture conf.\t-> " << mCamDepthTextureConf); + + mNhNs.getParam("point_cloud_freq", mPointCloudFreq); + if(mPointCloudFreq>mVideoDepthFreq) { + mPointCloudFreq=mVideoDepthFreq; + NODELET_WARN_STREAM("'point_cloud_freq' cannot be major than 'general/pub_frame_rate'. Set to " + << mPointCloudFreq); + } + NODELET_INFO_STREAM(" * [DYN] point_cloud_freq\t-> " << mPointCloudFreq << " Hz"); + } // <---- Dynamic } @@ -3216,7 +3265,7 @@ void ZEDWrapperNodelet::device_poll_thread_func() sl::RuntimeParameters runParams; // Main loop - while (mNhNs.ok()) { + while (mNhNs.ok()) { // Check for subscribers uint32_t rgbSubnumber = mPubRgb.getNumSubscribers(); uint32_t rgbRawSubnumber = mPubRawRgb.getNumSubscribers(); @@ -3230,23 +3279,34 @@ void ZEDWrapperNodelet::device_poll_thread_func() uint32_t leftGrayRawSubnumber = mPubRawLeftGray.getNumSubscribers(); uint32_t rightGraySubnumber = mPubRightGray.getNumSubscribers(); uint32_t rightGrayRawSubnumber = mPubRawRightGray.getNumSubscribers(); - uint32_t depthSubnumber = mPubDepth.getNumSubscribers(); - uint32_t disparitySubnumber = mPubDisparity.getNumSubscribers(); - uint32_t cloudSubnumber = mPubCloud.getNumSubscribers(); - uint32_t fusedCloudSubnumber = mPubFusedCloud.getNumSubscribers(); - uint32_t poseSubnumber = mPubPose.getNumSubscribers(); - uint32_t poseCovSubnumber = mPubPoseCov.getNumSubscribers(); - uint32_t odomSubnumber = mPubOdom.getNumSubscribers(); - uint32_t confMapSubnumber = mPubConfMap.getNumSubscribers(); - uint32_t pathSubNumber = mPubMapPath.getNumSubscribers() + mPubOdomPath.getNumSubscribers(); - uint32_t stereoSubNumber = mPubStereo.getNumSubscribers(); - uint32_t stereoRawSubNumber = mPubRawStereo.getNumSubscribers(); - + uint32_t depthSubnumber = 0; uint32_t objDetSubnumber = 0; - if (mObjDetEnabled && mObjDetRunning) { - objDetSubnumber = mPubObjDet.getNumSubscribers(); + uint32_t disparitySubnumber = 0; + uint32_t cloudSubnumber = 0; + uint32_t fusedCloudSubnumber = 0; + uint32_t poseSubnumber = 0; + uint32_t poseCovSubnumber = 0; + uint32_t odomSubnumber = 0; + uint32_t confMapSubnumber = 0; + uint32_t pathSubNumber = 0; + if(!mDepthDisabled) { + depthSubnumber = mPubDepth.getNumSubscribers(); + disparitySubnumber = mPubDisparity.getNumSubscribers(); + cloudSubnumber = mPubCloud.getNumSubscribers(); + fusedCloudSubnumber = mPubFusedCloud.getNumSubscribers(); + poseSubnumber = mPubPose.getNumSubscribers(); + poseCovSubnumber = mPubPoseCov.getNumSubscribers(); + odomSubnumber = mPubOdom.getNumSubscribers(); + confMapSubnumber = mPubConfMap.getNumSubscribers(); + pathSubNumber = mPubMapPath.getNumSubscribers() + 
mPubOdomPath.getNumSubscribers(); + + if (mObjDetEnabled && mObjDetRunning) { + objDetSubnumber = mPubObjDet.getNumSubscribers(); + } } - + uint32_t stereoSubNumber = mPubStereo.getNumSubscribers(); + uint32_t stereoRawSubNumber = mPubRawStereo.getNumSubscribers(); + mGrabActive = mRecording || mStreaming || mMappingEnabled || mObjDetEnabled || mPosTrackingEnabled || mPosTrackingActivated || ((rgbSubnumber + rgbRawSubnumber + leftSubnumber + leftRawSubnumber + rightSubnumber + rightRawSubnumber + rgbGraySubnumber + rgbGrayRawSubnumber + leftGraySubnumber + leftGrayRawSubnumber + rightGraySubnumber + rightGrayRawSubnumber + depthSubnumber + disparitySubnumber + cloudSubnumber + poseSubnumber + poseCovSubnumber + odomSubnumber + confMapSubnumber /*+ imuSubnumber + imuRawsubnumber*/ + pathSubNumber + stereoSubNumber + stereoRawSubNumber + objDetSubnumber) > 0); // Run the loop only if there is some subscribers or SVO is active @@ -3254,10 +3314,12 @@ void ZEDWrapperNodelet::device_poll_thread_func() std::lock_guard lock(mPosTrkMutex); // Note: ones tracking is started it is never stopped anymore to not lose tracking information - bool computeTracking = (mPosTrackingEnabled || mPosTrackingActivated || mMappingEnabled || mObjDetEnabled || (mComputeDepth & mDepthStabilization) || poseSubnumber > 0 || poseCovSubnumber > 0 || odomSubnumber > 0 || pathSubNumber > 0); + bool computeTracking = !mDepthDisabled && + (mPosTrackingEnabled || mPosTrackingActivated || mMappingEnabled || mObjDetEnabled || (mComputeDepth & mDepthStabilization) || + poseSubnumber > 0 || poseCovSubnumber > 0 || odomSubnumber > 0 || pathSubNumber > 0); // Start the tracking? - if ((computeTracking) && !mPosTrackingActivated && (mDepthMode != sl::DEPTH_MODE::NONE)) { + if ((computeTracking) && !mPosTrackingActivated && !mDepthDisabled) { start_pos_tracking(); } @@ -3276,7 +3338,7 @@ void ZEDWrapperNodelet::device_poll_thread_func() mObjDetMutex.unlock(); // Detect if one of the subscriber need to have the depth information - mComputeDepth = mDepthMode != sl::DEPTH_MODE::NONE && (computeTracking || ((depthSubnumber + disparitySubnumber + cloudSubnumber + fusedCloudSubnumber + poseSubnumber + poseCovSubnumber + odomSubnumber + confMapSubnumber + objDetSubnumber) > 0)); + mComputeDepth = !mDepthDisabled && (computeTracking || ((depthSubnumber + disparitySubnumber + cloudSubnumber + fusedCloudSubnumber + poseSubnumber + poseCovSubnumber + odomSubnumber + confMapSubnumber + objDetSubnumber) > 0)); if (mComputeDepth) { runParams.confidence_threshold = mCamDepthConfidence; diff --git a/zed_wrapper/params/common.yaml b/zed_wrapper/params/common.yaml index 8b166b89..e651a7e0 100644 --- a/zed_wrapper/params/common.yaml +++ b/zed_wrapper/params/common.yaml @@ -34,7 +34,7 @@ video: img_downsample_factor: 0.5 # Resample factor for images [0.01,1.0] The SDK works with native image sizes, but publishes rescaled image. depth: - quality: 3 # '0': NONE, '1': PERFORMANCE, '2': QUALITY, '3': ULTRA, '4': NEURAL + depth_mode: 'NONE' # 'NONE', 'PERFORMANCE', 'QUALITY', 'ULTRA', 'NEURAL' depth_stabilization: 1 # [0-100] - 0: Disabled openni_depth_mode: false # 'false': 32bit float meters, 'true': 16bit uchar millimeters depth_downsample_factor: 0.5 # Resample factor for depth data matrices [0.01,1.0] The SDK works with native data sizes, but publishes rescaled matrices (depth map, point cloud, ...) 
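As a reference for the depth changes above, the following is a minimal standalone sketch (not part of the wrapper sources; it assumes the usual ZED SDK enumeration helpers, i.e. sl::toString() and the DEPTH_MODE::LAST sentinel, and the hypothetical helper name depthModeFromString) of how the new 'depth/depth_mode' string can be mapped to sl::DEPTH_MODE and how the "NO DEPTH" flag can be derived:

    #include <sl/Camera.hpp>
    #include <iostream>
    #include <string>

    // Hypothetical helper (not in the wrapper): convert the 'depth/depth_mode'
    // string ('NONE', 'PERFORMANCE', 'QUALITY', 'ULTRA', 'NEURAL') to the SDK
    // enum and report whether depth processing must be disabled.
    static sl::DEPTH_MODE depthModeFromString(const std::string& str, bool& depth_disabled)
    {
      sl::DEPTH_MODE mode = sl::DEPTH_MODE::ULTRA;  // fallback when the string is not valid
      bool matched = false;
      for (int i = static_cast<int>(sl::DEPTH_MODE::NONE);
           i < static_cast<int>(sl::DEPTH_MODE::LAST); ++i) {
        sl::DEPTH_MODE candidate = static_cast<sl::DEPTH_MODE>(i);
        if (str == std::string(sl::toString(candidate).c_str())) {
          mode = candidate;
          matched = true;
          break;
        }
      }
      if (!matched) {
        std::cerr << "Invalid 'depth/depth_mode' value: '" << str << "'. Using ULTRA." << std::endl;
      }
      // 'NONE' triggers the "NO DEPTH" behavior: depth, disparity, point cloud,
      // confidence, positional tracking and object detection are not processed.
      depth_disabled = (mode == sl::DEPTH_MODE::NONE);
      return mode;
    }

    int main()
    {
      bool depth_disabled = false;
      sl::DEPTH_MODE mode = depthModeFromString("NEURAL", depth_disabled);
      std::cout << "Depth mode: " << sl::toString(mode).c_str()
                << " - depth disabled: " << (depth_disabled ? "true" : "false") << std::endl;
      return 0;
    }

This mirrors the string matching used by the wrapper for 'depth/depth_mode' in the later patches (the 'matched' flag and the ULTRA fallback), so a depth mode added by a future SDK release is accepted without changing the parsing code.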
From 9d12c4344be9c6b573c19fd6db5309e993e77617 Mon Sep 17 00:00:00 2001 From: Walter Lucetti Date: Mon, 11 Sep 2023 18:08:06 +0200 Subject: [PATCH 04/21] [WIP] Custom rescale factor --- CHANGELOG.rst | 2 ++ zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp | 3 +-- zed_wrapper/params/common.yaml | 4 ++-- 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 8da8a49b..731f050d 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,6 +5,8 @@ CHANGELOG ---------- - Change the parameter 'depth/quality' to 'depth/depth_mode' and replace numeric values with strings - Improve the behavior of the "NO DEPTH" mode +- Remove the parameters 'depth_downsample_factor' and 'img_downsample_factor' +- Add the parameters 'pub_resolution' and 'pub_downscale_factor' 2023-09-08 ---------- diff --git a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp index 1242d945..2d78238e 100644 --- a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp +++ b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp @@ -641,8 +641,7 @@ class ZEDWrapperNodelet : public nodelet::Nodelet { int mCamDepthTextureConf = 100; double mPointCloudFreq = 15.; - double mCamImageResizeFactor = 1.0; - double mCamDepthResizeFactor = 1.0; + double mCustomDownscaleFactor = 0.5; // flags bool mTriggerAutoExposure = true; diff --git a/zed_wrapper/params/common.yaml b/zed_wrapper/params/common.yaml index e651a7e0..413cecfb 100644 --- a/zed_wrapper/params/common.yaml +++ b/zed_wrapper/params/common.yaml @@ -28,16 +28,16 @@ general: svo_compression: 2 # `0`: LOSSLESS, `1`: AVCHD, `2`: HEVC self_calib: true # enable/disable self calibration at starting camera_flip: 'AUTO' # camera flip mode: 'OFF', 'ON', 'AUTO' + pub_resolution: 'CUSTOM' # The resolution used for output. 'NATIVE' to use the same `general.grab_resolution` - `CUSTOM` to apply the `general.pub_downscale_factor` downscale factory to reduce bandwidth in transmission + pub_downscale_factor: 2.0 # rescale factor used to rescale image before publishing when 'pub_resolution' is 'CUSTOM' pub_frame_rate: 15.0 # frequency of publishing of video and depth data (see SDK API "InitParameters::grab_compute_capping_fps") video: - img_downsample_factor: 0.5 # Resample factor for images [0.01,1.0] The SDK works with native image sizes, but publishes rescaled image. depth: depth_mode: 'NONE' # 'NONE', 'PERFORMANCE', 'QUALITY', 'ULTRA', 'NEURAL' depth_stabilization: 1 # [0-100] - 0: Disabled openni_depth_mode: false # 'false': 32bit float meters, 'true': 16bit uchar millimeters - depth_downsample_factor: 0.5 # Resample factor for depth data matrices [0.01,1.0] The SDK works with native data sizes, but publishes rescaled matrices (depth map, point cloud, ...)
pos_tracking: pos_tracking_enabled: true # True to enable positional tracking from start From 5601dc7e18ba2e0e7ef0606868bc4f676ff3cb1f Mon Sep 17 00:00:00 2001 From: Walter Lucetti Date: Tue, 12 Sep 2023 12:06:12 +0200 Subject: [PATCH 05/21] 'pub_resolution' and 'pub_downscale_factor' completed --- .../include/zed_wrapper_nodelet.hpp | 15 +- .../zed_nodelet/src/zed_wrapper_nodelet.cpp | 149 +++++++++++------- zed_wrapper/params/common.yaml | 8 +- 3 files changed, 103 insertions(+), 69 deletions(-) diff --git a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp index 2d78238e..3a8be1eb 100644 --- a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp +++ b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp @@ -79,6 +79,13 @@ #include namespace zed_nodelets { + +typedef enum +{ + NATIVE, //!< Same camera grab resolution + CUSTOM //!< Custom Rescale Factor +} PubRes; + class ZEDWrapperNodelet : public nodelet::Nodelet { typedef enum _dyn_params { DATAPUB_FREQ = 0, @@ -544,7 +551,7 @@ class ZEDWrapperNodelet : public nodelet::Nodelet { std::string mCameraName; sl::RESOLUTION mCamResol; int mCamFrameRate; - double mVideoDepthFreq = 15.; + double mPubFrameRate = 15.; sl::DEPTH_MODE mDepthMode = sl::DEPTH_MODE::ULTRA; int mGpuId; int mZedId; @@ -641,7 +648,8 @@ class ZEDWrapperNodelet : public nodelet::Nodelet { int mCamDepthTextureConf = 100; double mPointCloudFreq = 15.; - double mCustomDownscaleFactor = 0.5; + PubRes mPubResolution = PubRes::NATIVE; // Use native grab resolution by default + double mCustomDownscaleFactor = 1.0; // Used to rescale data with user factor // flags bool mTriggerAutoExposure = true; @@ -666,8 +674,7 @@ class ZEDWrapperNodelet : public nodelet::Nodelet { // Mat int mCamWidth; int mCamHeight; - sl::Resolution mMatResolVideo; - sl::Resolution mMatResolDepth; + sl::Resolution mMatResol; // Thread Sync std::mutex mCloseZedMutex; diff --git a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp index d4bc0d56..c2fa364f 100644 --- a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp +++ b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp @@ -195,7 +195,7 @@ void ZEDWrapperNodelet::onInit() mSvoMode = true; } else { mZedParams.camera_fps = mCamFrameRate; - mZedParams.grab_compute_capping_fps = static_cast(mVideoDepthFreq); + mZedParams.grab_compute_capping_fps = static_cast(mPubFrameRate); mZedParams.camera_resolution = mCamResol; if (mZedSerialNumber == 0) { @@ -618,12 +618,33 @@ void ZEDWrapperNodelet::readParameters() mNhNs.getParam("general/grab_frame_rate", mCamFrameRate); checkResolFps(); NODELET_INFO_STREAM(" * Camera Grab Framerate\t-> " << mCamFrameRate); - mNhNs.getParam("general/pub_frame_rate", mVideoDepthFreq); - if(mVideoDepthFreq>mCamFrameRate) { - mVideoDepthFreq = mCamFrameRate; - NODELET_WARN_STREAM("'general/pub_frame_rate' cannot be bigger than 'general/grab_frame_rate'. Forced to the correct value: " << mVideoDepthFreq); + mNhNs.getParam("general/pub_frame_rate", mPubFrameRate); + if(mPubFrameRate>mCamFrameRate) { + mPubFrameRate = mCamFrameRate; + NODELET_WARN_STREAM("'general/pub_frame_rate' cannot be bigger than 'general/grab_frame_rate'. 
Forced to the correct value: " << mPubFrameRate); + } + NODELET_INFO_STREAM(" * Grab compute capping FPS\t-> " << mPubFrameRate << " Hz"); + + + std::string out_resol = "CUSTOM"; + mNhNs.getParam("general/pub_resolution", out_resol); + if (out_resol == "NATIVE") { + mPubResolution = PubRes::NATIVE; + } else if (out_resol == "CUSTOM") { + mPubResolution = PubRes::CUSTOM; + } else { + ROS_WARN("Not valid 'general/pub_resolution' value: '%s'. Using default setting instead.", out_resol.c_str()); + out_resol = "CUSTOM"; + mPubResolution = PubRes::CUSTOM; + } + ROS_INFO_STREAM(" * Publishing resolution: " << out_resol.c_str()); + + if (mPubResolution == PubRes::CUSTOM) { + mNhNs.getParam( "general/pub_downscale_factor", mCustomDownscaleFactor ); + NODELET_INFO_STREAM(" * Publishing downscale factor: " << mCustomDownscaleFactor ); + } else { + mCustomDownscaleFactor = 1.0; } - NODELET_INFO_STREAM(" * Grab compute capping FPS\t-> " << mVideoDepthFreq << " Hz"); mNhNs.getParam("general/gpu_id", mGpuId); NODELET_INFO_STREAM(" * Gpu ID\t\t\t-> " << mGpuId); @@ -653,16 +674,15 @@ void ZEDWrapperNodelet::readParameters() } // <---- General - NODELET_INFO_STREAM("*** VIDEO PARAMETERS ***"); - // ----> Video - mNhNs.getParam("video/img_downsample_factor", mCamImageResizeFactor); - NODELET_INFO_STREAM(" * Image resample factor\t-> " << mCamImageResizeFactor); - // <---- Video - NODELET_INFO_STREAM("*** DEPTH PARAMETERS ***"); + // ----> Video + //NODELET_INFO_STREAM("*** VIDEO PARAMETERS ***"); + // Note: there are no "static" video parameters + // <---- Video // -----> Depth + NODELET_INFO_STREAM("*** DEPTH PARAMETERS ***"); std::string depth_mode_str = sl::toString(mDepthMode).c_str(); mNhNs.getParam("depth/depth_mode", depth_mode_str); @@ -705,8 +725,6 @@ void ZEDWrapperNodelet::readParameters() NODELET_INFO_STREAM(" * Minimum depth\t\t-> " << mCamMinDepth << " m"); mNhNs.getParam("depth/max_depth", mCamMaxDepth); NODELET_INFO_STREAM(" * Maximum depth\t\t-> " << mCamMaxDepth << " m"); - mNhNs.getParam("depth/depth_downsample_factor", mCamDepthResizeFactor); - NODELET_INFO_STREAM(" * Depth resample factor\t-> " << mCamDepthResizeFactor); } // <----- Depth @@ -995,8 +1013,8 @@ void ZEDWrapperNodelet::readParameters() NODELET_INFO_STREAM(" * [DYN] Depth texture conf.\t-> " << mCamDepthTextureConf); mNhNs.getParam("point_cloud_freq", mPointCloudFreq); - if(mPointCloudFreq>mVideoDepthFreq) { - mPointCloudFreq=mVideoDepthFreq; + if(mPointCloudFreq>mPubFrameRate) { + mPointCloudFreq=mPubFrameRate; NODELET_WARN_STREAM("'point_cloud_freq' cannot be major than 'general/pub_frame_rate'. 
Set to " << mPointCloudFreq); } @@ -2008,7 +2026,7 @@ void ZEDWrapperNodelet::publishDepth(sensor_msgs::ImagePtr imgMsgPtr, sl::Mat de void ZEDWrapperNodelet::publishDisparity(sl::Mat disparity, ros::Time t) { - sl::CameraInformation zedParam = mZed.getCameraInformation(mMatResolDepth); + sl::CameraInformation zedParam = mZed.getCameraInformation(mMatResol); sensor_msgs::ImagePtr disparityImgMsg = boost::make_shared(); stereo_msgs::DisparityImagePtr disparityMsg = boost::make_shared(); @@ -2085,18 +2103,18 @@ void ZEDWrapperNodelet::publishPointCloud() // Initialize Point Cloud message // https://github.com/ros/common_msgs/blob/jade-devel/sensor_msgs/include/sensor_msgs/point_cloud2_iterator.h - int ptsCount = mMatResolDepth.width * mMatResolDepth.height; + int ptsCount = mMatResol.width * mMatResol.height; pointcloudMsg->header.stamp = mPointCloudTime; - if (pointcloudMsg->width != mMatResolDepth.width || pointcloudMsg->height != mMatResolDepth.height) { + if (pointcloudMsg->width != mMatResol.width || pointcloudMsg->height != mMatResol.height) { pointcloudMsg->header.frame_id = mPointCloudFrameId; // Set the header values of the ROS message pointcloudMsg->is_bigendian = false; pointcloudMsg->is_dense = false; - pointcloudMsg->width = mMatResolDepth.width; - pointcloudMsg->height = mMatResolDepth.height; + pointcloudMsg->width = mMatResol.width; + pointcloudMsg->height = mMatResol.height; sensor_msgs::PointCloud2Modifier modifier(*pointcloudMsg); modifier.setPointCloud2Fields(4, "x", 1, sensor_msgs::PointField::FLOAT32, "y", 1, sensor_msgs::PointField::FLOAT32, @@ -2221,9 +2239,9 @@ void ZEDWrapperNodelet::fillCamInfo(sl::Camera& zed, sensor_msgs::CameraInfoPtr sl::CalibrationParameters zedParam; if (rawParam) { - zedParam = zed.getCameraInformation(mMatResolVideo).camera_configuration.calibration_parameters_raw; + zedParam = zed.getCameraInformation(mMatResol).camera_configuration.calibration_parameters_raw; } else { - zedParam = zed.getCameraInformation(mMatResolVideo).camera_configuration.calibration_parameters; + zedParam = zed.getCameraInformation(mMatResol).camera_configuration.calibration_parameters; } float baseline = zedParam.getCameraBaseline(); @@ -2282,8 +2300,8 @@ void ZEDWrapperNodelet::fillCamInfo(sl::Camera& zed, sensor_msgs::CameraInfoPtr rightCamInfoMsg->P[5] = static_cast(zedParam.right_cam.fy); rightCamInfoMsg->P[6] = static_cast(zedParam.right_cam.cy); rightCamInfoMsg->P[10] = 1.0; - leftCamInfoMsg->width = rightCamInfoMsg->width = static_cast(mMatResolVideo.width); - leftCamInfoMsg->height = rightCamInfoMsg->height = static_cast(mMatResolVideo.height); + leftCamInfoMsg->width = rightCamInfoMsg->width = static_cast(mMatResol.width); + leftCamInfoMsg->height = rightCamInfoMsg->height = static_cast(mMatResol.height); leftCamInfoMsg->header.frame_id = leftFrameId; rightCamInfoMsg->header.frame_id = rightFrameId; } @@ -2292,7 +2310,7 @@ void ZEDWrapperNodelet::fillCamDepthInfo(sl::Camera& zed, sensor_msgs::CameraInf { sl::CalibrationParameters zedParam; - zedParam = zed.getCameraInformation(mMatResolDepth).camera_configuration.calibration_parameters; + zedParam = zed.getCameraInformation(mMatResol).camera_configuration.calibration_parameters; float baseline = zedParam.getCameraBaseline(); depth_info_msg->distortion_model = sensor_msgs::distortion_models::PLUMB_BOB; @@ -2322,8 +2340,8 @@ void ZEDWrapperNodelet::fillCamDepthInfo(sl::Camera& zed, sensor_msgs::CameraInf depth_info_msg->P[6] = static_cast(zedParam.left_cam.cy); depth_info_msg->P[10] = 1.0; // 
http://docs.ros.org/api/sensor_msgs/html/msg/CameraInfo.html - depth_info_msg->width = static_cast(mMatResolDepth.width); - depth_info_msg->height = static_cast(mMatResolDepth.height); + depth_info_msg->width = static_cast(mMatResol.width); + depth_info_msg->height = static_cast(mMatResol.height); depth_info_msg->header.frame_id = frame_id; } @@ -2380,8 +2398,8 @@ void ZEDWrapperNodelet::callback_dynamicReconf(zed_nodelets::ZedConfig& config, break; case POINTCLOUD_FREQ: - if (config.point_cloud_freq > mVideoDepthFreq) { - mPointCloudFreq = mVideoDepthFreq; + if (config.point_cloud_freq > mPubFrameRate) { + mPointCloudFreq = mPubFrameRate; NODELET_WARN_STREAM("'point_cloud_freq' cannot be major than Video/Depth publishing framerate. Set to " << mPointCloudFreq); @@ -2538,51 +2556,51 @@ void ZEDWrapperNodelet::pubVideoDepth() // ----> Retrieve all required image data if (rgbSubnumber + leftSubnumber + stereoSubNumber > 0) { - mZed.retrieveImage(mat_left, sl::VIEW::LEFT, sl::MEM::CPU, mMatResolVideo); + mZed.retrieveImage(mat_left, sl::VIEW::LEFT, sl::MEM::CPU, mMatResol); retrieved = true; ts_rgb = mat_left.timestamp; grab_ts = mat_left.timestamp; } if (rgbRawSubnumber + leftRawSubnumber + stereoRawSubNumber > 0) { - mZed.retrieveImage(mat_left_raw, sl::VIEW::LEFT_UNRECTIFIED, sl::MEM::CPU, mMatResolVideo); + mZed.retrieveImage(mat_left_raw, sl::VIEW::LEFT_UNRECTIFIED, sl::MEM::CPU, mMatResol); retrieved = true; grab_ts = mat_left_raw.timestamp; } if (rightSubnumber + stereoSubNumber > 0) { - mZed.retrieveImage(mat_right, sl::VIEW::RIGHT, sl::MEM::CPU, mMatResolVideo); + mZed.retrieveImage(mat_right, sl::VIEW::RIGHT, sl::MEM::CPU, mMatResol); retrieved = true; grab_ts = mat_right.timestamp; } if (rightRawSubnumber + stereoRawSubNumber > 0) { - mZed.retrieveImage(mat_right_raw, sl::VIEW::RIGHT_UNRECTIFIED, sl::MEM::CPU, mMatResolVideo); + mZed.retrieveImage(mat_right_raw, sl::VIEW::RIGHT_UNRECTIFIED, sl::MEM::CPU, mMatResol); retrieved = true; grab_ts = mat_right_raw.timestamp; } if (rgbGraySubnumber + leftGraySubnumber > 0) { - mZed.retrieveImage(mat_left_gray, sl::VIEW::LEFT_GRAY, sl::MEM::CPU, mMatResolVideo); + mZed.retrieveImage(mat_left_gray, sl::VIEW::LEFT_GRAY, sl::MEM::CPU, mMatResol); retrieved = true; grab_ts = mat_left_gray.timestamp; } if (rgbGrayRawSubnumber + leftGrayRawSubnumber > 0) { - mZed.retrieveImage(mat_left_raw_gray, sl::VIEW::LEFT_UNRECTIFIED_GRAY, sl::MEM::CPU, mMatResolVideo); + mZed.retrieveImage(mat_left_raw_gray, sl::VIEW::LEFT_UNRECTIFIED_GRAY, sl::MEM::CPU, mMatResol); retrieved = true; grab_ts = mat_left_raw_gray.timestamp; } if (rightGraySubnumber > 0) { - mZed.retrieveImage(mat_right_gray, sl::VIEW::RIGHT_GRAY, sl::MEM::CPU, mMatResolVideo); + mZed.retrieveImage(mat_right_gray, sl::VIEW::RIGHT_GRAY, sl::MEM::CPU, mMatResol); retrieved = true; grab_ts = mat_right_gray.timestamp; } if (rightGrayRawSubnumber > 0) { - mZed.retrieveImage(mat_right_raw_gray, sl::VIEW::RIGHT_UNRECTIFIED_GRAY, sl::MEM::CPU, mMatResolVideo); + mZed.retrieveImage(mat_right_raw_gray, sl::VIEW::RIGHT_UNRECTIFIED_GRAY, sl::MEM::CPU, mMatResol); retrieved = true; grab_ts = mat_right_raw_gray.timestamp; } if (depthSubnumber > 0) { if (!mOpenniDepthMode) { - mZed.retrieveMeasure(mat_depth, sl::MEASURE::DEPTH, sl::MEM::CPU, mMatResolDepth); + mZed.retrieveMeasure(mat_depth, sl::MEASURE::DEPTH, sl::MEM::CPU, mMatResol); } else { - mZed.retrieveMeasure(mat_depth, sl::MEASURE::DEPTH_U16_MM, sl::MEM::CPU, mMatResolDepth); + mZed.retrieveMeasure(mat_depth, sl::MEASURE::DEPTH_U16_MM, 
sl::MEM::CPU, mMatResol); } retrieved = true; grab_ts = mat_depth.timestamp; @@ -2595,12 +2613,12 @@ void ZEDWrapperNodelet::pubVideoDepth() } } if (disparitySubnumber > 0) { - mZed.retrieveMeasure(mat_disp, sl::MEASURE::DISPARITY, sl::MEM::CPU, mMatResolDepth); + mZed.retrieveMeasure(mat_disp, sl::MEASURE::DISPARITY, sl::MEM::CPU, mMatResol); retrieved = true; grab_ts = mat_disp.timestamp; } if (confMapSubnumber > 0) { - mZed.retrieveMeasure(mat_conf, sl::MEASURE::CONFIDENCE, sl::MEM::CPU, mMatResolDepth); + mZed.retrieveMeasure(mat_conf, sl::MEASURE::CONFIDENCE, sl::MEM::CPU, mMatResol); retrieved = true; grab_ts = mat_conf.timestamp; } @@ -3217,7 +3235,7 @@ void ZEDWrapperNodelet::publishSensData(ros::Time t) void ZEDWrapperNodelet::device_poll_thread_func() { - ros::Rate loop_rate(mVideoDepthFreq); + ros::Rate loop_rate(mPubFrameRate); mRecording = false; @@ -3242,15 +3260,22 @@ void ZEDWrapperNodelet::device_poll_thread_func() // Get the parameters of the ZED images mCamWidth = mZed.getCameraInformation().camera_configuration.resolution.width; mCamHeight = mZed.getCameraInformation().camera_configuration.resolution.height; - NODELET_DEBUG_STREAM("Camera Frame size : " << mCamWidth << "x" << mCamHeight); - int v_w = static_cast(mCamWidth * mCamImageResizeFactor); - int v_h = static_cast(mCamHeight * mCamImageResizeFactor); - mMatResolVideo = sl::Resolution(v_w, v_h); - NODELET_DEBUG_STREAM("Image Mat size : " << mMatResolVideo.width << "x" << mMatResolVideo.height); - int d_w = static_cast(mCamWidth * mCamDepthResizeFactor); - int d_h = static_cast(mCamHeight * mCamDepthResizeFactor); - mMatResolDepth = sl::Resolution(d_w, d_h); - NODELET_DEBUG_STREAM("Depth Mat size : " << mMatResolDepth.width << "x" << mMatResolDepth.height); + NODELET_DEBUG_STREAM("Original Camera grab frame size -> " << mCamWidth << "x" << mCamHeight); + int pub_w, pub_h; + pub_w = static_cast(std::round(mCamWidth / mCustomDownscaleFactor)); + pub_h = static_cast(std::round(mCamHeight / mCustomDownscaleFactor)); + + if (pub_w > mCamWidth || pub_h > mCamHeight) { + NODELET_WARN_STREAM("The publishing resolution (" << + pub_w << "x" << pub_h << + ") cannot be higher than the grabbing resolution (" << + mCamWidth << "x" << mCamHeight << + "). Using grab resolution for output messages."); + pub_w = mCamWidth; + pub_h = mCamHeight; + } + mMatResol = sl::Resolution(pub_w, pub_h); + NODELET_DEBUG_STREAM("Publishing frame size -> " << mMatResol.width << "x" << mMatResol.height); // Create and fill the camera information messages fillCamInfo(mZed, mLeftCamInfoMsg, mRightCamInfoMsg, mLeftCamOptFrameId, mRightCamOptFrameId); @@ -3619,7 +3644,7 @@ void ZEDWrapperNodelet::device_poll_thread_func() std::unique_lock lock(mPcMutex, std::defer_lock); if (lock.try_lock()) { - mZed.retrieveMeasure(mCloud, sl::MEASURE::XYZBGRA, sl::MEM::CPU, mMatResolDepth); + mZed.retrieveMeasure(mCloud, sl::MEASURE::XYZBGRA, sl::MEM::CPU, mMatResol); mPointCloudFrameId = mDepthFrameId; mPointCloudTime = stamp; @@ -3893,7 +3918,7 @@ void ZEDWrapperNodelet::device_poll_thread_func() static int count_warn = 0; if (!loop_rate.sleep()) { - if (mean_elab_sec > (1. / mVideoDepthFreq)) { + if (mean_elab_sec > (1. 
/ mPubFrameRate)) { if (++count_warn > 10) { NODELET_DEBUG_THROTTLE(1.0, "Working thread is not synchronized with the Camera grab rate"); NODELET_DEBUG_STREAM_THROTTLE(1.0, "Expected cycle time: " << loop_rate.expectedCycleTime() << " - Real cycle time: " << mean_elab_sec); @@ -3972,15 +3997,15 @@ void ZEDWrapperNodelet::callback_updateDiagnostic(diagnostic_updater::Diagnostic stat.summary(diagnostic_msgs::DiagnosticStatus::OK, "Camera grabbing"); double freq = 1000000. / mGrabPeriodMean_usec->getMean(); - double freq_perc = 100. * freq / mCamFrameRate; + double freq_perc = 100. * freq / mPubFrameRate; stat.addf("Capture", "Mean Frequency: %.1f Hz (%.1f%%)", freq, freq_perc); stat.addf("General Processing", "Mean Time: %.3f sec (Max. %.3f sec)", mElabPeriodMean_sec->getMean(), - 1. / mVideoDepthFreq); + 1. / mPubFrameRate); if (mPublishingData) { freq = 1. / mVideoDepthPeriodMean_sec->getMean(); - freq_perc = 100. * freq / mVideoDepthFreq; + freq_perc = 100. * freq / mPubFrameRate; stat.addf("Video/Depth Publish", "Mean Frequency: %.1f Hz (%.1f%%)", freq, freq_perc); } @@ -4019,7 +4044,7 @@ void ZEDWrapperNodelet::callback_updateDiagnostic(diagnostic_updater::Diagnostic if (mObjDetRunning) { double freq = 1000. / mObjDetPeriodMean_msec->getMean(); - double freq_perc = 100. * freq / mVideoDepthFreq; + double freq_perc = 100. * freq / mPubFrameRate; stat.addf("Object detection", "Mean Frequency: %.3f Hz (%.1f%%)", freq, freq_perc); } else { stat.add("Object Detection", "INACTIVE"); @@ -4563,15 +4588,17 @@ void ZEDWrapperNodelet::clickedPtCallback(geometry_msgs::PointStampedConstPtr ms // ----> Project the point into 2D image coordinates sl::CalibrationParameters zedParam; - zedParam = mZed.getCameraInformation(mMatResolVideo).camera_configuration.calibration_parameters; // ok + zedParam = mZed.getCameraInformation(mMatResol).camera_configuration.calibration_parameters; // ok float f_x = zedParam.left_cam.fx; float f_y = zedParam.left_cam.fy; float c_x = zedParam.left_cam.cx; float c_y = zedParam.left_cam.cy; - float u = ((camX / camZ) * f_x + c_x) / mCamImageResizeFactor; - float v = ((camY / camZ) * f_y + c_y) / mCamImageResizeFactor; + float out_scale_factor = mMatResol.width / mCamWidth; + + float u = ((camX / camZ) * f_x + c_x) / out_scale_factor; + float v = ((camY / camZ) * f_y + c_y) / out_scale_factor; NODELET_INFO_STREAM("Clicked point image coordinates: [" << u << "," << v << "]"); // <---- Project the point into 2D image coordinates diff --git a/zed_wrapper/params/common.yaml b/zed_wrapper/params/common.yaml index 413cecfb..eef54dd6 100644 --- a/zed_wrapper/params/common.yaml +++ b/zed_wrapper/params/common.yaml @@ -28,14 +28,14 @@ general: svo_compression: 2 # `0`: LOSSLESS, `1`: AVCHD, `2`: HEVC self_calib: true # enable/disable self calibration at starting camera_flip: 'AUTO' # camera flip mode: 'OFF', 'ON', 'AUTO' - pub_resolution: 'CUSTOM' # The resolution used for output. 'NATIVE' to use the same `general.grab_resolution` - `CUSTOM` to apply the `general.pub_downscale_factor` downscale factory to reduce bandwidth in transmission - pub_downscale_factor: 2.0 # rescale factor used to rescale image before publishing when 'pub_resolution' is 'CUSTOM' + pub_resolution: 'CUSTOM' # The resolution used for output. 
'NATIVE' to use the same `general.grab_resolution` - `CUSTOM` to apply the `general.pub_downscale_factor` downscale factory to reduce bandwidth in transmission + pub_downscale_factor: 2.0 # rescale factor used to rescale image before publishing when 'pub_resolution' is 'CUSTOM' pub_frame_rate: 15.0 # frequency of publishing of video and depth data (see SDK API "InitParameters::grab_compute_capping_fps") -video: +#video: depth: - depth_mode: 'NONE' # 'NONE', 'PERFORMANCE', 'QUALITY', 'ULTRA', 'NEURAL' + depth_mode: 'ULTRA' # 'NONE', 'PERFORMANCE', 'QUALITY', 'ULTRA', 'NEURAL' depth_stabilization: 1 # [0-100] - 0: Disabled openni_depth_mode: false # 'false': 32bit float meters, 'true': 16bit uchar millimeters From d9126cbd0f6bdb9dd5c28a511144aea9799a12b0 Mon Sep 17 00:00:00 2001 From: Walter Lucetti Date: Tue, 12 Sep 2023 14:39:24 +0200 Subject: [PATCH 06/21] Change `resolution` to `grab_resolution` --- CHANGELOG.rst | 4 + zed_nodelets/src/tools/include/sl_tools.h | 20 ++ zed_nodelets/src/tools/src/sl_tools.cpp | 38 +++ .../include/zed_wrapper_nodelet.hpp | 6 +- .../zed_nodelet/src/zed_wrapper_nodelet.cpp | 231 +++++++++--------- zed_wrapper/params/zed.yaml | 2 +- zed_wrapper/params/zed2.yaml | 2 +- zed_wrapper/params/zed2i.yaml | 2 +- zed_wrapper/params/zedm.yaml | 2 +- zed_wrapper/params/zedx.yaml | 4 +- zed_wrapper/params/zedxm.yaml | 4 +- 11 files changed, 185 insertions(+), 130 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 731f050d..76fc0563 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,6 +1,10 @@ CHANGELOG ========= +2023-09-12 +---------- +- Change the parameter 'general/resolution' to 'general/grab_resolution' and replace numeric values with strings + 2023-09-10 ---------- - Change the parameter 'depth/quality' to 'depth/depth_mode' and replace numeric values with strings diff --git a/zed_nodelets/src/tools/include/sl_tools.h b/zed_nodelets/src/tools/include/sl_tools.h index 2dd123f2..a222da59 100644 --- a/zed_nodelets/src/tools/include/sl_tools.h +++ b/zed_nodelets/src/tools/include/sl_tools.h @@ -52,6 +52,26 @@ std::string getSDKVersion(int& major, int& minor, int& sub_minor); */ ros::Time slTime2Ros(sl::Timestamp t); +/*! \brief check if ZED + * \param camModel the model to check + */ +bool isZED(sl::MODEL camModel); + +/*! \brief check if ZED Mini + * \param camModel the model to check + */ +bool isZEDM(sl::MODEL camModel); + +/*! \brief check if ZED2 or ZED2i + * \param camModel the model to check + */ +bool isZED2OrZED2i(sl::MODEL camModel); + +/*! \brief check if ZED-X or ZED-X Mini + * \param camModel the model to check + */ +bool isZEDX(sl::MODEL camModel); + /*! 
\brief sl::Mat to ros message conversion * \param imgMsgPtr : the image topic message to publish * \param img : the image to publish diff --git a/zed_nodelets/src/tools/src/sl_tools.cpp b/zed_nodelets/src/tools/src/sl_tools.cpp index 3857cdb0..2e47ef45 100644 --- a/zed_nodelets/src/tools/src/sl_tools.cpp +++ b/zed_nodelets/src/tools/src/sl_tools.cpp @@ -121,6 +121,44 @@ ros::Time slTime2Ros(sl::Timestamp t) return ros::Time(sec, nsec); } +bool isZED(sl::MODEL camModel) +{ + if (camModel == sl::MODEL::ZED) { + return true; + } + return false; +} + +bool isZEDM(sl::MODEL camModel) +{ + if (camModel == sl::MODEL::ZED_M) { + return true; + } + return false; +} + +bool isZED2OrZED2i(sl::MODEL camModel) +{ + if (camModel == sl::MODEL::ZED2) { + return true; + } + if (camModel == sl::MODEL::ZED2i) { + return true; + } + return false; +} + +bool isZEDX(sl::MODEL camModel) +{ + if (camModel == sl::MODEL::ZED_X) { + return true; + } + if (camModel == sl::MODEL::ZED_XM) { + return true; + } + return false; +} + void imageToROSmsg(sensor_msgs::ImagePtr imgMsgPtr, sl::Mat img, std::string frameId, ros::Time t) { if (!imgMsgPtr) diff --git a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp index 3a8be1eb..99b00679 100644 --- a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp +++ b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp @@ -243,11 +243,7 @@ class ZEDWrapperNodelet : public nodelet::Nodelet { */ void fillCamDepthInfo(sl::Camera& zed, sensor_msgs::CameraInfoPtr depth_info_msg, std::string frame_id); - /*! \brief Check if Resolution is valid for the camera model. - * Modifies Resolution to match correct value. - */ - void checkResol(); - + /*! \brief Check if FPS and Resolution chosen by user are correct. * Modifies FPS to match correct value. */ diff --git a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp index c2fa364f..64228443 100644 --- a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp +++ b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp @@ -46,7 +46,7 @@ namespace zed_nodelets { #define BARO_FREQ 25. 
ZEDWrapperNodelet::ZEDWrapperNodelet() - : Nodelet() + : Nodelet() { } @@ -87,7 +87,7 @@ void ZEDWrapperNodelet::onInit() NODELET_INFO("********** Starting nodelet '%s' **********", getName().c_str()); std::string ver = sl_tools::getSDKVersion(mVerMajor, mVerMinor, mVerSubMinor); - NODELET_INFO_STREAM("SDK version : " << ver); + NODELET_INFO_STREAM("SDK version: " << ver); if (mVerMajor < 3) { NODELET_ERROR("This version of the ZED ROS Wrapper is designed to be used with ZED SDK v3.x"); @@ -180,6 +180,8 @@ void ZEDWrapperNodelet::onInit() mZedParams.input.setFromSVOFile(mSvoFilepath.c_str()); mZedParams.svo_real_time_mode = true; + NODELET_INFO_STREAM( "Node input: SVO - " << mSvoFilepath.c_str()); + // TODO ADD PARAMETER FOR SVO REAL TIME } else if (!mRemoteStreamAddr.empty()) { std::vector configStream = sl_tools::split_string(mRemoteStreamAddr, ':'); @@ -190,6 +192,8 @@ void ZEDWrapperNodelet::onInit() } else { mZedParams.input.setFromStream(ip); } + + NODELET_INFO_STREAM( "Node input: NETWORK STREAM - " << mRemoteStreamAddr.c_str()); } mSvoMode = true; @@ -200,8 +204,10 @@ void ZEDWrapperNodelet::onInit() if (mZedSerialNumber == 0) { mZedParams.input.setFromCameraID(mZedId); + NODELET_INFO_STREAM( "Node input: LIVE CAMERA with ID " << mZedId); } else { mZedParams.input.setFromSerialNumber(mZedSerialNumber); + NODELET_INFO_STREAM( "Node input: LIVE CAMERA with SN " << mZedSerialNumber); } } @@ -229,7 +235,7 @@ void ZEDWrapperNodelet::onInit() NODELET_INFO_STREAM(" *** Opening " << sl::toString(mZedUserCamModel) << "..."); while (mConnStatus != sl::ERROR_CODE::SUCCESS) { mConnStatus = mZed.open(mZedParams); - NODELET_INFO_STREAM("ZED connection -> " << sl::toString(mConnStatus)); + NODELET_INFO_STREAM("ZED connection: " << sl::toString(mConnStatus)); std::this_thread::sleep_for(std::chrono::milliseconds(2000)); if (!mNhNs.ok()) { @@ -300,19 +306,19 @@ void ZEDWrapperNodelet::onInit() NODELET_INFO("Camera-IMU Transform: \n %s", mSlCamImuTransf.getInfos().c_str()); } - NODELET_INFO_STREAM(" * CAMERA MODEL\t -> " << sl::toString(mZedRealCamModel).c_str()); + NODELET_INFO_STREAM(" * CAMERA MODEL\t-> " << sl::toString(mZedRealCamModel).c_str()); mZedSerialNumber = mZed.getCameraInformation().serial_number; - NODELET_INFO_STREAM(" * Serial Number -> " << mZedSerialNumber); + NODELET_INFO_STREAM(" * Serial Number: " << mZedSerialNumber); if (!mSvoMode) { mCamFwVersion = mZed.getCameraInformation().camera_configuration.firmware_version; - NODELET_INFO_STREAM(" * Camera FW Version -> " << mCamFwVersion); + NODELET_INFO_STREAM(" * Camera FW Version: " << mCamFwVersion); if (mZedRealCamModel != sl::MODEL::ZED) { mSensFwVersion = mZed.getCameraInformation().sensors_configuration.firmware_version; - NODELET_INFO_STREAM(" * Sensors FW Version -> " << mSensFwVersion); + NODELET_INFO_STREAM(" * Sensors FW Version: " << mSensFwVersion); } } else { - NODELET_INFO_STREAM(" * Input type\t -> " << sl::toString(mZed.getCameraInformation().input_type).c_str()); + NODELET_INFO_STREAM(" * Input type\t-> " << sl::toString(mZed.getCameraInformation().input_type).c_str()); } // Set the IMU topic names using real camera model @@ -610,11 +616,38 @@ void ZEDWrapperNodelet::readParameters() mNhNs.getParam("general/camera_name", mCameraName); NODELET_INFO_STREAM(" * Camera Name\t\t\t-> " << mCameraName.c_str()); - int resol; - mNhNs.getParam("general/resolution", resol); - mCamResol = static_cast(resol); - checkResol(); - NODELET_INFO_STREAM(" * Camera Resolution\t\t-> " << sl::toString(mCamResol).c_str()); + + 
std::string resol="AUTO"; + mNhNs.getParam("general/grab_resolution", resol); + if (resol == "AUTO") { + mCamResol = sl::RESOLUTION::AUTO; + } else if (sl_tools::isZEDX(mZedUserCamModel)) { + if (resol == "HD1200") { + mCamResol = sl::RESOLUTION::HD1200; + } else if (resol == "HD1080") { + mCamResol = sl::RESOLUTION::HD1080; + } else if (resol == "SVGA") { + mCamResol = sl::RESOLUTION::SVGA; + } else { + NODELET_WARN("Not valid 'general.grab_resolution' value: '%s'. Using 'AUTO' setting.", resol.c_str()); + mCamResol = sl::RESOLUTION::AUTO; + } + } else { + if (resol == "HD2K") { + mCamResol = sl::RESOLUTION::HD2K; + } else if (resol == "HD1080") { + mCamResol = sl::RESOLUTION::HD1080; + } else if (resol == "HD720") { + mCamResol = sl::RESOLUTION::HD720; + } else if (resol == "VGA") { + mCamResol = sl::RESOLUTION::VGA; + } else { + NODELET_WARN("Not valid 'general.grab_resolution' value: '%s'. Using 'AUTO' setting.", resol.c_str()); + mCamResol = sl::RESOLUTION::AUTO; + } + } + NODELET_INFO_STREAM(" * Camera resolution\t\t-> " << sl::toString(mCamResol).c_str()); + mNhNs.getParam("general/grab_frame_rate", mCamFrameRate); checkResolFps(); NODELET_INFO_STREAM(" * Camera Grab Framerate\t-> " << mCamFrameRate); @@ -633,15 +666,15 @@ void ZEDWrapperNodelet::readParameters() } else if (out_resol == "CUSTOM") { mPubResolution = PubRes::CUSTOM; } else { - ROS_WARN("Not valid 'general/pub_resolution' value: '%s'. Using default setting instead.", out_resol.c_str()); + NODELET_WARN("Not valid 'general/pub_resolution' value: '%s'. Using default setting instead.", out_resol.c_str()); out_resol = "CUSTOM"; mPubResolution = PubRes::CUSTOM; } - ROS_INFO_STREAM(" * Publishing resolution: " << out_resol.c_str()); + NODELET_INFO_STREAM(" * Publishing resolution\t-> " << out_resol.c_str()); if (mPubResolution == PubRes::CUSTOM) { mNhNs.getParam( "general/pub_downscale_factor", mCustomDownscaleFactor ); - NODELET_INFO_STREAM(" * Publishing downscale factor: " << mCustomDownscaleFactor ); + NODELET_INFO_STREAM(" * Publishing downscale factor\t-> " << mCustomDownscaleFactor ); } else { mCustomDownscaleFactor = 1.0; } @@ -651,7 +684,7 @@ void ZEDWrapperNodelet::readParameters() mNhNs.getParam("general/zed_id", mZedId); NODELET_INFO_STREAM(" * Camera ID\t\t\t-> " << mZedId); mNhNs.getParam("general/verbose", mVerbose); - NODELET_INFO_STREAM(" * Verbose\t\t\t-> " << (mVerbose ? "ENABLED" : "DISABLED")); + NODELET_INFO_STREAM(" * Verbose\t\t\t-> " << (mVerbose ? "ENABLED": "DISABLED")); std::string flip_str; mNhNs.getParam("general/camera_flip", flip_str); if(flip_str=="ON") { @@ -663,7 +696,7 @@ void ZEDWrapperNodelet::readParameters() } NODELET_INFO_STREAM(" * Camera Flip\t\t\t-> " << sl::toString(mCameraFlip).c_str()); mNhNs.param("general/self_calib", mCameraSelfCalib, true); - NODELET_INFO_STREAM(" * Self calibration\t\t-> " << (mCameraSelfCalib ? "ENABLED" : "DISABLED")); + NODELET_INFO_STREAM(" * Self calibration\t\t-> " << (mCameraSelfCalib ? "ENABLED": "DISABLED")); int tmp_sn = 0; mNhNs.getParam("general/serial_number", tmp_sn); @@ -700,25 +733,25 @@ void ZEDWrapperNodelet::readParameters() } if (!matched) { - ROS_WARN_STREAM( + NODELET_WARN_STREAM( "The parameter 'depth.depth_mode' contains a not valid string. 
Please check it in 'common.yaml'."); - ROS_WARN("Using default DEPTH_MODE."); + NODELET_WARN("Using default DEPTH_MODE."); mDepthMode = sl::DEPTH_MODE::ULTRA; } if (mDepthMode == sl::DEPTH_MODE::NONE) { mDepthDisabled = true; mDepthStabilization = 0; - ROS_INFO_STREAM(" * Depth mode: " << sl::toString(mDepthMode).c_str() << " - DEPTH DISABLED"); + NODELET_INFO_STREAM(" * Depth mode\t\t\t> " << sl::toString(mDepthMode).c_str() << " - DEPTH DISABLED"); } else { mDepthDisabled = false; - ROS_INFO_STREAM(" * Depth mode: " << sl::toString( + NODELET_INFO_STREAM(" * Depth mode\t\t\t--> " << sl::toString( mDepthMode).c_str() << " [" << static_cast(mDepthMode) << "]"); } if(!mDepthDisabled) { mNhNs.getParam("depth/openni_depth_mode", mOpenniDepthMode); - NODELET_INFO_STREAM(" * OpenNI mode\t\t\t-> " << (mOpenniDepthMode ? "ENABLED" : "DISABLED")); + NODELET_INFO_STREAM(" * OpenNI mode\t\t\t-> " << (mOpenniDepthMode ? "ENABLED": "DISABLED")); mNhNs.getParam("depth/depth_stabilization", mDepthStabilization); NODELET_INFO_STREAM(" * Depth Stabilization\t\t-> " << mDepthStabilization ); mNhNs.getParam("depth/min_depth", mCamMinDepth); @@ -733,7 +766,7 @@ void ZEDWrapperNodelet::readParameters() NODELET_INFO_STREAM("*** POSITIONAL TRACKING PARAMETERS ***"); mNhNs.param("pos_tracking/pos_tracking_enabled", mPosTrackingEnabled, true); - NODELET_INFO_STREAM(" * Positional tracking\t\t-> " << (mPosTrackingEnabled ? "ENABLED" : "DISABLED")); + NODELET_INFO_STREAM(" * Positional tracking\t\t-> " << (mPosTrackingEnabled ? "ENABLED": "DISABLED")); std::string pos_trk_mode; mNhNs.getParam("pos_tracking/pos_tracking_mode", pos_trk_mode); @@ -747,15 +780,15 @@ void ZEDWrapperNodelet::readParameters() "'). Using default value."); mPosTrkMode = sl::POSITIONAL_TRACKING_MODE::QUALITY; } - NODELET_INFO_STREAM( " * Positional tracking mode: " << sl::toString(mPosTrkMode).c_str()); + NODELET_INFO_STREAM( " * Positional tracking mode\t:" << sl::toString(mPosTrkMode).c_str()); mNhNs.getParam("pos_tracking/set_gravity_as_origin", mSetGravityAsOrigin); - NODELET_INFO_STREAM(" * Set gravity as origin\t\t-> " << (mSetGravityAsOrigin ? "ENABLED" : "DISABLED")); + NODELET_INFO_STREAM(" * Set gravity as origin\t-> " << (mSetGravityAsOrigin ? "ENABLED": "DISABLED")); mNhNs.getParam("pos_tracking/path_pub_rate", mPathPubRate); NODELET_INFO_STREAM(" * Path rate\t\t\t-> " << mPathPubRate << " Hz"); mNhNs.getParam("pos_tracking/path_max_count", mPathMaxCount); - NODELET_INFO_STREAM(" * Path history size\t\t-> " << (mPathMaxCount == -1) ? std::string("INFINITE") : std::to_string(mPathMaxCount)); + NODELET_INFO_STREAM(" * Path history size\t\t-> " << (mPathMaxCount == -1) ? std::string("INFINITE"): std::to_string(mPathMaxCount)); if (mPathMaxCount < 2 && mPathMaxCount != -1) { mPathMaxCount = 2; @@ -768,18 +801,18 @@ void ZEDWrapperNodelet::readParameters() NODELET_INFO_STREAM(" * Odometry DB path\t\t-> " << mAreaMemDbPath.c_str()); mNhNs.param("pos_tracking/save_area_memory_db_on_exit", mSaveAreaMapOnClosing, false); - NODELET_INFO_STREAM(" * Save Area Memory on closing\t-> " << (mSaveAreaMapOnClosing ? "ENABLED" : "DISABLED")); + NODELET_INFO_STREAM(" * Save Area Memory on closing\t-> " << (mSaveAreaMapOnClosing ? "ENABLED": "DISABLED")); mNhNs.param("pos_tracking/area_memory", mAreaMemory, false); - NODELET_INFO_STREAM(" * Area Memory\t\t\t-> " << (mAreaMemory ? "ENABLED" : "DISABLED")); + NODELET_INFO_STREAM(" * Area Memory\t\t\t-> " << (mAreaMemory ? 
"ENABLED": "DISABLED")); mNhNs.param("pos_tracking/imu_fusion", mImuFusion, true); - NODELET_INFO_STREAM(" * IMU Fusion\t\t\t-> " << (mImuFusion ? "ENABLED" : "DISABLED")); + NODELET_INFO_STREAM(" * IMU Fusion\t\t\t-> " << (mImuFusion ? "ENABLED": "DISABLED")); mNhNs.param("pos_tracking/floor_alignment", mFloorAlignment, false); - NODELET_INFO_STREAM(" * Floor alignment\t\t-> " << (mFloorAlignment ? "ENABLED" : "DISABLED")); + NODELET_INFO_STREAM(" * Floor alignment\t\t-> " << (mFloorAlignment ? "ENABLED": "DISABLED")); mNhNs.param("pos_tracking/init_odom_with_first_valid_pose", mInitOdomWithPose, true); - NODELET_INFO_STREAM(" * Init Odometry with first valid pose data -> " - << (mInitOdomWithPose ? "ENABLED" : "DISABLED")); + NODELET_INFO_STREAM(" * Init Odometry with first valid pose data: " + << (mInitOdomWithPose ? "ENABLED": "DISABLED")); mNhNs.param("pos_tracking/two_d_mode", mTwoDMode, false); - NODELET_INFO_STREAM(" * Two D mode\t\t\t-> " << (mTwoDMode ? "ENABLED" : "DISABLED")); + NODELET_INFO_STREAM(" * Two D mode\t\t\t-> " << (mTwoDMode ? "ENABLED": "DISABLED")); mNhNs.param("pos_tracking/fixed_z_value", mFixedZValue, 0.0); if (mTwoDMode) { @@ -801,7 +834,7 @@ void ZEDWrapperNodelet::readParameters() mNhNs.getParam("mapping/max_mapping_range", mMaxMappingRange); NODELET_INFO_STREAM(" * Mapping max range\t\t-> " << mMaxMappingRange << " m" - << ((mMaxMappingRange < 0.0) ? " [AUTO]" : "")); + << ((mMaxMappingRange < 0.0) ? " [AUTO]": "")); mNhNs.getParam("mapping/fused_pointcloud_freq", mFusedPcPubFreq); NODELET_INFO_STREAM(" * Fused point cloud freq:\t-> " << mFusedPcPubFreq << " Hz"); @@ -824,7 +857,7 @@ void ZEDWrapperNodelet::readParameters() mNhNs.getParam("object_detection/confidence_threshold", mObjDetConfidence); NODELET_INFO_STREAM(" * Object confidence\t\t-> " << mObjDetConfidence); mNhNs.getParam("object_detection/object_tracking_enabled", mObjDetTracking); - NODELET_INFO_STREAM(" * Object tracking\t\t-> " << (mObjDetTracking ? "ENABLED" : "DISABLED")); + NODELET_INFO_STREAM(" * Object tracking\t\t-> " << (mObjDetTracking ? "ENABLED": "DISABLED")); mNhNs.getParam("object_detection/max_range", mObjDetMaxRange); if (mObjDetMaxRange > mCamMaxDepth) { NODELET_WARN("Detection max range cannot be major than depth max range. Automatically fixed."); @@ -843,19 +876,19 @@ void ZEDWrapperNodelet::readParameters() NODELET_INFO_STREAM(" * Detection model\t\t-> " << sl::toString(mObjDetModel)); mNhNs.getParam("object_detection/mc_people", mObjDetPeopleEnable); - NODELET_INFO_STREAM(" * Detect people\t\t-> " << (mObjDetPeopleEnable ? "ENABLED" : "DISABLED")); + NODELET_INFO_STREAM(" * Detect people\t\t-> " << (mObjDetPeopleEnable ? "ENABLED": "DISABLED")); mNhNs.getParam("object_detection/mc_vehicle", mObjDetVehiclesEnable); - NODELET_INFO_STREAM(" * Detect vehicles\t\t-> " << (mObjDetVehiclesEnable ? "ENABLED" : "DISABLED")); + NODELET_INFO_STREAM(" * Detect vehicles\t\t-> " << (mObjDetVehiclesEnable ? "ENABLED": "DISABLED")); mNhNs.getParam("object_detection/mc_bag", mObjDetBagsEnable); - NODELET_INFO_STREAM(" * Detect bags\t\t\t-> " << (mObjDetBagsEnable ? "ENABLED" : "DISABLED")); + NODELET_INFO_STREAM(" * Detect bags\t\t\t-> " << (mObjDetBagsEnable ? "ENABLED": "DISABLED")); mNhNs.getParam("object_detection/mc_animal", mObjDetAnimalsEnable); - NODELET_INFO_STREAM(" * Detect animals\t\t-> " << (mObjDetAnimalsEnable ? "ENABLED" : "DISABLED")); + NODELET_INFO_STREAM(" * Detect animals\t\t-> " << (mObjDetAnimalsEnable ? 
"ENABLED": "DISABLED")); mNhNs.getParam("object_detection/mc_electronics", mObjDetElectronicsEnable); - NODELET_INFO_STREAM(" * Detect electronics\t\t-> " << (mObjDetElectronicsEnable ? "ENABLED" : "DISABLED")); + NODELET_INFO_STREAM(" * Detect electronics\t\t-> " << (mObjDetElectronicsEnable ? "ENABLED": "DISABLED")); mNhNs.getParam("object_detection/mc_fruit_vegetable", mObjDetFruitsEnable); - NODELET_INFO_STREAM(" * Detect fruit and vegetables\t-> " << (mObjDetFruitsEnable ? "ENABLED" : "DISABLED")); + NODELET_INFO_STREAM(" * Detect fruit and vegetables\t-> " << (mObjDetFruitsEnable ? "ENABLED": "DISABLED")); mNhNs.getParam("object_detection/mc_sport", mObjDetSportsEnable); - NODELET_INFO_STREAM(" * Detect sport-related objects\t-> " << (mObjDetSportsEnable ? "ENABLED" : "DISABLED")); + NODELET_INFO_STREAM(" * Detect sport-related objects\t-> " << (mObjDetSportsEnable ? "ENABLED": "DISABLED")); } } // <---- Object Detection @@ -865,7 +898,7 @@ void ZEDWrapperNodelet::readParameters() // ----> Sensors if (camera_model != "zed") { mNhNs.getParam("sensors/sensors_timestamp_sync", mSensTimestampSync); - NODELET_INFO_STREAM(" * Sensors timestamp sync\t-> " << (mSensTimestampSync ? "ENABLED" : "DISABLED")); + NODELET_INFO_STREAM(" * Sensors timestamp sync\t-> " << (mSensTimestampSync ? "ENABLED": "DISABLED")); mNhNs.getParam("sensors/max_pub_rate", mSensPubRate); if (camera_model == "zedm") { if (mSensPubRate > 800.) @@ -961,12 +994,12 @@ void ZEDWrapperNodelet::readParameters() // ----> TF broadcasting if(!mDepthDisabled) { mNhNs.param("pos_tracking/publish_tf", mPublishTf, true); - NODELET_INFO_STREAM(" * Broadcast odometry TF\t-> " << (mPublishTf ? "ENABLED" : "DISABLED")); + NODELET_INFO_STREAM(" * Broadcast odometry TF\t-> " << (mPublishTf ? "ENABLED": "DISABLED")); mNhNs.param("pos_tracking/publish_map_tf", mPublishMapTf, true); NODELET_INFO_STREAM(" * Broadcast map pose TF\t-> " - << (mPublishTf ? (mPublishMapTf ? "ENABLED" : "DISABLED") : "DISABLED")); + << (mPublishTf ? (mPublishMapTf ? "ENABLED": "DISABLED"): "DISABLED")); mNhNs.param("sensors/publish_imu_tf", mPublishImuTf, true); - NODELET_INFO_STREAM(" * Broadcast IMU pose TF\t-> " << (mPublishImuTf ? "ENABLED" : "DISABLED")); + NODELET_INFO_STREAM(" * Broadcast IMU pose TF\t-> " << (mPublishImuTf ? "ENABLED": "DISABLED")); } // <---- TF broadcasting @@ -985,7 +1018,7 @@ void ZEDWrapperNodelet::readParameters() mNhNs.getParam("gamma", mCamGamma); NODELET_INFO_STREAM(" * [DYN] gamma\t\t\t-> " << mCamGamma); mNhNs.getParam("auto_exposure_gain", mCamAutoExposure); - NODELET_INFO_STREAM(" * [DYN] auto_exposure_gain\t-> " << (mCamAutoExposure ? "ENABLED" : "DISABLED")); + NODELET_INFO_STREAM(" * [DYN] auto_exposure_gain\t-> " << (mCamAutoExposure ? "ENABLED": "DISABLED")); mNhNs.getParam("gain", mCamGain); mNhNs.getParam("exposure", mCamExposure); if (!mCamAutoExposure) { @@ -993,7 +1026,7 @@ void ZEDWrapperNodelet::readParameters() NODELET_INFO_STREAM(" * [DYN] exposure\t\t-> " << mCamExposure); } mNhNs.getParam("auto_whitebalance", mCamAutoWB); - NODELET_INFO_STREAM(" * [DYN] auto_whitebalance\t-> " << (mCamAutoWB ? "ENABLED" : "DISABLED")); + NODELET_INFO_STREAM(" * [DYN] auto_whitebalance\t-> " << (mCamAutoWB ? 
"ENABLED": "DISABLED")); mNhNs.getParam("whitebalance_temperature", mCamWB); if (!mCamAutoWB) { NODELET_INFO_STREAM(" * [DYN] whitebalance_temperature\t\t-> " << mCamWB); @@ -1023,54 +1056,6 @@ void ZEDWrapperNodelet::readParameters() // <---- Dynamic } -void ZEDWrapperNodelet::checkResol() -{ - switch (mCamResol) { - case sl::RESOLUTION::HD2K: - if (mZedUserCamModel == sl::MODEL::ZED_X || mZedUserCamModel == sl::MODEL::ZED_XM) { - NODELET_WARN("Selected a not valid resolution. Modified to 'AUTO'"); - mCamResol = sl::RESOLUTION::AUTO; - } - break; - - case sl::RESOLUTION::HD1200: - if (mZedUserCamModel != sl::MODEL::ZED_X && mZedUserCamModel != sl::MODEL::ZED_XM) { - NODELET_WARN("Selected a not valid resolution. Modified to 'AUTO'"); - mCamResol = sl::RESOLUTION::AUTO; - } - break; - - case sl::RESOLUTION::HD1080: - break; - - case sl::RESOLUTION::HD720 : - if (mZedUserCamModel == sl::MODEL::ZED_X || mZedUserCamModel == sl::MODEL::ZED_XM) { - NODELET_WARN("Selected a not valid resolution. Modified to 'AUTO'"); - mCamResol = sl::RESOLUTION::AUTO; - } - break; - - case sl::RESOLUTION::SVGA: - if (mZedUserCamModel != sl::MODEL::ZED_X && mZedUserCamModel != sl::MODEL::ZED_XM) { - NODELET_WARN("Selected a not valid resolution. Modified to 'AUTO'"); - mCamResol = sl::RESOLUTION::AUTO; - } - break; - - case sl::RESOLUTION::VGA: - if (mZedUserCamModel == sl::MODEL::ZED_X || mZedUserCamModel == sl::MODEL::ZED_XM) { - NODELET_WARN("Selected a not valid resolution. Modified to 'AUTO'"); - mCamResol = sl::RESOLUTION::AUTO; - } - break; - - default: - NODELET_WARN("Selected a not valid resolution. Modified to 'AUTO'"); - mCamResol = sl::RESOLUTION::AUTO; - break; - } -} - void ZEDWrapperNodelet::checkResolFps() { switch (mCamResol) { @@ -2459,7 +2444,7 @@ void ZEDWrapperNodelet::callback_dynamicReconf(zed_nodelets::ZedConfig& config, if(config.auto_exposure_gain!=mCamAutoExposure) { mCamAutoExposure = config.auto_exposure_gain; - NODELET_INFO_STREAM("Reconfigure auto exposure/gain: " << mCamAutoExposure ? "ENABLED" : "DISABLED"); + NODELET_INFO_STREAM("Reconfigure auto exposure/gain: " << mCamAutoExposure ? "ENABLED": "DISABLED"); } mDynParMutex.unlock(); // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); @@ -2491,7 +2476,7 @@ void ZEDWrapperNodelet::callback_dynamicReconf(zed_nodelets::ZedConfig& config, if(config.auto_whitebalance!=mCamAutoWB) { mCamAutoWB = config.auto_whitebalance; - NODELET_INFO_STREAM("Reconfigure auto white balance: " << mCamAutoWB ? "ENABLED" : "DISABLED"); + NODELET_INFO_STREAM("Reconfigure auto white balance: " << mCamAutoWB ? 
"ENABLED": "DISABLED"); mTriggerAutoWB = true; } else { mTriggerAutoWB = false; @@ -2623,7 +2608,7 @@ void ZEDWrapperNodelet::pubVideoDepth() grab_ts = mat_conf.timestamp; } // <---- Retrieve all required image data - +mSvoMode // ----> Data ROS timestamp ros::Time stamp = sl_tools::slTime2Ros(grab_ts); if (mSvoMode) { @@ -3260,7 +3245,7 @@ void ZEDWrapperNodelet::device_poll_thread_func() // Get the parameters of the ZED images mCamWidth = mZed.getCameraInformation().camera_configuration.resolution.width; mCamHeight = mZed.getCameraInformation().camera_configuration.resolution.height; - NODELET_DEBUG_STREAM("Original Camera grab frame size -> " << mCamWidth << "x" << mCamHeight); + NODELET_DEBUG_STREAM("Original Camera grab frame size: " << mCamWidth << "x" << mCamHeight); int pub_w, pub_h; pub_w = static_cast(std::round(mCamWidth / mCustomDownscaleFactor)); pub_h = static_cast(std::round(mCamHeight / mCustomDownscaleFactor)); @@ -3275,7 +3260,7 @@ void ZEDWrapperNodelet::device_poll_thread_func() pub_h = mCamHeight; } mMatResol = sl::Resolution(pub_w, pub_h); - NODELET_DEBUG_STREAM("Publishing frame size -> " << mMatResol.width << "x" << mMatResol.height); + NODELET_DEBUG_STREAM("Publishing frame size: " << mMatResol.width << "x" << mMatResol.height); // Create and fill the camera information messages fillCamInfo(mZed, mLeftCamInfoMsg, mRightCamInfoMsg, mLeftCamOptFrameId, mRightCamOptFrameId); @@ -3338,7 +3323,7 @@ void ZEDWrapperNodelet::device_poll_thread_func() if (mGrabActive) { std::lock_guard lock(mPosTrkMutex); - // Note: ones tracking is started it is never stopped anymore to not lose tracking information + // Note: once tracking is started it is never stopped anymore to not lose tracking information bool computeTracking = !mDepthDisabled && (mPosTrackingEnabled || mPosTrackingActivated || mMappingEnabled || mObjDetEnabled || (mComputeDepth & mDepthStabilization) || poseSubnumber > 0 || poseCovSubnumber > 0 || odomSubnumber > 0 || pathSubNumber > 0); @@ -3390,6 +3375,14 @@ void ZEDWrapperNodelet::device_poll_thread_func() if(mGrabStatus!=sl::ERROR_CODE::CAMERA_REBOOTING) { std::this_thread::sleep_for(std::chrono::milliseconds(50)); continue; + } else if (mSvoMode && mGrabStatus == sl::ERROR_CODE::END_OF_SVOFILE_REACHED) { + NODELET_WARN("SVO reached the end. The node will be stopped."); + mZed.close(); + exit(EXIT_SUCCESS); + } else if (!mRemoteStreamAddr.empty()) { + NODELET_ERROR("Remote stream problem. The node will be stopped."); + mZed.close(); + exit(EXIT_FAILURE); } else { if ((ros::Time::now() - mPrevFrameTimestamp).toSec() > 5 && !mSvoMode) { mCloseZedMutex.lock(); @@ -3424,9 +3417,13 @@ void ZEDWrapperNodelet::device_poll_thread_func() mPosTrackingActivated = false; - computeTracking = mPosTrackingEnabled || mDepthStabilization || poseSubnumber > 0 || poseCovSubnumber > 0 || odomSubnumber > 0; + // Note: once tracking is started it is never stopped anymore to not lose tracking information + bool computeTracking = !mDepthDisabled && + (mPosTrackingEnabled || mPosTrackingActivated || mMappingEnabled || mObjDetEnabled || (mComputeDepth & mDepthStabilization) || + poseSubnumber > 0 || poseCovSubnumber > 0 || odomSubnumber > 0 || pathSubNumber > 0); - if (computeTracking) { // Start the tracking + // Start the tracking? 
+ if ((computeTracking) && !mPosTrackingActivated && !mDepthDisabled) { start_pos_tracking(); } } @@ -4123,7 +4120,7 @@ bool ZEDWrapperNodelet::on_start_svo_recording(zed_interfaces::start_svo_recordi err = mZed.enableRecording(recParams); if (err == sl::ERROR_CODE::SVO_UNSUPPORTED_COMPRESSION) { - recParams.compression_mode = mSvoComprMode == sl::SVO_COMPRESSION_MODE::H265 ? sl::SVO_COMPRESSION_MODE::H264 : sl::SVO_COMPRESSION_MODE::H265; + recParams.compression_mode = mSvoComprMode == sl::SVO_COMPRESSION_MODE::H265 ? sl::SVO_COMPRESSION_MODE::H264: sl::SVO_COMPRESSION_MODE::H265; NODELET_WARN_STREAM("The chosen " << sl::toString(mSvoComprMode).c_str() << "mode is not available. Trying " << sl::toString(recParams.compression_mode).c_str()); @@ -4267,7 +4264,7 @@ bool ZEDWrapperNodelet::on_set_led_status(zed_interfaces::set_led_status::Reques return false; } - mZed.setCameraSettings(sl::VIDEO_SETTINGS::LED_STATUS, req.led_enabled ? 1 : 0); + mZed.setCameraSettings(sl::VIDEO_SETTINGS::LED_STATUS, req.led_enabled ? 1: 0); return true; } @@ -4288,7 +4285,7 @@ bool ZEDWrapperNodelet::on_toggle_led(zed_interfaces::toggle_led::Request& req, return false; } - int new_status = status == 0 ? 1 : 0; + int new_status = status == 0 ? 1: 0; err = mZed.setCameraSettings(sl::VIDEO_SETTINGS::LED_STATUS, new_status); if(err!=sl::ERROR_CODE::SUCCESS) { @@ -4421,23 +4418,23 @@ bool ZEDWrapperNodelet::on_start_object_detection(zed_interfaces::start_object_d NODELET_INFO_STREAM(" * Detection max range\t\t-> " << mObjDetMaxRange); NODELET_INFO_STREAM(" * Object min. confidence\t-> " << mObjDetConfidence); - NODELET_INFO_STREAM(" * Object tracking\t\t-> " << (mObjDetTracking ? "ENABLED" : "DISABLED")); + NODELET_INFO_STREAM(" * Object tracking\t\t-> " << (mObjDetTracking ? "ENABLED": "DISABLED")); NODELET_INFO_STREAM(" * Detection model\t\t-> " << sl::toString(mObjDetModel)); mNhNs.getParam("object_detection/mc_people", mObjDetPeopleEnable); - NODELET_INFO_STREAM(" * Detect people\t\t-> " << (mObjDetPeopleEnable ? "ENABLED" : "DISABLED")); + NODELET_INFO_STREAM(" * Detect people\t\t-> " << (mObjDetPeopleEnable ? "ENABLED": "DISABLED")); mNhNs.getParam("object_detection/mc_vehicle", mObjDetVehiclesEnable); - NODELET_INFO_STREAM(" * Detect vehicles\t\t-> " << (mObjDetVehiclesEnable ? "ENABLED" : "DISABLED")); + NODELET_INFO_STREAM(" * Detect vehicles\t\t-> " << (mObjDetVehiclesEnable ? "ENABLED": "DISABLED")); mNhNs.getParam("object_detection/mc_bag", mObjDetBagsEnable); - NODELET_INFO_STREAM(" * Detect bags\t\t\t-> " << (mObjDetBagsEnable ? "ENABLED" : "DISABLED")); + NODELET_INFO_STREAM(" * Detect bags\t\t\t-> " << (mObjDetBagsEnable ? "ENABLED": "DISABLED")); mNhNs.getParam("object_detection/mc_animal", mObjDetAnimalsEnable); - NODELET_INFO_STREAM(" * Detect animals\t\t-> " << (mObjDetAnimalsEnable ? "ENABLED" : "DISABLED")); + NODELET_INFO_STREAM(" * Detect animals\t\t-> " << (mObjDetAnimalsEnable ? "ENABLED": "DISABLED")); mNhNs.getParam("object_detection/mc_electronics", mObjDetElectronicsEnable); - NODELET_INFO_STREAM(" * Detect electronics\t\t-> " << (mObjDetElectronicsEnable ? "ENABLED" : "DISABLED")); + NODELET_INFO_STREAM(" * Detect electronics\t\t-> " << (mObjDetElectronicsEnable ? "ENABLED": "DISABLED")); mNhNs.getParam("object_detection/mc_fruit_vegetable", mObjDetFruitsEnable); - NODELET_INFO_STREAM(" * Detect fruit and vegetables\t-> " << (mObjDetFruitsEnable ? "ENABLED" : "DISABLED")); + NODELET_INFO_STREAM(" * Detect fruit and vegetables\t-> " << (mObjDetFruitsEnable ? 
"ENABLED": "DISABLED")); mNhNs.getParam("object_detection/mc_sport", mObjDetSportsEnable); - NODELET_INFO_STREAM(" * Detect sport-related objects\t-> " << (mObjDetSportsEnable ? "ENABLED" : "DISABLED")); + NODELET_INFO_STREAM(" * Detect sport-related objects\t-> " << (mObjDetSportsEnable ? "ENABLED": "DISABLED")); mObjDetRunning = false; mObjDetEnabled = true; @@ -4503,7 +4500,7 @@ void ZEDWrapperNodelet::processDetectedObjects(ros::Time t) objMsg->objects.resize(objCount); size_t idx = 0; - for (auto data : objects.object_list) { + for (auto data: objects.object_list) { objMsg->objects[idx].label = sl::toString(data.label).c_str(); objMsg->objects[idx].sublabel = sl::toString(data.sublabel).c_str(); objMsg->objects[idx].label_id = data.raw_label; diff --git a/zed_wrapper/params/zed.yaml b/zed_wrapper/params/zed.yaml index 06cc54bd..0dada5cd 100644 --- a/zed_wrapper/params/zed.yaml +++ b/zed_wrapper/params/zed.yaml @@ -4,7 +4,7 @@ general: camera_model: 'zed' - resolution: 3 # '0': HD2K, '1': HD1080, '3': HD720, '5': VGA, '6': AUTO + grab_resolution: 'HD720' # 'HD2K', 'HD1080', 'HD720', 'VGA', 'AUTO' grab_frame_rate: 30 # Frequency of frame grabbing for internal SDK operations depth: diff --git a/zed_wrapper/params/zed2.yaml b/zed_wrapper/params/zed2.yaml index e467b390..d4d1e83b 100644 --- a/zed_wrapper/params/zed2.yaml +++ b/zed_wrapper/params/zed2.yaml @@ -4,7 +4,7 @@ general: camera_model: 'zed2' - resolution: 3 # '0': HD2K, '1': HD1080, '3': HD720, '5': VGA, '6': AUTO + grab_resolution: 'HD720' # 'HD2K', 'HD1080', 'HD720', 'VGA', 'AUTO' grab_frame_rate: 30 # Frequency of frame grabbing for internal SDK operations depth: diff --git a/zed_wrapper/params/zed2i.yaml b/zed_wrapper/params/zed2i.yaml index b50091ca..08510fa4 100644 --- a/zed_wrapper/params/zed2i.yaml +++ b/zed_wrapper/params/zed2i.yaml @@ -4,7 +4,7 @@ general: camera_model: 'zed2i' - resolution: 3 # '0': HD2K, '1': HD1080, '3': HD720, '5': VGA, '6': AUTO + grab_resolution: 'HD720' # 'HD2K', 'HD1080', 'HD720', 'VGA', 'AUTO' grab_frame_rate: 30 # Frequency of frame grabbing for internal SDK operations depth: diff --git a/zed_wrapper/params/zedm.yaml b/zed_wrapper/params/zedm.yaml index 48102e8a..a6fde8fa 100644 --- a/zed_wrapper/params/zedm.yaml +++ b/zed_wrapper/params/zedm.yaml @@ -4,7 +4,7 @@ general: camera_model: 'zedm' - resolution: 3 # '0': HD2K, '1': HD1080, '3': HD720, '5': VGA, '6': AUTO + grab_resolution: 'HD720' # 'HD2K', 'HD1080', 'HD720', 'VGA', 'AUTO' grab_frame_rate: 30 # Frequency of frame grabbing for internal SDK operations depth: diff --git a/zed_wrapper/params/zedx.yaml b/zed_wrapper/params/zedx.yaml index 7a2d5794..e2129585 100644 --- a/zed_wrapper/params/zedx.yaml +++ b/zed_wrapper/params/zedx.yaml @@ -4,8 +4,8 @@ general: camera_model: 'zedx' - resolution: 1 # '1': HD1080 - '2': HD1200 - '4': SVGA - '6': AUTO - grab_frame_rate: 60 # 120, 60, 30 + grab_resolution: 'HD1200' # HD1080, HD1200, SVGA, AUTO + grab_frame_rate: 60 # 120, 60, 30, 15 depth: min_depth: 0.2 # Min: 0.2, Max: 3.0 - Default 0.7 - Note: reducing this value wil require more computational power and GPU memory diff --git a/zed_wrapper/params/zedxm.yaml b/zed_wrapper/params/zedxm.yaml index b3d18f5a..dd01ce84 100644 --- a/zed_wrapper/params/zedxm.yaml +++ b/zed_wrapper/params/zedxm.yaml @@ -4,8 +4,8 @@ general: camera_model: 'zedxm' - resolution: 2 # '1': HD1080 - '2': HD1200 - '4': SVGA - '6': AUTO - grab_frame_rate: 60 # 120, 60, 30 + grab_resolution: 'HD1200' # HD1080, HD1200, SVGA, AUTO + grab_frame_rate: 60 # 120, 60, 30, 15 depth: 
min_depth: 0.1 # Min: 0.1, Max: 3.0 - Default 0.7 - Note: reducing this value wil require more computational power and GPU memory From 81e9307d3bc544f85c491c7e19662a4d5500e796 Mon Sep 17 00:00:00 2001 From: Walter Lucetti Date: Tue, 12 Sep 2023 16:07:49 +0200 Subject: [PATCH 07/21] SVO Realtime parameter and other fixes --- CHANGELOG.rst | 2 + .../include/zed_wrapper_nodelet.hpp | 3 +- .../zed_nodelet/src/zed_wrapper_nodelet.cpp | 54 +++++++++++-------- zed_wrapper/params/common.yaml | 31 +++++------ zed_wrapper/params/zed2.yaml | 4 +- zed_wrapper/params/zedm.yaml | 4 +- zed_wrapper/params/zedx.yaml | 8 +-- zed_wrapper/params/zedxm.yaml | 8 +-- 8 files changed, 63 insertions(+), 51 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 76fc0563..d5e9a4e3 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -4,6 +4,8 @@ CHANGELOG 2023-09-12 ---------- - Change the parameter 'general/resolution' to 'general/grab_resolution' and replace numeric values with strings +- Add 'general.svo_realtime' parameter +- Change 'general/verbose' to 'general/sdk_verbose' 2023-09-10 ---------- diff --git a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp index 99b00679..d3b4fd76 100644 --- a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp +++ b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp @@ -555,12 +555,13 @@ class ZEDWrapperNodelet : public nodelet::Nodelet { std::string mAreaMemDbPath; bool mSaveAreaMapOnClosing = true; std::string mSvoFilepath; + bool mSvoRealtime = true; std::string mRemoteStreamAddr; bool mSensTimestampSync; double mSensPubRate = 400.0; double mPathPubRate; int mPathMaxCount; - int mVerbose; + int mSdkVerbose=1; bool mSvoMode = false; double mCamMinDepth; double mCamMaxDepth; diff --git a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp index 64228443..93a3cffb 100644 --- a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp +++ b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp @@ -174,15 +174,18 @@ void ZEDWrapperNodelet::onInit() mTfBuffer.reset(new tf2_ros::Buffer); mTfListener.reset(new tf2_ros::TransformListener(*mTfBuffer)); + mZedParams.coordinate_system = sl::COORDINATE_SYSTEM::RIGHT_HANDED_Z_UP_X_FWD; + NODELET_INFO_STREAM(" * Camera coordinate system\t-> " << sl::toString(mZedParams.coordinate_system)); + // Try to initialize the ZED + std::stringstream ss; // Input mode info for logging if (!mSvoFilepath.empty() || !mRemoteStreamAddr.empty()) { if (!mSvoFilepath.empty()) { mZedParams.input.setFromSVOFile(mSvoFilepath.c_str()); - mZedParams.svo_real_time_mode = true; - - NODELET_INFO_STREAM( "Node input: SVO - " << mSvoFilepath.c_str()); + mZedParams.svo_real_time_mode = mSvoRealtime; - // TODO ADD PARAMETER FOR SVO REAL TIME + // Input mode info for logging + ss << "SVO - " << mSvoFilepath.c_str(); } else if (!mRemoteStreamAddr.empty()) { std::vector configStream = sl_tools::split_string(mRemoteStreamAddr, ':'); sl::String ip = sl::String(configStream.at(0).c_str()); @@ -193,7 +196,8 @@ void ZEDWrapperNodelet::onInit() mZedParams.input.setFromStream(ip); } - NODELET_INFO_STREAM( "Node input: NETWORK STREAM - " << mRemoteStreamAddr.c_str()); + // Input mode info for logging + ss << "NETWORK STREAM - " << mRemoteStreamAddr.c_str(); } mSvoMode = true; @@ -204,20 +208,21 @@ void ZEDWrapperNodelet::onInit() if (mZedSerialNumber == 0) { mZedParams.input.setFromCameraID(mZedId); - 
NODELET_INFO_STREAM( "Node input: LIVE CAMERA with ID " << mZedId); + + // Input mode info for logging + ss << "LIVE CAMERA with ID " << mZedId; } else { mZedParams.input.setFromSerialNumber(mZedSerialNumber); - NODELET_INFO_STREAM( "Node input: LIVE CAMERA with SN " << mZedSerialNumber); + + // Input mode info for logging + ss << "LIVE CAMERA with SN " << mZedSerialNumber; } } - mZedParams.coordinate_system = sl::COORDINATE_SYSTEM::RIGHT_HANDED_Z_UP_X_FWD; - NODELET_INFO_STREAM(" * Camera coordinate system\t-> " << sl::toString(mZedParams.coordinate_system)); - mZedParams.async_grab_camera_recovery = true; mZedParams.coordinate_units = sl::UNIT::METER; mZedParams.depth_mode = static_cast(mDepthMode); - mZedParams.sdk_verbose = mVerbose; + mZedParams.sdk_verbose = mSdkVerbose; mZedParams.sdk_gpu_id = mGpuId; mZedParams.depth_stabilization = mDepthStabilization; mZedParams.camera_image_flip = mCameraFlip; @@ -232,7 +237,7 @@ void ZEDWrapperNodelet::onInit() mConnStatus = sl::ERROR_CODE::CAMERA_NOT_DETECTED; - NODELET_INFO_STREAM(" *** Opening " << sl::toString(mZedUserCamModel) << "..."); + NODELET_INFO_STREAM(" *** Opening " << sl::toString(mZedUserCamModel) << " - " << ss.str().c_str() << " ***"); while (mConnStatus != sl::ERROR_CODE::SUCCESS) { mConnStatus = mZed.open(mZedParams); NODELET_INFO_STREAM("ZED connection: " << sl::toString(mConnStatus)); @@ -317,8 +322,6 @@ void ZEDWrapperNodelet::onInit() mSensFwVersion = mZed.getCameraInformation().sensors_configuration.firmware_version; NODELET_INFO_STREAM(" * Sensors FW Version: " << mSensFwVersion); } - } else { - NODELET_INFO_STREAM(" * Input type\t-> " << sl::toString(mZed.getCameraInformation().input_type).c_str()); } // Set the IMU topic names using real camera model @@ -683,8 +686,8 @@ void ZEDWrapperNodelet::readParameters() NODELET_INFO_STREAM(" * Gpu ID\t\t\t-> " << mGpuId); mNhNs.getParam("general/zed_id", mZedId); NODELET_INFO_STREAM(" * Camera ID\t\t\t-> " << mZedId); - mNhNs.getParam("general/verbose", mVerbose); - NODELET_INFO_STREAM(" * Verbose\t\t\t-> " << (mVerbose ? "ENABLED": "DISABLED")); + mNhNs.getParam("general/sdk_verbose", mSdkVerbose); + NODELET_INFO_STREAM(" * SDK Verbose level\t\t-> " << mSdkVerbose ); std::string flip_str; mNhNs.getParam("general/camera_flip", flip_str); if(flip_str=="ON") { @@ -742,10 +745,10 @@ void ZEDWrapperNodelet::readParameters() if (mDepthMode == sl::DEPTH_MODE::NONE) { mDepthDisabled = true; mDepthStabilization = 0; - NODELET_INFO_STREAM(" * Depth mode\t\t\t> " << sl::toString(mDepthMode).c_str() << " - DEPTH DISABLED"); + NODELET_INFO_STREAM(" * Depth mode\t\t\t-> " << sl::toString(mDepthMode).c_str() << " - DEPTH DISABLED"); } else { mDepthDisabled = false; - NODELET_INFO_STREAM(" * Depth mode\t\t\t--> " << sl::toString( + NODELET_INFO_STREAM(" * Depth mode\t\t\t-> " << sl::toString( mDepthMode).c_str() << " [" << static_cast(mDepthMode) << "]"); } @@ -780,7 +783,7 @@ void ZEDWrapperNodelet::readParameters() "'). Using default value."); mPosTrkMode = sl::POSITIONAL_TRACKING_MODE::QUALITY; } - NODELET_INFO_STREAM( " * Positional tracking mode\t:" << sl::toString(mPosTrkMode).c_str()); + NODELET_INFO_STREAM( " * Positional tracking mode\t-> " << sl::toString(mPosTrkMode).c_str()); mNhNs.getParam("pos_tracking/set_gravity_as_origin", mSetGravityAsOrigin); NODELET_INFO_STREAM(" * Set gravity as origin\t-> " << (mSetGravityAsOrigin ? 
"ENABLED": "DISABLED")); @@ -800,7 +803,7 @@ void ZEDWrapperNodelet::readParameters() mAreaMemDbPath = sl_tools::resolveFilePath(mAreaMemDbPath); NODELET_INFO_STREAM(" * Odometry DB path\t\t-> " << mAreaMemDbPath.c_str()); - mNhNs.param("pos_tracking/save_area_memory_db_on_exit", mSaveAreaMapOnClosing, false); + mNhNs.param("pos_tracking/save_area_memory_db_on_exit", mSaveAreaMapOnClosing,AUTO_WB false); NODELET_INFO_STREAM(" * Save Area Memory on closing\t-> " << (mSaveAreaMapOnClosing ? "ENABLED": "DISABLED")); mNhNs.param("pos_tracking/area_memory", mAreaMemory, false); NODELET_INFO_STREAM(" * Area Memory\t\t\t-> " << (mAreaMemory ? "ENABLED": "DISABLED")); @@ -812,10 +815,10 @@ void ZEDWrapperNodelet::readParameters() NODELET_INFO_STREAM(" * Init Odometry with first valid pose data: " << (mInitOdomWithPose ? "ENABLED": "DISABLED")); mNhNs.param("pos_tracking/two_d_mode", mTwoDMode, false); - NODELET_INFO_STREAM(" * Two D mode\t\t\t-> " << (mTwoDMode ? "ENABLED": "DISABLED")); - mNhNs.param("pos_tracking/fixed_z_value", mFixedZValue, 0.0); + NODELET_INFO_STREAM(" * Force 2D mode\t\t\t-> " << (mTwoDMode ? "ENABLED": "DISABLED")); if (mTwoDMode) { + mNhNs.param("pos_tracking/fixed_z_value", mFixedZValue, 0.0); NODELET_INFO_STREAM(" * Fixed Z value\t\t-> " << mFixedZValue); } } @@ -921,6 +924,11 @@ void ZEDWrapperNodelet::readParameters() mSvoFilepath = sl_tools::resolveFilePath(mSvoFilepath); NODELET_INFO_STREAM(" * SVO input file: \t\t-> " << mSvoFilepath.c_str()); + if(!mSvoFilepath.empty()) { + mNhNs.getParam("general/svo_realtime", mSvoRealtime); + NODELET_INFO_STREAM(" * SVO realtime mode\t\t-> " << (mSvoRealtime ? "ENABLED": "DISABLED")); + } + int svo_compr = 0; mNhNs.getParam("general/svo_compression", svo_compr); @@ -2608,7 +2616,7 @@ void ZEDWrapperNodelet::pubVideoDepth() grab_ts = mat_conf.timestamp; } // <---- Retrieve all required image data -mSvoMode + // ----> Data ROS timestamp ros::Time stamp = sl_tools::slTime2Ros(grab_ts); if (mSvoMode) { diff --git a/zed_wrapper/params/common.yaml b/zed_wrapper/params/common.yaml index eef54dd6..0dcfd9be 100644 --- a/zed_wrapper/params/common.yaml +++ b/zed_wrapper/params/common.yaml @@ -3,20 +3,20 @@ --- # Dynamic parameters cannot have a namespace -brightness: 4 # Dynamic -contrast: 4 # Dynamic -hue: 0 # Dynamic -saturation: 4 # Dynamic -sharpness: 4 # Dynamic -gamma: 8 # Dynamic - Requires SDK >=v3.1 -auto_exposure_gain: true # Dynamic -gain: 100 # Dynamic - works only if `auto_exposure_gain` is false -exposure: 100 # Dynamic - works only if `auto_exposure_gain` is false -auto_whitebalance: true # Dynamic -whitebalance_temperature: 42 # Dynamic - works only if `auto_whitebalance` is false -depth_confidence: 30 # Dynamic -depth_texture_conf: 100 # Dynamic -point_cloud_freq: 10.0 # Dynamic - frequency of the pointcloud publishing (equal or less to `grab_frame_rate` value) +brightness: 4 # Dynamic +contrast: 4 # Dynamic +hue: 0 # Dynamic +saturation: 4 # Dynamic +sharpness: 4 # Dynamic +gamma: 8 # Dynamic - Requires SDK >=v3.1 +auto_exposure_gain: true # Dynamic +gain: 100 # Dynamic - works only if `auto_exposure_gain` is false +exposure: 100 # Dynamic - works only if `auto_exposure_gain` is false +auto_whitebalance: true # Dynamic +whitebalance_temperature: 42 # Dynamic - works only if `auto_whitebalance` is false +depth_confidence: 30 # Dynamic +depth_texture_conf: 100 # Dynamic +point_cloud_freq: 10.0 # Dynamic - frequency of the pointcloud publishing (equal or less to `grab_frame_rate` value) general: camera_name: zed # A name for 
the camera (can be different from camera model and node name and can be overwritten by the launch file) @@ -24,13 +24,14 @@ general: serial_number: 0 gpu_id: -1 base_frame: 'base_link' # must be equal to the frame_id used in the URDF file - verbose: 1 # Set verbose level of the ZED SDK + sdk_verbose: 1 # Set verbose level of the ZED SDK svo_compression: 2 # `0`: LOSSLESS, `1`: AVCHD, `2`: HEVC self_calib: true # enable/disable self calibration at starting camera_flip: 'AUTO' # camera flip mode: 'OFF', 'ON', 'AUTO' pub_resolution: 'CUSTOM' # The resolution used for output. 'NATIVE' to use the same `general.grab_resolution` - `CUSTOM` to apply the `general.pub_downscale_factor` downscale factory to reduce bandwidth in transmission pub_downscale_factor: 2.0 # rescale factor used to rescale image before publishing when 'pub_resolution' is 'CUSTOM' pub_frame_rate: 15.0 # frequency of publishing of video and depth data (see SDK API "InitParameters::grab_compute_capping_fps") + svo_realtime: true # if true the input SVO will be played trying to respect the original framerate eventually skipping frames, otherwise every frame will be processed respecting the `pub_frame_rate` setting #video: diff --git a/zed_wrapper/params/zed2.yaml b/zed_wrapper/params/zed2.yaml index d4d1e83b..6f3cfeda 100644 --- a/zed_wrapper/params/zed2.yaml +++ b/zed_wrapper/params/zed2.yaml @@ -8,6 +8,6 @@ general: grab_frame_rate: 30 # Frequency of frame grabbing for internal SDK operations depth: - min_depth: 0.3 # Min: 0.2, Max: 3.0 - Default 0.7 - Note: reducing this value wil require more computational power and GPU memory - max_depth: 20.0 # Max: 40.0 + min_depth: 0.3 # Min: 0.2, Max: 3.0 - Default 0.7 - Note: reducing this value wil require more computational power and GPU memory + max_depth: 20.0 # Max: 40.0 \ No newline at end of file diff --git a/zed_wrapper/params/zedm.yaml b/zed_wrapper/params/zedm.yaml index a6fde8fa..ca3f66c2 100644 --- a/zed_wrapper/params/zedm.yaml +++ b/zed_wrapper/params/zedm.yaml @@ -8,5 +8,5 @@ general: grab_frame_rate: 30 # Frequency of frame grabbing for internal SDK operations depth: - min_depth: 0.1 # Min: 0.1, Max: 3.0 - Default 0.3 - Note: reducing this value wil require more computational power and GPU memory - max_depth: 10.0 # Max: 20.0 + min_depth: 0.1 # Min: 0.1, Max: 3.0 - Default 0.3 - Note: reducing this value wil require more computational power and GPU memory + max_depth: 10.0 # Max: 20.0 diff --git a/zed_wrapper/params/zedx.yaml b/zed_wrapper/params/zedx.yaml index e2129585..7d364165 100644 --- a/zed_wrapper/params/zedx.yaml +++ b/zed_wrapper/params/zedx.yaml @@ -4,10 +4,10 @@ general: camera_model: 'zedx' - grab_resolution: 'HD1200' # HD1080, HD1200, SVGA, AUTO - grab_frame_rate: 60 # 120, 60, 30, 15 + grab_resolution: 'HD1200' # HD1080, HD1200, SVGA, AUTO + grab_frame_rate: 60 # 120, 60, 30, 15 depth: - min_depth: 0.2 # Min: 0.2, Max: 3.0 - Default 0.7 - Note: reducing this value wil require more computational power and GPU memory - max_depth: 10.0 # Max: 40.0 + min_depth: 0.2 # Min: 0.2, Max: 3.0 - Default 0.7 - Note: reducing this value wil require more computational power and GPU memory + max_depth: 10.0 # Max: 40.0 diff --git a/zed_wrapper/params/zedxm.yaml b/zed_wrapper/params/zedxm.yaml index dd01ce84..dd931d84 100644 --- a/zed_wrapper/params/zedxm.yaml +++ b/zed_wrapper/params/zedxm.yaml @@ -4,9 +4,9 @@ general: camera_model: 'zedxm' - grab_resolution: 'HD1200' # HD1080, HD1200, SVGA, AUTO - grab_frame_rate: 60 # 120, 60, 30, 15 + grab_resolution: 'HD1200' # 
HD1080, HD1200, SVGA, AUTO + grab_frame_rate: 60 # 120, 60, 30, 15 depth: - min_depth: 0.1 # Min: 0.1, Max: 3.0 - Default 0.7 - Note: reducing this value wil require more computational power and GPU memory - max_depth: 10.0 # Max: 40.0 + min_depth: 0.1 # Min: 0.1, Max: 3.0 - Default 0.7 - Note: reducing this value wil require more computational power and GPU memory + max_depth: 10.0 # Max: 40.0 From 713071cf7b0884b5fa200c0cd3e87410c6705724 Mon Sep 17 00:00:00 2001 From: Walter Lucetti Date: Tue, 12 Sep 2023 16:56:34 +0200 Subject: [PATCH 08/21] - Add 'general/region_of_interest' parameter --- CHANGELOG.rst | 1 + zed_nodelets/src/tools/include/sl_tools.h | 13 ++ zed_nodelets/src/tools/src/sl_tools.cpp | 133 ++++++++++++++++++ .../include/zed_wrapper_nodelet.hpp | 8 ++ .../zed_nodelet/src/zed_wrapper_nodelet.cpp | 123 +++++++++++++++- zed_wrapper/params/common.yaml | 5 + 6 files changed, 276 insertions(+), 7 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index d5e9a4e3..4a10e200 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -6,6 +6,7 @@ CHANGELOG - Change the parameter 'general/resolution' to 'general/grab_resolution' and replace numeric values with strings - Add 'general.svo_realtime' parameter - Change 'general/verbose' to 'general/sdk_verbose' +- Add 'general/region_of_interest' parameter 2023-09-10 ---------- diff --git a/zed_nodelets/src/tools/include/sl_tools.h b/zed_nodelets/src/tools/include/sl_tools.h index a222da59..b0c38188 100644 --- a/zed_nodelets/src/tools/include/sl_tools.h +++ b/zed_nodelets/src/tools/include/sl_tools.h @@ -72,6 +72,19 @@ bool isZED2OrZED2i(sl::MODEL camModel); */ bool isZEDX(sl::MODEL camModel); +/*! \brief Creates an sl::Mat containing a ROI from a polygon + * \param poly the ROI polygon. Coordinates must be normalized from 0.0 to 1.0 + * \param out_roi the `sl::Mat` containing the ROI + */ +bool generateROI(const std::vector & poly, sl::Mat & out_roi); + +/*! \brief Parse a vector of vector of floats from a string. + * \param input + * \param error_return + * Syntax is [[1.0, 2.0], [3.3, 4.4, 5.5], ...] */ +std::vector> parseStringVector( + const std::string & input, std::string & error_return); + /*! 
\brief sl::Mat to ros message conversion * \param imgMsgPtr : the image topic message to publish * \param img : the image to publish diff --git a/zed_nodelets/src/tools/src/sl_tools.cpp b/zed_nodelets/src/tools/src/sl_tools.cpp index 2e47ef45..d913f6d4 100644 --- a/zed_nodelets/src/tools/src/sl_tools.cpp +++ b/zed_nodelets/src/tools/src/sl_tools.cpp @@ -351,6 +351,139 @@ std::vector split_string(const std::string& s, char seperator) return output; } +inline bool contains(std::vector & poly, sl::float2 test) +{ + int i, j; + bool c = false; + const int nvert = poly.size(); + for (i = 0, j = nvert - 1; i < nvert; j = i++) { + if ( + ((poly[i].y > test.y) != (poly[j].y > test.y)) && + (test.x < + (poly[j].x - poly[i].x) * (test.y - poly[i].y) / (poly[j].y - poly[i].y) + poly[i].x)) + { + c = !c; + } + } + return c; +} + +bool generateROI(const std::vector & poly, sl::Mat & out_roi) +{ + if (poly.size() < 3) { + out_roi = sl::Mat(); + return false; + } + + // Set each pixel to valid + // std::cerr << "Setting ROI mask to full valid" << std::endl; + out_roi.setTo(255, sl::MEM::CPU); + + // ----> De-normalize coordinates + size_t w = out_roi.getWidth(); + size_t h = out_roi.getHeight(); + + // std::cerr << "De-normalize coordinates" << std::endl; + // std::cerr << "Image resolution: " << w << "x" << h << std::endl; + std::vector poly_img; + size_t idx = 0; + for (auto & it : poly) { + sl::float2 pt; + pt.x = it.x * w; + pt.y = it.y * h; + + if (pt.x >= w) { + pt.x = (w - 1); + } + if (pt.y >= h) { + pt.y = (h - 1); + } + + poly_img.push_back(pt); + + ++idx; + } + // <---- De-normalize coordinates + + // ----> Unset ROI pixels outside the polygon + //std::cerr << "Unset ROI pixels outside the polygon" << std::endl; + //std::cerr << "Set mask" << std::endl; + for (int v = 0; v < h; v++) { + for (int u = 0; u < w; u++) { + if (!contains(poly_img, sl::float2(u, v))) { + out_roi.setValue(u, v, 0, sl::MEM::CPU); + } + } + } + // std::cerr << "Mask ready" << std::endl; + // std::cerr << "ROI resolution: " << w << "x" << h << std::endl; + // <---- Unset ROI pixels outside the polygon + + return true; +} + +std::vector> parseStringVector( + const std::string & input, std::string & error_return) +{ + std::vector> result; + + std::stringstream input_ss(input); + int depth = 0; + std::vector current_vector; + while (!!input_ss && !input_ss.eof()) { + switch (input_ss.peek()) { + case EOF: + break; + case '[': + depth++; + if (depth > 2) { + error_return = "Array depth greater than 2"; + return result; + } + input_ss.get(); + current_vector.clear(); + break; + case ']': + depth--; + if (depth < 0) { + error_return = "More close ] than open ["; + return result; + } + input_ss.get(); + if (depth == 1) { + result.push_back(current_vector); + } + break; + case ',': + case ' ': + case '\t': + input_ss.get(); + break; + default: // All other characters should be part of the numbers. + if (depth != 2) { + std::stringstream err_ss; + err_ss << "Numbers at depth other than 2. 
Char was '" << char(input_ss.peek()) << "'."; + error_return = err_ss.str(); + return result; + } + float value; + input_ss >> value; + if (!!input_ss) { + current_vector.push_back(value); + } + break; + } + } + + if (depth != 0) { + error_return = "Unterminated vector string."; + } else { + error_return = ""; + } + + return result; +} + CSmartMean::CSmartMean(int winSize) { mValCount = 0; diff --git a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp index d3b4fd76..3be9f439 100644 --- a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp +++ b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp @@ -249,6 +249,13 @@ class ZEDWrapperNodelet : public nodelet::Nodelet { */ void checkResolFps(); + // ----> Region of Interest + std::string getRoiParam(std::string paramName, std::vector> & outVal); + std::string parseRoiPoly( + const std::vector> & in_poly, std::vector & out_poly); + void resetRoi(); + // <---- Region of Interest + /*! \brief Callback to handle dynamic reconfigure events in ROS */ void callback_dynamicReconf(zed_nodelets::ZedConfig& config, uint32_t level); @@ -562,6 +569,7 @@ class ZEDWrapperNodelet : public nodelet::Nodelet { double mPathPubRate; int mPathMaxCount; int mSdkVerbose=1; + std::vector> mRoiParam; bool mSvoMode = false; double mCamMinDepth; double mCamMaxDepth; diff --git a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp index 93a3cffb..0d7966a8 100644 --- a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp +++ b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp @@ -707,15 +707,16 @@ void ZEDWrapperNodelet::readParameters() if (tmp_sn > 0) { mZedSerialNumber = static_cast(tmp_sn); NODELET_INFO_STREAM(" * Serial number\t\t-> " << mZedSerialNumber); - } - // <---- General - + } + std::string parsed_str = getRoiParam("general/region_of_interest", mRoiParam); + NODELET_INFO_STREAM(" * Region of interest\t\t-> " << parsed_str.c_str()); + // <---- General - // ----> Video + // ----> Video //NODELET_INFO_STREAM("*** VIDEO PARAMETERS ***"); // Note: there are no "static" video parameters - // <---- Video + // <---- Video // -----> Depth NODELET_INFO_STREAM("*** DEPTH PARAMETERS ***"); @@ -802,8 +803,8 @@ void ZEDWrapperNodelet::readParameters() mNhNs.getParam("pos_tracking/area_memory_db_path", mAreaMemDbPath); mAreaMemDbPath = sl_tools::resolveFilePath(mAreaMemDbPath); NODELET_INFO_STREAM(" * Odometry DB path\t\t-> " << mAreaMemDbPath.c_str()); - - mNhNs.param("pos_tracking/save_area_memory_db_on_exit", mSaveAreaMapOnClosing,AUTO_WB false); + + mNhNs.param("pos_tracking/save_area_memory_db_on_exit", mSaveAreaMapOnClosing, false); NODELET_INFO_STREAM(" * Save Area Memory on closing\t-> " << (mSaveAreaMapOnClosing ? "ENABLED": "DISABLED")); mNhNs.param("pos_tracking/area_memory", mAreaMemory, false); NODELET_INFO_STREAM(" * Area Memory\t\t\t-> " << (mAreaMemory ? 
"ENABLED": "DISABLED")); @@ -1064,6 +1065,91 @@ void ZEDWrapperNodelet::readParameters() // <---- Dynamic } +std::string ZEDWrapperNodelet::getRoiParam(std::string paramName, std::vector> & outVal) +{ + outVal.clear(); + + std::string roi_param_str = ""; + + mNhNs.getParam("general/region_of_interest", roi_param_str); + + if (roi_param_str.empty()) { + return std::string(); + } + + std::string error; + outVal = sl_tools::parseStringVector(roi_param_str, error); + + if (error != "") { + NODELET_WARN_STREAM("Error parsing " << paramName << " parameter: " << error.c_str()); + NODELET_WARN_STREAM(" " << paramName << " string was " << roi_param_str.c_str()); + + outVal.clear(); + } + + return roi_param_str; +} + +std::string ZEDWrapperNodelet::parseRoiPoly( + const std::vector> & in_poly, + std::vector & out_poly) +{ + out_poly.clear(); + + std::string ss; + ss = "["; + + size_t poly_size = in_poly.size(); + + if (poly_size < 3) { + if (poly_size != 0) { + NODELET_WARN_STREAM("A vector with " << + poly_size << + " points is not enough to create a polygon to set a Region " + "of Interest."); + return std::string(); + } + } else { + for (size_t i; i < poly_size; ++i) { + if (in_poly[i].size() != 2) { + NODELET_WARN_STREAM("The component with index '" << i << + "' of the ROI vector " + "has not the correct size."); + out_poly.clear(); + return std::string(); + } else if (in_poly[i][0] < 0.0 || in_poly[i][1] < 0.0 || in_poly[i][0] > 1.0 || + in_poly[i][1] > 1.0) + { + NODELET_WARN_STREAM("The component with index '" << i << + "' of the ROI vector " + "is not a " + "valid normalized point: [" << + in_poly[i][0] << "," << in_poly[i][1] << + "]."); + out_poly.clear(); + return std::string(); + } else { + sl::float2 pt; + pt.x = in_poly[i][0]; + pt.y = in_poly[i][1]; + out_poly.push_back(pt); + ss += "["; + ss += std::to_string(pt.x); + ss += ","; + ss += std::to_string(pt.y); + ss += "]"; + } + + if (i != poly_size - 1) { + ss += ","; + } + } + } + ss += "]"; + + return ss; +} + void ZEDWrapperNodelet::checkResolFps() { switch (mCamResol) { @@ -3270,6 +3356,29 @@ void ZEDWrapperNodelet::device_poll_thread_func() mMatResol = sl::Resolution(pub_w, pub_h); NODELET_DEBUG_STREAM("Publishing frame size: " << mMatResol.width << "x" << mMatResol.height); + // ----> Set Region of Interest + if (!mRoiParam.empty()) { + NODELET_INFO("*** Setting ROI ***"); + sl::Resolution resol(mCamWidth, mCamHeight); + std::vector sl_poly; + std::string log_msg = parseRoiPoly(mRoiParam, sl_poly); + + // Create ROI mask + sl::Mat roi_mask(resol, sl::MAT_TYPE::U8_C1, sl::MEM::CPU); + + if (!sl_tools::generateROI(sl_poly, roi_mask)) { + NODELET_WARN(" * Error generating the region of interest image mask."); + } else { + sl::ERROR_CODE err = mZed.setRegionOfInterest(roi_mask); + if (err != sl::ERROR_CODE::SUCCESS) { + NODELET_WARN_STREAM(" * Error while setting ZED SDK region of interest: " << sl::toString(err).c_str()); + } else { + NODELET_INFO(" * Region of Interest correctly set."); + } + } + } + // <---- Set Region of Interest + // Create and fill the camera information messages fillCamInfo(mZed, mLeftCamInfoMsg, mRightCamInfoMsg, mLeftCamOptFrameId, mRightCamOptFrameId); fillCamInfo(mZed, mLeftCamInfoRawMsg, mRightCamInfoRawMsg, mLeftCamOptFrameId, mRightCamOptFrameId, true); diff --git a/zed_wrapper/params/common.yaml b/zed_wrapper/params/common.yaml index 0dcfd9be..e98bad15 100644 --- a/zed_wrapper/params/common.yaml +++ b/zed_wrapper/params/common.yaml @@ -32,6 +32,11 @@ general: pub_downscale_factor: 2.0 # rescale factor 
used to rescale image before publishing when 'pub_resolution' is 'CUSTOM' pub_frame_rate: 15.0 # frequency of publishing of video and depth data (see SDK API "InitParameters::grab_compute_capping_fps") svo_realtime: true # if true the input SVO will be played trying to respect the original framerate eventually skipping frames, otherwise every frame will be processed respecting the `pub_frame_rate` setting + #region_of_interest: '[]' # A polygon defining the ROI where the ZED SDK perform the processing ignoring the rest. Coordinates must be normalized to '1.0' to be resolution independent. + region_of_interest: '[[0.25,0.33],[0.75,0.33],[0.75,0.5],[0.5,0.75],[0.25,0.5]]' # A polygon defining the ROI where the ZED SDK perform the processing ignoring the rest. Coordinates must be normalized to '1.0' to be resolution independent. + #region_of_interest: '[[0.25,0.25],[0.75,0.25],[0.75,0.75],[0.25,0.75]]' # A polygon defining the ROI where the ZED SDK perform the processing ignoring the rest. Coordinates must be normalized to '1.0' to be resolution independent. + #region_of_interest: '[[0.5,0.25],[0.75,0.5],[0.5,0.75],[0.25,0.5]]' # A polygon defining the ROI where the ZED SDK perform the processing ignoring the rest. Coordinates must be normalized to '1.0' to be resolution independent. + #video: From fb9c8263d78748628b52d26137e5d245c0bf9407 Mon Sep 17 00:00:00 2001 From: Walter Lucetti Date: Wed, 13 Sep 2023 11:17:24 +0200 Subject: [PATCH 09/21] Rework params reading --- CHANGELOG.rst | 3 +- .../include/zed_wrapper_nodelet.hpp | 32 ++ .../zed_nodelet/src/zed_wrapper_nodelet.cpp | 354 +++++++++++------- zed_wrapper/params/common.yaml | 17 +- 4 files changed, 253 insertions(+), 153 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 4a10e200..6bd26187 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -21,13 +21,14 @@ CHANGELOG - Add `pos_tracking/pos_tracking_mode` parameter. Matches the ZED SDK setting: 'QUALITY', 'STANDARD' - Fix the warning `Elaboration takes longer [...]` - 2023-09-07 ---------- - `pub_frame_rate` now controls the `InitParameters::grab_compute_capping_fps` parameter of the ZED SDK instead of controlling the frequency of a parallel thread. It's not a Dynamic parameter anymore. - Change `general/camera_flip` parameter to string: 'AUTO', 'ON', 'OFF' - Change `general/verbose` from bool to integer + + v4.0.5 ------ - Support for ZED SDK v4.0 diff --git a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp index 3be9f439..00dc5a72 100644 --- a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp +++ b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp @@ -123,6 +123,38 @@ class ZEDWrapperNodelet : public nodelet::Nodelet { */ void readParameters(); + /*! \brief Reads general parameters from the param server + */ + void readGeneralParams(); + + /*! \brief Reads depth parameters from the param server + */ + void readDepthParams(); + + /*! \brief Reads positional tracking parameters from the param server + */ + void readPosTrkParams(); + + /*! \brief Reads spatial mapping parameters from the param server + */ + void readMappingParams(); + + /*! \brief Reads object detection parameters from the param server + */ + void readObjDetParams(); + + /*! \brief Reads sensors parameters from the param server + */ + void readSensParams(); + + /*! \brief Reads SVO parameters from the param server + */ + void readSvoParams(); + + /*! 
\brief Reads dynamic parameters from the param server + */ + void readDynParams(); + /*! \brief ZED camera polling thread function */ void device_poll_thread_func(); diff --git a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp index 0d7966a8..3e93a2fb 100644 --- a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp +++ b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp @@ -368,94 +368,96 @@ void ZEDWrapperNodelet::onInit() updateDynamicReconfigure(); // <---- Dynamic Reconfigure parameters - // Create all the publishers + // ----> Publishers + NODELET_INFO( "*** PUBLISHERS ***"); + // Image publishers image_transport::ImageTransport it_zed(mNhNs); mPubRgb = it_zed.advertiseCamera(rgb_topic, 1); // rgb - NODELET_INFO_STREAM("Advertised on topic " << mPubRgb.getTopic()); - NODELET_INFO_STREAM("Advertised on topic " << mPubRgb.getInfoTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRgb.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRgb.getInfoTopic()); mPubRawRgb = it_zed.advertiseCamera(rgb_raw_topic, 1); // rgb raw - NODELET_INFO_STREAM("Advertised on topic " << mPubRawRgb.getTopic()); - NODELET_INFO_STREAM("Advertised on topic " << mPubRawRgb.getInfoTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawRgb.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawRgb.getInfoTopic()); mPubLeft = it_zed.advertiseCamera(left_topic, 1); // left - NODELET_INFO_STREAM("Advertised on topic " << mPubLeft.getTopic()); - NODELET_INFO_STREAM("Advertised on topic " << mPubLeft.getInfoTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubLeft.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubLeft.getInfoTopic()); mPubRawLeft = it_zed.advertiseCamera(left_raw_topic, 1); // left raw - NODELET_INFO_STREAM("Advertised on topic " << mPubRawLeft.getTopic()); - NODELET_INFO_STREAM("Advertised on topic " << mPubRawLeft.getInfoTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawLeft.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawLeft.getInfoTopic()); mPubRight = it_zed.advertiseCamera(right_topic, 1); // right - NODELET_INFO_STREAM("Advertised on topic " << mPubRight.getTopic()); - NODELET_INFO_STREAM("Advertised on topic " << mPubRight.getInfoTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRight.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRight.getInfoTopic()); mPubRawRight = it_zed.advertiseCamera(right_raw_topic, 1); // right raw - NODELET_INFO_STREAM("Advertised on topic " << mPubRawRight.getTopic()); - NODELET_INFO_STREAM("Advertised on topic " << mPubRawRight.getInfoTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawRight.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawRight.getInfoTopic()); mPubRgbGray = it_zed.advertiseCamera(rgb_gray_topic, 1); // rgb - NODELET_INFO_STREAM("Advertised on topic " << mPubRgbGray.getTopic()); - NODELET_INFO_STREAM("Advertised on topic " << mPubRgbGray.getInfoTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRgbGray.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRgbGray.getInfoTopic()); mPubRawRgbGray = it_zed.advertiseCamera(rgb_raw_gray_topic, 1); // rgb raw - NODELET_INFO_STREAM("Advertised on topic " << mPubRawRgbGray.getTopic()); - NODELET_INFO_STREAM("Advertised on topic " << mPubRawRgbGray.getInfoTopic()); + NODELET_INFO_STREAM(" * 
Advertised on topic " << mPubRawRgbGray.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawRgbGray.getInfoTopic()); mPubLeftGray = it_zed.advertiseCamera(left_gray_topic, 1); // left - NODELET_INFO_STREAM("Advertised on topic " << mPubLeftGray.getTopic()); - NODELET_INFO_STREAM("Advertised on topic " << mPubLeftGray.getInfoTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubLeftGray.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubLeftGray.getInfoTopic()); mPubRawLeftGray = it_zed.advertiseCamera(left_raw_gray_topic, 1); // left raw - NODELET_INFO_STREAM("Advertised on topic " << mPubRawLeftGray.getTopic()); - NODELET_INFO_STREAM("Advertised on topic " << mPubRawLeftGray.getInfoTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawLeftGray.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawLeftGray.getInfoTopic()); mPubRightGray = it_zed.advertiseCamera(right_gray_topic, 1); // right - NODELET_INFO_STREAM("Advertised on topic " << mPubRightGray.getTopic()); - NODELET_INFO_STREAM("Advertised on topic " << mPubRightGray.getInfoTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRightGray.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRightGray.getInfoTopic()); mPubRawRightGray = it_zed.advertiseCamera(right_raw_gray_topic, 1); // right raw - NODELET_INFO_STREAM("Advertised on topic " << mPubRawRightGray.getTopic()); - NODELET_INFO_STREAM("Advertised on topic " << mPubRawRightGray.getInfoTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawRightGray.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawRightGray.getInfoTopic()); mPubStereo = it_zed.advertise(stereo_topic, 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubStereo.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubStereo.getTopic()); mPubRawStereo = it_zed.advertise(stereo_raw_topic, 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubRawStereo.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawStereo.getTopic()); // Detected planes publisher mPubPlane = mNhNs.advertise(plane_topic, 1); if (!mDepthDisabled) { mPubDepth = it_zed.advertiseCamera(depth_topic_root, 1); // depth - NODELET_INFO_STREAM("Advertised on topic " << mPubDepth.getTopic()); - NODELET_INFO_STREAM("Advertised on topic " << mPubDepth.getInfoTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubDepth.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubDepth.getInfoTopic()); // Confidence Map publisher mPubConfMap = mNhNs.advertise(conf_map_topic, 1); // confidence map - NODELET_INFO_STREAM("Advertised on topic " << mPubConfMap.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubConfMap.getTopic()); // Disparity publisher mPubDisparity = mNhNs.advertise(disparityTopic, 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubDisparity.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubDisparity.getTopic()); // PointCloud publishers mPubCloud = mNhNs.advertise(pointcloud_topic, 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubCloud.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubCloud.getTopic()); if (mMappingEnabled) { mPubFusedCloud = mNhNs.advertise(pointcloud_fused_topic, 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubFusedCloud.getTopic() << " @ " << mFusedPcPubFreq << " Hz"); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubFusedCloud.getTopic() << " @ " << 
mFusedPcPubFreq << " Hz"); } // Object detection publishers if (mObjDetEnabled) { mPubObjDet = mNhNs.advertise(object_det_topic, 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubObjDet.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubObjDet.getTopic()); } // Odometry and Pose publisher mPubPose = mNhNs.advertise(poseTopic, 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubPose.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubPose.getTopic()); mPubPoseCov = mNhNs.advertise(pose_cov_topic, 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubPoseCov.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubPoseCov.getTopic()); mPubOdom = mNhNs.advertise(odometryTopic, 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubOdom.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubOdom.getTopic()); // Rviz markers publisher mPubMarker = mNhNs.advertise(marker_topic, 10, true); @@ -463,9 +465,9 @@ void ZEDWrapperNodelet::onInit() // Camera Path if (mPathPubRate > 0) { mPubOdomPath = mNhNs.advertise(odom_path_topic, 1, true); - NODELET_INFO_STREAM("Advertised on topic " << mPubOdomPath.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubOdomPath.getTopic()); mPubMapPath = mNhNs.advertise(map_path_topic, 1, true); - NODELET_INFO_STREAM("Advertised on topic " << mPubMapPath.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubMapPath.getTopic()); mPathTimer = mNhNs.createTimer(ros::Duration(1.0 / mPathPubRate), &ZEDWrapperNodelet::callback_pubPath, this); @@ -482,32 +484,32 @@ void ZEDWrapperNodelet::onInit() } // Sensor publishers - if (mZedRealCamModel != sl::MODEL::ZED) { + if (!sl_tools::isZED(mZedRealCamModel)) { // IMU Publishers mPubImu = mNhNs.advertise(imu_topic, 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubImu.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubImu.getTopic()); mPubImuRaw = mNhNs.advertise(imu_topic_raw, 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubImuRaw.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubImuRaw.getTopic()); if(mZedRealCamModel != sl::MODEL::ZED_M) { // IMU temperature sensor mPubImuTemp = mNhNs.advertise(imu_temp_topic, 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubImuTemp.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubImuTemp.getTopic()); } if (mZedRealCamModel == sl::MODEL::ZED2 || mZedRealCamModel == sl::MODEL::ZED2i) { mPubImuMag = mNhNs.advertise(imu_mag_topic, 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubImuMag.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubImuMag.getTopic()); // Atmospheric pressure mPubPressure = mNhNs.advertise(pressure_topic, 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubPressure.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubPressure.getTopic()); // CMOS sensor temperatures mPubTempL = mNhNs.advertise(temp_topic_left, 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubTempL.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubTempL.getTopic()); mPubTempR = mNhNs.advertise(temp_topic_right, 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubTempR.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubTempR.getTopic()); } // Publish camera imu transform in a latched topic @@ -534,7 +536,7 @@ void ZEDWrapperNodelet::onInit() mPubCamImuTransf.publish(mCameraImuTransfMgs); - NODELET_INFO_STREAM("Advertised on topic " << 
mPubCamImuTransf.getTopic() << " [LATCHED]"); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubCamImuTransf.getTopic() << " [LATCHED]"); } if (!mSvoMode && !mSensTimestampSync) { @@ -544,35 +546,57 @@ void ZEDWrapperNodelet::onInit() mSensPeriodMean_usec.reset(new sl_tools::CSmartMean(mCamFrameRate / 2)); } } + // <---- Publishers - // Subscribers + // ----> Subscribers + NODELET_INFO( "*** SUBSCRIBERS ***"); mClickedPtSub = mNhNs.subscribe(mClickedPtTopic, 10, &ZEDWrapperNodelet::clickedPtCallback, this); - NODELET_INFO_STREAM("Subscribed to topic " << mClickedPtTopic.c_str()); + NODELET_INFO_STREAM(" * Subscribed to topic " << mClickedPtTopic.c_str()); + // <---- Subscribers - // Services + // ----> Services + NODELET_INFO( "*** SERVICES ***"); if (!mDepthDisabled) { mSrvSetInitPose = mNhNs.advertiseService("set_pose", &ZEDWrapperNodelet::on_set_pose, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvSetInitPose.getService().c_str()); mSrvResetOdometry = mNhNs.advertiseService("reset_odometry", &ZEDWrapperNodelet::on_reset_odometry, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvResetOdometry.getService().c_str()); mSrvResetTracking = mNhNs.advertiseService("reset_tracking", &ZEDWrapperNodelet::on_reset_tracking, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvResetTracking.getService().c_str()); mSrvSaveAreaMemory = mNhNs.advertiseService("save_area_memory", &ZEDWrapperNodelet::on_save_area_memory, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvSaveAreaMemory.getService().c_str()); mSrvStartMapping = mNhNs.advertiseService("start_3d_mapping", &ZEDWrapperNodelet::on_start_3d_mapping, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvStartMapping.getService().c_str()); mSrvStopMapping = mNhNs.advertiseService("stop_3d_mapping", &ZEDWrapperNodelet::on_stop_3d_mapping, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvStopMapping.getService().c_str()); mSrvSave3dMap = mNhNs.advertiseService("save_3d_map", &ZEDWrapperNodelet::on_save_3d_map, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvSave3dMap.getService().c_str()); mSrvStartObjDet = mNhNs.advertiseService("start_object_detection", &ZEDWrapperNodelet::on_start_object_detection, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvStartObjDet.getService().c_str()); mSrvStopObjDet = mNhNs.advertiseService("stop_object_detection", &ZEDWrapperNodelet::on_stop_object_detection, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvStopObjDet.getService().c_str()); } mSrvSvoStartRecording = mNhNs.advertiseService("start_svo_recording", &ZEDWrapperNodelet::on_start_svo_recording, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvSvoStartRecording.getService().c_str()); mSrvSvoStopRecording = mNhNs.advertiseService("stop_svo_recording", &ZEDWrapperNodelet::on_stop_svo_recording, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvSvoStopRecording.getService().c_str()); mSrvSetLedStatus = mNhNs.advertiseService("set_led_status", &ZEDWrapperNodelet::on_set_led_status, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvSetLedStatus.getService().c_str()); mSrvToggleLed = mNhNs.advertiseService("toggle_led", &ZEDWrapperNodelet::on_toggle_led, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvToggleLed.getService().c_str()); + mSrvSvoStartStream = mNhNs.advertiseService("start_remote_stream", &ZEDWrapperNodelet::on_start_remote_stream, this); + NODELET_INFO_STREAM(" 
* Advertised on service " << mSrvSvoStartStream.getService().c_str()); mSrvSvoStopStream = mNhNs.advertiseService("stop_remote_stream", &ZEDWrapperNodelet::on_stop_remote_stream, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvSvoStopStream.getService().c_str()); + // <---- Services + // ----> Threads if (!mDepthDisabled) { // Start Pointcloud thread mPcThread = std::thread(&ZEDWrapperNodelet::pointcloud_thread_func, this); @@ -583,14 +607,15 @@ void ZEDWrapperNodelet::onInit() // Start Sensors thread mSensThread = std::thread(&ZEDWrapperNodelet::sensors_thread_func, this); + // <---- Threads - NODELET_INFO("ZED Node started"); + NODELET_INFO("+++ ZED Node started +++"); } -void ZEDWrapperNodelet::readParameters() +void ZEDWrapperNodelet::readGeneralParams() { - // ----> General NODELET_INFO_STREAM("*** GENERAL PARAMETERS ***"); + std::string camera_model; mNhNs.getParam("general/camera_model", camera_model); @@ -711,15 +736,12 @@ void ZEDWrapperNodelet::readParameters() std::string parsed_str = getRoiParam("general/region_of_interest", mRoiParam); NODELET_INFO_STREAM(" * Region of interest\t\t-> " << parsed_str.c_str()); - // <---- General - - // ----> Video - //NODELET_INFO_STREAM("*** VIDEO PARAMETERS ***"); - // Note: there are no "static" video parameters - // <---- Video +} - // -----> Depth +void ZEDWrapperNodelet::readDepthParams() +{ NODELET_INFO_STREAM("*** DEPTH PARAMETERS ***"); + std::string depth_mode_str = sl::toString(mDepthMode).c_str(); mNhNs.getParam("depth/depth_mode", depth_mode_str); @@ -763,12 +785,13 @@ void ZEDWrapperNodelet::readParameters() mNhNs.getParam("depth/max_depth", mCamMaxDepth); NODELET_INFO_STREAM(" * Maximum depth\t\t-> " << mCamMaxDepth << " m"); } - // <----- Depth +} - // ----> Positional Tracking +void ZEDWrapperNodelet::readPosTrkParams() +{ if(!mDepthDisabled) { NODELET_INFO_STREAM("*** POSITIONAL TRACKING PARAMETERS ***"); - + mNhNs.param("pos_tracking/pos_tracking_enabled", mPosTrackingEnabled, true); NODELET_INFO_STREAM(" * Positional tracking\t\t-> " << (mPosTrackingEnabled ? "ENABLED": "DISABLED")); @@ -813,20 +836,28 @@ void ZEDWrapperNodelet::readParameters() mNhNs.param("pos_tracking/floor_alignment", mFloorAlignment, false); NODELET_INFO_STREAM(" * Floor alignment\t\t-> " << (mFloorAlignment ? "ENABLED": "DISABLED")); mNhNs.param("pos_tracking/init_odom_with_first_valid_pose", mInitOdomWithPose, true); - NODELET_INFO_STREAM(" * Init Odometry with first valid pose data: " + NODELET_INFO_STREAM(" * Init Odometry with first valid pose data -> " << (mInitOdomWithPose ? "ENABLED": "DISABLED")); mNhNs.param("pos_tracking/two_d_mode", mTwoDMode, false); - NODELET_INFO_STREAM(" * Force 2D mode\t\t\t-> " << (mTwoDMode ? "ENABLED": "DISABLED")); + NODELET_INFO_STREAM(" * Force 2D mode\t\t-> " << (mTwoDMode ? "ENABLED": "DISABLED")); if (mTwoDMode) { mNhNs.param("pos_tracking/fixed_z_value", mFixedZValue, 0.0); NODELET_INFO_STREAM(" * Fixed Z value\t\t-> " << mFixedZValue); } + + mNhNs.param("pos_tracking/publish_tf", mPublishTf, true); + NODELET_INFO_STREAM(" * Broadcast odometry TF\t-> " << (mPublishTf ? "ENABLED": "DISABLED")); + mNhNs.param("pos_tracking/publish_map_tf", mPublishMapTf, true); + NODELET_INFO_STREAM(" * Broadcast map pose TF\t-> " + << (mPublishTf ? (mPublishMapTf ? 
"ENABLED": "DISABLED"): "DISABLED")); } - // <---- Positional Tracking +} - // ----> Mapping +void ZEDWrapperNodelet::readMappingParams() +{ NODELET_INFO_STREAM("*** MAPPING PARAMETERS ***"); + if(!mDepthDisabled) { mNhNs.param("mapping/mapping_enabled", mMappingEnabled, false); @@ -848,9 +879,10 @@ void ZEDWrapperNodelet::readParameters() } mNhNs.param("mapping/clicked_point_topic", mClickedPtTopic, std::string("/clicked_point")); NODELET_INFO_STREAM(" * Clicked point topic\t\t-> " << mClickedPtTopic.c_str()); - // <---- Mapping +} - // ----> Object Detection +void ZEDWrapperNodelet::readObjDetParams() +{ if(!mDepthDisabled) { NODELET_INFO_STREAM("*** OBJECT DETECTION PARAMETERS ***"); @@ -895,16 +927,17 @@ void ZEDWrapperNodelet::readParameters() NODELET_INFO_STREAM(" * Detect sport-related objects\t-> " << (mObjDetSportsEnable ? "ENABLED": "DISABLED")); } } - // <---- Object Detection +} +void ZEDWrapperNodelet::readSensParams() +{ NODELET_INFO_STREAM("*** SENSORS PARAMETERS ***"); - // ----> Sensors - if (camera_model != "zed") { + if (!sl_tools::isZED(mZedUserCamModel)) { mNhNs.getParam("sensors/sensors_timestamp_sync", mSensTimestampSync); NODELET_INFO_STREAM(" * Sensors timestamp sync\t-> " << (mSensTimestampSync ? "ENABLED": "DISABLED")); mNhNs.getParam("sensors/max_pub_rate", mSensPubRate); - if (camera_model == "zedm") { + if (!sl_tools::isZEDM(mZedUserCamModel)) { if (mSensPubRate > 800.) mSensPubRate = 800.; } else { @@ -913,14 +946,18 @@ void ZEDWrapperNodelet::readParameters() } NODELET_INFO_STREAM(" * Max sensors rate\t\t-> " << mSensPubRate); + + mNhNs.param("sensors/publish_imu_tf", mPublishImuTf, true); + NODELET_INFO_STREAM(" * Broadcast IMU pose TF\t-> " << (mPublishImuTf ? "ENABLED": "DISABLED")); } else { NODELET_INFO_STREAM(" * The ZED camera has no available inertial/environmental sensors."); } - // <---- Sensors +} +void ZEDWrapperNodelet::readSvoParams() +{ NODELET_INFO_STREAM("*** SVO PARAMETERS ***"); - // ----> SVO mNhNs.param("svo_file", mSvoFilepath, std::string()); mSvoFilepath = sl_tools::resolveFilePath(mSvoFilepath); NODELET_INFO_STREAM(" * SVO input file: \t\t-> " << mSvoFilepath.c_str()); @@ -943,14 +980,107 @@ void ZEDWrapperNodelet::readParameters() mSvoComprMode = static_cast(svo_compr); NODELET_INFO_STREAM(" * SVO REC compression\t\t-> " << sl::toString(mSvoComprMode)); +} + +void ZEDWrapperNodelet::readDynParams() +{ + NODELET_INFO_STREAM("*** DYNAMIC PARAMETERS (Init. values) ***"); + + mNhNs.getParam("brightness", mCamBrightness); + NODELET_INFO_STREAM(" * [DYN] brightness\t\t-> " << mCamBrightness); + mNhNs.getParam("contrast", mCamContrast); + NODELET_INFO_STREAM(" * [DYN] contrast\t\t-> " << mCamContrast); + mNhNs.getParam("hue", mCamHue); + NODELET_INFO_STREAM(" * [DYN] hue\t\t\t-> " << mCamHue); + mNhNs.getParam("saturation", mCamSaturation); + NODELET_INFO_STREAM(" * [DYN] saturation\t\t-> " << mCamSaturation); + mNhNs.getParam("sharpness", mCamSharpness); + NODELET_INFO_STREAM(" * [DYN] sharpness\t\t-> " << mCamSharpness); + mNhNs.getParam("gamma", mCamGamma); + NODELET_INFO_STREAM(" * [DYN] gamma\t\t\t-> " << mCamGamma); + mNhNs.getParam("auto_exposure_gain", mCamAutoExposure); + NODELET_INFO_STREAM(" * [DYN] auto_exposure_gain\t-> " << (mCamAutoExposure ? 
"ENABLED": "DISABLED")); + mNhNs.getParam("gain", mCamGain); + mNhNs.getParam("exposure", mCamExposure); + if (!mCamAutoExposure) { + NODELET_INFO_STREAM(" * [DYN] gain\t\t-> " << mCamGain); + NODELET_INFO_STREAM(" * [DYN] exposure\t\t-> " << mCamExposure); + } + mNhNs.getParam("auto_whitebalance", mCamAutoWB); + NODELET_INFO_STREAM(" * [DYN] auto_whitebalance\t-> " << (mCamAutoWB ? "ENABLED": "DISABLED")); + mNhNs.getParam("whitebalance_temperature", mCamWB); + if (!mCamAutoWB) { + NODELET_INFO_STREAM(" * [DYN] whitebalance_temperature\t\t-> " << mCamWB); + } + + if (mCamAutoExposure) { + mTriggerAutoExposure = true; + } + if (mCamAutoWB) { + mTriggerAutoWB = true; + } + + if(!mDepthDisabled) { + mNhNs.getParam("depth_confidence", mCamDepthConfidence); + NODELET_INFO_STREAM(" * [DYN] Depth confidence\t-> " << mCamDepthConfidence); + mNhNs.getParam("depth_texture_conf", mCamDepthTextureConf); + NODELET_INFO_STREAM(" * [DYN] Depth texture conf.\t-> " << mCamDepthTextureConf); + + mNhNs.getParam("point_cloud_freq", mPointCloudFreq); + if(mPointCloudFreq>mPubFrameRate) { + mPointCloudFreq=mPubFrameRate; + NODELET_WARN_STREAM("'point_cloud_freq' cannot be major than 'general/pub_frame_rate'. Set to " + << mPointCloudFreq); + } + NODELET_INFO_STREAM(" * [DYN] point_cloud_freq\t-> " << mPointCloudFreq << " Hz"); + } +} + +void ZEDWrapperNodelet::readParameters() +{ + // ----> General + readGeneralParams(); + // <---- General + + // ----> Video + //NODELET_INFO_STREAM("*** VIDEO PARAMETERS ***"); + // Note: there are no "static" video parameters + // <---- Video + + // -----> Depth + readDepthParams(); + // <----- Depth + + // ----> Dynamic + void readDynParams(); + // <---- Dynamic + + // ----> Positional Tracking + readPosTrkParams(); + // <---- Positional Tracking + + // ----> Mapping + readMappingParams(); + // <---- Mapping + + // ----> Object Detection + readObjDetParams(); + // <---- Object Detection + + // ----> Sensors + readSensParams(); + // <---- Sensors + + // ----> SVO + readSvoParams(); // <---- SVO // Remote Stream - mNhNs.param("stream", mRemoteStreamAddr, std::string()); + mNhNs.param("stream", mRemoteStreamAddr, std::string()); + // ----> Coordinate frames NODELET_INFO_STREAM("*** COORDINATE FRAMES ***"); - // ----> Coordinate frames mNhNs.param("pos_tracking/map_frame", mMapFrameId, "map"); mNhNs.param("pos_tracking/odometry_frame", mOdometryFrameId, "odom"); mNhNs.param("general/base_frame", mBaseFrameId, "base_link"); @@ -999,70 +1129,6 @@ void ZEDWrapperNodelet::readParameters() NODELET_INFO_STREAM(" * confidence_optical_frame\t-> " << mConfidenceOptFrameId); } // <---- Coordinate frames - - // ----> TF broadcasting - if(!mDepthDisabled) { - mNhNs.param("pos_tracking/publish_tf", mPublishTf, true); - NODELET_INFO_STREAM(" * Broadcast odometry TF\t-> " << (mPublishTf ? "ENABLED": "DISABLED")); - mNhNs.param("pos_tracking/publish_map_tf", mPublishMapTf, true); - NODELET_INFO_STREAM(" * Broadcast map pose TF\t-> " - << (mPublishTf ? (mPublishMapTf ? "ENABLED": "DISABLED"): "DISABLED")); - mNhNs.param("sensors/publish_imu_tf", mPublishImuTf, true); - NODELET_INFO_STREAM(" * Broadcast IMU pose TF\t-> " << (mPublishImuTf ? "ENABLED": "DISABLED")); - } - // <---- TF broadcasting - - // ----> Dynamic - NODELET_INFO_STREAM("*** DYNAMIC PARAMETERS (Init. 
values) ***"); - mNhNs.getParam("brightness", mCamBrightness); - NODELET_INFO_STREAM(" * [DYN] brightness\t\t-> " << mCamBrightness); - mNhNs.getParam("contrast", mCamContrast); - NODELET_INFO_STREAM(" * [DYN] contrast\t\t-> " << mCamContrast); - mNhNs.getParam("hue", mCamHue); - NODELET_INFO_STREAM(" * [DYN] hue\t\t\t-> " << mCamHue); - mNhNs.getParam("saturation", mCamSaturation); - NODELET_INFO_STREAM(" * [DYN] saturation\t\t-> " << mCamSaturation); - mNhNs.getParam("sharpness", mCamSharpness); - NODELET_INFO_STREAM(" * [DYN] sharpness\t\t-> " << mCamSharpness); - mNhNs.getParam("gamma", mCamGamma); - NODELET_INFO_STREAM(" * [DYN] gamma\t\t\t-> " << mCamGamma); - mNhNs.getParam("auto_exposure_gain", mCamAutoExposure); - NODELET_INFO_STREAM(" * [DYN] auto_exposure_gain\t-> " << (mCamAutoExposure ? "ENABLED": "DISABLED")); - mNhNs.getParam("gain", mCamGain); - mNhNs.getParam("exposure", mCamExposure); - if (!mCamAutoExposure) { - NODELET_INFO_STREAM(" * [DYN] gain\t\t-> " << mCamGain); - NODELET_INFO_STREAM(" * [DYN] exposure\t\t-> " << mCamExposure); - } - mNhNs.getParam("auto_whitebalance", mCamAutoWB); - NODELET_INFO_STREAM(" * [DYN] auto_whitebalance\t-> " << (mCamAutoWB ? "ENABLED": "DISABLED")); - mNhNs.getParam("whitebalance_temperature", mCamWB); - if (!mCamAutoWB) { - NODELET_INFO_STREAM(" * [DYN] whitebalance_temperature\t\t-> " << mCamWB); - } - - if (mCamAutoExposure) { - mTriggerAutoExposure = true; - } - if (mCamAutoWB) { - mTriggerAutoWB = true; - } - - if(!mDepthDisabled) { - mNhNs.getParam("depth_confidence", mCamDepthConfidence); - NODELET_INFO_STREAM(" * [DYN] Depth confidence\t-> " << mCamDepthConfidence); - mNhNs.getParam("depth_texture_conf", mCamDepthTextureConf); - NODELET_INFO_STREAM(" * [DYN] Depth texture conf.\t-> " << mCamDepthTextureConf); - - mNhNs.getParam("point_cloud_freq", mPointCloudFreq); - if(mPointCloudFreq>mPubFrameRate) { - mPointCloudFreq=mPubFrameRate; - NODELET_WARN_STREAM("'point_cloud_freq' cannot be major than 'general/pub_frame_rate'. 
Set to " - << mPointCloudFreq); - } - NODELET_INFO_STREAM(" * [DYN] point_cloud_freq\t-> " << mPointCloudFreq << " Hz"); - } - // <---- Dynamic } std::string ZEDWrapperNodelet::getRoiParam(std::string paramName, std::vector> & outVal) @@ -1590,7 +1656,7 @@ bool ZEDWrapperNodelet::start_3d_mapping() if (mPubFusedCloud.getTopic().empty()) { std::string pointcloud_fused_topic = "mapping/fused_cloud"; mPubFusedCloud = mNhNs.advertise(pointcloud_fused_topic, 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubFusedCloud.getTopic() << " @ " << mFusedPcPubFreq << " Hz"); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubFusedCloud.getTopic() << " @ " << mFusedPcPubFreq << " Hz"); } mMappingRunning = true; @@ -1663,7 +1729,7 @@ bool ZEDWrapperNodelet::start_obj_detect() std::string object_det_topic = object_det_topic_root + "/objects"; mPubObjDet = mNhNs.advertise(object_det_topic, 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubObjDet.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubObjDet.getTopic()); } mObjDetFilter.clear(); diff --git a/zed_wrapper/params/common.yaml b/zed_wrapper/params/common.yaml index e98bad15..93f8fbcd 100644 --- a/zed_wrapper/params/common.yaml +++ b/zed_wrapper/params/common.yaml @@ -14,7 +14,7 @@ gain: 100 # Dynamic - work exposure: 100 # Dynamic - works only if `auto_exposure_gain` is false auto_whitebalance: true # Dynamic whitebalance_temperature: 42 # Dynamic - works only if `auto_whitebalance` is false -depth_confidence: 30 # Dynamic +depth_confidence: 50 # Dynamic depth_texture_conf: 100 # Dynamic point_cloud_freq: 10.0 # Dynamic - frequency of the pointcloud publishing (equal or less to `grab_frame_rate` value) @@ -24,7 +24,7 @@ general: serial_number: 0 gpu_id: -1 base_frame: 'base_link' # must be equal to the frame_id used in the URDF file - sdk_verbose: 1 # Set verbose level of the ZED SDK + sdk_verbose: 1 # Set verbose level of the ZED SDK svo_compression: 2 # `0`: LOSSLESS, `1`: AVCHD, `2`: HEVC self_calib: true # enable/disable self calibration at starting camera_flip: 'AUTO' # camera flip mode: 'OFF', 'ON', 'AUTO' @@ -32,22 +32,21 @@ general: pub_downscale_factor: 2.0 # rescale factor used to rescale image before publishing when 'pub_resolution' is 'CUSTOM' pub_frame_rate: 15.0 # frequency of publishing of video and depth data (see SDK API "InitParameters::grab_compute_capping_fps") svo_realtime: true # if true the input SVO will be played trying to respect the original framerate eventually skipping frames, otherwise every frame will be processed respecting the `pub_frame_rate` setting - #region_of_interest: '[]' # A polygon defining the ROI where the ZED SDK perform the processing ignoring the rest. Coordinates must be normalized to '1.0' to be resolution independent. - region_of_interest: '[[0.25,0.33],[0.75,0.33],[0.75,0.5],[0.5,0.75],[0.25,0.5]]' # A polygon defining the ROI where the ZED SDK perform the processing ignoring the rest. Coordinates must be normalized to '1.0' to be resolution independent. + region_of_interest: '[]' # A polygon defining the ROI where the ZED SDK perform the processing ignoring the rest. Coordinates must be normalized to '1.0' to be resolution independent. + #region_of_interest: '[[0.25,0.33],[0.75,0.33],[0.75,0.5],[0.5,0.75],[0.25,0.5]]' # A polygon defining the ROI where the ZED SDK perform the processing ignoring the rest. Coordinates must be normalized to '1.0' to be resolution independent. 
#region_of_interest: '[[0.25,0.25],[0.75,0.25],[0.75,0.75],[0.25,0.75]]' # A polygon defining the ROI where the ZED SDK perform the processing ignoring the rest. Coordinates must be normalized to '1.0' to be resolution independent. #region_of_interest: '[[0.5,0.25],[0.75,0.5],[0.5,0.75],[0.25,0.5]]' # A polygon defining the ROI where the ZED SDK perform the processing ignoring the rest. Coordinates must be normalized to '1.0' to be resolution independent. - #video: depth: depth_mode: 'ULTRA' # 'NONE', 'PERFORMANCE', 'QUALITY', 'ULTRA', 'NEURAL' depth_stabilization: 1 # [0-100] - 0: Disabled - openni_depth_mode: false # 'false': 32bit float meters, 'true': 16bit uchar millimeters + openni_depth_mode: false # 'false': 32bit float meter units, 'true': 16bit uchar millimeter units pos_tracking: pos_tracking_enabled: true # True to enable positional tracking from start - pos_tracking_mode: 'STANDARD' # Matches the ZED SDK setting: 'QUALITY', 'STANDARD' + pos_tracking_mode: 'STANDARD' # Matches the ZED SDK setting: 'QUALITY', 'STANDARD' -> Use 'QUALITY' with 'ULTRA' depth mode for improved outdoors performance set_gravity_as_origin: true # If 'true' align the positional tracking world to imu gravity measurement. Keep the yaw from the user initial pose. imu_fusion: true # enable/disable IMU fusion. When set to false, only the optical odometry will be used. publish_tf: true # publish `odom -> base_link` TF @@ -63,7 +62,9 @@ pos_tracking: path_pub_rate: 2.0 # Camera trajectory publishing frequency path_max_count: -1 # use '-1' for unlimited path size two_d_mode: false # Force navigation on a plane. If true the Z value will be fixed to "fixed_z_value", roll and pitch to zero - fixed_z_value: 0.00 # Value to be used for Z coordinate if `two_d_mode` is true + fixed_z_value: 0.00 # Value to be used for Z coordinate if `two_d_mode` is true + depth_min_range: 0.0 # Set this value for removing fixed zones of the robot in the FoV of the camerafrom the visual odometry evaluation + set_as_static: false # If 'true' the camera will be static and not move in the environment mapping: mapping_enabled: false # True to enable mapping and fused point cloud publication From 1d36f548495b3180b48c34408d5d269219aee2d0 Mon Sep 17 00:00:00 2001 From: Walter Lucetti Date: Wed, 13 Sep 2023 11:23:28 +0200 Subject: [PATCH 10/21] Add .clang-format --- .clang-format | 65 +++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 65 insertions(+) create mode 100644 .clang-format diff --git a/.clang-format b/.clang-format new file mode 100644 index 00000000..817fd6d6 --- /dev/null +++ b/.clang-format @@ -0,0 +1,65 @@ +--- +BasedOnStyle: Google +AccessModifierOffset: -2 +ConstructorInitializerIndentWidth: 2 +AlignEscapedNewlinesLeft: false +AlignTrailingComments: true +AllowAllParametersOfDeclarationOnNextLine: false +AllowShortIfStatementsOnASingleLine: false +AllowShortLoopsOnASingleLine: false +AllowShortFunctionsOnASingleLine: None +AlwaysBreakTemplateDeclarations: true +AlwaysBreakBeforeMultilineStrings: true +BreakBeforeBinaryOperators: false +BreakBeforeTernaryOperators: false +BreakConstructorInitializersBeforeComma: true +BinPackParameters: true +ColumnLimit: 120 +ConstructorInitializerAllOnOneLineOrOnePerLine: true +DerivePointerBinding: false +PointerBindsToType: true +ExperimentalAutoDetectBinPacking: false +IndentCaseLabels: true +MaxEmptyLinesToKeep: 1 +NamespaceIndentation: None +ObjCSpaceBeforeProtocolList: true +PenaltyBreakBeforeFirstCallParameter: 19 +PenaltyBreakComment: 60 +PenaltyBreakString: 1 
+PenaltyBreakFirstLessLess: 1000 +PenaltyExcessCharacter: 1000 +PenaltyReturnTypeOnItsOwnLine: 90 +SpacesBeforeTrailingComments: 2 +Cpp11BracedListStyle: false +Standard: Auto +IndentWidth: 2 +TabWidth: 2 +UseTab: Never +IndentFunctionDeclarationAfterType: false +SpacesInParentheses: false +SpacesInAngles: false +SpaceInEmptyParentheses: false +SpacesInCStyleCastParentheses: false +SpaceAfterControlStatementKeyword: true +SpaceBeforeAssignmentOperators: true +ContinuationIndentWidth: 4 +SortIncludes: false +SpaceAfterCStyleCast: false + +# Configure each individual brace in BraceWrapping +BreakBeforeBraces: Custom + +# Control of individual brace wrapping cases +BraceWrapping: { + AfterClass: 'true' + AfterControlStatement: 'true' + AfterEnum : 'true' + AfterFunction : 'true' + AfterNamespace : 'true' + AfterStruct : 'true' + AfterUnion : 'true' + BeforeCatch : 'true' + BeforeElse : 'true' + IndentBraces : 'false' +} +... From ac61ca6775f76aafacf70d12dfa2ba6755017ec3 Mon Sep 17 00:00:00 2001 From: Walter Lucetti Date: Wed, 13 Sep 2023 11:28:19 +0200 Subject: [PATCH 11/21] Code refactoring --- .../src/rgbd_sensor_demux.cpp | 2 +- .../src/rgbd_sensor_sync.cpp | 97 +- zed_nodelets/src/tools/include/sl_tools.h | 5 +- zed_nodelets/src/tools/src/sl_tools.cpp | 92 +- .../include/zed_wrapper_nodelet.hpp | 1019 +- .../zed_nodelet/src/zed_wrapper_nodelet.cpp | 8496 +++++++++-------- 6 files changed, 5219 insertions(+), 4492 deletions(-) diff --git a/zed_nodelets/src/rgbd_sensors_demux_nodelet/src/rgbd_sensor_demux.cpp b/zed_nodelets/src/rgbd_sensors_demux_nodelet/src/rgbd_sensor_demux.cpp index 0ff9473c..f024d56c 100644 --- a/zed_nodelets/src/rgbd_sensors_demux_nodelet/src/rgbd_sensor_demux.cpp +++ b/zed_nodelets/src/rgbd_sensors_demux_nodelet/src/rgbd_sensor_demux.cpp @@ -55,7 +55,7 @@ void RgbdSensorsDemuxNodelet::onInit() NODELET_INFO_STREAM(" * Subscribed to topic: " << mSub.getTopic().c_str()); } -void RgbdSensorsDemuxNodelet::msgCallback(const zed_interfaces::RGBDSensorsPtr &msg) +void RgbdSensorsDemuxNodelet::msgCallback(const zed_interfaces::RGBDSensorsPtr& msg) { if (!msg->rgb.header.stamp.isZero()) { diff --git a/zed_nodelets/src/rgbd_sensors_sync_nodelet/src/rgbd_sensor_sync.cpp b/zed_nodelets/src/rgbd_sensors_sync_nodelet/src/rgbd_sensor_sync.cpp index 2a2046cd..b2b329b6 100644 --- a/zed_nodelets/src/rgbd_sensors_sync_nodelet/src/rgbd_sensor_sync.cpp +++ b/zed_nodelets/src/rgbd_sensors_sync_nodelet/src/rgbd_sensor_sync.cpp @@ -209,10 +209,10 @@ void RgbdSensorsSyncNodelet::readParameters() NODELET_INFO(" * sub_mag -> %s", mUseMag ? "true" : "false"); } -void RgbdSensorsSyncNodelet::callbackRGBD(const sensor_msgs::ImageConstPtr &rgb, - const sensor_msgs::ImageConstPtr &depth, - const sensor_msgs::CameraInfoConstPtr &rgbCameraInfo, - const sensor_msgs::CameraInfoConstPtr &depthCameraInfo) +void RgbdSensorsSyncNodelet::callbackRGBD(const sensor_msgs::ImageConstPtr& rgb, + const sensor_msgs::ImageConstPtr& depth, + const sensor_msgs::CameraInfoConstPtr& rgbCameraInfo, + const sensor_msgs::CameraInfoConstPtr& depthCameraInfo) { // ----> Frequency calculation static std::chrono::steady_clock::time_point last_time = std::chrono::steady_clock::now(); @@ -251,20 +251,22 @@ void RgbdSensorsSyncNodelet::callbackRGBD(const sensor_msgs::ImageConstPtr &rgb, if (rgbStamp != rgb->header.stamp.toSec()) { - NODELET_ERROR("Input stamps changed between the beginning and the end of the callback! Make " - "sure the node publishing the topics doesn't override the same data after publishing them. 
A " - "solution is to use this node within another nodelet manager. "); - NODELET_ERROR("Stamps: " - "rgb=%f->%f", - rgbStamp, rgb->header.stamp.toSec()); + NODELET_ERROR( + "Input stamps changed between the beginning and the end of the callback! Make " + "sure the node publishing the topics doesn't override the same data after publishing them. A " + "solution is to use this node within another nodelet manager. "); + NODELET_ERROR( + "Stamps: " + "rgb=%f->%f", + rgbStamp, rgb->header.stamp.toSec()); } } -void RgbdSensorsSyncNodelet::callbackRGBDIMU(const sensor_msgs::ImageConstPtr &rgb, - const sensor_msgs::ImageConstPtr &depth, - const sensor_msgs::CameraInfoConstPtr &rgbCameraInfo, - const sensor_msgs::CameraInfoConstPtr &depthCameraInfo, - const sensor_msgs::ImuConstPtr &imu) +void RgbdSensorsSyncNodelet::callbackRGBDIMU(const sensor_msgs::ImageConstPtr& rgb, + const sensor_msgs::ImageConstPtr& depth, + const sensor_msgs::CameraInfoConstPtr& rgbCameraInfo, + const sensor_msgs::CameraInfoConstPtr& depthCameraInfo, + const sensor_msgs::ImuConstPtr& imu) { // ----> Frequency calculation static std::chrono::steady_clock::time_point last_time = std::chrono::steady_clock::now(); @@ -309,20 +311,22 @@ void RgbdSensorsSyncNodelet::callbackRGBDIMU(const sensor_msgs::ImageConstPtr &r if (rgbStamp != rgb->header.stamp.toSec() || imuStamp != imu->header.stamp.toSec()) { - NODELET_ERROR("Input stamps changed between the beginning and the end of the callback! Make " - "sure the node publishing the topics doesn't override the same data after publishing them. A " - "solution is to use this node within another nodelet manager. "); - NODELET_ERROR("Stamps: " - "rgb=%f->%f IMU=%f->%f", - rgbStamp, rgb->header.stamp.toSec(), imuStamp, imu->header.stamp.toSec()); + NODELET_ERROR( + "Input stamps changed between the beginning and the end of the callback! Make " + "sure the node publishing the topics doesn't override the same data after publishing them. A " + "solution is to use this node within another nodelet manager. "); + NODELET_ERROR( + "Stamps: " + "rgb=%f->%f IMU=%f->%f", + rgbStamp, rgb->header.stamp.toSec(), imuStamp, imu->header.stamp.toSec()); } } -void RgbdSensorsSyncNodelet::callbackRGBDMag(const sensor_msgs::ImageConstPtr &rgb, - const sensor_msgs::ImageConstPtr &depth, - const sensor_msgs::CameraInfoConstPtr &rgbCameraInfo, - const sensor_msgs::CameraInfoConstPtr &depthCameraInfo, - const sensor_msgs::MagneticFieldConstPtr &mag) +void RgbdSensorsSyncNodelet::callbackRGBDMag(const sensor_msgs::ImageConstPtr& rgb, + const sensor_msgs::ImageConstPtr& depth, + const sensor_msgs::CameraInfoConstPtr& rgbCameraInfo, + const sensor_msgs::CameraInfoConstPtr& depthCameraInfo, + const sensor_msgs::MagneticFieldConstPtr& mag) { // ----> Frequency calculation static std::chrono::steady_clock::time_point last_time = std::chrono::steady_clock::now(); @@ -367,21 +371,23 @@ void RgbdSensorsSyncNodelet::callbackRGBDMag(const sensor_msgs::ImageConstPtr &r if (rgbStamp != rgb->header.stamp.toSec() || magStamp != mag->header.stamp.toSec()) { - NODELET_ERROR("Input stamps changed between the beginning and the end of the callback! Make " - "sure the node publishing the topics doesn't override the same data after publishing them. A " - "solution is to use this node within another nodelet manager. 
"); - NODELET_ERROR("Stamps: " - "rgb=%f->%f MAG=%f->%f", - rgbStamp, rgb->header.stamp.toSec(), magStamp, mag->header.stamp.toSec()); + NODELET_ERROR( + "Input stamps changed between the beginning and the end of the callback! Make " + "sure the node publishing the topics doesn't override the same data after publishing them. A " + "solution is to use this node within another nodelet manager. "); + NODELET_ERROR( + "Stamps: " + "rgb=%f->%f MAG=%f->%f", + rgbStamp, rgb->header.stamp.toSec(), magStamp, mag->header.stamp.toSec()); } } -void RgbdSensorsSyncNodelet::callbackFull(const sensor_msgs::ImageConstPtr &rgb, - const sensor_msgs::ImageConstPtr &depth, - const sensor_msgs::CameraInfoConstPtr &rgbCameraInfo, - const sensor_msgs::CameraInfoConstPtr &depthCameraInfo, - const sensor_msgs::ImuConstPtr &imu, - const sensor_msgs::MagneticFieldConstPtr &mag) +void RgbdSensorsSyncNodelet::callbackFull(const sensor_msgs::ImageConstPtr& rgb, + const sensor_msgs::ImageConstPtr& depth, + const sensor_msgs::CameraInfoConstPtr& rgbCameraInfo, + const sensor_msgs::CameraInfoConstPtr& depthCameraInfo, + const sensor_msgs::ImuConstPtr& imu, + const sensor_msgs::MagneticFieldConstPtr& mag) { // ----> Frequency calculation static std::chrono::steady_clock::time_point last_time = std::chrono::steady_clock::now(); @@ -431,13 +437,14 @@ void RgbdSensorsSyncNodelet::callbackFull(const sensor_msgs::ImageConstPtr &rgb, if (rgbStamp != rgb->header.stamp.toSec() || imuStamp != imu->header.stamp.toSec() || magStamp != mag->header.stamp.toSec()) { - NODELET_ERROR("Input stamps changed between the beginning and the end of the callback! Make " - "sure the node publishing the topics doesn't override the same data after publishing them. A " - "solution is to use this node within another nodelet manager. "); - NODELET_ERROR("Stamps: " - "rgb=%f->%f IMU=%f->%f MAG=%f->%f", - rgbStamp, rgb->header.stamp.toSec(), imuStamp, imu->header.stamp.toSec(), magStamp, - mag->header.stamp.toSec()); + NODELET_ERROR( + "Input stamps changed between the beginning and the end of the callback! Make " + "sure the node publishing the topics doesn't override the same data after publishing them. A " + "solution is to use this node within another nodelet manager. "); + NODELET_ERROR( + "Stamps: " + "rgb=%f->%f IMU=%f->%f MAG=%f->%f", + rgbStamp, rgb->header.stamp.toSec(), imuStamp, imu->header.stamp.toSec(), magStamp, mag->header.stamp.toSec()); } } diff --git a/zed_nodelets/src/tools/include/sl_tools.h b/zed_nodelets/src/tools/include/sl_tools.h index b0c38188..7e2e649f 100644 --- a/zed_nodelets/src/tools/include/sl_tools.h +++ b/zed_nodelets/src/tools/include/sl_tools.h @@ -76,14 +76,13 @@ bool isZEDX(sl::MODEL camModel); * \param poly the ROI polygon. Coordinates must be normalized from 0.0 to 1.0 * \param out_roi the `sl::Mat` containing the ROI */ -bool generateROI(const std::vector & poly, sl::Mat & out_roi); +bool generateROI(const std::vector& poly, sl::Mat& out_roi); /*! \brief Parse a vector of vector of floats from a string. * \param input * \param error_return * Syntax is [[1.0, 2.0], [3.3, 4.4, 5.5], ...] */ -std::vector> parseStringVector( - const std::string & input, std::string & error_return); +std::vector> parseStringVector(const std::string& input, std::string& error_return); /*! 
\brief sl::Mat to ros message conversion * \param imgMsgPtr : the image topic message to publish diff --git a/zed_nodelets/src/tools/src/sl_tools.cpp b/zed_nodelets/src/tools/src/sl_tools.cpp index d913f6d4..6687bbb6 100644 --- a/zed_nodelets/src/tools/src/sl_tools.cpp +++ b/zed_nodelets/src/tools/src/sl_tools.cpp @@ -39,12 +39,11 @@ bool file_exist(const std::string& name) namespace fs = std::experimental::filesystem; std::string resolveFilePath(std::string file_path) { - if(file_path.empty()) + if (file_path.empty()) { return file_path; } - std::string abs_path = file_path; if (file_path[0] == '~') { @@ -74,7 +73,7 @@ std::string resolveFilePath(std::string file_path) return std::string(); } } - else if(file_path[0] != '/') + else if (file_path[0] != '/') { fs::path current_path = fs::current_path(); abs_path = current_path.string() + "/" + file_path; @@ -123,7 +122,8 @@ ros::Time slTime2Ros(sl::Timestamp t) bool isZED(sl::MODEL camModel) { - if (camModel == sl::MODEL::ZED) { + if (camModel == sl::MODEL::ZED) + { return true; } return false; @@ -131,7 +131,8 @@ bool isZED(sl::MODEL camModel) bool isZEDM(sl::MODEL camModel) { - if (camModel == sl::MODEL::ZED_M) { + if (camModel == sl::MODEL::ZED_M) + { return true; } return false; @@ -139,10 +140,12 @@ bool isZEDM(sl::MODEL camModel) bool isZED2OrZED2i(sl::MODEL camModel) { - if (camModel == sl::MODEL::ZED2) { + if (camModel == sl::MODEL::ZED2) + { return true; } - if (camModel == sl::MODEL::ZED2i) { + if (camModel == sl::MODEL::ZED2i) + { return true; } return false; @@ -150,10 +153,12 @@ bool isZED2OrZED2i(sl::MODEL camModel) bool isZEDX(sl::MODEL camModel) { - if (camModel == sl::MODEL::ZED_X) { + if (camModel == sl::MODEL::ZED_X) + { return true; } - if (camModel == sl::MODEL::ZED_XM) { + if (camModel == sl::MODEL::ZED_XM) + { return true; } return false; @@ -351,16 +356,15 @@ std::vector split_string(const std::string& s, char seperator) return output; } -inline bool contains(std::vector & poly, sl::float2 test) +inline bool contains(std::vector& poly, sl::float2 test) { int i, j; bool c = false; const int nvert = poly.size(); - for (i = 0, j = nvert - 1; i < nvert; j = i++) { - if ( - ((poly[i].y > test.y) != (poly[j].y > test.y)) && - (test.x < - (poly[j].x - poly[i].x) * (test.y - poly[i].y) / (poly[j].y - poly[i].y) + poly[i].x)) + for (i = 0, j = nvert - 1; i < nvert; j = i++) + { + if (((poly[i].y > test.y) != (poly[j].y > test.y)) && + (test.x < (poly[j].x - poly[i].x) * (test.y - poly[i].y) / (poly[j].y - poly[i].y) + poly[i].x)) { c = !c; } @@ -368,9 +372,10 @@ inline bool contains(std::vector & poly, sl::float2 test) return c; } -bool generateROI(const std::vector & poly, sl::Mat & out_roi) +bool generateROI(const std::vector& poly, sl::Mat& out_roi) { - if (poly.size() < 3) { + if (poly.size() < 3) + { out_roi = sl::Mat(); return false; } @@ -387,15 +392,18 @@ bool generateROI(const std::vector & poly, sl::Mat & out_roi) // std::cerr << "Image resolution: " << w << "x" << h << std::endl; std::vector poly_img; size_t idx = 0; - for (auto & it : poly) { + for (auto& it : poly) + { sl::float2 pt; pt.x = it.x * w; pt.y = it.y * h; - if (pt.x >= w) { + if (pt.x >= w) + { pt.x = (w - 1); } - if (pt.y >= h) { + if (pt.y >= h) + { pt.y = (h - 1); } @@ -406,11 +414,14 @@ bool generateROI(const std::vector & poly, sl::Mat & out_roi) // <---- De-normalize coordinates // ----> Unset ROI pixels outside the polygon - //std::cerr << "Unset ROI pixels outside the polygon" << std::endl; - //std::cerr << "Set mask" << std::endl; - for (int 
v = 0; v < h; v++) { - for (int u = 0; u < w; u++) { - if (!contains(poly_img, sl::float2(u, v))) { + // std::cerr << "Unset ROI pixels outside the polygon" << std::endl; + // std::cerr << "Set mask" << std::endl; + for (int v = 0; v < h; v++) + { + for (int u = 0; u < w; u++) + { + if (!contains(poly_img, sl::float2(u, v))) + { out_roi.setValue(u, v, 0, sl::MEM::CPU); } } @@ -422,21 +433,23 @@ bool generateROI(const std::vector & poly, sl::Mat & out_roi) return true; } -std::vector> parseStringVector( - const std::string & input, std::string & error_return) +std::vector> parseStringVector(const std::string& input, std::string& error_return) { std::vector> result; std::stringstream input_ss(input); int depth = 0; std::vector current_vector; - while (!!input_ss && !input_ss.eof()) { - switch (input_ss.peek()) { + while (!!input_ss && !input_ss.eof()) + { + switch (input_ss.peek()) + { case EOF: break; case '[': depth++; - if (depth > 2) { + if (depth > 2) + { error_return = "Array depth greater than 2"; return result; } @@ -445,12 +458,14 @@ std::vector> parseStringVector( break; case ']': depth--; - if (depth < 0) { + if (depth < 0) + { error_return = "More close ] than open ["; return result; } input_ss.get(); - if (depth == 1) { + if (depth == 1) + { result.push_back(current_vector); } break; @@ -460,7 +475,8 @@ std::vector> parseStringVector( input_ss.get(); break; default: // All other characters should be part of the numbers. - if (depth != 2) { + if (depth != 2) + { std::stringstream err_ss; err_ss << "Numbers at depth other than 2. Char was '" << char(input_ss.peek()) << "'."; error_return = err_ss.str(); @@ -468,16 +484,20 @@ std::vector> parseStringVector( } float value; input_ss >> value; - if (!!input_ss) { + if (!!input_ss) + { current_vector.push_back(value); } break; } } - if (depth != 0) { + if (depth != 0) + { error_return = "Unterminated vector string."; - } else { + } + else + { error_return = ""; } diff --git a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp index 00dc5a72..73354370 100644 --- a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp +++ b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp @@ -78,130 +78,131 @@ #include #include -namespace zed_nodelets { - +namespace zed_nodelets +{ typedef enum { NATIVE, //!< Same camera grab resolution CUSTOM //!< Custom Rescale Factor } PubRes; -class ZEDWrapperNodelet : public nodelet::Nodelet { - typedef enum _dyn_params { - DATAPUB_FREQ = 0, - CONFIDENCE = 1, - TEXTURE_CONF = 2, - POINTCLOUD_FREQ = 3, - BRIGHTNESS = 4, - CONTRAST = 5, - HUE = 6, - SATURATION = 7, - SHARPNESS = 8, - GAMMA = 9, - AUTO_EXP_GAIN = 10, - GAIN = 11, - EXPOSURE = 12, - AUTO_WB = 13, - WB_TEMP = 14 - } DynParams; +class ZEDWrapperNodelet : public nodelet::Nodelet +{ + typedef enum _dyn_params + { + DATAPUB_FREQ = 0, + CONFIDENCE = 1, + TEXTURE_CONF = 2, + POINTCLOUD_FREQ = 3, + BRIGHTNESS = 4, + CONTRAST = 5, + HUE = 6, + SATURATION = 7, + SHARPNESS = 8, + GAMMA = 9, + AUTO_EXP_GAIN = 10, + GAIN = 11, + EXPOSURE = 12, + AUTO_WB = 13, + WB_TEMP = 14 + } DynParams; public: - /*! \brief Default constructor + /*! \brief Default constructor */ - ZEDWrapperNodelet(); + ZEDWrapperNodelet(); - /*! \brief \ref ZEDWrapperNodelet destructor + /*! \brief \ref ZEDWrapperNodelet destructor */ - virtual ~ZEDWrapperNodelet(); + virtual ~ZEDWrapperNodelet(); protected: - /*! \brief Initialization function called by the Nodelet base class + /*! 
\brief Initialization function called by the Nodelet base class */ - virtual void onInit(); + virtual void onInit(); - /*! \brief Reads parameters from the param server + /*! \brief Reads parameters from the param server */ - void readParameters(); + void readParameters(); - /*! \brief Reads general parameters from the param server + /*! \brief Reads general parameters from the param server */ - void readGeneralParams(); + void readGeneralParams(); - /*! \brief Reads depth parameters from the param server + /*! \brief Reads depth parameters from the param server */ - void readDepthParams(); + void readDepthParams(); - /*! \brief Reads positional tracking parameters from the param server + /*! \brief Reads positional tracking parameters from the param server */ - void readPosTrkParams(); + void readPosTrkParams(); - /*! \brief Reads spatial mapping parameters from the param server + /*! \brief Reads spatial mapping parameters from the param server */ - void readMappingParams(); + void readMappingParams(); - /*! \brief Reads object detection parameters from the param server + /*! \brief Reads object detection parameters from the param server */ - void readObjDetParams(); + void readObjDetParams(); - /*! \brief Reads sensors parameters from the param server + /*! \brief Reads sensors parameters from the param server */ - void readSensParams(); + void readSensParams(); - /*! \brief Reads SVO parameters from the param server + /*! \brief Reads SVO parameters from the param server */ - void readSvoParams(); + void readSvoParams(); - /*! \brief Reads dynamic parameters from the param server + /*! \brief Reads dynamic parameters from the param server */ - void readDynParams(); + void readDynParams(); - /*! \brief ZED camera polling thread function + /*! \brief ZED camera polling thread function */ - void device_poll_thread_func(); + void device_poll_thread_func(); - /*! \brief Pointcloud publishing function + /*! \brief Pointcloud publishing function */ - void pointcloud_thread_func(); + void pointcloud_thread_func(); - /*! \brief Sensors data publishing function + /*! \brief Sensors data publishing function */ - void sensors_thread_func(); + void sensors_thread_func(); - /*! \brief Publish the pose of the camera in "Map" frame with a ros Publisher + /*! \brief Publish the pose of the camera in "Map" frame with a ros Publisher * \param t : the ros::Time to stamp the image */ - void - publishPose(ros::Time t); + void publishPose(ros::Time t); - /*! \brief Publish the pose of the camera in "Odom" frame with a ros Publisher + /*! \brief Publish the pose of the camera in "Odom" frame with a ros Publisher * \param base2odomTransf : Transformation representing the camera pose * from base frame to odom frame * \param slPose : latest odom pose from ZED SDK * \param t : the ros::Time to stamp the image */ - void publishOdom(tf2::Transform odom2baseTransf, sl::Pose& slPose, ros::Time t); + void publishOdom(tf2::Transform odom2baseTransf, sl::Pose& slPose, ros::Time t); - /*! \brief Publish the pose of the camera in "Map" frame as a transformation + /*! \brief Publish the pose of the camera in "Map" frame as a transformation * \param baseTransform : Transformation representing the camera pose from * odom frame to map frame * \param t : the ros::Time to stamp the image */ - void publishPoseFrame(tf2::Transform baseTransform, ros::Time t); + void publishPoseFrame(tf2::Transform baseTransform, ros::Time t); - /*! \brief Publish the odometry of the camera in "Odom" frame as a + /*! 
\brief Publish the odometry of the camera in "Odom" frame as a * transformation * \param odomTransf : Transformation representing the camera pose from * base frame to odom frame * \param t : the ros::Time to stamp the image */ - void publishOdomFrame(tf2::Transform odomTransf, ros::Time t); + void publishOdomFrame(tf2::Transform odomTransf, ros::Time t); - /*! + /*! * \brief Publish IMU frame once as static TF */ - void publishStaticImuFrame(); + void publishStaticImuFrame(); - /*! \brief Publish a sl::Mat image with a ros Publisher + /*! \brief Publish a sl::Mat image with a ros Publisher * \param imgMsgPtr : the image message to publish * \param img : the image to publish * \param pubImg : the publisher object to use (different image publishers @@ -211,48 +212,48 @@ class ZEDWrapperNodelet : public nodelet::Nodelet { * image frames exist) * \param t : the ros::Time to stamp the image */ - void publishImage(sensor_msgs::ImagePtr imgMsgPtr, sl::Mat img, image_transport::CameraPublisher& pubImg, - sensor_msgs::CameraInfoPtr camInfoMsg, std::string imgFrameId, ros::Time t); + void publishImage(sensor_msgs::ImagePtr imgMsgPtr, sl::Mat img, image_transport::CameraPublisher& pubImg, + sensor_msgs::CameraInfoPtr camInfoMsg, std::string imgFrameId, ros::Time t); - /*! \brief Publish a sl::Mat depth image with a ros Publisher + /*! \brief Publish a sl::Mat depth image with a ros Publisher * \param imgMsgPtr : the depth image topic message to publish * \param depth : the depth image to publish * \param t : the ros::Time to stamp the depth image */ - void publishDepth(sensor_msgs::ImagePtr imgMsgPtr, sl::Mat depth, ros::Time t); + void publishDepth(sensor_msgs::ImagePtr imgMsgPtr, sl::Mat depth, ros::Time t); - /*! \brief Publish a single pointCloud with a ros Publisher + /*! \brief Publish a single pointCloud with a ros Publisher */ - void publishPointCloud(); + void publishPointCloud(); - /*! \brief Publish a fused pointCloud with a ros Publisher + /*! \brief Publish a fused pointCloud with a ros Publisher */ - void callback_pubFusedPointCloud(const ros::TimerEvent& e); + void callback_pubFusedPointCloud(const ros::TimerEvent& e); - /*! - * @brief Publish Color and Depth images - */ - void pubVideoDepth(); + /*! + * @brief Publish Color and Depth images + */ + void pubVideoDepth(); - /*! \brief Publish the informations of a camera with a ros Publisher + /*! \brief Publish the informations of a camera with a ros Publisher * \param cam_info_msg : the information message to publish * \param pub_cam_info : the publisher object to use * \param t : the ros::Time to stamp the message */ - void publishCamInfo(sensor_msgs::CameraInfoPtr camInfoMsg, ros::Publisher pubCamInfo, ros::Time t); + void publishCamInfo(sensor_msgs::CameraInfoPtr camInfoMsg, ros::Publisher pubCamInfo, ros::Time t); - /*! \brief Publish a sl::Mat disparity image with a ros Publisher + /*! \brief Publish a sl::Mat disparity image with a ros Publisher * \param disparity : the disparity image to publish * \param t : the ros::Time to stamp the depth image */ - void publishDisparity(sl::Mat disparity, ros::Time t); + void publishDisparity(sl::Mat disparity, ros::Time t); - /*! \brief Publish sensors data and TF + /*! \brief Publish sensors data and TF * \param t : the ros::Time to stamp the depth image */ - void publishSensData(ros::Time t = ros::Time(0)); + void publishSensData(ros::Time t = ros::Time(0)); - /*! \brief Get the information of the ZED cameras and store them in an + /*! 
\brief Get the information of the ZED cameras and store them in an * information message * \param zed : the sl::zed::Camera* pointer to an instance * \param left_cam_info_msg : the information message to fill with the left @@ -262,525 +263,523 @@ class ZEDWrapperNodelet : public nodelet::Nodelet { * \param left_frame_id : the id of the reference frame of the left camera * \param right_frame_id : the id of the reference frame of the right camera */ - void fillCamInfo(sl::Camera& zed, sensor_msgs::CameraInfoPtr leftCamInfoMsg, - sensor_msgs::CameraInfoPtr rightCamInfoMsg, std::string leftFrameId, std::string rightFrameId, - bool rawParam = false); + void fillCamInfo(sl::Camera& zed, sensor_msgs::CameraInfoPtr leftCamInfoMsg, + sensor_msgs::CameraInfoPtr rightCamInfoMsg, std::string leftFrameId, std::string rightFrameId, + bool rawParam = false); - /*! \brief Get the information of the ZED cameras and store them in an + /*! \brief Get the information of the ZED cameras and store them in an * information message for depth topics * \param zed : the sl::zed::Camera* pointer to an instance * \param depth_info_msg : the information message to fill with the left * camera informations * \param frame_id : the id of the reference frame of the left camera */ - void fillCamDepthInfo(sl::Camera& zed, sensor_msgs::CameraInfoPtr depth_info_msg, std::string frame_id); + void fillCamDepthInfo(sl::Camera& zed, sensor_msgs::CameraInfoPtr depth_info_msg, std::string frame_id); - - /*! \brief Check if FPS and Resolution chosen by user are correct. + /*! \brief Check if FPS and Resolution chosen by user are correct. * Modifies FPS to match correct value. */ - void checkResolFps(); + void checkResolFps(); - // ----> Region of Interest - std::string getRoiParam(std::string paramName, std::vector> & outVal); - std::string parseRoiPoly( - const std::vector> & in_poly, std::vector & out_poly); - void resetRoi(); - // <---- Region of Interest + // ----> Region of Interest + std::string getRoiParam(std::string paramName, std::vector>& outVal); + std::string parseRoiPoly(const std::vector>& in_poly, std::vector& out_poly); + void resetRoi(); + // <---- Region of Interest - /*! \brief Callback to handle dynamic reconfigure events in ROS + /*! \brief Callback to handle dynamic reconfigure events in ROS */ - void callback_dynamicReconf(zed_nodelets::ZedConfig& config, uint32_t level); + void callback_dynamicReconf(zed_nodelets::ZedConfig& config, uint32_t level); - /*! \brief Callback to publish Path data with a ROS publisher. + /*! \brief Callback to publish Path data with a ROS publisher. * \param e : the ros::TimerEvent binded to the callback */ - void callback_pubPath(const ros::TimerEvent& e); + void callback_pubPath(const ros::TimerEvent& e); - /*! \brief Callback to update node diagnostic status + /*! \brief Callback to update node diagnostic status * \param stat : node status */ - void callback_updateDiagnostic(diagnostic_updater::DiagnosticStatusWrapper& stat); + void callback_updateDiagnostic(diagnostic_updater::DiagnosticStatusWrapper& stat); - /*! \brief Callback to receive geometry_msgs::PointStamped topics + /*! \brief Callback to receive geometry_msgs::PointStamped topics * \param msg : pointer to the received message */ - void clickedPtCallback(geometry_msgs::PointStampedConstPtr msg); + void clickedPtCallback(geometry_msgs::PointStampedConstPtr msg); - /*! \brief Service callback to reset_tracking service + /*! 
\brief Service callback to reset_tracking service * Tracking pose is reinitialized to the value available in the ROS Param * server */ - bool on_reset_tracking(zed_interfaces::reset_tracking::Request& req, zed_interfaces::reset_tracking::Response& res); + bool on_reset_tracking(zed_interfaces::reset_tracking::Request& req, zed_interfaces::reset_tracking::Response& res); - /*! \brief Service callback to reset_odometry service + /*! \brief Service callback to reset_odometry service * Odometry is reset to clear drift and odometry frame gets the latest * pose * from ZED tracking. */ - bool on_reset_odometry(zed_interfaces::reset_odometry::Request& req, zed_interfaces::reset_odometry::Response& res); + bool on_reset_odometry(zed_interfaces::reset_odometry::Request& req, zed_interfaces::reset_odometry::Response& res); - /*! \brief Service callback to set_pose service + /*! \brief Service callback to set_pose service * Tracking pose is set to the new values */ - bool on_set_pose(zed_interfaces::set_pose::Request& req, zed_interfaces::set_pose::Response& res); + bool on_set_pose(zed_interfaces::set_pose::Request& req, zed_interfaces::set_pose::Response& res); - /*! \brief Service callback to start_svo_recording service + /*! \brief Service callback to start_svo_recording service */ - bool on_start_svo_recording(zed_interfaces::start_svo_recording::Request& req, - zed_interfaces::start_svo_recording::Response& res); + bool on_start_svo_recording(zed_interfaces::start_svo_recording::Request& req, + zed_interfaces::start_svo_recording::Response& res); - /*! \brief Service callback to stop_svo_recording service + /*! \brief Service callback to stop_svo_recording service */ - bool on_stop_svo_recording(zed_interfaces::stop_svo_recording::Request& req, - zed_interfaces::stop_svo_recording::Response& res); + bool on_stop_svo_recording(zed_interfaces::stop_svo_recording::Request& req, + zed_interfaces::stop_svo_recording::Response& res); - /*! \brief Service callback to start_remote_stream service + /*! \brief Service callback to start_remote_stream service */ - bool on_start_remote_stream(zed_interfaces::start_remote_stream::Request& req, - zed_interfaces::start_remote_stream::Response& res); + bool on_start_remote_stream(zed_interfaces::start_remote_stream::Request& req, + zed_interfaces::start_remote_stream::Response& res); - /*! \brief Service callback to stop_remote_stream service + /*! \brief Service callback to stop_remote_stream service */ - bool on_stop_remote_stream(zed_interfaces::stop_remote_stream::Request& req, - zed_interfaces::stop_remote_stream::Response& res); + bool on_stop_remote_stream(zed_interfaces::stop_remote_stream::Request& req, + zed_interfaces::stop_remote_stream::Response& res); - /*! \brief Service callback to set_led_status service + /*! \brief Service callback to set_led_status service */ - bool on_set_led_status(zed_interfaces::set_led_status::Request& req, zed_interfaces::set_led_status::Response& res); + bool on_set_led_status(zed_interfaces::set_led_status::Request& req, zed_interfaces::set_led_status::Response& res); - /*! \brief Service callback to toggle_led service + /*! \brief Service callback to toggle_led service */ - bool on_toggle_led(zed_interfaces::toggle_led::Request& req, zed_interfaces::toggle_led::Response& res); + bool on_toggle_led(zed_interfaces::toggle_led::Request& req, zed_interfaces::toggle_led::Response& res); - /*! \brief Service callback to start_3d_mapping service + /*! 
\brief Service callback to start_3d_mapping service */ - bool on_start_3d_mapping(zed_interfaces::start_3d_mapping::Request& req, - zed_interfaces::start_3d_mapping::Response& res); + bool on_start_3d_mapping(zed_interfaces::start_3d_mapping::Request& req, + zed_interfaces::start_3d_mapping::Response& res); - /*! \brief Service callback to stop_3d_mapping service + /*! \brief Service callback to stop_3d_mapping service */ - bool on_stop_3d_mapping(zed_interfaces::stop_3d_mapping::Request& req, - zed_interfaces::stop_3d_mapping::Response& res); + bool on_stop_3d_mapping(zed_interfaces::stop_3d_mapping::Request& req, + zed_interfaces::stop_3d_mapping::Response& res); - /*! \brief Service callback to save_3d_map service + /*! \brief Service callback to save_3d_map service */ - bool on_save_3d_map(zed_interfaces::save_3d_map::Request& req, zed_interfaces::save_3d_map::Response& res); + bool on_save_3d_map(zed_interfaces::save_3d_map::Request& req, zed_interfaces::save_3d_map::Response& res); - /*! \brief Service callback to start_object_detection service + /*! \brief Service callback to start_object_detection service */ - bool on_start_object_detection(zed_interfaces::start_object_detection::Request& req, - zed_interfaces::start_object_detection::Response& res); + bool on_start_object_detection(zed_interfaces::start_object_detection::Request& req, + zed_interfaces::start_object_detection::Response& res); - /*! \brief Service callback to stop_object_detection service + /*! \brief Service callback to stop_object_detection service */ - bool on_stop_object_detection(zed_interfaces::stop_object_detection::Request& req, - zed_interfaces::stop_object_detection::Response& res); + bool on_stop_object_detection(zed_interfaces::stop_object_detection::Request& req, + zed_interfaces::stop_object_detection::Response& res); - /*! \brief Service callback to save_area_memory service + /*! \brief Service callback to save_area_memory service */ - bool on_save_area_memory(zed_interfaces::save_area_memory::Request& req, - zed_interfaces::save_area_memory::Response& res); + bool on_save_area_memory(zed_interfaces::save_area_memory::Request& req, + zed_interfaces::save_area_memory::Response& res); - /*! \brief Utility to initialize the pose variables + /*! \brief Utility to initialize the pose variables */ - bool set_pose(float xt, float yt, float zt, float rr, float pr, float yr); + bool set_pose(float xt, float yt, float zt, float rr, float pr, float yr); - /*! \brief Utility to initialize the most used transforms + /*! \brief Utility to initialize the most used transforms */ - void initTransforms(); + void initTransforms(); - /*! \brief Utility to initialize the static transform from Sensor to Base + /*! \brief Utility to initialize the static transform from Sensor to Base */ - bool getSens2BaseTransform(); + bool getSens2BaseTransform(); - /*! \brief Utility to initialize the static transform from Sensor to Camera + /*! \brief Utility to initialize the static transform from Sensor to Camera */ - bool getSens2CameraTransform(); + bool getSens2CameraTransform(); - /*! \brief Utility to initialize the static transform from Camera to Base + /*! \brief Utility to initialize the static transform from Camera to Base */ - bool getCamera2BaseTransform(); + bool getCamera2BaseTransform(); - /*! \brief Start tracking + /*! \brief Start tracking */ - void start_pos_tracking(); + void start_pos_tracking(); - /*! \brief Start spatial mapping + /*! 
\brief Start spatial mapping */ - bool start_3d_mapping(); + bool start_3d_mapping(); - /*! \brief Stop spatial mapping + /*! \brief Stop spatial mapping */ - void stop_3d_mapping(); + void stop_3d_mapping(); - /*! \brief Start object detection + /*! \brief Start object detection */ - bool start_obj_detect(); + bool start_obj_detect(); - /*! \brief Stop object detection + /*! \brief Stop object detection */ - void stop_obj_detect(); + void stop_obj_detect(); - /*! \brief Publish object detection results + /*! \brief Publish object detection results */ - void processDetectedObjects(ros::Time t); + void processDetectedObjects(ros::Time t); - /*! \brief Generates an univoque color for each object class ID + /*! \brief Generates an univoque color for each object class ID */ - inline sl::float3 generateColorClass(int idx) - { - sl::float3 clr; - clr.r = static_cast(33 + (idx * 456262)); - clr.g = static_cast(233 + (idx * 1564684)); - clr.b = static_cast(133 + (idx * 76873242)); - return clr / 255.f; - } + inline sl::float3 generateColorClass(int idx) + { + sl::float3 clr; + clr.r = static_cast(33 + (idx * 456262)); + clr.g = static_cast(233 + (idx * 1564684)); + clr.b = static_cast(133 + (idx * 76873242)); + return clr / 255.f; + } - /*! \brief Update Dynamic reconfigure parameters + /*! \brief Update Dynamic reconfigure parameters */ - void updateDynamicReconfigure(); + void updateDynamicReconfigure(); - /*! \brief Save the current area map if positional tracking + /*! \brief Save the current area map if positional tracking * and area memory are active */ - bool saveAreaMap(std::string file_path, std::string* out_msg = nullptr); + bool saveAreaMap(std::string file_path, std::string* out_msg = nullptr); private: - uint64_t mFrameCount = 0; - - // SDK version - int mVerMajor; - int mVerMinor; - int mVerSubMinor; - - // ROS - ros::NodeHandle mNh; - ros::NodeHandle mNhNs; - std::thread mDevicePollThread; - std::thread mPcThread; // Point Cloud thread - std::thread mSensThread; // Sensors data thread - - bool mStopNode = false; - - // Publishers - image_transport::CameraPublisher mPubRgb; // - image_transport::CameraPublisher mPubRawRgb; // - image_transport::CameraPublisher mPubLeft; // - image_transport::CameraPublisher mPubRawLeft; // - image_transport::CameraPublisher mPubRight; // - image_transport::CameraPublisher mPubRawRight; // - image_transport::CameraPublisher mPubDepth; // - image_transport::Publisher mPubStereo; - image_transport::Publisher mPubRawStereo; - - image_transport::CameraPublisher mPubRgbGray; - image_transport::CameraPublisher mPubRawRgbGray; - image_transport::CameraPublisher mPubLeftGray; - image_transport::CameraPublisher mPubRawLeftGray; - image_transport::CameraPublisher mPubRightGray; - image_transport::CameraPublisher mPubRawRightGray; - - ros::Publisher mPubConfMap; // - ros::Publisher mPubDisparity; // - ros::Publisher mPubCloud; - ros::Publisher mPubFusedCloud; - ros::Publisher mPubPose; - ros::Publisher mPubPoseCov; - ros::Publisher mPubOdom; - ros::Publisher mPubOdomPath; - ros::Publisher mPubMapPath; - ros::Publisher mPubImu; - ros::Publisher mPubImuRaw; - ros::Publisher mPubImuTemp; - ros::Publisher mPubImuMag; - // ros::Publisher mPubImuMagRaw; - ros::Publisher mPubPressure; - ros::Publisher mPubTempL; - ros::Publisher mPubTempR; - ros::Publisher mPubCamImuTransf; - - ros::Publisher mPubMarker; // Publisher for Rviz markers - ros::Publisher mPubPlane; // Publisher for detected planes - - // Subscribers - ros::Subscriber mClickedPtSub; - - // Timers - 
ros::Timer mPathTimer; - ros::Timer mFusedPcTimer; - - // Services - ros::ServiceServer mSrvSetInitPose; - ros::ServiceServer mSrvResetOdometry; - ros::ServiceServer mSrvResetTracking; - ros::ServiceServer mSrvSvoStartRecording; - ros::ServiceServer mSrvSvoStopRecording; - ros::ServiceServer mSrvSvoStartStream; - ros::ServiceServer mSrvSvoStopStream; - ros::ServiceServer mSrvSetLedStatus; - ros::ServiceServer mSrvToggleLed; - ros::ServiceServer mSrvStartMapping; - ros::ServiceServer mSrvStopMapping; - ros::ServiceServer mSrvSave3dMap; - ros::ServiceServer mSrvStartObjDet; - ros::ServiceServer mSrvStopObjDet; - ros::ServiceServer mSrvSaveAreaMemory; - - // ----> Topics (ONLY THOSE NOT CHANGING WHILE NODE RUNS) - // Camera info - sensor_msgs::CameraInfoPtr mRgbCamInfoMsg; - sensor_msgs::CameraInfoPtr mLeftCamInfoMsg; - sensor_msgs::CameraInfoPtr mRightCamInfoMsg; - sensor_msgs::CameraInfoPtr mRgbCamInfoRawMsg; - sensor_msgs::CameraInfoPtr mLeftCamInfoRawMsg; - sensor_msgs::CameraInfoPtr mRightCamInfoRawMsg; - sensor_msgs::CameraInfoPtr mDepthCamInfoMsg; - - geometry_msgs::TransformPtr mCameraImuTransfMgs; - // <---- Topics - - // ROS TF - tf2_ros::TransformBroadcaster mTransformPoseBroadcaster; - tf2_ros::TransformBroadcaster mTransformOdomBroadcaster; - tf2_ros::StaticTransformBroadcaster mStaticTransformImuBroadcaster; - - std::string mRgbFrameId; - std::string mRgbOptFrameId; - - std::string mDepthFrameId; - std::string mDepthOptFrameId; - - std::string mDisparityFrameId; - std::string mDisparityOptFrameId; - - std::string mConfidenceFrameId; - std::string mConfidenceOptFrameId; - - std::string mCloudFrameId; - std::string mPointCloudFrameId; - - std::string mMapFrameId; - std::string mOdometryFrameId; - std::string mBaseFrameId; - std::string mCameraFrameId; - - std::string mRightCamFrameId; - std::string mRightCamOptFrameId; - std::string mLeftCamFrameId; - std::string mLeftCamOptFrameId; - std::string mImuFrameId; - - std::string mBaroFrameId; - std::string mMagFrameId; - std::string mTempLeftFrameId; - std::string mTempRightFrameId; - - bool mPublishTf; - bool mPublishMapTf; - bool mPublishImuTf; - sl::FLIP_MODE mCameraFlip; - bool mCameraSelfCalib; - - // Launch file parameters - std::string mCameraName; - sl::RESOLUTION mCamResol; - int mCamFrameRate; - double mPubFrameRate = 15.; - sl::DEPTH_MODE mDepthMode = sl::DEPTH_MODE::ULTRA; - int mGpuId; - int mZedId; - int mDepthStabilization; - std::string mAreaMemDbPath; - bool mSaveAreaMapOnClosing = true; - std::string mSvoFilepath; - bool mSvoRealtime = true; - std::string mRemoteStreamAddr; - bool mSensTimestampSync; - double mSensPubRate = 400.0; - double mPathPubRate; - int mPathMaxCount; - int mSdkVerbose=1; - std::vector> mRoiParam; - bool mSvoMode = false; - double mCamMinDepth; - double mCamMaxDepth; - std::string mClickedPtTopic; - - // Positional tracking - bool mPosTrackingEnabled = false; - sl::POSITIONAL_TRACKING_MODE mPosTrkMode = sl::POSITIONAL_TRACKING_MODE::QUALITY; - bool mPosTrackingActivated = false; - bool mPosTrackingReady = false; - bool mTwoDMode = false; - double mFixedZValue = 0.0; - bool mFloorAlignment = false; - bool mImuFusion = true; - bool mSetGravityAsOrigin = false; - - // Flags - bool mGrabActive = false; // Indicate if camera grabbing is active (at least one topic subscribed) - sl::ERROR_CODE mConnStatus; - sl::ERROR_CODE mGrabStatus; - sl::POSITIONAL_TRACKING_STATE mPosTrackingStatus; - bool mSensPublishing = false; - bool mPcPublishing = false; - bool mDepthDisabled = false; - - // Last frame time 
- ros::Time mPrevFrameTimestamp; - ros::Time mFrameTimestamp; - - // Positional Tracking variables - sl::Pose mLastZedPose; // Sensor to Map transform - sl::Transform mInitialPoseSl; - std::vector mInitialBasePose; - std::vector mOdomPath; - std::vector mMapPath; - - // IMU TF - tf2::Transform mLastImuPose; - - // TF Transforms - tf2::Transform mMap2OdomTransf; // Coordinates of the odometry frame in map frame - tf2::Transform mOdom2BaseTransf; // Coordinates of the base in odometry frame - tf2::Transform mMap2BaseTransf; // Coordinates of the base in map frame - tf2::Transform mMap2CameraTransf; // Coordinates of the camera in base frame - tf2::Transform mSensor2BaseTransf; // Coordinates of the base frame in sensor frame - tf2::Transform mSensor2CameraTransf; // Coordinates of the camera frame in sensor frame - tf2::Transform mCamera2BaseTransf; // Coordinates of the base frame in camera frame - - bool mSensor2BaseTransfValid = false; - bool mSensor2CameraTransfValid = false; - bool mCamera2BaseTransfValid = false; - bool mStaticImuFramePublished = false; - - // initialization Transform listener - boost::shared_ptr mTfBuffer; - boost::shared_ptr mTfListener; - - // Zed object - sl::InitParameters mZedParams; - sl::Camera mZed; - unsigned int mZedSerialNumber; - sl::MODEL mZedUserCamModel; // Camera model set by ROS Param - sl::MODEL mZedRealCamModel; // Real camera model by SDK API - unsigned int mCamFwVersion; // Camera FW version - unsigned int mSensFwVersion; // Sensors FW version - - // Dynamic Parameters - int mCamBrightness = 4; - int mCamContrast = 4; - int mCamHue = 0; - int mCamSaturation = 4; - int mCamSharpness = 3; - int mCamGamma = 1; - bool mCamAutoExposure = true; - int mCamGain = 100; - int mCamExposure = 100; - bool mCamAutoWB = true; - int mCamWB = 4200; - - int mCamDepthConfidence = 50; - int mCamDepthTextureConf = 100; - double mPointCloudFreq = 15.; - - PubRes mPubResolution = PubRes::NATIVE; // Use native grab resolution by default - double mCustomDownscaleFactor = 1.0; // Used to rescale data with user factor - - // flags - bool mTriggerAutoExposure = true; - bool mTriggerAutoWB = true; - bool mComputeDepth; - bool mOpenniDepthMode; // 16 bit UC data in mm else 32F in m, for more info -> http://www.ros.org/reps/rep-0118.html - bool mPoseSmoothing = false; // Always disabled. 
Enable only for AR/VR applications - bool mAreaMemory; - bool mInitOdomWithPose; - bool mResetOdom = false; - bool mUpdateDynParams = false; - bool mPublishingData = false; - - // SVO recording - bool mRecording = false; - sl::RecordingStatus mRecStatus; - sl::SVO_COMPRESSION_MODE mSvoComprMode; - - // Streaming - bool mStreaming = false; - - // Mat - int mCamWidth; - int mCamHeight; - sl::Resolution mMatResol; - - // Thread Sync - std::mutex mCloseZedMutex; - std::mutex mCamDataMutex; - std::mutex mPcMutex; - std::mutex mRecMutex; - std::mutex mPosTrkMutex; - std::mutex mDynParMutex; - std::mutex mMappingMutex; - std::mutex mObjDetMutex; - std::condition_variable mPcDataReadyCondVar; - bool mPcDataReady; - - // Point cloud variables - sl::Mat mCloud; - sl::FusedPointCloud mFusedPC; - ros::Time mPointCloudTime; - - // Dynamic reconfigure - boost::recursive_mutex mDynServerMutex; // To avoid Dynamic Reconfigure Server warning - boost::shared_ptr> mDynRecServer; - - // Diagnostic - float mTempLeft = -273.15f; - float mTempRight = -273.15f; - std::unique_ptr mElabPeriodMean_sec; - std::unique_ptr mGrabPeriodMean_usec; - std::unique_ptr mVideoDepthPeriodMean_sec; - std::unique_ptr mPcPeriodMean_usec; - std::unique_ptr mSensPeriodMean_usec; - std::unique_ptr mObjDetPeriodMean_msec; - - diagnostic_updater::Updater mDiagUpdater; // Diagnostic Updater - - // Camera IMU transform - sl::Transform mSlCamImuTransf; - geometry_msgs::TransformStamped mStaticImuTransformStamped; - - // Spatial mapping - bool mMappingEnabled; - bool mMappingRunning; - bool mMapSave = false; - float mMappingRes = 0.1; - float mMaxMappingRange = -1; - double mFusedPcPubFreq = 2.0; - - // Object Detection - bool mObjDetEnabled = false; - bool mObjDetRunning = false; - bool mObjDetTracking = true; - bool mObjDetBodyFitting = true; - float mObjDetConfidence = 50.f; - float mObjDetMaxRange = 10.f; - std::vector mObjDetFilter; - bool mObjDetPeopleEnable = true; - bool mObjDetVehiclesEnable = true; - bool mObjDetBagsEnable = true; - bool mObjDetAnimalsEnable = true; - bool mObjDetElectronicsEnable = true; - bool mObjDetFruitsEnable = true; - bool mObjDetSportsEnable = true; - - sl::OBJECT_DETECTION_MODEL mObjDetModel = sl::OBJECT_DETECTION_MODEL::MULTI_CLASS_BOX_MEDIUM; - - ros::Publisher mPubObjDet; -}; // class ZEDROSWrapperNodelet -} // namespace zed_nodelets + uint64_t mFrameCount = 0; + + // SDK version + int mVerMajor; + int mVerMinor; + int mVerSubMinor; + + // ROS + ros::NodeHandle mNh; + ros::NodeHandle mNhNs; + std::thread mDevicePollThread; + std::thread mPcThread; // Point Cloud thread + std::thread mSensThread; // Sensors data thread + + bool mStopNode = false; + + // Publishers + image_transport::CameraPublisher mPubRgb; // + image_transport::CameraPublisher mPubRawRgb; // + image_transport::CameraPublisher mPubLeft; // + image_transport::CameraPublisher mPubRawLeft; // + image_transport::CameraPublisher mPubRight; // + image_transport::CameraPublisher mPubRawRight; // + image_transport::CameraPublisher mPubDepth; // + image_transport::Publisher mPubStereo; + image_transport::Publisher mPubRawStereo; + + image_transport::CameraPublisher mPubRgbGray; + image_transport::CameraPublisher mPubRawRgbGray; + image_transport::CameraPublisher mPubLeftGray; + image_transport::CameraPublisher mPubRawLeftGray; + image_transport::CameraPublisher mPubRightGray; + image_transport::CameraPublisher mPubRawRightGray; + + ros::Publisher mPubConfMap; // + ros::Publisher mPubDisparity; // + ros::Publisher mPubCloud; + ros::Publisher 
mPubFusedCloud; + ros::Publisher mPubPose; + ros::Publisher mPubPoseCov; + ros::Publisher mPubOdom; + ros::Publisher mPubOdomPath; + ros::Publisher mPubMapPath; + ros::Publisher mPubImu; + ros::Publisher mPubImuRaw; + ros::Publisher mPubImuTemp; + ros::Publisher mPubImuMag; + // ros::Publisher mPubImuMagRaw; + ros::Publisher mPubPressure; + ros::Publisher mPubTempL; + ros::Publisher mPubTempR; + ros::Publisher mPubCamImuTransf; + + ros::Publisher mPubMarker; // Publisher for Rviz markers + ros::Publisher mPubPlane; // Publisher for detected planes + + // Subscribers + ros::Subscriber mClickedPtSub; + + // Timers + ros::Timer mPathTimer; + ros::Timer mFusedPcTimer; + + // Services + ros::ServiceServer mSrvSetInitPose; + ros::ServiceServer mSrvResetOdometry; + ros::ServiceServer mSrvResetTracking; + ros::ServiceServer mSrvSvoStartRecording; + ros::ServiceServer mSrvSvoStopRecording; + ros::ServiceServer mSrvSvoStartStream; + ros::ServiceServer mSrvSvoStopStream; + ros::ServiceServer mSrvSetLedStatus; + ros::ServiceServer mSrvToggleLed; + ros::ServiceServer mSrvStartMapping; + ros::ServiceServer mSrvStopMapping; + ros::ServiceServer mSrvSave3dMap; + ros::ServiceServer mSrvStartObjDet; + ros::ServiceServer mSrvStopObjDet; + ros::ServiceServer mSrvSaveAreaMemory; + + // ----> Topics (ONLY THOSE NOT CHANGING WHILE NODE RUNS) + // Camera info + sensor_msgs::CameraInfoPtr mRgbCamInfoMsg; + sensor_msgs::CameraInfoPtr mLeftCamInfoMsg; + sensor_msgs::CameraInfoPtr mRightCamInfoMsg; + sensor_msgs::CameraInfoPtr mRgbCamInfoRawMsg; + sensor_msgs::CameraInfoPtr mLeftCamInfoRawMsg; + sensor_msgs::CameraInfoPtr mRightCamInfoRawMsg; + sensor_msgs::CameraInfoPtr mDepthCamInfoMsg; + + geometry_msgs::TransformPtr mCameraImuTransfMgs; + // <---- Topics + + // ROS TF + tf2_ros::TransformBroadcaster mTransformPoseBroadcaster; + tf2_ros::TransformBroadcaster mTransformOdomBroadcaster; + tf2_ros::StaticTransformBroadcaster mStaticTransformImuBroadcaster; + + std::string mRgbFrameId; + std::string mRgbOptFrameId; + + std::string mDepthFrameId; + std::string mDepthOptFrameId; + + std::string mDisparityFrameId; + std::string mDisparityOptFrameId; + + std::string mConfidenceFrameId; + std::string mConfidenceOptFrameId; + + std::string mCloudFrameId; + std::string mPointCloudFrameId; + + std::string mMapFrameId; + std::string mOdometryFrameId; + std::string mBaseFrameId; + std::string mCameraFrameId; + + std::string mRightCamFrameId; + std::string mRightCamOptFrameId; + std::string mLeftCamFrameId; + std::string mLeftCamOptFrameId; + std::string mImuFrameId; + + std::string mBaroFrameId; + std::string mMagFrameId; + std::string mTempLeftFrameId; + std::string mTempRightFrameId; + + bool mPublishTf; + bool mPublishMapTf; + bool mPublishImuTf; + sl::FLIP_MODE mCameraFlip; + bool mCameraSelfCalib; + + // Launch file parameters + std::string mCameraName; + sl::RESOLUTION mCamResol; + int mCamFrameRate; + double mPubFrameRate = 15.; + sl::DEPTH_MODE mDepthMode = sl::DEPTH_MODE::ULTRA; + int mGpuId; + int mZedId; + int mDepthStabilization; + std::string mAreaMemDbPath; + bool mSaveAreaMapOnClosing = true; + std::string mSvoFilepath; + bool mSvoRealtime = true; + std::string mRemoteStreamAddr; + bool mSensTimestampSync; + double mSensPubRate = 400.0; + double mPathPubRate; + int mPathMaxCount; + int mSdkVerbose = 1; + std::vector> mRoiParam; + bool mSvoMode = false; + double mCamMinDepth; + double mCamMaxDepth; + std::string mClickedPtTopic; + + // Positional tracking + bool mPosTrackingEnabled = false; + 
sl::POSITIONAL_TRACKING_MODE mPosTrkMode = sl::POSITIONAL_TRACKING_MODE::QUALITY; + bool mPosTrackingActivated = false; + bool mPosTrackingReady = false; + bool mTwoDMode = false; + double mFixedZValue = 0.0; + bool mFloorAlignment = false; + bool mImuFusion = true; + bool mSetGravityAsOrigin = false; + + // Flags + bool mGrabActive = false; // Indicate if camera grabbing is active (at least one topic subscribed) + sl::ERROR_CODE mConnStatus; + sl::ERROR_CODE mGrabStatus; + sl::POSITIONAL_TRACKING_STATE mPosTrackingStatus; + bool mSensPublishing = false; + bool mPcPublishing = false; + bool mDepthDisabled = false; + + // Last frame time + ros::Time mPrevFrameTimestamp; + ros::Time mFrameTimestamp; + + // Positional Tracking variables + sl::Pose mLastZedPose; // Sensor to Map transform + sl::Transform mInitialPoseSl; + std::vector mInitialBasePose; + std::vector mOdomPath; + std::vector mMapPath; + + // IMU TF + tf2::Transform mLastImuPose; + + // TF Transforms + tf2::Transform mMap2OdomTransf; // Coordinates of the odometry frame in map frame + tf2::Transform mOdom2BaseTransf; // Coordinates of the base in odometry frame + tf2::Transform mMap2BaseTransf; // Coordinates of the base in map frame + tf2::Transform mMap2CameraTransf; // Coordinates of the camera in base frame + tf2::Transform mSensor2BaseTransf; // Coordinates of the base frame in sensor frame + tf2::Transform mSensor2CameraTransf; // Coordinates of the camera frame in sensor frame + tf2::Transform mCamera2BaseTransf; // Coordinates of the base frame in camera frame + + bool mSensor2BaseTransfValid = false; + bool mSensor2CameraTransfValid = false; + bool mCamera2BaseTransfValid = false; + bool mStaticImuFramePublished = false; + + // initialization Transform listener + boost::shared_ptr mTfBuffer; + boost::shared_ptr mTfListener; + + // Zed object + sl::InitParameters mZedParams; + sl::Camera mZed; + unsigned int mZedSerialNumber; + sl::MODEL mZedUserCamModel; // Camera model set by ROS Param + sl::MODEL mZedRealCamModel; // Real camera model by SDK API + unsigned int mCamFwVersion; // Camera FW version + unsigned int mSensFwVersion; // Sensors FW version + + // Dynamic Parameters + int mCamBrightness = 4; + int mCamContrast = 4; + int mCamHue = 0; + int mCamSaturation = 4; + int mCamSharpness = 3; + int mCamGamma = 1; + bool mCamAutoExposure = true; + int mCamGain = 100; + int mCamExposure = 100; + bool mCamAutoWB = true; + int mCamWB = 4200; + + int mCamDepthConfidence = 50; + int mCamDepthTextureConf = 100; + double mPointCloudFreq = 15.; + + PubRes mPubResolution = PubRes::NATIVE; // Use native grab resolution by default + double mCustomDownscaleFactor = 1.0; // Used to rescale data with user factor + + // flags + bool mTriggerAutoExposure = true; + bool mTriggerAutoWB = true; + bool mComputeDepth; + bool mOpenniDepthMode; // 16 bit UC data in mm else 32F in m, for more info -> http://www.ros.org/reps/rep-0118.html + bool mPoseSmoothing = false; // Always disabled. 
Enable only for AR/VR applications + bool mAreaMemory; + bool mInitOdomWithPose; + bool mResetOdom = false; + bool mUpdateDynParams = false; + bool mPublishingData = false; + + // SVO recording + bool mRecording = false; + sl::RecordingStatus mRecStatus; + sl::SVO_COMPRESSION_MODE mSvoComprMode; + + // Streaming + bool mStreaming = false; + + // Mat + int mCamWidth; + int mCamHeight; + sl::Resolution mMatResol; + + // Thread Sync + std::mutex mCloseZedMutex; + std::mutex mCamDataMutex; + std::mutex mPcMutex; + std::mutex mRecMutex; + std::mutex mPosTrkMutex; + std::mutex mDynParMutex; + std::mutex mMappingMutex; + std::mutex mObjDetMutex; + std::condition_variable mPcDataReadyCondVar; + bool mPcDataReady; + + // Point cloud variables + sl::Mat mCloud; + sl::FusedPointCloud mFusedPC; + ros::Time mPointCloudTime; + + // Dynamic reconfigure + boost::recursive_mutex mDynServerMutex; // To avoid Dynamic Reconfigure Server warning + boost::shared_ptr> mDynRecServer; + + // Diagnostic + float mTempLeft = -273.15f; + float mTempRight = -273.15f; + std::unique_ptr mElabPeriodMean_sec; + std::unique_ptr mGrabPeriodMean_usec; + std::unique_ptr mVideoDepthPeriodMean_sec; + std::unique_ptr mPcPeriodMean_usec; + std::unique_ptr mSensPeriodMean_usec; + std::unique_ptr mObjDetPeriodMean_msec; + + diagnostic_updater::Updater mDiagUpdater; // Diagnostic Updater + + // Camera IMU transform + sl::Transform mSlCamImuTransf; + geometry_msgs::TransformStamped mStaticImuTransformStamped; + + // Spatial mapping + bool mMappingEnabled; + bool mMappingRunning; + bool mMapSave = false; + float mMappingRes = 0.1; + float mMaxMappingRange = -1; + double mFusedPcPubFreq = 2.0; + + // Object Detection + bool mObjDetEnabled = false; + bool mObjDetRunning = false; + bool mObjDetTracking = true; + bool mObjDetBodyFitting = true; + float mObjDetConfidence = 50.f; + float mObjDetMaxRange = 10.f; + std::vector mObjDetFilter; + bool mObjDetPeopleEnable = true; + bool mObjDetVehiclesEnable = true; + bool mObjDetBagsEnable = true; + bool mObjDetAnimalsEnable = true; + bool mObjDetElectronicsEnable = true; + bool mObjDetFruitsEnable = true; + bool mObjDetSportsEnable = true; + + sl::OBJECT_DETECTION_MODEL mObjDetModel = sl::OBJECT_DETECTION_MODEL::MULTI_CLASS_BOX_MEDIUM; + + ros::Publisher mPubObjDet; +}; // class ZEDROSWrapperNodelet +} // namespace zed_nodelets #include PLUGINLIB_EXPORT_CLASS(zed_nodelets::ZEDWrapperNodelet, nodelet::Nodelet); -#endif // ZED_WRAPPER_NODELET_H +#endif // ZED_WRAPPER_NODELET_H diff --git a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp index 3e93a2fb..717357e8 100644 --- a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp +++ b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp @@ -36,7 +36,8 @@ //#define DEBUG_SENS_TS 1 -namespace zed_nodelets { +namespace zed_nodelets +{ #ifndef DEG2RAD #define DEG2RAD 0.017453293 #define RAD2DEG 57.295777937 @@ -45,1093 +46,1244 @@ namespace zed_nodelets { #define MAG_FREQ 50. #define BARO_FREQ 25. 
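For readers skimming the patch, the core behavioural change is that `general/pub_frame_rate` now feeds `InitParameters::grab_compute_capping_fps` instead of driving a separate publishing timer. Below is a minimal, self-contained sketch of that logic, condensed from the wrapper code in this patch; the free function, node handle and default values are illustrative and not part of the wrapper itself.

#include <ros/ros.h>
#include <sl/Camera.hpp>

// Sketch only: condensed from the onInit()/readGeneralParams() code in this patch.
void configureGrabCapping(ros::NodeHandle& nhNs, sl::InitParameters& initParams)
{
  int grabFrameRate = 30;     // general/grab_frame_rate
  double pubFrameRate = 15.;  // general/pub_frame_rate (no longer a dynamic parameter)

  nhNs.getParam("general/grab_frame_rate", grabFrameRate);
  nhNs.getParam("general/pub_frame_rate", pubFrameRate);

  // Publishing cannot be faster than grabbing, so the value is clamped with a warning.
  if (pubFrameRate > grabFrameRate)
  {
    pubFrameRate = grabFrameRate;
    ROS_WARN_STREAM("'general/pub_frame_rate' cannot be bigger than 'general/grab_frame_rate'. Forced to "
                    << pubFrameRate);
  }

  // The cap is applied inside the ZED SDK grab loop instead of by a parallel ROS thread/timer.
  initParams.camera_fps = grabFrameRate;
  initParams.grab_compute_capping_fps = pubFrameRate;
}

The same clamp-and-warn pattern appears in readGeneralParams() further down in this file.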
-ZEDWrapperNodelet::ZEDWrapperNodelet() - : Nodelet() +ZEDWrapperNodelet::ZEDWrapperNodelet() : Nodelet() { } ZEDWrapperNodelet::~ZEDWrapperNodelet() { - if (mDevicePollThread.joinable()) { - mDevicePollThread.join(); - } + if (mDevicePollThread.joinable()) + { + mDevicePollThread.join(); + } - if (mPcThread.joinable()) { - mPcThread.join(); - } + if (mPcThread.joinable()) + { + mPcThread.join(); + } - if (mSensThread.joinable()) { - mSensThread.join(); - } + if (mSensThread.joinable()) + { + mSensThread.join(); + } - std::cerr << "ZED Nodelet destroyed" << std::endl; + std::cerr << "ZED Nodelet destroyed" << std::endl; } void ZEDWrapperNodelet::onInit() { - // Node handlers - mNh = getMTNodeHandle(); + // Node handlers + mNh = getMTNodeHandle(); - mNhNs = getMTPrivateNodeHandle(); - mStopNode = false; - mPcDataReady = false; + mNhNs = getMTPrivateNodeHandle(); + mStopNode = false; + mPcDataReady = false; #ifndef NDEBUG - if (ros::console::set_logger_level(ROSCONSOLE_DEFAULT_NAME, ros::console::levels::Debug)) { - ros::console::notifyLoggerLevelsChanged(); - } + if (ros::console::set_logger_level(ROSCONSOLE_DEFAULT_NAME, ros::console::levels::Debug)) + { + ros::console::notifyLoggerLevelsChanged(); + } #endif - NODELET_INFO("********** Starting nodelet '%s' **********", getName().c_str()); + NODELET_INFO("********** Starting nodelet '%s' **********", getName().c_str()); - std::string ver = sl_tools::getSDKVersion(mVerMajor, mVerMinor, mVerSubMinor); - NODELET_INFO_STREAM("SDK version: " << ver); + std::string ver = sl_tools::getSDKVersion(mVerMajor, mVerMinor, mVerSubMinor); + NODELET_INFO_STREAM("SDK version: " << ver); - if (mVerMajor < 3) { - NODELET_ERROR("This version of the ZED ROS Wrapper is designed to be used with ZED SDK v3.x"); - ros::shutdown(); - raise(SIGINT); - raise(SIGABRT); - exit(-1); - } + if (mVerMajor < 3) + { + NODELET_ERROR("This version of the ZED ROS Wrapper is designed to be used with ZED SDK v3.x"); + ros::shutdown(); + raise(SIGINT); + raise(SIGABRT); + exit(-1); + } - readParameters(); - - initTransforms(); - - // Set the video topic names - std::string rgbTopicRoot = "rgb"; - std::string rightTopicRoot = "right"; - std::string leftTopicRoot = "left"; - std::string stereoTopicRoot = "stereo"; - std::string img_topic = "/image_rect_color"; - std::string img_raw_topic = "/image_raw_color"; - std::string img_gray_topic = "/image_rect_gray"; - std::string img_raw_gray_topic_ = "/image_raw_gray"; - std::string raw_suffix = "_raw"; - std::string left_topic = leftTopicRoot + img_topic; - std::string left_raw_topic = leftTopicRoot + raw_suffix + img_raw_topic; - std::string right_topic = rightTopicRoot + img_topic; - std::string right_raw_topic = rightTopicRoot + raw_suffix + img_raw_topic; - std::string rgb_topic = rgbTopicRoot + img_topic; - std::string rgb_raw_topic = rgbTopicRoot + raw_suffix + img_raw_topic; - std::string stereo_topic = stereoTopicRoot + img_topic; - std::string stereo_raw_topic = stereoTopicRoot + raw_suffix + img_raw_topic; - std::string left_gray_topic = leftTopicRoot + img_gray_topic; - std::string left_raw_gray_topic = leftTopicRoot + raw_suffix + img_raw_gray_topic_; - std::string right_gray_topic = rightTopicRoot + img_gray_topic; - std::string right_raw_gray_topic = rightTopicRoot + raw_suffix + img_raw_gray_topic_; - std::string rgb_gray_topic = rgbTopicRoot + img_gray_topic; - std::string rgb_raw_gray_topic = rgbTopicRoot + raw_suffix + img_raw_gray_topic_; - - // Set the disparity topic name - std::string disparityTopic = 
"disparity/disparity_image"; - - // Set the depth topic names - std::string depth_topic_root = "depth"; - - if (mOpenniDepthMode) { - NODELET_INFO_STREAM("Openni depth mode activated -> Units: mm, Encoding: TYPE_16UC1"); - } - depth_topic_root += "/depth_registered"; + readParameters(); + + initTransforms(); + + // Set the video topic names + std::string rgbTopicRoot = "rgb"; + std::string rightTopicRoot = "right"; + std::string leftTopicRoot = "left"; + std::string stereoTopicRoot = "stereo"; + std::string img_topic = "/image_rect_color"; + std::string img_raw_topic = "/image_raw_color"; + std::string img_gray_topic = "/image_rect_gray"; + std::string img_raw_gray_topic_ = "/image_raw_gray"; + std::string raw_suffix = "_raw"; + std::string left_topic = leftTopicRoot + img_topic; + std::string left_raw_topic = leftTopicRoot + raw_suffix + img_raw_topic; + std::string right_topic = rightTopicRoot + img_topic; + std::string right_raw_topic = rightTopicRoot + raw_suffix + img_raw_topic; + std::string rgb_topic = rgbTopicRoot + img_topic; + std::string rgb_raw_topic = rgbTopicRoot + raw_suffix + img_raw_topic; + std::string stereo_topic = stereoTopicRoot + img_topic; + std::string stereo_raw_topic = stereoTopicRoot + raw_suffix + img_raw_topic; + std::string left_gray_topic = leftTopicRoot + img_gray_topic; + std::string left_raw_gray_topic = leftTopicRoot + raw_suffix + img_raw_gray_topic_; + std::string right_gray_topic = rightTopicRoot + img_gray_topic; + std::string right_raw_gray_topic = rightTopicRoot + raw_suffix + img_raw_gray_topic_; + std::string rgb_gray_topic = rgbTopicRoot + img_gray_topic; + std::string rgb_raw_gray_topic = rgbTopicRoot + raw_suffix + img_raw_gray_topic_; + + // Set the disparity topic name + std::string disparityTopic = "disparity/disparity_image"; + + // Set the depth topic names + std::string depth_topic_root = "depth"; + + if (mOpenniDepthMode) + { + NODELET_INFO_STREAM("Openni depth mode activated -> Units: mm, Encoding: TYPE_16UC1"); + } + depth_topic_root += "/depth_registered"; - std::string pointcloud_topic = "point_cloud/cloud_registered"; + std::string pointcloud_topic = "point_cloud/cloud_registered"; - std::string pointcloud_fused_topic = "mapping/fused_cloud"; + std::string pointcloud_fused_topic = "mapping/fused_cloud"; - std::string object_det_topic_root = "obj_det"; - std::string object_det_topic = object_det_topic_root + "/objects"; + std::string object_det_topic_root = "obj_det"; + std::string object_det_topic = object_det_topic_root + "/objects"; - std::string confImgRoot = "confidence"; - std::string conf_map_topic_name = "confidence_map"; - std::string conf_map_topic = confImgRoot + "/" + conf_map_topic_name; - - // Set the positional tracking topic names - std::string poseTopic = "pose"; - std::string pose_cov_topic; - pose_cov_topic = poseTopic + "_with_covariance"; - - std::string odometryTopic = "odom"; - std::string odom_path_topic = "path_odom"; - std::string map_path_topic = "path_map"; - - // Extracted plane topics - std::string marker_topic = "plane_marker"; - std::string plane_topic = "plane"; - - // Create camera info - mRgbCamInfoMsg.reset(new sensor_msgs::CameraInfo()); - mLeftCamInfoMsg.reset(new sensor_msgs::CameraInfo()); - mRightCamInfoMsg.reset(new sensor_msgs::CameraInfo()); - mRgbCamInfoRawMsg.reset(new sensor_msgs::CameraInfo()); - mLeftCamInfoRawMsg.reset(new sensor_msgs::CameraInfo()); - mRightCamInfoRawMsg.reset(new sensor_msgs::CameraInfo()); - mDepthCamInfoMsg.reset(new sensor_msgs::CameraInfo()); - - // 
Initialization transformation listener - mTfBuffer.reset(new tf2_ros::Buffer); - mTfListener.reset(new tf2_ros::TransformListener(*mTfBuffer)); - - mZedParams.coordinate_system = sl::COORDINATE_SYSTEM::RIGHT_HANDED_Z_UP_X_FWD; - NODELET_INFO_STREAM(" * Camera coordinate system\t-> " << sl::toString(mZedParams.coordinate_system)); - - // Try to initialize the ZED - std::stringstream ss; // Input mode info for logging - if (!mSvoFilepath.empty() || !mRemoteStreamAddr.empty()) { - if (!mSvoFilepath.empty()) { - mZedParams.input.setFromSVOFile(mSvoFilepath.c_str()); - mZedParams.svo_real_time_mode = mSvoRealtime; - - // Input mode info for logging - ss << "SVO - " << mSvoFilepath.c_str(); - } else if (!mRemoteStreamAddr.empty()) { - std::vector configStream = sl_tools::split_string(mRemoteStreamAddr, ':'); - sl::String ip = sl::String(configStream.at(0).c_str()); - - if (configStream.size() == 2) { - mZedParams.input.setFromStream(ip, atoi(configStream.at(1).c_str())); - } else { - mZedParams.input.setFromStream(ip); - } + std::string confImgRoot = "confidence"; + std::string conf_map_topic_name = "confidence_map"; + std::string conf_map_topic = confImgRoot + "/" + conf_map_topic_name; - // Input mode info for logging - ss << "NETWORK STREAM - " << mRemoteStreamAddr.c_str(); - } + // Set the positional tracking topic names + std::string poseTopic = "pose"; + std::string pose_cov_topic; + pose_cov_topic = poseTopic + "_with_covariance"; - mSvoMode = true; - } else { - mZedParams.camera_fps = mCamFrameRate; - mZedParams.grab_compute_capping_fps = static_cast(mPubFrameRate); - mZedParams.camera_resolution = mCamResol; - - if (mZedSerialNumber == 0) { - mZedParams.input.setFromCameraID(mZedId); - - // Input mode info for logging - ss << "LIVE CAMERA with ID " << mZedId; - } else { - mZedParams.input.setFromSerialNumber(mZedSerialNumber); - - // Input mode info for logging - ss << "LIVE CAMERA with SN " << mZedSerialNumber; - } + std::string odometryTopic = "odom"; + std::string odom_path_topic = "path_odom"; + std::string map_path_topic = "path_map"; + + // Extracted plane topics + std::string marker_topic = "plane_marker"; + std::string plane_topic = "plane"; + + // Create camera info + mRgbCamInfoMsg.reset(new sensor_msgs::CameraInfo()); + mLeftCamInfoMsg.reset(new sensor_msgs::CameraInfo()); + mRightCamInfoMsg.reset(new sensor_msgs::CameraInfo()); + mRgbCamInfoRawMsg.reset(new sensor_msgs::CameraInfo()); + mLeftCamInfoRawMsg.reset(new sensor_msgs::CameraInfo()); + mRightCamInfoRawMsg.reset(new sensor_msgs::CameraInfo()); + mDepthCamInfoMsg.reset(new sensor_msgs::CameraInfo()); + + // Initialization transformation listener + mTfBuffer.reset(new tf2_ros::Buffer); + mTfListener.reset(new tf2_ros::TransformListener(*mTfBuffer)); + + mZedParams.coordinate_system = sl::COORDINATE_SYSTEM::RIGHT_HANDED_Z_UP_X_FWD; + NODELET_INFO_STREAM(" * Camera coordinate system\t-> " << sl::toString(mZedParams.coordinate_system)); + + // Try to initialize the ZED + std::stringstream ss; // Input mode info for logging + if (!mSvoFilepath.empty() || !mRemoteStreamAddr.empty()) + { + if (!mSvoFilepath.empty()) + { + mZedParams.input.setFromSVOFile(mSvoFilepath.c_str()); + mZedParams.svo_real_time_mode = mSvoRealtime; + + // Input mode info for logging + ss << "SVO - " << mSvoFilepath.c_str(); + } + else if (!mRemoteStreamAddr.empty()) + { + std::vector configStream = sl_tools::split_string(mRemoteStreamAddr, ':'); + sl::String ip = sl::String(configStream.at(0).c_str()); + + if (configStream.size() == 2) + { + 
mZedParams.input.setFromStream(ip, atoi(configStream.at(1).c_str())); + } + else + { + mZedParams.input.setFromStream(ip); + } + + // Input mode info for logging + ss << "NETWORK STREAM - " << mRemoteStreamAddr.c_str(); + } + + mSvoMode = true; + } + else + { + mZedParams.camera_fps = mCamFrameRate; + mZedParams.grab_compute_capping_fps = static_cast(mPubFrameRate); + mZedParams.camera_resolution = mCamResol; + + if (mZedSerialNumber == 0) + { + mZedParams.input.setFromCameraID(mZedId); + + // Input mode info for logging + ss << "LIVE CAMERA with ID " << mZedId; } + else + { + mZedParams.input.setFromSerialNumber(mZedSerialNumber); - mZedParams.async_grab_camera_recovery = true; - mZedParams.coordinate_units = sl::UNIT::METER; - mZedParams.depth_mode = static_cast(mDepthMode); - mZedParams.sdk_verbose = mSdkVerbose; - mZedParams.sdk_gpu_id = mGpuId; - mZedParams.depth_stabilization = mDepthStabilization; - mZedParams.camera_image_flip = mCameraFlip; - mZedParams.depth_minimum_distance = static_cast(mCamMinDepth); - mZedParams.depth_maximum_distance = static_cast(mCamMaxDepth); - mZedParams.camera_disable_self_calib = !mCameraSelfCalib; + // Input mode info for logging + ss << "LIVE CAMERA with SN " << mZedSerialNumber; + } + } - mZedParams.enable_image_enhancement = true; // Always active + mZedParams.async_grab_camera_recovery = true; + mZedParams.coordinate_units = sl::UNIT::METER; + mZedParams.depth_mode = static_cast(mDepthMode); + mZedParams.sdk_verbose = mSdkVerbose; + mZedParams.sdk_gpu_id = mGpuId; + mZedParams.depth_stabilization = mDepthStabilization; + mZedParams.camera_image_flip = mCameraFlip; + mZedParams.depth_minimum_distance = static_cast(mCamMinDepth); + mZedParams.depth_maximum_distance = static_cast(mCamMaxDepth); + mZedParams.camera_disable_self_calib = !mCameraSelfCalib; - mDiagUpdater.add("ZED Diagnostic", this, &ZEDWrapperNodelet::callback_updateDiagnostic); - mDiagUpdater.setHardwareID("ZED camera"); + mZedParams.enable_image_enhancement = true; // Always active - mConnStatus = sl::ERROR_CODE::CAMERA_NOT_DETECTED; + mDiagUpdater.add("ZED Diagnostic", this, &ZEDWrapperNodelet::callback_updateDiagnostic); + mDiagUpdater.setHardwareID("ZED camera"); - NODELET_INFO_STREAM(" *** Opening " << sl::toString(mZedUserCamModel) << " - " << ss.str().c_str() << " ***"); - while (mConnStatus != sl::ERROR_CODE::SUCCESS) { - mConnStatus = mZed.open(mZedParams); - NODELET_INFO_STREAM("ZED connection: " << sl::toString(mConnStatus)); - std::this_thread::sleep_for(std::chrono::milliseconds(2000)); + mConnStatus = sl::ERROR_CODE::CAMERA_NOT_DETECTED; - if (!mNhNs.ok()) { - mStopNode = true; // Stops other threads + NODELET_INFO_STREAM(" *** Opening " << sl::toString(mZedUserCamModel) << " - " << ss.str().c_str() << " ***"); + while (mConnStatus != sl::ERROR_CODE::SUCCESS) + { + mConnStatus = mZed.open(mZedParams); + NODELET_INFO_STREAM("ZED connection: " << sl::toString(mConnStatus)); + std::this_thread::sleep_for(std::chrono::milliseconds(2000)); - std::lock_guard lock(mCloseZedMutex); - NODELET_DEBUG("Closing ZED"); - mZed.close(); + if (!mNhNs.ok()) + { + mStopNode = true; // Stops other threads - NODELET_DEBUG("ZED pool thread finished"); - return; - } + std::lock_guard lock(mCloseZedMutex); + NODELET_DEBUG("Closing ZED"); + mZed.close(); - mDiagUpdater.update(); + NODELET_DEBUG("ZED pool thread finished"); + return; } - NODELET_INFO_STREAM(" ... 
" << sl::toString(mZedRealCamModel) << " ready"); - // CUdevice devid; - cuCtxGetDevice(&mGpuId); + mDiagUpdater.update(); + } + NODELET_INFO_STREAM(" ... " << sl::toString(mZedRealCamModel) << " ready"); + + // CUdevice devid; + cuCtxGetDevice(&mGpuId); - NODELET_INFO_STREAM("ZED SDK running on GPU #" << mGpuId); + NODELET_INFO_STREAM("ZED SDK running on GPU #" << mGpuId); - // Disable AEC_AGC and Auto Whitebalance to trigger it if use set to automatic - mZed.setCameraSettings(sl::VIDEO_SETTINGS::AEC_AGC, 0); - mZed.setCameraSettings(sl::VIDEO_SETTINGS::WHITEBALANCE_AUTO, 0); + // Disable AEC_AGC and Auto Whitebalance to trigger it if use set to automatic + mZed.setCameraSettings(sl::VIDEO_SETTINGS::AEC_AGC, 0); + mZed.setCameraSettings(sl::VIDEO_SETTINGS::WHITEBALANCE_AUTO, 0); - mZedRealCamModel = mZed.getCameraInformation().camera_model; + mZedRealCamModel = mZed.getCameraInformation().camera_model; - if (mZedRealCamModel == sl::MODEL::ZED) { - if (mZedUserCamModel != mZedRealCamModel) { - NODELET_WARN("Camera model does not match user parameter. Please modify " - "the value of the parameter 'camera_model' to 'zed'"); - } - } else if (mZedRealCamModel == sl::MODEL::ZED_M) { - if (mZedUserCamModel != mZedRealCamModel) { - NODELET_WARN("Camera model does not match user parameter. Please modify " - "the value of the parameter 'camera_model' to 'zedm'"); - } - mSlCamImuTransf = mZed.getCameraInformation().sensors_configuration.camera_imu_transform; - NODELET_INFO("Camera-IMU Transform: \n %s", mSlCamImuTransf.getInfos().c_str()); - } else if (mZedRealCamModel == sl::MODEL::ZED2) { - if (mZedUserCamModel != mZedRealCamModel) { - NODELET_WARN("Camera model does not match user parameter. Please modify " - "the value of the parameter 'camera_model' to 'zed2'"); - } - mSlCamImuTransf = mZed.getCameraInformation().sensors_configuration.camera_imu_transform; - NODELET_INFO("Camera-IMU Transform: \n %s", mSlCamImuTransf.getInfos().c_str()); - } else if (mZedRealCamModel == sl::MODEL::ZED2i) { - if (mZedUserCamModel != mZedRealCamModel) { - NODELET_WARN("Camera model does not match user parameter. Please modify " - "the value of the parameter 'camera_model' to 'zed2i'"); - } - mSlCamImuTransf = mZed.getCameraInformation().sensors_configuration.camera_imu_transform; - NODELET_INFO("Camera-IMU Transform: \n %s", mSlCamImuTransf.getInfos().c_str()); - } else if (mZedRealCamModel == sl::MODEL::ZED_X) { - if (mZedUserCamModel != mZedRealCamModel) { - NODELET_WARN("Camera model does not match user parameter. Please modify " - "the value of the parameter 'camera_model' to 'zedx'"); - } - mSlCamImuTransf = mZed.getCameraInformation().sensors_configuration.camera_imu_transform; - NODELET_INFO("Camera-IMU Transform: \n %s", mSlCamImuTransf.getInfos().c_str()); - } else if (mZedRealCamModel == sl::MODEL::ZED_XM) { - if (mZedUserCamModel != mZedRealCamModel) { - NODELET_WARN("Camera model does not match user parameter. Please modify " - "the value of the parameter 'camera_model' to 'zedxm'"); - } - mSlCamImuTransf = mZed.getCameraInformation().sensors_configuration.camera_imu_transform; - NODELET_INFO("Camera-IMU Transform: \n %s", mSlCamImuTransf.getInfos().c_str()); + if (mZedRealCamModel == sl::MODEL::ZED) + { + if (mZedUserCamModel != mZedRealCamModel) + { + NODELET_WARN( + "Camera model does not match user parameter. 
Please modify " + "the value of the parameter 'camera_model' to 'zed'"); + } + } + else if (mZedRealCamModel == sl::MODEL::ZED_M) + { + if (mZedUserCamModel != mZedRealCamModel) + { + NODELET_WARN( + "Camera model does not match user parameter. Please modify " + "the value of the parameter 'camera_model' to 'zedm'"); + } + mSlCamImuTransf = mZed.getCameraInformation().sensors_configuration.camera_imu_transform; + NODELET_INFO("Camera-IMU Transform: \n %s", mSlCamImuTransf.getInfos().c_str()); + } + else if (mZedRealCamModel == sl::MODEL::ZED2) + { + if (mZedUserCamModel != mZedRealCamModel) + { + NODELET_WARN( + "Camera model does not match user parameter. Please modify " + "the value of the parameter 'camera_model' to 'zed2'"); + } + mSlCamImuTransf = mZed.getCameraInformation().sensors_configuration.camera_imu_transform; + NODELET_INFO("Camera-IMU Transform: \n %s", mSlCamImuTransf.getInfos().c_str()); + } + else if (mZedRealCamModel == sl::MODEL::ZED2i) + { + if (mZedUserCamModel != mZedRealCamModel) + { + NODELET_WARN( + "Camera model does not match user parameter. Please modify " + "the value of the parameter 'camera_model' to 'zed2i'"); + } + mSlCamImuTransf = mZed.getCameraInformation().sensors_configuration.camera_imu_transform; + NODELET_INFO("Camera-IMU Transform: \n %s", mSlCamImuTransf.getInfos().c_str()); + } + else if (mZedRealCamModel == sl::MODEL::ZED_X) + { + if (mZedUserCamModel != mZedRealCamModel) + { + NODELET_WARN( + "Camera model does not match user parameter. Please modify " + "the value of the parameter 'camera_model' to 'zedx'"); } + mSlCamImuTransf = mZed.getCameraInformation().sensors_configuration.camera_imu_transform; + NODELET_INFO("Camera-IMU Transform: \n %s", mSlCamImuTransf.getInfos().c_str()); + } + else if (mZedRealCamModel == sl::MODEL::ZED_XM) + { + if (mZedUserCamModel != mZedRealCamModel) + { + NODELET_WARN( + "Camera model does not match user parameter. 
Please modify " + "the value of the parameter 'camera_model' to 'zedxm'"); + } + mSlCamImuTransf = mZed.getCameraInformation().sensors_configuration.camera_imu_transform; + NODELET_INFO("Camera-IMU Transform: \n %s", mSlCamImuTransf.getInfos().c_str()); + } - NODELET_INFO_STREAM(" * CAMERA MODEL\t-> " << sl::toString(mZedRealCamModel).c_str()); - mZedSerialNumber = mZed.getCameraInformation().serial_number; - NODELET_INFO_STREAM(" * Serial Number: " << mZedSerialNumber); + NODELET_INFO_STREAM(" * CAMERA MODEL\t-> " << sl::toString(mZedRealCamModel).c_str()); + mZedSerialNumber = mZed.getCameraInformation().serial_number; + NODELET_INFO_STREAM(" * Serial Number: " << mZedSerialNumber); - if (!mSvoMode) { - mCamFwVersion = mZed.getCameraInformation().camera_configuration.firmware_version; - NODELET_INFO_STREAM(" * Camera FW Version: " << mCamFwVersion); - if (mZedRealCamModel != sl::MODEL::ZED) { - mSensFwVersion = mZed.getCameraInformation().sensors_configuration.firmware_version; - NODELET_INFO_STREAM(" * Sensors FW Version: " << mSensFwVersion); - } + if (!mSvoMode) + { + mCamFwVersion = mZed.getCameraInformation().camera_configuration.firmware_version; + NODELET_INFO_STREAM(" * Camera FW Version: " << mCamFwVersion); + if (mZedRealCamModel != sl::MODEL::ZED) + { + mSensFwVersion = mZed.getCameraInformation().sensors_configuration.firmware_version; + NODELET_INFO_STREAM(" * Sensors FW Version: " << mSensFwVersion); } + } + + // Set the IMU topic names using real camera model + std::string imu_topic; + std::string imu_topic_raw; + std::string imu_temp_topic; + std::string imu_mag_topic; + // std::string imu_mag_topic_raw; + std::string pressure_topic; + std::string temp_topic_root = "temperature"; + std::string temp_topic_left = temp_topic_root + "/left"; + std::string temp_topic_right = temp_topic_root + "/right"; + + if (mZedRealCamModel != sl::MODEL::ZED) + { + std::string imuTopicRoot = "imu"; + std::string imu_topic_name = "data"; + std::string imu_topic_raw_name = "data_raw"; + std::string imu_topic_mag_name = "mag"; + // std::string imu_topic_mag_raw_name = "mag_raw"; + std::string pressure_topic_name = "atm_press"; + imu_topic = imuTopicRoot + "/" + imu_topic_name; + imu_topic_raw = imuTopicRoot + "/" + imu_topic_raw_name; + imu_temp_topic = temp_topic_root + "/" + imuTopicRoot; + imu_mag_topic = imuTopicRoot + "/" + imu_topic_mag_name; + // imu_mag_topic_raw = imuTopicRoot + "/" + imu_topic_mag_raw_name; + pressure_topic = /*imuTopicRoot + "/" +*/ pressure_topic_name; + } - // Set the IMU topic names using real camera model - std::string imu_topic; - std::string imu_topic_raw; - std::string imu_temp_topic; - std::string imu_mag_topic; - // std::string imu_mag_topic_raw; - std::string pressure_topic; - std::string temp_topic_root = "temperature"; - std::string temp_topic_left = temp_topic_root + "/left"; - std::string temp_topic_right = temp_topic_root + "/right"; - - if (mZedRealCamModel != sl::MODEL::ZED) { - std::string imuTopicRoot = "imu"; - std::string imu_topic_name = "data"; - std::string imu_topic_raw_name = "data_raw"; - std::string imu_topic_mag_name = "mag"; - // std::string imu_topic_mag_raw_name = "mag_raw"; - std::string pressure_topic_name = "atm_press"; - imu_topic = imuTopicRoot + "/" + imu_topic_name; - imu_topic_raw = imuTopicRoot + "/" + imu_topic_raw_name; - imu_temp_topic = temp_topic_root + "/" + imuTopicRoot; - imu_mag_topic = imuTopicRoot + "/" + imu_topic_mag_name; - // imu_mag_topic_raw = imuTopicRoot + "/" + imu_topic_mag_raw_name; - pressure_topic = 
/*imuTopicRoot + "/" +*/ pressure_topic_name; + mDiagUpdater.setHardwareIDf("%s - s/n: %d [GPU #%d]", sl::toString(mZedRealCamModel).c_str(), mZedSerialNumber, + mGpuId); + + // ----> Dynamic Reconfigure parameters + mDynRecServer = boost::make_shared>(mDynServerMutex); + dynamic_reconfigure::Server::CallbackType f; + f = boost::bind(&ZEDWrapperNodelet::callback_dynamicReconf, this, _1, _2); + mDynRecServer->setCallback(f); + // Update parameters + zed_nodelets::ZedConfig config; + mDynRecServer->getConfigDefault(config); + mDynServerMutex.lock(); + mDynRecServer->updateConfig(config); + mDynServerMutex.unlock(); + + updateDynamicReconfigure(); + // <---- Dynamic Reconfigure parameters + + // ----> Publishers + NODELET_INFO("*** PUBLISHERS ***"); + + // Image publishers + image_transport::ImageTransport it_zed(mNhNs); + + mPubRgb = it_zed.advertiseCamera(rgb_topic, 1); // rgb + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRgb.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRgb.getInfoTopic()); + mPubRawRgb = it_zed.advertiseCamera(rgb_raw_topic, 1); // rgb raw + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawRgb.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawRgb.getInfoTopic()); + mPubLeft = it_zed.advertiseCamera(left_topic, 1); // left + NODELET_INFO_STREAM(" * Advertised on topic " << mPubLeft.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubLeft.getInfoTopic()); + mPubRawLeft = it_zed.advertiseCamera(left_raw_topic, 1); // left raw + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawLeft.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawLeft.getInfoTopic()); + mPubRight = it_zed.advertiseCamera(right_topic, 1); // right + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRight.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRight.getInfoTopic()); + mPubRawRight = it_zed.advertiseCamera(right_raw_topic, 1); // right raw + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawRight.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawRight.getInfoTopic()); + + mPubRgbGray = it_zed.advertiseCamera(rgb_gray_topic, 1); // rgb + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRgbGray.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRgbGray.getInfoTopic()); + mPubRawRgbGray = it_zed.advertiseCamera(rgb_raw_gray_topic, 1); // rgb raw + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawRgbGray.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawRgbGray.getInfoTopic()); + mPubLeftGray = it_zed.advertiseCamera(left_gray_topic, 1); // left + NODELET_INFO_STREAM(" * Advertised on topic " << mPubLeftGray.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubLeftGray.getInfoTopic()); + mPubRawLeftGray = it_zed.advertiseCamera(left_raw_gray_topic, 1); // left raw + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawLeftGray.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawLeftGray.getInfoTopic()); + mPubRightGray = it_zed.advertiseCamera(right_gray_topic, 1); // right + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRightGray.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRightGray.getInfoTopic()); + mPubRawRightGray = it_zed.advertiseCamera(right_raw_gray_topic, 1); // right raw + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawRightGray.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << 
mPubRawRightGray.getInfoTopic()); + + mPubStereo = it_zed.advertise(stereo_topic, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubStereo.getTopic()); + mPubRawStereo = it_zed.advertise(stereo_raw_topic, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawStereo.getTopic()); + + // Detected planes publisher + mPubPlane = mNhNs.advertise(plane_topic, 1); + + if (!mDepthDisabled) + { + mPubDepth = it_zed.advertiseCamera(depth_topic_root, 1); // depth + NODELET_INFO_STREAM(" * Advertised on topic " << mPubDepth.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubDepth.getInfoTopic()); + + // Confidence Map publisher + mPubConfMap = mNhNs.advertise(conf_map_topic, 1); // confidence map + NODELET_INFO_STREAM(" * Advertised on topic " << mPubConfMap.getTopic()); + + // Disparity publisher + mPubDisparity = mNhNs.advertise(disparityTopic, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubDisparity.getTopic()); + + // PointCloud publishers + mPubCloud = mNhNs.advertise(pointcloud_topic, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubCloud.getTopic()); + + if (mMappingEnabled) + { + mPubFusedCloud = mNhNs.advertise(pointcloud_fused_topic, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubFusedCloud.getTopic() << " @ " << mFusedPcPubFreq << " Hz"); } - mDiagUpdater.setHardwareIDf("%s - s/n: %d [GPU #%d]", sl::toString(mZedRealCamModel).c_str(), mZedSerialNumber, - mGpuId); - - // ----> Dynamic Reconfigure parameters - mDynRecServer = boost::make_shared>(mDynServerMutex); - dynamic_reconfigure::Server::CallbackType f; - f = boost::bind(&ZEDWrapperNodelet::callback_dynamicReconf, this, _1, _2); - mDynRecServer->setCallback(f); - // Update parameters - zed_nodelets::ZedConfig config; - mDynRecServer->getConfigDefault(config); - mDynServerMutex.lock(); - mDynRecServer->updateConfig(config); - mDynServerMutex.unlock(); - - updateDynamicReconfigure(); - // <---- Dynamic Reconfigure parameters - - // ----> Publishers - NODELET_INFO( "*** PUBLISHERS ***"); - - // Image publishers - image_transport::ImageTransport it_zed(mNhNs); - - mPubRgb = it_zed.advertiseCamera(rgb_topic, 1); // rgb - NODELET_INFO_STREAM(" * Advertised on topic " << mPubRgb.getTopic()); - NODELET_INFO_STREAM(" * Advertised on topic " << mPubRgb.getInfoTopic()); - mPubRawRgb = it_zed.advertiseCamera(rgb_raw_topic, 1); // rgb raw - NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawRgb.getTopic()); - NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawRgb.getInfoTopic()); - mPubLeft = it_zed.advertiseCamera(left_topic, 1); // left - NODELET_INFO_STREAM(" * Advertised on topic " << mPubLeft.getTopic()); - NODELET_INFO_STREAM(" * Advertised on topic " << mPubLeft.getInfoTopic()); - mPubRawLeft = it_zed.advertiseCamera(left_raw_topic, 1); // left raw - NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawLeft.getTopic()); - NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawLeft.getInfoTopic()); - mPubRight = it_zed.advertiseCamera(right_topic, 1); // right - NODELET_INFO_STREAM(" * Advertised on topic " << mPubRight.getTopic()); - NODELET_INFO_STREAM(" * Advertised on topic " << mPubRight.getInfoTopic()); - mPubRawRight = it_zed.advertiseCamera(right_raw_topic, 1); // right raw - NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawRight.getTopic()); - NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawRight.getInfoTopic()); - - mPubRgbGray = it_zed.advertiseCamera(rgb_gray_topic, 1); // rgb - NODELET_INFO_STREAM(" * Advertised on topic " << 
mPubRgbGray.getTopic()); - NODELET_INFO_STREAM(" * Advertised on topic " << mPubRgbGray.getInfoTopic()); - mPubRawRgbGray = it_zed.advertiseCamera(rgb_raw_gray_topic, 1); // rgb raw - NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawRgbGray.getTopic()); - NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawRgbGray.getInfoTopic()); - mPubLeftGray = it_zed.advertiseCamera(left_gray_topic, 1); // left - NODELET_INFO_STREAM(" * Advertised on topic " << mPubLeftGray.getTopic()); - NODELET_INFO_STREAM(" * Advertised on topic " << mPubLeftGray.getInfoTopic()); - mPubRawLeftGray = it_zed.advertiseCamera(left_raw_gray_topic, 1); // left raw - NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawLeftGray.getTopic()); - NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawLeftGray.getInfoTopic()); - mPubRightGray = it_zed.advertiseCamera(right_gray_topic, 1); // right - NODELET_INFO_STREAM(" * Advertised on topic " << mPubRightGray.getTopic()); - NODELET_INFO_STREAM(" * Advertised on topic " << mPubRightGray.getInfoTopic()); - mPubRawRightGray = it_zed.advertiseCamera(right_raw_gray_topic, 1); // right raw - NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawRightGray.getTopic()); - NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawRightGray.getInfoTopic()); - - mPubStereo = it_zed.advertise(stereo_topic, 1); - NODELET_INFO_STREAM(" * Advertised on topic " << mPubStereo.getTopic()); - mPubRawStereo = it_zed.advertise(stereo_raw_topic, 1); - NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawStereo.getTopic()); - - // Detected planes publisher - mPubPlane = mNhNs.advertise(plane_topic, 1); - - if (!mDepthDisabled) { - mPubDepth = it_zed.advertiseCamera(depth_topic_root, 1); // depth - NODELET_INFO_STREAM(" * Advertised on topic " << mPubDepth.getTopic()); - NODELET_INFO_STREAM(" * Advertised on topic " << mPubDepth.getInfoTopic()); - - // Confidence Map publisher - mPubConfMap = mNhNs.advertise(conf_map_topic, 1); // confidence map - NODELET_INFO_STREAM(" * Advertised on topic " << mPubConfMap.getTopic()); - - // Disparity publisher - mPubDisparity = mNhNs.advertise(disparityTopic, 1); - NODELET_INFO_STREAM(" * Advertised on topic " << mPubDisparity.getTopic()); - - // PointCloud publishers - mPubCloud = mNhNs.advertise(pointcloud_topic, 1); - NODELET_INFO_STREAM(" * Advertised on topic " << mPubCloud.getTopic()); - - - if (mMappingEnabled) { - mPubFusedCloud = mNhNs.advertise(pointcloud_fused_topic, 1); - NODELET_INFO_STREAM(" * Advertised on topic " << mPubFusedCloud.getTopic() << " @ " << mFusedPcPubFreq << " Hz"); - } + // Object detection publishers + if (mObjDetEnabled) + { + mPubObjDet = mNhNs.advertise(object_det_topic, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubObjDet.getTopic()); + } - // Object detection publishers - if (mObjDetEnabled) { - mPubObjDet = mNhNs.advertise(object_det_topic, 1); - NODELET_INFO_STREAM(" * Advertised on topic " << mPubObjDet.getTopic()); - } + // Odometry and Pose publisher + mPubPose = mNhNs.advertise(poseTopic, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubPose.getTopic()); - // Odometry and Pose publisher - mPubPose = mNhNs.advertise(poseTopic, 1); - NODELET_INFO_STREAM(" * Advertised on topic " << mPubPose.getTopic()); + mPubPoseCov = mNhNs.advertise(pose_cov_topic, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubPoseCov.getTopic()); - mPubPoseCov = mNhNs.advertise(pose_cov_topic, 1); - NODELET_INFO_STREAM(" * Advertised on topic " << mPubPoseCov.getTopic()); + mPubOdom = 
mNhNs.advertise(odometryTopic, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubOdom.getTopic()); - mPubOdom = mNhNs.advertise(odometryTopic, 1); - NODELET_INFO_STREAM(" * Advertised on topic " << mPubOdom.getTopic()); + // Rviz markers publisher + mPubMarker = mNhNs.advertise(marker_topic, 10, true); - // Rviz markers publisher - mPubMarker = mNhNs.advertise(marker_topic, 10, true); - - // Camera Path - if (mPathPubRate > 0) { - mPubOdomPath = mNhNs.advertise(odom_path_topic, 1, true); - NODELET_INFO_STREAM(" * Advertised on topic " << mPubOdomPath.getTopic()); - mPubMapPath = mNhNs.advertise(map_path_topic, 1, true); - NODELET_INFO_STREAM(" * Advertised on topic " << mPubMapPath.getTopic()); + // Camera Path + if (mPathPubRate > 0) + { + mPubOdomPath = mNhNs.advertise(odom_path_topic, 1, true); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubOdomPath.getTopic()); + mPubMapPath = mNhNs.advertise(map_path_topic, 1, true); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubMapPath.getTopic()); - mPathTimer = mNhNs.createTimer(ros::Duration(1.0 / mPathPubRate), &ZEDWrapperNodelet::callback_pubPath, this); + mPathTimer = mNhNs.createTimer(ros::Duration(1.0 / mPathPubRate), &ZEDWrapperNodelet::callback_pubPath, this); - if (mPathMaxCount != -1) { - NODELET_DEBUG_STREAM("Path vectors reserved " << mPathMaxCount << " poses."); - mOdomPath.reserve(mPathMaxCount); - mMapPath.reserve(mPathMaxCount); + if (mPathMaxCount != -1) + { + NODELET_DEBUG_STREAM("Path vectors reserved " << mPathMaxCount << " poses."); + mOdomPath.reserve(mPathMaxCount); + mMapPath.reserve(mPathMaxCount); - NODELET_DEBUG_STREAM("Path vector sizes: " << mOdomPath.size() << " " << mMapPath.size()); - } - } else { - NODELET_INFO_STREAM("Path topics not published -> mPathPubRate: " << mPathPubRate); - } + NODELET_DEBUG_STREAM("Path vector sizes: " << mOdomPath.size() << " " << mMapPath.size()); + } + } + else + { + NODELET_INFO_STREAM("Path topics not published -> mPathPubRate: " << mPathPubRate); } + } - // Sensor publishers - if (!sl_tools::isZED(mZedRealCamModel)) { - // IMU Publishers - mPubImu = mNhNs.advertise(imu_topic, 1); - NODELET_INFO_STREAM(" * Advertised on topic " << mPubImu.getTopic()); - mPubImuRaw = mNhNs.advertise(imu_topic_raw, 1); - NODELET_INFO_STREAM(" * Advertised on topic " << mPubImuRaw.getTopic()); - - if(mZedRealCamModel != sl::MODEL::ZED_M) { - // IMU temperature sensor - mPubImuTemp = mNhNs.advertise(imu_temp_topic, 1); - NODELET_INFO_STREAM(" * Advertised on topic " << mPubImuTemp.getTopic()); - } + // Sensor publishers + if (!sl_tools::isZED(mZedRealCamModel)) + { + // IMU Publishers + mPubImu = mNhNs.advertise(imu_topic, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubImu.getTopic()); + mPubImuRaw = mNhNs.advertise(imu_topic_raw, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubImuRaw.getTopic()); - if (mZedRealCamModel == sl::MODEL::ZED2 || mZedRealCamModel == sl::MODEL::ZED2i) { - mPubImuMag = mNhNs.advertise(imu_mag_topic, 1); - NODELET_INFO_STREAM(" * Advertised on topic " << mPubImuMag.getTopic()); + if (mZedRealCamModel != sl::MODEL::ZED_M) + { + // IMU temperature sensor + mPubImuTemp = mNhNs.advertise(imu_temp_topic, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubImuTemp.getTopic()); + } - // Atmospheric pressure - mPubPressure = mNhNs.advertise(pressure_topic, 1); - NODELET_INFO_STREAM(" * Advertised on topic " << mPubPressure.getTopic()); + if (mZedRealCamModel == sl::MODEL::ZED2 || mZedRealCamModel == sl::MODEL::ZED2i) + { + 
mPubImuMag = mNhNs.advertise(imu_mag_topic, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubImuMag.getTopic()); - // CMOS sensor temperatures - mPubTempL = mNhNs.advertise(temp_topic_left, 1); - NODELET_INFO_STREAM(" * Advertised on topic " << mPubTempL.getTopic()); - mPubTempR = mNhNs.advertise(temp_topic_right, 1); - NODELET_INFO_STREAM(" * Advertised on topic " << mPubTempR.getTopic()); - } + // Atmospheric pressure + mPubPressure = mNhNs.advertise(pressure_topic, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubPressure.getTopic()); - // Publish camera imu transform in a latched topic - if (mZedRealCamModel != sl::MODEL::ZED) { - std::string cam_imu_tr_topic = "left_cam_imu_transform"; - mPubCamImuTransf = mNhNs.advertise(cam_imu_tr_topic, 1, true); + // CMOS sensor temperatures + mPubTempL = mNhNs.advertise(temp_topic_left, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubTempL.getTopic()); + mPubTempR = mNhNs.advertise(temp_topic_right, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubTempR.getTopic()); + } - sl::Orientation sl_rot = mSlCamImuTransf.getOrientation(); - sl::Translation sl_tr = mSlCamImuTransf.getTranslation(); + // Publish camera imu transform in a latched topic + if (mZedRealCamModel != sl::MODEL::ZED) + { + std::string cam_imu_tr_topic = "left_cam_imu_transform"; + mPubCamImuTransf = mNhNs.advertise(cam_imu_tr_topic, 1, true); - mCameraImuTransfMgs = boost::make_shared(); + sl::Orientation sl_rot = mSlCamImuTransf.getOrientation(); + sl::Translation sl_tr = mSlCamImuTransf.getTranslation(); - mCameraImuTransfMgs->rotation.x = sl_rot.ox; - mCameraImuTransfMgs->rotation.y = sl_rot.oy; - mCameraImuTransfMgs->rotation.z = sl_rot.oz; - mCameraImuTransfMgs->rotation.w = sl_rot.ow; + mCameraImuTransfMgs = boost::make_shared(); - mCameraImuTransfMgs->translation.x = sl_tr.x; - mCameraImuTransfMgs->translation.y = sl_tr.y; - mCameraImuTransfMgs->translation.z = sl_tr.z; + mCameraImuTransfMgs->rotation.x = sl_rot.ox; + mCameraImuTransfMgs->rotation.y = sl_rot.oy; + mCameraImuTransfMgs->rotation.z = sl_rot.oz; + mCameraImuTransfMgs->rotation.w = sl_rot.ow; - NODELET_DEBUG("Camera-IMU Rotation: \n %s", sl_rot.getRotationMatrix().getInfos().c_str()); - NODELET_DEBUG("Camera-IMU Translation: \n %g %g %g", sl_tr.x, sl_tr.y, sl_tr.z); + mCameraImuTransfMgs->translation.x = sl_tr.x; + mCameraImuTransfMgs->translation.y = sl_tr.y; + mCameraImuTransfMgs->translation.z = sl_tr.z; - mPubCamImuTransf.publish(mCameraImuTransfMgs); + NODELET_DEBUG("Camera-IMU Rotation: \n %s", sl_rot.getRotationMatrix().getInfos().c_str()); + NODELET_DEBUG("Camera-IMU Translation: \n %g %g %g", sl_tr.x, sl_tr.y, sl_tr.z); - NODELET_INFO_STREAM(" * Advertised on topic " << mPubCamImuTransf.getTopic() << " [LATCHED]"); - } + mPubCamImuTransf.publish(mCameraImuTransfMgs); - if (!mSvoMode && !mSensTimestampSync) { - mFrameTimestamp = ros::Time::now(); - mSensPeriodMean_usec.reset(new sl_tools::CSmartMean(mSensPubRate / 2)); - } else { - mSensPeriodMean_usec.reset(new sl_tools::CSmartMean(mCamFrameRate / 2)); - } + NODELET_INFO_STREAM(" * Advertised on topic " << mPubCamImuTransf.getTopic() << " [LATCHED]"); } - // <---- Publishers - - // ----> Subscribers - NODELET_INFO( "*** SUBSCRIBERS ***"); - mClickedPtSub = mNhNs.subscribe(mClickedPtTopic, 10, &ZEDWrapperNodelet::clickedPtCallback, this); - NODELET_INFO_STREAM(" * Subscribed to topic " << mClickedPtTopic.c_str()); - // <---- Subscribers - - // ----> Services - NODELET_INFO( "*** SERVICES ***"); - if 
(!mDepthDisabled) { - mSrvSetInitPose = mNhNs.advertiseService("set_pose", &ZEDWrapperNodelet::on_set_pose, this); - NODELET_INFO_STREAM(" * Advertised on service " << mSrvSetInitPose.getService().c_str()); - mSrvResetOdometry = mNhNs.advertiseService("reset_odometry", &ZEDWrapperNodelet::on_reset_odometry, this); - NODELET_INFO_STREAM(" * Advertised on service " << mSrvResetOdometry.getService().c_str()); - mSrvResetTracking = mNhNs.advertiseService("reset_tracking", &ZEDWrapperNodelet::on_reset_tracking, this); - NODELET_INFO_STREAM(" * Advertised on service " << mSrvResetTracking.getService().c_str()); - - mSrvSaveAreaMemory = mNhNs.advertiseService("save_area_memory", &ZEDWrapperNodelet::on_save_area_memory, this); - NODELET_INFO_STREAM(" * Advertised on service " << mSrvSaveAreaMemory.getService().c_str()); - - mSrvStartMapping = mNhNs.advertiseService("start_3d_mapping", &ZEDWrapperNodelet::on_start_3d_mapping, this); - NODELET_INFO_STREAM(" * Advertised on service " << mSrvStartMapping.getService().c_str()); - mSrvStopMapping = mNhNs.advertiseService("stop_3d_mapping", &ZEDWrapperNodelet::on_stop_3d_mapping, this); - NODELET_INFO_STREAM(" * Advertised on service " << mSrvStopMapping.getService().c_str()); - mSrvSave3dMap = mNhNs.advertiseService("save_3d_map", &ZEDWrapperNodelet::on_save_3d_map, this); - NODELET_INFO_STREAM(" * Advertised on service " << mSrvSave3dMap.getService().c_str()); - - mSrvStartObjDet = mNhNs.advertiseService("start_object_detection", &ZEDWrapperNodelet::on_start_object_detection, this); - NODELET_INFO_STREAM(" * Advertised on service " << mSrvStartObjDet.getService().c_str()); - mSrvStopObjDet = mNhNs.advertiseService("stop_object_detection", &ZEDWrapperNodelet::on_stop_object_detection, this); - NODELET_INFO_STREAM(" * Advertised on service " << mSrvStopObjDet.getService().c_str()); + + if (!mSvoMode && !mSensTimestampSync) + { + mFrameTimestamp = ros::Time::now(); + mSensPeriodMean_usec.reset(new sl_tools::CSmartMean(mSensPubRate / 2)); } - - mSrvSvoStartRecording = mNhNs.advertiseService("start_svo_recording", &ZEDWrapperNodelet::on_start_svo_recording, this); - NODELET_INFO_STREAM(" * Advertised on service " << mSrvSvoStartRecording.getService().c_str()); - mSrvSvoStopRecording = mNhNs.advertiseService("stop_svo_recording", &ZEDWrapperNodelet::on_stop_svo_recording, this); - NODELET_INFO_STREAM(" * Advertised on service " << mSrvSvoStopRecording.getService().c_str()); - - mSrvSetLedStatus = mNhNs.advertiseService("set_led_status", &ZEDWrapperNodelet::on_set_led_status, this); - NODELET_INFO_STREAM(" * Advertised on service " << mSrvSetLedStatus.getService().c_str()); - mSrvToggleLed = mNhNs.advertiseService("toggle_led", &ZEDWrapperNodelet::on_toggle_led, this); - NODELET_INFO_STREAM(" * Advertised on service " << mSrvToggleLed.getService().c_str()); - - mSrvSvoStartStream = mNhNs.advertiseService("start_remote_stream", &ZEDWrapperNodelet::on_start_remote_stream, this); - NODELET_INFO_STREAM(" * Advertised on service " << mSrvSvoStartStream.getService().c_str()); - mSrvSvoStopStream = mNhNs.advertiseService("stop_remote_stream", &ZEDWrapperNodelet::on_stop_remote_stream, this); - NODELET_INFO_STREAM(" * Advertised on service " << mSrvSvoStopStream.getService().c_str()); - // <---- Services - - // ----> Threads - if (!mDepthDisabled) { - // Start Pointcloud thread - mPcThread = std::thread(&ZEDWrapperNodelet::pointcloud_thread_func, this); + else + { + mSensPeriodMean_usec.reset(new sl_tools::CSmartMean(mCamFrameRate / 2)); } + } + // <---- Publishers + 
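The header above also retypes mCameraFlip to sl::FLIP_MODE, which implies the flip setting is now read as a string rather than a bool. A possible mapping is sketched below; the parameter key and the helper function are assumptions for illustration, since the parameter-reading code for the flip mode is not part of this hunk, while the final assignment mirrors `mZedParams.camera_image_flip = mCameraFlip` above.

#include <ros/ros.h>
#include <sl/Camera.hpp>
#include <string>

// Sketch only: one way to turn a string parameter into sl::FLIP_MODE.
// The key "general/camera_flip" is assumed here for illustration.
sl::FLIP_MODE readFlipMode(ros::NodeHandle& nhNs)
{
  std::string flipStr = "AUTO";  // accepted values: 'AUTO', 'ON', 'OFF'
  nhNs.getParam("general/camera_flip", flipStr);

  if (flipStr == "ON")
    return sl::FLIP_MODE::ON;
  if (flipStr == "OFF")
    return sl::FLIP_MODE::OFF;
  if (flipStr != "AUTO")
    ROS_WARN_STREAM("Not valid 'general/camera_flip' value: '" << flipStr << "'. Using 'AUTO'.");
  return sl::FLIP_MODE::AUTO;
}

// Usage, following the pattern in onInit() above:
//   mCameraFlip = readFlipMode(mNhNs);
//   mZedParams.camera_image_flip = mCameraFlip;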
+ // ----> Subscribers + NODELET_INFO("*** SUBSCRIBERS ***"); + mClickedPtSub = mNhNs.subscribe(mClickedPtTopic, 10, &ZEDWrapperNodelet::clickedPtCallback, this); + NODELET_INFO_STREAM(" * Subscribed to topic " << mClickedPtTopic.c_str()); + // <---- Subscribers + + // ----> Services + NODELET_INFO("*** SERVICES ***"); + if (!mDepthDisabled) + { + mSrvSetInitPose = mNhNs.advertiseService("set_pose", &ZEDWrapperNodelet::on_set_pose, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvSetInitPose.getService().c_str()); + mSrvResetOdometry = mNhNs.advertiseService("reset_odometry", &ZEDWrapperNodelet::on_reset_odometry, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvResetOdometry.getService().c_str()); + mSrvResetTracking = mNhNs.advertiseService("reset_tracking", &ZEDWrapperNodelet::on_reset_tracking, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvResetTracking.getService().c_str()); + + mSrvSaveAreaMemory = mNhNs.advertiseService("save_area_memory", &ZEDWrapperNodelet::on_save_area_memory, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvSaveAreaMemory.getService().c_str()); + + mSrvStartMapping = mNhNs.advertiseService("start_3d_mapping", &ZEDWrapperNodelet::on_start_3d_mapping, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvStartMapping.getService().c_str()); + mSrvStopMapping = mNhNs.advertiseService("stop_3d_mapping", &ZEDWrapperNodelet::on_stop_3d_mapping, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvStopMapping.getService().c_str()); + mSrvSave3dMap = mNhNs.advertiseService("save_3d_map", &ZEDWrapperNodelet::on_save_3d_map, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvSave3dMap.getService().c_str()); + + mSrvStartObjDet = + mNhNs.advertiseService("start_object_detection", &ZEDWrapperNodelet::on_start_object_detection, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvStartObjDet.getService().c_str()); + mSrvStopObjDet = + mNhNs.advertiseService("stop_object_detection", &ZEDWrapperNodelet::on_stop_object_detection, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvStopObjDet.getService().c_str()); + } - // Start pool thread - mDevicePollThread = std::thread(&ZEDWrapperNodelet::device_poll_thread_func, this); + mSrvSvoStartRecording = + mNhNs.advertiseService("start_svo_recording", &ZEDWrapperNodelet::on_start_svo_recording, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvSvoStartRecording.getService().c_str()); + mSrvSvoStopRecording = mNhNs.advertiseService("stop_svo_recording", &ZEDWrapperNodelet::on_stop_svo_recording, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvSvoStopRecording.getService().c_str()); + + mSrvSetLedStatus = mNhNs.advertiseService("set_led_status", &ZEDWrapperNodelet::on_set_led_status, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvSetLedStatus.getService().c_str()); + mSrvToggleLed = mNhNs.advertiseService("toggle_led", &ZEDWrapperNodelet::on_toggle_led, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvToggleLed.getService().c_str()); + + mSrvSvoStartStream = mNhNs.advertiseService("start_remote_stream", &ZEDWrapperNodelet::on_start_remote_stream, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvSvoStartStream.getService().c_str()); + mSrvSvoStopStream = mNhNs.advertiseService("stop_remote_stream", &ZEDWrapperNodelet::on_stop_remote_stream, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvSvoStopStream.getService().c_str()); + 
// <---- Services + + // ----> Threads + if (!mDepthDisabled) + { + // Start Pointcloud thread + mPcThread = std::thread(&ZEDWrapperNodelet::pointcloud_thread_func, this); + } + + // Start pool thread + mDevicePollThread = std::thread(&ZEDWrapperNodelet::device_poll_thread_func, this); - // Start Sensors thread - mSensThread = std::thread(&ZEDWrapperNodelet::sensors_thread_func, this); - // <---- Threads + // Start Sensors thread + mSensThread = std::thread(&ZEDWrapperNodelet::sensors_thread_func, this); + // <---- Threads - NODELET_INFO("+++ ZED Node started +++"); + NODELET_INFO("+++ ZED Node started +++"); } void ZEDWrapperNodelet::readGeneralParams() { - NODELET_INFO_STREAM("*** GENERAL PARAMETERS ***"); - - std::string camera_model; - mNhNs.getParam("general/camera_model", camera_model); - - if (camera_model == "zed") { - mZedUserCamModel = sl::MODEL::ZED; - NODELET_INFO_STREAM(" * Camera Model by param\t-> " << camera_model); - } else if (camera_model == "zedm") { - mZedUserCamModel = sl::MODEL::ZED_M; - NODELET_INFO_STREAM(" * Camera Model by param\t-> " << camera_model); - } else if (camera_model == "zed2") { - mZedUserCamModel = sl::MODEL::ZED2; - NODELET_INFO_STREAM(" * Camera Model by param\t-> " << camera_model); - } else if (camera_model == "zed2i") { - mZedUserCamModel = sl::MODEL::ZED2i; - NODELET_INFO_STREAM(" * Camera Model by param\t-> " << camera_model); - } else if (camera_model == "zedx") { - mZedUserCamModel = sl::MODEL::ZED_X; - NODELET_INFO_STREAM(" * Camera Model by param\t-> " << camera_model); - } else if (camera_model == "zedxm") { - mZedUserCamModel = sl::MODEL::ZED_XM; - NODELET_INFO_STREAM(" * Camera Model by param\t-> " << camera_model); - } else { - NODELET_ERROR_STREAM("Camera model not valid: " << camera_model); - exit(EXIT_FAILURE); - } + NODELET_INFO_STREAM("*** GENERAL PARAMETERS ***"); - mNhNs.getParam("general/camera_name", mCameraName); - NODELET_INFO_STREAM(" * Camera Name\t\t\t-> " << mCameraName.c_str()); - - std::string resol="AUTO"; - mNhNs.getParam("general/grab_resolution", resol); - if (resol == "AUTO") { - mCamResol = sl::RESOLUTION::AUTO; - } else if (sl_tools::isZEDX(mZedUserCamModel)) { - if (resol == "HD1200") { - mCamResol = sl::RESOLUTION::HD1200; - } else if (resol == "HD1080") { - mCamResol = sl::RESOLUTION::HD1080; - } else if (resol == "SVGA") { - mCamResol = sl::RESOLUTION::SVGA; - } else { - NODELET_WARN("Not valid 'general.grab_resolution' value: '%s'. Using 'AUTO' setting.", resol.c_str()); - mCamResol = sl::RESOLUTION::AUTO; - } - } else { - if (resol == "HD2K") { - mCamResol = sl::RESOLUTION::HD2K; - } else if (resol == "HD1080") { - mCamResol = sl::RESOLUTION::HD1080; - } else if (resol == "HD720") { - mCamResol = sl::RESOLUTION::HD720; - } else if (resol == "VGA") { - mCamResol = sl::RESOLUTION::VGA; - } else { - NODELET_WARN("Not valid 'general.grab_resolution' value: '%s'. 
Using 'AUTO' setting.", resol.c_str()); - mCamResol = sl::RESOLUTION::AUTO; - } + std::string camera_model; + mNhNs.getParam("general/camera_model", camera_model); + + if (camera_model == "zed") + { + mZedUserCamModel = sl::MODEL::ZED; + NODELET_INFO_STREAM(" * Camera Model by param\t-> " << camera_model); + } + else if (camera_model == "zedm") + { + mZedUserCamModel = sl::MODEL::ZED_M; + NODELET_INFO_STREAM(" * Camera Model by param\t-> " << camera_model); + } + else if (camera_model == "zed2") + { + mZedUserCamModel = sl::MODEL::ZED2; + NODELET_INFO_STREAM(" * Camera Model by param\t-> " << camera_model); + } + else if (camera_model == "zed2i") + { + mZedUserCamModel = sl::MODEL::ZED2i; + NODELET_INFO_STREAM(" * Camera Model by param\t-> " << camera_model); + } + else if (camera_model == "zedx") + { + mZedUserCamModel = sl::MODEL::ZED_X; + NODELET_INFO_STREAM(" * Camera Model by param\t-> " << camera_model); + } + else if (camera_model == "zedxm") + { + mZedUserCamModel = sl::MODEL::ZED_XM; + NODELET_INFO_STREAM(" * Camera Model by param\t-> " << camera_model); + } + else + { + NODELET_ERROR_STREAM("Camera model not valid: " << camera_model); + exit(EXIT_FAILURE); + } + + mNhNs.getParam("general/camera_name", mCameraName); + NODELET_INFO_STREAM(" * Camera Name\t\t\t-> " << mCameraName.c_str()); + + std::string resol = "AUTO"; + mNhNs.getParam("general/grab_resolution", resol); + if (resol == "AUTO") + { + mCamResol = sl::RESOLUTION::AUTO; + } + else if (sl_tools::isZEDX(mZedUserCamModel)) + { + if (resol == "HD1200") + { + mCamResol = sl::RESOLUTION::HD1200; } - NODELET_INFO_STREAM(" * Camera resolution\t\t-> " << sl::toString(mCamResol).c_str()); - - mNhNs.getParam("general/grab_frame_rate", mCamFrameRate); - checkResolFps(); - NODELET_INFO_STREAM(" * Camera Grab Framerate\t-> " << mCamFrameRate); - mNhNs.getParam("general/pub_frame_rate", mPubFrameRate); - if(mPubFrameRate>mCamFrameRate) { - mPubFrameRate = mCamFrameRate; - NODELET_WARN_STREAM("'general/pub_frame_rate' cannot be bigger than 'general/grab_frame_rate'. Forced to the correct value: " << mPubFrameRate); + else if (resol == "HD1080") + { + mCamResol = sl::RESOLUTION::HD1080; } - NODELET_INFO_STREAM(" * Grab compute capping FPS\t-> " << mPubFrameRate << " Hz"); - - - std::string out_resol = "CUSTOM"; - mNhNs.getParam("general/pub_resolution", out_resol); - if (out_resol == "NATIVE") { - mPubResolution = PubRes::NATIVE; - } else if (out_resol == "CUSTOM") { - mPubResolution = PubRes::CUSTOM; - } else { - NODELET_WARN("Not valid 'general/pub_resolution' value: '%s'. Using default setting instead.", out_resol.c_str()); - out_resol = "CUSTOM"; - mPubResolution = PubRes::CUSTOM; + else if (resol == "SVGA") + { + mCamResol = sl::RESOLUTION::SVGA; } - NODELET_INFO_STREAM(" * Publishing resolution\t-> " << out_resol.c_str()); - - if (mPubResolution == PubRes::CUSTOM) { - mNhNs.getParam( "general/pub_downscale_factor", mCustomDownscaleFactor ); - NODELET_INFO_STREAM(" * Publishing downscale factor\t-> " << mCustomDownscaleFactor ); - } else { - mCustomDownscaleFactor = 1.0; + else + { + NODELET_WARN("Not valid 'general.grab_resolution' value: '%s'. 
Using 'AUTO' setting.", resol.c_str()); + mCamResol = sl::RESOLUTION::AUTO; } - - mNhNs.getParam("general/gpu_id", mGpuId); - NODELET_INFO_STREAM(" * Gpu ID\t\t\t-> " << mGpuId); - mNhNs.getParam("general/zed_id", mZedId); - NODELET_INFO_STREAM(" * Camera ID\t\t\t-> " << mZedId); - mNhNs.getParam("general/sdk_verbose", mSdkVerbose); - NODELET_INFO_STREAM(" * SDK Verbose level\t\t-> " << mSdkVerbose ); - std::string flip_str; - mNhNs.getParam("general/camera_flip", flip_str); - if(flip_str=="ON") { - mCameraFlip = sl::FLIP_MODE::ON; - } else if(flip_str=="OFF") { - mCameraFlip = sl::FLIP_MODE::OFF; - } else { - mCameraFlip = sl::FLIP_MODE::AUTO; + } + else + { + if (resol == "HD2K") + { + mCamResol = sl::RESOLUTION::HD2K; } - NODELET_INFO_STREAM(" * Camera Flip\t\t\t-> " << sl::toString(mCameraFlip).c_str()); - mNhNs.param("general/self_calib", mCameraSelfCalib, true); - NODELET_INFO_STREAM(" * Self calibration\t\t-> " << (mCameraSelfCalib ? "ENABLED": "DISABLED")); + else if (resol == "HD1080") + { + mCamResol = sl::RESOLUTION::HD1080; + } + else if (resol == "HD720") + { + mCamResol = sl::RESOLUTION::HD720; + } + else if (resol == "VGA") + { + mCamResol = sl::RESOLUTION::VGA; + } + else + { + NODELET_WARN("Not valid 'general.grab_resolution' value: '%s'. Using 'AUTO' setting.", resol.c_str()); + mCamResol = sl::RESOLUTION::AUTO; + } + } + NODELET_INFO_STREAM(" * Camera resolution\t\t-> " << sl::toString(mCamResol).c_str()); + + mNhNs.getParam("general/grab_frame_rate", mCamFrameRate); + checkResolFps(); + NODELET_INFO_STREAM(" * Camera Grab Framerate\t-> " << mCamFrameRate); + mNhNs.getParam("general/pub_frame_rate", mPubFrameRate); + if (mPubFrameRate > mCamFrameRate) + { + mPubFrameRate = mCamFrameRate; + NODELET_WARN_STREAM( + "'general/pub_frame_rate' cannot be bigger than 'general/grab_frame_rate'. Forced to the correct value: " + << mPubFrameRate); + } + NODELET_INFO_STREAM(" * Grab compute capping FPS\t-> " << mPubFrameRate << " Hz"); - int tmp_sn = 0; - mNhNs.getParam("general/serial_number", tmp_sn); + std::string out_resol = "CUSTOM"; + mNhNs.getParam("general/pub_resolution", out_resol); + if (out_resol == "NATIVE") + { + mPubResolution = PubRes::NATIVE; + } + else if (out_resol == "CUSTOM") + { + mPubResolution = PubRes::CUSTOM; + } + else + { + NODELET_WARN("Not valid 'general/pub_resolution' value: '%s'. 
Using default setting instead.", out_resol.c_str()); + out_resol = "CUSTOM"; + mPubResolution = PubRes::CUSTOM; + } + NODELET_INFO_STREAM(" * Publishing resolution\t-> " << out_resol.c_str()); - if (tmp_sn > 0) { - mZedSerialNumber = static_cast(tmp_sn); - NODELET_INFO_STREAM(" * Serial number\t\t-> " << mZedSerialNumber); - } + if (mPubResolution == PubRes::CUSTOM) + { + mNhNs.getParam("general/pub_downscale_factor", mCustomDownscaleFactor); + NODELET_INFO_STREAM(" * Publishing downscale factor\t-> " << mCustomDownscaleFactor); + } + else + { + mCustomDownscaleFactor = 1.0; + } - std::string parsed_str = getRoiParam("general/region_of_interest", mRoiParam); - NODELET_INFO_STREAM(" * Region of interest\t\t-> " << parsed_str.c_str()); + mNhNs.getParam("general/gpu_id", mGpuId); + NODELET_INFO_STREAM(" * Gpu ID\t\t\t-> " << mGpuId); + mNhNs.getParam("general/zed_id", mZedId); + NODELET_INFO_STREAM(" * Camera ID\t\t\t-> " << mZedId); + mNhNs.getParam("general/sdk_verbose", mSdkVerbose); + NODELET_INFO_STREAM(" * SDK Verbose level\t\t-> " << mSdkVerbose); + std::string flip_str; + mNhNs.getParam("general/camera_flip", flip_str); + if (flip_str == "ON") + { + mCameraFlip = sl::FLIP_MODE::ON; + } + else if (flip_str == "OFF") + { + mCameraFlip = sl::FLIP_MODE::OFF; + } + else + { + mCameraFlip = sl::FLIP_MODE::AUTO; + } + NODELET_INFO_STREAM(" * Camera Flip\t\t\t-> " << sl::toString(mCameraFlip).c_str()); + mNhNs.param("general/self_calib", mCameraSelfCalib, true); + NODELET_INFO_STREAM(" * Self calibration\t\t-> " << (mCameraSelfCalib ? "ENABLED" : "DISABLED")); + + int tmp_sn = 0; + mNhNs.getParam("general/serial_number", tmp_sn); + + if (tmp_sn > 0) + { + mZedSerialNumber = static_cast(tmp_sn); + NODELET_INFO_STREAM(" * Serial number\t\t-> " << mZedSerialNumber); + } + + std::string parsed_str = getRoiParam("general/region_of_interest", mRoiParam); + NODELET_INFO_STREAM(" * Region of interest\t\t-> " << parsed_str.c_str()); } void ZEDWrapperNodelet::readDepthParams() { - NODELET_INFO_STREAM("*** DEPTH PARAMETERS ***"); - - std::string depth_mode_str = sl::toString(mDepthMode).c_str(); - mNhNs.getParam("depth/depth_mode", depth_mode_str); - - bool matched = false; - for (int mode = static_cast(sl::DEPTH_MODE::NONE); - mode < static_cast(sl::DEPTH_MODE::LAST); ++mode) + NODELET_INFO_STREAM("*** DEPTH PARAMETERS ***"); + + std::string depth_mode_str = sl::toString(mDepthMode).c_str(); + mNhNs.getParam("depth/depth_mode", depth_mode_str); + + bool matched = false; + for (int mode = static_cast(sl::DEPTH_MODE::NONE); mode < static_cast(sl::DEPTH_MODE::LAST); ++mode) + { + std::string test_str = sl::toString(static_cast(mode)).c_str(); + std::replace(test_str.begin(), test_str.end(), ' ', + '_'); // Replace spaces with underscores to match the YAML setting + if (test_str == depth_mode_str) { - std::string test_str = sl::toString(static_cast(mode)).c_str(); - std::replace(test_str.begin(), test_str.end(), ' ', '_'); // Replace spaces with underscores to match the YAML setting - if (test_str == depth_mode_str) { - matched = true; - mDepthMode = static_cast(mode); - break; - } + matched = true; + mDepthMode = static_cast(mode); + break; } + } - if (!matched) { - NODELET_WARN_STREAM( - "The parameter 'depth.depth_mode' contains a not valid string. Please check it in 'common.yaml'."); - NODELET_WARN("Using default DEPTH_MODE."); - mDepthMode = sl::DEPTH_MODE::ULTRA; - } + if (!matched) + { + NODELET_WARN_STREAM( + "The parameter 'depth.depth_mode' contains a not valid string. 
Please check it in 'common.yaml'."); + NODELET_WARN("Using default DEPTH_MODE."); + mDepthMode = sl::DEPTH_MODE::ULTRA; + } - if (mDepthMode == sl::DEPTH_MODE::NONE) { - mDepthDisabled = true; - mDepthStabilization = 0; - NODELET_INFO_STREAM(" * Depth mode\t\t\t-> " << sl::toString(mDepthMode).c_str() << " - DEPTH DISABLED"); - } else { - mDepthDisabled = false; - NODELET_INFO_STREAM(" * Depth mode\t\t\t-> " << sl::toString( - mDepthMode).c_str() << " [" << static_cast(mDepthMode) << "]"); - } + if (mDepthMode == sl::DEPTH_MODE::NONE) + { + mDepthDisabled = true; + mDepthStabilization = 0; + NODELET_INFO_STREAM(" * Depth mode\t\t\t-> " << sl::toString(mDepthMode).c_str() << " - DEPTH DISABLED"); + } + else + { + mDepthDisabled = false; + NODELET_INFO_STREAM(" * Depth mode\t\t\t-> " << sl::toString(mDepthMode).c_str() << " [" + << static_cast(mDepthMode) << "]"); + } - if(!mDepthDisabled) { - mNhNs.getParam("depth/openni_depth_mode", mOpenniDepthMode); - NODELET_INFO_STREAM(" * OpenNI mode\t\t\t-> " << (mOpenniDepthMode ? "ENABLED": "DISABLED")); - mNhNs.getParam("depth/depth_stabilization", mDepthStabilization); - NODELET_INFO_STREAM(" * Depth Stabilization\t\t-> " << mDepthStabilization ); - mNhNs.getParam("depth/min_depth", mCamMinDepth); - NODELET_INFO_STREAM(" * Minimum depth\t\t-> " << mCamMinDepth << " m"); - mNhNs.getParam("depth/max_depth", mCamMaxDepth); - NODELET_INFO_STREAM(" * Maximum depth\t\t-> " << mCamMaxDepth << " m"); - } + if (!mDepthDisabled) + { + mNhNs.getParam("depth/openni_depth_mode", mOpenniDepthMode); + NODELET_INFO_STREAM(" * OpenNI mode\t\t\t-> " << (mOpenniDepthMode ? "ENABLED" : "DISABLED")); + mNhNs.getParam("depth/depth_stabilization", mDepthStabilization); + NODELET_INFO_STREAM(" * Depth Stabilization\t\t-> " << mDepthStabilization); + mNhNs.getParam("depth/min_depth", mCamMinDepth); + NODELET_INFO_STREAM(" * Minimum depth\t\t-> " << mCamMinDepth << " m"); + mNhNs.getParam("depth/max_depth", mCamMaxDepth); + NODELET_INFO_STREAM(" * Maximum depth\t\t-> " << mCamMaxDepth << " m"); + } } void ZEDWrapperNodelet::readPosTrkParams() { - if(!mDepthDisabled) { - NODELET_INFO_STREAM("*** POSITIONAL TRACKING PARAMETERS ***"); - - mNhNs.param("pos_tracking/pos_tracking_enabled", mPosTrackingEnabled, true); - NODELET_INFO_STREAM(" * Positional tracking\t\t-> " << (mPosTrackingEnabled ? "ENABLED": "DISABLED")); - - std::string pos_trk_mode; - mNhNs.getParam("pos_tracking/pos_tracking_mode", pos_trk_mode); - if (pos_trk_mode == "QUALITY") { - mPosTrkMode = sl::POSITIONAL_TRACKING_MODE::QUALITY; - } else if (pos_trk_mode == "STANDARD") { - mPosTrkMode = sl::POSITIONAL_TRACKING_MODE::STANDARD; - } else { - NODELET_WARN_STREAM( - "'pos_tracking/pos_tracking_mode' not valid ('" << pos_trk_mode << - "'). Using default value."); - mPosTrkMode = sl::POSITIONAL_TRACKING_MODE::QUALITY; - } - NODELET_INFO_STREAM( " * Positional tracking mode\t-> " << sl::toString(mPosTrkMode).c_str()); + if (!mDepthDisabled) + { + NODELET_INFO_STREAM("*** POSITIONAL TRACKING PARAMETERS ***"); - mNhNs.getParam("pos_tracking/set_gravity_as_origin", mSetGravityAsOrigin); - NODELET_INFO_STREAM(" * Set gravity as origin\t-> " << (mSetGravityAsOrigin ? "ENABLED": "DISABLED")); + mNhNs.param("pos_tracking/pos_tracking_enabled", mPosTrackingEnabled, true); + NODELET_INFO_STREAM(" * Positional tracking\t\t-> " << (mPosTrackingEnabled ? 
"ENABLED" : "DISABLED")); + + std::string pos_trk_mode; + mNhNs.getParam("pos_tracking/pos_tracking_mode", pos_trk_mode); + if (pos_trk_mode == "QUALITY") + { + mPosTrkMode = sl::POSITIONAL_TRACKING_MODE::QUALITY; + } + else if (pos_trk_mode == "STANDARD") + { + mPosTrkMode = sl::POSITIONAL_TRACKING_MODE::STANDARD; + } + else + { + NODELET_WARN_STREAM("'pos_tracking/pos_tracking_mode' not valid ('" << pos_trk_mode + << "'). Using default value."); + mPosTrkMode = sl::POSITIONAL_TRACKING_MODE::QUALITY; + } + NODELET_INFO_STREAM(" * Positional tracking mode\t-> " << sl::toString(mPosTrkMode).c_str()); - mNhNs.getParam("pos_tracking/path_pub_rate", mPathPubRate); - NODELET_INFO_STREAM(" * Path rate\t\t\t-> " << mPathPubRate << " Hz"); - mNhNs.getParam("pos_tracking/path_max_count", mPathMaxCount); - NODELET_INFO_STREAM(" * Path history size\t\t-> " << (mPathMaxCount == -1) ? std::string("INFINITE"): std::to_string(mPathMaxCount)); + mNhNs.getParam("pos_tracking/set_gravity_as_origin", mSetGravityAsOrigin); + NODELET_INFO_STREAM(" * Set gravity as origin\t-> " << (mSetGravityAsOrigin ? "ENABLED" : "DISABLED")); - if (mPathMaxCount < 2 && mPathMaxCount != -1) { - mPathMaxCount = 2; - } + mNhNs.getParam("pos_tracking/path_pub_rate", mPathPubRate); + NODELET_INFO_STREAM(" * Path rate\t\t\t-> " << mPathPubRate << " Hz"); + mNhNs.getParam("pos_tracking/path_max_count", mPathMaxCount); + NODELET_INFO_STREAM(" * Path history size\t\t-> " << (mPathMaxCount == -1) ? std::string("INFINITE") : + std::to_string(mPathMaxCount)); - mNhNs.getParam("pos_tracking/initial_base_pose", mInitialBasePose); - - mNhNs.getParam("pos_tracking/area_memory_db_path", mAreaMemDbPath); - mAreaMemDbPath = sl_tools::resolveFilePath(mAreaMemDbPath); - NODELET_INFO_STREAM(" * Odometry DB path\t\t-> " << mAreaMemDbPath.c_str()); - - mNhNs.param("pos_tracking/save_area_memory_db_on_exit", mSaveAreaMapOnClosing, false); - NODELET_INFO_STREAM(" * Save Area Memory on closing\t-> " << (mSaveAreaMapOnClosing ? "ENABLED": "DISABLED")); - mNhNs.param("pos_tracking/area_memory", mAreaMemory, false); - NODELET_INFO_STREAM(" * Area Memory\t\t\t-> " << (mAreaMemory ? "ENABLED": "DISABLED")); - mNhNs.param("pos_tracking/imu_fusion", mImuFusion, true); - NODELET_INFO_STREAM(" * IMU Fusion\t\t\t-> " << (mImuFusion ? "ENABLED": "DISABLED")); - mNhNs.param("pos_tracking/floor_alignment", mFloorAlignment, false); - NODELET_INFO_STREAM(" * Floor alignment\t\t-> " << (mFloorAlignment ? "ENABLED": "DISABLED")); - mNhNs.param("pos_tracking/init_odom_with_first_valid_pose", mInitOdomWithPose, true); - NODELET_INFO_STREAM(" * Init Odometry with first valid pose data -> " - << (mInitOdomWithPose ? "ENABLED": "DISABLED")); - mNhNs.param("pos_tracking/two_d_mode", mTwoDMode, false); - NODELET_INFO_STREAM(" * Force 2D mode\t\t-> " << (mTwoDMode ? "ENABLED": "DISABLED")); - - if (mTwoDMode) { - mNhNs.param("pos_tracking/fixed_z_value", mFixedZValue, 0.0); - NODELET_INFO_STREAM(" * Fixed Z value\t\t-> " << mFixedZValue); - } - - mNhNs.param("pos_tracking/publish_tf", mPublishTf, true); - NODELET_INFO_STREAM(" * Broadcast odometry TF\t-> " << (mPublishTf ? "ENABLED": "DISABLED")); - mNhNs.param("pos_tracking/publish_map_tf", mPublishMapTf, true); - NODELET_INFO_STREAM(" * Broadcast map pose TF\t-> " - << (mPublishTf ? (mPublishMapTf ? 
"ENABLED": "DISABLED"): "DISABLED")); + if (mPathMaxCount < 2 && mPathMaxCount != -1) + { + mPathMaxCount = 2; + } + + mNhNs.getParam("pos_tracking/initial_base_pose", mInitialBasePose); + + mNhNs.getParam("pos_tracking/area_memory_db_path", mAreaMemDbPath); + mAreaMemDbPath = sl_tools::resolveFilePath(mAreaMemDbPath); + NODELET_INFO_STREAM(" * Odometry DB path\t\t-> " << mAreaMemDbPath.c_str()); + + mNhNs.param("pos_tracking/save_area_memory_db_on_exit", mSaveAreaMapOnClosing, false); + NODELET_INFO_STREAM(" * Save Area Memory on closing\t-> " << (mSaveAreaMapOnClosing ? "ENABLED" : "DISABLED")); + mNhNs.param("pos_tracking/area_memory", mAreaMemory, false); + NODELET_INFO_STREAM(" * Area Memory\t\t\t-> " << (mAreaMemory ? "ENABLED" : "DISABLED")); + mNhNs.param("pos_tracking/imu_fusion", mImuFusion, true); + NODELET_INFO_STREAM(" * IMU Fusion\t\t\t-> " << (mImuFusion ? "ENABLED" : "DISABLED")); + mNhNs.param("pos_tracking/floor_alignment", mFloorAlignment, false); + NODELET_INFO_STREAM(" * Floor alignment\t\t-> " << (mFloorAlignment ? "ENABLED" : "DISABLED")); + mNhNs.param("pos_tracking/init_odom_with_first_valid_pose", mInitOdomWithPose, true); + NODELET_INFO_STREAM(" * Init Odometry with first valid pose data -> " + << (mInitOdomWithPose ? "ENABLED" : "DISABLED")); + mNhNs.param("pos_tracking/two_d_mode", mTwoDMode, false); + NODELET_INFO_STREAM(" * Force 2D mode\t\t-> " << (mTwoDMode ? "ENABLED" : "DISABLED")); + + if (mTwoDMode) + { + mNhNs.param("pos_tracking/fixed_z_value", mFixedZValue, 0.0); + NODELET_INFO_STREAM(" * Fixed Z value\t\t-> " << mFixedZValue); } + + mNhNs.param("pos_tracking/publish_tf", mPublishTf, true); + NODELET_INFO_STREAM(" * Broadcast odometry TF\t-> " << (mPublishTf ? "ENABLED" : "DISABLED")); + mNhNs.param("pos_tracking/publish_map_tf", mPublishMapTf, true); + NODELET_INFO_STREAM(" * Broadcast map pose TF\t-> " + << (mPublishTf ? (mPublishMapTf ? "ENABLED" : "DISABLED") : "DISABLED")); + } } void ZEDWrapperNodelet::readMappingParams() { - NODELET_INFO_STREAM("*** MAPPING PARAMETERS ***"); + NODELET_INFO_STREAM("*** MAPPING PARAMETERS ***"); - if(!mDepthDisabled) { - mNhNs.param("mapping/mapping_enabled", mMappingEnabled, false); + if (!mDepthDisabled) + { + mNhNs.param("mapping/mapping_enabled", mMappingEnabled, false); - if (mMappingEnabled) { - NODELET_INFO_STREAM(" * Mapping\t\t\t-> ENABLED"); + if (mMappingEnabled) + { + NODELET_INFO_STREAM(" * Mapping\t\t\t-> ENABLED"); - mNhNs.getParam("mapping/resolution", mMappingRes); - NODELET_INFO_STREAM(" * Mapping resolution\t\t-> " << mMappingRes << " m"); + mNhNs.getParam("mapping/resolution", mMappingRes); + NODELET_INFO_STREAM(" * Mapping resolution\t\t-> " << mMappingRes << " m"); - mNhNs.getParam("mapping/max_mapping_range", mMaxMappingRange); - NODELET_INFO_STREAM(" * Mapping max range\t\t-> " << mMaxMappingRange << " m" - << ((mMaxMappingRange < 0.0) ? " [AUTO]": "")); + mNhNs.getParam("mapping/max_mapping_range", mMaxMappingRange); + NODELET_INFO_STREAM(" * Mapping max range\t\t-> " << mMaxMappingRange << " m" + << ((mMaxMappingRange < 0.0) ? 
" [AUTO]" : "")); - mNhNs.getParam("mapping/fused_pointcloud_freq", mFusedPcPubFreq); - NODELET_INFO_STREAM(" * Fused point cloud freq:\t-> " << mFusedPcPubFreq << " Hz"); - } else { - NODELET_INFO_STREAM(" * Mapping\t\t\t-> DISABLED"); - } + mNhNs.getParam("mapping/fused_pointcloud_freq", mFusedPcPubFreq); + NODELET_INFO_STREAM(" * Fused point cloud freq:\t-> " << mFusedPcPubFreq << " Hz"); + } + else + { + NODELET_INFO_STREAM(" * Mapping\t\t\t-> DISABLED"); } - mNhNs.param("mapping/clicked_point_topic", mClickedPtTopic, std::string("/clicked_point")); - NODELET_INFO_STREAM(" * Clicked point topic\t\t-> " << mClickedPtTopic.c_str()); + } + mNhNs.param("mapping/clicked_point_topic", mClickedPtTopic, std::string("/clicked_point")); + NODELET_INFO_STREAM(" * Clicked point topic\t\t-> " << mClickedPtTopic.c_str()); } void ZEDWrapperNodelet::readObjDetParams() { - if(!mDepthDisabled) { - NODELET_INFO_STREAM("*** OBJECT DETECTION PARAMETERS ***"); - - mNhNs.param("object_detection/od_enabled", mObjDetEnabled, false); - - if (mObjDetEnabled) { - NODELET_INFO_STREAM(" * Object Detection\t\t-> ENABLED"); - mNhNs.getParam("object_detection/confidence_threshold", mObjDetConfidence); - NODELET_INFO_STREAM(" * Object confidence\t\t-> " << mObjDetConfidence); - mNhNs.getParam("object_detection/object_tracking_enabled", mObjDetTracking); - NODELET_INFO_STREAM(" * Object tracking\t\t-> " << (mObjDetTracking ? "ENABLED": "DISABLED")); - mNhNs.getParam("object_detection/max_range", mObjDetMaxRange); - if (mObjDetMaxRange > mCamMaxDepth) { - NODELET_WARN("Detection max range cannot be major than depth max range. Automatically fixed."); - mObjDetMaxRange = mCamMaxDepth; - } - NODELET_INFO_STREAM(" * Detection max range\t\t-> " << mObjDetMaxRange); + if (!mDepthDisabled) + { + NODELET_INFO_STREAM("*** OBJECT DETECTION PARAMETERS ***"); - int model; - mNhNs.getParam("object_detection/model", model); - if (model < 0 || model >= static_cast(sl::OBJECT_DETECTION_MODEL::LAST)) { - NODELET_WARN("Detection model not valid. Forced to the default value"); - model = static_cast(mObjDetModel); - } - mObjDetModel = static_cast(model); - - NODELET_INFO_STREAM(" * Detection model\t\t-> " << sl::toString(mObjDetModel)); - - mNhNs.getParam("object_detection/mc_people", mObjDetPeopleEnable); - NODELET_INFO_STREAM(" * Detect people\t\t-> " << (mObjDetPeopleEnable ? "ENABLED": "DISABLED")); - mNhNs.getParam("object_detection/mc_vehicle", mObjDetVehiclesEnable); - NODELET_INFO_STREAM(" * Detect vehicles\t\t-> " << (mObjDetVehiclesEnable ? "ENABLED": "DISABLED")); - mNhNs.getParam("object_detection/mc_bag", mObjDetBagsEnable); - NODELET_INFO_STREAM(" * Detect bags\t\t\t-> " << (mObjDetBagsEnable ? "ENABLED": "DISABLED")); - mNhNs.getParam("object_detection/mc_animal", mObjDetAnimalsEnable); - NODELET_INFO_STREAM(" * Detect animals\t\t-> " << (mObjDetAnimalsEnable ? "ENABLED": "DISABLED")); - mNhNs.getParam("object_detection/mc_electronics", mObjDetElectronicsEnable); - NODELET_INFO_STREAM(" * Detect electronics\t\t-> " << (mObjDetElectronicsEnable ? "ENABLED": "DISABLED")); - mNhNs.getParam("object_detection/mc_fruit_vegetable", mObjDetFruitsEnable); - NODELET_INFO_STREAM(" * Detect fruit and vegetables\t-> " << (mObjDetFruitsEnable ? "ENABLED": "DISABLED")); - mNhNs.getParam("object_detection/mc_sport", mObjDetSportsEnable); - NODELET_INFO_STREAM(" * Detect sport-related objects\t-> " << (mObjDetSportsEnable ? 
"ENABLED": "DISABLED")); - } + mNhNs.param("object_detection/od_enabled", mObjDetEnabled, false); + + if (mObjDetEnabled) + { + NODELET_INFO_STREAM(" * Object Detection\t\t-> ENABLED"); + mNhNs.getParam("object_detection/confidence_threshold", mObjDetConfidence); + NODELET_INFO_STREAM(" * Object confidence\t\t-> " << mObjDetConfidence); + mNhNs.getParam("object_detection/object_tracking_enabled", mObjDetTracking); + NODELET_INFO_STREAM(" * Object tracking\t\t-> " << (mObjDetTracking ? "ENABLED" : "DISABLED")); + mNhNs.getParam("object_detection/max_range", mObjDetMaxRange); + if (mObjDetMaxRange > mCamMaxDepth) + { + NODELET_WARN("Detection max range cannot be major than depth max range. Automatically fixed."); + mObjDetMaxRange = mCamMaxDepth; + } + NODELET_INFO_STREAM(" * Detection max range\t\t-> " << mObjDetMaxRange); + + int model; + mNhNs.getParam("object_detection/model", model); + if (model < 0 || model >= static_cast(sl::OBJECT_DETECTION_MODEL::LAST)) + { + NODELET_WARN("Detection model not valid. Forced to the default value"); + model = static_cast(mObjDetModel); + } + mObjDetModel = static_cast(model); + + NODELET_INFO_STREAM(" * Detection model\t\t-> " << sl::toString(mObjDetModel)); + + mNhNs.getParam("object_detection/mc_people", mObjDetPeopleEnable); + NODELET_INFO_STREAM(" * Detect people\t\t-> " << (mObjDetPeopleEnable ? "ENABLED" : "DISABLED")); + mNhNs.getParam("object_detection/mc_vehicle", mObjDetVehiclesEnable); + NODELET_INFO_STREAM(" * Detect vehicles\t\t-> " << (mObjDetVehiclesEnable ? "ENABLED" : "DISABLED")); + mNhNs.getParam("object_detection/mc_bag", mObjDetBagsEnable); + NODELET_INFO_STREAM(" * Detect bags\t\t\t-> " << (mObjDetBagsEnable ? "ENABLED" : "DISABLED")); + mNhNs.getParam("object_detection/mc_animal", mObjDetAnimalsEnable); + NODELET_INFO_STREAM(" * Detect animals\t\t-> " << (mObjDetAnimalsEnable ? "ENABLED" : "DISABLED")); + mNhNs.getParam("object_detection/mc_electronics", mObjDetElectronicsEnable); + NODELET_INFO_STREAM(" * Detect electronics\t\t-> " << (mObjDetElectronicsEnable ? "ENABLED" : "DISABLED")); + mNhNs.getParam("object_detection/mc_fruit_vegetable", mObjDetFruitsEnable); + NODELET_INFO_STREAM(" * Detect fruit and vegetables\t-> " << (mObjDetFruitsEnable ? "ENABLED" : "DISABLED")); + mNhNs.getParam("object_detection/mc_sport", mObjDetSportsEnable); + NODELET_INFO_STREAM(" * Detect sport-related objects\t-> " << (mObjDetSportsEnable ? "ENABLED" : "DISABLED")); } + } } void ZEDWrapperNodelet::readSensParams() { - NODELET_INFO_STREAM("*** SENSORS PARAMETERS ***"); - - if (!sl_tools::isZED(mZedUserCamModel)) { - mNhNs.getParam("sensors/sensors_timestamp_sync", mSensTimestampSync); - NODELET_INFO_STREAM(" * Sensors timestamp sync\t-> " << (mSensTimestampSync ? "ENABLED": "DISABLED")); - mNhNs.getParam("sensors/max_pub_rate", mSensPubRate); - if (!sl_tools::isZEDM(mZedUserCamModel)) { - if (mSensPubRate > 800.) - mSensPubRate = 800.; - } else { - if (mSensPubRate > 400.) - mSensPubRate = 400.; - } + NODELET_INFO_STREAM("*** SENSORS PARAMETERS ***"); + + if (!sl_tools::isZED(mZedUserCamModel)) + { + mNhNs.getParam("sensors/sensors_timestamp_sync", mSensTimestampSync); + NODELET_INFO_STREAM(" * Sensors timestamp sync\t-> " << (mSensTimestampSync ? "ENABLED" : "DISABLED")); + mNhNs.getParam("sensors/max_pub_rate", mSensPubRate); + if (!sl_tools::isZEDM(mZedUserCamModel)) + { + if (mSensPubRate > 800.) + mSensPubRate = 800.; + } + else + { + if (mSensPubRate > 400.) 
+ mSensPubRate = 400.; + } - NODELET_INFO_STREAM(" * Max sensors rate\t\t-> " << mSensPubRate); + NODELET_INFO_STREAM(" * Max sensors rate\t\t-> " << mSensPubRate); - mNhNs.param("sensors/publish_imu_tf", mPublishImuTf, true); - NODELET_INFO_STREAM(" * Broadcast IMU pose TF\t-> " << (mPublishImuTf ? "ENABLED": "DISABLED")); - } else { - NODELET_INFO_STREAM(" * The ZED camera has no available inertial/environmental sensors."); - } + mNhNs.param("sensors/publish_imu_tf", mPublishImuTf, true); + NODELET_INFO_STREAM(" * Broadcast IMU pose TF\t-> " << (mPublishImuTf ? "ENABLED" : "DISABLED")); + } + else + { + NODELET_INFO_STREAM(" * The ZED camera has no available inertial/environmental sensors."); + } } void ZEDWrapperNodelet::readSvoParams() { - NODELET_INFO_STREAM("*** SVO PARAMETERS ***"); + NODELET_INFO_STREAM("*** SVO PARAMETERS ***"); - mNhNs.param("svo_file", mSvoFilepath, std::string()); - mSvoFilepath = sl_tools::resolveFilePath(mSvoFilepath); - NODELET_INFO_STREAM(" * SVO input file: \t\t-> " << mSvoFilepath.c_str()); + mNhNs.param("svo_file", mSvoFilepath, std::string()); + mSvoFilepath = sl_tools::resolveFilePath(mSvoFilepath); + NODELET_INFO_STREAM(" * SVO input file: \t\t-> " << mSvoFilepath.c_str()); - if(!mSvoFilepath.empty()) { - mNhNs.getParam("general/svo_realtime", mSvoRealtime); - NODELET_INFO_STREAM(" * SVO realtime mode\t\t-> " << (mSvoRealtime ? "ENABLED": "DISABLED")); - } + if (!mSvoFilepath.empty()) + { + mNhNs.getParam("general/svo_realtime", mSvoRealtime); + NODELET_INFO_STREAM(" * SVO realtime mode\t\t-> " << (mSvoRealtime ? "ENABLED" : "DISABLED")); + } - int svo_compr = 0; - mNhNs.getParam("general/svo_compression", svo_compr); + int svo_compr = 0; + mNhNs.getParam("general/svo_compression", svo_compr); - if (svo_compr >= static_cast(sl::SVO_COMPRESSION_MODE::LAST)) { - NODELET_WARN_STREAM("The parameter `general/svo_compression` has an invalid value. Please check it in the " - "configuration file `common.yaml`"); + if (svo_compr >= static_cast(sl::SVO_COMPRESSION_MODE::LAST)) + { + NODELET_WARN_STREAM( + "The parameter `general/svo_compression` has an invalid value. Please check it in the " + "configuration file `common.yaml`"); - svo_compr = 0; - } + svo_compr = 0; + } - mSvoComprMode = static_cast(svo_compr); + mSvoComprMode = static_cast(svo_compr); - NODELET_INFO_STREAM(" * SVO REC compression\t\t-> " << sl::toString(mSvoComprMode)); + NODELET_INFO_STREAM(" * SVO REC compression\t\t-> " << sl::toString(mSvoComprMode)); } void ZEDWrapperNodelet::readDynParams() { - NODELET_INFO_STREAM("*** DYNAMIC PARAMETERS (Init. values) ***"); - - mNhNs.getParam("brightness", mCamBrightness); - NODELET_INFO_STREAM(" * [DYN] brightness\t\t-> " << mCamBrightness); - mNhNs.getParam("contrast", mCamContrast); - NODELET_INFO_STREAM(" * [DYN] contrast\t\t-> " << mCamContrast); - mNhNs.getParam("hue", mCamHue); - NODELET_INFO_STREAM(" * [DYN] hue\t\t\t-> " << mCamHue); - mNhNs.getParam("saturation", mCamSaturation); - NODELET_INFO_STREAM(" * [DYN] saturation\t\t-> " << mCamSaturation); - mNhNs.getParam("sharpness", mCamSharpness); - NODELET_INFO_STREAM(" * [DYN] sharpness\t\t-> " << mCamSharpness); - mNhNs.getParam("gamma", mCamGamma); - NODELET_INFO_STREAM(" * [DYN] gamma\t\t\t-> " << mCamGamma); - mNhNs.getParam("auto_exposure_gain", mCamAutoExposure); - NODELET_INFO_STREAM(" * [DYN] auto_exposure_gain\t-> " << (mCamAutoExposure ? 
"ENABLED": "DISABLED")); - mNhNs.getParam("gain", mCamGain); - mNhNs.getParam("exposure", mCamExposure); - if (!mCamAutoExposure) { - NODELET_INFO_STREAM(" * [DYN] gain\t\t-> " << mCamGain); - NODELET_INFO_STREAM(" * [DYN] exposure\t\t-> " << mCamExposure); - } - mNhNs.getParam("auto_whitebalance", mCamAutoWB); - NODELET_INFO_STREAM(" * [DYN] auto_whitebalance\t-> " << (mCamAutoWB ? "ENABLED": "DISABLED")); - mNhNs.getParam("whitebalance_temperature", mCamWB); - if (!mCamAutoWB) { - NODELET_INFO_STREAM(" * [DYN] whitebalance_temperature\t\t-> " << mCamWB); - } + NODELET_INFO_STREAM("*** DYNAMIC PARAMETERS (Init. values) ***"); + + mNhNs.getParam("brightness", mCamBrightness); + NODELET_INFO_STREAM(" * [DYN] brightness\t\t-> " << mCamBrightness); + mNhNs.getParam("contrast", mCamContrast); + NODELET_INFO_STREAM(" * [DYN] contrast\t\t-> " << mCamContrast); + mNhNs.getParam("hue", mCamHue); + NODELET_INFO_STREAM(" * [DYN] hue\t\t\t-> " << mCamHue); + mNhNs.getParam("saturation", mCamSaturation); + NODELET_INFO_STREAM(" * [DYN] saturation\t\t-> " << mCamSaturation); + mNhNs.getParam("sharpness", mCamSharpness); + NODELET_INFO_STREAM(" * [DYN] sharpness\t\t-> " << mCamSharpness); + mNhNs.getParam("gamma", mCamGamma); + NODELET_INFO_STREAM(" * [DYN] gamma\t\t\t-> " << mCamGamma); + mNhNs.getParam("auto_exposure_gain", mCamAutoExposure); + NODELET_INFO_STREAM(" * [DYN] auto_exposure_gain\t-> " << (mCamAutoExposure ? "ENABLED" : "DISABLED")); + mNhNs.getParam("gain", mCamGain); + mNhNs.getParam("exposure", mCamExposure); + if (!mCamAutoExposure) + { + NODELET_INFO_STREAM(" * [DYN] gain\t\t-> " << mCamGain); + NODELET_INFO_STREAM(" * [DYN] exposure\t\t-> " << mCamExposure); + } + mNhNs.getParam("auto_whitebalance", mCamAutoWB); + NODELET_INFO_STREAM(" * [DYN] auto_whitebalance\t-> " << (mCamAutoWB ? "ENABLED" : "DISABLED")); + mNhNs.getParam("whitebalance_temperature", mCamWB); + if (!mCamAutoWB) + { + NODELET_INFO_STREAM(" * [DYN] whitebalance_temperature\t\t-> " << mCamWB); + } - if (mCamAutoExposure) { - mTriggerAutoExposure = true; - } - if (mCamAutoWB) { - mTriggerAutoWB = true; - } + if (mCamAutoExposure) + { + mTriggerAutoExposure = true; + } + if (mCamAutoWB) + { + mTriggerAutoWB = true; + } - if(!mDepthDisabled) { - mNhNs.getParam("depth_confidence", mCamDepthConfidence); - NODELET_INFO_STREAM(" * [DYN] Depth confidence\t-> " << mCamDepthConfidence); - mNhNs.getParam("depth_texture_conf", mCamDepthTextureConf); - NODELET_INFO_STREAM(" * [DYN] Depth texture conf.\t-> " << mCamDepthTextureConf); - - mNhNs.getParam("point_cloud_freq", mPointCloudFreq); - if(mPointCloudFreq>mPubFrameRate) { - mPointCloudFreq=mPubFrameRate; - NODELET_WARN_STREAM("'point_cloud_freq' cannot be major than 'general/pub_frame_rate'. Set to " - << mPointCloudFreq); - } - NODELET_INFO_STREAM(" * [DYN] point_cloud_freq\t-> " << mPointCloudFreq << " Hz"); + if (!mDepthDisabled) + { + mNhNs.getParam("depth_confidence", mCamDepthConfidence); + NODELET_INFO_STREAM(" * [DYN] Depth confidence\t-> " << mCamDepthConfidence); + mNhNs.getParam("depth_texture_conf", mCamDepthTextureConf); + NODELET_INFO_STREAM(" * [DYN] Depth texture conf.\t-> " << mCamDepthTextureConf); + + mNhNs.getParam("point_cloud_freq", mPointCloudFreq); + if (mPointCloudFreq > mPubFrameRate) + { + mPointCloudFreq = mPubFrameRate; + NODELET_WARN_STREAM("'point_cloud_freq' cannot be major than 'general/pub_frame_rate'. 
Set to " + << mPointCloudFreq); } + NODELET_INFO_STREAM(" * [DYN] point_cloud_freq\t-> " << mPointCloudFreq << " Hz"); + } } void ZEDWrapperNodelet::readParameters() { - // ----> General - readGeneralParams(); - // <---- General - - // ----> Video - //NODELET_INFO_STREAM("*** VIDEO PARAMETERS ***"); - // Note: there are no "static" video parameters - // <---- Video - - // -----> Depth - readDepthParams(); - // <----- Depth - - // ----> Dynamic - void readDynParams(); - // <---- Dynamic - - // ----> Positional Tracking - readPosTrkParams(); - // <---- Positional Tracking - - // ----> Mapping - readMappingParams(); - // <---- Mapping - - // ----> Object Detection - readObjDetParams(); - // <---- Object Detection - - // ----> Sensors - readSensParams(); - // <---- Sensors - - // ----> SVO - readSvoParams(); - // <---- SVO - - // Remote Stream - mNhNs.param("stream", mRemoteStreamAddr, std::string()); - - // ----> Coordinate frames - NODELET_INFO_STREAM("*** COORDINATE FRAMES ***"); - - mNhNs.param("pos_tracking/map_frame", mMapFrameId, "map"); - mNhNs.param("pos_tracking/odometry_frame", mOdometryFrameId, "odom"); - mNhNs.param("general/base_frame", mBaseFrameId, "base_link"); - - mCameraFrameId = mCameraName + "_camera_center"; - mImuFrameId = mCameraName + "_imu_link"; - mLeftCamFrameId = mCameraName + "_left_camera_frame"; - mLeftCamOptFrameId = mCameraName + "_left_camera_optical_frame"; - mRightCamFrameId = mCameraName + "_right_camera_frame"; - mRightCamOptFrameId = mCameraName + "_right_camera_optical_frame"; - - mBaroFrameId = mCameraName + "_baro_link"; - mMagFrameId = mCameraName + "_mag_link"; - mTempLeftFrameId = mCameraName + "_temp_left_link"; - mTempRightFrameId = mCameraName + "_temp_right_link"; - - mDepthFrameId = mLeftCamFrameId; - mDepthOptFrameId = mLeftCamOptFrameId; - - // Note: Depth image frame id must match color image frame id - mCloudFrameId = mDepthOptFrameId; - mRgbFrameId = mDepthFrameId; - mRgbOptFrameId = mCloudFrameId; - mDisparityFrameId = mDepthFrameId; - mDisparityOptFrameId = mDepthOptFrameId; - mConfidenceFrameId = mDepthFrameId; - mConfidenceOptFrameId = mDepthOptFrameId; - - // Print TF frames - NODELET_INFO_STREAM(" * camera_frame\t\t\t-> " << mCameraFrameId); - NODELET_INFO_STREAM(" * imu_link\t\t\t-> " << mImuFrameId); - NODELET_INFO_STREAM(" * left_camera_frame\t\t-> " << mLeftCamFrameId); - NODELET_INFO_STREAM(" * left_camera_optical_frame\t-> " << mLeftCamOptFrameId); - NODELET_INFO_STREAM(" * right_camera_frame\t\t-> " << mRightCamFrameId); - NODELET_INFO_STREAM(" * right_camera_optical_frame\t-> " << mRightCamOptFrameId); - - if(!mDepthDisabled) { - NODELET_INFO_STREAM(" * map_frame\t\t\t-> " << mMapFrameId); - NODELET_INFO_STREAM(" * odometry_frame\t\t-> " << mOdometryFrameId); - NODELET_INFO_STREAM(" * base_frame\t\t\t-> " << mBaseFrameId); - NODELET_INFO_STREAM(" * depth_frame\t\t\t-> " << mDepthFrameId); - NODELET_INFO_STREAM(" * depth_optical_frame\t\t-> " << mDepthOptFrameId); - NODELET_INFO_STREAM(" * disparity_frame\t\t-> " << mDisparityFrameId); - NODELET_INFO_STREAM(" * disparity_optical_frame\t-> " << mDisparityOptFrameId); - NODELET_INFO_STREAM(" * confidence_frame\t\t-> " << mConfidenceFrameId); - NODELET_INFO_STREAM(" * confidence_optical_frame\t-> " << mConfidenceOptFrameId); - } - // <---- Coordinate frames + // ----> General + readGeneralParams(); + // <---- General + + // ----> Video + // NODELET_INFO_STREAM("*** VIDEO PARAMETERS ***"); + // Note: there are no "static" video parameters + // <---- Video + + // -----> Depth + 
readDepthParams(); + // <----- Depth + + // ----> Dynamic + void readDynParams(); + // <---- Dynamic + + // ----> Positional Tracking + readPosTrkParams(); + // <---- Positional Tracking + + // ----> Mapping + readMappingParams(); + // <---- Mapping + + // ----> Object Detection + readObjDetParams(); + // <---- Object Detection + + // ----> Sensors + readSensParams(); + // <---- Sensors + + // ----> SVO + readSvoParams(); + // <---- SVO + + // Remote Stream + mNhNs.param("stream", mRemoteStreamAddr, std::string()); + + // ----> Coordinate frames + NODELET_INFO_STREAM("*** COORDINATE FRAMES ***"); + + mNhNs.param("pos_tracking/map_frame", mMapFrameId, "map"); + mNhNs.param("pos_tracking/odometry_frame", mOdometryFrameId, "odom"); + mNhNs.param("general/base_frame", mBaseFrameId, "base_link"); + + mCameraFrameId = mCameraName + "_camera_center"; + mImuFrameId = mCameraName + "_imu_link"; + mLeftCamFrameId = mCameraName + "_left_camera_frame"; + mLeftCamOptFrameId = mCameraName + "_left_camera_optical_frame"; + mRightCamFrameId = mCameraName + "_right_camera_frame"; + mRightCamOptFrameId = mCameraName + "_right_camera_optical_frame"; + + mBaroFrameId = mCameraName + "_baro_link"; + mMagFrameId = mCameraName + "_mag_link"; + mTempLeftFrameId = mCameraName + "_temp_left_link"; + mTempRightFrameId = mCameraName + "_temp_right_link"; + + mDepthFrameId = mLeftCamFrameId; + mDepthOptFrameId = mLeftCamOptFrameId; + + // Note: Depth image frame id must match color image frame id + mCloudFrameId = mDepthOptFrameId; + mRgbFrameId = mDepthFrameId; + mRgbOptFrameId = mCloudFrameId; + mDisparityFrameId = mDepthFrameId; + mDisparityOptFrameId = mDepthOptFrameId; + mConfidenceFrameId = mDepthFrameId; + mConfidenceOptFrameId = mDepthOptFrameId; + + // Print TF frames + NODELET_INFO_STREAM(" * camera_frame\t\t\t-> " << mCameraFrameId); + NODELET_INFO_STREAM(" * imu_link\t\t\t-> " << mImuFrameId); + NODELET_INFO_STREAM(" * left_camera_frame\t\t-> " << mLeftCamFrameId); + NODELET_INFO_STREAM(" * left_camera_optical_frame\t-> " << mLeftCamOptFrameId); + NODELET_INFO_STREAM(" * right_camera_frame\t\t-> " << mRightCamFrameId); + NODELET_INFO_STREAM(" * right_camera_optical_frame\t-> " << mRightCamOptFrameId); + + if (!mDepthDisabled) + { + NODELET_INFO_STREAM(" * map_frame\t\t\t-> " << mMapFrameId); + NODELET_INFO_STREAM(" * odometry_frame\t\t-> " << mOdometryFrameId); + NODELET_INFO_STREAM(" * base_frame\t\t\t-> " << mBaseFrameId); + NODELET_INFO_STREAM(" * depth_frame\t\t\t-> " << mDepthFrameId); + NODELET_INFO_STREAM(" * depth_optical_frame\t\t-> " << mDepthOptFrameId); + NODELET_INFO_STREAM(" * disparity_frame\t\t-> " << mDisparityFrameId); + NODELET_INFO_STREAM(" * disparity_optical_frame\t-> " << mDisparityOptFrameId); + NODELET_INFO_STREAM(" * confidence_frame\t\t-> " << mConfidenceFrameId); + NODELET_INFO_STREAM(" * confidence_optical_frame\t-> " << mConfidenceOptFrameId); + } + // <---- Coordinate frames } -std::string ZEDWrapperNodelet::getRoiParam(std::string paramName, std::vector> & outVal) +std::string ZEDWrapperNodelet::getRoiParam(std::string paramName, std::vector>& outVal) { outVal.clear(); @@ -1139,14 +1291,16 @@ std::string ZEDWrapperNodelet::getRoiParam(std::string paramName, std::vector> & in_poly, - std::vector & out_poly) +std::string ZEDWrapperNodelet::parseRoiPoly(const std::vector>& in_poly, + std::vector& out_poly) { out_poly.clear(); @@ -1167,34 +1320,40 @@ std::string ZEDWrapperNodelet::parseRoiPoly( size_t poly_size = in_poly.size(); - if (poly_size < 3) { - if (poly_size != 0) { - 
NODELET_WARN_STREAM("A vector with " << - poly_size << - " points is not enough to create a polygon to set a Region " - "of Interest."); + if (poly_size < 3) + { + if (poly_size != 0) + { + NODELET_WARN_STREAM("A vector with " << poly_size + << " points is not enough to create a polygon to set a Region " + "of Interest."); return std::string(); } - } else { - for (size_t i; i < poly_size; ++i) { - if (in_poly[i].size() != 2) { - NODELET_WARN_STREAM("The component with index '" << i << - "' of the ROI vector " - "has not the correct size."); + } + else + { + for (size_t i; i < poly_size; ++i) + { + if (in_poly[i].size() != 2) + { + NODELET_WARN_STREAM("The component with index '" << i + << "' of the ROI vector " + "has not the correct size."); out_poly.clear(); return std::string(); - } else if (in_poly[i][0] < 0.0 || in_poly[i][1] < 0.0 || in_poly[i][0] > 1.0 || - in_poly[i][1] > 1.0) + } + else if (in_poly[i][0] < 0.0 || in_poly[i][1] < 0.0 || in_poly[i][0] > 1.0 || in_poly[i][1] > 1.0) { - NODELET_WARN_STREAM("The component with index '" << i << - "' of the ROI vector " - "is not a " - "valid normalized point: [" << - in_poly[i][0] << "," << in_poly[i][1] << - "]."); + NODELET_WARN_STREAM("The component with index '" << i + << "' of the ROI vector " + "is not a " + "valid normalized point: [" + << in_poly[i][0] << "," << in_poly[i][1] << "]."); out_poly.clear(); return std::string(); - } else { + } + else + { sl::float2 pt; pt.x = in_poly[i][0]; pt.y = in_poly[i][1]; @@ -1206,7 +1365,8 @@ std::string ZEDWrapperNodelet::parseRoiPoly( ss += "]"; } - if (i != poly_size - 1) { + if (i != poly_size - 1) + { ss += ","; } } @@ -1218,2652 +1378,3055 @@ std::string ZEDWrapperNodelet::parseRoiPoly( void ZEDWrapperNodelet::checkResolFps() { - switch (mCamResol) { + switch (mCamResol) + { case sl::RESOLUTION::HD2K: - if (mCamFrameRate != 15) { - NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD2K. Set to 15 FPS."); - mCamFrameRate = 15; - } + if (mCamFrameRate != 15) + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD2K. Set to 15 FPS."); + mCamFrameRate = 15; + } - break; + break; case sl::RESOLUTION::HD1080: - if (mZedUserCamModel == sl::MODEL::ZED_X || mZedUserCamModel == sl::MODEL::ZED_XM) + if (mZedUserCamModel == sl::MODEL::ZED_X || mZedUserCamModel == sl::MODEL::ZED_XM) + { + if (mCamFrameRate == 60 || mCamFrameRate == 30) { - if (mCamFrameRate == 60 || mCamFrameRate == 30) { - break; - } + break; + } - if (mCamFrameRate > 30 && mCamFrameRate < 60) { - NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD1080. Set to 30 FPS."); - mCamFrameRate = 30; - } else if (mCamFrameRate > 60) { - NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD1080. Set to 60 FPS."); - mCamFrameRate = 60; - } else { - NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD1080. Set to 30 FPS."); - mCamFrameRate = 30; - } + if (mCamFrameRate > 30 && mCamFrameRate < 60) + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD1080. Set to 30 FPS."); + mCamFrameRate = 30; + } + else if (mCamFrameRate > 60) + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD1080. 
Set to 60 FPS."); + mCamFrameRate = 60; } else { - if (mCamFrameRate == 15 || mCamFrameRate == 30) { - break; - } - - if (mCamFrameRate > 15 && mCamFrameRate < 30) { - NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD1080. Set to 15 FPS."); - mCamFrameRate = 15; - } else if (mCamFrameRate > 30) { - NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD1080. Set to 30 FPS."); - mCamFrameRate = 30; - } else { - NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD1080. Set to 15 FPS."); - mCamFrameRate = 15; - } + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD1080. Set to 30 FPS."); + mCamFrameRate = 30; } - - break; - - case sl::RESOLUTION::HD720: - if (mCamFrameRate == 15 || mCamFrameRate == 30 || mCamFrameRate == 60) { - break; + } + else + { + if (mCamFrameRate == 15 || mCamFrameRate == 30) + { + break; } - if (mCamFrameRate > 15 && mCamFrameRate < 30) { - NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD720. Set to 15 FPS."); - mCamFrameRate = 15; - } else if (mCamFrameRate > 30 && mCamFrameRate < 60) { - NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD720. Set to 30 FPS."); - mCamFrameRate = 30; - } else if (mCamFrameRate > 60) { - NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD720. Set to 60 FPS."); - mCamFrameRate = 60; - } else { - NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD720. Set to 15 FPS."); - mCamFrameRate = 15; + if (mCamFrameRate > 15 && mCamFrameRate < 30) + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD1080. Set to 15 FPS."); + mCamFrameRate = 15; } - - break; - - case sl::RESOLUTION::VGA: - if (mCamFrameRate == 15 || mCamFrameRate == 30 || mCamFrameRate == 60 || mCamFrameRate == 100) { - break; + else if (mCamFrameRate > 30) + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD1080. Set to 30 FPS."); + mCamFrameRate = 30; } - - if (mCamFrameRate > 15 && mCamFrameRate < 30) { - NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution VGA. Set to 15 FPS."); - mCamFrameRate = 15; - } else if (mCamFrameRate > 30 && mCamFrameRate < 60) { - NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution VGA. Set to 30 FPS."); - mCamFrameRate = 30; - } else if (mCamFrameRate > 60 && mCamFrameRate < 100) { - NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution VGA. Set to 60 FPS."); - mCamFrameRate = 60; - } else if (mCamFrameRate > 100) { - NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution VGA. Set to 100 FPS."); - mCamFrameRate = 100; - } else { - NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution VGA. Set to 15 FPS."); - mCamFrameRate = 15; + else + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD1080. Set to 15 FPS."); + mCamFrameRate = 15; } + } + + break; + case sl::RESOLUTION::HD720: + if (mCamFrameRate == 15 || mCamFrameRate == 30 || mCamFrameRate == 60) + { break; + } - case sl::RESOLUTION::HD1200: - if (mCamFrameRate == 60 || mCamFrameRate == 30) { - break; - } + if (mCamFrameRate > 15 && mCamFrameRate < 30) + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD720. 
Set to 15 FPS."); + mCamFrameRate = 15; + } + else if (mCamFrameRate > 30 && mCamFrameRate < 60) + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD720. Set to 30 FPS."); + mCamFrameRate = 30; + } + else if (mCamFrameRate > 60) + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD720. Set to 60 FPS."); + mCamFrameRate = 60; + } + else + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD720. Set to 15 FPS."); + mCamFrameRate = 15; + } - if (mCamFrameRate > 30 && mCamFrameRate < 60) { - NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD1200. Set to 30 FPS."); - mCamFrameRate = 30; - } else if (mCamFrameRate > 60) { - NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD1200. Set to 60 FPS."); - mCamFrameRate = 60; - } else { - NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD1200. Set to 30 FPS."); - mCamFrameRate = 30; - } + break; + case sl::RESOLUTION::VGA: + if (mCamFrameRate == 15 || mCamFrameRate == 30 || mCamFrameRate == 60 || mCamFrameRate == 100) + { break; + } - case sl::RESOLUTION::SVGA: - if (mCamFrameRate == 120 || mCamFrameRate == 60) { - break; - } + if (mCamFrameRate > 15 && mCamFrameRate < 30) + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution VGA. Set to 15 FPS."); + mCamFrameRate = 15; + } + else if (mCamFrameRate > 30 && mCamFrameRate < 60) + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution VGA. Set to 30 FPS."); + mCamFrameRate = 30; + } + else if (mCamFrameRate > 60 && mCamFrameRate < 100) + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution VGA. Set to 60 FPS."); + mCamFrameRate = 60; + } + else if (mCamFrameRate > 100) + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution VGA. Set to 100 FPS."); + mCamFrameRate = 100; + } + else + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution VGA. Set to 15 FPS."); + mCamFrameRate = 15; + } - if (mCamFrameRate > 60 && mCamFrameRate < 120) { - NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution SVGA. Set to 60 FPS."); - mCamFrameRate = 60; - } else if (mCamFrameRate > 120) { - NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution SVGA. Set to 120 FPS."); - mCamFrameRate = 120; - } else { - NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution SVGA. Set to 60 FPS."); - mCamFrameRate = 60; - } + break; + case sl::RESOLUTION::HD1200: + if (mCamFrameRate == 60 || mCamFrameRate == 30) + { break; + } - case sl::RESOLUTION::AUTO: + if (mCamFrameRate > 30 && mCamFrameRate < 60) + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD1200. Set to 30 FPS."); + mCamFrameRate = 30; + } + else if (mCamFrameRate > 60) + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD1200. Set to 60 FPS."); + mCamFrameRate = 60; + } + else + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD1200. 
Set to 30 FPS."); + mCamFrameRate = 30; + } + + break; + + case sl::RESOLUTION::SVGA: + if (mCamFrameRate == 120 || mCamFrameRate == 60) + { break; - + } + + if (mCamFrameRate > 60 && mCamFrameRate < 120) + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution SVGA. Set to 60 FPS."); + mCamFrameRate = 60; + } + else if (mCamFrameRate > 120) + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution SVGA. Set to 120 FPS."); + mCamFrameRate = 120; + } + else + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution SVGA. Set to 60 FPS."); + mCamFrameRate = 60; + } + + break; + + case sl::RESOLUTION::AUTO: + break; + default: - NODELET_WARN_STREAM("Invalid resolution. Set to AUTO with default frame rate"); - mCamResol = sl::RESOLUTION::AUTO; - } + NODELET_WARN_STREAM("Invalid resolution. Set to AUTO with default frame rate"); + mCamResol = sl::RESOLUTION::AUTO; + } } void ZEDWrapperNodelet::initTransforms() { - // According to REP 105 -> http://www.ros.org/reps/rep-0105.html - - // base_link <- odom <- map - // ^ | - // | | - // ------------------- - - // ----> Dynamic transforms - mOdom2BaseTransf.setIdentity(); // broadcasted if `publish_tf` is true - mMap2OdomTransf.setIdentity(); // broadcasted if `publish_map_tf` is true - mMap2BaseTransf.setIdentity(); // used internally, but not broadcasted - mMap2CameraTransf.setIdentity(); // used internally, but not broadcasted - // <---- Dynamic transforms + // According to REP 105 -> http://www.ros.org/reps/rep-0105.html + + // base_link <- odom <- map + // ^ | + // | | + // ------------------- + + // ----> Dynamic transforms + mOdom2BaseTransf.setIdentity(); // broadcasted if `publish_tf` is true + mMap2OdomTransf.setIdentity(); // broadcasted if `publish_map_tf` is true + mMap2BaseTransf.setIdentity(); // used internally, but not broadcasted + mMap2CameraTransf.setIdentity(); // used internally, but not broadcasted + // <---- Dynamic transforms } bool ZEDWrapperNodelet::getCamera2BaseTransform() { - NODELET_DEBUG("Getting static TF from '%s' to '%s'", mCameraFrameId.c_str(), mBaseFrameId.c_str()); + NODELET_DEBUG("Getting static TF from '%s' to '%s'", mCameraFrameId.c_str(), mBaseFrameId.c_str()); - mCamera2BaseTransfValid = false; - static bool first_error = true; + mCamera2BaseTransfValid = false; + static bool first_error = true; - // ----> Static transforms - // Sensor to Base link - try { - // Save the transformation - geometry_msgs::TransformStamped c2b = mTfBuffer->lookupTransform(mCameraFrameId, mBaseFrameId, ros::Time(0), ros::Duration(0.1)); + // ----> Static transforms + // Sensor to Base link + try + { + // Save the transformation + geometry_msgs::TransformStamped c2b = + mTfBuffer->lookupTransform(mCameraFrameId, mBaseFrameId, ros::Time(0), ros::Duration(0.1)); - // Get the TF2 transformation - tf2::fromMsg(c2b.transform, mCamera2BaseTransf); + // Get the TF2 transformation + tf2::fromMsg(c2b.transform, mCamera2BaseTransf); - double roll, pitch, yaw; - tf2::Matrix3x3(mCamera2BaseTransf.getRotation()).getRPY(roll, pitch, yaw); - - NODELET_INFO("Static transform Camera Center to Base [%s -> %s]", mCameraFrameId.c_str(), mBaseFrameId.c_str()); - NODELET_INFO(" * Translation: {%.3f,%.3f,%.3f}", mCamera2BaseTransf.getOrigin().x(), - mCamera2BaseTransf.getOrigin().y(), mCamera2BaseTransf.getOrigin().z()); - NODELET_INFO(" * Rotation: {%.3f,%.3f,%.3f}", roll * RAD2DEG, pitch * RAD2DEG, yaw * RAD2DEG); - } catch (tf2::TransformException& ex) { - if 
(!first_error) { - NODELET_DEBUG_THROTTLE(1.0, "Transform error: %s", ex.what()); - NODELET_WARN_THROTTLE(1.0, "The tf from '%s' to '%s' is not available.", mCameraFrameId.c_str(), - mBaseFrameId.c_str()); - NODELET_WARN_THROTTLE(1.0, - "Note: one of the possible cause of the problem is the absense of an instance " - "of the `robot_state_publisher` node publishing the correct static TF transformations " - "or a modified URDF not correctly reproducing the ZED " - "TF chain '%s' -> '%s' -> '%s'", - mBaseFrameId.c_str(), mCameraFrameId.c_str(), mDepthFrameId.c_str()); - first_error = false; - } + double roll, pitch, yaw; + tf2::Matrix3x3(mCamera2BaseTransf.getRotation()).getRPY(roll, pitch, yaw); - mCamera2BaseTransf.setIdentity(); - return false; - } + NODELET_INFO("Static transform Camera Center to Base [%s -> %s]", mCameraFrameId.c_str(), mBaseFrameId.c_str()); + NODELET_INFO(" * Translation: {%.3f,%.3f,%.3f}", mCamera2BaseTransf.getOrigin().x(), + mCamera2BaseTransf.getOrigin().y(), mCamera2BaseTransf.getOrigin().z()); + NODELET_INFO(" * Rotation: {%.3f,%.3f,%.3f}", roll * RAD2DEG, pitch * RAD2DEG, yaw * RAD2DEG); + } + catch (tf2::TransformException& ex) + { + if (!first_error) + { + NODELET_DEBUG_THROTTLE(1.0, "Transform error: %s", ex.what()); + NODELET_WARN_THROTTLE(1.0, "The tf from '%s' to '%s' is not available.", mCameraFrameId.c_str(), + mBaseFrameId.c_str()); + NODELET_WARN_THROTTLE(1.0, + "Note: one of the possible cause of the problem is the absense of an instance " + "of the `robot_state_publisher` node publishing the correct static TF transformations " + "or a modified URDF not correctly reproducing the ZED " + "TF chain '%s' -> '%s' -> '%s'", + mBaseFrameId.c_str(), mCameraFrameId.c_str(), mDepthFrameId.c_str()); + first_error = false; + } + + mCamera2BaseTransf.setIdentity(); + return false; + } - // <---- Static transforms - mCamera2BaseTransfValid = true; - return true; + // <---- Static transforms + mCamera2BaseTransfValid = true; + return true; } bool ZEDWrapperNodelet::getSens2CameraTransform() { - NODELET_DEBUG("Getting static TF from '%s' to '%s'", mDepthFrameId.c_str(), mCameraFrameId.c_str()); + NODELET_DEBUG("Getting static TF from '%s' to '%s'", mDepthFrameId.c_str(), mCameraFrameId.c_str()); - mSensor2CameraTransfValid = false; + mSensor2CameraTransfValid = false; - static bool first_error = true; + static bool first_error = true; - // ----> Static transforms - // Sensor to Camera Center - try { - // Save the transformation - geometry_msgs::TransformStamped s2c = mTfBuffer->lookupTransform(mDepthFrameId, mCameraFrameId, ros::Time(0), ros::Duration(0.1)); - // Get the TF2 transformation - tf2::fromMsg(s2c.transform, mSensor2CameraTransf); + // ----> Static transforms + // Sensor to Camera Center + try + { + // Save the transformation + geometry_msgs::TransformStamped s2c = + mTfBuffer->lookupTransform(mDepthFrameId, mCameraFrameId, ros::Time(0), ros::Duration(0.1)); + // Get the TF2 transformation + tf2::fromMsg(s2c.transform, mSensor2CameraTransf); - double roll, pitch, yaw; - tf2::Matrix3x3(mSensor2CameraTransf.getRotation()).getRPY(roll, pitch, yaw); - - NODELET_INFO("Static transform Sensor to Camera Center [%s -> %s]", mDepthFrameId.c_str(), mCameraFrameId.c_str()); - NODELET_INFO(" * Translation: {%.3f,%.3f,%.3f}", mSensor2CameraTransf.getOrigin().x(), - mSensor2CameraTransf.getOrigin().y(), mSensor2CameraTransf.getOrigin().z()); - NODELET_INFO(" * Rotation: {%.3f,%.3f,%.3f}", roll * RAD2DEG, pitch * RAD2DEG, yaw * RAD2DEG); - } catch 
(tf2::TransformException& ex) { - if (!first_error) { - NODELET_DEBUG_THROTTLE(1.0, "Transform error: %s", ex.what()); - NODELET_WARN_THROTTLE(1.0, "The tf from '%s' to '%s' is not available.", mDepthFrameId.c_str(), - mCameraFrameId.c_str()); - NODELET_WARN_THROTTLE(1.0, - "Note: one of the possible cause of the problem is the absense of an instance " - "of the `robot_state_publisher` node publishing the correct static TF transformations " - "or a modified URDF not correctly reproducing the ZED " - "TF chain '%s' -> '%s' -> '%s'", - mBaseFrameId.c_str(), mCameraFrameId.c_str(), mDepthFrameId.c_str()); - first_error = false; - } + double roll, pitch, yaw; + tf2::Matrix3x3(mSensor2CameraTransf.getRotation()).getRPY(roll, pitch, yaw); - mSensor2CameraTransf.setIdentity(); - return false; - } + NODELET_INFO("Static transform Sensor to Camera Center [%s -> %s]", mDepthFrameId.c_str(), mCameraFrameId.c_str()); + NODELET_INFO(" * Translation: {%.3f,%.3f,%.3f}", mSensor2CameraTransf.getOrigin().x(), + mSensor2CameraTransf.getOrigin().y(), mSensor2CameraTransf.getOrigin().z()); + NODELET_INFO(" * Rotation: {%.3f,%.3f,%.3f}", roll * RAD2DEG, pitch * RAD2DEG, yaw * RAD2DEG); + } + catch (tf2::TransformException& ex) + { + if (!first_error) + { + NODELET_DEBUG_THROTTLE(1.0, "Transform error: %s", ex.what()); + NODELET_WARN_THROTTLE(1.0, "The tf from '%s' to '%s' is not available.", mDepthFrameId.c_str(), + mCameraFrameId.c_str()); + NODELET_WARN_THROTTLE(1.0, + "Note: one of the possible cause of the problem is the absense of an instance " + "of the `robot_state_publisher` node publishing the correct static TF transformations " + "or a modified URDF not correctly reproducing the ZED " + "TF chain '%s' -> '%s' -> '%s'", + mBaseFrameId.c_str(), mCameraFrameId.c_str(), mDepthFrameId.c_str()); + first_error = false; + } + + mSensor2CameraTransf.setIdentity(); + return false; + } - // <---- Static transforms + // <---- Static transforms - mSensor2CameraTransfValid = true; - return true; + mSensor2CameraTransfValid = true; + return true; } bool ZEDWrapperNodelet::getSens2BaseTransform() { - NODELET_DEBUG("Getting static TF from '%s' to '%s'", mDepthFrameId.c_str(), mBaseFrameId.c_str()); - - mSensor2BaseTransfValid = false; - static bool first_error = true; - - // ----> Static transforms - // Sensor to Base link - try { - // Save the transformation - geometry_msgs::TransformStamped s2b = mTfBuffer->lookupTransform(mDepthFrameId, mBaseFrameId, ros::Time(0), ros::Duration(0.1)); - // Get the TF2 transformation - tf2::fromMsg(s2b.transform, mSensor2BaseTransf); - - double roll, pitch, yaw; - tf2::Matrix3x3(mSensor2BaseTransf.getRotation()).getRPY(roll, pitch, yaw); - - NODELET_INFO("Static transform Sensor to Base [%s -> %s]", mDepthFrameId.c_str(), mBaseFrameId.c_str()); - NODELET_INFO(" * Translation: {%.3f,%.3f,%.3f}", mSensor2BaseTransf.getOrigin().x(), - mSensor2BaseTransf.getOrigin().y(), mSensor2BaseTransf.getOrigin().z()); - NODELET_INFO(" * Rotation: {%.3f,%.3f,%.3f}", roll * RAD2DEG, pitch * RAD2DEG, yaw * RAD2DEG); - } catch (tf2::TransformException& ex) { - if (!first_error) { - NODELET_DEBUG_THROTTLE(1.0, "Transform error: %s", ex.what()); - NODELET_WARN_THROTTLE(1.0, "The tf from '%s' to '%s' is not available.", mDepthFrameId.c_str(), - mBaseFrameId.c_str()); - NODELET_WARN_THROTTLE(1.0, - "Note: one of the possible cause of the problem is the absense of an instance " - "of the `robot_state_publisher` node publishing the correct static TF transformations " - "or a modified URDF not correctly 
reproducing the ZED " - "TF chain '%s' -> '%s' -> '%s'", - mBaseFrameId.c_str(), mCameraFrameId.c_str(), mDepthFrameId.c_str()); - first_error = false; - } - - mSensor2BaseTransf.setIdentity(); - return false; - } + NODELET_DEBUG("Getting static TF from '%s' to '%s'", mDepthFrameId.c_str(), mBaseFrameId.c_str()); + + mSensor2BaseTransfValid = false; + static bool first_error = true; + + // ----> Static transforms + // Sensor to Base link + try + { + // Save the transformation + geometry_msgs::TransformStamped s2b = + mTfBuffer->lookupTransform(mDepthFrameId, mBaseFrameId, ros::Time(0), ros::Duration(0.1)); + // Get the TF2 transformation + tf2::fromMsg(s2b.transform, mSensor2BaseTransf); + + double roll, pitch, yaw; + tf2::Matrix3x3(mSensor2BaseTransf.getRotation()).getRPY(roll, pitch, yaw); + + NODELET_INFO("Static transform Sensor to Base [%s -> %s]", mDepthFrameId.c_str(), mBaseFrameId.c_str()); + NODELET_INFO(" * Translation: {%.3f,%.3f,%.3f}", mSensor2BaseTransf.getOrigin().x(), + mSensor2BaseTransf.getOrigin().y(), mSensor2BaseTransf.getOrigin().z()); + NODELET_INFO(" * Rotation: {%.3f,%.3f,%.3f}", roll * RAD2DEG, pitch * RAD2DEG, yaw * RAD2DEG); + } + catch (tf2::TransformException& ex) + { + if (!first_error) + { + NODELET_DEBUG_THROTTLE(1.0, "Transform error: %s", ex.what()); + NODELET_WARN_THROTTLE(1.0, "The tf from '%s' to '%s' is not available.", mDepthFrameId.c_str(), + mBaseFrameId.c_str()); + NODELET_WARN_THROTTLE(1.0, + "Note: one of the possible cause of the problem is the absense of an instance " + "of the `robot_state_publisher` node publishing the correct static TF transformations " + "or a modified URDF not correctly reproducing the ZED " + "TF chain '%s' -> '%s' -> '%s'", + mBaseFrameId.c_str(), mCameraFrameId.c_str(), mDepthFrameId.c_str()); + first_error = false; + } + + mSensor2BaseTransf.setIdentity(); + return false; + } - // <---- Static transforms + // <---- Static transforms - mSensor2BaseTransfValid = true; - return true; + mSensor2BaseTransfValid = true; + return true; } bool ZEDWrapperNodelet::set_pose(float xt, float yt, float zt, float rr, float pr, float yr) { - initTransforms(); + initTransforms(); - if (!mSensor2BaseTransfValid) { - getSens2BaseTransform(); - } + if (!mSensor2BaseTransfValid) + { + getSens2BaseTransform(); + } - if (!mSensor2CameraTransfValid) { - getSens2CameraTransform(); - } + if (!mSensor2CameraTransfValid) + { + getSens2CameraTransform(); + } - if (!mCamera2BaseTransfValid) { - getCamera2BaseTransform(); - } + if (!mCamera2BaseTransfValid) + { + getCamera2BaseTransform(); + } - // Apply Base to sensor transform - tf2::Transform initPose; - tf2::Vector3 origin(xt, yt, zt); - initPose.setOrigin(origin); - tf2::Quaternion quat; - quat.setRPY(rr, pr, yr); - initPose.setRotation(quat); - - initPose = initPose * mSensor2BaseTransf.inverse(); - - // SL pose - sl::float3 t_vec; - t_vec[0] = initPose.getOrigin().x(); - t_vec[1] = initPose.getOrigin().y(); - t_vec[2] = initPose.getOrigin().z(); - - sl::float4 q_vec; - q_vec[0] = initPose.getRotation().x(); - q_vec[1] = initPose.getRotation().y(); - q_vec[2] = initPose.getRotation().z(); - q_vec[3] = initPose.getRotation().w(); - - sl::Translation trasl(t_vec); - sl::Orientation orient(q_vec); - mInitialPoseSl.setTranslation(trasl); - mInitialPoseSl.setOrientation(orient); - - return (mSensor2BaseTransfValid & mSensor2CameraTransfValid & mCamera2BaseTransfValid); + // Apply Base to sensor transform + tf2::Transform initPose; + tf2::Vector3 origin(xt, yt, zt); + initPose.setOrigin(origin); + 
tf2::Quaternion quat; + quat.setRPY(rr, pr, yr); + initPose.setRotation(quat); + + initPose = initPose * mSensor2BaseTransf.inverse(); + + // SL pose + sl::float3 t_vec; + t_vec[0] = initPose.getOrigin().x(); + t_vec[1] = initPose.getOrigin().y(); + t_vec[2] = initPose.getOrigin().z(); + + sl::float4 q_vec; + q_vec[0] = initPose.getRotation().x(); + q_vec[1] = initPose.getRotation().y(); + q_vec[2] = initPose.getRotation().z(); + q_vec[3] = initPose.getRotation().w(); + + sl::Translation trasl(t_vec); + sl::Orientation orient(q_vec); + mInitialPoseSl.setTranslation(trasl); + mInitialPoseSl.setOrientation(orient); + + return (mSensor2BaseTransfValid & mSensor2CameraTransfValid & mCamera2BaseTransfValid); } bool ZEDWrapperNodelet::on_set_pose(zed_interfaces::set_pose::Request& req, zed_interfaces::set_pose::Response& res) { - mInitialBasePose.resize(6); - mInitialBasePose[0] = req.x; - mInitialBasePose[1] = req.y; - mInitialBasePose[2] = req.z; - mInitialBasePose[3] = req.R; - mInitialBasePose[4] = req.P; - mInitialBasePose[5] = req.Y; + mInitialBasePose.resize(6); + mInitialBasePose[0] = req.x; + mInitialBasePose[1] = req.y; + mInitialBasePose[2] = req.z; + mInitialBasePose[3] = req.R; + mInitialBasePose[4] = req.P; + mInitialBasePose[5] = req.Y; - std::lock_guard lock(mPosTrkMutex); + std::lock_guard lock(mPosTrkMutex); - // Restart tracking - start_pos_tracking(); + // Restart tracking + start_pos_tracking(); - res.done = true; - return true; + res.done = true; + return true; } bool ZEDWrapperNodelet::on_reset_tracking(zed_interfaces::reset_tracking::Request& req, - zed_interfaces::reset_tracking::Response& res) + zed_interfaces::reset_tracking::Response& res) { - if (!mPosTrackingActivated) { - res.reset_done = false; - return false; - } + if (!mPosTrackingActivated) + { + res.reset_done = false; + return false; + } - std::lock_guard lock(mPosTrkMutex); + std::lock_guard lock(mPosTrkMutex); - // Restart tracking - start_pos_tracking(); + // Restart tracking + start_pos_tracking(); - res.reset_done = true; - return true; + res.reset_done = true; + return true; } bool ZEDWrapperNodelet::on_reset_odometry(zed_interfaces::reset_odometry::Request& req, - zed_interfaces::reset_odometry::Response& res) + zed_interfaces::reset_odometry::Response& res) { - mResetOdom = true; - res.reset_done = true; - return true; + mResetOdom = true; + res.reset_done = true; + return true; } bool ZEDWrapperNodelet::start_3d_mapping() { - if (!mMappingEnabled) { - return false; - } + if (!mMappingEnabled) + { + return false; + } - NODELET_INFO_STREAM("*** Starting Spatial Mapping ***"); + NODELET_INFO_STREAM("*** Starting Spatial Mapping ***"); - sl::SpatialMappingParameters params; - params.map_type = sl::SpatialMappingParameters::SPATIAL_MAP_TYPE::FUSED_POINT_CLOUD; - params.use_chunk_only = true; + sl::SpatialMappingParameters params; + params.map_type = sl::SpatialMappingParameters::SPATIAL_MAP_TYPE::FUSED_POINT_CLOUD; + params.use_chunk_only = true; - sl::SpatialMappingParameters spMapPar; + sl::SpatialMappingParameters spMapPar; - float lRes = spMapPar.allowed_resolution.first; - float hRes = spMapPar.allowed_resolution.second; + float lRes = spMapPar.allowed_resolution.first; + float hRes = spMapPar.allowed_resolution.second; - if (mMappingRes < lRes) { - NODELET_WARN_STREAM("'mapping/resolution' value (" - << mMappingRes << " m) is lower than the allowed resolution values. 
Fixed automatically"); - mMappingRes = lRes; - } - if (mMappingRes > hRes) { - NODELET_WARN_STREAM("'mapping/resolution' value (" - << mMappingRes << " m) is higher than the allowed resolution values. Fixed automatically"); - mMappingRes = hRes; - } + if (mMappingRes < lRes) + { + NODELET_WARN_STREAM("'mapping/resolution' value (" + << mMappingRes << " m) is lower than the allowed resolution values. Fixed automatically"); + mMappingRes = lRes; + } + if (mMappingRes > hRes) + { + NODELET_WARN_STREAM("'mapping/resolution' value (" + << mMappingRes << " m) is higher than the allowed resolution values. Fixed automatically"); + mMappingRes = hRes; + } - params.resolution_meter = mMappingRes; - - float lRng = spMapPar.allowed_range.first; - float hRng = spMapPar.allowed_range.second; - - if (mMaxMappingRange < 0) { - mMaxMappingRange = sl::SpatialMappingParameters::getRecommendedRange(mMappingRes, mZed); - NODELET_INFO_STREAM("Mapping: max range set to " << mMaxMappingRange << " m for a resolution of " << mMappingRes - << " m"); - } else if (mMaxMappingRange < lRng) { - NODELET_WARN_STREAM("'mapping/max_mapping_range_m' value (" - << mMaxMappingRange << " m) is lower than the allowed resolution values. Fixed automatically"); - mMaxMappingRange = lRng; - } else if (mMaxMappingRange > hRng) { - NODELET_WARN_STREAM("'mapping/max_mapping_range_m' value (" - << mMaxMappingRange << " m) is higher than the allowed resolution values. Fixed automatically"); - mMaxMappingRange = hRng; - } + params.resolution_meter = mMappingRes; + + float lRng = spMapPar.allowed_range.first; + float hRng = spMapPar.allowed_range.second; - params.range_meter = mMaxMappingRange; + if (mMaxMappingRange < 0) + { + mMaxMappingRange = sl::SpatialMappingParameters::getRecommendedRange(mMappingRes, mZed); + NODELET_INFO_STREAM("Mapping: max range set to " << mMaxMappingRange << " m for a resolution of " << mMappingRes + << " m"); + } + else if (mMaxMappingRange < lRng) + { + NODELET_WARN_STREAM("'mapping/max_mapping_range_m' value (" + << mMaxMappingRange << " m) is lower than the allowed resolution values. Fixed automatically"); + mMaxMappingRange = lRng; + } + else if (mMaxMappingRange > hRng) + { + NODELET_WARN_STREAM("'mapping/max_mapping_range_m' value (" + << mMaxMappingRange << " m) is higher than the allowed resolution values. 
Fixed automatically"); + mMaxMappingRange = hRng; + } - sl::ERROR_CODE err = mZed.enableSpatialMapping(params); + params.range_meter = mMaxMappingRange; - if (err == sl::ERROR_CODE::SUCCESS) { - if (mPubFusedCloud.getTopic().empty()) { - std::string pointcloud_fused_topic = "mapping/fused_cloud"; - mPubFusedCloud = mNhNs.advertise(pointcloud_fused_topic, 1); - NODELET_INFO_STREAM(" * Advertised on topic " << mPubFusedCloud.getTopic() << " @ " << mFusedPcPubFreq << " Hz"); - } + sl::ERROR_CODE err = mZed.enableSpatialMapping(params); - mMappingRunning = true; + if (err == sl::ERROR_CODE::SUCCESS) + { + if (mPubFusedCloud.getTopic().empty()) + { + std::string pointcloud_fused_topic = "mapping/fused_cloud"; + mPubFusedCloud = mNhNs.advertise(pointcloud_fused_topic, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubFusedCloud.getTopic() << " @ " << mFusedPcPubFreq << " Hz"); + } - mFusedPcTimer = mNhNs.createTimer(ros::Duration(1.0 / mFusedPcPubFreq), &ZEDWrapperNodelet::callback_pubFusedPointCloud, this); + mMappingRunning = true; - NODELET_INFO_STREAM(" * Resolution: " << params.resolution_meter << " m"); - NODELET_INFO_STREAM(" * Max Mapping Range: " << params.range_meter << " m"); - NODELET_INFO_STREAM(" * Map point cloud publishing rate: " << mFusedPcPubFreq << " Hz"); + mFusedPcTimer = + mNhNs.createTimer(ros::Duration(1.0 / mFusedPcPubFreq), &ZEDWrapperNodelet::callback_pubFusedPointCloud, this); - return true; - } else { - mMappingRunning = false; - mFusedPcTimer.stop(); + NODELET_INFO_STREAM(" * Resolution: " << params.resolution_meter << " m"); + NODELET_INFO_STREAM(" * Max Mapping Range: " << params.range_meter << " m"); + NODELET_INFO_STREAM(" * Map point cloud publishing rate: " << mFusedPcPubFreq << " Hz"); + + return true; + } + else + { + mMappingRunning = false; + mFusedPcTimer.stop(); - NODELET_WARN("Mapping not activated: %s", sl::toString(err).c_str()); + NODELET_WARN("Mapping not activated: %s", sl::toString(err).c_str()); - return false; - } + return false; + } } void ZEDWrapperNodelet::stop_3d_mapping() { - mFusedPcTimer.stop(); - mMappingRunning = false; - mMappingEnabled = false; - mZed.disableSpatialMapping(); + mFusedPcTimer.stop(); + mMappingRunning = false; + mMappingEnabled = false; + mZed.disableSpatialMapping(); - NODELET_INFO("*** Spatial Mapping stopped ***"); + NODELET_INFO("*** Spatial Mapping stopped ***"); } bool ZEDWrapperNodelet::start_obj_detect() { - if (mZedRealCamModel == sl::MODEL::ZED) { - NODELET_ERROR_STREAM("Object detection not started. OD is not available for ZED camera model"); - return false; - } + if (mZedRealCamModel == sl::MODEL::ZED) + { + NODELET_ERROR_STREAM("Object detection not started. 
OD is not available for ZED camera model"); + return false; + } - if (!mObjDetEnabled) { - return false; - } + if (!mObjDetEnabled) + { + return false; + } - if (!mCamera2BaseTransfValid || !mSensor2CameraTransfValid || !mSensor2BaseTransfValid) { - NODELET_DEBUG("Tracking transforms not yet ready, OD starting postponed"); - return false; - } + if (!mCamera2BaseTransfValid || !mSensor2CameraTransfValid || !mSensor2BaseTransfValid) + { + NODELET_DEBUG("Tracking transforms not yet ready, OD starting postponed"); + return false; + } - NODELET_INFO_STREAM("*** Starting Object Detection ***"); + NODELET_INFO_STREAM("*** Starting Object Detection ***"); - sl::ObjectDetectionParameters od_p; - od_p.allow_reduced_precision_inference = false; - od_p.enable_segmentation = false; - od_p.enable_tracking = mObjDetTracking; - od_p.image_sync = false; // Asynchronous object detection - od_p.detection_model = mObjDetModel; - od_p.max_range = mObjDetMaxRange; - od_p.instance_module_id = 0; + sl::ObjectDetectionParameters od_p; + od_p.allow_reduced_precision_inference = false; + od_p.enable_segmentation = false; + od_p.enable_tracking = mObjDetTracking; + od_p.image_sync = false; // Asynchronous object detection + od_p.detection_model = mObjDetModel; + od_p.max_range = mObjDetMaxRange; + od_p.instance_module_id = 0; - sl::ERROR_CODE objDetError = mZed.enableObjectDetection(od_p); + sl::ERROR_CODE objDetError = mZed.enableObjectDetection(od_p); - if (objDetError != sl::ERROR_CODE::SUCCESS) { - NODELET_ERROR_STREAM("Object detection error: " << sl::toString(objDetError)); + if (objDetError != sl::ERROR_CODE::SUCCESS) + { + NODELET_ERROR_STREAM("Object detection error: " << sl::toString(objDetError)); - mObjDetRunning = false; - return false; - } + mObjDetRunning = false; + return false; + } - if (mPubObjDet.getTopic().empty()) { - std::string object_det_topic_root = "obj_det"; - std::string object_det_topic = object_det_topic_root + "/objects"; + if (mPubObjDet.getTopic().empty()) + { + std::string object_det_topic_root = "obj_det"; + std::string object_det_topic = object_det_topic_root + "/objects"; - mPubObjDet = mNhNs.advertise(object_det_topic, 1); - NODELET_INFO_STREAM(" * Advertised on topic " << mPubObjDet.getTopic()); - } + mPubObjDet = mNhNs.advertise(object_det_topic, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubObjDet.getTopic()); + } - mObjDetFilter.clear(); + mObjDetFilter.clear(); - if (mObjDetModel == sl::OBJECT_DETECTION_MODEL::MULTI_CLASS_BOX_ACCURATE || - mObjDetModel == sl::OBJECT_DETECTION_MODEL::MULTI_CLASS_BOX_MEDIUM || - mObjDetModel == sl::OBJECT_DETECTION_MODEL::MULTI_CLASS_BOX_FAST) { - if (mObjDetPeopleEnable) { - mObjDetFilter.push_back(sl::OBJECT_CLASS::PERSON); - } - if (mObjDetVehiclesEnable) { - mObjDetFilter.push_back(sl::OBJECT_CLASS::VEHICLE); - } - if (mObjDetBagsEnable) { - mObjDetFilter.push_back(sl::OBJECT_CLASS::BAG); - } - if (mObjDetAnimalsEnable) { - mObjDetFilter.push_back(sl::OBJECT_CLASS::ANIMAL); - } - if (mObjDetElectronicsEnable) { - mObjDetFilter.push_back(sl::OBJECT_CLASS::ELECTRONICS); - } - if (mObjDetFruitsEnable) { - mObjDetFilter.push_back(sl::OBJECT_CLASS::FRUIT_VEGETABLE); - } - if (mObjDetSportsEnable) { - mObjDetFilter.push_back(sl::OBJECT_CLASS::SPORT); - } + if (mObjDetModel == sl::OBJECT_DETECTION_MODEL::MULTI_CLASS_BOX_ACCURATE || + mObjDetModel == sl::OBJECT_DETECTION_MODEL::MULTI_CLASS_BOX_MEDIUM || + mObjDetModel == sl::OBJECT_DETECTION_MODEL::MULTI_CLASS_BOX_FAST) + { + if (mObjDetPeopleEnable) + { + 
mObjDetFilter.push_back(sl::OBJECT_CLASS::PERSON); + } + if (mObjDetVehiclesEnable) + { + mObjDetFilter.push_back(sl::OBJECT_CLASS::VEHICLE); + } + if (mObjDetBagsEnable) + { + mObjDetFilter.push_back(sl::OBJECT_CLASS::BAG); + } + if (mObjDetAnimalsEnable) + { + mObjDetFilter.push_back(sl::OBJECT_CLASS::ANIMAL); + } + if (mObjDetElectronicsEnable) + { + mObjDetFilter.push_back(sl::OBJECT_CLASS::ELECTRONICS); + } + if (mObjDetFruitsEnable) + { + mObjDetFilter.push_back(sl::OBJECT_CLASS::FRUIT_VEGETABLE); + } + if (mObjDetSportsEnable) + { + mObjDetFilter.push_back(sl::OBJECT_CLASS::SPORT); } + } - mObjDetRunning = true; - return false; + mObjDetRunning = true; + return false; } void ZEDWrapperNodelet::stop_obj_detect() { - if (mObjDetRunning) { - NODELET_INFO_STREAM("*** Stopping Object Detection ***"); - mObjDetRunning = false; - mObjDetEnabled = false; - mZed.disableObjectDetection(); - } + if (mObjDetRunning) + { + NODELET_INFO_STREAM("*** Stopping Object Detection ***"); + mObjDetRunning = false; + mObjDetEnabled = false; + mZed.disableObjectDetection(); + } } void ZEDWrapperNodelet::start_pos_tracking() { - NODELET_INFO_STREAM("*** Starting Positional Tracking ***"); + NODELET_INFO_STREAM("*** Starting Positional Tracking ***"); - NODELET_INFO(" * Waiting for valid static transformations..."); + NODELET_INFO(" * Waiting for valid static transformations..."); - mPosTrackingReady = false; // Useful to not publish wrong TF with IMU frame broadcasting - bool transformOk = false; - double elapsed = 0.0; + mPosTrackingReady = false; // Useful to not publish wrong TF with IMU frame broadcasting + bool transformOk = false; + double elapsed = 0.0; - auto start = std::chrono::high_resolution_clock::now(); + auto start = std::chrono::high_resolution_clock::now(); - do { - transformOk = set_pose(mInitialBasePose[0], mInitialBasePose[1], mInitialBasePose[2], mInitialBasePose[3], - mInitialBasePose[4], mInitialBasePose[5]); + do + { + transformOk = set_pose(mInitialBasePose[0], mInitialBasePose[1], mInitialBasePose[2], mInitialBasePose[3], + mInitialBasePose[4], mInitialBasePose[5]); - elapsed = std::chrono::duration_cast(std::chrono::high_resolution_clock::now() - start) - .count(); + elapsed = std::chrono::duration_cast(std::chrono::high_resolution_clock::now() - start) + .count(); - std::this_thread::sleep_for(std::chrono::milliseconds(100)); + std::this_thread::sleep_for(std::chrono::milliseconds(100)); - if (elapsed > 10000) { - NODELET_WARN(" !!! Failed to get static transforms. Is the 'ROBOT STATE PUBLISHER' node correctly working? "); - break; - } + if (elapsed > 10000) + { + NODELET_WARN(" !!! Failed to get static transforms. Is the 'ROBOT STATE PUBLISHER' node correctly working? "); + break; + } - } while (transformOk == false); + } while (transformOk == false); - if (transformOk) { - NODELET_DEBUG("Time required to get valid static transforms: %g sec", elapsed / 1000.); - } + if (transformOk) + { + NODELET_DEBUG("Time required to get valid static transforms: %g sec", elapsed / 1000.); + } - NODELET_INFO("Initial ZED left camera pose (ZED pos. tracking): "); - NODELET_INFO(" * T: [%g,%g,%g]", mInitialPoseSl.getTranslation().x, mInitialPoseSl.getTranslation().y, - mInitialPoseSl.getTranslation().z); - NODELET_INFO(" * Q: [%g,%g,%g,%g]", mInitialPoseSl.getOrientation().ox, mInitialPoseSl.getOrientation().oy, - mInitialPoseSl.getOrientation().oz, mInitialPoseSl.getOrientation().ow); + NODELET_INFO("Initial ZED left camera pose (ZED pos. 
tracking): "); + NODELET_INFO(" * T: [%g,%g,%g]", mInitialPoseSl.getTranslation().x, mInitialPoseSl.getTranslation().y, + mInitialPoseSl.getTranslation().z); + NODELET_INFO(" * Q: [%g,%g,%g,%g]", mInitialPoseSl.getOrientation().ox, mInitialPoseSl.getOrientation().oy, + mInitialPoseSl.getOrientation().oz, mInitialPoseSl.getOrientation().ow); - // Positional Tracking parameters - sl::PositionalTrackingParameters posTrackParams; + // Positional Tracking parameters + sl::PositionalTrackingParameters posTrackParams; - posTrackParams.initial_world_transform = mInitialPoseSl; - posTrackParams.enable_area_memory = mAreaMemory; + posTrackParams.initial_world_transform = mInitialPoseSl; + posTrackParams.enable_area_memory = mAreaMemory; - mPoseSmoothing = false; // Always false. Pose Smoothing is to be enabled only for VR/AR applications - posTrackParams.enable_pose_smoothing = mPoseSmoothing; + mPoseSmoothing = false; // Always false. Pose Smoothing is to be enabled only for VR/AR applications + posTrackParams.enable_pose_smoothing = mPoseSmoothing; - posTrackParams.set_floor_as_origin = mFloorAlignment; + posTrackParams.set_floor_as_origin = mFloorAlignment; - if (mAreaMemDbPath != "" && !sl_tools::file_exist(mAreaMemDbPath)) { - posTrackParams.area_file_path = ""; - NODELET_WARN_STREAM("area_memory_db_path [" << mAreaMemDbPath << "] doesn't exist or is unreachable. "); - if (mSaveAreaMapOnClosing) { - NODELET_INFO_STREAM("The file will be automatically created when closing the node or calling the " - "'save_area_map' service if a valid Area Memory is available."); - } - } else { - posTrackParams.area_file_path = mAreaMemDbPath.c_str(); + if (mAreaMemDbPath != "" && !sl_tools::file_exist(mAreaMemDbPath)) + { + posTrackParams.area_file_path = ""; + NODELET_WARN_STREAM("area_memory_db_path [" << mAreaMemDbPath << "] doesn't exist or is unreachable. 
"); + if (mSaveAreaMapOnClosing) + { + NODELET_INFO_STREAM( + "The file will be automatically created when closing the node or calling the " + "'save_area_map' service if a valid Area Memory is available."); } + } + else + { + posTrackParams.area_file_path = mAreaMemDbPath.c_str(); + } - posTrackParams.enable_imu_fusion = mImuFusion; + posTrackParams.enable_imu_fusion = mImuFusion; - posTrackParams.set_as_static = false; - posTrackParams.set_gravity_as_origin = mSetGravityAsOrigin; - posTrackParams.mode = mPosTrkMode; + posTrackParams.set_as_static = false; + posTrackParams.set_gravity_as_origin = mSetGravityAsOrigin; + posTrackParams.mode = mPosTrkMode; - sl::ERROR_CODE err = mZed.enablePositionalTracking(posTrackParams); + sl::ERROR_CODE err = mZed.enablePositionalTracking(posTrackParams); - if (err == sl::ERROR_CODE::SUCCESS) { - mPosTrackingActivated = true; - } else { - mPosTrackingActivated = false; + if (err == sl::ERROR_CODE::SUCCESS) + { + mPosTrackingActivated = true; + } + else + { + mPosTrackingActivated = false; - NODELET_WARN("Tracking not activated: %s", sl::toString(err).c_str()); - } + NODELET_WARN("Tracking not activated: %s", sl::toString(err).c_str()); + } } bool ZEDWrapperNodelet::on_save_area_memory(zed_interfaces::save_area_memory::Request& req, - zed_interfaces::save_area_memory::Response& res) + zed_interfaces::save_area_memory::Response& res) { - std::string file_path = sl_tools::resolveFilePath(req.area_memory_filename); + std::string file_path = sl_tools::resolveFilePath(req.area_memory_filename); - bool ret = saveAreaMap(file_path, &res.info); + bool ret = saveAreaMap(file_path, &res.info); - return ret; + return ret; } bool ZEDWrapperNodelet::saveAreaMap(std::string file_path, std::string* out_msg) { - std::ostringstream os; - - bool node_running = mNhNs.ok(); - if (!mZed.isOpened()) { - os << "Cannot save Area Memory. The camera is closed."; + std::ostringstream os; - if (node_running) - NODELET_WARN_STREAM(os.str().c_str()); - else - std::cerr << os.str() << std::endl; + bool node_running = mNhNs.ok(); + if (!mZed.isOpened()) + { + os << "Cannot save Area Memory. 
The camera is closed."; - if (out_msg) - *out_msg = os.str(); + if (node_running) + NODELET_WARN_STREAM(os.str().c_str()); + else + std::cerr << os.str() << std::endl; - return false; - } + if (out_msg) + *out_msg = os.str(); - if (mPosTrackingActivated && mAreaMemory) { - sl::ERROR_CODE err = mZed.saveAreaMap(sl::String(file_path.c_str())); - if (err != sl::ERROR_CODE::SUCCESS) { - os << "Error saving positional tracking area memory: " << sl::toString(err).c_str(); + return false; + } - if (node_running) - NODELET_WARN_STREAM(os.str().c_str()); - else - std::cerr << os.str() << std::endl; + if (mPosTrackingActivated && mAreaMemory) + { + sl::ERROR_CODE err = mZed.saveAreaMap(sl::String(file_path.c_str())); + if (err != sl::ERROR_CODE::SUCCESS) + { + os << "Error saving positional tracking area memory: " << sl::toString(err).c_str(); - if (out_msg) - *out_msg = os.str(); + if (node_running) + NODELET_WARN_STREAM(os.str().c_str()); + else + std::cerr << os.str() << std::endl; - return false; - } + if (out_msg) + *out_msg = os.str(); - if (node_running) - NODELET_INFO_STREAM("Saving Area Memory file: " << file_path); - else - std::cerr << "Saving Area Memory file: " << file_path << " "; - - sl::AREA_EXPORTING_STATE state; - do { - std::this_thread::sleep_for(std::chrono::milliseconds(500)); - state = mZed.getAreaExportState(); - if (node_running) - NODELET_INFO_STREAM("."); - else - std::cerr << "."; - } while (state == sl::AREA_EXPORTING_STATE::RUNNING); - if (!node_running) - std::cerr << std::endl; - - if (state == sl::AREA_EXPORTING_STATE::SUCCESS) { - os << "Area Memory file saved correctly."; - - if (node_running) - NODELET_INFO_STREAM(os.str().c_str()); - else - std::cerr << os.str() << std::endl; - - if (out_msg) - *out_msg = os.str(); - return true; - } + return false; + } - os << "Error saving Area Memory file: " << sl::toString(state).c_str(); + if (node_running) + NODELET_INFO_STREAM("Saving Area Memory file: " << file_path); + else + std::cerr << "Saving Area Memory file: " << file_path << " "; - if (node_running) - NODELET_WARN_STREAM(os.str().c_str()); - else - std::cerr << os.str() << std::endl; + sl::AREA_EXPORTING_STATE state; + do + { + std::this_thread::sleep_for(std::chrono::milliseconds(500)); + state = mZed.getAreaExportState(); + if (node_running) + NODELET_INFO_STREAM("."); + else + std::cerr << "."; + } while (state == sl::AREA_EXPORTING_STATE::RUNNING); + if (!node_running) + std::cerr << std::endl; + + if (state == sl::AREA_EXPORTING_STATE::SUCCESS) + { + os << "Area Memory file saved correctly."; - if (out_msg) - *out_msg = os.str(); + if (node_running) + NODELET_INFO_STREAM(os.str().c_str()); + else + std::cerr << os.str() << std::endl; - return false; + if (out_msg) + *out_msg = os.str(); + return true; } + + os << "Error saving Area Memory file: " << sl::toString(state).c_str(); + + if (node_running) + NODELET_WARN_STREAM(os.str().c_str()); + else + std::cerr << os.str() << std::endl; + + if (out_msg) + *out_msg = os.str(); + return false; + } + return false; } void ZEDWrapperNodelet::publishOdom(tf2::Transform odom2baseTransf, sl::Pose& slPose, ros::Time t) { - nav_msgs::OdometryPtr odomMsg = boost::make_shared(); - - odomMsg->header.stamp = t; - odomMsg->header.frame_id = mOdometryFrameId; // frame - odomMsg->child_frame_id = mBaseFrameId; // camera_frame - // conversion from Tranform to message - geometry_msgs::Transform base2odom = tf2::toMsg(odom2baseTransf); - // Add all value in odometry message - odomMsg->pose.pose.position.x = base2odom.translation.x; 
- odomMsg->pose.pose.position.y = base2odom.translation.y;
- odomMsg->pose.pose.position.z = base2odom.translation.z;
- odomMsg->pose.pose.orientation.x = base2odom.rotation.x;
- odomMsg->pose.pose.orientation.y = base2odom.rotation.y;
- odomMsg->pose.pose.orientation.z = base2odom.rotation.z;
- odomMsg->pose.pose.orientation.w = base2odom.rotation.w;
-
- // Odometry pose covariance
-
- for (size_t i = 0; i < odomMsg->pose.covariance.size(); i++) {
- odomMsg->pose.covariance[i] = static_cast<double>(slPose.pose_covariance[i]);
-
- if (mTwoDMode) {
- if (i == 14 || i == 21 || i == 28) {
- odomMsg->pose.covariance[i] = 1e-9; // Very low covariance if 2D mode
- } else if ((i >= 2 && i <= 4) || (i >= 8 && i <= 10) || (i >= 12 && i <= 13) || (i >= 15 && i <= 20) || (i >= 22 && i <= 27) || (i == 29) || (i >= 32 && i <= 34)) {
- odomMsg->pose.covariance[i] = 0.0;
- }
- }
+ nav_msgs::OdometryPtr odomMsg = boost::make_shared<nav_msgs::Odometry>();
+
+ odomMsg->header.stamp = t;
+ odomMsg->header.frame_id = mOdometryFrameId; // frame
+ odomMsg->child_frame_id = mBaseFrameId;      // camera_frame
+ // conversion from Transform to message
+ geometry_msgs::Transform base2odom = tf2::toMsg(odom2baseTransf);
+ // Add all value in odometry message
+ odomMsg->pose.pose.position.x = base2odom.translation.x;
+ odomMsg->pose.pose.position.y = base2odom.translation.y;
+ odomMsg->pose.pose.position.z = base2odom.translation.z;
+ odomMsg->pose.pose.orientation.x = base2odom.rotation.x;
+ odomMsg->pose.pose.orientation.y = base2odom.rotation.y;
+ odomMsg->pose.pose.orientation.z = base2odom.rotation.z;
+ odomMsg->pose.pose.orientation.w = base2odom.rotation.w;
+
+ // Odometry pose covariance
+
+ for (size_t i = 0; i < odomMsg->pose.covariance.size(); i++)
+ {
+ odomMsg->pose.covariance[i] = static_cast<double>(slPose.pose_covariance[i]);
+
+ if (mTwoDMode)
+ {
+ if (i == 14 || i == 21 || i == 28)
+ {
+ odomMsg->pose.covariance[i] = 1e-9; // Very low covariance if 2D mode
+ }
+ else if ((i >= 2 && i <= 4) || (i >= 8 && i <= 10) || (i >= 12 && i <= 13) || (i >= 15 && i <= 20) ||
+ (i >= 22 && i <= 27) || (i == 29) || (i >= 32 && i <= 34))
+ {
+ odomMsg->pose.covariance[i] = 0.0;
+ }
 }
+ }

- // Publish odometry message
- mPubOdom.publish(odomMsg);
+ // Publish odometry message
+ mPubOdom.publish(odomMsg);
 }

 void ZEDWrapperNodelet::publishPose(ros::Time t)
 {
- tf2::Transform base_pose;
- base_pose.setIdentity();
+ tf2::Transform base_pose;
+ base_pose.setIdentity();

- if (mPublishMapTf) {
- base_pose = mMap2BaseTransf;
- } else if (mPublishTf) {
- base_pose = mOdom2BaseTransf;
- }
+ if (mPublishMapTf)
+ {
+ base_pose = mMap2BaseTransf;
+ }
+ else if (mPublishTf)
+ {
+ base_pose = mOdom2BaseTransf;
+ }

- std_msgs::Header header;
- header.stamp = t;
- header.frame_id = mMapFrameId;
- geometry_msgs::Pose pose;
+ std_msgs::Header header;
+ header.stamp = t;
+ header.frame_id = mMapFrameId;
+ geometry_msgs::Pose pose;

- // conversion from Tranform to message
- geometry_msgs::Transform base2frame = tf2::toMsg(base_pose);
+ // conversion from Transform to message
+ geometry_msgs::Transform base2frame = tf2::toMsg(base_pose);

- // Add all value in Pose message
- pose.position.x = base2frame.translation.x;
- pose.position.y = base2frame.translation.y;
- pose.position.z = base2frame.translation.z;
- pose.orientation.x = base2frame.rotation.x;
- pose.orientation.y = base2frame.rotation.y;
- pose.orientation.z = base2frame.rotation.z;
- pose.orientation.w = base2frame.rotation.w;
+ // Add all value in Pose message
+ pose.position.x = base2frame.translation.x;
+ 
pose.position.y = base2frame.translation.y; + pose.position.z = base2frame.translation.z; + pose.orientation.x = base2frame.rotation.x; + pose.orientation.y = base2frame.rotation.y; + pose.orientation.z = base2frame.rotation.z; + pose.orientation.w = base2frame.rotation.w; - if (mPubPose.getNumSubscribers() > 0) { - geometry_msgs::PoseStamped poseNoCov; + if (mPubPose.getNumSubscribers() > 0) + { + geometry_msgs::PoseStamped poseNoCov; - poseNoCov.header = header; - poseNoCov.pose = pose; + poseNoCov.header = header; + poseNoCov.pose = pose; - // Publish pose stamped message - mPubPose.publish(poseNoCov); - } + // Publish pose stamped message + mPubPose.publish(poseNoCov); + } - if (mPubPoseCov.getNumSubscribers() > 0) { - geometry_msgs::PoseWithCovarianceStampedPtr poseCovMsg = boost::make_shared(); + if (mPubPoseCov.getNumSubscribers() > 0) + { + geometry_msgs::PoseWithCovarianceStampedPtr poseCovMsg = + boost::make_shared(); - poseCovMsg->header = header; - poseCovMsg->pose.pose = pose; + poseCovMsg->header = header; + poseCovMsg->pose.pose = pose; - // Odometry pose covariance if available - for (size_t i = 0; i < poseCovMsg->pose.covariance.size(); i++) { - poseCovMsg->pose.covariance[i] = static_cast(mLastZedPose.pose_covariance[i]); + // Odometry pose covariance if available + for (size_t i = 0; i < poseCovMsg->pose.covariance.size(); i++) + { + poseCovMsg->pose.covariance[i] = static_cast(mLastZedPose.pose_covariance[i]); - if (mTwoDMode) { - if (i == 14 || i == 21 || i == 28) { - poseCovMsg->pose.covariance[i] = 1e-9; // Very low covariance if 2D mode - } else if ((i >= 2 && i <= 4) || (i >= 8 && i <= 10) || (i >= 12 && i <= 13) || (i >= 15 && i <= 16) || (i >= 18 && i <= 20) || (i == 22) || (i >= 24 && i <= 27)) { - poseCovMsg->pose.covariance[i] = 0.0; - } - } + if (mTwoDMode) + { + if (i == 14 || i == 21 || i == 28) + { + poseCovMsg->pose.covariance[i] = 1e-9; // Very low covariance if 2D mode } - - // Publish pose with covariance stamped message - mPubPoseCov.publish(poseCovMsg); + else if ((i >= 2 && i <= 4) || (i >= 8 && i <= 10) || (i >= 12 && i <= 13) || (i >= 15 && i <= 16) || + (i >= 18 && i <= 20) || (i == 22) || (i >= 24 && i <= 27)) + { + poseCovMsg->pose.covariance[i] = 0.0; + } + } } + + // Publish pose with covariance stamped message + mPubPoseCov.publish(poseCovMsg); + } } void ZEDWrapperNodelet::publishStaticImuFrame() { - // Publish IMU TF as static TF - if (!mPublishImuTf) { - return; - } + // Publish IMU TF as static TF + if (!mPublishImuTf) + { + return; + } - if (mStaticImuFramePublished) { - return; - } + if (mStaticImuFramePublished) + { + return; + } - mStaticImuTransformStamped.header.stamp = ros::Time::now(); - mStaticImuTransformStamped.header.frame_id = mLeftCamFrameId; - mStaticImuTransformStamped.child_frame_id = mImuFrameId; - sl::Translation sl_tr = mSlCamImuTransf.getTranslation(); - mStaticImuTransformStamped.transform.translation.x = sl_tr.x; - mStaticImuTransformStamped.transform.translation.y = sl_tr.y; - mStaticImuTransformStamped.transform.translation.z = sl_tr.z; - sl::Orientation sl_or = mSlCamImuTransf.getOrientation(); - mStaticImuTransformStamped.transform.rotation.x = sl_or.ox; - mStaticImuTransformStamped.transform.rotation.y = sl_or.oy; - mStaticImuTransformStamped.transform.rotation.z = sl_or.oz; - mStaticImuTransformStamped.transform.rotation.w = sl_or.ow; - - // Publish transformation - mStaticTransformImuBroadcaster.sendTransform(mStaticImuTransformStamped); - - NODELET_INFO_STREAM("Published static transform '" << mImuFrameId 
<< "' -> '" << mLeftCamFrameId << "'"); - - mStaticImuFramePublished = true; + mStaticImuTransformStamped.header.stamp = ros::Time::now(); + mStaticImuTransformStamped.header.frame_id = mLeftCamFrameId; + mStaticImuTransformStamped.child_frame_id = mImuFrameId; + sl::Translation sl_tr = mSlCamImuTransf.getTranslation(); + mStaticImuTransformStamped.transform.translation.x = sl_tr.x; + mStaticImuTransformStamped.transform.translation.y = sl_tr.y; + mStaticImuTransformStamped.transform.translation.z = sl_tr.z; + sl::Orientation sl_or = mSlCamImuTransf.getOrientation(); + mStaticImuTransformStamped.transform.rotation.x = sl_or.ox; + mStaticImuTransformStamped.transform.rotation.y = sl_or.oy; + mStaticImuTransformStamped.transform.rotation.z = sl_or.oz; + mStaticImuTransformStamped.transform.rotation.w = sl_or.ow; + + // Publish transformation + mStaticTransformImuBroadcaster.sendTransform(mStaticImuTransformStamped); + + NODELET_INFO_STREAM("Published static transform '" << mImuFrameId << "' -> '" << mLeftCamFrameId << "'"); + + mStaticImuFramePublished = true; } void ZEDWrapperNodelet::publishOdomFrame(tf2::Transform odomTransf, ros::Time t) { - // ----> Avoid duplicated TF publishing - static ros::Time last_stamp; + // ----> Avoid duplicated TF publishing + static ros::Time last_stamp; - if (t == last_stamp) { - return; - } - last_stamp = t; - // <---- Avoid duplicated TF publishing + if (t == last_stamp) + { + return; + } + last_stamp = t; + // <---- Avoid duplicated TF publishing - if (!mSensor2BaseTransfValid) { - getSens2BaseTransform(); - } + if (!mSensor2BaseTransfValid) + { + getSens2BaseTransform(); + } - if (!mSensor2CameraTransfValid) { - getSens2CameraTransform(); - } + if (!mSensor2CameraTransfValid) + { + getSens2CameraTransform(); + } - if (!mCamera2BaseTransfValid) { - getCamera2BaseTransform(); - } + if (!mCamera2BaseTransfValid) + { + getCamera2BaseTransform(); + } - geometry_msgs::TransformStamped transformStamped; - transformStamped.header.stamp = t; - transformStamped.header.frame_id = mOdometryFrameId; - transformStamped.child_frame_id = mBaseFrameId; - // conversion from Tranform to message - transformStamped.transform = tf2::toMsg(odomTransf); - // Publish transformation - mTransformOdomBroadcaster.sendTransform(transformStamped); + geometry_msgs::TransformStamped transformStamped; + transformStamped.header.stamp = t; + transformStamped.header.frame_id = mOdometryFrameId; + transformStamped.child_frame_id = mBaseFrameId; + // conversion from Tranform to message + transformStamped.transform = tf2::toMsg(odomTransf); + // Publish transformation + mTransformOdomBroadcaster.sendTransform(transformStamped); - // NODELET_INFO_STREAM( "Published ODOM TF with TS: " << t ); + // NODELET_INFO_STREAM( "Published ODOM TF with TS: " << t ); } void ZEDWrapperNodelet::publishPoseFrame(tf2::Transform baseTransform, ros::Time t) { - // ----> Avoid duplicated TF publishing - static ros::Time last_stamp; + // ----> Avoid duplicated TF publishing + static ros::Time last_stamp; - if (t == last_stamp) { - return; - } - last_stamp = t; - // <---- Avoid duplicated TF publishing + if (t == last_stamp) + { + return; + } + last_stamp = t; + // <---- Avoid duplicated TF publishing - if (!mSensor2BaseTransfValid) { - getSens2BaseTransform(); - } + if (!mSensor2BaseTransfValid) + { + getSens2BaseTransform(); + } - if (!mSensor2CameraTransfValid) { - getSens2CameraTransform(); - } + if (!mSensor2CameraTransfValid) + { + getSens2CameraTransform(); + } - if (!mCamera2BaseTransfValid) { - 
getCamera2BaseTransform(); - } + if (!mCamera2BaseTransfValid) + { + getCamera2BaseTransform(); + } - geometry_msgs::TransformStamped transformStamped; - transformStamped.header.stamp = t; - transformStamped.header.frame_id = mMapFrameId; - transformStamped.child_frame_id = mOdometryFrameId; - // conversion from Tranform to message - transformStamped.transform = tf2::toMsg(baseTransform); - // Publish transformation - mTransformPoseBroadcaster.sendTransform(transformStamped); + geometry_msgs::TransformStamped transformStamped; + transformStamped.header.stamp = t; + transformStamped.header.frame_id = mMapFrameId; + transformStamped.child_frame_id = mOdometryFrameId; + // conversion from Tranform to message + transformStamped.transform = tf2::toMsg(baseTransform); + // Publish transformation + mTransformPoseBroadcaster.sendTransform(transformStamped); - // NODELET_INFO_STREAM( "Published POSE TF with TS: " << t ); + // NODELET_INFO_STREAM( "Published POSE TF with TS: " << t ); } void ZEDWrapperNodelet::publishImage(sensor_msgs::ImagePtr imgMsgPtr, sl::Mat img, - image_transport::CameraPublisher& pubImg, sensor_msgs::CameraInfoPtr camInfoMsg, - std::string imgFrameId, ros::Time t) + image_transport::CameraPublisher& pubImg, sensor_msgs::CameraInfoPtr camInfoMsg, + std::string imgFrameId, ros::Time t) { - camInfoMsg->header.stamp = t; - sl_tools::imageToROSmsg(imgMsgPtr, img, imgFrameId, t); - pubImg.publish(imgMsgPtr, camInfoMsg); + camInfoMsg->header.stamp = t; + sl_tools::imageToROSmsg(imgMsgPtr, img, imgFrameId, t); + pubImg.publish(imgMsgPtr, camInfoMsg); } void ZEDWrapperNodelet::publishDepth(sensor_msgs::ImagePtr imgMsgPtr, sl::Mat depth, ros::Time t) { - mDepthCamInfoMsg->header.stamp = t; - - // NODELET_DEBUG_STREAM("mOpenniDepthMode: " << mOpenniDepthMode); + mDepthCamInfoMsg->header.stamp = t; - if (!mOpenniDepthMode) { - // NODELET_INFO("Using float32"); - sl_tools::imageToROSmsg(imgMsgPtr, depth, mDepthOptFrameId, t); - mPubDepth.publish(imgMsgPtr, mDepthCamInfoMsg); - - return; - } + // NODELET_DEBUG_STREAM("mOpenniDepthMode: " << mOpenniDepthMode); - // NODELET_INFO("Using depth16"); + if (!mOpenniDepthMode) + { + // NODELET_INFO("Using float32"); sl_tools::imageToROSmsg(imgMsgPtr, depth, mDepthOptFrameId, t); mPubDepth.publish(imgMsgPtr, mDepthCamInfoMsg); + + return; + } + + // NODELET_INFO("Using depth16"); + sl_tools::imageToROSmsg(imgMsgPtr, depth, mDepthOptFrameId, t); + mPubDepth.publish(imgMsgPtr, mDepthCamInfoMsg); } void ZEDWrapperNodelet::publishDisparity(sl::Mat disparity, ros::Time t) { - sl::CameraInformation zedParam = mZed.getCameraInformation(mMatResol); + sl::CameraInformation zedParam = mZed.getCameraInformation(mMatResol); - sensor_msgs::ImagePtr disparityImgMsg = boost::make_shared(); - stereo_msgs::DisparityImagePtr disparityMsg = boost::make_shared(); + sensor_msgs::ImagePtr disparityImgMsg = boost::make_shared(); + stereo_msgs::DisparityImagePtr disparityMsg = boost::make_shared(); - sl_tools::imageToROSmsg(disparityImgMsg, disparity, mDisparityFrameId, t); + sl_tools::imageToROSmsg(disparityImgMsg, disparity, mDisparityFrameId, t); - disparityMsg->image = *disparityImgMsg; - disparityMsg->header = disparityMsg->image.header; + disparityMsg->image = *disparityImgMsg; + disparityMsg->header = disparityMsg->image.header; - disparityMsg->f = zedParam.camera_configuration.calibration_parameters.left_cam.fx; - disparityMsg->T = zedParam.camera_configuration.calibration_parameters.getCameraBaseline(); + disparityMsg->f = 
zedParam.camera_configuration.calibration_parameters.left_cam.fx; + disparityMsg->T = zedParam.camera_configuration.calibration_parameters.getCameraBaseline(); - if (disparityMsg->T > 0) { - disparityMsg->T *= -1.0f; - } + if (disparityMsg->T > 0) + { + disparityMsg->T *= -1.0f; + } - disparityMsg->min_disparity = disparityMsg->f * disparityMsg->T / mZed.getInitParameters().depth_minimum_distance; - disparityMsg->max_disparity = disparityMsg->f * disparityMsg->T / mZed.getInitParameters().depth_maximum_distance; + disparityMsg->min_disparity = disparityMsg->f * disparityMsg->T / mZed.getInitParameters().depth_minimum_distance; + disparityMsg->max_disparity = disparityMsg->f * disparityMsg->T / mZed.getInitParameters().depth_maximum_distance; - mPubDisparity.publish(disparityMsg); + mPubDisparity.publish(disparityMsg); } void ZEDWrapperNodelet::pointcloud_thread_func() { - std::unique_lock lock(mPcMutex); - - while (!mStopNode) { - while (!mPcDataReady) { // loop to avoid spurious wakeups - if (mPcDataReadyCondVar.wait_for(lock, std::chrono::milliseconds(500)) == std::cv_status::timeout) { - // Check thread stopping - if (mStopNode) { - return; - } else { - continue; - } - } - } + std::unique_lock lock(mPcMutex); - // ----> Check publishing frequency - double pc_period_msec = 1000.0 / mPointCloudFreq; - - static std::chrono::steady_clock::time_point last_time = std::chrono::steady_clock::now(); - std::chrono::steady_clock::time_point now = std::chrono::steady_clock::now(); + while (!mStopNode) + { + while (!mPcDataReady) + { // loop to avoid spurious wakeups + if (mPcDataReadyCondVar.wait_for(lock, std::chrono::milliseconds(500)) == std::cv_status::timeout) + { + // Check thread stopping + if (mStopNode) + { + return; + } + else + { + continue; + } + } + } - double elapsed_msec = std::chrono::duration_cast(now - last_time).count(); + // ----> Check publishing frequency + double pc_period_msec = 1000.0 / mPointCloudFreq; - if (elapsed_msec < pc_period_msec) { - std::this_thread::sleep_for(std::chrono::milliseconds(static_cast(pc_period_msec - elapsed_msec))); - } - // <---- Check publishing frequency + static std::chrono::steady_clock::time_point last_time = std::chrono::steady_clock::now(); + std::chrono::steady_clock::time_point now = std::chrono::steady_clock::now(); - last_time = std::chrono::steady_clock::now(); - publishPointCloud(); + double elapsed_msec = std::chrono::duration_cast(now - last_time).count(); - mPcDataReady = false; + if (elapsed_msec < pc_period_msec) + { + std::this_thread::sleep_for(std::chrono::milliseconds(static_cast(pc_period_msec - elapsed_msec))); } + // <---- Check publishing frequency + + last_time = std::chrono::steady_clock::now(); + publishPointCloud(); + + mPcDataReady = false; + } - NODELET_DEBUG("Pointcloud thread finished"); + NODELET_DEBUG("Pointcloud thread finished"); } void ZEDWrapperNodelet::publishPointCloud() { - sensor_msgs::PointCloud2Ptr pointcloudMsg = boost::make_shared(); + sensor_msgs::PointCloud2Ptr pointcloudMsg = boost::make_shared(); - // Publish freq calculation - static std::chrono::steady_clock::time_point last_time = std::chrono::steady_clock::now(); - std::chrono::steady_clock::time_point now = std::chrono::steady_clock::now(); + // Publish freq calculation + static std::chrono::steady_clock::time_point last_time = std::chrono::steady_clock::now(); + std::chrono::steady_clock::time_point now = std::chrono::steady_clock::now(); - double elapsed_usec = std::chrono::duration_cast(now - last_time).count(); - last_time = now; + 
double elapsed_usec = std::chrono::duration_cast<std::chrono::microseconds>(now - last_time).count();
+ last_time = now;

- mPcPeriodMean_usec->addValue(elapsed_usec);
+ mPcPeriodMean_usec->addValue(elapsed_usec);

- // Initialize Point Cloud message
- // https://github.com/ros/common_msgs/blob/jade-devel/sensor_msgs/include/sensor_msgs/point_cloud2_iterator.h
+ // Initialize Point Cloud message
+ // https://github.com/ros/common_msgs/blob/jade-devel/sensor_msgs/include/sensor_msgs/point_cloud2_iterator.h

- int ptsCount = mMatResol.width * mMatResol.height;
+ int ptsCount = mMatResol.width * mMatResol.height;

- pointcloudMsg->header.stamp = mPointCloudTime;
+ pointcloudMsg->header.stamp = mPointCloudTime;

- if (pointcloudMsg->width != mMatResol.width || pointcloudMsg->height != mMatResol.height) {
- pointcloudMsg->header.frame_id = mPointCloudFrameId; // Set the header values of the ROS message
+ if (pointcloudMsg->width != mMatResol.width || pointcloudMsg->height != mMatResol.height)
+ {
+ pointcloudMsg->header.frame_id = mPointCloudFrameId; // Set the header values of the ROS message

- pointcloudMsg->is_bigendian = false;
- pointcloudMsg->is_dense = false;
+ pointcloudMsg->is_bigendian = false;
+ pointcloudMsg->is_dense = false;

- pointcloudMsg->width = mMatResol.width;
- pointcloudMsg->height = mMatResol.height;
+ pointcloudMsg->width = mMatResol.width;
+ pointcloudMsg->height = mMatResol.height;

- sensor_msgs::PointCloud2Modifier modifier(*pointcloudMsg);
- modifier.setPointCloud2Fields(4, "x", 1, sensor_msgs::PointField::FLOAT32, "y", 1, sensor_msgs::PointField::FLOAT32,
- "z", 1, sensor_msgs::PointField::FLOAT32, "rgb", 1, sensor_msgs::PointField::FLOAT32);
- }
+ sensor_msgs::PointCloud2Modifier modifier(*pointcloudMsg);
+ modifier.setPointCloud2Fields(4, "x", 1, sensor_msgs::PointField::FLOAT32, "y", 1, sensor_msgs::PointField::FLOAT32,
+ "z", 1, sensor_msgs::PointField::FLOAT32, "rgb", 1, sensor_msgs::PointField::FLOAT32);
+ }

- // Data copy
- sl::Vector4<float>* cpu_cloud = mCloud.getPtr<sl::float4>();
- float* ptCloudPtr = (float*)(&pointcloudMsg->data[0]);
+ // Data copy
+ sl::Vector4<float>* cpu_cloud = mCloud.getPtr<sl::float4>();
+ float* ptCloudPtr = (float*)(&pointcloudMsg->data[0]);

- // We can do a direct memcpy since data organization is the same
- memcpy(ptCloudPtr, (float*)cpu_cloud, 4 * ptsCount * sizeof(float));
+ // We can do a direct memcpy since data organization is the same
+ memcpy(ptCloudPtr, (float*)cpu_cloud, 4 * ptsCount * sizeof(float));

- // Pointcloud publishing
- mPubCloud.publish(pointcloudMsg);
+ // Pointcloud publishing
+ mPubCloud.publish(pointcloudMsg);
 }

 void ZEDWrapperNodelet::callback_pubFusedPointCloud(const ros::TimerEvent& e)
 {
- sensor_msgs::PointCloud2Ptr pointcloudFusedMsg = boost::make_shared<sensor_msgs::PointCloud2>();
+ sensor_msgs::PointCloud2Ptr pointcloudFusedMsg = boost::make_shared<sensor_msgs::PointCloud2>();

- uint32_t fusedCloudSubnumber = mPubFusedCloud.getNumSubscribers();
+ uint32_t fusedCloudSubnumber = mPubFusedCloud.getNumSubscribers();

- if (fusedCloudSubnumber == 0) {
- return;
- }
+ if (fusedCloudSubnumber == 0)
+ {
+ return;
+ }

- std::lock_guard<std::mutex> lock(mCloseZedMutex);
+ std::lock_guard<std::mutex> lock(mCloseZedMutex);

- if (!mZed.isOpened()) {
- return;
- }
+ if (!mZed.isOpened())
+ {
+ return;
+ }

- // pointcloudFusedMsg->header.stamp = t;
- mZed.requestSpatialMapAsync();
+ // pointcloudFusedMsg->header.stamp = t;
+ mZed.requestSpatialMapAsync();

- while (mZed.getSpatialMapRequestStatusAsync() == sl::ERROR_CODE::FAILURE) {
- // Mesh is still generating
- std::this_thread::sleep_for(std::chrono::milliseconds(1));
- }
+ 
while (mZed.getSpatialMapRequestStatusAsync() == sl::ERROR_CODE::FAILURE) + { + // Mesh is still generating + std::this_thread::sleep_for(std::chrono::milliseconds(1)); + } - sl::ERROR_CODE res = mZed.retrieveSpatialMapAsync(mFusedPC); + sl::ERROR_CODE res = mZed.retrieveSpatialMapAsync(mFusedPC); - if (res != sl::ERROR_CODE::SUCCESS) { - NODELET_WARN_STREAM("Fused point cloud not extracted: " << sl::toString(res).c_str()); - return; - } + if (res != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Fused point cloud not extracted: " << sl::toString(res).c_str()); + return; + } - size_t ptsCount = mFusedPC.getNumberOfPoints(); - bool resized = false; + size_t ptsCount = mFusedPC.getNumberOfPoints(); + bool resized = false; - if (pointcloudFusedMsg->width != ptsCount || pointcloudFusedMsg->height != 1) { - // Initialize Point Cloud message - // https://github.com/ros/common_msgs/blob/jade-devel/sensor_msgs/include/sensor_msgs/point_cloud2_iterator.h - pointcloudFusedMsg->header.frame_id = mMapFrameId; // Set the header values of the ROS message - pointcloudFusedMsg->is_bigendian = false; - pointcloudFusedMsg->is_dense = false; - pointcloudFusedMsg->width = ptsCount; - pointcloudFusedMsg->height = 1; + if (pointcloudFusedMsg->width != ptsCount || pointcloudFusedMsg->height != 1) + { + // Initialize Point Cloud message + // https://github.com/ros/common_msgs/blob/jade-devel/sensor_msgs/include/sensor_msgs/point_cloud2_iterator.h + pointcloudFusedMsg->header.frame_id = mMapFrameId; // Set the header values of the ROS message + pointcloudFusedMsg->is_bigendian = false; + pointcloudFusedMsg->is_dense = false; + pointcloudFusedMsg->width = ptsCount; + pointcloudFusedMsg->height = 1; - sensor_msgs::PointCloud2Modifier modifier(*pointcloudFusedMsg); - modifier.setPointCloud2Fields(4, "x", 1, sensor_msgs::PointField::FLOAT32, "y", 1, sensor_msgs::PointField::FLOAT32, - "z", 1, sensor_msgs::PointField::FLOAT32, "rgb", 1, sensor_msgs::PointField::FLOAT32); + sensor_msgs::PointCloud2Modifier modifier(*pointcloudFusedMsg); + modifier.setPointCloud2Fields(4, "x", 1, sensor_msgs::PointField::FLOAT32, "y", 1, sensor_msgs::PointField::FLOAT32, + "z", 1, sensor_msgs::PointField::FLOAT32, "rgb", 1, sensor_msgs::PointField::FLOAT32); - resized = true; - } + resized = true; + } - std::chrono::steady_clock::time_point start_time = std::chrono::steady_clock::now(); + std::chrono::steady_clock::time_point start_time = std::chrono::steady_clock::now(); - // NODELET_INFO_STREAM("Chunks: " << mFusedPC.chunks.size()); + // NODELET_INFO_STREAM("Chunks: " << mFusedPC.chunks.size()); - int index = 0; - float* ptCloudPtr = (float*)(&pointcloudFusedMsg->data[0]); - int updated = 0; + int index = 0; + float* ptCloudPtr = (float*)(&pointcloudFusedMsg->data[0]); + int updated = 0; - for (int c = 0; c < mFusedPC.chunks.size(); c++) { - if (mFusedPC.chunks[c].has_been_updated || resized) { - updated++; + for (int c = 0; c < mFusedPC.chunks.size(); c++) + { + if (mFusedPC.chunks[c].has_been_updated || resized) + { + updated++; - size_t chunkSize = mFusedPC.chunks[c].vertices.size(); + size_t chunkSize = mFusedPC.chunks[c].vertices.size(); - if (chunkSize > 0) { - float* cloud_pts = (float*)(mFusedPC.chunks[c].vertices.data()); + if (chunkSize > 0) + { + float* cloud_pts = (float*)(mFusedPC.chunks[c].vertices.data()); - memcpy(ptCloudPtr, cloud_pts, 4 * chunkSize * sizeof(float)); + memcpy(ptCloudPtr, cloud_pts, 4 * chunkSize * sizeof(float)); - ptCloudPtr += 4 * chunkSize; + ptCloudPtr += 4 * 
chunkSize; - pointcloudFusedMsg->header.stamp = sl_tools::slTime2Ros(mFusedPC.chunks[c].timestamp); - } - } else { - index += mFusedPC.chunks[c].vertices.size(); - } + pointcloudFusedMsg->header.stamp = sl_tools::slTime2Ros(mFusedPC.chunks[c].timestamp); + } } + else + { + index += mFusedPC.chunks[c].vertices.size(); + } + } - std::chrono::steady_clock::time_point end_time = std::chrono::steady_clock::now(); + std::chrono::steady_clock::time_point end_time = std::chrono::steady_clock::now(); - // NODELET_INFO_STREAM("Updated: " << updated); + // NODELET_INFO_STREAM("Updated: " << updated); - // double elapsed_usec = std::chrono::duration_cast(end_time - start_time).count(); + // double elapsed_usec = std::chrono::duration_cast(end_time - start_time).count(); - // NODELET_INFO_STREAM("Data copy: " << elapsed_usec << " usec [" << ptsCount << "] - " << (static_cast - // (ptsCount) / elapsed_usec) << " pts/usec"); + // NODELET_INFO_STREAM("Data copy: " << elapsed_usec << " usec [" << ptsCount << "] - " << (static_cast + // (ptsCount) / elapsed_usec) << " pts/usec"); - // Pointcloud publishing - mPubFusedCloud.publish(pointcloudFusedMsg); + // Pointcloud publishing + mPubFusedCloud.publish(pointcloudFusedMsg); } void ZEDWrapperNodelet::publishCamInfo(sensor_msgs::CameraInfoPtr camInfoMsg, ros::Publisher pubCamInfo, ros::Time t) { - static int seq = 0; - camInfoMsg->header.stamp = t; - camInfoMsg->header.seq = seq; - pubCamInfo.publish(camInfoMsg); - seq++; + static int seq = 0; + camInfoMsg->header.stamp = t; + camInfoMsg->header.seq = seq; + pubCamInfo.publish(camInfoMsg); + seq++; } void ZEDWrapperNodelet::fillCamInfo(sl::Camera& zed, sensor_msgs::CameraInfoPtr leftCamInfoMsg, - sensor_msgs::CameraInfoPtr rightCamInfoMsg, std::string leftFrameId, std::string rightFrameId, - bool rawParam /*= false*/) + sensor_msgs::CameraInfoPtr rightCamInfoMsg, std::string leftFrameId, + std::string rightFrameId, bool rawParam /*= false*/) { - sl::CalibrationParameters zedParam; + sl::CalibrationParameters zedParam; - if (rawParam) { - zedParam = zed.getCameraInformation(mMatResol).camera_configuration.calibration_parameters_raw; - } else { - zedParam = zed.getCameraInformation(mMatResol).camera_configuration.calibration_parameters; - } + if (rawParam) + { + zedParam = zed.getCameraInformation(mMatResol).camera_configuration.calibration_parameters_raw; + } + else + { + zedParam = zed.getCameraInformation(mMatResol).camera_configuration.calibration_parameters; + } - float baseline = zedParam.getCameraBaseline(); - leftCamInfoMsg->distortion_model = sensor_msgs::distortion_models::PLUMB_BOB; - rightCamInfoMsg->distortion_model = sensor_msgs::distortion_models::PLUMB_BOB; - leftCamInfoMsg->D.resize(5); - rightCamInfoMsg->D.resize(5); - leftCamInfoMsg->D[0] = zedParam.left_cam.disto[0]; // k1 - leftCamInfoMsg->D[1] = zedParam.left_cam.disto[1]; // k2 - leftCamInfoMsg->D[2] = zedParam.left_cam.disto[4]; // k3 - leftCamInfoMsg->D[3] = zedParam.left_cam.disto[2]; // p1 - leftCamInfoMsg->D[4] = zedParam.left_cam.disto[3]; // p2 - rightCamInfoMsg->D[0] = zedParam.right_cam.disto[0]; // k1 - rightCamInfoMsg->D[1] = zedParam.right_cam.disto[1]; // k2 - rightCamInfoMsg->D[2] = zedParam.right_cam.disto[4]; // k3 - rightCamInfoMsg->D[3] = zedParam.right_cam.disto[2]; // p1 - rightCamInfoMsg->D[4] = zedParam.right_cam.disto[3]; // p2 - leftCamInfoMsg->K.fill(0.0); - rightCamInfoMsg->K.fill(0.0); - leftCamInfoMsg->K[0] = static_cast(zedParam.left_cam.fx); - leftCamInfoMsg->K[2] = static_cast(zedParam.left_cam.cx); - 
leftCamInfoMsg->K[4] = static_cast<double>(zedParam.left_cam.fy);
- leftCamInfoMsg->K[5] = static_cast<double>(zedParam.left_cam.cy);
- leftCamInfoMsg->K[8] = 1.0;
- rightCamInfoMsg->K[0] = static_cast<double>(zedParam.right_cam.fx);
- rightCamInfoMsg->K[2] = static_cast<double>(zedParam.right_cam.cx);
- rightCamInfoMsg->K[4] = static_cast<double>(zedParam.right_cam.fy);
- rightCamInfoMsg->K[5] = static_cast<double>(zedParam.right_cam.cy);
- rightCamInfoMsg->K[8] = 1.0;
- leftCamInfoMsg->R.fill(0.0);
- rightCamInfoMsg->R.fill(0.0);
-
- for (size_t i = 0; i < 3; i++) {
- // identity
- rightCamInfoMsg->R[i + i * 3] = 1;
- leftCamInfoMsg->R[i + i * 3] = 1;
- }
+ float baseline = zedParam.getCameraBaseline();
+ leftCamInfoMsg->distortion_model = sensor_msgs::distortion_models::PLUMB_BOB;
+ rightCamInfoMsg->distortion_model = sensor_msgs::distortion_models::PLUMB_BOB;
+ leftCamInfoMsg->D.resize(5);
+ rightCamInfoMsg->D.resize(5);
+ leftCamInfoMsg->D[0] = zedParam.left_cam.disto[0]; // k1
+ leftCamInfoMsg->D[1] = zedParam.left_cam.disto[1]; // k2
+ leftCamInfoMsg->D[2] = zedParam.left_cam.disto[4]; // k3
+ leftCamInfoMsg->D[3] = zedParam.left_cam.disto[2]; // p1
+ leftCamInfoMsg->D[4] = zedParam.left_cam.disto[3]; // p2
+ rightCamInfoMsg->D[0] = zedParam.right_cam.disto[0]; // k1
+ rightCamInfoMsg->D[1] = zedParam.right_cam.disto[1]; // k2
+ rightCamInfoMsg->D[2] = zedParam.right_cam.disto[4]; // k3
+ rightCamInfoMsg->D[3] = zedParam.right_cam.disto[2]; // p1
+ rightCamInfoMsg->D[4] = zedParam.right_cam.disto[3]; // p2
+ leftCamInfoMsg->K.fill(0.0);
+ rightCamInfoMsg->K.fill(0.0);
+ leftCamInfoMsg->K[0] = static_cast<double>(zedParam.left_cam.fx);
+ leftCamInfoMsg->K[2] = static_cast<double>(zedParam.left_cam.cx);
+ leftCamInfoMsg->K[4] = static_cast<double>(zedParam.left_cam.fy);
+ leftCamInfoMsg->K[5] = static_cast<double>(zedParam.left_cam.cy);
+ leftCamInfoMsg->K[8] = 1.0;
+ rightCamInfoMsg->K[0] = static_cast<double>(zedParam.right_cam.fx);
+ rightCamInfoMsg->K[2] = static_cast<double>(zedParam.right_cam.cx);
+ rightCamInfoMsg->K[4] = static_cast<double>(zedParam.right_cam.fy);
+ rightCamInfoMsg->K[5] = static_cast<double>(zedParam.right_cam.cy);
+ rightCamInfoMsg->K[8] = 1.0;
+ leftCamInfoMsg->R.fill(0.0);
+ rightCamInfoMsg->R.fill(0.0);
+
+ for (size_t i = 0; i < 3; i++)
+ {
+ // identity
+ rightCamInfoMsg->R[i + i * 3] = 1;
+ leftCamInfoMsg->R[i + i * 3] = 1;
+ }
- if (rawParam) {
- for (int i = 0; i < 9; i++) {
- rightCamInfoMsg->R[i] = zedParam.stereo_transform.getRotationMatrix().r[i];
- }
+ if (rawParam)
+ {
+ for (int i = 0; i < 9; i++)
+ {
+ rightCamInfoMsg->R[i] = zedParam.stereo_transform.getRotationMatrix().r[i];
+ }
 }
+ }
- leftCamInfoMsg->P.fill(0.0);
- rightCamInfoMsg->P.fill(0.0);
- leftCamInfoMsg->P[0] = static_cast<double>(zedParam.left_cam.fx);
- leftCamInfoMsg->P[2] = static_cast<double>(zedParam.left_cam.cx);
- leftCamInfoMsg->P[5] = static_cast<double>(zedParam.left_cam.fy);
- leftCamInfoMsg->P[6] = static_cast<double>(zedParam.left_cam.cy);
- leftCamInfoMsg->P[10] = 1.0;
- // http://docs.ros.org/api/sensor_msgs/html/msg/CameraInfo.html
- rightCamInfoMsg->P[3] = static_cast<double>(-1 * zedParam.left_cam.fx * baseline);
- rightCamInfoMsg->P[0] = static_cast<double>(zedParam.right_cam.fx);
- rightCamInfoMsg->P[2] = static_cast<double>(zedParam.right_cam.cx);
- rightCamInfoMsg->P[5] = static_cast<double>(zedParam.right_cam.fy);
- rightCamInfoMsg->P[6] = static_cast<double>(zedParam.right_cam.cy);
- rightCamInfoMsg->P[10] = 1.0;
- leftCamInfoMsg->width = rightCamInfoMsg->width = static_cast<uint32_t>(mMatResol.width);
- leftCamInfoMsg->height = rightCamInfoMsg->height = static_cast<uint32_t>(mMatResol.height);
- leftCamInfoMsg->header.frame_id = leftFrameId;
-
rightCamInfoMsg->header.frame_id = rightFrameId; + leftCamInfoMsg->P.fill(0.0); + rightCamInfoMsg->P.fill(0.0); + leftCamInfoMsg->P[0] = static_cast(zedParam.left_cam.fx); + leftCamInfoMsg->P[2] = static_cast(zedParam.left_cam.cx); + leftCamInfoMsg->P[5] = static_cast(zedParam.left_cam.fy); + leftCamInfoMsg->P[6] = static_cast(zedParam.left_cam.cy); + leftCamInfoMsg->P[10] = 1.0; + // http://docs.ros.org/api/sensor_msgs/html/msg/CameraInfo.html + rightCamInfoMsg->P[3] = static_cast(-1 * zedParam.left_cam.fx * baseline); + rightCamInfoMsg->P[0] = static_cast(zedParam.right_cam.fx); + rightCamInfoMsg->P[2] = static_cast(zedParam.right_cam.cx); + rightCamInfoMsg->P[5] = static_cast(zedParam.right_cam.fy); + rightCamInfoMsg->P[6] = static_cast(zedParam.right_cam.cy); + rightCamInfoMsg->P[10] = 1.0; + leftCamInfoMsg->width = rightCamInfoMsg->width = static_cast(mMatResol.width); + leftCamInfoMsg->height = rightCamInfoMsg->height = static_cast(mMatResol.height); + leftCamInfoMsg->header.frame_id = leftFrameId; + rightCamInfoMsg->header.frame_id = rightFrameId; } -void ZEDWrapperNodelet::fillCamDepthInfo(sl::Camera& zed, sensor_msgs::CameraInfoPtr depth_info_msg, std::string frame_id) +void ZEDWrapperNodelet::fillCamDepthInfo(sl::Camera& zed, sensor_msgs::CameraInfoPtr depth_info_msg, + std::string frame_id) { - sl::CalibrationParameters zedParam; - - zedParam = zed.getCameraInformation(mMatResol).camera_configuration.calibration_parameters; - - float baseline = zedParam.getCameraBaseline(); - depth_info_msg->distortion_model = sensor_msgs::distortion_models::PLUMB_BOB; - depth_info_msg->D.resize(5); - depth_info_msg->D[0] = zedParam.left_cam.disto[0]; // k1 - depth_info_msg->D[1] = zedParam.left_cam.disto[1]; // k2 - depth_info_msg->D[2] = zedParam.left_cam.disto[4]; // k3 - depth_info_msg->D[3] = zedParam.left_cam.disto[2]; // p1 - depth_info_msg->D[4] = zedParam.left_cam.disto[3]; // p2 - depth_info_msg->K.fill(0.0); - depth_info_msg->K[0] = static_cast(zedParam.left_cam.fx); - depth_info_msg->K[2] = static_cast(zedParam.left_cam.cx); - depth_info_msg->K[4] = static_cast(zedParam.left_cam.fy); - depth_info_msg->K[5] = static_cast(zedParam.left_cam.cy); - depth_info_msg->K[8] = 1.0; - depth_info_msg->R.fill(0.0); - - for (size_t i = 0; i < 3; i++) { - // identity - depth_info_msg->R[i + i * 3] = 1; - } + sl::CalibrationParameters zedParam; + + zedParam = zed.getCameraInformation(mMatResol).camera_configuration.calibration_parameters; + + float baseline = zedParam.getCameraBaseline(); + depth_info_msg->distortion_model = sensor_msgs::distortion_models::PLUMB_BOB; + depth_info_msg->D.resize(5); + depth_info_msg->D[0] = zedParam.left_cam.disto[0]; // k1 + depth_info_msg->D[1] = zedParam.left_cam.disto[1]; // k2 + depth_info_msg->D[2] = zedParam.left_cam.disto[4]; // k3 + depth_info_msg->D[3] = zedParam.left_cam.disto[2]; // p1 + depth_info_msg->D[4] = zedParam.left_cam.disto[3]; // p2 + depth_info_msg->K.fill(0.0); + depth_info_msg->K[0] = static_cast(zedParam.left_cam.fx); + depth_info_msg->K[2] = static_cast(zedParam.left_cam.cx); + depth_info_msg->K[4] = static_cast(zedParam.left_cam.fy); + depth_info_msg->K[5] = static_cast(zedParam.left_cam.cy); + depth_info_msg->K[8] = 1.0; + depth_info_msg->R.fill(0.0); + + for (size_t i = 0; i < 3; i++) + { + // identity + depth_info_msg->R[i + i * 3] = 1; + } - depth_info_msg->P.fill(0.0); - depth_info_msg->P[0] = static_cast(zedParam.left_cam.fx); - depth_info_msg->P[2] = static_cast(zedParam.left_cam.cx); - depth_info_msg->P[5] = 
static_cast(zedParam.left_cam.fy); - depth_info_msg->P[6] = static_cast(zedParam.left_cam.cy); - depth_info_msg->P[10] = 1.0; - // http://docs.ros.org/api/sensor_msgs/html/msg/CameraInfo.html - depth_info_msg->width = static_cast(mMatResol.width); - depth_info_msg->height = static_cast(mMatResol.height); - depth_info_msg->header.frame_id = frame_id; + depth_info_msg->P.fill(0.0); + depth_info_msg->P[0] = static_cast(zedParam.left_cam.fx); + depth_info_msg->P[2] = static_cast(zedParam.left_cam.cx); + depth_info_msg->P[5] = static_cast(zedParam.left_cam.fy); + depth_info_msg->P[6] = static_cast(zedParam.left_cam.cy); + depth_info_msg->P[10] = 1.0; + // http://docs.ros.org/api/sensor_msgs/html/msg/CameraInfo.html + depth_info_msg->width = static_cast(mMatResol.width); + depth_info_msg->height = static_cast(mMatResol.height); + depth_info_msg->header.frame_id = frame_id; } void ZEDWrapperNodelet::updateDynamicReconfigure() { - // NODELET_DEBUG_STREAM( "updateDynamicReconfigure MUTEX LOCK"); - mDynParMutex.lock(); - zed_nodelets::ZedConfig config; - - config.auto_exposure_gain = mCamAutoExposure; - config.auto_whitebalance = mCamAutoWB; - config.brightness = mCamBrightness; - config.depth_confidence = mCamDepthConfidence; - config.depth_texture_conf = mCamDepthTextureConf; - config.contrast = mCamContrast; - config.exposure = mCamExposure; - config.gain = mCamGain; - config.hue = mCamHue; - config.saturation = mCamSaturation; - config.sharpness = mCamSharpness; - config.gamma = mCamGamma; - config.whitebalance_temperature = mCamWB / 100; - config.point_cloud_freq = mPointCloudFreq; - mDynParMutex.unlock(); - - mDynServerMutex.lock(); - mDynRecServer->updateConfig(config); - mDynServerMutex.unlock(); - - mUpdateDynParams = false; - - // NODELET_DEBUG_STREAM( "updateDynamicReconfigure MUTEX UNLOCK"); + // NODELET_DEBUG_STREAM( "updateDynamicReconfigure MUTEX LOCK"); + mDynParMutex.lock(); + zed_nodelets::ZedConfig config; + + config.auto_exposure_gain = mCamAutoExposure; + config.auto_whitebalance = mCamAutoWB; + config.brightness = mCamBrightness; + config.depth_confidence = mCamDepthConfidence; + config.depth_texture_conf = mCamDepthTextureConf; + config.contrast = mCamContrast; + config.exposure = mCamExposure; + config.gain = mCamGain; + config.hue = mCamHue; + config.saturation = mCamSaturation; + config.sharpness = mCamSharpness; + config.gamma = mCamGamma; + config.whitebalance_temperature = mCamWB / 100; + config.point_cloud_freq = mPointCloudFreq; + mDynParMutex.unlock(); + + mDynServerMutex.lock(); + mDynRecServer->updateConfig(config); + mDynServerMutex.unlock(); + + mUpdateDynParams = false; + + // NODELET_DEBUG_STREAM( "updateDynamicReconfigure MUTEX UNLOCK"); } void ZEDWrapperNodelet::callback_dynamicReconf(zed_nodelets::ZedConfig& config, uint32_t level) { - // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX LOCK"); - mDynParMutex.lock(); - DynParams param = static_cast(level); + // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX LOCK"); + mDynParMutex.lock(); + DynParams param = static_cast(level); - switch (param) { + switch (param) + { case CONFIDENCE: - mCamDepthConfidence = config.depth_confidence; - NODELET_INFO("Reconfigure confidence threshold: %d", mCamDepthConfidence); - mDynParMutex.unlock(); - // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); - break; + mCamDepthConfidence = config.depth_confidence; + NODELET_INFO("Reconfigure confidence threshold: %d", mCamDepthConfidence); + mDynParMutex.unlock(); + // NODELET_DEBUG_STREAM( "dynamicReconfCallback 
MUTEX UNLOCK"); + break; case TEXTURE_CONF: - mCamDepthTextureConf = config.depth_texture_conf; - NODELET_INFO("Reconfigure texture confidence threshold: %d", mCamDepthTextureConf); - mDynParMutex.unlock(); - // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); - break; + mCamDepthTextureConf = config.depth_texture_conf; + NODELET_INFO("Reconfigure texture confidence threshold: %d", mCamDepthTextureConf); + mDynParMutex.unlock(); + // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); + break; case POINTCLOUD_FREQ: - if (config.point_cloud_freq > mPubFrameRate) { - mPointCloudFreq = mPubFrameRate; - NODELET_WARN_STREAM("'point_cloud_freq' cannot be major than Video/Depth publishing framerate. Set to " - << mPointCloudFreq); + if (config.point_cloud_freq > mPubFrameRate) + { + mPointCloudFreq = mPubFrameRate; + NODELET_WARN_STREAM("'point_cloud_freq' cannot be major than Video/Depth publishing framerate. Set to " + << mPointCloudFreq); - mUpdateDynParams = true; - } else { - mPointCloudFreq = config.point_cloud_freq; - NODELET_INFO("Reconfigure point cloud pub. frequency: %g", mPointCloudFreq); - } + mUpdateDynParams = true; + } + else + { + mPointCloudFreq = config.point_cloud_freq; + NODELET_INFO("Reconfigure point cloud pub. frequency: %g", mPointCloudFreq); + } - mDynParMutex.unlock(); - // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); - break; + mDynParMutex.unlock(); + // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); + break; case BRIGHTNESS: - mCamBrightness = config.brightness; - NODELET_INFO("Reconfigure image brightness: %d", mCamBrightness); - mDynParMutex.unlock(); - // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); - break; + mCamBrightness = config.brightness; + NODELET_INFO("Reconfigure image brightness: %d", mCamBrightness); + mDynParMutex.unlock(); + // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); + break; case CONTRAST: - mCamContrast = config.contrast; - NODELET_INFO("Reconfigure image contrast: %d", mCamContrast); - mDynParMutex.unlock(); - // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); - break; + mCamContrast = config.contrast; + NODELET_INFO("Reconfigure image contrast: %d", mCamContrast); + mDynParMutex.unlock(); + // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); + break; case HUE: - mCamHue = config.hue; - NODELET_INFO("Reconfigure image hue: %d", mCamHue); - mDynParMutex.unlock(); - // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); - break; + mCamHue = config.hue; + NODELET_INFO("Reconfigure image hue: %d", mCamHue); + mDynParMutex.unlock(); + // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); + break; case SATURATION: - mCamSaturation = config.saturation; - NODELET_INFO("Reconfigure image saturation: %d", mCamSaturation); - mDynParMutex.unlock(); - // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); - break; + mCamSaturation = config.saturation; + NODELET_INFO("Reconfigure image saturation: %d", mCamSaturation); + mDynParMutex.unlock(); + // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); + break; case SHARPNESS: - mCamSharpness = config.sharpness; - NODELET_INFO("Reconfigure image sharpness: %d", mCamSharpness); - mDynParMutex.unlock(); - // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); - break; + mCamSharpness = config.sharpness; + NODELET_INFO("Reconfigure image sharpness: %d", mCamSharpness); + mDynParMutex.unlock(); + // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); + 
break; case GAMMA: - mCamGamma = config.gamma; - NODELET_INFO("Reconfigure image gamma: %d", mCamGamma); - mDynParMutex.unlock(); - // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); - break; + mCamGamma = config.gamma; + NODELET_INFO("Reconfigure image gamma: %d", mCamGamma); + mDynParMutex.unlock(); + // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); + break; case AUTO_EXP_GAIN: - if(config.auto_exposure_gain!=mCamAutoExposure) - { - mCamAutoExposure = config.auto_exposure_gain; - NODELET_INFO_STREAM("Reconfigure auto exposure/gain: " << mCamAutoExposure ? "ENABLED": "DISABLED"); - } - mDynParMutex.unlock(); - // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); - break; + if (config.auto_exposure_gain != mCamAutoExposure) + { + mCamAutoExposure = config.auto_exposure_gain; + NODELET_INFO_STREAM("Reconfigure auto exposure/gain: " << mCamAutoExposure ? "ENABLED" : "DISABLED"); + } + mDynParMutex.unlock(); + // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); + break; case GAIN: - mCamGain = config.gain; - if (mCamAutoExposure) { - NODELET_WARN("Reconfigure gain has no effect if 'auto_exposure_gain' is enabled"); - } else { - NODELET_INFO("Reconfigure gain: %d", mCamGain); - } - mDynParMutex.unlock(); - // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); - break; + mCamGain = config.gain; + if (mCamAutoExposure) + { + NODELET_WARN("Reconfigure gain has no effect if 'auto_exposure_gain' is enabled"); + } + else + { + NODELET_INFO("Reconfigure gain: %d", mCamGain); + } + mDynParMutex.unlock(); + // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); + break; case EXPOSURE: - mCamExposure = config.exposure; - if (mCamAutoExposure) { - NODELET_WARN("Reconfigure exposure has no effect if 'auto_exposure_gain' is enabled"); - } else { - NODELET_INFO("Reconfigure exposure: %d", mCamExposure); - } - mDynParMutex.unlock(); - // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); - break; + mCamExposure = config.exposure; + if (mCamAutoExposure) + { + NODELET_WARN("Reconfigure exposure has no effect if 'auto_exposure_gain' is enabled"); + } + else + { + NODELET_INFO("Reconfigure exposure: %d", mCamExposure); + } + mDynParMutex.unlock(); + // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); + break; case AUTO_WB: - if(config.auto_whitebalance!=mCamAutoWB) - { - mCamAutoWB = config.auto_whitebalance; - NODELET_INFO_STREAM("Reconfigure auto white balance: " << mCamAutoWB ? "ENABLED": "DISABLED"); - mTriggerAutoWB = true; - } else { - mTriggerAutoWB = false; - } - mDynParMutex.unlock(); - // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); - break; + if (config.auto_whitebalance != mCamAutoWB) + { + mCamAutoWB = config.auto_whitebalance; + NODELET_INFO_STREAM("Reconfigure auto white balance: " << mCamAutoWB ? 
"ENABLED" : "DISABLED"); + mTriggerAutoWB = true; + } + else + { + mTriggerAutoWB = false; + } + mDynParMutex.unlock(); + // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); + break; case WB_TEMP: - mCamWB = config.whitebalance_temperature * 100; - if (mCamAutoWB) { - NODELET_WARN("Reconfigure white balance temperature has no effect if 'auto_whitebalance' is enabled"); - } else { - NODELET_INFO("Reconfigure white balance temperature: %d", mCamWB); - } - mDynParMutex.unlock(); - // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); - break; + mCamWB = config.whitebalance_temperature * 100; + if (mCamAutoWB) + { + NODELET_WARN("Reconfigure white balance temperature has no effect if 'auto_whitebalance' is enabled"); + } + else + { + NODELET_INFO("Reconfigure white balance temperature: %d", mCamWB); + } + mDynParMutex.unlock(); + // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); + break; default: - NODELET_DEBUG_STREAM("dynamicReconfCallback Unknown param: " << level); - mDynParMutex.unlock(); - // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); - } + NODELET_DEBUG_STREAM("dynamicReconfCallback Unknown param: " << level); + mDynParMutex.unlock(); + // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); + } } void ZEDWrapperNodelet::pubVideoDepth() { - static sl::Timestamp lastZedTs = 0; // Used to calculate stable publish frequency - - uint32_t rgbSubnumber = mPubRgb.getNumSubscribers(); - uint32_t rgbRawSubnumber = mPubRawRgb.getNumSubscribers(); - uint32_t leftSubnumber = mPubLeft.getNumSubscribers(); - uint32_t leftRawSubnumber = mPubRawLeft.getNumSubscribers(); - uint32_t rightSubnumber = mPubRight.getNumSubscribers(); - uint32_t rightRawSubnumber = mPubRawRight.getNumSubscribers(); - uint32_t rgbGraySubnumber = mPubRgbGray.getNumSubscribers(); - uint32_t rgbGrayRawSubnumber = mPubRawRgbGray.getNumSubscribers(); - uint32_t leftGraySubnumber = mPubLeftGray.getNumSubscribers(); - uint32_t leftGrayRawSubnumber = mPubRawLeftGray.getNumSubscribers(); - uint32_t rightGraySubnumber = mPubRightGray.getNumSubscribers(); - uint32_t rightGrayRawSubnumber = mPubRawRightGray.getNumSubscribers(); - uint32_t depthSubnumber = mPubDepth.getNumSubscribers(); - uint32_t disparitySubnumber = mPubDisparity.getNumSubscribers(); - uint32_t confMapSubnumber = mPubConfMap.getNumSubscribers(); - uint32_t stereoSubNumber = mPubStereo.getNumSubscribers(); - uint32_t stereoRawSubNumber = mPubRawStereo.getNumSubscribers(); - - uint32_t tot_sub = rgbSubnumber + rgbRawSubnumber + leftSubnumber + leftRawSubnumber + rightSubnumber + rightRawSubnumber + rgbGraySubnumber + rgbGrayRawSubnumber + leftGraySubnumber + leftGrayRawSubnumber + rightGraySubnumber + rightGrayRawSubnumber + depthSubnumber + disparitySubnumber + confMapSubnumber + stereoSubNumber + stereoRawSubNumber; - - bool retrieved = false; - - sl::Mat mat_left, mat_left_raw; - sl::Mat mat_right, mat_right_raw; - sl::Mat mat_left_gray, mat_left_raw_gray; - sl::Mat mat_right_gray, mat_right_raw_gray; - sl::Mat mat_depth, mat_disp, mat_conf; - - sl::Timestamp ts_rgb = 0; // used to check RGB/Depth sync - sl::Timestamp ts_depth = 0; // used to check RGB/Depth sync - sl::Timestamp grab_ts = 0; - - // ----> Retrieve all required image data - if (rgbSubnumber + leftSubnumber + stereoSubNumber > 0) { - mZed.retrieveImage(mat_left, sl::VIEW::LEFT, sl::MEM::CPU, mMatResol); - retrieved = true; - ts_rgb = mat_left.timestamp; - grab_ts = mat_left.timestamp; - } - if (rgbRawSubnumber + leftRawSubnumber + 
stereoRawSubNumber > 0) { - mZed.retrieveImage(mat_left_raw, sl::VIEW::LEFT_UNRECTIFIED, sl::MEM::CPU, mMatResol); - retrieved = true; - grab_ts = mat_left_raw.timestamp; - } - if (rightSubnumber + stereoSubNumber > 0) { - mZed.retrieveImage(mat_right, sl::VIEW::RIGHT, sl::MEM::CPU, mMatResol); - retrieved = true; - grab_ts = mat_right.timestamp; - } - if (rightRawSubnumber + stereoRawSubNumber > 0) { - mZed.retrieveImage(mat_right_raw, sl::VIEW::RIGHT_UNRECTIFIED, sl::MEM::CPU, mMatResol); - retrieved = true; - grab_ts = mat_right_raw.timestamp; - } - if (rgbGraySubnumber + leftGraySubnumber > 0) { - mZed.retrieveImage(mat_left_gray, sl::VIEW::LEFT_GRAY, sl::MEM::CPU, mMatResol); - retrieved = true; - grab_ts = mat_left_gray.timestamp; - } - if (rgbGrayRawSubnumber + leftGrayRawSubnumber > 0) { - mZed.retrieveImage(mat_left_raw_gray, sl::VIEW::LEFT_UNRECTIFIED_GRAY, sl::MEM::CPU, mMatResol); - retrieved = true; - grab_ts = mat_left_raw_gray.timestamp; - } - if (rightGraySubnumber > 0) { - mZed.retrieveImage(mat_right_gray, sl::VIEW::RIGHT_GRAY, sl::MEM::CPU, mMatResol); - retrieved = true; - grab_ts = mat_right_gray.timestamp; + static sl::Timestamp lastZedTs = 0; // Used to calculate stable publish frequency + + uint32_t rgbSubnumber = mPubRgb.getNumSubscribers(); + uint32_t rgbRawSubnumber = mPubRawRgb.getNumSubscribers(); + uint32_t leftSubnumber = mPubLeft.getNumSubscribers(); + uint32_t leftRawSubnumber = mPubRawLeft.getNumSubscribers(); + uint32_t rightSubnumber = mPubRight.getNumSubscribers(); + uint32_t rightRawSubnumber = mPubRawRight.getNumSubscribers(); + uint32_t rgbGraySubnumber = mPubRgbGray.getNumSubscribers(); + uint32_t rgbGrayRawSubnumber = mPubRawRgbGray.getNumSubscribers(); + uint32_t leftGraySubnumber = mPubLeftGray.getNumSubscribers(); + uint32_t leftGrayRawSubnumber = mPubRawLeftGray.getNumSubscribers(); + uint32_t rightGraySubnumber = mPubRightGray.getNumSubscribers(); + uint32_t rightGrayRawSubnumber = mPubRawRightGray.getNumSubscribers(); + uint32_t depthSubnumber = mPubDepth.getNumSubscribers(); + uint32_t disparitySubnumber = mPubDisparity.getNumSubscribers(); + uint32_t confMapSubnumber = mPubConfMap.getNumSubscribers(); + uint32_t stereoSubNumber = mPubStereo.getNumSubscribers(); + uint32_t stereoRawSubNumber = mPubRawStereo.getNumSubscribers(); + + uint32_t tot_sub = rgbSubnumber + rgbRawSubnumber + leftSubnumber + leftRawSubnumber + rightSubnumber + + rightRawSubnumber + rgbGraySubnumber + rgbGrayRawSubnumber + leftGraySubnumber + + leftGrayRawSubnumber + rightGraySubnumber + rightGrayRawSubnumber + depthSubnumber + + disparitySubnumber + confMapSubnumber + stereoSubNumber + stereoRawSubNumber; + + bool retrieved = false; + + sl::Mat mat_left, mat_left_raw; + sl::Mat mat_right, mat_right_raw; + sl::Mat mat_left_gray, mat_left_raw_gray; + sl::Mat mat_right_gray, mat_right_raw_gray; + sl::Mat mat_depth, mat_disp, mat_conf; + + sl::Timestamp ts_rgb = 0; // used to check RGB/Depth sync + sl::Timestamp ts_depth = 0; // used to check RGB/Depth sync + sl::Timestamp grab_ts = 0; + + // ----> Retrieve all required image data + if (rgbSubnumber + leftSubnumber + stereoSubNumber > 0) + { + mZed.retrieveImage(mat_left, sl::VIEW::LEFT, sl::MEM::CPU, mMatResol); + retrieved = true; + ts_rgb = mat_left.timestamp; + grab_ts = mat_left.timestamp; + } + if (rgbRawSubnumber + leftRawSubnumber + stereoRawSubNumber > 0) + { + mZed.retrieveImage(mat_left_raw, sl::VIEW::LEFT_UNRECTIFIED, sl::MEM::CPU, mMatResol); + retrieved = true; + grab_ts = mat_left_raw.timestamp; + } + if 
(rightSubnumber + stereoSubNumber > 0) + { + mZed.retrieveImage(mat_right, sl::VIEW::RIGHT, sl::MEM::CPU, mMatResol); + retrieved = true; + grab_ts = mat_right.timestamp; + } + if (rightRawSubnumber + stereoRawSubNumber > 0) + { + mZed.retrieveImage(mat_right_raw, sl::VIEW::RIGHT_UNRECTIFIED, sl::MEM::CPU, mMatResol); + retrieved = true; + grab_ts = mat_right_raw.timestamp; + } + if (rgbGraySubnumber + leftGraySubnumber > 0) + { + mZed.retrieveImage(mat_left_gray, sl::VIEW::LEFT_GRAY, sl::MEM::CPU, mMatResol); + retrieved = true; + grab_ts = mat_left_gray.timestamp; + } + if (rgbGrayRawSubnumber + leftGrayRawSubnumber > 0) + { + mZed.retrieveImage(mat_left_raw_gray, sl::VIEW::LEFT_UNRECTIFIED_GRAY, sl::MEM::CPU, mMatResol); + retrieved = true; + grab_ts = mat_left_raw_gray.timestamp; + } + if (rightGraySubnumber > 0) + { + mZed.retrieveImage(mat_right_gray, sl::VIEW::RIGHT_GRAY, sl::MEM::CPU, mMatResol); + retrieved = true; + grab_ts = mat_right_gray.timestamp; + } + if (rightGrayRawSubnumber > 0) + { + mZed.retrieveImage(mat_right_raw_gray, sl::VIEW::RIGHT_UNRECTIFIED_GRAY, sl::MEM::CPU, mMatResol); + retrieved = true; + grab_ts = mat_right_raw_gray.timestamp; + } + if (depthSubnumber > 0) + { + if (!mOpenniDepthMode) + { + mZed.retrieveMeasure(mat_depth, sl::MEASURE::DEPTH, sl::MEM::CPU, mMatResol); } - if (rightGrayRawSubnumber > 0) { - mZed.retrieveImage(mat_right_raw_gray, sl::VIEW::RIGHT_UNRECTIFIED_GRAY, sl::MEM::CPU, mMatResol); - retrieved = true; - grab_ts = mat_right_raw_gray.timestamp; + else + { + mZed.retrieveMeasure(mat_depth, sl::MEASURE::DEPTH_U16_MM, sl::MEM::CPU, mMatResol); } - if (depthSubnumber > 0) { - if (!mOpenniDepthMode) { - mZed.retrieveMeasure(mat_depth, sl::MEASURE::DEPTH, sl::MEM::CPU, mMatResol); - } else { - mZed.retrieveMeasure(mat_depth, sl::MEASURE::DEPTH_U16_MM, sl::MEM::CPU, mMatResol); - } - retrieved = true; - grab_ts = mat_depth.timestamp; - - ts_depth = mat_depth.timestamp; + retrieved = true; + grab_ts = mat_depth.timestamp; - if (ts_rgb.data_ns != 0 && (ts_depth.data_ns != ts_rgb.data_ns)) { - NODELET_WARN_STREAM("!!!!! DEPTH/RGB ASYNC !!!!! - Delta: " << 1e-9 * static_cast(ts_depth - ts_rgb) - << " sec"); - } - } - if (disparitySubnumber > 0) { - mZed.retrieveMeasure(mat_disp, sl::MEASURE::DISPARITY, sl::MEM::CPU, mMatResol); - retrieved = true; - grab_ts = mat_disp.timestamp; - } - if (confMapSubnumber > 0) { - mZed.retrieveMeasure(mat_conf, sl::MEASURE::CONFIDENCE, sl::MEM::CPU, mMatResol); - retrieved = true; - grab_ts = mat_conf.timestamp; - } - // <---- Retrieve all required image data + ts_depth = mat_depth.timestamp; - // ----> Data ROS timestamp - ros::Time stamp = sl_tools::slTime2Ros(grab_ts); - if (mSvoMode) { - stamp = ros::Time::now(); - } - // <---- Data ROS timestamp - - // ----> Publish sensor data if sync is required by user or SVO - if (mZedRealCamModel != sl::MODEL::ZED) { - if (mSensTimestampSync) { - // NODELET_INFO_STREAM("tot_sub: " << tot_sub << " - retrieved: " << retrieved << " - - // grab_ts.data_ns!=lastZedTs.data_ns: " << (grab_ts.data_ns!=lastZedTs.data_ns)); - if (tot_sub > 0 && retrieved && (grab_ts.data_ns != lastZedTs.data_ns)) { - // NODELET_INFO("CALLBACK"); - publishSensData(stamp); - } - } else if (mSvoMode) { - publishSensData(stamp); - } + if (ts_rgb.data_ns != 0 && (ts_depth.data_ns != ts_rgb.data_ns)) + { + NODELET_WARN_STREAM("!!!!! DEPTH/RGB ASYNC !!!!! 
- Delta: " << 1e-9 * static_cast(ts_depth - ts_rgb) + << " sec"); } - // <---- Publish sensor data if sync is required by user or SVO - - // ----> Notify grab thread that all data are synchronized and a new grab can be done - // mRgbDepthDataRetrievedCondVar.notify_one(); - // mRgbDepthDataRetrieved = true; - // <---- Notify grab thread that all data are synchronized and a new grab can be done + } + if (disparitySubnumber > 0) + { + mZed.retrieveMeasure(mat_disp, sl::MEASURE::DISPARITY, sl::MEM::CPU, mMatResol); + retrieved = true; + grab_ts = mat_disp.timestamp; + } + if (confMapSubnumber > 0) + { + mZed.retrieveMeasure(mat_conf, sl::MEASURE::CONFIDENCE, sl::MEM::CPU, mMatResol); + retrieved = true; + grab_ts = mat_conf.timestamp; + } + // <---- Retrieve all required image data - if (!retrieved) { - mPublishingData = false; - lastZedTs = 0; - return; - } - mPublishingData = true; + // ----> Data ROS timestamp + ros::Time stamp = sl_tools::slTime2Ros(grab_ts); + if (mSvoMode) + { + stamp = ros::Time::now(); + } + // <---- Data ROS timestamp - // ----> Check if a grab has been done before publishing the same images - if (grab_ts.data_ns == lastZedTs.data_ns) { - // Data not updated by a grab calling in the grab thread - return; + // ----> Publish sensor data if sync is required by user or SVO + if (mZedRealCamModel != sl::MODEL::ZED) + { + if (mSensTimestampSync) + { + // NODELET_INFO_STREAM("tot_sub: " << tot_sub << " - retrieved: " << retrieved << " - + // grab_ts.data_ns!=lastZedTs.data_ns: " << (grab_ts.data_ns!=lastZedTs.data_ns)); + if (tot_sub > 0 && retrieved && (grab_ts.data_ns != lastZedTs.data_ns)) + { + // NODELET_INFO("CALLBACK"); + publishSensData(stamp); + } } - if (lastZedTs.data_ns != 0) { - double period_sec = static_cast(grab_ts.data_ns - lastZedTs.data_ns) / 1e9; - // NODELET_DEBUG_STREAM( "PUBLISHING PERIOD: " << period_sec << " sec @" << 1./period_sec << " Hz") ; - - mVideoDepthPeriodMean_sec->addValue(period_sec); - // NODELET_DEBUG_STREAM( "MEAN PUBLISHING PERIOD: " << mVideoDepthPeriodMean_sec->getMean() << " sec @" - // << 1./mVideoDepthPeriodMean_sec->getMean() << " Hz") ; + else if (mSvoMode) + { + publishSensData(stamp); } - lastZedTs = grab_ts; - // <---- Check if a grab has been done before publishing the same images + } + // <---- Publish sensor data if sync is required by user or SVO + + // ----> Notify grab thread that all data are synchronized and a new grab can be done + // mRgbDepthDataRetrievedCondVar.notify_one(); + // mRgbDepthDataRetrieved = true; + // <---- Notify grab thread that all data are synchronized and a new grab can be done + + if (!retrieved) + { + mPublishingData = false; + lastZedTs = 0; + return; + } + mPublishingData = true; - // Publish the left = rgb image if someone has subscribed to - if (leftSubnumber > 0) { - sensor_msgs::ImagePtr leftImgMsg = boost::make_shared(); - publishImage(leftImgMsg, mat_left, mPubLeft, mLeftCamInfoMsg, mLeftCamOptFrameId, stamp); - } - if (rgbSubnumber > 0) { - sensor_msgs::ImagePtr rgbImgMsg = boost::make_shared(); - publishImage(rgbImgMsg, mat_left, mPubRgb, mRgbCamInfoMsg, mDepthOptFrameId, stamp); - } + // ----> Check if a grab has been done before publishing the same images + if (grab_ts.data_ns == lastZedTs.data_ns) + { + // Data not updated by a grab calling in the grab thread + return; + } + if (lastZedTs.data_ns != 0) + { + double period_sec = static_cast(grab_ts.data_ns - lastZedTs.data_ns) / 1e9; + // NODELET_DEBUG_STREAM( "PUBLISHING PERIOD: " << period_sec << " sec @" << 1./period_sec << " 
Hz") ;
+
+ mVideoDepthPeriodMean_sec->addValue(period_sec);
+ // NODELET_DEBUG_STREAM( "MEAN PUBLISHING PERIOD: " << mVideoDepthPeriodMean_sec->getMean() << " sec @"
+ // << 1./mVideoDepthPeriodMean_sec->getMean() << " Hz") ;
+ }
+ lastZedTs = grab_ts;
+ // <---- Check if a grab has been done before publishing the same images
+
+ // Publish the left = rgb image if someone has subscribed to
+ if (leftSubnumber > 0)
+ {
+ sensor_msgs::ImagePtr leftImgMsg = boost::make_shared<sensor_msgs::Image>();
+ publishImage(leftImgMsg, mat_left, mPubLeft, mLeftCamInfoMsg, mLeftCamOptFrameId, stamp);
+ }
+ if (rgbSubnumber > 0)
+ {
+ sensor_msgs::ImagePtr rgbImgMsg = boost::make_shared<sensor_msgs::Image>();
+ publishImage(rgbImgMsg, mat_left, mPubRgb, mRgbCamInfoMsg, mDepthOptFrameId, stamp);
+ }
- // Publish the left = rgb GRAY image if someone has subscribed to
- if (leftGraySubnumber > 0) {
- sensor_msgs::ImagePtr leftGrayImgMsg = boost::make_shared<sensor_msgs::Image>();
- publishImage(leftGrayImgMsg, mat_left_gray, mPubLeftGray, mLeftCamInfoMsg, mLeftCamOptFrameId, stamp);
- }
- if (rgbGraySubnumber > 0) {
- sensor_msgs::ImagePtr rgbGrayImgMsg = boost::make_shared<sensor_msgs::Image>();
- publishImage(rgbGrayImgMsg, mat_left_gray, mPubRgbGray, mRgbCamInfoMsg, mDepthOptFrameId, stamp);
- }
+ // Publish the left = rgb GRAY image if someone has subscribed to
+ if (leftGraySubnumber > 0)
+ {
+ sensor_msgs::ImagePtr leftGrayImgMsg = boost::make_shared<sensor_msgs::Image>();
+ publishImage(leftGrayImgMsg, mat_left_gray, mPubLeftGray, mLeftCamInfoMsg, mLeftCamOptFrameId, stamp);
+ }
+ if (rgbGraySubnumber > 0)
+ {
+ sensor_msgs::ImagePtr rgbGrayImgMsg = boost::make_shared<sensor_msgs::Image>();
+ publishImage(rgbGrayImgMsg, mat_left_gray, mPubRgbGray, mRgbCamInfoMsg, mDepthOptFrameId, stamp);
+ }
- // Publish the left_raw = rgb_raw image if someone has subscribed to
- if (leftRawSubnumber > 0) {
- sensor_msgs::ImagePtr rawLeftImgMsg = boost::make_shared<sensor_msgs::Image>();
- publishImage(rawLeftImgMsg, mat_left_raw, mPubRawLeft, mLeftCamInfoRawMsg, mLeftCamOptFrameId, stamp);
- }
- if (rgbRawSubnumber > 0) {
- sensor_msgs::ImagePtr rawRgbImgMsg = boost::make_shared<sensor_msgs::Image>();
- publishImage(rawRgbImgMsg, mat_left_raw, mPubRawRgb, mRgbCamInfoRawMsg, mDepthOptFrameId, stamp);
- }
+ // Publish the left_raw = rgb_raw image if someone has subscribed to
+ if (leftRawSubnumber > 0)
+ {
+ sensor_msgs::ImagePtr rawLeftImgMsg = boost::make_shared<sensor_msgs::Image>();
+ publishImage(rawLeftImgMsg, mat_left_raw, mPubRawLeft, mLeftCamInfoRawMsg, mLeftCamOptFrameId, stamp);
+ }
+ if (rgbRawSubnumber > 0)
+ {
+ sensor_msgs::ImagePtr rawRgbImgMsg = boost::make_shared<sensor_msgs::Image>();
+ publishImage(rawRgbImgMsg, mat_left_raw, mPubRawRgb, mRgbCamInfoRawMsg, mDepthOptFrameId, stamp);
+ }
- // Publish the left_raw == rgb_raw GRAY image if someone has subscribed to
- if (leftGrayRawSubnumber > 0) {
- sensor_msgs::ImagePtr rawLeftGrayImgMsg = boost::make_shared<sensor_msgs::Image>();
+ // Publish the left_raw == rgb_raw GRAY image if someone has subscribed to
+ if (leftGrayRawSubnumber > 0)
+ {
+ sensor_msgs::ImagePtr rawLeftGrayImgMsg = boost::make_shared<sensor_msgs::Image>();
- publishImage(rawLeftGrayImgMsg, mat_left_raw_gray, mPubRawLeftGray, mLeftCamInfoRawMsg, mLeftCamOptFrameId, stamp);
- }
- if (rgbGrayRawSubnumber > 0) {
- sensor_msgs::ImagePtr rawRgbGrayImgMsg = boost::make_shared<sensor_msgs::Image>();
- publishImage(rawRgbGrayImgMsg, mat_left_raw_gray, mPubRawRgbGray, mRgbCamInfoRawMsg, mDepthOptFrameId, stamp);
- }
+ publishImage(rawLeftGrayImgMsg, mat_left_raw_gray, mPubRawLeftGray, mLeftCamInfoRawMsg, mLeftCamOptFrameId, stamp);
+ }
+ if (rgbGrayRawSubnumber > 0)
+ {
+ sensor_msgs::ImagePtr rawRgbGrayImgMsg = boost::make_shared<sensor_msgs::Image>();
+
publishImage(rawRgbGrayImgMsg, mat_left_raw_gray, mPubRawRgbGray, mRgbCamInfoRawMsg, mDepthOptFrameId, stamp); + } - // Publish the right image if someone has subscribed to - if (rightSubnumber > 0) { - sensor_msgs::ImagePtr rightImgMsg = boost::make_shared(); - publishImage(rightImgMsg, mat_right, mPubRight, mRightCamInfoMsg, mRightCamOptFrameId, stamp); - } + // Publish the right image if someone has subscribed to + if (rightSubnumber > 0) + { + sensor_msgs::ImagePtr rightImgMsg = boost::make_shared(); + publishImage(rightImgMsg, mat_right, mPubRight, mRightCamInfoMsg, mRightCamOptFrameId, stamp); + } - // Publish the right image GRAY if someone has subscribed to - if (rightGraySubnumber > 0) { - sensor_msgs::ImagePtr rightGrayImgMsg = boost::make_shared(); - publishImage(rightGrayImgMsg, mat_right_gray, mPubRightGray, mRightCamInfoMsg, mRightCamOptFrameId, stamp); - } + // Publish the right image GRAY if someone has subscribed to + if (rightGraySubnumber > 0) + { + sensor_msgs::ImagePtr rightGrayImgMsg = boost::make_shared(); + publishImage(rightGrayImgMsg, mat_right_gray, mPubRightGray, mRightCamInfoMsg, mRightCamOptFrameId, stamp); + } - // Publish the right raw image if someone has subscribed to - if (rightRawSubnumber > 0) { - sensor_msgs::ImagePtr rawRightImgMsg = boost::make_shared(); - publishImage(rawRightImgMsg, mat_right_raw, mPubRawRight, mRightCamInfoRawMsg, mRightCamOptFrameId, stamp); - } + // Publish the right raw image if someone has subscribed to + if (rightRawSubnumber > 0) + { + sensor_msgs::ImagePtr rawRightImgMsg = boost::make_shared(); + publishImage(rawRightImgMsg, mat_right_raw, mPubRawRight, mRightCamInfoRawMsg, mRightCamOptFrameId, stamp); + } - // Publish the right raw image GRAY if someone has subscribed to - if (rightGrayRawSubnumber > 0) { - sensor_msgs::ImagePtr rawRightGrayImgMsg = boost::make_shared(); - publishImage(rawRightGrayImgMsg, mat_right_raw_gray, mPubRawRightGray, mRightCamInfoRawMsg, mRightCamOptFrameId, - stamp); - } + // Publish the right raw image GRAY if someone has subscribed to + if (rightGrayRawSubnumber > 0) + { + sensor_msgs::ImagePtr rawRightGrayImgMsg = boost::make_shared(); + publishImage(rawRightGrayImgMsg, mat_right_raw_gray, mPubRawRightGray, mRightCamInfoRawMsg, mRightCamOptFrameId, + stamp); + } - // Stereo couple side-by-side - if (stereoSubNumber > 0) { - sensor_msgs::ImagePtr stereoImgMsg = boost::make_shared(); - sl_tools::imagesToROSmsg(stereoImgMsg, mat_left, mat_right, mCameraFrameId, stamp); - mPubStereo.publish(stereoImgMsg); - } + // Stereo couple side-by-side + if (stereoSubNumber > 0) + { + sensor_msgs::ImagePtr stereoImgMsg = boost::make_shared(); + sl_tools::imagesToROSmsg(stereoImgMsg, mat_left, mat_right, mCameraFrameId, stamp); + mPubStereo.publish(stereoImgMsg); + } - // Stereo RAW couple side-by-side - if (stereoRawSubNumber > 0) { - sensor_msgs::ImagePtr rawStereoImgMsg = boost::make_shared(); - sl_tools::imagesToROSmsg(rawStereoImgMsg, mat_left_raw, mat_right_raw, mCameraFrameId, stamp); - mPubRawStereo.publish(rawStereoImgMsg); - } + // Stereo RAW couple side-by-side + if (stereoRawSubNumber > 0) + { + sensor_msgs::ImagePtr rawStereoImgMsg = boost::make_shared(); + sl_tools::imagesToROSmsg(rawStereoImgMsg, mat_left_raw, mat_right_raw, mCameraFrameId, stamp); + mPubRawStereo.publish(rawStereoImgMsg); + } - // Publish the depth image if someone has subscribed to - if (depthSubnumber > 0) { - sensor_msgs::ImagePtr depthImgMsg = boost::make_shared(); - publishDepth(depthImgMsg, mat_depth, stamp); - } + // 
Publish the depth image if someone has subscribed to + if (depthSubnumber > 0) + { + sensor_msgs::ImagePtr depthImgMsg = boost::make_shared(); + publishDepth(depthImgMsg, mat_depth, stamp); + } - // Publish the disparity image if someone has subscribed to - if (disparitySubnumber > 0) { - publishDisparity(mat_disp, stamp); - } + // Publish the disparity image if someone has subscribed to + if (disparitySubnumber > 0) + { + publishDisparity(mat_disp, stamp); + } - // Publish the confidence map if someone has subscribed to - if (confMapSubnumber > 0) { - sensor_msgs::ImagePtr confMapMsg = boost::make_shared(); - sl_tools::imageToROSmsg(confMapMsg, mat_conf, mConfidenceOptFrameId, stamp); - mPubConfMap.publish(confMapMsg); - } + // Publish the confidence map if someone has subscribed to + if (confMapSubnumber > 0) + { + sensor_msgs::ImagePtr confMapMsg = boost::make_shared(); + sl_tools::imageToROSmsg(confMapMsg, mat_conf, mConfidenceOptFrameId, stamp); + mPubConfMap.publish(confMapMsg); + } } void ZEDWrapperNodelet::callback_pubPath(const ros::TimerEvent& e) { - uint32_t mapPathSub = mPubMapPath.getNumSubscribers(); - uint32_t odomPathSub = mPubOdomPath.getNumSubscribers(); - - geometry_msgs::PoseStamped odomPose; - geometry_msgs::PoseStamped mapPose; - - odomPose.header.stamp = mFrameTimestamp; - odomPose.header.frame_id = mMapFrameId; // frame - // conversion from Tranform to message - geometry_msgs::Transform base2odom = tf2::toMsg(mOdom2BaseTransf); - // Add all value in Pose message - odomPose.pose.position.x = base2odom.translation.x; - odomPose.pose.position.y = base2odom.translation.y; - odomPose.pose.position.z = base2odom.translation.z; - odomPose.pose.orientation.x = base2odom.rotation.x; - odomPose.pose.orientation.y = base2odom.rotation.y; - odomPose.pose.orientation.z = base2odom.rotation.z; - odomPose.pose.orientation.w = base2odom.rotation.w; - - mapPose.header.stamp = mFrameTimestamp; - mapPose.header.frame_id = mMapFrameId; // frame - // conversion from Tranform to message - geometry_msgs::Transform base2map = tf2::toMsg(mMap2BaseTransf); - // Add all value in Pose message - mapPose.pose.position.x = base2map.translation.x; - mapPose.pose.position.y = base2map.translation.y; - mapPose.pose.position.z = base2map.translation.z; - mapPose.pose.orientation.x = base2map.rotation.x; - mapPose.pose.orientation.y = base2map.rotation.y; - mapPose.pose.orientation.z = base2map.rotation.z; - mapPose.pose.orientation.w = base2map.rotation.w; - - // Circular vector - if (mPathMaxCount != -1) { - if (mOdomPath.size() == mPathMaxCount) { - NODELET_DEBUG("Path vectors full: rotating "); - std::rotate(mOdomPath.begin(), mOdomPath.begin() + 1, mOdomPath.end()); - std::rotate(mMapPath.begin(), mMapPath.begin() + 1, mMapPath.end()); - - mMapPath[mPathMaxCount - 1] = mapPose; - mOdomPath[mPathMaxCount - 1] = odomPose; - } else { - // NODELET_DEBUG_STREAM("Path vectors adding last available poses"); - mMapPath.push_back(mapPose); - mOdomPath.push_back(odomPose); - } - } else { - // NODELET_DEBUG_STREAM("No limit path vectors, adding last available poses"); - mMapPath.push_back(mapPose); - mOdomPath.push_back(odomPose); + uint32_t mapPathSub = mPubMapPath.getNumSubscribers(); + uint32_t odomPathSub = mPubOdomPath.getNumSubscribers(); + + geometry_msgs::PoseStamped odomPose; + geometry_msgs::PoseStamped mapPose; + + odomPose.header.stamp = mFrameTimestamp; + odomPose.header.frame_id = mMapFrameId; // frame + // conversion from Tranform to message + geometry_msgs::Transform base2odom = 
tf2::toMsg(mOdom2BaseTransf); + // Add all value in Pose message + odomPose.pose.position.x = base2odom.translation.x; + odomPose.pose.position.y = base2odom.translation.y; + odomPose.pose.position.z = base2odom.translation.z; + odomPose.pose.orientation.x = base2odom.rotation.x; + odomPose.pose.orientation.y = base2odom.rotation.y; + odomPose.pose.orientation.z = base2odom.rotation.z; + odomPose.pose.orientation.w = base2odom.rotation.w; + + mapPose.header.stamp = mFrameTimestamp; + mapPose.header.frame_id = mMapFrameId; // frame + // conversion from Tranform to message + geometry_msgs::Transform base2map = tf2::toMsg(mMap2BaseTransf); + // Add all value in Pose message + mapPose.pose.position.x = base2map.translation.x; + mapPose.pose.position.y = base2map.translation.y; + mapPose.pose.position.z = base2map.translation.z; + mapPose.pose.orientation.x = base2map.rotation.x; + mapPose.pose.orientation.y = base2map.rotation.y; + mapPose.pose.orientation.z = base2map.rotation.z; + mapPose.pose.orientation.w = base2map.rotation.w; + + // Circular vector + if (mPathMaxCount != -1) + { + if (mOdomPath.size() == mPathMaxCount) + { + NODELET_DEBUG("Path vectors full: rotating "); + std::rotate(mOdomPath.begin(), mOdomPath.begin() + 1, mOdomPath.end()); + std::rotate(mMapPath.begin(), mMapPath.begin() + 1, mMapPath.end()); + + mMapPath[mPathMaxCount - 1] = mapPose; + mOdomPath[mPathMaxCount - 1] = odomPose; + } + else + { + // NODELET_DEBUG_STREAM("Path vectors adding last available poses"); + mMapPath.push_back(mapPose); + mOdomPath.push_back(odomPose); } + } + else + { + // NODELET_DEBUG_STREAM("No limit path vectors, adding last available poses"); + mMapPath.push_back(mapPose); + mOdomPath.push_back(odomPose); + } - if (mapPathSub > 0) { - nav_msgs::PathPtr mapPath = boost::make_shared(); - mapPath->header.frame_id = mMapFrameId; - mapPath->header.stamp = mFrameTimestamp; - mapPath->poses = mMapPath; + if (mapPathSub > 0) + { + nav_msgs::PathPtr mapPath = boost::make_shared(); + mapPath->header.frame_id = mMapFrameId; + mapPath->header.stamp = mFrameTimestamp; + mapPath->poses = mMapPath; - mPubMapPath.publish(mapPath); - } + mPubMapPath.publish(mapPath); + } - if (odomPathSub > 0) { - nav_msgs::PathPtr odomPath = boost::make_shared(); - odomPath->header.frame_id = mMapFrameId; - odomPath->header.stamp = mFrameTimestamp; - odomPath->poses = mOdomPath; + if (odomPathSub > 0) + { + nav_msgs::PathPtr odomPath = boost::make_shared(); + odomPath->header.frame_id = mMapFrameId; + odomPath->header.stamp = mFrameTimestamp; + odomPath->poses = mOdomPath; - mPubOdomPath.publish(odomPath); - } + mPubOdomPath.publish(odomPath); + } } void ZEDWrapperNodelet::sensors_thread_func() { - ros::Rate loop_rate(mSensPubRate); + ros::Rate loop_rate(mSensPubRate); - std::chrono::steady_clock::time_point prev_usec = std::chrono::steady_clock::now(); + std::chrono::steady_clock::time_point prev_usec = std::chrono::steady_clock::now(); - int count_warn = 0; + int count_warn = 0; - while (!mStopNode) { - std::chrono::steady_clock::time_point start_elab = std::chrono::steady_clock::now(); + while (!mStopNode) + { + std::chrono::steady_clock::time_point start_elab = std::chrono::steady_clock::now(); - mCloseZedMutex.lock(); - if (!mZed.isOpened()) { - mCloseZedMutex.unlock(); - loop_rate.sleep(); - continue; - } + mCloseZedMutex.lock(); + if (!mZed.isOpened()) + { + mCloseZedMutex.unlock(); + loop_rate.sleep(); + continue; + } - publishSensData(); - mCloseZedMutex.unlock(); - - if (!loop_rate.sleep()) { - if (++count_warn 
> 10) { - NODELET_INFO_THROTTLE(1.0, "Sensors thread is not synchronized with the Sensors rate"); - NODELET_INFO_STREAM_THROTTLE(1.0, "Expected cycle time: " << loop_rate.expectedCycleTime() << " - Real cycle time: " << loop_rate.cycleTime()); - NODELET_WARN_STREAM_THROTTLE(10.0, "Sensors data publishing takes longer (" << loop_rate.cycleTime() << " sec) than requested by the Sensors rate (" << loop_rate.expectedCycleTime() << " sec). Please consider to " - "lower the 'max_pub_rate' setting or to " - "reduce the power requirements reducing " - "the resolutions."); - } + publishSensData(); + mCloseZedMutex.unlock(); - loop_rate.reset(); - } else { - count_warn = 0; - } + if (!loop_rate.sleep()) + { + if (++count_warn > 10) + { + NODELET_INFO_THROTTLE(1.0, "Sensors thread is not synchronized with the Sensors rate"); + NODELET_INFO_STREAM_THROTTLE(1.0, "Expected cycle time: " << loop_rate.expectedCycleTime() + << " - Real cycle time: " << loop_rate.cycleTime()); + NODELET_WARN_STREAM_THROTTLE(10.0, "Sensors data publishing takes longer (" + << loop_rate.cycleTime() << " sec) than requested by the Sensors rate (" + << loop_rate.expectedCycleTime() + << " sec). Please consider to " + "lower the 'max_pub_rate' setting or to " + "reduce the power requirements reducing " + "the resolutions."); + } + + loop_rate.reset(); } + else + { + count_warn = 0; + } + } - NODELET_DEBUG("Sensors thread finished"); + NODELET_DEBUG("Sensors thread finished"); } void ZEDWrapperNodelet::publishSensData(ros::Time t) { - // NODELET_INFO("publishSensData"); - - uint32_t imu_SubNumber = mPubImu.getNumSubscribers(); - uint32_t imu_RawSubNumber = mPubImuRaw.getNumSubscribers(); - uint32_t imu_TempSubNumber = 0; - uint32_t imu_MagSubNumber = 0; - uint32_t pressSubNumber = 0; - uint32_t tempLeftSubNumber = 0; - uint32_t tempRightSubNumber = 0; - - if (mZedRealCamModel != sl::MODEL::ZED_M) { - imu_TempSubNumber = mPubImuTemp.getNumSubscribers(); - } + // NODELET_INFO("publishSensData"); + + uint32_t imu_SubNumber = mPubImu.getNumSubscribers(); + uint32_t imu_RawSubNumber = mPubImuRaw.getNumSubscribers(); + uint32_t imu_TempSubNumber = 0; + uint32_t imu_MagSubNumber = 0; + uint32_t pressSubNumber = 0; + uint32_t tempLeftSubNumber = 0; + uint32_t tempRightSubNumber = 0; + + if (mZedRealCamModel != sl::MODEL::ZED_M) + { + imu_TempSubNumber = mPubImuTemp.getNumSubscribers(); + } - if (mZedRealCamModel == sl::MODEL::ZED2 || mZedRealCamModel == sl::MODEL::ZED2i) { - imu_MagSubNumber = mPubImuMag.getNumSubscribers(); - pressSubNumber = mPubPressure.getNumSubscribers(); - tempLeftSubNumber = mPubTempL.getNumSubscribers(); - tempRightSubNumber = mPubTempR.getNumSubscribers(); - } + if (mZedRealCamModel == sl::MODEL::ZED2 || mZedRealCamModel == sl::MODEL::ZED2i) + { + imu_MagSubNumber = mPubImuMag.getNumSubscribers(); + pressSubNumber = mPubPressure.getNumSubscribers(); + tempLeftSubNumber = mPubTempL.getNumSubscribers(); + tempRightSubNumber = mPubTempR.getNumSubscribers(); + } - uint32_t tot_sub = imu_SubNumber + imu_RawSubNumber + imu_TempSubNumber + imu_MagSubNumber + pressSubNumber + tempLeftSubNumber + tempRightSubNumber; + uint32_t tot_sub = imu_SubNumber + imu_RawSubNumber + imu_TempSubNumber + imu_MagSubNumber + pressSubNumber + + tempLeftSubNumber + tempRightSubNumber; - if (tot_sub > 0) { - mSensPublishing = true; - } else { - mSensPublishing = false; - } + if (tot_sub > 0) + { + mSensPublishing = true; + } + else + { + mSensPublishing = false; + } - bool sensors_data_published = false; + bool sensors_data_published = 
false; - ros::Time ts_imu; - ros::Time ts_baro; - ros::Time ts_mag; + ros::Time ts_imu; + ros::Time ts_baro; + ros::Time ts_mag; - static ros::Time lastTs_imu = ros::Time(); - static ros::Time lastTs_baro = ros::Time(); - static ros::Time lastT_mag = ros::Time(); + static ros::Time lastTs_imu = ros::Time(); + static ros::Time lastTs_baro = ros::Time(); + static ros::Time lastT_mag = ros::Time(); - sl::SensorsData sens_data; + sl::SensorsData sens_data; - if (mSvoMode || mSensTimestampSync) { - if (mZed.getSensorsData(sens_data, sl::TIME_REFERENCE::IMAGE) != sl::ERROR_CODE::SUCCESS) { - NODELET_DEBUG("Not retrieved sensors data in IMAGE REFERENCE TIME"); - return; - } - } else { - if (mZed.getSensorsData(sens_data, sl::TIME_REFERENCE::CURRENT) != sl::ERROR_CODE::SUCCESS) { - NODELET_DEBUG("Not retrieved sensors data in CURRENT REFERENCE TIME"); - return; - } + if (mSvoMode || mSensTimestampSync) + { + if (mZed.getSensorsData(sens_data, sl::TIME_REFERENCE::IMAGE) != sl::ERROR_CODE::SUCCESS) + { + NODELET_DEBUG("Not retrieved sensors data in IMAGE REFERENCE TIME"); + return; } - - if (t != ros::Time(0)) { - ts_imu = t; - ts_baro = t; - ts_mag = t; - } else { - ts_imu = sl_tools::slTime2Ros(sens_data.imu.timestamp); - ts_baro = sl_tools::slTime2Ros(sens_data.barometer.timestamp); - ts_mag = sl_tools::slTime2Ros(sens_data.magnetometer.timestamp); + } + else + { + if (mZed.getSensorsData(sens_data, sl::TIME_REFERENCE::CURRENT) != sl::ERROR_CODE::SUCCESS) + { + NODELET_DEBUG("Not retrieved sensors data in CURRENT REFERENCE TIME"); + return; } + } - bool new_imu_data = ts_imu != lastTs_imu; - bool new_baro_data = ts_baro != lastTs_baro; - bool new_mag_data = ts_mag != lastT_mag; + if (t != ros::Time(0)) + { + ts_imu = t; + ts_baro = t; + ts_mag = t; + } + else + { + ts_imu = sl_tools::slTime2Ros(sens_data.imu.timestamp); + ts_baro = sl_tools::slTime2Ros(sens_data.barometer.timestamp); + ts_mag = sl_tools::slTime2Ros(sens_data.magnetometer.timestamp); + } - if (!new_imu_data && !new_baro_data && !new_mag_data) { - NODELET_DEBUG("No updated sensors data"); - return; - } + bool new_imu_data = ts_imu != lastTs_imu; + bool new_baro_data = ts_baro != lastTs_baro; + bool new_mag_data = ts_mag != lastT_mag; - // ----> Publish odometry tf only if enabled and not in SVO mode - if(!mSvoMode) { - if (mPublishTf && mPosTrackingReady && new_imu_data) { - //NODELET_DEBUG("Publishing TF"); + if (!new_imu_data && !new_baro_data && !new_mag_data) + { + NODELET_DEBUG("No updated sensors data"); + return; + } - publishOdomFrame(mOdom2BaseTransf, ts_imu); // publish the base Frame in odometry frame + // ----> Publish odometry tf only if enabled and not in SVO mode + if (!mSvoMode) + { + if (mPublishTf && mPosTrackingReady && new_imu_data) + { + // NODELET_DEBUG("Publishing TF"); - if (mPublishMapTf) { - publishPoseFrame(mMap2OdomTransf, ts_imu); // publish the odometry Frame in map frame - } - } - } - // <---- Publish odometry tf only if enabled and not in SVO mode + publishOdomFrame(mOdom2BaseTransf, ts_imu); // publish the base Frame in odometry frame - if (mPublishImuTf && !mStaticImuFramePublished) { - NODELET_DEBUG("Publishing static IMU TF"); - publishStaticImuFrame(); + if (mPublishMapTf) + { + publishPoseFrame(mMap2OdomTransf, ts_imu); // publish the odometry Frame in map frame + } } + } + // <---- Publish odometry tf only if enabled and not in SVO mode - if (mZedRealCamModel == sl::MODEL::ZED2 || mZedRealCamModel == sl::MODEL::ZED2i) { - // Update temperatures for Diagnostic - 
sens_data.temperature.get(sl::SensorsData::TemperatureData::SENSOR_LOCATION::ONBOARD_LEFT, mTempLeft); - sens_data.temperature.get(sl::SensorsData::TemperatureData::SENSOR_LOCATION::ONBOARD_RIGHT, mTempRight); - } + if (mPublishImuTf && !mStaticImuFramePublished) + { + NODELET_DEBUG("Publishing static IMU TF"); + publishStaticImuFrame(); + } - if (imu_TempSubNumber > 0 && new_imu_data) { - lastTs_imu = ts_imu; + if (mZedRealCamModel == sl::MODEL::ZED2 || mZedRealCamModel == sl::MODEL::ZED2i) + { + // Update temperatures for Diagnostic + sens_data.temperature.get(sl::SensorsData::TemperatureData::SENSOR_LOCATION::ONBOARD_LEFT, mTempLeft); + sens_data.temperature.get(sl::SensorsData::TemperatureData::SENSOR_LOCATION::ONBOARD_RIGHT, mTempRight); + } + + if (imu_TempSubNumber > 0 && new_imu_data) + { + lastTs_imu = ts_imu; - sensor_msgs::TemperaturePtr imuTempMsg = boost::make_shared(); + sensor_msgs::TemperaturePtr imuTempMsg = boost::make_shared(); - imuTempMsg->header.stamp = ts_imu; + imuTempMsg->header.stamp = ts_imu; #ifdef DEBUG_SENS_TS - static ros::Time old_ts; - if (old_ts == imuTempMsg->header.stamp) { - NODELET_WARN_STREAM("Publishing IMU data with old timestamp " << old_ts); - } - old_ts = imuTempMsg->header.stamp; + static ros::Time old_ts; + if (old_ts == imuTempMsg->header.stamp) + { + NODELET_WARN_STREAM("Publishing IMU data with old timestamp " << old_ts); + } + old_ts = imuTempMsg->header.stamp; #endif - imuTempMsg->header.frame_id = mImuFrameId; - float imu_temp; - sens_data.temperature.get(sl::SensorsData::TemperatureData::SENSOR_LOCATION::IMU, imu_temp); - imuTempMsg->temperature = static_cast(imu_temp); - imuTempMsg->variance = 0.0; + imuTempMsg->header.frame_id = mImuFrameId; + float imu_temp; + sens_data.temperature.get(sl::SensorsData::TemperatureData::SENSOR_LOCATION::IMU, imu_temp); + imuTempMsg->temperature = static_cast(imu_temp); + imuTempMsg->variance = 0.0; - sensors_data_published = true; - mPubImuTemp.publish(imuTempMsg); - } /*else { - NODELET_DEBUG("No new IMU temp."); - }*/ + sensors_data_published = true; + mPubImuTemp.publish(imuTempMsg); + } /*else { + NODELET_DEBUG("No new IMU temp."); + }*/ - if (sens_data.barometer.is_available && new_baro_data) { - lastTs_baro = ts_baro; + if (sens_data.barometer.is_available && new_baro_data) + { + lastTs_baro = ts_baro; - if (pressSubNumber > 0) { - sensor_msgs::FluidPressurePtr pressMsg = boost::make_shared(); + if (pressSubNumber > 0) + { + sensor_msgs::FluidPressurePtr pressMsg = boost::make_shared(); - pressMsg->header.stamp = ts_baro; + pressMsg->header.stamp = ts_baro; #ifdef DEBUG_SENS_TS - static ros::Time old_ts; - if (old_ts == pressMsg->header.stamp) { - NODELET_WARN_STREAM("Publishing BARO data with old timestamp " << old_ts); - } - old_ts = pressMsg->header.stamp; + static ros::Time old_ts; + if (old_ts == pressMsg->header.stamp) + { + NODELET_WARN_STREAM("Publishing BARO data with old timestamp " << old_ts); + } + old_ts = pressMsg->header.stamp; #endif - pressMsg->header.frame_id = mBaroFrameId; - pressMsg->fluid_pressure = sens_data.barometer.pressure; // Pascal - pressMsg->variance = 1.0585e-2; + pressMsg->header.frame_id = mBaroFrameId; + pressMsg->fluid_pressure = sens_data.barometer.pressure; // Pascal + pressMsg->variance = 1.0585e-2; - sensors_data_published = true; - mPubPressure.publish(pressMsg); - } + sensors_data_published = true; + mPubPressure.publish(pressMsg); + } - if (tempLeftSubNumber > 0) { - sensor_msgs::TemperaturePtr tempLeftMsg = boost::make_shared(); + if (tempLeftSubNumber 
> 0) + { + sensor_msgs::TemperaturePtr tempLeftMsg = boost::make_shared(); - tempLeftMsg->header.stamp = ts_baro; + tempLeftMsg->header.stamp = ts_baro; #ifdef DEBUG_SENS_TS - static ros::Time old_ts; - if (old_ts == tempLeftMsg->header.stamp) { - NODELET_WARN_STREAM("Publishing BARO data with old timestamp " << old_ts); - } - old_ts = tempLeftMsg->header.stamp; + static ros::Time old_ts; + if (old_ts == tempLeftMsg->header.stamp) + { + NODELET_WARN_STREAM("Publishing BARO data with old timestamp " << old_ts); + } + old_ts = tempLeftMsg->header.stamp; #endif - tempLeftMsg->header.frame_id = mTempLeftFrameId; - tempLeftMsg->temperature = static_cast(mTempLeft); - tempLeftMsg->variance = 0.0; + tempLeftMsg->header.frame_id = mTempLeftFrameId; + tempLeftMsg->temperature = static_cast(mTempLeft); + tempLeftMsg->variance = 0.0; - sensors_data_published = true; - mPubTempL.publish(tempLeftMsg); - } + sensors_data_published = true; + mPubTempL.publish(tempLeftMsg); + } - if (tempRightSubNumber > 0) { - sensor_msgs::TemperaturePtr tempRightMsg = boost::make_shared(); + if (tempRightSubNumber > 0) + { + sensor_msgs::TemperaturePtr tempRightMsg = boost::make_shared(); - tempRightMsg->header.stamp = ts_baro; + tempRightMsg->header.stamp = ts_baro; #ifdef DEBUG_SENS_TS - static ros::Time old_ts; - if (old_ts == tempRightMsg->header.stamp) { - NODELET_WARN_STREAM("Publishing BARO data with old timestamp " << old_ts); - } - old_ts = tempRightMsg->header.stamp; + static ros::Time old_ts; + if (old_ts == tempRightMsg->header.stamp) + { + NODELET_WARN_STREAM("Publishing BARO data with old timestamp " << old_ts); + } + old_ts = tempRightMsg->header.stamp; #endif - tempRightMsg->header.frame_id = mTempRightFrameId; - tempRightMsg->temperature = static_cast(mTempRight); - tempRightMsg->variance = 0.0; + tempRightMsg->header.frame_id = mTempRightFrameId; + tempRightMsg->temperature = static_cast(mTempRight); + tempRightMsg->variance = 0.0; - sensors_data_published = true; - mPubTempR.publish(tempRightMsg); - } - } /*else { - NODELET_DEBUG("No new BAROM. DATA"); - }*/ + sensors_data_published = true; + mPubTempR.publish(tempRightMsg); + } + } /*else { + NODELET_DEBUG("No new BAROM. 
DATA"); + }*/ - if (imu_MagSubNumber > 0) { - if (sens_data.magnetometer.is_available && new_mag_data) { - lastT_mag = ts_mag; + if (imu_MagSubNumber > 0) + { + if (sens_data.magnetometer.is_available && new_mag_data) + { + lastT_mag = ts_mag; - sensor_msgs::MagneticFieldPtr magMsg = boost::make_shared(); + sensor_msgs::MagneticFieldPtr magMsg = boost::make_shared(); - magMsg->header.stamp = ts_mag; + magMsg->header.stamp = ts_mag; #ifdef DEBUG_SENS_TS - static ros::Time old_ts; - if (old_ts == magMsg->header.stamp) { - NODELET_WARN_STREAM("Publishing MAG data with old timestamp " << old_ts); - } - old_ts = magMsg->header.stamp; + static ros::Time old_ts; + if (old_ts == magMsg->header.stamp) + { + NODELET_WARN_STREAM("Publishing MAG data with old timestamp " << old_ts); + } + old_ts = magMsg->header.stamp; #endif - magMsg->header.frame_id = mMagFrameId; - magMsg->magnetic_field.x = sens_data.magnetometer.magnetic_field_calibrated.x * 1e-6; // Tesla - magMsg->magnetic_field.y = sens_data.magnetometer.magnetic_field_calibrated.y * 1e-6; // Tesla - magMsg->magnetic_field.z = sens_data.magnetometer.magnetic_field_calibrated.z * 1e-6; // Tesla - magMsg->magnetic_field_covariance[0] = 0.039e-6; - magMsg->magnetic_field_covariance[1] = 0.0f; - magMsg->magnetic_field_covariance[2] = 0.0f; - magMsg->magnetic_field_covariance[3] = 0.0f; - magMsg->magnetic_field_covariance[4] = 0.037e-6; - magMsg->magnetic_field_covariance[5] = 0.0f; - magMsg->magnetic_field_covariance[6] = 0.0f; - magMsg->magnetic_field_covariance[7] = 0.0f; - magMsg->magnetic_field_covariance[8] = 0.047e-6; - - sensors_data_published = true; - mPubImuMag.publish(magMsg); - } - } /*else { - NODELET_DEBUG("No new MAG. DATA"); - }*/ + magMsg->header.frame_id = mMagFrameId; + magMsg->magnetic_field.x = sens_data.magnetometer.magnetic_field_calibrated.x * 1e-6; // Tesla + magMsg->magnetic_field.y = sens_data.magnetometer.magnetic_field_calibrated.y * 1e-6; // Tesla + magMsg->magnetic_field.z = sens_data.magnetometer.magnetic_field_calibrated.z * 1e-6; // Tesla + magMsg->magnetic_field_covariance[0] = 0.039e-6; + magMsg->magnetic_field_covariance[1] = 0.0f; + magMsg->magnetic_field_covariance[2] = 0.0f; + magMsg->magnetic_field_covariance[3] = 0.0f; + magMsg->magnetic_field_covariance[4] = 0.037e-6; + magMsg->magnetic_field_covariance[5] = 0.0f; + magMsg->magnetic_field_covariance[6] = 0.0f; + magMsg->magnetic_field_covariance[7] = 0.0f; + magMsg->magnetic_field_covariance[8] = 0.047e-6; + + sensors_data_published = true; + mPubImuMag.publish(magMsg); + } + } /*else { + NODELET_DEBUG("No new MAG. DATA"); + }*/ - if (imu_SubNumber > 0 && new_imu_data) { - lastTs_imu = ts_imu; + if (imu_SubNumber > 0 && new_imu_data) + { + lastTs_imu = ts_imu; - sensor_msgs::ImuPtr imuMsg = boost::make_shared(); + sensor_msgs::ImuPtr imuMsg = boost::make_shared(); - imuMsg->header.stamp = ts_imu; + imuMsg->header.stamp = ts_imu; #ifdef DEBUG_SENS_TS - static ros::Time old_ts; - if (old_ts == imuMsg->header.stamp) { - NODELET_WARN_STREAM("Publishing IMU data with old timestamp " << old_ts); - } else { - NODELET_INFO_STREAM("Publishing IMU data with new timestamp. Freq: " << 1. / (ts_imu.toSec() - old_ts.toSec())); - old_ts = imuMsg->header.stamp; - } + static ros::Time old_ts; + if (old_ts == imuMsg->header.stamp) + { + NODELET_WARN_STREAM("Publishing IMU data with old timestamp " << old_ts); + } + else + { + NODELET_INFO_STREAM("Publishing IMU data with new timestamp. Freq: " << 1. 
/ (ts_imu.toSec() - old_ts.toSec())); + old_ts = imuMsg->header.stamp; + } #endif - imuMsg->header.frame_id = mImuFrameId; + imuMsg->header.frame_id = mImuFrameId; - imuMsg->orientation.x = sens_data.imu.pose.getOrientation()[0]; - imuMsg->orientation.y = sens_data.imu.pose.getOrientation()[1]; - imuMsg->orientation.z = sens_data.imu.pose.getOrientation()[2]; - imuMsg->orientation.w = sens_data.imu.pose.getOrientation()[3]; + imuMsg->orientation.x = sens_data.imu.pose.getOrientation()[0]; + imuMsg->orientation.y = sens_data.imu.pose.getOrientation()[1]; + imuMsg->orientation.z = sens_data.imu.pose.getOrientation()[2]; + imuMsg->orientation.w = sens_data.imu.pose.getOrientation()[3]; - imuMsg->angular_velocity.x = sens_data.imu.angular_velocity[0] * DEG2RAD; - imuMsg->angular_velocity.y = sens_data.imu.angular_velocity[1] * DEG2RAD; - imuMsg->angular_velocity.z = sens_data.imu.angular_velocity[2] * DEG2RAD; + imuMsg->angular_velocity.x = sens_data.imu.angular_velocity[0] * DEG2RAD; + imuMsg->angular_velocity.y = sens_data.imu.angular_velocity[1] * DEG2RAD; + imuMsg->angular_velocity.z = sens_data.imu.angular_velocity[2] * DEG2RAD; - imuMsg->linear_acceleration.x = sens_data.imu.linear_acceleration[0]; - imuMsg->linear_acceleration.y = sens_data.imu.linear_acceleration[1]; - imuMsg->linear_acceleration.z = sens_data.imu.linear_acceleration[2]; + imuMsg->linear_acceleration.x = sens_data.imu.linear_acceleration[0]; + imuMsg->linear_acceleration.y = sens_data.imu.linear_acceleration[1]; + imuMsg->linear_acceleration.z = sens_data.imu.linear_acceleration[2]; - for (int i = 0; i < 3; ++i) { - int r = 0; + for (int i = 0; i < 3; ++i) + { + int r = 0; - if (i == 0) { - r = 0; - } else if (i == 1) { - r = 1; - } else { - r = 2; - } + if (i == 0) + { + r = 0; + } + else if (i == 1) + { + r = 1; + } + else + { + r = 2; + } - imuMsg->orientation_covariance[i * 3 + 0] = sens_data.imu.pose_covariance.r[r * 3 + 0] * DEG2RAD * DEG2RAD; - imuMsg->orientation_covariance[i * 3 + 1] = sens_data.imu.pose_covariance.r[r * 3 + 1] * DEG2RAD * DEG2RAD; - imuMsg->orientation_covariance[i * 3 + 2] = sens_data.imu.pose_covariance.r[r * 3 + 2] * DEG2RAD * DEG2RAD; + imuMsg->orientation_covariance[i * 3 + 0] = sens_data.imu.pose_covariance.r[r * 3 + 0] * DEG2RAD * DEG2RAD; + imuMsg->orientation_covariance[i * 3 + 1] = sens_data.imu.pose_covariance.r[r * 3 + 1] * DEG2RAD * DEG2RAD; + imuMsg->orientation_covariance[i * 3 + 2] = sens_data.imu.pose_covariance.r[r * 3 + 2] * DEG2RAD * DEG2RAD; - imuMsg->linear_acceleration_covariance[i * 3 + 0] = sens_data.imu.linear_acceleration_covariance.r[r * 3 + 0]; - imuMsg->linear_acceleration_covariance[i * 3 + 1] = sens_data.imu.linear_acceleration_covariance.r[r * 3 + 1]; - imuMsg->linear_acceleration_covariance[i * 3 + 2] = sens_data.imu.linear_acceleration_covariance.r[r * 3 + 2]; + imuMsg->linear_acceleration_covariance[i * 3 + 0] = sens_data.imu.linear_acceleration_covariance.r[r * 3 + 0]; + imuMsg->linear_acceleration_covariance[i * 3 + 1] = sens_data.imu.linear_acceleration_covariance.r[r * 3 + 1]; + imuMsg->linear_acceleration_covariance[i * 3 + 2] = sens_data.imu.linear_acceleration_covariance.r[r * 3 + 2]; - imuMsg->angular_velocity_covariance[i * 3 + 0] = sens_data.imu.angular_velocity_covariance.r[r * 3 + 0] * DEG2RAD * DEG2RAD; - imuMsg->angular_velocity_covariance[i * 3 + 1] = sens_data.imu.angular_velocity_covariance.r[r * 3 + 1] * DEG2RAD * DEG2RAD; - imuMsg->angular_velocity_covariance[i * 3 + 2] = sens_data.imu.angular_velocity_covariance.r[r * 3 + 2] * DEG2RAD * 
DEG2RAD; - } + imuMsg->angular_velocity_covariance[i * 3 + 0] = + sens_data.imu.angular_velocity_covariance.r[r * 3 + 0] * DEG2RAD * DEG2RAD; + imuMsg->angular_velocity_covariance[i * 3 + 1] = + sens_data.imu.angular_velocity_covariance.r[r * 3 + 1] * DEG2RAD * DEG2RAD; + imuMsg->angular_velocity_covariance[i * 3 + 2] = + sens_data.imu.angular_velocity_covariance.r[r * 3 + 2] * DEG2RAD * DEG2RAD; + } - sensors_data_published = true; - mPubImu.publish(imuMsg); - } /*else { - NODELET_DEBUG("No new IMU DATA"); - }*/ - - if (imu_RawSubNumber > 0 && new_imu_data) { - lastTs_imu = ts_imu; - - sensor_msgs::ImuPtr imuRawMsg = boost::make_shared(); - - imuRawMsg->header.stamp = ts_imu; - imuRawMsg->header.frame_id = mImuFrameId; - imuRawMsg->angular_velocity.x = sens_data.imu.angular_velocity[0] * DEG2RAD; - imuRawMsg->angular_velocity.y = sens_data.imu.angular_velocity[1] * DEG2RAD; - imuRawMsg->angular_velocity.z = sens_data.imu.angular_velocity[2] * DEG2RAD; - imuRawMsg->linear_acceleration.x = sens_data.imu.linear_acceleration[0]; - imuRawMsg->linear_acceleration.y = sens_data.imu.linear_acceleration[1]; - imuRawMsg->linear_acceleration.z = sens_data.imu.linear_acceleration[2]; - - for (int i = 0; i < 3; ++i) { - int r = 0; - - if (i == 0) { - r = 0; - } else if (i == 1) { - r = 1; - } else { - r = 2; - } + sensors_data_published = true; + mPubImu.publish(imuMsg); + } /*else { + NODELET_DEBUG("No new IMU DATA"); + }*/ - imuRawMsg->linear_acceleration_covariance[i * 3 + 0] = sens_data.imu.linear_acceleration_covariance.r[r * 3 + 0]; - imuRawMsg->linear_acceleration_covariance[i * 3 + 1] = sens_data.imu.linear_acceleration_covariance.r[r * 3 + 1]; - imuRawMsg->linear_acceleration_covariance[i * 3 + 2] = sens_data.imu.linear_acceleration_covariance.r[r * 3 + 2]; - imuRawMsg->angular_velocity_covariance[i * 3 + 0] = sens_data.imu.angular_velocity_covariance.r[r * 3 + 0] * DEG2RAD * DEG2RAD; - imuRawMsg->angular_velocity_covariance[i * 3 + 1] = sens_data.imu.angular_velocity_covariance.r[r * 3 + 1] * DEG2RAD * DEG2RAD; - imuRawMsg->angular_velocity_covariance[i * 3 + 2] = sens_data.imu.angular_velocity_covariance.r[r * 3 + 2] * DEG2RAD * DEG2RAD; - } + if (imu_RawSubNumber > 0 && new_imu_data) + { + lastTs_imu = ts_imu; - // Orientation data is not available in "data_raw" -> See ROS REP145 - // http://www.ros.org/reps/rep-0145.html#topics - imuRawMsg->orientation_covariance[0] = -1; - sensors_data_published = true; - mPubImuRaw.publish(imuRawMsg); - } + sensor_msgs::ImuPtr imuRawMsg = boost::make_shared(); + + imuRawMsg->header.stamp = ts_imu; + imuRawMsg->header.frame_id = mImuFrameId; + imuRawMsg->angular_velocity.x = sens_data.imu.angular_velocity[0] * DEG2RAD; + imuRawMsg->angular_velocity.y = sens_data.imu.angular_velocity[1] * DEG2RAD; + imuRawMsg->angular_velocity.z = sens_data.imu.angular_velocity[2] * DEG2RAD; + imuRawMsg->linear_acceleration.x = sens_data.imu.linear_acceleration[0]; + imuRawMsg->linear_acceleration.y = sens_data.imu.linear_acceleration[1]; + imuRawMsg->linear_acceleration.z = sens_data.imu.linear_acceleration[2]; - // ----> Update Diagnostic - if (sensors_data_published) { - // Publish freq calculation - static std::chrono::steady_clock::time_point last_time = std::chrono::steady_clock::now(); - std::chrono::steady_clock::time_point now = std::chrono::steady_clock::now(); + for (int i = 0; i < 3; ++i) + { + int r = 0; - double elapsed_usec = std::chrono::duration_cast(now - last_time).count(); - last_time = now; + if (i == 0) + { + r = 0; + } + else if (i == 1) + { + r = 
1; + } + else + { + r = 2; + } - mSensPeriodMean_usec->addValue(elapsed_usec); - } - // <---- Update Diagnostic + imuRawMsg->linear_acceleration_covariance[i * 3 + 0] = sens_data.imu.linear_acceleration_covariance.r[r * 3 + 0]; + imuRawMsg->linear_acceleration_covariance[i * 3 + 1] = sens_data.imu.linear_acceleration_covariance.r[r * 3 + 1]; + imuRawMsg->linear_acceleration_covariance[i * 3 + 2] = sens_data.imu.linear_acceleration_covariance.r[r * 3 + 2]; + imuRawMsg->angular_velocity_covariance[i * 3 + 0] = + sens_data.imu.angular_velocity_covariance.r[r * 3 + 0] * DEG2RAD * DEG2RAD; + imuRawMsg->angular_velocity_covariance[i * 3 + 1] = + sens_data.imu.angular_velocity_covariance.r[r * 3 + 1] * DEG2RAD * DEG2RAD; + imuRawMsg->angular_velocity_covariance[i * 3 + 2] = + sens_data.imu.angular_velocity_covariance.r[r * 3 + 2] * DEG2RAD * DEG2RAD; + } + + // Orientation data is not available in "data_raw" -> See ROS REP145 + // http://www.ros.org/reps/rep-0145.html#topics + imuRawMsg->orientation_covariance[0] = -1; + sensors_data_published = true; + mPubImuRaw.publish(imuRawMsg); + } + + // ----> Update Diagnostic + if (sensors_data_published) + { + // Publish freq calculation + static std::chrono::steady_clock::time_point last_time = std::chrono::steady_clock::now(); + std::chrono::steady_clock::time_point now = std::chrono::steady_clock::now(); + + double elapsed_usec = std::chrono::duration_cast(now - last_time).count(); + last_time = now; + + mSensPeriodMean_usec->addValue(elapsed_usec); + } + // <---- Update Diagnostic } void ZEDWrapperNodelet::device_poll_thread_func() { - ros::Rate loop_rate(mPubFrameRate); + ros::Rate loop_rate(mPubFrameRate); - mRecording = false; + mRecording = false; - mElabPeriodMean_sec.reset(new sl_tools::CSmartMean(mCamFrameRate)); - mGrabPeriodMean_usec.reset(new sl_tools::CSmartMean(mCamFrameRate)); - mVideoDepthPeriodMean_sec.reset(new sl_tools::CSmartMean(mCamFrameRate)); - mPcPeriodMean_usec.reset(new sl_tools::CSmartMean(mCamFrameRate)); - mObjDetPeriodMean_msec.reset(new sl_tools::CSmartMean(mCamFrameRate)); + mElabPeriodMean_sec.reset(new sl_tools::CSmartMean(mCamFrameRate)); + mGrabPeriodMean_usec.reset(new sl_tools::CSmartMean(mCamFrameRate)); + mVideoDepthPeriodMean_sec.reset(new sl_tools::CSmartMean(mCamFrameRate)); + mPcPeriodMean_usec.reset(new sl_tools::CSmartMean(mCamFrameRate)); + mObjDetPeriodMean_msec.reset(new sl_tools::CSmartMean(mCamFrameRate)); - // Timestamp initialization - if (mSvoMode) { - mFrameTimestamp = ros::Time::now(); - } else { - mFrameTimestamp = sl_tools::slTime2Ros(mZed.getTimestamp(sl::TIME_REFERENCE::CURRENT)); + // Timestamp initialization + if (mSvoMode) + { + mFrameTimestamp = ros::Time::now(); + } + else + { + mFrameTimestamp = sl_tools::slTime2Ros(mZed.getTimestamp(sl::TIME_REFERENCE::CURRENT)); + } + mPrevFrameTimestamp = mFrameTimestamp; + + mPosTrackingActivated = false; + mMappingRunning = false; + mRecording = false; + + // Get the parameters of the ZED images + mCamWidth = mZed.getCameraInformation().camera_configuration.resolution.width; + mCamHeight = mZed.getCameraInformation().camera_configuration.resolution.height; + NODELET_DEBUG_STREAM("Original Camera grab frame size: " << mCamWidth << "x" << mCamHeight); + int pub_w, pub_h; + pub_w = static_cast(std::round(mCamWidth / mCustomDownscaleFactor)); + pub_h = static_cast(std::round(mCamHeight / mCustomDownscaleFactor)); + + if (pub_w > mCamWidth || pub_h > mCamHeight) + { + NODELET_WARN_STREAM("The publishing resolution (" + << pub_w << "x" << pub_h << ") 
cannot be higher than the grabbing resolution (" << mCamWidth + << "x" << mCamHeight << "). Using grab resolution for output messages."); + pub_w = mCamWidth; + pub_h = mCamHeight; + } + mMatResol = sl::Resolution(pub_w, pub_h); + NODELET_DEBUG_STREAM("Publishing frame size: " << mMatResol.width << "x" << mMatResol.height); + + // ----> Set Region of Interest + if (!mRoiParam.empty()) + { + NODELET_INFO("*** Setting ROI ***"); + sl::Resolution resol(mCamWidth, mCamHeight); + std::vector sl_poly; + std::string log_msg = parseRoiPoly(mRoiParam, sl_poly); + + // Create ROI mask + sl::Mat roi_mask(resol, sl::MAT_TYPE::U8_C1, sl::MEM::CPU); + + if (!sl_tools::generateROI(sl_poly, roi_mask)) + { + NODELET_WARN(" * Error generating the region of interest image mask."); } - mPrevFrameTimestamp = mFrameTimestamp; + else + { + sl::ERROR_CODE err = mZed.setRegionOfInterest(roi_mask); + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM(" * Error while setting ZED SDK region of interest: " << sl::toString(err).c_str()); + } + else + { + NODELET_INFO(" * Region of Interest correctly set."); + } + } + } + // <---- Set Region of Interest - mPosTrackingActivated = false; - mMappingRunning = false; - mRecording = false; + // Create and fill the camera information messages + fillCamInfo(mZed, mLeftCamInfoMsg, mRightCamInfoMsg, mLeftCamOptFrameId, mRightCamOptFrameId); + fillCamInfo(mZed, mLeftCamInfoRawMsg, mRightCamInfoRawMsg, mLeftCamOptFrameId, mRightCamOptFrameId, true); + fillCamDepthInfo(mZed, mDepthCamInfoMsg, mLeftCamOptFrameId); + + // the reference camera is the Left one (next to the ZED logo) + + mRgbCamInfoMsg = mLeftCamInfoMsg; + mRgbCamInfoRawMsg = mLeftCamInfoRawMsg; - // Get the parameters of the ZED images - mCamWidth = mZed.getCameraInformation().camera_configuration.resolution.width; - mCamHeight = mZed.getCameraInformation().camera_configuration.resolution.height; - NODELET_DEBUG_STREAM("Original Camera grab frame size: " << mCamWidth << "x" << mCamHeight); - int pub_w, pub_h; - pub_w = static_cast(std::round(mCamWidth / mCustomDownscaleFactor)); - pub_h = static_cast(std::round(mCamHeight / mCustomDownscaleFactor)); - - if (pub_w > mCamWidth || pub_h > mCamHeight) { - NODELET_WARN_STREAM("The publishing resolution (" << - pub_w << "x" << pub_h << - ") cannot be higher than the grabbing resolution (" << - mCamWidth << "x" << mCamHeight << - "). 
Using grab resolution for output messages."); - pub_w = mCamWidth; - pub_h = mCamHeight; + sl::RuntimeParameters runParams; + + // Main loop + while (mNhNs.ok()) + { + // Check for subscribers + uint32_t rgbSubnumber = mPubRgb.getNumSubscribers(); + uint32_t rgbRawSubnumber = mPubRawRgb.getNumSubscribers(); + uint32_t leftSubnumber = mPubLeft.getNumSubscribers(); + uint32_t leftRawSubnumber = mPubRawLeft.getNumSubscribers(); + uint32_t rightSubnumber = mPubRight.getNumSubscribers(); + uint32_t rightRawSubnumber = mPubRawRight.getNumSubscribers(); + uint32_t rgbGraySubnumber = mPubRgbGray.getNumSubscribers(); + uint32_t rgbGrayRawSubnumber = mPubRawRgbGray.getNumSubscribers(); + uint32_t leftGraySubnumber = mPubLeftGray.getNumSubscribers(); + uint32_t leftGrayRawSubnumber = mPubRawLeftGray.getNumSubscribers(); + uint32_t rightGraySubnumber = mPubRightGray.getNumSubscribers(); + uint32_t rightGrayRawSubnumber = mPubRawRightGray.getNumSubscribers(); + uint32_t depthSubnumber = 0; + uint32_t objDetSubnumber = 0; + uint32_t disparitySubnumber = 0; + uint32_t cloudSubnumber = 0; + uint32_t fusedCloudSubnumber = 0; + uint32_t poseSubnumber = 0; + uint32_t poseCovSubnumber = 0; + uint32_t odomSubnumber = 0; + uint32_t confMapSubnumber = 0; + uint32_t pathSubNumber = 0; + if (!mDepthDisabled) + { + depthSubnumber = mPubDepth.getNumSubscribers(); + disparitySubnumber = mPubDisparity.getNumSubscribers(); + cloudSubnumber = mPubCloud.getNumSubscribers(); + fusedCloudSubnumber = mPubFusedCloud.getNumSubscribers(); + poseSubnumber = mPubPose.getNumSubscribers(); + poseCovSubnumber = mPubPoseCov.getNumSubscribers(); + odomSubnumber = mPubOdom.getNumSubscribers(); + confMapSubnumber = mPubConfMap.getNumSubscribers(); + pathSubNumber = mPubMapPath.getNumSubscribers() + mPubOdomPath.getNumSubscribers(); + + if (mObjDetEnabled && mObjDetRunning) + { + objDetSubnumber = mPubObjDet.getNumSubscribers(); + } } - mMatResol = sl::Resolution(pub_w, pub_h); - NODELET_DEBUG_STREAM("Publishing frame size: " << mMatResol.width << "x" << mMatResol.height); - - // ----> Set Region of Interest - if (!mRoiParam.empty()) { - NODELET_INFO("*** Setting ROI ***"); - sl::Resolution resol(mCamWidth, mCamHeight); - std::vector sl_poly; - std::string log_msg = parseRoiPoly(mRoiParam, sl_poly); - - // Create ROI mask - sl::Mat roi_mask(resol, sl::MAT_TYPE::U8_C1, sl::MEM::CPU); - - if (!sl_tools::generateROI(sl_poly, roi_mask)) { - NODELET_WARN(" * Error generating the region of interest image mask."); - } else { - sl::ERROR_CODE err = mZed.setRegionOfInterest(roi_mask); - if (err != sl::ERROR_CODE::SUCCESS) { - NODELET_WARN_STREAM(" * Error while setting ZED SDK region of interest: " << sl::toString(err).c_str()); - } else { - NODELET_INFO(" * Region of Interest correctly set."); + uint32_t stereoSubNumber = mPubStereo.getNumSubscribers(); + uint32_t stereoRawSubNumber = mPubRawStereo.getNumSubscribers(); + + mGrabActive = + mRecording || mStreaming || mMappingEnabled || mObjDetEnabled || mPosTrackingEnabled || mPosTrackingActivated || + ((rgbSubnumber + rgbRawSubnumber + leftSubnumber + leftRawSubnumber + rightSubnumber + rightRawSubnumber + + rgbGraySubnumber + rgbGrayRawSubnumber + leftGraySubnumber + leftGrayRawSubnumber + rightGraySubnumber + + rightGrayRawSubnumber + depthSubnumber + disparitySubnumber + cloudSubnumber + poseSubnumber + + poseCovSubnumber + odomSubnumber + confMapSubnumber /*+ imuSubnumber + imuRawsubnumber*/ + pathSubNumber + + stereoSubNumber + stereoRawSubNumber + objDetSubnumber) > 0); + + // Run the 
loop only if there is some subscribers or SVO is active + if (mGrabActive) + { + std::lock_guard lock(mPosTrkMutex); + + // Note: once tracking is started it is never stopped anymore to not lose tracking information + bool computeTracking = + !mDepthDisabled && (mPosTrackingEnabled || mPosTrackingActivated || mMappingEnabled || mObjDetEnabled || + (mComputeDepth & mDepthStabilization) || poseSubnumber > 0 || poseCovSubnumber > 0 || + odomSubnumber > 0 || pathSubNumber > 0); + + // Start the tracking? + if ((computeTracking) && !mPosTrackingActivated && !mDepthDisabled) + { + start_pos_tracking(); + } + + // Start the mapping? + mMappingMutex.lock(); + if (mMappingEnabled && !mMappingRunning) + { + start_3d_mapping(); + } + mMappingMutex.unlock(); + + // Start the object detection? + mObjDetMutex.lock(); + if (mObjDetEnabled && !mObjDetRunning) + { + start_obj_detect(); + } + mObjDetMutex.unlock(); + + // Detect if one of the subscriber need to have the depth information + mComputeDepth = !mDepthDisabled && + (computeTracking || + ((depthSubnumber + disparitySubnumber + cloudSubnumber + fusedCloudSubnumber + poseSubnumber + + poseCovSubnumber + odomSubnumber + confMapSubnumber + objDetSubnumber) > 0)); + + if (mComputeDepth) + { + runParams.confidence_threshold = mCamDepthConfidence; + runParams.texture_confidence_threshold = mCamDepthTextureConf; + runParams.enable_depth = true; // Ask to compute the depth + } + else + { + runParams.enable_depth = false; // Ask to not compute the depth + } + + std::chrono::steady_clock::time_point start_elab = std::chrono::steady_clock::now(); + + // ZED Grab + mGrabStatus = mZed.grab(runParams); + + // cout << toString(grab_status) << endl; + if (mGrabStatus != sl::ERROR_CODE::SUCCESS) + { + // Detect if a error occurred (for example: + // the zed have been disconnected) and + // re-initialize the ZED + + NODELET_INFO_STREAM_THROTTLE(1.0, "Camera grab error: " << sl::toString(mGrabStatus).c_str()); + + std::this_thread::sleep_for(std::chrono::milliseconds(1)); + if (mGrabStatus != sl::ERROR_CODE::CAMERA_REBOOTING) + { + std::this_thread::sleep_for(std::chrono::milliseconds(50)); + continue; } + else if (mSvoMode && mGrabStatus == sl::ERROR_CODE::END_OF_SVOFILE_REACHED) + { + NODELET_WARN("SVO reached the end. 
The node will be stopped."); + mZed.close(); + exit(EXIT_SUCCESS); } - } - // <---- Set Region of Interest - - // Create and fill the camera information messages - fillCamInfo(mZed, mLeftCamInfoMsg, mRightCamInfoMsg, mLeftCamOptFrameId, mRightCamOptFrameId); - fillCamInfo(mZed, mLeftCamInfoRawMsg, mRightCamInfoRawMsg, mLeftCamOptFrameId, mRightCamOptFrameId, true); - fillCamDepthInfo(mZed, mDepthCamInfoMsg, mLeftCamOptFrameId); - - // the reference camera is the Left one (next to the ZED logo) - - mRgbCamInfoMsg = mLeftCamInfoMsg; - mRgbCamInfoRawMsg = mLeftCamInfoRawMsg; - - sl::RuntimeParameters runParams; - - // Main loop - while (mNhNs.ok()) { - // Check for subscribers - uint32_t rgbSubnumber = mPubRgb.getNumSubscribers(); - uint32_t rgbRawSubnumber = mPubRawRgb.getNumSubscribers(); - uint32_t leftSubnumber = mPubLeft.getNumSubscribers(); - uint32_t leftRawSubnumber = mPubRawLeft.getNumSubscribers(); - uint32_t rightSubnumber = mPubRight.getNumSubscribers(); - uint32_t rightRawSubnumber = mPubRawRight.getNumSubscribers(); - uint32_t rgbGraySubnumber = mPubRgbGray.getNumSubscribers(); - uint32_t rgbGrayRawSubnumber = mPubRawRgbGray.getNumSubscribers(); - uint32_t leftGraySubnumber = mPubLeftGray.getNumSubscribers(); - uint32_t leftGrayRawSubnumber = mPubRawLeftGray.getNumSubscribers(); - uint32_t rightGraySubnumber = mPubRightGray.getNumSubscribers(); - uint32_t rightGrayRawSubnumber = mPubRawRightGray.getNumSubscribers(); - uint32_t depthSubnumber = 0; - uint32_t objDetSubnumber = 0; - uint32_t disparitySubnumber = 0; - uint32_t cloudSubnumber = 0; - uint32_t fusedCloudSubnumber = 0; - uint32_t poseSubnumber = 0; - uint32_t poseCovSubnumber = 0; - uint32_t odomSubnumber = 0; - uint32_t confMapSubnumber = 0; - uint32_t pathSubNumber = 0; - if(!mDepthDisabled) { - depthSubnumber = mPubDepth.getNumSubscribers(); - disparitySubnumber = mPubDisparity.getNumSubscribers(); - cloudSubnumber = mPubCloud.getNumSubscribers(); - fusedCloudSubnumber = mPubFusedCloud.getNumSubscribers(); - poseSubnumber = mPubPose.getNumSubscribers(); - poseCovSubnumber = mPubPoseCov.getNumSubscribers(); - odomSubnumber = mPubOdom.getNumSubscribers(); - confMapSubnumber = mPubConfMap.getNumSubscribers(); - pathSubNumber = mPubMapPath.getNumSubscribers() + mPubOdomPath.getNumSubscribers(); - - if (mObjDetEnabled && mObjDetRunning) { - objDetSubnumber = mPubObjDet.getNumSubscribers(); - } + else if (!mRemoteStreamAddr.empty()) + { + NODELET_ERROR("Remote stream problem. 
The node will be stopped."); + mZed.close(); + exit(EXIT_FAILURE); } - uint32_t stereoSubNumber = mPubStereo.getNumSubscribers(); - uint32_t stereoRawSubNumber = mPubRawStereo.getNumSubscribers(); - - mGrabActive = mRecording || mStreaming || mMappingEnabled || mObjDetEnabled || mPosTrackingEnabled || mPosTrackingActivated || ((rgbSubnumber + rgbRawSubnumber + leftSubnumber + leftRawSubnumber + rightSubnumber + rightRawSubnumber + rgbGraySubnumber + rgbGrayRawSubnumber + leftGraySubnumber + leftGrayRawSubnumber + rightGraySubnumber + rightGrayRawSubnumber + depthSubnumber + disparitySubnumber + cloudSubnumber + poseSubnumber + poseCovSubnumber + odomSubnumber + confMapSubnumber /*+ imuSubnumber + imuRawsubnumber*/ + pathSubNumber + stereoSubNumber + stereoRawSubNumber + objDetSubnumber) > 0); - - // Run the loop only if there is some subscribers or SVO is active - if (mGrabActive) { - std::lock_guard lock(mPosTrkMutex); + else + { + if ((ros::Time::now() - mPrevFrameTimestamp).toSec() > 5 && !mSvoMode) + { + mCloseZedMutex.lock(); + mZed.close(); + mCloseZedMutex.unlock(); - // Note: once tracking is started it is never stopped anymore to not lose tracking information - bool computeTracking = !mDepthDisabled && - (mPosTrackingEnabled || mPosTrackingActivated || mMappingEnabled || mObjDetEnabled || (mComputeDepth & mDepthStabilization) || - poseSubnumber > 0 || poseCovSubnumber > 0 || odomSubnumber > 0 || pathSubNumber > 0); + mConnStatus = sl::ERROR_CODE::CAMERA_NOT_DETECTED; - // Start the tracking? - if ((computeTracking) && !mPosTrackingActivated && !mDepthDisabled) { - start_pos_tracking(); - } + while (mConnStatus != sl::ERROR_CODE::SUCCESS) + { + if (!mNhNs.ok()) + { + mStopNode = true; - // Start the mapping? - mMappingMutex.lock(); - if (mMappingEnabled && !mMappingRunning) { - start_3d_mapping(); - } - mMappingMutex.unlock(); + std::lock_guard lock(mCloseZedMutex); + NODELET_DEBUG("Closing ZED"); + if (mRecording) + { + mRecording = false; + mZed.disableRecording(); + } + mZed.close(); - // Start the object detection? 
- mObjDetMutex.lock(); - if (mObjDetEnabled && !mObjDetRunning) { - start_obj_detect(); - } - mObjDetMutex.unlock(); + NODELET_DEBUG("ZED pool thread finished"); + return; + } - // Detect if one of the subscriber need to have the depth information - mComputeDepth = !mDepthDisabled && (computeTracking || ((depthSubnumber + disparitySubnumber + cloudSubnumber + fusedCloudSubnumber + poseSubnumber + poseCovSubnumber + odomSubnumber + confMapSubnumber + objDetSubnumber) > 0)); + mZedParams.input.setFromSerialNumber(mZedSerialNumber); + mConnStatus = mZed.open(mZedParams); // Try to initialize the ZED + NODELET_INFO_STREAM("Connection status: " << toString(mConnStatus)); - if (mComputeDepth) { - runParams.confidence_threshold = mCamDepthConfidence; - runParams.texture_confidence_threshold = mCamDepthTextureConf; - runParams.enable_depth = true; // Ask to compute the depth - } else { - runParams.enable_depth = false; // Ask to not compute the depth + mDiagUpdater.force_update(); + std::this_thread::sleep_for(std::chrono::milliseconds(1000)); } - std::chrono::steady_clock::time_point start_elab = std::chrono::steady_clock::now(); - - // ZED Grab - mGrabStatus = mZed.grab(runParams); - - // cout << toString(grab_status) << endl; - if (mGrabStatus != sl::ERROR_CODE::SUCCESS) { - // Detect if a error occurred (for example: - // the zed have been disconnected) and - // re-initialize the ZED - - NODELET_INFO_STREAM_THROTTLE( 1.0, "Camera grab error: " << sl::toString(mGrabStatus).c_str()); - - std::this_thread::sleep_for(std::chrono::milliseconds(1)); - if(mGrabStatus!=sl::ERROR_CODE::CAMERA_REBOOTING) { - std::this_thread::sleep_for(std::chrono::milliseconds(50)); - continue; - } else if (mSvoMode && mGrabStatus == sl::ERROR_CODE::END_OF_SVOFILE_REACHED) { - NODELET_WARN("SVO reached the end. The node will be stopped."); - mZed.close(); - exit(EXIT_SUCCESS); - } else if (!mRemoteStreamAddr.empty()) { - NODELET_ERROR("Remote stream problem. The node will be stopped."); - mZed.close(); - exit(EXIT_FAILURE); - } else { - if ((ros::Time::now() - mPrevFrameTimestamp).toSec() > 5 && !mSvoMode) { - mCloseZedMutex.lock(); - mZed.close(); - mCloseZedMutex.unlock(); - - mConnStatus = sl::ERROR_CODE::CAMERA_NOT_DETECTED; - - while (mConnStatus != sl::ERROR_CODE::SUCCESS) { - if (!mNhNs.ok()) { - mStopNode = true; - - std::lock_guard lock(mCloseZedMutex); - NODELET_DEBUG("Closing ZED"); - if (mRecording) { - mRecording = false; - mZed.disableRecording(); - } - mZed.close(); - - NODELET_DEBUG("ZED pool thread finished"); - return; - } - - mZedParams.input.setFromSerialNumber(mZedSerialNumber); - mConnStatus = mZed.open(mZedParams); // Try to initialize the ZED - NODELET_INFO_STREAM("Connection status: " << toString(mConnStatus)); - - mDiagUpdater.force_update(); - std::this_thread::sleep_for(std::chrono::milliseconds(1000)); - } - - mPosTrackingActivated = false; - - // Note: once tracking is started it is never stopped anymore to not lose tracking information - bool computeTracking = !mDepthDisabled && - (mPosTrackingEnabled || mPosTrackingActivated || mMappingEnabled || mObjDetEnabled || (mComputeDepth & mDepthStabilization) || - poseSubnumber > 0 || poseCovSubnumber > 0 || odomSubnumber > 0 || pathSubNumber > 0); - - // Start the tracking? 
- if ((computeTracking) && !mPosTrackingActivated && !mDepthDisabled) { - start_pos_tracking(); - } - } - } + mPosTrackingActivated = false; - mDiagUpdater.update(); + // Note: once tracking is started it is never stopped anymore to not lose tracking information + bool computeTracking = + !mDepthDisabled && (mPosTrackingEnabled || mPosTrackingActivated || mMappingEnabled || mObjDetEnabled || + (mComputeDepth & mDepthStabilization) || poseSubnumber > 0 || + poseCovSubnumber > 0 || odomSubnumber > 0 || pathSubNumber > 0); - continue; + // Start the tracking? + if ((computeTracking) && !mPosTrackingActivated && !mDepthDisabled) + { + start_pos_tracking(); } + } + } - mFrameCount++; + mDiagUpdater.update(); - // ----> Timestamp - if (mSvoMode) { - mFrameTimestamp = ros::Time::now(); - } else { - mFrameTimestamp = sl_tools::slTime2Ros(mZed.getTimestamp(sl::TIME_REFERENCE::IMAGE)); - } - mPrevFrameTimestamp = mFrameTimestamp; - ros::Time stamp = mFrameTimestamp; // Fix processing Timestamp - // <---- Timestamp + continue; + } - // Publish Color and Depth images - pubVideoDepth(); + mFrameCount++; - // ----> SVO recording - mRecMutex.lock(); + // ----> Timestamp + if (mSvoMode) + { + mFrameTimestamp = ros::Time::now(); + } + else + { + mFrameTimestamp = sl_tools::slTime2Ros(mZed.getTimestamp(sl::TIME_REFERENCE::IMAGE)); + } + mPrevFrameTimestamp = mFrameTimestamp; + ros::Time stamp = mFrameTimestamp; // Fix processing Timestamp + // <---- Timestamp - if (mRecording) { - mRecStatus = mZed.getRecordingStatus(); + // Publish Color and Depth images + pubVideoDepth(); - if (!mRecStatus.status) { - NODELET_ERROR_THROTTLE(1.0, "Error saving frame to SVO"); - } + // ----> SVO recording + mRecMutex.lock(); - mDiagUpdater.force_update(); - } - mRecMutex.unlock(); - // <---- SVO recording + if (mRecording) + { + mRecStatus = mZed.getRecordingStatus(); - // ----> Publish freq calculation - static std::chrono::steady_clock::time_point last_time = std::chrono::steady_clock::now(); - std::chrono::steady_clock::time_point now = std::chrono::steady_clock::now(); + if (!mRecStatus.status) + { + NODELET_ERROR_THROTTLE(1.0, "Error saving frame to SVO"); + } - double elapsed_usec = std::chrono::duration_cast(now - last_time).count(); - last_time = now; + mDiagUpdater.force_update(); + } + mRecMutex.unlock(); + // <---- SVO recording - mGrabPeriodMean_usec->addValue(elapsed_usec); - // NODELET_INFO_STREAM("Grab time: " << elapsed_usec / 1000 << " msec"); - // <---- Publish freq calculation + // ----> Publish freq calculation + static std::chrono::steady_clock::time_point last_time = std::chrono::steady_clock::now(); + std::chrono::steady_clock::time_point now = std::chrono::steady_clock::now(); - // ----> Camera Settings - int value; - sl::VIDEO_SETTINGS setting; - sl::ERROR_CODE err; + double elapsed_usec = std::chrono::duration_cast(now - last_time).count(); + last_time = now; - if (!mSvoMode && mFrameCount % 5 == 0) { - // NODELET_DEBUG_STREAM( "[" << mFrameCount << "] device_poll_thread_func MUTEX LOCK"); - mDynParMutex.lock(); + mGrabPeriodMean_usec->addValue(elapsed_usec); + // NODELET_INFO_STREAM("Grab time: " << elapsed_usec / 1000 << " msec"); + // <---- Publish freq calculation - setting = sl::VIDEO_SETTINGS::BRIGHTNESS; - err = mZed.getCameraSettings(setting, value); - if (err==sl::ERROR_CODE::SUCCESS && value != mCamBrightness) { - err = mZed.setCameraSettings(setting, mCamBrightness); - } - if(err != sl::ERROR_CODE::SUCCESS) { - NODELET_WARN_STREAM( "Error setting parameter " << sl::toString(setting) << 
": " << sl::toString(err)); - } else { - NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamBrightness << " <- " << value); - mUpdateDynParams = true; - } + // ----> Camera Settings + int value; + sl::VIDEO_SETTINGS setting; + sl::ERROR_CODE err; - setting = sl::VIDEO_SETTINGS::CONTRAST; - err = mZed.getCameraSettings(setting, value); - if (err==sl::ERROR_CODE::SUCCESS && value != mCamContrast) { - err = mZed.setCameraSettings(setting, mCamContrast); - } - if(err != sl::ERROR_CODE::SUCCESS) { - NODELET_WARN_STREAM( "Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); - } else { - NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamContrast << " <- " << value); - mUpdateDynParams = true; - } + if (!mSvoMode && mFrameCount % 5 == 0) + { + // NODELET_DEBUG_STREAM( "[" << mFrameCount << "] device_poll_thread_func MUTEX LOCK"); + mDynParMutex.lock(); - setting = sl::VIDEO_SETTINGS::HUE; - err = mZed.getCameraSettings(setting, value); - if (err==sl::ERROR_CODE::SUCCESS && value != mCamHue) { - err = mZed.setCameraSettings(setting, mCamHue); - } - if(err != sl::ERROR_CODE::SUCCESS) { - NODELET_WARN_STREAM( "Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); - } else { - NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamHue << " <- " << value); - mUpdateDynParams = true; - } + setting = sl::VIDEO_SETTINGS::BRIGHTNESS; + err = mZed.getCameraSettings(setting, value); + if (err == sl::ERROR_CODE::SUCCESS && value != mCamBrightness) + { + err = mZed.setCameraSettings(setting, mCamBrightness); + } + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); + } + else + { + NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamBrightness << " <- " << value); + mUpdateDynParams = true; + } - setting = sl::VIDEO_SETTINGS::SATURATION; - err = mZed.getCameraSettings(setting, value); - if (err==sl::ERROR_CODE::SUCCESS && value != mCamSaturation) { - err = mZed.setCameraSettings(setting, mCamSaturation); - } - if(err != sl::ERROR_CODE::SUCCESS) { - NODELET_WARN_STREAM( "Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); - } else { - NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamSaturation << " <- " << value); - mUpdateDynParams = true; - } + setting = sl::VIDEO_SETTINGS::CONTRAST; + err = mZed.getCameraSettings(setting, value); + if (err == sl::ERROR_CODE::SUCCESS && value != mCamContrast) + { + err = mZed.setCameraSettings(setting, mCamContrast); + } + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); + } + else + { + NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamContrast << " <- " << value); + mUpdateDynParams = true; + } - setting = sl::VIDEO_SETTINGS::SHARPNESS; - err = mZed.getCameraSettings(setting, value); - if (err==sl::ERROR_CODE::SUCCESS && value != mCamSharpness) { - err = mZed.setCameraSettings(setting, mCamSharpness); - } - if(err != sl::ERROR_CODE::SUCCESS) { - NODELET_WARN_STREAM( "Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); - } else { - NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamSharpness << " <- " << value); - mUpdateDynParams = true; - } + setting = sl::VIDEO_SETTINGS::HUE; + err = mZed.getCameraSettings(setting, value); + if (err == sl::ERROR_CODE::SUCCESS && value 
!= mCamHue) + { + err = mZed.setCameraSettings(setting, mCamHue); + } + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); + } + else + { + NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamHue << " <- " << value); + mUpdateDynParams = true; + } - setting = sl::VIDEO_SETTINGS::GAMMA; - err = mZed.getCameraSettings(setting, value); - if (err==sl::ERROR_CODE::SUCCESS && value != mCamGamma) { - err = mZed.setCameraSettings(setting, mCamGamma); - } - if(err != sl::ERROR_CODE::SUCCESS) { - NODELET_WARN_STREAM( "Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); - } else { - NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamGamma << " <- " << value); - mUpdateDynParams = true; - } + setting = sl::VIDEO_SETTINGS::SATURATION; + err = mZed.getCameraSettings(setting, value); + if (err == sl::ERROR_CODE::SUCCESS && value != mCamSaturation) + { + err = mZed.setCameraSettings(setting, mCamSaturation); + } + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); + } + else + { + NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamSaturation << " <- " << value); + mUpdateDynParams = true; + } - if (mTriggerAutoExposure) { - setting = sl::VIDEO_SETTINGS::AEC_AGC; - err = mZed.getCameraSettings(setting, value); - int aec_agc = (mCamAutoExposure?1:0); - if (err==sl::ERROR_CODE::SUCCESS && value != aec_agc) { - err = mZed.setCameraSettings(setting, aec_agc); - } - if(err != sl::ERROR_CODE::SUCCESS) { - NODELET_WARN_STREAM( "Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); - } else { - NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << aec_agc << " <- " << value); - mUpdateDynParams = true; - } - } - if (!mCamAutoExposure){ - setting = sl::VIDEO_SETTINGS::EXPOSURE; - err = mZed.getCameraSettings(setting, value); - if (err==sl::ERROR_CODE::SUCCESS && value != mCamExposure) { - err = mZed.setCameraSettings(setting, mCamExposure); - } - if(err != sl::ERROR_CODE::SUCCESS) { - NODELET_WARN_STREAM( "Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); - } else { - NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamExposure << " <- " << value); - mUpdateDynParams = true; - } - - setting = sl::VIDEO_SETTINGS::GAIN; - err = mZed.getCameraSettings(setting, value); - if (err==sl::ERROR_CODE::SUCCESS && value != mCamGain) { - err = mZed.setCameraSettings(setting, mCamGain); - } - if(err != sl::ERROR_CODE::SUCCESS) { - NODELET_WARN_STREAM( "Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); - } else { - NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamGain << " <- " << value); - mUpdateDynParams = true; - } - } + setting = sl::VIDEO_SETTINGS::SHARPNESS; + err = mZed.getCameraSettings(setting, value); + if (err == sl::ERROR_CODE::SUCCESS && value != mCamSharpness) + { + err = mZed.setCameraSettings(setting, mCamSharpness); + } + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); + } + else + { + NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamSharpness << " <- " << value); + mUpdateDynParams = true; + } - if (mTriggerAutoWB) { - setting = sl::VIDEO_SETTINGS::WHITEBALANCE_AUTO; - err = mZed.getCameraSettings(setting, value); 
- int wb_auto = (mCamAutoWB?1:0); - if (err==sl::ERROR_CODE::SUCCESS && value != wb_auto) { - err = mZed.setCameraSettings(setting, wb_auto); - } - if(err != sl::ERROR_CODE::SUCCESS) { - NODELET_WARN_STREAM( "Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); - } else { - NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << wb_auto << " <- " << value); - mUpdateDynParams = true; - } - } - if (!mCamAutoWB) { - setting = sl::VIDEO_SETTINGS::WHITEBALANCE_TEMPERATURE; - err = mZed.getCameraSettings(setting, value); - if (err==sl::ERROR_CODE::SUCCESS && value != mCamWB) { - err = mZed.setCameraSettings(setting, mCamWB); - } - if(err != sl::ERROR_CODE::SUCCESS) { - NODELET_WARN_STREAM( "Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); - } else { - NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamWB << " <- " << value); - mUpdateDynParams = true; - } - } - mDynParMutex.unlock(); - // NODELET_DEBUG_STREAM( "device_poll_thread_func MUTEX UNLOCK"); - } + setting = sl::VIDEO_SETTINGS::GAMMA; + err = mZed.getCameraSettings(setting, value); + if (err == sl::ERROR_CODE::SUCCESS && value != mCamGamma) + { + err = mZed.setCameraSettings(setting, mCamGamma); + } + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); + } + else + { + NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamGamma << " <- " << value); + mUpdateDynParams = true; + } - if (mUpdateDynParams) { - NODELET_DEBUG("Update Dynamic Parameters"); - updateDynamicReconfigure(); - } - // <---- Camera Settings + if (mTriggerAutoExposure) + { + setting = sl::VIDEO_SETTINGS::AEC_AGC; + err = mZed.getCameraSettings(setting, value); + int aec_agc = (mCamAutoExposure ? 
1 : 0); + if (err == sl::ERROR_CODE::SUCCESS && value != aec_agc) + { + err = mZed.setCameraSettings(setting, aec_agc); + } + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); + } + else + { + NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << aec_agc << " <- " << value); + mUpdateDynParams = true; + } + } + if (!mCamAutoExposure) + { + setting = sl::VIDEO_SETTINGS::EXPOSURE; + err = mZed.getCameraSettings(setting, value); + if (err == sl::ERROR_CODE::SUCCESS && value != mCamExposure) + { + err = mZed.setCameraSettings(setting, mCamExposure); + } + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); + } + else + { + NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamExposure << " <- " << value); + mUpdateDynParams = true; + } + + setting = sl::VIDEO_SETTINGS::GAIN; + err = mZed.getCameraSettings(setting, value); + if (err == sl::ERROR_CODE::SUCCESS && value != mCamGain) + { + err = mZed.setCameraSettings(setting, mCamGain); + } + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); + } + else + { + NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamGain << " <- " << value); + mUpdateDynParams = true; + } + } - // Publish the point cloud if someone has subscribed to - if (cloudSubnumber > 0) { - // Run the point cloud conversion asynchronously to avoid slowing down - // all the program - // Retrieve raw pointCloud data if latest Pointcloud is ready - std::unique_lock lock(mPcMutex, std::defer_lock); + if (mTriggerAutoWB) + { + setting = sl::VIDEO_SETTINGS::WHITEBALANCE_AUTO; + err = mZed.getCameraSettings(setting, value); + int wb_auto = (mCamAutoWB ? 
1 : 0); + if (err == sl::ERROR_CODE::SUCCESS && value != wb_auto) + { + err = mZed.setCameraSettings(setting, wb_auto); + } + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); + } + else + { + NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << wb_auto << " <- " << value); + mUpdateDynParams = true; + } + } + if (!mCamAutoWB) + { + setting = sl::VIDEO_SETTINGS::WHITEBALANCE_TEMPERATURE; + err = mZed.getCameraSettings(setting, value); + if (err == sl::ERROR_CODE::SUCCESS && value != mCamWB) + { + err = mZed.setCameraSettings(setting, mCamWB); + } + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); + } + else + { + NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamWB << " <- " << value); + mUpdateDynParams = true; + } + } + mDynParMutex.unlock(); + // NODELET_DEBUG_STREAM( "device_poll_thread_func MUTEX UNLOCK"); + } - if (lock.try_lock()) { - mZed.retrieveMeasure(mCloud, sl::MEASURE::XYZBGRA, sl::MEM::CPU, mMatResol); + if (mUpdateDynParams) + { + NODELET_DEBUG("Update Dynamic Parameters"); + updateDynamicReconfigure(); + } + // <---- Camera Settings - mPointCloudFrameId = mDepthFrameId; - mPointCloudTime = stamp; + // Publish the point cloud if someone has subscribed to + if (cloudSubnumber > 0) + { + // Run the point cloud conversion asynchronously to avoid slowing down + // all the program + // Retrieve raw pointCloud data if latest Pointcloud is ready + std::unique_lock lock(mPcMutex, std::defer_lock); - // Signal Pointcloud thread that a new pointcloud is ready - mPcDataReadyCondVar.notify_one(); - mPcDataReady = true; - mPcPublishing = true; - } - } else { - mPcPublishing = false; - } + if (lock.try_lock()) + { + mZed.retrieveMeasure(mCloud, sl::MEASURE::XYZBGRA, sl::MEM::CPU, mMatResol); - mObjDetMutex.lock(); - if (mObjDetRunning && objDetSubnumber > 0) { - processDetectedObjects(stamp); - } - mObjDetMutex.unlock(); + mPointCloudFrameId = mDepthFrameId; + mPointCloudTime = stamp; - // Publish the odometry if someone has subscribed to - if (computeTracking) { - if (!mSensor2BaseTransfValid) { - getSens2BaseTransform(); - } + // Signal Pointcloud thread that a new pointcloud is ready + mPcDataReadyCondVar.notify_one(); + mPcDataReady = true; + mPcPublishing = true; + } + } + else + { + mPcPublishing = false; + } - if (!mSensor2CameraTransfValid) { - getSens2CameraTransform(); - } + mObjDetMutex.lock(); + if (mObjDetRunning && objDetSubnumber > 0) + { + processDetectedObjects(stamp); + } + mObjDetMutex.unlock(); - if (!mCamera2BaseTransfValid) { - getCamera2BaseTransform(); - } + // Publish the odometry if someone has subscribed to + if (computeTracking) + { + if (!mSensor2BaseTransfValid) + { + getSens2BaseTransform(); + } - if (!mInitOdomWithPose) { - sl::Pose deltaOdom; - mPosTrackingStatus = mZed.getPosition(deltaOdom, sl::REFERENCE_FRAME::CAMERA); + if (!mSensor2CameraTransfValid) + { + getSens2CameraTransform(); + } - sl::Translation translation = deltaOdom.getTranslation(); - sl::Orientation quat = deltaOdom.getOrientation(); + if (!mCamera2BaseTransfValid) + { + getCamera2BaseTransform(); + } + + if (!mInitOdomWithPose) + { + sl::Pose deltaOdom; + mPosTrackingStatus = mZed.getPosition(deltaOdom, sl::REFERENCE_FRAME::CAMERA); + + sl::Translation translation = deltaOdom.getTranslation(); + sl::Orientation quat = deltaOdom.getOrientation(); #if 0 NODELET_DEBUG("delta 
ODOM [%s] - %.2f,%.2f,%.2f %.2f,%.2f,%.2f,%.2f", @@ -3874,39 +4437,43 @@ void ZEDWrapperNodelet::device_poll_thread_func() NODELET_DEBUG_STREAM("ODOM -> Tracking Status: " << sl::toString(mTrackingStatus)); #endif - if (mPosTrackingStatus == sl::POSITIONAL_TRACKING_STATE::OK || mPosTrackingStatus == sl::POSITIONAL_TRACKING_STATE::SEARCHING || mPosTrackingStatus == sl::POSITIONAL_TRACKING_STATE::FPS_TOO_LOW) { - // Transform ZED delta odom pose in TF2 Transformation - geometry_msgs::Transform deltaTransf; - deltaTransf.translation.x = translation(0); - deltaTransf.translation.y = translation(1); - deltaTransf.translation.z = translation(2); - deltaTransf.rotation.x = quat(0); - deltaTransf.rotation.y = quat(1); - deltaTransf.rotation.z = quat(2); - deltaTransf.rotation.w = quat(3); - tf2::Transform deltaOdomTf; - tf2::fromMsg(deltaTransf, deltaOdomTf); - // delta odom from sensor to base frame - tf2::Transform deltaOdomTf_base = mSensor2BaseTransf.inverse() * deltaOdomTf * mSensor2BaseTransf; - - // Propagate Odom transform in time - mOdom2BaseTransf = mOdom2BaseTransf * deltaOdomTf_base; - - if (mTwoDMode) { - tf2::Vector3 tr_2d = mOdom2BaseTransf.getOrigin(); - tr_2d.setZ(mFixedZValue); - mOdom2BaseTransf.setOrigin(tr_2d); - - double roll, pitch, yaw; - tf2::Matrix3x3(mOdom2BaseTransf.getRotation()).getRPY(roll, pitch, yaw); - - tf2::Quaternion quat_2d; - quat_2d.setRPY(0.0, 0.0, yaw); - - mOdom2BaseTransf.setRotation(quat_2d); - } - -#if 0 //#ifndef NDEBUG // Enable for TF checking + if (mPosTrackingStatus == sl::POSITIONAL_TRACKING_STATE::OK || + mPosTrackingStatus == sl::POSITIONAL_TRACKING_STATE::SEARCHING || + mPosTrackingStatus == sl::POSITIONAL_TRACKING_STATE::FPS_TOO_LOW) + { + // Transform ZED delta odom pose in TF2 Transformation + geometry_msgs::Transform deltaTransf; + deltaTransf.translation.x = translation(0); + deltaTransf.translation.y = translation(1); + deltaTransf.translation.z = translation(2); + deltaTransf.rotation.x = quat(0); + deltaTransf.rotation.y = quat(1); + deltaTransf.rotation.z = quat(2); + deltaTransf.rotation.w = quat(3); + tf2::Transform deltaOdomTf; + tf2::fromMsg(deltaTransf, deltaOdomTf); + // delta odom from sensor to base frame + tf2::Transform deltaOdomTf_base = mSensor2BaseTransf.inverse() * deltaOdomTf * mSensor2BaseTransf; + + // Propagate Odom transform in time + mOdom2BaseTransf = mOdom2BaseTransf * deltaOdomTf_base; + + if (mTwoDMode) + { + tf2::Vector3 tr_2d = mOdom2BaseTransf.getOrigin(); + tr_2d.setZ(mFixedZValue); + mOdom2BaseTransf.setOrigin(tr_2d); + + double roll, pitch, yaw; + tf2::Matrix3x3(mOdom2BaseTransf.getRotation()).getRPY(roll, pitch, yaw); + + tf2::Quaternion quat_2d; + quat_2d.setRPY(0.0, 0.0, yaw); + + mOdom2BaseTransf.setRotation(quat_2d); + } + +#if 0 //#ifndef NDEBUG // Enable for TF checking double roll, pitch, yaw; tf2::Matrix3x3(mOdom2BaseTransf.getRotation()).getRPY(roll, pitch, yaw); @@ -3916,27 +4483,32 @@ void ZEDWrapperNodelet::device_poll_thread_func() roll * RAD2DEG, pitch * RAD2DEG, yaw * RAD2DEG); #endif - // Publish odometry message - if (odomSubnumber > 0) { - publishOdom(mOdom2BaseTransf, deltaOdom, stamp); - } - - mPosTrackingReady = true; - } - } else if (mFloorAlignment) { - NODELET_WARN_THROTTLE(5.0, "Odometry will be published as soon as the floor as been detected for the first " - "time"); - } + // Publish odometry message + if (odomSubnumber > 0) + { + publishOdom(mOdom2BaseTransf, deltaOdom, stamp); } - // Publish the zed camera pose if someone has subscribed to - if (computeTracking) { - 
mPosTrackingStatus = mZed.getPosition(mLastZedPose, sl::REFERENCE_FRAME::WORLD); + mPosTrackingReady = true; + } + } + else if (mFloorAlignment) + { + NODELET_WARN_THROTTLE(5.0, + "Odometry will be published as soon as the floor as been detected for the first " + "time"); + } + } + + // Publish the zed camera pose if someone has subscribed to + if (computeTracking) + { + mPosTrackingStatus = mZed.getPosition(mLastZedPose, sl::REFERENCE_FRAME::WORLD); - sl::Translation translation = mLastZedPose.getTranslation(); - sl::Orientation quat = mLastZedPose.getOrientation(); + sl::Translation translation = mLastZedPose.getTranslation(); + sl::Orientation quat = mLastZedPose.getOrientation(); -#if 0 //#ifndef NDEBUG // Enable for TF checking +#if 0 //#ifndef NDEBUG // Enable for TF checking double roll, pitch, yaw; tf2::Matrix3x3(tf2::Quaternion(quat.ox, quat.oy, quat.oz, quat.ow)).getRPY(roll, pitch, yaw); @@ -3949,48 +4521,54 @@ void ZEDWrapperNodelet::device_poll_thread_func() #endif - static sl::POSITIONAL_TRACKING_STATE old_tracking_state = sl::POSITIONAL_TRACKING_STATE::OFF; - if (mPosTrackingStatus == sl::POSITIONAL_TRACKING_STATE::OK || mPosTrackingStatus == sl::POSITIONAL_TRACKING_STATE::SEARCHING - /*|| status == sl::POSITIONAL_TRACKING_STATE::FPS_TOO_LOW*/) { - if (mPosTrackingStatus == sl::POSITIONAL_TRACKING_STATE::OK && mPosTrackingStatus != old_tracking_state) { - NODELET_INFO_STREAM("Positional tracking -> OK [" << sl::toString(mPosTrackingStatus).c_str() << "]"); - old_tracking_state = mPosTrackingStatus; - } - if (mPosTrackingStatus == sl::POSITIONAL_TRACKING_STATE::SEARCHING && mPosTrackingStatus != old_tracking_state) { - NODELET_INFO_STREAM("Positional tracking -> Searching for a known position [" - << sl::toString(mPosTrackingStatus).c_str() << "]"); - old_tracking_state = mPosTrackingStatus; - } - // Transform ZED pose in TF2 Transformation - geometry_msgs::Transform map2sensTransf; - - map2sensTransf.translation.x = translation(0); - map2sensTransf.translation.y = translation(1); - map2sensTransf.translation.z = translation(2); - map2sensTransf.rotation.x = quat(0); - map2sensTransf.rotation.y = quat(1); - map2sensTransf.rotation.z = quat(2); - map2sensTransf.rotation.w = quat(3); - tf2::Transform map_to_sens_transf; - tf2::fromMsg(map2sensTransf, map_to_sens_transf); - - mMap2BaseTransf = map_to_sens_transf * mSensor2BaseTransf; // Base position in map frame - - if (mTwoDMode) { - tf2::Vector3 tr_2d = mMap2BaseTransf.getOrigin(); - tr_2d.setZ(mFixedZValue); - mMap2BaseTransf.setOrigin(tr_2d); + static sl::POSITIONAL_TRACKING_STATE old_tracking_state = sl::POSITIONAL_TRACKING_STATE::OFF; + if (mPosTrackingStatus == sl::POSITIONAL_TRACKING_STATE::OK || + mPosTrackingStatus == sl::POSITIONAL_TRACKING_STATE::SEARCHING + /*|| status == sl::POSITIONAL_TRACKING_STATE::FPS_TOO_LOW*/) + { + if (mPosTrackingStatus == sl::POSITIONAL_TRACKING_STATE::OK && mPosTrackingStatus != old_tracking_state) + { + NODELET_INFO_STREAM("Positional tracking -> OK [" << sl::toString(mPosTrackingStatus).c_str() << "]"); + old_tracking_state = mPosTrackingStatus; + } + if (mPosTrackingStatus == sl::POSITIONAL_TRACKING_STATE::SEARCHING && + mPosTrackingStatus != old_tracking_state) + { + NODELET_INFO_STREAM("Positional tracking -> Searching for a known position [" + << sl::toString(mPosTrackingStatus).c_str() << "]"); + old_tracking_state = mPosTrackingStatus; + } + // Transform ZED pose in TF2 Transformation + geometry_msgs::Transform map2sensTransf; + + map2sensTransf.translation.x = translation(0); + 
map2sensTransf.translation.y = translation(1); + map2sensTransf.translation.z = translation(2); + map2sensTransf.rotation.x = quat(0); + map2sensTransf.rotation.y = quat(1); + map2sensTransf.rotation.z = quat(2); + map2sensTransf.rotation.w = quat(3); + tf2::Transform map_to_sens_transf; + tf2::fromMsg(map2sensTransf, map_to_sens_transf); + + mMap2BaseTransf = map_to_sens_transf * mSensor2BaseTransf; // Base position in map frame + + if (mTwoDMode) + { + tf2::Vector3 tr_2d = mMap2BaseTransf.getOrigin(); + tr_2d.setZ(mFixedZValue); + mMap2BaseTransf.setOrigin(tr_2d); - double roll, pitch, yaw; - tf2::Matrix3x3(mMap2BaseTransf.getRotation()).getRPY(roll, pitch, yaw); + double roll, pitch, yaw; + tf2::Matrix3x3(mMap2BaseTransf.getRotation()).getRPY(roll, pitch, yaw); - tf2::Quaternion quat_2d; - quat_2d.setRPY(0.0, 0.0, yaw); + tf2::Quaternion quat_2d; + quat_2d.setRPY(0.0, 0.0, yaw); - mMap2BaseTransf.setRotation(quat_2d); - } + mMap2BaseTransf.setRotation(quat_2d); + } -#if 0 //#ifndef NDEBUG // Enable for TF checking +#if 0 //#ifndef NDEBUG // Enable for TF checking double roll, pitch, yaw; tf2::Matrix3x3(mMap2BaseTransf.getRotation()).getRPY(roll, pitch, yaw); @@ -3999,34 +4577,41 @@ void ZEDWrapperNodelet::device_poll_thread_func() mMap2BaseTransf.getOrigin().z(), roll * RAD2DEG, pitch * RAD2DEG, yaw * RAD2DEG); #endif - bool initOdom = false; - - if (!(mFloorAlignment)) { - initOdom = mInitOdomWithPose; - } else { - initOdom = (mPosTrackingStatus == sl::POSITIONAL_TRACKING_STATE::OK) & mInitOdomWithPose; - } - - if (initOdom || mResetOdom) { - NODELET_INFO("Odometry aligned to last tracking pose"); - - // Propagate Odom transform in time - mOdom2BaseTransf = mMap2BaseTransf; - mMap2BaseTransf.setIdentity(); - - if (odomSubnumber > 0) { - // Publish odometry message - publishOdom(mOdom2BaseTransf, mLastZedPose, mFrameTimestamp); - } + bool initOdom = false; + + if (!(mFloorAlignment)) + { + initOdom = mInitOdomWithPose; + } + else + { + initOdom = (mPosTrackingStatus == sl::POSITIONAL_TRACKING_STATE::OK) & mInitOdomWithPose; + } + + if (initOdom || mResetOdom) + { + NODELET_INFO("Odometry aligned to last tracking pose"); + + // Propagate Odom transform in time + mOdom2BaseTransf = mMap2BaseTransf; + mMap2BaseTransf.setIdentity(); + + if (odomSubnumber > 0) + { + // Publish odometry message + publishOdom(mOdom2BaseTransf, mLastZedPose, mFrameTimestamp); + } - mInitOdomWithPose = false; - mResetOdom = false; - } else { - // Transformation from map to odometry frame - // mMap2OdomTransf = mOdom2BaseTransf.inverse() * mMap2BaseTransf; - mMap2OdomTransf = mMap2BaseTransf * mOdom2BaseTransf.inverse(); + mInitOdomWithPose = false; + mResetOdom = false; + } + else + { + // Transformation from map to odometry frame + // mMap2OdomTransf = mOdom2BaseTransf.inverse() * mMap2BaseTransf; + mMap2OdomTransf = mMap2BaseTransf * mOdom2BaseTransf.inverse(); -#if 0 //#ifndef NDEBUG // Enable for TF checking +#if 0 //#ifndef NDEBUG // Enable for TF checking double roll, pitch, yaw; tf2::Matrix3x3(mMap2OdomTransf.getRotation()).getRPY(roll, pitch, yaw); @@ -4035,34 +4620,39 @@ void ZEDWrapperNodelet::device_poll_thread_func() mMap2OdomTransf.getOrigin().x(), mMap2OdomTransf.getOrigin().y(), mMap2OdomTransf.getOrigin().z(), roll * RAD2DEG, pitch * RAD2DEG, yaw * RAD2DEG); #endif - } + } - // Publish Pose message - if ((poseSubnumber + poseCovSubnumber) > 0) { - publishPose(stamp); - } + // Publish Pose message + if ((poseSubnumber + poseCovSubnumber) > 0) + { + publishPose(stamp); + } - mPosTrackingReady = 
true; - } - } + mPosTrackingReady = true; + } + } - // Note: in SVO mode the TF must not be broadcasted at the same frequency of the IMU data to avoid timestamp issues - if (mZedRealCamModel == sl::MODEL::ZED || mSvoMode) { - // Publish pose tf only if enabled - if (mPublishTf) { - // Note, the frame is published, but its values will only change if - // someone has subscribed to odom - publishOdomFrame(mOdom2BaseTransf, stamp); // publish the base Frame in odometry frame - - if (mPublishMapTf) { - // Note, the frame is published, but its values will only change if - // someone has subscribed to map - publishPoseFrame(mMap2OdomTransf, stamp); // publish the odometry Frame in map frame - } - } - } + // Note: in SVO mode the TF must not be broadcasted at the same frequency of the IMU data to avoid timestamp + // issues + if (mZedRealCamModel == sl::MODEL::ZED || mSvoMode) + { + // Publish pose tf only if enabled + if (mPublishTf) + { + // Note, the frame is published, but its values will only change if + // someone has subscribed to odom + publishOdomFrame(mOdom2BaseTransf, stamp); // publish the base Frame in odometry frame + + if (mPublishMapTf) + { + // Note, the frame is published, but its values will only change if + // someone has subscribed to map + publishPoseFrame(mMap2OdomTransf, stamp); // publish the odometry Frame in map frame + } + } + } -#if 0 //#ifndef NDEBUG // Enable for TF checking \ +#if 0 //#ifndef NDEBUG // Enable for TF checking \ // Double check: map_to_pose must be equal to mMap2BaseTransf tf2::Transform map_to_base; @@ -4089,902 +4679,1014 @@ void ZEDWrapperNodelet::device_poll_thread_func() NODELET_DEBUG("*******************************"); #endif - std::chrono::steady_clock::time_point end_elab = std::chrono::steady_clock::now(); - - double elab_usec = std::chrono::duration_cast(end_elab - start_elab).count(); - - double mean_elab_sec = mElabPeriodMean_sec->addValue(elab_usec / 1000000.); + std::chrono::steady_clock::time_point end_elab = std::chrono::steady_clock::now(); - static int count_warn = 0; + double elab_usec = std::chrono::duration_cast(end_elab - start_elab).count(); - if (!loop_rate.sleep()) { - if (mean_elab_sec > (1. / mPubFrameRate)) { - if (++count_warn > 10) { - NODELET_DEBUG_THROTTLE(1.0, "Working thread is not synchronized with the Camera grab rate"); - NODELET_DEBUG_STREAM_THROTTLE(1.0, "Expected cycle time: " << loop_rate.expectedCycleTime() << " - Real cycle time: " << mean_elab_sec); - NODELET_WARN_STREAM_THROTTLE(10.0, "Elaboration takes longer (" << mean_elab_sec << " sec) than requested by the FPS rate (" - << loop_rate.expectedCycleTime() << " sec). Please consider to " - "lower the 'general/pub_frame_rate' setting or to " - "reduce the power requirements by reducing the camera resolutions."); - } + double mean_elab_sec = mElabPeriodMean_sec->addValue(elab_usec / 1000000.); - loop_rate.reset(); - } else { - count_warn = 0; - } - } - } else { - NODELET_DEBUG_THROTTLE(5.0, "No topics subscribed by users"); - - if (mSvoMode || mZedRealCamModel == sl::MODEL::ZED || !mPublishImuTf) { - // Publish odometry tf only if enabled - if (mPublishTf) { - ros::Time t; - - if (mSvoMode) { - t = ros::Time::now(); - } else { - t = sl_tools::slTime2Ros(mZed.getTimestamp(sl::TIME_REFERENCE::CURRENT)); - } + static int count_warn = 0; - publishOdomFrame(mOdom2BaseTransf, mFrameTimestamp); // publish the base Frame in odometry frame + if (!loop_rate.sleep()) + { + if (mean_elab_sec > (1. 
/ mPubFrameRate)) + { + if (++count_warn > 10) + { + NODELET_DEBUG_THROTTLE(1.0, "Working thread is not synchronized with the Camera grab rate"); + NODELET_DEBUG_STREAM_THROTTLE(1.0, "Expected cycle time: " << loop_rate.expectedCycleTime() + << " - Real cycle time: " << mean_elab_sec); + NODELET_WARN_STREAM_THROTTLE(10.0, "Elaboration takes longer (" + << mean_elab_sec << " sec) than requested by the FPS rate (" + << loop_rate.expectedCycleTime() + << " sec). Please consider to " + "lower the 'general/pub_frame_rate' setting or to " + "reduce the power requirements by reducing the camera " + "resolutions."); + } + + loop_rate.reset(); + } + else + { + count_warn = 0; + } + } + } + else + { + NODELET_DEBUG_THROTTLE(5.0, "No topics subscribed by users"); - if (mPublishMapTf) { - publishPoseFrame(mMap2OdomTransf, mFrameTimestamp); // publish the odometry Frame in map frame - } + if (mSvoMode || mZedRealCamModel == sl::MODEL::ZED || !mPublishImuTf) + { + // Publish odometry tf only if enabled + if (mPublishTf) + { + ros::Time t; + + if (mSvoMode) + { + t = ros::Time::now(); + } + else + { + t = sl_tools::slTime2Ros(mZed.getTimestamp(sl::TIME_REFERENCE::CURRENT)); + } + + publishOdomFrame(mOdom2BaseTransf, mFrameTimestamp); // publish the base Frame in odometry frame + + if (mPublishMapTf) + { + publishPoseFrame(mMap2OdomTransf, mFrameTimestamp); // publish the odometry Frame in map frame + } + + if (mPublishImuTf && !mStaticImuFramePublished) + { + publishStaticImuFrame(); + } + } + } - if (mPublishImuTf && !mStaticImuFramePublished) { - publishStaticImuFrame(); - } - } - } + std::this_thread::sleep_for(std::chrono::milliseconds(10)); // No subscribers, we just wait + loop_rate.reset(); + } - std::this_thread::sleep_for(std::chrono::milliseconds(10)); // No subscribers, we just wait - loop_rate.reset(); - } + mDiagUpdater.update(); + } // while loop - mDiagUpdater.update(); - } // while loop + if (mSaveAreaMapOnClosing && mPosTrackingActivated) + { + saveAreaMap(mAreaMemDbPath); + } - if (mSaveAreaMapOnClosing && mPosTrackingActivated) { - saveAreaMap(mAreaMemDbPath); - } + mStopNode = true; // Stops other threads - mStopNode = true; // Stops other threads + std::lock_guard lock(mCloseZedMutex); + NODELET_DEBUG("Closing ZED"); - std::lock_guard lock(mCloseZedMutex); - NODELET_DEBUG("Closing ZED"); + if (mRecording) + { + mRecording = false; + mZed.disableRecording(); + } - if (mRecording) { - mRecording = false; - mZed.disableRecording(); - } - - mZed.close(); + mZed.close(); - NODELET_DEBUG("ZED pool thread finished"); + NODELET_DEBUG("ZED pool thread finished"); } void ZEDWrapperNodelet::callback_updateDiagnostic(diagnostic_updater::DiagnosticStatusWrapper& stat) { - if (mConnStatus != sl::ERROR_CODE::SUCCESS) { - stat.summary(diagnostic_msgs::DiagnosticStatus::ERROR, sl::toString(mConnStatus).c_str()); - return; - } + if (mConnStatus != sl::ERROR_CODE::SUCCESS) + { + stat.summary(diagnostic_msgs::DiagnosticStatus::ERROR, sl::toString(mConnStatus).c_str()); + return; + } - if (mGrabActive) { - if (mGrabStatus == sl::ERROR_CODE::SUCCESS /*|| mGrabStatus == sl::ERROR_CODE::NOT_A_NEW_FRAME*/) { - stat.summary(diagnostic_msgs::DiagnosticStatus::OK, "Camera grabbing"); + if (mGrabActive) + { + if (mGrabStatus == sl::ERROR_CODE::SUCCESS /*|| mGrabStatus == sl::ERROR_CODE::NOT_A_NEW_FRAME*/) + { + stat.summary(diagnostic_msgs::DiagnosticStatus::OK, "Camera grabbing"); - double freq = 1000000. / mGrabPeriodMean_usec->getMean(); - double freq_perc = 100. 
* freq / mPubFrameRate; - stat.addf("Capture", "Mean Frequency: %.1f Hz (%.1f%%)", freq, freq_perc); + double freq = 1000000. / mGrabPeriodMean_usec->getMean(); + double freq_perc = 100. * freq / mPubFrameRate; + stat.addf("Capture", "Mean Frequency: %.1f Hz (%.1f%%)", freq, freq_perc); - stat.addf("General Processing", "Mean Time: %.3f sec (Max. %.3f sec)", mElabPeriodMean_sec->getMean(), + stat.addf("General Processing", "Mean Time: %.3f sec (Max. %.3f sec)", mElabPeriodMean_sec->getMean(), 1. / mPubFrameRate); - if (mPublishingData) { - freq = 1. / mVideoDepthPeriodMean_sec->getMean(); - freq_perc = 100. * freq / mPubFrameRate; - stat.addf("Video/Depth Publish", "Mean Frequency: %.1f Hz (%.1f%%)", freq, freq_perc); - } + if (mPublishingData) + { + freq = 1. / mVideoDepthPeriodMean_sec->getMean(); + freq_perc = 100. * freq / mPubFrameRate; + stat.addf("Video/Depth Publish", "Mean Frequency: %.1f Hz (%.1f%%)", freq, freq_perc); + } - if (mSvoMode) { - int frame = mZed.getSVOPosition(); - int totFrames = mZed.getSVONumberOfFrames(); - double svo_perc = 100. * (static_cast(frame) / totFrames); + if (mSvoMode) + { + int frame = mZed.getSVOPosition(); + int totFrames = mZed.getSVONumberOfFrames(); + double svo_perc = 100. * (static_cast(frame) / totFrames); - stat.addf("Playing SVO", "Frame: %d/%d (%.1f%%)", frame, totFrames, svo_perc); - } + stat.addf("Playing SVO", "Frame: %d/%d (%.1f%%)", frame, totFrames, svo_perc); + } - if (mComputeDepth) { - stat.add("Depth status", "ACTIVE"); + if (mComputeDepth) + { + stat.add("Depth status", "ACTIVE"); - if (mPcPublishing) { - double freq = 1000000. / mPcPeriodMean_usec->getMean(); - double freq_perc = 100. * freq / mPointCloudFreq; - stat.addf("Point Cloud", "Mean Frequency: %.1f Hz (%.1f%%)", freq, freq_perc); - } else { - stat.add("Point Cloud", "Topic not subscribed"); - } + if (mPcPublishing) + { + double freq = 1000000. / mPcPeriodMean_usec->getMean(); + double freq_perc = 100. * freq / mPointCloudFreq; + stat.addf("Point Cloud", "Mean Frequency: %.1f Hz (%.1f%%)", freq, freq_perc); + } + else + { + stat.add("Point Cloud", "Topic not subscribed"); + } - if (mFloorAlignment) { - if (mInitOdomWithPose) { - stat.add("Floor Detection", "NOT INITIALIZED"); - } else { - stat.add("Floor Detection", "INITIALIZED"); - } - } + if (mFloorAlignment) + { + if (mInitOdomWithPose) + { + stat.add("Floor Detection", "NOT INITIALIZED"); + } + else + { + stat.add("Floor Detection", "INITIALIZED"); + } + } - if (mPosTrackingActivated) { - stat.addf("Tracking status", "%s", sl::toString(mPosTrackingStatus).c_str()); - } else { - stat.add("Pos. Tracking status", "INACTIVE"); - } + if (mPosTrackingActivated) + { + stat.addf("Tracking status", "%s", sl::toString(mPosTrackingStatus).c_str()); + } + else + { + stat.add("Pos. Tracking status", "INACTIVE"); + } - if (mObjDetRunning) { - double freq = 1000. / mObjDetPeriodMean_msec->getMean(); - double freq_perc = 100. * freq / mPubFrameRate; - stat.addf("Object detection", "Mean Frequency: %.3f Hz (%.1f%%)", freq, freq_perc); - } else { - stat.add("Object Detection", "INACTIVE"); - } - } else { - stat.add("Depth status", "INACTIVE"); - } - } else { - stat.summaryf(diagnostic_msgs::DiagnosticStatus::ERROR, "Camera error: %s", sl::toString(mGrabStatus).c_str()); + if (mObjDetRunning) + { + double freq = 1000. / mObjDetPeriodMean_msec->getMean(); + double freq_perc = 100. 
* freq / mPubFrameRate; + stat.addf("Object detection", "Mean Frequency: %.3f Hz (%.1f%%)", freq, freq_perc); } - } else { - stat.summary(diagnostic_msgs::DiagnosticStatus::OK, "Waiting for data subscriber"); - stat.add("Capture", "INACTIVE"); + else + { + stat.add("Object Detection", "INACTIVE"); + } + } + else + { + stat.add("Depth status", "INACTIVE"); + } } - - if (mSensPublishing) { - double freq = 1000000. / mSensPeriodMean_usec->getMean(); - double freq_perc = 100. * freq / mSensPubRate; - stat.addf("IMU", "Mean Frequency: %.1f Hz (%.1f%%)", freq, freq_perc); - } else { - stat.add("IMU", "Topics not subscribed"); + else + { + stat.summaryf(diagnostic_msgs::DiagnosticStatus::ERROR, "Camera error: %s", sl::toString(mGrabStatus).c_str()); } + } + else + { + stat.summary(diagnostic_msgs::DiagnosticStatus::OK, "Waiting for data subscriber"); + stat.add("Capture", "INACTIVE"); + } + + if (mSensPublishing) + { + double freq = 1000000. / mSensPeriodMean_usec->getMean(); + double freq_perc = 100. * freq / mSensPubRate; + stat.addf("IMU", "Mean Frequency: %.1f Hz (%.1f%%)", freq, freq_perc); + } + else + { + stat.add("IMU", "Topics not subscribed"); + } - if (mZedRealCamModel == sl::MODEL::ZED2 || mZedRealCamModel == sl::MODEL::ZED2i) { - stat.addf("Left CMOS Temp.", "%.1f °C", mTempLeft); - stat.addf("Right CMOS Temp.", "%.1f °C", mTempRight); + if (mZedRealCamModel == sl::MODEL::ZED2 || mZedRealCamModel == sl::MODEL::ZED2i) + { + stat.addf("Left CMOS Temp.", "%.1f °C", mTempLeft); + stat.addf("Right CMOS Temp.", "%.1f °C", mTempRight); - if (mTempLeft > 70.f || mTempRight > 70.f) { - stat.summary(diagnostic_msgs::DiagnosticStatus::WARN, "Camera temperature"); - } - } else { - stat.add("Left CMOS Temp.", "N/A"); - stat.add("Right CMOS Temp.", "N/A"); + if (mTempLeft > 70.f || mTempRight > 70.f) + { + stat.summary(diagnostic_msgs::DiagnosticStatus::WARN, "Camera temperature"); } + } + else + { + stat.add("Left CMOS Temp.", "N/A"); + stat.add("Right CMOS Temp.", "N/A"); + } - if (mRecording) { - if (!mRecStatus.status) { - if (mGrabActive) { - stat.add("SVO Recording", "ERROR"); - stat.summary(diagnostic_msgs::DiagnosticStatus::WARN, "Error adding frames to SVO file while recording. Check " - "free disk space"); - } else { - stat.add("SVO Recording", "WAITING"); - } - } else { - stat.add("SVO Recording", "ACTIVE"); - stat.addf("SVO compression time", "%g msec", mRecStatus.average_compression_time); - stat.addf("SVO compression ratio", "%.1f%%", mRecStatus.average_compression_ratio); - } - } else { - stat.add("SVO Recording", "NOT ACTIVE"); + if (mRecording) + { + if (!mRecStatus.status) + { + if (mGrabActive) + { + stat.add("SVO Recording", "ERROR"); + stat.summary(diagnostic_msgs::DiagnosticStatus::WARN, + "Error adding frames to SVO file while recording. 
Check " + "free disk space"); + } + else + { + stat.add("SVO Recording", "WAITING"); + } } + else + { + stat.add("SVO Recording", "ACTIVE"); + stat.addf("SVO compression time", "%g msec", mRecStatus.average_compression_time); + stat.addf("SVO compression ratio", "%.1f%%", mRecStatus.average_compression_ratio); + } + } + else + { + stat.add("SVO Recording", "NOT ACTIVE"); + } } bool ZEDWrapperNodelet::on_start_svo_recording(zed_interfaces::start_svo_recording::Request& req, - zed_interfaces::start_svo_recording::Response& res) + zed_interfaces::start_svo_recording::Response& res) { - std::lock_guard lock(mRecMutex); + std::lock_guard lock(mRecMutex); - if (mRecording) { - res.result = false; - res.info = "Recording was just active"; - return false; - } + if (mRecording) + { + res.result = false; + res.info = "Recording was just active"; + return false; + } - // Check filename - if (req.svo_filename.empty()) { - req.svo_filename = "zed.svo"; - } + // Check filename + if (req.svo_filename.empty()) + { + req.svo_filename = "zed.svo"; + } - sl::ERROR_CODE err; + sl::ERROR_CODE err; - sl::RecordingParameters recParams; - recParams.compression_mode = mSvoComprMode; - recParams.video_filename = req.svo_filename.c_str(); - err = mZed.enableRecording(recParams); + sl::RecordingParameters recParams; + recParams.compression_mode = mSvoComprMode; + recParams.video_filename = req.svo_filename.c_str(); + err = mZed.enableRecording(recParams); - if (err == sl::ERROR_CODE::SVO_UNSUPPORTED_COMPRESSION) { - recParams.compression_mode = mSvoComprMode == sl::SVO_COMPRESSION_MODE::H265 ? sl::SVO_COMPRESSION_MODE::H264: sl::SVO_COMPRESSION_MODE::H265; + if (err == sl::ERROR_CODE::SVO_UNSUPPORTED_COMPRESSION) + { + recParams.compression_mode = mSvoComprMode == sl::SVO_COMPRESSION_MODE::H265 ? sl::SVO_COMPRESSION_MODE::H264 : + sl::SVO_COMPRESSION_MODE::H265; - NODELET_WARN_STREAM("The chosen " << sl::toString(mSvoComprMode).c_str() << "mode is not available. Trying " - << sl::toString(recParams.compression_mode).c_str()); + NODELET_WARN_STREAM("The chosen " << sl::toString(mSvoComprMode).c_str() << "mode is not available. Trying " + << sl::toString(recParams.compression_mode).c_str()); - err = mZed.enableRecording(recParams); + err = mZed.enableRecording(recParams); - if (err == sl::ERROR_CODE::SVO_UNSUPPORTED_COMPRESSION) { - NODELET_WARN_STREAM(sl::toString(recParams.compression_mode).c_str() - << "not available. Trying " << sl::toString(sl::SVO_COMPRESSION_MODE::H264).c_str()); - recParams.compression_mode = sl::SVO_COMPRESSION_MODE::H264; - err = mZed.enableRecording(recParams); + if (err == sl::ERROR_CODE::SVO_UNSUPPORTED_COMPRESSION) + { + NODELET_WARN_STREAM(sl::toString(recParams.compression_mode).c_str() + << "not available. 
Trying " << sl::toString(sl::SVO_COMPRESSION_MODE::H264).c_str()); + recParams.compression_mode = sl::SVO_COMPRESSION_MODE::H264; + err = mZed.enableRecording(recParams); - if (err == sl::ERROR_CODE::SVO_UNSUPPORTED_COMPRESSION) { - recParams.compression_mode = sl::SVO_COMPRESSION_MODE::LOSSLESS; - err = mZed.enableRecording(recParams); - } - } + if (err == sl::ERROR_CODE::SVO_UNSUPPORTED_COMPRESSION) + { + recParams.compression_mode = sl::SVO_COMPRESSION_MODE::LOSSLESS; + err = mZed.enableRecording(recParams); + } } + } - if (err != sl::ERROR_CODE::SUCCESS) { - res.result = false; - res.info = sl::toString(err).c_str(); - mRecording = false; - return false; - } + if (err != sl::ERROR_CODE::SUCCESS) + { + res.result = false; + res.info = sl::toString(err).c_str(); + mRecording = false; + return false; + } - mSvoComprMode = recParams.compression_mode; - mRecording = true; - res.info = "Recording started ("; - res.info += sl::toString(mSvoComprMode).c_str(); - res.info += ")"; - res.result = true; + mSvoComprMode = recParams.compression_mode; + mRecording = true; + res.info = "Recording started ("; + res.info += sl::toString(mSvoComprMode).c_str(); + res.info += ")"; + res.result = true; - NODELET_INFO_STREAM("SVO recording STARTED: " << req.svo_filename << " (" << sl::toString(mSvoComprMode).c_str() - << ")"); + NODELET_INFO_STREAM("SVO recording STARTED: " << req.svo_filename << " (" << sl::toString(mSvoComprMode).c_str() + << ")"); - return true; + return true; } bool ZEDWrapperNodelet::on_stop_svo_recording(zed_interfaces::stop_svo_recording::Request& req, - zed_interfaces::stop_svo_recording::Response& res) + zed_interfaces::stop_svo_recording::Response& res) { - std::lock_guard lock(mRecMutex); + std::lock_guard lock(mRecMutex); - if (!mRecording) { - res.done = false; - res.info = "Recording was not active"; - NODELET_WARN_STREAM("Can't stop SVO recording. Recording was not active"); - return false; - } + if (!mRecording) + { + res.done = false; + res.info = "Recording was not active"; + NODELET_WARN_STREAM("Can't stop SVO recording. 
Recording was not active"); + return false; + } - mZed.disableRecording(); - mRecording = false; - res.info = "Recording stopped"; - res.done = true; + mZed.disableRecording(); + mRecording = false; + res.info = "Recording stopped"; + res.done = true; - NODELET_INFO_STREAM("SVO recording STOPPED"); + NODELET_INFO_STREAM("SVO recording STOPPED"); - return true; + return true; } bool ZEDWrapperNodelet::on_start_remote_stream(zed_interfaces::start_remote_stream::Request& req, - zed_interfaces::start_remote_stream::Response& res) + zed_interfaces::start_remote_stream::Response& res) { - if (mStreaming) { - res.result = false; - res.info = "SVO remote streaming was just active"; - return false; - } + if (mStreaming) + { + res.result = false; + res.info = "SVO remote streaming was just active"; + return false; + } - sl::StreamingParameters params; - params.codec = static_cast(req.codec); - params.port = req.port; - params.bitrate = req.bitrate; - params.gop_size = req.gop_size; - params.adaptative_bitrate = req.adaptative_bitrate; + sl::StreamingParameters params; + params.codec = static_cast(req.codec); + params.port = req.port; + params.bitrate = req.bitrate; + params.gop_size = req.gop_size; + params.adaptative_bitrate = req.adaptative_bitrate; - if ((params.gop_size < -1) || (params.gop_size > 256)) { - mStreaming = false; + if ((params.gop_size < -1) || (params.gop_size > 256)) + { + mStreaming = false; - res.result = false; - res.info = "`gop_size` wrong ("; - res.info += params.gop_size; - res.info += "). Remote streaming not started"; + res.result = false; + res.info = "`gop_size` wrong ("; + res.info += params.gop_size; + res.info += "). Remote streaming not started"; - NODELET_ERROR_STREAM(res.info); - return false; - } + NODELET_ERROR_STREAM(res.info); + return false; + } - if (params.port % 2 != 0) { - mStreaming = false; + if (params.port % 2 != 0) + { + mStreaming = false; - res.result = false; - res.info = "`port` must be an even number. Remote streaming not started"; + res.result = false; + res.info = "`port` must be an even number. 
Remote streaming not started"; - NODELET_ERROR_STREAM(res.info); - return false; - } + NODELET_ERROR_STREAM(res.info); + return false; + } - sl::ERROR_CODE err = mZed.enableStreaming(params); + sl::ERROR_CODE err = mZed.enableStreaming(params); - if (err != sl::ERROR_CODE::SUCCESS) { - mStreaming = false; + if (err != sl::ERROR_CODE::SUCCESS) + { + mStreaming = false; - res.result = false; - res.info = sl::toString(err).c_str(); + res.result = false; + res.info = sl::toString(err).c_str(); - NODELET_ERROR_STREAM("Remote streaming not started (" << res.info << ")"); + NODELET_ERROR_STREAM("Remote streaming not started (" << res.info << ")"); - return false; - } + return false; + } - mStreaming = true; + mStreaming = true; - NODELET_INFO_STREAM("Remote streaming STARTED"); + NODELET_INFO_STREAM("Remote streaming STARTED"); - res.result = true; - res.info = "Remote streaming STARTED"; - return true; + res.result = true; + res.info = "Remote streaming STARTED"; + return true; } bool ZEDWrapperNodelet::on_stop_remote_stream(zed_interfaces::stop_remote_stream::Request& req, - zed_interfaces::stop_remote_stream::Response& res) + zed_interfaces::stop_remote_stream::Response& res) { - if (mStreaming) { - mZed.disableStreaming(); - } + if (mStreaming) + { + mZed.disableStreaming(); + } - mStreaming = false; - NODELET_INFO_STREAM("SVO remote streaming STOPPED"); + mStreaming = false; + NODELET_INFO_STREAM("SVO remote streaming STOPPED"); - res.done = true; - return true; + res.done = true; + return true; } bool ZEDWrapperNodelet::on_set_led_status(zed_interfaces::set_led_status::Request& req, - zed_interfaces::set_led_status::Response& res) + zed_interfaces::set_led_status::Response& res) { - if (mCamFwVersion < 1523) { - NODELET_WARN_STREAM("To set the status of the blue LED the camera must be updated to FW 1523 or newer"); - return false; - } + if (mCamFwVersion < 1523) + { + NODELET_WARN_STREAM("To set the status of the blue LED the camera must be updated to FW 1523 or newer"); + return false; + } - mZed.setCameraSettings(sl::VIDEO_SETTINGS::LED_STATUS, req.led_enabled ? 1: 0); + mZed.setCameraSettings(sl::VIDEO_SETTINGS::LED_STATUS, req.led_enabled ? 1 : 0); - return true; + return true; } bool ZEDWrapperNodelet::on_toggle_led(zed_interfaces::toggle_led::Request& req, - zed_interfaces::toggle_led::Response& res) + zed_interfaces::toggle_led::Response& res) { - if (mCamFwVersion < 1523) { - NODELET_WARN_STREAM("To set the status of the blue LED the camera must be updated to FW 1523 or newer"); - return false; - } + if (mCamFwVersion < 1523) + { + NODELET_WARN_STREAM("To set the status of the blue LED the camera must be updated to FW 1523 or newer"); + return false; + } - int status; - sl::ERROR_CODE err = mZed.getCameraSettings(sl::VIDEO_SETTINGS::LED_STATUS,status); - if(err!=sl::ERROR_CODE::SUCCESS) - { - NODELET_WARN_STREAM("Error getting the current status of the led: " << sl::toString(err).c_str()); - return false; - } + int status; + sl::ERROR_CODE err = mZed.getCameraSettings(sl::VIDEO_SETTINGS::LED_STATUS, status); + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Error getting the current status of the led: " << sl::toString(err).c_str()); + return false; + } - int new_status = status == 0 ? 1: 0; - err = mZed.setCameraSettings(sl::VIDEO_SETTINGS::LED_STATUS, new_status); - if(err!=sl::ERROR_CODE::SUCCESS) - { - NODELET_WARN_STREAM("Error getting the current status of the led: " << sl::toString(err).c_str()); - return false; - } + int new_status = status == 0 ? 
1 : 0; + err = mZed.setCameraSettings(sl::VIDEO_SETTINGS::LED_STATUS, new_status); + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Error getting the current status of the led: " << sl::toString(err).c_str()); + return false; + } - res.new_led_status = new_status; + res.new_led_status = new_status; - return true; + return true; } bool ZEDWrapperNodelet::on_start_3d_mapping(zed_interfaces::start_3d_mapping::Request& req, - zed_interfaces::start_3d_mapping::Response& res) + zed_interfaces::start_3d_mapping::Response& res) { - if (mMappingEnabled && mMappingRunning) { - NODELET_WARN_STREAM("Spatial mapping was just running"); + if (mMappingEnabled && mMappingRunning) + { + NODELET_WARN_STREAM("Spatial mapping was just running"); - res.done = false; - return res.done; - } + res.done = false; + return res.done; + } - mMappingRunning = false; + mMappingRunning = false; - mMappingRes = req.resolution; - mMaxMappingRange = req.max_mapping_range; - mFusedPcPubFreq = req.fused_pointcloud_freq; + mMappingRes = req.resolution; + mMaxMappingRange = req.max_mapping_range; + mFusedPcPubFreq = req.fused_pointcloud_freq; - NODELET_DEBUG_STREAM(" * Received mapping resolution\t\t-> " << mMappingRes << " m"); - NODELET_DEBUG_STREAM(" * Received mapping max range\t-> " << mMaxMappingRange << " m"); - NODELET_DEBUG_STREAM(" * Received fused point cloud freq:\t-> " << mFusedPcPubFreq << " Hz"); + NODELET_DEBUG_STREAM(" * Received mapping resolution\t\t-> " << mMappingRes << " m"); + NODELET_DEBUG_STREAM(" * Received mapping max range\t-> " << mMaxMappingRange << " m"); + NODELET_DEBUG_STREAM(" * Received fused point cloud freq:\t-> " << mFusedPcPubFreq << " Hz"); - mMappingEnabled = true; - res.done = true; + mMappingEnabled = true; + res.done = true; - return res.done; + return res.done; } bool ZEDWrapperNodelet::on_stop_3d_mapping(zed_interfaces::stop_3d_mapping::Request& req, - zed_interfaces::stop_3d_mapping::Response& res) + zed_interfaces::stop_3d_mapping::Response& res) { - if (mMappingEnabled) { - mPubFusedCloud.shutdown(); - mMappingMutex.lock(); - stop_3d_mapping(); - mMappingMutex.unlock(); - - res.done = true; - } else { - res.done = false; - } + if (mMappingEnabled) + { + mPubFusedCloud.shutdown(); + mMappingMutex.lock(); + stop_3d_mapping(); + mMappingMutex.unlock(); - return res.done; + res.done = true; + } + else + { + res.done = false; + } + + return res.done; } bool ZEDWrapperNodelet::on_save_3d_map(zed_interfaces::save_3d_map::Request& req, - zed_interfaces::save_3d_map::Response& res) + zed_interfaces::save_3d_map::Response& res) { - if (!mMappingEnabled) { - res.result = false; - res.info = "3D Mapping was not active"; - NODELET_WARN_STREAM("Can't save 3D map. Mapping was not active"); - return false; - } + if (!mMappingEnabled) + { + res.result = false; + res.info = "3D Mapping was not active"; + NODELET_WARN_STREAM("Can't save 3D map. Mapping was not active"); + return false; + } - mMapSave = true; + mMapSave = true; - std::lock_guard lock(mMappingMutex); - sl::String filename = req.map_filename.c_str(); - if (req.file_format < 0 || req.file_format > static_cast(sl::MESH_FILE_FORMAT::OBJ)) { - res.result = false; - res.info = "File format not correct"; - NODELET_WARN_STREAM("Can't save 3D map. 
File format not correct"); - return false; - } + std::lock_guard lock(mMappingMutex); + sl::String filename = req.map_filename.c_str(); + if (req.file_format < 0 || req.file_format > static_cast(sl::MESH_FILE_FORMAT::OBJ)) + { + res.result = false; + res.info = "File format not correct"; + NODELET_WARN_STREAM("Can't save 3D map. File format not correct"); + return false; + } - sl::MESH_FILE_FORMAT file_format = static_cast(req.file_format); + sl::MESH_FILE_FORMAT file_format = static_cast(req.file_format); - bool success = mFusedPC.save(filename, file_format); + bool success = mFusedPC.save(filename, file_format); - if (!success) { - res.result = false; - res.info = "3D Map not saved"; - NODELET_ERROR_STREAM("3D Map not saved"); - return false; - } + if (!success) + { + res.result = false; + res.info = "3D Map not saved"; + NODELET_ERROR_STREAM("3D Map not saved"); + return false; + } - res.info = "3D map saved"; - res.result = true; - return true; + res.info = "3D map saved"; + res.result = true; + return true; } bool ZEDWrapperNodelet::on_start_object_detection(zed_interfaces::start_object_detection::Request& req, - zed_interfaces::start_object_detection::Response& res) + zed_interfaces::start_object_detection::Response& res) { - NODELET_INFO("Called 'start_object_detection' service"); - - if (mZedRealCamModel == sl::MODEL::ZED) { - mObjDetEnabled = false; - mObjDetRunning = false; - - NODELET_ERROR_STREAM("Object detection not started. OD is not available for ZED camera model"); - res.done = false; - return res.done; - } + NODELET_INFO("Called 'start_object_detection' service"); - if (mObjDetEnabled && mObjDetRunning) { - NODELET_WARN_STREAM("Object Detection was just running"); + if (mZedRealCamModel == sl::MODEL::ZED) + { + mObjDetEnabled = false; + mObjDetRunning = false; - res.done = false; - return res.done; - } + NODELET_ERROR_STREAM("Object detection not started. OD is not available for ZED camera model"); + res.done = false; + return res.done; + } - mObjDetRunning = false; + if (mObjDetEnabled && mObjDetRunning) + { + NODELET_WARN_STREAM("Object Detection was just running"); - mObjDetConfidence = req.confidence; - mObjDetTracking = req.tracking; - if (req.model < 0 || req.model >= static_cast(sl::OBJECT_DETECTION_MODEL::LAST)) { - NODELET_ERROR_STREAM("Object Detection model not valid."); - res.done = false; - return res.done; - } - mObjDetModel = static_cast(req.model); + res.done = false; + return res.done; + } - mObjDetMaxRange = req.max_range; - if (mObjDetMaxRange > mCamMaxDepth) { - NODELET_WARN("Detection max range cannot be major than depth max range. Automatically fixed."); - mObjDetMaxRange = mCamMaxDepth; - } - NODELET_INFO_STREAM(" * Detection max range\t\t-> " << mObjDetMaxRange); - - NODELET_INFO_STREAM(" * Object min. confidence\t-> " << mObjDetConfidence); - NODELET_INFO_STREAM(" * Object tracking\t\t-> " << (mObjDetTracking ? "ENABLED": "DISABLED")); - NODELET_INFO_STREAM(" * Detection model\t\t-> " << sl::toString(mObjDetModel)); - - mNhNs.getParam("object_detection/mc_people", mObjDetPeopleEnable); - NODELET_INFO_STREAM(" * Detect people\t\t-> " << (mObjDetPeopleEnable ? "ENABLED": "DISABLED")); - mNhNs.getParam("object_detection/mc_vehicle", mObjDetVehiclesEnable); - NODELET_INFO_STREAM(" * Detect vehicles\t\t-> " << (mObjDetVehiclesEnable ? "ENABLED": "DISABLED")); - mNhNs.getParam("object_detection/mc_bag", mObjDetBagsEnable); - NODELET_INFO_STREAM(" * Detect bags\t\t\t-> " << (mObjDetBagsEnable ? 
"ENABLED": "DISABLED")); - mNhNs.getParam("object_detection/mc_animal", mObjDetAnimalsEnable); - NODELET_INFO_STREAM(" * Detect animals\t\t-> " << (mObjDetAnimalsEnable ? "ENABLED": "DISABLED")); - mNhNs.getParam("object_detection/mc_electronics", mObjDetElectronicsEnable); - NODELET_INFO_STREAM(" * Detect electronics\t\t-> " << (mObjDetElectronicsEnable ? "ENABLED": "DISABLED")); - mNhNs.getParam("object_detection/mc_fruit_vegetable", mObjDetFruitsEnable); - NODELET_INFO_STREAM(" * Detect fruit and vegetables\t-> " << (mObjDetFruitsEnable ? "ENABLED": "DISABLED")); - mNhNs.getParam("object_detection/mc_sport", mObjDetSportsEnable); - NODELET_INFO_STREAM(" * Detect sport-related objects\t-> " << (mObjDetSportsEnable ? "ENABLED": "DISABLED")); - - mObjDetRunning = false; - mObjDetEnabled = true; - res.done = true; + mObjDetRunning = false; + mObjDetConfidence = req.confidence; + mObjDetTracking = req.tracking; + if (req.model < 0 || req.model >= static_cast(sl::OBJECT_DETECTION_MODEL::LAST)) + { + NODELET_ERROR_STREAM("Object Detection model not valid."); + res.done = false; return res.done; + } + mObjDetModel = static_cast(req.model); + + mObjDetMaxRange = req.max_range; + if (mObjDetMaxRange > mCamMaxDepth) + { + NODELET_WARN("Detection max range cannot be major than depth max range. Automatically fixed."); + mObjDetMaxRange = mCamMaxDepth; + } + NODELET_INFO_STREAM(" * Detection max range\t\t-> " << mObjDetMaxRange); + + NODELET_INFO_STREAM(" * Object min. confidence\t-> " << mObjDetConfidence); + NODELET_INFO_STREAM(" * Object tracking\t\t-> " << (mObjDetTracking ? "ENABLED" : "DISABLED")); + NODELET_INFO_STREAM(" * Detection model\t\t-> " << sl::toString(mObjDetModel)); + + mNhNs.getParam("object_detection/mc_people", mObjDetPeopleEnable); + NODELET_INFO_STREAM(" * Detect people\t\t-> " << (mObjDetPeopleEnable ? "ENABLED" : "DISABLED")); + mNhNs.getParam("object_detection/mc_vehicle", mObjDetVehiclesEnable); + NODELET_INFO_STREAM(" * Detect vehicles\t\t-> " << (mObjDetVehiclesEnable ? "ENABLED" : "DISABLED")); + mNhNs.getParam("object_detection/mc_bag", mObjDetBagsEnable); + NODELET_INFO_STREAM(" * Detect bags\t\t\t-> " << (mObjDetBagsEnable ? "ENABLED" : "DISABLED")); + mNhNs.getParam("object_detection/mc_animal", mObjDetAnimalsEnable); + NODELET_INFO_STREAM(" * Detect animals\t\t-> " << (mObjDetAnimalsEnable ? "ENABLED" : "DISABLED")); + mNhNs.getParam("object_detection/mc_electronics", mObjDetElectronicsEnable); + NODELET_INFO_STREAM(" * Detect electronics\t\t-> " << (mObjDetElectronicsEnable ? "ENABLED" : "DISABLED")); + mNhNs.getParam("object_detection/mc_fruit_vegetable", mObjDetFruitsEnable); + NODELET_INFO_STREAM(" * Detect fruit and vegetables\t-> " << (mObjDetFruitsEnable ? "ENABLED" : "DISABLED")); + mNhNs.getParam("object_detection/mc_sport", mObjDetSportsEnable); + NODELET_INFO_STREAM(" * Detect sport-related objects\t-> " << (mObjDetSportsEnable ? "ENABLED" : "DISABLED")); + + mObjDetRunning = false; + mObjDetEnabled = true; + res.done = true; + + return res.done; } /*! 
\brief Service callback to stop_object_detection service */
 bool ZEDWrapperNodelet::on_stop_object_detection(zed_interfaces::stop_object_detection::Request& req,
-    zed_interfaces::stop_object_detection::Response& res)
+                                                 zed_interfaces::stop_object_detection::Response& res)
 {
-    if (mObjDetEnabled) {
-        mObjDetMutex.lock();
-        stop_obj_detect();
-        mObjDetMutex.unlock();
-
-        res.done = true;
-    } else {
-        res.done = false;
-    }
+  if (mObjDetEnabled)
+  {
+    mObjDetMutex.lock();
+    stop_obj_detect();
+    mObjDetMutex.unlock();

-    return res.done;
+    res.done = true;
+  }
+  else
+  {
+    res.done = false;
+  }
+
+  return res.done;
 }

 void ZEDWrapperNodelet::processDetectedObjects(ros::Time t)
 {
-    static std::chrono::steady_clock::time_point old_time = std::chrono::steady_clock::now();
+  static std::chrono::steady_clock::time_point old_time = std::chrono::steady_clock::now();

-    sl::ObjectDetectionRuntimeParameters objectTracker_parameters_rt;
-    objectTracker_parameters_rt.detection_confidence_threshold = mObjDetConfidence;
-    objectTracker_parameters_rt.object_class_filter = mObjDetFilter;
+  sl::ObjectDetectionRuntimeParameters objectTracker_parameters_rt;
+  objectTracker_parameters_rt.detection_confidence_threshold = mObjDetConfidence;
+  objectTracker_parameters_rt.object_class_filter = mObjDetFilter;

-    sl::Objects objects;
+  sl::Objects objects;

-    sl::ERROR_CODE objDetRes = mZed.retrieveObjects(objects, objectTracker_parameters_rt);
+  sl::ERROR_CODE objDetRes = mZed.retrieveObjects(objects, objectTracker_parameters_rt);

-    if (objDetRes != sl::ERROR_CODE::SUCCESS) {
-        NODELET_WARN_STREAM("Object Detection error: " << sl::toString(objDetRes));
-        return;
-    }
+  if (objDetRes != sl::ERROR_CODE::SUCCESS)
+  {
+    NODELET_WARN_STREAM("Object Detection error: " << sl::toString(objDetRes));
+    return;
+  }

-    if (!objects.is_new) {
-        return;
-    }
+  if (!objects.is_new)
+  {
+    return;
+  }

-    // ----> Diagnostic information update
-    std::chrono::steady_clock::time_point now = std::chrono::steady_clock::now();
-    double elapsed_msec = std::chrono::duration_cast<std::chrono::milliseconds>(now - old_time).count();
-    mObjDetPeriodMean_msec->addValue(elapsed_msec);
-    old_time = now;
-    // <---- Diagnostic information update
+  // ----> Diagnostic information update
+  std::chrono::steady_clock::time_point now = std::chrono::steady_clock::now();
+  double elapsed_msec = std::chrono::duration_cast<std::chrono::milliseconds>(now - old_time).count();
+  mObjDetPeriodMean_msec->addValue(elapsed_msec);
+  old_time = now;
+  // <---- Diagnostic information update

-    NODELET_DEBUG_STREAM("Detected " << objects.object_list.size() << " objects");
+  NODELET_DEBUG_STREAM("Detected " << objects.object_list.size() << " objects");

-    size_t objCount = objects.object_list.size();
+  size_t objCount = objects.object_list.size();

-    zed_interfaces::ObjectsStampedPtr objMsg = boost::make_shared<zed_interfaces::ObjectsStamped>();
-    objMsg->header.stamp = t;
-    objMsg->header.frame_id = mLeftCamFrameId;
+  zed_interfaces::ObjectsStampedPtr objMsg = boost::make_shared<zed_interfaces::ObjectsStamped>();
+  objMsg->header.stamp = t;
+  objMsg->header.frame_id = mLeftCamFrameId;

-    objMsg->objects.resize(objCount);
+  objMsg->objects.resize(objCount);

-    size_t idx = 0;
-    for (auto data: objects.object_list) {
-        objMsg->objects[idx].label = sl::toString(data.label).c_str();
-        objMsg->objects[idx].sublabel = sl::toString(data.sublabel).c_str();
-        objMsg->objects[idx].label_id = data.raw_label;
-        objMsg->objects[idx].instance_id = data.id;
-        objMsg->objects[idx].confidence = data.confidence;
+  size_t idx = 0;
+  for (auto data : objects.object_list)
+  {
+    objMsg->objects[idx].label = 
sl::toString(data.label).c_str(); + objMsg->objects[idx].sublabel = sl::toString(data.sublabel).c_str(); + objMsg->objects[idx].label_id = data.raw_label; + objMsg->objects[idx].instance_id = data.id; + objMsg->objects[idx].confidence = data.confidence; - memcpy(&(objMsg->objects[idx].position[0]), &(data.position[0]), 3 * sizeof(float)); - memcpy(&(objMsg->objects[idx].position_covariance[0]), &(data.position_covariance[0]), 6 * sizeof(float)); - memcpy(&(objMsg->objects[idx].velocity[0]), &(data.velocity[0]), 3 * sizeof(float)); + memcpy(&(objMsg->objects[idx].position[0]), &(data.position[0]), 3 * sizeof(float)); + memcpy(&(objMsg->objects[idx].position_covariance[0]), &(data.position_covariance[0]), 6 * sizeof(float)); + memcpy(&(objMsg->objects[idx].velocity[0]), &(data.velocity[0]), 3 * sizeof(float)); - objMsg->objects[idx].tracking_available = mObjDetTracking; - objMsg->objects[idx].tracking_state = static_cast(data.tracking_state); - // NODELET_INFO_STREAM( "[" << idx << "] Tracking: " << - // sl::toString(static_cast(data.tracking_state))); - objMsg->objects[idx].action_state = static_cast(data.action_state); + objMsg->objects[idx].tracking_available = mObjDetTracking; + objMsg->objects[idx].tracking_state = static_cast(data.tracking_state); + // NODELET_INFO_STREAM( "[" << idx << "] Tracking: " << + // sl::toString(static_cast(data.tracking_state))); + objMsg->objects[idx].action_state = static_cast(data.action_state); - if (data.bounding_box_2d.size() == 4) { - memcpy(&(objMsg->objects[idx].bounding_box_2d.corners[0]), &(data.bounding_box_2d[0]), 8 * sizeof(unsigned int)); - } - if (data.bounding_box.size() == 8) { - memcpy(&(objMsg->objects[idx].bounding_box_3d.corners[0]), &(data.bounding_box[0]), 24 * sizeof(float)); - } + if (data.bounding_box_2d.size() == 4) + { + memcpy(&(objMsg->objects[idx].bounding_box_2d.corners[0]), &(data.bounding_box_2d[0]), 8 * sizeof(unsigned int)); + } + if (data.bounding_box.size() == 8) + { + memcpy(&(objMsg->objects[idx].bounding_box_3d.corners[0]), &(data.bounding_box[0]), 24 * sizeof(float)); + } - memcpy(&(objMsg->objects[idx].dimensions_3d[0]), &(data.dimensions[0]), 3 * sizeof(float)); + memcpy(&(objMsg->objects[idx].dimensions_3d[0]), &(data.dimensions[0]), 3 * sizeof(float)); - // Body Detection is in a separate module in ZED SDK v4 - objMsg->objects[idx].skeleton_available = false; + // Body Detection is in a separate module in ZED SDK v4 + objMsg->objects[idx].skeleton_available = false; - // at the end of the loop - idx++; - } + // at the end of the loop + idx++; + } - mPubObjDet.publish(objMsg); + mPubObjDet.publish(objMsg); } void ZEDWrapperNodelet::clickedPtCallback(geometry_msgs::PointStampedConstPtr msg) { - // ----> Check for result subscribers - uint32_t markerSubNumber = mPubMarker.getNumSubscribers(); - uint32_t planeSubNumber = mPubMarker.getNumSubscribers(); + // ----> Check for result subscribers + uint32_t markerSubNumber = mPubMarker.getNumSubscribers(); + uint32_t planeSubNumber = mPubMarker.getNumSubscribers(); - if ((markerSubNumber + planeSubNumber) == 0) { - return; - } - // <---- Check for result subscribers - - ros::Time ts = ros::Time::now(); + if ((markerSubNumber + planeSubNumber) == 0) + { + return; + } + // <---- Check for result subscribers - float X = msg->point.x; - float Y = msg->point.y; - float Z = msg->point.z; + ros::Time ts = ros::Time::now(); - NODELET_INFO_STREAM("Clicked 3D point [X FW, Y LF, Z UP]: [" << X << "," << Y << "," << Z << "]"); + float X = msg->point.x; + float Y = msg->point.y; + 
float Z = msg->point.z; - // ----> Transform the point from `map` frame to `left_camera_optical_frame` - double camX, camY, camZ; - try { - // Save the transformation - geometry_msgs::TransformStamped m2o = mTfBuffer->lookupTransform(mLeftCamOptFrameId, msg->header.frame_id, ros::Time(0), ros::Duration(0.1)); + NODELET_INFO_STREAM("Clicked 3D point [X FW, Y LF, Z UP]: [" << X << "," << Y << "," << Z << "]"); - NODELET_INFO("'%s' -> '%s': {%.3f,%.3f,%.3f} {%.3f,%.3f,%.3f,%.3f}", msg->header.frame_id.c_str(), mLeftCamOptFrameId.c_str(), - m2o.transform.translation.x, m2o.transform.translation.y, m2o.transform.translation.z, - m2o.transform.rotation.x, m2o.transform.rotation.y, m2o.transform.rotation.z, m2o.transform.rotation.w); + // ----> Transform the point from `map` frame to `left_camera_optical_frame` + double camX, camY, camZ; + try + { + // Save the transformation + geometry_msgs::TransformStamped m2o = + mTfBuffer->lookupTransform(mLeftCamOptFrameId, msg->header.frame_id, ros::Time(0), ros::Duration(0.1)); - // Get the TF2 transformation - geometry_msgs::PointStamped ptCam; + NODELET_INFO("'%s' -> '%s': {%.3f,%.3f,%.3f} {%.3f,%.3f,%.3f,%.3f}", msg->header.frame_id.c_str(), + mLeftCamOptFrameId.c_str(), m2o.transform.translation.x, m2o.transform.translation.y, + m2o.transform.translation.z, m2o.transform.rotation.x, m2o.transform.rotation.y, + m2o.transform.rotation.z, m2o.transform.rotation.w); - tf2::doTransform(*msg, ptCam, m2o); + // Get the TF2 transformation + geometry_msgs::PointStamped ptCam; - camX = ptCam.point.x; - camY = ptCam.point.y; - camZ = ptCam.point.z; + tf2::doTransform(*msg, ptCam, m2o); - NODELET_INFO("Point in camera coordinates [Z FW, X RG, Y DW]: {%.3f,%.3f,%.3f}", camX, camY, camZ); - } catch (tf2::TransformException& ex) { - NODELET_DEBUG_THROTTLE(1.0, "Transform error: %s", ex.what()); - NODELET_WARN_THROTTLE(1.0, "The tf from '%s' to '%s' is not available.", msg->header.frame_id.c_str(), mLeftCamOptFrameId.c_str()); + camX = ptCam.point.x; + camY = ptCam.point.y; + camZ = ptCam.point.z; - return; - } - // <---- Transform the point from `map` frame to `left_camera_optical_frame` - - // ----> Project the point into 2D image coordinates - sl::CalibrationParameters zedParam; - zedParam = mZed.getCameraInformation(mMatResol).camera_configuration.calibration_parameters; // ok - - float f_x = zedParam.left_cam.fx; - float f_y = zedParam.left_cam.fy; - float c_x = zedParam.left_cam.cx; - float c_y = zedParam.left_cam.cy; - - float out_scale_factor = mMatResol.width / mCamWidth; - - float u = ((camX / camZ) * f_x + c_x) / out_scale_factor; - float v = ((camY / camZ) * f_y + c_y) / out_scale_factor; - NODELET_INFO_STREAM("Clicked point image coordinates: [" << u << "," << v << "]"); - // <---- Project the point into 2D image coordinates - - // ----> Extract plane from clicked point - sl::Plane plane; - sl::ERROR_CODE err = mZed.findPlaneAtHit(sl::uint2(u, v), plane); - if (err != sl::ERROR_CODE::SUCCESS) { - NODELET_WARN("Error extracting plane at point [%.3f,%.3f,%.3f]: %s", X, Y, Z, sl::toString(err).c_str()); - return; - } + NODELET_INFO("Point in camera coordinates [Z FW, X RG, Y DW]: {%.3f,%.3f,%.3f}", camX, camY, camZ); + } + catch (tf2::TransformException& ex) + { + NODELET_DEBUG_THROTTLE(1.0, "Transform error: %s", ex.what()); + NODELET_WARN_THROTTLE(1.0, "The tf from '%s' to '%s' is not available.", msg->header.frame_id.c_str(), + mLeftCamOptFrameId.c_str()); - sl::float3 center = plane.getCenter(); - sl::float2 dims = plane.getExtents(); + return; + } + // 
<---- Transform the point from `map` frame to `left_camera_optical_frame` + + // ----> Project the point into 2D image coordinates + sl::CalibrationParameters zedParam; + zedParam = mZed.getCameraInformation(mMatResol).camera_configuration.calibration_parameters; // ok + + float f_x = zedParam.left_cam.fx; + float f_y = zedParam.left_cam.fy; + float c_x = zedParam.left_cam.cx; + float c_y = zedParam.left_cam.cy; + + float out_scale_factor = mMatResol.width / mCamWidth; + + float u = ((camX / camZ) * f_x + c_x) / out_scale_factor; + float v = ((camY / camZ) * f_y + c_y) / out_scale_factor; + NODELET_INFO_STREAM("Clicked point image coordinates: [" << u << "," << v << "]"); + // <---- Project the point into 2D image coordinates + + // ----> Extract plane from clicked point + sl::Plane plane; + sl::ERROR_CODE err = mZed.findPlaneAtHit(sl::uint2(u, v), plane); + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN("Error extracting plane at point [%.3f,%.3f,%.3f]: %s", X, Y, Z, sl::toString(err).c_str()); + return; + } - if (dims[0] == 0 || dims[1] == 0) { - NODELET_INFO("Plane not found at point [%.3f,%.3f,%.3f]", X, Y, Z); - return; - } + sl::float3 center = plane.getCenter(); + sl::float2 dims = plane.getExtents(); - NODELET_INFO("Found plane at point [%.3f,%.3f,%.3f] -> Center: [%.3f,%.3f,%.3f], Dims: %.3fx%.3f", X, Y, Z, center.x, center.y, center.z, dims[0], dims[1]); - // <---- Extract plane from clicked point - - if (markerSubNumber > 0) { - // ----> Publish a blue sphere in the clicked point - visualization_msgs::MarkerPtr pt_marker = boost::make_shared(); - // Set the frame ID and timestamp. See the TF tutorials for information on these. - static int hit_pt_id = 0; - pt_marker->header.stamp = ts; - // Set the marker action. Options are ADD and DELETE - pt_marker->action = visualization_msgs::Marker::ADD; - pt_marker->lifetime = ros::Duration(); - - // Set the namespace and id for this marker. This serves to create a unique ID - // Any marker sent with the same namespace and id will overwrite the old one - pt_marker->ns = "plane_hit_points"; - pt_marker->id = hit_pt_id++; - pt_marker->header.frame_id = mMapFrameId; - - // Set the marker type. - pt_marker->type = visualization_msgs::Marker::SPHERE; - - // Set the pose of the marker. This is a full 6DOF pose relative to the frame/time specified in the header - pt_marker->pose.position.x = X; - pt_marker->pose.position.y = Y; - pt_marker->pose.position.z = Z; - pt_marker->pose.orientation.x = 0.0; - pt_marker->pose.orientation.y = 0.0; - pt_marker->pose.orientation.z = 0.0; - pt_marker->pose.orientation.w = 1.0; - - // Set the scale of the marker -- 1x1x1 here means 1m on a side - pt_marker->scale.x = 0.025; - pt_marker->scale.y = 0.025; - pt_marker->scale.z = 0.025; + if (dims[0] == 0 || dims[1] == 0) + { + NODELET_INFO("Plane not found at point [%.3f,%.3f,%.3f]", X, Y, Z); + return; + } - // Set the color -- be sure to set alpha to something non-zero! - pt_marker->color.r = 0.2f; - pt_marker->color.g = 0.1f; - pt_marker->color.b = 0.75f; - pt_marker->color.a = 0.8; - - // Publish the marker - mPubMarker.publish(pt_marker); - // ----> Publish a blue sphere in the clicked point - - // ----> Publish the plane as green mesh - visualization_msgs::MarkerPtr plane_marker = boost::make_shared(); - // Set the frame ID and timestamp. See the TF tutorials for information on these. - static int plane_mesh_id = 0; - plane_marker->header.stamp = ts; - // Set the marker action. 
Options are ADD and DELETE - plane_marker->action = visualization_msgs::Marker::ADD; - plane_marker->lifetime = ros::Duration(); - - // Set the namespace and id for this marker. This serves to create a unique ID - // Any marker sent with the same namespace and id will overwrite the old one - plane_marker->ns = "plane_meshes"; - plane_marker->id = plane_mesh_id++; - plane_marker->header.frame_id = mLeftCamFrameId; - - // Set the marker type. - plane_marker->type = visualization_msgs::Marker::TRIANGLE_LIST; - - // Set the pose of the marker. This is a full 6DOF pose relative to the frame/time specified in the header - plane_marker->pose.position.x = 0; - plane_marker->pose.position.y = 0; - plane_marker->pose.position.z = 0; - plane_marker->pose.orientation.x = 0.0; - plane_marker->pose.orientation.y = 0.0; - plane_marker->pose.orientation.z = 0.0; - plane_marker->pose.orientation.w = 1.0; + NODELET_INFO("Found plane at point [%.3f,%.3f,%.3f] -> Center: [%.3f,%.3f,%.3f], Dims: %.3fx%.3f", X, Y, Z, center.x, + center.y, center.z, dims[0], dims[1]); + // <---- Extract plane from clicked point + + if (markerSubNumber > 0) + { + // ----> Publish a blue sphere in the clicked point + visualization_msgs::MarkerPtr pt_marker = boost::make_shared(); + // Set the frame ID and timestamp. See the TF tutorials for information on these. + static int hit_pt_id = 0; + pt_marker->header.stamp = ts; + // Set the marker action. Options are ADD and DELETE + pt_marker->action = visualization_msgs::Marker::ADD; + pt_marker->lifetime = ros::Duration(); + + // Set the namespace and id for this marker. This serves to create a unique ID + // Any marker sent with the same namespace and id will overwrite the old one + pt_marker->ns = "plane_hit_points"; + pt_marker->id = hit_pt_id++; + pt_marker->header.frame_id = mMapFrameId; + + // Set the marker type. + pt_marker->type = visualization_msgs::Marker::SPHERE; + + // Set the pose of the marker. This is a full 6DOF pose relative to the frame/time specified in the header + pt_marker->pose.position.x = X; + pt_marker->pose.position.y = Y; + pt_marker->pose.position.z = Z; + pt_marker->pose.orientation.x = 0.0; + pt_marker->pose.orientation.y = 0.0; + pt_marker->pose.orientation.z = 0.0; + pt_marker->pose.orientation.w = 1.0; + + // Set the scale of the marker -- 1x1x1 here means 1m on a side + pt_marker->scale.x = 0.025; + pt_marker->scale.y = 0.025; + pt_marker->scale.z = 0.025; + + // Set the color -- be sure to set alpha to something non-zero! + pt_marker->color.r = 0.2f; + pt_marker->color.g = 0.1f; + pt_marker->color.b = 0.75f; + pt_marker->color.a = 0.8; + + // Publish the marker + mPubMarker.publish(pt_marker); + // ----> Publish a blue sphere in the clicked point + + // ----> Publish the plane as green mesh + visualization_msgs::MarkerPtr plane_marker = boost::make_shared(); + // Set the frame ID and timestamp. See the TF tutorials for information on these. + static int plane_mesh_id = 0; + plane_marker->header.stamp = ts; + // Set the marker action. Options are ADD and DELETE + plane_marker->action = visualization_msgs::Marker::ADD; + plane_marker->lifetime = ros::Duration(); + + // Set the namespace and id for this marker. This serves to create a unique ID + // Any marker sent with the same namespace and id will overwrite the old one + plane_marker->ns = "plane_meshes"; + plane_marker->id = plane_mesh_id++; + plane_marker->header.frame_id = mLeftCamFrameId; + + // Set the marker type. 
+ plane_marker->type = visualization_msgs::Marker::TRIANGLE_LIST; + + // Set the pose of the marker. This is a full 6DOF pose relative to the frame/time specified in the header + plane_marker->pose.position.x = 0; + plane_marker->pose.position.y = 0; + plane_marker->pose.position.z = 0; + plane_marker->pose.orientation.x = 0.0; + plane_marker->pose.orientation.y = 0.0; + plane_marker->pose.orientation.z = 0.0; + plane_marker->pose.orientation.w = 1.0; + + // Set the color -- be sure to set alpha to something non-zero! + plane_marker->color.r = 0.10f; + plane_marker->color.g = 0.75f; + plane_marker->color.b = 0.20f; + plane_marker->color.a = 0.75; + + // Set the scale of the marker -- 1x1x1 here means 1m on a side + plane_marker->scale.x = 1.0; + plane_marker->scale.y = 1.0; + plane_marker->scale.z = 1.0; + + sl::Mesh mesh = plane.extractMesh(); + size_t triangCount = mesh.getNumberOfTriangles(); + size_t ptCount = triangCount * 3; + plane_marker->points.resize(ptCount); + plane_marker->colors.resize(ptCount); + + size_t ptIdx = 0; + for (size_t t = 0; t < triangCount; t++) + { + for (int p = 0; p < 3; p++) + { + uint vIdx = mesh.triangles[t][p]; + plane_marker->points[ptIdx].x = mesh.vertices[vIdx][0]; + plane_marker->points[ptIdx].y = mesh.vertices[vIdx][1]; + plane_marker->points[ptIdx].z = mesh.vertices[vIdx][2]; // Set the color -- be sure to set alpha to something non-zero! - plane_marker->color.r = 0.10f; - plane_marker->color.g = 0.75f; - plane_marker->color.b = 0.20f; - plane_marker->color.a = 0.75; - - // Set the scale of the marker -- 1x1x1 here means 1m on a side - plane_marker->scale.x = 1.0; - plane_marker->scale.y = 1.0; - plane_marker->scale.z = 1.0; - - sl::Mesh mesh = plane.extractMesh(); - size_t triangCount = mesh.getNumberOfTriangles(); - size_t ptCount = triangCount * 3; - plane_marker->points.resize(ptCount); - plane_marker->colors.resize(ptCount); - - size_t ptIdx = 0; - for (size_t t = 0; t < triangCount; t++) { - for (int p = 0; p < 3; p++) { - uint vIdx = mesh.triangles[t][p]; - plane_marker->points[ptIdx].x = mesh.vertices[vIdx][0]; - plane_marker->points[ptIdx].y = mesh.vertices[vIdx][1]; - plane_marker->points[ptIdx].z = mesh.vertices[vIdx][2]; - - // Set the color -- be sure to set alpha to something non-zero! 
- plane_marker->colors[ptIdx].r = 0.10f; - plane_marker->colors[ptIdx].g = 0.75f; - plane_marker->colors[ptIdx].b = 0.20f; - plane_marker->colors[ptIdx].a = 0.75; - - ptIdx++; - } - } + plane_marker->colors[ptIdx].r = 0.10f; + plane_marker->colors[ptIdx].g = 0.75f; + plane_marker->colors[ptIdx].b = 0.20f; + plane_marker->colors[ptIdx].a = 0.75; - // Publish the marker - mPubMarker.publish(plane_marker); - // <---- Publish the plane as green mesh + ptIdx++; + } } - if (planeSubNumber > 0) { - // ----> Publish the plane as custom message - - zed_interfaces::PlaneStampedPtr planeMsg = boost::make_shared(); - planeMsg->header.stamp = ts; - planeMsg->header.frame_id = mLeftCamFrameId; - - // Plane equation - sl::float4 sl_coeff = plane.getPlaneEquation(); - planeMsg->coefficients.coef[0] = static_cast(sl_coeff[0]); - planeMsg->coefficients.coef[1] = static_cast(sl_coeff[1]); - planeMsg->coefficients.coef[2] = static_cast(sl_coeff[2]); - planeMsg->coefficients.coef[3] = static_cast(sl_coeff[3]); - - // Plane Normal - sl::float3 sl_normal = plane.getNormal(); - planeMsg->normal.x = sl_normal[0]; - planeMsg->normal.y = sl_normal[1]; - planeMsg->normal.z = sl_normal[2]; - - // Plane Center - sl::float3 sl_center = plane.getCenter(); - planeMsg->center.x = sl_center[0]; - planeMsg->center.y = sl_center[1]; - planeMsg->center.z = sl_center[2]; - - // Plane extents - sl::float3 sl_extents = plane.getExtents(); - planeMsg->extents[0] = sl_extents[0]; - planeMsg->extents[1] = sl_extents[1]; - - // Plane pose - sl::Pose sl_pose = plane.getPose(); - sl::Orientation sl_rot = sl_pose.getOrientation(); - sl::Translation sl_tr = sl_pose.getTranslation(); - - planeMsg->pose.rotation.x = sl_rot.ox; - planeMsg->pose.rotation.y = sl_rot.oy; - planeMsg->pose.rotation.z = sl_rot.oz; - planeMsg->pose.rotation.w = sl_rot.ow; - - planeMsg->pose.translation.x = sl_tr.x; - planeMsg->pose.translation.y = sl_tr.y; - planeMsg->pose.translation.z = sl_tr.z; - - // Plane Bounds - std::vector sl_bounds = plane.getBounds(); - planeMsg->bounds.points.resize(sl_bounds.size()); - memcpy(planeMsg->bounds.points.data(), sl_bounds.data(), 3 * sl_bounds.size() * sizeof(float)); - - // Plane mesh - sl::Mesh sl_mesh = plane.extractMesh(); - size_t triangCount = sl_mesh.triangles.size(); - size_t ptsCount = sl_mesh.vertices.size(); - planeMsg->mesh.triangles.resize(triangCount); - planeMsg->mesh.vertices.resize(ptsCount); - - // memcpy not allowed because data types are different - for (size_t i = 0; i < triangCount; i++) { - planeMsg->mesh.triangles[i].vertex_indices[0] = sl_mesh.triangles[i][0]; - planeMsg->mesh.triangles[i].vertex_indices[1] = sl_mesh.triangles[i][1]; - planeMsg->mesh.triangles[i].vertex_indices[2] = sl_mesh.triangles[i][2]; - } + // Publish the marker + mPubMarker.publish(plane_marker); + // <---- Publish the plane as green mesh + } - // memcpy not allowed because data types are different - for (size_t i = 0; i < ptsCount; i++) { - planeMsg->mesh.vertices[i].x = sl_mesh.vertices[i][0]; - planeMsg->mesh.vertices[i].y = sl_mesh.vertices[i][1]; - planeMsg->mesh.vertices[i].z = sl_mesh.vertices[i][2]; - } + if (planeSubNumber > 0) + { + // ----> Publish the plane as custom message + + zed_interfaces::PlaneStampedPtr planeMsg = boost::make_shared(); + planeMsg->header.stamp = ts; + planeMsg->header.frame_id = mLeftCamFrameId; + + // Plane equation + sl::float4 sl_coeff = plane.getPlaneEquation(); + planeMsg->coefficients.coef[0] = static_cast(sl_coeff[0]); + planeMsg->coefficients.coef[1] = static_cast(sl_coeff[1]); + 
planeMsg->coefficients.coef[2] = static_cast(sl_coeff[2]); + planeMsg->coefficients.coef[3] = static_cast(sl_coeff[3]); + + // Plane Normal + sl::float3 sl_normal = plane.getNormal(); + planeMsg->normal.x = sl_normal[0]; + planeMsg->normal.y = sl_normal[1]; + planeMsg->normal.z = sl_normal[2]; + + // Plane Center + sl::float3 sl_center = plane.getCenter(); + planeMsg->center.x = sl_center[0]; + planeMsg->center.y = sl_center[1]; + planeMsg->center.z = sl_center[2]; + + // Plane extents + sl::float3 sl_extents = plane.getExtents(); + planeMsg->extents[0] = sl_extents[0]; + planeMsg->extents[1] = sl_extents[1]; + + // Plane pose + sl::Pose sl_pose = plane.getPose(); + sl::Orientation sl_rot = sl_pose.getOrientation(); + sl::Translation sl_tr = sl_pose.getTranslation(); + + planeMsg->pose.rotation.x = sl_rot.ox; + planeMsg->pose.rotation.y = sl_rot.oy; + planeMsg->pose.rotation.z = sl_rot.oz; + planeMsg->pose.rotation.w = sl_rot.ow; + + planeMsg->pose.translation.x = sl_tr.x; + planeMsg->pose.translation.y = sl_tr.y; + planeMsg->pose.translation.z = sl_tr.z; + + // Plane Bounds + std::vector sl_bounds = plane.getBounds(); + planeMsg->bounds.points.resize(sl_bounds.size()); + memcpy(planeMsg->bounds.points.data(), sl_bounds.data(), 3 * sl_bounds.size() * sizeof(float)); + + // Plane mesh + sl::Mesh sl_mesh = plane.extractMesh(); + size_t triangCount = sl_mesh.triangles.size(); + size_t ptsCount = sl_mesh.vertices.size(); + planeMsg->mesh.triangles.resize(triangCount); + planeMsg->mesh.vertices.resize(ptsCount); + + // memcpy not allowed because data types are different + for (size_t i = 0; i < triangCount; i++) + { + planeMsg->mesh.triangles[i].vertex_indices[0] = sl_mesh.triangles[i][0]; + planeMsg->mesh.triangles[i].vertex_indices[1] = sl_mesh.triangles[i][1]; + planeMsg->mesh.triangles[i].vertex_indices[2] = sl_mesh.triangles[i][2]; + } - mPubPlane.publish(planeMsg); - // <---- Publish the plane as custom message + // memcpy not allowed because data types are different + for (size_t i = 0; i < ptsCount; i++) + { + planeMsg->mesh.vertices[i].x = sl_mesh.vertices[i][0]; + planeMsg->mesh.vertices[i].y = sl_mesh.vertices[i][1]; + planeMsg->mesh.vertices[i].z = sl_mesh.vertices[i][2]; } + + mPubPlane.publish(planeMsg); + // <---- Publish the plane as custom message + } } -} // namespace zed_nodelets +} // namespace zed_nodelets From b5da35c2eadbf51033b1eaa50866b661de9fbe3a Mon Sep 17 00:00:00 2001 From: Walter Lucetti Date: Wed, 13 Sep 2023 11:58:16 +0200 Subject: [PATCH 12/21] Add `set_roi` and `reset_roi` services --- CHANGELOG.rst | 6 + zed-ros-interfaces | 2 +- .../include/zed_wrapper_nodelet.hpp | 16 ++ .../zed_nodelet/src/zed_wrapper_nodelet.cpp | 143 ++++++++++++++++-- 4 files changed, 150 insertions(+), 17 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 6bd26187..1a1efe29 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,6 +1,12 @@ CHANGELOG ========= +2023-09-13 +---------- +- Add ROS '.clang-format' +- Code refactoring +- Add `set_roi` and `reset_roi` services + 2023-09-12 ---------- - Change the parameter 'general/resolution' to 'general/grab_resolution' and replace numeric values with strings diff --git a/zed-ros-interfaces b/zed-ros-interfaces index 73bfaa13..206b7e2d 160000 --- a/zed-ros-interfaces +++ b/zed-ros-interfaces @@ -1 +1 @@ -Subproject commit 73bfaa136c408afd3869d7877e4a95bbf412cc09 +Subproject commit 206b7e2d30505176f079168fbed45ed2dc916000 diff --git a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp 
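For reference, a minimal standalone subscriber for the 'plane' topic published above could look like the sketch below. It only uses the zed_interfaces/PlaneStamped fields filled in this hunk (header, coefficients, normal, mesh); the fully qualified topic name depends on the launch configuration and is just an example:

    // plane_listener.cpp - minimal consumer of the ZED plane topic (illustrative sketch)
    #include <ros/ros.h>
    #include <zed_interfaces/PlaneStamped.h>

    static void planeCallback(const zed_interfaces::PlaneStamped::ConstPtr& msg)
    {
      // Plane equation ax + by + cz + d = 0, as filled by the nodelet above
      ROS_INFO("Plane in '%s': coef=[%.3f %.3f %.3f %.3f] normal=[%.3f %.3f %.3f] mesh vertices=%zu",
               msg->header.frame_id.c_str(),
               msg->coefficients.coef[0], msg->coefficients.coef[1],
               msg->coefficients.coef[2], msg->coefficients.coef[3],
               msg->normal.x, msg->normal.y, msg->normal.z,
               msg->mesh.vertices.size());
    }

    int main(int argc, char** argv)
    {
      ros::init(argc, argv, "plane_listener");
      ros::NodeHandle nh;
      // Example topic name; remap it to match the actual node namespace
      ros::Subscriber sub = nh.subscribe("/zed/zed_node/plane", 1, planeCallback);
      ros::spin();
      return 0;
    }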
b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp index 73354370..925b5bde 100644 --- a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp +++ b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp @@ -58,6 +58,8 @@ #include #include #include +#include +#include // Topics #include @@ -121,6 +123,10 @@ class ZEDWrapperNodelet : public nodelet::Nodelet */ virtual void onInit(); + /*! \brief Initialize services + */ + void initServices(); + /*! \brief Reads parameters from the param server */ void readParameters(); @@ -344,6 +350,14 @@ class ZEDWrapperNodelet : public nodelet::Nodelet bool on_stop_remote_stream(zed_interfaces::stop_remote_stream::Request& req, zed_interfaces::stop_remote_stream::Response& res); + /*! \brief Service callback to set_roi service + */ + bool on_set_roi(zed_interfaces::set_roi::Request& req, zed_interfaces::set_roi::Response& res); + + /*! \brief Service callback to reset_roi service + */ + bool on_reset_roi(zed_interfaces::reset_roi::Request& req, zed_interfaces::reset_roi::Response& res); + /*! \brief Service callback to set_led_status service */ bool on_set_led_status(zed_interfaces::set_led_status::Request& req, zed_interfaces::set_led_status::Response& res); @@ -525,6 +539,8 @@ class ZEDWrapperNodelet : public nodelet::Nodelet ros::ServiceServer mSrvStartObjDet; ros::ServiceServer mSrvStopObjDet; ros::ServiceServer mSrvSaveAreaMemory; + ros::ServiceServer mSrvSetRoi; + ros::ServiceServer mSrvResetRoi; // ----> Topics (ONLY THOSE NOT CHANGING WHILE NODE RUNS) // Camera info diff --git a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp index 717357e8..913ec771 100644 --- a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp +++ b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp @@ -614,6 +614,28 @@ void ZEDWrapperNodelet::onInit() // <---- Subscribers // ----> Services + initServices(); + // <---- Services + + // ----> Threads + if (!mDepthDisabled) + { + // Start Pointcloud thread + mPcThread = std::thread(&ZEDWrapperNodelet::pointcloud_thread_func, this); + } + + // Start pool thread + mDevicePollThread = std::thread(&ZEDWrapperNodelet::device_poll_thread_func, this); + + // Start Sensors thread + mSensThread = std::thread(&ZEDWrapperNodelet::sensors_thread_func, this); + // <---- Threads + + NODELET_INFO("+++ ZED Node started +++"); +} + +void ZEDWrapperNodelet::initServices() +{ NODELET_INFO("*** SERVICES ***"); if (!mDepthDisabled) { @@ -657,23 +679,11 @@ void ZEDWrapperNodelet::onInit() NODELET_INFO_STREAM(" * Advertised on service " << mSrvSvoStartStream.getService().c_str()); mSrvSvoStopStream = mNhNs.advertiseService("stop_remote_stream", &ZEDWrapperNodelet::on_stop_remote_stream, this); NODELET_INFO_STREAM(" * Advertised on service " << mSrvSvoStopStream.getService().c_str()); - // <---- Services - - // ----> Threads - if (!mDepthDisabled) - { - // Start Pointcloud thread - mPcThread = std::thread(&ZEDWrapperNodelet::pointcloud_thread_func, this); - } - - // Start pool thread - mDevicePollThread = std::thread(&ZEDWrapperNodelet::device_poll_thread_func, this); - - // Start Sensors thread - mSensThread = std::thread(&ZEDWrapperNodelet::sensors_thread_func, this); - // <---- Threads - NODELET_INFO("+++ ZED Node started +++"); + mSrvSetRoi = mNhNs.advertiseService("set_roi", &ZEDWrapperNodelet::on_set_roi, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvSetRoi.getService().c_str()); + mSrvResetRoi = 
mNhNs.advertiseService("reset_roi", &ZEDWrapperNodelet::on_reset_roi, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvSetRoi.getService().c_str()); } void ZEDWrapperNodelet::readGeneralParams() @@ -5148,6 +5158,107 @@ bool ZEDWrapperNodelet::on_toggle_led(zed_interfaces::toggle_led::Request& req, return true; } +bool ZEDWrapperNodelet::on_set_roi(zed_interfaces::set_roi::Request& req, zed_interfaces::set_roi::Response& res) +{ + NODELET_INFO("** Set ROI service called **"); + NODELET_INFO_STREAM(" * ROI string: " << req.roi.c_str()); + + if (req.roi == "") + { + std::string err_msg = + "Error while setting ZED SDK region of interest: a vector of normalized points describing a " + "polygon is required. e.g. '[[0.5,0.25],[0.75,0.5],[0.5,0.75],[0.25,0.5]]'"; + + NODELET_WARN_STREAM(" * " << err_msg); + + res.message = err_msg; + res.success = false; + return false; + } + + std::string error; + std::vector> parsed_poly = sl_tools::parseStringVector(req.roi, error); + + if (error != "") + { + std::string err_msg = "Error while setting ZED SDK region of interest: "; + err_msg += error; + + NODELET_WARN_STREAM(" * " << err_msg); + + res.message = err_msg; + res.success = false; + return false; + } + + // ----> Set Region of Interest + // Create mask + NODELET_INFO(" * Setting ROI"); + std::vector sl_poly; + std::string log_msg = parseRoiPoly(parsed_poly, sl_poly); + // NODELET_INFO_STREAM(" * Parsed ROI: " << log_msg.c_str()); + sl::Resolution resol(mCamWidth, mCamHeight); + sl::Mat roi_mask(resol, sl::MAT_TYPE::U8_C1, sl::MEM::CPU); + if (!sl_tools::generateROI(sl_poly, roi_mask)) + { + std::string err_msg = "Error generating the region of interest image mask. "; + err_msg += error; + + NODELET_WARN_STREAM(" * " << err_msg); + + res.message = err_msg; + res.success = false; + return false; + } + else + { + sl::ERROR_CODE err = mZed.setRegionOfInterest(roi_mask); + if (err != sl::ERROR_CODE::SUCCESS) + { + std::string err_msg = "Error while setting ZED SDK region of interest: "; + err_msg += sl::toString(err).c_str(); + + NODELET_WARN_STREAM(" * " << err_msg); + + res.message = err_msg; + res.success = false; + return false; + } + else + { + NODELET_INFO(" * Region of Interest correctly set."); + + res.message = "Region of Interest correctly set."; + res.success = true; + return true; + } + } + // <---- Set Region of Interest +} + +bool ZEDWrapperNodelet::on_reset_roi(zed_interfaces::reset_roi::Request& req, zed_interfaces::reset_roi::Response& res) +{ + NODELET_INFO("** Reset ROI service called **"); + + sl::Mat empty_roi; + sl::ERROR_CODE err = mZed.setRegionOfInterest(empty_roi); + + if (err != sl::ERROR_CODE::SUCCESS) + { + std::string err_msg = " * Error while resetting ZED SDK region of interest: "; + err_msg += sl::toString(err); + NODELET_WARN_STREAM(" * Error while resetting ZED SDK region of interest: " << err_msg); + res.reset_done = false; + return false; + } + else + { + NODELET_INFO(" * Region of Interest correctly reset."); + res.reset_done = true; + return true; + } +} + bool ZEDWrapperNodelet::on_start_3d_mapping(zed_interfaces::start_3d_mapping::Request& req, zed_interfaces::start_3d_mapping::Response& res) { From b6ea6332fed7415ee5201765ac45c9c180e036ac Mon Sep 17 00:00:00 2001 From: Walter Lucetti Date: Wed, 13 Sep 2023 18:33:06 +0200 Subject: [PATCH 13/21] Add parameter 'pos_tracking/depth_min_range' Add parameter 'pos_tracking/set_as_static' --- CHANGELOG.rst | 148 +++++++++--------- .../include/zed_wrapper_nodelet.hpp | 21 +-- 
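For reference, the new services can be exercised from a small client node such as the sketch below. The request and response field names (roi, success, message, reset_done) are the ones used by the handlers above; the '/zed/zed_node/...' service names are examples that depend on the node namespace:

    // roi_client.cpp - example client for the set_roi / reset_roi services (illustrative sketch)
    #include <ros/ros.h>
    #include <zed_interfaces/set_roi.h>
    #include <zed_interfaces/reset_roi.h>

    int main(int argc, char** argv)
    {
      ros::init(argc, argv, "zed_roi_client");
      ros::NodeHandle nh;

      // Set a rectangular region of interest described as a normalized polygon
      ros::ServiceClient set_client = nh.serviceClient<zed_interfaces::set_roi>("/zed/zed_node/set_roi");
      zed_interfaces::set_roi set_srv;
      set_srv.request.roi = "[[0.25,0.25],[0.75,0.25],[0.75,0.75],[0.25,0.75]]";
      if (set_client.call(set_srv))
        ROS_INFO("set_roi: success=%d message='%s'", set_srv.response.success, set_srv.response.message.c_str());

      // Restore the full sensing area
      ros::ServiceClient reset_client = nh.serviceClient<zed_interfaces::reset_roi>("/zed/zed_node/reset_roi");
      zed_interfaces::reset_roi reset_srv;
      if (reset_client.call(reset_srv))
        ROS_INFO("reset_roi: reset_done=%d", reset_srv.response.reset_done);

      return 0;
    }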
.../zed_nodelet/src/zed_wrapper_nodelet.cpp | 53 ++++--- zed_wrapper/params/common.yaml | 2 +- 4 files changed, 118 insertions(+), 106 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 1a1efe29..00ccb922 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -5,7 +5,9 @@ CHANGELOG ---------- - Add ROS '.clang-format' - Code refactoring -- Add `set_roi` and `reset_roi` services +- Add 'set_roi' and 'reset_roi' services +- Add parameter 'pos_tracking/depth_min_range' +- Add parameter 'pos_tracking/set_as_static' 2023-09-12 ---------- @@ -23,39 +25,39 @@ CHANGELOG 2023-09-08 ---------- -- Add `pos_tracking/set_gravity_as_origin` parameter. If 'true' align the positional tracking world to imu gravity measurement. Keep the yaw from the user initial pose. -- Add `pos_tracking/pos_tracking_mode` parameter. Matches the ZED SDK setting: 'QUALITY', 'STANDARD' -- Fix the warning `Elaboration takes longer [...]` +- Add 'pos_tracking/set_gravity_as_origin' parameter. If 'true' align the positional tracking world to imu gravity measurement. Keep the yaw from the user initial pose. +- Add 'pos_tracking/pos_tracking_mode' parameter. Matches the ZED SDK setting: 'QUALITY', 'STANDARD' +- Fix the warning 'Elaboration takes longer [...]' 2023-09-07 ---------- -- `pub_frame_rate` now controls the `InitParameters::grab_compute_capping_fps` parameter of the ZED SDK instead of controlling the frequency of a parallel thread. It's not a Dynamic parameter anymore. -- Change `general/camera_flip` parameter to string: 'AUTO', 'ON', 'OFF' -- Change `general/verbose` from bool to integer +- 'pub_frame_rate' now controls the 'InitParameters::grab_compute_capping_fps' parameter of the ZED SDK instead of controlling the frequency of a parallel thread. It's not a Dynamic parameter anymore. +- Change 'general/camera_flip' parameter to string: 'AUTO', 'ON', 'OFF' +- Change 'general/verbose' from bool to integer v4.0.5 ------ - Support for ZED SDK v4.0 -- Remove parameter `object_detection.body_fitting` -- Remove parameter `depth.sensing_mode` -- Remove parameter `video.extrinsic_in_camera_frame` +- Remove parameter 'object_detection.body_fitting' +- Remove parameter 'depth.sensing_mode' +- Remove parameter 'video.extrinsic_in_camera_frame' - Skeleton Tracking is no more available. Migrate to ROS 2 Wrapper if you need it -- `sensors` and `object_detection` parameters are now in `common.yaml` -- Move parameters `general.resolution` and `general.grab_frame_rate` to cameras yaml files to support the different configurations on ZED X and ZED X Mini. +- 'sensors' and 'object_detection' parameters are now in 'common.yaml' +- Move parameters 'general.resolution' and 'general.grab_frame_rate' to cameras yaml files to support the different configurations on ZED X and ZED X Mini. - Remove support for ROS Melodic that reached EOL - Add 1080p for ZED X and ZED X Mini v3.8.x ------ - Fix the frame links of barometer, magnetometer, and temperature sensors for ZED2i -- Add parameter `sensors/max_pub_rate` to set the maximum publishing frequency of sensors data +- Add parameter 'sensors/max_pub_rate' to set the maximum publishing frequency of sensors data - Improve Sensors thread -- Fix wrong TF broadcasting when calling the `set_pose`, `reset_tracking`, and `reset_odometry` services. Now the initial odometry is coherent with the new starting point. +- Fix wrong TF broadcasting when calling the 'set_pose', 'reset_tracking', and 'reset_odometry' services. Now the initial odometry is coherent with the new starting point. 
- Add Plane Detection. See [ZED Documentation](https://www.stereolabs.com/docs/ros/plane_detection/) - Fix TF timestamp issue in SVO mode -- Fix units for Atmospheric pressure data. Now the value is correctly published in `Pascal` according to the [topic specification](http://docs.ros.org/en/melodic/api/sensor_msgs/html/msg/FluidPressure.html). +- Fix units for Atmospheric pressure data. Now the value is correctly published in 'Pascal' according to the [topic specification](http://docs.ros.org/en/melodic/api/sensor_msgs/html/msg/FluidPressure.html). v3.7.x --------- @@ -64,23 +66,23 @@ v3.7.x - Add support for sport-related object class - Add support for X_MEDIUM neural network models - Enable AI for ZED Mini -- Add new `_base_link` frame on the base of the camera to easily handle camera positioning on robots. Thx @civerachb-cpr +- Add new '_base_link' frame on the base of the camera to easily handle camera positioning on robots. Thx @civerachb-cpr - Improve URDF by adding 3° slope for ZED and ZED2, X-offset for optical frames to correctly match the CMOS sensors position on the PCB, X-offset for mounting screw on ZED2i -- Add `zed_macro.urdf.xacro` to be included by other `xacro` file to easily integrate ZED cameras in the robot descriptions. See [PR #771](https://github.com/stereolabs/zed-ros-wrapper/pull/771) for details. Thx @civerachb-cpr -- New parameter `save_area_memory_db_on_exit` to force Area Memory saving when the node is closed and Area Memory is enabled and valid. -- Add service `save_Area_map` to trigger an Area Memory saving. +- Add 'zed_macro.urdf.xacro' to be included by other 'xacro' file to easily integrate ZED cameras in the robot descriptions. See [PR #771](https://github.com/stereolabs/zed-ros-wrapper/pull/771) for details. Thx @civerachb-cpr +- New parameter 'save_area_memory_db_on_exit' to force Area Memory saving when the node is closed and Area Memory is enabled and valid. +- Add service 'save_Area_map' to trigger an Area Memory saving. - New tool function to transform a relative path to absolute. -- Enabled static IMU TF broadcasting even it `publish_tf` is set to false, making the two options independent. Thx to @bjsowa -- Moved the `zed_interfaces` folder in the new [`zed-ros-interfaces`](https://github.com/stereolabs/zed-ros-interfaces) repository. The new repository is useful to receive the topics from a ZED node on system where the `zed-ros-wrapper` repository cannot be fully installed, i.e. systems without CUDA support. For this repository nothing changes because the `zed_interfaces` folder is replaced by the `zed-ros-interfaces` git submodule to automatically satisfy all the dependencies. +- Enabled static IMU TF broadcasting even it 'publish_tf' is set to false, making the two options independent. Thx to @bjsowa +- Moved the 'zed_interfaces' folder in the new ['zed-ros-interfaces'](https://github.com/stereolabs/zed-ros-interfaces) repository. The new repository is useful to receive the topics from a ZED node on system where the 'zed-ros-wrapper' repository cannot be fully installed, i.e. systems without CUDA support. For this repository nothing changes because the 'zed_interfaces' folder is replaced by the 'zed-ros-interfaces' git submodule to automatically satisfy all the dependencies. - Fix sensors topics pubblication for ZED2i. 
The support for the new camera was not complete -- Fix sensor_msgs type for depth image in OpenNI mode, from `sensor_msgs::image_encodings::mono16` to `sensor_msgs::image_encodings::TYPE_16UC1`. Depth image in OpenNI mode is now compatible with the nodelet `depthimage_to_laserscan` +- Fix sensor_msgs type for depth image in OpenNI mode, from 'sensor_msgs::image_encodings::mono16' to 'sensor_msgs::image_encodings::TYPE_16UC1'. Depth image in OpenNI mode is now compatible with the nodelet 'depthimage_to_laserscan' v3.5.x --------- - Add support for ROS Noetic - Add support for SDK v3.5 - Add support for the new ZED 2i -- Add new parameter `pos_tracking/pos_tracking_enabled` to enable positional tracking from start even if not required by any subscribed topic. This is useful, for example, to keep the TF always updated. +- Add new parameter 'pos_tracking/pos_tracking_enabled' to enable positional tracking from start even if not required by any subscribed topic. This is useful, for example, to keep the TF always updated. - Add new example to start multiple ZED Nodelets inside the same nodelet manager - Fixed issue #690 @@ -90,17 +92,17 @@ v3.4.x - Fix issue #660: detected objects topic not published if depth computation not active - Improved support for ZED Object Detection - Add Skeleton Tracking support -- New Rviz plugin for Object Detection in `zed-ros-examples` -- New parameters and name changing to fit the new OD features, also the `start_object_detection` service has been modified to match the new features: - - new `model` parameter to choose the AI model - - new `max_range` parameter to limit the detection range - - new `sk_body_fitting` parameter to enable Skeleton fitting for skeleton AI models - - `people` -> `mc_people` to indicate that it is related to multiclass AI models - - `vehicles`-> `mc_vehicles` to indicate that it is related to multiclass AI models - - new `mc_bag` parameter to enable bags detection with multiclass AI models - - new `mc_animal` parameter to enable animals detection with multiclass AI models - - new `mc_electronics` parameter to enable electronic devices detection with multiclass AI models - - new `mc_fruit_vegetable` parameter to enable fruits and vegetables detection with multiclass AI models +- New Rviz plugin for Object Detection in 'zed-ros-examples' +- New parameters and name changing to fit the new OD features, also the 'start_object_detection' service has been modified to match the new features: + - new 'model' parameter to choose the AI model + - new 'max_range' parameter to limit the detection range + - new 'sk_body_fitting' parameter to enable Skeleton fitting for skeleton AI models + - 'people' -> 'mc_people' to indicate that it is related to multiclass AI models + - 'vehicles'-> 'mc_vehicles' to indicate that it is related to multiclass AI models + - new 'mc_bag' parameter to enable bags detection with multiclass AI models + - new 'mc_animal' parameter to enable animals detection with multiclass AI models + - new 'mc_electronics' parameter to enable electronic devices detection with multiclass AI models + - new 'mc_fruit_vegetable' parameter to enable fruits and vegetables detection with multiclass AI models RGB/Depth sync fix #629 (2020-11-02) ------------------------------- @@ -110,13 +112,13 @@ RGB/Depth sync fix #629 (2020-11-02) ASYNC Object Detection (2020-09-18) ----------------------------------- - Object Detection now runs asynchronously respect to data grabbing and Object Detected data are published only when available not affecting 
the frequency of the publishing of the other data types -- Depth OpenNI topic name changed from `depth/depth_raw_registered` to `depth/depth_registered` +- Depth OpenNI topic name changed from 'depth/depth_raw_registered' to 'depth/depth_registered' IMU timestamp fix (2020-08-25) ------------------------------ -- Added new parameter `sensors/publish_imu_tf` to enable/disable IMU TF broadcasting +- Added new parameter 'sensors/publish_imu_tf' to enable/disable IMU TF broadcasting - Fixed duplicated IMU timestamp issue (see ticket #577) -- Fixed problem with IMU TF in Rviz: `odom` and `zed_camera_center` TFs are now published at the same frequency of the IMU TF, if available) +- Fixed problem with IMU TF in Rviz: 'odom' and 'zed_camera_center' TFs are now published at the same frequency of the IMU TF, if available) - IMU TF is now published once as static TF even if the IMU topic is not subscribed Timestamp fix (2020-06-03) @@ -125,65 +127,65 @@ Timestamp fix (2020-06-03) IMU fix (2020-05-24) -------------------- -- Fix issue with IMU frame link when `publish_tf` and `publish_map_tf` are disabled +- Fix issue with IMU frame link when 'publish_tf' and 'publish_map_tf' are disabled New package: zed_nodelets (2020-03-20) --------------------------------------- -- Added the new `zed_interfaces/RGBDSensors` custom topic that contains RGB, Depth, IMU and Magnetometer synchronized topics -- Added a new package `zed_nodelets` that contains the main `zed_nodelets/ZEDWrapperNodelet` and new nodelets -- Added a new nodelet `zed_nodelets/RgbdSensorsSyncNodelet` that subscribes to RGB, Depth, IMU and Magnetometer topics and republish them in a single synchronized message -- Added a new nodelet `zed_nodelets/RgbdSensorsDemuxNodelet` that subscribes to RGBDSensors and republish RGB, Depth, IMU and Magnetometer as single topics -- Renamed `zed_interfaces/objects` to `zed_interfaces/Objects` -- Renamed `zed_interfaces/object_stamped` to `zed_interfaces/ObjectStamped` -- Reorganized the `zed_wrapper/launch` folder adding the `include` folder -- New online documentation to explain in details the new `zed_nodelets` package: https://www.stereolabs.com/docs/ros/zed_nodelets/ +- Added the new 'zed_interfaces/RGBDSensors' custom topic that contains RGB, Depth, IMU and Magnetometer synchronized topics +- Added a new package 'zed_nodelets' that contains the main 'zed_nodelets/ZEDWrapperNodelet' and new nodelets +- Added a new nodelet 'zed_nodelets/RgbdSensorsSyncNodelet' that subscribes to RGB, Depth, IMU and Magnetometer topics and republish them in a single synchronized message +- Added a new nodelet 'zed_nodelets/RgbdSensorsDemuxNodelet' that subscribes to RGBDSensors and republish RGB, Depth, IMU and Magnetometer as single topics +- Renamed 'zed_interfaces/objects' to 'zed_interfaces/Objects' +- Renamed 'zed_interfaces/object_stamped' to 'zed_interfaces/ObjectStamped' +- Reorganized the 'zed_wrapper/launch' folder adding the 'include' folder +- New online documentation to explain in details the new 'zed_nodelets' package: https://www.stereolabs.com/docs/ros/zed_nodelets/ v3.1 ----- -- Added new package `zed_interfaces` with isolated declarations of custom messages, services and actions -- Removed not used `world_frame` parameter -- Removed the`publish_pose_covariance` parameter, now covariance for pose and odometry is always published -- Removed `_m` from parameters `mapping/resolution_m` and `mapping/max_mapping_range_m` -- Renamed the parameter `depth_resample_factor` to `depth_downsample_factor` -- Renamed the 
parameter `img_resample_factor` to `img_downsample_factor` -- Renamed the parameter `odometry_db` to `area_memory_db_path` -- Renamed the parameter `frame_rate` to `grab_frame_rate` -- Added new dynamic parameter `pub_frame_rate` to reduce Video and Depth publishing frequency respect to grabbing frame rate [`grab_frame_rate`] -- Added new dynamic parameter `gamma` for Gamma Control -- Added new dynamic parameter `depth_texture_conf` to filter depth according to textureness information +- Added new package 'zed_interfaces' with isolated declarations of custom messages, services and actions +- Removed not used 'world_frame' parameter +- Removed the'publish_pose_covariance' parameter, now covariance for pose and odometry is always published +- Removed '_m' from parameters 'mapping/resolution_m' and 'mapping/max_mapping_range_m' +- Renamed the parameter 'depth_resample_factor' to 'depth_downsample_factor' +- Renamed the parameter 'img_resample_factor' to 'img_downsample_factor' +- Renamed the parameter 'odometry_db' to 'area_memory_db_path' +- Renamed the parameter 'frame_rate' to 'grab_frame_rate' +- Added new dynamic parameter 'pub_frame_rate' to reduce Video and Depth publishing frequency respect to grabbing frame rate ['grab_frame_rate'] +- Added new dynamic parameter 'gamma' for Gamma Control +- Added new dynamic parameter 'depth_texture_conf' to filter depth according to textureness information - Added new TF frames for all the sensors available on ZED2 - Added publishers for gray images -- Added publisher for Camera to IMU transform: `///camera_imu_transform` -- Default value for `depth_confidence` changed from 100 to 50 -- Added `base_frame` as launch parameter to propagate the value of the parameter in the Xacro description +- Added publisher for Camera to IMU transform: '///camera_imu_transform' +- Default value for 'depth_confidence' changed from 100 to 50 +- Added 'base_frame' as launch parameter to propagate the value of the parameter in the Xacro description Bug fix (2020-03-06) -------------------- -- Fix default value for dynamic parameters not set from `common.yaml` +- Fix default value for dynamic parameters not set from 'common.yaml' XACRO and more (2020-01-31) --------------------------- - Added xacro support for parametric URDF - Removed redundant URDFs and added a single parametric URDF based on xacro - Fixed auto white balance at node start (thanks to @kjaget) -- Removed `fixed_covariance` and `fixed_cov_value` parameters (not required anymore) -- Removed `sens_pub_rate` parameter -- Removed `confidence_image` message -- Removed `color_enhancement` parameter, always ON by default +- Removed 'fixed_covariance' and 'fixed_cov_value' parameters (not required anymore) +- Removed 'sens_pub_rate' parameter +- Removed 'confidence_image' message +- Removed 'color_enhancement' parameter, always ON by default - Mapping does not use presets for resolution, but a float value in range [0.01,0.2] -- Added new parameter `max_mapping_range_m` for mapping depth range (set to `-1` for auto calculation) -- Moved "multi-camera" launch file in [`zed-ros-examples`](https://github.com/stereolabs/zed-ros-examples/tree/master/examples/zed_multicamera_example) +- Added new parameter 'max_mapping_range_m' for mapping depth range (set to '-1' for auto calculation) +- Moved "multi-camera" launch file in ['zed-ros-examples'](https://github.com/stereolabs/zed-ros-examples/tree/master/examples/zed_multicamera_example) - Added current GPU ID to Diagnostic information -- The 
`confidence` dynamic parameter is now called `depth_confidence` -- Removed dynamic parametes `map_resize_factor` -- Added new parameter `video/img_resample_factor` -- Added new parameter `depth/map_resample_factor` +- The 'confidence' dynamic parameter is now called 'depth_confidence' +- Removed dynamic parametes 'map_resize_factor' +- Added new parameter 'video/img_resample_factor' +- Added new parameter 'depth/map_resample_factor' - Updated the names for the parameters of the Object Detection module [only ZED2] SDK v3.0 (2020-01-27) --------------------- -- Added a new repository [`zed-ros-examples`](https://github.com/stereolabs/zed-ros-examples) to keep separated the main ZED Wrapper node from Examples and Tutorials. A clean robot installation is now allowed +- Added a new repository ['zed-ros-examples'](https://github.com/stereolabs/zed-ros-examples) to keep separated the main ZED Wrapper node from Examples and Tutorials. A clean robot installation is now allowed - ZED 2 support - Color enhancement support - Max range is not a dynamic parameter anymore @@ -191,7 +193,7 @@ SDK v3.0 (2020-01-27) - New service to start/stop mapping - Support for Object Detection (only ZED2) - Advanced support for on-board sensors (only ZED-M and ZED2) -- New tutorials, see [`zed-ros-examples`](https://github.com/stereolabs/zed-ros-examples) +- New tutorials, see ['zed-ros-examples'](https://github.com/stereolabs/zed-ros-examples) diff --git a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp index 925b5bde..318e164c 100644 --- a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp +++ b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp @@ -575,9 +575,9 @@ class ZEDWrapperNodelet : public nodelet::Nodelet std::string mCloudFrameId; std::string mPointCloudFrameId; - std::string mMapFrameId; - std::string mOdometryFrameId; - std::string mBaseFrameId; + std::string mMapFrameId = "map"; + std::string mOdometryFrameId = "odom"; + std::string mBaseFrameId = "base_link"; std::string mCameraFrameId; std::string mRightCamFrameId; @@ -591,12 +591,6 @@ class ZEDWrapperNodelet : public nodelet::Nodelet std::string mTempLeftFrameId; std::string mTempRightFrameId; - bool mPublishTf; - bool mPublishMapTf; - bool mPublishImuTf; - sl::FLIP_MODE mCameraFlip; - bool mCameraSelfCalib; - // Launch file parameters std::string mCameraName; sl::RESOLUTION mCamResol; @@ -620,7 +614,7 @@ class ZEDWrapperNodelet : public nodelet::Nodelet bool mSvoMode = false; double mCamMinDepth; double mCamMaxDepth; - std::string mClickedPtTopic; + std::string mClickedPtTopic = "/clicked_point"; // Positional tracking bool mPosTrackingEnabled = false; @@ -632,6 +626,13 @@ class ZEDWrapperNodelet : public nodelet::Nodelet bool mFloorAlignment = false; bool mImuFusion = true; bool mSetGravityAsOrigin = false; + bool mPublishTf; + bool mPublishMapTf; + bool mPublishImuTf; + sl::FLIP_MODE mCameraFlip; + bool mCameraSelfCalib; + bool mIsStatic = false; + double mPosTrkMinDepth = 0.0; // Flags bool mGrabActive = false; // Indicate if camera grabbing is active (at least one topic subscribed) diff --git a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp index 913ec771..64d6f9a2 100644 --- a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp +++ b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp @@ -846,7 +846,7 @@ void 
ZEDWrapperNodelet::readGeneralParams() mCameraFlip = sl::FLIP_MODE::AUTO; } NODELET_INFO_STREAM(" * Camera Flip\t\t\t-> " << sl::toString(mCameraFlip).c_str()); - mNhNs.param("general/self_calib", mCameraSelfCalib, true); + mNhNs.getParam("general/self_calib", mCameraSelfCalib); NODELET_INFO_STREAM(" * Self calibration\t\t-> " << (mCameraSelfCalib ? "ENABLED" : "DISABLED")); int tmp_sn = 0; @@ -923,7 +923,7 @@ void ZEDWrapperNodelet::readPosTrkParams() { NODELET_INFO_STREAM("*** POSITIONAL TRACKING PARAMETERS ***"); - mNhNs.param("pos_tracking/pos_tracking_enabled", mPosTrackingEnabled, true); + mNhNs.getParam("pos_tracking/pos_tracking_enabled", mPosTrackingEnabled); NODELET_INFO_STREAM(" * Positional tracking\t\t-> " << (mPosTrackingEnabled ? "ENABLED" : "DISABLED")); std::string pos_trk_mode; @@ -964,31 +964,39 @@ void ZEDWrapperNodelet::readPosTrkParams() mAreaMemDbPath = sl_tools::resolveFilePath(mAreaMemDbPath); NODELET_INFO_STREAM(" * Odometry DB path\t\t-> " << mAreaMemDbPath.c_str()); - mNhNs.param("pos_tracking/save_area_memory_db_on_exit", mSaveAreaMapOnClosing, false); + mNhNs.getParam("pos_tracking/save_area_memory_db_on_exit", mSaveAreaMapOnClosing); NODELET_INFO_STREAM(" * Save Area Memory on closing\t-> " << (mSaveAreaMapOnClosing ? "ENABLED" : "DISABLED")); - mNhNs.param("pos_tracking/area_memory", mAreaMemory, false); + mNhNs.getParam("pos_tracking/area_memory", mAreaMemory); NODELET_INFO_STREAM(" * Area Memory\t\t\t-> " << (mAreaMemory ? "ENABLED" : "DISABLED")); - mNhNs.param("pos_tracking/imu_fusion", mImuFusion, true); + mNhNs.getParam("pos_tracking/imu_fusion", mImuFusion); NODELET_INFO_STREAM(" * IMU Fusion\t\t\t-> " << (mImuFusion ? "ENABLED" : "DISABLED")); - mNhNs.param("pos_tracking/floor_alignment", mFloorAlignment, false); + mNhNs.getParam("pos_tracking/floor_alignment", mFloorAlignment); NODELET_INFO_STREAM(" * Floor alignment\t\t-> " << (mFloorAlignment ? "ENABLED" : "DISABLED")); - mNhNs.param("pos_tracking/init_odom_with_first_valid_pose", mInitOdomWithPose, true); + mNhNs.getParam("pos_tracking/init_odom_with_first_valid_pose", mInitOdomWithPose); NODELET_INFO_STREAM(" * Init Odometry with first valid pose data -> " << (mInitOdomWithPose ? "ENABLED" : "DISABLED")); - mNhNs.param("pos_tracking/two_d_mode", mTwoDMode, false); + mNhNs.getParam("pos_tracking/two_d_mode", mTwoDMode); NODELET_INFO_STREAM(" * Force 2D mode\t\t-> " << (mTwoDMode ? "ENABLED" : "DISABLED")); if (mTwoDMode) { - mNhNs.param("pos_tracking/fixed_z_value", mFixedZValue, 0.0); + mNhNs.getParam("pos_tracking/fixed_z_value", mFixedZValue); NODELET_INFO_STREAM(" * Fixed Z value\t\t-> " << mFixedZValue); } - mNhNs.param("pos_tracking/publish_tf", mPublishTf, true); + mNhNs.getParam("pos_tracking/publish_tf", mPublishTf); NODELET_INFO_STREAM(" * Broadcast odometry TF\t-> " << (mPublishTf ? "ENABLED" : "DISABLED")); - mNhNs.param("pos_tracking/publish_map_tf", mPublishMapTf, true); + mNhNs.getParam("pos_tracking/publish_map_tf", mPublishMapTf); NODELET_INFO_STREAM(" * Broadcast map pose TF\t-> " << (mPublishTf ? (mPublishMapTf ? "ENABLED" : "DISABLED") : "DISABLED")); + + mNhNs.getParam("pos_tracking/set_as_static", mIsStatic); + NODELET_INFO_STREAM(" * Camera is static\t\t-> " << (mIsStatic ? 
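One behavioural note on the widespread switch from NodeHandle::param() to NodeHandle::getParam() in these hunks: getParam() leaves the output variable untouched when the key is missing, so the fallback values now come from the in-class member initializers added to the header in this patch series (for example mMapFrameId = "map" and mClickedPtTopic = "/clicked_point"). A minimal illustration of the difference, using a hypothetical key name:

    // param_default_demo.cpp - shows the default-handling difference the patch relies on (sketch)
    #include <ros/ros.h>
    #include <string>

    int main(int argc, char** argv)
    {
      ros::init(argc, argv, "param_default_demo");
      ros::NodeHandle nh("~");

      // param(): writes the supplied default into 'frame_a' when the key is missing
      std::string frame_a;
      nh.param<std::string>("demo/map_frame", frame_a, "map");

      // getParam(): leaves 'frame_b' unchanged when the key is missing,
      // so the fallback must already be stored in the variable itself
      std::string frame_b = "map";
      nh.getParam("demo/map_frame", frame_b);

      ROS_INFO("param(): '%s' - getParam(): '%s'", frame_a.c_str(), frame_b.c_str());
      return 0;
    }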
"ENABLED" : "DISABLED")); + mNhNs.getParam("pos_tracking/depth_min_range", mPosTrkMinDepth); + NODELET_INFO_STREAM(" * Depth minimum range\t\t-> " << mPosTrkMinDepth); + + bool mIsStatic = false; + double mPosTrkMinDepth = 0.0; } } @@ -998,7 +1006,7 @@ void ZEDWrapperNodelet::readMappingParams() if (!mDepthDisabled) { - mNhNs.param("mapping/mapping_enabled", mMappingEnabled, false); + mNhNs.getParam("mapping/mapping_enabled", mMappingEnabled); if (mMappingEnabled) { @@ -1019,7 +1027,7 @@ void ZEDWrapperNodelet::readMappingParams() NODELET_INFO_STREAM(" * Mapping\t\t\t-> DISABLED"); } } - mNhNs.param("mapping/clicked_point_topic", mClickedPtTopic, std::string("/clicked_point")); + mNhNs.getParam("mapping/clicked_point_topic",mClickedPtTopic); NODELET_INFO_STREAM(" * Clicked point topic\t\t-> " << mClickedPtTopic.c_str()); } @@ -1029,7 +1037,7 @@ void ZEDWrapperNodelet::readObjDetParams() { NODELET_INFO_STREAM("*** OBJECT DETECTION PARAMETERS ***"); - mNhNs.param("object_detection/od_enabled", mObjDetEnabled, false); + mNhNs.getParam("object_detection/od_enabled", mObjDetEnabled); if (mObjDetEnabled) { @@ -1097,7 +1105,7 @@ void ZEDWrapperNodelet::readSensParams() NODELET_INFO_STREAM(" * Max sensors rate\t\t-> " << mSensPubRate); - mNhNs.param("sensors/publish_imu_tf", mPublishImuTf, true); + mNhNs.getParam("sensors/publish_imu_tf", mPublishImuTf); NODELET_INFO_STREAM(" * Broadcast IMU pose TF\t-> " << (mPublishImuTf ? "ENABLED" : "DISABLED")); } else @@ -1110,7 +1118,7 @@ void ZEDWrapperNodelet::readSvoParams() { NODELET_INFO_STREAM("*** SVO PARAMETERS ***"); - mNhNs.param("svo_file", mSvoFilepath, std::string()); + mNhNs.getParam("svo_file", mSvoFilepath); mSvoFilepath = sl_tools::resolveFilePath(mSvoFilepath); NODELET_INFO_STREAM(" * SVO input file: \t\t-> " << mSvoFilepath.c_str()); @@ -1237,14 +1245,14 @@ void ZEDWrapperNodelet::readParameters() // <---- SVO // Remote Stream - mNhNs.param("stream", mRemoteStreamAddr, std::string()); + mNhNs.getParam("stream", mRemoteStreamAddr); // ----> Coordinate frames NODELET_INFO_STREAM("*** COORDINATE FRAMES ***"); - mNhNs.param("pos_tracking/map_frame", mMapFrameId, "map"); - mNhNs.param("pos_tracking/odometry_frame", mOdometryFrameId, "odom"); - mNhNs.param("general/base_frame", mBaseFrameId, "base_link"); + mNhNs.getParam("pos_tracking/map_frame", mMapFrameId); + mNhNs.getParam("pos_tracking/odometry_frame", mOdometryFrameId); + mNhNs.getParam("general/base_frame", mBaseFrameId); mCameraFrameId = mCameraName + "_camera_center"; mImuFrameId = mCameraName + "_imu_link"; @@ -2083,10 +2091,11 @@ void ZEDWrapperNodelet::start_pos_tracking() mPoseSmoothing = false; // Always false. Pose Smoothing is to be enabled only for VR/AR applications posTrackParams.enable_pose_smoothing = mPoseSmoothing; - posTrackParams.set_floor_as_origin = mFloorAlignment; + posTrackParams.set_as_static = mIsStatic; + posTrackParams.depth_min_range = static_cast(mPosTrkMinDepth); - if (mAreaMemDbPath != "" && !sl_tools::file_exist(mAreaMemDbPath)) + if (mAreaMemDbPath != "" && !sl_tools::file_exist(mAreaMemDbPath)) { posTrackParams.area_file_path = ""; NODELET_WARN_STREAM("area_memory_db_path [" << mAreaMemDbPath << "] doesn't exist or is unreachable. 
"); diff --git a/zed_wrapper/params/common.yaml b/zed_wrapper/params/common.yaml index 93f8fbcd..0982426e 100644 --- a/zed_wrapper/params/common.yaml +++ b/zed_wrapper/params/common.yaml @@ -53,7 +53,7 @@ pos_tracking: publish_map_tf: true # publish `map -> odom` TF map_frame: 'map' # main frame odometry_frame: 'odom' # odometry frame - area_memory_db_path: 'zed_area_memory.area' # file loaded when the node starts to restore the "known visual features" map. + area_memory_db_path: '' # file loaded when the node starts to restore the "known visual features" map. save_area_memory_db_on_exit: false # save the "known visual features" map when the node is correctly closed to the path indicated by `area_memory_db_path` area_memory: true # Enable to detect loop closure floor_alignment: false # Enable to automatically calculate camera/floor offset From 6863f2a0b4c6b335a80d038f3bd03226660803f8 Mon Sep 17 00:00:00 2001 From: Walter Lucetti Date: Wed, 13 Sep 2023 18:42:20 +0200 Subject: [PATCH 14/21] Code cleaning: Add 'processCameraSettings' func --- .../include/zed_wrapper_nodelet.hpp | 4 + .../zed_nodelet/src/zed_wrapper_nodelet.cpp | 432 +++++++++--------- 2 files changed, 226 insertions(+), 210 deletions(-) diff --git a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp index 318e164c..327c2e13 100644 --- a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp +++ b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp @@ -439,6 +439,10 @@ class ZEDWrapperNodelet : public nodelet::Nodelet */ void processDetectedObjects(ros::Time t); + /*! \brief Process camera settings + */ + void processCameraSettings(); + /*! \brief Generates an univoque color for each object class ID */ inline sl::float3 generateColorClass(int idx) diff --git a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp index 64d6f9a2..7f377b49 100644 --- a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp +++ b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp @@ -1027,7 +1027,7 @@ void ZEDWrapperNodelet::readMappingParams() NODELET_INFO_STREAM(" * Mapping\t\t\t-> DISABLED"); } } - mNhNs.getParam("mapping/clicked_point_topic",mClickedPtTopic); + mNhNs.getParam("mapping/clicked_point_topic", mClickedPtTopic); NODELET_INFO_STREAM(" * Clicked point topic\t\t-> " << mClickedPtTopic.c_str()); } @@ -2046,9 +2046,16 @@ void ZEDWrapperNodelet::start_pos_tracking() { NODELET_INFO_STREAM("*** Starting Positional Tracking ***"); + mPosTrackingReady = false; // Useful to not publish wrong TF with IMU frame broadcasting + + if (mDepthDisabled) + { + NODELET_WARN("Cannot start Positional Tracking if `depth.depth_mode` is set to 'NONE'"); + return; + } + NODELET_INFO(" * Waiting for valid static transformations..."); - mPosTrackingReady = false; // Useful to not publish wrong TF with IMU frame broadcasting bool transformOk = false; double elapsed = 0.0; @@ -2062,7 +2069,7 @@ void ZEDWrapperNodelet::start_pos_tracking() elapsed = std::chrono::duration_cast(std::chrono::high_resolution_clock::now() - start) .count(); - std::this_thread::sleep_for(std::chrono::milliseconds(100)); + std::this_thread::sleep_for(std::chrono::milliseconds(500)); if (elapsed > 10000) { @@ -2095,7 +2102,7 @@ void ZEDWrapperNodelet::start_pos_tracking() posTrackParams.set_as_static = mIsStatic; posTrackParams.depth_min_range = static_cast(mPosTrkMinDepth); - if (mAreaMemDbPath != "" && 
!sl_tools::file_exist(mAreaMemDbPath)) + if (mAreaMemDbPath != "" && !sl_tools::file_exist(mAreaMemDbPath)) { posTrackParams.area_file_path = ""; NODELET_WARN_STREAM("area_memory_db_path [" << mAreaMemDbPath << "] doesn't exist or is unreachable. "); @@ -2127,7 +2134,7 @@ void ZEDWrapperNodelet::start_pos_tracking() { mPosTrackingActivated = false; - NODELET_WARN("Tracking not activated: %s", sl::toString(err).c_str()); + NODELET_WARN("Pos. Tracking not started: %s", sl::toString(err).c_str()); } } @@ -4181,211 +4188,7 @@ void ZEDWrapperNodelet::device_poll_thread_func() // <---- Publish freq calculation // ----> Camera Settings - int value; - sl::VIDEO_SETTINGS setting; - sl::ERROR_CODE err; - - if (!mSvoMode && mFrameCount % 5 == 0) - { - // NODELET_DEBUG_STREAM( "[" << mFrameCount << "] device_poll_thread_func MUTEX LOCK"); - mDynParMutex.lock(); - - setting = sl::VIDEO_SETTINGS::BRIGHTNESS; - err = mZed.getCameraSettings(setting, value); - if (err == sl::ERROR_CODE::SUCCESS && value != mCamBrightness) - { - err = mZed.setCameraSettings(setting, mCamBrightness); - } - if (err != sl::ERROR_CODE::SUCCESS) - { - NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); - } - else - { - NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamBrightness << " <- " << value); - mUpdateDynParams = true; - } - - setting = sl::VIDEO_SETTINGS::CONTRAST; - err = mZed.getCameraSettings(setting, value); - if (err == sl::ERROR_CODE::SUCCESS && value != mCamContrast) - { - err = mZed.setCameraSettings(setting, mCamContrast); - } - if (err != sl::ERROR_CODE::SUCCESS) - { - NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); - } - else - { - NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamContrast << " <- " << value); - mUpdateDynParams = true; - } - - setting = sl::VIDEO_SETTINGS::HUE; - err = mZed.getCameraSettings(setting, value); - if (err == sl::ERROR_CODE::SUCCESS && value != mCamHue) - { - err = mZed.setCameraSettings(setting, mCamHue); - } - if (err != sl::ERROR_CODE::SUCCESS) - { - NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); - } - else - { - NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamHue << " <- " << value); - mUpdateDynParams = true; - } - - setting = sl::VIDEO_SETTINGS::SATURATION; - err = mZed.getCameraSettings(setting, value); - if (err == sl::ERROR_CODE::SUCCESS && value != mCamSaturation) - { - err = mZed.setCameraSettings(setting, mCamSaturation); - } - if (err != sl::ERROR_CODE::SUCCESS) - { - NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); - } - else - { - NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamSaturation << " <- " << value); - mUpdateDynParams = true; - } - - setting = sl::VIDEO_SETTINGS::SHARPNESS; - err = mZed.getCameraSettings(setting, value); - if (err == sl::ERROR_CODE::SUCCESS && value != mCamSharpness) - { - err = mZed.setCameraSettings(setting, mCamSharpness); - } - if (err != sl::ERROR_CODE::SUCCESS) - { - NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); - } - else - { - NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamSharpness << " <- " << value); - mUpdateDynParams = true; - } - - setting = sl::VIDEO_SETTINGS::GAMMA; - err = mZed.getCameraSettings(setting, value); - if (err == sl::ERROR_CODE::SUCCESS && value != 
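The two new parameters map directly onto the ZED SDK positional-tracking configuration used in start_pos_tracking(). A condensed standalone sketch of the corresponding SDK calls, with camera opening and error handling reduced to a minimum (the values are placeholders and assume the default METER units):

    // pos_tracking_min.cpp - condensed sketch of the SDK calls wrapped by start_pos_tracking()
    #include <sl/Camera.hpp>

    int main()
    {
      sl::Camera zed;
      if (zed.open() != sl::ERROR_CODE::SUCCESS)
        return 1;

      sl::PositionalTrackingParameters trk_params;
      trk_params.set_as_static = false;   // counterpart of 'pos_tracking/set_as_static'
      trk_params.depth_min_range = 0.3f;  // counterpart of 'pos_tracking/depth_min_range'

      if (zed.enablePositionalTracking(trk_params) != sl::ERROR_CODE::SUCCESS)
      {
        zed.close();
        return 1;
      }

      sl::Pose cam_pose;
      while (zed.grab() == sl::ERROR_CODE::SUCCESS)
      {
        // Camera pose in the world frame
        zed.getPosition(cam_pose, sl::REFERENCE_FRAME::WORLD);
      }

      zed.close();
      return 0;
    }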
mCamGamma) - { - err = mZed.setCameraSettings(setting, mCamGamma); - } - if (err != sl::ERROR_CODE::SUCCESS) - { - NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); - } - else - { - NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamGamma << " <- " << value); - mUpdateDynParams = true; - } - - if (mTriggerAutoExposure) - { - setting = sl::VIDEO_SETTINGS::AEC_AGC; - err = mZed.getCameraSettings(setting, value); - int aec_agc = (mCamAutoExposure ? 1 : 0); - if (err == sl::ERROR_CODE::SUCCESS && value != aec_agc) - { - err = mZed.setCameraSettings(setting, aec_agc); - } - if (err != sl::ERROR_CODE::SUCCESS) - { - NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); - } - else - { - NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << aec_agc << " <- " << value); - mUpdateDynParams = true; - } - } - if (!mCamAutoExposure) - { - setting = sl::VIDEO_SETTINGS::EXPOSURE; - err = mZed.getCameraSettings(setting, value); - if (err == sl::ERROR_CODE::SUCCESS && value != mCamExposure) - { - err = mZed.setCameraSettings(setting, mCamExposure); - } - if (err != sl::ERROR_CODE::SUCCESS) - { - NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); - } - else - { - NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamExposure << " <- " << value); - mUpdateDynParams = true; - } - - setting = sl::VIDEO_SETTINGS::GAIN; - err = mZed.getCameraSettings(setting, value); - if (err == sl::ERROR_CODE::SUCCESS && value != mCamGain) - { - err = mZed.setCameraSettings(setting, mCamGain); - } - if (err != sl::ERROR_CODE::SUCCESS) - { - NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); - } - else - { - NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamGain << " <- " << value); - mUpdateDynParams = true; - } - } - - if (mTriggerAutoWB) - { - setting = sl::VIDEO_SETTINGS::WHITEBALANCE_AUTO; - err = mZed.getCameraSettings(setting, value); - int wb_auto = (mCamAutoWB ? 
1 : 0); - if (err == sl::ERROR_CODE::SUCCESS && value != wb_auto) - { - err = mZed.setCameraSettings(setting, wb_auto); - } - if (err != sl::ERROR_CODE::SUCCESS) - { - NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); - } - else - { - NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << wb_auto << " <- " << value); - mUpdateDynParams = true; - } - } - if (!mCamAutoWB) - { - setting = sl::VIDEO_SETTINGS::WHITEBALANCE_TEMPERATURE; - err = mZed.getCameraSettings(setting, value); - if (err == sl::ERROR_CODE::SUCCESS && value != mCamWB) - { - err = mZed.setCameraSettings(setting, mCamWB); - } - if (err != sl::ERROR_CODE::SUCCESS) - { - NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); - } - else - { - NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamWB << " <- " << value); - mUpdateDynParams = true; - } - } - mDynParMutex.unlock(); - // NODELET_DEBUG_STREAM( "device_poll_thread_func MUTEX UNLOCK"); - } - - if (mUpdateDynParams) - { - NODELET_DEBUG("Update Dynamic Parameters"); - updateDynamicReconfigure(); - } + processCameraSettings(); // <---- Camera Settings // Publish the point cloud if someone has subscribed to @@ -4794,6 +4597,215 @@ void ZEDWrapperNodelet::device_poll_thread_func() NODELET_DEBUG("ZED pool thread finished"); } +void ZEDWrapperNodelet::processCameraSettings() +{ + int value; + sl::VIDEO_SETTINGS setting; + sl::ERROR_CODE err; + + if (!mSvoMode && mFrameCount % 5 == 0) + { + // NODELET_DEBUG_STREAM( "[" << mFrameCount << "] device_poll_thread_func MUTEX LOCK"); + mDynParMutex.lock(); + + setting = sl::VIDEO_SETTINGS::BRIGHTNESS; + err = mZed.getCameraSettings(setting, value); + if (err == sl::ERROR_CODE::SUCCESS && value != mCamBrightness) + { + err = mZed.setCameraSettings(setting, mCamBrightness); + } + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); + } + else + { + NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamBrightness << " <- " << value); + mUpdateDynParams = true; + } + + setting = sl::VIDEO_SETTINGS::CONTRAST; + err = mZed.getCameraSettings(setting, value); + if (err == sl::ERROR_CODE::SUCCESS && value != mCamContrast) + { + err = mZed.setCameraSettings(setting, mCamContrast); + } + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); + } + else + { + NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamContrast << " <- " << value); + mUpdateDynParams = true; + } + + setting = sl::VIDEO_SETTINGS::HUE; + err = mZed.getCameraSettings(setting, value); + if (err == sl::ERROR_CODE::SUCCESS && value != mCamHue) + { + err = mZed.setCameraSettings(setting, mCamHue); + } + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); + } + else + { + NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamHue << " <- " << value); + mUpdateDynParams = true; + } + + setting = sl::VIDEO_SETTINGS::SATURATION; + err = mZed.getCameraSettings(setting, value); + if (err == sl::ERROR_CODE::SUCCESS && value != mCamSaturation) + { + err = mZed.setCameraSettings(setting, mCamSaturation); + } + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); + } + else 
+ { + NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamSaturation << " <- " << value); + mUpdateDynParams = true; + } + + setting = sl::VIDEO_SETTINGS::SHARPNESS; + err = mZed.getCameraSettings(setting, value); + if (err == sl::ERROR_CODE::SUCCESS && value != mCamSharpness) + { + err = mZed.setCameraSettings(setting, mCamSharpness); + } + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); + } + else + { + NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamSharpness << " <- " << value); + mUpdateDynParams = true; + } + + setting = sl::VIDEO_SETTINGS::GAMMA; + err = mZed.getCameraSettings(setting, value); + if (err == sl::ERROR_CODE::SUCCESS && value != mCamGamma) + { + err = mZed.setCameraSettings(setting, mCamGamma); + } + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); + } + else + { + NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamGamma << " <- " << value); + mUpdateDynParams = true; + } + + if (mTriggerAutoExposure) + { + setting = sl::VIDEO_SETTINGS::AEC_AGC; + err = mZed.getCameraSettings(setting, value); + int aec_agc = (mCamAutoExposure ? 1 : 0); + if (err == sl::ERROR_CODE::SUCCESS && value != aec_agc) + { + err = mZed.setCameraSettings(setting, aec_agc); + } + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); + } + else + { + NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << aec_agc << " <- " << value); + mUpdateDynParams = true; + } + } + if (!mCamAutoExposure) + { + setting = sl::VIDEO_SETTINGS::EXPOSURE; + err = mZed.getCameraSettings(setting, value); + if (err == sl::ERROR_CODE::SUCCESS && value != mCamExposure) + { + err = mZed.setCameraSettings(setting, mCamExposure); + } + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); + } + else + { + NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamExposure << " <- " << value); + mUpdateDynParams = true; + } + + setting = sl::VIDEO_SETTINGS::GAIN; + err = mZed.getCameraSettings(setting, value); + if (err == sl::ERROR_CODE::SUCCESS && value != mCamGain) + { + err = mZed.setCameraSettings(setting, mCamGain); + } + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); + } + else + { + NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamGain << " <- " << value); + mUpdateDynParams = true; + } + } + + if (mTriggerAutoWB) + { + setting = sl::VIDEO_SETTINGS::WHITEBALANCE_AUTO; + err = mZed.getCameraSettings(setting, value); + int wb_auto = (mCamAutoWB ? 
1 : 0); + if (err == sl::ERROR_CODE::SUCCESS && value != wb_auto) + { + err = mZed.setCameraSettings(setting, wb_auto); + } + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); + } + else + { + NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << wb_auto << " <- " << value); + mUpdateDynParams = true; + } + } + if (!mCamAutoWB) + { + setting = sl::VIDEO_SETTINGS::WHITEBALANCE_TEMPERATURE; + err = mZed.getCameraSettings(setting, value); + if (err == sl::ERROR_CODE::SUCCESS && value != mCamWB) + { + err = mZed.setCameraSettings(setting, mCamWB); + } + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); + } + else + { + NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamWB << " <- " << value); + mUpdateDynParams = true; + } + } + mDynParMutex.unlock(); + // NODELET_DEBUG_STREAM( "device_poll_thread_func MUTEX UNLOCK"); + } + + if (mUpdateDynParams) + { + NODELET_DEBUG("Update Dynamic Parameters"); + updateDynamicReconfigure(); + } +} + void ZEDWrapperNodelet::callback_updateDiagnostic(diagnostic_updater::DiagnosticStatusWrapper& stat) { if (mConnStatus != sl::ERROR_CODE::SUCCESS) From 5df31033a19e4e8e90e8f7140b8e85689b9155a9 Mon Sep 17 00:00:00 2001 From: Walter Lucetti Date: Thu, 14 Sep 2023 16:40:36 +0200 Subject: [PATCH 15/21] Positional tracking aligned to ROS 2 code --- CHANGELOG.rst | 4 + zed-ros-interfaces | 2 +- .../include/zed_wrapper_nodelet.hpp | 58 +- .../zed_nodelet/src/zed_wrapper_nodelet.cpp | 809 +++++++++--------- 4 files changed, 448 insertions(+), 425 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 00ccb922..fd9d3df5 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,6 +1,10 @@ CHANGELOG ========= +2023-09-14 +---------- +- Add pose and odometry status publishers + 2023-09-13 ---------- - Add ROS '.clang-format' diff --git a/zed-ros-interfaces b/zed-ros-interfaces index 206b7e2d..12f55da8 160000 --- a/zed-ros-interfaces +++ b/zed-ros-interfaces @@ -1 +1 @@ -Subproject commit 206b7e2d30505176f079168fbed45ed2dc916000 +Subproject commit 12f55da885dd3f0f68c168d0bccd33c4e892328d diff --git a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp index 327c2e13..a15799cb 100644 --- a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp +++ b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp @@ -175,6 +175,22 @@ class ZEDWrapperNodelet : public nodelet::Nodelet */ void sensors_thread_func(); + /*! \brief Publish odometry status message + */ + void publishPoseStatus(); + + /*! \brief Publish odometry status message + */ + void publishOdomStatus(); + + /*! \brief Process odometry information + */ + void processOdometry(); + + /*! \brief Process pose information + */ + void processPose(); + /*! \brief Publish the pose of the camera in "Map" frame with a ros Publisher * \param t : the ros::Time to stamp the image */ @@ -188,20 +204,20 @@ class ZEDWrapperNodelet : public nodelet::Nodelet */ void publishOdom(tf2::Transform odom2baseTransf, sl::Pose& slPose, ros::Time t); - /*! \brief Publish the pose of the camera in "Map" frame as a transformation - * \param baseTransform : Transformation representing the camera pose from - * odom frame to map frame + /*! 
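The body of the new processCameraSettings() repeats the same get/compare/set sequence for every sl::VIDEO_SETTINGS entry. A possible follow-up (not part of these patches) is to factor that sequence into a small helper; the sketch below mirrors the existing logic, with a hypothetical helper name and plain ROS logging macros standing in for the NODELET_* ones:

    // Hypothetical helper mirroring the repeated pattern in processCameraSettings()
    #include <ros/ros.h>
    #include <sl/Camera.hpp>

    static void syncCameraSetting(sl::Camera& zed, sl::VIDEO_SETTINGS setting, int target,
                                  bool& update_dyn_params)
    {
      int value = 0;
      sl::ERROR_CODE err = zed.getCameraSettings(setting, value);

      if (err == sl::ERROR_CODE::SUCCESS && value != target)
      {
        err = zed.setCameraSettings(setting, target);
      }

      if (err != sl::ERROR_CODE::SUCCESS)
      {
        ROS_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err));
      }
      else
      {
        ROS_DEBUG_STREAM(sl::toString(setting) << " changed: " << target << " <- " << value);
        update_dyn_params = true;
      }
    }

    // Inside processCameraSettings() the per-setting blocks would then reduce to calls such as:
    //   syncCameraSetting(mZed, sl::VIDEO_SETTINGS::BRIGHTNESS, mCamBrightness, mUpdateDynParams);
    //   syncCameraSetting(mZed, sl::VIDEO_SETTINGS::CONTRAST, mCamContrast, mUpdateDynParams);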
\brief Publish the odom -> base_link TF + * \param t : the ros::Time to stamp the image + */ + void publishTFs(ros::Time t); + + /*! \brief Publish the odom -> base_link TF * \param t : the ros::Time to stamp the image */ - void publishPoseFrame(tf2::Transform baseTransform, ros::Time t); + void publishOdomTF(ros::Time t); - /*! \brief Publish the odometry of the camera in "Odom" frame as a - * transformation - * \param odomTransf : Transformation representing the camera pose from - * base frame to odom frame + /*! \brief Publish the map -> odom TF * \param t : the ros::Time to stamp the image */ - void publishOdomFrame(tf2::Transform odomTransf, ros::Time t); + void publishPoseTF(ros::Time t); /*! * \brief Publish IMU frame once as static TF @@ -520,6 +536,9 @@ class ZEDWrapperNodelet : public nodelet::Nodelet ros::Publisher mPubMarker; // Publisher for Rviz markers ros::Publisher mPubPlane; // Publisher for detected planes + ros::Publisher mPubPoseStatus; + ros::Publisher mPubOdomStatus; + // Subscribers ros::Subscriber mClickedPtSub; @@ -560,9 +579,8 @@ class ZEDWrapperNodelet : public nodelet::Nodelet // <---- Topics // ROS TF - tf2_ros::TransformBroadcaster mTransformPoseBroadcaster; - tf2_ros::TransformBroadcaster mTransformOdomBroadcaster; - tf2_ros::StaticTransformBroadcaster mStaticTransformImuBroadcaster; + tf2_ros::TransformBroadcaster mTfBroadcaster; + tf2_ros::StaticTransformBroadcaster mStaticTfBroadcaster; std::string mRgbFrameId; std::string mRgbOptFrameId; @@ -580,7 +598,7 @@ class ZEDWrapperNodelet : public nodelet::Nodelet std::string mPointCloudFrameId; std::string mMapFrameId = "map"; - std::string mOdometryFrameId = "odom"; + std::string mOdomFrameId = "odom"; std::string mBaseFrameId = "base_link"; std::string mCameraFrameId; @@ -623,15 +641,16 @@ class ZEDWrapperNodelet : public nodelet::Nodelet // Positional tracking bool mPosTrackingEnabled = false; sl::POSITIONAL_TRACKING_MODE mPosTrkMode = sl::POSITIONAL_TRACKING_MODE::QUALITY; - bool mPosTrackingActivated = false; bool mPosTrackingReady = false; + bool mPosTrackingStarted = false; + bool mPosTrackingRequired = false; bool mTwoDMode = false; double mFixedZValue = 0.0; bool mFloorAlignment = false; bool mImuFusion = true; bool mSetGravityAsOrigin = false; - bool mPublishTf; - bool mPublishMapTf; + bool mPublishTF; + bool mPublishMapTF; bool mPublishImuTf; sl::FLIP_MODE mCameraFlip; bool mCameraSelfCalib; @@ -642,7 +661,8 @@ class ZEDWrapperNodelet : public nodelet::Nodelet bool mGrabActive = false; // Indicate if camera grabbing is active (at least one topic subscribed) sl::ERROR_CODE mConnStatus; sl::ERROR_CODE mGrabStatus; - sl::POSITIONAL_TRACKING_STATE mPosTrackingStatus; + sl::POSITIONAL_TRACKING_STATE mPosTrackingStatusWorld; + sl::POSITIONAL_TRACKING_STATE mPosTrackingStatusCamera; bool mSensPublishing = false; bool mPcPublishing = false; bool mDepthDisabled = false; @@ -657,6 +677,8 @@ class ZEDWrapperNodelet : public nodelet::Nodelet std::vector mInitialBasePose; std::vector mOdomPath; std::vector mMapPath; + ros::Time mLastTs_odom; + ros::Time mLastTs_pose; // IMU TF tf2::Transform mLastImuPose; @@ -716,7 +738,6 @@ class ZEDWrapperNodelet : public nodelet::Nodelet bool mPoseSmoothing = false; // Always disabled. 
Enable only for AR/VR applications bool mAreaMemory; bool mInitOdomWithPose; - bool mResetOdom = false; bool mUpdateDynParams = false; bool mPublishingData = false; @@ -739,6 +760,7 @@ class ZEDWrapperNodelet : public nodelet::Nodelet std::mutex mPcMutex; std::mutex mRecMutex; std::mutex mPosTrkMutex; + std::mutex mOdomMutex; std::mutex mDynParMutex; std::mutex mMappingMutex; std::mutex mObjDetMutex; diff --git a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp index 7f377b49..c1de1166 100644 --- a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp +++ b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp @@ -30,9 +30,10 @@ #include #endif -#include "zed_interfaces/Object.h" -#include "zed_interfaces/ObjectsStamped.h" +#include +#include #include +#include //#define DEBUG_SENS_TS 1 @@ -163,6 +164,9 @@ void ZEDWrapperNodelet::onInit() std::string odom_path_topic = "path_odom"; std::string map_path_topic = "path_map"; + std::string odomStatusTopic = odometryTopic + "/status"; + std::string poseStatusTopic = poseTopic + "/status"; + // Extracted plane topics std::string marker_topic = "plane_marker"; std::string plane_topic = "plane"; @@ -500,13 +504,17 @@ void ZEDWrapperNodelet::onInit() // Odometry and Pose publisher mPubPose = mNhNs.advertise(poseTopic, 1); NODELET_INFO_STREAM(" * Advertised on topic " << mPubPose.getTopic()); - mPubPoseCov = mNhNs.advertise(pose_cov_topic, 1); NODELET_INFO_STREAM(" * Advertised on topic " << mPubPoseCov.getTopic()); mPubOdom = mNhNs.advertise(odometryTopic, 1); NODELET_INFO_STREAM(" * Advertised on topic " << mPubOdom.getTopic()); + mPubOdomStatus = mNhNs.advertise(odomStatusTopic, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubOdomStatus.getTopic()); + mPubPoseStatus = mNhNs.advertise(poseStatusTopic, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubPoseStatus.getTopic()); + // Rviz markers publisher mPubMarker = mNhNs.advertise(marker_topic, 10, true); @@ -984,11 +992,11 @@ void ZEDWrapperNodelet::readPosTrkParams() NODELET_INFO_STREAM(" * Fixed Z value\t\t-> " << mFixedZValue); } - mNhNs.getParam("pos_tracking/publish_tf", mPublishTf); - NODELET_INFO_STREAM(" * Broadcast odometry TF\t-> " << (mPublishTf ? "ENABLED" : "DISABLED")); - mNhNs.getParam("pos_tracking/publish_map_tf", mPublishMapTf); + mNhNs.getParam("pos_tracking/publish_tf", mPublishTF); + NODELET_INFO_STREAM(" * Broadcast odometry TF\t-> " << (mPublishTF ? "ENABLED" : "DISABLED")); + mNhNs.getParam("pos_tracking/publish_map_tf", mPublishMapTF); NODELET_INFO_STREAM(" * Broadcast map pose TF\t-> " - << (mPublishTf ? (mPublishMapTf ? "ENABLED" : "DISABLED") : "DISABLED")); + << (mPublishTF ? (mPublishMapTF ? "ENABLED" : "DISABLED") : "DISABLED")); mNhNs.getParam("pos_tracking/set_as_static", mIsStatic); NODELET_INFO_STREAM(" * Camera is static\t\t-> " << (mIsStatic ? 
"ENABLED" : "DISABLED")); @@ -1251,7 +1259,7 @@ void ZEDWrapperNodelet::readParameters() NODELET_INFO_STREAM("*** COORDINATE FRAMES ***"); mNhNs.getParam("pos_tracking/map_frame", mMapFrameId); - mNhNs.getParam("pos_tracking/odometry_frame", mOdometryFrameId); + mNhNs.getParam("pos_tracking/odometry_frame", mOdomFrameId); mNhNs.getParam("general/base_frame", mBaseFrameId); mCameraFrameId = mCameraName + "_camera_center"; @@ -1289,7 +1297,7 @@ void ZEDWrapperNodelet::readParameters() if (!mDepthDisabled) { NODELET_INFO_STREAM(" * map_frame\t\t\t-> " << mMapFrameId); - NODELET_INFO_STREAM(" * odometry_frame\t\t-> " << mOdometryFrameId); + NODELET_INFO_STREAM(" * odometry_frame\t\t-> " << mOdomFrameId); NODELET_INFO_STREAM(" * base_frame\t\t\t-> " << mBaseFrameId); NODELET_INFO_STREAM(" * depth_frame\t\t\t-> " << mDepthFrameId); NODELET_INFO_STREAM(" * depth_optical_frame\t\t-> " << mDepthOptFrameId); @@ -1819,7 +1827,7 @@ bool ZEDWrapperNodelet::on_set_pose(zed_interfaces::set_pose::Request& req, zed_ bool ZEDWrapperNodelet::on_reset_tracking(zed_interfaces::reset_tracking::Request& req, zed_interfaces::reset_tracking::Response& res) { - if (!mPosTrackingActivated) + if (!mPosTrackingStarted) { res.reset_done = false; return false; @@ -1837,7 +1845,10 @@ bool ZEDWrapperNodelet::on_reset_tracking(zed_interfaces::reset_tracking::Reques bool ZEDWrapperNodelet::on_reset_odometry(zed_interfaces::reset_odometry::Request& req, zed_interfaces::reset_odometry::Response& res) { - mResetOdom = true; + std::lock_guard lock(mOdomMutex); + mOdom2BaseTransf.setIdentity(); + mOdomPath.clear(); + res.reset_done = true; return true; } @@ -2128,11 +2139,11 @@ void ZEDWrapperNodelet::start_pos_tracking() if (err == sl::ERROR_CODE::SUCCESS) { - mPosTrackingActivated = true; + mPosTrackingStarted = true; } else { - mPosTrackingActivated = false; + mPosTrackingStarted = false; NODELET_WARN("Pos. 
Tracking not started: %s", sl::toString(err).c_str()); } @@ -2168,7 +2179,7 @@ bool ZEDWrapperNodelet::saveAreaMap(std::string file_path, std::string* out_msg) return false; } - if (mPosTrackingActivated && mAreaMemory) + if (mPosTrackingStarted && mAreaMemory) { sl::ERROR_CODE err = mZed.saveAreaMap(sl::String(file_path.c_str())); if (err != sl::ERROR_CODE::SUCCESS) @@ -2238,8 +2249,8 @@ void ZEDWrapperNodelet::publishOdom(tf2::Transform odom2baseTransf, sl::Pose& sl nav_msgs::OdometryPtr odomMsg = boost::make_shared(); odomMsg->header.stamp = t; - odomMsg->header.frame_id = mOdometryFrameId; // frame - odomMsg->child_frame_id = mBaseFrameId; // camera_frame + odomMsg->header.frame_id = mOdomFrameId; // frame + odomMsg->child_frame_id = mBaseFrameId; // camera_frame // conversion from Tranform to message geometry_msgs::Transform base2odom = tf2::toMsg(odom2baseTransf); // Add all value in odometry message @@ -2280,11 +2291,11 @@ void ZEDWrapperNodelet::publishPose(ros::Time t) tf2::Transform base_pose; base_pose.setIdentity(); - if (mPublishMapTf) + if (mPublishMapTF) { base_pose = mMap2BaseTransf; } - else if (mPublishTf) + else if (mPublishTF) { base_pose = mOdom2BaseTransf; } @@ -2376,91 +2387,13 @@ void ZEDWrapperNodelet::publishStaticImuFrame() mStaticImuTransformStamped.transform.rotation.w = sl_or.ow; // Publish transformation - mStaticTransformImuBroadcaster.sendTransform(mStaticImuTransformStamped); + mStaticTfBroadcaster.sendTransform(mStaticImuTransformStamped); NODELET_INFO_STREAM("Published static transform '" << mImuFrameId << "' -> '" << mLeftCamFrameId << "'"); mStaticImuFramePublished = true; } -void ZEDWrapperNodelet::publishOdomFrame(tf2::Transform odomTransf, ros::Time t) -{ - // ----> Avoid duplicated TF publishing - static ros::Time last_stamp; - - if (t == last_stamp) - { - return; - } - last_stamp = t; - // <---- Avoid duplicated TF publishing - - if (!mSensor2BaseTransfValid) - { - getSens2BaseTransform(); - } - - if (!mSensor2CameraTransfValid) - { - getSens2CameraTransform(); - } - - if (!mCamera2BaseTransfValid) - { - getCamera2BaseTransform(); - } - - geometry_msgs::TransformStamped transformStamped; - transformStamped.header.stamp = t; - transformStamped.header.frame_id = mOdometryFrameId; - transformStamped.child_frame_id = mBaseFrameId; - // conversion from Tranform to message - transformStamped.transform = tf2::toMsg(odomTransf); - // Publish transformation - mTransformOdomBroadcaster.sendTransform(transformStamped); - - // NODELET_INFO_STREAM( "Published ODOM TF with TS: " << t ); -} - -void ZEDWrapperNodelet::publishPoseFrame(tf2::Transform baseTransform, ros::Time t) -{ - // ----> Avoid duplicated TF publishing - static ros::Time last_stamp; - - if (t == last_stamp) - { - return; - } - last_stamp = t; - // <---- Avoid duplicated TF publishing - - if (!mSensor2BaseTransfValid) - { - getSens2BaseTransform(); - } - - if (!mSensor2CameraTransfValid) - { - getSens2CameraTransform(); - } - - if (!mCamera2BaseTransfValid) - { - getCamera2BaseTransform(); - } - - geometry_msgs::TransformStamped transformStamped; - transformStamped.header.stamp = t; - transformStamped.header.frame_id = mMapFrameId; - transformStamped.child_frame_id = mOdometryFrameId; - // conversion from Tranform to message - transformStamped.transform = tf2::toMsg(baseTransform); - // Publish transformation - mTransformPoseBroadcaster.sendTransform(transformStamped); - - // NODELET_INFO_STREAM( "Published POSE TF with TS: " << t ); -} - void 
ZEDWrapperNodelet::publishImage(sensor_msgs::ImagePtr imgMsgPtr, sl::Mat img, image_transport::CameraPublisher& pubImg, sensor_msgs::CameraInfoPtr camInfoMsg, std::string imgFrameId, ros::Time t) @@ -3545,23 +3478,6 @@ void ZEDWrapperNodelet::publishSensData(ros::Time t) return; } - // ----> Publish odometry tf only if enabled and not in SVO mode - if (!mSvoMode) - { - if (mPublishTf && mPosTrackingReady && new_imu_data) - { - // NODELET_DEBUG("Publishing TF"); - - publishOdomFrame(mOdom2BaseTransf, ts_imu); // publish the base Frame in odometry frame - - if (mPublishMapTf) - { - publishPoseFrame(mMap2OdomTransf, ts_imu); // publish the odometry Frame in map frame - } - } - } - // <---- Publish odometry tf only if enabled and not in SVO mode - if (mPublishImuTf && !mStaticImuFramePublished) { NODELET_DEBUG("Publishing static IMU TF"); @@ -3882,7 +3798,7 @@ void ZEDWrapperNodelet::device_poll_thread_func() } mPrevFrameTimestamp = mFrameTimestamp; - mPosTrackingActivated = false; + mPosTrackingStarted = false; mMappingRunning = false; mRecording = false; @@ -3994,7 +3910,7 @@ void ZEDWrapperNodelet::device_poll_thread_func() uint32_t stereoRawSubNumber = mPubRawStereo.getNumSubscribers(); mGrabActive = - mRecording || mStreaming || mMappingEnabled || mObjDetEnabled || mPosTrackingEnabled || mPosTrackingActivated || + mRecording || mStreaming || mMappingEnabled || mObjDetEnabled || mPosTrackingEnabled || mPosTrackingStarted || ((rgbSubnumber + rgbRawSubnumber + leftSubnumber + leftRawSubnumber + rightSubnumber + rightRawSubnumber + rgbGraySubnumber + rgbGrayRawSubnumber + leftGraySubnumber + leftGrayRawSubnumber + rightGraySubnumber + rightGrayRawSubnumber + depthSubnumber + disparitySubnumber + cloudSubnumber + poseSubnumber + @@ -4007,13 +3923,13 @@ void ZEDWrapperNodelet::device_poll_thread_func() std::lock_guard lock(mPosTrkMutex); // Note: once tracking is started it is never stopped anymore to not lose tracking information - bool computeTracking = - !mDepthDisabled && (mPosTrackingEnabled || mPosTrackingActivated || mMappingEnabled || mObjDetEnabled || + mPosTrackingRequired = + !mDepthDisabled && (mPosTrackingEnabled || mPosTrackingStarted || mMappingEnabled || mObjDetEnabled || (mComputeDepth & mDepthStabilization) || poseSubnumber > 0 || poseCovSubnumber > 0 || odomSubnumber > 0 || pathSubNumber > 0); // Start the tracking? - if ((computeTracking) && !mPosTrackingActivated && !mDepthDisabled) + if ((mPosTrackingRequired) && !mPosTrackingStarted && !mDepthDisabled) { start_pos_tracking(); } @@ -4036,7 +3952,7 @@ void ZEDWrapperNodelet::device_poll_thread_func() // Detect if one of the subscriber need to have the depth information mComputeDepth = !mDepthDisabled && - (computeTracking || + (mPosTrackingRequired || ((depthSubnumber + disparitySubnumber + cloudSubnumber + fusedCloudSubnumber + poseSubnumber + poseCovSubnumber + odomSubnumber + confMapSubnumber + objDetSubnumber) > 0)); @@ -4120,16 +4036,10 @@ void ZEDWrapperNodelet::device_poll_thread_func() std::this_thread::sleep_for(std::chrono::milliseconds(1000)); } - mPosTrackingActivated = false; - - // Note: once tracking is started it is never stopped anymore to not lose tracking information - bool computeTracking = - !mDepthDisabled && (mPosTrackingEnabled || mPosTrackingActivated || mMappingEnabled || mObjDetEnabled || - (mComputeDepth & mDepthStabilization) || poseSubnumber > 0 || - poseCovSubnumber > 0 || odomSubnumber > 0 || pathSubNumber > 0); + mPosTrackingStarted = false; // Start the tracking? 
- if ((computeTracking) && !mPosTrackingActivated && !mDepthDisabled) + if ((mPosTrackingRequired) && !mPosTrackingStarted && !mDepthDisabled) { start_pos_tracking(); } @@ -4217,262 +4127,29 @@ void ZEDWrapperNodelet::device_poll_thread_func() mPcPublishing = false; } + // ----> Object Detection mObjDetMutex.lock(); if (mObjDetRunning && objDetSubnumber > 0) { processDetectedObjects(stamp); } mObjDetMutex.unlock(); + // <---- Object Detection - // Publish the odometry if someone has subscribed to - if (computeTracking) + // ----> Process Positional Tracking + if (!mDepthDisabled) { - if (!mSensor2BaseTransfValid) - { - getSens2BaseTransform(); - } - - if (!mSensor2CameraTransfValid) - { - getSens2CameraTransform(); - } - - if (!mCamera2BaseTransfValid) + if (mPosTrackingStarted) { - getCamera2BaseTransform(); + processOdometry(); + processPose(); } - if (!mInitOdomWithPose) - { - sl::Pose deltaOdom; - mPosTrackingStatus = mZed.getPosition(deltaOdom, sl::REFERENCE_FRAME::CAMERA); - - sl::Translation translation = deltaOdom.getTranslation(); - sl::Orientation quat = deltaOdom.getOrientation(); - -#if 0 - NODELET_DEBUG("delta ODOM [%s] - %.2f,%.2f,%.2f %.2f,%.2f,%.2f,%.2f", - sl::toString(mTrackingStatus).c_str(), - translation(0), translation(1), translation(2), - quat(0), quat(1), quat(2), quat(3)); - - NODELET_DEBUG_STREAM("ODOM -> Tracking Status: " << sl::toString(mTrackingStatus)); -#endif - - if (mPosTrackingStatus == sl::POSITIONAL_TRACKING_STATE::OK || - mPosTrackingStatus == sl::POSITIONAL_TRACKING_STATE::SEARCHING || - mPosTrackingStatus == sl::POSITIONAL_TRACKING_STATE::FPS_TOO_LOW) - { - // Transform ZED delta odom pose in TF2 Transformation - geometry_msgs::Transform deltaTransf; - deltaTransf.translation.x = translation(0); - deltaTransf.translation.y = translation(1); - deltaTransf.translation.z = translation(2); - deltaTransf.rotation.x = quat(0); - deltaTransf.rotation.y = quat(1); - deltaTransf.rotation.z = quat(2); - deltaTransf.rotation.w = quat(3); - tf2::Transform deltaOdomTf; - tf2::fromMsg(deltaTransf, deltaOdomTf); - // delta odom from sensor to base frame - tf2::Transform deltaOdomTf_base = mSensor2BaseTransf.inverse() * deltaOdomTf * mSensor2BaseTransf; - - // Propagate Odom transform in time - mOdom2BaseTransf = mOdom2BaseTransf * deltaOdomTf_base; - - if (mTwoDMode) - { - tf2::Vector3 tr_2d = mOdom2BaseTransf.getOrigin(); - tr_2d.setZ(mFixedZValue); - mOdom2BaseTransf.setOrigin(tr_2d); - - double roll, pitch, yaw; - tf2::Matrix3x3(mOdom2BaseTransf.getRotation()).getRPY(roll, pitch, yaw); - - tf2::Quaternion quat_2d; - quat_2d.setRPY(0.0, 0.0, yaw); - - mOdom2BaseTransf.setRotation(quat_2d); - } - -#if 0 //#ifndef NDEBUG // Enable for TF checking - double roll, pitch, yaw; - tf2::Matrix3x3(mOdom2BaseTransf.getRotation()).getRPY(roll, pitch, yaw); - - NODELET_DEBUG("+++ Odometry [%s -> %s] - {%.3f,%.3f,%.3f} {%.3f,%.3f,%.3f}", - mOdometryFrameId.c_str(), mBaseFrameId.c_str(), - mOdom2BaseTransf.getOrigin().x(), mOdom2BaseTransf.getOrigin().y(), mOdom2BaseTransf.getOrigin().z(), - roll * RAD2DEG, pitch * RAD2DEG, yaw * RAD2DEG); -#endif - - // Publish odometry message - if (odomSubnumber > 0) - { - publishOdom(mOdom2BaseTransf, deltaOdom, stamp); - } - - mPosTrackingReady = true; - } - } - else if (mFloorAlignment) - { - NODELET_WARN_THROTTLE(5.0, - "Odometry will be published as soon as the floor as been detected for the first " - "time"); - } - } - - // Publish the zed camera pose if someone has subscribed to - if (computeTracking) - { - mPosTrackingStatus = 
mZed.getPosition(mLastZedPose, sl::REFERENCE_FRAME::WORLD); - - sl::Translation translation = mLastZedPose.getTranslation(); - sl::Orientation quat = mLastZedPose.getOrientation(); - -#if 0 //#ifndef NDEBUG // Enable for TF checking - double roll, pitch, yaw; - tf2::Matrix3x3(tf2::Quaternion(quat.ox, quat.oy, quat.oz, quat.ow)).getRPY(roll, pitch, yaw); - - NODELET_DEBUG("Sensor POSE [%s -> %s] - {%.2f,%.2f,%.2f} {%.2f,%.2f,%.2f}", - mLeftCamFrameId.c_str(), mMapFrameId.c_str(), - translation.x, translation.y, translation.z, - roll * RAD2DEG, pitch * RAD2DEG, yaw * RAD2DEG); - - NODELET_DEBUG_STREAM("MAP -> Tracking Status: " << sl::toString(mTrackingStatus)); - -#endif - - static sl::POSITIONAL_TRACKING_STATE old_tracking_state = sl::POSITIONAL_TRACKING_STATE::OFF; - if (mPosTrackingStatus == sl::POSITIONAL_TRACKING_STATE::OK || - mPosTrackingStatus == sl::POSITIONAL_TRACKING_STATE::SEARCHING - /*|| status == sl::POSITIONAL_TRACKING_STATE::FPS_TOO_LOW*/) - { - if (mPosTrackingStatus == sl::POSITIONAL_TRACKING_STATE::OK && mPosTrackingStatus != old_tracking_state) - { - NODELET_INFO_STREAM("Positional tracking -> OK [" << sl::toString(mPosTrackingStatus).c_str() << "]"); - old_tracking_state = mPosTrackingStatus; - } - if (mPosTrackingStatus == sl::POSITIONAL_TRACKING_STATE::SEARCHING && - mPosTrackingStatus != old_tracking_state) - { - NODELET_INFO_STREAM("Positional tracking -> Searching for a known position [" - << sl::toString(mPosTrackingStatus).c_str() << "]"); - old_tracking_state = mPosTrackingStatus; - } - // Transform ZED pose in TF2 Transformation - geometry_msgs::Transform map2sensTransf; - - map2sensTransf.translation.x = translation(0); - map2sensTransf.translation.y = translation(1); - map2sensTransf.translation.z = translation(2); - map2sensTransf.rotation.x = quat(0); - map2sensTransf.rotation.y = quat(1); - map2sensTransf.rotation.z = quat(2); - map2sensTransf.rotation.w = quat(3); - tf2::Transform map_to_sens_transf; - tf2::fromMsg(map2sensTransf, map_to_sens_transf); - - mMap2BaseTransf = map_to_sens_transf * mSensor2BaseTransf; // Base position in map frame - - if (mTwoDMode) - { - tf2::Vector3 tr_2d = mMap2BaseTransf.getOrigin(); - tr_2d.setZ(mFixedZValue); - mMap2BaseTransf.setOrigin(tr_2d); - - double roll, pitch, yaw; - tf2::Matrix3x3(mMap2BaseTransf.getRotation()).getRPY(roll, pitch, yaw); - - tf2::Quaternion quat_2d; - quat_2d.setRPY(0.0, 0.0, yaw); - - mMap2BaseTransf.setRotation(quat_2d); - } - -#if 0 //#ifndef NDEBUG // Enable for TF checking - double roll, pitch, yaw; - tf2::Matrix3x3(mMap2BaseTransf.getRotation()).getRPY(roll, pitch, yaw); - - NODELET_DEBUG("*** Base POSE [%s -> %s] - {%.3f,%.3f,%.3f} {%.3f,%.3f,%.3f}", mMapFrameId.c_str(), - mBaseFrameId.c_str(), mMap2BaseTransf.getOrigin().x(), mMap2BaseTransf.getOrigin().y(), - mMap2BaseTransf.getOrigin().z(), roll * RAD2DEG, pitch * RAD2DEG, yaw * RAD2DEG); -#endif - - bool initOdom = false; - - if (!(mFloorAlignment)) - { - initOdom = mInitOdomWithPose; - } - else - { - initOdom = (mPosTrackingStatus == sl::POSITIONAL_TRACKING_STATE::OK) & mInitOdomWithPose; - } - - if (initOdom || mResetOdom) - { - NODELET_INFO("Odometry aligned to last tracking pose"); - - // Propagate Odom transform in time - mOdom2BaseTransf = mMap2BaseTransf; - mMap2BaseTransf.setIdentity(); - - if (odomSubnumber > 0) - { - // Publish odometry message - publishOdom(mOdom2BaseTransf, mLastZedPose, mFrameTimestamp); - } - - mInitOdomWithPose = false; - mResetOdom = false; - } - else - { - // Transformation from map to odometry frame 
- // mMap2OdomTransf = mOdom2BaseTransf.inverse() * mMap2BaseTransf; - mMap2OdomTransf = mMap2BaseTransf * mOdom2BaseTransf.inverse(); - -#if 0 //#ifndef NDEBUG // Enable for TF checking - double roll, pitch, yaw; - tf2::Matrix3x3(mMap2OdomTransf.getRotation()).getRPY(roll, pitch, yaw); - - NODELET_DEBUG("+++ Diff [%s -> %s] - {%.3f,%.3f,%.3f} {%.3f,%.3f,%.3f}", - mMapFrameId.c_str(), mOdometryFrameId.c_str(), - mMap2OdomTransf.getOrigin().x(), mMap2OdomTransf.getOrigin().y(), mMap2OdomTransf.getOrigin().z(), - roll * RAD2DEG, pitch * RAD2DEG, yaw * RAD2DEG); -#endif - } - - // Publish Pose message - if ((poseSubnumber + poseCovSubnumber) > 0) - { - publishPose(stamp); - } - - mPosTrackingReady = true; - } - } - - // Note: in SVO mode the TF must not be broadcasted at the same frequency of the IMU data to avoid timestamp - // issues - if (mZedRealCamModel == sl::MODEL::ZED || mSvoMode) - { - // Publish pose tf only if enabled - if (mPublishTf) - { - // Note, the frame is published, but its values will only change if - // someone has subscribed to odom - publishOdomFrame(mOdom2BaseTransf, stamp); // publish the base Frame in odometry frame - - if (mPublishMapTf) - { - // Note, the frame is published, but its values will only change if - // someone has subscribed to map - publishPoseFrame(mMap2OdomTransf, stamp); // publish the odometry Frame in map frame - } - } + // Publish `odom` and `map` TFs at the grab frequency + // RCLCPP_INFO(get_logger(), "Publishing TF -> threadFunc_zedGrab"); + publishTFs(mFrameTimestamp); } + // <---- Process Positional Tracking #if 0 //#ifndef NDEBUG // Enable for TF checking \ // Double check: map_to_pose must be equal to mMap2BaseTransf @@ -4539,36 +4216,6 @@ void ZEDWrapperNodelet::device_poll_thread_func() { NODELET_DEBUG_THROTTLE(5.0, "No topics subscribed by users"); - if (mSvoMode || mZedRealCamModel == sl::MODEL::ZED || !mPublishImuTf) - { - // Publish odometry tf only if enabled - if (mPublishTf) - { - ros::Time t; - - if (mSvoMode) - { - t = ros::Time::now(); - } - else - { - t = sl_tools::slTime2Ros(mZed.getTimestamp(sl::TIME_REFERENCE::CURRENT)); - } - - publishOdomFrame(mOdom2BaseTransf, mFrameTimestamp); // publish the base Frame in odometry frame - - if (mPublishMapTf) - { - publishPoseFrame(mMap2OdomTransf, mFrameTimestamp); // publish the odometry Frame in map frame - } - - if (mPublishImuTf && !mStaticImuFramePublished) - { - publishStaticImuFrame(); - } - } - } - std::this_thread::sleep_for(std::chrono::milliseconds(10)); // No subscribers, we just wait loop_rate.reset(); } @@ -4576,7 +4223,7 @@ void ZEDWrapperNodelet::device_poll_thread_func() mDiagUpdater.update(); } // while loop - if (mSaveAreaMapOnClosing && mPosTrackingActivated) + if (mSaveAreaMapOnClosing && mPosTrackingStarted) { saveAreaMap(mAreaMemDbPath); } @@ -4595,7 +4242,7 @@ void ZEDWrapperNodelet::device_poll_thread_func() mZed.close(); NODELET_DEBUG("ZED pool thread finished"); -} +} // namespace zed_nodelets void ZEDWrapperNodelet::processCameraSettings() { @@ -4870,9 +4517,10 @@ void ZEDWrapperNodelet::callback_updateDiagnostic(diagnostic_updater::Diagnostic } } - if (mPosTrackingActivated) + if (mPosTrackingStarted) { - stat.addf("Tracking status", "%s", sl::toString(mPosTrackingStatus).c_str()); + stat.addf("Pos. Tracking status [Pose]", "%s", sl::toString(mPosTrackingStatusWorld).c_str()); + stat.addf("Pos. 
Tracking status [Odometry]", "%s", sl::toString(mPosTrackingStatusCamera).c_str()); } else { @@ -5821,4 +5469,353 @@ void ZEDWrapperNodelet::clickedPtCallback(geometry_msgs::PointStampedConstPtr ms } } +void ZEDWrapperNodelet::publishPoseStatus() +{ + size_t statusSub = mPubPoseStatus.getNumSubscribers(); + + if (statusSub > 0) + { + zed_interfaces::PosTrackStatusPtr msg = boost::make_shared(); + msg->status = static_cast(mPosTrackingStatusWorld); + + mPubPoseStatus.publish(msg); + } +} + +void ZEDWrapperNodelet::publishOdomStatus() +{ + size_t statusSub = mPubOdomStatus.getNumSubscribers(); + + if (statusSub > 0) + { + zed_interfaces::PosTrackStatusPtr msg = boost::make_shared(); + msg->status = static_cast(mPosTrackingStatusCamera); + + mPubOdomStatus.publish(msg); + } +} + +void ZEDWrapperNodelet::processOdometry() +{ + if (!mSensor2BaseTransfValid) + { + getSens2BaseTransform(); + } + + if (!mSensor2CameraTransfValid) + { + getSens2CameraTransform(); + } + + if (!mCamera2BaseTransfValid) + { + getCamera2BaseTransform(); + } + + if (!mInitOdomWithPose) + { + sl::Pose deltaOdom; + + mPosTrackingStatusCamera = mZed.getPosition(deltaOdom, sl::REFERENCE_FRAME::CAMERA); + publishOdomStatus(); + + NODELET_DEBUG("delta ODOM [%s]:\n%s", sl::toString(mPosTrackingStatusCamera).c_str(), + deltaOdom.pose_data.getInfos().c_str()); + + if (mPosTrackingStatusCamera == sl::POSITIONAL_TRACKING_STATE::OK || + mPosTrackingStatusCamera == sl::POSITIONAL_TRACKING_STATE::SEARCHING || + mPosTrackingStatusCamera == sl::POSITIONAL_TRACKING_STATE::FPS_TOO_LOW) + { + sl::Translation translation = deltaOdom.getTranslation(); + sl::Orientation quat = deltaOdom.getOrientation(); + + // Transform ZED delta odom pose in TF2 Transformation + tf2::Transform deltaOdomTf; + deltaOdomTf.setOrigin(tf2::Vector3(translation(0), translation(1), translation(2))); + // w at the end in the constructor + deltaOdomTf.setRotation(tf2::Quaternion(quat(0), quat(1), quat(2), quat(3))); + + // delta odom from sensor to base frame + tf2::Transform deltaOdomTf_base = mSensor2BaseTransf.inverse() * deltaOdomTf * mSensor2BaseTransf; + // TODO(Walter) Use tf2::transform instead?
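+      // Change of basis: the motion increment measured in the camera/sensor frame is re-expressed in the base frame by conjugating with mSensor2BaseTransf, so it can be chained onto mOdom2BaseTransf below.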
+ + // Propagate Odom transform in time + mOdom2BaseTransf = mOdom2BaseTransf * deltaOdomTf_base; + + if (mTwoDMode) + { + tf2::Vector3 tr_2d = mOdom2BaseTransf.getOrigin(); + tr_2d.setZ(mFixedZValue); + mOdom2BaseTransf.setOrigin(tr_2d); + + double roll, pitch, yaw; + tf2::Matrix3x3(mOdom2BaseTransf.getRotation()).getRPY(roll, pitch, yaw); + + tf2::Quaternion quat_2d; + quat_2d.setRPY(0.0, 0.0, yaw); + + mOdom2BaseTransf.setRotation(quat_2d); + } + + double roll, pitch, yaw; + tf2::Matrix3x3(mOdom2BaseTransf.getRotation()).getRPY(roll, pitch, yaw); + + NODELET_DEBUG("+++ Odometry [%s -> %s] - {%.3f,%.3f,%.3f} {%.3f,%.3f,%.3f}", mOdomFrameId.c_str(), + mBaseFrameId.c_str(), mOdom2BaseTransf.getOrigin().x(), mOdom2BaseTransf.getOrigin().y(), + mOdom2BaseTransf.getOrigin().z(), roll * RAD2DEG, pitch * RAD2DEG, yaw * RAD2DEG); + + // Publish odometry message + publishOdom(mOdom2BaseTransf, deltaOdom, mFrameTimestamp); + mPosTrackingReady = true; + } + } + else if (mFloorAlignment) + { + NODELET_DEBUG_STREAM_THROTTLE(5.0, + "Odometry will be published as soon as the floor as " + "been detected for the first time"); + } +} + +void ZEDWrapperNodelet::processPose() +{ + if (!mSensor2BaseTransfValid) + { + getSens2BaseTransform(); + } + + if (!mSensor2CameraTransfValid) + { + getSens2CameraTransform(); + } + + if (!mCamera2BaseTransfValid) + { + getCamera2BaseTransform(); + } + + mPosTrackingStatusWorld = mZed.getPosition(mLastZedPose, sl::REFERENCE_FRAME::WORLD); + + publishPoseStatus(); + + sl::Translation translation = mLastZedPose.getTranslation(); + sl::Orientation quat = mLastZedPose.getOrientation(); + + if (quat.sum() == 0) + { + return; + } + + NODELET_DEBUG_STREAM("MAP -> Tracking Status: " << sl::toString(mPosTrackingStatusWorld).c_str()); + NODELET_DEBUG("Sensor POSE [%s -> %s]:\n%s}", mLeftCamFrameId.c_str(), mMapFrameId.c_str(), + mLastZedPose.pose_data.getInfos().c_str()); + + if (mPosTrackingStatusWorld == sl::POSITIONAL_TRACKING_STATE::OK || + mPosTrackingStatusWorld == sl::POSITIONAL_TRACKING_STATE::SEARCHING) + { + double roll, pitch, yaw; + tf2::Matrix3x3(tf2::Quaternion(quat.ox, quat.oy, quat.oz, quat.ow)).getRPY(roll, pitch, yaw); + + tf2::Transform map_to_sens_transf; + map_to_sens_transf.setOrigin(tf2::Vector3(translation(0), translation(1), translation(2))); + map_to_sens_transf.setRotation(tf2::Quaternion(quat(0), quat(1), quat(2), quat(3))); + + mMap2BaseTransf = map_to_sens_transf * mSensor2BaseTransf; // Base position in map frame + + if (mTwoDMode) + { + tf2::Vector3 tr_2d = mMap2BaseTransf.getOrigin(); + tr_2d.setZ(mFixedZValue); + mMap2BaseTransf.setOrigin(tr_2d); + + double roll, pitch, yaw; + tf2::Matrix3x3(mMap2BaseTransf.getRotation()).getRPY(roll, pitch, yaw); + + tf2::Quaternion quat_2d; + quat_2d.setRPY(0.0, 0.0, yaw); + + mMap2BaseTransf.setRotation(quat_2d); + } + + // double roll, pitch, yaw; + tf2::Matrix3x3(mMap2BaseTransf.getRotation()).getRPY(roll, pitch, yaw); + + NODELET_DEBUG("*** Base POSE [%s -> %s] - {%.3f,%.3f,%.3f} {%.3f,%.3f,%.3f}", mMapFrameId.c_str(), + mBaseFrameId.c_str(), mMap2BaseTransf.getOrigin().x(), mMap2BaseTransf.getOrigin().y(), + mMap2BaseTransf.getOrigin().z(), roll * RAD2DEG, pitch * RAD2DEG, yaw * RAD2DEG); + + bool initOdom = false; + + if (!(mFloorAlignment)) + { + initOdom = mInitOdomWithPose; + } + else + { + initOdom = mInitOdomWithPose & (mPosTrackingStatusWorld == sl::POSITIONAL_TRACKING_STATE::OK); + } + + if (initOdom) + { + NODELET_INFO("Odometry aligned to last tracking pose"); + + // Propagate Odom transform in 
time + mOdom2BaseTransf = mMap2BaseTransf; + mMap2BaseTransf.setIdentity(); + + double roll, pitch, yaw; + tf2::Matrix3x3(mOdom2BaseTransf.getRotation()).getRPY(roll, pitch, yaw); + + NODELET_INFO(" * Initial odometry [%s -> %s] - {%.3f,%.3f,%.3f} {%.3f,%.3f,%.3f}", mOdomFrameId.c_str(), + mBaseFrameId.c_str(), mOdom2BaseTransf.getOrigin().x(), mOdom2BaseTransf.getOrigin().y(), + mOdom2BaseTransf.getOrigin().z(), roll * RAD2DEG, pitch * RAD2DEG, yaw * RAD2DEG); + + mInitOdomWithPose = false; + } + else + { + // Transformation from map to odometry frame + mMap2OdomTransf = mMap2BaseTransf * mOdom2BaseTransf.inverse(); + + double roll, pitch, yaw; + tf2::Matrix3x3(mMap2OdomTransf.getRotation()).getRPY(roll, pitch, yaw); + + NODELET_DEBUG("+++ Diff [%s -> %s] - {%.3f,%.3f,%.3f} {%.3f,%.3f,%.3f}", mMapFrameId.c_str(), + mOdomFrameId.c_str(), mMap2OdomTransf.getOrigin().x(), mMap2OdomTransf.getOrigin().y(), + mMap2OdomTransf.getOrigin().z(), roll * RAD2DEG, pitch * RAD2DEG, yaw * RAD2DEG); + } + + // Publish Pose message + publishPose(mFrameTimestamp); + mPosTrackingReady = true; + } +} + +void ZEDWrapperNodelet::publishTFs(ros::Time t) +{ + // DEBUG_STREAM_PT("publishTFs"); + + // RCLCPP_INFO_STREAM(get_logger(), "publishTFs - t type:" << + // t.get_clock_type()); + + if (!mPosTrackingReady) + { + return; + } + + if (t.isZero()) + { + NODELET_DEBUG("Time zero: not publishing TFs"); + return; + } + + // Publish pose tf only if enabled + if (mDepthMode != sl::DEPTH_MODE::NONE && mPublishTF) + { + publishOdomTF(t); // publish the base Frame in odometry frame + + if (mPublishMapTF) + { + publishPoseTF(t); // publish the odometry Frame in map frame + } + } +} + +void ZEDWrapperNodelet::publishOdomTF(ros::Time t) +{ + // DEBUG_STREAM_PT("publishOdomTF"); + + // ----> Avoid duplicated TF publishing + if (t == mLastTs_odom) + { + return; + } + mLastTs_odom = t; + // <---- Avoid duplicated TF publishing + + if (!mSensor2BaseTransfValid) + { + getSens2BaseTransform(); + } + + if (!mSensor2CameraTransfValid) + { + getSens2CameraTransform(); + } + + if (!mCamera2BaseTransfValid) + { + getCamera2BaseTransform(); + } + + geometry_msgs::TransformStamped transformStamped; + + transformStamped.header.stamp = t; + + // RCLCPP_INFO_STREAM(get_logger(), "Odom TS: " << transformStamped.header.stamp); + + transformStamped.header.frame_id = mOdomFrameId; + transformStamped.child_frame_id = mBaseFrameId; + // conversion from Tranform to message + tf2::Vector3 translation = mOdom2BaseTransf.getOrigin(); + tf2::Quaternion quat = mOdom2BaseTransf.getRotation(); + transformStamped.transform.translation.x = translation.x(); + transformStamped.transform.translation.y = translation.y(); + transformStamped.transform.translation.z = translation.z(); + transformStamped.transform.rotation.x = quat.x(); + transformStamped.transform.rotation.y = quat.y(); + transformStamped.transform.rotation.z = quat.z(); + transformStamped.transform.rotation.w = quat.w(); + + // Publish transformation + mTfBroadcaster.sendTransform(transformStamped); +} + +void ZEDWrapperNodelet::publishPoseTF(ros::Time t) +{ + // DEBUG_STREAM_PT("publishPoseTF"); + + // ----> Avoid duplicated TF publishing + if (t == mLastTs_pose) + { + return; + } + mLastTs_pose = t; + // <---- Avoid duplicated TF publishing + + if (!mSensor2BaseTransfValid) + { + getSens2BaseTransform(); + } + + if (!mSensor2CameraTransfValid) + { + getSens2CameraTransform(); + } + + if (!mCamera2BaseTransfValid) + { + getCamera2BaseTransform(); + } + + geometry_msgs::TransformStamped 
transformStamped; + + transformStamped.header.stamp = t; + transformStamped.header.frame_id = mMapFrameId; + transformStamped.child_frame_id = mOdomFrameId; + // conversion from Tranform to message + tf2::Vector3 translation = mMap2OdomTransf.getOrigin(); + tf2::Quaternion quat = mMap2OdomTransf.getRotation(); + transformStamped.transform.translation.x = translation.x(); + transformStamped.transform.translation.y = translation.y(); + transformStamped.transform.translation.z = translation.z(); + transformStamped.transform.rotation.x = quat.x(); + transformStamped.transform.rotation.y = quat.y(); + transformStamped.transform.rotation.z = quat.z(); + transformStamped.transform.rotation.w = quat.w(); + + // Publish transformation + mTfBroadcaster.sendTransform(transformStamped); +} + } // namespace zed_nodelets From ca9a5dde11917f43661f340ecf7742d6269c4423 Mon Sep 17 00:00:00 2001 From: Walter Lucetti Date: Thu, 14 Sep 2023 17:19:32 +0200 Subject: [PATCH 16/21] Add odomMutex --- .../include/zed_wrapper_nodelet.hpp | 1 - .../zed_nodelet/src/zed_wrapper_nodelet.cpp | 55 ++++++------------- 2 files changed, 17 insertions(+), 39 deletions(-) diff --git a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp index a15799cb..35202f2f 100644 --- a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp +++ b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp @@ -687,7 +687,6 @@ class ZEDWrapperNodelet : public nodelet::Nodelet tf2::Transform mMap2OdomTransf; // Coordinates of the odometry frame in map frame tf2::Transform mOdom2BaseTransf; // Coordinates of the base in odometry frame tf2::Transform mMap2BaseTransf; // Coordinates of the base in map frame - tf2::Transform mMap2CameraTransf; // Coordinates of the camera in base frame tf2::Transform mSensor2BaseTransf; // Coordinates of the base frame in sensor frame tf2::Transform mSensor2CameraTransf; // Coordinates of the camera frame in sensor frame tf2::Transform mCamera2BaseTransf; // Coordinates of the base frame in camera frame diff --git a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp index c1de1166..a0c647be 100644 --- a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp +++ b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp @@ -1598,7 +1598,6 @@ void ZEDWrapperNodelet::initTransforms() mOdom2BaseTransf.setIdentity(); // broadcasted if `publish_tf` is true mMap2OdomTransf.setIdentity(); // broadcasted if `publish_map_tf` is true mMap2BaseTransf.setIdentity(); // used internally, but not broadcasted - mMap2CameraTransf.setIdentity(); // used internally, but not broadcasted // <---- Dynamic transforms } @@ -1848,7 +1847,7 @@ bool ZEDWrapperNodelet::on_reset_odometry(zed_interfaces::reset_odometry::Reques std::lock_guard lock(mOdomMutex); mOdom2BaseTransf.setIdentity(); mOdomPath.clear(); - + res.reset_done = true; return true; } @@ -3271,6 +3270,7 @@ void ZEDWrapperNodelet::callback_pubPath(const ros::TimerEvent& e) // conversion from Tranform to message geometry_msgs::Transform base2odom = tf2::toMsg(mOdom2BaseTransf); // Add all value in Pose message + mOdomMutex.lock(); odomPose.pose.position.x = base2odom.translation.x; odomPose.pose.position.y = base2odom.translation.y; odomPose.pose.position.z = base2odom.translation.z; @@ -3278,6 +3278,7 @@ void ZEDWrapperNodelet::callback_pubPath(const ros::TimerEvent& e) odomPose.pose.orientation.y = base2odom.rotation.y; 
odomPose.pose.orientation.z = base2odom.rotation.z; odomPose.pose.orientation.w = base2odom.rotation.w; + mOdomMutex.unlock(); mapPose.header.stamp = mFrameTimestamp; mapPose.header.frame_id = mMapFrameId; // frame @@ -3293,6 +3294,7 @@ void ZEDWrapperNodelet::callback_pubPath(const ros::TimerEvent& e) mapPose.pose.orientation.w = base2map.rotation.w; // Circular vector + std::lock_guard lock(mOdomMutex); if (mPathMaxCount != -1) { if (mOdomPath.size() == mPathMaxCount) @@ -5540,6 +5542,7 @@ void ZEDWrapperNodelet::processOdometry() // TODO(Walter) Use tf2::transform instead? // Propagate Odom transform in time + mOdomMutex.lock(); mOdom2BaseTransf = mOdom2BaseTransf * deltaOdomTf_base; if (mTwoDMode) @@ -5567,6 +5570,8 @@ void ZEDWrapperNodelet::processOdometry() // Publish odometry message publishOdom(mOdom2BaseTransf, deltaOdom, mFrameTimestamp); mPosTrackingReady = true; + + mOdomMutex.unlock(); } } else if (mFloorAlignment) @@ -5646,44 +5651,16 @@ void ZEDWrapperNodelet::processPose() bool initOdom = false; - if (!(mFloorAlignment)) - { - initOdom = mInitOdomWithPose; - } - else - { - initOdom = mInitOdomWithPose & (mPosTrackingStatusWorld == sl::POSITIONAL_TRACKING_STATE::OK); - } - - if (initOdom) - { - NODELET_INFO("Odometry aligned to last tracking pose"); - - // Propagate Odom transform in time - mOdom2BaseTransf = mMap2BaseTransf; - mMap2BaseTransf.setIdentity(); - - double roll, pitch, yaw; - tf2::Matrix3x3(mOdom2BaseTransf.getRotation()).getRPY(roll, pitch, yaw); + // Transformation from map to odometry frame + mOdomMutex.lock(); + mMap2OdomTransf = mMap2BaseTransf * mOdom2BaseTransf.inverse(); + mOdomMutex.unlock(); - NODELET_INFO(" * Initial odometry [%s -> %s] - {%.3f,%.3f,%.3f} {%.3f,%.3f,%.3f}", mOdomFrameId.c_str(), - mBaseFrameId.c_str(), mOdom2BaseTransf.getOrigin().x(), mOdom2BaseTransf.getOrigin().y(), - mOdom2BaseTransf.getOrigin().z(), roll * RAD2DEG, pitch * RAD2DEG, yaw * RAD2DEG); + tf2::Matrix3x3(mMap2OdomTransf.getRotation()).getRPY(roll, pitch, yaw); - mInitOdomWithPose = false; - } - else - { - // Transformation from map to odometry frame - mMap2OdomTransf = mMap2BaseTransf * mOdom2BaseTransf.inverse(); - - double roll, pitch, yaw; - tf2::Matrix3x3(mMap2OdomTransf.getRotation()).getRPY(roll, pitch, yaw); - - NODELET_DEBUG("+++ Diff [%s -> %s] - {%.3f,%.3f,%.3f} {%.3f,%.3f,%.3f}", mMapFrameId.c_str(), - mOdomFrameId.c_str(), mMap2OdomTransf.getOrigin().x(), mMap2OdomTransf.getOrigin().y(), - mMap2OdomTransf.getOrigin().z(), roll * RAD2DEG, pitch * RAD2DEG, yaw * RAD2DEG); - } + NODELET_DEBUG("+++ Diff [%s -> %s] - {%.3f,%.3f,%.3f} {%.3f,%.3f,%.3f}", mMapFrameId.c_str(), mOdomFrameId.c_str(), + mMap2OdomTransf.getOrigin().x(), mMap2OdomTransf.getOrigin().y(), mMap2OdomTransf.getOrigin().z(), + roll * RAD2DEG, pitch * RAD2DEG, yaw * RAD2DEG); // Publish Pose message publishPose(mFrameTimestamp); @@ -5757,8 +5734,10 @@ void ZEDWrapperNodelet::publishOdomTF(ros::Time t) transformStamped.header.frame_id = mOdomFrameId; transformStamped.child_frame_id = mBaseFrameId; // conversion from Tranform to message + mOdomMutex.lock(); tf2::Vector3 translation = mOdom2BaseTransf.getOrigin(); tf2::Quaternion quat = mOdom2BaseTransf.getRotation(); + mOdomMutex.unlock(); transformStamped.transform.translation.x = translation.x(); transformStamped.transform.translation.y = translation.y(); transformStamped.transform.translation.z = translation.z(); From ad8362cbbd68d6e548e377228682e96f0328c00c Mon Sep 17 00:00:00 2001 From: Walter Lucetti Date: Fri, 15 Sep 2023 10:51:19 +0200 
Subject: [PATCH 17/21] Improve odometry behavior --- .../include/zed_wrapper_nodelet.hpp | 3 +- .../zed_nodelet/src/zed_wrapper_nodelet.cpp | 274 +++++++++--------- 2 files changed, 133 insertions(+), 144 deletions(-) diff --git a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp index 35202f2f..5fffa5b6 100644 --- a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp +++ b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp @@ -192,9 +192,8 @@ class ZEDWrapperNodelet : public nodelet::Nodelet void processPose(); /*! \brief Publish the pose of the camera in "Map" frame with a ros Publisher - * \param t : the ros::Time to stamp the image */ - void publishPose(ros::Time t); + void publishPose(); /*! \brief Publish the pose of the camera in "Odom" frame with a ros Publisher * \param base2odomTransf : Transformation representing the camera pose diff --git a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp index a0c647be..8191e126 100644 --- a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp +++ b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp @@ -1595,10 +1595,10 @@ void ZEDWrapperNodelet::initTransforms() // ------------------- // ----> Dynamic transforms - mOdom2BaseTransf.setIdentity(); // broadcasted if `publish_tf` is true - mMap2OdomTransf.setIdentity(); // broadcasted if `publish_map_tf` is true - mMap2BaseTransf.setIdentity(); // used internally, but not broadcasted - // <---- Dynamic transforms + mOdom2BaseTransf.setIdentity(); // broadcasted if `publish_tf` is true + mMap2OdomTransf.setIdentity(); // broadcasted if `publish_map_tf` is true + mMap2BaseTransf.setIdentity(); // used internally, but not broadcasted + // <---- Dynamic transforms } bool ZEDWrapperNodelet::getCamera2BaseTransform() @@ -2245,117 +2245,108 @@ bool ZEDWrapperNodelet::saveAreaMap(std::string file_path, std::string* out_msg) void ZEDWrapperNodelet::publishOdom(tf2::Transform odom2baseTransf, sl::Pose& slPose, ros::Time t) { - nav_msgs::OdometryPtr odomMsg = boost::make_shared(); + size_t odomSub = mPubOdom.getNumSubscribers(); - odomMsg->header.stamp = t; - odomMsg->header.frame_id = mOdomFrameId; // frame - odomMsg->child_frame_id = mBaseFrameId; // camera_frame - // conversion from Tranform to message - geometry_msgs::Transform base2odom = tf2::toMsg(odom2baseTransf); - // Add all value in odometry message - odomMsg->pose.pose.position.x = base2odom.translation.x; - odomMsg->pose.pose.position.y = base2odom.translation.y; - odomMsg->pose.pose.position.z = base2odom.translation.z; - odomMsg->pose.pose.orientation.x = base2odom.rotation.x; - odomMsg->pose.pose.orientation.y = base2odom.rotation.y; - odomMsg->pose.pose.orientation.z = base2odom.rotation.z; - odomMsg->pose.pose.orientation.w = base2odom.rotation.w; + if (odomSub) + { + nav_msgs::OdometryPtr odomMsg = boost::make_shared(); - // Odometry pose covariance + odomMsg->header.stamp = t; + odomMsg->header.frame_id = mOdomFrameId; // frame + odomMsg->child_frame_id = mBaseFrameId; // camera_frame - for (size_t i = 0; i < odomMsg->pose.covariance.size(); i++) - { - odomMsg->pose.covariance[i] = static_cast(slPose.pose_covariance[i]); + // Add all value in odometry message + odomMsg->pose.pose.position.x = odom2baseTransf.getOrigin().x(); + odomMsg->pose.pose.position.y = odom2baseTransf.getOrigin().y(); + odomMsg->pose.pose.position.z = odom2baseTransf.getOrigin().z(); + 
odomMsg->pose.pose.orientation.x = odom2baseTransf.getRotation().x(); + odomMsg->pose.pose.orientation.y = odom2baseTransf.getRotation().y(); + odomMsg->pose.pose.orientation.z = odom2baseTransf.getRotation().z(); + odomMsg->pose.pose.orientation.w = odom2baseTransf.getRotation().w(); - if (mTwoDMode) + // Odometry pose covariance + for (size_t i = 0; i < odomMsg->pose.covariance.size(); i++) { - if (i == 14 || i == 21 || i == 28) - { - odomMsg->pose.covariance[i] = 1e-9; // Very low covariance if 2D mode - } - else if ((i >= 2 && i <= 4) || (i >= 8 && i <= 10) || (i >= 12 && i <= 13) || (i >= 15 && i <= 20) || - (i >= 22 && i <= 27) || (i == 29) || (i >= 32 && i <= 34)) + odomMsg->pose.covariance[i] = static_cast(slPose.pose_covariance[i]); + + if (mTwoDMode) { - odomMsg->pose.covariance[i] = 0.0; + if ((i >= 2 && i <= 4) || (i >= 8 && i <= 10) || (i >= 12 && i <= 29) || (i >= 32 && i <= 34)) + { + odomMsg->pose.covariance[i] = 1e-9; // Very low covariance if 2D mode + } } } - } - // Publish odometry message - mPubOdom.publish(odomMsg); + // Publish odometry message + NODELET_DEBUG("Publishing ODOM message"); + mPubOdom.publish(odomMsg); + } } -void ZEDWrapperNodelet::publishPose(ros::Time t) +void ZEDWrapperNodelet::publishPose() { + size_t poseSub = mPubPose.getNumSubscribers(); + size_t poseCovSub = mPubPoseCov.getNumSubscribers(); + tf2::Transform base_pose; base_pose.setIdentity(); - if (mPublishMapTF) - { - base_pose = mMap2BaseTransf; - } - else if (mPublishTF) - { - base_pose = mOdom2BaseTransf; - } + base_pose = mMap2BaseTransf; std_msgs::Header header; - header.stamp = t; - header.frame_id = mMapFrameId; - geometry_msgs::Pose pose; + header.stamp = mFrameTimestamp; + header.frame_id = mMapFrameId; // frame - // conversion from Tranform to message - geometry_msgs::Transform base2frame = tf2::toMsg(base_pose); + geometry_msgs::Pose pose; // Add all value in Pose message - pose.position.x = base2frame.translation.x; - pose.position.y = base2frame.translation.y; - pose.position.z = base2frame.translation.z; - pose.orientation.x = base2frame.rotation.x; - pose.orientation.y = base2frame.rotation.y; - pose.orientation.z = base2frame.rotation.z; - pose.orientation.w = base2frame.rotation.w; + pose.position.x = mMap2BaseTransf.getOrigin().x(); + pose.position.y = mMap2BaseTransf.getOrigin().y(); + pose.position.z = mMap2BaseTransf.getOrigin().z(); + pose.orientation.x = mMap2BaseTransf.getRotation().x(); + pose.orientation.y = mMap2BaseTransf.getRotation().y(); + pose.orientation.z = mMap2BaseTransf.getRotation().z(); + pose.orientation.w = mMap2BaseTransf.getRotation().w(); - if (mPubPose.getNumSubscribers() > 0) + if (poseSub > 0) { - geometry_msgs::PoseStamped poseNoCov; + geometry_msgs::PoseStampedPtr poseNoCov = boost::make_shared(); - poseNoCov.header = header; - poseNoCov.pose = pose; + poseNoCov->header = header; + poseNoCov->pose = pose; // Publish pose stamped message + NODELET_DEBUG("Publishing POSE NO COV message"); mPubPose.publish(poseNoCov); } - if (mPubPoseCov.getNumSubscribers() > 0) + if (poseCovSub > 0) { - geometry_msgs::PoseWithCovarianceStampedPtr poseCovMsg = + geometry_msgs::PoseWithCovarianceStampedPtr poseCov = boost::make_shared(); - poseCovMsg->header = header; - poseCovMsg->pose.pose = pose; + poseCov->header = header; + poseCov->pose.pose = pose; // Odometry pose covariance if available - for (size_t i = 0; i < poseCovMsg->pose.covariance.size(); i++) + + for (size_t i = 0; i < poseCov->pose.covariance.size(); i++) { - poseCovMsg->pose.covariance[i] = 
static_cast(mLastZedPose.pose_covariance[i]); + poseCov->pose.covariance[i] = static_cast(mLastZedPose.pose_covariance[i]); if (mTwoDMode) { - if (i == 14 || i == 21 || i == 28) + if ((i >= 2 && i <= 4) || (i >= 8 && i <= 10) || (i >= 12 && i <= 29) || (i >= 32 && i <= 34)) { - poseCovMsg->pose.covariance[i] = 1e-9; // Very low covariance if 2D mode - } - else if ((i >= 2 && i <= 4) || (i >= 8 && i <= 10) || (i >= 12 && i <= 13) || (i >= 15 && i <= 16) || - (i >= 18 && i <= 20) || (i == 22) || (i >= 24 && i <= 27)) - { - poseCovMsg->pose.covariance[i] = 0.0; + poseCov->pose.covariance[i] = 1e-9; // Very low covariance if 2D mode } } } // Publish pose with covariance stamped message - mPubPoseCov.publish(poseCovMsg); + NODELET_DEBUG("Publishing POSE COV message"); + mPubPoseCov.publish(std::move(poseCov)); } } @@ -5514,71 +5505,63 @@ void ZEDWrapperNodelet::processOdometry() getCamera2BaseTransform(); } - if (!mInitOdomWithPose) - { - sl::Pose deltaOdom; - - mPosTrackingStatusCamera = mZed.getPosition(deltaOdom, sl::REFERENCE_FRAME::CAMERA); - publishOdomStatus(); - - NODELET_DEBUG("delta ODOM [%s]:\n%s", sl::toString(mPosTrackingStatusCamera).c_str(), - deltaOdom.pose_data.getInfos().c_str()); + // if (!mInitOdomWithPose) { + sl::Pose deltaOdom; - if (mPosTrackingStatusCamera == sl::POSITIONAL_TRACKING_STATE::OK || - mPosTrackingStatusCamera == sl::POSITIONAL_TRACKING_STATE::SEARCHING || - mPosTrackingStatusCamera == sl::POSITIONAL_TRACKING_STATE::FPS_TOO_LOW) - { - sl::Translation translation = deltaOdom.getTranslation(); - sl::Orientation quat = deltaOdom.getOrientation(); + mPosTrackingStatusCamera = mZed.getPosition(deltaOdom, sl::REFERENCE_FRAME::CAMERA); - // Transform ZED delta odom pose in TF2 Transformation - tf2::Transform deltaOdomTf; - deltaOdomTf.setOrigin(tf2::Vector3(translation(0), translation(1), translation(2))); - // w at the end in the constructor - deltaOdomTf.setRotation(tf2::Quaternion(quat(0), quat(1), quat(2), quat(3))); + publishOdomStatus(); - // delta odom from sensor to base frame - tf2::Transform deltaOdomTf_base = mSensor2BaseTransf.inverse() * deltaOdomTf * mSensor2BaseTransf; - // TODO(Walter) Use tf2::transform instead? 
+ NODELET_DEBUG("delta ODOM [%s]:\n%s", sl::toString(mPosTrackingStatusCamera).c_str(), + deltaOdom.pose_data.getInfos().c_str()); - // Propagate Odom transform in time - mOdomMutex.lock(); - mOdom2BaseTransf = mOdom2BaseTransf * deltaOdomTf_base; + if (mPosTrackingStatusCamera == sl::POSITIONAL_TRACKING_STATE::OK || + mPosTrackingStatusCamera == sl::POSITIONAL_TRACKING_STATE::SEARCHING || + mPosTrackingStatusCamera == sl::POSITIONAL_TRACKING_STATE::FPS_TOO_LOW) + { + sl::Translation translation = deltaOdom.getTranslation(); + sl::Orientation quat = deltaOdom.getOrientation(); - if (mTwoDMode) - { - tf2::Vector3 tr_2d = mOdom2BaseTransf.getOrigin(); - tr_2d.setZ(mFixedZValue); - mOdom2BaseTransf.setOrigin(tr_2d); + // Transform ZED delta odom pose in TF2 Transformation + tf2::Transform deltaOdomTf; + deltaOdomTf.setOrigin(tf2::Vector3(translation.x, translation.y, translation.z)); + // w at the end in the constructor + deltaOdomTf.setRotation(tf2::Quaternion(quat.x, quat.y, quat.z, quat.w)); - double roll, pitch, yaw; - tf2::Matrix3x3(mOdom2BaseTransf.getRotation()).getRPY(roll, pitch, yaw); + // delta odom from sensor to base frame + tf2::Transform deltaOdomTf_base = mSensor2BaseTransf.inverse() * deltaOdomTf * mSensor2BaseTransf; - tf2::Quaternion quat_2d; - quat_2d.setRPY(0.0, 0.0, yaw); + // Propagate Odom transform in time + mOdomMutex.lock(); // + mOdom2BaseTransf = mOdom2BaseTransf * deltaOdomTf_base; - mOdom2BaseTransf.setRotation(quat_2d); - } + if (mTwoDMode) + { + tf2::Vector3 tr_2d = mOdom2BaseTransf.getOrigin(); + tr_2d.setZ(mFixedZValue); + mOdom2BaseTransf.setOrigin(tr_2d); double roll, pitch, yaw; tf2::Matrix3x3(mOdom2BaseTransf.getRotation()).getRPY(roll, pitch, yaw); - NODELET_DEBUG("+++ Odometry [%s -> %s] - {%.3f,%.3f,%.3f} {%.3f,%.3f,%.3f}", mOdomFrameId.c_str(), - mBaseFrameId.c_str(), mOdom2BaseTransf.getOrigin().x(), mOdom2BaseTransf.getOrigin().y(), - mOdom2BaseTransf.getOrigin().z(), roll * RAD2DEG, pitch * RAD2DEG, yaw * RAD2DEG); - - // Publish odometry message - publishOdom(mOdom2BaseTransf, deltaOdom, mFrameTimestamp); - mPosTrackingReady = true; + tf2::Quaternion quat_2d; + quat_2d.setRPY(0.0, 0.0, yaw); - mOdomMutex.unlock(); + mOdom2BaseTransf.setRotation(quat_2d); } - } - else if (mFloorAlignment) - { - NODELET_DEBUG_STREAM_THROTTLE(5.0, - "Odometry will be published as soon as the floor as " - "been detected for the first time"); + + double roll, pitch, yaw; + tf2::Matrix3x3(mOdom2BaseTransf.getRotation()).getRPY(roll, pitch, yaw); + + NODELET_DEBUG("+++ Odometry [%s -> %s] - {%.3f,%.3f,%.3f} {%.3f,%.3f,%.3f}", mOdomFrameId.c_str(), + mBaseFrameId.c_str(), mOdom2BaseTransf.getOrigin().x(), mOdom2BaseTransf.getOrigin().y(), + mOdom2BaseTransf.getOrigin().z(), roll * RAD2DEG, pitch * RAD2DEG, yaw * RAD2DEG); + + // Publish odometry message + publishOdom(mOdom2BaseTransf, deltaOdom, mFrameTimestamp); + mPosTrackingReady = true; + + mOdomMutex.unlock(); // } } @@ -5601,6 +5584,8 @@ void ZEDWrapperNodelet::processPose() mPosTrackingStatusWorld = mZed.getPosition(mLastZedPose, sl::REFERENCE_FRAME::WORLD); + NODELET_DEBUG_STREAM("ZED Pose: " << mLastZedPose.pose_data.getInfos()); + publishPoseStatus(); sl::Translation translation = mLastZedPose.getTranslation(); @@ -5612,11 +5597,11 @@ void ZEDWrapperNodelet::processPose() } NODELET_DEBUG_STREAM("MAP -> Tracking Status: " << sl::toString(mPosTrackingStatusWorld).c_str()); + NODELET_DEBUG("Sensor POSE [%s -> %s]:\n%s}", mLeftCamFrameId.c_str(), mMapFrameId.c_str(), mLastZedPose.pose_data.getInfos().c_str()); - if 
(mPosTrackingStatusWorld == sl::POSITIONAL_TRACKING_STATE::OK || - mPosTrackingStatusWorld == sl::POSITIONAL_TRACKING_STATE::SEARCHING) + if (mPosTrackingStatusWorld == sl::POSITIONAL_TRACKING_STATE::OK) { double roll, pitch, yaw; tf2::Matrix3x3(tf2::Quaternion(quat.ox, quat.oy, quat.oz, quat.ow)).getRPY(roll, pitch, yaw); @@ -5625,7 +5610,8 @@ void ZEDWrapperNodelet::processPose() map_to_sens_transf.setOrigin(tf2::Vector3(translation(0), translation(1), translation(2))); map_to_sens_transf.setRotation(tf2::Quaternion(quat(0), quat(1), quat(2), quat(3))); - mMap2BaseTransf = map_to_sens_transf * mSensor2BaseTransf; // Base position in map frame + mMap2BaseTransf = + mSensor2BaseTransf.inverse() * map_to_sens_transf * mSensor2BaseTransf; // Base position in map frame if (mTwoDMode) { @@ -5652,27 +5638,29 @@ void ZEDWrapperNodelet::processPose() bool initOdom = false; // Transformation from map to odometry frame - mOdomMutex.lock(); + mOdomMutex.lock(); // mMap2OdomTransf = mMap2BaseTransf * mOdom2BaseTransf.inverse(); - mOdomMutex.unlock(); + mOdomMutex.unlock(); // + // double roll, pitch, yaw; tf2::Matrix3x3(mMap2OdomTransf.getRotation()).getRPY(roll, pitch, yaw); NODELET_DEBUG("+++ Diff [%s -> %s] - {%.3f,%.3f,%.3f} {%.3f,%.3f,%.3f}", mMapFrameId.c_str(), mOdomFrameId.c_str(), mMap2OdomTransf.getOrigin().x(), mMap2OdomTransf.getOrigin().y(), mMap2OdomTransf.getOrigin().z(), roll * RAD2DEG, pitch * RAD2DEG, yaw * RAD2DEG); + //} // Publish Pose message - publishPose(mFrameTimestamp); + publishPose(); mPosTrackingReady = true; } } void ZEDWrapperNodelet::publishTFs(ros::Time t) { - // DEBUG_STREAM_PT("publishTFs"); + // NODELET_DEBUG("publishTFs"); - // RCLCPP_INFO_STREAM(get_logger(), "publishTFs - t type:" << + // NODELET_DEBUG_STREAM("publishTFs - t type:" << // t.get_clock_type()); if (!mPosTrackingReady) @@ -5700,14 +5688,16 @@ void ZEDWrapperNodelet::publishTFs(ros::Time t) void ZEDWrapperNodelet::publishOdomTF(ros::Time t) { - // DEBUG_STREAM_PT("publishOdomTF"); + // NODELET_DEBUG("publishOdomTF"); // ----> Avoid duplicated TF publishing - if (t == mLastTs_odom) + static ros::Time last_stamp; + + if (t == last_stamp) { return; } - mLastTs_odom = t; + last_stamp = t; // <---- Avoid duplicated TF publishing if (!mSensor2BaseTransfValid) @@ -5726,18 +5716,17 @@ void ZEDWrapperNodelet::publishOdomTF(ros::Time t) } geometry_msgs::TransformStamped transformStamped; - transformStamped.header.stamp = t; - // RCLCPP_INFO_STREAM(get_logger(), "Odom TS: " << transformStamped.header.stamp); + // NODELET_DEBUG_STREAM(get_logger(), "Odom TS: " << transformStamped.header.stamp); transformStamped.header.frame_id = mOdomFrameId; transformStamped.child_frame_id = mBaseFrameId; // conversion from Tranform to message - mOdomMutex.lock(); + mOdomMutex.lock(); // tf2::Vector3 translation = mOdom2BaseTransf.getOrigin(); tf2::Quaternion quat = mOdom2BaseTransf.getRotation(); - mOdomMutex.unlock(); + mOdomMutex.unlock(); // transformStamped.transform.translation.x = translation.x(); transformStamped.transform.translation.y = translation.y(); transformStamped.transform.translation.z = translation.z(); @@ -5752,14 +5741,16 @@ void ZEDWrapperNodelet::publishOdomTF(ros::Time t) void ZEDWrapperNodelet::publishPoseTF(ros::Time t) { - // DEBUG_STREAM_PT("publishPoseTF"); + // NODELET_DEBUG("publishPoseTF"); // ----> Avoid duplicated TF publishing - if (t == mLastTs_pose) + static ros::Time last_stamp; + + if (t == last_stamp) { return; } - mLastTs_pose = t; + last_stamp = t; // <---- Avoid duplicated TF 
publishing if (!mSensor2BaseTransfValid) @@ -5778,7 +5769,6 @@ void ZEDWrapperNodelet::publishPoseTF(ros::Time t) } geometry_msgs::TransformStamped transformStamped; - transformStamped.header.stamp = t; transformStamped.header.frame_id = mMapFrameId; transformStamped.child_frame_id = mOdomFrameId; From 50405402c6345d8012d0bdfe552a361aa1ec38e3 Mon Sep 17 00:00:00 2001 From: Walter Lucetti Date: Fri, 15 Sep 2023 10:58:58 +0200 Subject: [PATCH 18/21] Fix Path publishers --- .../zed_nodelet/src/zed_wrapper_nodelet.cpp | 72 +++++++++---------- 1 file changed, 34 insertions(+), 38 deletions(-) diff --git a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp index 8191e126..ede33eea 100644 --- a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp +++ b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp @@ -3251,41 +3251,35 @@ void ZEDWrapperNodelet::pubVideoDepth() void ZEDWrapperNodelet::callback_pubPath(const ros::TimerEvent& e) { uint32_t mapPathSub = mPubMapPath.getNumSubscribers(); - uint32_t odomPathSub = mPubOdomPath.getNumSubscribers(); + uint32_t odomPathSub = mPubOdomPath.getNumSubscribers(); geometry_msgs::PoseStamped odomPose; geometry_msgs::PoseStamped mapPose; odomPose.header.stamp = mFrameTimestamp; - odomPose.header.frame_id = mMapFrameId; // frame - // conversion from Tranform to message - geometry_msgs::Transform base2odom = tf2::toMsg(mOdom2BaseTransf); - // Add all value in Pose message - mOdomMutex.lock(); - odomPose.pose.position.x = base2odom.translation.x; - odomPose.pose.position.y = base2odom.translation.y; - odomPose.pose.position.z = base2odom.translation.z; - odomPose.pose.orientation.x = base2odom.rotation.x; - odomPose.pose.orientation.y = base2odom.rotation.y; - odomPose.pose.orientation.z = base2odom.rotation.z; - odomPose.pose.orientation.w = base2odom.rotation.w; - mOdomMutex.unlock(); + odomPose.header.frame_id = mMapFrameId; // map_frame + mOdomMutex.lock(); // + odomPose.pose.position.x = mOdom2BaseTransf.getOrigin().x(); + odomPose.pose.position.y = mOdom2BaseTransf.getOrigin().y(); + odomPose.pose.position.z = mOdom2BaseTransf.getOrigin().z(); + odomPose.pose.orientation.x = mOdom2BaseTransf.getRotation().x(); + odomPose.pose.orientation.y = mOdom2BaseTransf.getRotation().y(); + odomPose.pose.orientation.z = mOdom2BaseTransf.getRotation().z(); + odomPose.pose.orientation.w = mOdom2BaseTransf.getRotation().w(); + mOdomMutex.unlock(); // mapPose.header.stamp = mFrameTimestamp; - mapPose.header.frame_id = mMapFrameId; // frame - // conversion from Tranform to message - geometry_msgs::Transform base2map = tf2::toMsg(mMap2BaseTransf); - // Add all value in Pose message - mapPose.pose.position.x = base2map.translation.x; - mapPose.pose.position.y = base2map.translation.y; - mapPose.pose.position.z = base2map.translation.z; - mapPose.pose.orientation.x = base2map.rotation.x; - mapPose.pose.orientation.y = base2map.rotation.y; - mapPose.pose.orientation.z = base2map.rotation.z; - mapPose.pose.orientation.w = base2map.rotation.w; + mapPose.header.frame_id = mMapFrameId; // map_frame + mapPose.pose.position.x = mMap2BaseTransf.getOrigin().x(); + mapPose.pose.position.y = mMap2BaseTransf.getOrigin().y(); + mapPose.pose.position.z = mMap2BaseTransf.getOrigin().z(); + mapPose.pose.orientation.x = mMap2BaseTransf.getRotation().x(); + mapPose.pose.orientation.y = mMap2BaseTransf.getRotation().y(); + mapPose.pose.orientation.z = mMap2BaseTransf.getRotation().z(); + mapPose.pose.orientation.w = 
mMap2BaseTransf.getRotation().w(); // Circular vector - std::lock_guard lock(mOdomMutex); + std::lock_guard lock(mOdomMutex); // if (mPathMaxCount != -1) { if (mOdomPath.size() == mPathMaxCount) @@ -3299,36 +3293,38 @@ void ZEDWrapperNodelet::callback_pubPath(const ros::TimerEvent& e) } else { - // NODELET_DEBUG_STREAM("Path vectors adding last available poses"); + // NODELET_DEBUG( "Path vectors adding last available poses"); mMapPath.push_back(mapPose); mOdomPath.push_back(odomPose); } } else { - // NODELET_DEBUG_STREAM("No limit path vectors, adding last available poses"); + // NODELET_DEBUG( "No limit path vectors, adding last available poses"); mMapPath.push_back(mapPose); mOdomPath.push_back(odomPose); } if (mapPathSub > 0) { - nav_msgs::PathPtr mapPath = boost::make_shared(); - mapPath->header.frame_id = mMapFrameId; - mapPath->header.stamp = mFrameTimestamp; - mapPath->poses = mMapPath; + nav_msgs::PathPtr mapPathMsg = boost::make_shared(); + mapPathMsg->header.frame_id = mMapFrameId; + mapPathMsg->header.stamp = mFrameTimestamp; + mapPathMsg->poses = mMapPath; - mPubMapPath.publish(mapPath); + NODELET_DEBUG("Publishing MAP PATH message"); + mPubMapPath.publish(mapPathMsg); } if (odomPathSub > 0) { - nav_msgs::PathPtr odomPath = boost::make_shared(); - odomPath->header.frame_id = mMapFrameId; - odomPath->header.stamp = mFrameTimestamp; - odomPath->poses = mOdomPath; + nav_msgs::PathPtr odomPathMsg = boost::make_shared(); + odomPathMsg->header.frame_id = mOdomFrameId; + odomPathMsg->header.stamp = mFrameTimestamp; + odomPathMsg->poses = mOdomPath; - mPubOdomPath.publish(odomPath); + NODELET_DEBUG("Publishing ODOM PATH message"); + mPubOdomPath.publish(odomPathMsg); } } From ad834b5955de2d1b16703ca913e94c9451c53992 Mon Sep 17 00:00:00 2001 From: Walter Lucetti Date: Fri, 15 Sep 2023 11:51:58 +0200 Subject: [PATCH 19/21] Code cleaning --- .../include/zed_wrapper_nodelet.hpp | 4 ++ .../zed_nodelet/src/zed_wrapper_nodelet.cpp | 54 ++++++++++--------- 2 files changed, 32 insertions(+), 26 deletions(-) diff --git a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp index 5fffa5b6..de7cadb7 100644 --- a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp +++ b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp @@ -458,6 +458,10 @@ class ZEDWrapperNodelet : public nodelet::Nodelet */ void processCameraSettings(); + /*! \brief Process point cloud + */ + void processPointcloud(); + /*! 
\brief Generates an univoque color for each object class ID */ inline sl::float3 generateColorClass(int idx) diff --git a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp index ede33eea..ed15db65 100644 --- a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp +++ b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp @@ -4060,61 +4060,42 @@ void ZEDWrapperNodelet::device_poll_thread_func() // ----> SVO recording mRecMutex.lock(); - if (mRecording) { mRecStatus = mZed.getRecordingStatus(); - if (!mRecStatus.status) { NODELET_ERROR_THROTTLE(1.0, "Error saving frame to SVO"); } - mDiagUpdater.force_update(); } mRecMutex.unlock(); // <---- SVO recording - // ----> Publish freq calculation + // ----> Grab freq calculation static std::chrono::steady_clock::time_point last_time = std::chrono::steady_clock::now(); std::chrono::steady_clock::time_point now = std::chrono::steady_clock::now(); - double elapsed_usec = std::chrono::duration_cast(now - last_time).count(); last_time = now; - mGrabPeriodMean_usec->addValue(elapsed_usec); // NODELET_INFO_STREAM("Grab time: " << elapsed_usec / 1000 << " msec"); - // <---- Publish freq calculation + // <---- Grab freq calculation // ----> Camera Settings processCameraSettings(); // <---- Camera Settings - // Publish the point cloud if someone has subscribed to - if (cloudSubnumber > 0) - { - // Run the point cloud conversion asynchronously to avoid slowing down - // all the program - // Retrieve raw pointCloud data if latest Pointcloud is ready - std::unique_lock lock(mPcMutex, std::defer_lock); - - if (lock.try_lock()) - { - mZed.retrieveMeasure(mCloud, sl::MEASURE::XYZBGRA, sl::MEM::CPU, mMatResol); + // ----> Point Cloud - mPointCloudFrameId = mDepthFrameId; - mPointCloudTime = stamp; - - // Signal Pointcloud thread that a new pointcloud is ready - mPcDataReadyCondVar.notify_one(); - mPcDataReady = true; - mPcPublishing = true; - } + if (!mDepthDisabled && cloudSubnumber > 0) + { + processPointcloud(); } else { mPcPublishing = false; } + // <---- Point Cloud // ----> Object Detection mObjDetMutex.lock(); @@ -4233,6 +4214,27 @@ void ZEDWrapperNodelet::device_poll_thread_func() NODELET_DEBUG("ZED pool thread finished"); } // namespace zed_nodelets +void ZEDWrapperNodelet::processPointcloud() +{ + // Run the point cloud conversion asynchronously to avoid slowing down + // all the program + // Retrieve raw pointCloud data if latest Pointcloud is ready + std::unique_lock lock(mPcMutex, std::defer_lock); + + if (lock.try_lock()) + { + mZed.retrieveMeasure(mCloud, sl::MEASURE::XYZBGRA, sl::MEM::CPU, mMatResol); + + mPointCloudFrameId = mDepthFrameId; + mPointCloudTime = stamp; + + // Signal Pointcloud thread that a new pointcloud is ready + mPcDataReadyCondVar.notify_one(); + mPcDataReady = true; + mPcPublishing = true; + } +} + void ZEDWrapperNodelet::processCameraSettings() { int value; From ce86801dac1e9b81e99f67f903a4c34c98bb466b Mon Sep 17 00:00:00 2001 From: Walter Lucetti Date: Fri, 15 Sep 2023 14:34:54 +0200 Subject: [PATCH 20/21] Minor fixes --- CHANGELOG.rst | 3 +- README.md | 35 ++++++------ .../include/zed_wrapper_nodelet.hpp | 3 +- .../zed_nodelet/src/zed_wrapper_nodelet.cpp | 54 +++++++++---------- 4 files changed, 47 insertions(+), 48 deletions(-) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index fd9d3df5..68fc1b60 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,9 +1,10 @@ CHANGELOG ========= -2023-09-14 +2023-09-15 ---------- - Add pose and odometry status publishers 
+- Improve odometry and pose publishing and TF broadcasting 2023-09-13 ---------- diff --git a/README.md b/README.md index f3291d1c..16fcbc35 100644 --- a/README.md +++ b/README.md @@ -17,30 +17,27 @@ This package lets you use the ZED stereo camera with ROS. It outputs the camera ### Prerequisites - Ubuntu 20.04 -- [ZED SDK **≥ 4.0**](https://www.stereolabs.com/developers/) and its dependency [CUDA](https://developer.nvidia.com/cuda-downloads) +- [ZED SDK **≥ 4.0.6**](https://www.stereolabs.com/developers/) and its dependency [CUDA](https://developer.nvidia.com/cuda-downloads) - [ROS Noetic](http://wiki.ros.org/noetic/Installation/Ubuntu) ### Build the repository The zed_ros_wrapper is a catkin package. It depends on the following ROS packages: - - nav_msgs - - tf2_geometry_msgs - - message_runtime - - catkin - - roscpp - - stereo_msgs - - rosconsole - - robot_state_publisher - - urdf - - sensor_msgs - - image_transport - - roslint - - diagnostic_updater - - dynamic_reconfigure - - tf2_ros - - message_generation - - nodelet + - roscpp +- image_transport +- rosconsole +- sensor_msgs +- stereo_msgs +- std_msgs +- message_filters +- tf2_ros +- nodelet +- tf2_geometry_msgs +- message_generation +- diagnostic_updater +- dynamic_reconfigure +- zed_interfaces Open a terminal, clone the repository, update the dependencies and build the packages: @@ -93,7 +90,7 @@ ZED X Mini camera: $ roslaunch zed_wrapper zedxm.launch - To select the ZED from its serial number: + To select the camera from its serial number: $ roslaunch zed_wrapper zed.launch serial_number:=1010 #replace 1010 with the actual SN diff --git a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp index de7cadb7..2768026b 100644 --- a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp +++ b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp @@ -459,8 +459,9 @@ class ZEDWrapperNodelet : public nodelet::Nodelet void processCameraSettings(); /*! \brief Process point cloud + * \param ts Frame timestamp */ - void processPointcloud(); + void processPointcloud(ros::Time ts); /*! 
\brief Generates an univoque color for each object class ID */ diff --git a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp index ed15db65..c778a47f 100644 --- a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp +++ b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp @@ -2965,11 +2965,17 @@ void ZEDWrapperNodelet::pubVideoDepth() uint32_t leftGrayRawSubnumber = mPubRawLeftGray.getNumSubscribers(); uint32_t rightGraySubnumber = mPubRightGray.getNumSubscribers(); uint32_t rightGrayRawSubnumber = mPubRawRightGray.getNumSubscribers(); - uint32_t depthSubnumber = mPubDepth.getNumSubscribers(); - uint32_t disparitySubnumber = mPubDisparity.getNumSubscribers(); - uint32_t confMapSubnumber = mPubConfMap.getNumSubscribers(); uint32_t stereoSubNumber = mPubStereo.getNumSubscribers(); uint32_t stereoRawSubNumber = mPubRawStereo.getNumSubscribers(); + uint32_t depthSubnumber = 0; + uint32_t disparitySubnumber = 0; + uint32_t confMapSubnumber = 0; + if (!mDepthDisabled) + { + depthSubnumber = mPubDepth.getNumSubscribers(); + disparitySubnumber = mPubDisparity.getNumSubscribers(); + confMapSubnumber = mPubConfMap.getNumSubscribers(); + } uint32_t tot_sub = rgbSubnumber + rgbRawSubnumber + leftSubnumber + leftRawSubnumber + rightSubnumber + rightRawSubnumber + rgbGraySubnumber + rgbGrayRawSubnumber + leftGraySubnumber + @@ -3101,11 +3107,6 @@ void ZEDWrapperNodelet::pubVideoDepth() } // <---- Publish sensor data if sync is required by user or SVO - // ----> Notify grab thread that all data are synchronized and a new grab can be done - // mRgbDepthDataRetrievedCondVar.notify_one(); - // mRgbDepthDataRetrieved = true; - // <---- Notify grab thread that all data are synchronized and a new grab can be done - if (!retrieved) { mPublishingData = false; @@ -3251,7 +3252,7 @@ void ZEDWrapperNodelet::pubVideoDepth() void ZEDWrapperNodelet::callback_pubPath(const ros::TimerEvent& e) { uint32_t mapPathSub = mPubMapPath.getNumSubscribers(); - uint32_t odomPathSub = mPubOdomPath.getNumSubscribers(); + uint32_t odomPathSub = mPubOdomPath.getNumSubscribers(); geometry_msgs::PoseStamped odomPose; geometry_msgs::PoseStamped mapPose; @@ -3846,7 +3847,6 @@ void ZEDWrapperNodelet::device_poll_thread_func() fillCamDepthInfo(mZed, mDepthCamInfoMsg, mLeftCamOptFrameId); // the reference camera is the Left one (next to the ZED logo) - mRgbCamInfoMsg = mLeftCamInfoMsg; mRgbCamInfoRawMsg = mLeftCamInfoRawMsg; @@ -3855,7 +3855,7 @@ void ZEDWrapperNodelet::device_poll_thread_func() // Main loop while (mNhNs.ok()) { - // Check for subscribers + // ----> Count subscribers uint32_t rgbSubnumber = mPubRgb.getNumSubscribers(); uint32_t rgbRawSubnumber = mPubRawRgb.getNumSubscribers(); uint32_t leftSubnumber = mPubLeft.getNumSubscribers(); @@ -3897,14 +3897,15 @@ void ZEDWrapperNodelet::device_poll_thread_func() } uint32_t stereoSubNumber = mPubStereo.getNumSubscribers(); uint32_t stereoRawSubNumber = mPubRawStereo.getNumSubscribers(); + // <---- Count subscribers - mGrabActive = - mRecording || mStreaming || mMappingEnabled || mObjDetEnabled || mPosTrackingEnabled || mPosTrackingStarted || - ((rgbSubnumber + rgbRawSubnumber + leftSubnumber + leftRawSubnumber + rightSubnumber + rightRawSubnumber + - rgbGraySubnumber + rgbGrayRawSubnumber + leftGraySubnumber + leftGrayRawSubnumber + rightGraySubnumber + - rightGrayRawSubnumber + depthSubnumber + disparitySubnumber + cloudSubnumber + poseSubnumber + - poseCovSubnumber + odomSubnumber + confMapSubnumber /*+ 
imuSubnumber + imuRawsubnumber*/ + pathSubNumber + - stereoSubNumber + stereoRawSubNumber + objDetSubnumber) > 0); + mGrabActive = mRecording || mStreaming || mMappingEnabled || mObjDetEnabled || mPosTrackingEnabled || + mPosTrackingStarted || + ((rgbSubnumber + rgbRawSubnumber + leftSubnumber + leftRawSubnumber + rightSubnumber + + rightRawSubnumber + rgbGraySubnumber + rgbGrayRawSubnumber + leftGraySubnumber + + leftGrayRawSubnumber + rightGraySubnumber + rightGrayRawSubnumber + depthSubnumber + + disparitySubnumber + cloudSubnumber + poseSubnumber + poseCovSubnumber + odomSubnumber + + confMapSubnumber + pathSubNumber + stereoSubNumber + stereoRawSubNumber + objDetSubnumber) > 0); // Run the loop only if there is some subscribers or SVO is active if (mGrabActive) @@ -3918,7 +3919,7 @@ void ZEDWrapperNodelet::device_poll_thread_func() odomSubnumber > 0 || pathSubNumber > 0); // Start the tracking? - if ((mPosTrackingRequired) && !mPosTrackingStarted && !mDepthDisabled) + if (mPosTrackingRequired && !mPosTrackingStarted) { start_pos_tracking(); } @@ -3945,6 +3946,7 @@ void ZEDWrapperNodelet::device_poll_thread_func() ((depthSubnumber + disparitySubnumber + cloudSubnumber + fusedCloudSubnumber + poseSubnumber + poseCovSubnumber + odomSubnumber + confMapSubnumber + objDetSubnumber) > 0)); + // ----> Depth runtime parameters if (mComputeDepth) { runParams.confidence_threshold = mCamDepthConfidence; @@ -3955,6 +3957,7 @@ void ZEDWrapperNodelet::device_poll_thread_func() { runParams.enable_depth = false; // Ask to not compute the depth } + // <---- Depth runtime parameters std::chrono::steady_clock::time_point start_elab = std::chrono::steady_clock::now(); @@ -3964,9 +3967,7 @@ void ZEDWrapperNodelet::device_poll_thread_func() // cout << toString(grab_status) << endl; if (mGrabStatus != sl::ERROR_CODE::SUCCESS) { - // Detect if a error occurred (for example: - // the zed have been disconnected) and - // re-initialize the ZED + // Detect if a error occurred (for example: the zed have been disconnected) and re-initialize the ZED NODELET_INFO_STREAM_THROTTLE(1.0, "Camera grab error: " << sl::toString(mGrabStatus).c_str()); @@ -4086,10 +4087,9 @@ void ZEDWrapperNodelet::device_poll_thread_func() // <---- Camera Settings // ----> Point Cloud - if (!mDepthDisabled && cloudSubnumber > 0) { - processPointcloud(); + processPointcloud(mFrameTimestamp); } else { @@ -4184,7 +4184,7 @@ void ZEDWrapperNodelet::device_poll_thread_func() } else { - NODELET_DEBUG_THROTTLE(5.0, "No topics subscribed by users"); + NODELET_DEBUG_THROTTLE(5.0, "No processing required. 
Waiting for subscribers or modules activation."); std::this_thread::sleep_for(std::chrono::milliseconds(10)); // No subscribers, we just wait loop_rate.reset(); @@ -4214,7 +4214,7 @@ void ZEDWrapperNodelet::device_poll_thread_func() NODELET_DEBUG("ZED pool thread finished"); } // namespace zed_nodelets -void ZEDWrapperNodelet::processPointcloud() +void ZEDWrapperNodelet::processPointcloud(ros::Time ts) { // Run the point cloud conversion asynchronously to avoid slowing down // all the program @@ -4226,7 +4226,7 @@ void ZEDWrapperNodelet::processPointcloud() mZed.retrieveMeasure(mCloud, sl::MEASURE::XYZBGRA, sl::MEM::CPU, mMatResol); mPointCloudFrameId = mDepthFrameId; - mPointCloudTime = stamp; + mPointCloudTime = ts; // Signal Pointcloud thread that a new pointcloud is ready mPcDataReadyCondVar.notify_one(); From aebf2c3593e9b32b2c0bca78293de622ff5f23af Mon Sep 17 00:00:00 2001 From: Walter Lucetti Date: Wed, 20 Sep 2023 14:39:13 +0200 Subject: [PATCH 21/21] Fix `set_as_static` behavior --- .../zed_nodelet/src/zed_wrapper_nodelet.cpp | 50 +++++++++++++++---- 1 file changed, 40 insertions(+), 10 deletions(-) diff --git a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp index c778a47f..90da3015 100644 --- a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp +++ b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp @@ -53,6 +53,8 @@ ZEDWrapperNodelet::ZEDWrapperNodelet() : Nodelet() ZEDWrapperNodelet::~ZEDWrapperNodelet() { + //std::cerr << "Destroying " << getName() << std::endl; + if (mDevicePollThread.joinable()) { mDevicePollThread.join(); @@ -68,7 +70,14 @@ ZEDWrapperNodelet::~ZEDWrapperNodelet() mSensThread.join(); } - std::cerr << "ZED Nodelet destroyed" << std::endl; + if (mZed.isOpened()) + { + mZed.close(); + } + + std::cerr << "ZED " << mZedSerialNumber << " closed." 
<< std::endl; + + //std::cerr << "ZED Nodelet '" << getName().c_str() << "' correctly stopped" << std::endl; } void ZEDWrapperNodelet::onInit() @@ -189,6 +198,7 @@ void ZEDWrapperNodelet::onInit() // Try to initialize the ZED std::stringstream ss; // Input mode info for logging + std::string cam_id; if (!mSvoFilepath.empty() || !mRemoteStreamAddr.empty()) { if (!mSvoFilepath.empty()) @@ -198,6 +208,7 @@ void ZEDWrapperNodelet::onInit() // Input mode info for logging ss << "SVO - " << mSvoFilepath.c_str(); + cam_id = ss.str(); } else if (!mRemoteStreamAddr.empty()) { @@ -215,6 +226,7 @@ void ZEDWrapperNodelet::onInit() // Input mode info for logging ss << "NETWORK STREAM - " << mRemoteStreamAddr.c_str(); + cam_id = ss.str(); } mSvoMode = true; @@ -231,6 +243,7 @@ void ZEDWrapperNodelet::onInit() // Input mode info for logging ss << "LIVE CAMERA with ID " << mZedId; + cam_id = ss.str(); } else { @@ -238,6 +251,7 @@ void ZEDWrapperNodelet::onInit() // Input mode info for logging ss << "LIVE CAMERA with SN " << mZedSerialNumber; + cam_id = ss.str(); } } @@ -251,6 +265,7 @@ void ZEDWrapperNodelet::onInit() mZedParams.depth_minimum_distance = static_cast(mCamMinDepth); mZedParams.depth_maximum_distance = static_cast(mCamMaxDepth); mZedParams.camera_disable_self_calib = !mCameraSelfCalib; + mZedParams.open_timeout_sec = 10.0f; mZedParams.enable_image_enhancement = true; // Always active @@ -263,16 +278,25 @@ void ZEDWrapperNodelet::onInit() while (mConnStatus != sl::ERROR_CODE::SUCCESS) { mConnStatus = mZed.open(mZedParams); - NODELET_INFO_STREAM("ZED connection: " << sl::toString(mConnStatus)); + NODELET_INFO_STREAM("ZED connection [" << cam_id.c_str() << "]: " << sl::toString(mConnStatus)); std::this_thread::sleep_for(std::chrono::milliseconds(2000)); if (!mNhNs.ok()) { - mStopNode = true; // Stops other threads + mStopNode = true; std::lock_guard lock(mCloseZedMutex); - NODELET_DEBUG("Closing ZED"); - mZed.close(); + NODELET_INFO_STREAM("Closing ZED " << mZedSerialNumber << "..."); + if (mRecording) + { + mRecording = false; + mZed.disableRecording(); + } + if (mZed.isOpened()) + { + mZed.close(); + } + NODELET_INFO_STREAM("... ZED " << mZedSerialNumber << " closed."); NODELET_DEBUG("ZED pool thread finished"); return; @@ -2109,7 +2133,6 @@ void ZEDWrapperNodelet::start_pos_tracking() mPoseSmoothing = false; // Always false. 
Pose Smoothing is to be enabled only for VR/AR applications posTrackParams.enable_pose_smoothing = mPoseSmoothing; posTrackParams.set_floor_as_origin = mFloorAlignment; - posTrackParams.set_as_static = mIsStatic; posTrackParams.depth_min_range = static_cast(mPosTrkMinDepth); if (mAreaMemDbPath != "" && !sl_tools::file_exist(mAreaMemDbPath)) @@ -2130,7 +2153,7 @@ void ZEDWrapperNodelet::start_pos_tracking() posTrackParams.enable_imu_fusion = mImuFusion; - posTrackParams.set_as_static = false; + posTrackParams.set_as_static = mIsStatic; posTrackParams.set_gravity_as_origin = mSetGravityAsOrigin; posTrackParams.mode = mPosTrkMode; @@ -3994,7 +4017,10 @@ void ZEDWrapperNodelet::device_poll_thread_func() if ((ros::Time::now() - mPrevFrameTimestamp).toSec() > 5 && !mSvoMode) { mCloseZedMutex.lock(); - mZed.close(); + if (mZed.isOpened()) + { + mZed.close(); + } mCloseZedMutex.unlock(); mConnStatus = sl::ERROR_CODE::CAMERA_NOT_DETECTED; @@ -4006,13 +4032,17 @@ void ZEDWrapperNodelet::device_poll_thread_func() mStopNode = true; std::lock_guard lock(mCloseZedMutex); - NODELET_DEBUG("Closing ZED"); + NODELET_INFO_STREAM("Closing ZED " << mZedSerialNumber << "..."); if (mRecording) { mRecording = false; mZed.disableRecording(); } - mZed.close(); + if (mZed.isOpened()) + { + mZed.close(); + } + NODELET_INFO_STREAM("... ZED " << mZedSerialNumber << " closed."); NODELET_DEBUG("ZED pool thread finished"); return;
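
The last patch makes two behavioral changes that are easy to miss among the hunks: the user-configured `set_as_static` flag is now applied exactly once when positional tracking is enabled (previously it was set and then overwritten with `false` a few lines later), and every shutdown path now stops an active SVO recording and checks `isOpened()` before calling `close()`. A minimal standalone sketch of both ideas follows, assuming ZED SDK >= 4.0; the helper names and argument values are illustrative and do not appear in the wrapper.

    // Sketch only, not wrapper code. Assumes ZED SDK >= 4.0.
    #include <sl/Camera.hpp>

    // Apply the user flag exactly once; it must not be reset afterwards.
    sl::ERROR_CODE enableTrackingSketch(sl::Camera& zed, bool is_static, float min_depth_m)
    {
      sl::PositionalTrackingParameters params;
      params.enable_pose_smoothing = false;  // smoothing is only useful for VR/AR
      params.set_floor_as_origin = false;
      params.depth_min_range = min_depth_m;
      params.enable_imu_fusion = true;
      params.set_gravity_as_origin = true;
      params.set_as_static = is_static;      // applied once, never overwritten
      return zed.enablePositionalTracking(params);
    }

    // Stop recording first, then close the camera only if it is actually open.
    void closeCameraSketch(sl::Camera& zed, bool& recording)
    {
      if (recording)
      {
        recording = false;
        zed.disableRecording();
      }
      if (zed.isOpened())
      {
        zed.close();
      }
    }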