@@ -45,10 +45,10 @@ LaserScanProcessor::LaserScanProcessor(ros::NodeHandle n_)
4545 this ->vo_pub = n.advertise <nav_msgs::Odometry>(" /vo" ,1 );
4646
4747 // Initialize the odom_visual to odom Broadcaster
48- tf::Transform transform;
49- transform.setOrigin ( tf::Vector3 (0.0 , 0.0 , 0.0 ) );
50- transform.setRotation ( tf::Quaternion (0 , 0 , 0 , 1 ) );
51- br_vo_o.sendTransform (tf::StampedTransform (transform, ros::Time::now (), " odom_visual" , " odom" ));
48+ // tf::Transform transform;
49+ // transform.setOrigin( tf::Vector3(0.0, 0.0, 0.0) );
50+ // transform.setRotation( tf::Quaternion(0, 0, 0, 1) );
51+ // br_vo_o.sendTransform(tf::StampedTransform(transform, ros::Time::now(), "odom_visual", "odom"));
5252}
5353
5454void LaserScanProcessor::laser_callback (const sensor_msgs::LaserScan& scan)
@@ -66,6 +66,10 @@ void LaserScanProcessor::laser_callback(const sensor_msgs::LaserScan& scan)
6666 float angle = scan.angle_min ;
6767 std::vector<pcl::PointXYZ> points;
6868 std::vector<float > ranges = scan.ranges ;
69+
70+ // FIX: Reverse the ranges to support a backward-facing (reversed) camera placement
71+ std::reverse (ranges.begin (),ranges.end ());
72+
6973 float theta, r;
7074 tf::Vector3 v, v_glob, v_glob_vo;
7175
@@ -81,7 +85,7 @@ void LaserScanProcessor::laser_callback(const sensor_msgs::LaserScan& scan)
8185 continue ;
8286 if (isnan (r))
8387 continue ;
84- point.x = (r * std::sin (theta)); // camera_depth_optical_frame has actually Z direction pointing forwards.
88+ point.x = (- r * std::sin (theta)); // camera_depth_optical_frame has actually Z direction pointing forwards.
8589 point.z = (r * std::cos (theta)); // Initially, We rather took y direction pointing forwards. i.e. swap z and y and get all
8690 point.y = 0 ; // co-ordinates in first two-components of the PCLXYZ object and work with
8791 // all are equations in X-Y Plane(rather than X-Z). It is better to work in X-Z so that
@@ -150,8 +154,6 @@ void LaserScanProcessor::laser_callback(const sensor_msgs::LaserScan& scan)
150154 // v_glob.setY(loc.shift_x);
151155 // v_glob.setZ(0);
152156
153- // TODO: Need to transform v_glob to base_footprint - static transform-will be only translation
154- // For now robot's pose point is taken to be as the point on the optical depth sensor
155157 v_glob_vo = (this ->vo_fixed_to_base ) * v_glob;
156158
157159 // Correct for the difference in origin
@@ -177,7 +179,7 @@ void LaserScanProcessor::laser_callback(const sensor_msgs::LaserScan& scan)
177179 result_pose.pose .pose .position .z = 0 ;
178180
179181 // tf::Quaternion res = vo_fixed_to_base.getRotation() * tf::createQuaternionFromRPY(0,loc.delta_yaw,0);
180- tf::Quaternion res = tf::createQuaternionFromRPY (0 , 0 , - loc.delta_yaw * M_PI / 180 );
182+ tf::Quaternion res = tf::createQuaternionFromRPY (0 , 0 , loc.delta_yaw * M_PI / 180 );
181183
182184 // Publishing the other way round
183185 // tf::Quaternion res = tf::createQuaternionFromRPY(0, 0, -loc.delta_yaw);
@@ -230,11 +232,11 @@ void LaserScanProcessor::laser_callback(const sensor_msgs::LaserScan& scan)
230232
231233 tf::Transform odom_to_ov;
232234 odom_to_ov = base_to_ov * base_to_odom.inverse ();
233- br_vo_o.sendTransform (tf::StampedTransform (odom_to_ov, ros::Time::now (), " /odom_visual" , " /odom" ));
234- br_vo_bf.sendTransform (tf::StampedTransform (base_to_ov, ros::Time::now (), " /odom_visual" , " /base_footprint" ));
235+ // br_vo_o.sendTransform(tf::StampedTransform(odom_to_ov, ros::Time::now(), "/odom_visual", "/odom"));
236+ // br_vo_bf.sendTransform(tf::StampedTransform(base_to_ov, ros::Time::now(), "/odom_visual", "/base_footprint"));
235237
236238 // Attach base_footprint to odom_visual directly
237- // br_vo_bf.sendTransform(tf::StampedTransform(base_to_ov.inverse(), ros::Time::now(), "/base_footprint", "/odom_visual"));
239+ br_vo_bf.sendTransform (tf::StampedTransform (base_to_ov.inverse (), ros::Time::now (), " /base_footprint" , " /odom_visual" ));
238240
239241}
240242
0 commit comments