YARP
Yet Another Robot Platform
laserFromPointCloud.cpp
Go to the documentation of this file.
1 /*
2  * Copyright (C) 2006-2020 Istituto Italiano di Tecnologia (IIT)
3  *
4  * This library is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * This library is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with this library; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 #define _USE_MATH_DEFINES
20 
21 #include "laserFromPointCloud.h"
22 
23 #include <yarp/os/Time.h>
24 #include <yarp/os/Log.h>
25 #include <yarp/os/LogStream.h>
26 #include <yarp/os/ResourceFinder.h>
27 #include <yarp/math/Math.h>
28 
30 #include <yarp/os/Node.h>
31 #include <yarp/os/Publisher.h>
32 
33 #include <cmath>
34 #include <cstring>
35 #include <cstdlib>
36 #include <iostream>
37 #include <limits>
38 #include <mutex>
39 
40 using namespace std;
41 
42 #ifndef DEG2RAD
43 #define DEG2RAD M_PI/180.0
44 #endif
45 
46 YARP_LOG_COMPONENT(LASER_FROM_POINTCLOUD, "yarp.devices.laserFromPointCloud")
47 
48 /*
49 
50 yarpdev --device Rangefinder2DWrapper --subdevice laserFromPointCloud \
51  --ROS::useROS true --ROS::ROS_nodeName /cer-laserFront \
52  --ROS::ROS_topicName /laserDepth --ROS::frame_id /mobile_base_lidar_F \
53  --SENSOR \
54  --RGBD_SENSOR_CLIENT::localImagePort /clientRgbPort:i \
55  --RGBD_SENSOR_CLIENT::localDepthPort /clientDepthPort:i \
56  --RGBD_SENSOR_CLIENT::localRpcPort /clientRpcPort \
57  --RGBD_SENSOR_CLIENT::remoteImagePort /SIM_CER_ROBOT/depthCamera/rgbImage:o \
58  --RGBD_SENSOR_CLIENT::remoteDepthPort /SIM_CER_ROBOT/depthCamera/depthImage:o \
59  --RGBD_SENSOR_CLIENT::remoteRpcPort /SIM_CER_ROBOT/depthCamera/rpc:i \
60  --TRANSFORM_CLIENT::local /laserFromDepth/tfClient \
61  --TRANSFORM_CLIENT::remote /transformServer \
62  --Z_CLIPPING_PLANES::floor_height 0.15 \
63  --Z_CLIPPING_PLANES::ceiling_height 3.0 \
64  --Z_CLIPPING_PLANES::camera_frame_id depth_center \
65  --Z_CLIPPING_PLANES::ground_frame_id ground_link \
66  --publish_ROS_pointcloud \
67  --period 10 \
68  --name /outlaser:o
69 */
70 
72 yarp::os::Node * rosNode=nullptr;
73 
75 {
// Open the ROS debug topic "/ros_pc" on the point-cloud publisher and log
// whether the topic was opened successfully.
// NOTE(review): this function dereferences pointCloud_outTopic without a
// null check — it assumes the publisher was allocated just before the
// topic() call; confirm against the allocation statement.
76  // rosNode = new yarp::os::Node("laserFromPointCloud");
77 
79  if (pointCloud_outTopic->topic("/ros_pc")==false)
80  {
81  yCError(LASER_FROM_POINTCLOUD) << "opening topic";
82  }
83  else
84  {
85  yCInfo(LASER_FROM_POINTCLOUD) << "topic successful";
86  }
87 }
88 
90 {
// Convert a YARP XYZ point cloud into a ROS sensor_msgs/PointCloud2 message
// (three packed FLOAT32 fields x,y,z per point) and publish it on
// pointCloud_outTopic, if the topic has been opened.
91  //yCDebug(LASER_FROM_POINTCLOUD) << "sizeof:" << sizeof(yarp::sig::DataXYZ);
92 
// Sequence number increases monotonically across calls; timestamps are
// deliberately left at zero.
94  static int counter=0;
95  rosPC_data.header.stamp.nsec=0;
96  rosPC_data.header.stamp.sec=0;
97  rosPC_data.header.seq=counter++;
98  rosPC_data.header.frame_id = frame_id;
99 
100  rosPC_data.fields.resize(3);
101  rosPC_data.fields[0].name = "x";
102  rosPC_data.fields[0].offset = 0; // offset in bytes from start of each point
103  rosPC_data.fields[0].datatype = 7; // 7 = FLOAT32
104  rosPC_data.fields[0].count = 1; // how many FLOAT32 used for 'x'
105 
106  rosPC_data.fields[1].name = "y";
107  rosPC_data.fields[1].offset = 4; // offset in bytes from start of each point
108  rosPC_data.fields[1].datatype = 7; // 7 = FLOAT32
109  rosPC_data.fields[1].count = 1; // how many FLOAT32 used for 'y'
110 
111  rosPC_data.fields[2].name = "z";
112  rosPC_data.fields[2].offset = 8; // offset in bytes from start of each point
113  rosPC_data.fields[2].datatype = 7; // 7 = FLOAT32
114  rosPC_data.fields[2].count = 1; // how many FLOAT32 used for 'z'
115 
116 #if defined(YARP_BIG_ENDIAN)
117  rosPC_data.is_bigendian = true;
118 #elif defined(YARP_LITTLE_ENDIAN)
119  rosPC_data.is_bigendian = false;
120 #else
121  #error "Cannot detect endianness"
122 #endif
123 
124 #if 0
125  rosPC_data.height=1;
126  rosPC_data.width=pc.size();
127 #else
128  rosPC_data.height=pc.height();
129  rosPC_data.width=pc.width();
130 #endif
131 
132  rosPC_data.point_step = 3*4; //x, y, z
133  rosPC_data.row_step = rosPC_data.point_step*rosPC_data.width; //12 *number of points bytes
134  rosPC_data.is_dense = true; // true means the cloud contains no invalid (NaN/Inf) points; set optimistically here — TODO confirm depthToPC never emits invalid points
135  rosPC_data.data.resize(rosPC_data.row_step*rosPC_data.height);
136 
// Copy loop: the YARP cloud stores each point in 16 bytes while the ROS
// message wants 12 packed bytes per point, so 4 pad bytes are skipped for
// every 12 bytes copied.
// NOTE(review): the source pointer starts at getRawData()+12 and the
// elem%12==0 skip fires already at elem==0 — verify these offsets against
// the actual yarp::sig::DataXYZ memory layout before relying on this
// message's content.
137  const char* ypointer = pc.getRawData()+12;
138  unsigned char* rpointer = rosPC_data.data.data();
139 
140  //yCDebug(LASER_FROM_POINTCLOUD)<< pc.size() << pc.size()*4*4 << pc.dataSizeBytes() << rosPC_data.data.size();
141  size_t elem =0;
142  size_t yelem=0;
143  for (; elem<pc.size()*3*4; elem++)
144  {
145  *rpointer=*ypointer;
146  rpointer++;
147  ypointer++; yelem++;
148  if (elem%12==0) { ypointer+=4; yelem+=4;}
149  // yCDebug(LASER_FROM_POINTCLOUD << "%d" <<ypointer;
150  }
151  //yCDebug(LASER_FROM_POINTCLOUD)<<elem <<yelem;
152 
// Publish only if the topic was successfully opened (see ros_init_pc).
153  if (pointCloud_outTopic) pointCloud_outTopic->write(rosPC_data);
154 }
155 
156 //-------------------------------------------------------------------------------------
157 
159 {
// Device entry point: read configuration, set defaults, open the transform
// client and the RGBD client sub-devices, query the depth-camera intrinsics,
// start the periodic acquisition thread and (optionally) the ROS debug
// publisher. Returns false on any configuration or sub-device failure.
160  Property subConfig;
161  m_info = "LaserFromPointCloud device";
162 
163 #ifdef LASER_DEBUG
// NOTE(review): printf-style format mixed with stream logging — the comma
// operator discards config.toString().c_str(), so only "%s" is printed.
// Should be: yCDebug(LASER_FROM_POINTCLOUD) << config.toString();
164  yCDebug(LASER_FROM_POINTCLOUD) << "%s\n", config.toString().c_str();
165 #endif
166 
// Default operating parameters (overridable by configuration groups below).
167  m_min_distance = 0.1; //m
168  m_max_distance = 5.0; //m
169  m_floor_height = 0.1; //m
170  m_pointcloud_max_distance = 10.0; //m
171  m_ceiling_height = 2; //m
172  m_publish_ros_pc = false;
173 
// Virtual laser: 360 sectors of 1 degree each, covering [0, 360).
174  m_sensorsNum = 360;
175  m_resolution = 1;
176  m_laser_data.resize(m_sensorsNum, 0.0);
177  m_max_angle = 360;
178  m_min_angle = 0;
179 
180  m_pc_stepx=2;
181  m_pc_stepy=2;
182 
// NOTE(review): m_transform_mtrx(4,4) is Matrix::operator()(row,col) — an
// element access, not a resize; it likely reads/writes out of bounds on an
// empty matrix. Presumably m_transform_mtrx.resize(4,4) was intended — confirm.
183  m_transform_mtrx(4,4); m_transform_mtrx.eye();
184 
185 
186  m_ground_frame_id = "/ground_frame";
187  m_camera_frame_id = "/depth_camera_frame";
188 
189  if (config.check("publish_ROS_pointcloud"))
190  {
191  m_publish_ros_pc=true;
192  yCDebug(LASER_FROM_POINTCLOUD) << "User requested to publish ROS pointcloud (debug mode)";
193  }
194 
// Optional POINTCLOUD_QUALITY group: decimation steps for depthToPC.
195  bool bpcr = config.check("POINTCLOUD_QUALITY");
196  if (bpcr != false)
197  {
198  yarp::os::Searchable& pointcloud_quality_config = config.findGroup("POINTCLOUD_QUALITY");
199  if (pointcloud_quality_config.check("x_step")) { m_pc_stepx = pointcloud_quality_config.find("x_step").asFloat64(); }
200  if (pointcloud_quality_config.check("y_step")) { m_pc_stepy = pointcloud_quality_config.find("y_step").asFloat64(); }
201  yCInfo(LASER_FROM_POINTCLOUD) << "Pointcloud decimation step set to:" << m_pc_stepx << m_pc_stepy;
202  }
203 
// Optional Z_CLIPPING_PLANES group: obstacle volume limits and frame ids.
204  bool bpc = config.check("Z_CLIPPING_PLANES");
205  if (bpc != false)
206  {
207  yarp::os::Searchable& pointcloud_clip_config = config.findGroup("Z_CLIPPING_PLANES");
208  if (pointcloud_clip_config.check("floor_height")) {m_floor_height = pointcloud_clip_config.find("floor_height").asFloat64();}
209  if (pointcloud_clip_config.check("ceiling_height")) {m_ceiling_height=pointcloud_clip_config.find("ceiling_height").asFloat64();}
210  if (pointcloud_clip_config.check("max_distance")) { m_pointcloud_max_distance = pointcloud_clip_config.find("max_distance").asFloat64(); }
211  if (pointcloud_clip_config.check("ground_frame_id")) {m_ground_frame_id = pointcloud_clip_config.find("ground_frame_id").asString();}
212  if (pointcloud_clip_config.check("camera_frame_id")) {m_camera_frame_id = pointcloud_clip_config.find("camera_frame_id").asString();}
213  }
214  yCInfo(LASER_FROM_POINTCLOUD) <<"Z clipping planes (floor,ceiling) have been set to ("<< m_floor_height <<","<< m_ceiling_height <<")";
215 
216  if (this->parseConfiguration(config) == false)
217  {
218  yCError(LASER_FROM_POINTCLOUD) << "Error parsing parameters";
219  return false;
220  }
221 
222  //open the tc client
223  Property tcprop;
224  if(!config.check("TRANSFORM_CLIENT"))
225  {
226  yCError(LASER_FROM_POINTCLOUD) << "missing TRANSFORM_CLIENT section in configuration file!";
227  return false;
228  }
229  tcprop.fromString(config.findGroup("TRANSFORM_CLIENT").toString());
230  tcprop.put("device", "transformClient");
231 
232  m_tc_driver.open(tcprop);
233  if (!m_tc_driver.isValid())
234  {
235  yCError(LASER_FROM_POINTCLOUD) << "Error opening PolyDriver check parameters";
236  return false;
237  }
238  m_tc_driver.view(m_iTc);
239  if (!m_iTc)
240  {
241  yCError(LASER_FROM_POINTCLOUD) << "Error opening iFrameTransform interface. Device not available";
242  return false;
243  }
245 
246 
247  //open the rgbd client
248  Property prop;
249  if(!config.check("RGBD_SENSOR_CLIENT"))
250  {
251  yCError(LASER_FROM_POINTCLOUD) << "missing RGBD_SENSOR_CLIENT section in configuration file!";
252  return false;
253  }
254  prop.fromString(config.findGroup("RGBD_SENSOR_CLIENT").toString());
255  prop.put("device", "RGBDSensorClient");
256  //prop.put("ImageCarrier","mjpeg"); //this is set in the ini file
257  //prop.put("DepthCarrier","udp"); //this is set in the ini file
258  m_rgbd_driver.open(prop);
259  if (!m_rgbd_driver.isValid())
260  {
261  yCError(LASER_FROM_POINTCLOUD) << "Error opening PolyDriver check parameters";
262  return false;
263  }
264  m_rgbd_driver.view(m_iRGBD);
265  if (!m_iRGBD)
266  {
267  yCError(LASER_FROM_POINTCLOUD) << "Error opening iRGBD interface. Device not available";
268  return false;
269  }
271 
272  //get parameters data from the camera
// The RGB size is used as the depth size on purpose; see the linked issue.
273  m_depth_width = m_iRGBD->getRgbWidth(); //@@@ this is horrible! See yarp issue: https://github.com/robotology/yarp/issues/2290
274  m_depth_height = m_iRGBD->getRgbHeight(); //@@@ this is horrible! See yarp issue: https://github.com/robotology/yarp/issues/2290
275  bool propintr = m_iRGBD->getDepthIntrinsicParam(m_propIntrinsics);
276  YARP_UNUSED(propintr);
277  yCInfo(LASER_FROM_POINTCLOUD) << "Depth Intrinsics:" << m_propIntrinsics.toString();
278  m_intrinsics.fromProperty(m_propIntrinsics);
279 
280  PeriodicThread::start();
281  yCInfo(LASER_FROM_POINTCLOUD) << "Sensor ready";
282 
283  //init ros
284  if (m_publish_ros_pc)
285  {
286  ros_init_pc();
287  }
288 
289  return true;
290 }
291 
293 {
294  PeriodicThread::stop();
295 
296  if(m_rgbd_driver.isValid())
297  m_rgbd_driver.close();
298 
299  if(m_tc_driver.isValid())
300  m_tc_driver.close();
301 
302  yCInfo(LASER_FROM_POINTCLOUD) << "closed";
303  return true;
304 }
305 
306 bool LaserFromPointCloud::setDistanceRange(double min, double max)
307 {
308  std::lock_guard<std::mutex> guard(m_mutex);
309  m_min_distance = min;
310  m_max_distance = max;
311  return true;
312 }
313 
314 
315 bool LaserFromPointCloud::setScanLimits(double min, double max)
316 {
317  std::lock_guard<std::mutex> guard(m_mutex);
318  yCWarning(LASER_FROM_POINTCLOUD,"setScanLimits not yet implemented");
319  return true;
320 }
321 
323 {
324  std::lock_guard<std::mutex> guard(m_mutex);
325  yCWarning(LASER_FROM_POINTCLOUD,"setHorizontalResolution not yet implemented");
326  return true;
327 }
328 
330 {
331  std::lock_guard<std::mutex> guard(m_mutex);
332  yCWarning(LASER_FROM_POINTCLOUD,"setScanRate not yet implemented");
333  return false;
334 }
335 
336 
338 {
339 #ifdef LASER_DEBUG
340  yCDebug(LASER_FROM_POINTCLOUD) << "thread initialising...\n";
341  yCDebug(LASER_FROM_POINTCLOUD) << "... done!\n");
342 #endif
343 
344  return true;
345 }
346 
348 {
349  for (size_t i=0; i<pc.size(); i++)
350  {
351  auto v1 = pc(i).toVector4();
352  auto v2 = m*v1;
353  pc(i).x=v2(0);
354  pc(i).y=v2(1);
355  pc(i).z=v2(2);
356  }
357 }
358 
360 {
// Periodic acquisition step: grab a depth frame, turn it into a point cloud,
// transform the cloud from the camera frame to the ground frame, clip it to
// the configured obstacle volume, and project the surviving points into the
// 360-sector virtual laser scan (m_laser_data), keeping the nearest obstacle
// per angular sector.
361 #ifdef DEBUG_TIMING
362  static double t3 = yarp::os::Time::now();
363  double t1 = yarp::os::Time::now();
364  yCDebug(LASER_FROM_POINTCLOUD) << "thread period:" <<t1- t3;
365  t3 = yarp::os::Time::now();
366 #endif
367 
// Abort the cycle on any acquisition problem: no frame, null buffer, or a
// frame whose size no longer matches what was read at open() time.
368  bool depth_ok = m_iRGBD->getDepthImage(m_depth_image);
369  if (depth_ok == false)
370  {
371  yCDebug(LASER_FROM_POINTCLOUD) << "getDepthImage failed";
372  return;
373  }
374 
375  if (m_depth_image.getRawImage()==nullptr)
376  {
377  yCDebug(LASER_FROM_POINTCLOUD)<<"invalid image received";
378  return;
379  }
380 
381  if (m_depth_image.width() != m_depth_width ||
382  m_depth_image.height() != m_depth_height)
383  {
384  yCDebug(LASER_FROM_POINTCLOUD)<<"invalid image size: (" << m_depth_image.width() << " " << m_depth_image.height() << ") vs (" << m_depth_width << " " << m_depth_height << ")" ;
385  return;
386  }
387 
// Sentinels used to fill the scan: NaN = no information, Inf = free space.
388  const double myinf =std::numeric_limits<double>::infinity();
389  const double mynan =std::nan("");
390 
391  //compute the point cloud
392  yarp::sig::PointCloud<yarp::sig::DataXYZ> pc = yarp::sig::utils::depthToPC(m_depth_image, m_intrinsics,m_pc_roi,m_pc_stepx,m_pc_stepy);
393 
394 
395  //if (m_publish_ros_pc) {ros_compute_and_send_pc(pc,m_camera_frame_id);}//<-------------------------
396 
397 #ifdef TEST_M
398  //yCDebug(LASER_FROM_POINTCLOUD) << "pc size:" << pc.size();
399 #endif
400 
401  //we compute the transformation matrix from the camera to the laser reference frame
402 
403 #ifdef TEST_M
404  yarp::sig::Vector vvv(3);
405  vvv(0)=-1.57;
406  vvv(1)=0;
407  vvv(2)=-1.57;
408  m = yarp::math::rpy2dcm(vvv);
409  m(2,3)=1.2; //z translation
410 #else
// On lookup failure the previous m_transform_mtrx is reused silently (only a
// warning is logged).
411  bool frame_exists = m_iTc->getTransform(m_camera_frame_id,m_ground_frame_id, m_transform_mtrx);
412  if (frame_exists==false)
413  {
414  yCWarning(LASER_FROM_POINTCLOUD) << "Unable to found m matrix";
415  }
416 #endif
417 
418  //we rototranslate the full pointcloud
419  rotate_pc(pc, m_transform_mtrx);
420 
421  if (m_publish_ros_pc) {ros_compute_and_send_pc(pc,m_ground_frame_id);}//<-------------------------
422 
// Build the rays through the leftmost and rightmost image columns at a depth
// of 1000 mm; once transformed, they bound the camera's horizontal FOV in
// the laser frame.
423  yarp::sig::Vector left(4);
424  left[0] = (0 - m_intrinsics.principalPointX)/m_intrinsics.focalLengthX*1000;
425  left[1] = (0 - m_intrinsics.principalPointY)/m_intrinsics.focalLengthY*1000;
426  left[2] = 1000;
427  left[3] = 1;
428 
429  yarp::sig::Vector right(4);
430  right[0] = (m_depth_image.width() - m_intrinsics.principalPointX)/m_intrinsics.focalLengthX*1000;
431  right[1] = (0 - m_intrinsics.principalPointY)/m_intrinsics.focalLengthY*1000;
432  right[2] = 1000;
433  right[3] = 1;
434 
435  double left_dist;
436  double left_theta;
437  double right_dist;
438  double right_theta;
439  left = m_transform_mtrx * left;
440  right = m_transform_mtrx * right;
441 
442  LaserMeasurementData data_left;
443  data_left.set_cartesian(left[0],left[1]);
444  data_left.get_polar(left_dist, left_theta);
445 
446  LaserMeasurementData data_right;
447  data_right.set_cartesian(right[0],right[1]);
448  data_right.get_polar(right_dist, right_theta);
449 
// Track whether each FOV edge angle wrapped from negative, so the fill
// below knows when the FOV straddles the 0/360 boundary.
450  bool left_elem_neg = 0;
451  bool right_elem_neg = 0;
452 
453  left_theta=left_theta*180/M_PI;
454  right_theta=right_theta*180/M_PI;
455 
456  if (left_theta<0)
457  {
458  left_theta+=360;
459  left_elem_neg = 1;
460  }
461  else if (left_theta>360) left_theta-=360;
462  size_t left_elem= left_theta/m_resolution;
463 
464  if (right_theta<0)
465  {
466  right_theta+=360;
467  right_elem_neg = 1;
468  }
469  else if (right_theta>360) right_theta-=360;
470  size_t right_elem= right_theta/m_resolution;
471 
472  //enter critical section and protect m_laser_data
473  std::lock_guard<std::mutex> guard(m_mutex);
474 #ifdef DEBUG_TIMING
475  double t4 = yarp::os::Time::now();
476 #endif
477  //prepare an empty laserscan vector with the resolution we want
// All sectors start as NaN (no information); sectors inside the camera FOV
// are then marked Inf (free) before obstacles are written in.
478  for (auto it= m_laser_data.begin(); it!=m_laser_data.end(); it++)
479  {
480  *it= mynan;
481  }
482 
483  if ((!left_elem_neg) && (right_elem_neg))
484  {
485  for (size_t i=0; i<left_elem; i++)
486  {
487  m_laser_data[i] = myinf;
488  }
489  for (size_t i=right_elem; i<m_sensorsNum; i++)
490  {
491  m_laser_data[i] = myinf;
492  }
493  }
494  else
495  {
496  for (size_t i=right_elem; i<left_elem; i++)
497  {
498  m_laser_data[i] = myinf;
499  }
500  }
501 
502 
503 
504  for (size_t i=0; i<pc.size(); i++)
505  {
506 
507 #ifdef TEST_M
508  //yCDebug(LASER_FROM_POINTCLOUD) << pc(i).toString(5,5);
509 #endif
510 
511  //we obtain a point from the point cloud
512  yarp::sig::Vector vec= pc(i).toVector4();
513 
514  //we check if the point is in the volume that we want to consider as possibile obstacle
515  if (vec[2]>m_floor_height && vec[2]<m_ceiling_height &&
516  vec[0]<m_pointcloud_max_distance)
517  {
518 #ifdef TEST_M
519  // yCDebug(LASER_FROM_POINTCLOUD) << "This point is ok:" << i <<"its z is:" << tvec[2];
520 #endif
521  //by removing z, we project the 3d point on the 2D plane on which the laser works.
522  //we use LaserMeasurementData struct to easily obtain a polar representation from a cartesian representation
524  data.set_cartesian(vec[0],vec[1]);
525 
526  //get the polar representation
527  double distance;
528  double theta;
529  data.get_polar(distance, theta);
530 
531  //compute the right element of the vector where to put distance data. This is done by clusterizing angles, depending on the laser resolution.
532  theta=theta*180/M_PI;
533  if (theta<0) theta+=360;
534  else if (theta>360) theta-=360;
535  size_t elem= theta/m_resolution;
536  if (elem>=m_laser_data.size())
537  {
// NOTE(review): this message prints the point index i, but the quantity
// actually compared against m_laser_data.size() is elem.
538  yCError(LASER_FROM_POINTCLOUD) << "Error in computing elem" << i << ">" << m_laser_data.size();
539  continue;
540  }
541 
542 #ifdef TEST_M
543  // yCDebug(LASER_FROM_POINTCLOUD) <<theta << elem <<distance;
544 #endif
545  //update the vector of measurements, putting the NEAREST obstacle in right element of the vector.
// NOTE(review): distance < NaN is always false, so sectors still holding the
// NaN sentinel (outside the camera FOV marked above) never receive a
// measurement — presumably intentional (no-information sectors stay NaN);
// confirm this is the desired contract.
546  if (distance<m_laser_data[elem])
547  {
548  m_laser_data[elem]=distance;
549  }
550  }
551  else
552  {
553 #ifdef TEST_M
554  //yCDebug(LASER_FROM_POINTCLOUD) << "this point is out of considered volume:" <<i << " z:" << vec[2];
555 #endif
556  }
557  }
// Apply the configured min/max distance and angle limits to the fresh scan.
558  applyLimitsOnLaserData();
559 
560 #ifdef DEBUG_TIMING
561  double t2 = yarp::os::Time::now();
562  yCDebug(LASER_FROM_POINTCLOUD) << "tot run:" << t2 - t1 << "crit run:" << t2-t4;
563 #endif
564 
565  return;
566 }
567 
569 {
570 #ifdef LASER_DEBUG
571  yCDebug(LASER_FROM_POINTCLOUD) << "Thread releasing...";
572  yCDebug(LASER_FROM_POINTCLOUD) << "... done.";
573 #endif
574 
575  return;
576 }
void run() override
Loop function.
bool setScanRate(double rate) override
set the scan rate (scans per seconds)
bool setDistanceRange(double min, double max) override
set the device detection range.
bool close() override
Close the DeviceDriver.
bool setHorizontalResolution(double step) override
set the angular step between two measurements (if available)
void threadRelease() override
Release method.
bool setScanLimits(double min, double max) override
set the scan angular range.
bool open(yarp::os::Searchable &config) override
Open the DeviceDriver.
bool threadInit() override
Initialization method.
void set_cartesian(const double x, const double y)
void get_polar(double &rho, double &theta)
std::string toString() const override
Gives a human-readable textual representation of the bottle.
Definition: Bottle.cpp:214
The Node class.
Definition: Node.h:27
A class for storing options and configuration information.
Definition: Property.h:37
void fromString(const std::string &txt, bool wipe=true)
Interprets a string as a list of properties.
Definition: Property.cpp:1046
void put(const std::string &key, const std::string &value)
Associate the given key with the given string.
Definition: Property.cpp:998
A port specialized for publishing data of a constant type on a topic.
Definition: Publisher.h:26
A base class for nested structures that can be searched.
Definition: Searchable.h:69
virtual Value & find(const std::string &key) const =0
Gets a value corresponding to a given keyword.
virtual bool check(const std::string &key) const =0
Check if there exists a property of the given name.
virtual std::string toString() const =0
Return a standard text representation of the content of the object.
virtual Bottle & findGroup(const std::string &key) const =0
Gets a list corresponding to a given keyword.
virtual yarp::conf::float64_t asFloat64() const
Get 64-bit floating point value.
Definition: Value.cpp:225
virtual std::string asString() const
Get string value.
Definition: Value.cpp:237
std::uint32_t nsec
Definition: TickTime.h:33
std::uint32_t sec
Definition: TickTime.h:32
yarp::rosmsg::std_msgs::Header header
Definition: PointCloud2.h:60
std::vector< std::uint8_t > data
Definition: PointCloud2.h:67
std::vector< yarp::rosmsg::sensor_msgs::PointField > fields
Definition: PointCloud2.h:63
yarp::rosmsg::TickTime stamp
Definition: Header.h:48
A class for a Matrix.
Definition: Matrix.h:46
virtual size_t width() const
virtual size_t height() const
The PointCloud class.
Definition: PointCloud.h:27
const char * getRawData() const override
Get the pointer to the data.
Definition: PointCloud.h:85
size_t size() const override
Definition: PointCloud.h:110
#define M_PI
void ros_compute_and_send_pc(const yarp::sig::PointCloud< yarp::sig::DataXYZ > &pc, string frame_id)
const yarp::os::LogComponent & LASER_FROM_POINTCLOUD()
yarp::os::Publisher< yarp::rosmsg::sensor_msgs::PointCloud2 > * pointCloud_outTopic
yarp::os::Node * rosNode
void ros_init_pc()
void rotate_pc(yarp::sig::PointCloud< yarp::sig::DataXYZ > &pc, const yarp::sig::Matrix &m)
#define yCInfo(component,...)
Definition: LogComponent.h:135
#define yCError(component,...)
Definition: LogComponent.h:157
#define yCWarning(component,...)
Definition: LogComponent.h:146
#define yCDebug(component,...)
Definition: LogComponent.h:112
#define YARP_LOG_COMPONENT(name,...)
Definition: LogComponent.h:80
yarp::rosmsg::sensor_msgs::PointCloud2 PointCloud2
Definition: PointCloud2.h:24
yarp::sig::Matrix rpy2dcm(const yarp::sig::Vector &rpy)
Converts roll-pitch-yaw angles in the corresponding dcm (direction cosine matrix) rotation matrix (de...
Definition: math.cpp:818
double now()
Return the current time in seconds, relative to an arbitrary starting point.
Definition: Time.cpp:124
void delay(double seconds)
Wait for a certain number of seconds.
Definition: Time.cpp:114
yarp::sig::PointCloud< yarp::sig::DataXYZ > depthToPC(const yarp::sig::ImageOf< yarp::sig::PixelFloat > &depth, const yarp::sig::IntrinsicParams &intrinsic)
depthToPC, compute the PointCloud given depth image and the intrinsic parameters of the camera.
The main, catch-all namespace for YARP.
Definition: environment.h:18
#define YARP_UNUSED(var)
Definition: api.h:159