laserFromPointCloud.cpp
/*
 * SPDX-FileCopyrightText: 2006-2021 Istituto Italiano di Tecnologia (IIT)
 * SPDX-License-Identifier: LGPL-2.1-or-later
 */

#define _USE_MATH_DEFINES

#include "laserFromPointCloud.h"

#include <yarp/os/Time.h>
#include <yarp/os/Log.h>
#include <yarp/os/LogStream.h>
#include <yarp/math/Math.h>

#include <cmath>
#include <cstring>
#include <cstdlib>
#include <iostream>
#include <limits>
#include <mutex>


#ifndef DEG2RAD
#define DEG2RAD M_PI/180.0
#endif

using namespace yarp::sig;

YARP_LOG_COMPONENT(LASER_FROM_POINTCLOUD, "yarp.devices.laserFromPointCloud")

/*

yarpdev --device rangefinder2D_nws_yarp --subdevice laserFromPointCloud \
        --SENSOR \
        --RGBD_SENSOR_CLIENT::localImagePort /clientRgbPort:i \
        --RGBD_SENSOR_CLIENT::localDepthPort /clientDepthPort:i \
        --RGBD_SENSOR_CLIENT::localRpcPort /clientRpcPort \
        --RGBD_SENSOR_CLIENT::remoteImagePort /SIM_CER_ROBOT/depthCamera/rgbImage:o \
        --RGBD_SENSOR_CLIENT::remoteDepthPort /SIM_CER_ROBOT/depthCamera/depthImage:o \
        --RGBD_SENSOR_CLIENT::remoteRpcPort /SIM_CER_ROBOT/depthCamera/rpc:i \
        --TRANSFORM_CLIENT::local /laserFromDepth/tfClient \
        --TRANSFORM_CLIENT::remote /transformServer \
        --Z_CLIPPING_PLANES::floor_height 0.15 \
        --Z_CLIPPING_PLANES::ceiling_height 3.0 \
        --Z_CLIPPING_PLANES::camera_frame_id depth_center \
        --Z_CLIPPING_PLANES::ground_frame_id ground_link \
        --period 0.10 \
        --name /outlaser:o
*/
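/*
A quick way to inspect the generated scan, assuming the output port name used above,
is to dump it from the command line (the "..." asks yarp for an automatic port name):

   yarp read ... /outlaser:o
*/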

//-------------------------------------------------------------------------------------

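/*
 * open() parses the optional POINTCLOUD_QUALITY group (point cloud decimation steps)
 * and the Z_CLIPPING_PLANES group (floor/ceiling heights, max distance, frame ids),
 * then opens two subdevices: a transformClient (TRANSFORM_CLIENT group) used to
 * retrieve the camera-to-ground transform, and an RGBDSensorClient
 * (RGBD_SENSOR_CLIENT group) used to stream the depth image.
 */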
bool LaserFromPointCloud::open(yarp::os::Searchable& config)
{
    m_info = "LaserFromPointCloud device";

#ifdef LASER_DEBUG
    yCDebug(LASER_FROM_POINTCLOUD) << config.toString();
#endif

    m_min_distance = 0.1; //m
    m_max_distance = 5.0; //m
    m_floor_height = 0.1; //m
    m_pointcloud_max_distance = 10.0; //m
    m_ceiling_height = 2; //m

    m_sensorsNum = 360;
    m_resolution = 1;
    m_laser_data.resize(m_sensorsNum, 0.0);
    m_max_angle = 360;
    m_min_angle = 0;

    m_pc_stepx = 2;
    m_pc_stepy = 2;

    m_transform_mtrx.resize(4,4);
    m_transform_mtrx.eye();


    m_ground_frame_id = "/ground_frame";
    m_camera_frame_id = "/depth_camera_frame";


    bool bpcr = config.check("POINTCLOUD_QUALITY");
    if (bpcr != false)
    {
        yarp::os::Searchable& pointcloud_quality_config = config.findGroup("POINTCLOUD_QUALITY");
        if (pointcloud_quality_config.check("x_step")) { m_pc_stepx = pointcloud_quality_config.find("x_step").asFloat64(); }
        if (pointcloud_quality_config.check("y_step")) { m_pc_stepy = pointcloud_quality_config.find("y_step").asFloat64(); }
        yCInfo(LASER_FROM_POINTCLOUD) << "Pointcloud decimation step set to:" << m_pc_stepx << m_pc_stepy;
    }

    bool bpc = config.check("Z_CLIPPING_PLANES");
    if (bpc != false)
    {
        yarp::os::Searchable& pointcloud_clip_config = config.findGroup("Z_CLIPPING_PLANES");
        if (pointcloud_clip_config.check("floor_height")) { m_floor_height = pointcloud_clip_config.find("floor_height").asFloat64(); }
        if (pointcloud_clip_config.check("ceiling_height")) { m_ceiling_height = pointcloud_clip_config.find("ceiling_height").asFloat64(); }
        if (pointcloud_clip_config.check("max_distance")) { m_pointcloud_max_distance = pointcloud_clip_config.find("max_distance").asFloat64(); }
        if (pointcloud_clip_config.check("ground_frame_id")) { m_ground_frame_id = pointcloud_clip_config.find("ground_frame_id").asString(); }
        if (pointcloud_clip_config.check("camera_frame_id")) { m_camera_frame_id = pointcloud_clip_config.find("camera_frame_id").asString(); }
    }
    yCInfo(LASER_FROM_POINTCLOUD) << "Z clipping planes (floor,ceiling) have been set to (" << m_floor_height << "," << m_ceiling_height << ")";

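    // e.g. a corresponding ini fragment (values are only an example):
    //   [POINTCLOUD_QUALITY]
    //   x_step          2
    //   y_step          2
    //   [Z_CLIPPING_PLANES]
    //   floor_height    0.15
    //   ceiling_height  3.0
    //   camera_frame_id depth_center
    //   ground_frame_id ground_link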
    if (this->parseConfiguration(config) == false)
    {
        yCError(LASER_FROM_POINTCLOUD) << "Error parsing parameters";
        return false;
    }

    //open the tc client
    Property tcprop;
    if (!config.check("TRANSFORM_CLIENT"))
    {
        yCError(LASER_FROM_POINTCLOUD) << "missing TRANSFORM_CLIENT section in configuration file!";
        return false;
    }
    tcprop.fromString(config.findGroup("TRANSFORM_CLIENT").toString());
    tcprop.put("device", "transformClient");

    m_tc_driver.open(tcprop);
    if (!m_tc_driver.isValid())
    {
        yCError(LASER_FROM_POINTCLOUD) << "Error opening PolyDriver check parameters";
        return false;
    }
    m_tc_driver.view(m_iTc);
    if (!m_iTc)
    {
        yCError(LASER_FROM_POINTCLOUD) << "Error opening iFrameTransform interface. Device not available";
        return false;
    }


    //open the rgbd client
    Property prop;
    if (!config.check("RGBD_SENSOR_CLIENT"))
    {
        yCError(LASER_FROM_POINTCLOUD) << "missing RGBD_SENSOR_CLIENT section in configuration file!";
        return false;
    }
    prop.fromString(config.findGroup("RGBD_SENSOR_CLIENT").toString());
    prop.put("device", "RGBDSensorClient");
    //prop.put("ImageCarrier","mjpeg"); //this is set in the ini file
    //prop.put("DepthCarrier","udp");   //this is set in the ini file
    m_rgbd_driver.open(prop);
    if (!m_rgbd_driver.isValid())
    {
        yCError(LASER_FROM_POINTCLOUD) << "Error opening PolyDriver check parameters";
        return false;
    }
    m_rgbd_driver.view(m_iRGBD);
    if (!m_iRGBD)
    {
        yCError(LASER_FROM_POINTCLOUD) << "Error opening iRGBD interface. Device not available";
        return false;
    }

    //get parameters data from the camera
    m_depth_width = m_iRGBD->getRgbWidth();   //@@@ this is horrible! See yarp issue: https://github.com/robotology/yarp/issues/2290
    m_depth_height = m_iRGBD->getRgbHeight(); //@@@ this is horrible! See yarp issue: https://github.com/robotology/yarp/issues/2290
    bool propintr = m_iRGBD->getDepthIntrinsicParam(m_propIntrinsics);
    YARP_UNUSED(propintr);
    yCInfo(LASER_FROM_POINTCLOUD) << "Depth Intrinsics:" << m_propIntrinsics.toString();
    m_intrinsics.fromProperty(m_propIntrinsics);

    PeriodicThread::start();
    yCInfo(LASER_FROM_POINTCLOUD) << "Sensor ready";

    return true;
}

bool LaserFromPointCloud::close()
{
    PeriodicThread::stop();

    if (m_rgbd_driver.isValid()) {
        m_rgbd_driver.close();
    }

    if (m_tc_driver.isValid()) {
        m_tc_driver.close();
    }

    yCInfo(LASER_FROM_POINTCLOUD) << "closed";
    return true;
}

bool LaserFromPointCloud::setDistanceRange(double min, double max)
{
    std::lock_guard<std::mutex> guard(m_mutex);
    m_min_distance = min;
    m_max_distance = max;
    return true;
}


bool LaserFromPointCloud::setScanLimits(double min, double max)
{
    std::lock_guard<std::mutex> guard(m_mutex);
    yCWarning(LASER_FROM_POINTCLOUD, "setScanLimits not yet implemented");
    return true;
}

bool LaserFromPointCloud::setHorizontalResolution(double step)
{
    std::lock_guard<std::mutex> guard(m_mutex);
    yCWarning(LASER_FROM_POINTCLOUD, "setHorizontalResolution not yet implemented");
    return true;
}

bool LaserFromPointCloud::setScanRate(double rate)
{
    std::lock_guard<std::mutex> guard(m_mutex);
    yCWarning(LASER_FROM_POINTCLOUD, "setScanRate not yet implemented");
    return false;
}


bool LaserFromPointCloud::threadInit()
{
#ifdef LASER_DEBUG
    yCDebug(LASER_FROM_POINTCLOUD) << "thread initialising...\n";
    yCDebug(LASER_FROM_POINTCLOUD) << "... done!\n";
#endif

    return true;
}

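// Helper: applies the 4x4 homogeneous transform m to every point of the cloud, in place.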
void rotate_pc(yarp::sig::PointCloud<yarp::sig::DataXYZ>& pc, const yarp::sig::Matrix& m)
{
    for (size_t i = 0; i < pc.size(); i++)
    {
        auto v1 = pc(i).toVector4();
        auto v2 = m * v1;
        pc(i).x = v2(0);
        pc(i).y = v2(1);
        pc(i).z = v2(2);
    }
}

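/*
 * acquireDataFromHW() performs the actual conversion:
 *  1. grab a depth frame and turn it into a 3D point cloud;
 *  2. rototranslate the cloud from the camera frame to the ground frame;
 *  3. mark the angular sector covered by the camera field of view as free,
 *     leaving the rest of the 360 deg scan empty (NaN);
 *  4. keep only the points between floor_height and ceiling_height, project them
 *     on the laser plane and store, for each angular bin, the nearest obstacle.
 */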
bool LaserFromPointCloud::acquireDataFromHW()
{
#ifdef DEBUG_TIMING
    static double t3 = yarp::os::Time::now();
    double t1 = yarp::os::Time::now();
    yCDebug(LASER_FROM_POINTCLOUD) << "thread period:" << t1 - t3;
    t3 = yarp::os::Time::now();
#endif

    bool depth_ok = m_iRGBD->getDepthImage(m_depth_image);
    if (depth_ok == false)
    {
        yCError(LASER_FROM_POINTCLOUD) << "getDepthImage failed";
        return false;
    }

    if (m_depth_image.getRawImage() == nullptr)
    {
        yCError(LASER_FROM_POINTCLOUD) << "invalid image received";
        return false;
    }

    if ((m_depth_image.width() != m_depth_width) ||
        (m_depth_image.height() != m_depth_height))
    {
        yCError(LASER_FROM_POINTCLOUD) << "invalid image size: (" << m_depth_image.width() << " " << m_depth_image.height() << ") vs (" << m_depth_width << " " << m_depth_height << ")";
        return false;
    }

    const double myinf = std::numeric_limits<double>::infinity();
    const double mynan = std::nan("");

    //compute the point cloud
    yarp::sig::PointCloud<yarp::sig::DataXYZ> pc = yarp::sig::utils::depthToPC(m_depth_image, m_intrinsics, m_pc_roi, m_pc_stepx, m_pc_stepy);


#ifdef TEST_M
    //yCDebug(LASER_FROM_POINTCLOUD) << "pc size:" << pc.size();
#endif

    //we compute the transformation matrix from the camera to the laser reference frame

#ifdef TEST_M
    yarp::sig::Vector vvv(3);
    vvv(0) = -1.57;
    vvv(1) = 0;
    vvv(2) = -1.57;
    yarp::sig::Matrix& m = m_transform_mtrx;
    m = yarp::math::rpy2dcm(vvv);
    m(2, 3) = 1.2; //z translation
#else
    bool frame_exists = m_iTc->getTransform(m_ground_frame_id, m_camera_frame_id, m_transform_mtrx);
    if (frame_exists == false)
    {
        yCWarning(LASER_FROM_POINTCLOUD) << "Unable to find the transform matrix";
    }
#endif

    //we rototranslate the full pointcloud
    rotate_pc(pc, m_transform_mtrx);

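    // 'left' and 'right' are the rays through the two horizontal borders of the image,
    // taken at a depth of 1000 mm and expressed in the ground frame: their polar angles
    // delimit the sector of the 360 deg scan actually covered by the camera field of view.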
    yarp::sig::Vector left(4);
    left[0] = (0 - m_intrinsics.principalPointX) / m_intrinsics.focalLengthX * 1000;
    left[1] = (0 - m_intrinsics.principalPointY) / m_intrinsics.focalLengthY * 1000;
    left[2] = 1000;
    left[3] = 1;

    yarp::sig::Vector right(4);
    right[0] = (m_depth_width - m_intrinsics.principalPointX) / m_intrinsics.focalLengthX * 1000;
    right[1] = (0 - m_intrinsics.principalPointY) / m_intrinsics.focalLengthY * 1000;
    right[2] = 1000;
    right[3] = 1;

    double left_dist;
    double left_theta;
    double right_dist;
    double right_theta;
    left = m_transform_mtrx * left;
    right = m_transform_mtrx * right;

    LaserMeasurementData data_left;
    data_left.set_cartesian(left[0], left[1]);
    data_left.get_polar(left_dist, left_theta);

    LaserMeasurementData data_right;
    data_right.set_cartesian(right[0], right[1]);
    data_right.get_polar(right_dist, right_theta);

    bool left_elem_neg = 0;
    bool right_elem_neg = 0;

    left_theta = left_theta * 180 / M_PI;
    right_theta = right_theta * 180 / M_PI;

    if (left_theta < 0)
    {
        left_theta += 360;
        left_elem_neg = 1;
    } else if (left_theta > 360) {
        left_theta -= 360;
    }
    size_t left_elem = left_theta / m_resolution;

    if (right_theta < 0)
    {
        right_theta += 360;
        right_elem_neg = 1;
    } else if (right_theta > 360) {
        right_theta -= 360;
    }
    size_t right_elem = right_theta / m_resolution;

    //enter critical section and protect m_laser_data
    std::lock_guard<std::mutex> guard(m_mutex);
#ifdef DEBUG_TIMING
    double t4 = yarp::os::Time::now();
#endif
    //prepare an empty laserscan vector with the resolution we want
    for (auto it = m_laser_data.begin(); it != m_laser_data.end(); it++)
    {
        *it = mynan;
    }

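    // angles never seen by the camera stay NaN; angles inside the camera field of view
    // are initialized to infinity (free space) and are overwritten below with the
    // nearest obstacle found, if any.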
    if ((!left_elem_neg) && (right_elem_neg))
    {
        for (size_t i = 0; i < left_elem; i++)
        {
            m_laser_data[i] = myinf;
        }
        for (size_t i = right_elem; i < m_sensorsNum; i++)
        {
            m_laser_data[i] = myinf;
        }
    }
    else
    {
        for (size_t i = right_elem; i < left_elem; i++)
        {
            m_laser_data[i] = myinf;
        }
    }


    for (size_t i = 0; i < pc.size(); i++)
    {

#ifdef TEST_M
        //yCDebug(LASER_FROM_POINTCLOUD) << pc(i).toString(5,5);
#endif

        //we obtain a point from the point cloud
        yarp::sig::Vector vec = pc(i).toVector4();

        //we check if the point is in the volume that we want to consider as possible obstacle
        if (vec[2] > m_floor_height && vec[2] < m_ceiling_height &&
            std::sqrt(vec[0] * vec[0] + vec[1] * vec[1]) < m_pointcloud_max_distance)
        {
#ifdef TEST_M
            // yCDebug(LASER_FROM_POINTCLOUD) << "This point is ok:" << i << " its z is:" << vec[2];
#endif
            //by removing z, we project the 3d point on the 2D plane on which the laser works.
            //we use LaserMeasurementData struct to easily obtain a polar representation from a cartesian representation
            LaserMeasurementData data;
            data.set_cartesian(vec[0], vec[1]);

            //get the polar representation
            double distance;
            double theta;
            data.get_polar(distance, theta);

            //compute the right element of the vector where to put distance data. This is done by clusterizing angles, depending on the laser resolution.
            theta = theta * 180 / M_PI;
            if (theta < 0) {
                theta += 360;
            } else if (theta > 360) {
                theta -= 360;
            }
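            // e.g. with m_resolution = 1 deg, a point at theta = 123.4 deg falls into element 123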
            size_t elem = theta / m_resolution;
            if (elem >= m_laser_data.size())
            {
                yCError(LASER_FROM_POINTCLOUD) << "Error in computing elem" << elem << ">=" << m_laser_data.size();
                continue;
            }

#ifdef TEST_M
            // yCDebug(LASER_FROM_POINTCLOUD) << theta << elem << distance;
#endif
            //update the vector of measurements, putting the NEAREST obstacle in right element of the vector.
            if (distance < m_laser_data[elem])
            {
                m_laser_data[elem] = distance;
            }
        }
        else
        {
#ifdef TEST_M
            //yCDebug(LASER_FROM_POINTCLOUD) << "this point is out of considered volume:" << i << " z:" << vec[2];
#endif
        }
    }


#ifdef DEBUG_TIMING
    double t2 = yarp::os::Time::now();
    yCDebug(LASER_FROM_POINTCLOUD) << "tot run:" << t2 - t1 << "crit run:" << t2 - t4;
#endif

    return true;
}

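// Periodic loop: updateLidarData() is the base-class helper that refreshes the scan
// (grabbing new data from the hardware) and applies the configured limits.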
void LaserFromPointCloud::run()
{
    m_mutex.lock();
    updateLidarData();
    m_mutex.unlock();
}

void LaserFromPointCloud::threadRelease()
{
#ifdef LASER_DEBUG
    yCDebug(LASER_FROM_POINTCLOUD) << "Thread releasing...";
    yCDebug(LASER_FROM_POINTCLOUD) << "... done.";
#endif

    return;
}