YARP
Yet Another Robot Platform
realsense2Driver.cpp
Go to the documentation of this file.
1 /*
2  * Copyright (C) 2006-2020 Istituto Italiano di Tecnologia (IIT)
3  * All rights reserved.
4  *
5  * This software may be modified and distributed under the terms of the
6  * BSD-3-Clause license. See the accompanying LICENSE file for details.
7  */
8 
9 #include <cmath>
10 #include <algorithm>
11 #include <iomanip>
12 #include <cstdint>
13 
14 #include <yarp/os/LogComponent.h>
15 #include <yarp/os/Value.h>
16 #include <yarp/sig/ImageUtils.h>
17 
18 #include <librealsense2/rsutil.h>
19 #include "realsense2Driver.h"
20 
21 using namespace yarp::dev;
22 using namespace yarp::sig;
23 using namespace yarp::os;
24 
25 using namespace std;
26 
27 namespace {
28 YARP_LOG_COMPONENT(REALSENSE2, "yarp.device.realsense2")
29 }
30 
31 
// Keys used to look up the driver's configuration parameters in params_map
// (they match the option names accepted in the [SETTINGS] section).
constexpr char accuracy [] = "accuracy";
constexpr char clipPlanes [] = "clipPlanes";
constexpr char depthRes [] = "depthResolution";
constexpr char rgbRes [] = "rgbResolution";
constexpr char framerate [] = "framerate";
constexpr char enableEmitter [] = "enableEmitter";
constexpr char needAlignment [] = "needAlignment";
constexpr char alignmentFrame [] = "alignmentFrame";
40 
41 
// Table of the configurable RGBD parameters, keyed by the name constants above.
// NOTE(review): the initializer entries are not visible in this view of the
// file (extraction elided them) — confirm against the full source.
static std::map<std::string, RGBDSensorParamParser::RGBDParam> params_map =
{
};
53 
// Allowed values of the `alignmentFrame` parameter mapped onto librealsense
// stream ids; "None" maps to RS2_STREAM_ANY, used elsewhere in this file as
// the "no alignment requested" sentinel.
static const std::map<std::string, rs2_stream> stringRSStreamMap {
    {"None", RS2_STREAM_ANY},
    {"RGB", RS2_STREAM_COLOR},
    {"Depth", RS2_STREAM_DEPTH}
};
59 
60 
61 static std::string get_device_information(const rs2::device& dev)
62 {
63 
64  std::stringstream ss;
65  ss << "Device information: " << std::endl;
66 
67  for (int i = 0; i < static_cast<int>(RS2_CAMERA_INFO_COUNT); i++)
68  {
69  auto info_type = static_cast<rs2_camera_info>(i);
70  ss << " " << std::left << std::setw(20) << info_type << " : ";
71 
72  if (dev.supports(info_type))
73  ss << dev.get_info(info_type) << std::endl;
74  else
75  ss << "N/A" << std::endl;
76  }
77  return ss.str();
78 }
79 
80 
81 static void print_supported_options(const rs2::sensor& sensor)
82 {
83  // Sensors usually have several options to control their properties
84  // such as Exposure, Brightness etc.
85 
86  if (sensor.is<rs2::depth_sensor>())
87  yCInfo(REALSENSE2) << "Depth sensor supports the following options:\n";
88  else if (sensor.get_stream_profiles()[0].stream_type() == RS2_STREAM_COLOR)
89  yCInfo(REALSENSE2) << "RGB camera supports the following options:\n";
90  else
91  yCInfo(REALSENSE2) << "Sensor supports the following options:\n";
92 
93  // The following loop shows how to iterate over all available options
94  // Starting from 0 until RS2_OPTION_COUNT (exclusive)
95  for (int i = 0; i < static_cast<int>(RS2_OPTION_COUNT); i++)
96  {
97  auto option_type = static_cast<rs2_option>(i);
98  //SDK enum types can be streamed to get a string that represents them
99 
100  // To control an option, use the following api:
101 
102  // First, verify that the sensor actually supports this option
103  if (sensor.supports(option_type))
104  {
105  std::cout << " " << option_type;
106  std::cout << std::endl;
107 
108  // Get a human readable description of the option
109  const char* description = sensor.get_option_description(option_type);
110  std::cout << " Description : " << description << std::endl;
111 
112  // Get the current value of the option
113  float current_value = sensor.get_option(option_type);
114  std::cout << " Current Value : " << current_value << std::endl;
115  }
116  }
117 
118  std::cout<<std::endl;
119 }
120 
121 static bool isSupportedFormat(const rs2::sensor &sensor, const int width, const int height, const int fps, bool verbose = false)
122 {
123  bool ret = false;
124  std::vector<rs2::stream_profile> stream_profiles = sensor.get_stream_profiles();
125 
126  std::map<std::pair<rs2_stream, int>, int> unique_streams;
127  for (auto&& sp : stream_profiles)
128  {
129  unique_streams[std::make_pair(sp.stream_type(), sp.stream_index())]++;
130  }
131 
132  if (verbose)
133  {
134  std::cout << "Sensor consists of " << unique_streams.size() << " streams: " << std::endl;
135  for (size_t i = 0; i < unique_streams.size(); i++)
136  {
137  auto it = unique_streams.begin();
138  std::advance(it, i);
139  std::cout << " - " << it->first.first << " #" << it->first.second << std::endl;
140  }
141  std::cout << "Sensor provides the following stream profiles:" << std::endl;
142  }
143 
144  //Next, we go over all the stream profiles and print the details of each one
145 
146  int profile_num = 0;
147  for (const rs2::stream_profile& stream_profile : stream_profiles)
148  {
149  if (verbose)
150  {
151  rs2_stream stream_data_type = stream_profile.stream_type();
152 
153  int stream_index = stream_profile.stream_index();
154 
155  std::cout << std::setw(3) << profile_num << ": " << stream_data_type << " #" << stream_index;
156  }
157 
158  // As noted, a stream is an abstraction.
159  // In order to get additional data for the specific type of a
160  // stream, a mechanism of "Is" and "As" is provided:
161  if (stream_profile.is<rs2::video_stream_profile>()) //"Is" will test if the type tested is of the type given
162  {
163  // "As" will try to convert the instance to the given type
164  rs2::video_stream_profile video_stream_profile = stream_profile.as<rs2::video_stream_profile>();
165 
166  // After using the "as" method we can use the new data type
167  // for additinal operations:
168  if (verbose)
169  {
170  std::cout << " (Video Stream: " << video_stream_profile.format() << " " <<
171  video_stream_profile.width() << "x" << video_stream_profile.height() << "@ " << video_stream_profile.fps() << "Hz)";
172  std::cout << std::endl;
173  }
174 
175  if(video_stream_profile.width() == width && video_stream_profile.height() == height && video_stream_profile.fps() == fps)
176  ret=true;
177  }
178  profile_num++;
179  }
180 
181 
182  return ret;
183 }
184 
185 static bool optionPerc2Value(rs2_option option,const rs2::sensor* sensor, const float& perc, float& value)
186 {
187  if (!sensor)
188  {
189  return false;
190  }
191  try
192  {
193  rs2::option_range optionRange = sensor->get_option_range(option);
194  value =(float) (perc * (optionRange.max - optionRange.min) + optionRange.min);
195 
196  }
197  catch (const rs2::error& e)
198  {
199  // Some options can only be set while the camera is streaming,
200  // and generally the hardware might fail so it is good practice to catch exceptions from set_option
201  yCError(REALSENSE2) << "Failed to get option " << option << " range. (" << e.what() << ")";
202  return false;
203  }
204 
205  return true;
206 }
207 
208 static bool optionValue2Perc(rs2_option option,const rs2::sensor* sensor, float& perc, const float& value)
209 {
210  if (!sensor)
211  {
212  return false;
213  }
214  try
215  {
216  rs2::option_range optionRange = sensor->get_option_range(option);
217  perc =(float) ((value - optionRange.min) / (optionRange.max - optionRange.min));
218 
219  }
220  catch (const rs2::error& e)
221  {
222  // Some options can only be set while the camera is streaming,
223  // and generally the hardware might fail so it is good practice to catch exceptions from set_option
224  yCError(REALSENSE2) << "Failed to get option " << option << " range. (" << e.what() << ")";
225  return false;
226  }
227 
228  return true;
229 }
230 
231 
232 
233 static bool setOption(rs2_option option,const rs2::sensor* sensor, float value)
234 {
235 
236  if (!sensor)
237  {
238  return false;
239  }
240 
241  // First, verify that the sensor actually supports this option
242  if (!sensor->supports(option))
243  {
244  yCError(REALSENSE2) << "The option" << rs2_option_to_string(option) << "is not supported by this sensor";
245  return false;
246  }
247 
248  // To set an option to a different value, we can call set_option with a new value
249  try
250  {
251  sensor->set_option(option, value);
252  }
253  catch (const rs2::error& e)
254  {
255  // Some options can only be set while the camera is streaming,
256  // and generally the hardware might fail so it is good practice to catch exceptions from set_option
257  yCError(REALSENSE2) << "Failed to set option " << rs2_option_to_string(option) << ". (" << e.what() << ")";
258  return false;
259  }
260  return true;
261 }
262 
// Read the current value of a sensor option into `value`.
// @return false when the sensor is null, the option is unsupported, or the
//         hardware query throws.
static bool getOption(rs2_option option,const rs2::sensor *sensor, float &value)
{
    if (!sensor)
    {
        return false;
    }

    // First, verify that the sensor actually supports this option
    if (!sensor->supports(option))
    {
        yCError(REALSENSE2) << "The option" << rs2_option_to_string(option) << "is not supported by this sensor";
        return false;
    }

    // Query the option's current value (the original comment said "set",
    // but this is the read path).
    try
    {
        value = sensor->get_option(option);
    }
    catch (const rs2::error& e)
    {
        // Some options can only be read while the camera is streaming,
        // and generally the hardware might fail so it is good practice to catch exceptions from set_option
        yCError(REALSENSE2) << "Failed to get option " << rs2_option_to_string(option) << ". (" << e.what() << ")";
        return false;
    }
    return true;
}
291 
292 static int pixFormatToCode(const rs2_format p)
293 {
294  switch(p)
295  {
296  case (RS2_FORMAT_RGB8):
297  return VOCAB_PIXEL_RGB;
298 
299  case (RS2_FORMAT_BGR8):
300  return VOCAB_PIXEL_BGR;
301 
302  case (RS2_FORMAT_Z16):
303  return VOCAB_PIXEL_MONO16;
304 
305  case (RS2_FORMAT_DISPARITY16):
306  return VOCAB_PIXEL_MONO16;
307 
308  case (RS2_FORMAT_RGBA8):
309  return VOCAB_PIXEL_RGBA;
310 
311  case (RS2_FORMAT_BGRA8):
312  return VOCAB_PIXEL_BGRA;
313 
314  case (RS2_FORMAT_Y8):
315  return VOCAB_PIXEL_MONO;
316 
317  case (RS2_FORMAT_Y16):
318  return VOCAB_PIXEL_MONO16;;
319 
320  case (RS2_FORMAT_RAW16):
321  return VOCAB_PIXEL_MONO16;
322 
323  case (RS2_FORMAT_RAW8):
324  return VOCAB_PIXEL_MONO;
325  default:
326  return VOCAB_PIXEL_INVALID;
327 
328  }
329 }
330 
331 static size_t bytesPerPixel(const rs2_format format)
332 {
333  size_t bytes_per_pixel = 0;
334  switch (format)
335  {
336  case RS2_FORMAT_RAW8:
337  case RS2_FORMAT_Y8:
338  bytes_per_pixel = 1;
339  break;
340  case RS2_FORMAT_Z16:
341  case RS2_FORMAT_DISPARITY16:
342  case RS2_FORMAT_Y16:
343  case RS2_FORMAT_RAW16:
344  bytes_per_pixel = 2;
345  break;
346  case RS2_FORMAT_RGB8:
347  case RS2_FORMAT_BGR8:
348  bytes_per_pixel = 3;
349  break;
350  case RS2_FORMAT_RGBA8:
351  case RS2_FORMAT_BGRA8:
352  bytes_per_pixel = 4;
353  break;
354  default:
355  break;
356  }
357  return bytes_per_pixel;
358 }
359 
360 static YarpDistortion rsDistToYarpDist(const rs2_distortion dist)
361 {
362  switch (dist) {
363  case RS2_DISTORTION_BROWN_CONRADY:
364  return YarpDistortion::YARP_PLUM_BOB;
365  default:
366  return YarpDistortion::YARP_UNSUPPORTED;
367  }
368 
369 }
370 
371 static void settingErrorMsg(const string& error, bool& ret)
372 {
373  yCError(REALSENSE2) << error.c_str();
374  ret = false;
375 }
376 
// Fill the YARP `intrinsic` Property from librealsense camera intrinsics
// (focal lengths, principal point, and plumb-bob distortion coefficients).
// NOTE(review): the declaration of `params` is not visible in this view of
// the file (an extraction gap); presumably a yarp::sig::IntrinsicParams —
// confirm against the full source.
static bool setIntrinsic(Property& intrinsic, const rs2_intrinsics &values)
{
    params.focalLengthX = values.fx;
    params.focalLengthY = values.fy;
    params.principalPointX = values.ppx;
    params.principalPointY = values.ppy;
    // distortion model: coeffs[] follows librealsense ordering k1,k2,p1,p2,k3
    params.distortionModel.type = rsDistToYarpDist(values.model);
    params.distortionModel.k1 = values.coeffs[0];
    params.distortionModel.k2 = values.coeffs[1];
    params.distortionModel.t1 = values.coeffs[2];
    params.distortionModel.t2 = values.coeffs[3];
    params.distortionModel.k3 = values.coeffs[4];
    params.toProperty(intrinsic);
    return true;
}
394 
395 static bool setExtrinsicParam(Matrix &extrinsic, const rs2_extrinsics &values)
396 {
397 
398  if (extrinsic.cols() != 4 || extrinsic.rows() != 4)
399  {
400  yCError(REALSENSE2) << "Extrinsic matrix is not 4x4";
401  return false;
402  }
403 
404  extrinsic.eye();
405 
406  for (size_t j=0; j<extrinsic.rows() - 1; j++)
407  {
408  for (size_t i=0; i<extrinsic.cols() - 1; i++)
409  {
410  extrinsic[j][i] = values.rotation[i + j*extrinsic.cols()];
411  }
412  }
413 
414  extrinsic[0][3] = values.translation[0];
415  extrinsic[1][3] = values.translation[1];
416  extrinsic[2][3] = values.translation[2];
417 
418  return false;
419 }
420 
// Construct the driver with conservative defaults: no sensors bound yet,
// alignment enabled, fps and depth scale unset until the device is opened.
// NOTE(review): several body lines are not visible in this view of the file
// (an extraction gap between the comment and the closing brace) — they
// presumably populate m_supportedFeatures; confirm against the full source.
realsense2Driver::realsense2Driver() : m_depth_sensor(nullptr), m_color_sensor(nullptr),
                                       m_paramParser(), m_verbose(false),
                                       m_initialized(false), m_stereoMode(false),
                                       m_needAlignment(true), m_fps(0),
                                       m_scale(0.0)
{
    // realsense SDK already provides them
}
441 
443 {
444  try
445  {
446  m_profile = m_pipeline.start(m_cfg);
447  }
448  catch (const rs2::error& e)
449  {
450  yCError(REALSENSE2) << "Failed to start the pipeline:"<< "(" << e.what() << ")";
451  m_lastError = e.what();
452  return false;
453  }
454  return true;
455 }
456 
458 {
459  try
460  {
461  m_pipeline.stop();
462  }
463  catch (const rs2::error& e)
464  {
465  yCError(REALSENSE2) << "Failed to stop the pipeline:"<< "(" << e.what() << ")";
466  m_lastError = e.what();
467  return false;
468  }
469  return true;
470 }
471 
473 {
474  std::lock_guard<std::mutex> guard(m_mutex);
475  if (!pipelineShutdown())
476  return false;
477 
478  return pipelineStartup();
479 
480 }
481 
// Re-enable the color and depth streams at the requested frame rate and
// restart the pipeline; falls back to the previous configuration when the
// format is unsupported and the device is already initialized.
// NOTE(review): the function's opening brace and the guard around the first
// branch (presumably an isSupportedFormat(...) check) are not visible in this
// view of the file — the dangling `}` / `else` below belong to that elided
// conditional. Confirm against the full source.
bool realsense2Driver::setFramerate(const int _fps)

    m_cfg.enable_stream(RS2_STREAM_COLOR, m_color_intrin.width, m_color_intrin.height, RS2_FORMAT_RGB8, _fps);
    m_cfg.enable_stream(RS2_STREAM_DEPTH, m_depth_intrin.width, m_depth_intrin.height, RS2_FORMAT_Z16, _fps);
    }
    else
    {
    if (m_initialized)
    {
    fallback();
    }

    }

    if (!pipelineRestart())
    return false;

    m_fps = _fps;

    // NOTE(review): a line is elided here (likely updateTransformations()).

    return true;

}
509 
511 {
512  m_cfg.enable_stream(RS2_STREAM_COLOR, m_color_intrin.width, m_color_intrin.height, RS2_FORMAT_RGB8, m_fps);
513  m_cfg.enable_stream(RS2_STREAM_DEPTH, m_depth_intrin.width, m_depth_intrin.height, RS2_FORMAT_Z16, m_fps);
514  yCWarning(REALSENSE2)<<"Format not supported, use --verbose for more details. Setting the fallback format";
515  std::cout<<"COLOR: "<<m_color_intrin.width<<"x"<<m_color_intrin.height<<" fps: "<<m_fps<<std::endl;
516  std::cout<<"DEPTH: "<<m_depth_intrin.width<<"x"<<m_depth_intrin.height<<" fps: "<<m_fps<<std::endl;
517 }
518 
520 {
521  if (!params_map[rgbRes].isSetting || !params_map[depthRes].isSetting)
522  {
523  yCError(REALSENSE2)<<"Missing depthResolution or rgbResolution from [SETTINGS]";
524  return false;
525  }
526  double colorW = params_map[rgbRes].val[0].asFloat64();
527  double colorH = params_map[rgbRes].val[1].asFloat64();
528  double depthW = params_map[depthRes].val[0].asFloat64();
529  double depthH = params_map[depthRes].val[1].asFloat64();
530 
531  m_cfg.enable_stream(RS2_STREAM_COLOR, colorW, colorH, RS2_FORMAT_RGB8, m_fps);
532  m_cfg.enable_stream(RS2_STREAM_DEPTH, depthW, depthH, RS2_FORMAT_Z16, m_fps);
533  if (m_stereoMode) {
534  m_cfg.enable_stream(RS2_STREAM_INFRARED, 1, colorW, colorH, RS2_FORMAT_Y8, m_fps);
535  m_cfg.enable_stream(RS2_STREAM_INFRARED, 2, colorW, colorH, RS2_FORMAT_Y8, m_fps);
536  }
537  if (!pipelineStartup())
538  return false;
539  m_initialized = true;
540 
541  // Camera warmup - Dropped frames to allow stabilization
542  yCInfo(REALSENSE2) << "Sensor warm-up...";
543  for (int i = 0; i < 30; i++)
544  {
545  m_pipeline.wait_for_frames();
546  }
547  yCInfo(REALSENSE2) << "Device ready!";
548 
549  if (m_ctx.query_devices().size() == 0)
550  {
551  yCError(REALSENSE2) << "No device connected, please connect a RealSense device";
552 
553  rs2::device_hub device_hub(m_ctx);
554 
555  //Using the device_hub we can block the program until a device connects
556  m_device = device_hub.wait_for_device();
557  }
558  else
559  {
560  //TODO: if more are connected?!
561  // Update the selected device
562  m_device = m_profile.get_device();
563  if (m_verbose)
564  yCInfo(REALSENSE2) << get_device_information(m_device).c_str();
565  }
566 
567 
568  // Given a device, we can query its sensors using:
569  m_sensors = m_device.query_sensors();
570 
571  yCInfo(REALSENSE2) << "Device consists of" << m_sensors.size() << "sensors. More infos using --verbose option";
572  if (m_verbose)
573  {
574  for (const auto & m_sensor : m_sensors)
575  {
576  print_supported_options(m_sensor);
577  }
578  }
579 
580  for (auto & m_sensor : m_sensors)
581  {
582  if (m_sensor.is<rs2::depth_sensor>())
583  {
584  m_depth_sensor = &m_sensor;
585  if (!getOption(RS2_OPTION_DEPTH_UNITS, m_depth_sensor, m_scale))
586  {
587  yCError(REALSENSE2) << "Failed to retrieve scale";
588  return false;
589  }
590  }
591  else if (m_sensor.get_stream_profiles()[0].stream_type() == RS2_STREAM_COLOR)
592  m_color_sensor = &m_sensor;
593  }
594 
595  // Get stream intrinsics & extrinsics
597  return true;
598 }
599 
601 {
602  rs2::pipeline_profile pipeline_profile = m_pipeline.get_active_profile();
603  rs2::video_stream_profile depth_stream_profile = rs2::video_stream_profile(pipeline_profile.get_stream(RS2_STREAM_DEPTH));
604  rs2::video_stream_profile color_stream_profile = rs2::video_stream_profile(pipeline_profile.get_stream(RS2_STREAM_COLOR));
605 
606  m_depth_intrin = depth_stream_profile.get_intrinsics();
607  m_color_intrin = color_stream_profile.get_intrinsics();
608  m_depth_to_color = depth_stream_profile.get_extrinsics_to(color_stream_profile);
609  m_color_to_depth = color_stream_profile.get_extrinsics_to(depth_stream_profile);
610 
611  if (m_stereoMode) {
612  rs2::video_stream_profile infrared_stream_profile = rs2::video_stream_profile(pipeline_profile.get_stream(RS2_STREAM_INFRARED));
613  m_infrared_intrin = infrared_stream_profile.get_intrinsics();
614  }
615 
616 }
617 
618 
620 {
621  bool ret = true;
622  //ACCURACY
623  if (params_map[accuracy].isSetting && ret)
624  {
625  if (!params_map[accuracy].val[0].isFloat64() )
626  settingErrorMsg("Param " + params_map[accuracy].name + " is not a double as it should be.", ret);
627 
628  if (! setDepthAccuracy(params_map[accuracy].val[0].asFloat64() ) )
629  settingErrorMsg("Setting param " + params_map[accuracy].name + " failed... quitting.", ret);
630  }
631 
632  //CLIP_PLANES
633  if (params_map[clipPlanes].isSetting && ret)
634  {
635  if (!params_map[clipPlanes].val[0].isFloat64() )
636  settingErrorMsg("Param " + params_map[clipPlanes].name + " is not a double as it should be.", ret);
637 
638  if (!params_map[clipPlanes].val[1].isFloat64() )
639  settingErrorMsg("Param " + params_map[clipPlanes].name + " is not a double as it should be.", ret);
640 
641  if (! setDepthClipPlanes(params_map[clipPlanes].val[0].asFloat64(), params_map[clipPlanes].val[1].asFloat64() ) )
642  settingErrorMsg("Setting param " + params_map[clipPlanes].name + " failed... quitting.", ret);
643  }
644 
645  //FRAMERATE
646  if (params_map[framerate].isSetting && ret)
647  {
648  if (!params_map[framerate].val[0].isInt32() )
649  settingErrorMsg("Param " + params_map[framerate].name + " is not a int as it should be.", ret);
650  else
651  m_fps = params_map[framerate].val[0].asInt32();
652  }
653  else
654  {
655  yCWarning(REALSENSE2) << "Framerate not specified... setting 30 fps by default";
656  m_fps = 30;
657  }
658 
659  //EMITTER
660  if (params_map[enableEmitter].isSetting && ret)
661  {
662  Value& v = params_map[enableEmitter].val[0];
663 
664  if (!v.isBool())
665  {
666  settingErrorMsg("Param " + params_map[enableEmitter].name + " is not a bool as it should be.", ret);
667  return false;
668  }
669  if(!setOption(RS2_OPTION_EMITTER_ENABLED, m_depth_sensor, (float) v.asBool()))
670  {
671  settingErrorMsg("Setting param " + params_map[enableEmitter].name + " failed... quitting.", ret);
672  }
673  }
674 
675  //ALIGNMENT
676  if (params_map[needAlignment].isSetting && ret)
677  {
678  yCWarning(REALSENSE2) << "needAlignment parameter is deprecated, use alignmentFrame instead.";
679  Value& v = params_map[needAlignment].val[0];
680  if (!v.isBool())
681  {
682  settingErrorMsg("Param " + params_map[needAlignment].name + " is not a bool as it should be.", ret);
683  return false;
684  }
685 
686  m_needAlignment = v.asBool();
687  m_alignment_stream = m_needAlignment ? RS2_STREAM_COLOR : RS2_STREAM_ANY;
688  }
689 
690  if (params_map[alignmentFrame].isSetting && ret)
691  {
692  Value& v = params_map[alignmentFrame].val[0];
693  auto alignmentFrameStr = v.asString();
694  if (!v.isString())
695  {
696  settingErrorMsg("Param " + params_map[alignmentFrame].name + " is not a string as it should be.", ret);
697  return false;
698  }
699 
700  if (stringRSStreamMap.find(alignmentFrameStr) == stringRSStreamMap.end()) {
701  settingErrorMsg("Value "+alignmentFrameStr+" not allowed for " + params_map[alignmentFrame].name + " see documentation for supported values.", ret);
702  return false;
703  }
704 
705  m_alignment_stream = stringRSStreamMap.at(alignmentFrameStr);
706  }
707 
708  //DEPTH_RES
709  if (params_map[depthRes].isSetting && ret)
710  {
711  Value p1, p2;
712  p1 = params_map[depthRes].val[0];
713  p2 = params_map[depthRes].val[1];
714 
715  if (!p1.isInt32() || !p2.isInt32() )
716  {
717  settingErrorMsg("Param " + params_map[depthRes].name + " is not a int as it should be.", ret);
718  }
719 
720  if (! setDepthResolution(p1.asInt32(), p2.asInt32()))
721  {
722  settingErrorMsg("Setting param " + params_map[depthRes].name + " failed... quitting.", ret);
723  }
724  }
725 
726  //RGB_RES
727  if (params_map[rgbRes].isSetting && ret)
728  {
729  Value p1, p2;
730  p1 = params_map[rgbRes].val[0];
731  p2 = params_map[rgbRes].val[1];
732 
733  if (!p1.isInt32() || !p2.isInt32() )
734  {
735  settingErrorMsg("Param " + params_map[rgbRes].name + " is not a int as it should be.", ret);
736  }
737 
738  if (! setRgbResolution(p1.asInt32(), p2.asInt32()))
739  {
740  settingErrorMsg("Setting param " + params_map[rgbRes].name + " failed... quitting.", ret);
741  }
742  }
743 
744  return ret;
745 }
746 
747 
749 {
750  std::vector<RGBDSensorParamParser::RGBDParam*> params;
751  params.reserve(params_map.size());
752  for (auto& p:params_map)
753  {
754  params.push_back(&(p.second));
755  }
756 
757  m_verbose = config.check("verbose");
758  if (config.check("stereoMode")) {
759  m_stereoMode = config.find("stereoMode").asBool();
760  }
761 
762  if (!m_paramParser.parseParam(config, params))
763  {
764  yCError(REALSENSE2) << "Failed to parse the parameters";
765  return false;
766  }
767 
769  {
770  yCError(REALSENSE2) << "Failed to initialize the realsense device";
771  return false;
772  }
773 
774  // setting Parameters
775  return setParams();
776 
777 }
778 
780 {
782  return true;
783 }
784 
786 {
787  return m_color_intrin.height;
788 }
789 
791 {
792  return m_color_intrin.width;
793 }
794 
796 {
797  yCWarning(REALSENSE2) << "getRgbSupportedConfigurations not implemented yet";
798  return false;
799 }
800 
801 bool realsense2Driver::getRgbResolution(int &width, int &height)
802 {
803  width = m_color_intrin.width;
804  height = m_color_intrin.height;
805  return true;
806 }
807 
// Re-enable the depth stream at the requested resolution (keeping the current
// color resolution and fps) and restart the pipeline; falls back when the
// format is unsupported and the device is already initialized.
// NOTE(review): a guard line (presumably `if (isSupportedFormat(...))`) is
// not visible in this view of the file right after the signature — the
// `else` below belongs to it. A trailing line (likely updateTransformations())
// is also elided. Confirm against the full source.
bool realsense2Driver::setDepthResolution(int width, int height)
    {
    m_cfg.enable_stream(RS2_STREAM_COLOR, m_color_intrin.width, m_color_intrin.height, RS2_FORMAT_RGB8, m_fps);
    m_cfg.enable_stream(RS2_STREAM_DEPTH, width, height, RS2_FORMAT_Z16, m_fps);
    }
    else
    {
    if (m_initialized)
    {
    fallback();
    return false;
    }
    }

    if (!pipelineRestart())
    return false;

    return true;
}
830 
// Re-enable the color stream (and, in stereo mode, both infrared streams) at
// the requested resolution, then restart the pipeline; falls back to the
// previous configuration on failure.
// NOTE(review): the guards around the enable_stream groups (presumably
// `if (isSupportedFormat(...))` checks) are not visible in this view of the
// file — the bare `{` and the `else` below belong to them, and a trailing
// line (likely updateTransformations()) is also elided. Confirm upstream.
bool realsense2Driver::setRgbResolution(int width, int height)
{
    bool fail = true;
    m_cfg.enable_stream(RS2_STREAM_COLOR, width, height, RS2_FORMAT_RGB8, m_fps);
    m_cfg.enable_stream(RS2_STREAM_DEPTH, m_depth_intrin.width, m_depth_intrin.height, RS2_FORMAT_Z16, m_fps);
    fail = false;
    if (m_stereoMode)
    {
    {
    m_cfg.enable_stream(RS2_STREAM_INFRARED, 1, width, height, RS2_FORMAT_Y8, m_fps);
    m_cfg.enable_stream(RS2_STREAM_INFRARED, 2, width, height, RS2_FORMAT_Y8, m_fps);
    }
    else
    {
    fail = true;
    }
    }
    }

    if (m_initialized && fail)
    {
    fallback();
    return false;
    }

    if (!pipelineRestart())
    return false;

    return true;
}
864 
865 
866 bool realsense2Driver::setRgbFOV(double horizontalFov, double verticalFov)
867 {
868  // It seems to be not available...
869  return false;
870 }
871 
872 bool realsense2Driver::setDepthFOV(double horizontalFov, double verticalFov)
873 {
874  // It seems to be not available...
875  return false;
876 }
877 
879 {
880  std::lock_guard<std::mutex> guard(m_mutex);
881  bool ok = setOption(RS2_OPTION_DEPTH_UNITS, m_depth_sensor, accuracy);
882  if (ok) {
883  m_scale = accuracy;
884  }
885  return ok;
886 }
887 
888 bool realsense2Driver::getRgbFOV(double &horizontalFov, double &verticalFov)
889 {
890  float fov[2];
891  rs2_fov(&m_color_intrin, fov);
892  horizontalFov = fov[0];
893  verticalFov = fov[1];
894  return true;
895 }
896 
898 {
899  yCWarning(REALSENSE2) << "Mirroring not supported";
900  return false;
901 }
902 
904 {
905  yCWarning(REALSENSE2) << "Mirroring not supported";
906  return false;
907 }
908 
910 {
911  return setIntrinsic(intrinsic, m_color_intrin);
912 }
913 
915 {
916  return m_depth_intrin.height;
917 }
918 
920 {
921  return m_depth_intrin.width;
922 }
923 
924 bool realsense2Driver::getDepthFOV(double& horizontalFov, double& verticalFov)
925 {
926  float fov[2];
927  rs2_fov(&m_depth_intrin, fov);
928  horizontalFov = fov[0];
929  verticalFov = fov[1];
930  return true;
931 }
932 
934 {
935  return setIntrinsic(intrinsic, m_depth_intrin);;
936 }
937 
939 {
940  float accuracy = 0.0;
941  if (getOption(RS2_OPTION_DEPTH_UNITS, m_depth_sensor, accuracy))
942  {
943  return accuracy;
944  }
945  return 0.0;
946 }
947 
948 bool realsense2Driver::getDepthClipPlanes(double& nearPlane, double& farPlane)
949 {
950  if (params_map[clipPlanes].isDescription)
951  {
952  nearPlane = params_map[clipPlanes].val[0].asFloat64();
953  farPlane = params_map[clipPlanes].val[1].asFloat64();
954  return true;
955  }
956 
957  bool ret = getOption(RS2_OPTION_MIN_DISTANCE, m_depth_sensor, (float&) nearPlane);
958  ret &= getOption(RS2_OPTION_MAX_DISTANCE, m_depth_sensor, (float&) farPlane);
959  return ret;
960 }
961 
962 bool realsense2Driver::setDepthClipPlanes(double nearPlane, double farPlane)
963 {
964  if (params_map[clipPlanes].isDescription)
965  {
966  return false;
967  }
968  bool ret = setOption(RS2_OPTION_MIN_DISTANCE, m_depth_sensor, nearPlane);
969  ret &= setOption(RS2_OPTION_MAX_DISTANCE, m_depth_sensor, farPlane);
970  return ret;
971 }
972 
974 {
975  yCWarning(REALSENSE2) << "Mirroring not supported";
976  return false;
977 }
978 
980 {
981  yCWarning(REALSENSE2) << "Mirroring not supported";
982  return false;
983 }
984 
986 {
987  return setExtrinsicParam(extrinsic, m_depth_to_color);
988 }
989 
990 bool realsense2Driver::getRgbImage(FlexImage& rgbImage, Stamp* timeStamp)
991 {
992  std::lock_guard<std::mutex> guard(m_mutex);
993  rs2::frameset data = m_pipeline.wait_for_frames();
994  if (m_alignment_stream == RS2_STREAM_DEPTH)
995  {
996  rs2::align align(m_alignment_stream);
997  data = align.process(data);
998  }
999  return getImage(rgbImage, timeStamp, data);
1000 }
1001 
1003 {
1004  std::lock_guard<std::mutex> guard(m_mutex);
1005  rs2::frameset data = m_pipeline.wait_for_frames();
1006  if (m_alignment_stream == RS2_STREAM_COLOR)
1007  {
1008  rs2::align align(m_alignment_stream);
1009  data = align.process(data);
1010  }
1011  return getImage(depthImage, timeStamp, data);
1012 }
1013 
1014 bool realsense2Driver::getImage(FlexImage& Frame, Stamp *timeStamp, rs2::frameset &sourceFrame)
1015 {
1016  rs2::video_frame color_frm = sourceFrame.get_color_frame();
1017  rs2_format format = color_frm.get_profile().format();
1018 
1019  int pixCode = pixFormatToCode(format);
1020  size_t mem_to_wrt = color_frm.get_width() * color_frm.get_height() * bytesPerPixel(format);
1021 
1022  if (pixCode == VOCAB_PIXEL_INVALID)
1023  {
1024  yCError(REALSENSE2) << "Pixel Format not recognized";
1025  return false;
1026  }
1027 
1028  Frame.setPixelCode(pixCode);
1029  Frame.resize(m_color_intrin.width, m_color_intrin.height);
1030 
1031  if ((size_t) Frame.getRawImageSize() != mem_to_wrt)
1032  {
1033  yCError(REALSENSE2) << "Device and local copy data size doesn't match";
1034  return false;
1035  }
1036 
1037  memcpy((void*)Frame.getRawImage(), (void*)color_frm.get_data(), mem_to_wrt);
1038  m_rgb_stamp.update();
1039  if (timeStamp != nullptr)
1040  {
1041  *timeStamp = m_rgb_stamp;
1042  }
1043  return true;
1044 }
1045 
// Convert the depth frame of `sourceFrame` (16-bit device units) into a YARP
// float image in meters using the device depth scale m_scale.
bool realsense2Driver::getImage(depthImage& Frame, Stamp *timeStamp, const rs2::frameset &sourceFrame)
{
    rs2::depth_frame depth_frm = sourceFrame.get_depth_frame();
    rs2_format format = depth_frm.get_profile().format();

    int pixCode = pixFormatToCode(format);

    int w = depth_frm.get_width();
    int h = depth_frm.get_height();

    if (pixCode == VOCAB_PIXEL_INVALID)
    {
        yCError(REALSENSE2) << "Pixel Format not recognized";
        return false;
    }

    Frame.resize(w, h);

    float* rawImage = &Frame.pixel(0,0);
    const auto * rawImageRs =(const uint16_t *) depth_frm.get_data();

    // Scale each raw 16-bit sample to meters.
    for(int i = 0; i < w * h; i++)
    {
        rawImage[i] = m_scale * rawImageRs[i];
    }

    // NOTE(review): a line is elided from this view right before the stamp is
    // read (presumably m_depth_stamp.update()) — confirm against the full file.
    if (timeStamp != nullptr)
    {
        *timeStamp = m_depth_stamp;
    }
    return true;
}
1079 
1080 bool realsense2Driver::getImages(FlexImage& colorFrame, ImageOf<PixelFloat>& depthFrame, Stamp* colorStamp, Stamp* depthStamp)
1081 {
1082  std::lock_guard<std::mutex> guard(m_mutex);
1083  rs2::frameset data = m_pipeline.wait_for_frames();
1084  if (m_alignment_stream != RS2_STREAM_ANY) // RS2_STREAM_ANY is used as no-alignment-needed value.
1085  {
1086  rs2::align align(m_alignment_stream);
1087  data = align.process(data);
1088  }
1089  return getImage(colorFrame, colorStamp, data) && getImage(depthFrame, depthStamp, data);
1090 }
1091 
1093 {
1094  return RGBD_SENSOR_OK_IN_USE;
1095 }
1096 
1098 {
1099  return m_lastError;
1100 }
1101 
1103 {
1105  camera->busType = BUS_USB;
1106  return true;
1107 }
1108 
// Check whether `feature` (a cameraFeature_id_t) is within the valid YARP
// range and listed among this driver's supported features.
// NOTE(review): the declaration of `f` is not visible in this view of the
// file (an extraction gap); presumably `cameraFeature_id_t f;` — confirm
// against the full source.
bool realsense2Driver::hasFeature(int feature, bool* hasFeature)
{
    f = static_cast<cameraFeature_id_t>(feature);
    // Reject identifiers outside [YARP_FEATURE_BRIGHTNESS, YARP_FEATURE_NUMBER_OF-1].
    if (f < YARP_FEATURE_BRIGHTNESS || f > YARP_FEATURE_NUMBER_OF-1)
    {
        return false;
    }

    *hasFeature = std::find(m_supportedFeatures.begin(), m_supportedFeatures.end(), f) != m_supportedFeatures.end();

    return true;
}
1122 
// Set a camera feature, mapping the YARP feature id and its normalized [0,1]
// value onto the corresponding librealsense option of the color sensor (and,
// in stereo mode, the depth sensor too for exposure/gain).
// NOTE(review): several `case` labels (frame rate, white balance, sharpness,
// saturation) and one verbose print are not visible in this view of the file
// — the label-less statement groups below belong to those elided cases.
// Confirm against the full source.
bool realsense2Driver::setFeature(int feature, double value)
{
    bool b = false;
    if (!hasFeature(feature, &b) || !b)
    {
        yCError(REALSENSE2) << "Feature not supported!";
        return false;
    }

    float valToSet = 0.0;
    b = false;
    auto f = static_cast<cameraFeature_id_t>(feature);
    switch(f)
    {
    case YARP_FEATURE_EXPOSURE:
        if(optionPerc2Value(RS2_OPTION_EXPOSURE, m_color_sensor, value, valToSet))
        {
            b = setOption(RS2_OPTION_EXPOSURE, m_color_sensor, valToSet);
            if (m_stereoMode)
            {
                if(optionPerc2Value(RS2_OPTION_EXPOSURE, m_depth_sensor, value, valToSet))
                {
                    b &= setOption(RS2_OPTION_EXPOSURE, m_depth_sensor, valToSet);
                }
            }
        }
        break;
    case YARP_FEATURE_GAIN:
        if(optionPerc2Value(RS2_OPTION_GAIN, m_color_sensor,value, valToSet))
        {
            b = setOption(RS2_OPTION_GAIN, m_color_sensor, valToSet);
            if (m_stereoMode)
            {
                // NOTE(review): this stereo branch drives RS2_OPTION_EXPOSURE on
                // the depth sensor although this is the GAIN case — it looks like
                // a copy-paste from the EXPOSURE case above; confirm whether
                // RS2_OPTION_GAIN was intended.
                if(optionPerc2Value(RS2_OPTION_EXPOSURE, m_depth_sensor, value, valToSet))
                {
                    b &= setOption(RS2_OPTION_EXPOSURE, m_depth_sensor, valToSet);
                }
            }
        }
        break;
    {
        b = setFramerate((int) value);
        break;
    }
        if(optionPerc2Value(RS2_OPTION_WHITE_BALANCE, m_color_sensor, value, valToSet))
            b = setOption(RS2_OPTION_WHITE_BALANCE, m_color_sensor, valToSet);
        break;
        if(optionPerc2Value(RS2_OPTION_SHARPNESS, m_color_sensor, value, valToSet))
            b = setOption(RS2_OPTION_SHARPNESS, m_color_sensor, valToSet);
        break;
    case YARP_FEATURE_HUE:
        if(optionPerc2Value(RS2_OPTION_HUE, m_color_sensor, value, valToSet))
            b = setOption(RS2_OPTION_HUE, m_color_sensor, valToSet);
        break;
        if(optionPerc2Value(RS2_OPTION_SATURATION, m_color_sensor, value, valToSet))
            b = setOption(RS2_OPTION_SATURATION, m_color_sensor, valToSet);
        break;
    default:
        yCError(REALSENSE2) << "Feature not supported!";
        return false;
    }
    if (!b)
    {
        yCError(REALSENSE2) << "Something went wrong setting the feature requested, run the device with --verbose for the supported options";
        if (m_verbose)
        {
            // NOTE(review): the verbose diagnostic call originally here is
            // elided in this view of the file.
        }
        return false;
    }
    return true;
}
1199 
1200 bool realsense2Driver::getFeature(int feature, double *value)
1201 {
1202  bool b = false;
1203  if (!hasFeature(feature, &b) || !b)
1204  {
1205  yCError(REALSENSE2) << "Feature not supported!";
1206  return false;
1207  }
1208 
1209  float valToGet = 0.0;
1210  b = false;
1211 
1212  auto f = static_cast<cameraFeature_id_t>(feature);
1213  switch(f)
1214  {
1215  case YARP_FEATURE_EXPOSURE:
1216  if (getOption(RS2_OPTION_EXPOSURE, m_color_sensor, valToGet))
1217  b = optionValue2Perc(RS2_OPTION_EXPOSURE, m_color_sensor, (float&) value, valToGet);
1218  break;
1219  case YARP_FEATURE_GAIN:
1220  if (getOption(RS2_OPTION_GAIN, m_color_sensor, valToGet))
1221  b = optionValue2Perc(RS2_OPTION_GAIN, m_color_sensor, (float&) value, valToGet);
1222  break;
1224  {
1225  b = true;
1226  *value = (double) m_fps;
1227  break;
1228  }
1230  if (getOption(RS2_OPTION_WHITE_BALANCE, m_color_sensor, valToGet))
1231  b = optionValue2Perc(RS2_OPTION_WHITE_BALANCE, m_color_sensor, (float&) value, valToGet);
1232  break;
1234  if (getOption(RS2_OPTION_SHARPNESS, m_color_sensor, valToGet))
1235  b = optionValue2Perc(RS2_OPTION_SHARPNESS, m_color_sensor, (float&) value, valToGet);
1236  break;
1237  case YARP_FEATURE_HUE:
1238  if (getOption(RS2_OPTION_HUE, m_color_sensor, valToGet))
1239  b = optionValue2Perc(RS2_OPTION_HUE, m_color_sensor, (float&) value, valToGet);
1240  break;
1242  if (getOption(RS2_OPTION_SATURATION, m_color_sensor, valToGet))
1243  b = optionValue2Perc(RS2_OPTION_SATURATION, m_color_sensor, (float&) value, valToGet);
1244  break;
1245  default:
1246  yCError(REALSENSE2) << "Feature not supported!";
1247  return false;
1248  }
1249  if (!b)
1250  {
1251  yCError(REALSENSE2) << "Something went wrong setting the feature requested, run the device with --verbose for the supported options";
1252  if (m_verbose)
1253  {
1255  }
1256  return false;
1257  }
1258  return true;
1259 }
1260 
1261 bool realsense2Driver::setFeature(int feature, double value1, double value2)
1262 {
1263  yCError(REALSENSE2) << "No 2-valued feature are supported";
1264  return false;
1265 }
1266 
1267 bool realsense2Driver::getFeature(int feature, double *value1, double *value2)
1268 {
1269  yCError(REALSENSE2) << "No 2-valued feature are supported";
1270  return false;
1271 }
1272 
1273 bool realsense2Driver::hasOnOff( int feature, bool *HasOnOff)
1274 {
1275  bool b;
1276  if (!hasFeature(feature, &b) || !b)
1277  {
1278  yCError(REALSENSE2) << "Feature not supported!";
1279  return false;
1280  }
1281 
1282  auto f = static_cast<cameraFeature_id_t>(feature);
1284  {
1285  *HasOnOff = true;
1286  return true;
1287  }
1288  *HasOnOff = false;
1289  return true;
1290 }
1291 
1292 bool realsense2Driver::setActive( int feature, bool onoff)
1293 {
1294  bool b;
1295  if (!hasFeature(feature, &b) || !b)
1296  {
1297  yCError(REALSENSE2) << "Feature not supported!";
1298  return false;
1299  }
1300 
1301  if (!hasOnOff(feature, &b) || !b)
1302  {
1303  yCError(REALSENSE2) << "Feature does not have OnOff.. call hasOnOff() to know if a specific feature support OnOff mode";
1304  return false;
1305  }
1306 
1307  switch(feature)
1308  {
1310  b = setOption(RS2_OPTION_ENABLE_AUTO_WHITE_BALANCE, m_color_sensor, (float) onoff);
1311  return b;
1312  case YARP_FEATURE_EXPOSURE:
1313  b = setOption(RS2_OPTION_ENABLE_AUTO_EXPOSURE, m_color_sensor, (float) onoff);
1314  return b;
1315  default:
1316  return false;
1317  }
1318 
1319  return true;
1320 }
1321 
1322 bool realsense2Driver::getActive( int feature, bool *isActive)
1323 {
1324  bool b;
1325  if (!hasFeature(feature, &b) || !b)
1326  {
1327  yCError(REALSENSE2) << "Feature not supported!";
1328  return false;
1329  }
1330 
1331  if (!hasOnOff(feature, &b) || !b)
1332  {
1333  yCError(REALSENSE2) << "Feature does not have OnOff.. call hasOnOff() to know if a specific feature support OnOff mode";
1334  return false;
1335  }
1336  float response = 0.0;
1337  switch(feature)
1338  {
1340  b = getOption(RS2_OPTION_ENABLE_AUTO_WHITE_BALANCE, m_color_sensor, response); //TODO check if this exotic conversion works
1341  *isActive = (bool) response;
1342  return b;
1343  case YARP_FEATURE_EXPOSURE:
1344  b = getOption(RS2_OPTION_ENABLE_AUTO_EXPOSURE, m_color_sensor, response); //TODO check if this exotic conversion works
1345  *isActive = (bool) response;
1346  return b;
1347  default:
1348  return false;
1349  }
1350 
1351  return true;
1352 }
1353 
1354 bool realsense2Driver::hasAuto(int feature, bool *hasAuto)
1355 {
1356  bool b;
1357  if (!hasFeature(feature, &b) || !b)
1358  {
1359  yCError(REALSENSE2) << "Feature not supported!";
1360  return false;
1361  }
1362 
1363  auto f = static_cast<cameraFeature_id_t>(feature);
1365  {
1366  *hasAuto = true;
1367  return true;
1368  }
1369  *hasAuto = false;
1370  return true;
1371 }
1372 
1373 bool realsense2Driver::hasManual( int feature, bool* hasManual)
1374 {
1375  bool b;
1376  if (!hasFeature(feature, &b) || !b)
1377  {
1378  yCError(REALSENSE2) << "Feature not supported!";
1379  return false;
1380  }
1381 
1382  auto f = static_cast<cameraFeature_id_t>(feature);
1385  {
1386  *hasManual = true;
1387  return true;
1388  }
1389  *hasManual = false;
1390  return true;
1391 }
1392 
1393 bool realsense2Driver::hasOnePush(int feature, bool* hasOnePush)
1394 {
1395  bool b;
1396  if (!hasFeature(feature, &b) || !b)
1397  {
1398  yCError(REALSENSE2) << "Feature not supported!";
1399  return false;
1400  }
1401 
1402  return hasAuto(feature, hasOnePush);
1403 }
1404 
1406 {
1407  bool b;
1408  if (!hasFeature(feature, &b) || !b)
1409  {
1410  yCError(REALSENSE2) << "Feature not supported!";
1411  return false;
1412  }
1413  float one = 1.0;
1414  float zero = 0.0;
1415 
1416  auto f = static_cast<cameraFeature_id_t>(feature);
1417  if (f == YARP_FEATURE_WHITE_BALANCE)
1418  {
1419  switch(mode)
1420  {
1421  case MODE_AUTO:
1422  return setOption(RS2_OPTION_ENABLE_AUTO_WHITE_BALANCE, m_color_sensor, one);
1423  case MODE_MANUAL:
1424  return setOption(RS2_OPTION_ENABLE_AUTO_WHITE_BALANCE, m_color_sensor, zero);
1425  case MODE_UNKNOWN:
1426  return false;
1427  default:
1428  return false;
1429  }
1430  return true;
1431  }
1432 
1433  if (f == YARP_FEATURE_EXPOSURE)
1434  {
1435  switch(mode)
1436  {
1437  case MODE_AUTO:
1438  return setOption(RS2_OPTION_ENABLE_AUTO_EXPOSURE, m_color_sensor, one);
1439  case MODE_MANUAL:
1440  return setOption(RS2_OPTION_ENABLE_AUTO_EXPOSURE, m_color_sensor, zero);
1441  case MODE_UNKNOWN:
1442  return false;
1443  default:
1444  return false;
1445  }
1446  return true;
1447  }
1448 
1449 
1450  yCError(REALSENSE2) << "Feature does not have both auto and manual mode";
1451  return false;
1452 }
1453 
1454 bool realsense2Driver::getMode(int feature, FeatureMode* mode)
1455 {
1456  bool b;
1457  if (!hasFeature(feature, &b) || !b)
1458  {
1459  yCError(REALSENSE2) << "Feature not supported!";
1460  return false;
1461  }
1462  float res = 0.0;
1463  bool ret = true;
1464  auto f = static_cast<cameraFeature_id_t>(feature);
1465  if (f == YARP_FEATURE_WHITE_BALANCE)
1466  {
1467  ret &= getOption(RS2_OPTION_ENABLE_AUTO_WHITE_BALANCE, m_color_sensor, res);
1468  }
1469 
1470  if (f == YARP_FEATURE_EXPOSURE)
1471  {
1472  ret &= getOption(RS2_OPTION_ENABLE_AUTO_EXPOSURE, m_color_sensor, res);
1473  }
1474 
1475  if (res == 0.0)
1476  {
1477  *mode = MODE_MANUAL;
1478  }
1479  else if (res == 1.0)
1480  {
1481  *mode = MODE_AUTO;
1482  }
1483  else
1484  {
1485  *mode = MODE_UNKNOWN;
1486  }
1487  return ret;
1488 }
1489 
1491 {
1492  bool b;
1493  if (!hasFeature(feature, &b) || !b)
1494  {
1495  yCError(REALSENSE2) << "Feature not supported!";
1496  return false;
1497  }
1498 
1499  if (!hasOnePush(feature, &b) || !b)
1500  {
1501  yCError(REALSENSE2) << "Feature doesn't have OnePush";
1502  return false;
1503  }
1504 
1505  setMode(feature, MODE_AUTO);
1506  setMode(feature, MODE_MANUAL);
1507 
1508  return true;
1509 }
1510 
1512 {
1513  if (!m_stereoMode)
1514  {
1515  yCError(REALSENSE2)<<"Infrared stereo stream not enabled";
1516  return false;
1517  }
1518 
1519  image.resize(width(), height());
1520  std::lock_guard<std::mutex> guard(m_mutex);
1521  rs2::frameset data = m_pipeline.wait_for_frames();
1522 
1523  rs2::video_frame frm1 = data.get_infrared_frame(1);
1524  rs2::video_frame frm2 = data.get_infrared_frame(2);
1525 
1526  int pixCode = pixFormatToCode(frm1.get_profile().format());
1527 
1528  if (pixCode != VOCAB_PIXEL_MONO)
1529  {
1530  yCError(REALSENSE2) << "Expecting Pixel Format MONO";
1531  return false;
1532  }
1533 
1534  // Wrap rs images with yarp ones.
1535  ImageOf<PixelMono> imgL, imgR;
1536  imgL.setExternal((unsigned char*) (frm1.get_data()), frm1.get_width(), frm1.get_height());
1537  imgR.setExternal((unsigned char*) (frm2.get_data()), frm2.get_width(), frm2.get_height());
1538  return utils::horzConcat(imgL, imgR, image);
1539 }
1540 
1542 {
1543  return m_infrared_intrin.height;
1544 }
1545 
1547 {
1548  return m_infrared_intrin.width*2;
1549 }
cameraFeature_id_t
@ YARP_FEATURE_NUMBER_OF
@ YARP_FEATURE_SHARPNESS
@ YARP_FEATURE_FRAME_RATE
@ YARP_FEATURE_HUE
@ YARP_FEATURE_WHITE_BALANCE
@ YARP_FEATURE_EXPOSURE
@ YARP_FEATURE_MIRROR
@ YARP_FEATURE_SATURATION
@ YARP_FEATURE_GAIN
@ MODE_UNKNOWN
bool ret
yarp::os::Stamp m_depth_stamp
rs2::pipeline m_pipeline
bool setActive(int feature, bool onoff) override
Set the requested feature on or off.
bool setDepthClipPlanes(double nearPlane, double farPlane) override
Set the clipping planes of the sensor.
rs2_intrinsics m_infrared_intrin
RGBDSensor_status getSensorStatus() override
Get the surrent status of the sensor, using enum type.
bool hasManual(int feature, bool *hasManual) override
Check if the requested feature has the 'manual' mode.
rs2_intrinsics m_color_intrin
bool getRgbMirroring(bool &mirror) override
Get the mirroring setting of the sensor.
bool getDepthImage(depthImage &depthImage, Stamp *timeStamp=nullptr) override
rs2::sensor * m_depth_sensor
bool getActive(int feature, bool *isActive) override
Get the current status of the feature, on or off.
std::vector< cameraFeature_id_t > m_supportedFeatures
bool close() override
Close the DeviceDriver.
bool setFramerate(const int _fps)
bool hasAuto(int feature, bool *hasAuto) override
Check if the requested feature has the 'auto' mode.
int height() const override
Return the height of each frame.
bool setDepthFOV(double horizontalFov, double verticalFov) override
Set the field of view (FOV) of the depth camera.
bool hasFeature(int feature, bool *hasFeature) override
Check if camera has the requested feature (saturation, brightness ...
bool getCameraDescription(CameraDescriptor *camera) override
Get a basic description of the camera hw.
bool getRgbSupportedConfigurations(yarp::sig::VectorOf< yarp::dev::CameraConfig > &configurations) override
Get the possible configurations of the camera.
bool getMode(int feature, FeatureMode *mode) override
Get the current mode for the feature.
bool setDepthResolution(int width, int height) override
Set the resolution of the depth image from the camera.
bool setRgbFOV(double horizontalFov, double verticalFov) override
Set the field of view (FOV) of the rgb camera.
rs2::pipeline_profile m_profile
rs2_intrinsics m_depth_intrin
rs2_stream m_alignment_stream
int getDepthHeight() override
Return the height of each frame.
bool setRgbMirroring(bool mirror) override
Set the mirroring setting of the sensor.
bool open(yarp::os::Searchable &config) override
Open the DeviceDriver.
rs2_extrinsics m_depth_to_color
double getDepthAccuracy() override
Get the minimum detectable variation in distance [meter].
bool setDepthMirroring(bool mirror) override
Set the mirroring setting of the sensor.
bool hasOnOff(int feature, bool *HasOnOff) override
Check if the camera has the ability to turn on/off the requested feature.
bool hasOnePush(int feature, bool *hasOnePush) override
Check if the requested feature has the 'onePush' mode.
std::string getLastErrorMsg(Stamp *timeStamp=NULL) override
Return an error message in case of error.
int getDepthWidth() override
Return the height of each frame.
bool getRgbResolution(int &width, int &height) override
Get the resolution of the rgb image from the camera.
bool getDepthFOV(double &horizontalFov, double &verticalFov) override
Get the field of view (FOV) of the depth camera.
rs2::context m_ctx
bool setFeature(int feature, double value) override
Set the requested feature to a value (saturation, brightness ...
bool setOnePush(int feature) override
Set the requested feature to a value (saturation, brightness ...
bool setDepthAccuracy(double accuracy) override
Set the minimum detectable variation in distance [meter] when possible.
rs2::sensor * m_color_sensor
bool getRgbFOV(double &horizontalFov, double &verticalFov) override
Get the field of view (FOV) of the rgb camera.
bool getRgbImage(FlexImage &rgbImage, Stamp *timeStamp=nullptr) override
Get the rgb frame from the device.
rs2_extrinsics m_color_to_depth
bool getImages(FlexImage &colorFrame, depthImage &depthFrame, Stamp *colorStamp=NULL, Stamp *depthStamp=NULL) override
yarp::os::Stamp m_rgb_stamp
bool getExtrinsicParam(yarp::sig::Matrix &extrinsic) override
Get the extrinsic parameters from the device.
bool getRgbIntrinsicParam(Property &intrinsic) override
Get the intrinsic parameters of the rgb camera.
int getRgbHeight() override
Return the height of each frame.
bool getDepthIntrinsicParam(Property &intrinsic) override
Get the intrinsic parameters of the depth camera.
bool getImage(yarp::sig::ImageOf< yarp::sig::PixelMono > &image) override
Get a raw image from the frame grabber.
int width() const override
Return the width of each frame.
int getRgbWidth() override
Return the width of each frame.
yarp::dev::RGBDSensorParamParser m_paramParser
bool setMode(int feature, FeatureMode mode) override
Set the requested mode for the feature.
std::vector< rs2::sensor > m_sensors
bool getFeature(int feature, double *value) override
Get the current value for the requested feature.
std::string m_lastError
rs2::device m_device
bool setRgbResolution(int width, int height) override
Set the resolution of the rgb image from the camera.
bool getDepthClipPlanes(double &nearPlane, double &farPlane) override
Get the clipping planes of the sensor.
bool getDepthMirroring(bool &mirror) override
Get the mirroring setting of the sensor.
bool parseParam(const yarp::os::Searchable &config, std::vector< RGBDParam * > &params)
parseParam, parse the params stored in a Searchable.
yarp::sig::IntrinsicParams rgbIntrinsic
yarp::sig::IntrinsicParams depthIntrinsic
A class for storing options and configuration information.
Definition: Property.h:37
A base class for nested structures that can be searched.
Definition: Searchable.h:69
virtual Value & find(const std::string &key) const =0
Gets a value corresponding to a given keyword.
virtual bool check(const std::string &key) const =0
Check if there exists a property of the given name.
An abstraction for a time stamp and/or sequence number.
Definition: Stamp.h:25
void update()
Set the timestamp to the current time, and increment the sequence number (wrapping to 0 if the sequen...
Definition: Stamp.cpp:113
A single value (typically within a Bottle).
Definition: Value.h:47
virtual bool isString() const
Checks if value is a string.
Definition: Value.cpp:159
virtual bool isBool() const
Checks if value is a boolean.
Definition: Value.cpp:117
virtual bool asBool() const
Get boolean value.
Definition: Value.cpp:189
virtual std::int32_t asInt32() const
Get 32-bit integer value.
Definition: Value.cpp:207
virtual bool isInt32() const
Checks if value is a 32-bit integer.
Definition: Value.cpp:135
virtual std::string asString() const
Get string value.
Definition: Value.cpp:237
Image class with user control of representation details.
Definition: Image.h:403
void setPixelCode(int imgPixelCode)
Definition: Image.h:406
Typed image class.
Definition: Image.h:647
T & pixel(size_t x, size_t y)
Definition: Image.h:663
void setExternal(const void *data, size_t imgWidth, size_t imgHeight)
Use this to wrap an external image.
Definition: Image.cpp:883
unsigned char * getRawImage() const
Access to the internal image buffer.
Definition: Image.cpp:535
size_t getRawImageSize() const
Access to the internal buffer size information (this is how much memory has been allocated for the im...
Definition: Image.cpp:544
void resize(size_t imgWidth, size_t imgHeight)
Reallocate an image to be of a desired size, throwing away its current contents.
Definition: Image.cpp:467
A class for a Matrix.
Definition: Matrix.h:46
size_t cols() const
Return number of columns.
Definition: Matrix.h:101
size_t rows() const
Return number of rows.
Definition: Matrix.h:95
const Matrix & eye()
Build an identity matrix, don't resize.
Definition: Matrix.cpp:429
Provides:
Definition: Vector.h:122
#define yCInfo(component,...)
Definition: LogComponent.h:135
#define yCError(component,...)
Definition: LogComponent.h:157
#define yCWarning(component,...)
Definition: LogComponent.h:146
#define YARP_LOG_COMPONENT(name,...)
Definition: LogComponent.h:80
@ VOCAB_PIXEL_RGBA
Definition: Image.h:51
@ VOCAB_PIXEL_MONO16
Definition: Image.h:49
@ VOCAB_PIXEL_BGRA
Definition: Image.h:52
@ VOCAB_PIXEL_BGR
Definition: Image.h:55
@ VOCAB_PIXEL_INVALID
Definition: Image.h:47
@ VOCAB_PIXEL_MONO
Definition: Image.h:48
@ VOCAB_PIXEL_RGB
Definition: Image.h:50
An interface for the device drivers.
An interface to the operating system, including Port based communication.
bool horzConcat(const yarp::sig::Image &inImgL, const yarp::sig::Image &inImgR, yarp::sig::Image &outImg)
horzConcat, concatenate horizontally two images of the same size in one with double width.
Definition: ImageUtils.cpp:69
Signal processing.
Definition: Image.h:25
YarpDistortion
The YarpDistortion enum to define the type of the distortion model of the camera.
constexpr char clipPlanes[]
static bool setOption(rs2_option option, const rs2::sensor *sensor, float value)
static int pixFormatToCode(const rs2_format p)
static void print_supported_options(const rs2::sensor &sensor)
static const std::map< std::string, rs2_stream > stringRSStreamMap
constexpr char needAlignment[]
static bool getOption(rs2_option option, const rs2::sensor *sensor, float &value)
static std::string get_device_information(const rs2::device &dev)
static bool optionValue2Perc(rs2_option option, const rs2::sensor *sensor, float &perc, const float &value)
static std::map< std::string, RGBDSensorParamParser::RGBDParam > params_map
static YarpDistortion rsDistToYarpDist(const rs2_distortion dist)
static bool setExtrinsicParam(Matrix &extrinsic, const rs2_extrinsics &values)
constexpr char enableEmitter[]
static bool setIntrinsic(Property &intrinsic, const rs2_intrinsics &values)
constexpr char framerate[]
constexpr char alignmentFrame[]
constexpr char rgbRes[]
static size_t bytesPerPixel(const rs2_format format)
static void settingErrorMsg(const string &error, bool &ret)
static bool optionPerc2Value(rs2_option option, const rs2::sensor *sensor, const float &perc, float &value)
constexpr char depthRes[]
static bool isSupportedFormat(const rs2::sensor &sensor, const int width, const int height, const int fps, bool verbose=false)
constexpr char accuracy[]
The IntrinsicParams struct to handle the intrinsic parameter of cameras(RGB and RGBD either).
double focalLengthY
Result of the product of the physical focal length(mm) and the size sy of the individual imager eleme...
DistortionModel distortionModel
Distortion model of the image.
double focalLengthX
Result of the product of the physical focal length(mm) and the size sx of the individual imager eleme...
void toProperty(yarp::os::Property &intrinsic) const
toProperty, convert the struct to a Property.
double principalPointX
Horizontal coordinate of the principal point of the image, as a pixel offset from the left edge.
double principalPointY
Vertical coordinate of the principal point of the image, as a pixel offset from the top edge.