YARP
Yet Another Robot Platform
 
Loading...
Searching...
No Matches
realsense2Driver.cpp
Go to the documentation of this file.
1/*
2 * Copyright (C) 2006-2020 Istituto Italiano di Tecnologia (IIT)
3 * All rights reserved.
4 *
5 * This software may be modified and distributed under the terms of the
6 * BSD-3-Clause license. See the accompanying LICENSE file for details.
7 */
8
9#include <cmath>
10#include <algorithm>
11#include <iomanip>
12#include <cstdint>
13
15#include <yarp/os/Value.h>
16#include <yarp/sig/ImageUtils.h>
17
18#include <librealsense2/rsutil.h>
19#include "realsense2Driver.h"
20
21using namespace yarp::dev;
22using namespace yarp::sig;
23using namespace yarp::os;
24
25using namespace std;
26
namespace {
// Log component used by all yC* logging macros in this file.
YARP_LOG_COMPONENT(REALSENSE2, "yarp.device.realsense2")
}
30
31
// Keys of the configuration parameters looked up in params_map
// (populated from the device's [SETTINGS] group).
constexpr char accuracy [] = "accuracy";
constexpr char clipPlanes [] = "clipPlanes";
constexpr char depthRes [] = "depthResolution";
constexpr char rgbRes [] = "rgbResolution";
constexpr char framerate [] = "framerate";
constexpr char enableEmitter [] = "enableEmitter";
constexpr char needAlignment [] = "needAlignment";
constexpr char alignmentFrame [] = "alignmentFrame";
40
41
53
// Maps the textual value of the alignmentFrame parameter onto the
// librealsense stream used as the alignment target.
// "None" (RS2_STREAM_ANY) is used as the no-alignment-needed value
// (see the check in getImages()).
static const std::map<std::string, rs2_stream> stringRSStreamMap {
    {"None", RS2_STREAM_ANY},
    {"RGB", RS2_STREAM_COLOR},
    {"Depth", RS2_STREAM_DEPTH}
};
59
60
61static std::string get_device_information(const rs2::device& dev)
62{
63
64 std::stringstream ss;
65 ss << "Device information: " << std::endl;
66
67 for (int i = 0; i < static_cast<int>(RS2_CAMERA_INFO_COUNT); i++)
68 {
69 auto info_type = static_cast<rs2_camera_info>(i);
70 ss << " " << std::left << std::setw(20) << info_type << " : ";
71
72 if (dev.supports(info_type))
73 ss << dev.get_info(info_type) << std::endl;
74 else
75 ss << "N/A" << std::endl;
76 }
77 return ss.str();
78}
79
80
81static void print_supported_options(const rs2::sensor& sensor)
82{
83 // Sensors usually have several options to control their properties
84 // such as Exposure, Brightness etc.
85
86 if (sensor.is<rs2::depth_sensor>())
87 yCInfo(REALSENSE2) << "Depth sensor supports the following options:\n";
88 else if (sensor.get_stream_profiles()[0].stream_type() == RS2_STREAM_COLOR)
89 yCInfo(REALSENSE2) << "RGB camera supports the following options:\n";
90 else
91 yCInfo(REALSENSE2) << "Sensor supports the following options:\n";
92
93 // The following loop shows how to iterate over all available options
94 // Starting from 0 until RS2_OPTION_COUNT (exclusive)
95 for (int i = 0; i < static_cast<int>(RS2_OPTION_COUNT); i++)
96 {
97 auto option_type = static_cast<rs2_option>(i);
98 //SDK enum types can be streamed to get a string that represents them
99
100 // To control an option, use the following api:
101
102 // First, verify that the sensor actually supports this option
103 if (sensor.supports(option_type))
104 {
105 std::cout << " " << option_type;
106 std::cout << std::endl;
107
108 // Get a human readable description of the option
109 const char* description = sensor.get_option_description(option_type);
110 std::cout << " Description : " << description << std::endl;
111
112 // Get the current value of the option
113 float current_value = sensor.get_option(option_type);
114 std::cout << " Current Value : " << current_value << std::endl;
115 }
116 }
117
118 std::cout<<std::endl;
119}
120
// Returns true if the sensor advertises a video stream profile with exactly
// the requested width, height and fps; when verbose is set, every available
// profile is printed to stdout.
static bool isSupportedFormat(const rs2::sensor &sensor, const int width, const int height, const int fps, bool verbose = false)
{
    bool ret = false;
    std::vector<rs2::stream_profile> stream_profiles = sensor.get_stream_profiles();

    // Count profiles per (stream type, stream index) pair.
    std::map<std::pair<rs2_stream, int>, int> unique_streams;
    for (auto&& sp : stream_profiles)
    {
        unique_streams[std::make_pair(sp.stream_type(), sp.stream_index())]++;
    }

    if (verbose)
    {
        std::cout << "Sensor consists of " << unique_streams.size() << " streams: " << std::endl;
        for (size_t i = 0; i < unique_streams.size(); i++)
        {
            auto it = unique_streams.begin();
            std::advance(it, i);
            std::cout << " - " << it->first.first << " #" << it->first.second << std::endl;
        }
        std::cout << "Sensor provides the following stream profiles:" << std::endl;
    }

    //Next, we go over all the stream profiles and print the details of each one

    int profile_num = 0;
    for (const rs2::stream_profile& stream_profile : stream_profiles)
    {
        if (verbose)
        {
            // NOTE(review): the declaration of stream_data_type was dropped
            // from this listing (missing line); presumably
            // rs2_stream stream_data_type = stream_profile.stream_type();

            int stream_index = stream_profile.stream_index();

            std::cout << std::setw(3) << profile_num << ": " << stream_data_type << " #" << stream_index;
        }

        // As noted, a stream is an abstraction.
        // In order to get additional data for the specific type of a
        // stream, a mechanism of "Is" and "As" is provided:
        if (stream_profile.is<rs2::video_stream_profile>()) //"Is" will test if the type tested is of the type given
        {
            // "As" will try to convert the instance to the given type
            rs2::video_stream_profile video_stream_profile = stream_profile.as<rs2::video_stream_profile>();

            // After using the "as" method we can use the new data type
            // for additinal operations:
            if (verbose)
            {
                std::cout << " (Video Stream: " << video_stream_profile.format() << " " <<
                    video_stream_profile.width() << "x" << video_stream_profile.height() << "@ " << video_stream_profile.fps() << "Hz)";
                std::cout << std::endl;
            }

            // Exact match on the requested width/height/fps.
            if(video_stream_profile.width() == width && video_stream_profile.height() == height && video_stream_profile.fps() == fps)
                ret=true;
        }
        profile_num++;
    }


    return ret;
}
184
185static bool optionPerc2Value(rs2_option option,const rs2::sensor* sensor, const float& perc, float& value)
186{
187 if (!sensor)
188 {
189 return false;
190 }
191 try
192 {
193 rs2::option_range optionRange = sensor->get_option_range(option);
194 value =(float) (perc * (optionRange.max - optionRange.min) + optionRange.min);
195
196 }
197 catch (const rs2::error& e)
198 {
199 // Some options can only be set while the camera is streaming,
200 // and generally the hardware might fail so it is good practice to catch exceptions from set_option
201 yCError(REALSENSE2) << "Failed to get option " << option << " range. (" << e.what() << ")";
202 return false;
203 }
204
205 return true;
206}
207
208static bool optionValue2Perc(rs2_option option,const rs2::sensor* sensor, float& perc, const float& value)
209{
210 if (!sensor)
211 {
212 return false;
213 }
214 try
215 {
216 rs2::option_range optionRange = sensor->get_option_range(option);
217 perc =(float) ((value - optionRange.min) / (optionRange.max - optionRange.min));
218
219 }
220 catch (const rs2::error& e)
221 {
222 // Some options can only be set while the camera is streaming,
223 // and generally the hardware might fail so it is good practice to catch exceptions from set_option
224 yCError(REALSENSE2) << "Failed to get option " << option << " range. (" << e.what() << ")";
225 return false;
226 }
227
228 return true;
229}
230
231
232
233static bool setOption(rs2_option option,const rs2::sensor* sensor, float value)
234{
235
236 if (!sensor)
237 {
238 return false;
239 }
240
241 // First, verify that the sensor actually supports this option
242 if (!sensor->supports(option))
243 {
244 yCError(REALSENSE2) << "The option" << rs2_option_to_string(option) << "is not supported by this sensor";
245 return false;
246 }
247
248 // To set an option to a different value, we can call set_option with a new value
249 try
250 {
251 sensor->set_option(option, value);
252 }
253 catch (const rs2::error& e)
254 {
255 // Some options can only be set while the camera is streaming,
256 // and generally the hardware might fail so it is good practice to catch exceptions from set_option
257 yCError(REALSENSE2) << "Failed to set option " << rs2_option_to_string(option) << ". (" << e.what() << ")";
258 return false;
259 }
260 return true;
261}
262
263static bool getOption(rs2_option option,const rs2::sensor *sensor, float &value)
264{
265 if (!sensor)
266 {
267 return false;
268 }
269
270 // First, verify that the sensor actually supports this option
271 if (!sensor->supports(option))
272 {
273 yCError(REALSENSE2) << "The option" << rs2_option_to_string(option) << "is not supported by this sensor";
274 return false;
275 }
276
277 // To set an option to a different value, we can call set_option with a new value
278 try
279 {
280 value = sensor->get_option(option);
281 }
282 catch (const rs2::error& e)
283 {
284 // Some options can only be set while the camera is streaming,
285 // and generally the hardware might fail so it is good practice to catch exceptions from set_option
286 yCError(REALSENSE2) << "Failed to get option " << rs2_option_to_string(option) << ". (" << e.what() << ")";
287 return false;
288 }
289 return true;
290}
291
// Map a librealsense pixel format onto the corresponding YARP pixel vocab
// code; unknown formats map to VOCAB_PIXEL_INVALID.
static int pixFormatToCode(const rs2_format p)
{
    switch(p)
    {
    case (RS2_FORMAT_RGB8):
        return VOCAB_PIXEL_RGB;

    case (RS2_FORMAT_BGR8):
        return VOCAB_PIXEL_BGR;

    case (RS2_FORMAT_Z16):
        return VOCAB_PIXEL_MONO16;

    // NOTE(review): a case label was dropped from this listing (missing
    // line); presumably another 16-bit format also mapped to MONO16.
        return VOCAB_PIXEL_MONO16;

    case (RS2_FORMAT_RGBA8):
        return VOCAB_PIXEL_RGBA;

    case (RS2_FORMAT_BGRA8):
        return VOCAB_PIXEL_BGRA;

    case (RS2_FORMAT_Y8):
        return VOCAB_PIXEL_MONO;

    case (RS2_FORMAT_Y16):
        return VOCAB_PIXEL_MONO16;;

    case (RS2_FORMAT_RAW16):
        return VOCAB_PIXEL_MONO16;

    case (RS2_FORMAT_RAW8):
        return VOCAB_PIXEL_MONO;
    default:
        return VOCAB_PIXEL_INVALID;

    }
}
330
// Size in bytes of one pixel for the given librealsense format
// (0 for formats this driver does not handle).
static size_t bytesPerPixel(const rs2_format format)
{
    size_t bytes_per_pixel = 0;
    switch (format)
    {
    case RS2_FORMAT_RAW8:
    case RS2_FORMAT_Y8:
        bytes_per_pixel = 1;
        break;
    case RS2_FORMAT_Z16:
    // NOTE(review): a case label was dropped from this listing here
    // (missing line), grouped with the other 2-byte formats.
    case RS2_FORMAT_Y16:
    case RS2_FORMAT_RAW16:
        bytes_per_pixel = 2;
        break;
    case RS2_FORMAT_RGB8:
    case RS2_FORMAT_BGR8:
        bytes_per_pixel = 3;
        break;
    case RS2_FORMAT_RGBA8:
    case RS2_FORMAT_BGRA8:
        bytes_per_pixel = 4;
        break;
    default:
        break;
    }
    return bytes_per_pixel;
}
359
// NOTE(review): the function signature was dropped from this listing; from
// the body it converts an rs2 distortion model ("dist") plus its
// coefficients ("values") into a YarpDistortion enum value.
{
    switch (dist)
    {
    // NOTE(review): a case label is missing here (dropped line), presumably
    // the rs2 model that maps to plumb bob.
        return YarpDistortion::YARP_PLUMB_BOB;
    default:
        /*
         * If the coefficient are all zero the image is undistorted. For now is set to plumb bob since all the devices that uses it are configured with plumb bob.
         * An issue will be open to fix this bug and set it to undistorted.
         */
        if (values.coeffs[0] == 0.0 && values.coeffs[1] == 0.0 && values.coeffs[2] == 0.0 && values.coeffs[3] == 0.0 && values.coeffs[4] == 0.0)
        {
            return YarpDistortion::YARP_DISTORTION_NONE;
        }
        else
        {
            return YarpDistortion::YARP_UNSUPPORTED;
        }
    }
}
381
382static void settingErrorMsg(const string& error, bool& ret)
383{
384 yCError(REALSENSE2) << error.c_str();
385 ret = false;
386}
387
// Fill a YARP Property with camera intrinsics (focal lengths, principal
// point, distortion coefficients) taken from an rs2_intrinsics struct.
static bool setIntrinsic(Property& intrinsic, const rs2_intrinsics &values)
{
    // NOTE(review): the declaration of "params" was dropped from this
    // listing (missing line); presumably a YARP IntrinsicParams struct.
    params.focalLengthX = values.fx;
    params.focalLengthY = values.fy;
    params.principalPointX = values.ppx;
    params.principalPointY = values.ppy;
    // distortion model
    // NOTE(review): the line selecting the distortion model type was
    // dropped from this listing.
    params.distortionModel.k1 = values.coeffs[0];
    params.distortionModel.k2 = values.coeffs[1];
    params.distortionModel.t1 = values.coeffs[2];
    params.distortionModel.t2 = values.coeffs[3];
    params.distortionModel.k3 = values.coeffs[4];
    params.toProperty(intrinsic);
    return true;
}
405
// NOTE(review): the signature was dropped from this listing; from the body
// it copies an rs2_extrinsics rotation/translation into a 4x4 yarp Matrix
// ("extrinsic"), which must already be 4x4.
{

    if (extrinsic.cols() != 4 || extrinsic.rows() != 4)
    {
        yCError(REALSENSE2) << "Extrinsic matrix is not 4x4";
        return false;
    }

    extrinsic.eye();

    // Copy the 3x3 rotation block into the upper-left corner.
    // NOTE(review): the source index i + j*extrinsic.cols() uses a stride of
    // 4, but rs2_extrinsics.rotation has 9 elements — for j=2, i=2 this
    // reads index 10, out of bounds. Confirm against upstream.
    for (size_t j=0; j<extrinsic.rows() - 1; j++)
    {
        for (size_t i=0; i<extrinsic.cols() - 1; i++)
        {
            extrinsic[j][i] = values.rotation[i + j*extrinsic.cols()];
        }
    }

    extrinsic[0][3] = values.translation[0];
    extrinsic[1][3] = values.translation[1];
    extrinsic[2][3] = values.translation[2];

    // NOTE(review): returning false on the success path looks like a bug —
    // callers likely expect true here; confirm against the header/callers.
    return false;
}
431
// Default constructor: initializes sensor pointers and state flags.
// Alignment defaults to enabled (m_needAlignment = true).
realsense2Driver::realsense2Driver() : m_depth_sensor(nullptr), m_color_sensor(nullptr),
                                       m_paramParser(), m_verbose(false),
                                       m_initialized(false), m_stereoMode(false),
                                       m_needAlignment(true), m_fps(0),
                                       m_scale(0.0)
{
    // realsense SDK already provides them
    // NOTE(review): the constructor body was dropped from this listing
    // (missing lines); presumably the params_map / supported-features
    // initialization.
}
452
// NOTE(review): the signature was dropped from this listing; from the body
// this is realsense2Driver::pipelineStartup(), which starts the rs2
// pipeline with the current configuration and caches the profile.
{
    try
    {
        m_profile = m_pipeline.start(m_cfg);
    }
    catch (const rs2::error& e)
    {
        yCError(REALSENSE2) << "Failed to start the pipeline:"<< "(" << e.what() << ")";
        m_lastError = e.what();
        return false;
    }
    return true;
}
467
// NOTE(review): the signature was dropped from this listing; from the body
// this is realsense2Driver::pipelineShutdown(), stopping the rs2 pipeline.
{
    try
    {
        m_pipeline.stop();
    }
    catch (const rs2::error& e)
    {
        yCError(REALSENSE2) << "Failed to stop the pipeline:"<< "(" << e.what() << ")";
        m_lastError = e.what();
        return false;
    }
    return true;
}
482
// NOTE(review): the signature was dropped from this listing; from the body
// this is realsense2Driver::pipelineRestart(): stop then start the pipeline
// under the device mutex (used after a configuration change).
{
    std::lock_guard<std::mutex> guard(m_mutex);
    if (!pipelineShutdown())
        return false;

    return pipelineStartup();

}
492
// NOTE(review): the signature and the leading supported-format branch were
// dropped from this listing; from the body this is setFramerate(_fps),
// which falls back on an unsupported format, restarts the pipeline and
// stores the new framerate.
{

    }
    else
    {
        if (m_initialized)
        {
            fallback();
        }

    }

    if (!pipelineRestart())
        return false;

    m_fps = _fps;


    return true;

}
520
// NOTE(review): the signature (and presumably the calls reconfiguring the
// streams to the fallback format) were dropped from this listing; this is
// realsense2Driver::fallback(), reporting the format actually in use.
{
    yCWarning(REALSENSE2)<<"Format not supported, use --verbose for more details. Setting the fallback format";
    std::cout<<"COLOR: "<<m_color_intrin.width<<"x"<<m_color_intrin.height<<" fps: "<<m_fps<<std::endl;
    std::cout<<"DEPTH: "<<m_depth_intrin.width<<"x"<<m_depth_intrin.height<<" fps: "<<m_fps<<std::endl;
}
529
// NOTE(review): the signature was dropped from this listing; from the body
// this is realsense2Driver::initializeRealsenseDevice(): waits for a
// device, configures the streams (D405 handled specially), starts the
// pipeline, warms up the sensor and caches sensor handles and scale.
{
    // Check if there are connected cameras
    if (m_ctx.query_devices().size() == 0)
    {
        yCError(REALSENSE2) << "No device connected, please connect a RealSense device";
        return false;
    }

    //Using the device_hub we can block the program until a device connects
    rs2::device_hub device_hub(m_ctx);
    m_device = device_hub.wait_for_device();

    // Get the camera name as the D405 is to be handled differently from the other cameras
    const std::string camera_name = std::string(m_device.get_info(RS2_CAMERA_INFO_NAME));
    const bool is_d405 = (camera_name.find("D405") != std::string::npos);

    // Extract RGB and depth resolution
    double colorW;
    double colorH;
    double depthW;
    double depthH;
    if (is_d405)
    {
        if (!params_map[depthRes].isSetting)
        {
            yCError(REALSENSE2)<<"Missing depthResolution from [SETTINGS]";
            return false;
        }

        // A D405 camera inherits the RGB resolution from the that of the depth sensor
        colorW = depthW = params_map[depthRes].val[0].asFloat64();
        colorH = depthH = params_map[depthRes].val[1].asFloat64();
    }
    else
    {
        if (!params_map[rgbRes].isSetting)
        {
            yCError(REALSENSE2)<<"Missing rgbResolution from [SETTINGS]";
            return false;
        }
        if (!params_map[depthRes].isSetting)
        {
            yCError(REALSENSE2)<<"Missing depthResolution from [SETTINGS]";
            return false;
        }

        colorW = params_map[rgbRes].val[0].asFloat64();
        colorH = params_map[rgbRes].val[1].asFloat64();
        depthW = params_map[depthRes].val[0].asFloat64();
        depthH = params_map[depthRes].val[1].asFloat64();
    }

    // NOTE(review): the rs2::config stream-enable calls and the stereo
    // branch body were dropped from this listing (missing lines).
    if (m_stereoMode) {
    }
    if (!pipelineStartup())
        return false;
    m_initialized = true;

    //TODO: if more are connected?!
    // Update the selected device
    m_device = m_profile.get_device();
    if (m_verbose)
        // NOTE(review): the verbose statement was dropped from this listing
        // (missing line); presumably a device-information dump.

    // Camera warmup - Dropped frames to allow stabilization
    yCInfo(REALSENSE2) << "Sensor warm-up...";
    for (int i = 0; i < 30; i++)
    {
        try
        {
            m_pipeline.wait_for_frames();
        }
        catch (const rs2::error& e)
        {
            yCError(REALSENSE2) << "m_pipeline.wait_for_frames() failed with error:"<< "(" << e.what() << ")";
            m_lastError = e.what();
        }
    }
    yCInfo(REALSENSE2) << "Device ready!";

    // Given a device, we can query its sensors using:
    m_sensors = m_device.query_sensors();

    yCInfo(REALSENSE2) << "Device consists of" << m_sensors.size() << "sensors. More infos using --verbose option";
    if (m_verbose)
    {
        for (const auto & m_sensor : m_sensors)
        {
            // NOTE(review): the per-sensor dump was dropped from this
            // listing (missing line).
        }
    }

    if (is_d405)
    {
        // A D405 camera only exposes rs2::depth_sensor depth sensor although it also streams RGB images
        // Hence, we use the pointer to the depth sensor to access the RGB-related options
        for (auto & m_sensor : m_sensors)
        {
            if (m_sensor.is<rs2::depth_sensor>())
            // NOTE(review): the sensor-pointer assignments were dropped
            // from this listing (missing lines).
        }
    }
    else
    {
        for (auto & m_sensor : m_sensors)
        {
            if (m_sensor.is<rs2::depth_sensor>())
            // NOTE(review): the depth-sensor assignment was dropped here.
            else if (m_sensor.get_stream_profiles()[0].stream_type() == RS2_STREAM_COLOR)
            // NOTE(review): the color-sensor assignment was dropped here.
        }
    }

    // Retrieve depth scaling factor
    // NOTE(review): the condition querying the depth scale was dropped from
    // this listing (missing line).
    {
        yCError(REALSENSE2) << "Failed to retrieve scale";
        return false;
    }

    // Get stream intrinsics & extrinsics
    // NOTE(review): the transformation-update call was dropped here.
    return true;
}
660
// NOTE(review): the signature was dropped from this listing; from the body
// this refreshes the cached depth/color intrinsics (and, in stereo mode,
// the infrared profile) from the pipeline's active profile.
{
    rs2::pipeline_profile pipeline_profile = m_pipeline.get_active_profile();
    rs2::video_stream_profile depth_stream_profile = rs2::video_stream_profile(pipeline_profile.get_stream(RS2_STREAM_DEPTH));
    rs2::video_stream_profile color_stream_profile = rs2::video_stream_profile(pipeline_profile.get_stream(RS2_STREAM_COLOR));

    m_depth_intrin = depth_stream_profile.get_intrinsics();
    m_color_intrin = color_stream_profile.get_intrinsics();
    // NOTE(review): extrinsics updates were dropped from this listing
    // (missing lines).

    if (m_stereoMode) {
        rs2::video_stream_profile infrared_stream_profile = rs2::video_stream_profile(pipeline_profile.get_stream(RS2_STREAM_INFRARED));
        // NOTE(review): the infrared intrinsics assignment was dropped here.
    }

}
678
679
// NOTE(review): the signature was dropped from this listing; from the body
// this is realsense2Driver::setParams(), applying every parsed parameter
// (accuracy, clip planes, framerate, emitter, alignment, resolutions).
// Errors are accumulated in "ret" via settingErrorMsg().
{
    bool ret = true;
    //ACCURACY
    if (params_map[accuracy].isSetting && ret)
    {
        if (!params_map[accuracy].val[0].isFloat64() )
            settingErrorMsg("Param " + params_map[accuracy].name + " is not a double as it should be.", ret);

        if (! setDepthAccuracy(params_map[accuracy].val[0].asFloat64() ) )
            settingErrorMsg("Setting param " + params_map[accuracy].name + " failed... quitting.", ret);
    }

    //CLIP_PLANES
    if (params_map[clipPlanes].isSetting && ret)
    {
        if (!params_map[clipPlanes].val[0].isFloat64() )
            settingErrorMsg("Param " + params_map[clipPlanes].name + " is not a double as it should be.", ret);

        if (!params_map[clipPlanes].val[1].isFloat64() )
            settingErrorMsg("Param " + params_map[clipPlanes].name + " is not a double as it should be.", ret);

        if (! setDepthClipPlanes(params_map[clipPlanes].val[0].asFloat64(), params_map[clipPlanes].val[1].asFloat64() ) )
            settingErrorMsg("Setting param " + params_map[clipPlanes].name + " failed... quitting.", ret);
    }

    //FRAMERATE
    if (params_map[framerate].isSetting && ret)
    {
        if (!params_map[framerate].val[0].isInt32() )
            settingErrorMsg("Param " + params_map[framerate].name + " is not a int as it should be.", ret);
        else
            m_fps = params_map[framerate].val[0].asInt32();
    }
    else
    {
        yCWarning(REALSENSE2) << "Framerate not specified... setting 30 fps by default";
        m_fps = 30;
    }

    //EMITTER
    if (params_map[enableEmitter].isSetting && ret)
    {
        Value& v = params_map[enableEmitter].val[0];

        if (!v.isBool())
        {
            settingErrorMsg("Param " + params_map[enableEmitter].name + " is not a bool as it should be.", ret);
            return false;
        }
        // NOTE(review): the emitter setOption call (the condition this block
        // belongs to) was dropped from this listing.
        {
            settingErrorMsg("Setting param " + params_map[enableEmitter].name + " failed... quitting.", ret);
        }
    }

    //ALIGNMENT
    if (params_map[needAlignment].isSetting && ret)
    {
        yCWarning(REALSENSE2) << "needAlignment parameter is deprecated, use alignmentFrame instead.";
        Value& v = params_map[needAlignment].val[0];
        if (!v.isBool())
        {
            settingErrorMsg("Param " + params_map[needAlignment].name + " is not a bool as it should be.", ret);
            return false;
        }

        // NOTE(review): the lines applying the deprecated flag were dropped
        // from this listing.
    }

    if (params_map[alignmentFrame].isSetting && ret)
    {
        Value& v = params_map[alignmentFrame].val[0];
        auto alignmentFrameStr = v.asString();
        if (!v.isString())
        {
            settingErrorMsg("Param " + params_map[alignmentFrame].name + " is not a string as it should be.", ret);
            return false;
        }

        // NOTE(review): the stringRSStreamMap lookup condition was dropped
        // from this listing; on an unknown value the error below fires.
            settingErrorMsg("Value "+alignmentFrameStr+" not allowed for " + params_map[alignmentFrame].name + " see documentation for supported values.", ret);
            return false;
        }

        // NOTE(review): the m_alignment_stream assignment was dropped here.
    }

    //DEPTH_RES
    if (params_map[depthRes].isSetting && ret)
    {
        Value p1, p2;
        p1 = params_map[depthRes].val[0];
        p2 = params_map[depthRes].val[1];

        if (!p1.isInt32() || !p2.isInt32() )
        {
            settingErrorMsg("Param " + params_map[depthRes].name + " is not a int as it should be.", ret);
        }

        if (! setDepthResolution(p1.asInt32(), p2.asInt32()))
        {
            settingErrorMsg("Setting param " + params_map[depthRes].name + " failed... quitting.", ret);
        }
    }

    //RGB_RES
    if (params_map[rgbRes].isSetting && ret)
    {
        Value p1, p2;
        p1 = params_map[rgbRes].val[0];
        p2 = params_map[rgbRes].val[1];

        if (!p1.isInt32() || !p2.isInt32() )
        {
            settingErrorMsg("Param " + params_map[rgbRes].name + " is not a int as it should be.", ret);
        }

        if (! setRgbResolution(p1.asInt32(), p2.asInt32()))
        {
            settingErrorMsg("Setting param " + params_map[rgbRes].name + " failed... quitting.", ret);
        }
    }

    return ret;
}
807
808
// NOTE(review): the signature was dropped from this listing; from the body
// this is realsense2Driver::open(Searchable& config): parse configuration,
// initialize the device, then apply the parameters.
{
    std::vector<RGBDSensorParamParser::RGBDParam*> params;
    params.reserve(params_map.size());
    for (auto& p:params_map)
    {
        params.push_back(&(p.second));
    }
    //Manage depth quantization parameter
    if(config.check("QUANT_PARAM")) {
        // NOTE(review): the declaration of quantCfg was dropped from this
        // listing (missing line); presumably a yarp::os::Property.
        quantCfg.fromString(config.findGroup("QUANT_PARAM").toString());
        // NOTE(review): a line was dropped here (missing line).
        if (quantCfg.check("depth_quant")) {
            m_depthDecimalNum = quantCfg.find("depth_quant").asInt32();
        }
    }

    if(config.check("rotateImage180")){
        m_rotateImage180 = config.find("rotateImage180").asBool();
        if (m_rotateImage180) {
            yCInfo(REALSENSE2) << "parameter rotateImage180 enabled, the image is rotated";
        }
    }
    m_verbose = config.check("verbose");
    if (config.check("stereoMode")) {
        m_stereoMode = config.find("stereoMode").asBool();
    }

    if (!m_paramParser.parseParam(config, params))
    {
        yCError(REALSENSE2) << "Failed to parse the parameters";
        return false;
    }

    // NOTE(review): the initializeRealsenseDevice() condition was dropped
    // from this listing (missing line).
    {
        yCError(REALSENSE2) << "Failed to initialize the realsense device";
        return false;
    }

    // setting Parameters
    return setParams();

}
854
// NOTE(review): the signature and a statement (presumably the pipeline
// shutdown) were dropped from this listing; this is
// realsense2Driver::close().
{
    return true;
}
860
// NOTE(review): the signatures were dropped from this listing; these are
// the RGB image height and width getters, backed by the cached color
// intrinsics.
{
    return m_color_intrin.height;
}

{
    return m_color_intrin.width;
}
870
// NOTE(review): the signature was dropped from this listing; this is the
// getRgbSupportedConfigurations stub.
{
    yCWarning(REALSENSE2) << "getRgbSupportedConfigurations not implemented yet";
    return false;
}
876
877bool realsense2Driver::getRgbResolution(int &width, int &height)
878{
879 width = m_color_intrin.width;
880 height = m_color_intrin.height;
881 return true;
882}
883
// Change the depth stream resolution and restart the pipeline; falls back
// to the previous format if the requested one is unsupported.
bool realsense2Driver::setDepthResolution(int width, int height)
{
    // NOTE(review): the leading supported-format check / stream
    // reconfiguration was dropped from this listing (missing lines).
    else
    {
        if (m_initialized)
        {
            fallback();
            return false;
        }
    }

    if (!pipelineRestart())
        return false;

    // NOTE(review): a transformation-update call was dropped here.
    return true;
}
903
// Change the RGB stream resolution (and, in stereo mode, the infrared
// streams), restarting the pipeline; falls back on failure.
bool realsense2Driver::setRgbResolution(int width, int height)
{
    bool fail = true;
    // NOTE(review): the supported-format check that opens a block and
    // clears "fail" was dropped from this listing (missing lines); the
    // brace structure below is unbalanced as a result.
    fail = false;
    if (m_stereoMode)
    {
        // NOTE(review): a condition and its then-branch body were dropped
        // here (missing lines).
        {
        }
        else
        {
            fail = true;
        }
    }
    }

    if (m_initialized && fail)
    {
        fallback();
        return false;
    }

    if (!pipelineRestart())
        return false;

    // NOTE(review): a transformation-update call was dropped here.
    return true;
}
936
937
938bool realsense2Driver::setRgbFOV(double horizontalFov, double verticalFov)
939{
940 // It seems to be not available...
941 return false;
942}
943
944bool realsense2Driver::setDepthFOV(double horizontalFov, double verticalFov)
945{
946 // It seems to be not available...
947 return false;
948}
949
// NOTE(review): the signature and the option-setting call producing "ok"
// were dropped from this listing; presumably
// realsense2Driver::setDepthAccuracy(double accuracy).
{
    std::lock_guard<std::mutex> guard(m_mutex);
    // NOTE(review): the setOption call assigning "ok" is missing here.
    if (ok) {
        // NOTE(review): the success-branch body was dropped here.
    }
    return ok;
}
959
960bool realsense2Driver::getRgbFOV(double &horizontalFov, double &verticalFov)
961{
962 float fov[2];
963 rs2_fov(&m_color_intrin, fov);
964 horizontalFov = fov[0];
965 verticalFov = fov[1];
966 return true;
967}
968
// NOTE(review): the signatures were dropped from this listing; these are
// the RGB mirroring setter/getter stubs — mirroring is unsupported.
{
    yCWarning(REALSENSE2) << "Mirroring not supported";
    return false;
}

{
    yCWarning(REALSENSE2) << "Mirroring not supported";
    return false;
}
980
// NOTE(review): the signature was dropped from this listing; presumably
// getRgbIntrinsicParam(Property& intrinsic).
{
    return setIntrinsic(intrinsic, m_color_intrin);
}
985
// NOTE(review): the signatures were dropped from this listing; these are
// the depth image height and width getters, backed by the cached depth
// intrinsics.
{
    return m_depth_intrin.height;
}

{
    return m_depth_intrin.width;
}
995
996bool realsense2Driver::getDepthFOV(double& horizontalFov, double& verticalFov)
997{
998 float fov[2];
999 rs2_fov(&m_depth_intrin, fov);
1000 horizontalFov = fov[0];
1001 verticalFov = fov[1];
1002 return true;
1003}
1004
// NOTE(review): the signature was dropped from this listing; presumably
// getDepthIntrinsicParam(Property& intrinsic).
{
    return setIntrinsic(intrinsic, m_depth_intrin);;
}
1009
// NOTE(review): the signature and the getOption condition were dropped
// from this listing; presumably realsense2Driver::getDepthAccuracy(),
// returning 0.0 when the option cannot be read.
{
    float accuracy = 0.0;
    // NOTE(review): the getOption condition is missing here.
    {
        return accuracy;
    }
    return 0.0;
}
1019
// NOTE(review): the signatures were dropped from this listing; these are
// the depth clip-planes getter and setter. When clipPlanes is a
// description-only parameter the configured values are used directly.
{
    if (params_map[clipPlanes].isDescription)
    {
        nearPlane = params_map[clipPlanes].val[0].asFloat64();
        farPlane = params_map[clipPlanes].val[1].asFloat64();
        return true;
    }

    // NOTE(review): the getOption calls producing "ret" were dropped from
    // this listing (missing lines).
    return ret;
}

{
    if (params_map[clipPlanes].isDescription)
    {
        return false;
    }
    // NOTE(review): the setOption calls producing "ret" were dropped from
    // this listing (missing lines).
    return ret;
}
1044
// NOTE(review): the signatures were dropped from this listing; these are
// the depth mirroring setter/getter stubs — mirroring is unsupported.
{
    yCWarning(REALSENSE2) << "Mirroring not supported";
    return false;
}

{
    yCWarning(REALSENSE2) << "Mirroring not supported";
    return false;
}
1056
1061
// NOTE(review): the signature was dropped from this listing; presumably
// getRgbImage(FlexImage& rgbImage, Stamp* timeStamp): wait for a frameset,
// optionally align it, and copy out the color image.
{
    std::lock_guard<std::mutex> guard(m_mutex);
    rs2::frameset data;
    try
    {
        data = m_pipeline.wait_for_frames();
    }
    catch (const rs2::error& e)
    {
        yCError(REALSENSE2) << "m_pipeline.wait_for_frames() failed with error:"<< "(" << e.what() << ")";
        m_lastError = e.what();
        return false;
    }
    // NOTE(review): the alignment condition was dropped from this listing;
    // compare the same pattern preserved in getImages() below.
    {
        rs2::align align(m_alignment_stream);
        data = align.process(data);
    }
    return getImage(rgbImage, timeStamp, data);
}
1083
// NOTE(review): the signature was dropped from this listing; presumably
// getDepthImage(depthImage& depthImage, Stamp* timeStamp): wait for a
// frameset, optionally align it, and copy out the depth image.
{
    std::lock_guard<std::mutex> guard(m_mutex);
    rs2::frameset data;
    try
    {
        data = m_pipeline.wait_for_frames();
    }
    catch (const rs2::error& e)
    {
        yCError(REALSENSE2) << "m_pipeline.wait_for_frames() failed with error:"<< "(" << e.what() << ")";
        m_lastError = e.what();
        return false;
    }
    // NOTE(review): the alignment condition was dropped from this listing;
    // compare the same pattern preserved in getImages() below.
    {
        rs2::align align(m_alignment_stream);
        data = align.process(data);
    }
    return getImage(depthImage, timeStamp, data);
}
1105
// NOTE(review): the signature was dropped from this listing; presumably
// getImage(FlexImage& Frame, Stamp* timeStamp, const rs2::frameset&):
// copy the color frame of a frameset into a YARP image, optionally
// rotated by 180 degrees.
{
    rs2::video_frame color_frm = sourceFrame.get_color_frame();
    rs2_format format = color_frm.get_profile().format();

    int pixCode = pixFormatToCode(format);
    size_t mem_to_wrt = color_frm.get_width() * color_frm.get_height() * bytesPerPixel(format);

    // NOTE(review): the pixCode validity condition was dropped from this
    // listing (missing line).
    {
        yCError(REALSENSE2) << "Pixel Format not recognized";
        return false;
    }

    Frame.setPixelCode(pixCode);
    Frame.resize(m_color_intrin.width, m_color_intrin.height);

    if ((size_t) Frame.getRawImageSize() != mem_to_wrt)
    {
        yCError(REALSENSE2) << "Device and local copy data size doesn't match";
        return false;
    }
    if (m_rotateImage180) {
        // NOTE(review): the source index below looks suspicious — for i == 0
        // it reads at getRawImageSize() + bytesPerPixel(format), which is
        // past the end of the source buffer; confirm against upstream.
        for (int i = 0; i < (color_frm.get_width() * color_frm.get_height()); i++) {
            for (size_t pixelIndex = 0; pixelIndex < bytesPerPixel(format); pixelIndex++) {
                ((char *)Frame.getRawImage())[i * bytesPerPixel(format) + pixelIndex] = ((char *)color_frm.get_data())[
                    ( Frame.getRawImageSize() -(( i-1) * bytesPerPixel(format))) + pixelIndex];
            }
        }
    } else {
        memcpy((void*)Frame.getRawImage(), (void*)color_frm.get_data(), mem_to_wrt);
    }
    // NOTE(review): the stamp update was dropped from this listing
    // (missing line).
    if (timeStamp != nullptr)
    {
        *timeStamp = m_rgb_stamp;
    }
    return true;
}
1145
// Copy the depth frame of a frameset into a YARP depth image, converting
// raw uint16 device units to meters via m_scale, optionally rotating by
// 180 degrees and quantizing to m_depthDecimalNum decimal digits.
bool realsense2Driver::getImage(depthImage& Frame, Stamp *timeStamp, const rs2::frameset &sourceFrame)
{
    rs2::depth_frame depth_frm = sourceFrame.get_depth_frame();
    rs2_format format = depth_frm.get_profile().format();

    int pixCode = pixFormatToCode(format);

    int w = depth_frm.get_width();
    int h = depth_frm.get_height();

    // NOTE(review): the pixCode validity condition was dropped from this
    // listing (missing line).
    {
        yCError(REALSENSE2) << "Pixel Format not recognized";
        return false;
    }

    Frame.resize(w, h);

    float* rawImage = &Frame.pixel(0,0);
    const auto * rawImageRs =(const uint16_t *) depth_frm.get_data();
    double nearPlane;
    double farPlane;
    int intTemp;
    // NOTE(review): a condition (presumably guarding the quantization
    // branch) was dropped from this listing, which is why an extra closing
    // brace appears after the loop below.
    float powCoeff = pow(10.0f, (float) m_depthDecimalNum);
    for(int i = 0; i < w * h; i++)
    {
        if (m_rotateImage180) {
            rawImage[i] = m_scale * rawImageRs[(w * h) - i -1];
        }else {
            rawImage[i] = m_scale * rawImageRs[i];
        }
        // NOTE(review): a line was dropped here (missing line).
        intTemp = (int) (rawImage[i] * powCoeff);
        rawImage[i] = ((float) intTemp) / powCoeff;
    }
    }

    // NOTE(review): the depth stamp update was dropped from this listing
    // (missing line).
    if (timeStamp != nullptr)
    {
        *timeStamp = m_depth_stamp;
    }
    return true;
}
1191
// NOTE(review): the signature was dropped from this listing; presumably
// getImages(colorFrame, depthFrame, colorStamp, depthStamp): wait for one
// frameset and extract both images from it, after optional alignment.
{
    std::lock_guard<std::mutex> guard(m_mutex);
    rs2::frameset data;
    try
    {
        data = m_pipeline.wait_for_frames();
    }
    catch (const rs2::error& e)
    {
        yCError(REALSENSE2) << "m_pipeline.wait_for_frames() failed with error:"<< "(" << e.what() << ")";
        m_lastError = e.what();
        return false;
    }
    if (m_alignment_stream != RS2_STREAM_ANY) // RS2_STREAM_ANY is used as no-alignment-needed value.
    {
        rs2::align align(m_alignment_stream);
        data = align.process(data);
    }
    return getImage(colorFrame, colorStamp, data) && getImage(depthFrame, depthStamp, data);
}
1213
1218
// NOTE(review): the signature was dropped from this listing; presumably
// getLastErrorMsg(Stamp* timeStamp), returning the last cached SDK error.
{
    return m_lastError;
}
1223
// NOTE(review): the signature was dropped from this listing; presumably
// getCameraDescription(CameraDescriptor* camera). Fills in the device
// information string and marks the bus as USB.
{
    camera->deviceDescription = get_device_information(m_device);
    camera->busType = BUS_USB;
    return true;
}
1230
// Report whether the given cameraFeature_id_t is in the driver's supported
// feature list.
bool realsense2Driver::hasFeature(int feature, bool* hasFeature)
{
    // NOTE(review): the declaration of "f" and the range-check condition
    // were dropped from this listing (missing lines).
    f = static_cast<cameraFeature_id_t>(feature);
    {
        return false;
    }

    *hasFeature = std::find(m_supportedFeatures.begin(), m_supportedFeatures.end(), f) != m_supportedFeatures.end();

    return true;
}
1244
// NOTE(review): the signature was dropped from this listing; presumably
// realsense2Driver::setFeature(int feature, double value). Dispatches the
// requested feature to the matching sensor option.
{
    bool b = false;
    if (!hasFeature(feature, &b) || !b)
    {
        yCError(REALSENSE2) << "Feature not supported!";
        return false;
    }

    float valToSet = 0.0;
    b = false;
    auto f = static_cast<cameraFeature_id_t>(feature);
    switch(f)
    {
    // NOTE(review): several case labels and the option-conversion/setOption
    // calls were dropped from this listing (missing lines); the braces
    // below are the surviving fragments of those branches.
    {
        if (m_stereoMode)
        {
            {
            }
        }
    }
    break;
    case YARP_FEATURE_GAIN:
    {
        if (m_stereoMode)
        {
            {
            }
        }
    }
    break;
    {
        b = setFramerate((int) value);
        break;
    }
    break;
    break;
    case YARP_FEATURE_HUE:
    break;
    break;
    default:
        yCError(REALSENSE2) << "Feature not supported!";
        return false;
    }
    if (!b)
    {
        yCError(REALSENSE2) << "Something went wrong setting the feature requested, run the device with --verbose for the supported options";
        if (m_verbose)
        {
            // NOTE(review): the verbose dump call was dropped here.
        }
        return false;
    }
    return true;
}
1321
// NOTE(review): the signature was dropped from this listing; presumably
// realsense2Driver::getFeature(int feature, double* value). Reads the
// requested feature from the matching sensor option.
{
    bool b = false;
    if (!hasFeature(feature, &b) || !b)
    {
        yCError(REALSENSE2) << "Feature not supported!";
        return false;
    }

    float valToGet = 0.0;
    b = false;

    auto f = static_cast<cameraFeature_id_t>(feature);
    switch(f)
    {
    // NOTE(review): several case labels and the getOption/conversion calls
    // were dropped from this listing (missing lines).
    break;
    case YARP_FEATURE_GAIN:
    break;
    {
        b = true;
        *value = (double) m_fps;
        break;
    }
    break;
    break;
    case YARP_FEATURE_HUE:
    break;
    break;
    default:
        yCError(REALSENSE2) << "Feature not supported!";
        return false;
    }
    if (!b)
    {
        yCError(REALSENSE2) << "Something went wrong setting the feature requested, run the device with --verbose for the supported options";
        if (m_verbose)
        {
            // NOTE(review): the verbose dump call was dropped here.
        }
        return false;
    }
    return true;
}
1382
// NOTE(review): the signatures were dropped from this listing; these are
// the two-valued setFeature/getFeature overloads, which are unsupported.
{
    yCError(REALSENSE2) << "No 2-valued feature are supported";
    return false;
}

{
    yCError(REALSENSE2) << "No 2-valued feature are supported";
    return false;
}
1394
// NOTE(review): the signature was dropped from this listing; presumably
// hasOnOff(int feature, bool* HasOnOff).
{
    bool b;
    if (!hasFeature(feature, &b) || !b)
    {
        yCError(REALSENSE2) << "Feature not supported!";
        return false;
    }

    auto f = static_cast<cameraFeature_id_t>(feature);
    // NOTE(review): the condition testing which features support on/off
    // was dropped from this listing (missing line).
    {
        *HasOnOff = true;
        return true;
    }
    *HasOnOff = false;
    return true;
}
1413
// realsense2Driver::setActive — switch a feature on or off. Guards: the
// feature must be supported AND support on/off (hasOnOff). The switch then
// forwards the request to the sensor and returns the operation result b.
// NOTE(review): signature line (1414) and the case labels / option-setting
// statements (1431-1435) are missing from this collapsed listing.
1415{
1416 bool b;
1417 if (!hasFeature(feature, &b) || !b)
1418 {
1419 yCError(REALSENSE2) << "Feature not supported!";
1420 return false;
1421 }
1422
1423 if (!hasOnOff(feature, &b) || !b)
1424 {
1425 yCError(REALSENSE2) << "Feature does not have OnOff.. call hasOnOff() to know if a specific feature support OnOff mode";
1426 return false;
1427 }
1428
1429 switch(feature)
1430 {
1433 return b;
1436 return b;
1437 default:
1438 return false;
1439 }
1440
// Unreachable: every switch branch returns.
1441 return true;
1442}
1443
// realsense2Driver::getActive — query whether a feature is currently on,
// by reading the corresponding rs2 auto-enable option from the color
// sensor and converting the float response to bool.
// NOTE(review): the case labels on lines 1461 and 1465 are missing from
// this collapsed listing — presumably YARP_FEATURE_WHITE_BALANCE and
// YARP_FEATURE_EXPOSURE, matching the options read below; confirm.
1444bool realsense2Driver::getActive( int feature, bool *isActive)
1445{
1446 bool b;
1447 if (!hasFeature(feature, &b) || !b)
1448 {
1449 yCError(REALSENSE2) << "Feature not supported!";
1450 return false;
1451 }
1452
1453 if (!hasOnOff(feature, &b) || !b)
1454 {
1455 yCError(REALSENSE2) << "Feature does not have OnOff.. call hasOnOff() to know if a specific feature support OnOff mode";
1456 return false;
1457 }
1458 float response = 0.0;
1459 switch(feature)
1460 {
1462 b = getOption(RS2_OPTION_ENABLE_AUTO_WHITE_BALANCE, m_color_sensor, response); //TODO check if this exotic conversion works
1463 *isActive = (bool) response;
1464 return b;
1466 b = getOption(RS2_OPTION_ENABLE_AUTO_EXPOSURE, m_color_sensor, response); //TODO check if this exotic conversion works
1467 *isActive = (bool) response;
1468 return b;
1469 default:
1470 return false;
1471 }
1472
// Unreachable: every switch branch returns.
1473 return true;
1474}
1475
// realsense2Driver::hasAuto — report whether a feature supports 'auto'
// mode. *hasAuto is true only for the features selected by the missing
// condition on line 1486 (confirm against full source).
1476bool realsense2Driver::hasAuto(int feature, bool *hasAuto)
1477{
1478 bool b;
1479 if (!hasFeature(feature, &b) || !b)
1480 {
1481 yCError(REALSENSE2) << "Feature not supported!";
1482 return false;
1483 }
1484
1485 auto f = static_cast<cameraFeature_id_t>(feature);
// NOTE(review): line 1486 (the if-condition over f) is missing from this
// collapsed listing.
1487 {
1488 *hasAuto = true;
1489 return true;
1490 }
1491 *hasAuto = false;
1492 return true;
1493}
1494
// realsense2Driver::hasManual — report whether a feature supports 'manual'
// mode. *hasManual is true only for the features selected by the missing
// condition on lines 1505-1506 (confirm against full source).
1495bool realsense2Driver::hasManual( int feature, bool* hasManual)
1496{
1497 bool b;
1498 if (!hasFeature(feature, &b) || !b)
1499 {
1500 yCError(REALSENSE2) << "Feature not supported!";
1501 return false;
1502 }
1503
1504 auto f = static_cast<cameraFeature_id_t>(feature);
// NOTE(review): lines 1505-1506 (the if-condition over f) are missing from
// this collapsed listing.
1507 {
1508 *hasManual = true;
1509 return true;
1510 }
1511 *hasManual = false;
1512 return true;
1513}
1514
1515bool realsense2Driver::hasOnePush(int feature, bool* hasOnePush)
1516{
1517 bool b;
1518 if (!hasFeature(feature, &b) || !b)
1519 {
1520 yCError(REALSENSE2) << "Feature not supported!";
1521 return false;
1522 }
1523
1524 return hasAuto(feature, hasOnePush);
1525}
1526
// realsense2Driver::setMode — put a feature into auto or manual mode by
// writing 1.0 (auto) or 0.0 (manual) to the sensor's auto-enable option;
// the locals 'one'/'zero' below are those payloads. Only white balance
// (presumably — its if-condition is missing) and exposure have both modes;
// anything else fails with an error.
// NOTE(review): signature line (1527), the first if-condition (1539) and
// the setOption() calls on MODE_AUTO/MODE_MANUAL branches (1544, 1546,
// 1560, 1562) are missing from this collapsed listing.
1528{
1529 bool b;
1530 if (!hasFeature(feature, &b) || !b)
1531 {
1532 yCError(REALSENSE2) << "Feature not supported!";
1533 return false;
1534 }
1535 float one = 1.0;
1536 float zero = 0.0;
1537
1538 auto f = static_cast<cameraFeature_id_t>(feature);
1540 {
1541 switch(mode)
1542 {
1543 case MODE_AUTO:
1545 case MODE_MANUAL:
1547 case MODE_UNKNOWN:
1548 return false;
1549 default:
1550 return false;
1551 }
1552 return true;
1553 }
1554
1555 if (f == YARP_FEATURE_EXPOSURE)
1556 {
1557 switch(mode)
1558 {
1559 case MODE_AUTO:
1561 case MODE_MANUAL:
1563 case MODE_UNKNOWN:
1564 return false;
1565 default:
1566 return false;
1567 }
1568 return true;
1569 }
1570
1571
1572 yCError(REALSENSE2) << "Feature does not have both auto and manual mode";
1573 return false;
1574}
1575
// realsense2Driver::getMode — read back the current mode of a feature by
// querying its auto-enable option into 'res': 0.0 maps to MODE_MANUAL,
// 1.0 to MODE_AUTO, anything else to MODE_UNKNOWN.
// NOTE(review): signature line (1576), the first if-condition (1587) and
// the getOption() calls (1589, 1594) are missing from this collapsed
// listing; 'ret' accumulates the option-read results.
1577{
1578 bool b;
1579 if (!hasFeature(feature, &b) || !b)
1580 {
1581 yCError(REALSENSE2) << "Feature not supported!";
1582 return false;
1583 }
1584 float res = 0.0;
1585 bool ret = true;
1586 auto f = static_cast<cameraFeature_id_t>(feature);
1588 {
1590 }
1591
1592 if (f == YARP_FEATURE_EXPOSURE)
1593 {
1595 }
1596
// Exact float comparison is intentional here: the rs2 option reports the
// discrete values 0.0 / 1.0 for manual / auto.
1597 if (res == 0.0)
1598 {
1599 *mode = MODE_MANUAL;
1600 }
1601 else if (res == 1.0)
1602 {
1603 *mode = MODE_AUTO;
1604 }
1605 else
1606 {
1607 *mode = MODE_UNKNOWN;
1608 }
1609 return ret;
1610}
1611
// realsense2Driver::setOnePush — trigger a one-shot automatic adjustment of
// the feature. Guards: the feature must be supported and must report
// onePush capability (which delegates to hasAuto()).
// NOTE(review): signature line (1612) and the statements that actually
// perform the one-push action (1627-1628, collapsed line 1629) are missing
// from this listing — presumably a temporary switch to auto mode; confirm
// against full source.
1613{
1614 bool b;
1615 if (!hasFeature(feature, &b) || !b)
1616 {
1617 yCError(REALSENSE2) << "Feature not supported!";
1618 return false;
1619 }
1620
1621 if (!hasOnePush(feature, &b) || !b)
1622 {
1623 yCError(REALSENSE2) << "Feature doesn't have OnePush";
1624 return false;
1625 }
1626
1629
1630 return true;
1631}
1632
// realsense2Driver::getImage (mono overload) — grab one frameset and return
// the two infrared frames concatenated side by side into a single mono
// image (left | right) via yarp::sig::utils::horzConcat. Requires the
// stereo (infrared) streams to be enabled.
// NOTE(review): signature line (1633), the pixel-format check condition
// (1660) and the declarations of the wrapper images imgL/imgR (1667) are
// missing from this collapsed listing.
1634{
1635 if (!m_stereoMode)
1636 {
1637 yCError(REALSENSE2)<<"Infrared stereo stream not enabled";
1638 return false;
1639 }
1640
// width() is twice the IR intrinsics width, so the output holds both frames.
1641 image.resize(width(), height());
1642 std::lock_guard<std::mutex> guard(m_mutex);
1643 rs2::frameset data;
1644 try
1645 {
1646 data = m_pipeline.wait_for_frames();
1647 }
1648 catch (const rs2::error& e)
1649 {
1650 yCError(REALSENSE2) << "m_pipeline.wait_for_frames() failed with error:"<< "(" << e.what() << ")";
1651 m_lastError = e.what();
1652 return false;
1653 }
1654
1655 rs2::video_frame frm1 = data.get_infrared_frame(1);
1656 rs2::video_frame frm2 = data.get_infrared_frame(2);
1657
1658 int pixCode = pixFormatToCode(frm1.get_profile().format());
1659
// NOTE(review): missing line 1660 presumably tests pixCode against
// VOCAB_PIXEL_MONO, matching the error message below.
1661 {
1662 yCError(REALSENSE2) << "Expecting Pixel Format MONO";
1663 return false;
1664 }
1665
1666 // Wrap rs images with yarp ones.
// setExternal wraps the librealsense buffers without copying; the copy
// happens inside horzConcat when composing the output image.
1668 imgL.setExternal((unsigned char*) (frm1.get_data()), frm1.get_width(), frm1.get_height());
1669 imgR.setExternal((unsigned char*) (frm2.get_data()), frm2.get_width(), frm2.get_height());
1670 return utils::horzConcat(imgL, imgR, image);
1671}
1672
// height() — frame height of the (concatenated) infrared image, taken from
// the IR stream intrinsics. NOTE(review): the signature line (1673) is
// missing from this collapsed listing.
1674{
1675 return m_infrared_intrin.height;
1676}
1677
// width() — frame width of the concatenated infrared image: twice the IR
// intrinsics width because getImage() places the two IR frames side by
// side. NOTE(review): the signature line (1678) is missing from this
// collapsed listing.
1679{
1680 return m_infrared_intrin.width*2;
1681}
@ YARP_FEATURE_NUMBER_OF
@ YARP_FEATURE_SHARPNESS
@ YARP_FEATURE_FRAME_RATE
@ YARP_FEATURE_HUE
@ YARP_FEATURE_WHITE_BALANCE
@ YARP_FEATURE_EXPOSURE
@ YARP_FEATURE_MIRROR
@ YARP_FEATURE_SATURATION
@ YARP_FEATURE_GAIN
@ VOCAB_PIXEL_RGBA
Definition Image.h:45
@ VOCAB_PIXEL_MONO16
Definition Image.h:43
@ VOCAB_PIXEL_BGRA
Definition Image.h:46
@ VOCAB_PIXEL_BGR
Definition Image.h:49
@ VOCAB_PIXEL_INVALID
Definition Image.h:41
@ VOCAB_PIXEL_MONO
Definition Image.h:42
@ VOCAB_PIXEL_RGB
Definition Image.h:44
bool ret
yarp::os::Stamp m_depth_stamp
rs2::pipeline m_pipeline
bool setActive(int feature, bool onoff) override
Set the requested feature on or off.
bool setDepthClipPlanes(double nearPlane, double farPlane) override
Set the clipping planes of the sensor.
rs2_intrinsics m_infrared_intrin
RGBDSensor_status getSensorStatus() override
Get the surrent status of the sensor, using enum type.
bool hasManual(int feature, bool *hasManual) override
Check if the requested feature has the 'manual' mode.
rs2_intrinsics m_color_intrin
bool getRgbMirroring(bool &mirror) override
Get the mirroring setting of the sensor.
bool getDepthImage(depthImage &depthImage, Stamp *timeStamp=nullptr) override
rs2::sensor * m_depth_sensor
bool getActive(int feature, bool *isActive) override
Get the current status of the feature, on or off.
std::vector< cameraFeature_id_t > m_supportedFeatures
bool close() override
Close the DeviceDriver.
bool setFramerate(const int _fps)
bool hasAuto(int feature, bool *hasAuto) override
Check if the requested feature has the 'auto' mode.
int height() const override
Return the height of each frame.
bool setDepthFOV(double horizontalFov, double verticalFov) override
Set the field of view (FOV) of the depth camera.
bool hasFeature(int feature, bool *hasFeature) override
Check if camera has the requested feature (saturation, brightness ... )
bool getCameraDescription(CameraDescriptor *camera) override
Get a basic description of the camera hw.
bool getRgbSupportedConfigurations(yarp::sig::VectorOf< yarp::dev::CameraConfig > &configurations) override
Get the possible configurations of the camera.
bool getMode(int feature, FeatureMode *mode) override
Get the current mode for the feature.
bool setDepthResolution(int width, int height) override
Set the resolution of the depth image from the camera.
bool setRgbFOV(double horizontalFov, double verticalFov) override
Set the field of view (FOV) of the rgb camera.
rs2::pipeline_profile m_profile
rs2_intrinsics m_depth_intrin
rs2_stream m_alignment_stream
int getDepthHeight() override
Return the height of each frame.
bool setRgbMirroring(bool mirror) override
Set the mirroring setting of the sensor.
bool open(yarp::os::Searchable &config) override
Open the DeviceDriver.
rs2_extrinsics m_depth_to_color
double getDepthAccuracy() override
Get the minimum detectable variation in distance [meter].
bool setDepthMirroring(bool mirror) override
Set the mirroring setting of the sensor.
bool hasOnOff(int feature, bool *HasOnOff) override
Check if the camera has the ability to turn on/off the requested feature.
bool hasOnePush(int feature, bool *hasOnePush) override
Check if the requested feature has the 'onePush' mode.
std::string getLastErrorMsg(Stamp *timeStamp=NULL) override
Return an error message in case of error.
int getDepthWidth() override
Return the height of each frame.
bool getRgbResolution(int &width, int &height) override
Get the resolution of the rgb image from the camera.
bool getDepthFOV(double &horizontalFov, double &verticalFov) override
Get the field of view (FOV) of the depth camera.
bool setFeature(int feature, double value) override
Set the requested feature to a value (saturation, brightness ... )
bool setOnePush(int feature) override
Set the requested feature to a value (saturation, brightness ... )
bool setDepthAccuracy(double accuracy) override
Set the minimum detectable variation in distance [meter] when possible.
rs2::sensor * m_color_sensor
bool getRgbFOV(double &horizontalFov, double &verticalFov) override
Get the field of view (FOV) of the rgb camera.
bool getRgbImage(FlexImage &rgbImage, Stamp *timeStamp=nullptr) override
Get the rgb frame from the device.
rs2_extrinsics m_color_to_depth
bool getImages(FlexImage &colorFrame, depthImage &depthFrame, Stamp *colorStamp=NULL, Stamp *depthStamp=NULL) override
yarp::os::Stamp m_rgb_stamp
bool getExtrinsicParam(yarp::sig::Matrix &extrinsic) override
Get the extrinsic parameters from the device.
bool getRgbIntrinsicParam(Property &intrinsic) override
Get the intrinsic parameters of the rgb camera.
int getRgbHeight() override
Return the height of each frame.
bool getDepthIntrinsicParam(Property &intrinsic) override
Get the intrinsic parameters of the depth camera.
bool getImage(yarp::sig::ImageOf< yarp::sig::PixelMono > &image) override
Get an image from the frame grabber.
int width() const override
Return the width of each frame.
int getRgbWidth() override
Return the width of each frame.
yarp::dev::RGBDSensorParamParser m_paramParser
bool setMode(int feature, FeatureMode mode) override
Set the requested mode for the feature.
std::vector< rs2::sensor > m_sensors
bool getFeature(int feature, double *value) override
Get the current value for the requested feature.
std::string m_lastError
bool setRgbResolution(int width, int height) override
Set the resolution of the rgb image from the camera.
bool getDepthClipPlanes(double &nearPlane, double &farPlane) override
Get the clipping planes of the sensor.
bool getDepthMirroring(bool &mirror) override
Get the mirroring setting of the sensor.
bool parseParam(const yarp::os::Searchable &config, std::vector< RGBDParam * > &params)
parseParam, parse the params stored in a Searchable.
yarp::sig::IntrinsicParams rgbIntrinsic
yarp::sig::IntrinsicParams depthIntrinsic
A mini-server for performing network communication in the background.
A class for storing options and configuration information.
Definition Property.h:33
A base class for nested structures that can be searched.
Definition Searchable.h:31
virtual bool check(const std::string &key) const =0
Check if there exists a property of the given name.
virtual Value & find(const std::string &key) const =0
Gets a value corresponding to a given keyword.
virtual Bottle & findGroup(const std::string &key) const =0
Gets a list corresponding to a given keyword.
An abstraction for a time stamp and/or sequence number.
Definition Stamp.h:21
void update()
Set the timestamp to the current time, and increment the sequence number (wrapping to 0 if the sequen...
Definition Stamp.cpp:124
A single value (typically within a Bottle).
Definition Value.h:43
virtual bool isString() const
Checks if value is a string.
Definition Value.cpp:156
virtual bool isBool() const
Checks if value is a boolean.
Definition Value.cpp:114
virtual bool asBool() const
Get boolean value.
Definition Value.cpp:186
virtual std::string asString() const
Get string value.
Definition Value.cpp:234
Image class with user control of representation details.
Definition Image.h:363
A class for a Matrix.
Definition Matrix.h:39
static std::map< std::string, RGBDSensorParamParser::RGBDParam > params_map
#define yCInfo(component,...)
#define yCError(component,...)
#define yCWarning(component,...)
#define YARP_LOG_COMPONENT(name,...)
STL namespace.
For streams capable of holding different kinds of content, check what they actually have.
An interface to the operating system, including Port based communication.
bool horzConcat(const yarp::sig::Image &inImgL, const yarp::sig::Image &inImgR, yarp::sig::Image &outImg)
Concatenate horizontally two images of the same size in one with double width.
YarpDistortion
The YarpDistortion enum to define the type of the distortion model of the camera.
constexpr char clipPlanes[]
static bool setOption(rs2_option option, const rs2::sensor *sensor, float value)
static int pixFormatToCode(const rs2_format p)
static void print_supported_options(const rs2::sensor &sensor)
static const std::map< std::string, rs2_stream > stringRSStreamMap
constexpr char needAlignment[]
static bool getOption(rs2_option option, const rs2::sensor *sensor, float &value)
static std::string get_device_information(const rs2::device &dev)
static bool optionValue2Perc(rs2_option option, const rs2::sensor *sensor, float &perc, const float &value)
static std::map< std::string, RGBDSensorParamParser::RGBDParam > params_map
static YarpDistortion rsDistToYarpDist(const rs2_distortion dist, const rs2_intrinsics &values)
static bool setExtrinsicParam(Matrix &extrinsic, const rs2_extrinsics &values)
constexpr char enableEmitter[]
static bool setIntrinsic(Property &intrinsic, const rs2_intrinsics &values)
constexpr char framerate[]
constexpr char alignmentFrame[]
constexpr char rgbRes[]
static size_t bytesPerPixel(const rs2_format format)
static void settingErrorMsg(const string &error, bool &ret)
static bool optionPerc2Value(rs2_option option, const rs2::sensor *sensor, const float &perc, float &value)
constexpr char depthRes[]
static bool isSupportedFormat(const rs2::sensor &sensor, const int width, const int height, const int fps, bool verbose=false)
constexpr char accuracy[]
The IntrinsicParams struct to handle the intrinsic parameter of cameras(RGB and RGBD either).
double focalLengthY
Result of the product of the physical focal length(mm) and the size sy of the individual imager eleme...
DistortionModel distortionModel
Distortion model of the image.
double focalLengthX
Result of the product of the physical focal length(mm) and the size sx of the individual imager eleme...
void toProperty(yarp::os::Property &intrinsic) const
toProperty, convert the struct to a Property.
double principalPointX
Horizontal coordinate of the principal point of the image, as a pixel offset from the left edge.
double principalPointY
Vertical coordinate of the principal point of the image, as a pixel offset from the top edge.