YARP
Yet Another Robot Platform
 
depthCameraDriver.cpp
1/*
2 * SPDX-FileCopyrightText: 2006-2021 Istituto Italiano di Tecnologia (IIT)
3 * SPDX-License-Identifier: LGPL-2.1-or-later
4 */
5
6#include "depthCameraDriver.h"
7
9#include <yarp/os/LogStream.h>
10#include <yarp/os/Value.h>
11
12#include <algorithm>
13#include <cmath>
#include <cstring> // for memcpy() used in streamFrameListener::onNewFrame()
14#include <mutex>
15
16using namespace yarp::dev;
17using namespace yarp::sig;
18using namespace yarp::os;
19using namespace openni;
20
21#ifndef RETURN_FALSE_STATUS_NOT_OK
22#define RETURN_FALSE_STATUS_NOT_OK(s) if(s != STATUS_OK) { yCError(DEPTHCAMERA) << OpenNI::getExtendedError(); return false; }
23#endif
24
25namespace {
26YARP_LOG_COMPONENT(DEPTHCAMERA, "yarp.device.depthCamera")
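// Configuration parameter keys: they index params_map, are filled by RGBDSensorParamParser::parseParam() in open(), and applied in setParams().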
27constexpr char accuracy [] = "accuracy";
28constexpr char clipPlanes [] = "clipPlanes";
29constexpr char depth_Fov [] = "depth_Fov";
30constexpr char depthRes [] = "depthResolution";
31constexpr char rgb_Fov [] = "rgb_Fov";
32constexpr char rgbRes [] = "rgbResolution";
33constexpr char rgbMirroring [] = "rgbMirroring";
34constexpr char depthMirroring [] = "depthMirroring";
35} // namespace
36
48
49class streamFrameListener : public openni::VideoStream::NewFrameListener
50{
51public:
52
53 //Properties
54 std::mutex mutex;
55 yarp::os::Stamp stamp;
56 yarp::sig::FlexImage image;
57 openni::PixelFormat pixF{openni::PixelFormat::PIXEL_FORMAT_DEPTH_1_MM};
58 int w{0};
59 int h{0};
60 size_t dataSize{0};
61 bool isReady{false};
62
63 //Method
65 bool isValid(){return frameRef.isValid() & isReady;}
66 void destroy(){frameRef.release();}
67 bool getImage(yarp::sig::FlexImage& inputImage)
68 {
69 std::lock_guard<std::mutex> guard(mutex);
70 return inputImage.copy(image);
71 }
72
73 yarp::os::Stamp getStamp()
74 {
75 std::lock_guard<std::mutex> guard(mutex);
76 return stamp;
77 }
78
79private:
80 void onNewFrame(openni::VideoStream& stream) override;
81 openni::VideoFrameRef frameRef;
82};
83
88
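// OpenNI callback, invoked for every new frame of the stream: it copies the frame data into the local FlexImage under the mutex and refreshes the timestamp.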
89void streamFrameListener::onNewFrame(openni::VideoStream& stream)
90{
91 std::lock_guard<std::mutex> guard(mutex);
92 stream.readFrame(&frameRef);
93
94 if (!frameRef.isValid() || !frameRef.getData())
95 {
96 yCInfo(DEPTHCAMERA) << "Frame lost";
97 return;
98 }
99
100 int pixC;
101
102 pixF = stream.getVideoMode().getPixelFormat();
103 pixC = depthCameraDriver::pixFormatToCode(pixF);
104 w = frameRef.getWidth();
105 h = frameRef.getHeight();
106 dataSize = frameRef.getDataSize();
107
108 if (isReady == false)
109 {
110 isReady = true;
111 }
112
113 if (pixC == VOCAB_PIXEL_INVALID)
114 {
115 yCError(DEPTHCAMERA) << "Pixel Format not recognized";
116 return;
117 }
118
119 image.setPixelCode(pixC);
120 image.resize(w, h);
121
122 if (image.getRawImageSize() != (size_t) frameRef.getDataSize())
123 {
124 yCError(DEPTHCAMERA) << "Device and local copy data sizes do not match";
125 return;
126 }
127
128 memcpy((void*)image.getRawImage(), (void*)frameRef.getData(), frameRef.getDataSize());
129 stamp.update();
130 return;
131}
132
133
134depthCameraDriver::depthCameraDriver() : m_depthFrame(nullptr), m_imageFrame(nullptr), m_paramParser(nullptr)
135{
136
137 m_depthRegistration = true;
138 m_depthFrame = new streamFrameListener();
139 m_imageFrame = new streamFrameListener();
140 m_paramParser = new RGBDSensorParamParser();
141
142 m_supportedFeatures.push_back(YARP_FEATURE_EXPOSURE);
143 m_supportedFeatures.push_back(YARP_FEATURE_WHITE_BALANCE);
144 m_supportedFeatures.push_back(YARP_FEATURE_GAIN);
145 m_supportedFeatures.push_back(YARP_FEATURE_FRAME_RATE);
146 m_supportedFeatures.push_back(YARP_FEATURE_MIRROR);
147}
148
149depthCameraDriver::~depthCameraDriver()
150{
151 close();
152 if (m_depthFrame) {
153 delete m_depthFrame;
154 }
155 if (m_imageFrame) {
156 delete m_imageFrame;
157 }
158 if (m_paramParser) {
159 delete m_paramParser;
160 }
161}
162
163bool depthCameraDriver::initializeOpeNIDevice()
164{
165 Status rc;
166
167 rc = OpenNI::initialize();
168 if (rc != STATUS_OK)
169 {
170 yCError(DEPTHCAMERA) << "Initialize failed," << OpenNI::getExtendedError();
171 return false;
172 }
173
174 rc = m_device.open(ANY_DEVICE);
175 if (rc != STATUS_OK)
176 {
177 yCError(DEPTHCAMERA) << "Couldn't open device," << OpenNI::getExtendedError();
178 return false;
179 }
180
181 if (m_device.getSensorInfo(SENSOR_COLOR) != nullptr)
182 {
183 rc = m_imageStream.create(m_device, SENSOR_COLOR);
184 if (rc != STATUS_OK)
185 {
186 yCError(DEPTHCAMERA) << "Couldn't create color stream," << OpenNI::getExtendedError();
187 return false;
188 }
189 }
190
191 rc = m_imageStream.start();
192 if (rc != STATUS_OK)
193 {
194 yCError(DEPTHCAMERA) << "Couldn't start the color stream," << OpenNI::getExtendedError();
195 return false;
196 }
197
198 if (m_device.getSensorInfo(SENSOR_DEPTH) != nullptr)
199 {
200 rc = m_depthStream.create(m_device, SENSOR_DEPTH);
201 if (rc != STATUS_OK)
202 {
203 yCError(DEPTHCAMERA) << "Couldn't create depth stream," << OpenNI::getExtendedError();
204 return false;
205 }
206 }
207
208 if (m_depthRegistration)
209 {
210 if (m_device.isImageRegistrationModeSupported(IMAGE_REGISTRATION_DEPTH_TO_COLOR))
211 {
212 if (m_device.setImageRegistrationMode(IMAGE_REGISTRATION_DEPTH_TO_COLOR) == STATUS_OK)
213 {
214 yCInfo(DEPTHCAMERA) << "Depth successfully registered on rgb sensor";
215 }
216 else
217 {
218 yCWarning(DEPTHCAMERA) << "Depth registration failed.. sending unregistered images";
219 }
220 }
221 else
222 {
223 yCWarning(DEPTHCAMERA) << "Depth image registration not supported by this device";
224 }
225 }
226
227 rc = m_depthStream.start();
228 if (rc != STATUS_OK)
229 {
230 yCError(DEPTHCAMERA) << "Couldn't start the depth stream," << OpenNI::getExtendedError();
231 return false;
232 }
233
234 m_imageStream.addNewFrameListener(m_imageFrame);
235 m_depthStream.addNewFrameListener(m_depthFrame);
236
237 return true;
238}
239
240
241void depthCameraDriver::settingErrorMsg(const std::string& error, bool& ret)
242{
243 yCError(DEPTHCAMERA) << error.c_str();
244 ret = false;
245}
246
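// Apply every parameter collected in params_map; any failure is reported through settingErrorMsg(), which clears 'ret' so that open() aborts.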
247bool depthCameraDriver::setParams()
248{
249 bool ret;
250 ret = true;
251
252 // Do all required settings
253
254 //ACCURACY
255 if (params_map[accuracy].isSetting && ret)
256 {
257 if (!params_map[accuracy].val[0].isFloat64()) {
258 settingErrorMsg("Param " + params_map[accuracy].name + " is not a double as it should be.", ret);
259 }
260
261 if (!setDepthAccuracy(params_map[accuracy].val[0].asFloat64())) {
262 settingErrorMsg("Setting param " + params_map[accuracy].name + " failed... quitting.", ret);
263 }
264 }
265
266 //CLIP_PLANES
267 if (params_map[clipPlanes].isSetting && ret)
268 {
269 if (!params_map[clipPlanes].val[0].isFloat64()) {
270 settingErrorMsg("Param " + params_map[clipPlanes].name + " is not a double as it should be.", ret);
271 }
272
273 if (!params_map[clipPlanes].val[1].isFloat64()) {
274 settingErrorMsg("Param " + params_map[clipPlanes].name + " is not a double as it should be.", ret);
275 }
276
277 if (!setDepthClipPlanes(params_map[clipPlanes].val[0].asFloat64(), params_map[clipPlanes].val[1].asFloat64())) {
278 settingErrorMsg("Setting param " + params_map[clipPlanes].name + " failed... quitting.", ret);
279 }
280 }
281
282 //DEPTH_FOV
283 if (params_map[depth_Fov].isSetting && ret)
284 {
285 Value p1, p2;
286 p1 = params_map[depth_Fov].val[0];
287 p2 = params_map[depth_Fov].val[1];
288
289 if (!p1.isFloat64() || !p2.isFloat64()) {
290 settingErrorMsg("Param " + params_map[depth_Fov].name + " is not a double as it should be.", ret);
291 }
292
293 if (!setDepthFOV(p1.asFloat64(), p2.asFloat64())) {
294 settingErrorMsg("Setting param " + params_map[depth_Fov].name + " failed... quitting.", ret);
295 }
296 }
297
298
299
300 //RGB_FOV
301 if (params_map[rgb_Fov].isSetting && ret)
302 {
303 Value p1, p2;
304 p1 = params_map[rgb_Fov].val[0];
305 p2 = params_map[rgb_Fov].val[1];
306
307 if (!p1.isFloat64() || !p2.isFloat64() )
308 {
309 settingErrorMsg("Param " + params_map[rgb_Fov].name + " is not a double as it should be.", ret);
310 }
311
312 if (! setRgbFOV(p1.asFloat64(), p2.asFloat64() ) )
313 {
314 settingErrorMsg("Setting param " + params_map[rgb_Fov].name + " failed... quitting.", ret);
315 }
316 }
317
318 //DEPTH_RES
319 if (params_map[depthRes].isSetting && ret)
320 {
321 Value p1, p2;
322 p1 = params_map[depthRes].val[0];
323 p2 = params_map[depthRes].val[1];
324
325 if (!p1.isInt32() || !p2.isInt32() )
326 {
327 settingErrorMsg("Param " + params_map[depthRes].name + " is not an int as it should be.", ret);
328 }
329
330 if (! setDepthResolution(p1.asInt32(), p2.asInt32()))
331 {
332 settingErrorMsg("Setting param " + params_map[depthRes].name + " failed... quitting.", ret);
333 }
334 }
335
336 //RGB_RES
337 if (params_map[rgbRes].isSetting && ret)
338 {
339 Value p1, p2;
340 p1 = params_map[rgbRes].val[0];
341 p2 = params_map[rgbRes].val[1];
342
343 if (!p1.isInt32() || !p2.isInt32() )
344 {
345 settingErrorMsg("Param " + params_map[rgbRes].name + " is not an int as it should be.", ret);
346 }
347
348 if (! setRgbResolution(p1.asInt32(), p2.asInt32()))
349 {
350 settingErrorMsg("Setting param " + params_map[rgbRes].name + " failed... quitting.", ret);
351 }
352 }
353
354 // rgb MIRRORING
355 if (params_map[rgbMirroring].isSetting && ret)
356 {
357 //the device usually fails to set the mirror properties at startup,
358 //so we try up to 5 times, with a short delay in between, before returning false
359 bool mirrorOk;
360 Value& v = params_map[rgbMirroring].val[0];
361 mirrorOk = false;
362
363 if (!v.isBool())
364 {
365 settingErrorMsg("Param " + params_map[rgbMirroring].name + " is not a bool as it should be.", ret);
366 return false;
367 }
368
369 for (int t = 0; t < 5; t++)
370 {
371 yCInfo(DEPTHCAMERA) << "Trying to set rgb mirroring parameter for the" << t+1 << "time/s";
373 if (setRgbMirroring(v.asBool()))
374 {
375 yCInfo(DEPTHCAMERA) << "Rgb mirroring parameter set successfully";
376 mirrorOk = true;
377 break;
378 }
379 }
380 if (!mirrorOk)
381 {
382 settingErrorMsg("Setting param " + params_map[rgbMirroring].name + " failed... quitting.", ret);
383 }
384 }
385 // depth MIRRORING
386 if (params_map[depthMirroring].isSetting && ret)
387 {
388 //the device usually fails to set the mirror properties at startup,
389 //so we try up to 5 times, with a short delay in between, before returning false
390 bool mirrorOk;
391 Value& v = params_map[depthMirroring].val[0];
392 mirrorOk = false;
393
394 if (!v.isBool() )
395 {
396 settingErrorMsg("Param " + params_map[depthMirroring].name + " is not a bool as it should be.", ret);
397 }
398
399 for (int t = 0; t < 5; t++)
400 {
401 yCInfo(DEPTHCAMERA) << "Trying to set depth mirroring parameter for the" << t+1 << "time/s";
403 if (setDepthMirroring(v.asBool()))
404 {
405 yCInfo(DEPTHCAMERA) << "Depth mirroring parameter set successfully";
406 mirrorOk = true;
407 break;
408 }
409 }
410 if (!mirrorOk)
411 {
412 settingErrorMsg("Setting param " + params_map[depthMirroring].name + " failed... quitting.", ret);
413 }
414 }
415 return ret;
416}
417
418
419bool depthCameraDriver::open(Searchable& config)
420{
421 std::vector<RGBDSensorParamParser::RGBDParam*> params;
422 for (auto& p:params_map)
423 {
424 params.push_back(&(p.second));
425 }
426
427 if (!m_paramParser->parseParam(config, params))
428 {
429 yCError(DEPTHCAMERA) << "Failed to parse the parameters";
430 return false;
431 }
432
433 //"registered" is a hidden parameter for debugging purposes
434 m_depthRegistration = !(config.check("registered") && config.find("registered").isBool() && config.find("registered").asBool() == false);
435
436 if (!initializeOpeNIDevice())
437 {
438 return false;
439 }
440
441 // setting Parameters
442 if (!setParams())
443 {
444 return false;
445 }
446
447 return true;
448}
449
450bool depthCameraDriver::close()
451{
452 m_imageStream.stop();
453 m_imageStream.destroy();
454 m_depthStream.stop();
455 m_depthStream.destroy();
456 m_device.close();
457 OpenNI::shutdown();
458 return true;
459}
460
461int depthCameraDriver::getRgbHeight()
462{
463 if (params_map[rgbRes].isDescription)
464 {
465 return params_map[rgbRes].val.at(1).asInt32();
466 }
467
468 return m_imageStream.getVideoMode().getResolutionY();
469}
470
471int depthCameraDriver::getRgbWidth()
472{
473 if (params_map[rgbRes].isDescription)
474 {
475 return params_map[rgbRes].val.at(0).asInt32();
476 }
477
478 return m_imageStream.getVideoMode().getResolutionX();
479}
480
481ReturnValue depthCameraDriver::getRgbSupportedConfigurations(std::vector<CameraConfig> &configurations)
482{
483 yCWarning(DEPTHCAMERA) << "getRgbSupportedConfigurations not implemented yet";
484 return false;
485}
486
487ReturnValue depthCameraDriver::getRgbResolution(int &width, int &height)
488{
489 if (params_map[rgbRes].isDescription)
490 {
491 width = params_map[rgbRes].val.at(0).asInt32();
 height = params_map[rgbRes].val.at(1).asInt32();
492 }
493 else{
494 width = m_imageStream.getVideoMode().getResolutionX();
495 height = m_imageStream.getVideoMode().getResolutionY();
496 }
497 return ReturnValue_ok;
498}
499
500ReturnValue depthCameraDriver::setDepthResolution(int width, int height)
501{
502 if (params_map[depthRes].isDescription)
503 {
504 yCError(DEPTHCAMERA) << "Cannot set. Depth resolution is a description!";
505 return false;
506 }
507
508 return setResolution(width, height, m_depthStream);
509}
510
511ReturnValue depthCameraDriver::setResolution(int width, int height, VideoStream& stream)
512{
513 VideoMode vm;
514 bool bRet;
515
516 vm = stream.getVideoMode();
517 vm.setResolution(width, height);
518 stream.stop();
519 bRet = (stream.setVideoMode(vm) == STATUS_OK);
520 RETURN_FALSE_STATUS_NOT_OK(stream.start());
521
522 if (!bRet)
523 {
524 yCError(DEPTHCAMERA) << OpenNI::getExtendedError();
525 }
526
527 return bRet;
528}
529
530ReturnValue depthCameraDriver::setRgbResolution(int width, int height)
531{
532 if (params_map[rgbRes].isDescription)
533 {
534 return false;
535 }
536
537 return setResolution(width, height, m_imageStream);
538}
539
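// The YARP interfaces expose the field of view in degrees, while OpenNI works in radians; hence the DEG2RAD/RAD2DEG conversions in the FOV setters and getters below.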
540ReturnValue depthCameraDriver::setFOV(double horizontalFov, double verticalFov, VideoStream& stream)
541{
542 RETURN_FALSE_STATUS_NOT_OK(stream.setProperty(STREAM_PROPERTY_VERTICAL_FOV, verticalFov * DEG2RAD));
543 RETURN_FALSE_STATUS_NOT_OK(stream.setProperty(STREAM_PROPERTY_HORIZONTAL_FOV, horizontalFov * DEG2RAD));
544 return ReturnValue_ok;
545}
546
547ReturnValue depthCameraDriver::setRgbFOV(double horizontalFov, double verticalFov)
548{
549 if (params_map[rgb_Fov].isDescription)
550 {
551 return false;
552 }
553 return setFOV(horizontalFov, verticalFov, m_imageStream);
554}
555
556ReturnValue depthCameraDriver::setDepthFOV(double horizontalFov, double verticalFov)
557{
558 if (params_map[depth_Fov].isDescription)
559 {
560 return false;
561 }
562 return setFOV(horizontalFov, verticalFov, m_depthStream);
563}
564
565ReturnValue depthCameraDriver::setDepthAccuracy(double _accuracy)
566{
567 if (params_map[accuracy].isDescription)
568 {
569 return false;
570 }
571 bool a1, a2;
572 a1 = fabs(_accuracy - 0.001) < 0.00001;
573 a2 = fabs(_accuracy - 0.0001) < 0.00001;
574 if (!a1 && !a2)
575 {
576 yCError(DEPTHCAMERA) << "Supporting accuracy of 1mm (0.001) or 100um (0.0001) only at the moment";
577 return false;
578 }
579
580 PixelFormat pf;
581 VideoMode vm;
582 bool ret;
583
584 vm = m_imageStream.getVideoMode();
586
587 vm.setPixelFormat(pf);
588 m_depthStream.stop();
589 ret = m_depthStream.setVideoMode(vm) == STATUS_OK;
590 RETURN_FALSE_STATUS_NOT_OK(m_depthStream.start());
591
592 if (!ret)
593 {
594 yCError(DEPTHCAMERA) << OpenNI::getExtendedError();
595 }
596 return ret;
597}
598
599ReturnValue depthCameraDriver::getRgbFOV(double &horizontalFov, double &verticalFov)
600{
601 if (params_map[rgb_Fov].isDescription)
602 {
603 horizontalFov = params_map[rgb_Fov].val[0].asFloat64();
604 verticalFov = params_map[rgb_Fov].val[1].asFloat64();
605 return ReturnValue_ok;
606 }
607 horizontalFov = m_imageStream.getHorizontalFieldOfView() * RAD2DEG;
608 verticalFov = m_imageStream.getVerticalFieldOfView() * RAD2DEG;
609 return ReturnValue_ok;
610}
611
612ReturnValue depthCameraDriver::getRgbMirroring(bool& mirror)
613{
614 if (params_map[rgbMirroring].isDescription)
615 {
616 mirror = params_map[rgbMirroring].val[0].asBool();
617 return ReturnValue_ok;
618 }
619
620 mirror = m_imageStream.getMirroringEnabled();
621 return ReturnValue_ok;
622}
623
624ReturnValue depthCameraDriver::setRgbMirroring(bool mirror)
625{
626 if (params_map[rgbMirroring].isDescription)
627 {
628 return false;
629 }
630
631 if (m_imageStream.setMirroringEnabled(mirror) != STATUS_OK)
632 {
633 return false;
634 }
635
636 bool ret;
637 getRgbMirroring(ret);
638 return (ret == mirror);
639}
640
641ReturnValue depthCameraDriver::setIntrinsic(Property& intrinsic, const yarp::sig::IntrinsicParams& values)
642{
643 values.toProperty(intrinsic);
644 return ReturnValue_ok;
645}
646
647ReturnValue depthCameraDriver::getRgbIntrinsicParam(Property& intrinsic)
648{
649 return setIntrinsic(intrinsic, m_paramParser->rgbIntrinsic);
650}
651
652int depthCameraDriver::getDepthHeight()
653{
654 if (params_map[depthRes].isDescription)
655 {
656 return params_map[depthRes].val[1].asFloat64();
657 }
658 return m_depthStream.getVideoMode().getResolutionY();
659}
660
661int depthCameraDriver::getDepthWidth()
662{
663 if (params_map[depthRes].isDescription)
664 {
665 return params_map[depthRes].val[0].asFloat64();
666 }
667 return m_depthStream.getVideoMode().getResolutionX();
668}
669
670ReturnValue depthCameraDriver::getDepthFOV(double& horizontalFov, double& verticalFov)
671{
672 if (params_map[depth_Fov].isDescription)
673 {
674 horizontalFov = params_map[depth_Fov].val[0].asFloat64();
675 verticalFov = params_map[depth_Fov].val[1].asFloat64();
676 return ReturnValue_ok;
677 }
678 horizontalFov = m_depthStream.getHorizontalFieldOfView() * RAD2DEG;
679 verticalFov = m_depthStream.getVerticalFieldOfView() * RAD2DEG;
680 return ReturnValue_ok;
681}
682
683ReturnValue depthCameraDriver::getDepthIntrinsicParam(Property& intrinsic)
684{
685 return setIntrinsic(intrinsic, m_paramParser->rgbIntrinsic);
686}
687
688ReturnValue depthCameraDriver::getDepthAccuracy(double& _accuracy)
689{
690 if (params_map[accuracy].isDescription)
691 {
692 _accuracy = params_map[accuracy].val[0].asFloat64();
 return ReturnValue_ok;
693 }
694 _accuracy = m_depthStream.getVideoMode().getPixelFormat() == PIXEL_FORMAT_DEPTH_1_MM ? 0.001 : 0.0001;
 return ReturnValue_ok;
695}
696
697ReturnValue depthCameraDriver::getDepthClipPlanes(double& nearPlane, double& farPlane)
698{
699 if (params_map[clipPlanes].isDescription)
700 {
701 nearPlane = params_map[clipPlanes].val[0].asFloat64();
702 farPlane = params_map[clipPlanes].val[1].asFloat64();
703 return ReturnValue_ok;
704 }
705 double factor;
706 getDepthAccuracy(factor);
707 nearPlane = m_depthStream.getMinPixelValue() * factor;
708 farPlane = m_depthStream.getMaxPixelValue() * factor;
709 return ReturnValue_ok;
710}
711
712ReturnValue depthCameraDriver::setDepthClipPlanes(double nearPlane, double farPlane)
713{
714 if (params_map[clipPlanes].isDescription)
715 {
716 return false;
717 }
718 double factor;
719 getDepthAccuracy(factor);
720 RETURN_FALSE_STATUS_NOT_OK(m_depthStream.setProperty(STREAM_PROPERTY_MAX_VALUE, int(farPlane / factor)));
721 RETURN_FALSE_STATUS_NOT_OK(m_depthStream.setProperty(STREAM_PROPERTY_MIN_VALUE, int(nearPlane / factor)));
722 return ReturnValue_ok;
723}
724
725ReturnValue depthCameraDriver::getDepthMirroring(bool& mirror)
726{
727 if (params_map[depthMirroring].isDescription)
728 {
729 mirror = params_map[depthMirroring].val[0].asBool();
 return ReturnValue_ok;
730 }
731 mirror = m_depthStream.getMirroringEnabled();
732 return ReturnValue_ok;
733}
734
735ReturnValue depthCameraDriver::setDepthMirroring(bool mirror)
736{
737 if (params_map[depthMirroring].isDescription)
738 {
739 return false;
740 }
741 RETURN_FALSE_STATUS_NOT_OK(m_depthStream.setMirroringEnabled(mirror));
742
743 bool ret;
744 getDepthMirroring(ret);
745 return (ret == mirror);
746}
747
753
754ReturnValue depthCameraDriver::getRgbImage(FlexImage& rgbImage, Stamp* timeStamp)
755{
756 return getImage(rgbImage, timeStamp, m_imageFrame);
757}
758
759ReturnValue depthCameraDriver::getDepthImage(depthImage& depthImage, Stamp* timeStamp)
760{
761 return getImage(depthImage, timeStamp, m_depthFrame);
762}
763
764int depthCameraDriver::pixFormatToCode(openni::PixelFormat p)
765{
766 //pixel size interpretation based on openni2_camera/src/openni2_frame_listener.cpp:
767 /*switch (video_mode.getPixelFormat())
768 {
769 case openni::PIXEL_FORMAT_DEPTH_1_MM:
770 image->encoding = sensor_msgs::image_encodings::TYPE_16UC1;
771 image->step = sizeof(unsigned char) * 2 * image->width;
772 break;
773 case openni::PIXEL_FORMAT_DEPTH_100_UM:
774 image->encoding = sensor_msgs::image_encodings::TYPE_16UC1;
775 image->step = sizeof(unsigned char) * 2 * image->width;
776 break;
777 case openni::PIXEL_FORMAT_SHIFT_9_2:
778 image->encoding = sensor_msgs::image_encodings::TYPE_16UC1;
779 image->step = sizeof(unsigned char) * 2 * image->width;
780 break;
781 case openni::PIXEL_FORMAT_SHIFT_9_3:
782 image->encoding = sensor_msgs::image_encodings::TYPE_16UC1;
783 image->step = sizeof(unsigned char) * 2 * image->width;
784 break;
785 */
786 switch(p)
787 {
788 case (PIXEL_FORMAT_RGB888):
789 return VOCAB_PIXEL_RGB;
790
791 case (PIXEL_FORMAT_DEPTH_1_MM):
792 return VOCAB_PIXEL_MONO16;
793
794 case (PIXEL_FORMAT_DEPTH_100_UM):
795 return VOCAB_PIXEL_MONO16;
796
797 case (PIXEL_FORMAT_SHIFT_9_2):
798 return VOCAB_PIXEL_INVALID;
799
800 case (PIXEL_FORMAT_SHIFT_9_3):
801 return VOCAB_PIXEL_INVALID;
802
803 case (PIXEL_FORMAT_GRAY8):
804 return VOCAB_PIXEL_MONO;
805
806 case (PIXEL_FORMAT_GRAY16):
807 return VOCAB_PIXEL_MONO16;
808
809 default:
810 return VOCAB_PIXEL_INVALID;
811 }
812 return VOCAB_PIXEL_INVALID;
813}
814
815ReturnValue depthCameraDriver::getImage(FlexImage& Frame, Stamp* Stamp, streamFrameListener* sourceFrame)
816{
817 bool ret = sourceFrame->getImage(Frame);
818 *Stamp = sourceFrame->getStamp();
819 return ret;
820}
821
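// Depth frames arrive from OpenNI as 16-bit integers in millimeters or hundreds of micrometers (depending on the pixel format); this overload converts them into a float image expressed in meters.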
822ReturnValue depthCameraDriver::getImage(depthImage& Frame, Stamp* Stamp, streamFrameListener* sourceFrame)
823{
824 std::lock_guard<std::mutex> guard(sourceFrame->mutex);
825 if (!sourceFrame->isReady)
826 {
827 yCError(DEPTHCAMERA) << "Device not ready";
828 return false;
829 }
830 int w, h, i;
831 w = sourceFrame->w;
832 h = sourceFrame->h;
833
834 if (sourceFrame->dataSize != size_t(h * w * sizeof(short)) ||
835 (sourceFrame->pixF != PIXEL_FORMAT_DEPTH_1_MM && sourceFrame->pixF != PIXEL_FORMAT_DEPTH_100_UM))
836 {
837 yCError(DEPTHCAMERA) << "getImage: image format error";
838 return false;
839 }
840
841 float factor;
842 float* rawImage;
843 short* srcRawImage;
844
845 srcRawImage = reinterpret_cast<short*> (sourceFrame->image.getRawImage());
846 factor = sourceFrame->pixF == PIXEL_FORMAT_DEPTH_1_MM ? 0.001 : 0.0001;
847
848 Frame.resize(w, h);
849 rawImage = reinterpret_cast<float*> (Frame.getRawImage());
850
851 //TODO: optimize short-to-float cast and multiplication using SSE/SIMD instruction
852 for(i = 0; i < w * h; i++)
853 {
854 rawImage[i] = srcRawImage[i] * factor;
855 }
856 *Stamp = sourceFrame->stamp;
857 return ReturnValue_ok;
858}
859
860ReturnValue depthCameraDriver::getImages(FlexImage& colorFrame, depthImage& depthFrame, Stamp* colorStamp, Stamp* depthStamp)
861{
862 return getImage(colorFrame, colorStamp, m_imageFrame) & getImage(depthFrame, depthStamp, m_depthFrame);
863}
864
865IRGBDSensor::RGBDSensor_status depthCameraDriver::getSensorStatus()
866{
867 openni::DeviceState status;
868 status = DEVICE_STATE_NOT_READY;
869 if (m_device.isValid() &&
870 m_imageStream.isValid() &&
871 m_depthStream.isValid() &&
872 m_imageFrame->isValid() &&
873 m_depthFrame->isValid())
874 {
875 status = DEVICE_STATE_OK;
876 }
877 switch(status)
878 {
879 case DEVICE_STATE_OK:
881
884
885 default:
887 }
888
890}
891
892std::string depthCameraDriver::getLastErrorMsg(Stamp* timeStamp)
893{
894 return OpenNI::getExtendedError();
895}
896
903
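// IFrameGrabberControls section: only the features registered in m_supportedFeatures (see the constructor) are accepted by the calls below.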
904ReturnValue depthCameraDriver::hasFeature(int feature, bool* hasFeature)
905{
906 cameraFeature_id_t f;
907 f = static_cast<cameraFeature_id_t>(feature);
908 if (f < YARP_FEATURE_BRIGHTNESS || f > YARP_FEATURE_NUMBER_OF-1)
909 {
910 return false;
911 }
912
913 if (std::find(m_supportedFeatures.begin(), m_supportedFeatures.end(), f) != m_supportedFeatures.end())
914 {
915 *hasFeature = true;
916 }
917 else
918 {
919 *hasFeature = false;
920 }
921
922 return ReturnValue_ok;
923}
924
925ReturnValue depthCameraDriver::setFeature(int feature, double value)
926{
927 bool b;
928 if (!hasFeature(feature, &b) || !b)
929 {
930 yCError(DEPTHCAMERA) << "Feature not supported!";
931 return false;
932 }
933
934 auto f = static_cast<cameraFeature_id_t>(feature);
935 switch(f)
936 {
937 case YARP_FEATURE_EXPOSURE:
938 RETURN_FALSE_STATUS_NOT_OK(m_imageStream.getCameraSettings()->setExposure(int(value * 100)+1));
939 break;
940 case YARP_FEATURE_GAIN:
941 RETURN_FALSE_STATUS_NOT_OK(m_imageStream.getCameraSettings()->setGain(int(value * 100)+1));
942 break;
943 case YARP_FEATURE_FRAME_RATE:
944 {
945 VideoMode vm;
946
947 vm = m_imageStream.getVideoMode();
948 vm.setFps(int(value));
949 RETURN_FALSE_STATUS_NOT_OK(m_imageStream.setVideoMode(vm));
950
951 vm = m_depthStream.getVideoMode();
952 vm.setFps(int(value));
953 RETURN_FALSE_STATUS_NOT_OK(m_depthStream.setVideoMode(vm));
954 break;
955 }
956 case YARP_FEATURE_WHITE_BALANCE:
957 yCError(DEPTHCAMERA) << "White balance does not support manual mode; call hasManual() to check whether a feature supports manual mode";
958 return false;
959 default:
960 yCError(DEPTHCAMERA) << "Feature not supported!";
961 return false;
962 }
963 return ReturnValue_ok;
964}
965
966ReturnValue depthCameraDriver::getFeature(int feature, double* value)
967{
968 bool b;
969 if (!hasFeature(feature, &b) || !b)
970 {
971 yCError(DEPTHCAMERA) << "Feature not supported!";
972 return false;
973 }
974
975 auto f = static_cast<cameraFeature_id_t>(feature);
976 switch(f)
977 {
978 case YARP_FEATURE_EXPOSURE:
979 *value = m_imageStream.getCameraSettings()->getExposure();
980 break;
981 case YARP_FEATURE_GAIN:
982 *value = m_imageStream.getCameraSettings()->getGain();
983 break;
984 case YARP_FEATURE_FRAME_RATE:
985 *value = (m_imageStream.getVideoMode().getFps());
986 break;
987 case YARP_FEATURE_WHITE_BALANCE:
988 yCError(DEPTHCAMERA) << "White balance does not support manual mode; call hasManual() to check whether a feature supports manual mode";
989 return false;
990 default:
991 return false;
992 }
993 return ReturnValue_ok;
994}
995
996ReturnValue depthCameraDriver::setFeature(int feature, double value1, double value2)
997{
998 yCError(DEPTHCAMERA) << "No 2-valued features are supported";
999 return false;
1000}
1001
1002ReturnValue depthCameraDriver::getFeature(int feature, double *value1, double *value2)
1003{
1004 yCError(DEPTHCAMERA) << "No 2-valued features are supported";
1005 return false;
1006}
1007
1008ReturnValue depthCameraDriver::hasOnOff(int feature, bool* HasOnOff)
1009{
1010 bool b;
1011 if (!hasFeature(feature, &b) || !b)
1012 {
1013 yCError(DEPTHCAMERA) << "Feature not supported!";
1014 return false;
1015 }
1016
1017 auto f = static_cast<cameraFeature_id_t>(feature);
1018 if (f == YARP_FEATURE_WHITE_BALANCE || f == YARP_FEATURE_MIRROR)
1019 {
1020 *HasOnOff = true;
1021 return ReturnValue_ok;
1022 }
1023 *HasOnOff = false;
1024 return ReturnValue_ok;
1025}
1026
1027ReturnValue depthCameraDriver::setActive(int feature, bool onoff)
1028{
1029 bool b;
1030 if (!hasFeature(feature, &b) || !b)
1031 {
1032 yCError(DEPTHCAMERA) << "Feature not supported!";
1033 return false;
1034 }
1035
1036 if (!hasOnOff(feature, &b) || !b)
1037 {
1038 yCError(DEPTHCAMERA) << "Feature does not have OnOff mode; call hasOnOff() to check whether a feature supports it";
1039 return false;
1040 }
1041
1042 switch(feature)
1043 {
1044 case YARP_FEATURE_MIRROR:
1045 RETURN_FALSE_STATUS_NOT_OK(m_imageStream.setProperty(STREAM_PROPERTY_MIRRORING, onoff));
1046 RETURN_FALSE_STATUS_NOT_OK(m_depthStream.setProperty(STREAM_PROPERTY_MIRRORING, onoff));
1047 break;
1048 case YARP_FEATURE_WHITE_BALANCE:
1049 RETURN_FALSE_STATUS_NOT_OK(m_imageStream.getCameraSettings()->setAutoWhiteBalanceEnabled(onoff));
1050 break;
1051 default:
1052 return false;
1053 }
1054
1055
1056 return ReturnValue_ok;
1057}
1058
1059ReturnValue depthCameraDriver::getActive(int feature, bool* isActive)
1060{
1061 bool b;
1062 if (!hasFeature(feature, &b) || !b)
1063 {
1064 yCError(DEPTHCAMERA) << "Feature not supported!";
1065 return false;
1066 }
1067
1068 if (!hasOnOff(feature, &b) || !b)
1069 {
1070 yCError(DEPTHCAMERA) << "Feature does not have OnOff mode; call hasOnOff() to check whether a feature supports it";
1071 return false;
1072 }
1073
1074 *isActive = m_imageStream.getCameraSettings()->getAutoWhiteBalanceEnabled();
1075 return ReturnValue_ok;
1076}
1077
1078ReturnValue depthCameraDriver::hasAuto(int feature, bool* hasAuto)
1079{
1080 bool b;
1081 if (!hasFeature(feature, &b) || !b)
1082 {
1083 yCError(DEPTHCAMERA) << "Feature not supported!";
1084 return false;
1085 }
1086
1087 auto f = static_cast<cameraFeature_id_t>(feature);
1088 if (f == YARP_FEATURE_EXPOSURE || f == YARP_FEATURE_WHITE_BALANCE)
1089 {
1090 *hasAuto = true;
1091 return ReturnValue_ok;
1092 }
1093 *hasAuto = false;
1094 return ReturnValue_ok;
1095}
1096
1097ReturnValue depthCameraDriver::hasManual(int feature, bool* hasManual)
1098{
1099 bool b;
1100 if (!hasFeature(feature, &b) || !b)
1101 {
1102 yCError(DEPTHCAMERA) << "Feature not supported!";
1103 return false;
1104 }
1105
1106 auto f = static_cast<cameraFeature_id_t>(feature);
1107 if (f == YARP_FEATURE_EXPOSURE || f == YARP_FEATURE_FRAME_RATE || f == YARP_FEATURE_GAIN)
1108 {
1109 *hasManual = true;
1110 return ReturnValue_ok;
1111 }
1112 *hasManual = false;
1113 return ReturnValue_ok;
1114}
1115
1116ReturnValue depthCameraDriver::hasOnePush(int feature, bool* hasOnePush)
1117{
1118 bool b;
1119 if (!hasFeature(feature, &b) || !b)
1120 {
1121 yCError(DEPTHCAMERA) << "Feature not supported!";
1122 return false;
1123 }
1124
1125 return hasAuto(feature, hasOnePush);
1126}
1127
1128ReturnValue depthCameraDriver::setMode(int feature, FeatureMode mode)
1129{
1130 bool b;
1131 if (!hasFeature(feature, &b) || !b)
1132 {
1133 yCError(DEPTHCAMERA) << "Feature not supported!";
1134 return false;
1135 }
1136
1137 auto f = static_cast<cameraFeature_id_t>(feature);
1138 if (f == YARP_FEATURE_EXPOSURE)
1139 {
1140 switch(mode)
1141 {
1142 case MODE_AUTO:
1143 RETURN_FALSE_STATUS_NOT_OK(m_imageStream.getCameraSettings()->setAutoExposureEnabled(true));
1144 break;
1145 case MODE_MANUAL:
1146 RETURN_FALSE_STATUS_NOT_OK(m_imageStream.getCameraSettings()->setAutoExposureEnabled(false));
1147 break;
1148 case MODE_UNKNOWN:
1149 return false;
1150 default:
1151 return false;
1152 }
1153 return ReturnValue_ok;
1154 }
1155
1156 yCError(DEPTHCAMERA) << "Feature does not have both auto and manual mode";
1157 return false;
1158}
1159
1160ReturnValue depthCameraDriver::getMode(int feature, FeatureMode* mode)
1161{
1162 bool b;
1163 if (!hasFeature(feature, &b) || !b)
1164 {
1165 yCError(DEPTHCAMERA) << "Feature not supported!";
1166 return false;
1167 }
1168
1169 auto f = static_cast<cameraFeature_id_t>(feature);
1170 if (f == YARP_FEATURE_EXPOSURE)
1171 {
1172 *mode = m_imageStream.getCameraSettings()->getAutoExposureEnabled() ? MODE_AUTO : MODE_MANUAL;
1173 return ReturnValue_ok;
1174 }
1175
1176 yCError(DEPTHCAMERA) << "Feature does not have both auto and manual mode";
1177 return false;
1178}
1179
1180ReturnValue depthCameraDriver::setOnePush(int feature)
1181{
1182 bool b;
1183 if (!hasFeature(feature, &b) || !b)
1184 {
1185 yCError(DEPTHCAMERA) << "Feature not supported!";
1186 return false;
1187 }
1188
1189 if (!hasOnePush(feature, &b) || !b)
1190 {
1191 yCError(DEPTHCAMERA) << "Feature doesn't have OnePush";
1192 return false;
1193 }
1194
1195 setMode(feature, MODE_AUTO);
1196 setMode(feature, MODE_MANUAL);
1197
1198 return ReturnValue_ok;
1199}
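
The listing above contains only the device implementation. As a rough orientation, the sketch below shows one way a client program could instantiate and poll an RGBD device of this kind through yarp::dev::PolyDriver and its IRGBDSensor view. It is a minimal, hypothetical example: the plugin name "depthCamera" is inferred from the YARP_LOG_COMPONENT string above, and the empty configuration assumes an OpenNI device reachable with default settings; it is not something this file defines.

// Hypothetical usage sketch (not part of depthCameraDriver.cpp).
#include <yarp/os/Network.h>
#include <yarp/os/Property.h>
#include <yarp/os/Stamp.h>
#include <yarp/dev/PolyDriver.h>
#include <yarp/dev/IRGBDSensor.h>
#include <yarp/sig/Image.h>
#include <cstdio>

int main()
{
    yarp::os::Network net;                  // initialize the YARP runtime

    yarp::os::Property cfg;
    cfg.put("device", "depthCamera");       // assumed plugin name, see YARP_LOG_COMPONENT above

    yarp::dev::PolyDriver driver(cfg);      // calls depthCameraDriver::open()
    if (!driver.isValid()) {
        std::fprintf(stderr, "could not open the depthCamera device\n");
        return 1;
    }

    yarp::dev::IRGBDSensor* rgbd = nullptr;
    if (!driver.view(rgbd) || rgbd == nullptr) {
        std::fprintf(stderr, "device does not expose IRGBDSensor\n");
        return 1;
    }

    yarp::sig::FlexImage rgb;
    yarp::sig::ImageOf<yarp::sig::PixelFloat> depth;
    yarp::os::Stamp rgbStamp, depthStamp;

    // Maps to depthCameraDriver::getImages() in the listing above.
    if (rgbd->getImages(rgb, depth, &rgbStamp, &depthStamp)) {
        std::printf("rgb %zux%zu, depth %zux%zu\n",
                    rgb.width(), rgb.height(), depth.width(), depth.height());
    }

    driver.close();                         // calls depthCameraDriver::close()
    return 0;
}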