YARP
Yet Another Robot Platform
 
Loading...
Searching...
No Matches
argusCameraDriver.cpp
Go to the documentation of this file.
1/*
2 * Copyright (C) 2006-2024 Istituto Italiano di Tecnologia (IIT)
3 * All rights reserved.
4 *
5 * This software may be modified and distributed under the terms of the
6 * BSD-3-Clause license. See the accompanying LICENSE file for details.
7 */
8
10#include <yarp/os/Value.h>
11#include <yarp/sig/ImageUtils.h>
12
13#include <algorithm>
14#include <cmath>
15#include <cstdint>
16#include <iomanip>
17#ifdef USE_CUDA
18#include <opencv2/cudawarping.hpp>
19#include <opencv2/cudaimgproc.hpp>
20#endif // USE_CUDA
21
22#include "argusCameraDriver.h"
23
24using namespace yarp::dev;
25using namespace yarp::sig;
26using namespace yarp::os;
27
28using namespace std;
29using namespace Argus;
30using namespace EGLStream;
31
// VERY IMPORTANT ABOUT WHITE BALANCE: the YARP interfaces do not allow setting a feature
// with 3 values; 2 is the maximum, and so far we have always used blue and red, in this
// order. Green is therefore ignored.
35
38
// NOTE(review): the member index also lists a `supported_features` vector that
// belongs just above this point; its declaration line was lost in the page
// extraction — restore it from the repository.

// Features that expose an "auto" mode (implemented via the Argus AE/AWB locks).
static const std::vector<cameraFeature_id_t> features_with_auto{YARP_FEATURE_EXPOSURE, YARP_FEATURE_WHITE_BALANCE};

// Raw camera-unit range for each feature; used to map YARP's normalized [0, 1]
// values to native values and back (see fromZeroOneToRange / fromRangeToZeroOne).
static const std::map<cameraFeature_id_t, std::pair<double, double>> featureMinMax{{YARP_FEATURE_EXPOSURE, {-2.0, 2.0}},
                                                                                   {YARP_FEATURE_SATURATION, {0.0, 2.0}},
                                                                                   {YARP_FEATURE_SHARPNESS, {-1.0, 1.0}},
                                                                                   {YARP_FEATURE_WHITE_BALANCE, {1.0, 8.0}}, // not sure about it, the doc is not clear, found empirically
                                                                                   {YARP_FEATURE_GAIN, {1.0, 3981.07}}};

// Supported rotation angles mapped to NVIDIA buffer rotations (used by createNvBuffer in getImage)...
static const std::map<double, NV::Rotation> rotationToNVRot{{0.0, NV::ROTATION_0}, {90.0, NV::ROTATION_90}, {-90.0, NV::ROTATION_270}, {180.0, NV::ROTATION_180}};
// ...and to OpenCV cv::ROTATE_* codes. The enum values are stored as double so the
// map can share the angle key type with rotationToNVRot.
static const std::map<double, double> rotationToCVRot{{0.0, 0.0}, {90.0, cv::ROTATE_90_COUNTERCLOCKWISE}, {-90.0, cv::ROTATE_90_CLOCKWISE}, {180.0, cv::ROTATE_180}};

// Resolutions advertised per supported sensor model.
static const std::map<std::string, std::vector<Argus::Size2D<uint32_t>>> cameraResolutions{
    {"imx415", {Size2D<uint32_t>(1280, 720), Size2D<uint32_t>(1920, 1080), Size2D<uint32_t>(3840, 2160)}},
    {"imx678", {Size2D<uint32_t>(3840, 2160), Size2D<uint32_t>(2560, 1440), Size2D<uint32_t>(1920, 1080)}}
};
54
55// We usually set the features through a range between 0 an 1, we have to translate it in meaninful value for the camera
57{
58 return value * (featureMinMax.at(feature).second - featureMinMax.at(feature).first) + featureMinMax.at(feature).first;
59}
60
61// We want the features in the range 0 1
63{
64 return (value - featureMinMax.at(feature).first) / (featureMinMax.at(feature).second - featureMinMax.at(feature).first);
65}
66
// Apply the requested frame rate to the capture request by constraining both the
// frame-duration and the exposure-time ranges; on success, caches it in m_fps.
bool argusCameraDriver::setFramerate(const uint64_t _fps)
{
    // NOTE(review): a line was lost in the page extraction here — the iRequest
    // declaration, presumably
    //   Argus::IRequest *iRequest = Argus::interface_cast<Argus::IRequest>(m_request);
    // as done in every other method of this file. Restore from the repository.
    IAutoControlSettings *m_iAutoControlSettings = interface_cast<IAutoControlSettings>(iRequest->getAutoControlSettings());
    ISourceSettings *m_iSourceSettings = interface_cast<ISourceSettings>(iRequest->getSourceSettings());

    // Lock auto-exposure so the fixed exposure-time range set below is honored.
    m_iAutoControlSettings->setAeLock(true);

    // According to https://docs.nvidia.com/jetson/l4t-multimedia/classArgus_1_1ISourceSettings.html, frame duration range is expressed in nanoseconds
    // NOTE(review): the `frameDuration` computation line was lost in the page
    // extraction (presumably 1e9 / _fps per the nanoseconds note above) — confirm
    // against the repository.
    bool ret = true;
    ret = ret && m_iSourceSettings->setFrameDurationRange(Argus::Range<uint64_t>(frameDuration)) == STATUS_OK;
    ret = ret && m_iSourceSettings->setExposureTimeRange(Argus::Range<uint64_t>(frameDuration)) == STATUS_OK;
    if (ret)
    {
        m_fps = _fps;
    }
    else
    {
        // NOTE(review): this logs m_fps (the previous cached value), not the
        // requested _fps — likely meant to print _fps.
        yCError(ARGUS_CAMERA) << "The required frame rate" << m_fps << "cannot be set";
        return false;
    }

    return ret;
}
92
93bool parseUint32Param(std::string param_name, std::uint32_t& param, yarp::os::Searchable& config)
94{
95 if (config.check(param_name) && config.find(param_name).isInt32())
96 {
97 param = config.find(param_name).asInt32();
98 return true;
99 }
100 else
101 {
102 yCWarning(ARGUS_CAMERA) << param_name << "parameter not specified, using" << param;
103 return false;
104 }
105}
106
107bool argusCameraDriver::startCamera()
108{
109 setFramerate(m_fps);
111 if (m_consumer)
112 {
113 if (!iCaptureSession->isRepeating())
114 {
115 iCaptureSession->repeat(m_request.get());
116 }
117 }
118 return true;
119}
120
121bool argusCameraDriver::stopCamera()
122{
124 if (m_consumer)
125 {
126 if (iCaptureSession->isRepeating())
127 {
128 iCaptureSession->stopRepeat();
129 iCaptureSession->waitForIdle();
130 }
131 }
132 return true;
133}
134
// open(Searchable& config) — DeviceDriver entry point: parses parameters, creates
// the Argus camera provider/capture session, configures the EGL output stream and
// starts streaming.
// NOTE(review): several hyperlinked lines were lost in the page extraction — the
// function signature and the interface_cast declarations for iCameraProvider,
// ICameraProperties, iCaptureSession and iEglStreamSettings. Restore them from
// the repository; the orphan braces below mark where conditions were dropped.
{
    bool ok{true};
    yCDebug(ARGUS_CAMERA) << "input params are " << config.toString();

    if(!parseParams(config)) {
        yCError(ARGUS_CAMERA) << "Error parsing parameters";
        return false;
    }

    // A period, when given, takes precedence for deriving the frame rate.
    if (m_period != 0.0)
    {
        m_fps = 1.0 / m_period;
    }

    // FIXME handle m_period = 0.0

    m_cameraProvider.reset(CameraProvider::create());
    // NOTE(review): the iCameraProvider interface_cast line was dropped here.
    if (!iCameraProvider)
    {
        yCError(ARGUS_CAMERA) << "Failed to create CameraProvider";
        return false;
    }

    /* Get the camera devices */
    iCameraProvider->getCameraDevices(&m_cameraDevices);
    if (m_cameraDevices.size() == 0)
    {
        yCError(ARGUS_CAMERA) << "No cameras available";
        return false;
    }

    // NOTE(review): the ICameraProperties interface_cast and its `if (!...)`
    // condition line were dropped here.
    {
        yCError(ARGUS_CAMERA) << "Failed to get ICameraProperties interface";
        return false;
    }

    // Validate the configured device index against the detected devices.
    if (m_d >= m_cameraDevices.size())
    {
        yCError(ARGUS_CAMERA) << "Camera device index d =" << m_d << "is invalid.";
        return false;
    }

    /* Create the capture session using the first device and get the core interface */
    m_captureSession.reset(iCameraProvider->createCaptureSession(m_cameraDevices[m_d]));
    // NOTE(review): the iCaptureSession interface_cast line was dropped here.
    if (!iCaptureSession)
    {
        yCError(ARGUS_CAMERA) << "Failed to get ICaptureSession interface";
        return false;
    }

    m_streamSettings.reset(iCaptureSession->createOutputStreamSettings(STREAM_TYPE_EGL));
    // NOTE(review): the iEglStreamSettings interface_cast and its `if` condition
    // were dropped here.
    {
        yCError(ARGUS_CAMERA) << "Failed to get IEGLOutputStreamSettings interface";
        return false;
    }

    ok = ok && setRgbResolution(m_width, m_height);

    #ifdef USE_CUDA
    yCDebug(ARGUS_CAMERA) << "Using CUDA!";
    // NOTE(review): GpuMat takes (rows, cols) — these are constructed as
    // (m_width, m_height), the opposite order of the cv::Mat buffers below;
    // confirm against the repository whether this is intentional.
    gpu_rgba_img = cv::cuda::GpuMat(m_width, m_height, CV_8UC4);
    gpu_bgr_img = cv::cuda::GpuMat(m_width, m_height, CV_8UC3);
    #else
    yCDebug(ARGUS_CAMERA) << "Not using CUDA!";
    #endif

    bgr_img = cv::Mat(m_height, m_width, CV_8UC3);
    rgba_img = cv::Mat(m_height, m_width, CV_8UC4);

    return ok && startCamera();
}
213
// close() — DeviceDriver shutdown; always succeeds, no explicit teardown here.
// NOTE(review): the signature line was lost in the page extraction
// (`bool argusCameraDriver::close()` per the member index).
{
    return true;
}
218
// getRgbHeight() — height of the RGB frames, in pixels.
// NOTE(review): signature line lost in the page extraction
// (`int argusCameraDriver::getRgbHeight()` per the member index).
{
    return m_height;
}
223
// getRgbWidth() — width of the RGB frames, in pixels.
// NOTE(review): signature line lost in the page extraction
// (`int argusCameraDriver::getRgbWidth()` per the member index).
{
    return m_width;
}
228
// getRgbSupportedConfigurations(...) — not implemented; always fails.
// NOTE(review): signature line lost in the page extraction (see member index).
{
    yCWarning(ARGUS_CAMERA) << "getRgbSupportedConfigurations not implemented yet";
    return false;
}
234
235bool argusCameraDriver::getRgbResolution(int& width, int& height)
236{
237 width = m_width;
239 return true;
240}
241
// Stop streaming, pick the closest supported resolution to (width, height),
// apply it to the EGL stream settings, rebuild the output stream / consumer /
// capture request, and restart streaming.
// NOTE(review): several hyperlinked lines were lost in the page extraction
// (interface_cast declarations, the `supportedResolutions` lookup, the nearest
// match assignments, a condition line, the Size2D `resolution` construction and
// the `m_height` assignment). Restore from the repository; gaps are marked below.
bool argusCameraDriver::setRgbResolution(int width, int height)
{
    stopCamera();

    // NOTE(review): the iCaptureSession / iEglStreamSettings interface_cast
    // lines were dropped here.
    if (!iCaptureSession)
    {
        yCError(ARGUS_CAMERA) << "Failed to get ICaptureSession interface";
        return false;
    }

    if (width > 0 && height > 0)
    {
        int nearestWidth = -1;
        int nearestHeight = -1;
        double minDistance = std::numeric_limits<double>::max();

        // NOTE(review): the `supportedResolutions` declaration line was dropped
        // here (presumably a lookup into cameraResolutions for the sensor model).
        for (auto &resolution : supportedResolutions)
        {
            if (resolution.width() == width && resolution.height() == height)
            {
                yCDebug(ARGUS_CAMERA) << "The resolution" << resolution.width() << "x" << resolution.height() << "is available";
                // NOTE(review): lines dropped here (presumably assigning
                // nearestWidth/nearestHeight before the break).
                break;
            }
            else
            {
                yCWarning(ARGUS_CAMERA) << "The set width and height are different from the available ones. Searching for the nearest resolution...";
                // L1 distance in pixels between requested and candidate sizes.
                double distance = std::abs(int(resolution.width() - width)) + std::abs(int(resolution.height() - height));
                if (distance < minDistance)
                {
                    minDistance = distance;
                    nearestWidth = resolution.width();
                    nearestHeight = resolution.height();
                }
            }
        }

        if (nearestWidth != -1 && nearestHeight != -1)
        {
            yCInfo(ARGUS_CAMERA) << "Nearest resolution found:" << nearestWidth << "x" << nearestHeight;
        }

        // NOTE(review): the condition line was dropped here — presumably
        // `if (m_rotation_with_crop)` as in getImage.
        {
            // ±90° rotations swap the buffer's width and height.
            if (m_rotation == -90.0 || m_rotation == 90.0)
            {
                std::swap(width, height);
            }
        }

        // NOTE(review): the Size2D `resolution` construction lines were dropped
        // here. Also note setResolution() is invoked twice below — the first,
        // unchecked call looks redundant; confirm against the repository.
        iEglStreamSettings->setResolution(resolution);

        if(iEglStreamSettings->setResolution(Size2D<uint32_t>(resolution)) == STATUS_OK)
        {
            m_width = width;
            // NOTE(review): the `m_height = height;` line was dropped here.
        }
    }

    // Rebuild the stream, consumer and capture request against the new settings.
    m_stream.reset(iCaptureSession->createOutputStream(m_streamSettings.get()));
    m_consumer.reset(FrameConsumer::create(m_stream.get()));

    if (!m_consumer)
    {
        yCError(ARGUS_CAMERA) << "Failed to create FrameConsumer";
        return false;
    }

    m_request.reset(iCaptureSession->createRequest(Argus::CAPTURE_INTENT_PREVIEW));
    Argus::IRequest *iRequest = Argus::interface_cast<Argus::IRequest>(m_request);
    if (iRequest->enableOutputStream(m_stream.get()) != STATUS_OK)
    {
        yCError(ARGUS_CAMERA) << "Failed to enable output stream";
        return false;
    }

    startCamera();
    return true;
}
328
// setRgbFOV — not supported by this driver; always fails.
bool argusCameraDriver::setRgbFOV(double horizontalFov, double verticalFov)
{
    yCWarning(ARGUS_CAMERA) << "setRgbFOV not supported";
    return false;
}
334
// getRgbFOV — not supported by this driver; always fails.
bool argusCameraDriver::getRgbFOV(double& horizontalFov, double& verticalFov)
{
    yCWarning(ARGUS_CAMERA) << "getRgbFOV not supported";
    return false;
}
340
// setRgbMirroring(bool) — not supported; always fails.
// NOTE(review): signature line lost in the page extraction (see member index).
{
    yCWarning(ARGUS_CAMERA) << "Mirroring not supported";
    return false;
}
346
// getRgbMirroring(bool&) — not supported; always fails.
// NOTE(review): signature line lost in the page extraction (see member index).
{
    yCWarning(ARGUS_CAMERA) << "Mirroring not supported";
    return false;
}
352
// getRgbIntrinsicParam(Property&) — not available; always fails.
// NOTE(review): signature line lost in the page extraction (see member index).
{
    yCWarning(ARGUS_CAMERA) << "getRgbIntrinsicParam not supported"; //no intrinsic parameters stored in the eeprom of the camera
    return false;
}
358
366
// hasFeature(int feature, bool* hasFeature) — true (with *hasFeature set) when
// `feature` is a valid id; *hasFeature reports membership in supported_features.
// NOTE(review): the signature, the `cameraFeature_id_t f;` declaration and the
// range-check condition line were lost in the page extraction — the orphan brace
// below guards the early `return false` for out-of-range ids.
{
    f = static_cast<cameraFeature_id_t>(feature);
    {
        return false;
    }

    *hasFeature = std::find(supported_features.begin(), supported_features.end(), f) != supported_features.end();

    return true;
}
380
// setFeature(int feature, double value) — set a single-value feature, with
// `value` normalized to [0, 1]; streaming is stopped while applying settings.
// NOTE(review): the signature and the `case YARP_FEATURE_*:` labels were lost in
// the page extraction; from the case bodies they are, in order: EXPOSURE,
// SATURATION, SHARPNESS, WHITE_BALANCE (rejected — needs 2 values) and
// FRAME_RATE. Restore from the repository.
{
    bool b = false;
    if (!hasFeature(feature, &b) || !b)
    {
        yCError(ARGUS_CAMERA) << "Feature not supported!";
        return false;
    }
    b = false;
    auto f = static_cast<cameraFeature_id_t>(feature);

    Argus::IRequest *iRequest = Argus::interface_cast<Argus::IRequest>(m_request);
    IAutoControlSettings *m_iAutoControlSettings = interface_cast<IAutoControlSettings>(iRequest->getAutoControlSettings());
    IEdgeEnhanceSettings *m_iEdgeEnhanceSettings = interface_cast<IEdgeEnhanceSettings>(m_request);
    // Settings are applied between stopCamera() and startCamera().
    stopCamera();

    switch (f)
    {
        // [EXPOSURE]
        m_iAutoControlSettings->setExposureCompensation(fromZeroOneToRange(f, value));
        b = true;
        break;
        // [SATURATION]
        m_iAutoControlSettings->setColorSaturation(fromZeroOneToRange(f, value));
        b = true;
        break;
        // [SHARPNESS]
        m_iEdgeEnhanceSettings->setEdgeEnhanceMode(EDGE_ENHANCE_MODE_HIGH_QUALITY);
        m_iEdgeEnhanceSettings->setEdgeEnhanceStrength(fromZeroOneToRange(f, value));
        b = true;
        break;
        // [WHITE_BALANCE — needs the 2-value overload]
        b = false;
        yCError(ARGUS_CAMERA) << "White balance require 2 values";
        // [FRAME_RATE] NOTE(review): a `break;`/case-label line appears to be
        // missing between the error above and this call — confirm against the repo.
        b = setFramerate(value);
        break;
    default:
        yCError(ARGUS_CAMERA) << "Feature not supported!";
        return false;
    }

    startCamera();
    return b;
}
426
// getFeature(int feature, double* value) — read a single-value feature and
// normalize the result to [0, 1].
// NOTE(review): the signature and the `case YARP_FEATURE_*:` labels were lost in
// the page extraction; from the case bodies they are, in order: EXPOSURE,
// SATURATION, SHARPNESS, WHITE_BALANCE (rejected — 2-values) and FRAME_RATE.
{
    bool b = false;
    if (!hasFeature(feature, &b) || !b)
    {
        yCError(ARGUS_CAMERA) << "Feature not supported!";
        return false;
    }
    b = false;
    auto f = static_cast<cameraFeature_id_t>(feature);

    Argus::IRequest *iRequest = Argus::interface_cast<Argus::IRequest>(m_request);
    IAutoControlSettings *m_iAutoControlSettings = interface_cast<IAutoControlSettings>(iRequest->getAutoControlSettings());
    IEdgeEnhanceSettings *m_iEdgeEnhanceSettings = interface_cast<IEdgeEnhanceSettings>(m_request);

    switch (f)
    {
        // [EXPOSURE]
        *value = m_iAutoControlSettings->getExposureCompensation();
        b = true;
        break;
        // [SATURATION] — enabling here so the read-back reflects the engine value.
        m_iAutoControlSettings->setColorSaturationEnable(true);
        *value = m_iAutoControlSettings->getColorSaturation();
        b = true;
        break;
        // [SHARPNESS]
        *value = m_iEdgeEnhanceSettings->getEdgeEnhanceStrength();
        b = true;
        break;
        // [WHITE_BALANCE — needs the 2-value overload]
        b = false;
        yCError(ARGUS_CAMERA) << "White balance is a 2-values feature";
        break;
        // [FRAME_RATE]
        b = true;
        *value = m_fps;
        break;
    default:
        yCError(ARGUS_CAMERA) << "Feature not supported!";
        return false;
    }

    // Normalize into [0, 1].
    // NOTE(review): this is also applied to the FRAME_RATE reading, yet
    // featureMinMax has no FRAME_RATE entry — map::at would throw. Confirm
    // against the repository.
    *value = fromRangeToZeroOne(f, *value);
    yCDebug(ARGUS_CAMERA) << "In 0-1" << *value;
    return b;
}
474
// setFeature(int feature, double value1, double value2) — 2-value setter; only
// white balance is supported (value1/value2 presumably blue/red gains, per the
// file-top note — confirm against the repository).
bool argusCameraDriver::setFeature(int feature, double value1, double value2)
{
    auto f = static_cast<cameraFeature_id_t>(feature);
    auto res = true;
    Argus::IRequest *iRequest = Argus::interface_cast<Argus::IRequest>(m_request);
    IAutoControlSettings *m_iAutoControlSettings = interface_cast<IAutoControlSettings>(iRequest->getAutoControlSettings());
    stopCamera();

    // NOTE(review): the condition line was lost in the page extraction —
    // presumably `if (f != YARP_FEATURE_WHITE_BALANCE)`.
    {
        yCError(ARGUS_CAMERA) << YARP_FEATURE_WHITE_BALANCE << "is not a 2-values feature supported";
        return false;
    }

    // Manual white balance: lock AE, unlock AWB, switch AWB to manual and push gains.
    m_iAutoControlSettings->setAeLock(true);
    m_iAutoControlSettings->setAwbLock(false);
    m_iAutoControlSettings->setAwbMode(AWB_MODE_MANUAL);
    // NOTE(review): the `wbGains` (BayerTuple) construction line was lost in the
    // page extraction.
    m_iAutoControlSettings->setWbGains(wbGains);

    startCamera();
    return res;
}
498
// getFeature(int feature, double* value1, double* value2) — 2-value getter; only
// white balance is supported. Returns the red and blue AWB gains normalized to [0, 1].
bool argusCameraDriver::getFeature(int feature, double* value1, double* value2)
{
    auto f = static_cast<cameraFeature_id_t>(feature);
    auto res = true;

    Argus::IRequest *iRequest = Argus::interface_cast<Argus::IRequest>(m_request);
    IAutoControlSettings *m_iAutoControlSettings = interface_cast<IAutoControlSettings>(iRequest->getAutoControlSettings());

    // NOTE(review): the condition line was lost in the page extraction —
    // presumably `if (f != YARP_FEATURE_WHITE_BALANCE)`.
    {
        yCError(ARGUS_CAMERA) << "This is not a 2-values feature supported";
        return false;
    }

    // NOTE(review): value1 gets the red gain and value2 the blue gain, the reverse
    // of the blue/red order stated in the file-top note — confirm the intended order.
    *value1 = fromRangeToZeroOne(f, m_iAutoControlSettings->getWbGains().r());
    *value2 = fromRangeToZeroOne(f, m_iAutoControlSettings->getWbGains().b());
    return res;
}
517
// hasOnOff — on/off support coincides with auto-mode support in this driver
// (on/off is implemented by toggling the AE/AWB locks; see setActive).
bool argusCameraDriver::hasOnOff(int feature, bool* HasOnOff)
{
    return hasAuto(feature, HasOnOff);
}
522
// setActive(int feature, bool onoff) — enable/disable a feature's automatic mode.
// NOTE(review): the signature and `case YARP_FEATURE_*:` labels were lost in the
// page extraction; from the case bodies they are EXPOSURE (AE lock) and
// WHITE_BALANCE (AWB lock). Also note the early returns below exit after
// stopCamera() without restarting the stream — confirm against the repository.
{
    bool b = false;
    auto f = static_cast<cameraFeature_id_t>(feature);
    Argus::IRequest *iRequest = Argus::interface_cast<Argus::IRequest>(m_request);
    IAutoControlSettings *m_iAutoControlSettings = interface_cast<IAutoControlSettings>(iRequest->getAutoControlSettings());
    stopCamera();

    if (!hasFeature(feature, &b) || !b)
    {
        yCError(ARGUS_CAMERA) << "Feature" << feature << "not supported!";
        return false;
    }

    if (!hasOnOff(feature, &b) || !b)
    {
        yCError(ARGUS_CAMERA) << "Feature" << feature << "does not have OnOff.. call hasOnOff() to know if a specific feature support OnOff mode";
        return false;
    }

    switch (f)
    {
        // [EXPOSURE] "on" = auto enabled = lock released, hence the negation.
        m_iAutoControlSettings->setAeLock(!onoff);
        b = true;
        break;
        // [WHITE_BALANCE]
        m_iAutoControlSettings->setAwbMode(AWB_MODE_AUTO);
        m_iAutoControlSettings->setAwbLock(!onoff);
        b = true;
        break;
    default:
        yCError(ARGUS_CAMERA) << "Feature" << feature << "not supported!";
        return false;
    }

    startCamera();
    return b;
}
562
// getActive(int feature, bool* isActive) — report whether a feature's automatic
// mode is currently enabled (lock released).
// NOTE(review): the signature and `case YARP_FEATURE_*:` labels were lost in the
// page extraction; from the case bodies they are EXPOSURE and WHITE_BALANCE.
{
    bool b = false;
    auto f = static_cast<cameraFeature_id_t>(feature);
    Argus::IRequest *iRequest = Argus::interface_cast<Argus::IRequest>(m_request);
    IAutoControlSettings *m_iAutoControlSettings = interface_cast<IAutoControlSettings>(iRequest->getAutoControlSettings());
    if (!hasFeature(feature, &b) || !b)
    {
        yCError(ARGUS_CAMERA) << "Feature" << feature << "not supported!";
        return false;
    }

    if (!hasOnOff(feature, &b) || !b)
    {
        yCError(ARGUS_CAMERA) << "Feature" << feature << "does not have OnOff.. call hasOnOff() to know if a specific feature support OnOff mode";
        return false;
    }

    bool val_to_get;
    switch (f)
    {
        // [EXPOSURE] auto is active when the AE lock is NOT held.
        val_to_get = !(m_iAutoControlSettings->getAeLock());
        b = true;
        break;
        // [WHITE_BALANCE] auto is active when the AWB lock is NOT held.
        val_to_get = !(m_iAutoControlSettings->getAwbLock());
        b = true;
        break;
    default:
        yCError(ARGUS_CAMERA) << "Feature" << feature << "not supported!";
        return false;
    }

    if (b)
    {
        if (val_to_get)
        {
            *isActive = true;
        }
        else
        {
            *isActive = false;
        }
    }
    return b;
}
610
// hasAuto(int feature, bool* hasAuto) — *hasAuto reports membership in
// features_with_auto for valid feature ids.
// NOTE(review): the signature, the `cameraFeature_id_t f;` declaration and the
// range-check condition line were lost in the page extraction — the orphan brace
// below guards the early `return false` for out-of-range ids.
{
    f = static_cast<cameraFeature_id_t>(feature);
    {
        return false;
    }

    *hasAuto = std::find(features_with_auto.begin(), features_with_auto.end(), f) != features_with_auto.end();

    return true;
}
624
629
634
// setMode(int feature, FeatureMode mode) — map MODE_AUTO/MODE_MANUAL onto
// setActive(feature, true/false); any other mode fails.
// NOTE(review): the signature line was lost in the page extraction
// (`bool setMode(int feature, FeatureMode mode)` per the member index).
{
    bool b{false};
    if (!hasAuto(feature, &b) || !b)
    {
        yCError(ARGUS_CAMERA) << "Feature" << feature << "not supported!";
        return false;
    }

    switch (mode)
    {
    case MODE_AUTO:
        return setActive(feature, true);
    case MODE_MANUAL:
        return setActive(feature, false);
    case MODE_UNKNOWN:
        return false;
    default:
        return false;
    }
    // Unreachable: every switch branch returns above.
    return b;
}
657
// getMode(int feature, FeatureMode* mode) — MODE_AUTO when the feature's auto
// mode is active, MODE_MANUAL otherwise.
// NOTE(review): the signature line was lost in the page extraction
// (`bool getMode(int feature, FeatureMode *mode)` per the member index).
{
    bool b{false};
    if (!hasAuto(feature, &b) || !b)
    {
        yCError(ARGUS_CAMERA) << "Feature" << feature << "not supported!";
        return false;
    }
    bool get_active{false};
    b = b && getActive(feature, &get_active);

    if (b)
    {
        if (get_active)
        {
            *mode = MODE_AUTO;
        }
        else
        {
            *mode = MODE_MANUAL;
        }
    }
    return b;
}
682
// setOnePush(int feature) — emulate "one push" by briefly enabling auto mode and
// then switching back to manual, so the auto value is latched.
// NOTE(review): the signature line was lost in the page extraction
// (`bool setOnePush(int feature)` per the member index).
{
    bool b = false;
    if (!hasOnePush(feature, &b) || !b)
    {
        yCError(ARGUS_CAMERA) << "Feature" << feature << "doesn't have OnePush";
        return false;
    }

    b = b && setMode(feature, MODE_AUTO);
    b = b && setMode(feature, MODE_MANUAL);

    return b;
}
697
// getImage(ImageOf<PixelRgb>& image) — grab the latest frame: export it to an
// NvBuffer (RGBA, hardware-rotated when m_rotation_with_crop), map it into a
// cv::Mat, convert/rotate/resize (CUDA path when available) and copy it into the
// YARP image.
// NOTE(review): the signature and the frame-acquisition lines (acquiring the
// EGLStream frame as `iFrame`) were lost in the page extraction — restore from
// the repository.
{
    // Serializes frame grabbing against concurrent setting changes.
    std::lock_guard<std::mutex> guard(m_mutex);

    NvBufSurface* nvBufSurface = nullptr;

    // NOTE(review): frame acquisition lines dropped here (iFrame comes from the
    // FrameConsumer).

    if(iFrame)
    {
        auto img = iFrame->getImage();
        auto image2d(Argus::interface_cast<EGLStream::IImage2D>(img));
        auto width = image2d->getSize()[0];
        auto height = image2d->getSize()[1];

        NV::IImageNativeBuffer *iNativeBuffer = interface_cast<NV::IImageNativeBuffer>(img);
        if (!iNativeBuffer)
        {
            // NOTE(review): execution continues after this error — iNativeBuffer
            // is dereferenced below; confirm against the repository.
            yCError(ARGUS_CAMERA) << "IImageNativeBuffer not supported by IImage";
        }

        double rotation = 0.0;
        // NOTE(review): the condition line was dropped here — per the comment
        // below it is `if (m_rotation_with_crop)`.
        {
            // If m_rotation_with_crop = true, width and height are swapped and the image is stored in a buffer already rotated by m_rotation.
            // In this way, no further transformations need to be done with OpenCV.
            rotation = m_rotation;
        }

        // Export to a pitch-linear RGBA NvBuffer, applying the hardware rotation.
        int fd = iNativeBuffer->createNvBuffer(image2d->getSize(), NVBUF_COLOR_FORMAT_RGBA, NVBUF_LAYOUT_PITCH, rotationToNVRot.at(rotation));

        if (fd == -1)
        {
            yCError(ARGUS_CAMERA) << "Failed to create NvBuffer";
            return false;
        }

        if (NvBufSurfaceFromFd(fd, (void**)(&nvBufSurface)) == -1)
        {
            yCError(ARGUS_CAMERA) << "Cannot get NvBufSurface from fd";
            return false;
        }

        if (NvBufSurfaceMap(nvBufSurface, 0, 0, NVBUF_MAP_READ) != STATUS_OK)
        {
            yCError(ARGUS_CAMERA) << "Failed to map NvBufSurface";
            return false;
        }

        // Wrap the mapped buffer without copying; valid only until the unmap below.
        rgba_img = cv::Mat(height, width, CV_8UC4, nvBufSurface->surfaceList->mappedAddr.addr[0]);
#ifdef USE_CUDA
        gpu_rgba_img.upload(rgba_img);
        cv::cuda::cvtColor(gpu_rgba_img, gpu_bgr_img, cv::COLOR_RGBA2BGR);

        // Software rotation, only when the buffer was not already hardware-rotated.
        if (!m_rotation_with_crop && m_rotation != 0.0)
        {
            cv::Point2f img_center((gpu_bgr_img.cols - 1) / 2.0, (gpu_bgr_img.rows - 1) / 2.0);
            cv::Mat M = cv::getRotationMatrix2D(img_center, m_rotation, 1.0);
            // Workaround since with cv::cuda::warpAffine, source and dest images CANNOT be the same (otherwise will result in black frames)
            cv::cuda::GpuMat tmp;
            cv::cuda::warpAffine(gpu_bgr_img, tmp, M, gpu_bgr_img.size());
            gpu_bgr_img = std::move(tmp);
        }

        if (m_width != width || m_height != height)
        {
            cv::Size size(m_width, m_height);
            cv::cuda::resize(gpu_bgr_img, gpu_bgr_img, size);
        }
        gpu_bgr_img.download(bgr_img);
#else
        cv::cvtColor(rgba_img, bgr_img, cv::COLOR_RGBA2BGR);

        // Software rotation, only when the buffer was not already hardware-rotated.
        if (!m_rotation_with_crop && m_rotation != 0.0)
        {
            cv::Point2f img_center((bgr_img.cols - 1) / 2.0, (bgr_img.rows - 1) / 2.0);
            cv::Mat M = cv::getRotationMatrix2D(img_center, m_rotation, 1.0);
            cv::warpAffine(bgr_img, bgr_img, M, bgr_img.size());
        }

        if (m_width != width || m_height != height)
        {
            cv::Size size(m_width, m_height);
            cv::resize(bgr_img, bgr_img, size);
        }
#endif // USE_CUDA
        image.copy(yarp::cv::fromCvMat<yarp::sig::PixelRgb>(bgr_img));

        // Unmap/destroy failures are logged but do not fail the grab.
        if (NvBufSurfaceUnMap(nvBufSurface, 0, 0) != STATUS_OK)
        {
            yCError(ARGUS_CAMERA) << "Failed to unmap NvBufSurface";
        }

        if (NvBufSurfaceDestroy(nvBufSurface) != STATUS_OK)
        {
            yCError(ARGUS_CAMERA) << "Failed to free the NvBufSurface";
        }
    }

    return true;
}
801
// height() — frame height in pixels (IFrameGrabberImage interface).
// NOTE(review): signature line lost in the page extraction
// (`int height() const override` per the member index).
{
    return m_height;
}
806
// width() — frame width in pixels (IFrameGrabberImage interface).
// NOTE(review): signature line lost in the page extraction
// (`int width() const override` per the member index).
{
    return m_width;
}
CameraDescriptor camera
FeatureMode mode
bool ret
static const std::map< double, NV::Rotation > rotationToNVRot
bool parseUint32Param(std::string param_name, std::uint32_t &param, yarp::os::Searchable &config)
static const std::vector< cameraFeature_id_t > supported_features
static const std::vector< cameraFeature_id_t > features_with_auto
double fromZeroOneToRange(cameraFeature_id_t feature, double value)
double fromRangeToZeroOne(cameraFeature_id_t feature, double value)
static const std::map< cameraFeature_id_t, std::pair< double, double > > featureMinMax
static const std::map< std::string, std::vector< Argus::Size2D< uint32_t > > > cameraResolutions
static const std::map< double, double > rotationToCVRot
bool parseParams(const yarp::os::Searchable &config) override
Parse the DeviceDriver parameters.
int getRgbHeight() override
Return the height of each frame.
bool hasAuto(int feature, bool *hasAuto) override
bool close() override
Close the DeviceDriver.
bool setRgbFOV(double horizontalFov, double verticalFov) override
Set the field of view (FOV) of the rgb camera.
bool getActive(int feature, bool *isActive) override
bool getRgbResolution(int &width, int &height) override
Get the resolution of the rgb image from the camera.
bool getMode(int feature, FeatureMode *mode) override
bool setRgbMirroring(bool mirror) override
Set the mirroring setting of the sensor.
bool setRgbResolution(int width, int height) override
Set the resolution of the rgb image from the camera.
bool getRgbSupportedConfigurations(yarp::sig::VectorOf< yarp::dev::CameraConfig > &configurations) override
int height() const override
Return the height of each frame.
bool setFeature(int feature, double value) override
int getRgbWidth() override
Return the width of each frame.
bool getCameraDescription(CameraDescriptor *camera) override
bool getRgbMirroring(bool &mirror) override
Get the mirroring setting of the sensor.
bool getImage(yarp::sig::ImageOf< yarp::sig::PixelRgb > &image) override
Get an image from the frame grabber.
bool open(yarp::os::Searchable &config) override
Open the DeviceDriver.
bool hasManual(int feature, bool *hasManual) override
bool hasOnePush(int feature, bool *hasOnePush) override
bool hasFeature(int feature, bool *hasFeature) override
int width() const override
Return the width of each frame.
bool setMode(int feature, FeatureMode mode) override
bool setActive(int feature, bool onoff) override
bool hasOnOff(int feature, bool *HasOnOff) override
bool getFeature(int feature, double *value) override
bool setOnePush(int feature) override
bool getRgbFOV(double &horizontalFov, double &verticalFov) override
Get the field of view (FOV) of the rgb camera.
bool getRgbIntrinsicParam(yarp::os::Property &intrinsic) override
Get the intrinsic parameters of the rgb camera.
A mini-server for performing network communication in the background.
A class for storing options and configuration information.
Definition Property.h:33
A base class for nested structures that can be searched.
Definition Searchable.h:31
virtual bool check(const std::string &key) const =0
Check if there exists a property of the given name.
virtual std::string toString() const =0
Return a standard text representation of the content of the object.
virtual Value & find(const std::string &key) const =0
Gets a value corresponding to a given keyword.
Typed image class.
Definition Image.h:603
#define yCInfo(component,...)
#define yCError(component,...)
#define yCWarning(component,...)
#define yCDebug(component,...)
STL namespace.
For streams capable of holding different kinds of content, check what they actually have.
An interface to the operating system, including Port based communication.