argusCameraDriver.cpp
/*
 * Copyright (C) 2006-2024 Istituto Italiano di Tecnologia (IIT)
 * All rights reserved.
 *
 * This software may be modified and distributed under the terms of the
 * BSD-3-Clause license. See the accompanying LICENSE file for details.
 */

#include <yarp/cv/Cv.h>
#include <yarp/os/Value.h>
#include <yarp/sig/ImageUtils.h>

#include <algorithm>
#include <cmath>
#include <cstdint>
#include <iomanip>
#ifdef USE_CUDA
#include <opencv2/cudawarping.hpp>
#include <opencv2/cudaimgproc.hpp>
#endif // USE_CUDA

#include "argusCameraDriver.h"

using namespace yarp::dev;
using namespace yarp::sig;
using namespace yarp::os;

using namespace std;
using namespace Argus;
using namespace EGLStream;

// VERY IMPORTANT NOTE ABOUT WHITE BALANCE: the YARP interfaces do not allow setting a feature with
// 3 values; 2 is the maximum, and so far we have always used blue and red, in this order. The green
// component is therefore ignored.
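
// For reference, a minimal (illustrative) client-side sketch of driving this 2-value white-balance
// path through yarp::dev::IFrameGrabberControls; the variable names and gain values are hypothetical:
//
//     yarp::dev::IFrameGrabberControls* ctrls = nullptr;
//     poly.view(ctrls);                                           // poly: an already-open yarp::dev::PolyDriver
//     ctrls->setFeature(YARP_FEATURE_WHITE_BALANCE, 0.4, 0.6);    // two normalized gains; green is not exposed
//     double v1 = 0.0, v2 = 0.0;
//     ctrls->getFeature(YARP_FEATURE_WHITE_BALANCE, &v1, &v2);    // read them back in the 0-1 range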

static const std::vector<cameraFeature_id_t> supported_features{YARP_FEATURE_EXPOSURE, YARP_FEATURE_SATURATION, YARP_FEATURE_SHARPNESS,
                                                                 YARP_FEATURE_WHITE_BALANCE, YARP_FEATURE_FRAME_RATE};

static const std::vector<cameraFeature_id_t> features_with_auto{YARP_FEATURE_EXPOSURE, YARP_FEATURE_WHITE_BALANCE};

static const std::map<cameraFeature_id_t, std::pair<double, double>> featureMinMax{{YARP_FEATURE_EXPOSURE, {-2.0, 2.0}},
                                                                                   {YARP_FEATURE_SATURATION, {0.0, 2.0}},
                                                                                   {YARP_FEATURE_SHARPNESS, {-1.0, 1.0}},
                                                                                   {YARP_FEATURE_WHITE_BALANCE, {1.0, 8.0}}, // not sure about this: the doc is not clear, range found empirically
                                                                                   {YARP_FEATURE_GAIN, {1.0, 3981.07}}};

static const std::map<double, NV::Rotation> rotationToNVRot{{0.0, NV::ROTATION_0}, {90.0, NV::ROTATION_90}, {-90.0, NV::ROTATION_270}, {180.0, NV::ROTATION_180}};
static const std::map<double, double> rotationToCVRot{{0.0, 0.0}, {90.0, cv::ROTATE_90_COUNTERCLOCKWISE}, {-90.0, cv::ROTATE_90_CLOCKWISE}, {180.0, cv::ROTATE_180}};

static const std::map<std::string, std::vector<Argus::Size2D<uint32_t>>> cameraResolutions{
    {"imx415", {Size2D<uint32_t>(1280, 720), Size2D<uint32_t>(1920, 1080), Size2D<uint32_t>(3840, 2160)}}
};

// Features are usually set through a normalized range between 0 and 1; translate that into a meaningful value for the camera
double fromZeroOneToRange(cameraFeature_id_t feature, double value)
{
    return value * (featureMinMax.at(feature).second - featureMinMax.at(feature).first) + featureMinMax.at(feature).first;
}

// Inverse mapping: bring a camera value back into the 0-1 range
double fromRangeToZeroOne(cameraFeature_id_t feature, double value)
{
    return (value - featureMinMax.at(feature).first) / (featureMinMax.at(feature).second - featureMinMax.at(feature).first);
}
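
// Worked example of the mapping above: for YARP_FEATURE_EXPOSURE the range is [-2.0, 2.0], so a
// normalized value of 0.75 maps to 0.75 * (2.0 - (-2.0)) + (-2.0) = 1.0, and fromRangeToZeroOne(1.0)
// maps back to 0.75.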

bool argusCameraDriver::setFramerate(const uint64_t _fps)
{
    Argus::IRequest *iRequest = Argus::interface_cast<Argus::IRequest>(m_request);
    IAutoControlSettings *m_iAutoControlSettings = interface_cast<IAutoControlSettings>(iRequest->getAutoControlSettings());
    ISourceSettings *m_iSourceSettings = interface_cast<ISourceSettings>(iRequest->getSourceSettings());

    m_iAutoControlSettings->setAeLock(true);

    // According to https://docs.nvidia.com/jetson/l4t-multimedia/classArgus_1_1ISourceSettings.html, the frame duration range is expressed in nanoseconds
    uint64_t frameDuration = static_cast<uint64_t>(1e9 / _fps);
    bool ret = true;
    ret = ret && m_iSourceSettings->setFrameDurationRange(Argus::Range<uint64_t>(frameDuration)) == STATUS_OK;
    ret = ret && m_iSourceSettings->setExposureTimeRange(Argus::Range<uint64_t>(frameDuration)) == STATUS_OK;
    if (ret)
    {
        m_fps = _fps;
    }
    else
    {
        yCError(ARGUS_CAMERA) << "The required frame rate" << _fps << "cannot be set";
        return false;
    }

    return ret;
}
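
// Note on units: at 30 fps the frame duration passed to Argus is 1e9 / 30 ≈ 33,333,333 ns, and the same
// single-value range is used here for both the frame duration and the exposure time.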

bool parseUint32Param(std::string param_name, std::uint32_t& param, yarp::os::Searchable& config)
{
    if (config.check(param_name) && config.find(param_name).isInt32())
    {
        param = config.find(param_name).asInt32();
        return true;
    }
    else
    {
        yCWarning(ARGUS_CAMERA) << param_name << "parameter not specified, using" << param;
        return false;
    }
}
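
// Illustrative use (the key name below is hypothetical): the caller passes the current value as the
// default, which is kept and reported in the warning when the key is missing from the configuration:
//
//     std::uint32_t deviceIndex = 0;
//     parseUint32Param("d", deviceIndex, config);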

bool argusCameraDriver::startCamera()
{
    setFramerate(m_fps);
    ICaptureSession *iCaptureSession = interface_cast<ICaptureSession>(m_captureSession);
    if (m_consumer)
    {
        if (!iCaptureSession->isRepeating())
        {
            iCaptureSession->repeat(m_request.get());
        }
    }
    return true;
}

bool argusCameraDriver::stopCamera()
{
    ICaptureSession *iCaptureSession = interface_cast<ICaptureSession>(m_captureSession);
    if (m_consumer)
    {
        if (iCaptureSession->isRepeating())
        {
            iCaptureSession->stopRepeat();
            iCaptureSession->waitForIdle();
        }
    }
    return true;
}

bool argusCameraDriver::open(Searchable& config)
{
    bool ok{true};
    yCDebug(ARGUS_CAMERA) << "input params are " << config.toString();

    if(!parseParams(config)) {
        yCError(ARGUS_CAMERA) << "Error parsing parameters";
        return false;
    }

    if (m_period != 0.0)
    {
        m_fps = 1.0 / m_period;
    }

    // FIXME handle m_period = 0.0

    m_cameraProvider.reset(CameraProvider::create());
    ICameraProvider *iCameraProvider = interface_cast<ICameraProvider>(m_cameraProvider);
    if (!iCameraProvider)
    {
        yCError(ARGUS_CAMERA) << "Failed to create CameraProvider";
        return false;
    }

    /* Get the camera devices */
    iCameraProvider->getCameraDevices(&m_cameraDevices);
    if (m_cameraDevices.size() == 0)
    {
        yCError(ARGUS_CAMERA) << "No cameras available";
        return false;
    }

    ICameraProperties *iCameraProperties = interface_cast<ICameraProperties>(m_cameraDevices[0]);
    if (!iCameraProperties)
    {
        yCError(ARGUS_CAMERA) << "Failed to get ICameraProperties interface";
        return false;
    }

    if (m_d >= m_cameraDevices.size())
    {
        yCError(ARGUS_CAMERA) << "Camera device index d =" << m_d << "is invalid.";
        return false;
    }

    /* Create the capture session using the selected device and get the core interface */
    m_captureSession.reset(iCameraProvider->createCaptureSession(m_cameraDevices[m_d]));
    ICaptureSession *iCaptureSession = interface_cast<ICaptureSession>(m_captureSession);
    if (!iCaptureSession)
    {
        yCError(ARGUS_CAMERA) << "Failed to get ICaptureSession interface";
        return false;
    }

    m_streamSettings.reset(iCaptureSession->createOutputStreamSettings(STREAM_TYPE_EGL));
    IEGLOutputStreamSettings *iEglStreamSettings = interface_cast<IEGLOutputStreamSettings>(m_streamSettings);
    if (!iEglStreamSettings)
    {
        yCError(ARGUS_CAMERA) << "Failed to get IEGLOutputStreamSettings interface";
        return false;
    }

    ok = ok && setRgbResolution(m_width, m_height);

    #ifdef USE_CUDA
    yCDebug(ARGUS_CAMERA) << "Using CUDA!";
    gpu_rgba_img = cv::cuda::GpuMat(m_height, m_width, CV_8UC4);
    gpu_bgr_img = cv::cuda::GpuMat(m_height, m_width, CV_8UC3);
    #else
    yCDebug(ARGUS_CAMERA) << "Not using CUDA!";
    #endif

    bgr_img = cv::Mat(m_height, m_width, CV_8UC3);
    rgba_img = cv::Mat(m_height, m_width, CV_8UC4);

    return ok && startCamera();
}
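
// Illustrative launch command (device and parameter names here are hypothetical and depend on how the
// plugin is registered and on the options declared in parseParams(); adjust to the actual setup):
//
//     yarpdev --device frameGrabber_nws_yarp --subdevice argusCamera \
//             --period 0.033 --width 1920 --height 1080 --d 0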

bool argusCameraDriver::close()
{
    return true;
}

int argusCameraDriver::getRgbHeight()
{
    return m_height;
}

int argusCameraDriver::getRgbWidth()
{
    return m_width;
}

bool argusCameraDriver::getRgbSupportedConfigurations(yarp::sig::VectorOf<CameraConfig>& configurations)
{
    yCWarning(ARGUS_CAMERA) << "getRgbSupportedConfigurations not implemented yet";
    return false;
}

bool argusCameraDriver::getRgbResolution(int& width, int& height)
{
    width = m_width;
    height = m_height;
    return true;
}

bool argusCameraDriver::setRgbResolution(int width, int height)
{
    stopCamera();

    ICaptureSession *iCaptureSession = interface_cast<ICaptureSession>(m_captureSession);
    IEGLOutputStreamSettings *iEglStreamSettings = interface_cast<IEGLOutputStreamSettings>(m_streamSettings);
    ICameraProperties *iCameraProperties = interface_cast<ICameraProperties>(m_cameraDevices[m_d]);
    if (!iCaptureSession)
    {
        yCError(ARGUS_CAMERA) << "Failed to get ICaptureSession interface";
        return false;
    }

    if (width > 0 && height > 0)
    {
        int nearestWidth = -1;
        int nearestHeight = -1;
        double minDistance = std::numeric_limits<double>::max();

        const auto& supportedResolutions = cameraResolutions.at(iCameraProperties->getModelName());
        for (auto &resolution : supportedResolutions)
        {
            if (resolution.width() == width && resolution.height() == height)
            {
                yCDebug(ARGUS_CAMERA) << "The resolution" << resolution.width() << "x" << resolution.height() << "is available";
                nearestWidth = width;
                nearestHeight = height;
                break;
            }
            else
            {
                yCWarning(ARGUS_CAMERA) << "The requested width and height differ from the available ones. Searching for the nearest supported resolution...";
                double distance = std::abs(int(resolution.width() - width)) + std::abs(int(resolution.height() - height));
                if (distance < minDistance)
                {
                    minDistance = distance;
                    nearestWidth = resolution.width();
                    nearestHeight = resolution.height();
                }
            }
        }

        if (nearestWidth != -1 && nearestHeight != -1)
        {
            yCInfo(ARGUS_CAMERA) << "Nearest resolution found:" << nearestWidth << "x" << nearestHeight;
        }

        if (m_rotation_with_crop)
        {
            if (m_rotation == -90.0 || m_rotation == 90.0)
            {
                std::swap(width, height);
            }
        }

        Size2D<uint32_t> resolution(nearestWidth, nearestHeight);
        if (iEglStreamSettings->setResolution(resolution) == STATUS_OK)
        {
            m_width = width;
            m_height = height;
        }
    }

    m_stream.reset(iCaptureSession->createOutputStream(m_streamSettings.get()));
    m_consumer.reset(FrameConsumer::create(m_stream.get()));

    if (!m_consumer)
    {
        yCError(ARGUS_CAMERA) << "Failed to create FrameConsumer";
        return false;
    }

    m_request.reset(iCaptureSession->createRequest(Argus::CAPTURE_INTENT_PREVIEW));
    Argus::IRequest *iRequest = Argus::interface_cast<Argus::IRequest>(m_request);
    if (iRequest->enableOutputStream(m_stream.get()) != STATUS_OK)
    {
        yCError(ARGUS_CAMERA) << "Failed to enable output stream";
        return false;
    }

    startCamera();
    return true;
}
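
// Worked example of the nearest-resolution search above: on the imx415 list, a request for 1700x1000
// gives distances 700 (1280x720), 300 (1920x1080) and 3300 (3840x2160), so 1920x1080 is selected.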

bool argusCameraDriver::setRgbFOV(double horizontalFov, double verticalFov)
{
    yCWarning(ARGUS_CAMERA) << "setRgbFOV not supported";
    return false;
}

bool argusCameraDriver::getRgbFOV(double& horizontalFov, double& verticalFov)
{
    yCWarning(ARGUS_CAMERA) << "getRgbFOV not supported";
    return false;
}

bool argusCameraDriver::setRgbMirroring(bool mirror)
{
    yCWarning(ARGUS_CAMERA) << "Mirroring not supported";
    return false;
}

bool argusCameraDriver::getRgbMirroring(bool& mirror)
{
    yCWarning(ARGUS_CAMERA) << "Mirroring not supported";
    return false;
}

bool argusCameraDriver::getRgbIntrinsicParam(Property& intrinsic)
{
    yCWarning(ARGUS_CAMERA) << "getRgbIntrinsicParam not supported"; // no intrinsic parameters are stored in the camera EEPROM
    return false;
}

bool argusCameraDriver::getCameraDescription(CameraDescriptor* camera)
{
    ICameraProperties *iCameraProperties = interface_cast<ICameraProperties>(m_cameraDevices[m_d]);
    camera->deviceDescription = iCameraProperties->getModelName();
    camera->busType = BUS_UNKNOWN;
    return true;
}

bool argusCameraDriver::hasFeature(int feature, bool* hasFeature)
{
    cameraFeature_id_t f;
    f = static_cast<cameraFeature_id_t>(feature);
    if (f < YARP_FEATURE_BRIGHTNESS || f > YARP_FEATURE_NUMBER_OF - 1)
    {
        return false;
    }

    *hasFeature = std::find(supported_features.begin(), supported_features.end(), f) != supported_features.end();

    return true;
}

bool argusCameraDriver::setFeature(int feature, double value)
{
    bool b = false;
    if (!hasFeature(feature, &b) || !b)
    {
        yCError(ARGUS_CAMERA) << "Feature not supported!";
        return false;
    }
    b = false;
    auto f = static_cast<cameraFeature_id_t>(feature);

    Argus::IRequest *iRequest = Argus::interface_cast<Argus::IRequest>(m_request);
    IAutoControlSettings *m_iAutoControlSettings = interface_cast<IAutoControlSettings>(iRequest->getAutoControlSettings());
    IEdgeEnhanceSettings *m_iEdgeEnhanceSettings = interface_cast<IEdgeEnhanceSettings>(m_request);
    stopCamera();

    switch (f)
    {
    case YARP_FEATURE_EXPOSURE:
        m_iAutoControlSettings->setExposureCompensation(fromZeroOneToRange(f, value));
        b = true;
        break;
    case YARP_FEATURE_SATURATION:
        m_iAutoControlSettings->setColorSaturation(fromZeroOneToRange(f, value));
        b = true;
        break;
    case YARP_FEATURE_SHARPNESS:
        m_iEdgeEnhanceSettings->setEdgeEnhanceMode(EDGE_ENHANCE_MODE_HIGH_QUALITY);
        m_iEdgeEnhanceSettings->setEdgeEnhanceStrength(fromZeroOneToRange(f, value));
        b = true;
        break;
    case YARP_FEATURE_WHITE_BALANCE:
        b = false;
        yCError(ARGUS_CAMERA) << "White balance requires 2 values";
        break;
    case YARP_FEATURE_FRAME_RATE:
        b = setFramerate(value);
        break;
    default:
        yCError(ARGUS_CAMERA) << "Feature not supported!";
        return false;
    }

    startCamera();
    return b;
}
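
// Mapping example for the switch above: setFeature(YARP_FEATURE_SATURATION, 0.5) goes through
// fromZeroOneToRange() with the [0.0, 2.0] range declared in featureMinMax and programs a color
// saturation of 1.0, i.e. the neutral value.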

bool argusCameraDriver::getFeature(int feature, double* value)
{
    bool b = false;
    if (!hasFeature(feature, &b) || !b)
    {
        yCError(ARGUS_CAMERA) << "Feature not supported!";
        return false;
    }
    b = false;
    auto f = static_cast<cameraFeature_id_t>(feature);

    Argus::IRequest *iRequest = Argus::interface_cast<Argus::IRequest>(m_request);
    IAutoControlSettings *m_iAutoControlSettings = interface_cast<IAutoControlSettings>(iRequest->getAutoControlSettings());
    IEdgeEnhanceSettings *m_iEdgeEnhanceSettings = interface_cast<IEdgeEnhanceSettings>(m_request);

    switch (f)
    {
    case YARP_FEATURE_EXPOSURE:
        *value = m_iAutoControlSettings->getExposureCompensation();
        b = true;
        break;
    case YARP_FEATURE_SATURATION:
        m_iAutoControlSettings->setColorSaturationEnable(true);
        *value = m_iAutoControlSettings->getColorSaturation();
        b = true;
        break;
    case YARP_FEATURE_SHARPNESS:
        *value = m_iEdgeEnhanceSettings->getEdgeEnhanceStrength();
        b = true;
        break;
    case YARP_FEATURE_WHITE_BALANCE:
        b = false;
        yCError(ARGUS_CAMERA) << "White balance is a 2-value feature";
        break;
    case YARP_FEATURE_FRAME_RATE:
        b = true;
        *value = m_fps;
        break;
    default:
        yCError(ARGUS_CAMERA) << "Feature not supported!";
        return false;
    }

    *value = fromRangeToZeroOne(f, *value);
    yCDebug(ARGUS_CAMERA) << "In 0-1" << *value;
    return b;
}

bool argusCameraDriver::setFeature(int feature, double value1, double value2)
{
    auto f = static_cast<cameraFeature_id_t>(feature);
    auto res = true;
    Argus::IRequest *iRequest = Argus::interface_cast<Argus::IRequest>(m_request);
    IAutoControlSettings *m_iAutoControlSettings = interface_cast<IAutoControlSettings>(iRequest->getAutoControlSettings());
    stopCamera();

    if (f != YARP_FEATURE_WHITE_BALANCE)
    {
        yCError(ARGUS_CAMERA) << "Feature" << f << "is not a supported 2-valued feature";
        return false;
    }

    m_iAutoControlSettings->setAeLock(true);
    m_iAutoControlSettings->setAwbLock(false);
    m_iAutoControlSettings->setAwbMode(AWB_MODE_MANUAL);
    BayerTuple<float> wbGains(fromZeroOneToRange(f, value1), 1.0f, 1.0f, fromZeroOneToRange(f, value2)); // green gains left neutral: they are not exposed through the 2-value YARP interface (see note at the top of this file)
    m_iAutoControlSettings->setWbGains(wbGains);

    startCamera();
    return res;
}

bool argusCameraDriver::getFeature(int feature, double* value1, double* value2)
{
    auto f = static_cast<cameraFeature_id_t>(feature);
    auto res = true;

    Argus::IRequest *iRequest = Argus::interface_cast<Argus::IRequest>(m_request);
    IAutoControlSettings *m_iAutoControlSettings = interface_cast<IAutoControlSettings>(iRequest->getAutoControlSettings());

    if (f != YARP_FEATURE_WHITE_BALANCE)
    {
        yCError(ARGUS_CAMERA) << "Feature" << f << "is not a supported 2-valued feature";
        return false;
    }

    *value1 = fromRangeToZeroOne(f, m_iAutoControlSettings->getWbGains().r());
    *value2 = fromRangeToZeroOne(f, m_iAutoControlSettings->getWbGains().b());
    return res;
}

bool argusCameraDriver::hasOnOff(int feature, bool* HasOnOff)
{
    return hasAuto(feature, HasOnOff);
}

bool argusCameraDriver::setActive(int feature, bool onoff)
{
    bool b = false;
    auto f = static_cast<cameraFeature_id_t>(feature);
    Argus::IRequest *iRequest = Argus::interface_cast<Argus::IRequest>(m_request);
    IAutoControlSettings *m_iAutoControlSettings = interface_cast<IAutoControlSettings>(iRequest->getAutoControlSettings());
    stopCamera();

    if (!hasFeature(feature, &b) || !b)
    {
        yCError(ARGUS_CAMERA) << "Feature" << feature << "not supported!";
        return false;
    }

    if (!hasOnOff(feature, &b) || !b)
    {
        yCError(ARGUS_CAMERA) << "Feature" << feature << "does not support on/off mode; call hasOnOff() to check whether a specific feature supports it";
        return false;
    }

    switch (f)
    {
    case YARP_FEATURE_EXPOSURE:
        m_iAutoControlSettings->setAeLock(!onoff);
        b = true;
        break;
    case YARP_FEATURE_WHITE_BALANCE:
        m_iAutoControlSettings->setAwbMode(AWB_MODE_AUTO);
        m_iAutoControlSettings->setAwbLock(!onoff);
        b = true;
        break;
    default:
        yCError(ARGUS_CAMERA) << "Feature" << feature << "not supported!";
        return false;
    }

    startCamera();
    return b;
}
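
// Note on the semantics above: setActive(YARP_FEATURE_EXPOSURE, true) releases the auto-exposure lock
// (auto mode keeps running), while setActive(YARP_FEATURE_EXPOSURE, false) locks it at the current
// value; white balance behaves the same way through the AWB lock.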

bool argusCameraDriver::getActive(int feature, bool* isActive)
{
    bool b = false;
    auto f = static_cast<cameraFeature_id_t>(feature);
    Argus::IRequest *iRequest = Argus::interface_cast<Argus::IRequest>(m_request);
    IAutoControlSettings *m_iAutoControlSettings = interface_cast<IAutoControlSettings>(iRequest->getAutoControlSettings());
    if (!hasFeature(feature, &b) || !b)
    {
        yCError(ARGUS_CAMERA) << "Feature" << feature << "not supported!";
        return false;
    }

    if (!hasOnOff(feature, &b) || !b)
    {
        yCError(ARGUS_CAMERA) << "Feature" << feature << "does not support on/off mode; call hasOnOff() to check whether a specific feature supports it";
        return false;
    }

    bool val_to_get;
    switch (f)
    {
    case YARP_FEATURE_EXPOSURE:
        val_to_get = !(m_iAutoControlSettings->getAeLock());
        b = true;
        break;
    case YARP_FEATURE_WHITE_BALANCE:
        val_to_get = !(m_iAutoControlSettings->getAwbLock());
        b = true;
        break;
    default:
        yCError(ARGUS_CAMERA) << "Feature" << feature << "not supported!";
        return false;
    }

    if (b)
    {
        *isActive = val_to_get;
    }
    return b;
}

bool argusCameraDriver::hasAuto(int feature, bool* hasAuto)
{
    cameraFeature_id_t f;
    f = static_cast<cameraFeature_id_t>(feature);
    if (f < YARP_FEATURE_BRIGHTNESS || f > YARP_FEATURE_NUMBER_OF - 1)
    {
        return false;
    }

    *hasAuto = std::find(features_with_auto.begin(), features_with_auto.end(), f) != features_with_auto.end();

    return true;
}

bool argusCameraDriver::hasManual(int feature, bool* hasManual)
{
    return hasAuto(feature, hasManual);
}

bool argusCameraDriver::hasOnePush(int feature, bool* hasOnePush)
{
    return hasAuto(feature, hasOnePush);
}

bool argusCameraDriver::setMode(int feature, FeatureMode mode)
{
    bool b{false};
    if (!hasAuto(feature, &b) || !b)
    {
        yCError(ARGUS_CAMERA) << "Feature" << feature << "not supported!";
        return false;
    }

    switch (mode)
    {
    case MODE_AUTO:
        return setActive(feature, true);
    case MODE_MANUAL:
        return setActive(feature, false);
    case MODE_UNKNOWN:
        return false;
    default:
        return false;
    }
    return b;
}

bool argusCameraDriver::getMode(int feature, FeatureMode* mode)
{
    bool b{false};
    if (!hasAuto(feature, &b) || !b)
    {
        yCError(ARGUS_CAMERA) << "Feature" << feature << "not supported!";
        return false;
    }
    bool get_active{false};
    b = b && getActive(feature, &get_active);

    if (b)
    {
        *mode = get_active ? MODE_AUTO : MODE_MANUAL;
    }
    return b;
}

bool argusCameraDriver::setOnePush(int feature)
{
    bool b = false;
    if (!hasOnePush(feature, &b) || !b)
    {
        yCError(ARGUS_CAMERA) << "Feature" << feature << "doesn't have OnePush";
        return false;
    }

    b = b && setMode(feature, MODE_AUTO);
    b = b && setMode(feature, MODE_MANUAL);

    return b;
}
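
// One-push here means: briefly enable the auto algorithm (MODE_AUTO) and then immediately lock it again
// (MODE_MANUAL), so the feature keeps the last value computed by the auto control.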

bool argusCameraDriver::getImage(yarp::sig::ImageOf<yarp::sig::PixelRgb>& image)
{
    std::lock_guard<std::mutex> guard(m_mutex);

    NvBufSurface* nvBufSurface = nullptr;

    IFrameConsumer *iFrameConsumer = interface_cast<IFrameConsumer>(m_consumer);
    UniqueObj<Frame> frame(iFrameConsumer->acquireFrame());
    IFrame *iFrame = interface_cast<IFrame>(frame);

    if(iFrame)
    {
        auto img = iFrame->getImage();
        auto image2d(Argus::interface_cast<EGLStream::IImage2D>(img));
        auto width = image2d->getSize()[0];
        auto height = image2d->getSize()[1];

        NV::IImageNativeBuffer *iNativeBuffer = interface_cast<NV::IImageNativeBuffer>(img);
        if (!iNativeBuffer)
        {
            yCError(ARGUS_CAMERA) << "IImageNativeBuffer not supported by IImage";
        }

        double rotation = 0.0;
        if (m_rotation_with_crop)
        {
            // If m_rotation_with_crop = true, width and height are swapped and the image is stored in a buffer already rotated by m_rotation.
            // In this way, no further transformations need to be done with OpenCV.
            rotation = m_rotation;
        }

        int fd = iNativeBuffer->createNvBuffer(image2d->getSize(), NVBUF_COLOR_FORMAT_RGBA, NVBUF_LAYOUT_PITCH, rotationToNVRot.at(rotation));

        if (fd == -1)
        {
            yCError(ARGUS_CAMERA) << "Failed to create NvBuffer";
            return false;
        }

        if (NvBufSurfaceFromFd(fd, (void**)(&nvBufSurface)) == -1)
        {
            yCError(ARGUS_CAMERA) << "Cannot get NvBufSurface from fd";
            return false;
        }

        if (NvBufSurfaceMap(nvBufSurface, 0, 0, NVBUF_MAP_READ) != STATUS_OK)
        {
            yCError(ARGUS_CAMERA) << "Failed to map NvBufSurface";
            return false;
        }

        rgba_img = cv::Mat(height, width, CV_8UC4, nvBufSurface->surfaceList->mappedAddr.addr[0]);
#ifdef USE_CUDA
        gpu_rgba_img.upload(rgba_img);
        cv::cuda::cvtColor(gpu_rgba_img, gpu_bgr_img, cv::COLOR_RGBA2BGR);

        if (!m_rotation_with_crop && m_rotation != 0.0)
        {
            cv::Point2f img_center((gpu_bgr_img.cols - 1) / 2.0, (gpu_bgr_img.rows - 1) / 2.0);
            cv::Mat M = cv::getRotationMatrix2D(img_center, m_rotation, 1.0);
            // Workaround: with cv::cuda::warpAffine the source and destination images CANNOT be the same (otherwise the result is black frames)
            cv::cuda::GpuMat tmp;
            cv::cuda::warpAffine(gpu_bgr_img, tmp, M, gpu_bgr_img.size());
            gpu_bgr_img = std::move(tmp);
        }

        if (m_width != width || m_height != height)
        {
            cv::Size size(m_width, m_height);
            cv::cuda::resize(gpu_bgr_img, gpu_bgr_img, size);
        }
        gpu_bgr_img.download(bgr_img);
#else
        cv::cvtColor(rgba_img, bgr_img, cv::COLOR_RGBA2BGR);

        if (!m_rotation_with_crop && m_rotation != 0.0)
        {
            cv::Point2f img_center((bgr_img.cols - 1) / 2.0, (bgr_img.rows - 1) / 2.0);
            cv::Mat M = cv::getRotationMatrix2D(img_center, m_rotation, 1.0);
            cv::warpAffine(bgr_img, bgr_img, M, bgr_img.size());
        }

        if (m_width != width || m_height != height)
        {
            cv::Size size(m_width, m_height);
            cv::resize(bgr_img, bgr_img, size);
        }
#endif // USE_CUDA
        image.copy(yarp::cv::fromCvMat<yarp::sig::PixelRgb>(bgr_img));

        if (NvBufSurfaceUnMap(nvBufSurface, 0, 0) != STATUS_OK)
        {
            yCError(ARGUS_CAMERA) << "Failed to unmap NvBufSurface";
        }

        if (NvBufSurfaceDestroy(nvBufSurface) != STATUS_OK)
        {
            yCError(ARGUS_CAMERA) << "Failed to free the NvBufSurface";
        }
    }

    return true;
}
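
// Pipeline summary for the function above: acquire an EGLStream frame, copy it into an RGBA NvBuffer
// (already rotated during the NvBuffer conversion when m_rotation_with_crop is set), map it into CPU
// memory, convert RGBA->BGR and apply any residual rotation/resize with OpenCV (optionally on the GPU),
// then copy the result into the YARP image.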

int argusCameraDriver::height() const
{
    return m_height;
}

int argusCameraDriver::width() const
{
    return m_width;
}