FfmpegGrabber.cpp
/*
 * SPDX-FileCopyrightText: 2006-2021 Istituto Italiano di Tecnologia (IIT)
 * SPDX-FileCopyrightText: 2006-2010 RobotCub Consortium
 * SPDX-FileCopyrightText: 2006 Jonas Ruesch
 * SPDX-FileCopyrightText: 2006 Arjan Gijsberts
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include "FfmpegGrabber.h"
#include "ffmpeg_api.h"

#include <yarp/os/all.h>
#include <yarp/sig/all.h>
#include <yarp/os/Log.h>
#include <yarp/os/LogComponent.h>

#include <cstdio>

#define MAX_AUDIO_FRAME_SIZE 192000 // 1 second of 48kHz 32-bit audio

using namespace yarp::os;
using namespace yarp::dev;
using namespace yarp::sig;
using namespace yarp::sig::file;

namespace {
YARP_LOG_COMPONENT(FFMPEGGRABBER, "yarp.device.ffmpeg_grabber")
}

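// Per-stream decoder state: the codec context for one video or audio stream,
// the frames produced by the decoder, and the scratch buffers used when
// converting them to YARP images and sounds.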
31{
32public:
36 int index;
37
38 AVCodecContext *pCodecCtx;
39 AVCodec *pCodec;
40
41 // video buffers
42 AVFrame *pFrame;
43 AVFrame *pFrameRGB;
44 AVFrame *pAudio;
45 uint8_t *buffer;
46 int16_t *audioBuffer;
47 int16_t *audioBufferAt;
49
52 bytesDecoded(0),
54 index(-1),
55 pCodecCtx(nullptr),
56 pCodec(nullptr),
57 pFrame(nullptr),
58 pFrameRGB(nullptr),
59 pAudio(nullptr),
60 buffer(nullptr),
61 audioBuffer(nullptr),
62 audioBufferAt(nullptr),
64 {
65 }

    bool isFinished()
    {
        return frameFinished!=0;
    }

    int getIndex()
    {
        return index;
    }

    virtual ~DecoderState()
    {
        if (pCodecCtx!=nullptr) {
            avcodec_close(pCodecCtx);
        }
        if (audioBuffer!=nullptr) {
            delete [] audioBuffer;
        }
        if (buffer!=nullptr) {
            delete [] buffer;
        }
        if (pFrameRGB!=nullptr) {
            av_free(pFrameRGB);
        }
        if (pFrame!=nullptr) {
            av_free(pFrame);
        }
        if (pAudio!=nullptr) {
            av_free(pAudio);
        }
    }

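    // Find the first stream of the requested media type in the format context
    // and remember its index; returns the index, or -1 if none was found.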
    int getStream(AVFormatContext *pFormatCtx, AVMediaType code, const char *name)
    {
        // Find the first stream
        int videoStream=-1;
        for(int i=0; i<(int)(pFormatCtx->nb_streams); i++) {
            if(pFormatCtx->streams[i]->codecpar->codec_type==code) {
                videoStream=i;
                yCInfo(FFMPEGGRABBER, "First %s stream is stream #%d", name, i);
                break;
            }
        }
        if(videoStream==-1) {
            yCError(FFMPEGGRABBER, "Could not find %s stream", name);
        }
        index = videoStream;

        return index;
    }

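    // Look up the decoder that matches the selected stream and open it.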
    bool getCodec(AVFormatContext *pFormatCtx)
    {
        // Get a pointer to the codec context for the video stream
        pCodecCtx=pFormatCtx->streams[index]->codec;

        // Find the decoder for the video stream
        pCodec = avcodec_find_decoder(pFormatCtx->streams[index]->codecpar->codec_id);
        if(pCodec==nullptr) {
            yCError(FFMPEGGRABBER, "Codec not found");
            return false; // Codec not found
        }

        // Open codec
        if (avcodec_open2(pCodecCtx, pCodec, nullptr) < 0) {
            yCError(FFMPEGGRABBER, "Could not open codec");
            return false; // Could not open codec
        }

        return true;
    }

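    // Allocate the decode frame plus the RGB frame and byte buffer that are
    // the target of the colour-space conversion.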
    bool allocateImage()
    {
        // Allocate video frame
        pFrame=av_frame_alloc();

        // Allocate an AVFrame structure
        pFrameRGB=av_frame_alloc();
        if(pFrameRGB==nullptr) {
            yCError(FFMPEGGRABBER, "Could not allocate a frame");
            return false;
        }

        // Determine required buffer size and allocate buffer
        int numBytes=avpicture_get_size(AV_PIX_FMT_RGB24, pCodecCtx->width,
                                        pCodecCtx->height);
        buffer=new uint8_t[numBytes];

        // Assign appropriate parts of buffer to image planes in pFrameRGB
        avpicture_fill((AVPicture *)pFrameRGB, buffer, AV_PIX_FMT_RGB24,
                       pCodecCtx->width, pCodecCtx->height);
        return true;
    }

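    // Allocate the interleaved 16-bit scratch buffer used for decoded audio.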
    bool allocateSound()
    {
        audioBufferLen = MAX_AUDIO_FRAME_SIZE;
        audioBuffer = new int16_t[audioBufferLen];
        audioBufferAt = audioBuffer;
        yCInfo(FFMPEGGRABBER,
               "channels %d, sample_rate %d, frame_size %d",
               pCodecCtx->channels,
               pCodecCtx->sample_rate,
               pCodecCtx->frame_size);
        return true;
    }

    int getWidth()
    {
        return pCodecCtx->width;
    }

    int getHeight()
    {
        return pCodecCtx->height;
    }


    int getRate()
    {
        return pCodecCtx->sample_rate;
    }

    int getChannels()
    {
        return pCodecCtx->channels;
    }

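    // Decode one audio packet and build a yarp::sig::Sound with the resulting
    // number of samples and channels.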
    bool getAudio(AVPacket& packet,Sound& sound)
    {
        int ct = 0;
        int bytesRead = 0;
        int bytesWritten = 0;
        int gotFrame = 0;
        while (bytesRead<packet.size) {
            ct = audioBufferLen;
            // "tmp" covers the portion of the packet not yet consumed
            AVPacket tmp = packet;
            tmp.data += bytesRead;
            tmp.size -= bytesRead;
            if (!pAudio) {
                if (!(pAudio = av_frame_alloc())) {
                    yCFatal(FFMPEGGRABBER, "out of memory");
                }
            } else {
                av_frame_unref(pAudio);
            }
            int r = avcodec_decode_audio4(pCodecCtx, pAudio, &gotFrame, &tmp);
            ct = 0;
            if (gotFrame) {
                ct = av_samples_get_buffer_size(nullptr,
                                                pCodecCtx->channels,
                                                pAudio->nb_samples,
                                                pCodecCtx->sample_fmt,
                                                1);
            }
            if (r<0) {
                yCError(FFMPEGGRABBER, "error decoding audio");
                return false;
            }
            int num_channels = getChannels();
            int num_rate = getRate();
            //audioBufferAt += ct;
            //audioBufferLen += ct;
            bytesRead += r;
            bytesWritten += ct;
            if (bytesRead==packet.size) {
                int num_samples = bytesWritten/(sizeof(int16_t)*num_channels);
                sound.resize(num_samples,num_channels);
                sound.setFrequency(num_rate);

                int idx = 0;
                for (int i=0; i<num_samples; i++) {
                    for (int j=0; j<num_channels; j++) {
                        sound.set(audioBuffer[idx],i,j);
                        idx++;
                    }
                }
            }
        }
        return true;
    }

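    // Decode one video packet; once a complete frame is available, convert it
    // from the codec's native pixel format to RGB24 with libswscale.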
    bool getVideo(AVPacket& packet)
    {
        // Decode video frame
        avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished,
                              &packet);

        // Did we get a video frame?
        if(frameFinished) {
            // Convert the image from its native format to RGB
            int w = pCodecCtx->width;
            int h = pCodecCtx->height;
            static struct SwsContext *img_convert_ctx = nullptr;
            if (img_convert_ctx==nullptr) {
                img_convert_ctx = sws_getContext(w,h,
                                                 pCodecCtx->pix_fmt,
                                                 w, h, AV_PIX_FMT_RGB24,
                                                 //0,
                                                 //SWS_BILINEAR,
                                                 SWS_BICUBIC,
                                                 nullptr, nullptr, nullptr);
            }
            if (img_convert_ctx!=nullptr) {
                sws_scale(img_convert_ctx, ((AVPicture*)pFrame)->data,
                          ((AVPicture*)pFrame)->linesize, 0,
                          pCodecCtx->height,
                          ((AVPicture*)pFrameRGB)->data,
                          ((AVPicture*)pFrameRGB)->linesize);
            } else {
                yCFatal(FFMPEGGRABBER, "Software scaling not working");
            }
        }
        return frameFinished;
    }

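    // Wrap the most recently converted RGB frame in a FlexImage and copy it
    // into the caller's image.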
    bool getVideo(ImageOf<PixelRgb>& image)
    {
        if (frameFinished) {
            FlexImage flex;
            flex.setPixelCode(VOCAB_PIXEL_RGB);
            flex.setQuantum(pFrameRGB->linesize[0]);
            flex.setExternal(pFrameRGB->data[0],
                             pCodecCtx->width,
                             pCodecCtx->height);
            image.copy(flex);
        }

        return frameFinished;
    }

    int haveFrame()
    {
        return frameFinished;
    }
};

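// Bundles the video and audio decoder state that FfmpegGrabber keeps behind
// its opaque system_resource pointer.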
308{
309public:
312};
313

#define HELPER(x) (*((FfmpegHelper*)x))


const char *xstrdup(const char *str)
{
    if (str[0] == '-') {
        return nullptr;
    }
    return strdup(str);
}

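// Open a Video4Linux device (and, if requested, an audio capture device),
// translating the options found in the configuration into an AVDictionary
// that is handed to avformat_open_input().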
bool FfmpegGrabber::openV4L(yarp::os::Searchable& config,
                            AVFormatContext **ppFormatCtx,
                            AVFormatContext **ppFormatCtx2)
{
    bool audio = (ppFormatCtx==nullptr);
    AVDictionary*& formatParams =
        audio ? formatParamsAudio : formatParamsVideo;

    AVInputFormat *iformat;
    Value v;

    if (!audio) {
        //formatParams.prealloced_context = 1;
        v = config.check("v4ldevice",
                         Value("/dev/video0"),
                         "device name");
    } else {
        v = config.check("audio",
                         Value("/dev/dsp"),
                         "optional audio device name");
    }
    yCInfo(FFMPEGGRABBER, "Device %s",v.asString().c_str());

    m_uri = v.asString();

    if (audio) {
        av_dict_set_int(&formatParams,
                        "sample_rate",
                        config.check("audio_rate",
                                     Value(44100),
                                     "audio sample rate").asInt32(),
                        0);
        av_dict_set_int(&formatParams,
                        "channels",
                        config.check("channels",
                                     Value(1),
                                     "number of channels").asInt32(),
                        0);
    } else {
        if (config.check("time_base_num") && config.check("time_base_den")) {
            char buf[256];
            sprintf(buf, "%d/%d",
                    config.check("time_base_num",
                                 Value(1),
                                 "numerator of basic time unit").asInt32(),
                    config.check("time_base_den",
                                 Value(29),
                                 "denominator of basic time unit").asInt32());
            av_dict_set(&formatParams, "framerate", buf, 0);
        }

        if (config.check("channel")) {
            av_dict_set_int(&formatParams,
                            "channel",
                            config.check("channel",
                                         Value(0),
                                         "channel identifier").asInt32(),
                            0);
        }
        if (config.check("standard")) {
            av_dict_set(&formatParams,
                        "standard",
                        config.check("standard",
                                     Value("-"),
                                     "pal versus ntsc").asString().c_str(),
                        0);
        }
        av_dict_set_int(&formatParams,
                        "width",
                        config.check("width",
                                     Value(640),
                                     "width of image").asInt32(),
                        0);
        av_dict_set_int(&formatParams,
                        "height",
                        config.check("height",
                                     Value(480),
                                     "height of image").asInt32(),
                        0);
    }

    std::string videoDevice = (config.check("v4l1") ? "video4linux" : "video4linux2");
    iformat = av_find_input_format(audio ? "audio_device" : videoDevice.c_str());

    int result = avformat_open_input(audio ? ppFormatCtx2 : ppFormatCtx,
                                     v.asString().c_str(),
                                     iformat,
                                     &formatParams);

    bool ok = (result==0);
    if (!ok) {
        yCError(FFMPEGGRABBER, "%s: ffmpeg error %d", v.asString().c_str(), result);
    }

    if (ok) {
        if (ppFormatCtx!=nullptr) {
            if (config.check("audio",
                             "optional audio device")) {
                ok = openV4L(config,nullptr,ppFormatCtx2);
            }
        }
    }

    return ok;
}


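// Open a firewire (dv1394) digital video device.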
bool FfmpegGrabber::openFirewire(yarp::os::Searchable& config,
                                 AVFormatContext **ppFormatCtx)
{
    AVInputFormat *iformat;
    std::string devname = config.check("devname",
                                       Value("/dev/dv1394"),
                                       "firewire device name").asString();
    iformat = av_find_input_format("dv1394");
    yCInfo(FFMPEGGRABBER, "Checking for digital video in %s", devname.c_str());

    m_uri = devname;

    return avformat_open_input(ppFormatCtx, strdup(devname.c_str()), iformat, nullptr) == 0;
}

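// Open a media file by name.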
bool FfmpegGrabber::openFile(AVFormatContext **ppFormatCtx,
                             const char *fname)
{
    m_uri = fname;
    return avformat_open_input(ppFormatCtx, fname, nullptr, nullptr) == 0;
}

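// Open the device: parse the configuration, open the requested source (media
// file, Video4Linux, or firewire), locate the first video and audio streams,
// and set up a decoder for each of them.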
bool FfmpegGrabber::open(yarp::os::Searchable& config)
{
    std::string fname =
        config.check("source",
                     Value("default.avi"),
                     "media file to read from").asString();

    if (config.check("loop","media should loop (default)")) {
        shouldLoop = true;
    }

    if (config.check("noloop","media should not loop")) {
        shouldLoop = false;
    }

    imageSync = false;
    std::string sync =
        config.check("sync",
                     Value("image"),
                     "sync on image or audio (if have to choose)?").asString();
    imageSync = (sync=="image");

    needRateControl = true; // default for recorded media

    if (config.check("nodelay","media will play in simulated realtime unless this is present")) {
        needRateControl = false;
    }

    pace = config.check("pace",Value(1.0),
                        "simulated realtime multiplier factor (must be <1 right now)").asFloat64();

    // Register all formats and codecs
    av_register_all();
    avdevice_register_all();

    // Open video file
    if (config.check("v4l","if present, read from video4linux") ||
        config.check("v4l1","if present, read from video4linux") ||
        config.check("v4l2","if present, read from video4linux2")) {
        needRateControl = false; // reading from live media
        if (!openV4L(config,&pFormatCtx,&pFormatCtx2)) {
            yCError(FFMPEGGRABBER, "Could not open Video4Linux input");
            return false;
        }
    } else if (config.check("ieee1394","if present, read from firewire")) {
        needRateControl = false; // reading from live media
        if (!openFirewire(config,&pFormatCtx)) {
            yCError(FFMPEGGRABBER, "Could not open ieee1394 input");
            return false;
        }
    } else {
        if (!openFile(&pFormatCtx,fname.c_str())) {
            yCError(FFMPEGGRABBER, "Could not open media file %s", fname.c_str());
            return false; // Couldn't open file
        }
    }


    // Retrieve stream information
    if(avformat_find_stream_info(pFormatCtx, nullptr)<0) {
        yCError(FFMPEGGRABBER, "Could not find stream information in %s", m_uri.c_str());
        return false; // Couldn't find stream information
    }

    // Dump information about file onto standard error
    av_dump_format(pFormatCtx, 0, m_uri.c_str(), false);

    if (pFormatCtx2!=nullptr) {

        if(avformat_find_stream_info(pFormatCtx2, nullptr)<0) {
            yCError(FFMPEGGRABBER, "Could not find stream information in %s", m_uri.c_str());
            return false; // Couldn't find stream information
        }

        // Dump information about file onto standard error
        av_dump_format(pFormatCtx2, 0, m_uri.c_str(), false);
    }


    if (pFormatCtx2!=nullptr) {
        pAudioFormatCtx = pFormatCtx2;
    } else {
        pAudioFormatCtx = pFormatCtx;
    }

    yCAssert(FFMPEGGRABBER, system_resource == nullptr);
    system_resource = new FfmpegHelper;
    yCAssert(FFMPEGGRABBER, system_resource != nullptr);

    FfmpegHelper& helper = HELPER(system_resource);
    DecoderState& videoDecoder = helper.videoDecoder;
    DecoderState& audioDecoder = helper.audioDecoder;


    // Find the first video stream
    int videoStream = videoDecoder.getStream(pFormatCtx,
                                             AVMEDIA_TYPE_VIDEO,
                                             "video");
    // Find the first audio stream
    int audioStream = audioDecoder.getStream(pAudioFormatCtx,
                                             AVMEDIA_TYPE_AUDIO,
                                             "audio");

    if (videoStream==-1&&audioStream==-1) {
        return false;
    }

    _hasVideo = (videoStream!=-1);
    _hasAudio = (audioStream!=-1);

    bool ok = true;
    if (_hasVideo) {
        ok = ok && videoDecoder.getCodec(pFormatCtx);
    }
    if (_hasAudio) {
        ok = ok && audioDecoder.getCodec(pAudioFormatCtx);
    }
    if (!ok) {
        return false;
    }

    if (_hasVideo) {
        ok = ok && videoDecoder.allocateImage();
    }
    if (_hasAudio) {
        ok = ok && audioDecoder.allocateSound();
    }
    if (!ok) {
        return false;
    }

    if (_hasVideo) {
        m_w = videoDecoder.getWidth();
        m_h = videoDecoder.getHeight();
    }
    if (_hasAudio) {
        m_channels = audioDecoder.getChannels();
        m_rate = audioDecoder.getRate();
    }
    yCInfo(FFMPEGGRABBER,
           " video size %dx%d, audio %dHz with %d channels, %s sync",
           m_w,
           m_h,
           m_rate,
           m_channels,
           imageSync ? "image" : "audio");

    if (!(_hasVideo||_hasAudio)) {
        return false;
    }
    active = true;
    return true;
}

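// Release the dictionaries, format contexts, and decoder helper allocated in open().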
611{
612 if (formatParamsVideo) {
613 av_dict_free(&formatParamsVideo);
614 formatParamsVideo = nullptr;
615 }
616 if (formatParamsAudio) {
617 av_dict_free(&formatParamsAudio);
618 formatParamsAudio = nullptr;
619 }
620
621 if (!active) {
622 return false;
623 }
624
625 // Close the video file
626 if (pFormatCtx!=nullptr) {
627 avformat_close_input(&pFormatCtx);
628 }
629 if (pFormatCtx2!=nullptr) {
630 avformat_close_input(&pFormatCtx2);
631 }
632 if (system_resource!=nullptr) {
633 delete &HELPER(system_resource);
634 system_resource = nullptr;
635 }
636
637 active = false;
638 return true;
639}
640
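// Grab the next image; decoded audio, if any, is discarded.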
642{
643 if (!_hasVideo) {
644 return false;
645 }
646 Sound sound;
647 return getAudioVisual(image,sound);
648}
649
bool FfmpegGrabber::getSound(yarp::sig::Sound& sound, size_t min_number_of_samples, size_t max_number_of_samples, double max_samples_timeout_s)
{
    if (!_hasAudio) {
        return false;
    }
    ImageOf<PixelRgb> image;
    return getAudioVisual(image, sound);
}

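// Read packets until a synchronised image/sound pair is available, pacing
// playback against the stream timestamps when rate control is enabled and
// seeking back to the start of the media when looping is requested.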
bool FfmpegGrabber::getAudioVisual(yarp::sig::ImageOf<yarp::sig::PixelRgb>& image,
                                   yarp::sig::Sound& sound)
{

    FfmpegHelper& helper = HELPER(system_resource);
    DecoderState& videoDecoder = helper.videoDecoder;
    DecoderState& audioDecoder = helper.audioDecoder;

    bool tryAgain = false;
    bool triedAgain = false;

    do {

        bool gotAudio = false;
        bool gotVideo = false;
        if (startTime<0.5) {
            startTime = SystemClock::nowSystem();
        }
        double time_target = 0;
        while(av_read_frame(pFormatCtx, &packet)>=0) {
            // Is this a packet from the video stream?
            bool done = false;
            if (packet.stream_index==videoDecoder.getIndex()) {
                done = videoDecoder.getVideo(packet);
                image.resize(1,1);
                if (done) {
                    yCTrace(FFMPEGGRABBER, "got a video frame");
                    gotVideo = true;
                }
            }
            if (packet.stream_index==audioDecoder.getIndex()) {
                done = audioDecoder.getAudio(packet,sound);
                if (done) {
                    yCTrace(FFMPEGGRABBER, "got an audio frame");
                    gotAudio = true;
                }
            }
            AVRational& time_base = pFormatCtx->streams[packet.stream_index]->time_base;
            double rbase = av_q2d(time_base);

            time_target = packet.pts*rbase;

            av_free_packet(&packet);
            if (((imageSync?gotVideo:videoDecoder.haveFrame())||!_hasVideo)&&
                ((imageSync?1:gotAudio)||!_hasAudio)) {
                if (_hasVideo) {
                    videoDecoder.getVideo(image);
                } else {
                    image.resize(0,0);
                }
                if (needRateControl) {
                    double now = (SystemClock::nowSystem()-startTime)*pace;
                    double delay = time_target-now;
                    if (delay>0) {
                        SystemClock::delaySystem(delay);
                    }
                }

                if (!_hasAudio) {
                    sound.resize(0,0);
                }
                return true;
            }
        }

        tryAgain = !triedAgain;

        if (tryAgain) {
            if (!shouldLoop) {
                return false;
            }
            av_seek_frame(pFormatCtx,-1,0,AVSEEK_FLAG_BACKWARD);
            startTime = SystemClock::nowSystem();
            triedAgain = true;
        }
    } while (tryAgain);

    return false;
}