YARP
Yet Another Robot Platform
FfmpegWriter.cpp
Go to the documentation of this file.
1 /*
2  * Copyright (C) 2006-2021 Istituto Italiano di Tecnologia (IIT)
3  * Copyright (C) 2006-2010 RobotCub Consortium
4  * All rights reserved.
5  *
6  * This software may be modified and distributed under the terms of the
7  * BSD-3-Clause license. See the accompanying LICENSE file for details.
8  */
9 
10 /*
11  * Most of this file is from the output_example.c of ffmpeg -
12  * copyright/copypolicy statement follows --
13  */
14 
15 /*
16  * Libavformat API example: Output a media file in any supported
17  * libavformat format. The default codecs are used.
18  *
19  * Copyright (c) 2003 Fabrice Bellard
20  *
21  * Permission is hereby granted, free of charge, to any person obtaining a copy
22  * of this software and associated documentation files (the "Software"), to deal
23  * in the Software without restriction, including without limitation the rights
24  * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
25  * copies of the Software, and to permit persons to whom the Software is
26  * furnished to do so, subject to the following conditions:
27  *
28  * The above copyright notice and this permission notice shall be included in
29  * all copies or substantial portions of the Software.
30  *
31  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
32  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
33  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
34  * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
35  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
36  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
37  * THE SOFTWARE.
38  */
39 
40 
41 #include "FfmpegWriter.h"
42 #include "ffmpeg_api.h"
43 
44 #include <yarp/os/all.h>
45 #include <yarp/sig/all.h>
46 #include <yarp/os/Log.h>
47 #include <yarp/os/LogComponent.h>
48 
49 #include <cstdlib>
50 #include <cstring>
51 #include <cmath>
52 
53 #ifndef M_PI
54 #define M_PI 3.1415926535897931
55 #endif
56 
57 #define STREAM_FRAME_RATE 25 /* 25 images/s */
58 #define STREAM_PIX_FMT AV_PIX_FMT_YUV420P /* default pix_fmt */
59 #define STREAM_PIX_WORK AV_PIX_FMT_RGB24
60 
61 using namespace yarp::os;
62 using namespace yarp::dev;
63 using namespace yarp::sig;
64 using namespace yarp::sig::file;
65 
66 namespace {
67 YARP_LOG_COMPONENT(FFMPEGWRITER, "yarp.device.ffmpeg_writer")
68 }
69 
70 
71 /**************************************************************/
72 /* audio output */
73 
// Signal-generator state for the synthetic audio tone (see get_audio_frame()).
float t, tincr, tincr2;

// Interleaved 16-bit PCM staging buffer handed to the audio encoder.
// NOTE(review): the declarations below samples/audio_outbuf were lost in
// this copy of the file; restored from the symbol index.
int16_t *samples;
int samples_size;           // capacity of 'samples', in frames (per channel)
int samples_at;             // frames currently buffered in 'samples'
int samples_channels;       // channel count of the open audio codec
int audio_input_frame_size; // encoder input frame size, in samples

// Scratch buffer receiving each encoded audio packet.
uint8_t *audio_outbuf;
int audio_outbuf_size;
85 /*
86  * add an audio output stream
87  */
88 static AVStream *add_audio_stream(AVFormatContext *oc, AVCodecID codec_id)
89 {
90  AVCodecContext *c;
91  AVStream *st;
92 
93  st = avformat_new_stream(oc, NULL);
94  if (!st) {
95  yCFatal(FFMPEGWRITER, "Could not alloc stream");
96  }
97 
98  c = st->codec;
99  c->codec_id = codec_id;
100  c->codec_type = AVMEDIA_TYPE_AUDIO;
101 
102  /* put sample parameters */
103  c->bit_rate = 64000;
104  c->sample_rate = 44100;
105  c->channels = 2;
106  return st;
107 }
108 
109 static void open_audio(AVFormatContext *oc, AVStream *st)
110 {
111  yCInfo(FFMPEGWRITER, "Opening audio stream");
112  AVCodecContext *c;
113  AVCodec *codec;
114 
115  c = st->codec;
116 
117  /* find the audio encoder */
118  codec = avcodec_find_encoder(c->codec_id);
119  if (!codec) {
120  yCFatal(FFMPEGWRITER, "Audio codec not found");
121  }
122 
123  /* open it */
124  if (avcodec_open2(c, codec, nullptr) < 0) {
125  yCFatal(FFMPEGWRITER, "Could not open codec");
126  }
127 
128  /* init signal generator */
129  t = 0;
130  tincr = 2 * M_PI * 110.0 / c->sample_rate;
131  /* increment frequency by 110 Hz per second */
132  tincr2 = 2 * M_PI * 110.0 / c->sample_rate / c->sample_rate;
133 
134  audio_outbuf_size = 10000;
135  audio_outbuf = (uint8_t*)av_malloc(audio_outbuf_size);
136 
137  /* ugly hack for PCM codecs (will be removed ASAP with new PCM
138  support to compute the input frame size in samples */
139  if (c->frame_size <= 1) {
141  switch(st->codec->codec_id) {
142  case AV_CODEC_ID_PCM_S16LE:
143  case AV_CODEC_ID_PCM_S16BE:
144  case AV_CODEC_ID_PCM_U16LE:
145  case AV_CODEC_ID_PCM_U16BE:
147  break;
148  default:
149  break;
150  }
151  } else {
152  audio_input_frame_size = c->frame_size;
153  }
155  samples_at = 0;
156  samples_channels = c->channels;
157  samples = (int16_t*)av_malloc(samples_size*2*samples_channels);
158 
159 
160  yCFatal(FFMPEGWRITER,
161  "FRAME SIZE is %d / samples size is %d\n",
162  c->frame_size,
163  samples_size);
164 }
165 
166 /* prepare a 16 bit dummy audio frame of 'frame_size' samples and
167  'nb_channels' channels */
168 static void get_audio_frame(int16_t *samples, int frame_size, int nb_channels)
169 {
170  int j, i, v;
171  int16_t *q;
172 
173  q = samples;
174  for(j=0;j<frame_size;j++) {
175  v = (int)(sin(t) * 10000);
176  for(i = 0; i < nb_channels; i++)
177  *q++ = v;
178  t += tincr;
179  tincr += tincr2;
180  }
181 }
182 
183 static void make_audio_frame(AVCodecContext *c, AVFrame * &frame,
184  void *&samples) {
185  frame = av_frame_alloc();
186  if (!frame) {
187  yCFatal(FFMPEGWRITER, "Could not allocate audio frame");
188  }
189  frame->nb_samples = c->frame_size;
190  frame->format = c->sample_fmt;
191  frame->channel_layout = c->channel_layout;
192  int buffer_size = av_samples_get_buffer_size(nullptr, c->channels,
193  c->frame_size,
194  c->sample_fmt, 0);
195  if (buffer_size < 0) {
196  yCError(FFMPEGWRITER, "Could not get sample buffer size");
197  }
198  samples = av_malloc(buffer_size);
199  if (!samples) {
200  yCFatal(FFMPEGWRITER,
201  "Could not allocate %d bytes for samples buffer",
202  buffer_size);
203  }
204  /* setup the data pointers in the AVFrame */
205  int ret = avcodec_fill_audio_frame(frame, c->channels, c->sample_fmt,
206  (const uint8_t*)samples, buffer_size, 0);
207  if (ret < 0) {
208  yCFatal(FFMPEGWRITER, "Could not setup audio frame");
209  }
210 }
211 
212 static void write_audio_frame(AVFormatContext *oc, AVStream *st)
213 {
214  AVCodecContext *c;
215  AVPacket pkt;
216  av_init_packet(&pkt);
217 
218  c = st->codec;
219 
221 
222  AVFrame *frame;
223  void *samples;
224  make_audio_frame(c,frame,samples);
225  AVPacket tmp;
226  int got_packet = 0;
227  av_init_packet(&tmp);
228  tmp.data = audio_outbuf;
229  tmp.size = audio_outbuf_size;
230  pkt.size = avcodec_encode_audio2(c, &tmp, frame, &got_packet);
231  if (tmp.side_data_elems > 0) {
232  for (int i = 0; i < tmp.side_data_elems; i++) {
233  av_free(tmp.side_data[i].data);
234  }
235  av_freep(&tmp.side_data);
236  tmp.side_data_elems = 0;
237  }
238  av_freep(&samples);
239  av_frame_free(&frame);
240 
241  pkt.pts= av_rescale_q(c->coded_frame->pts, c->time_base, st->time_base);
242  pkt.flags |= AV_PKT_FLAG_KEY;
243  pkt.stream_index= st->index;
244  pkt.data= audio_outbuf;
245 
246  /* write the compressed frame in the media file */
247  if (av_write_frame(oc, &pkt) != 0) {
248  yCFatal(FFMPEGWRITER, "Error while writing audio frame");
249  } else {
250  yCInfo(FFMPEGWRITER, "Wrote some audio");
251  }
252 }
253 
/*
 * Encode and write the audio contained in 'snd'. Incoming samples are
 * accumulated in the global 'samples' buffer; each time a full codec
 * frame (samples_size frames) has been collected, it is encoded and
 * written, and leftovers remain buffered for the next call.
 */
static void write_audio_frame(AVFormatContext *oc, AVStream *st, Sound& snd)
{
    yCInfo(FFMPEGWRITER, "Preparing to write audio (%d left over)", samples_at);
    AVCodecContext *c;
    int key = 1;   // mark only the first packet of this call as a key frame

    c = st->codec;

    size_t at = 0;   // index of the next unread frame in 'snd'
    while (at<snd.getSamples()) {

        // Copy as many frames as fit into the staging buffer.
        int avail = samples_size - samples_at;
        int remain = snd.getSamples() - at;
        int chan = snd.getChannels();
        if (remain<avail) { avail = remain; }
        for (int i=0; i<avail; i++) {
            int offset = samples_at*samples_channels;
            // j%chan replicates channels when the codec expects more
            // channels than the sound provides.
            for (int j=0; j<samples_channels; j++) {
                samples[offset+j] = snd.get(at,j%chan);
            }
            samples_at++;
            at++;
        }
        avail = samples_size - samples_at;

        // A full codec frame is buffered: encode and write it out.
        if (avail==0) {
            AVPacket pkt;
            av_init_packet(&pkt);


            AVFrame *frame;
            void *samples;   // shadows the global; owned by make_audio_frame
            make_audio_frame(c,frame,samples);
            AVPacket tmp;
            int got_packet = 0;
            av_init_packet(&tmp);
            tmp.data = audio_outbuf;
            tmp.size = audio_outbuf_size;
            // NOTE(review): pkt.size is assigned the *return code* of
            // avcodec_encode_audio2; this relies on the compatibility
            // layer in ffmpeg_api.h — confirm against that shim.
            pkt.size = avcodec_encode_audio2(c, &tmp, frame, &got_packet);
            // Discard encoder side data; only raw packet bytes are kept.
            if (tmp.side_data_elems > 0) {
                for (int i = 0; i < tmp.side_data_elems; i++) {
                    av_free(tmp.side_data[i].data);
                }
                av_freep(&tmp.side_data);
                tmp.side_data_elems = 0;
            }
            av_freep(&samples);
            av_frame_free(&frame);

            // Rescale the codec timestamp into the stream's time base.
            pkt.pts= av_rescale_q(c->coded_frame->pts, c->time_base,
                                  st->time_base);
            pkt.dts = pkt.pts;
            yCTrace(FFMPEGWRITER, "(%d)", pkt.size);
            if (key) {
                pkt.flags |= AV_PKT_FLAG_KEY;
                key = 0;
            }
            pkt.stream_index= st->index;
            pkt.data = audio_outbuf;
            pkt.duration = 0;


            /* write the compressed frame in the media file */
            if (av_write_frame(oc, &pkt) != 0) {
                yCFatal(FFMPEGWRITER, "Error while writing audio frame");
            }
            samples_at = 0;   // staging buffer consumed
        }
    }
    yCInfo(FFMPEGWRITER, " wrote audio\n");
}
325 
/* Close the audio codec and release the buffers allocated by open_audio(). */
static void close_audio(AVFormatContext *oc, AVStream *st)
{
    avcodec_close(st->codec);

    av_free(samples);
    av_free(audio_outbuf);
}
333 
334 /**************************************************************/
335 /* video output */
336 
337 
338 /* add a video output stream */
339 static AVStream *add_video_stream(AVFormatContext *oc, AVCodecID codec_id,
340  int w, int h, int framerate)
341 {
342  AVCodecContext *c;
343  AVStream *st;
344 
345  st = avformat_new_stream(oc, NULL);
346  if (!st) {
347  yCFatal(FFMPEGWRITER, "Could not alloc stream");
348  }
349 
350  c = st->codec;
351  c->codec_id = codec_id;
352  c->codec_type = AVMEDIA_TYPE_VIDEO;
353 
354  /* put sample parameters */
355  c->bit_rate = 400000;
356  /* resolution must be a multiple of two */
357  c->width = w;
358  c->height = h;
359  /* time base: this is the fundamental unit of time (in seconds) in terms
360  of which frame timestamps are represented. for fixed-fps content,
361  timebase should be 1/framerate and timestamp increments should be
362  identically 1. */
363  c->time_base.den = framerate;
364  c->time_base.num = 1;
365  c->gop_size = 12; /* emit one intra frame every twelve frames at most */
366  c->pix_fmt = STREAM_PIX_FMT;
367  if (c->codec_id == AV_CODEC_ID_MPEG2VIDEO) {
368  /* just for testing, we also add B frames */
369  c->max_b_frames = 2;
370  }
371  if (c->codec_id == AV_CODEC_ID_MPEG1VIDEO){
372  /* needed to avoid using macroblocks in which some coeffs overflow
373  this doesnt happen with normal video, it just happens here as the
374  motion of the chroma plane doesnt match the luma plane */
375  c->mb_decision=2;
376  }
377  // some formats want stream headers to be separate
378  if(!strcmp(oc->oformat->name, "mp4") || !strcmp(oc->oformat->name, "mov") || !strcmp(oc->oformat->name, "3gp"))
379  c->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
380 
381 
382  return st;
383 }
384 
385 static AVFrame *alloc_picture(int pix_fmt, int width, int height)
386 {
387  AVFrame *picture;
388  uint8_t *picture_buf;
389  int size;
390 
391  picture = av_frame_alloc();
392  if (!picture)
393  return nullptr;
394  size = avpicture_get_size((AVPixelFormat)pix_fmt, width, height);
395  picture_buf = (uint8_t*)av_malloc(size);
396  if (!picture_buf) {
397  av_free(picture);
398  return nullptr;
399  }
400  avpicture_fill((AVPicture *)picture, picture_buf,
401  (AVPixelFormat)pix_fmt, width, height);
402  return picture;
403 }
404 
/*
 * Open the encoder attached to the video stream and allocate the encoded
 * output buffer plus the raw picture(s) used while encoding.
 * All failures are fatal.
 */
void FfmpegWriter::open_video(AVFormatContext *oc, AVStream *st)
{
    yCInfo(FFMPEGWRITER, "Opening video stream");
    AVCodec *codec;
    AVCodecContext *c;

    c = st->codec;

    /* find the video encoder */
    codec = avcodec_find_encoder(c->codec_id);
    if (!codec) {
        yCFatal(FFMPEGWRITER, "Video codec not found");
    }

    /* open the codec */
    if (avcodec_open2(c, codec, nullptr) < 0) {
        yCFatal(FFMPEGWRITER, "Could not open codec");
    }

    video_outbuf = nullptr;
    /* allocate output buffer */
    /* XXX: API change will be done */
    /* buffers passed into lav* can be allocated any way you prefer,
       as long as they're aligned enough for the architecture, and
       they're freed appropriately (such as using av_free for buffers
       allocated with av_malloc) */
    video_outbuf_size = 200000;  // fixed upper bound for one encoded frame
    video_outbuf = (uint8_t*)av_malloc(video_outbuf_size);

    /* allocate the encoded raw picture */
    picture = alloc_picture(c->pix_fmt, c->width, c->height);
    if (!picture) {
        yCFatal(FFMPEGWRITER, "Could not allocate picture");
    }

    /* if the encoder's pixel format is not RGB24, an intermediate RGB24
       picture is needed too: frames are filled as RGB24 into tmp_picture
       and then converted to the encoder's format (see write_video_frame) */
    tmp_picture = nullptr;
    if (c->pix_fmt != AV_PIX_FMT_RGB24) {
        tmp_picture = alloc_picture(AV_PIX_FMT_RGB24, c->width, c->height);
        if (!tmp_picture) {
            yCFatal(FFMPEGWRITER, "Could not allocate temporary picture");
        }
    }
}
451 
452 static void fill_rgb_image(AVFrame *pict, int frame_index, int width,
453  int height, ImageOf<PixelRgb>& img)
454 {
455  int x, y;
456 
457  for(y=0;y<height;y++) {
458  for(x=0;x<width;x++) {
459  int base = y*(width*3);
460  pict->data[0][base + x*3] = img.safePixel(x,y).r;
461  pict->data[0][base +x*3+1] = img.safePixel(x,y).g;
462  pict->data[0][base +x*3+2] = img.safePixel(x,y).b;
463  }
464  }
465 }
466 
467 
/*
 * Encode one YARP image and write the resulting packet to the muxer.
 * If the encoder's pixel format is not RGB24 the image is first filled
 * into tmp_picture (RGB24) and converted; otherwise it is filled into
 * 'picture' directly. A zero-size result means the encoder buffered
 * the frame internally (e.g. B-frame reordering) and nothing is written.
 */
void FfmpegWriter::write_video_frame(AVFormatContext *oc, AVStream *st,
                                     ImageOf<PixelRgb>& img)
{
    int out_size, ret;
    AVCodecContext *c;

    c = st->codec;

    if (c->pix_fmt != AV_PIX_FMT_RGB24) {
        fill_rgb_image(tmp_picture, frame_count, c->width, c->height, img);
        stable_img_convert((AVPicture *)picture, c->pix_fmt,
                           (AVPicture *)tmp_picture, AV_PIX_FMT_RGB24,
                           c->width, c->height);
    } else {
        fill_rgb_image(picture, frame_count, c->width, c->height, img);
    }

    /* encode the image */
    AVPacket tmp;
    int got_packet = 0;
    av_init_packet(&tmp);
    tmp.data = video_outbuf;
    tmp.size = video_outbuf_size;
    // NOTE(review): out_size is the *return code* of avcodec_encode_video2;
    // treating it as a byte count relies on the compatibility layer in
    // ffmpeg_api.h — confirm against that shim.
    out_size = avcodec_encode_video2(c, &tmp, picture, &got_packet);
    // Discard any side data the encoder attached; only raw bytes are kept.
    if (tmp.side_data_elems > 0) {
        for (int i = 0; i < tmp.side_data_elems; i++) {
            av_free(tmp.side_data[i].data);
        }
        av_freep(&tmp.side_data);
        tmp.side_data_elems = 0;
    }
    /* if zero size, it means the image was buffered */
    if (out_size > 0) {
        AVPacket pkt;
        av_init_packet(&pkt);

        // Rescale the codec timestamp into the stream's time base.
        pkt.pts= av_rescale_q(c->coded_frame->pts, c->time_base, st->time_base);
        if(c->coded_frame->key_frame)
            pkt.flags |= AV_PKT_FLAG_KEY;
        pkt.stream_index= st->index;
        pkt.data= video_outbuf;
        pkt.size= out_size;

        /*
        static int x = 0;
        yCInfo(FFMPEGWRITER,
               "%ld / %ld : %ld / %ld --> %d\n",
               (long int) c->time_base.num,
               (long int) c->time_base.den,
               (long int) st->time_base.num,
               (long int) st->time_base.den,
               x);
        pkt.pts = x;
        x++;
        */

        /* write the compressed frame in the media file */
        ret = av_write_frame(oc, &pkt);
    } else {
        ret = 0;
    }

    if (ret != 0) {
        yCFatal(FFMPEGWRITER, "Error while writing video frame");
    }
    frame_count++;
}
535 
536 void FfmpegWriter::close_video(AVFormatContext *oc, AVStream *st)
537 {
538  avcodec_close(st->codec);
539  av_free(picture->data[0]);
540  av_free(picture);
541  if (tmp_picture) {
542  av_free(tmp_picture->data[0]);
543  av_free(tmp_picture);
544  }
545  av_free(video_outbuf);
546 }
547 
548 
549 
550 
551 /**************************************************************/
552 /* YARP adaptation */
553 
555  yCTrace(FFMPEGWRITER,
556  "ffmpeg libavcodec version number %d.%d.%d",
557  LIBAVCODEC_VERSION_MAJOR,
558  LIBAVCODEC_VERSION_MINOR,
559  LIBAVCODEC_VERSION_MICRO);
560 
561  ready = false;
562  savedConfig.fromString(config.toString());
563 
564  // open if possible, if not will do it later
565  return delayedOpen(config);
566 }
567 
568 
569 bool FfmpegWriter::delayedOpen(yarp::os::Searchable & config) {
570  yCTrace(FFMPEGWRITER, "DELAYED OPEN %s", config.toString().c_str());
571 
572  int w = config.check("width",Value(0),
573  "width of image (must be even)").asInt32();
574  int h = config.check("height",Value(0),
575  "height of image (must be even)").asInt32();
576  int framerate = config.check("framerate",Value(30),
577  "baseline images per second").asInt32();
578 
579  int sample_rate = 0;
580  int channels = 0;
581  bool audio = config.check("audio","should audio be included");
582  if (audio) {
583  sample_rate = config.check("sample_rate",Value(44100),
584  "audio samples per second").asInt32();
585  channels = config.check("channels",Value(1),
586  "audio samples per second").asInt32();
587  }
588 
589  filename = config.check("out",Value("movie.avi"),
590  "name of movie to write").asString();
591 
592  delayed = false;
593  if (w<=0||h<=0) {
594  delayed = true;
595  return true;
596  }
597  ready = true;
598 
599  /* initialize libavcodec, and register all codecs and formats */
600  av_register_all();
601 
602  /* auto detect the output format from the name. default is
603  mpeg. */
604  fmt = av_guess_format(nullptr, filename.c_str(), nullptr);
605  if (!fmt) {
606  yCInfo(FFMPEGWRITER, "Could not deduce output format from file extension: using MPEG.");
607  fmt = av_guess_format("mpeg", nullptr, nullptr);
608  }
609  if (!fmt) {
610  yCFatal(FFMPEGWRITER, "Could not find suitable output format");
611  }
612 
613  /* allocate the output media context */
614  oc = avformat_alloc_context();
615  if (!oc) {
616  yCFatal(FFMPEGWRITER, "Memory error");
617  }
618  oc->oformat = fmt;
619  snprintf(oc->filename, sizeof(oc->filename), "%s", filename.c_str());
620 
621  /* add the audio and video streams using the default format codecs
622  and initialize the codecs */
623  video_st = nullptr;
624  audio_st = nullptr;
625  if (fmt->video_codec != AV_CODEC_ID_NONE) {
626  video_st = add_video_stream(oc, fmt->video_codec, w, h, framerate);
627  }
628 
629  if (audio) {
630  yCInfo(FFMPEGWRITER, "Adding audio %dx%d", sample_rate, channels);
631  if (fmt->audio_codec != AV_CODEC_ID_NONE) {
632  audio_st = add_audio_stream(oc, fmt->audio_codec);
633  if (audio_st!=nullptr) {
634  AVCodecContext *c = audio_st->codec;
635  c->sample_rate = sample_rate;
636  c->channels = channels;
637  } else {
638  yCError(FFMPEGWRITER, "Failed to add audio");
639  }
640  } else {
641  yCWarning(FFMPEGWRITER, "No audio codec available");
642  }
643  } else {
644  yCInfo(FFMPEGWRITER, "Skipping audio");
645  }
646 
647  av_dump_format(oc, 0, filename.c_str(), 1);
648 
649  /* now that all the parameters are set, we can open the audio and
650  video codecs and allocate the necessary encode buffers */
651  if (video_st) {
652  open_video(oc, video_st);
653  }
654  if (audio_st) {
655  open_audio(oc, audio_st);
656  }
657 
658  /* open the output file, if needed */
659  if (!(fmt->flags & AVFMT_NOFILE)) {
660  if (avio_open(&oc->pb, filename.c_str(), AVIO_FLAG_WRITE) < 0) {
661  yCFatal(FFMPEGWRITER, "Could not open '%s'", filename.c_str());
662  }
663  }
664 
665  /* write the stream header, if any */
666  avformat_write_header(oc, NULL);
667 
668  return true;
669 }
670 
672  if (!isOk()) { return false; }
673 
674  /* close each codec */
675  if (video_st)
676  close_video(oc, video_st);
677  if (audio_st)
678  close_audio(oc, audio_st);
679 
680  /* write the trailer, if any */
681  av_write_trailer(oc);
682 
683  /* free the streams */
684  for(unsigned int i = 0; i < oc->nb_streams; i++) {
685  av_freep(&oc->streams[i]->codec);
686  av_freep(&oc->streams[i]);
687  }
688 
689  if (!(fmt->flags & AVFMT_NOFILE)) {
690  /* close the output file */
691  avio_close(oc->pb);
692  }
693 
694  /* free the stream */
695  av_free(oc);
696 
697  yCInfo(FFMPEGWRITER, "Closed media file %s", filename.c_str());
698 
699  return true;
700 }
701 
703  if (delayed) {
704  savedConfig.put("width",Value((int)image.width()));
705  savedConfig.put("height",Value((int)image.height()));
706  }
707  if (!isOk()) { return false; }
708 
709  /* compute current audio and video time */
710  if (audio_st)
711  audio_pts = (double)av_stream_get_end_pts(audio_st) * audio_st->time_base.num / audio_st->time_base.den;
712  else
713  audio_pts = 0.0;
714 
715  if (video_st)
716  video_pts = (double)av_stream_get_end_pts(video_st) * video_st->time_base.num / video_st->time_base.den;
717  else
718  video_pts = 0.0;
719 
720  if (!(audio_st||video_st))
721  return false;
722 
723  /* write interleaved audio and video frames */
724  if (!video_st || (video_st && audio_st && audio_pts < video_pts)) {
725  write_audio_frame(oc, audio_st);
726  } else {
727  write_video_frame(oc, video_st, image);
728  }
729 
730  return true;
731 }
732 
733 
734 
736  yarp::sig::Sound& sound) {
737  if (delayed) {
738  savedConfig.put("width",Value((int)image.width()));
739  savedConfig.put("height",Value((int)image.height()));
740  savedConfig.put("sample_rate",Value((int)sound.getFrequency()));
741  savedConfig.put("channels",Value((int)sound.getChannels()));
742  savedConfig.put("audio",Value(1));
743  }
744  if (!isOk()) { return false; }
745 
746  /* write interleaved audio and video frames */
747  write_video_frame(oc, video_st, image);
748  write_audio_frame(oc, audio_st, sound);
749  return true;
750 }
int samples_size
float tincr
static AVStream * add_video_stream(AVFormatContext *oc, AVCodecID codec_id, int w, int h, int framerate)
static void write_audio_frame(AVFormatContext *oc, AVStream *st)
static void close_audio(AVFormatContext *oc, AVStream *st)
static void get_audio_frame(int16_t *samples, int frame_size, int nb_channels)
static void make_audio_frame(AVCodecContext *c, AVFrame *&frame, void *&samples)
int audio_input_frame_size
int samples_at
static AVFrame * alloc_picture(int pix_fmt, int width, int height)
static void fill_rgb_image(AVFrame *pict, int frame_index, int width, int height, ImageOf< PixelRgb > &img)
int audio_outbuf_size
static AVStream * add_audio_stream(AVFormatContext *oc, AVCodecID codec_id)
uint8_t * audio_outbuf
float tincr2
#define STREAM_PIX_FMT
#define M_PI
static void open_audio(AVFormatContext *oc, AVStream *st)
int16_t * samples
float t
int samples_channels
bool ret
bool open(yarp::os::Searchable &config) override
Open the DeviceDriver.
bool putImage(yarp::sig::ImageOf< yarp::sig::PixelRgb > &image) override
Write an image to the device.
bool close() override
Close the DeviceDriver.
virtual bool putAudioVisual(yarp::sig::ImageOf< yarp::sig::PixelRgb > &image, yarp::sig::Sound &sound) override
Write an image and sound.
A base class for nested structures that can be searched.
Definition: Searchable.h:69
virtual bool check(const std::string &key) const =0
Check if there exists a property of the given name.
virtual std::string toString() const =0
Return a standard text representation of the content of the object.
A single value (typically within a Bottle).
Definition: Value.h:47
T & safePixel(size_t x, size_t y)
Definition: Image.h:693
size_t width() const
Gets width of image in pixels.
Definition: Image.h:169
size_t height() const
Gets height of image in pixels.
Definition: Image.h:175
Class for storing sounds.
Definition: Sound.h:28
size_t getChannels() const
Get the number of channels of the sound.
Definition: Sound.cpp:409
int getFrequency() const
Get the frequency of the sound (i.e.
Definition: Sound.cpp:221
audio_sample get(size_t sample, size_t channel=0) const
Definition: Sound.cpp:174
size_t getSamples() const
Get the number of samples contained in the sound.
Definition: Sound.cpp:404
int stable_img_convert(AVPicture *dst, int dst_pix_fmt, const AVPicture *src, int src_pix_fmt, int src_width, int src_height)
Definition: ffmpeg_api.cpp:13
#define yCInfo(component,...)
Definition: LogComponent.h:135
#define yCError(component,...)
Definition: LogComponent.h:157
#define yCTrace(component,...)
Definition: LogComponent.h:88
#define yCWarning(component,...)
Definition: LogComponent.h:146
#define YARP_LOG_COMPONENT(name,...)
Definition: LogComponent.h:80
#define yCFatal(component,...)
Definition: LogComponent.h:168
An interface for the device drivers.
An interface to the operating system, including Port based communication.
Image file operations.
Definition: ImageFile.h:24
Signal processing.
Definition: Image.h:25
unsigned char g
Definition: Image.h:469
unsigned char r
Definition: Image.h:468
unsigned char b
Definition: Image.h:470