YARP
Yet Another Robot Platform
H264Decoder.cpp
/*
 * SPDX-FileCopyrightText: 2006-2021 Istituto Italiano di Tecnologia (IIT)
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include "H264Decoder.h"
#include "H264LogComponent.h"

#include <yarp/os/LogStream.h>


#include <gst/gst.h>
#include <glib.h>

#include <gst/app/gstappsink.h>
#include <cstdio>
#include <cstring>
#include <mutex>

//#define debug_time 1

#ifdef debug_time
    #include <yarp/os/Time.h>
    #define DBG_TIME_PERIOD_PRINTS 10 //10 sec
#endif

using namespace yarp::sig;
using namespace yarp::os;

struct data_for_gst_callback
{
    data_for_gst_callback() = default;

    std::mutex *m{nullptr};
    ImageOf<PixelRgb> *img{nullptr};
    bool isNew{false};
    Semaphore *s{nullptr};
    bool isReq{false};
};
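
// data_for_gst_callback is the only state shared between the GStreamer
// streaming thread (which runs new_sample() below) and the YARP-side reader.
// A consumer is expected to use it roughly as sketched here; this is an
// illustrative snippet, not code from this file (readFrame is hypothetical):
/*
    void readFrame(data_for_gst_callback &d, ImageOf<PixelRgb> &out)
    {
        d.m->lock();            // protects img and isNew against new_sample()
        if (d.isNew)
        {
            out.copy(*d.img);   // take a private copy of the decoded frame
            d.isNew = false;
        }
        d.m->unlock();
    }
*/
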
//-------------------------------------------------------------------
//--------------- CALLBACK FUNCTIONS -------------------------------
//-------------------------------------------------------------------

/*
static GstBusSyncReply bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
    GstElement *pipeline = (GstElement *) data;

    switch (GST_MESSAGE_TYPE (msg))
    {

        case GST_MESSAGE_EOS:
        {
            yCTrace(H264CARRIER, "End of stream");
            gst_element_set_state (pipeline, GST_STATE_NULL);
            // g_main_loop_quit (loop);
            break;
        }

        case GST_MESSAGE_ERROR:
        {
            gchar *debug;
            GError *error;

            gst_message_parse_error (msg, &error, &debug);
            g_free (debug);

            yCError(H264CARRIER, "GSTREAMER: Error: %s", error->message);
            g_error_free (error);

            gst_element_set_state (pipeline, GST_STATE_NULL);
            break;
        }
        default:
        {
            yCTrace(H264CARRIER, "GSTREAMER: received message of type %d", GST_MESSAGE_TYPE (msg));
            break;
        }
    }

    return GST_BUS_PASS;
}
*/
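
// The bus handler above is currently disabled. If it were re-enabled, it would
// be attached as a synchronous handler, roughly as in this sketch (stop()
// below already clears any sync handler installed on the pipeline bus):
/*
    GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
    gst_bus_set_sync_handler(bus, bus_call, pipeline, nullptr);
    gst_object_unref(bus);
*/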

static gboolean link_videosrc2nextWithCaps(GstElement *e1, GstElement *e2)
{
    gboolean link_ok;
    GstCaps *caps;

/*
    // "application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)H264, payload=(int)96, a-framerate=(string)30"
    caps = gst_caps_new_simple("application/x-rtp",
                               "media", G_TYPE_STRING, "video",
                               "clock-rate", G_TYPE_INT, 90000,
                               "encoding-name", G_TYPE_STRING, "H264",
                               "payload", G_TYPE_INT, 96,
                               "a-framerate", G_TYPE_STRING, "30",
                               NULL);
*/
    // "application/x-rtp, media=(string)video, encoding-name=(string)H264, payload=(int)96"
    caps = gst_caps_new_simple("application/x-rtp",
                               "media", G_TYPE_STRING, "video",
                               "encoding-name", G_TYPE_STRING, "H264",
                               "payload", G_TYPE_INT, 96,
                               NULL);


    link_ok = gst_element_link_filtered(e1, e2, caps);
    if(!link_ok)
    {
        yCError(H264CARRIER) << "H264Decoder-GSTREAMER: link_videosrc2nextWithCaps failed";
    }
    else
    {
        yCTrace(H264CARRIER) << "H264Decoder-GSTREAMER: link_videosrc2nextWithCaps OK";
    }

    return (link_ok);
}
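
// For reference, the filtered link above is equivalent to building the caps
// from their string form; a sketch (not used by this file):
/*
    GstCaps *caps = gst_caps_from_string(
        "application/x-rtp, media=(string)video, encoding-name=(string)H264, payload=(int)96");
    gboolean link_ok = gst_element_link_filtered(e1, e2, caps);
    gst_caps_unref(caps);   // gst_element_link_filtered() does not take ownership
*/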


static gboolean link_convert2next(GstElement *e1, GstElement *e2)
{
    gboolean link_ok;
    GstCaps *caps;

    caps = gst_caps_new_simple("video/x-raw",
                               "format", G_TYPE_STRING, "RGB",
                               NULL);


    link_ok = gst_element_link_filtered(e1, e2, caps);

    if(!link_ok)
    {
        yCError(H264CARRIER) << "H264Decoder-GSTREAMER: link_convert2next failed";
    }
    else
    {
        yCTrace(H264CARRIER) << "H264Decoder-GSTREAMER: link_convert2next OK";
    }

    return (link_ok);
}
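
// The RGB caps above make the appsink deliver packed 24-bit RGB, the layout
// expected by ImageOf<PixelRgb>. An equivalent approach (sketch only, assuming
// `sink` is the appsink created in H264DecoderHelper below) would be to set
// the caps directly on the appsink instead of filtering the convert->sink link:
/*
    GstCaps *rgb = gst_caps_new_simple("video/x-raw", "format", G_TYPE_STRING, "RGB", NULL);
    gst_app_sink_set_caps(GST_APP_SINK(sink), rgb);
    gst_caps_unref(rgb);
*/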


GstFlowReturn new_sample(GstAppSink *appsink, gpointer user_data)
{
#ifdef debug_time
    static bool isFirst = true;
    double start_time = Time::now();
    double end_time=0;

    static double last_call;
    static double sumOf_timeBetweenCalls = 0;
    static double sumOf_timeOfNewSampleFunc = 0;
    static uint32_t count=0;
    #define MAX_COUNT 100


    if(!isFirst)
        sumOf_timeBetweenCalls+=(start_time -last_call);

    last_call = start_time;


#endif

    auto* dec_data = (data_for_gst_callback*)user_data;

    GstSample *sample = nullptr;
    g_signal_emit_by_name (appsink, "pull-sample", &sample, NULL);
    if(!sample)
    {
        yCWarning(H264CARRIER, "GSTREAMER: could not take a sample!");
        return GST_FLOW_OK;
    }

    GstCaps *caps = gst_sample_get_caps (sample);
    if(!caps)
    {
        yCError(H264CARRIER, "GSTREAMER: could not get caps of sample!");
        return GST_FLOW_ERROR;
    }
    GstStructure *struc = gst_caps_get_structure(caps, 0);
    if(!struc)
    {
        yCError(H264CARRIER, "GSTREAMER: could not get structure of caps!");
        return GST_FLOW_ERROR;
    }
    gint width, height;
    gboolean res;
    res = gst_structure_get_int(struc, "width", &width);
    if(!res)
    {
        yCError(H264CARRIER, "GSTREAMER: could not get width!");
        return GST_FLOW_ERROR;
    }

    res = gst_structure_get_int(struc, "height", &height);
    if(!res)
    {
        yCError(H264CARRIER, "GSTREAMER: could not get height!");
        return GST_FLOW_ERROR;
    }
    yCTrace(H264CARRIER, "Image has size %d x %d", width, height);

    GstBuffer *buffer = gst_sample_get_buffer(sample);
    GstMapInfo map;
    if(!gst_buffer_map(buffer, &map, GST_MAP_READ))
    {
        yCError(H264CARRIER, "GSTREAMER: could not get map!");
        return GST_FLOW_ERROR;
    }
    //HERE I GET MY IMAGE!!!!
    //DO SOMETHING...
    //ImageOf<PixelRgb> &yframebuff = yarp_stuff_ptr->yport_ptr->prepare();
    dec_data->m->lock();
    dec_data->isNew = true;
    dec_data->img->resize(width, height);

    unsigned char *ydata_ptr = dec_data->img->getRawImage();
    memcpy(ydata_ptr, map.data, width*height*3);

    dec_data->m->unlock();
    gst_buffer_unmap(buffer, &map);

    gst_sample_unref(sample);
    if (dec_data->isReq) {
        dec_data->s->post();
    }


#ifdef debug_time
    end_time = Time::now();
    sumOf_timeOfNewSampleFunc += (end_time-start_time);
    count++;
    isFirst=false;

    if(count>=MAX_COUNT)
    {
        yCDebug(H264CARRIER,
                "On %d times: NewSampleFunc is long %.6f sec and sleeps %.6f sec",
                MAX_COUNT,
                (sumOf_timeOfNewSampleFunc/MAX_COUNT),
                (sumOf_timeBetweenCalls/MAX_COUNT) );
        count = 0;
        isFirst = true;
        sumOf_timeBetweenCalls = 0;
        sumOf_timeOfNewSampleFunc = 0;
    }


#endif


    return GST_FLOW_OK;

}
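
// Note on the memcpy above: it copies width*height*3 bytes in one block, i.e.
// it assumes both the GStreamer buffer and the YARP image have rows without
// stride padding. A stride-aware copy inside new_sample() would look roughly
// like this sketch (it assumes an extra #include <gst/video/video.h>):
/*
    GstVideoInfo vinfo;
    if (gst_video_info_from_caps(&vinfo, caps))
    {
        gint stride = GST_VIDEO_INFO_PLANE_STRIDE(&vinfo, 0);
        for (gint y = 0; y < height; y++)
        {
            memcpy(dec_data->img->getRow(y), map.data + y * stride, width * 3);
        }
    }
*/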




//----------------------------------------------------------------------



class H264DecoderHelper
{
public:
    //GMainLoop *loop;

    GstElement *pipeline;
    GstElement *source;
    GstElement *sink;
    GstElement *jitterBuff;
    GstElement *rtpDepay;
    GstElement *parser;
    GstElement *convert;
    GstElement *decoder;
    GstElement *sizeChanger;

    data_for_gst_callback gst_cbk_data;

    GstBus *bus; // could perhaps be moved into the function where it is used
    guint bus_watch_id;

    ImageOf<PixelRgb> myframe;

    H264DecoderHelper(std::mutex* m_ptr, Semaphore* s_ptr) :
        pipeline(nullptr),
        source(nullptr),
        sink(nullptr),
        jitterBuff(nullptr),
        rtpDepay(nullptr),
        parser(nullptr),
        convert(nullptr),
        decoder(nullptr),
        sizeChanger(nullptr),
        bus(nullptr),
        bus_watch_id(0)
    {
        gst_cbk_data.m = m_ptr;
        gst_cbk_data.img = &myframe;
        gst_cbk_data.s = s_ptr;
    }
    ~H264DecoderHelper() = default;


    bool istantiateElements(h264Decoder_cfgParamters &cfgParams)
    {
        gst_init(nullptr, nullptr);
        pipeline = gst_pipeline_new ("video-player");
        source = gst_element_factory_make ("udpsrc", "video-source");
        rtpDepay = gst_element_factory_make ("rtph264depay", "rtp-depay");
        parser = gst_element_factory_make ("h264parse", "parser");
        decoder = gst_element_factory_make ("avdec_h264", "decoder");
        sizeChanger = gst_element_factory_make ("videocrop", "cropper");
        convert = gst_element_factory_make ("videoconvert", "convert"); // because we use the RGB color space
        sink = gst_element_factory_make ("appsink", "video-output");

        if (!pipeline || !source || !rtpDepay || !parser || !decoder || !convert || !sink || !sizeChanger)
        {
            yCError(H264CARRIER) << "H264Decoder-GSTREAMER: one element could not be created. Exiting.";
            return false;
        }
        if (cfgParams.removeJitter)
        {
            jitterBuff = gst_element_factory_make("rtpjitterbuffer", "jitterBuffer");
            if (!jitterBuff)
            {
                yCError(H264CARRIER) << "H264Decoder-GSTREAMER: rtpjitterbuffer could not be created. Exiting.";
                return false;
            }
        }

        yCTrace(H264CARRIER) << "H264Decoder-GSTREAMER: istantiateElements OK";

        return true;
    }
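
    // For debugging, the chain created above (udpsrc -> [rtpjitterbuffer] ->
    // rtph264depay -> h264parse -> avdec_h264 -> videocrop -> videoconvert ->
    // appsink) could also be built from a textual description; this sketch is
    // not used by the class and the port value 33000 is only a placeholder:
    /*
        GError *err = nullptr;
        GstElement *p = gst_parse_launch(
            "udpsrc port=33000 caps=\"application/x-rtp, media=(string)video, "
            "encoding-name=(string)H264, payload=(int)96\" ! rtpjitterbuffer ! "
            "rtph264depay ! h264parse ! avdec_h264 ! videocrop ! videoconvert ! "
            "video/x-raw,format=RGB ! appsink name=yarp-sink emit-signals=false",
            &err);
    */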

    bool configureElements(h264Decoder_cfgParamters &cfgParams) // maybe the callback can be made configurable in the future
    {
        // 1) configure source port
        yCTrace(H264CARRIER) << "H264Decoder-GSTREAMER: try to configure source port with value" << cfgParams.remotePort;
        g_object_set(source, "port", cfgParams.remotePort, NULL);
        yCDebug(H264CARRIER) << "H264Decoder-GSTREAMER: configured source port with" << cfgParams.remotePort;

        // 2) configure callback on new frame
        yCTrace(H264CARRIER) << "H264Decoder-GSTREAMER: try to configure appsink....";
        // The callback mechanism is used instead of signals because it should have less overhead
        g_object_set( sink, "emit-signals", false, NULL );

        GstAppSinkCallbacks cbs = {}; // zero-init so unset fields stay null; the struct is copied by gst_app_sink_set_callbacks(), so it should not need to outlive this call

        // Set Video Sink callback methods
        cbs.eos = nullptr;
        cbs.new_preroll = nullptr;
        cbs.new_sample = &new_sample;
        gst_app_sink_set_callbacks( GST_APP_SINK( sink ), &cbs, &gst_cbk_data, nullptr );

        /* //3) add watch (a message handler)
        bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
        //bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
        gst_object_unref (bus);

        gst_bus_set_sync_handler(bus, bus_call, pipeline, NULL);
        gst_object_unref (bus);
        */

        // videocrop
        yCTrace(H264CARRIER) << "H264Decoder-GSTREAMER: try to set new size: left=" << cfgParams.crop.left << "right=" << cfgParams.crop.right << "top=" << cfgParams.crop.top << "bottom=" << cfgParams.crop.bottom;
        g_object_set(G_OBJECT(sizeChanger), "left", cfgParams.crop.left, "right", cfgParams.crop.right, "top", cfgParams.crop.top, "bottom", cfgParams.crop.bottom, NULL);
        yCTrace(H264CARRIER) << "H264Decoder-GSTREAMER: set new size: left=" << cfgParams.crop.left << "right=" << cfgParams.crop.right << "top=" << cfgParams.crop.top << "bottom=" << cfgParams.crop.bottom;

        yCTrace(H264CARRIER) << "H264Decoder-GSTREAMER: configureElements OK";
        return true;

    }
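
    // The callback registration above is the low-overhead path. The signal-based
    // alternative (what enabling "emit-signals" would allow) would look roughly
    // like this sketch, reusing the same new_sample() function:
    /*
        g_object_set(sink, "emit-signals", TRUE, NULL);
        g_signal_connect(sink, "new-sample", G_CALLBACK(new_sample), &gst_cbk_data);
    */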

    bool linkElements()
    {

        yCTrace(H264CARRIER) << "H264Decoder-GSTREAMER: try to add elements to pipeline.....";
        /* we add all elements into the pipeline */
        gst_bin_add_many (GST_BIN (pipeline),
                          source, rtpDepay, parser, decoder, sizeChanger, convert, sink, NULL);

        gboolean result;

        if (jitterBuff != nullptr)
        {
            result = gst_bin_add(GST_BIN(pipeline), jitterBuff);
            if (!result) { yCError(H264CARRIER) << "H264Decoder: Error adding jitterBuff to the bin"; return false; }
        }

        yCTrace(H264CARRIER) << "H264Decoder-GSTREAMER: elements have been added to the pipeline!";

        yCTrace(H264CARRIER) << "H264Decoder-GSTREAMER: try to link_convert2next.....";
        result = link_convert2next(convert, sink);
        if (!result) { yCError(H264CARRIER) << "H264Decoder: Error linking converter to sink"; return false; }

        /* autovideosrc ! "video/x-raw, width=640, height=480, format=(string)I420" ! videoconvert ! 'video/x-raw, format=(string)RGB' ! yarpdevice ! glimagesink */

        if (jitterBuff)
        {
            yCTrace(H264CARRIER) << "H264Decoder-GSTREAMER: try to link videosrc to rtpjitterBuffer.....";
            result = link_videosrc2nextWithCaps(source, jitterBuff);
            if (!result){ yCError(H264CARRIER) << "H264Decoder: Error linking videosrc to rtpjitterBuffer"; return false;}

            yCTrace(H264CARRIER) << "H264Decoder-GSTREAMER: try to link jitterBuff to rtpDepay.....";
            result = gst_element_link(jitterBuff, rtpDepay);
            if (!result) { yCError(H264CARRIER) << "H264Decoder: Error linking jitterBuff to rtpDepay"; return false; }

        }
        else
        {
            yCTrace(H264CARRIER) << "H264Decoder-GSTREAMER: try to link videosrc to rtpDepay";
            result = link_videosrc2nextWithCaps(source, rtpDepay);
            if (!result) { yCError(H264CARRIER) << "H264Decoder: Error linking videosrc to rtpDepay"; return false; }

        }

        yCTrace(H264CARRIER) << "H264Decoder-GSTREAMER: try to link all other elements.....";
        gst_element_link_many(rtpDepay, parser, decoder, sizeChanger, convert, NULL);

        yCTrace(H264CARRIER) << "H264Decoder-GSTREAMER: linkElements OK";
        return true;
    }
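
    // gst_element_link_many() above returns a gboolean like the other link
    // calls; a stricter version of this last step would check it too (sketch):
    /*
        if (!gst_element_link_many(rtpDepay, parser, decoder, sizeChanger, convert, NULL))
        {
            yCError(H264CARRIER) << "H264Decoder: Error linking depay/parse/decode/crop/convert chain";
            return false;
        }
    */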


};



#define GET_HELPER(x) (*((H264DecoderHelper*)(x)))

H264Decoder::H264Decoder(h264Decoder_cfgParamters& config) :
    sysResource(new H264DecoderHelper(&mutex, &semaphore)),
    cfg(config)
{
}

bool H264Decoder::init()
{
    H264DecoderHelper &helper = GET_HELPER(sysResource);
    if(!helper.istantiateElements(cfg))
    {
        yCError(H264CARRIER) << "H264Decoder: Error in istantiateElements";
        return false;
    }

    if(!helper.configureElements(cfg))
    {
        yCError(H264CARRIER) << "Error in configureElements";
        return false;
    }

    if(!helper.linkElements())
    {
        yCError(H264CARRIER) << "Error in linkElements";
        return false;
    }

    yCDebug(H264CARRIER) << "H264Decoder-GSTREAMER: init ok";
    return true;

}


bool H264Decoder::start()
{
    H264DecoderHelper &helper = GET_HELPER(sysResource);
    gst_element_set_state (helper.pipeline, GST_STATE_PLAYING);
    yCDebug(H264CARRIER) << "H264Decoder: pipeline started!";

    return true;

}
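
// gst_element_set_state() reports failures through its return value; a stricter
// start() could check it, roughly as in this sketch:
/*
    GstStateChangeReturn ret = gst_element_set_state(helper.pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE)
    {
        yCError(H264CARRIER) << "H264Decoder: failed to set the pipeline to PLAYING";
        return false;
    }
*/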

bool H264Decoder::stop()
{
    H264DecoderHelper &helper = GET_HELPER(sysResource);
    gst_element_set_state (helper.pipeline, GST_STATE_NULL);
    gst_bus_set_sync_handler(gst_pipeline_get_bus (GST_PIPELINE (helper.pipeline)), nullptr, nullptr, nullptr);
    yCDebug(H264CARRIER) << "H264Decoder: deleting pipeline";
    gst_object_unref (GST_OBJECT (helper.pipeline));
    return true;
}

H264Decoder::~H264Decoder()
{
    stop();
    delete &GET_HELPER(sysResource);


}

yarp::sig::ImageOf<yarp::sig::PixelRgb>& H264Decoder::getLastFrame()
{
    H264DecoderHelper &helper = GET_HELPER(sysResource);
    helper.gst_cbk_data.isNew = false;
    helper.gst_cbk_data.isReq = false;
    return helper.myframe;
}

bool H264Decoder::newFrameIsAvailable()
{
    H264DecoderHelper &helper = GET_HELPER(sysResource);
    return helper.gst_cbk_data.isNew;
}

int H264Decoder::getLastFrameSize()
{
    H264DecoderHelper &helper = GET_HELPER(sysResource);
    return (helper.myframe.width() * helper.myframe.height() * 3);
}

void H264Decoder::setReq()
{
    H264DecoderHelper &helper = GET_HELPER(sysResource);
    helper.gst_cbk_data.isReq = true;

}
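
// A typical consumer of this class (e.g. the h264 carrier) is expected to use
// it roughly as follows. This is an illustrative sketch, not code from this
// file; `cfg` is a h264Decoder_cfgParamters filled in by the caller, and the
// wait on the shared semaphore is only hinted at:
/*
    H264Decoder decoder(cfg);
    if (!decoder.init() || !decoder.start())
    {
        // handle the error
    }

    decoder.setReq();                    // ask to be woken up on the next frame
    // ... wait on the semaphore passed to H264DecoderHelper ...
    if (decoder.newFrameIsAvailable())
    {
        ImageOf<PixelRgb> &frame = decoder.getLastFrame();  // also clears isNew/isReq
        // use frame; its payload is getLastFrameSize() bytes
    }

    // the H264Decoder destructor stops the pipeline and frees the helper
*/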