Argus Camera Sample
Argus Camera Sample
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
VideoPipeline.cpp
Go to the documentation of this file.
1 /*
2  * Copyright (c) 2016-2018, NVIDIA CORPORATION. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * * Redistributions of source code must retain the above copyright
8  * notice, this list of conditions and the following disclaimer.
9  * * Redistributions in binary form must reproduce the above copyright
10  * notice, this list of conditions and the following disclaimer in the
11  * documentation and/or other materials provided with the distribution.
12  * * Neither the name of NVIDIA CORPORATION nor the names of its
13  * contributors may be used to endorse or promote products derived
14  * from this software without specific prior written permission.
15  *
16  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
17  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
18  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
19  * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
20  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
21  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
22  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
23  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
24  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27  */
28 
29 #include <stdio.h>
30 
31 #include <string>
32 
33 #include "Error.h"
34 #include "VideoPipeline.h"
35 #include "Composer.h"
36 #include "Util.h"
37 
38 namespace ArgusSamples
39 {
40 
42 #ifdef GST_SUPPORTED
43  : m_state(GST_STATE_NULL)
44  , m_pipeline(NULL)
45 #endif
46 {
47 }
48 
50 {
51  destroy();
52 }
53 
///! give the video encoder a name so we can find it at stop()
55 static const char *s_videoEncoderName = "video encoder";
56 
57 /**
58  * RAII helper class for calling gst_object_unref on exit from a block or function.
59  */
60 template <typename T> class GstUnrefer
61 {
62 public:
63  explicit GstUnrefer(T * p)
64  : m_p(p)
65  {
66  }
68  : m_p(NULL)
69  {
70  }
72  {
73  release();
74  }
75 
76  /// Cancel the unref.
77  void cancel()
78  {
79  m_p = NULL;
80  }
81 
82  /// Unref the object now.
83  void release()
84  {
85  if (m_p)
86  gst_object_unref(m_p);
87  m_p = NULL;
88  }
89 
90  /// Set the object to be unrefed.
91  void set(T* p)
92  {
93  release();
94  m_p = p;
95  }
96 
97  /// Get the object.
98  T * get() const
99  {
100  return m_p;
101  }
102 
103 private:
104  T *m_p;
105 
106  /// Not implemented -- use default constructor
107  GstUnrefer(GstUnrefer& other);
108  /// Not implemented
110 };
111 
112 bool VideoPipeline::setupForRecording(EGLStreamKHR videoStream, uint32_t width, uint32_t height,
113  float frameRate, const char *fileName, VideoFormat videoFormat,
114  VideoFileType videoFileType, uint32_t bitRate)
115 {
116 #ifdef GST_SUPPORTED
117  // set the filename
118  std::string videoFileName(fileName);
119  if (videoFileName != "/dev/null")
120  {
121  videoFileName += ".";
122  videoFileName += getFileExtension(videoFileType);
123  PROPAGATE_ERROR(validateOutputPath(videoFileName.c_str()));
124  }
125 
126  // Init gstreamer
127  gst_init(NULL, NULL);
128 
129  // create the pipeline
130  m_pipeline = gst_pipeline_new("video_pipeline");
131  if (!m_pipeline)
132  ORIGINATE_ERROR("Failed to create video pipeline");
133 
134  // Create the capture source element
135  GstElement *videoSource = gst_element_factory_make("nveglstreamsrc", NULL);
136  if (!videoSource)
137  ORIGINATE_ERROR("Failed to create capture source element");
138  GstUnrefer<GstElement> unrefer(videoSource);
139  if (!gst_bin_add(GST_BIN(m_pipeline), videoSource))
140  ORIGINATE_ERROR("Failed to add video source to pipeline");
141  unrefer.cancel();
142 
143  g_object_set(G_OBJECT(videoSource), "display", Composer::getInstance().getEGLDisplay(), NULL);
144  g_object_set(G_OBJECT(videoSource), "eglstream", videoStream, NULL);
145 
146  // Create queue
147  GstElement *queue = gst_element_factory_make("queue", NULL);
148  if (!queue)
149  ORIGINATE_ERROR("Failed to create queue");
150  unrefer.set(queue);
151  if (!gst_bin_add(GST_BIN(m_pipeline), queue))
152  ORIGINATE_ERROR("Failed to add queue to pipeline");
153  unrefer.cancel();
154 
155  // create the encoder
156  GstElement *videoEncoder = NULL;
157  switch (videoFormat)
158  {
159  case VIDEO_FORMAT_H264:
160  videoEncoder = gst_element_factory_make("omxh264enc", s_videoEncoderName);
161  break;
162  case VIDEO_FORMAT_H265:
163  videoEncoder = gst_element_factory_make("omxh265enc", s_videoEncoderName);
164  break;
165  case VIDEO_FORMAT_VP8:
166  videoEncoder = gst_element_factory_make("omxvp8enc", s_videoEncoderName);
167  break;
168  case VIDEO_FORMAT_VP9:
169  videoEncoder = gst_element_factory_make("omxvp9enc", s_videoEncoderName);
170  break;
171  default:
172  ORIGINATE_ERROR("Unhandled video format");
173  }
174  if (!videoEncoder)
175  ORIGINATE_ERROR("Failed to create video encoder");
176  unrefer.set(videoEncoder);
177  if (!gst_bin_add(GST_BIN(m_pipeline), videoEncoder))
178  ORIGINATE_ERROR("Failed to add video encoder to pipeline");
179  unrefer.cancel();
180 
181  // if no bitrate is given select from reasonable presets
182  if (bitRate == 0)
183  {
184  if (height < 720)
185  bitRate = VIDEO_BITRATE_4M;
186  else if (height < 1080)
187  bitRate = VIDEO_BITRATE_8M;
188  else if (height <= 2160)
189  bitRate = VIDEO_BITRATE_14M;
190  else
191  bitRate = VIDEO_BITRATE_20M;
192  }
193 
194  g_object_set(G_OBJECT(videoEncoder), "bitrate", bitRate, NULL);
195 
196  /*
197  * Currently, of all the supported videoEncoders above: H264, H265, VP8 and VP9, Only H265
198  * supports resolution > 4k.
199  */
200  const uint32_t WIDTH_4K = 3840;
201  if (width > WIDTH_4K && videoFormat != VIDEO_FORMAT_H265)
202  {
203  ORIGINATE_ERROR("\n Resolution > 4k requires videoformat H265 \n");
204  }
205  // set video encoding profile for h.264 to high to get optmized video quality
206  if (videoFormat == VIDEO_FORMAT_H264)
207  {
208  g_object_set(G_OBJECT(videoEncoder), "profile", VIDEO_AVC_PROFILE_HIGH, NULL);
209  }
210 
211  // create the muxer
212  if (videoFormat == VIDEO_FORMAT_VP9)
213  {
214  printf("\nThe VP9 video format is not supported on Jetson-tx1.\n");
215  }
216 
217  if (((videoFormat == VIDEO_FORMAT_H265) || (videoFormat == VIDEO_FORMAT_VP9)) &&
218  (videoFileType != VIDEO_FILE_TYPE_MKV))
219  {
220  printf("\nThe video format H265/VP9 is only supported with MKV in current GST version. "
221  "Selecting MKV as container.\n");
222  videoFileType = VIDEO_FILE_TYPE_MKV;
223  }
224 
225  GstElement *videoMuxer = NULL;
226  switch (videoFileType)
227  {
228  case VIDEO_FILE_TYPE_MP4:
229  videoMuxer = gst_element_factory_make("qtmux", NULL);
230  break;
231  case VIDEO_FILE_TYPE_3GP:
232  videoMuxer = gst_element_factory_make("3gppmux", NULL);
233  break;
234  case VIDEO_FILE_TYPE_AVI:
235  videoMuxer = gst_element_factory_make("avimux", NULL);
236  break;
237  case VIDEO_FILE_TYPE_MKV:
238  videoMuxer = gst_element_factory_make("matroskamux", NULL);
239  break;
241  videoMuxer = gst_element_factory_make("identity", NULL);
242  break;
243  default:
244  ORIGINATE_ERROR("Unhandled video file type");
245  }
246  if (!videoMuxer)
247  ORIGINATE_ERROR("Failed to create video muxer");
248  unrefer.set(videoMuxer);
249  if (!gst_bin_add(GST_BIN(m_pipeline), videoMuxer))
250  ORIGINATE_ERROR("Failed to add video muxer to pipeline");
251  unrefer.cancel();
252 
253  // create the sink
254  GstElement *videoSink = gst_element_factory_make("filesink", NULL);
255  if (!videoSink)
256  ORIGINATE_ERROR("Failed to create video sink");
257  unrefer.set(videoSink);
258  if (!gst_bin_add(GST_BIN(m_pipeline), videoSink))
259  ORIGINATE_ERROR("Failed to add video sink to pipeline");
260  unrefer.cancel();
261 
262  g_object_set(G_OBJECT(videoSink), "location", videoFileName.c_str(), NULL);
263 
264  // @todo 'Floating point exception' and error 'Framerate set to : 0 at
265  // NvxVideoEncoderSetParameter' when no setting the framerate. '0' should be VFR, use 30
266  // instead
267  if (frameRate == 0.0f)
268  frameRate = 30.0f;
269 
270  // create a caps filter
271  GstCaps *caps = gst_caps_new_simple("video/x-raw",
272  "format", G_TYPE_STRING, "NV12",
273  "width", G_TYPE_INT, width,
274  "height", G_TYPE_INT, height,
275  "framerate", GST_TYPE_FRACTION, static_cast<gint>(frameRate * 100.f), 100,
276  NULL);
277  if (!caps)
278  ORIGINATE_ERROR("Failed to create caps");
279 
280  GstCapsFeatures *feature = gst_caps_features_new("memory:NVMM", NULL);
281  if (!feature)
282  {
283  gst_caps_unref(caps);
284  ORIGINATE_ERROR("Failed to create caps feature");
285  }
286 
287  gst_caps_set_features(caps, 0, feature);
288 
289  // link the source to the queue via the capture filter
290  if (!gst_element_link_filtered(videoSource, queue, caps))
291  {
292  gst_caps_unref(caps);
293  ORIGINATE_ERROR("Failed to link source to queue");
294  }
295  gst_caps_unref(caps);
296 
297  // link the queue to the encoder
298  if (!gst_element_link(queue, videoEncoder))
299  ORIGINATE_ERROR("Failed to link queue to encoder");
300 
301  // link the encoder pad to the muxer
302  if (videoFileType == VIDEO_FILE_TYPE_H265)
303  {
304  // H265 has a identity muxer, need to link directly
305  if (!gst_element_link(videoEncoder, videoMuxer))
306  ORIGINATE_ERROR("Failed to link encoder to muxer");
307  }
308  else
309  {
310  if (!gst_element_link_pads(videoEncoder, "src", videoMuxer, "video_%u"))
311  ORIGINATE_ERROR("Failed to link encoder to muxer pad");
312  }
313 
314  // link the muxer to the sink
315  if (!gst_element_link(videoMuxer, videoSink))
316  ORIGINATE_ERROR("Failed to link muxer to sink");
317 
318  return true;
319 #else // GST_SUPPORTED
320  ORIGINATE_ERROR("Not supported");
321 #endif // GST_SUPPORTED
322 }
323 
324 #ifdef GST_SUPPORTED
325 /**
326  * Modify object flag values by name.
327  */
328 static bool objectModifyFlags(GObject *obj, const char *flagName, const char *valueName, bool set)
329 {
330  guint count;
331  GParamSpec **spec = g_object_class_list_properties(G_OBJECT_GET_CLASS(obj), &count);
332 
333  for (guint index = 0; index < count; ++index)
334  {
335  GParamSpec *param = spec[index];
336  if (strcmp(param->name, flagName) == 0)
337  {
338  if (!G_IS_PARAM_SPEC_FLAGS(param))
339  ORIGINATE_ERROR("Param '%s' is not a flag", flagName);
340 
341  GParamSpecFlags *pflags = G_PARAM_SPEC_FLAGS(param);
342  GFlagsValue *value = g_flags_get_value_by_nick(pflags->flags_class, valueName);
343  if (!value)
344  ORIGINATE_ERROR("Value '%s' of flag '%s' not found", valueName, flagName);
345 
346  gint flags;
347  g_object_get(obj, flagName, &flags, NULL);
348  if (set)
349  flags |= value->value;
350  else
351  flags &= ~value->value;
352  g_object_set(obj, flagName, flags, NULL);
353 
354  return true;
355  }
356  }
357 
358  ORIGINATE_ERROR("Param '%s' not found", flagName);
359 }
360 #endif // GST_SUPPORTED
361 
/**
 * Build a playback pipeline around 'playbin' and return the EGL stream the
 * decoded video is written to.
 *
 * The video sink is a bin of nvvidconv -> nvvideosink; nvvideosink exposes an
 * EGL stream which is handed back through 'videoStream' so the caller can
 * connect a stream consumer.
 *
 * @param[out] videoStream receives the EGL stream produced by the video sink
 * @param fileName         path of the media file to play
 */
bool VideoPipeline::setupForPlayback(EGLStreamKHR *videoStream, const char *fileName)
{
#ifdef GST_SUPPORTED
    // Init gstreamer
    gst_init(NULL, NULL);

    // Create the source element
    m_pipeline = gst_element_factory_make("playbin", "play");
    if (!m_pipeline)
        ORIGINATE_ERROR("Failed to create playback pipeline");

    // set the uri (gst_filename_to_uri allocates; freed right after use)
    char *uri = gst_filename_to_uri(fileName, NULL);
    g_object_set(G_OBJECT(m_pipeline), "uri", uri, NULL);
    g_free(uri);
    uri = NULL;

    // disable subtitles and request native (non-converted) video output
    PROPAGATE_ERROR(objectModifyFlags(G_OBJECT(m_pipeline), "flags", "text", false));
    PROPAGATE_ERROR(objectModifyFlags(G_OBJECT(m_pipeline), "flags", "native-video", true));

    // create the audio sink
    GstElement *audioSink = gst_element_factory_make("autoaudiosink", "audio_sink");
    if (!audioSink)
        ORIGINATE_ERROR("Failed to create audio sink");

    // set the audio sink of the pipeline (playbin takes ownership)
    g_object_set(G_OBJECT(m_pipeline), "audio-sink", audioSink, NULL);

    // Create the sink bin, this will hold the video converter and the video sink
    GstElement *videoSinkBin = gst_bin_new("video_sink_bin");
    if (!videoSinkBin)
        ORIGINATE_ERROR("Failed to create video sink bin");

    // set the video sink of the pipeline (playbin takes ownership)
    g_object_set(G_OBJECT(m_pipeline), "video-sink", videoSinkBin, NULL);

    // Create the video converted
    GstElement *videoConvert = gst_element_factory_make("nvvidconv", "video converter");
    if (!videoConvert)
        ORIGINATE_ERROR("Failed to create video converter");
    GstUnrefer<GstElement> unrefer(videoConvert);
    if (!gst_bin_add(GST_BIN(videoSinkBin), videoConvert))
        ORIGINATE_ERROR("Failed to add video convert to video sink bin");
    // ownership passed to the bin
    unrefer.cancel();

    // Create the video sink
    GstElement *videoSink = gst_element_factory_make("nvvideosink", "video sink");
    if (!videoSink)
        ORIGINATE_ERROR("Failed to create video sink");
    unrefer.set(videoSink);
    if (!gst_bin_add(GST_BIN(videoSinkBin), videoSink))
        ORIGINATE_ERROR("Failed to add video sink to video sink bin");
    unrefer.cancel();

    // configure video sink
    g_object_set(G_OBJECT(videoSink), "display", Composer::getInstance().getEGLDisplay(), NULL);
    // get the EGL stream
    *videoStream = EGL_NO_STREAM_KHR;
    g_object_get(G_OBJECT(videoSink), "stream", videoStream, NULL);
    if (*videoStream == EGL_NO_STREAM_KHR)
        ORIGINATE_ERROR("Failed to get EGL stream from video sink");

    if (!gst_element_link(videoConvert, videoSink))
        ORIGINATE_ERROR("Failed to link video convert to video sink");

    // create a ghost pad so that the pipeline can connect to the bin as a sink
    GstPad *pad = gst_element_get_static_pad(videoConvert, "sink");
    if (!pad)
        ORIGINATE_ERROR("Failed to get sink pad of video convert");
    GstUnrefer<GstPad> padUnrefer(pad);
    GstPad *ghostPad = gst_ghost_pad_new("sink", pad);
    if (!ghostPad)
        ORIGINATE_ERROR("Failed to create the ghost pad");
    GstUnrefer<GstPad> ghostPadUnrefer(ghostPad);
    if (!gst_pad_set_active(ghostPad, TRUE))
        ORIGINATE_ERROR("Failed to set pad active");
    if (!gst_element_add_pad(videoSinkBin, ghostPad))
        ORIGINATE_ERROR("Failed to add pad");
    // the bin took ownership of the ghost pad; the static pad ref can be dropped
    ghostPadUnrefer.cancel();
    padUnrefer.release();

    return true;
#else // GST_SUPPORTED
    ORIGINATE_ERROR("Not supported");
#endif // GST_SUPPORTED
}
448 
450 {
451 #ifdef GST_SUPPORTED
452  if (!m_pipeline)
453  ORIGINATE_ERROR("Video pipeline is not set up");
454 
455  if (m_state != GST_STATE_PLAYING)
456  {
457  // set to playing state
458  if (gst_element_set_state(m_pipeline, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE)
459  ORIGINATE_ERROR("Failed to set playing state");
460 
461  m_state = GST_STATE_PLAYING;
462 
463  /* Dump Capture - Playing Pipeline into the dot file
464  * Set environment variable "export GST_DEBUG_DUMP_DOT_DIR=/tmp"
465  * Run argus_camera and 0.00.00.*-argus_camera.dot
466  * file will be generated.
467  * Run "dot -Tpng 0.00.00.*-argus_camera.dot > image.png"
468  * image.png will display the running capture pipeline.
469  * */
470  GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline),
471  GST_DEBUG_GRAPH_SHOW_ALL, "argus_camera");
472  }
473 
474  return true;
475 #else // GST_SUPPORTED
476  ORIGINATE_ERROR("Not supported");
477 #endif // GST_SUPPORTED
478 }
479 
481 {
482 #ifdef GST_SUPPORTED
483  if (!m_pipeline)
484  ORIGINATE_ERROR("Video pipeline is not set up");
485 
486  if (m_state != GST_STATE_PAUSED)
487  {
488  if (gst_element_set_state(m_pipeline, GST_STATE_PAUSED) == GST_STATE_CHANGE_FAILURE)
489  ORIGINATE_ERROR("Failed to set pause state");
490  m_state = GST_STATE_PAUSED;
491  }
492 
493  return true;
494 #else // GST_SUPPORTED
495  ORIGINATE_ERROR("Not supported");
496 #endif // GST_SUPPORTED
497 }
498 
499 
501 {
502 #ifdef GST_SUPPORTED
503  if (!m_pipeline)
504  ORIGINATE_ERROR("Video pipeline is not set up");
505 
506  GstState newState = GST_STATE_NULL;
507  if (m_state == GST_STATE_PLAYING)
508  newState = GST_STATE_PAUSED;
509  else if (m_state == GST_STATE_PAUSED)
510  newState = GST_STATE_PLAYING;
511  else
512  ORIGINATE_ERROR("Invalid state");
513 
514  if (gst_element_set_state(m_pipeline, newState) == GST_STATE_CHANGE_FAILURE)
515  ORIGINATE_ERROR("Failed to set pause state");
516 
517  m_state = newState;
518 
519  return true;
520 #else // GST_SUPPORTED
521  ORIGINATE_ERROR("Not supported");
522 #endif // GST_SUPPORTED
523 }
524 
526 {
527 #ifdef GST_SUPPORTED
528  if (!m_pipeline)
529  ORIGINATE_ERROR("Video pipeline is not set up");
530 
531  if (!gst_element_seek_simple(m_pipeline, GST_FORMAT_TIME,
532  static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT), 0))
533  {
534  ORIGINATE_ERROR("Failed to rewind");
535  }
536 
537  return true;
538 #else // GST_SUPPORTED
539  ORIGINATE_ERROR("Not supported");
540 #endif // GST_SUPPORTED
541 }
542 
544 {
545 #ifdef GST_SUPPORTED
546  if (!m_pipeline)
547  ORIGINATE_ERROR("Video pipeline is not set up");
548 
549  if ((m_state == GST_STATE_PLAYING) || (m_state == GST_STATE_PAUSED))
550  {
551  // check if there is a video encoder
552  GstElement *videoEncoder = gst_bin_get_by_name(GST_BIN(m_pipeline), s_videoEncoderName);
553  if (videoEncoder)
554  {
555  // send the end of stream event
556  GstPad *pad = gst_element_get_static_pad(videoEncoder, "sink");
557  if (!pad)
558  ORIGINATE_ERROR("Failed to get 'sink' pad");
559  GstUnrefer<GstPad> padUnrefer(pad);
560  if (!gst_pad_send_event(pad, gst_event_new_eos()))
561  ORIGINATE_ERROR("Failed to send end of stream event encoder");
562  padUnrefer.release();
563 
564  // wait for the event to go through
565  GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(m_pipeline));
566  if (!bus)
567  ORIGINATE_ERROR("Failed to get bus");
568  GstUnrefer<GstBus> busUnrefer(bus);
569  if (!gst_bus_poll(bus, GST_MESSAGE_EOS, GST_CLOCK_TIME_NONE))
570  ORIGINATE_ERROR("Failed to wait for the eof event");
571  busUnrefer.release();
572  }
573 
574  // stop the pipeline
575  if (gst_element_set_state(m_pipeline, GST_STATE_NULL) != GST_STATE_CHANGE_SUCCESS)
576  ORIGINATE_ERROR("Failed to stop pipeline");
577 
578  m_state = GST_STATE_NULL;
579  }
580 
581  return true;
582 #else // GST_SUPPORTED
583  ORIGINATE_ERROR("Not supported");
584 #endif // GST_SUPPORTED
585 }
586 
588 {
589 #ifdef GST_SUPPORTED
590  if (m_pipeline)
591  {
592  PROPAGATE_ERROR(stop());
593 
594  // delete pipeline
595  gst_object_unref(GST_OBJECT(m_pipeline));
596 
597  m_pipeline = NULL;
598  }
599 
600  return true;
601 #else // GST_SUPPORTED
602  ORIGINATE_ERROR("Not supported");
603 #endif // GST_SUPPORTED
604 }
605 
606 /*static*/ const char* VideoPipeline::getFileExtension(VideoFileType fileType)
607 {
608  switch (fileType)
609  {
610  case VIDEO_FILE_TYPE_MP4:
611  return "mp4";
612  case VIDEO_FILE_TYPE_3GP:
613  return "3gp";
614  case VIDEO_FILE_TYPE_AVI:
615  return "avi";
616  case VIDEO_FILE_TYPE_MKV:
617  return "mkv";
619  return "h265";
620  default:
621  break;
622  }
623 
624  return "Unhandled video file type";
625 }
626 
627 bool VideoPipeline::getAspectRatio(float *aspectRatio) const
628 {
629  if (aspectRatio == NULL)
630  ORIGINATE_ERROR("'aspectRatio' is NULL");
631 #ifdef GST_SUPPORTED
632  if ((m_state != GST_STATE_PLAYING) && (m_state != GST_STATE_PAUSED))
633  ORIGINATE_ERROR("Must be in paused or playing state.");
634 
635  GstState state = GST_STATE_NULL;
636  while ((state != GST_STATE_PLAYING) && (state != GST_STATE_PAUSED))
637  {
638  if (gst_element_get_state(m_pipeline, &state, NULL, GST_CLOCK_TIME_NONE) ==
639  GST_STATE_CHANGE_FAILURE)
640  {
641  ORIGINATE_ERROR("gst_element_get_state failed");
642  }
643  }
644 
645  // Retrieve the Caps at the entrance of the video sink
646  GstElement *videoSink;
647  g_object_get(m_pipeline, "video-sink", &videoSink, NULL);
648  if (!videoSink)
649  ORIGINATE_ERROR("Failed to get video-sink");
650  GstUnrefer<GstElement> videoSinkUnrefer(videoSink);
651 
652  GstPad *videoSinkPad = gst_element_get_static_pad(videoSink, "sink");
653  if (!videoSinkPad)
654  ORIGINATE_ERROR("Failed to get video-sink pad");
655 
656  GstCaps *caps = gst_pad_get_current_caps(videoSinkPad);
657  if (!caps)
658  ORIGINATE_ERROR("Failed to get video-sink pad caps");
659 
660  *aspectRatio = 1.0f;
661 
662  GstStructure *structure = gst_caps_get_structure(caps, 0);
663  if (!structure)
664  {
665  gst_caps_unref(caps);
666  ORIGINATE_ERROR("Failed to get caps structure");
667  }
668 
669  gint width, height;
670  gint pixelAspectRatioNumerator, pixelAspectRatioDenominator;
671 
672  if (!gst_structure_get_int(structure, "width", &width) ||
673  !gst_structure_get_int(structure, "height", &height) ||
674  !gst_structure_get_fraction(structure, "pixel-aspect-ratio",
675  &pixelAspectRatioNumerator, &pixelAspectRatioDenominator))
676  {
677  gst_caps_unref(caps);
678  ORIGINATE_ERROR("Failed to get structure values");
679  }
680 
681  *aspectRatio = (float)width / (float)height;
682  *aspectRatio *= (float)pixelAspectRatioNumerator / (float)pixelAspectRatioDenominator;
683 
684  gst_caps_unref(caps);
685 
686  return true;
687 #else // GST_SUPPORTED
688  ORIGINATE_ERROR("Not supported");
689 #endif // GST_SUPPORTED
690 }
691 
693 {
694 #ifdef GST_SUPPORTED
695  return true;
696 #else // GST_SUPPORTED
697  return false;
698 #endif // GST_SUPPORTED
699 }
700 
701 }; // namespace ArgusSamples