Argus Camera Sample
Argus Camera Sample
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
VideoPipeline.cpp
Go to the documentation of this file.
1 /*
2  * Copyright (c) 2016-2017, NVIDIA CORPORATION. All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  * * Redistributions of source code must retain the above copyright
8  * notice, this list of conditions and the following disclaimer.
9  * * Redistributions in binary form must reproduce the above copyright
10  * notice, this list of conditions and the following disclaimer in the
11  * documentation and/or other materials provided with the distribution.
12  * * Neither the name of NVIDIA CORPORATION nor the names of its
13  * contributors may be used to endorse or promote products derived
14  * from this software without specific prior written permission.
15  *
16  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
17  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
18  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
19  * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
20  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
21  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
22  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
23  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
24  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27  */
28 
#include <stdio.h>
#include <string.h> // strcmp() used by objectModifyFlags()

#include <string>

#include "Error.h"
#include "VideoPipeline.h"
#include "Composer.h"
36 
37 namespace ArgusSamples
38 {
39 
41 #ifdef GST_SUPPORTED
42  : m_state(GST_STATE_NULL)
43  , m_pipeline(NULL)
44 #endif
45 {
46 }
47 
49 {
50  destroy();
51 }
52 
///! give the video encoder a name so we can find it at stop()
static const char *s_videoEncoderName = "video encoder";
55 
56 /**
57  * RAII helper class for calling gst_object_unref on exit from a block or function.
58  */
59 template <typename T> class GstUnrefer
60 {
61 public:
62  explicit GstUnrefer(T * p)
63  : m_p(p)
64  {
65  }
67  : m_p(NULL)
68  {
69  }
71  {
72  release();
73  }
74 
75  /// Cancel the unref.
76  void cancel()
77  {
78  m_p = NULL;
79  }
80 
81  /// Unref the object now.
82  void release()
83  {
84  if (m_p)
85  gst_object_unref(m_p);
86  m_p = NULL;
87  }
88 
89  /// Set the object to be unrefed.
90  void set(T* p)
91  {
92  release();
93  m_p = p;
94  }
95 
96  /// Get the object.
97  T * get() const
98  {
99  return m_p;
100  }
101 
102 private:
103  T *m_p;
104 
105  /// Not implemented -- use default constructor
106  GstUnrefer(GstUnrefer& other);
107  /// Not implemented
109 };
110 
111 bool VideoPipeline::setupForRecording(EGLStreamKHR videoStream, uint32_t width, uint32_t height,
112  float frameRate, const char *fileName, VideoFormat videoFormat,
113  VideoFileType videoFileType, uint32_t bitRate)
114 {
115 #ifdef GST_SUPPORTED
116  // Init gstreamer
117  gst_init(NULL, NULL);
118 
119  // create the pipeline
120  m_pipeline = gst_pipeline_new("video_pipeline");
121  if (!m_pipeline)
122  ORIGINATE_ERROR("Failed to create video pipeline");
123 
124  // Create the capture source element
125  GstElement *videoSource = gst_element_factory_make("nveglstreamsrc", NULL);
126  if (!videoSource)
127  ORIGINATE_ERROR("Failed to create capture source element");
128  GstUnrefer<GstElement> unrefer(videoSource);
129  if (!gst_bin_add(GST_BIN(m_pipeline), videoSource))
130  ORIGINATE_ERROR("Failed to add video source to pipeline");
131  unrefer.cancel();
132 
133  g_object_set(G_OBJECT(videoSource), "display", Composer::getInstance().getEGLDisplay(), NULL);
134  g_object_set(G_OBJECT(videoSource), "eglstream", videoStream, NULL);
135 
136  // Create queue
137  GstElement *queue = gst_element_factory_make("queue", NULL);
138  if (!queue)
139  ORIGINATE_ERROR("Failed to create queue");
140  unrefer.set(queue);
141  if (!gst_bin_add(GST_BIN(m_pipeline), queue))
142  ORIGINATE_ERROR("Failed to add queue to pipeline");
143  unrefer.cancel();
144 
145  // create the encoder
146  GstElement *videoEncoder = NULL;
147  switch (videoFormat)
148  {
149  case VIDEO_FORMAT_H264:
150  videoEncoder = gst_element_factory_make("omxh264enc", s_videoEncoderName);
151  break;
152  case VIDEO_FORMAT_H265:
153  videoEncoder = gst_element_factory_make("omxh265enc", s_videoEncoderName);
154  break;
155  case VIDEO_FORMAT_VP8:
156  videoEncoder = gst_element_factory_make("omxvp8enc", s_videoEncoderName);
157  break;
158  case VIDEO_FORMAT_VP9:
159  videoEncoder = gst_element_factory_make("omxvp9enc", s_videoEncoderName);
160  break;
161  default:
162  ORIGINATE_ERROR("Unhandled video format");
163  }
164  if (!videoEncoder)
165  ORIGINATE_ERROR("Failed to create video encoder");
166  unrefer.set(videoEncoder);
167  if (!gst_bin_add(GST_BIN(m_pipeline), videoEncoder))
168  ORIGINATE_ERROR("Failed to add video encoder to pipeline");
169  unrefer.cancel();
170 
171  // if no bitrate is given select from reasonable presets
172  if (bitRate == 0)
173  {
174  if (height < 720)
175  bitRate = VIDEO_BITRATE_4M;
176  else if (height < 1080)
177  bitRate = VIDEO_BITRATE_8M;
178  else if (height <= 2160)
179  bitRate = VIDEO_BITRATE_14M;
180  else
181  bitRate = VIDEO_BITRATE_20M;
182  }
183 
184  g_object_set(G_OBJECT(videoEncoder), "bitrate", bitRate, NULL);
185 
186  // set video encoding profile for h.264 to high to get optmized video quality
187  if (videoFormat == VIDEO_FORMAT_H264)
188  {
189  g_object_set(G_OBJECT(videoEncoder), "profile", VIDEO_AVC_PROFILE_HIGH, NULL);
190  }
191 
192  // create the muxer
193  if (videoFormat == VIDEO_FORMAT_VP9)
194  {
195  printf("\nThe VP9 video format is not supported on Jetson-tx1.\n");
196  }
197 
198  if (((videoFormat == VIDEO_FORMAT_H265) || (videoFormat == VIDEO_FORMAT_VP9)) &&
199  (videoFileType != VIDEO_FILE_TYPE_MKV))
200  {
201  printf("\nThe video format H265/VP9 is only supported with MKV in current GST version. "
202  "Selecting MKV as container.\n");
203  videoFileType = VIDEO_FILE_TYPE_MKV;
204  }
205 
206  GstElement *videoMuxer = NULL;
207  switch (videoFileType)
208  {
209  case VIDEO_FILE_TYPE_MP4:
210  videoMuxer = gst_element_factory_make("qtmux", NULL);
211  break;
212  case VIDEO_FILE_TYPE_3GP:
213  videoMuxer = gst_element_factory_make("3gppmux", NULL);
214  break;
215  case VIDEO_FILE_TYPE_AVI:
216  videoMuxer = gst_element_factory_make("avimux", NULL);
217  break;
218  case VIDEO_FILE_TYPE_MKV:
219  videoMuxer = gst_element_factory_make("matroskamux", NULL);
220  break;
222  videoMuxer = gst_element_factory_make("identity", NULL);
223  break;
224  default:
225  ORIGINATE_ERROR("Unhandled video file type");
226  }
227  if (!videoMuxer)
228  ORIGINATE_ERROR("Failed to create video muxer");
229  unrefer.set(videoMuxer);
230  if (!gst_bin_add(GST_BIN(m_pipeline), videoMuxer))
231  ORIGINATE_ERROR("Failed to add video muxer to pipeline");
232  unrefer.cancel();
233 
234  // create the sink
235  GstElement *videoSink = gst_element_factory_make("filesink", NULL);
236  if (!videoSink)
237  ORIGINATE_ERROR("Failed to create video sink");
238  unrefer.set(videoSink);
239  if (!gst_bin_add(GST_BIN(m_pipeline), videoSink))
240  ORIGINATE_ERROR("Failed to add video sink to pipeline");
241  unrefer.cancel();
242 
243  // set the filename
244  std::string videoFileName(fileName);
245  if (videoFileName != "/dev/null")
246  {
247  videoFileName += ".";
248  videoFileName += getFileExtension(videoFileType);
249  }
250 
251  g_object_set(G_OBJECT(videoSink), "location", videoFileName.c_str(), NULL);
252 
253  // @todo 'Floating point exception' and error 'Framerate set to : 0 at
254  // NvxVideoEncoderSetParameter' when no setting the framerate. '0' should be VFR, use 30
255  // instead
256  if (frameRate == 0.0f)
257  frameRate = 30.0f;
258 
259  // create a caps filter
260  GstCaps *caps = gst_caps_new_simple("video/x-raw",
261  "format", G_TYPE_STRING, "I420",
262  "width", G_TYPE_INT, width,
263  "height", G_TYPE_INT, height,
264  "framerate", GST_TYPE_FRACTION, static_cast<gint>(frameRate * 100.f), 100,
265  NULL);
266  if (!caps)
267  ORIGINATE_ERROR("Failed to create caps");
268 
269  GstCapsFeatures *feature = gst_caps_features_new("memory:NVMM", NULL);
270  if (!feature)
271  {
272  gst_caps_unref(caps);
273  ORIGINATE_ERROR("Failed to create caps feature");
274  }
275 
276  gst_caps_set_features(caps, 0, feature);
277 
278  // link the source to the queue via the capture filter
279  if (!gst_element_link_filtered(videoSource, queue, caps))
280  {
281  gst_caps_unref(caps);
282  ORIGINATE_ERROR("Failed to link source to queue");
283  }
284  gst_caps_unref(caps);
285 
286  // link the queue to the encoder
287  if (!gst_element_link(queue, videoEncoder))
288  ORIGINATE_ERROR("Failed to link queue to encoder");
289 
290  // link the encoder pad to the muxer
291  if (videoFileType == VIDEO_FILE_TYPE_H265)
292  {
293  // H265 has a identity muxer, need to link directly
294  if (!gst_element_link(videoEncoder, videoMuxer))
295  ORIGINATE_ERROR("Failed to link encoder to muxer");
296  }
297  else
298  {
299  if (!gst_element_link_pads(videoEncoder, "src", videoMuxer, "video_%u"))
300  ORIGINATE_ERROR("Failed to link encoder to muxer pad");
301  }
302 
303  // link the muxer to the sink
304  if (!gst_element_link(videoMuxer, videoSink))
305  ORIGINATE_ERROR("Failed to link muxer to sink");
306 
307  return true;
308 #else // GST_SUPPORTED
309  ORIGINATE_ERROR("Not supported");
310 #endif // GST_SUPPORTED
311 }
312 
313 #ifdef GST_SUPPORTED
314 /**
315  * Modify object flag values by name.
316  */
317 static bool objectModifyFlags(GObject *obj, const char *flagName, const char *valueName, bool set)
318 {
319  guint count;
320  GParamSpec **spec = g_object_class_list_properties(G_OBJECT_GET_CLASS(obj), &count);
321 
322  for (guint index = 0; index < count; ++index)
323  {
324  GParamSpec *param = spec[index];
325  if (strcmp(param->name, flagName) == 0)
326  {
327  if (!G_IS_PARAM_SPEC_FLAGS(param))
328  ORIGINATE_ERROR("Param '%s' is not a flag", flagName);
329 
330  GParamSpecFlags *pflags = G_PARAM_SPEC_FLAGS(param);
331  GFlagsValue *value = g_flags_get_value_by_nick(pflags->flags_class, valueName);
332  if (!value)
333  ORIGINATE_ERROR("Value '%s' of flag '%s' not found", valueName, flagName);
334 
335  gint flags;
336  g_object_get(obj, flagName, &flags, NULL);
337  if (set)
338  flags |= value->value;
339  else
340  flags &= ~value->value;
341  g_object_set(obj, flagName, flags, NULL);
342 
343  return true;
344  }
345  }
346 
347  ORIGINATE_ERROR("Param '%s' not found", flagName);
348 }
349 #endif // GST_SUPPORTED
350 
/**
 * Set up a GStreamer playback pipeline that delivers the decoded video frames
 * of a file into an EGL stream.
 *
 * A 'playbin' pipeline is created; its video sink is replaced by a bin holding
 * nvvidconv -> nvvideosink, and the EGL stream produced by nvvideosink is
 * returned through 'videoStream'.
 *
 * @param videoStream [out] receives the EGL stream created by the video sink
 * @param fileName    path of the video file to play back
 */
bool VideoPipeline::setupForPlayback(EGLStreamKHR *videoStream, const char *fileName)
{
#ifdef GST_SUPPORTED
    // Init gstreamer
    gst_init(NULL, NULL);

    // Create the source element; playbin builds the demux/decode chain internally
    m_pipeline = gst_element_factory_make("playbin", "play");
    if (!m_pipeline)
        ORIGINATE_ERROR("Failed to create playback pipeline");

    // set the uri of the file to play
    char *uri = gst_filename_to_uri(fileName, NULL);
    g_object_set(G_OBJECT(m_pipeline), "uri", uri, NULL);
    g_free(uri);
    uri = NULL;

    // clear the 'text' flag and set 'native-video' on playbin
    // (presumably disables subtitle rendering and video conversion — confirm
    // against the playbin flags documentation)
    PROPAGATE_ERROR(objectModifyFlags(G_OBJECT(m_pipeline), "flags", "text", false));
    PROPAGATE_ERROR(objectModifyFlags(G_OBJECT(m_pipeline), "flags", "native-video", true));

    // create the audio sink
    GstElement *audioSink = gst_element_factory_make("autoaudiosink", "audio_sink");
    if (!audioSink)
        ORIGINATE_ERROR("Failed to create audio sink");

    // set the audio sink of the pipeline
    g_object_set(G_OBJECT(m_pipeline), "audio-sink", audioSink, NULL);

    // Create the sink bin, this will hold the video converter and the video sink
    GstElement *videoSinkBin = gst_bin_new("video_sink_bin");
    if (!videoSinkBin)
        ORIGINATE_ERROR("Failed to create video sink bin");

    // set the video sink of the pipeline
    g_object_set(G_OBJECT(m_pipeline), "video-sink", videoSinkBin, NULL);

    // Create the video converter
    GstElement *videoConvert = gst_element_factory_make("nvvidconv", "video converter");
    if (!videoConvert)
        ORIGINATE_ERROR("Failed to create video converter");
    GstUnrefer<GstElement> unrefer(videoConvert);
    if (!gst_bin_add(GST_BIN(videoSinkBin), videoConvert))
        ORIGINATE_ERROR("Failed to add video convert to video sink bin");
    // the bin has taken ownership of the element
    unrefer.cancel();

    // Create the video sink
    GstElement *videoSink = gst_element_factory_make("nvvideosink", "video sink");
    if (!videoSink)
        ORIGINATE_ERROR("Failed to create video sink");
    unrefer.set(videoSink);
    if (!gst_bin_add(GST_BIN(videoSinkBin), videoSink))
        ORIGINATE_ERROR("Failed to add video sink to video sink bin");
    unrefer.cancel();

    // configure video sink
    g_object_set(G_OBJECT(videoSink), "display", Composer::getInstance().getEGLDisplay(), NULL);
    // get the EGL stream created by the video sink
    *videoStream = EGL_NO_STREAM_KHR;
    g_object_get(G_OBJECT(videoSink), "stream", videoStream, NULL);
    if (*videoStream == EGL_NO_STREAM_KHR)
        ORIGINATE_ERROR("Failed to get EGL stream from video sink");

    if (!gst_element_link(videoConvert, videoSink))
        ORIGINATE_ERROR("Failed to link video convert to video sink");

    // create a ghost pad so that the pipeline can connect to the bin as a sink
    GstPad *pad = gst_element_get_static_pad(videoConvert, "sink");
    if (!pad)
        ORIGINATE_ERROR("Failed to get sink pad of video convert");
    GstUnrefer<GstPad> padUnrefer(pad);
    GstPad *ghostPad = gst_ghost_pad_new("sink", pad);
    if (!ghostPad)
        ORIGINATE_ERROR("Failed to create the ghost pad");
    GstUnrefer<GstPad> ghostPadUnrefer(ghostPad);
    if (!gst_pad_set_active(ghostPad, TRUE))
        ORIGINATE_ERROR("Failed to set pad active");
    if (!gst_element_add_pad(videoSinkBin, ghostPad))
        ORIGINATE_ERROR("Failed to add pad");
    // the bin has taken ownership of the ghost pad; the static pad reference
    // obtained above is dropped here
    ghostPadUnrefer.cancel();
    padUnrefer.release();

    return true;
#else // GST_SUPPORTED
    ORIGINATE_ERROR("Not supported");
#endif // GST_SUPPORTED
}
437 
439 {
440 #ifdef GST_SUPPORTED
441  if (!m_pipeline)
442  ORIGINATE_ERROR("Video pipeline is not set up");
443 
444  if (m_state != GST_STATE_PLAYING)
445  {
446  // set to playing state
447  if (gst_element_set_state(m_pipeline, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE)
448  ORIGINATE_ERROR("Failed to set playing state");
449 
450  m_state = GST_STATE_PLAYING;
451 
452  /* Dump Capture - Playing Pipeline into the dot file
453  * Set environment variable "export GST_DEBUG_DUMP_DOT_DIR=/tmp"
454  * Run argus_camera and 0.00.00.*-argus_camera.dot
455  * file will be generated.
456  * Run "dot -Tpng 0.00.00.*-argus_camera.dot > image.png"
457  * image.png will display the running capture pipeline.
458  * */
459  GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline),
460  GST_DEBUG_GRAPH_SHOW_ALL, "argus_camera");
461  }
462 
463  return true;
464 #else // GST_SUPPORTED
465  ORIGINATE_ERROR("Not supported");
466 #endif // GST_SUPPORTED
467 }
468 
470 {
471 #ifdef GST_SUPPORTED
472  if (!m_pipeline)
473  ORIGINATE_ERROR("Video pipeline is not set up");
474 
475  if (m_state != GST_STATE_PAUSED)
476  {
477  if (gst_element_set_state(m_pipeline, GST_STATE_PAUSED) == GST_STATE_CHANGE_FAILURE)
478  ORIGINATE_ERROR("Failed to set pause state");
479  m_state = GST_STATE_PAUSED;
480  }
481 
482  return true;
483 #else // GST_SUPPORTED
484  ORIGINATE_ERROR("Not supported");
485 #endif // GST_SUPPORTED
486 }
487 
488 
490 {
491 #ifdef GST_SUPPORTED
492  if (!m_pipeline)
493  ORIGINATE_ERROR("Video pipeline is not set up");
494 
495  GstState newState = GST_STATE_NULL;
496  if (m_state == GST_STATE_PLAYING)
497  newState = GST_STATE_PAUSED;
498  else if (m_state == GST_STATE_PAUSED)
499  newState = GST_STATE_PLAYING;
500  else
501  ORIGINATE_ERROR("Invalid state");
502 
503  if (gst_element_set_state(m_pipeline, newState) == GST_STATE_CHANGE_FAILURE)
504  ORIGINATE_ERROR("Failed to set pause state");
505 
506  m_state = newState;
507 
508  return true;
509 #else // GST_SUPPORTED
510  ORIGINATE_ERROR("Not supported");
511 #endif // GST_SUPPORTED
512 }
513 
515 {
516 #ifdef GST_SUPPORTED
517  if (!m_pipeline)
518  ORIGINATE_ERROR("Video pipeline is not set up");
519 
520  if (!gst_element_seek_simple(m_pipeline, GST_FORMAT_TIME,
521  static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT), 0))
522  {
523  ORIGINATE_ERROR("Failed to rewind");
524  }
525 
526  return true;
527 #else // GST_SUPPORTED
528  ORIGINATE_ERROR("Not supported");
529 #endif // GST_SUPPORTED
530 }
531 
533 {
534 #ifdef GST_SUPPORTED
535  if (!m_pipeline)
536  ORIGINATE_ERROR("Video pipeline is not set up");
537 
538  if ((m_state == GST_STATE_PLAYING) || (m_state == GST_STATE_PAUSED))
539  {
540  // check if there is a video encoder
541  GstElement *videoEncoder = gst_bin_get_by_name(GST_BIN(m_pipeline), s_videoEncoderName);
542  if (videoEncoder)
543  {
544  // send the end of stream event
545  GstPad *pad = gst_element_get_static_pad(videoEncoder, "sink");
546  if (!pad)
547  ORIGINATE_ERROR("Failed to get 'sink' pad");
548  GstUnrefer<GstPad> padUnrefer(pad);
549  if (!gst_pad_send_event(pad, gst_event_new_eos()))
550  ORIGINATE_ERROR("Failed to send end of stream event encoder");
551  padUnrefer.release();
552 
553  // wait for the event to go through
554  GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(m_pipeline));
555  if (!bus)
556  ORIGINATE_ERROR("Failed to get bus");
557  GstUnrefer<GstBus> busUnrefer(bus);
558  if (!gst_bus_poll(bus, GST_MESSAGE_EOS, GST_CLOCK_TIME_NONE))
559  ORIGINATE_ERROR("Failed to wait for the eof event");
560  busUnrefer.release();
561  }
562 
563  // stop the pipeline
564  if (gst_element_set_state(m_pipeline, GST_STATE_NULL) != GST_STATE_CHANGE_SUCCESS)
565  ORIGINATE_ERROR("Failed to stop pipeline");
566 
567  m_state = GST_STATE_NULL;
568  }
569 
570  return true;
571 #else // GST_SUPPORTED
572  ORIGINATE_ERROR("Not supported");
573 #endif // GST_SUPPORTED
574 }
575 
577 {
578 #ifdef GST_SUPPORTED
579  if (m_pipeline)
580  {
581  PROPAGATE_ERROR(stop());
582 
583  // delete pipeline
584  gst_object_unref(GST_OBJECT(m_pipeline));
585 
586  m_pipeline = NULL;
587  }
588 
589  return true;
590 #else // GST_SUPPORTED
591  ORIGINATE_ERROR("Not supported");
592 #endif // GST_SUPPORTED
593 }
594 
595 /*static*/ const char* VideoPipeline::getFileExtension(VideoFileType fileType)
596 {
597  switch (fileType)
598  {
599  case VIDEO_FILE_TYPE_MP4:
600  return "mp4";
601  case VIDEO_FILE_TYPE_3GP:
602  return "3gp";
603  case VIDEO_FILE_TYPE_AVI:
604  return "avi";
605  case VIDEO_FILE_TYPE_MKV:
606  return "mkv";
608  return "h265";
609  default:
610  break;
611  }
612 
613  return "Unhandled video file type";
614 }
615 
616 bool VideoPipeline::getAspectRatio(float *aspectRatio) const
617 {
618  if (aspectRatio == NULL)
619  ORIGINATE_ERROR("'aspectRatio' is NULL");
620 #ifdef GST_SUPPORTED
621  if ((m_state != GST_STATE_PLAYING) && (m_state != GST_STATE_PAUSED))
622  ORIGINATE_ERROR("Must be in paused or playing state.");
623 
624  GstState state = GST_STATE_NULL;
625  while ((state != GST_STATE_PLAYING) && (state != GST_STATE_PAUSED))
626  {
627  if (gst_element_get_state(m_pipeline, &state, NULL, GST_CLOCK_TIME_NONE) ==
628  GST_STATE_CHANGE_FAILURE)
629  {
630  ORIGINATE_ERROR("gst_element_get_state failed");
631  }
632  }
633 
634  // Retrieve the Caps at the entrance of the video sink
635  GstElement *videoSink;
636  g_object_get(m_pipeline, "video-sink", &videoSink, NULL);
637  if (!videoSink)
638  ORIGINATE_ERROR("Failed to get video-sink");
639  GstUnrefer<GstElement> videoSinkUnrefer(videoSink);
640 
641  GstPad *videoSinkPad = gst_element_get_static_pad(videoSink, "sink");
642  if (!videoSinkPad)
643  ORIGINATE_ERROR("Failed to get video-sink pad");
644 
645  GstCaps *caps = gst_pad_get_current_caps(videoSinkPad);
646  if (!caps)
647  ORIGINATE_ERROR("Failed to get video-sink pad caps");
648 
649  *aspectRatio = 1.0f;
650 
651  GstStructure *structure = gst_caps_get_structure(caps, 0);
652  if (!structure)
653  {
654  gst_caps_unref(caps);
655  ORIGINATE_ERROR("Failed to get caps structure");
656  }
657 
658  gint width, height;
659  gint pixelAspectRatioNumerator, pixelAspectRatioDenominator;
660 
661  if (!gst_structure_get_int(structure, "width", &width) ||
662  !gst_structure_get_int(structure, "height", &height) ||
663  !gst_structure_get_fraction(structure, "pixel-aspect-ratio",
664  &pixelAspectRatioNumerator, &pixelAspectRatioDenominator))
665  {
666  gst_caps_unref(caps);
667  ORIGINATE_ERROR("Failed to get structure values");
668  }
669 
670  *aspectRatio = (float)width / (float)height;
671  *aspectRatio *= (float)pixelAspectRatioNumerator / (float)pixelAspectRatioDenominator;
672 
673  gst_caps_unref(caps);
674 
675  return true;
676 #else // GST_SUPPORTED
677  ORIGINATE_ERROR("Not supported");
678 #endif // GST_SUPPORTED
679 }
680 
681 }; // namespace ArgusSamples