Argus Camera Sample
Argus Camera Sample
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
VideoPipeline.cpp
Go to the documentation of this file.
1 /*
2  * SPDX-FileCopyrightText: Copyright (c) 2016 - 2025 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
 * SPDX-License-Identifier: LicenseRef-NvidiaProprietary
4  *
5  * Redistribution and use in source and binary forms, with or without
6  * modification, are permitted provided that the following conditions
7  * are met:
8  * * Redistributions of source code must retain the above copyright
9  * notice, this list of conditions and the following disclaimer.
10  * * Redistributions in binary form must reproduce the above copyright
11  * notice, this list of conditions and the following disclaimer in the
12  * documentation and/or other materials provided with the distribution.
13  * * Neither the name of NVIDIA CORPORATION nor the names of its
14  * contributors may be used to endorse or promote products derived
15  * from this software without specific prior written permission.
16  *
17  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
18  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
19  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
20  * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
21  * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
22  * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
23  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
24  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
25  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
26  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
27  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28  */
29 
30 #include <stdio.h>
31 
32 #include <string>
33 
34 #include "Error.h"
35 #include "VideoPipeline.h"
36 #include "Composer.h"
37 #include "Util.h"
38 
39 namespace ArgusSamples
40 {
41 
43 #ifdef GST_SUPPORTED
44  : m_state(GST_STATE_NULL)
45  , m_pipeline(NULL)
46 #endif
47 {
48 }
49 
51 {
52  destroy();
53 }
54 
///! Give the video encoder a name so we can find it at stop()
static const char *s_videoEncoderName = "video encoder";
57 
58 /**
59  * RAII helper class for calling gst_object_unref on exit from a block or function.
60  */
61 template <typename T> class GstUnrefer
62 {
63 public:
64  explicit GstUnrefer(T * p)
65  : m_p(p)
66  {
67  }
69  : m_p(NULL)
70  {
71  }
73  {
74  release();
75  }
76 
77  /// Cancel the unref.
78  void cancel()
79  {
80  m_p = NULL;
81  }
82 
83  /// Unref the object now.
84  void release()
85  {
86  if (m_p)
87  gst_object_unref(m_p);
88  m_p = NULL;
89  }
90 
91  /// Set the object to be unrefed.
92  void set(T* p)
93  {
94  release();
95  m_p = p;
96  }
97 
98  /// Get the object.
99  T * get() const
100  {
101  return m_p;
102  }
103 
104 private:
105  T *m_p;
106 
107  /// Not implemented -- use default constructor
108  GstUnrefer(GstUnrefer& other);
109  /// Not implemented
111 };
112 
/**
 * Build a GStreamer recording pipeline that encodes frames from an EGL stream
 * into a container file:
 *
 *     nveglstreamsrc -> queue -> encoder -> [parser] -> muxer -> filesink
 *
 * @param videoStream     EGL stream the camera producer writes into
 * @param width           frame width in pixels
 * @param height          frame height in pixels
 * @param frameRate       capture frame rate; 0 selects a default of 30 fps
 * @param fileName        output file name without extension ("/dev/null" is
 *                        passed through unchanged)
 * @param videoFormat     encoder selection (H264/H265/VP8/VP9)
 * @param videoFileType   container selection (may be overridden for
 *                        unsupported format/container combinations)
 * @param bitRate         target bit rate in bits/s; 0 selects a preset by
 *                        resolution and frame rate
 * @param controlRate     encoder rate-control mode
 * @param enableTwoPassCBR enable two-pass CBR in the encoder
 *
 * @return true on success; on failure an error is originated and false is
 *         returned (any partially built pipeline is released by destroy()).
 */
bool VideoPipeline::setupForRecording(EGLStreamKHR videoStream, uint32_t width, uint32_t height,
    float frameRate, const char *fileName, VideoFormat videoFormat,
    VideoFileType videoFileType, uint32_t bitRate, VideoControlRateMode controlRate,
    bool enableTwoPassCBR)
{
#ifdef GST_SUPPORTED
    // set the filename (append the container extension unless writing to /dev/null)
    std::string videoFileName(fileName);
    if (videoFileName != "/dev/null")
    {
        videoFileName += ".";
        videoFileName += getFileExtension(videoFileType);
        PROPAGATE_ERROR(validateOutputPath(videoFileName.c_str()));
    }

    // Init gstreamer
    gst_init(NULL, NULL);

    // create the pipeline
    m_pipeline = gst_pipeline_new("video_pipeline");
    if (!m_pipeline)
        ORIGINATE_ERROR("Failed to create video pipeline");

    // Create the capture source element. The GstUnrefer guards each element
    // until gst_bin_add succeeds, at which point the bin owns it and the
    // guard is cancelled.
    GstElement *videoSource = gst_element_factory_make("nveglstreamsrc", NULL);
    if (!videoSource)
        ORIGINATE_ERROR("Failed to create capture source element");
    GstUnrefer<GstElement> unrefer(videoSource);
    if (!gst_bin_add(GST_BIN(m_pipeline), videoSource))
        ORIGINATE_ERROR("Failed to add video source to pipeline");
    unrefer.cancel();

    // connect the source to the camera's EGL stream
    g_object_set(G_OBJECT(videoSource), "display", Composer::getInstance().getEGLDisplay(), NULL);
    g_object_set(G_OBJECT(videoSource), "eglstream", videoStream, NULL);

    // Create queue (decouples capture from encoding)
    GstElement *queue = gst_element_factory_make("queue", NULL);
    if (!queue)
        ORIGINATE_ERROR("Failed to create queue");
    unrefer.set(queue);
    if (!gst_bin_add(GST_BIN(m_pipeline), queue))
        ORIGINATE_ERROR("Failed to add queue to pipeline");
    unrefer.cancel();

    // create the encoder; it is given a well-known name so stop() can find it
    // and send an EOS event before tearing the pipeline down
    GstElement *videoEncoder = NULL;
    switch (videoFormat)
    {
    case VIDEO_FORMAT_H264:
        videoEncoder = gst_element_factory_make("nvv4l2h264enc", s_videoEncoderName);
        break;
    case VIDEO_FORMAT_H265:
        videoEncoder = gst_element_factory_make("nvv4l2h265enc", s_videoEncoderName);
        break;
    case VIDEO_FORMAT_VP8:
        printf("\n***vp8 encode is not supported for Jetson-Xavier & beyond\n");
        videoEncoder = gst_element_factory_make("nvv4l2vp8enc", s_videoEncoderName);
        break;
    case VIDEO_FORMAT_VP9:
        videoEncoder = gst_element_factory_make("nvv4l2vp9enc", s_videoEncoderName);
        break;
    default:
        ORIGINATE_ERROR("Unhandled video format");
    }
    if (!videoEncoder)
        ORIGINATE_ERROR("Failed to create video encoder");
    unrefer.set(videoEncoder);
    if (!gst_bin_add(GST_BIN(m_pipeline), videoEncoder))
        ORIGINATE_ERROR("Failed to add video encoder to pipeline");
    unrefer.cancel();

    // if no bitrate is given select from reasonable presets
    if (bitRate == 0)
    {
        if (height < 720)
            bitRate = VIDEO_BITRATE_4M;
        else if (height < 1080)
            bitRate = VIDEO_BITRATE_8M;
        else if (height <= 2160)
            bitRate = VIDEO_BITRATE_14M;
        else
            bitRate = VIDEO_BITRATE_20M;

        // scale the preset down for low frame rates (presets assume 30 fps)
        if (frameRate < 15.0)
        {
            bitRate = (uint32_t)((float)bitRate/30.0*frameRate);
        }
    }

    g_object_set(G_OBJECT(videoEncoder), "bitrate", bitRate, NULL);
    g_object_set(G_OBJECT(videoEncoder), "control-rate", controlRate, NULL);
    g_object_set(G_OBJECT(videoEncoder), "EnableTwopassCBR", enableTwoPassCBR, NULL);

    /*
     * Currently, of all the supported videoEncoders above: H264, H265, VP8 and VP9, Only H265
     * supports resolution > 4k.
     */
    const uint32_t WIDTH_4K = 3840;
    if (width > WIDTH_4K && videoFormat != VIDEO_FORMAT_H265)
    {
        ORIGINATE_ERROR("\n Resolution > 4k requires videoformat H265 \n");
    }
    // set video encoding profile for h.264 to high to get optimized video quality
    if (videoFormat == VIDEO_FORMAT_H264)
    {
        g_object_set(G_OBJECT(videoEncoder), "profile", VIDEO_AVC_PROFILE_HIGH, NULL);
    }

    // create the muxer
    if (videoFormat == VIDEO_FORMAT_VP9)
    {
        printf("\nThe VP9 video format is not supported on Jetson-tx1.\n");
    }

    // fall back to MKV for format/container combinations the GST version
    // cannot mux
    if ((videoFileType == VIDEO_FILE_TYPE_3GP) &&
        !(videoFormat == VIDEO_FORMAT_H264))
    {
        printf("\nThe 3GP is only supported with H264 in current GST version. "
            "Selecting other containers.\n");
        videoFileType = VIDEO_FILE_TYPE_MKV;
    }

    if ((videoFileType == VIDEO_FILE_TYPE_AVI) &&
        (videoFormat == VIDEO_FORMAT_H265))
    {
        printf("\nThe AVI is not supported with H265 in current GST version. "
            "Selecting other containers.\n");
        videoFileType = VIDEO_FILE_TYPE_MKV;
    }

    // H.264/H.265 need a parser between encoder and muxer; VP8/VP9 do not
    GstElement *videoParse = NULL;
    switch (videoFormat)
    {
    case VIDEO_FORMAT_H264:
        videoParse = gst_element_factory_make("h264parse", NULL);
        if (!videoParse)
            ORIGINATE_ERROR("Failed to create video parser");
        break;
    case VIDEO_FORMAT_H265:
        videoParse = gst_element_factory_make("h265parse", NULL);
        if (!videoParse)
            ORIGINATE_ERROR("Failed to create video parser");
        break;
    case VIDEO_FORMAT_VP9:
    case VIDEO_FORMAT_VP8:
        break;
    default:
        ORIGINATE_ERROR("Unhandled video file type");
    }
    if (videoParse)
    {
        unrefer.set(videoParse);
        if (!gst_bin_add(GST_BIN(m_pipeline), videoParse))
            ORIGINATE_ERROR("Failed to add video parser to pipeline");
        unrefer.cancel();
    }

    GstElement *videoMuxer = NULL;
    switch (videoFileType)
    {
    case VIDEO_FILE_TYPE_MP4:
        videoMuxer = gst_element_factory_make("qtmux", NULL);
        break;
    case VIDEO_FILE_TYPE_3GP:
        videoMuxer = gst_element_factory_make("3gppmux", NULL);
        break;
    case VIDEO_FILE_TYPE_AVI:
        videoMuxer = gst_element_factory_make("avimux", NULL);
        break;
    case VIDEO_FILE_TYPE_MKV:
        videoMuxer = gst_element_factory_make("matroskamux", NULL);
        break;
    default:
        ORIGINATE_ERROR("Unhandled video file type");
    }
    if (!videoMuxer)
        ORIGINATE_ERROR("Failed to create video muxer");
    unrefer.set(videoMuxer);
    if (!gst_bin_add(GST_BIN(m_pipeline), videoMuxer))
        ORIGINATE_ERROR("Failed to add video muxer to pipeline");
    unrefer.cancel();

    // create the sink
    GstElement *videoSink = gst_element_factory_make("filesink", NULL);
    if (!videoSink)
        ORIGINATE_ERROR("Failed to create video sink");
    unrefer.set(videoSink);
    if (!gst_bin_add(GST_BIN(m_pipeline), videoSink))
        ORIGINATE_ERROR("Failed to add video sink to pipeline");
    unrefer.cancel();

    g_object_set(G_OBJECT(videoSink), "location", videoFileName.c_str(), NULL);

    // @todo 'Floating point exception' and error 'Framerate set to : 0 at
    // NvxVideoEncoderSetParameter' when no setting the framerate. '0' should be VFR, use 30
    // instead
    if (frameRate == 0.0f)
        frameRate = 30.0f;

    // create a caps filter describing the NV12 frames delivered by the source;
    // the frame rate is expressed as a fraction with two decimal digits of
    // precision (rate*100 / 100)
    GstCaps *caps = gst_caps_new_simple("video/x-raw",
        "format", G_TYPE_STRING, "NV12",
        "width", G_TYPE_INT, width,
        "height", G_TYPE_INT, height,
        "framerate", GST_TYPE_FRACTION, static_cast<gint>(frameRate * 100.f), 100,
        NULL);
    if (!caps)
        ORIGINATE_ERROR("Failed to create caps");

    // the buffers live in NVMM (NVIDIA multimedia) memory, not system memory
    GstCapsFeatures *feature = gst_caps_features_new("memory:NVMM", NULL);
    if (!feature)
    {
        gst_caps_unref(caps);
        ORIGINATE_ERROR("Failed to create caps feature");
    }

    // caps takes ownership of 'feature'
    gst_caps_set_features(caps, 0, feature);

    // link the source to the queue via the capture filter
    if (!gst_element_link_filtered(videoSource, queue, caps))
    {
        gst_caps_unref(caps);
        ORIGINATE_ERROR("Failed to link source to queue");
    }
    gst_caps_unref(caps);

    // link the queue to the encoder
    if (!gst_element_link(queue, videoEncoder))
        ORIGINATE_ERROR("Failed to link queue to encoder");

    // link the encoder pad to the muxer (through the parser if there is one)
    if (videoParse)
    {
        if (!gst_element_link(videoEncoder, videoParse))
            ORIGINATE_ERROR("Failed to link encoder to parser");

        if (!gst_element_link(videoParse, videoMuxer))
            ORIGINATE_ERROR("Failed to link parser to muxer");
    }
    else
    {
        if (!gst_element_link(videoEncoder, videoMuxer))
            ORIGINATE_ERROR("Failed to link encoder to muxer");
    }

    // link the muxer to the sink
    if (!gst_element_link(videoMuxer, videoSink))
        ORIGINATE_ERROR("Failed to link muxer to sink");

    return true;
#else // GST_SUPPORTED
    ORIGINATE_ERROR("Not supported");
#endif // GST_SUPPORTED
}
367 
#ifdef GST_SUPPORTED
/**
 * Modify object flag values by name.
 *
 * Looks up a flags-typed GObject property on @a obj and sets or clears one of
 * its bits, identified by the flag value's nick name.
 *
 * @param obj       object whose flags property is modified
 * @param flagName  name of the flags-typed property (e.g. "flags" on playbin)
 * @param valueName nick of the flag value to modify (e.g. "native-video")
 * @param set       true to set the bit, false to clear it
 *
 * @return true on success, false (via ORIGINATE_ERROR) if the property or the
 *         flag value does not exist.
 */
static bool objectModifyFlags(GObject *obj, const char *flagName, const char *valueName, bool set)
{
    // Use g_object_class_find_property() instead of iterating the array from
    // g_object_class_list_properties(); the previous implementation leaked
    // that heap-allocated array on every call (it requires g_free()).
    GParamSpec *param = g_object_class_find_property(G_OBJECT_GET_CLASS(obj), flagName);
    if (!param)
        ORIGINATE_ERROR("Param '%s' not found", flagName);

    if (!G_IS_PARAM_SPEC_FLAGS(param))
        ORIGINATE_ERROR("Param '%s' is not a flag", flagName);

    GParamSpecFlags *pflags = G_PARAM_SPEC_FLAGS(param);
    GFlagsValue *value = g_flags_get_value_by_nick(pflags->flags_class, valueName);
    if (!value)
        ORIGINATE_ERROR("Value '%s' of flag '%s' not found", valueName, flagName);

    // read-modify-write the flags property
    gint flags;
    g_object_get(obj, flagName, &flags, NULL);
    if (set)
        flags |= value->value;
    else
        flags &= ~value->value;
    g_object_set(obj, flagName, flags, NULL);

    return true;
}
#endif // GST_SUPPORTED
405 
/**
 * Build a GStreamer playback pipeline based on playbin that renders video into
 * an EGL stream (via nvvidconv -> nvvideosink inside a bin) and audio through
 * autoaudiosink.
 *
 * @param[out] videoStream receives the EGL stream created by the video sink;
 *                         the caller attaches a consumer to it
 * @param fileName         media file to play (converted to a URI for playbin)
 *
 * @return true on success; false (via ORIGINATE_ERROR) otherwise.
 */
bool VideoPipeline::setupForPlayback(EGLStreamKHR *videoStream, const char *fileName)
{
#ifdef GST_SUPPORTED
    // Init gstreamer
    gst_init(NULL, NULL);

    // Create the source element; playbin is a complete pipeline by itself
    m_pipeline = gst_element_factory_make("playbin", "play");
    if (!m_pipeline)
        ORIGINATE_ERROR("Failed to create playback pipeline");

    // set the uri
    char *uri = gst_filename_to_uri(fileName, NULL);
    g_object_set(G_OBJECT(m_pipeline), "uri", uri, NULL);
    g_free(uri);
    uri = NULL;

    // disable subtitle rendering, force native (non-converted) video
    PROPAGATE_ERROR(objectModifyFlags(G_OBJECT(m_pipeline), "flags", "text", false));
    PROPAGATE_ERROR(objectModifyFlags(G_OBJECT(m_pipeline), "flags", "native-video", true));

    // create the audio sink
    GstElement *audioSink = gst_element_factory_make("autoaudiosink", "audio_sink");
    if (!audioSink)
        ORIGINATE_ERROR("Failed to create audio sink");

    // set the audio sink of the pipeline (playbin takes ownership)
    g_object_set(G_OBJECT(m_pipeline), "audio-sink", audioSink, NULL);

    // Create the sink bin, this will hold the video converter and the video sink
    GstElement *videoSinkBin = gst_bin_new("video_sink_bin");
    if (!videoSinkBin)
        ORIGINATE_ERROR("Failed to create video sink bin");

    // set the video sink of the pipeline (playbin takes ownership)
    g_object_set(G_OBJECT(m_pipeline), "video-sink", videoSinkBin, NULL);

    // Create the video converter; the GstUnrefer guards each element until the
    // bin takes ownership via gst_bin_add
    GstElement *videoConvert = gst_element_factory_make("nvvidconv", "video converter");
    if (!videoConvert)
        ORIGINATE_ERROR("Failed to create video converter");
    GstUnrefer<GstElement> unrefer(videoConvert);
    if (!gst_bin_add(GST_BIN(videoSinkBin), videoConvert))
        ORIGINATE_ERROR("Failed to add video convert to video sink bin");
    unrefer.cancel();

    // Create the video sink
    GstElement *videoSink = gst_element_factory_make("nvvideosink", "video sink");
    if (!videoSink)
        ORIGINATE_ERROR("Failed to create video sink");
    unrefer.set(videoSink);
    if (!gst_bin_add(GST_BIN(videoSinkBin), videoSink))
        ORIGINATE_ERROR("Failed to add video sink to video sink bin");
    unrefer.cancel();

    // configure video sink
    g_object_set(G_OBJECT(videoSink), "display", Composer::getInstance().getEGLDisplay(), NULL);
    // get the EGL stream the sink created; the consumer side attaches to it
    *videoStream = EGL_NO_STREAM_KHR;
    g_object_get(G_OBJECT(videoSink), "stream", videoStream, NULL);
    if (*videoStream == EGL_NO_STREAM_KHR)
        ORIGINATE_ERROR("Failed to get EGL stream from video sink");

    if (!gst_element_link(videoConvert, videoSink))
        ORIGINATE_ERROR("Failed to link video convert to video sink");

    // create a ghost pad so that the pipeline can connect to the bin as a sink
    GstPad *pad = gst_element_get_static_pad(videoConvert, "sink");
    if (!pad)
        ORIGINATE_ERROR("Failed to get sink pad of video convert");
    GstUnrefer<GstPad> padUnrefer(pad);
    GstPad *ghostPad = gst_ghost_pad_new("sink", pad);
    if (!ghostPad)
        ORIGINATE_ERROR("Failed to create the ghost pad");
    GstUnrefer<GstPad> ghostPadUnrefer(ghostPad);
    if (!gst_pad_set_active(ghostPad, TRUE))
        ORIGINATE_ERROR("Failed to set pad active");
    if (!gst_element_add_pad(videoSinkBin, ghostPad))
        ORIGINATE_ERROR("Failed to add pad");
    // the bin now owns the ghost pad; the static pad ref must still be dropped
    ghostPadUnrefer.cancel();
    padUnrefer.release();

    return true;
#else // GST_SUPPORTED
    ORIGINATE_ERROR("Not supported");
#endif // GST_SUPPORTED
}
492 
494 {
495 #ifdef GST_SUPPORTED
496  if (!m_pipeline)
497  ORIGINATE_ERROR("Video pipeline is not set up");
498 
499  if (m_state != GST_STATE_PLAYING)
500  {
501  // set to playing state
502  if (gst_element_set_state(m_pipeline, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE)
503  ORIGINATE_ERROR("Failed to set playing state");
504 
505  m_state = GST_STATE_PLAYING;
506 
507  /* Dump Capture - Playing Pipeline into the dot file
508  * Set environment variable "export GST_DEBUG_DUMP_DOT_DIR=/tmp"
509  * Run argus_camera and 0.00.00.*-argus_camera.dot
510  * file will be generated.
511  * Run "dot -Tpng 0.00.00.*-argus_camera.dot > image.png"
512  * image.png will display the running capture pipeline.
513  * */
514  GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS(GST_BIN(m_pipeline),
515  GST_DEBUG_GRAPH_SHOW_ALL, "argus_camera");
516  }
517 
518  return true;
519 #else // GST_SUPPORTED
520  ORIGINATE_ERROR("Not supported");
521 #endif // GST_SUPPORTED
522 }
523 
525 {
526 #ifdef GST_SUPPORTED
527  if (!m_pipeline)
528  ORIGINATE_ERROR("Video pipeline is not set up");
529 
530  if (m_state != GST_STATE_PAUSED)
531  {
532  if (gst_element_set_state(m_pipeline, GST_STATE_PAUSED) == GST_STATE_CHANGE_FAILURE)
533  ORIGINATE_ERROR("Failed to set pause state");
534  m_state = GST_STATE_PAUSED;
535  }
536 
537  return true;
538 #else // GST_SUPPORTED
539  ORIGINATE_ERROR("Not supported");
540 #endif // GST_SUPPORTED
541 }
542 
543 
545 {
546 #ifdef GST_SUPPORTED
547  if (!m_pipeline)
548  ORIGINATE_ERROR("Video pipeline is not set up");
549 
550  GstState newState = GST_STATE_NULL;
551  if (m_state == GST_STATE_PLAYING)
552  newState = GST_STATE_PAUSED;
553  else if (m_state == GST_STATE_PAUSED)
554  newState = GST_STATE_PLAYING;
555  else
556  ORIGINATE_ERROR("Invalid state");
557 
558  if (gst_element_set_state(m_pipeline, newState) == GST_STATE_CHANGE_FAILURE)
559  ORIGINATE_ERROR("Failed to set pause state");
560 
561  m_state = newState;
562 
563  return true;
564 #else // GST_SUPPORTED
565  ORIGINATE_ERROR("Not supported");
566 #endif // GST_SUPPORTED
567 }
568 
570 {
571 #ifdef GST_SUPPORTED
572  if (!m_pipeline)
573  ORIGINATE_ERROR("Video pipeline is not set up");
574 
575  if (!gst_element_seek_simple(m_pipeline, GST_FORMAT_TIME,
576  static_cast<GstSeekFlags>(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT), 0))
577  {
578  ORIGINATE_ERROR("Failed to rewind");
579  }
580 
581  return true;
582 #else // GST_SUPPORTED
583  ORIGINATE_ERROR("Not supported");
584 #endif // GST_SUPPORTED
585 }
586 
588 {
589 #ifdef GST_SUPPORTED
590  if (!m_pipeline)
591  ORIGINATE_ERROR("Video pipeline is not set up");
592 
593  if ((m_state == GST_STATE_PLAYING) || (m_state == GST_STATE_PAUSED))
594  {
595  // check if there is a video encoder
596  GstElement *videoEncoder = gst_bin_get_by_name(GST_BIN(m_pipeline), s_videoEncoderName);
597  if (videoEncoder)
598  {
599  // send the end of stream event
600  GstPad *pad = gst_element_get_static_pad(videoEncoder, "sink");
601  if (!pad)
602  ORIGINATE_ERROR("Failed to get 'sink' pad");
603  GstUnrefer<GstPad> padUnrefer(pad);
604  if (!gst_pad_send_event(pad, gst_event_new_eos()))
605  ORIGINATE_ERROR("Failed to send end of stream event encoder");
606  padUnrefer.release();
607 
608  // wait for the event to go through
609  GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(m_pipeline));
610  if (!bus)
611  ORIGINATE_ERROR("Failed to get bus");
612  GstUnrefer<GstBus> busUnrefer(bus);
613  if (!gst_bus_poll(bus, GST_MESSAGE_EOS, GST_CLOCK_TIME_NONE))
614  ORIGINATE_ERROR("Failed to wait for the eof event");
615  busUnrefer.release();
616  }
617 
618  // stop the pipeline
619  if (gst_element_set_state(m_pipeline, GST_STATE_NULL) != GST_STATE_CHANGE_SUCCESS)
620  ORIGINATE_ERROR("Failed to stop pipeline");
621 
622  m_state = GST_STATE_NULL;
623  }
624 
625  return true;
626 #else // GST_SUPPORTED
627  ORIGINATE_ERROR("Not supported");
628 #endif // GST_SUPPORTED
629 }
630 
632 {
633 #ifdef GST_SUPPORTED
634  if (m_pipeline)
635  {
636  PROPAGATE_ERROR(stop());
637 
638  // delete pipeline
639  gst_object_unref(GST_OBJECT(m_pipeline));
640 
641  m_pipeline = NULL;
642  }
643 
644  return true;
645 #else // GST_SUPPORTED
646  ORIGINATE_ERROR("Not supported");
647 #endif // GST_SUPPORTED
648 }
649 
650 /*static*/ const char* VideoPipeline::getFileExtension(VideoFileType fileType)
651 {
652  switch (fileType)
653  {
654  case VIDEO_FILE_TYPE_MP4:
655  return "mp4";
656  case VIDEO_FILE_TYPE_3GP:
657  return "3gp";
658  case VIDEO_FILE_TYPE_AVI:
659  return "avi";
660  case VIDEO_FILE_TYPE_MKV:
661  return "mkv";
663  return "h265";
664  default:
665  break;
666  }
667 
668  return "Unhandled video file type";
669 }
670 
671 bool VideoPipeline::getAspectRatio(float *aspectRatio) const
672 {
673  if (aspectRatio == NULL)
674  ORIGINATE_ERROR("'aspectRatio' is NULL");
675 #ifdef GST_SUPPORTED
676  if ((m_state != GST_STATE_PLAYING) && (m_state != GST_STATE_PAUSED))
677  ORIGINATE_ERROR("Must be in paused or playing state.");
678 
679  GstState state = GST_STATE_NULL;
680  while ((state != GST_STATE_PLAYING) && (state != GST_STATE_PAUSED))
681  {
682  if (gst_element_get_state(m_pipeline, &state, NULL, GST_CLOCK_TIME_NONE) ==
683  GST_STATE_CHANGE_FAILURE)
684  {
685  ORIGINATE_ERROR("gst_element_get_state failed");
686  }
687  }
688 
689  // Retrieve the Caps at the entrance of the video sink
690  GstElement *videoSink;
691  g_object_get(m_pipeline, "video-sink", &videoSink, NULL);
692  if (!videoSink)
693  ORIGINATE_ERROR("Failed to get video-sink");
694  GstUnrefer<GstElement> videoSinkUnrefer(videoSink);
695 
696  GstPad *videoSinkPad = gst_element_get_static_pad(videoSink, "sink");
697  if (!videoSinkPad)
698  ORIGINATE_ERROR("Failed to get video-sink pad");
699 
700  GstCaps *caps = gst_pad_get_current_caps(videoSinkPad);
701  if (!caps)
702  ORIGINATE_ERROR("Failed to get video-sink pad caps");
703 
704  *aspectRatio = 1.0f;
705 
706  GstStructure *structure = gst_caps_get_structure(caps, 0);
707  if (!structure)
708  {
709  gst_caps_unref(caps);
710  ORIGINATE_ERROR("Failed to get caps structure");
711  }
712 
713  gint width, height;
714  gint pixelAspectRatioNumerator, pixelAspectRatioDenominator;
715 
716  if (!gst_structure_get_int(structure, "width", &width) ||
717  !gst_structure_get_int(structure, "height", &height) ||
718  !gst_structure_get_fraction(structure, "pixel-aspect-ratio",
719  &pixelAspectRatioNumerator, &pixelAspectRatioDenominator))
720  {
721  gst_caps_unref(caps);
722  ORIGINATE_ERROR("Failed to get structure values");
723  }
724 
725  *aspectRatio = (float)width / (float)height;
726  *aspectRatio *= (float)pixelAspectRatioNumerator / (float)pixelAspectRatioDenominator;
727 
728  gst_caps_unref(caps);
729 
730  return true;
731 #else // GST_SUPPORTED
732  ORIGINATE_ERROR("Not supported");
733 #endif // GST_SUPPORTED
734 }
735 
737 {
738 #ifdef GST_SUPPORTED
739  return true;
740 #else // GST_SUPPORTED
741  return false;
742 #endif // GST_SUPPORTED
743 }
744 
745 }; // namespace ArgusSamples