DeepStream6.0系列之filesink保存文件

Posted ZONG_XP

tags:

篇首语:本文由小常识网(cha138.com)小编为大家整理,主要介绍了DeepStream6.0系列之filesink保存文件相关的知识,希望对你有一定的参考价值。

点击查看系列文章目录

0 背景

在做 deepstream 功能测试时,常常需要将结果保存为 mp4 视频文件,而 sdk 中并未直接提供相关代码,不熟悉功能的同学改起来有点困难,因此,今天整理一下修改 pipeline 的方法,在 deepstream-test1 的 pipeline 基础上调整为视频文件输出,其它代码类似

1 原 pipeline

在 sdk 中,deepstream-test1 的pipeline 如下所示,最终结果是输出到屏幕显示

filesrc→decode解码→nvstreammux→nvinfer(主检测器)→nvosd→renderer渲染器

如果导出 pipeline 结构图(方法),可以看到结构如下

 我们要做的是修改 pipeline 的最后一部分,即把 osd 之后的插件替换掉

...! nvdsosd ! nvegltransform ! nveglglessink

修改为

 ...! nvdsosd ! nvvideoconvert ! 'video/x-raw(memory:NVMM),format=NV12' ! nvv4l2h264enc ! h264parse ! qtmux ! filesink

2 修改代码

从上边的修改方法可以看出,需要在 osd 之后连接 nvvideoconvert、nvv4l2h264enc、h264parse、qtmux、filesink 几个插件,首先定义这几个插件

  /* Output-branch elements appended after nvdsosd:
   * RGBA -> NV12 convert -> H.264 encode -> stream parse -> MP4 mux.
   * NOTE(review): gst_element_factory_make() must be called after
   * gst_init() -- place these calls accordingly in the final program. */
  GstElement *nvvideoconvert = NULL, *nvv4l2h264enc = NULL, *h264parse = NULL, *qtmux = NULL;
  nvvideoconvert = gst_element_factory_make("nvvideoconvert", "nvvideo-converter2");
  nvv4l2h264enc = gst_element_factory_make("nvv4l2h264enc", "nvv4l2-h264enc");
  h264parse = gst_element_factory_make("h264parse", "h264-parse");
  qtmux = gst_element_factory_make("qtmux", "qtmux");

将原来的 sink 修改,并设置输出文件的名字

  /* Replace the on-screen renderer with a filesink and point its
   * "location" property at the output file. */
  // sink = gst_element_factory_make ("nveglglessink", "nvvideo-renderer");
  sink = gst_element_factory_make ("filesink", "filesink");
  g_object_set (G_OBJECT (sink), "location", "./result.mp4", NULL);

将新增的 element 添加到 pipeline 中

  /* Set up the pipeline */
  /* we add all elements into the pipeline */
  /* Original (display) version, kept for reference: */
  // if(prop.integrated) {
  //   gst_bin_add_many (GST_BIN (pipeline),
  //       source, h264parser, decoder, streammux, pgie,
  //       nvvidconv, nvosd, transform, sink, NULL);
  // }
  // else {
  // gst_bin_add_many (GST_BIN (pipeline),
  //     source, h264parser, decoder, streammux, pgie,
  //     nvvidconv, nvosd, sink, NULL);
  // }

  /* File-output version: the four new elements go between nvosd and sink. */
  gst_bin_add_many (GST_BIN (pipeline),
      source, h264parser, decoder, streammux, pgie,
      nvvidconv, nvosd, nvvideoconvert, nvv4l2h264enc, h264parse, qtmux , sink, NULL);

link 连接起来

  // if(prop.integrated) {
  //   if (!gst_element_link_many (streammux, pgie,
  //       nvvidconv, nvosd, transform, sink, NULL)) {
  //     g_printerr ("Elements could not be linked: 2. Exiting.\\n");
  //     return -1;
  //   }
  // }
  // else {
  //   if (!gst_element_link_many (streammux, pgie,
  //       nvvidconv, nvosd, sink, NULL)) {
  //     g_printerr ("Elements could not be linked: 2. Exiting.\\n");
  //     return -1;
  //   }
  // }
  gst_element_link_many (streammux, pgie, nvvidconv, nvosd, nvvideoconvert, nvv4l2h264enc, h264parse, qtmux , sink, NULL));

重新编译,运行即可,完整代码如下

/* deepstream-test1 modified to write the inference result to ./result.mp4:
 * filesrc -> h264parse -> nvv4l2decoder -> nvstreammux -> nvinfer ->
 * nvvideoconvert -> nvdsosd -> nvvideoconvert -> nvv4l2h264enc ->
 * h264parse -> qtmux -> filesink */
int
main (int argc, char *argv[])
{
  GMainLoop *loop = NULL;
  GstElement *pipeline = NULL, *source = NULL, *h264parser = NULL,
      *decoder = NULL, *streammux = NULL, *sink = NULL, *pgie = NULL, *nvvidconv = NULL,
      *nvosd = NULL;

  /* Output-branch elements appended after nvdsosd:
   * RGBA -> NV12 convert -> H.264 encode -> parse -> MP4 mux. */
  GstElement *nvvideoconvert = NULL, *nvv4l2h264enc = NULL, *h264parse = NULL, *qtmux = NULL;

  GstElement *transform = NULL;  /* only relevant on integrated (Tegra) GPUs */
  GstBus *bus = NULL;
  guint bus_watch_id;
  GstPad *osd_sink_pad = NULL;

  int current_device = -1;
  cudaGetDevice(&current_device);
  struct cudaDeviceProp prop;
  cudaGetDeviceProperties(&prop, current_device);

  /* Check input arguments */
  if (argc != 2) {
    g_printerr ("Usage: %s <H264 filename>\n", argv[0]);
    return -1;
  }

  /* Standard GStreamer initialization.  BUG FIX: this must run before any
   * gst_element_factory_make() call; the original listing created the
   * output-branch elements before gst_init(), which is invalid. */
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  /* Create gstreamer elements */
  /* Create Pipeline element that will form a connection of other elements */
  pipeline = gst_pipeline_new ("dstest1-pipeline");

  /* Source element for reading from the file */
  source = gst_element_factory_make ("filesrc", "file-source");

  /* Since the data format in the input file is elementary h264 stream,
   * we need a h264parser */
  h264parser = gst_element_factory_make ("h264parse", "h264-parser");

  /* Use nvdec_h264 for hardware accelerated decode on GPU */
  decoder = gst_element_factory_make ("nvv4l2decoder", "nvv4l2-decoder");

  /* Create nvstreammux instance to form batches from one or more sources. */
  streammux = gst_element_factory_make ("nvstreammux", "stream-muxer");

  if (!pipeline || !streammux) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }

  /* Use nvinfer to run inferencing on decoder's output,
   * behaviour of inferencing is set through config file */
  pgie = gst_element_factory_make ("nvinfer", "primary-nvinference-engine");

  /* Use convertor to convert from NV12 to RGBA as required by nvosd */
  nvvidconv = gst_element_factory_make ("nvvideoconvert", "nvvideo-converter");

  /* Create OSD to draw on the converted RGBA buffer */
  nvosd = gst_element_factory_make ("nvdsosd", "nv-onscreendisplay");

  /* File-output branch: convert the OSD's RGBA output back to NV12,
   * H.264-encode it, parse the stream and mux it into an MP4 container. */
  nvvideoconvert = gst_element_factory_make ("nvvideoconvert", "nvvideo-converter2");
  nvv4l2h264enc = gst_element_factory_make ("nvv4l2h264enc", "nvv4l2-h264enc");
  h264parse = gst_element_factory_make ("h264parse", "h264-parse");
  qtmux = gst_element_factory_make ("qtmux", "qtmux");

  /* Left over from the on-screen rendering path; not added to the
   * pipeline when writing to a file. */
  if (prop.integrated) {
    transform = gst_element_factory_make ("nvegltransform", "nvegl-transform");
  }

  /* Write the muxed MP4 stream to disk instead of rendering it. */
  sink = gst_element_factory_make ("filesink", "filesink");
  g_object_set (G_OBJECT (sink), "location", "./result.mp4", NULL);

  /* BUG FIX: also verify the four newly added output-branch elements, so a
   * missing plugin fails here instead of crashing later. */
  if (!source || !h264parser || !decoder || !pgie
      || !nvvidconv || !nvosd || !nvvideoconvert || !nvv4l2h264enc
      || !h264parse || !qtmux || !sink) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }

  if (!transform && prop.integrated) {
    g_printerr ("One tegra element could not be created. Exiting.\n");
    return -1;
  }

  /* we set the input filename to the source element */
  g_object_set (G_OBJECT (source), "location", argv[1], NULL);

  g_object_set (G_OBJECT (streammux), "batch-size", 1, NULL);

  g_object_set (G_OBJECT (streammux), "width", MUXER_OUTPUT_WIDTH, "height",
      MUXER_OUTPUT_HEIGHT,
      "batched-push-timeout", MUXER_BATCH_TIMEOUT_USEC, NULL);

  /* Set all the necessary properties of the nvinfer element,
   * the necessary ones are : */
  g_object_set (G_OBJECT (pgie),
      "config-file-path", "dstest1_pgie_config.txt", NULL);

  /* we add a message handler */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  /* Set up the pipeline: add all elements, including the output branch. */
  gst_bin_add_many (GST_BIN (pipeline),
      source, h264parser, decoder, streammux, pgie,
      nvvidconv, nvosd, nvvideoconvert, nvv4l2h264enc, h264parse, qtmux, sink, NULL);

  /* nvstreammux exposes request pads (sink_0, sink_1, ...); hook the
   * decoder's src pad to sink_0 manually. */
  GstPad *sinkpad, *srcpad;
  gchar pad_name_sink[16] = "sink_0";
  gchar pad_name_src[16] = "src";

  sinkpad = gst_element_get_request_pad (streammux, pad_name_sink);
  if (!sinkpad) {
    g_printerr ("Streammux request sink pad failed. Exiting.\n");
    return -1;
  }

  srcpad = gst_element_get_static_pad (decoder, pad_name_src);
  if (!srcpad) {
    g_printerr ("Decoder request src pad failed. Exiting.\n");
    return -1;
  }

  if (gst_pad_link (srcpad, sinkpad) != GST_PAD_LINK_OK) {
      g_printerr ("Failed to link decoder to stream muxer. Exiting.\n");
      return -1;
  }

  gst_object_unref (sinkpad);
  gst_object_unref (srcpad);

  /* we link the elements together */
  /* file-source -> h264-parser -> nvh264-decoder -> (streammux) ->
   * nvinfer -> nvvidconv -> nvosd -> convert -> encode -> parse -> mux -> file */
  if (!gst_element_link_many (source, h264parser, decoder, NULL)) {
    g_printerr ("Elements could not be linked: 1. Exiting.\n");
    return -1;
  }

  if (!gst_element_link_many (streammux, pgie, nvvidconv, nvosd,
      nvvideoconvert, nvv4l2h264enc, h264parse, qtmux, sink, NULL)) {
    g_printerr ("Elements could not be linked. Exiting.\n");
    return -1;
  }

  /* Lets add probe to get informed of the meta data generated, we add probe to
   * the sink pad of the osd element, since by that time, the buffer would have
   * had got all the metadata. */
  osd_sink_pad = gst_element_get_static_pad (nvosd, "sink");
  if (!osd_sink_pad)
    g_print ("Unable to get sink pad\n");
  else
    gst_pad_add_probe (osd_sink_pad, GST_PAD_PROBE_TYPE_BUFFER,
        osd_sink_pad_buffer_probe, NULL, NULL);
  gst_object_unref (osd_sink_pad);

  /* Set the pipeline to "playing" state */
  g_print ("Now playing: %s\n", argv[1]);
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  GST_DEBUG_BIN_TO_DOT_FILE(GST_BIN(pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline-test1");

  /* Wait till pipeline encounters an error or EOS.  NOTE: qtmux needs the
   * EOS to finalize the MP4 index, so the output file is only playable
   * after the pipeline shuts down cleanly. */
  g_print ("Running...\n");
  g_main_loop_run (loop);

  /* Out of the main loop, clean up nicely */
  g_print ("Returned, stopping playback\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);
  g_print ("Deleting pipeline\n");
  gst_object_unref (GST_OBJECT (pipeline));
  g_source_remove (bus_watch_id);
  g_main_loop_unref (loop);
  return 0;
}

以上是关于DeepStream6.0系列之filesink保存文件的主要内容,如果未能解决你的问题,请参考以下文章

DeepStream6.0系列之版本说明

DeepStream6.0系列之版本说明

DeepStream6.0系列之SDK文件夹解析

DeepStream6.0系列之SDK文件夹解析

模型推理deepstream6.0 部署 yolov3 和 yolov4 教程

经验分享ubuntu 安装 deepstream6.0