On the NVIDIA Jetson platform, how do I save NV12 frame data with GStreamer?

On the NVIDIA Jetson platform, using GStreamer, how can I save NV12 frame data? On an x86 machine with a GeForce 1660 GPU, the code below retrieves the NV12 frames correctly, but on the Jetson platform it does not work. Could someone please point me in the right direction? (A sketch of the Jetson-side buffer mapping follows the program.)

#include <gst/gst.h>
#include <glib.h>
#include <stdio.h>
#include "gstnvdsmeta.h"

#include"nvbufsurface.h"
#include <cuda.h>
#include <cuda_runtime.h>


gint frame_number = 0;


/* "new-sample" callback: pull the sample so appsink does not stall.
 * The frame itself is handled in the pad probe below. */
static GstFlowReturn on_new_sample_from_sink (GstElement * sink_elm, gpointer param)
{
    GstSample *sample = NULL;

    /* The appsink "pull-sample" action signal returns the sample directly. */
    g_signal_emit_by_name (sink_elm, "pull-sample", &sample);

    if (sample)
        gst_sample_unref (sample);

    return GST_FLOW_OK;
}


/* Write iSize bytes from pBuffer to cFileName. */
void saveBuffer(void * pBuffer, int iSize, const char* cFileName)
{
  g_print ("\n--- saveBuffer()\n\n");

  FILE* _fhandle = fopen(cFileName, "wb");
  if (_fhandle == NULL)
    return;

  fwrite(pBuffer, iSize, 1, _fhandle);
  fclose(_fhandle);
}


/* Interpret the mapped GstBuffer as an NvBufSurface and dump frame 10 as raw NV12. */
static int write_frame(GstBuffer *buf)
{
  printf("\nwrite_frame_gstreamer_sdk, frame sn: %d\n", frame_number);

  GstMapInfo map_info;
  if (buf == NULL || !gst_buffer_map (buf, &map_info, GST_MAP_READ))
  {
      g_print ("gst_buffer_map() error!");
      return -1;
  }

  /* For NVMM buffers from nvv4l2decoder/nvvideoconvert, the mapped data is an NvBufSurface. */
  NvBufSurface *surface = (NvBufSurface *) map_info.data;

  int batch_size = surface->batchSize;
  printf("batch_size of the frame buffer : %d\n", batch_size);
  for (int i = 0; i < batch_size; ++i)
  {
    uint32_t data_size = surface->surfaceList[i].dataSize;
    uint32_t pitch = surface->surfaceList[i].pitch;
    uint32_t width = surface->surfaceList[i].width;
    uint32_t height = surface->surfaceList[i].height;

    void *dataPtr = surface->surfaceList[i].dataPtr;

    printf("Size of the frame buffer : %u\n", data_size);
    printf("Pitch of the frame buffer : %u\n", pitch);
    printf("width of the frame buffer : %u\n", width);
    printf("height of the frame buffer : %u\n", height);

    NvBufSurfaceColorFormat color_format = surface->surfaceList[i].colorFormat;

    if (color_format == NVBUF_COLOR_FORMAT_NV12)
      printf("color_format: NVBUF_COLOR_FORMAT_NV12 \n");
    else if (color_format == NVBUF_COLOR_FORMAT_NV12_ER)
      printf("color_format: NVBUF_COLOR_FORMAT_NV12_ER \n");
    else if (color_format == NVBUF_COLOR_FORMAT_NV12_709)
      printf("color_format: NVBUF_COLOR_FORMAT_NV12_709 \n");
    else if (color_format == NVBUF_COLOR_FORMAT_NV12_709_ER)
      printf("color_format: NVBUF_COLOR_FORMAT_NV12_709_ER \n");

    /* Dump a single frame (frame 10) to disk as raw NV12. */
    if (frame_number == 10)
    {
      saveBuffer(dataPtr, data_size, "testYUV_gs.nv12");
    }
  }

  gst_buffer_unmap (buf, &map_info);

  return 1;
}


/* Buffer probe on the appsink sink pad: count frames and dump the selected one. */
static GstPadProbeReturn
app_sink_pad_buffer_probe (GstPad * pad, GstPadProbeInfo * info,
    gpointer u_data)
{
  GstBuffer *buf = (GstBuffer *) info->data;
  frame_number++;

  // save frame
  write_frame (buf);

  return GST_PAD_PROBE_OK;
}


static gboolean
bus_call (GstBus * bus, GstMessage * msg, gpointer data)
{
  GMainLoop *loop = (GMainLoop *) data;
  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
      g_print ("End of stream\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_ERROR:{
      gchar *debug;
      GError *error;
      gst_message_parse_error (msg, &error, &debug);
      g_printerr ("ERROR from element %s: %s\n",
          GST_OBJECT_NAME (msg->src), error->message);
      if (debug)
        g_printerr ("Error details: %s\n", debug);
      g_free (debug);
      g_error_free (error);
      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }
  return TRUE;
}


int
main (int argc, char *argv[])
{
  GMainLoop *loop = NULL;
  GstElement *pipeline = NULL, *source = NULL, *h264parser = NULL,
      *decoder = NULL, *nvvidconv = NULL, *appsink = NULL;

  GstBus *bus = NULL;
  guint bus_watch_id;
  GstPad *app_sink_pad = NULL;

  /* Check input arguments */
  if (argc != 2) {
    g_printerr ("Usage: %s <H264 filename>\n", argv[0]);
    return -1;
  }

  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  pipeline = gst_pipeline_new ("dstest1-pipeline");
  if (!pipeline) {
    g_printerr ("pipeline could not be created. Exiting.\n");
    return -1;
  }

  source = gst_element_factory_make ("filesrc", "file-source");
  h264parser = gst_element_factory_make ("h264parse", "h264-parser");
  decoder = gst_element_factory_make ("nvv4l2decoder", "nvv4l2-decoder");
  nvvidconv = gst_element_factory_make ("nvvideoconvert", "nvvideo-converter");
  appsink = gst_element_factory_make ("appsink", "appsink-test");

  if (!source || !h264parser || !decoder || !nvvidconv || !appsink) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }

  g_object_set (G_OBJECT (source), "location", argv[1], NULL);

  g_object_set (appsink, "emit-signals", TRUE, "sync", TRUE, NULL);
  g_signal_connect (appsink, "new-sample", G_CALLBACK (on_new_sample_from_sink), NULL);

  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  gst_bin_add_many (GST_BIN (pipeline),
      source, h264parser, decoder, nvvidconv, appsink, NULL);

  if (!gst_element_link_many (source, h264parser, decoder, nvvidconv, appsink, NULL)) {
    g_printerr ("Elements could not be linked: 1. Exiting.\n");
    return -1;
  }

  app_sink_pad = gst_element_get_static_pad (appsink, "sink");
  if (!app_sink_pad)
    g_print ("Unable to get app sink pad\n");
  else
    gst_pad_add_probe (app_sink_pad, GST_PAD_PROBE_TYPE_BUFFER,
        app_sink_pad_buffer_probe, NULL, NULL);
  gst_object_unref (app_sink_pad);

  g_print ("Now playing: %s\n", argv[1]);
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  g_print ("Running...\n");
  g_main_loop_run (loop);

  g_print ("Returned, stopping playback\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);
  g_print ("Deleting pipeline\n");
  gst_object_unref (GST_OBJECT (pipeline));
  g_source_remove (bus_watch_id);
  g_main_loop_unref (loop);
  return 0;
}
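
For reference, on Jetson the nvv4l2decoder output lives in NVMM (NVBUF_MEM_SURFACE_ARRAY) memory, where surfaceList[i].dataPtr is not a CPU-accessible pointer, so writing it straight to a file does not yield valid pixel data; the surface generally has to be mapped with NvBufSurfaceMap() and synchronized with NvBufSurfaceSyncForCpu() before the CPU can read it. The helper below is only a minimal sketch of that approach under those assumptions: dump_nv12_surface() is a hypothetical name, and while NvBufSurfaceMap()/NvBufSurfaceSyncForCpu()/NvBufSurfaceUnMap() and the planeParams/mappedAddr fields come from nvbufsurface.h, the exact flow should be verified against the DeepStream version in use.

/* Sketch (hypothetical helper, same headers as the program above):
 * map one batch entry of an NvBufSurface for CPU access and write its
 * NV12 planes to a file, honoring the per-plane pitch. */
static int
dump_nv12_surface (NvBufSurface * surface, int index, const char *path)
{
  /* Map every plane of batch entry `index` for CPU reads.
   * On Jetson this is what makes the pixels visible to the CPU. */
  if (NvBufSurfaceMap (surface, index, -1, NVBUF_MAP_READ) != 0) {
    g_printerr ("NvBufSurfaceMap() failed\n");
    return -1;
  }
  /* Synchronize caches so the CPU sees the decoder's writes. */
  NvBufSurfaceSyncForCpu (surface, index, -1);

  FILE *fp = fopen (path, "wb");
  if (fp == NULL) {
    NvBufSurfaceUnMap (surface, index, -1);
    return -1;
  }

  NvBufSurfaceParams *params = &surface->surfaceList[index];

  /* NV12 has two planes: full-resolution Y, then half-height interleaved UV.
   * Rows are padded to `pitch` bytes, so copy row by row to get a packed file. */
  for (uint32_t plane = 0; plane < params->planeParams.num_planes; ++plane) {
    unsigned char *src = (unsigned char *) params->mappedAddr.addr[plane];
    uint32_t row_bytes = params->planeParams.width[plane] *
        params->planeParams.bytesPerPix[plane];

    for (uint32_t row = 0; row < params->planeParams.height[plane]; ++row)
      fwrite (src + (size_t) row * params->planeParams.pitch[plane], 1, row_bytes, fp);
  }

  fclose (fp);
  NvBufSurfaceUnMap (surface, index, -1);
  return 0;
}

Inside write_frame() this could replace the direct saveBuffer(dataPtr, ...) call, e.g. dump_nv12_surface(surface, i, "testYUV_gs.nv12") when frame_number == 10. Another option, if CPU-only access is enough, is to negotiate system-memory NV12 caps (a capsfilter with "video/x-raw, format=NV12") between nvvideoconvert and appsink; then map_info.data already holds the raw frame (possibly with row padding described by GstVideoMeta) and no NvBufSurface handling is needed.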

 
