playback H264 encoded file too fast

jles jlesquer at gmail.com
Wed Jul 10 15:33:51 UTC 2019


Hello,

I'm interfacing a camera with a variable frame rate depending on the light
conditions. The camera is interfaced using a third-party API, so I'm using
the appsrc element and push the frames into the pipeline.

The goal is to encode the raw frames and record a .h264 file. The problem is
that when I'm playing the resulting output.h264 file in VLC, all frames seem
to play at once (too quickly), and there's also no video duration shown in the
VLC player...

I ran: "ffmpeg -i output.h264" with the next output:

Input #0, h264, from 'output.h264':
  Duration: N/A, bitrate: N/A
    Stream #0:0: Video: h264 (High), yuv420p(tv, bt470bg), 1280x1024, 42
fps, 42 tbr, 1200k tbn, 84 tbc

And the code is:

static void
cb_need_data (GstElement *appsrc,
          guint       unused_size,
		  GstElement *vrate)
{
	/* appsrc "need-data" callback: grab one frame from the XIMEA camera,
	 * copy it into a GstBuffer, stamp it with a PTS/duration derived from
	 * the camera's current (variable) frame rate, and push it downstream.
	 *
	 * Relies on file-scope globals `xiH` (camera handle) and `loop`
	 * (GMainLoop) declared elsewhere in this file.  `vrate` is the
	 * videorate element passed at connect time (currently unused). */
	static GstClockTime timestamp = 0;   /* running PTS across calls */
	GstBuffer *buffer;
	guint buffer_size;
	GstFlowReturn ret;
	float fps = 0.0f;
	XI_IMG image;

	memset (&image, 0, sizeof (image));
	image.size = sizeof (XI_IMG);

	/* Get camera image.  BUG FIX: the return value was previously
	 * ignored; on a grab failure we would have pushed garbage. */
	if (xiGetImage (xiH, 5000, &image) != XI_OK) {
		gst_app_src_end_of_stream (GST_APP_SRC (appsrc));
		g_main_loop_quit (loop);
		return;
	}

	/* Get the current camera frame rate (changes with light conditions). */
	xiGetParamFloat (xiH, XI_PRM_FRAMERATE, &fps);
	if (fps < 1.0f)
		fps = 1.0f;   /* BUG FIX: guard against division by zero below */

	/* NV12 frames occupy width * height * 3 / 2 bytes.
	 * NOTE(review): the original used width * height * 2 — confirm the
	 * camera's output format really matches the "NV12" appsrc caps; a
	 * 16-bit mono format would indeed be w*h*2 but would not be NV12. */
	buffer_size = image.width * image.height * 3 / 2;

	/* BUG FIX: the original wrapped image.bp (camera-owned memory that is
	 * recycled on the next xiGetImage) with GST_MEMORY_FLAG_READONLY.
	 * Downstream elements process buffers asynchronously, so they could
	 * read stale or overwritten pixels.  Copy the frame instead. */
	buffer = gst_buffer_new_allocate (NULL, buffer_size, NULL);
	gst_buffer_fill (buffer, 0, image.bp, buffer_size);

	GST_BUFFER_PTS (buffer) = timestamp;
	/* BUG FIX: fps was cast to guint8 (range 0..255), truncating
	 * fractional rates and wrapping anything above 255 fps, which
	 * corrupted every buffer duration.  Scale in millihertz to keep
	 * ~3 decimal digits of the camera's reported rate. */
	GST_BUFFER_DURATION (buffer) =
	    gst_util_uint64_scale (GST_SECOND, 1000, (guint64) (fps * 1000.0f));
	timestamp += GST_BUFFER_DURATION (buffer);

	/* Push buffer into appsrc.  BUG FIX: unlike gst_app_src_push_buffer(),
	 * the "push-buffer" action signal does NOT take ownership of the
	 * buffer, so the original leaked one buffer per frame. */
	g_signal_emit_by_name (appsrc, "push-buffer", buffer, &ret);
	gst_buffer_unref (buffer);

	if (ret != GST_FLOW_OK) {
		/* something wrong, stop pushing */
		gst_app_src_end_of_stream (GST_APP_SRC (appsrc));
		g_main_loop_quit (loop);
	}
}

int main(int argc, char **argv)
{

  XI_IMG image;
  XI_IMG_FORMAT prev_format = XI_MONO8;
  /* init GStreamer */
  gst_init (&argc, &argv);
  // Retrieving a handle to the camera device
  stat = xiOpenDevice(0, &xiH);
  HandleResult(stat,"xiOpenDevice");
  GstElement *appsrc, *video_rate, *filesink, *omxh264, *enc_queue,
*enc_capsfilter;
  GstCaps *enc_caps;
  float fps=0.0;


  // Setting Exposure Time parameter (10ms)
  stat = xiSetParamInt(xiH, XI_PRM_AEAG, 1); // AEAG
  HandleResult(stat,"xiSetParam (exposure time set)");

	// Start acquisition
  stat = xiStartAcquisition(xiH);
  HandleResult(stat,"xiStartAcquisition");

  xiGetImage(xiH, 5000, &image);
  HandleResult(stat,"xiGetImage");
  xiGetParamFloat(xiH, XI_PRM_FRAMERATE, &fps);

  loop = g_main_loop_new (NULL, FALSE);

  /* setup pipeline */
  pipeline = gst_pipeline_new ("pipeline");
  appsrc = gst_element_factory_make ("appsrc", "source");
  video_rate = gst_element_factory_make ("videorate", "video_rate");
  omxh264 = gst_element_factory_make ("omxh264enc", "omxh264enc");
  enc_queue = gst_element_factory_make ("queue", "Encoder Queue");
  enc_capsfilter = gst_element_factory_make ("capsfilter", "Encoder output
caps");
  filesink = gst_element_factory_make ("filesink", "filesink");

  g_object_set(filesink, "location", "media/test/output.h264", NULL);
  g_object_set (G_OBJECT (appsrc), "is-live", TRUE,NULL);
  g_object_set(G_OBJECT(appsrc), "do-timestamp", TRUE, NULL);
  enc_caps = gst_caps_new_simple ("video/x-h264", "profile", G_TYPE_STRING,
"high", NULL);
  g_object_set (G_OBJECT (enc_capsfilter), "caps", enc_caps, NULL);
  g_object_set (G_OBJECT (omxh264), "target-bitrate", 5000, "b-frames", 0,
"control-rate", 1, "gop-length", 30, NULL);
  
  /* setup */
  g_object_set (G_OBJECT (appsrc), "caps",
  gst_caps_new_simple ("video/x-raw",
					 "format", G_TYPE_STRING, "NV12",
					 "width", G_TYPE_INT, image.width,
					 "height", G_TYPE_INT, image.height,
					 "framerate", GST_TYPE_FRACTION, 0, 1,
					 NULL), NULL);

  gst_bin_add_many (GST_BIN (pipeline), appsrc, video_rate, omxh264 ,
enc_capsfilter, enc_queue, filesink , NULL);
  gst_element_link_many (appsrc, video_rate, omxh264,
enc_queue,enc_capsfilter, filesink, NULL);

  /* setup appsrc */
  g_object_set (G_OBJECT (appsrc), "stream-type", 0, "format",
GST_FORMAT_TIME, NULL);
  g_signal_connect (appsrc, "need-data", G_CALLBACK
(cb_need_data),video_rate);

  /* play */
  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_main_loop_run (loop);

  /* clean up */
  finish:
  printf("Stop Pipeline\r\n ");
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  g_main_loop_unref (loop);
  xiStopAcquisition(xiH);
	// Close device
  if (xiH) xiCloseDevice(xiH);

  return 0;
}

Any ideas/clues would be appreciated.
Thanks.




--
Sent from: http://gstreamer-devel.966125.n4.nabble.com/


More information about the gstreamer-devel mailing list