Unable to set the pipeline to the playing state ...
Nostalgia
akra.hanine at gmail.com
Wed Feb 21 15:17:13 UTC 2018
Hi,
Please help me find a solution to this issue:
- I have a YUV data file as input and need to produce an H264 file, using
the hardware encoder on the NVIDIA Jetson TX2 (omxh264enc).
- I have to write my program in C/C++.
- I have already run the following command line; it produces the H264
output and I have succeeded in playing it with VLC:
gst-launch-1.0 filesrc blocksize=345600 location=Basketball.yuv !
'video/x-raw, width=(int)640, height=(int)360, format=(string)I420,
framerate=(fraction)25/1' ! omxh264enc ! 'video/x-h264,
stream-format=(string)byte-stream' ! filesink
location=results_block_size_345600.h264
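(For reference: one I420 frame at 640x360 is 640 * 360 * 3/2 = 345600 bytes,
so blocksize=345600 makes filesrc read exactly one frame per buffer; it is
the same value as BUFFER_SIZE in the code below.)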
- Now I need to convert this command line into code and replace the filesrc
with an appsrc, but I get an error when I run it:
/* Build with:
   gcc -Wall $(pkg-config --cflags gstreamer-1.0) YUV_to_H264.c \
       -o yuvtoh264 $(pkg-config --libs gstreamer-1.0) -lgstapp-1.0 */
#include <gst/gst.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <gst/app/gstappsrc.h>
#define WIDTH 640
#define HEIGHT 360
/* One I420 frame: Y plane plus quarter-size U and V planes */
#define BUFFER_SIZE (WIDTH*HEIGHT*3/2)
/* Structure to contain all our information */
typedef struct _CustomData {
GstElement *pipeline, *source, *encoder, *filter, *fsink;
GMainLoop *loop;
FILE *yuv_file;
} CustomData;
/* Called by appsrc whenever it needs another buffer of raw video */
static void cb_need_data (GstElement *source, guint size, CustomData
*data)
{
static GstClockTime timestamp = 0;
guint8 *data_pointer;
size_t read_bytes;
GstBuffer *buffer;
GstFlowReturn ret;
/* gst_buffer_new_wrapped() takes ownership of the memory and releases it
with g_free(), so allocate with g_malloc() and do not free it here */
data_pointer = (guint8 *) g_malloc (BUFFER_SIZE);
g_assert(data_pointer != NULL);
read_bytes = fread(data_pointer, 1, BUFFER_SIZE, data->yuv_file);
if (read_bytes < BUFFER_SIZE) {
/* End of the input file: tell appsrc there is no more data */
g_free (data_pointer);
g_signal_emit_by_name (source, "end-of-stream", &ret);
return;
}
buffer = gst_buffer_new_wrapped (data_pointer, BUFFER_SIZE);
/* Timestamp the frame at 25 fps */
GST_BUFFER_PTS(buffer) = timestamp;
GST_BUFFER_DTS(buffer) = timestamp;
GST_BUFFER_DURATION(buffer) = gst_util_uint64_scale_int (1, GST_SECOND, 25);
timestamp += GST_BUFFER_DURATION (buffer);
/* Hand the buffer to appsrc; it takes its own reference */
g_signal_emit_by_name (source, "push-buffer", buffer, &ret);
gst_buffer_unref (buffer);
if (ret != GST_FLOW_OK) {
g_main_loop_quit (data->loop);
}
}
gint main (gint argc, gchar *argv[])
{
CustomData data;
GstCaps *input_enc_caps, *output_enc_caps;
GstStateChangeReturn state_ret;
long yuv_file_size;
/* Initialize the custom data structure */
memset (&data, 0, sizeof (data));
/* Initialize GStreamer and create the mainloop */
gst_init (&argc, &argv);
data.loop = g_main_loop_new (NULL, FALSE);
/* Create the elements */
data.source = gst_element_factory_make ("appsrc", "myapp_source");
data.encoder = gst_element_factory_make ("omxh264enc",
"myapp_encoder");
data.filter = gst_element_factory_make ("capsfilter", "myapp_filter");
data.fsink = gst_element_factory_make ("filesink", "myapp_sink");
/* Create the empty pipeline */
data.pipeline = gst_pipeline_new ("myapp_pipeline");
if (!data.pipeline || !data.source || !data.encoder || !data.filter
|| !data.fsink)
{
g_printerr ("Not all elements could be created.\n");
return -1;
}
/* Open the YUV input file */
data.yuv_file = fopen("/media/ubuntu/6634-3132/Basketball.yuv", "rb");
g_assert(data.yuv_file != NULL);
/* Obtain the YUV file size */
fseek (data.yuv_file , 0 , SEEK_END);
yuv_file_size = ftell (data.yuv_file);
rewind (data.yuv_file);
//printf("YUV file size: %ld\n", yuv_file_size);
/* Configure source, filter and fsink */
input_enc_caps = gst_caps_new_simple ("video/x-raw",
"format", G_TYPE_STRING, "I420",
"width", G_TYPE_INT, WIDTH,
"height", G_TYPE_INT, HEIGHT,
"framerate", GST_TYPE_FRACTION, 25, 1, NULL);
output_enc_caps = gst_caps_new_simple ("video/x-h264",
"stream-format", G_TYPE_STRING, "byte-stream", NULL);
g_object_set (G_OBJECT (data.source), "caps", input_enc_caps,
"stream-type", 2,
"format", GST_FORMAT_BYTES,
"size", (gint64)(yuv_file_size), NULL);
g_signal_connect (data.source, "need-data", G_CALLBACK (cb_need_data),
&data);
g_object_set (G_OBJECT (data.filter), "caps", output_enc_caps, NULL);
g_object_set (G_OBJECT (data.fsink), "location",
"/media/ubuntu/6634-3132/Basketball.h264", NULL);
gst_caps_unref (input_enc_caps);
gst_caps_unref (output_enc_caps);
//gint64 out_size = gst_app_src_get_size(data.source);
//g_print("%" G_GINT64_FORMAT, out_size);
/* Add the elements to the pipeline and link them */
gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.encoder,
data.filter, data.fsink, NULL);
if ((gst_element_link_many (data.source, data.encoder, data.filter,
data.fsink, NULL)) != TRUE )
{
g_printerr ("Elements could not be linked.\n");
gst_object_unref (data.pipeline);
return -1;
}
/* Start playing the pipeline */
state_ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
//g_assert(state_ret == GST_STATE_CHANGE_ASYNC);
if (state_ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (data.pipeline);
return -1;
}
/* Set the MainLoop to run */
g_main_loop_run (data.loop);
/* Clean up: stop the pipeline before closing the input file */
gst_element_set_state (data.pipeline, GST_STATE_NULL);
fclose (data.yuv_file);
gst_object_unref (GST_OBJECT (data.pipeline));
g_main_loop_unref (data.loop);
return 0;
}
ERRORS:
ubuntu@tegra-ubuntu:~/Desktop$ ./yuvtoh264
Inside NvxLiteH264DecoderLowLatencyInitNvxLiteH264DecoderLowLatencyInit set DPB and Mjstreaming
Inside NvxLiteH265DecoderLowLatencyInitNvxLiteH265DecoderLowLatencyInit set DPB and Mjstreaming
Unable to set the pipeline to the playing state.
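The pipeline does not print any detail about why the state change fails. To
get more information I plan to run with GST_DEBUG=3 and to add a bus watch so
GStreamer's own error message is printed. Below is a minimal sketch of the
bus watch I have in mind (the name bus_cb is my own; it reuses the CustomData
structure from the program above):

/* Sketch only: print errors from the bus and stop the main loop */
static gboolean bus_cb (GstBus *bus, GstMessage *msg, gpointer user_data)
{
  CustomData *data = (CustomData *) user_data;
  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_ERROR: {
      GError *err = NULL;
      gchar *dbg = NULL;
      gst_message_parse_error (msg, &err, &dbg);
      g_printerr ("Error from %s: %s\n", GST_OBJECT_NAME (msg->src),
          err->message);
      g_printerr ("Debug info: %s\n", dbg ? dbg : "none");
      g_clear_error (&err);
      g_free (dbg);
      g_main_loop_quit (data->loop);
      break;
    }
    case GST_MESSAGE_EOS:
      g_main_loop_quit (data->loop);
      break;
    default:
      break;
  }
  return TRUE;
}

/* In main(), before setting the pipeline to PLAYING: */
/*
  GstBus *bus = gst_element_get_bus (data.pipeline);
  gst_bus_add_watch (bus, bus_cb, &data);
  gst_object_unref (bus);
*/

With that in place the main loop would also stop cleanly on EOS instead of
relying only on the push-buffer return value.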
Thanks for any help resolving this issue.
Regards,