problem with compiling the matroska application using gstreamer

gsksiva21 boopathisivakumar at gmail.com
Sun Sep 14 15:35:25 PDT 2014


Hi,
I'm a beginner with GStreamer. I wrote my first application to play a Matroska file, but I'm a little confused about using a dynamic pipeline, and now the video is not playing at all.
Please help me find and fix the errors.

Note:
I am able to play the mkv file with gst-launch-0.10.
I put some debug messages inside the on-pad-added callback, but no message is printed!

Kindly let me know where the problem is and how to resolve it. I have tried a lot, but I am not able to overcome it.
 
This is my application's pipeline flow:

gst-launch-0.10 filesrc location=Videos/0001.mkv ! matroskademux name=d \
  d.video_00 ! queue ! h264parse ! ffdec_h264 ! ffmpegcolorspace ! xvimagesink \
  d.audio_00 ! queue ! ac3parse ! ffdec_ac3 ! audioconvert ! audioresample ! alsasink
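
(For reference, the same pipeline description can also be handed to gst_parse_launch(); this is only a minimal sketch, assuming GStreamer 0.10 and the same hard-coded path Videos/0001.mkv as in the command above, not my real application:)

#include <gst/gst.h>

int main (int argc, char *argv[])
{
  GstElement *pipeline;
  GstBus *bus;
  GstMessage *msg;
  GError *error = NULL;

  gst_init (&argc, &argv);

  /* Build the whole pipeline from the gst-launch description string. */
  pipeline = gst_parse_launch (
      "filesrc location=Videos/0001.mkv ! matroskademux name=d "
      "d.video_00 ! queue ! h264parse ! ffdec_h264 ! ffmpegcolorspace ! xvimagesink "
      "d.audio_00 ! queue ! ac3parse ! ffdec_ac3 ! audioconvert ! audioresample ! alsasink",
      &error);
  if (pipeline == NULL) {
    g_printerr ("parse error: %s\n", error ? error->message : "unknown");
    return -1;
  }

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Block until an error or end-of-stream message is posted on the bus. */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
  if (msg != NULL)
    gst_message_unref (msg);
  gst_object_unref (bus);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}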


My application code is below:

#include <stdio.h>
#include <gst/gst.h>

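/* All pipeline elements are kept in one struct so the pad-added callback can reach the queues. */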
typedef struct _CustomData {
  GstElement *source;
  GstElement *demuxer;
  GstElement *audio_queue;
  GstElement *audio_parse;
  GstElement *audio_decoder;
  GstElement *audio_convert;
  GstElement *audio_resamp;
  GstElement *audio_sink;
  GstElement *video_queue;
  GstElement *video_parse;
  GstElement *video_decoder;
  GstElement *video_convert;
  GstElement *video_sink;
  GstElement *pipeline;
} CustomData;
  
static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data);
static void on_pad_added (GstElement *src, GstPad *pad, CustomData *data);

gint main (gint argc, gchar *argv[])
{
  CustomData data;
  GstBus *bus;
  GstMessage *msg;
  GstStateChangeReturn ret;
  gboolean terminate = FALSE;
  GMainLoop *loop;
  guint bus_watch_id;
  gint64 time_nanoseconds = 1;
  GstPadTemplate *src_pad_template;
  GstPad *audio_pad, *video_pad;
  GstPad *queue_audio_pad, *queue_video_pad;

  gst_init (&argc, &argv);

  if (argc < 2 || !gst_uri_is_valid (argv[1])) {
    g_error ("Usage: %s file:///path/to/file", argv[0]);
    return -1;
  }

  loop = g_main_loop_new (NULL, FALSE);

  /* pipeline */
  data.pipeline = gst_pipeline_new ("mak_pipeline");


  data.source = gst_element_factory_make ("filesrc", "mak_source");
  data.demuxer = gst_element_factory_make ("matroskademux", "mat_demux");

  /* audio part */
  data.audio_queue = gst_element_factory_make ("queue", "audio_queue");
  data.audio_parse = gst_element_factory_make ("ac3parse", "audio_parse");
  data.audio_decoder = gst_element_factory_make ("ffdec_ac3", "audio_decoder");
  data.audio_convert = gst_element_factory_make ("audioconvert", "audio_conv");
  data.audio_resamp = gst_element_factory_make ("audioresample", "audio_resamp");
  data.audio_sink = gst_element_factory_make ("alsasink", "audio_sink");

  if (!data.pipeline || !data.source || !data.demuxer || !data.audio_queue
      || !data.audio_parse || !data.audio_decoder || !data.audio_convert
      || !data.audio_resamp || !data.audio_sink) {
    g_printerr ("audio element creation failed\n");
    gst_object_unref (data.pipeline);
    return -1;
  }

  /* video part */
  data.video_queue = gst_element_factory_make ("queue", "video_queue");
  data.video_parse = gst_element_factory_make ("h264parse", "video_parse");
  data.video_decoder = gst_element_factory_make ("ffdec_h264", "video_decoder");
  data.video_convert = gst_element_factory_make ("ffmpegcolorspace", "video_convert");
  data.video_sink = gst_element_factory_make ("autovideosink", "video_sink");

  if (!data.video_queue || !data.video_parse || !data.video_decoder
      || !data.video_convert || !data.video_sink) {
    g_printerr ("video element creation failed\n");
    gst_object_unref (data.pipeline);
    return -1;
  }

  /* input file path set */
  g_object_set (G_OBJECT (data.source), "location", argv[1], NULL);
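  /* argv[1], which was checked above with gst_uri_is_valid(), is passed as-is
   * to the filesrc "location" property. */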
  /* create a bus for log message tracking */
  bus = gst_pipeline_get_bus (GST_PIPELINE (data.pipeline));
  bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  /* add all elements to the pipeline bin */
  gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.demuxer,
      data.audio_queue, data.audio_parse, data.audio_decoder,
      data.audio_convert, data.audio_resamp, data.audio_sink,
      data.video_queue, data.video_parse, data.video_decoder,
      data.video_convert, data.video_sink, NULL);

  /* link creation */
  if (!gst_element_link (data.source, data.demuxer)) {
    g_printerr ("source to demuxer link failed\n");
    gst_object_unref (data.pipeline);
    return -1;
  }

  if (!gst_element_link_many (data.audio_queue, data.audio_parse,
          data.audio_decoder, data.audio_convert, data.audio_resamp,
          data.audio_sink, NULL)) {
    g_printerr ("audio queue part failed to link\n");
    gst_object_unref (data.pipeline);
    return -1;
  }

  if (!gst_element_link_many (data.video_queue, data.video_parse,
          data.video_decoder, data.video_convert, data.video_sink, NULL)) {
    g_printerr ("video link failed\n");
    gst_object_unref (data.pipeline);
    return -1;
  }
  /* linking of the statically created elements is done */
  g_print ("before pad added\n");
  /* pad-added processing */

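  /* matroskademux creates its source pads at runtime, so the demuxer is
   * linked to the two queues later, from the pad-added callback. */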
  g_signal_connect (data.demuxer, "pad-added", G_CALLBACK (on_pad_added), &data);

  g_print ("after pad added\n");
  g_print ("Now playing: %s\n", argv[1]);

  ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (data.pipeline);
    return -1;
  }

  /* Iterate */
  g_print ("Running...\n");
  //g_timeout_add (200, (GSourceFunc) cb_print_position, data.pipeline);
  //seek_to_time (data.pipeline, time_nanoseconds);

  g_main_loop_run (loop);

  /* Listen to the bus */
  bus = gst_element_get_bus (data.pipeline);
  /* we add a message handler */
  bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  /* Out of the main loop, clean up nicely */
  g_print ("Returned, stopping playback\n");
  gst_element_set_state (data.pipeline, GST_STATE_NULL);
  g_print ("Deleting pipeline\n");
  gst_object_unref (GST_OBJECT (data.pipeline));
  g_source_remove (bus_watch_id);
  g_main_loop_unref (loop);

  return 0;
}


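/* Bus callback: quits the main loop on end-of-stream or on an error message. */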
static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
  GMainLoop *loop = (GMainLoop *) data;

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
      g_print ("End of stream\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_ERROR: {
      gchar *debug;
      GError *error;

      gst_message_parse_error (msg, &error, &debug);
      g_free (debug);
      g_printerr ("Error: %s\n", error->message);
      g_error_free (error);
      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }
  return TRUE;
}
static void on_pad_added (GstElement *src, GstPad *new_pad, CustomData *data)
{
  gint width, height;
  GstPadLinkReturn ret;
  GstCaps *new_pad_caps = NULL;
  GstStructure *new_pad_struct = NULL;
  const gchar *new_pad_type = NULL;
  GstPad *sink_pad_audio = NULL;
  GstPad *sink_pad_video = NULL;

  g_print ("inside pad added\n");
  sink_pad_audio = gst_element_get_static_pad (data->audio_queue, "sink");
  sink_pad_video = gst_element_get_static_pad (data->video_queue, "sink");
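  /* sink_pad_audio and sink_pad_video are the always sink pads of the two
   * queues created in main(). */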
g_print("inside pad added \n ");
g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad),
GST_ELEMENT_NAME (src));

/* If our audio converter is already linked, we have nothing to do here */

if (gst_pad_is_linked (sink_pad_audio)) {
g_print (" We are already linked. Ignoring.\n");
goto exit;
}


/* If our video converter is already linked, we have nothing to do here */
if (gst_pad_is_linked (sink_pad_video)) {
g_print (" We are already linked. Ignoring.\n");
goto exit;
}

/* Check the new pad's type */
new_pad_caps = gst_pad_get_caps (new_pad);
new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
new_pad_type = gst_structure_get_name (new_pad_struct);

  if (g_str_has_prefix (new_pad_type, "audio/x-raw")) {
    /* Attempt the link */
    ret = gst_pad_link (new_pad, sink_pad_audio);
    if (GST_PAD_LINK_FAILED (ret)) {
      g_print (" Type is '%s' but link failed.\n", new_pad_type);
    } else {
      //read_video_props (new_pad_caps);
      g_print (" Link succeeded (type '%s').\n", new_pad_type);
    }
  } else if (g_str_has_prefix (new_pad_type, "video/x-raw")) {
    /* Attempt the link */
    ret = gst_pad_link (new_pad, sink_pad_video);
    if (GST_PAD_LINK_FAILED (ret)) {
      g_print (" Type is '%s' but link failed.\n", new_pad_type);
    } else {
      g_print (" Link succeeded (type '%s').\n", new_pad_type);
    }
  } else {
    g_print (" It has type '%s' which is not raw audio or video. Ignoring.\n",
        new_pad_type);
    goto exit;
  }

exit:
  /* Unreference the new pad's caps, if we got them */
  if (new_pad_caps != NULL)
    gst_caps_unref (new_pad_caps);

  /* Unreference the queue sink pads */
  gst_object_unref (sink_pad_audio);
  gst_object_unref (sink_pad_video);
}
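
For comparison, here is a minimal sketch of a pad-added handler that matches the demuxer's encoded output caps instead of raw caps (assuming the streams really are H.264 video and AC-3 audio, as in my gst-launch line above, and reusing the CustomData struct from my code); I'm not sure whether this is what my handler should be checking:

static void on_pad_added_by_codec (GstElement *src, GstPad *new_pad,
    CustomData *data)
{
  GstCaps *caps = gst_pad_get_caps (new_pad);
  const gchar *type = gst_structure_get_name (gst_caps_get_structure (caps, 0));
  GstPad *sinkpad = NULL;

  /* matroskademux outputs the still-encoded streams, so the pad caps are
   * e.g. video/x-h264 and audio/x-ac3, not video/x-raw-* or audio/x-raw-*. */
  if (g_str_has_prefix (type, "video/x-h264"))
    sinkpad = gst_element_get_static_pad (data->video_queue, "sink");
  else if (g_str_has_prefix (type, "audio/x-ac3"))
    sinkpad = gst_element_get_static_pad (data->audio_queue, "sink");

  if (sinkpad != NULL && !gst_pad_is_linked (sinkpad)) {
    if (GST_PAD_LINK_SUCCESSFUL (gst_pad_link (new_pad, sinkpad)))
      g_print ("linked '%s' pad\n", type);
    else
      g_print ("failed to link '%s' pad\n", type);
  } else {
    g_print ("ignoring pad of type '%s'\n", type);
  }

  if (sinkpad != NULL)
    gst_object_unref (sinkpad);
  gst_caps_unref (caps);
}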


regards
gsksiva



