<br>you should add the respective queues before the audio and video decoders (essentially right after the demuxer), and in on_pad_added connect to the sink pads of those queues.<br><br><div class="gmail_quote">On Thu, Nov 26, 2009 at 9:19 PM, Sedji Gaouaou <span dir="ltr"><<a href="mailto:sedji.gaouaou@atmel.com">sedji.gaouaou@atmel.com</a>></span> wrote:<br>
<blockquote class="gmail_quote" style="border-left: 1px solid rgb(204, 204, 204); margin: 0pt 0pt 0pt 0.8ex; padding-left: 1ex;">Hi all,<br>
<br>
I am a newbie with gstreamer, and I am currently writing an application<br>
which will play a .avi file.<br>
But I have a problem when trying to play both audio and video.<br>
<br>
Here is my application:<br>
In the main, if I comment the audio(gst_bin_add_many and<br>
gst_element_link_many) then the video works fine. The same thing happens if<br>
I comment the video part: then the audio plays just fine, but when trying<br>
both the video doesn't even start??<br>
<br>
Am I missing something?<br>
<br>
Regards,<br>
Sedji<br>
<br>
<br>
#include <gst/gst.h><br>
#include <glib.h><br>
#include <string.h><br>
<br>
<br>
static GstElement *source, *demuxer, *vdqueue, *adqueue, *vdsink,<br>
*adsink, *decvd, *decad;<br>
<br>
/*
 * "pad-added" handler for the AVI demuxer.
 *
 * avidemux exposes its audio/video source pads only after the file
 * headers have been parsed ("sometimes" pads), so they cannot be
 * linked statically; this callback links each new pad to the matching
 * decoder's sink pad based on the media type in the pad's caps.
 *
 * element: the demuxer that emitted the signal (unused here).
 * pad:     the newly created demuxer source pad.
 */
void on_pad_added (GstElement *element, GstPad *pad)
{
g_debug ("Signal: pad-added");
GstCaps *caps;
GstStructure *str;

caps = gst_pad_get_caps (pad);
g_assert (caps != NULL);
str = gst_caps_get_structure (caps, 0);
g_assert (str != NULL);

if (g_strrstr (gst_structure_get_name (str), "video")) {
g_debug ("Linking video pad to dec_vd");
/* "sink" is an always pad, so gst_element_get_static_pad is the
 * correct (non-deprecated) accessor; gst_element_get_pad is
 * deprecated in GStreamer 0.10. */
GstPad *targetsink = gst_element_get_static_pad (decvd, "sink");
g_assert (targetsink != NULL);
/* Report a failed link instead of silently ignoring it — an
 * unnoticed link failure here leaves the whole branch dead. */
if (GST_PAD_LINK_FAILED (gst_pad_link (pad, targetsink)))
  g_warning ("Failed to link demuxer video pad to video decoder");
gst_object_unref (targetsink);
}

if (g_strrstr (gst_structure_get_name (str), "audio")) {
g_debug ("Linking audio pad to dec_ad");
GstPad *targetsink = gst_element_get_static_pad (decad, "sink");
g_assert (targetsink != NULL);
if (GST_PAD_LINK_FAILED (gst_pad_link (pad, targetsink)))
  g_warning ("Failed to link demuxer audio pad to audio decoder");
gst_object_unref (targetsink);
}

gst_caps_unref (caps);
}
<br>
static gboolean<br>
bus_call (GstBus *bus,<br>
GstMessage *msg,<br>
gpointer data)<br>
{<br>
GMainLoop *loop = (GMainLoop *) data;<br>
<br>
switch (GST_MESSAGE_TYPE (msg)) {<br>
case GST_MESSAGE_EOS:<br>
g_print ("End of stream\n");<br>
g_main_loop_quit (loop);<br>
break;<br>
case GST_MESSAGE_ERROR: {<br>
gchar *debug;<br>
GError *error;<br>
<br>
gst_message_parse_error (msg, &error, &debug);<br>
g_free (debug);<br>
<br>
g_printerr ("Error: %s\n", error->message);<br>
g_error_free (error);<br>
<br>
g_main_loop_quit (loop);<br>
break;<br>
}<br>
default:<br>
break;<br>
}<br>
<br>
return TRUE;<br>
}<br>
<br>
int<br>
main (int argc,<br>
char *argv[])<br>
{<br>
GMainLoop *loop;<br>
<br>
GstElement *pipeline;<br>
GstBus *bus;<br>
<br>
/* Initialisation */<br>
gst_init (&argc, &argv);<br>
<br>
loop = g_main_loop_new (NULL, FALSE);<br>
<br>
<br>
/* Check input arguments */<br>
if (argc != 2) {<br>
g_printerr ("Usage: %s <Video H264 filename>\n", argv[0]);<br>
return -1;<br>
}<br>
<br>
/* Create gstreamer elements */<br>
pipeline = gst_pipeline_new ("media-player");<br>
source = gst_element_factory_make ("filesrc","file-source");<br>
demuxer = gst_element_factory_make ("avidemux","avi-demuxer");<br>
decvd = gst_element_factory_make ("x170", "video-decoder");<br>
decad = gst_element_factory_make ("mad", "mp3-decoder");<br>
vdsink = gst_element_factory_make ("ximagesink", "video-sink");<br>
vdqueue = gst_element_factory_make ("multiqueue", "video-queue");<br>
adqueue = gst_element_factory_make ("multiqueue", "audio-queue");<br>
adsink = gst_element_factory_make ("osssink", "audio-sink");<br>
<br>
/* if needed to be set to MPEG4(value 4), default is AUTO */<br>
//g_object_set (decvd, "codec", 4, NULL);<br>
/* output format: RGB16 (i.e 2) */<br>
g_object_set (decvd, "output", 2, NULL);<br>
/* Scaling value */<br>
g_object_set (decvd, "scaling", 1.0, NULL);<br>
/* Threshold of the VDEC to sync audio and video */<br>
g_object_set (decvd, "inbuf-thresh", 100000, NULL);<br>
<br>
if (!pipeline || !source || !demuxer || !decvd || !vdsink || !vdqueue<br>
|| !decad || !adqueue || !adsink) {<br>
g_printerr ("One element could not be created. Exiting.\n");<br>
return -1;<br>
}<br>
<br>
/* Set up the pipeline */<br>
<br>
/* we set the input filename to the source element */<br>
g_object_set (G_OBJECT (source), "location", argv[1], NULL);<br>
<br>
/* we add a message handler */<br>
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));<br>
gst_bus_add_watch (bus, bus_call, loop);<br>
gst_object_unref (bus);<br>
<br>
/* we add all elements into the pipeline */<br>
/* file-source | avi-demuxer | x170-decoder | ximagesink<br>
* | mad | audioconvert | osssink*/<br>
<br>
/* demux only */<br>
gst_bin_add_many (GST_BIN (pipeline),<br>
source, demuxer, NULL);<br>
<br>
/* video only */<br>
gst_bin_add_many (GST_BIN (pipeline),<br>
decvd, vdqueue, vdsink, NULL);<br>
<br>
/* audio only */<br>
gst_bin_add_many (GST_BIN (pipeline),<br>
decad, adqueue, adsink, NULL);<br>
<br>
/* we link the elements together */<br>
/* file-source -> demuxer ~> decoder -> image sink */<br>
gst_element_link (source, demuxer);<br>
<br>
if(!gst_element_link_many(decvd, vdqueue, vdsink, NULL)) {<br>
printf("problem linking video elements!!!!!\n");<br>
return FALSE;<br>
}<br>
<br>
if(!gst_element_link_many( decad, adqueue, adsink, NULL)) {<br>
printf("problem linking audio elements!!!!!\n");<br>
return FALSE;<br>
}<br>
<br>
gst_element_link_pads (demuxer, "video", vdqueue, "sink");<br>
gst_element_link_pads (demuxer, "audio", adqueue, "sink");<br>
<br>
g_signal_connect (demuxer, "pad-added", G_CALLBACK (on_pad_added), NULL);<br>
<br>
/* note that the demuxer will be linked to the decoder dynamically.<br>
The reason is that Ogg may contain various streams (for example<br>
audio and video). The source pad(s) will be created at run time,<br>
by the demuxer when it detects the amount and nature of streams.<br>
Therefore we connect a callback function which will be executed<br>
when the "pad-added" is emitted.*/<br>
<br>
/* Set the pipeline to "playing" state*/<br>
g_print ("Now playing: %s\n", argv[1]);<br>
gst_element_set_state (pipeline, GST_STATE_PLAYING);<br>
<br>
<br>
/* Iterate */<br>
g_print ("Running...\n");<br>
g_main_loop_run (loop);<br>
<br>
<br>
/* Out of the main loop, clean up nicely */<br>
g_print ("Returned, stopping playback\n");<br>
gst_element_set_state (pipeline, GST_STATE_NULL);<br>
<br>
g_print ("Deleting pipeline\n");<br>
gst_object_unref (GST_OBJECT (pipeline));<br>
<br>
return 0;<br>
}<br>
<br>
<br>
------------------------------------------------------------------------------<br>
Let Crystal Reports handle the reporting - Free Crystal Reports 2008 30-Day<br>
trial. Simplify your report design, integration and deployment - and focus on<br>
what you do best, core application coding. Discover what's new with<br>
Crystal Reports now. <a href="http://p.sf.net/sfu/bobj-july" target="_blank">http://p.sf.net/sfu/bobj-july</a><br>
_______________________________________________<br>
gstreamer-devel mailing list<br>
<a href="mailto:gstreamer-devel@lists.sourceforge.net">gstreamer-devel@lists.sourceforge.net</a><br>
<a href="https://lists.sourceforge.net/lists/listinfo/gstreamer-devel" target="_blank">https://lists.sourceforge.net/lists/listinfo/gstreamer-devel</a><br>
</blockquote></div><br><br clear="all"><br>-- <br><a href="http://www.linkedin.com/in/kapilagrawal">http://www.linkedin.com/in/kapilagrawal</a><br>