[gst-devel] play audio and video simultaneously
alavariega
alavariega at dextratech.com
Fri Apr 24 16:22:53 CEST 2009
Hi, how are you?
Thanks a lot for your help; the problem was solved with your comments below.
Have a nice day, and thanks again for your help and for taking the time to
reply to my mails.
Best Regards,
Alberto
sledge hammer wrote:
>
>
> I think your problem is this:
>
> The queues should be between the demuxer and the decoder. I see that you
> put them between the decoder and the audioconvert (for the audio stream).
> The pipeline linkage should look like
> src->demuxer->queuea->decodera->convert->resample->sinka
> src->demuxer->queuev->decoderv->sinkv
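>
> A rough sketch of that arrangement in code (assuming the element and
> handler names from the program quoted below; only the linking part is
> shown, error checking omitted):
>
>   /* static part: everything downstream of the queues can be linked up front */
>   gst_bin_add_many (GST_BIN (pipeline), src, demux,
>       queueA, decodera, convert, resample, sinka,
>       queueV, decoderv, sinkv, NULL);
>   gst_element_link (src, demux);
>   gst_element_link_many (queueA, decodera, convert, resample, sinka, NULL);
>   gst_element_link_many (queueV, decoderv, sinkv, NULL);
>
>   /* dynamic part: in on_pad_added(), link each demuxer pad to the
>    * matching queue sink pad instead of to the decoder */
>   GstPad *targetsink = NULL;
>   if (g_strrstr (c, "video") || g_strrstr (c, "image"))
>     targetsink = gst_element_get_pad (queueV, "sink");
>   else if (g_strrstr (c, "audio"))
>     targetsink = gst_element_get_pad (queueA, "sink");
>   if (targetsink) {
>     gst_pad_link (pad, targetsink);
>     gst_object_unref (targetsink);
>   }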
>
>> Date: Thu, 23 Apr 2009 08:31:41 -0700
>> From: alavariega at dextratech.com
>> To: gstreamer-devel at lists.sourceforge.net
>> Subject: Re: [gst-devel] play audio and video simultaneously
>>
>>
>> The "mad" element doesn't have dynamic pads, so it doesn't emit a
>> "pad-added"
>> signal. You can connect it directly to "audioconvert".
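>>
>> For example, the whole audio branch downstream of the decoder can be
>> linked statically (a sketch, using the element names from the program
>> below):
>>
>>   gst_element_link_many (decodera, convert, resample, sinka, NULL);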
>>
>> Also I see that you declare "queueV" as a "GstElement**" and not as a
>> "GstElement*".
>>
>>
>> Hi Sledge, thanks for your reply!
>>
>> I have already made the corresponding changes, but the signal that links
>> the video decoder to its queue is still not being triggered. When I execute
>> the application, this is the result:
>>
>> ** (testvideop:6043): DEBUG: Signal: pad-added
>> on_pad_added: audio/mpeg
>> ** (testvideop:6043): DEBUG: Linking audio pad to decodera
>> The audio pad src was linked
>> ** (testvideop:6043): DEBUG: Signal: pad-added
>> on_pad_added: video/mpeg
>> ** (testvideop:6043): DEBUG: Linking video pad to decoderv
>> The video pad src was linked
>>
>> There are no more errors, but the video and audio streams are not playing
>> simultaneously.
>>
>> Thanks in advance for the help,
>>
>> Best Regards,
>>
>> Alberto.
>>
>>
>>
>> #include <gst/gst.h>
>>
>> static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer
>> data);
>> static void on_pad_added (GstElement *element, GstPad *pad);
>> static void on_decpad_added(GstElement *element, GstPad *pad );
>>
>> GstElement *pipeline, *src, *demux, *decoderv, *sinkv, *decodera,
>> *convert,
>> *resample, *sinka;
>> GstElement *queueA, *queueV;
>>
>> int main(int argc, char *argv[])
>> {
>> GstStateChangeReturn ret;
>> GMainLoop *loop;
>> GstBus *bus;
>> /*initialization*/
>> gst_init(&argc,&argv);
>> loop = g_main_loop_new(NULL, FALSE);
>>
>> if(argc != 2)
>> {
>> g_print("Usage: %s <mpg/mpeg video file>", argv[0]);
>> return -1;
>> }
>>
>> pipeline = gst_pipeline_new("VIDEO PLAYER");
>> bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
>> gst_bus_add_watch(bus, bus_call, loop);
>>
>> src = gst_element_factory_make("filesrc", "filesource");
>> demux = gst_element_factory_make("dvddemux", "mpg-demux");
>>
>> /*Gstreamer video elements*/
>> decoderv = gst_element_factory_make("mpeg2dec", "mpeg-decoder");
>> sinkv = gst_element_factory_make("xvimagesink", "video-out");
>> if(!decoderv || !sinkv)
>> {
>> g_print("\nthe video could not playback\n");
>> return -1;
>> }
>>
>> /*Gstreamer audio elements*/
>>
>> decodera = gst_element_factory_make("mad", "decoder-audio");
>> convert = gst_element_factory_make("audioconvert", "a-convert");
>> resample = gst_element_factory_make("audioresample", "a-resample");
>> sinka = gst_element_factory_make("osssink", "play audio");
>> if(!decodera || !convert || !resample || !sinka)
>> {
>> g_print("\nthe audio could not playback\n");
>> return -1;
>> }
>>
>> queueA = gst_element_factory_make("queue", "queue-audio");
>> queueV = gst_element_factory_make("queue", "queue-video");
>>
>> g_object_set (G_OBJECT (src), "location", argv[1], NULL);
>> gst_bin_add_many(GST_BIN(pipeline), src, demux, decoderv,
>> queueV,sinkv,decodera,convert, resample, sinka, NULL);
>>
>> gst_element_link (src, demux);
>> gst_element_link (queueV, sinkv);
>> // gst_element_link (queueA, convert);
>> gst_element_link (decodera, convert);
>> gst_element_link (convert, resample);
>> gst_element_link (resample, sinka);
>>
>> g_signal_connect (demux, "pad-added", G_CALLBACK (on_pad_added), NULL);
>> //g_signal_connect (decodera, "pad-added", G_CALLBACK (on_decpad_added), decodera);
>> g_signal_connect (decoderv, "pad-added", G_CALLBACK (on_decpad_added), decoderv);
>>
>> /* run */
>> ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
>>
>> if (ret == GST_STATE_CHANGE_FAILURE)
>> {
>> GstMessage *msg;
>>
>> g_print ("Failed to start up pipeline!\n");
>>
>> /* check if there is an error message with details on the bus */
>> msg = gst_bus_poll (bus, GST_MESSAGE_ERROR, 0);
>> if (msg) {
>> GError *err = NULL;
>> g_print("\nDISPLAY ERROR:\n");
>> gst_message_parse_error (msg, &err, NULL);
>> g_print ("ERROR: %s\n", err->message);
>> g_error_free (err);
>> gst_message_unref (msg);
>> }
>> return -1;
>> }
>>
>> g_main_loop_run (loop);
>>
>> /* clean up */
>> gst_element_set_state (pipeline, GST_STATE_NULL);
>> gst_object_unref (pipeline);
>>
>> return 0;
>>
>> }
>>
>>
>> static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer
>> data)
>> {
>> GMainLoop *loop = data;
>>
>> switch (GST_MESSAGE_TYPE (msg)) {
>> case GST_MESSAGE_EOS:
>> g_print ("End-of-stream\n");
>> g_main_loop_quit (loop);
>> break;
>> case GST_MESSAGE_ERROR: {
>> gchar *debug = NULL;
>> GError *err = NULL;
>>
>> gst_message_parse_error (msg, &err, &debug);
>>
>> g_print ("Error: %s\n", err->message);
>> g_error_free (err);
>>
>> if (debug) {
>> g_print ("Debug details: %s\n", debug);
>> g_free (debug);
>> }
>>
>> g_main_loop_quit (loop);
>> break;
>> }
>> default:
>> break;
>> }
>>
>> return TRUE;
>> }
>>
>>
>> static void on_decpad_added(GstElement *element, GstPad *pad )
>> {
>> g_debug ("Signal: decoder pad-added");
>> GstCaps *caps;
>> GstStructure *str;
>> GstPad *targetsink;
>> caps = gst_pad_get_caps (pad);
>> g_assert (caps != NULL);
>> str = gst_caps_get_structure (caps, 0);
>> g_assert (str != NULL);
>>
>> g_debug ("Linking decoder to QUEUE pad ");
>> // Link it actually
>> //element = (element == decodera ? queueA : queueV);
>> targetsink = gst_element_get_pad (queueV, "sink");
>> g_assert (targetsink != NULL);
>> gst_pad_link (pad, targetsink);
>> if (gst_pad_is_linked(pad))
>> g_print("The decoder pad src was linked\n");
>> else
>> g_print("The stream %s pad src was NOT linked\n", gst_pad_get_name(pad));
>> gst_object_unref (targetsink);
>> gst_caps_unref (caps);
>> }
>>
>> static void on_pad_added (GstElement *element, GstPad *pad)
>> {
>> g_debug ("Signal: pad-added");
>> GstCaps *caps;
>> GstStructure *str;
>>
>> caps = gst_pad_get_caps (pad);
>> g_assert (caps != NULL);
>> str = gst_caps_get_structure (caps, 0);
>> g_assert (str != NULL);
>>
>> const gchar *c = gst_structure_get_name(str);
>> g_print("on_pad_added: %s\n",c);
>> if (g_strrstr (c, "video") || g_strrstr (c, "image")) {
>> g_debug ("Linking video pad to decoderv");
>> // Link it actually
>> GstPad *targetsink = gst_element_get_pad (decoderv, "sink");
>> g_assert (targetsink != NULL);
>> //g_print("\nVIDEO--pad source: %s\n", gst_pad_get_name(pad));
>> //g_print("\nVIDEO--pad sink: %s\n", gst_pad_get_name(targetsink));
>> gst_pad_link (pad, targetsink);
>> if (gst_pad_is_linked(pad))
>> g_print("The video pad src was linked\n");
>> gst_object_unref (targetsink);
>> }
>>
>> if (g_strrstr (c, "audio")) {
>> g_debug ("Linking audio pad to decodera");
>> // Link it actually
>> GstPad *targetsink = gst_element_get_pad (decodera, "sink");
>> g_assert (targetsink != NULL);
>> //g_print("\nAUDIO--pad source: %s\n", gst_pad_get_name(pad));
>> //g_print("\nAUDIO--pad sink: %s\n", gst_pad_get_name(targetsink));
>> gst_pad_link (pad, targetsink);
>> if (gst_pad_is_linked(pad))
>> g_print("The audio pad src was linked\n");
>> gst_object_unref (targetsink);
>> }
>>
>> gst_caps_unref (caps);
>> }
>>
>>
>>
>>