[gst-devel] gstreamer application

Sameer Naik sameer.subscriptions at damagehead.com
Thu Nov 26 17:38:02 CET 2009


Hi,
you should add the audio and video processing parts in separate "bin"
elements. i.e.
create a bin called say "abin" with the elements "decad", "adqueue",
"adsink" and another bin called say "vbin" with the elements "decvd",
"vdqueue", "vdsink" and add these two bins in the pipeline and link
them correctly.

Normally, in an application you would have to register a callback
handler on the "pad-added" signal from the demux plugin and set up the
rest of the pipeline from there on.

The "tests/examples" folder of the various gstreamer packages
(core,base,good,bad,ugly) are an indispensable resource to better
understand gstreamer application development and provide a good
overview on how things are generally done. Adding fire and ice from
there on is entirely up to you.

Regards
~Sameer

On Thu, Nov 26, 2009 at 9:52 PM, Kapil Agrawal <kapil.agl at gmail.com> wrote:
>
> you should be adding respective queue before the audio and video decoder
> (essentially after the demuxer), and in the on_pad_added connect to the
> sinkpad of those queue.
>
> On Thu, Nov 26, 2009 at 9:19 PM, Sedji Gaouaou <sedji.gaouaou at atmel.com>
> wrote:
>>
>> Hi all,
>>
>> I am a newbie with gstreamer, and I am currently writing an application
>> which will play a .avi file.
>> But I have a problem when trying to play both audio and video.
>>
>> Here is my application:
>> In the main, if I comment out the audio (gst_bin_add_many and
>> gst_element_link_many) then the video works fine. The same thing happens if
>> I comment out the video part — the audio plays just fine. But when trying
>> both, the video doesn't even start??
>>
>> Am I missing something?
>>
>> Regards,
>> Sedji
>>
>>
>> #include <gst/gst.h>
>> #include <glib.h>
>> #include <string.h>
>>
>>
>> static GstElement *source, *demuxer, *vdqueue, *adqueue, *vdsink,
>> *adsink, *decvd, *decad;
>>
>> void on_pad_added (GstElement *element, GstPad *pad)
>> {
>>        g_debug ("Signal: pad-added");
>>        GstCaps *caps;
>>        GstStructure *str;
>>
>>        caps = gst_pad_get_caps (pad);
>>        g_assert (caps != NULL);
>>        str = gst_caps_get_structure (caps, 0);
>>        g_assert (str != NULL);
>>
>>        if (g_strrstr (gst_structure_get_name (str), "video")) {
>>                g_debug ("Linking video pad to dec_vd");
>>                // Link it actually
>>                GstPad *targetsink = gst_element_get_pad (decvd, "sink");
>>                g_assert (targetsink != NULL);
>>                gst_pad_link (pad, targetsink);
>>                gst_object_unref (targetsink);
>>        }
>>
>>        if (g_strrstr (gst_structure_get_name (str), "audio")) {
>>                g_debug ("Linking audio pad to dec_ad");
>>                // Link it actually
>>                GstPad *targetsink = gst_element_get_pad (decad, "sink");
>>                g_assert (targetsink != NULL);
>>                gst_pad_link (pad, targetsink);
>>                gst_object_unref (targetsink);
>>        }
>>
>>        gst_caps_unref (caps);
>> }
>>
>> static gboolean
>> bus_call (GstBus     *bus,
>>           GstMessage *msg,
>>           gpointer    data)
>> {
>>        GMainLoop *loop = (GMainLoop *) data;
>>
>>        switch (GST_MESSAGE_TYPE (msg)) {
>>                case GST_MESSAGE_EOS:
>>                        g_print ("End of stream\n");
>>                        g_main_loop_quit (loop);
>>                        break;
>>                case GST_MESSAGE_ERROR: {
>>                        gchar  *debug;
>>                        GError *error;
>>
>>                        gst_message_parse_error (msg, &error, &debug);
>>                        g_free (debug);
>>
>>                        g_printerr ("Error: %s\n", error->message);
>>                        g_error_free (error);
>>
>>                        g_main_loop_quit (loop);
>>                        break;
>>                }
>>                default:
>>                        break;
>>        }
>>
>>        return TRUE;
>> }
>>
>> int
>> main (int   argc,
>>       char *argv[])
>> {
>>        GMainLoop *loop;
>>
>>        GstElement *pipeline;
>>        GstBus *bus;
>>
>>        /* Initialisation */
>>        gst_init (&argc, &argv);
>>
>>        loop = g_main_loop_new (NULL, FALSE);
>>
>>
>>        /* Check input arguments */
>>        if (argc != 2) {
>>                g_printerr ("Usage: %s <Video H264 filename>\n", argv[0]);
>>                return -1;
>>        }
>>
>>        /* Create gstreamer elements */
>>        pipeline        = gst_pipeline_new ("media-player");
>>        source          = gst_element_factory_make
>> ("filesrc","file-source");
>>        demuxer         = gst_element_factory_make
>> ("avidemux","avi-demuxer");
>>        decvd           = gst_element_factory_make ("x170",
>> "video-decoder");
>>        decad           = gst_element_factory_make ("mad", "mp3-decoder");
>>        vdsink          = gst_element_factory_make ("ximagesink",
>> "video-sink");
>>        vdqueue         = gst_element_factory_make ("multiqueue",
>> "video-queue");
>>        adqueue         = gst_element_factory_make ("multiqueue",
>> "audio-queue");
>>        adsink          = gst_element_factory_make ("osssink",
>> "audio-sink");
>>
>>        /* if needed to be set to MPEG4(value 4), default is AUTO */
>>        //g_object_set (decvd, "codec", 4, NULL);
>>        /* output format: RGB16 (i.e 2) */
>>        g_object_set (decvd, "output", 2, NULL);
>>        /* Scaling value */
>>        g_object_set (decvd, "scaling", 1.0, NULL);
>>        /* Threshold of the VDEC to sync audio and video */
>>        g_object_set (decvd, "inbuf-thresh", 100000, NULL);
>>
>>        if (!pipeline || !source || !demuxer || !decvd || !vdsink ||
>> !vdqueue
>> || !decad || !adqueue || !adsink) {
>>                g_printerr ("One element could not be created.
>> Exiting.\n");
>>                return -1;
>>        }
>>
>>        /* Set up the pipeline */
>>
>>        /* we set the input filename to the source element */
>>        g_object_set (G_OBJECT (source), "location", argv[1], NULL);
>>
>>        /* we add a message handler */
>>        bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
>>        gst_bus_add_watch (bus, bus_call, loop);
>>        gst_object_unref (bus);
>>
>>        /* we add all elements into the pipeline */
>>        /* file-source | avi-demuxer |  x170-decoder | ximagesink
>>         *                           | mad | audioconvert | osssink*/
>>
>>        /* demux only */
>>        gst_bin_add_many (GST_BIN (pipeline),
>>                     source, demuxer, NULL);
>>
>>        /* video only */
>>        gst_bin_add_many (GST_BIN (pipeline),
>>                     decvd, vdqueue, vdsink, NULL);
>>
>>        /* audio only */
>>        gst_bin_add_many (GST_BIN (pipeline),
>>                     decad, adqueue, adsink, NULL);
>>
>>        /* we link the elements together */
>>        /* file-source -> demuxer ~> decoder -> image sink */
>>        gst_element_link (source, demuxer);
>>
>>        if(!gst_element_link_many(decvd, vdqueue, vdsink, NULL)) {
>>                printf("problem linking video elements!!!!!\n");
>>                return FALSE;
>>        }
>>
>>        if(!gst_element_link_many( decad, adqueue, adsink, NULL)) {
>>                printf("problem linking audio elements!!!!!\n");
>>                return FALSE;
>>        }
>>
>>        gst_element_link_pads (demuxer, "video", vdqueue, "sink");
>>        gst_element_link_pads (demuxer, "audio", adqueue, "sink");
>>
>>        g_signal_connect (demuxer, "pad-added", G_CALLBACK (on_pad_added),
>> NULL);
>>
>>        /* note that the demuxer will be linked to the decoder dynamically.
>>        The reason is that Ogg may contain various streams (for example
>>        audio and video). The source pad(s) will be created at run time,
>>        by the demuxer when it detects the amount and nature of streams.
>>        Therefore we connect a callback function which will be executed
>>        when the "pad-added" is emitted.*/
>>
>>        /* Set the pipeline to "playing" state*/
>>        g_print ("Now playing: %s\n", argv[1]);
>>        gst_element_set_state (pipeline, GST_STATE_PLAYING);
>>
>>
>>        /* Iterate */
>>        g_print ("Running...\n");
>>        g_main_loop_run (loop);
>>
>>
>>        /* Out of the main loop, clean up nicely */
>>        g_print ("Returned, stopping playback\n");
>>        gst_element_set_state (pipeline, GST_STATE_NULL);
>>
>>        g_print ("Deleting pipeline\n");
>>        gst_object_unref (GST_OBJECT (pipeline));
>>
>>        return 0;
>> }
>>
>>
>>
>> ------------------------------------------------------------------------------
>> Let Crystal Reports handle the reporting - Free Crystal Reports 2008
>> 30-Day
>> trial. Simplify your report design, integration and deployment - and focus
>> on
>> what you do best, core application coding. Discover what's new with
>> Crystal Reports now.  http://p.sf.net/sfu/bobj-july
>> _______________________________________________
>> gstreamer-devel mailing list
>> gstreamer-devel at lists.sourceforge.net
>> https://lists.sourceforge.net/lists/listinfo/gstreamer-devel
>
>
>
> --
> http://www.linkedin.com/in/kapilagrawal
>
> ------------------------------------------------------------------------------
> Let Crystal Reports handle the reporting - Free Crystal Reports 2008 30-Day
> trial. Simplify your report design, integration and deployment - and focus
> on
> what you do best, core application coding. Discover what's new with
> Crystal Reports now.  http://p.sf.net/sfu/bobj-july
> _______________________________________________
> gstreamer-devel mailing list
> gstreamer-devel at lists.sourceforge.net
> https://lists.sourceforge.net/lists/listinfo/gstreamer-devel
>
>




More information about the gstreamer-devel mailing list