[gst-devel] Newbie help. Cannot output audio/video simultaneously (with DirectFB)

Danielkun danielkun at iremo.com
Tue Feb 26 14:38:04 CET 2008


Thijs Vermeir and Jason Gerard DeRose,

Thank you very much for your fast replies!
I finally got it to work! ;-)  
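
In case it helps someone else, the fix amounted to adding a queue per branch, as Thijs suggests below. In the code quoted further down it looks roughly like this (untested sketch from memory; queue_video/queue_audio are just the names I picked):

/* Create one queue per branch, add it to the corresponding bin,
 * and ghost-pad the queue's sink pad instead of the decoder's. */
GstElement *queue_video = gst_element_factory_make ("queue", NULL);
GstElement *queue_audio = gst_element_factory_make ("queue", NULL);

gst_bin_add (GST_BIN (bin_video), queue_video);
gst_bin_add (GST_BIN (bin_audio), queue_audio);

/* queue -> decoder inside each bin */
gst_element_link (queue_video, decoder_video);
gst_element_link (queue_audio, decoder_audio);

/* expose the queues' sink pads through the ghost pads */
pad_video = gst_element_get_pad (queue_video, "sink");
gst_element_add_pad (bin_video, gst_ghost_pad_new ("sink", pad_video));
gst_object_unref (GST_OBJECT (pad_video));

pad_audio = gst_element_get_pad (queue_audio, "sink");
gst_element_add_pad (bin_audio, gst_ghost_pad_new ("sink", pad_audio));
gst_object_unref (GST_OBJECT (pad_audio));

The demuxer pad-added handling stays the same, since the ghost pads still show up as "sink" on each bin.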

Daniel



> Hello,
> 
> You have to add a queue element after the demuxer on both the audio
> and the video branch.
> Your pipeline should look like this:
> gst-launch filesrc location=movie.mpg !  mpegdemux name=demuxer
> demuxer. ! queue ! mpeg2dec ! ffmpegcolorspace ! dfbvideosink
> demuxer. ! queue ! mad ! audioconvert ! alsasink
> 
> Gr,
> Thijs
> 
> On Tue, 2008-02-26 at 03:10 -0700, danielkun at iremo.com wrote:
> > Hello,
> >
> > I'm new to GStreamer and new to this mailing list, but I'm hoping
> > someone will try to help me ;)
> >
> > I'm trying to play back MPEG files using DirectFB. The code below does
> > the job, but it won't let me output video and audio at the same time.
> > It's either audio only or video only.
> >
> > What have I not understood completely?
> >
> > I put the source and the demuxer in the pipeline, made two bins for
> > audio/video, and added ghost pads.
> > Each branch seems to work well on its own, just not simultaneously.
> >
> > I would appreciate any help.
> >
> > Thanks,
> >
> > Daniel
> >
> >
> >
> > #include <string.h>
> > #include <directfb.h>
> > #include <gst/gst.h>
> >
> > static IDirectFB *dfb = NULL;
> > static IDirectFBSurface *primary = NULL;
> > static GMainLoop *loop;
> >
> > GstElement *pipeline, *bin_audio, *bin_video, *source, *parser,
> >            *decoder_audio, *decoder_video,
> >            *convert_audio, *convert_video,
> >            *sink_audio, *sink_video;
> >
> > GstPad *pad_video, *pad_audio;
> >
> > #define DFBCHECK(x...)                                         \
> >   {                                                            \
> >     DFBResult err = x;                                         \
> >                                                                \
> >     if (err != DFB_OK)                                         \
> >       {                                                        \
> >         fprintf( stderr, "%s <%d>:\n\t", __FILE__, __LINE__ ); \
> >         DirectFBErrorFatal( #x, err );                         \
> >       }                                                        \
> >   }
> >
> > static gboolean
> > get_me_out (gpointer data)
> > {
> >   g_main_loop_quit (loop);
> >   return FALSE;
> > }
> >
> >
> > static void
> > new_pad (GstElement *element,
> > 		 GstPad     *pad,	//src
> > 	 	 gpointer    data)
> > {
> >
> >   gchar *name;
> >   name = gst_pad_get_name (pad);
> >
> >   if ( NULL != strstr(name, "video"))
> >   {
> >   	GstPad *sinkpad;	//sink
> >   	g_print ("Dynamic pad created, linking parser/decoder '%s'\n",
> >   	         name);
> >   	sinkpad = gst_element_get_pad (bin_video, "sink");
> >   	gst_pad_link (pad, sinkpad);
> >   	gst_object_unref (sinkpad);
> >   }
> >
> >   if ( NULL != strstr(name, "audio"))
> >   {
> >   	GstPad *sinkpad;	//sink
> >   	g_print ("Dynamic pad created, linking parser/decoder '%s'\n",
> >   	         name);
> > 	sinkpad = gst_element_get_pad (bin_audio, "sink");
> >   	gst_pad_link (pad, sinkpad);
> >   	gst_object_unref (sinkpad);
> >   }
> >
> > }
> >
> > int
> > main (int argc, char *argv[])
> > {
> >   DFBSurfaceDescription dsc;
> >   GstBus *bus;
> >
> >
> >   /* Init both GStreamer and DirectFB */
> >   DFBCHECK (DirectFBInit (&argc, &argv));
> >   gst_init (&argc, &argv);
> >
> >   loop = g_main_loop_new (NULL, FALSE);
> >
> >   /* Create the DirectFB main context and set it to fullscreen */
> >   DFBCHECK (DirectFBCreate (&dfb));
> >   DFBCHECK (dfb->SetCooperativeLevel (dfb, DFSCL_FULLSCREEN));
> >
> >   /* We want a double buffered primary surface */
> >   dsc.flags = DSDESC_CAPS;
> >   dsc.caps = DSCAPS_PRIMARY | DSCAPS_FLIPPING;
> >
> >   DFBCHECK (dfb->CreateSurface (dfb, &dsc, &primary));
> >
> >
> >   // create elements
> >   pipeline = gst_pipeline_new (NULL);
> >   bin_audio = gst_bin_new (NULL);
> >   bin_video = gst_bin_new (NULL);
> >
> >   source = gst_element_factory_make ("filesrc", NULL);     // videotestsrc
> >
> >   parser = gst_element_factory_make ("mpegdemux", NULL);   // dvddemux, mpegparse, mpegvideoparse
> >
> >   decoder_audio = gst_element_factory_make ("mad", NULL);
> >   decoder_video = gst_element_factory_make ("mpeg2dec", NULL);
> >
> >   convert_audio = gst_element_factory_make ("audioconvert", NULL);
> >   convert_video = gst_element_factory_make("ffmpegcolorspace", NULL);
> >
> >   sink_audio = gst_element_factory_make ("alsasink", NULL);
> >   sink_video = gst_element_factory_make ("dfbvideosink", NULL);
> >
> >
> >   // That's the interesting part: giving the primary surface to dfbvideosink
> >   g_object_set (sink_video, "surface", primary, NULL);
> >
> >   // set filename property on the file source
> >   g_object_set (G_OBJECT (source), "location", argv[1], NULL);
> >
> >   // add source and parser
> >   gst_bin_add_many (GST_BIN (pipeline), source, parser, NULL);
> >
> >   // put all elements in a bin
> >   gst_bin_add_many (GST_BIN (bin_video), decoder_video, convert_video,
> >                     sink_video, NULL);
> >   gst_bin_add_many (GST_BIN (bin_audio), decoder_audio, convert_audio,
> >                     sink_audio, NULL);
> >
> >
> >   // add ghostpad to audio
> >   pad_audio = gst_element_get_pad (decoder_audio, "sink");
> >   gst_element_add_pad (bin_audio, gst_ghost_pad_new ("sink", pad_audio));
> >   gst_object_unref (GST_OBJECT (pad_audio));
> >
> >   // add ghostpad to video
> >   pad_video = gst_element_get_pad (decoder_video, "sink");
> >   gst_element_add_pad (bin_video, gst_ghost_pad_new ("sink", pad_video));
> >   gst_object_unref (GST_OBJECT (pad_video));
> >
> >
> >   // add bins to pipeline
> >   gst_bin_add_many (GST_BIN (pipeline), bin_video, bin_audio, NULL);
> >
> >   // link together - note that we cannot link the parser and decoder yet
> >   gst_element_link (source, parser);
> >   gst_element_link_many (decoder_video, convert_video, sink_video, NULL);
> >   gst_element_link_many (decoder_audio, convert_audio, sink_audio, NULL);
> >
> >   g_signal_connect (parser, "pad-added", G_CALLBACK (new_pad), NULL);
> >
> >
> >   // Now set to playing and iterate.
> >   g_print ("Setting to PLAYING\n");
> >   gst_element_set_state (pipeline, GST_STATE_PLAYING);
> >
> >   g_print ("Running\n");
> >
> >   // Get us out after 5 seconds
> >   g_timeout_add (5000, get_me_out, NULL);
> >   g_main_loop_run (loop);
> >
> >
> >   // Release elements and stop playback
> >   gst_element_set_state (pipeline, GST_STATE_NULL);
> >
> >
> >   // Free the main loop
> >   g_main_loop_unref (loop);
> >
> >   // clean up nicely
> >   g_print ("Returned, stopping playback\n");
> >   gst_element_set_state (pipeline, GST_STATE_NULL);
> >
> >   g_print ("Deleting pipeline\n");
> >   gst_object_unref (GST_OBJECT (pipeline));
> >
> >   /* Release DirectFB context and surface */
> >   primary->Release (primary);
> >   dfb->Release (dfb);
> >
> >   return 0;
> > }
> >
> >
> >