translating command to code for playing an avi

Rossana Guerra guerra.rossana at gmail.com
Mon Jan 2 22:21:32 PST 2012


Hi, I'm trying to play an AVI file. I know there's the playbin2 option, but I
want to gain more control and understand some issues as well.
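
For reference, the playbin2 route I'm setting aside would be roughly the
following (just a sketch, using the same file as a file:// URI); I want to
build the pipeline by hand instead:

  GstElement *play = gst_element_factory_make ("playbin2", "play");
  g_object_set (G_OBJECT (play), "uri", "file:///home/rossana/video11.avi", NULL);
  gst_element_set_state (play, GST_STATE_PLAYING);
  /* ...plus the usual GMainLoop and bus watch */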

I can play the AVI from the console with this command:

gst-launch filesrc location=/home/rossana/video11.avi ! avidemux
name=demux demux.audio_00 ! decodebin ! queue ! audioconvert !
audioresample ! autoaudiosink demux.video_00 ! decodebin ! queue !
ffmpegcolorspace ! videoscale ! autovideosink

I attempted to translate it into C/C++ code, but I got "caps incompatible"
errors. My attempt is pasted below, after my signature.
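
From what I understand, avidemux only creates its audio_00/video_00 source
pads once it starts parsing the file, so the usual pattern is to link them
from a "pad-added" callback on the demuxer rather than fetching them right
after the elements are created. A minimal sketch of that pattern (video_dec
and audio_dec are only placeholder names for the two decodebins, not
variables from my code):

#include <gst/gst.h>

static GstElement *video_dec, *audio_dec;   /* placeholders for the two decodebins */

static void on_demux_pad_added (GstElement *demux, GstPad *pad, gpointer user_data)
{
  /* avidemux names its source pads "video_00", "audio_00", ... */
  gchar *name = gst_pad_get_name (pad);
  GstElement *target = NULL;

  if (g_str_has_prefix (name, "video"))
    target = video_dec;
  else if (g_str_has_prefix (name, "audio"))
    target = audio_dec;

  if (target != NULL) {
    GstPad *sinkpad = gst_element_get_static_pad (target, "sink");
    if (gst_pad_link (pad, sinkpad) != GST_PAD_LINK_OK)
      g_printerr ("Failed to link demuxer pad %s\n", name);
    gst_object_unref (sinkpad);
  }
  g_free (name);
}

/* connected in main() with something like:
   g_signal_connect (demuxer, "pad-added", G_CALLBACK (on_demux_pad_added), NULL); */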

Thanks

Rossana


#include <gst/gst.h>
#include <iostream>

using std::cout;
using std::endl;

/* pad-added handler for the two decodebins; "data" is the queue that the
   newly decoded pad should be linked to */
void on_pad_added (GstElement *element, GstPad *pad, GstElement *data)
{
        g_debug ("Signal: pad-added");
        GstCaps *caps;
        GstStructure *str;

        caps = gst_pad_get_caps (pad);
        g_assert (caps != NULL);
        str = gst_caps_get_structure (caps, 0);
        g_assert (str != NULL);

        cout << "enlazando enlazara pads" << endl << endl;
        if (g_strrstr (gst_structure_get_name (str), "video")) {
                g_debug ("Linking video pad to dec_vd");
                // Link it actually
                //GstPad *targetsink = gst_element_get_pad (decvd, "sink");
                GstPad *targetsink = gst_element_get_pad (data, "sink");
                cout << "enlazando cola de video" << endl << endl;
                g_assert (targetsink != NULL);
                gst_pad_link (pad, targetsink);
                gst_object_unref (targetsink);
        }

        if (g_strrstr (gst_structure_get_name (str), "audio")) {
                g_debug ("Linking decoded audio pad to the audio queue");
                // Link it to the queue passed in as "data"
                //GstPad *targetsink = gst_element_get_pad (decad, "sink");
                GstPad *targetsink = gst_element_get_pad (data, "sink");
                cout << "linking audio queue" << endl << endl;
                g_assert (targetsink != NULL);
                gst_pad_link (pad, targetsink);
                gst_object_unref (targetsink);
        }

        gst_caps_unref (caps);
}
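
(bus_call is not pasted above; it is essentially the standard message handler
from the GStreamer hello-world example, something along these lines:)

static gboolean bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
  GMainLoop *loop = (GMainLoop *) data;

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
      g_print ("End of stream\n");
      g_main_loop_quit (loop);
      break;

    case GST_MESSAGE_ERROR: {
      gchar *debug;
      GError *error;

      gst_message_parse_error (msg, &error, &debug);
      g_free (debug);

      g_printerr ("Error: %s\n", error->message);
      g_error_free (error);

      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }

  return TRUE;
}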



int main (int argc, char *argv[])
{
  GMainLoop *loop;

  GstElement *pipeline, *source, *demuxer, *decvd, *decad;
  GstElement *aconvert, *asample, *color, *vscale;
  GstElement *vdqueue, *adqueue, *vdsink, *adsink;
  GstBus *bus;

  /* Initialisation */
  gst_init (&argc, &argv);

  loop = g_main_loop_new (NULL, FALSE);


  /* Check input arguments */
  if (argc != 2) {
    g_printerr ("Usage: %s <AVI filename>\n", argv[0]);
    return -1;
  }

  /* Create gstreamer elements */
  pipeline = gst_pipeline_new ("media-player");
  source = gst_element_factory_make ("filesrc", "file-source");
  demuxer = gst_element_factory_make ("avidemux", "avi-demuxer");
  decvd = gst_element_factory_make ("decodebin2", "decvd");
  decad = gst_element_factory_make ("decodebin2", "decad");
  aconvert = gst_element_factory_make ("audioconvert", "aconvert");
  asample = gst_element_factory_make ("audioresample", "asample");
  vdsink = gst_element_factory_make ("autovideosink", "video-sink");
  vdqueue = gst_element_factory_make ("queue", "video-queue");
  adqueue = gst_element_factory_make ("queue", "audio-queue");
  adsink = gst_element_factory_make ("autoaudiosink", "audio-sink");
  color = gst_element_factory_make("ffmpegcolorspace","color");
  vscale = gst_element_factory_make("videoscale","vscale");

  if (!pipeline || !source || !demuxer || !decvd || !decad || !aconvert ||
      !asample || !color || !vscale || !vdqueue || !adqueue || !vdsink || !adsink) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }

  /* Set up the pipeline */

  /* we set the input filename to the source element */
  g_object_set (G_OBJECT (source), "location", argv[1], NULL);  /* e.g. /home/videos/video11.avi */

  /* we add a message handler */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  /* we add all elements into the pipeline:
     file-source | avi-demuxer | decodebin + queue + converters | audio/video sinks */
  //GstElement *clk = gst_element_factory_make("clockoverlay","clk");


  gst_bin_add_many (GST_BIN (pipeline), source, demuxer, decvd, decad,
                    aconvert, vscale, asample, color, adqueue, vdqueue, vdsink, adsink, NULL);


  /* we link the elements together:
     file-source -> demuxer ~> {video-decoder -> video-output}
                            ~> {audio-decoder -> audio-output} */
  gst_element_link (source, demuxer);


  /* Grab the demuxer's stream pads directly. NOTE: avidemux creates
     "video_00" / "audio_00" dynamically, so these lookups return NULL
     if the demuxer has not parsed any data yet. */
  GstPad *targetsrc  = gst_element_get_pad (demuxer, "video_00");
  GstPad *targetsrc2 = gst_element_get_pad (demuxer, "audio_00");

  GstPad *padV = gst_element_get_static_pad (decvd, "sink");
  GstPad *padA = gst_element_get_static_pad (decad, "sink");

  gst_pad_link (targetsrc, padV);
  gst_object_unref (targetsrc);
  gst_object_unref (padV);


  g_signal_connect (decvd, "pad-added", G_CALLBACK (on_pad_added), vdqueue);
  cout << "decvd - vdqueue" << endl << endl;
  gst_element_link (vdqueue,color);
  gst_element_link (color,vscale);
  gst_element_link (vscale,vdsink);

  gst_pad_link (targetsrc2, padA);
  gst_object_unref (targetsrc2);
  gst_object_unref (padA);
  g_signal_connect (decad, "pad-added", G_CALLBACK (on_pad_added), adqueue);
  cout << "decad - adqueue" << endl << endl;
  gst_element_link (adqueue, aconvert);
  gst_element_link (aconvert, asample);
  gst_element_link (asample, adsink);



  /* Set the pipeline to "playing" state*/
  g_print ("Playing: %s\n", argv[1]);
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  GST_DEBUG_BIN_TO_DOT_FILE (GST_BIN (pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "playerdec1");
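  /* NOTE: the .dot file is only written if the GST_DEBUG_DUMP_DOT_DIR
     environment variable points to an existing directory. */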


  g_main_loop_run (loop);


  /* Out of the main loop, clean up nicely */
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (GST_OBJECT (pipeline));
  //g_print ("Deleting pipeline\n");


  return 0;
}