[gst-devel] Fwd: Hiiii

Ronald S. Bultje rbultje at ronald.bitfreak.net
Fri Jul 13 17:01:27 CEST 2007


can anyone help this guy?

Ronald

---------- Forwarded message ----------
From: Madhu TS <madhu at mobisy.com>
Date: Jul 13, 2007 10:59 AM
Subject: Hiiii
To: rbultje at ronald.bitfreak.net

Hi,
         I am not able to use the decodebin plugin to play back audio and video
in sync. Below is the piece of code I am trying to run; please help me.
#include <gst/gst.h>
/* global pipeline and the audio/video output bins, used from the new-decoded-pad callbacks */
GstElement *pipeline, *video, *audio;
static gboolean my_bus_callback(GstBus *bus,GstMessage *msg,gpointer data)
{
   GMainLoop *loop = data;

   switch (GST_MESSAGE_TYPE (msg))
   {
      case GST_MESSAGE_EOS:
         g_print ("End-of-stream\n");
         g_main_loop_quit (loop);
         break;
      case GST_MESSAGE_ERROR:
      {
         gchar *debug = NULL;
         GError *err = NULL;

         gst_message_parse_error (msg, &err, &debug);
         g_print ("Error: %s\n", err->message);
         g_free (debug);
         g_error_free (err);
         g_main_loop_quit (loop);
         break;
      }
      default:
         break;
   }

    return(TRUE);
}

void cb_newpad_video (GstElement *decodebin, GstPad *pad, gboolean last,
    gpointer data)
{
  GstCaps *caps;
  GstStructure *str;
  GstPad *videopad;

  /* only link once; "vsink" is the ghost pad added to the video bin in main() */
  videopad = gst_element_get_pad (video, "vsink");
  if (GST_PAD_IS_LINKED (videopad))
  {
    gst_object_unref (videopad);
    return;
  }
  /* check media type */
  caps = gst_pad_get_caps (pad);
  str = gst_caps_get_structure (caps, 0);
  if (!g_strrstr (gst_structure_get_name (str), "video"))
  {
    gst_caps_unref (caps);
    gst_object_unref (videopad);
    return;
  }
  gst_caps_unref (caps);
  /* link'n'play */
  gst_pad_link (pad, videopad);
  gst_object_unref (videopad);
}

void cb_newpad_audio (GstElement *decodebin, GstPad *pad, gboolean last,
    gpointer data)
{
  GstCaps *caps;
  GstStructure *str;
  GstPad *audiopad;

  /* only link once; "asink" is the ghost pad added to the audio bin in main() */
  audiopad = gst_element_get_pad (audio, "asink");
  if (GST_PAD_IS_LINKED (audiopad))
  {
    gst_object_unref (audiopad);
    return;
  }
  /* check media type */
  caps = gst_pad_get_caps (pad);
  str = gst_caps_get_structure (caps, 0);
  if (!g_strrstr (gst_structure_get_name (str), "audio"))
  {
    gst_caps_unref (caps);
    gst_object_unref (audiopad);
    return;
  }
  gst_caps_unref (caps);
  /* link'n'play */
  gst_pad_link (pad, audiopad);
  gst_object_unref (audiopad);
}


gint main (gint argc,gchar *argv[])
{
  GMainLoop *loop;
  GstBus *bus;
  GstElement *src, *dec, *csp,
      *sink1, *sink2, *aconv, *vqueue, *aqueue, *aresample;
  GstPad *videopad, *audiopad;
  /* init GStreamer */
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);
  /* make sure we have input */
  if (argc != 2)
  {
    g_print ("Usage: %s <filename>\n", argv[0]);
    return -1;
  }
  /* setup */
  pipeline = gst_pipeline_new ("pipeline");
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, my_bus_callback, loop);
  gst_object_unref (bus);
  src = gst_element_factory_make ("filesrc", "source");
  g_object_set (G_OBJECT (src), "location", argv[1], NULL);
  dec = gst_element_factory_make ("decodebin", "decoder");
  g_signal_connect (dec, "new-decoded-pad", G_CALLBACK (cb_newpad_video),
NULL);
  g_signal_connect (dec, "new-decoded-pad", G_CALLBACK (cb_newpad_audio),
NULL);
  gst_bin_add_many (GST_BIN (pipeline), src, dec, NULL);
  gst_element_link (src, dec);
  /* create video output */
  video = gst_bin_new ("videobin");
  vqueue=gst_element_factory_make ("vqueue", "vqueue");
  csp = gst_element_factory_make ("ffmpegcolorspace", "csp");
  videopad = gst_element_get_pad (vqueue, "sink");
  sink1 = gst_element_factory_make ("xvimagesink", "sink1");
  gst_bin_add_many (GST_BIN (video),vqueue,csp, sink1, NULL);
  gst_element_link_many (vqueue, csp, sink1, NULL);
  gst_element_add_pad (video,gst_ghost_pad_new ("vsink",videopad));
  gst_object_unref (videopad);
  gst_bin_add (GST_BIN (pipeline), video);
  /*Create Audio Output*/
  audio = gst_bin_new ("audiobin");
  aqueue=gst_element_factory_make ("queue", "aqueue");
  aconv = gst_element_factory_make ("audioconvert","aconv");
  audiopad = gst_element_get_pad (aqueue, "sink");
  aresample = gst_element_factory_make ("audioresample","aresample");
  sink2 = gst_element_factory_make ("alsasink", "sink2");
  gst_bin_add_many (GST_BIN (audio),aqueue,aconv,aresample,sink2, NULL);
  gst_element_link_many (aqueue, aconv, aresample, sink2, NULL);
  gst_element_add_pad (audio,gst_ghost_pad_new ("asink",audiopad));
  gst_object_unref (audiopad);
  gst_bin_add (GST_BIN (pipeline), audio);
  /* run */
  gst_element_set_state (pipeline,GST_STATE_PLAYING);
  g_print ("Running ...\n");
  g_main_loop_run (loop);
  /* cleanup */
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (GST_OBJECT (pipeline));
  return 0;
}
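
As I understand it, the two callbacks could also be collapsed into a single
new-decoded-pad handler that inspects the caps once and routes each pad to the
matching bin. Here is an untested sketch of that idea (it assumes the same
global audio/video bins and the ghost-pad names "asink"/"vsink" from the code
above, on the GStreamer 0.10 API):

static void cb_newpad (GstElement *decodebin, GstPad *pad, gboolean last,
    gpointer data)
{
  GstCaps *caps;
  GstStructure *str;
  const gchar *name;
  GstPad *sinkpad = NULL;

  /* inspect the media type of the newly decoded pad */
  caps = gst_pad_get_caps (pad);
  str = gst_caps_get_structure (caps, 0);
  name = gst_structure_get_name (str);

  if (g_str_has_prefix (name, "video"))
    sinkpad = gst_element_get_pad (video, "vsink");  /* video bin's ghost pad */
  else if (g_str_has_prefix (name, "audio"))
    sinkpad = gst_element_get_pad (audio, "asink");  /* audio bin's ghost pad */

  gst_caps_unref (caps);

  if (sinkpad == NULL)
    return;

  /* only link once */
  if (!GST_PAD_IS_LINKED (sinkpad))
    gst_pad_link (pad, sinkpad);

  gst_object_unref (sinkpad);
}

This would be connected with a single
g_signal_connect (dec, "new-decoded-pad", G_CALLBACK (cb_newpad), NULL);
instead of the two connects in main() above.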


Looking forward to your help.

Cheers,
Madhu.T.S