[gst-devel] Wim and Jyoti's replies for the problem of sending out video through the RTP protocol

xuxin04072129 xuxin04072129 at 126.com
Thu Jun 4 06:30:11 CEST 2009


Hi Wim and Jyoti:
    Thank you for your replies. I followed your suggestions and rewrote the code, but it still does not work. I get the following error:
    Running...
    catch video in decodebin
    Dynamic pad created, linking decoderbin/queue
    catch audio in decodebin
    Dynamic pad created, linking decoderbin/queue
    Error: internal data flow error
    Returned, stopping playback
    Deleting pipeline
Please give me some further suggestions; I would be very grateful.

    By the way, I found some source code on the net whose author did not use a callback. Instead, the code does the following:

    send_rtp_sink0 = gst_element_get_request_pad(rtpbin, "send_rtp_sink_0");
    send_rtp_src0 = gst_element_get_static_pad(rtpbin, "send_rtp_src_0");

The reason given is that the reference documentation describes the "send_rtp_src_%d" pad as being created automatically when a request for "send_rtp_sink_%d" is made.

    I have tried this method as well, but it also failed.
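    For reference, this is roughly what I tried for the video branch, using the element variables from my program below (only a sketch; the pad variable names are mine):

    GstPad *bin_src, *rtp_sink, *rtp_src, *udp_sink;

    /* requesting send_rtp_sink_0 should make gstrtpbin create send_rtp_src_0 */
    rtp_sink = gst_element_get_request_pad (gstrtpbin, "send_rtp_sink_0");
    bin_src  = gst_element_get_static_pad (video_bin, "src");
    gst_pad_link (bin_src, rtp_sink);            /* payloader bin -> rtpbin */

    rtp_src  = gst_element_get_static_pad (gstrtpbin, "send_rtp_src_0");
    udp_sink = gst_element_get_static_pad (udpsink0, "sink");
    gst_pad_link (rtp_src, udp_sink);            /* rtpbin -> udpsink */

    gst_object_unref (bin_src);
    gst_object_unref (rtp_src);
    gst_object_unref (udp_sink);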

#include <gst/gst.h>
#include <glib.h>
#include <unistd.h>
#include <stdlib.h>

// global declaration
GstElement *pipeline,*udpsink0,*udpsink1;
GstElement *audio_bin,*video_bin;

//
gboolean res;//element link 
GstPadLinkReturn lres;//pad link

//bus
static gboolean
bus_call (GstBus     *bus,
          GstMessage *msg,
          gpointer    data)
{
  GMainLoop *loop = (GMainLoop *) data;
  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
      g_print ("End of stream\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_ERROR: {
      gchar *debug;
      GError *error;
      gst_message_parse_error (msg, &error, &debug);
      g_free (debug);
      g_printerr ("Error: %s\n", error->message);
      g_error_free (error);
      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }
  return TRUE;
}

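/* Callback for gstrtpbin's "pad-added" signal: when a new source pad
 * appears, link it to udpsink0 (audio) or udpsink1 (video). */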
static void on_pad_added_rtpbin(GstElement *element, GstPad *pad, gpointer data)
{
    GstCaps *caps=NULL;
    GstStructure *str=NULL;
    GstPad *t_pad=NULL;
    
    g_assert(pad!=NULL);
    caps=gst_pad_get_caps(pad);
    str=gst_caps_get_structure(caps,0);
    
    // check media type: audio or video
    if (g_strrstr (gst_structure_get_name (str), "audio"))
    {
        g_print ("rtp catch audio\n");
        t_pad = gst_element_get_static_pad (udpsink0, "sink");
        g_assert (t_pad != NULL);
    }
    else if (g_strrstr (gst_structure_get_name (str), "video"))
    {
        g_print ("rtp catch video\n");
        t_pad = gst_element_get_static_pad (udpsink1, "sink");
        g_assert (t_pad != NULL);
    }
    else
    {
        gst_caps_unref (caps);
        return;
    }

    if (GST_PAD_IS_LINKED (t_pad))
    {
        gst_caps_unref (caps);
        g_object_unref (t_pad);
        return;
    }
    else
    {
        lres = gst_pad_link (pad, t_pad);
        g_assert (lres == GST_PAD_LINK_OK);
        g_print ("Dynamic pad created, linking rtpbin/udp\n");

        gst_caps_unref (caps);
        g_object_unref (t_pad);
        return;
    }
}

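/* Callback for decodebin's "new-decoded-pad" signal: route decoded
 * audio pads to audio_bin and decoded video pads to video_bin. */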
static void on_pad_added_decodebin(GstElement *decodebin,GstPad *pad,gboolean last,gpointer data)
{
    GstCaps *caps;
    GstStructure *str;
    GstPad *t_pad;

    // check media type: audio or video
    caps = gst_pad_get_caps (pad);
    str = gst_caps_get_structure (caps, 0);

    if (g_strrstr (gst_structure_get_name (str), "audio"))
    {
        g_print ("catch audio in decodebin\n");
        t_pad = gst_element_get_static_pad (audio_bin, "sink");
        g_assert (t_pad != NULL);
        if (t_pad == NULL)
            g_print ("cannot get the sink pad of audiobin ");
    }
    else if (g_strrstr (gst_structure_get_name (str), "video"))
    {
        g_print ("catch video in decodebin\n");
        t_pad = gst_element_get_static_pad (video_bin, "sink");
        g_assert (t_pad != NULL);
        if (t_pad == NULL)
            g_print ("cannot get the sink pad of videobin ");
    }
    else
    {
        gst_caps_unref (caps);
        return;
    }

    if (GST_PAD_IS_LINKED (t_pad))
    {
        gst_caps_unref (caps);
        g_object_unref (t_pad);
        return;
    }
    else
    {
        gst_pad_link (pad, t_pad);
        g_print ("Dynamic pad created, linking decoderbin/queue\n");

        gst_caps_unref (caps);
        g_object_unref (t_pad);
        return;
    }
    
}

int main(int argc, char **argv)
{
    GMainLoop *loop;
    GstBus *bus;
    
    //pad of rtpbin
    GstPad *sink_pad;    
    GstPad *bin_pad;    
    
    GstPad *videopad_src,*videopad_sink;
    GstPad *audiopad_src,*audiopad_sink;
    //declare elements 
    GstElement  *filesrc,*decodebin,*gstrtpbin;
    //element for video
    GstElement *queue1,*x264enc,*rtph264pay;
    //element for audio
    GstElement *queue2,*audioresample,*audioconvert,*alawenc,*rtppcmapay;
    
    gst_init(&argc, &argv);
    loop = g_main_loop_new(NULL, FALSE);
    
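    /* Pipeline layout built below:
     *   filesrc -> decodebin (dynamic pads handled in on_pad_added_decodebin)
     *   video_bin: queue1 -> x264enc -> rtph264pay
     *   audio_bin: queue2 -> audioresample -> audioconvert -> alawenc -> rtppcmapay
     * Each bin's src ghost pad is linked to a request pad on gstrtpbin,
     * and gstrtpbin's newly created source pads are linked to udpsink0/udpsink1
     * in on_pad_added_rtpbin. */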
    //generate elements
    pipeline = gst_pipeline_new("server");
    filesrc=gst_element_factory_make("filesrc", "filesrc");
    decodebin=gst_element_factory_make("decodebin", "decodebin");
    gstrtpbin = gst_element_factory_make("gstrtpbin", "gstrtpbin");
    //for video
    queue1=gst_element_factory_make("queue", "queue1");
    x264enc=gst_element_factory_make("x264enc", "x264enc");
    rtph264pay=gst_element_factory_make("rtph264pay", "rtph264pay");
    udpsink0=gst_element_factory_make("udpsink", "udpsink0");
    //for audio
    queue2 = gst_element_factory_make("queue", "queue2");
    audioresample = gst_element_factory_make("audioresample", "audioresample");
    audioconvert = gst_element_factory_make("audioconvert", "audioconvert");
    alawenc = gst_element_factory_make("alawenc", "alawenc");    
    rtppcmapay = gst_element_factory_make("rtppcmapay", "rtppcmapay");
    udpsink1=gst_element_factory_make("udpsink", "udpsink1");
    
    bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
    gst_bus_add_watch(bus, bus_call, loop);
    gst_object_unref(bus);
    
    //set properties of elements
    g_object_set(G_OBJECT(filesrc), "location","/home/xuxin/desktop/g_p/a.avi", NULL);    
    g_object_set(G_OBJECT(udpsink0), "port",5000, NULL);
    g_object_set(G_OBJECT(udpsink1), "port",5002, NULL);
    g_object_set(G_OBJECT(udpsink0), "host","172.21.29.169", NULL);
    g_object_set(G_OBJECT(udpsink1), "host","172.21.29.169", NULL);
    g_object_set(G_OBJECT(udpsink0), "sync",FALSE, NULL);
    g_object_set(G_OBJECT(udpsink1), "sync",FALSE, NULL);
    
    //create video_bin
    video_bin = gst_bin_new ("videobin");
    gst_bin_add_many (GST_BIN (video_bin),queue1,x264enc,rtph264pay,NULL);
    res=gst_element_link_many(queue1,x264enc,rtph264pay,NULL);
    g_assert (res == TRUE);
    //add pad to video_bin
    videopad_sink = gst_element_get_static_pad (queue1, "sink");
    videopad_src= gst_element_get_static_pad (rtph264pay, "src");    
    gst_element_add_pad (video_bin,gst_ghost_pad_new ("sink", videopad_sink));
    gst_element_add_pad (video_bin,gst_ghost_pad_new ("src",videopad_src));
    gst_object_unref (videopad_sink);
    gst_object_unref (videopad_src);

    //create audio_bin
    audio_bin = gst_bin_new ("audiobin");
    gst_bin_add_many (GST_BIN (audio_bin),queue2,audioresample,audioconvert,alawenc,rtppcmapay,NULL);
    res=gst_element_link_many(queue2,audioresample,audioconvert,alawenc,rtppcmapay,NULL);
    g_assert (res == TRUE);
    //add pad to audio_bin
    audiopad_sink = gst_element_get_static_pad (queue2, "sink");
    audiopad_src= gst_element_get_static_pad (rtppcmapay, "src");
    res=gst_element_add_pad (audio_bin,gst_ghost_pad_new ("sink", audiopad_sink));
    g_assert (res == TRUE);
    res=gst_element_add_pad (audio_bin,gst_ghost_pad_new ("src", audiopad_src));
    g_assert (res == TRUE);
    gst_object_unref (audiopad_sink);
    gst_object_unref (audiopad_src);
    
    //add elements into pipeline
    gst_bin_add_many(GST_BIN(pipeline),
            filesrc,decodebin,audio_bin,video_bin,gstrtpbin,udpsink0,udpsink1,NULL);
    
    //static link filesrc and decoderbin 
    res=gst_element_link(filesrc,decodebin);    
    g_assert (res == TRUE);    
    
    //get request pad from gstrtpbin and connect with video_bin
    bin_pad=gst_element_get_pad(video_bin, "src");
    g_assert(bin_pad!=NULL);    
    sink_pad = gst_element_get_request_pad(gstrtpbin, "recv_rtp_sink_0");
    g_assert(sink_pad!=NULL);    
    lres=gst_pad_link(bin_pad, sink_pad);
    g_assert (lres == GST_PAD_LINK_OK);
    
    
    //get request pad from gstrtpbin and connect with audio_bin
    bin_pad=gst_element_get_pad(audio_bin, "src");
    g_assert(bin_pad!=NULL);
    sink_pad = gst_element_get_request_pad(gstrtpbin, "recv_rtp_sink_1");
    g_assert(sink_pad!=NULL);
    lres=gst_pad_link(bin_pad, sink_pad);
    g_assert (lres == GST_PAD_LINK_OK);

    //signal link
    g_signal_connect(decodebin, "new-decoded-pad", G_CALLBACK(on_pad_added_decodebin),NULL);    
    
    g_signal_connect(gstrtpbin, "pad-added", G_CALLBACK(on_pad_added_rtpbin),NULL);    
    
    gst_element_set_state(pipeline, GST_STATE_PLAYING);
    
    g_print("Running...\n");
    g_main_loop_run(loop);
    
    /* Out of the main loop, clean up nicely */
    g_print("Returned, stopping playback\n");
    gst_element_set_state(pipeline, GST_STATE_NULL);
    
    g_print("Deleting pipeline\n");
    gst_object_unref(GST_OBJECT(pipeline));
    
    return 0;
}
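
For completeness, the program can be compiled with something like the following (assuming the source is saved as server.c and the GStreamer 0.10 development packages are installed; the file name is just an example):

    gcc server.c -o server `pkg-config --cflags --libs gstreamer-0.10`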