[gst-devel] problem of sending movie through rtp protocol

xuxin04072129 xuxin04072129 at 126.com
Fri Jun 5 07:47:16 CEST 2009


hi all,

    I am sorry to disturb you again. I am new to GStreamer and have run into many problems while developing with it. I want to send a movie over RTP. I used gst-launch to test my commands, and they work fine. But when I implement the sender command in C code and use ethereal on the receiver side to capture the UDP packets from the sender, I get nothing. I have no idea where the problem is. Please give me some suggestions. Thank you very much.
     
    The following are my command and source code.
     
gst-launch --gst-debug=gstrtpbin:2 -v gstrtpbin name=rtpbin \
    filesrc location=/home/xuxin/desktop/g_p/a.avi ! decodebin name=dec \
    dec. ! queue ! x264enc byte-stream=false ! rtph264pay ! rtpbin.send_rtp_sink_0 \
    rtpbin.send_rtp_src_0 ! udpsink port=5000 host=172.21.29.169 name=vrtpsink \
    dec. ! queue ! audioresample ! audioconvert ! alawenc ! rtppcmapay ! rtpbin.send_rtp_sink_1 \
    rtpbin.send_rtp_src_1 ! udpsink port=5002 host=172.21.29.169 ts-offset=0 name=artpsink
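
On the receiver side I capture the raw UDP traffic with ethereal; a libpcap capture filter like the one below (the same syntax works as an ethereal capture filter; eth0 is only an example interface name) limits the capture to the two RTP ports used above:

    # show only the RTP ports from the command above (5000 = video, 5002 = audio)
    tcpdump -n -i eth0 udp port 5000 or udp port 5002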
######################################################################################################

#include <gst/gst.h>
#include <glib.h>
#include <unistd.h>
#include <stdlib.h>

// global declarations
GstElement *pipeline, *udpsink0, *udpsink1;
GstElement *audio_bin, *video_bin;

gboolean res;             // result of element linking
GstPadLinkReturn lres;    // result of pad linking

// bus message handler
static gboolean
bus_call (GstBus     *bus,
          GstMessage *msg,
          gpointer    data)
{
  GMainLoop *loop = (GMainLoop *) data;
  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
      g_print ("End of stream\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_ERROR: {
      gchar *debug;
      GError *error;
      gst_message_parse_error (msg, &error, &debug);
      g_free (debug);
      g_printerr ("Error: %s\n", error->message);
      g_error_free (error);
      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }
  return TRUE;
}

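/* Callback for gstrtpbin's "pad-added" signal: when gstrtpbin exposes a new
   RTP pad, link it to one of the two udpsinks based on the "media" field of
   the pad's caps. */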
static void on_pad_added_rtpbin(GstElement *element, GstPad *pad, gpointer data)
{
    GstCaps *caps=NULL;
    GstStructure *str=NULL;
    GstPad *t_pad=NULL;
    
    const gchar* media_type=NULL;
    
    g_print("**************enter into rtpbin callback ****************\n");
    g_assert(pad!=NULL);
    caps=gst_pad_get_caps(pad);
    g_assert(caps!=NULL);
    str=gst_caps_get_structure(caps,0);
    g_assert(str!=NULL);
    
    media_type=gst_structure_get_string(str,"media");
    g_assert(media_type!=NULL);
    g_print("the media type is %s\n",media_type);
    //media type audio or video
    if (g_strrstr (media_type, "audio")) {
        g_print ("rtp catch audio\n");
        t_pad = gst_element_get_static_pad (udpsink0, "sink");
        g_assert (t_pad != NULL);
    } else if (g_strrstr (media_type, "video")) {
        g_print ("rtp catch video\n");
        t_pad = gst_element_get_static_pad (udpsink1, "sink");
        g_assert (t_pad != NULL);
    } else {
        gst_caps_unref (caps);
        return;
    }

    if (GST_PAD_IS_LINKED (t_pad)) {
        gst_caps_unref (caps);
        g_object_unref (t_pad);
        return;
    }

    lres = gst_pad_link (pad, t_pad);
    g_assert (lres == GST_PAD_LINK_OK);
    g_print ("Dynamic pad created, linking rtpbin/udp\n");

    gst_caps_unref (caps);
    g_object_unref (t_pad);
    
}

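/* Callback for decodebin's "new-decoded-pad" signal: link each newly decoded
   pad to the audio bin or the video bin, depending on the caps name. */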
static void on_pad_added_decodebin(GstElement *decodebin,GstPad *pad,gboolean last,gpointer data)
{
    GstCaps *caps = NULL;
    GstStructure *str = NULL;
    GstPad *t_pad = NULL;

    const gchar *media_type = NULL;
    g_print ("**************** enter into the decodebin signal callback **********\n");

    g_assert (pad != NULL);

    caps = gst_pad_get_caps (pad);
    g_assert (caps != NULL);
    str = gst_caps_get_structure (caps, 0);
    g_assert (str != NULL);
    media_type = gst_structure_get_name (str);
    //check media type: audio or video
    if (g_strrstr (media_type, "audio")) {
        g_print ("catch audio in decodebin\n");
        t_pad = gst_element_get_static_pad (audio_bin, "sink");
        g_assert (t_pad != NULL);
    } else if (g_strrstr (media_type, "video")) {
        g_print ("catch video in decodebin\n");
        t_pad = gst_element_get_static_pad (video_bin, "sink");
        g_assert (t_pad != NULL);
    } else {
        g_print ("have not got a suitable type\n");
        gst_caps_unref (caps);
        return;
    }

    if (GST_PAD_IS_LINKED (t_pad)) {
        g_print ("the pad is already linked\n");
        gst_caps_unref (caps);
        g_object_unref (t_pad);
        return;
    }

    lres = gst_pad_link (pad, t_pad);
    g_assert (lres == GST_PAD_LINK_OK);
    g_print ("Dynamic pad created, linking decodebin/queue\n");

    gst_caps_unref (caps);
    g_object_unref (t_pad);
    
}

int main(int argc, char **argv)
{
    GMainLoop *loop;
    GstBus *bus;
    
    //pad of rtpbin
    GstPad *sink_pad;    
    GstPad *bin_pad;    
    
    GstPad *videopad_src,*videopad_sink;
    GstPad *audiopad_src,*audiopad_sink;
    //declare elements 
    GstElement  *filesrc,*decodebin,*gstrtpbin;
    //element for video
    GstElement *queue1,*x264enc,*rtph264pay;
    //element for audio
    GstElement *queue2,*audioresample,*audioconvert,*alawenc,*rtppcmapay;
    
    gst_init(&argc, &argv);
    loop = g_main_loop_new(NULL, FALSE);
    
    //generate elements
    pipeline = gst_pipeline_new("server");
    filesrc=gst_element_factory_make("filesrc", "filesrc");
    decodebin=gst_element_factory_make("decodebin", "decodebin");
    gstrtpbin = gst_element_factory_make("gstrtpbin", "gstrtpbin");
    //for video
    queue1=gst_element_factory_make("queue", "queue1");
    x264enc=gst_element_factory_make("x264enc", "x264enc");
    rtph264pay=gst_element_factory_make("rtph264pay", "rtph264pay");
    udpsink0=gst_element_factory_make("udpsink", "udpsink0");
    //for audio
    queue2 = gst_element_factory_make("queue", "queue2");
    audioresample = gst_element_factory_make("audioresample", "audioresample");
    audioconvert = gst_element_factory_make("audioconvert", "audioconvert");
    alawenc = gst_element_factory_make("alawenc", "alawenc");    
    rtppcmapay = gst_element_factory_make("rtppcmapay", "rtppcmapay");
    udpsink1=gst_element_factory_make("udpsink", "udpsink1");
    
    bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
    gst_bus_add_watch(bus, bus_call, loop);
    gst_object_unref(bus);
    
    //set properties of elements
    g_object_set(G_OBJECT(filesrc), "location","/home/xuxin/desktop/g_p/a.avi", NULL);    
    g_object_set(G_OBJECT(udpsink0), "port",5000, NULL);
    g_object_set(G_OBJECT(udpsink1), "port",5002, NULL);
    g_object_set(G_OBJECT(udpsink0), "host","172.21.29.169", NULL);
    g_object_set(G_OBJECT(udpsink1), "host","172.21.29.169", NULL);
    g_object_set(G_OBJECT(udpsink0), "sync",FALSE, NULL);
    g_object_set(G_OBJECT(udpsink1), "sync",FALSE, NULL);
    
    //create video_bin
    video_bin = gst_bin_new ("videobin");
    gst_bin_add_many (GST_BIN (video_bin), queue1, x264enc, rtph264pay, NULL);
    res = gst_element_link_many (queue1, x264enc, rtph264pay, NULL);
    g_assert (res == TRUE);
    //add ghost pads to video_bin
    videopad_sink = gst_element_get_static_pad (queue1, "sink");
    videopad_src = gst_element_get_static_pad (rtph264pay, "src");
    gst_element_add_pad (video_bin, gst_ghost_pad_new ("sink", videopad_sink));
    gst_element_add_pad (video_bin, gst_ghost_pad_new ("src", videopad_src));
    gst_object_unref (videopad_sink);
    gst_object_unref (videopad_src);

    //create audio_bin
    audio_bin = gst_bin_new ("audiobin");
    gst_bin_add_many (GST_BIN (audio_bin), queue2, audioresample, audioconvert, alawenc, rtppcmapay, NULL);
    res = gst_element_link_many (queue2, audioresample, audioconvert, alawenc, rtppcmapay, NULL);
    g_assert (res == TRUE);
    //add ghost pads to audio_bin
    audiopad_sink = gst_element_get_static_pad (queue2, "sink");
    audiopad_src = gst_element_get_static_pad (rtppcmapay, "src");
    res = gst_element_add_pad (audio_bin, gst_ghost_pad_new ("sink", audiopad_sink));
    g_assert (res == TRUE);
    res = gst_element_add_pad (audio_bin, gst_ghost_pad_new ("src", audiopad_src));
    g_assert (res == TRUE);
    gst_object_unref (audiopad_sink);
    gst_object_unref (audiopad_src);
    
    //add elements into pipeline
    gst_bin_add_many(GST_BIN(pipeline),
            filesrc,decodebin,audio_bin,video_bin,gstrtpbin,udpsink0,udpsink1,NULL);
    
    //link filesrc and decodebin statically
    res=gst_element_link(filesrc,decodebin);    
    g_assert (res == TRUE);    
    
    //get request pad from gstrtpbin and connect with video_bin
    bin_pad=gst_element_get_pad(video_bin, "src");
    g_assert(bin_pad!=NULL);    
    sink_pad = gst_element_get_request_pad(gstrtpbin, "recv_rtp_sink_0");
    g_assert(sink_pad!=NULL);    
    lres=gst_pad_link(bin_pad, sink_pad);
    g_assert (lres == GST_PAD_LINK_OK);
    
    
    //get request pad from gstrtpbin and connect with audio_bin
    bin_pad=gst_element_get_pad(audio_bin, "src");
    g_assert(bin_pad!=NULL);
    sink_pad = gst_element_get_request_pad(gstrtpbin, "recv_rtp_sink_1");
    g_assert(sink_pad!=NULL);
    lres=gst_pad_link(bin_pad, sink_pad);
    g_assert (lres == GST_PAD_LINK_OK);

    //connect callbacks for the dynamically created pads
    g_signal_connect(decodebin, "new-decoded-pad", G_CALLBACK(on_pad_added_decodebin),NULL);    
    
    g_signal_connect(gstrtpbin, "pad-added", G_CALLBACK(on_pad_added_rtpbin),NULL);    
    
    gst_element_set_state(pipeline, GST_STATE_PLAYING);    
    g_print("Running...\n");
    g_main_loop_run(loop);
    
    /* Out of the main loop, clean up nicely */
    g_print("Returned, stopping playback\n");
    gst_element_set_state(pipeline, GST_STATE_NULL);
    
    g_print("Deleting pipeline\n");
    gst_object_unref(GST_OBJECT(pipeline));
    
    return 0;
}
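
To build and run the sender I use something like the following (a sketch assuming GStreamer 0.10, which matches the element and signal names above, and that the code is saved as sender.c):

    # compile against the GStreamer 0.10 libraries
    gcc -Wall sender.c -o sender $(pkg-config --cflags --libs gstreamer-0.10)

    # run with extra RTP debug output; GST_DEBUG is the environment-variable
    # form of the --gst-debug switch used in the gst-launch test above
    GST_DEBUG=gstrtpbin:5,rtph264pay:5 ./sender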

