Hi all,

Sorry to disturb you again. I am new to GStreamer and have run into many problems while developing with it. I want to send a movie over RTP. I use gst-launch to test my command line and it works fine, but when I implement the sender part of that command in C code and use Ethereal on the receiver side to capture the UDP packets coming from the sender, I get nothing. I have no idea where the problem is. Please give me some suggestions, thank you very much.

Below are my command line and source code. After the source code I have also put a small sketch of how I read the send-side linking from the gst-launch line, in case that makes clearer what I am trying to do.

gst-launch --gst-debug=gstrtpbin:2 -v gstrtpbin name=rtpbin \
  filesrc location=/home/xuxin/desktop/g_p/a.avi ! decodebin name=dec \
  dec. ! queue ! x264enc byte-stream=false ! rtph264pay ! rtpbin.send_rtp_sink_0 \
  rtpbin.send_rtp_src_0 ! udpsink port=5000 host=172.21.29.169 name=vrtpsink \
  dec. ! queue ! audioresample ! audioconvert ! alawenc ! rtppcmapay ! rtpbin.send_rtp_sink_1 \
  rtpbin.send_rtp_src_1 ! udpsink port=5002 host=172.21.29.169 ts-offset=0 name=artpsink

######################################################################################################

#include <gst/gst.h>
#include <glib.h>
#include <unistd.h>
#include <stdlib.h>

/* global declarations */
GstElement *pipeline, *udpsink0, *udpsink1;
GstElement *audio_bin, *video_bin;

gboolean res;           /* element link result */
GstPadLinkReturn lres;  /* pad link result */

/* bus watch: quit the main loop on EOS or error */
static gboolean
bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
  GMainLoop *loop = (GMainLoop *) data;

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
      g_print ("End of stream\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_ERROR: {
      gchar *debug;
      GError *error;

      gst_message_parse_error (msg, &error, &debug);
      g_free (debug);
      g_printerr ("Error: %s\n", error->message);
      g_error_free (error);
      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }
  return TRUE;
}

/* "pad-added" callback for gstrtpbin: inspect the "media" caps field and
 * link the new pad to the corresponding udpsink */
static void
on_pad_added_rtpbin (GstElement *element, GstPad *pad, gpointer data)
{
  GstCaps *caps = NULL;
  GstStructure *str = NULL;
  GstPad *t_pad = NULL;
  const gchar *media_type = NULL;

  g_print ("************** enter into rtpbin callback ****************\n");
  g_assert (pad != NULL);

  caps = gst_pad_get_caps (pad);
  g_assert (caps != NULL);
  str = gst_caps_get_structure (caps, 0);
  g_assert (str != NULL);

  media_type = gst_structure_get_string (str, "media");
  g_assert (media_type != NULL);
  g_print ("the media type is %s\n", media_type);

  /* media type: audio or video */
  if (g_strrstr (media_type, "audio")) {
    g_print ("rtp catch audio\n");
    t_pad = gst_element_get_static_pad (udpsink0, "sink");
    g_assert (t_pad != NULL);
  } else if (g_strrstr (media_type, "video")) {
    g_print ("rtp catch video\n");
    t_pad = gst_element_get_static_pad (udpsink1, "sink");
    g_assert (t_pad != NULL);
  } else {
    gst_caps_unref (caps);
    return;
  }

  if (GST_PAD_IS_LINKED (t_pad)) {
    gst_caps_unref (caps);
    g_object_unref (t_pad);
    return;
  }

  lres = gst_pad_link (pad, t_pad);
  g_assert (lres == GST_PAD_LINK_OK);
  g_print ("Dynamic pad created, linking rtpbin/udp\n");

  gst_caps_unref (caps);
  g_object_unref (t_pad);
}
/* "new-decoded-pad" callback for decodebin: link the new pad to audio_bin
 * or video_bin depending on its caps */
static void
on_pad_added_decodebin (GstElement *decodebin, GstPad *pad, gboolean last, gpointer data)
{
  GstCaps *caps = NULL;
  GstStructure *str = NULL;
  GstPad *t_pad = NULL;
  const gchar *media_type = NULL;

  g_print ("**************** enter into the decodebin signal callback **********\n");
  g_assert (pad != NULL);

  caps = gst_pad_get_caps (pad);
  g_assert (caps != NULL);
  str = gst_caps_get_structure (caps, 0);
  g_assert (str != NULL);
  media_type = gst_structure_get_name (str);

  /* check media type: audio or video */
  if (g_strrstr (media_type, "audio")) {
    g_print ("catch audio in decodebin\n");
    t_pad = gst_element_get_static_pad (audio_bin, "sink");
    g_assert (t_pad != NULL);
  } else if (g_strrstr (media_type, "video")) {
    g_print ("catch video in decodebin\n");
    t_pad = gst_element_get_static_pad (video_bin, "sink");
    g_assert (t_pad != NULL);
  } else {
    g_print ("did not get a suitable type\n");
    gst_caps_unref (caps);
    return;
  }

  if (GST_PAD_IS_LINKED (t_pad)) {
    g_print ("the pad is already linked\n");
    gst_caps_unref (caps);
    g_object_unref (t_pad);
    return;
  }

  lres = gst_pad_link (pad, t_pad);
  g_assert (lres == GST_PAD_LINK_OK);
  g_print ("Dynamic pad created, linking decodebin/queue\n");

  gst_caps_unref (caps);
  g_object_unref (t_pad);
}

int
main (int argc, char **argv)
{
  GMainLoop *loop;
  GstBus *bus;

  /* pads used when linking to gstrtpbin */
  GstPad *sink_pad;
  GstPad *bin_pad;

  GstPad *videopad_src, *videopad_sink;
  GstPad *audiopad_src, *audiopad_sink;

  /* elements */
  GstElement *filesrc, *decodebin, *gstrtpbin;
  /* elements for video */
  GstElement *queue1, *x264enc, *rtph264pay;
  /* elements for audio */
  GstElement *queue2, *audioresample, *audioconvert, *alawenc, *rtppcmapay;

  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  /* create elements */
  pipeline = gst_pipeline_new ("server");
  filesrc = gst_element_factory_make ("filesrc", "filesrc");
  decodebin = gst_element_factory_make ("decodebin", "decodebin");
  gstrtpbin = gst_element_factory_make ("gstrtpbin", "gstrtpbin");
  /* for video */
  queue1 = gst_element_factory_make ("queue", "queue1");
  x264enc = gst_element_factory_make ("x264enc", "x264enc");
  rtph264pay = gst_element_factory_make ("rtph264pay", "rtph264pay");
  udpsink0 = gst_element_factory_make ("udpsink", "udpsink0");
  /* for audio */
  queue2 = gst_element_factory_make ("queue", "queue2");
  audioresample = gst_element_factory_make ("audioresample", "audioresample");
  audioconvert = gst_element_factory_make ("audioconvert", "audioconvert");
  alawenc = gst_element_factory_make ("alawenc", "alawenc");
  rtppcmapay = gst_element_factory_make ("rtppcmapay", "rtppcmapay");
  udpsink1 = gst_element_factory_make ("udpsink", "udpsink1");

  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  /* set element properties */
  g_object_set (G_OBJECT (filesrc), "location", "/home/xuxin/desktop/g_p/a.avi", NULL);
  g_object_set (G_OBJECT (udpsink0), "port", 5000, NULL);
  g_object_set (G_OBJECT (udpsink1), "port", 5002, NULL);
  g_object_set (G_OBJECT (udpsink0), "host", "172.21.29.169", NULL);
  g_object_set (G_OBJECT (udpsink1), "host", "172.21.29.169", NULL);
  g_object_set (G_OBJECT (udpsink0), "sync", FALSE, NULL);
  g_object_set (G_OBJECT (udpsink1), "sync", FALSE, NULL);
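
  /* What follows: wrap the video and audio encode/payload chains in their
   * own bins with ghost "sink"/"src" pads, add everything to the pipeline,
   * link filesrc to decodebin statically, connect the bins to gstrtpbin
   * request pads, and hook up the dynamic-pad callbacks before setting the
   * pipeline to PLAYING. */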
  /* create video_bin: queue ! x264enc ! rtph264pay, with ghost pads */
  video_bin = gst_bin_new ("videobin");
  gst_bin_add_many (GST_BIN (video_bin), queue1, x264enc, rtph264pay, NULL);
  res = gst_element_link_many (queue1, x264enc, rtph264pay, NULL);
  g_assert (res == TRUE);
  /* add ghost pads to video_bin */
  videopad_sink = gst_element_get_static_pad (queue1, "sink");
  videopad_src = gst_element_get_static_pad (rtph264pay, "src");
  gst_element_add_pad (video_bin, gst_ghost_pad_new ("sink", videopad_sink));
  gst_element_add_pad (video_bin, gst_ghost_pad_new ("src", videopad_src));
  gst_object_unref (videopad_sink);
  gst_object_unref (videopad_src);

  /* create audio_bin: queue ! audioresample ! audioconvert ! alawenc ! rtppcmapay */
  audio_bin = gst_bin_new ("audiobin");
  gst_bin_add_many (GST_BIN (audio_bin), queue2, audioresample, audioconvert, alawenc, rtppcmapay, NULL);
  res = gst_element_link_many (queue2, audioresample, audioconvert, alawenc, rtppcmapay, NULL);
  g_assert (res == TRUE);
  /* add ghost pads to audio_bin */
  audiopad_sink = gst_element_get_static_pad (queue2, "sink");
  audiopad_src = gst_element_get_static_pad (rtppcmapay, "src");
  res = gst_element_add_pad (audio_bin, gst_ghost_pad_new ("sink", audiopad_sink));
  g_assert (res == TRUE);
  res = gst_element_add_pad (audio_bin, gst_ghost_pad_new ("src", audiopad_src));
  g_assert (res == TRUE);
  gst_object_unref (audiopad_sink);
  gst_object_unref (audiopad_src);

  /* add elements into the pipeline */
  gst_bin_add_many (GST_BIN (pipeline),
      filesrc, decodebin, audio_bin, video_bin, gstrtpbin, udpsink0, udpsink1, NULL);

  /* static link: filesrc -> decodebin */
  res = gst_element_link (filesrc, decodebin);
  g_assert (res == TRUE);

  /* get a request pad from gstrtpbin and connect it with video_bin */
  bin_pad = gst_element_get_pad (video_bin, "src");
  g_assert (bin_pad != NULL);
  sink_pad = gst_element_get_request_pad (gstrtpbin, "recv_rtp_sink_0");
  g_assert (sink_pad != NULL);
  lres = gst_pad_link (bin_pad, sink_pad);
  g_assert (lres == GST_PAD_LINK_OK);

  /* get a request pad from gstrtpbin and connect it with audio_bin */
  bin_pad = gst_element_get_pad (audio_bin, "src");
  g_assert (bin_pad != NULL);
  sink_pad = gst_element_get_request_pad (gstrtpbin, "recv_rtp_sink_1");
  g_assert (sink_pad != NULL);
  lres = gst_pad_link (bin_pad, sink_pad);
  g_assert (lres == GST_PAD_LINK_OK);

  /* connect the dynamic-pad signals */
  g_signal_connect (decodebin, "new-decoded-pad", G_CALLBACK (on_pad_added_decodebin), NULL);
  g_signal_connect (gstrtpbin, "pad-added", G_CALLBACK (on_pad_added_rtpbin), NULL);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_print ("Running...\n");
  g_main_loop_run (loop);

  /* out of the main loop, clean up nicely */
  g_print ("Returned, stopping playback\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);

  g_print ("Deleting pipeline\n");
  gst_object_unref (GST_OBJECT (pipeline));

  return 0;
}
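
For reference, here is a small sketch of how I understood the send-side linking from the gst-launch line above. The pad names send_rtp_sink_0 / send_rtp_src_0 are the ones the command uses; the element variables are only placeholders and error handling is left out, so this is just my reading of it, not tested code:

/* Sketch only: link a payloader into gstrtpbin's send path and feed the
 * resulting RTP stream to a udpsink, the way the gst-launch line does it.
 * "payloader" and "rtp_udpsink" are placeholder variables. */
static void
link_send_path (GstElement *rtpbin, GstElement *payloader, GstElement *rtp_udpsink)
{
  GstPad *pay_src, *rtp_sink, *rtp_src, *udp_sink;
  GstPadLinkReturn ret;

  /* payloader src -> rtpbin.send_rtp_sink_0 (request pad) */
  pay_src = gst_element_get_static_pad (payloader, "src");
  rtp_sink = gst_element_get_request_pad (rtpbin, "send_rtp_sink_0");
  ret = gst_pad_link (pay_src, rtp_sink);
  g_assert (ret == GST_PAD_LINK_OK);

  /* rtpbin.send_rtp_src_0 -> udpsink sink; if I read the rtp examples in
   * gst-plugins-good correctly, this pad already exists once
   * send_rtp_sink_0 has been requested, so it can be linked directly
   * without waiting for a pad-added signal */
  rtp_src = gst_element_get_static_pad (rtpbin, "send_rtp_src_0");
  udp_sink = gst_element_get_static_pad (rtp_udpsink, "sink");
  ret = gst_pad_link (rtp_src, udp_sink);
  g_assert (ret == GST_PAD_LINK_OK);

  gst_object_unref (pay_src);
  gst_object_unref (rtp_src);
  gst_object_unref (udp_sink);
}

Does this match what my program above should be doing with the gstrtpbin request pads? Any hints are appreciated.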