Hi all,

First, thanks to all who helped me solve my earlier problem. The "Sender" now works fine; I've attached my sample application for it. Now I want to implement a "Server" which receives UDP packets from the "Sender" and forwards them to the Receiver. I built the Server with the commands below, but when I send the video a second time I have to restart the Server first. I want the Server to keep running all the time, so that I don't have to restart it each time I send the video.

Any suggestions?

regards
xuxin

Server commands:

gst-launch -v gstrtpbin name=rtpbin latency=200 \
udpsrc caps="application/x-rtp,media=(string)video,clock-rate=(int)90000,encoding-name=(string)H264" port=5000 ! rtpbin.recv_rtp_sink_0 \
rtpbin. ! udpsink port=5000 host=172.21.29.177 sync=false ts-offset=0 \
udpsrc caps="application/x-rtp,media=(string)audio,clock-rate=(int)8000,encoding-name=(string)PCMA" port=5002 ! rtpbin.recv_rtp_sink_1 \
rtpbin. ! udpsink port=5002 host=172.21.29.177 sync=false ts-offset=0


###########################
Sender:

#include <gst/gst.h>
#include <glib.h>
#include <unistd.h>
#include <stdlib.h>

#define SOURCE "/home/xuxin/desktop/g_p/test/a.avi"
#define DEST_HOST "172.21.29.169"
#define VIDEO_PORT 5000
#define AUDIO_PORT 5002

// global declarations
GstElement *pipeline, *udpsink0, *udpsink1;
GstElement *audio_bin, *video_bin;

gboolean res;           // element link result
GstPadLinkReturn lres;  // pad link result

// bus callback
static gboolean
bus_call (GstBus *bus,
          GstMessage *msg,
          gpointer data)
{
  GMainLoop *loop = (GMainLoop *) data;

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
      g_print ("End of stream\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_ERROR: {
      gchar *debug;
      GError *error;

      gst_message_parse_error (msg, &error, &debug);
      g_free (debug);
      g_printerr ("Error: %s\n", error->message);
      g_error_free (error);
      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }
  return TRUE;
}

static void
on_pad_added_rtpbin (GstElement *element, GstPad *pad, gpointer data)
{
  GstCaps *caps = NULL;
  GstStructure *str = NULL;
  GstPad *t_pad = NULL;

  const gchar *media_type = NULL;
  gint clock_rate;
  gboolean judgment;
  const gchar *encoding_name = NULL;
  const gchar *name = NULL;

  g_print ("************** enter into rtpbin callback ****************\n");
  g_assert (pad != NULL);
  caps = gst_pad_get_caps (pad);
  g_assert (caps != NULL);
  str = gst_caps_get_structure (caps, 0);
  g_assert (str != NULL);

  media_type = gst_structure_get_string (str, "media");
  g_assert (media_type != NULL);
  g_print ("the media type is %s\n", media_type);

  //************** test *****************************
  name = gst_structure_get_name (str);
  g_print ("the caps name is %s\n", name);
  judgment = gst_structure_get_int (str, "clock-rate", &clock_rate);
  if (judgment)
    g_print ("clock_rate is: %d\n", clock_rate);
  encoding_name = gst_structure_get_string (str, "encoding-name");
  g_assert (encoding_name != NULL);
  g_print ("the encoding_name is %s\n", encoding_name);
  //*************************************************

  // media type: audio or video
  if (g_strrstr (media_type, "video")) {
    g_print ("rtpbin caught video\n");
    t_pad = gst_element_get_static_pad (udpsink0, "sink");
    g_assert (t_pad != NULL);
    g_print ("got the sink pad of udpsink0\n");
    lres = gst_pad_link (pad, t_pad);
    g_assert (lres == GST_PAD_LINK_OK);
    gst_object_unref (t_pad);
    g_print ("Dynamic pad created, linking rtpbin/udpsink0\n");
  } else if (g_strrstr (media_type, "audio")) {
    g_print ("rtpbin caught audio\n");
    t_pad = gst_element_get_static_pad (udpsink1, "sink");
    g_assert (t_pad != NULL);
    g_print ("got the sink pad of udpsink1\n");
    lres = gst_pad_link (pad, t_pad);
    g_assert (lres == GST_PAD_LINK_OK);
    gst_object_unref (t_pad);
    g_print ("Dynamic pad created, linking rtpbin/udpsink1\n");
  } else {
    gst_caps_unref (caps);
    return;
  }

  gst_caps_unref (caps);
}

static void
on_pad_added_decodebin (GstElement *decodebin, GstPad *pad, gboolean last, gpointer data)
{
  GstCaps *caps = NULL;
  GstStructure *str = NULL;
  GstPad *t_pad = NULL;

  const gchar *media_type = NULL;

  g_print ("**************** enter into the decodebin signal callback **********\n");
  g_assert (pad != NULL);

  caps = gst_pad_get_caps (pad);
  g_assert (caps != NULL);
  str = gst_caps_get_structure (caps, 0);
  g_assert (str != NULL);

  media_type = gst_structure_get_name (str);
  g_assert (media_type != NULL);
  g_print ("the media type is %s\n", media_type);

  // check media type: audio or video
  if (g_strrstr (media_type, "audio")) {
    g_print ("caught audio in decodebin\n");
    t_pad = gst_element_get_static_pad (audio_bin, "sink");
    g_assert (t_pad != NULL);
    lres = gst_pad_link (pad, t_pad);
    g_assert (lres == GST_PAD_LINK_OK);
    gst_object_unref (t_pad);
    g_print ("Dynamic pad created, linking decodebin/audiobin\n");
  } else if (g_strrstr (media_type, "video")) {
    g_print ("caught video in decodebin\n");
    t_pad = gst_element_get_static_pad (video_bin, "sink");
    g_assert (t_pad != NULL);
    lres = gst_pad_link (pad, t_pad);
    g_assert (lres == GST_PAD_LINK_OK);
    gst_object_unref (t_pad);
    g_print ("Dynamic pad created, linking decodebin/videobin\n");
  } else {
    g_print ("no suitable media type found\n");
    gst_caps_unref (caps);
    return;
  }

  gst_caps_unref (caps);
}

int
main (int argc, char **argv)
{
  GMainLoop *loop;
  GstBus *bus;

  // pads for linking to rtpbin
  GstPad *sink_pad;
  GstPad *bin_pad;

  GstPad *videopad_src, *videopad_sink;
  GstPad *audiopad_src, *audiopad_sink;

  // declare elements
  GstElement *filesrc, *decodebin, *gstrtpbin;
  // elements for video
  GstElement *queue1, *x264enc, *rtph264pay;
  // elements for audio
  GstElement *queue2, *audioresample, *audioconvert, *alawenc, *rtppcmapay;

  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  // create elements
  pipeline = gst_pipeline_new ("server");
  filesrc = gst_element_factory_make ("filesrc", "filesrc");
  decodebin = gst_element_factory_make ("decodebin", "decodebin");
  gstrtpbin = gst_element_factory_make ("gstrtpbin", "gstrtpbin");
  // for video
  queue1 = gst_element_factory_make ("queue", "queue1");
  x264enc = gst_element_factory_make ("x264enc", "x264enc");
  rtph264pay = gst_element_factory_make ("rtph264pay", "rtph264pay");
  udpsink0 = gst_element_factory_make ("udpsink", "udpsink0");
  // for audio
  queue2 = gst_element_factory_make ("queue", "queue2");
  audioresample = gst_element_factory_make ("audioresample", "audioresample");
  audioconvert = gst_element_factory_make ("audioconvert", "audioconvert");
  alawenc = gst_element_factory_make ("alawenc", "alawenc");
  rtppcmapay = gst_element_factory_make ("rtppcmapay", "rtppcmapay");
  udpsink1 = gst_element_factory_make ("udpsink", "udpsink1");

  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  // set element properties
  g_object_set (G_OBJECT (filesrc), "location", SOURCE, NULL);
  g_object_set (G_OBJECT (udpsink0), "host", DEST_HOST, "port", VIDEO_PORT, NULL);
  g_object_set (G_OBJECT (udpsink1), "host", DEST_HOST, "port", AUDIO_PORT, NULL);
  //g_object_set (G_OBJECT (udpsink0), "sync", FALSE, NULL);
  //g_object_set (G_OBJECT (udpsink1), "sync", FALSE, NULL);
  //g_object_set (G_OBJECT (udpsink0), "async", FALSE, NULL);
  //g_object_set (G_OBJECT (udpsink1), "async", FALSE, NULL);

  // create video_bin
  video_bin = gst_bin_new ("videobin");
  gst_bin_add_many (GST_BIN (video_bin), queue1, x264enc, rtph264pay, NULL);
  res = gst_element_link_many (queue1, x264enc, rtph264pay, NULL);
  g_assert (res == TRUE);
  // add ghost pads to video_bin
  videopad_sink = gst_element_get_static_pad (queue1, "sink");
  videopad_src = gst_element_get_static_pad (rtph264pay, "src");
  gst_element_add_pad (video_bin, gst_ghost_pad_new ("sink", videopad_sink));
  gst_element_add_pad (video_bin, gst_ghost_pad_new ("src", videopad_src));
  gst_object_unref (videopad_sink);
  gst_object_unref (videopad_src);

  // create audio_bin
  audio_bin = gst_bin_new ("audiobin");
  gst_bin_add_many (GST_BIN (audio_bin), queue2, audioresample, audioconvert, alawenc, rtppcmapay, NULL);
  res = gst_element_link_many (queue2, audioresample, audioconvert, alawenc, rtppcmapay, NULL);
  g_assert (res == TRUE);
  // add ghost pads to audio_bin
  audiopad_sink = gst_element_get_static_pad (queue2, "sink");
  audiopad_src = gst_element_get_static_pad (rtppcmapay, "src");
  res = gst_element_add_pad (audio_bin, gst_ghost_pad_new ("sink", audiopad_sink));
  g_assert (res == TRUE);
  res = gst_element_add_pad (audio_bin, gst_ghost_pad_new ("src", audiopad_src));
  g_assert (res == TRUE);
  gst_object_unref (audiopad_sink);
  gst_object_unref (audiopad_src);

  // add elements to the pipeline
  gst_bin_add_many (GST_BIN (pipeline),
      filesrc, decodebin, audio_bin, video_bin, gstrtpbin, udpsink0, udpsink1, NULL);

  // static link: filesrc -> decodebin
  res = gst_element_link (filesrc, decodebin);
  g_assert (res == TRUE);

  // get a request pad from gstrtpbin and connect it with video_bin
  bin_pad = gst_element_get_pad (video_bin, "src");
  g_assert (bin_pad != NULL);
  sink_pad = gst_element_get_request_pad (gstrtpbin, "recv_rtp_sink_0");
  g_assert (sink_pad != NULL);
  lres = gst_pad_link (bin_pad, sink_pad);
  g_assert (lres == GST_PAD_LINK_OK);

  // get a request pad from gstrtpbin and connect it with audio_bin
  bin_pad = gst_element_get_pad (audio_bin, "src");
  g_assert (bin_pad != NULL);
  sink_pad = gst_element_get_request_pad (gstrtpbin, "recv_rtp_sink_1");
  g_assert (sink_pad != NULL);
  lres = gst_pad_link (bin_pad, sink_pad);
  g_assert (lres == GST_PAD_LINK_OK);

  // connect signals for dynamic pads
  g_signal_connect (decodebin, "new-decoded-pad", G_CALLBACK (on_pad_added_decodebin), NULL);
  g_signal_connect (gstrtpbin, "pad-added", G_CALLBACK (on_pad_added_rtpbin), NULL);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_print ("Running...\n");
  g_main_loop_run (loop);

  /* Out of the main loop, clean up nicely */
  g_print ("Returned, stopping playback\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);

  g_print ("Deleting pipeline\n");
  gst_object_unref (GST_OBJECT (pipeline));

  return 0;
}
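
###########################
Receiver (for reference only): a rough, untested sketch of what the playback side could look like. The ports and caps mirror the Server command above; the depayloaders, decoders and sinks (rtph264depay, ffdec_h264, autovideosink, rtppcmadepay, alawdec, autoaudiosink) are only assumptions, so adjust them to whatever the real Receiver uses:

gst-launch -v gstrtpbin name=rtpbin latency=200 \
udpsrc caps="application/x-rtp,media=(string)video,clock-rate=(int)90000,encoding-name=(string)H264" port=5000 ! rtpbin.recv_rtp_sink_0 \
rtpbin. ! rtph264depay ! ffdec_h264 ! autovideosink \
udpsrc caps="application/x-rtp,media=(string)audio,clock-rate=(int)8000,encoding-name=(string)PCMA" port=5002 ! rtpbin.recv_rtp_sink_1 \
rtpbin. ! rtppcmadepay ! alawdec ! autoaudiosink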
<a href="http://qiye.163.com/?ft=2">网易全新推出企业邮箱</a>
</span>