Hi Wim and Jyoti,

Thank you for your replies. I followed your suggestions and rewrote the code, but it still doesn't work. I get the following error:

Running...
catch video in decodebin
Dynamic pad created, linking decoderbin/queue
catch audio in decodebin
Dynamic pad created, linking decoderbin/queue
Error: internal data flow error
Returned, stopping playback
Deleting pipeline

Please give me some more suggestions; I would appreciate it very much.

By the way, I found some source code on the net whose author didn't use a callback. Instead, he wrote the code as follows:

send_rtp_sink0 = gst_element_get_request_pad(rtpbin, "send_rtp_sink_0");
send_rtp_src0 = gst_element_get_static_pad(rtpbin, "send_rtp_src_0");

The reason is that the reference documentation describes the "send_rtp_src_%d" pad as being created automatically when a request for "send_rtp_sink_%d" is made.

I have tried this method too, but it also failed (a sketch of how I understood it is included after the full listing below).

Here is my current code:

#include <gst/gst.h>
#include <glib.h>
#include <unistd.h>
#include <stdlib.h>

// global declarations
GstElement *pipeline, *udpsink0, *udpsink1;
GstElement *audio_bin, *video_bin;

gboolean res;          // element link result
GstPadLinkReturn lres; // pad link result

// bus watch
static gboolean
bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
  GMainLoop *loop = (GMainLoop *) data;

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
      g_print ("End of stream\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_ERROR: {
      gchar *debug;
      GError *error;

      gst_message_parse_error (msg, &error, &debug);
      g_free (debug);
      g_printerr ("Error: %s\n", error->message);
      g_error_free (error);
      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }
  return TRUE;
}

static void
on_pad_added_rtpbin (GstElement *element, GstPad *pad, gpointer data)
{
  GstCaps *caps = NULL;
  GstStructure *str = NULL;
  GstPad *t_pad = NULL;

  g_assert (pad != NULL);
  caps = gst_pad_get_caps (pad);
  str = gst_caps_get_structure (caps, 0);

  // media type: audio or video
  if (g_strrstr (gst_structure_get_name (str), "audio"))
  {
    g_print ("rtp catch audio\n");
    t_pad = gst_element_get_static_pad (udpsink0, "sink");
    g_assert (t_pad != NULL);
  }
  else if (g_strrstr (gst_structure_get_name (str), "video"))
  {
    g_print ("rtp catch video\n");
    t_pad = gst_element_get_static_pad (udpsink1, "sink");
    g_assert (t_pad != NULL);
  }
  else
  {
    gst_caps_unref (caps);
    return;
  }

  if (GST_PAD_IS_LINKED (t_pad))
  {
    gst_caps_unref (caps);
    g_object_unref (t_pad);
    return;
  }
  else
  {
    lres = gst_pad_link (pad, t_pad);
    g_assert (lres == GST_PAD_LINK_OK);
    g_print ("Dynamic pad created, linking rtpbin/udp\n");

    gst_caps_unref (caps);
    g_object_unref (t_pad);
    return;
  }
}

static void
on_pad_added_decodebin (GstElement *decodebin, GstPad *pad, gboolean last, gpointer data)
{
  GstCaps *caps;
  GstStructure *str;
  GstPad *t_pad;

  caps = gst_pad_get_caps (pad);
  str = gst_caps_get_structure (caps, 0);
  // check media type: audio or video
  if (g_strrstr (gst_structure_get_name (str), "audio"))
  {
    g_print ("catch audio in decodebin\n");
    t_pad = gst_element_get_static_pad (audio_bin, "sink");
    g_assert (t_pad != NULL);
    if (t_pad == NULL)
      g_print ("cannot get the sink pad of audiobin ");
  }
  else if (g_strrstr (gst_structure_get_name (str), "video"))
  {
    g_print ("catch video in decodebin\n");
    t_pad = gst_element_get_static_pad (video_bin, "sink");
    g_assert (t_pad != NULL);
    if (t_pad == NULL)
      g_print ("cannot get the sink pad of videobin ");
  }
  else
  {
    gst_caps_unref (caps);
    return;
  }

  if (GST_PAD_IS_LINKED (t_pad))
  {
    gst_caps_unref (caps);
    g_object_unref (t_pad);
    return;
  }
  else
  {
    gst_pad_link (pad, t_pad);
    g_print ("Dynamic pad created, linking decoderbin/queue\n");

    gst_caps_unref (caps);
    g_object_unref (t_pad);
    return;
  }
}

int main (int argc, char **argv)
{
  GMainLoop *loop;
  GstBus *bus;

  // pads of gstrtpbin
  GstPad *sink_pad;
  GstPad *bin_pad;

  GstPad *videopad_src, *videopad_sink;
  GstPad *audiopad_src, *audiopad_sink;
  // declare elements
  GstElement *filesrc, *decodebin, *gstrtpbin;
  // elements for video
  GstElement *queue1, *x264enc, *rtph264pay;
  // elements for audio
  GstElement *queue2, *audioresample, *audioconvert, *alawenc, *rtppcmapay;

  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  // create elements
  pipeline = gst_pipeline_new ("server");
  filesrc = gst_element_factory_make ("filesrc", "filesrc");
  decodebin = gst_element_factory_make ("decodebin", "decodebin");
  gstrtpbin = gst_element_factory_make ("gstrtpbin", "gstrtpbin");
  // for video
  queue1 = gst_element_factory_make ("queue", "queue1");
  x264enc = gst_element_factory_make ("x264enc", "x264enc");
  rtph264pay = gst_element_factory_make ("rtph264pay", "rtph264pay");
  udpsink0 = gst_element_factory_make ("udpsink", "udpsink0");
  // for audio
  queue2 = gst_element_factory_make ("queue", "queue2");
  audioresample = gst_element_factory_make ("audioresample", "audioresample");
  audioconvert = gst_element_factory_make ("audioconvert", "audioconvert");
  alawenc = gst_element_factory_make ("alawenc", "alawenc");
  rtppcmapay = gst_element_factory_make ("rtppcmapay", "rtppcmapay");
  udpsink1 = gst_element_factory_make ("udpsink", "udpsink1");

  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  // set element properties
  g_object_set (G_OBJECT (filesrc), "location", "/home/xuxin/desktop/g_p/a.avi", NULL);
  g_object_set (G_OBJECT (udpsink0), "port", 5000, NULL);
  g_object_set (G_OBJECT (udpsink1), "port", 5002, NULL);
  g_object_set (G_OBJECT (udpsink0), "host", "172.21.29.169", NULL);
  g_object_set (G_OBJECT (udpsink1), "host", "172.21.29.169", NULL);
  g_object_set (G_OBJECT (udpsink0), "sync", FALSE, NULL);
  g_object_set (G_OBJECT (udpsink1), "sync", FALSE, NULL);

  // create video_bin
  video_bin = gst_bin_new ("videobin");
  gst_bin_add_many (GST_BIN (video_bin), queue1, x264enc, rtph264pay, NULL);
  res = gst_element_link_many (queue1, x264enc, rtph264pay, NULL);
  g_assert (res == TRUE);
  // add ghost pads to video_bin
  videopad_sink = gst_element_get_static_pad (queue1, "sink");
  videopad_src = gst_element_get_static_pad (rtph264pay, "src");
  gst_element_add_pad (video_bin, gst_ghost_pad_new ("sink", videopad_sink));
  gst_element_add_pad (video_bin, gst_ghost_pad_new ("src", videopad_src));
  gst_object_unref (videopad_sink);
  gst_object_unref (videopad_src);

  // create audio_bin
  audio_bin = gst_bin_new ("audiobin");
  gst_bin_add_many (GST_BIN (audio_bin), queue2, audioresample, audioconvert, alawenc, rtppcmapay, NULL);
  res = gst_element_link_many (queue2, audioresample, audioconvert, alawenc, rtppcmapay, NULL);
  g_assert (res == TRUE);
  // add ghost pads to audio_bin
  audiopad_sink = gst_element_get_static_pad (queue2, "sink");
  audiopad_src = gst_element_get_static_pad (rtppcmapay, "src");
  res = gst_element_add_pad (audio_bin, gst_ghost_pad_new ("sink", audiopad_sink));
  g_assert (res == TRUE);
  res = gst_element_add_pad (audio_bin, gst_ghost_pad_new ("src", audiopad_src));
  g_assert (res == TRUE);
  gst_object_unref (audiopad_sink);
  gst_object_unref (audiopad_src);

  // add elements into the pipeline
  gst_bin_add_many (GST_BIN (pipeline),
      filesrc, decodebin, audio_bin, video_bin, gstrtpbin, udpsink0, udpsink1, NULL);

  // statically link filesrc and decodebin
  res = gst_element_link (filesrc, decodebin);
  g_assert (res == TRUE);

  // get a request pad from gstrtpbin and connect it with video_bin
  bin_pad = gst_element_get_pad (video_bin, "src");
  g_assert (bin_pad != NULL);
  sink_pad = gst_element_get_request_pad (gstrtpbin, "recv_rtp_sink_0");
  g_assert (sink_pad != NULL);
  lres = gst_pad_link (bin_pad, sink_pad);
  g_assert (lres == GST_PAD_LINK_OK);

  // get a request pad from gstrtpbin and connect it with audio_bin
  bin_pad = gst_element_get_pad (audio_bin, "src");
  g_assert (bin_pad != NULL);
  sink_pad = gst_element_get_request_pad (gstrtpbin, "recv_rtp_sink_1");
  g_assert (sink_pad != NULL);
  lres = gst_pad_link (bin_pad, sink_pad);
  g_assert (lres == GST_PAD_LINK_OK);

  // connect signals
  g_signal_connect (decodebin, "new-decoded-pad", G_CALLBACK (on_pad_added_decodebin), NULL);
  g_signal_connect (gstrtpbin, "pad-added", G_CALLBACK (on_pad_added_rtpbin), NULL);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  g_print ("Running...\n");
  g_main_loop_run (loop);

  /* Out of the main loop, clean up nicely */
  g_print ("Returned, stopping playback\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);

  g_print ("Deleting pipeline\n");
  gst_object_unref (GST_OBJECT (pipeline));

  return 0;
}
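For reference, this is how I understood the static-pad approach from that source code, written as a small helper. It is only a sketch and not tested as it stands: pay, rtpbin and udpsink are placeholders for my rtph264pay, gstrtpbin and udpsink elements, and it relies on the documented behaviour that requesting "send_rtp_sink_0" also creates "send_rtp_src_0".

static gboolean
link_rtp_session_0 (GstElement *pay, GstElement *rtpbin, GstElement *udpsink)
{
  GstPad *pay_src, *rtp_sink, *rtp_src, *udp_sink;
  gboolean ok;

  pay_src  = gst_element_get_static_pad (pay, "src");
  // requesting the session-0 RTP sink pad...
  rtp_sink = gst_element_get_request_pad (rtpbin, "send_rtp_sink_0");
  // ...should make the matching src pad available as a static pad
  rtp_src  = gst_element_get_static_pad (rtpbin, "send_rtp_src_0");
  udp_sink = gst_element_get_static_pad (udpsink, "sink");

  ok = (gst_pad_link (pay_src, rtp_sink) == GST_PAD_LINK_OK)
      && (gst_pad_link (rtp_src, udp_sink) == GST_PAD_LINK_OK);

  gst_object_unref (pay_src);
  gst_object_unref (rtp_src);
  gst_object_unref (udp_sink);

  return ok;
}

I called it along the lines of link_rtp_session_0 (rtph264pay, gstrtpbin, udpsink1) after adding the elements to the pipeline; this is the variant that also failed for me.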
<a href="http://qiye.163.com/?ft=1">业务订单流失怎么办?</a>
</span>