[gst-devel] problem of sending video through rtp protocol
xuxin04072129
xuxin04072129 at 126.com
Sat Jun 6 09:58:04 CEST 2009
Hi all,
I am using RTP to send a movie. On the sender, when I set the "async" property of udpsink to TRUE, the receiver does not get any packets. When I set "async" to FALSE instead, the receiver does get the RTP packets and plays the movie, but the audio and video are out of sync and the video plays faster than normal.
Any suggestions? What could be the problem?
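To make the comparison concrete, here is a stripped-down sketch of just the two udpsink settings I tried (illustrative only, using the same host and port as the full sender code below):

#include <gst/gst.h>

/* sketch: the two "async" configurations compared above */
int
main (int argc, char **argv)
{
GstElement *udpsink0;
gst_init (&argc, &argv);
udpsink0 = gst_element_factory_make ("udpsink", "udpsink0");
g_object_set (G_OBJECT (udpsink0), "host", "172.21.29.169", "port", 5000, NULL);
/* case 1: "async" set to TRUE - the receiver gets no packets at all */
g_object_set (G_OBJECT (udpsink0), "async", TRUE, NULL);
/* case 2: "async" FALSE (and "sync" FALSE, as in the full code) -
the receiver plays, but audio and video drift apart and the
video runs faster than normal */
g_object_set (G_OBJECT (udpsink0), "sync", FALSE, "async", FALSE, NULL);
gst_object_unref (udpsink0);
return 0;
}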
Regards
xuxin
The following is my code.
Receiver:
gst-launch -v gstrtpbin name=rtpbin latency=200 \
udpsrc caps="application/x-rtp,media=(string)video,clock-rate=(int)90000,encoding-name=(string) H264" port=5000 ! rtpbin.recv_rtp_sink_0 \
rtpbin. ! rtph264depay ! decodebin ! xvimagesink \
udpsrc caps="application/x-rtp,media=(string)audio,clock-rate=(int)8000,encoding-name=(string)PCMA" port=5002 ! rtpbin.recv_rtp_sink_1 \
rtpbin. ! rtppcmadepay ! decodebin ! audioconvert ! audioresample ! alsasink
Sender:
#include <gst/gst.h>
#include <glib.h>
#include <unistd.h>
#include <stdlib.h>
#define SOURCE "/home/xuxin/desktop/g_p/test/a.avi"
#define DEST_HOST "172.21.29.169"
#define VIDEO_PORT 5000
#define AUDIO_PORT 5002
// global declarations
GstElement *pipeline,*udpsink0,*udpsink1;
GstElement *audio_bin,*video_bin;
gboolean res; // result of element linking
GstPadLinkReturn lres; // result of pad linking
// bus message handler
static gboolean
bus_call (GstBus *bus,
GstMessage *msg,
gpointer data)
{
GMainLoop *loop = (GMainLoop *) data;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_EOS:
g_print ("End of stream\n");
g_main_loop_quit (loop);
break;
case GST_MESSAGE_ERROR: {
gchar *debug;
GError *error;
gst_message_parse_error (msg, &error, &debug);
g_free (debug);
g_printerr ("Error: %s\n", error->message);
g_error_free (error);
g_main_loop_quit (loop);
break;
}
default:
break;
}
return TRUE;
}
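//callback for gstrtpbin's "pad-added" signal: link the new RTP src pad to the matching udpsink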
static void on_pad_added_rtpbin(GstElement *element, GstPad *pad, gpointer data)
{
GstCaps *caps=NULL;
GstStructure *str=NULL;
GstPad *t_pad=NULL;
const gchar* media_type=NULL;
gint clock_rate;
gboolean judgment;
const gchar* encoding_name=NULL;
const gchar* name=NULL;
g_print("**************enter into rtpbin callback ****************\n");
g_assert(pad!=NULL);
caps=gst_pad_get_caps(pad);
g_assert(caps!=NULL);
str=gst_caps_get_structure(caps,0);
g_assert(str!=NULL);
media_type=gst_structure_get_string(str,"media");
g_assert(media_type!=NULL);
g_print("the media type is %s\n",media_type);
//**************test*****************************
name=gst_structure_get_name(str);
g_print("the caps is %s\n",name);
judgment=gst_structure_get_int(str,"clock-rate",&clock_rate);
if (judgment)
g_print("clock-rate is %d\n",clock_rate);
encoding_name=gst_structure_get_string(str,"encoding-name");
g_assert(encoding_name!=NULL);
g_print("the encoding_name is %s\n",encoding_name);
//***********************************************
//media type audio or video
if (g_strrstr (media_type, "video"))
{
g_print("rtp catch audio\n");
t_pad = gst_element_get_static_pad (udpsink0, "sink");
g_assert(t_pad!=NULL);
g_print("get the pad of udpsink0\n");
lres=gst_pad_link (pad, t_pad);
g_assert (lres == GST_PAD_LINK_OK);
g_print("Dynamic pad created, linking rtpbin/udp1\n");
}
else if(g_strrstr (media_type, "audio"))
{
g_print("rtp catch video\n");
t_pad = gst_element_get_static_pad (udpsink1, "sink");
g_assert(t_pad!=NULL);
g_print("get the pad of udpsink1\n");
lres=gst_pad_link (pad, t_pad);
g_assert (lres == GST_PAD_LINK_OK);
g_print("Dynamic pad created, linking rtpbin/udp0\n");
}
else
{
gst_caps_unref (caps);
return;
}
//release the pad and caps references taken above
gst_object_unref (t_pad);
gst_caps_unref (caps);
}
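//callback for decodebin's "new-decoded-pad" signal: link the decoded pad to audio_bin or video_bin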
static void on_pad_added_decodebin(GstElement *decodebin,GstPad *pad,gboolean last,gpointer data)
{
GstCaps *caps=NULL;
GstStructure *str=NULL;
GstPad *t_pad=NULL;
const gchar* media_type=NULL;
g_print("**************** enter into the decodebin signal callback **********\n");
g_assert(pad!=NULL);
caps = gst_pad_get_caps (pad);
g_assert(caps!=NULL);
str = gst_caps_get_structure (caps, 0);
g_assert(str!=NULL);
media_type=gst_structure_get_name (str);
g_assert(media_type!=NULL);
g_print("the media type is %s\n",media_type);
//check media type audio or video
if (g_strrstr (media_type, "audio"))
{
g_print("catch audio in decodebin\n");
t_pad = gst_element_get_static_pad (audio_bin, "sink");
g_assert(t_pad!=NULL);
lres=gst_pad_link (pad, t_pad);
g_assert (lres == GST_PAD_LINK_OK);
g_print("Dynamic pad created, linking decoderbin/queue2\n");
}
else if(g_strrstr (media_type, "video"))
{
g_print("catch video in decodebin\n");
t_pad = gst_element_get_static_pad (video_bin, "sink");
g_assert(t_pad!=NULL);
lres=gst_pad_link (pad, t_pad);
g_assert (lres == GST_PAD_LINK_OK);
g_print("Dynamic pad created, linking decoderbin/queue1\n");
}
else
{
g_print("no suitable media type found on this pad\n");
gst_caps_unref (caps);
return;
}
//release the pad and caps references taken above
gst_object_unref (t_pad);
gst_caps_unref (caps);
}
int main(int argc, char **argv)
{
GMainLoop *loop;
GstBus *bus;
//pad of rtpbin
GstPad *sink_pad;
GstPad *bin_pad;
GstPad *videopad_src,*videopad_sink;
GstPad *audiopad_src,*audiopad_sink;
//declare elements
GstElement *filesrc,*decodebin,*gstrtpbin;
//element for video
GstElement *queue1,*x264enc,*rtph264pay;
//element for audio
GstElement *queue2,*audioresample,*audioconvert,*alawenc,*rtppcmapay;
gst_init(&argc, &argv);
loop = g_main_loop_new(NULL, FALSE);
//generate elements
pipeline = gst_pipeline_new("server");
filesrc=gst_element_factory_make("filesrc", "filesrc");
decodebin=gst_element_factory_make("decodebin", "decodebin");
gstrtpbin = gst_element_factory_make("gstrtpbin", "gstrtpbin");
//for video
queue1=gst_element_factory_make("queue", "queue1");
x264enc=gst_element_factory_make("x264enc", "x264enc");
rtph264pay=gst_element_factory_make("rtph264pay", "rtph264pay");
udpsink0=gst_element_factory_make("udpsink", "udpsink0");
//for audio
queue2 = gst_element_factory_make("queue", "queue2");
audioresample = gst_element_factory_make("audioresample", "audioresample");
audioconvert = gst_element_factory_make("audioconvert", "audioconvert");
alawenc = gst_element_factory_make("alawenc", "alawenc");
rtppcmapay = gst_element_factory_make("rtppcmapay", "rtppcmapay");
udpsink1=gst_element_factory_make("udpsink", "udpsink1");
bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
gst_bus_add_watch(bus, bus_call, loop);
gst_object_unref(bus);
//set properties of elements
g_object_set(G_OBJECT(filesrc), "location",SOURCE, NULL);
g_object_set(G_OBJECT(udpsink0), "host",DEST_HOST,"port",VIDEO_PORT, "sync",FALSE,
"async", FALSE,NULL);
g_object_set(G_OBJECT(udpsink1), "host",DEST_HOST,"port",AUDIO_PORT, "sync",FALSE,
"async", FALSE,NULL);
//g_object_set(G_OBJECT(udpsink0), NULL);
//g_object_set(G_OBJECT(udpsink1), "host",DEST_HOST, NULL);
//g_object_set(G_OBJECT(udpsink0), "sync",FALSE, NULL);
//g_object_set(G_OBJECT(udpsink1), "sync",FALSE, NULL);
//create video_bin
video_bin = gst_bin_new ("videobin");
gst_bin_add_many (GST_BIN (video_bin),queue1,x264enc,rtph264pay,NULL);
res=gst_element_link_many(queue1,x264enc,rtph264pay,NULL);
g_assert (res == TRUE);
//add pad to video_bin
videopad_sink = gst_element_get_static_pad (queue1, "sink");
videopad_src= gst_element_get_static_pad (rtph264pay, "src");
gst_element_add_pad (video_bin,gst_ghost_pad_new ("sink", videopad_sink));
gst_element_add_pad (video_bin,gst_ghost_pad_new ("src",videopad_src));
gst_object_unref (videopad_sink);
gst_object_unref (videopad_src);
//create audio_bin
audio_bin = gst_bin_new ("audiobin");
gst_bin_add_many (GST_BIN (audio_bin),queue2,audioresample,audioconvert,alawenc,rtppcmapay,NULL);
res=gst_element_link_many(queue2,audioresample,audioconvert,alawenc,rtppcmapay,NULL);
g_assert (res == TRUE);
//add pad to audio_bin
audiopad_sink = gst_element_get_static_pad (queue2, "sink");
audiopad_src= gst_element_get_static_pad (rtppcmapay, "src");
res=gst_element_add_pad (audio_bin,gst_ghost_pad_new ("sink", audiopad_sink));
g_assert (res == TRUE);
res=gst_element_add_pad (audio_bin,gst_ghost_pad_new ("src", audiopad_src));
g_assert (res == TRUE);
gst_object_unref (audiopad_sink);
gst_object_unref (audiopad_src);
//add elements into pipeline
gst_bin_add_many(GST_BIN(pipeline),
filesrc,decodebin,audio_bin,video_bin,gstrtpbin,udpsink0,udpsink1,NULL);
//statically link filesrc and decodebin
res=gst_element_link(filesrc,decodebin);
g_assert (res == TRUE);
//get request pad from gstrtpbin and connect with video_bin
bin_pad=gst_element_get_pad(video_bin, "src");
g_assert(bin_pad!=NULL);
sink_pad = gst_element_get_request_pad(gstrtpbin, "recv_rtp_sink_0");
g_assert(sink_pad!=NULL);
lres=gst_pad_link(bin_pad, sink_pad);
g_assert (lres == GST_PAD_LINK_OK);
//get request pad from gstrtpbin and connect with audio_bin
bin_pad=gst_element_get_pad(audio_bin, "src");
g_assert(bin_pad!=NULL);
sink_pad = gst_element_get_request_pad(gstrtpbin, "recv_rtp_sink_1");
g_assert(sink_pad!=NULL);
lres=gst_pad_link(bin_pad, sink_pad);
g_assert (lres == GST_PAD_LINK_OK);
//signal link
g_signal_connect(decodebin, "new-decoded-pad", G_CALLBACK(on_pad_added_decodebin),NULL);
g_signal_connect(gstrtpbin, "pad-added", G_CALLBACK(on_pad_added_rtpbin),NULL);
gst_element_set_state(pipeline, GST_STATE_PLAYING);
g_print("Running...\n");
g_main_loop_run(loop);
/* Out of the main loop, clean up nicely */
g_print("Returned, stopping playback\n");
gst_element_set_state(pipeline, GST_STATE_NULL);
g_print("Deleting pipeline\n");
gst_object_unref(GST_OBJECT(pipeline));
return 0;
}