<p>GStreamer application for receiving audio and video over RTP streaming.</p><p>I have written an app expanding the RTP receiver sample code from an audio-only player into an audio and video player.</p><p>While running the code below I am getting the error given below:</p>
<p> </p><p>pad_added_cb: assertion failed: (lres == GST_PAD_LINK_OK)</p><p><br>Can you please see the code given below for reference?</p><p>I am using queues in the pipeline for separating audio and video. Do we need to link the queues?</p>
#include <string.h>
#include <math.h>

#include <gst/gst.h>

/* build both the audio and the video branch when 1; video-only when 0 */
#define AV 1

/* video branch: H.264 over RTP (payload 96) */
#define VIDEO_CAPS "application/x-rtp,payload=96,encoding-name=H264,clock-rate=90000"
#define VIDEO_DEPAY "rtph264depay"
#define VIDEO_DEC "ffdec_h264"
#define VIDEO_PARSE "h264parse"
#define VIDEO_COLORSPACE "ffmpegcolorspace"
#define VIDEO_DEI "deinterlace"
#define VIDEO_SINK "ximagesink"
#define VIDEO_PORT 9700

/* audio branch: AMR-WB over RTP */
#define AUDIO_CAPS "application/x-rtp,media=(string)audio,clock-rate=(int)16000,encoding-name=(string)AMR-WB,encoding-params=(string)1,octet-align=(string)1"
#define AUDIO_DEPAY "rtpamrdepay"
#define AUDIO_DEC "amrwbdec"
#define AUDIO_SINK "alsasink"
#define AUDIO_PORT 1200

/* the destination machine to send RTCP to. This is the address of the sender and
 * is used to send back the RTCP reports of this receiver. If the data is sent
 * from another machine, change this address. */
#define DEST_HOST "107.108.198.131"

/* print the stats of a source */
static void<br>print_source_stats (GObject * source)<br>{<br> GstStructure *stats;<br> gchar *str;</p><p> g_return_if_fail (source != NULL);</p><p> /* get the source stats */<br> g_object_get (source, "stats", &stats, NULL);</p>
<p> /* simply dump the stats structure */<br> str = gst_structure_to_string (stats);<br> g_print ("source stats: %s\n", str);</p><p> gst_structure_free (stats);<br> g_free (str);<br>}</p><p>/* will be called when gstrtpbin signals on-ssrc-active. It means that an RTCP<br>
* packet was received from another source. */<br>static void<br>on_ssrc_active_cb (GstElement * rtpbin, guint sessid, guint ssrc,<br> GstElement * depay)<br>{<br> GObject *session, *isrc, *osrc;</p><p> g_print ("got RTCP from session %u, SSRC %u\n", sessid, ssrc);</p>
<p> /* get the right session */<br> g_signal_emit_by_name (rtpbin, "get-internal-session", sessid, &session);</p><p> /* get the internal source (the SSRC allocated to us, the receiver */<br> g_object_get (session, "internal-source", &isrc, NULL);<br>
print_source_stats (isrc);</p><p> /* get the remote source that sent us RTCP */<br> g_signal_emit_by_name (session, "get-source-by-ssrc", ssrc, &osrc);<br> print_source_stats (osrc);<br>}</p><p>/* will be called when rtpbin has validated a payload that we can depayload */<br>
static void<br>pad_added_cb (GstElement * rtpbin, GstPad * new_pad, GstElement * depay)<br>{<br> GstPad *sinkpad;<br> GstPadLinkReturn lres;</p><p> g_print ("new payload on pad: %s\n", GST_PAD_NAME (new_pad));</p>
<p> sinkpad = gst_element_get_static_pad (depay, "sink");<br> g_assert (sinkpad);</p><p> lres = gst_pad_link (new_pad, sinkpad);<br> g_assert (lres == GST_PAD_LINK_OK);<br> gst_object_unref (sinkpad);<br>}<br>
/* build a pipeline equivalent to:<br> *<br> * gst-launch -v gstrtpbin name=rtpbin udpsrc caps="application/x-rtp,payload=96,encoding-name=H264,clock-rate=90000" port=9200 ! rtpbin.recv_rtp_sink_0 rtpbin. ! rtph264depay !<br>
* h264parse ! ffdec_h264 ! ffmpegcolorspace ! deinterlace ! ximagesink udpsrc port=42000 caps="application/x-rtp,media=(string)audio,payload=(int)101,clock-rate=(int)16000,encoding-name= * (string)AMR-WB,encoding-params=(string)1,octet-align=(string)1" port=42000 ! rtpbin.recv_rtp_sink_1 rtpbin. ! rtpamrdepay ! amrwbdec ! audioresample ! audioconvert ! alsasink sync=false<br>
<br> */<br>int main (void)//av_play<br>{<br> GstElement *rtpbin;<br> GstElement *vrtpsrc, *vrtcpsrc, *vrtcpsink;<br> GstElement *artpsrc, *artcpsrc, *artcpsink;<br> GstElement *videodepay, *videodec, *videoparse, *colorspace, *deinterlacer,*videosink;//h264parse ! ffdec_h264 ! ffmpegcolorspace ! deinterlace<br>
GstElement *audiodepay, *audiodec, *audiores, *audioconv, *audiosink;<br> GstElement *pipeline;<br> GstElement *queueA,* *queueV; <br> GMainLoop *loop;<br> GstCaps *vcaps,*acaps;<br> gboolean res,res1;<br> GstPadLinkReturn lres,lres1;<br>
GstPad *asrcpad, *asinkpad,*vsrcpad, *vsinkpad;</p><p> <br> /* always init first */<br> gst_init (NULL, NULL);<br> <br> g_printerr("Client App for Audio and Video\n");<br> /* the pipeline to hold everything */<br>
pipeline = gst_pipeline_new (NULL);<br> g_assert (pipeline);</p><p><br> /* the udp src and source we will use for RTP and RTCP */<br> vrtpsrc = gst_element_factory_make ("udpsrc", "vrtpsrc");<br>
g_assert (vrtpsrc);<br>
g_object_set (vrtpsrc, "port", VIDEO_PORT, NULL);<br> /* we need to set caps on the udpsrc for the RTP data */<br> vcaps = gst_caps_from_string (VIDEO_CAPS);<br> g_object_set (vrtpsrc, "caps", vcaps, NULL);<br>
gst_caps_unref (vcaps);<br>#if AV<br> artpsrc = gst_element_factory_make ("udpsrc", "artpsrc");<br> g_assert (artpsrc);<br> g_object_set (artpsrc, "port", AUDIO_PORT, NULL);<br> /* we need to set caps on the udpsrc for the RTP data */<br>
acaps = gst_caps_from_string (AUDIO_CAPS);<br> g_object_set (artpsrc, "caps", acaps, NULL);<br> gst_caps_unref (acaps);<br>#endif</p><p> vrtcpsrc = gst_element_factory_make ("udpsrc", "vrtcpsrc");<br>
g_assert (vrtcpsrc);<br> g_object_set (vrtcpsrc, "port", 5003, NULL);</p><p> vrtcpsink = gst_element_factory_make ("udpsink", "vrtcpsink");<br> g_assert (vrtcpsink);<br> g_object_set (vrtcpsink, "port", 5007, "host", DEST_HOST, NULL);<br>
/* no need for synchronisation or preroll on the RTCP sink */<br> g_object_set (vrtcpsink, "async", FALSE, "sync", FALSE, NULL);</p><p> </p><p>#if AV<br> artcpsrc = gst_element_factory_make ("udpsrc", "artcpsrc");<br>
g_assert (artcpsrc);<br> g_object_set (artcpsrc, "port", 5003, NULL);</p><p> artcpsink = gst_element_factory_make ("udpsink", "rtcpsink");<br> g_assert (artcpsink);<br> g_object_set (artcpsink, "port", 5007, "host", DEST_HOST, NULL);<br>
/* no need for synchronisation or preroll on the RTCP sink */<br> g_object_set (artcpsink, "async", FALSE, "sync", FALSE, NULL);</p><p> </p><p>#endif<br> /* the depayloading and decoding */<br> videodepay = gst_element_factory_make (VIDEO_DEPAY, "videodepay");<br>
g_assert (videodepay);<br> videodec = gst_element_factory_make (VIDEO_DEC, "videodec");<br> g_assert (videodec);<br>#if AV <br> /* the depayloading and decoding */<br> audiodepay = gst_element_factory_make (AUDIO_DEPAY, "audiodepay");<br>
g_assert (audiodepay);<br> audiodec = gst_element_factory_make (AUDIO_DEC, "audiodec");<br> g_assert (audiodec);<br>#endif<br>//*videoparse, *colorspace, *deinterlacer,*videosink; <br> /* the audio playback and format conversion */</p>
<p> videoparse = gst_element_factory_make (VIDEO_PARSE, "videoparse");<br> g_assert (videoparse);<br> colorspace = gst_element_factory_make (VIDEO_COLORSPACE, "colorspace");<br> g_assert (colorspace);<br>
deinterlacer = gst_element_factory_make (VIDEO_DEI, "deinterlacer");<br> g_assert (deinterlacer);<br> videosink = gst_element_factory_make (VIDEO_SINK, "videosink");<br> g_assert (videosink);</p>
<p>
<br>#if AV<br> /* the audio playback and format conversion */<br> audioconv = gst_element_factory_make ("audioconvert", "audioconv");<br> g_assert (audioconv);<br> audiores = gst_element_factory_make ("audioresample", "audiores");<br>
g_assert (audiores);<br> audiosink = gst_element_factory_make (AUDIO_SINK, "audiosink");<br> g_assert (audiosink);<br> g_object_set (audiosink,"sync", FALSE, NULL);<br>#endif</p><p> queueA = gst_element_factory_make("queue", "queue-audio"); <br>
queueV = gst_element_factory_make("queue", "queue-video"); <br> /* add depayloading and playback to the pipeline and link */</p>
<p>#if AV<br> gst_bin_add_many (GST_BIN (pipeline), vrtpsrc,videodepay,videodec,videoparse,queueV,colorspace,deinterlacer,videosink,artpsrc, artcpsrc, artcpsink, \<br> audiodepay,audiodec,queueA,audioconv,audiores,audiosink, NULL);<br>
res = gst_element_link_many (audiodepay, audiodec, audioconv, audiores, audiosink, NULL);<br> if(!res)<br> g_printerr("Linking audio elements failed\n");<br> res1 = gst_element_link_many (videodepay, videodec, colorspace,deinterlacer,<br>
videosink, NULL);<br> if(!res1)<br> g_printerr("Linking video elements failed\n");<br> <br>#else<br> gst_bin_add_many (GST_BIN (pipeline), audiodepay, audiodec, audioconv,<br> audiores, audiosink, NULL);</p>
<p> res = gst_element_link_many (audiodepay, audiodec, audioconv, audiores,<br> audiosink, NULL);<br> g_assert (res == TRUE);</p><p>#endif</p><p><br> rtpbin = gst_element_factory_make("gstrtpbin", "rtpbin");<br>
if ( !rtpbin ) {<br> g_printerr("Failed to create gstrtpbin\n");<br> g_assert (rtpbin);<br> return 0;<br> }<br> </p><p> gst_bin_add (GST_BIN (pipeline), rtpbin);</p><p> /* now link all to the rtpbin, start by getting an RTP sinkpad for session 0 */<br>
vsrcpad = gst_element_get_static_pad (vrtpsrc, "src");<br> vsinkpad = gst_element_get_request_pad (rtpbin, "recv_rtp_sink_0");<br> lres = gst_pad_link (vsrcpad, vsinkpad);<br> g_assert (lres == GST_PAD_LINK_OK);<br>
gst_object_unref (vsrcpad);<br>#if AV <br> asrcpad = gst_element_get_static_pad (artpsrc, "src");<br> asinkpad = gst_element_get_request_pad (rtpbin, "recv_rtp_sink_1");<br> lres1 = gst_pad_link (asrcpad, asinkpad);<br>
g_assert (lres1 == GST_PAD_LINK_OK);<br> gst_object_unref (asrcpad);<br>#endif</p><p>#if 1<br> /* get an RTCP sinkpad in session 0 */<br> asrcpad = gst_element_get_static_pad (artcpsrc, "src");<br> asinkpad = gst_element_get_request_pad (rtpbin, "recv_rtcp_sink_0");<br>
lres = gst_pad_link (asrcpad, asinkpad);<br> g_assert (lres == GST_PAD_LINK_OK);<br> gst_object_unref (asrcpad);<br> gst_object_unref (asinkpad);</p><p> /* get an RTCP srcpad for sending RTCP back to the sender */<br>
asrcpad = gst_element_get_request_pad (rtpbin, "send_rtcp_src_0");<br> asinkpad = gst_element_get_static_pad (artcpsink, "sink");<br> lres = gst_pad_link (asrcpad, asinkpad);<br> g_assert (lres == GST_PAD_LINK_OK);<br>
gst_object_unref (asinkpad);<br>#if AV<br> /* get an RTCP sinkpad in session 1 */<br> vsrcpad = gst_element_get_static_pad (vrtcpsrc, "src");<br> vsinkpad = gst_element_get_request_pad (rtpbin, "recv_rtcp_sink_1");<br>
lres1 = gst_pad_link (vsrcpad, vsinkpad);<br> g_assert (lres == GST_PAD_LINK_OK);<br> gst_object_unref (vsrcpad);<br> gst_object_unref (vsinkpad);</p><p> /* get an RTCP srcpad for sending RTCP back to the sender */<br>
vsrcpad = gst_element_get_request_pad (rtpbin, "send_rtcp_src_1");<br> vsinkpad = gst_element_get_static_pad (vrtcpsink, "sink");<br> lres1 = gst_pad_link (vsrcpad, vsinkpad);<br> g_assert (lres == GST_PAD_LINK_OK);<br>
gst_object_unref (vsinkpad);<br>#endif<br>#endif</p><p> /* the RTP pad that we have to connect to the depayloader will be created<br> * dynamically so we connect to the pad-added signal, pass the depayloader as<br> * user_data so that we can link to it. */<br>
g_signal_connect (rtpbin, "pad-added", G_CALLBACK (pad_added_cb), audiodepay);<br> g_print ("pad added cb for audio\n");<br>#if AV<br> g_signal_connect (rtpbin, "pad-added", G_CALLBACK (pad_added_cb), videodepay);<br>
g_print ("pad added cb for video\n");<br>#endif<br> /* give some stats when we receive RTCP */<br>#if 0 <br> g_signal_connect (rtpbin, "on-ssrc-active", G_CALLBACK (on_ssrc_active_cb),<br> audiodepay);<br>
#endif<br> /* set the pipeline to playing */<br> g_print ("starting receiver pipeline\n");<br> gst_element_set_state (pipeline, GST_STATE_PLAYING);</p><p> /* we need to run a GLib main loop to get the messages */<br>
loop = g_main_loop_new (NULL, FALSE);<br> g_main_loop_run (loop);</p><p> g_print ("stopping receiver pipeline\n");<br> gst_element_set_state (pipeline, GST_STATE_NULL);</p><p> gst_object_unref (pipeline);</p>
<p> return 0;</p><p>}<br clear="all"></p><div><br></div><div><br></div><div><br></div><div><br></div> <span style="color:rgb(153,153,153)">Cheers!</span><br> <b>Deepth</b><br><br> <br> <br> <br><br>