Video and Audio over RTP

deepth pk deepthpk at gmail.com
Fri Apr 27 05:24:45 PDT 2012


Gstreamer application for receiving audio and video over RTP streaming.

I have written an app for expanding the RTP reciever sample code from audio
player to audio adn video player

While running the code below I am getting the Error given below



pad_added_cb: assertion failed: (lres == GST_PAD_LINK_OK)


Can you please see the code given below for reference?

I am suing queues with pipeline for spereating audio and video. DO we need
to link the queues?



















#include <string.h>
#include <math.h>

#include <gst/gst.h>
#define AV 1

#define VIDEO_CAPS
"application/x-rtp,payload=96,encoding-name=H264,clock-rate=90000"
#define VIDEO_DEPAY "rtph264depay"
#define VIDEO_DEC   "ffdec_h264"
#define VIDEO_PARSE "h264parse"
#define VIDEO_COLORSPACE "ffmpegcolorspace"
#define VIDEO_DEI "deinterlace"
#define VIDEO_SINK  "ximagesink"
#define VIDEO_PORT 9700


#define AUDIO_CAPS
"application/x-rtp,media=(string)audio,clock-rate=(int)16000,encoding-name=(string)AMR-WB,encoding-params=(string)1,octet-align=(string)1"
#define AUDIO_DEPAY "rtpamrdepay"
#define AUDIO_DEC   "amrwbdec"
#define AUDIO_SINK  "alsasink"
#define AUDIO_PORT 1200


/* the destination machine to send RTCP to. This is the address of the
sender and
 * is used to send back the RTCP reports of this receiver. If the data is
sent
 * from another machine, change this address. */
#define DEST_HOST "107.108.198.131"

/* print the stats of a source */
static void
print_source_stats (GObject * source)
{
  GstStructure *stats;
  gchar *str;

  g_return_if_fail (source != NULL);

  /* get the source stats */
  g_object_get (source, "stats", &stats, NULL);

  /* simply dump the stats structure */
  str = gst_structure_to_string (stats);
  g_print ("source stats: %s\n", str);

  gst_structure_free (stats);
  g_free (str);
}

/* will be called when gstrtpbin signals on-ssrc-active. It means that an
RTCP
 * packet was received from another source. */
static void
on_ssrc_active_cb (GstElement * rtpbin, guint sessid, guint ssrc,
    GstElement * depay)
{
  GObject *session, *isrc, *osrc;

  g_print ("got RTCP from session %u, SSRC %u\n", sessid, ssrc);

  /* get the right session */
  g_signal_emit_by_name (rtpbin, "get-internal-session", sessid, &session);

  /* get the internal source (the SSRC allocated to us, the receiver */
  g_object_get (session, "internal-source", &isrc, NULL);
  print_source_stats (isrc);

  /* get the remote source that sent us RTCP */
  g_signal_emit_by_name (session, "get-source-by-ssrc", ssrc, &osrc);
  print_source_stats (osrc);
}

/* will be called when rtpbin has validated a payload that we can depayload
*/
static void
pad_added_cb (GstElement * rtpbin, GstPad * new_pad, GstElement * depay)
{
  GstPad *sinkpad;
  GstPadLinkReturn lres;

  g_print ("new payload on pad: %s\n", GST_PAD_NAME (new_pad));

  sinkpad = gst_element_get_static_pad (depay, "sink");
  g_assert (sinkpad);

  lres = gst_pad_link (new_pad, sinkpad);
  g_assert (lres == GST_PAD_LINK_OK);
  gst_object_unref (sinkpad);
}
/* build a pipeline equivalent to:
 *
 * gst-launch -v gstrtpbin name=rtpbin udpsrc
caps="application/x-rtp,payload=96,encoding-name=H264,clock-rate=90000"
port=9200 ! rtpbin.recv_rtp_sink_0 rtpbin. ! rtph264depay !
 * h264parse ! ffdec_h264 ! ffmpegcolorspace ! deinterlace ! ximagesink
udpsrc port=42000
caps="application/x-rtp,media=(string)audio,payload=(int)101,clock-rate=(int)16000,encoding-name=
* (string)AMR-WB,encoding-params=(string)1,octet-align=(string)1"
port=42000 ! rtpbin.recv_rtp_sink_1 rtpbin. ! rtpamrdepay ! amrwbdec !
audioresample ! audioconvert ! alsasink sync=false

 */
int main (void)//av_play
{
  GstElement *rtpbin;
  GstElement *vrtpsrc, *vrtcpsrc, *vrtcpsink;
  GstElement *artpsrc, *artcpsrc, *artcpsink;
  GstElement *videodepay, *videodec, *videoparse, *colorspace,
*deinterlacer,*videosink;//h264parse ! ffdec_h264 ! ffmpegcolorspace !
deinterlace
  GstElement *audiodepay, *audiodec, *audiores, *audioconv, *audiosink;
  GstElement *pipeline;
  GstElement *queueA,* *queueV;
  GMainLoop *loop;
  GstCaps *vcaps,*acaps;
  gboolean res,res1;
  GstPadLinkReturn lres,lres1;
  GstPad *asrcpad, *asinkpad,*vsrcpad, *vsinkpad;


  /* always init first */
  gst_init (NULL, NULL);

  g_printerr("Client App for Audio and Video\n");
  /* the pipeline to hold everything */
  pipeline = gst_pipeline_new (NULL);
  g_assert (pipeline);


  /* the udp src and source we will use for RTP and RTCP */
  vrtpsrc = gst_element_factory_make ("udpsrc", "vrtpsrc");
  g_assert (vrtpsrc);
  g_object_set (vrtpsrc, "port", VIDEO_PORT, NULL);
  /* we need to set caps on the udpsrc for the RTP data */
  vcaps = gst_caps_from_string (VIDEO_CAPS);
  g_object_set (vrtpsrc, "caps", vcaps, NULL);
  gst_caps_unref (vcaps);
#if AV
  artpsrc = gst_element_factory_make ("udpsrc", "artpsrc");
  g_assert (artpsrc);
  g_object_set (artpsrc, "port", AUDIO_PORT, NULL);
  /* we need to set caps on the udpsrc for the RTP data */
  acaps = gst_caps_from_string (AUDIO_CAPS);
  g_object_set (artpsrc, "caps", acaps, NULL);
  gst_caps_unref (acaps);
#endif

  vrtcpsrc = gst_element_factory_make ("udpsrc", "vrtcpsrc");
  g_assert (vrtcpsrc);
  g_object_set (vrtcpsrc, "port", 5003, NULL);

  vrtcpsink = gst_element_factory_make ("udpsink", "vrtcpsink");
  g_assert (vrtcpsink);
  g_object_set (vrtcpsink, "port", 5007, "host", DEST_HOST, NULL);
  /* no need for synchronisation or preroll on the RTCP sink */
  g_object_set (vrtcpsink, "async", FALSE, "sync", FALSE, NULL);



#if AV
  artcpsrc = gst_element_factory_make ("udpsrc", "artcpsrc");
  g_assert (artcpsrc);
  g_object_set (artcpsrc, "port", 5003, NULL);

  artcpsink = gst_element_factory_make ("udpsink", "rtcpsink");
  g_assert (artcpsink);
  g_object_set (artcpsink, "port", 5007, "host", DEST_HOST, NULL);
  /* no need for synchronisation or preroll on the RTCP sink */
  g_object_set (artcpsink, "async", FALSE, "sync", FALSE, NULL);



#endif
  /* the depayloading and decoding */
  videodepay = gst_element_factory_make (VIDEO_DEPAY, "videodepay");
  g_assert (videodepay);
  videodec = gst_element_factory_make (VIDEO_DEC, "videodec");
  g_assert (videodec);
#if AV
 /* the depayloading and decoding */
  audiodepay = gst_element_factory_make (AUDIO_DEPAY, "audiodepay");
  g_assert (audiodepay);
  audiodec = gst_element_factory_make (AUDIO_DEC, "audiodec");
  g_assert (audiodec);
#endif
//*videoparse, *colorspace, *deinterlacer,*videosink;
 /* the audio playback and format conversion */

  videoparse = gst_element_factory_make (VIDEO_PARSE, "videoparse");
  g_assert (videoparse);
  colorspace = gst_element_factory_make (VIDEO_COLORSPACE, "colorspace");
  g_assert (colorspace);
  deinterlacer = gst_element_factory_make (VIDEO_DEI, "deinterlacer");
  g_assert (deinterlacer);
  videosink = gst_element_factory_make (VIDEO_SINK, "videosink");
  g_assert (videosink);


#if AV
 /* the audio playback and format conversion */
  audioconv = gst_element_factory_make ("audioconvert", "audioconv");
  g_assert (audioconv);
  audiores = gst_element_factory_make ("audioresample", "audiores");
  g_assert (audiores);
  audiosink = gst_element_factory_make (AUDIO_SINK, "audiosink");
  g_assert (audiosink);
  g_object_set (audiosink,"sync", FALSE, NULL);
#endif

   queueA = gst_element_factory_make("queue", "queue-audio");
   queueV = gst_element_factory_make("queue", "queue-video");

/* add depayloading and playback to the pipeline and link */

#if AV
  gst_bin_add_many (GST_BIN (pipeline),
vrtpsrc,videodepay,videodec,videoparse,queueV,colorspace,deinterlacer,videosink,artpsrc,
artcpsrc, artcpsink, \
  audiodepay,audiodec,queueA,audioconv,audiores,audiosink, NULL);
  res = gst_element_link_many (audiodepay, audiodec, audioconv,
audiores,                   audiosink, NULL);
  if(!res)
   g_printerr("Linking audio elements failed\n");
  res1 = gst_element_link_many (videodepay, videodec,
colorspace,deinterlacer,
      videosink, NULL);
  if(!res1)
   g_printerr("Linking video elements failed\n");

#else
  gst_bin_add_many (GST_BIN (pipeline), audiodepay, audiodec, audioconv,
      audiores, audiosink, NULL);

  res = gst_element_link_many (audiodepay, audiodec, audioconv, audiores,
      audiosink, NULL);
  g_assert (res == TRUE);

#endif


    rtpbin = gst_element_factory_make("gstrtpbin", "rtpbin");
    if ( !rtpbin ) {
        g_printerr("Failed to create gstrtpbin\n");
 g_assert (rtpbin);
        return 0;
    }


 gst_bin_add (GST_BIN (pipeline), rtpbin);

  /* now link all to the rtpbin, start by getting an RTP sinkpad for
session 0 */
  vsrcpad = gst_element_get_static_pad (vrtpsrc, "src");
  vsinkpad = gst_element_get_request_pad (rtpbin, "recv_rtp_sink_0");
  lres = gst_pad_link (vsrcpad, vsinkpad);
  g_assert (lres == GST_PAD_LINK_OK);
  gst_object_unref (vsrcpad);
#if AV
  asrcpad = gst_element_get_static_pad (artpsrc, "src");
  asinkpad = gst_element_get_request_pad (rtpbin, "recv_rtp_sink_1");
  lres1 = gst_pad_link (asrcpad, asinkpad);
  g_assert (lres1 == GST_PAD_LINK_OK);
  gst_object_unref (asrcpad);
#endif

#if 1
  /* get an RTCP sinkpad in session 0 */
  asrcpad = gst_element_get_static_pad (artcpsrc, "src");
  asinkpad = gst_element_get_request_pad (rtpbin, "recv_rtcp_sink_0");
  lres = gst_pad_link (asrcpad, asinkpad);
  g_assert (lres == GST_PAD_LINK_OK);
  gst_object_unref (asrcpad);
  gst_object_unref (asinkpad);

  /* get an RTCP srcpad for sending RTCP back to the sender */
  asrcpad = gst_element_get_request_pad (rtpbin, "send_rtcp_src_0");
  asinkpad = gst_element_get_static_pad (artcpsink, "sink");
  lres = gst_pad_link (asrcpad, asinkpad);
  g_assert (lres == GST_PAD_LINK_OK);
  gst_object_unref (asinkpad);
#if AV
  /* get an RTCP sinkpad in session 1 */
  vsrcpad = gst_element_get_static_pad (vrtcpsrc, "src");
  vsinkpad = gst_element_get_request_pad (rtpbin, "recv_rtcp_sink_1");
  lres1 = gst_pad_link (vsrcpad, vsinkpad);
  g_assert (lres == GST_PAD_LINK_OK);
  gst_object_unref (vsrcpad);
  gst_object_unref (vsinkpad);

  /* get an RTCP srcpad for sending RTCP back to the sender */
  vsrcpad = gst_element_get_request_pad (rtpbin, "send_rtcp_src_1");
  vsinkpad = gst_element_get_static_pad (vrtcpsink, "sink");
  lres1 = gst_pad_link (vsrcpad, vsinkpad);
  g_assert (lres == GST_PAD_LINK_OK);
  gst_object_unref (vsinkpad);
#endif
#endif

  /* the RTP pad that we have to connect to the depayloader will be created
   * dynamically so we connect to the pad-added signal, pass the
depayloader as
   * user_data so that we can link to it. */
  g_signal_connect (rtpbin, "pad-added", G_CALLBACK (pad_added_cb),
audiodepay);
  g_print ("pad added cb for audio\n");
#if AV
  g_signal_connect (rtpbin, "pad-added", G_CALLBACK (pad_added_cb),
videodepay);
  g_print ("pad added cb for video\n");
#endif
  /* give some stats when we receive RTCP */
#if 0
 g_signal_connect (rtpbin, "on-ssrc-active", G_CALLBACK (on_ssrc_active_cb),
      audiodepay);
#endif
  /* set the pipeline to playing */
  g_print ("starting receiver pipeline\n");
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* we need to run a GLib main loop to get the messages */
  loop = g_main_loop_new (NULL, FALSE);
  g_main_loop_run (loop);

  g_print ("stopping receiver pipeline\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);

  gst_object_unref (pipeline);

  return 0;

}




   Cheers!
   *Deepth*
-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://lists.freedesktop.org/archives/gstreamer-devel/attachments/20120427/4f74f683/attachment.htm>


More information about the gstreamer-devel mailing list