how to capture sequence no. and time stamp of RTP/UDP packets

Raheeb Muzaffar raheeb.muzaffar at yahoo.com
Thu Apr 30 10:39:31 PDT 2015


Hello,
I am multicasting a video stream using RTP (rtph264pay) and UDP (udpsink) from a source node. At the client end the video stream is decoded and displayed as it streams. I have coded this with GStreamer in C/C++. I now want to
1. At the source, capture the sequence number, packet size (in bytes) and timestamp of each packet of the video stream being transmitted over the network.
2. At the receiver, capture the sequence number and timestamp of the received packets so as to measure delay and packet loss.
It would be great if someone could suggest a way to implement this in C/C++. I do not want to use GST_DEBUG since I have to do some processing with the video packets. My own rough idea for the capture is sketched below (untested), followed by the code for the server and client.
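
What I have in mind, which is only my assumption and not something I have tried yet, is a buffer probe on the payloader's src pad that maps each outgoing buffer as an RTP packet and reads the sequence number, RTP timestamp, packet size and buffer timestamp. The callback name and the attachment point below are mine, not part of the working code further down:

#include <gst/rtp/gstrtpbuffer.h>   /* needs gstreamer-rtp-1.0 */

static GstPadProbeReturn
sender_rtp_probe (GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
{
  GstBuffer *buffer = GST_PAD_PROBE_INFO_BUFFER (info);
  GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;

  if (gst_rtp_buffer_map (buffer, GST_MAP_READ, &rtp)) {
    /* sequence number and RTP timestamp from the RTP header */
    guint16 seq = gst_rtp_buffer_get_seq (&rtp);
    guint32 rtp_ts = gst_rtp_buffer_get_timestamp (&rtp);
    /* packet size in bytes and the buffer's pipeline timestamp */
    gsize size = gst_buffer_get_size (buffer);
    GstClockTime pts = GST_BUFFER_PTS (buffer);

    g_print ("TX seq=%u rtp_ts=%u size=%" G_GSIZE_FORMAT
        " pts=%" GST_TIME_FORMAT "\n",
        seq, rtp_ts, size, GST_TIME_ARGS (pts));
    gst_rtp_buffer_unmap (&rtp);
  }
  return GST_PAD_PROBE_OK;
}

/* attached in main() once the payloader exists:
 *   GstPad *srcpad = gst_element_get_static_pad (Data.rtp, "src");
 *   gst_pad_add_probe (srcpad, GST_PAD_PROBE_TYPE_BUFFER,
 *       sender_rtp_probe, NULL, NULL);
 *   gst_object_unref (srcpad);
 */

I am not sure this is the right approach, so corrections are welcome.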

SERVER 
#include <gst/gst.h>

static gboolean
bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
  GMainLoop *loop = (GMainLoop *) data;

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
      g_print ("End of video stream\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_ERROR: {
      gchar *debug;
      GError *error;

      gst_message_parse_error (msg, &error, &debug);
      g_free (debug);
      g_printerr ("Error: %s\n", error->message);
      g_error_free (error);
      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }
  return TRUE;
}

static void
on_pad_added (GstElement *element, GstPad *pad, gpointer data)
{
  GstPad *sinkpad;
  GstElement *decoder = (GstElement *) data;

  g_print ("Dynamic pad created, linking demuxer/decoder\n");
  sinkpad = gst_element_get_static_pad (decoder, "sink");
  gst_pad_link (pad, sinkpad);
  gst_object_unref (sinkpad);
}


typedef struct _CustomData {
  GMainLoop *loop;
  GstElement *pipeline, *source, *demuxer, *decoder, *aratio, *encoder, *rtp,
      *usink, *que, *videoconvert, *videoscale, *videoscale_capsfilter;
  GstBus *bus;
  guint bus_watch_id;
  guint bitrate;
  GstCaps *caps, *videoscalecaps;
} CustomData;


int
main (int argc, char *argv[])
{
  CustomData Data;

  /* Initialization */
  gst_init (&argc, &argv);

  Data.loop = g_main_loop_new (NULL, FALSE);

  /* Check input arguments */
  if (argc != 2) {
    g_printerr ("Usage: %s <cif filename>\n", argv[0]);
    return -1;
  }

  Data.videoscalecaps = gst_caps_from_string ("video/x-raw, width=1024, height=768");

  Data.pipeline = gst_pipeline_new ("video-send");
  Data.source = gst_element_factory_make ("filesrc", "file-source");
  g_assert (Data.source);
  Data.demuxer = gst_element_factory_make ("qtdemux", "demuxer");
  g_assert (Data.demuxer);
  Data.que = gst_element_factory_make ("queue", "queue");
  g_assert (Data.que);
  Data.decoder = gst_element_factory_make ("avdec_h264", "decoder");
  g_assert (Data.decoder);
  Data.videoscale = gst_element_factory_make ("videoscale", "scale");
  g_assert (Data.videoscale);
  Data.videoscale_capsfilter = gst_element_factory_make ("capsfilter", "videoscale_capsfilter");
  g_assert (Data.videoscale_capsfilter);
  Data.aratio = gst_element_factory_make ("aspectratiocrop", "aratio");
  g_assert (Data.aratio);
  Data.videoconvert = gst_element_factory_make ("videoconvert", "videoconvert");
  g_assert (Data.videoconvert);
  Data.encoder = gst_element_factory_make ("x264enc", "encoder");
  g_assert (Data.encoder);
  Data.rtp = gst_element_factory_make ("rtph264pay", "rtp");
  g_assert (Data.rtp);
  Data.usink = gst_element_factory_make ("udpsink", "udp_sink");
  g_assert (Data.usink);

  g_object_set (G_OBJECT (Data.source), "location", argv[1], NULL);
  g_object_set (G_OBJECT (Data.source), "do-timestamp", TRUE, NULL);
  g_object_set (G_OBJECT (Data.aratio), "aspect-ratio", 4, 3, NULL);
  g_object_set (G_OBJECT (Data.encoder), "b-adapt", TRUE, NULL);
  g_object_set (G_OBJECT (Data.usink), "host", "224.0.0.0", NULL);
  g_object_set (G_OBJECT (Data.usink), "port", 5007, NULL);
  g_object_set (G_OBJECT (Data.usink), "auto-multicast", TRUE, NULL);
  g_object_set (G_OBJECT (Data.videoscale_capsfilter), "caps", Data.videoscalecaps, NULL);

  Data.bus = gst_pipeline_get_bus (GST_PIPELINE (Data.pipeline));
  Data.bus_watch_id = gst_bus_add_watch (Data.bus, bus_call, Data.loop);
  gst_object_unref (Data.bus);

  gst_bin_add (GST_BIN (Data.pipeline), Data.source);
  gst_bin_add (GST_BIN (Data.pipeline), Data.demuxer);
  gst_bin_add (GST_BIN (Data.pipeline), Data.decoder);
  gst_bin_add (GST_BIN (Data.pipeline), Data.videoscale);
  gst_bin_add (GST_BIN (Data.pipeline), Data.videoscale_capsfilter);
  gst_bin_add (GST_BIN (Data.pipeline), Data.aratio);
  gst_bin_add (GST_BIN (Data.pipeline), Data.videoconvert);
  gst_bin_add (GST_BIN (Data.pipeline), Data.encoder);
  gst_bin_add (GST_BIN (Data.pipeline), Data.rtp);
  gst_bin_add (GST_BIN (Data.pipeline), Data.usink);

  if (!gst_element_link (Data.source, Data.demuxer)) {
    g_printerr ("Here is the problem.\n");
  }
  if (!gst_element_link_many (Data.decoder, Data.videoscale,
          Data.videoscale_capsfilter, Data.aratio, Data.videoconvert,
          Data.encoder, Data.rtp, Data.usink, NULL)) {
    g_printerr ("Here is the problem too.\n");
  }

  g_signal_connect (Data.demuxer, "pad-added", G_CALLBACK (on_pad_added), Data.decoder);

  g_print ("Now playing: %s\n", argv[1]);
  gst_element_set_state (Data.pipeline, GST_STATE_PLAYING);
  g_main_loop_run (Data.loop);

  gst_element_set_state (Data.pipeline, GST_STATE_NULL);
  gst_object_unref (GST_OBJECT (Data.pipeline));
  g_source_remove (Data.bus_watch_id);
  g_main_loop_unref (Data.loop);

  return 0;
}


CLIENT
#include <gst/gst.h>

static gboolean
bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
  GMainLoop *loop = (GMainLoop *) data;

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
      g_print ("End of stream\n");
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_ERROR: {
      gchar *debug;
      GError *error;

      gst_message_parse_error (msg, &error, &debug);
      g_free (debug);
      g_printerr ("Error: %s\n", error->message);
      g_error_free (error);
      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }
  return TRUE;
}

static void
on_pad_added (GstElement *element, GstPad *pad, gpointer data)
{
  GstPad *sinkpad;
  GstElement *decoder = (GstElement *) data;

  g_print ("Dynamic pad created, linking demuxer/decoder\n");
  sinkpad = gst_element_get_static_pad (decoder, "sink");
  gst_pad_link (pad, sinkpad);
  gst_object_unref (sinkpad);
}

typedef struct _CustomData {
  GstElement *pipeline, *source, *rtp, *decoder, *sink, *videomixer,
      *videoconvert, *videoscale, *que, *filter;
  GstState state;
} CustomData;



int
main (int argc, char *argv[])
{
  CustomData data;
  GMainLoop *loop;
  GstPad *pad;
  GstCaps *caps, *filtercaps;
  GstBus *bus;
  GstStateChangeReturn ret;

  gst_init (&argc, &argv);

  caps = gst_caps_from_string ("application/x-rtp, media=(string)video, "
      "clock-rate=(int)90000, encoding-name=(string)H264, payload=(int)96");

  loop = g_main_loop_new (NULL, FALSE);

  if (argc != 1) {
    g_printerr ("Usage: %s <udpsrc>\n", argv[0]);
    return -1;
  }

  data.pipeline = gst_pipeline_new ("video-receive");
  data.source = gst_element_factory_make ("udpsrc", "udp-source");
  data.videoscale = gst_element_factory_make ("videoscale", "video-scale");
  data.videoconvert = gst_element_factory_make ("videoconvert", "videoconvert");
  data.sink = gst_element_factory_make ("xvimagesink", "video-output");
  data.rtp = gst_element_factory_make ("rtph264depay", "rtp");
  data.decoder = gst_element_factory_make ("avdec_h264", "decoder");
  data.filter = gst_element_factory_make ("capsfilter", "filtercaps");
  data.que = gst_element_factory_make ("queue", "queue");

  filtercaps = gst_caps_new_simple ("video/x-raw",
      "format", G_TYPE_STRING, "I420",
      "width", G_TYPE_INT, 200,
      "height", G_TYPE_INT, 200, NULL);

  if (!data.pipeline || !data.source || !data.rtp || !data.decoder || !data.sink) {
    g_printerr ("One element could not be created.\n");
    return -1;
  }

  g_object_set (G_OBJECT (data.filter), "caps", filtercaps, NULL);

  bus = gst_element_get_bus (data.pipeline);
  gst_bus_add_signal_watch (bus);
  gst_object_unref (bus);

  g_object_set (G_OBJECT (data.source), "multicast-group", "224.0.0.0", NULL);
  g_object_set (G_OBJECT (data.source), "port", 5007, NULL);
  g_object_set (G_OBJECT (data.source), "caps", caps, NULL);
  g_object_set (G_OBJECT (data.source), "do-timestamp", TRUE, NULL);
  g_object_set (G_OBJECT (data.sink), "sync", FALSE, NULL);

  gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.rtp,
      data.decoder, data.que, data.sink, NULL);

  gst_element_link (data.source, data.decoder);
  gst_element_link_many (data.source, data.rtp, data.decoder, data.que, data.sink, NULL);
  g_signal_connect (data.decoder, "pad-added", G_CALLBACK (on_pad_added), data.sink);

  g_print ("Now playing...\n"); /* "Now playing: %s\n", argv[1] */
  gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
  g_print ("Running...\n");
  g_main_loop_run (loop);

  g_print ("Deleting pipeline\n");
  gst_object_unref (GST_OBJECT (data.pipeline));
  g_main_loop_unref (loop);

  return 0;
}
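
For the receiver side (item 2) I imagine the same kind of probe could be attached to the rtph264depay sink pad, so every incoming RTP packet can be inspected before depayloading. Again this is only a sketch under my own assumptions; with do-timestamp=TRUE on udpsrc the buffer PTS should roughly reflect arrival time, and the gap check is just an illustration of how packet loss could be counted:

#include <gst/rtp/gstrtpbuffer.h>   /* needs gstreamer-rtp-1.0 */

static GstPadProbeReturn
receiver_rtp_probe (GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
{
  GstBuffer *buffer = GST_PAD_PROBE_INFO_BUFFER (info);
  GstRTPBuffer rtp = GST_RTP_BUFFER_INIT;
  /* simple single-stream bookkeeping, not thread-safe */
  static guint16 last_seq = 0;
  static gboolean have_last = FALSE;

  if (gst_rtp_buffer_map (buffer, GST_MAP_READ, &rtp)) {
    guint16 seq = gst_rtp_buffer_get_seq (&rtp);
    guint32 rtp_ts = gst_rtp_buffer_get_timestamp (&rtp);
    /* with do-timestamp=TRUE on udpsrc, PTS should reflect arrival time */
    GstClockTime arrival = GST_BUFFER_PTS (buffer);

    if (have_last && (guint16) (seq - last_seq) != 1)
      g_print ("gap after seq=%u (got %u)\n", last_seq, seq);
    last_seq = seq;
    have_last = TRUE;

    g_print ("RX seq=%u rtp_ts=%u arrival=%" GST_TIME_FORMAT "\n",
        seq, rtp_ts, GST_TIME_ARGS (arrival));
    gst_rtp_buffer_unmap (&rtp);
  }
  return GST_PAD_PROBE_OK;
}

/* attached in main() after the depayloader is created:
 *   GstPad *sinkpad = gst_element_get_static_pad (data.rtp, "sink");
 *   gst_pad_add_probe (sinkpad, GST_PAD_PROBE_TYPE_BUFFER,
 *       receiver_rtp_probe, NULL, NULL);
 *   gst_object_unref (sinkpad);
 */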


