how to display video at client side captured from udpsrc

gagankumarnigam gagankumarnigam at bel.co.in
Mon Jun 15 00:13:13 PDT 2015


Hello,

I tried streaming video using RTSP and UDP (udpsink) from a v4l2 video
source. At the client my program runs fine, but it couldn't display the
video. Here is my code for the server and the client. Can somebody tell me
what I am doing wrong at the client side?
  SERVER.c


/*
 * Capture video from a v4l2 device, show a local preview
 * (tee -> queue -> xvimagesink) and simultaneously encode the stream as
 * MPEG-4, payload it as RTP and send it over UDP to 192.168.1.25:8999.
 *
 * Returns 0 on success, -1 if any element could not be created.
 *
 * Fixes against the original:
 *   - `pipeline` was used but never declared (its declaration was
 *     commented out); it is now a local variable.
 *   - the encoder output went straight into udpsink with no RTP
 *     payloader; an rtpmp4vpay element is inserted so a receiver can
 *     depayload the stream with rtpmp4vdepay.
 *   - `filtercaps` was read with gst_caps_get_structure() AFTER
 *     gst_caps_unref() — a use-after-free; the structure is read first.
 *   - pads returned by gst_element_get_request_pad()/get_static_pad()
 *     were leaked; they are now unreffed after linking.
 *   - deprecated gst_element_get_pad() replaced by
 *     gst_element_get_static_pad().
 */
int BroadcastVideo()
{
    GMainLoop  *loop;
    GstElement *pipeline;   /* was missing in the original */
    GstElement *vsource, *vtee, *vqueue, *tover, *xvsink;
    GstElement *evqueue, *vrate, *filter, *conv, *vencoder, *pay, *sink;
    GstCaps    *filtercaps;
    GstBin     *recording;
    GstBus     *bus;
    GstPad     *srcpad, *sinkpad;
    gint        width, height, num, denom;
    const GstStructure *str;

    gst_init (NULL, NULL);
    loop = g_main_loop_new (NULL, FALSE);

    /* Preview branch elements. */
    pipeline = gst_pipeline_new ("Live Recording");
    vsource  = gst_element_factory_make ("v4l2src",     "viewing-file-source");
    vtee     = gst_element_factory_make ("tee",         "viewing-tee");
    vqueue   = gst_element_factory_make ("queue2",      "viewing-queue");
    tover    = gst_element_factory_make ("timeoverlay", "viewing-overlay");
    xvsink   = gst_element_factory_make ("xvimagesink", "viewing-xvsink");

    /* Streaming branch: queue -> videorate -> capsfilter -> colorspace ->
     * MPEG-4 encoder -> RTP payloader -> udpsink. */
    recording = GST_BIN (gst_bin_new ("recording-bin"));
    evqueue  = gst_element_factory_make ("queue2",           "encoding-queue");
    vrate    = gst_element_factory_make ("videorate",        "video-rate");
    filter   = gst_element_factory_make ("capsfilter",       "filter");
    conv     = gst_element_factory_make ("ffmpegcolorspace", "converter");
    vencoder = gst_element_factory_make ("ffenc_mpeg4",      "mpeg-encoder");
    pay      = gst_element_factory_make ("rtpmp4vpay",       "rtp-payloader");
    sink     = gst_element_factory_make ("udpsink",          "net-output");

    if (!pipeline || !vsource || !vtee || !vqueue || !tover || !xvsink ||
        !recording || !evqueue || !vrate || !filter || !conv || !vencoder ||
        !pay || !sink)
    {
        g_print("Unable to create all necessary elements\n");
        return -1;
    }

    /* Embed the preview in the application window.
     * NOTE(review): video_window is defined elsewhere in this project;
     * assumed to be a realized GTK widget — confirm. */
    gst_x_overlay_set_xwindow_id (GST_X_OVERLAY (xvsink),
                                  GDK_WINDOW_XID (video_window->window));

    filtercaps = gst_caps_new_simple ("video/x-raw-yuv",
                                      "width",  G_TYPE_INT, 640,
                                      "height", G_TYPE_INT, 480,
                                      "framerate", GST_TYPE_FRACTION, 30, 1,
                                      NULL);
    g_object_set (G_OBJECT (filter), "caps", filtercaps, NULL);

    /* Report the requested format.  This must happen BEFORE the unref
     * below (the original read the caps after freeing them). */
    str = gst_caps_get_structure (filtercaps, 0);
    if (!gst_structure_get_int (str, "width", &width) ||
        !gst_structure_get_int (str, "height", &height) ||
        !gst_structure_get_fraction (str, "framerate", &num, &denom))
        g_print ("No width/height available\n");
    else
        g_print ("The video size of this set of capabilities is %dx%d and the "
                 "frame rate is %d/%d\n", width, height, num, denom);
    gst_caps_unref (filtercaps);

    g_object_set (G_OBJECT (vencoder), "bitrate", 384, NULL);
    g_object_set (G_OBJECT (sink), "host", "192.168.1.25", NULL);
    g_object_set (G_OBJECT (sink), "port", 8999, NULL);
    g_object_set (G_OBJECT (sink), "async", FALSE, NULL);

    bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
    gst_bus_add_watch (bus, bus_call, loop);
    gst_object_unref (bus);

    /* Build the recording bin and expose its queue's sink as a ghost pad
     * so the tee can feed it. */
    gst_bin_add_many (recording, evqueue, vrate, filter, conv, vencoder,
                      pay, sink, NULL);

    sinkpad = gst_element_get_static_pad (evqueue, "sink");
    GstPad *ghost = gst_ghost_pad_new ("vsink", sinkpad);
    if (ghost == NULL)
    {
        g_error("Unable to create ghostpad!\n");
    }
    gst_element_add_pad (GST_ELEMENT (recording), ghost);
    gst_object_unref (GST_OBJECT (sinkpad));

    gst_element_link_many (evqueue, vrate, filter, conv, vencoder, pay,
                           sink, NULL);

    gst_bin_add_many (GST_BIN (pipeline), vsource, vtee, vqueue, tover,
                      xvsink, recording, NULL);

    /* source -> timeoverlay -> tee, then fan out to preview + streaming. */
    gst_element_link_many (vsource, tover, vtee, NULL);

    srcpad  = gst_element_get_request_pad (vtee, "src0");
    sinkpad = gst_element_get_static_pad (vqueue, "sink");
    gst_pad_link (srcpad, sinkpad);
    gst_object_unref (sinkpad);   /* these pad refs were leaked before */
    gst_object_unref (srcpad);
    gst_element_link (vqueue, xvsink);

    srcpad  = gst_element_get_request_pad (vtee, "src1");
    sinkpad = gst_element_get_static_pad (GST_ELEMENT (recording), "vsink");
    gst_pad_link (srcpad, sinkpad);
    gst_object_unref (sinkpad);
    gst_object_unref (srcpad);

    g_print ("Running...\n");
    gst_element_set_state (pipeline, GST_STATE_PLAYING);

    g_main_loop_run (loop);

    g_print ("Returned, stopping playback\n");
    gst_element_set_state (pipeline, GST_STATE_NULL);
    g_print ("Deleting pipeline\n");
    gst_object_unref (GST_OBJECT (pipeline));

    return 0;
}
/*
 * Bus watch for the pipeline: logs application and EOS messages, prints
 * error details, and quits the main loop on EOS or error.  Always returns
 * TRUE so the watch stays installed.
 */
gboolean bus_call(GstBus *bus, GstMessage *msg, void *data)
{
    GMainLoop *main_loop = (GMainLoop *) data;

    switch (GST_MESSAGE_TYPE (msg))
    {
        case GST_MESSAGE_APPLICATION:
            g_print ("APP received on OBJ NAME %s\n",
                     GST_OBJECT_NAME (msg->src));
            break;

        case GST_MESSAGE_EOS:
            g_print ("EOS received on OBJ NAME %s\n",
                     GST_OBJECT_NAME (msg->src));
            g_main_loop_quit (main_loop);
            break;

        case GST_MESSAGE_ERROR:
        {
            GError *error = NULL;
            gchar  *dbg   = NULL;

            gst_message_parse_error (msg, &error, &dbg);
            g_free (dbg);
            g_print ("BUS CALL %s\n", error->message);
            g_error_free (error);
            g_main_loop_quit (main_loop);
            break;
        }

        default:
            break;
    }
    return TRUE;
}


client.c

#include <stdlib.h>
#include <gst/gst.h>

/* Caps describing the RTP stream arriving on udpsrc.  udpsrc delivers raw
 * RTP packets, so the media type must be application/x-rtp — NOT
 * video/x-raw-yuv as in the original.  The server encodes with
 * ffenc_mpeg4, so the payload is MPEG-4 video (MP4V-ES), not H264.
 * (The original #define was also split across two lines, which made the
 * macro empty and left a stray string literal.) */
#define VIDEO_CAPS \
    "application/x-rtp, media=(string)video, clock-rate=(int)90000, encoding-name=(string)MP4V-ES"

/*
 * Bus watch for the receiving pipeline: logs application and EOS
 * messages, prints error details, and stops the main loop on EOS or
 * error.  Returning TRUE keeps the watch attached.
 */
gboolean bus_call(GstBus *bus, GstMessage *msg, void *data)
{
    GMainLoop *main_loop = (GMainLoop *) data;
    GstMessageType type  = GST_MESSAGE_TYPE (msg);

    if (type == GST_MESSAGE_APPLICATION)
    {
        g_print ("APP received on OBJ NAME %s\n", GST_OBJECT_NAME (msg->src));
    }
    else if (type == GST_MESSAGE_EOS)
    {
        g_print ("EOS received on OBJ NAME %s\n", GST_OBJECT_NAME (msg->src));
        g_main_loop_quit (main_loop);
    }
    else if (type == GST_MESSAGE_ERROR)
    {
        GError *error = NULL;
        gchar  *dbg   = NULL;

        gst_message_parse_error (msg, &error, &dbg);
        g_free (dbg);
        g_print ("BUS CALL %s\n", error->message);
        g_error_free (error);
        g_main_loop_quit (main_loop);
    }

    return TRUE;
}

int main(int argc, char* argv[]) 
{ 
    GMainLoop *loop; 
    gst_init(&argc,&argv); 
    
    loop = g_main_loop_new (NULL, FALSE); 
    
    GstElement  *pipeline, *rtpsrc, *vtee, *vqueue, *tover, *xvsink,
*evqueue, *videodec,*videodepay,*muxer, *filesink, *videosink; 
    GstCaps *filtercaps;
    GstBin      *recording; 
    GstBus      *bus; 
    GstPad      *srcpad,*sinkpad; 
   gint width, height, num, denom;
   const GstStructure *str;

  // Create gstreamer elements 
    pipeline   = gst_pipeline_new ("Live Recording"); 
    //vsource    = gst_element_factory_make ("v4l2src",    
"viewing-file-source"); 
    rtpsrc     = gst_element_factory_make ("udpsrc", "UDP source");
   // rtpsrc     = gst_element_factory_make ("v4l2src", "source");
    vtee       = gst_element_factory_make ("tee","viewing-tee"); 
   // videodepay = gst_element_factory_make ("rtph264depay", "videodepay");
  //  g_assert (videodepay);
    vqueue     = gst_element_factory_make ("queue2",          
"viewing-queue"); 
    tover      = gst_element_factory_make ("timeoverlay",     
"viewing-overlay"); 
    xvsink     = gst_element_factory_make ("xvimagesink",     
"viewing-xvsink"); 

printf("4\n"); 
/*
    recording  = GST_BIN(gst_bin_new("recording-bin")); 
    evqueue    = gst_element_factory_make ("queue2",          
"encoding-queue"); 
    videodec   = gst_element_factory_make ("ffenc_mpeg4",     
"encoding-encoder"); 
    muxer      = gst_element_factory_make ("avimux",          
"encoding-muxer");	//mp4mux 
    filesink   = gst_element_factory_make ("filesink",        
"encoding-filesink"); 
*/
   GstElement  *filter, *vrate, *encoder, *conv, *sink;

 
   recording  = GST_BIN(gst_bin_new("recording-bin")); 
   evqueue    = gst_element_factory_make ("queue2","encoding-queue"); 	 
   vrate = gst_element_factory_make ("videorate", "video-rate");
   filter = gst_element_factory_make ("capsfilter", "filter");
   videodepay = gst_element_factory_make ("rtph264depay", "videodepay");
   conv = gst_element_factory_make ("ffmpegcolorspace","converter");
   //videodec = gst_element_factory_make ("ffenc_mpeg4","mpeg-decoder");
   videodec = gst_element_factory_make ("ffdec_h264", "videodec");
   videosink = gst_element_factory_make ("autovideosink", "videosink");
   //sink = gst_element_factory_make ("udpsink","audio-output");

   if(!pipeline || !rtpsrc || !xvsink || !tover ) 
    { 
        g_print("Unable to create all necessary elements\n"); 
        return -1; 
    } 

  // filtercaps = gst_caps_new_simple ("video/x-raw-yuv","width",
G_TYPE_INT, 640,"height", G_TYPE_INT, 480,"framerate", GST_TYPE_FRACTION,
30, 1, NULL);
   filtercaps = gst_caps_from_string (VIDEO_CAPS);
   g_object_set (G_OBJECT (filter), "caps", filtercaps, NULL);
   gst_caps_unref (filtercaps);


 
   g_object_set (G_OBJECT (rtpsrc), "port" , 8999 ,  NULL);
  

    bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline)); 
    gst_bus_add_watch (bus, bus_call, loop); 
    gst_object_unref (bus); 

 
  //  gst_bin_add_many (recording, evqueue, videodec, muxer, filesink,
NULL); 
    gst_bin_add_many (recording,
evqueue,vrate,filter,videodepay,conv,videodec, videosink, NULL); 
    sinkpad       = gst_element_get_static_pad(evqueue,"sink"); 

    GstPad *ghost = gst_ghost_pad_new("vsink",sinkpad); 

    if(ghost == NULL) 
    {
        g_error("Unable to create ghostpad!\n"); 
    }

    gst_element_add_pad(GST_ELEMENT(recording),ghost); 
    gst_object_unref(GST_OBJECT(sinkpad)); 
   // gst_element_link_many(evqueue,videodec,muxer,filesink,NULL); 
   // gst_element_link_many(evqueue,vrate,filter,conv,videodec,
videosink,NULL); 
   gst_element_link_many(evqueue,vrate,filter,videodepay,conv,videodec,
videosink,NULL); 
   gst_bin_add_many (GST_BIN (pipeline), rtpsrc, vtee, vqueue, tover,
xvsink, recording, NULL); 

    
    gst_element_link_many(rtpsrc,tover,vtee,NULL); 
    srcpad  = gst_element_get_request_pad(vtee,"src0"); 
    sinkpad = gst_element_get_pad(vqueue,"sink"); 
    gst_pad_link(srcpad,sinkpad);     
    gst_element_link(vqueue,xvsink); 

    
    srcpad  = gst_element_get_request_pad(vtee,"src1"); 
    sinkpad = gst_element_get_pad(GST_ELEMENT(recording),"vsink"); 
    gst_pad_link(srcpad,sinkpad); 
	
    g_print ("Running...\n"); 
    gst_element_set_state(pipeline,GST_STATE_PLAYING); 

  
    g_main_loop_run (loop); 

   
    g_print ("Returned, stopping playback\n"); 
    gst_element_set_state (pipeline, GST_STATE_NULL); 
    g_print ("Deleting pipeline\n"); 
    gst_object_unref (GST_OBJECT (pipeline)); 

    return 0; 
} 












--
View this message in context: http://gstreamer-devel.966125.n4.nabble.com/how-to-capture-sequence-no-and-time-stamp-of-RTP-UDP-packets-tp4671747p4672278.html
Sent from the GStreamer-devel mailing list archive at Nabble.com.


More information about the gstreamer-devel mailing list