[gst-devel] Problem while playing a video filled in buffer by using appsrc

amit_emb amit.sharma1986 at gmail.com
Tue May 11 15:18:12 CEST 2010


Hi,

filesrc location=football.ts ! mpegtsdemux program-number=3 ! mpeg2dec !
filesink location=sink.yuv
I have written a code for  converting the .ts file into .yuv file the code
is working fine.

Now, i would like to put this file into a appsrc by using gstbuffer and want
to play a video by using xvimagesink.      
Does gstreamer provides any mechanism to play gstbuffer.

I have written a code for that but it is not working properly, the code is
given below, 

#include <gst/gst.h>

#include <gst/interfaces/xoverlay.h>
#include <gst/app/gstappbuffer.h>
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappsink.h>

#include <gtk/gtk.h>

#include <gdk/gdkx.h>

#include <gdk/gdkkeysyms.h>

#include <stdio.h>
#include <string.h>
#include <stdlib.h>





#define SAMPLE_IN_FILE 55249921
#define BUFFER_SIZE  506880

/* 300 frames = 10 seconds of video, since we are going to save at 30fps (se
the video_caps next) */
#define TOTAL_FRAMES 30000
#define QUEUED_FRAMES 30

/* these are the caps we are going to pass through the appsrc */
const gchar *video_caps =
"video/x-raw-yuv,format=I420,width=704,height=480,bpp=32,framerate=30/1"; 
video_app_source
FILE *fp;  // file pointer pointing to .yuv file     

int i;      

char original_data[SAMPLE_IN_FILE];  //original buffer which we are going to
play  


extern GtkWidget *video_output;

extern GtkWidget *window;


static GstElement *pipeline, *video_app_source, flt, *colconv,
*video_app_sink, *videosink , *videoscale, *videoID, *videosink, *videorate;

static GMainLoop *loop;
GstCaps *src_caps;
void *data;



GstBus *bus;
GstBuffer *gst_buffer;
GstFlowReturn ret;
guint num_frame; 



static gpointer window1;

static GstSeekFlags seek_flags = GST_SEEK_FLAG_FLUSH |
GST_SEEK_FLAG_KEY_UNIT;
static void  dont_eat_my_chicken_wings(void *priv);


//original_data

static void start_feed (GstAppSrc *appsrc, guint length, gpointer user_data) 
{
  num_frame++;
  if(num_frame >= TOTAL_FRAMES)
  {
   /* we are EOS, send end-of-stream and remove the source */
    g_signal_emit_by_name (video_app_source, "end-of-stream", &ret);
    return FALSE;
  }

  /* Allocating the memory for the buffer */
  data = malloc (BUFFER_SIZE);
  memcpy(data, original_data, BUFFER_SIZE); 

  gst_buffer = gst_app_buffer_new (data, BUFFER_SIZE,
dont_eat_my_chicken_wings, data);
  ret = gst_app_src_push_buffer (GST_APP_SRC(video_app_source), gst_buffer);
  
  /* newer basesrc will set caps for use automatically but it does not
really
   * hurt to set it on the buffer again */
  gst_buffer_set_caps (gst_buffer, gst_caps_from_string (video_caps));

  /* Setting the correct timestamp for the buffer is very important,
otherwise the
   * resulting video file won't be created correctly */
  GST_BUFFER_TIMESTAMP(gst_buffer) = (GstClockTime)((num_frame / 30.0) *
1e9);

  /* push new buffer */
  g_signal_emit_by_name (video_app_source, "push-buffer", gst_buffer, &ret);
  gst_buffer_unref (gst_buffer);

  if (ret != GST_FLOW_OK) {
    /* some error, stop sending data */
    return FALSE;
  }

}

static void stop_feed (GstAppSrc *appsrc, gpointer user_data)
{
  g_source_remove (video_app_source); 
} 

static gboolean

bus_cb (GstBus *bus,

        GstMessage *msg,

        gpointer data)

{

    switch (GST_MESSAGE_TYPE (msg))

    {

        case GST_MESSAGE_EOS:

            {

                g_debug ("end-of-stream");

                break;

            }

        case GST_MESSAGE_ERROR:

            {

                gchar *debug;

                GError *err;

                gst_message_parse_error (msg, &err, &debug);

                g_free (debug);



                g_warning ("Error: %s", err->message);

                g_error_free (err);

                break;

            }

        default:

            break;
video_app_source
    }



    return TRUE;

}



void play_buffer ()

{

 
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (GST_OBJECT (pipeline));
   pipeline = NULL;


   /* init GStreamer */
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  

  /*--------------- set up buffer-------------------------------------- */

  fp = fopen("/root/Desktop/latest_buffer_gui_release/newsink.yuv", "rb");

  i = fread((void *)original_data, sizeof(signed char), (SAMPLE_IN_FILE-1),
fp);
  original_data[SAMPLE_IN_FILE] = '/0';
  /*--------------------------------------------------------------------*/

   

video_app_source
  /* setup pipeline */

  
  pipeline = gst_pipeline_new ("pipeline");


  video_app_source = gst_element_factory_make ("appsrc",
"video_app_source");

  g_object_set (G_OBJECT(video_app_source), "is-live",  (gboolean)TRUE  ,
"format", GST_FORMAT_TIME ,"blocksize",(gulong) 506880, NULL);
  gst_app_src_set_max_bytes((GstAppSrc *)video_app_source, QUEUED_FRAMES *
BUFFER_SIZE); 

  flt = gst_element_factory_make ("capsfilter", "flt1");

  colconv = gst_element_factory_make ("ffmpegcolorspace", "colconI420v");


  videorate = gst_element_factory_make ("videorate", "videorate");

  videosink = gst_element_factory_make ("xvimagesink", "videosink");


  

    /* setup */

  g_object_set (G_OBJECT (flt), "caps",

  		                     gst_caps_new_simple ("video/x-raw-yuv",

				     "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('I', '4', '2', '0'),

				     "width", G_TYPE_INT, 704,

				     "height", G_TYPE_INT,480,

                                     "pixel-aspect-ratio",
GST_TYPE_FRACTION, 4, 3,

                                    
"force-aspect-ratio",G_TYPE_BOOLEAN,TRUE,

				     "framerate", GST_TYPE_FRACTION, 30, 1,

				     "bpp", G_TYPE_INT, 32,

				     NULL), NULL);


  gst_bin_add_many (GST_BIN (pipeline), video_app_source, colconv,
videorate, videosink, NULL);

  gst_element_link_many (video_app_source, colconv, videorate, videosink,
NULL);



 // set up sync handler for setting the xid once the pipeline is started

  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));

  gst_bus_add_watch (bus, bus_cb, NULL);

  gst_object_unref (bus);

  



  if (GST_IS_X_OVERLAY (videosink))

    {


      gst_x_overlay_set_xwindow_id (GST_X_OVERLAY (videosink),
GPOINTER_TO_INT (GDK_WINDOW_XWINDOW (video_output->window)));



    }

   /* configure the appsrc, we will push data into the appsrc from the
   * mainloop */
  g_signal_connect (video_app_source, "need-data", G_CALLBACK (start_feed),
NULL);
  g_signal_connect (video_app_source, "enough-data", G_CALLBACK (stop_feed),
NULL); 



  /* play */

 gst_element_set_state (pipeline, GST_STATE_PLAYING);

 
 gst_app_src_end_of_stream (GST_APP_SRC(video_app_source));


 gst_element_set_state (pipeline, GST_STATE_NULL);

 /* Cleaning up */
  gst_object_unref (GST_OBJECT (pipeline));
  pipeline = NULL;
  
}


static void
dont_eat_my_chicken_wings (void *priv)
{
  printf ("freeing buffer for pointer %p\n", priv);
  free (priv);
}
-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
Error :

 GStreamer-CRITICAL **: gst_value_set_mini_object: assertion `mini_object ==
NULL || GST_IS_MINI_OBJECT (mini_object)' failed

 gst_app_src_push_buffer_full: assertion `GST_IS_BUFFER (buffer)' failed

 GStreamer-CRITICAL **: gst_mini_object_unref: assertion
`mini_object->refcount > 0' failed
 

-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
Can anybody help me what are the mistake i am doing.

Thanks
with regard
Amit



-- 
View this message in context: http://gstreamer-devel.966125.n4.nabble.com/Problem-while-playing-a-video-filled-in-buffer-by-using-appsrc-tp2173804p2173804.html
Sent from the GStreamer-devel mailing list archive at Nabble.com.




More information about the gstreamer-devel mailing list