gst_app_sink_pull_buffer takes a long time to get data from a pipeline (about 130ms)

Angel Martin amartin at vicomtech.org
Mon Mar 5 08:05:52 PST 2012


I am not sure, but the while(1) loop that encloses the gst_app_sink_pull_buffer
call does not look good.

Maybe you should use a thread, or set up a *g_timeout_add() callback fired once
every 1/30 of a second, in order to update the buffer data.*
http://gstreamer.freedesktop.org/data/doc/gstreamer/head/manual/html/chapter-queryevents.html

Good luck!

Angel

2012/3/5 Aldo Biziak <aldobiziak at gmail.com>

> Could anybody help me please?
>
> ---------- Forwarded message ----------
> From: Aldo Biziak <aldobiziak at gmail.com>
> Date: 2012/3/1
> Subject: gst_app_sink_pull_buffer takes a long time to get data from a
> pipeline (about 130ms)
> To: gstreamer-devel at lists.freedesktop.org
> Cc: luca gherardi <lucaghera8 at gmail.com>
>
>
> Good morning all,
> I'm building a simple C application that use appsink to get buffer of data
> from a pipeline (this last pipeline creates data reading images from a
> webcam 30fps):
> GstBuffer* buf = gst_app_sink_pull_buffer(GST_APP_SINK(sink));
> this function requires at least 130ms (7.6fps) of processing time, so I
> can't grab images in real time mode (30fps).
>
> I compiled my application with the -O2 optimizer option, and I use a decent
> PC with a dual-core 2.1GHz (x86) CPU.
> How can I make this function fast enough to work in real-time mode?
>
> This is the code of my application:
> #include <gst/gst.h>
> #include <gst/app/gstappsink.h>
> #include <gst/app/gstappbuffer.h>
> #include <gst/base/gstbasesink.h>
>
> #include <string>
> #include <iostream>
> #include <sstream>
> #include <map>
> #include <vector>
>
> /***** section to measure performances***********/
> #define DEBUG_FRAMERATE
> #ifdef DEBUG_FRAMERATE
> #include <time.h>
> #include <sys/time.h>
> #define NUM_OF_FRAMES_TO_CATCH_TO_CALCULATE_FPS 30
> int frame_counter =0;
> struct timespec timestamp1;
> #endif
> /**************************************************/
> using namespace std;
> typedef struct
> {
>   GMainLoop *loop;
>   GstElement *source;
>   GstElement *sink;
> }ProgramData;
>
> int main(char **argc, int argv[]){
> string cameraDevice;
>  bool alwaysCopy;
> string encoding;
> string formatSpecificEncoding;
>
> int width;
> int height;
> int bpp; // bit per pixel
>  int depth;
> int redMask;
> int greenMask;
>  int blueMask;
> int endianness;
> bool interlaced;
>  int framerate;
>
> int brightness;
> int contrast;
>  int saturation;
> int hue;
>
> bool gstreamerPad;
>  bool rosPad;
>
> int num;
> double meanTime;
>
> GstElement *pipeline;
> GstElement *sink;
> cameraDevice = "/dev/video0";
>  alwaysCopy = false;
> encoding = "video/x-raw-rgb";
> formatSpecificEncoding = "YUY2";
>
> width = 640;
> height = 480;
> bpp = 24;
>  depth = 24;
> redMask = 16711680;
> greenMask = 65280;
>  blueMask = 255;
> endianness = 4321;
> interlaced = false;
>  framerate = 15;
>
> brightness = 0;
> contrast = 0;
>  saturation = 0;
> hue = 0;
>
> meanTime = 0;
>  num = 0;
>
> stringstream camConfigStream;#include <gst/gst.h>
> #include <gst/app/gstappsink.h>
> #include <gst/app/gstappbuffer.h>
> #include <gst/base/gstbasesink.h>
>
> #include <string>
> #include <iostream>
> #include <sstream>
> #include <map>
> #include <vector>
>
> /***** section to measure performances***********/
> #define DEBUG_FRAMERATE
> #ifdef DEBUG_FRAMERATE
> #include <time.h>
> #include <sys/time.h>
> #define NUM_OF_FRAMES_TO_CATCH_TO_CALCULATE_FPS 30
> int frame_counter =0;
> struct timespec timestamp1;
> #endif
> /**************************************************/
> using namespace std;
> typedef struct
> {
>   GMainLoop *loop;
>   GstElement *source;
>   GstElement *sink;
> }ProgramData;
>
> int main(char **argc, int argv[]){
> string cameraDevice;
> bool alwaysCopy;
>  string encoding;
> string formatSpecificEncoding;
>
> int width;
>  int height;
> int bpp; // bit per pixel
> int depth;
>  int redMask;
> int greenMask;
> int blueMask;
>  int endianness;
> bool interlaced;
> int framerate;
>
> int brightness;
> int contrast;
> int saturation;
>  int hue;
>
> bool gstreamerPad;
> bool rosPad;
>
> int num;
> double meanTime;
>
> GstElement *pipeline;
>  GstElement *sink;
> cameraDevice = "/dev/video0";
> alwaysCopy = false;
>  encoding = "video/x-raw-rgb";
> formatSpecificEncoding = "YUY2";
>
> width = 640;
> height = 480;
> bpp = 24;
>  depth = 24;
> redMask = 16711680;
> greenMask = 65280;
>  blueMask = 255;
> endianness = 4321;
> interlaced = false;
>  framerate = 15;
>
> brightness = 0;
> contrast = 0;
>  saturation = 0;
> hue = 0;
>
> meanTime = 0;
>  num = 0;
>
> stringstream camConfigStream;
> camConfigStream << "v4l2src device=" << cameraDevice; // /dev/video0
>  camConfigStream << " always-copy=";
> if(alwaysCopy){
> camConfigStream << "true";
>  }else{
> camConfigStream << "false";
> }
>  camConfigStream << " ! capsfilter caps=\" ";
>
> camConfigStream << encoding; //"video/x-raw-rgb
>  if(encoding.find("rgb") != string::npos){
> camConfigStream << ", bpp=(int)" << bpp; //24
>  camConfigStream << ", depth=(int)" << depth; //24
> camConfigStream << ", red_mask=(int)" << redMask; //16711680
>  camConfigStream << ", green_mask=(int)" << greenMask; //65280
> camConfigStream << ", blue_mask=(int)" << blueMask; //255
>  camConfigStream << ", endianness=(int)" << endianness; //4321
> }else if(encoding.find("yuv") != string::npos){
>  camConfigStream << ", format=" << formatSpecificEncoding; //YUY2
> }
>  camConfigStream << ", width=(int)" << width; //640
> camConfigStream << ", height=(int)" << height; //480
>  camConfigStream << ", interlaced=(boolean)";
> if(interlaced){
>  camConfigStream << "true";
> }else{
> camConfigStream << "false";
>  }
> camConfigStream << ", framerate=(fraction)" << framerate << "/1"; //15
>  camConfigStream << ", brightness=(int)" << brightness; //0
> camConfigStream << ", contrast=(int)" << contrast; //0
>  camConfigStream << ", saturation=(int)" << saturation; //0
> camConfigStream << ", hue=(int)" << hue; //0
>  camConfigStream << "\" ";
>
>
> gst_init(0,0);
>  cout<< "Gstreamer Version: " << gst_version_string() << endl;
>
> GError *error = 0; //assignment to zero is a gst requirement
>  pipeline = gst_parse_launch(camConfigStream.str().c_str(),&error);
> if (pipeline == NULL) {
>  cout << error->message << endl;
> exit(-1);
> }
>  sink = gst_element_factory_make("appsink",NULL);
> if(!sink){
> cout << "Sink creation failed" << endl;
>  }
> string capFormat = encoding;
> GstCaps * caps_alone = gst_caps_new_simple(capFormat.c_str(), NULL);
>  gst_app_sink_set_caps(GST_APP_SINK(sink), caps_alone);
> gst_caps_unref(caps_alone);
>
> gst_base_sink_set_sync(GST_BASE_SINK(sink), true);
>
> if(GST_IS_PIPELINE(pipeline)) {
>  GstPad *outpad = gst_bin_find_unlinked_pad(GST_BIN(pipeline),
> GST_PAD_SRC);
> g_assert(outpad);
>  GstElement *outelement = gst_pad_get_parent_element(outpad);
> g_assert(outelement);
>  gst_object_unref(outpad);
>
>
> if(!gst_bin_add(GST_BIN(pipeline), sink)) {
>  cout << "gst_bin_add() failed\n" << endl; // TODO: do some unref
> gst_object_unref(outelement);
>  gst_object_unref(pipeline);
> return -1;
> }
>
> if(!gst_element_link(outelement, sink)) {
> cout << "GStreamer: cannot link outelement(\"" <<
>  gst_element_get_name(outelement) << "\") -> sink\n" << endl;
> gst_object_unref(outelement);
>  gst_object_unref(pipeline);
> return -1;
> }
>
> gst_object_unref(outelement);
> } else {
> GstElement* launchpipe = pipeline;
>  pipeline = gst_pipeline_new(NULL);
> g_assert(pipeline);
>
> gst_object_unparent(GST_OBJECT(launchpipe));
>
> gst_bin_add_many(GST_BIN(pipeline), launchpipe, sink, NULL);
>
> if(!gst_element_link(launchpipe, sink)) {
>  cout << "GStreamer: cannot link launchpipe -> sink\n" << endl;
> gst_object_unref(pipeline);
>  return -1;
> }
> }
>
> gst_element_set_state(pipeline, GST_STATE_PAUSED);
>
> if (gst_element_get_state(pipeline, NULL, NULL, -1) ==
> GST_STATE_CHANGE_FAILURE) {
>  cout << "Failed to PAUSE." << endl;
> exit(-1);
> } else {
>  cout<< "stream is PAUSED." << endl;
> }
>
>  // We could probably do something with the camera name, check
> // errors or something, but at the moment, we don't care.
>  std::string camera_name;
> //TODO
> // if
> (camera_calibration_parsers::readCalibrationIni("../camera_parameters.txt",
> camera_name, camera_info)) {
>  // ROS_INFO("Successfully read camera calibration.  Rerun camera
> calibrator if it is incorrect.");
>  // }
> // else {
>  // ROS_ERROR("No camera_parameters.txt file found.  Use default file if
> no other is available.");
>  // }
>
> //TODO
>  bool preroll = false;
> if (preroll) {
> //The PAUSE, PLAY, PAUSE, PLAY cycle is to ensure proper pre-roll
>  //I am told this is needed and am erring on the side of caution.
> gst_element_set_state(pipeline, GST_STATE_PLAYING);
>
> if (gst_element_get_state(pipeline, NULL, NULL, -1) ==
> GST_STATE_CHANGE_FAILURE) {
> cout << "Failed to PLAY." << endl;
>  exit(-1);
> } else {
> cout << "stream is PLAYING." << endl;
>  }
>
> gst_element_set_state(pipeline, GST_STATE_PAUSED);
>
>  if (gst_element_get_state(pipeline, NULL, NULL, -1) ==
> GST_STATE_CHANGE_FAILURE) {
> cout << "Failed to PAUSE." << endl;
>  exit(-1);
> } else {
> cout<< "stream is PAUSED." << endl;
>  }
> }
>
> // TODO
>  // image_transport::ImageTransport it(nh);
> // image_transport::CameraPublisher pub =
> it.advertiseCamera("gscam/image_raw", 1);
>  //
> // ros::ServiceServer set_camera_info =
> nh.advertiseService("gscam/set_camera_info", setCameraInfo);
>
> std::cout << "Processing..." << std::endl;
>
> //processVideo
>  rosPad = false;
> gstreamerPad = true;
> gst_element_set_state(pipeline, GST_STATE_PLAYING);
> #ifdef DEBUG_FRAMERATE
> clock_gettime(CLOCK_REALTIME, &timestamp1);
> #endif
> while(1){
>  GstBuffer* buf = gst_app_sink_pull_buffer(GST_APP_SINK(sink));
> if (!buf){
>
> return -1;
> }
>
>
> GstPad* pad = gst_element_get_static_pad(sink, "sink"); //TODO spostare
> sink
>  GstCaps *caps = gst_pad_get_negotiated_caps(pad);
> GstStructure *structure = gst_caps_get_structure(caps,0);
>  gst_structure_get_int(structure,"width",&width);
> gst_structure_get_int(structure,"height",&height);
>  gst_buffer_unref(buf);
> //printf("fatto\n");
> #ifdef DEBUG_FRAMERATE
>  frame_counter++;
> if(frame_counter%NUM_OF_FRAMES_TO_CATCH_TO_CALCULATE_FPS==0){
>  struct timespec timestamp2;
> clock_gettime(CLOCK_REALTIME, &timestamp2);
>  long int sec=timestamp2.tv_sec - timestamp1.tv_sec;
> long int nsec=timestamp2.tv_nsec - timestamp1.tv_nsec;
>  long milliseconds=(sec*1000000)+nsec/1000.0;
> printf("time to send one frame: %.02f
> ms\n",(float)milliseconds/(float)NUM_OF_FRAMES_TO_CATCH_TO_CALCULATE_FPS);
>  frame_counter=0;
> clock_gettime(CLOCK_REALTIME, &timestamp1);
> }
> #endif
>
>
> }
>
> return 0;
> }
>
> camConfigStream << "v4l2src device=" << cameraDevice; // /dev/video0
> camConfigStream << " always-copy=";
>  if(alwaysCopy){
> camConfigStream << "true";
> }else{
>  camConfigStream << "false";
> }
> camConfigStream << " ! capsfilter caps=\" ";
>
> camConfigStream << encoding; //"video/x-raw-rgb
> if(encoding.find("rgb") != string::npos){
>  camConfigStream << ", bpp=(int)" << bpp; //24
> camConfigStream << ", depth=(int)" << depth; //24
>  camConfigStream << ", red_mask=(int)" << redMask; //16711680
> camConfigStream << ", green_mask=(int)" << greenMask; //65280
>  camConfigStream << ", blue_mask=(int)" << blueMask; //255
> camConfigStream << ", endianness=(int)" << endianness; //4321
>  }else if(encoding.find("yuv") != string::npos){
> camConfigStream << ", format=" << formatSpecificEncoding; //YUY2
>  }
> camConfigStream << ", width=(int)" << width; //640
>  camConfigStream << ", height=(int)" << height; //480
> camConfigStream << ", interlaced=(boolean)";
>  if(interlaced){
> camConfigStream << "true";
> }else{
>  camConfigStream << "false";
> }
> camConfigStream << ", framerate=(fraction)" << framerate << "/1"; //15
>  camConfigStream << ", brightness=(int)" << brightness; //0
> camConfigStream << ", contrast=(int)" << contrast; //0
>  camConfigStream << ", saturation=(int)" << saturation; //0
> camConfigStream << ", hue=(int)" << hue; //0
>  camConfigStream << "\" ";
>
>
> gst_init(0,0);
>  cout<< "Gstreamer Version: " << gst_version_string() << endl;
>
> GError *error = 0; //assignment to zero is a gst requirement
>  pipeline = gst_parse_launch(camConfigStream.str().c_str(),&error);
> if (pipeline == NULL) {
>  cout << error->message << endl;
> exit(-1);
> }
>  sink = gst_element_factory_make("appsink",NULL);
> if(!sink){
> cout << "Sink creation failed" << endl;
>  }
> string capFormat = encoding;
> GstCaps * caps_alone = gst_caps_new_simple(capFormat.c_str(), NULL);
>  gst_app_sink_set_caps(GST_APP_SINK(sink), caps_alone);
> gst_caps_unref(caps_alone);
>
> gst_base_sink_set_sync(GST_BASE_SINK(sink), true);
>
> if(GST_IS_PIPELINE(pipeline)) {
>  GstPad *outpad = gst_bin_find_unlinked_pad(GST_BIN(pipeline),
> GST_PAD_SRC);
> g_assert(outpad);
>  GstElement *outelement = gst_pad_get_parent_element(outpad);
> g_assert(outelement);
>  gst_object_unref(outpad);
>
>
> if(!gst_bin_add(GST_BIN(pipeline), sink)) {
>  cout << "gst_bin_add() failed\n" << endl; // TODO: do some unref
> gst_object_unref(outelement);
>  gst_object_unref(pipeline);
> return -1;
> }
>
> if(!gst_element_link(outelement, sink)) {
> cout << "GStreamer: cannot link outelement(\"" <<
>  gst_element_get_name(outelement) << "\") -> sink\n" << endl;
> gst_object_unref(outelement);
>  gst_object_unref(pipeline);
> return -1;
> }
>
> gst_object_unref(outelement);
> } else {
> GstElement* launchpipe = pipeline;
>  pipeline = gst_pipeline_new(NULL);
> g_assert(pipeline);
>
> gst_object_unparent(GST_OBJECT(launchpipe));
>
> gst_bin_add_many(GST_BIN(pipeline), launchpipe, sink, NULL);
>
> if(!gst_element_link(launchpipe, sink)) {
>  cout << "GStreamer: cannot link launchpipe -> sink\n" << endl;
> gst_object_unref(pipeline);
>  return -1;
> }
> }
>
> gst_element_set_state(pipeline, GST_STATE_PAUSED);
>
> if (gst_element_get_state(pipeline, NULL, NULL, -1) ==
> GST_STATE_CHANGE_FAILURE) {
>  cout << "Failed to PAUSE." << endl;
> exit(-1);
> } else {
>  cout<< "stream is PAUSED." << endl;
> }
>
>  // We could probably do something with the camera name, check
> // errors or something, but at the moment, we don't care.
>  std::string camera_name;
> //TODO
> // if
> (camera_calibration_parsers::readCalibrationIni("../camera_parameters.txt",
> camera_name, camera_info)) {
>  // ROS_INFO("Successfully read camera calibration.  Rerun camera
> calibrator if it is incorrect.");
>  // }
> // else {
>  // ROS_ERROR("No camera_parameters.txt file found.  Use default file if
> no other is available.");
>  // }
>
> //TODO
>  bool preroll = false;
> if (preroll) {
> //The PAUSE, PLAY, PAUSE, PLAY cycle is to ensure proper pre-roll
>  //I am told this is needed and am erring on the side of caution.
> gst_element_set_state(pipeline, GST_STATE_PLAYING);
>
> if (gst_element_get_state(pipeline, NULL, NULL, -1) ==
> GST_STATE_CHANGE_FAILURE) {
> cout << "Failed to PLAY." << endl;
>  exit(-1);
> } else {
> cout << "stream is PLAYING." << endl;
>  }
>
> gst_element_set_state(pipeline, GST_STATE_PAUSED);
>
>  if (gst_element_get_state(pipeline, NULL, NULL, -1) ==
> GST_STATE_CHANGE_FAILURE) {
> cout << "Failed to PAUSE." << endl;
>  exit(-1);
> } else {
> cout<< "stream is PAUSED." << endl;
>  }
> }
>
> // TODO
>  // image_transport::ImageTransport it(nh);
> // image_transport::CameraPublisher pub =
> it.advertiseCamera("gscam/image_raw", 1);
>  //
> // ros::ServiceServer set_camera_info =
> nh.advertiseService("gscam/set_camera_info", setCameraInfo);
>
> std::cout << "Processing..." << std::endl;
>
> //processVideo
>  rosPad = false;
> gstreamerPad = true;
> gst_element_set_state(pipeline, GST_STATE_PLAYING);
> #ifdef DEBUG_FRAMERATE
> clock_gettime(CLOCK_REALTIME, &timestamp1);
> #endif
> while(1){
>  GstBuffer* buf = gst_app_sink_pull_buffer(GST_APP_SINK(sink));
> if (!buf){
>
> return -1;
> }
>
>
> GstPad* pad = gst_element_get_static_pad(sink, "sink"); //TODO spostare
> sink
>  GstCaps *caps = gst_pad_get_negotiated_caps(pad);
> GstStructure *structure = gst_caps_get_structure(caps,0);
>  gst_structure_get_int(structure,"width",&width);
> gst_structure_get_int(structure,"height",&height);
>  gst_buffer_unref(buf);
> //printf("fatto\n");
> #ifdef DEBUG_FRAMERATE
>  frame_counter++;
> if(frame_counter%NUM_OF_FRAMES_TO_CATCH_TO_CALCULATE_FPS==0){
>  struct timespec timestamp2;
> clock_gettime(CLOCK_REALTIME, &timestamp2);
>  long int sec=timestamp2.tv_sec - timestamp1.tv_sec;
> long int nsec=timestamp2.tv_nsec - timestamp1.tv_nsec;
>  long milliseconds=(sec*1000000)+nsec/1000.0;
> printf("time to send one frame: %.02f
> ms\n",(float)milliseconds/(float)NUM_OF_FRAMES_TO_CATCH_TO_CALCULATE_FPS);
>  frame_counter=0;
> clock_gettime(CLOCK_REALTIME, &timestamp1);
> }
> #endif
>
>
> }
>
> return 0;
> }
>
>
>
> _______________________________________________
> gstreamer-devel mailing list
> gstreamer-devel at lists.freedesktop.org
> http://lists.freedesktop.org/mailman/listinfo/gstreamer-devel
>
>
-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://lists.freedesktop.org/archives/gstreamer-devel/attachments/20120305/13498b86/attachment-0001.htm>


More information about the gstreamer-devel mailing list