Re: cmd pipeline works and program pipeline doesn't

bertl rzillner at gmx.at
Wed Jul 24 13:03:40 PDT 2013


The following cmd pipeline works with hw acceleration:

gst-launch-0.10 v4l2src ! image/jpeg,width=1920,height=1080,framerate=30/1 !
jpegparse ! ducatijpegdec ! ffmpegcolorspace ! autovideosink


This is an example I'm using to make hw acceleration work in my code.


/*
 * VideoSource.cpp
 *
 *  Created on: 03.07.2013
 *      Author: robert
 */

#include "VideoSource.h"
#include <iostream>
#include <string>
#include "stdio.h"
#include <unistd.h>


#include <glib.h>
#include "gst/gst.h"
#include "gst/gstbin.h"
#include "gst/gstutils.h"
#include <gst/app/gstappsrc.h>
#include <gst/app/gstappsink.h>
#include <gst/app/gstappbuffer.h>
#include <gst/video/video.h>

#include "opencv2/opencv.hpp"
#include "opencv2/highgui/highgui.hpp"


using namespace cv;


using namespace std;


/*!
 * NOTE: The value of the source video file can also be set in the settings.cfg
 * file as shown below.  The default video source file name is defined in the
 * constant GST_DEFAULT_VIDEO_SOURCE_FILE_NAME.
 * ...
 * GStreamerVideoFilename={the full path of your video source file}
 * ...
 */

static const int GST_MAX_WIDTH = 800;
static const int GST_MAX_HEIGHT = 600;



static double mFrameNumber;



/*!
 * Construct a live video source reading MJPEG frames from a v4l2 device,
 * hardware-decoding them (ducatijpegdec), converting to RGB and exposing
 * the decoded frames through an appsink named "rgbvideo".
 *
 * @param device  Path of the v4l2 capture device (e.g. "/dev/video1").
 *
 * On any failure mSourcePipeline is left NULL so grabFrame() bails out.
 */
VideoSource::VideoSource(string device) {

	cvNamedWindow( "Left Cam", CV_WINDOW_AUTOSIZE );

    gchar *pipelineString = NULL;
    mFrameNumber = 0;
    mRgbVideoSink = NULL;
    mSourcePipeline = NULL;
    /*
     * BUG FIX: buffer/frame are read by grabFrame()/retrieveFrame() before
     * they are ever assigned; without this initialization the very first
     * grabFrame() call unrefs an uninitialized pointer.
     */
    buffer = NULL;
    frame = NULL;

	/*!
	 * Initialize the gstreamer library, without the command-line parms.
	 */
    gst_init(NULL, NULL);

    /*!
     * Setup the source pipeline: capture JPEG frames from the device,
     * hardware-decode them, convert the color space, fix the output caps
     * to RGB at GST_MAX_WIDTH x GST_MAX_HEIGHT, and deliver the frames to
     * an appsink (limited to 2 queued buffers) for pulling.
     */
    pipelineString =
	g_strdup_printf(
					"v4l2src device=\"%s\" ! "
					"image/jpeg,width=%d,height=%d ! "
					"ducatijpegdec ! "
					"ffmpegcolorspace ! "
					"video/x-raw-rgb,width=%d,height=%d ! "
					"appsink name=rgbvideo max-buffers=2 drop=false",
					device.c_str(), GST_MAX_WIDTH, GST_MAX_HEIGHT,
					GST_MAX_WIDTH, GST_MAX_HEIGHT);
    g_print("gstreamer pipeline:\n%s\n", pipelineString);

    /*
     * BUG FIX: the GError was discarded (NULL), hiding the reason why a
     * pipeline that works on the gst-launch command line fails here.
     */
    GError *parseError = NULL;
    mSourcePipeline = gst_parse_launch(pipelineString, &parseError);
    g_free(pipelineString);

    if (mSourcePipeline == NULL)
    {
        g_print("An error occurred when attempting to create the video source pipeline: %s\n",
                (parseError != NULL) ? parseError->message : "unknown error");
        if (parseError != NULL)
            g_error_free(parseError);
        return;
    }
    if (parseError != NULL)
        g_error_free(parseError);   // non-fatal parse warning

    /*!
     * Obtain a reference to the appsink element in the pipeline for later
     * use when pulling a buffer.
     */
    mRgbVideoSink = gst_bin_get_by_name(GST_BIN(mSourcePipeline), "rgbvideo");

    if (mRgbVideoSink == NULL)
    {
        g_print("The video sink filters could not be created.\n");
        gst_object_unref(mSourcePipeline);
        mSourcePipeline = NULL;
        return;
    }

    /*!
     * Activate the video source pipeline, so it is ready when we request a
     * buffer.  BUG FIX: check the state-change result — caps negotiation
     * (e.g. ffmpegcolorspace vs. the fixed RGB caps) can fail here even
     * when the equivalent gst-launch command line works.
     */
    if (gst_element_set_state(mSourcePipeline, GST_STATE_PLAYING)
            == GST_STATE_CHANGE_FAILURE)
    {
        g_print("Failed to set the video source pipeline to PLAYING.\n");
        gst_object_unref(mRgbVideoSink);
        mRgbVideoSink = NULL;
        gst_object_unref(mSourcePipeline);
        mSourcePipeline = NULL;
    }
}

/*!
 * Destructor: stop the pipeline and release the GStreamer references
 * taken in the constructor.
 */
VideoSource::~VideoSource() {

	/*
	 * BUG FIX: gst_bin_get_by_name() returns an owned reference, so the
	 * appsink must be unreffed too; the original leaked it.
	 */
	if (mRgbVideoSink != NULL)
	{
		gst_object_unref(mRgbVideoSink);
		mRgbVideoSink = NULL;
	}

	if (mSourcePipeline != NULL)
	{
		gst_element_set_state(mSourcePipeline, GST_STATE_NULL);
		gst_object_unref(mSourcePipeline);
		mSourcePipeline = NULL;
	}
}

bool VideoSource::grabFrame()
{

    if(!mSourcePipeline)
        return false;

    if(gst_app_sink_is_eos(GST_APP_SINK_CAST(mRgbVideoSink)))
        return false;

    if(buffer)
        gst_buffer_unref(buffer);


    buffer = gst_app_sink_pull_buffer(GST_APP_SINK_CAST(mRgbVideoSink));
    if(!buffer)
        return false;

    return true;
}

/*!
 * Wrap the most recently grabbed GStreamer buffer in an IplImage header.
 * The pixel data is NOT copied: the IplImage points directly into the
 * GstBuffer, so the returned image is only valid until the next
 * grabFrame() call.
 *
 * @return the frame, or 0 when no buffer is held or the caps lack
 *         width/height fields.
 */
IplImage * VideoSource::retrieveFrame(int)
{
    if(!buffer)
        return 0;

    // Lazily create the IplImage header once, sized from the negotiated caps.
    if(!frame) {
        gint height, width;
        GstCaps *buff_caps = gst_buffer_get_caps(buffer);
        assert(gst_caps_get_size(buff_caps) == 1);
        GstStructure* structure = gst_caps_get_structure(buff_caps, 0);

        if(!gst_structure_get_int(structure, "width", &width) ||
           !gst_structure_get_int(structure, "height", &height))
        {
            // BUG FIX: the original leaked buff_caps on this failure path.
            gst_caps_unref(buff_caps);
            return 0;
        }

        frame = cvCreateImageHeader(cvSize(width, height), IPL_DEPTH_8U, 3);
        gst_caps_unref(buff_caps);
    }

    // No need to memcpy, just point the header at gstreamer's buffer. :-)
    cvSetData(frame, (char *)GST_BUFFER_DATA(buffer), frame->widthStep);
    return frame;
}





/*
void VideoSource::GetAndFillFrameBWandRGB(IplImage * picture)
{





	 IplImage *frame_temp=NULL;// = frame
	GstBuffer* buf=NULL;

	 if (mRgbVideoSink != NULL)
	  {
	    buf = gst_app_sink_pull_buffer((GstAppSink*)mRgbVideoSink);

	      if(buf != NULL)
			{
				frame_temp->imageData = (char*) GST_BUFFER_DATA(buf);
				cvShowImage("Left Cam",frame_temp);

				//cvConvertImage(frame_temp, frame_temp, CV_CVTIMG_SWAP_RB);
				//ProcessFrame(frame_temp);
				gst_buffer_unref(buf);
			}

	  }







	GstBuffer *rgbVideoBuffer = NULL;


     // Be sure the video sinks are available before requesting a buffer
with video
     // data.

    if (mRgbVideoSink != NULL)// && mGrayVideoSink != NULL)
    {
        //rgbVideoBuffer =
gst_app_sink_pull_buffer(GST_APP_SINK(mRgbVideoSink));
        rgbVideoBuffer =
gst_app_sink_pull_buffer(GST_APP_SINK_CAST(mRgbVideoSink));


         // If either of the buffers are NULL then assume that we have
reached the end of
         // the video stream and just return.

        if (rgbVideoBuffer != NULL)// && grayVideoBuffer != NULL)
        {
            //BasicImage<Rgb<byte> > rgbVideoFrame((Rgb<byte>
*)rgbVideoBuffer->data, mFrameSize);

             //Copy the streamed image into caller's params.
            //imRGB.copy_from(rgbVideoFrame);

        	picture->imageData = (char*)GST_BUFFER_DATA(rgbVideoBuffer);


             //Release the gst buffer since it is already copied to the
caller.
            gst_buffer_unref(rgbVideoBuffer);

          //   Maintain the running total of frames.
            mFrameNumber++;
        }
    }
}*/



int main() {


	IplImage *frame1;

	VideoSource *vid= new VideoSource("/dev/video1");



while(1){

	if(vid->grabFrame()){
      frame1=vid->retrieveFrame(1);

      //cvCvtColor(frame1,frame1,CV_BGR2RGB);
      cvShowImage("Left Cam",frame1);
      cvWaitKey(20);
	}




    //sleep(1);
	//cvShowImage("Left Cam",frame);

}




--
View this message in context: http://gstreamer-devel.966125.n4.nabble.com/cmd-pipeline-works-and-program-pipeline-doesn-t-tp4661101p4661237.html
Sent from the GStreamer-devel mailing list archive at Nabble.com.


More information about the gstreamer-devel mailing list