gstreamer - struggling to get streaming for different decoder and depay formats at run time based on user selection.

Sankareswari sankareswari at e-consystems.com
Sat Jul 9 09:27:17 UTC 2016


Hi,

I am trying to get streaming buffers from a WiFi camera, using the
GStreamer pipeline shown in the code below. The depay and decoder elements
are chosen at run time based on the format the user selects. When launching
the application, I call "initGstreamer", "setRtspStreamPipeline",
"startPlayStream". When the user changes the format, I call
"stopPlayStream", "closeGStreamerPipeline", "setRtspStreamPipeline",
"startPlayStream". When the user quits the application, I call
"stopPlayStream", "closeGStreamerPipeline", "deinitGstreamer".
On launch I get a preview from the camera in the supported format. But when
the user changes the format, I do not get any frames in the new format. If I
close the application and relaunch it, I do get a preview in the format the
user selected.

What am I doing wrong? Please help me.

Here is my code.


gstreamRtsp.cpp
--------------------

#include "gstreamRtsp.h"
#include <QDebug>

QImage GStreamRTSP::image;

GStreamRTSP::GStreamRTSP(QQuickPaintedItem *parent) :
                         QQuickPaintedItem(parent){
    setFlag(QQuickPaintedItem::ItemHasContents, true);
}

GStreamRTSP::~GStreamRTSP(){
}


void GStreamRTSP::on_pad_added (GstElement *element, GstPad *pad, gpointer data){

  GstPad *sinkpad;
  GstElement *depay = (GstElement *) data;

  /* We can now link this pad with the depayloader's sink pad */
  qDebug()<<"Dynamic pad created, linking source/demuxer";
  qDebug()<<"Received new pad"<<GST_PAD_NAME (pad)<<"from"<<GST_ELEMENT_NAME (element);

  sinkpad = gst_element_get_static_pad (depay, "sink");
  if (gst_pad_link (pad, sinkpad) != GST_PAD_LINK_OK) {
        g_printerr ("Failed to link \n");
  }
  /* release the static pad whether or not the link succeeded */
  gst_object_unref (sinkpad);
}

void GStreamRTSP::initGstreamer(){
    gst_init(NULL, NULL);
}

/**
 * @brief GStreamRTSP::setRtspStreamPipeline - setting streaming pipeline
 * @return true/false
 */
bool GStreamRTSP::setRtspStreamPipeline(QString streamFormat){

    if(gst_is_initialized()){
        qDebug()<<"gst initialization is done";
    }

    qDebug()<<"set rtsp stream";
    qDebug()<<"streamFormat"<<streamFormat;
    videoStreamPipeline.pipelineName = gst_pipeline_new("Video Stream Application");
    if(!videoStreamPipeline.pipelineName){
        qDebug()<<"Creating pipeline failed";
        return false;
    }
    gst_element_set_state (videoStreamPipeline.pipelineName, GST_STATE_NULL);
    videoStreamPipeline.videoSource = gst_element_factory_make("rtspsrc", "source");
    if(!videoStreamPipeline.videoSource){
        qDebug()<<"setting video source in pipeline failed";
        return false;
    }
    if(streamFormat == "h264"){
        qDebug()<<"h264 foramt";
            videoStreamPipeline.depay =
gst_element_factory_make("rtph264depay", "Depay");
            if(!videoStreamPipeline.depay){
                qDebug()<<"setting depay in pipleline is
failed";
                return false;
            }
            videoStreamPipeline.decoder =
gst_element_factory_make("ffdec_h264", "Decoder");
            if(!videoStreamPipeline.decoder){
                qDebug()<<"setting decoder in pipleline is
failed";
                return false;
            }
    }
    else if(streamFormat == "mpeg4"){
        qDebug()<<"mpeg4 foramt";
        videoStreamPipeline.depay =
gst_element_factory_make("rtpmp4vdepay", "Depay");
        if(!videoStreamPipeline.depay){
            qDebug()<<"setting depay in pipleline is
failed";
            return false;
        }

        videoStreamPipeline.decoder =
gst_element_factory_make("ffdec_mpeg4", "Decoder");
        if(!videoStreamPipeline.decoder){
            qDebug()<<"setting decoder in pipleline is
failed";
            return false;
        }
    }
    else if(streamFormat == "mjpeg"){
        qDebug()<<"mjpeg foramt";
        videoStreamPipeline.depay =
gst_element_factory_make("rtpjpegdepay", "Depay");
        if(!videoStreamPipeline.depay){
            qDebug()<<"setting depay in pipleline is
failed";
            return false;
        }
        videoStreamPipeline.decoder =
gst_element_factory_make("ffdec_mjpeg", "Decoder");
        if(!videoStreamPipeline.decoder){
            qDebug()<<"setting decoder in pipleline is
failed";
            return false;
        }
    }

    videoStreamPipeline.colorSpace = gst_element_factory_make("ffmpegcolorspace", "Colorspace");
    if(!videoStreamPipeline.colorSpace){
        qDebug()<<"setting colorspace in pipeline failed";
        return false;
    }

    videoStreamPipeline.appSink = gst_element_factory_make ("appsink", "appSink");
    if(!videoStreamPipeline.appSink){
         qDebug()<<"setting video sink in pipeline failed";
         return false;
    }

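    /* appsink caps: "video/x-raw-rgb" is the raw RGB caps name in
       GStreamer 0.10 (1.0 uses "video/x-raw" with a "format" field) */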
    GstCaps *caps;
    caps = gst_caps_new_simple("video/x-raw-rgb", NULL);
    gst_app_sink_set_caps(GST_APP_SINK(videoStreamPipeline.appSink), caps);
    gst_caps_unref(caps);

    // setting video source location
    g_object_set (G_OBJECT (videoStreamPipeline.videoSource), "location", "rtsp_url", NULL);
    g_object_set (G_OBJECT (videoStreamPipeline.videoSource), "latency", 200, NULL);

    // app sink settings
    gst_app_sink_set_emit_signals((GstAppSink*)videoStreamPipeline.appSink, true);
    gst_app_sink_set_drop((GstAppSink*)videoStreamPipeline.appSink, true);
    gst_app_sink_set_max_buffers((GstAppSink*)videoStreamPipeline.appSink, 1);


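    /* bin_capture groups the streaming elements so that
       closeGStreamerPipeline() can remove them from the pipeline as a unit */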
    videoStreamPipeline.binCapture = gst_bin_new("bin_capture");

    if(g_signal_connect(videoStreamPipeline.appSink, "new-buffer",
                        G_CALLBACK(GStreamRTSP::captureGstBuffer), this) <= 0)
    {
        g_printerr("Could not connect signal handler.\n");
        qDebug()<<"Could not connect signal handler.";
        return false;
    }

    /* Add Elements to the Bin */
    gst_bin_add_many (GST_BIN (videoStreamPipeline.binCapture),
                      videoStreamPipeline.videoSource, videoStreamPipeline.colorSpace,
                      videoStreamPipeline.depay, videoStreamPipeline.decoder,
                      videoStreamPipeline.appSink, NULL);

    /* Link confirmation */
    if (!gst_element_link_many (videoStreamPipeline.depay, videoStreamPipeline.decoder,
                                videoStreamPipeline.colorSpace, videoStreamPipeline.appSink, NULL)){
        g_warning ("Linking pipeline elements failed...");
    }

    // add bin capture to pipe
    if((gst_bin_add(GST_BIN (videoStreamPipeline.pipelineName),
                    videoStreamPipeline.binCapture)) != TRUE)
    {
        g_print("bin capture not added to pipeline\n");
    }

    if(gst_element_set_state (videoStreamPipeline.pipelineName,
                              GST_STATE_NULL) == GST_STATE_CHANGE_SUCCESS)
    {
        qDebug()<<"set pipeline state as NULL";
    }else
    {
        qDebug()<<"setting pipeline state failed";
    }


    /* Dynamic Pad Creation */
    if(! g_signal_connect (videoStreamPipeline.videoSource, "pad-added",
                           G_CALLBACK (on_pad_added), videoStreamPipeline.depay))
    {
        g_warning ("registering callback for dynamic pad creation failed");
        return false;
    }

    return true;
}

/**
 * @brief GStreamRTSP::paint - paint the image in qml
 * @param painter
 */
void GStreamRTSP::paint(QPainter *painter){
   painter->drawImage(0, 0, image);
}

/**
 * @brief GStreamRTSP::updateImage - request to repaint the image
 */
void GStreamRTSP::updateImage(){
    update();
}

/**
 * @brief GStreamRTSP::captureGstBuffer - Capture the frame and update it into qml
 * @param sink
 * @param grtsp
 */
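// Connected to the appsink "new-buffer" signal (GStreamer 0.10); it fires only
// because emit-signals is enabled via gst_app_sink_set_emit_signals() above.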
void GStreamRTSP::captureGstBuffer(GstAppSink *sink, GStreamRTSP &grtsp){
    gint width, height;
    GstBuffer* m_buffer = gst_app_sink_pull_buffer(sink);
    if(m_buffer){
        qDebug()<<"frames available================";
        // Get the image width and height from the negotiated caps
        GstPad* pad = gst_element_get_static_pad(grtsp.videoStreamPipeline.appSink, "sink");
        GstCaps *caps = gst_pad_get_negotiated_caps(pad);
        GstStructure *structure = gst_caps_get_structure(caps, 0);
        gst_structure_get_int(structure, "width", &width);
        gst_structure_get_int(structure, "height", &height);

        // update the window resolution
        emit grtsp.getPreviewSize((int)width, (int)height);

        // getting frame rate
        const GValue *pfps;
        if ((pfps = gst_structure_get_value (structure, "framerate")) != NULL) {
            if (GST_VALUE_HOLDS_FRACTION (pfps)) {
                gchar *fps_string;
                fps_string = gst_value_serialize (pfps);
                // qDebug()<<"Framerate:"<<fps_string;   // framerate - for ex: 60/1
                // getting frames per second
                QString frameRateQString = fps_string;
                QString fps = frameRateQString.split('/').at(0);  // fps - for ex: 60
                //qDebug()<<"fps:"<<fps;
                //grtsp.getFrameRate(fps.toInt());
                g_free (fps_string);
            }else{
                g_warning("gstvideo: framerate property of pad %s:%s is not of type Fraction",
                          GST_DEBUG_PAD_NAME (pad));
            }
        }
        else{
            g_warning ("gstvideo: failed to get framerate property of pad %s:%s",
                       GST_DEBUG_PAD_NAME (pad));
        }
        gst_caps_unref (caps);
        gst_object_unref (pad);

        // update image in qml; copy() is needed because this QImage constructor
        // does not copy the buffer data, which is freed by the unref below
        QImage img(const_cast<const unsigned char*>(GST_BUFFER_DATA(m_buffer)),
                   (int)width, (int)height, QImage::Format_RGB888);
        image = img.copy();
        grtsp.updateImage();
        gst_buffer_unref(m_buffer);
    }
}


/**
 * @brief GStreamRTSP::startPlayStream - Start the streaming
 * @return true/false
 */
bool GStreamRTSP::startPlayStream(){
    qDebug()<<"start play stream";
    if(GST_STATE_CHANGE_FAILURE ==
       gst_element_set_state(videoStreamPipeline.pipelineName, GST_STATE_PLAYING)){
        qDebug()<<" Error in setting state to play stream";
        return false;
    }
    else{
        qDebug()<<"Stream is playing";
    }

    return true;
}

/**
 * @brief GStreamRTSP::stopPlayStream - Stop the streaming
 * @return true/false
 */
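// Note: "stop" here only pauses the pipeline; the transition to NULL
// happens in closeGStreamerPipeline().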
bool GStreamRTSP::stopPlayStream(){
    if(GST_STATE_CHANGE_FAILURE ==
       gst_element_set_state(videoStreamPipeline.pipelineName, GST_STATE_PAUSED)) {
        qDebug()<<"Error in setting state to pause stream";
        return false;
    }
    else{
        qDebug()<<"Stream is paused";
    }

    return true;
}

/**
 * @brief GStreamRTSP::closeGStreamerPipeline - set the pipeline to NULL and release it
 */
void GStreamRTSP::closeGStreamerPipeline(){
    if(GST_STATE_CHANGE_FAILURE ==
       gst_element_set_state(videoStreamPipeline.pipelineName, GST_STATE_NULL)) {
        qDebug()<<"Error in setting state to NULL stream";
    }
    else{
        qDebug()<<"Stream is NULL";
    }

   gst_element_set_state (videoStreamPipeline.binCapture, GST_STATE_NULL);
   if((gst_bin_remove(GST_BIN (videoStreamPipeline.pipelineName),
                      videoStreamPipeline.binCapture)) != TRUE)
   {
       qDebug()<<"bin_capture not removed from pipeline";
   }

    if(videoStreamPipeline.pipelineName){
        gst_object_unref (GST_OBJECT (videoStreamPipeline.pipelineName));
    }
}


void GStreamRTSP::deinitGstreamer(){
    if(gst_is_initialized()){
        qDebug()<<"deinit gstreamer";
        gst_deinit();
    }
}



Thanks,
Sankareswari D


