Read frames from GStreamer pipeline in opencv (cv::Mat)
pchaurasia
pchaurasia at gameloreinc.com
Thu Jun 1 10:40:45 UTC 2017
Hi Tim
I tried:
1. Removing NVMM.
2. Trying it out with videotestsrc (upon a suggestion from Martin); the
launch line I am trying to reproduce is shown below.
However, with both #1 and #2 the problem still persists. I feel there is
something fundamentally wrong, either in my code below or in one of the
NVIDIA plugins.
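For reference, this is roughly the pipeline the code is meant to build, with
fakesink standing in for the appsink (typed from memory, so treat it as
approximate):
gst-launch-1.0 videotestsrc ! 'video/x-raw,width=1920,height=1080,format=I420' ! fakesink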
Thanks
#include "opencv2/objdetect/objdetect.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/imgproc/imgproc.hpp"
#include "opencv/cv.h"
#include <gst/gst.h>
#include <gst/app/gstappsink.h>
#include <gst/gstelement.h>
#include <gst/video/video.h>
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <pthread.h>
using namespace cv;
GstSample* buffer;
cv::Mat frame;
GstVideoInfo vinfo;
int sampleno = 0;
GstFlowReturn CaptureGstBuffer(GstAppSink *sink, gpointer user_data)
{
//prog_data* pd = (prog_data*)user_data;
GstSample* sample = gst_app_sink_pull_sample(sink);
if(sample == NULL) {
return GST_FLOW_ERROR;
}
GstBuffer* buffer = gst_sample_get_buffer(sample);
GstMapInfo map_info;
if (!gst_buffer_map (buffer, &map_info, GST_MAP_READ)) {
/* map failed, so there is nothing to unmap */
gst_sample_unref(sample);
return GST_FLOW_ERROR;
}
//render using map_info.data
// frame = Mat::zeros(1080, 1920, CV_8UC3);
// frame = cv::Mat(1080, 1920, CV_8UC3, (char *)map_info.data, cv::Mat::AUTO_STEP);
//memcpy(frame.data,map_info.data,map_info.size);
//Mat grey;
//cvtColor(frame, grey, CV_BGR2GRAY);
//if (!frame.empty())
// imshow("test-gstreamer-video",grey);
// waitKey(1);
// GstVideoFrame vframe;
// if (gst_video_frame_map (&vframe, &vinfo, buffer, GST_MAP_READ)) {
// fprintf(stderr,"I am able to map vframe\n");
// gst_video_frame_unmap (&vframe);
// }
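// A sketch of what I think should work for the I420 data (untested; assumes
// the negotiated caps really are 1920x1080 I420, i.e. map_info.size ==
// 1920*1080*3/2, one full-size luma plane followed by two quarter-size
// chroma planes):
// cv::Mat yuv(1080 * 3 / 2, 1920, CV_8UC1, (void *)map_info.data);
// cv::Mat bgr;
// cv::cvtColor(yuv, bgr, cv::COLOR_YUV2BGR_I420);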
fprintf(stderr,"Got sample no %d %d\n",sampleno++,(int)map_info.size);
gst_buffer_unmap ((buffer), &map_info);
//gst_memory_unmap(memory, &map_info);
//gst_memory_unref(memory);
gst_sample_unref(sample);
return GST_FLOW_OK;
}
int main(int argc, char *argv[]) {
GstElement *pipeline, *source, *convert, *sink, *capssrc;
GstBus *bus;
GstCaps *filtercaps, *srcfiltercaps;
GstElement *tee, *vq1;
GstMessage *msg;
GstStateChangeReturn ret;
GstPad *srcpad,*sinkpad;
/* Initialize GStreamer */
gst_init (&argc, &argv);
/* Create the elements */
source = gst_element_factory_make ("videotestsrc", "source");
/* appsink, so that samples can be pulled in the callback */
sink = gst_element_factory_make ("appsink", "sink");
convert = gst_element_factory_make ("nvvidconv","videoconvert");
/* Create the empty pipeline */
pipeline = gst_pipeline_new ("test-pipeline");
if (!pipeline || !source || !sink || !convert ) {
g_printerr ("Not all elements could be created.\n");
return -1;
}
capssrc = gst_element_factory_make ("capsfilter", "filter1");
g_assert (capssrc != NULL); /* should always exist */
srcfiltercaps = gst_caps_from_string("video/x-raw, width=(int)1920, height=(int)1080, format=(string)I420");
g_object_set (G_OBJECT (capssrc), "caps", srcfiltercaps, NULL);
//gst_video_info_init(&vinfo);
//if (!gst_video_info_from_caps(&vinfo,filtercaps)){
// g_printerr ("Unable to find video info from caps\n");
// return -1;
//}
//gst_caps_unref (filtercaps);
gst_caps_unref (srcfiltercaps);
/* Modify the source's properties */
//g_object_set (source, "pattern", 0, NULL);
g_object_set (sink, "drop" , TRUE, NULL);
g_object_set (sink, "new_sample" , FALSE, NULL);
g_object_set (sink, "max-buffers" , 1, NULL);
/* register the new-sample callback on the appsink (zero-initialized struct) */
GstAppSinkCallbacks* appsink_callbacks = g_new0 (GstAppSinkCallbacks, 1);
appsink_callbacks->eos = NULL;
appsink_callbacks->new_preroll = NULL;
appsink_callbacks->new_sample = CaptureGstBuffer;
gst_app_sink_set_callbacks(GST_APP_SINK(sink), appsink_callbacks,
(gpointer)NULL, (GDestroyNotify) g_free);
gst_app_sink_set_emit_signals((GstAppSink*)sink, FALSE);
/* Build the pipeline */
/* note: convert (nvvidconv) is added to the bin but not linked into the chain in this test */
gst_bin_add_many (GST_BIN (pipeline), source, capssrc, convert, sink, NULL);
if (gst_element_link_many (source, capssrc, sink, NULL) != TRUE) {
g_printerr ("Elements could not be linked.\n");
gst_object_unref (pipeline);
return -1;
}
#if 1
/* Start playing */
ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
if (ret == GST_STATE_CHANGE_FAILURE) {
g_printerr ("Unable to set the pipeline to the playing state.\n");
gst_object_unref (pipeline);
return -1;
}
/* Wait until error or EOS */
bus = gst_element_get_bus (pipeline);
msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
(GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
/* Parse message */
if (msg != NULL) {
GError *err;
gchar *debug_info;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_ERROR:
gst_message_parse_error (msg, &err, &debug_info);
g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME
(msg->src), err->message);
g_printerr ("Debugging information: %s\n", debug_info ? debug_info :
"none");
g_clear_error (&err);
g_free (debug_info);
break;
case GST_MESSAGE_EOS:
g_print ("End-Of-Stream reached.\n");
break;
default:
/* We should not reach here because we only asked for ERRORs and EOS
*/
g_printerr ("Unexpected message received.\n");
break;
}
gst_message_unref (msg);
}
/* Free resources */
gst_object_unref (bus);
gst_element_set_state (pipeline, GST_STATE_NULL);
gst_object_unref (pipeline);
#endif
return 0;
}
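In case it matters, I build it roughly like this (the file name is just a
placeholder and the pkg-config module names are an assumption, adjust for
your install):
g++ gst_appsink_test.cpp -o gst_appsink_test $(pkg-config --cflags --libs gstreamer-1.0 gstreamer-app-1.0 gstreamer-video-1.0 opencv)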
--
View this message in context: http://gstreamer-devel.966125.n4.nabble.com/Read-frames-from-GStreamer-pipeline-in-opencv-cv-Mat-tp4683139p4683175.html
Sent from the GStreamer-devel mailing list archive at Nabble.com.