<p dir="ltr">If you have OpenCV, it will be easy.</p>
<p dir="ltr">Zheng Xing<br>
Ph.D. Student<br>
Hanlon Financial Systems Lab<br>
Stevens Institute of Technology</p>
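<p dir="ltr">For the question quoted below, the usual way to get frames into the application is an appsink (the code there already includes gst/app/gstappsink.h). Here is a minimal sketch under that assumption, forcing BGR caps so the pulled buffers are plain packed pixels; the element names are the standard GStreamer ones, everything else is illustrative:</p>
<p dir="ltr">#include <gst/gst.h><br>
#include <gst/app/gstappsink.h><br>
<br>
int main (int argc, char *argv[])<br>
{<br>
gst_init (&argc, &argv);<br>
if (argc != 2) {<br>
g_printerr ("Usage: %s <Ogg/Theora file>\n", argv[0]);<br>
return -1;<br>
}<br>
<br>
/* same decode chain as in the code below, but ending in an appsink we can pull from */<br>
GError *err = NULL;<br>
gchar *desc = g_strdup_printf ("filesrc location=%s ! oggdemux ! theoradec ! "<br>
"videoconvert ! video/x-raw,format=BGR ! appsink name=sink", argv[1]);<br>
GstElement *pipeline = gst_parse_launch (desc, &err);<br>
g_free (desc);<br>
if (pipeline == NULL) {<br>
g_printerr ("Parse error: %s\n", err->message);<br>
return -1;<br>
}<br>
<br>
GstElement *sink = gst_bin_get_by_name (GST_BIN (pipeline), "sink");<br>
gst_element_set_state (pipeline, GST_STATE_PLAYING);<br>
<br>
/* blocks until the first decoded frame is available (or returns NULL on EOS/error) */<br>
GstSample *sample = gst_app_sink_pull_sample (GST_APP_SINK (sink));<br>
if (sample != NULL) {<br>
GstBuffer *buffer = gst_sample_get_buffer (sample);<br>
GstMapInfo map;<br>
if (gst_buffer_map (buffer, &map, GST_MAP_READ)) {<br>
g_print ("Pulled one frame of %" G_GSIZE_FORMAT " bytes\n", map.size);<br>
gst_buffer_unmap (buffer, &map);<br>
}<br>
gst_sample_unref (sample);<br>
}<br>
<br>
gst_element_set_state (pipeline, GST_STATE_NULL);<br>
gst_object_unref (sink);<br>
gst_object_unref (pipeline);<br>
return 0;<br>
}</p>
<p dir="ltr">From there the mapped data can be handed to OpenCV as in the snippet above.</p>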
<div class="gmail_quote">On Jun 28, 2015 1:22 PM, "yannick inizan" <<a href="mailto:inizan.yannick@gmail.com">inizan.yannick@gmail.com</a>> wrote:<br type="attribution"><blockquote class="gmail_quote" style="margin:0 0 0 .8ex;border-left:1px #ccc solid;padding-left:1ex"><div dir="ltr"><div class="gmail_extra">I use playbin element and emit "convert-sample" signal. raw data is converted with GdkPixbuf<br>here an example (in Vala) : <a href="http://pastebin.com/Ex83wP6m" target="_blank">http://pastebin.com/Ex83wP6m</a></div><div class="gmail_extra"><br><div class="gmail_quote">2015-06-28 16:49 GMT+02:00 Faran <span dir="ltr"><<a href="mailto:m.faran.majeed@gmail.com" target="_blank">m.faran.majeed@gmail.com</a>></span>:<br><blockquote class="gmail_quote" style="margin:0px 0px 0px 0.8ex;border-left-width:1px;border-left-color:rgb(204,204,204);border-left-style:solid;padding-left:1ex">I've written the following code to extract video frames but now am stuck.<br>
Could anyone please help?<br>
<br>
#include <gst/gst.h><br>
#include <gst/app/gstappsrc.h><br>
#include <gst/app/gstappsink.h><br>
#include <glib.h><br>
#include <opencv2/opencv.hpp> /* for cv::Mat / cv::imwrite below; this file must be compiled as C++ */<br>
<br>
static gboolean<br>
bus_call (GstBus *bus,<br>
GstMessage *msg,<br>
gpointer data)<br>
{<br>
GMainLoop *loop = (GMainLoop *) data;<br>
<br>
switch (GST_MESSAGE_TYPE (msg)) {<br>
<br>
case GST_MESSAGE_EOS:<br>
g_print ("End of stream\n");<br>
g_main_loop_quit (loop);<br>
break;<br>
<br>
case GST_MESSAGE_ERROR: {<br>
gchar *debug;<br>
GError *error;<br>
<br>
gst_message_parse_error (msg, &error, &debug);<br>
g_free (debug);<br>
<br>
g_printerr ("Error: %s\n", error->message);<br>
g_error_free (error);<br>
<br>
g_main_loop_quit (loop);<br>
break;<br>
}<br>
default:<br>
break;<br>
}<br>
<br>
return TRUE;<br>
}<br>
/* Note: videoconvert exposes only static (always) pads, so the "pad-added"<br>
   handler connected to it below will never actually fire. */<br>
static void sink_pad_add(GstElement *src, GstPad *pad, gpointer data)<br>
{<br>
g_print("\n pad-added callback fired on the sink side \n");<br>
}<br>
<br>
static void<br>
on_pad_added (GstElement *src, GstPad *new_pad, gpointer data)<br>
{<br>
GstPad *sink_pad;<br>
GstElement *decoder = (GstElement *) data;<br>
<br>
/* We can now link this pad with the theora-decoder sink pad.<br>
   Note: oggdemux may also announce an audio (vorbis) pad; linking that<br>
   one to the theora decoder simply fails and is ignored here. */<br>
g_print ("Dynamic pad created, linking demuxer/decoder\n");<br>
<br>
sink_pad = gst_element_get_static_pad (decoder, "sink");<br>
if (gst_pad_link (new_pad, sink_pad) != GST_PAD_LINK_OK)<br>
g_print ("Pad link failed (probably the audio pad)\n");<br>
gst_object_unref (sink_pad);<br>
}<br>
static void image_processing()<br>
{<br>
g_print("image processing method is called");<br>
}<br>
<br>
<br>
int<br>
main (int argc,<br>
char *argv[])<br>
{<br>
GMainLoop *loop;<br>
<br>
GstElement *pipeline, *source, *demuxer;<br>
GstElement *video_decoder, *video_conv, *video_sink;<br>
GstBus *bus;<br>
guint bus_watch_id;<br>
GstElementFactory *factory;<br>
/* Initialisation */<br>
gst_init (&argc, &argv);<br>
<br>
loop = g_main_loop_new (NULL, FALSE);<br>
<br>
<br>
/* Check input arguments */<br>
if (argc != 2) {<br>
g_printerr ("Usage: %s <Ogg/Vorbis filename>\n", argv[0]);<br>
return -1;<br>
}<br>
<br>
<br>
/* Create gstreamer elements */<br>
pipeline = gst_pipeline_new ("video-player");<br>
source = gst_element_factory_make ("filesrc", "source");<br>
<br>
demuxer = gst_element_factory_make ("oggdemux", "ogg-demuxer");<br>
video_decoder = gst_element_factory_make ("theoradec", "video_decoder");<br>
video_conv = gst_element_factory_make ("videoconvert", "video_convert");<br>
video_sink = gst_element_factory_make ("autovideosink", "sink");<br>
if (!demuxer) { g_print ("oggdemux element could not be created\n"); }<br>
if (!video_conv) { g_print ("videoconvert element could not be created\n"); }<br>
<br>
<br>
if (!pipeline || !source || !demuxer ||<br>
!video_decoder || !video_conv || !video_sink) {<br>
g_printerr ("One element could not be created. Exiting.\n");<br>
return -1;<br>
}<br>
<br>
/* Set up the pipeline */<br>
<br>
/* we set the input filename to the source element */<br>
g_object_set (G_OBJECT (source), "location", argv[1], NULL);<br>
<br>
/* we add a message handler */<br>
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));<br>
bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);<br>
gst_object_unref (bus);<br>
<br>
/* we add all elements into the pipeline */<br>
/* file-source | ogg-demuxer | theora-decoder | video-converter | video-output */<br>
/* audio alternative:<br>
gst_bin_add_many (GST_BIN (pipeline), source, demuxer, decoder, conv, sink, NULL);<br>
*/<br>
gst_bin_add_many (GST_BIN (pipeline), source, demuxer, video_decoder,<br>
video_conv, video_sink, NULL);<br>
<br>
<br>
/* we link the elements together */<br>
/* file-source -> ogg-demuxer ~> theora-decoder -> video-converter -> video-output */<br>
gst_element_link (source, demuxer);<br>
<br>
/* gst_element_link_many (decoder, conv, sink, NULL);  audio chain for .ogg audio */<br>
if (!gst_element_link_many (video_decoder, video_conv, video_sink, NULL)) /* for .ogg video */<br>
g_print ("One of the video elements could not be linked\n");<br>
g_signal_connect (demuxer, "pad-added", G_CALLBACK (on_pad_added),<br>
video_decoder);<br>
g_signal_connect (video_conv, "pad-added", G_CALLBACK (sink_pad_add),<br>
video_sink);<br>
/* g_object_connect() is not interchangeable with g_signal_connect(): it takes<br>
   "signal::name"-style spec strings, so the call below would misbehave.<br>
g_object_connect (video_conv, "pad-added", G_CALLBACK (sink_pad_add),<br>
video_sink);<br>
*/<br>
/* note that the demuxer will be linked to the decoder dynamically.<br>
The reason is that Ogg may contain various streams (for example<br>
audio and video). The source pad(s) will be created at run time,<br>
by the demuxer when it detects the amount and nature of streams.<br>
Therefore we connect a callback function which will be executed<br>
when the "pad-added" is emitted.*/<br>
<br>
<br>
/* Set the pipeline to "playing" state*/<br>
g_print ("Now playing: %s\n", argv[1]);<br>
gst_element_set_state (pipeline, GST_STATE_PLAYING);<br>
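/* Note: the state change is asynchronous, so the pipeline has not prerolled<br>
   yet at this point; waiting with gst_element_get_state() (or pulling from an<br>
   appsink, which blocks until a sample is ready) is needed before the first<br>
   frame can actually be retrieved below. */<br>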
<br>
/* Iterate */<br>
g_print ("Running...\n");<br>
gchar *name;<br>
g_object_get (G_OBJECT (video_sink), "name", &name, NULL);<br>
factory = gst_element_factory_find ("autovideosink");<br>
g_print ("The name of video_sink is %s\n", name);<br>
g_print ("The '%s' element is a member of the category %s.\n"<br>
"Description: %s\n",<br>
gst_plugin_feature_get_name (GST_PLUGIN_FEATURE (factory)),<br>
gst_element_factory_get_metadata (factory,<br>
GST_ELEMENT_METADATA_KLASS),<br>
gst_element_factory_get_metadata (factory,<br>
GST_ELEMENT_METADATA_DESCRIPTION));<br>
<br>
GstPad *pad = gst_element_get_static_pad (video_sink, "sink");<br>
g_print ("Got the \"sink\" pad of element %s\n", name);<br>
g_free (name);<br>
GstCaps *c = gst_pad_get_pad_template_caps (pad);<br>
gchar *caps_str = gst_caps_to_string (c);<br>
g_print ("The pad template caps are %s\n", caps_str);<br>
g_free (caps_str);<br>
gst_caps_unref (c);<br>
gst_object_unref (pad);<br>
/* "pull-sample" is an action signal provided only by appsink; autovideosink<br>
   does not have it, so for this emit to return a sample the sink element<br>
   should be created as "appsink" (ideally with caps forced to<br>
   video/x-raw,format=BGR so the data below really is packed 3-byte pixels). */<br>
GstSample *sample = NULL;<br>
g_signal_emit_by_name (video_sink, "pull-sample", &sample);<br>
if (sample != NULL) {<br>
GstCaps *caps = gst_sample_get_caps (sample);<br>
GstBuffer *buffer = gst_sample_get_buffer (sample);<br>
GstMapInfo map;<br>
gst_buffer_map (buffer, &map, GST_MAP_READ);<br>
gint width, height;<br>
const GstStructure *str;<br>
str = gst_caps_get_structure (caps, 0);<br>
gst_structure_get_int (str, "width", &width);<br>
gst_structure_get_int (str, "height", &height);<br>
g_print ("The width is %d\n", width);<br>
g_print ("The height is %d\n", height);<br>
/* assumes packed BGR/RGB data with no row padding */<br>
cv::Mat frame (cv::Size (width, height), CV_8UC3, (char *) map.data, cv::Mat::AUTO_STEP);<br>
cv::imwrite ("Test2.jpg", frame);<br>
gst_buffer_unmap (buffer, &map);<br>
gst_sample_unref (sample);<br>
}<br>
<br>
GstElement *rsink = gst_bin_get_by_name (GST_BIN (pipeline), "sink");<br>
GstSample *pstGstSample1;<br>
// pstGstSample1 = gst_app_sink_pull_sample ((GstAppSink *) rsink);<br>
// image_processing (pstGstSample1);<br>
g_main_loop_run (loop);<br>
<br>
<br>
/* Out of the main loop, clean up nicely */<br>
g_print ("Returned, stopping playback\n");<br>
gst_element_set_state (pipeline, GST_STATE_NULL);<br>
/* the .dot file is only written if the GST_DEBUG_DUMP_DOT_DIR environment<br>
   variable points to an existing directory; the last argument is the base<br>
   file name created inside that directory */<br>
GST_DEBUG_BIN_TO_DOT_FILE (GST_BIN (pipeline), GST_DEBUG_GRAPH_SHOW_ALL,<br>
"myplayer");<br>
g_print ("Deleting pipeline\n");<br>
gst_object_unref (GST_OBJECT (pipeline));<br>
g_source_remove (bus_watch_id);<br>
g_main_loop_unref (loop);<br>
<br>
return 0;<br>
}<br>
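/* Build note (assumed typical setup): since the code uses cv::Mat it must be<br>
   compiled as C++, roughly:<br>
   g++ grab_frame.cpp $(pkg-config --cflags --libs gstreamer-app-1.0 opencv)<br>
   (the pkg-config module may be "opencv4" on newer OpenCV installs;<br>
   the file name here is illustrative). */<br>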
<br>
<br>
<br>
</blockquote></div><br></div></div>
<br>_______________________________________________<br>
gstreamer-devel mailing list<br>
<a href="mailto:gstreamer-devel@lists.freedesktop.org">gstreamer-devel@lists.freedesktop.org</a><br>
<a href="http://lists.freedesktop.org/mailman/listinfo/gstreamer-devel" rel="noreferrer" target="_blank">http://lists.freedesktop.org/mailman/listinfo/gstreamer-devel</a><br>
<br></blockquote></div>