[gst-devel] Video processing

Eric Zhang nicolas.m.zhang at gmail.com
Tue Aug 26 04:19:43 CEST 2008


Hi, gstreamer-devel:

    If you only want `ximagesink' or `xvimagesink' to draw images into 
your GtkDrawingArea, there is a very simple way to achieve this:

    Just connect to the `expose-event' signal of the GtkDrawingArea and 
pass its window ID to the sink element:

/* Drawing on our drawing area: connect to its expose-event signal */
g_signal_connect(G_OBJECT(area), "expose-event",
                 G_CALLBACK(expose_cb), NULL);

/* Callback to be called when the drawing area is exposed */
static gboolean expose_cb(GtkWidget *widget, GdkEventExpose *event,
                          gpointer data)
{
    /* `play->videosink' is your video sink element */
    gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(play->videosink),
                                 GDK_WINDOW_XWINDOW(widget->window));
    return FALSE;
}
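
    Depending on timing, some applications instead hand the window ID to 
the sink when it asks for it on the bus. This is only a rough, untested 
sketch of that alternative, assuming GStreamer 0.10 and a hypothetical 
global `video_window_xid' that you save once the widget is realized:

/* Hypothetical global, filled in after the drawing area is realized,
 * e.g. video_window_xid = GDK_WINDOW_XWINDOW(area->window); */
static gulong video_window_xid = 0;

/* Sync bus handler: XOverlay sinks post a "prepare-xwindow-id"
 * element message when they need a window to draw into */
static GstBusSyncReply bus_sync_handler(GstBus *bus, GstMessage *message,
                                        gpointer data)
{
    if (GST_MESSAGE_TYPE(message) != GST_MESSAGE_ELEMENT)
        return GST_BUS_PASS;
    if (!gst_structure_has_name(message->structure, "prepare-xwindow-id"))
        return GST_BUS_PASS;

    /* Hand our drawing area's X window over to the sink that asked */
    gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(GST_MESSAGE_SRC(message)),
                                 video_window_xid);
    gst_message_unref(message);
    return GST_BUS_DROP;
}

/* Installed once on the pipeline's bus, before going to PLAYING */
gst_bus_set_sync_handler(bus, (GstBusSyncHandler) bus_sync_handler, NULL);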

    That's it. If you want to draw the image yourself instead of using 
`xvimagesink' or `ximagesink', then I think this is a Gtk+ problem, not 
a GStreamer issue.
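
    On the Gtk+ side, the usual way is to wrap your processed RGB buffer 
in a GdkPixbuf and draw that onto the drawing area's window. This is only 
a rough, untested sketch, reusing `data_photo', `appdata->screen' and the 
IMAGE_WIDTH/IMAGE_HEIGHT constants from your code below:

/* Wrap the processed RGB data; no copy is made, so the data must
 * stay valid while the pixbuf is in use */
GdkPixbuf *newscreen = gdk_pixbuf_new_from_data(data_photo,
        GDK_COLORSPACE_RGB,          /* RGB colorspace            */
        FALSE,                       /* no alpha channel          */
        8,                           /* bits per RGB component    */
        IMAGE_WIDTH, IMAGE_HEIGHT,   /* dimensions                */
        3 * IMAGE_WIDTH,             /* rowstride in bytes        */
        NULL, NULL);                 /* no destroy callback       */

/* A GdkPixbuf is not a GdkDrawable, so it cannot be passed to
 * gdk_draw_pixmap(); draw it with gdk_draw_pixbuf() onto the
 * widget's window instead (the widget must be realized) */
gdk_draw_pixbuf(appdata->screen->window,
        appdata->screen->style->black_gc,
        newscreen,
        0, 0,                        /* source x, y               */
        0, 0,                        /* destination x, y          */
        IMAGE_WIDTH, IMAGE_HEIGHT,
        GDK_RGB_DITHER_NONE, 0, 0);

g_object_unref(newscreen);

    Also note that a pad buffer probe runs in the streaming thread, not 
in the Gtk+ main loop, so it is safer to copy the frame and do the actual 
drawing from the main thread, for example via g_idle_add() or 
gtk_widget_queue_draw() plus an expose handler.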

Eric Zhang

  

Bruno wrote:
> Hello all,
>
> I still have some questions about gstreamer.
>
> Actually I'd like to change the way my program works, in order to 
> display a frame from the camera only once the image processing has 
> been done, and with a rectangle over the face of the person.
>
> So I changed my pipeline (removed the screen_sink element), and I'd 
> like to send a buffer from my image processing function to the GTK 
> drawing area where the camera image was displayed before. I tried to 
> do it with a GTK drawing area (and with a GTK image too, with no 
> success), but I can't find a way to change the image contained in the 
> drawing area.
>
> Here is my code:
>
>
>
> ///// IMAGE PROCESSING CALLBACK
>
> /* Callback to be called when data goes through the pad */
> static gboolean process_frame(GstElement *video_sink,
>         GstBuffer *buffer, GstPad *pad, AppData *appdata)
> {
>         int x, y;
>         // getting the pointer to the camera buffer
>         unsigned char *data_photo = (unsigned char *) GST_BUFFER_DATA(buffer);
>
> // REMOVED PART WHERE THE COORDINATES OF THE POSITION OF THE FACE ARE CALCULATED //
>
> // THIS PART IS WHAT I TRIED, BUT I HAVE A SEGMENTATION FAULT WHEN CREATING THE PIXBUF //
>         GdkPixbuf *newscreen;
>         //newscreen = gdk_pixbuf_new_from_data(data_photo,
>         //            GDK_COLORSPACE_RGB,         /* RGB colorspace */
>         //            FALSE,                      /* No alpha channel */
>         //            8,                          /* Bits per RGB component */
>         //            IMAGE_WIDTH, IMAGE_HEIGHT,  /* Dimensions */
>         //            3*IMAGE_WIDTH,              /* Number of bytes between lines (ie stride) */
>         //            NULL, NULL);                /* Callbacks */
>
>         gdk_draw_pixmap(GDK_DRAWABLE(appdata->screen),
>                 appdata->screen->style->black_gc, GDK_DRAWABLE(newscreen),
>                 0, 0, 0, 0, -1, -1);
>
>         return TRUE;
> }
>
>
>
>
>
> /////// PIPELINE
>
>
> /* Initialize the GStreamer pipeline. Below is a diagram
>  * of the pipeline that will be created:
>  *                         
>  * |Camera|  |CSP   |  |Screen|  |Screen|   |Image     |
>  * |src   |->|Filter|->|queue |->|sink  |-> |processing|->  Display
>  */
> static gboolean initialize_pipeline(AppData *appdata,
>         int *argc, char ***argv)
> {
>     GstElement *pipeline, *camera_src, *screen_sink;
>     GstElement *screen_queue;
>     GstElement *csp_filter;
>     GstCaps *caps;
>     GstBus *bus;
>     GstPad *sinkpad;
>
>     /* Initialize GStreamer */
>     gst_init(argc, argv);
>    
>     /* Create pipeline and attach a callback to its
>      * message bus */
>     pipeline = gst_pipeline_new("test-camera");
>
>     bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
>     gst_bus_add_watch(bus, (GstBusFunc)bus_callback, appdata);
>     gst_object_unref(GST_OBJECT(bus));
>    
>     /* Save pipeline to the AppData structure */
>     appdata->pipeline = pipeline;
>    
>     /* Create elements */
>     /* Camera video stream comes from a Video4Linux driver */
>     camera_src = gst_element_factory_make(VIDEO_SRC, "camera_src");
>     /* Colorspace filter is needed to make sure that the sinks understand
>      * the stream coming from the camera */
>     csp_filter = gst_element_factory_make("ffmpegcolorspace", "csp_filter");
>     /* Queue creates a new thread for the stream */
>     screen_queue = gst_element_factory_make("queue", "screen_queue");
>     /* Sink that shows the image on screen. Xephyr doesn't support XVideo
>      * extension, so it needs to use ximagesink, but the device uses
>      * xvimagesink */
>     //screen_sink = gst_element_factory_make(VIDEO_SINK, "screen_sink");
>
>     sinkpad = gst_element_get_static_pad(screen_queue, "sink");
>     gst_pad_add_buffer_probe(sinkpad, G_CALLBACK(process_frame), appdata);
>
>
>     /* Check that elements are correctly initialized */
>     if(!(pipeline && camera_src /*&& screen_sink*/ && csp_filter && screen_queue))
>     {
>         g_critical("Couldn't create pipeline elements");
>         return FALSE;
>     }
>
>    
>     /* Add elements to the pipeline. This has to be done prior to
>      * linking them */
>     gst_bin_add_many(GST_BIN(pipeline), camera_src, csp_filter,
>             screen_queue, /*screen_sink,*/ NULL);
>    
>     /* Specify what kind of video is wanted from the camera */
>     caps = gst_caps_new_simple("video/x-raw-rgb",
>             "width", G_TYPE_INT, IMAGE_WIDTH,
>             "height", G_TYPE_INT, IMAGE_HEIGHT,
>             "framerate", GST_TYPE_FRACTION, FRAMERATE, 1,
>             NULL);
>            
>
>     /* Link the camera source and colorspace filter using capabilities
>      * specified */
>     if(!gst_element_link_filtered(camera_src, csp_filter, caps))
>     {
>         return FALSE;
>     }
>     gst_caps_unref(caps);
>    
>     /* Connect Colorspace Filter -> Screen Queue -> Screen Sink.
>      * This finalizes the initialization of the screen part of the pipeline */
>     if(!gst_element_link_many(csp_filter, screen_queue, /*screen_sink,*/ NULL))
>     {
>         return FALSE;
>     }
>
>     gst_element_set_state(pipeline, GST_STATE_PAUSED);   
>
>     return TRUE;
> }
>
>
>
>
>
>
>
> /////// MAIN FUNCTION
>
>
> int main(int argc, char **argv)
> {
> // variables for face detection
>     // main structure for vjdetect
>
>     pdata = (mainstruct*) calloc(1, sizeof(mainstruct));
>     // Allocate memory for array of face detections returned by
>     // facedetector (VjDetect).
>     pdata->pFaceDetections = (FLY_Rect *) calloc(MAX_NUMBER_OF_FACE_DETECTIONS, sizeof(FLY_Rect));
>     init(pdata);
>
>     AppData appdata;
>     appdata.expression = 0;
>     GtkWidget *hbox, *vbox_button, *vbox, *button1, *button2;
>
>    
>     /* Initialize and create the GUI */
>    
>     example_gui_initialize(
>         &appdata.program, &appdata.window,
>         &argc, &argv, "Expression Detector");
>
>     vbox = gtk_vbox_new(FALSE, 0);
>     hbox = gtk_hbox_new(FALSE, 0);
>     vbox_button = gtk_vbox_new(FALSE, 0);
>
>     gtk_box_pack_start(GTK_BOX(hbox), vbox, FALSE, FALSE, 0);
>     gtk_box_pack_start(GTK_BOX(hbox), vbox_button, FALSE, FALSE, 0);
>
>     appdata.screen = gtk_drawing_area_new();
>     gtk_widget_set_size_request(appdata.screen, 500, 380);
>     gtk_box_pack_start(GTK_BOX(vbox), appdata.screen, FALSE, FALSE, 0);
>
>     button1 = gtk_toggle_button_new_with_label("Run/Stop");
>     gtk_widget_set_size_request(button1, 170, 75);
>     gtk_box_pack_start(GTK_BOX(vbox_button), button1, FALSE, FALSE, 0);
>
>     button2 = gtk_toggle_button_new_with_label("Expressions ON/OFF");
>     gtk_widget_set_size_request(button2, 170, 75);
>     gtk_box_pack_start(GTK_BOX(vbox_button), button2, FALSE, FALSE, 0);
>
>
>     appdata.anger = gtk_image_new_from_file("./smileys/anger.jpg");
>     gtk_widget_set_size_request(appdata.anger, 160, 180);
>     appdata.disgust = gtk_image_new_from_file("./smileys/disgust.jpg");
>     gtk_widget_set_size_request(appdata.disgust, 160, 180);
>     appdata.fear = gtk_image_new_from_file("./smileys/fear.jpg");
>     gtk_widget_set_size_request(appdata.fear, 160, 180);
>     appdata.happy = gtk_image_new_from_file("./smileys/happy.jpg");
>     gtk_widget_set_size_request(appdata.happy, 160, 180);
>     appdata.neutral = gtk_image_new_from_file("./smileys/neutral.jpg");
>     gtk_widget_set_size_request(appdata.neutral, 160, 180);
>     appdata.sad = gtk_image_new_from_file("./smileys/sad.jpg");
>     gtk_widget_set_size_request(appdata.sad, 160, 180);
>     appdata.surprise = gtk_image_new_from_file("./smileys/surprise.jpg");
>     gtk_widget_set_size_request(appdata.surprise, 160, 180);
>     appdata.unknown = gtk_image_new_from_file("./smileys/unknown.jpg");
>     gtk_widget_set_size_request(appdata.unknown, 160, 180);
>    
>     appdata.smiley = gtk_image_new_from_file("./smileys/unknown.jpg");
>     gtk_widget_set_size_request(appdata.smiley, 160, 180);
>     gtk_box_pack_start(GTK_BOX(vbox_button), appdata.smiley, FALSE, FALSE, 0);
>    
>     g_signal_connect(G_OBJECT(button1), "clicked",
>              G_CALLBACK(button1_pressed), &appdata);
>
>     g_signal_connect(G_OBJECT(button2), "clicked",
>              G_CALLBACK(button2_pressed), &appdata);
>
>
>     gtk_container_add(GTK_CONTAINER(appdata.window), hbox);
>
>     /* Initialize the GStreamer pipeline */
>     if(!initialize_pipeline(&appdata, &argc, &argv))
>     {
>         hildon_banner_show_information(
>                 GTK_WIDGET(appdata.window),
>                 "gtk-dialog-error",
>                 "Failed to initialize pipeline");
>     }
>
>
>
>     g_signal_connect(G_OBJECT(appdata.window), "destroy",
>             G_CALLBACK(destroy_pipeline), &appdata);
>
>
>     /* Begin the main application */
>     example_gui_run(appdata.program, appdata.window);
>
>     /* Free the GStreamer resources. Elements added
>      * to the pipeline will be freed automatically */
>    
>     return 0;
> }
>
>
> What I'd like to do is to modify the data_photo buffer to draw a 
> rectangle in it (in the process_frame function), and then draw the 
> result into the appdata.screen GtkWidget (by the way, screen is 
> declared as a GtkWidget * in the AppData structure).
>
> Thanks in advance for your help!
> Bruno
>




