Hello all,<br><br>I still have some questions about GStreamer.<br><br>Actually I'd like to change the way my program works, in order to display a frame from the camera only once the image processing has been done, and with a rectangle over the face of the person.<br>
<br>So I changed my pipeline (removed the screen_sink element), and I'd like to send a buffer from my image processing function to the GTK drawing area where the camera image was displayed before. I tried to do it with a GTK drawing area (and with a GTK image too, with no success), but I can't find a way to change the image contained in the drawing area.<br>
<br>Here is my code :<br><br><br><br>///// IMAGE PROCESSING CALLBACK<br><br>/* Callback to be called when data goes through the pad */<br>static gboolean process_frame(GstElement *video_sink,<br> GstBuffer *buffer, GstPad *pad, AppData *appdata)<br>
{<br> int x, y;<br> // getting the pointer to camera buffer<br> unsigned char *data_photo = (unsigned char *) GST_BUFFER_DATA(buffer);<br> <br><br>// REMOVED PART WHERE THE COORDINATES OF THE POSITION OF THE FACE IS CALCULATED //<br>
<br> <br>// THIS PART IS WHAT I TRIED, BUT I HAVE A SEGMENTATION FAULT WHEN CREATING PIXBUF // <br> GdkPixbuf *newscreen;<br> //newscreen = gdk_pixbuf_new_from_data(data_photo,<br> //GDK_COLORSPACE_RGB, /* RGB-colorspace */<br>
//FALSE, /* No alpha-channel */<br> //8, /* Bits per RGB-component */<br> //IMAGE_WIDTH, IMAGE_HEIGHT, /* Dimensions */<br> //3*IMAGE_WIDTH, /* Number of bytes between lines (ie stride) */<br>
//NULL, NULL); /* Callbacks */<br><br><br>gdk_draw_pixmap(GDK_DRAWABLE(appdata->screen), appdata->screen->style->black_gc, GDK_DRAWABLE(newscreen), 0, 0, 0, 0, -1, -1);<br> <br>
return TRUE;<br>}<br><br><br><br><br><br>/////// PIPELINE<br><br><br>/* Initialize the the Gstreamer pipeline. Below is a diagram<br> * of the pipeline that will be created:<br> * <br> * |Camera| |CSP | |Screen| |Screen| |Image |<br>
* |src |->|Filter|->|queue |->|sink |-> |processing|-> Display<br> */<br>static gboolean initialize_pipeline(AppData *appdata,<br> int *argc, char ***argv)<br>{<br> GstElement *pipeline, *camera_src, *screen_sink;<br>
GstElement *screen_queue;<br> GstElement *csp_filter;<br> GstCaps *caps;<br> GstBus *bus;<br> GstPad *sinkpad;<br><br> /* Initialize Gstreamer */<br> gst_init(argc, argv);<br> <br> /* Create pipeline and attach a callback to it's<br>
* message bus */<br> pipeline = gst_pipeline_new("test-camera");<br><br> bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));<br> gst_bus_add_watch(bus, (GstBusFunc)bus_callback, appdata);<br> gst_object_unref(GST_OBJECT(bus));<br>
<br> /* Save pipeline to the AppData structure */<br> appdata->pipeline = pipeline;<br> <br> /* Create elements */<br> /* Camera video stream comes from a Video4Linux driver */<br> camera_src = gst_element_factory_make(VIDEO_SRC, "camera_src");<br>
/* Colorspace filter is needed to make sure that sinks understands<br> * the stream coming from the camera */<br> csp_filter = gst_element_factory_make("ffmpegcolorspace", "csp_filter");<br>
/* Queue creates new thread for the stream */<br> screen_queue = gst_element_factory_make("queue", "screen_queue");<br> /* Sink that shows the image on screen. Xephyr doesn't support XVideo<br>
* extension, so it needs to use ximagesink, but the device uses<br> * xvimagesink */<br> //screen_sink = gst_element_factory_make(VIDEO_SINK, "screen_sink");<br><br> sinkpad = gst_element_get_static_pad(screen_queue,"sink");<br>
gst_pad_add_buffer_probe(sinkpad,G_CALLBACK(process_frame), appdata);<br><br><br> /* Check that elements are correctly initialized */<br> if(!(pipeline && camera_src /*&& screen_sink*/ && csp_filter && screen_queue))<br>
{<br> g_critical("Couldn't create pipeline elements");<br> return FALSE;<br> }<br><br> <br> /* Add elements to the pipeline. This has to be done prior to<br> * linking them */<br>
gst_bin_add_many(GST_BIN(pipeline), camera_src, csp_filter,<br> screen_queue, /*screen_sink,*/ NULL);<br> <br> /* Specify what kind of video is wanted from the camera */<br> caps = gst_caps_new_simple("video/x-raw-rgb",<br>
"width", G_TYPE_INT, IMAGE_WIDTH,<br> "height", G_TYPE_INT, IMAGE_HEIGHT,<br> "framerate", GST_TYPE_FRACTION, FRAMERATE, 1,<br> NULL);<br> <br>
<br> /* Link the camera source and colorspace filter using capabilities<br> * specified */<br> if(!gst_element_link_filtered(camera_src, csp_filter, caps))<br> {<br> return FALSE;<br> }<br> gst_caps_unref(caps);<br>
<br> /* Connect Colorspace Filter -> Screen Queue -> Screen Sink<br> * This finalizes the initialization of the screen-part of the pipeline */<br> if(!gst_element_link_many(csp_filter, screen_queue, /*screen_sink, */NULL))<br>
{<br> return FALSE;<br> }<br><br> gst_element_set_state(pipeline, GST_STATE_PAUSED); <br><br> return TRUE;<br>}<br><br><br><br><br><br><br><br>/////// MAIN FUNCTION<br><br><br>int main(int argc, char **argv)<br>
{<br>// variables for face detection<br> // main structure for vjdetect<br><br> pdata = (mainstruct*) calloc(1, sizeof(mainstruct));<br> // Allocate memory for array of face detections returned by facedetector (VjDetect).<br>
pdata->pFaceDetections = (FLY_Rect *)calloc(MAX_NUMBER_OF_FACE_DETECTIONS, sizeof(FLY_Rect));<br> init(pdata);<br><br> AppData appdata;<br> appdata.expression = 0;<br> GtkWidget *hbox, *vbox_button, *vbox, *button1, *button2;<br>
<br> <br> /* Initialize and create the GUI */<br> <br> example_gui_initialize(<br> &appdata.program, &appdata.window,<br> &argc, &argv, "Expression Detector");<br><br> vbox = gtk_vbox_new(FALSE, 0);<br>
hbox = gtk_hbox_new(FALSE, 0);<br> vbox_button = gtk_vbox_new(FALSE, 0);<br><br> gtk_box_pack_start(GTK_BOX(hbox), vbox, FALSE, FALSE, 0);<br> gtk_box_pack_start(GTK_BOX(hbox), vbox_button, FALSE, FALSE, 0);<br>
<br> appdata.screen = gtk_drawing_area_new();<br> gtk_widget_set_size_request(appdata.screen, 500, 380);<br> gtk_box_pack_start(GTK_BOX(vbox), appdata.screen, FALSE, FALSE, 0);<br><br> button1 = gtk_toggle_button_new_with_label("Run/Stop");<br>
gtk_widget_set_size_request(button1, 170, 75);<br> gtk_box_pack_start(GTK_BOX(vbox_button), button1, FALSE, FALSE, 0);<br><br> button2 = gtk_toggle_button_new_with_label("Expressions ON/OFF");<br> gtk_widget_set_size_request(button2, 170, 75);<br>
gtk_box_pack_start(GTK_BOX(vbox_button), button2, FALSE, FALSE, 0);<br><br><br> appdata.anger = gtk_image_new_from_file("./smileys/anger.jpg");<br> gtk_widget_set_size_request(appdata.anger, 160, 180);<br>
appdata.disgust = gtk_image_new_from_file("./smileys/disgust.jpg");<br> gtk_widget_set_size_request(appdata.disgust, 160, 180);<br> appdata.fear = gtk_image_new_from_file("./smileys/fear.jpg");<br>
gtk_widget_set_size_request(appdata.fear, 160, 180);<br> appdata.happy = gtk_image_new_from_file("./smileys/happy.jpg");<br> gtk_widget_set_size_request(appdata.happy, 160, 180);<br> appdata.neutral = gtk_image_new_from_file("./smileys/neutral.jpg"); <br>
gtk_widget_set_size_request(appdata.neutral, 160, 180);<br> appdata.sad = gtk_image_new_from_file("./smileys/sad.jpg");<br> gtk_widget_set_size_request(appdata.sad, 160, 180);<br> appdata.surprise = gtk_image_new_from_file("./smileys/surprise.jpg"); <br>
gtk_widget_set_size_request(appdata.surprise, 160, 180);<br> appdata.unknown = gtk_image_new_from_file("./smileys/unknown.jpg");<br> gtk_widget_set_size_request(appdata.unknown, 160, 180);<br> <br> appdata.smiley = gtk_image_new_from_file("./smileys/unknown.jpg");<br>
gtk_widget_set_size_request(appdata.smiley, 160, 180);<br> gtk_box_pack_start(GTK_BOX(vbox_button), appdata.smiley, FALSE, FALSE, 0);<br> <br> g_signal_connect(G_OBJECT(button1), "clicked",<br> G_CALLBACK(button1_pressed), &appdata);<br>
<br> g_signal_connect(G_OBJECT(button2), "clicked",<br> G_CALLBACK(button2_pressed), &appdata);<br><br><br> gtk_container_add(GTK_CONTAINER(appdata.window), hbox);<br><br> /* Initialize the GTK pipeline */<br>
if(!initialize_pipeline(&appdata, &argc, &argv))<br> {<br> hildon_banner_show_information(<br> GTK_WIDGET(appdata.window),<br> "gtk-dialog-error",<br> "Failed to initialize pipeline");<br>
}<br><br><br><br> g_signal_connect(G_OBJECT(appdata.window), "destroy",<br> G_CALLBACK(destroy_pipeline), &appdata);<br><br><br> /* Begin the main application */<br> example_gui_run(appdata.program, appdata.window);<br>
<br> /* Free the gstreamer resources. Elements added<br> * to the pipeline will be freed automatically */<br> <br> return 0;<br>}<br><br><br>What I'd like to do is to modify the data_photo buffer to draw a rectangle in it (in the process_frame function), and draw the content in the appdata.screen GtkWidget. (By the way, screen is declared as a GtkWidget * in the appdata structure.)<br>
<br>Thanks in advance for your help!<br>Bruno<br><br>