Meego camera input
klen
klen.copic at gmail.com
Mon Nov 14 04:06:19 PST 2011
I managed to get GStreamer working with the N9. The pipeline uses an appsink
element to capture images frame by frame from the camera, and I use it
together with OpenCV. The NewFrameAdded() callback is only invoked for the
first frame; for performance reasons I then poll later frames myself with the
PoolFrame() function.
CODE:
static gboolean InitializePipeline(int *argc, char ***argv, int width, int height, IplImage **_frame3C)
{
    GstElement *image_sink, *csp_filter, *image_filter;
    GstCaps *caps;

    buffer_width = width;
    buffer_hight = height;

    /* Allocate memory for the frames */
    frame3C = cvCreateImageHeader(cvSize(buffer_width, buffer_hight), IPL_DEPTH_8U, 3); /* colour image */
    frame1  = cvCreateImage(cvSize(buffer_width, buffer_hight), IPL_DEPTH_8U, 1);       /* grey, current */
    frame2  = cvCreateImage(cvSize(buffer_width, buffer_hight), IPL_DEPTH_8U, 1);       /* grey, previous */

    /* Initialize flags */
    cold_start = true;

    /* Initialize GStreamer */
    gst_init(argc, argv);

    /* Create elements */
    /* The camera video stream comes from a Video4Linux2 driver */
    camera_src = gst_element_factory_make("v4l2camsrc", "camera_src");
    g_object_set(G_OBJECT(camera_src), "driver-name", "omap3cam", NULL); /* thanks BBNS_ @ maemo IRC, aka Yun-Ta Tsai */

    /* Colorspace filter is needed to make sure the sinks understand the stream coming from the camera */
    csp_filter = gst_element_factory_make("ffmpegcolorspace", "csp_filter");

    /* Filter to convert the stream into a format the application can use */
    image_filter = gst_element_factory_make("ffmpegcolorspace", "image_filter");

    /* The appsink the application pulls image buffers from */
    image_sink = gst_element_factory_make("appsink", "image_sink");

    /* Create the pipeline and get its message bus */
    pipeline = gst_pipeline_new("test-camera");
    bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));

    /* Check that the elements were correctly created */
    if (!(pipeline && camera_src && csp_filter && image_sink && image_filter))
    {
        g_critical("Couldn't create pipeline elements");
        return FALSE;
    }

    /* Add elements to the pipeline. This has to be done prior to linking them */
    gst_bin_add_many(GST_BIN(pipeline), camera_src, csp_filter, image_filter, image_sink, NULL);

    /* Specify what kind of video is wanted from the camera,
       e.g. video/x-raw-yuv,format=(fourcc)UYVY,width=400,height=240 */
    char caps_str[100];
    sprintf(caps_str, "video/x-raw-yuv,format=(fourcc)UYVY,width=%i,height=%i", width, height);
    printf("CAPS STR: %s\n", caps_str);
    caps = gst_caps_from_string(caps_str); /* framerate=[1/30,30/1] */

    /* Link the camera source and colorspace filter using the capabilities specified */
    if (!gst_element_link_filtered(camera_src, csp_filter, caps))
    {
        return FALSE;
    }
    gst_caps_unref(caps);

    caps = gst_caps_new_simple("video/x-raw-rgb",
                               "width", G_TYPE_INT, width,
                               "height", G_TYPE_INT, height,
                               NULL);

    /* Link the image branch of the pipeline */
    if (!gst_element_link_many(csp_filter, image_filter, NULL)) return FALSE;
    if (!gst_element_link_filtered(image_filter, image_sink, caps)) return FALSE;

    /* Clean up */
    gst_caps_unref(caps);

    /* Make the appsink emit the new-buffer signal; the handler switches it to pull mode after the first frame */
    g_object_set(G_OBJECT(image_sink), "emit-signals", TRUE, NULL);
    g_signal_connect(image_sink, "new-buffer", G_CALLBACK(NewFrameAdded), NULL);

    return TRUE;
}
void NewFrameAdded(GstAppSink *_appsink)
{
    if (cold_start)
    {
        /* First frame: grab the appsink and switch it to pull mode */
        printf("\nAppsink initialized!\n");
        appsink = _appsink;
        gst_app_sink_set_drop(appsink, true);
        gst_app_sink_set_emit_signals(appsink, false); /* don't emit a signal on new frames; they are polled instead */
        gst_app_sink_set_max_buffers(appsink, 1);
        cold_start = false;
        scanning_status = true;
    }
    buffer = gst_app_sink_pull_buffer(appsink);
}
IplImage *PoolFrame(void)
{
    /* Block until the appsink becomes available */
    while (appsink == NULL);

    if (appsink != NULL)
    {
        UnrefGstFrame(); /* releases the previously pulled buffer (not shown here) */

        unsigned char *data;
        buffer = gst_app_sink_pull_buffer(appsink);
        data = (unsigned char *) GST_BUFFER_DATA(buffer);

        /* Alternate between the two grey frames so the previous one stays valid */
        if (!buffer_swap)
        {
            cvSetData(frame3C, data, buffer_width * 3);
            cvCvtColor(frame3C, frame1, CV_RGB2GRAY); /* can be optimized */
            currentFrame = frame1;
        }
        else
        {
            cvSetData(frame3C, data, buffer_width * 3);
            cvCvtColor(frame3C, frame2, CV_RGB2GRAY); /* can be optimized */
            currentFrame = frame2;
        }
        buffer_swap = !buffer_swap;

        if (focusStatus == GST_PHOTOGRAPHY_FOCUS_STATUS_RUNNING)
        {
            PoolAutoFocusBusMessage();
        }
        return currentFrame;
    }
    return currentFrame;
}
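For reference, a rough sketch of how these functions could be driven from a
main() (this is only an illustration, not my actual application). It assumes
the globals and functions defined above (pipeline, InitializePipeline,
PoolFrame) and the old OpenCV C API; the 400x240 size, the "camera" window
and the Esc-to-quit handling are just placeholders:

/* Usage sketch only; assumes the globals/functions posted above. */
#include <gst/gst.h>
#include <opencv/highgui.h>

int main(int argc, char **argv)
{
    IplImage *frame3C = NULL;

    if (!InitializePipeline(&argc, &argv, 400, 240, &frame3C))
        return 1;

    /* Start the camera; the appsink's new-buffer signal fires once and
       NewFrameAdded() then switches it into pull mode */
    gst_element_set_state(pipeline, GST_STATE_PLAYING);

    for (;;)
    {
        IplImage *grey = PoolFrame();   /* blocks until a frame is available */
        cvShowImage("camera", grey);
        if (cvWaitKey(10) == 27)        /* Esc quits */
            break;
    }

    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);
    return 0;
}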
Hope it will be useful
Cheers,
K