Why does my appsink callback hit a NULL-buffer assertion (GStreamer-CRITICAL)?

JongIlLee lji890214 at naver.com
Mon Sep 5 10:41:52 UTC 2016


hi

I have built several pipelines in my application.

Only one of the pipelines should be playing at a time, so I toggle them on and off.

However, when I run it, the following error occurs.

What is the cause? Please advise.

(Rear_cam_recorder:1015): GStreamer-CRITICAL **: gst_buffer_iterate_meta:
assertion 'buffer != NULL' failed

(Rear_cam_recorder:1015): GStreamer-CRITICAL **: gst_buffer_add_meta:
assertion 'buffer != NULL' failed

The code I wrote is below.

---------------------------------------------------------------------------------------------------------

GstCaps *v4l2src_caps, *appsink_caps, *appsrc_caps,*png_caps,*pre_caps;

	GstBuffer* buffer;
	GstMemory* memory;
	GstMapInfo mapinfo;

	
	//SRC Define
	mRecSrcData.pipe = gst_pipeline_new("rear_src_pipeline");
	g_assert(mRecSrcData.pipe);

	mPreSinkData.pre_pipe = gst_pipeline_new("rear_pre_pipeline");
	g_assert(mPreSinkData.pre_pipe);

	mRecSrcData.imxv4l2src = gst_element_factory_make("imxv4l2src",
"rear_v4l2src");
	g_assert(mRecSrcData.imxv4l2src);

	mRecSrcData.capsfilter = gst_element_factory_make("capsfilter",
"rear_capsfilter");
	g_assert(mRecSrcData.capsfilter);
	
	mRecSrcData.queue1 = gst_element_factory_make("queue", "rear_queue1");
	g_assert(mRecSrcData.queue1);
	mRecSrcData.ipuconverter = gst_element_factory_make("imxvideoconvert_ipu",
"rear_ipu_converter");
	g_assert(mRecSrcData.ipuconverter);

	mRecSrcData.ipuconverter1 = gst_element_factory_make("imxvideoconvert_ipu",
"rear_pre_ipu_converter");
	g_assert(mRecSrcData.ipuconverter1);

	mRecSrcData.vpuenc = gst_element_factory_make("vpuenc_h264",
"rear_vpuenc");
	g_assert(mRecSrcData.vpuenc);

	mRecSrcData.queue2 = gst_element_factory_make("queue", "rear_queue2");
	g_assert(mRecSrcData.queue2);

	mRecSrcData.appsink2= gst_element_factory_make("appsink",
"rear_appsink_preview");
	g_assert(mRecSrcData.appsink2);

	mRecSrcData.appsink= gst_element_factory_make("appsink", "rear_appsink");
	g_assert(mRecSrcData.appsink);

	mRecSrcData.tee= gst_element_factory_make("tee", "rear_tee");
	g_assert(mRecSrcData.tee);

	mRecSrcData.autovideosink = gst_element_factory_make("autovideosink",
"rear_autovideosink");
	g_assert(mRecSrcData.autovideosink);
	
	mRecSrcData.source_bus =
gst_pipeline_get_bus(GST_PIPELINE(mRecSrcData.pipe));
	g_assert(mRecSrcData.source_bus);
#if 0
	mRecSrcData.textoverlay = gst_element_factory_make("textoverlay",
"textoverlay");
	g_assert(mRecSrcData.textoverlay);
#endif	


	gst_bin_add_many(
		GST_BIN(mRecSrcData.pipe), 
		mRecSrcData.imxv4l2src, 
		mRecSrcData.tee,
		mRecSrcData.capsfilter,
		mRecSrcData.queue1,
		mRecSrcData.ipuconverter, 
		mRecSrcData.vpuenc, 
		mRecSrcData.appsink,
		mRecSrcData.queue2,
		mRecSrcData.ipuconverter1,
		mRecSrcData.appsink2,
		NULL);


	/* Head of the source pipeline: camera -> capsfilter -> tee. */
	if(gst_element_link_many(
		mRecSrcData.imxv4l2src, 
		mRecSrcData.capsfilter, 
		mRecSrcData.tee,
		NULL) != TRUE)
	{
		printf("RecSrcData link failed!\n");
		gst_object_unref(mRecSrcData.pipe);
		return -1;
	}
	
	/* Recording branch: queue1 -> IPU convert -> H.264 encode -> appsink. */
	if(gst_element_link_many(
		mRecSrcData.queue1,
		mRecSrcData.ipuconverter, 
		mRecSrcData.vpuenc, 
		mRecSrcData.appsink, 
		NULL) != TRUE)
	{
		printf("RecSrcData link failed!\n");
		gst_object_unref(mRecSrcData.pipe);
		return -1;
	}
	/* Preview branch: queue2 -> IPU convert -> appsink2 (raw frames). */
	if(gst_element_link_many(
		mRecSrcData.queue2,
		mRecSrcData.ipuconverter1,
		mRecSrcData.appsink2, 
		NULL) != TRUE)
	{
		printf("RecSrcData link failed!\n");
		gst_object_unref(mRecSrcData.pipe);
		return -1;
	}

#if 0
	preview_caps = gst_caps_new_simple("video/x-raw-rgb",
				"width", G_TYPE_INT, VGA_WIDTH,
				"height", G_TYPE_INT, VGA_HEIGHT,
				NULL);
#endif
		/* Link the camera source and colorspace filter using capabilities
	 * specified */

	/* tee exposes request pads ("src_%u"); fetch the template once and
	 * request one src pad per branch below. */
	if( !(mNormalRecGstPad.tee_src_pad_template =
gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (mRecSrcData.tee),
"src_%u"))) {
	  	g_critical ("Unable to get pad template!");
	  	return -1;
	}

	mNormalRecGstPad.queue_rec_pad= gst_element_get_static_pad
(mRecSrcData.queue1, "sink");
	mNormalRecGstPad.queue_preview_pad= gst_element_get_static_pad
(mRecSrcData.queue2, "sink");
	
	mNormalRecGstPad.tee_rec_pad= gst_element_request_pad (mRecSrcData.tee,
mNormalRecGstPad.tee_src_pad_template, NULL, NULL);
 	g_print ("Obtained request pad %s for rec branch.\n", gst_pad_get_name
(mNormalRecGstPad.tee_rec_pad));
  	
	
  	mNormalRecGstPad.tee_preview_pad= gst_element_request_pad
(mRecSrcData.tee, mNormalRecGstPad.tee_src_pad_template, NULL, NULL);
  	g_print ("Obtained request pad %s for preview branch.\n",
gst_pad_get_name (mNormalRecGstPad.tee_preview_pad));
  	
	
	/* Manually link each requested tee src pad to its queue's sink pad. */
 	if (gst_pad_link (mNormalRecGstPad.tee_rec_pad,
mNormalRecGstPad.queue_rec_pad) != GST_PAD_LINK_OK ||
      		gst_pad_link (mNormalRecGstPad.tee_preview_pad,
mNormalRecGstPad.queue_preview_pad) != GST_PAD_LINK_OK) {
    		g_printerr ("Tee could not be linked.\n");
    		gst_object_unref (mRecSrcData.pipe);
    		return -1;
  	}
	/* Static sink pads are no longer needed after linking; drop our refs. */
	gst_object_unref (mNormalRecGstPad.queue_rec_pad);
  	gst_object_unref (mNormalRecGstPad.queue_preview_pad);
#if 0	
    // Get sink pad for textoverlay and make it a ghostpad for bin
    mNormalRecGstPad.pad_textoverlay_sink =
gst_element_get_pad(mRecSrcData.textoverlay, "appsink");
    gst_element_add_pad(mRecSrcData.pipe, gst_ghost_pad_new("sink",
mNormalRecGstPad.pad_textoverlay_sink));

    // Link elements: textoverlay -> clockoverlay -> videosink
    gst_element_link_pads(mRecSrcData.textoverlay, "imxv4l2src",
mRecSrcData.clockoverlay, "sink");
    gst_element_link_pads(mRecSrcData.clockoverlay, "imxv4l2src",
mRecSrcData.appsink, "sink");
	
	g_object_set (G_OBJECT (playbin2), "appsink", mybin, NULL);
#endif

	/* Raw caps forced onto the camera via the capsfilter.
	 * NOTE(review): video/x-raw normally uses the "format" field, not
	 * "stream-format" (compare pre_caps below) — confirm this is intended. */
	v4l2src_caps = gst_caps_new_simple("video/x-raw", 
								"stream-format", G_TYPE_STRING, STREAM_FORMAT, 
								"width", G_TYPE_INT, VGA_WIDTH,
								"height", G_TYPE_INT, VGA_HEIGHT,
								"framerate", GST_TYPE_FRACTION, VGA_FRAMERATE,1,NULL);
#if 0
	g_object_set(G_OBJECT(mRecSrcData.textoverlay), 
								"font-desc","Sans 20",
								"text", "Normal recording",
								"halignment", GST_TEXT_OVERLAY_HALIGN_RIGHT,
								"valignment", GST_TEXT_OVERLAY_VALIGN_BOTTOM,
								"shaded-background", TRUE,
								NULL);
#endif

	g_object_set(G_OBJECT(mRecSrcData.imxv4l2src), 
								"device", REAR_CAM_DEVICE, NULL);

	g_object_set(G_OBJECT(mRecSrcData.capsfilter),
								"caps", v4l2src_caps, NULL);
	
	g_object_set(G_OBJECT(mRecSrcData.ipuconverter), "composition-meta-enable",
TRUE,
										"in-place",TRUE,NULL);	
	g_object_set(G_OBJECT(mRecSrcData.ipuconverter1),
"composition-meta-enable", TRUE,
										"in-place",TRUE,NULL);	

								
	g_object_set(G_OBJECT(mRecSrcData.vpuenc), "bitrate", REAR_CAM_BITRATE,
NULL);

	/* Build the H.264 codec_data (SPS/PPS) buffer attached to the avc caps.
	 * NOTE(review): gst_buffer_map() is never paired with gst_buffer_unmap()
	 * in this fragment, so the buffer stays mapped for writing — confirm an
	 * unmap happens elsewhere, or add one right after the memcpy. */
	buffer = gst_buffer_new();
	memory = gst_allocator_alloc(NULL, REAR_NORMAL_CODEC_DATA_SIZE, NULL);
	gst_buffer_insert_memory(buffer, -1, memory);
	gst_buffer_map(buffer, &mapinfo, GST_MAP_WRITE);
	memcpy(mapinfo.data, rear_normal_codec_data, REAR_NORMAL_CODEC_DATA_SIZE);

	/* Encoded-branch caps: avc-in-au H.264 with the codec_data above.
	 * The caps take their own reference on `buffer`; the same buffer is
	 * reused for appsrc_caps further down. */
	appsink_caps = gst_caps_new_simple("video/x-h264",
									"stream-format",G_TYPE_STRING,"avc",
									"alignment", G_TYPE_STRING, "au",
									"width", G_TYPE_INT, VGA_WIDTH,
									"height", G_TYPE_INT, VGA_HEIGHT,
									"framerate", GST_TYPE_FRACTION, VGA_FRAMERATE,1,
									"codec_data", GST_TYPE_BUFFER, buffer,
									"pixel-aspect-ratio",GST_TYPE_FRACTION,1,1,NULL);
	/* Raw preview caps; also reused for the preview appsrc later. */
	pre_caps =  gst_caps_new_simple("video/x-raw",
									"format", G_TYPE_STRING, STREAM_FORMAT, 
									"width", G_TYPE_INT, VGA_WIDTH,
									"height", G_TYPE_INT, VGA_HEIGHT,
									"framerate", GST_TYPE_FRACTION, VGA_FRAMERATE,1,NULL);


	/* emit-signals=TRUE makes the appsinks fire "new-sample" (handled below). */
	g_object_set (G_OBJECT (mRecSrcData.appsink), "emit-signals", TRUE, "caps",
appsink_caps, NULL);
	g_object_set (G_OBJECT (mRecSrcData.appsink2), "emit-signals", TRUE,
"caps",pre_caps, "sync", FALSE, NULL);
	
	g_signal_connect (mRecSrcData.appsink, "new-sample", G_CALLBACK
(on_normal_new_sample_from_sink), this);
	g_signal_connect (mRecSrcData.appsink2, "new-sample", G_CALLBACK
(on_normal_pre_new_sample_from_sink), this);	


	gst_bus_add_watch (mRecSrcData.source_bus, (GstBusFunc)
on_normal_source_message, this);


	/* Drop our refs on the tee request pads (the tee keeps them active). */
  	gst_object_unref (mNormalRecGstPad.tee_rec_pad);
  	gst_object_unref (mNormalRecGstPad.tee_preview_pad);

	//SINK Define
	/* Recording sink pipeline: appsrc (fed from the encoded appsink)
	 * -> queue -> h264parse -> mp4mux -> filesink. */
	mRecSinkData.pipe = gst_pipeline_new("rear_sink_pipeline");
	g_assert(mRecSinkData.pipe);

	mRecSinkData.appsource = gst_element_factory_make("appsrc", "rear_appsrc");
	g_assert(mRecSinkData.appsource);

	mRecSinkData.queue = gst_element_factory_make("queue", "rear_queue3");
	g_assert(mRecSinkData.queue);
	
	mRecSinkData.h264parse= gst_element_factory_make("h264parse",
"rear_h264parse");
	g_assert(mRecSinkData.h264parse);
		
	mRecSinkData.mp4mux= gst_element_factory_make("mp4mux", "rear_mp4mux");
	g_assert(mRecSinkData.mp4mux);

	mRecSinkData.filesink= gst_element_factory_make("filesink",
"rear_filesink");
	g_assert(mRecSinkData.filesink);

	/* Same avc caps as the encoded appsink, reusing the codec_data buffer. */
	appsrc_caps = gst_caps_new_simple("video/x-h264",
									"stream-format",G_TYPE_STRING,"avc",
									"alignment", G_TYPE_STRING, "au",
									"width", G_TYPE_INT, VGA_WIDTH,
									"height", G_TYPE_INT, VGA_HEIGHT,
									"framerate", GST_TYPE_FRACTION, VGA_FRAMERATE,1,
									"codec_data", GST_TYPE_BUFFER, buffer,
									"pixel-aspect-ratio",GST_TYPE_FRACTION,1,1,NULL);

	/* NOTE(review): "format" is GST_FORMAT_BUFFERS here while the preview
	 * appsrc below uses GST_FORMAT_TIME; mp4mux generally needs timestamped
	 * input (do-timestamp=TRUE compensates) — verify this combination. */
	g_object_set (G_OBJECT(mRecSinkData.appsource), "caps",appsrc_caps,
								    "stream-type",GST_APP_STREAM_TYPE_STREAM,
								    "typefind", TRUE,
								    "is-live", TRUE,
								    "block", FALSE,
								    "do-timestamp", TRUE,
								    "num-buffers", REAR_NUM_BUFFERS,
								    "format", GST_FORMAT_BUFFERS,NULL);




	/* Flow control for the recording appsrc. */
	g_signal_connect (mRecSinkData.appsource, "need-data", G_CALLBACK
(normal_start_feed), this);
	g_signal_connect (mRecSinkData.appsource, "enough-data", G_CALLBACK
(normal_stop_feed), this);
  
	mRecSinkData.sink_bus =
gst_pipeline_get_bus(GST_PIPELINE(mRecSinkData.pipe));
	g_assert(mRecSinkData.sink_bus);
	
	gst_bus_add_watch (mRecSinkData.sink_bus, (GstBusFunc)
on_normal_sink_message, this);



	gst_bin_add_many(GST_BIN(mRecSinkData.pipe), mRecSinkData.appsource,
mRecSinkData.queue, mRecSinkData.h264parse,
		mRecSinkData.mp4mux, mRecSinkData.filesink, NULL);

	if(gst_element_link_many(mRecSinkData.appsource, mRecSinkData.queue,
mRecSinkData.h264parse, 
		mRecSinkData.mp4mux, mRecSinkData.filesink,NULL) != TRUE)
	{
		printf("RecSinkData gst_element_link_many failed!\n");
		gst_object_unref(mRecSinkData.pipe);
		return -1;
	}
	
	//preview 
	/* NOTE(review): of the preview elements created here, only
	 * pre_appsource and pre_queue are ever added/linked below; the mixer,
	 * multifilesrc, pngdec, videoconverts and glsink are created but unused
	 * in this fragment — confirm whether that is leftover code. */
	mPreSinkData.pre_appsource = gst_element_factory_make("appsrc",
"rear_pre_appsrc");
	g_assert(mPreSinkData.pre_appsource);
	mPreSinkData.pre_videomixer = gst_element_factory_make("glvideomixer",
"rear_glvideomixer");
	g_assert(mPreSinkData.pre_videomixer);
	mPreSinkData.pre_multifilesrc = gst_element_factory_make("multifilesrc",
"rear_multifilesrc");
	g_assert(mPreSinkData.pre_multifilesrc);
	mPreSinkData.pre_pngdec = gst_element_factory_make("pngdec",
"rear_pngdec");
	g_assert(mPreSinkData.pre_pngdec);
	mPreSinkData.pre_videoconvert1 = gst_element_factory_make("videoconvert",
"rear_imxvideoconvert_ipu1");
	g_assert(mPreSinkData.pre_videoconvert1);
	mPreSinkData.pre_videoconvert2 = gst_element_factory_make("videoconvert",
"rear_imxvideoconvert_ipu2");
	g_assert(mPreSinkData.pre_videoconvert2);
	mPreSinkData.pre_glsink = gst_element_factory_make("glimagesinkelement",
"rear_glimagesinkelement");
	g_assert(mPreSinkData.pre_glsink);
	mPreSinkData.pre_queue = gst_element_factory_make("queue", "queue4");
	g_assert(mPreSinkData.pre_queue);

	mPreSinkData.pre_sink_bus =
gst_pipeline_get_bus(GST_PIPELINE(mPreSinkData.pre_pipe));
	g_assert(mPreSinkData.pre_sink_bus);
	


								    
	/* Preview appsrc is timestamp-driven and reuses the raw pre_caps. */
	g_object_set (G_OBJECT(mPreSinkData.pre_appsource), "format",
GST_FORMAT_TIME,"caps",pre_caps, NULL);

	png_caps = gst_caps_new_simple("image/png",
									"framerate", GST_TYPE_FRACTION, 1,60,NULL);
	
	g_object_set(G_OBJECT(mPreSinkData.pre_multifilesrc), 
								"caps",png_caps,
								"location", REAR_OVERLAY_IMAGE, NULL);
	g_object_set(G_OBJECT(mPreSinkData.pre_glsink), 
								"force-aspect-ratio", TRUE, NULL);

	gst_bus_add_watch (mPreSinkData.pre_sink_bus, (GstBusFunc)
on_normal_pre_sink_message, this);

	/* Preview pipeline proper: appsrc -> queue -> autovideosink.
	 * Note: autovideosink is borrowed from mRecSrcData (created earlier). */
	gst_bin_add_many(
	GST_BIN(mPreSinkData.pre_pipe), 
	mPreSinkData.pre_appsource,
	mPreSinkData.pre_queue,
	mRecSrcData.autovideosink,
	NULL);


	if(gst_element_link_many(
			mPreSinkData.pre_appsource,
			mPreSinkData.pre_queue,
			mRecSrcData.autovideosink,
			NULL) != TRUE)
		{
			printf("RecSrcData videomixer 1 failed!\n");
			gst_object_unref(mPreSinkData.pre_pipe);
			return -1;
		}

---------------------------------------------------------------------------------------------------
/*
 * appsink "new-sample" callback for the preview branch.
 *
 * Pulls the sample the appsink just received and pushes a copy of its
 * buffer into the preview pipeline's appsrc (pre_appsource).
 *
 * elt      : the appsink that emitted the signal.
 * instance : the NormalRecorder passed as user data at g_signal_connect.
 *
 * Returns GST_FLOW_OK normally, GST_FLOW_EOS when no sample is available
 * (appsink stopped or at EOS).
 */
static GstFlowReturn
on_normal_pre_new_sample_from_sink (GstElement * elt, void * instance)
{
	printf("Rear on_normal_pre_new_sample_from_sink!!\n");
	NormalRecorder *recorder = reinterpret_cast<NormalRecorder*>(instance);

	/* gst_app_sink_pull_sample() returns NULL when the sink is flushing
	 * or at EOS (e.g. while a pipeline is being toggled off).  The
	 * original code passed that NULL on unchecked, which is what produced
	 * the "assertion 'buffer != NULL' failed" GStreamer-CRITICALs. */
	GstSample *sample = gst_app_sink_pull_sample (GST_APP_SINK (elt));
	if (sample == NULL)
		return GST_FLOW_EOS;

	GstBuffer *buffer = gst_sample_get_buffer (sample);
	if (buffer != NULL) {
		/* The sample owns its buffer; push a copy so appsrc can take
		 * ownership of the buffer we hand it. */
		GstBuffer *app_buffer = gst_buffer_copy (buffer);
		GstFlowReturn ret = gst_app_src_push_buffer (GST_APP_SRC
(recorder->mPreSinkData.pre_appsource), app_buffer);
		if (ret != GST_FLOW_OK)
			g_printerr ("pre_appsource push failed (%d)\n", (int) ret);
	} else {
		printf("Why not Buffer not??????!!\n");
	}

	gst_sample_unref (sample);
	return GST_FLOW_OK;
}
--
View this message in context: http://gstreamer-devel.966125.n4.nabble.com/why-not-appsink-buffer-error-tp4679422.html
Sent from the GStreamer-devel mailing list archive at Nabble.com.


More information about the gstreamer-devel mailing list