How to get appsink to finalize the file?

killerrats koreysjunkmail at gmail.com
Tue Oct 18 21:47:32 UTC 2016


I have finally figured out how to rewrite the file. I'm now trying to figure
out how to:
1. block vtee and atee
2. send EOS through avimux
3. set avimux to NULL
4. remove avimux
5. add avimux back
6. relink atee and vtee to the queues
7. link appsink
8. set avimux back to PLAYING
(a condensed sketch of this sequence follows the pipeline diagram below)

I can do this whole process when the audio branch isn't split in, but once I
add the audio it seems to stop after setting avimux back to PLAYING. Does
anybody have any idea why it won't start playing again?

///// -------------- PIPELINE ------------- /////////////

          / - q1 - rtph264depay - h264parse - - - - vtee - q3 - \
source --                                                        avimux -- appsink
          \ - q2 - rtpmp4gdepay - aacparse - faad - atee - q4 - /
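
A condensed sketch of the sequence in the list above, assuming GStreamer 1.x. The
names here (n_blocked, blocked_pads, blocked_ids, sketch_mux, sketch_block_branches)
are placeholders rather than the _videoData fields used further down; the point of
the sketch is that only the last of the two blocked branches pushes EOS into avimux,
and both branches stay blocked until the swap is finished:

/// ---- SKETCH: wait for BOTH tee branches before draining avimux ---- ////////
static gint     n_blocked = 0;      // tee branches blocked so far
static GstPad  *blocked_pads[2];    // [0] = vtee src pad, [1] = atee src pad
static gulong   blocked_ids[2];     // ids of the blocking probes below
static GstElement *sketch_mux;      // the avimux instance

// fires once the EOS pushed into the mux has drained out of its src pad
static GstPadProbeReturn
sketch_mux_drained_cb (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
{
	if (GST_EVENT_TYPE (GST_PAD_PROBE_INFO_DATA (info)) != GST_EVENT_EOS)
		return GST_PAD_PROBE_PASS;
	gst_pad_remove_probe (pad, GST_PAD_PROBE_INFO_ID (info));

	// steps 3-8 (NULL / remove / re-add / relink / PLAYING) go here,
	// exactly as in event_probe_cb() further down

	// finally unblock both tee branches so data flows into the new file
	gst_pad_remove_probe (blocked_pads[0], blocked_ids[0]);
	gst_pad_remove_probe (blocked_pads[1], blocked_ids[1]);
	g_atomic_int_set (&n_blocked, 0);
	return GST_PAD_PROBE_DROP;        // don't let the old EOS reach appsink
}

// fires when one tee branch becomes blocked (step 1)
static GstPadProbeReturn
sketch_blocked_cb (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
{
	// only the LAST branch to block installs the drain probe and sends EOS
	if (g_atomic_int_add (&n_blocked, 1) + 1 < 2)
		return GST_PAD_PROBE_OK;

	GstPad *muxsrc = gst_element_get_static_pad (sketch_mux, "src");
	gst_pad_add_probe (muxsrc,
		(GstPadProbeType) (GST_PAD_PROBE_TYPE_BLOCK | GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM),
		sketch_mux_drained_cb, user_data, NULL);
	gst_object_unref (muxsrc);

	gst_element_send_event (sketch_mux, gst_event_new_eos ());   // step 2
	return GST_PAD_PROBE_OK;          // keep this branch blocked meanwhile
}

// step 1: block both tee src pads; everything else follows from the callbacks
static void
sketch_block_branches (void)
{
	blocked_ids[0] = gst_pad_add_probe (blocked_pads[0],
		GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM, sketch_blocked_cb, NULL, NULL);
	blocked_ids[1] = gst_pad_add_probe (blocked_pads[1],
		GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM, sketch_blocked_cb, NULL, NULL);
}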

________________________________________
\/ ---- CODE  BELOW ----- \/
________________________________________

/// ---- METHODS ------ ////////
int main (int argc, char *argv[]);
static GstPadProbeReturn event_probe_cb (GstPad * pad, GstPadProbeInfo * info, gpointer user_data);
static GstPadProbeReturn pad_probe_cb (GstPad * pad, GstPadProbeInfo * info, gpointer user_data);
static gboolean timeout_cb (gpointer user_data);
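
The code below also relies on a few helpers that aren't declared here
(cb_new_rtspsrc_pad, appsink_ToFile, check_for_eos, bus_cb, elementLinking,
CheckPadLinkReturn). For completeness, this is roughly what the two small
wrappers are assumed to do (a sketch, not the actual implementations):

static gboolean
elementLinking (GstElement * a, GstElement * b, const char *failmsg)
{
	// link two elements and print the supplied message on failure
	if (!gst_element_link (a, b)) {
		g_printerr ("%s", failmsg);
		return FALSE;
	}
	return TRUE;
}

static void
CheckPadLinkReturn (GstPadLinkReturn ret)
{
	// print the GstPadLinkReturn value if a pad link failed
	if (ret != GST_PAD_LINK_OK)
		g_printerr ("pad link failed: %d\n", (int) ret);
}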


/// ------------- MAIN ----------- //////////
int main (int argc, char *argv[])
{
	GMainLoop * loop;

	// Win32: move the console window out of the way
	HWND console = GetConsoleWindow();
	MoveWindow(console, 1300, 0, 1280, 1024, TRUE);


	int DebugNumber = 1;	// debug level 1,2,3,4,5

	gst_debug_set_threshold_from_string (("*:" + std::to_string (DebugNumber)).c_str (), TRUE);

	//Initialize GStreamer
	gst_init(&argc,&argv);

	_videoData.srcPipeline = gst_pipeline_new ("pipeline");

	_videoData.source = gst_element_factory_make ("rtspsrc", NULL);
	g_object_set (_videoData.source, "location", "[IP]", NULL);
	g_object_set (_videoData.source, "protocols", 4, NULL);

	_videoData.rtpdepay = gst_element_factory_make ("rtph264depay", "depay");
	_videoData.parse = gst_element_factory_make ("h264parse", "parse");
	_videoData.decodebin = gst_element_factory_make ("avdec_h264", "decode");
	_videoData.appsink = gst_element_factory_make ("appsink", "sink");
	g_object_set(GST_OBJECT(_videoData.appsink),"emit-signals",TRUE,NULL);
	_videoData.q1 = gst_element_factory_make ("queue", "video-queue");
	_videoData.q3 = gst_element_factory_make ("queue", "video-queueRec");
	_videoData.q4 = gst_element_factory_make ("queue", "audio-queueRec");
	_videoData.mux = gst_element_factory_make ("avimux", "avimux");
	_videoData.vtee = gst_element_factory_make ("tee", "videotee");
	_videoData.atee = gst_element_factory_make ("tee", "audiotee");

	// audio
	_videoData.q2 = gst_element_factory_make ("queue", "audio-queue");
	_videoData.audioRtpDepay = gst_element_factory_make ("rtpmp4gdepay", "adepay");
	_videoData.audioParse = gst_element_factory_make ("aacparse", "audioparse");
	_videoData.audioDec = gst_element_factory_make ("faad", "audiodecode");

	gst_bin_add_many (GST_BIN (_videoData.srcPipeline),
			_videoData.source,
			_videoData.q1, _videoData.rtpdepay, _videoData.parse, _videoData.vtee,
			_videoData.q2, _videoData.audioRtpDepay, _videoData.audioParse, _videoData.audioDec, _videoData.atee,
			_videoData.q3, _videoData.q4, _videoData.mux,
			NULL);

	// ---  LINKING OBJECTS ...  --- //

		std::cout << std::endl << "linked main pipeline" << std::endl;
		
		// Link the recording queues (q3/q4) to the avimux request pads
		_videoData.SourcePadVideoQ3ToMux = gst_element_get_static_pad (_videoData.q3, "src");
		_videoData.SinkPadVideoQ3ToMux = gst_element_get_request_pad (_videoData.mux, "video_00");
		gst_pad_link (_videoData.SourcePadVideoQ3ToMux, _videoData.SinkPadVideoQ3ToMux);

		_videoData.SourcePadAudioQ4ToMux = gst_element_get_static_pad (_videoData.q4, "src");
		_videoData.SinkPadAudioQ4ToMux = gst_element_get_request_pad (_videoData.mux, "audio_00");
		gst_pad_link (_videoData.SourcePadAudioQ4ToMux, _videoData.SinkPadAudioQ4ToMux);

		std::cout << std::endl << "linked record pipeline" << std::endl;
		// Add ghost pads ("videosink"/"audiosink") on the queues that target their sink pads
		GstPad* sinkpad;
		sinkpad = gst_element_get_static_pad (_videoData.q3, "sink");
		gst_element_add_pad (_videoData.q3, gst_ghost_pad_new ("videosink", sinkpad));
		gst_object_unref (GST_OBJECT (sinkpad));
		sinkpad = gst_element_get_static_pad (_videoData.q4, "sink");
		gst_element_add_pad (_videoData.q4, gst_ghost_pad_new ("audiosink", sinkpad));
		gst_object_unref (GST_OBJECT (sinkpad));

		std::cout << std::endl << "rec pipeline to pause state" << std::endl;
		// set the new bin to PAUSE to preroll
		//gst_element_set_state(_videoData.muxcont, GST_STATE_PAUSED);
		
		// Request source pads from the tees and get the queues' ghost sink pads
		GstElement* element;
		element = gst_bin_get_by_name (GST_BIN (_videoData.srcPipeline), "videotee");
		_videoData.SourcePadVideoVTeeToQ3 = gst_element_get_request_pad (element, "src_00");
		_videoData.SinkPadVideoVTeeQ3 = gst_element_get_static_pad (_videoData.q3, "videosink");
		element = gst_bin_get_by_name (GST_BIN (_videoData.srcPipeline), "audiotee");
		_videoData.SourcePadAudioATeeToQ4 = gst_element_get_request_pad (element, "src_00");
		_videoData.SinkPadAudioATeeToQ4 = gst_element_get_static_pad (_videoData.q4, "audiosink");

		std::cout << std::endl << "add rec pipeline to main pipeline" << std::endl;
		//gst_bin_add(GST_BIN(_videoData.srcPipeline),_videoData.muxcont);
		gst_pad_link (_videoData.SourcePadVideoVTeeToQ3, _videoData.SinkPadVideoVTeeQ3);
		gst_pad_link (_videoData.SourcePadAudioATeeToQ4, _videoData.SinkPadAudioATeeToQ4);
		gst_bin_add (GST_BIN (_videoData.srcPipeline), _videoData.appsink);
		
		if (!elementLinking (_videoData.mux, _videoData.appsink, "\nFailed Link mux to appsink\n"))
			return -1;

		g_signal_connect (_videoData.source, "pad-added", G_CALLBACK (cb_new_rtspsrc_pad), &_videoData);
		g_signal_connect (_videoData.appsink, "new-sample", G_CALLBACK (appsink_ToFile), &_videoData);
		g_signal_connect (_videoData.appsink, "eos", G_CALLBACK (check_for_eos), &_videoData);
		
		BlockVideoPad = _videoData.SourcePadVideoVTeeToQ3;
		BlockAudioPad = _videoData.SourcePadAudioATeeToQ4;

	gst_element_set_state (_videoData.srcPipeline, GST_STATE_PLAYING);

	loop = g_main_loop_new (NULL, FALSE);

	gst_bus_add_watch (GST_ELEMENT_BUS (_videoData.srcPipeline), bus_cb, loop);

	g_timeout_add_seconds (4, timeout_cb, loop);

	// run the main loop; without this the probes, signals and bus watch never fire
	g_main_loop_run (loop);

	gst_element_set_state (_videoData.srcPipeline, GST_STATE_NULL);
	return 0;
}
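
bus_cb and cb_new_rtspsrc_pad are also referenced above but not shown. Minimal
versions along these lines are assumed (sketches only; the real pad-added handler
may route the rtspsrc pads differently):

// ------------------ ASSUMED HELPERS (sketch) ------------------ //
// sketch: minimal bus watch, quits the main loop on ERROR or EOS
static gboolean
bus_cb (GstBus * bus, GstMessage * msg, gpointer user_data)
{
	GMainLoop *loop = (GMainLoop *) user_data;

	switch (GST_MESSAGE_TYPE (msg)) {
	case GST_MESSAGE_ERROR: {
		GError *err = NULL;
		gchar *dbg = NULL;
		gst_message_parse_error (msg, &err, &dbg);
		g_printerr ("ERROR: %s\n%s\n", err->message, dbg ? dbg : "");
		g_clear_error (&err);
		g_free (dbg);
		g_main_loop_quit (loop);
		break;
	}
	case GST_MESSAGE_EOS:
		g_main_loop_quit (loop);
		break;
	default:
		break;
	}
	return TRUE;
}

// sketch: route rtspsrc's dynamic RTP pads to q1 (video) or q2 (audio)
// based on the "media" field of the pad caps
static void
cb_new_rtspsrc_pad (GstElement * src, GstPad * new_pad, gpointer user_data)
{
	GstCaps *caps = gst_pad_get_current_caps (new_pad);
	if (caps == NULL)
		caps = gst_pad_query_caps (new_pad, NULL);

	GstStructure *s = gst_caps_get_structure (caps, 0);
	const gchar *media = gst_structure_get_string (s, "media");

	GstElement *target = (media != NULL && g_str_has_prefix (media, "video"))
			? _videoData.q1 : _videoData.q2;

	GstPad *sinkpad = gst_element_get_static_pad (target, "sink");
	if (!gst_pad_is_linked (sinkpad))
		gst_pad_link (new_pad, sinkpad);

	gst_object_unref (sinkpad);
	gst_caps_unref (caps);
}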
// ------------------------ METHOD ---------------------- //
static GstPadProbeReturn
event_probe_cb (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
{
	// only act on the EOS event while the pad is blocked
	if (GST_EVENT_TYPE (GST_PAD_PROBE_INFO_DATA (info)) != GST_EVENT_EOS)
		return GST_PAD_PROBE_PASS;

	if(gst_pad_is_blocked(pad)==FALSE)
		return GST_PAD_PROBE_PASS;

	gst_pad_remove_probe (pad, GST_PAD_PROBE_INFO_ID (info));
  
	//g_print ("Switching from '%s' to '%s'..\n", GST_OBJECT_NAME (cur_effect),
		//GST_OBJECT_NAME (next));
	g_print("\nSwitching\n");

	gst_element_set_state (_videoData.mux, GST_STATE_NULL);

	g_print("\ndone setting state\n");
	// keep a ref so the mux survives being removed (remove unlinks it automatically)
	gst_object_ref (_videoData.mux);
	//GST_DEBUG_OBJECT (pipeline, "removing %" GST_PTR_FORMAT, _videoData.muxcont);
	gst_bin_remove (GST_BIN (_videoData.srcPipeline), _videoData.mux);

	//GST_DEBUG_OBJECT (pipeline, "adding   %" GST_PTR_FORMAT, _videoData.muxcont);
	gst_bin_add (GST_BIN (_videoData.srcPipeline), _videoData.mux);
	gst_object_unref (_videoData.mux);
  
	
	// Re-link the recording queues to the re-added mux
	CheckPadLinkReturn (gst_pad_link (_videoData.SourcePadVideoQ3ToMux, _videoData.SinkPadVideoQ3ToMux));
	CheckPadLinkReturn (gst_pad_link (_videoData.SourcePadAudioQ4ToMux, _videoData.SinkPadAudioQ4ToMux));

	gst_pad_unlink (_videoData.SourcePadVideoVTeeToQ3, _videoData.SinkPadVideoVTeeQ3);
	gst_pad_unlink (_videoData.SourcePadAudioATeeToQ4, _videoData.SinkPadAudioATeeToQ4);

	// Re-link the tees to the recording queues
	std::cout << std::endl << "linking vtee to q3" << std::endl;
	CheckPadLinkReturn (gst_pad_link (_videoData.SourcePadVideoVTeeToQ3, _videoData.SinkPadVideoVTeeQ3));

	std::cout << std::endl << "linking atee to q4" << std::endl;
	CheckPadLinkReturn (gst_pad_link (_videoData.SourcePadAudioATeeToQ4, _videoData.SinkPadAudioATeeToQ4));

	std::cout << std::endl << "relinking mux to appsink" << std::endl;
	elementLinking (_videoData.mux, _videoData.appsink, "\nFailed Link mux to appsink\n");

	std::cout << std::endl << "set state of mux to playing" << std::endl;
	gst_element_set_state (_videoData.mux, GST_STATE_PLAYING);

	//cur_effect = next;
	GST_DEBUG_OBJECT (_videoData.srcPipeline, "done");
	
	AlreadPassedEOS = TRUE;

	return GST_PAD_PROBE_DROP;
}

// ------------------------ METHOD ---------------------- //
static GstPadProbeReturn
pad_probe_cb (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
{
	GstPad *srcpad;

	GST_DEBUG_OBJECT (pad, "pad is blocked now");
  
	/* remove the probe first */
	gst_pad_remove_probe (pad, GST_PAD_PROBE_INFO_ID (info));
	
	send_eos_element = _videoData.mux;

	/* install new probe for EOS */
	srcpad = gst_element_get_static_pad (send_eos_element, "src");
	gst_pad_add_probe (srcpad,
		(GstPadProbeType) (GST_PAD_PROBE_TYPE_BLOCK | GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM),
		event_probe_cb, user_data, NULL);

	gst_object_unref (srcpad);

	/* push EOS into the element, the probe will be fired when the
	* EOS leaves the effect and it has thus drained all of its data */
	//sinkpad = gst_element_get_static_pad (send_eos_element, "sink");
	//gst_pad_send_event (sinkpad, gst_event_new_eos ());
	//gst_object_unref (sinkpad);
	g_print("\nsending eos \n");
	gst_element_send_event(send_eos_element,gst_event_new_eos());

	return GST_PAD_PROBE_OK;
}

// ------------------------ METHOD ---------------------- //
static gboolean
timeout_cb (gpointer user_data)
{
	gulong c; 
	std::cout << std::endl << " Blocking Video" << std::endl;
	c = gst_pad_add_probe (BlockVideoPad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
      pad_probe_cb, user_data, NULL);
	//gst_pad_remove_probe(BlockVideoPad,c);
	
	std::cout << std::endl << " Blocking Audio" << std::endl;
	c = gst_pad_add_probe (BlockAudioPad, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
      pad_probe_cb, user_data, NULL);
	//gst_pad_remove_probe(BlockAudioPad,c);
	
  return TRUE;
}
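
appsink_ToFile and check_for_eos aren't shown either. The general shape of an
appsink "new-sample"/"eos" pair that appends the muxed AVI data to a file would
be something like this (a sketch assuming a hypothetical global FILE* called
outfile, not the actual code):

#include <gst/app/gstappsink.h>   // for gst_app_sink_pull_sample()
#include <cstdio>

static FILE *outfile;             // assumed to be (re)opened elsewhere

// sketch: "new-sample" handler, pulls the muxed buffer and writes it out
static GstFlowReturn
appsink_ToFile (GstElement * sink, gpointer user_data)
{
	GstSample *sample = gst_app_sink_pull_sample (GST_APP_SINK (sink));
	if (sample == NULL)
		return GST_FLOW_EOS;

	GstBuffer *buf = gst_sample_get_buffer (sample);
	GstMapInfo map;
	if (gst_buffer_map (buf, &map, GST_MAP_READ)) {
		fwrite (map.data, 1, map.size, outfile);
		gst_buffer_unmap (buf, &map);
	}
	gst_sample_unref (sample);
	return GST_FLOW_OK;
}

// sketch: "eos" handler, the current file is complete at this point
static void
check_for_eos (GstElement * sink, gpointer user_data)
{
	g_print ("appsink got EOS, file is complete\n");
	fflush (outfile);
	// fclose()/fopen() of the next file would go here
}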




