How to stop and restart writing to a filesink (while the pipeline is alive).

Angel Martin amartin at vicomtech.org
Wed Jun 5 01:55:59 PDT 2013


Hi,

To fully understand the element/pad structure, the layout is:

// Pipeline -> src                  -> dynamic pipeline
// Pipeline -> capsfilter(f264file) -> mp4mux(mux0) -> filesink(fsink0)
// Pipeline -> elem_BEFORE |blockpad| -> |elem_CUR_sinkpad| elem_CUR |elem_CUR_srcpad| -> |elem_AFTER_sinkpad| elem_AFTER
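
For reference, the fixed part of that pipeline corresponds roughly to the
following description (the elements in front of the capsfilter are not shown
in the post, so videotestsrc ! x264enc is only a placeholder for whatever
produces the H.264 stream):

  videotestsrc ! x264enc ! capsfilter name=f264file caps=video/x-h264 !
      mp4mux name=mux0 ! filesink name=fsink0 location=test_0.mp4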

Best,

Angel

2013/6/4 Angel Martin <amartin at vicomtech.org>

> Dear all,
>
> I was not able to create fully readable muxed files with GStreamer 0.10 on
> top of multifilesink or output-selector.
>
> After analysing lots of alternatives, my solution takes as its code base the
> example shown in:
>
> http://gstreamer.freedesktop.org/data/doc/gstreamer/head/manual/html/section-dynamic-pipelines.html
>
> The probe API has changed a bit compared to that example (the code below
> uses the 0.10 gst_pad_add_buffer_probe()/gst_pad_add_event_probe() calls),
> but it works to create a different MP4 file every N seconds:
>
> static GstElement *pipeline = NULL;
>
> // Pipeline -> src                  -> dynamic pipeline
> // Pipeline -> capsfilter(f264file) -> mp4mux(mux0) -> filesink(fsink0)
> // Pipeline -> elem_before |blockpad| -> |elem_cur_sinkpad| elem_cur |elem_cur_srcpad| -> |elem_after_sinkpad| elem_after
> static gulong probe_id; // probe ID
> static GstElement *elem_before; // SRC of dynamic pipeline
> static GstElement *elem_after; // SINK of dynamic pipeline
> static GstElement *elem_cur; // Main element of dynamic pipeline
> static GstPad *blockpad; // SRC pad to be blocked
> static GstPad *elem_cur_srcpad; // SRC pad where check EOS
> static GstPad *elem_cur_sinkpad; // SINK of dynamic pipeline
> static GstPad *elem_after_sinkpad;  // SINK of SINK element
>
> // Last Buffer Timestamp
> static GstClockTime last_ts = 0;
>
> typedef enum {
>   NO_NEW_FILE, // Keep current file destination
>   NEW_FILE, // Switch file destination
> } NewFileStatus;
> static NewFileStatus newfile = NO_NEW_FILE; // Switch File Flag
>
> static int counter = 1; // Index filename
> static gboolean playing = TRUE; // TRUE while the pipeline runs; checked in timeout() below
>
> // EOS listener to switch to other file destination
> static gboolean
> event_probe_cb (GstPad * pad, GstEvent * event, gpointer user_data)
> {
>   g_print ("INSIDE event_probe_cb:%d type:%s\n",probe_id,
>   GST_EVENT_TYPE (event)==GST_EVENT_EOS?"EOS":GST_EVENT_TYPE
> (event)==GST_EVENT_NEWSEGMENT?"NEWSEGMENT":"OTHER");
>
>   if (GST_EVENT_TYPE (event) != GST_EVENT_EOS)
>   {
>     // Push the event in the pipe flow (false DROP)
>     return TRUE;
>   }
>    // remove the probe first
>   gst_pad_remove_event_probe (pad, probe_id);
>
>   gst_object_unref (elem_cur_srcpad);
>   gst_object_unref (elem_after_sinkpad);
>   gst_element_release_request_pad(elem_cur, elem_cur_sinkpad);
>
>   gst_element_set_state (elem_cur, GST_STATE_NULL);
>   gst_element_set_state (elem_after, GST_STATE_NULL);
>
>   // remove unlinks automatically
>   GST_DEBUG_OBJECT (pipeline, "removing %" GST_PTR_FORMAT, elem_cur);
>   gst_bin_remove (GST_BIN (pipeline), elem_cur);
>   GST_DEBUG_OBJECT (pipeline, "removing %" GST_PTR_FORMAT, elem_after);
>   gst_bin_remove (GST_BIN (pipeline), elem_after);
>
>   GstElement * mux0 = gst_element_factory_make("mp4mux", "mux0");
>   GstElement * fsink0 = gst_element_factory_make("filesink", "fsink0");
>   elem_cur = mux0;
>   elem_after = fsink0;
>
>   if (!mux0 || !fsink0)
>   {
>     g_print ("missing elements\n");
>     return TRUE; // nothing to relink, keep the pipeline alive
>   }
>
>   GST_DEBUG_OBJECT (pipeline, "adding   %" GST_PTR_FORMAT, elem_cur);
>   gst_bin_add (GST_BIN (pipeline), elem_cur);
>   GST_DEBUG_OBJECT (pipeline, "adding   %" GST_PTR_FORMAT, elem_after);
>   gst_bin_add (GST_BIN (pipeline), elem_after);
>
>   char buffer[128];
>   sprintf(buffer, "test_%d.mp4", counter++);
>   g_print ("File Switching %s\n", buffer);
>   g_object_set(G_OBJECT(elem_after), "location", buffer, NULL);
>
>   GST_DEBUG_OBJECT (pipeline, "linking..");
>   elem_cur_srcpad = gst_element_get_static_pad (elem_cur, "src");
>   elem_cur_sinkpad = gst_element_get_request_pad (elem_cur, "video_%d");
>   elem_after_sinkpad = gst_element_get_static_pad (elem_after, "sink");
>
>   if (gst_pad_link (blockpad, elem_cur_sinkpad) != GST_PAD_LINK_OK)
>   {
>     g_print ("linking output 0 failed\n");
>     return TRUE;
>   }
>   if (gst_pad_link (elem_cur_srcpad, elem_after_sinkpad) != GST_PAD_LINK_OK)
>   {
>     g_print ("linking output 1 failed\n");
>     return TRUE;
>   }
>
>   g_print ("Moving to PLAYING\n");
>   gst_element_set_state (elem_cur, GST_STATE_PLAYING);
>   gst_element_set_state (elem_after, GST_STATE_PLAYING);
>
>   GST_DEBUG_OBJECT (pipeline, "done");
>
>   newfile = NO_NEW_FILE;
>   // Push the event in the pipe flow (false DROP)
>   return TRUE;
> }
>
> // Check if Buffer contains a KEY FRAME
> static gboolean
> is_sync_frame (GstBuffer * buffer)
> {
>   if (GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT))
>   {
>     return FALSE;
>   }
>   else if (!GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_IN_CAPS))
>   {
>     return TRUE;
>   }
>   return FALSE; // in-caps (codec data) buffers are not key frames
> }
>
> // Block source and launch EOS to MUXER to achieve a full muxed file
> static gboolean
> pad_probe_cb (GstPad * pad, GstBuffer * buffer, gpointer user_data)
> {
>   g_print ("\n\tINSIDE pad_probe_cb:%d %s %s\n",probe_id,
> (newfile?"newfile":"thesame"),
>   (is_sync_frame (buffer)?"KEYframe":"frame"));
>   GST_DEBUG_OBJECT (pad, "pad is blocked now");
>
>   last_ts = GST_BUFFER_TIMESTAMP (buffer);
>   if (!GST_CLOCK_TIME_IS_VALID (last_ts))
>     last_ts = 0;
>
>   if ((newfile == NO_NEW_FILE) || !is_sync_frame (buffer))
>     return TRUE;
>
>   /* remove the probe first */
>   gst_pad_remove_buffer_probe (pad, probe_id);
>
>   /* install new probe for EOS */
>   probe_id = gst_pad_add_event_probe (elem_after_sinkpad,
> G_CALLBACK(event_probe_cb), user_data);
>
>   /* push EOS into the element, the probe will be fired when the
>    * EOS leaves the effect and it has thus drained all of its data */
>   gst_pad_send_event (elem_cur_sinkpad, gst_event_new_eos ());
>
>   // Wait until the EOS has been processed; the buffer carrying this key
>   // frame will then be the FIRST one written to the new file.
>   // (Sleep() is the Windows API call; use g_usleep() elsewhere.)
>   while (newfile != NO_NEW_FILE)
>     Sleep (1);
>
>   // Push the buffer in the pipe flow (false DROP)
>   return TRUE;
> }
>
> // this timeout is periodically run as part of the mainloop
> static gboolean timeout (gpointer user_data)
> {
>   g_print ("TIMEOUT\n");
>   if (!playing)
>     return FALSE;
>   newfile = NEW_FILE;
>   /* install new probe for Keyframe and New File */
>   probe_id = gst_pad_add_buffer_probe (blockpad, G_CALLBACK (pad_probe_cb),
>       pipeline);
>   return TRUE;
> }
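>
> (The post does not show how the static part of the pipeline is built or how
> the periodic switch is scheduled. A minimal sketch of that setup could look
> as follows; videotestsrc/x264enc, the 10-second period and the element names
> other than f264file/mux0/fsink0 are placeholder assumptions, not part of the
> original code.)
>
> int
> main (int argc, char *argv[])
> {
>   GMainLoop *loop;
>   GstElement *vsrc, *enc, *f264file;
>   GstCaps *caps;
>
>   gst_init (&argc, &argv);
>   loop = g_main_loop_new (NULL, FALSE);
>
>   pipeline = gst_pipeline_new ("pipeline");
>   vsrc = gst_element_factory_make ("videotestsrc", "vsrc"); // placeholder source
>   enc = gst_element_factory_make ("x264enc", "enc");        // placeholder encoder
>   f264file = gst_element_factory_make ("capsfilter", "f264file");
>   elem_cur = gst_element_factory_make ("mp4mux", "mux0");
>   elem_after = gst_element_factory_make ("filesink", "fsink0");
>   g_object_set (G_OBJECT (elem_after), "location", "test_0.mp4", NULL);
>
>   caps = gst_caps_new_simple ("video/x-h264", NULL);
>   g_object_set (G_OBJECT (f264file), "caps", caps, NULL);
>   gst_caps_unref (caps);
>
>   gst_bin_add_many (GST_BIN (pipeline), vsrc, enc, f264file, elem_cur,
>       elem_after, NULL);
>   gst_element_link_many (vsrc, enc, f264file, NULL);
>
>   // Keep the element/pad handles used by the probes above
>   elem_before = f264file;
>   blockpad = gst_element_get_static_pad (elem_before, "src");
>   elem_cur_srcpad = gst_element_get_static_pad (elem_cur, "src");
>   elem_cur_sinkpad = gst_element_get_request_pad (elem_cur, "video_%d");
>   elem_after_sinkpad = gst_element_get_static_pad (elem_after, "sink");
>   gst_pad_link (blockpad, elem_cur_sinkpad);
>   gst_pad_link (elem_cur_srcpad, elem_after_sinkpad);
>
>   gst_element_set_state (pipeline, GST_STATE_PLAYING);
>
>   // Request a new file every 10 seconds (arbitrary period)
>   g_timeout_add_seconds (10, timeout, NULL);
>
>   // Runs until interrupted (add a bus watch to quit on EOS/errors)
>   g_main_loop_run (loop);
>
>   playing = FALSE;
>   gst_element_set_state (pipeline, GST_STATE_NULL);
>   gst_object_unref (pipeline);
>   g_main_loop_unref (loop);
>   return 0;
> }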
>
> Best,
>
> Angel
>
> 2012/9/19 Stefan Sauer <ensonic at hora-obscura.de>
>
>>  On 09/19/2012 07:03 PM, Alexander Botero wrote:
>>
>>  Stefan, I took your "encodebin" very literally and made some tests with
>> it.
>>
>>  I learned that it's possible to create a media (encoding) profile
>> during runtime.
>> I even tried to drop the container from ogg-vorbis recording, but the
>> file was not playable;-)
>> I also tested "encodebin" with your GstTee pipeline.
>>
>>  I haven't managed to adjust the internal clock of AAC, OGG Vorbis and
>> SPX formats.
>> They still "remember" the silent parts. But these tests have been very
>> interesting to do.
>>
>> For these formats, you will need to send a new-segment event to inform the
>> elements about the gap. You should find an example inside camerabin2
>> in gst-plugins-bad.
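>>
>> A minimal sketch of pushing such an event with the 0.10 API (the pad, the
>> resume_ts bookkeeping and the exact segment values are assumptions for
>> illustration; camerabin2 is the authoritative example):
>>
>> /* resume_ts is the running time at which recording resumes,
>>  * tracked by the application while the stream was muted */
>> static void
>> push_gap_newsegment (GstPad * pad, GstClockTime resume_ts)
>> {
>>   GstEvent *ev;
>>
>>   /* an updating new-segment that tells downstream the gap's data is not coming */
>>   ev = gst_event_new_new_segment (TRUE /* update */, 1.0, GST_FORMAT_TIME,
>>       (gint64) resume_ts /* start */, -1 /* stop */,
>>       (gint64) resume_ts /* position */);
>>   gst_pad_push_event (pad, ev);
>> }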
>>
>> Stefan
>>
>>
>>  Current solution / I have settled on this solution:
>> I have decided to use the VADer element in our (GPL'ed) audio-recorder
>> because it has a very good algorithm for audio detection and noise
>> filtering.
>> I will now bake it into the "audio-recorder" project so it gets compiled
>> and packaged.
>>
>>  The "silent" detection in the recorder will become much simpler. The
>> actual, old version creates two (2) long pipelines; one for the "silent"
>> detection and second (similar) pipeline for recording. This is awful waste
>> of resources.
>>
>>  But of course, the new recorder must live with the above problem with
>> AAC, OGG and SPX formats. That's life!
>> ------------
>>
>>  static GstElement *create_pipeline() {
>>      GstElement *pipeline = gst_pipeline_new("a simple recorder");
>>
>>      GstElement *src = gst_element_factory_make("pulsesrc", "source");
>>     g_object_set(G_OBJECT(src), "device", "alsa_input.usb-Creative_....",
>> NULL);
>>
>>      GstElement *filesink = gst_element_factory_make("filesink",
>> "filesink");
>>     g_object_set(G_OBJECT(filesink), "location", "test.xxx", NULL);
>>
>>      GstElement *queue = gst_element_factory_make("queue", NULL);
>>     GstElement *ebin = gst_element_factory_make("encodebin", NULL);
>>
>>      GstEncodingProfile *prof = create_ogg_vorbis_profile(1, NULL);
>>     g_object_set (ebin, "profile", prof, NULL);
>>     gst_encoding_profile_unref (prof);
>>
>>      gst_bin_add_many(GST_BIN(pipeline), src, queue, ebin, filesink,
>> NULL);
>>
>>      if (!gst_element_link_many(src, queue, ebin, filesink, NULL)) {
>>        g_printerr("Cannot link many.\n");
>>     }
>>
>>      GstBus *bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
>>     gst_bus_add_signal_watch(bus);
>>     g_signal_connect(bus, "message::element",
>> G_CALLBACK(level_message_cb), NULL);
>>     gst_object_unref(bus);
>>
>>     return pipeline;
>> }
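>>
>>  The level_message_cb handler is not included here; a minimal sketch of what
>> it could look like, assuming a "level" element upstream posts element
>> messages (the -40 dB threshold and the printouts are illustrative
>> assumptions only):
>>
>>  static void level_message_cb (GstBus *bus, GstMessage *message, gpointer user_data) {
>>     const GstStructure *s = gst_message_get_structure(message);
>>     const GValue *array_val;
>>     GValueArray *rms_arr;
>>     gdouble rms_db;
>>
>>     if (!s || g_strcmp0(gst_structure_get_name(s), "level") != 0)
>>        return;
>>
>>     // "rms" holds one dB value per channel; look at the first channel only
>>     array_val = gst_structure_get_value(s, "rms");
>>     rms_arr = (GValueArray *) g_value_get_boxed(array_val);
>>     rms_db = g_value_get_double(g_value_array_get_nth(rms_arr, 0));
>>
>>     if (rms_db < -40.0) {
>>        // silence: e.g. close the valve here (g_object_set(valve, "drop", TRUE, NULL)),
>>        // as in Stefan's tee ! valve ! encodebin suggestion quoted below
>>        g_print("silence: %.1f dB\n", rms_db);
>>     } else {
>>        g_print("signal:  %.1f dB\n", rms_db);
>>     }
>>  }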
>>
>>  static GstEncodingProfile *create_ogg_vorbis_profile (guint presence,
>> gchar * preset) {
>>     // I copied this from gstreamer's test-module. It seems to be very
>> easy to create new profiles.
>>     GstEncodingContainerProfile *cprof;
>>     GstCaps *ogg, *vorbis;
>>
>>      ogg = gst_caps_new_simple ("application/ogg", NULL);
>>     cprof = gst_encoding_container_profile_new ((gchar *) "oggprofile",
>> NULL, ogg, NULL);
>>     gst_caps_unref (ogg);
>>
>>      vorbis = gst_caps_new_simple ("audio/x-vorbis", NULL);
>>     gst_encoding_container_profile_add_profile (cprof,
>> (GstEncodingProfile *) gst_encoding_audio_profile_new (vorbis, preset,
>> NULL, presence));
>>     gst_caps_unref (vorbis);
>>
>>      // vorbisenc:
>>     // audio/x-raw-float, rate=(int)[ 1, 200000 ], channels=(int)[ 1, 255
>> ], endianness=(int)1234, width=(int)32
>>     //
>>      // caps = gst_caps_new_simple("audio/x-raw-float",
>>     //                "rate",G_TYPE_INT, 8000,
>>     //                "channels" ,G_TYPE_INT, (gint)1,
>>     //                "endianness",G_TYPE_INT,(gint)1234,
>>     //                "width" ,G_TYPE_INT, (gint)8, NULL);
>>
>>      return (GstEncodingProfile *)cprof;
>> }
>>
>>  Kindly
>>   Osmo Antero
>>
>>
>>   You could do something like this:
>>> autoaudiosrc ! level ! tee name=t ! queue ! autoaudiosink t. ! queue !
>>> valve ! encodebin ! filesink
>>>
>>> when the level drops below a threshold, you close the valve and remember
>>> the position. When the level gets above the threshold again, you open the
>>> valve (and eventually push a newsegment event).
>>>
>>> Stefan
>>>
>>>
>>
>>
>>
>>
>> _______________________________________________
>> gstreamer-devel mailing list
>> gstreamer-devel at lists.freedesktop.org
>> http://lists.freedesktop.org/mailman/listinfo/gstreamer-devel
>>
>>
>

