#include <gst/gst.h>
#include <glib.h>

#define INCLUDE_AUDIO 1

struct PipeData {
  GstElement *pipeline;
  GstPad *asrcpad;
  GstPad *vsrcpad;
};

typedef struct _RecordingBin {
  GstBin parent;
} RecordingBin;

typedef struct _RecordingBinClass {
  GstBinClass parent;
} RecordingBinClass;

#define GST_TYPE_RECORDING_BIN recording_bin_get_type()

GST_BOILERPLATE (RecordingBin, recording_bin, GstBin, GST_TYPE_BIN);

static void
recording_bin_handle_message (GstBin * bin, GstMessage * message)
{
  RecordingBin *recording = (RecordingBin *) bin;

  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_EOS:
      g_print ("Got EOS in the recording bin\n");
      /* FIXME: Remove the bin from the pipeline and dispose of it */
      break;
    default:
      break;
  }

  GST_BIN_CLASS (parent_class)->handle_message (bin, message);
}

static void
recording_bin_base_init (gpointer g_class)
{
}

static void
recording_bin_class_init (RecordingBinClass * klass)
{
  GstBinClass *gstbin_class = GST_BIN_CLASS (klass);

  gstbin_class->handle_message =
      GST_DEBUG_FUNCPTR (recording_bin_handle_message);
}

static void
recording_bin_init (RecordingBin * src, RecordingBinClass * klass)
{
}

gboolean
bus_call (GstBus *bus, GstMessage *msg, void *data)
{
  gchar *debug;
  GError *err;
  GMainLoop *loop = (GMainLoop *) data;

  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_APPLICATION:
      g_print ("APP received on OBJ NAME %s\n", GST_OBJECT_NAME (msg->src));
      break;
    case GST_MESSAGE_EOS:
      g_print ("EOS received on OBJ NAME %s\n", GST_OBJECT_NAME (msg->src));
      g_main_loop_quit (loop);
      break;
    case GST_MESSAGE_ERROR:
      gst_message_parse_error (msg, &err, &debug);
      g_free (debug);
      g_print ("BUS CALL %s\n", err->message);
      g_error_free (err);
      g_main_loop_quit (loop);
      break;
    default:
      break;
  }

  return TRUE;
}

static void
handle_pad_block (GstPad *pad, gboolean blocked, gpointer user_data)
{
  GstEvent *event;

  if (blocked) {
    /* The tee src pad is now blocked: unlink it and push EOS into the
     * recording bin through its former peer pad. */
    GstPad *peer = gst_pad_get_peer (pad);

    gst_pad_unlink (pad, peer);
    event = gst_event_new_eos ();
    gst_pad_send_event (peer, event);
    gst_pad_set_blocked_async (pad, FALSE, handle_pad_block, NULL);
    gst_object_unref (peer);
  } else {
    /* Unblock is finished */
  }
}

/**
 * stop_encoding:
 * @data: the #PipeData holding the pipeline and the tee src pads
 *
 * GLib timeout callback that runs every 2 seconds. On its second call it
 * blocks the tee src pads feeding the recording bin, so that the pad-block
 * callback can send EOS into the bin's sink pads.
 *
 * Returns: FALSE to stop the timeout from being called again, TRUE otherwise
 */
gboolean
stop_encoding (gpointer data)
{
  static int called = 0;

  called++;
  g_print ("CALLED!! %d times\n", called);

  if (called == 2) {
    struct PipeData *pipedata = (struct PipeData *) data;

    /* Block the src pads of the 2 tees leading to the recording bin. */
    /* In the pad block callback, send EOS into the recording bin. */
    /* Catch EOS coming out of the recording sub-bin and remove the bin. */
    gst_pad_set_blocked_async (pipedata->asrcpad, TRUE, handle_pad_block, NULL);
    gst_pad_set_blocked_async (pipedata->vsrcpad, TRUE, handle_pad_block, NULL);
  }

  return (called == 2) ? FALSE : TRUE;
}

int
main (int argc, char *argv[])
{
  GMainLoop *loop;
  GstElement *pipeline, *vsource, *vtee, *vqueue, *tover, *xvsink,
      *evqueue, *vencoder, *muxer, *filesink;
  GstElement *asource, *atee, *aqueue, *aequeue, *aencoder, *asink;
  GstBin *recording;
  GstBus *bus;
  GstPad *srcpad, *sinkpad;
  struct PipeData pipedata;

  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  /* Create gstreamer elements */
  pipedata.pipeline = pipeline = gst_pipeline_new ("eos-test-player");

  vsource = gst_element_factory_make ("videotestsrc", "viewing-file-source");
  vtee = gst_element_factory_make ("tee", "viewing-tee");
  vqueue = gst_element_factory_make ("queue2", "viewing-queue");
  tover = gst_element_factory_make ("timeoverlay", "viewing-overlay");
  xvsink = gst_element_factory_make ("xvimagesink", "viewing-xvsink");

  asource = gst_element_factory_make ("audiotestsrc", "viewing-audio-source");
  g_object_set (G_OBJECT (asource), "num-buffers", 300, NULL);
  atee = gst_element_factory_make ("tee", "viewing-audio-tee");
  aqueue = gst_element_factory_make ("queue2", "viewing-audio-queue");
  asink = gst_element_factory_make ("pulsesink", "viewing-audio-sink");
  aequeue = gst_element_factory_make ("queue2", "encoding-audio-queue");
  aencoder = gst_element_factory_make ("lamemp3enc", "encoding-audio-encoder");

  recording =
      GST_BIN (g_object_new (GST_TYPE_RECORDING_BIN, "name", "recbin", NULL));
  evqueue = gst_element_factory_make ("queue2", "encoding-queue");
  vencoder = gst_element_factory_make ("ffenc_mpeg4", "encoding-encoder");
  muxer = gst_element_factory_make ("mp4mux", "encoding-muxer");
  filesink = gst_element_factory_make ("filesink", "encoding-filesink");

  if (!pipeline || !vsource || !xvsink || !tover) {
    g_print ("Unable to create all necessary elements\n");
    return -1;
  }

  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  g_object_set (G_OBJECT (vsource), "num-buffers", 300, NULL);
  g_object_set (G_OBJECT (filesink), "location", "/tmp/output.mp4", NULL);
  g_object_set (G_OBJECT (tover), "halign", "right", NULL);
  g_object_set (G_OBJECT (tover), "valign", "top", NULL);
  g_object_set (G_OBJECT (tover), "shaded-background", TRUE, NULL);

  /* create the recording bin */
  gst_bin_add_many (recording, aequeue, aencoder, evqueue, vencoder, muxer,
      filesink, NULL);

  /* ghost the video encoding queue's sink pad as "vsink" on the bin */
  sinkpad = gst_element_get_static_pad (evqueue, "sink");
  GstPad *ghost = gst_ghost_pad_new ("vsink", sinkpad);
  if (ghost == NULL)
    g_error ("Unable to create ghostpad!\n");
  gst_element_add_pad (GST_ELEMENT (recording), ghost);
  gst_object_unref (GST_OBJECT (sinkpad));
  gst_element_link_many (evqueue, vencoder, muxer, filesink, NULL);

  /* ghost the audio encoding queue's sink pad as "asink" on the bin */
  sinkpad = gst_element_get_static_pad (aequeue, "sink");
  gst_element_add_pad (GST_ELEMENT (recording),
      gst_ghost_pad_new ("asink", sinkpad));
  gst_object_unref (GST_OBJECT (sinkpad));
  gst_element_link_many (aequeue, aencoder, muxer, NULL);

  /* we add all elements into the pipeline */
  gst_bin_add_many (GST_BIN (pipeline), asource, atee, aqueue, asink,
      vsource, vtee, vqueue, tover, xvsink, GST_ELEMENT (recording), NULL);

  /* link video elements */
  gst_element_link_many (vsource, tover, vtee, NULL);
  srcpad = gst_element_get_request_pad (vtee, "src0");
  sinkpad = gst_element_get_pad (vqueue, "sink");
  gst_pad_link (srcpad, sinkpad);
  gst_object_unref (sinkpad);
  gst_object_unref (srcpad);
  gst_element_link (vqueue, xvsink);

  /* link the video branch into the recording bin */
  pipedata.vsrcpad = gst_element_get_request_pad (vtee, "src1");
  sinkpad = gst_element_get_pad (GST_ELEMENT (recording), "vsink");
  gst_pad_link (pipedata.vsrcpad, sinkpad);
  gst_object_unref (sinkpad);

  /* link audio elements */
  gst_element_link_many (asource, atee, NULL);
  srcpad = gst_element_get_request_pad (atee, "src0");
  sinkpad = gst_element_get_pad (aqueue, "sink");
  gst_pad_link (srcpad, sinkpad);
  gst_object_unref (sinkpad);
  gst_object_unref (srcpad);
  gst_element_link (aqueue, asink);

  /* link the audio branch into the recording bin */
  pipedata.asrcpad = gst_element_get_request_pad (atee, "src1");
  sinkpad = gst_element_get_pad (GST_ELEMENT (recording), "asink");
  gst_pad_link (pipedata.asrcpad, sinkpad);
  gst_object_unref (sinkpad);

  /* Iterate */
  g_print ("Running...\n");
  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_timeout_add_seconds (2, (GSourceFunc) stop_encoding, &pipedata);
  g_main_loop_run (loop);

  /* Out of the main loop, clean up nicely */
  g_print ("Returned, stopping playback\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);

  g_print ("Deleting pipeline\n");
  gst_object_unref (GST_OBJECT (pipeline));

  return 0;
}