sample using output-selector
Garth Tissington
gtissington at gmail.com
Tue Sep 6 17:42:50 PDT 2011
Hi,

I've built a small pipeline:

v4l2src ! {caps filter} ! ffmpegcolorspace ! jpegenc ! avimux ! tee
    +-> queue2 ! filesink (file1.avi)
    +-> queue2 ! filesink (file2.avi)

Works great! I get identical copies of my webcam recorded to two files.
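For reference, the tee branches are wired up with request pads, something like this (a minimal sketch; element names match the full listing below, the elements are assumed to already be created and added to the bin, error checking omitted):

GstElement *tee = gst_element_factory_make("tee", "tee");
gst_element_link(muxer, tee);

/* tee exposes request pads named "src%d" in 0.10, one per branch */
GstPad *tee_src1 = gst_element_get_request_pad(tee, "src%d");
GstPad *q1_sink = gst_element_get_static_pad(q1, "sink");
gst_pad_link(tee_src1, q1_sink);
gst_object_unref(q1_sink);

GstPad *tee_src2 = gst_element_get_request_pad(tee, "src%d");
GstPad *q2_sink = gst_element_get_static_pad(q2, "sink");
gst_pad_link(tee_src2, q2_sink);
gst_object_unref(q2_sink);

gst_element_link(q1, sink1);
gst_element_link(q2, sink2);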
But what I really need is to replace "tee" with "output-selector", like this:

v4l2src ! {caps filter} ! ffmpegcolorspace ! jpegenc ! avimux ! output-selector
    +-> queue2 ! filesink (file1.avi)
    +-> queue2 ! filesink (file2.avi)
I also added:
GstPad* pad = gst_element_get_pad( selector, "sink" );
g_object_set(G_OBJECT(selector), "active-pad", pad, NULL);
However, when I change to the output-selector I get no errors, but also no
output at all to the two files.
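Maybe "active-pad" is supposed to point at one of the selector's requested source pads instead of its sink pad? A minimal, untested sketch of that variant (element and pad names as in the full code below):

/* request the selector's source pads ("src%d" template in 0.10) and make
   the first one active; assumption: active-pad wants a source pad */
GstPad *sel_src1 = gst_element_get_request_pad(selector, "src%d");
GstPad *sel_src2 = gst_element_get_request_pad(selector, "src%d");
g_object_set(G_OBJECT(selector), "active-pad", sel_src1, NULL);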
Anyone know what I'm missing? Can anyone suggest a good source for sample
code demonstrating how to wire up an output-selector element?
Complete code below... it is directly derivative of the hello world app.
Thanks,
Garth Tissington
Complete code
=========================
#include <gst/gst.h>
#include <glib.h>
static gboolean
bus_call(GstBus *bus,
GstMessage *msg,
gpointer data) {
GMainLoop *loop = (GMainLoop *) data;
switch (GST_MESSAGE_TYPE(msg)) {
case GST_MESSAGE_EOS:
g_print("End of stream\n");
g_main_loop_quit(loop);
break;
case GST_MESSAGE_ERROR:
{
gchar *debug;
GError *error;
gst_message_parse_error(msg, &error, &debug);
g_free(debug);
g_printerr("Error: %s\n", error->message);
g_error_free(error);
g_main_loop_quit(loop);
break;
}
default:
break;
}
return TRUE;
}
static void
on_pad_added(GstElement *element,
GstPad *pad,
gpointer data) {
GstPad *sinkpad;
GstElement *decoder = (GstElement *) data;
/* We can now link this pad with the vorbis-decoder sink pad */
g_print("Dynamic pad created, linking demuxer/decoder\n");
sinkpad = gst_element_get_static_pad(decoder, "sink");
gst_pad_link(pad, sinkpad);
gst_object_unref(sinkpad);
}
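/* Leftover from the hello-world example: the "pad-added" hookup below is
   commented out, so this callback is never used in this pipeline. */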
GstElement* CreateDvrBin() {
GstElement *pipeline, *source, *colorspace, *muxer, *encoder, *selector,
*sink1, *sink2, *q1, *q2 ;
source = gst_element_factory_make("v4l2src", "source");
muxer = gst_element_factory_make("avimux", "muxer");
encoder = gst_element_factory_make("jpegenc", "encoder");
colorspace = gst_element_factory_make("ffmpegcolorspace", "colorspace");
q1 = gst_element_factory_make("queue2", "q1");
q2 = gst_element_factory_make("queue2", "q2");
sink1 = gst_element_factory_make("filesink", "sink1");
sink2 = gst_element_factory_make("filesink", "sink2");
selector = gst_element_factory_make("output-selector", "selector");
/* Set up the pipeline first so the sanity check below covers it too */
pipeline = gst_pipeline_new("DVR");
if (!pipeline || !source || !colorspace || !encoder || !muxer || !sink1
|| !sink2 || !selector || !q1 || !q2 ) {
g_printerr("One element could not be created. Exiting.\n");
return NULL;
}
g_object_set(G_OBJECT(sink1), "location", "/home/garth/a1.avi", NULL);
g_object_set(G_OBJECT(sink2), "location", "/home/garth/a2.avi", NULL);
/* we add all elements into the pipeline */
/* v4l2src | ffmpegcolorspace | jpegenc | avimux | output-selector | queue2 ! filesink (x2) */
GstCaps *caps;
caps = gst_caps_new_simple("video/x-raw-yuv",
// "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC('I', '4', '2', '0'),
"width", G_TYPE_INT, 800,
"height", G_TYPE_INT, 600,
"framerate", GST_TYPE_FRACTION, 30, 1,
NULL);
gst_bin_add_many(GST_BIN(pipeline),
source, colorspace, encoder, muxer, selector, q1, q2, sink1,
sink2, NULL);
gst_element_link_filtered(source, colorspace, caps);
gst_caps_unref(caps);
gst_element_link(colorspace, encoder);
gst_element_link(encoder, muxer);
gst_element_link(muxer, selector);
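/* output-selector pushes buffers only to whichever of its source pads is
   set as "active-pad"; the other requested pad gets no data until it is
   made active */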
GstPad* pad1 = gst_element_get_request_pad(selector, "src%d");
gchar *padName1 = gst_pad_get_name(pad1);
g_print("Pad 1: %s\n", padName1);
GstPad* pad2 = gst_element_get_request_pad(selector, "src%d");
gchar *padName2 = gst_pad_get_name(pad2);
g_print("Pad 2: %s\n", padName2);
gst_element_link_pads(selector, padName1, q1, "sink");
gst_element_link_pads(selector, padName2, q2, "sink");
g_free(padName1);
g_free(padName2);
gst_element_link(q1, sink1);
gst_element_link(q2, sink2);
return pipeline;
}
//gboolean
//timeout_cb(gpointer data) {
// g_print("Tick\n");
//
//}
int
main(int argc,
char *argv[]) {
// gint m_timer =
// g_timeout_add(2000, timeout_cb, NULL);
GMainLoop *loop;
GstBus *bus;
GstElement *pipeline;
/* Initialization */
gst_init(&argc, &argv);
loop = g_main_loop_new(NULL, FALSE);
/* Create gstreamer elements */
pipeline = CreateDvrBin();
if (!pipeline)
return -1;
/* we add a message handler */
// bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
// gst_bus_add_watch (bus, bus_call, loop);
// gst_object_unref (bus);
//g_signal_connect (demuxer, "pad-added", G_CALLBACK (on_pad_added), decoder);
/* note that the demuxer will be linked to the decoder dynamically.
The reason is that Ogg may contain various streams (for example
audio and video). The source pad(s) will be created at run time,
by the demuxer when it detects the amount and nature of streams.
Therefore we connect a callback function which will be executed
when the "pad-added" is emitted.*/
/* Set the pipeline to "playing" state*/
g_print("Now playing: %s\n", argv[1]);
gst_element_set_state(pipeline, GST_STATE_PLAYING);
/* Iterate */
g_print("Running...\n");
g_main_loop_run(loop);
/* Out of the main loop, clean up nicely */
g_print("Returned, stopping playback\n");
gst_element_set_state(pipeline, GST_STATE_NULL);
g_print("Deleting pipeline\n");
gst_object_unref(GST_OBJECT(pipeline));
return 0;
}