Convert pipeline to C code to play .mov video
tnewman
newmantye at gmail.com
Tue Mar 10 11:28:24 PDT 2015
Hi,
I know that what I have pasted below is not exactly what you are looking for, but it should fill in some blanks. The snippets below build a playback pipeline (albeit using avidemux; for an actual .mov you would use qtdemux instead) and connect the pad-added callback that links the demuxer's dynamic audio and video pads as they appear.
Hope this helps.
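For reference, the code assumes a global pipeline struct and a few globals whose definitions I have not pasted. Roughly, they would need to look something like the following (a sketch only; the exact names of the struct type and enum values are my guess, the element fields are the ones the code uses):

/* Sketch of the assumed globals - not part of the pasted code */
typedef struct
{
    GstElement *pipeline;
    GstElement *source;
    GstElement *demux;
    GstElement *audioqueue;
    GstElement *audio_sink;
    GstElement *videoqueue;
    GstElement *jpegdecode;
    GstElement *colorspaceconv;
    GstElement *video_sink;
} PlaybackPipeline;

static PlaybackPipeline g_gst_PlaybackPipeline;
static GstBus    *g_VideoPlayback_bus  = NULL;
static GMainLoop *g_VideoPlayback_loop = NULL;

typedef enum { eGSTStateNull, eGSTStatePlaying } eGstPipelineState;
static eGstPipelineState g_GstreamerVideoPlaybackPipelineState = eGSTStateNull;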
void dynamic_addpad(GstElement *element, GstPad *pad, gpointer data)
{
    /* Link each of the demuxer's dynamic pads to the matching queue. */
    gchar *pad_name = gst_pad_get_name(pad);
    g_print(" In dynamic ADDING PAD %s\n", pad_name);

    if (g_str_has_prefix(pad_name, "audio")) {
        GstPad *audiodemuxsink =
            gst_element_get_static_pad(g_gst_PlaybackPipeline.audioqueue, "sink");
        gst_pad_link(pad, audiodemuxsink);
        gst_object_unref(audiodemuxsink);
    } else if (g_str_has_prefix(pad_name, "video")) {
        GstPad *videodemuxsink =
            gst_element_get_static_pad(g_gst_PlaybackPipeline.videoqueue, "sink");
        gst_pad_link(pad, videodemuxsink);
        gst_object_unref(videodemuxsink);
    }
    g_free(pad_name);
}
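The VideoPlayback_bus_call handler used further down is also not shown; assuming it just watches for EOS and errors and quits the main loop, a minimal version could look like this (again, only a sketch of what mine does):

static gboolean VideoPlayback_bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
    GMainLoop *loop = (GMainLoop *) data;

    switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
        g_print ("Video Playback Pipeline: [EOS]\n");
        g_main_loop_quit (loop);
        break;
    case GST_MESSAGE_ERROR: {
        GError *err = NULL;
        gchar *debug = NULL;
        gst_message_parse_error (msg, &err, &debug);
        g_printerr ("Error: %s\n", err->message);
        g_error_free (err);
        g_free (debug);
        g_main_loop_quit (loop);
        break;
    }
    default:
        break;
    }
    return TRUE;  /* keep the bus watch installed */
}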
void gst_VideoPlayback(void)
{
    GstCaps *videoCaps_rgb = gst_caps_new_simple ("video/x-raw-rgb", NULL);

    /* Create gstreamer elements */
    g_gst_PlaybackPipeline.pipeline       = gst_pipeline_new ("pipeline");
    g_gst_PlaybackPipeline.source         = gst_element_factory_make ("filesrc", "source");
    g_gst_PlaybackPipeline.demux          = gst_element_factory_make ("avidemux", "demux");
    g_gst_PlaybackPipeline.audioqueue     = gst_element_factory_make ("queue", "qzero");
    g_gst_PlaybackPipeline.audio_sink     = gst_element_factory_make ("alsasink", "audioSink");
    g_gst_PlaybackPipeline.videoqueue     = gst_element_factory_make ("queue", "qone");
    g_gst_PlaybackPipeline.jpegdecode     = gst_element_factory_make ("jpegdec", "jpegdecode");
    g_gst_PlaybackPipeline.colorspaceconv = gst_element_factory_make ("ffmpegcolorspace", "rgb-colorconv");
#ifndef APP_SINK
    g_gst_PlaybackPipeline.video_sink     = gst_element_factory_make ("fakesink", "videosink");
#else
    g_gst_PlaybackPipeline.video_sink     = gst_element_factory_make ("appsink", "videosink");
#endif
    if (!g_gst_PlaybackPipeline.pipeline || !g_gst_PlaybackPipeline.source ||
        !g_gst_PlaybackPipeline.demux || !g_gst_PlaybackPipeline.audioqueue ||
        !g_gst_PlaybackPipeline.audio_sink || !g_gst_PlaybackPipeline.videoqueue ||
        !g_gst_PlaybackPipeline.jpegdecode || !g_gst_PlaybackPipeline.colorspaceconv ||
        !g_gst_PlaybackPipeline.video_sink)
    {
        g_printerr ("One or more elements could not be created. Exiting.\n");
        return;
    }
    /* Set element properties */
    g_object_set (G_OBJECT (g_gst_PlaybackPipeline.video_sink), "sync", FALSE, NULL);
    g_object_set (G_OBJECT (g_gst_PlaybackPipeline.audio_sink), "sync", FALSE, NULL);
    g_object_set (G_OBJECT (g_gst_PlaybackPipeline.audioqueue), "max-size-buffers", 0, NULL);
    g_object_set (G_OBJECT (g_gst_PlaybackPipeline.videoqueue), "max-size-buffers", 0, NULL);

    /* Link the demuxer's dynamic pads when they appear */
    g_signal_connect (g_gst_PlaybackPipeline.demux, "pad-added",
        G_CALLBACK (dynamic_addpad), NULL);

#ifndef APP_SINK
    /* add signals for callback functions; fakesink only emits "handoff"
     * when signal-handoffs is enabled */
    g_object_set (G_OBJECT (g_gst_PlaybackPipeline.video_sink),
        "signal-handoffs", TRUE, NULL);
    g_signal_connect (g_gst_PlaybackPipeline.video_sink, "handoff",
        G_CALLBACK (fileplayback_on_handoff), NULL);
#else
    /* Configure appsink */
    g_object_set (g_gst_PlaybackPipeline.video_sink, "emit-signals", TRUE, NULL);
    g_signal_connect (g_gst_PlaybackPipeline.video_sink, "new-buffer",
        G_CALLBACK (fileplayback_new_buffer), &g_gst_PlaybackPipeline);
#endif
    /* Add a message handler on the pipeline bus */
    g_VideoPlayback_bus = gst_pipeline_get_bus (GST_PIPELINE (g_gst_PlaybackPipeline.pipeline));
    gst_bus_add_watch (g_VideoPlayback_bus, VideoPlayback_bus_call, g_VideoPlayback_loop);
    gst_object_unref (g_VideoPlayback_bus);

    /* Add all elements to the pipeline */
    gst_bin_add_many (GST_BIN (g_gst_PlaybackPipeline.pipeline),
        g_gst_PlaybackPipeline.source, g_gst_PlaybackPipeline.demux,
        g_gst_PlaybackPipeline.audioqueue, g_gst_PlaybackPipeline.audio_sink,
        g_gst_PlaybackPipeline.videoqueue, g_gst_PlaybackPipeline.jpegdecode,
        g_gst_PlaybackPipeline.colorspaceconv, g_gst_PlaybackPipeline.video_sink,
        NULL);

    /* Constrain the colorspace converter -> video sink link to RGB */
    gst_element_link_filtered (g_gst_PlaybackPipeline.colorspaceconv,
        g_gst_PlaybackPipeline.video_sink, videoCaps_rgb);
    gst_caps_unref (videoCaps_rgb);

    /* Link the static parts of the pipeline; colorspaceconv -> video_sink
     * is already linked above, so the video chain stops at colorspaceconv */
    gst_element_link_many (g_gst_PlaybackPipeline.source,
        g_gst_PlaybackPipeline.demux, NULL);
    gst_element_link_many (g_gst_PlaybackPipeline.audioqueue,
        g_gst_PlaybackPipeline.audio_sink, NULL);
    gst_element_link_many (g_gst_PlaybackPipeline.videoqueue,
        g_gst_PlaybackPipeline.jpegdecode, g_gst_PlaybackPipeline.colorspaceconv,
        NULL);
    /* Leave the pipeline in the NULL state; playback is started elsewhere */
    g_print ("Video Playback Pipeline: [CREATED][NULL]\n");
    gst_element_set_state (g_gst_PlaybackPipeline.pipeline, GST_STATE_NULL);
    g_GstreamerVideoPlaybackPipelineState = eGSTStateNull;
}
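To actually start playback you still need to point the filesrc at a file, take the pipeline to PLAYING and run a main loop. Something along these lines would drive it (a sketch only; the file path is a placeholder, and the fileplayback_* handoff/new-buffer callbacks referenced above are yours to implement):

int main (int argc, char *argv[])
{
    gst_init (&argc, &argv);
    g_VideoPlayback_loop = g_main_loop_new (NULL, FALSE);

    gst_VideoPlayback ();   /* builds the pipeline, leaves it in NULL */

    /* "/path/to/file.avi" is a placeholder - use your own file */
    g_object_set (G_OBJECT (g_gst_PlaybackPipeline.source),
        "location", "/path/to/file.avi", NULL);

    gst_element_set_state (g_gst_PlaybackPipeline.pipeline, GST_STATE_PLAYING);
    g_main_loop_run (g_VideoPlayback_loop);

    /* clean up */
    gst_element_set_state (g_gst_PlaybackPipeline.pipeline, GST_STATE_NULL);
    gst_object_unref (g_gst_PlaybackPipeline.pipeline);
    g_main_loop_unref (g_VideoPlayback_loop);
    return 0;
}

As noted above, avidemux only handles AVI containers, so for a real .mov you would swap in qtdemux and adjust the decoder for whatever codec the file contains.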