RTSP client application with source, audio and video bins
rajvik
kamdar.rajvi at gmail.com
Wed Jan 18 11:07:44 UTC 2017
Hi,
I am not able to link the pads between the source, the audio bin and the video bin. Following
is the pipeline I want to convert into an application:
gst-launch-1.0 rtspsrc location="rtsp:<filepath>" latency=0 name=demux \
  demux. ! queue ! rtpmp4gdepay ! aacparse ! avdec_aac ! audioconvert ! audioresample ! autoaudiosink \
  demux. ! queue ! rtph264depay ! h264parse ! omxh264dec ! videoconvert ! videoscale ! video/x-raw,width=176,height=144 ! ximagesink
Following is the code I have implemented so far:
#include <gst/gst.h>

static void onPadAdded (GstElement *element, GstPad *pad, gpointer data)
{
    GstElement *decoder;

    decoder = GST_ELEMENT (data);
    g_debug ("Linking audio pad to depay ");
    GstPad *targetsink = gst_element_get_static_pad (decoder, "audiosink");
    gst_pad_link (pad, targetsink);
    gst_object_unref (targetsink);
}
static void on_pad_added (GstElement *element, GstPad *pad, gpointer data)
{
    GstElement *decoder;

    decoder = GST_ELEMENT (data);
    g_debug ("Linking video pad to depay ");
    GstPad *targetsink = gst_element_get_static_pad (decoder, "videosink");
    gst_pad_link (pad, targetsink);
    gst_object_unref (targetsink);
}
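
/*
 * Sketch, not from the original post: instead of two separate callbacks that
 * look up an "audiosink"/"videosink" pad (a queue only has pads named "sink"
 * and "src"), a single pad-added handler can inspect the "media" field of the
 * rtspsrc pad caps and link the pad to the matching bin. It assumes the
 * pipeline is passed as user data and that each bin exposes a ghost pad named
 * "sink" (see the ghost-pad sketches inside main() below); the handler name
 * on_rtspsrc_pad_added is made up for this example.
 */
static void on_rtspsrc_pad_added (GstElement *src, GstPad *pad, gpointer data)
{
    GstBin *top = GST_BIN (data);   /* the top-level pipeline */
    GstCaps *caps = gst_pad_get_current_caps (pad);
    GstStructure *s;
    const gchar *media;
    GstElement *bin;

    if (caps == NULL)
        caps = gst_pad_query_caps (pad, NULL);
    s = gst_caps_get_structure (caps, 0);
    media = gst_structure_get_string (s, "media");   /* "audio" or "video" */

    bin = gst_bin_get_by_name (top,
        g_strcmp0 (media, "audio") == 0 ? "audiobin" : "videobin");
    if (bin != NULL)
    {
        GstPad *sinkpad = gst_element_get_static_pad (bin, "sink");

        if (sinkpad != NULL && !gst_pad_is_linked (sinkpad) &&
            gst_pad_link (pad, sinkpad) != GST_PAD_LINK_OK)
            g_printerr ("Failed to link rtspsrc pad for %s \n", media);
        if (sinkpad != NULL)
            gst_object_unref (sinkpad);
        gst_object_unref (bin);
    }
    gst_caps_unref (caps);
}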
int main (int argc, char *argv[])
{
    GstElement *source, *audio, *video, *convert, *pipeline,
        *audioDepay, *audioQueue, *videoQueue,
        *audioParse, *audioDecode, *audioConvert, *audioResample,
        *audioSink, *videoDepay, *videoParser, *videoDecode, *videoConvert,
        *videoScale, *videoSink;
    GstCaps *capsFilter;
    GstBus *bus;
    GstMessage *msg;
    GstPad *pad;
    GstPad *sinkpad, *ghost_sinkpad;
    gboolean link_ok;
    GstStateChangeReturn ret;

    /* Initialize GStreamer */
    gst_init (&argc, &argv);
    /* Create elements */
    pipeline = gst_pipeline_new ("rtsp-pipeline");
    source = gst_element_factory_make ("rtspsrc", "source");

    /* audio bin */
    audio = gst_bin_new ("audiobin");
    audioQueue = gst_element_factory_make ("queue", "audio-queue");
    audioDepay = gst_element_factory_make ("rtpmp4gdepay", "audio-depayer");
    audioParse = gst_element_factory_make ("aacparse", "audio-parser");
    audioDecode = gst_element_factory_make ("avdec_aac", "audio-decoder");
    audioConvert = gst_element_factory_make ("audioconvert", "aconv");
    audioResample = gst_element_factory_make ("audioresample", "audio-resample");
    audioSink = gst_element_factory_make ("autoaudiosink", "audiosink");
    if (!audioQueue || !audioDepay || !audioParse || !audioDecode ||
        !audioConvert || !audioResample || !audioSink)
    {
        g_printerr ("Cannot create audio elements \n");
        return 0;
    }

    g_object_set (source, "location", "rtsp://<file path>", NULL);
    g_object_set (source, "latency", 0, NULL);
    g_object_set (source, "name", "demux", NULL);
#if 0
    gst_bin_add_many (GST_BIN (pipeline), source, audioQueue, audioDepay,
        audioParse, audioDecode, audioConvert, audioResample, audioSink, NULL);
    if (!gst_element_link_many (audioQueue, audioDepay, audioParse,
        audioDecode, audioConvert, audioResample, audioSink, NULL))
    {
        g_printerr ("Error linking fields ...1 \n");
        return 0;
    }
#endif
    /* video bin */
    video = gst_bin_new ("videobin");
    videoQueue = gst_element_factory_make ("queue", "video-queue");
    videoDepay = gst_element_factory_make ("rtph264depay", "video-depayer");
    videoParser = gst_element_factory_make ("h264parse", "video-parser");
    videoDecode = gst_element_factory_make ("omxh264dec", "video-decoder");
    videoConvert = gst_element_factory_make ("videoconvert", "convert");
    videoScale = gst_element_factory_make ("videoscale", "video-scale");
    videoSink = gst_element_factory_make ("ximagesink", "video-sink");
    capsFilter = gst_caps_new_simple ("video/x-raw",
        "width", G_TYPE_INT, 176,
        "height", G_TYPE_INT, 144,
        NULL);
    if (!videoQueue || !videoDepay || !videoParser || !videoDecode ||
        !videoConvert || !videoScale || !videoSink || !capsFilter)
    {
        g_printerr ("Cannot create video elements \n");
        return 0;
    }

    /* gst_bin_add_many (GST_BIN (pipeline), source,
        audioQueue, audioDepay, audioParse,
        audioDecode, audioConvert, audioResample, audioSink,
        videoQueue, videoDepay, videoParser, videoDecode,
        videoConvert, videoScale, videoSink, NULL);
    */
    gst_bin_add_many (GST_BIN (audio),
        audioQueue, audioDepay, audioParse,
        audioDecode, audioConvert, audioResample, audioSink, NULL);
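
    /*
     * Sketch (not in the original post): for a pad-added callback to link
     * rtspsrc into the audio bin, the bin has to expose the queue's sink pad
     * as a ghost pad, and the queue still has to be linked to the depayloader
     * (the original code only attempts that link inside an #if 0 block below).
     */
    if (!gst_element_link (audioQueue, audioDepay))
    {
        g_printerr ("Cannot link audioQueue and audioDepay \n");
        return 0;
    }
    {
        GstPad *queue_sink = gst_element_get_static_pad (audioQueue, "sink");
        GstPad *audio_ghost = gst_ghost_pad_new ("sink", queue_sink);

        gst_pad_set_active (audio_ghost, TRUE);
        gst_element_add_pad (audio, audio_ghost);
        gst_object_unref (queue_sink);
    }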
    // g_signal_connect (G_OBJECT (source), "pad-added",
    //     G_CALLBACK (onPadAdded), videoDepay);
    /* set property value */
#if 0
    if (!gst_element_link (source, audioQueue))
    {
        g_printerr ("Cannot link source and audioQueue \n");
        return 0;
    }
    if (!gst_element_link (audioQueue, audioDepay))
    {
        g_printerr ("Cannot link audioQueue and audioDepay \n");
        return 0;
    }
#endif
    if (!gst_element_link (audioDepay, audioParse))
    {
        g_printerr ("Cannot link audioDepay and audioParse \n");
        return 0;
    }
    if (!gst_element_link (audioParse, audioDecode))
    {
        g_printerr ("Cannot link audioParse and audioDecode \n");
        return 0;
    }
    if (!gst_element_link (audioDecode, audioConvert))
    {
        g_printerr ("Cannot link audioDecode and audioConvert \n");
        return 0;
    }
    if (!gst_element_link (audioConvert, audioResample))
    {
        g_printerr ("Cannot link audioConvert and audioResample \n");
        return 0;
    }
    if (!gst_element_link (audioResample, audioSink))
    {
        g_printerr ("Cannot link audioResample and audioSink \n");
        return 0;
    }

    g_signal_connect (G_OBJECT (source), "pad-added",
        G_CALLBACK (onPadAdded), audioQueue);
#if 0
    if (!gst_element_link (source, videoQueue))
    {
        g_printerr ("Cannot link source and videoQueue \n");
        return 0;
    }
#endif
    if (!gst_element_link (videoDepay, videoParser))
    {
        g_printerr ("Cannot link videoDepay and videoParser \n");
        return 0;
    }
    if (!gst_element_link (videoParser, videoDecode))
    {
        g_printerr ("Cannot link videoParser and videoDecode \n");
        return 0;
    }
    if (!gst_element_link (videoDecode, videoConvert))
    {
        g_printerr ("Cannot link videoDecode and videoConvert \n");
        return 0;
    }

    g_signal_connect (G_OBJECT (source), "pad-added",
        G_CALLBACK (on_pad_added), videoQueue);
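
    /*
     * Sketch (not in the original post): in the code above the video elements
     * are never added to the video bin, and videoQueue, videoConvert,
     * videoScale, capsFilter and videoSink are never linked. One possible way
     * to complete the branch, exposing a ghost "sink" pad like the audio bin:
     */
    gst_bin_add_many (GST_BIN (video), videoQueue, videoDepay, videoParser,
        videoDecode, videoConvert, videoScale, videoSink, NULL);
    if (!gst_element_link (videoQueue, videoDepay))
    {
        g_printerr ("Cannot link videoQueue and videoDepay \n");
        return 0;
    }
    if (!gst_element_link (videoConvert, videoScale))
    {
        g_printerr ("Cannot link videoConvert and videoScale \n");
        return 0;
    }
    /* gst_element_link_filtered() enforces the 176x144 caps between
     * videoscale and ximagesink, in place of an explicit capsfilter element */
    if (!gst_element_link_filtered (videoScale, videoSink, capsFilter))
    {
        g_printerr ("Cannot link videoScale and videoSink with caps \n");
        return 0;
    }
    gst_caps_unref (capsFilter);
    {
        GstPad *vqueue_sink = gst_element_get_static_pad (videoQueue, "sink");
        GstPad *video_ghost = gst_ghost_pad_new ("sink", vqueue_sink);

        gst_pad_set_active (video_ghost, TRUE);
        gst_element_add_pad (video, video_ghost);
        gst_object_unref (vqueue_sink);
    }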
    gst_bin_add_many (GST_BIN (pipeline), source, audio, video, NULL);
#if 0
    sinkpad = gst_element_get_static_pad (videoConvert, "sink");
    ghost_sinkpad = gst_ghost_pad_new ("sink", sinkpad);
    gst_pad_set_active (ghost_sinkpad, TRUE);
    gst_element_add_pad (video, ghost_sinkpad);
    if (!gst_element_link_many (videoQueue, videoDepay, videoParser,
        videoDecode, videoScale, NULL))
    {
        g_printerr ("Error linking fields... 2 \n");
        return 0;
    }
    gst_bin_add_many (GST_BIN (pipeline), video, NULL);
#endif
    /* Start playing */
    ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
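    /* Not in the original post: use the declared `ret` to report an immediate
     * state-change failure instead of blocking on the bus below with no
     * diagnostics. */
    if (ret == GST_STATE_CHANGE_FAILURE)
    {
        g_printerr ("Unable to set the pipeline to the PLAYING state \n");
        gst_object_unref (pipeline);
        return 0;
    }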
    /* Wait until error or EOS */
    bus = gst_element_get_bus (pipeline);
    msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
        GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
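
    /* Not in the original post: print the error message and debug string so
     * linking/negotiation failures show up on the console. */
    if (msg != NULL && GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ERROR)
    {
        GError *err = NULL;
        gchar *dbg = NULL;

        gst_message_parse_error (msg, &err, &dbg);
        g_printerr ("Error from %s: %s \n", GST_OBJECT_NAME (msg->src), err->message);
        g_printerr ("Debug info: %s \n", dbg ? dbg : "none");
        g_error_free (err);
        g_free (dbg);
    }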
    /* Free resources */
    if (msg != NULL)
        gst_message_unref (msg);
    gst_object_unref (bus);
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);

    return 0;
}