Video and Audio streaming
clust3r
luads at email.it
Wed Feb 22 06:21:35 PST 2012
Hi all, I'm a n00b in Gstreamer devel..
I wrote a very simple program that creates separate pipelines in order to
transmit audio and video from a video source and an audio source.
Now I need to merge and synchronize audio and video into one pipeline, but I
have no idea how to do this... please help..!! :-)
<code>
#include <gst/gst.h>
#include <stdbool.h>
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
static GMainLoop *loop;
/*
 * Bus watch callback shared by every pipeline in this program.
 *
 * Quits the global main loop on end-of-stream or on an error message.
 * Returns TRUE so the watch stays installed for further messages.
 */
static gboolean bus_call(GstBus *bus, GstMessage *msg, void *user_data)
{
	switch (GST_MESSAGE_TYPE(msg)) {
	case GST_MESSAGE_EOS:
		g_message("End-of-stream");
		g_main_loop_quit(loop);
		break;
	case GST_MESSAGE_ERROR: {
		GError *err = NULL;

		gst_message_parse_error(msg, &err, NULL);
		/*
		 * Bug fix: g_error() calls abort(), so the original
		 * g_error_free()/g_main_loop_quit() below it were dead code.
		 * Report the error non-fatally and shut down cleanly instead.
		 */
		g_printerr("Bus error: %s\n", err ? err->message : "unknown");
		g_clear_error(&err);
		g_main_loop_quit(loop);
		break;
	}
	default:
		/* Ignore all other message types (state changes, tags, ...). */
		break;
	}
	return TRUE;
}
/*
 * Play a single URI with playbin, blocking until EOS or error.
 *
 * @uri  URI to play (e.g. "file:///path/movie.ogg"); may be NULL, in
 *       which case playbin is started without a source set.
 */
static void play_uri(const char *uri)
{
	GstElement *pipeline;
	GstBus *bus;

	loop = g_main_loop_new(NULL, FALSE);
	pipeline = gst_element_factory_make("playbin", "player");
	/* Bug fix: a missing playbin plugin returned NULL and the
	 * g_object_set() below dereferenced it. */
	if (!pipeline) {
		g_printerr("Failed to create 'playbin' element; "
			   "check your GStreamer installation\n");
		g_main_loop_unref(loop);
		return;
	}
	if (uri)
		g_object_set(G_OBJECT(pipeline), "uri", uri, NULL);

	bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
	gst_bus_add_watch(bus, bus_call, NULL);
	gst_object_unref(bus);

	gst_element_set_state(pipeline, GST_STATE_PLAYING);
	g_main_loop_run(loop);

	gst_element_set_state(pipeline, GST_STATE_NULL);
	gst_object_unref(pipeline);
	/* Bug fix: the GMainLoop was leaked on every call. */
	g_main_loop_unref(loop);
}
/*
 * Build and run three independent pipelines until EOS/error:
 *
 *   video TX:  mfw_v4lsrc ! mfw_vpuencoder (H.264) ! rtph264pay ! udpsink:5500
 *   audio TX:  alsasrc ! mulawenc ! rtppcmupay ! udpsink:5600
 *   audio RX:  udpsrc:5600 (PCMU caps) ! rtppcmudepay ! mulawdec ! alsasink
 *
 * NOTE(review): the pipelines are independent, so audio and video are
 * NOT synchronized; to lip-sync them they should share one pipeline and
 * an rtpbin element (see gst-plugins-good rtpbin examples).
 */
static void stream(void)
{
	GstElement *video_pipeline, *video_source, *video_encoder, *video_payload,
	    *video_sink;
	GstElement *audio_pipeline_tx, *audio_source, *audio_encoder,
	    *audio_payload, *audio_tx;
	GstElement *audio_pipeline_rx, *audio_rx, *audio_depayload, *audio_decoder,
	    *audio_sink;
	GstBus *video_bus;
	GstBus *audio_tx_bus, *audio_rx_bus;
	GstCaps *rx_caps;

	loop = g_main_loop_new(NULL, FALSE);

	/*
	 * VIDEO PIPELINE (transmit)
	 */
	video_pipeline = gst_pipeline_new("test-video-stream");
	g_assert(video_pipeline);
	video_source = gst_element_factory_make("mfw_v4lsrc", "video_source");
	video_encoder = gst_element_factory_make("mfw_vpuencoder",
						 "video_encoder");
	video_payload = gst_element_factory_make("rtph264pay", "video_payload");
	video_sink = gst_element_factory_make("udpsink", "video_sink");
	/* Bug fix: element creation was never checked; a missing plugin
	 * (these Freescale elements are board-specific) crashed in
	 * g_object_set() below. */
	g_assert(video_source && video_encoder && video_payload && video_sink);

	/* 352x288 = CIF; encoder codec-type 2 selects H.264 on the VPU. */
	g_object_set(G_OBJECT(video_source), "capture-width", 352, NULL);
	g_object_set(G_OBJECT(video_source), "capture-height", 288, NULL);
	g_object_set(G_OBJECT(video_encoder), "codec-type", 2, NULL);
	g_object_set(G_OBJECT(video_encoder), "width", 352, NULL);
	g_object_set(G_OBJECT(video_encoder), "height", 288, NULL);
	g_object_set(G_OBJECT(video_encoder), "loopback", FALSE, NULL);
	g_object_set(G_OBJECT(video_sink), "host", "192.168.3.140", NULL);
	g_object_set(G_OBJECT(video_sink), "port", 5500, NULL);

	video_bus = gst_pipeline_get_bus(GST_PIPELINE(video_pipeline));
	gst_bus_add_watch(video_bus, bus_call, NULL);
	gst_object_unref(video_bus);

	gst_bin_add_many(GST_BIN(video_pipeline), video_source, video_encoder,
			 video_payload, video_sink, NULL);
	/* Bug fix: link failures (caps mismatch) were silently ignored. */
	if (!gst_element_link_many(video_source, video_encoder, video_payload,
				   video_sink, NULL))
		g_warning("Failed to link the video TX pipeline");

	/*
	 * AUDIO PIPELINE (transmit)
	 */
	audio_pipeline_tx = gst_pipeline_new("audio_tx");
	g_assert(audio_pipeline_tx);
	audio_source = gst_element_factory_make("alsasrc", "audio_source");
	audio_encoder = gst_element_factory_make("mulawenc", "audio_encoder");
	audio_payload = gst_element_factory_make("rtppcmupay", "audio_payload");
	audio_tx = gst_element_factory_make("udpsink", "audio_tx");
	g_assert(audio_source && audio_encoder && audio_payload && audio_tx);

	g_object_set(G_OBJECT(audio_tx), "host", "192.168.3.140", NULL);
	g_object_set(G_OBJECT(audio_tx), "port", 5600, NULL);

	audio_tx_bus = gst_pipeline_get_bus(GST_PIPELINE(audio_pipeline_tx));
	gst_bus_add_watch(audio_tx_bus, bus_call, NULL);
	gst_object_unref(audio_tx_bus);

	gst_bin_add_many(GST_BIN(audio_pipeline_tx), audio_source, audio_encoder,
			 audio_payload, audio_tx, NULL);
	if (!gst_element_link_many(audio_source, audio_encoder, audio_payload,
				   audio_tx, NULL))
		g_warning("Failed to link the audio TX pipeline");

	/*
	 * AUDIO PIPELINE (receive)
	 */
	audio_pipeline_rx = gst_pipeline_new("audio_rx");
	g_assert(audio_pipeline_rx);
	audio_rx = gst_element_factory_make("udpsrc", "audio_receive");
	audio_depayload = gst_element_factory_make("rtppcmudepay",
						   "audio_depayload");
	audio_decoder = gst_element_factory_make("mulawdec", "audio_decoder");
	audio_sink = gst_element_factory_make("alsasink", "audio_sink");
	g_assert(audio_rx && audio_depayload && audio_decoder && audio_sink);

	/* udpsrc cannot guess the RTP payload format; describe it explicitly. */
	rx_caps = gst_caps_new_simple("application/x-rtp",
				      "media", G_TYPE_STRING, "audio",
				      "clock-rate", G_TYPE_INT, 8000,
				      "encoding-name", G_TYPE_STRING, "PCMU",
				      NULL);
	g_object_set(G_OBJECT(audio_rx), "port", 5600, NULL);
	g_object_set(G_OBJECT(audio_rx), "caps", rx_caps, NULL);
	gst_caps_unref(rx_caps);

	audio_rx_bus = gst_pipeline_get_bus(GST_PIPELINE(audio_pipeline_rx));
	gst_bus_add_watch(audio_rx_bus, bus_call, NULL);
	gst_object_unref(audio_rx_bus);

	gst_bin_add_many(GST_BIN(audio_pipeline_rx), audio_rx, audio_depayload,
			 audio_decoder, audio_sink, NULL);
	if (!gst_element_link_many(audio_rx, audio_depayload, audio_decoder,
				   audio_sink, NULL))
		g_warning("Failed to link the audio RX pipeline");

	/*
	 * Bug fix: the original "TEST" section referenced undeclared
	 * variables (srcpad, sinkpad, lres, rtpsrc, rtpbin) pasted from an
	 * rtpbin example, so the file did not compile.  It has been removed;
	 * an rtpbin-based setup is the right place to add A/V sync later.
	 */

	/*
	 * PLAY
	 */
	gst_element_set_state(video_pipeline, GST_STATE_PLAYING);
	gst_element_set_state(audio_pipeline_tx, GST_STATE_PLAYING);
	gst_element_set_state(audio_pipeline_rx, GST_STATE_PLAYING);
	g_main_loop_run(loop);

	gst_element_set_state(video_pipeline, GST_STATE_NULL);
	gst_object_unref(video_pipeline);
	gst_element_set_state(audio_pipeline_tx, GST_STATE_NULL);
	gst_object_unref(audio_pipeline_tx);
	gst_element_set_state(audio_pipeline_rx, GST_STATE_NULL);
	gst_object_unref(audio_pipeline_rx);
	/* Bug fix: the GMainLoop was leaked. */
	g_main_loop_unref(loop);
}
/*
 * Entry point: initialize GStreamer (consumes its own command-line
 * options from argv) and run the streaming pipelines.
 */
int main(int argc, char *argv[])
{
	gst_init(&argc, &argv);

	stream();
	/* Alternative demo path, kept for reference: */
	//play_uri(argv[1]);

	return 0;
}
</code>
--
View this message in context: http://gstreamer-devel.966125.n4.nabble.com/Video-and-Audio-streaming-tp4410471p4410471.html
Sent from the GStreamer-devel mailing list archive at Nabble.com.
More information about the gstreamer-devel
mailing list