Hi,

I am writing a pipeline in C using GStreamer 1.0 that sends audio and receives audio on the same port, but I cannot get it to work: it sends the audio fine, but the receiving part does not work.

Please help with this. What is going wrong here? Any help/pointers appreciated ...
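The core of what I am trying to do is to bind one UDP socket myself and hand the same GSocket to both the sending udpsink and the receiving udpsrc, so that both use local port 7878. A minimal sketch of just that part (share_socket is only an illustrative helper; the fd and element names match my full code below):

#include <gst/gst.h>
#include <gio/gio.h>

/* Sketch: hand one already-bound UDP socket (fd bound to port 7878) to both
 * the sending udpsink and the receiving udpsrc so they share the same port. */
static void share_socket (int artp_sockfd, GstElement *audio_sink, GstElement *recvsource)
{
  GError *error = NULL;
  GSocket *shared = g_socket_new_from_fd (artp_sockfd, &error);

  if (shared == NULL) {
    g_printerr ("g_socket_new_from_fd failed: %s\n", error->message);
    g_clear_error (&error);
    return;
  }

  /* Give both elements the same GSocket and tell them not to close it */
  g_object_set (audio_sink, "socket", shared, "close-socket", FALSE, NULL);
  g_object_set (recvsource, "socket", shared, "close-socket", FALSE, NULL);

  /* The elements take their own references to the socket */
  g_object_unref (shared);
}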
My code is here:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <arpa/inet.h>
#include <gst/gst.h>
#include <gio/gio.h>

/* Structure to contain all our information, so we can pass it to callbacks */
typedef struct _CustomData {
  GstElement *pipeline;
  GstElement *source;
  GstElement *convert;
  GstElement *audio_resample;
  GstElement *audio_encoder;
  GstElement *audio_rtp;
  GstElement *audio_sink;
  GstElement *colorspace;
  GstElement *video_encoder;
  GstElement *video_rtp;
  GstElement *video_sink;
  GstElement *recvsource;
  GstElement *recvdepay;
  GstElement *recvsink;
} CustomData;

/* Handler for the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *pad, CustomData *data);

int main (int argc, char *argv[]) {
  CustomData data;
  GstBus *bus;
  GstMessage *msg;
  GstStateChangeReturn ret;
  gboolean terminate = FALSE;

  /* Initialize GStreamer */
  gst_init (&argc, &argv);

  /* Create the elements */
  data.source = gst_element_factory_make ("uridecodebin", "source");
  data.convert = gst_element_factory_make ("audioconvert", "convert");
  data.audio_resample = gst_element_factory_make ("audioresample", "resample");
  data.audio_encoder = gst_element_factory_make ("mulawenc", "aencoder");
  data.audio_rtp = gst_element_factory_make ("rtppcmupay", "artppay");
  data.audio_sink = gst_element_factory_make ("udpsink", "audio_sink");
  data.colorspace = gst_element_factory_make ("autovideoconvert", "colorspace");
  data.video_encoder = gst_element_factory_make ("avenc_h263p", "vencoder");
  data.video_rtp = gst_element_factory_make ("rtph263ppay", "video_rtp");
  data.video_sink = gst_element_factory_make ("udpsink", "video_sink");
  data.recvsource = gst_element_factory_make ("udpsrc", "recvsrc");
  data.recvdepay = gst_element_factory_make ("rtppcmudepay", "artdepay");
  data.recvsink = gst_element_factory_make ("filesink", "recvsink");
  /* Create the empty pipeline */
  data.pipeline = gst_pipeline_new ("test-pipeline");

  if (!data.pipeline || !data.source || !data.convert || !data.audio_sink || !data.colorspace || !data.video_sink) {
    g_printerr ("Not all elements could be created.\n");
    return -1;
  }

  /* Build the pipeline. Note that we are NOT linking the source at this
   * point. We will do it later. */
  gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.convert, data.audio_resample, data.audio_encoder, data.audio_rtp, data.audio_sink, data.colorspace, data.video_encoder, data.video_rtp, data.video_sink, NULL);
  if (!gst_element_link_many (data.convert, data.audio_resample, data.audio_encoder, data.audio_rtp, data.audio_sink, NULL)) {
    g_printerr ("Audio elements could not be linked.\n");
    gst_object_unref (data.pipeline);
    return -1;
  }
  if (!gst_element_link_many (data.colorspace, data.video_encoder, data.video_rtp, data.video_sink, NULL)) {
    g_printerr ("Video elements could not be linked.\n");
    gst_object_unref (data.pipeline);
    return -1;
  }
  if (!gst_element_link_many (data.recvsource, data.recvdepay, data.recvsink, NULL)) {
    g_printerr ("Receive elements could not be linked.\n");
    gst_object_unref (data.pipeline);
    return -1;
  }

  /* Create and bind a UDP socket on port 7878, to be shared by the
   * audio udpsink and the receiving udpsrc */
  struct sockaddr_in artp_addr;
  memset (&artp_addr, 0, sizeof (struct sockaddr_in));
  int artp_sockfd = socket (AF_INET, SOCK_DGRAM, 0);
  int on = 1;
  setsockopt (artp_sockfd, SOL_SOCKET, SO_REUSEADDR, (const char *) &on, sizeof (on));
  perror ("setsockopt");

  if (artp_sockfd > 0) {
    int res;
    artp_addr.sin_family = AF_INET;
    artp_addr.sin_port = htons (7878);
    artp_addr.sin_addr.s_addr = inet_addr ("192.168.0.227");

    res = bind (artp_sockfd, (struct sockaddr *) &artp_addr, sizeof (artp_addr));
    if (res == 0) {
      printf ("Successfully bound to audio local RTP port 7878 \t sockfd: %d.\n", artp_sockfd);
    } else {
      printf ("Unable to bind to local audio RTP port 7878.\n");
    }
  }

  /* Set the URI to play */
  g_object_set (data.source, "uri", "file:///home/amar/KRSNA.mpg", NULL);

  GstCaps *caps;
  caps = gst_caps_from_string ("application/x-rtp,media=(string)audio,encoding-name=PCMU,payload=0,clock-rate=8000");
g_object_set (data.audio_sink, "port", 3333 , NULL);<br> g_object_set (data.audio_sink, "host", "127.0.0.1" , NULL);<br> GSocket * s = g_socket_new_from_fd(artp_sockfd, NULL);<br>
g_object_set (data.audio_sink, "socket", s , NULL);<br> g_object_set (data.video_sink, "port", 9078 , NULL);<br> g_object_set (data.video_sink, "host", "127.0.0.1" , NULL);<br>
g_object_set (data.recvsource, "caps", caps, NULL);<br> g_object_set (data.recvsource, "socket", s , NULL);<br> g_object_set (data.recvsink, "location", "new.wav", NULL);<br>
GstPad *srcpad, *sinkpad;<br> GstPadLinkReturn lres;<br> g_signal_connect (data.source, "pad-added", G_CALLBACK (pad_added_handler), &data);<br> /* Start playing */<br>
  ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (data.pipeline);
    return -1;
  }

  /* Listen to the bus */
  bus = gst_element_get_bus (data.pipeline);
  do {
    msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
        GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

    /* Parse message */
    if (msg != NULL) {
      GError *err;
      gchar *debug_info;

      switch (GST_MESSAGE_TYPE (msg)) {
        case GST_MESSAGE_ERROR:
          gst_message_parse_error (msg, &err, &debug_info);
          g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
          g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
          g_clear_error (&err);
          g_free (debug_info);
          terminate = TRUE;
          break;
        case GST_MESSAGE_EOS:
          g_print ("End-Of-Stream reached.\n");
          terminate = TRUE;
          break;
        case GST_MESSAGE_STATE_CHANGED:
          /* We are only interested in state-changed messages from the pipeline */
          if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data.pipeline)) {
            GstState old_state, new_state, pending_state;
            gst_message_parse_state_changed (msg, &old_state, &new_state, &pending_state);
            g_print ("Pipeline state changed from %s to %s:\n",
                gst_element_state_get_name (old_state), gst_element_state_get_name (new_state));
          }
          break;
        default:
          /* We should not reach here */
          g_printerr ("Unexpected message received.\n");
          break;
      }
      gst_message_unref (msg);
    }
  } while (!terminate);

  /* Free resources */
  gst_object_unref (bus);
  gst_element_set_state (data.pipeline, GST_STATE_NULL);
  gst_object_unref (data.pipeline);
  return 0;
}

/* This function will be called by the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *data) {
  GstPad *sink_pad_audio = gst_element_get_static_pad (data->convert, "sink");
  GstPad *sink_pad_video = gst_element_get_static_pad (data->colorspace, "sink");
  GstPadLinkReturn ret;
  GstCaps *new_pad_caps = NULL;
  GstStructure *new_pad_struct = NULL;
  const gchar *new_pad_type = NULL;

  g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME (new_pad), GST_ELEMENT_NAME (src));

  /* Check the new pad's type */
  new_pad_caps = gst_pad_query_caps (new_pad, NULL);
  new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
  new_pad_type = gst_structure_get_name (new_pad_struct);

  if (!g_str_has_prefix (new_pad_type, "audio/x-raw")) {
g_print (" It has type '%s' which is raw video. Connecting.\n", new_pad_type);<br> /* Attempt the link */<br> ret = gst_pad_link (new_pad, sink_pad_video);<br>
if (GST_PAD_LINK_FAILED (ret)) {<br> g_print (" Type is '%s' but link failed.\n", new_pad_type);<br> } else {<br> g_print (" Link succeeded (type '%s').\n", new_pad_type);<br>
}<br> goto exit;<br> }<br><br> /* Attempt the link */<br> ret = gst_pad_link (new_pad, sink_pad_audio);<br> if (GST_PAD_LINK_FAILED (ret)) {<br> g_print (" Type is '%s' but link failed.\n", new_pad_type);<br>
} else {<br> g_print (" Link succeeded (type '%s').\n", new_pad_type);<br> }<br><br>exit:<br> /* Unreference the new pad's caps, if we got them */<br> if (new_pad_caps != NULL)<br>
    gst_caps_unref (new_pad_caps);

  /* Unreference the sink pads */
  gst_object_unref (sink_pad_audio);
  gst_object_unref (sink_pad_video);
}
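In case it helps to compare, this is roughly what I would expect a stand-alone receive-only test to look like, built with gst_parse_launch and using the same port 7878 and PCMU caps (just a sketch, with mulawdec and wavenc added so the output is a playable WAV file; it is not part of the program above):

#include <gst/gst.h>

int main (int argc, char *argv[]) {
  GstElement *pipeline;
  GstBus *bus;
  GstMessage *msg;
  GError *error = NULL;

  gst_init (&argc, &argv);

  /* udpsrc needs the RTP caps set on it because there is no SDP
   * describing the incoming stream */
  pipeline = gst_parse_launch (
      "udpsrc port=7878 caps=\"application/x-rtp,media=(string)audio,"
      "encoding-name=(string)PCMU,payload=(int)0,clock-rate=(int)8000\" "
      "! rtppcmudepay ! mulawdec ! audioconvert ! wavenc ! filesink location=new.wav",
      &error);
  if (pipeline == NULL) {
    g_printerr ("Parse error: %s\n", error->message);
    g_clear_error (&error);
    return -1;
  }

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Run until an error or end-of-stream */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
  if (msg != NULL)
    gst_message_unref (msg);
  gst_object_unref (bus);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}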
Thanks,
Amar