How to use the same port for sending and receiving data?

amar asontakke at phonologies.com
Sun Nov 24 23:58:29 PST 2013


Hi,

I am writing a pipeline in C using gstreamer-1.0 that should send audio and
receive audio on the same port, but I cannot get it to work. It sends the
audio fine, but the receiving side does nothing.
Please help me with this. What is going wrong here? Any help or pointers
would be appreciated.
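
As far as I understand it, the usual way to share one local port is to create
a single GSocket, bind it once, and hand that same socket to both udpsink and
udpsrc. The helper name below, and setting "close-socket" to FALSE, are just
my assumptions; this is an untested sketch of only that part, not my actual
code (audio_sink and recvsrc stand for the udpsink and udpsrc instances):

/* Sketch only: one bound GSocket shared by udpsink and udpsrc.
 * Assumes <gst/gst.h> and <gio/gio.h> are included and gst_init() has run. */
static GSocket *
make_shared_socket (guint16 local_port, GError **error)
{
        GInetAddress *any = g_inet_address_new_any (G_SOCKET_FAMILY_IPV4);
        GSocketAddress *addr = g_inet_socket_address_new (any, local_port);
        GSocket *sock = g_socket_new (G_SOCKET_FAMILY_IPV4, G_SOCKET_TYPE_DATAGRAM,
                        G_SOCKET_PROTOCOL_UDP, error);

        if (sock != NULL)
                g_socket_bind (sock, addr, TRUE /* allow address reuse */, error);

        g_object_unref (addr);
        g_object_unref (any);
        return sock;
}

/* ... then, after creating the udpsink and udpsrc elements: */
GSocket *shared = make_shared_socket (7878, NULL);
g_object_set (audio_sink, "socket", shared, "close-socket", FALSE, NULL);
g_object_set (recvsrc, "socket", shared, "close-socket", FALSE, NULL);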

My code is here:
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <arpa/inet.h>
#include <gst/gst.h>
#include <gio/gio.h>

/* Structure to contain all our information, so we can pass it to callbacks
*/
typedef struct _CustomData {
        GstElement *pipeline;
        GstElement *source;
        GstElement *convert;
        GstElement *audio_resample;
        GstElement *audio_encoder;
        GstElement *audio_rtp;
        GstElement *audio_sink;
        GstElement *colorspace;
        GstElement *video_encoder;
        GstElement *video_rtp;
        GstElement *video_sink;
        GstElement *recvsource;
        GstElement *recvdepay;
        GstElement *recvsink;
} CustomData;
/* Handler for the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *pad, CustomData *data);

int main(int argc, char *argv[]) {
        CustomData data;
        GstBus *bus;
        GstMessage *msg;
        GstStateChangeReturn ret;
        gboolean terminate = FALSE;
        /* Initialize GStreamer */
        gst_init (&argc, &argv);

        /* Create the elements */
        data.source = gst_element_factory_make ("uridecodebin", "source");
        data.convert = gst_element_factory_make ("audioconvert", "convert");
        data.audio_resample = gst_element_factory_make ("audioresample", "resample");
        data.audio_encoder = gst_element_factory_make ("mulawenc", "aencoder");
        data.audio_rtp = gst_element_factory_make ("rtppcmupay", "artppay");
        data.audio_sink = gst_element_factory_make ("udpsink", "audio_sink");
        data.colorspace = gst_element_factory_make ("autovideoconvert", "colorspace");
        data.video_encoder = gst_element_factory_make ("avenc_h263p", "vencoder");
        data.video_rtp = gst_element_factory_make ("rtph263ppay", "video_rtp");
        data.video_sink = gst_element_factory_make ("udpsink", "video_sink");
        data.recvsource = gst_element_factory_make ("udpsrc", "recvsrc");
        data.recvdepay = gst_element_factory_make ("rtppcmudepay", "artdepay");
        data.recvsink = gst_element_factory_make ("filesink", "recvsink");

        /* Create the empty pipeline */
        data.pipeline = gst_pipeline_new ("test-pipeline");

        if (!data.pipeline || !data.source || !data.convert || !data.audio_resample ||
            !data.audio_encoder || !data.audio_rtp || !data.audio_sink ||
            !data.colorspace || !data.video_encoder || !data.video_rtp ||
            !data.video_sink || !data.recvsource || !data.recvdepay || !data.recvsink) {
                g_printerr ("Not all elements could be created.\n");
                return -1;
        }

        /* Build the pipeline. Note that we are NOT linking the source at this
         * point. We will do it later. */
        gst_bin_add_many (GST_BIN (data.pipeline), data.source, data.convert,
                        data.audio_resample, data.audio_encoder, data.audio_rtp,
                        data.audio_sink, data.colorspace, data.video_encoder,
                        data.video_rtp, data.video_sink, NULL);
        if (!gst_element_link_many (data.convert, data.audio_resample,
                        data.audio_encoder, data.audio_rtp, data.audio_sink, NULL)) {
                g_printerr ("Audio elements could not be linked.\n");
                gst_object_unref (data.pipeline);
                return -1;
        }
        if (!gst_element_link_many (data.colorspace, data.video_encoder,
                        data.video_rtp, data.video_sink, NULL)) {
                g_printerr ("Video elements could not be linked.\n");
                gst_object_unref (data.pipeline);
                return -1;
        }
        if (!gst_element_link_many (data.recvsource, data.recvdepay,
                        data.recvsink, NULL)) {
                g_printerr ("Receive elements could not be linked.\n");
                gst_object_unref (data.pipeline);
                return -1;
        }

        struct sockaddr_in artp_addr;
        memset (&artp_addr, 0, sizeof (struct sockaddr_in));
        int artp_sockfd = socket (AF_INET, SOCK_DGRAM, 0);
        int on = 1;
        if (setsockopt (artp_sockfd, SOL_SOCKET, SO_REUSEADDR, &on, sizeof (on)) < 0)
                perror ("setsockopt");
        if (artp_sockfd > 0) {
                int res;
                artp_addr.sin_family = AF_INET;
                artp_addr.sin_port = htons (7878);
                artp_addr.sin_addr.s_addr = inet_addr ("192.168.0.227");

                res = bind (artp_sockfd, (struct sockaddr *) &artp_addr, sizeof (artp_addr));
                if (res == 0) {
                        printf ("Successfully bound to local audio RTP port 7878, sockfd: %d.\n",
                                        artp_sockfd);
                } else {
                        printf ("Unable to bind to local audio RTP port 7878.\n");
                }
        }
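
        /* The fd bound above is wrapped in a GSocket below and handed to both
         * the audio udpsink and the udpsrc, so that sending and receiving are
         * supposed to share local port 7878. */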
        /* Set the URI to play */
        g_object_set (data.source, "uri", "file:///home/amar/KRSNA.mpg", NULL);

        GstCaps *caps = gst_caps_from_string ("application/x-rtp,media=(string)audio,"
                        "encoding-name=PCMU,payload=0,clock-rate=8000");
        g_object_set (data.audio_sink, "port", 3333, NULL);
        g_object_set (data.audio_sink, "host", "127.0.0.1", NULL);
        GSocket *s = g_socket_new_from_fd (artp_sockfd, NULL);
        g_object_set (data.audio_sink, "socket", s, NULL);
        g_object_set (data.video_sink, "port", 9078, NULL);
        g_object_set (data.video_sink, "host", "127.0.0.1", NULL);
        g_object_set (data.recvsource, "caps", caps, NULL);
        g_object_set (data.recvsource, "socket", s, NULL);
        g_object_set (data.recvsink, "location", "new.wav", NULL);
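        /* I am not sure whether "close-socket" should also be set to FALSE on
         * both udpsink and udpsrc here, so that neither element closes the
         * shared socket when it shuts down. */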
        g_signal_connect (data.source, "pad-added", G_CALLBACK (pad_added_handler), &data);

        /* Start playing */
        ret = gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
        if (ret == GST_STATE_CHANGE_FAILURE) {
                g_printerr ("Unable to set the pipeline to the playing state.\n");
                gst_object_unref (data.pipeline);
                return -1;
        }

        /* Listen to the bus */
        bus = gst_element_get_bus (data.pipeline);
        do {
                msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
                                GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

                /* Parse message */
                if (msg != NULL) {
                        GError *err;
                        gchar *debug_info;

                        switch (GST_MESSAGE_TYPE (msg)) {
                                case GST_MESSAGE_ERROR:
                                        gst_message_parse_error (msg, &err, &debug_info);
                                        g_printerr ("Error received from element %s: %s\n",
                                                        GST_OBJECT_NAME (msg->src), err->message);
                                        g_printerr ("Debugging information: %s\n",
                                                        debug_info ? debug_info : "none");
                                        g_clear_error (&err);
                                        g_free (debug_info);
                                        terminate = TRUE;
                                        break;
                                case GST_MESSAGE_EOS:
                                        g_print ("End-Of-Stream reached.\n");
                                        terminate = TRUE;
                                        break;
                                case GST_MESSAGE_STATE_CHANGED:
                                        /* We are only interested in state-changed
                                         * messages from the pipeline */
                                        if (GST_MESSAGE_SRC (msg) == GST_OBJECT (data.pipeline)) {
                                                GstState old_state, new_state, pending_state;

                                                gst_message_parse_state_changed (msg, &old_state,
                                                                &new_state, &pending_state);
                                                g_print ("Pipeline state changed from %s to %s:\n",
                                                                gst_element_state_get_name (old_state),
                                                                gst_element_state_get_name (new_state));
                                        }
                                        break;
                                default:
                                        /* We should not reach here */
                                        g_printerr ("Unexpected message received.\n");
                                        break;
                        }
                        gst_message_unref (msg);
                }
        } while (!terminate);

        /* Free resources */
        gst_object_unref (bus);
        gst_element_set_state (data.pipeline, GST_STATE_NULL);
        gst_object_unref (data.pipeline);
        return 0;
}

/* This function will be called by the pad-added signal */
static void pad_added_handler (GstElement *src, GstPad *new_pad, CustomData *data) {
        GstPad *sink_pad_audio = gst_element_get_static_pad (data->convert, "sink");
        GstPad *sink_pad_video = gst_element_get_static_pad (data->colorspace, "sink");
        GstPadLinkReturn ret;
        GstCaps *new_pad_caps = NULL;
        GstStructure *new_pad_struct = NULL;
        const gchar *new_pad_type = NULL;

        g_print ("Received new pad '%s' from '%s':\n", GST_PAD_NAME
(new_pad), GST_ELEMENT_NAME (src));


        /* Check the new pad's type */
        new_pad_caps = gst_pad_query_caps (new_pad, NULL);
        new_pad_struct = gst_caps_get_structure (new_pad_caps, 0);
        new_pad_type = gst_structure_get_name (new_pad_struct);
        if (!g_str_has_prefix (new_pad_type, "audio/x-raw")) {
                g_print ("  It has type '%s', not raw audio; trying the video branch.\n",
                                new_pad_type);
                /* Attempt the link */
                ret = gst_pad_link (new_pad, sink_pad_video);
                if (GST_PAD_LINK_FAILED (ret)) {
                        g_print ("  Type is '%s' but link failed.\n", new_pad_type);
                } else {
                        g_print ("  Link succeeded (type '%s').\n", new_pad_type);
                }
                goto exit;
        }

        /* Attempt the link */
        ret = gst_pad_link (new_pad, sink_pad_audio);
        if (GST_PAD_LINK_FAILED (ret)) {
                g_print ("  Type is '%s' but link failed.\n", new_pad_type);
        } else {
                g_print ("  Link succeeded (type '%s').\n", new_pad_type);
        }

exit:
        /* Unreference the new pad's caps, if we got them */
        if (new_pad_caps != NULL)
                gst_caps_unref (new_pad_caps);

        /* Unreference the sink pad */
        gst_object_unref (sink_pad_audio);
        gst_object_unref (sink_pad_video);
}

Thanks,
Amar