RTSP/ONVIF/gstreamer

kai666_73 kai66673 at mail.ru
Thu Mar 31 10:58:58 UTC 2016


Okay.
I switched to version 1.6.3.
I implemented a pipeline that takes the RTSP stream and splits it into two
branches: one writes the metadata to a file, the other displays the video.
Like this:

#include <string.h>
#include <gst/gst.h>
#include <glib.h>

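/* All elements of the pipeline, shared with the pad-added and bus
 * callbacks. */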
typedef struct _ApplicationData {
    GMainLoop *loop;
    GstElement *pipeline;
    GstElement *source;
    GstElement *rtph264depay;
    GstElement *avdec_h264;
    GstElement *videoscale;
    GstElement *videoconvert;
    GstElement *video_sink;
    GstElement *metadata_sink;
} ApplicationData;

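/* rtspsrc adds one source pad per RTP stream at runtime. Depending on the
 * "media" field of the pad caps, attach either the metadata branch
 * (media=application -> filesink) or the video branch
 * (media=video -> depay -> decode -> display). */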
static void on_pad_added(GstElement *element, GstPad *pad, ApplicationData *data)
{
    GstPad *sinkpad;

    gchar *capsstr;
    gchar *padname;
    GstCaps *caps = gst_pad_query_caps(pad, NULL);
    const GstStructure *str = gst_caps_get_structure (caps, 0);
    const gchar *medianame = gst_structure_get_string(str, "media");

    (void)element;

    capsstr = gst_caps_to_string(caps);
    padname = gst_pad_get_name(pad);
    g_print("*********************************** on_pad_added()
**********************************************\n");
    g_print("Pad Name: %s\n", padname);
    g_print("%s\n", capsstr);
   
g_print("*************************************************************************************************\n");
    g_free(capsstr);
    g_free(padname);

    if ( medianame ) {
        g_print("media = %s\n", medianame);
        if ( !strcmp(medianame, "application") ) {
            GstElement *tee             = gst_element_factory_make ("tee",
"metadatatee");
            GstElement *queue           = gst_element_factory_make("queue",
"metadataqueue");
            gst_bin_add_many(GST_BIN (data->pipeline), tee, queue, NULL);
            gst_element_link(tee, queue);
            gst_element_link(queue, data->metadata_sink);
            sinkpad = gst_element_get_static_pad(tee, "sink");
            gst_pad_link(pad, sinkpad);
            gst_object_unref(sinkpad);
            gst_element_set_state(tee, GST_STATE_PLAYING);
            gst_element_set_state(queue, GST_STATE_PLAYING);
            gst_element_set_state(data->metadata_sink, GST_STATE_PLAYING);
        } else if ( !strcmp(medianame, "video") ) {
            GstElement *tee             = gst_element_factory_make ("tee",
"videotee");
            GstElement *queue           = gst_element_factory_make("queue",
"videoqueue");
            gst_bin_add_many (GST_BIN (data->pipeline), tee, queue, NULL);
            gst_element_link (tee, queue);
            gst_element_link (queue, data->rtph264depay);
            sinkpad = gst_element_get_static_pad(tee, "sink");
            gst_pad_link(pad, sinkpad);
            gst_object_unref(sinkpad);
            gst_element_set_state(tee, GST_STATE_PLAYING);
            gst_element_set_state(queue, GST_STATE_PLAYING);
            gst_element_set_state(data->rtph264depay, GST_STATE_PLAYING);
            gst_element_set_state(data->avdec_h264, GST_STATE_PLAYING);
            gst_element_set_state(data->videoscale, GST_STATE_PLAYING);
            gst_element_set_state(data->videoconvert, GST_STATE_PLAYING);
            gst_element_set_state(data->video_sink, GST_STATE_PLAYING);
        } else {
            g_printerr("UNKNOWN/Unsupported media type (%s)\n", medianame);
        }
    } else {
        g_print("<NO MEDIA>\n");
    }
    gst_caps_unref(caps); /* caps from gst_pad_query_caps() must be unreffed */
    g_print("---------------------------------------------------------------\n");
}

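/* Bus watch: quit the main loop on EOS or error; on clock loss, restart
 * the pipeline through PAUSED so it picks up a new clock. */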
static gboolean bus_call(GstBus *bus, GstMessage *msg, ApplicationData *data)
{
    GMainLoop *loop = data->loop;

    (void)bus;

    switch (GST_MESSAGE_TYPE(msg)) {

    case GST_MESSAGE_EOS:
        g_main_loop_quit(loop);
        break;

    case GST_MESSAGE_ERROR: {
        gchar *debug;
        GError *error;

        gst_message_parse_error(msg, &error, &debug);
        g_free(debug);

        g_printerr("Error: %s\n", error->message);
        g_error_free(error);

        g_main_loop_quit(loop);
        break;
    }

    case GST_MESSAGE_CLOCK_LOST:
        /* Get a new clock */
        gst_element_set_state(data->pipeline, GST_STATE_PAUSED);
        gst_element_set_state(data->pipeline, GST_STATE_PLAYING);
        break;

    default:
        break;
    }

    return TRUE;
}

int main(int argc, char *argv[])
{
    ApplicationData app_data;

    GstBus *bus;
    GstCaps *videoCaps;

    gst_init(&argc, &argv);

    app_data.loop = g_main_loop_new(NULL, FALSE);
    videoCaps = gst_caps_new_simple("video/x-raw", "width", G_TYPE_INT, 640, NULL);


    app_data.pipeline       = gst_pipeline_new("rtsp_capture");
    app_data.source         = gst_element_factory_make("rtspsrc", "rtp-source");
    app_data.rtph264depay   = gst_element_factory_make("rtph264depay", "rtph264depay");
    app_data.avdec_h264     = gst_element_factory_make("avdec_h264", "avdec_h264");
    app_data.videoscale     = gst_element_factory_make("videoscale", "videoscale");
    app_data.videoconvert   = gst_element_factory_make("videoconvert", "videoconvert");
    app_data.video_sink     = gst_element_factory_make("ximagesink", "video-sink");
    app_data.metadata_sink  = gst_element_factory_make("filesink", "meta-sink");

    if ( !app_data.pipeline || !app_data.source ||
         !app_data.rtph264depay || !app_data.avdec_h264 ||
         !app_data.videoscale || !app_data.videoconvert ||
         !app_data.video_sink || !app_data.metadata_sink ) {
        g_printerr("Error on element creation...\n");
        return -1;
    }

    g_object_set(app_data.source, "location", "rtsp://user:pass@XXX.XXX.XXX.XXX:PPP/someurl", NULL);
    g_object_set(app_data.source, "debug", TRUE, NULL);
    g_object_set(app_data.metadata_sink, "location", "/tmp/onvif.xml", NULL);

    bus = gst_pipeline_get_bus(GST_PIPELINE(app_data.pipeline));
    gst_bus_add_signal_watch(bus);
    g_signal_connect(bus, "message", G_CALLBACK(bus_call), &app_data);

    gst_bin_add_many(GST_BIN(app_data.pipeline), app_data.source,
                     app_data.rtph264depay,
                     app_data.avdec_h264,
                     app_data.videoscale,
                     app_data.videoconvert,
                     app_data.video_sink,
                     app_data.metadata_sink, NULL);
    /* rtspsrc pads appear only after stream setup, so the source is linked
     * to the rest of the pipeline in on_pad_added(). */
    g_signal_connect(app_data.source, "pad-added", G_CALLBACK(on_pad_added), &app_data);
    gst_element_link(app_data.rtph264depay, app_data.avdec_h264);
    gst_element_link(app_data.avdec_h264, app_data.videoscale);
    gst_element_link_filtered(app_data.videoscale, app_data.videoconvert, videoCaps);
    gst_caps_unref(videoCaps); /* link_filtered does not take ownership */
    gst_element_link(app_data.videoconvert, app_data.video_sink);

    gst_element_set_state(app_data.pipeline, GST_STATE_PLAYING);

    g_main_loop_run(app_data.loop);

    gst_element_set_state(app_data.pipeline, GST_STATE_NULL);

    g_main_loop_unref(app_data.loop);
    gst_object_unref(bus);
    gst_object_unref(GST_OBJECT(app_data.pipeline));

    return 0;
}

It works great.
The problem is that I now need to render the metadata from the first branch
as a video stream and merge it with the video stream from the second branch.

Is that possible?
Can you recommend anything?
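
To be concrete: I think the merge itself is what videomixer does with two
raw video streams. Below is an untested standalone sketch, with two
videotestsrc branches standing in for my real branches; turning the
metadata into video/x-raw is exactly the part I'm missing:

#include <gst/gst.h>

/* Composite two raw video streams with videomixer. In the real pipeline,
 * the second videotestsrc would be replaced by whatever renders the ONVIF
 * metadata as video/x-raw. */
int main(int argc, char *argv[])
{
    GstElement *pipeline;
    GstMessage *msg;
    GstBus *bus;
    GError *error = NULL;

    gst_init(&argc, &argv);

    pipeline = gst_parse_launch(
        "videotestsrc pattern=ball ! video/x-raw,width=320,height=240 "
        "  ! videomixer name=mix ! videoconvert ! ximagesink "
        "videotestsrc pattern=snow ! video/x-raw,width=160,height=120 "
        "  ! mix.",
        &error);
    if (!pipeline) {
        g_printerr("Parse error: %s\n", error->message);
        g_error_free(error);
        return -1;
    }

    gst_element_set_state(pipeline, GST_STATE_PLAYING);

    /* Run until error or EOS. */
    bus = gst_element_get_bus(pipeline);
    msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE,
                                     GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
    if (msg)
        gst_message_unref(msg);

    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(bus);
    gst_object_unref(pipeline);
    return 0;
}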




