Opencv Mat as AppSrc to srtclientsink

maestro157340 stefankrempel at gmail.com
Sun Mar 17 02:30:56 UTC 2019


I am trying to use an OpenCV Mat as an appsrc in my pipeline and push it via
SRT to a local server, but no window ever opens to play the video stream.

My system is a mac OS 10.14 with gstreamer 1.15.

The pipeline consists of the following elements:
appsrc -> videoconvert -> videoencoder -> mpegtsmux -> srtclientsink


I want to get the srt stream and show it with the following command:
gst-launch-1.0 srtserversrc uri=srt://:8888 ! decodebin3 ! autovideosink


In the debug logs, it says:

GST_BUFFER gstbuffer.c:445:void _memory_add(GstBuffer *, gint, GstMemory *):
buffer 0x7fd1aca38500, idx -1, mem 0x7fd1aca3a2b0
0:00:08.150919000   974 0x7fd1ac864b20 DEBUG                tsdemux
tsdemux.c:2980:gst_ts_demux_push_pending_data: Not enough information to
push buffers yet, storing buffer
0:00:08.150931000   974 0x7fd1ac864b20 LOG                  tsdemux
tsdemux.c:3098:gst_ts_demux_push_pending_data: Resetting to EMPTY, returning
ok
0:00:08.150942000   974 0x7fd1ac864b20 LOG         mpegtspacketizer
mpegtspacketizer.c:689:mpegts_packetizer_flush_bytes: flushing 564 bytes
from adapter
0:00:08.151214000   974 0x7fd1ac864b20 LOG                  adapter
gstadapter.c:634:void gst_adapter_flush_unchecked(GstAdapter *,
gsize):<GstAdapter at 0x7fd1ad83c5a0> flushing 564 bytes
0:00:08.151234000   974 0x7fd1ac864b20 LOG                  adapter
gstadapter.c:572:void gst_adapter_unmap(GstAdapter
*):<GstAdapter at 0x7fd1ad83c5a0> unmap memory buffer 0x7fd1aca383f0
0:00:08.151247000   974 0x7fd1ac864b20 LOG                  adapter
gstadapter.c:655:void gst_adapter_flush_unchecked(GstAdapter *,
gsize):<GstAdapter at 0x7fd1ad83c5a0> flushing out head buffer


So I assume there is a problem with the demuxer — maybe because I only send
video data and no audio data. However, without the mpegtsmux in my code, I
get the error that the payload size exceeds the maximum of 1316 bytes
allowed by the SRT protocol.


Here is the code:

---------------------------
main.cpp
---------------------------

#include <iostream>
#include <string>
#include <mutex>
#include <thread>
#include <time.h>

#include <opencv2/opencv.hpp>
#include <opencv2/imgproc/types_c.h>
#include <opencv2/highgui/highgui.hpp>

#include <gstreamer-1.0/gst/gstelement.h>
#include <gstreamer-1.0/gst/gstpipeline.h>
#include <gstreamer-1.0/gst/gstutils.h>
#include <gstreamer-1.0/gst/app/gstappsrc.h>
#include <gstreamer-1.0/gst/base/gstbasesrc.h>
#include <gstreamer-1.0/gst/video/video.h>
#include <gstreamer-1.0/gst/gst.h>
#include <gstreamer-1.0/gst/check/gstbufferstraw.h>

#include <glib.h>

#define GST_CAT_DEFAULT appsrc_pipeline_debug
GST_DEBUG_CATEGORY(appsrc_pipeline_debug);

using namespace std;

/*
 * GStreamer glossary:
 * bus: simple system for forwarding messages from streaming threads to the
 *      application in its own thread context
 * pad: connection point through which elements are linked together
 * caps: capabilities — a description of the media type flowing on a pad
 * signal: GObject notification that an application can connect handlers to
 * callback: application function invoked when a signal fires
 */

static std::mutex m;
GMainLoop *loop;

typedef struct _App App;
struct _App {
    GstElement *videoenc;
    GstElement *appsrc;
    GstElement *videoconvert;
    GstElement *sink;
    guint sourceid;
    GstElement *mpegts;
};
App s_app;

int counter = 0;

static gboolean cb_need_data(App *app) {
    static GstClockTime timestamp = 0;
    GstBuffer *buffer;
    guint buffersize;
    GstFlowReturn ret;
    GstMapInfo info;

    counter++;
    m.lock();

    cv::Mat image_mat = cv::imread("./../data/squat.jpg");
    cv::Mat resized_mat;

    cv::resize(image_mat, resized_mat, cv::Size(640, 480));

    buffersize = guint(resized_mat.cols * resized_mat.rows *
resized_mat.channels());
    buffer = gst_buffer_new_and_alloc(buffersize);

    uchar *img_data = image_mat.data;
    m.unlock();

    if (gst_buffer_map(buffer, &info, (GstMapFlags) GST_MAP_WRITE)) {
        memcpy(info.data, img_data, buffersize);
        gst_buffer_unmap(buffer, &info);
    } else {
        g_print("error at memcpy");
    }

    g_signal_emit_by_name(app->appsrc, "push-buffer", buffer, &ret);

    if (ret != GST_FLOW_OK) {
        g_print("Ops\n");
        GST_DEBUG ("something wrong in cb_need_data");
        g_main_loop_quit(loop);
    }

    gst_buffer_unref(buffer);

    return TRUE;
}

static void start_feed(GstElement *pipeline, guint size, App *app) {
    if (app->sourceid == 0) {
        app->sourceid = g_timeout_add(67, (GSourceFunc) cb_need_data, app);
    }
}

static void stop_feed(GstElement *pipeline, App *app) {
    if (app->sourceid != 0) {
        g_source_remove(app->sourceid);
        app->sourceid = 0;
    }
}

static gboolean bus_call(GstBus *bus, GstMessage *message, gpointer data) {
    GError *err = nullptr;
    gchar *dbg_info = nullptr;
    GST_DEBUG ("got message %s",
gst_message_type_get_name(GST_MESSAGE_TYPE(message)));

    switch (GST_MESSAGE_TYPE (message)) {
        case GST_MESSAGE_ERROR: {
            gst_message_parse_error(message, &err, &dbg_info);
            g_printerr("ERROR from element %s: %s\n",
                       GST_OBJECT_NAME (message->src), err->message);
            g_printerr("Debugging info: %s\n", (dbg_info) ? dbg_info :
"none");
            g_error_free(err);
            g_free(dbg_info);
            g_main_loop_quit(loop);
            break;
        }
        case GST_MESSAGE_EOS:
            g_main_loop_quit(loop);
            break;
        default:
            break;
    }
    return TRUE;
}

void startStream() {

    App *app = &s_app;
    GstCaps *caps2;
    GstCaps *caps3;
    GstBus *bus;
    GstElement *pipeline;

    gst_init(nullptr, nullptr);

    loop = g_main_loop_new(nullptr, TRUE);

    /*
     * pipeline elements:
     * appsrc -> videoconvert -> videoencoder -> mpegtsmux -> srtsink
     */

    // create pipeline
    pipeline = gst_pipeline_new("gstreamer-encoder");
    if (!pipeline) {
        g_print("Error creating  pipeline");
    }

    // create appsrc element
    app->appsrc = gst_element_factory_make("appsrc", "appsrc");
    if (!app->appsrc) {
        g_print("Error creating appsrc");
    }

    // create videoconvert element
    app->videoconvert = gst_element_factory_make("videoconvert",
"videoconvert");
    if (!app->videoconvert) {
        g_print("Error creating videoconvert element");
    }

    // create videoencoder element
    app->videoenc = gst_element_factory_make("x264enc", "encoder");
    if (!app->videoenc) {
        g_print("Error creating encoder");
    }

    app->mpegts = gst_element_factory_make("mpegtsmux", "mpegtsmux");
    if (!app->mpegts) {
        g_print("Error creating mpegtsmuxer");
    }

    app->sink = gst_element_factory_make("srtclientsink", "sink");
    if (!app->sink) {
        g_print("Error creating sink");
    }

    g_print("Elements are created\n");

    g_object_set(G_OBJECT(app->sink), "uri", "srt://127.0.0.1:8888",
nullptr);
    g_object_set(G_OBJECT(app->sink), "msg-size", 1316, nullptr);
    g_object_set(G_OBJECT(app->sink), "latency", 120, nullptr);

    g_object_set(G_OBJECT(app->videoenc), "bitrate", 256, nullptr);

    g_print("End of settings\n");

    caps2 = gst_caps_new_simple("video/x-raw",
                                "format", G_TYPE_STRING, "RGB",
                                "width", G_TYPE_INT, 640,
                                "height", G_TYPE_INT, 480,
                                "framerate", GST_TYPE_FRACTION, 25, 1,
                                "pixel-aspect-ratio", GST_TYPE_FRACTION, 1,
1,
                                nullptr);

    gst_app_src_set_caps(GST_APP_SRC(app->appsrc), caps2);

    g_object_set(G_OBJECT (app->appsrc), "stream-type", 0, "format",
GST_FORMAT_TIME, nullptr);

    bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
    g_assert(bus);
    gst_bus_add_watch(bus, (GstBusFunc) bus_call, app);

    gst_bin_add_many(GST_BIN(pipeline), app->appsrc, app->videoconvert,
app->videoenc,
                     app->mpegts, app->sink, nullptr);

    g_print("Added all the elements to the pipeline\n");

    int ok = FALSE;
    ok = gst_element_link_many(app->appsrc, app->videoconvert,
app->videoenc,
                               app->sink, nullptr);

    if (ok)
        g_print("Linked all elements together\n");
    else
        g_print("Linking error\n");

    g_assert(app->appsrc);
    g_assert(GST_IS_APP_SRC(app->appsrc));

    g_signal_connect(app->appsrc, "need-data", G_CALLBACK(start_feed), app);
    g_signal_connect(app->appsrc, "enough-data", G_CALLBACK(stop_feed),
app);

    g_print("Playing the video\n");
    gst_element_set_state(pipeline, GST_STATE_PLAYING);

    g_print("Running...\n");
    g_main_loop_run(loop);

    g_print("Returned, stopping playback\n");
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(bus);
    g_main_loop_unref(loop);
    g_print("Deleting pipeline\n");
}


int main(int argc, char **argv) {

    startStream();

    return 0;
}



---------------------------
CMakeLists.txt
---------------------------

cmake_minimum_required(VERSION 3.13)
project(opencv_gstreamer)

set(CMAKE_CXX_STANDARD 14)

find_package(PkgConfig REQUIRED)

pkg_search_module(OPENCV opencv4 REQUIRED)

pkg_search_module(GSTREAMER gstreamer-1.0 REQUIRED)
pkg_search_module(APP_GSTREAMER gstreamer-app-1.0 REQUIRED)
pkg_search_module(SRT srt REQUIRED)
pkg_search_module(GLIB glib-2.0 REQUIRED)

include_directories(
        ${OPENCV_INCLUDE_DIRS}
        ${GSTREAMER_INCLUDE_DIRS}
        ${APP_GSTREAMER_INCLUDE_DIRS}
        ${GLIB_INCLUDE_DIRS}
        ${SRT_INCLUDE_DIRS})

link_directories(
        ${OPENCV_LIBRARY_DIRS}
        ${GSTREAMER_LIBRARY_DIRS}
        ${APP_GSTREAMER_LIBRARY_DIRS}
        ${GLIB_LIBRARY_DIRS}
        ${SRT_LIBRARY_DIRS})

link_libraries(
        ${OPENCV_LDFLAGS}
        pthread
        ${GSTREAMER_LDFLAGS}
        ${APP_GSTREAMER_LDFLAGS}
        ${GLIB_LDFLAGS}
        ${SRT_LDFLAGS})

add_compile_options(
        ${OPENCV_CFLAGS}
        ${GSTREAMER_CFLAGS}
        ${APP_GSTREAMER_CFLAGS}
        ${GLIB_CFLAGS}
        ${SRT_CFLAGS})

add_executable(opencv_gstreamer src/main.cpp)





--
Sent from: http://gstreamer-devel.966125.n4.nabble.com/


More information about the gstreamer-devel mailing list