OpenCV Mat to appsrc in C++
cmclar-204
craigalexandermclaren at hotmail.co.uk
Wed Mar 6 13:24:01 UTC 2019
Hi, I'm having issues pushing data from a cv::Mat into a GStreamer pipeline
through appsrc, using the GStreamer API from C++. I've successfully created
pipelines that receive data from appsink, so I know the GStreamer build is
fine; it's just appsrc I'm having trouble with.
My code is as follows (OO, class based):
///////////////////////// transmitter.cpp /////////////////////////////////////////////////////////////////
#include "gst_transmitter.h"
cv::Mat out_frame;
static void push_data(GstAppSrc *appsrc, guint unused_size, gpointer user_data)
{
    guint num_bytes = out_frame.total() * out_frame.elemSize();
    GstBuffer* buffer;
    GstFlowReturn ret;
    GstMapInfo map;
    static GstClockTime timestamp = 0;
    buffer = gst_buffer_new_and_alloc(num_bytes);
    gst_buffer_fill(buffer, 0, (gpointer)(out_frame.data), num_bytes);
    //buffer = gst_buffer_new_wrapped_full(GST_MEMORY_FLAG_READONLY, (gpointer)(out_frame.data), num_bytes, 0, num_bytes, NULL, NULL);
    /*gst_buffer_map(buffer, &map, GST_MAP_WRITE);
    memcpy(map.data, out_frame.data, num_bytes);
    gst_buffer_unmap(buffer, &map);*/
    GST_BUFFER_PTS(buffer) = timestamp;
    GST_BUFFER_DURATION(buffer) = gst_util_uint64_scale_int(1, GST_SECOND, 4);
    timestamp += GST_BUFFER_DURATION(buffer);
    //ret = gst_app_src_push_buffer(appsrc, buffer);
    g_signal_emit_by_name(appsrc, "push-buffer", buffer, &ret);
    std::cout << ret << std::endl;
    gst_buffer_unref(buffer);
}
gst_transmitter::gst_transmitter(int frame_width, int frame_height, int frame_rate,
                                 dst_types dst_type, cv::Mat frame, std::string address)
    : dst_frame_width{ frame_width }, dst_frame_height{ frame_height },
      dst_address{ address }, /*out_frame{frame},*/ dst_framerate_hz{ frame_rate }
{
    out_frame = frame;
    dst_port = 5000;
    num_bytes = frame.total() * frame.elemSize();
    gst_init(NULL, NULL);
    loop = g_main_loop_new(NULL, FALSE);
    //buffer = gst_buffer_new_and_alloc(num_bytes);
    switch (dst_type)
    {
    case dst_types::H264: create_h264_pipe(); break;
    case dst_types::H265: create_h265_pipe(); break;
    default: create_h264_pipe(); break;
    }
    pipeline = gst_pipeline_new("ethernet-streamer");
    source = gst_element_factory_make("appsrc", "app-source");
    conv = gst_element_factory_make("videoconvert", NULL);
    sink = gst_element_factory_make("udpsink", NULL);
    GstCaps* incaps = gst_caps_new_simple("video/x-raw",
        "format", G_TYPE_STRING, "BGR",
        "width", G_TYPE_INT, dst_frame_width,
        "height", G_TYPE_INT, dst_frame_height,
        "framerate", GST_TYPE_FRACTION, dst_framerate_hz, 1, NULL);
g_object_set(sink, "host", dst_address, "port", dst_port, NULL);
g_object_set(source, "caps", incaps, NULL);
    for (GstElement* element : pipeline_elements)
    {
        gst_bin_add(GST_BIN(pipeline), element);
    }
    gst_bin_add_many(GST_BIN(pipeline), source, conv, sink, NULL);
    gst_element_link(source, conv);
    gst_element_link(conv, pipeline_elements.front());
    for (size_t i = 0; i < pipeline_elements.size() - 1; i++)
    {
        gst_element_link(pipeline_elements[i], pipeline_elements[i + 1]);
    }
    gst_element_link(pipeline_elements.back(), sink);
    g_object_set(G_OBJECT(source),
        "stream-type", 0,
        "is-live", TRUE,
        "format", GST_FORMAT_TIME, NULL);
    g_signal_connect(source, "need-data", G_CALLBACK(push_data), NULL);
    /* Set the pipeline to "playing" state */
    g_print("Now streaming\n");
    gst_element_set_state(pipeline, GST_STATE_PLAYING);
}
gst_transmitter::~gst_transmitter()
{
    /* Out of the main loop, clean up nicely */
    g_print("Returned, stopping playback\n");
    gst_element_set_state(pipeline, GST_STATE_NULL);
    g_print("Deleting pipeline\n");
    gst_object_unref(GST_OBJECT(pipeline));
    //g_source_remove(bus_watch_id);
    g_main_loop_unref(loop);
}
void gst_transmitter::run_gst_loop()
{
    g_print("streaming...\n");
    g_main_loop_run(loop);
}
void gst_transmitter::run()
{
    std::thread t1(&gst_transmitter::run_gst_loop, this);
    t1.detach();
}
void gst_transmitter::create_h264_pipe()
{
    GstElement* encoder = gst_element_factory_make("x264enc", "h264-encoder");
    pipeline_elements.push_back(encoder);
    GstElement* encfilter = gst_element_factory_make("capsfilter", NULL);
    pipeline_elements.push_back(encfilter);
    GstElement* payloader = gst_element_factory_make("rtph264pay", NULL);
    pipeline_elements.push_back(payloader);
    GstCaps* enc_caps = gst_caps_new_simple("video/x-h264", NULL);
    g_object_set(encfilter, "caps", enc_caps, NULL);
    gst_caps_unref(enc_caps);
}
void gst_transmitter::create_h265_pipe()
{
    GstElement* encoder = gst_element_factory_make("x265enc", "h265-encoder");
    pipeline_elements.push_back(encoder);
    GstElement* encfilter = gst_element_factory_make("capsfilter", NULL);
    pipeline_elements.push_back(encfilter);
    GstElement* payloader = gst_element_factory_make("rtph265pay", NULL);
    pipeline_elements.push_back(payloader);
    GstCaps* enc_caps = gst_caps_new_simple("video/x-h265", NULL);
    g_object_set(encfilter, "caps", enc_caps, NULL);
    gst_caps_unref(enc_caps);
}
void gst_transmitter::update_last_frame(cv::Mat& frame)
{
    out_frame = frame;
    //GstBuffer* buffer = gst_buffer_new_and_alloc(num_bytes);
    //static GstClockTime timestamp = 0;
    //////buffer = gst_buffer_new_and_alloc(num_bytes);
    ///*gst_buffer_map(buffer, &map, GST_MAP_WRITE);
    //memcpy(map.data, frame.data, num_bytes);
    //gst_buffer_unmap(buffer, &map);*/
    //const auto out = gst_buffer_fill(buffer, 0, frame.data, num_bytes);
    //GST_BUFFER_PTS(buffer) = 15;
    //GST_BUFFER_DURATION(buffer) = gst_util_uint64_scale_int(1, GST_SECOND, 4);
    //timestamp += GST_BUFFER_DURATION(buffer);
    //bool ret = gst_app_src_push_buffer(GST_APP_SRC(source), buffer);
    //std::cout << ret << std::endl;
    //gst_buffer_unref(buffer);
}
//////////////// transmitter.h //////////////////////////////////////////////////////
#ifndef GSTTRANSMITTER_H
#define GSTTRANSMITTER_H
#include "gstreamer-1.0/gst/gst.h"
#include "gstreamer-1.0/gst/video/video.h"
#include "gstreamer-1.0/gst/app/gstappsrc.h"
#include "opencv2/opencv.hpp"
#include <glib.h>
#include <iostream>
#include <thread>
#include <vector>
#include <map>
enum class dst_types { H264, H265 };

class gst_transmitter
{
public:
    gst_transmitter(int frame_width, int frame_height, int frame_rate,
                    dst_types dst_type, cv::Mat frame, std::string address = "");
    ~gst_transmitter();
    void run_gst_loop();
    void run();
    void create_h264_pipe();
    void create_h265_pipe();
    void update_last_frame(cv::Mat& frame);
    guint sourceid;
private:
    GMainLoop* loop;
    GstElement *pipeline, *source, *identity, *conv, *filter, *sink;
    std::vector<GstElement*> pipeline_elements;
    //std::string host;
    std::string dst_address;
    int dst_port;
    int dst_frame_width;
    int dst_frame_height;
    int dst_framerate_hz;
    int num_bytes;
    GstBuffer* buffer;
    GstFlowReturn ret;
    GstMapInfo map;
    gint num_samples;
    //GstClockTime timestamp;
    //cv::Mat out_frame;
    //char* out_data;
};
#endif // !GSTTRANSMITTER_H
///////////////////////////////////////////////////////////////
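For context, this is roughly how the class is driven from the application side. The capture source, frame rate and address here are placeholders I've put in for this mail, not part of the real code:

// Hypothetical driver loop, only to show how the class above is meant to be used.
#include "gst_transmitter.h"
#include <chrono>

int main()
{
    cv::VideoCapture cap(0);          // placeholder capture source
    cv::Mat frame;
    cap >> frame;                     // grab one frame so the constructor knows the size

    gst_transmitter tx(frame.cols, frame.rows, 25,
                       dst_types::H264, frame, "127.0.0.1");
    tx.run();                         // spins up the GMainLoop on its own thread

    while (cap.read(frame))
    {
        tx.update_last_frame(frame);  // appsrc then picks it up in the need-data callback
        std::this_thread::sleep_for(std::chrono::milliseconds(40));
    }
    return 0;
}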
I've tried this both via the need-data signal and by attempting to push buffers
every time a frame is available inside my application, and each time the return
value of push-buffer comes back as false. Are there any obvious issues with what
I'm doing?
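In case it helps narrow things down, here is the push path reduced to a stand-alone
snippet, using gst_app_src_push_buffer() directly instead of the "push-buffer"
action signal. The 25 fps value and the assumption that the Mat is continuous BGR
are just for illustration:

// Stand-alone sketch of the push path; assumes a continuous BGR cv::Mat and 25 fps.
static void push_frame(GstAppSrc* appsrc, const cv::Mat& frame, GstClockTime& timestamp)
{
    gsize num_bytes = frame.total() * frame.elemSize();
    GstBuffer* buffer = gst_buffer_new_and_alloc(num_bytes);
    gst_buffer_fill(buffer, 0, frame.data, num_bytes);

    GST_BUFFER_PTS(buffer) = timestamp;
    GST_BUFFER_DURATION(buffer) = gst_util_uint64_scale_int(1, GST_SECOND, 25);
    timestamp += GST_BUFFER_DURATION(buffer);

    // gst_app_src_push_buffer() takes ownership of the buffer, so no unref here
    // (unlike the "push-buffer" action signal used above, where I unref afterwards).
    GstFlowReturn ret = gst_app_src_push_buffer(appsrc, buffer);
    if (ret != GST_FLOW_OK)
        g_printerr("push-buffer failed: %d\n", ret);
}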
--
Sent from: http://gstreamer-devel.966125.n4.nabble.com/