RTSP via appsrc/appsink - need pipeline to play before client connects
Øystein Skotheim
oystein.skotheim at scoutdi.com
Thu Feb 10 14:45:38 UTC 2022
Hello. I have a GStreamer pipeline that sets up a live stream from a camera and splits it into several branches via a tee. One branch records video, and another branch should be served via RTSP to a client that may connect at a later point in time. I am having some problems getting this to work.
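Schematically, the pipeline looks something like this (the element names, camera source, encoder and recording branch are just placeholders for illustration, not my actual ones):

/* Placeholder sketch of the camera pipeline: a tee splits the encoded
 * video into a recording branch and an appsink branch that should
 * feed the RTSP server. */
GstElement *camera_pipeline = gst_parse_launch(
    "v4l2src ! videoconvert ! x264enc tune=zerolatency ! tee name=t "
    "t. ! queue ! h264parse ! mp4mux ! filesink location=recording.mp4 "
    "t. ! queue ! h264parse ! appsink name=vid max-buffers=1 drop=true",
    NULL);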
To debug this, I looked at the example https://github.com/GStreamer/gst-rtsp-server/blob/master/examples/test-appsrc2.c. In that example, a pipeline is set up that ends in an appsink, and the RTSP server uses a media pipeline that starts with an appsrc; a need-data callback pulls samples from the appsink and pushes them into the appsrc. The pipeline that ends in the appsink is created and started in the media_configure function.
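The important difference from my situation is that the example only creates and starts the appsink pipeline once a client connects. Roughly (simplified from memory, video only, with a made-up function name just to illustrate where this happens):

/* Simplified sketch of what the example does inside media_configure:
 * the feeding pipeline is created and set to PLAYING only when a
 * client connects. */
static void media_configure_like_in_example(GstRTSPMediaFactory *factory,
                                            GstRTSPMedia *media,
                                            gpointer user_data) {
  GstElement *feed_pipeline = gst_parse_launch(
      "videotestsrc is-live=true ! x264enc tune=zerolatency "
      "! h264parse ! appsink name=vid max-buffers=1 drop=true", NULL);
  /* ... look up the appsrc in the media, set caps, connect need-data ... */
  gst_element_set_state(feed_pipeline, GST_STATE_PLAYING);
}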
This does not work for me, because my pipeline has to be running from the start in order to feed the other branches (e.g. the one that records the video).
I therefore tried to modify the example so that the pipeline is set to the PLAYING state already at startup, before any client connects. If I do this, I am no longer able to connect to the RTSP server (see the output below).
D:\gstreamer\1.0\msvc_x86_64\bin>gst-launch-1.0 -v playbin uri=rtsp://localhost:8554/test
Setting pipeline to PAUSED ...
Pipeline is live and does not need PREROLL ...
/GstPlayBin:playbin0/GstURIDecodeBin:uridecodebin0: ring-buffer-max-size = 0
/GstPlayBin:playbin0/GstURIDecodeBin:uridecodebin0: buffer-size = -1
/GstPlayBin:playbin0/GstURIDecodeBin:uridecodebin0: buffer-duration = -1
/GstPlayBin:playbin0/GstURIDecodeBin:uridecodebin0: force-sw-decoders = false
/GstPlayBin:playbin0/GstURIDecodeBin:uridecodebin0: use-buffering = false
/GstPlayBin:playbin0/GstURIDecodeBin:uridecodebin0: download = false
/GstPlayBin:playbin0/GstURIDecodeBin:uridecodebin0: uri = rtsp://localhost:8554/test
/GstPlayBin:playbin0/GstURIDecodeBin:uridecodebin0: connection-speed = 0
/GstPlayBin:playbin0/GstURIDecodeBin:uridecodebin0: source = "\(GstRTSPSrc\)\ source"
Progress: (open) Opening Stream
Pipeline is PREROLLED ...
Prerolled, waiting for progress to finish...
Progress: (connect) Connecting to rtsp://localhost:8554/test
Progress: (open) Retrieving server options
Progress: (open) Retrieving media info
ERROR: from element /GstPlayBin:playbin0/GstURIDecodeBin:uridecodebin0/GstRTSPSrc:source: Could not read from resource.
Additional debug info:
../gst/rtsp/gstrtspsrc.c(6408): gst_rtsp_src_receive_response (): /GstPlayBin:playbin0/GstURIDecodeBin:uridecodebin0/GstRTSPSrc:source:
Could not receive message. (Timeout while waiting for server response)
ERROR: pipeline doesn't want to preroll.
Setting pipeline to NULL ...
ERROR: from element /GstPlayBin:playbin0/GstURIDecodeBin:uridecodebin0/GstRTSPSrc:source: Could not read from resource.
Additional debug info:
../gst/rtsp/gstrtspsrc.c(6506): gst_rtspsrc_try_send (): /GstPlayBin:playbin0/GstURIDecodeBin:uridecodebin0/GstRTSPSrc:source:
Could not receive message. (Timeout while waiting for server response)
ERROR: pipeline doesn't want to preroll.
Freeing pipeline ...
What is the reason for this? And how would you recommend setting up my pipeline so that the other branches keep playing while an RTSP client can still connect at a later point in time?
See my code below
---------- code below------------
#include <gst/app/app.h>
#include <gst/gst.h>
#include <gst/rtsp-server/rtsp-server.h>
#include <stdio.h>
typedef struct {
  GstElement *vid_appsink;
  GstElement *vid_appsrc;
} MyContext;

/* Main pipeline */
GstElement *pipeline;
/* called when we need to give data to an appsrc */
static void need_data(GstElement *appsrc, guint unused, MyContext *ctx) {
  GstSample *sample;
  GstFlowReturn ret;

  sample = gst_app_sink_pull_sample(GST_APP_SINK(ctx->vid_appsink));
  if (sample) {
    GstBuffer *buffer = gst_sample_get_buffer(sample);
    GstSegment *seg = gst_sample_get_segment(sample);
    GstClockTime pts, dts;

    /* Convert the PTS/DTS to running time so they start from 0 */
    pts = GST_BUFFER_PTS(buffer);
    if (GST_CLOCK_TIME_IS_VALID(pts))
      pts = gst_segment_to_running_time(seg, GST_FORMAT_TIME, pts);

    dts = GST_BUFFER_DTS(buffer);
    if (GST_CLOCK_TIME_IS_VALID(dts))
      dts = gst_segment_to_running_time(seg, GST_FORMAT_TIME, dts);

    if (buffer) {
      /* Copy the buffer so we can adjust the timestamps */
      buffer = gst_buffer_copy(buffer);
      GST_BUFFER_PTS(buffer) = pts;
      GST_BUFFER_DTS(buffer) = dts;
      g_signal_emit_by_name(appsrc, "push-buffer", buffer, &ret);
      /* push-buffer does not take ownership, so drop our copy */
      gst_buffer_unref(buffer);
    }

    /* we don't need the appsink sample anymore */
    gst_sample_unref(sample);
  }
}
static void ctx_free(MyContext *ctx) {
  gst_object_unref(ctx->vid_appsrc);
  gst_object_unref(ctx->vid_appsink);
  g_free(ctx);
}
/* called when a new media pipeline is constructed. We can query the
 * pipeline and configure our appsrc */
static void media_configure(GstRTSPMediaFactory *factory, GstRTSPMedia *media,
                            gpointer user_data) {
  GstElement *element, *appsrc, *appsink;
  GstCaps *caps;
  MyContext *ctx;

  ctx = g_new0(MyContext, 1);
  g_object_set_data_full(G_OBJECT(media), "rtsp-extra-data", ctx,
                         (GDestroyNotify)ctx_free);

  element = gst_rtsp_media_get_element(media);

  caps = gst_caps_new_simple("video/x-h264", "stream-format", G_TYPE_STRING,
                             "byte-stream", "alignment", G_TYPE_STRING, "au",
                             "width", G_TYPE_INT, 384, "height", G_TYPE_INT,
                             288, "framerate", GST_TYPE_FRACTION, 15, 1, NULL);

  ctx->vid_appsrc = appsrc =
      gst_bin_get_by_name_recurse_up(GST_BIN(element), "videosrc");
  ctx->vid_appsink = appsink = gst_bin_get_by_name(GST_BIN(pipeline), "vid");

  gst_util_set_object_arg(G_OBJECT(appsrc), "format", "time");
  g_object_set(G_OBJECT(appsrc), "caps", caps, NULL);
  g_object_set(G_OBJECT(appsink), "caps", caps, NULL);

  /* install the callback that will be called when a buffer is needed */
  g_signal_connect(appsrc, "need-data", (GCallback)need_data, ctx);

  gst_caps_unref(caps);
  gst_object_unref(element);
}
int main(int argc, char *argv[]) {
  GMainLoop *loop;
  GstRTSPServer *server;
  GstRTSPMountPoints *mounts;
  GstRTSPMediaFactory *factory;

  gst_init(&argc, &argv);

  loop = g_main_loop_new(NULL, FALSE);

  server = gst_rtsp_server_new();
  mounts = gst_rtsp_server_get_mount_points(server);

  factory = gst_rtsp_media_factory_new();
  gst_rtsp_media_factory_set_launch(
      factory,
      "( appsrc name=videosrc ! h264parse ! rtph264pay name=pay0 pt=96 )");
  g_signal_connect(factory, "media-configure", (GCallback)media_configure,
                   NULL);
  gst_rtsp_mount_points_add_factory(mounts, "/test", factory);
  g_object_unref(mounts);

  gst_rtsp_server_attach(server, NULL);

  pipeline = gst_parse_launch(
      "videotestsrc is-live=true ! x264enc speed-preset=superfast "
      "tune=zerolatency ! h264parse ! appsink name=vid max-buffers=1 "
      "drop=true",
      NULL);

  /* Start playing */
  gst_element_set_state(pipeline, GST_STATE_PLAYING);

  /* start serving */
  g_print("stream ready at rtsp://127.0.0.1:8554/test\n");
  g_main_loop_run(loop);

  return 0;
}
Best regards,
-Øystein