[Spice-commits] 5 commits - configure.ac src/channel-display.c src/channel-display-gst.c src/channel-display-mjpeg.c src/channel-display-priv.h src/Makefile.am
Pavel Grunt
pgrunt at kemper.freedesktop.org
Wed May 18 07:40:14 UTC 2016
 configure.ac                |   37 +++
 src/Makefile.am             |   15 +
 src/channel-display-gst.c   |  458 ++++++++++++++++++++++++++++++++++++++++++++
 src/channel-display-mjpeg.c |  142 ++++++++++++-
 src/channel-display-priv.h  |   18 +
 src/channel-display.c       |  228 +++++++--------------
 6 files changed, 732 insertions(+), 166 deletions(-)
New commits:
commit 46d8442a3734b782b0934e46bbe8e1711b8d5412
Author: Francois Gouget <fgouget at codeweavers.com>
Date: Tue May 19 01:08:49 2015 +0200
streaming: Use decodebin as a fallback for the GStreamer video decoder
This means future video codecs may be supported automatically.
One can also force the use of decodebin by setting $SPICE_GSTVIDEO_AUTO.
Signed-off-by: Francois Gouget <fgouget at codeweavers.com>
Acked-by: Pavel Grunt <pgrunt at redhat.com>
diff --git a/src/channel-display-gst.c b/src/channel-display-gst.c
index 8934371..46a85ea 100644
--- a/src/channel-display-gst.c
+++ b/src/channel-display-gst.c
@@ -245,8 +245,20 @@ static gboolean create_pipeline(SpiceGstDecoder *decoder)
gstdec_name = "h264parse ! avdec_h264";
break;
default:
- spice_warning("Unknown codec type %d", decoder->base.codec_type);
- return -1;
+ SPICE_DEBUG("Unknown codec type %d. Trying decodebin.",
+ decoder->base.codec_type);
+ src_caps = "";
+ gstdec_name = NULL;
+ break;
+ }
+
+ /* decodebin will use vaapi if installed, which for a time could
+ * intentionally crash the application. So only use decodebin as a
+ * fallback or when SPICE_GSTVIDEO_AUTO is set.
+ * See: https://bugs.freedesktop.org/show_bug.cgi?id=90884
+ */
+ if (gstdec_name == NULL || g_getenv("SPICE_GSTVIDEO_AUTO") != NULL) {
+ gstdec_name = "decodebin";
}
/* - We schedule the frame display ourselves so set sync=false on appsink
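For reference, the decodebin fallback above ends up building a pipeline of the shape "appsrc ... ! decodebin ! videoconvert ! appsink". Below is a minimal standalone sketch of that pipeline shape, assuming GStreamer 1.0 is installed; it is an illustration only, not part of the commit:

    /* Illustration only: the pipeline shape produced by the decodebin
     * fallback above, with decodebin left to guess the codec.
     */
    #include <gst/gst.h>

    int main(int argc, char **argv)
    {
        gst_init(&argc, &argv);

        GError *err = NULL;
        GstElement *pipeline = gst_parse_launch(
            "appsrc name=src is-live=true format=time max-bytes=0 block=true"
            " ! decodebin ! videoconvert"
            " ! appsink name=sink caps=video/x-raw,format=BGRx sync=false drop=false",
            &err);
        if (!pipeline) {
            g_printerr("pipeline error: %s\n", err->message);
            g_clear_error(&err);
            return 1;
        }
        /* A real client would now push compressed frames into "src" and pull
         * decoded BGRx frames from "sink", as channel-display-gst.c does.
         */
        gst_object_unref(pipeline);
        return 0;
    }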
commit d780947a6844832274f1f9384fc082d855ac0754
Author: Francois Gouget <fgouget at codeweavers.com>
Date: Tue Dec 22 17:31:53 2015 +0100
streaming: Probe GStreamer before advertising support for a codec
Signed-off-by: Francois Gouget <fgouget at codeweavers.com>
Acked-by: Pavel Grunt <pgrunt at redhat.com>
diff --git a/src/channel-display-gst.c b/src/channel-display-gst.c
index c139c5e..8934371 100644
--- a/src/channel-display-gst.c
+++ b/src/channel-display-gst.c
@@ -395,8 +395,7 @@ static void spice_gst_decoder_queue_frame(VideoDecoder *video_decoder,
}
}
-G_GNUC_INTERNAL
-gboolean gstvideo_init(void)
+static gboolean gstvideo_init(void)
{
static int success = 0;
if (!success) {
@@ -431,3 +430,17 @@ VideoDecoder* create_gstreamer_decoder(int codec_type, display_stream *stream)
return (VideoDecoder*)decoder;
}
+
+G_GNUC_INTERNAL
+gboolean gstvideo_has_codec(int codec_type)
+{
+ gboolean has_codec = FALSE;
+
+ VideoDecoder *decoder = create_gstreamer_decoder(codec_type, NULL);
+ if (decoder) {
+ has_codec = create_pipeline((SpiceGstDecoder*)decoder);
+ decoder->destroy(decoder);
+ }
+
+ return has_codec;
+}
diff --git a/src/channel-display-priv.h b/src/channel-display-priv.h
index b504691..94a90e6 100644
--- a/src/channel-display-priv.h
+++ b/src/channel-display-priv.h
@@ -73,9 +73,9 @@ VideoDecoder* create_mjpeg_decoder(int codec_type, display_stream *stream);
#endif
#ifdef HAVE_GSTVIDEO
VideoDecoder* create_gstreamer_decoder(int codec_type, display_stream *stream);
-gboolean gstvideo_init(void);
+gboolean gstvideo_has_codec(int codec_type);
#else
-# define gstvideo_init() FALSE
+# define gstvideo_has_codec(codec_type) FALSE
#endif
diff --git a/src/channel-display.c b/src/channel-display.c
index e99b130..a301e67 100644
--- a/src/channel-display.c
+++ b/src/channel-display.c
@@ -720,10 +720,23 @@ static void spice_display_channel_reset_capabilities(SpiceChannel *channel)
#ifdef HAVE_BUILTIN_MJPEG
spice_channel_set_capability(SPICE_CHANNEL(channel), SPICE_DISPLAY_CAP_CODEC_MJPEG);
#endif
- if (gstvideo_init()) {
- spice_channel_set_capability(SPICE_CHANNEL(channel), SPICE_DISPLAY_CAP_CODEC_MJPEG);
- spice_channel_set_capability(SPICE_CHANNEL(channel), SPICE_DISPLAY_CAP_CODEC_VP8);
- spice_channel_set_capability(SPICE_CHANNEL(channel), SPICE_DISPLAY_CAP_CODEC_H264);
+ if (gstvideo_has_codec(SPICE_VIDEO_CODEC_TYPE_MJPEG)) {
+ spice_channel_set_capability(SPICE_CHANNEL(channel),
+ SPICE_DISPLAY_CAP_CODEC_MJPEG);
+ } else {
+ spice_info("GStreamer does not support the mjpeg codec");
+ }
+ if (gstvideo_has_codec(SPICE_VIDEO_CODEC_TYPE_VP8)) {
+ spice_channel_set_capability(SPICE_CHANNEL(channel),
+ SPICE_DISPLAY_CAP_CODEC_VP8);
+ } else {
+ spice_info("GStreamer does not support the vp8 codec");
+ }
+ if (gstvideo_has_codec(SPICE_VIDEO_CODEC_TYPE_H264)) {
+ spice_channel_set_capability(SPICE_CHANNEL(channel),
+ SPICE_DISPLAY_CAP_CODEC_H264);
+ } else {
+ spice_info("GStreamer does not support the h264 codec");
}
}
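As a side note, the probe-then-advertise pattern introduced above repeats the same step once per codec and could equally be written as a loop over a small codec table. The sketch below only illustrates that pattern, it is not the committed code; it assumes the spice-protocol constants and the gstvideo_has_codec()/spice_channel_set_capability() calls visible in the diff, and would live in channel-display.c:

    /* Illustration only: table-driven variant of the probe-then-advertise
     * code above. Assumes the declarations from spice-client.h and
     * channel-display-priv.h.
     */
    static const struct {
        int codec_type;   /* SPICE_VIDEO_CODEC_TYPE_* */
        int cap;          /* SPICE_DISPLAY_CAP_CODEC_* */
        const char *name;
    } gst_codecs[] = {
        { SPICE_VIDEO_CODEC_TYPE_MJPEG, SPICE_DISPLAY_CAP_CODEC_MJPEG, "mjpeg" },
        { SPICE_VIDEO_CODEC_TYPE_VP8,   SPICE_DISPLAY_CAP_CODEC_VP8,   "vp8"   },
        { SPICE_VIDEO_CODEC_TYPE_H264,  SPICE_DISPLAY_CAP_CODEC_H264,  "h264"  },
    };

    static void advertise_gst_codecs(SpiceChannel *channel)
    {
        guint i;
        for (i = 0; i < G_N_ELEMENTS(gst_codecs); i++) {
            if (gstvideo_has_codec(gst_codecs[i].codec_type)) {
                spice_channel_set_capability(channel, gst_codecs[i].cap);
            } else {
                spice_info("GStreamer does not support the %s codec",
                           gst_codecs[i].name);
            }
        }
    }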
commit 3c11ac2c8776abe71eb1820068ab4ee801ee4752
Author: Francois Gouget <fgouget at codeweavers.com>
Date: Thu May 28 19:02:12 2015 +0200
streaming: Allow disabling support for the builtin MJPEG video decoder
This makes it possible to test the GStreamer video decoder with MJPEG
streams.
Signed-off-by: Francois Gouget <fgouget at codeweavers.com>
Acked-by: Pavel Grunt <pgrunt at redhat.com>
diff --git a/configure.ac b/configure.ac
index f13301d..38c5bab 100644
--- a/configure.ac
+++ b/configure.ac
@@ -289,6 +289,17 @@ AS_IF([test "x$enable_gstvideo" != "xno"],
)
AM_CONDITIONAL([HAVE_GSTVIDEO], [test "x$have_gstvideo" = "xyes"])
+AC_ARG_ENABLE([builtin-mjpeg],
+ AS_HELP_STRING([--enable-builtin-mjpeg], [Enable the builtin mjpeg video decoder @<:@default=yes@:>@]),
+ [],
+ enable_builtin_mjpeg="yes")
+AS_IF([test "x$enable_builtin_mjpeg" = "xyes"],
+ [AC_DEFINE([HAVE_BUILTIN_MJPEG], 1, [Use the builtin mjpeg decoder?])])
+AM_CONDITIONAL(HAVE_BUILTIN_MJPEG, [test "x$enable_builtin_mjpeg" != "xno"])
+
+AS_IF([test "x$enable_builtin_mjpeg$enable_gstvideo" = "xnono"],
+ [SPICE_WARNING([No builtin MJPEG or GStreamer decoder, video will not be streamed])])
+
AC_CHECK_LIB(jpeg, jpeg_destroy_decompress,
AC_MSG_CHECKING([for jpeglib.h])
AC_TRY_CPP(
diff --git a/src/Makefile.am b/src/Makefile.am
index 317e993..73bb39c 100644
--- a/src/Makefile.am
+++ b/src/Makefile.am
@@ -242,7 +242,6 @@ libspice_client_glib_2_0_la_SOURCES = \
channel-cursor.c \
channel-display.c \
channel-display-priv.h \
- channel-display-mjpeg.c \
channel-inputs.c \
channel-main.c \
channel-playback.c \
@@ -330,6 +329,12 @@ libspice_client_glib_2_0_la_SOURCES += \
$(NULL)
endif
+if HAVE_BUILTIN_MJPEG
+libspice_client_glib_2_0_la_SOURCES += \
+ channel-display-mjpeg.c \
+ $(NULL)
+endif
+
if HAVE_GSTVIDEO
libspice_client_glib_2_0_la_SOURCES += \
channel-display-gst.c \
diff --git a/src/channel-display-priv.h b/src/channel-display-priv.h
index d1a30a6..b504691 100644
--- a/src/channel-display-priv.h
+++ b/src/channel-display-priv.h
@@ -68,7 +68,9 @@ struct VideoDecoder {
* @stream: The associated video stream.
* @return: A pointer to a structure implementing the VideoDecoder methods.
*/
+#ifdef HAVE_BUILTIN_MJPEG
VideoDecoder* create_mjpeg_decoder(int codec_type, display_stream *stream);
+#endif
#ifdef HAVE_GSTVIDEO
VideoDecoder* create_gstreamer_decoder(int codec_type, display_stream *stream);
gboolean gstvideo_init(void);
diff --git a/src/channel-display.c b/src/channel-display.c
index 631fac5..e99b130 100644
--- a/src/channel-display.c
+++ b/src/channel-display.c
@@ -717,8 +717,11 @@ static void spice_display_channel_reset_capabilities(SpiceChannel *channel)
spice_channel_set_capability(SPICE_CHANNEL(channel), SPICE_DISPLAY_CAP_GL_SCANOUT);
#endif
spice_channel_set_capability(SPICE_CHANNEL(channel), SPICE_DISPLAY_CAP_MULTI_CODEC);
+#ifdef HAVE_BUILTIN_MJPEG
spice_channel_set_capability(SPICE_CHANNEL(channel), SPICE_DISPLAY_CAP_CODEC_MJPEG);
+#endif
if (gstvideo_init()) {
+ spice_channel_set_capability(SPICE_CHANNEL(channel), SPICE_DISPLAY_CAP_CODEC_MJPEG);
spice_channel_set_capability(SPICE_CHANNEL(channel), SPICE_DISPLAY_CAP_CODEC_VP8);
spice_channel_set_capability(SPICE_CHANNEL(channel), SPICE_DISPLAY_CAP_CODEC_H264);
}
@@ -1098,9 +1101,11 @@ static void display_handle_stream_create(SpiceChannel *channel, SpiceMsgIn *in)
display_update_stream_region(st);
switch (op->codec_type) {
+#ifdef HAVE_BUILTIN_MJPEG
case SPICE_VIDEO_CODEC_TYPE_MJPEG:
st->video_decoder = create_mjpeg_decoder(op->codec_type, st);
break;
+#endif
default:
#ifdef HAVE_GSTVIDEO
st->video_decoder = create_gstreamer_decoder(op->codec_type, st);
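Put together, once both options are enabled, the decoder selection in display_handle_stream_create() reads as follows. This is just a consolidated view of the #ifdef'ed hunks above, assuming HAVE_BUILTIN_MJPEG and HAVE_GSTVIDEO are both defined; it is shown for readability, not as new code:

    /* Consolidated view of the code above with HAVE_BUILTIN_MJPEG and
     * HAVE_GSTVIDEO both defined.
     */
    switch (op->codec_type) {
    case SPICE_VIDEO_CODEC_TYPE_MJPEG:
        /* Prefer the builtin MJPEG decoder when it is compiled in */
        st->video_decoder = create_mjpeg_decoder(op->codec_type, st);
        break;
    default:
        /* All other codecs (VP8, h264, ...) go through GStreamer */
        st->video_decoder = create_gstreamer_decoder(op->codec_type, st);
    }
    if (st->video_decoder == NULL) {
        spice_printerr("could not create a video decoder for codec %d",
                       op->codec_type);
    }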
commit 8891be2c2d0347fbfc196973919c0a7c60d7a0ce
Author: Francois Gouget <fgouget at codeweavers.com>
Date: Wed Jul 29 12:49:13 2015 +0200
streaming: Add a GStreamer video decoder for MJPEG, VP8 and h264
Signed-off-by: Francois Gouget <fgouget at codeweavers.com>
Acked-by: Pavel Grunt <pgrunt at redhat.com>
diff --git a/configure.ac b/configure.ac
index ce80d88..f13301d 100644
--- a/configure.ac
+++ b/configure.ac
@@ -266,6 +266,29 @@ AS_IF([test "x$enable_pulse$have_gstaudio" = "xnono"],
[SPICE_WARNING([No PulseAudio or GStreamer 1.0 audio decoder, audio will not be streamed])
])
+AC_ARG_ENABLE([gstvideo],
+ AS_HELP_STRING([--enable-gstvideo=@<:@auto/yes/no@:>@],
+ [Enable GStreamer video support @<:@default=auto@:>@]),
+ [],
+ [enable_gstvideo="auto"])
+AS_IF([test "x$enable_gstvideo" != "xno"],
+ [SPICE_CHECK_GSTREAMER(GSTVIDEO, 1.0,
+ [gstreamer-1.0 gstreamer-base-1.0 gstreamer-app-1.0 gstreamer-video-1.0],
+ [missing_gstreamer_elements=""
+ SPICE_CHECK_GSTREAMER_ELEMENTS($GST_INSPECT_1_0, [gst-plugins-base 1.0], [appsrc videoconvert appsink])
+ SPICE_CHECK_GSTREAMER_ELEMENTS($GST_INSPECT_1_0, [gst-plugins-good 1.0], [jpegdec vp8dec])
+ SPICE_CHECK_GSTREAMER_ELEMENTS($GST_INSPECT_1_0, [gst-plugins-bad 1.0], [h264parse])
+ SPICE_CHECK_GSTREAMER_ELEMENTS($GST_INSPECT_1_0, [gstreamer-libav 1.0], [avdec_h264])
+ AS_IF([test x"$missing_gstreamer_elements" = "xyes"],
+ SPICE_WARNING([The GStreamer video decoder can be built but may not work.]))
+ ],
+ [AS_IF([test "x$enable_gstvideo" = "xyes"],
+ AC_MSG_ERROR([GStreamer 1.0 video requested but not found]))
+ ])
+ ], [have_gstvideo="no"]
+)
+AM_CONDITIONAL([HAVE_GSTVIDEO], [test "x$have_gstvideo" = "xyes"])
+
AC_CHECK_LIB(jpeg, jpeg_destroy_decompress,
AC_MSG_CHECKING([for jpeglib.h])
AC_TRY_CPP(
@@ -552,7 +575,7 @@ SPICE_CFLAGS="$SPICE_CFLAGS $WARN_CFLAGS"
AC_SUBST(SPICE_CFLAGS)
-SPICE_GLIB_CFLAGS="$PIXMAN_CFLAGS $PULSE_CFLAGS $GSTAUDIO_CFLAGS $GLIB2_CFLAGS $GIO_CFLAGS $GOBJECT2_CFLAGS $SSL_CFLAGS $SASL_CFLAGS"
+SPICE_GLIB_CFLAGS="$PIXMAN_CFLAGS $PULSE_CFLAGS $GSTAUDIO_CFLAGS $GSTVIDEO_CFLAGS $GLIB2_CFLAGS $GIO_CFLAGS $GOBJECT2_CFLAGS $SSL_CFLAGS $SASL_CFLAGS"
SPICE_GTK_CFLAGS="$SPICE_GLIB_CFLAGS $GTK_CFLAGS "
AC_SUBST(SPICE_GLIB_CFLAGS)
@@ -596,6 +619,7 @@ AC_MSG_NOTICE([
Coroutine: ${with_coroutine}
PulseAudio: ${enable_pulse}
GStreamer Audio: ${have_gstaudio}
+ GStreamer Video: ${have_gstvideo}
SASL support: ${have_sasl}
Smartcard support: ${have_smartcard}
USB redirection support: ${have_usbredir} ${with_usbredir_hotplug}
diff --git a/src/Makefile.am b/src/Makefile.am
index 0ef3bea..317e993 100644
--- a/src/Makefile.am
+++ b/src/Makefile.am
@@ -94,6 +94,7 @@ SPICE_COMMON_CPPFLAGS = \
$(SSL_CFLAGS) \
$(SASL_CFLAGS) \
$(GSTAUDIO_CFLAGS) \
+ $(GSTVIDEO_CFLAGS) \
$(SMARTCARD_CFLAGS) \
$(USBREDIR_CFLAGS) \
$(GUDEV_CFLAGS) \
@@ -197,6 +198,7 @@ libspice_client_glib_2_0_la_LIBADD = \
$(SSL_LIBS) \
$(PULSE_LIBS) \
$(GSTAUDIO_LIBS) \
+ $(GSTVIDEO_LIBS) \
$(SASL_LIBS) \
$(SMARTCARD_LIBS) \
$(USBREDIR_LIBS) \
@@ -328,6 +330,12 @@ libspice_client_glib_2_0_la_SOURCES += \
$(NULL)
endif
+if HAVE_GSTVIDEO
+libspice_client_glib_2_0_la_SOURCES += \
+ channel-display-gst.c \
+ $(NULL)
+endif
+
if WITH_PHODAV
libspice_client_glib_2_0_la_SOURCES += \
giopipe.c \
diff --git a/src/channel-display-gst.c b/src/channel-display-gst.c
new file mode 100644
index 0000000..c139c5e
--- /dev/null
+++ b/src/channel-display-gst.c
@@ -0,0 +1,433 @@
+/* -*- Mode: C; c-basic-offset: 4; indent-tabs-mode: nil -*- */
+/*
+ Copyright (C) 2015-2016 CodeWeavers, Inc
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, see <http://www.gnu.org/licenses/>.
+*/
+#include "config.h"
+
+#include "spice-client.h"
+#include "spice-common.h"
+#include "spice-channel-priv.h"
+
+#include "channel-display-priv.h"
+
+#include <gst/gst.h>
+#include <gst/app/gstappsrc.h>
+#include <gst/app/gstappsink.h>
+
+
+/* GStreamer decoder implementation */
+
+typedef struct SpiceGstDecoder {
+ VideoDecoder base;
+
+ /* ---------- GStreamer pipeline ---------- */
+
+ GstAppSrc *appsrc;
+ GstAppSink *appsink;
+ GstElement *pipeline;
+ GstClock *clock;
+
+ /* ---------- Decoding and display queues ---------- */
+
+ uint32_t last_mm_time;
+
+ GMutex queues_mutex;
+ GQueue *decoding_queue;
+ GQueue *display_queue;
+ guint timer_id;
+} SpiceGstDecoder;
+
+
+/* ---------- SpiceFrame ---------- */
+
+typedef struct _SpiceFrame {
+ GstClockTime timestamp;
+ SpiceMsgIn *msg;
+ GstSample *sample;
+} SpiceFrame;
+
+static SpiceFrame *create_frame(GstBuffer *buffer, SpiceMsgIn *msg)
+{
+ SpiceFrame *frame = spice_new(SpiceFrame, 1);
+ frame->timestamp = GST_BUFFER_PTS(buffer);
+ frame->msg = msg;
+ spice_msg_in_ref(msg);
+ frame->sample = NULL;
+ return frame;
+}
+
+static void free_frame(SpiceFrame *frame)
+{
+ spice_msg_in_unref(frame->msg);
+ if (frame->sample) {
+ gst_sample_unref(frame->sample);
+ }
+ free(frame);
+}
+
+
+/* ---------- GStreamer pipeline ---------- */
+
+static void schedule_frame(SpiceGstDecoder *decoder);
+
+/* main context */
+static gboolean display_frame(gpointer video_decoder)
+{
+ SpiceGstDecoder *decoder = (SpiceGstDecoder*)video_decoder;
+
+ decoder->timer_id = 0;
+
+ g_mutex_lock(&decoder->queues_mutex);
+ SpiceFrame *frame = g_queue_pop_head(decoder->display_queue);
+ g_mutex_unlock(&decoder->queues_mutex);
+ g_return_val_if_fail(frame, G_SOURCE_REMOVE);
+
+ GstBuffer *buffer = frame->sample ? gst_sample_get_buffer(frame->sample) : NULL;
+ GstMapInfo mapinfo;
+ if (!frame->sample) {
+ spice_warning("got a frame without a sample!");
+ } else if (gst_buffer_map(buffer, &mapinfo, GST_MAP_READ)) {
+ stream_display_frame(decoder->base.stream, frame->msg, mapinfo.data);
+ gst_buffer_unmap(buffer, &mapinfo);
+ } else {
+ spice_warning("GStreamer error: could not map the buffer");
+ }
+ free_frame(frame);
+
+ schedule_frame(decoder);
+ return G_SOURCE_REMOVE;
+}
+
+/* main loop or GStreamer streaming thread */
+static void schedule_frame(SpiceGstDecoder *decoder)
+{
+ guint32 now = stream_get_time(decoder->base.stream);
+ g_mutex_lock(&decoder->queues_mutex);
+
+ while (!decoder->timer_id) {
+ SpiceFrame *frame = g_queue_peek_head(decoder->display_queue);
+ if (!frame) {
+ break;
+ }
+
+ SpiceStreamDataHeader *op = spice_msg_in_parsed(frame->msg);
+ if (now < op->multi_media_time) {
+ decoder->timer_id = g_timeout_add(op->multi_media_time - now,
+ display_frame, decoder);
+ } else if (g_queue_get_length(decoder->display_queue) == 1) {
+ /* Still attempt to display the least out of date frame so the
+ * video is not completely frozen for an extended period of time.
+ */
+ decoder->timer_id = g_timeout_add(0, display_frame, decoder);
+ } else {
+ SPICE_DEBUG("%s: rendering too late by %u ms (ts: %u, mmtime: %u), dropping",
+ __FUNCTION__, now - op->multi_media_time,
+ op->multi_media_time, now);
+ stream_dropped_frame_on_playback(decoder->base.stream);
+ g_queue_pop_head(decoder->display_queue);
+ free_frame(frame);
+ }
+ }
+
+ g_mutex_unlock(&decoder->queues_mutex);
+}
+
+/* GStreamer thread
+ *
+ * We cannot use GStreamer's signals because they are not always run in
+ * the main context. So use a callback (lower overhead) and have it pull
+ * the sample to avoid a race with free_pipeline(). This means queuing the
+ * decoded frames outside GStreamer. So while we're at it, also schedule
+ * the frame display ourselves in schedule_frame().
+ */
+static GstFlowReturn new_sample(GstAppSink *gstappsink, gpointer video_decoder)
+{
+ SpiceGstDecoder *decoder = video_decoder;
+
+ GstSample *sample = gst_app_sink_pull_sample(decoder->appsink);
+ GstBuffer *buffer = sample ? gst_sample_get_buffer(sample) : NULL;
+ if (sample) {
+ g_mutex_lock(&decoder->queues_mutex);
+
+ /* gst_app_sink_pull_sample() sometimes returns the same buffer twice
+ * or buffers that have a modified, and thus unrecognizable, PTS.
+ * Blindly removing frames from the decoding_queue until we find a
+ * match would only empty the queue, resulting in later buffers not
+ * finding a match either, etc. So check the buffer has a matching
+ * frame first.
+ */
+ SpiceFrame *frame;
+ GList *l = g_queue_peek_head_link(decoder->decoding_queue);
+ while (l) {
+ frame = l->data;
+ if (frame->timestamp == GST_BUFFER_PTS(buffer)) {
+ /* Now that we know there is a match, remove the older
+ * frames from the decoding queue.
+ */
+ while ((frame = g_queue_pop_head(decoder->decoding_queue))) {
+ if (frame->timestamp == GST_BUFFER_PTS(buffer)) {
+ break;
+ }
+ /* The GStreamer pipeline dropped the corresponding
+ * buffer.
+ */
+ SPICE_DEBUG("the GStreamer pipeline dropped a frame");
+ free_frame(frame);
+ }
+
+ /* The frame is now ready for display */
+ frame->sample = sample;
+ g_queue_push_tail(decoder->display_queue, frame);
+ break;
+ }
+ l = l->next;
+ }
+ if (!l) {
+ spice_warning("got an unexpected decoded buffer!");
+ gst_sample_unref(sample);
+ }
+
+ g_mutex_unlock(&decoder->queues_mutex);
+ schedule_frame(decoder);
+ } else {
+ spice_warning("GStreamer error: could not pull sample");
+ }
+ return GST_FLOW_OK;
+}
+
+static void free_pipeline(SpiceGstDecoder *decoder)
+{
+ if (!decoder->pipeline) {
+ return;
+ }
+
+ gst_element_set_state(decoder->pipeline, GST_STATE_NULL);
+ gst_object_unref(decoder->appsrc);
+ gst_object_unref(decoder->appsink);
+ gst_object_unref(decoder->pipeline);
+ gst_object_unref(decoder->clock);
+ decoder->pipeline = NULL;
+}
+
+static gboolean create_pipeline(SpiceGstDecoder *decoder)
+{
+ const gchar *src_caps, *gstdec_name;
+ switch (decoder->base.codec_type) {
+ case SPICE_VIDEO_CODEC_TYPE_MJPEG:
+ src_caps = "caps=image/jpeg";
+ gstdec_name = "jpegdec";
+ break;
+ case SPICE_VIDEO_CODEC_TYPE_VP8:
+ /* typefind is unable to identify VP8 streams by design.
+ * See: https://bugzilla.gnome.org/show_bug.cgi?id=756457
+ */
+ src_caps = "caps=video/x-vp8";
+ gstdec_name = "vp8dec";
+ break;
+ case SPICE_VIDEO_CODEC_TYPE_H264:
+ /* h264 streams detection works fine and setting an incomplete cap
+ * causes errors. So let typefind do all the work.
+ */
+ src_caps = "";
+ gstdec_name = "h264parse ! avdec_h264";
+ break;
+ default:
+ spice_warning("Unknown codec type %d", decoder->base.codec_type);
+ return -1;
+ }
+
+ /* - We schedule the frame display ourselves so set sync=false on appsink
+ * so the pipeline decodes them as fast as possible. This will also
+ * minimize the risk of frames getting lost when we rebuild the
+ * pipeline.
+ * - Set max-bytes=0 on appsrc so it does not drop frames that may be
+ * needed by those that follow.
+ */
+ gchar *desc = g_strdup_printf("appsrc name=src is-live=true format=time max-bytes=0 block=true %s ! %s ! videoconvert ! appsink name=sink caps=video/x-raw,format=BGRx sync=false drop=false", src_caps, gstdec_name);
+ SPICE_DEBUG("GStreamer pipeline: %s", desc);
+
+ GError *err = NULL;
+ decoder->pipeline = gst_parse_launch_full(desc, NULL, GST_PARSE_FLAG_FATAL_ERRORS, &err);
+ g_free(desc);
+ if (!decoder->pipeline) {
+ spice_warning("GStreamer error: %s", err->message);
+ g_clear_error(&err);
+ return FALSE;
+ }
+
+ decoder->appsrc = GST_APP_SRC(gst_bin_get_by_name(GST_BIN(decoder->pipeline), "src"));
+ decoder->appsink = GST_APP_SINK(gst_bin_get_by_name(GST_BIN(decoder->pipeline), "sink"));
+ GstAppSinkCallbacks appsink_cbs = {NULL, NULL, &new_sample, {NULL}};
+ gst_app_sink_set_callbacks(decoder->appsink, &appsink_cbs, decoder, NULL);
+
+ decoder->clock = gst_pipeline_get_clock(GST_PIPELINE(decoder->pipeline));
+
+ if (gst_element_set_state(decoder->pipeline, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
+ SPICE_DEBUG("GStreamer error: Unable to set the pipeline to the playing state.");
+ free_pipeline(decoder);
+ return FALSE;
+ }
+
+ return TRUE;
+}
+
+
+/* ---------- VideoDecoder's public API ---------- */
+
+static void spice_gst_decoder_reschedule(VideoDecoder *video_decoder)
+{
+ SpiceGstDecoder *decoder = (SpiceGstDecoder*)video_decoder;
+ if (decoder->timer_id != 0) {
+ g_source_remove(decoder->timer_id);
+ decoder->timer_id = 0;
+ }
+ schedule_frame(decoder);
+}
+
+/* main context */
+static void spice_gst_decoder_destroy(VideoDecoder *video_decoder)
+{
+ SpiceGstDecoder *decoder = (SpiceGstDecoder*)video_decoder;
+
+ /* Stop and free the pipeline to ensure there will not be any further
+ * new_sample() call (clearing thread-safety concerns).
+ */
+ free_pipeline(decoder);
+
+ /* Even if we kept the decoder around, once we return the stream will be
+ * destroyed making it impossible to display frames. So cancel any
+ * scheduled display_frame() call and drop the queued frames.
+ */
+ if (decoder->timer_id) {
+ g_source_remove(decoder->timer_id);
+ }
+ g_mutex_clear(&decoder->queues_mutex);
+ SpiceFrame *frame;
+ while ((frame = g_queue_pop_head(decoder->decoding_queue))) {
+ free_frame(frame);
+ }
+ g_queue_free(decoder->decoding_queue);
+ while ((frame = g_queue_pop_head(decoder->display_queue))) {
+ free_frame(frame);
+ }
+ g_queue_free(decoder->display_queue);
+
+ free(decoder);
+
+ /* Don't call gst_deinit() as other parts of the client
+ * may still be using GStreamer.
+ */
+}
+
+static void release_buffer_data(gpointer data)
+{
+ SpiceMsgIn* frame_msg = (SpiceMsgIn*)data;
+ spice_msg_in_unref(frame_msg);
+}
+
+static void spice_gst_decoder_queue_frame(VideoDecoder *video_decoder,
+ SpiceMsgIn *frame_msg,
+ int32_t latency)
+{
+ SpiceGstDecoder *decoder = (SpiceGstDecoder*)video_decoder;
+
+ uint8_t *data;
+ uint32_t size = spice_msg_in_frame_data(frame_msg, &data);
+ if (size == 0) {
+ SPICE_DEBUG("got an empty frame buffer!");
+ return;
+ }
+
+ SpiceStreamDataHeader *frame_op = spice_msg_in_parsed(frame_msg);
+ if (frame_op->multi_media_time < decoder->last_mm_time) {
+ SPICE_DEBUG("new-frame-time < last-frame-time (%u < %u):"
+ " resetting stream, id %d",
+ frame_op->multi_media_time,
+ decoder->last_mm_time, frame_op->id);
+ /* Let GStreamer deal with the frame anyway */
+ }
+ decoder->last_mm_time = frame_op->multi_media_time;
+
+ if (latency < 0 &&
+ decoder->base.codec_type == SPICE_VIDEO_CODEC_TYPE_MJPEG) {
+ /* Dropping MJPEG frames has no impact on those that follow and
+ * saves CPU so do it.
+ */
+ SPICE_DEBUG("dropping a late MJPEG frame");
+ return;
+ }
+
+ if (!decoder->pipeline && !create_pipeline(decoder)) {
+ stream_dropped_frame_on_playback(decoder->base.stream);
+ return;
+ }
+
+ /* ref() the frame_msg for the buffer */
+ spice_msg_in_ref(frame_msg);
+ GstBuffer *buffer = gst_buffer_new_wrapped_full(GST_MEMORY_FLAG_PHYSICALLY_CONTIGUOUS,
+ data, size, 0, size,
+ frame_msg, &release_buffer_data);
+
+ GST_BUFFER_DURATION(buffer) = GST_CLOCK_TIME_NONE;
+ GST_BUFFER_DTS(buffer) = GST_CLOCK_TIME_NONE;
+ GST_BUFFER_PTS(buffer) = gst_clock_get_time(decoder->clock) - gst_element_get_base_time(decoder->pipeline) + ((uint64_t)MAX(0, latency)) * 1000 * 1000;
+
+ g_mutex_lock(&decoder->queues_mutex);
+ g_queue_push_tail(decoder->decoding_queue, create_frame(buffer, frame_msg));
+ g_mutex_unlock(&decoder->queues_mutex);
+
+ if (gst_app_src_push_buffer(decoder->appsrc, buffer) != GST_FLOW_OK) {
+ SPICE_DEBUG("GStreamer error: unable to push frame of size %d", size);
+ stream_dropped_frame_on_playback(decoder->base.stream);
+ }
+}
+
+G_GNUC_INTERNAL
+gboolean gstvideo_init(void)
+{
+ static int success = 0;
+ if (!success) {
+ GError *err = NULL;
+ if (gst_init_check(NULL, NULL, &err)) {
+ success = 1;
+ } else {
+ spice_warning("Disabling GStreamer video support: %s", err->message);
+ g_clear_error(&err);
+ success = -1;
+ }
+ }
+ return success > 0;
+}
+
+G_GNUC_INTERNAL
+VideoDecoder* create_gstreamer_decoder(int codec_type, display_stream *stream)
+{
+ SpiceGstDecoder *decoder = NULL;
+
+ if (gstvideo_init()) {
+ decoder = spice_new0(SpiceGstDecoder, 1);
+ decoder->base.destroy = spice_gst_decoder_destroy;
+ decoder->base.reschedule = spice_gst_decoder_reschedule;
+ decoder->base.queue_frame = spice_gst_decoder_queue_frame;
+ decoder->base.codec_type = codec_type;
+ decoder->base.stream = stream;
+ g_mutex_init(&decoder->queues_mutex);
+ decoder->decoding_queue = g_queue_new();
+ decoder->display_queue = g_queue_new();
+ }
+
+ return (VideoDecoder*)decoder;
+}
diff --git a/src/channel-display-priv.h b/src/channel-display-priv.h
index ec48f51..d1a30a6 100644
--- a/src/channel-display-priv.h
+++ b/src/channel-display-priv.h
@@ -69,6 +69,12 @@ struct VideoDecoder {
* @return: A pointer to a structure implementing the VideoDecoder methods.
*/
VideoDecoder* create_mjpeg_decoder(int codec_type, display_stream *stream);
+#ifdef HAVE_GSTVIDEO
+VideoDecoder* create_gstreamer_decoder(int codec_type, display_stream *stream);
+gboolean gstvideo_init(void);
+#else
+# define gstvideo_init() FALSE
+#endif
typedef struct display_surface {
diff --git a/src/channel-display.c b/src/channel-display.c
index cd6ddf4..631fac5 100644
--- a/src/channel-display.c
+++ b/src/channel-display.c
@@ -716,6 +716,12 @@ static void spice_display_channel_reset_capabilities(SpiceChannel *channel)
#ifdef G_OS_UNIX
spice_channel_set_capability(SPICE_CHANNEL(channel), SPICE_DISPLAY_CAP_GL_SCANOUT);
#endif
+ spice_channel_set_capability(SPICE_CHANNEL(channel), SPICE_DISPLAY_CAP_MULTI_CODEC);
+ spice_channel_set_capability(SPICE_CHANNEL(channel), SPICE_DISPLAY_CAP_CODEC_MJPEG);
+ if (gstvideo_init()) {
+ spice_channel_set_capability(SPICE_CHANNEL(channel), SPICE_DISPLAY_CAP_CODEC_VP8);
+ spice_channel_set_capability(SPICE_CHANNEL(channel), SPICE_DISPLAY_CAP_CODEC_H264);
+ }
}
static void destroy_surface(gpointer data)
@@ -1096,7 +1102,11 @@ static void display_handle_stream_create(SpiceChannel *channel, SpiceMsgIn *in)
st->video_decoder = create_mjpeg_decoder(op->codec_type, st);
break;
default:
+#ifdef HAVE_GSTVIDEO
+ st->video_decoder = create_gstreamer_decoder(op->codec_type, st);
+#else
st->video_decoder = NULL;
+#endif
}
if (st->video_decoder == NULL) {
spice_printerr("could not create a video decoder for codec %d", op->codec_type);
commit 326f95980d49886e4d42392dc75c3e7134387975
Author: Francois Gouget <fgouget at codeweavers.com>
Date: Thu Feb 18 01:01:30 2016 +0100
streaming: Let the video decoder queue, schedule and drop the frames
Signed-off-by: Francois Gouget <fgouget at codeweavers.com>
Acked-by: Pavel Grunt <pgrunt at redhat.com>
diff --git a/src/channel-display-mjpeg.c b/src/channel-display-mjpeg.c
index 927827b..1238b41 100644
--- a/src/channel-display-mjpeg.c
+++ b/src/channel-display-mjpeg.c
@@ -31,11 +31,16 @@ typedef struct MJpegDecoder {
/* ---------- The builtin mjpeg decoder ---------- */
- SpiceMsgIn *frame_msg;
struct jpeg_source_mgr mjpeg_src;
struct jpeg_decompress_struct mjpeg_cinfo;
struct jpeg_error_mgr mjpeg_jerr;
+ /* ---------- Frame queue ---------- */
+
+ GQueue *msgq;
+ SpiceMsgIn *cur_frame_msg;
+ guint timer_id;
+
/* ---------- Output frame data ---------- */
uint8_t *out_frame;
@@ -50,7 +55,7 @@ static void mjpeg_src_init(struct jpeg_decompress_struct *cinfo)
MJpegDecoder *decoder = SPICE_CONTAINEROF(cinfo->src, MJpegDecoder, mjpeg_src);
uint8_t *data;
- cinfo->src->bytes_in_buffer = spice_msg_in_frame_data(decoder->frame_msg, &data);
+ cinfo->src->bytes_in_buffer = spice_msg_in_frame_data(decoder->cur_frame_msg, &data);
cinfo->src->next_input_byte = data;
}
@@ -72,10 +77,12 @@ static void mjpeg_src_term(struct jpeg_decompress_struct *cinfo)
}
-/* ---------- VideoDecoder's public API ---------- */
+/* ---------- Decoder proper ---------- */
-static uint8_t* mjpeg_decoder_decode_frame(VideoDecoder *video_decoder,
- SpiceMsgIn *frame_msg)
+static void mjpeg_decoder_schedule(MJpegDecoder *decoder);
+
+/* main context */
+static gboolean mjpeg_decoder_decode_frame(gpointer video_decoder)
{
MJpegDecoder *decoder = (MJpegDecoder*)video_decoder;
gboolean back_compat = decoder->base.stream->channel->priv->peer_hdr.major_version == 1;
@@ -84,8 +91,7 @@ static uint8_t* mjpeg_decoder_decode_frame(VideoDecoder *video_decoder,
uint8_t *dest;
uint8_t *lines[4];
- decoder->frame_msg = frame_msg;
- stream_get_dimensions(decoder->base.stream, frame_msg, &width, &height);
+ stream_get_dimensions(decoder->base.stream, decoder->cur_frame_msg, &width, &height);
if (decoder->out_size < width * height * 4) {
g_free(decoder->out_frame);
decoder->out_size = width * height * 4;
@@ -118,7 +124,7 @@ static uint8_t* mjpeg_decoder_decode_frame(VideoDecoder *video_decoder,
*/
if (decoder->mjpeg_cinfo.rec_outbuf_height > G_N_ELEMENTS(lines)) {
jpeg_abort_decompress(&decoder->mjpeg_cinfo);
- g_return_val_if_reached(NULL);
+ g_return_val_if_reached(G_SOURCE_REMOVE);
}
while (decoder->mjpeg_cinfo.output_scanline < decoder->mjpeg_cinfo.output_height) {
@@ -161,12 +167,125 @@ static uint8_t* mjpeg_decoder_decode_frame(VideoDecoder *video_decoder,
}
jpeg_finish_decompress(&decoder->mjpeg_cinfo);
- return decoder->out_frame;
+ /* Display the frame and dispose of it */
+ stream_display_frame(decoder->base.stream, decoder->cur_frame_msg, decoder->out_frame);
+ spice_msg_in_unref(decoder->cur_frame_msg);
+ decoder->cur_frame_msg = NULL;
+ decoder->timer_id = 0;
+
+ /* Schedule the next frame */
+ mjpeg_decoder_schedule(decoder);
+
+ return G_SOURCE_REMOVE;
+}
+
+/* ---------- VideoDecoder's queue scheduling ---------- */
+
+static void mjpeg_decoder_schedule(MJpegDecoder *decoder)
+{
+ SPICE_DEBUG("%s", __FUNCTION__);
+ if (decoder->timer_id) {
+ return;
+ }
+
+ guint32 time = stream_get_time(decoder->base.stream);
+ SpiceMsgIn *frame_msg = decoder->cur_frame_msg;
+ decoder->cur_frame_msg = NULL;
+ do {
+ if (frame_msg) {
+ SpiceStreamDataHeader *op = spice_msg_in_parsed(frame_msg);
+ if (time <= op->multi_media_time) {
+ guint32 d = op->multi_media_time - time;
+ decoder->cur_frame_msg = frame_msg;
+ decoder->timer_id = g_timeout_add(d, mjpeg_decoder_decode_frame, decoder);
+ break;
+ }
+
+ SPICE_DEBUG("%s: rendering too late by %u ms (ts: %u, mmtime: %u), dropping ",
+ __FUNCTION__, time - op->multi_media_time,
+ op->multi_media_time, time);
+ stream_dropped_frame_on_playback(decoder->base.stream);
+ spice_msg_in_unref(frame_msg);
+ }
+ frame_msg = g_queue_pop_head(decoder->msgq);
+ } while (frame_msg);
+}
+
+
+/* mjpeg_decoder_drop_queue() helper */
+static void _msg_in_unref_func(gpointer data, gpointer user_data)
+{
+ spice_msg_in_unref(data);
+}
+
+static void mjpeg_decoder_drop_queue(MJpegDecoder *decoder)
+{
+ if (decoder->timer_id != 0) {
+ g_source_remove(decoder->timer_id);
+ decoder->timer_id = 0;
+ }
+ if (decoder->cur_frame_msg) {
+ spice_msg_in_unref(decoder->cur_frame_msg);
+ decoder->cur_frame_msg = NULL;
+ }
+ g_queue_foreach(decoder->msgq, _msg_in_unref_func, NULL);
+ g_queue_clear(decoder->msgq);
+}
+
+/* ---------- VideoDecoder's public API ---------- */
+
+static void mjpeg_decoder_queue_frame(VideoDecoder *video_decoder,
+ SpiceMsgIn *frame_msg, int32_t latency)
+{
+ MJpegDecoder *decoder = (MJpegDecoder*)video_decoder;
+ SpiceMsgIn *last_msg;
+
+ SPICE_DEBUG("%s", __FUNCTION__);
+
+ last_msg = g_queue_peek_tail(decoder->msgq);
+ if (last_msg) {
+ SpiceStreamDataHeader *last_op, *frame_op;
+ last_op = spice_msg_in_parsed(last_msg);
+ frame_op = spice_msg_in_parsed(frame_msg);
+ if (frame_op->multi_media_time < last_op->multi_media_time) {
+ /* This should really not happen */
+ SPICE_DEBUG("new-frame-time < last-frame-time (%u < %u):"
+ " resetting stream, id %d",
+ frame_op->multi_media_time,
+ last_op->multi_media_time, frame_op->id);
+ mjpeg_decoder_drop_queue(decoder);
+ }
+ }
+
+ /* Dropped MJPEG frames don't impact the ones that come after.
+ * So drop late frames as early as possible to save on processing time.
+ */
+ if (latency < 0) {
+ return;
+ }
+
+ spice_msg_in_ref(frame_msg);
+ g_queue_push_tail(decoder->msgq, frame_msg);
+ mjpeg_decoder_schedule(decoder);
+}
+
+static void mjpeg_decoder_reschedule(VideoDecoder *video_decoder)
+{
+ MJpegDecoder *decoder = (MJpegDecoder*)video_decoder;
+
+ SPICE_DEBUG("%s", __FUNCTION__);
+ if (decoder->timer_id != 0) {
+ g_source_remove(decoder->timer_id);
+ decoder->timer_id = 0;
+ }
+ mjpeg_decoder_schedule(decoder);
}
static void mjpeg_decoder_destroy(VideoDecoder* video_decoder)
{
MJpegDecoder *decoder = (MJpegDecoder*)video_decoder;
+
+ mjpeg_decoder_drop_queue(decoder);
jpeg_destroy_decompress(&decoder->mjpeg_cinfo);
g_free(decoder->out_frame);
free(decoder);
@@ -180,10 +299,13 @@ VideoDecoder* create_mjpeg_decoder(int codec_type, display_stream *stream)
MJpegDecoder *decoder = spice_new0(MJpegDecoder, 1);
decoder->base.destroy = mjpeg_decoder_destroy;
- decoder->base.decode_frame = mjpeg_decoder_decode_frame;
+ decoder->base.reschedule = mjpeg_decoder_reschedule;
+ decoder->base.queue_frame = mjpeg_decoder_queue_frame;
decoder->base.codec_type = codec_type;
decoder->base.stream = stream;
+ decoder->msgq = g_queue_new();
+
decoder->mjpeg_cinfo.err = jpeg_std_error(&decoder->mjpeg_jerr);
jpeg_create_decompress(&decoder->mjpeg_cinfo);
diff --git a/src/channel-display-priv.h b/src/channel-display-priv.h
index 5256ad9..ec48f51 100644
--- a/src/channel-display-priv.h
+++ b/src/channel-display-priv.h
@@ -41,6 +41,9 @@ struct VideoDecoder {
/* Releases the video decoder's resources */
void (*destroy)(VideoDecoder *decoder);
+ /* Notifies the decoder that the mm-time clock changed. */
+ void (*reschedule)(VideoDecoder *video_decoder);
+
/* Decompresses the specified frame.
*
* @decoder: The video decoder.
@@ -49,7 +52,7 @@ struct VideoDecoder {
* buffer will be invalidated by the next call to
* decode_frame().
*/
- uint8_t* (*decode_frame)(VideoDecoder *decoder, SpiceMsgIn *frame_msg);
+ void (*queue_frame)(VideoDecoder *decoder, SpiceMsgIn *frame_msg, int32_t latency);
/* The format of the encoded video. */
int codec_type;
@@ -98,8 +101,6 @@ struct display_stream {
VideoDecoder *video_decoder;
- GQueue *msgq;
- guint timeout;
SpiceChannel *channel;
/* stats */
@@ -127,6 +128,9 @@ struct display_stream {
};
void stream_get_dimensions(display_stream *st, SpiceMsgIn *frame_msg, int *width, int *height);
+guint32 stream_get_time(display_stream *st);
+void stream_dropped_frame_on_playback(display_stream *st);
+void stream_display_frame(display_stream *st, SpiceMsgIn *frame_msg, uint8_t* data);
uint32_t spice_msg_in_frame_data(SpiceMsgIn *frame_msg, uint8_t **data);
diff --git a/src/channel-display.c b/src/channel-display.c
index 4a06193..cd6ddf4 100644
--- a/src/channel-display.c
+++ b/src/channel-display.c
@@ -106,11 +106,9 @@ static void channel_set_handlers(SpiceChannelClass *klass);
static void clear_surfaces(SpiceChannel *channel, gboolean keep_primary);
static void clear_streams(SpiceChannel *channel);
static display_surface *find_surface(SpiceDisplayChannelPrivate *c, guint32 surface_id);
-static gboolean display_stream_render(display_stream *st);
static void spice_display_channel_reset(SpiceChannel *channel, gboolean migrating);
static void spice_display_channel_reset_capabilities(SpiceChannel *channel);
static void destroy_canvas(display_surface *surface);
-static void _msg_in_unref_func(gpointer data, gpointer user_data);
static void display_session_mm_time_reset_cb(SpiceSession *session, gpointer data);
static SpiceGlScanout* spice_gl_scanout_copy(const SpiceGlScanout *scanout);
@@ -1087,7 +1085,6 @@ static void display_handle_stream_create(SpiceChannel *channel, SpiceMsgIn *in)
spice_msg_in_ref(in);
st->clip = &op->clip;
st->surface = find_surface(c, op->surface_id);
- st->msgq = g_queue_new();
st->channel = channel;
st->drops_seqs_stats_arr = g_array_new(FALSE, FALSE, sizeof(drops_sequence_stats));
@@ -1106,45 +1103,6 @@ static void display_handle_stream_create(SpiceChannel *channel, SpiceMsgIn *in)
}
}
-/* coroutine or main context */
-static gboolean display_stream_schedule(display_stream *st)
-{
- SpiceSession *session = spice_channel_get_session(st->channel);
- guint32 time, d;
- SpiceStreamDataHeader *op;
- SpiceMsgIn *in;
-
- SPICE_DEBUG("%s", __FUNCTION__);
- if (st->timeout || !session)
- return TRUE;
-
- time = spice_session_get_mm_time(session);
- in = g_queue_peek_head(st->msgq);
-
- if (in == NULL) {
- return TRUE;
- }
-
- op = spice_msg_in_parsed(in);
- if (time < op->multi_media_time) {
- d = op->multi_media_time - time;
- SPICE_DEBUG("scheduling next stream render in %u ms", d);
- st->timeout = g_timeout_add(d, (GSourceFunc)display_stream_render, st);
- return TRUE;
- } else {
- SPICE_DEBUG("%s: rendering too late by %u ms (ts: %u, mmtime: %u), dropping ",
- __FUNCTION__, time - op->multi_media_time,
- op->multi_media_time, time);
- in = g_queue_pop_head(st->msgq);
- spice_msg_in_unref(in);
- st->num_drops_on_playback++;
- if (g_queue_get_length(st->msgq) == 0)
- return TRUE;
- }
-
- return FALSE;
-}
-
static SpiceRect *stream_get_dest(display_stream *st, SpiceMsgIn *frame_msg)
{
if (frame_msg == NULL ||
@@ -1207,66 +1165,53 @@ void stream_get_dimensions(display_stream *st, SpiceMsgIn *frame_msg, int *width
}
}
-/* main context */
-static gboolean display_stream_render(display_stream *st)
+G_GNUC_INTERNAL
+guint32 stream_get_time(display_stream *st)
{
- SpiceMsgIn *in;
+ SpiceSession *session = spice_channel_get_session(st->channel);
+ return session ? spice_session_get_mm_time(session) : 0;
+}
- st->timeout = 0;
- do {
- in = g_queue_pop_head(st->msgq);
+/* coroutine or main context */
+G_GNUC_INTERNAL
+void stream_dropped_frame_on_playback(display_stream *st)
+{
+ st->num_drops_on_playback++;
+}
- g_return_val_if_fail(in != NULL, FALSE);
+/* main context */
+G_GNUC_INTERNAL
+void stream_display_frame(display_stream *st, SpiceMsgIn *frame_msg,
+ uint8_t* data)
+{
+ int width, height;
+ SpiceRect *dest;
+ int stride;
- uint8_t *out_frame = NULL;
- if (st->video_decoder) {
- out_frame = st->video_decoder->decode_frame(st->video_decoder, in);
- }
- if (out_frame) {
- int width;
- int height;
- SpiceRect *dest;
- uint8_t *data;
- int stride;
-
- stream_get_dimensions(st, in, &width, &height);
- dest = stream_get_dest(st, in);
-
- data = out_frame;
- stride = width * sizeof(uint32_t);
- if (!(stream_get_flags(st) & SPICE_STREAM_FLAGS_TOP_DOWN)) {
- data += stride * (height - 1);
- stride = -stride;
- }
+ stream_get_dimensions(st, frame_msg, &width, &height);
+ dest = stream_get_dest(st, frame_msg);
- st->surface->canvas->ops->put_image(
- st->surface->canvas,
+ stride = width * sizeof(uint32_t);
+ if (!(stream_get_flags(st) & SPICE_STREAM_FLAGS_TOP_DOWN)) {
+ data += stride * (height - 1);
+ stride = -stride;
+ }
+
+ st->surface->canvas->ops->put_image(st->surface->canvas,
#ifdef G_OS_WIN32
- SPICE_DISPLAY_CHANNEL(st->channel)->priv->dc,
+ SPICE_DISPLAY_CHANNEL(st->channel)->priv->dc,
#endif
- dest, data,
- width, height, stride,
- st->have_region ? &st->region : NULL);
-
- if (st->surface->primary)
- g_signal_emit(st->channel, signals[SPICE_DISPLAY_INVALIDATE], 0,
- dest->left, dest->top,
- dest->right - dest->left,
- dest->bottom - dest->top);
- }
+ dest, data,
+ width, height, stride,
+ st->have_region ? &st->region : NULL);
- spice_msg_in_unref(in);
-
- in = g_queue_peek_head(st->msgq);
- if (in == NULL)
- break;
-
- if (display_stream_schedule(st))
- return FALSE;
- } while (1);
-
- return FALSE;
+ if (st->surface->primary)
+ g_signal_emit(st->channel, signals[SPICE_DISPLAY_INVALIDATE], 0,
+ dest->left, dest->top,
+ dest->right - dest->left,
+ dest->bottom - dest->top);
}
+
/* after a sequence of 3 drops, push a report to the server, even
* if the report window is bigger */
#define STREAM_REPORT_DROP_SEQ_LEN_LIMIT 3
@@ -1327,17 +1272,6 @@ static void display_update_stream_report(SpiceDisplayChannel *channel, uint32_t
}
}
-static void display_stream_reset_rendering_timer(display_stream *st)
-{
- SPICE_DEBUG("%s", __FUNCTION__);
- if (st->timeout != 0) {
- g_source_remove(st->timeout);
- st->timeout = 0;
- }
- while (!display_stream_schedule(st)) {
- }
-}
-
/*
* Migration can occur between 2 spice-servers with different mm-times.
* Then, the following cases can happen after migration completes:
@@ -1367,8 +1301,9 @@ static void display_stream_reset_rendering_timer(display_stream *st)
* case 2 is less likely, since at takes at least 20 frames till the dst-server re-identifies
* the video stream and starts sending stream data
*
- * display_session_mm_time_reset_cb handles case 1.a, and
- * display_stream_test_frames_mm_time_reset handles case 2.b
+ * display_session_mm_time_reset_cb handles case 1.a by notifying the
+ * video decoders through their reschedule() method, and case 2.b is handled
+ * directly by the video decoders in their queue_frame() method
*/
/* main context */
@@ -1388,36 +1323,7 @@ static void display_session_mm_time_reset_cb(SpiceSession *session, gpointer dat
}
SPICE_DEBUG("%s: stream-id %d", __FUNCTION__, i);
st = c->streams[i];
- display_stream_reset_rendering_timer(st);
- }
-}
-
-/* coroutine context */
-static void display_stream_test_frames_mm_time_reset(display_stream *st,
- SpiceMsgIn *new_frame_msg,
- guint32 mm_time)
-{
- SpiceStreamDataHeader *tail_op, *new_op;
- SpiceMsgIn *tail_msg;
-
- SPICE_DEBUG("%s", __FUNCTION__);
- g_return_if_fail(new_frame_msg != NULL);
- tail_msg = g_queue_peek_tail(st->msgq);
- if (!tail_msg) {
- return;
- }
- tail_op = spice_msg_in_parsed(tail_msg);
- new_op = spice_msg_in_parsed(new_frame_msg);
-
- if (new_op->multi_media_time < tail_op->multi_media_time) {
- SPICE_DEBUG("new-frame-time < tail-frame-time (%u < %u):"
- " reseting stream, id %d",
- new_op->multi_media_time,
- tail_op->multi_media_time,
- new_op->id);
- g_queue_foreach(st->msgq, _msg_in_unref_func, NULL);
- g_queue_clear(st->msgq);
- display_stream_reset_rendering_timer(st);
+ st->video_decoder->reschedule(st->video_decoder);
}
}
@@ -1437,7 +1343,7 @@ static void display_handle_stream_data(SpiceChannel *channel, SpiceMsgIn *in)
g_return_if_fail(c->nstreams > op->id);
st = c->streams[op->id];
- mmtime = spice_session_get_mm_time(spice_channel_get_session(channel));
+ mmtime = stream_get_time(st);
if (spice_msg_in_type(in) == SPICE_MSG_DISPLAY_STREAM_DATA_SIZED) {
CHANNEL_DEBUG(channel, "stream %d contains sized data", op->id);
@@ -1467,11 +1373,6 @@ static void display_handle_stream_data(SpiceChannel *channel, SpiceMsgIn *in)
st->playback_sync_drops_seq_len++;
} else {
CHANNEL_DEBUG(channel, "video latency: %d", latency);
- spice_msg_in_ref(in);
- display_stream_test_frames_mm_time_reset(st, in, mmtime);
- g_queue_push_tail(st->msgq, in);
- while (!display_stream_schedule(st)) {
- }
if (st->cur_drops_seq_stats.len) {
st->cur_drops_seq_stats.duration = op->multi_media_time -
st->cur_drops_seq_stats.start_mm_time;
@@ -1481,6 +1382,12 @@ static void display_handle_stream_data(SpiceChannel *channel, SpiceMsgIn *in)
}
st->playback_sync_drops_seq_len = 0;
}
+
+ /* Let the video decoder queue the frames so it can optimize their
+ * decoding and best decide if/when to drop them when they are late,
+ * taking into account the impact on later frames.
+ */
+ st->video_decoder->queue_frame(st->video_decoder, in, latency);
if (c->enable_adaptive_streaming) {
display_update_stream_report(SPICE_DISPLAY_CHANNEL(channel), op->id,
op->multi_media_time, latency);
@@ -1513,11 +1420,6 @@ static void display_handle_stream_clip(SpiceChannel *channel, SpiceMsgIn *in)
display_update_stream_region(st);
}
-static void _msg_in_unref_func(gpointer data, gpointer user_data)
-{
- spice_msg_in_unref(data);
-}
-
static void destroy_stream(SpiceChannel *channel, int id)
{
SpiceDisplayChannelPrivate *c = SPICE_DISPLAY_CHANNEL(channel)->priv;
@@ -1571,10 +1473,6 @@ static void destroy_stream(SpiceChannel *channel, int id)
spice_msg_in_unref(st->msg_clip);
spice_msg_in_unref(st->msg_create);
- g_queue_foreach(st->msgq, _msg_in_unref_func, NULL);
- g_queue_free(st->msgq);
- if (st->timeout != 0)
- g_source_remove(st->timeout);
g_free(st);
c->streams[id] = NULL;
}
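The scheduling rule at the heart of this refactoring is easiest to see in the GStreamer decoder's schedule_frame() (the MJPEG decoder's mjpeg_decoder_schedule() is similar but simply drops every late frame): a frame that is still early gets a timer, a late frame is dropped unless it is the only one queued, in which case it is displayed anyway so the video does not appear frozen. Below is a self-contained sketch of just that decision, assuming GLib; classify_frame() and its arguments are hypothetical stand-ins for illustration:

    /* Standalone illustration of the mm-time scheduling decision described
     * above. classify_frame() is a hypothetical helper, not spice-gtk code.
     */
    #include <glib.h>

    typedef enum { FRAME_SCHEDULE, FRAME_SHOW_NOW, FRAME_DROP } FrameAction;

    static FrameAction classify_frame(guint32 now_mm_time, guint32 frame_mm_time,
                                      guint queued_frames, guint32 *delay_ms)
    {
        if (now_mm_time < frame_mm_time) {
            *delay_ms = frame_mm_time - now_mm_time;  /* still early: schedule */
            return FRAME_SCHEDULE;
        }
        if (queued_frames == 1) {
            *delay_ms = 0;                            /* late but last in queue */
            return FRAME_SHOW_NOW;
        }
        return FRAME_DROP;                            /* late and more are queued */
    }

    int main(void)
    {
        guint32 delay = 0;
        /* due at mm-time 1500, clock at 1460, 3 frames queued -> timer in 40 ms */
        g_assert(classify_frame(1460, 1500, 3, &delay) == FRAME_SCHEDULE && delay == 40);
        /* due at 1500, clock at 1530, only queued frame -> display immediately */
        g_assert(classify_frame(1530, 1500, 1, &delay) == FRAME_SHOW_NOW);
        /* due at 1500, clock at 1530, more frames queued -> drop */
        g_assert(classify_frame(1530, 1500, 4, &delay) == FRAME_DROP);
        return 0;
    }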