[Spice-commits] 6 commits - configure.ac src/Makefile.am src/channel-display-gst.c src/channel-display-mjpeg.c src/channel-display-priv.h src/channel-display.c src/spice-widget-priv.h src/spice-widget.c

Christophe Fergeau teuf at kemper.freedesktop.org
Tue May 9 14:16:48 UTC 2017


 configure.ac                |    8 +
 src/Makefile.am             |    2 
 src/channel-display-gst.c   |  149 ++++++++++++++++++++----------------
 src/channel-display-mjpeg.c |   78 +++++++++----------
 src/channel-display-priv.h  |   31 +++++--
 src/channel-display.c       |  179 ++++++++++++++++++++++----------------------
 src/spice-widget-priv.h     |    6 -
 src/spice-widget.c          |   26 +++---
 8 files changed, 261 insertions(+), 218 deletions(-)

New commits:
commit 16ea4b2f712b7bb30bd2bb0d0dd6e59ed08ff37c
Author: Francois Gouget <fgouget at codeweavers.com>
Date:   Thu Apr 6 16:03:01 2017 +0200

    streaming: Separate the network code from the display_stream management
    
    This makes it easier to reuse display_streams for other types of
    video streams should the need arise.
    
    Signed-off-by: Francois Gouget <fgouget at codeweavers.com>
    Acked-by: Christophe Fergeau <cfergeau at redhat.com>
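
A minimal sketch of what this separation enables (an illustration, not part of the commit): a hypothetical non-network caller could now set up a stream from plain values via the display_stream_create()/destroy_display_stream() pair introduced below, without ever parsing a SpiceMsgIn. The start_local_stream() name, its flag choice and its checks are assumptions made for the example.

/* Hypothetical helper, for illustration only; it would live in
 * channel-display.c next to display_stream_create(). */
static gboolean start_local_stream(SpiceChannel *channel, uint32_t id,
                                   uint32_t surface_id, uint32_t codec_type,
                                   const SpiceRect *dest, const SpiceClip *clip)
{
    SpiceDisplayChannelPrivate *c = SPICE_DISPLAY_CHANNEL(channel)->priv;

    /* Same sanity checks as destroy_stream() */
    g_return_val_if_fail(c->streams != NULL, FALSE);
    g_return_val_if_fail(c->nstreams > id, FALSE);
    g_return_val_if_fail(c->streams[id] == NULL, FALSE);

    c->streams[id] = display_stream_create(channel, surface_id,
                                           SPICE_STREAM_FLAGS_TOP_DOWN,
                                           codec_type, dest, clip);
    return c->streams[id] != NULL;
}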

diff --git a/src/channel-display.c b/src/channel-display.c
index 00b5440..ccaa747 100644
--- a/src/channel-display.c
+++ b/src/channel-display.c
@@ -109,7 +109,7 @@ static display_surface *find_surface(SpiceDisplayChannelPrivate *c, guint32 surf
 static void spice_display_channel_reset(SpiceChannel *channel, gboolean migrating);
 static void spice_display_channel_reset_capabilities(SpiceChannel *channel);
 static void destroy_canvas(display_surface *surface);
-static void destroy_stream(SpiceChannel *channel, int id);
+static void destroy_display_stream(display_stream *st, int id);
 static void display_session_mm_time_reset_cb(SpiceSession *session, gpointer data);
 static SpiceGlScanout* spice_gl_scanout_copy(const SpiceGlScanout *scanout);
 
@@ -1168,11 +1168,61 @@ static display_stream *get_stream_by_id(SpiceChannel *channel, uint32_t id)
 }
 
 /* coroutine context */
+static display_stream *display_stream_create(SpiceChannel *channel, uint32_t surface_id,
+                                             uint32_t flags, uint32_t codec_type,
+                                             const SpiceRect *dest, const SpiceClip *clip)
+{
+    SpiceDisplayChannelPrivate *c = SPICE_DISPLAY_CHANNEL(channel)->priv;
+    display_stream *st = g_new0(display_stream, 1);
+
+    st->flags = flags;
+    st->dest = *dest;
+    st->clip = *clip;
+    st->surface = find_surface(c, surface_id);
+    st->channel = channel;
+    st->drops_seqs_stats_arr = g_array_new(FALSE, FALSE, sizeof(drops_sequence_stats));
+
+    region_init(&st->region);
+    display_update_stream_region(st);
+
+    switch (codec_type) {
+#ifdef HAVE_BUILTIN_MJPEG
+    case SPICE_VIDEO_CODEC_TYPE_MJPEG:
+        st->video_decoder = create_mjpeg_decoder(codec_type, st);
+        break;
+#endif
+    default:
+#ifdef HAVE_GSTVIDEO
+        st->video_decoder = create_gstreamer_decoder(codec_type, st);
+#endif
+        break;
+    }
+    if (st->video_decoder == NULL) {
+        spice_printerr("could not create a video decoder for codec %u", codec_type);
+        destroy_display_stream(st, 0);
+        st = NULL;
+    }
+    return st;
+}
+
+static void destroy_stream(SpiceChannel *channel, int id)
+{
+    SpiceDisplayChannelPrivate *c = SPICE_DISPLAY_CHANNEL(channel)->priv;
+
+    g_return_if_fail(c != NULL);
+    g_return_if_fail(c->streams != NULL);
+    g_return_if_fail(c->nstreams > id);
+
+    if (c->streams[id]) {
+        destroy_display_stream(c->streams[id], id);
+        c->streams[id] = NULL;
+    }
+}
+
 static void display_handle_stream_create(SpiceChannel *channel, SpiceMsgIn *in)
 {
     SpiceDisplayChannelPrivate *c = SPICE_DISPLAY_CHANNEL(channel)->priv;
     SpiceMsgDisplayStreamCreate *op = spice_msg_in_parsed(in);
-    display_stream *st;
 
     CHANNEL_DEBUG(channel, "%s: id %u", __FUNCTION__, op->id);
 
@@ -1188,34 +1238,12 @@ static void display_handle_stream_create(SpiceChannel *channel, SpiceMsgIn *in)
         memset(c->streams + n, 0, (c->nstreams - n) * sizeof(c->streams[0]));
     }
     g_return_if_fail(c->streams[op->id] == NULL);
-    c->streams[op->id] = g_new0(display_stream, 1);
-    st = c->streams[op->id];
-
-    st->flags = op->flags;
-    st->dest = op->dest;
-    st->clip = op->clip;
-    st->surface = find_surface(c, op->surface_id);
-    st->channel = channel;
-    st->drops_seqs_stats_arr = g_array_new(FALSE, FALSE, sizeof(drops_sequence_stats));
-
-    region_init(&st->region);
-    display_update_stream_region(st);
 
-    switch (op->codec_type) {
-#ifdef HAVE_BUILTIN_MJPEG
-    case SPICE_VIDEO_CODEC_TYPE_MJPEG:
-        st->video_decoder = create_mjpeg_decoder(op->codec_type, st);
-        break;
-#endif
-    default:
-#ifdef HAVE_GSTVIDEO
-        st->video_decoder = create_gstreamer_decoder(op->codec_type, st);
-#else
-        st->video_decoder = NULL;
-#endif
-    }
-    if (st->video_decoder == NULL) {
-        spice_printerr("could not create a video decoder for codec %u", op->codec_type);
+    c->streams[op->id] = display_stream_create(channel, op->surface_id,
+                                               op->flags, op->codec_type,
+                                               &op->dest, &op->clip);
+    if (c->streams[op->id] == NULL) {
+        spice_printerr("could not create the %u video stream", op->id);
         destroy_stream(channel, op->id);
         report_invalid_stream(channel, op->id);
     }
@@ -1503,24 +1531,14 @@ static void display_handle_stream_clip(SpiceChannel *channel, SpiceMsgIn *in)
     display_update_stream_region(st);
 }
 
-static void destroy_stream(SpiceChannel *channel, int id)
+static void destroy_display_stream(display_stream *st, int id)
 {
-    SpiceDisplayChannelPrivate *c = SPICE_DISPLAY_CHANNEL(channel)->priv;
-    display_stream *st;
     int i;
 
-    g_return_if_fail(c != NULL);
-    g_return_if_fail(c->streams != NULL);
-    g_return_if_fail(c->nstreams > id);
-
-    st = c->streams[id];
-    if (!st)
-        return;
-
     if (st->num_input_frames > 0) {
         guint64 drops_duration_total = 0;
         guint32 num_out_frames = st->num_input_frames - st->arrive_late_count - st->num_drops_on_playback;
-        CHANNEL_DEBUG(channel, "%s: id=%d #in-frames=%u out/in=%.2f "
+        CHANNEL_DEBUG(st->channel, "%s: id=%d #in-frames=%u out/in=%.2f "
             "#drops-on-receive=%u avg-late-time(ms)=%.2f "
             "#drops-on-playback=%u", __FUNCTION__,
             id,
@@ -1530,20 +1548,20 @@ static void destroy_stream(SpiceChannel *channel, int id)
             st->arrive_late_count ? st->arrive_late_time / ((double)st->arrive_late_count): 0,
             st->num_drops_on_playback);
         if (st->num_drops_seqs) {
-            CHANNEL_DEBUG(channel, "%s: #drops-sequences=%u ==>", __FUNCTION__, st->num_drops_seqs);
+            CHANNEL_DEBUG(st->channel, "%s: #drops-sequences=%u ==>", __FUNCTION__, st->num_drops_seqs);
         }
         for (i = 0; i < st->num_drops_seqs; i++) {
             drops_sequence_stats *stats = &g_array_index(st->drops_seqs_stats_arr,
                                                          drops_sequence_stats,
                                                          i);
             drops_duration_total += stats->duration;
-            CHANNEL_DEBUG(channel, "%s: \t len=%u start-ms=%u duration-ms=%u", __FUNCTION__,
-                                   stats->len,
-                                   stats->start_mm_time - st->first_frame_mm_time,
-                                   stats->duration);
+            CHANNEL_DEBUG(st->channel, "%s: \t len=%u start-ms=%u duration-ms=%u", __FUNCTION__,
+                          stats->len,
+                          stats->start_mm_time - st->first_frame_mm_time,
+                          stats->duration);
         }
         if (st->num_drops_seqs) {
-            CHANNEL_DEBUG(channel, "%s: drops-total-duration=%"G_GUINT64_FORMAT" ==>", __FUNCTION__, drops_duration_total);
+            CHANNEL_DEBUG(st->channel, "%s: drops-total-duration=%"G_GUINT64_FORMAT" ==>", __FUNCTION__, drops_duration_total);
         }
     }
 
@@ -1554,7 +1572,6 @@ static void destroy_stream(SpiceChannel *channel, int id)
     }
 
     g_free(st);
-    c->streams[id] = NULL;
 }
 
 static void clear_streams(SpiceChannel *channel)
commit dd569c13858488b11b15d3d16e573d80dfbcbea4
Author: Francois Gouget <fgouget at codeweavers.com>
Date:   Thu Apr 6 16:02:53 2017 +0200

    streaming: Remove the video decoder's dependency on SpiceMsgIn messages
    
    This improves the separation between networking and the video decoding
    components.
    It also makes it easier to reuse the latter should the client one day
    receive video streams through other messages.
    
    Signed-off-by: Francois Gouget <fgouget at codeweavers.com>
    Acked-by: Christophe Fergeau <cfergeau at redhat.com>
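
The SpiceFrame type added to channel-display-priv.h below carries the compressed data together with ref_data/unref_data/free callbacks, so a decoder only ever sees those callbacks and the raw bytes. As a rough sketch (assuming hypothetical my_buffer_ref()/my_buffer_unref() helpers owned by some other data source), this is all a non-SpiceMsgIn caller would have to provide:

/* Illustrative only: the my_buffer_* helpers stand in for whatever
 * owns the compressed data. */
static void my_buffer_ref(gpointer data_opaque)   { /* take a reference on the buffer */ }
static void my_buffer_unref(gpointer data_opaque) { /* drop the reference */ }

static void queue_raw_frame(VideoDecoder *decoder, uint8_t *data, uint32_t size,
                            uint32_t mm_time, const SpiceRect *dest,
                            gpointer data_opaque)
{
    SpiceFrame *frame = spice_new(SpiceFrame, 1);

    frame->mm_time     = mm_time;
    frame->dest        = *dest;
    frame->data        = data;
    frame->size        = size;
    frame->data_opaque = data_opaque;
    frame->ref_data    = my_buffer_ref;
    frame->unref_data  = my_buffer_unref;
    frame->free        = (void*)free;  /* same idiom as display_handle_stream_data() */

    /* Ownership passes to the decoder: it frees the frame once displayed,
     * or immediately if the frame is empty or too late. */
    decoder->queue_frame(decoder, frame, 0);
}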

diff --git a/src/channel-display-gst.c b/src/channel-display-gst.c
index 2c002eb..9b79403 100644
--- a/src/channel-display-gst.c
+++ b/src/channel-display-gst.c
@@ -90,23 +90,22 @@ G_STATIC_ASSERT(G_N_ELEMENTS(gst_opts) <= SPICE_VIDEO_CODEC_TYPE_ENUM_END);
 
 typedef struct SpiceGstFrame {
     GstClockTime timestamp;
-    SpiceMsgIn *msg;
+    SpiceFrame *frame;
     GstSample *sample;
 } SpiceGstFrame;
 
-static SpiceGstFrame *create_gst_frame(GstBuffer *buffer, SpiceMsgIn *msg)
+static SpiceGstFrame *create_gst_frame(GstBuffer *buffer, SpiceFrame *frame)
 {
     SpiceGstFrame *gstframe = spice_new(SpiceGstFrame, 1);
     gstframe->timestamp = GST_BUFFER_PTS(buffer);
-    gstframe->msg = msg;
-    spice_msg_in_ref(msg);
+    gstframe->frame = frame;
     gstframe->sample = NULL;
     return gstframe;
 }
 
 static void free_gst_frame(SpiceGstFrame *gstframe)
 {
-    spice_msg_in_unref(gstframe->msg);
+    gstframe->frame->free(gstframe->frame);
     if (gstframe->sample) {
         gst_sample_unref(gstframe->sample);
     }
@@ -160,7 +159,7 @@ static gboolean display_frame(gpointer video_decoder)
         goto error;
     }
 
-    stream_display_frame(decoder->base.stream, gstframe->msg,
+    stream_display_frame(decoder->base.stream, gstframe->frame,
                          width, height, mapinfo.data);
     gst_buffer_unmap(buffer, &mapinfo);
 
@@ -182,9 +181,8 @@ static void schedule_frame(SpiceGstDecoder *decoder)
             break;
         }
 
-        SpiceStreamDataHeader *op = spice_msg_in_parsed(gstframe->msg);
-        if (now < op->multi_media_time) {
-            decoder->timer_id = g_timeout_add(op->multi_media_time - now,
+        if (now < gstframe->frame->mm_time) {
+            decoder->timer_id = g_timeout_add(gstframe->frame->mm_time - now,
                                               display_frame, decoder);
         } else if (g_queue_get_length(decoder->display_queue) == 1) {
             /* Still attempt to display the least out of date frame so the
@@ -193,8 +191,8 @@ static void schedule_frame(SpiceGstDecoder *decoder)
             decoder->timer_id = g_timeout_add(0, display_frame, decoder);
         } else {
             SPICE_DEBUG("%s: rendering too late by %u ms (ts: %u, mmtime: %u), dropping",
-                        __FUNCTION__, now - op->multi_media_time,
-                        op->multi_media_time, now);
+                        __FUNCTION__, now - gstframe->frame->mm_time,
+                        gstframe->frame->mm_time, now);
             stream_dropped_frame_on_playback(decoder->base.stream);
             g_queue_pop_head(decoder->display_queue);
             free_gst_frame(gstframe);
@@ -411,23 +409,17 @@ static void spice_gst_decoder_destroy(VideoDecoder *video_decoder)
      */
 }
 
-static void release_buffer_data(gpointer data)
-{
-    SpiceMsgIn* frame_msg = (SpiceMsgIn*)data;
-    spice_msg_in_unref(frame_msg);
-}
 
-/* spice_gst_decoder_queue_frame() queues the SpiceMsgIn message for decoding
- * and displaying. The steps it goes through are as follows:
+/* spice_gst_decoder_queue_frame() queues the SpiceFrame for decoding and
+ * displaying. The steps it goes through are as follows:
  *
- * 1) A SpiceGstFrame is created to keep track of SpiceMsgIn and some additional
- *    metadata. SpiceMsgIn is reffed. The SpiceFrame is then pushed to the
- *    decoding_queue.
- * 2) The data part of SpiceMsgIn, which contains the compressed frame data,
- *    is wrapped in a GstBuffer and is pushed to the GStreamer pipeline for
- *    decoding. SpiceMsgIn is reffed.
+ * 1) A SpiceGstFrame is created to keep track of SpiceFrame and some additional
+ *    metadata. The SpiceGstFrame is then pushed to the decoding_queue.
+ * 2) frame->data, which contains the compressed frame data, is reffed and
+ *    wrapped in a GstBuffer which is pushed to the GStreamer pipeline for
+ *    decoding.
  * 3) As soon as the GStreamer pipeline no longer needs the compressed frame it
- *    calls release_buffer_data() to unref SpiceMsgIn.
+ *    will call frame->unref_data() to free it.
  * 4) Once the decompressed frame is available the GStreamer pipeline calls
  *    new_sample() in the GStreamer thread.
  * 5) new_sample() then matches the decompressed frame to a SpiceGstFrame from
@@ -435,36 +427,32 @@ static void release_buffer_data(gpointer data)
  *    dropped frames. The SpiceGstFrame is popped from the decoding_queue.
  * 6) new_sample() then attaches the decompressed frame to the SpiceGstFrame,
  *    pushes it to the display_queue and calls schedule_frame().
- * 7) schedule_frame() then uses the SpiceMsgIn's mm_time to arrange for
+ * 7) schedule_frame() then uses gstframe->frame->mm_time to arrange for
  *    display_frame() to be called, in the main thread, at the right time for
  *    the next frame.
  * 8) display_frame() pops the first SpiceGstFrame from the display_queue and
  *    calls stream_display_frame().
- * 9) display_frame() then frees the SpiceGstFrame and the decompressed frame.
- *    SpiceMsgIn is unreffed.
+ * 9) display_frame() then frees the SpiceGstFrame, which frees the SpiceFrame
+ *    and decompressed frame with it.
  */
 static gboolean spice_gst_decoder_queue_frame(VideoDecoder *video_decoder,
-                                              SpiceMsgIn *frame_msg,
-                                              int32_t latency)
+                                              SpiceFrame *frame, int latency)
 {
     SpiceGstDecoder *decoder = (SpiceGstDecoder*)video_decoder;
 
-    uint8_t *data;
-    uint32_t size = spice_msg_in_frame_data(frame_msg, &data);
-    if (size == 0) {
+    if (frame->size == 0) {
         SPICE_DEBUG("got an empty frame buffer!");
+        frame->free(frame);
         return TRUE;
     }
 
-    SpiceStreamDataHeader *frame_op = spice_msg_in_parsed(frame_msg);
-    if (frame_op->multi_media_time < decoder->last_mm_time) {
+    if (frame->mm_time < decoder->last_mm_time) {
         SPICE_DEBUG("new-frame-time < last-frame-time (%u < %u):"
-                    " resetting stream, id %u",
-                    frame_op->multi_media_time,
-                    decoder->last_mm_time, frame_op->id);
+                    " resetting stream",
+                    frame->mm_time, decoder->last_mm_time);
         /* Let GStreamer deal with the frame anyway */
     }
-    decoder->last_mm_time = frame_op->multi_media_time;
+    decoder->last_mm_time = frame->mm_time;
 
     if (latency < 0 &&
         decoder->base.codec_type == SPICE_VIDEO_CODEC_TYPE_MJPEG) {
@@ -472,6 +460,7 @@ static gboolean spice_gst_decoder_queue_frame(VideoDecoder *video_decoder,
          * saves CPU so do it.
          */
         SPICE_DEBUG("dropping a late MJPEG frame");
+        frame->free(frame);
         return TRUE;
     }
 
@@ -481,22 +470,22 @@ static gboolean spice_gst_decoder_queue_frame(VideoDecoder *video_decoder,
         return FALSE;
     }
 
-    /* ref() the frame_msg for the buffer */
-    spice_msg_in_ref(frame_msg);
+    /* ref() the frame data for the buffer */
+    frame->ref_data(frame->data_opaque);
     GstBuffer *buffer = gst_buffer_new_wrapped_full(GST_MEMORY_FLAG_PHYSICALLY_CONTIGUOUS,
-                                                    data, size, 0, size,
-                                                    frame_msg, &release_buffer_data);
+                                                    frame->data, frame->size, 0, frame->size,
+                                                    frame->data_opaque, frame->unref_data);
 
     GST_BUFFER_DURATION(buffer) = GST_CLOCK_TIME_NONE;
     GST_BUFFER_DTS(buffer) = GST_CLOCK_TIME_NONE;
     GST_BUFFER_PTS(buffer) = gst_clock_get_time(decoder->clock) - gst_element_get_base_time(decoder->pipeline) + ((uint64_t)MAX(0, latency)) * 1000 * 1000;
 
     g_mutex_lock(&decoder->queues_mutex);
-    g_queue_push_tail(decoder->decoding_queue, create_gst_frame(buffer, frame_msg));
+    g_queue_push_tail(decoder->decoding_queue, create_gst_frame(buffer, frame));
     g_mutex_unlock(&decoder->queues_mutex);
 
     if (gst_app_src_push_buffer(decoder->appsrc, buffer) != GST_FLOW_OK) {
-        SPICE_DEBUG("GStreamer error: unable to push frame of size %u", size);
+        SPICE_DEBUG("GStreamer error: unable to push frame of size %u", frame->size);
         stream_dropped_frame_on_playback(decoder->base.stream);
     }
     return TRUE;
diff --git a/src/channel-display-mjpeg.c b/src/channel-display-mjpeg.c
index 722494e..3ae9d21 100644
--- a/src/channel-display-mjpeg.c
+++ b/src/channel-display-mjpeg.c
@@ -38,7 +38,7 @@ typedef struct MJpegDecoder {
     /* ---------- Frame queue ---------- */
 
     GQueue *msgq;
-    SpiceMsgIn *cur_frame_msg;
+    SpiceFrame *cur_frame;
     guint timer_id;
 
     /* ---------- Output frame data ---------- */
@@ -53,10 +53,8 @@ typedef struct MJpegDecoder {
 static void mjpeg_src_init(struct jpeg_decompress_struct *cinfo)
 {
     MJpegDecoder *decoder = SPICE_CONTAINEROF(cinfo->src, MJpegDecoder, mjpeg_src);
-
-    uint8_t *data;
-    cinfo->src->bytes_in_buffer = spice_msg_in_frame_data(decoder->cur_frame_msg, &data);
-    cinfo->src->next_input_byte = data;
+    cinfo->src->bytes_in_buffer = decoder->cur_frame->size;
+    cinfo->src->next_input_byte = decoder->cur_frame->data;
 }
 
 static boolean mjpeg_src_fill(struct jpeg_decompress_struct *cinfo)
@@ -77,6 +75,15 @@ static void mjpeg_src_term(struct jpeg_decompress_struct *cinfo)
 }
 
 
+/* ---------- A SpiceFrame helper ---------- */
+
+static void free_spice_frame(SpiceFrame *frame)
+{
+    frame->unref_data(frame->data_opaque);
+    frame->free(frame);
+}
+
+
 /* ---------- Decoder proper ---------- */
 
 static void mjpeg_decoder_schedule(MJpegDecoder *decoder);
@@ -168,10 +175,10 @@ static gboolean mjpeg_decoder_decode_frame(gpointer video_decoder)
     jpeg_finish_decompress(&decoder->mjpeg_cinfo);
 
     /* Display the frame and dispose of it */
-    stream_display_frame(decoder->base.stream, decoder->cur_frame_msg,
+    stream_display_frame(decoder->base.stream, decoder->cur_frame,
                          width, height, decoder->out_frame);
-    spice_msg_in_unref(decoder->cur_frame_msg);
-    decoder->cur_frame_msg = NULL;
+    free_spice_frame(decoder->cur_frame);
+    decoder->cur_frame = NULL;
     decoder->timer_id = 0;
 
     /* Schedule the next frame */
@@ -190,33 +197,32 @@ static void mjpeg_decoder_schedule(MJpegDecoder *decoder)
     }
 
     guint32 time = stream_get_time(decoder->base.stream);
-    SpiceMsgIn *frame_msg = decoder->cur_frame_msg;
-    decoder->cur_frame_msg = NULL;
+    SpiceFrame *frame = decoder->cur_frame;
+    decoder->cur_frame = NULL;
     do {
-        if (frame_msg) {
-            SpiceStreamDataHeader *op = spice_msg_in_parsed(frame_msg);
-            if (time <= op->multi_media_time) {
-                guint32 d = op->multi_media_time - time;
-                decoder->cur_frame_msg = frame_msg;
+        if (frame) {
+            if (time <= frame->mm_time) {
+                guint32 d = frame->mm_time - time;
+                decoder->cur_frame = frame;
                 decoder->timer_id = g_timeout_add(d, mjpeg_decoder_decode_frame, decoder);
                 break;
             }
 
             SPICE_DEBUG("%s: rendering too late by %u ms (ts: %u, mmtime: %u), dropping ",
-                        __FUNCTION__, time - op->multi_media_time,
-                        op->multi_media_time, time);
+                        __FUNCTION__, time - frame->mm_time,
+                        frame->mm_time, time);
             stream_dropped_frame_on_playback(decoder->base.stream);
-            spice_msg_in_unref(frame_msg);
+            free_spice_frame(frame);
         }
-        frame_msg = g_queue_pop_head(decoder->msgq);
-    } while (frame_msg);
+        frame = g_queue_pop_head(decoder->msgq);
+    } while (frame);
 }
 
 
 /* mjpeg_decoder_drop_queue() helper */
 static void _msg_in_unref_func(gpointer data, gpointer user_data)
 {
-    spice_msg_in_unref(data);
+    free_spice_frame((SpiceFrame*)data);
 }
 
 static void mjpeg_decoder_drop_queue(MJpegDecoder *decoder)
@@ -225,9 +231,9 @@ static void mjpeg_decoder_drop_queue(MJpegDecoder *decoder)
         g_source_remove(decoder->timer_id);
         decoder->timer_id = 0;
     }
-    if (decoder->cur_frame_msg) {
-        spice_msg_in_unref(decoder->cur_frame_msg);
-        decoder->cur_frame_msg = NULL;
+    if (decoder->cur_frame) {
+        free_spice_frame(decoder->cur_frame);
+        decoder->cur_frame = NULL;
     }
     g_queue_foreach(decoder->msgq, _msg_in_unref_func, NULL);
     g_queue_clear(decoder->msgq);
@@ -236,25 +242,21 @@ static void mjpeg_decoder_drop_queue(MJpegDecoder *decoder)
 /* ---------- VideoDecoder's public API ---------- */
 
 static gboolean mjpeg_decoder_queue_frame(VideoDecoder *video_decoder,
-                                          SpiceMsgIn *frame_msg,
-                                          int32_t latency)
+                                          SpiceFrame *frame, int32_t latency)
 {
     MJpegDecoder *decoder = (MJpegDecoder*)video_decoder;
-    SpiceMsgIn *last_msg;
+    SpiceFrame *last_frame;
 
     SPICE_DEBUG("%s", __FUNCTION__);
 
-    last_msg = g_queue_peek_tail(decoder->msgq);
-    if (last_msg) {
-        SpiceStreamDataHeader *last_op, *frame_op;
-        last_op = spice_msg_in_parsed(last_msg);
-        frame_op = spice_msg_in_parsed(frame_msg);
-        if (frame_op->multi_media_time < last_op->multi_media_time) {
+    last_frame = g_queue_peek_tail(decoder->msgq);
+    if (last_frame) {
+        if (frame->mm_time < last_frame->mm_time) {
             /* This should really not happen */
             SPICE_DEBUG("new-frame-time < last-frame-time (%u < %u):"
-                        " resetting stream, id %u",
-                        frame_op->multi_media_time,
-                        last_op->multi_media_time, frame_op->id);
+                        " resetting stream",
+                        frame->mm_time,
+                        last_frame->mm_time);
             mjpeg_decoder_drop_queue(decoder);
         }
     }
@@ -266,8 +268,8 @@ static gboolean mjpeg_decoder_queue_frame(VideoDecoder *video_decoder,
         return TRUE;
     }
 
-    spice_msg_in_ref(frame_msg);
-    g_queue_push_tail(decoder->msgq, frame_msg);
+    frame->ref_data(frame->data_opaque);
+    g_queue_push_tail(decoder->msgq, frame);
     mjpeg_decoder_schedule(decoder);
     return TRUE;
 }
diff --git a/src/channel-display-priv.h b/src/channel-display-priv.h
index c5622f1..3c9d119 100644
--- a/src/channel-display-priv.h
+++ b/src/channel-display-priv.h
@@ -36,6 +36,20 @@ G_BEGIN_DECLS
 
 typedef struct display_stream display_stream;
 
+typedef struct SpiceFrame SpiceFrame;
+struct SpiceFrame {
+    uint32_t mm_time;
+    SpiceRect dest;
+
+    uint8_t *data;
+    uint32_t size;
+    gpointer data_opaque;
+    void (*ref_data)(gpointer data_opaque);
+    void (*unref_data)(gpointer data_opaque);
+
+    void (*free)(SpiceFrame *frame);
+};
+
 typedef struct VideoDecoder VideoDecoder;
 struct VideoDecoder {
     /* Releases the video decoder's resources */
@@ -44,16 +58,17 @@ struct VideoDecoder {
     /* Notifies the decoder that the mm-time clock changed. */
     void (*reschedule)(VideoDecoder *decoder);
 
-    /* Decompresses the specified frame.
+    /* Takes ownership of the specified frame, decompresses it,
+     * and displays it at the right time.
      *
      * @decoder:   The video decoder.
-     * @frame_msg: The Spice message containing the compressed frame.
+     * @frame:     The compressed Spice frame.
      * @latency:   How long in milliseconds until the frame should be
      *             displayed. Negative values mean the frame is late.
      * @return:    False if the decoder can no longer decode frames,
      *             True otherwise.
      */
-    gboolean (*queue_frame)(VideoDecoder *decoder, SpiceMsgIn *frame_msg, int32_t latency);
+    gboolean (*queue_frame)(VideoDecoder *video_decoder, SpiceFrame *frame, int latency);
 
     /* The format of the encoded video. */
     int codec_type;
@@ -137,8 +152,7 @@ struct display_stream {
 
 guint32 stream_get_time(display_stream *st);
 void stream_dropped_frame_on_playback(display_stream *st);
-void stream_display_frame(display_stream *st, SpiceMsgIn *frame_msg, uint32_t width, uint32_t height, uint8_t *data);
-uint32_t spice_msg_in_frame_data(SpiceMsgIn *frame_msg, uint8_t **data);
+void stream_display_frame(display_stream *st, SpiceFrame *frame, uint32_t width, uint32_t height, uint8_t* data);
 
 
 G_END_DECLS
diff --git a/src/channel-display.c b/src/channel-display.c
index 2423fb0..00b5440 100644
--- a/src/channel-display.c
+++ b/src/channel-display.c
@@ -1234,8 +1234,7 @@ static const SpiceRect *stream_get_dest(display_stream *st, SpiceMsgIn *frame_ms
 
 }
 
-G_GNUC_INTERNAL
-uint32_t spice_msg_in_frame_data(SpiceMsgIn *frame_msg, uint8_t **data)
+static uint32_t spice_msg_in_frame_data(SpiceMsgIn *frame_msg, uint8_t **data)
 {
     switch (spice_msg_in_type(frame_msg)) {
     case SPICE_MSG_DISPLAY_STREAM_DATA: {
@@ -1270,15 +1269,10 @@ void stream_dropped_frame_on_playback(display_stream *st)
 
 /* main context */
 G_GNUC_INTERNAL
-void stream_display_frame(display_stream *st, SpiceMsgIn *frame_msg,
+void stream_display_frame(display_stream *st, SpiceFrame *frame,
                           uint32_t width, uint32_t height, uint8_t *data)
 {
-    const SpiceRect *dest;
-    int stride;
-
-    dest = stream_get_dest(st, frame_msg);
-
-    stride = width * sizeof(uint32_t);
+    int stride = width * sizeof(uint32_t);
     if (!(st->flags & SPICE_STREAM_FLAGS_TOP_DOWN)) {
         data += stride * (height - 1);
         stride = -stride;
@@ -1288,15 +1282,16 @@ void stream_display_frame(display_stream *st, SpiceMsgIn *frame_msg,
 #ifdef G_OS_WIN32
                                         SPICE_DISPLAY_CHANNEL(st->channel)->priv->dc,
 #endif
-                                        dest, data,
+                                        &frame->dest, data,
                                         width, height, stride,
                                         st->have_region ? &st->region : NULL);
 
-    if (st->surface->primary)
+    if (st->surface->primary) {
         g_signal_emit(st->channel, signals[SPICE_DISPLAY_INVALIDATE], 0,
-                      dest->left, dest->top,
-                      dest->right - dest->left,
-                      dest->bottom - dest->top);
+                      frame->dest.left, frame->dest.top,
+                      frame->dest.right - frame->dest.left,
+                      frame->dest.bottom - frame->dest.top);
+    }
 }
 
 /* after a sequence of 3 drops, push a report to the server, even
@@ -1425,6 +1420,7 @@ static void display_handle_stream_data(SpiceChannel *channel, SpiceMsgIn *in)
     display_stream *st = get_stream_by_id(channel, op->id);
     guint32 mmtime;
     int32_t latency;
+    SpiceFrame *frame;
 
     g_return_if_fail(st != NULL);
     mmtime = stream_get_time(st);
@@ -1471,11 +1467,20 @@ static void display_handle_stream_data(SpiceChannel *channel, SpiceMsgIn *in)
      * decoding and best decide if/when to drop them when they are late,
      * taking into account the impact on later frames.
      */
-    if (!st->video_decoder->queue_frame(st->video_decoder, in, latency)) {
+    frame = spice_new(SpiceFrame, 1);
+    frame->mm_time = op->multi_media_time;
+    frame->dest = *stream_get_dest(st, in);
+    frame->size = spice_msg_in_frame_data(in, &frame->data);
+    frame->data_opaque = in;
+    frame->ref_data = (void*)spice_msg_in_ref;
+    frame->unref_data = (void*)spice_msg_in_unref;
+    frame->free = (void*)free;
+    if (!st->video_decoder->queue_frame(st->video_decoder, frame, latency)) {
         destroy_stream(channel, op->id);
         report_invalid_stream(channel, op->id);
         return;
     }
+
     if (c->enable_adaptive_streaming) {
         display_update_stream_report(SPICE_DISPLAY_CHANNEL(channel), op->id,
                                      op->multi_media_time, latency);
commit ce8ae1fc7b2381fa2f9708293f51396c60a07987
Author: Francois Gouget <fgouget at codeweavers.com>
Date:   Thu Apr 6 16:02:30 2017 +0200

    streaming: Rename SpiceFrame to SpiceGstFrame in the GStreamer decoder
    
    This emphasizes that this structure is specific to the GStreamer
    decoder.
    
    Signed-off-by: Francois Gouget <fgouget at codeweavers.com>
    Acked-by: Christophe Fergeau <cfergeau at redhat.com>

diff --git a/src/channel-display-gst.c b/src/channel-display-gst.c
index 7e0ddde..2c002eb 100644
--- a/src/channel-display-gst.c
+++ b/src/channel-display-gst.c
@@ -86,31 +86,31 @@ G_STATIC_ASSERT(G_N_ELEMENTS(gst_opts) <= SPICE_VIDEO_CODEC_TYPE_ENUM_END);
 #define VALID_VIDEO_CODEC_TYPE(codec) \
     (codec > 0 && codec < G_N_ELEMENTS(gst_opts))
 
-/* ---------- SpiceFrame ---------- */
+/* ---------- SpiceGstFrame ---------- */
 
-typedef struct _SpiceFrame {
+typedef struct SpiceGstFrame {
     GstClockTime timestamp;
     SpiceMsgIn *msg;
     GstSample *sample;
-} SpiceFrame;
+} SpiceGstFrame;
 
-static SpiceFrame *create_frame(GstBuffer *buffer, SpiceMsgIn *msg)
+static SpiceGstFrame *create_gst_frame(GstBuffer *buffer, SpiceMsgIn *msg)
 {
-    SpiceFrame *frame = spice_new(SpiceFrame, 1);
-    frame->timestamp = GST_BUFFER_PTS(buffer);
-    frame->msg = msg;
+    SpiceGstFrame *gstframe = spice_new(SpiceGstFrame, 1);
+    gstframe->timestamp = GST_BUFFER_PTS(buffer);
+    gstframe->msg = msg;
     spice_msg_in_ref(msg);
-    frame->sample = NULL;
-    return frame;
+    gstframe->sample = NULL;
+    return gstframe;
 }
 
-static void free_frame(SpiceFrame *frame)
+static void free_gst_frame(SpiceGstFrame *gstframe)
 {
-    spice_msg_in_unref(frame->msg);
-    if (frame->sample) {
-        gst_sample_unref(frame->sample);
+    spice_msg_in_unref(gstframe->msg);
+    if (gstframe->sample) {
+        gst_sample_unref(gstframe->sample);
     }
-    free(frame);
+    free(gstframe);
 }
 
 
@@ -122,7 +122,7 @@ static void schedule_frame(SpiceGstDecoder *decoder);
 static gboolean display_frame(gpointer video_decoder)
 {
     SpiceGstDecoder *decoder = (SpiceGstDecoder*)video_decoder;
-    SpiceFrame *frame;
+    SpiceGstFrame *gstframe;
     GstCaps *caps;
     gint width, height;
     GstStructure *s;
@@ -131,17 +131,17 @@ static gboolean display_frame(gpointer video_decoder)
 
     g_mutex_lock(&decoder->queues_mutex);
     decoder->timer_id = 0;
-    frame = g_queue_pop_head(decoder->display_queue);
+    gstframe = g_queue_pop_head(decoder->display_queue);
     g_mutex_unlock(&decoder->queues_mutex);
     /* If the queue is empty we don't even need to reschedule */
-    g_return_val_if_fail(frame, G_SOURCE_REMOVE);
+    g_return_val_if_fail(gstframe, G_SOURCE_REMOVE);
 
-    if (!frame->sample) {
+    if (!gstframe->sample) {
         spice_warning("got a frame without a sample!");
         goto error;
     }
 
-    caps = gst_sample_get_caps(frame->sample);
+    caps = gst_sample_get_caps(gstframe->sample);
     if (!caps) {
         spice_warning("GStreamer error: could not get the caps of the sample");
         goto error;
@@ -154,18 +154,18 @@ static gboolean display_frame(gpointer video_decoder)
         goto error;
     }
 
-    buffer = gst_sample_get_buffer(frame->sample);
+    buffer = gst_sample_get_buffer(gstframe->sample);
     if (!gst_buffer_map(buffer, &mapinfo, GST_MAP_READ)) {
         spice_warning("GStreamer error: could not map the buffer");
         goto error;
     }
 
-    stream_display_frame(decoder->base.stream, frame->msg,
+    stream_display_frame(decoder->base.stream, gstframe->msg,
                          width, height, mapinfo.data);
     gst_buffer_unmap(buffer, &mapinfo);
 
  error:
-    free_frame(frame);
+    free_gst_frame(gstframe);
     schedule_frame(decoder);
     return G_SOURCE_REMOVE;
 }
@@ -177,12 +177,12 @@ static void schedule_frame(SpiceGstDecoder *decoder)
     g_mutex_lock(&decoder->queues_mutex);
 
     while (!decoder->timer_id) {
-        SpiceFrame *frame = g_queue_peek_head(decoder->display_queue);
-        if (!frame) {
+        SpiceGstFrame *gstframe = g_queue_peek_head(decoder->display_queue);
+        if (!gstframe) {
             break;
         }
 
-        SpiceStreamDataHeader *op = spice_msg_in_parsed(frame->msg);
+        SpiceStreamDataHeader *op = spice_msg_in_parsed(gstframe->msg);
         if (now < op->multi_media_time) {
             decoder->timer_id = g_timeout_add(op->multi_media_time - now,
                                               display_frame, decoder);
@@ -197,7 +197,7 @@ static void schedule_frame(SpiceGstDecoder *decoder)
                         op->multi_media_time, now);
             stream_dropped_frame_on_playback(decoder->base.stream);
             g_queue_pop_head(decoder->display_queue);
-            free_frame(frame);
+            free_gst_frame(gstframe);
         }
     }
 
@@ -228,27 +228,27 @@ static GstFlowReturn new_sample(GstAppSink *gstappsink, gpointer video_decoder)
          * finding a match either, etc. So check the buffer has a matching
          * frame first.
          */
-        SpiceFrame *frame;
+        SpiceGstFrame *gstframe;
         GList *l = g_queue_peek_head_link(decoder->decoding_queue);
         while (l) {
-            frame = l->data;
-            if (frame->timestamp == GST_BUFFER_PTS(buffer)) {
+            gstframe = l->data;
+            if (gstframe->timestamp == GST_BUFFER_PTS(buffer)) {
                 /* The frame is now ready for display */
-                frame->sample = sample;
-                g_queue_push_tail(decoder->display_queue, frame);
+                gstframe->sample = sample;
+                g_queue_push_tail(decoder->display_queue, gstframe);
 
                 /* Now that we know there is a match, remove it and the older
                  * frames from the decoding queue.
                  */
-                while ((frame = g_queue_pop_head(decoder->decoding_queue))) {
-                    if (frame->timestamp == GST_BUFFER_PTS(buffer)) {
+                while ((gstframe = g_queue_pop_head(decoder->decoding_queue))) {
+                    if (gstframe->timestamp == GST_BUFFER_PTS(buffer)) {
                         break;
                     }
                     /* The GStreamer pipeline dropped the corresponding
                      * buffer.
                      */
                     SPICE_DEBUG("the GStreamer pipeline dropped a frame");
-                    free_frame(frame);
+                    free_gst_frame(gstframe);
                 }
                 break;
             }
@@ -394,13 +394,13 @@ static void spice_gst_decoder_destroy(VideoDecoder *video_decoder)
         g_source_remove(decoder->timer_id);
     }
     g_mutex_clear(&decoder->queues_mutex);
-    SpiceFrame *frame;
-    while ((frame = g_queue_pop_head(decoder->decoding_queue))) {
-        free_frame(frame);
+    SpiceGstFrame *gstframe;
+    while ((gstframe = g_queue_pop_head(decoder->decoding_queue))) {
+        free_gst_frame(gstframe);
     }
     g_queue_free(decoder->decoding_queue);
-    while ((frame = g_queue_pop_head(decoder->display_queue))) {
-        free_frame(frame);
+    while ((gstframe = g_queue_pop_head(decoder->display_queue))) {
+        free_gst_frame(gstframe);
     }
     g_queue_free(decoder->display_queue);
 
@@ -420,7 +420,7 @@ static void release_buffer_data(gpointer data)
 /* spice_gst_decoder_queue_frame() queues the SpiceMsgIn message for decoding
  * and displaying. The steps it goes through are as follows:
  *
- * 1) A SpiceFrame is created to keep track of SpiceMsgIn and some additional
+ * 1) A SpiceGstFrame is created to keep track of SpiceMsgIn and some additional
  *    metadata. SpiceMsgIn is reffed. The SpiceFrame is then pushed to the
  *    decoding_queue.
  * 2) The data part of SpiceMsgIn, which contains the compressed frame data,
@@ -430,17 +430,17 @@ static void release_buffer_data(gpointer data)
  *    calls release_buffer_data() to unref SpiceMsgIn.
  * 4) Once the decompressed frame is available the GStreamer pipeline calls
  *    new_sample() in the GStreamer thread.
- * 5) new_sample() then matches the decompressed frame to a SpiceFrame from
+ * 5) new_sample() then matches the decompressed frame to a SpiceGstFrame from
  *    the decoding queue using the GStreamer timestamp information to deal with
- *    dropped frames. The SpiceFrame is popped from the decoding_queue.
- * 6) new_sample() then attaches the decompressed frame to the SpiceFrame,
+ *    dropped frames. The SpiceGstFrame is popped from the decoding_queue.
+ * 6) new_sample() then attaches the decompressed frame to the SpiceGstFrame,
  *    pushes it to the display_queue and calls schedule_frame().
  * 7) schedule_frame() then uses the SpiceMsgIn's mm_time to arrange for
  *    display_frame() to be called, in the main thread, at the right time for
  *    the next frame.
- * 8) display_frame() pops the first SpiceFrame from the display_queue and
+ * 8) display_frame() pops the first SpiceGstFrame from the display_queue and
  *    calls stream_display_frame().
- * 9) display_frame() then frees the SpiceFrame and the decompressed frame.
+ * 9) display_frame() then frees the SpiceGstFrame and the decompressed frame.
  *    SpiceMsgIn is unreffed.
  */
 static gboolean spice_gst_decoder_queue_frame(VideoDecoder *video_decoder,
@@ -492,7 +492,7 @@ static gboolean spice_gst_decoder_queue_frame(VideoDecoder *video_decoder,
     GST_BUFFER_PTS(buffer) = gst_clock_get_time(decoder->clock) - gst_element_get_base_time(decoder->pipeline) + ((uint64_t)MAX(0, latency)) * 1000 * 1000;
 
     g_mutex_lock(&decoder->queues_mutex);
-    g_queue_push_tail(decoder->decoding_queue, create_frame(buffer, frame_msg));
+    g_queue_push_tail(decoder->decoding_queue, create_gst_frame(buffer, frame_msg));
     g_mutex_unlock(&decoder->queues_mutex);
 
     if (gst_app_src_push_buffer(decoder->appsrc, buffer) != GST_FLOW_OK) {
commit 92901285a776ccf2a70f988c90d75c9a34df7125
Author: Francois Gouget <fgouget at codeweavers.com>
Date:   Thu Apr 6 16:00:40 2017 +0200

    streaming: Move SpiceMsgIn parsing to display_handle_stream_create()
    
    This regroups all the parsing in one place and makes the rest of the
    display_stream code independent from the network messaging details.
    
    Signed-off-by: Francois Gouget <fgouget at codeweavers.com>
    Acked-by: Christophe Fergeau <cfergeau at redhat.com>

diff --git a/src/channel-display-priv.h b/src/channel-display-priv.h
index b9c08a3..c5622f1 100644
--- a/src/channel-display-priv.h
+++ b/src/channel-display-priv.h
@@ -99,12 +99,11 @@ typedef struct drops_sequence_stats {
 } drops_sequence_stats;
 
 struct display_stream {
-    SpiceMsgIn                  *msg_create;
-    SpiceMsgIn                  *msg_clip;
-
     /* from messages */
+    uint32_t                    flags;
+    SpiceRect                   dest;
     display_surface             *surface;
-    const SpiceClip             *clip;
+    SpiceClip                   clip;
     QRegion                     region;
     int                         have_region;
 
diff --git a/src/channel-display.c b/src/channel-display.c
index 7a5a23b..2423fb0 100644
--- a/src/channel-display.c
+++ b/src/channel-display.c
@@ -1118,11 +1118,11 @@ static void display_update_stream_region(display_stream *st)
 {
     int i;
 
-    switch (st->clip->type) {
+    switch (st->clip.type) {
     case SPICE_CLIP_TYPE_RECTS:
         region_clear(&st->region);
-        for (i = 0; i < st->clip->rects->num_rects; i++) {
-            region_add(&st->region, &st->clip->rects->rects[i]);
+        for (i = 0; i < st->clip.rects->num_rects; i++) {
+            region_add(&st->region, &st->clip.rects->rects[i]);
         }
         st->have_region = true;
         break;
@@ -1191,9 +1191,9 @@ static void display_handle_stream_create(SpiceChannel *channel, SpiceMsgIn *in)
     c->streams[op->id] = g_new0(display_stream, 1);
     st = c->streams[op->id];
 
-    st->msg_create = in;
-    spice_msg_in_ref(in);
-    st->clip = &op->clip;
+    st->flags = op->flags;
+    st->dest = op->dest;
+    st->clip = op->clip;
     st->surface = find_surface(c, op->surface_id);
     st->channel = channel;
     st->drops_seqs_stats_arr = g_array_new(FALSE, FALSE, sizeof(drops_sequence_stats));
@@ -1225,9 +1225,7 @@ static const SpiceRect *stream_get_dest(display_stream *st, SpiceMsgIn *frame_ms
 {
     if (frame_msg == NULL ||
         spice_msg_in_type(frame_msg) != SPICE_MSG_DISPLAY_STREAM_DATA_SIZED) {
-        SpiceMsgDisplayStreamCreate *info = spice_msg_in_parsed(st->msg_create);
-
-        return &info->dest;
+        return &st->dest;
     } else {
         SpiceMsgDisplayStreamDataSized *op = spice_msg_in_parsed(frame_msg);
 
@@ -1236,13 +1234,6 @@ static const SpiceRect *stream_get_dest(display_stream *st, SpiceMsgIn *frame_ms
 
 }
 
-static uint32_t stream_get_flags(display_stream *st)
-{
-    SpiceMsgDisplayStreamCreate *info = spice_msg_in_parsed(st->msg_create);
-
-    return info->flags;
-}
-
 G_GNUC_INTERNAL
 uint32_t spice_msg_in_frame_data(SpiceMsgIn *frame_msg, uint8_t **data)
 {
@@ -1288,7 +1279,7 @@ void stream_display_frame(display_stream *st, SpiceMsgIn *frame_msg,
     dest = stream_get_dest(st, frame_msg);
 
     stride = width * sizeof(uint32_t);
-    if (!(stream_get_flags(st) & SPICE_STREAM_FLAGS_TOP_DOWN)) {
+    if (!(st->flags & SPICE_STREAM_FLAGS_TOP_DOWN)) {
         data += stride * (height - 1);
         stride = -stride;
     }
@@ -1502,12 +1493,8 @@ static void display_handle_stream_clip(SpiceChannel *channel, SpiceMsgIn *in)
     display_stream *st = get_stream_by_id(channel, op->id);
 
     g_return_if_fail(st != NULL);
-    if (st->msg_clip) {
-        spice_msg_in_unref(st->msg_clip);
-    }
-    spice_msg_in_ref(in);
-    st->msg_clip = in;
-    st->clip = &op->clip;
+
+    st->clip = op->clip;
     display_update_stream_region(st);
 }
 
@@ -1561,10 +1548,6 @@ static void destroy_stream(SpiceChannel *channel, int id)
         st->video_decoder->destroy(st->video_decoder);
     }
 
-    if (st->msg_clip)
-        spice_msg_in_unref(st->msg_clip);
-    spice_msg_in_unref(st->msg_create);
-
     g_free(st);
     c->streams[id] = NULL;
 }
commit 65b08cdb332001a750defba55dac309473e81db1
Author: Francois Gouget <fgouget at codeweavers.com>
Date:   Thu Apr 6 16:00:28 2017 +0200

    streaming: Document the GStreamer decoding process
    
    Signed-off-by: Francois Gouget <fgouget at codeweavers.com>
    Acked-by: Jonathon Jongsma <jjongsma at redhat.com>

diff --git a/src/channel-display-gst.c b/src/channel-display-gst.c
index c4190b2..7e0ddde 100644
--- a/src/channel-display-gst.c
+++ b/src/channel-display-gst.c
@@ -417,6 +417,32 @@ static void release_buffer_data(gpointer data)
     spice_msg_in_unref(frame_msg);
 }
 
+/* spice_gst_decoder_queue_frame() queues the SpiceMsgIn message for decoding
+ * and displaying. The steps it goes through are as follows:
+ *
+ * 1) A SpiceFrame is created to keep track of SpiceMsgIn and some additional
+ *    metadata. SpiceMsgIn is reffed. The SpiceFrame is then pushed to the
+ *    decoding_queue.
+ * 2) The data part of SpiceMsgIn, which contains the compressed frame data,
+ *    is wrapped in a GstBuffer and is pushed to the GStreamer pipeline for
+ *    decoding. SpiceMsgIn is reffed.
+ * 3) As soon as the GStreamer pipeline no longer needs the compressed frame it
+ *    calls release_buffer_data() to unref SpiceMsgIn.
+ * 4) Once the decompressed frame is available the GStreamer pipeline calls
+ *    new_sample() in the GStreamer thread.
+ * 5) new_sample() then matches the decompressed frame to a SpiceFrame from
+ *    the decoding queue using the GStreamer timestamp information to deal with
+ *    dropped frames. The SpiceFrame is popped from the decoding_queue.
+ * 6) new_sample() then attaches the decompressed frame to the SpiceFrame,
+ *    pushes it to the display_queue and calls schedule_frame().
+ * 7) schedule_frame() then uses the SpiceMsgIn's mm_time to arrange for
+ *    display_frame() to be called, in the main thread, at the right time for
+ *    the next frame.
+ * 8) display_frame() pops the first SpiceFrame from the display_queue and
+ *    calls stream_display_frame().
+ * 9) display_frame() then frees the SpiceFrame and the decompressed frame.
+ *    SpiceMsgIn is unreffed.
+ */
 static gboolean spice_gst_decoder_queue_frame(VideoDecoder *video_decoder,
                                               SpiceMsgIn *frame_msg,
                                               int32_t latency)
commit 977db3bb3da94def4c0e1d4087037303d250e158
Author: Christophe de Dinechin <dinechin at redhat.com>
Date:   Fri Apr 28 13:41:02 2017 +0200

    build: Check for epoxy/egl.h availability
    
    This header is not present on OSX. This added check allows the EGL code
    to be compiled conditionally based on egl.h availability rather than
    only disabling it for win32 builds.
    
    Signed-off-by: Christophe de Dinechin <dinechin at redhat.com>
    Acked-by: Christophe Fergeau <cfergeau at redhat.com>
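
For reference, AC_CHECK_HEADERS defines HAVE_EPOXY_EGL_H in config.h when the header is found, while the AC_DEFINE below adds HAVE_EGL for the feature guards. A minimal sketch of the resulting pattern (illustrative only; it assumes config.h is included before these macros are tested):

#include "config.h"
#include <stdbool.h>

#ifdef HAVE_EPOXY_EGL_H
#include <epoxy/egl.h>
#endif

static bool egl_supported(void)
{
#if HAVE_EGL
    return true;    /* EGL code paths are compiled in */
#else
    return false;   /* e.g. OSX, where epoxy/egl.h is missing */
#endif
}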

diff --git a/configure.ac b/configure.ac
index ad5e6e9..74b5811 100644
--- a/configure.ac
+++ b/configure.ac
@@ -64,6 +64,14 @@ AM_CONDITIONAL([OS_WIN32],[test "$os_win32" = "yes"])
 
 AC_CHECK_HEADERS([sys/socket.h netinet/in.h arpa/inet.h])
 AC_CHECK_HEADERS([termios.h])
+AC_CHECK_HEADERS([epoxy/egl.h],
+                 [have_egl=yes],
+                 [have_egl=no])
+AC_MSG_CHECKING([if we can use EGL in libepoxy])
+AC_MSG_RESULT([$have_egl])
+AM_CONDITIONAL([HAVE_EGL],[test "$have_egl" = "yes"])
+AS_IF([test "$have_egl" = "yes"],
+       AC_DEFINE([HAVE_EGL], [1], [Define if supporting EGL]))
 
 AC_CHECK_LIBM
 AC_SUBST(LIBM)
diff --git a/src/Makefile.am b/src/Makefile.am
index 4fa7357..bb7ad6c 100644
--- a/src/Makefile.am
+++ b/src/Makefile.am
@@ -145,7 +145,7 @@ SPICE_GTK_SOURCES_COMMON +=		\
 	spice-widget-cairo.c		\
 	$(NULL)
 
-if !OS_WIN32
+if HAVE_EGL
 SPICE_GTK_SOURCES_COMMON +=		\
 	spice-widget-egl.c		\
 	$(NULL)
diff --git a/src/spice-widget-priv.h b/src/spice-widget-priv.h
index e29e1b7..5b3216f 100644
--- a/src/spice-widget-priv.h
+++ b/src/spice-widget-priv.h
@@ -24,7 +24,7 @@
 #include <windows.h>
 #endif
 
-#ifndef G_OS_WIN32
+#ifdef HAVE_EPOXY_EGL_H
 #include <epoxy/egl.h>
 #endif
 
@@ -133,7 +133,7 @@ struct _SpiceDisplayPrivate {
     int                     x11_accel_denominator;
     int                     x11_threshold;
 #endif
-#ifndef G_OS_WIN32
+#if HAVE_EGL
     struct {
         gboolean            context_ready;
         gboolean            enabled;
@@ -150,7 +150,7 @@ struct _SpiceDisplayPrivate {
         gboolean            call_draw_done;
         SpiceGlScanout      scanout;
     } egl;
-#endif
+#endif // HAVE_EGL
 };
 
 int      spice_cairo_image_create                 (SpiceDisplay *display);
diff --git a/src/spice-widget.c b/src/spice-widget.c
index 5bbba8f..8203d55 100644
--- a/src/spice-widget.c
+++ b/src/spice-widget.c
@@ -235,7 +235,7 @@ static gint get_display_id(SpiceDisplay *display)
 
 static bool egl_enabled(SpiceDisplayPrivate *d)
 {
-#ifndef G_OS_WIN32
+#if HAVE_EGL
     return d->egl.enabled;
 #else
     return false;
@@ -574,7 +574,7 @@ static void grab_notify(SpiceDisplay *display, gboolean was_grabbed)
 }
 
 #if GTK_CHECK_VERSION(3,16,0)
-#ifndef G_OS_WIN32
+#if HAVE_EGL
 /* Ignore GLib's too-new warnings */
 G_GNUC_BEGIN_IGNORE_DEPRECATIONS
 static gboolean
@@ -646,7 +646,7 @@ static void spice_display_init(SpiceDisplay *display)
     gtk_stack_set_visible_child(d->stack, area);
 
 #if GTK_CHECK_VERSION(3,16,0)
-#ifndef G_OS_WIN32
+#if HAVE_EGL
 /* Ignore GLib's too-new warnings */
 G_GNUC_BEGIN_IGNORE_DEPRECATIONS
     area = gtk_gl_area_new();
@@ -1305,7 +1305,7 @@ static gboolean do_color_convert(SpiceDisplay *display, GdkRectangle *r)
     return true;
 }
 
-#ifndef G_OS_WIN32
+#if HAVE_EGL
 static void set_egl_enabled(SpiceDisplay *display, bool enabled)
 {
     SpiceDisplayPrivate *d = display->priv;
@@ -1341,7 +1341,7 @@ static gboolean draw_event(GtkWidget *widget, cairo_t *cr, gpointer data)
     SpiceDisplayPrivate *d = display->priv;
     g_return_val_if_fail(d != NULL, false);
 
-#ifndef G_OS_WIN32
+#if HAVE_EGL
     if (egl_enabled(d) &&
         g_str_equal(gtk_stack_get_visible_child_name(d->stack), "draw-area")) {
         spice_egl_update_display(display);
@@ -2101,7 +2101,7 @@ static void size_allocate(GtkWidget *widget, GtkAllocation *conf, gpointer data)
         d->ww = conf->width;
         d->wh = conf->height;
         recalc_geometry(widget);
-#ifndef G_OS_WIN32
+#if HAVE_EGL
         if (egl_enabled(d))
             spice_egl_resize_display(display, conf->width, conf->height);
 #endif
@@ -2144,7 +2144,7 @@ static void realize(GtkWidget *widget)
 static void unrealize(GtkWidget *widget)
 {
     spice_cairo_image_destroy(SPICE_DISPLAY(widget));
-#ifndef G_OS_WIN32
+#if HAVE_EGL
     spice_egl_unrealize_display(SPICE_DISPLAY(widget));
 #endif
 
@@ -2500,7 +2500,7 @@ static void update_area(SpiceDisplay *display,
         .height = height
     };
 
-#ifndef G_OS_WIN32
+#if HAVE_EGL
     if (egl_enabled(d)) {
         const SpiceGlScanout *so =
             spice_display_get_gl_scanout(SPICE_DISPLAY_CHANNEL(d->display));
@@ -2597,7 +2597,7 @@ static void invalidate(SpiceChannel *channel,
         .height = h
     };
 
-#ifndef G_OS_WIN32
+#if HAVE_EGL
     set_egl_enabled(display, false);
 #endif
 
@@ -2661,7 +2661,7 @@ static void cursor_set(SpiceCursorChannel *channel,
     } else
         g_warn_if_reached();
 
-#ifndef G_OS_WIN32
+#if HAVE_EGL
     if (egl_enabled(d))
         spice_egl_cursor_set(display);
 #endif
@@ -2833,7 +2833,7 @@ static void inputs_channel_event(SpiceChannel *channel, SpiceChannelEvent event,
     spice_display_set_keypress_delay(display, delay);
 }
 
-#ifndef G_OS_WIN32
+#if HAVE_EGL
 G_GNUC_INTERNAL
 void spice_display_widget_gl_scanout(SpiceDisplay *display)
 {
@@ -2942,7 +2942,7 @@ static void channel_new(SpiceSession *s, SpiceChannel *channel, gpointer data)
             mark(display, primary.marked);
         }
 
-#ifndef G_OS_WIN32
+#if HAVE_EGL
         spice_g_signal_connect_object(channel, "notify::gl-scanout",
                                       G_CALLBACK(spice_display_widget_gl_scanout),
                                       display, G_CONNECT_SWAPPED);
@@ -3102,7 +3102,7 @@ GdkPixbuf *spice_display_get_pixbuf(SpiceDisplay *display)
     g_return_val_if_fail(d != NULL, NULL);
     g_return_val_if_fail(d->display != NULL, NULL);
 
-#ifndef G_OS_WIN32
+#if HAVE_EGL
     if (egl_enabled(d)) {
         GdkPixbuf *tmp;
 

