[Spice-devel] [client v2 05/12] channel-display: The video latency is in fact a margin

Frediano Ziglio fziglio at redhat.com
Tue Jun 18 07:23:17 UTC 2019


> 
> 'Latency' is confusing because it evokes either the network latency
> or the audio latency, neither of which matches what the latency
> variables contain. Instead they record how much margin there was
> between when the frame was received and when it is supposed to be
> displayed. Hence the new name.
> 
> Signed-off-by: Francois Gouget <fgouget at codeweavers.com>

Acked
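
For readers following along: the renamed value is simply the frame's server
mm_time minus the client's current mm_time, so a positive value is how long
the client can still wait before the frame is due, while a negative value
means the frame is already late (MJPEG then just drops it). A minimal sketch
of that sign convention; the helper name is made up for illustration, the
real code computes the value inline in display_handle_stream_data():

#include <stdint.h>

/* Hypothetical helper, for illustration only: channel-display.c computes
 * this inline as op->multi_media_time - mmtime. */
static int32_t frame_margin(uint32_t frame_mm_time, uint32_t client_mm_time)
{
    /* > 0: the frame arrived early and can wait until its due time.
     * < 0: the frame is already late and may be dropped. */
    return (int32_t)(frame_mm_time - client_mm_time);
}

The GStreamer path then schedules the buffer MAX(0, margin) milliseconds
into the future via its PTS, as the channel-display-gst.c hunk below shows.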

> ---
> 
> It's less confusing when using the right terminology, so I'm starting
> with this before the more involved patches.
> 
>  src/channel-display-gst.c   |  6 +++---
>  src/channel-display-mjpeg.c |  4 ++--
>  src/channel-display.c       | 26 +++++++++++++-------------
>  3 files changed, 18 insertions(+), 18 deletions(-)
> 
> diff --git a/src/channel-display-gst.c b/src/channel-display-gst.c
> index 449b9cb8..6fccf621 100644
> --- a/src/channel-display-gst.c
> +++ b/src/channel-display-gst.c
> @@ -679,7 +679,7 @@ static void spice_gst_decoder_destroy(VideoDecoder *video_decoder)
>   *      spice_frame_free() and free its encoded_frame.
>   */
>  static gboolean spice_gst_decoder_queue_frame(VideoDecoder *video_decoder,
> -                                              SpiceFrame *frame, int latency)
> +                                              SpiceFrame *frame, int margin)
>  {
>      SpiceGstDecoder *decoder = (SpiceGstDecoder*)video_decoder;
>  
> @@ -697,7 +697,7 @@ static gboolean spice_gst_decoder_queue_frame(VideoDecoder *video_decoder,
>      }
>      decoder->last_mm_time = frame->mm_time;
>  
> -    if (latency < 0 &&
> +    if (margin < 0 &&
>          decoder->base.codec_type == SPICE_VIDEO_CODEC_TYPE_MJPEG) {
>          /* Dropping MJPEG frames has no impact on those that follow and
>           * saves CPU so do it.
> @@ -726,7 +726,7 @@ static gboolean spice_gst_decoder_queue_frame(VideoDecoder *video_decoder,
>                                                      frame->data, frame->size, 0, frame->size,
>                                                      frame, (GDestroyNotify)spice_frame_free);
>  
> -    GstClockTime pts = gst_clock_get_time(decoder->clock) - gst_element_get_base_time(decoder->pipeline) + ((uint64_t)MAX(0, latency)) * 1000 * 1000;
> +    GstClockTime pts = gst_clock_get_time(decoder->clock) - gst_element_get_base_time(decoder->pipeline) + ((uint64_t)MAX(0, margin)) * 1000 * 1000;
>      GST_BUFFER_DURATION(buffer) = GST_CLOCK_TIME_NONE;
>      GST_BUFFER_DTS(buffer) = GST_CLOCK_TIME_NONE;
>      GST_BUFFER_PTS(buffer) = pts;
> diff --git a/src/channel-display-mjpeg.c b/src/channel-display-mjpeg.c
> index ba7f266e..647d31b0 100644
> --- a/src/channel-display-mjpeg.c
> +++ b/src/channel-display-mjpeg.c
> @@ -219,7 +219,7 @@ static void mjpeg_decoder_drop_queue(MJpegDecoder *decoder)
>  /* ---------- VideoDecoder's public API ---------- */
>  
>  static gboolean mjpeg_decoder_queue_frame(VideoDecoder *video_decoder,
> -                                          SpiceFrame *frame, int32_t latency)
> +                                          SpiceFrame *frame, int32_t margin)
>  {
>      MJpegDecoder *decoder = (MJpegDecoder*)video_decoder;
>      SpiceFrame *last_frame;
> @@ -239,7 +239,7 @@ static gboolean mjpeg_decoder_queue_frame(VideoDecoder *video_decoder,
>      /* Dropped MJPEG frames don't impact the ones that come after.
>       * So drop late frames as early as possible to save on processing time.
>       */
> -    if (latency < 0) {
> +    if (margin < 0) {
>          SPICE_DEBUG("dropping a late MJPEG frame");
>          spice_frame_free(frame);
>          return TRUE;
> diff --git a/src/channel-display.c b/src/channel-display.c
> index 6c2a99c7..cda0fcdd 100644
> --- a/src/channel-display.c
> +++ b/src/channel-display.c
> @@ -1432,7 +1432,7 @@ gboolean hand_pipeline_to_widget(display_stream *st, GstPipeline *pipeline)
>  #define STREAM_REPORT_DROP_SEQ_LEN_LIMIT 3
>  
>  static void display_update_stream_report(SpiceDisplayChannel *channel, uint32_t stream_id,
> -                                         uint32_t frame_time, int32_t latency)
> +                                         uint32_t frame_time, int32_t margin)
>  {
>      display_stream *st = get_stream_by_id(SPICE_CHANNEL(channel), stream_id);
>      guint64 now;
> @@ -1449,7 +1449,7 @@ static void display_update_stream_report(SpiceDisplayChannel *channel, uint32_t
>      }
>      st->report_num_frames++;
>  
> -    if (latency < 0) { // drop
> +    if (margin < 0) { // drop
>          st->report_num_drops++;
>          st->report_drops_seq_len++;
>      } else {
> @@ -1469,7 +1469,7 @@ static void display_update_stream_report(SpiceDisplayChannel *channel, uint32_t
>          report.end_frame_mm_time = frame_time;
>          report.num_frames = st->report_num_frames;
>          report.num_drops = st-> report_num_drops;
> -        report.last_frame_delay = latency;
> +        report.last_frame_delay = margin;
>          if (spice_session_is_playback_active(session)) {
>              report.audio_delay = spice_session_get_playback_latency(session);
>          } else {
> @@ -1606,14 +1606,14 @@ static void display_stream_stats_save(display_stream *st,
>                                        guint32 server_mmtime,
>                                        guint32 client_mmtime)
>  {
> -    gint32 latency = server_mmtime - client_mmtime;
> +    gint32 margin = server_mmtime - client_mmtime;
>  
>      if (!st->num_input_frames) {
>          st->first_frame_mm_time = server_mmtime;
>      }
>      st->num_input_frames++;
>  
> -    if (latency < 0) {
> +    if (margin < 0) {
>          CHANNEL_DEBUG(st->channel, "stream data too late by %u ms (ts: %u, mmtime: %u)",
>                        client_mmtime - server_mmtime, server_mmtime, client_mmtime);
>          st->arrive_late_time += client_mmtime - server_mmtime;
> @@ -1630,7 +1630,7 @@ static void display_stream_stats_save(display_stream *st,
>          return;
>      }
>  
> -    CHANNEL_DEBUG(st->channel, "video latency: %d", latency);
> +    CHANNEL_DEBUG(st->channel, "video margin: %d", margin);
>      if (st->cur_drops_seq_stats.len) {
>          st->cur_drops_seq_stats.duration = server_mmtime -
>                                             st->cur_drops_seq_stats.start_mm_time;
> @@ -1679,7 +1679,7 @@ static void display_handle_stream_data(SpiceChannel *channel, SpiceMsgIn *in)
>      SpiceStreamDataHeader *op = spice_msg_in_parsed(in);
>      display_stream *st = get_stream_by_id(channel, op->id);
>      guint32 mmtime;
> -    int32_t latency, latency_report;
> +    int32_t margin, margin_report;
>      SpiceFrame *frame;
>  
>      g_return_if_fail(st != NULL);
> @@ -1694,13 +1694,13 @@ static void display_handle_stream_data(SpiceChannel *channel, SpiceMsgIn *in)
>          op->multi_media_time = mmtime + 100; /* workaround... */
>      }
>  
> -    latency = latency_report = op->multi_media_time - mmtime;
> -    if (latency > 0) {
> +    margin = margin_report = op->multi_media_time - mmtime;
> +    if (margin > 0) {
>          SpiceSession *s = spice_channel_get_session(channel);
>  
>          if (st->surface->streaming_mode && !spice_session_is_playback_active(s)) {
> -            CHANNEL_DEBUG(channel, "video latency: %d, set to 0 since there is no playback", latency);
> -            latency = 0;
> +            CHANNEL_DEBUG(channel, "video margin: %d, set to 0 since there is no playback", margin);
> +            margin = 0;
>          }
>      }
>      display_stream_stats_save(st, op->multi_media_time, mmtime);
> @@ -1710,7 +1710,7 @@ static void display_handle_stream_data(SpiceChannel *channel, SpiceMsgIn *in)
>       * taking into account the impact on later frames.
>       */
>      frame = spice_frame_new(st, in, op->multi_media_time);
> -    if (!st->video_decoder->queue_frame(st->video_decoder, frame, latency)) {
> +    if (!st->video_decoder->queue_frame(st->video_decoder, frame, margin)) {
>          destroy_stream(channel, op->id);
>          report_invalid_stream(channel, op->id);
>          return;
> @@ -1718,7 +1718,7 @@ static void display_handle_stream_data(SpiceChannel *channel, SpiceMsgIn *in)
>  
>      if (c->enable_adaptive_streaming) {
>          display_update_stream_report(SPICE_DISPLAY_CHANNEL(channel), op->id,
> -                                     op->multi_media_time, latency_report);
> +                                     op->multi_media_time, margin_report);
>          if (st->playback_sync_drops_seq_len >= STREAM_PLAYBACK_SYNC_DROP_SEQ_LEN_LIMIT) {
>              spice_session_sync_playback_latency(spice_channel_get_session(channel));
>              st->playback_sync_drops_seq_len = 0;

