RTSP h264 sob issue

WisdomPill anas.el.amraoui at live.com
Wed Feb 7 10:48:07 UTC 2018


Hi, I'm attempting to create an RTSP live streaming service that serves images
fed to it from another thread with OpenCV. I managed to create the buffer and
push it to the appsrc element, but I stumbled upon an issue with the x264enc
element, which I need in order to produce a standard RTSP stream that any
commercial app can read. I suspect the x264enc element because, after lots of
parameter tweaking and trying other pipelines without the appsrc element, there
is still a "sob issue" that occurs periodically, every n seconds. The sob issue
is a sudden change in a region of the image, which makes for an ugly user
experience. There is an example at the following link:
https://www.dropbox.com/s/e3zlxuuj6dyd56p/sob_issue.mp4?dl=0 I found a
parameter, key-int-max, that changes the period of the sob issue but doesn't
solve the problem: higher values stretch the period, yet the issue remains, and
beyond a certain point they also make VLC turn gray after some time (less than
a minute).
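
For what it's worth, my rough understanding (an assumption on my part, not
something I verified in the docs) is that key-int-max caps the distance between
keyframes in frames, so the period between jumps should be roughly
key-int-max / fps:

# back-of-the-envelope check of the assumed relation
fps = 6
key_int_max = 2 ** 10            # same value used in the launch string below
period_s = key_int_max / fps     # ~170 seconds between forced keyframes at 6 fps
print(period_s)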
The code that I'm using to create the stream is the following.

#!/usr/bin/python3
import sys
from threading import Thread

import cv2
import gi

gi.require_version('Gst', '1.0')
gi.require_version('GstRtspServer', '1.0')
from gi.repository import Gst, GstRtspServer, GObject


class Context:
    def __init__(self):
        self._timestamp = 0
        self._need_data = True

    @property
    def timestamp(self):
        return self._timestamp

    @timestamp.setter
    def timestamp(self, value):
        self._timestamp = value

    @property
    def need_data(self):
        return self._need_data

    @need_data.setter
    def need_data(self, value):
        self._need_data = value

    def __str__(self):
        return 'timestamp -> {}, need_data -> {}'.format(self._timestamp,
                                                         self._need_data)


class SensorFactory(GstRtspServer.RTSPMediaFactory):
    def __init__(self, **properties):
        super(SensorFactory, self).__init__(**properties)
        if sys.platform == 'darwin':
            self.width = 1280
            self.height = 720
        else:
            self.width = 640
            self.height = 480
        self.fps = 6.
        self.bitrate = 256
        self.buffer_frames = 3
        self.frame_size = self.width * self.height * 3
        self.buffer_size = self.frame_size * self.buffer_frames
        self.key_int_max = 2 ** 10
        self.duration = int(1 / self.fps * Gst.SECOND)  # duration of a frame in nanoseconds
        launch_string = ('( appsrc name=source is-live=true format=GST_FORMAT_TIME blocksize={} '
                         'caps=video/x-raw,format=I420,width={},height={},framerate={}/1 '
                         '! x264enc key-int-max={} speed-preset=ultrafast bitrate={} tune=zerolatency '
                         '! rtph264pay config-interval=1 name=pay0 pt=96 )'
                         .format(self.buffer_size, self.width, self.height, int(self.fps),
                                 self.key_int_max, self.bitrate))

        print(launch_string)
        self.set_launch(launch_string)
        self.set_shared(True)
        self.set_eos_shutdown(True)
        self.set_latency(500)
        self.frame = None

    def set_last_frame(self, frame):
        self.frame = cv2.cvtColor(frame, cv2.COLOR_BGR2YUV_I420)

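    # Keep pushing buffers into appsrc until 'enough-data' (or a failed push)
    # clears need_data; pts/dts advance by one frame duration per buffer.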
    def on_need_data(self, src, length, context):
        print('context address -> {}'.format(id(context)))
        print('need_data length -> {}, context -> {}'.format(length, context))
        context.need_data = True
        while context.need_data:
            if self.frame is not None:
                data = self.frame.tostring()
                buf = Gst.Buffer.new_allocate(None, len(data), None)
                buf.fill(0, data)
                buf.duration = self.duration
                buf.pts = buf.dts = context.timestamp
                context.timestamp += buf.duration
                retval = src.emit('push-buffer', buf)
                if retval != Gst.FlowReturn.OK:
                    print(retval)
                    context.need_data = False
        print('context -> {}'.format(context))

    def on_enough_data(self, src, context):
        print('context address -> {}'.format(id(context)))
        print('enough_data context -> {}'.format(context))
        context.need_data = False

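    # Called each time the media pipeline is constructed; a fresh Context means
    # timestamps for that media start again from zero.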
    def do_configure(self, rtsp_media):
        ctx = Context()
        appsrc = rtsp_media.get_element().get_child_by_name('source')
        appsrc.connect('need-data', self.on_need_data, ctx)
        appsrc.connect('enough-data', self.on_enough_data, ctx)


class GstServer(GstRtspServer.RTSPServer):
    def __init__(self, **properties):
        super(GstServer, self).__init__(**properties)
        self.factory = SensorFactory()
        self.get_mount_points().add_factory("/stream", self.factory)
        print(self.get_backlog())
        GObject.timeout_add_seconds(3, self.check_health)
        self.attach(None)

    def set_last_frame(self, frame):
        self.factory.set_last_frame(frame)

    def check_health(self):
        thread_pool = self.get_thread_pool()
        session_pool = self.get_session_pool()
        print('thread_pool: max_threads {}'.format(thread_pool.get_max_threads()))
        print('session_pool: max_sessions {}, n_sessions {}'.format(session_pool.get_max_sessions(),
                                                                    session_pool.get_n_sessions()))

        return True


class LiveStreamingServer:
    def __init__(self):
        self.server = GstServer()
        self.loop = GObject.MainLoop()
        self.thread = Thread(target=self.loop.run)

    def start(self):
        self.thread.start()

    def set_last_frame(self, frame):
        self.server.set_last_frame(frame)


GObject.threads_init()
Gst.init(None)

s = LiveStreamingServer()
s.start()

cap = cv2.VideoCapture(0)

print('cap.isOpened() -> {}'.format(cap.isOpened()))

while cap.isOpened():
    ret, frame = cap.read()
    if ret:
        s.set_last_frame(frame)


cap.release()
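
To give an idea of the kind of appsrc-free pipeline I mentioned above, this is
roughly what such a test looks like (only a sketch with videotestsrc and the
same encoder settings, not the exact script I ran):

#!/usr/bin/python3
# Sketch: same RTSP server and x264enc settings, but fed by videotestsrc
# instead of appsrc, to check whether the periodic jump comes from the
# encoder settings alone.
import gi

gi.require_version('Gst', '1.0')
gi.require_version('GstRtspServer', '1.0')
from gi.repository import Gst, GstRtspServer, GObject

Gst.init(None)

server = GstRtspServer.RTSPServer()
factory = GstRtspServer.RTSPMediaFactory()
factory.set_launch('( videotestsrc is-live=true '
                   '! video/x-raw,format=I420,width=640,height=480,framerate=6/1 '
                   '! x264enc key-int-max=1024 speed-preset=ultrafast bitrate=256 tune=zerolatency '
                   '! rtph264pay config-interval=1 name=pay0 pt=96 )')
factory.set_shared(True)
server.get_mount_points().add_factory('/test', factory)
server.attach(None)

GObject.MainLoop().run()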


Can anyone point me to the cause or give me a better explanation of why this
happens?

I also include below the GStreamer reading pipeline, which, even with the debug
level set to 4, doesn't give me any clues.


#!/bin/bash

export GST_DEBUG=4
mkdir -p logs/$1
exec > >(tee "logs/$1/$(date +%F_%R).log")
exec 2>&1

gst-launch-1.0 -v rtspsrc location="rtsp://$1:8554/stream" latency=500 \
    ! rtph264depay ! avdec_h264 skip-frame=1 ! videoconvert ! timeoverlay \
    ! clockoverlay halignment=right ! autovideosink


The issue does not occur when I stream over localhost.


