How to create one pipeline for both audio-video stream and only video stream.
Алексей Буров
burov_alexey at mail.ru
Fri Jan 20 15:03:54 UTC 2017
Hello, all.
I have a rtsp source and I need to restream it through my rtsp server.
The rtsp source can stream audio/video and sometimes only video.
I can create either audio/video pipeline or only video pipeline.
But I can't create a single working pipeline that handles both cases.
I connect callback to 'pad-added' event and then I link the first video element and the first audio element (if audio exists) to rtspsrc element in 'pad-added' callback.
I create/add/link the audio elements in the 'pad-added' callback, but the RTSP client gets no audio in that case.
Please tell me what is wrong.
This pipeline works well with audio-video:
Gst.parse_launch(
'( rtspsrc location="rtsp://admin:admin@192.168.7.217" '
'latency=0 '
'timeout=5000000 '
'name=rtsp_src '
'rtsp_src. '
'! queue'
'! rtph264depay '
'! rtph264pay '
'name=pay0 '
'rtsp_src. '
'! queue'
'! rtppcmudepay '
'! rtppcmupay '
'name=pay1 )'
)
OS: gentoo, gstreamer: version 1.6.3, gst-rtsp-server: 1.6.2, python3
Code:
#!/usr/bin/env python3
"""RTSP restreamer based on GStreamer."""
import gi
# Pin the GI namespace versions before importing them.
gi.require_version('Gst', '1.0')
gi.require_version('GstRtspServer', '1.0')
from gi.repository import Gst, GstRtspServer, GObject
# Main loop shared by the whole script; started in main().
loop = GObject.MainLoop()
# NOTE(review): GObject.threads_init() has been a deprecated no-op since
# PyGObject 3.11 — kept here only for compatibility with older installs.
GObject.threads_init()
Gst.init(None)
class AVPipeline(Gst.Pipeline):
    """RTSP re-streaming pipeline.

    Builds the video branch (queue -> rtph264depay -> rtph264pay, named
    ``pay0``) statically, and adds the audio branch (``pay1``) dynamically
    from the ``pad-added`` callback when the source announces an audio
    stream.
    """

    def __init__(self):
        Gst.Pipeline.__init__(self)
        # RTSP source; its src pads appear dynamically, so downstream
        # linking is deferred to on_pad_added().
        rtspsrc = Gst.ElementFactory.make('rtspsrc', None)
        rtspsrc.set_property('location', 'rtsp://admin:admin@192.168.7.217')
        rtspsrc.set_property('latency', 500)
        rtspsrc.set_property('timeout', 2000000)
        self.add(rtspsrc)
        # BUG FIX: the original called self.link(rtspsrc), which attempts
        # to link the pipeline itself to the source element — a meaningless
        # self-link that fails; rtspsrc is linked lazily in on_pad_added.
        rtspsrc.connect('pad-added', self.on_pad_added)
        # Static video branch. The payloader is named 'pay0' so the RTSP
        # server picks it up as the first media stream.
        vqueue = Gst.ElementFactory.make('queue', None)
        rtph264depay = Gst.ElementFactory.make('rtph264depay', None)
        rtph264pay = Gst.ElementFactory.make('rtph264pay', None)
        rtph264pay.set_property('name', 'pay0')
        rtph264pay.set_property('pt', 96)
        self.add(vqueue)
        self.add(rtph264depay)
        self.add(rtph264pay)
        vqueue.link(rtph264depay)
        rtph264depay.link(rtph264pay)
        # First element of the video branch; pad-added links the source
        # pad into it.
        self._tolink_video_elem = vqueue

    def on_pad_added(self, element, pad):
        """Link a newly exposed rtspsrc pad to the matching branch.

        Video pads go to the pre-built video branch; audio pads trigger
        on-the-fly creation of the audio branch (queue -> rtppcmudepay ->
        rtppcmupay, named 'pay1').
        """
        string = pad.query_caps(None).to_string()
        if string.startswith('application/x-rtp'):
            if 'media=(string)video' in string:
                pad.link(self._tolink_video_elem.get_static_pad('sink'))
                print('Video connected')
            elif 'media=(string)audio' in string:
                # Audio branch built on demand.
                # NOTE(review): the RTSP server collects its payloaders
                # when the media is prepared; a 'pay1' added this late may
                # not be announced to clients — presumably the cause of
                # the missing audio. Confirm against gst-rtsp-server docs.
                aqueue = Gst.ElementFactory.make('queue', None)
                rtppcmudepay = Gst.ElementFactory.make('rtppcmudepay', None)
                rtppcmupay = Gst.ElementFactory.make('rtppcmupay', None)
                rtppcmupay.set_property('name', 'pay1')
                self.add(aqueue)
                self.add(rtppcmudepay)
                self.add(rtppcmupay)
                aqueue.link(rtppcmudepay)
                rtppcmudepay.link(rtppcmupay)
                # Elements added to a running pipeline start in NULL; bring
                # them up to the pipeline's state before data flows.
                for elem in (aqueue, rtppcmudepay, rtppcmupay):
                    elem.sync_state_with_parent()
                pad.link(aqueue.get_static_pad('sink'))
                print('Audio connected')
class MyRTSPMediaFactory(GstRtspServer.RTSPMediaFactory):
    """Factory that hands the RTSP server an AVPipeline per mount point."""

    # Jitterbuffer latency, in milliseconds.
    LATENCY = 10000

    def __init__(self):
        super().__init__()
        # A single pipeline is shared by every client of this mount point.
        self.set_shared(True)
        self.set_property('latency', self.LATENCY)
        self.set_transport_mode(GstRtspServer.RTSPTransportMode.PLAY)

    def do_create_element(self, url):
        """Build the media pipeline for an incoming client request."""
        return AVPipeline()
class Restreamer(object):
    """Owns an RTSPServer bound to host:port with a single '/test' mount."""

    def __init__(self, host, port):
        server = GstRtspServer.RTSPServer()
        server.set_address(host)
        server.set_service(str(port))
        # Register the media factory under /test, then attach the server
        # to the default GLib main context so it starts serving.
        server.get_mount_points().add_factory('/test', MyRTSPMediaFactory())
        server.attach(None)
        self._server = server
def main():
    """Start the restreamer and block on the GLib main loop."""
    server = Restreamer('0.0.0.0', 9999)
    address = server._server.get_address()
    service = server._server.get_service()
    print('Started %s:%s' % (address, service))
    loop.run()


if __name__ == '__main__':
    main()
-------------- next part --------------
An HTML attachment was scrubbed...
URL: <https://lists.freedesktop.org/archives/gstreamer-devel/attachments/20170120/a2f2b793/attachment-0001.html>
More information about the gstreamer-devel
mailing list