如何使用gstreamer和OpenCV服务RTSP组播服务器?

yqlxgs2m  于 2023-11-22  发布在  其他
关注(0)|答案(1)|浏览(120)

我想写一个程序,通过RTSP服务器以多播或广播方式推流cv2帧。
我参考一些例子写了下面的代码,但只有第一个连接的客户端能收到图像流,之后启动的其他客户端都收不到(用ffplay和vlc测试,url是rtsp://host_url:5000/stream)。
这段代码中有什么需要改进的地方吗?
我认为第一个客户端接收所有发出的块.

class SensorFactory(GstRtspServer.RTSPMediaFactory):
    """RTSP media factory that feeds cv2 frames into an appsrc pipeline.

    On every ``need-data`` signal the current frame from the shared
    ``frame_list`` is resized to 640x360 BGR, wrapped in a Gst.Buffer with
    monotonically increasing timestamps and pushed into the appsrc.
    """

    def __init__(self, image_type):
        super(SensorFactory, self).__init__()
        self.number_frames = 0
        self.fps = 30.0
        # Duration of one frame in GStreamer time units (nanoseconds).
        self.duration = 1.0 / self.fps * Gst.SECOND
        # Caps must match the resize below (BGR, 640x360, 30 fps).
        self.launch_string = 'appsrc name=source is-live=true format=GST_FORMAT_TIME ' \
                             'caps=video/x-raw,format=BGR,width=640,height=360,framerate=30/1 ' \
                             '! videoconvert ! video/x-raw,format=I420 ' \
                             '! x264enc speed-preset=ultrafast tune=zerolatency ' \
                             '! rtph264pay name=pay0 pt=96'
        # Index into frame_list selecting which image stream to serve.
        self.image_type = image_type

    def do_create_element(self, url):
        """Build the streaming pipeline for a new media request."""
        return Gst.parse_launch(self.launch_string)

    def on_need_data(self, src, length):
        """Push the next frame into *src* when the appsrc asks for data.

        NOTE(review): frame_list is assumed to be a Manager.list() holding
        BGR cv2 frames, defined elsewhere in this module — confirm.
        """
        data = cv2.resize(frame_list[self.image_type], (640, 360),
                          interpolation=cv2.INTER_LINEAR)
        data = data.tobytes()
        buf = Gst.Buffer.new_allocate(None, len(data), None)
        buf.fill(0, data)
        buf.duration = self.duration
        timestamp = self.number_frames * self.duration
        buf.pts = buf.dts = int(timestamp)
        buf.offset = timestamp
        self.number_frames += 1
        # Fix: the original ignored the push-buffer result, so downstream
        # errors (e.g. a flushing/failed pipeline) went unnoticed.
        retval = src.emit('push-buffer', buf)
        if retval != Gst.FlowReturn.OK:
            print('push-buffer returned {} at frame {}'.format(retval, self.number_frames))

    # Attaching the source element to the rtsp media.
    def do_configure(self, rtsp_media):
        """Reset the frame counter and hook on_need_data to the appsrc."""
        self.number_frames = 0
        appsrc = rtsp_media.get_element().get_child_by_name('source')
        appsrc.connect('need-data', self.on_need_data)

class MulticastServer(GstRtspServer.RTSPServer):
    """RTSP server exposing a shared, multicast-capable stream at /stream."""

    def __init__(self):
        GstRtspServer.RTSPServer.__init__(self)
        # RTSP control-connection port (rtsp://host:5000/stream).
        self.set_service("5000")

        self.factory0 = SensorFactory(0)
        # Share one media pipeline between all clients instead of one each.
        self.factory0.set_shared(True)
        self.factory0.set_eos_shutdown(True)

        # Multicast address setup.
        # Fix: IPv4 multicast is 224.0.0.0-239.255.255.255; the original
        # upper bound 240.0.0.10 lies outside that range, and 224.0.0.x is
        # reserved for local-network control traffic, so start above it.
        self.address_pool = GstRtspServer.RTSPAddressPool.new()
        self.address_pool.add_range("224.1.0.0", "239.255.255.255", 5000, 5010, 5)
        self.factory0.set_address_pool(self.address_pool)
        self.get_mount_points().add_factory("/stream", self.factory0)

# Bootstrap: initialise GStreamer, start the RTSP server, then serve forever.
Gst.init(None)

main_loop = GLib.MainLoop()
server = MulticastServer()
server.attach(None)  # attach to the default GLib main context

main_loop.run()

字符串

lhcgjxsq

lhcgjxsq1#

这可能不是最佳的解决方案,但这个简单的方案可能适用于您的情况:

1.使用openCV VideoWriter和gstreamer后台编码成H264,并使用UDP/5000将RTPH264流到localhost:

import cv2

# Simulate a frame source with the gstreamer backend (videotestsrc),
# delivering BGR frames as expected by an opencv appsink.
cap = cv2.VideoCapture(
    'videotestsrc ! video/x-raw,format=BGR,width=640,height=480 ! queue ! appsink drop=1',
    cv2.CAP_GSTREAMER)
if not cap.isOpened():
    print('Error: failed to open capture')
    exit(-1)

w = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
h = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
fps = float(cap.get(cv2.CAP_PROP_FPS))
# Fix: some sources report 0 for CAP_PROP_FPS; a zero rate makes the
# VideoWriter fail to open, so fall back to a sane default.
if fps <= 0:
    fps = 30.0
print('Capture opened, framing %dx%d@%f' % (w, h, fps))

# VideoWriter with gstreamer backend (fourcc 0 means RAW input): convert
# BGR, encode into H264 and stream as RTP/H264 to localhost over UDP/5000.
rtph264 = cv2.VideoWriter(
    'appsrc ! video/x-raw,format=BGR ! queue ! videoconvert ! x264enc key-int-max=30 insert-vui=1 speed-preset=ultrafast tune=zerolatency ! h264parse ! rtph264pay ! udpsink host=127.0.0.1 port=5000',
    cv2.CAP_GSTREAMER, 0, fps, (w, h))
if not rtph264.isOpened():
    print('Error: failed to open rtph264')
    # Fix: release the capture before bailing out (was leaked).
    cap.release()
    exit(-2)

# Loop reading a frame from capture and pushing it into the rtph264 writer.
while True:
    ret, frame = cap.read()
    if not ret:
        print('Read frame failed')
        break
    rtph264.write(frame)

rtph264.release()
cap.release()

字符串

2.使用同一台主机上的另一个线程或进程运行RTSP服务器,从UDP/5000读取RTPH264,然后只做RTP解包并重新打包(depay/re-pay):

import gi
gi.require_version('Gst', '1.0')
gi.require_version('GstVideo', '1.0')
gi.require_version('GstRtspServer', '1.0')
from gi.repository import GLib, Gst, GstVideo, GstRtspServer

Gst.init(None)
mainloop = GLib.MainLoop()
server = GstRtspServer.RTSPServer()
mounts = server.get_mount_points()
factory = GstRtspServer.RTSPMediaFactory()
# Depay the RTP/H264 arriving on UDP/5000 and re-payload it for RTSP clients.
factory.set_launch('( udpsrc address=127.0.0.1 port=5000 ! application/x-rtp,encoding-name=H264 ! rtph264depay ! h264parse ! rtph264pay name=pay0 )')
# Fix: without set_shared(True) every client gets its own pipeline with its
# own udpsrc bound to the same port, so only the first client that connects
# receives the stream — the exact symptom described in the question.
factory.set_shared(True)
mounts.add_factory("/test", factory)
server.attach(None)
print("stream ready at rtsp://127.0.0.1:8554/test")
mainloop.run()

3.测试:

从localhost:

gst-play-1.0 -v rtsp://127.0.0.1:8554/test


另一个主持人:

gst-play-1.0 -v rtsp://<rtsp_server_ip>:8554/test

# Or (0 latency may not be the best choice, you would adjust for your case):
gst-launch-1.0 rtspsrc location=rtsp://<rtsp_server_ip>:8554/test latency=0 ! application/x-rtp,encoding-name=H264 ! rtph264depay ! h264parse ! avdec_h264 ! videoconvert ! autovideosink

# Or disabling sync:
gst-launch-1.0 rtspsrc location=rtsp://<rtsp_server_ip>:8554/test latency=0 ! application/x-rtp,encoding-name=H264 ! rtph264depay ! h264parse ! avdec_h264 ! videoconvert ! autovideosink sync=0

相关问题