Capturing a gstreamer network video stream with Python

Date: 2017-05-04 08:17:21

Tags: python video-streaming gstreamer

I'm trying to capture and display a network video stream with Python. The stream is created on my laptop with the following command:

gst-launch-1.0 v4l2src ! videorate ! video/x-raw,framerate=2/1,width=640,height=480 ! x264enc pass=qual quantizer=20 tune=zerolatency ! rtph264pay config-interval=10 pt=96 ! udpsink host=127.0.0.1 port=5000

It takes the webcam input and streams it over a UDP port. I can capture and display the stream with:

gst-launch-1.0 udpsrc port=5000 ! "application/x-rtp, payload=127" ! rtph264depay ! avdec_h264 ! xvimagesink sync=false

Now I'm trying to do the same thing (the capture side) with a Python script, but without luck. Here is my code:

import gi
gi.require_version('Gst', '1.0')
from gi.repository import Gst

udpPipe = Gst.pipeline("player")
source = Gst.ElementFactory.make('udpsrc', None)
source.set_property("port", 5000)
source.set_property("host", "127.0.0.1")

rdepay = Gst.ElementFactory.make('rtph264depay', 'rdepay')
vdecode = Gst.ElementFactory.make('avdec_h264', 'vdecode')
sink = Gst.ElementFactory.make('xvimagesink', None)

udpPipe.add(source, rdepay, vdecode, sink)
gst.element_link_many(source, rdepay, vdecode, sink)
udpPipe.set_state(gst.STATE_PLAYING)

The error I get is:

/usr/lib/python2.7/dist-packages/gi/overrides/Gst.py:56: Warning: /build/glib2.0-prJhLS/glib2.0-2.48.2/./gobject/gsignal.c:1674: parameter 1 of type '<invalid>' for signal "GstBus::sync_message" is not a value type
  Gst.Bin.__init__(self, name=name)
/usr/lib/python2.7/dist-packages/gi/overrides/Gst.py:56: Warning: /build/glib2.0-prJhLS/glib2.0-2.48.2/./gobject/gsignal.c:1674: parameter 1 of type '<invalid>' for signal "GstBus::message" is not a value type
  Gst.Bin.__init__(self, name=name)
Traceback (most recent call last):
  File "getUdp.py", line 13, in <module>
    source = Gst.ElementFactory.make('udpsrc', None)
  File "/usr/lib/python2.7/dist-packages/gi/overrides/Gst.py", line 217, in make
    return Gst.ElementFactory.make(factory_name, instance_name)
TypeError: unbound method fake_method() must be called with ElementFactory instance as first argument (got str instance instead) 

Any ideas? :-(

2 answers:

Answer 0 (score: 2):

I ran into the same error today on Debian 9.3 (stretch). Calling Gst.init explicitly solves the problem.

The following code pops up an xvimagesink window on my system, with both Python 2.7 and 3.5:

#!/usr/bin/python
import sys
import gi
gi.require_version('GLib', '2.0')
gi.require_version('Gst', '1.0')
from gi.repository import GLib, Gst

Gst.init(sys.argv)

udpPipe = Gst.Pipeline("player")
source = Gst.ElementFactory.make('udpsrc', None)
source.set_property("port", 5000)
#source.set_property("host", "127.0.0.1")
caps = Gst.caps_from_string("application/x-rtp, payload=127")
source.set_property("caps", caps)

rdepay = Gst.ElementFactory.make('rtph264depay', 'rdepay')
vdecode = Gst.ElementFactory.make('avdec_h264', 'vdecode')
sink = Gst.ElementFactory.make('xvimagesink', None)
sink.set_property("sync", False)

udpPipe.add(source, rdepay, vdecode, sink)

#Gst.element_link_many(source, rdepay, vdecode, sink)
source.link(rdepay)
rdepay.link(vdecode)
vdecode.link(sink)

udpPipe.set_state(Gst.State.PLAYING)

GLib.MainLoop().run()

I think calling Gst.init and running a mainloop with PyGObject are both necessary when converting a gst-launch command line into a Python script.
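One possible refinement (a sketch, assuming the same udpPipe object and imports as above) is to watch the pipeline's bus so the mainloop quits on ERROR or EOS instead of running forever; it would replace the script's last two lines:

loop = GLib.MainLoop()

def on_message(bus, message):
    # Quit the mainloop on pipeline errors or end-of-stream
    if message.type == Gst.MessageType.ERROR:
        err, debug = message.parse_error()
        print("GStreamer error:", err, debug)
        loop.quit()
    elif message.type == Gst.MessageType.EOS:
        loop.quit()

bus = udpPipe.get_bus()
bus.add_signal_watch()
bus.connect("message", on_message)

udpPipe.set_state(Gst.State.PLAYING)
loop.run()
udpPipe.set_state(Gst.State.NULL)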

Answer 1 (score: 0):

You can use an "mjpeg" stream as follows:

gst-launch-1.0 videotestsrc ! videoconvert ! videoscale ! video/x-raw,format=I420,width=800,height=600,framerate=25/1 ! jpegenc ! rtpjpegpay ! udpsink host=127.0.0.1 port=5000
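Before wiring this into Python, the receive side can be checked from the command line first (a sketch in the spirit of the commands above; payload 26 is the static RTP payload type for JPEG):

gst-launch-1.0 udpsrc port=5000 ! "application/x-rtp, payload=26" ! rtpjpegdepay ! jpegdec ! autovideosink sync=false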

In Python 3 you can grab the frames like this:

#!/usr/bin/env python

import cv2
import gi
import numpy as np

gi.require_version('Gst', '1.0')
from gi.repository import Gst


class Video():
    """BlueRov video capture class constructor

    Attributes:
        port (int): Video UDP port
        video_codec (string): Source h264 parser
        video_decode (string): Transform YUV (12bits) to BGR (24bits)
        video_pipe (object): GStreamer top-level pipeline
        video_sink (object): Gstreamer sink element
        video_sink_conf (string): Sink configuration
        video_source (string): Udp source ip and port
    """

    def __init__(self, port=5000):
        """Summary

        Args:
            port (int, optional): UDP port
        """

        Gst.init(None)

        self.port = port
        self._frame = None

        # [Software component diagram](https://www.ardusub.com/software/components.html)
        # UDP video stream (:5000)
        self.video_source = 'udpsrc port={}'.format(self.port)
        # [Rasp raw image](http://picamera.readthedocs.io/en/release-0.7/recipes2.html#raw-image-capture-yuv-format)
        # Cam -> CSI-2 -> H264 raw (I420 is YUV 4:2:0, 12 bits per pixel)
        # self.video_codec = '! application/x-rtp, payload=96 ! rtph264depay ! h264parse ! avdec_h264'
        self.video_codec = '! application/x-rtp, payload=26 ! rtpjpegdepay ! jpegdec'
        # Convert the 12-bit YUV frames to OpenCV's standard 24-bit BGR (8-8-8) bytes
        self.video_decode = \
            '! decodebin ! videoconvert ! video/x-raw,format=(string)BGR ! videoconvert'
        # Create a sink to get data
        self.video_sink_conf = \
            '! appsink emit-signals=true sync=false max-buffers=2 drop=true'

        self.video_pipe = None
        self.video_sink = None

        self.run()

    def start_gst(self, config=None):
        """ Start gstreamer pipeline and sink
        Pipeline description list e.g:
            [
                'videotestsrc ! decodebin', \
                '! videoconvert ! video/x-raw,format=(string)BGR ! videoconvert',
                '! appsink'
            ]

        Args:
            config (list, optional): GStreamer pipeline description list
        """

        if not config:
            config = \
                [
                    'videotestsrc ! decodebin',
                    '! videoconvert ! video/x-raw,format=(string)BGR ! videoconvert',
                    '! appsink'
                ]

        command = ' '.join(config)
        self.video_pipe = Gst.parse_launch(command)
        self.video_pipe.set_state(Gst.State.PLAYING)
        self.video_sink = self.video_pipe.get_by_name('appsink0')

    @staticmethod
    def gst_to_opencv(sample):
        """Transform byte array into np array

        Args:
            sample (TYPE): Description

        Returns:
            TYPE: Description
        """
        buf = sample.get_buffer()
        caps = sample.get_caps()
        array = np.ndarray(
            (
                caps.get_structure(0).get_value('height'),
                caps.get_structure(0).get_value('width'),
                3
            ),
            buffer=buf.extract_dup(0, buf.get_size()), dtype=np.uint8)
        return array

    def frame(self):
        """ Get Frame

        Returns:
            iterable: bool and image frame, cap.read() output
        """
        return self._frame

    def frame_available(self):
        """Check if frame is available

        Returns:
            bool: true if frame is available
        """
        return self._frame is not None

    def run(self):
        """ Get frame to update _frame
        """

        self.start_gst(
            [
                self.video_source,
                self.video_codec,
                self.video_decode,
                self.video_sink_conf
            ])

        self.video_sink.connect('new-sample', self.callback)

    def callback(self, sink):
        """Appsink 'new-sample' handler: convert the sample and cache the frame."""
        sample = sink.emit('pull-sample')
        new_frame = self.gst_to_opencv(sample)
        self._frame = new_frame

        return Gst.FlowReturn.OK


if __name__ == '__main__':
    # Create the video object
    # Pass port= if you need to use a different one
    video = Video(port=5000)

    while True:
        # Wait for the next frame
        if not video.frame_available():
            continue

        frame = video.frame()
        cv2.imshow('frame', frame)
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break
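When the loop exits, it is worth stopping the pipeline and closing the window so the script shuts down cleanly (a small addition, not in the referenced gist):

    # Stop the GStreamer pipeline and release the display window
    video.video_pipe.set_state(Gst.State.NULL)
    cv2.destroyAllWindows()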

References:

1:http://www.einarsundgren.se/gstreamer-basic-real-time-streaming-tutorial/

2:https://gist.github.com/patrickelectric/443645bb0fd6e71b34c504d20d475d5a
