Path: blob/master/libraries/AP_Camera/examples/gst_udp_to_wx.py
""""1Capture a UDP video stream and display in wxpython23Usage4-----561. udpsink78gst-launch-1.0 -v videotestsrc ! 'video/x-raw,format=I420,width=640,height=480,framerate=50/1' ! queue ! videoconvert ! x264enc bitrate=800 speed-preset=6 tune=4 key-int-max=10 ! rtph264pay ! udpsink host=127.0.0.1 port=56009102. display in wxpython1112python ./gst_udp_to_wx.py1314Acknowledgments15---------------1617Video class to capture GStreamer frames18https://www.ardusub.com/developers/opencv.html1920ImagePanel class to display openCV images in wxWidgets21https://stackoverflow.com/questions/14804741/opencv-integration-with-wxpython22"""2324# flake8: noqa2526import copy27import cv228import gi29import numpy as np30import threading31import wx323334gi.require_version("Gst", "1.0")35from gi.repository import Gst363738class VideoStream:39"""BlueRov video capture class constructor4041Attributes:42port (int): Video UDP port43video_codec (string): Source h264 parser44video_decode (string): Transform YUV (12bits) to BGR (24bits)45video_pipe (object): GStreamer top-level pipeline46video_sink (object): Gstreamer sink element47video_sink_conf (string): Sink configuration48video_source (string): Udp source ip and port49latest_frame (np.ndarray): Latest retrieved video frame50"""5152def __init__(self, port=5600):53"""Summary5455Args:56port (int, optional): UDP port57"""5859Gst.init(None)6061self.port = port62self.latest_frame = self._new_frame = None6364# [Software component diagram](https://www.ardusub.com/software/components.html)65# UDP video stream (:5600)66self.video_source = "udpsrc port={}".format(self.port)67# [Rasp raw image](http://picamera.readthedocs.io/en/release-0.7/recipes2.html#raw-image-capture-yuv-format)68# Cam -> CSI-2 -> H264 Raw (YUV 4-4-4 (12bits) I420)69self.video_codec = (70"! application/x-rtp, payload=96 ! rtph264depay ! h264parse ! avdec_h264"71)72# Python don't have nibble, convert YUV nibbles (4-4-4) to OpenCV standard BGR bytes (8-8-8)73self.video_decode = (74"! decodebin ! videoconvert ! video/x-raw,format=(string)BGR ! videoconvert"75)76# Create a sink to get data77self.video_sink_conf = (78"! appsink emit-signals=true sync=false max-buffers=2 drop=true"79)8081self.video_pipe = None82self.video_sink = None8384self.run()8586def start_gst(self, config=None):87""" Start gstreamer pipeline and sink88Pipeline description list e.g:89[90'videotestsrc ! decodebin', \91'! videoconvert ! video/x-raw,format=(string)BGR ! videoconvert',92'! appsink'93]9495Args:96config (list, optional): Gstreamer pileline description list97"""9899if not config:100config = [101"videotestsrc ! decodebin",102"! videoconvert ! video/x-raw,format=(string)BGR ! videoconvert",103"! 
appsink",104]105106command = " ".join(config)107self.video_pipe = Gst.parse_launch(command)108self.video_pipe.set_state(Gst.State.PLAYING)109self.video_sink = self.video_pipe.get_by_name("appsink0")110111@staticmethod112def gst_to_opencv(sample):113"""Transform byte array into np array114115Args:116sample (TYPE): Description117118Returns:119TYPE: Description120"""121buf = sample.get_buffer()122caps_structure = sample.get_caps().get_structure(0)123array = np.ndarray(124(caps_structure.get_value("height"), caps_structure.get_value("width"), 3),125buffer=buf.extract_dup(0, buf.get_size()),126dtype=np.uint8,127)128return array129130def frame(self):131"""Get Frame132133Returns:134np.ndarray: latest retrieved image frame135"""136if self.frame_available:137self.latest_frame = self._new_frame138# reset to indicate latest frame has been 'consumed'139self._new_frame = None140return self.latest_frame141142def frame_available(self):143"""Check if a new frame is available144145Returns:146bool: true if a new frame is available147"""148return self._new_frame is not None149150def run(self):151"""Get frame to update _new_frame"""152153self.start_gst(154[155self.video_source,156self.video_codec,157self.video_decode,158self.video_sink_conf,159]160)161162self.video_sink.connect("new-sample", self.callback)163164def callback(self, sink):165sample = sink.emit("pull-sample")166self._new_frame = self.gst_to_opencv(sample)167168return Gst.FlowReturn.OK169170171class ImagePanel(wx.Panel):172def __init__(self, parent, video_stream, fps=30):173wx.Panel.__init__(self, parent)174175self._video_stream = video_stream176177# Shared between threads178self._frame_lock = threading.Lock()179self._latest_frame = None180181print("Waiting for video stream...")182waited = 0183while not self._video_stream.frame_available():184waited += 1185print("\r Frame not available (x{})".format(waited), end="")186cv2.waitKey(30)187print("\nSuccess! Video stream available")188189if self._video_stream.frame_available():190# Only retrieve and display a frame if it's new191frame = copy.deepcopy(self._video_stream.frame())192193# Frame size194height, width, _ = frame.shape195196parent.SetSize((width, height))197frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)198199self.bmp = wx.Bitmap.FromBuffer(width, height, frame)200201self.timer = wx.Timer(self)202self.timer.Start(int(1000 / fps))203204self.Bind(wx.EVT_PAINT, self.OnPaint)205self.Bind(wx.EVT_TIMER, self.NextFrame)206207def OnPaint(self, evt):208dc = wx.BufferedPaintDC(self)209dc.DrawBitmap(self.bmp, 0, 0)210211def NextFrame(self, event):212if self._video_stream.frame_available():213frame = copy.deepcopy(self._video_stream.frame())214215# Convert frame to bitmap for wxFrame216frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)217self.bmp.CopyFromBuffer(frame)218self.Refresh()219220221def main():222223# create the video stream224video_stream = VideoStream(port=5600)225226# app must run on the main thread227app = wx.App()228wx_frame = wx.Frame(None)229230# create the image panel231image_panel = ImagePanel(wx_frame, video_stream, fps=30)232233wx_frame.Show()234app.MainLoop()235236237if __name__ == "__main__":238main()239240241