Path: blob/master/libraries/AP_Camera/examples/gst_rtsp_to_wx.py
""""1Capture a RTSP video stream and display in wxpython23Usage4-----561. rtsp server78python ./gst_rtsp_server.py9103. display in wxpython1112python ./gst_rtsp_to_wx.py1314Acknowledgments15---------------1617Video class to capture GStreamer frames18https://www.ardusub.com/developers/opencv.html1920ImagePanel class to display openCV images in wxWidgets21https://stackoverflow.com/questions/14804741/opencv-integration-with-wxpython22"""2324import copy25import cv226import gi27import numpy as np28import threading29import wx303132gi.require_version("Gst", "1.0")33from gi.repository import Gst343536class VideoStream:37"""BlueRov video capture class constructor - adapted to capture rtspsrc3839Attributes:40address (string): RTSP address41port (int): RTSP port42mount_point (string): video stream mount point43video_decode (string): Transform YUV (12bits) to BGR (24bits)44video_pipe (object): GStreamer top-level pipeline45video_sink (object): Gstreamer sink element46video_sink_conf (string): Sink configuration47video_source (string): Udp source ip and port48latest_frame (np.ndarray): Latest retrieved video frame49"""5051def __init__(52self, address="127.0.0.1", port=8554, mount_point="/camera", latency=5053):54Gst.init(None)5556self.address = address57self.port = port58self.mount_point = mount_point59self.latency = latency6061self.latest_frame = self._new_frame = None6263self.video_source = (64f"rtspsrc location=rtsp://{address}:{port}{mount_point} latency={latency}"65)6667# Python does not have nibble, convert YUV nibbles (4-4-4) to OpenCV standard BGR bytes (8-8-8)68self.video_decode = (69"! decodebin ! videoconvert ! video/x-raw,format=(string)BGR ! videoconvert"70)71# Create a sink to get data72self.video_sink_conf = (73"! appsink emit-signals=true sync=false max-buffers=2 drop=true"74)7576self.video_pipe = None77self.video_sink = None7879self.run()8081def start_gst(self, config=None):82""" Start gstreamer pipeline and sink83Pipeline description list e.g:84[85'videotestsrc ! decodebin', \86'! videoconvert ! video/x-raw,format=(string)BGR ! videoconvert',87'! appsink'88]8990Args:91config (list, optional): Gstreamer pileline description list92"""9394if not config:95config = [96"videotestsrc ! decodebin",97"! videoconvert ! video/x-raw,format=(string)BGR ! videoconvert",98"! 
appsink",99]100101command = " ".join(config)102self.video_pipe = Gst.parse_launch(command)103self.video_pipe.set_state(Gst.State.PLAYING)104self.video_sink = self.video_pipe.get_by_name("appsink0")105106@staticmethod107def gst_to_opencv(sample):108"""Transform byte array into np array109110Args:111sample (TYPE): Description112113Returns:114TYPE: Description115"""116buf = sample.get_buffer()117caps_structure = sample.get_caps().get_structure(0)118array = np.ndarray(119(caps_structure.get_value("height"), caps_structure.get_value("width"), 3),120buffer=buf.extract_dup(0, buf.get_size()),121dtype=np.uint8,122)123return array124125def frame(self):126"""Get Frame127128Returns:129np.ndarray: latest retrieved image frame130"""131if self.frame_available:132self.latest_frame = self._new_frame133# reset to indicate latest frame has been 'consumed'134self._new_frame = None135return self.latest_frame136137def frame_available(self):138"""Check if a new frame is available139140Returns:141bool: true if a new frame is available142"""143return self._new_frame is not None144145def run(self):146"""Get frame to update _new_frame"""147148self.start_gst(149[150self.video_source,151self.video_decode,152self.video_sink_conf,153]154)155156self.video_sink.connect("new-sample", self.callback)157158def callback(self, sink):159sample = sink.emit("pull-sample")160self._new_frame = self.gst_to_opencv(sample)161162return Gst.FlowReturn.OK163164165class ImagePanel(wx.Panel):166def __init__(self, parent, video_stream, fps=30):167wx.Panel.__init__(self, parent)168169self._video_stream = video_stream170171# Shared between threads172self._frame_lock = threading.Lock()173self._latest_frame = None174175print("Waiting for video stream...")176waited = 0177while not self._video_stream.frame_available():178waited += 1179print("\r Frame not available (x{})".format(waited), end="")180cv2.waitKey(30)181print("\nSuccess! Video stream available")182183if self._video_stream.frame_available():184# Only retrieve and display a frame if it's new185frame = copy.deepcopy(self._video_stream.frame())186187# Frame size188height, width, _ = frame.shape189190parent.SetSize((width, height))191frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)192193self.bmp = wx.Bitmap.FromBuffer(width, height, frame)194195self.timer = wx.Timer(self)196self.timer.Start(int(1000 / fps))197198self.Bind(wx.EVT_PAINT, self.OnPaint)199self.Bind(wx.EVT_TIMER, self.NextFrame)200201def OnPaint(self, evt):202dc = wx.BufferedPaintDC(self)203dc.DrawBitmap(self.bmp, 0, 0)204205def NextFrame(self, event):206if self._video_stream.frame_available():207frame = copy.deepcopy(self._video_stream.frame())208209# Convert frame to bitmap for wxFrame210frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)211self.bmp.CopyFromBuffer(frame)212self.Refresh()213214215def main():216217# create the video stream218video_stream = VideoStream(mount_point="/camera")219220# app must run on the main thread221app = wx.App()222wx_frame = wx.Frame(None)223224# create the image panel225image_panel = ImagePanel(wx_frame, video_stream, fps=30)226227wx_frame.Show()228app.MainLoop()229230231if __name__ == "__main__":232main()233234235