From f9f5bea1603dc438bf68dc6d9674b3ba0eff6598 Mon Sep 17 00:00:00 2001 From: Your Name Date: Sat, 29 Feb 2020 14:18:50 -0500 Subject: [PATCH] added asynchronous video capture and video for both wide and far cameras --- cameras/camera.py | 14 ++--- cameras/video_async.py | 39 ++++++++++++++ controls.py | 4 +- main.py | 89 ++++++++++++++++++-------------- web/handlers/CameraFeedWS.py | 4 +- web/handlers/FarCameraFeedWS.py | 44 ++++++++++++++++ web/handlers/WideCameraFeedWS.py | 43 +++++++++++++++ web/handlers/__init__.py | 2 + web/tornado_server.py | 7 +++ web/www/camera.html | 55 ++++++++++++-------- web/www/ws_streamer.js | 30 +++++++---- 11 files changed, 246 insertions(+), 85 deletions(-) create mode 100644 cameras/video_async.py create mode 100644 web/handlers/FarCameraFeedWS.py create mode 100644 web/handlers/WideCameraFeedWS.py diff --git a/cameras/camera.py b/cameras/camera.py index 9cec60f..6596d9c 100644 --- a/cameras/camera.py +++ b/cameras/camera.py @@ -8,17 +8,6 @@ class USBCam(): def __init__(self, source=0): if main_controller.camera_mode == "CALIBRATE": print("opening") - # self.WIDTH = self.get(cv2.CAP_PROP_FRAME_WIDTH), - # self.HEIGHT = self.get(cv2.CAP_PROP_FRAME_HEIGHT), - # self.FPS = self.get(cv2.CAP_PROP_FPS) - # Set camera properties - # self.cam = cv2.VideoCapture(source) - # self.cam.set(cv2.CAP_PROP_FRAME_WIDTH, 640) - # self.cam.set(cv2.CAP_PROP_FRAME_HEIGHT, 480) - # self.cam.set(cv2.CAP_PROP_FPS, 120) - # self.cam.set(cv2.CAP_PROP_AUTO_EXPOSURE, 0.25) - # self.cam.set(cv2.CAP_PROP_EXPOSURE, 0.02) - # self.cam.set(cv2.CAP_PROP_CONTRAST, 0.0) def open(self, source): self.cam = cv2.VideoCapture(source) @@ -42,6 +31,9 @@ def read_image(self): def getCam(self): return self.cam + def stop(self): + self.cam.release() + class Camera(): def __init__(self, width, height, fps, flength=0): diff --git a/cameras/video_async.py b/cameras/video_async.py new file mode 100644 index 0000000..d16a08c --- /dev/null +++ b/cameras/video_async.py @@ -0,0 +1,39
@@ +import cv2 +import threading +from cameras.camera import USBCam + +class VideoCaptureAsync: + def __init__(self,camera): + self.camera = camera + self.grabbed, self.frame = self.camera.read() + self.started = False + self.read_lock = threading.Lock() + + + def startReading(self): + if self.started: + print('Started video capture async') + return None + self.started = True + self.thread = threading.Thread(target=self.update, args = ()) + self.thread.start() + + def update(self): + while self.started: + grabbed, frame = self.camera.read() + with self.read_lock: + self.frame = frame + self.grabbed = grabbed + + def read(self): + with self.read_lock: + frame = self.frame.copy() + grabbed = self.grabbed + return grabbed, frame + + def stop(self): + self.started = False + self.thread.join() + + def __exit__(self): + self.camera.stop() \ No newline at end of file diff --git a/controls.py b/controls.py index ceadb5b..2ee9fae 100644 --- a/controls.py +++ b/controls.py @@ -11,13 +11,13 @@ class Controls(): def __init__(self): self.enable_camera = True - self.enable_camera_feed = False + self.enable_camera_feed = True self.enable_calibration_feed = False self.enable_processing_feed = True self.enable_dual_camera = True self.send_tracking_data = True - self.camera_mode = CAMERA_MODE_BALL + self.camera_mode = CAMERA_MODE_RAW self.enable_feed = True self.color_profiles = {} diff --git a/main.py b/main.py index 97c90fd..f6267f3 100644 --- a/main.py +++ b/main.py @@ -10,12 +10,13 @@ from cameras import logitech_c270, generic from cameras.camera import USBCam, Camera from cameras import image_converter +from cameras.video_async import VideoCaptureAsync from processing import bay_tracker from processing import port_tracker from processing import ball_tracker2 from processing import color_calibrate - +from processing import cvfilters import controls from controls import main_controller @@ -53,8 +54,6 @@ def main(): # main method defined - cv2.destroyAllWindows() - # 
networktables.init(client=False) # dashboard = networktables.get() @@ -64,22 +63,18 @@ def main(): # main method defined # cap = cv2.VideoCapture(config.video_source_number) # cap set to a cv2 object with input from a preset source - mainCam = USBCam() - mainCam.open(config.video_source_number) + wideCam = USBCam() + wideCam.open(config.video_source_number) + wideVideo = VideoCaptureAsync(wideCam) + wideVideo.startReading() if(main_controller.enable_dual_camera): - longCam = USBCam() - longCam.open(config.wide_video_source_number) - - # Set camera properties - # cap.set(cv2.CAP_PROP_FRAME_WIDTH, 640) - # cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 480) - # cap.set(cv2.CAP_PROP_FPS, 120) - # cap.set(cv2.CAP_PROP_AUTO_EXPOSURE, 0.25) - # cap.set(cv2.CAP_PROP_EXPOSURE, 0.02) - # cap.set(cv2.CAP_PROP_CONTRAST, 0.0) + farCam = USBCam() + farCam.open(config.wide_video_source_number) + farVideo = VideoCaptureAsync(farCam) + farVideo.startReading() - cap = mainCam.getCam() + cap = wideCam.getCam() # Set camera properties camera = Camera(cap.get(cv2.CAP_PROP_FRAME_WIDTH), cap.get(cv2.CAP_PROP_FRAME_HEIGHT), @@ -97,17 +92,22 @@ def main(): # main method defined time.sleep(5) - camera_ws = create_connection("ws://localhost:5805/camera/ws") + # camera_ws = create_connection("ws://localhost:5805/camera/ws") + wide_camera_ws = create_connection("ws://localhost:5805/wide_camera/ws") + far_camera_ws = create_connection("ws://localhost:5805/far_camera/ws") processed_ws = create_connection("ws://localhost:5805/processed/ws") calibration_ws = create_connection("ws://localhost:5805/calibration/ws") tracking_ws = create_connection("ws://localhost:5805/tracking/ws") controller_listener.start("ws://localhost:5805/dashboard/ws") + + + logger.info('starting main loop ') frame_cnt = 0 while(True): - tracking_data = None + tracking_data = [] frame_cnt += 1 @@ -116,28 +116,41 @@ def main(): # main method defined if not cap.isOpened(): print('opening camera') if main_controller.enable_dual_camera: - 
longCam.open(config.video_source_number) - mainCam.open(config.wide_video_source_number) + farCam.open(config.wide_video_source_number) + wideCam.open(config.video_source_number) # if the cap is not already open, do so - - if main_controller.camera_mode == CAMERA_MODE_HEXAGON and main_controller.enable_dual_camera: - _, bgr_frame = longCam.read() - else: - _, bgr_frame = mainCam.read() - resized_frame = cv2.resize(bgr_frame, ((int)(640), (int)(480)), 0, 0, cv2.INTER_CUBIC) - rgb_frame = cv2.cvtColor(resized_frame, cv2.COLOR_BGR2RGB) + + + _, wide_bgr_frame = wideVideo.read() + wide_resized_frame = cvfilters.resize(wide_bgr_frame, 640, 480) + wide_rgb_frame = cv2.cvtColor(wide_resized_frame, cv2.COLOR_BGR2RGB) + + if main_controller.enable_dual_camera: + _, far_bgr_frame = farVideo.read() + far_resized_frame = cvfilters.resize(far_bgr_frame, 640, 480) + far_rgb_frame = cv2.cvtColor(far_resized_frame, cv2.COLOR_BGR2RGB) + + + + if main_controller.enable_camera_feed: - jpg=image_converter.convert_to_jpg(rgb_frame) - camera_ws.send_binary(jpg) + wide_jpg=image_converter.convert_to_jpg(wide_rgb_frame) + far_jpg=image_converter.convert_to_jpg(far_rgb_frame) + wide_camera_ws.send_binary(wide_jpg) + far_camera_ws.send_binary(far_jpg) + + # camera_ws.send_binary(jpg) # take rgb frame and convert it to a displayable jpg form, then send that as binary through websocket if main_controller.enable_calibration_feed: - - calibration_frame = rgb_frame.copy() + if main_controller.camera_mode == CAMERA_MODE_HEXAGON: + calibration_frame = far_rgb_frame.copy() + else: + calibration_frame = wide_rgb_frame.copy() calibration_frame = color_calibrate.process(calibration_frame, camera_mode = main_controller.calibration.get('camera_mode', 'RAW'), @@ -150,7 +163,7 @@ def main(): # main method defined if main_controller.camera_mode == CAMERA_MODE_RAW: - processed_frame = rgb_frame + processed_frame = wide_rgb_frame # Camera mode set to "raw" - takes rgb frame elif main_controller.camera_mode ==
CAMERA_MODE_LOADING_BAY: @@ -158,7 +171,7 @@ def main(): # main method defined color_profile=main_controller.color_profiles[CAMERA_MODE_LOADING_BAY] # Set color profile to that of "camera mode loading bay" - processed_frame, tracking_data = bay_tracker.process(rgb_frame, + processed_frame, tracking_data = bay_tracker.process(wide_rgb_frame, camera, frame_cnt, color_profile) @@ -169,7 +182,7 @@ def main(): # main method defined color_profile=main_controller.color_profiles[CAMERA_MODE_BALL] # color profile set to the CAMERA MODE BALL one # print("ball") - processed_frame, tracking_data = ball_tracker2.process(rgb_frame, + processed_frame, tracking_data = ball_tracker2.process(wide_rgb_frame, camera, frame_cnt, color_profile) @@ -178,7 +191,7 @@ def main(): # main method defined color_profile=main_controller.color_profiles[CAMERA_MODE_HEXAGON] - processed_frame, tracking_data = port_tracker.process(rgb_frame, + processed_frame, tracking_data = port_tracker.process(far_rgb_frame, camera, frame_cnt, color_profile) @@ -207,8 +220,6 @@ def main(): # main method defined logger.info(tracking_data) tracking_data = sorted(tracking_data, key = lambda i: i['dist']) tracking_ws.send(json.dumps(dict(targets=tracking_data))) - # put into networktables - # dashboard.putStringArray(networktables.keys.vision_target_data, tracking_data) # cv2.imshow('frame', processed_frame ) # cv2.waitKey(0) @@ -218,7 +229,9 @@ def main(): # main method defined # IDLE mode if cap.isOpened(): print('closing camera') - cap.release() + wideCam.stop() + if main_controller.enable_dual_camera: + farCam.stop() time.sleep(.3) # if cv2.waitKey(1) & 0xFF == ord('q'): diff --git a/web/handlers/CameraFeedWS.py b/web/handlers/CameraFeedWS.py index cd442a5..966b7f3 100644 --- a/web/handlers/CameraFeedWS.py +++ b/web/handlers/CameraFeedWS.py @@ -11,6 +11,7 @@ class CameraFeedWS(WebSocketHandler): def open(self): self.uid = str(uuid.uuid4()) logger.info("CameraFeed websocket opened %s" % self.uid) + 
self.write_message('connected') self.write_message(json.dumps({ 'socketid':self.uid })) @@ -38,4 +39,5 @@ def on_message(self, message): def on_close(self): logger.info("CameraFeed websocket closed %s" % self.uid) - CameraFeedWS.watchers.remove(self) + if self in CameraFeedWS.watchers: + CameraFeedWS.watchers.remove(self) diff --git a/web/handlers/FarCameraFeedWS.py b/web/handlers/FarCameraFeedWS.py new file mode 100644 index 0000000..6f2f722 --- /dev/null +++ b/web/handlers/FarCameraFeedWS.py @@ -0,0 +1,44 @@ +import uuid +from tornado.websocket import WebSocketHandler, WebSocketClosedError +import logging +import json +logger = logging.getLogger(__name__) + +class FarCameraFeedWS(WebSocketHandler): + """ + watchers is a class level array, anyone connecting shares the same array + """ + watchers = set() + def open(self): + self.uid = str(uuid.uuid4()) + logger.info("CameraFeed websocket opened %s" % self.uid) + self.write_message('connected') + self.write_message(json.dumps({ + 'socketid':self.uid + })) + + def check_origin(self, origin): + """ + Allow CORS requests + """ + return True + + """ + broadcast to clients, assumes its target data + """ + def on_message(self, message): + # logger.info('pushing image') + if isinstance(message, str): + logger.info(message) + if message == 'open feed': + FarCameraFeedWS.watchers.add(self) + if message == 'close feed': + FarCameraFeedWS.watchers.remove(self) + else: + for waiter in FarCameraFeedWS.watchers: + waiter.write_message(message, binary=True) + + def on_close(self): + logger.info("CameraFeed websocket closed %s" % self.uid) + if self in FarCameraFeedWS.watchers: + FarCameraFeedWS.watchers.remove(self) diff --git a/web/handlers/WideCameraFeedWS.py b/web/handlers/WideCameraFeedWS.py new file mode 100644 index 0000000..f787459 --- /dev/null +++ b/web/handlers/WideCameraFeedWS.py @@ -0,0 +1,43 @@ +import uuid +from tornado.websocket import WebSocketHandler, WebSocketClosedError +import logging +import json +logger = 
logging.getLogger(__name__) + +class WideCameraFeedWS(WebSocketHandler): + """ + """ + watchers = set() + def open(self): + self.uid = str(uuid.uuid4()) + logger.info("WideCameraFeed websocket opened %s" % self.uid) + self.write_message('connected') + self.write_message(json.dumps({ + 'socketid':self.uid + })) + + def check_origin(self, origin): + """ + Allow CORS requests + """ + return True + + """ + broadcast to clients, assumes its target data + """ + def on_message(self, message): + # logger.info('pushing image') + if isinstance(message, str): + logger.info(message) + if message == 'open feed': + WideCameraFeedWS.watchers.add(self) + if message == 'close feed': + WideCameraFeedWS.watchers.remove(self) + else: + for waiter in WideCameraFeedWS.watchers: + waiter.write_message(message, binary=True) + + def on_close(self): + logger.info("CameraFeed websocket closed %s" % self.uid) + if self in WideCameraFeedWS.watchers: + WideCameraFeedWS.watchers.remove(self) diff --git a/web/handlers/__init__.py b/web/handlers/__init__.py index 65241f8..a004f2e 100644 --- a/web/handlers/__init__.py +++ b/web/handlers/__init__.py @@ -4,3 +4,5 @@ from .ObjectTrackingWS import ObjectTrackingWS from .ProcessedVideoWS import ProcessedVideoWS from .CalibrationFeedWS import CalibrationFeedWS +from .FarCameraFeedWS import FarCameraFeedWS +from .WideCameraFeedWS import WideCameraFeedWS diff --git a/web/tornado_server.py b/web/tornado_server.py index e1062c9..1baefae 100644 --- a/web/tornado_server.py +++ b/web/tornado_server.py @@ -13,6 +13,8 @@ from web.handlers import ControllerWS from web.handlers import ObjectTrackingWS from web.handlers import CameraFeedWS +from web.handlers import FarCameraFeedWS +from web.handlers import WideCameraFeedWS from web.handlers import ProcessedVideoWS from web.handlers import CalibrationFeedWS @@ -40,11 +42,16 @@ def start(): ("/dashboard/ws", ControllerWS), ("/tracking/ws", ObjectTrackingWS), (r"/camera/ws", CameraFeedWS), + (r"/wide_camera/ws", 
WideCameraFeedWS), + (r"/far_camera/ws", FarCameraFeedWS), (r"/processed/ws", ProcessedVideoWS), (r"/calibration/ws", CalibrationFeedWS ), (r"/calibrate/()", NonCachingStaticFileHandler, {"path": join(www_dir, "calibrate.html")}), (r"/processing/()", NonCachingStaticFileHandler, {"path": join(www_dir, "processed.html")}), (r"/camera/()", NonCachingStaticFileHandler, {"path": join(www_dir, "camera.html")}), + (r"/wide_camera/()", NonCachingStaticFileHandler, {"path": join(www_dir, "camera.html")}), + (r"/far_camera/()", NonCachingStaticFileHandler, {"path": join(www_dir, "camera.html")}), + (r"/()", NonCachingStaticFileHandler, {"path": join(www_dir, "index.html")}), #(r'/lib/(.*)', StaticFileHandler, {"path": lib_dir}), (r"/(.*)", NonCachingStaticFileHandler, {"path": www_dir}) diff --git a/web/www/camera.html b/web/www/camera.html index c93511b..6ce0b60 100644 --- a/web/www/camera.html +++ b/web/www/camera.html @@ -8,7 +8,7 @@ - Jetson Calibrate + Jetson Cameras @@ -36,13 +36,7 @@ - -
- -
+
@@ -61,18 +55,15 @@
-
- -
-
- -
- -
- +
+ +
+
+
+ +
@@ -112,20 +103,40 @@ self.enable_calibration_feed = data.enable_calibration_feed } } - start_camera_stream("/camera/ws", "image"); + start_camera_stream("/processed/ws", "processed_image"); + start_camera_stream("/far_camera/ws", "far_raw_image"); + start_camera_stream("/wide_camera/ws", "wide_raw_image"); + + + }, methods: { toggleCameraFeed: function() { this.controls_ws.send(JSON.stringify({request_type: 'contols', enable_camera_feed: !this.enable_camera_feed})) + this.showImage(this.enable_camera_feed, "wide_raw_image") + this.showImage(this.enable_camera_feed, "far_raw_image") + + }, toggleProcessingFeed: function() { this.controls_ws.send(JSON.stringify({request_type: 'contols', enable_processing_feed: !this.enable_processing_feed})) + this.showImage(this.enable_processing_feed, "processed_image") + }, - toggleCalibrationFeed: function() { - this.controls_ws.send(JSON.stringify({request_type: 'contols', - enable_calibration_feed: !this.enable_calibration_feed})) + + + showImage : function(enabled, image){ + var frame = document.getElementById(image) + if(!enabled){ + console.log("visible"); + frame.style.visibility= 'visible'; + } + else{ + console.log("invisible"); + frame.style.visibility = 'hidden'; + } } } diff --git a/web/www/ws_streamer.js b/web/www/ws_streamer.js index d5e8705..0676faa 100644 --- a/web/www/ws_streamer.js +++ b/web/www/ws_streamer.js @@ -10,11 +10,13 @@ function new_web_socket(uri_path) { return ws; } +var imageMap = {}; +var socketMap = {}; var start_camera_stream = function( websocket_source, target) { - var image = document.getElementById(target) - ws_imagestream = new_web_socket( websocket_source); + imageMap[websocket_source] = document.getElementById(target) + socketMap[websocket_source] = new_web_socket( websocket_source); time_0 = (new Date()).getTime(); counter = 0; @@ -33,21 +35,27 @@ var start_camera_stream = function( websocket_source, target) { } - ws_imagestream.onmessage = function(e) { + 
socketMap[websocket_source].onmessage = function(e) { + + if(e.data == 'connected'){ + console.log('opening feed ' + websocket_source); + socketMap[websocket_source].send("open feed"); + } + if (e.data instanceof Blob) { - update_fps() - image.src = URL.createObjectURL(e.data); - image.onload = function() { - URL.revokeObjectURL(image.src); + //update_fps() + imageMap[websocket_source].src = URL.createObjectURL(e.data); + imageMap[websocket_source].onload = function() { + URL.revokeObjectURL(imageMap[websocket_source].src); } } } - ws_imagestream.onopen = function() { - console.log('connected ws_imagestream...'); - ws_imagestream.send("open feed") + socketMap[websocket_source].onopen = function() { + console.log('connected ' + websocket_source); + //ws_imagestream.send("open feed") }; - ws_imagestream.onclose = function() { + socketMap[websocket_source].onclose = function() { console.log('closed feed '); }; };