diff --git a/examples/python/RVC4/FeatureTracker/feature_detector_from_host_camera.py b/examples/python/RVC4/FeatureTracker/feature_detector_from_host_camera.py
index 4bc93377f..08e5fb081 100644
--- a/examples/python/RVC4/FeatureTracker/feature_detector_from_host_camera.py
+++ b/examples/python/RVC4/FeatureTracker/feature_detector_from_host_camera.py
@@ -12,10 +12,12 @@ def drawFeatures(frame, features):
     for feature in features:
         # descriptor = feature.descriptor
        # print(descriptor)
-        if feature.age < 2:
-            cv2.circle(frame, (int(feature.position.x), int(feature.position.y)), circleRadius, greenColor, -1, cv2.LINE_AA, 0)
-        else:
+        if feature.age > 2:
             cv2.circle(frame, (int(feature.position.x), int(feature.position.y)), circleRadius, pointColor, -1, cv2.LINE_AA, 0)
+        # elif feature.age < 255:
+        #     cv2.circle(frame, (int(feature.position.x), int(feature.position.y)), circleRadius, (0, 0, feature.age), -1, cv2.LINE_AA, 0)
+        else:
+            cv2.circle(frame, (int(feature.position.x), int(feature.position.y)), circleRadius, greenColor, -1, cv2.LINE_AA, 0)

 class HostCamera(dai.node.ThreadedHostNode):
     def __init__(self):
@@ -23,13 +25,16 @@ def __init__(self):
         self.output = dai.Node.Output(self)
     def run(self):
         # Create a VideoCapture object
-        cap = cv2.VideoCapture(0)
+        cap = cv2.VideoCapture("/home/nebojsa/Documents/depthai-device-kb/external/depthai-core/examples/python/models/construction_vest.mp4")
+        # cap = cv2.VideoCapture(0)
         if not cap.isOpened():
             pipeline.stop()
             raise RuntimeError("Error: Couldn't open host camera")
         while self.isRunning():
             # frame = depthImg
             ret, frame = cap.read()
+            # frame = cv2.imread("/home/nebojsa/Downloads/image.jpg")
+            frame = cv2.resize(frame, (640, 640), interpolation = cv2.INTER_LINEAR)
             frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
             imgFrame = dai.ImgFrame()
             imgFrame.setData(frame)
@@ -52,12 +57,15 @@ def run(self):
 initialRobustness = 100

 def on_trackbar(val):
-    cfg = dai.FeatureTrackerConfigRvc4()
+    cfg = dai.FeatureTrackerConfig()
+    cornerDetector = dai.FeatureTrackerConfig.CornerDetector()
+    cornerDetector.numMaxFeatures = cv2.getTrackbarPos('numMaxFeatures', 'Features')

-    cfg.setHarrisCornerDetectorThreshold(cv2.getTrackbarPos('harris_score','Features'))
-    cfg.setHarrisCornerDetectorRobustness(cv2.getTrackbarPos('robustness','Features'))
-    cfg.setNumMaxFeatures(cv2.getTrackbarPos('numMaxFeatures', 'Features'))
+    thresholds = dai.FeatureTrackerConfig.CornerDetector.Thresholds()
+    thresholds.initialValue = cv2.getTrackbarPos('harris_score','Features')
+    cornerDetector.thresholds = thresholds

+    cfg.setCornerDetector(cornerDetector)
     cfg.setMotionEstimator(motionEstimator)
     inputConfigQueue.send(cfg)
@@ -67,7 +75,7 @@ def on_trackbar(val):

 # create trackbars threshold and robustness change
 cv2.createTrackbar('harris_score','Features',2000,25000, on_trackbar)
-cv2.createTrackbar('robustness','Features',100,127, on_trackbar)
+# cv2.createTrackbar('robustness','Features',100,127, on_trackbar)
 cv2.createTrackbar('numMaxFeatures','Features',256,1024, on_trackbar)
@@ -77,12 +85,24 @@ def on_trackbar(val):
     hostCamera = pipeline.create(HostCamera)
     camQueue = hostCamera.output.createOutputQueue()

-    featureTrackerLeft = pipeline.create(dai.node.FeatureTrackerRvc4)
+    featureTrackerLeft = pipeline.create(dai.node.FeatureTracker)

-    featureTrackerLeft.initialConfig.setHarrisCornerDetectorThreshold(initialThreshold)
-    featureTrackerLeft.initialConfig.setHarrisCornerDetectorRobustness(initialRobustness)
-    featureTrackerLeft.initialConfig.setCornerDetector(dai.FeatureTrackerConfigRvc4.CornerDetector.Type.HARRIS)
-    featureTrackerLeft.initialConfig.setNumMaxFeatures(256)
+    # featureTrackerLeft.initialConfig.setHarrisCornerDetectorThreshold(initialThreshold)
+    # featureTrackerLeft.initialConfig.setHarrisCornerDetectorRobustness(initialRobustness)
+    featureTrackerLeft.initialConfig.setCornerDetector(dai.FeatureTrackerConfig.CornerDetector.Type.HARRIS)
+    featureTrackerLeft.initialConfig.setMotionEstimator(False)
+
+    motionEstimator = dai.FeatureTrackerConfig.MotionEstimator()
+    motionEstimator.enable = True
+    featureTrackerLeft.initialConfig.setMotionEstimator(motionEstimator)
+
+    cornerDetector = dai.FeatureTrackerConfig.CornerDetector()
+    cornerDetector.numMaxFeatures = 256
+
+
+
+
+    # featureTrackerLeft.initialConfig.setNumMaxFeatures(256)

     outputFeaturePassthroughQueue = featureTrackerLeft.passthroughInputImage.createOutputQueue()
@@ -91,10 +111,12 @@ def on_trackbar(val):
     hostCamera.output.link(featureTrackerLeft.inputImage)
     inputConfigQueue = featureTrackerLeft.inputConfig.createInputQueue()

-    motionEstimator = dai.FeatureTrackerConfigRvc4.MotionEstimator()
-    motionEstimator.enable = False
-    thresholds = dai.FeatureTrackerConfigRvc4.CornerDetector.Thresholds()
+    thresholds = dai.FeatureTrackerConfig.CornerDetector.Thresholds()
+    thresholds.initialValue = cv2.getTrackbarPos('harris_score','Features')
+
+    cornerDetector.thresholds = thresholds
+    featureTrackerLeft.initialConfig.setCornerDetector(cornerDetector)

     pipeline.start()
     while pipeline.isRunning():
@@ -118,10 +140,17 @@ def on_trackbar(val):
         if key == ord('q'):
             break
         elif key == ord('m'):
-            cfg = dai.FeatureTrackerConfigRvc4()
-            cfg.setHarrisCornerDetectorThreshold(cv2.getTrackbarPos('harris_score','Features'))
-            cfg.setHarrisCornerDetectorRobustness(cv2.getTrackbarPos('robustness','Features'))
-            cfg.setNumMaxFeatures(cv2.getTrackbarPos('numMaxFeatures', 'Features'))
+            cfg = dai.FeatureTrackerConfig()
+            cornerDetector = dai.FeatureTrackerConfig.CornerDetector()
+            cornerDetector.numMaxFeatures = cv2.getTrackbarPos('numMaxFeatures', 'Features')
+
+            thresholds = dai.FeatureTrackerConfig.CornerDetector.Thresholds()
+            thresholds.initialValue = cv2.getTrackbarPos('harris_score','Features')
+            cornerDetector.thresholds = thresholds
+
+            cfg.setCornerDetector(cornerDetector)
+            cfg.setMotionEstimator(motionEstimator)
+
             if motionEstimator.enable == False:
                 motionEstimator.enable = True
                 cfg.setMotionEstimator(motionEstimator)
diff --git a/examples/python/RVC4/FeatureTracker/feature_tracker_from_host_camera.py b/examples/python/RVC4/FeatureTracker/feature_tracker_from_host_camera.py
index 040d9dbb8..5b88ab339 100644
--- a/examples/python/RVC4/FeatureTracker/feature_tracker_from_host_camera.py
+++ b/examples/python/RVC4/FeatureTracker/feature_tracker_from_host_camera.py
@@ -11,13 +11,16 @@ def __init__(self):
         self.output = dai.Node.Output(self)
     def run(self):
         # Create a VideoCapture object
-        cap = cv2.VideoCapture(0)
+        cap = cv2.VideoCapture("/home/nebojsa/Documents/depthai-device-kb/external/depthai-core/examples/python/models/construction_vest.mp4")
+        # cap = cv2.VideoCapture(0)
         if not cap.isOpened():
             pipeline.stop()
             raise RuntimeError("Error: Couldn't open host camera")
         while self.isRunning():
             # frame = depthImg
             ret, frame = cap.read()
+            # frame = cv2.imread("/home/nebojsa/Downloads/image.jpg")
+            frame = cv2.resize(frame, (640, 640), interpolation = cv2.INTER_LINEAR)
             frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
             imgFrame = dai.ImgFrame()
             imgFrame.setData(frame)
@@ -109,12 +112,15 @@ def __init__(self, trackbarName, windowName):
 initialRobustness = 100

 def on_trackbar(val):
-    cfg = dai.FeatureTrackerConfigRvc4()
+    cfg = dai.FeatureTrackerConfig()
+    cornerDetector = dai.FeatureTrackerConfig.CornerDetector()
+    cornerDetector.numMaxFeatures = cv2.getTrackbarPos('numMaxFeatures', 'Features')

-    cfg.setHarrisCornerDetectorThreshold(cv2.getTrackbarPos('harris_score','Features'))
-    cfg.setHarrisCornerDetectorRobustness(cv2.getTrackbarPos('robustness','Features'))
-    cfg.setNumMaxFeatures(cv2.getTrackbarPos('numMaxFeatures', 'Features'))
+    thresholds = dai.FeatureTrackerConfig.CornerDetector.Thresholds()
+    thresholds.initialValue = cv2.getTrackbarPos('harris_score','Features')
+    cornerDetector.thresholds = thresholds

+    cfg.setCornerDetector(cornerDetector)
     cfg.setMotionEstimator(motionEstimator)
     inputConfigQueue.send(cfg)
@@ -122,8 +128,7 @@ def on_trackbar(val):
 cv2.namedWindow('Features')

 # create trackbars threshold and robustness change
-cv2.createTrackbar('harris_score','Features',2000,25000, on_trackbar)
-cv2.createTrackbar('robustness','Features',100,127, on_trackbar)
+cv2.createTrackbar('harris_score','Features',20000,25000, on_trackbar)
 cv2.createTrackbar('numMaxFeatures','Features',256,1024, on_trackbar)

 # Connect to device and start pipeline
@@ -132,19 +137,22 @@ def on_trackbar(val):
     hostCamera = pipeline.create(HostCamera)
     camQueue = hostCamera.output.createOutputQueue()

-    featureTrackerLeft = pipeline.create(dai.node.FeatureTrackerRvc4)
+    featureTrackerLeft = pipeline.create(dai.node.FeatureTracker)

-    featureTrackerLeft.initialConfig.setHarrisCornerDetectorThreshold(initialThreshold)
-    featureTrackerLeft.initialConfig.setHarrisCornerDetectorRobustness(initialRobustness)
-    featureTrackerLeft.initialConfig.setCornerDetector(dai.FeatureTrackerConfigRvc4.CornerDetector.Type.HARRIS)
-    featureTrackerLeft.initialConfig.setNumMaxFeatures(256)
-
-    # Disable optical flow
-    featureTrackerLeft.initialConfig.setMotionEstimator(True)
-
-    # variable to keep track of state
-    motionEstimator = dai.FeatureTrackerConfigRvc4.MotionEstimator()
-    motionEstimator.enable = False
+
+    # featureTrackerLeft.initialConfig.setHarrisCornerDetectorThreshold(initialThreshold)
+    # featureTrackerLeft.initialConfig.setHarrisCornerDetectorRobustness(initialRobustness)
+    featureTrackerLeft.initialConfig.setCornerDetector(dai.FeatureTrackerConfig.CornerDetector.Type.HARRIS)
+    featureTrackerLeft.initialConfig.setMotionEstimator(False)
+
+    motionEstimator = dai.FeatureTrackerConfig.MotionEstimator()
+    motionEstimator.enable = True
+    featureTrackerLeft.initialConfig.setMotionEstimator(motionEstimator)
+
+    cornerDetector = dai.FeatureTrackerConfig.CornerDetector()
+    cornerDetector.numMaxFeatures = 256
+
+    # featureTrackerLeft.initialConfig.setNumMaxFeatures(256)

     outputFeaturePassthroughQueue = featureTrackerLeft.passthroughInputImage.createOutputQueue()
     outputFeatureQueue = featureTrackerLeft.outputFeatures.createOutputQueue()
@@ -154,6 +162,12 @@ def on_trackbar(val):
     # config = featureTrackerLeft.initialConfig
     inputConfigQueue = featureTrackerLeft.inputConfig.createInputQueue()

+    thresholds = dai.FeatureTrackerConfig.CornerDetector.Thresholds()
+    thresholds.initialValue = cv2.getTrackbarPos('harris_score','Features')
+
+    cornerDetector.thresholds = thresholds
+    featureTrackerLeft.initialConfig.setCornerDetector(cornerDetector)
+
     leftWindowName = "Features"
     leftFeatureDrawer = FeatureTrackerDrawer("Feature tracking duration (frames)", leftWindowName)
@@ -181,10 +195,17 @@ def on_trackbar(val):
         if key == ord('q'):
             break
         elif key == ord('m'):
-            cfg = dai.FeatureTrackerConfigRvc4()
-            cfg.setHarrisCornerDetectorThreshold(cv2.getTrackbarPos('harris_score','Features'))
-            cfg.setHarrisCornerDetectorRobustness(cv2.getTrackbarPos('robustness','Features'))
-            cfg.setNumMaxFeatures(cv2.getTrackbarPos('numMaxFeatures', 'Features'))
+            cfg = dai.FeatureTrackerConfig()
+            cornerDetector = dai.FeatureTrackerConfig.CornerDetector()
+            cornerDetector.numMaxFeatures = cv2.getTrackbarPos('numMaxFeatures', 'Features')
+
+            thresholds = dai.FeatureTrackerConfig.CornerDetector.Thresholds()
+            thresholds.initialValue = cv2.getTrackbarPos('harris_score','Features')
+            cornerDetector.thresholds = thresholds
+
+            cfg.setCornerDetector(cornerDetector)
+            cfg.setMotionEstimator(motionEstimator)
+
             if motionEstimator.enable == False:
                 motionEstimator.enable = True
                 cfg.setMotionEstimator(motionEstimator)
diff --git a/examples/python/RVC4/FeatureTracker/feature_tracker_motion_from_host_camera.py b/examples/python/RVC4/FeatureTracker/feature_tracker_motion_from_host_camera.py
index 883a34ecf..179ba8581 100644
--- a/examples/python/RVC4/FeatureTracker/feature_tracker_motion_from_host_camera.py
+++ b/examples/python/RVC4/FeatureTracker/feature_tracker_motion_from_host_camera.py
@@ -11,13 +11,16 @@ def __init__(self):
         self.output = dai.Node.Output(self)
     def run(self):
         # Create a VideoCapture object
-        cap = cv2.VideoCapture(0)
+        cap = cv2.VideoCapture("/home/nebojsa/Documents/depthai-device-kb/external/depthai-core/examples/python/models/construction_vest.mp4")
+        # cap = cv2.VideoCapture(0)
         if not cap.isOpened():
             pipeline.stop()
             raise RuntimeError("Error: Couldn't open host camera")
         while self.isRunning():
             # frame = depthImg
             ret, frame = cap.read()
+            # frame = cv2.imread("/home/nebojsa/Downloads/image.jpg")
+            frame = cv2.resize(frame, (640, 640), interpolation = cv2.INTER_LINEAR)
             frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
             imgFrame = dai.ImgFrame()
             imgFrame.setData(frame)
@@ -194,12 +197,16 @@ def __init__(self, windowName):
 initialRobustness = 100

 def on_trackbar(val):
-    cfg = dai.FeatureTrackerConfigRvc4()
+    cfg = dai.FeatureTrackerConfig()
+    cornerDetector = dai.FeatureTrackerConfig.CornerDetector()
+    cornerDetector.numMaxFeatures = cv2.getTrackbarPos('numMaxFeatures', 'Features')

-    cfg.setHarrisCornerDetectorThreshold(cv2.getTrackbarPos('harris_score','Features'))
-    cfg.setHarrisCornerDetectorRobustness(cv2.getTrackbarPos('robustness','Features'))
-    cfg.setNumMaxFeatures(cv2.getTrackbarPos('numMaxFeatures', 'Features'))
-    cfg.setMotionEstimator(True)
+    thresholds = dai.FeatureTrackerConfig.CornerDetector.Thresholds()
+    thresholds.initialValue = cv2.getTrackbarPos('harris_score','Features')
+    cornerDetector.thresholds = thresholds
+
+    cfg.setCornerDetector(cornerDetector)
+    cfg.setMotionEstimator(motionEstimator)

     inputConfigQueue.send(cfg)
@@ -207,9 +214,8 @@ def on_trackbar(val):
 cv2.namedWindow('HostCamera')
 cv2.namedWindow('Features')

-# create trackbars threshold and robustness change
-cv2.createTrackbar('harris_score','Features',2000,25000, on_trackbar)
-cv2.createTrackbar('robustness','Features',100,127, on_trackbar)
+# create trackbars threshold and numMaxFeatures change
+cv2.createTrackbar('harris_score','Features',20000,25000, on_trackbar)
 cv2.createTrackbar('numMaxFeatures','Features',256,1024, on_trackbar)

 # Connect to device and start pipeline
@@ -218,12 +224,21 @@ def on_trackbar(val):
     hostCamera = pipeline.create(HostCamera)
     camQueue = hostCamera.output.createOutputQueue()

-    featureTrackerLeft = pipeline.create(dai.node.FeatureTrackerRvc4)
+    featureTrackerLeft = pipeline.create(dai.node.FeatureTracker)
+
+    # featureTrackerLeft.initialConfig.setHarrisCornerDetectorThreshold(initialThreshold)
+    # featureTrackerLeft.initialConfig.setHarrisCornerDetectorRobustness(initialRobustness)
+    featureTrackerLeft.initialConfig.setCornerDetector(dai.FeatureTrackerConfig.CornerDetector.Type.HARRIS)
+    featureTrackerLeft.initialConfig.setMotionEstimator(False)
+
+    motionEstimator = dai.FeatureTrackerConfig.MotionEstimator()
+    motionEstimator.enable = True
+    featureTrackerLeft.initialConfig.setMotionEstimator(motionEstimator)

-    featureTrackerLeft.initialConfig.setHarrisCornerDetectorThreshold(initialThreshold)
-    featureTrackerLeft.initialConfig.setHarrisCornerDetectorRobustness(initialRobustness)
-    featureTrackerLeft.initialConfig.setCornerDetector(dai.FeatureTrackerConfigRvc4.CornerDetector.Type.HARRIS)
-    featureTrackerLeft.initialConfig.setNumMaxFeatures(256)
+    cornerDetector = dai.FeatureTrackerConfig.CornerDetector()
+    cornerDetector.numMaxFeatures = 256
+
+    # featureTrackerLeft.initialConfig.setNumMaxFeatures(256)

     # Enable optical flow
     featureTrackerLeft.initialConfig.setMotionEstimator(True)
@@ -234,6 +249,13 @@ def on_trackbar(val):
     hostCamera.output.link(featureTrackerLeft.inputImage)
     inputConfigQueue = featureTrackerLeft.inputConfig.createInputQueue()
+
+    thresholds = dai.FeatureTrackerConfig.CornerDetector.Thresholds()
+    thresholds.initialValue = cv2.getTrackbarPos('harris_score','Features')
+
+    cornerDetector.thresholds = thresholds
+    featureTrackerLeft.initialConfig.setCornerDetector(cornerDetector)
+
     cfg = dai.FeatureTrackerConfig()

     left_window_name = "Features"
@@ -262,5 +284,28 @@ def on_trackbar(val):
         print("Motions:", motions_left)

         cv2.imshow(left_window_name, leftFrame)
-        if cv2.waitKey(1) == ord('q'):
-            break
\ No newline at end of file
+        key = cv2.waitKey(1)
+        if key == ord('q'):
+            break
+        elif key == ord('m'):
+            cfg = dai.FeatureTrackerConfig()
+            cornerDetector = dai.FeatureTrackerConfig.CornerDetector()
+            cornerDetector.numMaxFeatures = cv2.getTrackbarPos('numMaxFeatures', 'Features')
+
+            thresholds = dai.FeatureTrackerConfig.CornerDetector.Thresholds()
+            thresholds.initialValue = cv2.getTrackbarPos('harris_score','Features')
+            cornerDetector.thresholds = thresholds
+
+            cfg.setCornerDetector(cornerDetector)
+            cfg.setMotionEstimator(motionEstimator)
+
+            if motionEstimator.enable == False:
+                motionEstimator.enable = True
+                cfg.setMotionEstimator(motionEstimator)
+                print("Enabling motionEstimator")
+            else:
+                motionEstimator.enable = False
+                cfg.setMotionEstimator(motionEstimator)
+                print("Disabling motionEstimator")
+
+            inputConfigQueue.send(cfg)
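
Note: all three examples above repeat the same runtime-configuration pattern with the new dai.FeatureTrackerConfig API: build a CornerDetector (with its Thresholds) and a MotionEstimator, attach both to the config, and send it over the node's input config queue. A minimal consolidated sketch of that pattern follows; the helper name build_tracker_config and its parameters are illustrative only, the dai calls mirror the ones used in the diffs, and inputConfigQueue is assumed to come from featureTrackerLeft.inputConfig.createInputQueue() as in the examples.

    import depthai as dai

    def build_tracker_config(num_max_features, harris_threshold, motion_enabled):
        # Assemble a FeatureTrackerConfig the same way the examples do in on_trackbar()
        cfg = dai.FeatureTrackerConfig()

        # Corner detector: cap the feature count and set the initial Harris score threshold
        cornerDetector = dai.FeatureTrackerConfig.CornerDetector()
        cornerDetector.numMaxFeatures = num_max_features
        thresholds = dai.FeatureTrackerConfig.CornerDetector.Thresholds()
        thresholds.initialValue = harris_threshold
        cornerDetector.thresholds = thresholds
        cfg.setCornerDetector(cornerDetector)

        # Motion estimator (optical flow) toggled through the struct's enable flag
        motionEstimator = dai.FeatureTrackerConfig.MotionEstimator()
        motionEstimator.enable = motion_enabled
        cfg.setMotionEstimator(motionEstimator)
        return cfg

    # e.g. inputConfigQueue.send(build_tracker_config(256, 20000, True))
    # where inputConfigQueue = featureTrackerLeft.inputConfig.createInputQueue()

The same structs also work on featureTrackerLeft.initialConfig before pipeline.start(), which is how the examples seed the node before any trackbar or key event arrives.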