From 7795cbd51bcbd9b816d80e452de64bf5b3273299 Mon Sep 17 00:00:00 2001
From: promise
Date: Thu, 18 Jul 2024 18:29:59 +0100
Subject: [PATCH 1/3] I processed both camera frames

---
 .DS_Store | Bin 6148 -> 6148 bytes
 1 file changed, 0 insertions(+), 0 deletions(-)

diff --git a/.DS_Store b/.DS_Store
index ddf36589b1407ca9e411e345b4a1c542adc1232d..9acbed4b94f8692b73cd68e45e9c95effdb87fc5 100644
GIT binary patch
delta 71
zcmZoMXfc=|#>B)qF;Q%yo+2aX#(>?7jGUW!SVS2o|79)MEXEB!kF;Q%yo+6|0#(>?7ix)66F$!|77T{2vf}|L~+JX#Z2TYb>F0E%_C

Date: Fri, 19 Jul 2024 00:01:49 +0100
Subject: [PATCH 2/3] I processed both camera frames

---
 camera.py | 68 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 68 insertions(+)

diff --git a/camera.py b/camera.py
index 976c947..b6b8163 100644
--- a/camera.py
+++ b/camera.py
@@ -58,3 +58,71 @@
 # Read the first frame from iPhone camera (if available)
 ret_iphone, frame1_iphone = iphone_camera.read()
 ret_iphone, frame2_iphone = iphone_camera.read()
+
+while True:
+    # Process MacBook camera frames
+    if ret_macbook:
+        # Compute the absolute difference between the two frames
+        diff_macbook = cv2.absdiff(frame1_macbook, frame2_macbook)
+        gray_macbook = cv2.cvtColor(diff_macbook, cv2.COLOR_BGR2GRAY)
+        blur_macbook = cv2.GaussianBlur(gray_macbook, (5, 5), 0)
+        _, thresh_macbook = cv2.threshold(blur_macbook, 20, 255, cv2.THRESH_BINARY)
+        dilated_macbook = cv2.dilate(thresh_macbook, None, iterations=3)
+        contours_macbook, _ = cv2.findContours(dilated_macbook, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
+
+        motion_detected_macbook = False
+        for contour in contours_macbook:
+            if cv2.contourArea(contour) < 500:
+                continue
+            (x, y, w, h) = cv2.boundingRect(contour)
+            cv2.rectangle(frame1_macbook, (x, y), (x + w, y + h), (0, 255, 0), 2)
+            motion_detected_macbook = True
+
+        # If motion is detected, publish to MQTT and print message
+        if motion_detected_macbook:
+            client.publish(MQTT_TOPIC, "Motion Detected (MacBook)")
+            print("Motion Detected (MacBook)")
+
+        # Write the frame to the output file for MacBook camera
+        out_macbook.write(frame1_macbook)
+
+        # Show the frame with contours for MacBook camera
+        cv2.imshow("Feed (MacBook)", frame1_macbook)
+
+        # Update the frames for MacBook camera
+        frame1_macbook = frame2_macbook
+        ret_macbook, frame2_macbook = macbook_camera.read()
+
+    # Process iPhone camera frames (if available)
+    if ret_iphone:
+        # Compute the absolute difference between the two frames (example)
+        diff_iphone = cv2.absdiff(frame1_iphone, frame2_iphone)
+        gray_iphone = cv2.cvtColor(diff_iphone, cv2.COLOR_BGR2GRAY)
+        blur_iphone = cv2.GaussianBlur(gray_iphone, (5, 5), 0)
+        _, thresh_iphone = cv2.threshold(blur_iphone, 20, 255, cv2.THRESH_BINARY)
+        dilated_iphone = cv2.dilate(thresh_iphone, None, iterations=3)
+        contours_iphone, _ = cv2.findContours(dilated_iphone, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
+
+        motion_detected_iphone = False
+        for contour in contours_iphone:
+            if cv2.contourArea(contour) < 500:
+                continue
+            (x, y, w, h) = cv2.boundingRect(contour)
+            cv2.rectangle(frame1_iphone, (x, y), (x + w, y + h), (0, 255, 0), 2)
+            motion_detected_iphone = True
+
+        # If motion is detected, publish to MQTT and print message
+        if motion_detected_iphone:
+            client.publish(MQTT_TOPIC, "Motion Detected (iPhone)")
+            print("Motion Detected (iPhone)")
+
+        # Write the frame to the output file for iPhone camera (if available)
+        if iphone_camera.isOpened():
+            out_iphone.write(frame1_iphone)
+
+        # Show the frame with contours for iPhone camera (if available)
+        cv2.imshow("Feed (iPhone)", frame1_iphone)
(iPhone)", frame1_iphone) + + # Update the frames for iPhone camera (if available) + frame1_iphone = frame2_iphone + ret_iphone, frame2_iphone = iphone_camera.read() From 8fe6ad4d21df43ad2527eb5dab7a767dc4713d6b Mon Sep 17 00:00:00 2001 From: Daniel Date: Fri, 19 Jul 2024 16:03:26 +0100 Subject: [PATCH 3/3] # Exit on 'q' key press --- camera.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/camera.py b/camera.py index b6b8163..258aa5d 100644 --- a/camera.py +++ b/camera.py @@ -126,3 +126,11 @@ # Update the frames for iPhone camera (if available) frame1_iphone = frame2_iphone ret_iphone, frame2_iphone = iphone_camera.read() +# Exit on 'q' key press + if cv2.waitKey(10) & 0xFF == ord('q'): + break + +# Clean up +macbook_camera.release() +out_macbook.release() +cv2.destroyAllWindows() \ No newline at end of file