Python/Hand detection_motor_pump

# HAND DETECTION_MOTOR_PUMP/Python
# By: Randy Canada
import cv2
import mediapipe as mp
import serial
import time

print(cv2.__version__)

# Serial link to the Arduino that drives the pump.
# Note: many boards auto-reset when the port opens, so a short pause
# (e.g. time.sleep(2)) before the first write may be needed.
arduino = serial.Serial('COM3', 115200)

class poseTracker:
    def __init__(self, width=1280, height=720):
        self.width = width
        self.height = height
        # Webcam capture via DirectShow: MJPG at 1280x720, 30 fps.
        self.cam = cv2.VideoCapture(0, cv2.CAP_DSHOW)
        self.cam.set(cv2.CAP_PROP_FRAME_WIDTH, width)
        self.cam.set(cv2.CAP_PROP_FRAME_HEIGHT, height)
        self.cam.set(cv2.CAP_PROP_FPS, 30)
        self.cam.set(cv2.CAP_PROP_FOURCC, cv2.VideoWriter_fourcc(*'MJPG'))
        # MediaPipe hand and face-mesh trackers.
        self.hands = mp.solutions.hands.Hands(static_image_mode=False, max_num_hands=2,
                                              min_detection_confidence=0.3, min_tracking_confidence=0.3)
        self.faceMesh = mp.solutions.face_mesh.FaceMesh(static_image_mode=False, max_num_faces=3,
                                                        min_detection_confidence=0.5, min_tracking_confidence=0.5)
        self.mpDraw = mp.solutions.drawing_utils
        self.last_gesture = None              # last gesture sent to the Arduino
        self.last_gesture_time = time.time()  # time of the last gesture change (for debouncing)

    def mesh(self, frame):
        frameRGB = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
        results = self.faceMesh.process(frameRGB)
        meshBound = []
        if results.multi_face_landmarks:
            for faceLandmarks in results.multi_face_landmarks:
                for lm in faceLandmarks.landmark:
                    meshBound.append((int(lm.x * self.width), int(lm.y * self.height)))
        return meshBound

    def handDetection(self, frame):
        frameRGB = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
        hands_results = self.hands.process(frameRGB)
        self.drawSpecCircle = self.mpDraw.DrawingSpec(thickness=1, circle_radius=1, color=(255, 0, 0))
        self.drawSpecLine = self.mpDraw.DrawingSpec(thickness=2, circle_radius=2, color=(0, 255, 0))
        myHands = []
        handsType = []

        if hands_results.multi_hand_landmarks:
            for hand_landmarks, handedness in zip(hands_results.multi_hand_landmarks, hands_results.multi_handedness):
                myHand = []
                for count, Landmark in enumerate(hand_landmarks.landmark):
                    #cv2.putText(frame, str(count), (int(Landmark.x * self.width), int(Landmark.y * self.height)), cv2.FONT_HERSHEY_SCRIPT_COMPLEX, fontScale=0.8, color=(0, 0, 0), thickness=1)
                    myHand.append((int(Landmark.x * self.width), int(Landmark.y * self.height)))
                myHands.append(myHand)
                # 'Left' or 'Right'; MediaPipe assigns handedness assuming a mirrored (selfie)
                # image, so the labels may be swapped for an unflipped webcam frame.
                handsType.append(handedness.classification[0].label)
                # Customize the color of hand connections here
                self.mpDraw.draw_landmarks(frame, hand_landmarks, mp.solutions.hands.HAND_CONNECTIONS,
                                           self.drawSpecCircle, self.drawSpecLine)
        return myHands, handsType

    def run(self):
        while True:
            ret, frame = self.cam.read()
            if not ret:
                break

            # meshLocation = self.mesh(frame)
            myHands1, handsType = self.handDetection(frame)

            right_hand_points = []
            left_hand_points = []
            gesture_detected = False

            if myHands1:
                for hand, handType in zip(myHands1, handsType):
                    if handType == 'Right':
                        right_hand_points.extend(hand)
                    elif handType == 'Left':
                        left_hand_points.extend(hand)
                print(handsType)

                # Gesture tests compare landmark y-coordinates (smaller y = higher in the image).
                # Landmark indices: 0 wrist, 1-4 thumb joints/tip, 6 index PIP, 8 index tip,
                # 12 middle tip, 16 ring tip, 17 pinky MCP, 18 pinky PIP, 20 pinky tip.
                rightThumbDown = (right_hand_points and right_hand_points[0][1] < right_hand_points[1][1] < right_hand_points[2][1] < right_hand_points[3][1] < right_hand_points[4][1]
                                  and right_hand_points[4][1] > right_hand_points[6][1])
                leftThumbDown = (left_hand_points and left_hand_points[0][1] < left_hand_points[1][1] < left_hand_points[2][1] < left_hand_points[3][1] < left_hand_points[4][1]
                                 and left_hand_points[4][1] > left_hand_points[6][1])
                # Thumb up: thumb joints stacked with the tip highest, and the other
                # fingertips/knuckles sitting below the thumb tip.
                rightThumbUp = (right_hand_points and right_hand_points[4][1] < right_hand_points[3][1] < right_hand_points[2][1] < right_hand_points[1][1] and
                                right_hand_points[6][1] > right_hand_points[4][1] and right_hand_points[12][1] > right_hand_points[4][1] and
                                right_hand_points[16][1] > right_hand_points[4][1] and right_hand_points[17][1] > right_hand_points[4][1])
                leftThumbUp = (left_hand_points and left_hand_points[4][1] < left_hand_points[3][1] < left_hand_points[2][1] < left_hand_points[1][1] and
                               left_hand_points[6][1] > left_hand_points[4][1] and left_hand_points[12][1] > left_hand_points[4][1] and
                               left_hand_points[16][1] > left_hand_points[4][1] and left_hand_points[17][1] > left_hand_points[4][1])
                # High five: pinky tip above the pinky PIP and index tip above the wrist, on either hand.
                highFive = ((right_hand_points and right_hand_points[20][1] < right_hand_points[18][1] and right_hand_points[8][1] < right_hand_points[0][1]) or
                            (left_hand_points and left_hand_points[20][1] < left_hand_points[18][1] and left_hand_points[8][1] < left_hand_points[0][1]))

                if (rightThumbUp and not leftThumbDown) or (leftThumbUp and not rightThumbDown):
                    gesture = 'Thumb'
                    cv2.putText(frame, 'Start Pouring!', (500, 100), cv2.FONT_ITALIC, fontScale=2, color=(255, 0, 0), thickness=3)
                    gesture_detected = True
                elif highFive:
                    gesture = 'HighFive'
                    cv2.putText(frame, 'Stop Pouring!', (500, 100), cv2.FONT_ITALIC, fontScale=2, color=(0, 0, 255), thickness=3)
                    gesture_detected = True
                else:
                    gesture = 'Unknown'
                    cv2.putText(frame, 'Unknown Gesture', (500, 100), cv2.FONT_ITALIC, fontScale=2, color=(0, 255, 0), thickness=3)
                    gesture_detected = False

                current_time = time.time()
                if gesture != self.last_gesture and (current_time - self.last_gesture_time) > 1:  # Debounce time of 1 second
                    self.last_gesture = gesture
                    self.last_gesture_time = current_time
                    if gesture == 'Thumb':
                        print("Sending gesture: Thumb")
                        arduino.write(b'Thumb\r')
                    elif gesture == 'HighFive':
                        print("Sending gesture: HighFive")
                        arduino.write(b'HighFive\r')
                    else:
                        # No command is sent for unrecognized gestures.
                        print("Sending gesture: Unknown")

            cv2.imshow('my WEBcam', frame)
            cv2.moveWindow('my WEBcam', 0, 0)
            if cv2.waitKey(1) & 0xff == ord('q'):
                break

        self.cam.release()
        cv2.destroyAllWindows()

pose_Tracker = poseTracker()
pose_Tracker.run()
arduino.close()

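# --- Optional serial check (illustrative sketch, not part of the original flow) ---
# A minimal helper, assuming the Arduino firmware simply matches the strings
# 'Thumb' and 'HighFive' terminated by '\r' (the same commands run() sends).
# It exercises the pump-control link without the webcam; the port name and the
# helper itself are placeholders, adjust them to match your setup.
def send_test_gesture(port='COM3', baud=115200):
    with serial.Serial(port, baud, timeout=1) as link:
        time.sleep(2)              # give the board time to finish its auto-reset
        link.write(b'Thumb\r')     # should start the pump
        time.sleep(3)
        link.write(b'HighFive\r')  # should stop the pump

# send_test_gesture()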