Advertisement
NaroxEG

Python - HC05 Communication + Mediapipe

Jul 6th, 2024 (edited)
817
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
Python 2.63 KB | None | 0 0
  1. import cv2
  2. import mediapipe as mp
  3. import serial
  4.  
# Initialize MediaPipe Hands.
mp_hands = mp.solutions.hands
mp_drawing = mp.solutions.drawing_utils
# 0.5/0.5 are MediaPipe's documented defaults for detection/tracking confidence.
hands = mp_hands.Hands(min_detection_confidence=0.5, min_tracking_confidence=0.5)

# Serial link to the HC-05 Bluetooth module.
# NOTE(review): 'COM5' is machine-specific (Windows port name) — adjust per host.
bluetooth_port = 'COM5'
baud_rate = 9600  # presumably the HC-05 factory default rate — confirm module config

# Opens the port immediately; raises serial.SerialException if it is unavailable.
ser = serial.Serial(bluetooth_port, baud_rate)
  14.  
  15. def recognize_gesture(landmarks):
  16.     # Extract the required landmarks for the fingers
  17.     thumb_tip = landmarks[mp_hands.HandLandmark.THUMB_TIP]
  18.     index_tip = landmarks[mp_hands.HandLandmark.INDEX_FINGER_TIP]
  19.     middle_tip = landmarks[mp_hands.HandLandmark.MIDDLE_FINGER_TIP]
  20.     ring_tip = landmarks[mp_hands.HandLandmark.RING_FINGER_TIP]
  21.     pinky_tip = landmarks[mp_hands.HandLandmark.PINKY_TIP]
  22.  
  23.     thumb_ip = landmarks[mp_hands.HandLandmark.THUMB_IP]
  24.     index_dip = landmarks[mp_hands.HandLandmark.INDEX_FINGER_DIP]
  25.     middle_pip = landmarks[mp_hands.HandLandmark.MIDDLE_FINGER_PIP]
  26.     ring_pip = landmarks[mp_hands.HandLandmark.RING_FINGER_PIP]
  27.     pinky_pip = landmarks[mp_hands.HandLandmark.PINKY_PIP]
  28.  
  29.     wrist = landmarks[mp_hands.HandLandmark.WRIST]
  30.  
  31.     # Calculate if fingers are raised
  32.     is_thumb_up = thumb_tip.y < thumb_ip.y and abs(thumb_tip.x - wrist.x) > abs(thumb_tip.y - wrist.y)
  33.     is_index_up = index_tip.y < index_dip.y
  34.     is_middle_up = middle_tip.y < middle_pip.y
  35.     is_ring_up = ring_tip.y < ring_pip.y
  36.     is_pinky_up = pinky_tip.y < pinky_pip.y
  37.  
  38.     return f"${int(is_thumb_up)}{int(is_index_up)}{int(is_middle_up)}{int(is_ring_up)}{int(is_pinky_up)}"
  39.  
  40. # For webcam input:
  41. cap = cv2.VideoCapture(0)
  42.  
  43. while cap.isOpened():
  44.     success, image = cap.read()
  45.     if not success:
  46.         break
  47.  
  48.     # Flip the image horizontally for a later selfie-view display
  49.     # Convert the BGR image to RGB.
  50.     image = cv2.cvtColor(cv2.flip(image, 1), cv2.COLOR_BGR2RGB)
  51.     image.flags.writeable = False
  52.     results = hands.process(image)
  53.  
  54.     # Draw the hand annotations on the image.
  55.     image.flags.writeable = True
  56.     image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
  57.     if results.multi_hand_landmarks:
  58.         for hand_landmarks in results.multi_hand_landmarks:
  59.             mp_drawing.draw_landmarks(image, hand_landmarks, mp_hands.HAND_CONNECTIONS)
  60.             landmarks = hand_landmarks.landmark
  61.             gesture = recognize_gesture(landmarks)
  62.             ser.write(gesture.encode())
  63.             cv2.putText(image, gesture, (10, 30), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 0, 0), 2, cv2.LINE_AA)
  64.    
  65.     # Display the image.
  66.     cv2.imshow('MediaPipe Hands', image)
  67.     if cv2.waitKey(5) & 0xFF == 27:
  68.         break
  69.  
  70. hands.close()
  71. cap.release()
  72. cv2.destroyAllWindows()
  73.  
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement