wiggle.py

j0h
May 8th, 2024

#!/usr/bin/python3
'''
wiggle.py: wave your face around in front of the camera to control the mouse.
This program is intended to be used with "dasher"
https://github.com/dasher-project/
though the two are not integrated yet.

Dasher is an on-screen typing tool where you use a mouse to navigate
a sea of letters. Where you point the mouse determines what you type;
as sentences form, predictive text appears more often.
'''
import cv2
import dlib
import numpy as np
import pyautogui

# Disable the PyAutoGUI fail-safe; otherwise moving the cursor into a screen
# corner raises an exception and stops the program.
pyautogui.FAILSAFE = False
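
# The 68-point landmark model is not shipped with dlib. It can be downloaded
# from http://dlib.net/files/shape_predictor_68_face_landmarks.dat.bz2 and
# must be decompressed next to this script before running it.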
# Initialize the dlib facial landmark predictor
predictor_path = "shape_predictor_68_face_landmarks.dat"
predictor = dlib.shape_predictor(predictor_path)

# Initialize the dlib face detector
detector = dlib.get_frontal_face_detector()

# Start the webcam
cap = cv2.VideoCapture(0)

while True:
    ret, frame = cap.read()
    if not ret:
        break

    # Convert the frame to grayscale
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)

    # Detect faces in the frame
    faces = detector(gray)

    for face in faces:
        # Detect facial landmarks
        landmarks = predictor(gray, face)

        # Extract landmarks for the left and right eyes, and the nose tip
        left_eye = np.array([(landmarks.part(i).x, landmarks.part(i).y) for i in range(36, 42)])
        right_eye = np.array([(landmarks.part(i).x, landmarks.part(i).y) for i in range(42, 48)])
        nose_tip = np.array([(landmarks.part(i).x, landmarks.part(i).y) for i in range(30, 31)])

        # Calculate the average position of the eyes and the nose tip
        avg_eye_pos = np.mean(np.concatenate((left_eye, right_eye)), axis=0)
        avg_nose_pos = np.mean(nose_tip, axis=0)

        # The vector from the eye midpoint to the nose tip approximates where
        # the head is pointing; tilting the head moves the cursor.
        gaze_direction = avg_nose_pos - avg_eye_pos

        # Scale the gaze direction to control the mouse movement
        mouse_move_x = int(gaze_direction[0] * 5)
        mouse_move_y = int(gaze_direction[1] * 5)

        # Move the mouse cursor, clamped to the screen bounds
        current_x, current_y = pyautogui.position()
        new_x = max(0, min(current_x + mouse_move_x, pyautogui.size().width - 1))
        new_y = max(0, min(current_y + mouse_move_y, pyautogui.size().height - 1))
        pyautogui.moveTo(new_x, new_y)

    cv2.imshow('Gaze Tracking', frame)

    if cv2.waitKey(1) & 0xFF == ord('q'):
        break

cap.release()
cv2.destroyAllWindows()
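
If the landmark model file is missing, dlib.shape_predictor() fails at startup. A minimal bootstrap sketch, assuming network access and that dlib's model download URL (http://dlib.net/files/) is still available; the model file name matches the one the script expects:

#!/usr/bin/python3
# Illustrative bootstrap: download and decompress the 68-point landmark model
# if it is not already present, then confirm the webcam can be opened.
import bz2
import os
import urllib.request

import cv2

MODEL = "shape_predictor_68_face_landmarks.dat"
MODEL_URL = "http://dlib.net/files/shape_predictor_68_face_landmarks.dat.bz2"

if not os.path.exists(MODEL):
    print("Downloading", MODEL_URL)
    compressed_path, _ = urllib.request.urlretrieve(MODEL_URL)
    with bz2.open(compressed_path, "rb") as src, open(MODEL, "wb") as dst:
        dst.write(src.read())
    print("Saved", MODEL)

cam = cv2.VideoCapture(0)
print("Webcam available:", cam.isOpened())
cam.release()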