Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
#!/usr/bin/python3
'''
wiggle.py -- wave your face around in front of the camera to control
the mouse cursor.

This program is intended to be used with "dasher"
https://github.com/dasher-project/
though its interoperation is not integrated yet.

Dasher is an on-screen typing tool where you use a mouse to navigate
a sea of letters: where you point the mouse determines what you type,
and as sentences form, predictive text appears more frequently.
'''
import cv2
import dlib
import numpy as np
import os  # NOTE(review): currently unused -- kept for compatibility
import pyautogui

# Disable pyautogui's corner-abort failsafe: the cursor is driven by head
# movement and may legitimately reach a screen corner.
pyautogui.FAILSAFE = False

# dlib 68-point facial landmark predictor; the .dat model file must exist
# in the working directory.
predictor_path = "shape_predictor_68_face_landmarks.dat"
predictor = dlib.shape_predictor(predictor_path)

# dlib HOG-based frontal face detector.
detector = dlib.get_frontal_face_detector()

# Webcam capture (default device 0).
cap = cv2.VideoCapture(0)

# Screen bounds are loop-invariant: query them once instead of every frame.
screen_width, screen_height = pyautogui.size()

while True:
    ret, frame = cap.read()
    if not ret:
        # Camera disconnected or stream ended.
        break

    # dlib's detector/predictor operate on grayscale images.
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)

    faces = detector(gray)
    for face in faces:
        landmarks = predictor(gray, face)

        # 68-point model indices: 36-41 left eye, 42-47 right eye, 30 nose tip.
        left_eye = np.array([(landmarks.part(i).x, landmarks.part(i).y)
                             for i in range(36, 42)])
        right_eye = np.array([(landmarks.part(i).x, landmarks.part(i).y)
                              for i in range(42, 48)])
        nose_tip = np.array([(landmarks.part(i).x, landmarks.part(i).y)
                             for i in range(30, 31)])

        # Head-pose proxy: vector from the eye midpoint to the nose tip.
        avg_eye_pos = np.mean(np.concatenate((left_eye, right_eye)), axis=0)
        avg_nose_pos = np.mean(nose_tip, axis=0)
        gaze_direction = avg_nose_pos - avg_eye_pos

        # Scale the gaze vector into a relative cursor move.
        # BUG FIX: the y component previously reused gaze_direction[0], so
        # vertical head movement could never move the cursor vertically.
        mouse_move_x = int(gaze_direction[0] * 5)
        mouse_move_y = int(gaze_direction[1] * 5)

        # Move the cursor, clamped to the visible screen area.
        current_x, current_y = pyautogui.position()
        new_x = max(0, min(current_x + mouse_move_x, screen_width - 1))
        new_y = max(0, min(current_y + mouse_move_y, screen_height - 1))
        pyautogui.moveTo(new_x, new_y)

    cv2.imshow('Gaze Tracking', frame)
    # Press 'q' in the preview window to quit.
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break

cap.release()
cv2.destroyAllWindows()
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement