#!/usr/bin/python3
"""Fullscreen hand-landmark preview.

Grabs frames from a Raspberry Pi camera via Picamera2, runs MediaPipe
Hands on each frame, draws the detected landmarks with OpenCV, and shows
the result mirrored (selfie view) in a fullscreen window. Press 'q' to quit.
"""
import cv2
from picamera2 import Picamera2
import mediapipe as mp

mp_drawing = mp.solutions.drawing_utils
mp_drawing_styles = mp.solutions.drawing_styles
mp_hands = mp.solutions.hands

# Grab images as numpy arrays and leave everything else to OpenCV.
cv2.startWindowThread()

picam2 = Picamera2()
picam2.configure(picam2.create_preview_configuration(
    main={"format": 'XRGB8888', "size": (1280, 720)}))
picam2.start()

cv2.namedWindow("Camera", cv2.WND_PROP_FULLSCREEN)
cv2.setWindowProperty("Camera", cv2.WND_PROP_FULLSCREEN, cv2.WINDOW_FULLSCREEN)

try:
    with mp_hands.Hands(
            model_complexity=0,
            min_detection_confidence=0.5,
            min_tracking_confidence=0.5,
            max_num_hands=20) as hands:
        while True:
            frame = picam2.capture_array()

            # XRGB8888 frames carry a 4th padding channel (B,G,R,X in
            # memory); MediaPipe needs 3-channel RGB, so BGRA2RGB both
            # swaps to RGB and drops the padding channel in one pass.
            rgb = cv2.cvtColor(frame, cv2.COLOR_BGRA2RGB)
            # Mark the frame read-only AFTER the conversion so the hint
            # actually applies to the array MediaPipe processes (lets it
            # skip an internal copy).
            rgb.flags.writeable = False
            results = hands.process(rgb)

            # Draw the hand annotations on a fresh BGR copy for display.
            image = cv2.cvtColor(rgb, cv2.COLOR_RGB2BGR)
            if results.multi_hand_landmarks:
                for hand_landmarks in results.multi_hand_landmarks:
                    mp_drawing.draw_landmarks(
                        image, hand_landmarks, mp_hands.HAND_CONNECTIONS,
                        mp_drawing_styles.get_default_hand_landmarks_style(),
                        mp_drawing_styles.get_default_hand_connections_style())

            # Flip the image horizontally for a selfie-view display.
            cv2.imshow("Camera", cv2.flip(image, 1))
            if cv2.waitKey(1) & 0xFF == ord('q'):
                break
finally:
    # Release the display and the camera even on error or 'q'.
    cv2.destroyAllWindows()
    picam2.stop()