Gesture recognition is one of the most engaging applications at the intersection of computer vision and human-computer interaction. This article walks you through building a camera-based Rock-Paper-Scissors mini-game with Mediapipe and Python, showing the recognized gesture and the game result in real time.
1. Project Overview
The mini-game can:

- Detect gestures in real time, including Rock, Scissors, Paper, and the OK sign.
- Reset the game when the player shows the OK gesture.
- When the player shows a Rock/Paper/Scissors gesture, compare it against the computer's random move and display the outcome.
- Draw the hand landmarks and a gesture label on the camera feed, and show the computer's move and the round result in the top-left corner of the screen.
Core dependencies of the project:

- mediapipe: hand landmark detection.
- opencv-python: camera capture and image display.
- numpy: the math behind the gesture classification.
- random: simulating the computer's move.
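As a hedged aside, in a typical pip-based environment the three third-party packages can be installed roughly like this (random ships with the Python standard library, so nothing extra is needed for it):

```bash
pip install mediapipe opencv-python numpy
```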
2. Core Class: SimpleHandGestureGame
```python
class SimpleHandGestureGame:
    def __init__(self, model_path="hand_landmarker.task", num_hands=1):
        # Initialize the Mediapipe HandLandmarker
        base_options = python.BaseOptions(model_asset_path=model_path)
        options = vision.HandLandmarkerOptions(base_options=base_options, num_hands=num_hands)
        self.detector = vision.HandLandmarker.create_from_options(options)
        # Game state
        self.computer_choice = None
        self.round_result = ""
        self.round_played = False
```
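Note that the `hand_landmarker.task` model file is not bundled with the mediapipe package; the pretrained HandLandmarker model has to be downloaded separately (Google publishes it alongside the MediaPipe hand landmarker documentation) and its local path passed in via `model_path`.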
2.1 Drawing Landmarks: _draw_landmarks
```python
def _draw_landmarks(self, rgb_image, detection_result):
    annotated_image = np.copy(rgb_image)
    if detection_result.hand_landmarks:
        for hand_landmarks in detection_result.hand_landmarks:
            # Convert task-API landmarks into the protobuf format the drawing utils expect
            proto_landmarks = landmark_pb2.NormalizedLandmarkList()
            proto_landmarks.landmark.extend([
                landmark_pb2.NormalizedLandmark(x=lm.x, y=lm.y, z=lm.z) for lm in hand_landmarks
            ])
            solutions.drawing_utils.draw_landmarks(
                image=annotated_image,
                landmark_list=proto_landmarks,
                connections=mp.solutions.hands.HAND_CONNECTIONS,
                landmark_drawing_spec=solutions.drawing_styles.get_default_hand_landmarks_style(),
                connection_drawing_spec=solutions.drawing_styles.get_default_hand_connections_style()
            )
    return annotated_image
```
2.2 Gesture Recognition: _judge_gesture
The gesture is classified from whether each finger's keypoints indicate it is extended:

- Rock: all fingers curled.
- Scissors: index and middle fingers straight, the others curled.
- Paper: all five fingers straight.
- OK: thumb and index fingertip form a circle, the other three fingers straight.
```python
def _judge_gesture(self, hand_landmarks):
    def is_straight(tip, pip, mcp=None):
        if mcp:
            # Angle test: the finger counts as straight if the joint angle exceeds 160 degrees
            a, b, c = np.array([tip.x, tip.y]), np.array([pip.x, pip.y]), np.array([mcp.x, mcp.y])
            ba, bc = a - b, c - b
            cos_angle = np.dot(ba, bc) / (np.linalg.norm(ba) * np.linalg.norm(bc) + 1e-6)
            return np.arccos(np.clip(cos_angle, -1, 1)) * 180 / np.pi > 160
        else:
            # Simple test: fingertip above the PIP joint in image coordinates
            return tip.y < pip.y

    thumb_straight = is_straight(hand_landmarks[4], hand_landmarks[2], hand_landmarks[1])
    index_straight = is_straight(hand_landmarks[8], hand_landmarks[6])
    middle_straight = is_straight(hand_landmarks[12], hand_landmarks[10])
    ring_straight = is_straight(hand_landmarks[16], hand_landmarks[14])
    pinky_straight = is_straight(hand_landmarks[20], hand_landmarks[18])
    total = sum([thumb_straight, index_straight, middle_straight, ring_straight, pinky_straight])

    # OK gesture: thumb tip and index tip close together, other three fingers straight
    thumb_tip = np.array([hand_landmarks[4].x, hand_landmarks[4].y])
    index_tip = np.array([hand_landmarks[8].x, hand_landmarks[8].y])
    if np.linalg.norm(thumb_tip - index_tip) < 0.05 and middle_straight and ring_straight and pinky_straight:
        return "OK"

    if total == 0:
        return "Rock"
    if total == 2 and index_straight and middle_straight:
        return "Scissors"
    if total == 5:
        return "Paper"
    return "Undefined"
```
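One detail worth noting in this heuristic: the thumb is judged with the angle test (a joint angle above 160° counts as straight), while the other four fingers use the simpler rule that the fingertip sits above the PIP joint (smaller y in normalized image coordinates). That simpler rule implicitly assumes the hand is held roughly upright facing the camera.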
2.3 Game Logic: _play_game
```python
def _play_game(self, player_choice):
    choices = ["Rock", "Scissors", "Paper"]
    if self.computer_choice is None:
        self.computer_choice = random.choice(choices)
    if player_choice == self.computer_choice:
        self.round_result = "Draw"
    elif (player_choice == "Rock" and self.computer_choice == "Scissors") or \
         (player_choice == "Scissors" and self.computer_choice == "Paper") or \
         (player_choice == "Paper" and self.computer_choice == "Rock"):
        self.round_result = "You Win"
    else:
        self.round_result = "Computer Wins"
    self.round_played = True
```
2.4 Frame Processing: do
The final do method is responsible for:

- Reading the camera frame
- Running Mediapipe hand detection
- Drawing the hand landmarks and the gesture label
- Displaying the computer's move and the round result
```python
def do(self, frame, device=None):
    if frame is None:
        return None
    mp_image = mp.Image(image_format=mp.ImageFormat.SRGB, data=cv2.cvtColor(frame, cv2.COLOR_BGR2RGB))
    detection_result = self.detector.detect(mp_image)
    annotated = self._draw_landmarks(mp_image.numpy_view(), detection_result)
    # ... draw the gesture label and the game result ...
    return cv2.cvtColor(annotated, cv2.COLOR_RGB2BGR)
```
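The text-overlay logic is elided here; the complete do method, including the gesture label and the result overlay, appears in the full listing in Section 3.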
3. Quick Start
```python
import cv2
import numpy as np
import mediapipe as mp
from mediapipe import solutions
from mediapipe.framework.formats import landmark_pb2
from mediapipe.tasks import python
from mediapipe.tasks.python import vision
import random


class SimpleHandGestureGame:
    def __init__(self, model_path="path/to/hand_landmarker.task", num_hands=1):
        """Initialize Mediapipe HandLandmarker and game state"""
        base_options = python.BaseOptions(model_asset_path=model_path)
        options = vision.HandLandmarkerOptions(base_options=base_options, num_hands=num_hands)
        self.detector = vision.HandLandmarker.create_from_options(options)
        self.computer_choice = None
        self.round_result = ""
        self.round_played = False

    def _draw_landmarks(self, rgb_image, detection_result):
        annotated_image = np.copy(rgb_image)
        if detection_result.hand_landmarks:
            for hand_landmarks in detection_result.hand_landmarks:
                proto_landmarks = landmark_pb2.NormalizedLandmarkList()
                proto_landmarks.landmark.extend([
                    landmark_pb2.NormalizedLandmark(x=lm.x, y=lm.y, z=lm.z) for lm in hand_landmarks
                ])
                solutions.drawing_utils.draw_landmarks(
                    image=annotated_image,
                    landmark_list=proto_landmarks,
                    connections=mp.solutions.hands.HAND_CONNECTIONS,
                    landmark_drawing_spec=solutions.drawing_styles.get_default_hand_landmarks_style(),
                    connection_drawing_spec=solutions.drawing_styles.get_default_hand_connections_style()
                )
        return annotated_image

    def _judge_gesture(self, hand_landmarks):
        """Determine hand gesture: Rock-Paper-Scissors + OK"""
        def is_straight(tip, pip, mcp=None):
            if mcp:
                a, b, c = np.array([tip.x, tip.y]), np.array([pip.x, pip.y]), np.array([mcp.x, mcp.y])
                ba, bc = a - b, c - b
                cos_angle = np.dot(ba, bc) / (np.linalg.norm(ba) * np.linalg.norm(bc) + 1e-6)
                return np.arccos(np.clip(cos_angle, -1, 1)) * 180 / np.pi > 160
            else:
                return tip.y < pip.y

        thumb_straight = is_straight(hand_landmarks[4], hand_landmarks[2], hand_landmarks[1])
        index_straight = is_straight(hand_landmarks[8], hand_landmarks[6])
        middle_straight = is_straight(hand_landmarks[12], hand_landmarks[10])
        ring_straight = is_straight(hand_landmarks[16], hand_landmarks[14])
        pinky_straight = is_straight(hand_landmarks[20], hand_landmarks[18])
        thumb, index, middle, ring, pinky = thumb_straight, index_straight, middle_straight, ring_straight, pinky_straight
        total = sum([thumb, index, middle, ring, pinky])

        # OK gesture
        thumb_tip = np.array([hand_landmarks[4].x, hand_landmarks[4].y])
        index_tip = np.array([hand_landmarks[8].x, hand_landmarks[8].y])
        if np.linalg.norm(thumb_tip - index_tip) < 0.05 and middle and ring and pinky:
            return "OK"

        # Rock-Paper-Scissors
        if total == 0:
            return "Rock"
        if total == 2 and index and middle:
            return "Scissors"
        if total == 5:
            return "Paper"
        return "Undefined"

    def _play_game(self, player_choice):
        """Determine the result of a Rock-Paper-Scissors round"""
        choices = ["Rock", "Scissors", "Paper"]
        if self.computer_choice is None:
            self.computer_choice = random.choice(choices)
        if player_choice == self.computer_choice:
            self.round_result = "Draw"
        elif (player_choice == "Rock" and self.computer_choice == "Scissors") or \
             (player_choice == "Scissors" and self.computer_choice == "Paper") or \
             (player_choice == "Paper" and self.computer_choice == "Rock"):
            self.round_result = "You Win"
        else:
            self.round_result = "Computer Wins"
        self.round_played = True

    def do(self, frame, device=None):
        """Process a single frame, overlay hand gesture and game result (vertically)"""
        if frame is None:
            return None
        mp_image = mp.Image(image_format=mp.ImageFormat.SRGB, data=cv2.cvtColor(frame, cv2.COLOR_BGR2RGB))
        detection_result = self.detector.detect(mp_image)
        annotated = self._draw_landmarks(mp_image.numpy_view(), detection_result)

        gesture_display = ""
        if detection_result.hand_landmarks:
            for hand_landmarks in detection_result.hand_landmarks:
                gesture = self._judge_gesture(hand_landmarks)
                if gesture == "OK":
                    # OK resets the round and draws a fresh computer move
                    self.computer_choice = random.choice(["Rock", "Scissors", "Paper"])
                    self.round_result = ""
                    self.round_played = False
                    gesture_display = "Game Ready..."
                elif gesture in ["Rock", "Scissors", "Paper"] and not self.round_played:
                    self._play_game(gesture)
                    gesture_display = f"{gesture}"
                else:
                    gesture_display = gesture

                # Draw the gesture label near the index fingertip
                h, w, _ = annotated.shape
                index_finger_tip = hand_landmarks[8]
                cx, cy = int(index_finger_tip.x * w), int(index_finger_tip.y * h)
                cv2.putText(annotated, gesture_display, (cx, cy - 20),
                            cv2.FONT_HERSHEY_SIMPLEX, 1.0, (0, 255, 0), 2)

        if self.round_result:
            # Show the computer's move and the round result in the top-left corner
            start_x, start_y, line_height = 30, 50, 40
            lines = [f"Computer Choice: {self.computer_choice}", f"Result: {self.round_result}"]
            for i, line in enumerate(lines):
                cv2.putText(annotated, line, (start_x, start_y + i * line_height),
                            cv2.FONT_HERSHEY_SIMPLEX, 1.0, (0, 0, 255), 3)

        return cv2.cvtColor(annotated, cv2.COLOR_RGB2BGR)
```
4. Summary
This article showed how to use the Mediapipe HandLandmarker to quickly build a real-time gesture-recognition mini-game. With landmark-based calculations and a few simple rules, it recognizes the Rock, Scissors, Paper, and OK gestures and combines them with the game logic to produce a result.
Interested in PiscTrace or PiscCode? Head over to the official site for more: 🔗 PiscTrace