-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathutilities.py
104 lines (77 loc) · 3.88 KB
/
utilities.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
#!/usr/bin/env python
import pyautogui
import cv2
import numpy as np
class Detector:
    """Interactive HSV-threshold calibration for skin/palm detection.

    Opens the default webcam and a trackbar window; the user drags the six
    Hue/Saturation/Value sliders until the binary mask isolates the palm,
    then presses 'q' to exit and return the chosen bounds.
    """

    # Name of the OpenCV window that hosts the calibration trackbars.
    colorbar_window = 'Colorbars'

    def __init__(self):
        # BUG FIX: was misspelled `__int__`, so it was never invoked as the
        # constructor (object.__init__ ran instead). Harmless here, but the
        # typo is corrected for clarity.
        pass

    def nothing(self, x):
        # No-op callback required by cv2.createTrackbar's signature.
        pass

    def caliberate_hsv_values(self):
        """Run the interactive calibration loop.

        Returns:
            (lower_skin_thresh, upper_skin_thresh): two np.uint8 arrays of
            shape (3,) holding the [H, S, V] lower/upper bounds selected
            via the trackbars when the user pressed 'q'.
        """
        video_feed = cv2.VideoCapture(0)

        # Trackbar window to dynamically control the HSV threshold values.
        cv2.namedWindow(self.colorbar_window)
        # BUG FIX: the *_High sliders previously started at 0, making the
        # initial inRange mask entirely empty. Start them at their maxima
        # so the full range is selected until the user narrows it down.
        cv2.createTrackbar("Hue_Low", self.colorbar_window, 0, 179, self.nothing)
        cv2.createTrackbar("Hue_High", self.colorbar_window, 179, 179, self.nothing)
        cv2.createTrackbar("Saturation_Low", self.colorbar_window, 0, 255, self.nothing)
        cv2.createTrackbar("Saturation_High", self.colorbar_window, 255, 255, self.nothing)
        cv2.createTrackbar("Value_Low", self.colorbar_window, 0, 255, self.nothing)
        cv2.createTrackbar("Value_High", self.colorbar_window, 255, 255, self.nothing)

        # Defaults returned if the camera never yields a frame.
        lower_skin_thresh = np.array([0, 0, 0], dtype=np.uint8)
        upper_skin_thresh = np.array([179, 255, 255], dtype=np.uint8)

        while True:
            grabbed, screen = video_feed.read()
            if not grabbed or screen is None:
                # BUG FIX: previously the read status was discarded and a
                # failed grab crashed in cv2.flip with screen=None.
                break
            screen = cv2.flip(screen, 1)  # mirror for natural interaction
            screen_hsv = cv2.cvtColor(screen, cv2.COLOR_BGR2HSV)

            # Read the current position of each trackbar.
            hue_low = cv2.getTrackbarPos("Hue_Low", self.colorbar_window)
            hue_high = cv2.getTrackbarPos("Hue_High", self.colorbar_window)
            sat_low = cv2.getTrackbarPos("Saturation_Low", self.colorbar_window)
            sat_high = cv2.getTrackbarPos("Saturation_High", self.colorbar_window)
            val_low = cv2.getTrackbarPos("Value_Low", self.colorbar_window)
            val_high = cv2.getTrackbarPos("Value_High", self.colorbar_window)

            # uint8 is the conventional dtype for 8-bit HSV bounds.
            lower_skin_thresh = np.array([hue_low, sat_low, val_low], dtype=np.uint8)
            upper_skin_thresh = np.array([hue_high, sat_high, val_high], dtype=np.uint8)
            thresh_screen_hsv = cv2.inRange(screen_hsv, lower_skin_thresh, upper_skin_thresh)

            # Edge-preserving smoothing then median filtering to de-speckle
            # the binary mask before display.
            blurred_threshold = cv2.bilateralFilter(thresh_screen_hsv, 8, 200, 200)
            blurred_threshold_final = cv2.medianBlur(blurred_threshold, ksize=5)
            cv2.imshow("final", blurred_threshold_final)

            # 'q' exits the calibration loop.
            if cv2.waitKey(1) & 0xFF == ord('q'):
                break

        # Release the webcam and tear down all OpenCV windows.
        video_feed.release()
        cv2.destroyAllWindows()
        return lower_skin_thresh, upper_skin_thresh
class Mouse:
    """Maps a detected hand contour's centroid to an on-screen cursor move.

    Class attributes (evaluated once at import time, preserving the
    original interface):
        screen_resolution: (width, height) of the display via pyautogui.
        cam_resolution: a single frame grabbed from the default webcam,
            used only for its .shape to scale camera coords to screen
            coords. NOTE(review): if the camera is unavailable this frame
            is None and move_cursor will fail — confirm camera presence
            at startup.
    """
    screen_resolution = pyautogui.size()
    video_feed = cv2.VideoCapture(0)
    cam_resolution = video_feed.read()[1]
    video_feed.release()
    cv2.destroyAllWindows()

    def __init__(self):
        # BUG FIX: was misspelled `__int__`, so it never ran as the
        # constructor. Corrected; still intentionally a no-op.
        pass

    def move_cursor(self, hand_contour):
        """Move the OS cursor to the screen point matching the contour centroid.

        Args:
            hand_contour: an OpenCV contour (as produced by findContours)
                for the detected hand.

        The centroid is computed from image moments, then scaled from
        camera-frame coordinates to screen coordinates. Degenerate
        contours with zero area are ignored.
        """
        moment = cv2.moments(hand_contour)
        # BUG FIX: guard against a zero-area contour, which previously
        # raised ZeroDivisionError on m00 == 0.
        if moment['m00'] == 0:
            return
        centre_x = int(moment['m10'] / moment['m00'])
        centre_y = int(moment['m01'] / moment['m00'])
        # Scale camera coords -> screen coords (shape is (rows, cols, ...),
        # so width is shape[1] and height is shape[0]).
        pointer_x = int((centre_x * self.screen_resolution[0]) / self.cam_resolution.shape[1])
        pointer_y = int((centre_y * self.screen_resolution[1]) / self.cam_resolution.shape[0])
        # 0.1 s glide smooths the cursor motion.
        pyautogui.moveTo(pointer_x, pointer_y, 0.1)
        return