From f2ccac85df4af4d03e1cdd1bb5eb1d6942dff089 Mon Sep 17 00:00:00 2001
From: Wallace
Date: Wed, 17 Jul 2024 19:53:56 +0200
Subject: [PATCH 1/4] [WiP] adding context tracking to the tracker

---
 examples/display_calibration.py |  66 +++++++++++++++++
 examples/simple_example_v2.py   |   2 +-
 eyeGestures/calibration_v2.py   |  42 ++++++++++-
 eyeGestures/eyegestures.py      | 127 ++++++++++++++------------------
 eyeGestures/utils.py            |   3 +
 5 files changed, 166 insertions(+), 74 deletions(-)
 create mode 100644 examples/display_calibration.py

diff --git a/examples/display_calibration.py b/examples/display_calibration.py
new file mode 100644
index 0000000..49e06dd
--- /dev/null
+++ b/examples/display_calibration.py
@@ -0,0 +1,66 @@
+import os
+import sys
+import cv2
+import pygame
+import numpy as np
+
+dir_path = os.path.dirname(os.path.realpath(__file__))
+sys.path.append(f'{dir_path}/..')
+
+from eyeGestures.utils import VideoCapture
+from eyeGestures.eyegestures import EyeGestures_v2
+
+gestures = EyeGestures_v2()
+gestures.uploadCalibrationMap([[0,0],[0,1],[1,0],[1,1]])
+cap = VideoCapture(0)
+
+# Initialize Pygame
+pygame.init()
+
+# Get the display dimensions
+screen_info = pygame.display.Info()
+screen_width = screen_info.current_w
+screen_height = screen_info.current_h
+
+# Set up the screen
+screen = pygame.display.set_mode((screen_width, screen_height), pygame.FULLSCREEN)
+pygame.display.set_caption("Fullscreen Red Cursor")
+
+# Set up colors
+RED = (255, 0, 0)
+BLUE = (0, 0, 255)
+GREEN = (0, 255, 0)
+YELLOW = (255,255,0)
+
+clock = pygame.time.Clock()
+
+# Main game loop
+running = True
+iterator = 0
+first = [0,0]
+while running:
+    # Event handling
+    for event in pygame.event.get():
+        if event.type == pygame.QUIT:
+            running = False
+        elif event.type == pygame.KEYDOWN:
+            if event.key == pygame.K_q and pygame.key.get_mods() & pygame.KMOD_CTRL:
+                running = False
+
+    calibration = gestures.calibrationMat.getNextPoint(screen_width,screen_height)
+
+    if calibration[0] == first[0] and calibration[1] == first[1]:
+        screen.fill((0, 0, 0))
+
+    if first[0] == 0 and first[1] == 0:
+        first = calibration
+    # Display frame on Pygame screen
+    pygame.draw.circle(screen, YELLOW, calibration, 200)
+    pygame.display.flip()
+
+    # Cap the frame rate
+    clock.tick(10)
+
+# Quit Pygame
+pygame.quit()
+cap.release()
diff --git a/examples/simple_example_v2.py b/examples/simple_example_v2.py
index 940bfc3..8a02dba 100644
--- a/examples/simple_example_v2.py
+++ b/examples/simple_example_v2.py
@@ -50,7 +50,7 @@
     ret, frame = cap.read()
     frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
 
-    calibrate = (iterator <= 300)
+    calibrate = (iterator <= 600)
    iterator += 1
 
    event, calibration = gestures.step(frame, calibrate, screen_width, screen_height)
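Note on the new example: display_calibration.py drives the point cycle through gestures.calibrationMat.getNextPoint(...), but the eyeGestures/eyegestures.py part of this same patch removes the calibrationMat attribute from EyeGestures_v2 (the matrix now lives inside each per-context Calibrator), and calibration_v2.py splits getNextPoint into getCurrentPoint/movePoint. Below is a minimal sketch of the drawing loop against the reworked interface, assuming the default "main" context that uploadCalibrationMap() registers via addContext(); only the loop body changes, the rest of the example stays as above.

    # Sketch, not part of the patch: replaces the getNextPoint call inside the while-loop.
    clb = gestures.clb["main"]                                       # per-context calibrator
    calibration = clb.getCurrentPoint(screen_width, screen_height)   # current target point, does not advance
    pygame.draw.circle(screen, YELLOW, calibration, 200)
    pygame.display.flip()
    clb.movePoint()                                                  # advance to the next map entry
    clock.tick(10)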
diff --git a/eyeGestures/calibration_v2.py b/eyeGestures/calibration_v2.py
index 4966b92..1362d2b 100644
--- a/eyeGestures/calibration_v2.py
+++ b/eyeGestures/calibration_v2.py
@@ -6,6 +6,11 @@ def euclidean_distance(point1, point2):
 
 class Calibrator:
 
+    PRECISION_LIMIT = 50
+    PRECISION_STEP = 10
+    ACCEPTANCE_RADIUS = 500
+    CALIBRATION_RADIUS = 1000
+
     def __init__(self):
         self.X = []
         self.Y_y = []
@@ -15,6 +20,13 @@ def __init__(self):
         self.reg_y = scireg.Ridge(alpha=1.0)
         self.fitted = False
 
+        self.matrix = CalibrationMatrix()
+
+        self.precision_limit = self.PRECISION_LIMIT
+        self.precision_step = self.PRECISION_STEP
+        self.acceptance_radius = self.ACCEPTANCE_RADIUS
+        self.calibration_radius = self.CALIBRATION_RADIUS
+
     def add(self,x,y):
         self.X.append(x.flatten())
         self.Y_y.append(y[1])
@@ -38,9 +50,32 @@ def predict(self,x):
         else:
             return np.array([0.0,0.0])
 
+    def movePoint(self):
+        self.matrix.movePoint()
+
+    def getCurrentPoint(self,width,heigth):
+        return self.matrix.getCurrentPoint(width,heigth)
+
+    def updMatrix(self,points):
+        return self.matrix.updMatrix(points)
+
     def unfit(self):
+        self.acceptance_radius = self.ACCEPTANCE_RADIUS
+        self.calibration_radius = self.CALIBRATION_RADIUS
         self.fitted = False
 
+    def increase_precision(self):
+        if self.acceptance_radius > self.precision_limit:
+            self.acceptance_radius -= self.precision_step
+        if self.calibration_radius > self.precision_limit and self.acceptance_radius < self.calibration_radius:
+            self.calibration_radius -= self.precision_step
+
+    def insideClbRadius(self,point,width,height):
+        return euclidean_distance(point,self.getCurrentPoint(width,height)) < self.calibration_radius
+
+    def insideAcptcRadius(self,point,width,height):
+        return euclidean_distance(point,self.getCurrentPoint(width,height)) < self.acceptance_radius
+
 class CalibrationMatrix:
 
     def __init__(self):
@@ -53,13 +88,14 @@ def __init__(self):
                         [1.0,0.25],[0.75,0.25],[0.5,0.25],[0.25,0.25],[0.0,0.25]])
         pass
 
-    def update_calibration_matrix(self,points):
+    def updMatrix(self,points):
         self.points = points
         self.iterator = 0
 
-    def getNextPoint(self,width=1.0,height=1.0):
-        it = self.iterator
+    def movePoint(self):
         self.iterator += 1
         self.iterator %= len(self.points)
 
+    def getCurrentPoint(self,width=1.0,height=1.0):
+        it = self.iterator
         return np.array([self.points[it,0] * width, self.points[it,1] * height])
\ No newline at end of file
diff --git a/eyeGestures/eyegestures.py b/eyeGestures/eyegestures.py
index afb2203..dce2b36 100644
--- a/eyeGestures/eyegestures.py
+++ b/eyeGestures/eyegestures.py
@@ -4,7 +4,7 @@
 from eyeGestures.calibration_v1 import Calibrator as Calibrator_v1
 from eyeGestures.calibration_v2 import Calibrator as Calibrator_v2, CalibrationMatrix, euclidean_distance
 from eyeGestures.gevent import Gevent, Cevent
-from eyeGestures.utils import timeit
+from eyeGestures.utils import timeit, Buffor
 import numpy as np
 import pickle
 import cv2
@@ -16,57 +16,38 @@ class EyeGestures_v2:
     """Main class for EyeGesture tracker.
     It configures and manages entire algorithm"""
-
-    PRECISION_LIMIT = 50
-    PRECISION_STEP = 10
-    ACCEPTANCE_RADIUS = 500
-    CALIBRATION_RADIUS = 1000
-    EYEGESTURES_CALIBRATION_THRESH = 850
-    EYEGESTURES_CALIBRATION_FILENAME = 'eygestures_calib.eyec'
-
     def __init__(self):
         self.monitor_width = 1
         self.monitor_height = 1
 
-        self.clb = Calibrator_v2()
+        self.clb = dict() # Calibrator_v2()
         self.cap = None
         self.gestures = EyeGestures_v1(285,115,200,100)
 
-        self.calibration = False
+        self.calibration = dict()
 
         self.CN = 5
 
-        self.trackerSignal = None
-        self.fitSignal = None
-
-        self.average_points = np.zeros((20,2))
-        self.filled_points = 0
+        self.average_points = dict()
+        self.iterator = dict()
+        self.filled_points= dict()
         self.enable_CN = False
         self.calibrate_gestures = False
 
-        self.calibrationMat = CalibrationMatrix()
-        self.fit_point = self.calibrationMat.getNextPoint()
-        self.iterator = 0
 
         self.fix = 0.8
 
-        self.precision_limit = self.PRECISION_LIMIT
-        self.precision_step = self.PRECISION_STEP
-        self.acceptance_radius = self.ACCEPTANCE_RADIUS
-        self.calibration_radius = self.CALIBRATION_RADIUS
-        # after corssing this thresh we are disabling classical calib
-        self.eyegestures_calibration_threshold = self.EYEGESTURES_CALIBRATION_THRESH
-
-    def saveModel(self):
-        return pickle.dumps(self.clb)
+    def saveModel(self, context = "main"):
+        if context in self.clb:
+            return pickle.dumps(self.clb[context])
 
-    def loadModel(self,model):
-        self.clb = pickle.loads(model)
+    def loadModel(self,model, context = "main"):
+        self.clb[context] = pickle.loads(model)
 
-    def uploadCalibrationMap(self,points):
-        self.calibrationMat.update_calibration_matrix(np.array(points))
-        self.fit_point = self.calibrationMat.getNextPoint()
+    def uploadCalibrationMap(self,points,context = "main"):
+        self.addContext(context)
+        self.clb[context].updMatrix(np.array(points))
 
-    def getLandmarks(self, frame, calibrate = False):
+    def getLandmarks(self, frame, calibrate = False, context="main"):
         frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
         frame = cv2.flip(frame,1)
 
@@ -74,7 +55,7 @@ def getLandmarks(self, frame, calibrate = False):
 
         event, cevent = self.gestures.step(
             frame,
-            "main",
+            context,
             calibrate, # set calibration - switch to False to stop calibration
             self.monitor_width,
             self.monitor_height,
@@ -87,23 +68,15 @@ def getLandmarks(self, frame, calibrate = False):
         cursors = np.array([cursor_x,cursor_y]).reshape(1, 2)
         eye_events = np.array([event.blink,event.fixation]).reshape(1, 2)
         key_points = np.concatenate((cursors,l_eye_landmarks,r_eye_landmarks,eye_events))
-        return np.array((cursor_x, cursor_y)), key_points, event.blink, event.fixation, cevent
-
-    def increase_precision(self):
-        if self.acceptance_radius > self.precision_limit:
-            self.acceptance_radius -= self.precision_step
-        if self.calibration_radius > self.precision_limit and self.acceptance_radius < self.calibration_radius:
-            self.calibration_radius -= self.precision_step
+        return np.array((cursor_x, cursor_y)), key_points, event.blink, event.fixation, cevent
 
     def setClassicImpact(self,impact):
         self.CN = impact
 
-    def reset(self):
-        self.acceptance_radius = self.ACCEPTANCE_RADIUS
-        self.calibration_radius = self.CALIBRATION_RADIUS
-        self.average_points = np.zeros((20,2))
-        self.filled_points = 0
-        self.clb.unfit()
+    def reset(self, context = "main"):
+        self.filled_points[context] = 0
+        if context in self.clb:
+            self.addContext(context)
 
     def setFixation(self,fix):
         self.fix = fix
@@ -117,47 +90,61 @@ def enableCNCalib(self):
     def disableCNCalib(self):
         self.enable_CN = False
 
-    def step(self, frame, calibration, width, height):
-        self.calibration = calibration
+    def addContext(self, context):
+        if context not in self.clb:
+            self.clb[context] = Calibrator_v2()
+            self.average_points[context] = Buffor(20)
+            self.iterator[context] = 0
+            self.average_points[context] = np.zeros((20,2))
+            self.filled_points[context] = 0
+            self.calibration[context] = False
+
+
+    def step(self, frame, calibration, width, height, context="main"):
+        self.addContext(context)
+
+        self.calibration[context] = calibration
         self.monitor_width = width
         self.monitor_height = height
 
-        classic_point, key_points, blink, fixation, cevent = self.getLandmarks(frame,self.calibrate_gestures and self.enable_CN)
+        classic_point, key_points, blink, fixation, cevent = self.getLandmarks(frame,
+                                                                               self.calibrate_gestures and self.enable_CN,
+                                                                               context = context)
 
         margin = 10
-        if (classic_point[0] <= margin) and self.calibration:
+        if (classic_point[0] <= margin) and self.calibration[context]:
             self.calibrate_gestures = cevent.calibration
-        elif (classic_point[0] >= width - margin) and self.calibration:
+        elif (classic_point[0] >= width - margin) and self.calibration[context]:
             self.calibrate_gestures = cevent.calibration
-        elif (cevent.point[1] <= margin) and self.calibration:
+        elif (cevent.point[1] <= margin) and self.calibration[context]:
             self.calibrate_gestures = cevent.calibration
-        elif (classic_point[1] >= height - margin) and self.calibration:
+        elif (classic_point[1] >= height - margin) and self.calibration[context]:
             self.calibrate_gestures = cevent.calibration
         else:
             self.calibrate_gestures = False
 
-        y_point = self.clb.predict(key_points)
-        self.average_points[1:,:] = self.average_points[:(self.average_points.shape[0] - 1),:]
+        y_point = self.clb[context].predict(key_points)
+        self.average_points[context][1:,:] = self.average_points[context][:(self.average_points[context].shape[0] - 1),:]
         if fixation <= self.fix:
-            self.average_points[0,:] = y_point
+            self.average_points[context][0,:] = y_point
 
-        if self.filled_points < self.average_points.shape[0] and (y_point != np.array([0.0,0.0])).any():
-            self.filled_points += 1
+        if self.filled_points[context] < self.average_points[context].shape[0] and (y_point != np.array([0.0,0.0])).any():
+            self.filled_points[context] += 1
 
-        averaged_point = (np.sum(self.average_points[:,:],axis=0) + (classic_point * self.CN))/(self.filled_points + self.CN)
+        averaged_point = (np.sum(self.average_points[context][:,:],axis=0) + (classic_point * self.CN))/(self.filled_points[context] + self.CN)
 
-        if self.calibration and (euclidean_distance(averaged_point,self.fit_point) < self.calibration_radius or self.filled_points < self.average_points.shape[0] * 10):
-            self.clb.add(key_points,self.fit_point)
+        if self.calibration[context] and self.clb[context].insideClbRadius(averaged_point,width,height) or self.filled_points[context] < self.average_points[context].shape[0] * 10:
+            self.clb[context].add(key_points,self.clb[context].getCurrentPoint(width,height))
 
-        if self.calibration and (euclidean_distance(averaged_point,self.fit_point) < self.acceptance_radius):
-            self.iterator += 1
-            if self.iterator > 10:
-                self.iterator = 0
-                self.fit_point = self.calibrationMat.getNextPoint(width,height)
-                self.increase_precision()
+        if self.calibration[context] and self.clb[context].insideAcptcRadius(averaged_point,width,height):
+            self.iterator[context] += 1
+            if self.iterator[context] > 10:
+                self.iterator[context] = 0
+                self.clb[context].movePoint()
+                self.clb[context].increase_precision()
 
         gevent = Gevent(averaged_point,blink,fixation >= self.fix)
-        cevent = Cevent(self.fit_point,self.acceptance_radius, self.calibration_radius)
+        cevent = Cevent(self.clb[context].getCurrentPoint(width,height),self.clb[context].acceptance_radius, self.clb[context].calibration_radius)
         return (gevent, cevent)
 
 class EyeGestures_v1:
diff --git a/eyeGestures/utils.py b/eyeGestures/utils.py
index b943191..58c9be2 100644
--- a/eyeGestures/utils.py
+++ b/eyeGestures/utils.py
@@ -114,6 +114,9 @@ def getFirst(self):
     def getLen(self):
         return len(self.__buffor)
 
+    def isFull(self):
+        return len(self.__buffor) >= self.length
+
     def flush(self):
         tmp = self.__buffor[-1]
         self.__buffor = []
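Taken together, PATCH 1 turns every piece of per-user state (calibrator, rolling average, fill counter, calibration flag) into a dictionary keyed by a context string and created lazily by addContext(). A minimal sketch of how two contexts could be driven from one capture loop follows; the context names, frame counts and screen size are illustrative values, not part of the patch.

    from eyeGestures.utils import VideoCapture
    from eyeGestures.eyegestures import EyeGestures_v2

    gestures = EyeGestures_v2()
    # a coarse 2x2 map for the "setup" context; "main" keeps the default matrix
    gestures.uploadCalibrationMap([[0, 0], [0, 1], [1, 0], [1, 1]], context="setup")

    cap = VideoCapture(0)
    for iterator in range(1200):
        ret, frame = cap.read()
        calibrate = iterator <= 600

        # the same frame feeds two independently calibrated trackers
        event_main, calib_main = gestures.step(frame, calibrate, 1920, 1080, context="main")
        event_setup, calib_setup = gestures.step(frame, calibrate, 1920, 1080, context="setup")

    # calibration models can be stored and restored per context
    model = gestures.saveModel(context="setup")
    gestures.loadModel(model, context="setup")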
From 9db44869ffef0398aa9dfb23e01635e5a97e78ec Mon Sep 17 00:00:00 2001
From: Wallace
Date: Wed, 17 Jul 2024 20:01:50 +0200
Subject: [PATCH 2/4] fixing context tracking

---
 eyeGestures/eyegestures.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/eyeGestures/eyegestures.py b/eyeGestures/eyegestures.py
index dce2b36..950c162 100644
--- a/eyeGestures/eyegestures.py
+++ b/eyeGestures/eyegestures.py
@@ -133,7 +133,7 @@ def step(self, frame, calibration, width, height, context="main"):
 
         averaged_point = (np.sum(self.average_points[context][:,:],axis=0) + (classic_point * self.CN))/(self.filled_points[context] + self.CN)
 
-        if self.calibration[context] and self.clb[context].insideClbRadius(averaged_point,width,height) or self.filled_points[context] < self.average_points[context].shape[0] * 10:
+        if self.calibration[context] and (self.clb[context].insideClbRadius(averaged_point,width,height) or self.filled_points[context] < self.average_points[context].shape[0] * 10):
             self.clb[context].add(key_points,self.clb[context].getCurrentPoint(width,height))
 
         if self.calibration[context] and self.clb[context].insideAcptcRadius(averaged_point,width,height):
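PATCH 2 is purely an operator-precedence fix: Python binds "and" tighter than "or", so the unparenthesised condition from PATCH 1 parsed as (calibration and insideClbRadius) or not_enough_samples, which kept feeding samples to the calibrator even when calibration was switched off. The added parentheses keep both alternatives behind the calibration flag. A standalone illustration with made-up values:

    calibrating = False
    inside_radius = True
    too_few_samples = True

    before = calibrating and inside_radius or too_few_samples    # parsed as (False and True) or True -> True
    after = calibrating and (inside_radius or too_few_samples)   # False and (...) -> False
    print(before, after)  # True False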
From 60036af112c7997044469e1b11aaad4d4367de53 Mon Sep 17 00:00:00 2001
From: Wallace
Date: Wed, 17 Jul 2024 20:03:52 +0200
Subject: [PATCH 3/4] updating pyproject.toml

---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 1e55d40..30831fc 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -21,7 +21,7 @@ exclude = [
 
 [project]
 name = "eyeGestures"
-version = "2.3.2"
+version = "2.4.2"
 authors = [
   { name="Piotr Walas", email="piotr.walas@eyegestures.com" },
 ]
From a5d136b458d39653800c07bb6f17a39e38aff9c7 Mon Sep 17 00:00:00 2001
From: Wallace
Date: Wed, 17 Jul 2024 20:06:15 +0200
Subject: [PATCH 4/4] bumping setuptools

---
 requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/requirements.txt b/requirements.txt
index 6475135..e839d06 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -13,7 +13,7 @@ pytest==7.4.2
 scikit_learn==1.3.2
 scipy==1.12.0
 screeninfo==0.8.1
-setuptools==69.0.2
+setuptools>=70.0.0
 scipy==1.12.0
 opencv-contrib-python==4.9.0.80
 xlwt==1.3.0
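The reworked Calibrator from PATCH 1 can be sanity-checked without a camera or a GUI. Everything used below is introduced in this series; only the 2x2 map, the screen size and the loop count are arbitrary test values.

    import numpy as np
    from eyeGestures.calibration_v2 import Calibrator

    clb = Calibrator()
    clb.updMatrix(np.array([[0.0, 0.0], [0.0, 1.0], [1.0, 0.0], [1.0, 1.0]]))

    width, height = 1000, 1000
    first = clb.getCurrentPoint(width, height)            # scaled to the given screen size
    assert clb.insideAcptcRadius(first, width, height)    # a point on target lies inside the acceptance radius

    clb.movePoint()                                       # advance to the next entry of the uploaded map
    assert not np.array_equal(first, clb.getCurrentPoint(width, height))

    for _ in range(100):                                  # radii shrink by PRECISION_STEP, floored at PRECISION_LIMIT
        clb.increase_precision()
    assert clb.acceptance_radius == Calibrator.PRECISION_LIMIT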