From 97be4219cf0b1f8e5423002a8925e005a6ff3a0d Mon Sep 17 00:00:00 2001
From: Patrick Lindemann
Date: Fri, 2 Jun 2023 21:44:57 +0200
Subject: [PATCH] Replaced deprecated np.int and np.float dtypes with np.int32
 and np.float32

---
 chesscog/corner_detection/detect_corners.py     | 12 ++++++------
 chesscog/occupancy_classifier/create_dataset.py |  4 ++--
 chesscog/piece_classifier/create_dataset.py     |  4 ++--
 chesscog/report/prepare_confusion_matrix.py     |  4 ++--
 4 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/chesscog/corner_detection/detect_corners.py b/chesscog/corner_detection/detect_corners.py
index 8a5d5599..48fb9a19 100644
--- a/chesscog/corner_detection/detect_corners.py
+++ b/chesscog/corner_detection/detect_corners.py
@@ -103,7 +103,7 @@ def find_corners(cfg: CN, img: np.ndarray) -> np.ndarray:
     inverse_transformation_matrix = np.linalg.inv(transformation_matrix)
 
     # Warp grayscale image
-    dims = tuple(warped_img_size.astype(np.int))
+    dims = tuple(warped_img_size.astype(np.int32))
     warped = cv2.warpPerspective(gray, transformation_matrix, dims)
     borders = np.zeros_like(gray)
     borders[3:-3, 3:-3] = 1
@@ -122,7 +122,7 @@ def find_corners(cfg: CN, img: np.ndarray) -> np.ndarray:
     corners = np.array([[xmin, ymin],
                         [xmax, ymin],
                         [xmax, ymax],
-                        [xmin, ymax]]).astype(np.float)
+                        [xmin, ymax]]).astype(np.float32)
     corners = corners * scale
     img_corners = _warp_points(inverse_transformation_matrix, corners)
     img_corners = img_corners / img_scale
@@ -353,8 +353,8 @@ def _quantize_points(cfg: CN, warped_scaled_points: np.ndarray, intersection_poi
     mean_col_xs = warped_scaled_points[..., 0].mean(axis=0)
     mean_row_ys = warped_scaled_points[..., 1].mean(axis=1)
 
-    col_xs = np.rint(mean_col_xs).astype(np.int)
-    row_ys = np.rint(mean_row_ys).astype(np.int)
+    col_xs = np.rint(mean_col_xs).astype(np.int32)
+    row_ys = np.rint(mean_row_ys).astype(np.int32)
 
     # Remove duplicates
     col_xs, col_indices = np.unique(col_xs, return_index=True)
@@ -407,7 +407,7 @@ def _compute_vertical_borders(cfg: CN, warped: np.ndarray, mask: np.ndarray, sca
     G_x[~mask] = 0
 
     def get_nonmax_supressed(x):
-        x = (x * scale[0]).astype(np.int)
+        x = (x * scale[0]).astype(np.int32)
         thresh = cfg.BORDER_REFINEMENT.LINE_WIDTH // 2
         return G_x[:, x-thresh:x+thresh+1].max(axis=1)
 
@@ -431,7 +431,7 @@ def _compute_horizontal_borders(cfg: CN, warped: np.ndarray, mask: np.ndarray, s
     G_y[~mask] = 0
 
     def get_nonmax_supressed(y):
-        y = (y * scale[1]).astype(np.int)
+        y = (y * scale[1]).astype(np.int32)
         thresh = cfg.BORDER_REFINEMENT.LINE_WIDTH // 2
         return G_y[y-thresh:y+thresh+1].max(axis=0)
 
diff --git a/chesscog/occupancy_classifier/create_dataset.py b/chesscog/occupancy_classifier/create_dataset.py
index e46a0791..f2b6791b 100644
--- a/chesscog/occupancy_classifier/create_dataset.py
+++ b/chesscog/occupancy_classifier/create_dataset.py
@@ -73,7 +73,7 @@ def warp_chessboard_image(img: np.ndarray, corners: np.ndarray) -> np.ndarray:
         [BOARD_SIZE + SQUARE_SIZE, BOARD_SIZE + \
             SQUARE_SIZE],  # bottom right
         [SQUARE_SIZE, BOARD_SIZE + SQUARE_SIZE]  # bottom left
-    ], dtype=np.float)
+    ], dtype=np.float32)
     transformation_matrix, mask = cv2.findHomography(src_points, dst_points)
     return cv2.warpPerspective(img, transformation_matrix, (IMG_SIZE, IMG_SIZE))
 
@@ -84,7 +84,7 @@ def _extract_squares_from_sample(id: str, subset: str = "", input_dir: Path = RE
     with (input_dir / subset / (id + ".json")).open("r") as f:
         label = json.load(f)
 
-    corners = np.array(label["corners"], dtype=np.float)
+    corners = np.array(label["corners"], dtype=np.float32)
     unwarped = warp_chessboard_image(img, corners)
 
     board = chess.Board(label["fen"])
diff --git a/chesscog/piece_classifier/create_dataset.py b/chesscog/piece_classifier/create_dataset.py
index 792b25b1..06a0baf5 100644
--- a/chesscog/piece_classifier/create_dataset.py
+++ b/chesscog/piece_classifier/create_dataset.py
@@ -100,7 +100,7 @@ def warp_chessboard_image(img: np.ndarray, corners: np.ndarray) -> np.ndarray:
         [BOARD_SIZE + MARGIN, \
            BOARD_SIZE + MARGIN],  # bottom right
         [MARGIN, BOARD_SIZE + MARGIN]  # bottom left
-    ], dtype=np.float)
+    ], dtype=np.float32)
     transformation_matrix, mask = cv2.findHomography(src_points, dst_points)
     return cv2.warpPerspective(img, transformation_matrix, (IMG_SIZE, IMG_SIZE))
 
@@ -111,7 +111,7 @@ def _extract_squares_from_sample(id: str, subset: str = "", input_dir: Path = RE
    with (input_dir / subset / (id + ".json")).open("r") as f:
        label = json.load(f)
 
-    corners = np.array(label["corners"], dtype=np.float)
+    corners = np.array(label["corners"], dtype=np.float32)
     unwarped = warp_chessboard_image(img, corners)
 
     board = chess.Board(label["fen"])
diff --git a/chesscog/report/prepare_confusion_matrix.py b/chesscog/report/prepare_confusion_matrix.py
index 0bdf5ccc..78cad134 100644
--- a/chesscog/report/prepare_confusion_matrix.py
+++ b/chesscog/report/prepare_confusion_matrix.py
@@ -54,7 +54,7 @@ def _get_category(piece: typing.Union[chess.Piece, None]) -> str:
 
 
 def _get_confusion_matrix(predicted: chess.Board, actual: chess.Board) -> np.ndarray:
-    matrix = np.zeros((len(CATEGORIES), len(CATEGORIES)), dtype=np.int)
+    matrix = np.zeros((len(CATEGORIES), len(CATEGORIES)), dtype=np.int32)
     for square in chess.SQUARES:
         pred = _get_category(predicted.piece_at(square))
         act = _get_category(actual.piece_at(square))
@@ -81,7 +81,7 @@ def _get_confusion_matrix(predicted: chess.Board, actual: chess.Board) -> np.nda
     # Filter out samples where the corners could not be detected
     df = df[(df["num_incorrect_corners"] != 4) | (df["error"] != "None")]
 
-    matrix = np.zeros((len(CATEGORIES), len(CATEGORIES)), dtype=np.int)
+    matrix = np.zeros((len(CATEGORIES), len(CATEGORIES)), dtype=np.int32)
     for i, row in df.iterrows():
         actual = chess.Board(row.fen_actual)
         predicted = chess.Board(row.fen_predicted)
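
Note (not part of the patch): a minimal sketch illustrating the dtype migration this patch performs. The aliases np.int and np.float were deprecated in NumPy 1.20 and removed in NumPy 1.24, so the old calls fail on current NumPy. The array values below are made up for illustration and are not taken from the repository.

    import numpy as np

    # np.float is gone; np.float32 gives a concrete 32-bit float dtype for corner points.
    corners = np.array([[0.0, 0.0], [1.0, 0.0], [1.0, 1.0], [0.0, 1.0]], dtype=np.float32)

    # np.int is gone; np.int32 yields integers suitable as warp target dimensions.
    dims = tuple(np.array([1200.0, 800.0]).astype(np.int32))

    print(corners.dtype, dims)  # float32 (1200, 800)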