Skip to content

Commit

Permalink
Merge pull request #6 from illini-robomaster/dev
Browse files Browse the repository at this point in the history
Vision v0.1.0 Release
  • Loading branch information
RogerQi authored Jun 15, 2023
2 parents 40d38fe + 9696c67 commit 865032d
Show file tree
Hide file tree
Showing 28 changed files with 4,880 additions and 451 deletions.
25 changes: 25 additions & 0 deletions .github/workflows/formatting.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
# CI style gate: runs pycodestyle and pydocstyle on every push,
# across all supported Python versions.
name: formatting_check

on: [push]

jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        # Keep in sync with the interpreter versions the project supports.
        python-version: ["3.8", "3.9", "3.10"]
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v3
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install pycodestyle
          pip install pydocstyle
      - name: Analysing the code with pycodestyle and pydocstyle
        run: |
          # mvsdk.py is vendored third-party code, so it is excluded from both checks.
          pycodestyle --max-line-length=100 --exclude="Camera/mvsdk.py" ./
          # Negative lookaheads skip test modules and the vendored SDK.
          pydocstyle --match='(?!test_)(?!mvsdk).*\.py' ./
196 changes: 142 additions & 54 deletions Aiming/Aim.py
Original file line number Diff line number Diff line change
@@ -1,45 +1,140 @@
"""Hosts the Aim class, which is the main class for auto-aiming."""
from .Tracking import basic_tracker
from .DistEst import pnp_estimator
from .TrajModel import *
import numpy as np
import config
import Utils

from .Tracking import basic_tracker
from IPython import embed


class Aim:
"""
The main class for auto-aiming.
Its workflow:
1. takes in a list of predictions from the detection module
2. apply tracker to fill in missing predictions / append or associate predictions
3. It selects a single armor to hit
4. It computes the yaw/pitch difference to the armor
5. Finally, apply posterior calibration (e.g., range table to hit far targets)
"""

def __init__(self, config):
    """Initialize the Aim pipeline from a shared config.

    Args:
        config (python object): shared config node; stored on the instance
            and forwarded to the tracker and distance estimator.
    """
    self.CFG = config
    # Tracker associates detections across frames / fills in misses.
    self.tracker = basic_tracker(self.CFG)
    # PnP-based estimator converts armor corner points into distances.
    self.distance_estimator = pnp_estimator(self.CFG)

def preprocess(self, pred_list, stm32_state_dict, rgb_img):
    """Preprocess predictions to compute armor distances and absolute yaw/pitch.

    Converts each detection's image-space center into gimbal-relative
    angles, then offsets by the current gimbal pose from the STM32 to
    obtain absolute angles, and estimates distance via PnP.

    Args:
        pred_list (list): a list of predictions; each entry unpacks as
            (armor_name, conf, armor_type, bbox, armor).
        stm32_state_dict (dict): STM32 state; 'cur_yaw' and 'cur_pitch'
            are read here (radians assumed — TODO confirm units).
        rgb_img (np.ndarray): RGB frame, forwarded to the distance estimator.

    Returns:
        list: tuples of (armor_type, abs_yaw, abs_pitch, y_distance, z_distance)
    """
    gimbal_yaw = stm32_state_dict['cur_yaw']
    gimbal_pitch = stm32_state_dict['cur_pitch']

    ret_list = []

    for pred in pred_list:
        # w, h of the bbox are unpacked but unused here.
        armor_name, conf, armor_type, bbox, armor = pred
        c_x, c_y, w, h = bbox
        # Angles relative to the optical axis, from the bbox center.
        rel_yaw, rel_pitch = self.get_rotation_angle(c_x, c_y)
        abs_yaw = gimbal_yaw + rel_yaw
        abs_pitch = gimbal_pitch + rel_pitch
        y_distance, z_distance = self.distance_estimator.estimate_distance(armor, rgb_img)

        ret_list.append((armor_type, abs_yaw, abs_pitch, y_distance, z_distance))

    return ret_list

def pick_target(self, distance_angle_list, stm32_state_dict):
    """Select a prioritized target from a list of tracked targets.

    The chosen target is the one requiring the smallest total gimbal
    rotation (pitch difference plus yaw difference) from the current pose.

    Args:
        distance_angle_list (list): targets returned from the tracker, each
            unpacking as (y_dist, z_dist, predicted_pitch, predicted_yaw)
        stm32_state_dict (dict): STM32 state; 'cur_pitch'/'cur_yaw' are read

    Returns:
        tuple: (target_pitch, target_yaw, target_y_dist, target_z_dist);
            all four are None when the input list is empty.
    """
    cur_pitch = stm32_state_dict['cur_pitch']
    cur_yaw = stm32_state_dict['cur_yaw']

    best = (None, None, None, None)
    best_score = 9999  # sentinel larger than any realistic angle sum

    for y_dist, z_dist, pred_pitch, pred_yaw in distance_angle_list:
        score = (Utils.get_radian_diff(pred_pitch, cur_pitch)
                 + Utils.get_radian_diff(pred_yaw, cur_yaw))
        # Strict '<' keeps the earliest candidate on ties.
        if score < best_score:
            best_score = score
            best = (pred_pitch, pred_yaw, y_dist, z_dist)

    return best

def process_one(self, pred_list, enemy_team, rgb_img, stm32_state_dict):
"""Process one frame of predictions.
Args:
pred_list (list): a list of predictions
enemy_team (str): 'blue' or 'red'
rgb_img (np.ndarray): RGB image
stm32_state_dict (dict): a dictionary of stm32 state
Returns:
dict: a dictionary of results
"""
assert enemy_team in ['blue', 'red']

# TODO: use assertion to check enemy_team
observed_armors = self.preprocess(pred_list, stm32_state_dict, rgb_img)

final_bbox_list, final_id_list = self.tracker.process_one(pred_list, enemy_team, rgb_img)
distance_angle_list, final_id_list = self.tracker.process_one(
observed_armors, enemy_team, rgb_img)

# TODO: integrate this into tracking for consistent tracking
closet_pred, closet_dist = self.get_closet_pred(final_bbox_list, rgb_img)
target_dist_angle_tuple = self.pick_target(distance_angle_list, stm32_state_dict)

if closet_pred is None:
if target_dist_angle_tuple[0] is None:
return None
center_x, center_y, width, height = closet_pred

# Get yaw/pitch differences in radians
yaw_diff, pitch_diff = self.get_rotation_angle(center_x, center_y)
target_pitch, target_yaw, target_y_dist, target_z_dist = target_dist_angle_tuple

calibrated_yaw_diff, calibrated_pitch_diff = self.posterior_calibration(yaw_diff, pitch_diff, closet_dist)
calibrated_pitch, calibrated_yaw = self.posterior_calibration(
target_pitch, target_yaw, target_y_dist, target_z_dist)

return {
'yaw_diff': calibrated_yaw_diff,
'pitch_diff': calibrated_pitch_diff,
'center_x': center_x,
'center_y': center_y,
'final_bbox_list': final_bbox_list,
'final_id_list': final_id_list,
# 'yaw_diff': calibrated_yaw_diff,
# 'pitch_diff': calibrated_pitch_diff,
'abs_yaw': calibrated_yaw,
'abs_pitch': calibrated_pitch,
# 'center_x': center_x,
# 'center_y': center_y,
# 'final_bbox_list': final_bbox_list,
# 'final_id_list': final_id_list,
}

def posterior_calibration(self, yaw_diff, pitch_diff, distance):
"""Given a set of naively estimated parameters, return calibrated parameters
from a range table?

def posterior_calibration(self, raw_pitch, raw_yaw, target_y_dist, target_z_dist):
"""Given a set of naively estimated parameters, return calibrated parameters.
Idea:
Use a range table?
Args:
yaw_diff (float): yaw difference in radians
Expand All @@ -49,38 +144,31 @@ def posterior_calibration(self, yaw_diff, pitch_diff, distance):
Returns:
(float, float): calibrated yaw_diff, pitch_diff in radians
"""
if distance >= 2**16:
# In this case, distance is a rough estimation from bbox size
return (yaw_diff, pitch_diff)
else:
# In this case, distance comes from D455 stereo estimation
# TODO: compute a range table
return (yaw_diff, pitch_diff)

def get_closet_pred(self, bbox_list, rgb_img):
'''Get the closet prediction to camera focal point'''
# TODO: instead of camera focal point; calibrate closet pred to operator view
H, W, C = rgb_img.shape
focal_y = H / 2
focal_x = W / 2
closet_pred = None
closet_dist = None # Cloest to camera in z-axis
closet_dist = 99999999
for bbox in bbox_list:
center_x, center_y, width, height = bbox
cur_dist = (center_x - focal_x)**2 + (center_y - focal_y)**2
if closet_pred is None:
closet_pred = bbox
closet_dist = cur_dist
else:
if cur_dist < closet_dist:
closet_pred = bbox
closet_dist = cur_dist
return closet_pred, closet_dist

@staticmethod
def get_rotation_angle(bbox_center_x, bbox_center_y):
yaw_diff = (bbox_center_x - config.IMG_CENTER_X) * (config.AUTOAIM_CAMERA.YAW_FOV_HALF / config.IMG_CENTER_X)
pitch_diff = (bbox_center_y - config.IMG_CENTER_Y) * (config.AUTOAIM_CAMERA.PITCH_FOV_HALF / config.IMG_CENTER_Y)
target_pitch = raw_pitch
target_yaw = raw_yaw

# Gravity calibration
pitch_diff = calibrate_pitch_gravity(self.CFG, target_z_dist, target_y_dist)
target_pitch -= pitch_diff

return (target_pitch, target_yaw)

def get_rotation_angle(self, bbox_center_x, bbox_center_y):
    """Given a bounding box center, return the yaw/pitch difference in radians.

    Maps pixel offsets from the image center linearly onto the camera's
    half field-of-view angles.

    Args:
        bbox_center_x (float): x coordinate of the center of the bounding box
        bbox_center_y (float): y coordinate of the center of the bounding box

    Returns:
        (float, float): yaw_diff, pitch_diff in radians
    """
    cfg = self.CFG
    dx = bbox_center_x - cfg.IMG_CENTER_X
    dy = bbox_center_y - cfg.IMG_CENTER_Y

    # Radians per pixel, derived from the half-FOV over the half-image size.
    yaw_scale = cfg.AUTOAIM_CAMERA.YAW_FOV_HALF / cfg.IMG_CENTER_X
    pitch_scale = cfg.AUTOAIM_CAMERA.PITCH_FOV_HALF / cfg.IMG_CENTER_Y

    # Sign conventions: counter-clockwise yaw is positive, downward pitch
    # is positive.
    return -(dx * yaw_scale), dy * pitch_scale
3 changes: 3 additions & 0 deletions Aiming/DistEst/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
"""Aggregate all distance estimators into one module."""

from .pnp_solver import pnp_estimator
54 changes: 54 additions & 0 deletions Aiming/DistEst/pnp_solver.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
"""Simple distance solver using PnP."""
import numpy as np
import cv2
import Utils


class pnp_estimator:
    """Distance estimator using PnP (Perspective-n-Point)."""

    def __init__(self, cfg):
        """Initialize the PnP solver.

        Args:
            cfg (object): python config node object; used to derive the
                camera intrinsic matrix.
        """
        self.cfg = cfg
        # Camera intrinsic matrix K, built from the config by a project helper.
        self.K = Utils.get_intrinsic_matrix(self.cfg)

        # 3D armor board corner coordinates centered at armor board center.
        # The unit is in mm (millimeter). Order must match obj_2d_pts in
        # estimate_distance: left-top, left-bottom, right-top, right-bottom.
        # NOTE(review): width half-extent 65 mm and height 125/4 mm look like
        # the small-armor light-bar geometry — confirm against the rulebook.
        self.armor_3d_pts = np.array([
            [-65, -125 / 4, 0],  # left top
            [-65, 125 / 4, 0],
            [65, -125 / 4, 0],
            [65, 125 / 4, 0]
        ]).reshape((4, 3, 1))

    def estimate_distance(self, armor, img_rgb):
        """Estimate the distance to the armor.

        Args:
            armor (armor): selected armor object; its light-bar endpoints
                (left_light/right_light .top/.btm) are used as image points.
            img_rgb (np.array): RGB image of the frame (currently unused here).

        Returns:
            (y_dist, z_dist): distance along y-axis (pitch) and z-axis
                (distance from camera). y_dist is currently hard-coded to 0
                (see the commented-out tvec return below); z_dist is the
                Euclidean norm of the translation, converted from mm to meters.
        """
        # Image-space corners in the same order as self.armor_3d_pts.
        obj_2d_pts = np.array([
            armor.left_light.top.astype(int),
            armor.left_light.btm.astype(int),
            armor.right_light.top.astype(int),
            armor.right_light.btm.astype(int),
        ]).reshape((4, 2, 1))

        # retval (solver success flag) is currently ignored — on failure
        # tvec may be meaningless; TODO consider checking it.
        retval, rvec, tvec = cv2.solvePnP(self.armor_3d_pts.astype(float),
                                          obj_2d_pts.astype(float),
                                          self.K,
                                          distCoeffs=None)

        # rot_mat, _ = cv2.Rodrigues(rvec)
        # Total distance as the norm of the translation vector (in mm).
        abs_dist = np.sqrt(np.sum(tvec**2))

        # return -tvec[1], tvec[2]
        return 0, abs_dist / 1000.0
Loading

0 comments on commit 865032d

Please sign in to comment.