From 30a7816c458d36dd9cb9ccf5f66112aba74305df Mon Sep 17 00:00:00 2001 From: Yihong Jian <55000997+yihjian@users.noreply.github.com> Date: Sat, 15 Jan 2022 16:00:50 -0600 Subject: [PATCH 01/74] Create pylint.yml --- .github/workflows/pylint.yml | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 .github/workflows/pylint.yml diff --git a/.github/workflows/pylint.yml b/.github/workflows/pylint.yml new file mode 100644 index 0000000..10c49c6 --- /dev/null +++ b/.github/workflows/pylint.yml @@ -0,0 +1,23 @@ +name: Pylint + +on: [push] + +jobs: + build: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.8", "3.9", "3.10"] + steps: + - uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install pylint + - name: Analysing the code with pylint + run: | + pylint `ls -R|grep .py$|xargs` From ce005cf05d033a74b747c04da82ff1aeefc8df44 Mon Sep 17 00:00:00 2001 From: Yihong Jian <55000997+yihjian@users.noreply.github.com> Date: Sat, 15 Jan 2022 16:04:51 -0600 Subject: [PATCH 02/74] Update requirements.txt --- requirements.txt | 2 ++ 1 file changed, 2 insertions(+) diff --git a/requirements.txt b/requirements.txt index e69de29..0ea69e1 100644 --- a/requirements.txt +++ b/requirements.txt @@ -0,0 +1,2 @@ +opencv-python +pylint From 1603d7858f7d0cfb1cb439a8af63400765ce05b5 Mon Sep 17 00:00:00 2001 From: Yihong Jian <55000997+yihjian@users.noreply.github.com> Date: Sat, 15 Jan 2022 16:05:17 -0600 Subject: [PATCH 03/74] Update pylint.yml --- .github/workflows/pylint.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pylint.yml b/.github/workflows/pylint.yml index 10c49c6..80a2c1a 100644 --- a/.github/workflows/pylint.yml +++ b/.github/workflows/pylint.yml @@ -17,7 +17,7 @@ jobs: - name: Install dependencies 
run: | python -m pip install --upgrade pip - pip install pylint + pip install -r requirments.txt - name: Analysing the code with pylint run: | pylint `ls -R|grep .py$|xargs` From 928c4dbb19dc7c2ed0caf72e447ed86b89e20787 Mon Sep 17 00:00:00 2001 From: Yihong Jian <55000997+yihjian@users.noreply.github.com> Date: Sat, 15 Jan 2022 16:06:24 -0600 Subject: [PATCH 04/74] Update pylint.yml --- .github/workflows/pylint.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pylint.yml b/.github/workflows/pylint.yml index 80a2c1a..a113b15 100644 --- a/.github/workflows/pylint.yml +++ b/.github/workflows/pylint.yml @@ -17,7 +17,7 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - pip install -r requirments.txt + pip install -r requirements.txt - name: Analysing the code with pylint run: | pylint `ls -R|grep .py$|xargs` From 1152c5a087608ef283d2fc69408e76ef7634fb70 Mon Sep 17 00:00:00 2001 From: Yihong Jian <55000997+yihjian@users.noreply.github.com> Date: Sat, 15 Jan 2022 16:17:24 -0600 Subject: [PATCH 05/74] Update pylint.yml --- .github/workflows/pylint.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pylint.yml b/.github/workflows/pylint.yml index a113b15..60877a7 100644 --- a/.github/workflows/pylint.yml +++ b/.github/workflows/pylint.yml @@ -20,4 +20,4 @@ jobs: pip install -r requirements.txt - name: Analysing the code with pylint run: | - pylint `ls -R|grep .py$|xargs` + pylint $(find . 
-name "*.py" | xargs) From 304ab01a0691a59334df793762098bff2aa06d8b Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sun, 26 Feb 2023 15:37:08 -0600 Subject: [PATCH 06/74] finished draft documentation for comm protocol implementation --- docs/comm_protocol.md | 74 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 74 insertions(+) create mode 100644 docs/comm_protocol.md diff --git a/docs/comm_protocol.md b/docs/comm_protocol.md new file mode 100644 index 0000000..65afd9e --- /dev/null +++ b/docs/comm_protocol.md @@ -0,0 +1,74 @@ +# iRM Communication Protocol + +This documents the communication protocol design for exchanging information between +onboard Nvidia Jetson computers and the STM32 control board. + +For implementation on the STM32 side, please refer to the example [here](https://github.com/illini-robomaster/iRM_Embedded_2023/tree/main/examples/minipc). + +For implementation on the Jetson side, please refer to the main communicator [here](../Communication/communicator.py). + +# Protocol from Jetson to STM32 + +## Packet Struct + +| Name | Content | Size | +| ------------- | ------------------------- | ------- | +| HEADER | 2 ASCII char | 2 bytes | +| SEQ_NUM | uint32_t counter | 4 bytes | +| REL_YAW | int32_t discretized float | 4 bytes | +| REL_PITCH | int32_t discretized float | 4 bytes | +| CRC_CHECKSUM | uint8_t checksum | 1 bytes | +| PACK_END | 2 ASCII char | 2 bytes | +| TOTAL | | 17 bytes| + +## Detailed explanations + +Note: the memory layout of STM32 is **little-endian**. Hence when integers that require +decoding are sent, they need to be encoded in little endianness. However, for strings +(e.g., the HEADER ASCII and PACK_END ASCII), they do not need to be reversed since array +layouts are not affected by endianness. + +### HEADER +2 ASCII chars. Current options are 'ST' and 'MY', which stands for "Search Target" +and "Move Yoke". 
+- **ST**: The "Search Target" mode means either 1) the AutoAim algorithm is not engaged + or 2) the AutoAim algorithm is engaged but the target is not found. In the former case, the + STM32 board should simply ignore input from the Jetson board. In the latter case, the STM32 + should hold pitch to the horizontal position, and rotate yaw to search for the target. +- **MY**: The "Move Yoke" mode means the AutoAim algorithm is engaged and the target is found. + In this case, the relative pitch/yaw angles are sent to the STM32 board, and the gimbal + should move relatively to the current position. (**NOTE**: filtering to compensate for + latency and smoothness needs to be implemented on the STM32 side due to real-time access + to gimbal data.) + +### SEQ_NUM + +uint32_t counter. This is a counter that is incremented by 1 every time a packet is sent. + +### REL_YAW + +int32_t discretized float. This is the relative yaw angle to the current yaw angle. It is +discretized by multiplying the float by 1e+6 and then casting to int32_t. For example, if the +relative yaw angle is 0.123456, then the int32_t value should be 123456. + +### REL_PITCH + +int32_t discretized float. This is the relative pitch angle to the current pitch angle. +It is discretized by multiplying the float by 1e+6 and then casting to int32_t. For example, if +the relative pitch angle is 1, then the int32_t value should be 1000000. + +### CRC_CHECKSUM + +uint8_t checksum. This is the CRC checksum of the packet. The CRC standard used +is the MAXIM_DOW standard. The CRC checksum is calculated on the packet contents BEFORE the +PACK_END (i.e., CRC is computed for the first 14 bytes up to end to REL_PITCH). + +### PACK_END + +2 ASCII chars. This is the end of the packet denoted by ASCII characters 'ED'. + +# Protocol from STM32 to Jetson + +TODO: to be implemented. The first implementation should tell Jetson what is the color of +the armor of our own robots. 
However it should be implemented in a generic manner so that +subsequent implementations can easily add other useful information. From 3216ab64f0c9d2b68e5710b8dfd0c62a0afb3747 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sat, 4 Mar 2023 17:29:37 -0600 Subject: [PATCH 07/74] implemented and integrated CRC-verified communicator; grossly tested --- Communication/communicator.py | 233 +++++++++++++++++++++------------- config.py | 1 + vision.py | 67 ++-------- 3 files changed, 158 insertions(+), 143 deletions(-) diff --git a/Communication/communicator.py b/Communication/communicator.py index e816684..1d028fb 100644 --- a/Communication/communicator.py +++ b/Communication/communicator.py @@ -1,88 +1,147 @@ -import time +import os import serial -import crc8 -import crc16 - -import config - -# USB is for TTL-only device -USB_PREFIX = "/dev/ttyUSB" -# ACM is for st-link/TTL device -ACM_PREFIX = "/dev/ttyACM" - -success_flag = False - -for prefix in [USB_PREFIX, ACM_PREFIX]: - if success_flag: break - for i in range(5): - if success_flag: break - try: - serial_port = serial.Serial( - port=prefix + str(i), - baudrate=115200, - bytesize=serial.EIGHTBITS, - parity=serial.PARITY_NONE, - stopbits=serial.STOPBITS_ONE, - ) - success_flag = True - break # if succeed, break - except serial.serialutil.SerialException: - serial_port = None - -# Wait a second to let the port initialize -time.sleep(1) - -def create_packet(header, seq_num, yaw_offset, pitch_offset): - assert header in [config.SEARCH_TARGET, config.MOVE_YOKE] - packet = header - assert seq_num >= 0 and seq_num < 2**32 - 1 # uint32 - packet += seq_num.to_bytes(4, 'big') - # YAW/PITCH offset should not be too high - assert yaw_offset >= -config.AUTOAIM_CAMERA.YAW_FOV_HALF and yaw_offset <= config.AUTOAIM_CAMERA.YAW_FOV_HALF - assert pitch_offset >= -config.AUTOAIM_CAMERA.PITCH_FOV_HALF and pitch_offset <= config.AUTOAIM_CAMERA.PITCH_FOV_HALF - discrete_yaw_offset = int(yaw_offset * 100000) - discrete_pitch_offset = 
int(pitch_offset * 100000) - packet += (discrete_yaw_offset & 0xFFFFFFFF).to_bytes(4, 'big') - packet += (discrete_pitch_offset & 0xFFFFFFFF).to_bytes(4, 'big') - # ENDING - packet += config.PACK_END - return packet - -def create_packet_w_crc(cmd_id, data, seq): - ''' - Args: - cmd_id: bytes, ID for command to send, see "variables" for different cmd - data: bytes, data to send - seq: int, n-th packet to send - Return: - Bytes of encoded package with format: - SOF(1 byte) data_len(2 bytes) seq (1 bytes) crc8 (1 bytes) data (x bytes) crc16 (2 bytes) - ''' - # header - SOF = b'\xa5' - data_len = len(data).to_bytes(2,'big') - seq = seq.to_bytes(1,'big') - hash = crc8.crc8() #crc8 - hash.update(SOF+data_len+seq) - crc_header = hash.digest() - - #tail - crc_data = crc16.crc16xmodem(data).to_bytes(2,'big') - - pkt = SOF+data_len+seq+crc_header+cmd_id+data+crc_data - return pkt - -# Testing code -if __name__ =='__main__': - assert serial_port is not None, "No serial device found; check root priviledge and USB devices" - try: - cmd_id = b'\xde\xad' - data = 0xffff*b'\xAA' - pkt = create_packet(cmd_id,data,0) - serial_port.write(pkt) - while True: - if serial_port.inWaiting() > 0: - data = serial_port.read() - print(data) - except: - print("Falied to write") +import crc +import threading + +class UARTCommunicator(object): + def __init__(self, cfg, crc_standard=crc.Crc8.MAXIM_DOW, endianness='little', buffer_size=10): + self.cfg = cfg + self.crc_standard = crc_standard + self.endianness = endianness + + self.crc_calculator = crc.Calculator(self.crc_standard, optimized=True) + + self.serial_port = self.guess_uart_device_() + + self.circular_buffer = [] + self.buffer_size = buffer_size + + self.enemy_color = self.cfg.DEFAULT_ENEMY_TEAM + self.stm32_color = 'red' if self.enemy_color == 'blue' else 'blue' + + self.seq_num = 0 + + def is_valid(self): + return self.serial_port is not None + + def try_read_one(self): + # Read from serial port, if any packet is waiting + if 
self.serial_port is not None: + if (self.serial_port.inWaiting() > 0): + # read the bytes and convert from binary array to ASCII + byte_array = self.serial_port.read(self.serial_port.inWaiting()) + + for c in byte_array: + if len(self.circular_buffer) >= self.buffer_size: + self.circular_buffer = self.circular_buffer[1:] # pop first element + self.circular_buffer.append(c) + + def process_one_packet(self, header, yaw_offset, pitch_offset): + packet = self.create_packet(header, yaw_offset, pitch_offset) + self.send_packet(packet) + + def send_packet(self, packet): + if self.serial_port is not None: + self.serial_port.write(packet) + + @staticmethod + def guess_uart_device_(): + # This function is for UNIX-like systems only! + + # OSX prefix: "tty.usbmodem" + # Jetson / Linux prefix: "ttyUSB", "ttyACM" + UART_PREFIX_LIST = ("tty.usbmodem", "ttyUSB", "ttyACM") + + dev_list = os.listdir("/dev") + + serial_port = None # ret val + + for dev_name in dev_list: + if dev_name.startswith(UART_PREFIX_LIST): + try: + dev_path = os.path.join("/dev", dev_name) + serial_port = serial.Serial( + port=dev_path, + baudrate=115200, + bytesize=serial.EIGHTBITS, + parity=serial.PARITY_NONE, + stopbits=serial.STOPBITS_ONE, + ) + except serial.serialutil.SerialException: + serial_port = None + + if serial_port is not None: + return serial_port + + print("NO SERIAL DEVICE FOUND! WRITING TO VACCUM!") + + return serial_port + + def create_packet(self, header, yaw_offset, pitch_offset): + """ + Packet struct + + Big endian + + HEADER (2 bytes chars) + SEQNUM (4 bytes uint32; wrap around) + REL_YAW (4 bytes int32; radians * 1000000/1e+6) + REL_PITCH (4 bytes int32; radians * 1000000/1e+6) + CRC8 (1 byte uint8; CRC checksum MAXIM_DOW of contents BEFORE CRC) + (i.e., CRC does not include itself and PACK_END!) 
+ PACK_END (2 bytes chars) + + Total (17 bytes) + """ + assert header in [self.cfg.SEARCH_TARGET, self.cfg.MOVE_YOKE] + packet = header + assert isinstance(self.seq_num, int) and self.seq_num >= 0 + if self.seq_num >= 2 ** 32: + self.seq_num = self.seq_num % (2 ** 32) + packet += self.seq_num.to_bytes(4, self.endianness) + + discrete_yaw_offset = int(yaw_offset * 1e+6) + discrete_pitch_offset = int(pitch_offset * 1e+6) + + # TODO: add more sanity check here? + packet += (discrete_yaw_offset & 0xFFFFFFFF).to_bytes(4, self.endianness) + packet += (discrete_pitch_offset & 0xFFFFFFFF).to_bytes(4, self.endianness) + + # Compute CRC + crc8_checksum = self.crc_calculator.checksum(packet) + assert crc8_checksum >= 0 and crc8_checksum < 256 + + packet += crc8_checksum.to_bytes(1, self.endianness) + + # ENDING + packet += self.cfg.PACK_END + + return packet + + def get_current_stm32_state(self): + # Decode packet sent from the STM32 controller + # TODO: if a robot is revived, the serial port might get + # garbage value in between... 
+ + # TODO: implement a proper CRC-verified packet decoder + blue_cnt = 0 + red_cnt = 0 + + for l in self.circular_buffer: + if l == ord('R'): red_cnt += 1 + if l == ord('B'): blue_cnt += 1 + + if blue_cnt > red_cnt: + self.stm32_color = 'blue' + self.enemy_color = 'red' + + if red_cnt > blue_cnt: + self.stm32_color = 'red' + self.enemy_color = 'blue' + + ret_dict = { + 'my_color': self.stm32_color, + 'enemy_color': self.enemy_color, + } + + return ret_dict diff --git a/config.py b/config.py index ad4a3c4..d8c611e 100644 --- a/config.py +++ b/config.py @@ -22,3 +22,4 @@ IMG_CENTER_Y = IMG_HEIGHT // 2 DEBUG_DISPLAY = False +DEFAULT_ENEMY_TEAM = 'red' diff --git a/vision.py b/vision.py index 354c61e..ebfedf6 100644 --- a/vision.py +++ b/vision.py @@ -1,73 +1,33 @@ import time import cv2 from Aiming.Aim import Aim -from Communication.communicator import serial_port, create_packet # from Detection.YOLO import Yolo from Detection.CV_mix_DL import cv_mix_dl_detector +from Communication.communicator import UARTCommunicator import config -DEFAULT_ENEMY_TEAM = 'red' - -class serial_circular_buffer: - def __init__(self, buffer_size=10): - self.buffer = [] - self.buffer_size = buffer_size - self.default_color = DEFAULT_ENEMY_TEAM - - def receive(self, byte_array): - for c in byte_array: - if len(self.buffer) >= self.buffer_size: - self.buffer = self.buffer[1:] # pop first element - self.buffer.append(c) - - def get_enemy_color(self): - # TODO: if a robot is revived, the serial port might get - # garbage value in between... 
- blue_cnt = 0 - red_cnt = 0 - - for l in self.buffer: - if l == ord('R'): red_cnt += 1 - if l == ord('B'): blue_cnt += 1 - - if blue_cnt > red_cnt: - self.default_color = 'blue' - return 'blue' - - if red_cnt > blue_cnt: - self.default_color = 'red' - return 'red' - - return self.default_color - def main(): - model = cv_mix_dl_detector(config, DEFAULT_ENEMY_TEAM) + model = cv_mix_dl_detector(config, config.DEFAULT_ENEMY_TEAM) # model = Yolo(config.MODEL_CFG_PATH, config.WEIGHT_PATH, config.META_PATH) aimer = Aim(config) - communicator = serial_port - pkt_seq = 0 - autoaim_camera = config.AUTOAIM_CAMERA(config.IMG_WIDTH, config.IMG_HEIGHT) + communicator = UARTCommunicator(config) - # color buffer which retrieves enemy color from STM32 - my_color_buffer = serial_circular_buffer() + autoaim_camera = config.AUTOAIM_CAMERA(config.IMG_WIDTH, config.IMG_HEIGHT) - if communicator is None: + if communicator.is_valid(): + print("OPENED SERIAL DEVICE AT: " + communicator.serial_port.name) + else: print("SERIAL DEVICE IS NOT AVAILABLE!!!") while True: start = time.time() frame = autoaim_camera.get_frame() - - if communicator is not None: - if (communicator.inWaiting() > 0): - # read the bytes and convert from binary array to ASCII - byte_array = communicator.read(communicator.inWaiting()) - my_color_buffer.receive(byte_array) # TODO: add a global reset function if enemy functions change # (e.g., clear the buffer in the armor tracker) - enemy_team = my_color_buffer.get_enemy_color() + stm32_state_dict = communicator.get_current_stm32_state() + enemy_team = stm32_state_dict['enemy_color'] model.change_color(enemy_team) pred = model.detect(frame) @@ -119,14 +79,14 @@ def main(): show_frame = frame.copy() if ret_dict: - packet = create_packet(config.MOVE_YOKE, pkt_seq, ret_dict['yaw_diff'], ret_dict['pitch_diff']) + communicator.process_one_packet(config.MOVE_YOKE, ret_dict['yaw_diff'], ret_dict['pitch_diff']) show_frame = cv2.circle(show_frame, (int(ret_dict['center_x']), 
int(ret_dict['center_y'])), 10, (0, 255, 0), 10) else: show_frame = cv2.putText(show_frame, 'NOT FOUND', (50, 50), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 0), 2) - packet = create_packet(config.SEARCH_TARGET, pkt_seq, 0, 0) + communicator.process_one_packet(config.SEARCH_TARGET, 0, 0) if config.DEBUG_DISPLAY: print('----------------\n',pred) @@ -135,10 +95,5 @@ def main(): if cv2.waitKey(1) & 0xFF == ord('q'): exit(0) - if communicator is not None: - communicator.write(packet) - - pkt_seq += 1 - if __name__ == "__main__": main() From 9791496d67807f5b92e7a79ebe6d154b53d53207 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sun, 5 Mar 2023 02:00:27 -0600 Subject: [PATCH 08/74] added code for stress testing STM32 --- Communication/communicator.py | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/Communication/communicator.py b/Communication/communicator.py index 1d028fb..57796a7 100644 --- a/Communication/communicator.py +++ b/Communication/communicator.py @@ -1,6 +1,7 @@ import os import serial import crc +import time import threading class UARTCommunicator(object): @@ -98,7 +99,7 @@ def create_packet(self, header, yaw_offset, pitch_offset): assert isinstance(self.seq_num, int) and self.seq_num >= 0 if self.seq_num >= 2 ** 32: self.seq_num = self.seq_num % (2 ** 32) - packet += self.seq_num.to_bytes(4, self.endianness) + packet += (self.seq_num & 0xFFFFFFFF).to_bytes(4, self.endianness) discrete_yaw_offset = int(yaw_offset * 1e+6) discrete_pitch_offset = int(pitch_offset * 1e+6) @@ -116,6 +117,8 @@ def create_packet(self, header, yaw_offset, pitch_offset): # ENDING packet += self.cfg.PACK_END + self.seq_num += 1 + return packet def get_current_stm32_state(self): @@ -145,3 +148,14 @@ def get_current_stm32_state(self): } return ret_dict + +if __name__ == '__main__': + import sys + import os + # setting path + sys.path.append(os.path.join(os.path.dirname(__file__), '..')) + import config + uart = UARTCommunicator(config) + for i in 
range(1000): + time.sleep(0.005) # simulate 200Hz + uart.process_one_packet(config.SEARCH_TARGET, 0.0, 0.0) From 6b1e640b4c3b408b7e4b5c34c1ee35643ab8d208 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Mon, 6 Mar 2023 11:34:01 -0600 Subject: [PATCH 09/74] remove COMM TODO from README --- README.md | 1 - 1 file changed, 1 deletion(-) diff --git a/README.md b/README.md index c3b76c9..578b90b 100644 --- a/README.md +++ b/README.md @@ -19,7 +19,6 @@ Please follow instruction from the RMCV101 repo [here](https://github.com/illini - Implement a more robust tracker (e.g., EKF / Kalman) - Implement depth estimators (either use D455 or estimate from monocular cameras) - Optimize code efficiency on Jetson -- Implement communication protocol and test with embedded team - Implememt MDVS camera driver in Python ## File Structure From 72f84ebf2e93fd35595b41cdf4a25c26729f1ab4 Mon Sep 17 00:00:00 2001 From: Roger Q Date: Sat, 11 Mar 2023 03:42:24 -0600 Subject: [PATCH 10/74] Update pylint.yml Follow github official pylint example --- .github/workflows/pylint.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/pylint.yml b/.github/workflows/pylint.yml index 60877a7..383e65c 100644 --- a/.github/workflows/pylint.yml +++ b/.github/workflows/pylint.yml @@ -9,15 +9,15 @@ jobs: matrix: python-version: ["3.8", "3.9", "3.10"] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v3 with: python-version: ${{ matrix.python-version }} - name: Install dependencies run: | python -m pip install --upgrade pip - pip install -r requirements.txt + pip install pylint - name: Analysing the code with pylint run: | - pylint $(find . 
-name "*.py" | xargs) + pylint $(git ls-files '*.py') From 2dae433f7c8791ac96284a1dc2be80237fe8ed80 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sat, 11 Mar 2023 03:49:31 -0600 Subject: [PATCH 11/74] run autopep8 for all files --- Aiming/Aim.py | 22 +-- Aiming/Tracking/basic_tracker.py | 38 +++--- Aiming/Tracking/consistent_id_gen.py | 1 + Camera/read_from_file.py | 12 +- Camera/shl_200ws.py | 3 +- Communication/communicator.py | 60 ++++---- Detection/CV_mix_DL.py | 197 ++++++++++++++++----------- Detection/YOLO.py | 22 ++- Utils/__init__.py | 2 + config.py | 6 +- vision.py | 26 ++-- 11 files changed, 231 insertions(+), 158 deletions(-) diff --git a/Aiming/Aim.py b/Aiming/Aim.py index ae981cc..0650d53 100644 --- a/Aiming/Aim.py +++ b/Aiming/Aim.py @@ -4,6 +4,7 @@ from .Tracking import basic_tracker + class Aim: def __init__(self, config): self.CFG = config @@ -14,10 +15,12 @@ def process_one(self, pred_list, enemy_team, rgb_img): # TODO: use assertion to check enemy_team - final_bbox_list, final_id_list = self.tracker.process_one(pred_list, enemy_team, rgb_img) + final_bbox_list, final_id_list = self.tracker.process_one( + pred_list, enemy_team, rgb_img) # TODO: integrate this into tracking for consistent tracking - closet_pred, closet_dist = self.get_closet_pred(final_bbox_list, rgb_img) + closet_pred, closet_dist = self.get_closet_pred( + final_bbox_list, rgb_img) if closet_pred is None: return None @@ -26,7 +29,8 @@ def process_one(self, pred_list, enemy_team, rgb_img): # Get yaw/pitch differences in radians yaw_diff, pitch_diff = self.get_rotation_angle(center_x, center_y) - calibrated_yaw_diff, calibrated_pitch_diff = self.posterior_calibration(yaw_diff, pitch_diff, closet_dist) + calibrated_yaw_diff, calibrated_pitch_diff = self.posterior_calibration( + yaw_diff, pitch_diff, closet_dist) return { 'yaw_diff': calibrated_yaw_diff, @@ -36,7 +40,7 @@ def process_one(self, pred_list, enemy_team, rgb_img): 'final_bbox_list': final_bbox_list, 'final_id_list': 
final_id_list, } - + def posterior_calibration(self, yaw_diff, pitch_diff, distance): """Given a set of naively estimated parameters, return calibrated parameters from a range table? @@ -56,7 +60,7 @@ def posterior_calibration(self, yaw_diff, pitch_diff, distance): # In this case, distance comes from D455 stereo estimation # TODO: compute a range table return (yaw_diff, pitch_diff) - + def get_closet_pred(self, bbox_list, rgb_img): '''Get the closet prediction to camera focal point''' # TODO: instead of camera focal point; calibrate closet pred to operator view @@ -64,7 +68,7 @@ def get_closet_pred(self, bbox_list, rgb_img): focal_y = H / 2 focal_x = W / 2 closet_pred = None - closet_dist = None # Cloest to camera in z-axis + closet_dist = None # Cloest to camera in z-axis closet_dist = 99999999 for bbox in bbox_list: center_x, center_y, width, height = bbox @@ -80,7 +84,9 @@ def get_closet_pred(self, bbox_list, rgb_img): @staticmethod def get_rotation_angle(bbox_center_x, bbox_center_y): - yaw_diff = (bbox_center_x - config.IMG_CENTER_X) * (config.AUTOAIM_CAMERA.YAW_FOV_HALF / config.IMG_CENTER_X) - pitch_diff = (bbox_center_y - config.IMG_CENTER_Y) * (config.AUTOAIM_CAMERA.PITCH_FOV_HALF / config.IMG_CENTER_Y) + yaw_diff = (bbox_center_x - config.IMG_CENTER_X) * \ + (config.AUTOAIM_CAMERA.YAW_FOV_HALF / config.IMG_CENTER_X) + pitch_diff = (bbox_center_y - config.IMG_CENTER_Y) * \ + (config.AUTOAIM_CAMERA.PITCH_FOV_HALF / config.IMG_CENTER_Y) return yaw_diff, pitch_diff diff --git a/Aiming/Tracking/basic_tracker.py b/Aiming/Tracking/basic_tracker.py index 967b1cb..788810f 100644 --- a/Aiming/Tracking/basic_tracker.py +++ b/Aiming/Tracking/basic_tracker.py @@ -9,20 +9,21 @@ # TODO: move this to config FRAME_BUFFER_SIZE = 10 + class tracked_armor(object): def __init__(self, bbox, roi, frame_tick, armor_id): self.bbox_buffer = [bbox] self.roi_buffer = [roi] self.observed_frame_tick = [frame_tick] - self.armor_id = armor_id # unique ID - + self.armor_id = armor_id # 
unique ID + def compute_cost(self, other_armor): assert isinstance(other_armor, tracked_armor) # TODO: use more sophisticated metrics (e.g., RGB) as cost function c_x, c_y, w, h = self.bbox_buffer[-1] o_c_x, o_c_y, o_w, o_h = other_armor.bbox_buffer[-1] return np.square(c_x - o_c_x) + np.square(c_y - o_c_y) - + def update(self, other_armor, frame_tick): # Only call if these two armors are matched self.bbox_buffer.append(other_armor.bbox_buffer[-1]) @@ -32,7 +33,7 @@ def update(self, other_armor, frame_tick): # Maintain each armor's buffer so that anything older than FRAME_BUFFER_SIZE is dropped self.bbox_buffer = self.bbox_buffer[-FRAME_BUFFER_SIZE:] self.roi_buffer = self.roi_buffer[-FRAME_BUFFER_SIZE:] - + def predict_bbox(self, cur_frame_tick): if cur_frame_tick == self.observed_frame_tick[-1] or len(self.bbox_buffer) == 1: return self.bbox_buffer[-1] @@ -40,26 +41,28 @@ def predict_bbox(self, cur_frame_tick): # Linear extrapolation c_x, c_y, w, h = self.bbox_buffer[-1] o_c_x, o_c_y, o_w, o_h = self.bbox_buffer[-2] - delta_tick = self.observed_frame_tick[-1] - self.observed_frame_tick[-2] + delta_tick = self.observed_frame_tick[-1] - \ + self.observed_frame_tick[-2] new_delta_tick = cur_frame_tick - self.observed_frame_tick[-1] delta_x = (c_x - o_c_x) * new_delta_tick / delta_tick delta_y = (c_y - o_c_y) * new_delta_tick / delta_tick return (int(c_x + delta_x), int(c_y + delta_y), w, h) + class basic_tracker(object): ''' Basic tracker that can handle only one target. 
It memorizes the state of last two predictions and do linear extrapolation ''' - SE_THRESHOLD = 3200 # (40, 40) pixels away + SE_THRESHOLD = 3200 # (40, 40) pixels away def __init__(self, config): self.CFG = config self.active_armors = [] self.id_gen = ConsistentIdGenerator() - self.frame_tick = 0 # TODO: use timestamp may be a better idea - + self.frame_tick = 0 # TODO: use timestamp may be a better idea + def process_one(self, pred_list, enemy_team, rgb_img): new_armors = [] for name, conf, bbox in pred_list: @@ -68,24 +71,27 @@ def process_one(self, pred_list, enemy_team, rgb_img): upper_x = int(c_x + w / 2) lower_y = int(c_y - h / 2) upper_y = int(c_y + h / 2) - roi = rgb_img[lower_y:upper_y,lower_x:upper_x] + roi = rgb_img[lower_y:upper_y, lower_x:upper_x] new_armors.append(tracked_armor(bbox, roi, self.frame_tick, -1)) if len(self.active_armors) > 0: # Try to associate with current armors - cost_matrix = np.zeros((len(new_armors), len(self.active_armors)), dtype=float) + cost_matrix = np.zeros( + (len(new_armors), len(self.active_armors)), dtype=float) for i in range(len(new_armors)): for j in range(len(self.active_armors)): cost_ij = new_armors[i].compute_cost(self.active_armors[j]) cost_matrix[i, j] = cost_ij - row_ind, col_ind = scipy.optimize.linear_sum_assignment(cost_matrix) + row_ind, col_ind = scipy.optimize.linear_sum_assignment( + cost_matrix) for i, j in zip(row_ind, col_ind): if cost_matrix[i, j] < self.SE_THRESHOLD: assert new_armors[i] is not None - self.active_armors[j].update(new_armors[i], self.frame_tick) + self.active_armors[j].update( + new_armors[i], self.frame_tick) new_armors[i] = None - + new_armors = [i for i in new_armors if i is not None] for n in new_armors: @@ -93,12 +99,12 @@ def process_one(self, pred_list, enemy_team, rgb_img): # Maintain current buffer. 
If an armor has not been observed by 10 frames, it is dropped self.active_armors = [i for i in self.active_armors - if self.frame_tick - i.observed_frame_tick[-1] < FRAME_BUFFER_SIZE] - + if self.frame_tick - i.observed_frame_tick[-1] < FRAME_BUFFER_SIZE] + # Unassociated armors are registered as new armors for a in new_armors: self.active_armors.append(a) - + # Create a list of bbox and unique IDs to return ret_bbox_list = [] ret_id_list = [] diff --git a/Aiming/Tracking/consistent_id_gen.py b/Aiming/Tracking/consistent_id_gen.py index 3d8db48..c367b90 100644 --- a/Aiming/Tracking/consistent_id_gen.py +++ b/Aiming/Tracking/consistent_id_gen.py @@ -1,5 +1,6 @@ import threading + class ConsistentIdGenerator(object): def __init__(self, lock=False): self.lock_flag = lock diff --git a/Camera/read_from_file.py b/Camera/read_from_file.py index 7ccb0fc..5a30fb3 100644 --- a/Camera/read_from_file.py +++ b/Camera/read_from_file.py @@ -6,6 +6,8 @@ import Utils # TODO: make an abstract camera base class + + class fake_camera(object): # Needs to be calibrated per camera YAW_FOV_HALF = Utils.deg_to_rad(42) / 2 @@ -15,14 +17,14 @@ def __init__(self, width, height): self.width = width self.height = height - assert len(sys.argv) == 2 # main py file; video file + assert len(sys.argv) == 2 # main py file; video file video_path = sys.argv[1] assert video_path.endswith('.mp4') self.cap = cv2.VideoCapture(video_path) assert self.cap.isOpened() - self.alive = True # init to True + self.alive = True # init to True # Timing and frame counter are always in place for devlopment purpose self.timing = None @@ -34,18 +36,18 @@ def get_frame(self): if not self.cap.isOpened(): self.alive = False - + ret, frame = self.cap.read() if not ret: self.alive = False - + if not self.alive: print("Total frames: {}".format(self.frame_cnt)) print("Total time: {}".format(time.time() - self.timing)) print("FPS: {}".format(self.frame_cnt / (time.time() - self.timing))) raise Exception("Video file exhausted") - 
+ frame = cv2.resize(frame, (self.width, self.height)) self.frame_cnt += 1 diff --git a/Camera/shl_200ws.py b/Camera/shl_200ws.py index a1ba2df..11925e3 100644 --- a/Camera/shl_200ws.py +++ b/Camera/shl_200ws.py @@ -2,7 +2,8 @@ import cv2 import time -cap = cv2.VideoCapture('v4l2src device=/dev/video0 ! image/jpeg,framerate=61612/513,width=640,height=480 ! jpegdec ! videoconvert ! video/x-raw,format=BGR ! appsink max-buffers=1 drop=true', cv2.CAP_GSTREAMER) +cap = cv2.VideoCapture( + 'v4l2src device=/dev/video0 ! image/jpeg,framerate=61612/513,width=640,height=480 ! jpegdec ! videoconvert ! video/x-raw,format=BGR ! appsink max-buffers=1 drop=true', cv2.CAP_GSTREAMER) if not cap.isOpened(): print("Cannot open camera") diff --git a/Communication/communicator.py b/Communication/communicator.py index 57796a7..c036277 100644 --- a/Communication/communicator.py +++ b/Communication/communicator.py @@ -4,14 +4,15 @@ import time import threading + class UARTCommunicator(object): def __init__(self, cfg, crc_standard=crc.Crc8.MAXIM_DOW, endianness='little', buffer_size=10): self.cfg = cfg self.crc_standard = crc_standard self.endianness = endianness - + self.crc_calculator = crc.Calculator(self.crc_standard, optimized=True) - + self.serial_port = self.guess_uart_device_() self.circular_buffer = [] @@ -19,43 +20,45 @@ def __init__(self, cfg, crc_standard=crc.Crc8.MAXIM_DOW, endianness='little', bu self.enemy_color = self.cfg.DEFAULT_ENEMY_TEAM self.stm32_color = 'red' if self.enemy_color == 'blue' else 'blue' - + self.seq_num = 0 - + def is_valid(self): return self.serial_port is not None - + def try_read_one(self): # Read from serial port, if any packet is waiting if self.serial_port is not None: if (self.serial_port.inWaiting() > 0): # read the bytes and convert from binary array to ASCII - byte_array = self.serial_port.read(self.serial_port.inWaiting()) + byte_array = self.serial_port.read( + self.serial_port.inWaiting()) for c in byte_array: if len(self.circular_buffer) >= 
self.buffer_size: - self.circular_buffer = self.circular_buffer[1:] # pop first element + # pop first element + self.circular_buffer = self.circular_buffer[1:] self.circular_buffer.append(c) - + def process_one_packet(self, header, yaw_offset, pitch_offset): packet = self.create_packet(header, yaw_offset, pitch_offset) self.send_packet(packet) - + def send_packet(self, packet): if self.serial_port is not None: self.serial_port.write(packet) - + @staticmethod def guess_uart_device_(): # This function is for UNIX-like systems only! - + # OSX prefix: "tty.usbmodem" # Jetson / Linux prefix: "ttyUSB", "ttyACM" UART_PREFIX_LIST = ("tty.usbmodem", "ttyUSB", "ttyACM") dev_list = os.listdir("/dev") - - serial_port = None # ret val + + serial_port = None # ret val for dev_name in dev_list: if dev_name.startswith(UART_PREFIX_LIST): @@ -70,14 +73,14 @@ def guess_uart_device_(): ) except serial.serialutil.SerialException: serial_port = None - + if serial_port is not None: return serial_port - + print("NO SERIAL DEVICE FOUND! WRITING TO VACCUM!") - + return serial_port - + def create_packet(self, header, yaw_offset, pitch_offset): """ Packet struct @@ -91,7 +94,7 @@ def create_packet(self, header, yaw_offset, pitch_offset): CRC8 (1 byte uint8; CRC checksum MAXIM_DOW of contents BEFORE CRC) (i.e., CRC does not include itself and PACK_END!) PACK_END (2 bytes chars) - + Total (17 bytes) """ assert header in [self.cfg.SEARCH_TARGET, self.cfg.MOVE_YOKE] @@ -105,8 +108,10 @@ def create_packet(self, header, yaw_offset, pitch_offset): discrete_pitch_offset = int(pitch_offset * 1e+6) # TODO: add more sanity check here? 
- packet += (discrete_yaw_offset & 0xFFFFFFFF).to_bytes(4, self.endianness) - packet += (discrete_pitch_offset & 0xFFFFFFFF).to_bytes(4, self.endianness) + packet += (discrete_yaw_offset & + 0xFFFFFFFF).to_bytes(4, self.endianness) + packet += (discrete_pitch_offset & + 0xFFFFFFFF).to_bytes(4, self.endianness) # Compute CRC crc8_checksum = self.crc_calculator.checksum(packet) @@ -131,17 +136,19 @@ def get_current_stm32_state(self): red_cnt = 0 for l in self.circular_buffer: - if l == ord('R'): red_cnt += 1 - if l == ord('B'): blue_cnt += 1 - + if l == ord('R'): + red_cnt += 1 + if l == ord('B'): + blue_cnt += 1 + if blue_cnt > red_cnt: self.stm32_color = 'blue' self.enemy_color = 'red' - + if red_cnt > blue_cnt: self.stm32_color = 'red' self.enemy_color = 'blue' - + ret_dict = { 'my_color': self.stm32_color, 'enemy_color': self.enemy_color, @@ -149,6 +156,7 @@ def get_current_stm32_state(self): return ret_dict + if __name__ == '__main__': import sys import os @@ -157,5 +165,5 @@ def get_current_stm32_state(self): import config uart = UARTCommunicator(config) for i in range(1000): - time.sleep(0.005) # simulate 200Hz + time.sleep(0.005) # simulate 200Hz uart.process_one_packet(config.SEARCH_TARGET, 0.0, 0.0) diff --git a/Detection/CV_mix_DL.py b/Detection/CV_mix_DL.py index 7f3b802..8a1e580 100644 --- a/Detection/CV_mix_DL.py +++ b/Detection/CV_mix_DL.py @@ -7,14 +7,15 @@ RED = 0 BLUE = 1 + def auto_align_brightness(img, target_v=50): # Only decrease! 
hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV) h, s, v = cv2.split(hsv) - + cur_v = np.mean(v) v_diff = int(cur_v - target_v) - + if v_diff > 0: value = v_diff # needs lower brightness @@ -33,15 +34,17 @@ def auto_align_brightness(img, target_v=50): img = cv2.cvtColor(final_hsv, cv2.COLOR_HSV2BGR) return img + def color_test(rgb_img, rect, color): rgb_roi = rgb_img[rect[1]:rect[1]+rect[3], rect[0]:rect[0]+rect[2]] - sum_r = np.sum(rgb_roi[:,:,0]) - sum_b = np.sum(rgb_roi[:,:,2]) + sum_r = np.sum(rgb_roi[:, :, 0]) + sum_b = np.sum(rgb_roi[:, :, 2]) if color == RED: return sum_r >= sum_b else: return sum_b >= sum_r + class cv_mix_dl_detector: def __init__(self, config, detect_color, model_path='fc.onnx'): # TODO: fix config parameter in ipython notebook @@ -49,10 +52,10 @@ def __init__(self, config, detect_color, model_path='fc.onnx'): self.armor_proposer = cv_armor_proposer(self.CFG, detect_color) self.digit_classifier = dl_digit_classifier(self.CFG, model_path) self.change_color(detect_color) - + def detect(self, rgb_img): return self(rgb_img) - + def __call__(self, rgb_img): rgb_img = auto_align_brightness(rgb_img) # defaults to BGR format @@ -67,12 +70,13 @@ def __call__(self, rgb_img): # Pad into formats ret_list = [] - # name, conf, bbox + # name, conf, bbox # center_x, center_y, width, height = bbox for armor in armor_list: center_x, center_y = armor.center height = (armor.left_light.length + armor.right_light.length) / 2 - width = cv2.norm(armor.left_light.center - armor.right_light.center) + width = cv2.norm(armor.left_light.center - + armor.right_light.center) bbox = (center_x, center_y, width, height) name = f'armor_{self.detect_color_str}' conf = armor.confidence @@ -99,80 +103,89 @@ def change_color(self, new_color): else: raise NotImplementedError -def rect_contains(rect,pt): + +def rect_contains(rect, pt): return rect[0] < pt[0] < rect[0]+rect[2] and rect[1] < pt[1] < rect[1]+rect[3] + class light_class: def __init__(self, rotated_rect): ((self.center_x, 
self.center_y), (_, _), _) = rotated_rect # sort according to y - pts = sorted(cv2.boxPoints(rotated_rect), key = lambda x : x[1]) + pts = sorted(cv2.boxPoints(rotated_rect), key=lambda x: x[1]) self.top = (pts[0] + pts[1]) / 2 self.btm = (pts[2] + pts[3]) / 2 self.length = cv2.norm(self.top - self.btm) self.width = cv2.norm(pts[0] - pts[1]) - self.tilt_angle = np.arctan2(np.abs(self.top[0] - self.btm[0]), np.abs(self.top[1] - self.btm[1])) + self.tilt_angle = np.arctan2( + np.abs(self.top[0] - self.btm[0]), np.abs(self.top[1] - self.btm[1])) self.tilt_angle = self.tilt_angle / np.pi * 180 - + self.center = np.array([self.center_x, self.center_y]) - + self.color = None + class armor_class: def __init__(self, light1, light2, color): assert light1.color == light2.color assert light1.color == color self.color = color self.confidence = 0.0 - + if light1.center_x < light2.center_x: self.left_light = light1 self.right_light = light2 else: self.left_light = light2 self.right_light = light1 - + self.center = (self.left_light.center + self.right_light.center) / 2 - self.armor_type = None # 'large', 'small' - + self.armor_type = None # 'large', 'small' + def extract_number(self, rgb_img): light_length = 12 warp_height = 28 small_armor_width = 32 large_armor_width = 54 - + roi_size = (20, 28) lights_vertices = np.array([ self.left_light.btm, self.left_light.top, self.right_light.top, self.right_light.btm ]).astype(np.float32) - + top_light_y = (warp_height - light_length) / 2 - 1 bottom_light_y = top_light_y + light_length - + if self.armor_type == 'small': warp_width = small_armor_width else: warp_width = large_armor_width - + target_vertices = np.array([ [0, bottom_light_y], [0, top_light_y], [warp_width - 1, top_light_y], [warp_width - 1, bottom_light_y] - ]).astype(np.float32) # default to float64 for some reason... 
- - rotation_matrix = cv2.getPerspectiveTransform(lights_vertices, target_vertices) - - self.number_image = cv2.warpPerspective(rgb_img, rotation_matrix, (warp_width, warp_height)) - + ]).astype(np.float32) # default to float64 for some reason... + + rotation_matrix = cv2.getPerspectiveTransform( + lights_vertices, target_vertices) + + self.number_image = cv2.warpPerspective( + rgb_img, rotation_matrix, (warp_width, warp_height)) + start_x = int((warp_width - roi_size[0]) / 2) - self.number_image = self.number_image[:roi_size[1], start_x:start_x+roi_size[0]] - + self.number_image = self.number_image[:roi_size[1], + start_x:start_x+roi_size[0]] + # Binarize self.number_image = cv2.cvtColor(self.number_image, cv2.COLOR_RGB2GRAY) - thres, self.number_image = cv2.threshold(self.number_image, 0, 255, cv2.THRESH_BINARY+cv2.THRESH_OTSU) + thres, self.number_image = cv2.threshold( + self.number_image, 0, 255, cv2.THRESH_BINARY+cv2.THRESH_OTSU) + class cv_armor_proposer(object): # Hyperparameters @@ -198,7 +211,7 @@ class cv_armor_proposer(object): def __init__(self, config, detect_color): self.CFG = config self.detect_color = detect_color - + def __call__(self, rgb_img): binary_img = self.preprocess(rgb_img) if self.CFG.DEBUG_DISPLAY: @@ -211,7 +224,8 @@ def __call__(self, rgb_img): viz_img = rgb_img.copy() # visualize lights for light in light_list: - cv2.rectangle(viz_img, (int(light.top[0]), int(light.top[1])), (int(light.btm[0]), int(light.btm[1])), (0, 255, 0), 2) + cv2.rectangle(viz_img, (int(light.top[0]), int(light.top[1])), (int( + light.btm[0]), int(light.btm[1])), (0, 255, 0), 2) cv2.imshow('lights', viz_img) cv2.waitKey(1) @@ -220,22 +234,24 @@ def __call__(self, rgb_img): viz_img = rgb_img.copy() # visualize armors for armor in armor_list: - cv2.rectangle(viz_img, (int(armor.left_light.top[0]), int(armor.left_light.top[1])), (int(armor.right_light.btm[0]), int(armor.right_light.btm[1])), (0, 255, 0), 2) + cv2.rectangle(viz_img, (int(armor.left_light.top[0]), 
int(armor.left_light.top[1])), (int( + armor.right_light.btm[0]), int(armor.right_light.btm[1])), (0, 255, 0), 2) cv2.imshow('armors', viz_img) cv2.waitKey(1) - + return armor_list - + def preprocess(self, rgb_img): """ Return binarized image """ gray_img = cv2.cvtColor(rgb_img, cv2.COLOR_RGB2GRAY) - thres, binary_img = cv2.threshold(gray_img, self.MIN_LIGHTNESS, 255, cv2.THRESH_BINARY) + thres, binary_img = cv2.threshold( + gray_img, self.MIN_LIGHTNESS, 255, cv2.THRESH_BINARY) return binary_img - + def filter_contours_rects(self, contours, rects, rgb_img): """ Filter out contours that are too small or too large @@ -248,46 +264,55 @@ def filter_contours_rects(self, contours, rects, rgb_img): # Area test if w * h < self.LIGHT_AREA_THRES: continue - + # Color test if not color_test(rgb_img, rects[i], self.detect_color): continue filtered_rects.append(rects[i]) filtered_contours.append(contours[i]) - + return filtered_contours, filtered_rects def find_lights(self, rgb_img, binary_img): - bin_contours, _ = cv2.findContours(binary_img, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE) + bin_contours, _ = cv2.findContours( + binary_img, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE) # Use difference of color to handle white light if self.detect_color == RED: - color_diff = rgb_img[:,:,0].astype(int) - rgb_img[:,:,2] + color_diff = rgb_img[:, :, 0].astype(int) - rgb_img[:, :, 2] color_diff[color_diff < 0] = 0 color_diff = color_diff.astype(np.uint8) - _, color_bin = cv2.threshold(color_diff, self.LUMINANCE_THRES, 255, cv2.THRESH_BINARY) - color_contours, _ = cv2.findContours(color_bin, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE) + _, color_bin = cv2.threshold( + color_diff, self.LUMINANCE_THRES, 255, cv2.THRESH_BINARY) + color_contours, _ = cv2.findContours( + color_bin, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE) else: - color_diff = rgb_img[:,:,2].astype(int) - rgb_img[:,:,0] + color_diff = rgb_img[:, :, 2].astype(int) - rgb_img[:, :, 0] color_diff[color_diff < 0] = 0 color_diff 
= color_diff.astype(np.uint8) - _, color_bin = cv2.threshold(color_diff, self.LUMINANCE_THRES, 255, cv2.THRESH_BINARY) - color_contours, _ = cv2.findContours(color_bin, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE) - + _, color_bin = cv2.threshold( + color_diff, self.LUMINANCE_THRES, 255, cv2.THRESH_BINARY) + color_contours, _ = cv2.findContours( + color_bin, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE) + if self.CFG.DEBUG_DISPLAY: cv2.imshow('color', color_bin) cv2.waitKey(1) - + # Preprocess contours for early filtering - bin_contours = [contour for contour in bin_contours if contour.shape[0] >= 5] - color_contours = [contour for contour in color_contours if contour.shape[0] >= 5] + bin_contours = [ + contour for contour in bin_contours if contour.shape[0] >= 5] + color_contours = [ + contour for contour in color_contours if contour.shape[0] >= 5] bin_rects = [cv2.boundingRect(contour) for contour in bin_contours] col_rects = [cv2.boundingRect(contour) for contour in color_contours] # Basic filtering for contours and rects - bin_contours, bin_rects = self.filter_contours_rects(bin_contours, bin_rects, rgb_img) - color_contours, col_rects = self.filter_contours_rects(color_contours, col_rects, rgb_img) + bin_contours, bin_rects = self.filter_contours_rects( + bin_contours, bin_rects, rgb_img) + color_contours, col_rects = self.filter_contours_rects( + color_contours, col_rects, rgb_img) # Apply NMS to contours from binary image and color difference image # COLOR contours are generally less reliable @@ -296,10 +321,13 @@ def find_lights(self, rgb_img, binary_img): append_flag = True for bin_rect in bin_rects: # Compute rectangle overlap - x_overlap = max(0, min(bin_rect[0] + bin_rect[2], col_rect[0] + col_rect[2]) - max(bin_rect[0], col_rect[0])) - y_overlap = max(0, min(bin_rect[1] + bin_rect[3], col_rect[1] + col_rect[3]) - max(bin_rect[1], col_rect[1])) + x_overlap = max(0, min( + bin_rect[0] + bin_rect[2], col_rect[0] + col_rect[2]) - max(bin_rect[0], 
col_rect[0])) + y_overlap = max(0, min( + bin_rect[1] + bin_rect[3], col_rect[1] + col_rect[3]) - max(bin_rect[1], col_rect[1])) overlap = x_overlap * y_overlap - min_area = min(bin_rect[2] * bin_rect[3], col_rect[2] * col_rect[3]) + min_area = min(bin_rect[2] * bin_rect[3], + col_rect[2] * col_rect[3]) normalized_overlap = overlap / min_area NMS_THRES = 0.5 if normalized_overlap > NMS_THRES: @@ -321,15 +349,15 @@ def find_lights(self, rgb_img, binary_img): light.color = self.detect_color light_list.append(light) return light_list - + def is_light(self, light): ratio = light.width / light.length ratio_ok = self.LIGHT_MIN_RATIO < ratio and ratio < self.LIGHT_MAX_RATIO angle_ok = light.tilt_angle < self.LIGHT_MAX_ANGLE - + return ratio_ok and angle_ok - + def match_lights(self, light_list): armor_list = [] @@ -348,25 +376,25 @@ def match_lights(self, light_list): if self.is_armor(armor): armor_list.append(armor) - + return armor_list - + def contain_light(self, light1, light2, light_list): pts = np.array([ light1.top, light1.btm, light2.top, light2.btm ]) rect = cv2.boundingRect(pts) - + for test_light in light_list: if test_light == light1 or test_light == light2: continue - + if rect_contains(rect, test_light.top) or rect_contains(rect, test_light.btm)\ - or rect_contains(rect, test_light.center): + or rect_contains(rect, test_light.center): return True - + return False - + def is_armor(self, armor): light1 = armor.left_light light2 = armor.right_light @@ -374,29 +402,31 @@ def is_armor(self, armor): light_length_ratio = light1.length / light2.length else: light_length_ratio = light2.length / light1.length - + light_ratio_ok = light_length_ratio > self.ARMOR_MIN_LIGHT_RATIO - + avg_light_length = (light1.length + light2.length) / 2 - center_dist = cv2.norm(light1.center - light2.center) / avg_light_length - - center_dist_ok = ((self.ARMOR_MIN_SMALL_CENTER_DISTANCE < center_dist)\ - and (center_dist < self.ARMOR_MAX_SMALL_CENTER_DISTANCE)) or\ - 
((self.ARMOR_MIN_LARGE_CENTER_DISTANCE < center_dist)\ - and (center_dist < self.ARMOR_MAX_LARGE_CENTER_DISTANCE)) - + center_dist = cv2.norm( + light1.center - light2.center) / avg_light_length + + center_dist_ok = ((self.ARMOR_MIN_SMALL_CENTER_DISTANCE < center_dist) + and (center_dist < self.ARMOR_MAX_SMALL_CENTER_DISTANCE)) or\ + ((self.ARMOR_MIN_LARGE_CENTER_DISTANCE < center_dist) + and (center_dist < self.ARMOR_MAX_LARGE_CENTER_DISTANCE)) + # test light center connection angle diff = light1.center - light2.center angle = abs(np.arctan(diff[1] / diff[0])) / np.pi * 180 angle_ok = angle < self.armor_max_angle - + if center_dist > self.ARMOR_MIN_LARGE_CENTER_DISTANCE: armor.armor_type = 'large' else: armor.armor_type = 'small' - + return light_ratio_ok and center_dist_ok and angle_ok + class dl_digit_classifier: LABEL_NAMES_LIST = np.array(['B', '1', '2', '3', '4', '5', 'G', 'O', 'N']) CLASSIFIER_THRESHOLD = 0.7 @@ -404,30 +434,31 @@ class dl_digit_classifier: def __init__(self, config, model_path): self.CFG = config self.net = cv2.dnn.readNetFromONNX(model_path) - + @staticmethod def normalize(image): return image.astype(np.float32) / 255.0 - + def __call__(self, armor_list): ret_list = [] for armor in armor_list: input_image = self.normalize(armor.number_image) - blob = cv2.dnn.blobFromImage(input_image, scalefactor=1., size=(28, 20)) # BCHW + blob = cv2.dnn.blobFromImage( + input_image, scalefactor=1., size=(28, 20)) # BCHW self.net.setInput(blob) outputs = self.net.forward() - max_probs = outputs.max(axis = 1) + max_probs = outputs.max(axis=1) softmax_probs = np.exp(outputs - max_probs) my_sum = np.sum(softmax_probs, axis=1) softmax_probs = softmax_probs / my_sum - + # Test max_class = softmax_probs.argmax(axis=1) max_class_names = self.LABEL_NAMES_LIST[max_class] - + if softmax_probs[0, max_class] < self.CLASSIFIER_THRESHOLD or max_class_names == 'N': continue - + # TODO: use digit predictions to improve accuracy? 
# Right now using that logic causes a lot of false negatives... armor.confidence = softmax_probs[0, max_class] diff --git a/Detection/YOLO.py b/Detection/YOLO.py index 3df28d9..e789627 100644 --- a/Detection/YOLO.py +++ b/Detection/YOLO.py @@ -5,7 +5,8 @@ from config import DARKNET_LIB_PATH # Note: darknet is patched per issue https://github.com/pjreddie/darknet/issues/289 -#============= copied from pjreddie's darknet repo +# ============= copied from pjreddie's darknet repo + def sample(probs): s = sum(probs) @@ -17,6 +18,7 @@ def sample(probs): return i return len(probs)-1 + def c_array(ctype, values): arr = (ctype*len(values))() arr[:] = values @@ -31,6 +33,8 @@ class BOX(Structure): # Follow up-to-date definition here # https://github.com/AlexeyAB/darknet/blob/ccb392ddf2ab49f66989e6318a2e379c9238068c/darknet.py#L56 + + class DETECTION(Structure): _fields_ = [("bbox", BOX), ("classes", c_int), @@ -52,10 +56,12 @@ class IMAGE(Structure): ("c", c_int), ("data", POINTER(c_float))] + class METADATA(Structure): _fields_ = [("classes", c_int), ("names", POINTER(c_char_p))] + lib = CDLL(DARKNET_LIB_PATH, RTLD_GLOBAL) lib.network_width.argtypes = [c_void_p] lib.network_width.restype = c_int @@ -74,7 +80,8 @@ class METADATA(Structure): make_image.restype = IMAGE get_network_boxes = lib.get_network_boxes -get_network_boxes.argtypes = [c_void_p, c_int, c_int, c_float, c_float, POINTER(c_int), c_int, POINTER(c_int)] +get_network_boxes.argtypes = [c_void_p, c_int, c_int, + c_float, c_float, POINTER(c_int), c_int, POINTER(c_int)] get_network_boxes.restype = POINTER(DETECTION) make_network_boxes = lib.make_network_boxes @@ -129,13 +136,15 @@ class METADATA(Structure): ndarray_image.argtypes = [POINTER(c_ubyte), POINTER(c_long), POINTER(c_long)] ndarray_image.restype = IMAGE + def nparray_to_image(img): data = img.ctypes.data_as(POINTER(c_ubyte)) image = ndarray_image(data, img.ctypes.shape, img.ctypes.strides) return image -#============= copied from pjreddie's darknet repo 
+# ============= copied from pjreddie's darknet repo + class Yolo: def __init__(self, cfg_path, weight_path, meta_path, thresh=.5, hier_thresh=.5, nms=.45): @@ -169,7 +178,8 @@ def detect(self, image): num = c_int(0) pnum = pointer(num) predict_image(self.net, im) - dets = get_network_boxes(self.net, im.w, im.h, self.thresh, self.hier_thresh, None, 0, pnum) + dets = get_network_boxes(self.net, im.w, im.h, + self.thresh, self.hier_thresh, None, 0, pnum) num = pnum[0] do_nms_obj(dets, num, self.meta.classes, self.nms) @@ -178,9 +188,9 @@ def detect(self, image): for i in range(self.meta.classes): if dets[j].prob[i] > 0: b = dets[j].bbox - res.append((self.meta.names[i], dets[j].prob[i], (b.x, b.y, b.w, b.h))) + res.append( + (self.meta.names[i], dets[j].prob[i], (b.x, b.y, b.w, b.h))) res = sorted(res, key=lambda x: -x[1]) free_image(im) free_detections(dets, num) return res - diff --git a/Utils/__init__.py b/Utils/__init__.py index 183c7da..b631338 100644 --- a/Utils/__init__.py +++ b/Utils/__init__.py @@ -1,8 +1,10 @@ # Commonly used utils functions import numpy as np + def deg_to_rad(deg): return deg * ((2 * np.pi) / 360) + def rad_to_deg(rad): return rad * (360. 
/ (2 * np.pi)) diff --git a/config.py b/config.py index d8c611e..542b1b7 100644 --- a/config.py +++ b/config.py @@ -1,4 +1,5 @@ # darknet utils +from Camera.read_from_file import fake_camera DARKNET_LIB_PATH = r'/home/illinirm/darknet/libdarknet.so' MODEL_CFG_PATH = r'/home/illinirm/darknet/dji_roco_preprocessed/yolov3-tiny-custom.cfg' WEIGHT_PATH = r'/home/illinirm/darknet/dji_roco_preprocessed/yolov3-tiny-custom_final.weights' @@ -10,13 +11,12 @@ PACK_END = b'ED' # from Camera.d455 import D455_camera -from Camera.read_from_file import fake_camera AUTOAIM_CAMERA = fake_camera # Compute some constants and define camera to use -IMG_HEIGHT=360 -IMG_WIDTH=640 +IMG_HEIGHT = 360 +IMG_WIDTH = 640 IMG_CENTER_X = IMG_WIDTH // 2 IMG_CENTER_Y = IMG_HEIGHT // 2 diff --git a/vision.py b/vision.py index ebfedf6..04e7051 100644 --- a/vision.py +++ b/vision.py @@ -6,6 +6,7 @@ from Communication.communicator import UARTCommunicator import config + def main(): model = cv_mix_dl_detector(config, config.DEFAULT_ENEMY_TEAM) # model = Yolo(config.MODEL_CFG_PATH, config.WEIGHT_PATH, config.META_PATH) @@ -23,7 +24,7 @@ def main(): while True: start = time.time() frame = autoaim_camera.get_frame() - + # TODO: add a global reset function if enemy functions change # (e.g., clear the buffer in the armor tracker) stm32_state_dict = communicator.get_current_stm32_state() @@ -50,11 +51,12 @@ def main(): lower_y = int(bbox[1] - bbox[3] / 2) upper_x = int(bbox[0] + bbox[2] / 2) upper_y = int(bbox[1] + bbox[3] / 2) - viz_frame = cv2.rectangle(viz_frame, (lower_x, lower_y), (upper_x, upper_y), (0, 255, 0), 2) + viz_frame = cv2.rectangle( + viz_frame, (lower_x, lower_y), (upper_x, upper_y), (0, 255, 0), 2) cv2.imshow('all_detected', viz_frame) if cv2.waitKey(1) & 0xFF == ord('q'): exit(0) - + # Tracking and filtering # Pour all predictions into the aimer, which returns relative angles ret_dict = aimer.process_one(pred, enemy_team, frame) @@ -68,7 +70,8 @@ def main(): lower_y = int(bbox[1] - bbox[3] 
/ 2) upper_x = int(bbox[0] + bbox[2] / 2) upper_y = int(bbox[1] + bbox[3] / 2) - viz_frame = cv2.rectangle(viz_frame, (lower_x, lower_y), (upper_x, upper_y), (0, 255, 0), 2) + viz_frame = cv2.rectangle( + viz_frame, (lower_x, lower_y), (upper_x, upper_y), (0, 255, 0), 2) viz_frame = cv2.putText(viz_frame, str(unique_id), (lower_x, lower_y), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 0), 2) cv2.imshow('filtered_detected', viz_frame) @@ -79,21 +82,24 @@ def main(): show_frame = frame.copy() if ret_dict: - communicator.process_one_packet(config.MOVE_YOKE, ret_dict['yaw_diff'], ret_dict['pitch_diff']) + communicator.process_one_packet( + config.MOVE_YOKE, ret_dict['yaw_diff'], ret_dict['pitch_diff']) show_frame = cv2.circle(show_frame, - (int(ret_dict['center_x']), int(ret_dict['center_y'])), + (int(ret_dict['center_x']), + int(ret_dict['center_y'])), 10, (0, 255, 0), 10) else: show_frame = cv2.putText(show_frame, 'NOT FOUND', (50, 50), - cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 0), 2) + cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 0), 2) communicator.process_one_packet(config.SEARCH_TARGET, 0, 0) - + if config.DEBUG_DISPLAY: - print('----------------\n',pred) - print('fps:',1./elapsed) + print('----------------\n', pred) + print('fps:', 1./elapsed) cv2.imshow('target', show_frame) if cv2.waitKey(1) & 0xFF == ord('q'): exit(0) + if __name__ == "__main__": main() From 1bf2429b2a7b1a050ad5161f8a002f9c3b22f999 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sat, 11 Mar 2023 03:52:31 -0600 Subject: [PATCH 12/74] install missing dependency for linting --- .github/workflows/pylint.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pylint.yml b/.github/workflows/pylint.yml index 383e65c..23f8e79 100644 --- a/.github/workflows/pylint.yml +++ b/.github/workflows/pylint.yml @@ -17,7 +17,7 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - pip install pylint + pip install -r requirements.txt - name: Analysing the code with pylint 
run: | pylint $(git ls-files '*.py') From 0dfee5798cbe795ce23409f37edc6ed573b30663 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sat, 11 Mar 2023 03:57:49 -0600 Subject: [PATCH 13/74] update requirements.txt and pylintrc for CI --- .pylintrc | 591 +++++++++++++++++++++++++++++++++++++++++++++++ requirements.txt | 4 + 2 files changed, 595 insertions(+) create mode 100644 .pylintrc diff --git a/.pylintrc b/.pylintrc new file mode 100644 index 0000000..2b55a44 --- /dev/null +++ b/.pylintrc @@ -0,0 +1,591 @@ +[MASTER] + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code. +extension-pkg-whitelist=cv2 + +# Specify a score threshold to be exceeded before program exits with error. +fail-under=10.0 + +# Add files or directories to the blacklist. They should be base names, not +# paths. +ignore=CVS + +# Add files or directories matching the regex patterns to the blacklist. The +# regex matches against base names, not paths. +ignore-patterns= + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +#init-hook= + +# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the +# number of processors available to use. +jobs=1 + +# Control the amount of potential inferred values when inferring a single +# object. This can help the performance when dealing with large functions or +# complex, nested conditions. +limit-inference-results=100 + +# List of plugins (as comma separated values of python module names) to load, +# usually to register additional checkers. +load-plugins= + +# Pickle collected data for later comparisons. +persistent=yes + +# When enabled, pylint would attempt to guess common misconfiguration and emit +# user-friendly hints instead of false-positive error messages. +suggestion-mode=yes + +# Allow loading of arbitrary C extensions. 
Extensions are imported into the +# active Python interpreter and may run arbitrary code. +unsafe-load-any-extension=no + + +[MESSAGES CONTROL] + +# Only show warnings with the listed confidence levels. Leave empty to show +# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED. +confidence= + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once). You can also use "--disable=all" to +# disable everything first and then reenable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use "--disable=all --enable=classes +# --disable=W". +disable=print-statement, + parameter-unpacking, + unpacking-in-except, + old-raise-syntax, + backtick, + long-suffix, + old-ne-operator, + old-octal-literal, + import-star-module-level, + non-ascii-bytes-literal, + raw-checker-failed, + bad-inline-option, + locally-disabled, + file-ignored, + suppressed-message, + useless-suppression, + deprecated-pragma, + use-symbolic-message-instead, + apply-builtin, + basestring-builtin, + buffer-builtin, + cmp-builtin, + coerce-builtin, + execfile-builtin, + file-builtin, + long-builtin, + raw_input-builtin, + reduce-builtin, + standarderror-builtin, + unicode-builtin, + xrange-builtin, + coerce-method, + delslice-method, + getslice-method, + setslice-method, + no-absolute-import, + old-division, + dict-iter-method, + dict-view-method, + next-method-called, + metaclass-assignment, + indexing-exception, + raising-string, + reload-builtin, + oct-method, + hex-method, + nonzero-method, + cmp-method, + input-builtin, + round-builtin, + intern-builtin, + unichr-builtin, + map-builtin-not-iterating, + 
zip-builtin-not-iterating, + range-builtin-not-iterating, + filter-builtin-not-iterating, + using-cmp-argument, + eq-without-hash, + div-method, + idiv-method, + rdiv-method, + exception-message-attribute, + invalid-str-codec, + sys-max-int, + bad-python3-import, + deprecated-string-function, + deprecated-str-translate-call, + deprecated-itertools-function, + deprecated-types-field, + next-method-defined, + dict-items-not-iterating, + dict-keys-not-iterating, + dict-values-not-iterating, + deprecated-operator-function, + deprecated-urllib-function, + xreadlines-attribute, + deprecated-sys-function, + exception-escape, + comprehension-escape + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where +# it should appear only once). See also the "--disable" option for examples. +enable=c-extension-no-member + + +[REPORTS] + +# Python expression which should return a score less than or equal to 10. You +# have access to the variables 'error', 'warning', 'refactor', and 'convention' +# which contain the number of messages in each category, as well as 'statement' +# which is the total number of statements analyzed. This score is used by the +# global evaluation report (RP0004). +evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details. +#msg-template= + +# Set the output format. Available formats are text, parseable, colorized, json +# and msvs (visual studio). You can also give a reporter class, e.g. +# mypackage.mymodule.MyReporterClass. +output-format=text + +# Tells whether to display a full report or only the messages. +reports=no + +# Activate the evaluation score. 
+score=yes + + +[REFACTORING] + +# Maximum number of nested blocks for function / method body +max-nested-blocks=5 + +# Complete name of functions that never returns. When checking for +# inconsistent-return-statements if a never returning function is called then +# it will be considered as an explicit return statement and no message will be +# printed. +never-returning-functions=sys.exit + + +[BASIC] + +# Naming style matching correct argument names. +argument-naming-style=snake_case + +# Regular expression matching correct argument names. Overrides argument- +# naming-style. +#argument-rgx= + +# Naming style matching correct attribute names. +attr-naming-style=snake_case + +# Regular expression matching correct attribute names. Overrides attr-naming- +# style. +#attr-rgx= + +# Bad variable names which should always be refused, separated by a comma. +bad-names=foo, + bar, + baz, + toto, + tutu, + tata + +# Bad variable names regexes, separated by a comma. If names match any regex, +# they will always be refused +bad-names-rgxs= + +# Naming style matching correct class attribute names. +class-attribute-naming-style=any + +# Regular expression matching correct class attribute names. Overrides class- +# attribute-naming-style. +#class-attribute-rgx= + +# Naming style matching correct class names. +class-naming-style=PascalCase + +# Regular expression matching correct class names. Overrides class-naming- +# style. +#class-rgx= + +# Naming style matching correct constant names. +const-naming-style=UPPER_CASE + +# Regular expression matching correct constant names. Overrides const-naming- +# style. +#const-rgx= + +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. +docstring-min-length=-1 + +# Naming style matching correct function names. +function-naming-style=snake_case + +# Regular expression matching correct function names. Overrides function- +# naming-style. 
+#function-rgx= + +# Good variable names which should always be accepted, separated by a comma. +good-names=i, + j, + k, + ex, + Run, + _ + +# Good variable names regexes, separated by a comma. If names match any regex, +# they will always be accepted +good-names-rgxs= + +# Include a hint for the correct naming format with invalid-name. +include-naming-hint=no + +# Naming style matching correct inline iteration names. +inlinevar-naming-style=any + +# Regular expression matching correct inline iteration names. Overrides +# inlinevar-naming-style. +#inlinevar-rgx= + +# Naming style matching correct method names. +method-naming-style=snake_case + +# Regular expression matching correct method names. Overrides method-naming- +# style. +#method-rgx= + +# Naming style matching correct module names. +module-naming-style=snake_case + +# Regular expression matching correct module names. Overrides module-naming- +# style. +#module-rgx= + +# Colon-delimited sets of names that determine each other's naming style when +# the name regexes allow several styles. +name-group= + +# Regular expression which should only match function or class names that do +# not require a docstring. +no-docstring-rgx=^_ + +# List of decorators that produce properties, such as abc.abstractproperty. Add +# to this list to register other decorators that produce valid properties. +# These decorators are taken in consideration only for invalid-name. +property-classes=abc.abstractproperty + +# Naming style matching correct variable names. +variable-naming-style=snake_case + +# Regular expression matching correct variable names. Overrides variable- +# naming-style. +#variable-rgx= + + +[STRING] + +# This flag controls whether inconsistent-quotes generates a warning when the +# character used as a quote delimiter is used inconsistently within a module. 
+check-quote-consistency=no + +# This flag controls whether the implicit-str-concat should generate a warning +# on implicit string concatenation in sequences defined over several lines. +check-str-concat-over-line-jumps=no + + +[TYPECHECK] + +# List of decorators that produce context managers, such as +# contextlib.contextmanager. Add to this list to register other decorators that +# produce valid context managers. +contextmanager-decorators=contextlib.contextmanager + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. +generated-members= + +# Tells whether missing members accessed in mixin class should be ignored. A +# mixin class is detected if its name ends with "mixin" (case insensitive). +ignore-mixin-members=yes + +# Tells whether to warn about missing members when the owner of the attribute +# is inferred to be None. +ignore-none=yes + +# This flag controls whether pylint should warn about no-member and similar +# checks whenever an opaque object is returned when inferring. The inference +# can return multiple potential results while evaluating a Python object, but +# some branches might not be evaluated, which results in partial inference. In +# that case, it might be useful to still emit no-member and other checks for +# the rest of the inferred objects. +ignore-on-opaque-inference=yes + +# List of class names for which member attributes should not be checked (useful +# for classes with dynamically set attributes). This supports the use of +# qualified names. +ignored-classes=optparse.Values,thread._local,_thread._local + +# List of module names for which member attributes should not be checked +# (useful for modules/projects where namespaces are manipulated during runtime +# and thus existing member attributes cannot be deduced by static analysis). It +# supports qualified module names, as well as Unix pattern matching. 
+ignored-modules= + +# Show a hint with possible names when a member name was not found. The aspect +# of finding the hint is based on edit distance. +missing-member-hint=yes + +# The minimum edit distance a name should have in order to be considered a +# similar match for a missing member name. +missing-member-hint-distance=1 + +# The total number of similar names that should be taken in consideration when +# showing a hint for a missing member. +missing-member-max-choices=1 + +# List of decorators that change the signature of a decorated function. +signature-mutators= + + +[VARIABLES] + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid defining new builtins when possible. +additional-builtins= + +# Tells whether unused global variables should be treated as a violation. +allow-global-unused-variables=yes + +# List of strings which can identify a callback function by name. A callback +# name must start or end with one of those strings. +callbacks=cb_, + _cb + +# A regular expression matching the name of dummy variables (i.e. expected to +# not be used). +dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ + +# Argument names that match this expression will be ignored. Default to name +# with leading underscore. +ignored-argument-names=_.*|^ignored_|^unused_ + +# Tells whether we should check for unused import in __init__ files. +init-import=no + +# List of qualified module names which can have objects that can redefine +# builtins. +redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io + + +[LOGGING] + +# The type of string formatting that logging methods do. `old` means using % +# formatting, `new` is for `{}` formatting. +logging-format-style=old + +# Logging modules to check that the string format arguments are in logging +# function parameter format. +logging-modules=logging + + +[FORMAT] + +# Expected format of line ending, e.g. 
empty (any line ending), LF or CRLF. +expected-line-ending-format= + +# Regexp for a line that is allowed to be longer than the limit. +ignore-long-lines=^\s*(# )??$ + +# Number of spaces of indent required inside a hanging or continued line. +indent-after-paren=4 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string=' ' + +# Maximum number of characters on a single line. +max-line-length=100 + +# Maximum number of lines in a module. +max-module-lines=1000 + +# Allow the body of a class to be on the same line as the declaration if body +# contains single statement. +single-line-class-stmt=no + +# Allow the body of an if to be on the same line as the test if there is no +# else. +single-line-if-stmt=no + + +[SPELLING] + +# Limits count of emitted suggestions for spelling mistakes. +max-spelling-suggestions=4 + +# Spelling dictionary name. Available dictionaries: none. To make it work, +# install the python-enchant package. +spelling-dict= + +# List of comma separated words that should not be checked. +spelling-ignore-words= + +# A path to a file that contains the private dictionary; one word per line. +spelling-private-dict-file= + +# Tells whether to store unknown words to the private dictionary (see the +# --spelling-private-dict-file option) instead of raising a message. +spelling-store-unknown-words=no + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME, + XXX, + TODO + +# Regular expression of note tags to take in consideration. +#notes-rgx= + + +[SIMILARITIES] + +# Ignore comments when computing similarities. +ignore-comments=yes + +# Ignore docstrings when computing similarities. +ignore-docstrings=yes + +# Ignore imports when computing similarities. +ignore-imports=no + +# Minimum lines number of a similarity. +min-similarity-lines=4 + + +[DESIGN] + +# Maximum number of arguments for function / method. 
+max-args=5 + +# Maximum number of attributes for a class (see R0902). +max-attributes=7 + +# Maximum number of boolean expressions in an if statement (see R0916). +max-bool-expr=5 + +# Maximum number of branch for function / method body. +max-branches=12 + +# Maximum number of locals for function / method body. +max-locals=15 + +# Maximum number of parents for a class (see R0901). +max-parents=7 + +# Maximum number of public methods for a class (see R0904). +max-public-methods=20 + +# Maximum number of return / yield for function / method body. +max-returns=6 + +# Maximum number of statements in function / method body. +max-statements=50 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=2 + + +[IMPORTS] + +# List of modules that can be imported at any level, not just the top level +# one. +allow-any-import-level= + +# Allow wildcard imports from modules that define __all__. +allow-wildcard-with-all=no + +# Analyse import fallback blocks. This can be used to support both Python 2 and +# 3 compatible code, which means that the block might have code that exists +# only in one or another interpreter, leading to false positives when analysed. +analyse-fallback-blocks=no + +# Deprecated modules which should not be used, separated by a comma. +deprecated-modules=optparse,tkinter.tix + +# Create a graph of external dependencies in the given file (report RP0402 must +# not be disabled). +ext-import-graph= + +# Create a graph of every (i.e. internal and external) dependencies in the +# given file (report RP0402 must not be disabled). +import-graph= + +# Create a graph of internal dependencies in the given file (report RP0402 must +# not be disabled). +int-import-graph= + +# Force import order to recognize a module as part of the standard +# compatibility libraries. +known-standard-library= + +# Force import order to recognize a module as part of a third party library. 
+known-third-party=enchant + +# Couples of modules and preferred modules, separated by a comma. +preferred-modules= + + +[CLASSES] + +# Warn about protected attribute access inside special methods +check-protected-access-in-special-methods=no + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__, + __new__, + setUp, + __post_init__ + +# List of member names, which should be excluded from the protected access +# warning. +exclude-protected=_asdict, + _fields, + _replace, + _source, + _make + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg=cls + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when being caught. Defaults to +# "BaseException, Exception". +overgeneral-exceptions=BaseException, + Exception diff --git a/requirements.txt b/requirements.txt index 0ea69e1..82fd91a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,6 @@ opencv-python pylint +scipy +ipython +pyserial +crc From 233b7b5122f7d0c63e8e65b5936e05abfb2321ba Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sat, 11 Mar 2023 04:06:05 -0600 Subject: [PATCH 14/74] update action to fix missing cv2 member --- .github/workflows/pylint.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pylint.yml b/.github/workflows/pylint.yml index 23f8e79..230bd97 100644 --- a/.github/workflows/pylint.yml +++ b/.github/workflows/pylint.yml @@ -20,4 +20,4 @@ jobs: pip install -r requirements.txt - name: Analysing the code with pylint run: | - pylint $(git ls-files '*.py') + pylint $(git ls-files '*.py') --generated-members="cv2.*" From c3be0a57a3ab15d8c3a4e99036f5b16ebc12d43c Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sat, 11 Mar 2023 04:08:58 -0600 Subject: [PATCH 15/74] remove unused pylintrc --- .pylintrc | 591 
------------------------------------------------------ 1 file changed, 591 deletions(-) delete mode 100644 .pylintrc diff --git a/.pylintrc b/.pylintrc deleted file mode 100644 index 2b55a44..0000000 --- a/.pylintrc +++ /dev/null @@ -1,591 +0,0 @@ -[MASTER] - -# A comma-separated list of package or module names from where C extensions may -# be loaded. Extensions are loading into the active Python interpreter and may -# run arbitrary code. -extension-pkg-whitelist=cv2 - -# Specify a score threshold to be exceeded before program exits with error. -fail-under=10.0 - -# Add files or directories to the blacklist. They should be base names, not -# paths. -ignore=CVS - -# Add files or directories matching the regex patterns to the blacklist. The -# regex matches against base names, not paths. -ignore-patterns= - -# Python code to execute, usually for sys.path manipulation such as -# pygtk.require(). -#init-hook= - -# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the -# number of processors available to use. -jobs=1 - -# Control the amount of potential inferred values when inferring a single -# object. This can help the performance when dealing with large functions or -# complex, nested conditions. -limit-inference-results=100 - -# List of plugins (as comma separated values of python module names) to load, -# usually to register additional checkers. -load-plugins= - -# Pickle collected data for later comparisons. -persistent=yes - -# When enabled, pylint would attempt to guess common misconfiguration and emit -# user-friendly hints instead of false-positive error messages. -suggestion-mode=yes - -# Allow loading of arbitrary C extensions. Extensions are imported into the -# active Python interpreter and may run arbitrary code. -unsafe-load-any-extension=no - - -[MESSAGES CONTROL] - -# Only show warnings with the listed confidence levels. Leave empty to show -# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED. 
-confidence= - -# Disable the message, report, category or checker with the given id(s). You -# can either give multiple identifiers separated by comma (,) or put this -# option multiple times (only on the command line, not in the configuration -# file where it should appear only once). You can also use "--disable=all" to -# disable everything first and then reenable specific checks. For example, if -# you want to run only the similarities checker, you can use "--disable=all -# --enable=similarities". If you want to run only the classes checker, but have -# no Warning level messages displayed, use "--disable=all --enable=classes -# --disable=W". -disable=print-statement, - parameter-unpacking, - unpacking-in-except, - old-raise-syntax, - backtick, - long-suffix, - old-ne-operator, - old-octal-literal, - import-star-module-level, - non-ascii-bytes-literal, - raw-checker-failed, - bad-inline-option, - locally-disabled, - file-ignored, - suppressed-message, - useless-suppression, - deprecated-pragma, - use-symbolic-message-instead, - apply-builtin, - basestring-builtin, - buffer-builtin, - cmp-builtin, - coerce-builtin, - execfile-builtin, - file-builtin, - long-builtin, - raw_input-builtin, - reduce-builtin, - standarderror-builtin, - unicode-builtin, - xrange-builtin, - coerce-method, - delslice-method, - getslice-method, - setslice-method, - no-absolute-import, - old-division, - dict-iter-method, - dict-view-method, - next-method-called, - metaclass-assignment, - indexing-exception, - raising-string, - reload-builtin, - oct-method, - hex-method, - nonzero-method, - cmp-method, - input-builtin, - round-builtin, - intern-builtin, - unichr-builtin, - map-builtin-not-iterating, - zip-builtin-not-iterating, - range-builtin-not-iterating, - filter-builtin-not-iterating, - using-cmp-argument, - eq-without-hash, - div-method, - idiv-method, - rdiv-method, - exception-message-attribute, - invalid-str-codec, - sys-max-int, - bad-python3-import, - deprecated-string-function, 
- deprecated-str-translate-call, - deprecated-itertools-function, - deprecated-types-field, - next-method-defined, - dict-items-not-iterating, - dict-keys-not-iterating, - dict-values-not-iterating, - deprecated-operator-function, - deprecated-urllib-function, - xreadlines-attribute, - deprecated-sys-function, - exception-escape, - comprehension-escape - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time (only on the command line, not in the configuration file where -# it should appear only once). See also the "--disable" option for examples. -enable=c-extension-no-member - - -[REPORTS] - -# Python expression which should return a score less than or equal to 10. You -# have access to the variables 'error', 'warning', 'refactor', and 'convention' -# which contain the number of messages in each category, as well as 'statement' -# which is the total number of statements analyzed. This score is used by the -# global evaluation report (RP0004). -evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) - -# Template used to display messages. This is a python new-style format string -# used to format the message information. See doc for all details. -#msg-template= - -# Set the output format. Available formats are text, parseable, colorized, json -# and msvs (visual studio). You can also give a reporter class, e.g. -# mypackage.mymodule.MyReporterClass. -output-format=text - -# Tells whether to display a full report or only the messages. -reports=no - -# Activate the evaluation score. -score=yes - - -[REFACTORING] - -# Maximum number of nested blocks for function / method body -max-nested-blocks=5 - -# Complete name of functions that never returns. 
When checking for -# inconsistent-return-statements if a never returning function is called then -# it will be considered as an explicit return statement and no message will be -# printed. -never-returning-functions=sys.exit - - -[BASIC] - -# Naming style matching correct argument names. -argument-naming-style=snake_case - -# Regular expression matching correct argument names. Overrides argument- -# naming-style. -#argument-rgx= - -# Naming style matching correct attribute names. -attr-naming-style=snake_case - -# Regular expression matching correct attribute names. Overrides attr-naming- -# style. -#attr-rgx= - -# Bad variable names which should always be refused, separated by a comma. -bad-names=foo, - bar, - baz, - toto, - tutu, - tata - -# Bad variable names regexes, separated by a comma. If names match any regex, -# they will always be refused -bad-names-rgxs= - -# Naming style matching correct class attribute names. -class-attribute-naming-style=any - -# Regular expression matching correct class attribute names. Overrides class- -# attribute-naming-style. -#class-attribute-rgx= - -# Naming style matching correct class names. -class-naming-style=PascalCase - -# Regular expression matching correct class names. Overrides class-naming- -# style. -#class-rgx= - -# Naming style matching correct constant names. -const-naming-style=UPPER_CASE - -# Regular expression matching correct constant names. Overrides const-naming- -# style. -#const-rgx= - -# Minimum line length for functions/classes that require docstrings, shorter -# ones are exempt. -docstring-min-length=-1 - -# Naming style matching correct function names. -function-naming-style=snake_case - -# Regular expression matching correct function names. Overrides function- -# naming-style. -#function-rgx= - -# Good variable names which should always be accepted, separated by a comma. -good-names=i, - j, - k, - ex, - Run, - _ - -# Good variable names regexes, separated by a comma. 
If names match any regex, -# they will always be accepted -good-names-rgxs= - -# Include a hint for the correct naming format with invalid-name. -include-naming-hint=no - -# Naming style matching correct inline iteration names. -inlinevar-naming-style=any - -# Regular expression matching correct inline iteration names. Overrides -# inlinevar-naming-style. -#inlinevar-rgx= - -# Naming style matching correct method names. -method-naming-style=snake_case - -# Regular expression matching correct method names. Overrides method-naming- -# style. -#method-rgx= - -# Naming style matching correct module names. -module-naming-style=snake_case - -# Regular expression matching correct module names. Overrides module-naming- -# style. -#module-rgx= - -# Colon-delimited sets of names that determine each other's naming style when -# the name regexes allow several styles. -name-group= - -# Regular expression which should only match function or class names that do -# not require a docstring. -no-docstring-rgx=^_ - -# List of decorators that produce properties, such as abc.abstractproperty. Add -# to this list to register other decorators that produce valid properties. -# These decorators are taken in consideration only for invalid-name. -property-classes=abc.abstractproperty - -# Naming style matching correct variable names. -variable-naming-style=snake_case - -# Regular expression matching correct variable names. Overrides variable- -# naming-style. -#variable-rgx= - - -[STRING] - -# This flag controls whether inconsistent-quotes generates a warning when the -# character used as a quote delimiter is used inconsistently within a module. -check-quote-consistency=no - -# This flag controls whether the implicit-str-concat should generate a warning -# on implicit string concatenation in sequences defined over several lines. -check-str-concat-over-line-jumps=no - - -[TYPECHECK] - -# List of decorators that produce context managers, such as -# contextlib.contextmanager. 
Add to this list to register other decorators that -# produce valid context managers. -contextmanager-decorators=contextlib.contextmanager - -# List of members which are set dynamically and missed by pylint inference -# system, and so shouldn't trigger E1101 when accessed. Python regular -# expressions are accepted. -generated-members= - -# Tells whether missing members accessed in mixin class should be ignored. A -# mixin class is detected if its name ends with "mixin" (case insensitive). -ignore-mixin-members=yes - -# Tells whether to warn about missing members when the owner of the attribute -# is inferred to be None. -ignore-none=yes - -# This flag controls whether pylint should warn about no-member and similar -# checks whenever an opaque object is returned when inferring. The inference -# can return multiple potential results while evaluating a Python object, but -# some branches might not be evaluated, which results in partial inference. In -# that case, it might be useful to still emit no-member and other checks for -# the rest of the inferred objects. -ignore-on-opaque-inference=yes - -# List of class names for which member attributes should not be checked (useful -# for classes with dynamically set attributes). This supports the use of -# qualified names. -ignored-classes=optparse.Values,thread._local,_thread._local - -# List of module names for which member attributes should not be checked -# (useful for modules/projects where namespaces are manipulated during runtime -# and thus existing member attributes cannot be deduced by static analysis). It -# supports qualified module names, as well as Unix pattern matching. -ignored-modules= - -# Show a hint with possible names when a member name was not found. The aspect -# of finding the hint is based on edit distance. -missing-member-hint=yes - -# The minimum edit distance a name should have in order to be considered a -# similar match for a missing member name. 
-missing-member-hint-distance=1 - -# The total number of similar names that should be taken in consideration when -# showing a hint for a missing member. -missing-member-max-choices=1 - -# List of decorators that change the signature of a decorated function. -signature-mutators= - - -[VARIABLES] - -# List of additional names supposed to be defined in builtins. Remember that -# you should avoid defining new builtins when possible. -additional-builtins= - -# Tells whether unused global variables should be treated as a violation. -allow-global-unused-variables=yes - -# List of strings which can identify a callback function by name. A callback -# name must start or end with one of those strings. -callbacks=cb_, - _cb - -# A regular expression matching the name of dummy variables (i.e. expected to -# not be used). -dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ - -# Argument names that match this expression will be ignored. Default to name -# with leading underscore. -ignored-argument-names=_.*|^ignored_|^unused_ - -# Tells whether we should check for unused import in __init__ files. -init-import=no - -# List of qualified module names which can have objects that can redefine -# builtins. -redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io - - -[LOGGING] - -# The type of string formatting that logging methods do. `old` means using % -# formatting, `new` is for `{}` formatting. -logging-format-style=old - -# Logging modules to check that the string format arguments are in logging -# function parameter format. -logging-modules=logging - - -[FORMAT] - -# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. -expected-line-ending-format= - -# Regexp for a line that is allowed to be longer than the limit. -ignore-long-lines=^\s*(# )??$ - -# Number of spaces of indent required inside a hanging or continued line. -indent-after-paren=4 - -# String used as indentation unit. 
This is usually " " (4 spaces) or "\t" (1 -# tab). -indent-string=' ' - -# Maximum number of characters on a single line. -max-line-length=100 - -# Maximum number of lines in a module. -max-module-lines=1000 - -# Allow the body of a class to be on the same line as the declaration if body -# contains single statement. -single-line-class-stmt=no - -# Allow the body of an if to be on the same line as the test if there is no -# else. -single-line-if-stmt=no - - -[SPELLING] - -# Limits count of emitted suggestions for spelling mistakes. -max-spelling-suggestions=4 - -# Spelling dictionary name. Available dictionaries: none. To make it work, -# install the python-enchant package. -spelling-dict= - -# List of comma separated words that should not be checked. -spelling-ignore-words= - -# A path to a file that contains the private dictionary; one word per line. -spelling-private-dict-file= - -# Tells whether to store unknown words to the private dictionary (see the -# --spelling-private-dict-file option) instead of raising a message. -spelling-store-unknown-words=no - - -[MISCELLANEOUS] - -# List of note tags to take in consideration, separated by a comma. -notes=FIXME, - XXX, - TODO - -# Regular expression of note tags to take in consideration. -#notes-rgx= - - -[SIMILARITIES] - -# Ignore comments when computing similarities. -ignore-comments=yes - -# Ignore docstrings when computing similarities. -ignore-docstrings=yes - -# Ignore imports when computing similarities. -ignore-imports=no - -# Minimum lines number of a similarity. -min-similarity-lines=4 - - -[DESIGN] - -# Maximum number of arguments for function / method. -max-args=5 - -# Maximum number of attributes for a class (see R0902). -max-attributes=7 - -# Maximum number of boolean expressions in an if statement (see R0916). -max-bool-expr=5 - -# Maximum number of branch for function / method body. -max-branches=12 - -# Maximum number of locals for function / method body. 
-max-locals=15 - -# Maximum number of parents for a class (see R0901). -max-parents=7 - -# Maximum number of public methods for a class (see R0904). -max-public-methods=20 - -# Maximum number of return / yield for function / method body. -max-returns=6 - -# Maximum number of statements in function / method body. -max-statements=50 - -# Minimum number of public methods for a class (see R0903). -min-public-methods=2 - - -[IMPORTS] - -# List of modules that can be imported at any level, not just the top level -# one. -allow-any-import-level= - -# Allow wildcard imports from modules that define __all__. -allow-wildcard-with-all=no - -# Analyse import fallback blocks. This can be used to support both Python 2 and -# 3 compatible code, which means that the block might have code that exists -# only in one or another interpreter, leading to false positives when analysed. -analyse-fallback-blocks=no - -# Deprecated modules which should not be used, separated by a comma. -deprecated-modules=optparse,tkinter.tix - -# Create a graph of external dependencies in the given file (report RP0402 must -# not be disabled). -ext-import-graph= - -# Create a graph of every (i.e. internal and external) dependencies in the -# given file (report RP0402 must not be disabled). -import-graph= - -# Create a graph of internal dependencies in the given file (report RP0402 must -# not be disabled). -int-import-graph= - -# Force import order to recognize a module as part of the standard -# compatibility libraries. -known-standard-library= - -# Force import order to recognize a module as part of a third party library. -known-third-party=enchant - -# Couples of modules and preferred modules, separated by a comma. -preferred-modules= - - -[CLASSES] - -# Warn about protected attribute access inside special methods -check-protected-access-in-special-methods=no - -# List of method names used to declare (i.e. assign) instance attributes. 
-defining-attr-methods=__init__, - __new__, - setUp, - __post_init__ - -# List of member names, which should be excluded from the protected access -# warning. -exclude-protected=_asdict, - _fields, - _replace, - _source, - _make - -# List of valid names for the first argument in a class method. -valid-classmethod-first-arg=cls - -# List of valid names for the first argument in a metaclass class method. -valid-metaclass-classmethod-first-arg=cls - - -[EXCEPTIONS] - -# Exceptions that will emit a warning when being caught. Defaults to -# "BaseException, Exception". -overgeneral-exceptions=BaseException, - Exception From baec2d7718d9ebf11ade5363829b3eedf31965d3 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sun, 12 Mar 2023 00:22:05 -0600 Subject: [PATCH 16/74] update pylint config --- .github/workflows/pylint.yml | 2 +- .pylintrc | 633 +++++++++++++++++++++++++++++++++++ 2 files changed, 634 insertions(+), 1 deletion(-) create mode 100644 .pylintrc diff --git a/.github/workflows/pylint.yml b/.github/workflows/pylint.yml index 230bd97..23f8e79 100644 --- a/.github/workflows/pylint.yml +++ b/.github/workflows/pylint.yml @@ -20,4 +20,4 @@ jobs: pip install -r requirements.txt - name: Analysing the code with pylint run: | - pylint $(git ls-files '*.py') --generated-members="cv2.*" + pylint $(git ls-files '*.py') diff --git a/.pylintrc b/.pylintrc new file mode 100644 index 0000000..60bc128 --- /dev/null +++ b/.pylintrc @@ -0,0 +1,633 @@ +[MAIN] + +# Analyse import fallback blocks. This can be used to support both Python 2 and +# 3 compatible code, which means that the block might have code that exists +# only in one or another interpreter, leading to false positives when analysed. +analyse-fallback-blocks=no + +# Clear in-memory caches upon conclusion of linting. Useful if running pylint +# in a server-like mode. +clear-cache-post-run=no + +# Load and enable all available extensions. Use --list-extensions to see a list +# all available extensions. 
+#enable-all-extensions= + +# In error mode, messages with a category besides ERROR or FATAL are +# suppressed, and no reports are done by default. Error mode is compatible with +# disabling specific errors. +#errors-only= + +# Always return a 0 (non-error) status code, even if lint errors are found. +# This is primarily useful in continuous integration scripts. +#exit-zero= + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code. +extension-pkg-allow-list= + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code. (This is an alternative name to extension-pkg-allow-list +# for backward compatibility.) +extension-pkg-whitelist= + +# Return non-zero exit code if any of these messages/categories are detected, +# even if score is above --fail-under value. Syntax same as enable. Messages +# specified are enabled, while categories only check already-enabled messages. +fail-on= + +# Specify a score threshold under which the program will exit with error. +fail-under=10 + +# Interpret the stdin as a python script, whose filename needs to be passed as +# the module_or_package argument. +#from-stdin= + +# Files or directories to be skipped. They should be base names, not paths. +ignore=CVS + +# Add files or directories matching the regular expressions patterns to the +# ignore-list. The regex matches against paths and can be in Posix or Windows +# format. Because '\\' represents the directory delimiter on Windows systems, +# it can't be used as an escape character. +ignore-paths= + +# Files or directories matching the regular expression patterns are skipped. +# The regex matches against base names, not paths. 
The default value ignores +# Emacs file locks +ignore-patterns=^\.# + +# List of module names for which member attributes should not be checked +# (useful for modules/projects where namespaces are manipulated during runtime +# and thus existing member attributes cannot be deduced by static analysis). It +# supports qualified module names, as well as Unix pattern matching. +ignored-modules= + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +#init-hook= + +# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the +# number of processors available to use, and will cap the count on Windows to +# avoid hangs. +jobs=1 + +# Control the amount of potential inferred values when inferring a single +# object. This can help the performance when dealing with large functions or +# complex, nested conditions. +limit-inference-results=100 + +# List of plugins (as comma separated values of python module names) to load, +# usually to register additional checkers. +load-plugins= + +# Pickle collected data for later comparisons. +persistent=yes + +# Minimum Python version to use for version dependent checks. Will default to +# the version used to run pylint. +py-version=3.9 + +# Discover python modules and packages in the file system subtree. +recursive=no + +# Add paths to the list of the source roots. Supports globbing patterns. The +# source root is an absolute path or a path relative to the current working +# directory used to determine a package namespace for modules located under the +# source root. +source-roots= + +# When enabled, pylint would attempt to guess common misconfiguration and emit +# user-friendly hints instead of false-positive error messages. +suggestion-mode=yes + +# Allow loading of arbitrary C extensions. Extensions are imported into the +# active Python interpreter and may run arbitrary code. +unsafe-load-any-extension=no + +# In verbose mode, extra non-checker-related info will be displayed. 
+#verbose= + + +[BASIC] + +# Naming style matching correct argument names. +argument-naming-style=snake_case + +# Regular expression matching correct argument names. Overrides argument- +# naming-style. If left empty, argument names will be checked with the set +# naming style. +#argument-rgx= + +# Naming style matching correct attribute names. +attr-naming-style=snake_case + +# Regular expression matching correct attribute names. Overrides attr-naming- +# style. If left empty, attribute names will be checked with the set naming +# style. +#attr-rgx= + +# Bad variable names which should always be refused, separated by a comma. +bad-names=foo, + bar, + baz, + toto, + tutu, + tata + +# Bad variable names regexes, separated by a comma. If names match any regex, +# they will always be refused +bad-names-rgxs= + +# Naming style matching correct class attribute names. +class-attribute-naming-style=any + +# Regular expression matching correct class attribute names. Overrides class- +# attribute-naming-style. If left empty, class attribute names will be checked +# with the set naming style. +#class-attribute-rgx= + +# Naming style matching correct class constant names. +class-const-naming-style=UPPER_CASE + +# Regular expression matching correct class constant names. Overrides class- +# const-naming-style. If left empty, class constant names will be checked with +# the set naming style. +#class-const-rgx= + +# Naming style matching correct class names. +class-naming-style=PascalCase + +# Regular expression matching correct class names. Overrides class-naming- +# style. If left empty, class names will be checked with the set naming style. +#class-rgx= + +# Naming style matching correct constant names. +const-naming-style=UPPER_CASE + +# Regular expression matching correct constant names. Overrides const-naming- +# style. If left empty, constant names will be checked with the set naming +# style. 
+#const-rgx= + +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. +docstring-min-length=-1 + +# Naming style matching correct function names. +function-naming-style=snake_case + +# Regular expression matching correct function names. Overrides function- +# naming-style. If left empty, function names will be checked with the set +# naming style. +#function-rgx= + +# Good variable names which should always be accepted, separated by a comma. +good-names=i, + j, + k, + ex, + Run, + _, + x, + y, + w, + h + +# Good variable names regexes, separated by a comma. If names match any regex, +# they will always be accepted +good-names-rgxs= + +# Include a hint for the correct naming format with invalid-name. +include-naming-hint=no + +# Naming style matching correct inline iteration names. +inlinevar-naming-style=any + +# Regular expression matching correct inline iteration names. Overrides +# inlinevar-naming-style. If left empty, inline iteration names will be checked +# with the set naming style. +#inlinevar-rgx= + +# Naming style matching correct method names. +method-naming-style=snake_case + +# Regular expression matching correct method names. Overrides method-naming- +# style. If left empty, method names will be checked with the set naming style. +#method-rgx= + +# Naming style matching correct module names. +module-naming-style=snake_case + +# Regular expression matching correct module names. Overrides module-naming- +# style. If left empty, module names will be checked with the set naming style. +#module-rgx= + +# Colon-delimited sets of names that determine each other's naming style when +# the name regexes allow several styles. +name-group= + +# Regular expression which should only match function or class names that do +# not require a docstring. +no-docstring-rgx=^_ + +# List of decorators that produce properties, such as abc.abstractproperty. 
Add +# to this list to register other decorators that produce valid properties. +# These decorators are taken in consideration only for invalid-name. +property-classes=abc.abstractproperty + +# Regular expression matching correct type alias names. If left empty, type +# alias names will be checked with the set naming style. +#typealias-rgx= + +# Regular expression matching correct type variable names. If left empty, type +# variable names will be checked with the set naming style. +#typevar-rgx= + +# Naming style matching correct variable names. +variable-naming-style=snake_case + +# Regular expression matching correct variable names. Overrides variable- +# naming-style. If left empty, variable names will be checked with the set +# naming style. +#variable-rgx= + + +[CLASSES] + +# Warn about protected attribute access inside special methods +check-protected-access-in-special-methods=no + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__, + __new__, + setUp, + __post_init__ + +# List of member names, which should be excluded from the protected access +# warning. +exclude-protected=_asdict,_fields,_replace,_source,_make,os._exit + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg=mcs + + +[DESIGN] + +# List of regular expressions of class ancestor names to ignore when counting +# public methods (see R0903) +exclude-too-few-public-methods= + +# List of qualified class names to ignore when counting class parents (see +# R0901) +ignored-parents= + +# Maximum number of arguments for function / method. +max-args=5 + +# Maximum number of attributes for a class (see R0902). +max-attributes=7 + +# Maximum number of boolean expressions in an if statement (see R0916). +max-bool-expr=5 + +# Maximum number of branch for function / method body. 
+max-branches=12
+
+# Maximum number of locals for function / method body.
+max-locals=15
+
+# Maximum number of parents for a class (see R0901).
+max-parents=7
+
+# Maximum number of public methods for a class (see R0904).
+max-public-methods=20
+
+# Maximum number of return / yield for function / method body.
+max-returns=6
+
+# Maximum number of statements in function / method body.
+max-statements=50
+
+# Minimum number of public methods for a class (see R0903).
+min-public-methods=2
+
+
+[EXCEPTIONS]
+
+# Exceptions that will emit a warning when caught.
+overgeneral-exceptions=builtins.BaseException,builtins.Exception
+
+
+[FORMAT]
+
+# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
+expected-line-ending-format=
+
+# Regexp for a line that is allowed to be longer than the limit.
+ignore-long-lines=^\s*(# )?<?https?://\S+>?$
+
+# Number of spaces of indent required inside a hanging or continued line.
+indent-after-paren=4
+
+# String used as indentation unit. This is usually "    " (4 spaces) or "\t" (1
+# tab).
+indent-string='    '
+
+# Maximum number of characters on a single line.
+max-line-length=100
+
+# Maximum number of lines in a module.
+max-module-lines=1000
+
+# Allow the body of a class to be on the same line as the declaration if body
+# contains single statement.
+single-line-class-stmt=no
+
+# Allow the body of an if to be on the same line as the test if there is no
+# else.
+single-line-if-stmt=no
+
+
+[IMPORTS]
+
+# List of modules that can be imported at any level, not just the top level
+# one.
+allow-any-import-level=
+
+# Allow explicit reexports by alias from a package __init__.
+allow-reexport-from-package=no
+
+# Allow wildcard imports from modules that define __all__.
+allow-wildcard-with-all=no
+
+# Deprecated modules which should not be used, separated by a comma.
+deprecated-modules=
+
+# Output a graph (.gv or any supported image format) of external dependencies
+# to the given file (report RP0402 must not be disabled).
+ext-import-graph= + +# Output a graph (.gv or any supported image format) of all (i.e. internal and +# external) dependencies to the given file (report RP0402 must not be +# disabled). +import-graph= + +# Output a graph (.gv or any supported image format) of internal dependencies +# to the given file (report RP0402 must not be disabled). +int-import-graph= + +# Force import order to recognize a module as part of the standard +# compatibility libraries. +known-standard-library= + +# Force import order to recognize a module as part of a third party library. +known-third-party=enchant + +# Couples of modules and preferred modules, separated by a comma. +preferred-modules= + + +[LOGGING] + +# The type of string formatting that logging methods do. `old` means using % +# formatting, `new` is for `{}` formatting. +logging-format-style=old + +# Logging modules to check that the string format arguments are in logging +# function parameter format. +logging-modules=logging + + +[MESSAGES CONTROL] + +# Only show warnings with the listed confidence levels. Leave empty to show +# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, +# UNDEFINED. +confidence=HIGH, + CONTROL_FLOW, + INFERENCE, + INFERENCE_FAILURE, + UNDEFINED + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once). You can also use "--disable=all" to +# disable everything first and then re-enable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use "--disable=all --enable=classes +# --disable=W". 
+disable=raw-checker-failed, + bad-inline-option, + locally-disabled, + file-ignored, + suppressed-message, + useless-suppression, + deprecated-pragma, + use-symbolic-message-instead + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where +# it should appear only once). See also the "--disable" option for examples. +enable=c-extension-no-member + + +[METHOD_ARGS] + +# List of qualified names (i.e., library.method) which require a timeout +# parameter e.g. 'requests.api.get,requests.api.post' +timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME, + XXX + +# Regular expression of note tags to take in consideration. +notes-rgx= + + +[REFACTORING] + +# Maximum number of nested blocks for function / method body +max-nested-blocks=5 + +# Complete name of functions that never returns. When checking for +# inconsistent-return-statements if a never returning function is called then +# it will be considered as an explicit return statement and no message will be +# printed. +never-returning-functions=sys.exit,argparse.parse_error + + +[REPORTS] + +# Python expression which should return a score less than or equal to 10. You +# have access to the variables 'fatal', 'error', 'warning', 'refactor', +# 'convention', and 'info' which contain the number of messages in each +# category, as well as 'statement' which is the total number of statements +# analyzed. This score is used by the global evaluation report (RP0004). +evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)) + +# Template used to display messages. 
This is a python new-style format string +# used to format the message information. See doc for all details. +msg-template= + +# Set the output format. Available formats are text, parseable, colorized, json +# and msvs (visual studio). You can also give a reporter class, e.g. +# mypackage.mymodule.MyReporterClass. +#output-format= + +# Tells whether to display a full report or only the messages. +reports=no + +# Activate the evaluation score. +score=yes + + +[SIMILARITIES] + +# Comments are removed from the similarity computation +ignore-comments=yes + +# Docstrings are removed from the similarity computation +ignore-docstrings=yes + +# Imports are removed from the similarity computation +ignore-imports=yes + +# Signatures are removed from the similarity computation +ignore-signatures=yes + +# Minimum lines number of a similarity. +min-similarity-lines=4 + + +[SPELLING] + +# Limits count of emitted suggestions for spelling mistakes. +max-spelling-suggestions=4 + +# Spelling dictionary name. No available dictionaries : You need to install +# both the python package and the system dependency for enchant to work.. +spelling-dict= + +# List of comma separated words that should be considered directives if they +# appear at the beginning of a comment and should not be checked. +spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy: + +# List of comma separated words that should not be checked. +spelling-ignore-words= + +# A path to a file that contains the private dictionary; one word per line. +spelling-private-dict-file= + +# Tells whether to store unknown words to the private dictionary (see the +# --spelling-private-dict-file option) instead of raising a message. +spelling-store-unknown-words=no + + +[STRING] + +# This flag controls whether inconsistent-quotes generates a warning when the +# character used as a quote delimiter is used inconsistently within a module. 
+check-quote-consistency=no + +# This flag controls whether the implicit-str-concat should generate a warning +# on implicit string concatenation in sequences defined over several lines. +check-str-concat-over-line-jumps=no + + +[TYPECHECK] + +# List of decorators that produce context managers, such as +# contextlib.contextmanager. Add to this list to register other decorators that +# produce valid context managers. +contextmanager-decorators=contextlib.contextmanager + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. +generated-members="cv2.*" + +# Tells whether to warn about missing members when the owner of the attribute +# is inferred to be None. +ignore-none=yes + +# This flag controls whether pylint should warn about no-member and similar +# checks whenever an opaque object is returned when inferring. The inference +# can return multiple potential results while evaluating a Python object, but +# some branches might not be evaluated, which results in partial inference. In +# that case, it might be useful to still emit no-member and other checks for +# the rest of the inferred objects. +ignore-on-opaque-inference=yes + +# List of symbolic message names to ignore for Mixin members. +ignored-checks-for-mixins=no-member, + not-async-context-manager, + not-context-manager, + attribute-defined-outside-init + +# List of class names for which member attributes should not be checked (useful +# for classes with dynamically set attributes). This supports the use of +# qualified names. +ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace + +# Show a hint with possible names when a member name was not found. The aspect +# of finding the hint is based on edit distance. +missing-member-hint=yes + +# The minimum edit distance a name should have in order to be considered a +# similar match for a missing member name. 
+missing-member-hint-distance=1 + +# The total number of similar names that should be taken in consideration when +# showing a hint for a missing member. +missing-member-max-choices=1 + +# Regex pattern to define which classes are considered mixins. +mixin-class-rgx=.*[Mm]ixin + +# List of decorators that change the signature of a decorated function. +signature-mutators= + + +[VARIABLES] + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid defining new builtins when possible. +additional-builtins= + +# Tells whether unused global variables should be treated as a violation. +allow-global-unused-variables=yes + +# List of names allowed to shadow builtins +allowed-redefined-builtins= + +# List of strings which can identify a callback function by name. A callback +# name must start or end with one of those strings. +callbacks=cb_, + _cb + +# A regular expression matching the name of dummy variables (i.e. expected to +# not be used). +dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ + +# Argument names that match this expression will be ignored. +ignored-argument-names=_.*|^ignored_|^unused_ + +# Tells whether we should check for unused import in __init__ files. +init-import=no + +# List of qualified module names which can have objects that can redefine +# builtins. 
+redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io From 29ac6b073f982da2f178b8320b489eb17b46415e Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sun, 12 Mar 2023 01:48:24 -0600 Subject: [PATCH 17/74] switch from painful pylint to autopep8 --- .github/workflows/autopep8.yml | 2 + .github/workflows/pylint.yml | 23 -- .pylintrc | 633 --------------------------------- 3 files changed, 2 insertions(+), 656 deletions(-) create mode 100644 .github/workflows/autopep8.yml delete mode 100644 .github/workflows/pylint.yml delete mode 100644 .pylintrc diff --git a/.github/workflows/autopep8.yml b/.github/workflows/autopep8.yml new file mode 100644 index 0000000..4efbfde --- /dev/null +++ b/.github/workflows/autopep8.yml @@ -0,0 +1,2 @@ +- name: autopep8 + uses: peter-evans/autopep8@v2.0.0 diff --git a/.github/workflows/pylint.yml b/.github/workflows/pylint.yml deleted file mode 100644 index 23f8e79..0000000 --- a/.github/workflows/pylint.yml +++ /dev/null @@ -1,23 +0,0 @@ -name: Pylint - -on: [push] - -jobs: - build: - runs-on: ubuntu-latest - strategy: - matrix: - python-version: ["3.8", "3.9", "3.10"] - steps: - - uses: actions/checkout@v3 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v3 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install -r requirements.txt - - name: Analysing the code with pylint - run: | - pylint $(git ls-files '*.py') diff --git a/.pylintrc b/.pylintrc deleted file mode 100644 index 60bc128..0000000 --- a/.pylintrc +++ /dev/null @@ -1,633 +0,0 @@ -[MAIN] - -# Analyse import fallback blocks. This can be used to support both Python 2 and -# 3 compatible code, which means that the block might have code that exists -# only in one or another interpreter, leading to false positives when analysed. -analyse-fallback-blocks=no - -# Clear in-memory caches upon conclusion of linting. 
Useful if running pylint -# in a server-like mode. -clear-cache-post-run=no - -# Load and enable all available extensions. Use --list-extensions to see a list -# all available extensions. -#enable-all-extensions= - -# In error mode, messages with a category besides ERROR or FATAL are -# suppressed, and no reports are done by default. Error mode is compatible with -# disabling specific errors. -#errors-only= - -# Always return a 0 (non-error) status code, even if lint errors are found. -# This is primarily useful in continuous integration scripts. -#exit-zero= - -# A comma-separated list of package or module names from where C extensions may -# be loaded. Extensions are loading into the active Python interpreter and may -# run arbitrary code. -extension-pkg-allow-list= - -# A comma-separated list of package or module names from where C extensions may -# be loaded. Extensions are loading into the active Python interpreter and may -# run arbitrary code. (This is an alternative name to extension-pkg-allow-list -# for backward compatibility.) -extension-pkg-whitelist= - -# Return non-zero exit code if any of these messages/categories are detected, -# even if score is above --fail-under value. Syntax same as enable. Messages -# specified are enabled, while categories only check already-enabled messages. -fail-on= - -# Specify a score threshold under which the program will exit with error. -fail-under=10 - -# Interpret the stdin as a python script, whose filename needs to be passed as -# the module_or_package argument. -#from-stdin= - -# Files or directories to be skipped. They should be base names, not paths. -ignore=CVS - -# Add files or directories matching the regular expressions patterns to the -# ignore-list. The regex matches against paths and can be in Posix or Windows -# format. Because '\\' represents the directory delimiter on Windows systems, -# it can't be used as an escape character. 
-ignore-paths= - -# Files or directories matching the regular expression patterns are skipped. -# The regex matches against base names, not paths. The default value ignores -# Emacs file locks -ignore-patterns=^\.# - -# List of module names for which member attributes should not be checked -# (useful for modules/projects where namespaces are manipulated during runtime -# and thus existing member attributes cannot be deduced by static analysis). It -# supports qualified module names, as well as Unix pattern matching. -ignored-modules= - -# Python code to execute, usually for sys.path manipulation such as -# pygtk.require(). -#init-hook= - -# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the -# number of processors available to use, and will cap the count on Windows to -# avoid hangs. -jobs=1 - -# Control the amount of potential inferred values when inferring a single -# object. This can help the performance when dealing with large functions or -# complex, nested conditions. -limit-inference-results=100 - -# List of plugins (as comma separated values of python module names) to load, -# usually to register additional checkers. -load-plugins= - -# Pickle collected data for later comparisons. -persistent=yes - -# Minimum Python version to use for version dependent checks. Will default to -# the version used to run pylint. -py-version=3.9 - -# Discover python modules and packages in the file system subtree. -recursive=no - -# Add paths to the list of the source roots. Supports globbing patterns. The -# source root is an absolute path or a path relative to the current working -# directory used to determine a package namespace for modules located under the -# source root. -source-roots= - -# When enabled, pylint would attempt to guess common misconfiguration and emit -# user-friendly hints instead of false-positive error messages. -suggestion-mode=yes - -# Allow loading of arbitrary C extensions. 
Extensions are imported into the -# active Python interpreter and may run arbitrary code. -unsafe-load-any-extension=no - -# In verbose mode, extra non-checker-related info will be displayed. -#verbose= - - -[BASIC] - -# Naming style matching correct argument names. -argument-naming-style=snake_case - -# Regular expression matching correct argument names. Overrides argument- -# naming-style. If left empty, argument names will be checked with the set -# naming style. -#argument-rgx= - -# Naming style matching correct attribute names. -attr-naming-style=snake_case - -# Regular expression matching correct attribute names. Overrides attr-naming- -# style. If left empty, attribute names will be checked with the set naming -# style. -#attr-rgx= - -# Bad variable names which should always be refused, separated by a comma. -bad-names=foo, - bar, - baz, - toto, - tutu, - tata - -# Bad variable names regexes, separated by a comma. If names match any regex, -# they will always be refused -bad-names-rgxs= - -# Naming style matching correct class attribute names. -class-attribute-naming-style=any - -# Regular expression matching correct class attribute names. Overrides class- -# attribute-naming-style. If left empty, class attribute names will be checked -# with the set naming style. -#class-attribute-rgx= - -# Naming style matching correct class constant names. -class-const-naming-style=UPPER_CASE - -# Regular expression matching correct class constant names. Overrides class- -# const-naming-style. If left empty, class constant names will be checked with -# the set naming style. -#class-const-rgx= - -# Naming style matching correct class names. -class-naming-style=PascalCase - -# Regular expression matching correct class names. Overrides class-naming- -# style. If left empty, class names will be checked with the set naming style. -#class-rgx= - -# Naming style matching correct constant names. 
-const-naming-style=UPPER_CASE - -# Regular expression matching correct constant names. Overrides const-naming- -# style. If left empty, constant names will be checked with the set naming -# style. -#const-rgx= - -# Minimum line length for functions/classes that require docstrings, shorter -# ones are exempt. -docstring-min-length=-1 - -# Naming style matching correct function names. -function-naming-style=snake_case - -# Regular expression matching correct function names. Overrides function- -# naming-style. If left empty, function names will be checked with the set -# naming style. -#function-rgx= - -# Good variable names which should always be accepted, separated by a comma. -good-names=i, - j, - k, - ex, - Run, - _, - x, - y, - w, - h - -# Good variable names regexes, separated by a comma. If names match any regex, -# they will always be accepted -good-names-rgxs= - -# Include a hint for the correct naming format with invalid-name. -include-naming-hint=no - -# Naming style matching correct inline iteration names. -inlinevar-naming-style=any - -# Regular expression matching correct inline iteration names. Overrides -# inlinevar-naming-style. If left empty, inline iteration names will be checked -# with the set naming style. -#inlinevar-rgx= - -# Naming style matching correct method names. -method-naming-style=snake_case - -# Regular expression matching correct method names. Overrides method-naming- -# style. If left empty, method names will be checked with the set naming style. -#method-rgx= - -# Naming style matching correct module names. -module-naming-style=snake_case - -# Regular expression matching correct module names. Overrides module-naming- -# style. If left empty, module names will be checked with the set naming style. -#module-rgx= - -# Colon-delimited sets of names that determine each other's naming style when -# the name regexes allow several styles. 
-name-group= - -# Regular expression which should only match function or class names that do -# not require a docstring. -no-docstring-rgx=^_ - -# List of decorators that produce properties, such as abc.abstractproperty. Add -# to this list to register other decorators that produce valid properties. -# These decorators are taken in consideration only for invalid-name. -property-classes=abc.abstractproperty - -# Regular expression matching correct type alias names. If left empty, type -# alias names will be checked with the set naming style. -#typealias-rgx= - -# Regular expression matching correct type variable names. If left empty, type -# variable names will be checked with the set naming style. -#typevar-rgx= - -# Naming style matching correct variable names. -variable-naming-style=snake_case - -# Regular expression matching correct variable names. Overrides variable- -# naming-style. If left empty, variable names will be checked with the set -# naming style. -#variable-rgx= - - -[CLASSES] - -# Warn about protected attribute access inside special methods -check-protected-access-in-special-methods=no - -# List of method names used to declare (i.e. assign) instance attributes. -defining-attr-methods=__init__, - __new__, - setUp, - __post_init__ - -# List of member names, which should be excluded from the protected access -# warning. -exclude-protected=_asdict,_fields,_replace,_source,_make,os._exit - -# List of valid names for the first argument in a class method. -valid-classmethod-first-arg=cls - -# List of valid names for the first argument in a metaclass class method. -valid-metaclass-classmethod-first-arg=mcs - - -[DESIGN] - -# List of regular expressions of class ancestor names to ignore when counting -# public methods (see R0903) -exclude-too-few-public-methods= - -# List of qualified class names to ignore when counting class parents (see -# R0901) -ignored-parents= - -# Maximum number of arguments for function / method. 
-max-args=5
-
-# Maximum number of attributes for a class (see R0902).
-max-attributes=7
-
-# Maximum number of boolean expressions in an if statement (see R0916).
-max-bool-expr=5
-
-# Maximum number of branch for function / method body.
-max-branches=12
-
-# Maximum number of locals for function / method body.
-max-locals=15
-
-# Maximum number of parents for a class (see R0901).
-max-parents=7
-
-# Maximum number of public methods for a class (see R0904).
-max-public-methods=20
-
-# Maximum number of return / yield for function / method body.
-max-returns=6
-
-# Maximum number of statements in function / method body.
-max-statements=50
-
-# Minimum number of public methods for a class (see R0903).
-min-public-methods=2
-
-
-[EXCEPTIONS]
-
-# Exceptions that will emit a warning when caught.
-overgeneral-exceptions=builtins.BaseException,builtins.Exception
-
-
-[FORMAT]
-
-# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
-expected-line-ending-format=
-
-# Regexp for a line that is allowed to be longer than the limit.
-ignore-long-lines=^\s*(# )?<?https?://\S+>?$
-
-# Number of spaces of indent required inside a hanging or continued line.
-indent-after-paren=4
-
-# String used as indentation unit. This is usually "    " (4 spaces) or "\t" (1
-# tab).
-indent-string='    '
-
-# Maximum number of characters on a single line.
-max-line-length=100
-
-# Maximum number of lines in a module.
-max-module-lines=1000
-
-# Allow the body of a class to be on the same line as the declaration if body
-# contains single statement.
-single-line-class-stmt=no
-
-# Allow the body of an if to be on the same line as the test if there is no
-# else.
-single-line-if-stmt=no
-
-
-[IMPORTS]
-
-# List of modules that can be imported at any level, not just the top level
-# one.
-allow-any-import-level=
-
-# Allow explicit reexports by alias from a package __init__.
-allow-reexport-from-package=no
-
-# Allow wildcard imports from modules that define __all__.
-allow-wildcard-with-all=no - -# Deprecated modules which should not be used, separated by a comma. -deprecated-modules= - -# Output a graph (.gv or any supported image format) of external dependencies -# to the given file (report RP0402 must not be disabled). -ext-import-graph= - -# Output a graph (.gv or any supported image format) of all (i.e. internal and -# external) dependencies to the given file (report RP0402 must not be -# disabled). -import-graph= - -# Output a graph (.gv or any supported image format) of internal dependencies -# to the given file (report RP0402 must not be disabled). -int-import-graph= - -# Force import order to recognize a module as part of the standard -# compatibility libraries. -known-standard-library= - -# Force import order to recognize a module as part of a third party library. -known-third-party=enchant - -# Couples of modules and preferred modules, separated by a comma. -preferred-modules= - - -[LOGGING] - -# The type of string formatting that logging methods do. `old` means using % -# formatting, `new` is for `{}` formatting. -logging-format-style=old - -# Logging modules to check that the string format arguments are in logging -# function parameter format. -logging-modules=logging - - -[MESSAGES CONTROL] - -# Only show warnings with the listed confidence levels. Leave empty to show -# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, -# UNDEFINED. -confidence=HIGH, - CONTROL_FLOW, - INFERENCE, - INFERENCE_FAILURE, - UNDEFINED - -# Disable the message, report, category or checker with the given id(s). You -# can either give multiple identifiers separated by comma (,) or put this -# option multiple times (only on the command line, not in the configuration -# file where it should appear only once). You can also use "--disable=all" to -# disable everything first and then re-enable specific checks. 
For example, if -# you want to run only the similarities checker, you can use "--disable=all -# --enable=similarities". If you want to run only the classes checker, but have -# no Warning level messages displayed, use "--disable=all --enable=classes -# --disable=W". -disable=raw-checker-failed, - bad-inline-option, - locally-disabled, - file-ignored, - suppressed-message, - useless-suppression, - deprecated-pragma, - use-symbolic-message-instead - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time (only on the command line, not in the configuration file where -# it should appear only once). See also the "--disable" option for examples. -enable=c-extension-no-member - - -[METHOD_ARGS] - -# List of qualified names (i.e., library.method) which require a timeout -# parameter e.g. 'requests.api.get,requests.api.post' -timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request - - -[MISCELLANEOUS] - -# List of note tags to take in consideration, separated by a comma. -notes=FIXME, - XXX - -# Regular expression of note tags to take in consideration. -notes-rgx= - - -[REFACTORING] - -# Maximum number of nested blocks for function / method body -max-nested-blocks=5 - -# Complete name of functions that never returns. When checking for -# inconsistent-return-statements if a never returning function is called then -# it will be considered as an explicit return statement and no message will be -# printed. -never-returning-functions=sys.exit,argparse.parse_error - - -[REPORTS] - -# Python expression which should return a score less than or equal to 10. 
You -# have access to the variables 'fatal', 'error', 'warning', 'refactor', -# 'convention', and 'info' which contain the number of messages in each -# category, as well as 'statement' which is the total number of statements -# analyzed. This score is used by the global evaluation report (RP0004). -evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)) - -# Template used to display messages. This is a python new-style format string -# used to format the message information. See doc for all details. -msg-template= - -# Set the output format. Available formats are text, parseable, colorized, json -# and msvs (visual studio). You can also give a reporter class, e.g. -# mypackage.mymodule.MyReporterClass. -#output-format= - -# Tells whether to display a full report or only the messages. -reports=no - -# Activate the evaluation score. -score=yes - - -[SIMILARITIES] - -# Comments are removed from the similarity computation -ignore-comments=yes - -# Docstrings are removed from the similarity computation -ignore-docstrings=yes - -# Imports are removed from the similarity computation -ignore-imports=yes - -# Signatures are removed from the similarity computation -ignore-signatures=yes - -# Minimum lines number of a similarity. -min-similarity-lines=4 - - -[SPELLING] - -# Limits count of emitted suggestions for spelling mistakes. -max-spelling-suggestions=4 - -# Spelling dictionary name. No available dictionaries : You need to install -# both the python package and the system dependency for enchant to work.. -spelling-dict= - -# List of comma separated words that should be considered directives if they -# appear at the beginning of a comment and should not be checked. -spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy: - -# List of comma separated words that should not be checked. -spelling-ignore-words= - -# A path to a file that contains the private dictionary; one word per line. 
-spelling-private-dict-file= - -# Tells whether to store unknown words to the private dictionary (see the -# --spelling-private-dict-file option) instead of raising a message. -spelling-store-unknown-words=no - - -[STRING] - -# This flag controls whether inconsistent-quotes generates a warning when the -# character used as a quote delimiter is used inconsistently within a module. -check-quote-consistency=no - -# This flag controls whether the implicit-str-concat should generate a warning -# on implicit string concatenation in sequences defined over several lines. -check-str-concat-over-line-jumps=no - - -[TYPECHECK] - -# List of decorators that produce context managers, such as -# contextlib.contextmanager. Add to this list to register other decorators that -# produce valid context managers. -contextmanager-decorators=contextlib.contextmanager - -# List of members which are set dynamically and missed by pylint inference -# system, and so shouldn't trigger E1101 when accessed. Python regular -# expressions are accepted. -generated-members="cv2.*" - -# Tells whether to warn about missing members when the owner of the attribute -# is inferred to be None. -ignore-none=yes - -# This flag controls whether pylint should warn about no-member and similar -# checks whenever an opaque object is returned when inferring. The inference -# can return multiple potential results while evaluating a Python object, but -# some branches might not be evaluated, which results in partial inference. In -# that case, it might be useful to still emit no-member and other checks for -# the rest of the inferred objects. -ignore-on-opaque-inference=yes - -# List of symbolic message names to ignore for Mixin members. -ignored-checks-for-mixins=no-member, - not-async-context-manager, - not-context-manager, - attribute-defined-outside-init - -# List of class names for which member attributes should not be checked (useful -# for classes with dynamically set attributes). 
This supports the use of -# qualified names. -ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace - -# Show a hint with possible names when a member name was not found. The aspect -# of finding the hint is based on edit distance. -missing-member-hint=yes - -# The minimum edit distance a name should have in order to be considered a -# similar match for a missing member name. -missing-member-hint-distance=1 - -# The total number of similar names that should be taken in consideration when -# showing a hint for a missing member. -missing-member-max-choices=1 - -# Regex pattern to define which classes are considered mixins. -mixin-class-rgx=.*[Mm]ixin - -# List of decorators that change the signature of a decorated function. -signature-mutators= - - -[VARIABLES] - -# List of additional names supposed to be defined in builtins. Remember that -# you should avoid defining new builtins when possible. -additional-builtins= - -# Tells whether unused global variables should be treated as a violation. -allow-global-unused-variables=yes - -# List of names allowed to shadow builtins -allowed-redefined-builtins= - -# List of strings which can identify a callback function by name. A callback -# name must start or end with one of those strings. -callbacks=cb_, - _cb - -# A regular expression matching the name of dummy variables (i.e. expected to -# not be used). -dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ - -# Argument names that match this expression will be ignored. -ignored-argument-names=_.*|^ignored_|^unused_ - -# Tells whether we should check for unused import in __init__ files. -init-import=no - -# List of qualified module names which can have objects that can redefine -# builtins. 
-redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io From 989152a9f4eb472ed0c4466e9162c993a0705f7f Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sun, 12 Mar 2023 01:56:52 -0600 Subject: [PATCH 18/74] switch to pycodestyle / pydocstyle --- .github/workflows/autopep8.yml | 2 -- .github/workflows/formatting.yml | 25 +++++++++++++++++++++++++ 2 files changed, 25 insertions(+), 2 deletions(-) delete mode 100644 .github/workflows/autopep8.yml create mode 100644 .github/workflows/formatting.yml diff --git a/.github/workflows/autopep8.yml b/.github/workflows/autopep8.yml deleted file mode 100644 index 4efbfde..0000000 --- a/.github/workflows/autopep8.yml +++ /dev/null @@ -1,2 +0,0 @@ -- name: autopep8 - uses: peter-evans/autopep8@v2.0.0 diff --git a/.github/workflows/formatting.yml b/.github/workflows/formatting.yml new file mode 100644 index 0000000..216dbdc --- /dev/null +++ b/.github/workflows/formatting.yml @@ -0,0 +1,25 @@ +name: formatting_check + +on: [push] + +jobs: + build: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.8", "3.9", "3.10"] + steps: + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v3 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install pycodestyle + pip install pydocstyle + - name: Analysing the code with pylint + run: | + pycodestyle --max-line-length=100 ./ + pydocstyle ./ \ No newline at end of file From b064dd5244e1baf69b333da4f61d310ef04e2675 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sun, 12 Mar 2023 03:00:06 -0500 Subject: [PATCH 19/74] autopep8 linting --- Aiming/Aim.py | 43 ++++++++++++++++------ Aiming/Tracking/__init__.py | 2 +- Aiming/Tracking/basic_tracker.py | 9 +++-- Camera/shl_200ws.py | 3 +- Communication/communicator.py | 13 ++++--- Detection/CV_mix_DL.py | 62 +++++++++++++++++++++----------- Detection/YOLO.py | 30 
+++++++++++----- vision.py | 4 +-- 8 files changed, 116 insertions(+), 50 deletions(-) diff --git a/Aiming/Aim.py b/Aiming/Aim.py index 0650d53..d1acca5 100644 --- a/Aiming/Aim.py +++ b/Aiming/Aim.py @@ -1,16 +1,34 @@ -import numpy as np -import config -import Utils - +""" +Hosts the Aim class, which is the main class for auto-aiming. +""" from .Tracking import basic_tracker class Aim: + """ + The main class for auto-aiming. Its workflow: + 1. takes in a list of predictions from the detection module + 2. apply tracker to fill in missing predictions / append or associate predictions + 3. It selects a single armor to hit + 4. It computes the yaw/pitch difference to the armor + 5. Finally, apply posterior calibration (e.g., range table to hit far targets) + """ + def __init__(self, config): self.CFG = config self.tracker = basic_tracker(self.CFG) def process_one(self, pred_list, enemy_team, rgb_img): + """Process one frame of predictions + + Args: + pred_list (list): a list of predictions + enemy_team (str): 'blue' or 'red' + rgb_img (np.ndarray): RGB image + + Returns: + dict: a dictionary of results + """ assert enemy_team in ['blue', 'red'] # TODO: use assertion to check enemy_team @@ -63,7 +81,8 @@ def posterior_calibration(self, yaw_diff, pitch_diff, distance): def get_closet_pred(self, bbox_list, rgb_img): '''Get the closet prediction to camera focal point''' - # TODO: instead of camera focal point; calibrate closet pred to operator view + # TODO: instead of camera focal point; calibrate closet pred to + # operator view H, W, C = rgb_img.shape focal_y = H / 2 focal_x = W / 2 @@ -82,11 +101,13 @@ def get_closet_pred(self, bbox_list, rgb_img): closet_dist = cur_dist return closet_pred, closet_dist - @staticmethod - def get_rotation_angle(bbox_center_x, bbox_center_y): - yaw_diff = (bbox_center_x - config.IMG_CENTER_X) * \ - (config.AUTOAIM_CAMERA.YAW_FOV_HALF / config.IMG_CENTER_X) - pitch_diff = (bbox_center_y - config.IMG_CENTER_Y) * \ - 
(config.AUTOAIM_CAMERA.PITCH_FOV_HALF / config.IMG_CENTER_Y) + def get_rotation_angle(self, bbox_center_x, bbox_center_y): + """ + Given a bounding box center, return the yaw/pitch difference in radians + """ + yaw_diff = (bbox_center_x - self.CFG.IMG_CENTER_X) * \ + (self.CFG.AUTOAIM_CAMERA.YAW_FOV_HALF / self.CFG.IMG_CENTER_X) + pitch_diff = (bbox_center_y - self.CFG.IMG_CENTER_Y) * \ + (self.CFG.AUTOAIM_CAMERA.PITCH_FOV_HALF / self.CFG.IMG_CENTER_Y) return yaw_diff, pitch_diff diff --git a/Aiming/Tracking/__init__.py b/Aiming/Tracking/__init__.py index 2d2f5c0..ef56483 100644 --- a/Aiming/Tracking/__init__.py +++ b/Aiming/Tracking/__init__.py @@ -1 +1 @@ -from .basic_tracker import basic_tracker \ No newline at end of file +from .basic_tracker import basic_tracker diff --git a/Aiming/Tracking/basic_tracker.py b/Aiming/Tracking/basic_tracker.py index 788810f..6b13155 100644 --- a/Aiming/Tracking/basic_tracker.py +++ b/Aiming/Tracking/basic_tracker.py @@ -30,12 +30,14 @@ def update(self, other_armor, frame_tick): self.roi_buffer.append(other_armor.roi_buffer[-1]) self.observed_frame_tick.append(frame_tick) - # Maintain each armor's buffer so that anything older than FRAME_BUFFER_SIZE is dropped + # Maintain each armor's buffer so that anything older than + # FRAME_BUFFER_SIZE is dropped self.bbox_buffer = self.bbox_buffer[-FRAME_BUFFER_SIZE:] self.roi_buffer = self.roi_buffer[-FRAME_BUFFER_SIZE:] def predict_bbox(self, cur_frame_tick): - if cur_frame_tick == self.observed_frame_tick[-1] or len(self.bbox_buffer) == 1: + if cur_frame_tick == self.observed_frame_tick[-1] or len( + self.bbox_buffer) == 1: return self.bbox_buffer[-1] else: # Linear extrapolation @@ -97,7 +99,8 @@ def process_one(self, pred_list, enemy_team, rgb_img): for n in new_armors: n.armor_id = self.id_gen.get_id() - # Maintain current buffer. If an armor has not been observed by 10 frames, it is dropped + # Maintain current buffer. 
If an armor has not been observed by 10 + # frames, it is dropped self.active_armors = [i for i in self.active_armors if self.frame_tick - i.observed_frame_tick[-1] < FRAME_BUFFER_SIZE] diff --git a/Camera/shl_200ws.py b/Camera/shl_200ws.py index 11925e3..0c1ab9d 100644 --- a/Camera/shl_200ws.py +++ b/Camera/shl_200ws.py @@ -3,7 +3,8 @@ import time cap = cv2.VideoCapture( - 'v4l2src device=/dev/video0 ! image/jpeg,framerate=61612/513,width=640,height=480 ! jpegdec ! videoconvert ! video/x-raw,format=BGR ! appsink max-buffers=1 drop=true', cv2.CAP_GSTREAMER) + 'v4l2src device=/dev/video0 ! image/jpeg,framerate=61612/513,width=640,height=480 ! jpegdec ! videoconvert ! video/x-raw,format=BGR ! appsink max-buffers=1 drop=true', # noqa + cv2.CAP_GSTREAMER) if not cap.isOpened(): print("Cannot open camera") diff --git a/Communication/communicator.py b/Communication/communicator.py index c036277..1285cce 100644 --- a/Communication/communicator.py +++ b/Communication/communicator.py @@ -6,7 +6,12 @@ class UARTCommunicator(object): - def __init__(self, cfg, crc_standard=crc.Crc8.MAXIM_DOW, endianness='little', buffer_size=10): + def __init__( + self, + cfg, + crc_standard=crc.Crc8.MAXIM_DOW, + endianness='little', + buffer_size=10): self.cfg = cfg self.crc_standard = crc_standard self.endianness = endianness @@ -135,10 +140,10 @@ def get_current_stm32_state(self): blue_cnt = 0 red_cnt = 0 - for l in self.circular_buffer: - if l == ord('R'): + for read_byte in self.circular_buffer: + if read_byte == ord('R'): red_cnt += 1 - if l == ord('B'): + if read_byte == ord('B'): blue_cnt += 1 if blue_cnt > red_cnt: diff --git a/Detection/CV_mix_DL.py b/Detection/CV_mix_DL.py index 8a1e580..bea070d 100644 --- a/Detection/CV_mix_DL.py +++ b/Detection/CV_mix_DL.py @@ -36,7 +36,7 @@ def auto_align_brightness(img, target_v=50): def color_test(rgb_img, rect, color): - rgb_roi = rgb_img[rect[1]:rect[1]+rect[3], rect[0]:rect[0]+rect[2]] + rgb_roi = rgb_img[rect[1]:rect[1] + rect[3], 
rect[0]:rect[0] + rect[2]] sum_r = np.sum(rgb_roi[:, :, 0]) sum_b = np.sum(rgb_roi[:, :, 2]) if color == RED: @@ -105,7 +105,8 @@ def change_color(self, new_color): def rect_contains(rect, pt): - return rect[0] < pt[0] < rect[0]+rect[2] and rect[1] < pt[1] < rect[1]+rect[3] + return rect[0] < pt[0] < rect[0] + \ + rect[2] and rect[1] < pt[1] < rect[1] + rect[3] class light_class: @@ -120,7 +121,12 @@ def __init__(self, rotated_rect): self.width = cv2.norm(pts[0] - pts[1]) self.tilt_angle = np.arctan2( - np.abs(self.top[0] - self.btm[0]), np.abs(self.top[1] - self.btm[1])) + np.abs( + self.top[0] - + self.btm[0]), + np.abs( + self.top[1] - + self.btm[1])) self.tilt_angle = self.tilt_angle / np.pi * 180 self.center = np.array([self.center_x, self.center_y]) @@ -179,12 +185,12 @@ def extract_number(self, rgb_img): start_x = int((warp_width - roi_size[0]) / 2) self.number_image = self.number_image[:roi_size[1], - start_x:start_x+roi_size[0]] + start_x:start_x + roi_size[0]] # Binarize self.number_image = cv2.cvtColor(self.number_image, cv2.COLOR_RGB2GRAY) thres, self.number_image = cv2.threshold( - self.number_image, 0, 255, cv2.THRESH_BINARY+cv2.THRESH_OTSU) + self.number_image, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU) class cv_armor_proposer(object): @@ -224,8 +230,12 @@ def __call__(self, rgb_img): viz_img = rgb_img.copy() # visualize lights for light in light_list: - cv2.rectangle(viz_img, (int(light.top[0]), int(light.top[1])), (int( - light.btm[0]), int(light.btm[1])), (0, 255, 0), 2) + cv2.rectangle( + viz_img, (int( + light.top[0]), int( + light.top[1])), (int( + light.btm[0]), int( + light.btm[1])), (0, 255, 0), 2) cv2.imshow('lights', viz_img) cv2.waitKey(1) @@ -234,8 +244,12 @@ def __call__(self, rgb_img): viz_img = rgb_img.copy() # visualize armors for armor in armor_list: - cv2.rectangle(viz_img, (int(armor.left_light.top[0]), int(armor.left_light.top[1])), (int( - armor.right_light.btm[0]), int(armor.right_light.btm[1])), (0, 255, 0), 2) + 
cv2.rectangle( + viz_img, (int( + armor.left_light.top[0]), int( + armor.left_light.top[1])), (int( + armor.right_light.btm[0]), int( + armor.right_light.btm[1])), (0, 255, 0), 2) cv2.imshow('armors', viz_img) cv2.waitKey(1) @@ -321,10 +335,14 @@ def find_lights(self, rgb_img, binary_img): append_flag = True for bin_rect in bin_rects: # Compute rectangle overlap - x_overlap = max(0, min( - bin_rect[0] + bin_rect[2], col_rect[0] + col_rect[2]) - max(bin_rect[0], col_rect[0])) - y_overlap = max(0, min( - bin_rect[1] + bin_rect[3], col_rect[1] + col_rect[3]) - max(bin_rect[1], col_rect[1])) + x_overlap = max(0, + min(bin_rect[0] + bin_rect[2], + col_rect[0] + col_rect[2]) - max(bin_rect[0], + col_rect[0])) + y_overlap = max(0, + min(bin_rect[1] + bin_rect[3], + col_rect[1] + col_rect[3]) - max(bin_rect[1], + col_rect[1])) overlap = x_overlap * y_overlap min_area = min(bin_rect[2] * bin_rect[3], col_rect[2] * col_rect[3]) @@ -389,8 +407,10 @@ def contain_light(self, light1, light2, light_list): if test_light == light1 or test_light == light2: continue - if rect_contains(rect, test_light.top) or rect_contains(rect, test_light.btm)\ - or rect_contains(rect, test_light.center): + if rect_contains( + rect, test_light.top) or rect_contains( + rect, test_light.btm) or rect_contains( + rect, test_light.center): return True return False @@ -409,10 +429,11 @@ def is_armor(self, armor): center_dist = cv2.norm( light1.center - light2.center) / avg_light_length - center_dist_ok = ((self.ARMOR_MIN_SMALL_CENTER_DISTANCE < center_dist) - and (center_dist < self.ARMOR_MAX_SMALL_CENTER_DISTANCE)) or\ - ((self.ARMOR_MIN_LARGE_CENTER_DISTANCE < center_dist) - and (center_dist < self.ARMOR_MAX_LARGE_CENTER_DISTANCE)) + center_dist_ok = ( + (self.ARMOR_MIN_SMALL_CENTER_DISTANCE < center_dist) and ( + center_dist < self.ARMOR_MAX_SMALL_CENTER_DISTANCE)) or ( + (self.ARMOR_MIN_LARGE_CENTER_DISTANCE < center_dist) and ( + center_dist < self.ARMOR_MAX_LARGE_CENTER_DISTANCE)) # test light 
center connection angle diff = light1.center - light2.center @@ -456,7 +477,8 @@ def __call__(self, armor_list): max_class = softmax_probs.argmax(axis=1) max_class_names = self.LABEL_NAMES_LIST[max_class] - if softmax_probs[0, max_class] < self.CLASSIFIER_THRESHOLD or max_class_names == 'N': + if softmax_probs[0, + max_class] < self.CLASSIFIER_THRESHOLD or max_class_names == 'N': continue # TODO: use digit predictions to improve accuracy? diff --git a/Detection/YOLO.py b/Detection/YOLO.py index e789627..386fcce 100644 --- a/Detection/YOLO.py +++ b/Detection/YOLO.py @@ -10,17 +10,17 @@ def sample(probs): s = sum(probs) - probs = [a/s for a in probs] + probs = [a / s for a in probs] r = random.uniform(0, 1) for i in range(len(probs)): r = r - probs[i] if r <= 0: return i - return len(probs)-1 + return len(probs) - 1 def c_array(ctype, values): - arr = (ctype*len(values))() + arr = (ctype * len(values))() arr[:] = values return arr @@ -80,8 +80,15 @@ class METADATA(Structure): make_image.restype = IMAGE get_network_boxes = lib.get_network_boxes -get_network_boxes.argtypes = [c_void_p, c_int, c_int, - c_float, c_float, POINTER(c_int), c_int, POINTER(c_int)] +get_network_boxes.argtypes = [ + c_void_p, + c_int, + c_int, + c_float, + c_float, + POINTER(c_int), + c_int, + POINTER(c_int)] get_network_boxes.restype = POINTER(DETECTION) make_network_boxes = lib.make_network_boxes @@ -147,12 +154,19 @@ def nparray_to_image(img): class Yolo: - def __init__(self, cfg_path, weight_path, meta_path, thresh=.5, hier_thresh=.5, nms=.45): + def __init__( + self, + cfg_path, + weight_path, + meta_path, + thresh=.5, + hier_thresh=.5, + nms=.45): ''' Args: cfg_path: path to darknet model definition files weight_path: path to darkent model weight - meta_path: path to detection meta path + meta_path: path to detection meta path (note: see https://github.com/pjreddie/darknet/blob/master/python/darknet.py for example meta definition) thresh: confidence threshold for unet @@ -173,7 +187,7 @@ 
def detect(self, image): Return: An array of tuple (class name, prob, (x, y, w, h)) ''' - #im = load_image(image, 0, 0) + # im = load_image(image, 0, 0) im = nparray_to_image(image) num = c_int(0) pnum = pointer(num) diff --git a/vision.py b/vision.py index 04e7051..13af6f8 100644 --- a/vision.py +++ b/vision.py @@ -42,7 +42,7 @@ def main(): name_str = name pred[i] = (name_str, conf, bbox) - elapsed = time.time()-start + elapsed = time.time() - start if config.DEBUG_DISPLAY: viz_frame = frame.copy() @@ -95,7 +95,7 @@ def main(): if config.DEBUG_DISPLAY: print('----------------\n', pred) - print('fps:', 1./elapsed) + print('fps:', 1. / elapsed) cv2.imshow('target', show_frame) if cv2.waitKey(1) & 0xFF == ord('q'): exit(0) From b6affbfd19cd6fb3d7929f7ce134967f2c414f1d Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sun, 12 Mar 2023 03:04:40 -0500 Subject: [PATCH 20/74] mark not-used SHL200 camera driver and pure YOLO as deprecated --- Camera/{shl_200ws.py => shl_200ws.py.deprecated} | 0 Detection/{YOLO.py => YOLO.py.deprecated} | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename Camera/{shl_200ws.py => shl_200ws.py.deprecated} (100%) rename Detection/{YOLO.py => YOLO.py.deprecated} (100%) diff --git a/Camera/shl_200ws.py b/Camera/shl_200ws.py.deprecated similarity index 100% rename from Camera/shl_200ws.py rename to Camera/shl_200ws.py.deprecated diff --git a/Detection/YOLO.py b/Detection/YOLO.py.deprecated similarity index 100% rename from Detection/YOLO.py rename to Detection/YOLO.py.deprecated From c97a740647ca5fa25dd4c33cdc294d33f87ceba9 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sun, 12 Mar 2023 22:12:18 -0500 Subject: [PATCH 21/74] added __docstring__ for Aiming/ --- Aiming/Aim.py | 41 ++++++++++++----- Aiming/Tracking/__init__.py | 3 ++ Aiming/Tracking/basic_tracker.py | 66 +++++++++++++++++++++++++--- Aiming/Tracking/consistent_id_gen.py | 16 ++++++- 4 files changed, 109 insertions(+), 17 deletions(-) diff --git a/Aiming/Aim.py b/Aiming/Aim.py index 
d1acca5..a7e24df 100644 --- a/Aiming/Aim.py +++ b/Aiming/Aim.py @@ -1,12 +1,12 @@ -""" -Hosts the Aim class, which is the main class for auto-aiming. -""" +"""Hosts the Aim class, which is the main class for auto-aiming.""" from .Tracking import basic_tracker class Aim: """ - The main class for auto-aiming. Its workflow: + The main class for auto-aiming. + + Its workflow: 1. takes in a list of predictions from the detection module 2. apply tracker to fill in missing predictions / append or associate predictions 3. It selects a single armor to hit @@ -15,11 +15,16 @@ class Aim: """ def __init__(self, config): + """Get the config and initialize the tracker. + + Args: + config (python object): shared config + """ self.CFG = config self.tracker = basic_tracker(self.CFG) def process_one(self, pred_list, enemy_team, rgb_img): - """Process one frame of predictions + """Process one frame of predictions. Args: pred_list (list): a list of predictions @@ -60,8 +65,10 @@ def process_one(self, pred_list, enemy_team, rgb_img): } def posterior_calibration(self, yaw_diff, pitch_diff, distance): - """Given a set of naively estimated parameters, return calibrated parameters - from a range table? + """Given a set of naively estimated parameters, return calibrated parameters. + + Idea: + Use a range table? Args: yaw_diff (float): yaw difference in radians @@ -80,7 +87,15 @@ def posterior_calibration(self, yaw_diff, pitch_diff, distance): return (yaw_diff, pitch_diff) def get_closet_pred(self, bbox_list, rgb_img): - '''Get the closet prediction to camera focal point''' + """Get the closet prediction to camera focal point. 
+ + Args: + bbox_list (list): a list of bounding boxes + rgb_img (np.ndarray): RGB image + + Returns: + (bbox_list, float): closet_pred, closet_dist + """ # TODO: instead of camera focal point; calibrate closet pred to # operator view H, W, C = rgb_img.shape @@ -102,8 +117,14 @@ def get_closet_pred(self, bbox_list, rgb_img): return closet_pred, closet_dist def get_rotation_angle(self, bbox_center_x, bbox_center_y): - """ - Given a bounding box center, return the yaw/pitch difference in radians + """Given a bounding box center, return the yaw/pitch difference in radians. + + Args: + bbox_center_x (float): x coordinate of the center of the bounding box + bbox_center_y (float): y coordinate of the center of the bounding box + + Returns: + (float, float): yaw_diff, pitch_diff in radians """ yaw_diff = (bbox_center_x - self.CFG.IMG_CENTER_X) * \ (self.CFG.AUTOAIM_CAMERA.YAW_FOV_HALF / self.CFG.IMG_CENTER_X) diff --git a/Aiming/Tracking/__init__.py b/Aiming/Tracking/__init__.py index ef56483..bef85a2 100644 --- a/Aiming/Tracking/__init__.py +++ b/Aiming/Tracking/__init__.py @@ -1 +1,4 @@ +"""Aggregate all the tracking algorithms into one module.""" + +# Basic tracker: simple linear extrapolation + Hungarian algorithm on image plane from .basic_tracker import basic_tracker diff --git a/Aiming/Tracking/basic_tracker.py b/Aiming/Tracking/basic_tracker.py index 6b13155..06e29be 100644 --- a/Aiming/Tracking/basic_tracker.py +++ b/Aiming/Tracking/basic_tracker.py @@ -1,23 +1,43 @@ -import os +"""Defines a basic linear tracker and base classes for future dev.""" import scipy.optimize import numpy as np from .consistent_id_gen import ConsistentIdGenerator -from IPython import embed - # TODO: move this to config FRAME_BUFFER_SIZE = 10 - +# TODO: this class should be part of abstract base tracker class class tracked_armor(object): + """A class that represents a tracked armor. 
+ + It stores the history of bounding boxes and ROIs, and can predict the + bounding box of the next frame. + """ + def __init__(self, bbox, roi, frame_tick, armor_id): + """Initialize from prediction. + + Args: + bbox (tuple): (center_x, center_y, w, h) + roi (np.ndarray): ROI of the armor + frame_tick (int): frame tick + armor_id (int): unique ID + """ self.bbox_buffer = [bbox] self.roi_buffer = [roi] self.observed_frame_tick = [frame_tick] self.armor_id = armor_id # unique ID def compute_cost(self, other_armor): + """Compute the cost of matching this armor with another armor. + + Args: + other_armor (tracked_armor): another armor + + Returns: + float: cost + """ assert isinstance(other_armor, tracked_armor) # TODO: use more sophisticated metrics (e.g., RGB) as cost function c_x, c_y, w, h = self.bbox_buffer[-1] @@ -25,6 +45,12 @@ def compute_cost(self, other_armor): return np.square(c_x - o_c_x) + np.square(c_y - o_c_y) def update(self, other_armor, frame_tick): + """Update the state of this armor with matched armor. + + Args: + other_armor (tracked_armor): another armor + frame_tick (int): frame tick + """ # Only call if these two armors are matched self.bbox_buffer.append(other_armor.bbox_buffer[-1]) self.roi_buffer.append(other_armor.roi_buffer[-1]) @@ -36,6 +62,18 @@ def update(self, other_armor, frame_tick): self.roi_buffer = self.roi_buffer[-FRAME_BUFFER_SIZE:] def predict_bbox(self, cur_frame_tick): + """Predict the bounding box of the tracked armor at cur frame tick. + + Args: + cur_frame_tick (int): current frame tick + + TODO + - Use Kalman filter to do prediction + - Support future frame idx for predictions + + Returns: + tuple: (center_x, center_y, w, h) + """ if cur_frame_tick == self.observed_frame_tick[-1] or len( self.bbox_buffer) == 1: return self.bbox_buffer[-1] @@ -52,20 +90,36 @@ def predict_bbox(self, cur_frame_tick): class basic_tracker(object): - ''' + """ Basic tracker that can handle only one target. 
It memorizes the state of last two predictions and do linear extrapolation - ''' + """ + SE_THRESHOLD = 3200 # (40, 40) pixels away def __init__(self, config): + """Initialize the simple lineartracker. + + Args: + config (python object): shared config + """ self.CFG = config self.active_armors = [] self.id_gen = ConsistentIdGenerator() self.frame_tick = 0 # TODO: use timestamp may be a better idea def process_one(self, pred_list, enemy_team, rgb_img): + """Process one set of detections and rgb images. + + Args: + pred_list (list): list of (name, conf, bbox) tuples + enemy_team (str): enemy team name (blue or red) + rgb_img (np.ndarray): RGB image + + Returns: + (list, list): list of tracked_armors (detections+predictions) and their IDs + """ new_armors = [] for name, conf, bbox in pred_list: c_x, c_y, w, h = bbox diff --git a/Aiming/Tracking/consistent_id_gen.py b/Aiming/Tracking/consistent_id_gen.py index c367b90..bff8daa 100644 --- a/Aiming/Tracking/consistent_id_gen.py +++ b/Aiming/Tracking/consistent_id_gen.py @@ -1,14 +1,28 @@ +"""Module for generating consistent id for each object.""" + import threading +class ConsistentIdGenerator: + """Return a consistent id for each object.""" -class ConsistentIdGenerator(object): def __init__(self, lock=False): + """Initialize the ID generator. + + Args: + lock (bool, optional): Whether this function runs in multi-threading. + Defaults to False for efficiency. + """ self.lock_flag = lock if self.lock_flag: self._lock = threading.Lock() self._id = 0 def get_id(self): + """Get a consistent int id. 
+ + Returns: + int: a unique id + """ if self.lock_flag: with self._lock: self._id += 1 From 86469dc9b9bb3ed4842c2bc34cdec0c82845a998 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sun, 12 Mar 2023 23:36:56 -0500 Subject: [PATCH 22/74] camera/ now conform to pydocstyle --- Camera/read_from_file.py | 23 +++++++++++++++++++++-- 1 file changed, 21 insertions(+), 2 deletions(-) diff --git a/Camera/read_from_file.py b/Camera/read_from_file.py index 5a30fb3..a7e6260 100644 --- a/Camera/read_from_file.py +++ b/Camera/read_from_file.py @@ -1,4 +1,4 @@ -# Read from file +"""Hosts the fake camera class that reads from a file for testing.""" import sys import time import numpy as np @@ -7,13 +7,24 @@ # TODO: make an abstract camera base class +class fake_camera: + """ + Fake camera class that mimics the behavior of the real camera. + + It reads from a video file instead. + """ -class fake_camera(object): # Needs to be calibrated per camera YAW_FOV_HALF = Utils.deg_to_rad(42) / 2 PITCH_FOV_HALF = Utils.deg_to_rad(42) / 2 def __init__(self, width, height): + """Initialize fake camera. + + Args: + width (int): width of image to be resized to + height (int): height of image to be resized to + """ self.width = width self.height = height @@ -31,6 +42,14 @@ def __init__(self, width, height): self.frame_cnt = 0 def get_frame(self): + """Call to get a frame from the camera. 
+ + Raises: + Exception: raised when video file is exhausted + + Returns: + np.ndarray: RGB image frame + """ if self.timing is None: self.timing = time.time() From 640e8bff7edf7c2f8c871f6a985346eca78110c3 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sun, 12 Mar 2023 23:50:33 -0500 Subject: [PATCH 23/74] Communication/ now conform to pydocstyle --- Communication/communicator.py | 63 +++++++++++++++++++++++++++++------ 1 file changed, 53 insertions(+), 10 deletions(-) diff --git a/Communication/communicator.py b/Communication/communicator.py index 1285cce..6c31ab4 100644 --- a/Communication/communicator.py +++ b/Communication/communicator.py @@ -1,17 +1,27 @@ +"""Host the UART communicator. See UARTCommunicator.""" import os import serial import crc import time import threading +class UARTCommunicator: + """USB-TTL-UART communicator for Jetson-STM32 communication.""" -class UARTCommunicator(object): def __init__( self, cfg, crc_standard=crc.Crc8.MAXIM_DOW, endianness='little', buffer_size=10): + """Initialize the UART communicator. + + Args: + cfg (python object): shared config + crc_standard (crc.Crc8): CRC standard + endianness (str): endianness of the packet. Either 'big' or 'little' + buffer_size (int): size of the circular buffer + """ self.cfg = cfg self.crc_standard = crc_standard self.endianness = endianness @@ -29,9 +39,11 @@ def __init__( self.seq_num = 0 def is_valid(self): + """Return if communicator is valid.""" return self.serial_port is not None def try_read_one(self): + """Try to read one packet from the serial port and store to internal buffer.""" # Read from serial port, if any packet is waiting if self.serial_port is not None: if (self.serial_port.inWaiting() > 0): @@ -46,19 +58,34 @@ def try_read_one(self): self.circular_buffer.append(c) def process_one_packet(self, header, yaw_offset, pitch_offset): + """Process a batch of numbers into a CRC-checked packet and send it out. 
+ + Args: + header (str): either 'ST' or 'MV' + yaw_offset (float): yaw offset in radians + pitch_offset (float): pitch offset in radians + """ packet = self.create_packet(header, yaw_offset, pitch_offset) self.send_packet(packet) def send_packet(self, packet): + """Send a packet out.""" if self.serial_port is not None: self.serial_port.write(packet) @staticmethod def guess_uart_device_(): - # This function is for UNIX-like systems only! + """Guess the UART device path and open it. + + Note: this function is for UNIX-like systems only! - # OSX prefix: "tty.usbmodem" - # Jetson / Linux prefix: "ttyUSB", "ttyACM" + OSX prefix: "tty.usbmodem" + Jetson / Linux prefix: "ttyUSB", "ttyACM" + + Returns: + serial.Serial: the serial port object + """ + # list of possible prefixes UART_PREFIX_LIST = ("tty.usbmodem", "ttyUSB", "ttyACM") dev_list = os.listdir("/dev") @@ -88,7 +115,17 @@ def guess_uart_device_(): def create_packet(self, header, yaw_offset, pitch_offset): """ - Packet struct + Create CRC-checked packet from user input. + + Args: + header (str): either 'ST' or 'MV' + yaw_offset (float): yaw offset in radians + pitch_offset (float): pitch offset in radians + + Returns: + bytes: the packet to be sent to serial + + For more details, see doc/comms_protocol.md, but here is a summary of the packet format: Big endian @@ -132,11 +169,16 @@ def create_packet(self, header, yaw_offset, pitch_offset): return packet def get_current_stm32_state(self): - # Decode packet sent from the STM32 controller - # TODO: if a robot is revived, the serial port might get - # garbage value in between... - - # TODO: implement a proper CRC-verified packet decoder + """Read from buffer from STM32 to Jetson and return the current state. 
+ + TODO: + - Decode packet sent from the STM32 controller + - If a robot is revived, the serial port might get garbage value in between + - implement a proper CRC-verified packet decoder + + Returns: + dict: a dictionary containing the current state of the STM32 + """ blue_cnt = 0 red_cnt = 0 @@ -163,6 +205,7 @@ def get_current_stm32_state(self): if __name__ == '__main__': + # Testing example if run as main import sys import os # setting path From 29fbb4c8ab750069ddbcea2527c790baf7e4d293 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sun, 12 Mar 2023 23:51:45 -0500 Subject: [PATCH 24/74] Utils/ now conform to pydocstyle --- Utils/__init__.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Utils/__init__.py b/Utils/__init__.py index b631338..01ca9b3 100644 --- a/Utils/__init__.py +++ b/Utils/__init__.py @@ -1,10 +1,10 @@ -# Commonly used utils functions +"""Host commonly used utils functions.""" import numpy as np - def deg_to_rad(deg): + """Convert degree to radian.""" return deg * ((2 * np.pi) / 360) - def rad_to_deg(rad): + """Convert radian to degree.""" return rad * (360. / (2 * np.pi)) From 26647859192088e4107113339c4b286929b7ddbe Mon Sep 17 00:00:00 2001 From: RogerQi Date: Mon, 13 Mar 2023 00:24:50 -0500 Subject: [PATCH 25/74] Detection/ now conform to pydocstyle on local machine --- Detection/CV_mix_DL.py | 251 +++++++++++++++++++++++++++++++++++++++-- 1 file changed, 240 insertions(+), 11 deletions(-) diff --git a/Detection/CV_mix_DL.py b/Detection/CV_mix_DL.py index bea070d..a84909f 100644 --- a/Detection/CV_mix_DL.py +++ b/Detection/CV_mix_DL.py @@ -1,15 +1,23 @@ +"""Host routines for computational / learning-based detections.""" import numpy as np import cv2 import os import time -from IPython import embed +import Utils # Internal ENUM RED = 0 BLUE = 1 - def auto_align_brightness(img, target_v=50): - # Only decrease! + """Standardize brightness of image. 
+ + Args: + img (np.ndarray): BGR image + target_v (int, optional): target brightness. Defaults to 50. + + Returns: + np.ndarray: BGR image with standardized brightness + """ hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV) h, s, v = cv2.split(hsv) @@ -34,8 +42,17 @@ def auto_align_brightness(img, target_v=50): img = cv2.cvtColor(final_hsv, cv2.COLOR_HSV2BGR) return img - def color_test(rgb_img, rect, color): + """Test if the color of the roi is the same as the given color. + + Args: + rgb_img (np.ndarray): RGB image + rect (tuple): (x, y, w, h) + color (int): RED or BLUE (enum) + + Returns: + bool: True if the color of the roi is the same as the given color + """ rgb_roi = rgb_img[rect[1]:rect[1] + rect[3], rect[0]:rect[0] + rect[2]] sum_r = np.sum(rgb_roi[:, :, 0]) sum_b = np.sum(rgb_roi[:, :, 2]) @@ -44,19 +61,45 @@ def color_test(rgb_img, rect, color): else: return sum_b >= sum_r - class cv_mix_dl_detector: + """A routine that combines CV and DL to detect armors. + + It uses CV to propose potential armors, and then uses DL to filter out false positives. + """ + def __init__(self, config, detect_color, model_path='fc.onnx'): - # TODO: fix config parameter in ipython notebook + """Initialize the detector. + + Args: + config (python object): shared config.py + detect_color (int): RED or BLUE (enum) + model_path (str, optional): path to the DL model. Defaults to 'fc.onnx'. + """ self.CFG = config self.armor_proposer = cv_armor_proposer(self.CFG, detect_color) self.digit_classifier = dl_digit_classifier(self.CFG, model_path) self.change_color(detect_color) def detect(self, rgb_img): + """Detect armors in the given image. + + Args: + rgb_img (np.ndarray): RGB image + + Returns: + list: list of detected armors + """ return self(rgb_img) def __call__(self, rgb_img): + """Detect armors in the given image. 
+ + Args: + rgb_img (np.ndarray): RGB image + + Returns: + list: list of detected armors + """ rgb_img = auto_align_brightness(rgb_img) # defaults to BGR format rgb_img = cv2.cvtColor(rgb_img, cv2.COLOR_BGR2RGB) @@ -84,6 +127,11 @@ def __call__(self, rgb_img): return ret_list def change_color(self, new_color): + """Change the color of the detector. + + Args: + new_color (int): RED or BLUE (enum) + """ if new_color in [0, 1]: assert new_color in [0, 1] self.detect_color = new_color @@ -105,12 +153,44 @@ def change_color(self, new_color): def rect_contains(rect, pt): + """Determine if a pt is inside a rect. + + Args: + rect (tuple): (x, y, w, h) + pt (tuple): (x, y) + + Returns: + bool: True if the pt is inside the rect + """ return rect[0] < pt[0] < rect[0] + \ rect[2] and rect[1] < pt[1] < rect[1] + rect[3] class light_class: + """ + A class that represents a light bar on the armor board. + + Most importantly, given a rotated rect from OpenCV, it can calculate the + center, top, bottom, length, width, and tilt angle of the light bar. + + For instance, OpenCV could say a rotated rect is 180 degrees tilted, but + in fact, it is 0 degrees tilted. This class can correct the tilt angle. + + Attributes: + center (np.ndarray): center of the light bar + top (np.ndarray): top point of the light bar + btm (np.ndarray): bottom point of the light bar + length (float): length of the light bar + width (float): width of the light bar + tilt_angle (float): tilt angle of the light bar + """ + def __init__(self, rotated_rect): + """Initialize the light bar. + + Args: + rotated_rect (tuple): (center, (width, height), angle) + """ ((self.center_x, self.center_y), (_, _), _) = rotated_rect # sort according to y pts = sorted(cv2.boxPoints(rotated_rect), key=lambda x: x[1]) @@ -135,7 +215,26 @@ def __init__(self, rotated_rect): class armor_class: + """ + A class that represents an armor board. 
+ + Attributes: + left_light (light_class): left light bar + right_light (light_class): right light bar + center (np.ndarray): center of the armor board + color (int): color of the armor board + confidence (float): confidence of the armor board + armor_type (str): 'large' or 'small' + """ + def __init__(self, light1, light2, color): + """Initialize the armor board. + + Args: + light1 (light_class): left light bar + light2 (light_class): right light bar + color (int): color of the armor board + """ assert light1.color == light2.color assert light1.color == color self.color = color @@ -152,6 +251,15 @@ def __init__(self, light1, light2, color): self.armor_type = None # 'large', 'small' def extract_number(self, rgb_img): + """Extract number from the armor board using perspective transform. + + The perspective transform matrix is computed by using top and bottom + points of the left and right light bars as source vertices, and + manually set target vertices. + + Args: + rgb_img (np.ndarray): RGB image + """ light_length = 12 warp_height = 28 small_armor_width = 32 @@ -193,7 +301,9 @@ def extract_number(self, rgb_img): self.number_image, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU) -class cv_armor_proposer(object): +class cv_armor_proposer: + """Armor proposer using OpenCV.""" + # Hyperparameters MIN_LIGHTNESS = 160 LUMINANCE_THRES = 90 @@ -215,10 +325,24 @@ class cv_armor_proposer(object): armor_max_angle = 35.0 def __init__(self, config, detect_color): + """Initialize the armor proposer. + + Args: + config (python object): shared config.py + detect_color (int): color to detect (0: red, 1: blue ENUM) + """ self.CFG = config self.detect_color = detect_color def __call__(self, rgb_img): + """Run the armor proposer for a single image. 
+ + Args: + rgb_img (np.ndarray): RGB image + + Returns: + list of armor_class: list of detected armors + """ binary_img = self.preprocess(rgb_img) if self.CFG.DEBUG_DISPLAY: # visualize binary image @@ -256,8 +380,13 @@ def __call__(self, rgb_img): return armor_list def preprocess(self, rgb_img): - """ - Return binarized image + """Preprocess for binarized image. + + Args: + rgb_img (np.ndarray): RGB image + + Returns: + np.ndarray: binarized image """ gray_img = cv2.cvtColor(rgb_img, cv2.COLOR_RGB2GRAY) @@ -267,8 +396,16 @@ def preprocess(self, rgb_img): return binary_img def filter_contours_rects(self, contours, rects, rgb_img): - """ - Filter out contours that are too small or too large + """Filter out contours that are too small and not of interested color. + + Args: + contours (list of np.ndarray): contours + rects (list of np.ndarray): bounding rectangles + rgb_img (np.ndarray): RGB image + + Returns: + list of np.ndarray: filtered contours + list of np.ndarray: filtered bounding rectangles """ filtered_contours = [] filtered_rects = [] @@ -289,6 +426,36 @@ def filter_contours_rects(self, contours, rects, rgb_img): return filtered_contours, filtered_rects def find_lights(self, rgb_img, binary_img): + """Find potential light bars in the image. + + It follows open-sourced implementation to detect light bar from + binarzied images. However, in practice we find that is not robust + enough. So we augment it with color difference contours too. + + Two contours are often overlapped and we apply NMS to remove + overlapped ones. We trust binary contours more than color difference. + + The NMS is also different from the one in object detection. Namely, + in detection we usually do + + IoU = intersection / union + + So that a small object in front of a big object will not be removed. + + However, in our case, using this leads to inefficiency because two light bars + can not be contained in each other. So we use + + IoU = intersection / area of smaller one + + instead. 
+ + Args: + rgb_img (np.ndarray): RGB image + binary_img (np.ndarray): binarized image + + Returns: + list of light_class: list of detected lights + """ bin_contours, _ = cv2.findContours( binary_img, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE) # Use difference of color to handle white light @@ -369,6 +536,18 @@ def find_lights(self, rgb_img, binary_img): return light_list def is_light(self, light): + """Apply filtering to determine if a light bar is valid. + + Criteria: + 1. Aspect ratio can not be too large or too small (light bars are slim verticals) + 2. Tilt angle (light bars are vertical) + + Args: + light (light_class): light bar to be filtered + + Returns: + bool: True if the light bar is valid + """ ratio = light.width / light.length ratio_ok = self.LIGHT_MIN_RATIO < ratio and ratio < self.LIGHT_MAX_RATIO @@ -377,6 +556,14 @@ def is_light(self, light): return ratio_ok and angle_ok def match_lights(self, light_list): + """Match pairs of lights into armors. + + Args: + light_list (list of light_class): list of detected lights + + Returns: + list of armor_class: list of detected armors + """ armor_list = [] for i in range(len(light_list)): @@ -398,6 +585,18 @@ def match_lights(self, light_list): return armor_list def contain_light(self, light1, light2, light_list): + """Test if a pair of light is contained in another light bar. + + Empirically, this is a good way to filter out false positives. + + Args: + light1 (light_class): light bar 1 + light2 (light_class): light bar 2 + light_list (list of light_class): list of all detected lights + + Returns: + bool: True if light1 and light2 are contained in another light bar + """ pts = np.array([ light1.top, light1.btm, light2.top, light2.btm ]) @@ -416,6 +615,19 @@ def contain_light(self, light1, light2, light_list): return False def is_armor(self, armor): + """Apply filtering to determine if an armor is valid. + + Criteria: + 1. The length of two light bars can not be too different + 2. 
The ratio of distance between two light bars and their length + 3. The angle of armor boards two light bars would form + + Args: + armor (armor_class): armor to be filtered + + Returns: + bool: True if the armor meets all the criteria + """ light1 = armor.left_light light2 = armor.right_light if light1.length < light2.length: @@ -449,18 +661,35 @@ def is_armor(self, armor): class dl_digit_classifier: + """Classify digits in armor number using deep learning model.""" + LABEL_NAMES_LIST = np.array(['B', '1', '2', '3', '4', '5', 'G', 'O', 'N']) CLASSIFIER_THRESHOLD = 0.7 def __init__(self, config, model_path): + """Initialize the classifier. + + Args: + config (python object): shared config.py + model_path (str): path to the model file + """ self.CFG = config self.net = cv2.dnn.readNetFromONNX(model_path) @staticmethod def normalize(image): + """Normalize image to [0, 1].""" return image.astype(np.float32) / 255.0 def __call__(self, armor_list): + """Classify a batch of armor numbers. + + Args: + armor_list (list of armor_class): list of armors to be classified + + Returns: + list of str: list of classified armor numbers + """ ret_list = [] for armor in armor_list: input_image = self.normalize(armor.number_image) From 069d9a4dce26af50c801485db75cb1901e7da1df Mon Sep 17 00:00:00 2001 From: RogerQi Date: Mon, 13 Mar 2023 00:26:29 -0500 Subject: [PATCH 26/74] run autopep8 again; now it should pass formatting check --- Aiming/Aim.py | 6 +-- Aiming/Tracking/__init__.py | 3 +- Aiming/Tracking/basic_tracker.py | 14 +++--- Aiming/Tracking/consistent_id_gen.py | 1 + Camera/read_from_file.py | 1 + Communication/communicator.py | 7 +-- Detection/CV_mix_DL.py | 75 +++++++++++++++------------- Utils/__init__.py | 2 + config.py | 9 ++-- vision.py | 8 +++ 10 files changed, 71 insertions(+), 55 deletions(-) diff --git a/Aiming/Aim.py b/Aiming/Aim.py index a7e24df..dc27c0f 100644 --- a/Aiming/Aim.py +++ b/Aiming/Aim.py @@ -5,7 +5,7 @@ class Aim: """ The main class for auto-aiming. 
- + Its workflow: 1. takes in a list of predictions from the detection module 2. apply tracker to fill in missing predictions / append or associate predictions @@ -88,7 +88,7 @@ def posterior_calibration(self, yaw_diff, pitch_diff, distance): def get_closet_pred(self, bbox_list, rgb_img): """Get the closet prediction to camera focal point. - + Args: bbox_list (list): a list of bounding boxes rgb_img (np.ndarray): RGB image @@ -122,7 +122,7 @@ def get_rotation_angle(self, bbox_center_x, bbox_center_y): Args: bbox_center_x (float): x coordinate of the center of the bounding box bbox_center_y (float): y coordinate of the center of the bounding box - + Returns: (float, float): yaw_diff, pitch_diff in radians """ diff --git a/Aiming/Tracking/__init__.py b/Aiming/Tracking/__init__.py index bef85a2..94390fa 100644 --- a/Aiming/Tracking/__init__.py +++ b/Aiming/Tracking/__init__.py @@ -1,4 +1,5 @@ """Aggregate all the tracking algorithms into one module.""" -# Basic tracker: simple linear extrapolation + Hungarian algorithm on image plane +# Basic tracker: simple linear extrapolation + Hungarian algorithm on +# image plane from .basic_tracker import basic_tracker diff --git a/Aiming/Tracking/basic_tracker.py b/Aiming/Tracking/basic_tracker.py index 06e29be..47baab6 100644 --- a/Aiming/Tracking/basic_tracker.py +++ b/Aiming/Tracking/basic_tracker.py @@ -8,6 +8,8 @@ FRAME_BUFFER_SIZE = 10 # TODO: this class should be part of abstract base tracker class + + class tracked_armor(object): """A class that represents a tracked armor. @@ -17,7 +19,7 @@ class tracked_armor(object): def __init__(self, bbox, roi, frame_tick, armor_id): """Initialize from prediction. - + Args: bbox (tuple): (center_x, center_y, w, h) roi (np.ndarray): ROI of the armor @@ -31,10 +33,10 @@ def __init__(self, bbox, roi, frame_tick, armor_id): def compute_cost(self, other_armor): """Compute the cost of matching this armor with another armor. 
- + Args: other_armor (tracked_armor): another armor - + Returns: float: cost """ @@ -63,10 +65,10 @@ def update(self, other_armor, frame_tick): def predict_bbox(self, cur_frame_tick): """Predict the bounding box of the tracked armor at cur frame tick. - + Args: cur_frame_tick (int): current frame tick - + TODO - Use Kalman filter to do prediction - Support future frame idx for predictions @@ -100,7 +102,7 @@ class basic_tracker(object): def __init__(self, config): """Initialize the simple lineartracker. - + Args: config (python object): shared config """ diff --git a/Aiming/Tracking/consistent_id_gen.py b/Aiming/Tracking/consistent_id_gen.py index bff8daa..806b3ee 100644 --- a/Aiming/Tracking/consistent_id_gen.py +++ b/Aiming/Tracking/consistent_id_gen.py @@ -2,6 +2,7 @@ import threading + class ConsistentIdGenerator: """Return a consistent id for each object.""" diff --git a/Camera/read_from_file.py b/Camera/read_from_file.py index a7e6260..3a2d163 100644 --- a/Camera/read_from_file.py +++ b/Camera/read_from_file.py @@ -7,6 +7,7 @@ # TODO: make an abstract camera base class + class fake_camera: """ Fake camera class that mimics the behavior of the real camera. diff --git a/Communication/communicator.py b/Communication/communicator.py index 6c31ab4..6d441b8 100644 --- a/Communication/communicator.py +++ b/Communication/communicator.py @@ -5,6 +5,7 @@ import time import threading + class UARTCommunicator: """USB-TTL-UART communicator for Jetson-STM32 communication.""" @@ -59,7 +60,7 @@ def try_read_one(self): def process_one_packet(self, header, yaw_offset, pitch_offset): """Process a batch of numbers into a CRC-checked packet and send it out. - + Args: header (str): either 'ST' or 'MV' yaw_offset (float): yaw offset in radians @@ -76,7 +77,7 @@ def send_packet(self, packet): @staticmethod def guess_uart_device_(): """Guess the UART device path and open it. - + Note: this function is for UNIX-like systems only! 
OSX prefix: "tty.usbmodem" @@ -170,7 +171,7 @@ def create_packet(self, header, yaw_offset, pitch_offset): def get_current_stm32_state(self): """Read from buffer from STM32 to Jetson and return the current state. - + TODO: - Decode packet sent from the STM32 controller - If a robot is revived, the serial port might get garbage value in between diff --git a/Detection/CV_mix_DL.py b/Detection/CV_mix_DL.py index a84909f..bcbd464 100644 --- a/Detection/CV_mix_DL.py +++ b/Detection/CV_mix_DL.py @@ -8,13 +8,14 @@ RED = 0 BLUE = 1 + def auto_align_brightness(img, target_v=50): """Standardize brightness of image. - + Args: img (np.ndarray): BGR image target_v (int, optional): target brightness. Defaults to 50. - + Returns: np.ndarray: BGR image with standardized brightness """ @@ -42,14 +43,15 @@ def auto_align_brightness(img, target_v=50): img = cv2.cvtColor(final_hsv, cv2.COLOR_HSV2BGR) return img + def color_test(rgb_img, rect, color): """Test if the color of the roi is the same as the given color. - + Args: rgb_img (np.ndarray): RGB image rect (tuple): (x, y, w, h) color (int): RED or BLUE (enum) - + Returns: bool: True if the color of the roi is the same as the given color """ @@ -61,15 +63,16 @@ def color_test(rgb_img, rect, color): else: return sum_b >= sum_r + class cv_mix_dl_detector: """A routine that combines CV and DL to detect armors. - + It uses CV to propose potential armors, and then uses DL to filter out false positives. """ - + def __init__(self, config, detect_color, model_path='fc.onnx'): """Initialize the detector. - + Args: config (python object): shared config.py detect_color (int): RED or BLUE (enum) @@ -82,10 +85,10 @@ def __init__(self, config, detect_color, model_path='fc.onnx'): def detect(self, rgb_img): """Detect armors in the given image. - + Args: rgb_img (np.ndarray): RGB image - + Returns: list: list of detected armors """ @@ -93,10 +96,10 @@ def detect(self, rgb_img): def __call__(self, rgb_img): """Detect armors in the given image. 
- + Args: rgb_img (np.ndarray): RGB image - + Returns: list: list of detected armors """ @@ -128,7 +131,7 @@ def __call__(self, rgb_img): def change_color(self, new_color): """Change the color of the detector. - + Args: new_color (int): RED or BLUE (enum) """ @@ -154,11 +157,11 @@ def change_color(self, new_color): def rect_contains(rect, pt): """Determine if a pt is inside a rect. - + Args: rect (tuple): (x, y, w, h) pt (tuple): (x, y) - + Returns: bool: True if the pt is inside the rect """ @@ -187,7 +190,7 @@ class light_class: def __init__(self, rotated_rect): """Initialize the light bar. - + Args: rotated_rect (tuple): (center, (width, height), angle) """ @@ -229,7 +232,7 @@ class armor_class: def __init__(self, light1, light2, color): """Initialize the armor board. - + Args: light1 (light_class): left light bar light2 (light_class): right light bar @@ -256,7 +259,7 @@ def extract_number(self, rgb_img): The perspective transform matrix is computed by using top and bottom points of the left and right light bars as source vertices, and manually set target vertices. - + Args: rgb_img (np.ndarray): RGB image """ @@ -326,7 +329,7 @@ class cv_armor_proposer: def __init__(self, config, detect_color): """Initialize the armor proposer. - + Args: config (python object): shared config.py detect_color (int): color to detect (0: red, 1: blue ENUM) @@ -336,10 +339,10 @@ def __init__(self, config, detect_color): def __call__(self, rgb_img): """Run the armor proposer for a single image. 
- + Args: rgb_img (np.ndarray): RGB image - + Returns: list of armor_class: list of detected armors """ @@ -384,7 +387,7 @@ def preprocess(self, rgb_img): Args: rgb_img (np.ndarray): RGB image - + Returns: np.ndarray: binarized image """ @@ -402,7 +405,7 @@ def filter_contours_rects(self, contours, rects, rgb_img): contours (list of np.ndarray): contours rects (list of np.ndarray): bounding rectangles rgb_img (np.ndarray): RGB image - + Returns: list of np.ndarray: filtered contours list of np.ndarray: filtered bounding rectangles @@ -448,11 +451,11 @@ def find_lights(self, rgb_img, binary_img): IoU = intersection / area of smaller one instead. - + Args: rgb_img (np.ndarray): RGB image binary_img (np.ndarray): binarized image - + Returns: list of light_class: list of detected lights """ @@ -541,10 +544,10 @@ def is_light(self, light): Criteria: 1. Aspect ratio can not be too large or too small (light bars are slim verticals) 2. Tilt angle (light bars are vertical) - + Args: light (light_class): light bar to be filtered - + Returns: bool: True if the light bar is valid """ @@ -557,10 +560,10 @@ def is_light(self, light): def match_lights(self, light_list): """Match pairs of lights into armors. - + Args: light_list (list of light_class): list of detected lights - + Returns: list of armor_class: list of detected armors """ @@ -588,12 +591,12 @@ def contain_light(self, light1, light2, light_list): """Test if a pair of light is contained in another light bar. Empirically, this is a good way to filter out false positives. - + Args: light1 (light_class): light bar 1 light2 (light_class): light bar 2 light_list (list of light_class): list of all detected lights - + Returns: bool: True if light1 and light2 are contained in another light bar """ @@ -616,15 +619,15 @@ def contain_light(self, light1, light2, light_list): def is_armor(self, armor): """Apply filtering to determine if an armor is valid. - + Criteria: 1. The length of two light bars can not be too different 2. 
The ratio of distance between two light bars and their length 3. The angle of armor boards two light bars would form - + Args: armor (armor_class): armor to be filtered - + Returns: bool: True if the armor meets all the criteria """ @@ -668,7 +671,7 @@ class dl_digit_classifier: def __init__(self, config, model_path): """Initialize the classifier. - + Args: config (python object): shared config.py model_path (str): path to the model file @@ -683,10 +686,10 @@ def normalize(image): def __call__(self, armor_list): """Classify a batch of armor numbers. - + Args: armor_list (list of armor_class): list of armors to be classified - + Returns: list of str: list of classified armor numbers """ diff --git a/Utils/__init__.py b/Utils/__init__.py index 01ca9b3..39c78ea 100644 --- a/Utils/__init__.py +++ b/Utils/__init__.py @@ -1,10 +1,12 @@ """Host commonly used utils functions.""" import numpy as np + def deg_to_rad(deg): """Convert degree to radian.""" return deg * ((2 * np.pi) / 360) + def rad_to_deg(rad): """Convert radian to degree.""" return rad * (360. 
/ (2 * np.pi)) diff --git a/config.py b/config.py index 542b1b7..9a72058 100644 --- a/config.py +++ b/config.py @@ -1,15 +1,12 @@ -# darknet utils -from Camera.read_from_file import fake_camera -DARKNET_LIB_PATH = r'/home/illinirm/darknet/libdarknet.so' -MODEL_CFG_PATH = r'/home/illinirm/darknet/dji_roco_preprocessed/yolov3-tiny-custom.cfg' -WEIGHT_PATH = r'/home/illinirm/darknet/dji_roco_preprocessed/yolov3-tiny-custom_final.weights' -META_PATH = r'/home/illinirm/darknet/dji_roco_preprocessed/roco.data' +"""Config file that is shared across the whole project.""" # communication utils +from Camera.read_from_file import fake_camera SEARCH_TARGET = b'ST' MOVE_YOKE = b'MY' PACK_END = b'ED' +# Define camera # from Camera.d455 import D455_camera AUTOAIM_CAMERA = fake_camera diff --git a/vision.py b/vision.py index 13af6f8..9ff96e9 100644 --- a/vision.py +++ b/vision.py @@ -1,3 +1,10 @@ +""" +Main file for both development and deployment. + +For development purpose, you may want to turn on the DEBUG_DISPLAY flag +in config.py +""" + import time import cv2 from Aiming.Aim import Aim @@ -8,6 +15,7 @@ def main(): + """Define the main while-true control loop that manages everything.""" model = cv_mix_dl_detector(config, config.DEFAULT_ENEMY_TEAM) # model = Yolo(config.MODEL_CFG_PATH, config.WEIGHT_PATH, config.META_PATH) aimer = Aim(config) From c009856d224f401adf8c9732d43bd18a2e753638 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Mon, 13 Mar 2023 00:27:56 -0500 Subject: [PATCH 27/74] correct workflow name --- .github/workflows/formatting.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/formatting.yml b/.github/workflows/formatting.yml index 216dbdc..7756a5d 100644 --- a/.github/workflows/formatting.yml +++ b/.github/workflows/formatting.yml @@ -19,7 +19,7 @@ jobs: python -m pip install --upgrade pip pip install pycodestyle pip install pydocstyle - - name: Analysing the code with pylint + - name: Analysing the code with pycodestyle and 
pydocstyle run: | pycodestyle --max-line-length=100 ./ pydocstyle ./ \ No newline at end of file From 1739ee38c16824aa0d7579b4f1bd78a1618f003f Mon Sep 17 00:00:00 2001 From: RogerQi Date: Mon, 13 Mar 2023 00:30:34 -0500 Subject: [PATCH 28/74] move some common functions to Utils/ for clarity --- Detection/CV_mix_DL.py | 83 +++--------------------------------------- Utils/__init__.py | 73 ++++++++++++++++++++++++++++++++++++- 2 files changed, 76 insertions(+), 80 deletions(-) diff --git a/Detection/CV_mix_DL.py b/Detection/CV_mix_DL.py index bcbd464..dc55087 100644 --- a/Detection/CV_mix_DL.py +++ b/Detection/CV_mix_DL.py @@ -8,62 +8,6 @@ RED = 0 BLUE = 1 - -def auto_align_brightness(img, target_v=50): - """Standardize brightness of image. - - Args: - img (np.ndarray): BGR image - target_v (int, optional): target brightness. Defaults to 50. - - Returns: - np.ndarray: BGR image with standardized brightness - """ - hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV) - h, s, v = cv2.split(hsv) - - cur_v = np.mean(v) - v_diff = int(cur_v - target_v) - - if v_diff > 0: - value = v_diff - # needs lower brightness - v[v < value] = 0 - v[v >= value] -= value - final_hsv = cv2.merge((h, s, v)) - img = cv2.cvtColor(final_hsv, cv2.COLOR_HSV2BGR) - return img - else: - # brighten - value = -v_diff - # needs lower brightness - v[v > (255 - value)] = 255 - v[v <= (255 - value)] += value - final_hsv = cv2.merge((h, s, v)) - img = cv2.cvtColor(final_hsv, cv2.COLOR_HSV2BGR) - return img - - -def color_test(rgb_img, rect, color): - """Test if the color of the roi is the same as the given color. 
- - Args: - rgb_img (np.ndarray): RGB image - rect (tuple): (x, y, w, h) - color (int): RED or BLUE (enum) - - Returns: - bool: True if the color of the roi is the same as the given color - """ - rgb_roi = rgb_img[rect[1]:rect[1] + rect[3], rect[0]:rect[0] + rect[2]] - sum_r = np.sum(rgb_roi[:, :, 0]) - sum_b = np.sum(rgb_roi[:, :, 2]) - if color == RED: - return sum_r >= sum_b - else: - return sum_b >= sum_r - - class cv_mix_dl_detector: """A routine that combines CV and DL to detect armors. @@ -103,7 +47,7 @@ def __call__(self, rgb_img): Returns: list: list of detected armors """ - rgb_img = auto_align_brightness(rgb_img) + rgb_img = Utils.auto_align_brightness(rgb_img) # defaults to BGR format rgb_img = cv2.cvtColor(rgb_img, cv2.COLOR_BGR2RGB) # propose potential armors @@ -154,21 +98,6 @@ def change_color(self, new_color): else: raise NotImplementedError - -def rect_contains(rect, pt): - """Determine if a pt is inside a rect. - - Args: - rect (tuple): (x, y, w, h) - pt (tuple): (x, y) - - Returns: - bool: True if the pt is inside the rect - """ - return rect[0] < pt[0] < rect[0] + \ - rect[2] and rect[1] < pt[1] < rect[1] + rect[3] - - class light_class: """ A class that represents a light bar on the armor board. 
@@ -303,7 +232,6 @@ def extract_number(self, rgb_img): thres, self.number_image = cv2.threshold( self.number_image, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU) - class cv_armor_proposer: """Armor proposer using OpenCV.""" @@ -420,7 +348,7 @@ def filter_contours_rects(self, contours, rects, rgb_img): continue # Color test - if not color_test(rgb_img, rects[i], self.detect_color): + if not Utils.color_test(rgb_img, rects[i], self.detect_color): continue filtered_rects.append(rects[i]) @@ -609,9 +537,9 @@ def contain_light(self, light1, light2, light_list): if test_light == light1 or test_light == light2: continue - if rect_contains( - rect, test_light.top) or rect_contains( - rect, test_light.btm) or rect_contains( + if Utils.rect_contains( + rect, test_light.top) or Utils.rect_contains( + rect, test_light.btm) or Utils.rect_contains( rect, test_light.center): return True @@ -662,7 +590,6 @@ def is_armor(self, armor): return light_ratio_ok and center_dist_ok and angle_ok - class dl_digit_classifier: """Classify digits in armor number using deep learning model.""" diff --git a/Utils/__init__.py b/Utils/__init__.py index 39c78ea..98488c2 100644 --- a/Utils/__init__.py +++ b/Utils/__init__.py @@ -1,12 +1,81 @@ """Host commonly used utils functions.""" import numpy as np - +import cv2 def deg_to_rad(deg): """Convert degree to radian.""" return deg * ((2 * np.pi) / 360) - def rad_to_deg(rad): """Convert radian to degree.""" return rad * (360. / (2 * np.pi)) + +def auto_align_brightness(img, target_v=50): + """Standardize brightness of image. + + Args: + img (np.ndarray): BGR image + target_v (int, optional): target brightness. Defaults to 50. 
+ + Returns: + np.ndarray: BGR image with standardized brightness + """ + hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV) + h, s, v = cv2.split(hsv) + + cur_v = np.mean(v) + v_diff = int(cur_v - target_v) + + if v_diff > 0: + value = v_diff + # needs lower brightness + v[v < value] = 0 + v[v >= value] -= value + final_hsv = cv2.merge((h, s, v)) + img = cv2.cvtColor(final_hsv, cv2.COLOR_HSV2BGR) + return img + else: + # brighten + value = -v_diff + # needs lower brightness + v[v > (255 - value)] = 255 + v[v <= (255 - value)] += value + final_hsv = cv2.merge((h, s, v)) + img = cv2.cvtColor(final_hsv, cv2.COLOR_HSV2BGR) + return img + +def color_test(rgb_img, rect, color): + """Test if the color of the roi is the same as the given color. + + Args: + rgb_img (np.ndarray): RGB image + rect (tuple): (x, y, w, h) + color (int): RED or BLUE (enum) + + Returns: + bool: True if the color of the roi is the same as the given color + """ + # Internal ENUM + RED = 0 + BLUE = 1 + + rgb_roi = rgb_img[rect[1]:rect[1] + rect[3], rect[0]:rect[0] + rect[2]] + sum_r = np.sum(rgb_roi[:, :, 0]) + sum_b = np.sum(rgb_roi[:, :, 2]) + if color == RED: + return sum_r >= sum_b + else: + return sum_b >= sum_r + +def rect_contains(rect, pt): + """Determine if a pt is inside a rect. 
+ + Args: + rect (tuple): (x, y, w, h) + pt (tuple): (x, y) + + Returns: + bool: True if the pt is inside the rect + """ + return rect[0] < pt[0] < rect[0] + \ + rect[2] and rect[1] < pt[1] < rect[1] + rect[3] From d95fabc9bcf1994353a6b8c912b939d0727e31d9 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Mon, 13 Mar 2023 00:35:08 -0500 Subject: [PATCH 29/74] add CI code formatting guide to README --- Detection/CV_mix_DL.py | 4 ++++ README.md | 26 ++++++++++++++++++++++++++ Utils/__init__.py | 7 ++++++- 3 files changed, 36 insertions(+), 1 deletion(-) diff --git a/Detection/CV_mix_DL.py b/Detection/CV_mix_DL.py index dc55087..562bdba 100644 --- a/Detection/CV_mix_DL.py +++ b/Detection/CV_mix_DL.py @@ -8,6 +8,7 @@ RED = 0 BLUE = 1 + class cv_mix_dl_detector: """A routine that combines CV and DL to detect armors. @@ -98,6 +99,7 @@ def change_color(self, new_color): else: raise NotImplementedError + class light_class: """ A class that represents a light bar on the armor board. @@ -232,6 +234,7 @@ def extract_number(self, rgb_img): thres, self.number_image = cv2.threshold( self.number_image, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU) + class cv_armor_proposer: """Armor proposer using OpenCV.""" @@ -590,6 +593,7 @@ def is_armor(self, armor): return light_ratio_ok and center_dist_ok and angle_ok + class dl_digit_classifier: """Classify digits in armor number using deep learning model.""" diff --git a/README.md b/README.md index 578b90b..16595af 100644 --- a/README.md +++ b/README.md @@ -2,6 +2,32 @@ repo for iRM vision. +## Contributing + +Now the repo uses CI to check code style. We decided pylint is too painful to use. +Instead we use `pycodestyle` and `pydocstyle` to check if the code conform to autopep8 +and doc standards. + +Code that fails the CI will **NOT** be merged. + +To check if your code is conform to the standard, run + +```bash +pycodestyle --max-line-length=100 . +pydocstyle . +``` + +These two tools can be installed from either pip or anaconda. 
+ +The CI will also tell if your code is failing, but please don't rely on repeatedly submitting +commits to check your coding style. + +To automatically fix your code to autopep8 standard, you can run + +```bash +autopep8 --in-place --aggressive --aggressive --recursive . +``` + ## Data preparing We recorded some demo videos to demo auto-aiming capabilities of our robots, diff --git a/Utils/__init__.py b/Utils/__init__.py index 98488c2..a1c415a 100644 --- a/Utils/__init__.py +++ b/Utils/__init__.py @@ -2,14 +2,17 @@ import numpy as np import cv2 + def deg_to_rad(deg): """Convert degree to radian.""" return deg * ((2 * np.pi) / 360) + def rad_to_deg(rad): """Convert radian to degree.""" return rad * (360. / (2 * np.pi)) + def auto_align_brightness(img, target_v=50): """Standardize brightness of image. @@ -44,6 +47,7 @@ def auto_align_brightness(img, target_v=50): img = cv2.cvtColor(final_hsv, cv2.COLOR_HSV2BGR) return img + def color_test(rgb_img, rect, color): """Test if the color of the roi is the same as the given color. @@ -58,7 +62,7 @@ def color_test(rgb_img, rect, color): # Internal ENUM RED = 0 BLUE = 1 - + rgb_roi = rgb_img[rect[1]:rect[1] + rect[3], rect[0]:rect[0] + rect[2]] sum_r = np.sum(rgb_roi[:, :, 0]) sum_b = np.sum(rgb_roi[:, :, 2]) @@ -67,6 +71,7 @@ def color_test(rgb_img, rect, color): else: return sum_b >= sum_r + def rect_contains(rect, pt): """Determine if a pt is inside a rect. 
From 7390e95a9f267be56b70496dd60bc6b1a77f7c35 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Mon, 13 Mar 2023 02:03:48 -0500 Subject: [PATCH 30/74] add official MDVS python SDK and modify CI to ignore SDK --- .github/workflows/formatting.yml | 4 +- Camera/mvsdk.py | 2403 ++++++++++++++++++++++++++++++ 2 files changed, 2405 insertions(+), 2 deletions(-) create mode 100644 Camera/mvsdk.py diff --git a/.github/workflows/formatting.yml b/.github/workflows/formatting.yml index 7756a5d..a48d04b 100644 --- a/.github/workflows/formatting.yml +++ b/.github/workflows/formatting.yml @@ -21,5 +21,5 @@ jobs: pip install pydocstyle - name: Analysing the code with pycodestyle and pydocstyle run: | - pycodestyle --max-line-length=100 ./ - pydocstyle ./ \ No newline at end of file + pycodestyle --max-line-length=100 --exclude="Camera/mvsdk.py" ./ + pydocstyle --match='(?!test_)(?!mvsdk).*\.py' ./ \ No newline at end of file diff --git a/Camera/mvsdk.py b/Camera/mvsdk.py new file mode 100644 index 0000000..6154792 --- /dev/null +++ b/Camera/mvsdk.py @@ -0,0 +1,2403 @@ +#coding=utf-8 + +"""MDVS camera SDK. 
+ +Adapted from official MDVS SDK example code, which is located at +https://mindvision.com.cn/uploadfiles/2021/04/20/75287253862433139.zip + +More official MDVS demo programs can be found here + https://mindvision.com.cn/rjxz/list_12.aspx?lcid=139 +""" +import platform +from ctypes import * +from threading import local + +# 回调函数类型 +CALLBACK_FUNC_TYPE = None + +# SDK动态库 +_sdk = None + +def _Init(): + global _sdk + global CALLBACK_FUNC_TYPE + + is_win = (platform.system() == "Windows") + is_x86 = (platform.architecture()[0] == '32bit') + + if is_win: + _sdk = windll.MVCAMSDK if is_x86 else windll.MVCAMSDK_X64 + CALLBACK_FUNC_TYPE = WINFUNCTYPE + else: + _sdk = cdll.LoadLibrary("libMVSDK.so") + CALLBACK_FUNC_TYPE = CFUNCTYPE + +_Init() + +#-------------------------------------------类型定义-------------------------------------------------- + +# 状态码定义 +CAMERA_STATUS_SUCCESS = 0 # 操作成功 +CAMERA_STATUS_FAILED = -1 # 操作失败 +CAMERA_STATUS_INTERNAL_ERROR = -2 # 内部错误 +CAMERA_STATUS_UNKNOW = -3 # 未知错误 +CAMERA_STATUS_NOT_SUPPORTED = -4 # 不支持该功能 +CAMERA_STATUS_NOT_INITIALIZED = -5 # 初始化未完成 +CAMERA_STATUS_PARAMETER_INVALID = -6 # 参数无效 +CAMERA_STATUS_PARAMETER_OUT_OF_BOUND = -7 # 参数越界 +CAMERA_STATUS_UNENABLED = -8 # 未使能 +CAMERA_STATUS_USER_CANCEL = -9 # 用户手动取消了,比如roi面板点击取消,返回 +CAMERA_STATUS_PATH_NOT_FOUND = -10 # 注册表中没有找到对应的路径 +CAMERA_STATUS_SIZE_DISMATCH = -11 # 获得图像数据长度和定义的尺寸不匹配 +CAMERA_STATUS_TIME_OUT = -12 # 超时错误 +CAMERA_STATUS_IO_ERROR = -13 # 硬件IO错误 +CAMERA_STATUS_COMM_ERROR = -14 # 通讯错误 +CAMERA_STATUS_BUS_ERROR = -15 # 总线错误 +CAMERA_STATUS_NO_DEVICE_FOUND = -16 # 没有发现设备 +CAMERA_STATUS_NO_LOGIC_DEVICE_FOUND = -17 # 未找到逻辑设备 +CAMERA_STATUS_DEVICE_IS_OPENED = -18 # 设备已经打开 +CAMERA_STATUS_DEVICE_IS_CLOSED = -19 # 设备已经关闭 +CAMERA_STATUS_DEVICE_VEDIO_CLOSED = -20 # 没有打开设备视频,调用录像相关的函数时,如果相机视频没有打开,则回返回该错误。 +CAMERA_STATUS_NO_MEMORY = -21 # 没有足够系统内存 +CAMERA_STATUS_FILE_CREATE_FAILED = -22 # 创建文件失败 +CAMERA_STATUS_FILE_INVALID = -23 # 文件格式无效 +CAMERA_STATUS_WRITE_PROTECTED = -24 # 写保护,不可写 
+CAMERA_STATUS_GRAB_FAILED = -25 # 数据采集失败 +CAMERA_STATUS_LOST_DATA = -26 # 数据丢失,不完整 +CAMERA_STATUS_EOF_ERROR = -27 # 未接收到帧结束符 +CAMERA_STATUS_BUSY = -28 # 正忙(上一次操作还在进行中),此次操作不能进行 +CAMERA_STATUS_WAIT = -29 # 需要等待(进行操作的条件不成立),可以再次尝试 +CAMERA_STATUS_IN_PROCESS = -30 # 正在进行,已经被操作过 +CAMERA_STATUS_IIC_ERROR = -31 # IIC传输错误 +CAMERA_STATUS_SPI_ERROR = -32 # SPI传输错误 +CAMERA_STATUS_USB_CONTROL_ERROR = -33 # USB控制传输错误 +CAMERA_STATUS_USB_BULK_ERROR = -34 # USB BULK传输错误 +CAMERA_STATUS_SOCKET_INIT_ERROR = -35 # 网络传输套件初始化失败 +CAMERA_STATUS_GIGE_FILTER_INIT_ERROR = -36 # 网络相机内核过滤驱动初始化失败,请检查是否正确安装了驱动,或者重新安装。 +CAMERA_STATUS_NET_SEND_ERROR = -37 # 网络数据发送错误 +CAMERA_STATUS_DEVICE_LOST = -38 # 与网络相机失去连接,心跳检测超时 +CAMERA_STATUS_DATA_RECV_LESS = -39 # 接收到的字节数比请求的少 +CAMERA_STATUS_FUNCTION_LOAD_FAILED = -40 # 从文件中加载程序失败 +CAMERA_STATUS_CRITICAL_FILE_LOST = -41 # 程序运行所必须的文件丢失。 +CAMERA_STATUS_SENSOR_ID_DISMATCH = -42 # 固件和程序不匹配,原因是下载了错误的固件。 +CAMERA_STATUS_OUT_OF_RANGE = -43 # 参数超出有效范围。 +CAMERA_STATUS_REGISTRY_ERROR = -44 # 安装程序注册错误。请重新安装程序,或者运行安装目录Setup/Installer.exe +CAMERA_STATUS_ACCESS_DENY = -45 # 禁止访问。指定相机已经被其他程序占用时,再申请访问该相机,会返回该状态。(一个相机不能被多个程序同时访问) +#AIA的标准兼容的错误码 +CAMERA_AIA_PACKET_RESEND = 0x0100 #该帧需要重传 +CAMERA_AIA_NOT_IMPLEMENTED = 0x8001 #设备不支持的命令 +CAMERA_AIA_INVALID_PARAMETER = 0x8002 #命令参数非法 +CAMERA_AIA_INVALID_ADDRESS = 0x8003 #不可访问的地址 +CAMERA_AIA_WRITE_PROTECT = 0x8004 #访问的对象不可写 +CAMERA_AIA_BAD_ALIGNMENT = 0x8005 #访问的地址没有按照要求对齐 +CAMERA_AIA_ACCESS_DENIED = 0x8006 #没有访问权限 +CAMERA_AIA_BUSY = 0x8007 #命令正在处理中 +CAMERA_AIA_DEPRECATED = 0x8008 #0x8008-0x0800B 0x800F 该指令已经废弃 +CAMERA_AIA_PACKET_UNAVAILABLE = 0x800C #包无效 +CAMERA_AIA_DATA_OVERRUN = 0x800D #数据溢出,通常是收到的数据比需要的多 +CAMERA_AIA_INVALID_HEADER = 0x800E #数据包头部中某些区域与协议不匹配 +CAMERA_AIA_PACKET_NOT_YET_AVAILABLE = 0x8010 #图像分包数据还未准备好,多用于触发模式,应用程序访问超时 +CAMERA_AIA_PACKET_AND_PREV_REMOVED_FROM_MEMORY = 0x8011 #需要访问的分包已经不存在。多用于重传时数据已经不在缓冲区中 +CAMERA_AIA_PACKET_REMOVED_FROM_MEMORY = 0x8012 #CAMERA_AIA_PACKET_AND_PREV_REMOVED_FROM_MEMORY 
+CAMERA_AIA_NO_REF_TIME = 0x0813 #没有参考时钟源。多用于时间同步的命令执行时 +CAMERA_AIA_PACKET_TEMPORARILY_UNAVAILABLE = 0x0814 #由于信道带宽问题,当前分包暂时不可用,需稍后进行访问 +CAMERA_AIA_OVERFLOW = 0x0815 #设备端数据溢出,通常是队列已满 +CAMERA_AIA_ACTION_LATE = 0x0816 #命令执行已经超过有效的指定时间 +CAMERA_AIA_ERROR = 0x8FFF #错误 + +# 图像格式定义 +CAMERA_MEDIA_TYPE_MONO = 0x01000000 +CAMERA_MEDIA_TYPE_RGB = 0x02000000 +CAMERA_MEDIA_TYPE_COLOR = 0x02000000 +CAMERA_MEDIA_TYPE_OCCUPY1BIT = 0x00010000 +CAMERA_MEDIA_TYPE_OCCUPY2BIT = 0x00020000 +CAMERA_MEDIA_TYPE_OCCUPY4BIT = 0x00040000 +CAMERA_MEDIA_TYPE_OCCUPY8BIT = 0x00080000 +CAMERA_MEDIA_TYPE_OCCUPY10BIT = 0x000A0000 +CAMERA_MEDIA_TYPE_OCCUPY12BIT = 0x000C0000 +CAMERA_MEDIA_TYPE_OCCUPY16BIT = 0x00100000 +CAMERA_MEDIA_TYPE_OCCUPY24BIT = 0x00180000 +CAMERA_MEDIA_TYPE_OCCUPY32BIT = 0x00200000 +CAMERA_MEDIA_TYPE_OCCUPY36BIT = 0x00240000 +CAMERA_MEDIA_TYPE_OCCUPY48BIT = 0x00300000 +CAMERA_MEDIA_TYPE_EFFECTIVE_PIXEL_SIZE_MASK = 0x00FF0000 +CAMERA_MEDIA_TYPE_EFFECTIVE_PIXEL_SIZE_SHIFT = 16 +CAMERA_MEDIA_TYPE_ID_MASK = 0x0000FFFF +CAMERA_MEDIA_TYPE_COUNT = 0x46 + +#mono +CAMERA_MEDIA_TYPE_MONO1P = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY1BIT | 0x0037) +CAMERA_MEDIA_TYPE_MONO2P = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY2BIT | 0x0038) +CAMERA_MEDIA_TYPE_MONO4P = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY4BIT | 0x0039) +CAMERA_MEDIA_TYPE_MONO8 = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY8BIT | 0x0001) +CAMERA_MEDIA_TYPE_MONO8S = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY8BIT | 0x0002) +CAMERA_MEDIA_TYPE_MONO10 = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x0003) +CAMERA_MEDIA_TYPE_MONO10_PACKED = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY12BIT | 0x0004) +CAMERA_MEDIA_TYPE_MONO12 = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x0005) +CAMERA_MEDIA_TYPE_MONO12_PACKED = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY12BIT | 0x0006) +CAMERA_MEDIA_TYPE_MONO14 = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 
0x0025) +CAMERA_MEDIA_TYPE_MONO16 = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x0007) + +# Bayer +CAMERA_MEDIA_TYPE_BAYGR8 = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY8BIT | 0x0008) +CAMERA_MEDIA_TYPE_BAYRG8 = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY8BIT | 0x0009) +CAMERA_MEDIA_TYPE_BAYGB8 = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY8BIT | 0x000A) +CAMERA_MEDIA_TYPE_BAYBG8 = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY8BIT | 0x000B) + +CAMERA_MEDIA_TYPE_BAYGR10_MIPI = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY10BIT | 0x0026) +CAMERA_MEDIA_TYPE_BAYRG10_MIPI = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY10BIT | 0x0027) +CAMERA_MEDIA_TYPE_BAYGB10_MIPI = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY10BIT | 0x0028) +CAMERA_MEDIA_TYPE_BAYBG10_MIPI = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY10BIT | 0x0029) + +CAMERA_MEDIA_TYPE_BAYGR10 = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x000C) +CAMERA_MEDIA_TYPE_BAYRG10 = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x000D) +CAMERA_MEDIA_TYPE_BAYGB10 = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x000E) +CAMERA_MEDIA_TYPE_BAYBG10 = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x000F) + +CAMERA_MEDIA_TYPE_BAYGR12 = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x0010) +CAMERA_MEDIA_TYPE_BAYRG12 = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x0011) +CAMERA_MEDIA_TYPE_BAYGB12 = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x0012) +CAMERA_MEDIA_TYPE_BAYBG12 = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x0013) + +CAMERA_MEDIA_TYPE_BAYGR10_PACKED = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY12BIT | 0x0026) +CAMERA_MEDIA_TYPE_BAYRG10_PACKED = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY12BIT | 0x0027) +CAMERA_MEDIA_TYPE_BAYGB10_PACKED = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY12BIT | 0x0028) +CAMERA_MEDIA_TYPE_BAYBG10_PACKED = 
(CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY12BIT | 0x0029) + +CAMERA_MEDIA_TYPE_BAYGR12_PACKED = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY12BIT | 0x002A) +CAMERA_MEDIA_TYPE_BAYRG12_PACKED = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY12BIT | 0x002B) +CAMERA_MEDIA_TYPE_BAYGB12_PACKED = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY12BIT | 0x002C) +CAMERA_MEDIA_TYPE_BAYBG12_PACKED = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY12BIT | 0x002D) + +CAMERA_MEDIA_TYPE_BAYGR16 = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x002E) +CAMERA_MEDIA_TYPE_BAYRG16 = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x002F) +CAMERA_MEDIA_TYPE_BAYGB16 = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x0030) +CAMERA_MEDIA_TYPE_BAYBG16 = (CAMERA_MEDIA_TYPE_MONO | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x0031) + +# RGB +CAMERA_MEDIA_TYPE_RGB8 = (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY24BIT | 0x0014) +CAMERA_MEDIA_TYPE_BGR8 = (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY24BIT | 0x0015) +CAMERA_MEDIA_TYPE_RGBA8 = (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY32BIT | 0x0016) +CAMERA_MEDIA_TYPE_BGRA8 = (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY32BIT | 0x0017) +CAMERA_MEDIA_TYPE_RGB10 = (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY48BIT | 0x0018) +CAMERA_MEDIA_TYPE_BGR10 = (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY48BIT | 0x0019) +CAMERA_MEDIA_TYPE_RGB12 = (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY48BIT | 0x001A) +CAMERA_MEDIA_TYPE_BGR12 = (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY48BIT | 0x001B) +CAMERA_MEDIA_TYPE_RGB16 = (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY48BIT | 0x0033) +CAMERA_MEDIA_TYPE_RGB10V1_PACKED = (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY32BIT | 0x001C) +CAMERA_MEDIA_TYPE_RGB10P32 = (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY32BIT | 0x001D) +CAMERA_MEDIA_TYPE_RGB12V1_PACKED = (CAMERA_MEDIA_TYPE_COLOR | 
CAMERA_MEDIA_TYPE_OCCUPY36BIT | 0X0034) +CAMERA_MEDIA_TYPE_RGB565P = (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x0035) +CAMERA_MEDIA_TYPE_BGR565P = (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0X0036) + +# YUV and YCbCr +CAMERA_MEDIA_TYPE_YUV411_8_UYYVYY = (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY12BIT | 0x001E) +CAMERA_MEDIA_TYPE_YUV422_8_UYVY = (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x001F) +CAMERA_MEDIA_TYPE_YUV422_8 = (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x0032) +CAMERA_MEDIA_TYPE_YUV8_UYV = (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY24BIT | 0x0020) +CAMERA_MEDIA_TYPE_YCBCR8_CBYCR = (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY24BIT | 0x003A) +#CAMERA_MEDIA_TYPE_YCBCR422_8 : YYYYCbCrCbCr +CAMERA_MEDIA_TYPE_YCBCR422_8 = (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x003B) +CAMERA_MEDIA_TYPE_YCBCR422_8_CBYCRY = (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x0043) +CAMERA_MEDIA_TYPE_YCBCR411_8_CBYYCRYY = (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY12BIT | 0x003C) +CAMERA_MEDIA_TYPE_YCBCR601_8_CBYCR = (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY24BIT | 0x003D) +CAMERA_MEDIA_TYPE_YCBCR601_422_8 = (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x003E) +CAMERA_MEDIA_TYPE_YCBCR601_422_8_CBYCRY = (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x0044) +CAMERA_MEDIA_TYPE_YCBCR601_411_8_CBYYCRYY = (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY12BIT | 0x003F) +CAMERA_MEDIA_TYPE_YCBCR709_8_CBYCR = (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY24BIT | 0x0040) +CAMERA_MEDIA_TYPE_YCBCR709_422_8 = (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x0041) +CAMERA_MEDIA_TYPE_YCBCR709_422_8_CBYCRY = (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY16BIT | 0x0045) +CAMERA_MEDIA_TYPE_YCBCR709_411_8_CBYYCRYY = (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY12BIT | 0x0042) + +# RGB Planar 
+CAMERA_MEDIA_TYPE_RGB8_PLANAR = (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY24BIT | 0x0021) +CAMERA_MEDIA_TYPE_RGB10_PLANAR = (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY48BIT | 0x0022) +CAMERA_MEDIA_TYPE_RGB12_PLANAR = (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY48BIT | 0x0023) +CAMERA_MEDIA_TYPE_RGB16_PLANAR = (CAMERA_MEDIA_TYPE_COLOR | CAMERA_MEDIA_TYPE_OCCUPY48BIT | 0x0024) + +# 保存格式 +FILE_JPG = 1 +FILE_BMP = 2 +FILE_RAW = 4 +FILE_PNG = 8 +FILE_BMP_8BIT = 16 +FILE_PNG_8BIT = 32 +FILE_RAW_16BIT = 64 + +# 触发信号 +EXT_TRIG_LEADING_EDGE = 0 +EXT_TRIG_TRAILING_EDGE = 1 +EXT_TRIG_HIGH_LEVEL = 2 +EXT_TRIG_LOW_LEVEL = 3 +EXT_TRIG_DOUBLE_EDGE = 4 + +# IO模式 +IOMODE_TRIG_INPUT = 0 +IOMODE_STROBE_OUTPUT = 1 +IOMODE_GP_INPUT = 2 +IOMODE_GP_OUTPUT = 3 +IOMODE_PWM_OUTPUT = 4 + + +# 相机操作异常信息 +class CameraException(Exception): + """docstring for CameraException""" + def __init__(self, error_code): + super(CameraException, self).__init__() + self.error_code = error_code + self.message = CameraGetErrorString(error_code) + + def __str__(self): + return 'error_code:{} message:{}'.format(self.error_code, self.message) + +class MvStructure(Structure): + def __str__(self): + strs = [] + for field in self._fields_: + name = field[0] + value = getattr(self, name) + if isinstance(value, type(b'')): + value = _string_buffer_to_str(value) + strs.append("{}:{}".format(name, value)) + return '\n'.join(strs) + + def __repr__(self): + return self.__str__() + + def clone(self): + obj = type(self)() + memmove(byref(obj), byref(self), sizeof(self)) + return obj + +# 相机的设备信息,只读信息,请勿修改 +class tSdkCameraDevInfo(MvStructure): + _fields_ = [("acProductSeries", c_char * 32), #产品系列 + ("acProductName", c_char * 32), #产品名称 + ("acFriendlyName", c_char * 32), #产品昵称 + ("acLinkName", c_char * 32), #内核符号连接名,内部使用 + ("acDriverVersion", c_char * 32), #驱动版本 + ("acSensorType", c_char * 32), #sensor类型 + ("acPortType", c_char * 32), #接口类型 + ("acSn", c_char * 32), #产品唯一序列号 + ("uInstance", c_uint)] 
#该型号相机在该电脑上的实例索引号,用于区分同型号多相机 + + def GetProductSeries(self): + return _string_buffer_to_str(self.acProductSeries) + def GetProductName(self): + return _string_buffer_to_str(self.acProductName) + def GetFriendlyName(self): + return _string_buffer_to_str(self.acFriendlyName) + def GetLinkName(self): + return _string_buffer_to_str(self.acLinkName) + def GetDriverVersion(self): + return _string_buffer_to_str(self.acDriverVersion) + def GetSensorType(self): + return _string_buffer_to_str(self.acSensorType) + def GetPortType(self): + return _string_buffer_to_str(self.acPortType) + def GetSn(self): + return _string_buffer_to_str(self.acSn) + +# 相机的分辨率设定范围 +class tSdkResolutionRange(MvStructure): + _fields_ = [("iHeightMax", c_int), #图像最大高度 + ("iHeightMin", c_int), #图像最小高度 + ("iWidthMax", c_int), #图像最大宽度 + ("iWidthMin", c_int), #图像最小宽度 + ("uSkipModeMask", c_uint), #SKIP模式掩码,为0,表示不支持SKIP 。bit0为1,表示支持SKIP 2x2 bit1为1,表示支持SKIP 3x3.... + ("uBinSumModeMask", c_uint), #BIN(求和)模式掩码,为0,表示不支持BIN 。bit0为1,表示支持BIN 2x2 bit1为1,表示支持BIN 3x3.... + ("uBinAverageModeMask", c_uint),#BIN(求均值)模式掩码,为0,表示不支持BIN 。bit0为1,表示支持BIN 2x2 bit1为1,表示支持BIN 3x3.... 
+ ("uResampleMask", c_uint)] #硬件重采样的掩码 + +#相机的分辨率描述 +class tSdkImageResolution(MvStructure): + _fields_ = [ + ("iIndex", c_int), # 索引号,[0,N]表示预设的分辨率(N 为预设分辨率的最大个数,一般不超过20),OXFF 表示自定义分辨率(ROI) + ("acDescription", c_char * 32), # 该分辨率的描述信息。仅预设分辨率时该信息有效。自定义分辨率可忽略该信息 + ("uBinSumMode", c_uint), # BIN(求和)的模式,范围不能超过tSdkResolutionRange中uBinSumModeMask + ("uBinAverageMode", c_uint), # BIN(求均值)的模式,范围不能超过tSdkResolutionRange中uBinAverageModeMask + ("uSkipMode", c_uint), # 是否SKIP的尺寸,为0表示禁止SKIP模式,范围不能超过tSdkResolutionRange中uSkipModeMask + ("uResampleMask", c_uint), # 硬件重采样的掩码 + ("iHOffsetFOV", c_int), # 采集视场相对于Sensor最大视场左上角的垂直偏移 + ("iVOffsetFOV", c_int), # 采集视场相对于Sensor最大视场左上角的水平偏移 + ("iWidthFOV", c_int), # 采集视场的宽度 + ("iHeightFOV", c_int), # 采集视场的高度 + ("iWidth", c_int), # 相机最终输出的图像的宽度 + ("iHeight", c_int), # 相机最终输出的图像的高度 + ("iWidthZoomHd", c_int), # 硬件缩放的宽度,不需要进行此操作的分辨率,此变量设置为0. + ("iHeightZoomHd", c_int), # 硬件缩放的高度,不需要进行此操作的分辨率,此变量设置为0. + ("iWidthZoomSw", c_int), # 软件缩放的宽度,不需要进行此操作的分辨率,此变量设置为0. + ("iHeightZoomSw", c_int), # 软件缩放的高度,不需要进行此操作的分辨率,此变量设置为0. 
+ ] + + def GetDescription(self): + return _string_buffer_to_str(self.acDescription) + +#相机白平衡模式描述信息 +class tSdkColorTemperatureDes(MvStructure): + _fields_ = [ + ("iIndex", c_int), # 模式索引号 + ("acDescription", c_char * 32), # 描述信息 + ] + + def GetDescription(self): + return _string_buffer_to_str(self.acDescription) + +#相机帧率描述信息 +class tSdkFrameSpeed(MvStructure): + _fields_ = [ + ("iIndex", c_int), # 帧率索引号,一般0对应于低速模式,1对应于普通模式,2对应于高速模式 + ("acDescription", c_char * 32), # 描述信息 + ] + + def GetDescription(self): + return _string_buffer_to_str(self.acDescription) + +#相机曝光功能范围定义 +class tSdkExpose(MvStructure): + _fields_ = [ + ("uiTargetMin", c_uint), #自动曝光亮度目标最小值 + ("uiTargetMax", c_uint), #自动曝光亮度目标最大值 + ("uiAnalogGainMin", c_uint), #模拟增益的最小值,单位为fAnalogGainStep中定义 + ("uiAnalogGainMax", c_uint), #模拟增益的最大值,单位为fAnalogGainStep中定义 + ("fAnalogGainStep", c_float), #模拟增益每增加1,对应的增加的放大倍数。例如,uiAnalogGainMin一般为16,fAnalogGainStep一般为0.125,那么最小放大倍数就是16*0.125 = 2倍 + ("uiExposeTimeMin", c_uint), #手动模式下,曝光时间的最小值,单位:行。根据CameraGetExposureLineTime可以获得一行对应的时间(微秒),从而得到整帧的曝光时间 + ("uiExposeTimeMax", c_uint), #手动模式下,曝光时间的最大值,单位:行 + ] + +#触发模式描述 +class tSdkTrigger(MvStructure): + _fields_ = [ + ("iIndex", c_int), # 模式索引号 + ("acDescription", c_char * 32), # 描述信息 + ] + + def GetDescription(self): + return _string_buffer_to_str(self.acDescription) + +#传输分包大小描述(主要是针对网络相机有效) +class tSdkPackLength(MvStructure): + _fields_ = [ + ("iIndex", c_int), # 模式索引号 + ("acDescription", c_char * 32), # 描述信息 + ("iPackSize", c_uint), + ] + + def GetDescription(self): + return _string_buffer_to_str(self.acDescription) + +#预设的LUT表描述 +class tSdkPresetLut(MvStructure): + _fields_ = [ + ("iIndex", c_int), # 编号 + ("acDescription", c_char * 32), # 描述信息 + ] + + def GetDescription(self): + return _string_buffer_to_str(self.acDescription) + +#AE算法描述 +class tSdkAeAlgorithm(MvStructure): + _fields_ = [ + ("iIndex", c_int), # 编号 + ("acDescription", c_char * 32), # 描述信息 + ] + + def GetDescription(self): + return 
_string_buffer_to_str(self.acDescription) + +#RAW转RGB算法描述 +class tSdkBayerDecodeAlgorithm(MvStructure): + _fields_ = [ + ("iIndex", c_int), # 编号 + ("acDescription", c_char * 32), # 描述信息 + ] + + def GetDescription(self): + return _string_buffer_to_str(self.acDescription) + +#帧率统计信息 +class tSdkFrameStatistic(MvStructure): + _fields_ = [ + ("iTotal", c_int), #当前采集的总帧数(包括错误帧) + ("iCapture", c_int), #当前采集的有效帧的数量 + ("iLost", c_int), #当前丢帧的数量 + ] + +#相机输出的图像数据格式 +class tSdkMediaType(MvStructure): + _fields_ = [ + ("iIndex", c_int), # 格式种类编号 + ("acDescription", c_char * 32), # 描述信息 + ("iMediaType", c_uint), # 对应的图像格式编码,如CAMERA_MEDIA_TYPE_BAYGR8。 + ] + + def GetDescription(self): + return _string_buffer_to_str(self.acDescription) + +#伽马的设定范围 +class tGammaRange(MvStructure): + _fields_ = [ + ("iMin", c_int), #最小值 + ("iMax", c_int), #最大值 + ] + +#对比度的设定范围 +class tContrastRange(MvStructure): + _fields_ = [ + ("iMin", c_int), #最小值 + ("iMax", c_int), #最大值 + ] + +#RGB三通道数字增益的设定范围 +class tRgbGainRange(MvStructure): + _fields_ = [ + ("iRGainMin", c_int), #红色增益的最小值 + ("iRGainMax", c_int), #红色增益的最大值 + ("iGGainMin", c_int), #绿色增益的最小值 + ("iGGainMax", c_int), #绿色增益的最大值 + ("iBGainMin", c_int), #蓝色增益的最小值 + ("iBGainMax", c_int), #蓝色增益的最大值 + ] + +#饱和度设定的范围 +class tSaturationRange(MvStructure): + _fields_ = [ + ("iMin", c_int), #最小值 + ("iMax", c_int), #最大值 + ] + +#锐化的设定范围 +class tSharpnessRange(MvStructure): + _fields_ = [ + ("iMin", c_int), #最小值 + ("iMax", c_int), #最大值 + ] + +#ISP模块的使能信息 +class tSdkIspCapacity(MvStructure): + _fields_ = [ + ("bMonoSensor", c_int), #表示该型号相机是否为黑白相机,如果是黑白相机,则颜色相关的功能都无法调节 + ("bWbOnce", c_int), #表示该型号相机是否支持手动白平衡功能 + ("bAutoWb", c_int), #表示该型号相机是否支持自动白平衡功能 + ("bAutoExposure", c_int), #表示该型号相机是否支持自动曝光功能 + ("bManualExposure", c_int), #表示该型号相机是否支持手动曝光功能 + ("bAntiFlick", c_int), #表示该型号相机是否支持抗频闪功能 + ("bDeviceIsp", c_int), #表示该型号相机是否支持硬件ISP功能 + ("bForceUseDeviceIsp", c_int), #bDeviceIsp和bForceUseDeviceIsp同时为TRUE时,表示强制只用硬件ISP,不可取消。 + ("bZoomHD", c_int), 
#相机硬件是否支持图像缩放输出(只能是缩小)。 + ] + +# 定义整合的设备描述信息,这些信息可以用于动态构建UI +class tSdkCameraCapbility(MvStructure): + _fields_ = [ + ("pTriggerDesc", POINTER(tSdkTrigger)), + ("iTriggerDesc", c_int), #触发模式的个数,即pTriggerDesc数组的大小 + ("pImageSizeDesc", POINTER(tSdkImageResolution)), + ("iImageSizeDesc", c_int), #预设分辨率的个数,即pImageSizeDesc数组的大小 + ("pClrTempDesc", POINTER(tSdkColorTemperatureDes)), + ("iClrTempDesc", c_int), #预设色温个数 + ("pMediaTypeDesc", POINTER(tSdkMediaType)), + ("iMediaTypeDesc", c_int), #相机输出图像格式的种类个数,即pMediaTypeDesc数组的大小。 + ("pFrameSpeedDesc", POINTER(tSdkFrameSpeed)), #可调节帧速类型,对应界面上普通 高速 和超级三种速度设置 + ("iFrameSpeedDesc", c_int), #可调节帧速类型的个数,即pFrameSpeedDesc数组的大小。 + ("pPackLenDesc", POINTER(tSdkPackLength)), #传输包长度,一般用于网络设备 + ("iPackLenDesc", c_int), #可供选择的传输分包长度的个数,即pPackLenDesc数组的大小。 + ("iOutputIoCounts", c_int), #可编程输出IO的个数 + ("iInputIoCounts", c_int), #可编程输入IO的个数 + ("pPresetLutDesc", POINTER(tSdkPresetLut)), #相机预设的LUT表 + ("iPresetLut", c_int), #相机预设的LUT表的个数,即pPresetLutDesc数组的大小 + ("iUserDataMaxLen", c_int), #指示该相机中用于保存用户数据区的最大长度。为0表示无。 + ("bParamInDevice", c_int), #指示该设备是否支持从设备中读写参数组。1为支持,0不支持。 + ("pAeAlmSwDesc", POINTER(tSdkAeAlgorithm)),#软件自动曝光算法描述 + ("iAeAlmSwDesc", c_int), #软件自动曝光算法个数 + ("pAeAlmHdDesc", POINTER(tSdkAeAlgorithm)),#硬件自动曝光算法描述,为NULL表示不支持硬件自动曝光 + ("iAeAlmHdDesc", c_int), #硬件自动曝光算法个数,为0表示不支持硬件自动曝光 + ("pBayerDecAlmSwDesc", POINTER(tSdkBayerDecodeAlgorithm)),#软件Bayer转换为RGB数据的算法描述 + ("iBayerDecAlmSwDesc", c_int), #软件Bayer转换为RGB数据的算法个数 + ("pBayerDecAlmHdDesc", POINTER(tSdkBayerDecodeAlgorithm)),#硬件Bayer转换为RGB数据的算法描述,为NULL表示不支持 + ("iBayerDecAlmHdDesc", c_int), #硬件Bayer转换为RGB数据的算法个数,为0表示不支持 + + # 图像参数的调节范围定义,用于动态构建UI + ("sExposeDesc", tSdkExpose), #曝光的范围值 + ("sResolutionRange", tSdkResolutionRange), #分辨率范围描述 + ("sRgbGainRange", tRgbGainRange), #图像数字增益范围描述 + ("sSaturationRange", tSaturationRange), #饱和度范围描述 + ("sGammaRange", tGammaRange), #伽马范围描述 + ("sContrastRange", tContrastRange), #对比度范围描述 + ("sSharpnessRange", tSharpnessRange), #锐化范围描述 + 
("sIspCapacity", tSdkIspCapacity), #ISP能力描述 + ] + +#图像帧头信息 +class tSdkFrameHead(MvStructure): + _fields_ = [ + ("uiMediaType", c_uint), # 图像格式,Image Format + ("uBytes", c_uint), # 图像数据字节数,Total bytes + ("iWidth", c_int), # 宽度 Image height + ("iHeight", c_int), # 高度 Image width + ("iWidthZoomSw", c_int), # 软件缩放的宽度,不需要进行软件裁剪的图像,此变量设置为0. + ("iHeightZoomSw", c_int), # 软件缩放的高度,不需要进行软件裁剪的图像,此变量设置为0. + ("bIsTrigger", c_int), # 指示是否为触发帧 is trigger + ("uiTimeStamp", c_uint), # 该帧的采集时间,单位0.1毫秒 + ("uiExpTime", c_uint), # 当前图像的曝光值,单位为微秒us + ("fAnalogGain", c_float), # 当前图像的模拟增益倍数 + ("iGamma", c_int), # 该帧图像的伽马设定值,仅当LUT模式为动态参数生成时有效,其余模式下为-1 + ("iContrast", c_int), # 该帧图像的对比度设定值,仅当LUT模式为动态参数生成时有效,其余模式下为-1 + ("iSaturation", c_int), # 该帧图像的饱和度设定值,对于黑白相机无意义,为0 + ("fRgain", c_float), # 该帧图像处理的红色数字增益倍数,对于黑白相机无意义,为1 + ("fGgain", c_float), # 该帧图像处理的绿色数字增益倍数,对于黑白相机无意义,为1 + ("fBgain", c_float), # 该帧图像处理的蓝色数字增益倍数,对于黑白相机无意义,为1 + ] + +# Grabber统计信息 +class tSdkGrabberStat(MvStructure): + _fields_ = [ + ("Width", c_int), # 帧图像大小 + ("Height", c_int), # 帧图像大小 + ("Disp", c_int), # 显示帧数量 + ("Capture", c_int), # 采集的有效帧的数量 + ("Lost", c_int), # 丢帧的数量 + ("Error", c_int), # 错帧的数量 + ("DispFps", c_float), # 显示帧率 + ("CapFps", c_float), # 捕获帧率 + ] + +# 方法回调辅助类 +class method(object): + def __init__(self, FuncType): + super(method, self).__init__() + self.FuncType = FuncType + self.cache = {} + + def __call__(self, cb): + self.cb = cb + return self + + def __get__(self, obj, objtype): + try: + return self.cache[obj] + except KeyError as e: + def cl(*args): + return self.cb(obj, *args) + r = self.cache[obj] = self.FuncType(cl) + return r + +# 图像捕获的回调函数定义 +CAMERA_SNAP_PROC = CALLBACK_FUNC_TYPE(None, c_int, c_void_p, POINTER(tSdkFrameHead), c_void_p) + +# 相机连接状态回调 +CAMERA_CONNECTION_STATUS_CALLBACK = CALLBACK_FUNC_TYPE(None, c_int, c_uint, c_uint, c_void_p) + +# 异步抓图完成回调 +pfnCameraGrabberSaveImageComplete = CALLBACK_FUNC_TYPE(None, c_void_p, c_void_p, c_int, c_void_p) + +# 帧监听回调 +pfnCameraGrabberFrameListener 
= CALLBACK_FUNC_TYPE(c_int, c_void_p, c_int, c_void_p, POINTER(tSdkFrameHead), c_void_p) + +# 采集器图像捕获的回调 +pfnCameraGrabberFrameCallback = CALLBACK_FUNC_TYPE(None, c_void_p, c_void_p, POINTER(tSdkFrameHead), c_void_p) + +#-----------------------------------函数接口------------------------------------------ + +# 线程局部存储 +_tls = local() + +# 存储最后一次SDK调用返回的错误码 +def GetLastError(): + try: + return _tls.last_error + except AttributeError as e: + _tls.last_error = 0 + return 0 + +def SetLastError(err_code): + _tls.last_error = err_code + +def _string_buffer_to_str(buf): + s = buf if isinstance(buf, type(b'')) else buf.value + + for codec in ('gbk', 'utf-8'): + try: + s = s.decode(codec) + break + except UnicodeDecodeError as e: + continue + + if isinstance(s, str): + return s + else: + return s.encode('utf-8') + +def _str_to_string_buffer(str): + if type(str) is type(u''): + s = str.encode('gbk') + else: + s = str.decode('utf-8').encode('gbk') + return create_string_buffer(s) + +def CameraSdkInit(iLanguageSel): + err_code = _sdk.CameraSdkInit(iLanguageSel) + SetLastError(err_code) + return err_code + +def CameraEnumerateDevice(MaxCount = 32): + Nums = c_int(MaxCount) + pCameraList = (tSdkCameraDevInfo * Nums.value)() + err_code = _sdk.CameraEnumerateDevice(pCameraList, byref(Nums)) + SetLastError(err_code) + return pCameraList[0:Nums.value] + +def CameraEnumerateDeviceEx(): + return _sdk.CameraEnumerateDeviceEx() + +def CameraIsOpened(pCameraInfo): + pOpened = c_int() + err_code = _sdk.CameraIsOpened(byref(pCameraInfo), byref(pOpened) ) + SetLastError(err_code) + return pOpened.value != 0 + +def CameraInit(pCameraInfo, emParamLoadMode = -1, emTeam = -1): + pCameraHandle = c_int() + err_code = _sdk.CameraInit(byref(pCameraInfo), emParamLoadMode, emTeam, byref(pCameraHandle)) + SetLastError(err_code) + if err_code != 0: + raise CameraException(err_code) + return pCameraHandle.value + +def CameraInitEx(iDeviceIndex, emParamLoadMode = -1, emTeam = -1): + pCameraHandle = c_int() + 
err_code = _sdk.CameraInitEx(iDeviceIndex, emParamLoadMode, emTeam, byref(pCameraHandle)) + SetLastError(err_code) + if err_code != 0: + raise CameraException(err_code) + return pCameraHandle.value + +def CameraInitEx2(CameraName): + pCameraHandle = c_int() + err_code = _sdk.CameraInitEx2(_str_to_string_buffer(CameraName), byref(pCameraHandle)) + SetLastError(err_code) + if err_code != 0: + raise CameraException(err_code) + return pCameraHandle.value + +def CameraSetCallbackFunction(hCamera, pCallBack, pContext = 0): + err_code = _sdk.CameraSetCallbackFunction(hCamera, pCallBack, c_void_p(pContext), None) + SetLastError(err_code) + return err_code + +def CameraUnInit(hCamera): + err_code = _sdk.CameraUnInit(hCamera) + SetLastError(err_code) + return err_code + +def CameraGetInformation(hCamera): + pbuffer = c_char_p() + err_code = _sdk.CameraGetInformation(hCamera, byref(pbuffer) ) + SetLastError(err_code) + if err_code == 0 and pbuffer.value is not None: + return _string_buffer_to_str(pbuffer) + return '' + +def CameraImageProcess(hCamera, pbyIn, pbyOut, pFrInfo): + err_code = _sdk.CameraImageProcess(hCamera, c_void_p(pbyIn), c_void_p(pbyOut), byref(pFrInfo)) + SetLastError(err_code) + return err_code + +def CameraImageProcessEx(hCamera, pbyIn, pbyOut, pFrInfo, uOutFormat, uReserved): + err_code = _sdk.CameraImageProcessEx(hCamera, c_void_p(pbyIn), c_void_p(pbyOut), byref(pFrInfo), uOutFormat, uReserved) + SetLastError(err_code) + return err_code + +def CameraDisplayInit(hCamera, hWndDisplay): + err_code = _sdk.CameraDisplayInit(hCamera, hWndDisplay) + SetLastError(err_code) + return err_code + +def CameraDisplayRGB24(hCamera, pFrameBuffer, pFrInfo): + err_code = _sdk.CameraDisplayRGB24(hCamera, c_void_p(pFrameBuffer), byref(pFrInfo) ) + SetLastError(err_code) + return err_code + +def CameraSetDisplayMode(hCamera, iMode): + err_code = _sdk.CameraSetDisplayMode(hCamera, iMode) + SetLastError(err_code) + return err_code + +def CameraSetDisplayOffset(hCamera, 
iOffsetX, iOffsetY): + err_code = _sdk.CameraSetDisplayOffset(hCamera, iOffsetX, iOffsetY) + SetLastError(err_code) + return err_code + +def CameraSetDisplaySize(hCamera, iWidth, iHeight): + err_code = _sdk.CameraSetDisplaySize(hCamera, iWidth, iHeight) + SetLastError(err_code) + return err_code + +def CameraGetImageBuffer(hCamera, wTimes): + pbyBuffer = c_void_p() + pFrameInfo = tSdkFrameHead() + err_code = _sdk.CameraGetImageBuffer(hCamera, byref(pFrameInfo), byref(pbyBuffer), wTimes) + SetLastError(err_code) + if err_code != 0: + raise CameraException(err_code) + return (pbyBuffer.value, pFrameInfo) + +def CameraGetImageBufferEx(hCamera, wTimes): + _sdk.CameraGetImageBufferEx.restype = c_void_p + piWidth = c_int() + piHeight = c_int() + pFrameBuffer = _sdk.CameraGetImageBufferEx(hCamera, byref(piWidth), byref(piHeight), wTimes) + err_code = CAMERA_STATUS_SUCCESS if pFrameBuffer else CAMERA_STATUS_TIME_OUT + SetLastError(err_code) + if pFrameBuffer: + return (pFrameBuffer, piWidth.value, piHeight.value) + else: + raise CameraException(err_code) + +def CameraSnapToBuffer(hCamera, wTimes): + pbyBuffer = c_void_p() + pFrameInfo = tSdkFrameHead() + err_code = _sdk.CameraSnapToBuffer(hCamera, byref(pFrameInfo), byref(pbyBuffer), wTimes) + SetLastError(err_code) + if err_code != 0: + raise CameraException(err_code) + return (pbyBuffer.value, pFrameInfo) + +def CameraReleaseImageBuffer(hCamera, pbyBuffer): + err_code = _sdk.CameraReleaseImageBuffer(hCamera, c_void_p(pbyBuffer) ) + SetLastError(err_code) + return err_code + +def CameraPlay(hCamera): + err_code = _sdk.CameraPlay(hCamera) + SetLastError(err_code) + return err_code + +def CameraPause(hCamera): + err_code = _sdk.CameraPause(hCamera) + SetLastError(err_code) + return err_code + +def CameraStop(hCamera): + err_code = _sdk.CameraStop(hCamera) + SetLastError(err_code) + return err_code + +def CameraInitRecord(hCamera, iFormat, pcSavePath, b2GLimit, dwQuality, iFrameRate): + err_code = 
_sdk.CameraInitRecord(hCamera, iFormat, _str_to_string_buffer(pcSavePath), b2GLimit, dwQuality, iFrameRate) + SetLastError(err_code) + return err_code + +def CameraStopRecord(hCamera): + err_code = _sdk.CameraStopRecord(hCamera) + SetLastError(err_code) + return err_code + +def CameraPushFrame(hCamera, pbyImageBuffer, pFrInfo): + err_code = _sdk.CameraPushFrame(hCamera, c_void_p(pbyImageBuffer), byref(pFrInfo) ) + SetLastError(err_code) + return err_code + +def CameraSaveImage(hCamera, lpszFileName, pbyImageBuffer, pFrInfo, byFileType, byQuality): + err_code = _sdk.CameraSaveImage(hCamera, _str_to_string_buffer(lpszFileName), c_void_p(pbyImageBuffer), byref(pFrInfo), byFileType, byQuality) + SetLastError(err_code) + return err_code + +def CameraSaveImageEx(hCamera, lpszFileName, pbyImageBuffer, uImageFormat, iWidth, iHeight, byFileType, byQuality): + err_code = _sdk.CameraSaveImageEx(hCamera, _str_to_string_buffer(lpszFileName), c_void_p(pbyImageBuffer), uImageFormat, iWidth, iHeight, byFileType, byQuality) + SetLastError(err_code) + return err_code + +def CameraGetImageResolution(hCamera): + psCurVideoSize = tSdkImageResolution() + err_code = _sdk.CameraGetImageResolution(hCamera, byref(psCurVideoSize) ) + SetLastError(err_code) + return psCurVideoSize + +def CameraSetImageResolution(hCamera, pImageResolution): + err_code = _sdk.CameraSetImageResolution(hCamera, byref(pImageResolution) ) + SetLastError(err_code) + return err_code + +def CameraSetImageResolutionEx(hCamera, iIndex, Mode, ModeSize, x, y, width, height, ZoomWidth, ZoomHeight): + err_code = _sdk.CameraSetImageResolutionEx(hCamera, iIndex, Mode, ModeSize, x, y, width, height, ZoomWidth, ZoomHeight) + SetLastError(err_code) + return err_code + +def CameraGetMediaType(hCamera): + piMediaType = c_int() + err_code = _sdk.CameraGetMediaType(hCamera, byref(piMediaType) ) + SetLastError(err_code) + return piMediaType.value + +def CameraSetMediaType(hCamera, iMediaType): + err_code = 
_sdk.CameraSetMediaType(hCamera, iMediaType) + SetLastError(err_code) + return err_code + +def CameraSetAeState(hCamera, bAeState): + err_code = _sdk.CameraSetAeState(hCamera, bAeState) + SetLastError(err_code) + return err_code + +def CameraGetAeState(hCamera): + pAeState = c_int() + err_code = _sdk.CameraGetAeState(hCamera, byref(pAeState) ) + SetLastError(err_code) + return pAeState.value + +def CameraSetSharpness(hCamera, iSharpness): + err_code = _sdk.CameraSetSharpness(hCamera, iSharpness) + SetLastError(err_code) + return err_code + +def CameraGetSharpness(hCamera): + piSharpness = c_int() + err_code = _sdk.CameraGetSharpness(hCamera, byref(piSharpness) ) + SetLastError(err_code) + return piSharpness.value + +def CameraSetLutMode(hCamera, emLutMode): + err_code = _sdk.CameraSetLutMode(hCamera, emLutMode) + SetLastError(err_code) + return err_code + +def CameraGetLutMode(hCamera): + pemLutMode = c_int() + err_code = _sdk.CameraGetLutMode(hCamera, byref(pemLutMode) ) + SetLastError(err_code) + return pemLutMode.value + +def CameraSelectLutPreset(hCamera, iSel): + err_code = _sdk.CameraSelectLutPreset(hCamera, iSel) + SetLastError(err_code) + return err_code + +def CameraGetLutPresetSel(hCamera): + piSel = c_int() + err_code = _sdk.CameraGetLutPresetSel(hCamera, byref(piSel) ) + SetLastError(err_code) + return piSel.value + +def CameraSetCustomLut(hCamera, iChannel, pLut): + pLutNative = (c_ushort * 4096)(*pLut) + err_code = _sdk.CameraSetCustomLut(hCamera, iChannel, pLutNative) + SetLastError(err_code) + return err_code + +def CameraGetCustomLut(hCamera, iChannel): + pLutNative = (c_ushort * 4096)() + err_code = _sdk.CameraGetCustomLut(hCamera, iChannel, pLutNative) + SetLastError(err_code) + return pLutNative[:] + +def CameraGetCurrentLut(hCamera, iChannel): + pLutNative = (c_ushort * 4096)() + err_code = _sdk.CameraGetCurrentLut(hCamera, iChannel, pLutNative) + SetLastError(err_code) + return pLutNative[:] + +def CameraSetWbMode(hCamera, bAuto): + err_code = 
_sdk.CameraSetWbMode(hCamera, bAuto) + SetLastError(err_code) + return err_code + +def CameraGetWbMode(hCamera): + pbAuto = c_int() + err_code = _sdk.CameraGetWbMode(hCamera, byref(pbAuto) ) + SetLastError(err_code) + return pbAuto.value + +def CameraSetPresetClrTemp(hCamera, iSel): + err_code = _sdk.CameraSetPresetClrTemp(hCamera, iSel) + SetLastError(err_code) + return err_code + +def CameraGetPresetClrTemp(hCamera): + piSel = c_int() + err_code = _sdk.CameraGetPresetClrTemp(hCamera, byref(piSel) ) + SetLastError(err_code) + return piSel.value + +def CameraSetUserClrTempGain(hCamera, iRgain, iGgain, iBgain): + err_code = _sdk.CameraSetUserClrTempGain(hCamera, iRgain, iGgain, iBgain) + SetLastError(err_code) + return err_code + +def CameraGetUserClrTempGain(hCamera): + piRgain = c_int() + piGgain = c_int() + piBgain = c_int() + err_code = _sdk.CameraGetUserClrTempGain(hCamera, byref(piRgain), byref(piGgain), byref(piBgain) ) + SetLastError(err_code) + return (piRgain.value, piGgain.value, piBgain.value) + +def CameraSetUserClrTempMatrix(hCamera, pMatrix): + pMatrixNative = (c_float * 9)(*pMatrix) + err_code = _sdk.CameraSetUserClrTempMatrix(hCamera, pMatrixNative) + SetLastError(err_code) + return err_code + +def CameraGetUserClrTempMatrix(hCamera): + pMatrixNative = (c_float * 9)() + err_code = _sdk.CameraGetUserClrTempMatrix(hCamera, pMatrixNative) + SetLastError(err_code) + return pMatrixNative[:] + +def CameraSetClrTempMode(hCamera, iMode): + err_code = _sdk.CameraSetClrTempMode(hCamera, iMode) + SetLastError(err_code) + return err_code + +def CameraGetClrTempMode(hCamera): + piMode = c_int() + err_code = _sdk.CameraGetClrTempMode(hCamera, byref(piMode) ) + SetLastError(err_code) + return piMode.value + +def CameraSetOnceWB(hCamera): + err_code = _sdk.CameraSetOnceWB(hCamera) + SetLastError(err_code) + return err_code + +def CameraSetOnceBB(hCamera): + err_code = _sdk.CameraSetOnceBB(hCamera) + SetLastError(err_code) + return err_code + +def 
CameraSetAeTarget(hCamera, iAeTarget): + err_code = _sdk.CameraSetAeTarget(hCamera, iAeTarget) + SetLastError(err_code) + return err_code + +def CameraGetAeTarget(hCamera): + piAeTarget = c_int() + err_code = _sdk.CameraGetAeTarget(hCamera, byref(piAeTarget) ) + SetLastError(err_code) + return piAeTarget.value + +def CameraSetAeExposureRange(hCamera, fMinExposureTime, fMaxExposureTime): + err_code = _sdk.CameraSetAeExposureRange(hCamera, c_double(fMinExposureTime), c_double(fMaxExposureTime) ) + SetLastError(err_code) + return err_code + +def CameraGetAeExposureRange(hCamera): + fMinExposureTime = c_double() + fMaxExposureTime = c_double() + err_code = _sdk.CameraGetAeExposureRange(hCamera, byref(fMinExposureTime), byref(fMaxExposureTime) ) + SetLastError(err_code) + return (fMinExposureTime.value, fMaxExposureTime.value) + +def CameraSetAeAnalogGainRange(hCamera, iMinAnalogGain, iMaxAnalogGain): + err_code = _sdk.CameraSetAeAnalogGainRange(hCamera, iMinAnalogGain, iMaxAnalogGain) + SetLastError(err_code) + return err_code + +def CameraGetAeAnalogGainRange(hCamera): + iMinAnalogGain = c_int() + iMaxAnalogGain = c_int() + err_code = _sdk.CameraGetAeAnalogGainRange(hCamera, byref(iMinAnalogGain), byref(iMaxAnalogGain) ) + SetLastError(err_code) + return (iMinAnalogGain.value, iMaxAnalogGain.value) + +def CameraSetAeThreshold(hCamera, iThreshold): + err_code = _sdk.CameraSetAeThreshold(hCamera, iThreshold) + SetLastError(err_code) + return err_code + +def CameraGetAeThreshold(hCamera): + iThreshold = c_int() + err_code = _sdk.CameraGetAeThreshold(hCamera, byref(iThreshold)) + SetLastError(err_code) + return iThreshold.value + +def CameraSetExposureTime(hCamera, fExposureTime): + err_code = _sdk.CameraSetExposureTime(hCamera, c_double(fExposureTime) ) + SetLastError(err_code) + return err_code + +def CameraGetExposureLineTime(hCamera): + pfLineTime = c_double() + err_code = _sdk.CameraGetExposureLineTime(hCamera, byref(pfLineTime)) + SetLastError(err_code) + return 
pfLineTime.value + +def CameraGetExposureTime(hCamera): + pfExposureTime = c_double() + err_code = _sdk.CameraGetExposureTime(hCamera, byref(pfExposureTime)) + SetLastError(err_code) + return pfExposureTime.value + +def CameraGetExposureTimeRange(hCamera): + pfMin = c_double() + pfMax = c_double() + pfStep = c_double() + err_code = _sdk.CameraGetExposureTimeRange(hCamera, byref(pfMin), byref(pfMax), byref(pfStep)) + SetLastError(err_code) + return (pfMin.value, pfMax.value, pfStep.value) + +def CameraSetAnalogGain(hCamera, iAnalogGain): + err_code = _sdk.CameraSetAnalogGain(hCamera, iAnalogGain) + SetLastError(err_code) + return err_code + +def CameraGetAnalogGain(hCamera): + piAnalogGain = c_int() + err_code = _sdk.CameraGetAnalogGain(hCamera, byref(piAnalogGain)) + SetLastError(err_code) + return piAnalogGain.value + +def CameraSetGain(hCamera, iRGain, iGGain, iBGain): + err_code = _sdk.CameraSetGain(hCamera, iRGain, iGGain, iBGain) + SetLastError(err_code) + return err_code + +def CameraGetGain(hCamera): + piRGain = c_int() + piGGain = c_int() + piBGain = c_int() + err_code = _sdk.CameraGetGain(hCamera, byref(piRGain), byref(piGGain), byref(piBGain)) + SetLastError(err_code) + return (piRGain.value, piGGain.value, piBGain.value) + +def CameraSetGamma(hCamera, iGamma): + err_code = _sdk.CameraSetGamma(hCamera, iGamma) + SetLastError(err_code) + return err_code + +def CameraGetGamma(hCamera): + piGamma = c_int() + err_code = _sdk.CameraGetGamma(hCamera, byref(piGamma)) + SetLastError(err_code) + return piGamma.value + +def CameraSetContrast(hCamera, iContrast): + err_code = _sdk.CameraSetContrast(hCamera, iContrast) + SetLastError(err_code) + return err_code + +def CameraGetContrast(hCamera): + piContrast = c_int() + err_code = _sdk.CameraGetContrast(hCamera, byref(piContrast)) + SetLastError(err_code) + return piContrast.value + +def CameraSetSaturation(hCamera, iSaturation): + err_code = _sdk.CameraSetSaturation(hCamera, iSaturation) + SetLastError(err_code) + 
return err_code + +def CameraGetSaturation(hCamera): + piSaturation = c_int() + err_code = _sdk.CameraGetSaturation(hCamera, byref(piSaturation)) + SetLastError(err_code) + return piSaturation.value + +def CameraSetMonochrome(hCamera, bEnable): + err_code = _sdk.CameraSetMonochrome(hCamera, bEnable) + SetLastError(err_code) + return err_code + +def CameraGetMonochrome(hCamera): + pbEnable = c_int() + err_code = _sdk.CameraGetMonochrome(hCamera, byref(pbEnable)) + SetLastError(err_code) + return pbEnable.value + +def CameraSetInverse(hCamera, bEnable): + err_code = _sdk.CameraSetInverse(hCamera, bEnable) + SetLastError(err_code) + return err_code + +def CameraGetInverse(hCamera): + pbEnable = c_int() + err_code = _sdk.CameraGetInverse(hCamera, byref(pbEnable)) + SetLastError(err_code) + return pbEnable.value + +def CameraSetAntiFlick(hCamera, bEnable): + err_code = _sdk.CameraSetAntiFlick(hCamera, bEnable) + SetLastError(err_code) + return err_code + +def CameraGetAntiFlick(hCamera): + pbEnable = c_int() + err_code = _sdk.CameraGetAntiFlick(hCamera, byref(pbEnable)) + SetLastError(err_code) + return pbEnable.value + +def CameraGetLightFrequency(hCamera): + piFrequencySel = c_int() + err_code = _sdk.CameraGetLightFrequency(hCamera, byref(piFrequencySel)) + SetLastError(err_code) + return piFrequencySel.value + +def CameraSetLightFrequency(hCamera, iFrequencySel): + err_code = _sdk.CameraSetLightFrequency(hCamera, iFrequencySel) + SetLastError(err_code) + return err_code + +def CameraSetFrameSpeed(hCamera, iFrameSpeed): + err_code = _sdk.CameraSetFrameSpeed(hCamera, iFrameSpeed) + SetLastError(err_code) + return err_code + +def CameraGetFrameSpeed(hCamera): + piFrameSpeed = c_int() + err_code = _sdk.CameraGetFrameSpeed(hCamera, byref(piFrameSpeed)) + SetLastError(err_code) + return piFrameSpeed.value + +def CameraSetParameterMode(hCamera, iMode): + err_code = _sdk.CameraSetParameterMode(hCamera, iMode) + SetLastError(err_code) + return err_code + +def 
CameraGetParameterMode(hCamera): + piTarget = c_int() + err_code = _sdk.CameraGetParameterMode(hCamera, byref(piTarget)) + SetLastError(err_code) + return piTarget.value + +def CameraSetParameterMask(hCamera, uMask): + err_code = _sdk.CameraSetParameterMask(hCamera, uMask) + SetLastError(err_code) + return err_code + +def CameraSaveParameter(hCamera, iTeam): + err_code = _sdk.CameraSaveParameter(hCamera, iTeam) + SetLastError(err_code) + return err_code + +def CameraSaveParameterToFile(hCamera, sFileName): + err_code = _sdk.CameraSaveParameterToFile(hCamera, _str_to_string_buffer(sFileName)) + SetLastError(err_code) + return err_code + +def CameraReadParameterFromFile(hCamera, sFileName): + err_code = _sdk.CameraReadParameterFromFile(hCamera, _str_to_string_buffer(sFileName)) + SetLastError(err_code) + return err_code + +def CameraLoadParameter(hCamera, iTeam): + err_code = _sdk.CameraLoadParameter(hCamera, iTeam) + SetLastError(err_code) + return err_code + +def CameraGetCurrentParameterGroup(hCamera): + piTeam = c_int() + err_code = _sdk.CameraGetCurrentParameterGroup(hCamera, byref(piTeam)) + SetLastError(err_code) + return piTeam.value + +def CameraSetTransPackLen(hCamera, iPackSel): + err_code = _sdk.CameraSetTransPackLen(hCamera, iPackSel) + SetLastError(err_code) + return err_code + +def CameraGetTransPackLen(hCamera): + piPackSel = c_int() + err_code = _sdk.CameraGetTransPackLen(hCamera, byref(piPackSel)) + SetLastError(err_code) + return piPackSel.value + +def CameraIsAeWinVisible(hCamera): + pbIsVisible = c_int() + err_code = _sdk.CameraIsAeWinVisible(hCamera, byref(pbIsVisible)) + SetLastError(err_code) + return pbIsVisible.value + +def CameraSetAeWinVisible(hCamera, bIsVisible): + err_code = _sdk.CameraSetAeWinVisible(hCamera, bIsVisible) + SetLastError(err_code) + return err_code + +def CameraGetAeWindow(hCamera): + piHOff = c_int() + piVOff = c_int() + piWidth = c_int() + piHeight = c_int() + err_code = _sdk.CameraGetAeWindow(hCamera, byref(piHOff), 
byref(piVOff), byref(piWidth), byref(piHeight)) + SetLastError(err_code) + return (piHOff.value, piVOff.value, piWidth.value, piHeight.value) + +def CameraSetAeWindow(hCamera, iHOff, iVOff, iWidth, iHeight): + err_code = _sdk.CameraSetAeWindow(hCamera, iHOff, iVOff, iWidth, iHeight) + SetLastError(err_code) + return err_code + +def CameraSetMirror(hCamera, iDir, bEnable): + err_code = _sdk.CameraSetMirror(hCamera, iDir, bEnable) + SetLastError(err_code) + return err_code + +def CameraGetMirror(hCamera, iDir): + pbEnable = c_int() + err_code = _sdk.CameraGetMirror(hCamera, iDir, byref(pbEnable)) + SetLastError(err_code) + return pbEnable.value + +def CameraSetRotate(hCamera, iRot): + err_code = _sdk.CameraSetRotate(hCamera, iRot) + SetLastError(err_code) + return err_code + +def CameraGetRotate(hCamera): + iRot = c_int() + err_code = _sdk.CameraGetRotate(hCamera, byref(iRot)) + SetLastError(err_code) + return iRot.value + +def CameraGetWbWindow(hCamera): + PiHOff = c_int() + PiVOff = c_int() + PiWidth = c_int() + PiHeight = c_int() + err_code = _sdk.CameraGetWbWindow(hCamera, byref(PiHOff), byref(PiVOff), byref(PiWidth), byref(PiHeight)) + SetLastError(err_code) + return (PiHOff.value, PiVOff.value, PiWidth.value, PiHeight.value) + +def CameraSetWbWindow(hCamera, iHOff, iVOff, iWidth, iHeight): + err_code = _sdk.CameraSetWbWindow(hCamera, iHOff, iVOff, iWidth, iHeight) + SetLastError(err_code) + return err_code + +def CameraIsWbWinVisible(hCamera): + pbShow = c_int() + err_code = _sdk.CameraIsWbWinVisible(hCamera, byref(pbShow)) + SetLastError(err_code) + return pbShow.value + +def CameraSetWbWinVisible(hCamera, bShow): + err_code = _sdk.CameraSetWbWinVisible(hCamera, bShow) + SetLastError(err_code) + return err_code + +def CameraImageOverlay(hCamera, pRgbBuffer, pFrInfo): + err_code = _sdk.CameraImageOverlay(hCamera, c_void_p(pRgbBuffer), byref(pFrInfo)) + SetLastError(err_code) + return err_code + +def CameraSetCrossLine(hCamera, iLine, x, y, uColor, bVisible): + 
err_code = _sdk.CameraSetCrossLine(hCamera, iLine, x, y, uColor, bVisible) + SetLastError(err_code) + return err_code + +def CameraGetCrossLine(hCamera, iLine): + px = c_int() + py = c_int() + pcolor = c_uint() + pbVisible = c_int() + err_code = _sdk.CameraGetCrossLine(hCamera, iLine, byref(px), byref(py), byref(pcolor), byref(pbVisible)) + SetLastError(err_code) + return (px.value, py.value, pcolor.value, pbVisible.value) + +def CameraGetCapability(hCamera): + pCameraInfo = tSdkCameraCapbility() + err_code = _sdk.CameraGetCapability(hCamera, byref(pCameraInfo)) + SetLastError(err_code) + return pCameraInfo + +def CameraWriteSN(hCamera, pbySN, iLevel): + err_code = _sdk.CameraWriteSN(hCamera, _str_to_string_buffer(pbySN), iLevel) + SetLastError(err_code) + return err_code + +def CameraReadSN(hCamera, iLevel): + pbySN = create_string_buffer(64) + err_code = _sdk.CameraReadSN(hCamera, pbySN, iLevel) + SetLastError(err_code) + return _string_buffer_to_str(pbySN) + +def CameraSetTriggerDelayTime(hCamera, uDelayTimeUs): + err_code = _sdk.CameraSetTriggerDelayTime(hCamera, uDelayTimeUs) + SetLastError(err_code) + return err_code + +def CameraGetTriggerDelayTime(hCamera): + puDelayTimeUs = c_uint() + err_code = _sdk.CameraGetTriggerDelayTime(hCamera, byref(puDelayTimeUs)) + SetLastError(err_code) + return puDelayTimeUs.value + +def CameraSetTriggerCount(hCamera, iCount): + err_code = _sdk.CameraSetTriggerCount(hCamera, iCount) + SetLastError(err_code) + return err_code + +def CameraGetTriggerCount(hCamera): + piCount = c_int() + err_code = _sdk.CameraGetTriggerCount(hCamera, byref(piCount)) + SetLastError(err_code) + return piCount.value + +def CameraSoftTrigger(hCamera): + err_code = _sdk.CameraSoftTrigger(hCamera) + SetLastError(err_code) + return err_code + +def CameraSetTriggerMode(hCamera, iModeSel): + err_code = _sdk.CameraSetTriggerMode(hCamera, iModeSel) + SetLastError(err_code) + return err_code + +def CameraGetTriggerMode(hCamera): + piModeSel = c_int() + 
err_code = _sdk.CameraGetTriggerMode(hCamera, byref(piModeSel)) + SetLastError(err_code) + return piModeSel.value + +def CameraSetStrobeMode(hCamera, iMode): + err_code = _sdk.CameraSetStrobeMode(hCamera, iMode) + SetLastError(err_code) + return err_code + +def CameraGetStrobeMode(hCamera): + piMode = c_int() + err_code = _sdk.CameraGetStrobeMode(hCamera, byref(piMode)) + SetLastError(err_code) + return piMode.value + +def CameraSetStrobeDelayTime(hCamera, uDelayTimeUs): + err_code = _sdk.CameraSetStrobeDelayTime(hCamera, uDelayTimeUs) + SetLastError(err_code) + return err_code + +def CameraGetStrobeDelayTime(hCamera): + upDelayTimeUs = c_uint() + err_code = _sdk.CameraGetStrobeDelayTime(hCamera, byref(upDelayTimeUs)) + SetLastError(err_code) + return upDelayTimeUs.value + +def CameraSetStrobePulseWidth(hCamera, uTimeUs): + err_code = _sdk.CameraSetStrobePulseWidth(hCamera, uTimeUs) + SetLastError(err_code) + return err_code + +def CameraGetStrobePulseWidth(hCamera): + upTimeUs = c_uint() + err_code = _sdk.CameraGetStrobePulseWidth(hCamera, byref(upTimeUs)) + SetLastError(err_code) + return upTimeUs.value + +def CameraSetStrobePolarity(hCamera, uPolarity): + err_code = _sdk.CameraSetStrobePolarity(hCamera, uPolarity) + SetLastError(err_code) + return err_code + +def CameraGetStrobePolarity(hCamera): + upPolarity = c_uint() + err_code = _sdk.CameraGetStrobePolarity(hCamera, byref(upPolarity)) + SetLastError(err_code) + return upPolarity.value + +def CameraSetExtTrigSignalType(hCamera, iType): + err_code = _sdk.CameraSetExtTrigSignalType(hCamera, iType) + SetLastError(err_code) + return err_code + +def CameraGetExtTrigSignalType(hCamera): + ipType = c_int() + err_code = _sdk.CameraGetExtTrigSignalType(hCamera, byref(ipType)) + SetLastError(err_code) + return ipType.value + +def CameraSetExtTrigShutterType(hCamera, iType): + err_code = _sdk.CameraSetExtTrigShutterType(hCamera, iType) + SetLastError(err_code) + return err_code + +def 
CameraGetExtTrigShutterType(hCamera): + ipType = c_int() + err_code = _sdk.CameraGetExtTrigShutterType(hCamera, byref(ipType)) + SetLastError(err_code) + return ipType.value + +def CameraSetExtTrigDelayTime(hCamera, uDelayTimeUs): + err_code = _sdk.CameraSetExtTrigDelayTime(hCamera, uDelayTimeUs) + SetLastError(err_code) + return err_code + +def CameraGetExtTrigDelayTime(hCamera): + upDelayTimeUs = c_uint() + err_code = _sdk.CameraGetExtTrigDelayTime(hCamera, byref(upDelayTimeUs)) + SetLastError(err_code) + return upDelayTimeUs.value + +def CameraSetExtTrigJitterTime(hCamera, uTimeUs): + err_code = _sdk.CameraSetExtTrigJitterTime(hCamera, uTimeUs) + SetLastError(err_code) + return err_code + +def CameraGetExtTrigJitterTime(hCamera): + upTimeUs = c_uint() + err_code = _sdk.CameraGetExtTrigJitterTime(hCamera, byref(upTimeUs)) + SetLastError(err_code) + return upTimeUs.value + +def CameraGetExtTrigCapability(hCamera): + puCapabilityMask = c_uint() + err_code = _sdk.CameraGetExtTrigCapability(hCamera, byref(puCapabilityMask)) + SetLastError(err_code) + return puCapabilityMask.value + +def CameraPauseLevelTrigger(hCamera): + err_code = _sdk.CameraPauseLevelTrigger(hCamera) + SetLastError(err_code) + return err_code + +def CameraGetResolutionForSnap(hCamera): + pImageResolution = tSdkImageResolution() + err_code = _sdk.CameraGetResolutionForSnap(hCamera, byref(pImageResolution)) + SetLastError(err_code) + return pImageResolution + +def CameraSetResolutionForSnap(hCamera, pImageResolution): + err_code = _sdk.CameraSetResolutionForSnap(hCamera, byref(pImageResolution)) + SetLastError(err_code) + return err_code + +def CameraCustomizeResolution(hCamera): + pImageCustom = tSdkImageResolution() + err_code = _sdk.CameraCustomizeResolution(hCamera, byref(pImageCustom)) + SetLastError(err_code) + return pImageCustom + +def CameraCustomizeReferWin(hCamera, iWinType, hParent): + piHOff = c_int() + piVOff = c_int() + piWidth = c_int() + piHeight = c_int() + err_code = 
_sdk.CameraCustomizeReferWin(hCamera, iWinType, hParent, byref(piHOff), byref(piVOff), byref(piWidth), byref(piHeight)) + SetLastError(err_code) + return (piHOff.value, piVOff.value, piWidth.value, piHeight.value) + +def CameraShowSettingPage(hCamera, bShow): + err_code = _sdk.CameraShowSettingPage(hCamera, bShow) + SetLastError(err_code) + return err_code + +def CameraCreateSettingPage(hCamera, hParent, pWinText, pCallbackFunc = None, pCallbackCtx = 0, uReserved = 0): + err_code = _sdk.CameraCreateSettingPage(hCamera, hParent, _str_to_string_buffer(pWinText), pCallbackFunc, c_void_p(pCallbackCtx), uReserved) + SetLastError(err_code) + return err_code + +def CameraCreateSettingPageEx(hCamera): + err_code = _sdk.CameraCreateSettingPageEx(hCamera) + SetLastError(err_code) + return err_code + +def CameraSetActiveSettingSubPage(hCamera, index): + err_code = _sdk.CameraSetActiveSettingSubPage(hCamera, index) + SetLastError(err_code) + return err_code + +def CameraSetSettingPageParent(hCamera, hParentWnd, Flags): + err_code = _sdk.CameraSetSettingPageParent(hCamera, hParentWnd, Flags) + SetLastError(err_code) + return err_code + +def CameraGetSettingPageHWnd(hCamera): + hWnd = c_void_p() + err_code = _sdk.CameraGetSettingPageHWnd(hCamera, byref(hWnd)) + SetLastError(err_code) + return hWnd.value + +def CameraSpecialControl(hCamera, dwCtrlCode, dwParam, lpData): + err_code = _sdk.CameraSpecialControl(hCamera, dwCtrlCode, dwParam, c_void_p(lpData) ) + SetLastError(err_code) + return err_code + +def CameraGetFrameStatistic(hCamera): + psFrameStatistic = tSdkFrameStatistic() + err_code = _sdk.CameraGetFrameStatistic(hCamera, byref(psFrameStatistic)) + SetLastError(err_code) + return psFrameStatistic + +def CameraSetNoiseFilter(hCamera, bEnable): + err_code = _sdk.CameraSetNoiseFilter(hCamera, bEnable) + SetLastError(err_code) + return err_code + +def CameraGetNoiseFilterState(hCamera): + pEnable = c_int() + err_code = _sdk.CameraGetNoiseFilterState(hCamera, byref(pEnable)) + 
SetLastError(err_code) + return pEnable.value + +def CameraRstTimeStamp(hCamera): + err_code = _sdk.CameraRstTimeStamp(hCamera) + SetLastError(err_code) + return err_code + +def CameraSaveUserData(hCamera, uStartAddr, pbData): + err_code = _sdk.CameraSaveUserData(hCamera, uStartAddr, pbData, len(pbData)) + SetLastError(err_code) + return err_code + +def CameraLoadUserData(hCamera, uStartAddr, ilen): + pbData = create_string_buffer(ilen) + err_code = _sdk.CameraLoadUserData(hCamera, uStartAddr, pbData, ilen) + SetLastError(err_code) + return pbData[:] + +def CameraGetFriendlyName(hCamera): + pName = create_string_buffer(64) + err_code = _sdk.CameraGetFriendlyName(hCamera, pName) + SetLastError(err_code) + return _string_buffer_to_str(pName) + +def CameraSetFriendlyName(hCamera, pName): + pNameBuf = _str_to_string_buffer(pName) + resize(pNameBuf, 64) + err_code = _sdk.CameraSetFriendlyName(hCamera, pNameBuf) + SetLastError(err_code) + return err_code + +def CameraSdkGetVersionString(): + pVersionString = create_string_buffer(64) + err_code = _sdk.CameraSdkGetVersionString(pVersionString) + SetLastError(err_code) + return _string_buffer_to_str(pVersionString) + +def CameraCheckFwUpdate(hCamera): + pNeedUpdate = c_int() + err_code = _sdk.CameraCheckFwUpdate(hCamera, byref(pNeedUpdate)) + SetLastError(err_code) + return pNeedUpdate.value + +def CameraGetFirmwareVersion(hCamera): + pVersion = create_string_buffer(64) + err_code = _sdk.CameraGetFirmwareVersion(hCamera, pVersion) + SetLastError(err_code) + return _string_buffer_to_str(pVersion) + +def CameraGetEnumInfo(hCamera): + pCameraInfo = tSdkCameraDevInfo() + err_code = _sdk.CameraGetEnumInfo(hCamera, byref(pCameraInfo)) + SetLastError(err_code) + return pCameraInfo + +def CameraGetInerfaceVersion(hCamera): + pVersion = create_string_buffer(64) + err_code = _sdk.CameraGetInerfaceVersion(hCamera, pVersion) + SetLastError(err_code) + return _string_buffer_to_str(pVersion) + +def CameraSetIOState(hCamera, 
iOutputIOIndex, uState): + err_code = _sdk.CameraSetIOState(hCamera, iOutputIOIndex, uState) + SetLastError(err_code) + return err_code + +def CameraGetIOState(hCamera, iInputIOIndex): + puState = c_int() + err_code = _sdk.CameraGetIOState(hCamera, iInputIOIndex, byref(puState)) + SetLastError(err_code) + return puState.value + +def CameraSetInPutIOMode(hCamera, iInputIOIndex, iMode): + err_code = _sdk.CameraSetInPutIOMode(hCamera, iInputIOIndex, iMode) + SetLastError(err_code) + return err_code + +def CameraSetOutPutIOMode(hCamera, iOutputIOIndex, iMode): + err_code = _sdk.CameraSetOutPutIOMode(hCamera, iOutputIOIndex, iMode) + SetLastError(err_code) + return err_code + +def CameraSetOutPutPWM(hCamera, iOutputIOIndex, iCycle, uDuty): + err_code = _sdk.CameraSetOutPutPWM(hCamera, iOutputIOIndex, iCycle, uDuty) + SetLastError(err_code) + return err_code + +def CameraSetAeAlgorithm(hCamera, iIspProcessor, iAeAlgorithmSel): + err_code = _sdk.CameraSetAeAlgorithm(hCamera, iIspProcessor, iAeAlgorithmSel) + SetLastError(err_code) + return err_code + +def CameraGetAeAlgorithm(hCamera, iIspProcessor): + piAlgorithmSel = c_int() + err_code = _sdk.CameraGetAeAlgorithm(hCamera, iIspProcessor, byref(piAlgorithmSel)) + SetLastError(err_code) + return piAlgorithmSel.value + +def CameraSetBayerDecAlgorithm(hCamera, iIspProcessor, iAlgorithmSel): + err_code = _sdk.CameraSetBayerDecAlgorithm(hCamera, iIspProcessor, iAlgorithmSel) + SetLastError(err_code) + return err_code + +def CameraGetBayerDecAlgorithm(hCamera, iIspProcessor): + piAlgorithmSel = c_int() + err_code = _sdk.CameraGetBayerDecAlgorithm(hCamera, iIspProcessor, byref(piAlgorithmSel)) + SetLastError(err_code) + return piAlgorithmSel.value + +def CameraSetIspProcessor(hCamera, iIspProcessor): + err_code = _sdk.CameraSetIspProcessor(hCamera, iIspProcessor) + SetLastError(err_code) + return err_code + +def CameraGetIspProcessor(hCamera): + piIspProcessor = c_int() + err_code = _sdk.CameraGetIspProcessor(hCamera, 
byref(piIspProcessor)) + SetLastError(err_code) + return piIspProcessor.value + +def CameraSetBlackLevel(hCamera, iBlackLevel): + err_code = _sdk.CameraSetBlackLevel(hCamera, iBlackLevel) + SetLastError(err_code) + return err_code + +def CameraGetBlackLevel(hCamera): + piBlackLevel = c_int() + err_code = _sdk.CameraGetBlackLevel(hCamera, byref(piBlackLevel)) + SetLastError(err_code) + return piBlackLevel.value + +def CameraSetWhiteLevel(hCamera, iWhiteLevel): + err_code = _sdk.CameraSetWhiteLevel(hCamera, iWhiteLevel) + SetLastError(err_code) + return err_code + +def CameraGetWhiteLevel(hCamera): + piWhiteLevel = c_int() + err_code = _sdk.CameraGetWhiteLevel(hCamera, byref(piWhiteLevel)) + SetLastError(err_code) + return piWhiteLevel.value + +def CameraSetIspOutFormat(hCamera, uFormat): + err_code = _sdk.CameraSetIspOutFormat(hCamera, uFormat) + SetLastError(err_code) + return err_code + +def CameraGetIspOutFormat(hCamera): + puFormat = c_int() + err_code = _sdk.CameraGetIspOutFormat(hCamera, byref(puFormat)) + SetLastError(err_code) + return puFormat.value + +def CameraGetErrorString(iStatusCode): + _sdk.CameraGetErrorString.restype = c_char_p + msg = _sdk.CameraGetErrorString(iStatusCode) + if msg: + return _string_buffer_to_str(msg) + else: + return '' + +def CameraGetImageBufferEx2(hCamera, pImageData, uOutFormat, wTimes): + piWidth = c_int() + piHeight = c_int() + err_code = _sdk.CameraGetImageBufferEx2(hCamera, c_void_p(pImageData), uOutFormat, byref(piWidth), byref(piHeight), wTimes) + SetLastError(err_code) + if err_code != 0: + raise CameraException(err_code) + return (piWidth.value, piHeight.value) + +def CameraGetImageBufferEx3(hCamera, pImageData, uOutFormat, wTimes): + piWidth = c_int() + piHeight = c_int() + puTimeStamp = c_int() + err_code = _sdk.CameraGetImageBufferEx3(hCamera, c_void_p(pImageData), uOutFormat, byref(piWidth), byref(piHeight), byref(puTimeStamp), wTimes) + SetLastError(err_code) + if err_code != 0: + raise CameraException(err_code) 
+ return (piWidth.value, piHeight.value, puTimeStamp.value) + +def CameraGetCapabilityEx2(hCamera): + pMaxWidth = c_int() + pMaxHeight = c_int() + pbColorCamera = c_int() + err_code = _sdk.CameraGetCapabilityEx2(hCamera, byref(pMaxWidth), byref(pMaxHeight), byref(pbColorCamera)) + SetLastError(err_code) + return (pMaxWidth.value, pMaxHeight.value, pbColorCamera.value) + +def CameraReConnect(hCamera): + err_code = _sdk.CameraReConnect(hCamera) + SetLastError(err_code) + return err_code + +def CameraConnectTest(hCamera): + err_code = _sdk.CameraConnectTest(hCamera) + SetLastError(err_code) + return err_code + +def CameraSetLedEnable(hCamera, index, enable): + err_code = _sdk.CameraSetLedEnable(hCamera, index, enable) + SetLastError(err_code) + return err_code + +def CameraGetLedEnable(hCamera, index): + enable = c_int() + err_code = _sdk.CameraGetLedEnable(hCamera, index, byref(enable)) + SetLastError(err_code) + return enable.value + +def CameraSetLedOnOff(hCamera, index, onoff): + err_code = _sdk.CameraSetLedOnOff(hCamera, index, onoff) + SetLastError(err_code) + return err_code + +def CameraGetLedOnOff(hCamera, index): + onoff = c_int() + err_code = _sdk.CameraGetLedOnOff(hCamera, index, byref(onoff)) + SetLastError(err_code) + return onoff.value + +def CameraSetLedDuration(hCamera, index, duration): + err_code = _sdk.CameraSetLedDuration(hCamera, index, duration) + SetLastError(err_code) + return err_code + +def CameraGetLedDuration(hCamera, index): + duration = c_uint() + err_code = _sdk.CameraGetLedDuration(hCamera, index, byref(duration)) + SetLastError(err_code) + return duration.value + +def CameraSetLedBrightness(hCamera, index, uBrightness): + err_code = _sdk.CameraSetLedBrightness(hCamera, index, uBrightness) + SetLastError(err_code) + return err_code + +def CameraGetLedBrightness(hCamera, index): + uBrightness = c_uint() + err_code = _sdk.CameraGetLedBrightness(hCamera, index, byref(uBrightness)) + SetLastError(err_code) + return uBrightness.value + +def 
CameraEnableTransferRoi(hCamera, uEnableMask): + err_code = _sdk.CameraEnableTransferRoi(hCamera, uEnableMask) + SetLastError(err_code) + return err_code + +def CameraSetTransferRoi(hCamera, index, X1, Y1, X2, Y2): + err_code = _sdk.CameraSetTransferRoi(hCamera, index, X1, Y1, X2, Y2) + SetLastError(err_code) + return err_code + +def CameraGetTransferRoi(hCamera, index): + pX1 = c_uint() + pY1 = c_uint() + pX2 = c_uint() + pY2 = c_uint() + err_code = _sdk.CameraGetTransferRoi(hCamera, index, byref(pX1), byref(pY1), byref(pX2), byref(pY2)) + SetLastError(err_code) + return (pX1.value, pY1.value, pX2.value, pY2.value) + +def CameraAlignMalloc(size, align = 16): + _sdk.CameraAlignMalloc.restype = c_void_p + r = _sdk.CameraAlignMalloc(size, align) + return r + +def CameraAlignFree(membuffer): + _sdk.CameraAlignFree(c_void_p(membuffer)) + +def CameraSetAutoConnect(hCamera, bEnable): + err_code = _sdk.CameraSetAutoConnect(hCamera, bEnable) + SetLastError(err_code) + return err_code + +def CameraGetAutoConnect(hCamera): + pbEnable = c_int() + err_code = _sdk.CameraGetAutoConnect(hCamera, byref(pbEnable)) + SetLastError(err_code) + return pbEnable.value + +def CameraGetReConnectCounts(hCamera): + puCounts = c_int() + err_code = _sdk.CameraGetReConnectCounts(hCamera, byref(puCounts)) + SetLastError(err_code) + return puCounts.value + +def CameraSetSingleGrabMode(hCamera, bEnable): + err_code = _sdk.CameraSetSingleGrabMode(hCamera, bEnable) + SetLastError(err_code) + return err_code + +def CameraGetSingleGrabMode(hCamera): + pbEnable = c_int() + err_code = _sdk.CameraGetSingleGrabMode(hCamera, byref(pbEnable)) + SetLastError(err_code) + return pbEnable.value + +def CameraRestartGrab(hCamera): + err_code = _sdk.CameraRestartGrab(hCamera) + SetLastError(err_code) + return err_code + +def CameraEvaluateImageDefinition(hCamera, iAlgorithSel, pbyIn, pFrInfo): + DefinitionValue = c_double() + err_code = _sdk.CameraEvaluateImageDefinition(hCamera, iAlgorithSel, c_void_p(pbyIn), 
byref(pFrInfo), byref(DefinitionValue)) + SetLastError(err_code) + return DefinitionValue.value + +def CameraDrawText(pRgbBuffer, pFrInfo, pFontFileName, FontWidth, FontHeight, pText, Left, Top, Width, Height, TextColor, uFlags): + err_code = _sdk.CameraDrawText(c_void_p(pRgbBuffer), byref(pFrInfo), _str_to_string_buffer(pFontFileName), FontWidth, FontHeight, _str_to_string_buffer(pText), Left, Top, Width, Height, TextColor, uFlags) + SetLastError(err_code) + return err_code + +def CameraGigeGetIp(pCameraInfo): + CamIp = create_string_buffer(32) + CamMask = create_string_buffer(32) + CamGateWay = create_string_buffer(32) + EtIp = create_string_buffer(32) + EtMask = create_string_buffer(32) + EtGateWay = create_string_buffer(32) + err_code = _sdk.CameraGigeGetIp(byref(pCameraInfo), CamIp, CamMask, CamGateWay, EtIp, EtMask, EtGateWay) + SetLastError(err_code) + return (_string_buffer_to_str(CamIp), _string_buffer_to_str(CamMask), _string_buffer_to_str(CamGateWay), + _string_buffer_to_str(EtIp), _string_buffer_to_str(EtMask), _string_buffer_to_str(EtGateWay) ) + +def CameraGigeSetIp(pCameraInfo, Ip, SubMask, GateWay, bPersistent): + err_code = _sdk.CameraGigeSetIp(byref(pCameraInfo), + _str_to_string_buffer(Ip), _str_to_string_buffer(SubMask), _str_to_string_buffer(GateWay), bPersistent) + SetLastError(err_code) + return err_code + +def CameraGigeGetMac(pCameraInfo): + CamMac = create_string_buffer(32) + EtMac = create_string_buffer(32) + err_code = _sdk.CameraGigeGetMac(byref(pCameraInfo), CamMac, EtMac) + SetLastError(err_code) + return (_string_buffer_to_str(CamMac), _string_buffer_to_str(EtMac) ) + +def CameraEnableFastResponse(hCamera): + err_code = _sdk.CameraEnableFastResponse(hCamera) + SetLastError(err_code) + return err_code + +def CameraSetCorrectDeadPixel(hCamera, bEnable): + err_code = _sdk.CameraSetCorrectDeadPixel(hCamera, bEnable) + SetLastError(err_code) + return err_code + +def CameraGetCorrectDeadPixel(hCamera): + pbEnable = c_int() + err_code = 
_sdk.CameraGetCorrectDeadPixel(hCamera, byref(pbEnable)) + SetLastError(err_code) + return pbEnable.value + +def CameraFlatFieldingCorrectSetEnable(hCamera, bEnable): + err_code = _sdk.CameraFlatFieldingCorrectSetEnable(hCamera, bEnable) + SetLastError(err_code) + return err_code + +def CameraFlatFieldingCorrectGetEnable(hCamera): + pbEnable = c_int() + err_code = _sdk.CameraFlatFieldingCorrectGetEnable(hCamera, byref(pbEnable)) + SetLastError(err_code) + return pbEnable.value + +def CameraFlatFieldingCorrectSetParameter(hCamera, pDarkFieldingImage, pDarkFieldingFrInfo, pLightFieldingImage, pLightFieldingFrInfo): + err_code = _sdk.CameraFlatFieldingCorrectSetParameter(hCamera, c_void_p(pDarkFieldingImage), byref(pDarkFieldingFrInfo), c_void_p(pLightFieldingImage), byref(pLightFieldingFrInfo)) + SetLastError(err_code) + return err_code + +def CameraFlatFieldingCorrectGetParameterState(hCamera): + pbValid = c_int() + pFilePath = create_string_buffer(1024) + err_code = _sdk.CameraFlatFieldingCorrectGetParameterState(hCamera, byref(pbValid), pFilePath) + SetLastError(err_code) + return (pbValid.value, _string_buffer_to_str(pFilePath) ) + +def CameraFlatFieldingCorrectSaveParameterToFile(hCamera, pszFileName): + err_code = _sdk.CameraFlatFieldingCorrectSaveParameterToFile(hCamera, _str_to_string_buffer(pszFileName)) + SetLastError(err_code) + return err_code + +def CameraFlatFieldingCorrectLoadParameterFromFile(hCamera, pszFileName): + err_code = _sdk.CameraFlatFieldingCorrectLoadParameterFromFile(hCamera, _str_to_string_buffer(pszFileName)) + SetLastError(err_code) + return err_code + +def CameraCommonCall(hCamera, pszCall, uResultBufSize): + pszResult = create_string_buffer(uResultBufSize) if uResultBufSize > 0 else None + err_code = _sdk.CameraCommonCall(hCamera, _str_to_string_buffer(pszCall), pszResult, uResultBufSize) + SetLastError(err_code) + return _string_buffer_to_str(pszResult) if pszResult else '' + +def CameraSetDenoise3DParams(hCamera, bEnable, nCount, 
Weights): + assert(nCount >= 2 and nCount <= 8) + if Weights: + assert(len(Weights) == nCount) + WeightsNative = (c_float * nCount)(*Weights) + else: + WeightsNative = None + err_code = _sdk.CameraSetDenoise3DParams(hCamera, bEnable, nCount, WeightsNative) + SetLastError(err_code) + return err_code + +def CameraGetDenoise3DParams(hCamera): + bEnable = c_int() + nCount = c_int() + bUseWeight = c_int() + Weights = (c_float * 8)() + err_code = _sdk.CameraGetDenoise3DParams(hCamera, byref(bEnable), byref(nCount), byref(bUseWeight), Weights) + SetLastError(err_code) + bEnable, nCount, bUseWeight = bEnable.value, nCount.value, bUseWeight.value + if bUseWeight: + Weights = Weights[:nCount] + else: + Weights = None + return (bEnable, nCount, bUseWeight, Weights) + +def CameraManualDenoise3D(InFramesHead, InFramesData, nCount, Weights, OutFrameHead, OutFrameData): + assert(nCount > 0) + assert(len(InFramesData) == nCount) + assert(Weights is None or len(Weights) == nCount) + InFramesDataNative = (c_void_p * nCount)(*InFramesData) + WeightsNative = (c_float * nCount)(*Weights) if Weights else None + err_code = _sdk.CameraManualDenoise3D(byref(InFramesHead), InFramesDataNative, nCount, WeightsNative, byref(OutFrameHead), c_void_p(OutFrameData)) + SetLastError(err_code) + return err_code + +def CameraCustomizeDeadPixels(hCamera, hParent): + err_code = _sdk.CameraCustomizeDeadPixels(hCamera, hParent) + SetLastError(err_code) + return err_code + +def CameraReadDeadPixels(hCamera): + pNumPixel = c_int() + err_code = _sdk.CameraReadDeadPixels(hCamera, None, None, byref(pNumPixel)) + SetLastError(err_code) + if pNumPixel.value < 1: + return None + UShortArray = c_ushort * pNumPixel.value + pRows = UShortArray() + pCols = UShortArray() + err_code = _sdk.CameraReadDeadPixels(hCamera, pRows, pCols, byref(pNumPixel)) + SetLastError(err_code) + if err_code == 0: + pNumPixel = pNumPixel.value + else: + pNumPixel = 0 + return (pRows[:pNumPixel], pCols[:pNumPixel]) + +def 
CameraAddDeadPixels(hCamera, pRows, pCols, NumPixel): + UShortArray = c_ushort * NumPixel + pRowsNative = UShortArray(*pRows) + pColsNative = UShortArray(*pCols) + err_code = _sdk.CameraAddDeadPixels(hCamera, pRowsNative, pColsNative, NumPixel) + SetLastError(err_code) + return err_code + +def CameraRemoveDeadPixels(hCamera, pRows, pCols, NumPixel): + UShortArray = c_ushort * NumPixel + pRowsNative = UShortArray(*pRows) + pColsNative = UShortArray(*pCols) + err_code = _sdk.CameraRemoveDeadPixels(hCamera, pRowsNative, pColsNative, NumPixel) + SetLastError(err_code) + return err_code + +def CameraRemoveAllDeadPixels(hCamera): + err_code = _sdk.CameraRemoveAllDeadPixels(hCamera) + SetLastError(err_code) + return err_code + +def CameraSaveDeadPixels(hCamera): + err_code = _sdk.CameraSaveDeadPixels(hCamera) + SetLastError(err_code) + return err_code + +def CameraSaveDeadPixelsToFile(hCamera, sFileName): + err_code = _sdk.CameraSaveDeadPixelsToFile(hCamera, _str_to_string_buffer(sFileName)) + SetLastError(err_code) + return err_code + +def CameraLoadDeadPixelsFromFile(hCamera, sFileName): + err_code = _sdk.CameraLoadDeadPixelsFromFile(hCamera, _str_to_string_buffer(sFileName)) + SetLastError(err_code) + return err_code + +def CameraGetImageBufferPriority(hCamera, wTimes, Priority): + pFrameInfo = tSdkFrameHead() + pbyBuffer = c_void_p() + err_code = _sdk.CameraGetImageBufferPriority(hCamera, byref(pFrameInfo), byref(pbyBuffer), wTimes, Priority) + SetLastError(err_code) + if err_code != 0: + raise CameraException(err_code) + return (pbyBuffer.value, pFrameInfo) + +def CameraGetImageBufferPriorityEx(hCamera, wTimes, Priority): + _sdk.CameraGetImageBufferPriorityEx.restype = c_void_p + piWidth = c_int() + piHeight = c_int() + pFrameBuffer = _sdk.CameraGetImageBufferPriorityEx(hCamera, byref(piWidth), byref(piHeight), wTimes, Priority) + err_code = CAMERA_STATUS_SUCCESS if pFrameBuffer else CAMERA_STATUS_TIME_OUT + SetLastError(err_code) + if pFrameBuffer: + return 
(pFrameBuffer, piWidth.value, piHeight.value) + else: + raise CameraException(err_code) + +def CameraGetImageBufferPriorityEx2(hCamera, pImageData, uOutFormat, wTimes, Priority): + piWidth = c_int() + piHeight = c_int() + err_code = _sdk.CameraGetImageBufferPriorityEx2(hCamera, c_void_p(pImageData), uOutFormat, byref(piWidth), byref(piHeight), wTimes, Priority) + SetLastError(err_code) + if err_code != 0: + raise CameraException(err_code) + return (piWidth.value, piHeight.value) + +def CameraGetImageBufferPriorityEx3(hCamera, pImageData, uOutFormat, wTimes, Priority): + piWidth = c_int() + piHeight = c_int() + puTimeStamp = c_uint() + err_code = _sdk.CameraGetImageBufferPriorityEx3(hCamera, c_void_p(pImageData), uOutFormat, byref(piWidth), byref(piHeight), byref(puTimeStamp), wTimes, Priority) + SetLastError(err_code) + if err_code != 0: + raise CameraException(err_code) + return (piWidth.value, piHeight.value, puTimeStamp.value) + +def CameraClearBuffer(hCamera): + err_code = _sdk.CameraClearBuffer(hCamera) + SetLastError(err_code) + return err_code + +def CameraSoftTriggerEx(hCamera, uFlags): + err_code = _sdk.CameraSoftTriggerEx(hCamera, uFlags) + SetLastError(err_code) + return err_code + +def CameraSetHDR(hCamera, value): + err_code = _sdk.CameraSetHDR(hCamera, value) + SetLastError(err_code) + return err_code + +def CameraGetHDR(hCamera): + value = c_int() + err_code = _sdk.CameraGetHDR(hCamera, byref(value)) + SetLastError(err_code) + return value.value + +def CameraGetFrameID(hCamera): + FrameID = c_uint() + err_code = _sdk.CameraGetFrameID(hCamera, byref(FrameID)) + SetLastError(err_code) + return FrameID.value + +def CameraGetFrameTimeStamp(hCamera): + TimeStamp = c_uint64() + TimeStampL = c_uint32.from_buffer(TimeStamp) + TimeStampH = c_uint32.from_buffer(TimeStamp, 4) + err_code = _sdk.CameraGetFrameTimeStamp(hCamera, byref(TimeStampL), byref(TimeStampH)) + SetLastError(err_code) + return TimeStamp.value + +def CameraSetHDRGainMode(hCamera, value): + 
err_code = _sdk.CameraSetHDRGainMode(hCamera, value) + SetLastError(err_code) + return err_code + +def CameraGetHDRGainMode(hCamera): + value = c_int() + err_code = _sdk.CameraGetHDRGainMode(hCamera, byref(value)) + SetLastError(err_code) + return value.value + +def CameraCreateDIBitmap(hDC, pFrameBuffer, pFrameHead): + outBitmap = c_void_p() + err_code = _sdk.CameraCreateDIBitmap(hDC, c_void_p(pFrameBuffer), byref(pFrameHead), byref(outBitmap)) + SetLastError(err_code) + return outBitmap.value + +def CameraDrawFrameBuffer(pFrameBuffer, pFrameHead, hWnd, Algorithm, Mode): + err_code = _sdk.CameraDrawFrameBuffer(c_void_p(pFrameBuffer), byref(pFrameHead), c_void_p(hWnd), Algorithm, Mode) + SetLastError(err_code) + return err_code + +def CameraFlipFrameBuffer(pFrameBuffer, pFrameHead, Flags): + err_code = _sdk.CameraFlipFrameBuffer(c_void_p(pFrameBuffer), byref(pFrameHead), Flags) + SetLastError(err_code) + return err_code + +def CameraConvertFrameBufferFormat(hCamera, pInFrameBuffer, pOutFrameBuffer, outWidth, outHeight, outMediaType, pFrameHead): + err_code = _sdk.CameraConvertFrameBufferFormat(hCamera, c_void_p(pInFrameBuffer), c_void_p(pOutFrameBuffer), outWidth, outHeight, outMediaType, byref(pFrameHead)) + SetLastError(err_code) + return err_code + +def CameraSetConnectionStatusCallback(hCamera, pCallBack, pContext = 0): + err_code = _sdk.CameraSetConnectionStatusCallback(hCamera, pCallBack, c_void_p(pContext) ) + SetLastError(err_code) + return err_code + +def CameraSetLightingControllerMode(hCamera, index, mode): + err_code = _sdk.CameraSetLightingControllerMode(hCamera, index, mode) + SetLastError(err_code) + return err_code + +def CameraSetLightingControllerState(hCamera, index, state): + err_code = _sdk.CameraSetLightingControllerState(hCamera, index, state) + SetLastError(err_code) + return err_code + +def CameraSetFrameResendCount(hCamera, count): + err_code = _sdk.CameraSetFrameResendCount(hCamera, count) + SetLastError(err_code) + return err_code + +def 
CameraSetUndistortParams(hCamera, width, height, cameraMatrix, distCoeffs): + assert(len(cameraMatrix) == 4) + assert(len(distCoeffs) == 5) + cameraMatrixNative = (c_double * len(cameraMatrix))(*cameraMatrix) + distCoeffsNative = (c_double * len(distCoeffs))(*distCoeffs) + err_code = _sdk.CameraSetUndistortParams(hCamera, width, height, cameraMatrixNative, distCoeffsNative) + SetLastError(err_code) + return err_code + +def CameraGetUndistortParams(hCamera): + width = c_int() + height = c_int() + cameraMatrix = (c_double * 4)() + distCoeffs = (c_double * 5)() + err_code = _sdk.CameraGetUndistortParams(hCamera, byref(width), byref(height), cameraMatrix, distCoeffs) + SetLastError(err_code) + width, height = width.value, height.value + cameraMatrix = cameraMatrix[:] + distCoeffs = distCoeffs[:] + return (width, height, cameraMatrix, distCoeffs) + +def CameraSetUndistortEnable(hCamera, bEnable): + err_code = _sdk.CameraSetUndistortEnable(hCamera, bEnable) + SetLastError(err_code) + return err_code + +def CameraGetUndistortEnable(hCamera): + value = c_int() + err_code = _sdk.CameraGetUndistortEnable(hCamera, byref(value)) + SetLastError(err_code) + return value.value + +def CameraCustomizeUndistort(hCamera, hParent): + err_code = _sdk.CameraCustomizeUndistort(hCamera, hParent) + SetLastError(err_code) + return err_code + +def CameraGetEyeCount(hCamera): + EyeCount = c_int() + err_code = _sdk.CameraGetEyeCount(hCamera, byref(EyeCount)) + SetLastError(err_code) + return EyeCount.value + +def CameraMultiEyeImageProcess(hCamera, iEyeIndex, pbyIn, pInFrInfo, pbyOut, pOutFrInfo, uOutFormat, uReserved): + err_code = _sdk.CameraMultiEyeImageProcess(hCamera, iEyeIndex, c_void_p(pbyIn), byref(pInFrInfo), c_void_p(pbyOut), byref(pOutFrInfo), uOutFormat, uReserved) + SetLastError(err_code) + return err_code + +# CameraGrabber + +def CameraGrabber_CreateFromDevicePage(): + Grabber = c_void_p() + err_code = _sdk.CameraGrabber_CreateFromDevicePage(byref(Grabber)) + 
SetLastError(err_code) + if err_code != 0: + raise CameraException(err_code) + return Grabber.value + +def CameraGrabber_CreateByIndex(Index): + Grabber = c_void_p() + err_code = _sdk.CameraGrabber_CreateByIndex(byref(Grabber), Index) + SetLastError(err_code) + if err_code != 0: + raise CameraException(err_code) + return Grabber.value + +def CameraGrabber_CreateByName(Name): + Grabber = c_void_p() + err_code = _sdk.CameraGrabber_CreateByName(byref(Grabber), _str_to_string_buffer(Name)) + SetLastError(err_code) + if err_code != 0: + raise CameraException(err_code) + return Grabber.value + +def CameraGrabber_Create(pDevInfo): + Grabber = c_void_p() + err_code = _sdk.CameraGrabber_Create(byref(Grabber), byref(pDevInfo)) + SetLastError(err_code) + if err_code != 0: + raise CameraException(err_code) + return Grabber.value + +def CameraGrabber_Destroy(Grabber): + err_code = _sdk.CameraGrabber_Destroy(c_void_p(Grabber)) + SetLastError(err_code) + return err_code + +def CameraGrabber_SetHWnd(Grabber, hWnd): + err_code = _sdk.CameraGrabber_SetHWnd(c_void_p(Grabber), c_void_p(hWnd) ) + SetLastError(err_code) + return err_code + +def CameraGrabber_SetPriority(Grabber, Priority): + err_code = _sdk.CameraGrabber_SetPriority(c_void_p(Grabber), Priority) + SetLastError(err_code) + return err_code + +def CameraGrabber_StartLive(Grabber): + err_code = _sdk.CameraGrabber_StartLive(c_void_p(Grabber)) + SetLastError(err_code) + return err_code + +def CameraGrabber_StopLive(Grabber): + err_code = _sdk.CameraGrabber_StopLive(c_void_p(Grabber)) + SetLastError(err_code) + return err_code + +def CameraGrabber_SaveImage(Grabber, TimeOut): + Image = c_void_p() + err_code = _sdk.CameraGrabber_SaveImage(c_void_p(Grabber), byref(Image), TimeOut) + SetLastError(err_code) + if err_code != 0: + raise CameraException(err_code) + return Image.value + +def CameraGrabber_SaveImageAsync(Grabber): + err_code = _sdk.CameraGrabber_SaveImageAsync(c_void_p(Grabber)) + SetLastError(err_code) + return 
err_code + +def CameraGrabber_SaveImageAsyncEx(Grabber, UserData): + err_code = _sdk.CameraGrabber_SaveImageAsyncEx(c_void_p(Grabber), c_void_p(UserData)) + SetLastError(err_code) + return err_code + +def CameraGrabber_SetSaveImageCompleteCallback(Grabber, Callback, Context = 0): + err_code = _sdk.CameraGrabber_SetSaveImageCompleteCallback(c_void_p(Grabber), Callback, c_void_p(Context)) + SetLastError(err_code) + return err_code + +def CameraGrabber_SetFrameListener(Grabber, Listener, Context = 0): + err_code = _sdk.CameraGrabber_SetFrameListener(c_void_p(Grabber), Listener, c_void_p(Context)) + SetLastError(err_code) + return err_code + +def CameraGrabber_SetRawCallback(Grabber, Callback, Context = 0): + err_code = _sdk.CameraGrabber_SetRawCallback(c_void_p(Grabber), Callback, c_void_p(Context)) + SetLastError(err_code) + return err_code + +def CameraGrabber_SetRGBCallback(Grabber, Callback, Context = 0): + err_code = _sdk.CameraGrabber_SetRGBCallback(c_void_p(Grabber), Callback, c_void_p(Context)) + SetLastError(err_code) + return err_code + +def CameraGrabber_GetCameraHandle(Grabber): + hCamera = c_int() + err_code = _sdk.CameraGrabber_GetCameraHandle(c_void_p(Grabber), byref(hCamera)) + SetLastError(err_code) + return hCamera.value + +def CameraGrabber_GetStat(Grabber): + stat = tSdkGrabberStat() + err_code = _sdk.CameraGrabber_GetStat(c_void_p(Grabber), byref(stat)) + SetLastError(err_code) + return stat + +def CameraGrabber_GetCameraDevInfo(Grabber): + DevInfo = tSdkCameraDevInfo() + err_code = _sdk.CameraGrabber_GetCameraDevInfo(c_void_p(Grabber), byref(DevInfo)) + SetLastError(err_code) + return DevInfo + +# CameraImage + +def CameraImage_Create(pFrameBuffer, pFrameHead, bCopy): + Image = c_void_p() + err_code = _sdk.CameraImage_Create(byref(Image), c_void_p(pFrameBuffer), byref(pFrameHead), bCopy) + SetLastError(err_code) + return Image.value + +def CameraImage_CreateEmpty(): + Image = c_void_p() + err_code = _sdk.CameraImage_CreateEmpty(byref(Image)) + 
SetLastError(err_code) + return Image.value + +def CameraImage_Destroy(Image): + err_code = _sdk.CameraImage_Destroy(c_void_p(Image)) + SetLastError(err_code) + return err_code + +def CameraImage_GetData(Image): + DataBuffer = c_void_p() + HeadPtr = c_void_p() + err_code = _sdk.CameraImage_GetData(c_void_p(Image), byref(DataBuffer), byref(HeadPtr)) + SetLastError(err_code) + if err_code == 0: + return (DataBuffer.value, tSdkFrameHead.from_address(HeadPtr.value) ) + else: + return (0, None) + +def CameraImage_GetUserData(Image): + UserData = c_void_p() + err_code = _sdk.CameraImage_GetUserData(c_void_p(Image), byref(UserData)) + SetLastError(err_code) + return UserData.value + +def CameraImage_SetUserData(Image, UserData): + err_code = _sdk.CameraImage_SetUserData(c_void_p(Image), c_void_p(UserData)) + SetLastError(err_code) + return err_code + +def CameraImage_IsEmpty(Image): + IsEmpty = c_int() + err_code = _sdk.CameraImage_IsEmpty(c_void_p(Image), byref(IsEmpty)) + SetLastError(err_code) + return IsEmpty.value + +def CameraImage_Draw(Image, hWnd, Algorithm): + err_code = _sdk.CameraImage_Draw(c_void_p(Image), c_void_p(hWnd), Algorithm) + SetLastError(err_code) + return err_code + +def CameraImage_DrawFit(Image, hWnd, Algorithm): + err_code = _sdk.CameraImage_DrawFit(c_void_p(Image), c_void_p(hWnd), Algorithm) + SetLastError(err_code) + return err_code + +def CameraImage_DrawToDC(Image, hDC, Algorithm, xDst, yDst, cxDst, cyDst): + err_code = _sdk.CameraImage_DrawToDC(c_void_p(Image), c_void_p(hDC), Algorithm, xDst, yDst, cxDst, cyDst) + SetLastError(err_code) + return err_code + +def CameraImage_DrawToDCFit(Image, hDC, Algorithm, xDst, yDst, cxDst, cyDst): + err_code = _sdk.CameraImage_DrawToDCFit(c_void_p(Image), c_void_p(hDC), Algorithm, xDst, yDst, cxDst, cyDst) + SetLastError(err_code) + return err_code + +def CameraImage_BitBlt(Image, hWnd, xDst, yDst, cxDst, cyDst, xSrc, ySrc): + err_code = _sdk.CameraImage_BitBlt(c_void_p(Image), c_void_p(hWnd), xDst, yDst, 
cxDst, cyDst, xSrc, ySrc) + SetLastError(err_code) + return err_code + +def CameraImage_BitBltToDC(Image, hDC, xDst, yDst, cxDst, cyDst, xSrc, ySrc): + err_code = _sdk.CameraImage_BitBltToDC(c_void_p(Image), c_void_p(hDC), xDst, yDst, cxDst, cyDst, xSrc, ySrc) + SetLastError(err_code) + return err_code + +def CameraImage_SaveAsBmp(Image, FileName): + err_code = _sdk.CameraImage_SaveAsBmp(c_void_p(Image), _str_to_string_buffer(FileName)) + SetLastError(err_code) + return err_code + +def CameraImage_SaveAsJpeg(Image, FileName, Quality): + err_code = _sdk.CameraImage_SaveAsJpeg(c_void_p(Image), _str_to_string_buffer(FileName), Quality) + SetLastError(err_code) + return err_code + +def CameraImage_SaveAsPng(Image, FileName): + err_code = _sdk.CameraImage_SaveAsPng(c_void_p(Image), _str_to_string_buffer(FileName)) + SetLastError(err_code) + return err_code + +def CameraImage_SaveAsRaw(Image, FileName, Format): + err_code = _sdk.CameraImage_SaveAsRaw(c_void_p(Image), _str_to_string_buffer(FileName), Format) + SetLastError(err_code) + return err_code + +def CameraImage_IPicture(Image): + NewPic = c_void_p() + err_code = _sdk.CameraImage_IPicture(c_void_p(Image), byref(NewPic)) + SetLastError(err_code) + return NewPic.value From a0d75d35e8fedcccc4f38596f970b577ab2aad91 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Mon, 13 Mar 2023 02:04:49 -0500 Subject: [PATCH 31/74] adapt MDVS official demo code for a camera class --- Camera/mdvs.py | 140 +++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 140 insertions(+) create mode 100644 Camera/mdvs.py diff --git a/Camera/mdvs.py b/Camera/mdvs.py new file mode 100644 index 0000000..04ffdb0 --- /dev/null +++ b/Camera/mdvs.py @@ -0,0 +1,140 @@ +"""Wrap the MDVS SDK to build a camera driver.""" +import cv2 +import numpy as np +import mvsdk +import time +import Utils + + +class mdvs_camera: + """MDVS camera driver. 
+ + Adapted from official MDVS SDK example code, which is located at + https://mindvision.com.cn/uploadfiles/2021/04/20/75287253862433139.zip + + More official MDVS demo programs can be found here + https://mindvision.com.cn/rjxz/list_12.aspx?lcid=139 + """ + + # Needs to be calibrated per camera + YAW_FOV_HALF = Utils.deg_to_rad(42) / 2 + PITCH_FOV_HALF = Utils.deg_to_rad(42) / 2 + + def __init__(self, width, height): + """Initialize the MDVS camera. + + Args: + width (int): width of image to be resized to + height (int): height of image to be resized to + """ + self.width = width + self.height = height + + # Enumerate camera devices + DevList = mvsdk.CameraEnumerateDevice() + nDev = len(DevList) + if nDev < 1: + print("No camera was found!") + return + + for i, DevInfo in enumerate(DevList): + print( + "{}: {} {}".format( + i, + DevInfo.GetFriendlyName(), + DevInfo.GetPortType())) + i = 0 if nDev == 1 else int(input("Select camera: ")) + DevInfo = DevList[i] + print(DevInfo) + + # Initialize connection to camera + self.cam = 0 + try: + self.cam = mvsdk.CameraInit(DevInfo, -1, -1) + except mvsdk.CameraException as e: + print("CameraInit Failed({}): {}".format(e.error_code, e.message)) + return + + # Get camera capability (tech specs) + cap = mvsdk.CameraGetCapability(self.cam) + + # Test if the camera is a mono camera or color camera + monoCamera = (cap.sIspCapacity.bMonoSensor != 0) + + # Set camera output format based on mono or color + if monoCamera: + mvsdk.CameraSetIspOutFormat( + self.cam, mvsdk.CAMERA_MEDIA_TYPE_MONO8) + else: + mvsdk.CameraSetIspOutFormat(self.cam, mvsdk.CAMERA_MEDIA_TYPE_BGR8) + + # Set camera trigger mode to continuous grab + mvsdk.CameraSetTriggerMode(self.cam, 0) + + # Set to manual exposure mode and set exposure time to 30ms + mvsdk.CameraSetAeState(self.cam, 0) + mvsdk.CameraSetExposureTime(self.cam, 30 * 1000) + + # Calls SDK internal thread to start grabbing images + # If in trigger mode, the image grabbing won't start until a 
trigger + # frame is received + mvsdk.CameraPlay(self.cam) + + # Compute how much buffer is needed to store the image + # Use maximum supported resolution of the camera to compute + FrameBufferSize = cap.sResolutionRange.iWidthMax * \ + cap.sResolutionRange.iHeightMax * (1 if monoCamera else 3) + + # Allocate RGB buffer, used to store the image converted from RAW by ISP + # Remark: RAW data is transferred to PC, and then converted to RGB by software ISP + # If it is a monochrome camera, the format does not need to be converted + # but the ISP still has other processing, so this buffer is still + # needed + self.frame_buffer = mvsdk.CameraAlignMalloc(FrameBufferSize, 16) + + def get_frame(self): + """Call to get a frame from the camera. + + Raises: + Exception: raised when video file is exhausted + + Returns: + np.ndarray: RGB image frame + """ + # Grab one frame from camera + try: + pRawData, FrameHead = mvsdk.CameraGetImageBuffer(self.cam, 200) + mvsdk.CameraImageProcess( + self.cam, pRawData, self.frame_buffer, FrameHead) + mvsdk.CameraReleaseImageBuffer(self.cam, pRawData) + + # At this point the frame is stored in pFrameBuffer. 
+ # For color cameras, pFrameBuffer = RGB data, and formono cameras, + # pFrameBuffer = 8-bit grayscale data + frame_data = ( + mvsdk.c_ubyte * + FrameHead.uBytes).from_address( + self.frame_buffer) + + # Convert C array to numpy array + frame = np.frombuffer(frame_data, dtype=np.uint8) + frame = frame.reshape( + (FrameHead.iHeight, + FrameHead.iWidth, + 1 if FrameHead.uiMediaType == mvsdk.CAMERA_MEDIA_TYPE_MONO8 else 3)) + + frame = cv2.resize( + frame, (self.width, self.height), interpolation=cv2.INTER_LINEAR) + except mvsdk.CameraException as e: + if e.error_code != mvsdk.CAMERA_STATUS_TIME_OUT: + print( + "CameraGetImageBuffer failed({}): {}".format( + e.error_code, e.message)) + + def __del__(self): + """Clean up MDVS driver connection and buffer.""" + # Shut down SDK camera internal thread + mvsdk.CameraUnInit(self.cam) + + # Release frame buffer + mvsdk.CameraAlignFree(self.frame_buffer) From c33ecdf0c70a6ec0bbba9359a2db136273e4a0a0 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Tue, 14 Mar 2023 23:21:15 -0500 Subject: [PATCH 32/74] add missing frame return --- Camera/mdvs.py | 1 + 1 file changed, 1 insertion(+) diff --git a/Camera/mdvs.py b/Camera/mdvs.py index 04ffdb0..59ab454 100644 --- a/Camera/mdvs.py +++ b/Camera/mdvs.py @@ -125,6 +125,7 @@ def get_frame(self): frame = cv2.resize( frame, (self.width, self.height), interpolation=cv2.INTER_LINEAR) + return frame except mvsdk.CameraException as e: if e.error_code != mvsdk.CAMERA_STATUS_TIME_OUT: print( From ba54266afc85d345221b25223be3585f1ce74ecb Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sun, 19 Mar 2023 13:21:59 -0500 Subject: [PATCH 33/74] added stm->jetson comm protocol docs --- docs/comm_protocol.md | 34 +++++++++++++++++++++++++++++++--- 1 file changed, 31 insertions(+), 3 deletions(-) diff --git a/docs/comm_protocol.md b/docs/comm_protocol.md index 65afd9e..a520dc7 100644 --- a/docs/comm_protocol.md +++ b/docs/comm_protocol.md @@ -29,8 +29,10 @@ decoding are sent, they need to be encoded in little 
endianness. However, for st layouts are not affected by endianness. ### HEADER + 2 ASCII chars. Current options are 'ST' and 'MY', which stands for "Search Target" and "Move Yoke". + - **ST**: The "Search Target" mode means either 1) the AutoAim algorithm is not engaged or 2) the AutoAim algorithm is engaged but the target is not found. In the former case, the STM32 board should simply ignore input from the Jetson board. In the latter case, the STM32 @@ -69,6 +71,32 @@ PACK_END (i.e., CRC is computed for the first 14 bytes up to end to REL_PITCH). # Protocol from STM32 to Jetson -TODO: to be implemented. The first implementation should tell Jetson what is the color of -the armor of our own robots. However it should be implemented in a generic manner so that -subsequent implementations can easily add other useful information. +## Packet Struct + +| Name | Content | Size | +| ------------- | ------------------------- | ------- | +| HEADER | 2 ASCII char | 2 bytes | +| MY_COLOR | uint8_t ENUM | 1 byte | +| CRC_CHECKSUM | uint8_t checksum | 1 bytes | +| PACK_END | 2 ASCII char | 2 bytes | +| TOTAL | | 6 bytes | + +Following the same convention as above, the memory layout of the STM32 is little-endian. + +### HEADER + +2 ASCII chars. Now the header can only be 'HD'. + +### MY_COLOR + +uint8_t ENUM. This is the color of the robot. 0 stands for red, 1 stands for blue. + +### CRC_CHECKSUM + +uint8_t checksum. This is the CRC checksum of the packet. The CRC standard used +is the MAXIM_DOW standard. The CRC checksum is calculated on the packet contents BEFORE the +PACK_END (i.e., CRC is computed for bytes preceeding, but not including, the CRC_CHECKSUM). + +### PACK_END + +2 ASCII chars. This is the end of the packet denoted by ASCII characters 'ED'. 
From 15af08e876f17ac49e41a61788b7cb65c3414397 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sun, 19 Mar 2023 13:35:45 -0500 Subject: [PATCH 34/74] implemented jetson receiving code w/ test case --- Communication/communicator.py | 129 ++++++++++++++++++++++++---------- 1 file changed, 93 insertions(+), 36 deletions(-) diff --git a/Communication/communicator.py b/Communication/communicator.py index 6d441b8..6437f04 100644 --- a/Communication/communicator.py +++ b/Communication/communicator.py @@ -5,6 +5,9 @@ import time import threading +# STM32 to Jetson packet size +STJ_PACKET_SIZE = 6 + class UARTCommunicator: """USB-TTL-UART communicator for Jetson-STM32 communication.""" @@ -14,7 +17,7 @@ def __init__( cfg, crc_standard=crc.Crc8.MAXIM_DOW, endianness='little', - buffer_size=10): + buffer_size=STJ_PACKET_SIZE * 100): """Initialize the UART communicator. Args: @@ -34,9 +37,11 @@ def __init__( self.circular_buffer = [] self.buffer_size = buffer_size - self.enemy_color = self.cfg.DEFAULT_ENEMY_TEAM - self.stm32_color = 'red' if self.enemy_color == 'blue' else 'blue' + self.stm32_state_dict = { + 'my_color': 'red' if self.cfg.DEFAULT_ENEMY_TEAM == 'blue' else 'blue', + 'my_color_str': self.cfg.DEFAULT_ENEMY_TEAM.lower()} + self.parsed_packet_cnt = 0 self.seq_num = 0 def is_valid(self): @@ -114,6 +119,70 @@ def guess_uart_device_(): return serial_port + def packet_search(self): + """Parse internal circular buffer.""" + start_idx = 0 + while start_idx <= len(self.circular_buffer) - STJ_PACKET_SIZE: + header_letters = ( + self.circular_buffer[start_idx], self.circular_buffer[start_idx + 1]) + if header_letters == (ord('H'), ord('D')): + # Try to parse + possible_packet = self.circular_buffer[start_idx:start_idx + STJ_PACKET_SIZE] + ret_dict = self.try_parse_one(possible_packet) + if ret_dict is not None: + # Successfully parsed one + self.parsed_packet_cnt += 1 + self.stm32_state_dict = ret_dict + # Remove parsed bytes from the circular buffer + self.circular_buffer = 
self.circular_buffer[start_idx + + STJ_PACKET_SIZE:] + start_idx = 0 + else: + start_idx += 1 + + def try_parse_one(self, possible_packet): + """ + Parse a possible packet. + + For details on the struct of the packet, refer to docs/comm_protocol.md + + Args: + possible_packet (list): a list of bytes + + Returns: + dict: a dictionary of parsed data; None if parsing failed + """ + assert len(possible_packet) == STJ_PACKET_SIZE + assert possible_packet[0] == ord('H') + assert possible_packet[1] == ord('D') + + # Check packet end + if possible_packet[-2] != ord('E') or possible_packet[-1] != ord('D'): + return None + + # Compute checksum + crc_checksum = self.crc_calculator.checksum( + bytes(possible_packet[:-3])) + if crc_checksum != possible_packet[-3]: + return None + + # Valid packet + + # 0 for RED; 1 for BLUE + my_color_int = int(possible_packet[2]) + + if my_color_int == 0: + my_color = 'red' + enemy_color = 'blue' + else: + my_color = 'blue' + enemy_color = 'red' + + return { + 'my_color': my_color, + 'enemy_color': enemy_color, + } + def create_packet(self, header, yaw_offset, pitch_offset): """ Create CRC-checked packet from user input. @@ -170,39 +239,8 @@ def create_packet(self, header, yaw_offset, pitch_offset): return packet def get_current_stm32_state(self): - """Read from buffer from STM32 to Jetson and return the current state. 
- - TODO: - - Decode packet sent from the STM32 controller - - If a robot is revived, the serial port might get garbage value in between - - implement a proper CRC-verified packet decoder - - Returns: - dict: a dictionary containing the current state of the STM32 - """ - blue_cnt = 0 - red_cnt = 0 - - for read_byte in self.circular_buffer: - if read_byte == ord('R'): - red_cnt += 1 - if read_byte == ord('B'): - blue_cnt += 1 - - if blue_cnt > red_cnt: - self.stm32_color = 'blue' - self.enemy_color = 'red' - - if red_cnt > blue_cnt: - self.stm32_color = 'red' - self.enemy_color = 'blue' - - ret_dict = { - 'my_color': self.stm32_color, - 'enemy_color': self.enemy_color, - } - - return ret_dict + """Read from buffer from STM32 to Jetson and return the current state.""" + return self.stm32_state_dict if __name__ == '__main__': @@ -213,6 +251,25 @@ def get_current_stm32_state(self): sys.path.append(os.path.join(os.path.dirname(__file__), '..')) import config uart = UARTCommunicator(config) + + print("Starting packet sending test.") for i in range(1000): time.sleep(0.005) # simulate 200Hz uart.process_one_packet(config.SEARCH_TARGET, 0.0, 0.0) + + print("Packet sending test complete.") + print("You should see the light change from bllue to green on type C board.") + print("Starting packet receiving test.") + + while True: + if uart.parsed_packet_cnt == 1000: + print("Receiver successfully parsed exactly 1000 packets.") + break + if uart.parsed_packet_cnt > 1000: + print("Repeatedly parsed one packet?") + break + uart.try_read_one() + uart.packet_search() + time.sleep(0.001) + + print("Packet receiving test complete.") From 91ad820c60ad9721da0c8fa73442ec211675b2a4 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sun, 19 Mar 2023 13:43:20 -0500 Subject: [PATCH 35/74] add multi-threading for efficient jetson packet parsing --- Communication/communicator.py | 42 +++++++++++++++++++++++++++++++++-- 1 file changed, 40 insertions(+), 2 deletions(-) diff --git 
a/Communication/communicator.py b/Communication/communicator.py index 6437f04..476d510 100644 --- a/Communication/communicator.py +++ b/Communication/communicator.py @@ -4,6 +4,7 @@ import crc import time import threading +from copy import deepcopy # STM32 to Jetson packet size STJ_PACKET_SIZE = 6 @@ -44,12 +45,40 @@ def __init__( self.parsed_packet_cnt = 0 self.seq_num = 0 + self.state_dict_lock = threading.Lock() + + def start_listening(self): + """Start a thread to listen to the serial port.""" + self.listen_thread = threading.Thread(target=self.listen_) + self.listen_thread.start() + + def listen_(self, interval=0.001): + """ + Listen to the serial port. + + This function updates circular_buffer and stm32_state_dict. + + TODO: test this function on real jetson / STM32! + + Args: + interval (float): interval between two reads + """ + while True: + self.try_read_one() + self.packet_search() + time.sleep(interval) + def is_valid(self): """Return if communicator is valid.""" return self.serial_port is not None def try_read_one(self): - """Try to read one packet from the serial port and store to internal buffer.""" + """ + Try to read one packet from the serial port and store to internal buffer. + + Returns: + bool: True if a packet is read; False otherwise + """ # Read from serial port, if any packet is waiting if self.serial_port is not None: if (self.serial_port.inWaiting() > 0): @@ -63,6 +92,10 @@ def try_read_one(self): self.circular_buffer = self.circular_buffer[1:] self.circular_buffer.append(c) + return True + else: + return False + def process_one_packet(self, header, yaw_offset, pitch_offset): """Process a batch of numbers into a CRC-checked packet and send it out. 
@@ -132,7 +165,9 @@ def packet_search(self): if ret_dict is not None: # Successfully parsed one self.parsed_packet_cnt += 1 + self.state_dict_lock.acquire() self.stm32_state_dict = ret_dict + self.state_dict_lock.release() # Remove parsed bytes from the circular buffer self.circular_buffer = self.circular_buffer[start_idx + STJ_PACKET_SIZE:] @@ -240,7 +275,10 @@ def create_packet(self, header, yaw_offset, pitch_offset): def get_current_stm32_state(self): """Read from buffer from STM32 to Jetson and return the current state.""" - return self.stm32_state_dict + self.state_dict_lock.acquire() + ret_dict = deepcopy(self.stm32_state_dict) + self.state_dict_lock.release() + return ret_dict if __name__ == '__main__': From fb47a73350a67326b6495e9e8831a2351bfa602e Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sun, 19 Mar 2023 14:02:51 -0500 Subject: [PATCH 36/74] update linting instruction to avoid linting mdvs sdk.py --- README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 16595af..9008db4 100644 --- a/README.md +++ b/README.md @@ -25,9 +25,11 @@ commits to check your coding style. To automatically fix your code to autopep8 standard, you can run ```bash -autopep8 --in-place --aggressive --aggressive --recursive . +autopep8 --in-place --aggressive --aggressive xxx.py ``` +Remember to change `xxx.py` to file you want to modify. 
+ ## Data preparing We recorded some demo videos to demo auto-aiming capabilities of our robots, From 842273d05f203fb84e376b0e3073d06005015583 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sun, 19 Mar 2023 14:03:20 -0500 Subject: [PATCH 37/74] now camera config takes in cfg node; make exposure time a param --- Camera/mdvs.py | 21 ++++++++++++--------- Camera/read_from_file.py | 10 +++++----- config.py | 3 +++ vision.py | 2 +- 4 files changed, 21 insertions(+), 15 deletions(-) diff --git a/Camera/mdvs.py b/Camera/mdvs.py index 59ab454..493e742 100644 --- a/Camera/mdvs.py +++ b/Camera/mdvs.py @@ -16,19 +16,22 @@ class mdvs_camera: https://mindvision.com.cn/rjxz/list_12.aspx?lcid=139 """ - # Needs to be calibrated per camera - YAW_FOV_HALF = Utils.deg_to_rad(42) / 2 - PITCH_FOV_HALF = Utils.deg_to_rad(42) / 2 + # Computed using the tool at https://mindvision.com.cn/jtxx/list_108.aspx?lcid=21&lcids=1656 + # Config: 4mm lens, MV-SUA133GC + YAW_FOV_HALF = Utils.deg_to_rad(65.238) / 2 + PITCH_FOV_HALF = Utils.deg_to_rad(54.225) / 2 - def __init__(self, width, height): + def __init__(self, cfg): """Initialize the MDVS camera. 
Args: - width (int): width of image to be resized to - height (int): height of image to be resized to + cfg (python object): shared config object """ - self.width = width - self.height = height + self.cfg = cfg + self.width = self.cfg.IMG_WIDTH + self.height = self.cfg.IMG_HEIGHT + + self.exposure_time = int(self.cfg.EXPOSURE_TIME) # in ms # Enumerate camera devices DevList = mvsdk.CameraEnumerateDevice() @@ -73,7 +76,7 @@ def __init__(self, width, height): # Set to manual exposure mode and set exposure time to 30ms mvsdk.CameraSetAeState(self.cam, 0) - mvsdk.CameraSetExposureTime(self.cam, 30 * 1000) + mvsdk.CameraSetExposureTime(self.cam, self.exposure_time * 1000) # Calls SDK internal thread to start grabbing images # If in trigger mode, the image grabbing won't start until a trigger diff --git a/Camera/read_from_file.py b/Camera/read_from_file.py index 3a2d163..5a56d51 100644 --- a/Camera/read_from_file.py +++ b/Camera/read_from_file.py @@ -19,15 +19,15 @@ class fake_camera: YAW_FOV_HALF = Utils.deg_to_rad(42) / 2 PITCH_FOV_HALF = Utils.deg_to_rad(42) / 2 - def __init__(self, width, height): + def __init__(self, cfg): """Initialize fake camera. 
Args: - width (int): width of image to be resized to - height (int): height of image to be resized to + cfg (python object): shared config object """ - self.width = width - self.height = height + self.cfg = cfg + self.width = self.cfg.IMG_WIDTH + self.height = self.cfg.IMG_HEIGHT assert len(sys.argv) == 2 # main py file; video file video_path = sys.argv[1] diff --git a/config.py b/config.py index 9a72058..4679fad 100644 --- a/config.py +++ b/config.py @@ -11,6 +11,9 @@ AUTOAIM_CAMERA = fake_camera +# This param needs to be tuned per arena / camera setup +EXPOSURE_TIME = 30 + # Compute some constants and define camera to use IMG_HEIGHT = 360 IMG_WIDTH = 640 diff --git a/vision.py b/vision.py index 9ff96e9..5f52ec8 100644 --- a/vision.py +++ b/vision.py @@ -22,7 +22,7 @@ def main(): communicator = UARTCommunicator(config) - autoaim_camera = config.AUTOAIM_CAMERA(config.IMG_WIDTH, config.IMG_HEIGHT) + autoaim_camera = config.AUTOAIM_CAMERA(config) if communicator.is_valid(): print("OPENED SERIAL DEVICE AT: " + communicator.serial_port.name) From 57d8842ec95c52bf9d4f0f4fd40e50848014ce40 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sun, 19 Mar 2023 14:04:48 -0500 Subject: [PATCH 38/74] fix a minor issue of inconsistent dict id --- Communication/communicator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Communication/communicator.py b/Communication/communicator.py index 476d510..72e9580 100644 --- a/Communication/communicator.py +++ b/Communication/communicator.py @@ -40,7 +40,7 @@ def __init__( self.stm32_state_dict = { 'my_color': 'red' if self.cfg.DEFAULT_ENEMY_TEAM == 'blue' else 'blue', - 'my_color_str': self.cfg.DEFAULT_ENEMY_TEAM.lower()} + 'enemy_color': self.cfg.DEFAULT_ENEMY_TEAM.lower()} self.parsed_packet_cnt = 0 self.seq_num = 0 From 4e69b536fae6f7ab390159ee015804912c86ad36 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sun, 19 Mar 2023 14:10:07 -0500 Subject: [PATCH 39/74] update TODO --- README.md | 2 +- 1 file changed, 1 insertion(+), 
1 deletion(-) diff --git a/README.md b/README.md index 9008db4..c71172d 100644 --- a/README.md +++ b/README.md @@ -47,7 +47,7 @@ Please follow instruction from the RMCV101 repo [here](https://github.com/illini - Implement a more robust tracker (e.g., EKF / Kalman) - Implement depth estimators (either use D455 or estimate from monocular cameras) - Optimize code efficiency on Jetson -- Implememt MDVS camera driver in Python +- Update building instruction for getting video files / MDVS SDK ## File Structure From abab3989c23429465d0223663b54b2f42ec07373 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sun, 19 Mar 2023 14:14:06 -0500 Subject: [PATCH 40/74] added a base class for cameras --- Camera/camera_base.py | 25 +++++++++++++++++++++++++ Camera/mdvs.py | 10 ++++------ Camera/read_from_file.py | 8 +++----- 3 files changed, 32 insertions(+), 11 deletions(-) create mode 100644 Camera/camera_base.py diff --git a/Camera/camera_base.py b/Camera/camera_base.py new file mode 100644 index 0000000..d4df09a --- /dev/null +++ b/Camera/camera_base.py @@ -0,0 +1,25 @@ +"""Base class for all camera implementations.""" +from abc import abstractmethod + +class CameraBase: + """Base class for all camera implementations.""" + + def __init__(self, cfg): + """Initialize the camera. + + Args: + cfg (python object): shared config object + """ + self.cfg = cfg + self.width = self.cfg.IMG_WIDTH + self.height = self.cfg.IMG_HEIGHT + self.exposure_time = int(self.cfg.EXPOSURE_TIME) + + @abstractmethod + def get_frame(self): + """Call to get a frame from the camera. + + Returns: + np.ndarray: RGB image frame + """ + raise NotImplementedError diff --git a/Camera/mdvs.py b/Camera/mdvs.py index 493e742..896deb6 100644 --- a/Camera/mdvs.py +++ b/Camera/mdvs.py @@ -5,8 +5,10 @@ import time import Utils +from Camera.camera_base import CameraBase -class mdvs_camera: + +class mdvs_camera(CameraBase): """MDVS camera driver. 
Adapted from official MDVS SDK example code, which is located at @@ -27,11 +29,7 @@ def __init__(self, cfg): Args: cfg (python object): shared config object """ - self.cfg = cfg - self.width = self.cfg.IMG_WIDTH - self.height = self.cfg.IMG_HEIGHT - - self.exposure_time = int(self.cfg.EXPOSURE_TIME) # in ms + super().__init__(cfg) # Enumerate camera devices DevList = mvsdk.CameraEnumerateDevice() diff --git a/Camera/read_from_file.py b/Camera/read_from_file.py index 5a56d51..186c356 100644 --- a/Camera/read_from_file.py +++ b/Camera/read_from_file.py @@ -5,10 +5,10 @@ import cv2 import Utils -# TODO: make an abstract camera base class +from Camera.camera_base import CameraBase -class fake_camera: +class fake_camera(CameraBase): """ Fake camera class that mimics the behavior of the real camera. @@ -25,9 +25,7 @@ def __init__(self, cfg): Args: cfg (python object): shared config object """ - self.cfg = cfg - self.width = self.cfg.IMG_WIDTH - self.height = self.cfg.IMG_HEIGHT + super().__init__(cfg) assert len(sys.argv) == 2 # main py file; video file video_path = sys.argv[1] From a610dd3f43e89193293a0abe7bd71a4992f809df Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sun, 19 Mar 2023 14:15:03 -0500 Subject: [PATCH 41/74] fix linting --- Camera/camera_base.py | 1 + 1 file changed, 1 insertion(+) diff --git a/Camera/camera_base.py b/Camera/camera_base.py index d4df09a..fee02a4 100644 --- a/Camera/camera_base.py +++ b/Camera/camera_base.py @@ -1,6 +1,7 @@ """Base class for all camera implementations.""" from abc import abstractmethod + class CameraBase: """Base class for all camera implementations.""" From 953a811e2393b0de225635c9e176dfc7ca3b19ac Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sun, 19 Mar 2023 14:29:42 -0500 Subject: [PATCH 42/74] update linting instruction to fix max_line_len and exclude SDK --- Communication/communicator.py | 15 +++++---------- README.md | 4 +--- 2 files changed, 6 insertions(+), 13 deletions(-) diff --git a/Communication/communicator.py 
b/Communication/communicator.py index 72e9580..bbb2e07 100644 --- a/Communication/communicator.py +++ b/Communication/communicator.py @@ -83,8 +83,7 @@ def try_read_one(self): if self.serial_port is not None: if (self.serial_port.inWaiting() > 0): # read the bytes and convert from binary array to ASCII - byte_array = self.serial_port.read( - self.serial_port.inWaiting()) + byte_array = self.serial_port.read(self.serial_port.inWaiting()) for c in byte_array: if len(self.circular_buffer) >= self.buffer_size: @@ -169,8 +168,7 @@ def packet_search(self): self.stm32_state_dict = ret_dict self.state_dict_lock.release() # Remove parsed bytes from the circular buffer - self.circular_buffer = self.circular_buffer[start_idx + - STJ_PACKET_SIZE:] + self.circular_buffer = self.circular_buffer[start_idx + STJ_PACKET_SIZE:] start_idx = 0 else: start_idx += 1 @@ -196,8 +194,7 @@ def try_parse_one(self, possible_packet): return None # Compute checksum - crc_checksum = self.crc_calculator.checksum( - bytes(possible_packet[:-3])) + crc_checksum = self.crc_calculator.checksum(bytes(possible_packet[:-3])) if crc_checksum != possible_packet[-3]: return None @@ -255,10 +252,8 @@ def create_packet(self, header, yaw_offset, pitch_offset): discrete_pitch_offset = int(pitch_offset * 1e+6) # TODO: add more sanity check here? - packet += (discrete_yaw_offset & - 0xFFFFFFFF).to_bytes(4, self.endianness) - packet += (discrete_pitch_offset & - 0xFFFFFFFF).to_bytes(4, self.endianness) + packet += (discrete_yaw_offset & 0xFFFFFFFF).to_bytes(4, self.endianness) + packet += (discrete_pitch_offset & 0xFFFFFFFF).to_bytes(4, self.endianness) # Compute CRC crc8_checksum = self.crc_calculator.checksum(packet) diff --git a/README.md b/README.md index c71172d..5b34d15 100644 --- a/README.md +++ b/README.md @@ -25,11 +25,9 @@ commits to check your coding style. 
To automatically fix your code to autopep8 standard, you can run ```bash -autopep8 --in-place --aggressive --aggressive xxx.py +autopep8 --in-place --aggressive --aggressive --max-line-length=100 --exclude="mvsdk.py" --recursive . ``` -Remember to change `xxx.py` to file you want to modify. - ## Data preparing We recorded some demo videos to demo auto-aiming capabilities of our robots, From 8c497707b305aa295bf63b04509f482a079e1835 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Thu, 23 Mar 2023 20:42:17 -0400 Subject: [PATCH 43/74] change inaccurate comment in read_from_file --- Camera/read_from_file.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Camera/read_from_file.py b/Camera/read_from_file.py index 186c356..f0b3db5 100644 --- a/Camera/read_from_file.py +++ b/Camera/read_from_file.py @@ -47,7 +47,7 @@ def get_frame(self): Exception: raised when video file is exhausted Returns: - np.ndarray: RGB image frame + np.ndarray: BGR image frame """ if self.timing is None: self.timing = time.time() From 58e83de8f6f6cd444c2df0dae6f5711b2bdf74f5 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Fri, 24 Mar 2023 17:25:12 -0400 Subject: [PATCH 44/74] implement a simple CV camera for testing recorder --- Camera/simple_cv.py | 39 +++++++++++++++++++++++++++++++++++++++ config.py | 5 +++-- 2 files changed, 42 insertions(+), 2 deletions(-) create mode 100644 Camera/simple_cv.py diff --git a/Camera/simple_cv.py b/Camera/simple_cv.py new file mode 100644 index 0000000..abe5f04 --- /dev/null +++ b/Camera/simple_cv.py @@ -0,0 +1,39 @@ +"""Hosts the fake camera class that reads from a file for testing.""" +import sys +import time +import numpy as np +import cv2 +import Utils + +from Camera.camera_base import CameraBase + + +class simple_cv_camera(CameraBase): + """Simple OpenCV camera.""" + + # Needs to be calibrated per camera + YAW_FOV_HALF = Utils.deg_to_rad(42) / 2 + PITCH_FOV_HALF = Utils.deg_to_rad(42) / 2 + + def __init__(self, cfg): + """Initialize a simple 
camera from defualt camera cam. + + Args: + cfg (python object): shared config object + """ + super().__init__(cfg) + + self.cap = cv2.VideoCapture(0) + assert self.cap.isOpened() + + def get_frame(self): + """Call to get a frame from the camera. + + Returns: + np.ndarray: BGR image frame + """ + ret, frame = self.cap.read() + if not ret: + raise + frame = cv2.resize(frame, (self.width, self.height)) + return frame diff --git a/config.py b/config.py index 4679fad..86b53b0 100644 --- a/config.py +++ b/config.py @@ -1,15 +1,16 @@ """Config file that is shared across the whole project.""" # communication utils -from Camera.read_from_file import fake_camera SEARCH_TARGET = b'ST' MOVE_YOKE = b'MY' PACK_END = b'ED' # Define camera # from Camera.d455 import D455_camera +# from Camera.read_from_file import fake_camera +from Camera.simple_cv import simple_cv_camera -AUTOAIM_CAMERA = fake_camera +AUTOAIM_CAMERA = simple_cv_camera # This param needs to be tuned per arena / camera setup EXPOSURE_TIME = 30 From 93616c71981868f1c99392e55a2f0630be452aa7 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sat, 25 Mar 2023 20:44:35 -0400 Subject: [PATCH 45/74] implemented and tested automatic recorder --- recorder.py | 64 +++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 64 insertions(+) create mode 100644 recorder.py diff --git a/recorder.py b/recorder.py new file mode 100644 index 0000000..8e030fb --- /dev/null +++ b/recorder.py @@ -0,0 +1,64 @@ +""" +Main file for recording data. + +PLease change the base_data_folder constant to a large disk +""" + +import os +import time +import cv2 +import config +import shutil +import datetime + +TIME_PER_CLIP = 10 # in seconds +FPS_LIMIT = 1. 
/ 100 + +def main(): + """Define the main while-true control loop that manages everything.""" + autoaim_camera = config.AUTOAIM_CAMERA(config) + + base_data_folder = "/tmp" + cur_datetime = datetime.datetime.now() + cur_data_folder = os.path.join(base_data_folder, cur_datetime.strftime("%Y-%m-%d_%H-%M-%S")) + os.makedirs(cur_data_folder, exist_ok=True) + + cur_video_writer = None + cur_video_clip_idx = 0 + + while True: + if cur_video_writer is None: + cur_free_space = shutil.disk_usage(base_data_folder).free # in bytes + cur_free_space = cur_free_space / 1024 / 1024 / 1024 # in GB + if cur_free_space < 1: + print("Not enough space left on disk. Idling...") + time.sleep(100) + continue + fourcc = cv2.VideoWriter_fourcc(*'mp4v') + video_fn = os.path.join(cur_data_folder, "video_{}.mp4".format(cur_video_clip_idx)) + cur_video_writer = cv2.VideoWriter( + video_fn, fourcc, 30, (config.IMG_WIDTH, config.IMG_HEIGHT)) + writer_start_time = time.time() + last_frame_time = writer_start_time + + # Time right before capturing + cur_time = time.time() + # We don't want capture to be too frequent + if cur_time - last_frame_time < FPS_LIMIT: + continue + frame = autoaim_camera.get_frame() + last_frame_time = time.time() + assert cur_video_writer is not None + assert frame is not None + cur_video_writer.write(frame) + + # Check if we need to start a new video clip + # TIME_PER_CLIP seconds of data + if last_frame_time - writer_start_time > TIME_PER_CLIP: + cur_video_writer.release() + cur_video_writer = None + cur_video_clip_idx += 1 + + +if __name__ == "__main__": + main() From 4eab78aea127542e7293484dfa25571e9955bfe5 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sat, 25 Mar 2023 20:45:40 -0400 Subject: [PATCH 46/74] fix linting --- config.py | 2 +- recorder.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/config.py b/config.py index 86b53b0..61ff026 100644 --- a/config.py +++ b/config.py @@ -1,6 +1,7 @@ """Config file that is shared across the whole 
project.""" # communication utils +from Camera.simple_cv import simple_cv_camera SEARCH_TARGET = b'ST' MOVE_YOKE = b'MY' PACK_END = b'ED' @@ -8,7 +9,6 @@ # Define camera # from Camera.d455 import D455_camera # from Camera.read_from_file import fake_camera -from Camera.simple_cv import simple_cv_camera AUTOAIM_CAMERA = simple_cv_camera diff --git a/recorder.py b/recorder.py index 8e030fb..6b908b5 100644 --- a/recorder.py +++ b/recorder.py @@ -14,6 +14,7 @@ TIME_PER_CLIP = 10 # in seconds FPS_LIMIT = 1. / 100 + def main(): """Define the main while-true control loop that manages everything.""" autoaim_camera = config.AUTOAIM_CAMERA(config) From c55cb0d89b71e747d4459df93a5e27a7a8ace43e Mon Sep 17 00:00:00 2001 From: cpeng14 <92764093+Franciszabi@users.noreply.github.com> Date: Fri, 7 Apr 2023 22:31:08 -0500 Subject: [PATCH 47/74] Add files via upload basically bullshit, wrote merely for learning kalman filter, totally bugged and needs to be fixed. --- Aiming/Tracking/KF track test.ipynb | 145 ++++++++++++++++++++++++++++ 1 file changed, 145 insertions(+) create mode 100644 Aiming/Tracking/KF track test.ipynb diff --git a/Aiming/Tracking/KF track test.ipynb b/Aiming/Tracking/KF track test.ipynb new file mode 100644 index 0000000..60a9f54 --- /dev/null +++ b/Aiming/Tracking/KF track test.ipynb @@ -0,0 +1,145 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 8, + "id": "2b268ada", + "metadata": {}, + "outputs": [], + "source": [ + "import cv2\n", + "import numpy as np" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "8ff1cbb8", + "metadata": { + "scrolled": true + }, + "outputs": [], + "source": [ + "# 创建卡尔曼滤波器\n", + "kf = cv2.KalmanFilter(4, 2)\n", + "\n", + "# 初始化状态估计值和协方差矩阵\n", + "kf.statePost = np.array([0, 0, 0, 0], dtype=np.float32)\n", + "kf.errorCovPost = np.eye(4, dtype=np.float32)\n", + "\n", + "# 设置卡尔曼滤波器参数\n", + "dt = 1.0 # 采样间隔\n", + "kf.transitionMatrix = np.array([[1, 0, dt, 0],\n", + " [0, 1, 0, dt],\n", + " [0, 0, 1, 
0],\n", + " [0, 0, 0, 1]], dtype=np.float32)\n", + "kf.processNoiseCov = np.array([[1, 0, 0, 0],\n", + " [0, 1, 0, 0],\n", + " [0, 0, 1, 0],\n", + " [0, 0, 0, 1]], dtype=np.float32) * 0.0001\n", + "kf.measurementMatrix = np.array([[1, 0, 0, 0],\n", + " [0, 1, 0, 0]], dtype=np.float32)\n", + "kf.measurementNoiseCov = np.array([[1, 0],\n", + " [0, 1]], dtype=np.float32) * 1" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "9c9cbede", + "metadata": {}, + "outputs": [], + "source": [ + "# 打开摄像头\n", + "cap = cv2.VideoCapture(0)\n", + "\n", + "# 读取一帧图像\n", + "ret, frame = cap.read()\n", + "\n", + "# 将图像转换为灰度图\n", + "gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)\n", + "\n", + "# 检测目标,提取目标的中心点坐标\n", + "x, y, w, h = cv2.selectROI(\"SelectROI\", frame, fromCenter=False, showCrosshair=True)\n", + "\n", + "while True:\n", + " # 读取一帧图像\n", + " ret, frame = cap.read()\n", + " if not ret:\n", + " break\n", + "\n", + " # 将图像转换为灰度图\n", + " gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)\n", + "\n", + " # 检测目标,提取目标的中心点坐标\n", + " \n", + " cx, cy = x + w/2, y + h/2\n", + "\n", + " # 对中心点坐标进行卡尔曼滤波\n", + " prediction = kf.predict()\n", + " measurement = np.array([cx, cy], dtype=np.float32)\n", + " kf.correct(measurement)\n", + "\n", + " # 绘制目标位置(绿)和卡尔曼滤波器预测的位置(红)\n", + " cv2.circle(frame, (int(cx), int(cy)), 5, (0, 255, 0), -1)\n", + " cv2.circle(frame, (int(prediction[0]), int(prediction[1])), 5, (0, 0, 255), -1)\n", + "\n", + " # 显示结果\n", + " cv2.imshow(\"Kalman Filter\", frame)\n", + "\n", + " # 按下ESC键退出\n", + " if cv2.waitKey(1) == 27:\n", + " break\n", + "\n", + "# 释放摄像头并关闭窗口\n", + "cap.release()\n", + "cv2.destroyAllWindows()\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6d9d6bc8", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "562b9a62", + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "id": 
"16e8b8a4", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.15" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} From 952841f802939fd6884951d56e26aea8aa04afeb Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sat, 15 Apr 2023 19:46:14 -0500 Subject: [PATCH 48/74] Tested working on real gimbal; need filtering --- Aiming/Aim.py | 3 +++ Communication/communicator.py | 48 ++++++++++++++++++++--------------- config.py | 6 ++--- 3 files changed, 33 insertions(+), 24 deletions(-) diff --git a/Aiming/Aim.py b/Aiming/Aim.py index dc27c0f..69c5cba 100644 --- a/Aiming/Aim.py +++ b/Aiming/Aim.py @@ -130,5 +130,8 @@ def get_rotation_angle(self, bbox_center_x, bbox_center_y): (self.CFG.AUTOAIM_CAMERA.YAW_FOV_HALF / self.CFG.IMG_CENTER_X) pitch_diff = (bbox_center_y - self.CFG.IMG_CENTER_Y) * \ (self.CFG.AUTOAIM_CAMERA.PITCH_FOV_HALF / self.CFG.IMG_CENTER_Y) + + yaw_diff = -yaw_diff # counter-clockwise is positive + pitch_diff = pitch_diff # down is positive return yaw_diff, pitch_diff diff --git a/Communication/communicator.py b/Communication/communicator.py index bbb2e07..c93b070 100644 --- a/Communication/communicator.py +++ b/Communication/communicator.py @@ -277,6 +277,7 @@ def get_current_stm32_state(self): if __name__ == '__main__': + TESTING_CRC = True # Testing example if run as main import sys import os @@ -285,24 +286,29 @@ def get_current_stm32_state(self): import config uart = UARTCommunicator(config) - print("Starting packet sending test.") - for i in range(1000): - time.sleep(0.005) # simulate 200Hz - uart.process_one_packet(config.SEARCH_TARGET, 0.0, 0.0) - - print("Packet sending test 
complete.") - print("You should see the light change from bllue to green on type C board.") - print("Starting packet receiving test.") - - while True: - if uart.parsed_packet_cnt == 1000: - print("Receiver successfully parsed exactly 1000 packets.") - break - if uart.parsed_packet_cnt > 1000: - print("Repeatedly parsed one packet?") - break - uart.try_read_one() - uart.packet_search() - time.sleep(0.001) - - print("Packet receiving test complete.") + if TESTING_CRC: + print("Starting packet sending test.") + for i in range(1000): + time.sleep(0.005) # simulate 200Hz + uart.process_one_packet(config.SEARCH_TARGET, 0.0, 0.0) + + print("Packet sending test complete.") + print("You should see the light change from bllue to green on type C board.") + print("Starting packet receiving test.") + + while True: + if uart.parsed_packet_cnt == 1000: + print("Receiver successfully parsed exactly 1000 packets.") + break + if uart.parsed_packet_cnt > 1000: + print("Repeatedly parsed one packet?") + break + uart.try_read_one() + uart.packet_search() + time.sleep(0.001) + + print("Packet receiving test complete.") + else: + while True: + time.sleep(0.005) + uart.process_one_packet(config.SEARCH_TARGET, 0.01, 0.0) diff --git a/config.py b/config.py index 61ff026..5683c92 100644 --- a/config.py +++ b/config.py @@ -8,9 +8,9 @@ # Define camera # from Camera.d455 import D455_camera -# from Camera.read_from_file import fake_camera +from Camera.read_from_file import fake_camera -AUTOAIM_CAMERA = simple_cv_camera +AUTOAIM_CAMERA = fake_camera # This param needs to be tuned per arena / camera setup EXPOSURE_TIME = 30 @@ -22,5 +22,5 @@ IMG_CENTER_X = IMG_WIDTH // 2 IMG_CENTER_Y = IMG_HEIGHT // 2 -DEBUG_DISPLAY = False +DEBUG_DISPLAY = True DEFAULT_ENEMY_TEAM = 'red' From 90e4990fe339ad41c253094bbea1dc7131a3b84c Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sat, 15 Apr 2023 22:18:19 -0500 Subject: [PATCH 49/74] added kalman filter sample --- examples/filtering_sample.ipynb | 246 
++++++++++++++++++++++++++++++++ 1 file changed, 246 insertions(+) create mode 100644 examples/filtering_sample.ipynb diff --git a/examples/filtering_sample.ipynb b/examples/filtering_sample.ipynb new file mode 100644 index 0000000..bbd0e33 --- /dev/null +++ b/examples/filtering_sample.ipynb @@ -0,0 +1,246 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "bba7d274", + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import numpy as np\n", + "import scipy.interpolate\n", + "import matplotlib.pyplot as plt" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "333c60c4", + "metadata": {}, + "outputs": [], + "source": [ + "np.random.seed(42)" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "c48c7b0e", + "metadata": {}, + "outputs": [], + "source": [ + "# Generate simulation data\n", + "\n", + "# Gimbal control freq (in Hz)\n", + "gimbal_freq = 1000\n", + "\n", + "# Actual freq on jetson (in Hz)\n", + "jetson_freq = 50\n", + "\n", + "gimbal_t = np.linspace(0, 1, gimbal_freq)\n", + "jetson_t = np.linspace(0, 1, jetson_freq)\n", + "\n", + "gt_gimbal_signal = np.sin(2 * np.pi * gimbal_t)" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "6aebfa4d", + "metadata": {}, + "outputs": [], + "source": [ + "jetson_noise_std = 0.1\n", + "\n", + "jetson_idx = np.round(np.linspace(0, gimbal_freq - 1, jetson_freq)).astype(int)\n", + "jetson_signal = gt_gimbal_signal[jetson_idx]\n", + "\n", + "jetson_signal = jetson_signal + np.random.normal(scale=jetson_noise_std, size=jetson_signal.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "9436a6d7", + "metadata": {}, + "outputs": [], + "source": [ + "inter_func = scipy.interpolate.interp1d(jetson_t, jetson_signal, kind='previous')\n", + "extrapolated_signal = inter_func(gimbal_t)" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "0ce72da8", + "metadata": { + "scrolled": false + }, + "outputs": [ 
+ { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjEAAAFzCAYAAADPF1gOAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAABMQElEQVR4nO3dd3iUVfr/8fedTu9NOgiKEmmhCSKoCIJdEF3LyqpYVt3vWnFtCPayuv5WVl0VdXXFFbtYEEQBkS4QmoJ0BOmd9PP74xkiQsokZOaZmXxe1zUXU56Z+TDJldw5zznnNuccIiIiItEmzu8AIiIiIqWhIkZERESikooYERERiUoqYkRERCQqqYgRERGRqKQiRkRERKJSgt8Bylrt2rVds2bN/I4hIiIiZWTu3LlbnXN1Dr8/5oqYZs2aMWfOHL9jiIiISBkxszUF3a/TSSIiIhKVVMSIiIhIVFIRIyIiIlEp5ubEiIiIFCY7O5v169eTkZHhdxQpQEpKCo0aNSIxMTGo41XEiIhIubF+/XqqVKlCs2bNMDO/48ghnHNs27aN9evX07x586Ceo9NJIiJSbmRkZFCrVi0VMBHIzKhVq1aJRslUxIiISLmiAiZylfRroyJGREQkjOLj42nfvj1t27blnHPOYefOnUUev2XLFrp27UqHDh2YOnVqeEKG2FVXXcW4ceOO+nVUxIiIiIRRhQoVmD9/PosWLaJmzZo8//zzRR4/adIkUlNT+eGHHzjllFOCeo/c3NyyiBrxVMSIiIj4pHv37mzYsAGAn3/+mf79+9OpUydOOeUUli1bxvz587nzzjv56KOPaN++PQcOHGDChAl0796djh07MnjwYPbu3Qt4O9bfdddddOzYkXfffbfI4x544AE6duxIamoqy5YtA2Dv3r0MHTqU1NRUTjrpJN577z2AQl/noGXLltGlS5f826tXryY1NRWAkSNH0rlzZ9q2bcuwYcNwzpXp56fVSSIiUi49+Mlilvyyu0xf84RjqvLAOScGdWxubi6TJk3i6quvBmDYsGG88MILtGrVipkzZ3LjjTfy9ddfM3LkSObMmcM///lPtm7dykMPPcTEiROpVKkSjz/+OH//+9+5//77AahVqxbz5s1j69atXHjhhYUeV7t2bebNm8fo0aN56qmnePnllxk1ahTVqlUjPT0dgB07dhT7fgDHH388WVlZrFq1iubNm/POO+8wZMgQAG666ab8Y6+44go+/fRTzjnnnLL5sFERI+XFnDGQHsT519RBkDY09HlEpNw6cOAA7du3Z8OGDbRp04a+ffuyd+9epk+fzuDBg/OPy8zMPOK5M2bMYMmSJfTo0QOArKwsunfvnv/4weKhuOMuvPBCADp16sT7778PwMSJExk7dmz+MTVq1ODTTz8t8nUOuvjii3nnnXcYPnw477zzDu+88w4AkydP5oknnmD//v1s376dE088MXaKGDN7FTgb2Oyca1vA4wb8AxgA7Aeucs7NC29KiQnp42BTOtRPLfyYTd5fHypiRMqHYEdMytrBOTH79++nX79+PP/881x11VVUr16d+fPnF/lc5xx9+/bl7bffLvDxSpUqBXVccnIy4E0yzsnJKfX7HTRkyBAGDx7MhRdeiJnRqlUrMjIyuPHGG5kzZw6NGzdmxIgRZb7JoN9zYl4D+hfx+FlAq8BlGPCvMGSSWFU/FYaOL/xSVIEjIlLGKlasyHPPPcfTTz9NxYoVad68Oe+++y7gFQ8LFiw44jndunXju+++Y8WKFQDs27ePn376qdTHHapv376/m2S8Y8eOoF+nZcuWxMfHM2rUqPzRoIMFS+3atdm7d2+
ZrEY6nK9FjHNuCrC9iEPOA95wnhlAdTNrEJ50Ekscjlzn2HUgm/1ZOWTm5JKXV7YTzERESqpDhw6cdNJJvP3227z11lu88sortGvXjhNPPJGPPvroiOPr1KnDa6+9xqWXXspJJ51E9+7d8yfmlua4Q917773s2LGDtm3b0q5dOyZPnlyi1xkyZAhvvvkmF198MQDVq1fn2muvpW3btvTr14/OnTuX4hMqmpX1TOESBzBrBnxayOmkT4HHnHPTArcnAXc55+YU9nppaWluzpxCH5YY9+vuDKb8tIUZK7ez+Jdd7M3MYf2OA4xNGgXAJVn3Ffi8+lVT+FfOfVRMimdqj9fpcWxtWterQnycNsUSiSVLly6lTZs2fseQIhT0NTKzuc65tMOPjYmJvWY2DO90E02aNPE5jYTT7oxs/jFxOa9MW1Xg43WqJHPmCfWovSmZlIQ4bm57LJWTE8jJc2Tn5rF2+37y8hxz1+4gMyOPzJw8Hhq/9Hev0fu4Otx/9gm0qFM5HP8lEREJUqQXMRuAxofcbhS473eccy8BL4E3EhOeaOKXzJxcHv1sGa9NX33EYwNS69PvxPr0alWHGpWSfntgjFeA3HbmcYW/8Jha5DnHGz278PWyzfmv/82PW/jmx28B6NaiJs8O6UD9aill9d8REZFSivQi5mPgJjMbC3QFdjnnNvqcSXzy46Y9DHphOnsyfj+T/qqTm3Hrma2pmhJc6/aixJnRq3UderWuw4hzTyQ3z/H2rLU8/vky9mTmMGPldro9OgmAJwedxOC0xsW8ooiIhIrfS6zfBnoDtc1sPfAAkAjgnHsB+AxvefUKvCXWWvsaK0qwb8vkKgMZOmb27+4+/fi6PHNJ+zIpXIoSH2dc3q0pl3drSl6eY/Q3K3hqgjcz/45xC7lj3EJu6nMst53ZWk3lRETCzNcixjl3aTGPO+DPYYoj4RTEvi05vyxgzqptDM2qm3/f3y9ux4UdG4Uj4RHi4oybTmvFTae14qdf93DRv7xRoX9OXsE/J6/g1r6tufm0Y1XMiIiESaSfTpJYdnDflsNs2pVBt0cn5a8oAnhnWDe6tqgVznRFal2vCukj+rF1byb9n53K1r2Z/P2rn/j7Vz/xyh/TOL1NPb8jiojEPL83uxPJl5vnuOb1OflzTsArFlY/NjCiCphD1a6czJx7z2DG3afn33f163NoNnw8W/ceuWW4iAhA5cqFr3bcuXMno0ePDmOa4r322mvcdNNNfsc4gkZiJCJMXb6FK16ZlX/7xt4t6bYxyMIlmPk1xbUcOEr1q6Ww+rGBzFi5jUtemgFA2kMTGdqjGfeffYJOMYlI0A4WMTfeeKPfUSKeRmLEV7l5jv7PTskvYKqkJLB0ZH/u7H988C9ycH5NUeqnes0dQ6xbi1qsenQAF6d583bGfLea5nd/xpY9GpURkSM9+eSTdO7cmZNOOokHHngAgOHDh/Pzzz/Tvn177rjjDjZu3EivXr1o3749bdu2ZerUqQC8/fbbpKam0rZtW+66667816xcuTL33HMP7dq1o1u3bvz666+/e8+8vDyaNWvGzp078+9r1aoVv/76K5988gldu3alQ4cOnHHGGUc8N9JoJEZ8sy8rhxP/9ln+7dGXdWRAaim7ShQyv8YPZsYTg9rx176t6f7o1wB0fngi9519Alf3bO5zOhHJ9/nw4v8AKqn6qXDWY0EdOmHCBJYvX86sWbNwznHuuecyZcoUHnvsMRYtWpTfDPLpp5+mX79+3HPPPeTm5rJ//35++eUX7rrrLubOnUuNGjU488wz+fDDDzn//PPZt28f3bp14+GHH+bOO+/k3//+N/fee2/++8bFxXHeeefxwQcfMHToUGbOnEnTpk2pV68ePXv2ZMaMGZgZL7/8Mk888QRPP/102X5GZUhFjPhizbZ9bNztNQerUTGR7+8+nZTE+CMP3JQOYwYW/WIhPlVUWg2qVWDVowO498NFvDVzLaM+XcKr01Yx+fb
eJCVoEFSkvJswYQITJkygQ4cOAOzdu5fly5cfsfN8586d+dOf/kR2djbnn38+7du35+uvv6Z3797UqVMHgMsuu4wpU6Zw/vnnk5SUxNlnnw1Ap06d+Oqrr4547yFDhjBy5EiGDh3K2LFj85s2rl+/niFDhrBx40aysrJo3jyy//BSESNhlZ2bR7sHJ/AqXgHztwHHM6xXy4IPDvb0T5hOFZWGmfHwBalc3q0pZ/1jKht2HqD1vZ8z+fbeNK9dye94IuVbkCMmoeKc4+677+a666773f2rV6/+3e1evXoxZcoUxo8fz1VXXcWtt95KtWrVCn3dxMTE/Hl48fHx5OTkHHFM9+7dWbFiBVu2bOHDDz/MH6m5+eabufXWWzn33HP55ptvGDFixNH9J0NMRYyEzcGl0wAkQfvG1elWWAEDkDbUu8SANg2qsuLhs2g74ksysvPo89Q3PDW4HYM6+bPnjYj4r1+/ftx3331cdtllVK5cmQ0bNpCYmEiVKlXYs2dP/nFr1qyhUaNGXHvttWRmZjJv3jzuuusubrnlFrZu3UqNGjV4++23ufnmm4N+bzPjggsu4NZbb6VNmzbUquUtpNi1axcNGzYE4PXXXy/b/3AIqIiRsJj84+b8XXerJCfQpWlN4srZip2E+DiWjuyff3rp9ncXMGvVNp4Y1M7vaCISRjk5OSQnJ3PmmWeydOlSunfvDngTct98801atmxJjx49aNu2LWeddRZt27blySefJDExkcqVK/PGG2/QoEEDHnvsMfr06YNzjoEDB3LeeeeVKMeQIUPo3Lkzr732Wv59I0aMYPDgwdSoUYPTTjuNVasKbq4bKczbFDd2pKWluTlz5vgdQw7x/OQVPPnljwAM6tSIpwa3+22eS4RMxgXKNlMxy7537M/ix1/38FHuyXxX7Rwm396b+LjyVdSJ+GHp0qW0adPG1wwLFizg2muvZdasWcUfXA4V9DUys7nOubTDj9XsQgkZ5xzXvD47v4B5/KJUr4ApD4pZ9l2jYhJdUjZwXvx01m7fT8u/fUZGdm4YA4qIH1544QUuvfRSHnroIb+jxASdTpKQcM7R95kprNi8F4AP/9yD9o2r+xsq3IpZ9h03ZiCdnQOvnyTH3/cF8+/vS/WKSWEKKCLhdv3113P99df7HSNmqIiR4AXZedrhuP/nNqzIOQ2AqXf2oXHNiqFOF5Xizfj5kQG0ue8LsnLzaD/yK76/+zQaVKvgdzQRkYinIkY8wRQoa6Z5/zbtWeghec6xd80PDIzbzn84TSMLQYiPM5aN6s8Zz3zLyi376P7o1yr8RELIOadWIBGqpPN0VcSI5+AcjqI2jWva09uPpZBlzxnZuRx/3xf53aeXjOxHxSR9iwUjLs74+rbeXPHKTKYu38opT0zmq7/2olW9KkGPgBX1tRERT0pKCtu2baNWrVoqZCKMc45t27aRkpIS9HP0G0Z+cxRb92fmeAXMQV2a1SROBUyJ/efqrtz033l8unAjfZ+Zwud/OYU2wRSYBycRq4gRKVKjRo1Yv349W7Zs8TuKFCAlJYVGjYLfP0u/ZeSo5eTmcdy9vxUwXZvXxNBfOKX1zz90xLl5jE/fyFn/mMqS5jlULK7ALK41g4gA3m62kb6VvgRPS6zlqOTlOY695/P828sfPksFTBl4/rKOnHlCPQAWbthFRo6WX4uIHE5FjJSac44Wh3ShXjaqP4nx+pYqKy9dmUaf47zmbvPX7WRzoGGmiIh4dDpJSsU5R8/HJ+ffXjKy3++7UBfXfTpCO09HmjFDu7D4kQT2ZOTQ5ZFJLHjgTKpVSPQ7lohIRFARI6Vy8Yvfs2HnAQAWjjjz96uQgukoHamdp4srvkryOmVUpJ3YoBrz1+2ELGj34ASWjuxPhaT4Yp8nIhLrVMRIid3+7gJmr94BwKy/nU7VlMNGBqK1+3RZFlVlXKS1b1ydhOVGTp6jzf1fsOLhs0jQqTsRKedUxEiJ/HvKSsbNXQ/AV3/
tRd2qwa/nj3h+FF/BjPwERnV+fOgsWgbmIJ3yxGSmDz9N+1yISLmmP+UkaFOXb+Hhz5YC8ObVXb2N2KT0UgcFd8opMKoTH2csfrAfABt3ZXDtG+rWLiLlm0ZiJChrt+3nile8tvGPXJBKz1a1fU4UA0ox8lMpOYHv7z6N7o9+zcSlm3l+8gr+3OfYEAUUEYlsGomRYu06kE2vJ72VSEN7NOMPXZv4nKh8a1CtAh/+uQcAT375I9N/3upzIhERf6iIkSJl5+bR7sEJAKQ2rMYD55zocyIBb6LvE4NOAuAP/55JpjbDE5FySEWMFMo5x4B/TM2//fFNPXxMI4e7OK0xF3X0eoz8sG4nuSXs/ioiEu1UxEihHvt8Gcs37wW8zey0EibyPH1xO+pVTQZg9urtJW5jLyISzVTESIGmLd/Ki1NWAvDF/53y+83sJKJMu+u0/OvXvjHXxyQiIuGlIkaO8OvuDC5/ZSYATww6iePrV/U5kRQlMT6Ojk1qADBx6a988MN6nxOJiISHihj5nZzcPLo+MgmACzs05OK0xj4nkmAkxcfRJlBs/vWdBazbvt/nRCIioaciRn7nkpdmAJCSGMffh7T3N4yUSLUKiVx3agvA29E3I1srlkQktqmIkXy/7DrAnDVeT6S59/b1OY2Uxt1ntaFh9QoAnPnMFJ/TiIiElooYAWBfVg5rA6cgPr6pB5WSNZE3Wn1zR28A1m7fz98n/OhvGBGRENJvKiErJ4/0DbsAuP3M1pzUqLq/geSoJMbHMeGvvTjzmSk89/UKeh9fN3/ib1DmjIH0ccUflzooOruVi0jM0EiM0OepbwBvcqj68MSG1vWqcN/ZJwBw4ejpJdvRN32c1zm7KJvSgyt0RERCSCMx5dwr01axYecBSIJ2jatrQ7sYcnXP5rw3dz1LNu6m5+OTmX3PGcE/uX4qDB1f+ONjBh59QBGRo6SRmHJs/Y79jPp0CQDtGlUjXgVMzHn/xpMB2LInk39987PPaUREypaKmHIqOzePno97nalvPu1YKiRqUC4WpSTG821gou/jXyzT/jEiElP0m6ucuv4/3vb0VVMSuO3M42CMz4Hk6G1KL/A0T1NgWt39rN95gNFPT+LBB58iKUF/v4hI9NNPsnLos/SNTFq2GYDpd5/ucxopE6mDvHkshWhUoyIn2BrOi5/ODW+qv5KIxAaNxJQzezKyufGteQC8fGUalbUfTGxIG1rscudKrw6A1duZtGwzXyzaSP+2DcIUTkQkNDQSU84c7It0Rpu6nHFCPZ/TSDjFm9G6bhUArn9zHvsyc3xOJCJydFTElCOvTlvF/ixvv5DRl3XyOY34oWalJHofVweAno9/7XMaEZGjoyKmnNi46wAjA8upv77tVE3sLMdevMIrYHfsz2bsrLU+pxERKT39Jisnuj/q/dV9dc/mtKhT2ec04qfkhHgm3toLgOHvp7N1b6bPiURESkdFTDkw8pMl+dcPbkUv5duxdaswJK0xAL2f/MbfMCIipaQiJsat3rqPV79bBcC0u/r4nEYiyWMXeUuy92bm8PevfvI5jYhIyamIiWG5eY7egeaOw886nkY1KvobSCKKmTHptlMBeG7ScjbuOuBzIhGRktEmITHskc+WAnBp/CSuW/k8rCyiN9Km9CI3S5PY1LJOZW7o3ZJ/ffMz3R/9mlWPDkAdtEQkWmgkJkat2LyHV6Z5p5EebL4U27So6CfUT/V2fZVy544zj8u/PlpNIkUkimgkJgY55zjj71MA+OsZrUlaF+cVKUPH+5xMIlFcnDHljj70enIyT375I9e0zCU5Id7vWCIixdJITAx67PNlAFRIjOcvZ7TyOY1Egya1KnLVyc0AWLB+l79hRESCpCImxmzcdYAXp6wE4Js7evsbRqLKA+d4y+/znGPT7gyf04iIFE9FTIzp9cRkAG45vRX1qqb4nEaiiZnxze29AVi9bR/b92X5G0hEpBgqYmLIP79eTnauA+CvOo0kpdC
sdiVqV04G4ILR3/mcRkSkaJrYG6nmjIH0ccEdmzqI7W0u46kJ3oZlE2/thZkWykrptKxTia17M1mzbT/j5q5nUKdGfkcSESmQryMxZtbfzH40sxVmNryAx68ysy1mNj9wucaPnL5IH+ft3VKcTemQPo4zn/kWgMu6NuHYulVCHE5imWGc0KAqALe/u4B9mTk+JxIRKZhvIzFmFg88D/QF1gOzzexj59ySww59xzl3U9gDRoJglkWPGcjWfZls3evNX3jgnBPDEEyi1qZ0GDOw2GOq1k/llFa1mbp8K398dRbjbjg5PPlERErAz5GYLsAK59xK51wWMBY4z8c8USnPOVZs3gvAuOu7k5SgaU5SiNRBwe3KHNj48IXLOwEwZ80OZq3aHuJwIiIl5+ecmIbAukNurwe6FnDcRWbWC/gJ+Ktzbt3hB5jZMGAYQJMmTUIQNXIt27QHgJNb1iKtWU2f00hESxvqXYJUCXju0g7c8vYPXPzi96x8ZABxcZprJSKRI9L/bP8EaOacOwn4Cni9oIOccy8559Kcc2l16tQJa0A/rdi8h90Z2QC8/Mc0n9NILDq33TG0qFMJgBGfLPY5jYjI7/lZxGwAGh9yu1HgvnzOuW3OuczAzZeBTmHKFvEObS3Qqm5lKiZpoZmExrvXdQfgje/XsGmXNsETkcjh52++2UArM2uOV7xcAvzh0APMrIFzbmPg5rnA0vBGjFyPf/EjAHFm1NrzU9GTNdWhWo5CrcrJ/OX0Vvxj0nLO+ec0Zt9zht+RREQAH0dinHM5wE3Al3jFyf+cc4vNbKSZnRs47BYzW2xmC4BbgKv8SRtZtu3N5IVvvW7DrU6/qvgCRR2q5Sj9X2DzxC17MvnfnCOmpYmI+MLXcxDOuc+Azw677/5Drt8N3B3uXJHuvOe9nVSH9mhGjV4Dodd1PieSWGdmvH/jyVw4ejp3jlvIRa0d8dpQUUR8FukTe+UwE5f8yvodBwD424A2PqeR8qRjkxp0b1ELgJ8Dy/pFRPykIiaK5OTmcc0bcwAYM7QzifH68kl4vXSlN7d++/4s9mVpJ18R8Zd+C0aRkZ96mxmfeExV+hxX1+c0Uh5VSUnk3oHeCGD6hl0453xOJCLlmYqYKPHr7gze+H4NAG/8qYvPaaQ8u+aUFvnX35273sckIlLeqYiJEoNemA7ATX2OpVblZJ/TSHnXrlF1AO4ct5CM7Fx/w4hIuaUd0qLAlJ+2sG67N5n3tjNb+5xGBCokxpMav5axSaNY9dRjtKlfteADUweVqNWBiEhJaCQmwmXl5HHlq7MAGDusG6ZlrRIJUgdRoXF7AHYdyOZAQaMxm9IhfVx4c4lIuaKRmAg3KjCZt2H1CnQLLG8V8V3aUOLShrJm9lruei8dVsKqRwf8vsguahdpEZEyoJGYCJadm8d/ZniTeT+9uafPaUSOdHHab+3PDn6vioiEi4qYCJa+YRcAf+rRnBqVknxOI3IkM8svsO//aLEm+YpIWKmIiVC7DmSTlZsHwPCzjvc5jUjh2jasxqmt6wBw9/vpPqcRkfJERUwEcs6xdNNuAMZc1ZmkBH2ZJLI9d0kHAD74YQPrtu/3OY2IlBf67RiBXpm2CoDkhDj6HK+deSXyVauYyF9O9zpdH2xQKiISaipiIszezBweGr8UgBOOKWTvDZEI9H9neEXM9n1ZzFy5zec0IlIeqIiJMDe8OReAGhWTSI6P9zmNSPDMjA9uPBmAIS/NwKG+SiISWipiIsiqrfuYunwrAK3qVvY5jUjJdWhSg4bVKwCwVnNjRCTEVMRECOcc/Z6dAsCo804kTjvzSpR67wZvNGbjrgxy8vJ8TiMisUxFTIT4aP4vZOV4P/Av79bU5zQipVe/WgqXdvE2wVu+ea/PaUQklqntgB/mjPldT5k8HPVXbWdsEpzQoCr22miv70z9VB9DipTeA+ecyPz50DBjBftf7EfFpGJ+1KhRpIiUgkZi/JA+zitSAjbs8DpUV0q
Kp2pKondn/VTvB7tIFEpJjCf3xItY4pqyMLDzdKHUKFJESkkjMX6pnwpDx7MnI5tTRkwAYNbtp0PVFJ+DiZSN7oNvp8UPJwDwz04dOPukYwo+UI0iRaSUNBLjsytfnQXAhR0bUlcFjMSQuLjf+ird9N8fyMvTkmsRKVsqYny0eus+fli7E4DHLzrJ3zAiIdC2YTXaNPA2bXx24k8+pxGRWKMixkcXv/g9AM8MaUdivL4UEpv+c3UXAJ77egV7MrJ9TiMisUS/OX2yY38Wm/dkAnB++4Y+pxEJndqVk7mgg/c9fsOb83xOIyKxREWMD/Jw/PjrHgDGDuuGaWM7iXFPDPJOl05bsZU12/b5nEZEYoWKGB8cXFJdr2oy3VrU8jmNSOglxscx4hxvpdKFo6f7nEZEYoWKmDDbm5nDhp1eEfPRn3v6nEYkfK7s3gyAbfuymLjkV3/DiEhMUBETZne9txCAWpWSqF9NS6ql/IiLM14b2hmAa96YoyXXInLUVMSE0YadBxi/cCMAzWtX8jmNSPj1Pq4uDQLF+2vTV/sbRkSinoqYMLr4BW9JdcPqFUiI00cv5dM7w7oDMPLTJWTnqsu1iJSefpOGyaINu/LnwjSqUcHnNCL+aVKrIqe0qg3A8PfSizlaRKRwKmLC5KJ/eSsy3ry6K4aWVEv59vxlHQF4b956svM0GiMipaMiJgw+XvALmTl5JCfE0TPwF6hIeVY1JZHrT20JwI+b9vicRkSiVZFFjJk1MrPbzewjM5ttZlPMbLSZDTQzFUBByMrJ45a3fwDgvRtO9jmNSOS4o99xgLftwIHsHJ/TiEg0KrQQMbMxwKtAFvA4cClwIzAR6A9MM7Ne4QgZzZ78chkALepUom3Daj6nEYkc8XGWv5Pvol92+5xGRKJRQhGPPe2cW1TA/YuA980sCWgSmlixYXdGNv+eugr4bUWGiPxmcKdGzPwYcvMcXy7eRL8T6/sdSUSiSKEjMQUVMGZWw8xOCjye5ZxbEcpw0e5v73srLy7s0JA6VZJ9TiMSecyMNvWrAnDdf+aSqw3wRKQEip3XYmbfmFlVM6sJzAP+bWbPhD5adNu46wCfBja2e/C8E31OIxK5qlVIJCne+1E05rtVPqcRkWgSzOTcas653cCFwBvOua7A6aGNFf0uftHb2O6mPsdSJSXR5zQike2EY7zRmIfGLyUrR0uuRSQ4wRQxCWbWALgY+DTEeWLCog27WLfd29jur31b+5xGJPKlJMRzckuvo/sjny31OY2IRItgipiRwJfACufcbDNrASwPbazodslLMwB4bWhn4uO0sZ1IMEYHNsB7bfpqdmdk+5xGRKJBsUWMc+5d59xJzrkbA7dXOucuCn206PTtT1vYm5lDYrzR+7i6fscRiRrVKybxh67egsdhb8zxOY2IRINCl1ib2f8DCl0q4Jy7JSSJolhunuOPr84C4J3rtKRapKRGndeW/85cy4yV21m3fT+Na1b0O5KIRLCiRmLmAHOBFKAj3imk5UB7ICnkyaLQ6MneivO6VZLp2KSGz2lEok98nPHQ+W0B+NNrs31OIyKRrtCRGOfc6wBmdgPQ0zmXE7j9AjA1PPGix/6sHJ7+6icujZ/EA3WXwJgiVqFvSof6qeELJxJF/tClCfd+uIjlm/cyd812OjWt6XckEYlQwUzsrQFUPeR25cB9cohHP/PaC1xVZTYpW5cUfXD9VEgdFIZUItEnLs54/g/eJN+L/vU9zmkDPBEpWFFtBw56DPjBzCYDBvQCRoQyVLTZdSCb/8xYA0CLOpUhLhWGjvc5lUj0GpD6W/uBcXPXMzitsY9pRCRSBbM6aQzQFfgAeB/ofvBUk3iufd1bSXFpl8Ykxqm5t8jRMjM+vbknAHeMW6h2BCJSoGB/42YCG4EdQGt1r/7Nmm37mLV6OwAPnKP2AiJlpW3DapzUyOv8rnYEIlKQYHonXQNMwdvw7sHAvyNCGyt6XBrY2G7EOSeQkhjvcxq
R2PLC5Z0Arx1Bdq7aEYjI7wUzEvMXoDOwxjnXB+gA7AxlqGix+Jdd/LIrA4CrejT3OY1I7DmmegX6HFcHgLveW+hzGhGJNMEUMRnOuQwAM0t2zi0DjgttrOhw0b+mAzBmaGefk4jErucD7Qjen7eBHfuyfE4jIpEkmCJmvZlVBz4EvjKzj4A1oQwVDT5P30hGdh7xcUYftRcQCZmKSQlc16sFANe9OdfnNCISSYpdYu2cuyBwdURgmXU14IuQpopEnw/3NqkDHI4aq7YzNglOPKYqjHn+t+O0kZ1IyW1KhzEDC334Lhy744/n7VWns2bbPprWqhTGcCISqYociTGzeDNbdvC2c+5b59zHzrlyPaa7YecBAJLi46iSnPj7B7WRnUjJpA4qtvCP27SIW+rMB+DyV2aGIZSIRIMiR2Kcc7lm9qOZNXHOrQ1XqIh01mMAHMjKpef93kDUd8NPg+oV/EwlEv3ShnqXoowZSH0cbIJ12w8wc+U2uraoFZ58IhKxgm07sNjMJpnZxwcvoQ4WqZ6a8CMAZ7SpS0MVMCJhY/zWjmDISzPUjkBEgmo7cF+o3tzM+gP/AOKBl51zjx32eDLwBtAJ2AYMcc6tDlWe4uzJyOaVad6mW08MaudXDJFya0BqfeLjjNw8x0fzf+H8Dg39jiQiPiq0iDEzc55vizumNG9sZvHA80BfYD0w28w+ds4d2j3xamCHc+5YM7sEeBwYUpr3KwtXB9oLDElrTM1KSX7FECmfNqVjr53NwqY5pG/YBR+Am18Tw347JnVQ8aemRCRmFHU6abKZ3WxmTQ6908ySzOw0M3sd+ONRvHcXYIVzbmVgovBY4LzDjjkPONinaRxwupkZPvj+523MWuW1Fxh5vtoLiITVIZN/KyUlUCGwO/avuzN/O2ZTOqSP8yOdSLk3+psVfL3s17C/b1Gnk/oDfwLeNrPmeLv0VsArfCYAzzrnfjiK924IrDvk9nq8RpMFHuOcyzGzXUAtYOuhB5nZMGAYQJMmTQiFeWt3APDohakkJ6i9gEhYHTb5t+6uA3R/9GvYCMtvOovE+Lgil2iLSOis37GfJ774kT92b8ppx9cL63sXOhLjnMtwzo12zvUAmgKnAx2cc02dc9ceZQFTppxzLznn0pxzaXXq1AnJe/y5z7Gsfmwgl3YJTZEkIsFrUK0CJ7f0Vifd+8Ein9OIlG/D3vA2oWxRp3LY3zuoLtbOuWzn3Ebn3M4yfO8NQONDbjcK3FfgMWaWgLfR3rYyzCAiUerFK7zmkO/MWcd2tSMQ8UX6+l0s2bgbgMu7NQ37+wdVxITIbKCVmTU3syTgEuDwpdsf89u8m0HA16WdSCwisaVKSiLX9PQar/7fO/P9DSNSDjnnOOef0wB4Zkg74uPCP2XVtyLGOZcD3AR8CSwF/uecW2xmI83s3MBhrwC1zGwFcCsw3J+0IhKJbu/n9aKd8tMWMnJyfU4jUr58unBj/vXz2/uz3UFQRYyZNTWzMwLXK5hZlbJ4c+fcZ8651s65ls65hwP33e+c+zhwPcM5N9g5d6xzrotzbmVZvK+IxIaUxHjuCBQySwND2iISenl5jpvf9qbGvnfDyfi0cLj4IsbMrsVb3vxi4K5GeB2tRUR8d7DDdWZOHnsysn1OI1I+vDXL60TUsk4lOjWt4VuOYHbs/TPeni4zAZxzy82sbkhTiYgEKSE+jv93aQd4D9ymdNyYAb/fAK8g2hRPpNRy8xz3feitChxzVRdfswRzOinz0K7VgVVCmlwrIhHjnHbH8EleD5a4puzYV8xojDbFEzkqD433Ntbv0rwmTWpV9DVLMCMx35rZ34AKZtYXuBH4JLSxRERK5tIb7ufs/zcN1sOqRwcUfo5em+KJlNrujGzGfLcagH9fmeZvGIIbiRkObAHSgeuAz4B7QxlKRKSk2jasRtPAX4VvzVzrcxqR2HTzf73JvEPSGlOtQqLPacBibduVtLQ0N2fOHL9jiIgP1m3fzyl
PTAZg2aj+pCQW0CLk4EjM0PFhTCYSxeaMgfRxZObk8sO6nQB0aVaTuMNHO0M418zM5jrnjhj6KXQkxszSzWxhYZeQpBQROQqNa1akczNvpcQDHy32OY1IjEgfB5vS+XHTHgAa16h4ZAHj01yzoubEnB22FCIiZeSVqzpz0ogJvDNnHXeddTw1KyX5HUkk6u2t0YZzV98CwIpbzoL4w8ZAfJprVlQDyDVFXcIZUkQkWFVTErk60I7gznEaNBY5Wg7Hol92AfDU4HYkHF7A+CiYze72mNnuwy7rzOwDM2sRjpAiIiVx+5neLr4Tl/7Kuu37fU4jEt0ObbB6UUd/2gsUJphy6lngDqAh3m69twP/BcYCr4YsmYhIKVVIiuf2M1sDcMHo6T6nEYleeXmO5Zv3AvDRn3v41l6gMMEUMec65150zu1xzu12zr0E9HPOvQP4t9ewiEgRrj+1JQBb92Yye/V2n9OIRKe3Z3vbFaQkxtOucXV/wxQgmCJmv5ldbGZxgcvFQEbgsdhany0iMSMhPo5X/uityBz8wvc+pxGJPjm5edzzgddeoE39Mun7XOaC2bH3MuAfwGi8omUGcLmZVQBuCmE2EZGjcnqbelSvmMjO/dlMWvorp7ep53ckkcgR2P+lMGu27GVsUiap8WtJTqgVxmDBK3Ykxjm30jl3jnOutnOuTuD6CufcAefctHCEFBEprf9d1x2Aq1+fQ6xt7ilyVAL7vxQkOzePLXszAUhu1M7byC4CFTsSY2Z1gGuBZoce75z7U+hiiYiUjdb1qtC8diVWbd3HMxOXc6vfgUQiSf3UAnevHvryTKZlbWVYrxZ0G9DGh2DBCWZOzEdANWAiMP6Qi4hIVPjvtV0BeG7ScnI1GiNSpNVb9zFtxVYAbgus8otUwcyJqeicuyvkSUREQqRBtQoMSK3PZ+mbWLllL63qRuYkRZFIcO4/vZki9wxoQ3JCAf3HIkgwIzGfmtmAkCcREQmhJwa1A2DbviyycvN8TiMSmb79aQu7M3IA+FNg5+ug1E/1LmEWzEjMX4C/mVkmkA0Y4JxzVUOaTESkDFVOTuC6U1vA97Bs425O8juQSIRxzvHHV2cB8NrQzsTHlWBju7MeC1GqogWzOqmKcy7OOVfBOVc1cFsFjIhEnVv7euf392fnsnD9Tn/DiESY9+dtAKBe1WR6H1fX5zTBCaqLk5nVMLMuZtbr4CXUwUREylpyQjwtalcC4EK1IxDJ55zjtncXAPD2td18ThO8YBpAXgNMAb4EHgz8OyK0sUREQqNulRQAcvIcXy/71ec0IpHhwU+WANCucXVa1Knsc5rgBTMS8xegM7DGOdcH6ADsDGUoEZFQSm1YDYA/vTbH5yQi/tu5P4vXpq8G4PWhnf0NU0LBFDEZzrkMADNLds4tA44LbSwRkdCplJRAk5oVAXh+8gqf04j468//nQfAFd2aUr1iks9pSiaYIma9mVUHPgS+MrOPgDWhDCUiEmpjh3nn/Z/88kf2Zub4nEbEHxnZuXy3YhsA954duTvzFiaY1UkXOOd2OudGAPcBrwDnhziXiEhIHVO9Amee4DWEvHPcAp/TiPgjfcMuAO4dGPkb2xUkqNVJBznnvnXOfeycywpVIBGRcHn2kvYAfJa+iY27DvgbRiTMduzPym/D8aceJdjYLoKUqIgREYklFZMSuOX0VgBc/OL3PqcRCZ+8PMePv+4B4D9XdyGuJBvbRRAVMSJSrt182rEArNt+gBkrt/mcRiQ83prpTW1NTojjlFZ1fE5TesW2HTCzq4EpzrnlYcgjIhJWifFxjL6sIze+NY9LXprBqkcHYHNfg/RxxT85dRCkDQ15RpGylJWTx30fLWZsErRpEN0b8AczEtMEeNHMVprZu2Z2s5m1D3EuEZGwGZDagOoVEwH4cP4Gr4DZlF70kzalB1foiESYv4z9AYBqFRJJicLJvIcqdiTGOfcAgJlVAK4F7gCeBaL7fy4icoj3bjiZ05/
+lr++s4Dzj3NY/VQYOr7wJ4wZGL5wImVk064MPl+0CYDW9ar4nOboBdN24F4z+xyYABwL3A40CnUwEZFwalmnMh2bVAdg5ZZ9/oYRCZGrxnhdqu/sfxzxFp2TeQ9V7EgMcCGQA4wHvgW+d85lhjSViEgobUovcCTlf8l5zEnaQZ19a8iq2Z7o2rtUpGhzVm9n2SZvRdL1vVrCKp8DlYFgNrvrCJwBzAL6AulmNi3UwUREQiJ1ENRPLfChhLg4jqlWgSWuKS/v6hTmYCKhk5vnGPSCt43Avy7rGLVLqg8XzOqktsApwKlAGrAOmBriXCIioZE2tMgVRcfk5tHrns9hC5y8biftG1cPXzaREHll2koAkuLjOCu1gc9pyk4wq5MeA6oCzwFtnHN9nHP3hzaWiIg/EuLj+PvF7QA4//nvyMtzPicSOToZ2bk88tkyAL78ay+f05StYE4nne2ce9w5N905lx2OUCIifrqgQ8P862/OVL9biW63vev1Bjvt+Lo0r13J5zRlK5jVSa3MbJyZLQnsFbPSzFaGI5yIiB/MjIm3en+x3v/RYjJzcn1OJFI663fsZ/zCjcBvvcJiSTCnk8YA/8JbodQHeAN4M5ShRET8dmzdKvQ8tjYAd45b6HMakdIZ9C9vMu8d/Y6jakqiz2nKXjBFTAXn3CTAnHNrnHMjAO3yJCIx7/nLOgLw0fxf1OVaos7s1dvZtDsDgD/3OdbnNKFRaBFjZp+ZWXMg08zigOVmdpOZXQBUDltCERGfVKuQyB39jgNg8Avqci3RIzfP5X/Pjh3Wzec0oVPUSMwY4Evgc6AScAvQCbgcuDL00URE/Hdj75YArN+hLtcSPR7/wluNVKdKMt1a1PI5TegUWsQ4594FOuKNukwDLgEWAdOBHmFJJyLiMzPjrWu6AnDJSzPIzs3zOZFI0Xbsy+KlKd76m09u6ulzmtAqbk5MFrAPSAaq4BU0lQPXRUTKhR7H1qZ2Za8JweOfL/M5jUjRrnptNgBXdGtK/WopPqcJraLmxPQH5gMVgY7OuQeccw8evIQroIhIJPjsL6cA8PK0VezYl+VzGpGCzV2znQXrdgJwz8A2/oYJg6JGYu4BBjvnhjvn9ocrkIhIJKpbJYUrujUFYMhLmuQrkcc5x0WBJdVPD25HSmK8z4lCr6g5Mac45xaHM4yISCQ7+JftT7/uZXeGNjCXyPLG997u0tUrJnJRp0Y+pwmPYhtAioiIJyUxntGXdeTGt+axZONuujavSWz0ApZotz8rhwc+9sYdPrmpJ8wZA+njin7SpvRCO7pHi2A2uxMRkYABqQ1oWL0CAL/szPA5jYhn6BhvMm/fE+rRuGZFr4DZlF70k+qnQuqgMKQLHY3EiIiU0LgburPmaVi3Yz+V92dTrWLsbecu0WPZpt3MXLUdgH8FdpkGvCJl6HifUoWHRmJEREqoQbUK1K6cDMCVY2b5nEbKs7w8R/9npwLwj0vakxBfvn6tl6//rYhIGWlZpxIAC9btZPbq7T6nkfLqua+XA2AG57Vv6HOa8FMRIyJSCobRuq7XRm7wC99rJ18Ju10Hsnl2olfEfHN7b3/D+ERFjIhIKdWslEzVFG9q4ZNf/uhzGilvLhj9HQCDOzWiaa1KPqfxh4oYEZGjMPG2UwF4acpKNu/RaiUJj+k/b2Xlln0AjDq/rc9p/KMiRkTkKNStksJVJzcD4PSnv/U3jJQLzjn+8O+ZALx6VVq52Jm3ML4UMWZW08y+MrPlgX9rFHJcrpnND1w+DndOEZFg3BvYyXdPRg5fLfnV5zQS6+79cBHgTS4/7fh6Pqfxl1/7xAwHJjnnHjOz4YHbdxVw3AHnXPuwJhMRCdamdBgzkARgcfMc0jfsgrGQ16wmcRbYyzd1EKQN9TWmxI5fdh7grZlrAXj3+pN9TuM/v04nnQe8Hrj+OnC+TzlEREonddDvtmyvlJRA5WTv78Kfft3j3bkpvfit30VKoO/fvVOWt/VtTc1
KST6n8Z9fIzH1nHMbA9c3AYWNh6WY2RwgB3jMOfdhQQeZ2TBgGECTJk3KOKqISAHShh4xwtIyK5c2938BWfDF0FM4/vNLfQonsejVaavYl5ULwE2nHetzmsgQspEYM5toZosKuJx36HHOOQe4Ql6mqXMuDfgD8KyZtSzoIOfcS865NOdcWp06dcr2PyIiEqQKSfE8O6Q9AP2fnYor9EebSMnszshm5KdLAPj05p6YqfUohLCIcc6d4ZxrW8DlI+BXM2sAEPh3cyGvsSHw70rgG6BDqPKKiJSF8zs0JDHe+wWzdtt+n9NIrDj/eW9PmLNPakDbhtV8ThM5/JoT8zHwx8D1PwIfHX6AmdUws+TA9dpAD2BJ2BKKiJTSd3edBsDG3Rlk5uT6nEai3eRlm/P3hHlqcDuf00QWv4qYx4C+ZrYcOCNwGzNLM7OXA8e0AeaY2QJgMt6cGBUxIhLx6lZN4Ybe3tnvH9btxDtrLlJyObl5DH1tNgCv/6lLud4TpiC+TOx1zm0DTi/g/jnANYHr04HUw48REYkGt595HLOne9ff+H4NfwxsiCdSEgcLmI5NqnNqa835PJxfq5NERGJafJzRvnF15q/byQMfL+acdscc3ZLYOWOCW66tfWlixtKNu5m6fCsA/7m6q89pIpPaDoiIhEhKQjx1KicDcN7z047uxdLHefvOFEX70sSMrJw8zvrHVABeuLwjlZI15lAQfSoiIiHUok4l2A7rth/g7VlrubTLUexlVT8Vho4v/PExA0v/2hJR7npvIQC1KiXRv22D3z8YzKjcpvTfbcYYq1TEiIiEkG1axOLmz3ktCT6F7PQaJMYVMAiu00ASMH/dTj74YQMAk+/ofeQBB0fliipS6qd631MxTkWMiEioBH6JVML7i3rbviwWrNtJp6Y1MA7ZrOzgaSIVMeVeTm5e/p4wo85vS9WUxIIPLG5UrpxQESMiEiqHtCZonptH33s+B2BUm7Zc0a3pb8fpNJAE/GXsfMArei/vqjY6xdHEXhGRMEiIj+OjP/cA4L4PF7FjX5bPiSTSLFy/k/HpXlvBibeeqtYCQVARIyISJu0aV+eijo0A6PH41z6nkUiSm+c495/eaaQnB51EDXWoDoqKGBGRMHr8Im8y5v6sXF6eutLnNBIprn7d29SuZZ1KDE5r7HOa6KEiRkQkjBLi4/g2sOLkofFL2bQrw99A4rspP23hmx+3APDRTT19ThNdVMSIiIRZ01qVuLpncwC6PToJh3orlVd7M3O48tVZALx6VRqVtaldiaiIERHxwb0D2+RfX711n49JxE/9npkCQPvG1Tnt+Ho+p4k+KmJERHxgZsy42+uD++ueTPZkZvucSMLtrZlr2LDzAADvXNfN5zTRSUWMiIhP6ldLYdR5JwKw+JfdZOXk+ZxIwmXDzgPc88EiAD76cw+SE+J9ThSdVMSIiPjoiu7NSEn0foENeG6qz2kkHJxz9HjMW2J/RbemtGtc3d9AUUxFjIiIz1IbVgNgxea9jJu73uc0EmoHd+WNjzNGBkbipHRUxIiI+CzeLL+Quf3dBWzerWXXseq7FVv5eMEvAMy+5wztynuUVMSIiESASkkJXN7N65XT5ZFJ5ORqfkys2bU/m8tengnAS1d0oqZ25T1qKmJERCLEQ+enkpTg/Vi+/s25PqeRspSX52g3cgIA3VrU5MwT6/ucKDaoiBERiSCz7zkDgIlLN/Oe5sfEjHs+TM+//tY1Wk5dVlTEiIhEkGoVEnn9T10AuO3dBazfsd/nRHK0pq/Yytuz1gHw/d2nER+neTBlRfsbi4hEgk3pMGYgAKcCk2vt5dc9max/ZhQNm9fENi2C+qn+ZpQS274viz8E5sH8/eJ2NKhWwedEsUUjMSIifksddESB0qx2JeICK1fS1+/yHk8d5Ec6KaW8PEfHUV8BcFbb+lzYsZHPiWKPRmJERPyWNtS7HMKAEzJzaPvAl5AFdzQ7jj+nHetPPimVoa/Nzr8
++viFMOb+4p+UOuiI7wUpnEZiREQiVOXkBCbddioAT375Iz+s3eFzIgnWK9NW8e1PWwCYf39fLH2cd8qwKJvSIX1cGNLFDo3EiIhEsJZ1KjPyvBO5/6PFXDB6Oj/c15ca2l8kos1bu4NRny4B4L0bTqZ6xcDXq34qDB1f+BMDc6IkeCpiREQi3JXdmzF+4UZmrtpOh1Ffsfzhs0iM10B6JNq1P5sLR08HYPhZx9OpaY2SvcAhE7yLPEaTvAGdThIRiQpvX/vb3iKnPf2Nf0GkUM79tqFdl2Y1uf7UliV7gQImeBdIk7zzaSRGRCQKxMUZix/sx4kPfMm67Qe4+/2FPHrhSX7HkkP0fWZK/vW3h5ViQ7sCJnhL0TQSIyISJSolJ/DtHb0BeHvWOt6fpx19I8W9H6azYvNeABY92E8b2oWJihgRkSjStFYl3gjs6Hvr/xawcP1OfwMJb3y/mjdnrAVg2l19qJyskxzhoiJGRCTK9Gpdh9v6tgbg3H9+xy87D/icqPya8tMW7v9oMQBjh3WjUY2KPicqX1QuiohEoZtPb8WC9TuZuHQzJz/2NYse7Edlv0OVM8t/3cOVr87i0vhJ/KXufOp/WwG+LeRgrSgKCY3EiIhEqX9fmUbD6l4vnrYPfInD+Zyo/Ni8JyN/Iu811eZSf/+Kop+gFUUhoZEYEZEoZWZ8e0dvjr3ncwBmrtpO1+Y10ZTS0NqbmUOXhycB3lLqlsmVgWI2spOQ0EiMiEgUS4iPY9mo/vm3563Z6V+YcuBAVq7XzwqoXzWFd64rxVJqKTMaiRERiXIpifGkjziTxY+MopVbxcJRPUhtVA0rakxGjQZLLDMnlzb3f5F/+/u7T8NM415+0kiMiEgMqJKSSGr/a1jimrI/O5dFG3YXPkdGjQZLLCc3j+Pu/a2A+fmRASpgIoBGYkREYkSlk6+h6YmX0f3RryELOh6ozns3nHzkL1s1GoQ5Y4Iu5ByOOau2MzbQx7FLs5rEvT76twO08sg3GokREYkhDapVYOqdfQCYt3YnF7/4vc+JIlT6OK/4KIbDMXPV9vzbnZvVJO7wolArj3yjkRgRkRjTuGZFpt7Zh1OemMzs1Ts45/9N45Obe/odK/LUL3pFUWZO7u9OIS0Z2Y/4JP3ajCT6aoiIxKDGNSvy/d2n0f3Rr0nfsIsej33N1Dv7EBdsT59gT7fE6AThA1m/n8S7dGR/KiTF+5hICqLTSSIiMapBtQrMu68vABt2HqD1vZ+TlZMX3JODOd0SoxOEd+7P+l0Bs/zhs1TARCiNxIiIxLCalZJYOOJMThoxgZw8R+t7P+fn1o74YFbWFHO6JRYnCK/bvp9TnpgMQGK88eOos4IfvZKw00iMiEiMq5qSyE8PnZV/e/bq7WTm5vqYKDLNWLktv4A5tm5lFTBRQEWMiEg5kJQQx4qHz+KYaikA/LB2J9N/3upzqsjx2neruOSlGQCc1/4YJt56qgqYKKAiRkSknEiIj2P63adTq5K34ckf/j2TZ776yedU/srLc1z28gxGfLIEgPvPPoF/XNLB51QSLM2JEREpZ1rVrUKV3RmwEf4xaTmfLvyFCX89lfhyNvKQ6xwt//ZZ/u1x13cnrVlNHxNJSWkkRkSkHKpfNYUv/u8UAH7eso+Wf/uMX3dn+JwqfHZnZDN79W+b2M299wwVMFFIRYyISDl1fP2qLB35Wwfsro9M4vXpq/0LFAbOOW59Zz5LNu4GILVhNX5+ZAC1Kif7nExKQ0WMiEg5ViEpntWPDeQPXZsA8MDHi2n34ARyXSHNI6PYxl0HaH73Z7z/wwYAWtSuxCc39yx3p9FiiYoYERHhkQtS+TTQmmDXAe9Uy+Y9sXN66cFPFnuNMQM6Na1B3SopPiaSsqCJvSIiAkDbhtVY+cgAzh/9HWyGlVv30WX4eObd15eagRVNUeGQlgn7snJI37CLfkC/JKhXNYVmtSpimxar83QM0EiMiIjki4szPr6pJyceUzX
/vo6jvuL6/8wlJzfIlgV+Sx+H27SQH9btIH3Drvy7OzSpTvNalTBMnadjhEZiRETkCFWSE+navCZ/rNmU179fwxeLN3HsPZ9z/aktuav/cVgwbQt8kJObx4pNu9l5oCGXZN0DwGMXpnJJlyY+J5NQ0EiMiIgUyDAePK8ty0b1p0WdSgC88O3PNL/7Mx79fCmOyJn8m5Gdy7VvzOHYez5n54FsAPqeUI+VjwxQARPDNBIjIiJFSkmM5+vberNlTyb9np3C9n1ZvPjtSvokbadqSiIND2RTrUJi4S9wyByVIqUOgrShJcq2eus+rnh1Juu2H8i/r0pyAsc3qEq3K9NK9FoSfXwZiTGzwWa22MzyzKzQ7zIz629mP5rZCjMbHs6MIiLye3WqJDPvvr7MvucMjqtXBfA2jWv34ASaDR/PmO9WkZdXwOhM+jjYlF70i29KD67QAQ5k5TLq0yU0Gz6e3k99k1/A9D2hHstG9efEY6oF16Vbop5fIzGLgAuBFws7wMzigeeBvsB6YLaZfeycWxKeiCIiUpA6VZL58q+9cGNqsmbbfgj0kXzwkyU8GOhBdGX3pvzl9Fa/bSJXPxWGji/8RccMLPI9V27Zy9MTfmJ8+sYjHhtxzgn88eRmETtPR0LHlyLGObcUKO4brguwwjm3MnDsWOA8QEWMiMjR2pRedOGwKb3YJciG0axWJVbfPpB12/dzz4eLmPLTFgDe+H4Nb3y/hvg446NKu6iUFM/s2eto36Q69aqmUDUlIf93gHMOh8M52LE3k7Xb97Ns4x4W/7KLBet3smjD7iPee3CnRtw78ASqVSziNJbEvEieE9MQWHfI7fVAV5+yiIjEjmCWFpdwCXLjmhV5409dANiTkc3bs9ay+JfdzF61nb0HctibmcOd7y084nnxcUZunmNsktfH6JKHJh5xTLNaFWnbsBpdW9Tiks6NSYzXmhTxhKyIMbOJQP0CHrrHOfdRGb/XMGAYQJMmmoUuIlKktKElnkBbElVSEhnWq2X+bTfmabJzHWN6dubnLXvJzXMs37yXWpWSSIg34uPiaLSoAoYxvOPxVElJoFGNipzQoCp1qqinkRQuZEWMc+6Mo3yJDUDjQ243CtxX0Hu9BLwEkJaWFjlr/kREBMNIijf6HF+XPsfXLfig9RUBuP7UlgU/LlKASB6Tmw20MrPmZpYEXAJ87HMmERERiRB+LbG+wMzWA92B8Wb2ZeD+Y8zsMwDnXA5wE/AlsBT4n3NusR95RUREJPL4tTrpA+CDAu7/BRhwyO3PgM/CGE1ERESiRCSfThIREREplIoYERERiUoqYkRERCQqqYgRERGRqKQiRkRERKKSihgRERGJSipiREREJCpFcgNIERGJdMV1wz54TDEdsUVKQ0WMiIiUTrBdrkvYEVskWCpiRESkdELcDVukOCpiREQkOswZA+njij9Op6/KDU3sFRGR6JA+zitQiqPTV+WGRmJERCR61E+FoeP9TiERQiMxIiIiEpVUxIiIiEhUUhEjIiIiUUlFjIiIiEQlFTEiIiISlVTEiIiISFRSESMiIiJRSfvEiIhIZCiumaR24pXDqIgRERH/BbPDrnbilcOoiBEREf+pmaSUgubEiIiISFRSESMiIiJRSUWMiIiIRCUVMSIiIhKVVMSIiIhIVFIRIyIiIlFJRYyIiIhEJRUxIiIiEpVUxIiIiEhUUhEjIiIiUUlFjIiIiEQlFTEiIiISlVTEiIiISFQy55zfGcqUmW0B1oTo5WsDW0P02nIkfd7ho886fPRZh5c+7/AJ5Wfd1DlX5/A7Y66ICSUzm+OcS/M7R3mhzzt89FmHjz7r8NLnHT5+fNY6nSQiIiJRSUWMiIiIRCUVMSXzkt8Byhl93uGjzzp89FmHlz7v8An7Z605MSIiIhKVNBIjIiIiUUlFTAHMrL+Z/WhmK8xseAGPJ5vZO4HHZ5pZMx9ixoQgPutbzWyJmS00s0lm1tSPnLGiuM/7kOMuMjN
nZlrVUUrBfNZmdnHg+3uxmf033BljRRA/R5qY2WQz+yHws2SAHzljgZm9amabzWxRIY+bmT0X+FosNLOOIQ3knNPlkAsQD/wMtACSgAXACYcdcyPwQuD6JcA7fueOxkuQn3UfoGLg+g36rEP7eQeOqwJMAWYAaX7njsZLkN/brYAfgBqB23X9zh2NlyA/65eAGwLXTwBW+507Wi9AL6AjsKiQxwcAnwMGdANmhjKPRmKO1AVY4Zxb6ZzLAsYC5x12zHnA64Hr44DTzczCmDFWFPtZO+cmO+f2B27OABqFOWMsCeZ7G2AU8DiQEc5wMSaYz/pa4Hnn3A4A59zmMGeMFcF81g6oGrheDfgljPliinNuCrC9iEPOA95wnhlAdTNrEKo8KmKO1BBYd8jt9YH7CjzGOZcD7AJqhSVdbAnmsz7U1XgVvpROsZ93YOi3sXNufDiDxaBgvrdbA63N7Dszm2Fm/cOWLrYE81mPAC43s/XAZ8DN4YlWLpX05/pRSQjVC4uUJTO7HEgDTvU7S6wyszjg78BVPkcpLxLwTin1xhthnGJmqc65nX6GilGXAq855542s+7Af8ysrXMuz+9gcnQ0EnOkDUDjQ243CtxX4DFmloA3PLktLOliSzCfNWZ2BnAPcK5zLjNM2WJRcZ93FaAt8I2ZrcY7n/2xJveWSjDf2+uBj51z2c65VcBPeEWNlEwwn/XVwP8AnHPfAyl4fX6k7AX1c72sqIg50myglZk1N7MkvIm7Hx92zMfAHwPXBwFfu8CMJimRYj9rM+sAvIhXwGjOwNEp8vN2zu1yztV2zjVzzjXDm4N0rnNujj9xo1owP0c+xBuFwcxq451eWhnGjLEimM96LXA6gJm1wStitoQ1ZfnxMXBlYJVSN2CXc25jqN5Mp5MO45zLMbObgC/xZr2/6pxbbGYjgTnOuY+BV/CGI1fgTXC6xL/E0SvIz/pJoDLwbmDu9Frn3Lm+hY5iQX7eUgaC/Ky/BM40syVALnCHc04juiUU5Gd9G/BvM/sr3iTfq/SHZ+mY2dt4xXftwByjB4BEAOfcC3hzjgYAK4D9wNCQ5tHXUURERKKRTieJiIhIVFIRIyIiIlFJRYyIiIhEJRUxIiIiEpVUxIiIiEhUUhEjIiIiUUlFjIgUycxqmdn8wGWTmW0IXN9rZqND9J7/Z2ZXhuK1S8PMVgc2pCvs8bFmpt12RcJM+8SISNDMbASw1zn3VAjfIwGYB3QMNFj1XaANQ5pzbmshj58KXO6cuzaswUTKOY3EiEipmFlvM/s0cH2Emb1uZlPNbI2ZXWhmT5hZupl9YWaJgeM6mdm3ZjbXzL40swYFvPRpwLyDBYyZ3WJmS8xsoZmNDdxXycxeNbNZZvaDmZ0XuD/ezJ4ys0WB428O3H964Lj0wPOSA/evNrMHzWxe4LHjA/fXMrMJZrbYzF4G7JD3HW9mCwLvMSSQeSpwRqAAE5EwUREjImWlJV4Bci7wJjDZOZcKHAAGBgqZ/wcMcs51Al4FHi7gdXoAcw+5PRzo4Jw7Cbg+cN89eD3LugB9gCfNrBIwDGgGtA8c/5aZpQCvAUMCeRKAGw55/a3OuY7Av4DbA/c9AExzzp0IfAA0CdzfH/jFOdfOOdcW+AIg0A15BdCuBJ+XiBwlFTEiUlY+d85lA+l4PWy+CNyfjldYHIfXJfsrM5sP3IvX4fZwDfh9c76FeMXI5cDB00tnAsMDr/MNXkO/JsAZwIsHR3Gcc9sD77vKOfdT4LmvA70Oef33A//ODeQk8PibgdcYD+w45P/S18weN7NTnHO7DnmdzcAxBX4yIhISGvoUkbKSCd6ohJllH9JgLw/vZ40Bi51z3Yt5nQN4RclBA/GKinOAe8wsNfBaFznnfjz0iYEmoaXKjdeEscific65n8ysI16Du4fMbJJzbmTg4ZRAdhEJE43EiEi4/AjUMbPuAGaWaGYnFnDcUuDYwDFxQGPn3GTgLqAaXlfzL4GbLVC1mFm
HwHO/Aq47ODfFzGoG3reZmR0bOOYK4Ntisk4B/hB4jbOAGoHrxwD7nXNv4nVY73jIc1oDi4L4HESkjGgkRkTCwjmXZWaDgOfMrBrez59ngcWHHfo58J/A9XjgzcDxBjznnNtpZqMCz10YKHRWAWcDL+MVEwvNLBv4t3Pun2Y2FHg3UNzMBl4oJu6DwNtmthiYDqwN3J+KN/8mD8gmMLfGzOoBB5xzm0r6uYhI6WmJtYhEHDP7ALjTObfc7yzBMLO/Arudc6/4nUWkPNHpJBGJRMPxJvhGi514E4ZFJIw0EiMiIiJRSSMxIiIiEpVUxIiIiEhUUhEjIiIiUUlFjIiIiEQlFTEiIiISlf4/Y1LpcGe3LDkAAAAASUVORK5CYII=\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "plt.figure(figsize=(9, 6))\n", + "plt.xlabel(\"Time (seconds)\")\n", + "plt.ylabel(\"Yaw angle (rads)\")\n", + "plt.step(gimbal_t, gt_gimbal_signal, label=\"Reference val\", where='post')\n", + "plt.step(jetson_t, jetson_signal, label=\"Jetson val\", where='post')\n", + "# plt.step(gimbal_t, extrapolate_jetson_signal, label=\"Extrapolated Jetson val\")\n", + "plt.legend()" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "9a012d3a", + "metadata": {}, + "outputs": [], + "source": [ + "# Recursive variables\n", + "xhat = 0 # posterior\n", + "xhatminus = 0 # a priori\n", + "\n", + "P = 0 # posterior variance estimation\n", + "Pminus = 0 # a priori variance estimation\n", + "K = np.array([0, 0]) # Kalman gain\n", + "\n", + "# Constant modeling variables\n", + "Q = 1 # noise variance\n", + "\n", + "H = 1\n", + "\n", + "A = 1 # state transition\n", + "B = 0 # control transition\n", + "\n", + "R = 20 # state variance\n", + "\n", + "xhat_list = []\n", + "\n", + "jetson_signal_ptr = 0\n", + "\n", + "last_signal = 0\n", + "last_last_signal = 0\n", + "\n", + "last_t = 0.1\n", + "last_last_t = -1\n", + "\n", + "for i in range(gimbal_freq):\n", + " if jetson_signal_ptr < len(jetson_t) and gimbal_t[i] >= jetson_t[jetson_signal_ptr]:\n", + " last_last_signal = last_signal\n", + " last_signal = jetson_signal[jetson_signal_ptr]\n", + " last_last_t = last_t\n", + " last_t = jetson_t[jetson_signal_ptr]\n", + " jetson_signal_ptr += 1\n", + " # state transition: A @ xhat + B @ u + N\n", + " xhatminus = A * xhat + B\n", + " Pminus = A*P*A + Q\n", + " K = Pminus * H / (H * Pminus * H + R)\n", + " \n", + " # Update with pseudo posteriori observation\n", + " xhat = xhatminus + K * (extrapolated_signal[i] - H * xhatminus)\n", + " xhat_list.append(xhat)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "id": "aacef2ec", + "metadata": {}, + 
"outputs": [], + "source": [ + "xhat_arr = np.array(xhat_list)" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "fcfdf560", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAjEAAAFzCAYAAADPF1gOAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/YYfK9AAAACXBIWXMAAAsTAAALEwEAmpwYAABykklEQVR4nO3dd3hUxdfA8e/sppKEhBII0jtIAgFCk96bgNJVELBgA/3ZURAQUBHF9iqigjQVEFBBqkgvUkINvXdCCwklCSk77x93CSVtIdndbDif59nH3Xvnzp5sMDmZOzNHaa0RQgghhHA1JmcHIIQQQghxPySJEUIIIYRLkiRGCCGEEC5JkhghhBBCuCRJYoQQQgjhkiSJEUIIIYRLcnN2ANmtYMGCulSpUs4OQwghhBDZZMuWLRe11oF3H891SUypUqUIDw93dhhCCCGEyCZKqeNpHZfbSUIIIYRwSZLECCGEEMIlSRIjhBBCCJeU6+bECCGEEOlJTEzk1KlTxMfHOzsUkQYvLy+KFSuGu7u7Te0liRFCCPHAOHXqFH5+fpQqVQqllLPDEbfRWnPp0iVOnTpF6dKlbbpGbicJIYR4YMTHx1OgQAFJYHIgpRQFChS4p1EySWKEEEI8UCSBybnu9XsjSYwQQgjhQGazmdDQUIKDg+nQoQPR0dEZtr9w4QJ16tShevXqrFmzxjFB2lnfvn2ZPXt2lvuRJEYIIYRwIG9vb7Zv386uXbvInz8/3333XYbtly1bRkhICNu2baNhw4Y2vUdycnJ2hJrjSRIjhBBCOEm9evU4ffo0AIcPH6ZNmzbUrFmThg0bsm/fPrZv384777zD3LlzCQ0NJS4ujn/++Yd69epRo0YNunXrxrVr1wBjx/p3332XGjVqMGvWrAzbDRs2jBo1ahASEsK+ffsAuHbtGv369SMkJISqVasyZ84cgHT7uWnfvn3Url075fWxY8cICQkBYMSIEdSqVYvg4GD69++P1jpbPz9ZnSSEEOKB9OHfu9lz5kq29vnwQ3kZ1qGKTW2Tk5NZtmwZzz77LAD9+/dn/PjxlC9fno0bN/Lyyy+zfPlyRowYQXh4ON9++y0XL15k1KhR/Pvvv/j4+PDpp5/yxRdfMHToUAAKFCjA1q1buXjxIp07d063XcGCBdm6dSvjxo3j888/Z8KECYwcORJ/f38iIiIAuHz5cqbvB1CpUiUSEhI4evQopUuXZubMmfTo0QOAAQMGpLTt3bs38+fPp0OHDtnzYSNJjHhQhE+CCBvuv4Z0hbB+9o9HCPHAiouLIzQ0lNOnT1O5cmVatmzJtWvXWL9+Pd26dUtpd+PGjVTXbtiwgT179lC/fn0AEhISqFevXsr5m8lDZu06d+4MQM2aNfnjjz8A+Pfff5kxY0ZKm3z58jF//vwM+7mpe/fuzJw5k0GDBjFz5kxmzpwJwIoVKxgzZgyxsbFERUVRpUqV3JPEKKV+Bh4Fzmutg9M4r4CvgXZALNBXa73VsVGKXCFiNkRGQFBI+m0ijb8+JIkR4sFg64hJdrs5JyY2NpbWrVvz3Xff0bdvXwICAti+fXuG12qtadmyJdOnT0/zvI+Pj03tPD09AWOScVJS0n2/3009evSgW7dudO7cGaUU5cuXJz4+npdffpnw8HCKFy/O8OHDs3
2TQWfPiZkMtMngfFugvPXRH/jeATGJ3CooBPotSP+RUYIjhBDZLE+ePHzzzTeMHTuWPHnyULp0aWbNmgUYycOOHTtSXVO3bl3WrVvHoUOHALh+/ToHDhy473a3a9my5R2TjC9fvmxzP2XLlsVsNjNy5MiU0aCbCUvBggW5du1atqxGuptTkxit9WogKoMmnYCp2rABCFBKFXFMdCI30WiStSYmLpHYhCRuJCVjsWTvBDMhhLhX1atXp2rVqkyfPp1ff/2ViRMnUq1aNapUqcLcuXNTtQ8MDGTy5Mk88cQTVK1alXr16qVMzL2fdrcbMmQIly9fJjg4mGrVqrFixYp76qdHjx788ssvdO/eHYCAgACef/55goODad26NbVq1bqPTyhjKrtnCt9zAEqVAuancztpPjBaa73W+noZ8K7WOjy9/sLCwnR4eLqnRS537ko8qw9cYMORKHafieHajSROXY5jhsdIAHomfJDmdUF5vfg+6QPyeJhZU38K9csVpEJhP8wm2RRLiNxk7969VK5c2dlhiAyk9T1SSm3RWofd3TZXTOxVSvXHuN1EiRIlnByNcKQr8Yl8/e9BJq49mub5QD9PWj1cmIKRnni5mRgYXA5fTzeSLJrEZAsnomKxWDThJy7xg18cpzws7F35J8kLKqX00aRiIEMffZgygb6O+rKEEELYIKcnMaeB4re9LmY9dget9Y/Aj2CMxDgmNOEsN5KS+WThPiavP5bqXLuQIFpXCaJR+UDy+XjcOjHJSEDebFUx1TXzj8xn2Y33WGd97VNiKp3z/ZbS/8r9F1i5fxUAdcvk56se1Qny98rOL0kIIcR9yOlJzDxggFJqBlAHiNFan3VyTMJJ9kdepev49VyNv3Mmfd9HSvFGqwrk9bKtdPtNCckJtJ7TmotxFwHIr01EKQsaC882ycvwju1JtmimbzrBp4v2cfVGEhuORFH3k2UAfNa1Kt3Cimf0FkIIIezI2UuspwNNgIJKqVPAMMAdQGs9HliIsbz6EMYSa1n7mlvcw74tK/za02/S5jsON69UiC97ht5z4nLT6WunaTPn1sK4aW2nEbpgMEt0LG+ZLvLRpo8Y32I8ZpOiV92S9KpbEotFM27lIT7/x5iZ//bsnbw9eycDmpbjzVYVpKicEEI4mFOTGK31E5mc18ArDgpHOJIN+7YkndlB+NFL9EsolHLsi+7V6FyjWJbeevv57fRe1BsAPw8/VnVfhbvZSIZak4e3gHWn1xGXFIe3m3fKdSaTYkCz8gxoVp4D567S5XtjVOjbFYf4dsUh3mhZgYHNykkyI4QQDpLTbyeJ3Ozmvi13iYyJp+4ny1JWFAHM7F+XOmUKZPktd1zYkZLAtCzZki+afJGqTe+HezNtzzTGho9lSN0hafZTobAfEcNbc/HaDdp8tYaL127wxdIDfLH0ABP7hNG8cuEsxyqEECJjzt7sTogUyRbNc1PCU+acgJEsHBvdPlsSmBMk0mthLwC6VeiWZgIDMCB0AAAz988kNjE2wz4L+noSPqQFG95rnnLs2SnhlBq0gIvXUm8ZLoQQAL6+6a92jI6OZty4cQ6MJnOTJ09mwIABzg4jFRmJETnCmoMX6D1xU8rrl5uUpe5ZGxMXG+bXxJ2LoH0RfwDal2nP0HpD022bxz0PbUq1YfGxxXwW/hnD6g3LNIQgfy+OjW7PhiOX6PnjBgDCRv1Lv/qlGProw3KLSQhhs5tJzMsvv+zsUHI8GYkRTpVs0bT5anVKAuPn5cbeEW14p02lTK68zc35NXeJxcJKYvlUXaa2NYEp41+G0Q1HZ9rlzdtIsw/MJuZGjM2h1C1TgKOftKN7mDFvZ9K6Y5R+byEXrsqojBAitc8++4xatWpRtWpVhg0z/mAaNGgQhw8fJjQ0lLfffpuzZ8/SqFEjQkNDCQ4OZs2aNQBMnz6dkJAQgoODeffdd1P69PX1ZfDgwVSrVo26dety7ty5O97TYrFQqlQpoqOjU46VL1
+ec+fO8ffff1OnTh2qV69OixYtUl2b08hIjHCa6wlJVHl/YcrrcU/VoF3IfVaVCAohqc9cZh2YxYSICZyPPZ+qSbmAcszqMMum7vw9/elWoRuzDsxi4PKBTG071eZQlFKM6VqN11tWoN4nywGo9dG/fPDowzzboLTN/Qgh7GzRoDT/AMqSoBBom/kfSgD//PMPBw8eZNOmTWit6dixI6tXr2b06NHs2rUrpRjk2LFjad26NYMHDyY5OZnY2FjOnDnDu+++y5YtW8iXLx+tWrXir7/+4rHHHuP69evUrVuXjz76iHfeeYeffvqJIUNuze8zmUx06tSJP//8k379+rFx40ZKlixJ4cKFadCgARs2bEApxYQJExgzZgxjx47N3s8oG0kSI5zi+KXrnL1iFAfLl8ed/95rjpe7OXXDyAiY1B4AC5p4NLFoEqyP61g4GX2U9/LlIWla9Tsurf9Qfeo9VI8mxZtQwq/EPd/SebvW28w6MItt57ex9dxWahSucU/XF/H35ugn7Rjy1y5+3XiCkfP38PPao6x4qwkebjIIKsSD7p9//uGff/6henXjZ9e1a9c4ePBgqp3na9WqxTPPPENiYiKPPfYYoaGhLF++nCZNmhAYGAjAU089xerVq3nsscfw8PDg0UcfBaBmzZosXbo01Xv36NGDESNG0K9fP2bMmJFStPHUqVP06NGDs2fPkpCQQOnSOfsPL0lihEMlJluo9uE//IyRwLzfrhL9G5VN1U5rzct5TZxQBTjJCXRG+Uf+PClP6xapywd1P6BE3qyXn/B28+bjBh/z/tr36bO4Dzue3oFJ3VvyoZTio8dD6FW3JG2/XsPp6DgqDFnEireaULqgT5ZjFEJkgY0jJvaitea9997jhRdeuOP4sWPH7njdqFEjVq9ezYIFC+jbty9vvPEG/v7+6fbr7u6e8keb2WwmKSkpVZt69epx6NAhLly4wF9//ZUyUjNw4EDeeOMNOnbsyMqVKxk+fHjWvkg7kyRGOMzNpdMAeEBo8QDqppPA1PilBkmWJFDQpXwXridep7R/aTzNnsQlxVHUtyjuZncUCrMyU9q/NBXzpy4pkFUdynZgQsQEjsQcod/ifkxpO+W++qlcJC+HPmpL8PAlxCdaaPr5Sj7vVo2uNbO2540QwnW1bt2aDz74gKeeegpfX19Onz6Nu7s7fn5+XL16NaXd8ePHKVasGM8//zw3btxg69atvPvuu7z66qtcvHiRfPnyMX36dAYOHGjzeyulePzxx3njjTeoXLkyBQoYCyliYmIoWrQoAFOm3N/PO0eSJEY4xIr951N23fXzdKN2yfyY0ri9o7WmwYwGRgIDrH9iPX4efg6N9W6/tvuVetPrsfX8VmYdmEW3Ct3uqx83s4m9I9qk3F56a9YONh29xJiu1bI5YiFETpaUlISnpyetWrVi79691KtXDzAm5P7yyy+ULVuW+vXrExwcTNu2bQkODuazzz7D3d0dX19fpk6dSpEiRRg9ejRNmzZFa0379u3p1KnTPcXRo0cPatWqxeTJk1OODR8+nG7dupEvXz6aNWvG0aNpF9fNKZSxKW7uERYWpsPDw50dhrjNdysO8dmS/QB0rVmMz7tVS5nncvdmdy8ufZF1Z4xSjBue3ICPuwNvuaQTE0DEhQieXPgkAFPbTqV6oeqp2twhk2Xfl2MT2H/uKnOTH2GdfwdWvNUEs0mWYQthb3v37qVy5cpOjWHHjh08//zzbNq0KfPGD6C0vkdKqS1a67C728rsQmE3Wmuem7I5JYH5tEuIkcCkY2LExJQEZlWPVY5NYDIREhjCazVeA+DpRU+zP2p/xheks+z7pnx5PKjtdZpO5vWciIql7PsLiU9Mzs6QhRA50Pjx43niiScYNWqUs0PJFeR2krALrTUtv1zNofPXAPjrlfqEFg9It/2RmCN8tfUrAGY8OoP8XvkdEOW9eS7kOQ5cPsCio4vo+ndXZneYnfE8nHTKKtxkmtSeWl
qDUU+SSh8sZvvQlgTk8cjmyIUQOcWLL77Iiy++6Owwcg1JYoTtbKw8rdEMPVyZQ0nNAFjzTlOK37aC6G7JlmQ6/WXcy30l9BWqFKiSPfHawZhGY0iyJLH0+FK6/t2V0Q1H075M+3vqIy4pjm+2fsMvphMU0WY+ejqGD3/NR0KyhdARS/nvvWYU8ffOvCMhhHjASRIjDLYkKMfXGv8t2SDdJhatuXZ8G+1NUUyjmU0jC/+37f8AcFNuvFD1hQzb5gRfNPmCL7Z8waRdkxi0ZhB/H/mbcc3HZbr8+p9j//DmqjfvOHZWJTN68yeM6D2EHxcU5siF69T7ZHmmiZ8QQghJYsRNN+dwBIWk36ZkAwjpCmH90jwdn5hMpQ8Wp1Sf3jOiNXk8Mv4ndinuEhN3TQRgabelLlNj6I2ab1CzUE0GLB/AutPrqDa1GpNaTyIsKNW8M37f/zsjN4y841jHsh0ZdHAr64jjbdMlPto4ip71e1J0dwvWHLxIwzErWPp6I8oX9rN5BCyj740QQuRGksSIWzKZw5GRG0lGAnNT7VL5MWWSwAA8989zADxR6QkKehe8r/d2lsbFG7PhyQ3U/a0uAP2WGAnE3E5zKQMcIZFOU24lhWZl5rf2v/FwgYeNAwfb0wYfvJt9yIDlA5ixfwb5ff+hfchYFkSco+WXq1n0WkMqZ5JgniGJY5cPUiZiBkGSxAghHiCyOklkWVKyhYpDbiUwdUqnvQfM3U6QyKHoQwC8W+vdTFrnTD7uPkT0iWBs41u1RTrN7USI6QSdTGdTjs3tNJftT2+/lcDcpnHxxvzZ8U8AouKjWJ3Uj0eqRAHQ9us1xCYk3Uow+y0gsvtEBpULIcR0ghDTCVqbzvBCAR/eUhft/NUKIbKDr69vyvOFCxdSoUIFjh8/zvDhwylatCihoaGEhoYyaNCgLL9X3759mT3bhpFcFyVJjMgSi0VTbvCilNcHP2qLwrZbQi+rCwB81vgzzKY06ia5kFalWhHRJ4IPH/nwjuP/q/E/IvpEUCagTIbXl8tXjvBe4RT1NXbKjLCMwa/yeyi3y+w8HUN0UhJD1g4hZEoILWe3ZMGRWyNm3St0B2CHSuDU1VPZ/JUJIexl2bJlvPrqqyxatIiSJUsC8Prrr7N9+3a2b9/O6NHOLYvgCiSJEfdNa02Z26pQ7xvZBnezicskc5xELsVdItmS9t4nJ0jkuDJ25W1dsrVD4nWEzuU7E9EngnWWYmy2FOPZkGdtvtbT7MniLov5rvl31iMa3/Kf8nzZ6zT0OMPcw3NT2g6sPpCtvbYS0SeCD+p9wLuWAAC+3/F9Nn41Qgh7Wb16Nc8//zzz58+nbNnU5VfSsm/fPmrXrp3y+tixY4SEGLeZR4wYQa1atQgODqZ///7kto1s0yNzYsR90VrT4NMVKa/3jGiNp5uJN1a+wVLTaePg700AcNOQpKCYdksZozlpMhKYYfWGucxk3nuRNwt/HzQq1oiIPhFM3jWZsVtu3aZKvFKVlX2/J8gvb6prOuPLp0Qz7/A8htQdgrebLNEWIjOfbvqUfVH7srXPSvkr8W7tjG+P37hxg8cee4yVK1dSqVKlO859+eWX/PLLL0Z8n35K69a3/sirVKkSCQkJHD16lNKlSzNz5syU6tMDBgxg6NChAPTu3Zv58+fToUOH7PzSciRJYsR96f7Df5yOjgNg5/BWmExJVJ16ayv+p7UfnijcteKISsRHK66jOUES/pgI1GYC8hajU7l7q/Vhd5ERt8oPZLWfjFZ62aBvcF/6BvclZlIbjpy8wuOxT1L3ozXsHdEGb487b7/lwUQT7c1KFcfv+3+nT5U+WXpvIYT9uLu788gjjzBx4kS+/vrrO869/vrrvPXWW+le2717d2bOnMmgQYOYOXMmM2fOBGDFihWMGTOG2NhYoqKiqFKliiQxQqTlrVk72HzsMgCb3m+OVrHU+vXW3jFreqwhwCvASdFlQUjX7OsrKCTb+vPHTPXi+X
A7qEiyaCoPXcyhj9riZr5ztGeIzsdKFcfn4Z/T++Heme5bI8SDLrMRE3sxmUz8/vvvNG/enI8//pj333/f5mt79OhBt27d6Ny5M0opypcvT3x8PC+//DLh4eEUL16c4cOHEx8fb8evIOeQJEbck59WH2H2FmPy6NLXG+HllUD96UYC42X2YsOTG1x3km5YP8fvs2LLyI91VGf/qLaUtc5BajhmBesHNbvjVlxh3KiYryL7L+9n/pH5dCzb0Z6RCyGyIE+ePCxYsICGDRtSuHBhnn3WtvlzZcuWxWw2M3LkyJRbSTcTloIFC3Lt2jVmz55N167Z+EdZDiZ/qgmbrTl4gY8W7gXgl2frUKqgF/Wn1wegoHdBNj610XUTGGcI6WrbLSfrqI7ZpNj9oXF//GxMPM9PTV2t/etmxtD04LWD051ULYTIGfLnz8/ixYsZNWoU8+bNs/m6Hj168Msvv9C9u7EyMSAggOeff57g4GBat25NrVq17BVyjqNy2wzmsLAwHR6e+oe7yMTN0YB0Nrs7cSmWRp8ZE3k/fjyEJ+uUoMWsFpyLPYdZmdnae6vcvnCQszFx1PtkOQBvt67IK03L3fH96zKvCwcuH6BX5V5OGy4XIqfau3cvlStXdnYYIgNpfY+UUlu01qm2RJffOiJTMXGJKQlMv/qleLJOCcZsHsO52HMAbHpqkyQwDlTE35u/XjFGwD5bsp/1h+/c5O6Hlj8A8MveX4iKj3J4fEII4Sjym0dkKDHZQrUP/wEgpKg/wzpU4cDlA0zbMw2AhY8vxMOccYFHkf1CiwcwpmtVAJ78aSM3km7dOiroXZCnKj8FQLe/uzklPiGEcARJYkS6tNa0+3pNyut5A+qTbEmmy7wuAAyqPYjieYs7K7wHXvew4nSpUQyAbSejSb7t1vCbYUa17POx55l1YJZT4hNCCHuTJEaka/SifRw8fw0wNrNTSvHDTuNWhbvJnScrPenM8AQwtns1Cuf1BGDzsaiUXTrdTe5Ma2uMlo34bwRnrp1xWoxC5DS5bS5obnKv3xtJYkSa1h68yA+rjwCw+H8NyePhxvXE6ynb2i/qvChX7rTrita+2yzl+fNTt6Q8Dy0USo+KxhLM1nNak2hJdHhsQuQ0Xl5eXLp0SRKZHEhrzaVLl/Dy8rL5GtknRqRy7ko8vSZuBGBM16pUCjK2uX/535cB6FCmA4V9CjstPnEnd7OJGiXysfXEZf7de44/t53i8erGbabBdQaz5NgSom9EU+uXWmzrvU2ST/FAK1asGKdOneLChQvODkWkwcvLi2LFitncXpIYcYekZAt1Pl4GQOfqRekeZsx5OXPtDFvPbwVgaL2hTotPpM3DbKJyUF44Aa/P3EFYyfwUz58HpRT/dvuXsF/CSNbJNPm9Ccu7LZf9fMQDy93dndKlSzs7DJFN5HaSuEPPHzcA4OVu4oseoSnHX1n2CgAf1P0ALzfbh/qE4/h7u/NC4zKAsaNvfKKxYsnT7Mm6J9YBEBUfxdrTa50WoxBCZCdJYkSKMzFxhB83aiJtGdIy5fjpa6c5FH0IgG4VZMluTvZe28oUDTAqWLf6cnXK8bweefn90d8B+Hb7t06JTQghspskMQKA6wlJnIiKBYyl1D6et+40vr/GKE42tN5QmU/hAla+3QSAE1GxfPHP/pTjFfJVAGBf1D6uJFxxRmhCCJGtZE6MICHJQsTpGADealWBqsUCUs5djr+cMhfmsbKPOSE6ca/czSb+eb0Rrb5czTfLD9GkUiFqlMiH2WSmc/nO/HHwD/448Ad9g/um3UH4JIiYnfkbhXR1fMFMIYS4jYzECJp+vhIwJoe+0rTcHefG7xgPQL8q/XA3uzs6NHGfKhT244NHHwag87j1KTv6PhtsVModt2Nc+hdHzDYqZ2ckMsK2REcIIexIRmIecBPXHuV0dBx4QLXiAXfcLkq2JPPbvt8A0v+rXeRYzzYozZwtp9hz9goNPl3B5sEtKJG3BMV8i3
Hq2in2R+2nYv6KaV8cFJJuMVDgVsFJIYRwIhmJeYCduhzLyPl7AKhWzB/zXfNdbt5GqlGoBvm98js8PpF1f7z8CAAXrt7g+5WHAXgr7C0ARm0Y5bS4hBAiO0gS84BKTLbQ4FOjMvXAZuXwdk89KPfjzh8BeLXGqw6NTWQfL3czq6wTfT9dvI+TUbE0LdEUgO0XthMdH+284IQQIovkdtID6sVpxvb0eb3ceLNVRZh05/lrCdfYcNbYM6Z6oeqODk/cj8iING/zlATWForlVHQc48Yu48MPP+fFai8yfsd4hq4fyjfNvrmnt9l+fjv91Umq48l3liTcTPJjRAjhHDIS8wBaGHGWZfvOA7D+veZptll+cjkA7cu0x6Tkn0mOF9LVmMeSjmL58vCwOk4n83pe+mUL/UP6A7Di5AquJly16S2SLcl0+LMDvRf1Jk5p1qt4Iq9HZkv4QghxP+RPqAfM1fhEXv7VmOsy4ekwfD3T/icw99BcwFiVJFxAWL9Mlzv7/NwOjkWxbN95lu29SL8q/Zi0exKvr3ydCa0mZHjtsZhjdPirQ8rr+tqLdSqeWQdm8XrN17PlSxBCiHslf2I/YG7WRWpRuRAtHk67iGN8UjybIjfhZfZKf/WKcDlmpahQyA+AF3/ZynNVjFISG89u5OSVk+let+z4spQEJr9XfsJ7hTNOBwLw866f7Ry1EEKkT5KYB8jPa48Sm2DsFzLuqZrptlt/Zj0AXSp0cUhcwnHy+3jQpKKRgDT+bBUf1P0AgI5zO6K1TtX+j4N/8L+V/wOgz8N9WNVjFZ5mT0woArVRRFImBwshnEWSmAfE2Zg4RliXUy9/szEebul/65ccWwJA5/KdHRKbcKwfehsJ7OXYRJKj6wCQZEliyu4pd7RbfHQxw9YPA4zCn2/VeuuO88/rvADMOTjH3iELIUSaJIl5QNT7xJio+2yD0pQJ9M2w7cKjC4FbtXZE7uLpZubfNxoB8N6fu/ipubGh4dgtYzkfa0z43sUN3l79NgAjHhlB94rdU/XzOD4AfLX1KwdELYQQqUkS8wAY8feelOc3t6JPz825Ea1KtrJrTMK5yhXyo0dYcQCe++kMT1R6AoDms5pzDQtPmM4B8HK1l3m8/ONp9uGFidL+pQHYH7U/zTZCCGFPksTkcscuXufndUcBWPtu00zbrzq1CiBlQzSRe43uYizJvnYjCY+YW4lKPdMpAGoWrslLoS9l2MfzIc8D8M22e9trRgghsoMkMblYskXTxFrccVDbShTLlyfTa8ZtNwoDNi7W2J6hiRxAKcWyN43v87fLjzCn3b93nM9s2TVAi5ItAFh9ajXxSfHZH6QQQmRA9onJxT5euBeAJ8zLeOHId3BEpd84MgIdFMzVxKt4u3nj5+HnoCiFM5UN9OWlJmX5fuVhWo0NZ837C5nzR0+6al+bduL1dvOmTak2LD62mDkH5/BU5accELUQQhhkJCaXOnT+KhPXGreRPiy9FxW5K+MLgkJYVyoMgJYlW9o7PJGDvN3q1l5A87bc4H86gGL38PfNq9WN2lqjN41Oc5m2EELYi4zE5EJaa1p8sRqA11tUwOOkydiSvt+CDK/bs/NHOAHdKnRzRJgihzCZFKvfbkqjz1bw2ZL9PFc2GU83s83XF89bnKK+RTl97TTrz6ynftH6doxWCCFukZGYXGj0on0AeLubea1FeZuv++vQXwAU8ytmj7BEDlaiQB76PlIKgB2nYu75+pH1RwLw+kopQSCEcBxJYnKZszFx/LD6CAAr325i83XJlmROXj1J5fyVKehd0E7RiZxsWAdj+b1FayKv3Nsk3VpBtVAo4pLiCI8Mt0d4QgiRiiQxuUyjMSsAeLV5eQrn9bL5uh0XdgDg7+lvl7hEzqeUYuVbTQA4duk6UdcT7un6m6uZnv/n+ewOTQgh0iRJTC7y7fKDJCYbEytfv4fbSAAnrp4Abu37IR5MpQr6UNDXE4DHx627p2trF6mNn4cfST
qJ5SeW2yM8IYS4g0zszanCJ0HEbNvahnQlqvJTfP7PAQD+faMRSmWwnPouQ9cN5c9DfwKQzyvfPYcqcpeygT5cvHaD45dimb3lFF1r2j5HakqbKXSe15nXVrxGeK9wPM2edoxUCPGgc+pIjFKqjVJqv1LqkFJqUBrn+yqlLiiltlsfzzkjTqeImA2REZm3i4yAiNm0+tLYafepOiUoV8i2PV4SkhOoNrVaSgLzvxr/o3y+exvBEbmPQvFwEaO441uzdnD9RpLN15bPV57K+SsD8MbKN+wSnxBC3OS0kRillBn4DmgJnAI2K6Xmaa333NV0ptZ6gMMDzAlsWBbNpPZcvH6Di9eM+QvDOlSxqetrCdeoN71eyuulXZcS5BN036EKFxEZAZPaZ9omb1AIDcsXZM3Bi/T5eROzX3rE5reY2nYqtX6txepTq1l5ciVNijfJUshCCJEeZ47E1AYOaa2PaK0TgBlAJyfG45IsWnPo/DUAZr9YDw+3zL+l1xOv35HAbO29VRKYB0FIVyMxzkxQCIR0ZXyvmgCEH7/MpqNRNr+Nl5sXXzX5CoCBywdyLeHa/UQrhBCZcuacmKLAydtenwLqpNGui1KqEXAAeF1rffLuBkqp/kB/gBIlStgh1JxrX+RVAB4pW4CwUvkzbZ9oSaTub3UBY8v4jU9uvKf5M8KFhfUzHjbyAb55ojqvTt9G9x/+48jH7TCZbPu30rxkc+oUqcPGsxuZc3AOfar0uc+ghRAifTl9ddLfQCmtdVVgKTAlrUZa6x+11mFa67DAwECHBuhMh85f5Up8IgAT+oTZdE3r2a1Tnv/3xH+SwIgMdaz2EGUCfQAY/vfue7r2g7ofAPDV1q+kHIEQwi6cmcScBorf9rqY9VgKrfUlrfUN68sJQE0HxZbj3V5aoHwhX/J4ZD6o9n/b/o8LcRcA2PjkRswm27eWFw+uWS8Ytx6n/necyBjbN8ErmbckQT5BJFmS2HUxk9pdQghxH5yZxGwGyiulSiulPICewLzbGyilitz2siOw14Hx5WifLt4PgEkpClw9YEzWTO8RGcFxEvlx548ALOy8kDzueZwZvnAhBXw9ea25sWqtw7dr7+naoXWHAjBk3ZBsj0sIIZyWxGitk4ABwBKM5OR3rfVupdQIpVRHa7NXlVK7lVI7gFeBvs6JNme5dO0G41cdBqB8876ZT9YMCuFR01kA3qz5JsX9imfcXoi7/M+6eeKFqzf4PTzVtLR0NSzWEIAjMUeIvB5pl9iEEA8uldvuVYeFhenw8FxQu+XmMtg0llg3+HQ5py7H0a9+KZuWVP/ftv/jx50/4uvuy39P/pfdkYoHxNYTl+k8bj0Ahyt8h1mpzLcAAOYcmMPw/4YTXCCY6Y9Ot3eYQohcSCm1RWudavJnTp/YK+7y755znLocB8D77Spn2DYhOYEvwr9IuY00u6ONOwALkYYaJfJRr0wBAA6ft33ZdJcKXQDYdWkXZ66dsUtsQogHkyQxLiQp2cJzU41Rpkn9auFuTvvbt/jYYkKmhFDzl5pM2j0JgH7B/SjqW9RhsYrc6cenjbn1UbEJXE+wfSff4fWGA/DSvy/ZIywhxANKkhgXMmK+sZlxlYfy0rRioVTnp+2ZRsiUEN5e9XbKsUfLPMqaHmt4o6ZsAS+yzs/LnSHtjRHAiNMxNi+dfrz844AxN2bd6XsrLCmEEOmRJMZFnLsSz9T/jgMw9Znad5zbfWk3IVNCGLN5DABmZWZ2h9lE9Ingk4afEOAV4OhwRS72XMMyKc9nbTll0zUmZeK75t8B8OK/L5KQnGCX2IQQDxZJYlxE1/HGhMoBTctRwNeoDKy1ZuDygfSc3zOl3YLHF7D96e1UzF/RKXGKB0O1YgEAvDN7J/GJyTZd06hYo5Rbmq+teM1eoQkhHiDOLDsgbLT6wAVORhmTed9sVSHl+If/fcjKkysBGFZvGF0rdHVCdOJB5O1uJsR8ghkeIzn6+WgqB+VNu2FI1z
tKHczqMItHpj/C2tNrWXVyFY2LN3ZQxEKI3EhGYnK4hCQLT/+8CYAZ/eumlAk4d/0ccw7OAWBVj1WSwAjHCumKd/FQAGLiEolLazQmMgIi7lwR5+fhx5dNvgRgwPIBsneMECJLZCQmhxtpncxbNMCbutblrXBrOP7l0JfJ75V54UchslVYP0xh/Ti++QTvzomAI3D0k3Z31uK6udfRXVqUbEGnsp2Ye3guLWe3ZOOTG2UHaSHEfZGRmBwsMdnCtA3GZN75AxukHD8cfZjdl4xifP1D+jslNiEAuofd2v355r9VW4xqMIrCeQoDUOe3OsQlxWV7bEKI3E+SmBws4nQMAM/UL00+H4+U468ufxUw5sFIEUfhTEqplAR76NzdNk/yBVjadWnK81UnV2V7bEKI3E+SmBwqJi6RhGQLAIPaVko5fuLKCU5cPQFA5/KdnRKbELcLLupP4wqBALz3R4TN1ymlWPC4UbbgXOw5u8QmhMjdJInJgbTW7I28AsCkvrXwcLv1bRqxYQQAQ+oMwaTk2ydyhm96Vgfgz22nORkVa/N1N+fCTNszzS5xCSFyN/ktmANNXHsUAE83E00r3dqZ92rCVTae3QjcqkcjRE7gn8ed15obla47fWf7jrwFvQtSKm8p4pPj7RWaECIXkyQmh7l2I4lRC/YC8PBDd+69MXXPVAB6Ve6Fm0kWlomc5X8tjCQm6noCG49csvm6on5FibkRw66Lu+wVmhAil5IkJod56ZctAOTL44Gn+dakXYu2MH7HeKNNqBTREzmPUoo/X34EgB4/bkBjW12lruWNPY5OXzttt9iEELmTJDE5yNGL11lz8CIA5Qv53nHu5l+p5QLKkdcjnd1RhXCy6iXyUTTAG4ATNs6NKe5nLNOesnuK3eISQuROksTkEFprWn+1GoCRnapgun3TMOCbbd8A8H6d9x0emxD3Ys5LxmjM2Zh4kiyWTNtXyFcBbzdvDkUfsrkqthBCgCQxOcbc7WdISDJ+4PeqWzLV+ZsTemsWrunQuIS4V0H+XjxR2xhdOXj+WqbtlVIU9S1KXFIcR68ctXd4QohcRGaHOkP4pDtqyljQBB2NYoYHPFwkL2ryOKPuTFAIQEqRx+4VusuyauEShnWowvbtUDT+ELE/tCaPR8Y/ap4vEcy70YfYem4rZfzLOCZIIYTLk9+IzhAx20hSrE5fNrZc9/Ewk9fL3TgYFGJUAAZ+2vkTgBR5FC7Dy91McpUu7NEl2WndeTpdkRFUOLEVuDXiKIQQtpCRGGcJCoF+C7gan0jD4f8AsOmt5pDX645mFm1h58WdAJTLV87hYQpxv+p1e4sy2x4G4Nua1Xm06kNpN5zUnnJAUd+iLD62mI8afISH2SPttkIIcRsZiXGyp3/eBEDnGkUpdFcCA7f+Mm1ZsiXuJneHxiZEVphMt+oqDfhtGxZLxpN2i/oWBeDcdSlBIISwjSQxTnTs4nW2nYgG4NMuVdNsM+vALMDY4E4IVxNc1J/KRYwtAb7690CGbbuUN3ahnr5/ut3jEkLkDpLEOFH3H/4D4Mse1XA3p/2tOBx9GIBK+SuleV6InG7as7UB+Gb5Ia7GJ6bbrlWpVgD8tvc3h8QlhHB9ksQ4yeXYBM5fvQHAY6FF02xz/MpxjsQcoVpgtZRCeUK4moK+njxe3fg3/tIvW9Nt52Zyw9/Tn2SdTFxSnKPCE0K4MElinMCCZv+5qwDM6F8XddfGdjdtPWf8wG9fpr3DYhPCHsZ0NW6Xrj10keOXrqfb7vmQ5wH44+AfDolLCOHaJIlxgptLqgvn9aRumQLptlt6fCkgG9wJ1+duNjG8g7FSqfO49em2u7mNwLfbvnVIXEII1yZJjINdu5HE6WgjiZn7SoN02yVZklhzeg0l/EpQIV8FR4UnhN08Xa8UAJeuJ/DvnrRXIPm4++Bp9uRa4jUux192YHRCCFckSYyDvTvH2POlgI8HQf6pl1TfdPbaWQD8Pf0dEpcQ9mYyKS
b3qwXAc1PD011y3b9qfwBmH5id5nkhhLhJkhgHOh0dx4KdRnJSuqBPhm2n7Z0GwBOVnrB7XEI4SpOKhShiTd4nrz+WZpvuFboDMGnXJEeFJYRwUZLEOFD38caS6qIB3riZ0v/od1/czfR9xl4ZMh9G5DYz+9cDYMT8PSQmp65yHeAVQLmAclxNvMqeS3scHZ4QwoVIEuMgu07HpMyFKZbPO912cUlx9FzQE4CPGnzEQ77pbNUuhIsqUSAPDcsXBGDQnIg029xcpfTlli8dFpcQwvVIEuMgXb43VmT88mwdFGkvqQZ4ZdkrAFTOX5mOZTs6JDYhHO27p2oAMGfrKRItqUdjWpZsCcCGsxuIuZFJAUkhxANLkhgHmLfjDDeSLHi6mWhg/Qs0LTsu7GBz5GYAfm33q6PCE8Lh8nq582LjsgDsj7ya6ry72Z3O5TsDMCFigkNjE0K4jgyTGKVUMaXUW0qpuUqpzUqp1UqpcUqp9kopSYBskJBk4dXp2wCY89Ij6bZLtiTTa6FRH+nzxp/jbpZijyJ3e7t1RcDYdiAuMSnV+YHVBwIwefdkEpITHBqbEMI1pJuIKKUmAT8DCcCnwBPAy8C/QBtgrVKqkSOCdGWfLdkHQJlAH4KLpr9cesb+GQC4m9xpXaq1Q2ITwpnMJpWyk++uM1dSnS/oXZBaQcaS7Bn7Zjg0NiGEa8hoNGWs1rqV1vobrfV6rfUhrfUurfUfWuuBQBPgjGPCdE1X4hP5ac1R4NaKjLQkWZIYvWk0ALM7yt4Y4sHRrWYxAJItmiW7I1Od/6j+RwB8Fv4ZyZZkh8YmhMj50k1itNa77j6mlMqnlKpqPZ+gtT5kz+Bc3ft/GCsvOlcvSqCfZ7rtZu6fCUBoYChl/Ms4JDYhcgKlFJWD8gLwwrQtJN+1AV4R3yLUKGRMApaVSkKIu2U6r0UptVIplVcplR/YCvyklJKfJpk4GxPHfOvGdh92qpJuO4u2pIzCjGowyiGxCZGT+Hu742E2fhRNWnc01flvmn0DwJQ9U0iypJ47I4R4cNkyOddfa30F6AxM1VrXAZrbNyzX1/0HY2O7AU3L4eeV/iTdtafXAlAqbylK5i3pkNiEyGkefsgYjRm1YC8JSXcuufb39E+ZJ/bTzp8cHpsQIueyJYlxU0oVAboD8+0cT66w63QMJ6OMje1eb5lx8cah64YCMLrhaLvHJURO5eVm5pGyRkX3jxfuTXX+/TrvAzBuxzhZqSSESGFLEjMCWAIc0lpvVkqVAQ7aNyzX1vPHDQBM7lcLsyn9je0uxV3iUvwlAKoUTP+WkxAPgnHWDfAmrz/GlfjEO87l98qfUoLj223fOjw2IUTOlGkSo7WepbWuqrV+2fr6iNa6i/1Dc02rDlzg2o0k3M2KJhULZdh2/I7xAHxQ9wNHhCZEjhaQx4Mn65QAoP/U8FTnxzYeC8Ck3ZO4lnDNobEJIXKmjPaJ+T+l1DfpPRwZpKtItmj6/LwJgJkvpL+k+qabq5IeK/eYPcMSwmWM7BQMwIYjUZyMir3jXAHvAnQo0wGA99a85/DYhBA5T0YjMeHAFsALqIFxC+kgEAp42D0yFzRuhbHivJCfJzVK5Muw7Wbi0WgeeegRPMzycQoBxgZ4ox4zEplnJm9OdX5w3cEArDy1kt0Xdzs0NiFEzuOW3gmt9RQApdRLQAOtdZL19XhgjWPCcx2xCUmMXXqAJ8zLGFZoD0zKYBV6ZATjixi3mvpW6euYAIVwEU/WLsGQv3Zx8Pw1thyPombJ/CnnfNx9GFJnCKM2jqLngp7seHoHJqmAIsQDy5b/+/MBeW977Ws9Jm7zyUKjvEBfv814XdyTYdukoGA2qRsAKZMVhRAGk0nx3ZPGJN8u3/+H1ndugNejUg+K+BQB4IWlLzg8PiFEzpHuSMxtRgPblFIrAAU0AobbMyhXExOXyLQNxwEoE+gLphDotyDd9tsiN8OSZ2hYtKHcShIiDe1CglKez95yim5hxe
84P7vjbOpPr8+Gsxv48+CfPF7+cUeHKITIAWxZnTQJqAP8CfwB1Lt5q0kYnp9irKR4onZx3E2ZD25N3j0ZgOdCnrNnWEK4LKUU8wc2AODt2TtTlSPI65GXaW2nATB0/VC2nd/m8BiFEM5n683kG8BZ4DJQQapX33L80nU2HYsCYFiHzPd6iU+KZ/Wp1QAEFwy2a2xCuLLgov5ULWZUfk+rHEFooVAGhA4A4OlFT7Mvap9D4xNCOJ8ttZOeA1ZjbHj3ofW/w+0blut4wrqx3fAOD+Plbk63XWxiLN3/7k6tX2sBUC2wmtxKEiIT43sZc8ZGLdhLYrIl1fkXqr1A+zLtAej2dze2n9/uyPCEEE5my0jMa0At4LjWuilQHYi2Z1CuYveZGM7ExAPQt37pNNtorflqy1fU+a0Oe6OM7dTbl2nPpNaTHBanEK7qoQBvmlYMBODdOTvTbDO64eiURKb3ot5Ex0c7KjwhhJPZksTEa63jAZRSnlrrfUBF+4blGrp8vx6ASf1qpXn+SsIVqk6tysRdEwFoXKwx23tvZ3TD0bib0y8KKYS45TtrOYI/tp7m8vW06yaNbjiajmU7AvD04qcdFpsQwrlsSWJOKaUCgL+ApUqpucBxewblChZFnCU+0YLZpGiaRnmBPZf2UH96/ZTXS7su5dvm32I2pX/LSQiRWh4PN15oVAaAF37Zkm67Dx/5EICjMUfltpIQDwh19x4MGTZWqjHgDyzWWufIUrJhYWE6PDx13ZUsWzQIIiMA0Gg2HjUm81Z5KC9+nreNqkRGsCqoHANMFwBj7svUtlNlQy4h0jOpvfH/VlBIuk0saAYfqsT05OasersJJQv4pNluc+RmnlnyDAAbn9xIHvc8dglZCOFYSqktWuuwu49n+JtVKWVWSqVM+ddar9Jaz8upCYyjnI6OA8DDbLozgQFW35bAvFTtJX5p94skMEJkJKRrhgkMgClyF68Gbgeg18SN6barFVSLcgHlAOgxv0e2hSiEyJkyHYmx3j4aqLU+4ZiQssZuIzFWcQnJVB66GIB1g5pRNMAbrTV/H/mb77Z9x5nrZwB4vebrPBP8jN3iEOKBMqk9Gk3p/a8AMLN/XeqUKZBm00RLIjWmGfNongt5jtdqvOawMIUQ9nFfIzFW+YDdSqllSql5Nx/ZH6Jr+Pyf/QC0qFyI8wn7aDijIVWnVmXw2sEpCczA6gMlgREimylulSPo8eOGVOUIbnI3uTO7w2wAJkRMYMmxJQ6LUQjhWLaUHfjAXm+ulGoDfA2YgQla69F3nfcEpgI1gUtAD631MXvFk5mr8YlMXHsUt7w72Mh0Ni66dS40MJQhdYdQMb8s3BLCXtqFBGE2KZItmrnbz/BY9aJptquYvyKfNPyE99a8x1ur3iKPWx4aFmvo4GiFEPaWbhKjlFLasCqzNvfzxkopM/Ad0BI4BWxWSs3TWt9ePfFZ4LLWupxSqifwKeC0G929pv6NX+Vhdxz7ttm3NC7e2EkRCfEAiYxATX6UnSWTiDgdA3+C3p4fhbrVJqQrhPUD4NEyj3Ix9iJjt4zl5WUv83GDj+lQtoOTghdC2ENGt5NWKKUGKqVK3H5QKeWhlGqmlJoC9MnCe9cGDmmtj1gnCs8AOt3VphNws07TbKC5UkrhBGPWTeGI560E5rd2vxHRJ0ISGCEc4bbJvz4ebnhbd8c+d+XGrTaRERAx+47L+gb35e2wtwF4f+37DF47ON3bUEKI+zdu5SGW7zvn8PfN6HZSG+AZYLpSqjTGLr3eGInPP8BXWuusVF0rCpy87fUpjEKTabbRWicppWKAAsDF2xsppfoD/QFKlCiBPZyLNpF0tRJtyjbgy7YyUVAIhwrrlzLCAlAoJo56nyyHs3BwQFvczSZjqXYanq7yNA/5PsTrK19n3uF5zDs8j9U9VpPPK5+johciVzt1OZYxi/fTp15JmlUq7ND3TnckRmsdr7Uep7WuD5QEmgPVtdYltdbPZzGByV
Za6x+11mFa67DAwEC7vMfY9r3ZO2CWJDBC5ABF/L15pKyxOmnIn7sybd+iZAuWdVuW8vrd1e/aLTYhHjT9pxqbUJYJ9HX4e9u0gYnWOlFrfVZrHZ2N730aKH7b62LWY2m2UUq5YWy0dykbYxBCuKgfehvFIWeGnyQqnXIEtyuUpxA7n96Jm3JLqWMmhMiaiFMx7Dl7BYBedUs6/P2duQvbZqC8Uqq0UsoD6AncvXR7Hrfm3XQFlt/vRGIhRO7i5+XOcw2Mwqv/m7ndpmuUUhTKU4joG9GcuXbGjtEJkftprenw7VoAvuxRDbPJ8VNWnZbEaK2TgAHAEmAv8LvWerdSaoRSqqO12USggFLqEPAGMMg50QohcqK3WhtbGqw+cIH4pGSbrnmh2gsA7LyYdlVsIYRt5u88m/L8sdC0tzuwN5uSGKVUSaVUC+tzb6WUX3a8udZ6oda6gta6rNb6I+uxoVrredbn8Vrrblrrclrr2lrrI9nxvkKI3MHL3czb1kRmr3VIOzMV8xnt155aa7e4hMjtLBbNwOnG1Ng5Lz2CkxYOZ57EKKWex1je/IP1UDGMitZCCOF0Nytc30iycDU+MdP2VQpWoYBXAeYenkuyxbbRGyHEnX7dZFQiKhvoQ82SzlvpZ8uOva9g7OmyEUBrfVApVciuUQkhhI3czCb+74nqMAd0ZAR6Urs7N8BLQ6E8Zi4B0TeiKeCddg0mIUTaki2aD/4yVgVO6lvbqbHYcjvpxu1Vq62rhGRyrRAix+hQ7SH+ttRnjy7J5euZjMZERtAjNgmAmftnOiA6IXKXUQuMjfVrl85PiQJ5nBqLLSMxq5RS7wPeSqmWwMvA3/YNSwgh7s0TLw3l0f9bC6fg6Cft0r9HP6k9HdAMJ4qZ+2fycujLjg1UCBd2JT6RSeuOAfDT06mKSjucLSMxg4ALQATwArAQGGLPoIQQ4l4FF/WnpPWvwl83nsiwrYf1dlNUfJSUIRDiHgz8zZjM2yOsOP7e7k6OBlRu+x84LCxMh4eHOzsMIYQTnIyKpeGYFQDsG9kGL2uNpTtYyxN8G9qWH3b+wKj6o+hU7u6ybUKIFOGTIGI2N5KS2XYyGoDapfJjunu087YCrNlNKbVFa51q6CfdkRilVIRSamd6D7tEKYQQWVA8fx5qlTJWSgybuzvDtl0rdAVg/I7xdo9LCJcWMRsiI9gfeRWA4vnypE5g0ijA6ggZzYl51GFRCCFENpnYtxZVh//DzPCTvNu2Evl9PNJsVziPUaju1LVTxCbGksfduRMUhcjJruWrTMdjrwJw6NW2YL5rDCSdAqz2llEByOMZPRwZpBBC2CqvlzvPWssRvDM7/UFjpRQ9K/YE4J/j/zgkNiFckUaz60wMAJ93q4bb3QmME9my2d1VpdSVux4nlVJ/KqXKOCJIIYS4F2+1Mnbl/XfvOU5Gxabb7pngZwAYt32cQ+ISwhXdXmC1Sw3nlBdIjy3p1FfA20BRjN163wJ+A2YAP9stMiGEuE/eHmbealUBgMfHrU+3XRHfIgR4BnD2+llOXjnpqPCEcBkWi+bg+WsAzH2lvtPKC6THliSmo9b6B631Va31Fa31j0BrrfVMwHl7DQshRAZebFwWgIvXbrD5WFS67foFG6spxu+UCb5C3G36ZmO7Ai93M9WKBzg3mDTYksTEKqW6K6VM1kd3IN56LnetzxZC5BpuZhMT+xgrMruN/y/ddt0qdANg3uF53Ei+4ZDYhHAFSckWBv9plBeoHJQtdZ+znS079j4FfA2Mw0haNgC9lFLewAA7xiaEEFnSvHJhAvK4Ex2byLK952heuXCqNn4eftR/qD7rzqxj9oHZPFX5KSdEKoQTWPd/Sc/xC9eY4XGDEPMJPN1yZo2xTEditNZHtNYdtNYFtdaB1ueHtNZxWmupZS+EyNF+f6EeAM9OCU93d953a78LwOhNo2UHX/HgsO7/kpbEZAsXrhkjk57Fqhkb2eVAmY7EKKUCgeeBUr
e311o/Y7+whBAie1Qo7Efpgj4cvXidL/89yBtptCntX5rifsU5efUky08sp3nJ5g6PUwinCAqBfgtSHe43YSNrEy7Sv1EZ6rarnGEXy08s5xt1lv9pf5rYKcz02DInZi7gD/wLLLjtIYQQLuG35+sA8M2ygySnM9IytvFYAP638n8yGiMeaMcuXmftoYsAvGld5Zee7ee389qK1zisEtmhEjJsaw+2JDF5tNbvaq1/11rPufmwe2RCCJFNivh70y4kCIAjF66l2aZygcoU8DLu+y8/sdxhsQmR03T81pgpMrhdZTzd0qg/ZhUdH03vRb0BeE7n5TUd4Ijw7mBLEjNfKdXO7pEIIYQdjelaDYBL1xNISLak2WZCqwmAMRqTmJzosNiEyClWHbjAlfgkAJ6x7nydFq01DWc2BKBOkTq8VrihcWvKwWxJYl7DSGTirLv1XlVKXbF3YEIIkZ18Pd14obGxyfi+s2n/CCuXrxzlAsoBMGjNIIfFJkROoLWmz8+bAJjcrxZmU/ob233434cpz8e3GA9tRxsPB7NldZKf1tqktfbWWue1vs7riOCEECI7vdHSuL8fm5jMzlPRabaZ2nYqYNRT+m77d44KTQin+2PraQAK5/WkScVC6bY7FnOMOQeNWSUruq/AzWTLbi32YVMVJ6VUPqVUbaVUo5sPewcmhBDZzdPNTJmCPgB0TqccgZ+HH5NaTwJg/I7xfBH+hcPiE8JZtNa8OWsHANOfr5th2+7zuwPwXu33KOhd0O6xZcSWApDPAauBJcCH1v8Ot29YQghhH4X8vABIsmiW7zuXZpuwoDDGNTeKQk7aPYkX/33RYfEJ4Qwf/r0HgGrFAygT6Jtuu/lH5hOXFIeHyYMnKz/pqPDSZeucmFrAca11U6A6EG3PoIQQwp5CivoD8Mzk8HTbNCzWkGltpwGw7vQ6QqaEEB0f7YjwhHCo6NgEJq8/BsCUfrXSbZeYnMh7a94D4Lf2vzkitEzZksTEa63jAZRSnlrrfUBF+4YlhBD24+PhRon8eQD4bsWhdNuFFgpldY/VKa8bzmzIN1u/kX1kRK7yym9bAehdtyQBeTzSbTchwli9Vy6gHBXz54w0wJYk5pRSKgD4C1iqlJoLHLdnUEIIYW8z+hv3/T9bsp9rN5LSbZfPKx87n95Jy5ItAfgp4ieqTq3KX4f+ckSYQthVfGIy6w5dAmDIo+nvzJuYnMi4HcYt1vEtck7Fd1tWJz2utY7WWg8HPgAmAo/ZOS4hhLCrhwK8afWwURDyndk7MmyrlOKLJl+wtOvSlGMfrPuAZr83s2uMQthbxOkYAIa0z3hju5n7ZwJQJ6gOhX1SF1J1FptWJ92ktV6ltZ6ntXb83sJCCJHNvuoZCsDCiEjOxsRl2j7IJ4iIPhH82fFPAC7EXZBN8YTLuhybkFKG45n6GW9s9+nmTwH4oN4HDonNVveUxAghRG6Sx8ONV5uXB6D7D//ZfF25fOV4sZqxYunvI3/bJTYh7Mli0ew/dxWAac/WxpTBxnZbzxtzZkrmLUnJvCUdEp+tJIkRQjzQBjYzdug9GRXHhiOXbL6uR8UeAEzaNckucQlhT79uNKa2erqZaFg+MMO232z9BoAhdYfYPa57lek2e0qpZ4HVWuuDDohHCCEcyt1sYtxTNXj51630/HEDRz9ph9oyGSJmZ3hdfjSY4NiVYyRZkpy6a6kQ9yIhycIHc3czwwMqF8l4A/64pLiUkZi6RTLeBM8ZbBmJKQH8oJQ6opSapZQaqJQKtXNcQgjhMO1CihCQxx2Av7afNhKYyIgMrzFF7qKTNnb/XX1qdYZthchJXpuxDQB/b3e8MpjMCzB592QA+lbpa+eo7k+mfzporYcBKKW8geeBt4GvgIy/ciGEcCFzXnqE5mNX8frMHTxWUaOCQqDfgvQvmNSePjqBueo63+/4nmYlZKWSyPkiY+JZtCsSgAqF/TJsm2RJYtx2Y1n1M8HP2D22+2FL2YEhSq
lFwD9AOeAtoJi9AxNCCEcqG+hLjRIBABy5cN2ma8pgjN7si9rH9UTbrhHCmfpOMqpUv9OmImaV/mRegB92/gBAw6INyeeVz+6x3Q9bbuJ2BpKABcAq4D+t9Q27RiWEEPYUGQGT2qc6/LunhXCPywReP05C/lDS37vUYEbRsWxH5h2ex+Kji+lSoYt94hUiG4Qfi2JfpLEi6cVGZeFo+m0t2sL4Hcamdh83+NgR4d0XWza7qwG0ADYBLYEIpdRaewcmhBB2EdIVgkLSPOVmMvGQvzd7dEkmxNS0qbtng58FYOKuidkWohDZLdmi6Tre2Ebg+6dqZLikGmDR0UUA1ChUgwCvAHuHd99sWZ0UDDQEGgNhwElgjZ3jEkII+wjrZzzS8VCyhUaDF8EFeORkNKHFAzLsrkxAGQp4FeDk1ZNEXo8kyCcomwMWIusmrj0CgIfZRNuQIpm2H7N5DADDHhlm17iyypbVSaOBvMA3QGWtdVOt9VD7hiWEEM7hZjbxRfdqADz23ToslsyLPT4X8hwA3+/43q6xCXE/4hOT+XjhPgCWvN4o0/aHLh8iKj4Kf09/yviXsXd4WWLL7aRHtdafaq3Xa61lf20hRK73ePWiKc9/2Zh5vdubc2H+OPgHN5JlyqDIWd6cZdQGa1apEKUL+mTa/mahx9drvG7XuLKDLauTyiulZiul9lj3ijmilDriiOCEEMIZlFL8+4bxF+vQubu5kZScYXtvN2+aFG8CwOwDGW+SJ4Qjnbocy4KdZ4FbtcIykpCcwNLjRqHTdmXa2TO0bGHL7aRJwPcYK5SaAlOBX+wZlBBCOFu5Qn40KFcQgHdm78y0/dC6xl320ZtGo3Xmt6CEcISu3xuTed9uXZG8Xu6Ztt8UaSzBrlOkDt5u3naNLTvYksR4a62XAUprfVxrPRxIvTZRCCFyme+eqgHA3O1nMq1yHZgnkEr5KwHw16G/7B2aEJnafCyKyCvxALzStJxN19z8t/ti1RftFVa2SjeJUUotVEqVBm4opUzAQaXUAKXU44CvwyIUQggn8fd25+3WFQHoNj7zKtdfNf0KgKHrh5JokSmEwnmSLTrl3+yM/rbVPIpPimfJsSV4mDwICwqzZ3jZJqORmEnAEmAR4AO8CtQEegFP2z80IYRwvpeblAXg1OXMq1wX9S1K5fyVAfh006d2j02I9Hy62FiNFOjnSd0yBWy6ZvuF7QA0Kpb5CqacIt0kRms9C6iBMeqyFugJ7ALWA/UdEp0QQjiZUopfn6sDQM8fN5CYbMmw/Y8tfwRg5v6ZXIy7aPf4hLjb5esJ/LjaWH/z94AGNl/37/F/Aej1cC+7xGUPmc2JSQCuA56AH0ZC42t9LoQQD4T65QpS0NcoQvDpon0Ztg3wCkjZxbfdH+1kkq9wuL6TNwPQu25Jgvy9bLpGa83M/TMxKRM1C9u2W3VOkNGcmDbAdiAPUENrPUxr/eHNh6MCFEKInGDhaw0BmLD2KJevJ2TYdkD1AQDEJcXx277f7B6bEDdtOR7FjpPRAAxuX9nm687HngcgrLBrzIW5KaORmMFAN631IK11rKMCEkKInKiQnxe965YEoMePGU/ydTO5MbuDsV/M6E2jORZzzN7hCYHWmi7WJdVju1XDy91s87ULjy4EoGPZjnaJzV4ymhPTUGu925HBCCFETnbzL9sD565xJT7j1UcV81ekX7BRo+m9Ne/ZPTYhpv5n7C4dkMedLjWL3dO1EyImANC6VOtsj8ueMi0AKYQQwuDlbmbcUzV4+det7Dl7hTql85NRLeA3ar7BpF2T2HVpF1cSrpDXI6/DYhUPltiEJIbNM8Yd/h7QAMInQUQmu0dHRqRUdL+ScAU35YaXm21zaHIKWza7E0IIYdUupAhFA4ydTM9Ex2fa/rUarwEw6r9Rdo1LPNj6TTIm87Z8uDDF8+cxEpjIiIwvCgqBkK6sObUGgM7lO9s7zGwnIzFCCHGPZr9Uj+Nj4eTlWHxjE/HPk/
527v2q9OPrrV+z6NgihtYbiq+H7BUqste+yCtsPBoFwPfWXaYBI0nptyDT6yO2GwUfO5ZzrfkwICMxQghxz4r4e1PQ1xOApydtyrCt2WSmZ8WegLGTrxDZyWLRtPnKGEn5umcobuZ7/7V+s2hpSb+S2RqbI0gSI4QQ96FsoA8AO05Gs/lYVIZt3wx7E4Clx5dyIfaC3WMTD45vlh8EQCnoFFr0nq9PtCRyIe4CVQpUIcArIJujsz9JYoQQ4j4oFBUKGbeGuo3/L8OdfL3cvHih6gsAvPDvCw6JT+R+MXGJfPWvkcSsfKvJffURHhkOQOE8hbMrLIeSJEYIIe5Tfh9P8noZUws/W7I/w7YvVDOSl4OXD7L61Gq7xyZyv8fHrQOgW81ilCzgc199HI4+DMAzIc9kW1yOJEmMEEJkwb9vNgbgx9VHOH81/dVK7ib3lCrXryx7hcRkqXIt7t/6wxc5cuE6ACMfC77vfuYfmQ9Afq/82RKXo0kSI4QQWVDIz4u+j5QCoPnYVRm2bV6iOSEFjX05uvzdxd6hiVxKa82TP20E4Oe+Yfe0M+/d/Zy9fpZyAeUo7lc8O0N0GKckMUqp/EqppUqpg9b/5kunXbJSarv1Mc/RcQohhC2GWHfyvRqfxNI95zJsO6GVsTPq0ZijTNo1ye6xidxnyF+7AGNyebNK9z+X5WjMUaLio3A3pb9FQE7nrH1iBgHLtNajlVKDrK/fTaNdnNY61KGRCSGErSIjYFJ73IDdpZOIOB0DM8BSKj8mZd3LN6QrhPVLuSSPex7+7Pgnj897nC+2fEFwwWBqBdVyTvzC5ZyJjuPXjScAmPXiI1nq63qicTuq18O9shyXszjrdlInYIr1+RTgMSfFIYQQ9yeka8qW7QA+Hm74ehp/Fx44d9U4GBmR5tbv5fKV4/067wPwzJJn2H1RytQJ27T8wrhl+WbLCuT38bjvfrTWPLXwKQAKeBXIlticwVkjMYW11metzyOB9MbDvJRS4UASMFpr/VdajZRS/YH+ACVKlMjmUIUQIg1h/e4YYQEom5BM5aGLIQEW92tIpUVPpHv5E5We4ODlg8w6MIueC3oyre00QguF2jlo4cp+XnuU6wnJAAxoVi5LfT37z7NoNABhQWFZjs1Z7DYSo5T6Vym1K41Hp9vbaa01WD/J1EpqrcOAJ4GvlFJl02qktf5Rax2mtQ4LDAzM3i9ECCFs5O1h5qseoQC0+WpNyi+J9AytN5SOZY2t3j/e+LG9wxMu7Ep8IiPm7wFg/sAGKJVR6dGM/X34bzZHGrWW1vRYg6fZM1tidAa7JTFa6xZa6+A0HnOBc0qpIgDW/55Pp4/T1v8eAVYC1e0VrxBCZIfHqhfF3Wz8gjlxKTbT9h81+IhC3oXYG7VXdvMV6XrsO2NPmEerFiG4qP9993Mx7iLvrzVuZc54dIZL7tJ7O2fNiZkH9LE+7wPMvbuBUiqfUsrT+rwgUB/Y47AIhRDiPq17txkAZ6/EcyMpOdP2nSsY1YN/3furXeMSrmnFvvMpe8J83q1alvrqMs9Y2t+pbCeqFKiS5diczVlJzGigpVLqINDC+hqlVJhSaoK1TWUgXCm1A1iBMSdGkhghRI5XKK8XLzUx7n5vOxmNcdc8fU9WehKAibsmYtHply8QD56kZAv9Jhu3fqY8U/u+94QB+PPgn0TFG3W+RtYfmS3xOZtTkhit9SWtdXOtdXnrbaco6/FwrfVz1ufrtdYhWutq1v9OdEasQghxP95qVTHl+dT/jmfYNp9XPsoFGBM1N5zZYNe4hGu5mcDUKBFA4wr3P+fzRvKNlCrqv7T7JUtzanISZ61OEkKIXM1sUoQWD2D7yWiGzdtNh2oPZbgkdvgjw+m1sBdD1g1hefflqRuET0pzuXYqd+1LI1zX3rNXWHPwIgDTnq2Tpb7+b+v/AVAtsBrVArN2SyonkbIDQghhJ15uZgJ9jZUfnb5bm2Hbm79YLsRd4NTVU6kbRMw29p
3JSDr70gjXk5Bkoe3XawAY36sGPp73P+YQmxjLlD3G1mw363flFjISI4QQdlQm0Aei4GRUHNM3neCJ2unvZTWm0RjeWf0Or698nVkdZqVuEBQC/Rak/2aT2mdDxCIneHfOTgAK+HjQJrjInSdtGZWLjEjZjHHiLmM2RrPizSjoXTDbY3UmSWKEEMKOVOQudpf+xihJMB8SI/LhbkpjEDykK61qPM07vMO+qH2cvHKS4nldsyifyJrtJ6P5c9tpAFa83SR1g5ujcrftGJ1KUAiEdCXRksiPO38EYNgjw+wQrXNJEiOEEPYS0hUAH4y/qC9dT2DHyWhqlsyH4raJldbbROawfgypM4RRG0cxYPkA5j6WaveJDFnQnCaJYlrnmombD5qkZEvKnjAjHwsmr1c6xRkzG5Wzmn/wTwBqB9Umv1f+bIszp5AkRggh7OW20gSlky20HLwIgJGVg+ldt+StdrfdBupaoSujNo7iSMwRNp7dSJ0imU/oTLYkM3LDSOaYThr9H57LY+Uey76vQzjMazO2A0bS26tO1svo3FyR9FGDj7LcV04kE3uFEMIB3Mwm5r5SH4AP/trF5esJabYzm8x80eQLAJ775zmSLEkZ9rv46GJCp4Uy5+CclGOTd03OnqCFQ+08Fc2CCKOs4L9vNM7yaNqBywcAo8BjkE9QluPLiSSJEUIIB6lWPIAuNYoBUP/TNJZRW7Uo0SJl6D+9mkrJlmTazGnD26vfBiCfZz7+sxh9H445TLIl852CRc6RbNF0/Na4jfRZ16rky0KF6ps+3fQpAB/U+yDLfeVUksQIIYQDfdrFmIwZm5DMhDVH0myjlGJOR2NkZdaBWey5dOdm5ZfiLhE6LZTT14zJnxNbTWR1z9X4YqKL9gFg5cmV9vkChF08O8XY1K5soA/dwrI+oTvRksimyE0ANCrWKMv95VSSxAghhAO5mU2ssq44GbVgL5Ex8Wm2K+hdkEG1BwHQY34PYjHKERyOPkyT343r3UxuhPcKp3aR2inX9dJ+AHy7/Vs7fQUiu60+cIGV+43in3MHNMiePk+tBowaSe6mdCYH5wKSxAghhIOVLODDsw1KA1D3k2Vo0q6t9FTlpwgpaIzc1FWnOEgCj819DIA6Reqwrfc2PM2ed1xTBuMX1qHoQ1xJuGKnr0Bkl2s3knj6Z2PE5Oe+YfhmYVO723215SsAngl+Jlv6y6kkiRFCCCcY0r5yyvNjF6+n225a22kAaAWdTZEANC3elAmtJqTZ3oSiS3mjUvG8Q/OyK1xhJ62/NEZMQosH0KxS4Wzp82LcRY5dOQZAaf/S2dJnTiVJjBBCOIFSig3vNQfg3NUbXL2RmGY7s8nM2p63ShaEBobyddOvM+z75l/fNzc5EznTrxuPczo6DoCZL9TNlj5jE2Np+ntTAHpU7JHr9wuSJEYIIZwkyN+LkZ2qALD7zBUSkixptvP39GedpRhTLIWY0nZKpr+YSuQtwUM+D3H5xmWOxRzL7rBFNjgdHcfgP3cBMPeV+ni6mbPcp9aaOr8Z+woV8i7E+3Xez3KfOZ0kMUII4US965XCy934BdbumzXptsuLiRp4YVK2/dh+pforAIwNH5v1IEW20lpTf7SxxL533ZJUKx6QLf32Wdwn5fnirott/rfiynL/VyiEEDlcSFF/AA6dv8bsLWlUsL4PbUu1BWDlqZVcT0x/zo1wvJu78ppNihHWkbisWnZiGdvObwNg3RPrcvWKpNtJEiOEEE5mViolkXlr1g7OX0l72fW9cDe7061CNwC+2/5dlvsT2WPdoYvM23EGgM2DW2TLnJWYGzH8b8X/APi13a/k9cib5T5dhSQxQgiRA/h4uNGrrlErp/bHy0hKTnt+zL14o+YbAEzbM41ES9oTh4XjxMQm8tSEjQD82Lsm+bNhV16AV5e/CkCDog2oGlg1W/p0FZLECCFEDjHqsRA83Iwfyy/+siXL/fl6+NKiRAsAPtqQOwsAugqLRVNtxD
8A1C2Tn1ZVsqeW0Y4LO9h6fisA3zT7Jlv6dCWSxAghRA6yebCRdPy79zxzsmF+zKgGowCYc3AOMTdistyfuD+D/4pIef7rc9mznFprTa+FvQD4pOEnD8w8mNtJEiOEEDmIv7c7U54xygi8OWsHpy7HZqk/H3cfnqz0JACvrXgty/GJe7f+0EWmbzoJwH/vNcNsyp69WxYfW5zyvH3p9tnSp6vJnv2NhRBCZE1kBEwyfhE1BlYUuMa5qzc49eVIipbOj4rcBUEh99X1W7Xe4rd9v7Hl3BY2R26mVlCtbAxcZCTqegJPWufBfNG9GkX8vbOlX43mndXvADC9/fRcv6ldemQkRgghnC2ka6oEpVRBH0zWX0wRp2KM8yFd76t7d5M7YxqNAeCZJc+QkJyQtXiFTSwWTY2RSwFoGxxE5xrFsq3vNRgr2B7yeYjggsHZ1q+rkZEYIYRwtrB+xuM2Cnj4RhLBw5ZAArxdqiKvhJW777doW7otX235ijPXz9B7UW9mPjozi0GLzPSbvDnl+bhKO2HS0MwvCuma6t9CWt5TlwAY03jMfceXG8hIjBBC5FC+nm4se7MxAJ8t2c+2E5ez1N+cjnMA2HNpDwuOLMhyfCJ9E9ceZdWBCwBsH9oSFTHbuGWYkcgIiJidad8nSeKKsuBmcqNaYLXsCNdlSRIjhBA5WNlA35RdXR8ft57L1+//VpCvh29KVexNkZuyJT6R2tYTlxk5fw8Ac156hIA81v1ggkKg34L0HzbOefpMGcnsJw0/sUv8rkRuJwkhRA73dL1SLNh5lo1Ho6g+cikHP2qLu/n+/gYNLRSKj7sPfxz8gyF1hzyQy3LtKSY2kc7j1gMwqG0lapbMd28d3DbB+27xWLiMhRUmo/J1q5KtshRrbiAjMUII4QKmP39rb5FmY1dmqa/qhaoDcPrq6Sz1I+6k9a0N7WqXys+Ljcve0/XJwZ3ZFFSeyVxhsLpEbxVJW3WG5uo0IaYT1DKdopXJKFnQM3+1B6LAY2ZkJEYIIVyAyaTY/WFrqgxbwsmoON77YyefdL6/LeY7l+/M2tNrWXh0IS+HvpzNkT64Wn65OuX59P62b2intebrrV8zcc/EVEMLhbwLEZgnkEfylSfAMwAfdx9K+5fmkYceya6wXZokMUII4SJ8PN1Y9XYTGn+2kumbTlKrVP77WrbbsmRLAL7f8b0kMdlkyF8RHDp/DYBdH7a2eUO7w9GHeWzuYymvqxeqzhs136Bi/op4u2XPnjK5mYxFCSGECylZwIep1h193/h9BztPRd9XP0V9iwJwMe5idoX2wJr63zF+2XACgLXvNsXX07bxgS/Cv7gjgVnRfQVT204ltFCoJDA2kiRGCCFcTKMKgbzZsgIAHb9dx5nouHvuo/fDvQGYd3hetsb2oFl94AJD5+4GYEb/uhTLlyfTa7TWdJ3XlUm7JwHwSugrRPSJoKB3QbvGmhsprbWzY8hWYWFhOjw83NlhCCGE3T03ZTP/7j0PGLcwfH/rZJzol/keMFcTrvLI9EfwMnuxudfmTNuL1A6eu0rLL1fzhHkZrxXaTlDeDEZPIiMgKITkPvNoOKMhVxOvAvBXp78oG3BvE4AfREqpLVrrsLuPy0iMEEK4qJ+eDqNogPGLM3jYEjS2/1Hq5+FHqbyliE+O50j0EXuFmGudvxqfMpH3Of8tBMUeyviCoBAswV1oPqt5SgKz7ol1ksBkkUzsFUIIF6WUYtXbTSg3eBEAG49GUad0fmwtBdirci9GbRzFjxE/MrrhaPsFmstcu5FE7Y+WAcZS6rKevkBIpiNg/f95jkvxRrmADU9uwMfdx96h5noyEiOEEC7MzWxi38g2Ka+3Ho+2+dp2ZdoBsODIAhItidkdWq4Ul5Bs1LMCgvJ6MfMF25ZST983nY1njWrWK7qvkAQmm8hIjBBCuDgvdzMRw1ux++ORlNdH2TmyPiHF/FEZjcmEdMUvrB91i9Rlw9kNrDixglalZAfYjN
xISqby0MUpr/97rxlKZTzutfzEcl5b8VrK66ltp8oE3mwkIzFCCJEL+Hm5E9LmOfboksQmJrPr9JX058jcVmjwjZpvAPDppk8dFapLSkq2UHHIrQTm8MftMkxg5h6aS8iUkDsSmE8afpKyW7LIHjISI4QQuYTPI89RsspT1PtkOSRAjbgA5rz0SOpftrfV5qlcoDIeJg/Ox53nWMwxSvmXcmzQzhI+yaaK0QAaTfjRKGZY6zjWLpUf05RxtxpYVx4BnL52mjZz2txx/fT20wkuGJwtYYs7yUiMEELkIkX8vVnzTlMAtp6IpvsP/2V6zZC6QwB4f+37do0tR4mYbSQfmdBoNh6NSnldq1R+THcnhUEhENKVL7d8eUcCM/PRmUT0iZAExo5kJEYIIXKZ4vnzsOadpjQcs4LNxy7T4f/W8vfABum271i2I0PXDyXiYgTnY89TKE8hB0brREEZryi6kZR8xy2kPSNaY/ZI/Wsz0ZJIm9ltOB9n7NnTv2p/BlYfmP3xilQkiRFCiFyoeP48/PdeM+p9spyI0zHUH72cNe80xZRGTR+zyczrNV/nyy1fMnD5QGY+OtP22y0hXSGsnx2+AueKS7hzEu/eEW3w9jCnarf13Fb6LO6T8nrh4wspnre4Q2IUcjtJCCFyrSL+3mz9wCj2eDo6jgpDFpGQZEmzbb8qRiKy59IeTl49advtltsmCOcm0bEJdyQwBz9qe0cCk2RJYsR/IwiZEnJHArOl1xZJYBxMkhghhMjF8vt4sHO4sXQ6yaKpMGQRyWmUm1FK8XGDjwHotbCXcfDm7ZbbHolP/8XPtbrQwDuGkIf8aaBOsS9qn8O+Hns7GRVL6IilALibFUc+boe72fhVadEWhq0fRvVp1Zl1YBYA3m7efNf8OyL6ROBh9nBa3A8qSWKEECKXy+vlzoFRbVNebz4WxY3k5FTtHi3zKABR8VH8wbU7zi07voyQKSHU+KUGX275kpgbMQDEKAuz9s+yY/SOs+HIJRqOWQFAuUK+7B/ZNuX22/oz66k2tRp/HPwDgKqBVfnvif/Y9NQmGhVr5LSYH3SSxAghxAPAw83EoY/a8pC/FwDbTkSz/vDFO9oopfit3W8ADDNFcQULk3dNJmRKCP9b+b+Udk2KN2Fp16WssRQFYMXJFY75Iuxo8rqj9PxxAwCdQh/i3zcaYzIptNa8uPRFXlj6AmCMvPz3xH/82u5XfD18nRmyQCb2CiHEA8PNbGL9e805+OmHXLqewJM/beS15uV5vWWFlDYhgSE0Kd6ElSdXUt90CraMBcDT7MnPrX+mamDV23o0U0N7sjXuAgcvH6R8vvKO/YKygcWi6f3zRtYdMmoaDX30YZ5pUBqAuKQ4av9aO6Xt540/p3Wp1k6JU6RNRmKEEOIBU76QH6UKGLV7vl52kOZjV5JsuTVP5ovGX9zRfnGXxYT3Cr8rgTF01cZoxOTdk+0XsJ0ka02Z9xemJDCzX6yXksBcSbhyRwKztudaSWByIElihBDiARSU14vF/2sIwOEL1yn7/kLOXYkHwN3sTrilOCssRYnoE0FR36Lp9tMUbwDmHZ5HkiXJ/oFnkyvxiWw+dmsTuy1DWhBWKj8A1xOvU396/ZRzO57egb+nv8NjFJmTJEYIIR5QlYLysnfErR1m63y8jCnrjwHgiaIgqfdFuZsvJqoFVgOMya85ndaaN2ZuZ8/ZKwCEFPXn8MftKODrCUCyJZm6vxmVqQM8A9j59E5MSn5V5lTynRFCiAeYt4eZY6Pb82SdEgAMm7ebah/+k+Yy7PS8GfYmAGPDx9olxuxyNiaO0u8t5I9tpwEoU9CHvwc2wHzbBoAtZ7dMeb6i+4pMq1QL55IkRgghBB8/HsJ8a2mCmDjjVsv5q/E2XXuzMvORmCNExUdl0to5Pvx7t1EY06pmyXwU8vO6o83XW7/mQtwFADY+uRE3k6x9yekkiRFCCAFAcFF/jnzcjqrFjPkfRy5ep9SgBURdT8
j02ndqvQPAZ5s/s2uMNgmfZFTqntSe6z+0ZsPQurTe/CwzPEayouBnHK34He7nd99xyckrJ5kQMQGAJV2WkMc9jzMiF/dIkhghhBApTCbFvAENqPJQ3pRjNUYu5cVpW0hKTrtkAcATlZ4AYP6R+SQmJ9o9zgxFzEZH7mTbyctEnI5JOVy9RAClC/igUCmVp8GYJ9Puz3YAvBX2Fg/5PuSUsMW9k7EyIYQQqfh5ulOndH765C/JlP+Os3h3JOUGL+LFxmV5t03FVHNF3ExudCzbkXmH5/FZ+Ge8X+d9p8SdlGzhUOQVouOK0jNhMACjO4fQs3aJdK/5Zts3gLEXTp8qfdJtJ3IeGYkRQgiRJoXiw07B7BvZhjKBxr4y41cdpvR7C/lk0V40d07+vZm4TN83neuJ1x0aa3xiMs9PDafc4EVExxkjQS0fLsyRj9tlmMDE3IhJuY0097G5DolVZB8ZiRFCCJEhL3czy99swoWrN2j91Wqirifww6ojNPWIIq+XO0XjEvH3dsfH3YcOZTrw95G/GbZ+GJ83/tzoIHySbdWuQ7pCWL97iu3Yxev0/nkjJ6PiUo75ebpRqUhe6j4dlun1H234CIDHyz2e4X44ImdyykiMUqqbUmq3UsqilEr3X5lSqo1Sar9S6pBSapAjYxRCCHGnQD9Ptn7Qks2DW1CxsB9gbBpX7cN/KDVoAZPWHeX92sYtnCXHlhB5PdK4MGI2REZk3HlkhG2JDhCXkMzI+XsoNWgBTT5fmZLAtHy4MPtGtqHKQ/6YbVgaHRUfxaJjiwAYVFt+xbgiZ43E7AI6Az+k10ApZQa+A1oCp4DNSql5Wus9jglRCCFEWgL9PFnyeiP0pPwcvxQL1jqSH/69hw//3oNHweZ4Bi6jzZw2bOu9DQXGRNp+C1L1lWxJZvel3Wya/zJBXOfRdN7zyIVrjP3nAAsizqY6N7zDw/R5pNQ97+kyZvMYAHpV7iWrkVyUU5IYrfVeILN/cLWBQ1rrI9a2M4BOgCQxQgiRVZERxjLkjM4HhWTYhUJRqoAPx95qz8moWAb/tYvVBy6QcLEZnoHLSNbJVBozjDlJMfh4mNm8+SShJQLQ5st8H/EFy04su9WZ9b5AyOVLXLpqYt/Zq+w+E8OOU9HsOn0l1Xt3q1mMIe0fxj+P+/189cQmxrLgiJFUDaw+8L76EM6Xk+fEFAVO3vb6FFDHSbEIIUTuYV1anKHbliDbonj+PEx9xiiYeDU+kf9b68P00//DI+hPjh/3Jt9VxQdrPsajwNpU1yZE1aWh+2Y2+iXTcsLHJFxqdsf5UgXyEFzUnzplCtCzVnHczVmfCTFlzxTAmAsjozCuy25JjFLqXyAojVODtdbZOgVcKdUf6A9QokT6s9CFEEJgTJ69xwm098LPy533WzTHsqEHM/fPZFBJY86KB7cSmIruT1LdvwMebm6YHzLRcVd/HucSnoX+YVijVymWLw8PF8lLoJ9ntsd3+tppxm0fB8Bbtd7K9v6F49gtidFat8hiF6eB4re9LmY9ltZ7/Qj8CBAWFmZ7wQ8hhBB2M6TuEBYcWcC1xGsAVClQhR9b/Uhej7ypG5/yIUhHE6mSqVsxibIBgXaJ6WLcRdrMMYpeti3dNu1YhMvIyfvEbAbKK6VKK6U8gJ7APCfHJIQQ4h4s776c8ZZANlqKMePRGRkmDe/ofACM2jDKLrHEJcXR9PemANQJqsOYRmPs8j7CcZy1xPpxpdQpoB6wQCm1xHr8IaXUQgCtdRIwAFgC7AV+11rvTq9PIYQQOY+3mzf18SaPDb9umuANQPi58GzfLE9rTfs/jInMvu6+TGg9IVv7F87hlCRGa/2n1rqY1tpTa11Ya93aevyM1rrdbe0Waq0raK3Laq0/ckasQgghHMMdRa/KvQD4fvv32dr31D1TUypUr+6xOlv7Fs6Tk28nCSGEeMC8UPUFwFg9lF2FJM9dP8fn4cbuwXM6zsHdfH/LskXOI0mMEEKIHC
PAK4B6ReoBt5ZBZ9Xjcx8HoGfFnlTIVyFb+hQ5gyQxQgghcpRhjwwD4OutX5NoydpozPoz67maeBWA9+q8l+XYRM4iSYwQQogcpahvUWoF1QLg54ifs9TXC0uN21OzOszCpORXXm4j31EhhBA5zscNPgbg2+3fEpsYe199zDowC4CHfB6iUv5K2RabyDkkiRFCCJHjBPkE0aaUsSld70W97/n6ZDQj/hsBwE+tfsrW2ETOIUmMEEKIHOmjBsbOGgcuH2DLuS33dO1cjH1mygWUo0ReKUeTW+XkApBCCCFyusyqYd9sk0lF7LR4mD34ufXPPLPkGfou7sv6J9bj5+GX6XUazTBTFABjm4y95/cVrkNGYoQQQtyfkK62JSf3WBH7drWCalEtsBoAjWY2wqItmV4TQQIAhfIUoox/mft6X+EaZCRGCCHE/bFzNeybprSZQui0UJIsSfRZ1IchdYfgbnbH3WQ8PMwemJXZeJjMfKWiARhWb5jdYxPOJUmMEEKIHM1sMrO823KazWrG9gvb6fp3JqM6yvhP/Yfq2z844VSSxAghhMjxAvMEstxSlD2XD5CYryQJaC6QjDeKZIzVSMZ/jec1yrTGbDI7OWphb5LECCGEcAmBmGmcrzL0W+DsUEQOIRN7hRBCCOGSJIkRQgghhEuSJEYIIYQQLkmSGCGEEEK4JElihBBCCOGSJIkRQgghhEuSJEYIIYQQLkn2iRFCCJEzZFZM8j4LSYrcS5IYIYQQzmdLgcgsFJIUuZMkMUIIIZzPQcUkRe4ic2KEEEII4ZIkiRFCCCGES5IkRgghhBAuSZIYIYQQQrgkSWKEEEII4ZIkiRFCCCGES5IkRgghhBAuSZIYIYQQQrgkSWKEEEII4ZIkiRFCCCGES5IkRgghhBAuSZIYIYQQQrgkSWKEEEII4ZKU1trZMWQrpdQF4Lidui8IXLRT3yI1+bwdRz5rx5HP2rHk83Yce37WJbXWgXcfzHVJjD0ppcK11mHOjuNBIZ+348hn7TjyWTuWfN6O44zPWm4nCSGEEMIlSRIjhBBCCJckScy9+dHZATxg5PN2HPmsHUc+a8eSz9txHP5Zy5wYIYQQQrgkGYkRQgghhEuSJCYNSqk2Sqn9SqlDSqlBaZz3VErNtJ7fqJQq5YQwcwUbPus3lFJ7lFI7lVLLlFIlnRFnbpHZ531buy5KKa2UklUd98mWz1op1d3673u3Uuo3R8eYW9jwc6SEUmqFUmqb9WdJO2fEmRsopX5WSp1XSu1K57xSSn1j/V7sVErVsGtAWmt53PYAzMBhoAzgAewAHr6rzcvAeOvznsBMZ8ftig8bP+umQB7r85fks7bv521t5wesBjYAYc6O2xUfNv7bLg9sA/JZXxdydtyu+LDxs/4ReMn6/GHgmLPjdtUH0AioAexK53w7YBGggLrARnvGIyMxqdUGDmmtj2itE4AZQKe72nQCplifzwaaK6WUA2PMLTL9rLXWK7TWsdaXG4BiDo4xN7Hl3zbASOBTIN6RweUytnzWzwPfaa0vA2itzzs4xtzCls9aA3mtz/2BMw6ML1fRWq8GojJo0gmYqg0bgAClVBF7xSNJTGpFgZO3vT5lPZZmG611EhADFHBIdLmLLZ/17Z7FyPDF/cn087YO/RbXWi9wZGC5kC3/tisAFZRS65RSG5RSbRwWXe5iy2c9HOillDoFLAQGOia0B9K9/lzPEjd7dSxEdlJK9QLCgMbOjiW3UkqZgC+Avk4O5UHhhnFLqQnGCONqpVSI1jramUHlUk8Ak7XWY5VS9YBpSqlgrbXF2YGJrJGRmNROA8Vve13MeizNNkopN4zhyUsOiS53seWzRinVAhgMdNRa33BQbLlRZp+3HxAMrFRKHcO4nz1PJvfeF1v+bZ8C5mmtE7XWR4EDGEmNuDe2fNbPAr8DaK3/A7ww6vyI7GfTz/XsIklMapuB8kqp0kopD4yJu/PuajMP6GN93hVYrq0zmsQ9yfSzVk
pVB37ASGBkzkDWZPh5a61jtNYFtdaltNalMOYgddRahzsnXJdmy8+RvzBGYVBKFcS4vXTEgTHmFrZ81ieA5gBKqcoYScwFh0b54JgHPG1dpVQXiNFan7XXm8ntpLtorZOUUgOAJRiz3n/WWu9WSo0AwrXW84CJGMORhzAmOPV0XsSuy8bP+jPAF5hlnTt9Qmvd0WlBuzAbP2+RDWz8rJcArZRSe4Bk4G2ttYzo3iMbP+s3gZ+UUq9jTPLtK3943h+l1HSM5LugdY7RMMAdQGs9HmPOUTvgEBAL9LNrPPJ9FEIIIYQrkttJQgghhHBJksQIIYQQwiVJEiOEEEIIlyRJjBBCCCFckiQxQgghhHBJksQIIYQQwiVJEiOEyJBSqoBSarv1EamUOm19fk0pNc5O7/k/pdTT9uj7fiiljlk3pEvv/AyllOy2K4SDyT4xQgibKaWGA9e01p/b8T3cgK1ADWuBVaezlmEI01pfTOd8Y6CX1vp5hwYmxANORmKEEPdFKdVEKTXf+ny4UmqKUmqNUuq4UqqzUmqMUipCKbVYKeVubVdTKbVKKbVFKbVEKVUkja6bAVtvJjBKqVeVUnuUUjuVUjOsx3yUUj8rpTYppbYppTpZj5uVUp8rpXZZ2w+0Hm9ubRdhvc7TevyYUupDpdRW67lK1uMFlFL/KKV2K6UmAOq2912glNphfY8e1pjXAC2sCZgQwkEkiRFCZJeyGAlIR+AXYIXWOgSIA9pbE5n/A7pqrWsCPwMfpdFPfWDLba8HAdW11lWBF63HBmPULKsNNAU+U0r5AP2BUkCotf2vSikvYDLQwxqPG/DSbf1f1FrXAL4H3rIeGwas1VpXAf4ESliPtwHOaK2raa2DgcUA1mrIh4Bq9/B5CSGySJIYIUR2WaS1TgQiMGrYLLYej8BILCpiVMleqpTaDgzBqHB7tyLcWZxvJ0Yy0gu4eXupFTDI2s9KjIJ+JYAWwA83R3G01lHW9z2qtT5gvXYK0Oi2/v+w/neLNU6s53+x9rEAuHzb19JSKfWpUqqh1jrmtn7OAw+l+ckIIexChj6FENnlBhijEkqpxNsK7FkwftYoYLfWul4m/cRhJCU3tcdIKjoAg5VSIda+umit999+obVI6H3FjVGEMcOfiVrrA0qpGhgF7kYppZZprUdYT3tZYxdCOIiMxAghHGU/EKiUqgeglHJXSlVJo91eoJy1jQkorrVeAbwL+GNUNV8CDFTWrEUpVd167VLghZtzU5RS+a3vW0opVc7apjewKpNYVwNPWvtoC+SzPn8IiNVa/4JRYb3GbddUAHbZ8DkIIbKJjMQIIRxCa52glOoKfKOU8sf4+fMVsPuupouAadbnZuAXa3sFfKO1jlZKjbReu9Oa6BwFHgUmYCQTO5VSicBPWutvlVL9gFnW5GYzMD6TcD8EpiuldgPrgRPW4yEY828sQCLWuTVKqcJAnNY68l4/FyHE/ZMl1kKIHEcp9Sfwjtb6oLNjsYVS6nXgitZ6orNjEeJBIreThBA50SCMCb6uIhpjwrAQwoFkJEYIIYQQLklGYoQQQgjhkiSJEUIIIYRLkiRGCCGEEC5JkhghhBBCuCRJYoQQQgjhkv4fl7NKah0GcoIAAAAASUVORK5CYII=\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "plt.figure(figsize=(9, 6))\n", + "plt.xlabel(\"Time (seconds)\")\n", + "plt.ylabel(\"Yaw angle (rads)\")\n", + "plt.step(gimbal_t, gt_gimbal_signal, label=\"Reference val\", where='post')\n", + "plt.step(jetson_t, jetson_signal, label=\"Jetson val\", where='post')\n", + "# plt.step(gimbal_t, extrapolate_jetson_signal, label=\"Extrapolated Jetson val\")\n", + "plt.step(gimbal_t, xhat_arr, label=\"KF val\", where='post')\n", + "plt.legend()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "65de680d", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.15" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} From 8fde32d2ba37a093f55be6cd3aad3221523c9172 Mon Sep 17 00:00:00 2001 From: cpeng14 <92764093+Franciszabi@users.noreply.github.com> Date: Sat, 22 Apr 2023 23:05:10 -0500 Subject: [PATCH 50/74] Added acceleration and measurement noise matrix Added acceleration and measurement noise matrix --- Aiming/Tracking/KF_tracker_modified.py | 253 +++++++++++++++++++++++++ 1 file changed, 253 insertions(+) create mode 100644 Aiming/Tracking/KF_tracker_modified.py diff --git a/Aiming/Tracking/KF_tracker_modified.py b/Aiming/Tracking/KF_tracker_modified.py new file mode 100644 index 0000000..1dea4c8 --- /dev/null +++ b/Aiming/Tracking/KF_tracker_modified.py @@ -0,0 +1,253 @@ +"""Defines a basic linear tracker and base classes for future dev.""" +import scipy.optimize +import numpy as np +import cv2 + +from .consistent_id_gen import ConsistentIdGenerator + +# TODO: 
move this to config +FRAME_BUFFER_SIZE = 10 + +# TODO: this class should be part of abstract base tracker class + + +class KalmanTracker(object): + """A class that represents a Kalman Fitler tracker. + + It is the matrix for KF tracking that will be stored in each armor. + """ + def __init__(self): + """Initialize from armor. + """ + self.kalman = cv2.KalmanFilter(4, 2) + self.kalman.measurementMatrix = np.array([[1, 0, 0, 0], [0, 1, 0, 0]], np.float32) + self.kalman.transitionMatrix = np.array([[1, 0, 1, 0], [0, 1, 0, 1], [0, 0, 1, 0], [0, 0, 0, 1]], np.float32) + self.kalman.measurementNoiseCov = np.array([[1, 0], [0, 1]], np.float32) * 1 + self.kalman.processNoiseCov = np.array([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]], np.float32) * 1 + self.kalman.controlMatrix = np.array([[0.5, 0], + [0, 0.5], + [1, 0], + [0, 1]], np.float32) + initial_state = np.array([320, 180, 0, 0], np.float32) # initialize the KF position to center + self.kalman.statePre = initial_state + + self.measurement = np.array((2, 1), np.float32) + self.prediction = np.zeros((2, 1), np.float32) + self.acceleration = np.array((2,1), np.float32) + + def update(self, x, y): + """Update the value of KF_matrix. 
+ + Args: + x (int): detected x value + y (int): detected y value + """ + acceleration = np.array([[(x - self.measurement[0])],[(y - self.measurement[1])]], np.float32) + self.measurement = np.array([[x], [y]], np.float32) + #self.kalman.transitionMatrix[2, 0] = 0.5 * acceleration[0, 0] + #self.kalman.transitionMatrix[3, 1] = 0.5 * acceleration[1, 0] + self.kalman.correct(self.measurement) + self.prediction = self.kalman.predict() + + def get_prediction(self): + """Predict the x and y values + + Returns: + tuple: predicted x and y values + """ + return int(self.prediction[0]), int(self.prediction[1]) + +# class KalmanTracker(object): # 匀加速运动模型 +# def __init__(self, dt): +# self.kalman = cv2.KalmanFilter(6, 2) +# self.kalman.measurementMatrix = np.array([[1, 0, 0, 0, 0, 0], [0, 1, 0, 0, 0, 0]], np.float32) +# self.kalman.transitionMatrix = np.array([ +# [1, 0, dt, 0, 0.5*dt**2, 0], +# [0, 1, 0, dt, 0, 0.5*dt**2], +# [0, 0, 1, 0, dt, 0], +# [0, 0, 0, 1, 0, dt], +# [0, 0, 0, 0, 1, 0], +# [0, 0, 0, 0, 0, 1]], np.float32) +# self.kalman.processNoiseCov = np.eye(6, dtype=np.float32) * 0.03 + +# initial_state = np.array([320, 180, 0, 0, 0, 0], np.float32) # initialize the KF position to center +# self.kalman.statePre = initial_state + +# self.measurement = np.array((2, 1), np.float32) +# self.prediction = np.zeros((2, 1), np.float32) + +# def update(self, x, y): +# self.measurement = np.array([[x], [y]], np.float32) +# self.kalman.correct(self.measurement) +# self.prediction = self.kalman.predict() + +# def get_prediction(self): +# return int(self.prediction[0]), int(self.prediction[1]) + +class tracked_armor(object): + """A class that represents a tracked armor. + + It stores the history of bounding boxes and ROIs, and can predict the + bounding box of the next frame. + """ + + def __init__(self, bbox, roi, frame_tick, armor_id): + """Initialize from prediction. 
+ + Args: + bbox (tuple): (center_x, center_y, w, h) + roi (np.ndarray): ROI of the armor + frame_tick (int): frame tick + armor_id (int): unique ID + """ + self.bbox_buffer = [bbox] + self.roi_buffer = [roi] + self.observed_frame_tick = [frame_tick] + self.armor_id = armor_id # unique ID + self.KF_matrix = KalmanTracker() + # self.KF_matrix = KalmanTracker(0.05) # dt = 0.05 + + def compute_cost(self, other_armor): + """Compute the cost of matching this armor with another armor. + + Args: + other_armor (tracked_armor): another armor + + Returns: + float: cost + """ + assert isinstance(other_armor, tracked_armor) + # TODO: use more sophisticated metrics (e.g., RGB) as cost function + c_x, c_y, w, h = self.bbox_buffer[-1] + o_c_x, o_c_y, o_w, o_h = other_armor.bbox_buffer[-1] + return np.square(c_x - o_c_x) + np.square(c_y - o_c_y) + + def update(self, other_armor, frame_tick): + """Update the state of this armor with matched armor. + + Args: + other_armor (tracked_armor): another armor + frame_tick (int): frame tick + """ + # Only call if these two armors are matched + self.bbox_buffer.append(other_armor.bbox_buffer[-1]) + self.roi_buffer.append(other_armor.roi_buffer[-1]) + self.observed_frame_tick.append(frame_tick) + + # Maintain each armor's buffer so that anything older than + # FRAME_BUFFER_SIZE is dropped + self.bbox_buffer = self.bbox_buffer[-FRAME_BUFFER_SIZE:] + self.roi_buffer = self.roi_buffer[-FRAME_BUFFER_SIZE:] + + def predict_bbox(self, cur_frame_tick): + """Predict the bounding box of the tracked armor at cur frame tick. 
+ + Args: + cur_frame_tick (int): current frame tick + + TODO + - Use Kalman filter to do prediction + - Support future frame idx for predictions + + Returns: + tuple: (center_x, center_y, w, h) + """ + if cur_frame_tick == self.observed_frame_tick[-1] or len( + self.bbox_buffer) == 1: + # print(self.bbox_buffer[-1]) + c_x, c_y, w, h = self.bbox_buffer[-1] + self.KF_matrix.update(c_x, c_y) + return self.bbox_buffer[-1] + else: + # KF tracking + c_x, c_y, w, h = self.bbox_buffer[-1] + predicted_x, predicted_y = self.KF_matrix.get_prediction() + self.KF_matrix.update(predicted_x, predicted_y) + return (int(predicted_x), int(predicted_y), w, h) + + +class KF_tracker(object): + """ + Basic tracker that can handle only one target. + + It memorizes the state of last two predictions and do linear extrapolation + """ + + SE_THRESHOLD = 3200 # (40, 40) pixels away + + def __init__(self, config): + """Initialize the simple lineartracker. + + Args: + config (python object): shared config + """ + self.CFG = config + self.active_armors = [] + self.id_gen = ConsistentIdGenerator() + self.frame_tick = 0 # TODO: use timestamp may be a better idea + + def process_one(self, pred_list, enemy_team, rgb_img): + """Process one set of detections and rgb images. 
+ + Args: + pred_list (list): list of (name, conf, bbox) tuples + enemy_team (str): enemy team name (blue or red) + rgb_img (np.ndarray): RGB image + + Returns: + (list, list): list of tracked_armors (detections+predictions) and their IDs + """ + new_armors = [] + for name, conf, bbox in pred_list: + c_x, c_y, w, h = bbox + lower_x = int(c_x - w / 2) + upper_x = int(c_x + w / 2) + lower_y = int(c_y - h / 2) + upper_y = int(c_y + h / 2) + roi = rgb_img[lower_y:upper_y, lower_x:upper_x] + new_armors.append(tracked_armor(bbox, roi, self.frame_tick, -1)) + + if len(self.active_armors) > 0: + # Try to associate with current armors + cost_matrix = np.zeros( + (len(new_armors), len(self.active_armors)), dtype=float) + for i in range(len(new_armors)): + for j in range(len(self.active_armors)): + cost_ij = new_armors[i].compute_cost(self.active_armors[j]) + cost_matrix[i, j] = cost_ij + row_ind, col_ind = scipy.optimize.linear_sum_assignment( + cost_matrix) + + for i, j in zip(row_ind, col_ind): + if cost_matrix[i, j] < self.SE_THRESHOLD: + assert new_armors[i] is not None + self.active_armors[j].update( + new_armors[i], self.frame_tick) + new_armors[i] = None + + new_armors = [i for i in new_armors if i is not None] + + for n in new_armors: + n.armor_id = self.id_gen.get_id() + + # Maintain current buffer. 
If an armor has not been observed by 10 + # frames, it is dropped + self.active_armors = [i for i in self.active_armors + if self.frame_tick - i.observed_frame_tick[-1] < FRAME_BUFFER_SIZE] + + # Unassociated armors are registered as new armors + for a in new_armors: + self.active_armors.append(a) + + # Create a list of bbox and unique IDs to return + ret_bbox_list = [] + ret_id_list = [] + for a in self.active_armors: + # If an armor is observed, directly use the bbox + ret_bbox_list.append(a.predict_bbox(self.frame_tick)) + ret_id_list.append(a.armor_id) + + self.frame_tick += 1 + + return ret_bbox_list, ret_id_list + From c95334e24de79c674952cb9ae554cea8be002213 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sat, 22 Apr 2023 23:09:28 -0500 Subject: [PATCH 51/74] fix mdvs import error --- Camera/mdvs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Camera/mdvs.py b/Camera/mdvs.py index 896deb6..18b67a3 100644 --- a/Camera/mdvs.py +++ b/Camera/mdvs.py @@ -1,10 +1,10 @@ """Wrap the MDVS SDK to build a camera driver.""" import cv2 import numpy as np -import mvsdk import time import Utils +from Camera import mvsdk from Camera.camera_base import CameraBase From ab93861edf2ad1a005f9664c7fa3f2e5e55c882b Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sat, 22 Apr 2023 23:10:15 -0500 Subject: [PATCH 52/74] update MDVS camera FOV --- Camera/mdvs.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Camera/mdvs.py b/Camera/mdvs.py index 18b67a3..4d56dad 100644 --- a/Camera/mdvs.py +++ b/Camera/mdvs.py @@ -19,9 +19,9 @@ class mdvs_camera(CameraBase): """ # Computed using the tool at https://mindvision.com.cn/jtxx/list_108.aspx?lcid=21&lcids=1656 - # Config: 4mm lens, MV-SUA133GC - YAW_FOV_HALF = Utils.deg_to_rad(65.238) / 2 - PITCH_FOV_HALF = Utils.deg_to_rad(54.225) / 2 + # Config: 6mm lens, MV-SUA133GC + YAW_FOV_HALF = Utils.deg_to_rad(46.245) / 2 + PITCH_FOV_HALF = Utils.deg_to_rad(37.761) / 2 def __init__(self, cfg): """Initialize the 
MDVS camera. From f845b28bab720db4f424d8a5179b0c3f8c288f85 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sat, 22 Apr 2023 23:14:57 -0500 Subject: [PATCH 53/74] added an option for upside-down mounting; change img res to downscaling in MDVS --- Camera/mdvs.py | 2 ++ config.py | 7 +++++-- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/Camera/mdvs.py b/Camera/mdvs.py index 4d56dad..2fa4a64 100644 --- a/Camera/mdvs.py +++ b/Camera/mdvs.py @@ -126,6 +126,8 @@ def get_frame(self): frame = cv2.resize( frame, (self.width, self.height), interpolation=cv2.INTER_LINEAR) + if self.cfg.ROTATE_180: + frame = cv2.rotate(frame, cv2.ROTATE_180) return frame except mvsdk.CameraException as e: if e.error_code != mvsdk.CAMERA_STATUS_TIME_OUT: diff --git a/config.py b/config.py index 5683c92..aa4fbe9 100644 --- a/config.py +++ b/config.py @@ -8,15 +8,16 @@ # Define camera # from Camera.d455 import D455_camera +# from Camera.mdvs import mdvs_camera from Camera.read_from_file import fake_camera AUTOAIM_CAMERA = fake_camera # This param needs to be tuned per arena / camera setup -EXPOSURE_TIME = 30 +EXPOSURE_TIME = 5 # Compute some constants and define camera to use -IMG_HEIGHT = 360 +IMG_HEIGHT = 512 IMG_WIDTH = 640 IMG_CENTER_X = IMG_WIDTH // 2 @@ -24,3 +25,5 @@ DEBUG_DISPLAY = True DEFAULT_ENEMY_TEAM = 'red' + +ROTATE_180 = True # Camera is mounted upside down From 99f103a4946807dc7e8db9a8c02f8841d2328ab3 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sat, 22 Apr 2023 23:16:40 -0500 Subject: [PATCH 54/74] fix viz error when there is no bbox --- vision.py | 23 ++++++++++++----------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/vision.py b/vision.py index 5f52ec8..77c917f 100644 --- a/vision.py +++ b/vision.py @@ -71,17 +71,18 @@ def main(): if config.DEBUG_DISPLAY: viz_frame = frame.copy() - for i in range(len(ret_dict['final_bbox_list'])): - bbox = ret_dict['final_bbox_list'][i] - unique_id = ret_dict['final_id_list'][i] - lower_x = int(bbox[0] - bbox[2] 
/ 2) - lower_y = int(bbox[1] - bbox[3] / 2) - upper_x = int(bbox[0] + bbox[2] / 2) - upper_y = int(bbox[1] + bbox[3] / 2) - viz_frame = cv2.rectangle( - viz_frame, (lower_x, lower_y), (upper_x, upper_y), (0, 255, 0), 2) - viz_frame = cv2.putText(viz_frame, str(unique_id), (lower_x, lower_y), - cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 0), 2) + if ret_dict: + for i in range(len(ret_dict['final_bbox_list'])): + bbox = ret_dict['final_bbox_list'][i] + unique_id = ret_dict['final_id_list'][i] + lower_x = int(bbox[0] - bbox[2] / 2) + lower_y = int(bbox[1] - bbox[3] / 2) + upper_x = int(bbox[0] + bbox[2] / 2) + upper_y = int(bbox[1] + bbox[3] / 2) + viz_frame = cv2.rectangle( + viz_frame, (lower_x, lower_y), (upper_x, upper_y), (0, 255, 0), 2) + viz_frame = cv2.putText(viz_frame, str(unique_id), (lower_x, lower_y), + cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 0), 2) cv2.imshow('filtered_detected', viz_frame) if cv2.waitKey(1) & 0xFF == ord('q'): exit(0) From bdca97b52ce8480964946547960d982689b2f8cc Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sat, 22 Apr 2023 23:17:38 -0500 Subject: [PATCH 55/74] add flag for debug PRINT --- config.py | 3 ++- vision.py | 4 +++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/config.py b/config.py index aa4fbe9..66bc47b 100644 --- a/config.py +++ b/config.py @@ -23,7 +23,8 @@ IMG_CENTER_X = IMG_WIDTH // 2 IMG_CENTER_Y = IMG_HEIGHT // 2 -DEBUG_DISPLAY = True +DEBUG_DISPLAY = False +DEBUG_PRINT = True DEFAULT_ENEMY_TEAM = 'red' ROTATE_180 = True # Camera is mounted upside down diff --git a/vision.py b/vision.py index 77c917f..222b7e5 100644 --- a/vision.py +++ b/vision.py @@ -102,9 +102,11 @@ def main(): cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 0), 2) communicator.process_one_packet(config.SEARCH_TARGET, 0, 0) - if config.DEBUG_DISPLAY: + if config.DEBUG_PRINT: print('----------------\n', pred) print('fps:', 1. 
/ elapsed) + + if config.DEBUG_DISPLAY: cv2.imshow('target', show_frame) if cv2.waitKey(1) & 0xFF == ord('q'): exit(0) From c126e378596483e9c1357707bb78cd2b71332b34 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Thu, 27 Apr 2023 14:26:01 -0500 Subject: [PATCH 56/74] communicator now supports additional parameters --- Aiming/Aim.py | 2 +- Communication/communicator.py | 25 +++++++++++++++++-------- config.py | 2 +- 3 files changed, 19 insertions(+), 10 deletions(-) diff --git a/Aiming/Aim.py b/Aiming/Aim.py index 69c5cba..314dca5 100644 --- a/Aiming/Aim.py +++ b/Aiming/Aim.py @@ -130,7 +130,7 @@ def get_rotation_angle(self, bbox_center_x, bbox_center_y): (self.CFG.AUTOAIM_CAMERA.YAW_FOV_HALF / self.CFG.IMG_CENTER_X) pitch_diff = (bbox_center_y - self.CFG.IMG_CENTER_Y) * \ (self.CFG.AUTOAIM_CAMERA.PITCH_FOV_HALF / self.CFG.IMG_CENTER_Y) - + yaw_diff = -yaw_diff # counter-clockwise is positive pitch_diff = pitch_diff # down is positive diff --git a/Communication/communicator.py b/Communication/communicator.py index c93b070..3d2dba6 100644 --- a/Communication/communicator.py +++ b/Communication/communicator.py @@ -7,7 +7,8 @@ from copy import deepcopy # STM32 to Jetson packet size -STJ_PACKET_SIZE = 6 +STJ_PACKET_SIZE = 14 +INT_FP_SCALE = 1e+6 class UARTCommunicator: @@ -40,7 +41,9 @@ def __init__( self.stm32_state_dict = { 'my_color': 'red' if self.cfg.DEFAULT_ENEMY_TEAM == 'blue' else 'blue', - 'enemy_color': self.cfg.DEFAULT_ENEMY_TEAM.lower()} + 'enemy_color': self.cfg.DEFAULT_ENEMY_TEAM.lower(), + 'cur_yaw': 0, + 'cur_pitch': 0} self.parsed_packet_cnt = 0 self.seq_num = 0 @@ -167,9 +170,9 @@ def packet_search(self): self.state_dict_lock.acquire() self.stm32_state_dict = ret_dict self.state_dict_lock.release() - # Remove parsed bytes from the circular buffer - self.circular_buffer = self.circular_buffer[start_idx + STJ_PACKET_SIZE:] - start_idx = 0 + # Remove parsed bytes from the circular buffer + self.circular_buffer = self.circular_buffer[start_idx + 
STJ_PACKET_SIZE:] + start_idx = 0 else: start_idx += 1 @@ -203,6 +206,9 @@ def try_parse_one(self, possible_packet): # 0 for RED; 1 for BLUE my_color_int = int(possible_packet[2]) + cur_yaw = int.from_bytes(possible_packet[3:7], "little") / INT_FP_SCALE + cur_pitch = int.from_bytes(possible_packet[7:11], "little") / INT_FP_SCALE + if my_color_int == 0: my_color = 'red' enemy_color = 'blue' @@ -213,6 +219,8 @@ def try_parse_one(self, possible_packet): return { 'my_color': my_color, 'enemy_color': enemy_color, + 'cur_yaw': cur_yaw, + 'cur_pitch': cur_pitch, } def create_packet(self, header, yaw_offset, pitch_offset): @@ -248,8 +256,8 @@ def create_packet(self, header, yaw_offset, pitch_offset): self.seq_num = self.seq_num % (2 ** 32) packet += (self.seq_num & 0xFFFFFFFF).to_bytes(4, self.endianness) - discrete_yaw_offset = int(yaw_offset * 1e+6) - discrete_pitch_offset = int(pitch_offset * 1e+6) + discrete_yaw_offset = int(yaw_offset * INT_FP_SCALE) + discrete_pitch_offset = int(pitch_offset * INT_FP_SCALE) # TODO: add more sanity check here? 
packet += (discrete_yaw_offset & 0xFFFFFFFF).to_bytes(4, self.endianness) @@ -307,8 +315,9 @@ def get_current_stm32_state(self): uart.packet_search() time.sleep(0.001) + print(uart.get_current_stm32_state()) print("Packet receiving test complete.") else: - while True: + while True: time.sleep(0.005) uart.process_one_packet(config.SEARCH_TARGET, 0.01, 0.0) diff --git a/config.py b/config.py index 66bc47b..0fe4ed5 100644 --- a/config.py +++ b/config.py @@ -1,6 +1,7 @@ """Config file that is shared across the whole project.""" # communication utils +from Camera.read_from_file import fake_camera from Camera.simple_cv import simple_cv_camera SEARCH_TARGET = b'ST' MOVE_YOKE = b'MY' @@ -9,7 +10,6 @@ # Define camera # from Camera.d455 import D455_camera # from Camera.mdvs import mdvs_camera -from Camera.read_from_file import fake_camera AUTOAIM_CAMERA = fake_camera From 95e6474d35a123e6d91411f6fc293a7457fdbc91 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Thu, 27 Apr 2023 15:32:30 -0500 Subject: [PATCH 57/74] fix signed bug in floating point num decoding --- Communication/communicator.py | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/Communication/communicator.py b/Communication/communicator.py index 3d2dba6..69f2228 100644 --- a/Communication/communicator.py +++ b/Communication/communicator.py @@ -93,7 +93,6 @@ def try_read_one(self): # pop first element self.circular_buffer = self.circular_buffer[1:] self.circular_buffer.append(c) - return True else: return False @@ -206,8 +205,8 @@ def try_parse_one(self, possible_packet): # 0 for RED; 1 for BLUE my_color_int = int(possible_packet[2]) - cur_yaw = int.from_bytes(possible_packet[3:7], "little") / INT_FP_SCALE - cur_pitch = int.from_bytes(possible_packet[7:11], "little") / INT_FP_SCALE + cur_yaw = int.from_bytes(possible_packet[3:7], "little", signed=True) / INT_FP_SCALE + cur_pitch = int.from_bytes(possible_packet[7:11], "little", signed=True) / INT_FP_SCALE if my_color_int == 0: 
my_color = 'red' @@ -285,7 +284,7 @@ def get_current_stm32_state(self): if __name__ == '__main__': - TESTING_CRC = True + TESTING_CRC = False # Testing example if run as main import sys import os @@ -318,6 +317,14 @@ def get_current_stm32_state(self): print(uart.get_current_stm32_state()) print("Packet receiving test complete.") else: + cur_packet_cnt = uart.parsed_packet_cnt while True: - time.sleep(0.005) - uart.process_one_packet(config.SEARCH_TARGET, 0.01, 0.0) + uart.try_read_one() + uart.packet_search() + if uart.parsed_packet_cnt > cur_packet_cnt: + cur_packet_cnt = uart.parsed_packet_cnt + print(uart.get_current_stm32_state()) + time.sleep(0.001) + # while True: + # time.sleep(0.005) + # uart.process_one_packet(config.SEARCH_TARGET, 0.01, 0.0) From 82f1d866f5fd42b9e936807c29844d2cf8c8603c Mon Sep 17 00:00:00 2001 From: cpeng14 <92764093+Franciszabi@users.noreply.github.com> Date: Fri, 12 May 2023 01:23:38 -0500 Subject: [PATCH 58/74] Add files via upload Implemented EKF --- Aiming/Tracking/EKF_tracker.py | 231 +++++++++++++++++++++++++++++++++ 1 file changed, 231 insertions(+) create mode 100644 Aiming/Tracking/EKF_tracker.py diff --git a/Aiming/Tracking/EKF_tracker.py b/Aiming/Tracking/EKF_tracker.py new file mode 100644 index 0000000..02166d7 --- /dev/null +++ b/Aiming/Tracking/EKF_tracker.py @@ -0,0 +1,231 @@ +"""Implemented EKF tracker with filterpy library""" +import scipy.optimize +import numpy as np +from filterpy.kalman import ExtendedKalmanFilter +from scipy.spatial.distance import mahalanobis as scipy_mahalanobis +from .consistent_id_gen import ConsistentIdGenerator + +# TODO: move this to config +FRAME_BUFFER_SIZE = 10 + +class KalmanTracker(object): + """A class that represents a Kalman Fitler tracker. + + It is the matrix for KF tracking that will be stored in each armor. + """ + def __init__(self): + """Initialize from armor. 
+ """ + dt = 0.05 + proccess_error = 0.05 + self.kalman = ExtendedKalmanFilter(dim_x=4,dim_z=2) + #F:transition matrix + self.kalman.F = np.array([[1, 0, dt, 0], [0, dt, 0, 1], [0, 0, 1, 0], [0, 0, 0, 1]], np.float32) + #x:initial state初始状态 + self.kalman.x = np.array([0, 0, 0, 0]) + #R:measurement noise matrix + self.kalman.R *= 10 + #Q:process noise matrix + self.kalman.Q = np.eye(4) * 0.001 + #P:初始协方差矩阵, 初始状态很不确定时把这个拧大一点 + self.kalman.P *= np.array([320, 180, 0, 0], np.float32) # initialize the KF position to center + #measurement and prediction + self.measurement = np.array((2, 1), np.float32) + self.prediction = np.zeros((2, 1), np.float32) + + def H_of(self, x): + # H矩阵的jacobian + # 观测模型是线性的,所以H矩阵实际上是恒定的,与x无关 + return np.array([[1, 0, 0, 0], + [0, 1, 0, 0]]) + + def hx(self,x): + # 将状态向量转成测量向量 + # 直接返回x和y的坐标作为测量值 + return np.array([x[0], x[1]]) + + def update(self, x, y): + self.measurement = np.array([x, y], np.float32) + self.kalman.update(self.measurement, self.H_of, self.hx) + self.kalman.predict() + self.prediction = self.kalman.x + + def get_prediction(self): + """Predict the x and y values + + Returns: + tuple: predicted x and y values + """ + + return int(self.prediction[0]), int(self.prediction[1]) + + + +class tracked_armor(object): + """A class that represents a tracked armor. + + It stores the history of bounding boxes and ROIs, and can predict the + bounding box of the next frame. + """ + + def __init__(self, bbox, roi, frame_tick, armor_id): + """Initialize from prediction. + + Args: + bbox (tuple): (center_x, center_y, w, h) + roi (np.ndarray): ROI of the armor + frame_tick (int): frame tick + armor_id (int): unique ID + """ + self.bbox_buffer = [bbox] + self.roi_buffer = [roi] + self.observed_frame_tick = [frame_tick] + self.armor_id = armor_id # unique ID + self.KF_matrix = KalmanTracker() + + + + def compute_cost(self, other_armor): + """Compute the cost of matching this armor with another armor. 
+ + Args: + other_armor (tracked_armor): another armor + + Returns: + float: cost + """ + assert isinstance(other_armor, tracked_armor) + # TODO: use more sophisticated metrics (e.g., RGB) as cost function + c_x, c_y, w, h = self.bbox_buffer[-1] + o_c_x, o_c_y, o_w, o_h = other_armor.bbox_buffer[-1] + return np.square(c_x - o_c_x) + np.square(c_y - o_c_y) + + def update(self, other_armor, frame_tick): + """Update the state of this armor with matched armor. + + Args: + other_armor (tracked_armor): another armor + frame_tick (int): frame tick + """ + # Only call if these two armors are matched + self.bbox_buffer.append(other_armor.bbox_buffer[-1]) + self.roi_buffer.append(other_armor.roi_buffer[-1]) + self.observed_frame_tick.append(frame_tick) + + # Maintain each armor's buffer so that anything older than + # FRAME_BUFFER_SIZE is dropped + self.bbox_buffer = self.bbox_buffer[-FRAME_BUFFER_SIZE:] + self.roi_buffer = self.roi_buffer[-FRAME_BUFFER_SIZE:] + + def predict_bbox(self, cur_frame_tick): + """Predict the bounding box of the tracked armor at cur frame tick. + + Args: + cur_frame_tick (int): current frame tick + + TODO + - Use Kalman filter to do prediction + - Support future frame idx for predictions + + Returns: + tuple: (center_x, center_y, w, h) + """ + if cur_frame_tick == self.observed_frame_tick[-1] or len( + self.bbox_buffer) == 1: + # print(self.bbox_buffer[-1]) + c_x, c_y, w, h = self.bbox_buffer[-1] + self.KF_matrix.update(c_x, c_y) + return self.bbox_buffer[-1] + else: + # KF tracking + c_x, c_y, w, h = self.bbox_buffer[-1] + predicted_x, predicted_y = self.KF_matrix.get_prediction() + self.KF_matrix.update(predicted_x, predicted_y) + return (int(predicted_x), int(predicted_y), w, h) + + +class KF_tracker(object): + """ + Basic tracker that can handle only one target. 
+ + It memorizes the state of last two predictions and do linear extrapolation + """ + + SE_THRESHOLD = 3200 # (40, 40) pixels away + + def __init__(self, config): + """Initialize the simple lineartracker. + + Args: + config (python object): shared config + """ + self.CFG = config + self.active_armors = [] + self.id_gen = ConsistentIdGenerator() + self.frame_tick = 0 # TODO: use timestamp may be a better idea + + + def process_one(self, pred_list, enemy_team, rgb_img): + """Process one set of detections and rgb images. + + Args: + pred_list (list): list of (name, conf, bbox) tuples + enemy_team (str): enemy team name (blue or red) + rgb_img (np.ndarray): RGB image + + Returns: + (list, list): list of tracked_armors (detections+predictions) and their IDs + """ + new_armors = [] + for name, conf, bbox in pred_list: + c_x, c_y, w, h = bbox + lower_x = int(c_x - w / 2) + upper_x = int(c_x + w / 2) + lower_y = int(c_y - h / 2) + upper_y = int(c_y + h / 2) + roi = rgb_img[lower_y:upper_y, lower_x:upper_x] + new_armors.append(tracked_armor(bbox, roi, self.frame_tick, -1)) + + if len(self.active_armors) > 0: + # Try to associate with current armors + cost_matrix = np.zeros( + (len(new_armors), len(self.active_armors)), dtype=float) + for i in range(len(new_armors)): + for j in range(len(self.active_armors)): + cost_ij = new_armors[i].compute_cost(self.active_armors[j]) + cost_matrix[i, j] = cost_ij + row_ind, col_ind = scipy.optimize.linear_sum_assignment( + cost_matrix) + + for i, j in zip(row_ind, col_ind): + if cost_matrix[i, j] < self.SE_THRESHOLD: + assert new_armors[i] is not None + self.active_armors[j].update( + new_armors[i], self.frame_tick) + new_armors[i] = None + + new_armors = [i for i in new_armors if i is not None] + + for n in new_armors: + n.armor_id = self.id_gen.get_id() + + # Maintain current buffer. 
If an armor has not been observed by 10 + # frames, it is dropped + self.active_armors = [i for i in self.active_armors + if self.frame_tick - i.observed_frame_tick[-1] < FRAME_BUFFER_SIZE] + + # Unassociated armors are registered as new armors + for a in new_armors: + self.active_armors.append(a) + + # Create a list of bbox and unique IDs to return + ret_bbox_list = [] + ret_id_list = [] + for a in self.active_armors: + # If an armor is observed, directly use the bbox + ret_bbox_list.append(a.predict_bbox(self.frame_tick)) + ret_id_list.append(a.armor_id) + + self.frame_tick += 1 + + return ret_bbox_list, ret_id_list \ No newline at end of file From 822075e3a8b35d873ab1a85274a02da8e182c729 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sun, 28 May 2023 21:27:30 -0500 Subject: [PATCH 59/74] use absolute angles instead --- Aiming/Aim.py | 15 +++++++++++++-- Communication/communicator.py | 10 +++++++++- config.py | 4 ++-- vision.py | 9 ++++++--- 4 files changed, 30 insertions(+), 8 deletions(-) diff --git a/Aiming/Aim.py b/Aiming/Aim.py index 314dca5..450621c 100644 --- a/Aiming/Aim.py +++ b/Aiming/Aim.py @@ -1,6 +1,6 @@ """Hosts the Aim class, which is the main class for auto-aiming.""" from .Tracking import basic_tracker - +import numpy as np class Aim: """ @@ -23,13 +23,14 @@ def __init__(self, config): self.CFG = config self.tracker = basic_tracker(self.CFG) - def process_one(self, pred_list, enemy_team, rgb_img): + def process_one(self, pred_list, enemy_team, rgb_img, stm32_state_dict): """Process one frame of predictions. 
Args: pred_list (list): a list of predictions enemy_team (str): 'blue' or 'red' rgb_img (np.ndarray): RGB image + stm32_state_dict (dict): a dictionary of stm32 state Returns: dict: a dictionary of results @@ -54,10 +55,20 @@ def process_one(self, pred_list, enemy_team, rgb_img): calibrated_yaw_diff, calibrated_pitch_diff = self.posterior_calibration( yaw_diff, pitch_diff, closet_dist) + + # Clip yaw/pitch differences to [-2pi, 2pi] + calibrated_pitch_diff = np.clip(calibrated_pitch_diff, -2*np.pi, 2*np.pi) + calibrated_yaw_diff = np.clip(calibrated_yaw_diff, -2*np.pi, 2*np.pi) + + # Compute absolute yaw / pitch + abs_pitch = calibrated_pitch_diff + stm32_state_dict['cur_pitch'] + abs_yaw = calibrated_yaw_diff + stm32_state_dict['cur_yaw'] return { 'yaw_diff': calibrated_yaw_diff, 'pitch_diff': calibrated_pitch_diff, + 'abs_yaw': abs_yaw, + 'abs_pitch': abs_pitch, 'center_x': center_x, 'center_y': center_y, 'final_bbox_list': final_bbox_list, diff --git a/Communication/communicator.py b/Communication/communicator.py index 69f2228..f85b8f9 100644 --- a/Communication/communicator.py +++ b/Communication/communicator.py @@ -135,6 +135,7 @@ def guess_uart_device_(): for dev_name in dev_list: if dev_name.startswith(UART_PREFIX_LIST): try: + print("Trying to open serial port: {}".format(dev_name)) dev_path = os.path.join("/dev", dev_name) serial_port = serial.Serial( port=dev_path, @@ -318,13 +319,20 @@ def get_current_stm32_state(self): print("Packet receiving test complete.") else: cur_packet_cnt = uart.parsed_packet_cnt + cur_time = time.time() + prv_parsed_packet_cnt = 0 while True: uart.try_read_one() uart.packet_search() if uart.parsed_packet_cnt > cur_packet_cnt: cur_packet_cnt = uart.parsed_packet_cnt - print(uart.get_current_stm32_state()) + # print(uart.get_current_stm32_state()) time.sleep(0.001) + if time.time() > cur_time + 1: + print("Parsed {} packets in 1 second.".format( + cur_packet_cnt - prv_parsed_packet_cnt)) + prv_parsed_packet_cnt = cur_packet_cnt + 
cur_time = time.time() # while True: # time.sleep(0.005) # uart.process_one_packet(config.SEARCH_TARGET, 0.01, 0.0) diff --git a/config.py b/config.py index 0fe4ed5..ce362bc 100644 --- a/config.py +++ b/config.py @@ -23,8 +23,8 @@ IMG_CENTER_X = IMG_WIDTH // 2 IMG_CENTER_Y = IMG_HEIGHT // 2 -DEBUG_DISPLAY = False -DEBUG_PRINT = True +DEBUG_DISPLAY = True +DEBUG_PRINT = False DEFAULT_ENEMY_TEAM = 'red' ROTATE_180 = True # Camera is mounted upside down diff --git a/vision.py b/vision.py index 222b7e5..1e4e42f 100644 --- a/vision.py +++ b/vision.py @@ -21,6 +21,7 @@ def main(): aimer = Aim(config) communicator = UARTCommunicator(config) + communicator.start_listening() autoaim_camera = config.AUTOAIM_CAMERA(config) @@ -31,11 +32,11 @@ def main(): while True: start = time.time() + stm32_state_dict = communicator.get_current_stm32_state() frame = autoaim_camera.get_frame() # TODO: add a global reset function if enemy functions change # (e.g., clear the buffer in the armor tracker) - stm32_state_dict = communicator.get_current_stm32_state() enemy_team = stm32_state_dict['enemy_color'] model.change_color(enemy_team) @@ -67,7 +68,7 @@ def main(): # Tracking and filtering # Pour all predictions into the aimer, which returns relative angles - ret_dict = aimer.process_one(pred, enemy_team, frame) + ret_dict = aimer.process_one(pred, enemy_team, frame, stm32_state_dict) if config.DEBUG_DISPLAY: viz_frame = frame.copy() @@ -91,8 +92,10 @@ def main(): show_frame = frame.copy() if ret_dict: + print("Current yaw angle: ", stm32_state_dict['cur_yaw']) + print("Target abs Yaw angle: ", ret_dict['abs_yaw']) communicator.process_one_packet( - config.MOVE_YOKE, ret_dict['yaw_diff'], ret_dict['pitch_diff']) + config.MOVE_YOKE, ret_dict['abs_yaw'], ret_dict['abs_pitch']) show_frame = cv2.circle(show_frame, (int(ret_dict['center_x']), int(ret_dict['center_y'])), From c407c39ab0173cd1d574f1f63033376d92da3a98 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Mon, 29 May 2023 22:22:25 -0500 
Subject: [PATCH 60/74] fix linting --- Aiming/Aim.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/Aiming/Aim.py b/Aiming/Aim.py index 450621c..787490c 100644 --- a/Aiming/Aim.py +++ b/Aiming/Aim.py @@ -2,6 +2,7 @@ from .Tracking import basic_tracker import numpy as np + class Aim: """ The main class for auto-aiming. @@ -55,10 +56,10 @@ def process_one(self, pred_list, enemy_team, rgb_img, stm32_state_dict): calibrated_yaw_diff, calibrated_pitch_diff = self.posterior_calibration( yaw_diff, pitch_diff, closet_dist) - + # Clip yaw/pitch differences to [-2pi, 2pi] - calibrated_pitch_diff = np.clip(calibrated_pitch_diff, -2*np.pi, 2*np.pi) - calibrated_yaw_diff = np.clip(calibrated_yaw_diff, -2*np.pi, 2*np.pi) + calibrated_pitch_diff = np.clip(calibrated_pitch_diff, -2 * np.pi, 2 * np.pi) + calibrated_yaw_diff = np.clip(calibrated_yaw_diff, -2 * np.pi, 2 * np.pi) # Compute absolute yaw / pitch abs_pitch = calibrated_pitch_diff + stm32_state_dict['cur_pitch'] From 161804fb63d18e5d76f5b7572cb5baf08240cda8 Mon Sep 17 00:00:00 2001 From: cpeng14 <92764093+Franciszabi@users.noreply.github.com> Date: Tue, 30 May 2023 09:06:00 -0500 Subject: [PATCH 61/74] Delete KF track test.ipynb --- Aiming/Tracking/KF track test.ipynb | 145 ---------------------------- 1 file changed, 145 deletions(-) delete mode 100644 Aiming/Tracking/KF track test.ipynb diff --git a/Aiming/Tracking/KF track test.ipynb b/Aiming/Tracking/KF track test.ipynb deleted file mode 100644 index 60a9f54..0000000 --- a/Aiming/Tracking/KF track test.ipynb +++ /dev/null @@ -1,145 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": 8, - "id": "2b268ada", - "metadata": {}, - "outputs": [], - "source": [ - "import cv2\n", - "import numpy as np" - ] - }, - { - "cell_type": "code", - "execution_count": 9, - "id": "8ff1cbb8", - "metadata": { - "scrolled": true - }, - "outputs": [], - "source": [ - "# 创建卡尔曼滤波器\n", - "kf = cv2.KalmanFilter(4, 2)\n", - "\n", - "# 
初始化状态估计值和协方差矩阵\n", - "kf.statePost = np.array([0, 0, 0, 0], dtype=np.float32)\n", - "kf.errorCovPost = np.eye(4, dtype=np.float32)\n", - "\n", - "# 设置卡尔曼滤波器参数\n", - "dt = 1.0 # 采样间隔\n", - "kf.transitionMatrix = np.array([[1, 0, dt, 0],\n", - " [0, 1, 0, dt],\n", - " [0, 0, 1, 0],\n", - " [0, 0, 0, 1]], dtype=np.float32)\n", - "kf.processNoiseCov = np.array([[1, 0, 0, 0],\n", - " [0, 1, 0, 0],\n", - " [0, 0, 1, 0],\n", - " [0, 0, 0, 1]], dtype=np.float32) * 0.0001\n", - "kf.measurementMatrix = np.array([[1, 0, 0, 0],\n", - " [0, 1, 0, 0]], dtype=np.float32)\n", - "kf.measurementNoiseCov = np.array([[1, 0],\n", - " [0, 1]], dtype=np.float32) * 1" - ] - }, - { - "cell_type": "code", - "execution_count": 11, - "id": "9c9cbede", - "metadata": {}, - "outputs": [], - "source": [ - "# 打开摄像头\n", - "cap = cv2.VideoCapture(0)\n", - "\n", - "# 读取一帧图像\n", - "ret, frame = cap.read()\n", - "\n", - "# 将图像转换为灰度图\n", - "gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)\n", - "\n", - "# 检测目标,提取目标的中心点坐标\n", - "x, y, w, h = cv2.selectROI(\"SelectROI\", frame, fromCenter=False, showCrosshair=True)\n", - "\n", - "while True:\n", - " # 读取一帧图像\n", - " ret, frame = cap.read()\n", - " if not ret:\n", - " break\n", - "\n", - " # 将图像转换为灰度图\n", - " gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)\n", - "\n", - " # 检测目标,提取目标的中心点坐标\n", - " \n", - " cx, cy = x + w/2, y + h/2\n", - "\n", - " # 对中心点坐标进行卡尔曼滤波\n", - " prediction = kf.predict()\n", - " measurement = np.array([cx, cy], dtype=np.float32)\n", - " kf.correct(measurement)\n", - "\n", - " # 绘制目标位置(绿)和卡尔曼滤波器预测的位置(红)\n", - " cv2.circle(frame, (int(cx), int(cy)), 5, (0, 255, 0), -1)\n", - " cv2.circle(frame, (int(prediction[0]), int(prediction[1])), 5, (0, 0, 255), -1)\n", - "\n", - " # 显示结果\n", - " cv2.imshow(\"Kalman Filter\", frame)\n", - "\n", - " # 按下ESC键退出\n", - " if cv2.waitKey(1) == 27:\n", - " break\n", - "\n", - "# 释放摄像头并关闭窗口\n", - "cap.release()\n", - "cv2.destroyAllWindows()\n" - ] - }, - { - "cell_type": "code", - "execution_count": 
null, - "id": "6d9d6bc8", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "562b9a62", - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "16e8b8a4", - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3 (ipykernel)", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.15" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} From fc9aea8c7fd4047ece258f07628cf182e820c16c Mon Sep 17 00:00:00 2001 From: cpeng14 <92764093+Franciszabi@users.noreply.github.com> Date: Tue, 30 May 2023 09:12:41 -0500 Subject: [PATCH 62/74] Delete EKF_tracker.py --- Aiming/Tracking/EKF_tracker.py | 231 --------------------------------- 1 file changed, 231 deletions(-) delete mode 100644 Aiming/Tracking/EKF_tracker.py diff --git a/Aiming/Tracking/EKF_tracker.py b/Aiming/Tracking/EKF_tracker.py deleted file mode 100644 index 02166d7..0000000 --- a/Aiming/Tracking/EKF_tracker.py +++ /dev/null @@ -1,231 +0,0 @@ -"""Implemented EKF tracker with filterpy library""" -import scipy.optimize -import numpy as np -from filterpy.kalman import ExtendedKalmanFilter -from scipy.spatial.distance import mahalanobis as scipy_mahalanobis -from .consistent_id_gen import ConsistentIdGenerator - -# TODO: move this to config -FRAME_BUFFER_SIZE = 10 - -class KalmanTracker(object): - """A class that represents a Kalman Fitler tracker. - - It is the matrix for KF tracking that will be stored in each armor. - """ - def __init__(self): - """Initialize from armor. 
- """ - dt = 0.05 - proccess_error = 0.05 - self.kalman = ExtendedKalmanFilter(dim_x=4,dim_z=2) - #F:transition matrix - self.kalman.F = np.array([[1, 0, dt, 0], [0, dt, 0, 1], [0, 0, 1, 0], [0, 0, 0, 1]], np.float32) - #x:initial state初始状态 - self.kalman.x = np.array([0, 0, 0, 0]) - #R:measurement noise matrix - self.kalman.R *= 10 - #Q:process noise matrix - self.kalman.Q = np.eye(4) * 0.001 - #P:初始协方差矩阵, 初始状态很不确定时把这个拧大一点 - self.kalman.P *= np.array([320, 180, 0, 0], np.float32) # initialize the KF position to center - #measurement and prediction - self.measurement = np.array((2, 1), np.float32) - self.prediction = np.zeros((2, 1), np.float32) - - def H_of(self, x): - # H矩阵的jacobian - # 观测模型是线性的,所以H矩阵实际上是恒定的,与x无关 - return np.array([[1, 0, 0, 0], - [0, 1, 0, 0]]) - - def hx(self,x): - # 将状态向量转成测量向量 - # 直接返回x和y的坐标作为测量值 - return np.array([x[0], x[1]]) - - def update(self, x, y): - self.measurement = np.array([x, y], np.float32) - self.kalman.update(self.measurement, self.H_of, self.hx) - self.kalman.predict() - self.prediction = self.kalman.x - - def get_prediction(self): - """Predict the x and y values - - Returns: - tuple: predicted x and y values - """ - - return int(self.prediction[0]), int(self.prediction[1]) - - - -class tracked_armor(object): - """A class that represents a tracked armor. - - It stores the history of bounding boxes and ROIs, and can predict the - bounding box of the next frame. - """ - - def __init__(self, bbox, roi, frame_tick, armor_id): - """Initialize from prediction. - - Args: - bbox (tuple): (center_x, center_y, w, h) - roi (np.ndarray): ROI of the armor - frame_tick (int): frame tick - armor_id (int): unique ID - """ - self.bbox_buffer = [bbox] - self.roi_buffer = [roi] - self.observed_frame_tick = [frame_tick] - self.armor_id = armor_id # unique ID - self.KF_matrix = KalmanTracker() - - - - def compute_cost(self, other_armor): - """Compute the cost of matching this armor with another armor. 
- - Args: - other_armor (tracked_armor): another armor - - Returns: - float: cost - """ - assert isinstance(other_armor, tracked_armor) - # TODO: use more sophisticated metrics (e.g., RGB) as cost function - c_x, c_y, w, h = self.bbox_buffer[-1] - o_c_x, o_c_y, o_w, o_h = other_armor.bbox_buffer[-1] - return np.square(c_x - o_c_x) + np.square(c_y - o_c_y) - - def update(self, other_armor, frame_tick): - """Update the state of this armor with matched armor. - - Args: - other_armor (tracked_armor): another armor - frame_tick (int): frame tick - """ - # Only call if these two armors are matched - self.bbox_buffer.append(other_armor.bbox_buffer[-1]) - self.roi_buffer.append(other_armor.roi_buffer[-1]) - self.observed_frame_tick.append(frame_tick) - - # Maintain each armor's buffer so that anything older than - # FRAME_BUFFER_SIZE is dropped - self.bbox_buffer = self.bbox_buffer[-FRAME_BUFFER_SIZE:] - self.roi_buffer = self.roi_buffer[-FRAME_BUFFER_SIZE:] - - def predict_bbox(self, cur_frame_tick): - """Predict the bounding box of the tracked armor at cur frame tick. - - Args: - cur_frame_tick (int): current frame tick - - TODO - - Use Kalman filter to do prediction - - Support future frame idx for predictions - - Returns: - tuple: (center_x, center_y, w, h) - """ - if cur_frame_tick == self.observed_frame_tick[-1] or len( - self.bbox_buffer) == 1: - # print(self.bbox_buffer[-1]) - c_x, c_y, w, h = self.bbox_buffer[-1] - self.KF_matrix.update(c_x, c_y) - return self.bbox_buffer[-1] - else: - # KF tracking - c_x, c_y, w, h = self.bbox_buffer[-1] - predicted_x, predicted_y = self.KF_matrix.get_prediction() - self.KF_matrix.update(predicted_x, predicted_y) - return (int(predicted_x), int(predicted_y), w, h) - - -class KF_tracker(object): - """ - Basic tracker that can handle only one target. 
- - It memorizes the state of last two predictions and do linear extrapolation - """ - - SE_THRESHOLD = 3200 # (40, 40) pixels away - - def __init__(self, config): - """Initialize the simple lineartracker. - - Args: - config (python object): shared config - """ - self.CFG = config - self.active_armors = [] - self.id_gen = ConsistentIdGenerator() - self.frame_tick = 0 # TODO: use timestamp may be a better idea - - - def process_one(self, pred_list, enemy_team, rgb_img): - """Process one set of detections and rgb images. - - Args: - pred_list (list): list of (name, conf, bbox) tuples - enemy_team (str): enemy team name (blue or red) - rgb_img (np.ndarray): RGB image - - Returns: - (list, list): list of tracked_armors (detections+predictions) and their IDs - """ - new_armors = [] - for name, conf, bbox in pred_list: - c_x, c_y, w, h = bbox - lower_x = int(c_x - w / 2) - upper_x = int(c_x + w / 2) - lower_y = int(c_y - h / 2) - upper_y = int(c_y + h / 2) - roi = rgb_img[lower_y:upper_y, lower_x:upper_x] - new_armors.append(tracked_armor(bbox, roi, self.frame_tick, -1)) - - if len(self.active_armors) > 0: - # Try to associate with current armors - cost_matrix = np.zeros( - (len(new_armors), len(self.active_armors)), dtype=float) - for i in range(len(new_armors)): - for j in range(len(self.active_armors)): - cost_ij = new_armors[i].compute_cost(self.active_armors[j]) - cost_matrix[i, j] = cost_ij - row_ind, col_ind = scipy.optimize.linear_sum_assignment( - cost_matrix) - - for i, j in zip(row_ind, col_ind): - if cost_matrix[i, j] < self.SE_THRESHOLD: - assert new_armors[i] is not None - self.active_armors[j].update( - new_armors[i], self.frame_tick) - new_armors[i] = None - - new_armors = [i for i in new_armors if i is not None] - - for n in new_armors: - n.armor_id = self.id_gen.get_id() - - # Maintain current buffer. 
If an armor has not been observed by 10 - # frames, it is dropped - self.active_armors = [i for i in self.active_armors - if self.frame_tick - i.observed_frame_tick[-1] < FRAME_BUFFER_SIZE] - - # Unassociated armors are registered as new armors - for a in new_armors: - self.active_armors.append(a) - - # Create a list of bbox and unique IDs to return - ret_bbox_list = [] - ret_id_list = [] - for a in self.active_armors: - # If an armor is observed, directly use the bbox - ret_bbox_list.append(a.predict_bbox(self.frame_tick)) - ret_id_list.append(a.armor_id) - - self.frame_tick += 1 - - return ret_bbox_list, ret_id_list \ No newline at end of file From fbe77ead30d18e58ed1496ebe5a533351d54b5f4 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Wed, 31 May 2023 21:12:53 -0500 Subject: [PATCH 63/74] fix linting; comments; and doc --- Aiming/Tracking/KF_tracker_modified.py | 34 +++++++++++++++----------- Aiming/Tracking/__init__.py | 3 ++- 2 files changed, 22 insertions(+), 15 deletions(-) diff --git a/Aiming/Tracking/KF_tracker_modified.py b/Aiming/Tracking/KF_tracker_modified.py index 1dea4c8..acfc229 100644 --- a/Aiming/Tracking/KF_tracker_modified.py +++ b/Aiming/Tracking/KF_tracker_modified.py @@ -16,24 +16,27 @@ class KalmanTracker(object): It is the matrix for KF tracking that will be stored in each armor. """ + def __init__(self): - """Initialize from armor. 
- """ + """Initialize from armor.""" self.kalman = cv2.KalmanFilter(4, 2) self.kalman.measurementMatrix = np.array([[1, 0, 0, 0], [0, 1, 0, 0]], np.float32) - self.kalman.transitionMatrix = np.array([[1, 0, 1, 0], [0, 1, 0, 1], [0, 0, 1, 0], [0, 0, 0, 1]], np.float32) + self.kalman.transitionMatrix = np.array( + [[1, 0, 1, 0], [0, 1, 0, 1], [0, 0, 1, 0], [0, 0, 0, 1]], np.float32) self.kalman.measurementNoiseCov = np.array([[1, 0], [0, 1]], np.float32) * 1 - self.kalman.processNoiseCov = np.array([[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]], np.float32) * 1 + self.kalman.processNoiseCov = np.array( + [[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 1, 0], [0, 0, 0, 1]], np.float32) * 1 self.kalman.controlMatrix = np.array([[0.5, 0], [0, 0.5], [1, 0], [0, 1]], np.float32) - initial_state = np.array([320, 180, 0, 0], np.float32) # initialize the KF position to center + # initialize the KF position to center + initial_state = np.array([320, 180, 0, 0], np.float32) self.kalman.statePre = initial_state self.measurement = np.array((2, 1), np.float32) self.prediction = np.zeros((2, 1), np.float32) - self.acceleration = np.array((2,1), np.float32) + self.acceleration = np.array((2, 1), np.float32) def update(self, x, y): """Update the value of KF_matrix. 
@@ -42,25 +45,27 @@ def update(self, x, y): x (int): detected x value y (int): detected y value """ - acceleration = np.array([[(x - self.measurement[0])],[(y - self.measurement[1])]], np.float32) + acceleration = np.array( + [[(x - self.measurement[0])], [(y - self.measurement[1])]], np.float32) self.measurement = np.array([[x], [y]], np.float32) - #self.kalman.transitionMatrix[2, 0] = 0.5 * acceleration[0, 0] - #self.kalman.transitionMatrix[3, 1] = 0.5 * acceleration[1, 0] + # self.kalman.transitionMatrix[2, 0] = 0.5 * acceleration[0, 0] + # self.kalman.transitionMatrix[3, 1] = 0.5 * acceleration[1, 0] self.kalman.correct(self.measurement) self.prediction = self.kalman.predict() def get_prediction(self): - """Predict the x and y values + """Predict the x and y values. Returns: tuple: predicted x and y values """ return int(self.prediction[0]), int(self.prediction[1]) -# class KalmanTracker(object): # 匀加速运动模型 +# class KalmanTracker(object): # assume constant acceleration # def __init__(self, dt): # self.kalman = cv2.KalmanFilter(6, 2) -# self.kalman.measurementMatrix = np.array([[1, 0, 0, 0, 0, 0], [0, 1, 0, 0, 0, 0]], np.float32) +# self.kalman.measurementMatrix = np.array([[1, 0, 0, 0, 0, 0], +# [0, 1, 0, 0, 0, 0]], np.float32) # self.kalman.transitionMatrix = np.array([ # [1, 0, dt, 0, 0.5*dt**2, 0], # [0, 1, 0, dt, 0, 0.5*dt**2], @@ -70,7 +75,8 @@ def get_prediction(self): # [0, 0, 0, 0, 0, 1]], np.float32) # self.kalman.processNoiseCov = np.eye(6, dtype=np.float32) * 0.03 -# initial_state = np.array([320, 180, 0, 0, 0, 0], np.float32) # initialize the KF position to center +# # initialize the KF position to center +# initial_state = np.array([320, 180, 0, 0, 0, 0], np.float32) # self.kalman.statePre = initial_state # self.measurement = np.array((2, 1), np.float32) @@ -84,6 +90,7 @@ def get_prediction(self): # def get_prediction(self): # return int(self.prediction[0]), int(self.prediction[1]) + class tracked_armor(object): """A class that represents a tracked 
armor. @@ -250,4 +257,3 @@ def process_one(self, pred_list, enemy_team, rgb_img): self.frame_tick += 1 return ret_bbox_list, ret_id_list - diff --git a/Aiming/Tracking/__init__.py b/Aiming/Tracking/__init__.py index 2d2f5c0..78fad7e 100644 --- a/Aiming/Tracking/__init__.py +++ b/Aiming/Tracking/__init__.py @@ -1 +1,2 @@ -from .basic_tracker import basic_tracker \ No newline at end of file +from .basic_tracker import basic_tracker +from .KF_tracker_modified import KF_tracker From 5f204d252f28ffc8b811f5c58f6b1a39a04cc32b Mon Sep 17 00:00:00 2001 From: cpeng14 <92764093+Franciszabi@users.noreply.github.com> Date: Sat, 3 Jun 2023 16:09:40 +0800 Subject: [PATCH 64/74] Implemented uniformly accelerated model for EKF --- Aiming/Tracking/EKF_tracker.py | 234 +++++++++++++++++++++++++++++++++ 1 file changed, 234 insertions(+) create mode 100644 Aiming/Tracking/EKF_tracker.py diff --git a/Aiming/Tracking/EKF_tracker.py b/Aiming/Tracking/EKF_tracker.py new file mode 100644 index 0000000..fb39655 --- /dev/null +++ b/Aiming/Tracking/EKF_tracker.py @@ -0,0 +1,234 @@ +"""Implemented EKF tracker with filterpy library""" +import scipy.optimize +import numpy as np +from filterpy.kalman import ExtendedKalmanFilter +from scipy.spatial.distance import mahalanobis as scipy_mahalanobis +from .consistent_id_gen import ConsistentIdGenerator + +# TODO: move this to config +FRAME_BUFFER_SIZE = 10 + +class KalmanTracker(object): + """A class that represents a Kalman Fitler tracker. + + It is the matrix for KF tracking that will be stored in each armor. + """ + def __init__(self): + """Initialize EKF from armor. 
+ """ + dt = 1 + self.kalman = ExtendedKalmanFilter(dim_x=6,dim_z=2) + #F:transition matrix + self.kalman.F = np.array([[1, 0, dt, 0, 0.5*(dt**2), 0], + [0, 1, 0, dt,0, 0.5*(dt**2)], + [0, 0, 1, 0, dt, 0], + [0, 0, 0, 1, 0, dt], + [0, 0, 0, 0, 1, 0], + [0, 0, 0, 0, 0, 1]], np.float32) + #x:initial state初始状态 initialize the KF position to center + self.kalman.x = np.array([320, 180, 0, 0, 0 ,0]) + #R:measurement noise matrix + #self.kalman.R *= 10 + self.kalman.R = np.array([[1, 0], [0, 1]], np.float32) * 1 + #Q:process noise matrix + self.kalman.Q = np.eye(6) + #P:初始协方差矩阵, 初始状态很不确定时把这个拧大一点 + self.kalman.P *= 10 + #measurement and prediction + self.measurement = np.array((2, 1), np.float32) + self.prediction = np.zeros((2, 1), np.float32) + + def H_of(self, x): + # H矩阵的jacobian + # 观测模型是线性的,所以H矩阵实际上是恒定的,与x无关 + return np.array([[1, 0, 0, 0, 0, 0], + [0, 1, 0, 0, 0, 0]]) + + def hx(self,x): + # 将状态向量转成测量向量 + # 直接返回x和y的坐标作为测量值 + return np.array([x[0], x[1]]) + + def update(self, x, y): + self.measurement = np.array([x, y], np.float32) + self.kalman.update(self.measurement, self.H_of, self.hx) + self.kalman.predict() + self.prediction = self.kalman.x + + def get_prediction(self): + """Predict the x and y values + + Returns: + tuple: predicted x and y values + """ + + return int(self.prediction[0]), int(self.prediction[1]) + + + +class tracked_armor(object): + """A class that represents a tracked armor. + + It stores the history of bounding boxes and ROIs, and can predict the + bounding box of the next frame. + """ + + def __init__(self, bbox, roi, frame_tick, armor_id): + """Initialize from prediction. 
+ + Args: + bbox (tuple): (center_x, center_y, w, h) + roi (np.ndarray): ROI of the armor + frame_tick (int): frame tick + armor_id (int): unique ID + """ + self.bbox_buffer = [bbox] + self.roi_buffer = [roi] + self.observed_frame_tick = [frame_tick] + self.armor_id = armor_id # unique ID + self.KF_matrix = KalmanTracker() + + + + def compute_cost(self, other_armor): + """Compute the cost of matching this armor with another armor. + + Args: + other_armor (tracked_armor): another armor + + Returns: + float: cost + """ + assert isinstance(other_armor, tracked_armor) + # TODO: use more sophisticated metrics (e.g., RGB) as cost function + c_x, c_y, w, h = self.bbox_buffer[-1] + o_c_x, o_c_y, o_w, o_h = other_armor.bbox_buffer[-1] + return np.square(c_x - o_c_x) + np.square(c_y - o_c_y) + + def update(self, other_armor, frame_tick): + """Update the state of this armor with matched armor. + + Args: + other_armor (tracked_armor): another armor + frame_tick (int): frame tick + """ + # Only call if these two armors are matched + self.bbox_buffer.append(other_armor.bbox_buffer[-1]) + self.roi_buffer.append(other_armor.roi_buffer[-1]) + self.observed_frame_tick.append(frame_tick) + + # Maintain each armor's buffer so that anything older than + # FRAME_BUFFER_SIZE is dropped + self.bbox_buffer = self.bbox_buffer[-FRAME_BUFFER_SIZE:] + self.roi_buffer = self.roi_buffer[-FRAME_BUFFER_SIZE:] + + def predict_bbox(self, cur_frame_tick): + """Predict the bounding box of the tracked armor at cur frame tick. 
+ + Args: + cur_frame_tick (int): current frame tick + + TODO + - Use Kalman filter to do prediction + - Support future frame idx for predictions + + Returns: + tuple: (center_x, center_y, w, h) + """ + if cur_frame_tick == self.observed_frame_tick[-1] or len( + self.bbox_buffer) == 1: + # print(self.bbox_buffer[-1]) + c_x, c_y, w, h = self.bbox_buffer[-1] + self.KF_matrix.update(c_x, c_y) + return self.bbox_buffer[-1] + else: + # KF tracking + c_x, c_y, w, h = self.bbox_buffer[-1] + predicted_x, predicted_y = self.KF_matrix.get_prediction() + self.KF_matrix.update(predicted_x, predicted_y) + return (int(predicted_x), int(predicted_y), w, h) + + +class KF_tracker(object): + """ + EKF tracker + """ + + SE_THRESHOLD = 3200 # (40, 40) pixels away + + def __init__(self, config): + """simple EKF. + + Args: + config (python object): shared config + """ + self.CFG = config + self.active_armors = [] + self.id_gen = ConsistentIdGenerator() + self.frame_tick = 0 # TODO: use timestamp may be a better idea + + + def process_one(self, pred_list, enemy_team, rgb_img): + """Process one set of detections and rgb images. 
+ + Args: + pred_list (list): list of (name, conf, bbox) tuples + enemy_team (str): enemy team name (blue or red) + rgb_img (np.ndarray): RGB image + + Returns: + (list, list): list of tracked_armors (detections+predictions) and their IDs + """ + new_armors = [] + for name, conf, bbox in pred_list: + c_x, c_y, w, h = bbox + lower_x = int(c_x - w / 2) + upper_x = int(c_x + w / 2) + lower_y = int(c_y - h / 2) + upper_y = int(c_y + h / 2) + roi = rgb_img[lower_y:upper_y, lower_x:upper_x] + new_armors.append(tracked_armor(bbox, roi, self.frame_tick, -1)) + + if len(self.active_armors) > 0: + # Try to associate with current armors + cost_matrix = np.zeros( + (len(new_armors), len(self.active_armors)), dtype=float) + for i in range(len(new_armors)): + for j in range(len(self.active_armors)): + cost_ij = new_armors[i].compute_cost(self.active_armors[j]) + cost_matrix[i, j] = cost_ij + row_ind, col_ind = scipy.optimize.linear_sum_assignment( + cost_matrix) + + for i, j in zip(row_ind, col_ind): + if cost_matrix[i, j] < self.SE_THRESHOLD: + assert new_armors[i] is not None + self.active_armors[j].update( + new_armors[i], self.frame_tick) + new_armors[i] = None + + new_armors = [i for i in new_armors if i is not None] + + for n in new_armors: + n.armor_id = self.id_gen.get_id() + + # Maintain current buffer. 
If an armor has not been observed by 10 + # frames, it is dropped + self.active_armors = [i for i in self.active_armors + if self.frame_tick - i.observed_frame_tick[-1] < FRAME_BUFFER_SIZE] + + # Unassociated armors are registered as new armors + for a in new_armors: + self.active_armors.append(a) + + # Create a list of bbox and unique IDs to return + ret_bbox_list = [] + ret_id_list = [] + for a in self.active_armors: + # If an armor is observed, directly use the bbox + ret_bbox_list.append(a.predict_bbox(self.frame_tick)) + ret_id_list.append(a.armor_id) + + self.frame_tick += 1 + + return ret_bbox_list, ret_id_list \ No newline at end of file From 2bcebb11b012398edf51654154a56ef0eef242aa Mon Sep 17 00:00:00 2001 From: RogerQi Date: Wed, 7 Jun 2023 12:30:59 -0500 Subject: [PATCH 65/74] [chore] return armor number and armor obj for further processing --- Aiming/Tracking/basic_tracker.py | 4 ++-- Communication/communicator.py | 7 +++++-- Detection/CV_mix_DL.py | 12 +++++------- vision.py | 6 +++--- 4 files changed, 15 insertions(+), 14 deletions(-) diff --git a/Aiming/Tracking/basic_tracker.py b/Aiming/Tracking/basic_tracker.py index 47baab6..85492d2 100644 --- a/Aiming/Tracking/basic_tracker.py +++ b/Aiming/Tracking/basic_tracker.py @@ -123,7 +123,7 @@ def process_one(self, pred_list, enemy_team, rgb_img): (list, list): list of tracked_armors (detections+predictions) and their IDs """ new_armors = [] - for name, conf, bbox in pred_list: + for name, conf, armor_type, bbox, armor in pred_list: c_x, c_y, w, h = bbox lower_x = int(c_x - w / 2) upper_x = int(c_x + w / 2) @@ -155,7 +155,7 @@ def process_one(self, pred_list, enemy_team, rgb_img): for n in new_armors: n.armor_id = self.id_gen.get_id() - # Maintain current buffer. If an armor has not been observed by 10 + # Maintain current buffer. 
If an armor has not been observed by FRAME_BUFFER_SIZE # frames, it is dropped self.active_armors = [i for i in self.active_armors if self.frame_tick - i.observed_frame_tick[-1] < FRAME_BUFFER_SIZE] diff --git a/Communication/communicator.py b/Communication/communicator.py index f85b8f9..7c837f1 100644 --- a/Communication/communicator.py +++ b/Communication/communicator.py @@ -7,7 +7,7 @@ from copy import deepcopy # STM32 to Jetson packet size -STJ_PACKET_SIZE = 14 +STJ_PACKET_SIZE = 18 INT_FP_SCALE = 1e+6 @@ -43,7 +43,8 @@ def __init__( 'my_color': 'red' if self.cfg.DEFAULT_ENEMY_TEAM == 'blue' else 'blue', 'enemy_color': self.cfg.DEFAULT_ENEMY_TEAM.lower(), 'cur_yaw': 0, - 'cur_pitch': 0} + 'cur_pitch': 0, + 'debug_int': 0} self.parsed_packet_cnt = 0 self.seq_num = 0 @@ -208,6 +209,7 @@ def try_parse_one(self, possible_packet): cur_yaw = int.from_bytes(possible_packet[3:7], "little", signed=True) / INT_FP_SCALE cur_pitch = int.from_bytes(possible_packet[7:11], "little", signed=True) / INT_FP_SCALE + debug_int = int.from_bytes(possible_packet[11:15], "little", signed=True) if my_color_int == 0: my_color = 'red' @@ -221,6 +223,7 @@ def try_parse_one(self, possible_packet): 'enemy_color': enemy_color, 'cur_yaw': cur_yaw, 'cur_pitch': cur_pitch, + 'debug_int': debug_int, } def create_packet(self, header, yaw_offset, pitch_offset): diff --git a/Detection/CV_mix_DL.py b/Detection/CV_mix_DL.py index 562bdba..a39078b 100644 --- a/Detection/CV_mix_DL.py +++ b/Detection/CV_mix_DL.py @@ -66,12 +66,10 @@ def __call__(self, rgb_img): for armor in armor_list: center_x, center_y = armor.center height = (armor.left_light.length + armor.right_light.length) / 2 - width = cv2.norm(armor.left_light.center - - armor.right_light.center) + width = cv2.norm(armor.left_light.center - armor.right_light.center) bbox = (center_x, center_y, width, height) name = f'armor_{self.detect_color_str}' - conf = armor.confidence - ret_list.append((name, conf, bbox)) + ret_list.append((name, 
float(armor.confidence), str(armor.number[0]), bbox, armor)) return ret_list def change_color(self, new_color): @@ -598,7 +596,7 @@ class dl_digit_classifier: """Classify digits in armor number using deep learning model.""" LABEL_NAMES_LIST = np.array(['B', '1', '2', '3', '4', '5', 'G', 'O', 'N']) - CLASSIFIER_THRESHOLD = 0.7 + CLASSIFIER_THRESHOLD = 0.9 def __init__(self, config, model_path): """Initialize the classifier. @@ -640,13 +638,13 @@ def __call__(self, armor_list): max_class = softmax_probs.argmax(axis=1) max_class_names = self.LABEL_NAMES_LIST[max_class] - if softmax_probs[0, - max_class] < self.CLASSIFIER_THRESHOLD or max_class_names == 'N': + if softmax_probs[0, max_class] < self.CLASSIFIER_THRESHOLD or max_class_names == 'N': continue # TODO: use digit predictions to improve accuracy? # Right now using that logic causes a lot of false negatives... armor.confidence = softmax_probs[0, max_class] + armor.number = max_class_names ret_list.append(armor) return ret_list diff --git a/vision.py b/vision.py index 1e4e42f..fb28494 100644 --- a/vision.py +++ b/vision.py @@ -43,19 +43,19 @@ def main(): pred = model.detect(frame) for i in range(len(pred)): - name, conf, bbox = pred[i] + name, conf, armor_type, bbox, armor = pred[i] # name from C++ string is in bytes; decoding is needed if isinstance(name, bytes): name_str = name.decode('utf-8') else: name_str = name - pred[i] = (name_str, conf, bbox) + pred[i] = (name_str, conf, armor_type, bbox, armor) elapsed = time.time() - start if config.DEBUG_DISPLAY: viz_frame = frame.copy() - for _, _, bbox in pred: + for _, _, _, bbox, _ in pred: lower_x = int(bbox[0] - bbox[2] / 2) lower_y = int(bbox[1] - bbox[3] / 2) upper_x = int(bbox[0] + bbox[2] / 2) From 177e3e43b0f7148d57ab5377dfbef87d69b4bbf0 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Thu, 8 Jun 2023 19:56:57 -0500 Subject: [PATCH 66/74] [chore] linting and adding docs --- Aiming/Tracking/EKF_tracker.py | 147 +++++++-------------------------- 1 file changed, 31 
insertions(+), 116 deletions(-) diff --git a/Aiming/Tracking/EKF_tracker.py b/Aiming/Tracking/EKF_tracker.py index fb39655..b966e65 100644 --- a/Aiming/Tracking/EKF_tracker.py +++ b/Aiming/Tracking/EKF_tracker.py @@ -1,4 +1,4 @@ -"""Implemented EKF tracker with filterpy library""" +"""Implemented EKF tracker with filterpy library.""" import scipy.optimize import numpy as np from filterpy.kalman import ExtendedKalmanFilter @@ -8,63 +8,65 @@ # TODO: move this to config FRAME_BUFFER_SIZE = 10 + class KalmanTracker(object): """A class that represents a Kalman Fitler tracker. It is the matrix for KF tracking that will be stored in each armor. """ + def __init__(self): - """Initialize EKF from armor. - """ + """Initialize EKF from armor.""" dt = 1 - self.kalman = ExtendedKalmanFilter(dim_x=6,dim_z=2) - #F:transition matrix - self.kalman.F = np.array([[1, 0, dt, 0, 0.5*(dt**2), 0], - [0, 1, 0, dt,0, 0.5*(dt**2)], - [0, 0, 1, 0, dt, 0], - [0, 0, 0, 1, 0, dt], - [0, 0, 0, 0, 1, 0], - [0, 0, 0, 0, 0, 1]], np.float32) - #x:initial state初始状态 initialize the KF position to center - self.kalman.x = np.array([320, 180, 0, 0, 0 ,0]) - #R:measurement noise matrix - #self.kalman.R *= 10 + self.kalman = ExtendedKalmanFilter(dim_x=6, dim_z=2) + # F:transition matrix + self.kalman.F = np.array([[1, 0, dt, 0, 0.5 * (dt**2), 0], + [0, 1, 0, dt, 0, 0.5 * (dt**2)], + [0, 0, 1, 0, dt, 0], + [0, 0, 0, 1, 0, dt], + [0, 0, 0, 0, 1, 0], + [0, 0, 0, 0, 0, 1]], np.float32) + # x:initial state初始状态 initialize the KF position to center + self.kalman.x = np.array([320, 180, 0, 0, 0, 0]) + # R:measurement noise matrix + # self.kalman.R *= 10 self.kalman.R = np.array([[1, 0], [0, 1]], np.float32) * 1 - #Q:process noise matrix + # Q:process noise matrix self.kalman.Q = np.eye(6) - #P:初始协方差矩阵, 初始状态很不确定时把这个拧大一点 - self.kalman.P *= 10 - #measurement and prediction + # P:初始协方差矩阵, 初始状态很不确定时把这个拧大一点 + self.kalman.P *= 10 + # measurement and prediction self.measurement = np.array((2, 1), np.float32) 
self.prediction = np.zeros((2, 1), np.float32) def H_of(self, x): - # H矩阵的jacobian - # 观测模型是线性的,所以H矩阵实际上是恒定的,与x无关 + """Compute the jacobian of the measurement function.""" + # Jocobian of the measurement function H + # The measurement model is linear, so the H matrix is + # actually constant and independent of x return np.array([[1, 0, 0, 0, 0, 0], - [0, 1, 0, 0, 0, 0]]) + [0, 1, 0, 0, 0, 0]]) - def hx(self,x): - # 将状态向量转成测量向量 - # 直接返回x和y的坐标作为测量值 + def hx(self, x): + """Compute the measurement from the state vector x.""" + # Return x and y directly as the measurement return np.array([x[0], x[1]]) - + def update(self, x, y): + """Update the state of this armor with matched armor.""" self.measurement = np.array([x, y], np.float32) self.kalman.update(self.measurement, self.H_of, self.hx) self.kalman.predict() self.prediction = self.kalman.x def get_prediction(self): - """Predict the x and y values + """Predict the x and y values. Returns: tuple: predicted x and y values """ - return int(self.prediction[0]), int(self.prediction[1]) - class tracked_armor(object): """A class that represents a tracked armor. @@ -87,8 +89,6 @@ def __init__(self, bbox, roi, frame_tick, armor_id): self.observed_frame_tick = [frame_tick] self.armor_id = armor_id # unique ID self.KF_matrix = KalmanTracker() - - def compute_cost(self, other_armor): """Compute the cost of matching this armor with another armor. @@ -147,88 +147,3 @@ def predict_bbox(self, cur_frame_tick): predicted_x, predicted_y = self.KF_matrix.get_prediction() self.KF_matrix.update(predicted_x, predicted_y) return (int(predicted_x), int(predicted_y), w, h) - - -class KF_tracker(object): - """ - EKF tracker - """ - - SE_THRESHOLD = 3200 # (40, 40) pixels away - - def __init__(self, config): - """simple EKF. 
- - Args: - config (python object): shared config - """ - self.CFG = config - self.active_armors = [] - self.id_gen = ConsistentIdGenerator() - self.frame_tick = 0 # TODO: use timestamp may be a better idea - - - def process_one(self, pred_list, enemy_team, rgb_img): - """Process one set of detections and rgb images. - - Args: - pred_list (list): list of (name, conf, bbox) tuples - enemy_team (str): enemy team name (blue or red) - rgb_img (np.ndarray): RGB image - - Returns: - (list, list): list of tracked_armors (detections+predictions) and their IDs - """ - new_armors = [] - for name, conf, bbox in pred_list: - c_x, c_y, w, h = bbox - lower_x = int(c_x - w / 2) - upper_x = int(c_x + w / 2) - lower_y = int(c_y - h / 2) - upper_y = int(c_y + h / 2) - roi = rgb_img[lower_y:upper_y, lower_x:upper_x] - new_armors.append(tracked_armor(bbox, roi, self.frame_tick, -1)) - - if len(self.active_armors) > 0: - # Try to associate with current armors - cost_matrix = np.zeros( - (len(new_armors), len(self.active_armors)), dtype=float) - for i in range(len(new_armors)): - for j in range(len(self.active_armors)): - cost_ij = new_armors[i].compute_cost(self.active_armors[j]) - cost_matrix[i, j] = cost_ij - row_ind, col_ind = scipy.optimize.linear_sum_assignment( - cost_matrix) - - for i, j in zip(row_ind, col_ind): - if cost_matrix[i, j] < self.SE_THRESHOLD: - assert new_armors[i] is not None - self.active_armors[j].update( - new_armors[i], self.frame_tick) - new_armors[i] = None - - new_armors = [i for i in new_armors if i is not None] - - for n in new_armors: - n.armor_id = self.id_gen.get_id() - - # Maintain current buffer. 
If an armor has not been observed by 10 - # frames, it is dropped - self.active_armors = [i for i in self.active_armors - if self.frame_tick - i.observed_frame_tick[-1] < FRAME_BUFFER_SIZE] - - # Unassociated armors are registered as new armors - for a in new_armors: - self.active_armors.append(a) - - # Create a list of bbox and unique IDs to return - ret_bbox_list = [] - ret_id_list = [] - for a in self.active_armors: - # If an armor is observed, directly use the bbox - ret_bbox_list.append(a.predict_bbox(self.frame_tick)) - ret_id_list.append(a.armor_id) - - self.frame_tick += 1 - - return ret_bbox_list, ret_id_list \ No newline at end of file From 54416b41039a8777149fd57cb60aa48139bd791a Mon Sep 17 00:00:00 2001 From: RogerQi Date: Thu, 8 Jun 2023 19:59:10 -0500 Subject: [PATCH 67/74] temporarily disable prediction due to use of relative angles --- Aiming/Tracking/basic_tracker.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Aiming/Tracking/basic_tracker.py b/Aiming/Tracking/basic_tracker.py index 85492d2..aa527ed 100644 --- a/Aiming/Tracking/basic_tracker.py +++ b/Aiming/Tracking/basic_tracker.py @@ -5,7 +5,7 @@ from .consistent_id_gen import ConsistentIdGenerator # TODO: move this to config -FRAME_BUFFER_SIZE = 10 +FRAME_BUFFER_SIZE = 1 # TODO: this class should be part of abstract base tracker class From e7fa8657430acc292aafff21a6c66117022d1524 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sat, 10 Jun 2023 22:56:39 -0500 Subject: [PATCH 68/74] add useful util functions --- Utils/__init__.py | 52 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 52 insertions(+) diff --git a/Utils/__init__.py b/Utils/__init__.py index a1c415a..800b647 100644 --- a/Utils/__init__.py +++ b/Utils/__init__.py @@ -84,3 +84,55 @@ def rect_contains(rect, pt): """ return rect[0] < pt[0] < rect[0] + \ rect[2] and rect[1] < pt[1] < rect[1] + rect[3] + + +def get_intrinsic_matrix(cfg): + """Get the camera intrinsic matrix. 
+ + Args: + cfg (object): config.py node object + + Returns: + K: camera intrinsic matrix 3x3 + """ + if hasattr(cfg, 'K'): + return cfg.K + elif hasattr(cfg.AUTOAIM_CAMERA, 'K'): + return cfg.AUTOAIM_CAMERA.K + else: + # Infer from image size + K = np.eye(3) + + H = cfg.IMG_HEIGHT + W = cfg.IMG_WIDTH + fl = min(H, W) * 2 + cx = W / 2 + cy = H / 2 + + K[0, 0] = fl + K[1, 1] = fl + K[0, 2] = cx + K[1, 2] = cy + + return K + +def get_radian_diff(angle1, angle2): + """Compute the abs difference between two angles in radians. + + Parameters: + angle1 (float): First angle in radians. + angle2 (float): Second angle in radians. + + Returns: + float: The difference between the angles in radians, ranging from 0 to pi. + """ + # Normalize angles to be between 0 and 2*pi + angle1 = angle1 % (2 * np.pi) + angle2 = angle2 % (2 * np.pi) + + # Compute the difference along the lesser arc + diff = abs(angle1 - angle2) + if diff > np.pi: + diff = 2 * np.pi - diff + + return diff From cdc94addf162251fb88d517af4ef63c88792cb31 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sun, 11 Jun 2023 21:40:29 -0500 Subject: [PATCH 69/74] [feat] modify interface to track absolute gimbal angles --- Aiming/Aim.py | 149 +++++++++++++---------- Aiming/Tracking/EKF_tracker.py | 95 +++++++++------ Aiming/Tracking/basic_tracker.py | 201 +++++++++++++++---------------- vision.py | 46 ++++--- 4 files changed, 268 insertions(+), 223 deletions(-) diff --git a/Aiming/Aim.py b/Aiming/Aim.py index 787490c..44113a8 100644 --- a/Aiming/Aim.py +++ b/Aiming/Aim.py @@ -1,6 +1,10 @@ """Hosts the Aim class, which is the main class for auto-aiming.""" from .Tracking import basic_tracker +from .DistEst import pnp_estimator import numpy as np +import Utils + +from IPython import embed class Aim: @@ -23,6 +27,66 @@ def __init__(self, config): """ self.CFG = config self.tracker = basic_tracker(self.CFG) + self.distance_estimator = pnp_estimator(self.CFG) + + def preprocess(self, pred_list, stm32_state_dict, rgb_img): + 
"""Preprocess predictions to compute armor distances and absolute yaw/pitch. + + Args: + pred_list (list): a list of predictions + stm32_state_dict (dict): a dictionary of stm32 state + + Returns: + list: a list of tuples (armor_type, abs_yaw, abs_pitch, y_distance, z_distance) + """ + gimbal_yaw = stm32_state_dict['cur_yaw'] + gimbal_pitch = stm32_state_dict['cur_pitch'] + + ret_list = [] + + for pred in pred_list: + armor_name, conf, armor_type, bbox, armor = pred + c_x, c_y, w, h = bbox + rel_yaw, rel_pitch = self.get_rotation_angle(c_x, c_y) + abs_yaw = gimbal_yaw + rel_yaw + abs_pitch = gimbal_pitch + rel_pitch + y_distance, z_distance = self.distance_estimator.estimate_distance(armor, rgb_img) + + ret_list.append((armor_type, abs_yaw, abs_pitch, y_distance, z_distance)) + + return ret_list + + def pick_target(self, distance_angle_list, stm32_state_dict): + """Select a priortized target from a list of targets. + + Args: + distance_angle_list (list): list of targets returned from tracker + stm32_state_dict (dict): a dictionary of stm32 state + + Returns: + target: a tuple of (target_pitch, target_yaw, target_y_dist, target_z_dist) + """ + gimbal_pitch = stm32_state_dict['cur_pitch'] + gimbal_yaw = stm32_state_dict['cur_yaw'] + + target_pitch = None + target_yaw = None + target_y_dist = None + target_z_dist = None + min_angle_diff = 9999 + + for y_dist, z_dist, predicted_pitch, predicted_yaw in distance_angle_list: + pitch_diff = Utils.get_radian_diff(predicted_pitch, gimbal_pitch) + yaw_diff = Utils.get_radian_diff(predicted_yaw, gimbal_yaw) + angle_diff = pitch_diff + yaw_diff + if angle_diff < min_angle_diff: + target_pitch = predicted_pitch + target_yaw = predicted_yaw + target_y_dist = y_dist + target_z_dist = z_dist + min_angle_diff = angle_diff + + return target_pitch, target_yaw, target_y_dist, target_z_dist def process_one(self, pred_list, enemy_team, rgb_img, stm32_state_dict): """Process one frame of predictions. 
@@ -38,45 +102,34 @@ def process_one(self, pred_list, enemy_team, rgb_img, stm32_state_dict): """ assert enemy_team in ['blue', 'red'] - # TODO: use assertion to check enemy_team + observed_armors = self.preprocess(pred_list, stm32_state_dict, rgb_img) - final_bbox_list, final_id_list = self.tracker.process_one( - pred_list, enemy_team, rgb_img) + distance_angle_list, final_id_list = self.tracker.process_one( + observed_armors, enemy_team, rgb_img) # TODO: integrate this into tracking for consistent tracking - closet_pred, closet_dist = self.get_closet_pred( - final_bbox_list, rgb_img) + target_dist_angle_tuple = self.pick_target(distance_angle_list, stm32_state_dict) - if closet_pred is None: + if target_dist_angle_tuple[0] is None: return None - center_x, center_y, width, height = closet_pred - - # Get yaw/pitch differences in radians - yaw_diff, pitch_diff = self.get_rotation_angle(center_x, center_y) - - calibrated_yaw_diff, calibrated_pitch_diff = self.posterior_calibration( - yaw_diff, pitch_diff, closet_dist) - # Clip yaw/pitch differences to [-2pi, 2pi] - calibrated_pitch_diff = np.clip(calibrated_pitch_diff, -2 * np.pi, 2 * np.pi) - calibrated_yaw_diff = np.clip(calibrated_yaw_diff, -2 * np.pi, 2 * np.pi) + target_pitch, target_yaw, target_y_dist, target_z_dist = target_dist_angle_tuple - # Compute absolute yaw / pitch - abs_pitch = calibrated_pitch_diff + stm32_state_dict['cur_pitch'] - abs_yaw = calibrated_yaw_diff + stm32_state_dict['cur_yaw'] + calibrated_pitch, calibrated_yaw = self.posterior_calibration( + target_pitch, target_yaw, target_y_dist, target_z_dist) return { - 'yaw_diff': calibrated_yaw_diff, - 'pitch_diff': calibrated_pitch_diff, - 'abs_yaw': abs_yaw, - 'abs_pitch': abs_pitch, - 'center_x': center_x, - 'center_y': center_y, - 'final_bbox_list': final_bbox_list, - 'final_id_list': final_id_list, + # 'yaw_diff': calibrated_yaw_diff, + # 'pitch_diff': calibrated_pitch_diff, + 'abs_yaw': calibrated_yaw, + 'abs_pitch': calibrated_pitch, + # 
'center_x': center_x, + # 'center_y': center_y, + # 'final_bbox_list': final_bbox_list, + # 'final_id_list': final_id_list, } - def posterior_calibration(self, yaw_diff, pitch_diff, distance): + def posterior_calibration(self, raw_pitch, raw_yaw, target_y_dist, target_z_dist): """Given a set of naively estimated parameters, return calibrated parameters. Idea: @@ -90,43 +143,9 @@ def posterior_calibration(self, yaw_diff, pitch_diff, distance): Returns: (float, float): calibrated yaw_diff, pitch_diff in radians """ - if distance >= 2**16: - # In this case, distance is a rough estimation from bbox size - return (yaw_diff, pitch_diff) - else: - # In this case, distance comes from D455 stereo estimation - # TODO: compute a range table - return (yaw_diff, pitch_diff) - - def get_closet_pred(self, bbox_list, rgb_img): - """Get the closet prediction to camera focal point. - - Args: - bbox_list (list): a list of bounding boxes - rgb_img (np.ndarray): RGB image - - Returns: - (bbox_list, float): closet_pred, closet_dist - """ - # TODO: instead of camera focal point; calibrate closet pred to - # operator view - H, W, C = rgb_img.shape - focal_y = H / 2 - focal_x = W / 2 - closet_pred = None - closet_dist = None # Cloest to camera in z-axis - closet_dist = 99999999 - for bbox in bbox_list: - center_x, center_y, width, height = bbox - cur_dist = (center_x - focal_x)**2 + (center_y - focal_y)**2 - if closet_pred is None: - closet_pred = bbox - closet_dist = cur_dist - else: - if cur_dist < closet_dist: - closet_pred = bbox - closet_dist = cur_dist - return closet_pred, closet_dist + # In this case, distance comes from D455 stereo estimation + # TODO: compute a range table + return (raw_pitch, raw_yaw) def get_rotation_angle(self, bbox_center_x, bbox_center_y): """Given a bounding box center, return the yaw/pitch difference in radians. 
diff --git a/Aiming/Tracking/EKF_tracker.py b/Aiming/Tracking/EKF_tracker.py index b966e65..219f0b3 100644 --- a/Aiming/Tracking/EKF_tracker.py +++ b/Aiming/Tracking/EKF_tracker.py @@ -5,6 +5,8 @@ from scipy.spatial.distance import mahalanobis as scipy_mahalanobis from .consistent_id_gen import ConsistentIdGenerator +import Utils + # TODO: move this to config FRAME_BUFFER_SIZE = 10 @@ -15,7 +17,7 @@ class KalmanTracker(object): It is the matrix for KF tracking that will be stored in each armor. """ - def __init__(self): + def __init__(self, init_pitch, init_yaw): """Initialize EKF from armor.""" dt = 1 self.kalman = ExtendedKalmanFilter(dim_x=6, dim_z=2) @@ -26,18 +28,18 @@ def __init__(self): [0, 0, 0, 1, 0, dt], [0, 0, 0, 0, 1, 0], [0, 0, 0, 0, 0, 1]], np.float32) - # x:initial state初始状态 initialize the KF position to center - self.kalman.x = np.array([320, 180, 0, 0, 0, 0]) + # x:initial state initialize the KF position to center + self.kalman.x = np.array([init_pitch, init_yaw, 0, 0, 0, 0]) # R:measurement noise matrix # self.kalman.R *= 10 self.kalman.R = np.array([[1, 0], [0, 1]], np.float32) * 1 # Q:process noise matrix self.kalman.Q = np.eye(6) - # P:初始协方差矩阵, 初始状态很不确定时把这个拧大一点 + # P:initial covariance matrix. Tune this up if initial state is not accurate self.kalman.P *= 10 # measurement and prediction - self.measurement = np.array((2, 1), np.float32) - self.prediction = np.zeros((2, 1), np.float32) + self.measurement = np.array([init_pitch, init_yaw], np.float32) + self.prediction = np.array([init_pitch, init_yaw], np.float32) def H_of(self, x): """Compute the jacobian of the measurement function.""" @@ -52,8 +54,14 @@ def hx(self, x): # Return x and y directly as the measurement return np.array([x[0], x[1]]) - def update(self, x, y): - """Update the state of this armor with matched armor.""" + def update(self, x, y, certain_flag=True): + """Update the state of this armor with matched armor. 
+ + Args: + x: x state (2d coordinates) + y: y state (2d coordinates) + certain_x (bool): whether the x value is certain (observed) + """ self.measurement = np.array([x, y], np.float32) self.kalman.update(self.measurement, self.H_of, self.hx) self.kalman.predict() @@ -65,7 +73,7 @@ def get_prediction(self): Returns: tuple: predicted x and y values """ - return int(self.prediction[0]), int(self.prediction[1]) + return self.prediction[0], self.prediction[1] class tracked_armor(object): @@ -75,7 +83,7 @@ class tracked_armor(object): bounding box of the next frame. """ - def __init__(self, bbox, roi, frame_tick, armor_id): + def __init__(self, armor_type, abs_yaw, abs_pitch, y_distance, z_distance, frame_tick): """Initialize from prediction. Args: @@ -84,11 +92,14 @@ def __init__(self, bbox, roi, frame_tick, armor_id): frame_tick (int): frame tick armor_id (int): unique ID """ - self.bbox_buffer = [bbox] - self.roi_buffer = [roi] + self.pitch_buffer = [abs_pitch] + self.yaw_buffer = [abs_yaw] + self.armor_type = armor_type + self.y_distance_buffer = [y_distance] + self.z_distance_buffer = [z_distance] self.observed_frame_tick = [frame_tick] - self.armor_id = armor_id # unique ID - self.KF_matrix = KalmanTracker() + self.armor_id = -1 # unique ID + self.KF_matrix = KalmanTracker(abs_pitch, abs_yaw) def compute_cost(self, other_armor): """Compute the cost of matching this armor with another armor. 
@@ -100,10 +111,15 @@ def compute_cost(self, other_armor): float: cost """ assert isinstance(other_armor, tracked_armor) + if self.armor_type != other_armor.armor_type: + return 99999999 # TODO: use more sophisticated metrics (e.g., RGB) as cost function - c_x, c_y, w, h = self.bbox_buffer[-1] - o_c_x, o_c_y, o_w, o_h = other_armor.bbox_buffer[-1] - return np.square(c_x - o_c_x) + np.square(c_y - o_c_y) + my_yaw = self.yaw_buffer[-1] + my_pitch = self.pitch_buffer[-1] + other_yaw = other_armor.yaw_buffer[-1] + other_pitch = other_armor.pitch_buffer[-1] + return Utils.get_radian_diff(my_yaw, other_yaw) + \ + Utils.get_radian_diff(my_pitch, other_pitch) def update(self, other_armor, frame_tick): """Update the state of this armor with matched armor. @@ -113,37 +129,40 @@ def update(self, other_armor, frame_tick): frame_tick (int): frame tick """ # Only call if these two armors are matched - self.bbox_buffer.append(other_armor.bbox_buffer[-1]) - self.roi_buffer.append(other_armor.roi_buffer[-1]) + assert len(other_armor.pitch_buffer) == 1 + self.pitch_buffer.append(other_armor.pitch_buffer[-1]) + self.yaw_buffer.append(other_armor.yaw_buffer[-1]) + self.y_distance_buffer.append(other_armor.y_distance_buffer[-1]) + self.z_distance_buffer.append(other_armor.z_distance_buffer[-1]) self.observed_frame_tick.append(frame_tick) # Maintain each armor's buffer so that anything older than # FRAME_BUFFER_SIZE is dropped - self.bbox_buffer = self.bbox_buffer[-FRAME_BUFFER_SIZE:] - self.roi_buffer = self.roi_buffer[-FRAME_BUFFER_SIZE:] + self.pitch_buffer = self.pitch_buffer[-FRAME_BUFFER_SIZE:] + self.yaw_buffer = self.yaw_buffer[-FRAME_BUFFER_SIZE:] + self.y_distance_buffer = self.y_distance_buffer[-FRAME_BUFFER_SIZE:] + self.z_distance_buffer = self.z_distance_buffer[-FRAME_BUFFER_SIZE:] + self.observed_frame_tick = self.observed_frame_tick[-FRAME_BUFFER_SIZE:] - def predict_bbox(self, cur_frame_tick): - """Predict the bounding box of the tracked armor at cur frame tick. 
+ def predict_distance_angle(self, cur_frame_tick): + """Predict the distance and angle of the tracked armor at cur frame tick. Args: cur_frame_tick (int): current frame tick - TODO - - Use Kalman filter to do prediction - - Support future frame idx for predictions - Returns: - tuple: (center_x, center_y, w, h) + tuple: (y_distance, z_distance, yaw, pitch) """ - if cur_frame_tick == self.observed_frame_tick[-1] or len( - self.bbox_buffer) == 1: - # print(self.bbox_buffer[-1]) - c_x, c_y, w, h = self.bbox_buffer[-1] - self.KF_matrix.update(c_x, c_y) - return self.bbox_buffer[-1] + if cur_frame_tick == self.observed_frame_tick[-1] or len(self.y_distance_buffer) == 1: + cur_pitch = self.pitch_buffer[-1] + cur_yaw = self.yaw_buffer[-1] + self.KF_matrix.update(cur_pitch, cur_yaw) + return self.y_distance_buffer[-1], self.z_distance_buffer[-1], cur_pitch, cur_yaw else: # KF tracking - c_x, c_y, w, h = self.bbox_buffer[-1] - predicted_x, predicted_y = self.KF_matrix.get_prediction() - self.KF_matrix.update(predicted_x, predicted_y) - return (int(predicted_x), int(predicted_y), w, h) + predicted_pitch, predicted_yaw = self.KF_matrix.get_prediction() + self.KF_matrix.update(predicted_pitch, predicted_yaw) + # TODO: use filtering to predict distances as well + latest_y_dist = self.y_distance_buffer[-1] + latest_z_dist = self.z_distance_buffer[-1] + return latest_y_dist, latest_z_dist, predicted_pitch, predicted_yaw diff --git a/Aiming/Tracking/basic_tracker.py b/Aiming/Tracking/basic_tracker.py index aa527ed..867f861 100644 --- a/Aiming/Tracking/basic_tracker.py +++ b/Aiming/Tracking/basic_tracker.py @@ -4,91 +4,91 @@ from .consistent_id_gen import ConsistentIdGenerator -# TODO: move this to config -FRAME_BUFFER_SIZE = 1 - # TODO: this class should be part of abstract base tracker class +from .EKF_tracker import tracked_armor - -class tracked_armor(object): - """A class that represents a tracked armor. 
- - It stores the history of bounding boxes and ROIs, and can predict the - bounding box of the next frame. - """ - - def __init__(self, bbox, roi, frame_tick, armor_id): - """Initialize from prediction. - - Args: - bbox (tuple): (center_x, center_y, w, h) - roi (np.ndarray): ROI of the armor - frame_tick (int): frame tick - armor_id (int): unique ID - """ - self.bbox_buffer = [bbox] - self.roi_buffer = [roi] - self.observed_frame_tick = [frame_tick] - self.armor_id = armor_id # unique ID - - def compute_cost(self, other_armor): - """Compute the cost of matching this armor with another armor. - - Args: - other_armor (tracked_armor): another armor - - Returns: - float: cost - """ - assert isinstance(other_armor, tracked_armor) - # TODO: use more sophisticated metrics (e.g., RGB) as cost function - c_x, c_y, w, h = self.bbox_buffer[-1] - o_c_x, o_c_y, o_w, o_h = other_armor.bbox_buffer[-1] - return np.square(c_x - o_c_x) + np.square(c_y - o_c_y) - - def update(self, other_armor, frame_tick): - """Update the state of this armor with matched armor. - - Args: - other_armor (tracked_armor): another armor - frame_tick (int): frame tick - """ - # Only call if these two armors are matched - self.bbox_buffer.append(other_armor.bbox_buffer[-1]) - self.roi_buffer.append(other_armor.roi_buffer[-1]) - self.observed_frame_tick.append(frame_tick) - - # Maintain each armor's buffer so that anything older than - # FRAME_BUFFER_SIZE is dropped - self.bbox_buffer = self.bbox_buffer[-FRAME_BUFFER_SIZE:] - self.roi_buffer = self.roi_buffer[-FRAME_BUFFER_SIZE:] - - def predict_bbox(self, cur_frame_tick): - """Predict the bounding box of the tracked armor at cur frame tick. 
- - Args: - cur_frame_tick (int): current frame tick - - TODO - - Use Kalman filter to do prediction - - Support future frame idx for predictions - - Returns: - tuple: (center_x, center_y, w, h) - """ - if cur_frame_tick == self.observed_frame_tick[-1] or len( - self.bbox_buffer) == 1: - return self.bbox_buffer[-1] - else: - # Linear extrapolation - c_x, c_y, w, h = self.bbox_buffer[-1] - o_c_x, o_c_y, o_w, o_h = self.bbox_buffer[-2] - delta_tick = self.observed_frame_tick[-1] - \ - self.observed_frame_tick[-2] - new_delta_tick = cur_frame_tick - self.observed_frame_tick[-1] - delta_x = (c_x - o_c_x) * new_delta_tick / delta_tick - delta_y = (c_y - o_c_y) * new_delta_tick / delta_tick - return (int(c_x + delta_x), int(c_y + delta_y), w, h) +# TODO: move this to config +FRAME_BUFFER_SIZE = 10 + +# class tracked_armor(object): +# """A class that represents a tracked armor. + +# It stores the history of bounding boxes and ROIs, and can predict the +# bounding box of the next frame. +# """ + +# def __init__(self, bbox, roi, frame_tick, armor_id): +# """Initialize from prediction. + +# Args: +# bbox (tuple): (center_x, center_y, w, h) +# roi (np.ndarray): ROI of the armor +# frame_tick (int): frame tick +# armor_id (int): unique ID +# """ +# self.bbox_buffer = [bbox] +# self.roi_buffer = [roi] +# self.observed_frame_tick = [frame_tick] +# self.armor_id = armor_id # unique ID + +# def compute_cost(self, other_armor): +# """Compute the cost of matching this armor with another armor. + +# Args: +# other_armor (tracked_armor): another armor + +# Returns: +# float: cost +# """ +# assert isinstance(other_armor, tracked_armor) +# # TODO: use more sophisticated metrics (e.g., RGB) as cost function +# c_x, c_y, w, h = self.bbox_buffer[-1] +# o_c_x, o_c_y, o_w, o_h = other_armor.bbox_buffer[-1] +# return np.square(c_x - o_c_x) + np.square(c_y - o_c_y) + +# def update(self, other_armor, frame_tick): +# """Update the state of this armor with matched armor. 
+ +# Args: +# other_armor (tracked_armor): another armor +# frame_tick (int): frame tick +# """ +# # Only call if these two armors are matched +# self.bbox_buffer.append(other_armor.bbox_buffer[-1]) +# self.roi_buffer.append(other_armor.roi_buffer[-1]) +# self.observed_frame_tick.append(frame_tick) + +# # Maintain each armor's buffer so that anything older than +# # FRAME_BUFFER_SIZE is dropped +# self.bbox_buffer = self.bbox_buffer[-FRAME_BUFFER_SIZE:] +# self.roi_buffer = self.roi_buffer[-FRAME_BUFFER_SIZE:] + +# def predict_bbox(self, cur_frame_tick): +# """Predict the bounding box of the tracked armor at cur frame tick. + +# Args: +# cur_frame_tick (int): current frame tick + +# TODO +# - Use Kalman filter to do prediction +# - Support future frame idx for predictions + +# Returns: +# tuple: (center_x, center_y, w, h) +# """ +# if cur_frame_tick == self.observed_frame_tick[-1] or len( +# self.bbox_buffer) == 1: +# return self.bbox_buffer[-1] +# else: +# # Linear extrapolation +# c_x, c_y, w, h = self.bbox_buffer[-1] +# o_c_x, o_c_y, o_w, o_h = self.bbox_buffer[-2] +# delta_tick = self.observed_frame_tick[-1] - \ +# self.observed_frame_tick[-2] +# new_delta_tick = cur_frame_tick - self.observed_frame_tick[-1] +# delta_x = (c_x - o_c_x) * new_delta_tick / delta_tick +# delta_y = (c_y - o_c_y) * new_delta_tick / delta_tick +# return (int(c_x + delta_x), int(c_y + delta_y), w, h) class basic_tracker(object): @@ -115,7 +115,7 @@ def process_one(self, pred_list, enemy_team, rgb_img): """Process one set of detections and rgb images. 
Args: - pred_list (list): list of (name, conf, bbox) tuples + pred_list (list): (armor_type, abs_yaw, abs_pitch, y_distance, z_distance) tuples enemy_team (str): enemy team name (blue or red) rgb_img (np.ndarray): RGB image @@ -123,31 +123,30 @@ def process_one(self, pred_list, enemy_team, rgb_img): (list, list): list of tracked_armors (detections+predictions) and their IDs """ new_armors = [] - for name, conf, armor_type, bbox, armor in pred_list: - c_x, c_y, w, h = bbox - lower_x = int(c_x - w / 2) - upper_x = int(c_x + w / 2) - lower_y = int(c_y - h / 2) - upper_y = int(c_y + h / 2) - roi = rgb_img[lower_y:upper_y, lower_x:upper_x] - new_armors.append(tracked_armor(bbox, roi, self.frame_tick, -1)) + for armor_type, abs_yaw, abs_pitch, y_distance, z_distance in pred_list: + new_armors.append( + tracked_armor( + armor_type, + abs_yaw, + abs_pitch, + y_distance, + z_distance, + self.frame_tick)) if len(self.active_armors) > 0: # Try to associate with current armors - cost_matrix = np.zeros( - (len(new_armors), len(self.active_armors)), dtype=float) + cost_matrix = np.zeros((len(new_armors), len(self.active_armors)), dtype=float) for i in range(len(new_armors)): for j in range(len(self.active_armors)): cost_ij = new_armors[i].compute_cost(self.active_armors[j]) cost_matrix[i, j] = cost_ij - row_ind, col_ind = scipy.optimize.linear_sum_assignment( - cost_matrix) + + row_ind, col_ind = scipy.optimize.linear_sum_assignment(cost_matrix) for i, j in zip(row_ind, col_ind): if cost_matrix[i, j] < self.SE_THRESHOLD: assert new_armors[i] is not None - self.active_armors[j].update( - new_armors[i], self.frame_tick) + self.active_armors[j].update(new_armors[i], self.frame_tick) new_armors[i] = None new_armors = [i for i in new_armors if i is not None] @@ -165,13 +164,13 @@ def process_one(self, pred_list, enemy_team, rgb_img): self.active_armors.append(a) # Create a list of bbox and unique IDs to return - ret_bbox_list = [] + ret_distance_angle_list = [] ret_id_list = [] for a 
in self.active_armors: # If an armor is observed, directly use the bbox - ret_bbox_list.append(a.predict_bbox(self.frame_tick)) + ret_distance_angle_list.append(a.predict_distance_angle(self.frame_tick)) ret_id_list.append(a.armor_id) self.frame_tick += 1 - return ret_bbox_list, ret_id_list + return ret_distance_angle_list, ret_id_list diff --git a/vision.py b/vision.py index fb28494..c5fee47 100644 --- a/vision.py +++ b/vision.py @@ -70,23 +70,23 @@ def main(): # Pour all predictions into the aimer, which returns relative angles ret_dict = aimer.process_one(pred, enemy_team, frame, stm32_state_dict) - if config.DEBUG_DISPLAY: - viz_frame = frame.copy() - if ret_dict: - for i in range(len(ret_dict['final_bbox_list'])): - bbox = ret_dict['final_bbox_list'][i] - unique_id = ret_dict['final_id_list'][i] - lower_x = int(bbox[0] - bbox[2] / 2) - lower_y = int(bbox[1] - bbox[3] / 2) - upper_x = int(bbox[0] + bbox[2] / 2) - upper_y = int(bbox[1] + bbox[3] / 2) - viz_frame = cv2.rectangle( - viz_frame, (lower_x, lower_y), (upper_x, upper_y), (0, 255, 0), 2) - viz_frame = cv2.putText(viz_frame, str(unique_id), (lower_x, lower_y), - cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 0), 2) - cv2.imshow('filtered_detected', viz_frame) - if cv2.waitKey(1) & 0xFF == ord('q'): - exit(0) + # if config.DEBUG_DISPLAY: + # viz_frame = frame.copy() + # if ret_dict: + # for i in range(len(ret_dict['final_bbox_list'])): + # bbox = ret_dict['final_bbox_list'][i] + # unique_id = ret_dict['final_id_list'][i] + # lower_x = int(bbox[0] - bbox[2] / 2) + # lower_y = int(bbox[1] - bbox[3] / 2) + # upper_x = int(bbox[0] + bbox[2] / 2) + # upper_y = int(bbox[1] + bbox[3] / 2) + # viz_frame = cv2.rectangle( + # viz_frame, (lower_x, lower_y), (upper_x, upper_y), (0, 255, 0), 2) + # viz_frame = cv2.putText(viz_frame, str(unique_id), (lower_x, lower_y), + # cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 255, 0), 2) + # cv2.imshow('filtered_detected', viz_frame) + # if cv2.waitKey(1) & 0xFF == ord('q'): + # exit(0) # TODO: put 
this into debug display show_frame = frame.copy() @@ -96,9 +96,17 @@ def main(): print("Target abs Yaw angle: ", ret_dict['abs_yaw']) communicator.process_one_packet( config.MOVE_YOKE, ret_dict['abs_yaw'], ret_dict['abs_pitch']) + # Reverse compute center_x and center_y from yaw angle + yaw_diff = ret_dict['abs_yaw'] - stm32_state_dict['cur_yaw'] + pitch_diff = ret_dict['abs_pitch'] - stm32_state_dict['cur_pitch'] + target_x = -yaw_diff / (config.AUTOAIM_CAMERA.YAW_FOV_HALF / + config.IMG_CENTER_X) + config.IMG_CENTER_X + target_y = pitch_diff / (config.AUTOAIM_CAMERA.PITCH_FOV_HALF / + config.IMG_CENTER_Y) + config.IMG_CENTER_Y + # import pdb; pdb.set_trace() show_frame = cv2.circle(show_frame, - (int(ret_dict['center_x']), - int(ret_dict['center_y'])), + (int(target_x), + int(target_y)), 10, (0, 255, 0), 10) else: show_frame = cv2.putText(show_frame, 'NOT FOUND', (50, 50), From 345c1c21a2719e35322103200610a23f2c52a8f3 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sun, 11 Jun 2023 21:40:56 -0500 Subject: [PATCH 70/74] [chore] some linting --- Camera/mdvs.py | 10 +++------- Utils/__init__.py | 1 + 2 files changed, 4 insertions(+), 7 deletions(-) diff --git a/Camera/mdvs.py b/Camera/mdvs.py index 2fa4a64..766e014 100644 --- a/Camera/mdvs.py +++ b/Camera/mdvs.py @@ -105,8 +105,7 @@ def get_frame(self): # Grab one frame from camera try: pRawData, FrameHead = mvsdk.CameraGetImageBuffer(self.cam, 200) - mvsdk.CameraImageProcess( - self.cam, pRawData, self.frame_buffer, FrameHead) + mvsdk.CameraImageProcess(self.cam, pRawData, self.frame_buffer, FrameHead) mvsdk.CameraReleaseImageBuffer(self.cam, pRawData) # At this point the frame is stored in pFrameBuffer. 
@@ -124,16 +123,13 @@ def get_frame(self): FrameHead.iWidth, 1 if FrameHead.uiMediaType == mvsdk.CAMERA_MEDIA_TYPE_MONO8 else 3)) - frame = cv2.resize( - frame, (self.width, self.height), interpolation=cv2.INTER_LINEAR) + frame = cv2.resize(frame, (self.width, self.height), interpolation=cv2.INTER_LINEAR) if self.cfg.ROTATE_180: frame = cv2.rotate(frame, cv2.ROTATE_180) return frame except mvsdk.CameraException as e: if e.error_code != mvsdk.CAMERA_STATUS_TIME_OUT: - print( - "CameraGetImageBuffer failed({}): {}".format( - e.error_code, e.message)) + print("CameraGetImageBuffer failed({}): {}".format(e.error_code, e.message)) def __del__(self): """Clean up MDVS driver connection and buffer.""" diff --git a/Utils/__init__.py b/Utils/__init__.py index 800b647..2a20bc7 100644 --- a/Utils/__init__.py +++ b/Utils/__init__.py @@ -116,6 +116,7 @@ def get_intrinsic_matrix(cfg): return K + def get_radian_diff(angle1, angle2): """Compute the abs difference between two angles in radians. From 124c57b12f66e4ebbf50641d4cccbfc9f15dd2c3 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Sun, 11 Jun 2023 21:41:19 -0500 Subject: [PATCH 71/74] [feat] Add simple distance estimation using PnP solver --- Aiming/DistEst/__init__.py | 3 ++ Aiming/DistEst/pnp_solver.py | 53 ++++++++++++++++++++++++++++++++++++ 2 files changed, 56 insertions(+) create mode 100644 Aiming/DistEst/__init__.py create mode 100644 Aiming/DistEst/pnp_solver.py diff --git a/Aiming/DistEst/__init__.py b/Aiming/DistEst/__init__.py new file mode 100644 index 0000000..7251d9d --- /dev/null +++ b/Aiming/DistEst/__init__.py @@ -0,0 +1,3 @@ +"""Aggregate all distance estimators into one module.""" + +from .pnp_solver import pnp_estimator diff --git a/Aiming/DistEst/pnp_solver.py b/Aiming/DistEst/pnp_solver.py new file mode 100644 index 0000000..e65b5a1 --- /dev/null +++ b/Aiming/DistEst/pnp_solver.py @@ -0,0 +1,53 @@ +"""Simple distance solver using PnP.""" +import numpy as np +import cv2 +import Utils + + +class 
pnp_estimator: + """Distance estimator using PnP.""" + + def __init__(self, cfg): + """Initialize the PnP solver. + + Args: + cfg (object): python config node object + """ + self.cfg = cfg + self.K = Utils.get_intrinsic_matrix(self.cfg) + + # 3D armor board coordinates centered at armor board center + # The unit is in mm (millimeter) + self.armor_3d_pts = np.array([ + [-65, -125 / 4, 0], # left top + [-65, 125 / 4, 0], + [65, -125 / 4, 0], + [65, 125 / 4, 0] + ]).reshape((4, 3, 1)) + + def estimate_distance(self, armor, img_rgb): + """Estimate the distance to the armor. + + Args: + armor (armor): selected armor object + img_rgb (np.array): RGB image of the frame + + Returns: + (y_dist, z_dist): distance along y-axis (pitch) and z-axis (distance from camera) + """ + obj_2d_pts = np.array([ + armor.left_light.top.astype(int), + armor.left_light.btm.astype(int), + armor.right_light.top.astype(int), + armor.right_light.btm.astype(int), + ]).reshape((4, 2, 1)) + + retval, rvec, tvec = cv2.solvePnP(self.armor_3d_pts.astype(float), + obj_2d_pts.astype(float), + self.K, + distCoeffs=None) + + # rot_mat, _ = cv2.Rodrigues(rvec) + # abs_dist = np.sqrt(np.sum(tvec**2)) + + return -tvec[1], tvec[2] From 2766e23d208b7df8e5875be58e62867041eb1bf5 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Mon, 12 Jun 2023 23:33:09 -0500 Subject: [PATCH 72/74] [feat] add trajectory modeling prototype to offset pitch for gravity --- Aiming/Aim.py | 12 ++++++--- Aiming/DistEst/pnp_solver.py | 5 ++-- Aiming/TrajModel/__init__.py | 6 +++++ Aiming/TrajModel/gravity.py | 49 ++++++++++++++++++++++++++++++++++++ config.py | 22 ++++++++++------ 5 files changed, 81 insertions(+), 13 deletions(-) create mode 100644 Aiming/TrajModel/__init__.py create mode 100644 Aiming/TrajModel/gravity.py diff --git a/Aiming/Aim.py b/Aiming/Aim.py index 44113a8..f31f5b8 100644 --- a/Aiming/Aim.py +++ b/Aiming/Aim.py @@ -1,6 +1,7 @@ """Hosts the Aim class, which is the main class for auto-aiming.""" from .Tracking import 
basic_tracker from .DistEst import pnp_estimator +from .TrajModel import * import numpy as np import Utils @@ -143,9 +144,14 @@ def posterior_calibration(self, raw_pitch, raw_yaw, target_y_dist, target_z_dist Returns: (float, float): calibrated yaw_diff, pitch_diff in radians """ - # In this case, distance comes from D455 stereo estimation - # TODO: compute a range table - return (raw_pitch, raw_yaw) + target_pitch = raw_pitch + target_yaw = raw_yaw + + # Gravity calibration + pitch_diff = calibrate_pitch_gravity(target_z_dist, target_y_dist) + target_pitch -= pitch_diff + + return (target_pitch, target_yaw) def get_rotation_angle(self, bbox_center_x, bbox_center_y): """Given a bounding box center, return the yaw/pitch difference in radians. diff --git a/Aiming/DistEst/pnp_solver.py b/Aiming/DistEst/pnp_solver.py index e65b5a1..69fade8 100644 --- a/Aiming/DistEst/pnp_solver.py +++ b/Aiming/DistEst/pnp_solver.py @@ -48,6 +48,7 @@ def estimate_distance(self, armor, img_rgb): distCoeffs=None) # rot_mat, _ = cv2.Rodrigues(rvec) - # abs_dist = np.sqrt(np.sum(tvec**2)) + abs_dist = np.sqrt(np.sum(tvec**2)) - return -tvec[1], tvec[2] + # return -tvec[1], tvec[2] + return 0, abs_dist / 1000.0 diff --git a/Aiming/TrajModel/__init__.py b/Aiming/TrajModel/__init__.py new file mode 100644 index 0000000..a24a3e6 --- /dev/null +++ b/Aiming/TrajModel/__init__.py @@ -0,0 +1,6 @@ +"""Aggregate all the trajectory modeling methods into one module. + +For a complete explanation of each module, please refer to the README.md. +""" + +from .gravity import calibrate_pitch_gravity diff --git a/Aiming/TrajModel/gravity.py b/Aiming/TrajModel/gravity.py new file mode 100644 index 0000000..294a891 --- /dev/null +++ b/Aiming/TrajModel/gravity.py @@ -0,0 +1,49 @@ +"""Calibrate pitch offset to account for gravity.""" +import numpy as np +import numdifftools + +def calibrate_pitch_gravity(cfg, x1, y1): + """Calibrate pitch offset to account for gravity and air resistance. 
+ + Args: + cfg (python object): config.py config node object + x1 (float): horizontal coordinate in hypothetical landing spot + y1 (float): vertical coordinate in hypothetical landing spot + + Returns: + pitch_offset (float): how much to offset pitch angle + """ + g = cfg.GRAVITY_CONSTANT + v = cfg.INITIAL_BULLET_SPEED + # define the function + # TODO(roger): solving this equation assumes no air resistance + + # You can get where this function comes from by asking GPT4 with the following prompt: + + # Consider a 2D coordinate frame where a cannon is at the origin point (0, 0). + # Given the initial velocity of a cannon ball and its landing location, (x1, y1), + # compute the pitch angle of the cannon. + def f(theta): + return x1 * np.tan(theta) - (g * x1**2) / (2 * v**2 * np.cos(theta)**2) - y1 + + # define the derivative of the function + # def df(theta): + # return derivative(f, theta, dx=1e-6) + df = numdifftools.Derivative(f) + + # initial guess + theta = 0 + + # Newton's method + success_flag = False + for i in range(100): + theta_new = theta - f(theta) / df(theta) + if abs(theta_new - theta) < 1e-6: + success_flag = True + break + theta = theta_new + + if success_flag: + return theta + else: + return 0 diff --git a/config.py b/config.py index ce362bc..c8537db 100644 --- a/config.py +++ b/config.py @@ -1,16 +1,11 @@ """Config file that is shared across the whole project.""" -# communication utils +# ========== Camera ========== from Camera.read_from_file import fake_camera from Camera.simple_cv import simple_cv_camera -SEARCH_TARGET = b'ST' -MOVE_YOKE = b'MY' -PACK_END = b'ED' - # Define camera # from Camera.d455 import D455_camera # from Camera.mdvs import mdvs_camera - AUTOAIM_CAMERA = fake_camera # This param needs to be tuned per arena / camera setup @@ -23,8 +18,19 @@ IMG_CENTER_X = IMG_WIDTH // 2 IMG_CENTER_Y = IMG_HEIGHT // 2 +ROTATE_180 = True # Camera is mounted upside down + +# ========== Trajectory Modeling ========== +GRAVITY_CONSTANT = 9.81 # 
acceleration due to gravity +INITIAL_BULLET_SPEED = 15.0 # empirically measured + +# ========== Communication ========== +SEARCH_TARGET = b'ST' +MOVE_YOKE = b'MY' +PACK_END = b'ED' + +# ========== DEBUGGING ========== + DEBUG_DISPLAY = True DEBUG_PRINT = False DEFAULT_ENEMY_TEAM = 'red' - -ROTATE_180 = True # Camera is mounted upside down From 59eb4725fcf61a460edf30851b5946b20de32af2 Mon Sep 17 00:00:00 2001 From: RogerQi Date: Tue, 13 Jun 2023 03:58:37 -0500 Subject: [PATCH 73/74] [chore] fix linting --- Aiming/TrajModel/gravity.py | 1 + 1 file changed, 1 insertion(+) diff --git a/Aiming/TrajModel/gravity.py b/Aiming/TrajModel/gravity.py index 294a891..e73bb4b 100644 --- a/Aiming/TrajModel/gravity.py +++ b/Aiming/TrajModel/gravity.py @@ -2,6 +2,7 @@ import numpy as np import numdifftools + def calibrate_pitch_gravity(cfg, x1, y1): """Calibrate pitch offset to account for gravity and air resistance. From 9696c67a587bebbaf7888cb0580529bb2651202f Mon Sep 17 00:00:00 2001 From: RogerQi Date: Thu, 15 Jun 2023 03:59:19 -0500 Subject: [PATCH 74/74] [chore] update API --- Aiming/Aim.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Aiming/Aim.py b/Aiming/Aim.py index f31f5b8..fdc3ee0 100644 --- a/Aiming/Aim.py +++ b/Aiming/Aim.py @@ -148,7 +148,7 @@ def posterior_calibration(self, raw_pitch, raw_yaw, target_y_dist, target_z_dist target_yaw = raw_yaw # Gravity calibration - pitch_diff = calibrate_pitch_gravity(target_z_dist, target_y_dist) + pitch_diff = calibrate_pitch_gravity(self.CFG, target_z_dist, target_y_dist) target_pitch -= pitch_diff return (target_pitch, target_yaw)