diff --git a/mycobot_280/mycobot_280/CMakeLists.txt b/mycobot_280/mycobot_280/CMakeLists.txt index 4998774a..619e25b1 100644 --- a/mycobot_280/mycobot_280/CMakeLists.txt +++ b/mycobot_280/mycobot_280/CMakeLists.txt @@ -30,6 +30,7 @@ catkin_install_python(PROGRAMS scripts/follow_display_gripper.py scripts/slider_control_gripper.py scripts/listen_real_gripper.py + scripts/detect_stag.py DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION} ) diff --git a/mycobot_280/mycobot_280/camera_calibration/EyesInHand_matrix.json b/mycobot_280/mycobot_280/camera_calibration/EyesInHand_matrix.json new file mode 100644 index 00000000..39c9e29d --- /dev/null +++ b/mycobot_280/mycobot_280/camera_calibration/EyesInHand_matrix.json @@ -0,0 +1 @@ +[[-0.996091560743614, 0.02359232376557422, -0.0851175943897141, -22.980358398950056], [-0.04045681823508953, -0.9785014715102557, 0.20223282649104501, -7.126480805661541], [-0.07851654904314409, 0.20488599881789024, 0.9756315283009006, 30.736245192702807], [0.0, 0.0, 0.0, 1.0]] \ No newline at end of file diff --git a/mycobot_280/mycobot_280/camera_calibration/camera_calibration.py b/mycobot_280/mycobot_280/camera_calibration/camera_calibration.py new file mode 100644 index 00000000..3f37039c --- /dev/null +++ b/mycobot_280/mycobot_280/camera_calibration/camera_calibration.py @@ -0,0 +1,197 @@ +import cv2 +from uvc_camera import UVCCamera +import stag +import numpy as np +import json +import time +import os +from scipy.linalg import svd +from pymycobot import * +from marker_utils import * +import shutil +import glob + + +ports = glob.glob('/dev/ttyUSB*') + glob.glob('/dev/ttyACM*') +print(ports) +if ports: + arm_port = ports[0] +else: + raise Exception("No MyCobot device found") + + +mc = MyCobot(port=arm_port) # 设置端口 + +np.set_printoptions(suppress=True, formatter={'float_kind': '{:.2f}'.format}) + + + +class camera_detect: + #Camera parameter initialize + def __init__(self, camera_id, marker_size, mtx, dist): + self.camera_id = camera_id + 
self.mtx = mtx + self.dist = dist + self.marker_size = marker_size + self.camera = UVCCamera(self.camera_id, self.mtx, self.dist) + self.camera_open() + + self.origin_mycbot_horizontal = [0,60,-60,0,0,0] + self.origin_mycbot_level = [0, 5, -104, 14, 0, 90] + + # Initialize EyesInHand_matrix to None or load from a document if available + self.EyesInHand_matrix = None + file_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + self.matrix_file_path = os.path.join(file_dir, "config","EyesInHand_matrix.json") + + self.load_matrix() + + def save_matrix(self, filename="EyesInHand_matrix.json"): + # Save the EyesInHand_matrix to a JSON file + if self.EyesInHand_matrix is not None: + with open(filename, 'w') as f: + json.dump(self.EyesInHand_matrix.tolist(), f) + + try: + # 复制文件到目标路径 + shutil.copy(filename, self.matrix_file_path) + print(f"File copied to {self.matrix_file_path}") + except IOError as e: + print(f"Failed to copy file: {e}") + + def load_matrix(self, filename="EyesInHand_matrix.json"): + # Load the EyesInHand_matrix from a JSON file, if it exists + try: + with open(filename, 'r') as f: + self.EyesInHand_matrix = np.array(json.load(f)) + except FileNotFoundError: + print("Matrix file not found. 
EyesInHand_matrix will be initialized later.") + + def wait(self): + time.sleep(0.5) + while(mc.is_moving() == 1): + time.sleep(0.2) + + def camera_open(self): + self.camera.capture() # 打开摄像头 + + # 获取物体坐标(相机系) + def calc_markers_base_position(self, corners, ids): + if len(corners) == 0: + return [] + rvecs, tvecs = solve_marker_pnp(corners, self.marker_size, self.mtx, self.dist) # 通过二维码角点获取物体旋转向量和平移向量 + for i, tvec, rvec in zip(ids, tvecs, rvecs): + tvec = tvec.squeeze().tolist() + rvec = rvec.squeeze().tolist() + rotvector = np.array([[rvec[0], rvec[1], rvec[2]]]) + Rotation = cv2.Rodrigues(rotvector)[0] # 将旋转向量转为旋转矩阵 + Euler = self.CvtRotationMatrixToEulerAngle(Rotation) # 将旋转矩阵转为欧拉角 + target_coords = np.array([tvec[0], tvec[1], tvec[2], Euler[0], Euler[1], Euler[2]]) # 物体坐标(相机系) + return target_coords + + + def eyes_in_hand_calculate(self, pose, tbe1, Mc1, tbe2, Mc2, tbe3, Mc3, Mr): + + tbe1, Mc1, tbe2, Mc2, tbe3, Mc3, Mr = map(np.array, [tbe1, Mc1, tbe2, Mc2, tbe3, Mc3, Mr]) + # Convert pose from degrees to radians + euler = np.array(pose) * np.pi / 180 + Rbe = self.CvtEulerAngleToRotationMatrix(euler) + print("Rbe", Rbe) + Reb = Rbe.T + + A = np.hstack([(Mc2 - Mc1).reshape(-1, 1), + (Mc3 - Mc1).reshape(-1, 1), + (Mc3 - Mc2).reshape(-1, 1)]) + + b = Reb @ np.hstack([(tbe1 - tbe2).reshape(-1, 1), + (tbe1 - tbe3).reshape(-1, 1), + (tbe2 - tbe3).reshape(-1, 1)]) + + print("A = ", A) + print("B = ", b) + U, S, Vt = svd(A @ b.T) + Rce = Vt.T @ U.T + + tce = Reb @ (Mr - (1/3)*(tbe1 + tbe2 + tbe3) - (1/3)*(Rbe @ Rce @ (Mc1 + Mc2 + Mc3))) + + eyes_in_hand_matrix = np.vstack([np.hstack([Rce, tce.reshape(-1, 1)]), np.array([0, 0, 0, 1])]) + + return eyes_in_hand_matrix + + # 读取Camera坐标(单次) + def stag_identify(self): + self.camera.update_frame() # 刷新相机界面 + frame = self.camera.color_frame() # 获取当前帧 + (corners, ids, rejected_corners) = stag.detectMarkers(frame, 11) # 获取画面中二维码的角度和id + # 绘制检测到的标记及其ID + stag.drawDetectedMarkers(frame, corners, ids) + # 绘制被拒绝的候选区域,颜色设为红色 + 
stag.drawDetectedMarkers(frame, rejected_corners, border_color=(255, 0, 0)) + marker_pos_pack = self.calc_markers_base_position(corners, ids) # 获取物的坐标(相机系) + if(len(marker_pos_pack) == 0): + marker_pos_pack = self.stag_identify() + # print("Camera coords = ", marker_pos_pack) + # cv2.imshow("rrrr", frame) + # cv2.waitKey(1) + return marker_pos_pack + + def Eyes_in_hand_calibration(self, ml): + ml.send_angles(self.origin_mycbot_level, 50) # 移动到观测点 + self.wait() + input("make sure camera can observe the stag, enter any key quit") + coords = ml.get_coords() + pose = coords[3:6] + print(pose) + # self.camera_open_loop() + Mc1,tbe1 = self.reg_get(ml) + ml.send_coord(1, coords[0] + 30, 30) + self.wait() + Mc2,tbe2 = self.reg_get(ml) + ml.send_coord(1, coords[0] - 10, 30) + self.wait() + ml.send_coord(3, coords[2] + 20, 30) + self.wait() + Mc3,tbe3 = self.reg_get(ml) + + input("Move the end of the robot arm to the calibration point, press any key to release servo") + ml.release_all_servos() + input("focus servo and get current coords") + ml.power_on() + time.sleep(1) + coords = ml.get_coords() + while len(coords) == 0: + coords = ml.get_coords() + Mr = coords[0:3] + print(Mr) + + self.EyesInHand_matrix = self.eyes_in_hand_calculate(pose, tbe1, Mc1, tbe2, Mc2, tbe3, Mc3, Mr) + print("EyesInHand_matrix = ", self.EyesInHand_matrix) + self.save_matrix() # Save the matrix to a file after calculating it + print("save successe") + + def reg_get(self, ml): + for i in range(50): + Mc_all = self.stag_identify() + tbe_all = ml.get_coords() # 获取机械臂当前坐标 + while (tbe_all is None): + tbe_all = ml.get_coords() + + tbe = tbe_all[0:3] + Mc = Mc_all[0:3] + print("tbe = ", tbe) + print("Mc = ", Mc) + return Mc,tbe + + +if __name__ == "__main__": + if mc.is_power_on()==0: + mc.power_on() + camera_params = np.load("camera_params.npz") # 相机配置文件 + mtx, dist = camera_params["mtx"], camera_params["dist"] + m = camera_detect(0, 32, mtx, dist) + tool_len = 20 + mc.set_tool_reference([0, 0, tool_len, 
0, 0, 0]) + mc.set_end_type(1) + + m.Eyes_in_hand_calibration(mc) #手眼标定 + diff --git a/mycobot_280/mycobot_280/camera_calibration/camera_params.npz b/mycobot_280/mycobot_280/camera_calibration/camera_params.npz new file mode 100644 index 00000000..70b136d6 Binary files /dev/null and b/mycobot_280/mycobot_280/camera_calibration/camera_params.npz differ diff --git a/mycobot_280/mycobot_280/camera_calibration/marker_utils.py b/mycobot_280/mycobot_280/camera_calibration/marker_utils.py new file mode 100644 index 00000000..21c325bc --- /dev/null +++ b/mycobot_280/mycobot_280/camera_calibration/marker_utils.py @@ -0,0 +1,59 @@ +import cv2 +import numpy as np +import typing as T +from numpy.typing import NDArray, ArrayLike + + +class MarkerInfo(T.TypedDict): + corners: np.ndarray + tvec: np.ndarray + rvec: np.ndarray + num_id: int + + +def solve_marker_pnp(corners: NDArray, marker_size: int, mtx: NDArray, dist: NDArray): + """ + This will estimate the rvec and tvec for each of the marker corners detected by: + corners, ids, rejectedImgPoints = detector.detectMarkers(image) + corners - is an array of detected corners for each detected marker in the image + marker_size - is the size of the detected markers + mtx - is the camera matrix + distortion - is the camera distortion matrix + RETURN list of rvecs, tvecs, and trash (so that it corresponds to the old estimatePoseSingleMarkers()) + """ + marker_points = np.array( + [ + [-marker_size / 2, marker_size / 2, 0], + [marker_size / 2, marker_size / 2, 0], + [marker_size / 2, -marker_size / 2, 0], + [-marker_size / 2, -marker_size / 2, 0], + ], + dtype=np.float32, + ) + rvecs = [] + tvecs = [] + for corner in corners: + retval, rvec, tvec = cv2.solvePnP( + marker_points, + corner, + mtx, + dist, + flags=cv2.SOLVEPNP_IPPE_SQUARE, + ) + if retval: + rvecs.append(rvec) + tvecs.append(tvec) + + rvecs = np.array(rvecs) # type: ignore + tvecs = np.array(tvecs) # type: ignore + (rvecs - tvecs).any() # type: ignore + return rvecs, 
tvecs + + +def draw_marker(frame: np.ndarray, corners, tvecs, rvecs, ids, mtx, dist) -> None: + # cv2.aruco.drawDetectedMarkers(frame, corners, None, borderColor=(0, 255, 0)) + cv2.aruco.drawDetectedMarkers(frame, corners, ids, borderColor=(0, 200, 200)) + for i in range(len(ids)): + corner, tvec, rvec, marker_id = corners[i], tvecs[i], rvecs[i], ids[i] + cv2.drawFrameAxes(frame, mtx, dist, rvec, tvec, 60, 2) + diff --git a/mycobot_280/mycobot_280/camera_calibration/readme.docx b/mycobot_280/mycobot_280/camera_calibration/readme.docx new file mode 100644 index 00000000..841b6913 Binary files /dev/null and b/mycobot_280/mycobot_280/camera_calibration/readme.docx differ diff --git a/mycobot_280/mycobot_280/camera_calibration/stag.png b/mycobot_280/mycobot_280/camera_calibration/stag.png new file mode 100644 index 00000000..6745a0aa Binary files /dev/null and b/mycobot_280/mycobot_280/camera_calibration/stag.png differ diff --git a/mycobot_280/mycobot_280/camera_calibration/uvc_camera.py b/mycobot_280/mycobot_280/camera_calibration/uvc_camera.py new file mode 100644 index 00000000..70fe0683 --- /dev/null +++ b/mycobot_280/mycobot_280/camera_calibration/uvc_camera.py @@ -0,0 +1,55 @@ +import cv2 +import numpy as np +import time +import typing + + +class UVCCamera: + def __init__( + self, + cam_index=0, + mtx=None, + dist=None, + capture_size: typing.Tuple[int, int] = (640, 480), + ): + super().__init__() + self.cam_index = cam_index + self.mtx = mtx + self.dist = dist + self.curr_color_frame: typing.Union[np.ndarray, None] = None + self.capture_size = capture_size + + def capture(self): + self.cap = cv2.VideoCapture(self.cam_index) #windows + width, height = self.capture_size + self.cap.set(cv2.CAP_PROP_FRAME_WIDTH, width) + self.cap.set(cv2.CAP_PROP_FRAME_HEIGHT, height) + + def update_frame(self) -> bool: + ret, self.curr_color_frame = self.cap.read() + return ret + + def color_frame(self) -> typing.Union[np.ndarray, None]: + return self.curr_color_frame + + def 
release(self): + self.cap.release() + + +if __name__ == "__main__": + cam = UVCCamera(2) + cam.capture() + while True: + if not cam.update_frame(): + continue + + frame = cam.color_frame() + if frame is None: + time.sleep(0.01) + continue + + print(frame.shape) + window_name = "preview" + cv2.imshow(window_name, frame) + if cv2.waitKey(1) == ord("q"): + break diff --git a/mycobot_280/mycobot_280/config/EyesInHand_matrix.json b/mycobot_280/mycobot_280/config/EyesInHand_matrix.json new file mode 100644 index 00000000..39c9e29d --- /dev/null +++ b/mycobot_280/mycobot_280/config/EyesInHand_matrix.json @@ -0,0 +1 @@ +[[-0.996091560743614, 0.02359232376557422, -0.0851175943897141, -22.980358398950056], [-0.04045681823508953, -0.9785014715102557, 0.20223282649104501, -7.126480805661541], [-0.07851654904314409, 0.20488599881789024, 0.9756315283009006, 30.736245192702807], [0.0, 0.0, 0.0, 1.0]] \ No newline at end of file diff --git a/mycobot_280/mycobot_280/config/camera_params.npz b/mycobot_280/mycobot_280/config/camera_params.npz new file mode 100644 index 00000000..70b136d6 Binary files /dev/null and b/mycobot_280/mycobot_280/config/camera_params.npz differ diff --git a/mycobot_280/mycobot_280/config/mycobot_with_marker.rviz b/mycobot_280/mycobot_280/config/mycobot_with_marker.rviz index 8b0dc7f4..903ce355 100644 --- a/mycobot_280/mycobot_280/config/mycobot_with_marker.rviz +++ b/mycobot_280/mycobot_280/config/mycobot_with_marker.rviz @@ -11,7 +11,7 @@ Panels: - /Marker1 - /Marker1/Namespaces1 Splitter Ratio: 0.5 - Tree Height: 775 + Tree Height: 609 - Class: rviz/Selection Name: Selection - Class: rviz/Tool Properties @@ -20,17 +20,18 @@ Panels: - /2D Nav Goal1 - /Publish Point1 Name: Tool Properties - Splitter Ratio: 0.588679016 + Splitter Ratio: 0.5886790156364441 - Class: rviz/Views Expanded: - /Current View1 Name: Views Splitter Ratio: 0.5 - Class: rviz/Time - Experimental: false Name: Time SyncMode: 0 SyncSource: "" +Preferences: + PromptSaveOnExit: true Toolbars: 
toolButtonStyle: 2 Visualization Manager: @@ -42,7 +43,7 @@ Visualization Manager: Color: 160; 160; 164 Enabled: true Line Style: - Line Width: 0.0299999993 + Line Width: 0.029999999329447746 Value: Lines Name: Grid Normal Cell Count: 0 @@ -64,6 +65,11 @@ Visualization Manager: Expand Link Details: false Expand Tree: false Link Tree Style: Links in Alphabetic Order + g_base: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true joint1: Alpha: 1 Show Axes: false @@ -107,10 +113,12 @@ Visualization Manager: Visual Enabled: true - Class: rviz/TF Enabled: true + Filter (blacklist): "" + Filter (whitelist): "" Frame Timeout: 15 Frames: All Enabled: true - basic_shapes: + g_base: Value: true joint1: Value: true @@ -126,29 +134,30 @@ Visualization Manager: Value: true joint6_flange: Value: true - Marker Scale: 0.300000012 + Marker Alpha: 1 + Marker Scale: 0.30000001192092896 Name: TF Show Arrows: true Show Axes: true Show Names: true Tree: - joint1: - joint2: - joint3: - joint4: - joint5: - joint6: - joint6_flange: - basic_shapes: + g_base: + joint1: + joint2: + joint3: + joint4: + joint5: + joint6: + joint6_flange: {} Update Interval: 0 Value: true - Class: rviz/Marker Enabled: true - Marker Topic: /visualization_marker + Marker Topic: cube Name: Marker Namespaces: - basic_cube: true + {} Queue Size: 100 Value: true Enabled: true @@ -166,7 +175,10 @@ Visualization Manager: - Class: rviz/FocusCamera - Class: rviz/Measure - Class: rviz/SetInitialPose + Theta std deviation: 0.2617993950843811 Topic: /initialpose + X std deviation: 0.5 + Y std deviation: 0.5 - Class: rviz/SetGoal Topic: /move_base_simple/goal - Class: rviz/PublishPoint @@ -176,33 +188,33 @@ Visualization Manager: Views: Current: Class: rviz/Orbit - Distance: 2.11990476 + Distance: 1.3179463148117065 Enable Stereo Rendering: - Stereo Eye Separation: 0.0599999987 + Stereo Eye Separation: 0.05999999865889549 Stereo Focal Distance: 1 Swap Stereo Eyes: false Value: false + Field of View: 
0.7853981852531433 Focal Point: - X: -0.0706475973 - Y: -0.0814988762 - Z: 0.107583851 + X: -0.07064759731292725 + Y: -0.0814988762140274 + Z: 0.10758385062217712 Focal Shape Fixed Size: true - Focal Shape Size: 0.0500000007 + Focal Shape Size: 0.05000000074505806 Invert Z Axis: false Name: Current View - Near Clip Distance: 0.00999999978 - Pitch: 0.375398338 + Near Clip Distance: 0.009999999776482582 + Pitch: 0.35539817810058594 Target Frame: - Value: Orbit (rviz) - Yaw: 0.235389769 + Yaw: 4.873575210571289 Saved: ~ Window Geometry: Displays: collapsed: false - Height: 1056 + Height: 906 Hide Left Dock: false Hide Right Dock: false - QMainWindow State: 000000ff00000000fd00000004000000000000016a00000396fc0200000008fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000006100fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c006100790073010000002800000396000000d700fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261000000010000022f00000396fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073010000002800000396000000ad00fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e100000197000000030000073f0000003efc0100000002fb0000000800540069006d006501000000000000073f0000030000fffffffb0000000800540069006d006501000000000000045000000000000000000000039a00000396000000040000
00040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000 + QMainWindow State: 000000ff00000000fd00000004000000000000016a000002ecfc0200000008fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000005c00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c006100790073010000003d000002ec000000c900fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261000000010000022f000002ecfc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073010000003d000002ec000000a400fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e10000019700000003000007380000003efc0100000002fb0000000800540069006d0065010000000000000738000003bc00fffffffb0000000800540069006d0065010000000000000450000000000000000000000393000002ec00000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000 Selection: collapsed: false Time: @@ -211,6 +223,6 @@ Window Geometry: collapsed: false Views: collapsed: false - Width: 1855 - X: 65 - Y: 24 + Width: 1848 + X: 72 + Y: 27 diff --git a/mycobot_280/mycobot_280/launch/detect_marker_with_topic.launch b/mycobot_280/mycobot_280/launch/detect_marker_with_topic.launch new file mode 100644 index 00000000..e3f9397d --- /dev/null +++ 
b/mycobot_280/mycobot_280/launch/detect_marker_with_topic.launch @@ -0,0 +1,28 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/mycobot_280/mycobot_280/launch/open_camera.launch b/mycobot_280/mycobot_280/launch/open_camera.launch new file mode 100644 index 00000000..b7da7302 --- /dev/null +++ b/mycobot_280/mycobot_280/launch/open_camera.launch @@ -0,0 +1,21 @@ + + + + + + + + + + + + + + + + + + + + + diff --git a/mycobot_280/mycobot_280/scripts/detect_stag.py b/mycobot_280/mycobot_280/scripts/detect_stag.py new file mode 100644 index 00000000..ce4630ee --- /dev/null +++ b/mycobot_280/mycobot_280/scripts/detect_stag.py @@ -0,0 +1,432 @@ +#!/usr/bin/env python + +import rospy +from sensor_msgs.msg import Image +from cv_bridge import CvBridge, CvBridgeError +import cv2 +import numpy as np +from numpy.typing import NDArray, ArrayLike +import stag +import os +import json +import time +import threading +from mycobot_communication.msg import MycobotAngles, MycobotSetAngles, MycobotCoords, MycobotSetCoords, MycobotSetEndType, MycobotSetFreshMode, MycobotSetToolReference +from visualization_msgs.msg import Marker + +np.set_printoptions(suppress=True, formatter={'float_kind': '{:.2f}'.format}) + +class STAGRecognizer: + def __init__(self): + rospy.init_node('stag_recognizer', anonymous=True) + self.bridge = CvBridge() + self.tool_len = 20 + + self.marker_size = 32 + self.origin_mycbot_horizontal = [0,60,-60,0,0,0] + + self.EyesInHand_matrix = None + # 订阅摄像头话题 + self.image_sub = rospy.Subscriber("/usb_cam/image_raw", Image, self.image_callback) + + # 获取config文件目录并设置相机参数文件路径 + file_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + params_file_path = os.path.join(file_dir, "config","camera_params.npz") + matrix_file_path = os.path.join(file_dir, "config","EyesInHand_matrix.json") + self.load_matrix(filename=matrix_file_path) + # 加载相机参数 + try: + camera_params = np.load(params_file_path) + self.mtx, self.dist = camera_params["mtx"], 
camera_params["dist"] + except FileNotFoundError: + rospy.logerr(f"Camera parameters file not found: {params_file_path}") + raise + + self.current_frame = None + + # 创建发布者,发布机械臂坐标和角度 + self.coords_pub = rospy.Publisher('mycobot/coords_goal', MycobotSetCoords, queue_size=5) + self.angles_pub = rospy.Publisher('mycobot/angles_goal', MycobotSetAngles, queue_size=5) + self.fresh_mode_pub = rospy.Publisher('mycobot/fresh_mode_status', MycobotSetFreshMode, queue_size=5) + self.end_type_pub = rospy.Publisher('mycobot/end_type_status', MycobotSetEndType, queue_size=5) + self.tool_reference_pub = rospy.Publisher('mycobot/tool_reference_goal', MycobotSetToolReference, queue_size=5) + + # 创建订阅者,订阅机械臂的真实坐标和角度 + rospy.Subscriber('mycobot/coords_real', MycobotCoords, self.coords_callback) + + self.current_coords = None + self.current_angles = None + self.lock = threading.Lock() + self.set_tool_reference([0, 0, self.tool_len, 0, 0, 0]) + self.set_end_type(1) + + # init a node and a publisher + # rospy.init_node("marker", anonymous=True) + self.pub = rospy.Publisher('cube', Marker, queue_size=1) + # init a Marker + self.marker = Marker() + self.marker.header.frame_id = "joint1" + self.marker.ns = "cube" + self.marker.type = self.marker.CUBE + self.marker.action = self.marker.ADD + self.marker.scale.x = 0.04 + self.marker.scale.y = 0.04 + self.marker.scale.z = 0.04 + self.marker.color.a = 1.0 + self.marker.color.g = 1.0 + self.marker.color.r = 1.0 + + # marker position initial + self.marker.pose.position.x = 0 + self.marker.pose.position.y = 0 + self.marker.pose.position.z = 0.03 + self.marker.pose.orientation.x = 0 + self.marker.pose.orientation.y = 0 + self.marker.pose.orientation.z = 0 + self.marker.pose.orientation.w = 1.0 + + def load_matrix(self, filename="EyesInHand_matrix.json"): + # Load the EyesInHand_matrix from a JSON file, if it exists + try: + with open(filename, 'r') as f: + self.EyesInHand_matrix = np.array(json.load(f)) + except FileNotFoundError: + print("Matrix 
file not found. EyesInHand_matrix will be initialized later.") + + # publish marker + def pub_marker(self, x, y, z=0.03): + self.marker.header.stamp = rospy.Time.now() + self.marker.pose.position.x = x + self.marker.pose.position.y = y + self.marker.pose.position.z = z + self.marker.color.g = 0 + self.pub.publish(self.marker) + + def coords_callback(self, data): + # 获取机械臂当前的坐标,并保留小数点后两位 + with self.lock: + self.current_coords = [round(data.x, 2), round(data.y, 2), round(data.z, 2), + round(data.rx, 2), round(data.ry, 2), round(data.rz, 2)] + self.coords_updated = True + # rospy.loginfo(f"Current coords11111: {self.current_coords}") + + def send_angles(self, angles, speed): + msg = MycobotSetAngles() + msg.joint_1, msg.joint_2, msg.joint_3, msg.joint_4, msg.joint_5, msg.joint_6 = angles + msg.speed = speed + self.angles_pub.publish(msg) + + def send_coords(self, coords, speed, model): + # 创建 MycobotSetCoords 消息对象 + msg = MycobotSetCoords() + + # coords 是一个包含 [x, y, z, rx, ry, rz] 的列表 + if len(coords) != 6: + raise ValueError("coords must be a list of 6 elements") + msg.x, msg.y, msg.z, msg.rx, msg.ry, msg.rz = coords + msg.speed = speed + msg.model = model + + # 发布消息 + self.coords_pub.publish(msg) + + + def get_coords(self): + with self.lock: + if self.coords_updated: + self.coords_updated = False + return self.current_coords.copy() + return [] + + def set_fresh_mode(self, mode): + msg = MycobotSetFreshMode() + msg.Status = mode + self.fresh_mode_pub.publish(msg) + + def set_end_type(self, end_type): + msg = MycobotSetEndType() + msg.Status = end_type + self.end_type_pub.publish(msg) + + def set_tool_reference(self, coords): + msg = MycobotSetToolReference() + msg.x, msg.y, msg.z, msg.rx, msg.ry, msg.rz = coords + self.tool_reference_pub.publish(msg) + + + def solve_marker_pnp(self, corners: NDArray, marker_size: int, mtx: NDArray, dist: NDArray): + """ + This will estimate the rvec and tvec for each of the marker corners detected by: + corners, ids, 
rejectedImgPoints = detector.detectMarkers(image) + corners - is an array of detected corners for each detected marker in the image + marker_size - is the size of the detected markers + mtx - is the camera matrix + distortion - is the camera distortion matrix + RETURN list of rvecs, tvecs, and trash (so that it corresponds to the old estimatePoseSingleMarkers()) + """ + marker_points = np.array( + [ + [-marker_size / 2, marker_size / 2, 0], + [marker_size / 2, marker_size / 2, 0], + [marker_size / 2, -marker_size / 2, 0], + [-marker_size / 2, -marker_size / 2, 0], + ], + dtype=np.float32, + ) + rvecs = [] + tvecs = [] + for corner in corners: + retval, rvec, tvec = cv2.solvePnP( + marker_points, + corner, + mtx, + dist, + flags=cv2.SOLVEPNP_IPPE_SQUARE, + ) + if retval: + rvecs.append(rvec) + tvecs.append(tvec) + + rvecs = np.array(rvecs) # type: ignore + tvecs = np.array(tvecs) # type: ignore + (rvecs - tvecs).any() # type: ignore + return rvecs, tvecs + + def image_callback(self, data): + try: + # 将 ROS 图像消息转换为 OpenCV 格式 + cv_image = self.bridge.imgmsg_to_cv2(data, "bgr8") + self.current_frame = cv_image + except CvBridgeError as e: + rospy.logerr(e) + return + + # 应用相机校正 + # frame_undistorted = cv2.undistort(cv_image, self.mtx, self.dist, None, self.mtx) + + # 检测 STAG 标记 + corners, ids, rejected_corners = stag.detectMarkers(cv_image, 11) + # 绘制检测到的标记及其ID + stag.drawDetectedMarkers(cv_image, corners, ids) + # 绘制被拒绝的候选区域,颜色设为红色 + stag.drawDetectedMarkers(cv_image, rejected_corners, border_color=(255, 0, 0)) + + # cv2.imshow("STAG Detection", cv_image) + # cv2.waitKey(1) + + def calc_markers_base_position(self, corners, ids): + """获取物体坐标(相机系) + + Args: + corners (_type_): _description_ + ids (_type_): _description_ + + Returns: + _type_: _description_ + """ + if len(corners) == 0: + return [] + # 通过二维码角点获取物体旋转向量和平移向量 + rvecs, tvecs = self.solve_marker_pnp(corners, self.marker_size, self.mtx, self.dist) + for i, tvec, rvec in zip(ids, tvecs, rvecs): + tvec = 
tvec.squeeze().tolist() + rvec = rvec.squeeze().tolist() + rotvector = np.array([[rvec[0], rvec[1], rvec[2]]]) + # 将旋转向量转为旋转矩阵 + Rotation = cv2.Rodrigues(rotvector)[0] + # 将旋转矩阵转为欧拉角 + Euler = self.CvtRotationMatrixToEulerAngle(Rotation) + # 物体坐标(相机系) + target_coords = np.array([tvec[0], tvec[1], tvec[2], Euler[0], Euler[1], Euler[2]]) + return target_coords + + def CvtRotationMatrixToEulerAngle(self, pdtRotationMatrix): + """将旋转矩阵转为欧拉角 + + Args: + pdtRotationMatrix (_type_): _description_ + + Returns: + _type_: _description_ + """ + pdtEulerAngle = np.zeros(3) + pdtEulerAngle[2] = np.arctan2(pdtRotationMatrix[1, 0], pdtRotationMatrix[0, 0]) + fCosRoll = np.cos(pdtEulerAngle[2]) + fSinRoll = np.sin(pdtEulerAngle[2]) + pdtEulerAngle[1] = np.arctan2(-pdtRotationMatrix[2, 0], + (fCosRoll * pdtRotationMatrix[0, 0]) + (fSinRoll * pdtRotationMatrix[1, 0])) + pdtEulerAngle[0] = np.arctan2((fSinRoll * pdtRotationMatrix[0, 2]) - (fCosRoll * pdtRotationMatrix[1, 2]), + (-fSinRoll * pdtRotationMatrix[0, 1]) + (fCosRoll * pdtRotationMatrix[1, 1])) + return pdtEulerAngle + + def CvtEulerAngleToRotationMatrix(self, ptrEulerAngle): + """将欧拉角转为旋转矩阵 + + Args: + ptrEulerAngle (_type_): _description_ + + Returns: + _type_: _description_ + """ + ptrSinAngle = np.sin(ptrEulerAngle) + ptrCosAngle = np.cos(ptrEulerAngle) + ptrRotationMatrix = np.zeros((3, 3)) + ptrRotationMatrix[0, 0] = ptrCosAngle[2] * ptrCosAngle[1] + ptrRotationMatrix[0, 1] = ptrCosAngle[2] * ptrSinAngle[1] * ptrSinAngle[0] - ptrSinAngle[2] * ptrCosAngle[0] + ptrRotationMatrix[0, 2] = ptrCosAngle[2] * ptrSinAngle[1] * ptrCosAngle[0] + ptrSinAngle[2] * ptrSinAngle[0] + ptrRotationMatrix[1, 0] = ptrSinAngle[2] * ptrCosAngle[1] + ptrRotationMatrix[1, 1] = ptrSinAngle[2] * ptrSinAngle[1] * ptrSinAngle[0] + ptrCosAngle[2] * ptrCosAngle[0] + ptrRotationMatrix[1, 2] = ptrSinAngle[2] * ptrSinAngle[1] * ptrCosAngle[0] - ptrCosAngle[2] * ptrSinAngle[0] + ptrRotationMatrix[2, 0] = -ptrSinAngle[1] + ptrRotationMatrix[2, 1] = 
ptrCosAngle[1] * ptrSinAngle[0] + ptrRotationMatrix[2, 2] = ptrCosAngle[1] * ptrCosAngle[0] + return ptrRotationMatrix + + def Transformation_matrix(self,coord): + """坐标转换为齐次变换矩阵 + + Args: + coord (_type_): (x,y,z,rx,ry,rz) + + Returns: + _type_: _description_ + """ + position_robot = coord[:3] + pose_robot = coord[3:] + # 将欧拉角转为旋转矩阵 + RBT = self.CvtEulerAngleToRotationMatrix(pose_robot) + PBT = np.array([[position_robot[0]], + [position_robot[1]], + [position_robot[2]]]) + temp = np.concatenate((RBT, PBT), axis=1) + array_1x4 = np.array([[0, 0, 0, 1]]) + # 将两个数组按行拼接起来 + matrix = np.concatenate((temp, array_1x4), axis=0) + return matrix + + def Eyes_in_hand(self, coord, marker_positions, Matrix_TC): + # 相机坐标 + Position_Camera = np.transpose(marker_positions[:3]) + # 机械臂坐标矩阵 + Matrix_BT = self.Transformation_matrix(coord) + # 物体坐标(相机系) + Position_Camera = np.append(Position_Camera, 1) + + # 物体坐标(基坐标系) + Position_B = Matrix_BT @ Matrix_TC @ Position_Camera + return Position_B + + def waitl(self, ml): + """等待机械臂运行结束 + + Args: + ml (_type_): _description_ + """ + time.sleep(0.2) + while ml.is_moving(): + time.sleep(0.03) + + def stag_identify(self): + """读取Camera坐标(单次) + + Returns: + _type_: _description_ + """ + try: + if self.current_frame is None: + rospy.logwarn("No image received yet") + return [] + # 获取当前帧 + frame = self.current_frame + # 获取画面中二维码的角度和id + corners, ids, rejected_corners = stag.detectMarkers(frame, 11) + # 获取物的坐标(相机系) + marker_pos_pack = self.calc_markers_base_position(corners, ids) + if len(marker_pos_pack) == 0 and not rospy.is_shutdown(): + # rospy.logwarn("No markers detected") + marker_pos_pack = self.stag_identify() # 递归调用 + + # print("Camera coords = ", marker_pos_pack) + return marker_pos_pack + except RecursionError: + # rospy.logerr("Recursion depth exceeded in marker detection") + return [0, 0, 0, 0] # 返回默认值 + + def vision_trace(self, mode, ml): + sp = 40 + #水平面抓取 + if mode == 0: + # 移动到观测点 + ml.send_angles(self.origin_mycbot_horizontal, 
sp) + # 等待机械臂运动结束 + self.waitl(ml) + input("Enter any key to start trace") + + target_coords = self.stag_robot_identify(ml) + print(target_coords) + + time.sleep(1) + # 机械臂移动到二维码前方 + ml.send_coords(target_coords, 30) + # 等待机械臂运动结束 + self.waitl(ml) + + def stag_robot_identify(self): + marker_pos_pack = self.stag_identify() + # 如果返回的是默认值,直接退出函数,不返回任何数据 + if marker_pos_pack == [0, 0, 0, 0]: + rospy.logwarn("No markers detected, skipping processing") + return None # 直接返回 None + target_coords = self.get_coords() + while len(target_coords)==0: + target_coords = self.get_coords() + # print("Current coords:", target_coords) + cur_coords = np.array(target_coords.copy()) + cur_coords[-3:] *= (np.pi / 180) + fact_bcl = self.Eyes_in_hand(cur_coords, marker_pos_pack, self.EyesInHand_matrix) + for i in range(3): + target_coords[i] = fact_bcl[i] + return target_coords + + def coord_limit(self, coords): + min_coord = [100, -150, 0] + max_coord = [400, 150, 400] + for i in range(3): + if(coords[i] < min_coord[i]): + coords[i] = min_coord[i] + + if(coords[i] > max_coord[i]): + coords[i] = max_coord[i] + + def vision_trace_loop(self): + self.set_fresh_mode(1) + time.sleep(1) + # 移动到观测点 + self.send_angles(self.origin_mycbot_horizontal, 50) + time.sleep(3) + origin = self.get_coords() + + rate = rospy.Rate(30) + while not rospy.is_shutdown(): + target_coords = self.stag_robot_identify() + # 如果没有返回目标坐标,跳过本次循环 + if target_coords is None: + continue # 跳过这次循环,等下次识别 + target_coords[0] -= 300 + rospy.loginfo('Target Coords: %s', target_coords) + self.coord_limit(target_coords) + for i in range(3): + target_coords[i+3] = origin[i+3] + self.pub_marker(target_coords[0]/1000.0, target_coords[1]/1000.0, target_coords[2]/1000.0) + self.send_coords(target_coords, 30, 0) # 机械臂移动到二维码前方 + rate.sleep() + + +if __name__ == '__main__': + try: + sr = STAGRecognizer() + sr.vision_trace_loop() + rospy.spin() + except KeyboardInterrupt: + rospy.loginfo("Shutting down...") + cv2.destroyAllWindows() diff --git 
a/mycobot_communication/CMakeLists.txt b/mycobot_communication/CMakeLists.txt
index bf1f2821..4cf256c7 100644
--- a/mycobot_communication/CMakeLists.txt
+++ b/mycobot_communication/CMakeLists.txt
@@ -56,6 +56,9 @@ add_message_files(FILES
   MycobotSetCoords.msg
   MycobotGripperStatus.msg
   MycobotPumpStatus.msg
+  MycobotSetEndType.msg
+  MycobotSetFreshMode.msg
+  MycobotSetToolReference.msg
 )
 
 ## Generate services in the 'srv' folder
@@ -66,6 +69,9 @@ add_service_files(FILES
   SetCoords.srv
   GripperStatus.srv
   PumpStatus.srv
+  SetEndType.srv
+  SetFreshMode.srv
+  SetToolReference.srv
 )
 
 ## Generate added messages and services
diff --git a/mycobot_communication/msg/MycobotSetEndType.msg b/mycobot_communication/msg/MycobotSetEndType.msg
new file mode 100644
index 00000000..ae923e46
--- /dev/null
+++ b/mycobot_communication/msg/MycobotSetEndType.msg
@@ -0,0 +1 @@
+uint8 Status
\ No newline at end of file
diff --git a/mycobot_communication/msg/MycobotSetFreshMode.msg b/mycobot_communication/msg/MycobotSetFreshMode.msg
new file mode 100644
index 00000000..ae923e46
--- /dev/null
+++ b/mycobot_communication/msg/MycobotSetFreshMode.msg
@@ -0,0 +1 @@
+uint8 Status
\ No newline at end of file
diff --git a/mycobot_communication/msg/MycobotSetToolReference.msg b/mycobot_communication/msg/MycobotSetToolReference.msg
new file mode 100644
index 00000000..08f6dbda
--- /dev/null
+++ b/mycobot_communication/msg/MycobotSetToolReference.msg
@@ -0,0 +1,6 @@
+float32 x
+float32 y
+float32 z
+float32 rx
+float32 ry
+float32 rz
\ No newline at end of file
diff --git a/mycobot_communication/scripts/mycobot_topics.py b/mycobot_communication/scripts/mycobot_topics.py
index 5039b0de..5f01adb8 100755
--- a/mycobot_communication/scripts/mycobot_topics.py
+++ b/mycobot_communication/scripts/mycobot_topics.py
@@ -15,8 +15,11 @@
     MycobotSetCoords,
     MycobotGripperStatus,
     MycobotPumpStatus,
+    MycobotSetEndType,
+    MycobotSetFreshMode,
+    MycobotSetToolReference,
 )
-
+from std_msgs.msg import UInt8
 from 
pymycobot.mycobot import MyCobot
@@ -85,6 +88,11 @@ def start(self):
         sb = threading.Thread(target=self.sub_set_coords)
         sg = threading.Thread(target=self.sub_gripper_status)
         sp = threading.Thread(target=self.sub_pump_status)
+
+        sfm = threading.Thread(target=self.sub_fresh_mode_status)
+        # FIX(review): do not shadow the builtins `set` and `str`
+        set_ = threading.Thread(target=self.sub_end_type_status)
+        str_ = threading.Thread(target=self.sub_set_tool_reference)
 
         pa.setDaemon(True)
         pa.start()
@@ -98,6 +105,15 @@ def start(self):
         sg.start()
         sp.setDaemon(True)
         sp.start()
+
+        sfm.setDaemon(True)
+        # FIX(review): `sfm.start` was missing the call parentheses, so the
+        # fresh-mode subscriber thread was never actually started.
+        sfm.start()
+        set_.setDaemon(True)
+        set_.start()
+        str_.setDaemon(True)
+        str_.start()
 
         pa.join()
         pb.join()
@@ -105,6 +121,10 @@ def start(self):
         sb.join()
         sg.join()
         sp.join()
+
+        sfm.join()
+        set_.join()
+        str_.join()
 
     def pub_real_angles(self):
@@ -113,17 +133,20 @@ def pub_real_angles(self):
             MycobotAngles, queue_size=5)
         ma = MycobotAngles()
         while not rospy.is_shutdown():
-            self.lock.acquire()
-            angles = self.mc.get_angles()
-            self.lock.release()
-            if angles:
-                ma.joint_1 = angles[0]
-                ma.joint_2 = angles[1]
-                ma.joint_3 = angles[2]
-                ma.joint_4 = angles[3]
-                ma.joint_5 = angles[4]
-                ma.joint_6 = angles[5]
-                pub.publish(ma)
+            with self.lock:
+                try:
+                    angles = self.mc.get_angles()
+                    if angles:
+                        ma.joint_1 = angles[0]
+                        ma.joint_2 = angles[1]
+                        ma.joint_3 = angles[2]
+                        ma.joint_4 = angles[3]
+                        ma.joint_5 = angles[4]
+                        ma.joint_6 = angles[5]
+                        pub.publish(ma)
+                except Exception as e:
+                    # FIX(review): any exception was mislabelled "SerialException";
+                    # log honestly and lazily (no f-string in a hot loop).
+                    rospy.logerr("get_angles failed: %s", e)
             time.sleep(0.25)
 
     def pub_real_coords(self):
@@ -134,17 +157,20 @@ def pub_real_coords(self):
         ma = MycobotCoords()
 
         while not rospy.is_shutdown():
-            self.lock.acquire()
-            coords = self.mc.get_coords()
-            self.lock.release()
-            if coords:
-                ma.x = coords[0]
-                ma.y = coords[1]
-                ma.z = coords[2]
-                ma.rx = coords[3]
-                ma.ry = coords[4]
-                ma.rz = coords[5]
-                pub.publish(ma)
+            with self.lock:
+                try:
+                    coords = self.mc.get_coords()
+                    if coords:
+                        ma.x = coords[0]
+                        ma.y = coords[1]
+                        ma.z = coords[2]
+                        ma.rx =
coords[3]
+                        ma.ry = coords[4]
+                        ma.rz = coords[5]
+                        pub.publish(ma)
+                except Exception as e:
+                    # FIX(review): any exception was mislabelled "SerialException";
+                    # log honestly and lazily (no f-string in a hot loop).
+                    rospy.logerr("get_coords failed: %s", e)
             time.sleep(0.25)
 
     def sub_set_angles(self):
@@ -206,6 +228,42 @@
             "mycobot/pump_status", MycobotPumpStatus, callback=callback
         )
         rospy.spin()
+
+    def sub_fresh_mode_status(self):
+        """Subscribe to fresh-mode commands and forward them to the arm."""
+        def callback(data):
+            # FIX(review): serialize serial-port access with self.lock, as the
+            # publisher threads do, to avoid interleaved reads/writes.
+            with self.lock:
+                self.mc.set_fresh_mode(1 if data.Status == 1 else 0)
+
+        sub = rospy.Subscriber(
+            "mycobot/fresh_mode_status", MycobotSetFreshMode, callback=callback
+        )
+        rospy.spin()
+
+    def sub_end_type_status(self):
+        """Subscribe to end-type commands and forward them to the arm."""
+        def callback(data):
+            with self.lock:
+                self.mc.set_end_type(1 if data.Status == 1 else 0)
+
+        sub = rospy.Subscriber(
+            "mycobot/end_type_status", MycobotSetEndType, callback=callback
+        )
+        rospy.spin()
+
+    def sub_set_tool_reference(self):
+        """Subscribe to tool-reference goals and apply them to the arm."""
+        def callback(data):
+            coords = [data.x, data.y, data.z, data.rx, data.ry, data.rz]
+            with self.lock:
+                self.mc.set_tool_reference(coords)
+
+        sub = rospy.Subscriber(
+            "mycobot/tool_reference_goal", MycobotSetToolReference, callback=callback
+        )
+        rospy.spin()
 
 
 if __name__ == "__main__":
diff --git a/mycobot_communication/srv/SetEndType.srv b/mycobot_communication/srv/SetEndType.srv
new file mode 100644
index 00000000..42d43226
--- /dev/null
+++ b/mycobot_communication/srv/SetEndType.srv
@@ -0,0 +1,5 @@
+uint8 Status
+
+---
+
+bool Flag
\ No newline at end of file
diff --git a/mycobot_communication/srv/SetFreshMode.srv b/mycobot_communication/srv/SetFreshMode.srv
new file mode 100644
index 00000000..42d43226
--- /dev/null
+++ b/mycobot_communication/srv/SetFreshMode.srv
@@ -0,0 +1,5 @@
+uint8 Status
+
+---
+
+bool Flag
\ No newline at end of file
diff --git a/mycobot_communication/srv/SetToolReference.srv
new file mode 100644
index 00000000..eb16fc72
--- /dev/null
+++ 
b/mycobot_communication/srv/SetToolReference.srv
@@ -0,0 +1,10 @@
+float32 x
+float32 y
+float32 z
+float32 rx
+float32 ry
+float32 rz
+
+---
+
+bool Flag
\ No newline at end of file