From 5cf5dbe1f34f5d0d7e738041da589725ca1393a3 Mon Sep 17 00:00:00 2001 From: Adam Serafin Date: Wed, 13 Sep 2023 13:25:51 +0200 Subject: [PATCH] Pre_release_v281_iron (#406) --- README.md | 239 +++++++++++++++++- depthai-ros/CHANGELOG.rst | 9 + depthai-ros/CMakeLists.txt | 2 +- depthai-ros/package.xml | 2 +- depthai_bridge/CMakeLists.txt | 8 +- .../include/depthai_bridge/ImageConverter.hpp | 31 ++- .../include/depthai_bridge/TFPublisher.hpp | 22 +- depthai_bridge/package.xml | 5 +- depthai_bridge/src/ImageConverter.cpp | 23 +- depthai_bridge/src/TFPublisher.cpp | 13 +- depthai_descriptions/CMakeLists.txt | 2 +- depthai_descriptions/launch/urdf_launch.py | 14 +- depthai_descriptions/package.xml | 2 +- depthai_examples/CMakeLists.txt | 2 +- depthai_examples/package.xml | 2 +- depthai_filters/CMakeLists.txt | 2 +- depthai_filters/package.xml | 2 +- depthai_filters/src/spatial_bb.cpp | 2 +- depthai_ros_driver/CMakeLists.txt | 2 +- depthai_ros_driver/config/camera.yaml | 123 --------- depthai_ros_driver/config/isaac_vslam.yaml | 15 ++ depthai_ros_driver/config/pcl.yaml | 3 +- .../include/depthai_ros_driver/camera.hpp | 20 +- .../dai_nodes/base_node.hpp | 12 +- .../dai_nodes/nn/detection.hpp | 14 +- .../dai_nodes/sensors/sensor_helpers.hpp | 8 +- .../param_handlers/sensor_param_handler.hpp | 13 +- .../param_handlers/stereo_param_handler.hpp | 2 +- .../pipeline/pipeline_generator.hpp | 13 +- .../include/depthai_ros_driver/utils.hpp | 3 +- depthai_ros_driver/launch/camera.launch.py | 3 +- depthai_ros_driver/package.xml | 2 +- .../src/dai_nodes/sensors/mono.cpp | 10 +- .../src/dai_nodes/sensors/rgb.cpp | 13 +- .../src/dai_nodes/sensors/sensor_helpers.cpp | 68 ++++- .../src/dai_nodes/sensors/sensor_wrapper.cpp | 2 +- depthai_ros_driver/src/dai_nodes/stereo.cpp | 52 ++-- .../param_handlers/sensor_param_handler.cpp | 84 +++--- .../param_handlers/stereo_param_handler.cpp | 10 +- .../src/pipeline/base_types.cpp | 14 +- .../src/pipeline/pipeline_generator.cpp | 23 +- depthai_ros_driver/src/utils.cpp | 16 +- depthai_ros_msgs/CMakeLists.txt | 2 +- depthai_ros_msgs/package.xml | 2 +- 44 files changed, 557 insertions(+), 354 deletions(-) create mode 100644 depthai_ros_driver/config/isaac_vslam.yaml diff --git a/README.md b/README.md index 844825b0..d3acce0d 100644 --- a/README.md +++ b/README.md @@ -137,7 +137,8 @@ docker run -it -v /dev/:/dev/ --privileged -e DISPLAY -v /tmp/.X11-unix:/tmp/.X1 Currently, recommended way to launch cameras is to use executables from depthai_ros_driver package. -This runs your camera as a ROS2 Component and gives you the ability to customize your camera using ROS parameters. +This runs your camera as a ROS2 Component/ROS Nodelet and gives you the ability to customize your camera using ROS parameters. +The main API difference between ROS2 and ROS1 is the parameter naming convention: parameters in ROS2 are namespaced using `.`, while parameters in ROS1 are namespaced using `_`, for example `camera.i_pipeline_type` in ROS2 vs `camera_i_pipeline_type` in ROS1. For the sake of simplicity, this Readme uses the ROS2 convention. A list of all available parameters with their default values can be found at the bottom of this Readme. Parameters that begin with `r_` can be freely modified during runtime, for example with rqt. Parameters that begin with `i_` are set when the camera is initializing; to change them you have to call the `stop` and `start` services. This can be used for hot swapping NNs during runtime, changing resolutions, etc. 
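As a quick illustration of the conventions described above, a ROS2 parameter override file for the driver could look like the snippet below. This is only a sketch: every key is taken from the full parameter list at the bottom of this Readme and from the config files shipped in `depthai_ros_driver/config`, the values are illustrative, and `/oak` is simply the default node name used in those files.

```yaml
/oak:
  ros__parameters:
    camera:
      i_pipeline_type: RGBD    # i_ parameters are applied while the camera initializes
      i_nn_type: spatial
    rgb:
      i_fps: 30.0
      r_set_man_exposure: true # r_ parameters can also be changed later at runtime, e.g. with rqt
      r_exposure: 20000
```

Changing an `i_` parameter after startup requires the `stop`/`start` service cycle mentioned above, whereas `r_` parameters take effect immediately.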
Below you can see some examples: @@ -166,12 +167,30 @@ It is also possible to set custom URDF path (for now only absolute path works) a **NOTE ON IMU EXTRINSICS** If your camera has uncalibrated IMU, a warning will be shown, and IMU will be published with zero rotation and translation. You can override this behavior by setting `camera.i_tf_imu_from_descr`: true. This will publish default IMU extrinsics from URDF based on camera model. + +#### Available sensors and their resolutions: + +* IMX378 - {"12MP", "4K", "1080P"}, default 1080P +* OV9282 - {"800P", "720P", "400P"}, default 800P +* OV9782 - {"800P", "720P", "400P"}, default 800P +* OV9281 - {"800P", "720P", "400P"}, default 800P +* IMX214 - {"13MP", "12MP", "4K", "1080P"}, default 1080P +* IMX412 - {"13MP", "12MP", "4K", "1080P"}, default 1080P +* OV7750 - {"480P", "400P"}, default 480P +* OV7251 - {"480P", "400P"}, default 480P +* IMX477 - {"12MP", "4K", "1080P"}, default 1080P +* IMX577 - {"12MP", "4K", "1080P"}, default 1080P +* AR0234 - {"1200P"}, default 1200P +* IMX582 - {"48MP", "12MP", "4K"}, default 4K +* LCM48 - {"48MP", "12MP", "4K"}, default 4K + #### Setting RGB parameters +![](docs/param_rgb.gif) By default RGB camera outputs `ISP` frame. To set custom width and height of output image, you can set `i_isp_num` and `i_isp_den` which scale image dimensions (2 and 3 by default, so from 1920x1080 to 1280x720); note that for RGBD alignment to work, the resulting width and height must be divisible by 16. Additionally you can set `i_output_isp: false` to use `video` output and set custom size using `i_width` and `i_height` parameters. -![](docs/param_rgb.gif) + #### Setting Stereo parameters ![](docs/param_stereo.gif) ##### Depth alignment @@ -179,6 +198,9 @@ When setting `stereo.i_align_depth: true`, stereo output is aligned to board soc You can enable rectified Stereo streams by setting, for example in the case of right stream `i_publish_right_rect: true`. You can also set `i_publish_synced_rect_pair: true` to get both images with the same timestamps. +##### Stereo socket order +By default, the right camera is treated as the first one when used for stereo computation, which is reflected in CameraInfo messages. If you want to reverse that logic, set `stereo.i_reverse_stereo_socket_order: true` (this can also be set for individual sensors). + ##### Custom Sensor sockets Configuration of which sensors are used for computing stereo pair can be done either programmatically, by specifying them in a constructor of a Stereo node (for example when building a custom pipeline), or via parameters - `stereo.i_left_socket_id`/`stereo.i_right_socket_id`. Please note that currently if you want to use rgb/center socket instead of one of the given pairs you will need to build a custom pipeline for that. @@ -302,7 +324,6 @@ an example can be seen by running `ros2 launch depthai_filters example_wls_filt - FeatureTrackerOverlay - publishes Tracked Features overlay based on features and images coming from the driver - Features3D - uses depth image to republish features as 3D pointcloud - ### Using external sources for NN inference or Stereo Depth There is a possibility of using external image topics as input for NNs or Depth calculation. 
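A minimal way to wire this up through parameters is sketched below; the per-sensor `i_simulate_from_topic` and `i_simulated_topic_name` keys come from the parameter list at the bottom of this Readme (and are handled in `sensor_wrapper.cpp` further down in this patch), while the topic names themselves are placeholders for whatever your external source publishes.

```yaml
/oak:
  ros__parameters:
    left:
      i_simulate_from_topic: true                       # consume an external topic instead of the on-device stream
      i_simulated_topic_name: /my_robot/left/image_raw  # placeholder topic name
    right:
      i_simulate_from_topic: true
      i_simulated_topic_name: /my_robot/right/image_raw # placeholder topic name
```

With a configuration along these lines the sensor nodes subscribe to the given topics and feed those frames into the pipeline in place of the on-device streams.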
@@ -382,4 +403,214 @@ For easier development inside isolated workspace, one can use Visual Studio Code - Create separate workspace - Clone repository into src - Copy `.devcontainer` directory into main workspace directory -- Open workspace directory in VSCode \ No newline at end of file +- Open workspace directory in VSCode + +### List of parameters: +```yaml +/oak: + ros__parameters: + camera: + i_calibration_dump: false + i_enable_imu: true + i_enable_ir: true + i_external_calibration_path: '' + i_floodlight_brightness: 0 + i_ip: '' + i_laser_dot_brightness: 800 + i_mx_id: '' + i_nn_type: spatial + i_pipeline_dump: false + i_pipeline_type: RGBD + i_publish_tf_from_calibration: false + i_tf_base_frame: oak + i_tf_cam_pitch: '0.0' + i_tf_cam_pos_x: '0.0' + i_tf_cam_pos_y: '0.0' + i_tf_cam_pos_z: '0.0' + i_tf_cam_roll: '0.0' + i_tf_cam_yaw: '0.0' + i_tf_camera_model: '' + i_tf_camera_name: oak + i_tf_custom_urdf_location: '' + i_tf_custom_xacro_args: '' + i_tf_imu_from_descr: 'false' + i_tf_parent_frame: oak-d-base-frame + i_usb_port_id: '' + i_usb_speed: SUPER_PLUS + diagnostic_updater: + period: 1.0 + imu: + i_acc_cov: 0.0 + i_acc_freq: 400 + i_batch_report_threshold: 5 + i_enable_rotation: false + i_get_base_device_timestamp: false + i_gyro_cov: 0.0 + i_gyro_freq: 400 + i_mag_cov: 0.0 + i_max_batch_reports: 10 + i_message_type: IMU + i_rot_cov: -1.0 + i_sync_method: LINEAR_INTERPOLATE_ACCEL + i_update_ros_base_time_on_ros_msg: false + left: + i_add_exposure_offset: false + i_board_socket_id: 1 + i_calibration_file: '' + i_disable_node: false + i_enable_feature_tracker: false + i_enable_lazy_publisher: true + i_exposure_offset: 0 + i_fps: 30.0 + i_fsync_continuous: false + i_fsync_trigger: false + i_get_base_device_timestamp: false + i_height: 720 + i_low_bandwidth: false + i_low_bandwidth_quality: 50 + i_max_q_size: 30 + i_publish_topic: false + i_resolution: '720' + i_reverse_stereo_socket_order: false + i_sensor_img_orientation: NORMAL + i_set_isp3a_fps: false + i_simulate_from_topic: false + i_simulated_topic_name: '' + i_update_ros_base_time_on_ros_msg: false + i_width: 1280 + r_exposure: 1000 + r_iso: 800 + r_set_man_exposure: false + nn: + i_disable_resize: false + i_enable_passthrough: false + i_enable_passthrough_depth: false + i_get_base_device_timestamp: false + i_num_inference_threads: 2 + i_num_pool_frames: 4 + i_update_ros_base_time_on_ros_msg: false + rgb: + i_add_exposure_offset: false + i_board_socket_id: 0 + i_calibration_file: '' + i_disable_node: false + i_enable_feature_tracker: false + i_enable_lazy_publisher: true + i_enable_preview: false + i_exposure_offset: 0 + i_fps: 30.0 + i_fsync_continuous: false + i_fsync_trigger: false + i_get_base_device_timestamp: false + i_height: 720 + i_interleaved: false + i_isp_den: 3 + i_isp_num: 2 + i_keep_preview_aspect_ratio: true + i_low_bandwidth: false + i_low_bandwidth_quality: 50 + i_max_q_size: 30 + i_output_isp: true + i_preview_height: 300 + i_preview_size: 300 + i_preview_width: 300 + i_publish_topic: true + i_resolution: 1080p + i_reverse_stereo_socket_order: false + i_sensor_img_orientation: NORMAL + i_set_isp3a_fps: false + i_set_isp_scale: true + i_simulate_from_topic: false + i_simulated_topic_name: '' + i_update_ros_base_time_on_ros_msg: false + i_width: 1280 + r_exposure: 20000 + r_focus: 1 + r_iso: 800 + r_set_man_exposure: false + r_set_man_focus: false + r_set_man_whitebalance: false + r_whitebalance: 3300 + right: + i_add_exposure_offset: false + i_board_socket_id: 2 + i_calibration_file: '' + i_disable_node: 
false + i_enable_feature_tracker: false + i_enable_lazy_publisher: true + i_exposure_offset: 0 + i_fps: 30.0 + i_fsync_continuous: false + i_fsync_trigger: false + i_get_base_device_timestamp: false + i_height: 720 + i_low_bandwidth: false + i_low_bandwidth_quality: 50 + i_max_q_size: 30 + i_publish_topic: false + i_resolution: '720' + i_reverse_stereo_socket_order: false + i_sensor_img_orientation: NORMAL + i_set_isp3a_fps: false + i_simulate_from_topic: false + i_simulated_topic_name: '' + i_update_ros_base_time_on_ros_msg: false + i_width: 1280 + r_exposure: 1000 + r_iso: 800 + r_set_man_exposure: false + stereo: + i_add_exposure_offset: false + i_align_depth: true + i_bilateral_sigma: 0 + i_board_socket_id: 0 + i_depth_filter_size: 5 + i_depth_preset: HIGH_ACCURACY + i_disparity_width: DISPARITY_96 + i_enable_alpha_scaling: false + i_enable_brightness_filter: false + i_enable_companding: false + i_enable_decimation_filter: false + i_enable_disparity_shift: false + i_enable_distortion_correction: false + i_enable_lazy_publisher: true + i_enable_spatial_filter: false + i_enable_speckle_filter: false + i_enable_temporal_filter: false + i_enable_threshold_filter: false + i_exposure_offset: 0 + i_extended_disp: false + i_get_base_device_timestamp: false + i_height: 720 + i_left_rect_add_exposure_offset: false + i_left_rect_enable_feature_tracker: false + i_left_rect_exposure_offset: 0 + i_left_rect_low_bandwidth: false + i_left_rect_low_bandwidth_quality: 50 + i_left_socket_id: 1 + i_low_bandwidth: false + i_low_bandwidth_quality: 50 + i_lr_check: true + i_lrc_threshold: 10 + i_max_q_size: 30 + i_output_disparity: false + i_publish_left_rect: false + i_publish_right_rect: false + i_publish_synced_rect_pair: false + i_publish_topic: true + i_rectify_edge_fill_color: 0 + i_reverse_stereo_socket_order: false + i_right_rect_add_exposure_offset: false + i_right_rect_enable_feature_tracker: false + i_right_rect_exposure_offset: 0 + i_right_rect_low_bandwidth: false + i_right_rect_low_bandwidth_quality: 50 + i_right_socket_id: 2 + i_set_input_size: false + i_socket_name: rgb + i_stereo_conf_threshold: 240 + i_subpixel: false + i_update_ros_base_time_on_ros_msg: false + i_width: 1280 + use_sim_time: false +``` \ No newline at end of file diff --git a/depthai-ros/CHANGELOG.rst b/depthai-ros/CHANGELOG.rst index 49baa52c..fe7cc640 100644 --- a/depthai-ros/CHANGELOG.rst +++ b/depthai-ros/CHANGELOG.rst @@ -2,6 +2,15 @@ Changelog for package depthai-ros ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +2.8.1 (2023-09-12) +------------------- + +* Added support for OpenCV Stereo order convention +* Added disparity to depth use spec translation parameter +* Updated sensor socket logic +* Fixed issues when running robot_state_publisher as component +* Added missing tf2 dependencies + 2.8.0 (2023-09-01) ------------------- * Add camera image orientation param diff --git a/depthai-ros/CMakeLists.txt b/depthai-ros/CMakeLists.txt index 17607a09..da7c44c4 100644 --- a/depthai-ros/CMakeLists.txt +++ b/depthai-ros/CMakeLists.txt @@ -1,6 +1,6 @@ cmake_minimum_required(VERSION 3.10.2) # CMake version in Ubuntu 18.04 LTS -project(depthai-ros VERSION 2.8.0 LANGUAGES CXX C) +project(depthai-ros VERSION 2.8.1 LANGUAGES CXX C) set(CMAKE_CXX_STANDARD 14) diff --git a/depthai-ros/package.xml b/depthai-ros/package.xml index 85100343..c089df8a 100644 --- a/depthai-ros/package.xml +++ b/depthai-ros/package.xml @@ -1,7 +1,7 @@ depthai-ros - 2.8.0 + 2.8.1 The depthai-ros package diff --git a/depthai_bridge/CMakeLists.txt 
b/depthai_bridge/CMakeLists.txt index 6e66bca9..fa6e93de 100644 --- a/depthai_bridge/CMakeLists.txt +++ b/depthai_bridge/CMakeLists.txt @@ -1,7 +1,7 @@ cmake_minimum_required(VERSION 3.10.2) # CMake version in Ubuntu 18.04 LTS set(CMAKE_POSITION_INDEPENDENT_CODE ON) -project(depthai_bridge VERSION 2.8.0 LANGUAGES CXX C) +project(depthai_bridge VERSION 2.8.1 LANGUAGES CXX C) set(CMAKE_CXX_STANDARD 14) set(CMAKE_CXX_STANDARD_REQUIRED ON) @@ -38,6 +38,9 @@ find_package(stereo_msgs REQUIRED) find_package(std_msgs REQUIRED) find_package(vision_msgs REQUIRED) find_package(tf2_ros REQUIRED) +find_package(tf2 REQUIRED) +find_package(tf2_geometry_msgs REQUIRED) +find_package(composition_interfaces REQUIRED) set(dependencies camera_info_manager @@ -50,6 +53,9 @@ set(dependencies std_msgs vision_msgs tf2_ros + tf2_geometry_msgs + tf2 + composition_interfaces ) include_directories( diff --git a/depthai_bridge/include/depthai_bridge/ImageConverter.hpp b/depthai_bridge/include/depthai_bridge/ImageConverter.hpp index 6287fbc9..736c5a02 100644 --- a/depthai_bridge/include/depthai_bridge/ImageConverter.hpp +++ b/depthai_bridge/include/depthai_bridge/ImageConverter.hpp @@ -25,12 +25,7 @@ namespace ImageMsgs = sensor_msgs::msg; using ImagePtr = ImageMsgs::Image::SharedPtr; using TimePoint = std::chrono::time_point; -ImageMsgs::CameraInfo calibrationToCameraInfo(dai::CalibrationHandler calibHandler, - dai::CameraBoardSocket cameraId, - int width = -1, - int height = -1, - Point2f topLeftPixelId = Point2f(), - Point2f bottomRightPixelId = Point2f()); + class ImageConverter { public: // ImageConverter() = default; @@ -55,9 +50,29 @@ class ImageConverter { _updateRosBaseTimeOnToRosMsg = update; } + /** + * @brief Sets converter behavior to convert from bitstream to raw data. + * @param srcType: The type of the bitstream data used for conversion. + */ void convertFromBitstream(dai::RawImgFrame::Type srcType); + + /** + * @brief Sets exposure offset when getting timestamps from the message. + * @param offset: The exposure offset to be added to the timestamp. + */ void addExposureOffset(dai::CameraExposureOffset& offset); - void convertDispToDepth(); + + /** + * @brief Sets converter behavior to convert from disparity to depth when converting messages from bitstream. + * @param baseline: The baseline of the stereo pair. + */ + void convertDispToDepth(double baseline); + + /** + * @brief Reverses the order of the stereo sockets when creating CameraInfo to calculate Tx component of Projection matrix. + * By default the right socket is used as the base, calling this function will set left as base. 
+ */ + void reverseStereoSocketOrder(); void toRosMsg(std::shared_ptr inData, std::deque& outImageMsgs); ImageMsgs::Image toRosMsgRawPtr(std::shared_ptr inData, const sensor_msgs::msg::CameraInfo& info = sensor_msgs::msg::CameraInfo()); @@ -100,6 +115,8 @@ class ImageConverter { bool _convertDispToDepth = false; bool _addExpOffset = false; dai::CameraExposureOffset _expOffset; + bool _reverseStereoSocketOrder = false; + double _baseline; }; } // namespace ros diff --git a/depthai_bridge/include/depthai_bridge/TFPublisher.hpp b/depthai_bridge/include/depthai_bridge/TFPublisher.hpp index 01ef7a34..0e384ada 100644 --- a/depthai_bridge/include/depthai_bridge/TFPublisher.hpp +++ b/depthai_bridge/include/depthai_bridge/TFPublisher.hpp @@ -28,7 +28,7 @@ class TFPublisher { const std::string& imuFromDescr, const std::string& customURDFLocation, const std::string& customXacroArgs); - /* + /** * @brief Obtain URDF description by running Xacro with provided arguments. */ std::string getURDF(); @@ -36,31 +36,31 @@ class TFPublisher { geometry_msgs::msg::Vector3 transFromExtr(nlohmann::json translation); private: - /* + /** * @brief Converts model name to one of the available model families */ void convertModelName(); - /* + /** * @brief Prepare arguments for xacro command. If custom URDF location is not provided, check if model name is available in depthai_descriptions package. */ std::string prepareXacroArgs(); - /* + /** * @brief Get URDF description and set it as a parameter for robot_state_publisher */ void publishDescription(); - /* + /** * @brief Publish camera transforms ("standard" and optical) based on calibration data. * Frame names are based on socket names and use following convention: [base_frame]_[socket_name]_camera_frame and * [base_frame]_[socket_name]_camera_optical_frame */ void publishCamTransforms(nlohmann::json camData, rclcpp::Node* node); - /* + /** * @brief Publish IMU transform based on calibration data. * Frame name is based on IMU name and uses following convention: [base_frame]_imu_frame. * If IMU extrinsics are not set, warning is printed out and imu frame is published with zero translation and rotation. */ void publishImuTransform(nlohmann::json json, rclcpp::Node* node); - /* + /** * @brief Check if model STL file is available in depthai_descriptions package. 
*/ bool modelNameAvailable(); @@ -82,6 +82,14 @@ class TFPublisher { std::string _customXacroArgs; std::vector _camFeatures; rclcpp::Logger _logger; + const std::unordered_map _socketNameMap = { + {dai::CameraBoardSocket::AUTO, "rgb"}, + {dai::CameraBoardSocket::CAM_A, "rgb"}, + {dai::CameraBoardSocket::CAM_B, "left"}, + {dai::CameraBoardSocket::CAM_C, "right"}, + {dai::CameraBoardSocket::CAM_D, "left_back"}, + {dai::CameraBoardSocket::CAM_E, "right_back"}, + }; }; } // namespace ros } // namespace dai \ No newline at end of file diff --git a/depthai_bridge/package.xml b/depthai_bridge/package.xml index efad0950..435e70c4 100644 --- a/depthai_bridge/package.xml +++ b/depthai_bridge/package.xml @@ -1,7 +1,7 @@ depthai_bridge - 2.8.0 + 2.8.1 The depthai_bridge package Sachin Guruswamy @@ -26,6 +26,9 @@ stereo_msgs vision_msgs tf2_ros + tf2 + tf2_geometry_msgs + composition_interfaces robot_state_publisher xacro diff --git a/depthai_bridge/src/ImageConverter.cpp b/depthai_bridge/src/ImageConverter.cpp index 0bc2d1a5..65bef6b3 100644 --- a/depthai_bridge/src/ImageConverter.cpp +++ b/depthai_bridge/src/ImageConverter.cpp @@ -45,8 +45,9 @@ void ImageConverter::convertFromBitstream(dai::RawImgFrame::Type srcType) { _srcType = srcType; } -void ImageConverter::convertDispToDepth() { +void ImageConverter::convertDispToDepth(double baseline) { _convertDispToDepth = true; + _baseline = baseline; } void ImageConverter::addExposureOffset(dai::CameraExposureOffset& offset) { @@ -54,6 +55,10 @@ void ImageConverter::addExposureOffset(dai::CameraExposureOffset& offset) { _addExpOffset = true; } +void ImageConverter::reverseStereoSocketOrder() { + _reverseStereoSocketOrder = true; +} + ImageMsgs::Image ImageConverter::toRosMsgRawPtr(std::shared_ptr inData, const sensor_msgs::msg::CameraInfo& info) { if(_updateRosBaseTimeOnToRosMsg) { updateRosBaseTime(); @@ -108,7 +113,7 @@ ImageMsgs::Image ImageConverter::toRosMsgRawPtr(std::shared_ptr i // converting disparity if(_convertDispToDepth) { - auto factor = std::abs(info.p[3]) * 10000; + auto factor = std::abs(_baseline * 10) * info.p[0]; cv::Mat depthOut = cv::Mat(cv::Size(output.cols, output.rows), CV_16UC1); depthOut.forEach([&output, &factor](uint16_t& pixel, const int* position) -> void { auto disp = output.at(position); @@ -375,12 +380,20 @@ ImageMsgs::CameraInfo ImageConverter::calibrationToCameraInfo( std::copy(stereoIntrinsics[i].begin(), stereoIntrinsics[i].end(), stereoFlatIntrinsics.begin() + 4 * i); stereoFlatIntrinsics[(4 * i) + 3] = 0; } + // Check stereo socket order + dai::CameraBoardSocket stereoSocketFirst = calibHandler.getStereoLeftCameraId(); + dai::CameraBoardSocket stereoSocketSecond = calibHandler.getStereoRightCameraId(); + double factor = 1.0; + if(_reverseStereoSocketOrder) { + stereoSocketFirst = calibHandler.getStereoRightCameraId(); + stereoSocketSecond = calibHandler.getStereoLeftCameraId(); + factor = -1.0; + } - if(calibHandler.getStereoLeftCameraId() == cameraId) { + if(stereoSocketFirst == cameraId) { // This defines where the first camera is w.r.t second camera coordinate system giving it a translation to place all the points in the first // camera to second camera by multiplying that translation vector using transformation function. 
- stereoFlatIntrinsics[3] = stereoFlatIntrinsics[0] - * calibHandler.getCameraExtrinsics(calibHandler.getStereoRightCameraId(), calibHandler.getStereoLeftCameraId())[0][3] + stereoFlatIntrinsics[3] = factor * stereoFlatIntrinsics[0] * calibHandler.getCameraExtrinsics(stereoSocketFirst, stereoSocketSecond)[0][3] / 100.0; // Converting to meters rectifiedRotation = calibHandler.getStereoLeftRectificationRotation(); } else { diff --git a/depthai_bridge/src/TFPublisher.cpp b/depthai_bridge/src/TFPublisher.cpp index ae9bd691..730d5793 100644 --- a/depthai_bridge/src/TFPublisher.cpp +++ b/depthai_bridge/src/TFPublisher.cpp @@ -127,18 +127,7 @@ void TFPublisher::publishImuTransform(nlohmann::json json, rclcpp::Node* node) { } std::string TFPublisher::getCamSocketName(int socketNum) { - std::string name; - for(auto& cam : _camFeatures) { - if(cam.socket == static_cast(socketNum)) { - if(cam.name == "color" || cam.name == "center") { - name = "rgb"; - } else { - name = cam.name; - } - return name; - } - } - throw std::runtime_error("Camera socket not found"); + return _socketNameMap.at(static_cast(socketNum)); } geometry_msgs::msg::Vector3 TFPublisher::transFromExtr(nlohmann::json translation) { diff --git a/depthai_descriptions/CMakeLists.txt b/depthai_descriptions/CMakeLists.txt index 6e6c8916..b3f8a25f 100644 --- a/depthai_descriptions/CMakeLists.txt +++ b/depthai_descriptions/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required(VERSION 3.8) -project(depthai_descriptions VERSION 2.8.0) +project(depthai_descriptions VERSION 2.8.1) if(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES "Clang") add_compile_options(-Wall -Wextra -Wpedantic) diff --git a/depthai_descriptions/launch/urdf_launch.py b/depthai_descriptions/launch/urdf_launch.py index 039ddfaa..2ce03ede 100644 --- a/depthai_descriptions/launch/urdf_launch.py +++ b/depthai_descriptions/launch/urdf_launch.py @@ -10,7 +10,12 @@ def launch_setup(context, *args, **kwargs): bringup_dir = get_package_share_directory('depthai_descriptions') - xacro_path = os.path.join(bringup_dir, 'urdf', 'depthai_descr.urdf.xacro') + use_base_descr = LaunchConfiguration('use_base_descr', default='false') + xacro_path = '' + if use_base_descr.perform(context) == 'true': + xacro_path = os.path.join(bringup_dir, 'urdf', 'base_descr.urdf.xacro') + else: + xacro_path = os.path.join(bringup_dir, 'urdf', 'depthai_descr.urdf.xacro') camera_model = LaunchConfiguration('camera_model', default='OAK-D') tf_prefix = LaunchConfiguration('tf_prefix', default='oak') @@ -134,7 +139,12 @@ def generate_launch_description(): DeclareLaunchArgument( 'use_composition', default_value='false', - description='Use composition to start the robot_state_publisher node. Default value will be false') + description='Use composition to start the robot_state_publisher node. Default value will be false'), + DeclareLaunchArgument( + 'use_base_descr', + default_value='false', + description='Launch base description. 
Default value will be false' + ) ] return LaunchDescription( diff --git a/depthai_descriptions/package.xml b/depthai_descriptions/package.xml index f82814aa..278f1b55 100644 --- a/depthai_descriptions/package.xml +++ b/depthai_descriptions/package.xml @@ -1,7 +1,7 @@ depthai_descriptions - 2.8.0 + 2.8.1 The depthai_descriptions package Sachin Guruswamy diff --git a/depthai_examples/CMakeLists.txt b/depthai_examples/CMakeLists.txt index 5acb9a8f..7ea3c030 100644 --- a/depthai_examples/CMakeLists.txt +++ b/depthai_examples/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required(VERSION 3.10.2) # CMake version in Ubuntu 18.04 LTS -project(depthai_examples VERSION 2.8.0 LANGUAGES CXX C) +project(depthai_examples VERSION 2.8.1 LANGUAGES CXX C) set(CMAKE_CXX_STANDARD 14) set(CMAKE_CXX_STANDARD_REQUIRED ON) diff --git a/depthai_examples/package.xml b/depthai_examples/package.xml index cfba823a..0b52a656 100644 --- a/depthai_examples/package.xml +++ b/depthai_examples/package.xml @@ -1,7 +1,7 @@ depthai_examples - 2.8.0 + 2.8.1 The depthai_examples package diff --git a/depthai_filters/CMakeLists.txt b/depthai_filters/CMakeLists.txt index 148f4ad4..c6fbc975 100644 --- a/depthai_filters/CMakeLists.txt +++ b/depthai_filters/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required(VERSION 3.8) -project(depthai_filters VERSION 2.8.0 LANGUAGES CXX C) +project(depthai_filters VERSION 2.8.1 LANGUAGES CXX C) if(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES "Clang") add_compile_options(-Wall -Wextra -Wpedantic) diff --git a/depthai_filters/package.xml b/depthai_filters/package.xml index e834f431..894ba98b 100644 --- a/depthai_filters/package.xml +++ b/depthai_filters/package.xml @@ -2,7 +2,7 @@ depthai_filters - 2.8.0 + 2.8.1 Depthai filters package Adam Serafin MIT diff --git a/depthai_filters/src/spatial_bb.cpp b/depthai_filters/src/spatial_bb.cpp index 5ebe2363..5a3a7f0f 100644 --- a/depthai_filters/src/spatial_bb.cpp +++ b/depthai_filters/src/spatial_bb.cpp @@ -1,6 +1,6 @@ #include "depthai_filters/spatial_bb.hpp" -#include "cv_bridge/cv_bridge.hpp" +#include "cv_bridge/cv_bridge.h" #include "depthai_filters/utils.hpp" #include "geometry_msgs/msg/point32.hpp" #include "opencv2/opencv.hpp" diff --git a/depthai_ros_driver/CMakeLists.txt b/depthai_ros_driver/CMakeLists.txt index 27e84c2a..e0241045 100644 --- a/depthai_ros_driver/CMakeLists.txt +++ b/depthai_ros_driver/CMakeLists.txt @@ -1,5 +1,5 @@ cmake_minimum_required(VERSION 3.22) -project(depthai_ros_driver VERSION 2.8.0) +project(depthai_ros_driver VERSION 2.8.1) set(CMAKE_POSITION_INDEPENDENT_CODE ON) set(CMAKE_BUILD_SHARED_LIBS ON) set(CMAKE_EXPORT_COMPILE_COMMANDS ON) diff --git a/depthai_ros_driver/config/camera.yaml b/depthai_ros_driver/config/camera.yaml index 1cfe524a..b8a25dab 100644 --- a/depthai_ros_driver/config/camera.yaml +++ b/depthai_ros_driver/config/camera.yaml @@ -3,130 +3,7 @@ camera: i_enable_imu: true i_enable_ir: true - i_floodlight_brightness: 0 - i_ip: '' - i_laser_dot_brightness: 800 - i_mx_id: '' i_nn_type: spatial i_pipeline_type: RGBD - i_usb_port_id: '' - i_usb_speed: SUPER_PLUS - i_pipeline_dump: false - i_calibration_dump: false - i_external_calibration_path: '' - imu: - i_acc_freq: 400 - i_acc_cov: 0.0 - i_batch_report_threshold: 1 - i_get_base_device_timestamp: false - i_enable_rotation: false - i_gyro_cov: 0.0 - i_gyro_freq: 400 - i_mag_cov: 0.0 - i_mag_freq: 100 - i_max_batch_reports: 5 - i_message_type: IMU - i_rot_cov: -1.0 - i_rot_freq: 400 - i_sync_method: LINEAR_INTERPOLATE_ACCEL - left: - 
i_board_socket_id: 1 - i_calibration_file: '' - i_get_base_device_timestamp: false - i_fps: 30.0 - i_height: 720 - i_low_bandwidth: false - i_low_bandwidth_quality: 50 - i_max_q_size: 30 - i_publish_topic: false - i_simulate_from_topic: false - i_disable_node: false - i_resolution: '720' - i_width: 1280 - r_exposure: 1000 - r_iso: 800 - r_set_man_exposure: false nn: i_nn_config_path: depthai_ros_driver/mobilenet - i_disable_resize: false - i_enable_passthrough: false - i_enable_passthrough_depth: false - rgb: - i_board_socket_id: 0 - i_simulate_from_topic: false - i_get_base_device_timestamp: false - i_disable_node: false - i_calibration_file: '' - i_enable_preview: false - i_fps: 30.0 - i_height: 720 - i_interleaved: false - i_keep_preview_aspect_ratio: true - i_low_bandwidth: false - i_low_bandwidth_quality: 50 - i_max_q_size: 30 - i_preview_size: 300 - i_publish_topic: true - i_resolution: '1080' - i_set_isp_scale: true - i_isp_num: 2 - i_isp_den: 3 - i_output_isp: true - i_width: 1280 - r_exposure: 20000 - r_focus: 1 - r_iso: 800 - r_set_man_exposure: false - r_set_man_focus: false - r_set_man_whitebalance: false - r_whitebalance: 3300 - right: - i_board_socket_id: 2 - i_calibration_file: '' - i_get_base_device_timestamp: false - i_fps: 30.0 - i_height: 720 - i_low_bandwidth: false - i_low_bandwidth_quality: 50 - i_max_q_size: 30 - i_publish_topic: false - i_simulate_from_topic: false - i_disable_node: false - i_resolution: '720' - i_width: 1280 - r_exposure: 1000 - r_iso: 800 - r_set_man_exposure: false - stereo: - i_align_depth: true - i_get_base_device_timestamp: false - i_output_disparity: false - i_bilateral_sigma: 0 - i_board_socket_id: 0 - i_depth_filter_size: 5 - i_depth_preset: HIGH_ACCURACY - i_disparity_width: DISPARITY_96 - i_enable_companding: false - i_enable_decimation_filter: false - i_enable_distortion_correction: false - i_enable_spatial_filter: false - i_enable_speckle_filter: false - i_enable_temporal_filter: false - i_enable_threshold_filter: false - i_enable_brightness_filter: false - i_brightness_filter_min_brightness: 1 - i_brightness_filter_max_brightness: 256 - i_extended_disp: false - i_height: 720 - i_low_bandwidth: false - i_low_bandwidth_quality: 50 - i_lr_check: true - i_lrc_threshold: 10 - i_max_q_size: 30 - i_rectify_edge_fill_color: 0 - i_stereo_conf_threshold: 240 - i_set_input_size: false - i_input_width: 1280 - i_input_height: 720 - i_width: 1280 - use_sim_time: false \ No newline at end of file diff --git a/depthai_ros_driver/config/isaac_vslam.yaml b/depthai_ros_driver/config/isaac_vslam.yaml new file mode 100644 index 00000000..937cef16 --- /dev/null +++ b/depthai_ros_driver/config/isaac_vslam.yaml @@ -0,0 +1,15 @@ +/oak: + ros__parameters: + camera: + i_enable_imu: true + i_enable_ir: false + i_pipeline_type: depth + stereo: + i_reverse_stereo_socket_order: true + i_publish_synced_rect_pair: true + left: + i_resolution: '400' + i_fps: 90.0 + right: + i_resolution: '400' + i_fps: 90.0 \ No newline at end of file diff --git a/depthai_ros_driver/config/pcl.yaml b/depthai_ros_driver/config/pcl.yaml index 6fcddb92..1ea1fbb1 100644 --- a/depthai_ros_driver/config/pcl.yaml +++ b/depthai_ros_driver/config/pcl.yaml @@ -3,6 +3,7 @@ camera: i_nn_type: none stereo: - i_align_depth: false + i_align_depth: true + i_board_socket_id: 2 i_subpixel: true i_publish_right_rect: true \ No newline at end of file diff --git a/depthai_ros_driver/include/depthai_ros_driver/camera.hpp b/depthai_ros_driver/include/depthai_ros_driver/camera.hpp index 12376b38..19bc51ed 
100644 --- a/depthai_ros_driver/include/depthai_ros_driver/camera.hpp +++ b/depthai_ros_driver/include/depthai_ros_driver/camera.hpp @@ -21,45 +21,45 @@ using Trigger = std_srvs::srv::Trigger; class Camera : public rclcpp::Node { public: explicit Camera(const rclcpp::NodeOptions& options = rclcpp::NodeOptions()); - /* + /** * @brief Destructor of the class Camera. Stops the device and destroys the pipeline. */ ~Camera(); - /* + /** * @brief Creates the pipeline and starts the device. Also sets up parameter callback and services. */ void onConfigure(); private: - /* + /** * @brief Print information about the device type. */ void getDeviceType(); - /* + /** * @brief Create the pipeline by using PipelineGenerator. */ void createPipeline(); - /* + /** * @brief Connect either to a first available device or to a device with a specific USB port, MXID or IP. Loops continuously until a device is found. */ void startDevice(); - /* + /** * @brief Sets up the queues and creates publishers for the nodes in the pipeline. */ void setupQueues(); - /* + /** * @brief Sets IR floodlight and dot pattern projector. */ void setIR(); - /* + /** * @brief Saves pipeline as a json to a file. */ void savePipeline(); - /* + /** * @brief Saves calibration data to a json file. */ void saveCalib(); - /* + /** * @brief Loads calibration data from a path. * @param path Path to the calibration file. */ diff --git a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/base_node.hpp b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/base_node.hpp index b3ab1f9c..de6ca37e 100644 --- a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/base_node.hpp +++ b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/base_node.hpp @@ -19,7 +19,7 @@ namespace depthai_ros_driver { namespace dai_nodes { class BaseNode { public: - /* + /** * @brief Constructor of the class BaseNode. Creates a node in the pipeline. * * @param[in] daiNodeName The dai node name @@ -32,11 +32,11 @@ class BaseNode { virtual void link(dai::Node::Input in, int linkType = 0); virtual dai::Node::Input getInput(int linkType = 0); virtual void setupQueues(std::shared_ptr device) = 0; - /* + /** * @brief Sets the names of the queues. */ virtual void setNames() = 0; - /* + /** * @brief Link inputs and outputs. * * @param pipeline The pipeline @@ -46,19 +46,19 @@ class BaseNode { void setNodeName(const std::string& daiNodeName); void setROSNodePointer(rclcpp::Node* node); - /* + /** * @brief Gets the ROS node pointer. * * @return The ROS node pointer. */ rclcpp::Node* getROSNode(); - /* + /** * @brief Gets the name of the node. * * @return The name. */ std::string getName(); - /* + /** * @brief Append ROS node name to the frameName given. * * @param[in] frameName The frame name diff --git a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/nn/detection.hpp b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/nn/detection.hpp index ec3a390b..d5898914 100644 --- a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/nn/detection.hpp +++ b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/nn/detection.hpp @@ -27,7 +27,7 @@ namespace nn { template class Detection : public BaseNode { public: - /* + /** * @brief Constructor of the class Detection. Creates a DetectionNetwork node in the pipeline. Also creates an ImageManip node in the pipeline. * The ImageManip node is used to resize the input frames to the size required by the DetectionNetwork node. 
* @@ -47,7 +47,7 @@ class Detection : public BaseNode { setXinXout(pipeline); } ~Detection() = default; - /* + /** * @brief Sets up the queues for the DetectionNetwork node and the ImageManip node. Also sets up the publishers for the DetectionNetwork node and the * ImageManip node. * @@ -90,7 +90,7 @@ class Detection : public BaseNode { ptQ->addCallback(std::bind(sensor_helpers::basicCameraPub, std::placeholders::_1, std::placeholders::_2, *imageConverter, ptPub, infoManager)); } }; - /* + /** * @brief Links the input of the DetectionNetwork node to the output of the ImageManip node. * * @param[in] in The input of the DetectionNetwork node @@ -99,7 +99,7 @@ class Detection : public BaseNode { void link(dai::Node::Input in, int /*linkType*/) override { detectionNode->out.link(in); }; - /* + /** * @brief Gets the input of the DetectionNetwork node. * * @param[in] linkType The link type (not used) @@ -117,7 +117,7 @@ class Detection : public BaseNode { nnQName = getName() + "_nn"; ptQName = getName() + "_pt"; }; - /* + /** * @brief Sets the XLinkOut for the DetectionNetwork node and the ImageManip node. Additionally enables the passthrough. * * @param pipeline The pipeline @@ -132,7 +132,7 @@ class Detection : public BaseNode { detectionNode->passthrough.link(xoutPT->input); } }; - /* + /** * @brief Closes the queues for the DetectionNetwork node and the passthrough. */ void closeQueues() override { @@ -147,7 +147,7 @@ class Detection : public BaseNode { }; private: - /* + /** * @brief Callback for the DetectionNetwork node. Converts the ImgDetections to Detection2DArray and publishes it. * * @param[in] name The name of the stream diff --git a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/sensor_helpers.hpp b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/sensor_helpers.hpp index 09608c75..3066088c 100644 --- a/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/sensor_helpers.hpp +++ b/depthai_ros_driver/include/depthai_ros_driver/dai_nodes/sensors/sensor_helpers.hpp @@ -6,8 +6,10 @@ #include "depthai-shared/datatype/RawImgFrame.hpp" #include "depthai-shared/properties/ColorCameraProperties.hpp" +#include "depthai-shared/properties/MonoCameraProperties.hpp" #include "depthai-shared/properties/VideoEncoderProperties.hpp" #include "depthai/pipeline/datatype/ADatatype.hpp" +#include "depthai/pipeline/datatype/CameraControl.hpp" #include "image_transport/camera_publisher.hpp" #include "sensor_msgs/msg/camera_info.hpp" @@ -43,7 +45,11 @@ struct ImageSensor { bool color; }; extern std::vector availableSensors; - +extern const std::unordered_map socketNameMap; +extern const std::unordered_map monoResolutionMap; +extern const std::unordered_map rgbResolutionMap; +extern const std::unordered_map fSyncModeMap; +extern const std::unordered_map cameraImageOrientationMap; void basicCameraPub(const std::string& /*name*/, const std::shared_ptr& data, dai::ros::ImageConverter& converter, diff --git a/depthai_ros_driver/include/depthai_ros_driver/param_handlers/sensor_param_handler.hpp b/depthai_ros_driver/include/depthai_ros_driver/param_handlers/sensor_param_handler.hpp index 5c47deaa..517022de 100644 --- a/depthai_ros_driver/include/depthai_ros_driver/param_handlers/sensor_param_handler.hpp +++ b/depthai_ros_driver/include/depthai_ros_driver/param_handlers/sensor_param_handler.hpp @@ -4,8 +4,6 @@ #include #include -#include "depthai-shared/properties/ColorCameraProperties.hpp" -#include "depthai-shared/properties/MonoCameraProperties.hpp" #include 
"depthai/pipeline/datatype/CameraControl.hpp" #include "depthai_ros_driver/dai_nodes/sensors/sensor_helpers.hpp" #include "depthai_ros_driver/param_handlers/base_param_handler.hpp" @@ -26,24 +24,19 @@ namespace depthai_ros_driver { namespace param_handlers { class SensorParamHandler : public BaseParamHandler { public: - explicit SensorParamHandler(rclcpp::Node* node, const std::string& name); + explicit SensorParamHandler(rclcpp::Node* node, const std::string& name, dai::CameraBoardSocket socket); ~SensorParamHandler(); - void declareCommonParams(); + void declareCommonParams(dai::CameraBoardSocket socket); void declareParams(std::shared_ptr monoCam, - dai::CameraBoardSocket socket, dai_nodes::sensor_helpers::ImageSensor sensor, bool publish); void declareParams(std::shared_ptr colorCam, - dai::CameraBoardSocket socket, dai_nodes::sensor_helpers::ImageSensor sensor, bool publish); dai::CameraControl setRuntimeParams(const std::vector& params) override; private: - std::unordered_map monoResolutionMap; - std::unordered_map rgbResolutionMap; - std::unordered_map fSyncModeMap; - std::unordered_map cameraImageOrientationMap; + dai::CameraBoardSocket socketID; }; } // namespace param_handlers } // namespace depthai_ros_driver \ No newline at end of file diff --git a/depthai_ros_driver/include/depthai_ros_driver/param_handlers/stereo_param_handler.hpp b/depthai_ros_driver/include/depthai_ros_driver/param_handlers/stereo_param_handler.hpp index 26182143..c45c6595 100644 --- a/depthai_ros_driver/include/depthai_ros_driver/param_handlers/stereo_param_handler.hpp +++ b/depthai_ros_driver/include/depthai_ros_driver/param_handlers/stereo_param_handler.hpp @@ -24,7 +24,7 @@ class StereoParamHandler : public BaseParamHandler { public: explicit StereoParamHandler(rclcpp::Node* node, const std::string& name); ~StereoParamHandler(); - void declareParams(std::shared_ptr stereo, const std::vector& camFeatures); + void declareParams(std::shared_ptr stereo); dai::CameraControl setRuntimeParams(const std::vector& params) override; void updateSocketsFromParams(dai::CameraBoardSocket& left, dai::CameraBoardSocket& right); diff --git a/depthai_ros_driver/include/depthai_ros_driver/pipeline/pipeline_generator.hpp b/depthai_ros_driver/include/depthai_ros_driver/pipeline/pipeline_generator.hpp index 5904c4ef..a1b7d38c 100644 --- a/depthai_ros_driver/include/depthai_ros_driver/pipeline/pipeline_generator.hpp +++ b/depthai_ros_driver/include/depthai_ros_driver/pipeline/pipeline_generator.hpp @@ -23,24 +23,19 @@ enum class PipelineType { RGB, RGBD, RGBStereo, Stereo, Depth, CamArray }; class PipelineGenerator { public: ~PipelineGenerator() = default; - /* + /** * @brief Validates the pipeline type. If the pipeline type is not valid for the number of sensors, it will be changed to the default type. * - * @param node The node + * @param node The node used for logging * @param[in] type The type * @param[in] sensorNum The sensor number * * @return The validated pipeline type. */ - PipelineType validatePipeline(rclcpp::Node* node, PipelineType type, int sensorNum); - /* - * @brief Creates the pipeline by using a plugin. Plugin types need to be of type depthai_ros_driver::pipeline_gen::BasePipeline. 
- * - * @param node The node - * @param device The device + std::string validatePipeline(rclcpp::Node* node,const std::string& typeStr, int sensorNum); + /** * @param pipeline The pipeline * @param[in] pipelineType The pipeline type name (plugin name or one of the default types) - * @param[in] nnType The neural network type (none, rgb, spatial) * @param[in] enableImu Indicates if IMU is enabled * * @return Vector BaseNodes created. diff --git a/depthai_ros_driver/include/depthai_ros_driver/utils.hpp b/depthai_ros_driver/include/depthai_ros_driver/utils.hpp index 841cb6ef..a98b4483 100644 --- a/depthai_ros_driver/include/depthai_ros_driver/utils.hpp +++ b/depthai_ros_driver/include/depthai_ros_driver/utils.hpp @@ -9,7 +9,6 @@ namespace dai { enum class CameraBoardSocket; -struct CameraFeatures; } // namespace dai namespace depthai_ros_driver { @@ -29,6 +28,6 @@ T getValFromMap(const std::string& name, const std::unordered_map camFeatures); +std::string getSocketName(dai::CameraBoardSocket socket); } // namespace utils } // namespace depthai_ros_driver \ No newline at end of file diff --git a/depthai_ros_driver/launch/camera.launch.py b/depthai_ros_driver/launch/camera.launch.py index a51dd3f8..9e743c02 100644 --- a/depthai_ros_driver/launch/camera.launch.py +++ b/depthai_ros_driver/launch/camera.launch.py @@ -91,7 +91,8 @@ def launch_setup(context, *args, **kwargs): 'cam_roll': cam_roll, 'cam_pitch': cam_pitch, 'cam_yaw': cam_yaw, - 'use_composition': use_composition}.items()), + 'use_composition': use_composition, + 'use_base_descr': pass_tf_args_as_params}.items()), ComposableNodeContainer( name=name+"_container", diff --git a/depthai_ros_driver/package.xml b/depthai_ros_driver/package.xml index e0bd5ca0..15f4ebbb 100644 --- a/depthai_ros_driver/package.xml +++ b/depthai_ros_driver/package.xml @@ -2,7 +2,7 @@ depthai_ros_driver - 2.8.0 + 2.8.1 Depthai ROS Monolithic node. 
Adam Serafin Sachin Guruswamy diff --git a/depthai_ros_driver/src/dai_nodes/sensors/mono.cpp b/depthai_ros_driver/src/dai_nodes/sensors/mono.cpp index 22cb09bc..e71b5495 100644 --- a/depthai_ros_driver/src/dai_nodes/sensors/mono.cpp +++ b/depthai_ros_driver/src/dai_nodes/sensors/mono.cpp @@ -28,8 +28,8 @@ Mono::Mono(const std::string& daiNodeName, RCLCPP_DEBUG(node->get_logger(), "Creating node %s", daiNodeName.c_str()); setNames(); monoCamNode = pipeline->create(); - ph = std::make_unique(node, daiNodeName); - ph->declareParams(monoCamNode, socket, sensor, publish); + ph = std::make_unique(node, daiNodeName, socket); + ph->declareParams(monoCamNode, sensor, publish); setXinXout(pipeline); RCLCPP_DEBUG(node->get_logger(), "Node %s created", daiNodeName.c_str()); } @@ -58,8 +58,7 @@ void Mono::setXinXout(std::shared_ptr pipeline) { void Mono::setupQueues(std::shared_ptr device) { if(ph->getParam("i_publish_topic")) { - auto tfPrefix = getTFPrefix( - utils::getSocketName(static_cast(ph->getParam("i_board_socket_id")), device->getConnectedCameraFeatures())); + auto tfPrefix = getTFPrefix(utils::getSocketName(static_cast(ph->getParam("i_board_socket_id")))); imageConverter = std::make_unique(tfPrefix + "_camera_optical_frame", false, ph->getParam("i_get_base_device_timestamp")); imageConverter->setUpdateRosBaseTimeOnToRosMsg(ph->getParam("i_update_ros_base_time_on_ros_msg")); @@ -70,6 +69,9 @@ void Mono::setupQueues(std::shared_ptr device) { auto offset = static_cast(ph->getParam("i_exposure_offset")); imageConverter->addExposureOffset(offset); } + if(ph->getParam("i_reverse_stereo_socket_order")) { + imageConverter->reverseStereoSocketOrder(); + } infoManager = std::make_shared( getROSNode()->create_sub_node(std::string(getROSNode()->get_name()) + "/" + getName()).get(), "/" + getName()); if(ph->getParam("i_calibration_file").empty()) { diff --git a/depthai_ros_driver/src/dai_nodes/sensors/rgb.cpp b/depthai_ros_driver/src/dai_nodes/sensors/rgb.cpp index 6794c732..7286d19d 100644 --- a/depthai_ros_driver/src/dai_nodes/sensors/rgb.cpp +++ b/depthai_ros_driver/src/dai_nodes/sensors/rgb.cpp @@ -28,8 +28,8 @@ RGB::RGB(const std::string& daiNodeName, RCLCPP_DEBUG(node->get_logger(), "Creating node %s", daiNodeName.c_str()); setNames(); colorCamNode = pipeline->create(); - ph = std::make_unique(node, daiNodeName); - ph->declareParams(colorCamNode, socket, sensor, publish); + ph = std::make_unique(node, daiNodeName, socket); + ph->declareParams(colorCamNode, sensor, publish); setXinXout(pipeline); RCLCPP_DEBUG(node->get_logger(), "Node %s created", daiNodeName.c_str()); } @@ -69,8 +69,7 @@ void RGB::setXinXout(std::shared_ptr pipeline) { void RGB::setupQueues(std::shared_ptr device) { if(ph->getParam("i_publish_topic")) { - auto tfPrefix = getTFPrefix( - utils::getSocketName(static_cast(ph->getParam("i_board_socket_id")), device->getConnectedCameraFeatures())); + auto tfPrefix = getTFPrefix(utils::getSocketName(static_cast(ph->getParam("i_board_socket_id")))); infoManager = std::make_shared( getROSNode()->create_sub_node(std::string(getROSNode()->get_name()) + "/" + getName()).get(), "/" + getName()); imageConverter = @@ -84,6 +83,10 @@ void RGB::setupQueues(std::shared_ptr device) { imageConverter->addExposureOffset(offset); } + if(ph->getParam("i_reverse_stereo_socket_order")) { + imageConverter->reverseStereoSocketOrder(); + } + if(ph->getParam("i_calibration_file").empty()) { infoManager->setCameraInfo(sensor_helpers::getCalibInfo(getROSNode()->get_logger(), *imageConverter, @@ -123,7 +126,7 @@ 
void RGB::setupQueues(std::shared_ptr device) { previewInfoManager = std::make_shared( getROSNode()->create_sub_node(std::string(getROSNode()->get_name()) + "/" + previewQName).get(), previewQName); - auto tfPrefix = getTFPrefix(getName()); + auto tfPrefix = getTFPrefix(utils::getSocketName(static_cast(ph->getParam("i_board_socket_id")))); imageConverter = std::make_unique(tfPrefix + "_camera_optical_frame", false); imageConverter->setUpdateRosBaseTimeOnToRosMsg(ph->getParam("i_update_ros_base_time_on_ros_msg")); if(ph->getParam("i_calibration_file").empty()) { diff --git a/depthai_ros_driver/src/dai_nodes/sensors/sensor_helpers.cpp b/depthai_ros_driver/src/dai_nodes/sensors/sensor_helpers.cpp index 9b7af0d8..40a39bf0 100644 --- a/depthai_ros_driver/src/dai_nodes/sensors/sensor_helpers.cpp +++ b/depthai_ros_driver/src/dai_nodes/sensors/sensor_helpers.cpp @@ -10,19 +10,61 @@ namespace depthai_ros_driver { namespace dai_nodes { namespace sensor_helpers { -std::vector availableSensors{{"IMX378", "4k", {"12mp", "4k"}, true}, - {"OV9282", "800p", {"800p", "720p", "400p"}, false}, - {"OV9782", "800p", {"800p", "720p", "400p"}, true}, - {"OV9281", "800p", {"800p", "720p", "400p"}, true}, - {"IMX214", "1080p", {"13mp", "12mp", "4k", "1080p"}, true}, - {"IMX412", "1080p", {"13mp", "12mp", "4k", "1080p"}, true}, - {"OV7750", "480p", {"480p", "400p"}, false}, - {"OV7251", "480p", {"480p", "400p"}, false}, - {"IMX477", "1080p", {"12mp", "4k", "1080p"}, true}, - {"IMX577", "1080p", {"12mp", "4k", "1080p"}, true}, - {"AR0234", "1200p", {"1200p"}, true}, - {"IMX582", "4k", {"48mp", "12mp", "4k"}, true}, - {"LCM48", "4k", {"48mp", "12mp", "4k"}, true}}; +std::vector availableSensors = {{"IMX378", "1080P", {"12MP", "4K", "1080P"}, true}, + {"OV9282", "800P", {"800P", "720P", "400P"}, false}, + {"OV9782", "800P", {"800P", "720P", "400P"}, true}, + {"OV9281", "800P", {"800P", "720P", "400P"}, true}, + {"IMX214", "1080P", {"13MP", "12MP", "4K", "1080P"}, true}, + {"IMX412", "1080P", {"13MP", "12MP", "4K", "1080P"}, true}, + {"OV7750", "480P", {"480P", "400P"}, false}, + {"OV7251", "480P", {"480P", "400P"}, false}, + {"IMX477", "1080P", {"12MP", "4K", "1080P"}, true}, + {"IMX577", "1080P", {"12MP", "4K", "1080P"}, true}, + {"AR0234", "1200P", {"1200P"}, true}, + {"IMX582", "4K", {"48MP", "12MP", "4K"}, true}, + {"LCM48", "4K", {"48MP", "12MP", "4K"}, true}}; +const std::unordered_map socketNameMap = { + {dai::CameraBoardSocket::AUTO, "rgb"}, + {dai::CameraBoardSocket::CAM_A, "rgb"}, + {dai::CameraBoardSocket::CAM_B, "left"}, + {dai::CameraBoardSocket::CAM_C, "right"}, + {dai::CameraBoardSocket::CAM_D, "left_back"}, + {dai::CameraBoardSocket::CAM_E, "right_back"}, +}; +const std::unordered_map monoResolutionMap = { + {"400P", dai::MonoCameraProperties::SensorResolution::THE_400_P}, + {"480P", dai::MonoCameraProperties::SensorResolution::THE_480_P}, + {"720P", dai::MonoCameraProperties::SensorResolution::THE_720_P}, + {"800P", dai::MonoCameraProperties::SensorResolution::THE_800_P}, + {"1200P", dai::MonoCameraProperties::SensorResolution::THE_1200_P}, +}; + +const std::unordered_map rgbResolutionMap = { + {"720P", dai::ColorCameraProperties::SensorResolution::THE_720_P}, + {"1080P", dai::ColorCameraProperties::SensorResolution::THE_1080_P}, + {"4K", dai::ColorCameraProperties::SensorResolution::THE_4_K}, + {"12MP", dai::ColorCameraProperties::SensorResolution::THE_12_MP}, + {"13MP", dai::ColorCameraProperties::SensorResolution::THE_13_MP}, + {"800P", dai::ColorCameraProperties::SensorResolution::THE_800_P}, + 
{"1200P", dai::ColorCameraProperties::SensorResolution::THE_1200_P}, + {"5MP", dai::ColorCameraProperties::SensorResolution::THE_5_MP}, + {"4000x3000", dai::ColorCameraProperties::SensorResolution::THE_4000X3000}, + {"5312X6000", dai::ColorCameraProperties::SensorResolution::THE_5312X6000}, + {"48MP", dai::ColorCameraProperties::SensorResolution::THE_48_MP}, + {"1440X1080", dai::ColorCameraProperties::SensorResolution::THE_1440X1080}}; + +const std::unordered_map fSyncModeMap = { + {"OFF", dai::CameraControl::FrameSyncMode::OFF}, + {"OUTPUT", dai::CameraControl::FrameSyncMode::OUTPUT}, + {"INPUT", dai::CameraControl::FrameSyncMode::INPUT}, +}; +const std::unordered_map cameraImageOrientationMap = { + {"NORMAL", dai::CameraImageOrientation::NORMAL}, + {"ROTATE_180_DEG", dai::CameraImageOrientation::ROTATE_180_DEG}, + {"AUTO", dai::CameraImageOrientation::AUTO}, + {"HORIZONTAL_MIRROR", dai::CameraImageOrientation::HORIZONTAL_MIRROR}, + {"VERTICAL_FLIP", dai::CameraImageOrientation::VERTICAL_FLIP}, +}; void basicCameraPub(const std::string& /*name*/, const std::shared_ptr& data, diff --git a/depthai_ros_driver/src/dai_nodes/sensors/sensor_wrapper.cpp b/depthai_ros_driver/src/dai_nodes/sensors/sensor_wrapper.cpp index f91a8480..4f748b8f 100644 --- a/depthai_ros_driver/src/dai_nodes/sensors/sensor_wrapper.cpp +++ b/depthai_ros_driver/src/dai_nodes/sensors/sensor_wrapper.cpp @@ -21,7 +21,7 @@ SensorWrapper::SensorWrapper(const std::string& daiNodeName, bool publish) : BaseNode(daiNodeName, node, pipeline) { RCLCPP_DEBUG(node->get_logger(), "Creating node %s base", daiNodeName.c_str()); - ph = std::make_unique(node, daiNodeName); + ph = std::make_unique(node, daiNodeName, socket); if(ph->getParam("i_simulate_from_topic")) { std::string topicName = ph->getParam("i_simulated_topic_name"); diff --git a/depthai_ros_driver/src/dai_nodes/stereo.cpp b/depthai_ros_driver/src/dai_nodes/stereo.cpp index c8e49fb7..347000e2 100644 --- a/depthai_ros_driver/src/dai_nodes/stereo.cpp +++ b/depthai_ros_driver/src/dai_nodes/stereo.cpp @@ -13,6 +13,7 @@ #include "depthai_ros_driver/dai_nodes/sensors/sensor_helpers.hpp" #include "depthai_ros_driver/dai_nodes/sensors/sensor_wrapper.hpp" #include "depthai_ros_driver/param_handlers/stereo_param_handler.hpp" +#include "depthai_ros_driver/utils.hpp" #include "image_transport/camera_publisher.hpp" #include "image_transport/image_transport.hpp" #include "rclcpp/node.hpp" @@ -40,10 +41,14 @@ Stereo::Stereo(const std::string& daiNodeName, continue; } } + RCLCPP_DEBUG(node->get_logger(), + "Creating stereo node with left sensor %s and right sensor %s", + utils::getSocketName(leftSensInfo.socket).c_str(), + utils::getSocketName(rightSensInfo.socket).c_str()); + left = std::make_unique(utils::getSocketName(leftSensInfo.socket), node, pipeline, device, leftSensInfo.socket, false); + right = std::make_unique(utils::getSocketName(rightSensInfo.socket), node, pipeline, device, rightSensInfo.socket, false); stereoCamNode = pipeline->create(); - left = std::make_unique(leftSensInfo.name, node, pipeline, device, leftSensInfo.socket, false); - right = std::make_unique(rightSensInfo.name, node, pipeline, device, rightSensInfo.socket, false); - ph->declareParams(stereoCamNode, features); + ph->declareParams(stereoCamNode); setXinXout(pipeline); left->link(stereoCamNode->left); right->link(stereoCamNode->right); @@ -118,7 +123,8 @@ void Stereo::setupRectQueue(std::shared_ptr device, rclcpp::Publisher::SharedPtr infoPub, image_transport::CameraPublisher& pubIT, bool isLeft) { - auto 
tfPrefix = getTFPrefix(sensorInfo.name); + auto sensorName = utils::getSocketName(sensorInfo.socket); + auto tfPrefix = getTFPrefix(sensorName); conv = std::make_unique(tfPrefix + "_camera_optical_frame", false, ph->getParam("i_get_base_device_timestamp")); conv->setUpdateRosBaseTimeOnToRosMsg(ph->getParam("i_update_ros_base_time_on_ros_msg")); bool lowBandwidth = ph->getParam(isLeft ? "i_left_rect_low_bandwidth" : "i_right_rect_low_bandwidth"); @@ -131,27 +137,34 @@ void Stereo::setupRectQueue(std::shared_ptr device, conv->addExposureOffset(offset); } im = std::make_shared( - getROSNode()->create_sub_node(std::string(getROSNode()->get_name()) + "/" + sensorInfo.name).get(), "/rect"); - + getROSNode()->create_sub_node(std::string(getROSNode()->get_name()) + "/" + sensorName).get(), "/rect"); + if(ph->getParam("i_reverse_stereo_socket_order")) { + conv->reverseStereoSocketOrder(); + } auto info = sensor_helpers::getCalibInfo(getROSNode()->get_logger(), *conv, device, sensorInfo.socket, - ph->getOtherNodeParam(sensorInfo.name, "i_width"), - ph->getOtherNodeParam(sensorInfo.name, "i_height")); + ph->getOtherNodeParam(sensorName, "i_width"), + ph->getOtherNodeParam(sensorName, "i_height")); for(auto& d : info.d) { d = 0.0; } + for(auto& r : info.r) { + r = 0.0; + } + info.r[0] = info.r[4] = info.r[8] = 1.0; + im->setCameraInfo(info); - q = device->getOutputQueue(queueName, ph->getOtherNodeParam(sensorInfo.name, "i_max_q_size"), false); + q = device->getOutputQueue(queueName, ph->getOtherNodeParam(sensorName, "i_max_q_size"), false); // if publish synced pair is set to true then we skip individual publishing of left and right rectified frames bool addCallback = !ph->getParam("i_publish_synced_rect_pair"); if(ipcEnabled()) { - pub = getROSNode()->create_publisher("~/" + sensorInfo.name + "/image_rect", 10); + pub = getROSNode()->create_publisher("~/" + sensorName + "/image_rect", 10); infoPub = getROSNode()->create_publisher("~/" + getName() + "/camera_info", 10); if(addCallback) { q->addCallback(std::bind(sensor_helpers::splitPub, @@ -164,7 +177,7 @@ void Stereo::setupRectQueue(std::shared_ptr device, ph->getParam("i_enable_lazy_publisher"))); } } else { - pubIT = image_transport::create_camera_publisher(getROSNode(), "~/" + sensorInfo.name + "/image_rect"); + pubIT = image_transport::create_camera_publisher(getROSNode(), "~/" + sensorName + "/image_rect"); if(addCallback) { q->addCallback(std::bind( sensor_helpers::cameraPub, std::placeholders::_1, std::placeholders::_2, *conv, pubIT, im, ph->getParam("i_enable_lazy_publisher"))); @@ -185,20 +198,20 @@ void Stereo::setupStereoQueue(std::shared_ptr device) { if(ph->getParam("i_align_depth")) { tfPrefix = getTFPrefix(ph->getParam("i_socket_name")); } else { - tfPrefix = getTFPrefix(rightSensInfo.name); + tfPrefix = getTFPrefix(utils::getSocketName(rightSensInfo.socket).c_str()); } stereoConv = std::make_unique(tfPrefix + "_camera_optical_frame", false, ph->getParam("i_get_base_device_timestamp")); stereoConv->setUpdateRosBaseTimeOnToRosMsg(ph->getParam("i_update_ros_base_time_on_ros_msg")); if(ph->getParam("i_low_bandwidth")) { stereoConv->convertFromBitstream(dai::RawImgFrame::Type::RAW8); } - if(!ph->getParam("i_output_disparity")) { - stereoConv->convertDispToDepth(); - } if(ph->getParam("i_add_exposure_offset")) { auto offset = static_cast(ph->getParam("i_exposure_offset")); stereoConv->addExposureOffset(offset); } + if(ph->getParam("i_reverse_stereo_socket_order")) { + stereoConv->reverseStereoSocketOrder(); + } stereoIM = std::make_shared( 
getROSNode()->create_sub_node(std::string(getROSNode()->get_name()) + "/" + getName()).get(), "/" + getName()); auto info = sensor_helpers::getCalibInfo(getROSNode()->get_logger(), @@ -208,7 +221,13 @@ void Stereo::setupStereoQueue(std::shared_ptr device) { ph->getParam("i_width"), ph->getParam("i_height")); auto calibHandler = device->readCalibration(); - + if(!ph->getParam("i_output_disparity")) { + if(ph->getParam("i_reverse_stereo_socket_order")) { + stereoConv->convertDispToDepth(calibHandler.getBaselineDistance(leftSensInfo.socket, rightSensInfo.socket, false)); + } else { + stereoConv->convertDispToDepth(calibHandler.getBaselineDistance(rightSensInfo.socket, leftSensInfo.socket, false)); + } + } // remove distortion since image is rectified for(auto& d : info.d) { d = 0.0; @@ -217,7 +236,6 @@ void Stereo::setupStereoQueue(std::shared_ptr device) { r = 0.0; } info.r[0] = info.r[4] = info.r[8] = 1.0; - info.p[3] = calibHandler.getBaselineDistance(leftSensInfo.socket, rightSensInfo.socket); stereoIM->setCameraInfo(info); stereoQ = device->getOutputQueue(stereoQName, ph->getParam("i_max_q_size"), false); if(ipcEnabled()) { diff --git a/depthai_ros_driver/src/param_handlers/sensor_param_handler.cpp b/depthai_ros_driver/src/param_handlers/sensor_param_handler.cpp index 088b4e08..386be4af 100644 --- a/depthai_ros_driver/src/param_handlers/sensor_param_handler.cpp +++ b/depthai_ros_driver/src/param_handlers/sensor_param_handler.cpp @@ -11,12 +11,12 @@ namespace depthai_ros_driver { namespace param_handlers { -SensorParamHandler::SensorParamHandler(rclcpp::Node* node, const std::string& name) : BaseParamHandler(node, name) { - declareCommonParams(); +SensorParamHandler::SensorParamHandler(rclcpp::Node* node, const std::string& name, dai::CameraBoardSocket socket) : BaseParamHandler(node, name) { + declareCommonParams(socket); }; SensorParamHandler::~SensorParamHandler() = default; -void SensorParamHandler::declareCommonParams() { +void SensorParamHandler::declareCommonParams(dai::CameraBoardSocket socket) { declareAndLogParam("i_max_q_size", 30); declareAndLogParam("i_low_bandwidth", false); declareAndLogParam("i_low_bandwidth_quality", 50); @@ -25,42 +25,32 @@ void SensorParamHandler::declareCommonParams() { declareAndLogParam("i_simulated_topic_name", ""); declareAndLogParam("i_disable_node", false); declareAndLogParam("i_get_base_device_timestamp", false); - declareAndLogParam("i_board_socket_id", 0); + socketID = static_cast(declareAndLogParam("i_board_socket_id", static_cast(socket), 0)); declareAndLogParam("i_update_ros_base_time_on_ros_msg", false); declareAndLogParam("i_enable_feature_tracker", false); declareAndLogParam("i_enable_lazy_publisher", true); declareAndLogParam("i_add_exposure_offset", false); declareAndLogParam("i_exposure_offset", 0); - fSyncModeMap = { - {"OFF", dai::CameraControl::FrameSyncMode::OFF}, - {"OUTPUT", dai::CameraControl::FrameSyncMode::OUTPUT}, - {"INPUT", dai::CameraControl::FrameSyncMode::INPUT}, - }; - cameraImageOrientationMap = { - {"NORMAL", dai::CameraImageOrientation::NORMAL}, - {"ROTATE_180_DEG", dai::CameraImageOrientation::ROTATE_180_DEG}, - {"AUTO", dai::CameraImageOrientation::AUTO}, - {"HORIZONTAL_MIRROR", dai::CameraImageOrientation::HORIZONTAL_MIRROR}, - {"VERTICAL_FLIP", dai::CameraImageOrientation::VERTICAL_FLIP}, - }; + declareAndLogParam("i_reverse_stereo_socket_order", false); } -void SensorParamHandler::declareParams(std::shared_ptr monoCam, - dai::CameraBoardSocket socket, - dai_nodes::sensor_helpers::ImageSensor, - bool publish) { 
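Worth calling out from the stereo changes above: the disparity-to-depth conversion now takes an explicit baseline, and the order of the two sockets passed to `getBaselineDistance` follows the new `i_reverse_stereo_socket_order` parameter. Below is a minimal sketch of that decision, based only on the calls already visible in this hunk; the free function and its name are illustrative, not driver API.

```cpp
// Illustrative helper, not part of the patch: pick the baseline used by
// ImageConverter::convertDispToDepth() depending on i_reverse_stereo_socket_order.
#include <depthai/depthai.hpp>

float stereoBaseline(dai::CalibrationHandler& calib,
                     dai::CameraBoardSocket leftSocket,
                     dai::CameraBoardSocket rightSocket,
                     bool reverseSocketOrder) {
    // By default the right socket is passed first (mirroring the else-branch above);
    // enabling i_reverse_stereo_socket_order swaps the argument order so the depth
    // scaling matches the reversed CameraInfo convention.
    return reverseSocketOrder ? calib.getBaselineDistance(leftSocket, rightSocket, false)
                              : calib.getBaselineDistance(rightSocket, leftSocket, false);
}
```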
- monoResolutionMap = { - {"400", dai::MonoCameraProperties::SensorResolution::THE_400_P}, - {"480", dai::MonoCameraProperties::SensorResolution::THE_480_P}, - {"720", dai::MonoCameraProperties::SensorResolution::THE_720_P}, - {"800", dai::MonoCameraProperties::SensorResolution::THE_800_P}, - {"1200", dai::MonoCameraProperties::SensorResolution::THE_1200_P}, - }; - declareAndLogParam("i_board_socket_id", static_cast(socket), true); - monoCam->setBoardSocket(socket); +void SensorParamHandler::declareParams(std::shared_ptr monoCam, dai_nodes::sensor_helpers::ImageSensor sensor, bool publish) { + monoCam->setBoardSocket(socketID); monoCam->setFps(declareAndLogParam("i_fps", 30.0)); declareAndLogParam("i_publish_topic", publish); - monoCam->setResolution(utils::getValFromMap(declareAndLogParam("i_resolution", "720"), monoResolutionMap)); + auto resString = declareAndLogParam("i_resolution", sensor.defaultResolution); + + // if resolution not in allowed resolutions, use default + if(std::find(sensor.allowedResolutions.begin(), sensor.allowedResolutions.end(), resString) == sensor.allowedResolutions.end()) { + RCLCPP_WARN(getROSNode()->get_logger(), + "Resolution %s not supported by sensor %s. Using default resolution %s", + resString.c_str(), + sensor.name.c_str(), + sensor.defaultResolution.c_str()); + resString = sensor.defaultResolution; + } + + monoCam->setResolution(utils::getValFromMap(resString, dai_nodes::sensor_helpers::monoResolutionMap)); declareAndLogParam("i_width", monoCam->getResolutionWidth()); declareAndLogParam("i_height", monoCam->getResolutionHeight()); size_t iso = declareAndLogParam("r_iso", 800, getRangedIntDescriptor(100, 1600)); @@ -70,7 +60,8 @@ void SensorParamHandler::declareParams(std::shared_ptr mo monoCam->initialControl.setManualExposure(exposure, iso); } if(declareAndLogParam("i_fsync_continuous", false)) { - monoCam->initialControl.setFrameSyncMode(utils::getValFromMap(declareAndLogParam("i_fsync_mode", "INPUT"), fSyncModeMap)); + monoCam->initialControl.setFrameSyncMode( + utils::getValFromMap(declareAndLogParam("i_fsync_mode", "INPUT"), dai_nodes::sensor_helpers::fSyncModeMap)); } if(declareAndLogParam("i_fsync_trigger", false)) { monoCam->initialControl.setExternalTrigger(declareAndLogParam("i_num_frames_burst", 1), declareAndLogParam("i_num_frames_discard", 0)); @@ -78,29 +69,14 @@ void SensorParamHandler::declareParams(std::shared_ptr mo if(declareAndLogParam("i_set_isp3a_fps", false)) { monoCam->setIsp3aFps(declareAndLogParam("i_isp3a_fps", 10)); } - monoCam->setImageOrientation(utils::getValFromMap(declareAndLogParam("i_sensor_img_orientation", "NORMAL"), cameraImageOrientationMap)); + monoCam->setImageOrientation( + utils::getValFromMap(declareAndLogParam("i_sensor_img_orientation", "NORMAL"), dai_nodes::sensor_helpers::cameraImageOrientationMap)); } -void SensorParamHandler::declareParams(std::shared_ptr colorCam, - dai::CameraBoardSocket socket, - dai_nodes::sensor_helpers::ImageSensor sensor, - bool publish) { - rgbResolutionMap = {{"720p", dai::ColorCameraProperties::SensorResolution::THE_720_P}, - {"1080p", dai::ColorCameraProperties::SensorResolution::THE_1080_P}, - {"4k", dai::ColorCameraProperties::SensorResolution::THE_4_K}, - {"12mp", dai::ColorCameraProperties::SensorResolution::THE_12_MP}, - {"13mp", dai::ColorCameraProperties::SensorResolution::THE_13_MP}, - {"800p", dai::ColorCameraProperties::SensorResolution::THE_800_P}, - {"1200p", dai::ColorCameraProperties::SensorResolution::THE_1200_P}, - {"5MP", 
dai::ColorCameraProperties::SensorResolution::THE_5_MP}, - {"4000x3000", dai::ColorCameraProperties::SensorResolution::THE_4000X3000}, - {"5312X6000", dai::ColorCameraProperties::SensorResolution::THE_5312X6000}, - {"48mp", dai::ColorCameraProperties::SensorResolution::THE_48_MP}, - {"1440X1080", dai::ColorCameraProperties::SensorResolution::THE_1440X1080}}; +void SensorParamHandler::declareParams(std::shared_ptr colorCam, dai_nodes::sensor_helpers::ImageSensor sensor, bool publish) { declareAndLogParam("i_publish_topic", publish); - declareAndLogParam("i_board_socket_id", static_cast(socket)); + colorCam->setBoardSocket(socketID); declareAndLogParam("i_output_isp", true); declareAndLogParam("i_enable_preview", false); - colorCam->setBoardSocket(socket); colorCam->setFps(declareAndLogParam("i_fps", 30.0)); int preview_size = declareAndLogParam("i_preview_size", 300); int preview_width = declareAndLogParam("i_preview_width", preview_size); @@ -120,7 +96,7 @@ void SensorParamHandler::declareParams(std::shared_ptr c int width = colorCam->getResolutionWidth(); int height = colorCam->getResolutionHeight(); - colorCam->setResolution(utils::getValFromMap(resString, rgbResolutionMap)); + colorCam->setResolution(utils::getValFromMap(resString, dai_nodes::sensor_helpers::rgbResolutionMap)); colorCam->setInterleaved(declareAndLogParam("i_interleaved", false)); if(declareAndLogParam("i_set_isp_scale", true)) { @@ -154,7 +130,8 @@ void SensorParamHandler::declareParams(std::shared_ptr c colorCam->initialControl.setManualWhiteBalance(whitebalance); } if(declareAndLogParam("i_fsync_continuous", false)) { - colorCam->initialControl.setFrameSyncMode(utils::getValFromMap(declareAndLogParam("i_fsync_mode", "INPUT"), fSyncModeMap)); + colorCam->initialControl.setFrameSyncMode( + utils::getValFromMap(declareAndLogParam("i_fsync_mode", "INPUT"), dai_nodes::sensor_helpers::fSyncModeMap)); } if(declareAndLogParam("i_fsync_trigger", false)) { colorCam->initialControl.setExternalTrigger(declareAndLogParam("i_num_frames_burst", 1), declareAndLogParam("i_num_frames_discard", 0)); @@ -162,7 +139,8 @@ void SensorParamHandler::declareParams(std::shared_ptr c if(declareAndLogParam("i_set_isp3a_fps", false)) { colorCam->setIsp3aFps(declareAndLogParam("i_isp3a_fps", 10)); } - colorCam->setImageOrientation(utils::getValFromMap(declareAndLogParam("i_sensor_img_orientation", "NORMAL"), cameraImageOrientationMap)); + colorCam->setImageOrientation( + utils::getValFromMap(declareAndLogParam("i_sensor_img_orientation", "NORMAL"), dai_nodes::sensor_helpers::cameraImageOrientationMap)); } dai::CameraControl SensorParamHandler::setRuntimeParams(const std::vector& params) { dai::CameraControl ctrl; diff --git a/depthai_ros_driver/src/param_handlers/stereo_param_handler.cpp b/depthai_ros_driver/src/param_handlers/stereo_param_handler.cpp index cf2fc505..2ec0d1b2 100644 --- a/depthai_ros_driver/src/param_handlers/stereo_param_handler.cpp +++ b/depthai_ros_driver/src/param_handlers/stereo_param_handler.cpp @@ -51,7 +51,7 @@ void StereoParamHandler::updateSocketsFromParams(dai::CameraBoardSocket& left, d right = static_cast(newRightS); } -void StereoParamHandler::declareParams(std::shared_ptr stereo, const std::vector& camFeatures) { +void StereoParamHandler::declareParams(std::shared_ptr stereo) { declareAndLogParam("i_max_q_size", 30); declareAndLogParam("i_low_bandwidth", false); declareAndLogParam("i_low_bandwidth_quality", 50); @@ -62,6 +62,7 @@ void StereoParamHandler::declareParams(std::shared_ptr s 
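The `i_resolution` handling above now validates the requested string against the sensor's `allowedResolutions` and falls back to `defaultResolution` with a warning. The same pattern, reduced to a standalone helper (function name and signature are illustrative only):

```cpp
// Illustrative fallback helper mirroring the i_resolution check above.
#include <algorithm>
#include <string>
#include <vector>

std::string resolutionOrDefault(const std::string& requested,
                                const std::vector<std::string>& allowed,
                                const std::string& fallback) {
    // Unsupported resolutions map to the sensor default here; the driver
    // additionally logs a warning before falling back.
    if(std::find(allowed.begin(), allowed.end(), requested) == allowed.end()) {
        return fallback;
    }
    return requested;
}
```

For example, `resolutionOrDefault("1200P", {"800P", "720P", "400P"}, "800P")` resolves to `"800P"`.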
declareAndLogParam("i_add_exposure_offset", false); declareAndLogParam("i_exposure_offset", 0); declareAndLogParam("i_enable_lazy_publisher", true); + declareAndLogParam("i_reverse_stereo_socket_order", false); declareAndLogParam("i_publish_synced_rect_pair", false); declareAndLogParam("i_publish_left_rect", false); @@ -84,7 +85,7 @@ void StereoParamHandler::declareParams(std::shared_ptr s auto socket = static_cast(declareAndLogParam("i_board_socket_id", static_cast(dai::CameraBoardSocket::CAM_A))); std::string socketName; if(declareAndLogParam("i_align_depth", true)) { - socketName = utils::getSocketName(socket, camFeatures); + socketName = utils::getSocketName(socket); try { width = getROSNode()->get_parameter(socketName + ".i_width").as_int(); height = getROSNode()->get_parameter(socketName + ".i_height").as_int(); @@ -105,6 +106,9 @@ void StereoParamHandler::declareParams(std::shared_ptr s if(declareAndLogParam("i_enable_distortion_correction", false)) { stereo->enableDistortionCorrection(true); } + if(declareAndLogParam("i_set_disparity_to_depth_use_spec_translation", false)) { + stereo->setDisparityToDepthUseSpecTranslation(true); + } stereo->initialConfig.setBilateralFilterSigma(declareAndLogParam("i_bilateral_sigma", 0)); stereo->initialConfig.setLeftRightCheckThreshold(declareAndLogParam("i_lrc_threshold", 10)); @@ -175,4 +179,4 @@ dai::CameraControl StereoParamHandler::setRuntimeParams(const std::vector> RGB::createPipeline(rclcpp::No break; } case NNType::Spatial: { - RCLCPP_WARN(node->get_logger(), "Spatial NN selected, but configuration is RGB."); + RCLCPP_WARN(node->get_logger(), "Spatial NN selected, but configuration is RGB. Please change camera.i_nn_type parameter to RGB."); } default: break; @@ -93,7 +93,7 @@ std::vector> RGBStereo::createPipeline(rclc break; } case NNType::Spatial: { - RCLCPP_WARN(node->get_logger(), "Spatial NN selected, but configuration is RGBStereo."); + RCLCPP_WARN(node->get_logger(), "Spatial NN selected, but configuration is RGBStereo. Please change camera.i_nn_type parameter to RGB."); } default: break; @@ -130,13 +130,9 @@ std::vector> CamArray::createPipeline(rclcp std::vector> daiNodes; for(auto& feature : device->getConnectedCameraFeatures()) { - if(feature.name == "color") { - auto daiNode = std::make_unique("rgb", node, pipeline, device, feature.socket); - daiNodes.push_back(std::move(daiNode)); - } else { - auto daiNode = std::make_unique(feature.name, node, pipeline, device, feature.socket); - daiNodes.push_back(std::move(daiNode)); - } + auto name = utils::getSocketName(feature.socket); + auto daiNode = std::make_unique(name, node, pipeline, device, feature.socket); + daiNodes.push_back(std::move(daiNode)); }; return daiNodes; } diff --git a/depthai_ros_driver/src/pipeline/pipeline_generator.cpp b/depthai_ros_driver/src/pipeline/pipeline_generator.cpp index e9948a14..b26e6039 100644 --- a/depthai_ros_driver/src/pipeline/pipeline_generator.cpp +++ b/depthai_ros_driver/src/pipeline/pipeline_generator.cpp @@ -23,9 +23,8 @@ std::vector> PipelineGenerator::createPipel // Check if one of the default types. 
try { std::string pTypeUpCase = utils::getUpperCaseStr(pipelineType); - auto pType = utils::getValFromMap(pTypeUpCase, pipelineTypeMap); - pType = validatePipeline(node, pType, device->getCameraSensorNames().size()); - pluginType = utils::getValFromMap(pTypeUpCase, pluginTypeMap); + auto pTypeValidated = validatePipeline(node, pTypeUpCase, device->getCameraSensorNames().size()); + pluginType = utils::getValFromMap(pTypeValidated, pluginTypeMap); } catch(std::out_of_range& e) { RCLCPP_DEBUG(node->get_logger(), "Pipeline type [%s] not found in base types, trying to load as a plugin.", pipelineType.c_str()); } @@ -52,22 +51,20 @@ std::vector> PipelineGenerator::createPipel return daiNodes; } -PipelineType PipelineGenerator::validatePipeline(rclcpp::Node* node, PipelineType type, int sensorNum) { +std::string PipelineGenerator::validatePipeline(rclcpp::Node* node, const std::string& typeStr, int sensorNum) { + auto pType = utils::getValFromMap(typeStr, pipelineTypeMap); if(sensorNum == 1) { - if(type != PipelineType::RGB) { + if(pType != PipelineType::RGB) { RCLCPP_ERROR(node->get_logger(), "Wrong pipeline chosen for camera as it has only one sensor. Switching to RGB."); - return PipelineType::RGB; + return "RGB"; } } else if(sensorNum == 2) { - if(type != PipelineType::Stereo && type != PipelineType::Depth) { - RCLCPP_ERROR(node->get_logger(), "Wrong pipeline chosen for camera as it has only stereo pair. Switching to Stereo."); - return PipelineType::Stereo; + if(pType != PipelineType::Stereo && pType != PipelineType::Depth) { + RCLCPP_ERROR(node->get_logger(), "Wrong pipeline chosen for camera as it has only stereo pair. Switching to Depth."); + return "DEPTH"; } - } else if(sensorNum > 3 && type != PipelineType::CamArray) { - RCLCPP_ERROR(node->get_logger(), "For cameras with more than three sensors you can only use CamArray. 
Switching to CamArray.");
-        return PipelineType::CamArray;
     }
-    return type;
+    return typeStr;
 }
 } // namespace pipeline_gen
 } // namespace depthai_ros_driver
\ No newline at end of file
diff --git a/depthai_ros_driver/src/utils.cpp b/depthai_ros_driver/src/utils.cpp
index 6d50a2c5..d4813e55 100644
--- a/depthai_ros_driver/src/utils.cpp
+++ b/depthai_ros_driver/src/utils.cpp
@@ -2,6 +2,7 @@
 #include "depthai-shared/common/CameraBoardSocket.hpp"
 #include "depthai-shared/common/CameraFeatures.hpp"
+#include "depthai_ros_driver/dai_nodes/sensors/sensor_helpers.hpp"
 namespace depthai_ros_driver {
 namespace utils {
 std::string getUpperCaseStr(const std::string& string) {
@@ -9,19 +10,8 @@ std::string getUpperCaseStr(const std::string& string) {
     for(auto& c : upper) c = toupper(c);
     return upper;
 }
-std::string getSocketName(dai::CameraBoardSocket socket, std::vector<dai::CameraFeatures> camFeatures) {
-    std::string name;
-    for(auto& cam : camFeatures) {
-        if(cam.socket == socket) {
-            if(cam.name == "color" || cam.name == "center") {
-                name = "rgb";
-            } else {
-                name = cam.name;
-            }
-            return name;
-        }
-    }
-    throw std::runtime_error("Camera socket not found");
+std::string getSocketName(dai::CameraBoardSocket socket) {
+    return dai_nodes::sensor_helpers::socketNameMap.at(socket);
 }
 } // namespace utils
 } // namespace depthai_ros_driver
\ No newline at end of file
diff --git a/depthai_ros_msgs/CMakeLists.txt b/depthai_ros_msgs/CMakeLists.txt
index 0cea7ad0..8e8000ef 100644
--- a/depthai_ros_msgs/CMakeLists.txt
+++ b/depthai_ros_msgs/CMakeLists.txt
@@ -1,6 +1,6 @@
 cmake_minimum_required(VERSION 3.10.2) # CMake version in Ubuntu 18.04 LTS
-project(depthai_ros_msgs VERSION 2.8.0)
+project(depthai_ros_msgs VERSION 2.8.1)
 if(POLICY CMP0057)
     cmake_policy(SET CMP0057 NEW)
diff --git a/depthai_ros_msgs/package.xml b/depthai_ros_msgs/package.xml
index da69c6ad..b6d4d628 100644
--- a/depthai_ros_msgs/package.xml
+++ b/depthai_ros_msgs/package.xml
@@ -1,7 +1,7 @@
 depthai_ros_msgs
-  2.8.0
+  2.8.1
 Package to keep interface independent of the driver
 Sachin Guruswamy
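Finally, a compressed view of the reworked pipeline validation from `pipeline_generator.cpp` above: it now operates on the upper-cased type string and returns a (possibly corrected) string instead of an enum value. The enum and map below are reduced stand-ins for the driver's `PipelineType` and `pipelineTypeMap`; logging and the plugin fallback are omitted.

```cpp
// Illustrative, reduced version of PipelineGenerator::validatePipeline after this patch.
#include <string>
#include <unordered_map>

enum class PipelineType { RGB, RGBD, RGBStereo, Stereo, Depth, CamArray };

const std::unordered_map<std::string, PipelineType> kPipelineTypes = {
    {"RGB", PipelineType::RGB},
    {"RGBD", PipelineType::RGBD},
    {"RGBSTEREO", PipelineType::RGBStereo},
    {"STEREO", PipelineType::Stereo},
    {"DEPTH", PipelineType::Depth},
    {"CAMARRAY", PipelineType::CamArray},
};

std::string validatePipelineType(const std::string& typeStr, int sensorNum) {
    // Unknown strings throw std::out_of_range; the caller catches this and tries to
    // load the pipeline as a plugin instead.
    const auto pType = kPipelineTypes.at(typeStr);
    if(sensorNum == 1 && pType != PipelineType::RGB) {
        return "RGB";  // single-sensor devices can only run an RGB pipeline
    }
    if(sensorNum == 2 && pType != PipelineType::Stereo && pType != PipelineType::Depth) {
        return "DEPTH";  // devices with only a stereo pair fall back to Depth
    }
    return typeStr;  // otherwise keep the requested type
}
```

For example, `validatePipelineType("RGBD", 2)` returns `"DEPTH"`, while `validatePipelineType("STEREO", 2)` keeps the requested type.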