diff --git a/.clang-format b/.clang-format new file mode 100644 index 00000000..817fd6d6 --- /dev/null +++ b/.clang-format @@ -0,0 +1,65 @@ +--- +BasedOnStyle: Google +AccessModifierOffset: -2 +ConstructorInitializerIndentWidth: 2 +AlignEscapedNewlinesLeft: false +AlignTrailingComments: true +AllowAllParametersOfDeclarationOnNextLine: false +AllowShortIfStatementsOnASingleLine: false +AllowShortLoopsOnASingleLine: false +AllowShortFunctionsOnASingleLine: None +AlwaysBreakTemplateDeclarations: true +AlwaysBreakBeforeMultilineStrings: true +BreakBeforeBinaryOperators: false +BreakBeforeTernaryOperators: false +BreakConstructorInitializersBeforeComma: true +BinPackParameters: true +ColumnLimit: 120 +ConstructorInitializerAllOnOneLineOrOnePerLine: true +DerivePointerBinding: false +PointerBindsToType: true +ExperimentalAutoDetectBinPacking: false +IndentCaseLabels: true +MaxEmptyLinesToKeep: 1 +NamespaceIndentation: None +ObjCSpaceBeforeProtocolList: true +PenaltyBreakBeforeFirstCallParameter: 19 +PenaltyBreakComment: 60 +PenaltyBreakString: 1 +PenaltyBreakFirstLessLess: 1000 +PenaltyExcessCharacter: 1000 +PenaltyReturnTypeOnItsOwnLine: 90 +SpacesBeforeTrailingComments: 2 +Cpp11BracedListStyle: false +Standard: Auto +IndentWidth: 2 +TabWidth: 2 +UseTab: Never +IndentFunctionDeclarationAfterType: false +SpacesInParentheses: false +SpacesInAngles: false +SpaceInEmptyParentheses: false +SpacesInCStyleCastParentheses: false +SpaceAfterControlStatementKeyword: true +SpaceBeforeAssignmentOperators: true +ContinuationIndentWidth: 4 +SortIncludes: false +SpaceAfterCStyleCast: false + +# Configure each individual brace in BraceWrapping +BreakBeforeBraces: Custom + +# Control of individual brace wrapping cases +BraceWrapping: { + AfterClass: 'true' + AfterControlStatement: 'true' + AfterEnum : 'true' + AfterFunction : 'true' + AfterNamespace : 'true' + AfterStruct : 'true' + AfterUnion : 'true' + BeforeCatch : 'true' + BeforeElse : 'true' + IndentBraces : 'false' +} +... 
diff --git a/.github/ISSUE_TEMPLATE/1_feature_request.yml b/.github/ISSUE_TEMPLATE/1_feature_request.yml new file mode 100644 index 00000000..7b161ecd --- /dev/null +++ b/.github/ISSUE_TEMPLATE/1_feature_request.yml @@ -0,0 +1,43 @@ +name: Feature request 🧭 +description: Suggest an idea for this project. +labels: "feature_request" +body: + - type: markdown + attributes: + value: | + # Welcome 👋 + + Thanks for taking the time to fill out this feature request module. + Please fill out each section below. This info allows Stereolabs developers to correctly evaluate your request. + + Useful Links: + - Documentation: https://www.stereolabs.com/docs/ + - Stereolabs support: https://support.stereolabs.com/hc/en-us/ + - type: checkboxes + attributes: + label: Preliminary Checks + description: Please make sure that you verify each checkbox and follow the instructions for them. + options: + - label: "This issue is not a duplicate. Before opening a new issue, please search existing issues." + required: true + - label: "This issue is not a question, bug report, or anything other than a feature request directly related to this project." + required: true + - type: textarea + attributes: + label: Proposal + description: "What would you like to have as a new feature?" + placeholder: "A clear and concise description of what you want to happen." + validations: + required: true + - type: textarea + attributes: + label: Use-Case + description: "How would this help you?" + placeholder: "Tell us more what you'd like to achieve." + validations: + required: false + - type: textarea + id: anything-else + attributes: + label: Anything else? + description: "Let us know if you have anything else to share" diff --git a/.github/ISSUE_TEMPLATE/2_bug_report.yml b/.github/ISSUE_TEMPLATE/2_bug_report.yml new file mode 100644 index 00000000..3deb72c7 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/2_bug_report.yml @@ -0,0 +1,87 @@ +name: Bug Report 🐛 +description: Something isn't working as expected? 
Report your bugs here. +labels: "bug" +body: + - type: markdown + attributes: + value: | + # Welcome 👋 + + Thanks for taking the time to fill out this bug report. + Please fill out each section below. This info allows Stereolabs developers to diagnose (and fix!) your issue as quickly as possible. Otherwise we might need to close the issue without e.g. clear reproduction steps. + + Bug reports also shouldn't be used for generic questions, please use the [Stereolabs Community forum](https://community.stereolabs.com/) instead. + + Useful Links: + - Documentation: https://www.stereolabs.com/docs/ + - Stereolabs support: https://support.stereolabs.com/hc/en-us/ + - type: checkboxes + attributes: + label: Preliminary Checks + description: Please make sure that you verify each checkbox and follow the instructions for them. + options: + - label: "This issue is not a duplicate. Before opening a new issue, please search existing issues." + required: true + - label: "This issue is not a question, feature request, or anything other than a bug report directly related to this project." + required: true + - type: textarea + attributes: + label: Description + description: Describe the issue that you're seeing. + placeholder: Be as precise as you can. Feel free to share screenshots, videos, or data. The more information you provide the easier it will be to provide you with a fast solution. + validations: + required: true + - type: textarea + attributes: + label: Steps to Reproduce + description: Clear steps describing how to reproduce the issue. + value: | + 1. + 2. + 3. + ... + validations: + required: true + - type: textarea + attributes: + label: Expected Result + description: Describe what you expected to happen. + validations: + required: true + - type: textarea + attributes: + label: Actual Result + description: Describe what actually happened. 
+ validations: + required: true + - type: dropdown + attributes: + label: ZED Camera model + description: What model of ZED camera are you using? + options: + - "ZED" + - "ZED Mini" + - "ZED2" + - "ZED2i" + validations: + required: true + - type: textarea + attributes: + label: Environment + render: shell + description: Useful information about your system. + placeholder: | + OS: Operating System + CPU: e.g. ARM + GPU: Nvidia Jetson Xavier NX + ZED SDK version: e.g. v4.0 + Other info: e.g. ROS Noetic + validations: + required: true + - type: textarea + attributes: + label: Anything else? + description: Please add any other information or comment that you think may be useful for solving the problem + placeholder: + validations: + required: false diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md deleted file mode 100644 index e4cbe9eb..00000000 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ /dev/null @@ -1,35 +0,0 @@ ---- -name: Bug report -about: Create a report to help us improve -title: "[BUG]" -labels: bug -assignees: Myzhar - ---- - -**Describe the bug** -A clear and concise description of what the bug is. - -**Device information:** - - OS: [e.g. Ubuntu] - - OS version: [e.g. 18.04] - - CPU: [e.g. i7, Jeston Nano, Jetson TX2] - - GPU Nvidia: [e.g. GTX1080, RTX1070] - - ZED SDK Version: [e.g. v3.1.2, v2.8.5] - - ROS Wrapper version: [e.g. v2.8.5, latest master] - -**To Reproduce** -Steps to reproduce the behavior: -1. Go to '...' -2. Click on '....' -3. Scroll down to '....' -4. See error - -**Expected behavior** -A clear and concise description of what you expected to happen. - -**Screenshots** -If applicable, add screenshots to help explain your problem. - -**Additional context** -Add any other context about the problem here. 
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 00000000..5b74d2eb --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,12 @@ +blank_issues_enabled: false +contact_links: + - name: Online Documentation + url: https://www.stereolabs.com/docs/ + about: Check out the Stereolabs documentation for answers to common questions. + - name: Stereolabs Community [Ask a question] + url: https://community.stereolabs.com/ + about: Ask questions & discuss with other users and Stereolabs developers. + - name: Stereolabs Twitter + url: https://twitter.com/Stereolabs3D + about: The official Stereolabs Twitter account to ask questions, comment our products and share your projects with the ZED community. + diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md deleted file mode 100644 index b24084f9..00000000 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ /dev/null @@ -1,20 +0,0 @@ ---- -name: Feature request -about: Suggest an idea for this project -title: "[Feature]" -labels: Feature request -assignees: Myzhar - ---- - -**Is your feature request related to a problem? Please describe.** -A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] - -**Describe the solution you'd like** -A clear and concise description of what you want to happen. - -**Describe alternatives you've considered** -A clear and concise description of any alternative solutions or features you've considered. - -**Additional context** -Add any other context or screenshots about the feature request here. 
diff --git a/.github/ISSUE_TEMPLATE/question.md b/.github/ISSUE_TEMPLATE/question.md deleted file mode 100644 index e66d3822..00000000 --- a/.github/ISSUE_TEMPLATE/question.md +++ /dev/null @@ -1,10 +0,0 @@ ---- -name: Question -about: Issue a question to developers and users -title: "[Question] " -labels: question -assignees: Myzhar - ---- - - diff --git a/.github/workflows/stale_issues.yml b/.github/workflows/stale_issues.yml new file mode 100644 index 00000000..9e01bd59 --- /dev/null +++ b/.github/workflows/stale_issues.yml @@ -0,0 +1,23 @@ +name: 'Stale issue handler' +on: + workflow_dispatch: + schedule: + - cron: '00 00 * * *' + +jobs: + stale: + runs-on: ubuntu-latest + steps: + - uses: actions/stale@main + id: stale + with: + stale-issue-message: 'This issue is stale because it has been open 30 days with no activity. Remove stale label or comment otherwise it will be automatically closed in 5 days' + stale-pr-message: 'This PR is stale because it has been open 30 days with no activity. 
Remove stale label or comment otherwise it will be automatically closed in 5 days' + days-before-stale: 30 + days-before-close: 5 + operations-per-run: 1500 + exempt-issue-labels: 'feature_request' + exempt-pr-labels: 'feature_request' + enable-statistics: 'true' + close-issue-label: 'closed_for_stale' + close-pr-label: 'closed_for_stale' diff --git a/.github/workflows/submodule_update.yml b/.github/workflows/submodule_update.yml new file mode 100644 index 00000000..5c38c8ab --- /dev/null +++ b/.github/workflows/submodule_update.yml @@ -0,0 +1,32 @@ +name: Submodule update (manual) + +on: + #schedule: + # # * is a special character in YAML so you have to quote this string + # - cron: '0 12 * * *' + workflow_dispatch: + +jobs: + submodule_update: + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v2 + + - name: Checkout submodules + run: git submodule update --init --recursive + + # Update references + - name: Git Submodule Update + run: | + git pull --recurse-submodules + git submodule update --remote --recursive + + - uses: stefanzweifel/git-auto-commit-action@v4 + with: + commit_message: Automatic submodule update + commit_user_name: bot-stereolabs + commit_user_email: bot-stereolabs@example.org + commit_author: bot-stereolabs + status_options: '--untracked-files=no' diff --git a/.gitignore b/.gitignore index 6732e720..4326ae65 100644 --- a/.gitignore +++ b/.gitignore @@ -37,3 +37,6 @@ Makefile* # QtCtreator CMake CMakeLists.txt.user* +#VSCode +*vscode* + diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml deleted file mode 100644 index f702a38b..00000000 --- a/.gitlab-ci.yml +++ /dev/null @@ -1,8 +0,0 @@ -image: stereolabs/zed:3.0-ros-cuda10.0-ubuntu18.04 - -build: - stage: build - before_script: - - cp $(pwd)/* /home/ros/src/ -Rf - script: - - cd /home/ros/ ; source "/opt/ros/melodic/setup.bash"; catkin_make diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 00000000..718bdaee --- /dev/null +++ b/.gitmodules @@ 
-0,0 +1,4 @@ +[submodule "zed-ros-interfaces"] + path = zed-ros-interfaces + url = https://github.com/stereolabs/zed-ros-interfaces.git + branch = main diff --git a/CHANGELOG.rst b/CHANGELOG.rst new file mode 100644 index 00000000..ff498829 --- /dev/null +++ b/CHANGELOG.rst @@ -0,0 +1,207 @@ +CHANGELOG +========= + +07-29-2024 +---------- +- Fix wrong dynamic parameters initialization + +05-04-2024 +---------- +- Fix compatibility with ZED SDK v4.1 +- Add support to Positional Tracking Gen 2 +- The parameter `pos_tracking_mode` can accept the new values `GEN_1` and `GEN_2`, instead of `STANDARD` and `QUALITY` +- Add support to `NEURAL+` depth mode. Value `NEURAL_PLUS` for the parameter `depth_mode` +- The annoying warning `Elaboration takes longer...` is now emitted only in DEBUG mode + +v4.0.8 +------ + +- Remove start_object_detection and stop_object_detection services +- Add enable_object_detection service that takes a bool as parameter to enable/disable the OD module (same behavior as in ROS 2 Wrapper) +- Add parameter 'object_detection/allow_reduced_precision_inference' +- Add parameter 'object_detection/prediction_timeout' +- The parameter 'object_detection/model' is no more an integer, but a string with the full name of the supported OD model +- Add pose and odometry status publishers +- Improve odometry and pose publishing and TF broadcasting +- Add ROS '.clang-format' +- Code refactoring +- Add 'set_roi' and 'reset_roi' services +- Add parameter 'pos_tracking/depth_min_range' +- Add parameter 'pos_tracking/set_as_static' +- Change the parameter 'general/resolution' to 'general/grab_resolution' and replace numeric values with strings +- Add 'general.svo_realtime' parameter +- Change 'general/verbose' to 'general/sdk_verbose' +- Add 'general/region_of_interest' parameter +- Change the parameter 'depth/quality' to 'depth/depth_mode' and replace numeric values with strings +- Improve the behavior of the "NO DEPTH" mode +- Remove the parameters 
'depth_downsample_factor' and 'img_downsample_factor' +- Add the parameter 'pub_resolution' and 'pub_downscale_factor' +- Add 'pos_tracking/set_gravity_as_origin' parameter. If 'true' align the positional tracking world to imu gravity measurement. Keep the yaw from the user initial pose. +- Add 'pos_tracking/pos_tracking_mode' parameter. Matches the ZED SDK setting: 'QUALITY', 'STANDARD' +- Fix the warning 'Elaboration takes longer [...]' +- 'pub_frame_rate' now controls the 'InitParameters::grab_compute_capping_fps' parameter of the ZED SDK instead of controlling the frequency of a parallel thread. It's not a Dynamic parameter anymore. +- Change 'general/camera_flip' parameter to string: 'AUTO', 'ON', 'OFF' +- Change 'general/verbose' from bool to integer + +v4.0.5 +------ +- Support for ZED SDK v4.0 +- Remove parameter 'object_detection.body_fitting' +- Remove parameter 'depth.sensing_mode' +- Remove parameter 'video.extrinsic_in_camera_frame' +- Skeleton Tracking is no more available. Migrate to ROS 2 Wrapper if you need it +- 'sensors' and 'object_detection' parameters are now in 'common.yaml' +- Move parameters 'general.resolution' and 'general.grab_frame_rate' to cameras yaml files to support the different configurations on ZED X and ZED X Mini. +- Remove support for ROS Melodic that reached EOL +- Add 1080p for ZED X and ZED X Mini + +v3.8.x +------ +- Fix the frame links of barometer, magnetometer, and temperature sensors for ZED2i +- Add parameter 'sensors/max_pub_rate' to set the maximum publishing frequency of sensors data +- Improve Sensors thread +- Fix wrong TF broadcasting when calling the 'set_pose', 'reset_tracking', and 'reset_odometry' services. Now the initial odometry is coherent with the new starting point. +- Add Plane Detection. See [ZED Documentation](https://www.stereolabs.com/docs/ros/plane_detection/) +- Fix TF timestamp issue in SVO mode +- Fix units for Atmospheric pressure data. 
Now the value is correctly published in 'Pascal' according to the [topic specification](http://docs.ros.org/en/melodic/api/sensor_msgs/html/msg/FluidPressure.html). + +v3.7.x +--------- +- Add support for the new Neural Depth mode +- Add support for head detection model +- Add support for sport-related object class +- Add support for X_MEDIUM neural network models +- Enable AI for ZED Mini +- Add new '_base_link' frame on the base of the camera to easily handle camera positioning on robots. Thx @civerachb-cpr +- Improve URDF by adding 3° slope for ZED and ZED2, X-offset for optical frames to correctly match the CMOS sensors position on the PCB, X-offset for mounting screw on ZED2i +- Add 'zed_macro.urdf.xacro' to be included by other 'xacro' file to easily integrate ZED cameras in the robot descriptions. See [PR #771](https://github.com/stereolabs/zed-ros-wrapper/pull/771) for details. Thx @civerachb-cpr +- New parameter 'save_area_memory_db_on_exit' to force Area Memory saving when the node is closed and Area Memory is enabled and valid. +- Add service 'save_Area_map' to trigger an Area Memory saving. +- New tool function to transform a relative path to absolute. +- Enabled static IMU TF broadcasting even if 'publish_tf' is set to false, making the two options independent. Thx to @bjsowa +- Moved the 'zed_interfaces' folder in the new ['zed-ros-interfaces'](https://github.com/stereolabs/zed-ros-interfaces) repository. The new repository is useful to receive the topics from a ZED node on system where the 'zed-ros-wrapper' repository cannot be fully installed, i.e. systems without CUDA support. For this repository nothing changes because the 'zed_interfaces' folder is replaced by the 'zed-ros-interfaces' git submodule to automatically satisfy all the dependencies. +- Fix sensors topics publication for ZED2i. 
The support for the new camera was not complete +- Fix sensor_msgs type for depth image in OpenNI mode, from 'sensor_msgs::image_encodings::mono16' to 'sensor_msgs::image_encodings::TYPE_16UC1'. Depth image in OpenNI mode is now compatible with the nodelet 'depthimage_to_laserscan' + +v3.5.x +--------- +- Add support for ROS Noetic +- Add support for SDK v3.5 +- Add support for the new ZED 2i +- Add new parameter 'pos_tracking/pos_tracking_enabled' to enable positional tracking from start even if not required by any subscribed topic. This is useful, for example, to keep the TF always updated. +- Add new example to start multiple ZED Nodelets inside the same nodelet manager +- Fixed issue #690 + +v3.4.x +--------- +- Add support for new DEPTH16_MM data type for depth (OPENNI MODE) +- Fix issue #660: detected objects topic not published if depth computation not active +- Improved support for ZED Object Detection +- Add Skeleton Tracking support +- New Rviz plugin for Object Detection in 'zed-ros-examples' +- New parameters and name changing to fit the new OD features, also the 'start_object_detection' service has been modified to match the new features: + - new 'model' parameter to choose the AI model + - new 'max_range' parameter to limit the detection range + - new 'sk_body_fitting' parameter to enable Skeleton fitting for skeleton AI models + - 'people' -> 'mc_people' to indicate that it is related to multiclass AI models + - 'vehicles'-> 'mc_vehicles' to indicate that it is related to multiclass AI models + - new 'mc_bag' parameter to enable bags detection with multiclass AI models + - new 'mc_animal' parameter to enable animals detection with multiclass AI models + - new 'mc_electronics' parameter to enable electronic devices detection with multiclass AI models + - new 'mc_fruit_vegetable' parameter to enable fruits and vegetables detection with multiclass AI models + +RGB/Depth sync fix #629 (2020-11-02) +------------------------------- +- Fixed sync issue 
between RGB and Depth data (Thx @dennisVi) +- Fixed issues with SVO and sensors data (Thx @dennisVi) + +ASYNC Object Detection (2020-09-18) +----------------------------------- +- Object Detection now runs asynchronously with respect to data grabbing and Object Detected data are published only when available not affecting the frequency of the publishing of the other data types +- Depth OpenNI topic name changed from 'depth/depth_raw_registered' to 'depth/depth_registered' + +IMU timestamp fix (2020-08-25) +------------------------------ +- Added new parameter 'sensors/publish_imu_tf' to enable/disable IMU TF broadcasting +- Fixed duplicated IMU timestamp issue (see ticket #577) +- Fixed problem with IMU TF in Rviz: 'odom' and 'zed_camera_center' TFs are now published at the same frequency of the IMU TF, if available +- IMU TF is now published once as static TF even if the IMU topic is not subscribed + +Timestamp fix (2020-06-03) +-------------------------- +- Fix timestamp update coherency due to parallel threads. 
Thanks to @matlabbe + +IMU fix (2020-05-24) +-------------------- +- Fix issue with IMU frame link when 'publish_tf' and 'publish_map_tf' are disabled + +New package: zed_nodelets (2020-03-20) +--------------------------------------- +- Added the new 'zed_interfaces/RGBDSensors' custom topic that contains RGB, Depth, IMU and Magnetometer synchronized topics +- Added a new package 'zed_nodelets' that contains the main 'zed_nodelets/ZEDWrapperNodelet' and new nodelets +- Added a new nodelet 'zed_nodelets/RgbdSensorsSyncNodelet' that subscribes to RGB, Depth, IMU and Magnetometer topics and republish them in a single synchronized message +- Added a new nodelet 'zed_nodelets/RgbdSensorsDemuxNodelet' that subscribes to RGBDSensors and republish RGB, Depth, IMU and Magnetometer as single topics +- Renamed 'zed_interfaces/objects' to 'zed_interfaces/Objects' +- Renamed 'zed_interfaces/object_stamped' to 'zed_interfaces/ObjectStamped' +- Reorganized the 'zed_wrapper/launch' folder adding the 'include' folder +- New online documentation to explain in details the new 'zed_nodelets' package: https://www.stereolabs.com/docs/ros/zed_nodelets/ + +v3.1 +----- +- Added new package 'zed_interfaces' with isolated declarations of custom messages, services and actions +- Removed not used 'world_frame' parameter +- Removed the 'publish_pose_covariance' parameter, now covariance for pose and odometry is always published +- Removed '_m' from parameters 'mapping/resolution_m' and 'mapping/max_mapping_range_m' +- Renamed the parameter 'depth_resample_factor' to 'depth_downsample_factor' +- Renamed the parameter 'img_resample_factor' to 'img_downsample_factor' +- Renamed the parameter 'odometry_db' to 'area_memory_db_path' +- Renamed the parameter 'frame_rate' to 'grab_frame_rate' +- Added new dynamic parameter 'pub_frame_rate' to reduce Video and Depth publishing frequency with respect to grabbing frame rate ['grab_frame_rate'] +- Added new dynamic parameter 'gamma' for Gamma Control +- Added 
new dynamic parameter 'depth_texture_conf' to filter depth according to textureness information +- Added new TF frames for all the sensors available on ZED2 +- Added publishers for gray images +- Added publisher for Camera to IMU transform: '///camera_imu_transform' +- Default value for 'depth_confidence' changed from 100 to 50 +- Added 'base_frame' as launch parameter to propagate the value of the parameter in the Xacro description + + +Bug fix (2020-03-06) +-------------------- +- Fix default value for dynamic parameters not set from 'common.yaml' + +XACRO and more (2020-01-31) +--------------------------- +- Added xacro support for parametric URDF +- Removed redundant URDFs and added a single parametric URDF based on xacro +- Fixed auto white balance at node start (thanks to @kjaget) +- Removed 'fixed_covariance' and 'fixed_cov_value' parameters (not required anymore) +- Removed 'sens_pub_rate' parameter +- Removed 'confidence_image' message +- Removed 'color_enhancement' parameter, always ON by default +- Mapping does not use presets for resolution, but a float value in range [0.01,0.2] +- Added new parameter 'max_mapping_range_m' for mapping depth range (set to '-1' for auto calculation) +- Moved "multi-camera" launch file in ['zed-ros-examples'](https://github.com/stereolabs/zed-ros-examples/tree/master/examples/zed_multicamera_example) +- Added current GPU ID to Diagnostic information +- The 'confidence' dynamic parameter is now called 'depth_confidence' +- Removed dynamic parametes 'map_resize_factor' +- Added new parameter 'video/img_resample_factor' +- Added new parameter 'depth/map_resample_factor' +- Updated the names for the parameters of the Object Detection module [only ZED2] + +SDK v3.0 (2020-01-27) +--------------------- +- Added a new repository ['zed-ros-examples'](https://github.com/stereolabs/zed-ros-examples) to keep separated the main ZED Wrapper node from Examples and Tutorials. 
A clean robot installation is now allowed +- ZED 2 support +- Color enhancement support +- Max range is not a dynamic parameter anymore +- Camera temperature added to diagnostic (only ZED2) +- New service to start/stop mapping +- Support for Object Detection (only ZED2) +- Advanced support for on-board sensors (only ZED-M and ZED2) +- New tutorials, see ['zed-ros-examples'](https://github.com/stereolabs/zed-ros-examples) + + + + + diff --git a/LICENSE b/LICENSE index b0e047d1..93631b21 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ MIT License -Copyright (c) 2020 Stereolabs +Copyright (c) 2023 Stereolabs Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/README.md b/README.md index c99416e4..bd988680 100644 --- a/README.md +++ b/README.md @@ -1,11 +1,14 @@ -![](./images/Picto+STEREOLABS_Black.jpg) +

+ Stereolabs
+ ROS Noetic Ninjemys Integration +

-# Stereolabs ZED Camera - ROS Integration - -This package lets you use the ZED stereo camera with ROS. It outputs the camera left and right images, depth map, point cloud, pose information and supports the use of multiple ZED cameras. +This package lets you use the ZED stereo camera with ROS. It outputs the camera's left and right images, depth map, point cloud, and pose information and supports the use of multiple ZED cameras. [More information](https://www.stereolabs.com/documentation/guides/using-zed-with-ros/introduction.html) +**Note:** The `zed_interfaces` package has been removed from this repository and moved to its own [`zed-ros-interfaces` repository](https://github.com/stereolabs/zed-ros-interfaces) to allow better integration of the ZED Wrapper on remote ground stations that do not require the full package to be installed. To update your repository please follow the [new update instructions](https://github.com/stereolabs/zed-ros-wrapper#update-the-repository). For more information please read issue [#750](https://github.com/stereolabs/zed-ros-wrapper/issues/750). + ## Getting started - First, download the latest version of the ZED SDK on [stereolabs.com](https://www.stereolabs.com/developers/) @@ -14,44 +17,54 @@ This package lets you use the ZED stereo camera with ROS. 
It outputs the camera ### Prerequisites -- Ubuntu 16.04 or newer (Ubuntu 18 recommended) -- [ZED SDK **≥ 3.0**](https://www.stereolabs.com/developers/) and its dependency [CUDA](https://developer.nvidia.com/cuda-downloads) -- [ROS Kinetic](http://wiki.ros.org/kinetic/Installation/Ubuntu) or [ROS Melodic](http://wiki.ros.org/melodic/Installation/Ubuntu) - -*Note:* an older version of the wrapper compatible with the **SDK v2.8.x** is available [here](https://github.com/stereolabs/zed-ros-wrapper/releases/tag/v2.x) +- Ubuntu 20.04 +- [ZED SDK **v4.1**](https://www.stereolabs.com/developers/) and its dependency [CUDA](https://developer.nvidia.com/cuda-downloads) +- [ROS Noetic](http://wiki.ros.org/noetic/Installation/Ubuntu) -### Build the program +### Build the repository The zed_ros_wrapper is a catkin package. It depends on the following ROS packages: - - nav_msgs - - tf2_geometry_msgs - - message_runtime - - catkin - - roscpp - - stereo_msgs - - rosconsole - - robot_state_publisher - - urdf - - sensor_msgs - - image_transport - - roslint - - diagnostic_updater - - dynamic_reconfigure - - tf2_ros - - message_generation - - nodelet + - roscpp +- image_transport +- rosconsole +- sensor_msgs +- stereo_msgs +- std_msgs +- std_srvs +- message_filters +- tf2_ros +- nodelet +- tf2_geometry_msgs +- message_generation +- diagnostic_updater +- dynamic_reconfigure +- zed_interfaces Open a terminal, clone the repository, update the dependencies and build the packages: $ cd ~/catkin_ws/src - $ git clone https://github.com/stereolabs/zed-ros-wrapper.git + $ git clone --recursive https://github.com/stereolabs/zed-ros-wrapper.git $ cd ../ $ rosdep install --from-paths src --ignore-src -r -y $ catkin_make -DCMAKE_BUILD_TYPE=Release $ source ./devel/setup.bash -### Run the program +#### Update the local repository + +To update the repository to the latest release you must use the following command to retrieve the latest commits of `zed-ros-wrapper` and of all the submodules: + + $ 
git checkout master # if you are not on the main branch + $ git pull --recurse-submodules # update recursively all the submodules + +Remember to always clean the cache of your catkin workspace before compiling with the `catkin_make` command to be sure that everything will work as expected: + + $ roscd + $ cd .. + $ rm -rf build devel + $ catkin_make -DCMAKE_BUILD_TYPE=Release + +### Run the ZED wrapper To launch the ZED node use @@ -63,16 +76,28 @@ ZED Mini camera: $ roslaunch zed_wrapper zedm.launch -ZED2 camera: +ZED 2 camera: $ roslaunch zed_wrapper zed2.launch - To select the ZED from its serial number: +ZED 2i camera: + + $ roslaunch zed_wrapper zed2i.launch + +ZED X camera: + + $ roslaunch zed_wrapper zedx.launch + +ZED X Mini camera: + + $ roslaunch zed_wrapper zedxm.launch + + To select the camera from its serial number: $ roslaunch zed_wrapper zed.launch serial_number:=1010 #replace 1010 with the actual SN ### Rviz visualization -Example launch files to start a pre-configured Rviz environment to visualize the data of ZED, ZED Mini and ZED 2 cameras are provided in the [`zed-ros-examples` repository](https://github.com/stereolabs/zed-ros-examples/tree/master/zed_display_rviz) +Example launch files to start a pre-configured Rviz environment to visualize the data of ZED, ZED Mini, ZED 2, ZED X, and ZED X Mini cameras are provided in the [`zed-ros-examples` repository](https://github.com/stereolabs/zed-ros-examples/tree/master/zed_display_rviz) ### SVO recording [SVO recording](https://www.stereolabs.com/docs/video/#video-recording) can be started and stopped while the ZED node is running using the service `start_svo_recording` and the service `stop_svo_recording`. @@ -81,14 +106,20 @@ Example launch files to start a pre-configured Rviz environment to visualize the ### Object Detection The SDK v3.0 introduces the Object Detection and Tracking module. **The Object Detection module is available only with a ZED 2 camera**. 
-The Object Detection can be enabled *automatically* when the node start setting the parameter `object_detection/od_enabled` to `true` in the file `zed2.yaml`. +The Object Detection can be enabled *automatically* when the node start setting the parameter `object_detection/od_enabled` to `true` in the file `common.yaml`. -The Object Detection can be enabled/disabled *manually* calling the services `start_object_detection` and `stop_object_detection`. +The Object Detection can be enabled/disabled *manually* calling the service `enable_object_detection`. + +### Body Tracking +The Body Tracking module is not available for the ZED ROS Wrapper. Please consider migrating to the [ZED ROS2 Wrapper](https://github.com/stereolabs/zed-ros2-wrapper) if you need it. ### Spatial Mapping The Spatial Mapping can be enabled automatically when the node start setting the parameter `mapping/mapping_enabled` to `true` in the file `common.yaml`. The Spatial Mapping can be enabled/disabled manually calling the services `start_3d_mapping` and `stop_3d_mapping`. +### Geo Tracking (GNSS Fusion) +The Geo tracking module is not available for the ZED ROS Wrapper. Please consider migrating to the [ZED ROS2 Wrapper](https://github.com/stereolabs/zed-ros2-wrapper) if you need it. + ### Diagnostic The ZED node publishes diagnostic information that can be used by the robotic system using a [diagnostic_aggregator node](http://wiki.ros.org/diagnostic_aggregator). 
diff --git a/images/Picto+STEREOLABS_Black.jpg b/images/Picto+STEREOLABS_Black.jpg index 744c9ffe..060e2435 100644 Binary files a/images/Picto+STEREOLABS_Black.jpg and b/images/Picto+STEREOLABS_Black.jpg differ diff --git a/latest_changes.md b/latest_changes.md deleted file mode 100644 index 6108d7c0..00000000 --- a/latest_changes.md +++ /dev/null @@ -1,73 +0,0 @@ -LATEST CHANGES -============== - -New package: zed_nodelets (2020-03-20) ---------------------------------------- -- Added the new `zed_interfaces/RGBDSensors` custom topic that contains RGB, Depth, IMU and Magnetometer synchronized topics -- Added a new package `zed_nodelets` that contains the main `zed_nodelets/ZEDWrapperNodelet` and new nodelets -- Added a new nodelet `zed_nodelets/RgbdSensorsSyncNodelet` that subscribes to RGB, Depth, IMU and Magnetometer topics and republish them in a single synchronized message -- Added a new nodelet `zed_nodelets/RgbdSensorsDemuxNodelet` that subscribes to RGBDSensors and republish RGB, Depth, IMU and Magnetometer as single topics -- Renamed `zed_interfaces/objects` to `zed_interfaces/Objects` -- Renamed `zed_interfaces/object_stamped` to `zed_interfaces/ObjectStamped` -- Reorganized the `zed_wrapper/launch` folder adding the `include` folder -- New online documentation to explain in details the new `zed_nodelets` package: https://www.stereolabs.com/docs/ros/zed_nodelets/ - -v3.1 ------ -- Added new package `zed_interfaces` with isolated declarations of custom messages, services and actions -- Removed not used `world_frame` parameter -- Removed the`publish_pose_covariance` parameter, now covariance for pose and odometry is always published -- Removed `_m` from parameters `mapping/resolution_m` and `mapping/max_mapping_range_m` -- Renamed the parameter `depth_resample_factor` to `depth_downsample_factor` -- Renamed the parameter `img_resample_factor` to `img_downsample_factor` -- Renamed the parameter `odometry_db` to `area_memory_db_path` -- Renamed the parameter 
`frame_rate` to `grab_frame_rate` -- Added new dynamic parameter `pub_frame_rate` to reduce Video and Depth publishing frequency respect to grabbing frame rate [`grab_frame_rate`] -- Added new dynamic parameter `gamma` for Gamma Control -- Added new dynamic parameter `depth_texture_conf` to filter depth according to textureness information -- Added new TF frames for all the sensors available on ZED2 -- Added publishers for gray images -- Added publisher for Camera to IMU transform: `///camera_imu_transform` -- Default value for `depth_confidence` changed from 100 to 50 -- Added `base_frame` as launch parameter to propagate the value of the parameter in the Xacro description - - -Bug fix (2020-03-06) --------------------- -- Fix default value for dynamic parameters not set from `common.yaml` - -XACRO and more (2020-01-31) ---------------------------- -- Added xacro support for parametric URDF -- Removed redundant URDFs and added a single parametric URDF based on xacro -- Fixed auto white balance at node start (thanks to @kjaget) -- Removed `fixed_covariance` and `fixed_cov_value` parameters (not required anymore) -- Removed `sens_pub_rate` parameter -- Removed `confidence_image` message -- Removed `color_enhancement` parameter, always ON by default -- Mapping does not use presets for resolution, but a float value in range [0.01,0.2] -- Added new parameter `max_mapping_range_m` for mapping depth range (set to `-1` for auto calculation) -- Moved "multi-camera" launch file in [`zed-ros-examples`](https://github.com/stereolabs/zed-ros-examples/tree/master/examples/zed_multicamera_example) -- Added current GPU ID to Diagnostic information -- The `confidence` dynamic parameter is now called `depth_confidence` -- Removed dynamic parametes `map_resize_factor` -- Added new parameter `video/img_resample_factor` -- Added new parameter `depth/map_resample_factor` -- Updated the names for the parameters of the Object Detection module [only ZED2] - -SDK v3.0 (2020-01-27) 
---------------------- -- Added a new repository [`zed-ros-examples`](https://github.com/stereolabs/zed-ros-examples) to keep separated the main ZED Wrapper node from Examples and Tutorials. A clean robot installation is now allowed -- ZED 2 support -- Color enhancement support -- Max range is not a dynamic parameter anymore -- Camera temperature added to diagnostic (only ZED2) -- New service to start/stop mapping -- Support for Object Detection (only ZED2) -- Advanced support for on-board sensors (only ZED-M and ZED2) -- New tutorials, see [`zed-ros-examples`](https://github.com/stereolabs/zed-ros-examples) - - - - - diff --git a/zed-ros-interfaces b/zed-ros-interfaces new file mode 160000 index 00000000..12f55da8 --- /dev/null +++ b/zed-ros-interfaces @@ -0,0 +1 @@ +Subproject commit 12f55da885dd3f0f68c168d0bccd33c4e892328d diff --git a/zed_interfaces/CMakeLists.txt b/zed_interfaces/CMakeLists.txt deleted file mode 100644 index 7c76ec09..00000000 --- a/zed_interfaces/CMakeLists.txt +++ /dev/null @@ -1,68 +0,0 @@ -cmake_minimum_required(VERSION 2.8.7) - -project(zed_interfaces) - -find_package(catkin REQUIRED COMPONENTS - std_msgs - geometry_msgs - sensor_msgs - - - #actionlib_msgs - message_generation -) - -add_message_files( - DIRECTORY msg - FILES - ObjectStamped.msg - Objects.msg - RGBDSensors.msg - ) - -#add_action_files(DIRECTORY action FILES act.action) - -add_service_files( - DIRECTORY srv - FILES - set_pose.srv - reset_odometry.srv - reset_tracking.srv - start_svo_recording.srv - stop_svo_recording.srv - start_remote_stream.srv - stop_remote_stream.srv - set_led_status.srv - toggle_led.srv - start_3d_mapping.srv - stop_3d_mapping.srv - start_object_detection.srv - stop_object_detection.srv - ) - -generate_messages( - DEPENDENCIES - #actionlib_msgs - std_msgs - geometry_msgs - sensor_msgs -) - -catkin_package( - INCLUDE_DIRS include - CATKIN_DEPENDS - message_generation - std_msgs - geometry_msgs - sensor_msgs - #actionlib_msgs -) - 
-############################################################################### - -#Add all files in subdirectories of the project in -# a dummy_target so qtcreator have access to all files -FILE(GLOB_RECURSE all_files ${CMAKE_SOURCE_DIR}/*) -add_custom_target(all_files_${PROJECT_NAME} SOURCES ${all_files}) - - diff --git a/zed_interfaces/include/foo b/zed_interfaces/include/foo deleted file mode 100644 index e69de29b..00000000 diff --git a/zed_interfaces/msg/ObjectStamped.msg b/zed_interfaces/msg/ObjectStamped.msg deleted file mode 100644 index ae477b74..00000000 --- a/zed_interfaces/msg/ObjectStamped.msg +++ /dev/null @@ -1,41 +0,0 @@ -# Standard Header -Header header - -# Object label -string label - -# Object label ID -int16 label_id - -# Object confidence level (1-99) -float32 confidence - -# Object centroid -geometry_msgs/Point32 position - -# Object velocity -geometry_msgs/Vector3 linear_vel - -# Tracking state -# 0 -> OFF (object not valid) -# 1 -> OK -# 2 -> SEARCHING (occlusion occurred, trajectory is estimated) -int8 tracking_state - -# 2D Bounding box projected to Camera image -# 0 ------- 1 -# | | -# | | -# | | -# 3 ------- 2 -geometry_msgs/Point32[4] bbox_2d - -# 3D Bounding box in world frame -# 1 ------- 2 -# /. /| -# 0 ------- 3 | -# | . | | -# | 5.......| 6 -# |. 
|/ -# 4 ------- 7 -geometry_msgs/Point32[8] bbox_3d diff --git a/zed_interfaces/msg/Objects.msg b/zed_interfaces/msg/Objects.msg deleted file mode 100644 index 1eadcc2b..00000000 --- a/zed_interfaces/msg/Objects.msg +++ /dev/null @@ -1,2 +0,0 @@ -# Array of `object_stamped` topics -zed_interfaces/ObjectStamped[] objects diff --git a/zed_interfaces/msg/RGBDSensors.msg b/zed_interfaces/msg/RGBDSensors.msg deleted file mode 100644 index ffabc3f2..00000000 --- a/zed_interfaces/msg/RGBDSensors.msg +++ /dev/null @@ -1,16 +0,0 @@ -# Header -Header header - -# CameraInfo -sensor_msgs/CameraInfo rgbCameraInfo -sensor_msgs/CameraInfo depthCameraInfo - -# Raw -sensor_msgs/Image rgb -sensor_msgs/Image depth - -# IMU -sensor_msgs/Imu imu - -# Magnetometer -sensor_msgs/MagneticField mag diff --git a/zed_interfaces/package.xml b/zed_interfaces/package.xml deleted file mode 100644 index 5b6cad8e..00000000 --- a/zed_interfaces/package.xml +++ /dev/null @@ -1,29 +0,0 @@ - - - zed_interfaces - 3.1.0 - zed_interfaces is a packages that defines custom messages, services and actions used by the - zed-ros-wrapper package and other packages. 
- - STEREOLABS - MIT - - http://stereolabs.com/ - https://github.com/stereolabs/zed-ros-wrapper - https://github.com/stereolabs/zed-ros-wrapper/issues - - catkin - - std_msgs - sensor_msgs - actionlib_msgs - geometry_msgs - message_generation - - std_msgs - sensor_msgs - actionlib_msgs - geometry_msgs - message_generation - - diff --git a/zed_interfaces/srv/reset_odometry.srv b/zed_interfaces/srv/reset_odometry.srv deleted file mode 100644 index 931e58c3..00000000 --- a/zed_interfaces/srv/reset_odometry.srv +++ /dev/null @@ -1,2 +0,0 @@ ---- -bool reset_done \ No newline at end of file diff --git a/zed_interfaces/srv/reset_tracking.srv b/zed_interfaces/srv/reset_tracking.srv deleted file mode 100644 index 931e58c3..00000000 --- a/zed_interfaces/srv/reset_tracking.srv +++ /dev/null @@ -1,2 +0,0 @@ ---- -bool reset_done \ No newline at end of file diff --git a/zed_interfaces/srv/set_led_status.srv b/zed_interfaces/srv/set_led_status.srv deleted file mode 100644 index c2f49e4b..00000000 --- a/zed_interfaces/srv/set_led_status.srv +++ /dev/null @@ -1,3 +0,0 @@ -bool led_enabled ---- -bool done diff --git a/zed_interfaces/srv/set_pose.srv b/zed_interfaces/srv/set_pose.srv deleted file mode 100644 index 60c0008e..00000000 --- a/zed_interfaces/srv/set_pose.srv +++ /dev/null @@ -1,10 +0,0 @@ -# Translation XYZ [meters] -float32 x -float32 y -float32 z -# Orientation RPY [radians] -float32 R -float32 P -float32 Y ---- -bool done diff --git a/zed_interfaces/srv/start_3d_mapping.srv b/zed_interfaces/srv/start_3d_mapping.srv deleted file mode 100644 index 857232ae..00000000 --- a/zed_interfaces/srv/start_3d_mapping.srv +++ /dev/null @@ -1,13 +0,0 @@ -# Starts 3D fused pointcloud generation, if not automatically enabled with the parameter `mapping/mapping_enabled` - -# Resolution of the fused pointcloud in meters [0.01, 0.2] -float32 resolution - -# Max mapping range in meters [0.2, 20.0] -float32 max_mapping_range - -# Frequency of the publishing of the fused colored point 
cloud -float32 fused_pointcloud_freq - ---- -bool done diff --git a/zed_interfaces/srv/start_object_detection.srv b/zed_interfaces/srv/start_object_detection.srv deleted file mode 100644 index cc7882d6..00000000 --- a/zed_interfaces/srv/start_object_detection.srv +++ /dev/null @@ -1,18 +0,0 @@ -# Starts object detection, if not automatically enabled with the parameter `object_detection/od_enabled` - -# Minimum Confidence level -float32 confidence - -# Object tracking -bool tracking - -# Detect people -bool people - -# Detect vehicles -bool vehicles - - - ---- -bool done diff --git a/zed_interfaces/srv/start_remote_stream.srv b/zed_interfaces/srv/start_remote_stream.srv deleted file mode 100644 index 6741958e..00000000 --- a/zed_interfaces/srv/start_remote_stream.srv +++ /dev/null @@ -1,25 +0,0 @@ -# Defines the codec used for streaming (`0`: AVCHD [H264], `1`: HEVC [H265]) -# Note: If HEVC (H265) is used, make sure the recieving host is compatible with HEVC decoding (basically a pascal NVIDIA card). If not, prefer to use AVCHD (H264) since every compatible NVIDIA card supports AVCHD decoding -uint8 codec=0 - -# Defines the PORT the data will be streamed on. -# Note: port must be an even number. Any odd number will be rejected. -uint16 port=30000 - -# Defines the streaming BITRATE in Kbits/s -uint32 bitrate=2000 - -# Defines the GOP SIZE in frame unit. -# Note: if value is set to -1, the gop size will match 2 seconds, depending on camera fps. -# Note: The gop size determines the maximum distance between IDR/I-frames. Very high GOP size will result in slightly more efficient compression, especially on static scene. But it can result in more latency if IDR/I-frame packet are lost during streaming. -# Note: Default value is -1. Maximum allowed value is 256 (frames). -int32 gop_size=-1 - -# Enable/Disable adaptive bitrate -# Note: Bitrate will be adjusted regarding the number of packet loss during streaming. 
-# Note_ if activated, bitrate can vary between [bitrate/4, bitrate] -# Warning: Currently, the adaptive bitrate only works when "sending" device is a NVIDIA jetson (X1,X2,Xavier,Nano) -bool adaptative_bitrate=False ---- -bool result -string info diff --git a/zed_interfaces/srv/start_svo_recording.srv b/zed_interfaces/srv/start_svo_recording.srv deleted file mode 100644 index 4b6a3dda..00000000 --- a/zed_interfaces/srv/start_svo_recording.srv +++ /dev/null @@ -1,5 +0,0 @@ -# Full path is requiredm no relative paths -string svo_filename ---- -bool result -string info diff --git a/zed_interfaces/srv/stop_3d_mapping.srv b/zed_interfaces/srv/stop_3d_mapping.srv deleted file mode 100644 index 25872614..00000000 --- a/zed_interfaces/srv/stop_3d_mapping.srv +++ /dev/null @@ -1,5 +0,0 @@ -# Stops 3D fused pointcloud generation, if automatically enabled with the parameter `mapping/mapping_enabled` or with -# the `start_object_detection` service - ---- -bool done diff --git a/zed_interfaces/srv/stop_object_detection.srv b/zed_interfaces/srv/stop_object_detection.srv deleted file mode 100644 index 9fe28601..00000000 --- a/zed_interfaces/srv/stop_object_detection.srv +++ /dev/null @@ -1,5 +0,0 @@ -# Stops object detection, if automatically enabled with the parameter `object_detection/od_enabled` or with -# the `start_mapping` service - ---- -bool done diff --git a/zed_interfaces/srv/stop_remote_stream.srv b/zed_interfaces/srv/stop_remote_stream.srv deleted file mode 100644 index 29c919ac..00000000 --- a/zed_interfaces/srv/stop_remote_stream.srv +++ /dev/null @@ -1,2 +0,0 @@ ---- -bool done diff --git a/zed_interfaces/srv/stop_svo_recording.srv b/zed_interfaces/srv/stop_svo_recording.srv deleted file mode 100644 index 17c8551b..00000000 --- a/zed_interfaces/srv/stop_svo_recording.srv +++ /dev/null @@ -1,3 +0,0 @@ ---- -bool done -string info diff --git a/zed_interfaces/srv/toggle_led.srv b/zed_interfaces/srv/toggle_led.srv deleted file mode 100644 index 4bec2c79..00000000 --- 
a/zed_interfaces/srv/toggle_led.srv +++ /dev/null @@ -1,2 +0,0 @@ ---- -bool new_led_status diff --git a/zed_nodelets/CMakeLists.txt b/zed_nodelets/CMakeLists.txt index 0c887d95..811a7aaf 100644 --- a/zed_nodelets/CMakeLists.txt +++ b/zed_nodelets/CMakeLists.txt @@ -1,7 +1,10 @@ -cmake_minimum_required(VERSION 2.8.7) +cmake_minimum_required(VERSION 3.5) project(zed_nodelets) +## Generate symbols for IDE indexer +set(CMAKE_EXPORT_COMPILE_COMMANDS ON) + # if CMAKE_BUILD_TYPE is not specified, take 'Release' as default IF(NOT CMAKE_BUILD_TYPE) SET(CMAKE_BUILD_TYPE Release) @@ -19,8 +22,8 @@ function(checkPackage package customMessage) endif() endfunction(checkPackage) -find_package(ZED 3) -checkPackage("ZED" "ZED SDK v3.x not found, install it from:\n https://www.stereolabs.com/developers/") +find_package(ZED 4) +checkPackage("ZED" "ZED SDK v4.x not found, install it from:\n https://www.stereolabs.com/developers/") exec_program(uname ARGS -p OUTPUT_VARIABLE CMAKE_SYSTEM_NAME2) if ( CMAKE_SYSTEM_NAME2 MATCHES "aarch64" ) # Jetson TX @@ -38,21 +41,22 @@ if (OPENMP_FOUND) set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${OpenMP_EXE_LINKER_FLAGS}") endif() -find_package(catkin COMPONENTS - image_transport - roscpp - rosconsole - sensor_msgs - stereo_msgs - std_msgs - message_filters - tf2_ros - nodelet - tf2_geometry_msgs - message_generation - diagnostic_updater - zed_interfaces - dynamic_reconfigure +find_package(catkin REQUIRED COMPONENTS + roscpp + image_transport + rosconsole + sensor_msgs + stereo_msgs + std_msgs + std_srvs + message_filters + tf2_ros + nodelet + tf2_geometry_msgs + message_generation + diagnostic_updater + dynamic_reconfigure + zed_interfaces ) generate_dynamic_reconfigure_options( @@ -70,8 +74,10 @@ catkin_package( dynamic_reconfigure tf2_ros tf2_geometry_msgs - message_runtime + message_runtime zed_interfaces + geometry_msgs + visualization_msgs ) ############################################################################### @@ -88,14 
+94,14 @@ set(RGBD_SENS_DEMUX_SRC ${CMAKE_CURRENT_SOURCE_DIR}/src/rgbd_sensors_demux_nodel # INCLUDES # Specify locations of header files. -include_directories( - ${catkin_INCLUDE_DIRS} - ${CUDA_INCLUDE_DIRS} - ${ZED_INCLUDE_DIRS} - ${CMAKE_CURRENT_SOURCE_DIR}/src/tools/include - ${CMAKE_CURRENT_SOURCE_DIR}/src/zed_nodelet/include - ${CMAKE_CURRENT_SOURCE_DIR}/src/rgbd_sensors_sync_nodelet/include - ${CMAKE_CURRENT_SOURCE_DIR}/src/rgbd_sensors_demux_nodelet/include +set(INCLUDE_DIRS + ${catkin_INCLUDE_DIRS} + ${CUDA_INCLUDE_DIRS} + ${ZED_INCLUDE_DIRS} + ${CMAKE_CURRENT_SOURCE_DIR}/src/tools/include + ${CMAKE_CURRENT_SOURCE_DIR}/src/zed_nodelet/include + ${CMAKE_CURRENT_SOURCE_DIR}/src/rgbd_sensors_sync_nodelet/include + ${CMAKE_CURRENT_SOURCE_DIR}/src/rgbd_sensors_demux_nodelet/include ) link_directories(${ZED_LIBRARY_DIR}) @@ -106,11 +112,12 @@ link_directories(${CUDA_LIBRARY_DIRS}) ############################################################################### # ZED WRAPPER NODELET -add_definitions(-std=c++11) +add_definitions(-std=c++11 -Wno-deprecated-declarations) set(LINK_LIBRARIES ${catkin_LIBRARIES} ${ZED_LIBRARIES} ${CUDA_LIBRARIES} + stdc++fs ) add_library(ZEDNodelets @@ -119,6 +126,7 @@ add_library(ZEDNodelets ${RGBD_SENS_SYNC_SRC} ${RGBD_SENS_DEMUX_SRC} ) +target_include_directories(ZEDNodelets PRIVATE ${INCLUDE_DIRS}) target_link_libraries(ZEDNodelets ${LINK_LIBRARIES}) add_dependencies( ZEDNodelets diff --git a/zed_nodelets/cfg/Zed.cfg b/zed_nodelets/cfg/Zed.cfg index be86bed4..fe41f8cd 100755 --- a/zed_nodelets/cfg/Zed.cfg +++ b/zed_nodelets/cfg/Zed.cfg @@ -6,7 +6,6 @@ from dynamic_reconfigure.parameter_generator_catkin import * gen = ParameterGenerator() group_general = gen.add_group("general") -group_general.add("pub_frame_rate", double_t, 0, "Video and Depth data frequency", 15.0, 0.1, 60.0); group_depth = gen.add_group("depth") group_depth.add("depth_confidence", int_t, 1, "Depth confidence threshold", 50, 1, 100) diff --git 
a/zed_nodelets/package.xml b/zed_nodelets/package.xml index e0d5a7f7..fcbf410d 100644 --- a/zed_nodelets/package.xml +++ b/zed_nodelets/package.xml @@ -1,7 +1,7 @@ zed_nodelets - 3.1.0 + 3.5.0 zed_nodelets is package containing all the nodelets provided by the ZED ROS wrapper. STEREOLABS @@ -25,6 +25,8 @@ dynamic_reconfigure nodelet diagnostic_updater + geometry_msgs + visualization_msgs zed_interfaces diff --git a/zed_nodelets/src/rgbd_sensors_demux_nodelet/include/rgbd_sensor_demux.hpp b/zed_nodelets/src/rgbd_sensors_demux_nodelet/include/rgbd_sensor_demux.hpp index ba5a20b4..713e85a0 100644 --- a/zed_nodelets/src/rgbd_sensors_demux_nodelet/include/rgbd_sensor_demux.hpp +++ b/zed_nodelets/src/rgbd_sensors_demux_nodelet/include/rgbd_sensor_demux.hpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2020, STEREOLABS. +// Copyright (c) 2023, STEREOLABS. // // All rights reserved. // @@ -21,8 +21,8 @@ #ifndef RGBD_SENSOR_DEMUX_HPP #define RGBD_SENSOR_DEMUX_HPP -#include #include +#include #include #include @@ -35,41 +35,41 @@ #include "zed_interfaces/RGBDSensors.h" -namespace zed_nodelets { - -class RgbdSensorsDemuxNodelet : public nodelet::Nodelet { - +namespace zed_nodelets +{ +class RgbdSensorsDemuxNodelet : public nodelet::Nodelet +{ public: - RgbdSensorsDemuxNodelet(); - virtual ~RgbdSensorsDemuxNodelet(); + RgbdSensorsDemuxNodelet(); + virtual ~RgbdSensorsDemuxNodelet(); protected: - /* \brief Initialization function called by the Nodelet base class - */ - virtual void onInit(); + /*! \brief Initialization function called by the Nodelet base class + */ + virtual void onInit(); - /* \brief Callback for full topics synchronization - */ - void msgCallback( const zed_interfaces::RGBDSensorsPtr& msg ); + /*! 
\brief Callback for full topics synchronization + */ + void msgCallback(const zed_interfaces::RGBDSensorsPtr& msg); private: - // Node handlers - ros::NodeHandle mNh; // Node handler - ros::NodeHandle mNhP; // Private Node handler - - // Publishers - image_transport::CameraPublisher mPubRgb; - image_transport::CameraPublisher mPubDepth; - ros::Publisher mPubIMU; - ros::Publisher mPubMag; - - // Subscribers - ros::Subscriber mSub; + // Node handlers + ros::NodeHandle mNh; // Node handler + ros::NodeHandle mNhP; // Private Node handler + + // Publishers + image_transport::CameraPublisher mPubRgb; + image_transport::CameraPublisher mPubDepth; + ros::Publisher mPubIMU; + ros::Publisher mPubMag; + + // Subscribers + ros::Subscriber mSub; }; -} +} // namespace zed_nodelets #include PLUGINLIB_EXPORT_CLASS(zed_nodelets::RgbdSensorsDemuxNodelet, nodelet::Nodelet) -#endif // RGBD_SENSOR_DEMUX_HPP +#endif // RGBD_SENSOR_DEMUX_HPP diff --git a/zed_nodelets/src/rgbd_sensors_demux_nodelet/src/rgbd_sensor_demux.cpp b/zed_nodelets/src/rgbd_sensors_demux_nodelet/src/rgbd_sensor_demux.cpp index 81145b4f..f024d56c 100644 --- a/zed_nodelets/src/rgbd_sensors_demux_nodelet/src/rgbd_sensor_demux.cpp +++ b/zed_nodelets/src/rgbd_sensors_demux_nodelet/src/rgbd_sensor_demux.cpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2020, STEREOLABS. +// Copyright (c) 2023, STEREOLABS. // // All rights reserved. 
// @@ -26,91 +26,104 @@ #include -namespace zed_nodelets { - -RgbdSensorsDemuxNodelet::RgbdSensorsDemuxNodelet() { - +namespace zed_nodelets +{ +RgbdSensorsDemuxNodelet::RgbdSensorsDemuxNodelet() +{ } -RgbdSensorsDemuxNodelet::~RgbdSensorsDemuxNodelet() { - +RgbdSensorsDemuxNodelet::~RgbdSensorsDemuxNodelet() +{ } -void RgbdSensorsDemuxNodelet::onInit() { - // Node handlers - mNh = getNodeHandle(); - mNhP = getPrivateNodeHandle(); +void RgbdSensorsDemuxNodelet::onInit() +{ + // Node handlers + mNh = getNodeHandle(); + mNhP = getPrivateNodeHandle(); #ifndef NDEBUG - if (ros::console::set_logger_level(ROSCONSOLE_DEFAULT_NAME, - ros::console::levels::Debug)) { - ros::console::notifyLoggerLevelsChanged(); - } + if (ros::console::set_logger_level(ROSCONSOLE_DEFAULT_NAME, ros::console::levels::Debug)) + { + ros::console::notifyLoggerLevelsChanged(); + } #endif - NODELET_INFO( "********** Starting nodelet '%s' **********",getName().c_str() ); + NODELET_INFO("********** Starting nodelet '%s' **********", getName().c_str()); - mSub = mNh.subscribe( "rgbd_sens", 1, &RgbdSensorsDemuxNodelet::msgCallback, this ); - NODELET_INFO_STREAM( " * Subscribed to topic: " << mSub.getTopic().c_str() ); + mSub = mNh.subscribe("rgbd_sens", 1, &RgbdSensorsDemuxNodelet::msgCallback, this); + NODELET_INFO_STREAM(" * Subscribed to topic: " << mSub.getTopic().c_str()); } -void RgbdSensorsDemuxNodelet::msgCallback(const zed_interfaces::RGBDSensorsPtr &msg) { - if( !msg->rgb.header.stamp.isZero() ) { - if( mPubRgb.getTopic().empty() ) { - ros::NodeHandle rgb_pnh(mNhP, "rgb"); - image_transport::ImageTransport it(rgb_pnh); - - mPubRgb = it.advertiseCamera("image_rect_color", 1); // rgb - NODELET_INFO_STREAM("Advertised on topic " << mPubRgb.getTopic()); - NODELET_INFO_STREAM("Advertised on topic " << mPubRgb.getInfoTopic()); - } - - if(mPubRgb.getNumSubscribers() > 0 ){ - mPubRgb.publish( msg->rgb, msg->rgbCameraInfo ); - } +void RgbdSensorsDemuxNodelet::msgCallback(const 
zed_interfaces::RGBDSensorsPtr& msg) +{ + if (!msg->rgb.header.stamp.isZero()) + { + if (mPubRgb.getTopic().empty()) + { + ros::NodeHandle rgb_pnh(mNhP, "rgb"); + image_transport::ImageTransport it(rgb_pnh); + + mPubRgb = it.advertiseCamera("image_rect_color", 1); // rgb + NODELET_INFO_STREAM("Advertised on topic " << mPubRgb.getTopic()); + NODELET_INFO_STREAM("Advertised on topic " << mPubRgb.getInfoTopic()); } - if( !msg->depth.header.stamp.isZero() ) { - if( mPubDepth.getTopic().empty() ) { - ros::NodeHandle depth_pnh(mNhP, "depth"); - image_transport::ImageTransport it(depth_pnh); - - mPubDepth = it.advertiseCamera("depth_registered", 1); // depth - NODELET_INFO_STREAM("Advertised on topic " << mPubDepth.getTopic()); - NODELET_INFO_STREAM("Advertised on topic " << mPubDepth.getInfoTopic()); - } - - if(mPubDepth.getNumSubscribers() > 0 ){ - mPubDepth.publish( msg->depth, msg->depthCameraInfo ); - } + if (mPubRgb.getNumSubscribers() > 0) + { + mPubRgb.publish(msg->rgb, msg->rgbCameraInfo); + } + } + + if (!msg->depth.header.stamp.isZero()) + { + if (mPubDepth.getTopic().empty()) + { + ros::NodeHandle depth_pnh(mNhP, "depth"); + image_transport::ImageTransport it(depth_pnh); + + mPubDepth = it.advertiseCamera("depth_registered", 1); // depth + NODELET_INFO_STREAM("Advertised on topic " << mPubDepth.getTopic()); + NODELET_INFO_STREAM("Advertised on topic " << mPubDepth.getInfoTopic()); } - if( !msg->imu.header.stamp.isZero() ) { - if( mPubIMU.getTopic().empty() ) { - ros::NodeHandle imu_pnh(mNhP, "imu"); + if (mPubDepth.getNumSubscribers() > 0) + { + mPubDepth.publish(msg->depth, msg->depthCameraInfo); + } + } - mPubIMU = imu_pnh.advertise("data", 1); // IMU - NODELET_INFO_STREAM("Advertised on topic " << mPubIMU.getTopic()); - } + if (!msg->imu.header.stamp.isZero()) + { + if (mPubIMU.getTopic().empty()) + { + ros::NodeHandle imu_pnh(mNhP, "imu"); - if(mPubIMU.getNumSubscribers() > 0 ){ - mPubIMU.publish( msg->imu ); - } + mPubIMU = imu_pnh.advertise("data", 1); 
// IMU + NODELET_INFO_STREAM("Advertised on topic " << mPubIMU.getTopic()); } - if( !msg->mag.header.stamp.isZero() ) { - if( mPubMag.getTopic().empty() ) { - ros::NodeHandle imu_pnh(mNhP, "imu"); + if (mPubIMU.getNumSubscribers() > 0) + { + mPubIMU.publish(msg->imu); + } + } - mPubMag = imu_pnh.advertise("mag", 1); // IMU - NODELET_INFO_STREAM("Advertised on topic " << mPubMag.getTopic()); - } + if (!msg->mag.header.stamp.isZero()) + { + if (mPubMag.getTopic().empty()) + { + ros::NodeHandle imu_pnh(mNhP, "imu"); - if(mPubMag.getNumSubscribers() > 0 ){ - mPubMag.publish( msg->mag ); - } + mPubMag = imu_pnh.advertise("mag", 1); // IMU + NODELET_INFO_STREAM("Advertised on topic " << mPubMag.getTopic()); } + if (mPubMag.getNumSubscribers() > 0) + { + mPubMag.publish(msg->mag); + } + } } -} +} // namespace zed_nodelets diff --git a/zed_nodelets/src/rgbd_sensors_sync_nodelet/include/rgbd_sensor_sync.hpp b/zed_nodelets/src/rgbd_sensors_sync_nodelet/include/rgbd_sensor_sync.hpp index a073b297..0f5d4719 100644 --- a/zed_nodelets/src/rgbd_sensors_sync_nodelet/include/rgbd_sensor_sync.hpp +++ b/zed_nodelets/src/rgbd_sensors_sync_nodelet/include/rgbd_sensor_sync.hpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2020, STEREOLABS. +// Copyright (c) 2023, STEREOLABS. // // All rights reserved. 
// @@ -21,8 +21,8 @@ #ifndef RGBD_SENSOR_SYNC_HPP #define RGBD_SENSOR_SYNC_HPP -#include #include +#include #include #include @@ -30,115 +30,124 @@ #include #include -#include -#include #include +#include +#include #include #include #include "zed_interfaces/RGBDSensors.h" -namespace zed_nodelets { - -class RgbdSensorsSyncNodelet : public nodelet::Nodelet { - +namespace zed_nodelets +{ +class RgbdSensorsSyncNodelet : public nodelet::Nodelet +{ public: - RgbdSensorsSyncNodelet(); - virtual ~RgbdSensorsSyncNodelet(); + RgbdSensorsSyncNodelet(); + virtual ~RgbdSensorsSyncNodelet(); protected: - /* \brief Initialization function called by the Nodelet base class - */ - virtual void onInit(); - - /* \brief Reads parameters from the param server - */ - void readParameters(); - - /* \brief Callback for full topics synchronization - */ - void callbackFull( - const sensor_msgs::ImageConstPtr& rgb, - const sensor_msgs::ImageConstPtr& depth, - const sensor_msgs::CameraInfoConstPtr& rgbCameraInfo, - const sensor_msgs::CameraInfoConstPtr& depthCameraInfo, - const sensor_msgs::ImuConstPtr& imu, - const sensor_msgs::MagneticFieldConstPtr& mag ); - - /* \brief Callback for RGBD topics synchronization - */ - void callbackRGBD( - const sensor_msgs::ImageConstPtr& rgb, - const sensor_msgs::ImageConstPtr& depth, - const sensor_msgs::CameraInfoConstPtr& rgbCameraInfo, - const sensor_msgs::CameraInfoConstPtr& depthCameraInfo ); - - /* \brief Callback for RGBD + IMU topics synchronization - */ - void callbackRGBDIMU( - const sensor_msgs::ImageConstPtr& rgb, - const sensor_msgs::ImageConstPtr& depth, - const sensor_msgs::CameraInfoConstPtr& rgbCameraInfo, - const sensor_msgs::CameraInfoConstPtr& depthCameraInfo, - const sensor_msgs::ImuConstPtr& imu); - - /* \brief Callback for RGBD + Mag topics synchronization - */ - void callbackRGBDMag( - const sensor_msgs::ImageConstPtr& rgb, - const sensor_msgs::ImageConstPtr& depth, - const sensor_msgs::CameraInfoConstPtr& rgbCameraInfo, - const 
sensor_msgs::CameraInfoConstPtr& depthCameraInfo, - const sensor_msgs::MagneticFieldConstPtr& mag ); + /*! \brief Initialization function called by the Nodelet base class + */ + virtual void onInit(); + + /*! \brief Reads parameters from the param server + */ + void readParameters(); + + /*! \brief Callback for full topics synchronization + */ + void callbackFull(const sensor_msgs::ImageConstPtr& rgb, const sensor_msgs::ImageConstPtr& depth, + const sensor_msgs::CameraInfoConstPtr& rgbCameraInfo, + const sensor_msgs::CameraInfoConstPtr& depthCameraInfo, const sensor_msgs::ImuConstPtr& imu, + const sensor_msgs::MagneticFieldConstPtr& mag); + + /*! \brief Callback for RGBD topics synchronization + */ + void callbackRGBD(const sensor_msgs::ImageConstPtr& rgb, const sensor_msgs::ImageConstPtr& depth, + const sensor_msgs::CameraInfoConstPtr& rgbCameraInfo, + const sensor_msgs::CameraInfoConstPtr& depthCameraInfo); + + /*! \brief Callback for RGBD + IMU topics synchronization + */ + void callbackRGBDIMU(const sensor_msgs::ImageConstPtr& rgb, const sensor_msgs::ImageConstPtr& depth, + const sensor_msgs::CameraInfoConstPtr& rgbCameraInfo, + const sensor_msgs::CameraInfoConstPtr& depthCameraInfo, const sensor_msgs::ImuConstPtr& imu); + + /*! 
\brief Callback for RGBD + Mag topics synchronization + */ + void callbackRGBDMag(const sensor_msgs::ImageConstPtr& rgb, const sensor_msgs::ImageConstPtr& depth, + const sensor_msgs::CameraInfoConstPtr& rgbCameraInfo, + const sensor_msgs::CameraInfoConstPtr& depthCameraInfo, + const sensor_msgs::MagneticFieldConstPtr& mag); private: - // Node handlers - ros::NodeHandle mNh; // Node handler - ros::NodeHandle mNhP; // Private Node handler - - // Publishers - ros::Publisher mPubRaw; - - // Subscribers - image_transport::SubscriberFilter mSubRgbImage; - image_transport::SubscriberFilter mSubDepthImage; - message_filters::Subscriber mSubRgbCamInfo; - message_filters::Subscriber mSubDepthCamInfo; - message_filters::Subscriber mSubImu; - message_filters::Subscriber mSubMag; - - // Approx sync policies - typedef message_filters::sync_policies::ApproximateTime ApproxFullSyncPolicy; - typedef message_filters::sync_policies::ApproximateTime ApproxRgbdImuSyncPolicy; - typedef message_filters::sync_policies::ApproximateTime ApproxRgbdMagSyncPolicy; - typedef message_filters::sync_policies::ApproximateTime ApproxRgbdSyncPolicy; - message_filters::Synchronizer* mApproxFullSync = nullptr; - message_filters::Synchronizer* mApproxRgbdImuSync = nullptr; - message_filters::Synchronizer* mApproxRgbdMagSync = nullptr; - message_filters::Synchronizer* mApproxRgbdSync = nullptr; - - // Exact sync policies - typedef message_filters::sync_policies::ExactTime ExactFullSyncPolicy; - typedef message_filters::sync_policies::ExactTime ExactRgbdImuSyncPolicy; - typedef message_filters::sync_policies::ExactTime ExactRgbdMagSyncPolicy; - typedef message_filters::sync_policies::ExactTime ExactRgbdSyncPolicy; - message_filters::Synchronizer* mExactFullSync = nullptr; - message_filters::Synchronizer* mExactRgbdImuSync = nullptr; - message_filters::Synchronizer* mExactRgbdMagSync = nullptr; - message_filters::Synchronizer* mExactRgbdSync = nullptr; - - // Params - std::string mZedNodeletName = 
"zed_node"; - bool mUseApproxSync = true; - bool mUseImu = true; - bool mUseMag = true; - int mQueueSize = 50; + // Node handlers + ros::NodeHandle mNh; // Node handler + ros::NodeHandle mNhP; // Private Node handler + + // Publishers + ros::Publisher mPubRaw; + + // Subscribers + image_transport::SubscriberFilter mSubRgbImage; + image_transport::SubscriberFilter mSubDepthImage; + message_filters::Subscriber mSubRgbCamInfo; + message_filters::Subscriber mSubDepthCamInfo; + message_filters::Subscriber mSubImu; + message_filters::Subscriber mSubMag; + + // Approx sync policies + typedef message_filters::sync_policies::ApproximateTime + ApproxFullSyncPolicy; + typedef message_filters::sync_policies::ApproximateTime< + sensor_msgs::Image, sensor_msgs::Image, sensor_msgs::CameraInfo, sensor_msgs::CameraInfo, sensor_msgs::Imu> + ApproxRgbdImuSyncPolicy; + typedef message_filters::sync_policies::ApproximateTime + ApproxRgbdMagSyncPolicy; + typedef message_filters::sync_policies::ApproximateTime + ApproxRgbdSyncPolicy; + message_filters::Synchronizer* mApproxFullSync = nullptr; + message_filters::Synchronizer* mApproxRgbdImuSync = nullptr; + message_filters::Synchronizer* mApproxRgbdMagSync = nullptr; + message_filters::Synchronizer* mApproxRgbdSync = nullptr; + + // Exact sync policies + typedef message_filters::sync_policies::ExactTime + ExactFullSyncPolicy; + typedef message_filters::sync_policies::ExactTime + ExactRgbdImuSyncPolicy; + typedef message_filters::sync_policies::ExactTime + ExactRgbdMagSyncPolicy; + typedef message_filters::sync_policies::ExactTime + ExactRgbdSyncPolicy; + message_filters::Synchronizer* mExactFullSync = nullptr; + message_filters::Synchronizer* mExactRgbdImuSync = nullptr; + message_filters::Synchronizer* mExactRgbdMagSync = nullptr; + message_filters::Synchronizer* mExactRgbdSync = nullptr; + + // Params + std::string mZedNodeletName = "zed_node"; + bool mUseApproxSync = true; + bool mUseImu = true; + bool mUseMag = true; + int mQueueSize 
= 50; }; -} +} // namespace zed_nodelets #include PLUGINLIB_EXPORT_CLASS(zed_nodelets::RgbdSensorsSyncNodelet, nodelet::Nodelet) -#endif // RGBD_SENSOR_SYNC_HPP +#endif // RGBD_SENSOR_SYNC_HPP diff --git a/zed_nodelets/src/rgbd_sensors_sync_nodelet/src/rgbd_sensor_sync.cpp b/zed_nodelets/src/rgbd_sensors_sync_nodelet/src/rgbd_sensor_sync.cpp index e3147857..b2b329b6 100644 --- a/zed_nodelets/src/rgbd_sensors_sync_nodelet/src/rgbd_sensor_sync.cpp +++ b/zed_nodelets/src/rgbd_sensors_sync_nodelet/src/rgbd_sensor_sync.cpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2020, STEREOLABS. +// Copyright (c) 2023, STEREOLABS. // // All rights reserved. // @@ -26,423 +26,426 @@ #include -namespace zed_nodelets { - -RgbdSensorsSyncNodelet::RgbdSensorsSyncNodelet() { - +namespace zed_nodelets +{ +RgbdSensorsSyncNodelet::RgbdSensorsSyncNodelet() +{ } -RgbdSensorsSyncNodelet::~RgbdSensorsSyncNodelet() { - if(mApproxFullSync) - delete mApproxFullSync; - if(mApproxRgbdImuSync) - delete mApproxRgbdImuSync; - if(mApproxRgbdMagSync) - delete mApproxRgbdMagSync; - if(mApproxRgbdSync) - delete mApproxRgbdSync; - - if(mExactFullSync) - delete mExactFullSync; - if(mExactRgbdImuSync) - delete mExactRgbdImuSync; - if(mExactRgbdMagSync) - delete mExactRgbdMagSync; - if(mExactRgbdSync) - delete mExactRgbdSync; +RgbdSensorsSyncNodelet::~RgbdSensorsSyncNodelet() +{ + if (mApproxFullSync) + delete mApproxFullSync; + if (mApproxRgbdImuSync) + delete mApproxRgbdImuSync; + if (mApproxRgbdMagSync) + delete mApproxRgbdMagSync; + if (mApproxRgbdSync) + delete mApproxRgbdSync; + + if (mExactFullSync) + delete mExactFullSync; + if (mExactRgbdImuSync) + delete mExactRgbdImuSync; + if (mExactRgbdMagSync) + delete mExactRgbdMagSync; + if (mExactRgbdSync) + delete mExactRgbdSync; } -void RgbdSensorsSyncNodelet::onInit() { - // Node handlers - mNh = getNodeHandle(); - mNhP = getPrivateNodeHandle(); +void RgbdSensorsSyncNodelet::onInit() +{ + 
// Node handlers + mNh = getNodeHandle(); + mNhP = getPrivateNodeHandle(); #ifndef NDEBUG - if (ros::console::set_logger_level(ROSCONSOLE_DEFAULT_NAME, - ros::console::levels::Debug)) { - ros::console::notifyLoggerLevelsChanged(); - } + if (ros::console::set_logger_level(ROSCONSOLE_DEFAULT_NAME, ros::console::levels::Debug)) + { + ros::console::notifyLoggerLevelsChanged(); + } #endif - NODELET_INFO( "********** Starting nodelet '%s' **********",getName().c_str() ); - - readParameters(); - - mPubRaw = mNhP.advertise("rgbd_sens", 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubRaw.getTopic()); - - if( mUseApproxSync ) { - NODELET_DEBUG( "Using Approximate Time sync"); - - if( mUseImu && mUseMag ) { - mApproxFullSync = new message_filters::Synchronizer(ApproxFullSyncPolicy(mQueueSize), - mSubRgbImage, mSubDepthImage, - mSubRgbCamInfo, mSubDepthCamInfo, - mSubImu,mSubMag); - mApproxFullSync->registerCallback(boost::bind(&RgbdSensorsSyncNodelet::callbackFull, this, - _1, _2, _3, _4, _5, _6)); - - NODELET_DEBUG("RGB + Depth + IMU + Magnetometer Sync" ); - - } else if( mUseImu ) { - NODELET_DEBUG("RGB + Depth + IMU Sync" ); - - mApproxRgbdImuSync = new message_filters::Synchronizer(ApproxRgbdImuSyncPolicy(mQueueSize), - mSubRgbImage, mSubDepthImage, - mSubRgbCamInfo, mSubDepthCamInfo, - mSubImu); - mApproxRgbdImuSync->registerCallback(boost::bind(&RgbdSensorsSyncNodelet::callbackRGBDIMU, this, - _1, _2, _3, _4, _5)); - - - } else if( mUseMag ) { - NODELET_DEBUG("RGB + Depth + Magnetometer Sync" ); + NODELET_INFO("********** Starting nodelet '%s' **********", getName().c_str()); - mApproxRgbdMagSync = new message_filters::Synchronizer(ApproxRgbdMagSyncPolicy(mQueueSize), - mSubRgbImage, mSubDepthImage, - mSubRgbCamInfo, mSubDepthCamInfo, - mSubMag); - mApproxRgbdMagSync->registerCallback(boost::bind(&RgbdSensorsSyncNodelet::callbackRGBDMag, this, - _1, _2, _3, _4, _5)); + readParameters(); - } else { - NODELET_DEBUG("RGB + Depth Sync" ); + mPubRaw = 
mNhP.advertise("rgbd_sens", 1); + NODELET_INFO_STREAM("Advertised on topic " << mPubRaw.getTopic()); - mApproxRgbdSync = new message_filters::Synchronizer(ApproxRgbdSyncPolicy(mQueueSize), - mSubRgbImage, mSubDepthImage, - mSubRgbCamInfo, mSubDepthCamInfo); - mApproxRgbdSync->registerCallback(boost::bind(&RgbdSensorsSyncNodelet::callbackRGBD, this, - _1, _2, _3, _4)); - } + if (mUseApproxSync) + { + NODELET_DEBUG("Using Approximate Time sync"); - } else { - NODELET_DEBUG( "Using Exact Time sync"); - - if( mUseImu && mUseMag ) { - mExactFullSync = new message_filters::Synchronizer(ExactFullSyncPolicy(mQueueSize), - mSubRgbImage, mSubDepthImage, - mSubRgbCamInfo, mSubDepthCamInfo, - mSubImu,mSubMag); - mExactFullSync->registerCallback(boost::bind(&RgbdSensorsSyncNodelet::callbackFull, this, - _1, _2, _3, _4, _5, _6)); - - NODELET_DEBUG("RGB + Depth + IMU + Magnetometer Sync" ); - - } else if( mUseImu ) { - NODELET_DEBUG("RGB + Depth + IMU Sync" ); - - mExactRgbdImuSync = new message_filters::Synchronizer(ExactRgbdImuSyncPolicy(mQueueSize), - mSubRgbImage, mSubDepthImage, - mSubRgbCamInfo, mSubDepthCamInfo, - mSubImu); - mExactRgbdImuSync->registerCallback(boost::bind(&RgbdSensorsSyncNodelet::callbackRGBDIMU, this, - _1, _2, _3, _4, _5)); - - - } else if( mUseMag ) { - NODELET_DEBUG("RGB + Depth + Magnetometer Sync" ); - - mExactRgbdMagSync = new message_filters::Synchronizer(ExactRgbdMagSyncPolicy(mQueueSize), - mSubRgbImage, mSubDepthImage, - mSubRgbCamInfo, mSubDepthCamInfo, - mSubMag); - mExactRgbdMagSync->registerCallback(boost::bind(&RgbdSensorsSyncNodelet::callbackRGBDMag, this, - _1, _2, _3, _4, _5)); - - } else { - NODELET_DEBUG("RGB + Depth Sync" ); + if (mUseImu && mUseMag) + { + mApproxFullSync = new message_filters::Synchronizer( + ApproxFullSyncPolicy(mQueueSize), mSubRgbImage, mSubDepthImage, mSubRgbCamInfo, mSubDepthCamInfo, mSubImu, + mSubMag); + mApproxFullSync->registerCallback( + boost::bind(&RgbdSensorsSyncNodelet::callbackFull, this, _1, _2, _3, 
_4, _5, _6)); - mExactRgbdSync = new message_filters::Synchronizer(ExactRgbdSyncPolicy(mQueueSize), - mSubRgbImage, mSubDepthImage, - mSubRgbCamInfo, mSubDepthCamInfo); - mExactRgbdSync->registerCallback(boost::bind(&RgbdSensorsSyncNodelet::callbackRGBD, this, - _1, _2, _3, _4)); - } + NODELET_DEBUG("RGB + Depth + IMU + Magnetometer Sync"); } + else if (mUseImu) + { + NODELET_DEBUG("RGB + Depth + IMU Sync"); - // Create remappings - ros::NodeHandle rgb_nh(mNh, mZedNodeletName+"/rgb"); - ros::NodeHandle depth_nh(mNh, mZedNodeletName+"/depth"); - ros::NodeHandle rgb_pnh(mNhP, mZedNodeletName+"/rgb"); - ros::NodeHandle depth_pnh(mNhP, mZedNodeletName+"/depth"); - ros::NodeHandle imu_nh(mNh, mZedNodeletName+"/imu"); - - image_transport::ImageTransport rgb_it(rgb_nh); - image_transport::ImageTransport depth_it(depth_nh); - - image_transport::TransportHints hintsRgb("raw", ros::TransportHints(), rgb_pnh); - image_transport::TransportHints hintsDepth("raw", ros::TransportHints(), depth_pnh); - - mSubRgbImage.subscribe(rgb_it, rgb_nh.resolveName("image_rect_color"), 1, hintsRgb); - mSubDepthImage.subscribe(depth_it, depth_nh.resolveName("depth_registered"), 1, hintsDepth); - mSubRgbCamInfo.subscribe(rgb_nh, "camera_info", 1); - mSubDepthCamInfo.subscribe(depth_nh, "camera_info", 1); - - NODELET_INFO_STREAM( " * Subscribed to topic: " << mSubRgbImage.getTopic().c_str()); - NODELET_INFO_STREAM( " * Subscribed to topic: " << mSubRgbCamInfo.getTopic().c_str()); - NODELET_INFO_STREAM( " * Subscribed to topic: " << mSubDepthImage.getTopic().c_str()); - NODELET_INFO_STREAM( " * Subscribed to topic: " << mSubDepthCamInfo.getTopic().c_str()); - - if( mUseImu ) { - mSubImu.subscribe(imu_nh,"data",1); - NODELET_INFO_STREAM( " * Subscribed to topic: " << mSubImu.getTopic().c_str()); + mApproxRgbdImuSync = new message_filters::Synchronizer( + ApproxRgbdImuSyncPolicy(mQueueSize), mSubRgbImage, mSubDepthImage, mSubRgbCamInfo, mSubDepthCamInfo, mSubImu); + 
mApproxRgbdImuSync->registerCallback( + boost::bind(&RgbdSensorsSyncNodelet::callbackRGBDIMU, this, _1, _2, _3, _4, _5)); } + else if (mUseMag) + { + NODELET_DEBUG("RGB + Depth + Magnetometer Sync"); - if(mUseMag) { - mSubMag.subscribe(imu_nh,"mag",1); - NODELET_INFO_STREAM( " * Subscribed to topic: " << mSubMag.getTopic().c_str()); + mApproxRgbdMagSync = new message_filters::Synchronizer( + ApproxRgbdMagSyncPolicy(mQueueSize), mSubRgbImage, mSubDepthImage, mSubRgbCamInfo, mSubDepthCamInfo, mSubMag); + mApproxRgbdMagSync->registerCallback( + boost::bind(&RgbdSensorsSyncNodelet::callbackRGBDMag, this, _1, _2, _3, _4, _5)); } -} - -void RgbdSensorsSyncNodelet::readParameters() { - NODELET_INFO("*** PARAMETERS [%s]***", getName().c_str()); - - mNhP.getParam("zed_nodelet_name", mZedNodeletName); - mNhP.getParam("approx_sync", mUseApproxSync); - mNhP.getParam("queue_size", mQueueSize); - mNhP.getParam("sub_imu", mUseImu); - mNhP.getParam("sub_mag", mUseMag); - - NODELET_INFO(" * zed_nodelet_name -> %s", mZedNodeletName.c_str()); - NODELET_INFO(" * approx_sync -> %s", mUseApproxSync?"true":"false"); - NODELET_INFO(" * queue_size -> %d", mQueueSize); - NODELET_INFO(" * sub_imu -> %s", mUseImu?"true":"false"); - NODELET_INFO(" * sub_mag -> %s", mUseMag?"true":"false"); -} - -void RgbdSensorsSyncNodelet::callbackRGBD(const sensor_msgs::ImageConstPtr &rgb, const sensor_msgs::ImageConstPtr &depth, - const sensor_msgs::CameraInfoConstPtr &rgbCameraInfo, - const sensor_msgs::CameraInfoConstPtr &depthCameraInfo) { - // ----> Frequency calculation - static std::chrono::steady_clock::time_point last_time = std::chrono::steady_clock::now(); - std::chrono::steady_clock::time_point now = std::chrono::steady_clock::now(); - - double elapsed_usec = std::chrono::duration_cast(now - last_time).count(); - last_time = now; - - double freq = 1e6/elapsed_usec; - NODELET_DEBUG( "Freq: %.2f", freq); - // <---- Frequency calculation - - double rgbStamp = rgb->header.stamp.toSec(); - double 
depthStamp = depth->header.stamp.toSec(); - double rgbInfoStamp = rgbCameraInfo->header.stamp.toSec(); - double depthInfoStamp = depthCameraInfo->header.stamp.toSec(); - - uint32_t subraw = mPubRaw.getNumSubscribers(); + else + { + NODELET_DEBUG("RGB + Depth Sync"); - if(subraw==0) { - return; + mApproxRgbdSync = new message_filters::Synchronizer( + ApproxRgbdSyncPolicy(mQueueSize), mSubRgbImage, mSubDepthImage, mSubRgbCamInfo, mSubDepthCamInfo); + mApproxRgbdSync->registerCallback(boost::bind(&RgbdSensorsSyncNodelet::callbackRGBD, this, _1, _2, _3, _4)); } + } + else + { + NODELET_DEBUG("Using Exact Time sync"); - zed_interfaces::RGBDSensorsPtr outSyncMsg = boost::make_shared(); - - outSyncMsg->header.frame_id = rgb->header.frame_id; - outSyncMsg->header.stamp = rgb->header.stamp; - outSyncMsg->rgbCameraInfo = *rgbCameraInfo; - outSyncMsg->depthCameraInfo = *depthCameraInfo; - - outSyncMsg->rgb = *rgb; - outSyncMsg->depth = *depth; - - mPubRaw.publish(outSyncMsg); - - if( rgbStamp != rgb->header.stamp.toSec() ) + if (mUseImu && mUseMag) { - NODELET_ERROR("Input stamps changed between the beginning and the end of the callback! Make " - "sure the node publishing the topics doesn't override the same data after publishing them. A " - "solution is to use this node within another nodelet manager. 
"); - NODELET_ERROR("Stamps: " - "rgb=%f->%f", - rgbStamp, rgb->header.stamp.toSec()); - } -} - -void RgbdSensorsSyncNodelet::callbackRGBDIMU(const sensor_msgs::ImageConstPtr &rgb, const sensor_msgs::ImageConstPtr &depth, - const sensor_msgs::CameraInfoConstPtr &rgbCameraInfo, - const sensor_msgs::CameraInfoConstPtr &depthCameraInfo, - const sensor_msgs::ImuConstPtr &imu) { - // ----> Frequency calculation - static std::chrono::steady_clock::time_point last_time = std::chrono::steady_clock::now(); - std::chrono::steady_clock::time_point now = std::chrono::steady_clock::now(); - - double elapsed_usec = std::chrono::duration_cast(now - last_time).count(); - last_time = now; + mExactFullSync = new message_filters::Synchronizer( + ExactFullSyncPolicy(mQueueSize), mSubRgbImage, mSubDepthImage, mSubRgbCamInfo, mSubDepthCamInfo, mSubImu, + mSubMag); + mExactFullSync->registerCallback( + boost::bind(&RgbdSensorsSyncNodelet::callbackFull, this, _1, _2, _3, _4, _5, _6)); - double freq = 1e6/elapsed_usec; - NODELET_DEBUG( "Freq: %.2f", freq); - // <---- Frequency calculation - - double rgbStamp = rgb->header.stamp.toSec(); - double depthStamp = depth->header.stamp.toSec(); - double rgbInfoStamp = rgbCameraInfo->header.stamp.toSec(); - double depthInfoStamp = depthCameraInfo->header.stamp.toSec(); - double imuStamp = imu->header.stamp.toSec(); - - double diff = rgbStamp-imuStamp; - - NODELET_DEBUG( "Callback RGBD+IMU - RGB TS: %.9f - IMU TS: %.9f - Diff: %.9f", - rgbStamp, imuStamp, diff); - - uint32_t subraw = mPubRaw.getNumSubscribers(); - - if(subraw==0) { - return; + NODELET_DEBUG("RGB + Depth + IMU + Magnetometer Sync"); } - - zed_interfaces::RGBDSensorsPtr outSyncMsg = boost::make_shared(); - - outSyncMsg->header.frame_id = rgb->header.frame_id; - outSyncMsg->header.stamp = rgb->header.stamp; - outSyncMsg->rgbCameraInfo = *rgbCameraInfo; - outSyncMsg->depthCameraInfo = *depthCameraInfo; - - outSyncMsg->rgb = *rgb; - outSyncMsg->depth = *depth; - outSyncMsg->imu = *imu; - 
- mPubRaw.publish(outSyncMsg); - - if( rgbStamp != rgb->header.stamp.toSec() || - imuStamp != imu->header.stamp.toSec() ) + else if (mUseImu) { - NODELET_ERROR("Input stamps changed between the beginning and the end of the callback! Make " - "sure the node publishing the topics doesn't override the same data after publishing them. A " - "solution is to use this node within another nodelet manager. "); - NODELET_ERROR("Stamps: " - "rgb=%f->%f IMU=%f->%f", - rgbStamp, rgb->header.stamp.toSec(), - imuStamp, imu->header.stamp.toSec()); - } - -} - -void RgbdSensorsSyncNodelet::callbackRGBDMag(const sensor_msgs::ImageConstPtr &rgb, const sensor_msgs::ImageConstPtr &depth, - const sensor_msgs::CameraInfoConstPtr &rgbCameraInfo, - const sensor_msgs::CameraInfoConstPtr &depthCameraInfo, - const sensor_msgs::MagneticFieldConstPtr &mag) { - // ----> Frequency calculation - static std::chrono::steady_clock::time_point last_time = std::chrono::steady_clock::now(); - std::chrono::steady_clock::time_point now = std::chrono::steady_clock::now(); - - double elapsed_usec = std::chrono::duration_cast(now - last_time).count(); - last_time = now; - - double freq = 1e6/elapsed_usec; - NODELET_DEBUG( "Freq: %.2f", freq); - // <---- Frequency calculation - - - double rgbStamp = rgb->header.stamp.toSec(); - double depthStamp = depth->header.stamp.toSec(); - double rgbInfoStamp = rgbCameraInfo->header.stamp.toSec(); - double depthInfoStamp = depthCameraInfo->header.stamp.toSec(); - double magStamp = mag->header.stamp.toSec(); + NODELET_DEBUG("RGB + Depth + IMU Sync"); - double diffMag = rgbStamp-magStamp; - - NODELET_DEBUG( "Callback rgbd+mag - RGB TS: %.9f - MAG TS: %.9f - Diff MAG: %.9f", - rgbStamp, magStamp, diffMag); - - uint32_t subraw = mPubRaw.getNumSubscribers(); - - if(subraw==0) { - return; + mExactRgbdImuSync = new message_filters::Synchronizer( + ExactRgbdImuSyncPolicy(mQueueSize), mSubRgbImage, mSubDepthImage, mSubRgbCamInfo, mSubDepthCamInfo, mSubImu); + 
mExactRgbdImuSync->registerCallback( + boost::bind(&RgbdSensorsSyncNodelet::callbackRGBDIMU, this, _1, _2, _3, _4, _5)); } - - zed_interfaces::RGBDSensorsPtr outSyncMsg = boost::make_shared(); - - outSyncMsg->header.frame_id = rgb->header.frame_id; - outSyncMsg->header.stamp = rgb->header.stamp; - outSyncMsg->rgbCameraInfo = *rgbCameraInfo; - outSyncMsg->depthCameraInfo = *depthCameraInfo; - - outSyncMsg->rgb = *rgb; - outSyncMsg->depth = *depth; - outSyncMsg->mag = *mag; - - mPubRaw.publish(outSyncMsg); - - if( rgbStamp != rgb->header.stamp.toSec() || - magStamp != mag->header.stamp.toSec() ) + else if (mUseMag) { - NODELET_ERROR("Input stamps changed between the beginning and the end of the callback! Make " - "sure the node publishing the topics doesn't override the same data after publishing them. A " - "solution is to use this node within another nodelet manager. "); - NODELET_ERROR("Stamps: " - "rgb=%f->%f MAG=%f->%f", - rgbStamp, rgb->header.stamp.toSec(), - magStamp, mag->header.stamp.toSec()); - } - - -} - -void RgbdSensorsSyncNodelet::callbackFull(const sensor_msgs::ImageConstPtr &rgb, const sensor_msgs::ImageConstPtr &depth, - const sensor_msgs::CameraInfoConstPtr &rgbCameraInfo, - const sensor_msgs::CameraInfoConstPtr &depthCameraInfo, - const sensor_msgs::ImuConstPtr &imu, - const sensor_msgs::MagneticFieldConstPtr &mag) { - // ----> Frequency calculation - static std::chrono::steady_clock::time_point last_time = std::chrono::steady_clock::now(); - std::chrono::steady_clock::time_point now = std::chrono::steady_clock::now(); - - double elapsed_usec = std::chrono::duration_cast(now - last_time).count(); - last_time = now; - - double freq = 1e6/elapsed_usec; - NODELET_DEBUG( "Freq: %.2f", freq); - // <---- Frequency calculation - - double rgbStamp = rgb->header.stamp.toSec(); - double depthStamp = depth->header.stamp.toSec(); - double rgbInfoStamp = rgbCameraInfo->header.stamp.toSec(); - double depthInfoStamp = depthCameraInfo->header.stamp.toSec(); - 
double imuStamp = imu->header.stamp.toSec(); - double magStamp = mag->header.stamp.toSec(); - - double diffImu = rgbStamp-imuStamp; - double diffMag = rgbStamp-magStamp; - - NODELET_DEBUG( "Callback FULL - RGB TS: %.9f - IMU TS: %.9f - Diff IMU: %.9f", - rgbStamp, imuStamp, diffImu); - NODELET_DEBUG( "Callback FULL - RGB TS: %.9f - MAG TS: %.9f - Diff MAG: %.9f", - rgbStamp, magStamp, diffMag); + NODELET_DEBUG("RGB + Depth + Magnetometer Sync"); - - uint32_t subraw = mPubRaw.getNumSubscribers(); - - if(subraw==0) { - return; + mExactRgbdMagSync = new message_filters::Synchronizer( + ExactRgbdMagSyncPolicy(mQueueSize), mSubRgbImage, mSubDepthImage, mSubRgbCamInfo, mSubDepthCamInfo, mSubMag); + mExactRgbdMagSync->registerCallback( + boost::bind(&RgbdSensorsSyncNodelet::callbackRGBDMag, this, _1, _2, _3, _4, _5)); } + else + { + NODELET_DEBUG("RGB + Depth Sync"); - zed_interfaces::RGBDSensorsPtr outSyncMsg = boost::make_shared(); + mExactRgbdSync = new message_filters::Synchronizer( + ExactRgbdSyncPolicy(mQueueSize), mSubRgbImage, mSubDepthImage, mSubRgbCamInfo, mSubDepthCamInfo); + mExactRgbdSync->registerCallback(boost::bind(&RgbdSensorsSyncNodelet::callbackRGBD, this, _1, _2, _3, _4)); + } + } + + // Create remappings + ros::NodeHandle rgb_nh(mNh, mZedNodeletName + "/rgb"); + ros::NodeHandle depth_nh(mNh, mZedNodeletName + "/depth"); + ros::NodeHandle rgb_pnh(mNhP, mZedNodeletName + "/rgb"); + ros::NodeHandle depth_pnh(mNhP, mZedNodeletName + "/depth"); + ros::NodeHandle imu_nh(mNh, mZedNodeletName + "/imu"); + + image_transport::ImageTransport rgb_it(rgb_nh); + image_transport::ImageTransport depth_it(depth_nh); + + image_transport::TransportHints hintsRgb("raw", ros::TransportHints(), rgb_pnh); + image_transport::TransportHints hintsDepth("raw", ros::TransportHints(), depth_pnh); + + mSubRgbImage.subscribe(rgb_it, rgb_nh.resolveName("image_rect_color"), 1, hintsRgb); + mSubDepthImage.subscribe(depth_it, depth_nh.resolveName("depth_registered"), 1, hintsDepth); + 
mSubRgbCamInfo.subscribe(rgb_nh, "camera_info", 1); + mSubDepthCamInfo.subscribe(depth_nh, "camera_info", 1); + + NODELET_INFO_STREAM(" * Subscribed to topic: " << mSubRgbImage.getTopic().c_str()); + NODELET_INFO_STREAM(" * Subscribed to topic: " << mSubRgbCamInfo.getTopic().c_str()); + NODELET_INFO_STREAM(" * Subscribed to topic: " << mSubDepthImage.getTopic().c_str()); + NODELET_INFO_STREAM(" * Subscribed to topic: " << mSubDepthCamInfo.getTopic().c_str()); + + if (mUseImu) + { + mSubImu.subscribe(imu_nh, "data", 1); + NODELET_INFO_STREAM(" * Subscribed to topic: " << mSubImu.getTopic().c_str()); + } + + if (mUseMag) + { + mSubMag.subscribe(imu_nh, "mag", 1); + NODELET_INFO_STREAM(" * Subscribed to topic: " << mSubMag.getTopic().c_str()); + } +} - outSyncMsg->header.frame_id = rgb->header.frame_id; - outSyncMsg->header.stamp = rgb->header.stamp; - outSyncMsg->rgbCameraInfo = *rgbCameraInfo; - outSyncMsg->depthCameraInfo = *depthCameraInfo; +void RgbdSensorsSyncNodelet::readParameters() +{ + NODELET_INFO("*** PARAMETERS [%s]***", getName().c_str()); + + mNhP.getParam("zed_nodelet_name", mZedNodeletName); + mNhP.getParam("approx_sync", mUseApproxSync); + mNhP.getParam("queue_size", mQueueSize); + mNhP.getParam("sub_imu", mUseImu); + mNhP.getParam("sub_mag", mUseMag); + + NODELET_INFO(" * zed_nodelet_name -> %s", mZedNodeletName.c_str()); + NODELET_INFO(" * approx_sync -> %s", mUseApproxSync ? "true" : "false"); + NODELET_INFO(" * queue_size -> %d", mQueueSize); + NODELET_INFO(" * sub_imu -> %s", mUseImu ? "true" : "false"); + NODELET_INFO(" * sub_mag -> %s", mUseMag ? 
"true" : "false"); +} - outSyncMsg->rgb = *rgb; - outSyncMsg->depth = *depth; - outSyncMsg->imu = *imu; - outSyncMsg->mag = *mag; +void RgbdSensorsSyncNodelet::callbackRGBD(const sensor_msgs::ImageConstPtr& rgb, + const sensor_msgs::ImageConstPtr& depth, + const sensor_msgs::CameraInfoConstPtr& rgbCameraInfo, + const sensor_msgs::CameraInfoConstPtr& depthCameraInfo) +{ + // ----> Frequency calculation + static std::chrono::steady_clock::time_point last_time = std::chrono::steady_clock::now(); + std::chrono::steady_clock::time_point now = std::chrono::steady_clock::now(); + + double elapsed_usec = std::chrono::duration_cast(now - last_time).count(); + last_time = now; + + double freq = 1e6 / elapsed_usec; + NODELET_DEBUG("Freq: %.2f", freq); + // <---- Frequency calculation + + double rgbStamp = rgb->header.stamp.toSec(); + double depthStamp = depth->header.stamp.toSec(); + double rgbInfoStamp = rgbCameraInfo->header.stamp.toSec(); + double depthInfoStamp = depthCameraInfo->header.stamp.toSec(); + + uint32_t subraw = mPubRaw.getNumSubscribers(); + + if (subraw == 0) + { + return; + } + + zed_interfaces::RGBDSensorsPtr outSyncMsg = boost::make_shared(); + + outSyncMsg->header.frame_id = rgb->header.frame_id; + outSyncMsg->header.stamp = rgb->header.stamp; + outSyncMsg->rgbCameraInfo = *rgbCameraInfo; + outSyncMsg->depthCameraInfo = *depthCameraInfo; + + outSyncMsg->rgb = *rgb; + outSyncMsg->depth = *depth; + + mPubRaw.publish(outSyncMsg); + + if (rgbStamp != rgb->header.stamp.toSec()) + { + NODELET_ERROR( + "Input stamps changed between the beginning and the end of the callback! Make " + "sure the node publishing the topics doesn't override the same data after publishing them. A " + "solution is to use this node within another nodelet manager. 
"); + NODELET_ERROR( + "Stamps: " + "rgb=%f->%f", + rgbStamp, rgb->header.stamp.toSec()); + } +} - mPubRaw.publish(outSyncMsg); +void RgbdSensorsSyncNodelet::callbackRGBDIMU(const sensor_msgs::ImageConstPtr& rgb, + const sensor_msgs::ImageConstPtr& depth, + const sensor_msgs::CameraInfoConstPtr& rgbCameraInfo, + const sensor_msgs::CameraInfoConstPtr& depthCameraInfo, + const sensor_msgs::ImuConstPtr& imu) +{ + // ----> Frequency calculation + static std::chrono::steady_clock::time_point last_time = std::chrono::steady_clock::now(); + std::chrono::steady_clock::time_point now = std::chrono::steady_clock::now(); + + double elapsed_usec = std::chrono::duration_cast(now - last_time).count(); + last_time = now; + + double freq = 1e6 / elapsed_usec; + NODELET_DEBUG("Freq: %.2f", freq); + // <---- Frequency calculation + + double rgbStamp = rgb->header.stamp.toSec(); + double depthStamp = depth->header.stamp.toSec(); + double rgbInfoStamp = rgbCameraInfo->header.stamp.toSec(); + double depthInfoStamp = depthCameraInfo->header.stamp.toSec(); + double imuStamp = imu->header.stamp.toSec(); + + double diff = rgbStamp - imuStamp; + + NODELET_DEBUG("Callback RGBD+IMU - RGB TS: %.9f - IMU TS: %.9f - Diff: %.9f", rgbStamp, imuStamp, diff); + + uint32_t subraw = mPubRaw.getNumSubscribers(); + + if (subraw == 0) + { + return; + } + + zed_interfaces::RGBDSensorsPtr outSyncMsg = boost::make_shared(); + + outSyncMsg->header.frame_id = rgb->header.frame_id; + outSyncMsg->header.stamp = rgb->header.stamp; + outSyncMsg->rgbCameraInfo = *rgbCameraInfo; + outSyncMsg->depthCameraInfo = *depthCameraInfo; + + outSyncMsg->rgb = *rgb; + outSyncMsg->depth = *depth; + outSyncMsg->imu = *imu; + + mPubRaw.publish(outSyncMsg); + + if (rgbStamp != rgb->header.stamp.toSec() || imuStamp != imu->header.stamp.toSec()) + { + NODELET_ERROR( + "Input stamps changed between the beginning and the end of the callback! 
Make " + "sure the node publishing the topics doesn't override the same data after publishing them. A " + "solution is to use this node within another nodelet manager. "); + NODELET_ERROR( + "Stamps: " + "rgb=%f->%f IMU=%f->%f", + rgbStamp, rgb->header.stamp.toSec(), imuStamp, imu->header.stamp.toSec()); + } +} - if( rgbStamp != rgb->header.stamp.toSec() || - imuStamp != imu->header.stamp.toSec() || - magStamp != mag->header.stamp.toSec() ) - { - NODELET_ERROR("Input stamps changed between the beginning and the end of the callback! Make " - "sure the node publishing the topics doesn't override the same data after publishing them. A " - "solution is to use this node within another nodelet manager. "); - NODELET_ERROR("Stamps: " - "rgb=%f->%f IMU=%f->%f MAG=%f->%f", - rgbStamp, rgb->header.stamp.toSec(), - imuStamp, imu->header.stamp.toSec(), - magStamp, mag->header.stamp.toSec()); - } +void RgbdSensorsSyncNodelet::callbackRGBDMag(const sensor_msgs::ImageConstPtr& rgb, + const sensor_msgs::ImageConstPtr& depth, + const sensor_msgs::CameraInfoConstPtr& rgbCameraInfo, + const sensor_msgs::CameraInfoConstPtr& depthCameraInfo, + const sensor_msgs::MagneticFieldConstPtr& mag) +{ + // ----> Frequency calculation + static std::chrono::steady_clock::time_point last_time = std::chrono::steady_clock::now(); + std::chrono::steady_clock::time_point now = std::chrono::steady_clock::now(); + + double elapsed_usec = std::chrono::duration_cast(now - last_time).count(); + last_time = now; + + double freq = 1e6 / elapsed_usec; + NODELET_DEBUG("Freq: %.2f", freq); + // <---- Frequency calculation + + double rgbStamp = rgb->header.stamp.toSec(); + double depthStamp = depth->header.stamp.toSec(); + double rgbInfoStamp = rgbCameraInfo->header.stamp.toSec(); + double depthInfoStamp = depthCameraInfo->header.stamp.toSec(); + double magStamp = mag->header.stamp.toSec(); + + double diffMag = rgbStamp - magStamp; + + NODELET_DEBUG("Callback rgbd+mag - RGB TS: %.9f - MAG TS: %.9f - Diff MAG: 
%.9f", rgbStamp, magStamp, diffMag); + + uint32_t subraw = mPubRaw.getNumSubscribers(); + + if (subraw == 0) + { + return; + } + + zed_interfaces::RGBDSensorsPtr outSyncMsg = boost::make_shared(); + + outSyncMsg->header.frame_id = rgb->header.frame_id; + outSyncMsg->header.stamp = rgb->header.stamp; + outSyncMsg->rgbCameraInfo = *rgbCameraInfo; + outSyncMsg->depthCameraInfo = *depthCameraInfo; + + outSyncMsg->rgb = *rgb; + outSyncMsg->depth = *depth; + outSyncMsg->mag = *mag; + + mPubRaw.publish(outSyncMsg); + + if (rgbStamp != rgb->header.stamp.toSec() || magStamp != mag->header.stamp.toSec()) + { + NODELET_ERROR( + "Input stamps changed between the beginning and the end of the callback! Make " + "sure the node publishing the topics doesn't override the same data after publishing them. A " + "solution is to use this node within another nodelet manager. "); + NODELET_ERROR( + "Stamps: " + "rgb=%f->%f MAG=%f->%f", + rgbStamp, rgb->header.stamp.toSec(), magStamp, mag->header.stamp.toSec()); + } } +void RgbdSensorsSyncNodelet::callbackFull(const sensor_msgs::ImageConstPtr& rgb, + const sensor_msgs::ImageConstPtr& depth, + const sensor_msgs::CameraInfoConstPtr& rgbCameraInfo, + const sensor_msgs::CameraInfoConstPtr& depthCameraInfo, + const sensor_msgs::ImuConstPtr& imu, + const sensor_msgs::MagneticFieldConstPtr& mag) +{ + // ----> Frequency calculation + static std::chrono::steady_clock::time_point last_time = std::chrono::steady_clock::now(); + std::chrono::steady_clock::time_point now = std::chrono::steady_clock::now(); + + double elapsed_usec = std::chrono::duration_cast(now - last_time).count(); + last_time = now; + + double freq = 1e6 / elapsed_usec; + NODELET_DEBUG("Freq: %.2f", freq); + // <---- Frequency calculation + + double rgbStamp = rgb->header.stamp.toSec(); + double depthStamp = depth->header.stamp.toSec(); + double rgbInfoStamp = rgbCameraInfo->header.stamp.toSec(); + double depthInfoStamp = depthCameraInfo->header.stamp.toSec(); + double imuStamp = 
imu->header.stamp.toSec(); + double magStamp = mag->header.stamp.toSec(); + + double diffImu = rgbStamp - imuStamp; + double diffMag = rgbStamp - magStamp; + + NODELET_DEBUG("Callback FULL - RGB TS: %.9f - IMU TS: %.9f - Diff IMU: %.9f", rgbStamp, imuStamp, diffImu); + NODELET_DEBUG("Callback FULL - RGB TS: %.9f - MAG TS: %.9f - Diff MAG: %.9f", rgbStamp, magStamp, diffMag); + + uint32_t subraw = mPubRaw.getNumSubscribers(); + + if (subraw == 0) + { + return; + } + + zed_interfaces::RGBDSensorsPtr outSyncMsg = boost::make_shared(); + + outSyncMsg->header.frame_id = rgb->header.frame_id; + outSyncMsg->header.stamp = rgb->header.stamp; + outSyncMsg->rgbCameraInfo = *rgbCameraInfo; + outSyncMsg->depthCameraInfo = *depthCameraInfo; + + outSyncMsg->rgb = *rgb; + outSyncMsg->depth = *depth; + outSyncMsg->imu = *imu; + outSyncMsg->mag = *mag; + + mPubRaw.publish(outSyncMsg); + + if (rgbStamp != rgb->header.stamp.toSec() || imuStamp != imu->header.stamp.toSec() || + magStamp != mag->header.stamp.toSec()) + { + NODELET_ERROR( + "Input stamps changed between the beginning and the end of the callback! Make " + "sure the node publishing the topics doesn't override the same data after publishing them. A " + "solution is to use this node within another nodelet manager. "); + NODELET_ERROR( + "Stamps: " + "rgb=%f->%f IMU=%f->%f MAG=%f->%f", + rgbStamp, rgb->header.stamp.toSec(), imuStamp, imu->header.stamp.toSec(), magStamp, mag->header.stamp.toSec()); + } } + +} // namespace zed_nodelets diff --git a/zed_nodelets/src/tools/include/sl_tools.h b/zed_nodelets/src/tools/include/sl_tools.h index 0a93042b..7e2e649f 100644 --- a/zed_nodelets/src/tools/include/sl_tools.h +++ b/zed_nodelets/src/tools/include/sl_tools.h @@ -1,9 +1,6 @@ -#ifndef SL_TOOLS_H -#define SL_TOOLS_H - /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2020, STEREOLABS. +// Copyright (c) 2023, STEREOLABS. // // All rights reserved. 
// @@ -21,102 +18,133 @@ // /////////////////////////////////////////////////////////////////////////// +#ifndef SL_TOOLS_H +#define SL_TOOLS_H + #include #include #include #include #include -namespace sl_tools { - - /* \brief Check if a ZED camera is ready - * \param serial_number : the serial number of the camera to be checked - */ - int checkCameraReady(unsigned int serial_number); - - /* \brief Get ZED camera properties - * \param serial_number : the serial number of the camera - */ - sl::DeviceProperties getZEDFromSN(unsigned int serial_number); - - std::vector convertRodrigues(sl::float3 r); - - /* \brief Test if a file exist - * \param name : the path to the file - */ - bool file_exist(const std::string& name); - - /* \brief Get Stereolabs SDK version - * \param major : major value for version - * \param minor : minor value for version - * \param sub_minor _ sub_minor value for version - */ - std::string getSDKVersion(int& major, int& minor, int& sub_minor); - - /* \brief Convert StereoLabs timestamp to ROS timestamp - * \param t : Stereolabs timestamp to be converted - */ - ros::Time slTime2Ros(sl::Timestamp t); - - /* \brief sl::Mat to ros message conversion - * \param imgMsgPtr : the image topic message to publish - * \param img : the image to publish - * \param frameId : the id of the reference frame of the image - * \param t : the ros::Time to stamp the image - */ - void imageToROSmsg(sensor_msgs::ImagePtr imgMsgPtr, sl::Mat img, std::string frameId, ros::Time t); - - /* \brief Two sl::Mat to ros message conversion - * \param imgMsgPtr : the image topic message to publish - * \param left : the left image to publish - * \param right : the right image to publish - * \param frameId : the id of the reference frame of the image - * \param t : the ros::Time to stamp the image - */ - void imagesToROSmsg(sensor_msgs::ImagePtr imgMsgPtr, sl::Mat left, sl::Mat right, std::string frameId, ros::Time t); - - /* \brief String tokenization - */ - std::vector 
split_string(const std::string& s, char seperator); - - /*! - * \brief The CSmartMean class is used to - * make a mobile window mean of a sequence of values - * and reject outliers. - * Tutorial: - * https://www.myzhar.com/blog/tutorials/tutorial-exponential-weighted-average-good-moving-windows-average/ - */ - class CSmartMean { - public: - CSmartMean(int winSize); - - int getValCount() { - return mValCount; ///< Return the number of values in the sequence - } - - double getMean() { - return mMean; ///< Return the updated mean - } - - /*! - * \brief addValue - * Add a value to the sequence - * \param val value to be added - * \return mean value - */ - double addValue(double val); - - private: - int mWinSize; ///< The size of the window (number of values ti evaluate) - int mValCount; ///< The number of values in sequence - - double mMeanCorr; ///< Used for bias correction - double mMean; ///< The mean of the last \ref mWinSize values - - double mGamma; ///< Weight value - }; - - -} // namespace sl_tools +namespace sl_tools +{ +/*! \brief Test if a file exist + * \param name : the path to the file + */ +bool file_exist(const std::string& name); + +/*! \brief Convert a path to absolute + * \param file_path the file path to be converted + * \return the absolute path + */ +std::string resolveFilePath(std::string file_path); + +/*! \brief Get Stereolabs SDK version + * \param major : major value for version + * \param minor : minor value for version + * \param sub_minor _ sub_minor value for version + */ +std::string getSDKVersion(int& major, int& minor, int& sub_minor); + +/*! \brief Convert StereoLabs timestamp to ROS timestamp + * \param t : Stereolabs timestamp to be converted + */ +ros::Time slTime2Ros(sl::Timestamp t); + +/*! \brief check if ZED + * \param camModel the model to check + */ +bool isZED(sl::MODEL camModel); + +/*! \brief check if ZED Mini + * \param camModel the model to check + */ +bool isZEDM(sl::MODEL camModel); + +/*! 
\brief check if ZED2 or ZED2i + * \param camModel the model to check + */ +bool isZED2OrZED2i(sl::MODEL camModel); + +/*! \brief check if ZED-X or ZED-X Mini + * \param camModel the model to check + */ +bool isZEDX(sl::MODEL camModel); + +/*! \brief Creates an sl::Mat containing a ROI from a polygon + * \param poly the ROI polygon. Coordinates must be normalized from 0.0 to 1.0 + * \param out_roi the `sl::Mat` containing the ROI + */ +bool generateROI(const std::vector& poly, sl::Mat& out_roi); + +/*! \brief Parse a vector of vector of floats from a string. + * \param input + * \param error_return + * Syntax is [[1.0, 2.0], [3.3, 4.4, 5.5], ...] */ +std::vector> parseStringVector(const std::string& input, std::string& error_return); + +/*! \brief sl::Mat to ros message conversion + * \param imgMsgPtr : the image topic message to publish + * \param img : the image to publish + * \param frameId : the id of the reference frame of the image + * \param t : the ros::Time to stamp the image + */ +void imageToROSmsg(sensor_msgs::ImagePtr imgMsgPtr, sl::Mat img, std::string frameId, ros::Time t); + +/*! \brief Two sl::Mat to ros message conversion + * \param imgMsgPtr : the image topic message to publish + * \param left : the left image to publish + * \param right : the right image to publish + * \param frameId : the id of the reference frame of the image + * \param t : the ros::Time to stamp the image + */ +void imagesToROSmsg(sensor_msgs::ImagePtr imgMsgPtr, sl::Mat left, sl::Mat right, std::string frameId, ros::Time t); + +/*! \brief String tokenization + */ +std::vector split_string(const std::string& s, char seperator); + +/*! + * \brief The CSmartMean class is used to + * make a mobile window mean of a sequence of values + * and reject outliers. 
+ * Tutorial: + * https://www.myzhar.com/blog/tutorials/tutorial-exponential-weighted-average-good-moving-windows-average/ + */ +class CSmartMean +{ +public: + CSmartMean(int winSize); + + int getValCount() + { + return mValCount; ///< Return the number of values in the sequence + } + + double getMean() + { + return mMean; ///< Return the updated mean + } + + /*! + * \brief addValue + * Add a value to the sequence + * \param val value to be added + * \return mean value + */ + double addValue(double val); + +private: + int mWinSize; ///< The size of the window (number of values to evaluate) + int mValCount; ///< The number of values in sequence + + double mMeanCorr; ///< Used for bias correction + double mMean; ///< The mean of the last \ref mWinSize values + + double mGamma; ///< Weight value +}; + +} // namespace sl_tools #endif // SL_TOOLS_H diff --git a/zed_nodelets/src/tools/src/sl_tools.cpp b/zed_nodelets/src/tools/src/sl_tools.cpp index 8dc02507..6687bbb6 100644 --- a/zed_nodelets/src/tools/src/sl_tools.cpp +++ b/zed_nodelets/src/tools/src/sl_tools.cpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2020, STEREOLABS. +// Copyright (c) 2023, STEREOLABS. // // All rights reserved.
// @@ -18,357 +18,511 @@ // /////////////////////////////////////////////////////////////////////////// -#include "sl_tools.h" - -#include -#include -#include - #include +#include #include +#include // for std::experimental::filesystem::absolute +#include +#include -namespace sl_tools { - - int checkCameraReady(unsigned int serial_number) { - int id = -1; - auto f = sl::Camera::getDeviceList(); - - for (auto& it : f) - if (it.serial_number == serial_number && - it.camera_state == sl::CAMERA_STATE::AVAILABLE) { - id = it.id; - } +#include "sl_tools.h" - return id; +namespace sl_tools +{ +bool file_exist(const std::string& name) +{ + struct stat buffer; + return (stat(name.c_str(), &buffer) == 0); +} + +namespace fs = std::experimental::filesystem; +std::string resolveFilePath(std::string file_path) +{ + if (file_path.empty()) + { + return file_path; + } + + std::string abs_path = file_path; + if (file_path[0] == '~') + { + std::string home = getenv("HOME"); + file_path.erase(0, 1); + abs_path = home + file_path; + } + else if (file_path[0] == '.') + { + if (file_path[1] == '.' 
&& file_path[2] == '/') + { + file_path.erase(0, 2); + fs::path current_path = fs::current_path(); + fs::path parent_path = current_path.parent_path(); + abs_path = parent_path.string() + file_path; } - - sl::DeviceProperties getZEDFromSN(unsigned int serial_number) { - sl::DeviceProperties prop; - auto f = sl::Camera::getDeviceList(); - - for (auto& it : f) { - if (it.serial_number == serial_number && - it.camera_state == sl::CAMERA_STATE::AVAILABLE) { - prop = it; - } - } - - return prop; + else if (file_path[1] == '/') + { + file_path.erase(0, 1); + fs::path current_path = fs::current_path(); + abs_path = current_path.string() + file_path; } - - std::vector convertRodrigues(sl::float3 r) { - float theta = sqrt(r.x * r.x + r.y * r.y + r.z * r.z); - - std::vector R = {1.0f, 0.0f, 0.0f, - 0.0f, 1.0f, 0.0f, - 0.0f, 0.0f, 1.0f - }; - - if (theta < DBL_EPSILON) { - return R; - } else { - float c = cos(theta); - float s = sin(theta); - float c1 = 1.f - c; - float itheta = theta ? 1.f / theta : 0.f; - - r *= itheta; - - std::vector rrt = {1.0f, 0.0f, 0.0f, - 0.0f, 1.0f, 0.0f, - 0.0f, 0.0f, 1.0f - }; - - float* p = rrt.data(); - p[0] = r.x * r.x; - p[1] = r.x * r.y; - p[2] = r.x * r.z; - p[3] = r.x * r.y; - p[4] = r.y * r.y; - p[5] = r.y * r.z; - p[6] = r.x * r.z; - p[7] = r.y * r.z; - p[8] = r.z * r.z; - - std::vector r_x = {1.0f, 0.0f, 0.0f, - 0.0f, 1.0f, 0.0f, - 0.0f, 0.0f, 1.0f - }; - p = r_x.data(); - p[0] = 0; - p[1] = -r.z; - p[2] = r.y; - p[3] = r.z; - p[4] = 0; - p[5] = -r.x; - p[6] = -r.y; - p[7] = r.x; - p[8] = 0; - - // R = cos(theta)*I + (1 - cos(theta))*r*rT + sin(theta)*[r_x] - - sl::Matrix3f eye; - eye.setIdentity(); - - sl::Matrix3f sl_R(R.data()); - sl::Matrix3f sl_rrt(rrt.data()); - sl::Matrix3f sl_r_x(r_x.data()); - - sl_R = eye * c + sl_rrt * c1 + sl_r_x * s; - - R[0] = sl_R.r00; - R[1] = sl_R.r01; - R[2] = sl_R.r02; - R[3] = sl_R.r10; - R[4] = sl_R.r11; - R[5] = sl_R.r12; - R[6] = sl_R.r20; - R[7] = sl_R.r21; - R[8] = sl_R.r22; - } - - return R; + 
else + { + std::cerr << "[sl_tools::resolveFilePath] Invalid file path '" << file_path << "' replaced with null string." + << std::endl; + return std::string(); } - - - bool file_exist(const std::string& name) { - struct stat buffer; - return (stat(name.c_str(), &buffer) == 0); + } + else if (file_path[0] != '/') + { + fs::path current_path = fs::current_path(); + abs_path = current_path.string() + "/" + file_path; + } + + return abs_path; +} + +std::string getSDKVersion(int& major, int& minor, int& sub_minor) +{ + std::string ver = sl::Camera::getSDKVersion().c_str(); + std::vector strings; + std::istringstream f(ver); + std::string s; + + while (getline(f, s, '.')) + { + strings.push_back(s); + } + + major = 0; + minor = 0; + sub_minor = 0; + + switch (strings.size()) + { + case 3: + sub_minor = std::stoi(strings[2]); + + case 2: + minor = std::stoi(strings[1]); + + case 1: + major = std::stoi(strings[0]); + } + + return ver; +} + +ros::Time slTime2Ros(sl::Timestamp t) +{ + uint32_t sec = static_cast(t.getNanoseconds() / 1000000000); + uint32_t nsec = static_cast(t.getNanoseconds() % 1000000000); + return ros::Time(sec, nsec); +} + +bool isZED(sl::MODEL camModel) +{ + if (camModel == sl::MODEL::ZED) + { + return true; + } + return false; +} + +bool isZEDM(sl::MODEL camModel) +{ + if (camModel == sl::MODEL::ZED_M) + { + return true; + } + return false; +} + +bool isZED2OrZED2i(sl::MODEL camModel) +{ + if (camModel == sl::MODEL::ZED2) + { + return true; + } + if (camModel == sl::MODEL::ZED2i) + { + return true; + } + return false; +} + +bool isZEDX(sl::MODEL camModel) +{ + if (camModel == sl::MODEL::ZED_X) + { + return true; + } + if (camModel == sl::MODEL::ZED_XM) + { + return true; + } + return false; +} + +void imageToROSmsg(sensor_msgs::ImagePtr imgMsgPtr, sl::Mat img, std::string frameId, ros::Time t) +{ + if (!imgMsgPtr) + { + return; + } + + imgMsgPtr->header.stamp = t; + imgMsgPtr->header.frame_id = frameId; + imgMsgPtr->height = img.getHeight(); + 
imgMsgPtr->width = img.getWidth(); + + int num = 1; // for endianness detection + imgMsgPtr->is_bigendian = !(*(char*)&num == 1); + + imgMsgPtr->step = img.getStepBytes(); + + size_t size = imgMsgPtr->step * imgMsgPtr->height; + imgMsgPtr->data.resize(size); + + sl::MAT_TYPE dataType = img.getDataType(); + + switch (dataType) + { + case sl::MAT_TYPE::F32_C1: /**< float 1 channel.*/ + imgMsgPtr->encoding = sensor_msgs::image_encodings::TYPE_32FC1; + memcpy((char*)(&imgMsgPtr->data[0]), img.getPtr(), size); + break; + + case sl::MAT_TYPE::F32_C2: /**< float 2 channels.*/ + imgMsgPtr->encoding = sensor_msgs::image_encodings::TYPE_32FC2; + memcpy((char*)(&imgMsgPtr->data[0]), img.getPtr(), size); + break; + + case sl::MAT_TYPE::F32_C3: /**< float 3 channels.*/ + imgMsgPtr->encoding = sensor_msgs::image_encodings::TYPE_32FC3; + memcpy((char*)(&imgMsgPtr->data[0]), img.getPtr(), size); + break; + + case sl::MAT_TYPE::F32_C4: /**< float 4 channels.*/ + imgMsgPtr->encoding = sensor_msgs::image_encodings::TYPE_32FC4; + memcpy((char*)(&imgMsgPtr->data[0]), img.getPtr(), size); + break; + + case sl::MAT_TYPE::U8_C1: /**< unsigned char 1 channel.*/ + imgMsgPtr->encoding = sensor_msgs::image_encodings::MONO8; + memcpy((char*)(&imgMsgPtr->data[0]), img.getPtr(), size); + break; + + case sl::MAT_TYPE::U8_C2: /**< unsigned char 2 channels.*/ + imgMsgPtr->encoding = sensor_msgs::image_encodings::TYPE_8UC2; + memcpy((char*)(&imgMsgPtr->data[0]), img.getPtr(), size); + break; + + case sl::MAT_TYPE::U8_C3: /**< unsigned char 3 channels.*/ + imgMsgPtr->encoding = sensor_msgs::image_encodings::BGR8; + memcpy((char*)(&imgMsgPtr->data[0]), img.getPtr(), size); + break; + + case sl::MAT_TYPE::U8_C4: /**< unsigned char 4 channels.*/ + imgMsgPtr->encoding = sensor_msgs::image_encodings::BGRA8; + memcpy((char*)(&imgMsgPtr->data[0]), img.getPtr(), size); + break; + + case sl::MAT_TYPE::U16_C1: /**< unsigned short 1 channel.*/ + imgMsgPtr->encoding = sensor_msgs::image_encodings::TYPE_16UC1; + 
memcpy((uint16_t*)(&imgMsgPtr->data[0]), img.getPtr(), size); + break; + } +} + +void imagesToROSmsg(sensor_msgs::ImagePtr imgMsgPtr, sl::Mat left, sl::Mat right, std::string frameId, ros::Time t) +{ + if (left.getWidth() != right.getWidth() || left.getHeight() != right.getHeight() || + left.getChannels() != right.getChannels() || left.getDataType() != right.getDataType()) + { + return; + } + + if (!imgMsgPtr) + { + imgMsgPtr = boost::make_shared(); + } + + imgMsgPtr->header.stamp = t; + imgMsgPtr->header.frame_id = frameId; + imgMsgPtr->height = left.getHeight(); + imgMsgPtr->width = 2 * left.getWidth(); + + int num = 1; // for endianness detection + imgMsgPtr->is_bigendian = !(*(char*)&num == 1); + + imgMsgPtr->step = 2 * left.getStepBytes(); + + size_t size = imgMsgPtr->step * imgMsgPtr->height; + imgMsgPtr->data.resize(size); + + sl::MAT_TYPE dataType = left.getDataType(); + + int dataSize = 0; + char* srcL; + char* srcR; + + switch (dataType) + { + case sl::MAT_TYPE::F32_C1: /**< float 1 channel.*/ + imgMsgPtr->encoding = sensor_msgs::image_encodings::TYPE_32FC1; + dataSize = sizeof(float); + srcL = (char*)left.getPtr(); + srcR = (char*)right.getPtr(); + break; + + case sl::MAT_TYPE::F32_C2: /**< float 2 channels.*/ + imgMsgPtr->encoding = sensor_msgs::image_encodings::TYPE_32FC2; + dataSize = 2 * sizeof(float); + srcL = (char*)left.getPtr(); + srcR = (char*)right.getPtr(); + break; + + case sl::MAT_TYPE::F32_C3: /**< float 3 channels.*/ + imgMsgPtr->encoding = sensor_msgs::image_encodings::TYPE_32FC3; + dataSize = 3 * sizeof(float); + srcL = (char*)left.getPtr(); + srcR = (char*)right.getPtr(); + break; + + case sl::MAT_TYPE::F32_C4: /**< float 4 channels.*/ + imgMsgPtr->encoding = sensor_msgs::image_encodings::TYPE_32FC4; + dataSize = 4 * sizeof(float); + srcL = (char*)left.getPtr(); + srcR = (char*)right.getPtr(); + break; + + case sl::MAT_TYPE::U8_C1: /**< unsigned char 1 channel.*/ + imgMsgPtr->encoding = sensor_msgs::image_encodings::MONO8; + dataSize = 
sizeof(char); + srcL = (char*)left.getPtr(); + srcR = (char*)right.getPtr(); + break; + + case sl::MAT_TYPE::U8_C2: /**< unsigned char 2 channels.*/ + imgMsgPtr->encoding = sensor_msgs::image_encodings::TYPE_8UC2; + dataSize = 2 * sizeof(char); + srcL = (char*)left.getPtr(); + srcR = (char*)right.getPtr(); + break; + + case sl::MAT_TYPE::U8_C3: /**< unsigned char 3 channels.*/ + imgMsgPtr->encoding = sensor_msgs::image_encodings::BGR8; + dataSize = 3 * sizeof(char); + srcL = (char*)left.getPtr(); + srcR = (char*)right.getPtr(); + break; + + case sl::MAT_TYPE::U8_C4: /**< unsigned char 4 channels.*/ + imgMsgPtr->encoding = sensor_msgs::image_encodings::BGRA8; + dataSize = 4 * sizeof(char); + srcL = (char*)left.getPtr(); + srcR = (char*)right.getPtr(); + break; + } + + char* dest = (char*)(&imgMsgPtr->data[0]); + + for (int i = 0; i < left.getHeight(); i++) + { + memcpy(dest, srcL, left.getStepBytes()); + dest += left.getStepBytes(); + memcpy(dest, srcR, right.getStepBytes()); + dest += right.getStepBytes(); + + srcL += left.getStepBytes(); + srcR += right.getStepBytes(); + } +} + +std::vector split_string(const std::string& s, char seperator) +{ + std::vector output; + std::string::size_type prev_pos = 0, pos = 0; + + while ((pos = s.find(seperator, pos)) != std::string::npos) + { + std::string substring(s.substr(prev_pos, pos - prev_pos)); + output.push_back(substring); + prev_pos = ++pos; + } + + output.push_back(s.substr(prev_pos, pos - prev_pos)); + return output; +} + +inline bool contains(std::vector& poly, sl::float2 test) +{ + int i, j; + bool c = false; + const int nvert = poly.size(); + for (i = 0, j = nvert - 1; i < nvert; j = i++) + { + if (((poly[i].y > test.y) != (poly[j].y > test.y)) && + (test.x < (poly[j].x - poly[i].x) * (test.y - poly[i].y) / (poly[j].y - poly[i].y) + poly[i].x)) + { + c = !c; } - - std::string getSDKVersion(int& major, int& minor, int& sub_minor) { - std::string ver = sl::Camera::getSDKVersion().c_str(); - std::vector strings; - 
std::istringstream f(ver); - std::string s; - - while (getline(f, s, '.')) { - strings.push_back(s); - } - - major = 0; - minor = 0; - sub_minor = 0; - - switch (strings.size()) { - case 3: - sub_minor = std::stoi(strings[2]); - - case 2: - minor = std::stoi(strings[1]); - - case 1: - major = std::stoi(strings[0]); - } - - return ver; + } + return c; +} + +bool generateROI(const std::vector& poly, sl::Mat& out_roi) +{ + if (poly.size() < 3) + { + out_roi = sl::Mat(); + return false; + } + + // Set each pixel to valid + // std::cerr << "Setting ROI mask to full valid" << std::endl; + out_roi.setTo(255, sl::MEM::CPU); + + // ----> De-normalize coordinates + size_t w = out_roi.getWidth(); + size_t h = out_roi.getHeight(); + + // std::cerr << "De-normalize coordinates" << std::endl; + // std::cerr << "Image resolution: " << w << "x" << h << std::endl; + std::vector poly_img; + size_t idx = 0; + for (auto& it : poly) + { + sl::float2 pt; + pt.x = it.x * w; + pt.y = it.y * h; + + if (pt.x >= w) + { + pt.x = (w - 1); } - - ros::Time slTime2Ros(sl::Timestamp t) { - uint32_t sec = static_cast(t.getNanoseconds() / 1000000000); - uint32_t nsec = static_cast(t.getNanoseconds() % 1000000000); - return ros::Time(sec, nsec); + if (pt.y >= h) + { + pt.y = (h - 1); } - void imageToROSmsg(sensor_msgs::ImagePtr imgMsgPtr, sl::Mat img, std::string frameId, ros::Time t) { - - if(!imgMsgPtr) { - return; - } - - imgMsgPtr->header.stamp = t; - imgMsgPtr->header.frame_id = frameId; - imgMsgPtr->height = img.getHeight(); - imgMsgPtr->width = img.getWidth(); - - int num = 1; // for endianness detection - imgMsgPtr->is_bigendian = !(*(char*)&num == 1); - - imgMsgPtr->step = img.getStepBytes(); - - size_t size = imgMsgPtr->step * imgMsgPtr->height; - imgMsgPtr->data.resize(size); - - sl::MAT_TYPE dataType = img.getDataType(); - - switch (dataType) { - case sl::MAT_TYPE::F32_C1: /**< float 1 channel.*/ - imgMsgPtr->encoding = sensor_msgs::image_encodings::TYPE_32FC1; - 
memcpy((char*)(&imgMsgPtr->data[0]), img.getPtr(), size); - break; - - case sl::MAT_TYPE::F32_C2: /**< float 2 channels.*/ - imgMsgPtr->encoding = sensor_msgs::image_encodings::TYPE_32FC2; - memcpy((char*)(&imgMsgPtr->data[0]), img.getPtr(), size); - break; - - case sl::MAT_TYPE::F32_C3: /**< float 3 channels.*/ - imgMsgPtr->encoding = sensor_msgs::image_encodings::TYPE_32FC3; - memcpy((char*)(&imgMsgPtr->data[0]), img.getPtr(), size); - break; - - case sl::MAT_TYPE::F32_C4: /**< float 4 channels.*/ - imgMsgPtr->encoding = sensor_msgs::image_encodings::TYPE_32FC4; - memcpy((char*)(&imgMsgPtr->data[0]), img.getPtr(), size); - break; - - case sl::MAT_TYPE::U8_C1: /**< unsigned char 1 channel.*/ - imgMsgPtr->encoding = sensor_msgs::image_encodings::MONO8; - memcpy((char*)(&imgMsgPtr->data[0]), img.getPtr(), size); - break; - - case sl::MAT_TYPE::U8_C2: /**< unsigned char 2 channels.*/ - imgMsgPtr->encoding = sensor_msgs::image_encodings::TYPE_8UC2; - memcpy((char*)(&imgMsgPtr->data[0]), img.getPtr(), size); - break; - - case sl::MAT_TYPE::U8_C3: /**< unsigned char 3 channels.*/ - imgMsgPtr->encoding = sensor_msgs::image_encodings::BGR8; - memcpy((char*)(&imgMsgPtr->data[0]), img.getPtr(), size); - break; - - case sl::MAT_TYPE::U8_C4: /**< unsigned char 4 channels.*/ - imgMsgPtr->encoding = sensor_msgs::image_encodings::BGRA8; - memcpy((char*)(&imgMsgPtr->data[0]), img.getPtr(), size); - break; - } + poly_img.push_back(pt); + + ++idx; + } + // <---- De-normalize coordinates + + // ----> Unset ROI pixels outside the polygon + // std::cerr << "Unset ROI pixels outside the polygon" << std::endl; + // std::cerr << "Set mask" << std::endl; + for (int v = 0; v < h; v++) + { + for (int u = 0; u < w; u++) + { + if (!contains(poly_img, sl::float2(u, v))) + { + out_roi.setValue(u, v, 0, sl::MEM::CPU); + } } - - void imagesToROSmsg(sensor_msgs::ImagePtr imgMsgPtr, sl::Mat left, sl::Mat right, std::string frameId, ros::Time t) { - - if (left.getWidth() != right.getWidth() || - 
left.getHeight() != right.getHeight() || - left.getChannels() != right.getChannels() || - left.getDataType() != right.getDataType()) { - return; + } + // std::cerr << "Mask ready" << std::endl; + // std::cerr << "ROI resolution: " << w << "x" << h << std::endl; + // <---- Unset ROI pixels outside the polygon + + return true; +} + +std::vector> parseStringVector(const std::string& input, std::string& error_return) +{ + std::vector> result; + + std::stringstream input_ss(input); + int depth = 0; + std::vector current_vector; + while (!!input_ss && !input_ss.eof()) + { + switch (input_ss.peek()) + { + case EOF: + break; + case '[': + depth++; + if (depth > 2) + { + error_return = "Array depth greater than 2"; + return result; } - - if(!imgMsgPtr) { - imgMsgPtr = boost::make_shared(); + input_ss.get(); + current_vector.clear(); + break; + case ']': + depth--; + if (depth < 0) + { + error_return = "More close ] than open ["; + return result; } - - imgMsgPtr->header.stamp = t; - imgMsgPtr->header.frame_id = frameId; - imgMsgPtr->height = left.getHeight(); - imgMsgPtr->width = 2 * left.getWidth(); - - int num = 1; // for endianness detection - imgMsgPtr->is_bigendian = !(*(char*)&num == 1); - - imgMsgPtr->step = 2 * left.getStepBytes(); - - size_t size = imgMsgPtr->step * imgMsgPtr->height; - imgMsgPtr->data.resize(size); - - sl::MAT_TYPE dataType = left.getDataType(); - - int dataSize = 0; - char* srcL; - char* srcR; - - switch (dataType) { - case sl::MAT_TYPE::F32_C1: /**< float 1 channel.*/ - imgMsgPtr->encoding = sensor_msgs::image_encodings::TYPE_32FC1; - dataSize = sizeof(float); - srcL = (char*)left.getPtr(); - srcR = (char*)right.getPtr(); - break; - - case sl::MAT_TYPE::F32_C2: /**< float 2 channels.*/ - imgMsgPtr->encoding = sensor_msgs::image_encodings::TYPE_32FC2; - dataSize = 2 * sizeof(float); - srcL = (char*)left.getPtr(); - srcR = (char*)right.getPtr(); - break; - - case sl::MAT_TYPE::F32_C3: /**< float 3 channels.*/ - imgMsgPtr->encoding = 
sensor_msgs::image_encodings::TYPE_32FC3; - dataSize = 3 * sizeof(float); - srcL = (char*)left.getPtr(); - srcR = (char*)right.getPtr(); - break; - - case sl::MAT_TYPE::F32_C4: /**< float 4 channels.*/ - imgMsgPtr->encoding = sensor_msgs::image_encodings::TYPE_32FC4; - dataSize = 4 * sizeof(float); - srcL = (char*)left.getPtr(); - srcR = (char*)right.getPtr(); - break; - - case sl::MAT_TYPE::U8_C1: /**< unsigned char 1 channel.*/ - imgMsgPtr->encoding = sensor_msgs::image_encodings::MONO8; - dataSize = sizeof(char); - srcL = (char*)left.getPtr(); - srcR = (char*)right.getPtr(); - break; - - case sl::MAT_TYPE::U8_C2: /**< unsigned char 2 channels.*/ - imgMsgPtr->encoding = sensor_msgs::image_encodings::TYPE_8UC2; - dataSize = 2 * sizeof(char); - srcL = (char*)left.getPtr(); - srcR = (char*)right.getPtr(); - break; - - case sl::MAT_TYPE::U8_C3: /**< unsigned char 3 channels.*/ - imgMsgPtr->encoding = sensor_msgs::image_encodings::BGR8; - dataSize = 3 * sizeof(char); - srcL = (char*)left.getPtr(); - srcR = (char*)right.getPtr(); - break; - - case sl::MAT_TYPE::U8_C4: /**< unsigned char 4 channels.*/ - imgMsgPtr->encoding = sensor_msgs::image_encodings::BGRA8; - dataSize = 4 * sizeof(char); - srcL = (char*)left.getPtr(); - srcR = (char*)right.getPtr(); - break; + input_ss.get(); + if (depth == 1) + { + result.push_back(current_vector); } - - char* dest = (char*)(&imgMsgPtr->data[0]); - - for (int i = 0; i < left.getHeight(); i++) { - memcpy(dest, srcL, left.getStepBytes()); - dest += left.getStepBytes(); - memcpy(dest, srcR, right.getStepBytes()); - dest += right.getStepBytes(); - - srcL += left.getStepBytes(); - srcR += right.getStepBytes(); + break; + case ',': + case ' ': + case '\t': + input_ss.get(); + break; + default: // All other characters should be part of the numbers. + if (depth != 2) + { + std::stringstream err_ss; + err_ss << "Numbers at depth other than 2. 
Char was '" << char(input_ss.peek()) << "'."; + error_return = err_ss.str(); + return result; } + float value; + input_ss >> value; + if (!!input_ss) + { + current_vector.push_back(value); + } + break; } + } - std::vector split_string(const std::string& s, char seperator) { - std::vector output; - std::string::size_type prev_pos = 0, pos = 0; - - while ((pos = s.find(seperator, pos)) != std::string::npos) { - std::string substring(s.substr(prev_pos, pos - prev_pos)); - output.push_back(substring); - prev_pos = ++pos; - } + if (depth != 0) + { + error_return = "Unterminated vector string."; + } + else + { + error_return = ""; + } - output.push_back(s.substr(prev_pos, pos - prev_pos)); - return output; - } + return result; +} - CSmartMean::CSmartMean(int winSize) { - mValCount = 0; +CSmartMean::CSmartMean(int winSize) +{ + mValCount = 0; - mMeanCorr = 0.0; - mMean = 0.0; - mWinSize = winSize; + mMeanCorr = 0.0; + mMean = 0.0; + mWinSize = winSize; - mGamma = (static_cast(mWinSize) - 1.) / static_cast(mWinSize); - } + mGamma = (static_cast(mWinSize) - 1.) / static_cast(mWinSize); +} - double CSmartMean::addValue(double val) { - mValCount++; +double CSmartMean::addValue(double val) +{ + mValCount++; - mMeanCorr = mGamma * mMeanCorr + (1. - mGamma) * val; - mMean = mMeanCorr / (1. - pow(mGamma, mValCount)); + mMeanCorr = mGamma * mMeanCorr + (1. - mGamma) * val; + mMean = mMeanCorr / (1. 
- pow(mGamma, mValCount)); - return mMean; - } + return mMean; +} -} // namespace +} // namespace sl_tools diff --git a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp index 0a30b6c6..cc33cec5 100644 --- a/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp +++ b/zed_nodelets/src/zed_nodelet/include/zed_wrapper_nodelet.hpp @@ -1,9 +1,6 @@ -#ifndef ZED_WRAPPER_NODELET_H -#define ZED_WRAPPER_NODELET_H - -/////////////////////////////////////////////////////////////////////////// +/////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2020, STEREOLABS. +// Copyright (c) 2023, STEREOLABS. // // All rights reserved. // @@ -21,675 +18,809 @@ // /////////////////////////////////////////////////////////////////////////// -#include "sl_tools.h" - -#include +#ifndef ZED_WRAPPER_NODELET_H +#define ZED_WRAPPER_NODELET_H -#include -#include -#include -#include -#include -#include +#include #include +#include #include +#include +#include +#include #include -#include +#include +#include +#include +#include +#include +#include + +#include + +#include "sl_tools.h" // Dynamic reconfiguration #include // Services +#include #include +#include +#include +#include #include -#include -#include -#include +#include #include +#include +#include #include -#include +#include #include -#include -#include -#include -#include +#include +#include // Topics +#include #include #include -#include #include -#include +#include #include +#include #include #include #include #include -#include #include +#include #include #include #include -#include -using namespace std; - -namespace zed_nodelets { - -class ZEDWrapperNodelet : public nodelet::Nodelet { - - typedef enum _dyn_params { - DATAPUB_FREQ = 0, - CONFIDENCE = 1, - TEXTURE_CONF = 2, - POINTCLOUD_FREQ = 3, - BRIGHTNESS = 4, - CONTRAST = 5, - HUE = 6, - SATURATION = 7, - SHARPNESS = 8, - GAMMA = 9, - AUTO_EXP_GAIN = 10, - 
GAIN = 11, - EXPOSURE = 12, - AUTO_WB = 13, - WB_TEMP = 14 - } DynParams; +namespace zed_nodelets +{ +typedef enum +{ + NATIVE, //!< Same camera grab resolution + CUSTOM //!< Custom Rescale Factor +} PubRes; + +class ZEDWrapperNodelet : public nodelet::Nodelet +{ + typedef enum _dyn_params + { + DATAPUB_FREQ = 0, + CONFIDENCE = 1, + TEXTURE_CONF = 2, + POINTCLOUD_FREQ = 3, + BRIGHTNESS = 4, + CONTRAST = 5, + HUE = 6, + SATURATION = 7, + SHARPNESS = 8, + GAMMA = 9, + AUTO_EXP_GAIN = 10, + GAIN = 11, + EXPOSURE = 12, + AUTO_WB = 13, + WB_TEMP = 14 + } DynParams; public: - /* \brief Default constructor - */ - ZEDWrapperNodelet(); + /*! \brief Default constructor + */ + ZEDWrapperNodelet(); - /* \brief \ref ZEDWrapperNodelet destructor - */ - virtual ~ZEDWrapperNodelet(); + /*! \brief \ref ZEDWrapperNodelet destructor + */ + virtual ~ZEDWrapperNodelet(); protected: - /* \brief Initialization function called by the Nodelet base class - */ - virtual void onInit(); - - /* \brief Reads parameters from the param server - */ - void readParameters(); - - /* \brief ZED camera polling thread function - */ - void device_poll_thread_func(); - - /* \brief Pointcloud publishing function - */ - void pointcloud_thread_func(); - - /* \brief Publish the pose of the camera in "Map" frame with a ros Publisher - * \param t : the ros::Time to stamp the image - */ - void publishPose(ros::Time t); - - /* \brief Publish the pose of the camera in "Odom" frame with a ros Publisher - * \param base2odomTransf : Transformation representing the camera pose - * from base frame to odom frame - * \param slPose : latest odom pose from ZED SDK - * \param t : the ros::Time to stamp the image - */ - void publishOdom(tf2::Transform odom2baseTransf, sl::Pose& slPose, ros::Time t); - - /* \brief Publish the pose of the camera in "Map" frame as a transformation - * \param baseTransform : Transformation representing the camera pose from - * odom frame to map frame - * \param t : the ros::Time to stamp the 
image - */ - void publishPoseFrame(tf2::Transform baseTransform, ros::Time t); - - /* \brief Publish the odometry of the camera in "Odom" frame as a - * transformation - * \param odomTransf : Transformation representing the camera pose from - * base frame to odom frame - * \param t : the ros::Time to stamp the image - */ - void publishOdomFrame(tf2::Transform odomTransf, ros::Time t); - - /* \brief Publish the pose of the imu in "Odom" frame as a transformation - * \param imuTransform : Transformation representing the imu pose from base - * frame to odom framevoid - * \param t : the ros::Time to stamp the image - */ - void publishImuFrame(tf2::Transform imuTransform, ros::Time t); - - /* \brief Publish a sl::Mat image with a ros Publisher - * \param imgMsgPtr : the image message to publish - * \param img : the image to publish - * \param pubImg : the publisher object to use (different image publishers - * exist) - * \param camInfoMsg : the camera_info to be published with image - * \param imgFrameId : the id of the reference frame of the image (different - * image frames exist) - * \param t : the ros::Time to stamp the image - */ - void publishImage(sensor_msgs::ImagePtr imgMsgPtr, sl::Mat img, image_transport::CameraPublisher& pubImg, sensor_msgs::CameraInfoPtr camInfoMsg, - string imgFrameId, ros::Time t); - - /* \brief Publish a sl::Mat depth image with a ros Publisher - * \param imgMsgPtr : the depth image topic message to publish - * \param depth : the depth image to publish - * \param t : the ros::Time to stamp the depth image - */ - void publishDepth(sensor_msgs::ImagePtr imgMsgPtr, sl::Mat depth, ros::Time t); - - /* \brief Publish a single pointCloud with a ros Publisher - */ - void publishPointCloud(); - - /* \brief Publish a fused pointCloud with a ros Publisher - */ - void pubFusedPointCloudCallback(const ros::TimerEvent& e); - - /* \brief Publish the informations of a camera with a ros Publisher - * \param cam_info_msg : the information message to 
publish - * \param pub_cam_info : the publisher object to use - * \param t : the ros::Time to stamp the message - */ - void publishCamInfo(sensor_msgs::CameraInfoPtr camInfoMsg, - ros::Publisher pubCamInfo, ros::Time t); - - /* \brief Publish a sl::Mat disparity image with a ros Publisher - * \param disparity : the disparity image to publish - * \param t : the ros::Time to stamp the depth image - */ - void publishDisparity(sl::Mat disparity, ros::Time t); - - /* \brief Get the information of the ZED cameras and store them in an - * information message - * \param zed : the sl::zed::Camera* pointer to an instance - * \param left_cam_info_msg : the information message to fill with the left - * camera informations - * \param right_cam_info_msg : the information message to fill with the right - * camera informations - * \param left_frame_id : the id of the reference frame of the left camera - * \param right_frame_id : the id of the reference frame of the right camera - */ - void fillCamInfo(sl::Camera& zed, sensor_msgs::CameraInfoPtr leftCamInfoMsg, - sensor_msgs::CameraInfoPtr rightCamInfoMsg, - string leftFrameId, string rightFrameId, - bool rawParam = false); - - /* \brief Get the information of the ZED cameras and store them in an - * information message for depth topics - * \param zed : the sl::zed::Camera* pointer to an instance - * \param depth_info_msg : the information message to fill with the left - * camera informations - * \param frame_id : the id of the reference frame of the left camera - */ - void fillCamDepthInfo(sl::Camera& zed, sensor_msgs::CameraInfoPtr depth_info_msg, - string frame_id ); - - /* \bried Check if FPS and Resolution chosen by user are correct. - * Modifies FPS to match correct value. 
- */ - void checkResolFps(); - - /* \brief Callback to handle dynamic reconfigure events in ROS - */ - void dynamicReconfCallback(zed_nodelets::ZedConfig& config, uint32_t level); - - /* \brief Callback to publish Video and Depth data - * \param e : the ros::TimerEvent binded to the callback - */ - void pubVideoDepthCallback(const ros::TimerEvent& e); - - /* \brief Callback to publish Path data with a ROS publisher. - * \param e : the ros::TimerEvent binded to the callback - */ - void pubPathCallback(const ros::TimerEvent& e); - - /* \brief Callback to publish Sensor Data with a ROS publisher. - * \param e : the ros::TimerEvent binded to the callback - */ - void pubSensCallback(const ros::TimerEvent& e); - - /* \brief Callback to update node diagnostic status - * \param stat : node status - */ - void updateDiagnostic(diagnostic_updater::DiagnosticStatusWrapper& stat); - - /* \brief Service callback to reset_tracking service - * Tracking pose is reinitialized to the value available in the ROS Param - * server - */ - bool on_reset_tracking(zed_interfaces::reset_tracking::Request& req, - zed_interfaces::reset_tracking::Response& res); - - /* \brief Service callback to reset_odometry service - * Odometry is reset to clear drift and odometry frame gets the latest - * pose - * from ZED tracking. 
- */ - bool on_reset_odometry(zed_interfaces::reset_odometry::Request& req, - zed_interfaces::reset_odometry::Response& res); - - /* \brief Service callback to set_pose service - * Tracking pose is set to the new values - */ - bool on_set_pose(zed_interfaces::set_pose::Request& req, - zed_interfaces::set_pose::Response& res); - - /* \brief Service callback to start_svo_recording service - */ - bool on_start_svo_recording(zed_interfaces::start_svo_recording::Request& req, - zed_interfaces::start_svo_recording::Response& res); - - /* \brief Service callback to stop_svo_recording service - */ - bool on_stop_svo_recording(zed_interfaces::stop_svo_recording::Request& req, - zed_interfaces::stop_svo_recording::Response& res); - - /* \brief Service callback to start_remote_stream service - */ - bool on_start_remote_stream(zed_interfaces::start_remote_stream::Request& req, - zed_interfaces::start_remote_stream::Response& res); - - /* \brief Service callback to stop_remote_stream service - */ - bool on_stop_remote_stream(zed_interfaces::stop_remote_stream::Request& req, - zed_interfaces::stop_remote_stream::Response& res); - - /* \brief Service callback to set_led_status service - */ - bool on_set_led_status(zed_interfaces::set_led_status::Request& req, - zed_interfaces::set_led_status::Response& res); - - /* \brief Service callback to toggle_led service - */ - bool on_toggle_led(zed_interfaces::toggle_led::Request& req, - zed_interfaces::toggle_led::Response& res); - - /* \brief Service callback to start_3d_mapping service - */ - bool on_start_3d_mapping(zed_interfaces::start_3d_mapping::Request& req, - zed_interfaces::start_3d_mapping::Response& res); - - /* \brief Service callback to stop_3d_mapping service - */ - bool on_stop_3d_mapping(zed_interfaces::stop_3d_mapping::Request& req, - zed_interfaces::stop_3d_mapping::Response& res); - - /* \brief Service callback to start_object_detection service - */ - bool 
on_start_object_detection(zed_interfaces::start_object_detection::Request& req, - zed_interfaces::start_object_detection::Response& res); - - /* \brief Service callback to stop_object_detection service - */ - bool on_stop_object_detection(zed_interfaces::stop_object_detection::Request& req, - zed_interfaces::stop_object_detection::Response& res); - - /* \brief Utility to initialize the pose variables - */ - bool set_pose(float xt, float yt, float zt, float rr, float pr, float yr); - - /* \brief Utility to initialize the most used transforms - */ - void initTransforms(); - - /* \brief Utility to initialize the static transform from Sensor to Base - */ - bool getSens2BaseTransform(); - - /* \brief Utility to initialize the static transform from Sensor to Camera - */ - bool getSens2CameraTransform(); - - /* \brief Utility to initialize the static transform from Camera to Base - */ - bool getCamera2BaseTransform(); - - /* \bried Start tracking - */ - void start_pos_tracking(); - - /* \bried Start spatial mapping - */ - bool start_3d_mapping(); - - /* \bried Stop spatial mapping - */ - void stop_3d_mapping(); - - /* \bried Start object detection - */ - bool start_obj_detect(); - - /* \bried Stop object detection - */ - void stop_obj_detect(); - - /* \brief Perform object detection and publish result - */ - void detectObjects(bool publishObj, bool publishViz); - - /* \brief Generates an univoque color for each object class ID - */ - inline sl::float3 generateColorClass(int idx) { - sl::float3 clr; - clr.r = static_cast(33 + (idx * 456262)); - clr.g = static_cast(233 + (idx * 1564684)); - clr.b = static_cast(133 + (idx * 76873242)); - return clr / 255.f; - } - - /* \brief Update Dynamic reconfigure parameters - */ - void updateDynamicReconfigure(); + /*! \brief Initialization function called by the Nodelet base class + */ + virtual void onInit(); + + /*! \brief Initialize services + */ + void initServices(); + + /*! 
\brief Reads parameters from the param server + */ + void readParameters(); + + /*! \brief Reads general parameters from the param server + */ + void readGeneralParams(); + + /*! \brief Reads depth parameters from the param server + */ + void readDepthParams(); + + /*! \brief Reads positional tracking parameters from the param server + */ + void readPosTrkParams(); + + /*! \brief Reads spatial mapping parameters from the param server + */ + void readMappingParams(); + + /*! \brief Reads object detection parameters from the param server + */ + void readObjDetParams(); + + /*! \brief Reads sensors parameters from the param server + */ + void readSensParams(); + + /*! \brief Reads SVO parameters from the param server + */ + void readSvoParams(); + + /*! \brief Reads dynamic parameters from the param server + */ + void readDynParams(); + + /*! \brief ZED camera polling thread function + */ + void device_poll_thread_func(); + + /*! \brief Pointcloud publishing function + */ + void pointcloud_thread_func(); + + /*! \brief Sensors data publishing function + */ + void sensors_thread_func(); + + /*! \brief Publish odometry status message + */ + void publishPoseStatus(); + + /*! \brief Publish odometry status message + */ + void publishOdomStatus(); + + /*! \brief Process odometry information + */ + void processOdometry(); + + /*! \brief Process pose information + */ + void processPose(); + + /*! \brief Publish the pose of the camera in "Map" frame with a ros Publisher + */ + void publishPose(); + + /*! \brief Publish the pose of the camera in "Odom" frame with a ros Publisher + * \param base2odomTransf : Transformation representing the camera pose + * from base frame to odom frame + * \param slPose : latest odom pose from ZED SDK + * \param t : the ros::Time to stamp the image + */ + void publishOdom(tf2::Transform odom2baseTransf, sl::Pose& slPose, ros::Time t); + + /*! 
\brief Publish the odom -> base_link TF + * \param t : the ros::Time to stamp the image + */ + void publishTFs(ros::Time t); + + /*! \brief Publish the odom -> base_link TF + * \param t : the ros::Time to stamp the image + */ + void publishOdomTF(ros::Time t); + + /*! \brief Publish the map -> odom TF + * \param t : the ros::Time to stamp the image + */ + void publishPoseTF(ros::Time t); + + /*! + * \brief Publish IMU frame once as static TF + */ + void publishStaticImuFrame(); + + /*! \brief Publish a sl::Mat image with a ros Publisher + * \param imgMsgPtr : the image message to publish + * \param img : the image to publish + * \param pubImg : the publisher object to use (different image publishers + * exist) + * \param camInfoMsg : the camera_info to be published with image + * \param imgFrameId : the id of the reference frame of the image (different + * image frames exist) + * \param t : the ros::Time to stamp the image + */ + void publishImage(sensor_msgs::ImagePtr imgMsgPtr, sl::Mat img, image_transport::CameraPublisher& pubImg, + sensor_msgs::CameraInfoPtr camInfoMsg, std::string imgFrameId, ros::Time t); + + /*! \brief Publish a sl::Mat depth image with a ros Publisher + * \param imgMsgPtr : the depth image topic message to publish + * \param depth : the depth image to publish + * \param t : the ros::Time to stamp the depth image + */ + void publishDepth(sensor_msgs::ImagePtr imgMsgPtr, sl::Mat depth, ros::Time t); + + /*! \brief Publish a single pointCloud with a ros Publisher + */ + void publishPointCloud(); + + /*! \brief Publish a fused pointCloud with a ros Publisher + */ + void callback_pubFusedPointCloud(const ros::TimerEvent& e); + + /*! + * @brief Publish Color and Depth images + */ + void pubVideoDepth(); + + /*! 
\brief Publish the informations of a camera with a ros Publisher + * \param cam_info_msg : the information message to publish + * \param pub_cam_info : the publisher object to use + * \param t : the ros::Time to stamp the message + */ + void publishCamInfo(sensor_msgs::CameraInfoPtr camInfoMsg, ros::Publisher pubCamInfo, ros::Time t); + + /*! \brief Publish a sl::Mat disparity image with a ros Publisher + * \param disparity : the disparity image to publish + * \param t : the ros::Time to stamp the depth image + */ + void publishDisparity(sl::Mat disparity, ros::Time t); + + /*! \brief Publish sensors data and TF + * \param t : the ros::Time to stamp the depth image + */ + void publishSensData(ros::Time t = ros::Time(0)); + + /*! \brief Get the information of the ZED cameras and store them in an + * information message + * \param zed : the sl::zed::Camera* pointer to an instance + * \param left_cam_info_msg : the information message to fill with the left + * camera informations + * \param right_cam_info_msg : the information message to fill with the right + * camera informations + * \param left_frame_id : the id of the reference frame of the left camera + * \param right_frame_id : the id of the reference frame of the right camera + */ + void fillCamInfo(sl::Camera& zed, sensor_msgs::CameraInfoPtr leftCamInfoMsg, + sensor_msgs::CameraInfoPtr rightCamInfoMsg, std::string leftFrameId, std::string rightFrameId, + bool rawParam = false); + + /*! \brief Get the information of the ZED cameras and store them in an + * information message for depth topics + * \param zed : the sl::zed::Camera* pointer to an instance + * \param depth_info_msg : the information message to fill with the left + * camera informations + * \param frame_id : the id of the reference frame of the left camera + */ + void fillCamDepthInfo(sl::Camera& zed, sensor_msgs::CameraInfoPtr depth_info_msg, std::string frame_id); + + /*! \brief Check if FPS and Resolution chosen by user are correct. 
+ * Modifies FPS to match correct value. + */ + void checkResolFps(); + + // ----> Region of Interest + std::string getRoiParam(std::string paramName, std::vector>& outVal); + std::string parseRoiPoly(const std::vector>& in_poly, std::vector& out_poly); + void resetRoi(); + // <---- Region of Interest + + /*! \brief Callback to handle dynamic reconfigure events in ROS + */ + void callback_dynamicReconf(zed_nodelets::ZedConfig& config, uint32_t level); + + /*! \brief Callback to publish Path data with a ROS publisher. + * \param e : the ros::TimerEvent binded to the callback + */ + void callback_pubPath(const ros::TimerEvent& e); + + /*! \brief Callback to update node diagnostic status + * \param stat : node status + */ + void callback_updateDiagnostic(diagnostic_updater::DiagnosticStatusWrapper& stat); + + /*! \brief Callback to receive geometry_msgs::PointStamped topics + * \param msg : pointer to the received message + */ + void clickedPtCallback(geometry_msgs::PointStampedConstPtr msg); + + /*! \brief Service callback to reset_tracking service + * Tracking pose is reinitialized to the value available in the ROS Param + * server + */ + bool on_reset_tracking(zed_interfaces::reset_tracking::Request& req, zed_interfaces::reset_tracking::Response& res); + + /*! \brief Service callback to reset_odometry service + * Odometry is reset to clear drift and odometry frame gets the latest + * pose + * from ZED tracking. + */ + bool on_reset_odometry(zed_interfaces::reset_odometry::Request& req, zed_interfaces::reset_odometry::Response& res); + + /*! \brief Service callback to set_pose service + * Tracking pose is set to the new values + */ + bool on_set_pose(zed_interfaces::set_pose::Request& req, zed_interfaces::set_pose::Response& res); + + /*! \brief Service callback to start_svo_recording service + */ + bool on_start_svo_recording(zed_interfaces::start_svo_recording::Request& req, + zed_interfaces::start_svo_recording::Response& res); + + /*! 
\brief Service callback to stop_svo_recording service + */ + bool on_stop_svo_recording(zed_interfaces::stop_svo_recording::Request& req, + zed_interfaces::stop_svo_recording::Response& res); + + /*! \brief Service callback to start_remote_stream service + */ + bool on_start_remote_stream(zed_interfaces::start_remote_stream::Request& req, + zed_interfaces::start_remote_stream::Response& res); + + /*! \brief Service callback to stop_remote_stream service + */ + bool on_stop_remote_stream(zed_interfaces::stop_remote_stream::Request& req, + zed_interfaces::stop_remote_stream::Response& res); + + /*! \brief Service callback to set_roi service + */ + bool on_set_roi(zed_interfaces::set_roi::Request& req, zed_interfaces::set_roi::Response& res); + + /*! \brief Service callback to reset_roi service + */ + bool on_reset_roi(zed_interfaces::reset_roi::Request& req, zed_interfaces::reset_roi::Response& res); + + /*! \brief Service callback to set_led_status service + */ + bool on_set_led_status(zed_interfaces::set_led_status::Request& req, zed_interfaces::set_led_status::Response& res); + + /*! \brief Service callback to toggle_led service + */ + bool on_toggle_led(zed_interfaces::toggle_led::Request& req, zed_interfaces::toggle_led::Response& res); + + /*! \brief Service callback to start_3d_mapping service + */ + bool on_start_3d_mapping(zed_interfaces::start_3d_mapping::Request& req, + zed_interfaces::start_3d_mapping::Response& res); + + /*! \brief Service callback to stop_3d_mapping service + */ + bool on_stop_3d_mapping(zed_interfaces::stop_3d_mapping::Request& req, + zed_interfaces::stop_3d_mapping::Response& res); + + /*! \brief Service callback to save_3d_map service + */ + bool on_save_3d_map(zed_interfaces::save_3d_map::Request& req, zed_interfaces::save_3d_map::Response& res); + + /*! \brief Service callback to enable_object_detection service + */ + bool on_enable_object_detection(std_srvs::SetBool::Request& req, std_srvs::SetBool::Response& res); + + /*! 
\brief Service callback to save_area_memory service + */ + bool on_save_area_memory(zed_interfaces::save_area_memory::Request& req, + zed_interfaces::save_area_memory::Response& res); + + /*! \brief Utility to initialize the pose variables + */ + bool set_pose(float xt, float yt, float zt, float rr, float pr, float yr); + + /*! \brief Utility to initialize the most used transforms + */ + void initTransforms(); + + /*! \brief Utility to initialize the static transform from Sensor to Base + */ + bool getSens2BaseTransform(); + + /*! \brief Utility to initialize the static transform from Sensor to Camera + */ + bool getSens2CameraTransform(); + + /*! \brief Utility to initialize the static transform from Camera to Base + */ + bool getCamera2BaseTransform(); + + /*! \brief Start tracking + */ + void start_pos_tracking(); + + /*! \brief Start spatial mapping + */ + bool start_3d_mapping(); + + /*! \brief Stop spatial mapping + */ + void stop_3d_mapping(); + + /*! \brief Start object detection + */ + bool start_obj_detect(); + + /*! \brief Stop object detection + */ + void stop_obj_detect(); + + /*! \brief Publish object detection results + */ + void processDetectedObjects(ros::Time t); + + /*! \brief Process camera settings + */ + void processCameraSettings(); + + /*! \brief Process point cloud + * \param ts Frame timestamp + */ + void processPointcloud(ros::Time ts); + + /*! \brief Generates an univoque color for each object class ID + */ + inline sl::float3 generateColorClass(int idx) + { + sl::float3 clr; + clr.r = static_cast(33 + (idx * 456262)); + clr.g = static_cast(233 + (idx * 1564684)); + clr.b = static_cast(133 + (idx * 76873242)); + return clr / 255.f; + } + + /*! \brief Update Dynamic reconfigure parameters + */ + void updateDynamicReconfigure(); + + /*! 
\brief Save the current area map if positional tracking + * and area memory are active + */ + bool saveAreaMap(std::string file_path, std::string* out_msg = nullptr); private: - uint64_t mFrameCount = 0; - - // SDK version - int mVerMajor; - int mVerMinor; - int mVerSubMinor; - - // ROS - ros::NodeHandle mNh; - ros::NodeHandle mNhNs; - std::thread mDevicePollThread; - std::thread mPcThread; // Point Cloud thread - - bool mStopNode; - - // Publishers - image_transport::CameraPublisher mPubRgb; // - image_transport::CameraPublisher mPubRawRgb; // - image_transport::CameraPublisher mPubLeft; // - image_transport::CameraPublisher mPubRawLeft; // - image_transport::CameraPublisher mPubRight; // - image_transport::CameraPublisher mPubRawRight; // - image_transport::CameraPublisher mPubDepth; // - image_transport::Publisher mPubStereo; - image_transport::Publisher mPubRawStereo; - - image_transport::CameraPublisher mPubRgbGray; - image_transport::CameraPublisher mPubRawRgbGray; - image_transport::CameraPublisher mPubLeftGray; - image_transport::CameraPublisher mPubRawLeftGray; - image_transport::CameraPublisher mPubRightGray; - image_transport::CameraPublisher mPubRawRightGray; - - ros::Publisher mPubConfMap; // - ros::Publisher mPubDisparity; // - ros::Publisher mPubCloud; - ros::Publisher mPubFusedCloud; - ros::Publisher mPubPose; - ros::Publisher mPubPoseCov; - ros::Publisher mPubOdom; - ros::Publisher mPubOdomPath; - ros::Publisher mPubMapPath; - ros::Publisher mPubImu; - ros::Publisher mPubImuRaw; - ros::Publisher mPubImuTemp; - ros::Publisher mPubImuMag; - //ros::Publisher mPubImuMagRaw; - ros::Publisher mPubPressure; - ros::Publisher mPubTempL; - ros::Publisher mPubTempR; - ros::Publisher mPubCamImuTransf; - - // Timers - ros::Timer mImuTimer; - ros::Timer mPathTimer; - ros::Timer mFusedPcTimer; - ros::Timer mVideoDepthTimer; - - // Services - ros::ServiceServer mSrvSetInitPose; - ros::ServiceServer mSrvResetOdometry; - ros::ServiceServer mSrvResetTracking; - 
ros::ServiceServer mSrvSvoStartRecording; - ros::ServiceServer mSrvSvoStopRecording; - ros::ServiceServer mSrvSvoStartStream; - ros::ServiceServer mSrvSvoStopStream; - ros::ServiceServer mSrvSetLedStatus; - ros::ServiceServer mSrvToggleLed; - ros::ServiceServer mSrvStartMapping; - ros::ServiceServer mSrvStopMapping; - ros::ServiceServer mSrvStartObjDet; - ros::ServiceServer mSrvStopObjDet; - - // ----> Topics (ONLY THOSE NOT CHANGING WHILE NODE RUNS) - // Camera info - sensor_msgs::CameraInfoPtr mRgbCamInfoMsg; - sensor_msgs::CameraInfoPtr mLeftCamInfoMsg; - sensor_msgs::CameraInfoPtr mRightCamInfoMsg; - sensor_msgs::CameraInfoPtr mRgbCamInfoRawMsg; - sensor_msgs::CameraInfoPtr mLeftCamInfoRawMsg; - sensor_msgs::CameraInfoPtr mRightCamInfoRawMsg; - sensor_msgs::CameraInfoPtr mDepthCamInfoMsg; - - geometry_msgs::TransformPtr mCameraImuTransfMgs; - // <---- Topics - - // ROS TF - tf2_ros::TransformBroadcaster mTransformPoseBroadcaster; - tf2_ros::TransformBroadcaster mTransformOdomBroadcaster; - tf2_ros::TransformBroadcaster mTransformImuBroadcaster; - - std::string mRgbFrameId; - std::string mRgbOptFrameId; - - std::string mDepthFrameId; - std::string mDepthOptFrameId; - - std::string mDisparityFrameId; - std::string mDisparityOptFrameId; - - std::string mConfidenceFrameId; - std::string mConfidenceOptFrameId; - - std::string mCloudFrameId; - std::string mPointCloudFrameId; - - std::string mMapFrameId; - std::string mOdometryFrameId; - std::string mBaseFrameId; - std::string mCameraFrameId; - - std::string mRightCamFrameId; - std::string mRightCamOptFrameId; - std::string mLeftCamFrameId; - std::string mLeftCamOptFrameId; - std::string mImuFrameId; - - std::string mBaroFrameId; - std::string mMagFrameId; - std::string mTempLeftFrameId; - std::string mTempRightFrameId; - - bool mPublishTf; - bool mPublishMapTf; - bool mCameraFlip; - bool mCameraSelfCalib; - - // Launch file parameters - std::string mCameraName; - sl::RESOLUTION mCamResol; - int mCamFrameRate; - 
sl::DEPTH_MODE mDepthMode; - sl::SENSING_MODE mCamSensingMode; - int mGpuId; - int mZedId; - int mDepthStabilization; - std::string mAreaMemDbPath; - std::string mSvoFilepath; - std::string mRemoteStreamAddr; - double mSensPubRate = 400.0; - bool mSensTimestampSync; - double mPathPubRate; - int mPathMaxCount; - bool mVerbose; - bool mSvoMode = false; - double mCamMinDepth; - double mCamMaxDepth; - - bool mTrackingActivated; - - bool mTrackingReady; - bool mTwoDMode = false; - double mFixedZValue = 0.0; - bool mFloorAlignment = false; - bool mImuFusion = true; - bool mGrabActive = false; // Indicate if camera grabbing is active (at least one topic subscribed) - sl::ERROR_CODE mConnStatus; - sl::ERROR_CODE mGrabStatus; - sl::POSITIONAL_TRACKING_STATE mTrackingStatus; - bool mSensPublishing = false; - bool mPcPublishing = false; - - // Last frame time - ros::Time mPrevFrameTimestamp; - ros::Time mFrameTimestamp; - - // Positional Tracking variables - sl::Pose mLastZedPose; // Sensor to Map transform - sl::Transform mInitialPoseSl; - std::vector mInitialBasePose; - std::vector mOdomPath; - std::vector mMapPath; - - // TF Transforms - tf2::Transform mMap2OdomTransf; // Coordinates of the odometry frame in map frame - tf2::Transform mOdom2BaseTransf; // Coordinates of the base in odometry frame - tf2::Transform mMap2BaseTransf; // Coordinates of the base in map frame - tf2::Transform mMap2CameraTransf; // Coordinates of the camera in base frame - tf2::Transform mSensor2BaseTransf; // Coordinates of the base frame in sensor frame - tf2::Transform mSensor2CameraTransf; // Coordinates of the camera frame in sensor frame - tf2::Transform mCamera2BaseTransf; // Coordinates of the base frame in camera frame - - bool mSensor2BaseTransfValid = false; - bool mSensor2CameraTransfValid = false; - bool mCamera2BaseTransfValid = false; - - // initialization Transform listener - boost::shared_ptr mTfBuffer; - boost::shared_ptr mTfListener; - - // Zed object - sl::InitParameters 
mZedParams; - sl::Camera mZed; - unsigned int mZedSerialNumber; - sl::MODEL mZedUserCamModel; // Camera model set by ROS Param - sl::MODEL mZedRealCamModel; // Real camera model by SDK API - unsigned int mCamFwVersion; // Camera FW version - unsigned int mSensFwVersion; // Sensors FW version - - // Dynamic Parameters - int mCamBrightness = 4; - int mCamContrast = 4; - int mCamHue = 0; - int mCamSaturation = 4; - int mCamSharpness = 3; - int mCamGamma = 1; - bool mCamAutoExposure = true; - int mCamGain = 100; - int mCamExposure = 100; - bool mCamAutoWB = true; - int mCamWB = 4200; - - int mCamDepthConfidence = 50; - int mCamDepthTextureConf = 100; - double mPointCloudFreq = 15.; - double mVideoDepthFreq = 15.; - - double mCamImageResizeFactor = 1.0; - double mCamDepthResizeFactor = 1.0; - - // flags - bool mTriggerAutoExposure = true; - bool mTriggerAutoWB = true; - bool mComputeDepth; - bool mOpenniDepthMode; // 16 bit UC data in mm else 32F in m, for more info -> http://www.ros.org/reps/rep-0118.html - bool mPoseSmoothing = false; // Always disabled. 
Enable only for AR/VR applications - bool mAreaMemory; - bool mInitOdomWithPose; - bool mResetOdom = false; - bool mUseOldExtrinsic = false; - bool mUpdateDynParams = false; - bool mPublishingData = false; - - // SVO recording - bool mRecording = false; - sl::RecordingStatus mRecStatus; - sl::SVO_COMPRESSION_MODE mSvoComprMode; - - // Streaming - bool mStreaming = false; - - // Mat - int mCamWidth; - int mCamHeight; - sl::Resolution mMatResolVideo; - sl::Resolution mMatResolDepth; - - // Thread Sync - std::mutex mCloseZedMutex; - std::mutex mCamDataMutex; - std::mutex mPcMutex; - std::mutex mRecMutex; - std::mutex mPosTrkMutex; - std::mutex mDynParMutex; - std::mutex mMappingMutex; - std::mutex mObjDetMutex; - std::condition_variable mPcDataReadyCondVar; - bool mPcDataReady; - - // Point cloud variables - sl::Mat mCloud; - sl::FusedPointCloud mFusedPC; - ros::Time mPointCloudTime; - - // Dynamic reconfigure - boost::recursive_mutex mDynServerMutex; // To avoid Dynamic Reconfigure Server warning - boost::shared_ptr> mDynRecServer; - - // Diagnostic - float mTempLeft = -273.15f; - float mTempRight = -273.15f; - std::unique_ptr mElabPeriodMean_sec; - std::unique_ptr mGrabPeriodMean_usec; - std::unique_ptr mVideoDepthPeriodMean_sec; - std::unique_ptr mPcPeriodMean_usec; - std::unique_ptr mSensPeriodMean_usec; - std::unique_ptr mObjDetPeriodMean_msec; - - diagnostic_updater::Updater mDiagUpdater; // Diagnostic Updater - - // Camera IMU transform - sl::Transform mSlCamImuTransf; - - // Spatial mapping - bool mMappingEnabled; - bool mMappingRunning; - float mMappingRes = 0.1; - float mMaxMappingRange = -1; - double mFusedPcPubFreq = 2.0; - - // Object Detection - bool mObjDetEnabled = false; - bool mObjDetRunning = false; - float mObjDetConfidence = 50.f; - bool mObjDetTracking = true; - bool mObjDetPeople = true; - bool mObjDetVehicles = true; - std::vector mObjDetFilter; - - ros::Publisher mPubObjDet; - ros::Publisher mPubObjDetViz; -}; // class ZEDROSWrapperNodelet -} 
// namespace + uint64_t mFrameCount = 0; + + // SDK version + int mVerMajor; + int mVerMinor; + int mVerSubMinor; + + // ROS + ros::NodeHandle mNh; + ros::NodeHandle mNhNs; + std::thread mDevicePollThread; + std::thread mPcThread; // Point Cloud thread + std::thread mSensThread; // Sensors data thread + + bool mStopNode = false; + + // Publishers + image_transport::CameraPublisher mPubRgb; // + image_transport::CameraPublisher mPubRawRgb; // + image_transport::CameraPublisher mPubLeft; // + image_transport::CameraPublisher mPubRawLeft; // + image_transport::CameraPublisher mPubRight; // + image_transport::CameraPublisher mPubRawRight; // + image_transport::CameraPublisher mPubDepth; // + image_transport::Publisher mPubStereo; + image_transport::Publisher mPubRawStereo; + + image_transport::CameraPublisher mPubRgbGray; + image_transport::CameraPublisher mPubRawRgbGray; + image_transport::CameraPublisher mPubLeftGray; + image_transport::CameraPublisher mPubRawLeftGray; + image_transport::CameraPublisher mPubRightGray; + image_transport::CameraPublisher mPubRawRightGray; + + ros::Publisher mPubConfMap; // + ros::Publisher mPubDisparity; // + ros::Publisher mPubCloud; + ros::Publisher mPubFusedCloud; + ros::Publisher mPubPose; + ros::Publisher mPubPoseCov; + ros::Publisher mPubOdom; + ros::Publisher mPubOdomPath; + ros::Publisher mPubMapPath; + ros::Publisher mPubImu; + ros::Publisher mPubImuRaw; + ros::Publisher mPubImuTemp; + ros::Publisher mPubImuMag; + // ros::Publisher mPubImuMagRaw; + ros::Publisher mPubPressure; + ros::Publisher mPubTempL; + ros::Publisher mPubTempR; + ros::Publisher mPubCamImuTransf; + + ros::Publisher mPubMarker; // Publisher for Rviz markers + ros::Publisher mPubPlane; // Publisher for detected planes + + ros::Publisher mPubPoseStatus; + ros::Publisher mPubOdomStatus; + + // Subscribers + ros::Subscriber mClickedPtSub; + + // Timers + ros::Timer mPathTimer; + ros::Timer mFusedPcTimer; + + // Services + ros::ServiceServer mSrvSetInitPose; + 
ros::ServiceServer mSrvResetOdometry; + ros::ServiceServer mSrvResetTracking; + ros::ServiceServer mSrvSvoStartRecording; + ros::ServiceServer mSrvSvoStopRecording; + ros::ServiceServer mSrvSvoStartStream; + ros::ServiceServer mSrvSvoStopStream; + ros::ServiceServer mSrvSetLedStatus; + ros::ServiceServer mSrvToggleLed; + ros::ServiceServer mSrvStartMapping; + ros::ServiceServer mSrvStopMapping; + ros::ServiceServer mSrvSave3dMap; + ros::ServiceServer mSrvEnableObjDet; + ros::ServiceServer mSrvSaveAreaMemory; + ros::ServiceServer mSrvSetRoi; + ros::ServiceServer mSrvResetRoi; + + // ----> Topics (ONLY THOSE NOT CHANGING WHILE NODE RUNS) + // Camera info + sensor_msgs::CameraInfoPtr mRgbCamInfoMsg; + sensor_msgs::CameraInfoPtr mLeftCamInfoMsg; + sensor_msgs::CameraInfoPtr mRightCamInfoMsg; + sensor_msgs::CameraInfoPtr mRgbCamInfoRawMsg; + sensor_msgs::CameraInfoPtr mLeftCamInfoRawMsg; + sensor_msgs::CameraInfoPtr mRightCamInfoRawMsg; + sensor_msgs::CameraInfoPtr mDepthCamInfoMsg; + + geometry_msgs::TransformPtr mCameraImuTransfMgs; + // <---- Topics + + // ROS TF + tf2_ros::TransformBroadcaster mTfBroadcaster; + tf2_ros::StaticTransformBroadcaster mStaticTfBroadcaster; + + std::string mRgbFrameId; + std::string mRgbOptFrameId; + + std::string mDepthFrameId; + std::string mDepthOptFrameId; + + std::string mDisparityFrameId; + std::string mDisparityOptFrameId; + + std::string mConfidenceFrameId; + std::string mConfidenceOptFrameId; + + std::string mCloudFrameId; + std::string mPointCloudFrameId; + + std::string mMapFrameId = "map"; + std::string mOdomFrameId = "odom"; + std::string mBaseFrameId = "base_link"; + std::string mCameraFrameId; + + std::string mRightCamFrameId; + std::string mRightCamOptFrameId; + std::string mLeftCamFrameId; + std::string mLeftCamOptFrameId; + std::string mImuFrameId; + + std::string mBaroFrameId; + std::string mMagFrameId; + std::string mTempLeftFrameId; + std::string mTempRightFrameId; + + // Launch file parameters + std::string 
mCameraName; + sl::RESOLUTION mCamResol; + int mCamFrameRate; + double mPubFrameRate = 15.; + sl::DEPTH_MODE mDepthMode = sl::DEPTH_MODE::ULTRA; + int mGpuId; + int mZedId; + int mDepthStabilization; + std::string mAreaMemDbPath; + bool mSaveAreaMapOnClosing = true; + std::string mSvoFilepath; + bool mSvoRealtime = true; + std::string mRemoteStreamAddr; + bool mSensTimestampSync; + double mSensPubRate = 400.0; + double mPathPubRate; + int mPathMaxCount; + int mSdkVerbose = 1; + std::vector> mRoiParam; + bool mSvoMode = false; + double mCamMinDepth; + double mCamMaxDepth; + std::string mClickedPtTopic = "/clicked_point"; + + // Positional tracking + bool mPosTrackingEnabled = false; + sl::POSITIONAL_TRACKING_MODE mPosTrkMode = sl::POSITIONAL_TRACKING_MODE::GEN_2; + bool mPosTrackingReady = false; + bool mPosTrackingStarted = false; + bool mPosTrackingRequired = false; + bool mTwoDMode = false; + double mFixedZValue = 0.0; + bool mFloorAlignment = false; + bool mImuFusion = true; + bool mSetGravityAsOrigin = false; + bool mPublishTF; + bool mPublishMapTF; + bool mPublishImuTf; + sl::FLIP_MODE mCameraFlip; + bool mCameraSelfCalib; + bool mIsStatic = false; + double mPosTrkMinDepth = 0.0; + + // Flags + bool mGrabActive = false; // Indicate if camera grabbing is active (at least one topic subscribed) + sl::ERROR_CODE mConnStatus; + sl::ERROR_CODE mGrabStatus; + sl::POSITIONAL_TRACKING_STATE mPosTrackingStatusWorld; + sl::POSITIONAL_TRACKING_STATE mPosTrackingStatusCamera; + bool mSensPublishing = false; + bool mPcPublishing = false; + bool mDepthDisabled = false; + + // Last frame time + ros::Time mPrevFrameTimestamp; + ros::Time mFrameTimestamp; + + // Positional Tracking variables + sl::Pose mLastZedPose; // Sensor to Map transform + sl::Transform mInitialPoseSl; + std::vector mInitialBasePose; + std::vector mOdomPath; + std::vector mMapPath; + ros::Time mLastTs_odom; + ros::Time mLastTs_pose; + + // IMU TF + tf2::Transform mLastImuPose; + + // TF Transforms + 
tf2::Transform mMap2OdomTransf; // Coordinates of the odometry frame in map frame + tf2::Transform mOdom2BaseTransf; // Coordinates of the base in odometry frame + tf2::Transform mMap2BaseTransf; // Coordinates of the base in map frame + tf2::Transform mSensor2BaseTransf; // Coordinates of the base frame in sensor frame + tf2::Transform mSensor2CameraTransf; // Coordinates of the camera frame in sensor frame + tf2::Transform mCamera2BaseTransf; // Coordinates of the base frame in camera frame + + bool mSensor2BaseTransfValid = false; + bool mSensor2CameraTransfValid = false; + bool mCamera2BaseTransfValid = false; + bool mStaticImuFramePublished = false; + + // initialization Transform listener + boost::shared_ptr mTfBuffer; + boost::shared_ptr mTfListener; + + // Zed object + sl::InitParameters mZedParams; + sl::Camera mZed; + unsigned int mZedSerialNumber; + sl::MODEL mZedUserCamModel; // Camera model set by ROS Param + sl::MODEL mZedRealCamModel; // Real camera model by SDK API + unsigned int mCamFwVersion; // Camera FW version + unsigned int mSensFwVersion; // Sensors FW version + + // Dynamic Parameters + int mCamBrightness = 4; + int mCamContrast = 4; + int mCamHue = 0; + int mCamSaturation = 4; + int mCamSharpness = 3; + int mCamGamma = 1; + bool mCamAutoExposure = true; + int mCamGain = 100; + int mCamExposure = 100; + bool mCamAutoWB = true; + int mCamWB = 4200; + + int mCamDepthConfidence = 50; + int mCamDepthTextureConf = 100; + double mPointCloudFreq = 15.; + + PubRes mPubResolution = PubRes::NATIVE; // Use native grab resolution by default + double mCustomDownscaleFactor = 1.0; // Used to rescale data with user factor + + // flags + bool mTriggerAutoExposure = true; + bool mTriggerAutoWB = true; + bool mComputeDepth; + bool mOpenniDepthMode; // 16 bit UC data in mm else 32F in m, for more info -> http://www.ros.org/reps/rep-0118.html + bool mPoseSmoothing = false; // Always disabled. 
Enable only for AR/VR applications + bool mAreaMemory; + bool mInitOdomWithPose; + bool mUpdateDynParams = false; + bool mPublishingData = false; + + // SVO recording + bool mRecording = false; + sl::RecordingStatus mRecStatus; + sl::SVO_COMPRESSION_MODE mSvoComprMode; + + // Streaming + bool mStreaming = false; + + // Mat + int mCamWidth; + int mCamHeight; + sl::Resolution mMatResol; + + // Thread Sync + std::mutex mCloseZedMutex; + std::mutex mCamDataMutex; + std::mutex mPcMutex; + std::mutex mRecMutex; + std::mutex mPosTrkMutex; + std::mutex mOdomMutex; + std::mutex mDynParMutex; + std::mutex mMappingMutex; + std::mutex mObjDetMutex; + std::condition_variable mPcDataReadyCondVar; + bool mPcDataReady; + + // Point cloud variables + sl::Mat mCloud; + sl::FusedPointCloud mFusedPC; + ros::Time mPointCloudTime; + + // Dynamic reconfigure + boost::recursive_mutex mDynServerMutex; // To avoid Dynamic Reconfigure Server warning + boost::shared_ptr> mDynRecServer; + + // Diagnostic + float mTempLeft = -273.15f; + float mTempRight = -273.15f; + std::unique_ptr mElabPeriodMean_sec; + std::unique_ptr mGrabPeriodMean_usec; + std::unique_ptr mVideoDepthPeriodMean_sec; + std::unique_ptr mPcPeriodMean_usec; + std::unique_ptr mSensPeriodMean_usec; + std::unique_ptr mObjDetPeriodMean_msec; + + diagnostic_updater::Updater mDiagUpdater; // Diagnostic Updater + + // Camera IMU transform + sl::Transform mSlCamImuTransf; + geometry_msgs::TransformStamped mStaticImuTransformStamped; + + // Spatial mapping + bool mMappingEnabled; + bool mMappingRunning; + bool mMapSave = false; + float mMappingRes = 0.1; + float mMaxMappingRange = -1; + double mFusedPcPubFreq = 2.0; + + // Object Detection + bool mObjDetEnabled = false; + bool mObjDetRunning = false; + bool mObjDetTracking = true; + bool mObjDetReducedPrecision = false; + bool mObjDetBodyFitting = true; + float mObjDetConfidence = 50.f; + float mObjDetMaxRange = 10.f; + double mObjDetPredTimeout = 0.5; + std::vector mObjDetFilter; + 
bool mObjDetPeopleEnable = true; + bool mObjDetVehiclesEnable = true; + bool mObjDetBagsEnable = true; + bool mObjDetAnimalsEnable = true; + bool mObjDetElectronicsEnable = true; + bool mObjDetFruitsEnable = true; + bool mObjDetSportEnable = true; + + sl::OBJECT_DETECTION_MODEL mObjDetModel = sl::OBJECT_DETECTION_MODEL::MULTI_CLASS_BOX_MEDIUM; + sl::OBJECT_FILTERING_MODE mObjFilterMode = sl::OBJECT_FILTERING_MODE::NMS3D; + + ros::Publisher mPubObjDet; +}; // class ZEDROSWrapperNodelet +} // namespace zed_nodelets #include PLUGINLIB_EXPORT_CLASS(zed_nodelets::ZEDWrapperNodelet, nodelet::Nodelet); - -#endif // ZED_WRAPPER_NODELET_H +#endif // ZED_WRAPPER_NODELET_H diff --git a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp index b2720233..7150918c 100644 --- a/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp +++ b/zed_nodelets/src/zed_nodelet/src/zed_wrapper_nodelet.cpp @@ -1,6 +1,6 @@ /////////////////////////////////////////////////////////////////////////// // -// Copyright (c) 2020, STEREOLABS. +// Copyright (c) 2023, STEREOLABS. // // All rights reserved. // @@ -18,3486 +18,4150 @@ // /////////////////////////////////////////////////////////////////////////// +#include +#include +#include + #include "zed_wrapper_nodelet.hpp" -#include -#include +#include "zed_wrapper_nodelet.hpp" #ifndef NDEBUG #include #endif -#include "zed_interfaces/ObjectStamped.h" -#include "zed_interfaces/Objects.h" +#include +#include +#include +#include -#include "visualization_msgs/Marker.h" -#include "visualization_msgs/MarkerArray.h" - -namespace zed_nodelets { +//#define DEBUG_SENS_TS 1 +namespace zed_nodelets +{ #ifndef DEG2RAD #define DEG2RAD 0.017453293 #define RAD2DEG 57.295777937 #endif -#define MAG_FREQ 50. -#define BARO_FREQ 25. - -ZEDWrapperNodelet::ZEDWrapperNodelet() : Nodelet() {} +#define MAG_FREQ 50. +#define BARO_FREQ 25. 
-ZEDWrapperNodelet::~ZEDWrapperNodelet() { - if (mDevicePollThread.joinable()) { - mDevicePollThread.join(); - } - - if (mPcThread.joinable()) { - mPcThread.join(); - } +ZEDWrapperNodelet::ZEDWrapperNodelet() : Nodelet() +{ } -void ZEDWrapperNodelet::onInit() { - - // Node handlers - mNh = getMTNodeHandle(); - mNhNs = getMTPrivateNodeHandle(); - - mStopNode = false; - mPcDataReady = false; - -#ifndef NDEBUG - - if (ros::console::set_logger_level(ROSCONSOLE_DEFAULT_NAME, - ros::console::levels::Debug)) { - ros::console::notifyLoggerLevelsChanged(); - } - -#endif - - NODELET_INFO( "********** Starting nodelet '%s' **********",getName().c_str() ); - - std::string ver = sl_tools::getSDKVersion(mVerMajor, mVerMinor, mVerSubMinor); - NODELET_INFO_STREAM("SDK version : " << ver); - - if( mVerMajor < 3 ) { - NODELET_ERROR( "This version of the ZED ROS Wrapper is designed to be used with ZED SDK v3.x"); - ros::shutdown(); - raise(SIGINT); - raise(SIGABRT); - exit(-1); - } - - readParameters(); - - initTransforms(); - - // Set the video topic names - std::string rgbTopicRoot = "rgb"; - std::string rightTopicRoot = "right"; - std::string leftTopicRoot = "left"; - std::string stereoTopicRoot = "stereo"; - std::string img_topic = "/image_rect_color"; - std::string img_raw_topic = "/image_raw_color"; - std::string img_gray_topic = "/image_rect_gray"; - std::string img_raw_gray_topic_ = "/image_raw_gray"; - std::string raw_suffix = "_raw"; - string left_topic = leftTopicRoot + img_topic; - string left_raw_topic = leftTopicRoot + raw_suffix + img_raw_topic; - string right_topic = rightTopicRoot + img_topic; - string right_raw_topic = rightTopicRoot + raw_suffix + img_raw_topic; - string rgb_topic = rgbTopicRoot + img_topic; - string rgb_raw_topic = rgbTopicRoot + raw_suffix + img_raw_topic; - string stereo_topic = stereoTopicRoot + img_topic; - string stereo_raw_topic = stereoTopicRoot + raw_suffix + img_raw_topic; - string left_gray_topic = leftTopicRoot + img_gray_topic; - 
string left_raw_gray_topic = leftTopicRoot + raw_suffix + img_raw_gray_topic_; - string right_gray_topic = rightTopicRoot + img_gray_topic; - string right_raw_gray_topic = rightTopicRoot + raw_suffix + img_raw_gray_topic_; - string rgb_gray_topic = rgbTopicRoot + img_gray_topic; - string rgb_raw_gray_topic = rgbTopicRoot + raw_suffix + img_raw_gray_topic_; - - // Set the disparity topic name - std::string disparityTopic = "disparity/disparity_image"; - - // Set the depth topic names - string depth_topic_root = "depth"; - - if (mOpenniDepthMode) { - NODELET_INFO_STREAM("Openni depth mode activated"); - depth_topic_root += "/depth_raw_registered"; - } else { - depth_topic_root += "/depth_registered"; - } - - - string pointcloud_topic = "point_cloud/cloud_registered"; - - string pointcloud_fused_topic = "mapping/fused_cloud"; - - string object_det_topic_root = "obj_det"; - string object_det_topic = object_det_topic_root + "/objects"; - string object_det_rviz_topic = object_det_topic_root + "/object_markers"; - - std::string confImgRoot = "confidence"; - string conf_map_topic_name = "confidence_map"; - string conf_map_topic = confImgRoot + "/" + conf_map_topic_name; - - // Set the positional tracking topic names - std::string poseTopic = "pose"; - string pose_cov_topic; - pose_cov_topic = poseTopic + "_with_covariance"; - - std::string odometryTopic = "odom"; - string odom_path_topic = "path_odom"; - string map_path_topic = "path_map"; - - // Create camera info - mRgbCamInfoMsg.reset(new sensor_msgs::CameraInfo()); - mLeftCamInfoMsg.reset(new sensor_msgs::CameraInfo()); - mRightCamInfoMsg.reset(new sensor_msgs::CameraInfo()); - mRgbCamInfoRawMsg.reset(new sensor_msgs::CameraInfo()); - mLeftCamInfoRawMsg.reset(new sensor_msgs::CameraInfo()); - mRightCamInfoRawMsg.reset(new sensor_msgs::CameraInfo()); - mDepthCamInfoMsg.reset(new sensor_msgs::CameraInfo()); - - // Initialization transformation listener - mTfBuffer.reset(new tf2_ros::Buffer); - mTfListener.reset(new 
tf2_ros::TransformListener(*mTfBuffer)); - - // Try to initialize the ZED - if (!mSvoFilepath.empty() || !mRemoteStreamAddr.empty()) { - if (!mSvoFilepath.empty()) { - mZedParams.input.setFromSVOFile(mSvoFilepath.c_str()); - mZedParams.svo_real_time_mode = false; - } else if (!mRemoteStreamAddr.empty()) { - std::vector configStream = sl_tools::split_string(mRemoteStreamAddr, ':'); - sl::String ip = sl::String(configStream.at(0).c_str()); - - if (configStream.size() == 2) { - mZedParams.input.setFromStream(ip, atoi(configStream.at(1).c_str())); - } else { - mZedParams.input.setFromStream(ip); - } - } +ZEDWrapperNodelet::~ZEDWrapperNodelet() +{ + // std::cerr << "Destroying " << getName() << std::endl; - mSvoMode = true; - } else { - mZedParams.camera_fps = mCamFrameRate; - mZedParams.camera_resolution = static_cast(mCamResol); + if (mDevicePollThread.joinable()) + { + mDevicePollThread.join(); + } - if (mZedSerialNumber == 0) { - mZedParams.input.setFromCameraID(mZedId); - } else { - bool waiting_for_camera = true; + if (mPcThread.joinable()) + { + mPcThread.join(); + } - while (waiting_for_camera) { - // Ctrl+C check - if (!mNhNs.ok()) { - mStopNode = true; // Stops other threads + if (mSensThread.joinable()) + { + mSensThread.join(); + } - std::lock_guard lock(mCloseZedMutex); - NODELET_DEBUG("Closing ZED"); - mZed.close(); + if (mZed.isOpened()) + { + mZed.close(); + } - NODELET_DEBUG("ZED pool thread finished"); - return; - } + std::cerr << "ZED " << mZedSerialNumber << " closed." << std::endl; - sl::DeviceProperties prop = sl_tools::getZEDFromSN(mZedSerialNumber); - - if (prop.id < -1 || - prop.camera_state == sl::CAMERA_STATE::NOT_AVAILABLE) { - std::string msg = "ZED SN" + to_string(mZedSerialNumber) + - " not detected ! 
Please connect this ZED"; - NODELET_INFO_STREAM(msg.c_str()); - std::this_thread::sleep_for(std::chrono::milliseconds(2000)); - } else { - waiting_for_camera = false; - mZedParams.input.setFromCameraID(prop.id); - } - } - } - } + // std::cerr << "ZED Nodelet '" << getName().c_str() << "' correctly stopped" << std::endl; +} - mZedParams.coordinate_system = sl::COORDINATE_SYSTEM::RIGHT_HANDED_Z_UP_X_FWD; - NODELET_INFO_STREAM(" * Camera coordinate system\t-> " << sl::toString(mZedParams.coordinate_system)); +void ZEDWrapperNodelet::onInit() +{ + // Node handlers + mNh = getMTNodeHandle(); - mZedParams.coordinate_units = sl::UNIT::METER; - mZedParams.depth_mode = static_cast(mDepthMode); - mZedParams.sdk_verbose = mVerbose; - mZedParams.sdk_gpu_id = mGpuId; - mZedParams.depth_stabilization = mDepthStabilization; - mZedParams.camera_image_flip = mCameraFlip; - mZedParams.depth_minimum_distance = static_cast(mCamMinDepth); - mZedParams.depth_maximum_distance = static_cast(mCamMaxDepth); - mZedParams.camera_disable_self_calib = !mCameraSelfCalib; + mNhNs = getMTPrivateNodeHandle(); + mStopNode = false; + mPcDataReady = false; - mZedParams.enable_image_enhancement = true; // Always active +#ifndef NDEBUG - mDiagUpdater.add("ZED Diagnostic", this, &ZEDWrapperNodelet::updateDiagnostic); - mDiagUpdater.setHardwareID("ZED camera"); + if (ros::console::set_logger_level(ROSCONSOLE_DEFAULT_NAME, ros::console::levels::Debug)) + { + ros::console::notifyLoggerLevelsChanged(); + } - mConnStatus = sl::ERROR_CODE::CAMERA_NOT_DETECTED; +#endif - NODELET_INFO_STREAM(" *** Opening " << sl::toString( mZedUserCamModel) << "..."); - while (mConnStatus != sl::ERROR_CODE::SUCCESS) { - mConnStatus = mZed.open(mZedParams); - NODELET_INFO_STREAM("ZED connection -> " << sl::toString(mConnStatus)); - std::this_thread::sleep_for(std::chrono::milliseconds(2000)); + NODELET_INFO("********** Starting nodelet '%s' **********", getName().c_str()); + + std::string ver = sl_tools::getSDKVersion(mVerMajor, 
mVerMinor, mVerSubMinor); + NODELET_INFO_STREAM("SDK version: " << ver); + + if (mVerMajor < 3) + { + NODELET_ERROR("This version of the ZED ROS Wrapper is designed to be used with ZED SDK v3.x"); + ros::shutdown(); + raise(SIGINT); + raise(SIGABRT); + exit(-1); + } + + readParameters(); + + initTransforms(); + + // Set the video topic names + std::string rgbTopicRoot = "rgb"; + std::string rightTopicRoot = "right"; + std::string leftTopicRoot = "left"; + std::string stereoTopicRoot = "stereo"; + std::string img_topic = "/image_rect_color"; + std::string img_raw_topic = "/image_raw_color"; + std::string img_gray_topic = "/image_rect_gray"; + std::string img_raw_gray_topic_ = "/image_raw_gray"; + std::string raw_suffix = "_raw"; + std::string left_topic = leftTopicRoot + img_topic; + std::string left_raw_topic = leftTopicRoot + raw_suffix + img_raw_topic; + std::string right_topic = rightTopicRoot + img_topic; + std::string right_raw_topic = rightTopicRoot + raw_suffix + img_raw_topic; + std::string rgb_topic = rgbTopicRoot + img_topic; + std::string rgb_raw_topic = rgbTopicRoot + raw_suffix + img_raw_topic; + std::string stereo_topic = stereoTopicRoot + img_topic; + std::string stereo_raw_topic = stereoTopicRoot + raw_suffix + img_raw_topic; + std::string left_gray_topic = leftTopicRoot + img_gray_topic; + std::string left_raw_gray_topic = leftTopicRoot + raw_suffix + img_raw_gray_topic_; + std::string right_gray_topic = rightTopicRoot + img_gray_topic; + std::string right_raw_gray_topic = rightTopicRoot + raw_suffix + img_raw_gray_topic_; + std::string rgb_gray_topic = rgbTopicRoot + img_gray_topic; + std::string rgb_raw_gray_topic = rgbTopicRoot + raw_suffix + img_raw_gray_topic_; + + // Set the disparity topic name + std::string disparityTopic = "disparity/disparity_image"; + + // Set the depth topic names + std::string depth_topic_root = "depth"; + + if (mOpenniDepthMode) + { + NODELET_INFO_STREAM("Openni depth mode activated -> Units: mm, Encoding: 
TYPE_16UC1"); + } + depth_topic_root += "/depth_registered"; + + std::string pointcloud_topic = "point_cloud/cloud_registered"; + + std::string pointcloud_fused_topic = "mapping/fused_cloud"; + + std::string object_det_topic_root = "obj_det"; + std::string object_det_topic = object_det_topic_root + "/objects"; + + std::string confImgRoot = "confidence"; + std::string conf_map_topic_name = "confidence_map"; + std::string conf_map_topic = confImgRoot + "/" + conf_map_topic_name; + + // Set the positional tracking topic names + std::string poseTopic = "pose"; + std::string pose_cov_topic; + pose_cov_topic = poseTopic + "_with_covariance"; + + std::string odometryTopic = "odom"; + std::string odom_path_topic = "path_odom"; + std::string map_path_topic = "path_map"; + + std::string odomStatusTopic = odometryTopic + "/status"; + std::string poseStatusTopic = poseTopic + "/status"; + + // Extracted plane topics + std::string marker_topic = "plane_marker"; + std::string plane_topic = "plane"; + + // Create camera info + mRgbCamInfoMsg.reset(new sensor_msgs::CameraInfo()); + mLeftCamInfoMsg.reset(new sensor_msgs::CameraInfo()); + mRightCamInfoMsg.reset(new sensor_msgs::CameraInfo()); + mRgbCamInfoRawMsg.reset(new sensor_msgs::CameraInfo()); + mLeftCamInfoRawMsg.reset(new sensor_msgs::CameraInfo()); + mRightCamInfoRawMsg.reset(new sensor_msgs::CameraInfo()); + mDepthCamInfoMsg.reset(new sensor_msgs::CameraInfo()); + + // Initialization transformation listener + mTfBuffer.reset(new tf2_ros::Buffer); + mTfListener.reset(new tf2_ros::TransformListener(*mTfBuffer)); + + mZedParams.coordinate_system = sl::COORDINATE_SYSTEM::RIGHT_HANDED_Z_UP_X_FWD; + NODELET_INFO_STREAM(" * Camera coordinate system\t-> " << sl::toString(mZedParams.coordinate_system)); + + // Try to initialize the ZED + std::stringstream ss; // Input mode info for logging + std::string cam_id; + if (!mSvoFilepath.empty() || !mRemoteStreamAddr.empty()) + { + if (!mSvoFilepath.empty()) + { + 
mZedParams.input.setFromSVOFile(mSvoFilepath.c_str()); + mZedParams.svo_real_time_mode = mSvoRealtime; + + // Input mode info for logging + ss << "SVO - " << mSvoFilepath.c_str(); + cam_id = ss.str(); + } + else if (!mRemoteStreamAddr.empty()) + { + std::vector configStream = sl_tools::split_string(mRemoteStreamAddr, ':'); + sl::String ip = sl::String(configStream.at(0).c_str()); + + if (configStream.size() == 2) + { + mZedParams.input.setFromStream(ip, atoi(configStream.at(1).c_str())); + } + else + { + mZedParams.input.setFromStream(ip); + } + + // Input mode info for logging + ss << "NETWORK STREAM - " << mRemoteStreamAddr.c_str(); + cam_id = ss.str(); + } + + mSvoMode = true; + } + else + { + mZedParams.camera_fps = mCamFrameRate; + mZedParams.grab_compute_capping_fps = static_cast(mPubFrameRate); + mZedParams.camera_resolution = mCamResol; + + if (mZedSerialNumber == 0) + { + mZedParams.input.setFromCameraID(mZedId); + + // Input mode info for logging + ss << "LIVE CAMERA with ID " << mZedId; + cam_id = ss.str(); + } + else + { + mZedParams.input.setFromSerialNumber(mZedSerialNumber); + + // Input mode info for logging + ss << "LIVE CAMERA with SN " << mZedSerialNumber; + cam_id = ss.str(); + } + } + + mZedParams.async_grab_camera_recovery = true; + mZedParams.coordinate_units = sl::UNIT::METER; + mZedParams.depth_mode = static_cast(mDepthMode); + mZedParams.sdk_verbose = mSdkVerbose; + mZedParams.sdk_gpu_id = mGpuId; + mZedParams.depth_stabilization = mDepthStabilization; + mZedParams.camera_image_flip = mCameraFlip; + mZedParams.depth_minimum_distance = static_cast(mCamMinDepth); + mZedParams.depth_maximum_distance = static_cast(mCamMaxDepth); + mZedParams.camera_disable_self_calib = !mCameraSelfCalib; + mZedParams.open_timeout_sec = 10.0f; + + mZedParams.enable_image_enhancement = true; // Always active + + mDiagUpdater.add("ZED Diagnostic", this, &ZEDWrapperNodelet::callback_updateDiagnostic); + mDiagUpdater.setHardwareID("ZED camera"); + + mConnStatus = 
sl::ERROR_CODE::CAMERA_NOT_DETECTED; + + NODELET_INFO_STREAM(" *** Opening " << sl::toString(mZedUserCamModel) << " - " << ss.str().c_str() << " ***"); + while (mConnStatus != sl::ERROR_CODE::SUCCESS) + { + mConnStatus = mZed.open(mZedParams); + NODELET_INFO_STREAM("ZED connection [" << cam_id.c_str() << "]: " << sl::toString(mConnStatus)); + std::this_thread::sleep_for(std::chrono::milliseconds(2000)); + + if (!mNhNs.ok()) + { + mStopNode = true; + + std::lock_guard lock(mCloseZedMutex); + NODELET_INFO_STREAM("Closing ZED " << mZedSerialNumber << "..."); + if (mRecording) + { + mRecording = false; + mZed.disableRecording(); + } + if (mZed.isOpened()) + { + mZed.close(); + } + NODELET_INFO_STREAM("... ZED " << mZedSerialNumber << " closed."); + + NODELET_DEBUG("ZED pool thread finished"); + return; + } + + mDiagUpdater.update(); + } + NODELET_INFO_STREAM(" ... " << sl::toString(mZedRealCamModel) << " ready"); + + // CUdevice devid; + cuCtxGetDevice(&mGpuId); + + NODELET_INFO_STREAM("ZED SDK running on GPU #" << mGpuId); + + // Disable AEC_AGC and Auto Whitebalance to trigger it if use set to automatic + mZed.setCameraSettings(sl::VIDEO_SETTINGS::AEC_AGC, 0); + mZed.setCameraSettings(sl::VIDEO_SETTINGS::WHITEBALANCE_AUTO, 0); + + mZedRealCamModel = mZed.getCameraInformation().camera_model; + + if (mZedRealCamModel == sl::MODEL::ZED) + { + if (mZedUserCamModel != mZedRealCamModel) + { + NODELET_WARN( + "Camera model does not match user parameter. Please modify " + "the value of the parameter 'camera_model' to 'zed'"); + } + } + else if (mZedRealCamModel == sl::MODEL::ZED_M) + { + if (mZedUserCamModel != mZedRealCamModel) + { + NODELET_WARN( + "Camera model does not match user parameter. 
Please modify " + "the value of the parameter 'camera_model' to 'zedm'"); + } + mSlCamImuTransf = mZed.getCameraInformation().sensors_configuration.camera_imu_transform; + NODELET_INFO("Camera-IMU Transform: \n %s", mSlCamImuTransf.getInfos().c_str()); + } + else if (mZedRealCamModel == sl::MODEL::ZED2) + { + if (mZedUserCamModel != mZedRealCamModel) + { + NODELET_WARN( + "Camera model does not match user parameter. Please modify " + "the value of the parameter 'camera_model' to 'zed2'"); + } + mSlCamImuTransf = mZed.getCameraInformation().sensors_configuration.camera_imu_transform; + NODELET_INFO("Camera-IMU Transform: \n %s", mSlCamImuTransf.getInfos().c_str()); + } + else if (mZedRealCamModel == sl::MODEL::ZED2i) + { + if (mZedUserCamModel != mZedRealCamModel) + { + NODELET_WARN( + "Camera model does not match user parameter. Please modify " + "the value of the parameter 'camera_model' to 'zed2i'"); + } + mSlCamImuTransf = mZed.getCameraInformation().sensors_configuration.camera_imu_transform; + NODELET_INFO("Camera-IMU Transform: \n %s", mSlCamImuTransf.getInfos().c_str()); + } + else if (mZedRealCamModel == sl::MODEL::ZED_X) + { + if (mZedUserCamModel != mZedRealCamModel) + { + NODELET_WARN( + "Camera model does not match user parameter. Please modify " + "the value of the parameter 'camera_model' to 'zedx'"); + } + mSlCamImuTransf = mZed.getCameraInformation().sensors_configuration.camera_imu_transform; + NODELET_INFO("Camera-IMU Transform: \n %s", mSlCamImuTransf.getInfos().c_str()); + } + else if (mZedRealCamModel == sl::MODEL::ZED_XM) + { + if (mZedUserCamModel != mZedRealCamModel) + { + NODELET_WARN( + "Camera model does not match user parameter. 
Please modify " + "the value of the parameter 'camera_model' to 'zedxm'"); + } + mSlCamImuTransf = mZed.getCameraInformation().sensors_configuration.camera_imu_transform; + NODELET_INFO("Camera-IMU Transform: \n %s", mSlCamImuTransf.getInfos().c_str()); + } + + NODELET_INFO_STREAM(" * CAMERA MODEL\t-> " << sl::toString(mZedRealCamModel).c_str()); + mZedSerialNumber = mZed.getCameraInformation().serial_number; + NODELET_INFO_STREAM(" * Serial Number: " << mZedSerialNumber); + + if (!mSvoMode) + { + mCamFwVersion = mZed.getCameraInformation().camera_configuration.firmware_version; + NODELET_INFO_STREAM(" * Camera FW Version: " << mCamFwVersion); + if (mZedRealCamModel != sl::MODEL::ZED) + { + mSensFwVersion = mZed.getCameraInformation().sensors_configuration.firmware_version; + NODELET_INFO_STREAM(" * Sensors FW Version: " << mSensFwVersion); + } + } + + // Set the IMU topic names using real camera model + std::string imu_topic; + std::string imu_topic_raw; + std::string imu_temp_topic; + std::string imu_mag_topic; + // std::string imu_mag_topic_raw; + std::string pressure_topic; + std::string temp_topic_root = "temperature"; + std::string temp_topic_left = temp_topic_root + "/left"; + std::string temp_topic_right = temp_topic_root + "/right"; + + if (mZedRealCamModel != sl::MODEL::ZED) + { + std::string imuTopicRoot = "imu"; + std::string imu_topic_name = "data"; + std::string imu_topic_raw_name = "data_raw"; + std::string imu_topic_mag_name = "mag"; + // std::string imu_topic_mag_raw_name = "mag_raw"; + std::string pressure_topic_name = "atm_press"; + imu_topic = imuTopicRoot + "/" + imu_topic_name; + imu_topic_raw = imuTopicRoot + "/" + imu_topic_raw_name; + imu_temp_topic = temp_topic_root + "/" + imuTopicRoot; + imu_mag_topic = imuTopicRoot + "/" + imu_topic_mag_name; + // imu_mag_topic_raw = imuTopicRoot + "/" + imu_topic_mag_raw_name; + pressure_topic = /*imuTopicRoot + "/" +*/ pressure_topic_name; + } + + mDiagUpdater.setHardwareIDf("%s - s/n: %d [GPU #%d]", 
sl::toString(mZedRealCamModel).c_str(), mZedSerialNumber, + mGpuId); + + // ----> Dynamic Reconfigure parameters + mDynRecServer = boost::make_shared>(mDynServerMutex); + dynamic_reconfigure::Server::CallbackType f; + f = boost::bind(&ZEDWrapperNodelet::callback_dynamicReconf, this, _1, _2); + mDynRecServer->setCallback(f); + // Update parameters + zed_nodelets::ZedConfig config; + mDynRecServer->getConfigDefault(config); + mDynServerMutex.lock(); + mDynRecServer->updateConfig(config); + mDynServerMutex.unlock(); + + updateDynamicReconfigure(); + // <---- Dynamic Reconfigure parameters + + // ----> Publishers + NODELET_INFO("*** PUBLISHERS ***"); + + // Image publishers + image_transport::ImageTransport it_zed(mNhNs); + + mPubRgb = it_zed.advertiseCamera(rgb_topic, 1); // rgb + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRgb.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRgb.getInfoTopic()); + mPubRawRgb = it_zed.advertiseCamera(rgb_raw_topic, 1); // rgb raw + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawRgb.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawRgb.getInfoTopic()); + mPubLeft = it_zed.advertiseCamera(left_topic, 1); // left + NODELET_INFO_STREAM(" * Advertised on topic " << mPubLeft.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubLeft.getInfoTopic()); + mPubRawLeft = it_zed.advertiseCamera(left_raw_topic, 1); // left raw + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawLeft.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawLeft.getInfoTopic()); + mPubRight = it_zed.advertiseCamera(right_topic, 1); // right + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRight.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRight.getInfoTopic()); + mPubRawRight = it_zed.advertiseCamera(right_raw_topic, 1); // right raw + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawRight.getTopic()); + NODELET_INFO_STREAM(" * Advertised on 
topic " << mPubRawRight.getInfoTopic()); + + mPubRgbGray = it_zed.advertiseCamera(rgb_gray_topic, 1); // rgb + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRgbGray.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRgbGray.getInfoTopic()); + mPubRawRgbGray = it_zed.advertiseCamera(rgb_raw_gray_topic, 1); // rgb raw + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawRgbGray.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawRgbGray.getInfoTopic()); + mPubLeftGray = it_zed.advertiseCamera(left_gray_topic, 1); // left + NODELET_INFO_STREAM(" * Advertised on topic " << mPubLeftGray.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubLeftGray.getInfoTopic()); + mPubRawLeftGray = it_zed.advertiseCamera(left_raw_gray_topic, 1); // left raw + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawLeftGray.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawLeftGray.getInfoTopic()); + mPubRightGray = it_zed.advertiseCamera(right_gray_topic, 1); // right + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRightGray.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRightGray.getInfoTopic()); + mPubRawRightGray = it_zed.advertiseCamera(right_raw_gray_topic, 1); // right raw + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawRightGray.getTopic()); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawRightGray.getInfoTopic()); + + mPubStereo = it_zed.advertise(stereo_topic, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubStereo.getTopic()); + mPubRawStereo = it_zed.advertise(stereo_raw_topic, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubRawStereo.getTopic()); + + // Detected planes publisher + mPubPlane = mNhNs.advertise(plane_topic, 1); + + if (!mDepthDisabled) + { + mPubDepth = it_zed.advertiseCamera(depth_topic_root, 1); // depth + NODELET_INFO_STREAM(" * Advertised on topic " << mPubDepth.getTopic()); + NODELET_INFO_STREAM(" * 
Advertised on topic " << mPubDepth.getInfoTopic()); - if (!mNhNs.ok()) { - mStopNode = true; // Stops other threads + // Confidence Map publisher + mPubConfMap = mNhNs.advertise(conf_map_topic, 1); // confidence map + NODELET_INFO_STREAM(" * Advertised on topic " << mPubConfMap.getTopic()); - std::lock_guard lock(mCloseZedMutex); - NODELET_DEBUG("Closing ZED"); - mZed.close(); + // Disparity publisher + mPubDisparity = mNhNs.advertise(disparityTopic, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubDisparity.getTopic()); - NODELET_DEBUG("ZED pool thread finished"); - return; - } + // PointCloud publishers + mPubCloud = mNhNs.advertise(pointcloud_topic, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubCloud.getTopic()); - mDiagUpdater.update(); + if (mMappingEnabled) + { + mPubFusedCloud = mNhNs.advertise(pointcloud_fused_topic, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubFusedCloud.getTopic() << " @ " << mFusedPcPubFreq << " Hz"); } - NODELET_INFO_STREAM(" ... " << sl::toString( mZedRealCamModel) << " ready"); - - //CUdevice devid; - cuCtxGetDevice(&mGpuId); - - NODELET_INFO_STREAM("ZED SDK running on GPU #" << mGpuId); - - // Disable AEC_AGC and Auto Whitebalance to trigger it if use set to automatic - mZed.setCameraSettings(sl::VIDEO_SETTINGS::AEC_AGC, 0); - mZed.setCameraSettings(sl::VIDEO_SETTINGS::WHITEBALANCE_AUTO, 0); - - mZedRealCamModel = mZed.getCameraInformation().camera_model; - - if (mZedRealCamModel == sl::MODEL::ZED) { - if (mZedUserCamModel != mZedRealCamModel) { - NODELET_WARN("Camera model does not match user parameter. Please modify " - "the value of the parameter 'camera_model' to 'zed'"); - } - } else if (mZedRealCamModel == sl::MODEL::ZED_M) { - if (mZedUserCamModel != mZedRealCamModel) { - NODELET_WARN("Camera model does not match user parameter. 
Please modify " - "the value of the parameter 'camera_model' to 'zedm'"); - } -#if ZED_SDK_MAJOR_VERSION==3 && ZED_SDK_MINOR_VERSION<1 - mSlCamImuTransf = mZed.getCameraInformation().camera_imu_transform; -#else - mSlCamImuTransf = mZed.getCameraInformation().sensors_configuration.camera_imu_transform; -#endif - NODELET_INFO( "Camera-IMU Transform: \n %s", mSlCamImuTransf.getInfos().c_str() ); - } else if (mZedRealCamModel == sl::MODEL::ZED2) { - if (mZedUserCamModel != mZedRealCamModel) { - NODELET_WARN("Camera model does not match user parameter. Please modify " - "the value of the parameter 'camera_model' to 'zed2'"); - } - -#if ZED_SDK_MAJOR_VERSION==3 && ZED_SDK_MINOR_VERSION<1 - mSlCamImuTransf = mZed.getCameraInformation().camera_imu_transform; -#else - mSlCamImuTransf = mZed.getCameraInformation().sensors_configuration.camera_imu_transform; -#endif - - NODELET_INFO( "Camera-IMU Transform: \n %s", mSlCamImuTransf.getInfos().c_str() ); + // Object detection publishers + if (mObjDetEnabled) + { + mPubObjDet = mNhNs.advertise(object_det_topic, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubObjDet.getTopic()); } - NODELET_INFO_STREAM(" * CAMERA MODEL\t -> " << sl::toString(mZedRealCamModel).c_str()); - mZedSerialNumber = mZed.getCameraInformation().serial_number; - NODELET_INFO_STREAM(" * Serial Number -> " << mZedSerialNumber); + // Odometry and Pose publisher + mPubPose = mNhNs.advertise(poseTopic, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubPose.getTopic()); + mPubPoseCov = mNhNs.advertise(pose_cov_topic, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubPoseCov.getTopic()); - if (!mSvoMode) { -#if ZED_SDK_MAJOR_VERSION==3 && ZED_SDK_MINOR_VERSION<1 - mCamFwVersion = mZed.getCameraInformation().camera_firmware_version; -#else - mCamFwVersion = mZed.getCameraInformation().camera_configuration.firmware_version; -#endif + mPubOdom = mNhNs.advertise(odometryTopic, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << 
mPubOdom.getTopic()); - NODELET_INFO_STREAM(" * Camera FW Version -> " << mCamFwVersion); - if(mZedRealCamModel!=sl::MODEL::ZED) { -#if ZED_SDK_MAJOR_VERSION==3 && ZED_SDK_MINOR_VERSION<1 - mSensFwVersion = mZed.getCameraInformation().sensors_firmware_version; -#else - mSensFwVersion = mZed.getCameraInformation().sensors_configuration.firmware_version; -#endif - NODELET_INFO_STREAM(" * Sensors FW Version -> " << mSensFwVersion); - } - } else { - NODELET_INFO_STREAM(" * Input type\t -> " << sl::toString(mZed.getCameraInformation().input_type).c_str()); - } - - // Set the IMU topic names using real camera model - string imu_topic; - string imu_topic_raw; - string imu_temp_topic; - string imu_mag_topic; - //string imu_mag_topic_raw; - string pressure_topic; - string temp_topic_root = "temperature"; - string temp_topic_left = temp_topic_root + "/left"; - string temp_topic_right = temp_topic_root + "/right"; - - if (mZedRealCamModel != sl::MODEL::ZED) { - std::string imuTopicRoot = "imu"; - string imu_topic_name = "data"; - string imu_topic_raw_name = "data_raw"; - string imu_topic_mag_name = "mag"; - //string imu_topic_mag_raw_name = "mag_raw"; - string pressure_topic_name = "atm_press"; - imu_topic = imuTopicRoot + "/" + imu_topic_name; - imu_topic_raw = imuTopicRoot + "/" + imu_topic_raw_name; - imu_temp_topic = temp_topic_root + "/" + imuTopicRoot; - imu_mag_topic = imuTopicRoot + "/" + imu_topic_mag_name; - //imu_mag_topic_raw = imuTopicRoot + "/" + imu_topic_mag_raw_name; - pressure_topic = /*imuTopicRoot + "/" +*/ pressure_topic_name; - } - - mDiagUpdater.setHardwareIDf("%s - s/n: %d [GPU #%d]", sl::toString(mZedRealCamModel).c_str(), mZedSerialNumber, mGpuId); - - // ----> Dynamic Reconfigure parameters - mDynRecServer = boost::make_shared>(mDynServerMutex); - dynamic_reconfigure::Server::CallbackType f; - f = boost::bind(&ZEDWrapperNodelet::dynamicReconfCallback, this, _1, _2); - mDynRecServer->setCallback(f); - // Update parameters - zed_nodelets::ZedConfig 
config; - mDynRecServer->getConfigDefault(config); - mDynServerMutex.lock(); - mDynRecServer->updateConfig(config); - mDynServerMutex.unlock(); + mPubOdomStatus = mNhNs.advertise(odomStatusTopic, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubOdomStatus.getTopic()); + mPubPoseStatus = mNhNs.advertise(poseStatusTopic, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubPoseStatus.getTopic()); - updateDynamicReconfigure(); - // <---- Dynamic Reconfigure parameters - - // Create all the publishers - // Image publishers - image_transport::ImageTransport it_zed(mNhNs); - - mPubRgb = it_zed.advertiseCamera(rgb_topic, 1); // rgb - NODELET_INFO_STREAM("Advertised on topic " << mPubRgb.getTopic()); - NODELET_INFO_STREAM("Advertised on topic " << mPubRgb.getInfoTopic()); - mPubRawRgb = it_zed.advertiseCamera(rgb_raw_topic, 1); // rgb raw - NODELET_INFO_STREAM("Advertised on topic " << mPubRawRgb.getTopic()); - NODELET_INFO_STREAM("Advertised on topic " << mPubRawRgb.getInfoTopic()); - mPubLeft = it_zed.advertiseCamera(left_topic, 1); // left - NODELET_INFO_STREAM("Advertised on topic " << mPubLeft.getTopic()); - NODELET_INFO_STREAM("Advertised on topic " << mPubLeft.getInfoTopic()); - mPubRawLeft = it_zed.advertiseCamera(left_raw_topic, 1); // left raw - NODELET_INFO_STREAM("Advertised on topic " << mPubRawLeft.getTopic()); - NODELET_INFO_STREAM("Advertised on topic " << mPubRawLeft.getInfoTopic()); - mPubRight = it_zed.advertiseCamera(right_topic, 1); // right - NODELET_INFO_STREAM("Advertised on topic " << mPubRight.getTopic()); - NODELET_INFO_STREAM("Advertised on topic " << mPubRight.getInfoTopic()); - mPubRawRight = it_zed.advertiseCamera(right_raw_topic, 1); // right raw - NODELET_INFO_STREAM("Advertised on topic " << mPubRawRight.getTopic()); - NODELET_INFO_STREAM("Advertised on topic " << mPubRawRight.getInfoTopic()); - - mPubRgbGray = it_zed.advertiseCamera(rgb_gray_topic, 1); // rgb - NODELET_INFO_STREAM("Advertised on topic " << 
mPubRgbGray.getTopic()); - NODELET_INFO_STREAM("Advertised on topic " << mPubRgbGray.getInfoTopic()); - mPubRawRgbGray = it_zed.advertiseCamera(rgb_raw_gray_topic, 1); // rgb raw - NODELET_INFO_STREAM("Advertised on topic " << mPubRawRgbGray.getTopic()); - NODELET_INFO_STREAM("Advertised on topic " << mPubRawRgbGray.getInfoTopic()); - mPubLeftGray = it_zed.advertiseCamera(left_gray_topic, 1); // left - NODELET_INFO_STREAM("Advertised on topic " << mPubLeftGray.getTopic()); - NODELET_INFO_STREAM("Advertised on topic " << mPubLeftGray.getInfoTopic()); - mPubRawLeftGray = it_zed.advertiseCamera(left_raw_gray_topic, 1); // left raw - NODELET_INFO_STREAM("Advertised on topic " << mPubRawLeftGray.getTopic()); - NODELET_INFO_STREAM("Advertised on topic " << mPubRawLeftGray.getInfoTopic()); - mPubRightGray = it_zed.advertiseCamera(right_gray_topic, 1); // right - NODELET_INFO_STREAM("Advertised on topic " << mPubRightGray.getTopic()); - NODELET_INFO_STREAM("Advertised on topic " << mPubRightGray.getInfoTopic()); - mPubRawRightGray = it_zed.advertiseCamera(right_raw_gray_topic, 1); // right raw - NODELET_INFO_STREAM("Advertised on topic " << mPubRawRightGray.getTopic()); - NODELET_INFO_STREAM("Advertised on topic " << mPubRawRightGray.getInfoTopic()); - - mPubDepth = it_zed.advertiseCamera(depth_topic_root, 1); // depth - NODELET_INFO_STREAM("Advertised on topic " << mPubDepth.getTopic()); - NODELET_INFO_STREAM("Advertised on topic " << mPubDepth.getInfoTopic()); - - mPubStereo = it_zed.advertise(stereo_topic, 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubStereo.getTopic()); - mPubRawStereo = it_zed.advertise(stereo_raw_topic, 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubRawStereo.getTopic()); + // Rviz markers publisher + mPubMarker = mNhNs.advertise(marker_topic, 10, true); - // Confidence Map publisher - mPubConfMap = mNhNs.advertise(conf_map_topic, 1); // confidence map - NODELET_INFO_STREAM("Advertised on topic " << mPubConfMap.getTopic()); + // 
Camera Path + if (mPathPubRate > 0) + { + mPubOdomPath = mNhNs.advertise(odom_path_topic, 1, true); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubOdomPath.getTopic()); + mPubMapPath = mNhNs.advertise(map_path_topic, 1, true); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubMapPath.getTopic()); - // Disparity publisher - mPubDisparity = mNhNs.advertise(disparityTopic, static_cast(mVideoDepthFreq)); - NODELET_INFO_STREAM("Advertised on topic " << mPubDisparity.getTopic()); + mPathTimer = mNhNs.createTimer(ros::Duration(1.0 / mPathPubRate), &ZEDWrapperNodelet::callback_pubPath, this); - // PointCloud publishers - mPubCloud = mNhNs.advertise(pointcloud_topic, 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubCloud.getTopic()); + if (mPathMaxCount != -1) + { + NODELET_DEBUG_STREAM("Path vectors reserved " << mPathMaxCount << " poses."); + mOdomPath.reserve(mPathMaxCount); + mMapPath.reserve(mPathMaxCount); - if (mMappingEnabled) { - mPubFusedCloud = mNhNs.advertise(pointcloud_fused_topic, 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubFusedCloud.getTopic() << " @ " << mFusedPcPubFreq << " Hz"); + NODELET_DEBUG_STREAM("Path vector sizes: " << mOdomPath.size() << " " << mMapPath.size()); + } } + else + { + NODELET_INFO_STREAM("Path topics not published -> mPathPubRate: " << mPathPubRate); + } + } - // Object detection publishers - if (mObjDetEnabled) { - mPubObjDet = mNhNs.advertise(object_det_topic, 1); + // Sensor publishers + if (!sl_tools::isZED(mZedRealCamModel)) + { + // IMU Publishers + mPubImu = mNhNs.advertise(imu_topic, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubImu.getTopic()); + mPubImuRaw = mNhNs.advertise(imu_topic_raw, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubImuRaw.getTopic()); - NODELET_INFO_STREAM("Advertised on topic " << mPubObjDet.getTopic()); - mPubObjDetViz = mNhNs.advertise(object_det_rviz_topic, 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubObjDetViz.getTopic()); + if 
(mZedRealCamModel != sl::MODEL::ZED_M) + { + // IMU temperature sensor + mPubImuTemp = mNhNs.advertise(imu_temp_topic, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubImuTemp.getTopic()); } - // Odometry and Pose publisher - mPubPose = mNhNs.advertise(poseTopic, 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubPose.getTopic()); - - mPubPoseCov = mNhNs.advertise(pose_cov_topic, 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubPoseCov.getTopic()); + if (mZedRealCamModel == sl::MODEL::ZED2 || mZedRealCamModel == sl::MODEL::ZED2i) + { + mPubImuMag = mNhNs.advertise(imu_mag_topic, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubImuMag.getTopic()); + // Atmospheric pressure + mPubPressure = mNhNs.advertise(pressure_topic, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubPressure.getTopic()); - mPubOdom = mNhNs.advertise(odometryTopic, 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubOdom.getTopic()); + // CMOS sensor temperatures + mPubTempL = mNhNs.advertise(temp_topic_left, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubTempL.getTopic()); + mPubTempR = mNhNs.advertise(temp_topic_right, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubTempR.getTopic()); + } - // Camera Path - if (mPathPubRate > 0) { - mPubOdomPath = mNhNs.advertise(odom_path_topic, 1, true); - NODELET_INFO_STREAM("Advertised on topic " << mPubOdomPath.getTopic()); - mPubMapPath = mNhNs.advertise(map_path_topic, 1, true); - NODELET_INFO_STREAM("Advertised on topic " << mPubMapPath.getTopic()); + // Publish camera imu transform in a latched topic + if (mZedRealCamModel != sl::MODEL::ZED) + { + std::string cam_imu_tr_topic = "left_cam_imu_transform"; + mPubCamImuTransf = mNhNs.advertise(cam_imu_tr_topic, 1, true); - mPathTimer = mNhNs.createTimer(ros::Duration(1.0 / mPathPubRate), - &ZEDWrapperNodelet::pubPathCallback, this); + sl::Orientation sl_rot = mSlCamImuTransf.getOrientation(); + sl::Translation sl_tr = 
mSlCamImuTransf.getTranslation(); - if (mPathMaxCount != -1) { - NODELET_DEBUG_STREAM("Path vectors reserved " << mPathMaxCount << " poses."); - mOdomPath.reserve(mPathMaxCount); - mMapPath.reserve(mPathMaxCount); + mCameraImuTransfMgs = boost::make_shared(); - NODELET_DEBUG_STREAM("Path vector sizes: " << mOdomPath.size() << " " << mMapPath.size()); - } - } else { - NODELET_INFO_STREAM("Path topics not published -> mPathPubRate: " << mPathPubRate); - } - - // Sensor publishers - /*if (!mSvoMode)*/ { - if (mSensPubRate > 0 && mZedRealCamModel != sl::MODEL::ZED) { - // IMU Publishers - mPubImu = mNhNs.advertise(imu_topic, 1/*static_cast(mSensPubRate)*/); - NODELET_INFO_STREAM("Advertised on topic " << mPubImu.getTopic() << " @ " - << mSensPubRate << " Hz"); - mPubImuRaw = mNhNs.advertise(imu_topic_raw, 1/*static_cast(mSensPubRate)*/); - NODELET_INFO_STREAM("Advertised on topic " << mPubImuRaw.getTopic() << " @ " - << mSensPubRate << " Hz"); - mPubImuMag = mNhNs.advertise(imu_mag_topic, 1/*MAG_FREQ*/); - NODELET_INFO_STREAM("Advertised on topic " << mPubImuMag.getTopic() << " @ " - << std::min(MAG_FREQ,mSensPubRate) << " Hz"); - //mPubImuMagRaw = mNhNs.advertise(imu_mag_topic_raw, static_cast(MAG_FREQ)); - //NODELET_INFO_STREAM("Advertised on topic " << mPubImuMagRaw.getTopic() << " @ " - // << std::min(MAG_FREQ,mSensPubRate) << " Hz"); - - if( mZedRealCamModel == sl::MODEL::ZED2 ) { - // IMU temperature sensor - mPubImuTemp = mNhNs.advertise(imu_temp_topic, 1/*static_cast(mSensPubRate)*/); - NODELET_INFO_STREAM("Advertised on topic " << mPubImuTemp.getTopic() << " @ " << mSensPubRate << " Hz"); - - // Atmospheric pressure - mPubPressure = mNhNs.advertise(pressure_topic, 1/*static_cast(BARO_FREQ)*/); - NODELET_INFO_STREAM("Advertised on topic " << mPubPressure.getTopic() << " @ " - << std::min(BARO_FREQ,mSensPubRate ) << " Hz"); - - // CMOS sensor temperatures - mPubTempL = mNhNs.advertise(temp_topic_left, 1/*static_cast(BARO_FREQ)*/); - 
NODELET_INFO_STREAM("Advertised on topic " << mPubTempL.getTopic() << " @ " - << std::min(BARO_FREQ,mSensPubRate ) << " Hz"); - mPubTempR = mNhNs.advertise(temp_topic_right, 1/*static_cast(BARO_FREQ)*/); - NODELET_INFO_STREAM("Advertised on topic " << mPubTempR.getTopic() << " @ " - << std::min(BARO_FREQ,mSensPubRate ) << " Hz"); - } + mCameraImuTransfMgs->rotation.x = sl_rot.ox; + mCameraImuTransfMgs->rotation.y = sl_rot.oy; + mCameraImuTransfMgs->rotation.z = sl_rot.oz; + mCameraImuTransfMgs->rotation.w = sl_rot.ow; - mFrameTimestamp = ros::Time::now(); - mImuTimer = mNhNs.createTimer(ros::Duration(1.0 / mSensPubRate), - &ZEDWrapperNodelet::pubSensCallback, this); - mSensPeriodMean_usec.reset(new sl_tools::CSmartMean(mSensPubRate / 2)); + mCameraImuTransfMgs->translation.x = sl_tr.x; + mCameraImuTransfMgs->translation.y = sl_tr.y; + mCameraImuTransfMgs->translation.z = sl_tr.z; + NODELET_DEBUG("Camera-IMU Rotation: \n %s", sl_rot.getRotationMatrix().getInfos().c_str()); + NODELET_DEBUG("Camera-IMU Translation: \n %g %g %g", sl_tr.x, sl_tr.y, sl_tr.z); - } + mPubCamImuTransf.publish(mCameraImuTransfMgs); - // Publish camera imu transform in a latched topic - if (mZedRealCamModel != sl::MODEL::ZED) { - string cam_imu_tr_topic = "left_cam_imu_transform"; - mPubCamImuTransf = mNhNs.advertise( cam_imu_tr_topic, 1, true ); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubCamImuTransf.getTopic() << " [LATCHED]"); + } - sl::Orientation sl_rot = mSlCamImuTransf.getOrientation(); - sl::Translation sl_tr = mSlCamImuTransf.getTranslation(); + if (!mSvoMode && !mSensTimestampSync) + { + mFrameTimestamp = ros::Time::now(); + mSensPeriodMean_usec.reset(new sl_tools::CSmartMean(mSensPubRate / 2)); + } + else + { + mSensPeriodMean_usec.reset(new sl_tools::CSmartMean(mCamFrameRate / 2)); + } + } + // <---- Publishers - mCameraImuTransfMgs = boost::make_shared(); + // ----> Subscribers + NODELET_INFO("*** SUBSCRIBERS ***"); + mClickedPtSub = mNhNs.subscribe(mClickedPtTopic, 
10, &ZEDWrapperNodelet::clickedPtCallback, this); + NODELET_INFO_STREAM(" * Subscribed to topic " << mClickedPtTopic.c_str()); + // <---- Subscribers - mCameraImuTransfMgs->rotation.x = sl_rot.ox; - mCameraImuTransfMgs->rotation.y = sl_rot.oy; - mCameraImuTransfMgs->rotation.z = sl_rot.oz; - mCameraImuTransfMgs->rotation.w = sl_rot.ow; + // ----> Services + initServices(); + // <---- Services - mCameraImuTransfMgs->translation.x = sl_tr.x; - mCameraImuTransfMgs->translation.y = sl_tr.y; - mCameraImuTransfMgs->translation.z = sl_tr.z; + // ----> Threads + if (!mDepthDisabled) + { + // Start Pointcloud thread + mPcThread = std::thread(&ZEDWrapperNodelet::pointcloud_thread_func, this); + } - NODELET_DEBUG( "Camera-IMU Rotation: \n %s", sl_rot.getRotationMatrix().getInfos().c_str() ); - NODELET_DEBUG( "Camera-IMU Translation: \n %g %g %g", sl_tr.x, sl_tr.y, sl_tr.z ); + // Start pool thread + mDevicePollThread = std::thread(&ZEDWrapperNodelet::device_poll_thread_func, this); - mPubCamImuTransf.publish( mCameraImuTransfMgs ); + // Start Sensors thread + mSensThread = std::thread(&ZEDWrapperNodelet::sensors_thread_func, this); + // <---- Threads - NODELET_INFO_STREAM("Advertised on topic " << mPubCamImuTransf.getTopic() << " [LATCHED]"); - } - } + NODELET_INFO("+++ ZED Node started +++"); +} - // Services +void ZEDWrapperNodelet::initServices() +{ + NODELET_INFO("*** SERVICES ***"); + if (!mDepthDisabled) + { mSrvSetInitPose = mNhNs.advertiseService("set_pose", &ZEDWrapperNodelet::on_set_pose, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvSetInitPose.getService().c_str()); mSrvResetOdometry = mNhNs.advertiseService("reset_odometry", &ZEDWrapperNodelet::on_reset_odometry, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvResetOdometry.getService().c_str()); mSrvResetTracking = mNhNs.advertiseService("reset_tracking", &ZEDWrapperNodelet::on_reset_tracking, this); - mSrvSvoStartRecording = mNhNs.advertiseService("start_svo_recording", 
&ZEDWrapperNodelet::on_start_svo_recording, this); - mSrvSvoStopRecording = mNhNs.advertiseService("stop_svo_recording", &ZEDWrapperNodelet::on_stop_svo_recording, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvResetTracking.getService().c_str()); - mSrvSetLedStatus = mNhNs.advertiseService("set_led_status", &ZEDWrapperNodelet::on_set_led_status, this); - mSrvToggleLed = mNhNs.advertiseService("toggle_led", &ZEDWrapperNodelet::on_toggle_led, this); - mSrvSvoStartStream = mNhNs.advertiseService("start_remote_stream", &ZEDWrapperNodelet::on_start_remote_stream, this); - mSrvSvoStopStream = mNhNs.advertiseService("stop_remote_stream", &ZEDWrapperNodelet::on_stop_remote_stream, this); + mSrvSaveAreaMemory = mNhNs.advertiseService("save_area_memory", &ZEDWrapperNodelet::on_save_area_memory, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvSaveAreaMemory.getService().c_str()); mSrvStartMapping = mNhNs.advertiseService("start_3d_mapping", &ZEDWrapperNodelet::on_start_3d_mapping, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvStartMapping.getService().c_str()); mSrvStopMapping = mNhNs.advertiseService("stop_3d_mapping", &ZEDWrapperNodelet::on_stop_3d_mapping, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvStopMapping.getService().c_str()); + mSrvSave3dMap = mNhNs.advertiseService("save_3d_map", &ZEDWrapperNodelet::on_save_3d_map, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvSave3dMap.getService().c_str()); + + mSrvEnableObjDet = + mNhNs.advertiseService("enable_object_detection", &ZEDWrapperNodelet::on_enable_object_detection, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvEnableObjDet.getService().c_str()); + } + + mSrvSvoStartRecording = + mNhNs.advertiseService("start_svo_recording", &ZEDWrapperNodelet::on_start_svo_recording, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvSvoStartRecording.getService().c_str()); + mSrvSvoStopRecording = 
mNhNs.advertiseService("stop_svo_recording", &ZEDWrapperNodelet::on_stop_svo_recording, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvSvoStopRecording.getService().c_str()); + + mSrvSetLedStatus = mNhNs.advertiseService("set_led_status", &ZEDWrapperNodelet::on_set_led_status, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvSetLedStatus.getService().c_str()); + mSrvToggleLed = mNhNs.advertiseService("toggle_led", &ZEDWrapperNodelet::on_toggle_led, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvToggleLed.getService().c_str()); + + mSrvSvoStartStream = mNhNs.advertiseService("start_remote_stream", &ZEDWrapperNodelet::on_start_remote_stream, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvSvoStartStream.getService().c_str()); + mSrvSvoStopStream = mNhNs.advertiseService("stop_remote_stream", &ZEDWrapperNodelet::on_stop_remote_stream, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvSvoStopStream.getService().c_str()); + + mSrvSetRoi = mNhNs.advertiseService("set_roi", &ZEDWrapperNodelet::on_set_roi, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvSetRoi.getService().c_str()); + mSrvResetRoi = mNhNs.advertiseService("reset_roi", &ZEDWrapperNodelet::on_reset_roi, this); + NODELET_INFO_STREAM(" * Advertised on service " << mSrvSetRoi.getService().c_str()); +} - mSrvStartObjDet = mNhNs.advertiseService("start_object_detection", &ZEDWrapperNodelet::on_start_object_detection, this); - mSrvStopObjDet = mNhNs.advertiseService("stop_object_detection", &ZEDWrapperNodelet::on_stop_object_detection, this); - - // Start Pointcloud thread - mPcThread = std::thread(&ZEDWrapperNodelet::pointcloud_thread_func, this); - - // Start pool thread - mDevicePollThread = std::thread(&ZEDWrapperNodelet::device_poll_thread_func, this); - - // Start data publishing timer - mVideoDepthTimer = mNhNs.createTimer(ros::Duration(1.0 / mVideoDepthFreq), &ZEDWrapperNodelet::pubVideoDepthCallback, - this); 
+void ZEDWrapperNodelet::readGeneralParams() +{ + NODELET_INFO_STREAM("*** GENERAL PARAMETERS ***"); + + std::string camera_model; + mNhNs.getParam("general/camera_model", camera_model); + + if (camera_model == "zed") + { + mZedUserCamModel = sl::MODEL::ZED; + NODELET_INFO_STREAM(" * Camera Model by param\t-> " << camera_model); + } + else if (camera_model == "zedm") + { + mZedUserCamModel = sl::MODEL::ZED_M; + NODELET_INFO_STREAM(" * Camera Model by param\t-> " << camera_model); + } + else if (camera_model == "zed2") + { + mZedUserCamModel = sl::MODEL::ZED2; + NODELET_INFO_STREAM(" * Camera Model by param\t-> " << camera_model); + } + else if (camera_model == "zed2i") + { + mZedUserCamModel = sl::MODEL::ZED2i; + NODELET_INFO_STREAM(" * Camera Model by param\t-> " << camera_model); + } + else if (camera_model == "zedx") + { + mZedUserCamModel = sl::MODEL::ZED_X; + NODELET_INFO_STREAM(" * Camera Model by param\t-> " << camera_model); + } + else if (camera_model == "zedxm") + { + mZedUserCamModel = sl::MODEL::ZED_XM; + NODELET_INFO_STREAM(" * Camera Model by param\t-> " << camera_model); + } + else + { + NODELET_ERROR_STREAM("Camera model not valid: " << camera_model); + exit(EXIT_FAILURE); + } + + mNhNs.getParam("general/camera_name", mCameraName); + NODELET_INFO_STREAM(" * Camera Name\t\t\t-> " << mCameraName.c_str()); + + std::string resol = "AUTO"; + mNhNs.getParam("general/grab_resolution", resol); + if (resol == "AUTO") + { + mCamResol = sl::RESOLUTION::AUTO; + } + else if (sl_tools::isZEDX(mZedUserCamModel)) + { + if (resol == "HD1200") + { + mCamResol = sl::RESOLUTION::HD1200; + } + else if (resol == "HD1080") + { + mCamResol = sl::RESOLUTION::HD1080; + } + else if (resol == "SVGA") + { + mCamResol = sl::RESOLUTION::SVGA; + } + else + { + NODELET_WARN("Not valid 'general.grab_resolution' value: '%s'. 
Using 'AUTO' setting.", resol.c_str()); + mCamResol = sl::RESOLUTION::AUTO; + } + } + else + { + if (resol == "HD2K") + { + mCamResol = sl::RESOLUTION::HD2K; + } + else if (resol == "HD1080") + { + mCamResol = sl::RESOLUTION::HD1080; + } + else if (resol == "HD720") + { + mCamResol = sl::RESOLUTION::HD720; + } + else if (resol == "VGA") + { + mCamResol = sl::RESOLUTION::VGA; + } + else + { + NODELET_WARN("Not valid 'general.grab_resolution' value: '%s'. Using 'AUTO' setting.", resol.c_str()); + mCamResol = sl::RESOLUTION::AUTO; + } + } + NODELET_INFO_STREAM(" * Camera resolution\t\t-> " << sl::toString(mCamResol).c_str()); + + mNhNs.getParam("general/grab_frame_rate", mCamFrameRate); + checkResolFps(); + NODELET_INFO_STREAM(" * Camera Grab Framerate\t-> " << mCamFrameRate); + mNhNs.getParam("general/pub_frame_rate", mPubFrameRate); + if (mPubFrameRate > mCamFrameRate) + { + mPubFrameRate = mCamFrameRate; + NODELET_WARN_STREAM( + "'general/pub_frame_rate' cannot be bigger than 'general/grab_frame_rate'. Forced to the correct value: " + << mPubFrameRate); + } + NODELET_INFO_STREAM(" * Grab compute capping FPS\t-> " << mPubFrameRate << " Hz"); + + std::string out_resol = "CUSTOM"; + mNhNs.getParam("general/pub_resolution", out_resol); + if (out_resol == "NATIVE") + { + mPubResolution = PubRes::NATIVE; + } + else if (out_resol == "CUSTOM") + { + mPubResolution = PubRes::CUSTOM; + } + else + { + NODELET_WARN("Not valid 'general/pub_resolution' value: '%s'. 
Using default setting instead.", out_resol.c_str()); + out_resol = "CUSTOM"; + mPubResolution = PubRes::CUSTOM; + } + NODELET_INFO_STREAM(" * Publishing resolution\t-> " << out_resol.c_str()); + + if (mPubResolution == PubRes::CUSTOM) + { + mNhNs.getParam("general/pub_downscale_factor", mCustomDownscaleFactor); + NODELET_INFO_STREAM(" * Publishing downscale factor\t-> " << mCustomDownscaleFactor); + } + else + { + mCustomDownscaleFactor = 1.0; + } + + mNhNs.getParam("general/gpu_id", mGpuId); + NODELET_INFO_STREAM(" * Gpu ID\t\t\t-> " << mGpuId); + mNhNs.getParam("general/zed_id", mZedId); + NODELET_INFO_STREAM(" * Camera ID\t\t\t-> " << mZedId); + mNhNs.getParam("general/sdk_verbose", mSdkVerbose); + NODELET_INFO_STREAM(" * SDK Verbose level\t\t-> " << mSdkVerbose); + std::string flip_str; + mNhNs.getParam("general/camera_flip", flip_str); + if (flip_str == "ON") + { + mCameraFlip = sl::FLIP_MODE::ON; + } + else if (flip_str == "OFF") + { + mCameraFlip = sl::FLIP_MODE::OFF; + } + else + { + mCameraFlip = sl::FLIP_MODE::AUTO; + } + NODELET_INFO_STREAM(" * Camera Flip\t\t\t-> " << sl::toString(mCameraFlip).c_str()); + mNhNs.getParam("general/self_calib", mCameraSelfCalib); + NODELET_INFO_STREAM(" * Self calibration\t\t-> " << (mCameraSelfCalib ? 
"ENABLED" : "DISABLED")); + + int tmp_sn = 0; + mNhNs.getParam("general/serial_number", tmp_sn); + + if (tmp_sn > 0) + { + mZedSerialNumber = static_cast(tmp_sn); + NODELET_INFO_STREAM(" * Serial number\t\t-> " << mZedSerialNumber); + } + + std::string parsed_str = getRoiParam("general/region_of_interest", mRoiParam); + NODELET_INFO_STREAM(" * Region of interest\t\t-> " << parsed_str.c_str()); } -void ZEDWrapperNodelet::readParameters() { - - NODELET_INFO_STREAM("*** PARAMETERS ***"); - - // ----> General - // Get parameters from param files - mNhNs.getParam("general/camera_name", mCameraName); - NODELET_INFO_STREAM(" * Camera Name\t\t\t-> " << mCameraName.c_str()); - int resol; - mNhNs.getParam("general/resolution", resol); - mCamResol = static_cast(resol); - NODELET_INFO_STREAM(" * Camera Resolution\t\t-> " << sl::toString(mCamResol).c_str()); - mNhNs.getParam("general/grab_frame_rate", mCamFrameRate); - checkResolFps(); - NODELET_INFO_STREAM(" * Camera Grab Framerate\t-> " << mCamFrameRate); - - mNhNs.getParam("general/gpu_id", mGpuId); - NODELET_INFO_STREAM(" * Gpu ID\t\t\t-> " << mGpuId); - mNhNs.getParam("general/zed_id", mZedId); - NODELET_INFO_STREAM(" * Camera ID\t\t\t-> " << mGpuId); - mNhNs.getParam("general/verbose", mVerbose); - NODELET_INFO_STREAM(" * Verbose\t\t\t-> " << (mVerbose ? "ENABLED" : "DISABLED")); - mNhNs.param("general/camera_flip", mCameraFlip, false); - NODELET_INFO_STREAM(" * Camera Flip\t\t\t-> " << (mCameraFlip ? "ENABLED" : "DISABLED")); - mNhNs.param("general/self_calib", mCameraSelfCalib, true); - NODELET_INFO_STREAM(" * Self calibration\t\t-> " << (mCameraSelfCalib ? 
"ENABLED" : "DISABLED")); - - int tmp_sn = 0; - mNhNs.getParam("general/serial_number", tmp_sn); - - if (tmp_sn > 0) { - mZedSerialNumber = static_cast(tmp_sn); - NODELET_INFO_STREAM(" * Serial number\t\t-> " << mZedSerialNumber); - } - - - string camera_model; - mNhNs.getParam("general/camera_model", camera_model); - - if (camera_model == "zed") { - mZedUserCamModel = sl::MODEL::ZED; - NODELET_INFO_STREAM(" * Camera Model by param\t-> " << camera_model); - } else if (camera_model == "zedm") { - mZedUserCamModel = sl::MODEL::ZED_M; - NODELET_INFO_STREAM(" * Camera Model by param\t-> " << camera_model); - } else if (camera_model == "zed2") { - mZedUserCamModel = sl::MODEL::ZED2; - NODELET_INFO_STREAM(" * Camera Model by param\t-> " << camera_model); - } else { - NODELET_ERROR_STREAM("Camera model not valid: " << camera_model); - } - // <---- General - - // ----> Video - mNhNs.getParam("video/img_downsample_factor", mCamImageResizeFactor); - NODELET_INFO_STREAM(" * Image resample factor\t-> " << mCamImageResizeFactor); - - mNhNs.getParam("video/extrinsic_in_camera_frame", mUseOldExtrinsic); - NODELET_INFO_STREAM(" * Extrinsic param. 
frame\t-> " << (mUseOldExtrinsic?"X RIGHT - Y DOWN - Z FWD":"X FWD - Y LEFT - Z UP")); - // <---- Video - - // -----> Depth - int depth_mode; - mNhNs.getParam("depth/quality", depth_mode); - mDepthMode = static_cast(depth_mode); - NODELET_INFO_STREAM(" * Depth quality\t\t-> " << sl::toString(mDepthMode).c_str()); - int sensing_mode; - mNhNs.getParam("depth/sensing_mode", sensing_mode ); - mCamSensingMode = static_cast(sensing_mode); - NODELET_INFO_STREAM(" * Depth Sensing mode\t\t-> " << sl::toString(mCamSensingMode).c_str()); +void ZEDWrapperNodelet::readDepthParams() +{ + NODELET_INFO_STREAM("*** DEPTH PARAMETERS ***"); + + std::string depth_mode_str = sl::toString(mDepthMode).c_str(); + mNhNs.getParam("depth/depth_mode", depth_mode_str); + + bool matched = false; + for (int mode = static_cast(sl::DEPTH_MODE::NONE); mode < static_cast(sl::DEPTH_MODE::LAST); ++mode) + { + std::string test_str = sl::toString(static_cast(mode)).c_str(); + std::replace(test_str.begin(), test_str.end(), ' ', + '_'); // Replace spaces with underscores to match the YAML setting + if (test_str == depth_mode_str) + { + matched = true; + mDepthMode = static_cast(mode); + break; + } + } + + if (!matched) + { + NODELET_WARN_STREAM( + "The parameter 'depth.depth_mode' contains a not valid string. Please check it in 'common.yaml'."); + NODELET_WARN("Using default DEPTH_MODE."); + mDepthMode = sl::DEPTH_MODE::ULTRA; + } + + if (mDepthMode == sl::DEPTH_MODE::NONE) + { + mDepthDisabled = true; + mDepthStabilization = 0; + NODELET_INFO_STREAM(" * Depth mode\t\t\t-> " << sl::toString(mDepthMode).c_str() << " - DEPTH DISABLED"); + } + else + { + mDepthDisabled = false; + NODELET_INFO_STREAM(" * Depth mode\t\t\t-> " << sl::toString(mDepthMode).c_str() << " [" + << static_cast(mDepthMode) << "]"); + } + + if (!mDepthDisabled) + { mNhNs.getParam("depth/openni_depth_mode", mOpenniDepthMode); NODELET_INFO_STREAM(" * OpenNI mode\t\t\t-> " << (mOpenniDepthMode ? 
"ENABLED" : "DISABLED")); mNhNs.getParam("depth/depth_stabilization", mDepthStabilization); - NODELET_INFO_STREAM(" * Depth Stabilization\t\t-> " << (mDepthStabilization ? "ENABLED" : "DISABLED")); + NODELET_INFO_STREAM(" * Depth Stabilization\t\t-> " << mDepthStabilization); mNhNs.getParam("depth/min_depth", mCamMinDepth); - NODELET_INFO_STREAM(" * Minimum depth\t\t-> " << mCamMinDepth << " m"); + NODELET_INFO_STREAM(" * Minimum depth\t\t-> " << mCamMinDepth << " m"); mNhNs.getParam("depth/max_depth", mCamMaxDepth); NODELET_INFO_STREAM(" * Maximum depth\t\t-> " << mCamMaxDepth << " m"); - mNhNs.getParam("depth/depth_downsample_factor", mCamDepthResizeFactor); - NODELET_INFO_STREAM(" * Depth resample factor\t-> " << mCamDepthResizeFactor); - // <----- Depth + } +} + +void ZEDWrapperNodelet::readPosTrkParams() +{ + if (!mDepthDisabled) + { + NODELET_INFO_STREAM("*** POSITIONAL TRACKING PARAMETERS ***"); + + mNhNs.getParam("pos_tracking/pos_tracking_enabled", mPosTrackingEnabled); + NODELET_INFO_STREAM(" * Positional tracking\t\t-> " << (mPosTrackingEnabled ? "ENABLED" : "DISABLED")); + + std::string pos_trk_mode; + mNhNs.getParam("pos_tracking/pos_tracking_mode", pos_trk_mode); + if (pos_trk_mode == "GEN_1") + { + mPosTrkMode = sl::POSITIONAL_TRACKING_MODE::GEN_1; + } + else if (pos_trk_mode == "GEN_2") + { + mPosTrkMode = sl::POSITIONAL_TRACKING_MODE::GEN_2; + } + else + { + NODELET_WARN_STREAM("'pos_tracking/pos_tracking_mode' not valid ('" << pos_trk_mode + << "'). Using default value."); + mPosTrkMode = sl::POSITIONAL_TRACKING_MODE::GEN_2; + } + NODELET_INFO_STREAM(" * Positional tracking mode\t-> " << sl::toString(mPosTrkMode).c_str()); + + mNhNs.getParam("pos_tracking/set_gravity_as_origin", mSetGravityAsOrigin); + NODELET_INFO_STREAM(" * Set gravity as origin\t-> " << (mSetGravityAsOrigin ? 
"ENABLED" : "DISABLED")); - // ----> Tracking mNhNs.getParam("pos_tracking/path_pub_rate", mPathPubRate); - NODELET_INFO_STREAM(" * Path rate\t\t\t-> " << mPathPubRate << " Hz"); + NODELET_INFO_STREAM(" * Path rate\t\t\t-> " << mPathPubRate << " Hz"); mNhNs.getParam("pos_tracking/path_max_count", mPathMaxCount); - NODELET_INFO_STREAM(" * Path history size\t\t-> " << (mPathMaxCount == -1) ? std::string("INFINITE") : std::to_string( - mPathMaxCount)); + NODELET_INFO_STREAM(" * Path history size\t\t-> " << ((mPathMaxCount == -1) ? std::string("INFINITE") : + std::to_string(mPathMaxCount))); - if (mPathMaxCount < 2 && mPathMaxCount != -1) { - mPathMaxCount = 2; + if (mPathMaxCount < 2 && mPathMaxCount != -1) + { + mPathMaxCount = 2; } mNhNs.getParam("pos_tracking/initial_base_pose", mInitialBasePose); mNhNs.getParam("pos_tracking/area_memory_db_path", mAreaMemDbPath); + mAreaMemDbPath = sl_tools::resolveFilePath(mAreaMemDbPath); NODELET_INFO_STREAM(" * Odometry DB path\t\t-> " << mAreaMemDbPath.c_str()); - mNhNs.param("pos_tracking/area_memory", mAreaMemory, false); - NODELET_INFO_STREAM(" * Spatial Memory\t\t-> " << (mAreaMemory ? "ENABLED" : "DISABLED")); - mNhNs.param("pos_tracking/imu_fusion", mImuFusion, true); + + mNhNs.getParam("pos_tracking/save_area_memory_db_on_exit", mSaveAreaMapOnClosing); + NODELET_INFO_STREAM(" * Save Area Memory on closing\t-> " << (mSaveAreaMapOnClosing ? "ENABLED" : "DISABLED")); + mNhNs.getParam("pos_tracking/area_memory", mAreaMemory); + NODELET_INFO_STREAM(" * Area Memory\t\t\t-> " << (mAreaMemory ? "ENABLED" : "DISABLED")); + mNhNs.getParam("pos_tracking/imu_fusion", mImuFusion); NODELET_INFO_STREAM(" * IMU Fusion\t\t\t-> " << (mImuFusion ? "ENABLED" : "DISABLED")); - mNhNs.param("pos_tracking/floor_alignment", mFloorAlignment, false); + mNhNs.getParam("pos_tracking/floor_alignment", mFloorAlignment); NODELET_INFO_STREAM(" * Floor alignment\t\t-> " << (mFloorAlignment ? 
"ENABLED" : "DISABLED")); - mNhNs.param("pos_tracking/init_odom_with_first_valid_pose", mInitOdomWithPose, true); - NODELET_INFO_STREAM(" * Init Odometry with first valid pose data -> " << (mInitOdomWithPose ? "ENABLED" : "DISABLED")); - mNhNs.param("pos_tracking/two_d_mode", mTwoDMode, false); - NODELET_INFO_STREAM(" * Two D mode\t\t\t-> " << (mTwoDMode ? "ENABLED" : "DISABLED")); - mNhNs.param("pos_tracking/fixed_z_value", mFixedZValue, 0.0); + mNhNs.getParam("pos_tracking/init_odom_with_first_valid_pose", mInitOdomWithPose); + NODELET_INFO_STREAM(" * Init Odometry with first valid pose data -> " + << (mInitOdomWithPose ? "ENABLED" : "DISABLED")); + mNhNs.getParam("pos_tracking/two_d_mode", mTwoDMode); + NODELET_INFO_STREAM(" * Force 2D mode\t\t-> " << (mTwoDMode ? "ENABLED" : "DISABLED")); + + if (mTwoDMode) + { + mNhNs.getParam("pos_tracking/fixed_z_value", mFixedZValue); + NODELET_INFO_STREAM(" * Fixed Z value\t\t-> " << mFixedZValue); + } + + mNhNs.getParam("pos_tracking/publish_tf", mPublishTF); + NODELET_INFO_STREAM(" * Broadcast odometry TF\t-> " << (mPublishTF ? "ENABLED" : "DISABLED")); + mNhNs.getParam("pos_tracking/publish_map_tf", mPublishMapTF); + NODELET_INFO_STREAM(" * Broadcast map pose TF\t-> " + << (mPublishTF ? (mPublishMapTF ? "ENABLED" : "DISABLED") : "DISABLED")); + + mNhNs.getParam("pos_tracking/set_as_static", mIsStatic); + NODELET_INFO_STREAM(" * Camera is static\t\t-> " << (mIsStatic ? 
"ENABLED" : "DISABLED")); + mNhNs.getParam("pos_tracking/depth_min_range", mPosTrkMinDepth); + NODELET_INFO_STREAM(" * Depth minimum range\t\t-> " << mPosTrkMinDepth); + } +} - if (mTwoDMode) { - NODELET_INFO_STREAM(" * Fixed Z value\t\t-> " << mFixedZValue); - } - // <---- Tracking +void ZEDWrapperNodelet::readMappingParams() +{ + NODELET_INFO_STREAM("*** MAPPING PARAMETERS ***"); - // ----> Mapping - mNhNs.param("mapping/mapping_enabled", mMappingEnabled, false); + if (!mDepthDisabled) + { + mNhNs.getParam("mapping/mapping_enabled", mMappingEnabled); - if (mMappingEnabled) { - NODELET_INFO_STREAM(" * Mapping\t\t\t-> ENABLED"); + if (mMappingEnabled) + { + NODELET_INFO_STREAM(" * Mapping\t\t\t-> ENABLED"); - mNhNs.getParam("mapping/resolution", mMappingRes); - NODELET_INFO_STREAM(" * Mapping resolution\t\t-> " << mMappingRes << " m" ); + mNhNs.getParam("mapping/resolution", mMappingRes); + NODELET_INFO_STREAM(" * Mapping resolution\t\t-> " << mMappingRes << " m"); - mNhNs.getParam("mapping/max_mapping_range", mMaxMappingRange); - NODELET_INFO_STREAM(" * Mapping max range\t\t-> " << mMaxMappingRange << " m" << ((mMaxMappingRange < 0.0)?" [AUTO]":"")); + mNhNs.getParam("mapping/max_mapping_range", mMaxMappingRange); + NODELET_INFO_STREAM(" * Mapping max range\t\t-> " << mMaxMappingRange << " m" + << ((mMaxMappingRange < 0.0) ? 
" [AUTO]" : "")); - mNhNs.getParam("mapping/fused_pointcloud_freq", mFusedPcPubFreq); - NODELET_INFO_STREAM(" * Fused point cloud freq:\t-> " << mFusedPcPubFreq << " Hz"); - } else { - NODELET_INFO_STREAM(" * Mapping\t\t\t-> DISABLED"); + mNhNs.getParam("mapping/fused_pointcloud_freq", mFusedPcPubFreq); + NODELET_INFO_STREAM(" * Fused point cloud freq:\t-> " << mFusedPcPubFreq << " Hz"); + } + else + { + NODELET_INFO_STREAM(" * Mapping\t\t\t-> DISABLED"); } - // <---- Mapping + } + mNhNs.getParam("mapping/clicked_point_topic", mClickedPtTopic); + NODELET_INFO_STREAM(" * Clicked point topic\t\t-> " << mClickedPtTopic.c_str()); +} - // ----> Object Detection - mNhNs.param("object_detection/od_enabled", mObjDetEnabled, false); +void ZEDWrapperNodelet::readObjDetParams() +{ + if (!mDepthDisabled) + { + NODELET_INFO_STREAM("*** OBJECT DETECTION PARAMETERS ***"); + + mNhNs.getParam("object_detection/od_enabled", mObjDetEnabled); + NODELET_INFO_STREAM(" * Object Detection\t\t-> " << (mObjDetEnabled ? "ENABLED" : "DISABLED")); + mNhNs.getParam("object_detection/confidence_threshold", mObjDetConfidence); + NODELET_INFO_STREAM(" * Object confidence\t\t-> " << mObjDetConfidence); + mNhNs.getParam("object_detection/object_tracking_enabled", mObjDetTracking); + NODELET_INFO_STREAM(" * Object tracking\t\t-> " << (mObjDetTracking ? "ENABLED" : "DISABLED")); + mNhNs.getParam("object_detection/max_range", mObjDetMaxRange); + if (mObjDetMaxRange > mCamMaxDepth) + { + NODELET_WARN("Detection max range cannot be major than depth max range. Automatically fixed."); + mObjDetMaxRange = mCamMaxDepth; + } + NODELET_INFO_STREAM(" * Detection max range\t\t-> " << mObjDetMaxRange); + + mNhNs.getParam("object_detection/allow_reduced_precision_inference", mObjDetReducedPrecision); + NODELET_INFO_STREAM(" * Allow reduced precision\t-> " << (mObjDetReducedPrecision ? 
"ENABLED" : "DISABLED")); + mNhNs.getParam("object_detection/prediction_timeout", mObjDetPredTimeout); + NODELET_INFO_STREAM(" * Prediction Timeout\t\t-> " << mObjDetPredTimeout); + + std::string model_str; + mNhNs.getParam("object_detection/model", model_str); + + NODELET_DEBUG_STREAM(" 'object_detection.model': " << model_str.c_str()); + + bool matched = false; + for (int idx = static_cast(sl::OBJECT_DETECTION_MODEL::MULTI_CLASS_BOX_FAST); + idx < static_cast(sl::OBJECT_DETECTION_MODEL::CUSTOM_BOX_OBJECTS); idx++) + { + sl::OBJECT_DETECTION_MODEL test_model = static_cast(idx); + std::string test_model_str = sl::toString(test_model).c_str(); + std::replace(test_model_str.begin(), test_model_str.end(), ' ', + '_'); // Replace spaces with underscores to match the YAML setting + // NODELETDEBUG(" Comparing '%s' to '%s'", test_model_str.c_str(), model_str.c_str()); + if (model_str == test_model_str) + { + mObjDetModel = test_model; + matched = true; + break; + } + } + if (!matched) + { + NODELET_WARN_STREAM("The value of the parameter 'object_detection.model' is not valid: '" + << model_str << "'. Using the default value."); + } + NODELET_INFO_STREAM(" * Object Det. model\t\t->" << sl::toString(mObjDetModel).c_str()); + + mNhNs.getParam("object_detection/mc_people", mObjDetPeopleEnable); + NODELET_INFO_STREAM(" * Detect people\t\t-> " << (mObjDetPeopleEnable ? "ENABLED" : "DISABLED")); + mNhNs.getParam("object_detection/mc_vehicle", mObjDetVehiclesEnable); + NODELET_INFO_STREAM(" * Detect vehicles\t\t-> " << (mObjDetVehiclesEnable ? "ENABLED" : "DISABLED")); + mNhNs.getParam("object_detection/mc_bag", mObjDetBagsEnable); + NODELET_INFO_STREAM(" * Detect bags\t\t\t-> " << (mObjDetBagsEnable ? "ENABLED" : "DISABLED")); + mNhNs.getParam("object_detection/mc_animal", mObjDetAnimalsEnable); + NODELET_INFO_STREAM(" * Detect animals\t\t-> " << (mObjDetAnimalsEnable ? 
"ENABLED" : "DISABLED")); + mNhNs.getParam("object_detection/mc_electronics", mObjDetElectronicsEnable); + NODELET_INFO_STREAM(" * Detect electronics\t\t-> " << (mObjDetElectronicsEnable ? "ENABLED" : "DISABLED")); + mNhNs.getParam("object_detection/mc_fruit_vegetable", mObjDetFruitsEnable); + NODELET_INFO_STREAM(" * Detect fruit and vegetables\t-> " << (mObjDetFruitsEnable ? "ENABLED" : "DISABLED")); + mNhNs.getParam("object_detection/mc_sport", mObjDetSportEnable); + NODELET_INFO_STREAM(" * Detect sport-related objects\t-> " << (mObjDetSportEnable ? "ENABLED" : "DISABLED")); + } +} - if (mObjDetEnabled) { - NODELET_INFO_STREAM(" * Object Detection\t\t-> ENABLED"); - mNhNs.getParam("object_detection/confidence_threshold", mObjDetConfidence); - NODELET_INFO_STREAM(" * Object confidence\t\t-> " << mObjDetConfidence); - mNhNs.getParam("object_detection/object_tracking_enabled", mObjDetTracking); - NODELET_INFO_STREAM(" * Object tracking\t\t-> " << (mObjDetTracking?"ENABLED":"DISABLED")); - mNhNs.getParam("object_detection/people_detection", mObjDetPeople); - NODELET_INFO_STREAM(" * People detection\t\t-> " << (mObjDetPeople?"ENABLED":"DISABLED")); - mNhNs.getParam("object_detection/vehicle_detection", mObjDetVehicles); - NODELET_INFO_STREAM(" * Vehicles detection\t\t-> " << (mObjDetVehicles?"ENABLED":"DISABLED")); - } else { - NODELET_INFO_STREAM(" * Object Detection\t\t-> DISABLED"); - } - // <---- Object Detection +void ZEDWrapperNodelet::readSensParams() +{ + NODELET_INFO_STREAM("*** SENSORS PARAMETERS ***"); - // ----> Sensors + if (!sl_tools::isZED(mZedUserCamModel)) + { mNhNs.getParam("sensors/sensors_timestamp_sync", mSensTimestampSync); NODELET_INFO_STREAM(" * Sensors timestamp sync\t-> " << (mSensTimestampSync ? 
"ENABLED" : "DISABLED")); - // <---- Sensors - - // ----> SVO - mNhNs.param("svo_file", mSvoFilepath, std::string()); - NODELET_INFO_STREAM(" * SVO input file: \t\t-> " << mSvoFilepath.c_str()); - - int svo_compr = 0; - mNhNs.getParam("general/svo_compression", svo_compr); + mNhNs.getParam("sensors/max_pub_rate", mSensPubRate); + if (!sl_tools::isZEDM(mZedUserCamModel)) + { + if (mSensPubRate > 800.) + mSensPubRate = 800.; + } + else + { + if (mSensPubRate > 400.) + mSensPubRate = 400.; + } + + NODELET_INFO_STREAM(" * Max sensors rate\t\t-> " << mSensPubRate); + + mNhNs.getParam("sensors/publish_imu_tf", mPublishImuTf); + NODELET_INFO_STREAM(" * Broadcast IMU pose TF\t-> " << (mPublishImuTf ? "ENABLED" : "DISABLED")); + } + else + { + NODELET_INFO_STREAM(" * The ZED camera has no available inertial/environmental sensors."); + } +} - if (svo_compr >= static_cast(sl::SVO_COMPRESSION_MODE::LAST)) { - NODELET_WARN_STREAM("The parameter `general/svo_compression` has an invalid value. Please check it in the configuration file `common.yaml`"); +void ZEDWrapperNodelet::readSvoParams() +{ + NODELET_INFO_STREAM("*** SVO PARAMETERS ***"); - svo_compr = 0; - } + mNhNs.getParam("svo_file", mSvoFilepath); + mSvoFilepath = sl_tools::resolveFilePath(mSvoFilepath); + NODELET_INFO_STREAM(" * SVO input file: \t\t-> " << mSvoFilepath.c_str()); - mSvoComprMode = static_cast(svo_compr); + if (!mSvoFilepath.empty()) + { + mNhNs.getParam("general/svo_realtime", mSvoRealtime); + NODELET_INFO_STREAM(" * SVO realtime mode\t\t-> " << (mSvoRealtime ? "ENABLED" : "DISABLED")); + } - NODELET_INFO_STREAM(" * SVO REC compression\t\t-> " << sl::toString(mSvoComprMode)); - // <---- SVO + int svo_compr = 0; + mNhNs.getParam("general/svo_compression", svo_compr); - // Remote Stream - mNhNs.param("stream", mRemoteStreamAddr, std::string()); + if (svo_compr >= static_cast(sl::SVO_COMPRESSION_MODE::LAST)) + { + NODELET_WARN_STREAM( + "The parameter `general/svo_compression` has an invalid value. 
Please check it in the " + "configuration file `common.yaml`"); - // ----> Coordinate frames - mNhNs.param("pos_tracking/map_frame", mMapFrameId, "map"); - mNhNs.param("pos_tracking/odometry_frame", mOdometryFrameId, "odom"); - mNhNs.param("general/base_frame", mBaseFrameId, "base_link"); + svo_compr = 0; + } - mCameraFrameId = mCameraName + "_camera_center"; - mImuFrameId = mCameraName + "_imu_link"; - mLeftCamFrameId = mCameraName + "_left_camera_frame"; - mLeftCamOptFrameId = mCameraName + "_left_camera_optical_frame"; - mRightCamFrameId = mCameraName + "_right_camera_frame"; - mRightCamOptFrameId = mCameraName + "_right_camera_optical_frame"; + mSvoComprMode = static_cast(svo_compr); - mBaroFrameId = mCameraName + "_baro_link"; - mMagFrameId = mCameraName + "_mag_link"; - mTempLeftFrameId = mCameraName + "_temp_left_link"; - mTempRightFrameId = mCameraName + "_temp_right_link"; + NODELET_INFO_STREAM(" * SVO REC compression\t\t-> " << sl::toString(mSvoComprMode)); +} - mDepthFrameId = mLeftCamFrameId; - mDepthOptFrameId = mLeftCamOptFrameId; +void ZEDWrapperNodelet::readDynParams() +{ + std::cerr << "Ci sei o no?" << std::endl << std::flush; + + NODELET_INFO_STREAM("*** DYNAMIC PARAMETERS (Init. 
values) ***"); + + mNhNs.getParam("brightness", mCamBrightness); + NODELET_INFO_STREAM(" * [DYN] brightness\t\t-> " << mCamBrightness); + mNhNs.getParam("contrast", mCamContrast); + NODELET_INFO_STREAM(" * [DYN] contrast\t\t-> " << mCamContrast); + mNhNs.getParam("hue", mCamHue); + NODELET_INFO_STREAM(" * [DYN] hue\t\t\t-> " << mCamHue); + mNhNs.getParam("saturation", mCamSaturation); + NODELET_INFO_STREAM(" * [DYN] saturation\t\t-> " << mCamSaturation); + mNhNs.getParam("sharpness", mCamSharpness); + NODELET_INFO_STREAM(" * [DYN] sharpness\t\t-> " << mCamSharpness); + mNhNs.getParam("gamma", mCamGamma); + NODELET_INFO_STREAM(" * [DYN] gamma\t\t\t-> " << mCamGamma); + mNhNs.getParam("auto_exposure_gain", mCamAutoExposure); + NODELET_INFO_STREAM(" * [DYN] auto_exposure_gain\t-> " << (mCamAutoExposure ? "ENABLED" : "DISABLED")); + mNhNs.getParam("gain", mCamGain); + mNhNs.getParam("exposure", mCamExposure); + if (!mCamAutoExposure) + { + NODELET_INFO_STREAM(" * [DYN] gain\t\t-> " << mCamGain); + NODELET_INFO_STREAM(" * [DYN] exposure\t\t-> " << mCamExposure); + } + mNhNs.getParam("auto_whitebalance", mCamAutoWB); + NODELET_INFO_STREAM(" * [DYN] auto_whitebalance\t-> " << (mCamAutoWB ? 
"ENABLED" : "DISABLED")); + mNhNs.getParam("whitebalance_temperature", mCamWB); + if (!mCamAutoWB) + { + NODELET_INFO_STREAM(" * [DYN] whitebalance_temperature\t\t-> " << mCamWB); + } + + if (mCamAutoExposure) + { + mTriggerAutoExposure = true; + } + if (mCamAutoWB) + { + mTriggerAutoWB = true; + } + + if (!mDepthDisabled) + { + mNhNs.getParam("depth_confidence", mCamDepthConfidence); + NODELET_INFO_STREAM(" * [DYN] Depth confidence\t-> " << mCamDepthConfidence); + mNhNs.getParam("depth_texture_conf", mCamDepthTextureConf); + NODELET_INFO_STREAM(" * [DYN] Depth texture conf.\t-> " << mCamDepthTextureConf); - // Note: Depth image frame id must match color image frame id - mCloudFrameId = mDepthOptFrameId; - mRgbFrameId = mDepthFrameId; - mRgbOptFrameId = mCloudFrameId; - mDisparityFrameId = mDepthFrameId; - mDisparityOptFrameId = mDepthOptFrameId; - mConfidenceFrameId = mDepthFrameId; - mConfidenceOptFrameId = mDepthOptFrameId; + mNhNs.getParam("point_cloud_freq", mPointCloudFreq); + if (mPointCloudFreq > mPubFrameRate) + { + mPointCloudFreq = mPubFrameRate; + NODELET_WARN_STREAM("'point_cloud_freq' cannot be major than 'general/pub_frame_rate'. 
Set to " + << mPointCloudFreq); + } + NODELET_INFO_STREAM(" * [DYN] point_cloud_freq\t-> " << mPointCloudFreq << " Hz"); + } +} - // Print TF frames +void ZEDWrapperNodelet::readParameters() +{ + // ----> General + readGeneralParams(); + // <---- General + + // ----> Dynamic + readDynParams(); + // <---- Dynamic + + // ----> Video + // NODELET_INFO_STREAM("*** VIDEO PARAMETERS ***"); + // Note: there are no "static" video parameters + // <---- Video + + // -----> Depth + readDepthParams(); + // <----- Depth + + // ----> Positional Tracking + readPosTrkParams(); + // <---- Positional Tracking + + // ----> Mapping + readMappingParams(); + // <---- Mapping + + // ----> Object Detection + readObjDetParams(); + // <---- Object Detection + + // ----> Sensors + readSensParams(); + // <---- Sensors + + // ----> SVO + readSvoParams(); + // <---- SVO + + // Remote Stream + mNhNs.getParam("stream", mRemoteStreamAddr); + + // ----> Coordinate frames + NODELET_INFO_STREAM("*** COORDINATE FRAMES ***"); + + mNhNs.getParam("pos_tracking/map_frame", mMapFrameId); + mNhNs.getParam("pos_tracking/odometry_frame", mOdomFrameId); + mNhNs.getParam("general/base_frame", mBaseFrameId); + + mCameraFrameId = mCameraName + "_camera_center"; + mImuFrameId = mCameraName + "_imu_link"; + mLeftCamFrameId = mCameraName + "_left_camera_frame"; + mLeftCamOptFrameId = mCameraName + "_left_camera_optical_frame"; + mRightCamFrameId = mCameraName + "_right_camera_frame"; + mRightCamOptFrameId = mCameraName + "_right_camera_optical_frame"; + + mBaroFrameId = mCameraName + "_baro_link"; + mMagFrameId = mCameraName + "_mag_link"; + mTempLeftFrameId = mCameraName + "_temp_left_link"; + mTempRightFrameId = mCameraName + "_temp_right_link"; + + mDepthFrameId = mLeftCamFrameId; + mDepthOptFrameId = mLeftCamOptFrameId; + + // Note: Depth image frame id must match color image frame id + mCloudFrameId = mDepthOptFrameId; + mRgbFrameId = mDepthFrameId; + mRgbOptFrameId = mCloudFrameId; + mDisparityFrameId = 
mDepthFrameId; + mDisparityOptFrameId = mDepthOptFrameId; + mConfidenceFrameId = mDepthFrameId; + mConfidenceOptFrameId = mDepthOptFrameId; + + // Print TF frames + NODELET_INFO_STREAM(" * camera_frame\t\t\t-> " << mCameraFrameId); + NODELET_INFO_STREAM(" * imu_link\t\t\t-> " << mImuFrameId); + NODELET_INFO_STREAM(" * left_camera_frame\t\t-> " << mLeftCamFrameId); + NODELET_INFO_STREAM(" * left_camera_optical_frame\t-> " << mLeftCamOptFrameId); + NODELET_INFO_STREAM(" * right_camera_frame\t\t-> " << mRightCamFrameId); + NODELET_INFO_STREAM(" * right_camera_optical_frame\t-> " << mRightCamOptFrameId); + + if (!mDepthDisabled) + { NODELET_INFO_STREAM(" * map_frame\t\t\t-> " << mMapFrameId); - NODELET_INFO_STREAM(" * odometry_frame\t\t-> " << mOdometryFrameId); + NODELET_INFO_STREAM(" * odometry_frame\t\t-> " << mOdomFrameId); NODELET_INFO_STREAM(" * base_frame\t\t\t-> " << mBaseFrameId); - NODELET_INFO_STREAM(" * camera_frame\t\t\t-> " << mCameraFrameId); - NODELET_INFO_STREAM(" * imu_link\t\t\t-> " << mImuFrameId); - NODELET_INFO_STREAM(" * left_camera_frame\t\t-> " << mLeftCamFrameId); - NODELET_INFO_STREAM(" * left_camera_optical_frame\t-> " << mLeftCamOptFrameId); - NODELET_INFO_STREAM(" * right_camera_frame\t\t-> " << mRightCamFrameId); - NODELET_INFO_STREAM(" * right_camera_optical_frame\t-> " << mRightCamOptFrameId); NODELET_INFO_STREAM(" * depth_frame\t\t\t-> " << mDepthFrameId); NODELET_INFO_STREAM(" * depth_optical_frame\t\t-> " << mDepthOptFrameId); NODELET_INFO_STREAM(" * disparity_frame\t\t-> " << mDisparityFrameId); NODELET_INFO_STREAM(" * disparity_optical_frame\t-> " << mDisparityOptFrameId); NODELET_INFO_STREAM(" * confidence_frame\t\t-> " << mConfidenceFrameId); NODELET_INFO_STREAM(" * confidence_optical_frame\t-> " << mConfidenceOptFrameId); - // <---- Coordinate frames + } + // <---- Coordinate frames +} - // ----> TF broadcasting - mNhNs.param("pos_tracking/publish_tf", mPublishTf, true); - NODELET_INFO_STREAM(" * Broadcast odometry TF\t-> " << 
(mPublishTf ? "ENABLED" : "DISABLED")); - mNhNs.param("pos_tracking/publish_map_tf", mPublishMapTf, true); - NODELET_INFO_STREAM(" * Broadcast map pose TF\t-> " << (mPublishTf ? (mPublishMapTf ? "ENABLED" : "DISABLED") : - "DISABLED")); - // <---- TF broadcasting +std::string ZEDWrapperNodelet::getRoiParam(std::string paramName, std::vector>& outVal) +{ + outVal.clear(); - // ----> Dynamic - mNhNs.getParam("depth_confidence", mCamDepthConfidence); - NODELET_INFO_STREAM(" * [DYN] Depth confidence\t-> " << mCamDepthConfidence); - mNhNs.getParam("depth_texture_conf", mCamDepthTextureConf); - NODELET_INFO_STREAM(" * [DYN] Depth texture conf.\t-> " << mCamDepthTextureConf); + std::string roi_param_str = ""; - mNhNs.getParam("pub_frame_rate", mVideoDepthFreq); - NODELET_INFO_STREAM(" * [DYN] pub_frame_rate\t\t-> " << mVideoDepthFreq << " Hz"); - mNhNs.getParam("point_cloud_freq", mPointCloudFreq); - NODELET_INFO_STREAM(" * [DYN] point_cloud_freq\t-> " << mPointCloudFreq << " Hz"); - mNhNs.getParam("brightness", mCamBrightness); - NODELET_INFO_STREAM(" * [DYN] brightness\t\t-> " << mCamBrightness); - mNhNs.getParam("contrast", mCamContrast); - NODELET_INFO_STREAM(" * [DYN] contrast\t\t-> " << mCamContrast); - mNhNs.getParam("hue", mCamHue); - NODELET_INFO_STREAM(" * [DYN] hue\t\t\t-> " << mCamHue); - mNhNs.getParam("saturation", mCamSaturation); - NODELET_INFO_STREAM(" * [DYN] saturation\t\t-> " << mCamSaturation); - mNhNs.getParam("sharpness", mCamSharpness); - NODELET_INFO_STREAM(" * [DYN] sharpness\t\t-> " << mCamSharpness); -#if (ZED_SDK_MAJOR_VERSION==3 && ZED_SDK_MINOR_VERSION>=1) - mNhNs.getParam("gamma", mCamGamma); - NODELET_INFO_STREAM(" * [DYN] gamma\t\t\t-> " << mCamGamma); -#endif - mNhNs.getParam("auto_exposure_gain", mCamAutoExposure); - NODELET_INFO_STREAM(" * [DYN] auto_exposure_gain\t-> " << (mCamAutoExposure ? 
"ENABLED" : "DISABLED")); - mNhNs.getParam("gain", mCamGain); - mNhNs.getParam("exposure", mCamExposure); - if(!mCamAutoExposure) { - NODELET_INFO_STREAM(" * [DYN] gain\t\t-> " << mCamGain); - NODELET_INFO_STREAM(" * [DYN] exposure\t\t-> " << mCamExposure); - } - mNhNs.getParam("auto_whitebalance", mCamAutoWB); - NODELET_INFO_STREAM(" * [DYN] auto_whitebalance\t-> " << (mCamAutoWB ? "ENABLED" : "DISABLED")); - mNhNs.getParam("whitebalance_temperature", mCamWB); - if(!mCamAutoWB) { - NODELET_INFO_STREAM(" * [DYN] whitebalance_temperature\t\t-> " << mCamWB); - } + mNhNs.getParam("general/region_of_interest", roi_param_str); - if (mCamAutoExposure) { - mTriggerAutoExposure = true; - } - if (mCamAutoWB) { - mTriggerAutoWB = true; - } - // <---- Dynamic + if (roi_param_str.empty()) + { + return std::string(); + } + + std::string error; + outVal = sl_tools::parseStringVector(roi_param_str, error); + + if (error != "") + { + NODELET_WARN_STREAM("Error parsing " << paramName << " parameter: " << error.c_str()); + NODELET_WARN_STREAM(" " << paramName << " string was " << roi_param_str.c_str()); + outVal.clear(); + } + + return roi_param_str; +} + +std::string ZEDWrapperNodelet::parseRoiPoly(const std::vector>& in_poly, + std::vector& out_poly) +{ + out_poly.clear(); + + std::string ss; + ss = "["; + + size_t poly_size = in_poly.size(); + + if (poly_size < 3) + { + if (poly_size != 0) + { + NODELET_WARN_STREAM("A vector with " << poly_size + << " points is not enough to create a polygon to set a Region " + "of Interest."); + return std::string(); + } + } + else + { + for (size_t i; i < poly_size; ++i) + { + if (in_poly[i].size() != 2) + { + NODELET_WARN_STREAM("The component with index '" << i + << "' of the ROI vector " + "has not the correct size."); + out_poly.clear(); + return std::string(); + } + else if (in_poly[i][0] < 0.0 || in_poly[i][1] < 0.0 || in_poly[i][0] > 1.0 || in_poly[i][1] > 1.0) + { + NODELET_WARN_STREAM("The component with index '" << i + << "' of the 
ROI vector " + "is not a " + "valid normalized point: [" + << in_poly[i][0] << "," << in_poly[i][1] << "]."); + out_poly.clear(); + return std::string(); + } + else + { + sl::float2 pt; + pt.x = in_poly[i][0]; + pt.y = in_poly[i][1]; + out_poly.push_back(pt); + ss += "["; + ss += std::to_string(pt.x); + ss += ","; + ss += std::to_string(pt.y); + ss += "]"; + } + + if (i != poly_size - 1) + { + ss += ","; + } + } + } + ss += "]"; + + return ss; } -void ZEDWrapperNodelet::checkResolFps() { - switch (mCamResol) { +void ZEDWrapperNodelet::checkResolFps() +{ + switch (mCamResol) + { case sl::RESOLUTION::HD2K: - if (mCamFrameRate != 15) { - NODELET_WARN_STREAM("Wrong FrameRate (" - << mCamFrameRate - << ") for the resolution HD2K. Set to 15 FPS."); - mCamFrameRate = 15; - } + if (mCamFrameRate != 15) + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD2K. Forced to 15 FPS."); + mCamFrameRate = 15; + } - break; + break; case sl::RESOLUTION::HD1080: - if (mCamFrameRate == 15 || mCamFrameRate == 30) { - break; + if (mZedUserCamModel == sl::MODEL::ZED_X || mZedUserCamModel == sl::MODEL::ZED_XM) + { + if (mCamFrameRate == 60 || mCamFrameRate == 30 || mCamFrameRate == 15) + { + break; } - if (mCamFrameRate > 15 && mCamFrameRate < 30) { - NODELET_WARN_STREAM("Wrong FrameRate (" - << mCamFrameRate - << ") for the resolution HD1080. Set to 15 FPS."); - mCamFrameRate = 15; - } else if (mCamFrameRate > 30) { - NODELET_WARN_STREAM("Wrong FrameRate (" - << mCamFrameRate - << ") for the resolution HD1080. Set to 30 FPS."); - mCamFrameRate = 30; - } else { - NODELET_WARN_STREAM("Wrong FrameRate (" - << mCamFrameRate - << ") for the resolution HD1080. Set to 15 FPS."); - mCamFrameRate = 15; + if (mCamFrameRate < 23) + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD1080. 
Forced to 15 FPS."); + mCamFrameRate = 15; + } + else if (mCamFrameRate >= 23 && mCamFrameRate < 45) + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD1080. Forced to 30 FPS."); + mCamFrameRate = 30; } - - break; - - case sl::RESOLUTION::HD720: - if (mCamFrameRate == 15 || mCamFrameRate == 30 || mCamFrameRate == 60) { - break; + else if (mCamFrameRate >= 45) + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD1080. Forced to 60 FPS."); + mCamFrameRate = 60; + } + } + else + { + if (mCamFrameRate == 15 || mCamFrameRate == 30) + { + break; } - if (mCamFrameRate > 15 && mCamFrameRate < 30) { - NODELET_WARN_STREAM("Wrong FrameRate (" - << mCamFrameRate - << ") for the resolution HD720. Set to 15 FPS."); - mCamFrameRate = 15; - } else if (mCamFrameRate > 30 && mCamFrameRate < 60) { - NODELET_WARN_STREAM("Wrong FrameRate (" - << mCamFrameRate - << ") for the resolution HD720. Set to 30 FPS."); - mCamFrameRate = 30; - } else if (mCamFrameRate > 60) { - NODELET_WARN_STREAM("Wrong FrameRate (" - << mCamFrameRate - << ") for the resolution HD720. Set to 60 FPS."); - mCamFrameRate = 60; - } else { - NODELET_WARN_STREAM("Wrong FrameRate (" - << mCamFrameRate - << ") for the resolution HD720. Set to 15 FPS."); - mCamFrameRate = 15; + if (mCamFrameRate < 23) + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD1080. Forced to 15 FPS."); + mCamFrameRate = 15; + } + else + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD1080. Forced to 30 FPS."); + mCamFrameRate = 30; } + } + break; + + case sl::RESOLUTION::HD720: + if (mCamFrameRate == 15 || mCamFrameRate == 30 || mCamFrameRate == 60) + { break; + } + + if (mCamFrameRate < 23) + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD1080. 
Forced to 15 FPS."); + mCamFrameRate = 15; + } + else if (mCamFrameRate >= 23 && mCamFrameRate < 45) + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD1080. Forced to 30 FPS."); + mCamFrameRate = 30; + } + else if (mCamFrameRate >= 45) + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD1080. Forced to 60 FPS."); + mCamFrameRate = 60; + } + else + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD1080. Forced to 30 FPS."); + mCamFrameRate = 30; + } - case sl::RESOLUTION::VGA: - if (mCamFrameRate == 15 || mCamFrameRate == 30 || mCamFrameRate == 60 || - mCamFrameRate == 100) { - break; - } + break; - if (mCamFrameRate > 15 && mCamFrameRate < 30) { - NODELET_WARN_STREAM("Wrong FrameRate (" - << mCamFrameRate - << ") for the resolution VGA. Set to 15 FPS."); - mCamFrameRate = 15; - } else if (mCamFrameRate > 30 && mCamFrameRate < 60) { - NODELET_WARN_STREAM("Wrong FrameRate (" - << mCamFrameRate - << ") for the resolution VGA. Set to 30 FPS."); - mCamFrameRate = 30; - } else if (mCamFrameRate > 60 && mCamFrameRate < 100) { - NODELET_WARN_STREAM("Wrong FrameRate (" - << mCamFrameRate - << ") for the resolution VGA. Set to 60 FPS."); - mCamFrameRate = 60; - } else if (mCamFrameRate > 100) { - NODELET_WARN_STREAM("Wrong FrameRate (" - << mCamFrameRate - << ") for the resolution VGA. Set to 100 FPS."); - mCamFrameRate = 100; - } else { - NODELET_WARN_STREAM("Wrong FrameRate (" - << mCamFrameRate - << ") for the resolution VGA. Set to 15 FPS."); - mCamFrameRate = 15; - } + case sl::RESOLUTION::VGA: + if (mCamFrameRate == 15 || mCamFrameRate == 30 || mCamFrameRate == 60 || mCamFrameRate == 100) + { + break; + } + + if (mCamFrameRate < 23) + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution VGA. 
Forced to 15 FPS."); + mCamFrameRate = 15; + } + else if (mCamFrameRate >= 23 && mCamFrameRate < 45) + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution VGA. Forced to 30 FPS."); + mCamFrameRate = 30; + } + else if (mCamFrameRate >= 45 && mCamFrameRate < 80) + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution VGA. Forced to 60 FPS."); + mCamFrameRate = 60; + } + else if (mCamFrameRate >= 80) + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution VGA. Forced to 100 FPS."); + mCamFrameRate = 100; + } + + break; + + case sl::RESOLUTION::HD1200: + if (mCamFrameRate == 60 || mCamFrameRate == 30) + { + break; + } + if (mCamFrameRate < 45) + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD1200. Forced to 30 FPS."); + mCamFrameRate = 30; + } + else + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD1200. Forced to 60 FPS."); + mCamFrameRate = 60; + } + + break; + + case sl::RESOLUTION::SVGA: + if (mCamFrameRate == 120 || mCamFrameRate == 60 || mCamFrameRate == 30 || mCamFrameRate == 15) + { break; + } + + if (mCamFrameRate < 23) + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD1080. Forced to 15 FPS."); + mCamFrameRate = 15; + } + else if (mCamFrameRate >= 23 && mCamFrameRate < 45) + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD1080. Forced to 30 FPS."); + mCamFrameRate = 30; + } + else if (mCamFrameRate >= 45 && mCamFrameRate < 90) + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD1080. Forced to 60 FPS."); + mCamFrameRate = 60; + } + else if (mCamFrameRate >= 90) + { + NODELET_WARN_STREAM("Wrong FrameRate (" << mCamFrameRate << ") for the resolution HD1200. 
Forced to 120 FPS."); + mCamFrameRate = 120; + } + + break; + + case sl::RESOLUTION::AUTO: + break; default: - NODELET_WARN_STREAM("Invalid resolution. Set to HD720 @ 30 FPS"); - mCamResol = sl::RESOLUTION::HD720; - mCamFrameRate = 30; - } + NODELET_WARN_STREAM("Invalid resolution. Set to AUTO with default frame rate"); + mCamResol = sl::RESOLUTION::AUTO; + } } -void ZEDWrapperNodelet::initTransforms() { - // According to REP 105 -> http://www.ros.org/reps/rep-0105.html +void ZEDWrapperNodelet::initTransforms() +{ + // According to REP 105 -> http://www.ros.org/reps/rep-0105.html - // base_link <- odom <- map - // ^ | - // | | - // ------------------- + // base_link <- odom <- map + // ^ | + // | | + // ------------------- - // ----> Dynamic transforms - mOdom2BaseTransf.setIdentity(); // broadcasted if `publish_tf` is true - mMap2OdomTransf.setIdentity(); // broadcasted if `publish_map_tf` is true - mMap2BaseTransf.setIdentity(); // used internally, but not broadcasted - mMap2CameraTransf.setIdentity(); // used internally, but not broadcasted - // <---- Dynamic transforms + // ----> Dynamic transforms + mOdom2BaseTransf.setIdentity(); // broadcasted if `publish_tf` is true + mMap2OdomTransf.setIdentity(); // broadcasted if `publish_map_tf` is true + mMap2BaseTransf.setIdentity(); // used internally, but not broadcasted + // <---- Dynamic transforms } -bool ZEDWrapperNodelet::getCamera2BaseTransform() { - NODELET_DEBUG("Getting static TF from '%s' to '%s'", mCameraFrameId.c_str(), mBaseFrameId.c_str()); - - mCamera2BaseTransfValid = false; +bool ZEDWrapperNodelet::getCamera2BaseTransform() +{ + NODELET_DEBUG("Getting static TF from '%s' to '%s'", mCameraFrameId.c_str(), mBaseFrameId.c_str()); + + mCamera2BaseTransfValid = false; + + // ----> Static transforms + // Sensor to Base link + try + { + // Save the transformation + geometry_msgs::TransformStamped c2b = + mTfBuffer->lookupTransform(mCameraFrameId, mBaseFrameId, ros::Time(0), ros::Duration(0.1)); + + // Get 
the TF2 transformation + tf2::fromMsg(c2b.transform, mCamera2BaseTransf); + + double roll, pitch, yaw; + tf2::Matrix3x3(mCamera2BaseTransf.getRotation()).getRPY(roll, pitch, yaw); + + NODELET_INFO("Static transform Camera Center to Base [%s -> %s]", mCameraFrameId.c_str(), mBaseFrameId.c_str()); + NODELET_INFO(" * Translation: {%.3f,%.3f,%.3f}", mCamera2BaseTransf.getOrigin().x(), + mCamera2BaseTransf.getOrigin().y(), mCamera2BaseTransf.getOrigin().z()); + NODELET_INFO(" * Rotation: {%.3f,%.3f,%.3f}", roll * RAD2DEG, pitch * RAD2DEG, yaw * RAD2DEG); + } + catch (tf2::TransformException& ex) + { static bool first_error = true; + if (!first_error) + { + NODELET_DEBUG_THROTTLE(1.0, "Transform error: %s", ex.what()); + NODELET_WARN_THROTTLE(1.0, "The tf from '%s' to '%s' is not available.", mCameraFrameId.c_str(), + mBaseFrameId.c_str()); + NODELET_WARN_THROTTLE(1.0, + "Note: one of the possible cause of the problem is the absense of an instance " + "of the `robot_state_publisher` node publishing the correct static TF transformations " + "or a modified URDF not correctly reproducing the ZED " + "TF chain '%s' -> '%s' -> '%s'", + mBaseFrameId.c_str(), mCameraFrameId.c_str(), mDepthFrameId.c_str()); + first_error = false; + } + + mCamera2BaseTransf.setIdentity(); + return false; + } - - // ----> Static transforms - // Sensor to Base link - try { - // Save the transformation - geometry_msgs::TransformStamped c2b = - mTfBuffer->lookupTransform(mCameraFrameId, mBaseFrameId, ros::Time(0), ros::Duration(0.1)); - - // Get the TF2 transformation - tf2::fromMsg(c2b.transform, mCamera2BaseTransf); - - double roll, pitch, yaw; - tf2::Matrix3x3(mCamera2BaseTransf.getRotation()).getRPY(roll, pitch, yaw); - - NODELET_INFO("Static transform Camera Center to Base [%s -> %s]", - mCameraFrameId.c_str(), mBaseFrameId.c_str()); - NODELET_INFO(" * Translation: {%.3f,%.3f,%.3f}", - mCamera2BaseTransf.getOrigin().x(), mCamera2BaseTransf.getOrigin().y(), mCamera2BaseTransf.getOrigin().z()); - 
NODELET_INFO(" * Rotation: {%.3f,%.3f,%.3f}", - roll * RAD2DEG, pitch * RAD2DEG, yaw * RAD2DEG); - - } catch (tf2::TransformException& ex) { - if(!first_error) { - NODELET_DEBUG_THROTTLE(1.0, "Transform error: %s", ex.what()); - NODELET_WARN_THROTTLE(1.0, "The tf from '%s' to '%s' is not available.", - mCameraFrameId.c_str(), mBaseFrameId.c_str()); - NODELET_WARN_THROTTLE(1.0, "Note: one of the possible cause of the problem is the absense of an instance " - "of the `robot_state_publisher` node publishing the correct static TF transformations " - "or a modified URDF not correctly reproducing the ZED " - "TF chain '%s' -> '%s' -> '%s'", - mBaseFrameId.c_str(), mCameraFrameId.c_str(),mDepthFrameId.c_str()); - first_error=false; - } - - mCamera2BaseTransf.setIdentity(); - return false; - } - - // <---- Static transforms - mCamera2BaseTransfValid = true; - return true; + // <---- Static transforms + mCamera2BaseTransfValid = true; + return true; } -bool ZEDWrapperNodelet::getSens2CameraTransform() { - NODELET_DEBUG("Getting static TF from '%s' to '%s'", mDepthFrameId.c_str(), mCameraFrameId.c_str()); - - mSensor2CameraTransfValid = false; - +bool ZEDWrapperNodelet::getSens2CameraTransform() +{ + NODELET_DEBUG("Getting static TF from '%s' to '%s'", mDepthFrameId.c_str(), mCameraFrameId.c_str()); + + mSensor2CameraTransfValid = false; + + // ----> Static transforms + // Sensor to Camera Center + try + { + // Save the transformation + geometry_msgs::TransformStamped s2c = + mTfBuffer->lookupTransform(mDepthFrameId, mCameraFrameId, ros::Time(0), ros::Duration(0.1)); + // Get the TF2 transformation + tf2::fromMsg(s2c.transform, mSensor2CameraTransf); + + double roll, pitch, yaw; + tf2::Matrix3x3(mSensor2CameraTransf.getRotation()).getRPY(roll, pitch, yaw); + + NODELET_INFO("Static transform Sensor to Camera Center [%s -> %s]", mDepthFrameId.c_str(), mCameraFrameId.c_str()); + NODELET_INFO(" * Translation: {%.3f,%.3f,%.3f}", mSensor2CameraTransf.getOrigin().x(), + 
mSensor2CameraTransf.getOrigin().y(), mSensor2CameraTransf.getOrigin().z()); + NODELET_INFO(" * Rotation: {%.3f,%.3f,%.3f}", roll * RAD2DEG, pitch * RAD2DEG, yaw * RAD2DEG); + } + catch (tf2::TransformException& ex) + { static bool first_error = true; + if (!first_error) + { + NODELET_DEBUG_THROTTLE(1.0, "Transform error: %s", ex.what()); + NODELET_WARN_THROTTLE(1.0, "The tf from '%s' to '%s' is not available.", mDepthFrameId.c_str(), + mCameraFrameId.c_str()); + NODELET_WARN_THROTTLE(1.0, + "Note: one of the possible cause of the problem is the absense of an instance " + "of the `robot_state_publisher` node publishing the correct static TF transformations " + "or a modified URDF not correctly reproducing the ZED " + "TF chain '%s' -> '%s' -> '%s'", + mBaseFrameId.c_str(), mCameraFrameId.c_str(), mDepthFrameId.c_str()); + first_error = false; + } + + mSensor2CameraTransf.setIdentity(); + return false; + } - // ----> Static transforms - // Sensor to Camera Center - try { - // Save the transformation - geometry_msgs::TransformStamped s2c = - mTfBuffer->lookupTransform(mDepthFrameId, mCameraFrameId, ros::Time(0), ros::Duration(0.1)); - // Get the TF2 transformation - tf2::fromMsg(s2c.transform, mSensor2CameraTransf); - - double roll, pitch, yaw; - tf2::Matrix3x3(mSensor2CameraTransf.getRotation()).getRPY(roll, pitch, yaw); - - NODELET_INFO("Static transform Sensor to Camera Center [%s -> %s]", - mDepthFrameId.c_str(), mCameraFrameId.c_str()); - NODELET_INFO(" * Translation: {%.3f,%.3f,%.3f}", - mSensor2CameraTransf.getOrigin().x(), mSensor2CameraTransf.getOrigin().y(), mSensor2CameraTransf.getOrigin().z()); - NODELET_INFO(" * Rotation: {%.3f,%.3f,%.3f}", - roll * RAD2DEG, pitch * RAD2DEG, yaw * RAD2DEG); - } catch (tf2::TransformException& ex) { - if(!first_error) { - NODELET_DEBUG_THROTTLE(1.0, "Transform error: %s", ex.what()); - NODELET_WARN_THROTTLE(1.0, "The tf from '%s' to '%s' is not available.", - mDepthFrameId.c_str(), mCameraFrameId.c_str()); - 
NODELET_WARN_THROTTLE(1.0, "Note: one of the possible cause of the problem is the absense of an instance " - "of the `robot_state_publisher` node publishing the correct static TF transformations " - "or a modified URDF not correctly reproducing the ZED " - "TF chain '%s' -> '%s' -> '%s'", - mBaseFrameId.c_str(), mCameraFrameId.c_str(),mDepthFrameId.c_str()); - first_error = false; - } - - mSensor2CameraTransf.setIdentity(); - return false; - } - - // <---- Static transforms + // <---- Static transforms - mSensor2CameraTransfValid = true; - return true; + mSensor2CameraTransfValid = true; + return true; } -bool ZEDWrapperNodelet::getSens2BaseTransform() { - NODELET_DEBUG("Getting static TF from '%s' to '%s'", mDepthFrameId.c_str(), mBaseFrameId.c_str()); - - mSensor2BaseTransfValid = false; +bool ZEDWrapperNodelet::getSens2BaseTransform() +{ + NODELET_DEBUG("Getting static TF from '%s' to '%s'", mDepthFrameId.c_str(), mBaseFrameId.c_str()); + + mSensor2BaseTransfValid = false; + + // ----> Static transforms + // Sensor to Base link + try + { + // Save the transformation + geometry_msgs::TransformStamped s2b = + mTfBuffer->lookupTransform(mDepthFrameId, mBaseFrameId, ros::Time(0), ros::Duration(0.1)); + // Get the TF2 transformation + tf2::fromMsg(s2b.transform, mSensor2BaseTransf); + + double roll, pitch, yaw; + tf2::Matrix3x3(mSensor2BaseTransf.getRotation()).getRPY(roll, pitch, yaw); + + NODELET_INFO("Static transform Sensor to Base [%s -> %s]", mDepthFrameId.c_str(), mBaseFrameId.c_str()); + NODELET_INFO(" * Translation: {%.3f,%.3f,%.3f}", mSensor2BaseTransf.getOrigin().x(), + mSensor2BaseTransf.getOrigin().y(), mSensor2BaseTransf.getOrigin().z()); + NODELET_INFO(" * Rotation: {%.3f,%.3f,%.3f}", roll * RAD2DEG, pitch * RAD2DEG, yaw * RAD2DEG); + } + catch (tf2::TransformException& ex) + { static bool first_error = true; + if (!first_error) + { + NODELET_DEBUG_THROTTLE(1.0, "Transform error: %s", ex.what()); + NODELET_WARN_THROTTLE(1.0, "The tf from '%s' to '%s' 
is not available.", mDepthFrameId.c_str(), + mBaseFrameId.c_str()); + NODELET_WARN_THROTTLE(1.0, + "Note: one of the possible cause of the problem is the absense of an instance " + "of the `robot_state_publisher` node publishing the correct static TF transformations " + "or a modified URDF not correctly reproducing the ZED " + "TF chain '%s' -> '%s' -> '%s'", + mBaseFrameId.c_str(), mCameraFrameId.c_str(), mDepthFrameId.c_str()); + first_error = false; + } + + mSensor2BaseTransf.setIdentity(); + return false; + } - // ----> Static transforms - // Sensor to Base link - try { - // Save the transformation - geometry_msgs::TransformStamped s2b = - mTfBuffer->lookupTransform(mDepthFrameId, mBaseFrameId, ros::Time(0), ros::Duration(0.1)); - // Get the TF2 transformation - tf2::fromMsg(s2b.transform, mSensor2BaseTransf); - - double roll, pitch, yaw; - tf2::Matrix3x3(mSensor2BaseTransf.getRotation()).getRPY(roll, pitch, yaw); - - NODELET_INFO("Static transform Sensor to Base [%s -> %s]", - mDepthFrameId.c_str(), mBaseFrameId.c_str()); - NODELET_INFO(" * Translation: {%.3f,%.3f,%.3f}", - mSensor2BaseTransf.getOrigin().x(), mSensor2BaseTransf.getOrigin().y(), mSensor2BaseTransf.getOrigin().z()); - NODELET_INFO(" * Rotation: {%.3f,%.3f,%.3f}", - roll * RAD2DEG, pitch * RAD2DEG, yaw * RAD2DEG); - - } catch (tf2::TransformException& ex) { - if(!first_error) { - NODELET_DEBUG_THROTTLE(1.0, "Transform error: %s", ex.what()); - NODELET_WARN_THROTTLE(1.0, "The tf from '%s' to '%s' is not available.", - mDepthFrameId.c_str(), mBaseFrameId.c_str()); - NODELET_WARN_THROTTLE(1.0, "Note: one of the possible cause of the problem is the absense of an instance " - "of the `robot_state_publisher` node publishing the correct static TF transformations " - "or a modified URDF not correctly reproducing the ZED " - "TF chain '%s' -> '%s' -> '%s'", - mBaseFrameId.c_str(), mCameraFrameId.c_str(),mDepthFrameId.c_str()); - first_error = false; - } - - mSensor2BaseTransf.setIdentity(); - return 
false; - } - - // <---- Static transforms + // <---- Static transforms - mSensor2BaseTransfValid = true; - return true; + mSensor2BaseTransfValid = true; + return true; } -bool ZEDWrapperNodelet::set_pose(float xt, float yt, float zt, float rr, - float pr, float yr) { - initTransforms(); - - if (!mSensor2BaseTransfValid) { - getSens2BaseTransform(); - } - - if (!mSensor2CameraTransfValid) { - getSens2CameraTransform(); - } - - if (!mCamera2BaseTransfValid) { - getCamera2BaseTransform(); - } - - // Apply Base to sensor transform - tf2::Transform initPose; - tf2::Vector3 origin(xt, yt, zt); - initPose.setOrigin(origin); - tf2::Quaternion quat; - quat.setRPY(rr, pr, yr); - initPose.setRotation(quat); - - initPose = initPose * mSensor2BaseTransf.inverse(); - - // SL pose - sl::float3 t_vec; - t_vec[0] = initPose.getOrigin().x(); - t_vec[1] = initPose.getOrigin().y(); - t_vec[2] = initPose.getOrigin().z(); +bool ZEDWrapperNodelet::set_pose(float xt, float yt, float zt, float rr, float pr, float yr) +{ + initTransforms(); + + if (!mSensor2BaseTransfValid) + { + getSens2BaseTransform(); + } + + if (!mSensor2CameraTransfValid) + { + getSens2CameraTransform(); + } + + if (!mCamera2BaseTransfValid) + { + getCamera2BaseTransform(); + } + + // Apply Base to sensor transform + tf2::Transform initPose; + tf2::Vector3 origin(xt, yt, zt); + initPose.setOrigin(origin); + tf2::Quaternion quat; + quat.setRPY(rr, pr, yr); + initPose.setRotation(quat); + + initPose = initPose * mSensor2BaseTransf.inverse(); + + // SL pose + sl::float3 t_vec; + t_vec[0] = initPose.getOrigin().x(); + t_vec[1] = initPose.getOrigin().y(); + t_vec[2] = initPose.getOrigin().z(); + + sl::float4 q_vec; + q_vec[0] = initPose.getRotation().x(); + q_vec[1] = initPose.getRotation().y(); + q_vec[2] = initPose.getRotation().z(); + q_vec[3] = initPose.getRotation().w(); + + sl::Translation trasl(t_vec); + sl::Orientation orient(q_vec); + mInitialPoseSl.setTranslation(trasl); + mInitialPoseSl.setOrientation(orient); + 
+ return (mSensor2BaseTransfValid & mSensor2CameraTransfValid & mCamera2BaseTransfValid); +} - sl::float4 q_vec; - q_vec[0] = initPose.getRotation().x(); - q_vec[1] = initPose.getRotation().y(); - q_vec[2] = initPose.getRotation().z(); - q_vec[3] = initPose.getRotation().w(); +bool ZEDWrapperNodelet::on_set_pose(zed_interfaces::set_pose::Request& req, zed_interfaces::set_pose::Response& res) +{ + mInitialBasePose.resize(6); + mInitialBasePose[0] = req.x; + mInitialBasePose[1] = req.y; + mInitialBasePose[2] = req.z; + mInitialBasePose[3] = req.R; + mInitialBasePose[4] = req.P; + mInitialBasePose[5] = req.Y; - sl::Translation trasl(t_vec); - sl::Orientation orient(q_vec); - mInitialPoseSl.setTranslation(trasl); - mInitialPoseSl.setOrientation(orient); + std::lock_guard lock(mPosTrkMutex); - return (mSensor2BaseTransfValid & mSensor2CameraTransfValid & mCamera2BaseTransfValid); + // Restart tracking + start_pos_tracking(); + res.done = true; + return true; } -bool ZEDWrapperNodelet::on_set_pose( - zed_interfaces::set_pose::Request& req, - zed_interfaces::set_pose::Response& res) { - mInitialBasePose.resize(6); - mInitialBasePose[0] = req.x; - mInitialBasePose[1] = req.y; - mInitialBasePose[2] = req.z; - mInitialBasePose[3] = req.R; - mInitialBasePose[4] = req.P; - mInitialBasePose[5] = req.Y; - - if (!set_pose(mInitialBasePose[0], mInitialBasePose[1], mInitialBasePose[2], - mInitialBasePose[3], mInitialBasePose[4], mInitialBasePose[5])) { - res.done = false; - return false; - } - - std::lock_guard lock(mPosTrkMutex); +bool ZEDWrapperNodelet::on_reset_tracking(zed_interfaces::reset_tracking::Request& req, + zed_interfaces::reset_tracking::Response& res) +{ + if (!mPosTrackingStarted) + { + res.reset_done = false; + return false; + } - // Disable tracking - mTrackingActivated = false; - mZed.disablePositionalTracking(); + std::lock_guard lock(mPosTrkMutex); - // Restart tracking - start_pos_tracking(); + // Restart tracking + start_pos_tracking(); - res.done = true; - 
return true; + res.reset_done = true; + return true; } -bool ZEDWrapperNodelet::on_reset_tracking( - zed_interfaces::reset_tracking::Request& req, - zed_interfaces::reset_tracking::Response& res) { - if (!mTrackingActivated) { - res.reset_done = false; - return false; - } +bool ZEDWrapperNodelet::on_reset_odometry(zed_interfaces::reset_odometry::Request& req, + zed_interfaces::reset_odometry::Response& res) +{ + std::lock_guard lock(mOdomMutex); + mOdom2BaseTransf.setIdentity(); + mOdomPath.clear(); - if (!set_pose(mInitialBasePose[0], mInitialBasePose[1], mInitialBasePose[2], - mInitialBasePose[3], mInitialBasePose[4], mInitialBasePose[5])) { - res.reset_done = false; - return false; - } + res.reset_done = true; + return true; +} - std::lock_guard lock(mPosTrkMutex); +bool ZEDWrapperNodelet::start_3d_mapping() +{ + if (!mMappingEnabled) + { + return false; + } + + NODELET_INFO_STREAM("*** Starting Spatial Mapping ***"); + + sl::SpatialMappingParameters params; + params.map_type = sl::SpatialMappingParameters::SPATIAL_MAP_TYPE::FUSED_POINT_CLOUD; + params.use_chunk_only = true; + + sl::SpatialMappingParameters spMapPar; + + float lRes = spMapPar.allowed_resolution.first; + float hRes = spMapPar.allowed_resolution.second; + + if (mMappingRes < lRes) + { + NODELET_WARN_STREAM("'mapping/resolution' value (" + << mMappingRes << " m) is lower than the allowed resolution values. Fixed automatically"); + mMappingRes = lRes; + } + if (mMappingRes > hRes) + { + NODELET_WARN_STREAM("'mapping/resolution' value (" + << mMappingRes << " m) is higher than the allowed resolution values. 
Fixed automatically"); + mMappingRes = hRes; + } + + params.resolution_meter = mMappingRes; + + float lRng = spMapPar.allowed_range.first; + float hRng = spMapPar.allowed_range.second; + + if (mMaxMappingRange < 0) + { + mMaxMappingRange = sl::SpatialMappingParameters::getRecommendedRange(mMappingRes, mZed); + NODELET_INFO_STREAM("Mapping: max range set to " << mMaxMappingRange << " m for a resolution of " << mMappingRes + << " m"); + } + else if (mMaxMappingRange < lRng) + { + NODELET_WARN_STREAM("'mapping/max_mapping_range_m' value (" + << mMaxMappingRange << " m) is lower than the allowed resolution values. Fixed automatically"); + mMaxMappingRange = lRng; + } + else if (mMaxMappingRange > hRng) + { + NODELET_WARN_STREAM("'mapping/max_mapping_range_m' value (" + << mMaxMappingRange << " m) is higher than the allowed resolution values. Fixed automatically"); + mMaxMappingRange = hRng; + } + + params.range_meter = mMaxMappingRange; + + sl::ERROR_CODE err = mZed.enableSpatialMapping(params); + + if (err == sl::ERROR_CODE::SUCCESS) + { + if (mPubFusedCloud.getTopic().empty()) + { + std::string pointcloud_fused_topic = "mapping/fused_cloud"; + mPubFusedCloud = mNhNs.advertise(pointcloud_fused_topic, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubFusedCloud.getTopic() << " @ " << mFusedPcPubFreq << " Hz"); + } + + mMappingRunning = true; + + mFusedPcTimer = + mNhNs.createTimer(ros::Duration(1.0 / mFusedPcPubFreq), &ZEDWrapperNodelet::callback_pubFusedPointCloud, this); + + NODELET_INFO_STREAM(" * Resolution: " << params.resolution_meter << " m"); + NODELET_INFO_STREAM(" * Max Mapping Range: " << params.range_meter << " m"); + NODELET_INFO_STREAM(" * Map point cloud publishing rate: " << mFusedPcPubFreq << " Hz"); - // Disable tracking - mTrackingActivated = false; - mZed.disablePositionalTracking(); + return true; + } + else + { + mMappingRunning = false; + mFusedPcTimer.stop(); - // Restart tracking - start_pos_tracking(); + NODELET_WARN("Mapping not 
activated: %s", sl::toString(err).c_str()); - res.reset_done = true; - return true; + return false; + } } -bool ZEDWrapperNodelet::on_reset_odometry( - zed_interfaces::reset_odometry::Request& req, - zed_interfaces::reset_odometry::Response& res) { - mResetOdom = true; - res.reset_done = true; - return true; +void ZEDWrapperNodelet::stop_3d_mapping() +{ + mFusedPcTimer.stop(); + mMappingRunning = false; + mMappingEnabled = false; + mZed.disableSpatialMapping(); + + NODELET_INFO("*** Spatial Mapping stopped ***"); } -bool ZEDWrapperNodelet::start_3d_mapping() { - if (!mMappingEnabled) { - return false; - } +bool ZEDWrapperNodelet::start_obj_detect() +{ + if (mZedRealCamModel == sl::MODEL::ZED) + { + NODELET_ERROR_STREAM("Object detection not started. OD is not available for ZED camera model"); + return false; + } - NODELET_INFO_STREAM("*** Starting Spatial Mapping ***"); + if (mDepthDisabled) + { + NODELET_WARN("Cannot start Object Detection if `depth/depth_mode` is set to `NONE`"); + return false; + } - sl::SpatialMappingParameters params; - params.map_type = sl::SpatialMappingParameters::SPATIAL_MAP_TYPE::FUSED_POINT_CLOUD; - params.use_chunk_only = true; + if (!mObjDetEnabled) + { + return false; + } - sl::SpatialMappingParameters spMapPar; + if (!mCamera2BaseTransfValid || !mSensor2CameraTransfValid || !mSensor2BaseTransfValid) + { + NODELET_DEBUG("Tracking transforms not yet ready, OD starting postponed"); + return false; + } + + NODELET_INFO_STREAM("*** Starting Object Detection ***"); + + sl::ObjectDetectionParameters od_p; + od_p.enable_segmentation = false; + od_p.enable_tracking = mObjDetTracking; + od_p.detection_model = mObjDetModel; + od_p.filtering_mode = mObjFilterMode; + od_p.prediction_timeout_s = mObjDetPredTimeout; + od_p.allow_reduced_precision_inference = mObjDetReducedPrecision; + od_p.max_range = mObjDetMaxRange; + + mObjDetFilter.clear(); + if (mObjDetPeopleEnable) + { + mObjDetFilter.push_back(sl::OBJECT_CLASS::PERSON); + } + if 
(mObjDetVehiclesEnable) + { + mObjDetFilter.push_back(sl::OBJECT_CLASS::VEHICLE); + } + if (mObjDetBagsEnable) + { + mObjDetFilter.push_back(sl::OBJECT_CLASS::BAG); + } + if (mObjDetAnimalsEnable) + { + mObjDetFilter.push_back(sl::OBJECT_CLASS::ANIMAL); + } + if (mObjDetElectronicsEnable) + { + mObjDetFilter.push_back(sl::OBJECT_CLASS::ELECTRONICS); + } + if (mObjDetFruitsEnable) + { + mObjDetFilter.push_back(sl::OBJECT_CLASS::FRUIT_VEGETABLE); + } + if (mObjDetSportEnable) + { + mObjDetFilter.push_back(sl::OBJECT_CLASS::SPORT); + } + + sl::ERROR_CODE objDetError = mZed.enableObjectDetection(od_p); + + if (objDetError != sl::ERROR_CODE::SUCCESS) + { + NODELET_ERROR_STREAM("Object detection error: " << sl::toString(objDetError)); - float lRes = spMapPar.allowed_resolution.first; - float hRes = spMapPar.allowed_resolution.second; + mObjDetRunning = false; + return false; + } - if(mMappingRes < lRes) { - NODELET_WARN_STREAM( "'mapping/resolution' value (" << mMappingRes << " m) is lower than the allowed resolution values. Fixed automatically" ); - mMappingRes = lRes; - } - if(mMappingRes > hRes) { - NODELET_WARN_STREAM( "'mapping/resolution' value (" << mMappingRes << " m) is higher than the allowed resolution values. 
Fixed automatically" ); - mMappingRes = hRes; - } + if (mPubObjDet.getTopic().empty()) + { + std::string object_det_topic_root = "obj_det"; + std::string object_det_topic = object_det_topic_root + "/objects"; - params.resolution_meter = mMappingRes; + mPubObjDet = mNhNs.advertise(object_det_topic, 1); + NODELET_INFO_STREAM(" * Advertised on topic " << mPubObjDet.getTopic()); + } - float lRng = spMapPar.allowed_range.first; - float hRng = spMapPar.allowed_range.second; + mObjDetRunning = true; + return true; +} - if(mMaxMappingRange < 0) { - mMaxMappingRange = sl::SpatialMappingParameters::getRecommendedRange( mMappingRes, mZed ); - NODELET_INFO_STREAM("Mapping: max range set to " << mMaxMappingRange << " m for a resolution of " << mMappingRes << " m" ); - } else if(mMaxMappingRange < lRng) { - NODELET_WARN_STREAM( "'mapping/max_mapping_range_m' value (" << mMaxMappingRange << " m) is lower than the allowed resolution values. Fixed automatically" ); - mMaxMappingRange = lRng; - } else if(mMaxMappingRange > hRng) { - NODELET_WARN_STREAM( "'mapping/max_mapping_range_m' value (" << mMaxMappingRange << " m) is higher than the allowed resolution values. 
Fixed automatically" ); - mMaxMappingRange = hRng; - } +void ZEDWrapperNodelet::stop_obj_detect() +{ + if (mObjDetRunning) + { + NODELET_INFO("*** Stopping Object Detection ***"); + mObjDetRunning = false; + mObjDetEnabled = false; + mZed.disableObjectDetection(); - params.range_meter = mMaxMappingRange; + // ----> Send an empty message to indicate that no more objects are tracked + // (e.g clean Rviz2) + zed_interfaces::ObjectsStampedPtr objMsg = boost::make_shared(); - sl::ERROR_CODE err = mZed.enableSpatialMapping(params); + objMsg->header.stamp = mFrameTimestamp; + objMsg->header.frame_id = mLeftCamFrameId; - if (err == sl::ERROR_CODE::SUCCESS) { - if(mPubFusedCloud.getTopic().empty()) { - string pointcloud_fused_topic = "mapping/fused_cloud"; - mPubFusedCloud = mNhNs.advertise(pointcloud_fused_topic, 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubFusedCloud.getTopic() << " @ " << mFusedPcPubFreq << " Hz"); - } + objMsg->objects.clear(); - mMappingRunning = true; + NODELET_DEBUG_STREAM("Publishing EMPTY OBJ message " << mPubObjDet.getTopic().c_str()); + mPubObjDet.publish(objMsg); + // <---- Send an empty message to indicate that no more objects are tracked + // (e.g clean Rviz2) + } +} - mFusedPcTimer = mNhNs.createTimer(ros::Duration(1.0 / mFusedPcPubFreq), &ZEDWrapperNodelet::pubFusedPointCloudCallback, - this); +void ZEDWrapperNodelet::start_pos_tracking() +{ + NODELET_INFO_STREAM("*** Starting Positional Tracking ***"); - NODELET_INFO_STREAM(" * Resolution: " << params.resolution_meter << " m"); - NODELET_INFO_STREAM(" * Max Mapping Range: " << params.range_meter << " m"); - NODELET_INFO_STREAM(" * Map point cloud publishing rate: " << mFusedPcPubFreq << " Hz"); + mPosTrackingReady = false; // Useful to not publish wrong TF with IMU frame broadcasting - return true; - } else { - mMappingRunning = false; - mFusedPcTimer.stop(); + if (mDepthDisabled) + { + NODELET_WARN("Cannot start Positional Tracking if `depth.depth_mode` is set to 'NONE'"); + 
return; + } - NODELET_WARN("Mapping not activated: %s", sl::toString(err).c_str()); + NODELET_INFO(" * Waiting for valid static transformations..."); - return false; - } -} + bool transformOk = false; + double elapsed = 0.0; -void ZEDWrapperNodelet::stop_3d_mapping() { - mFusedPcTimer.stop(); - mMappingRunning = false; - mMappingEnabled = false; - mZed.disableSpatialMapping(); + auto start = std::chrono::high_resolution_clock::now(); - NODELET_INFO("*** Spatial Mapping stopped ***"); -} + do + { + transformOk = set_pose(mInitialBasePose[0], mInitialBasePose[1], mInitialBasePose[2], mInitialBasePose[3], + mInitialBasePose[4], mInitialBasePose[5]); -bool ZEDWrapperNodelet::start_obj_detect() { - if(mZedRealCamModel!=sl::MODEL::ZED2) { - NODELET_ERROR_STREAM( "Object detection not started. OD is available only using a ZED2 camera model"); - return false; - } + elapsed = std::chrono::duration_cast(std::chrono::high_resolution_clock::now() - start) + .count(); - if(!mObjDetEnabled) { - return false; - } + std::this_thread::sleep_for(std::chrono::milliseconds(500)); - if( !mCamera2BaseTransfValid || !mSensor2CameraTransfValid || !mSensor2BaseTransfValid) { - NODELET_DEBUG( "Tracking transforms not yet ready, OD starting postponed"); - return false; + if (elapsed > 10000) + { + NODELET_WARN(" !!! Failed to get static transforms. Is the 'ROBOT STATE PUBLISHER' node correctly working? "); + break; } - NODELET_INFO_STREAM("*** Starting Object Detection ***"); + } while (transformOk == false); - sl::ObjectDetectionParameters od_p; - od_p.enable_mask_output = false; - od_p.enable_tracking = mObjDetTracking; - od_p.image_sync = true; + if (transformOk) + { + NODELET_DEBUG("Time required to get valid static transforms: %g sec", elapsed / 1000.); + } - sl::ERROR_CODE objDetError = mZed.enableObjectDetection(od_p); + NODELET_INFO("Initial ZED left camera pose (ZED pos. 
tracking): "); + NODELET_INFO(" * T: [%g,%g,%g]", mInitialPoseSl.getTranslation().x, mInitialPoseSl.getTranslation().y, + mInitialPoseSl.getTranslation().z); + NODELET_INFO(" * Q: [%g,%g,%g,%g]", mInitialPoseSl.getOrientation().ox, mInitialPoseSl.getOrientation().oy, + mInitialPoseSl.getOrientation().oz, mInitialPoseSl.getOrientation().ow); - if (objDetError != sl::ERROR_CODE::SUCCESS) { - NODELET_ERROR_STREAM("Object detection error: " << sl::toString(objDetError)); + // Positional Tracking parameters + sl::PositionalTrackingParameters posTrackParams; - mObjDetRunning = false; - return false; - } + posTrackParams.initial_world_transform = mInitialPoseSl; + posTrackParams.enable_area_memory = mAreaMemory; - if(mPubObjDet.getTopic().empty()) { - string object_det_topic_root = "obj_det"; - string object_det_topic = object_det_topic_root + "/objects"; - string object_det_rviz_topic = object_det_topic_root + "/object_markers"; + mPoseSmoothing = false; // Always false. Pose Smoothing is to be enabled only for VR/AR applications + posTrackParams.enable_pose_smoothing = mPoseSmoothing; + posTrackParams.set_floor_as_origin = mFloorAlignment; + posTrackParams.depth_min_range = static_cast(mPosTrkMinDepth); - mPubObjDet = mNhNs.advertise(object_det_topic, 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubObjDet.getTopic()); - mPubObjDetViz = mNhNs.advertise(object_det_rviz_topic, 1); - NODELET_INFO_STREAM("Advertised on topic " << mPubObjDetViz.getTopic()); + if (mAreaMemDbPath != "" && !sl_tools::file_exist(mAreaMemDbPath)) + { + posTrackParams.area_file_path = ""; + NODELET_WARN_STREAM("area_memory_db_path [" << mAreaMemDbPath << "] doesn't exist or is unreachable. 
"); + if (mSaveAreaMapOnClosing) + { + NODELET_INFO_STREAM( + "The file will be automatically created when closing the node or calling the " + "'save_area_map' service if a valid Area Memory is available."); } + } + else + { + posTrackParams.area_file_path = mAreaMemDbPath.c_str(); + } - mObjDetFilter.clear(); - if(mObjDetPeople) { - mObjDetFilter.push_back(sl::OBJECT_CLASS::PERSON); - } - if(mObjDetVehicles) { - mObjDetFilter.push_back(sl::OBJECT_CLASS::VEHICLE); - } + posTrackParams.enable_imu_fusion = mImuFusion; + posTrackParams.set_as_static = mIsStatic; + posTrackParams.set_gravity_as_origin = mSetGravityAsOrigin; + posTrackParams.mode = mPosTrkMode; + sl::ERROR_CODE err = mZed.enablePositionalTracking(posTrackParams); - mObjDetRunning = true; - return false; -} + if (err == sl::ERROR_CODE::SUCCESS) + { + mPosTrackingStarted = true; + } + else + { + mPosTrackingStarted = false; -void ZEDWrapperNodelet::stop_obj_detect() { - if (mObjDetRunning) { - NODELET_INFO_STREAM("*** Stopping Object Detection ***"); - mObjDetRunning = false; - mObjDetEnabled = false; - mZed.disableObjectDetection(); - } + NODELET_WARN("Pos. 
Tracking not started: %s", sl::toString(err).c_str()); + } } -void ZEDWrapperNodelet::start_pos_tracking() { - NODELET_INFO_STREAM("*** Starting Positional Tracking ***"); +bool ZEDWrapperNodelet::on_save_area_memory(zed_interfaces::save_area_memory::Request& req, + zed_interfaces::save_area_memory::Response& res) +{ + std::string file_path = sl_tools::resolveFilePath(req.area_memory_filename); - NODELET_INFO(" * Waiting for valid static transformations..."); + bool ret = saveAreaMap(file_path, &res.info); - bool transformOk = false; - double elapsed = 0.0; + return ret; +} - auto start = std::chrono::high_resolution_clock::now(); +bool ZEDWrapperNodelet::saveAreaMap(std::string file_path, std::string* out_msg) +{ + std::ostringstream os; - do { - transformOk = set_pose(mInitialBasePose[0], mInitialBasePose[1], mInitialBasePose[2], - mInitialBasePose[3], mInitialBasePose[4], mInitialBasePose[5]); + bool node_running = mNhNs.ok(); + if (!mZed.isOpened()) + { + os << "Cannot save Area Memory. The camera is closed."; - elapsed = std::chrono::duration_cast(std::chrono::high_resolution_clock::now() - - start).count(); + if (node_running) + NODELET_WARN_STREAM(os.str().c_str()); + else + std::cerr << os.str() << std::endl; - std::this_thread::sleep_for(std::chrono::milliseconds(100)); + if (out_msg) + *out_msg = os.str(); - if (elapsed > 10000) { - NODELET_WARN(" !!! Failed to get static transforms. Is the 'ROBOT STATE PUBLISHER' node correctly working? 
"); - break; - } + return false; + } - } while (transformOk == false); + if (mPosTrackingStarted && mAreaMemory) + { + sl::ERROR_CODE err = mZed.saveAreaMap(sl::String(file_path.c_str())); + if (err != sl::ERROR_CODE::SUCCESS) + { + os << "Error saving positional tracking area memory: " << sl::toString(err).c_str(); - if (transformOk) { - NODELET_DEBUG("Time required to get valid static transforms: %g sec", elapsed / 1000.); - } + if (node_running) + NODELET_WARN_STREAM(os.str().c_str()); + else + std::cerr << os.str() << std::endl; - NODELET_INFO("Initial ZED left camera pose (ZED pos. tracking): "); - NODELET_INFO(" * T: [%g,%g,%g]", - mInitialPoseSl.getTranslation().x, mInitialPoseSl.getTranslation().y, mInitialPoseSl.getTranslation().z); - NODELET_INFO(" * Q: [%g,%g,%g,%g]", - mInitialPoseSl.getOrientation().ox, mInitialPoseSl.getOrientation().oy, - mInitialPoseSl.getOrientation().oz, mInitialPoseSl.getOrientation().ow); + if (out_msg) + *out_msg = os.str(); - if (mAreaMemDbPath != "" && !sl_tools::file_exist(mAreaMemDbPath)) { - mAreaMemDbPath = ""; - NODELET_WARN("area_memory_db_path path doesn't exist or is unreachable."); + return false; } - // Tracking parameters - sl::PositionalTrackingParameters trackParams; + if (node_running) + NODELET_INFO_STREAM("Saving Area Memory file: " << file_path); + else + std::cerr << "Saving Area Memory file: " << file_path << " "; - trackParams.area_file_path = mAreaMemDbPath.c_str(); + sl::AREA_EXPORTING_STATE state; + do + { + std::this_thread::sleep_for(std::chrono::milliseconds(500)); + state = mZed.getAreaExportState(); + if (node_running) + NODELET_INFO_STREAM("."); + else + std::cerr << "."; + } while (state == sl::AREA_EXPORTING_STATE::RUNNING); + if (!node_running) + std::cerr << std::endl; - mPoseSmoothing = false; // Always false. 
Pose Smoothing is to be enabled only for VR/AR applications - trackParams.enable_pose_smoothing = mPoseSmoothing; + if (state == sl::AREA_EXPORTING_STATE::SUCCESS) + { + os << "Area Memory file saved correctly."; - trackParams.enable_area_memory = mAreaMemory; - trackParams.enable_imu_fusion = mImuFusion; - trackParams.initial_world_transform = mInitialPoseSl; + if (node_running) + NODELET_INFO_STREAM(os.str().c_str()); + else + std::cerr << os.str() << std::endl; - trackParams.set_floor_as_origin = mFloorAlignment; + if (out_msg) + *out_msg = os.str(); + return true; + } - sl::ERROR_CODE err = mZed.enablePositionalTracking(trackParams); + os << "Error saving Area Memory file: " << sl::toString(state).c_str(); - if (err == sl::ERROR_CODE::SUCCESS) { - mTrackingActivated = true; - } else { - mTrackingActivated = false; + if (node_running) + NODELET_WARN_STREAM(os.str().c_str()); + else + std::cerr << os.str() << std::endl; - NODELET_WARN("Tracking not activated: %s", sl::toString(err).c_str()); - } + if (out_msg) + *out_msg = os.str(); + + return false; + } + return false; } -void ZEDWrapperNodelet::publishOdom(tf2::Transform odom2baseTransf, sl::Pose& slPose, ros::Time t) { - nav_msgs::OdometryPtr odomMsg = boost::make_shared(); +void ZEDWrapperNodelet::publishOdom(tf2::Transform odom2baseTransf, sl::Pose& slPose, ros::Time t) +{ + size_t odomSub = mPubOdom.getNumSubscribers(); + + if (odomSub) + { + nav_msgs::OdometryPtr odomMsg = boost::make_shared(); odomMsg->header.stamp = t; - odomMsg->header.frame_id = mOdometryFrameId; // frame - odomMsg->child_frame_id = mBaseFrameId; // camera_frame - // conversion from Tranform to message - geometry_msgs::Transform base2odom = tf2::toMsg(odom2baseTransf); + odomMsg->header.frame_id = mOdomFrameId; // frame + odomMsg->child_frame_id = mBaseFrameId; // camera_frame + // Add all value in odometry message - odomMsg->pose.pose.position.x = base2odom.translation.x; - odomMsg->pose.pose.position.y = base2odom.translation.y; - 
odomMsg->pose.pose.position.z = base2odom.translation.z; - odomMsg->pose.pose.orientation.x = base2odom.rotation.x; - odomMsg->pose.pose.orientation.y = base2odom.rotation.y; - odomMsg->pose.pose.orientation.z = base2odom.rotation.z; - odomMsg->pose.pose.orientation.w = base2odom.rotation.w; + odomMsg->pose.pose.position.x = odom2baseTransf.getOrigin().x(); + odomMsg->pose.pose.position.y = odom2baseTransf.getOrigin().y(); + odomMsg->pose.pose.position.z = odom2baseTransf.getOrigin().z(); + odomMsg->pose.pose.orientation.x = odom2baseTransf.getRotation().x(); + odomMsg->pose.pose.orientation.y = odom2baseTransf.getRotation().y(); + odomMsg->pose.pose.orientation.z = odom2baseTransf.getRotation().z(); + odomMsg->pose.pose.orientation.w = odom2baseTransf.getRotation().w(); // Odometry pose covariance - - for (size_t i = 0; i < odomMsg->pose.covariance.size(); i++) { - odomMsg->pose.covariance[i] = static_cast(slPose.pose_covariance[i]); - - if (mTwoDMode) { - if (i == 14 || i == 21 || i == 28) { - odomMsg->pose.covariance[i] = 1e-9; // Very low covariance if 2D mode - } else if ((i >= 2 && i <= 4) || - (i >= 8 && i <= 10) || - (i >= 12 && i <= 13) || - (i >= 15 && i <= 16) || - (i >= 18 && i <= 20) || - (i == 22) || - (i >= 24 && i <= 27)) { - odomMsg->pose.covariance[i] = 0.0; - } + for (size_t i = 0; i < odomMsg->pose.covariance.size(); i++) + { + odomMsg->pose.covariance[i] = static_cast(slPose.pose_covariance[i]); + + if (mTwoDMode) + { + if ((i >= 2 && i <= 4) || (i >= 8 && i <= 10) || (i >= 12 && i <= 29) || (i >= 32 && i <= 34)) + { + odomMsg->pose.covariance[i] = 1e-9; // Very low covariance if 2D mode } + } } // Publish odometry message + NODELET_DEBUG("Publishing ODOM message"); mPubOdom.publish(odomMsg); + } } -void ZEDWrapperNodelet::publishPose(ros::Time t) { - tf2::Transform base_pose; - base_pose.setIdentity(); - - if (mPublishMapTf) { - base_pose = mMap2BaseTransf; - } else if (mPublishTf) { - base_pose = mOdom2BaseTransf; +void 
ZEDWrapperNodelet::publishPose() +{ + size_t poseSub = mPubPose.getNumSubscribers(); + size_t poseCovSub = mPubPoseCov.getNumSubscribers(); + + std_msgs::Header header; + header.stamp = mFrameTimestamp; + header.frame_id = mMapFrameId; // frame + + geometry_msgs::Pose pose; + + // Add all value in Pose message + pose.position.x = mMap2BaseTransf.getOrigin().x(); + pose.position.y = mMap2BaseTransf.getOrigin().y(); + pose.position.z = mMap2BaseTransf.getOrigin().z(); + pose.orientation.x = mMap2BaseTransf.getRotation().x(); + pose.orientation.y = mMap2BaseTransf.getRotation().y(); + pose.orientation.z = mMap2BaseTransf.getRotation().z(); + pose.orientation.w = mMap2BaseTransf.getRotation().w(); + + if (poseSub > 0) + { + geometry_msgs::PoseStampedPtr poseNoCov = boost::make_shared(); + + poseNoCov->header = header; + poseNoCov->pose = pose; + + // Publish pose stamped message + NODELET_DEBUG("Publishing POSE NO COV message"); + mPubPose.publish(poseNoCov); + } + + if (poseCovSub > 0) + { + geometry_msgs::PoseWithCovarianceStampedPtr poseCov = + boost::make_shared(); + + poseCov->header = header; + poseCov->pose.pose = pose; + + // Odometry pose covariance if available + + for (size_t i = 0; i < poseCov->pose.covariance.size(); i++) + { + poseCov->pose.covariance[i] = static_cast(mLastZedPose.pose_covariance[i]); + + if (mTwoDMode) + { + if ((i >= 2 && i <= 4) || (i >= 8 && i <= 10) || (i >= 12 && i <= 29) || (i >= 32 && i <= 34)) + { + poseCov->pose.covariance[i] = 1e-9; // Very low covariance if 2D mode + } + } } - std_msgs::Header header; - header.stamp = mFrameTimestamp; - header.frame_id = mMapFrameId; - geometry_msgs::Pose pose; - - // conversion from Tranform to message - geometry_msgs::Transform base2frame = tf2::toMsg(base_pose); - - // Add all value in Pose message - pose.position.x = base2frame.translation.x; - pose.position.y = base2frame.translation.y; - pose.position.z = base2frame.translation.z; - pose.orientation.x = base2frame.rotation.x; - 
pose.orientation.y = base2frame.rotation.y; - pose.orientation.z = base2frame.rotation.z; - pose.orientation.w = base2frame.rotation.w; - - if (mPubPose.getNumSubscribers() > 0) { - - geometry_msgs::PoseStamped poseNoCov; - - poseNoCov.header = header; - poseNoCov.pose = pose; - - // Publish pose stamped message - mPubPose.publish(poseNoCov); - } - - if (mPubPoseCov.getNumSubscribers() > 0) { - geometry_msgs::PoseWithCovarianceStampedPtr poseCovMsg = boost::make_shared(); - - poseCovMsg->header = header; - poseCovMsg->pose.pose = pose; - - // Odometry pose covariance if available - for (size_t i = 0; i < poseCovMsg->pose.covariance.size(); i++) { - poseCovMsg->pose.covariance[i] = static_cast(mLastZedPose.pose_covariance[i]); - - if (mTwoDMode) { - if (i == 14 || i == 21 || i == 28) { - poseCovMsg->pose.covariance[i] = 1e-9; // Very low covariance if 2D mode - } else if ((i >= 2 && i <= 4) || - (i >= 8 && i <= 10) || - (i >= 12 && i <= 13) || - (i >= 15 && i <= 16) || - (i >= 18 && i <= 20) || - (i == 22) || - (i >= 24 && i <= 27)) { - poseCovMsg->pose.covariance[i] = 0.0; - } - } - } - - // Publish pose with covariance stamped message - mPubPoseCov.publish(poseCovMsg); - } - -} - -void ZEDWrapperNodelet::publishOdomFrame(tf2::Transform odomTransf, ros::Time t) { - if (!mSensor2BaseTransfValid) { - getSens2BaseTransform(); - } - - if (!mSensor2CameraTransfValid) { - getSens2CameraTransform(); - } - - if (!mCamera2BaseTransfValid) { - getCamera2BaseTransform(); - } - - geometry_msgs::TransformStamped transformStamped; - transformStamped.header.stamp = t; - transformStamped.header.frame_id = mOdometryFrameId; - transformStamped.child_frame_id = mBaseFrameId; - // conversion from Tranform to message - transformStamped.transform = tf2::toMsg(odomTransf); - // Publish transformation - mTransformOdomBroadcaster.sendTransform(transformStamped); -} - -void ZEDWrapperNodelet::publishPoseFrame(tf2::Transform baseTransform, ros::Time t) { - if (!mSensor2BaseTransfValid) { - 
getSens2BaseTransform(); - } - - if (!mSensor2CameraTransfValid) { - getSens2CameraTransform(); - } - - if (!mCamera2BaseTransfValid) { - getCamera2BaseTransform(); - } - - geometry_msgs::TransformStamped transformStamped; - transformStamped.header.stamp = t; - transformStamped.header.frame_id = mMapFrameId; - transformStamped.child_frame_id = mOdometryFrameId; - // conversion from Tranform to message - transformStamped.transform = tf2::toMsg(baseTransform); - // Publish transformation - mTransformPoseBroadcaster.sendTransform(transformStamped); + // Publish pose with covariance stamped message + NODELET_DEBUG("Publishing POSE COV message"); + mPubPoseCov.publish(std::move(poseCov)); + } } -void ZEDWrapperNodelet::publishImuFrame(tf2::Transform imuTransform, ros::Time t) { - if (!mSensor2BaseTransfValid) { - getSens2BaseTransform(); - } - - if (!mSensor2CameraTransfValid) { - getSens2CameraTransform(); - } - - if (!mCamera2BaseTransfValid) { - getCamera2BaseTransform(); - } - - geometry_msgs::TransformStamped transformStamped; - transformStamped.header.stamp = t; - transformStamped.header.frame_id = mCameraFrameId; - transformStamped.child_frame_id = mImuFrameId; - // conversion from Tranform to message - transformStamped.transform = tf2::toMsg(imuTransform); - // Publish transformation - mTransformImuBroadcaster.sendTransform(transformStamped); +void ZEDWrapperNodelet::publishStaticImuFrame() +{ + // Publish IMU TF as static TF + if (!mPublishImuTf) + { + return; + } + + if (mStaticImuFramePublished) + { + return; + } + + mStaticImuTransformStamped.header.stamp = ros::Time::now(); + mStaticImuTransformStamped.header.frame_id = mLeftCamFrameId; + mStaticImuTransformStamped.child_frame_id = mImuFrameId; + sl::Translation sl_tr = mSlCamImuTransf.getTranslation(); + mStaticImuTransformStamped.transform.translation.x = sl_tr.x; + mStaticImuTransformStamped.transform.translation.y = sl_tr.y; + mStaticImuTransformStamped.transform.translation.z = sl_tr.z; + 
sl::Orientation sl_or = mSlCamImuTransf.getOrientation(); + mStaticImuTransformStamped.transform.rotation.x = sl_or.ox; + mStaticImuTransformStamped.transform.rotation.y = sl_or.oy; + mStaticImuTransformStamped.transform.rotation.z = sl_or.oz; + mStaticImuTransformStamped.transform.rotation.w = sl_or.ow; + + // Publish transformation + mStaticTfBroadcaster.sendTransform(mStaticImuTransformStamped); + + NODELET_INFO_STREAM("Published static transform '" << mImuFrameId << "' -> '" << mLeftCamFrameId << "'"); + + mStaticImuFramePublished = true; } void ZEDWrapperNodelet::publishImage(sensor_msgs::ImagePtr imgMsgPtr, sl::Mat img, image_transport::CameraPublisher& pubImg, sensor_msgs::CameraInfoPtr camInfoMsg, - string imgFrameId, ros::Time t) { - camInfoMsg->header.stamp = t; - sl_tools::imageToROSmsg( imgMsgPtr, img, imgFrameId, t); - pubImg.publish(imgMsgPtr, camInfoMsg); + std::string imgFrameId, ros::Time t) +{ + camInfoMsg->header.stamp = t; + sl_tools::imageToROSmsg(imgMsgPtr, img, imgFrameId, t); + pubImg.publish(imgMsgPtr, camInfoMsg); } -void ZEDWrapperNodelet::publishDepth(sensor_msgs::ImagePtr imgMsgPtr, sl::Mat depth, ros::Time t) { - - mDepthCamInfoMsg->header.stamp = t; +void ZEDWrapperNodelet::publishDepth(sensor_msgs::ImagePtr imgMsgPtr, sl::Mat depth, ros::Time t) +{ + mDepthCamInfoMsg->header.stamp = t; - if (!mOpenniDepthMode) { - sl_tools::imageToROSmsg(imgMsgPtr, depth, mDepthOptFrameId, t); - mPubDepth.publish(imgMsgPtr, mDepthCamInfoMsg); - return; - } + // NODELET_DEBUG_STREAM("mOpenniDepthMode: " << mOpenniDepthMode); - // OPENNI CONVERSION (meter -> millimeters - float32 -> uint16) - if(!imgMsgPtr) { - imgMsgPtr = boost::make_shared(); - } - - imgMsgPtr->header.stamp = t; - imgMsgPtr->header.frame_id = mDepthOptFrameId; - imgMsgPtr->height = depth.getHeight(); - imgMsgPtr->width = depth.getWidth(); - - int num = 1; // for endianness detection - imgMsgPtr->is_bigendian = !(*(char*)&num == 1); - - imgMsgPtr->step = imgMsgPtr->width * 
sizeof(uint16_t); - imgMsgPtr->encoding = sensor_msgs::image_encodings::MONO16; - - size_t size = imgMsgPtr->step * imgMsgPtr->height; - imgMsgPtr->data.resize(size); - - uint16_t* data = (uint16_t*)(&imgMsgPtr->data[0]); - - int dataSize = imgMsgPtr->width * imgMsgPtr->height; - sl::float1* depthDataPtr = depth.getPtr(); + if (!mOpenniDepthMode) + { + // NODELET_INFO("Using float32"); + sl_tools::imageToROSmsg(imgMsgPtr, depth, mDepthOptFrameId, t); + mPubDepth.publish(imgMsgPtr, mDepthCamInfoMsg); - for (int i = 0; i < dataSize; i++) { - *(data++) = static_cast(std::round(*(depthDataPtr++) * 1000)); // in mm, rounded - } + return; + } - mPubDepth.publish(imgMsgPtr, mDepthCamInfoMsg); + // NODELET_INFO("Using depth16"); + sl_tools::imageToROSmsg(imgMsgPtr, depth, mDepthOptFrameId, t); + mPubDepth.publish(imgMsgPtr, mDepthCamInfoMsg); } -void ZEDWrapperNodelet::publishDisparity(sl::Mat disparity, ros::Time t) { +void ZEDWrapperNodelet::publishDisparity(sl::Mat disparity, ros::Time t) +{ + sl::CameraInformation zedParam = mZed.getCameraInformation(mMatResol); - sl::CameraInformation zedParam = mZed.getCameraInformation(mMatResolDepth); + sensor_msgs::ImagePtr disparityImgMsg = boost::make_shared(); + stereo_msgs::DisparityImagePtr disparityMsg = boost::make_shared(); - sensor_msgs::ImagePtr disparityImgMsg = boost::make_shared(); - stereo_msgs::DisparityImagePtr disparityMsg = boost::make_shared(); + sl_tools::imageToROSmsg(disparityImgMsg, disparity, mDisparityFrameId, t); - sl_tools::imageToROSmsg(disparityImgMsg, disparity, mDisparityFrameId, t); + disparityMsg->image = *disparityImgMsg; + disparityMsg->header = disparityMsg->image.header; - disparityMsg->image = *disparityImgMsg; - disparityMsg->header = disparityMsg->image.header; + disparityMsg->f = zedParam.camera_configuration.calibration_parameters.left_cam.fx; + disparityMsg->T = zedParam.camera_configuration.calibration_parameters.getCameraBaseline(); -#if ZED_SDK_MAJOR_VERSION==3 && 
ZED_SDK_MINOR_VERSION<1 - disparityMsg->f = zedParam.calibration_parameters.left_cam.fx; - disparityMsg->T = zedParam.calibration_parameters.T.x; -#else - disparityMsg->f = zedParam.camera_configuration.calibration_parameters.left_cam.fx; - disparityMsg->T = zedParam.camera_configuration.calibration_parameters.getCameraBaseline(); -#endif - - if (disparityMsg->T > 0) { - disparityMsg->T *= -1.0f; - } + if (disparityMsg->T > 0) + { + disparityMsg->T *= -1.0f; + } - disparityMsg->min_disparity = disparityMsg->f * disparityMsg->T / mZed.getInitParameters().depth_minimum_distance; - disparityMsg->max_disparity = disparityMsg->f * disparityMsg->T / mZed.getInitParameters().depth_maximum_distance; + disparityMsg->min_disparity = disparityMsg->f * disparityMsg->T / mZed.getInitParameters().depth_minimum_distance; + disparityMsg->max_disparity = disparityMsg->f * disparityMsg->T / mZed.getInitParameters().depth_maximum_distance; - mPubDisparity.publish(disparityMsg); + mPubDisparity.publish(disparityMsg); } -void ZEDWrapperNodelet::pointcloud_thread_func() { - std::unique_lock lock(mPcMutex); - - while (!mStopNode) { - while (!mPcDataReady) { // loop to avoid spurious wakeups - if (mPcDataReadyCondVar.wait_for(lock, std::chrono::milliseconds(500)) == std::cv_status::timeout) { - // Check thread stopping - if (mStopNode) { - return; - } else { - continue; - } - } +void ZEDWrapperNodelet::pointcloud_thread_func() +{ + std::unique_lock lock(mPcMutex); + + while (!mStopNode) + { + while (!mPcDataReady) + { // loop to avoid spurious wakeups + if (mPcDataReadyCondVar.wait_for(lock, std::chrono::milliseconds(500)) == std::cv_status::timeout) + { + // Check thread stopping + if (mStopNode) + { + return; } - - // ----> Check publishing frequency - double pc_period_msec = 1000.0 / mPointCloudFreq; - - static std::chrono::steady_clock::time_point last_time = std::chrono::steady_clock::now(); - std::chrono::steady_clock::time_point now = std::chrono::steady_clock::now(); - - double 
elapsed_msec = std::chrono::duration_cast(now - last_time).count(); - - if (elapsed_msec < pc_period_msec) { - std::this_thread::sleep_for(std::chrono::milliseconds(static_cast(pc_period_msec - elapsed_msec))); + else + { + continue; } + } + } - // <---- Check publishing frequency + // ----> Check publishing frequency + double pc_period_msec = 1000.0 / mPointCloudFreq; - last_time = std::chrono::steady_clock::now(); - publishPointCloud(); + static std::chrono::steady_clock::time_point last_time = std::chrono::steady_clock::now(); + std::chrono::steady_clock::time_point now = std::chrono::steady_clock::now(); + double elapsed_msec = std::chrono::duration_cast(now - last_time).count(); - mPcDataReady = false; + if (elapsed_msec < pc_period_msec) + { + std::this_thread::sleep_for(std::chrono::milliseconds(static_cast(pc_period_msec - elapsed_msec))); } + // <---- Check publishing frequency - NODELET_DEBUG("Pointcloud thread finished"); -} + last_time = std::chrono::steady_clock::now(); + publishPointCloud(); -void ZEDWrapperNodelet::publishPointCloud() { - sensor_msgs::PointCloud2Ptr pointcloudMsg = boost::make_shared(); + mPcDataReady = false; + } - // Publish freq calculation - static std::chrono::steady_clock::time_point last_time = std::chrono::steady_clock::now(); - std::chrono::steady_clock::time_point now = std::chrono::steady_clock::now(); + NODELET_DEBUG("Pointcloud thread finished"); +} - double elapsed_usec = std::chrono::duration_cast(now - last_time).count(); - last_time = now; +void ZEDWrapperNodelet::publishPointCloud() +{ + sensor_msgs::PointCloud2Ptr pointcloudMsg = boost::make_shared(); - mPcPeriodMean_usec->addValue(elapsed_usec); + // Publish freq calculation + static std::chrono::steady_clock::time_point last_time = std::chrono::steady_clock::now(); + std::chrono::steady_clock::time_point now = std::chrono::steady_clock::now(); - // Initialize Point Cloud message - // 
https://github.com/ros/common_msgs/blob/jade-devel/sensor_msgs/include/sensor_msgs/point_cloud2_iterator.h + double elapsed_usec = std::chrono::duration_cast(now - last_time).count(); + last_time = now; - int ptsCount = mMatResolDepth.width * mMatResolDepth.height; + mPcPeriodMean_usec->addValue(elapsed_usec); - pointcloudMsg->header.stamp = mPointCloudTime; + // Initialize Point Cloud message + // https://github.com/ros/common_msgs/blob/jade-devel/sensor_msgs/include/sensor_msgs/point_cloud2_iterator.h - if (pointcloudMsg->width != mMatResolDepth.width || pointcloudMsg->height != mMatResolDepth.height) { - pointcloudMsg->header.frame_id = mPointCloudFrameId; // Set the header values of the ROS message + int ptsCount = mMatResol.width * mMatResol.height; - pointcloudMsg->is_bigendian = false; - pointcloudMsg->is_dense = false; + pointcloudMsg->header.stamp = mPointCloudTime; - pointcloudMsg->width = mMatResolDepth.width; - pointcloudMsg->height = mMatResolDepth.height; + if (pointcloudMsg->width != mMatResol.width || pointcloudMsg->height != mMatResol.height) + { + pointcloudMsg->header.frame_id = mPointCloudFrameId; // Set the header values of the ROS message - sensor_msgs::PointCloud2Modifier modifier(*pointcloudMsg); - modifier.setPointCloud2Fields(4, - "x", 1, sensor_msgs::PointField::FLOAT32, - "y", 1, sensor_msgs::PointField::FLOAT32, - "z", 1, sensor_msgs::PointField::FLOAT32, - "rgb", 1, sensor_msgs::PointField::FLOAT32); - } + pointcloudMsg->is_bigendian = false; + pointcloudMsg->is_dense = false; - // Data copy - sl::Vector4* cpu_cloud = mCloud.getPtr(); - float* ptCloudPtr = (float*)(&pointcloudMsg->data[0]); + pointcloudMsg->width = mMatResol.width; + pointcloudMsg->height = mMatResol.height; - // We can do a direct memcpy since data organization is the same - memcpy(ptCloudPtr, (float*)cpu_cloud, 4 * ptsCount * sizeof(float)); + sensor_msgs::PointCloud2Modifier modifier(*pointcloudMsg); + modifier.setPointCloud2Fields(4, "x", 1, 
sensor_msgs::PointField::FLOAT32, "y", 1, sensor_msgs::PointField::FLOAT32, + "z", 1, sensor_msgs::PointField::FLOAT32, "rgb", 1, sensor_msgs::PointField::FLOAT32); + } - // Pointcloud publishing - mPubCloud.publish(pointcloudMsg); -} + // Data copy + sl::Vector4* cpu_cloud = mCloud.getPtr(); + float* ptCloudPtr = (float*)(&pointcloudMsg->data[0]); -void ZEDWrapperNodelet::pubFusedPointCloudCallback(const ros::TimerEvent& e) { + // We can do a direct memcpy since data organization is the same + memcpy(ptCloudPtr, (float*)cpu_cloud, 4 * ptsCount * sizeof(float)); - sensor_msgs::PointCloud2Ptr pointcloudFusedMsg = boost::make_shared(); + // Pointcloud publishing + mPubCloud.publish(pointcloudMsg); +} - uint32_t fusedCloudSubnumber = mPubFusedCloud.getNumSubscribers(); +void ZEDWrapperNodelet::callback_pubFusedPointCloud(const ros::TimerEvent& e) +{ + sensor_msgs::PointCloud2Ptr pointcloudFusedMsg = boost::make_shared(); - if (fusedCloudSubnumber == 0) { - return; - } + uint32_t fusedCloudSubnumber = mPubFusedCloud.getNumSubscribers(); - std::lock_guard lock(mCloseZedMutex); + if (fusedCloudSubnumber == 0) + { + return; + } - if (!mZed.isOpened()) { - return; - } + std::lock_guard lock(mCloseZedMutex); - pointcloudFusedMsg->header.stamp = mFrameTimestamp; - mZed.requestSpatialMapAsync(); + if (!mZed.isOpened()) + { + return; + } - while (mZed.getSpatialMapRequestStatusAsync() == sl::ERROR_CODE::FAILURE) { - //Mesh is still generating - std::this_thread::sleep_for(std::chrono::milliseconds(1)); - } + // pointcloudFusedMsg->header.stamp = t; + mZed.requestSpatialMapAsync(); - sl::ERROR_CODE res = mZed.retrieveSpatialMapAsync(mFusedPC); + while (mZed.getSpatialMapRequestStatusAsync() == sl::ERROR_CODE::FAILURE) + { + // Mesh is still generating + std::this_thread::sleep_for(std::chrono::milliseconds(1)); + } - if (res != sl::ERROR_CODE::SUCCESS) { - NODELET_WARN_STREAM("Fused point cloud not extracted: " << sl::toString(res).c_str()); - return; - } + sl::ERROR_CODE res = 
mZed.retrieveSpatialMapAsync(mFusedPC); - size_t ptsCount = mFusedPC.getNumberOfPoints(); - bool resized = false; + if (res != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Fused point cloud not extracted: " << sl::toString(res).c_str()); + return; + } - if (pointcloudFusedMsg->width != ptsCount || pointcloudFusedMsg->height != 1) { - // Initialize Point Cloud message - // https://github.com/ros/common_msgs/blob/jade-devel/sensor_msgs/include/sensor_msgs/point_cloud2_iterator.h - pointcloudFusedMsg->header.frame_id = mMapFrameId; // Set the header values of the ROS message - pointcloudFusedMsg->is_bigendian = false; - pointcloudFusedMsg->is_dense = false; - pointcloudFusedMsg->width = ptsCount; - pointcloudFusedMsg->height = 1; + size_t ptsCount = mFusedPC.getNumberOfPoints(); + bool resized = false; - sensor_msgs::PointCloud2Modifier modifier(*pointcloudFusedMsg); - modifier.setPointCloud2Fields(4, - "x", 1, sensor_msgs::PointField::FLOAT32, - "y", 1, sensor_msgs::PointField::FLOAT32, - "z", 1, sensor_msgs::PointField::FLOAT32, - "rgb", 1, sensor_msgs::PointField::FLOAT32); + if (pointcloudFusedMsg->width != ptsCount || pointcloudFusedMsg->height != 1) + { + // Initialize Point Cloud message + // https://github.com/ros/common_msgs/blob/jade-devel/sensor_msgs/include/sensor_msgs/point_cloud2_iterator.h + pointcloudFusedMsg->header.frame_id = mMapFrameId; // Set the header values of the ROS message + pointcloudFusedMsg->is_bigendian = false; + pointcloudFusedMsg->is_dense = false; + pointcloudFusedMsg->width = ptsCount; + pointcloudFusedMsg->height = 1; - resized = true; - } + sensor_msgs::PointCloud2Modifier modifier(*pointcloudFusedMsg); + modifier.setPointCloud2Fields(4, "x", 1, sensor_msgs::PointField::FLOAT32, "y", 1, sensor_msgs::PointField::FLOAT32, + "z", 1, sensor_msgs::PointField::FLOAT32, "rgb", 1, sensor_msgs::PointField::FLOAT32); - std::chrono::steady_clock::time_point start_time = std::chrono::steady_clock::now(); + resized = true; + } - 
//NODELET_INFO_STREAM("Chunks: " << mFusedPC.chunks.size()); + //std::chrono::steady_clock::time_point start_time = std::chrono::steady_clock::now(); - int index = 0; - float* ptCloudPtr = (float*)(&pointcloudFusedMsg->data[0]); - int updated = 0; + // NODELET_INFO_STREAM("Chunks: " << mFusedPC.chunks.size()); - for (int c = 0; c < mFusedPC.chunks.size(); c++) { - if (mFusedPC.chunks[c].has_been_updated || resized) { - updated++; + float* ptCloudPtr = (float*)(&pointcloudFusedMsg->data[0]); + int updated = 0; - size_t chunkSize = mFusedPC.chunks[c].vertices.size(); + for (int c = 0; c < mFusedPC.chunks.size(); c++) + { + if (mFusedPC.chunks[c].has_been_updated || resized) + { + updated++; - if (chunkSize > 0) { + size_t chunkSize = mFusedPC.chunks[c].vertices.size(); - float* cloud_pts = (float*)(mFusedPC.chunks[c].vertices.data()); + if (chunkSize > 0) + { + float* cloud_pts = (float*)(mFusedPC.chunks[c].vertices.data()); - memcpy(ptCloudPtr, cloud_pts, 4 * chunkSize * sizeof(float)); + memcpy(ptCloudPtr, cloud_pts, 4 * chunkSize * sizeof(float)); - ptCloudPtr += 4 * chunkSize; - } + ptCloudPtr += 4 * chunkSize; - } else { - index += mFusedPC.chunks[c].vertices.size(); - } + pointcloudFusedMsg->header.stamp = sl_tools::slTime2Ros(mFusedPC.chunks[c].timestamp); + } } + } - std::chrono::steady_clock::time_point end_time = std::chrono::steady_clock::now(); - - //NODELET_INFO_STREAM("Updated: " << updated); - + //std::chrono::steady_clock::time_point end_time = std::chrono::steady_clock::now(); - //double elapsed_usec = std::chrono::duration_cast(end_time - start_time).count(); + // NODELET_INFO_STREAM("Updated: " << updated); + // double elapsed_usec = std::chrono::duration_cast(end_time - start_time).count(); - // NODELET_INFO_STREAM("Data copy: " << elapsed_usec << " usec [" << ptsCount << "] - " << (static_cast - // (ptsCount) / elapsed_usec) << " pts/usec"); + // NODELET_INFO_STREAM("Data copy: " << elapsed_usec << " usec [" << ptsCount << "] - " << (static_cast 
+ // (ptsCount) / elapsed_usec) << " pts/usec"); - // Pointcloud publishing - mPubFusedCloud.publish(pointcloudFusedMsg); + // Pointcloud publishing + mPubFusedCloud.publish(pointcloudFusedMsg); } -void ZEDWrapperNodelet::publishCamInfo(sensor_msgs::CameraInfoPtr camInfoMsg, - ros::Publisher pubCamInfo, ros::Time t) { - static int seq = 0; - camInfoMsg->header.stamp = t; - camInfoMsg->header.seq = seq; - pubCamInfo.publish(camInfoMsg); - seq++; +void ZEDWrapperNodelet::publishCamInfo(sensor_msgs::CameraInfoPtr camInfoMsg, ros::Publisher pubCamInfo, ros::Time t) +{ + static int seq = 0; + camInfoMsg->header.stamp = t; + camInfoMsg->header.seq = seq; + pubCamInfo.publish(camInfoMsg); + seq++; } void ZEDWrapperNodelet::fillCamInfo(sl::Camera& zed, sensor_msgs::CameraInfoPtr leftCamInfoMsg, - sensor_msgs::CameraInfoPtr rightCamInfoMsg, string leftFrameId, - string rightFrameId, bool rawParam /*= false*/) { - sl::CalibrationParameters zedParam; - -#if ZED_SDK_MAJOR_VERSION==3 && ZED_SDK_MINOR_VERSION<1 - if (rawParam) { - zedParam = zed.getCameraInformation(mMatResolVideo).calibration_parameters_raw; // ok - } else { - zedParam = zed.getCameraInformation(mMatResolVideo).calibration_parameters; // ok - } -#else - if (rawParam) { - zedParam = zed.getCameraInformation(mMatResolVideo).camera_configuration.calibration_parameters_raw; - } else { - zedParam = zed.getCameraInformation(mMatResolVideo).camera_configuration.calibration_parameters; - } -#endif - - float baseline = zedParam.getCameraBaseline(); - leftCamInfoMsg->distortion_model = - sensor_msgs::distortion_models::PLUMB_BOB; - rightCamInfoMsg->distortion_model = - sensor_msgs::distortion_models::PLUMB_BOB; - leftCamInfoMsg->D.resize(5); - rightCamInfoMsg->D.resize(5); - leftCamInfoMsg->D[0] = zedParam.left_cam.disto[0]; // k1 - leftCamInfoMsg->D[1] = zedParam.left_cam.disto[1]; // k2 - leftCamInfoMsg->D[2] = zedParam.left_cam.disto[4]; // k3 - leftCamInfoMsg->D[3] = zedParam.left_cam.disto[2]; // p1 - 
leftCamInfoMsg->D[4] = zedParam.left_cam.disto[3]; // p2 - rightCamInfoMsg->D[0] = zedParam.right_cam.disto[0]; // k1 - rightCamInfoMsg->D[1] = zedParam.right_cam.disto[1]; // k2 - rightCamInfoMsg->D[2] = zedParam.right_cam.disto[4]; // k3 - rightCamInfoMsg->D[3] = zedParam.right_cam.disto[2]; // p1 - rightCamInfoMsg->D[4] = zedParam.right_cam.disto[3]; // p2 - leftCamInfoMsg->K.fill(0.0); - rightCamInfoMsg->K.fill(0.0); - leftCamInfoMsg->K[0] = static_cast(zedParam.left_cam.fx); - leftCamInfoMsg->K[2] = static_cast(zedParam.left_cam.cx); - leftCamInfoMsg->K[4] = static_cast(zedParam.left_cam.fy); - leftCamInfoMsg->K[5] = static_cast(zedParam.left_cam.cy); - leftCamInfoMsg->K[8] = 1.0; - rightCamInfoMsg->K[0] = static_cast(zedParam.right_cam.fx); - rightCamInfoMsg->K[2] = static_cast(zedParam.right_cam.cx); - rightCamInfoMsg->K[4] = static_cast(zedParam.right_cam.fy); - rightCamInfoMsg->K[5] = static_cast(zedParam.right_cam.cy); - rightCamInfoMsg->K[8] = 1.0; - leftCamInfoMsg->R.fill(0.0); - rightCamInfoMsg->R.fill(0.0); - - for (size_t i = 0; i < 3; i++) { - // identity - rightCamInfoMsg->R[i + i * 3] = 1; - leftCamInfoMsg->R[i + i * 3] = 1; - } - -#if ZED_SDK_MAJOR_VERSION==3 && ZED_SDK_MINOR_VERSION<1 - if (rawParam) { - std::vector R_ = sl_tools::convertRodrigues(zedParam.R); - float* p = R_.data(); - - for (int i = 0; i < 9; i++) { - rightCamInfoMsg->R[i] = p[i]; - } - } -#else - if (rawParam) { - if(mUseOldExtrinsic) { // Camera frame (Z forward, Y down, X right) - std::vector R_ = sl_tools::convertRodrigues(zedParam.R); - float* p = R_.data(); - - for (int i = 0; i < 9; i++) { - rightCamInfoMsg->R[i] = p[i]; - } - } else { // ROS frame (X forward, Z up, Y left) - for (int i = 0; i < 9; i++) { - rightCamInfoMsg->R[i] = zedParam.stereo_transform.getRotationMatrix().r[i]; - } - } - } -#endif - - leftCamInfoMsg->P.fill(0.0); - rightCamInfoMsg->P.fill(0.0); - leftCamInfoMsg->P[0] = static_cast(zedParam.left_cam.fx); - leftCamInfoMsg->P[2] = 
static_cast(zedParam.left_cam.cx); - leftCamInfoMsg->P[5] = static_cast(zedParam.left_cam.fy); - leftCamInfoMsg->P[6] = static_cast(zedParam.left_cam.cy); - leftCamInfoMsg->P[10] = 1.0; - // http://docs.ros.org/api/sensor_msgs/html/msg/CameraInfo.html - rightCamInfoMsg->P[3] = static_cast(-1 * zedParam.left_cam.fx * baseline); - rightCamInfoMsg->P[0] = static_cast(zedParam.right_cam.fx); - rightCamInfoMsg->P[2] = static_cast(zedParam.right_cam.cx); - rightCamInfoMsg->P[5] = static_cast(zedParam.right_cam.fy); - rightCamInfoMsg->P[6] = static_cast(zedParam.right_cam.cy); - rightCamInfoMsg->P[10] = 1.0; - leftCamInfoMsg->width = rightCamInfoMsg->width = static_cast(mMatResolVideo.width); - leftCamInfoMsg->height = rightCamInfoMsg->height = static_cast(mMatResolVideo.height); - leftCamInfoMsg->header.frame_id = leftFrameId; - rightCamInfoMsg->header.frame_id = rightFrameId; + sensor_msgs::CameraInfoPtr rightCamInfoMsg, std::string leftFrameId, + std::string rightFrameId, bool rawParam /*= false*/) +{ + sl::CalibrationParameters zedParam; + + if (rawParam) + { + zedParam = zed.getCameraInformation(mMatResol).camera_configuration.calibration_parameters_raw; + } + else + { + zedParam = zed.getCameraInformation(mMatResol).camera_configuration.calibration_parameters; + } + + float baseline = zedParam.getCameraBaseline(); + leftCamInfoMsg->distortion_model = sensor_msgs::distortion_models::PLUMB_BOB; + rightCamInfoMsg->distortion_model = sensor_msgs::distortion_models::PLUMB_BOB; + leftCamInfoMsg->D.resize(5); + rightCamInfoMsg->D.resize(5); + leftCamInfoMsg->D[0] = zedParam.left_cam.disto[0]; // k1 + leftCamInfoMsg->D[1] = zedParam.left_cam.disto[1]; // k2 + leftCamInfoMsg->D[2] = zedParam.left_cam.disto[4]; // k3 + leftCamInfoMsg->D[3] = zedParam.left_cam.disto[2]; // p1 + leftCamInfoMsg->D[4] = zedParam.left_cam.disto[3]; // p2 + rightCamInfoMsg->D[0] = zedParam.right_cam.disto[0]; // k1 + rightCamInfoMsg->D[1] = zedParam.right_cam.disto[1]; // k2 + rightCamInfoMsg->D[2] 
= zedParam.right_cam.disto[4]; // k3 + rightCamInfoMsg->D[3] = zedParam.right_cam.disto[2]; // p1 + rightCamInfoMsg->D[4] = zedParam.right_cam.disto[3]; // p2 + leftCamInfoMsg->K.fill(0.0); + rightCamInfoMsg->K.fill(0.0); + leftCamInfoMsg->K[0] = static_cast(zedParam.left_cam.fx); + leftCamInfoMsg->K[2] = static_cast(zedParam.left_cam.cx); + leftCamInfoMsg->K[4] = static_cast(zedParam.left_cam.fy); + leftCamInfoMsg->K[5] = static_cast(zedParam.left_cam.cy); + leftCamInfoMsg->K[8] = 1.0; + rightCamInfoMsg->K[0] = static_cast(zedParam.right_cam.fx); + rightCamInfoMsg->K[2] = static_cast(zedParam.right_cam.cx); + rightCamInfoMsg->K[4] = static_cast(zedParam.right_cam.fy); + rightCamInfoMsg->K[5] = static_cast(zedParam.right_cam.cy); + rightCamInfoMsg->K[8] = 1.0; + leftCamInfoMsg->R.fill(0.0); + rightCamInfoMsg->R.fill(0.0); + + for (size_t i = 0; i < 3; i++) + { + // identity + rightCamInfoMsg->R[i + i * 3] = 1; + leftCamInfoMsg->R[i + i * 3] = 1; + } + + if (rawParam) + { + for (int i = 0; i < 9; i++) + { + rightCamInfoMsg->R[i] = zedParam.stereo_transform.getRotationMatrix().r[i]; + } + } + + leftCamInfoMsg->P.fill(0.0); + rightCamInfoMsg->P.fill(0.0); + leftCamInfoMsg->P[0] = static_cast(zedParam.left_cam.fx); + leftCamInfoMsg->P[2] = static_cast(zedParam.left_cam.cx); + leftCamInfoMsg->P[5] = static_cast(zedParam.left_cam.fy); + leftCamInfoMsg->P[6] = static_cast(zedParam.left_cam.cy); + leftCamInfoMsg->P[10] = 1.0; + // http://docs.ros.org/api/sensor_msgs/html/msg/CameraInfo.html + rightCamInfoMsg->P[3] = static_cast(-1 * zedParam.left_cam.fx * baseline); + rightCamInfoMsg->P[0] = static_cast(zedParam.right_cam.fx); + rightCamInfoMsg->P[2] = static_cast(zedParam.right_cam.cx); + rightCamInfoMsg->P[5] = static_cast(zedParam.right_cam.fy); + rightCamInfoMsg->P[6] = static_cast(zedParam.right_cam.cy); + rightCamInfoMsg->P[10] = 1.0; + leftCamInfoMsg->width = rightCamInfoMsg->width = static_cast(mMatResol.width); + leftCamInfoMsg->height = rightCamInfoMsg->height = 
static_cast(mMatResol.height); + leftCamInfoMsg->header.frame_id = leftFrameId; + rightCamInfoMsg->header.frame_id = rightFrameId; } void ZEDWrapperNodelet::fillCamDepthInfo(sl::Camera& zed, sensor_msgs::CameraInfoPtr depth_info_msg, - string frame_id ) { - sl::CalibrationParameters zedParam; - -#if ZED_SDK_MAJOR_VERSION==3 && ZED_SDK_MINOR_VERSION<1 - zedParam = zed.getCameraInformation(mMatResolDepth).calibration_parameters; -#else - zedParam = zed.getCameraInformation(mMatResolDepth).camera_configuration.calibration_parameters; -#endif - - float baseline = zedParam.getCameraBaseline(); - depth_info_msg->distortion_model = sensor_msgs::distortion_models::PLUMB_BOB; - depth_info_msg->D.resize(5); - depth_info_msg->D[0] = zedParam.left_cam.disto[0]; // k1 - depth_info_msg->D[1] = zedParam.left_cam.disto[1]; // k2 - depth_info_msg->D[2] = zedParam.left_cam.disto[4]; // k3 - depth_info_msg->D[3] = zedParam.left_cam.disto[2]; // p1 - depth_info_msg->D[4] = zedParam.left_cam.disto[3]; // p2 - depth_info_msg->K.fill(0.0); - depth_info_msg->K[0] = static_cast(zedParam.left_cam.fx); - depth_info_msg->K[2] = static_cast(zedParam.left_cam.cx); - depth_info_msg->K[4] = static_cast(zedParam.left_cam.fy); - depth_info_msg->K[5] = static_cast(zedParam.left_cam.cy); - depth_info_msg->K[8] = 1.0; - depth_info_msg->R.fill(0.0); - - for (size_t i = 0; i < 3; i++) { - // identity - depth_info_msg->R[i + i * 3] = 1; - } - - depth_info_msg->P.fill(0.0); - depth_info_msg->P[0] = static_cast(zedParam.left_cam.fx); - depth_info_msg->P[2] = static_cast(zedParam.left_cam.cx); - depth_info_msg->P[5] = static_cast(zedParam.left_cam.fy); - depth_info_msg->P[6] = static_cast(zedParam.left_cam.cy); - depth_info_msg->P[10] = 1.0; - // http://docs.ros.org/api/sensor_msgs/html/msg/CameraInfo.html - depth_info_msg->width = static_cast(mMatResolDepth.width); - depth_info_msg->height = static_cast(mMatResolDepth.height); - depth_info_msg->header.frame_id = frame_id; + std::string frame_id) +{ + 
sl::CalibrationParameters zedParam; + + zedParam = zed.getCameraInformation(mMatResol).camera_configuration.calibration_parameters; + + depth_info_msg->distortion_model = sensor_msgs::distortion_models::PLUMB_BOB; + depth_info_msg->D.resize(5); + depth_info_msg->D[0] = zedParam.left_cam.disto[0]; // k1 + depth_info_msg->D[1] = zedParam.left_cam.disto[1]; // k2 + depth_info_msg->D[2] = zedParam.left_cam.disto[4]; // k3 + depth_info_msg->D[3] = zedParam.left_cam.disto[2]; // p1 + depth_info_msg->D[4] = zedParam.left_cam.disto[3]; // p2 + depth_info_msg->K.fill(0.0); + depth_info_msg->K[0] = static_cast(zedParam.left_cam.fx); + depth_info_msg->K[2] = static_cast(zedParam.left_cam.cx); + depth_info_msg->K[4] = static_cast(zedParam.left_cam.fy); + depth_info_msg->K[5] = static_cast(zedParam.left_cam.cy); + depth_info_msg->K[8] = 1.0; + depth_info_msg->R.fill(0.0); + + for (size_t i = 0; i < 3; i++) + { + // identity + depth_info_msg->R[i + i * 3] = 1; + } + + depth_info_msg->P.fill(0.0); + depth_info_msg->P[0] = static_cast(zedParam.left_cam.fx); + depth_info_msg->P[2] = static_cast(zedParam.left_cam.cx); + depth_info_msg->P[5] = static_cast(zedParam.left_cam.fy); + depth_info_msg->P[6] = static_cast(zedParam.left_cam.cy); + depth_info_msg->P[10] = 1.0; + // http://docs.ros.org/api/sensor_msgs/html/msg/CameraInfo.html + depth_info_msg->width = static_cast(mMatResol.width); + depth_info_msg->height = static_cast(mMatResol.height); + depth_info_msg->header.frame_id = frame_id; } -void ZEDWrapperNodelet::updateDynamicReconfigure() { - //NODELET_DEBUG_STREAM( "updateDynamicReconfigure MUTEX LOCK"); - mDynParMutex.lock(); - zed_nodelets::ZedConfig config; - - config.auto_exposure_gain = mCamAutoExposure; - config.auto_whitebalance = mCamAutoWB; - config.brightness = mCamBrightness; - config.depth_confidence = mCamDepthConfidence; - config.depth_texture_conf = mCamDepthTextureConf; - config.contrast = mCamContrast; - config.exposure = mCamExposure; - config.gain = mCamGain; - 
config.hue = mCamHue; - config.saturation = mCamSaturation; - config.sharpness = mCamSharpness; - config.gamma = mCamGamma; - config.whitebalance_temperature = mCamWB/100; - config.point_cloud_freq = mPointCloudFreq; - config.pub_frame_rate = mVideoDepthFreq; - mDynParMutex.unlock(); - - mDynServerMutex.lock(); - mDynRecServer->updateConfig(config); - mDynServerMutex.unlock(); - - mUpdateDynParams = false; - - //NODELET_DEBUG_STREAM( "updateDynamicReconfigure MUTEX UNLOCK"); +void ZEDWrapperNodelet::updateDynamicReconfigure() +{ + // NODELET_DEBUG_STREAM( "updateDynamicReconfigure MUTEX LOCK"); + mDynParMutex.lock(); + zed_nodelets::ZedConfig config; + + config.auto_exposure_gain = mCamAutoExposure; + config.auto_whitebalance = mCamAutoWB; + config.brightness = mCamBrightness; + config.depth_confidence = mCamDepthConfidence; + config.depth_texture_conf = mCamDepthTextureConf; + config.contrast = mCamContrast; + config.exposure = mCamExposure; + config.gain = mCamGain; + config.hue = mCamHue; + config.saturation = mCamSaturation; + config.sharpness = mCamSharpness; + config.gamma = mCamGamma; + config.whitebalance_temperature = mCamWB / 100; + config.point_cloud_freq = mPointCloudFreq; + mDynParMutex.unlock(); + + mDynServerMutex.lock(); + mDynRecServer->updateConfig(config); + mDynServerMutex.unlock(); + + mUpdateDynParams = false; + + // NODELET_DEBUG_STREAM( "updateDynamicReconfigure MUTEX UNLOCK"); } -void ZEDWrapperNodelet::dynamicReconfCallback(zed_nodelets::ZedConfig& config, uint32_t level) { - //NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX LOCK"); - mDynParMutex.lock(); - DynParams param = static_cast(level); - - switch (param) { - case DATAPUB_FREQ: - if(config.pub_frame_rate>mCamFrameRate) { - mVideoDepthFreq = mCamFrameRate; - NODELET_WARN_STREAM( "'pub_frame_rate' cannot be major than camera grabbing framerate. 
Set to " << mVideoDepthFreq ); - - mUpdateDynParams = true; - } else { - mVideoDepthFreq = config.pub_frame_rate; - NODELET_INFO("Reconfigure Video and Depth pub. frequency: %g", mVideoDepthFreq); - } - - mVideoDepthTimer.setPeriod( ros::Duration(1.0 / mVideoDepthFreq) ); - - mDynParMutex.unlock(); - //NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); - break; +void ZEDWrapperNodelet::callback_dynamicReconf(zed_nodelets::ZedConfig& config, uint32_t level) +{ + // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX LOCK"); + mDynParMutex.lock(); + DynParams param = static_cast(level); + switch (param) + { case CONFIDENCE: - mCamDepthConfidence = config.depth_confidence; - NODELET_INFO("Reconfigure confidence threshold: %d", mCamDepthConfidence); - mDynParMutex.unlock(); - //NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); - break; + mCamDepthConfidence = config.depth_confidence; + NODELET_INFO("Reconfigure confidence threshold: %d", mCamDepthConfidence); + mDynParMutex.unlock(); + // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); + break; case TEXTURE_CONF: - mCamDepthTextureConf = config.depth_texture_conf; - NODELET_INFO("Reconfigure texture confidence threshold: %d", mCamDepthTextureConf); - mDynParMutex.unlock(); - //NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); - break; + mCamDepthTextureConf = config.depth_texture_conf; + NODELET_INFO("Reconfigure texture confidence threshold: %d", mCamDepthTextureConf); + mDynParMutex.unlock(); + // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); + break; case POINTCLOUD_FREQ: - if(config.point_cloud_freq>mCamFrameRate) { - mPointCloudFreq = mCamFrameRate; - NODELET_WARN_STREAM( "'point_cloud_freq' cannot be major than camera grabbing framerate. Set to " << mPointCloudFreq ); - - mUpdateDynParams = true; - } else { - mPointCloudFreq = config.point_cloud_freq; - NODELET_INFO("Reconfigure point cloud pub. 
frequency: %g", mPointCloudFreq); - } - - mDynParMutex.unlock(); - //NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); - break; + if (config.point_cloud_freq > mPubFrameRate) + { + mPointCloudFreq = mPubFrameRate; + NODELET_WARN_STREAM("'point_cloud_freq' cannot be major than Video/Depth publishing framerate. Set to " + << mPointCloudFreq); + + mUpdateDynParams = true; + } + else + { + mPointCloudFreq = config.point_cloud_freq; + NODELET_INFO("Reconfigure point cloud pub. frequency: %g", mPointCloudFreq); + } + + mDynParMutex.unlock(); + // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); + break; case BRIGHTNESS: - mCamBrightness = config.brightness; - NODELET_INFO("Reconfigure image brightness: %d", mCamBrightness); - mDynParMutex.unlock(); - //NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); - break; + mCamBrightness = config.brightness; + NODELET_INFO("Reconfigure image brightness: %d", mCamBrightness); + mDynParMutex.unlock(); + // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); + break; case CONTRAST: - mCamContrast = config.contrast; - NODELET_INFO("Reconfigure image contrast: %d", mCamContrast); - mDynParMutex.unlock(); - //NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); - break; + mCamContrast = config.contrast; + NODELET_INFO("Reconfigure image contrast: %d", mCamContrast); + mDynParMutex.unlock(); + // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); + break; case HUE: - mCamHue = config.hue; - NODELET_INFO("Reconfigure image hue: %d", mCamHue); - mDynParMutex.unlock(); - //NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); - break; + mCamHue = config.hue; + NODELET_INFO("Reconfigure image hue: %d", mCamHue); + mDynParMutex.unlock(); + // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); + break; case SATURATION: - mCamSaturation = config.saturation; - NODELET_INFO("Reconfigure image saturation: %d", mCamSaturation); - mDynParMutex.unlock(); - 
//NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); - break; + mCamSaturation = config.saturation; + NODELET_INFO("Reconfigure image saturation: %d", mCamSaturation); + mDynParMutex.unlock(); + // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); + break; case SHARPNESS: - mCamSharpness = config.sharpness; - NODELET_INFO("Reconfigure image sharpness: %d", mCamSharpness); - mDynParMutex.unlock(); - //NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); - break; + mCamSharpness = config.sharpness; + NODELET_INFO("Reconfigure image sharpness: %d", mCamSharpness); + mDynParMutex.unlock(); + // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); + break; case GAMMA: -#if (ZED_SDK_MAJOR_VERSION==3 && ZED_SDK_MINOR_VERSION>=1) - mCamGamma = config.gamma; - NODELET_INFO("Reconfigure image gamma: %d", mCamGamma); - mDynParMutex.unlock(); - //NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); -#else - NODELET_DEBUG_STREAM( "Gamma Control is not available for SDK older that v3.1"); - mDynParMutex.unlock(); -#endif - break; + mCamGamma = config.gamma; + NODELET_INFO("Reconfigure image gamma: %d", mCamGamma); + mDynParMutex.unlock(); + // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); + break; case AUTO_EXP_GAIN: + if (config.auto_exposure_gain != mCamAutoExposure) + { mCamAutoExposure = config.auto_exposure_gain; - NODELET_INFO_STREAM("Reconfigure auto exposure/gain: " << mCamAutoExposure?"ENABLED":"DISABLED"); - if( !mCamAutoExposure ) { - mZed.setCameraSettings(sl::VIDEO_SETTINGS::AEC_AGC, 0 ); - mTriggerAutoExposure = false; - } else { - mTriggerAutoExposure = true; - } - mDynParMutex.unlock(); - //NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); - break; + NODELET_INFO_STREAM("Reconfigure auto exposure/gain: " << (mCamAutoExposure ? 
"ENABLED" : "DISABLED")); + } + mDynParMutex.unlock(); + // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); + break; case GAIN: - mCamGain = config.gain; - if(mCamAutoExposure) { - NODELET_WARN("Reconfigure gain has no effect if 'auto_exposure_gain' is enabled"); - } else { - NODELET_INFO("Reconfigure gain: %d", mCamGain); - } - mDynParMutex.unlock(); - //NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); - break; + mCamGain = config.gain; + if (mCamAutoExposure) + { + NODELET_WARN("Reconfigure gain has no effect if 'auto_exposure_gain' is enabled"); + } + else + { + NODELET_INFO("Reconfigure gain: %d", mCamGain); + } + mDynParMutex.unlock(); + // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); + break; case EXPOSURE: - mCamExposure = config.exposure; - if(mCamAutoExposure) { - NODELET_WARN("Reconfigure exposure has no effect if 'auto_exposure_gain' is enabled"); - } else { - NODELET_INFO("Reconfigure exposure: %d", mCamExposure); - } - mDynParMutex.unlock(); - //NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); - break; + mCamExposure = config.exposure; + if (mCamAutoExposure) + { + NODELET_WARN("Reconfigure exposure has no effect if 'auto_exposure_gain' is enabled"); + } + else + { + NODELET_INFO("Reconfigure exposure: %d", mCamExposure); + } + mDynParMutex.unlock(); + // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); + break; case AUTO_WB: + if (config.auto_whitebalance != mCamAutoWB) + { mCamAutoWB = config.auto_whitebalance; - NODELET_INFO_STREAM("Reconfigure auto white balance: " << mCamAutoWB?"ENABLED":"DISABLED"); - if( !mCamAutoWB ) { - mZed.setCameraSettings(sl::VIDEO_SETTINGS::WHITEBALANCE_AUTO, 0 ); - mTriggerAutoWB = false; - } else { - mTriggerAutoWB = true; - } - mDynParMutex.unlock(); - //NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); - break; + NODELET_INFO_STREAM("Reconfigure auto white balance: " << (mCamAutoWB ? 
"ENABLED" : "DISABLED")); + mTriggerAutoWB = true; + } + else + { + mTriggerAutoWB = false; + } + mDynParMutex.unlock(); + // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); + break; case WB_TEMP: - mCamWB = config.whitebalance_temperature*100; - if(mCamAutoWB) { - NODELET_WARN("Reconfigure white balance temperature has no effect if 'auto_whitebalance' is enabled"); - } else { - NODELET_INFO("Reconfigure white balance temperature: %d", mCamWB); - } - mDynParMutex.unlock(); - //NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); - break; + mCamWB = config.whitebalance_temperature * 100; + if (mCamAutoWB) + { + NODELET_WARN("Reconfigure white balance temperature has no effect if 'auto_whitebalance' is enabled"); + } + else + { + NODELET_INFO("Reconfigure white balance temperature: %d", mCamWB); + } + mDynParMutex.unlock(); + // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); + break; default: - NODELET_DEBUG_STREAM( "dynamicReconfCallback Unknown param: " << level); - mDynParMutex.unlock(); - //NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); - } -} - -void ZEDWrapperNodelet::pubVideoDepthCallback(const ros::TimerEvent& e) { - static sl::Timestamp lastTs = 0; - - uint32_t rgbSubnumber = mPubRgb.getNumSubscribers(); - uint32_t rgbRawSubnumber = mPubRawRgb.getNumSubscribers(); - uint32_t leftSubnumber = mPubLeft.getNumSubscribers(); - uint32_t leftRawSubnumber = mPubRawLeft.getNumSubscribers(); - uint32_t rightSubnumber = mPubRight.getNumSubscribers(); - uint32_t rightRawSubnumber = mPubRawRight.getNumSubscribers(); - uint32_t rgbGraySubnumber = mPubRgbGray.getNumSubscribers(); - uint32_t rgbGrayRawSubnumber = mPubRawRgbGray.getNumSubscribers(); - uint32_t leftGraySubnumber = mPubLeftGray.getNumSubscribers(); - uint32_t leftGrayRawSubnumber = mPubRawLeftGray.getNumSubscribers(); - uint32_t rightGraySubnumber = mPubRightGray.getNumSubscribers(); - uint32_t rightGrayRawSubnumber = mPubRawRightGray.getNumSubscribers(); - 
uint32_t depthSubnumber = mPubDepth.getNumSubscribers(); - uint32_t disparitySubnumber = mPubDisparity.getNumSubscribers(); - uint32_t confMapSubnumber = mPubConfMap.getNumSubscribers(); - uint32_t stereoSubNumber = mPubStereo.getNumSubscribers(); - uint32_t stereoRawSubNumber = mPubRawStereo.getNumSubscribers(); - - if(rgbSubnumber+rgbRawSubnumber+ - leftSubnumber+leftRawSubnumber+ - rightSubnumber+rightRawSubnumber+ - rgbGraySubnumber+rgbGrayRawSubnumber+ - leftGraySubnumber+leftGrayRawSubnumber+ - rightGraySubnumber+rightGrayRawSubnumber+ - depthSubnumber+disparitySubnumber+confMapSubnumber+ - stereoSubNumber+stereoRawSubNumber == 0) { - - mPublishingData = false; - lastTs = 0; - return; - } - - mPublishingData = true; - - sl::Mat leftZEDMat, rightZEDMat, leftGrayZEDMat, rightGrayZEDMat, - depthZEDMat, disparityZEDMat, confMapZEDMat; - - - - // Retrieve RGBA Left image and use Timestamp to check if image is new - mZed.retrieveImage(leftZEDMat, sl::VIEW::LEFT, sl::MEM::CPU, mMatResolVideo); - if(leftZEDMat.timestamp==lastTs) { - return; // No new image! 
- } - if(lastTs.data_ns!=0) { - double period_sec = static_cast(leftZEDMat.timestamp.data_ns - lastTs.data_ns)/1e9; - //NODELET_DEBUG_STREAM( "PUBLISHING PERIOD: " << period_sec << " sec @" << 1./period_sec << " Hz") ; - - mVideoDepthPeriodMean_sec->addValue(period_sec); - //NODELET_DEBUG_STREAM( "MEAN PUBLISHING PERIOD: " << mVideoDepthPeriodMean_sec->getMean() << " sec @" << 1./mVideoDepthPeriodMean_sec->getMean() << " Hz") ; - } - lastTs = leftZEDMat.timestamp; - - // Publish the left == rgb image if someone has subscribed to - if (leftSubnumber > 0 || rgbSubnumber > 0) { - if (leftSubnumber > 0) { - sensor_msgs::ImagePtr leftImgMsg = boost::make_shared(); - - publishImage(leftImgMsg, leftZEDMat, mPubLeft, mLeftCamInfoMsg, mLeftCamOptFrameId, mFrameTimestamp); - } - - if (rgbSubnumber > 0) { - sensor_msgs::ImagePtr rgbImgMsg = boost::make_shared(); - - // rgb is the left image - publishImage(rgbImgMsg, leftZEDMat, mPubRgb, mRgbCamInfoMsg, mDepthOptFrameId, mFrameTimestamp); - - } - } - - // Publish the left == rgb GRAY image if someone has subscribed to - if (leftGraySubnumber > 0 || rgbGraySubnumber > 0) { - - // Retrieve RGBA Left image - mZed.retrieveImage(leftGrayZEDMat, sl::VIEW::LEFT_GRAY, sl::MEM::CPU, mMatResolVideo); - - if (leftGraySubnumber > 0) { - sensor_msgs::ImagePtr leftGrayImgMsg = boost::make_shared(); - - publishImage(leftGrayImgMsg, leftGrayZEDMat, mPubLeftGray, mLeftCamInfoMsg, mLeftCamOptFrameId, - mFrameTimestamp); - } - - if (rgbGraySubnumber > 0) { - sensor_msgs::ImagePtr rgbGrayImgMsg = boost::make_shared(); - - publishImage(rgbGrayImgMsg, leftGrayZEDMat, mPubRgbGray, mRgbCamInfoMsg, mDepthOptFrameId, - mFrameTimestamp); // rgb is the left image - } - } - - // Publish the left_raw == rgb_raw image if someone has subscribed to - if (leftRawSubnumber > 0 || rgbRawSubnumber > 0) { - - // Retrieve RGBA Left image - mZed.retrieveImage(leftZEDMat, sl::VIEW::LEFT_UNRECTIFIED, sl::MEM::CPU, mMatResolVideo); - - if (leftRawSubnumber > 0) { - 
sensor_msgs::ImagePtr rawLeftImgMsg = boost::make_shared(); - - publishImage(rawLeftImgMsg, leftZEDMat, mPubRawLeft, mLeftCamInfoRawMsg, mLeftCamOptFrameId, - mFrameTimestamp); - } - - if (rgbRawSubnumber > 0) { - sensor_msgs::ImagePtr rawRgbImgMsg = boost::make_shared(); - - publishImage(rawRgbImgMsg, leftZEDMat, mPubRawRgb, mRgbCamInfoRawMsg, mDepthOptFrameId, - mFrameTimestamp); - } - } - - // Publish the left_raw == rgb_raw GRAY image if someone has subscribed to - if (leftGrayRawSubnumber > 0 || rgbGrayRawSubnumber > 0) { - - // Retrieve RGBA Left image - mZed.retrieveImage(leftGrayZEDMat, sl::VIEW::LEFT_UNRECTIFIED_GRAY, sl::MEM::CPU, mMatResolVideo); - - if (leftGrayRawSubnumber > 0) { - sensor_msgs::ImagePtr rawLeftGrayImgMsg = boost::make_shared(); - - publishImage(rawLeftGrayImgMsg, leftGrayZEDMat, mPubRawLeftGray, mLeftCamInfoRawMsg, mLeftCamOptFrameId, - mFrameTimestamp); - } - - if (rgbGrayRawSubnumber > 0) { - sensor_msgs::ImagePtr rawRgbGrayImgMsg = boost::make_shared(); - - publishImage(rawRgbGrayImgMsg, leftGrayZEDMat, mPubRawRgbGray, mRgbCamInfoRawMsg, mDepthOptFrameId, - mFrameTimestamp); - } - } - - // Publish the right image if someone has subscribed to - if (rightSubnumber > 0) { - - // Retrieve RGBA Right image - mZed.retrieveImage(rightZEDMat, sl::VIEW::RIGHT, sl::MEM::CPU, mMatResolVideo); - - sensor_msgs::ImagePtr rightImgMsg = boost::make_shared(); - - publishImage(rightImgMsg, rightZEDMat, mPubRight, mRightCamInfoMsg, mRightCamOptFrameId, mFrameTimestamp); - } - - // Publish the right image GRAY if someone has subscribed to - if (rightGraySubnumber > 0) { - - // Retrieve RGBA Right image - mZed.retrieveImage(rightGrayZEDMat, sl::VIEW::RIGHT_GRAY, sl::MEM::CPU, mMatResolVideo); - sensor_msgs::ImagePtr rightGrayImgMsg = boost::make_shared(); - - publishImage(rightGrayImgMsg, rightGrayZEDMat, mPubRightGray, mRightCamInfoMsg, mRightCamOptFrameId, mFrameTimestamp); - } - - // Publish the right raw image if someone has subscribed to - if 
(rightRawSubnumber > 0) { - - // Retrieve RGBA Right image - mZed.retrieveImage(rightZEDMat, sl::VIEW::RIGHT_UNRECTIFIED, sl::MEM::CPU, mMatResolVideo); - - sensor_msgs::ImagePtr rawRightImgMsg = boost::make_shared(); - - publishImage(rawRightImgMsg, rightZEDMat, mPubRawRight, mRightCamInfoRawMsg, mRightCamOptFrameId, mFrameTimestamp); - } - - // Publish the right raw image GRAY if someone has subscribed to - if (rightGrayRawSubnumber > 0) { - // Retrieve RGBA Right image - mZed.retrieveImage(rightGrayZEDMat, sl::VIEW::RIGHT_UNRECTIFIED_GRAY, sl::MEM::CPU, mMatResolVideo); - - sensor_msgs::ImagePtr rawRightGrayImgMsg = boost::make_shared(); - - publishImage(rawRightGrayImgMsg, rightGrayZEDMat, mPubRawRightGray, mRightCamInfoRawMsg, mRightCamOptFrameId, mFrameTimestamp); - } - - // Stereo couple side-by-side - if (stereoSubNumber > 0) { - - // Retrieve RGBA Right image - mZed.retrieveImage(rightZEDMat, sl::VIEW::RIGHT, sl::MEM::CPU, mMatResolVideo); - mZed.retrieveImage(leftZEDMat, sl::VIEW::LEFT, sl::MEM::CPU, mMatResolVideo); - - sensor_msgs::ImagePtr stereoImgMsg = boost::make_shared(); - - sl_tools::imagesToROSmsg(stereoImgMsg, leftZEDMat, rightZEDMat, mCameraFrameId, mFrameTimestamp); - - mPubStereo.publish(stereoImgMsg); - } - - // Stereo RAW couple side-by-side - if (stereoRawSubNumber > 0) { - - // Retrieve RGBA Right image - mZed.retrieveImage(rightZEDMat, sl::VIEW::RIGHT_UNRECTIFIED, sl::MEM::CPU, mMatResolVideo); - mZed.retrieveImage(leftZEDMat, sl::VIEW::LEFT_UNRECTIFIED, sl::MEM::CPU, mMatResolVideo); - - sensor_msgs::ImagePtr rawStereoImgMsg = boost::make_shared(); - - sl_tools::imagesToROSmsg(rawStereoImgMsg, leftZEDMat, rightZEDMat, mCameraFrameId, mFrameTimestamp); - - mPubRawStereo.publish(rawStereoImgMsg); - } - - // Publish the depth image if someone has subscribed to - if (depthSubnumber > 0 || disparitySubnumber > 0) { - - mZed.retrieveMeasure(depthZEDMat, sl::MEASURE::DEPTH, sl::MEM::CPU, mMatResolDepth); - - sensor_msgs::ImagePtr depthImgMsg 
= boost::make_shared(); - - publishDepth(depthImgMsg, depthZEDMat, mFrameTimestamp); // in meters - } - - // Publish the disparity image if someone has subscribed to - if (disparitySubnumber > 0) { - - mZed.retrieveMeasure(disparityZEDMat, sl::MEASURE::DISPARITY, sl::MEM::CPU, mMatResolDepth); - publishDisparity(disparityZEDMat, mFrameTimestamp); - } - - // Publish the confidence map if someone has subscribed to - if (confMapSubnumber > 0) { - - mZed.retrieveMeasure(confMapZEDMat, sl::MEASURE::CONFIDENCE, sl::MEM::CPU, mMatResolDepth); - - sensor_msgs::ImagePtr confMapMsg = boost::make_shared(); - - sl_tools::imageToROSmsg(confMapMsg, confMapZEDMat, mConfidenceOptFrameId, mFrameTimestamp); - - mPubConfMap.publish(confMapMsg); - } + NODELET_DEBUG_STREAM("dynamicReconfCallback Unknown param: " << level); + mDynParMutex.unlock(); + // NODELET_DEBUG_STREAM( "dynamicReconfCallback MUTEX UNLOCK"); + } } -void ZEDWrapperNodelet::pubPathCallback(const ros::TimerEvent& e) { - uint32_t mapPathSub = mPubMapPath.getNumSubscribers(); - uint32_t odomPathSub = mPubOdomPath.getNumSubscribers(); - - geometry_msgs::PoseStamped odomPose; - geometry_msgs::PoseStamped mapPose; - - odomPose.header.stamp = mFrameTimestamp; - odomPose.header.frame_id = mMapFrameId; // frame - // conversion from Tranform to message - geometry_msgs::Transform base2odom = tf2::toMsg(mOdom2BaseTransf); - // Add all value in Pose message - odomPose.pose.position.x = base2odom.translation.x; - odomPose.pose.position.y = base2odom.translation.y; - odomPose.pose.position.z = base2odom.translation.z; - odomPose.pose.orientation.x = base2odom.rotation.x; - odomPose.pose.orientation.y = base2odom.rotation.y; - odomPose.pose.orientation.z = base2odom.rotation.z; - odomPose.pose.orientation.w = base2odom.rotation.w; - - mapPose.header.stamp = mFrameTimestamp; - mapPose.header.frame_id = mMapFrameId; // frame - // conversion from Tranform to message - geometry_msgs::Transform base2map = tf2::toMsg(mMap2BaseTransf); - 
// Add all value in Pose message - mapPose.pose.position.x = base2map.translation.x; - mapPose.pose.position.y = base2map.translation.y; - mapPose.pose.position.z = base2map.translation.z; - mapPose.pose.orientation.x = base2map.rotation.x; - mapPose.pose.orientation.y = base2map.rotation.y; - mapPose.pose.orientation.z = base2map.rotation.z; - mapPose.pose.orientation.w = base2map.rotation.w; - - // Circular vector - if (mPathMaxCount != -1) { - if (mOdomPath.size() == mPathMaxCount) { - NODELET_DEBUG("Path vectors full: rotating "); - std::rotate(mOdomPath.begin(), mOdomPath.begin() + 1, mOdomPath.end()); - std::rotate(mMapPath.begin(), mMapPath.begin() + 1, mMapPath.end()); - - mMapPath[mPathMaxCount - 1] = mapPose; - mOdomPath[mPathMaxCount - 1] = odomPose; - } else { - //NODELET_DEBUG_STREAM("Path vectors adding last available poses"); - mMapPath.push_back(mapPose); - mOdomPath.push_back(odomPose); - } - } else { - //NODELET_DEBUG_STREAM("No limit path vectors, adding last available poses"); - mMapPath.push_back(mapPose); - mOdomPath.push_back(odomPose); - } - - if (mapPathSub > 0) { - nav_msgs::PathPtr mapPath = boost::make_shared(); - mapPath->header.frame_id = mMapFrameId; - mapPath->header.stamp = mFrameTimestamp; - mapPath->poses = mMapPath; - - mPubMapPath.publish(mapPath); - } - - if (odomPathSub > 0) { - nav_msgs::PathPtr odomPath = boost::make_shared(); - odomPath->header.frame_id = mMapFrameId; - odomPath->header.stamp = mFrameTimestamp; - odomPath->poses = mOdomPath; - - mPubOdomPath.publish(odomPath); - } +void ZEDWrapperNodelet::pubVideoDepth() +{ + static sl::Timestamp lastZedTs = 0; // Used to calculate stable publish frequency + + uint32_t rgbSubnumber = mPubRgb.getNumSubscribers(); + uint32_t rgbRawSubnumber = mPubRawRgb.getNumSubscribers(); + uint32_t leftSubnumber = mPubLeft.getNumSubscribers(); + uint32_t leftRawSubnumber = mPubRawLeft.getNumSubscribers(); + uint32_t rightSubnumber = mPubRight.getNumSubscribers(); + uint32_t 
rightRawSubnumber = mPubRawRight.getNumSubscribers(); + uint32_t rgbGraySubnumber = mPubRgbGray.getNumSubscribers(); + uint32_t rgbGrayRawSubnumber = mPubRawRgbGray.getNumSubscribers(); + uint32_t leftGraySubnumber = mPubLeftGray.getNumSubscribers(); + uint32_t leftGrayRawSubnumber = mPubRawLeftGray.getNumSubscribers(); + uint32_t rightGraySubnumber = mPubRightGray.getNumSubscribers(); + uint32_t rightGrayRawSubnumber = mPubRawRightGray.getNumSubscribers(); + uint32_t stereoSubNumber = mPubStereo.getNumSubscribers(); + uint32_t stereoRawSubNumber = mPubRawStereo.getNumSubscribers(); + uint32_t depthSubnumber = 0; + uint32_t disparitySubnumber = 0; + uint32_t confMapSubnumber = 0; + if (!mDepthDisabled) + { + depthSubnumber = mPubDepth.getNumSubscribers(); + disparitySubnumber = mPubDisparity.getNumSubscribers(); + confMapSubnumber = mPubConfMap.getNumSubscribers(); + } + + uint32_t tot_sub = rgbSubnumber + rgbRawSubnumber + leftSubnumber + leftRawSubnumber + rightSubnumber + + rightRawSubnumber + rgbGraySubnumber + rgbGrayRawSubnumber + leftGraySubnumber + + leftGrayRawSubnumber + rightGraySubnumber + rightGrayRawSubnumber + depthSubnumber + + disparitySubnumber + confMapSubnumber + stereoSubNumber + stereoRawSubNumber; + + bool retrieved = false; + + sl::Mat mat_left, mat_left_raw; + sl::Mat mat_right, mat_right_raw; + sl::Mat mat_left_gray, mat_left_raw_gray; + sl::Mat mat_right_gray, mat_right_raw_gray; + sl::Mat mat_depth, mat_disp, mat_conf; + + sl::Timestamp ts_rgb = 0; // used to check RGB/Depth sync + sl::Timestamp ts_depth; // used to check RGB/Depth sync + sl::Timestamp grab_ts = 0; + + // ----> Retrieve all required image data + if (rgbSubnumber + leftSubnumber + stereoSubNumber > 0) + { + mZed.retrieveImage(mat_left, sl::VIEW::LEFT, sl::MEM::CPU, mMatResol); + retrieved = true; + ts_rgb = mat_left.timestamp; + grab_ts = mat_left.timestamp; + } + if (rgbRawSubnumber + leftRawSubnumber + stereoRawSubNumber > 0) + { + mZed.retrieveImage(mat_left_raw, 
sl::VIEW::LEFT_UNRECTIFIED, sl::MEM::CPU, mMatResol); + retrieved = true; + grab_ts = mat_left_raw.timestamp; + } + if (rightSubnumber + stereoSubNumber > 0) + { + mZed.retrieveImage(mat_right, sl::VIEW::RIGHT, sl::MEM::CPU, mMatResol); + retrieved = true; + grab_ts = mat_right.timestamp; + } + if (rightRawSubnumber + stereoRawSubNumber > 0) + { + mZed.retrieveImage(mat_right_raw, sl::VIEW::RIGHT_UNRECTIFIED, sl::MEM::CPU, mMatResol); + retrieved = true; + grab_ts = mat_right_raw.timestamp; + } + if (rgbGraySubnumber + leftGraySubnumber > 0) + { + mZed.retrieveImage(mat_left_gray, sl::VIEW::LEFT_GRAY, sl::MEM::CPU, mMatResol); + retrieved = true; + grab_ts = mat_left_gray.timestamp; + } + if (rgbGrayRawSubnumber + leftGrayRawSubnumber > 0) + { + mZed.retrieveImage(mat_left_raw_gray, sl::VIEW::LEFT_UNRECTIFIED_GRAY, sl::MEM::CPU, mMatResol); + retrieved = true; + grab_ts = mat_left_raw_gray.timestamp; + } + if (rightGraySubnumber > 0) + { + mZed.retrieveImage(mat_right_gray, sl::VIEW::RIGHT_GRAY, sl::MEM::CPU, mMatResol); + retrieved = true; + grab_ts = mat_right_gray.timestamp; + } + if (rightGrayRawSubnumber > 0) + { + mZed.retrieveImage(mat_right_raw_gray, sl::VIEW::RIGHT_UNRECTIFIED_GRAY, sl::MEM::CPU, mMatResol); + retrieved = true; + grab_ts = mat_right_raw_gray.timestamp; + } + if (depthSubnumber > 0) + { + if (!mOpenniDepthMode) + { + mZed.retrieveMeasure(mat_depth, sl::MEASURE::DEPTH, sl::MEM::CPU, mMatResol); + } + else + { + mZed.retrieveMeasure(mat_depth, sl::MEASURE::DEPTH_U16_MM, sl::MEM::CPU, mMatResol); + } + retrieved = true; + grab_ts = mat_depth.timestamp; + + ts_depth = mat_depth.timestamp; + + if (ts_rgb.data_ns != 0 && (ts_depth.data_ns != ts_rgb.data_ns)) + { + NODELET_WARN_STREAM("!!!!! DEPTH/RGB ASYNC !!!!! 
- Delta: " << 1e-9 * static_cast(ts_depth - ts_rgb) + << " sec"); + } + } + if (disparitySubnumber > 0) + { + mZed.retrieveMeasure(mat_disp, sl::MEASURE::DISPARITY, sl::MEM::CPU, mMatResol); + retrieved = true; + grab_ts = mat_disp.timestamp; + } + if (confMapSubnumber > 0) + { + mZed.retrieveMeasure(mat_conf, sl::MEASURE::CONFIDENCE, sl::MEM::CPU, mMatResol); + retrieved = true; + grab_ts = mat_conf.timestamp; + } + // <---- Retrieve all required image data + + // ----> Data ROS timestamp + ros::Time stamp = sl_tools::slTime2Ros(grab_ts); + if (mSvoMode) + { + stamp = ros::Time::now(); + } + // <---- Data ROS timestamp + + // ----> Publish sensor data if sync is required by user or SVO + if (mZedRealCamModel != sl::MODEL::ZED) + { + if (mSensTimestampSync) + { + // NODELET_INFO_STREAM("tot_sub: " << tot_sub << " - retrieved: " << retrieved << " - + // grab_ts.data_ns!=lastZedTs.data_ns: " << (grab_ts.data_ns!=lastZedTs.data_ns)); + if (tot_sub > 0 && retrieved && (grab_ts.data_ns != lastZedTs.data_ns)) + { + // NODELET_INFO("CALLBACK"); + publishSensData(stamp); + } + } + else if (mSvoMode) + { + publishSensData(stamp); + } + } + // <---- Publish sensor data if sync is required by user or SVO + + if (!retrieved) + { + mPublishingData = false; + lastZedTs = 0; + return; + } + mPublishingData = true; + + // ----> Check if a grab has been done before publishing the same images + if (grab_ts.data_ns == lastZedTs.data_ns) + { + // Data not updated by a grab calling in the grab thread + return; + } + if (lastZedTs.data_ns != 0) + { + double period_sec = static_cast(grab_ts.data_ns - lastZedTs.data_ns) / 1e9; + // NODELET_DEBUG_STREAM( "PUBLISHING PERIOD: " << period_sec << " sec @" << 1./period_sec << " Hz") ; + + mVideoDepthPeriodMean_sec->addValue(period_sec); + // NODELET_DEBUG_STREAM( "MEAN PUBLISHING PERIOD: " << mVideoDepthPeriodMean_sec->getMean() << " sec @" + // << 1./mVideoDepthPeriodMean_sec->getMean() << " Hz") ; + } + lastZedTs = grab_ts; + // <---- Check 
if a grab has been done before publishing the same images + + // Publish the left = rgb image if someone has subscribed to + if (leftSubnumber > 0) + { + sensor_msgs::ImagePtr leftImgMsg = boost::make_shared(); + publishImage(leftImgMsg, mat_left, mPubLeft, mLeftCamInfoMsg, mLeftCamOptFrameId, stamp); + } + if (rgbSubnumber > 0) + { + sensor_msgs::ImagePtr rgbImgMsg = boost::make_shared(); + publishImage(rgbImgMsg, mat_left, mPubRgb, mRgbCamInfoMsg, mDepthOptFrameId, stamp); + } + + // Publish the left = rgb GRAY image if someone has subscribed to + if (leftGraySubnumber > 0) + { + sensor_msgs::ImagePtr leftGrayImgMsg = boost::make_shared(); + publishImage(leftGrayImgMsg, mat_left_gray, mPubLeftGray, mLeftCamInfoMsg, mLeftCamOptFrameId, stamp); + } + if (rgbGraySubnumber > 0) + { + sensor_msgs::ImagePtr rgbGrayImgMsg = boost::make_shared(); + publishImage(rgbGrayImgMsg, mat_left_gray, mPubRgbGray, mRgbCamInfoMsg, mDepthOptFrameId, stamp); + } + + // Publish the left_raw = rgb_raw image if someone has subscribed to + if (leftRawSubnumber > 0) + { + sensor_msgs::ImagePtr rawLeftImgMsg = boost::make_shared(); + publishImage(rawLeftImgMsg, mat_left_raw, mPubRawLeft, mLeftCamInfoRawMsg, mLeftCamOptFrameId, stamp); + } + if (rgbRawSubnumber > 0) + { + sensor_msgs::ImagePtr rawRgbImgMsg = boost::make_shared(); + publishImage(rawRgbImgMsg, mat_left_raw, mPubRawRgb, mRgbCamInfoRawMsg, mDepthOptFrameId, stamp); + } + + // Publish the left_raw == rgb_raw GRAY image if someone has subscribed to + if (leftGrayRawSubnumber > 0) + { + sensor_msgs::ImagePtr rawLeftGrayImgMsg = boost::make_shared(); + + publishImage(rawLeftGrayImgMsg, mat_left_raw_gray, mPubRawLeftGray, mLeftCamInfoRawMsg, mLeftCamOptFrameId, stamp); + } + if (rgbGrayRawSubnumber > 0) + { + sensor_msgs::ImagePtr rawRgbGrayImgMsg = boost::make_shared(); + publishImage(rawRgbGrayImgMsg, mat_left_raw_gray, mPubRawRgbGray, mRgbCamInfoRawMsg, mDepthOptFrameId, stamp); + } + + // Publish the right image if someone has 
subscribed to + if (rightSubnumber > 0) + { + sensor_msgs::ImagePtr rightImgMsg = boost::make_shared(); + publishImage(rightImgMsg, mat_right, mPubRight, mRightCamInfoMsg, mRightCamOptFrameId, stamp); + } + + // Publish the right image GRAY if someone has subscribed to + if (rightGraySubnumber > 0) + { + sensor_msgs::ImagePtr rightGrayImgMsg = boost::make_shared(); + publishImage(rightGrayImgMsg, mat_right_gray, mPubRightGray, mRightCamInfoMsg, mRightCamOptFrameId, stamp); + } + + // Publish the right raw image if someone has subscribed to + if (rightRawSubnumber > 0) + { + sensor_msgs::ImagePtr rawRightImgMsg = boost::make_shared(); + publishImage(rawRightImgMsg, mat_right_raw, mPubRawRight, mRightCamInfoRawMsg, mRightCamOptFrameId, stamp); + } + + // Publish the right raw image GRAY if someone has subscribed to + if (rightGrayRawSubnumber > 0) + { + sensor_msgs::ImagePtr rawRightGrayImgMsg = boost::make_shared(); + publishImage(rawRightGrayImgMsg, mat_right_raw_gray, mPubRawRightGray, mRightCamInfoRawMsg, mRightCamOptFrameId, + stamp); + } + + // Stereo couple side-by-side + if (stereoSubNumber > 0) + { + sensor_msgs::ImagePtr stereoImgMsg = boost::make_shared(); + sl_tools::imagesToROSmsg(stereoImgMsg, mat_left, mat_right, mCameraFrameId, stamp); + mPubStereo.publish(stereoImgMsg); + } + + // Stereo RAW couple side-by-side + if (stereoRawSubNumber > 0) + { + sensor_msgs::ImagePtr rawStereoImgMsg = boost::make_shared(); + sl_tools::imagesToROSmsg(rawStereoImgMsg, mat_left_raw, mat_right_raw, mCameraFrameId, stamp); + mPubRawStereo.publish(rawStereoImgMsg); + } + + // Publish the depth image if someone has subscribed to + if (depthSubnumber > 0) + { + sensor_msgs::ImagePtr depthImgMsg = boost::make_shared(); + publishDepth(depthImgMsg, mat_depth, stamp); + } + + // Publish the disparity image if someone has subscribed to + if (disparitySubnumber > 0) + { + publishDisparity(mat_disp, stamp); + } + + // Publish the confidence map if someone has subscribed to + if 
(confMapSubnumber > 0) + { + sensor_msgs::ImagePtr confMapMsg = boost::make_shared(); + sl_tools::imageToROSmsg(confMapMsg, mat_conf, mConfidenceOptFrameId, stamp); + mPubConfMap.publish(confMapMsg); + } } -void ZEDWrapperNodelet::pubSensCallback(const ros::TimerEvent& e) { - std::lock_guard lock(mCloseZedMutex); - - if (!mZed.isOpened()) { - return; - } - - uint32_t imu_SubNumber = mPubImu.getNumSubscribers(); - uint32_t imu_RawSubNumber = mPubImuRaw.getNumSubscribers(); - uint32_t imu_TempSubNumber = 0; - uint32_t imu_MagSubNumber = 0; - //uint32_t imu_MagRawSubNumber = 0; - uint32_t pressSubNumber = 0; - uint32_t tempLeftSubNumber = 0; - uint32_t tempRightSubNumber = 0; - - if( mZedRealCamModel == sl::MODEL::ZED2 ) { - imu_TempSubNumber = mPubImuTemp.getNumSubscribers(); - imu_MagSubNumber = mPubImuMag.getNumSubscribers(); - //imu_MagRawSubNumber = mPubImuMagRaw.getNumSubscribers(); - pressSubNumber = mPubPressure.getNumSubscribers(); - tempLeftSubNumber = mPubTempL.getNumSubscribers(); - tempRightSubNumber = mPubTempR.getNumSubscribers(); - } - - int totSub = imu_SubNumber + imu_RawSubNumber + imu_TempSubNumber + imu_MagSubNumber + /*imu_MagRawSubNumber +*/ - pressSubNumber + tempLeftSubNumber + tempRightSubNumber; - - ros::Time ts_imu; - ros::Time ts_baro; - ros::Time ts_mag; - ros::Time ts_mag_raw; - - static ros::Time lastTs_baro = ros::Time(); - static ros::Time lastT_mag = ros::Time(); - //static ros::Time lastT_mag_raw = ros::Time(); - - sl::SensorsData sens_data; - - if(mSvoMode) { - if( mZed.getSensorsData(sens_data, sl::TIME_REFERENCE::IMAGE) != sl::ERROR_CODE::SUCCESS ) - return; - } else { - if ( mSensTimestampSync && mGrabActive) { - if( mZed.getSensorsData(sens_data, sl::TIME_REFERENCE::IMAGE) != sl::ERROR_CODE::SUCCESS ) - return; - } else if ( !mSensTimestampSync ) { - if( mZed.getSensorsData(sens_data, sl::TIME_REFERENCE::CURRENT) != sl::ERROR_CODE::SUCCESS ) - return; - } else { - return; - } - } - - if (mSvoMode) { - ts_imu = ros::Time::now(); 
- ts_baro = ros::Time::now(); - ts_mag = ros::Time::now(); - //ts_mag_raw = ros::Time::now(); - } else { - if (mSensTimestampSync && mGrabActive) { - ts_imu = mFrameTimestamp; - ts_baro = mFrameTimestamp; - ts_mag = mFrameTimestamp; - ts_mag_raw = mFrameTimestamp; - } else { - ts_imu = sl_tools::slTime2Ros(sens_data.imu.timestamp); - ts_baro = sl_tools::slTime2Ros(sens_data.barometer.timestamp); - ts_mag = sl_tools::slTime2Ros(sens_data.magnetometer.timestamp); - //ts_mag_raw = sl_tools::slTime2Ros(sens_data.magnetometer.timestamp); - } - } - - if( mZedRealCamModel == sl::MODEL::ZED2 ) { - // Update temperatures for Diagnostic - sens_data.temperature.get( sl::SensorsData::TemperatureData::SENSOR_LOCATION::ONBOARD_LEFT, mTempLeft); - sens_data.temperature.get( sl::SensorsData::TemperatureData::SENSOR_LOCATION::ONBOARD_RIGHT, mTempRight); - } - - if (totSub<1) { // Nothing to publish - return; - } - - if( imu_SubNumber > 0 || imu_RawSubNumber > 0 || - imu_TempSubNumber > 0 || pressSubNumber > 0 || - imu_MagSubNumber > 0 /*|| imu_MagRawSubNumber > 0*/ ) { - // Publish freq calculation - static std::chrono::steady_clock::time_point last_time = std::chrono::steady_clock::now(); - std::chrono::steady_clock::time_point now = std::chrono::steady_clock::now(); - - double elapsed_usec = std::chrono::duration_cast(now - last_time).count(); - last_time = now; - - mSensPeriodMean_usec->addValue(elapsed_usec); - - mSensPublishing = true; - } else { - mSensPublishing = false; - } - - if (imu_TempSubNumber>0) { - sensor_msgs::TemperaturePtr imuTempMsg = boost::make_shared(); - - imuTempMsg->header.stamp = ts_imu; - imuTempMsg->header.frame_id = mImuFrameId; - float imu_temp; - sens_data.temperature.get( sl::SensorsData::TemperatureData::SENSOR_LOCATION::IMU, imu_temp); - imuTempMsg->temperature = static_cast(imu_temp); - imuTempMsg->variance = 0.0; - - mPubImuTemp.publish(imuTempMsg); - } - - - if( sens_data.barometer.is_available && lastTs_baro != ts_baro ) { - lastTs_baro = 
ts_baro; - - if( pressSubNumber>0 ) { - sensor_msgs::FluidPressurePtr pressMsg = boost::make_shared(); - - pressMsg->header.stamp = ts_baro; - pressMsg->header.frame_id = mBaroFrameId; - pressMsg->fluid_pressure = sens_data.barometer.pressure * 1e-2; // Pascal - pressMsg->variance = 1.0585e-2; - - mPubPressure.publish(pressMsg); - } - - if( tempLeftSubNumber>0 ) { - sensor_msgs::TemperaturePtr tempLeftMsg = boost::make_shared(); - - tempLeftMsg->header.stamp = ts_baro; - tempLeftMsg->header.frame_id = mTempLeftFrameId; - tempLeftMsg->temperature = static_cast(mTempLeft); - tempLeftMsg->variance = 0.0; - - mPubTempL.publish(tempLeftMsg); - } - - if( tempRightSubNumber>0 ) { - sensor_msgs::TemperaturePtr tempRightMsg = boost::make_shared(); - - tempRightMsg->header.stamp = ts_baro; - tempRightMsg->header.frame_id = mTempRightFrameId; - tempRightMsg->temperature = static_cast(mTempRight); - tempRightMsg->variance = 0.0; - - mPubTempR.publish(tempRightMsg); - } - } - - if( imu_MagSubNumber>0 ) { - if( sens_data.magnetometer.is_available && lastT_mag != ts_mag ) { - lastT_mag = ts_mag; - - sensor_msgs::MagneticFieldPtr magMsg = boost::make_shared(); - - magMsg->header.stamp = ts_mag; - magMsg->header.frame_id = mMagFrameId; - magMsg->magnetic_field.x = sens_data.magnetometer.magnetic_field_calibrated.x*1e-6; // Tesla - magMsg->magnetic_field.y = sens_data.magnetometer.magnetic_field_calibrated.y*1e-6; // Tesla - magMsg->magnetic_field.z = sens_data.magnetometer.magnetic_field_calibrated.z*1e-6; // Tesla - magMsg->magnetic_field_covariance[0] = 0.039e-6; - magMsg->magnetic_field_covariance[1] = 0.0f; - magMsg->magnetic_field_covariance[2] = 0.0f; - magMsg->magnetic_field_covariance[3] = 0.0f; - magMsg->magnetic_field_covariance[4] = 0.037e-6; - magMsg->magnetic_field_covariance[5] = 0.0f; - magMsg->magnetic_field_covariance[6] = 0.0f; - magMsg->magnetic_field_covariance[7] = 0.0f; - magMsg->magnetic_field_covariance[8] = 0.047e-6; - - mPubImuMag.publish(magMsg); - } - } 
- - // if( imu_MagRawSubNumber>0 ) { - // if( sens_data.magnetometer.is_available && lastT_mag_raw != ts_mag_raw ) { - // lastT_mag_raw = ts_mag_raw; - - // sensor_msgs::MagneticFieldPtr mMagRawMsg = boost::make_shared(); - - - // mMagRawMsg->header.stamp = ts_mag; - // mMagRawMsg->header.frame_id = mImuFrameId; - // mMagRawMsg->magnetic_field.x = sens_data.magnetometer.magnetic_field_uncalibrated.x*1e-6; // Tesla - // mMagRawMsg->magnetic_field.y = sens_data.magnetometer.magnetic_field_uncalibrated.y*1e-6; // Tesla - // mMagRawMsg->magnetic_field.z = sens_data.magnetometer.magnetic_field_uncalibrated.z*1e-6; // Tesla - // mMagRawMsg->magnetic_field_covariance[0] = 0.039e-6; - // mMagRawMsg->magnetic_field_covariance[1] = 0.0f; - // mMagRawMsg->magnetic_field_covariance[2] = 0.0f; - // mMagRawMsg->magnetic_field_covariance[3] = 0.0f; - // mMagRawMsg->magnetic_field_covariance[4] = 0.037e-6; - // mMagRawMsg->magnetic_field_covariance[5] = 0.0f; - // mMagRawMsg->magnetic_field_covariance[6] = 0.0f; - // mMagRawMsg->magnetic_field_covariance[7] = 0.0f; - // mMagRawMsg->magnetic_field_covariance[8] = 0.047e-6; - - // mPubImuMagRaw.publish(mMagRawMsg); - // } - // } - - if (imu_SubNumber > 0) { - sensor_msgs::ImuPtr imuMsg = boost::make_shared(); - - imuMsg->header.stamp = ts_imu; - imuMsg->header.frame_id = mImuFrameId; - - imuMsg->orientation.x = sens_data.imu.pose.getOrientation()[0]; - imuMsg->orientation.y = sens_data.imu.pose.getOrientation()[1]; - imuMsg->orientation.z = sens_data.imu.pose.getOrientation()[2]; - imuMsg->orientation.w = sens_data.imu.pose.getOrientation()[3]; - - imuMsg->angular_velocity.x = sens_data.imu.angular_velocity[0] * DEG2RAD; - imuMsg->angular_velocity.y = sens_data.imu.angular_velocity[1] * DEG2RAD; - imuMsg->angular_velocity.z = sens_data.imu.angular_velocity[2] * DEG2RAD; - - imuMsg->linear_acceleration.x = sens_data.imu.linear_acceleration[0]; - imuMsg->linear_acceleration.y = sens_data.imu.linear_acceleration[1]; - 
imuMsg->linear_acceleration.z = sens_data.imu.linear_acceleration[2]; - - for (int i = 0; i < 3; ++i) { - - int r = 0; - - if (i == 0) { - r = 0; - } else if (i == 1) { - r = 1; - } else { - r = 2; - } - - imuMsg->orientation_covariance[i * 3 + 0] = - sens_data.imu.pose_covariance.r[r * 3 + 0] * DEG2RAD * DEG2RAD; - imuMsg->orientation_covariance[i * 3 + 1] = - sens_data.imu.pose_covariance.r[r * 3 + 1] * DEG2RAD * DEG2RAD; - imuMsg->orientation_covariance[i * 3 + 2] = - sens_data.imu.pose_covariance.r[r * 3 + 2] * DEG2RAD * DEG2RAD; - - imuMsg->linear_acceleration_covariance[i * 3 + 0] = - sens_data.imu.linear_acceleration_covariance.r[r * 3 + 0]; - imuMsg->linear_acceleration_covariance[i * 3 + 1] = - sens_data.imu.linear_acceleration_covariance.r[r * 3 + 1]; - imuMsg->linear_acceleration_covariance[i * 3 + 2] = - sens_data.imu.linear_acceleration_covariance.r[r * 3 + 2]; - - imuMsg->angular_velocity_covariance[i * 3 + 0] = - sens_data.imu.angular_velocity_covariance.r[r * 3 + 0] * DEG2RAD * DEG2RAD; - imuMsg->angular_velocity_covariance[i * 3 + 1] = - sens_data.imu.angular_velocity_covariance.r[r * 3 + 1] * DEG2RAD * DEG2RAD; - imuMsg->angular_velocity_covariance[i * 3 + 2] = - sens_data.imu.angular_velocity_covariance.r[r * 3 + 2] * DEG2RAD * DEG2RAD; - } - - mPubImu.publish(imuMsg); - } - - if (imu_RawSubNumber > 0) { - sensor_msgs::ImuPtr imuRawMsg = boost::make_shared(); - - imuRawMsg->header.stamp = mFrameTimestamp; // t; - imuRawMsg->header.frame_id = mImuFrameId; - imuRawMsg->angular_velocity.x = sens_data.imu.angular_velocity[0] * DEG2RAD; - imuRawMsg->angular_velocity.y = sens_data.imu.angular_velocity[1] * DEG2RAD; - imuRawMsg->angular_velocity.z = sens_data.imu.angular_velocity[2] * DEG2RAD; - imuRawMsg->linear_acceleration.x = sens_data.imu.linear_acceleration[0]; - imuRawMsg->linear_acceleration.y = sens_data.imu.linear_acceleration[1]; - imuRawMsg->linear_acceleration.z = sens_data.imu.linear_acceleration[2]; - - for (int i = 0; i < 3; ++i) { - - 
int r = 0; - - if (i == 0) { - r = 0; - } else if (i == 1) { - r = 1; - } else { - r = 2; - } - - imuRawMsg->linear_acceleration_covariance[i * 3 + 0] = - sens_data.imu.linear_acceleration_covariance.r[r * 3 + 0]; - imuRawMsg->linear_acceleration_covariance[i * 3 + 1] = - sens_data.imu.linear_acceleration_covariance.r[r * 3 + 1]; - imuRawMsg->linear_acceleration_covariance[i * 3 + 2] = - sens_data.imu.linear_acceleration_covariance.r[r * 3 + 2]; - imuRawMsg->angular_velocity_covariance[i * 3 + 0] = - sens_data.imu.angular_velocity_covariance.r[r * 3 + 0] * DEG2RAD * DEG2RAD; - imuRawMsg->angular_velocity_covariance[i * 3 + 1] = - sens_data.imu.angular_velocity_covariance.r[r * 3 + 1] * DEG2RAD * DEG2RAD; - imuRawMsg->angular_velocity_covariance[i * 3 + 2] = - sens_data.imu.angular_velocity_covariance.r[r * 3 + 2] * DEG2RAD * DEG2RAD; - } - - imuRawMsg->orientation_covariance[0] = - -1; // Orientation data is not available in "data_raw" -> See ROS REP145 - // http://www.ros.org/reps/rep-0145.html#topics - mPubImuRaw.publish(imuRawMsg); - } - - // Publish IMU tf only if enabled - if (mPublishTf) { - // Camera to pose transform from TF buffer - tf2::Transform cam_to_pose; - - std::string poseFrame; - static bool first_error = true; - - // Look up the transformation from base frame to map link - try { - poseFrame = mPublishMapTf ? 
mMapFrameId : mOdometryFrameId; - - // Save the transformation from base to frame - geometry_msgs::TransformStamped c2p = - mTfBuffer->lookupTransform(poseFrame, mCameraFrameId, ros::Time(0)); - // Get the TF2 transformation - tf2::fromMsg(c2p.transform, cam_to_pose); - } catch (tf2::TransformException& ex) { - if(!first_error) { - NODELET_DEBUG_THROTTLE(1.0, "Transform error: %s", ex.what()); - NODELET_WARN_THROTTLE(1.0, "The tf from '%s' to '%s' is not available.", - mCameraFrameId.c_str(), mMapFrameId.c_str()); - NODELET_WARN_THROTTLE(1.0, "Note: one of the possible cause of the problem is the absense of an instance " - "of the `robot_state_publisher` node publishing the correct static TF transformations " - "or a modified URDF not correctly reproducing the ZED " - "TF chain '%s' -> '%s' -> '%s'", - mBaseFrameId.c_str(), mCameraFrameId.c_str(),mDepthFrameId.c_str()); - first_error=false; - } - - return; - } - - // IMU Quaternion in Map frame - tf2::Quaternion imu_q; - imu_q.setX(sens_data.imu.pose.getOrientation()[0]); - imu_q.setY(sens_data.imu.pose.getOrientation()[1]); - imu_q.setZ(sens_data.imu.pose.getOrientation()[2]); - imu_q.setW(sens_data.imu.pose.getOrientation()[3]); - // Pose Quaternion from ZED Camera - tf2::Quaternion map_q = cam_to_pose.getRotation(); - // Difference between IMU and ZED Quaternion - tf2::Quaternion delta_q = imu_q * map_q.inverse(); - tf2::Transform imu_pose; - imu_pose.setIdentity(); - imu_pose.setRotation(delta_q); - // Note, the frame is published, but its values will only change if someone - // has subscribed to IMU - publishImuFrame(imu_pose, mFrameTimestamp); // publish the imu Frame - } +void ZEDWrapperNodelet::callback_pubPath(const ros::TimerEvent& e) +{ + uint32_t mapPathSub = mPubMapPath.getNumSubscribers(); + uint32_t odomPathSub = mPubOdomPath.getNumSubscribers(); + + geometry_msgs::PoseStamped odomPose; + geometry_msgs::PoseStamped mapPose; + + odomPose.header.stamp = mFrameTimestamp; + odomPose.header.frame_id = 
mMapFrameId; // map_frame + mOdomMutex.lock(); // + odomPose.pose.position.x = mOdom2BaseTransf.getOrigin().x(); + odomPose.pose.position.y = mOdom2BaseTransf.getOrigin().y(); + odomPose.pose.position.z = mOdom2BaseTransf.getOrigin().z(); + odomPose.pose.orientation.x = mOdom2BaseTransf.getRotation().x(); + odomPose.pose.orientation.y = mOdom2BaseTransf.getRotation().y(); + odomPose.pose.orientation.z = mOdom2BaseTransf.getRotation().z(); + odomPose.pose.orientation.w = mOdom2BaseTransf.getRotation().w(); + mOdomMutex.unlock(); // + + mapPose.header.stamp = mFrameTimestamp; + mapPose.header.frame_id = mMapFrameId; // map_frame + mapPose.pose.position.x = mMap2BaseTransf.getOrigin().x(); + mapPose.pose.position.y = mMap2BaseTransf.getOrigin().y(); + mapPose.pose.position.z = mMap2BaseTransf.getOrigin().z(); + mapPose.pose.orientation.x = mMap2BaseTransf.getRotation().x(); + mapPose.pose.orientation.y = mMap2BaseTransf.getRotation().y(); + mapPose.pose.orientation.z = mMap2BaseTransf.getRotation().z(); + mapPose.pose.orientation.w = mMap2BaseTransf.getRotation().w(); + + // Circular vector + std::lock_guard lock(mOdomMutex); // + if (mPathMaxCount != -1) + { + if (mOdomPath.size() == mPathMaxCount) + { + NODELET_DEBUG("Path vectors full: rotating "); + std::rotate(mOdomPath.begin(), mOdomPath.begin() + 1, mOdomPath.end()); + std::rotate(mMapPath.begin(), mMapPath.begin() + 1, mMapPath.end()); + + mMapPath[mPathMaxCount - 1] = mapPose; + mOdomPath[mPathMaxCount - 1] = odomPose; + } + else + { + // NODELET_DEBUG( "Path vectors adding last available poses"); + mMapPath.push_back(mapPose); + mOdomPath.push_back(odomPose); + } + } + else + { + // NODELET_DEBUG( "No limit path vectors, adding last available poses"); + mMapPath.push_back(mapPose); + mOdomPath.push_back(odomPose); + } + + if (mapPathSub > 0) + { + nav_msgs::PathPtr mapPathMsg = boost::make_shared(); + mapPathMsg->header.frame_id = mMapFrameId; + mapPathMsg->header.stamp = mFrameTimestamp; + mapPathMsg->poses 
= mMapPath; + + NODELET_DEBUG("Publishing MAP PATH message"); + mPubMapPath.publish(mapPathMsg); + } + + if (odomPathSub > 0) + { + nav_msgs::PathPtr odomPathMsg = boost::make_shared(); + odomPathMsg->header.frame_id = mOdomFrameId; + odomPathMsg->header.stamp = mFrameTimestamp; + odomPathMsg->poses = mOdomPath; + + NODELET_DEBUG("Publishing ODOM PATH message"); + mPubOdomPath.publish(odomPathMsg); + } } -void ZEDWrapperNodelet::device_poll_thread_func() { - ros::Rate loop_rate(mCamFrameRate); - - mRecording = false; - - mElabPeriodMean_sec.reset(new sl_tools::CSmartMean(mCamFrameRate)); - mGrabPeriodMean_usec.reset(new sl_tools::CSmartMean(mCamFrameRate)); - mVideoDepthPeriodMean_sec.reset(new sl_tools::CSmartMean(mCamFrameRate)); - mPcPeriodMean_usec.reset(new sl_tools::CSmartMean(mCamFrameRate)); - mObjDetPeriodMean_msec.reset(new sl_tools::CSmartMean(mCamFrameRate)); - - // Timestamp initialization - if (mSvoMode) { - mFrameTimestamp = ros::Time::now(); - } else { - mFrameTimestamp = sl_tools::slTime2Ros(mZed.getTimestamp(sl::TIME_REFERENCE::CURRENT)); - } - - mPrevFrameTimestamp = mFrameTimestamp; - - mTrackingActivated = false; - mMappingRunning = false; - mRecording = false; - - // Get the parameters of the ZED images -#if ZED_SDK_MAJOR_VERSION==3 && ZED_SDK_MINOR_VERSION<1 - mCamWidth = mZed.getCameraInformation().camera_resolution.width; - mCamHeight = mZed.getCameraInformation().camera_resolution.height; -#else - mCamWidth = mZed.getCameraInformation().camera_configuration.resolution.width; - mCamHeight = mZed.getCameraInformation().camera_configuration.resolution.height; -#endif - NODELET_DEBUG_STREAM("Camera Frame size : " << mCamWidth << "x" << mCamHeight); - int v_w = static_cast(mCamWidth * mCamImageResizeFactor); - int v_h = static_cast(mCamHeight * mCamImageResizeFactor); - mMatResolVideo = sl::Resolution(v_w,v_h); - NODELET_DEBUG_STREAM("Image Mat size : " << mMatResolVideo.width << "x" << mMatResolVideo.height); - int d_w = static_cast(mCamWidth 
* mCamDepthResizeFactor); - int d_h = static_cast(mCamHeight * mCamDepthResizeFactor); - mMatResolDepth = sl::Resolution(d_w,d_h); - NODELET_DEBUG_STREAM("Depth Mat size : " << mMatResolDepth.width << "x" << mMatResolDepth.height); - - // Create and fill the camera information messages - fillCamInfo(mZed, mLeftCamInfoMsg, mRightCamInfoMsg, mLeftCamOptFrameId, mRightCamOptFrameId); - fillCamInfo(mZed, mLeftCamInfoRawMsg, mRightCamInfoRawMsg, mLeftCamOptFrameId, mRightCamOptFrameId, true); - fillCamDepthInfo(mZed,mDepthCamInfoMsg,mLeftCamOptFrameId); - - // the reference camera is the Left one (next to the ZED logo) - - mRgbCamInfoMsg = mLeftCamInfoMsg; - mRgbCamInfoRawMsg = mLeftCamInfoRawMsg; - - sl::RuntimeParameters runParams; - runParams.sensing_mode = static_cast(mCamSensingMode); - - // Main loop - while (mNhNs.ok()) { - // Check for subscribers - uint32_t rgbSubnumber = mPubRgb.getNumSubscribers(); - uint32_t rgbRawSubnumber = mPubRawRgb.getNumSubscribers(); - uint32_t leftSubnumber = mPubLeft.getNumSubscribers(); - uint32_t leftRawSubnumber = mPubRawLeft.getNumSubscribers(); - uint32_t rightSubnumber = mPubRight.getNumSubscribers(); - uint32_t rightRawSubnumber = mPubRawRight.getNumSubscribers(); - uint32_t rgbGraySubnumber = mPubRgbGray.getNumSubscribers(); - uint32_t rgbGrayRawSubnumber = mPubRawRgbGray.getNumSubscribers(); - uint32_t leftGraySubnumber = mPubLeftGray.getNumSubscribers(); - uint32_t leftGrayRawSubnumber = mPubRawLeftGray.getNumSubscribers(); - uint32_t rightGraySubnumber = mPubRightGray.getNumSubscribers(); - uint32_t rightGrayRawSubnumber = mPubRawRightGray.getNumSubscribers(); - uint32_t depthSubnumber = mPubDepth.getNumSubscribers(); - uint32_t disparitySubnumber = mPubDisparity.getNumSubscribers(); - uint32_t cloudSubnumber = mPubCloud.getNumSubscribers(); - uint32_t fusedCloudSubnumber = mPubFusedCloud.getNumSubscribers(); - uint32_t poseSubnumber = mPubPose.getNumSubscribers(); - uint32_t poseCovSubnumber = 
mPubPoseCov.getNumSubscribers(); - uint32_t odomSubnumber = mPubOdom.getNumSubscribers(); - uint32_t confMapSubnumber = mPubConfMap.getNumSubscribers(); - uint32_t pathSubNumber = mPubMapPath.getNumSubscribers() + mPubOdomPath.getNumSubscribers(); - uint32_t stereoSubNumber = mPubStereo.getNumSubscribers(); - uint32_t stereoRawSubNumber = mPubRawStereo.getNumSubscribers(); - - uint32_t objDetSubnumber = 0; - uint32_t objDetVizSubnumber = 0; - bool objDetActive = false; - if (mObjDetEnabled) { - objDetSubnumber = mPubObjDet.getNumSubscribers(); - objDetVizSubnumber = mPubObjDetViz.getNumSubscribers(); - if (objDetSubnumber > 0 || objDetVizSubnumber > 0) { - objDetActive = true; - } - } - - mGrabActive = mRecording || mStreaming || mMappingEnabled || mObjDetEnabled || mTrackingActivated || - ((rgbSubnumber + rgbRawSubnumber + leftSubnumber + - leftRawSubnumber + rightSubnumber + rightRawSubnumber + - rgbGraySubnumber + rgbGrayRawSubnumber + leftGraySubnumber + - leftGrayRawSubnumber + rightGraySubnumber + rightGrayRawSubnumber + - depthSubnumber + disparitySubnumber + cloudSubnumber + - poseSubnumber + poseCovSubnumber + odomSubnumber + - confMapSubnumber /*+ imuSubnumber + imuRawsubnumber*/ + pathSubNumber + - stereoSubNumber + stereoRawSubNumber) > 0); - - // Run the loop only if there is some subscribers or SVO is active - if (mGrabActive) { - std::lock_guard lock(mPosTrkMutex); - - // Note: ones tracking is started it is never stopped anymore to not lose tracking information - bool computeTracking = (mMappingEnabled || mObjDetEnabled || (mComputeDepth & mDepthStabilization) || poseSubnumber > 0 || - poseCovSubnumber > 0 || odomSubnumber > 0 || pathSubNumber > 0); - - // Start the tracking? - if ((computeTracking) && !mTrackingActivated && (mDepthMode != sl::DEPTH_MODE::NONE)) { - start_pos_tracking(); - } - - // Start the mapping? 
- mMappingMutex.lock(); - if (mMappingEnabled && !mMappingRunning) { - start_3d_mapping(); - } - mMappingMutex.unlock(); - - // Start the object detection? - mObjDetMutex.lock(); - if (mObjDetEnabled && !mObjDetRunning) { - start_obj_detect(); - } - mObjDetMutex.unlock(); - - // Detect if one of the subscriber need to have the depth information - mComputeDepth = mDepthMode != sl::DEPTH_MODE::NONE && - ((depthSubnumber + disparitySubnumber + cloudSubnumber + fusedCloudSubnumber + - poseSubnumber + poseCovSubnumber + odomSubnumber + confMapSubnumber) > 0); - - if (mComputeDepth) { - runParams.confidence_threshold = mCamDepthConfidence; - runParams.textureness_confidence_threshold = mCamDepthTextureConf; - runParams.enable_depth = true; // Ask to compute the depth - } else { - runParams.enable_depth = false; // Ask to not compute the depth - } - - mGrabStatus = mZed.grab(runParams); - - std::chrono::steady_clock::time_point start_elab = std::chrono::steady_clock::now(); - - // cout << toString(grab_status) << endl; - if (mGrabStatus != sl::ERROR_CODE::SUCCESS) { - // Detect if a error occurred (for example: - // the zed have been disconnected) and - // re-initialize the ZED - - NODELET_INFO_STREAM_ONCE(toString(mGrabStatus)); - - std::this_thread::sleep_for(std::chrono::milliseconds(1)); - - if ((ros::Time::now() - mPrevFrameTimestamp).toSec() > 5 && !mSvoMode) { - mCloseZedMutex.lock(); - mZed.close(); - mCloseZedMutex.unlock(); - - mConnStatus = sl::ERROR_CODE::CAMERA_NOT_DETECTED; - - while (mConnStatus != sl::ERROR_CODE::SUCCESS) { - if (!mNhNs.ok()) { - mStopNode = true; - - std::lock_guard lock(mCloseZedMutex); - NODELET_DEBUG("Closing ZED"); - if(mRecording) { - mRecording=false; - mZed.disableRecording(); - } - mZed.close(); - - NODELET_DEBUG("ZED pool thread finished"); - return; - } - - int id = sl_tools::checkCameraReady(mZedSerialNumber); - - if (id >= 0) { - mZedParams.input.setFromCameraID(id); - mConnStatus = mZed.open(mZedParams); // Try to initialize 
the ZED - NODELET_INFO_STREAM(toString(mConnStatus)); - } else { - NODELET_INFO_STREAM("Waiting for the ZED (S/N " << mZedSerialNumber << ") to be re-connected"); - } - - mDiagUpdater.force_update(); - std::this_thread::sleep_for(std::chrono::milliseconds(2000)); - } - - mTrackingActivated = false; +void ZEDWrapperNodelet::sensors_thread_func() +{ + ros::Rate loop_rate(mSensPubRate); - computeTracking = mDepthStabilization || poseSubnumber > 0 || poseCovSubnumber > 0 || - odomSubnumber > 0; + std::chrono::steady_clock::time_point prev_usec = std::chrono::steady_clock::now(); - if (computeTracking) { // Start the tracking - start_pos_tracking(); - } - } - - mDiagUpdater.update(); - - continue; - } - - mFrameCount++; - - // SVO recording - mRecMutex.lock(); - - if (mRecording) { - mRecStatus = mZed.getRecordingStatus(); - - if (!mRecStatus.status) { - NODELET_ERROR_THROTTLE(1.0, "Error saving frame to SVO"); - } - - mDiagUpdater.force_update(); - } - - mRecMutex.unlock(); - - // Timestamp - mPrevFrameTimestamp = mFrameTimestamp; - - // Publish freq calculation - static std::chrono::steady_clock::time_point last_time = std::chrono::steady_clock::now(); - std::chrono::steady_clock::time_point now = std::chrono::steady_clock::now(); - - double elapsed_usec = std::chrono::duration_cast(now - last_time).count(); - last_time = now; - - mGrabPeriodMean_usec->addValue(elapsed_usec); - - // NODELET_INFO_STREAM("Grab time: " << elapsed_usec / 1000 << " msec"); - - // Timestamp - if (mSvoMode) { - mFrameTimestamp = ros::Time::now(); - } else { - mFrameTimestamp = sl_tools::slTime2Ros(mZed.getTimestamp(sl::TIME_REFERENCE::IMAGE)); - } - - // ----> Camera Settings - if( !mSvoMode && mFrameCount%5 == 0 ) { - //NODELET_DEBUG_STREAM( "[" << mFrameCount << "] device_poll_thread_func MUTEX LOCK"); - mDynParMutex.lock(); - - int brightness = mZed.getCameraSettings(sl::VIDEO_SETTINGS::BRIGHTNESS); - if( brightness != mCamBrightness ) { - 
mZed.setCameraSettings(sl::VIDEO_SETTINGS::BRIGHTNESS, mCamBrightness); - NODELET_DEBUG_STREAM( "mCamBrightness changed: " << mCamBrightness << " <- " << brightness); - mUpdateDynParams = true; - } - - int contrast = mZed.getCameraSettings(sl::VIDEO_SETTINGS::CONTRAST); - if( contrast != mCamContrast ) { - mZed.setCameraSettings(sl::VIDEO_SETTINGS::CONTRAST, mCamContrast); - NODELET_DEBUG_STREAM( "mCamContrast changed: " << mCamContrast << " <- " << contrast); - mUpdateDynParams = true; - } - - int hue = mZed.getCameraSettings(sl::VIDEO_SETTINGS::HUE); - if( hue != mCamHue ) { - mZed.setCameraSettings(sl::VIDEO_SETTINGS::HUE, mCamHue); - NODELET_DEBUG_STREAM( "mCamHue changed: " << mCamHue << " <- " << hue); - mUpdateDynParams = true; - } - - int saturation = mZed.getCameraSettings(sl::VIDEO_SETTINGS::SATURATION); - if( saturation != mCamSaturation ) { - mZed.setCameraSettings(sl::VIDEO_SETTINGS::SATURATION, mCamSaturation); - NODELET_DEBUG_STREAM( "mCamSaturation changed: " << mCamSaturation << " <- " << saturation); - mUpdateDynParams = true; - } - - int sharpness = mZed.getCameraSettings(sl::VIDEO_SETTINGS::SHARPNESS); - if( sharpness != mCamSharpness ) { - mZed.setCameraSettings(sl::VIDEO_SETTINGS::SHARPNESS, mCamSharpness); - NODELET_DEBUG_STREAM( "mCamSharpness changed: " << mCamSharpness << " <- " << sharpness); - mUpdateDynParams = true; - } - -#if (ZED_SDK_MAJOR_VERSION==3 && ZED_SDK_MINOR_VERSION>=1) - int gamma = mZed.getCameraSettings(sl::VIDEO_SETTINGS::GAMMA); - if( gamma != mCamGamma ) { - mZed.setCameraSettings(sl::VIDEO_SETTINGS::GAMMA, mCamGamma); - NODELET_DEBUG_STREAM( "mCamGamma changed: " << mCamGamma << " <- " << gamma); - mUpdateDynParams = true; - } -#endif - - if (mCamAutoExposure) { - if( mTriggerAutoExposure ) { - mZed.setCameraSettings(sl::VIDEO_SETTINGS::AEC_AGC, 1); - mTriggerAutoExposure = false; - } - } else { - int exposure = mZed.getCameraSettings(sl::VIDEO_SETTINGS::EXPOSURE); - if (exposure != mCamExposure) { - 
mZed.setCameraSettings(sl::VIDEO_SETTINGS::EXPOSURE, mCamExposure); - NODELET_DEBUG_STREAM( "mCamExposure changed: " << mCamExposure << " <- " << exposure); - mUpdateDynParams = true; - } - - int gain = mZed.getCameraSettings(sl::VIDEO_SETTINGS::GAIN); - if (gain != mCamGain) { - mZed.setCameraSettings(sl::VIDEO_SETTINGS::GAIN, mCamGain); - NODELET_DEBUG_STREAM( "mCamGain changed: " << mCamGain << " <- " << gain); - mUpdateDynParams = true; - } - } - - if (mCamAutoWB ) { - if(mTriggerAutoWB) { - mZed.setCameraSettings(sl::VIDEO_SETTINGS::WHITEBALANCE_AUTO, 1); - mTriggerAutoWB = false; - } - } else { - int wb = mZed.getCameraSettings(sl::VIDEO_SETTINGS::WHITEBALANCE_TEMPERATURE); - if (wb != mCamWB) { - mZed.setCameraSettings(sl::VIDEO_SETTINGS::WHITEBALANCE_TEMPERATURE, mCamWB); - NODELET_DEBUG_STREAM( "mCamWB changed: " << mCamWB << " <- " << wb); - mUpdateDynParams = true; - } - } - mDynParMutex.unlock(); - //NODELET_DEBUG_STREAM( "device_poll_thread_func MUTEX UNLOCK"); - } - - if(mUpdateDynParams) { - NODELET_DEBUG( "Update Dynamic Parameters"); - updateDynamicReconfigure(); - } - // <---- Camera Settings - - mCamDataMutex.lock(); + int count_warn = 0; - // Publish the point cloud if someone has subscribed to - if (cloudSubnumber > 0) { + while (!mStopNode) + { + std::chrono::steady_clock::time_point start_elab = std::chrono::steady_clock::now(); - // Run the point cloud conversion asynchronously to avoid slowing down - // all the program - // Retrieve raw pointCloud data if latest Pointcloud is ready - std::unique_lock lock(mPcMutex, std::defer_lock); - - if (lock.try_lock()) { - mZed.retrieveMeasure(mCloud, sl::MEASURE::XYZBGRA, sl::MEM::CPU, mMatResolDepth); - - mPointCloudFrameId = mDepthFrameId; - mPointCloudTime = mFrameTimestamp; - - // Signal Pointcloud thread that a new pointcloud is ready - mPcDataReadyCondVar.notify_one(); - mPcDataReady = true; - mPcPublishing = true; - } - } else { - mPcPublishing = false; - } - mCamDataMutex.unlock(); - - 
mObjDetMutex.lock(); - if (mObjDetRunning) { - std::chrono::steady_clock::time_point start = std::chrono::steady_clock::now(); - detectObjects(objDetSubnumber > 0, objDetVizSubnumber > 0); - std::chrono::steady_clock::time_point now = std::chrono::steady_clock::now(); - - double elapsed_msec = std::chrono::duration_cast(now - start).count(); - mObjDetPeriodMean_msec->addValue(elapsed_msec); - } - mObjDetMutex.unlock(); - - // Publish the odometry if someone has subscribed to - if (computeTracking) { - - if (!mSensor2BaseTransfValid) { - getSens2BaseTransform(); - } - - if (!mSensor2CameraTransfValid) { - getSens2CameraTransform(); - } + mCloseZedMutex.lock(); + if (!mZed.isOpened()) + { + mCloseZedMutex.unlock(); + loop_rate.sleep(); + continue; + } - if (!mCamera2BaseTransfValid) { - getCamera2BaseTransform(); - } + publishSensData(); + mCloseZedMutex.unlock(); - if (!mInitOdomWithPose) { - sl::Pose deltaOdom; - mTrackingStatus = mZed.getPosition(deltaOdom, sl::REFERENCE_FRAME::CAMERA); + if (!loop_rate.sleep()) + { + if (++count_warn > 10) + { + NODELET_INFO_THROTTLE(1.0, "Sensors thread is not synchronized with the Sensors rate"); + NODELET_INFO_STREAM_THROTTLE(1.0, "Expected cycle time: " << loop_rate.expectedCycleTime() + << " - Real cycle time: " << loop_rate.cycleTime()); + NODELET_WARN_STREAM_THROTTLE(10.0, "Sensors data publishing takes longer (" + << loop_rate.cycleTime() << " sec) than requested by the Sensors rate (" + << loop_rate.expectedCycleTime() + << " sec). 
Please consider to " + "lower the 'max_pub_rate' setting or to " + "reduce the power requirements reducing " + "the resolutions."); + } - sl::Translation translation = deltaOdom.getTranslation(); - sl::Orientation quat = deltaOdom.getOrientation(); + loop_rate.reset(); + } + else + { + count_warn = 0; + } + } -#if 0 - NODELET_DEBUG("delta ODOM [%s] - %.2f,%.2f,%.2f %.2f,%.2f,%.2f,%.2f", - sl::toString(mTrackingStatus).c_str(), - translation(0), translation(1), translation(2), - quat(0), quat(1), quat(2), quat(3)); + NODELET_DEBUG("Sensors thread finished"); +} - NODELET_DEBUG_STREAM("ODOM -> Tracking Status: " << sl::toString(mTrackingStatus)); +void ZEDWrapperNodelet::publishSensData(ros::Time t) +{ + // NODELET_INFO("publishSensData"); + + uint32_t imu_SubNumber = mPubImu.getNumSubscribers(); + uint32_t imu_RawSubNumber = mPubImuRaw.getNumSubscribers(); + uint32_t imu_TempSubNumber = 0; + uint32_t imu_MagSubNumber = 0; + uint32_t pressSubNumber = 0; + uint32_t tempLeftSubNumber = 0; + uint32_t tempRightSubNumber = 0; + + if (mZedRealCamModel != sl::MODEL::ZED_M) + { + imu_TempSubNumber = mPubImuTemp.getNumSubscribers(); + } + + if (mZedRealCamModel == sl::MODEL::ZED2 || mZedRealCamModel == sl::MODEL::ZED2i) + { + imu_MagSubNumber = mPubImuMag.getNumSubscribers(); + pressSubNumber = mPubPressure.getNumSubscribers(); + tempLeftSubNumber = mPubTempL.getNumSubscribers(); + tempRightSubNumber = mPubTempR.getNumSubscribers(); + } + + uint32_t tot_sub = imu_SubNumber + imu_RawSubNumber + imu_TempSubNumber + imu_MagSubNumber + pressSubNumber + + tempLeftSubNumber + tempRightSubNumber; + + if (tot_sub > 0) + { + mSensPublishing = true; + } + else + { + mSensPublishing = false; + } + + bool sensors_data_published = false; + + ros::Time ts_imu; + ros::Time ts_baro; + ros::Time ts_mag; + + static ros::Time lastTs_imu = ros::Time(); + static ros::Time lastTs_baro = ros::Time(); + static ros::Time lastT_mag = ros::Time(); + + sl::SensorsData sens_data; + + if (mSvoMode || 
mSensTimestampSync) + { + if (mZed.getSensorsData(sens_data, sl::TIME_REFERENCE::IMAGE) != sl::ERROR_CODE::SUCCESS) + { + NODELET_DEBUG("Not retrieved sensors data in IMAGE REFERENCE TIME"); + return; + } + } + else + { + if (mZed.getSensorsData(sens_data, sl::TIME_REFERENCE::CURRENT) != sl::ERROR_CODE::SUCCESS) + { + NODELET_DEBUG("Not retrieved sensors data in CURRENT REFERENCE TIME"); + return; + } + } + + if (t != ros::Time(0)) + { + ts_imu = t; + ts_baro = t; + ts_mag = t; + } + else + { + ts_imu = sl_tools::slTime2Ros(sens_data.imu.timestamp); + ts_baro = sl_tools::slTime2Ros(sens_data.barometer.timestamp); + ts_mag = sl_tools::slTime2Ros(sens_data.magnetometer.timestamp); + } + + bool new_imu_data = ts_imu != lastTs_imu; + bool new_baro_data = ts_baro != lastTs_baro; + bool new_mag_data = ts_mag != lastT_mag; + + if (!new_imu_data && !new_baro_data && !new_mag_data) + { + NODELET_DEBUG("No updated sensors data"); + return; + } + + if (mPublishImuTf && !mStaticImuFramePublished) + { + NODELET_DEBUG("Publishing static IMU TF"); + publishStaticImuFrame(); + } + + if (mZedRealCamModel == sl::MODEL::ZED2 || mZedRealCamModel == sl::MODEL::ZED2i) + { + // Update temperatures for Diagnostic + sens_data.temperature.get(sl::SensorsData::TemperatureData::SENSOR_LOCATION::ONBOARD_LEFT, mTempLeft); + sens_data.temperature.get(sl::SensorsData::TemperatureData::SENSOR_LOCATION::ONBOARD_RIGHT, mTempRight); + } + + if (imu_TempSubNumber > 0 && new_imu_data) + { + lastTs_imu = ts_imu; + + sensor_msgs::TemperaturePtr imuTempMsg = boost::make_shared(); + + imuTempMsg->header.stamp = ts_imu; + +#ifdef DEBUG_SENS_TS + static ros::Time old_ts; + if (old_ts == imuTempMsg->header.stamp) + { + NODELET_WARN_STREAM("Publishing IMU data with old timestamp " << old_ts); + } + old_ts = imuTempMsg->header.stamp; #endif - if (mTrackingStatus == sl::POSITIONAL_TRACKING_STATE::OK || mTrackingStatus == sl::POSITIONAL_TRACKING_STATE::SEARCHING || - mTrackingStatus == 
sl::POSITIONAL_TRACKING_STATE::FPS_TOO_LOW) { - // Transform ZED delta odom pose in TF2 Transformation - geometry_msgs::Transform deltaTransf; - deltaTransf.translation.x = translation(0); - deltaTransf.translation.y = translation(1); - deltaTransf.translation.z = translation(2); - deltaTransf.rotation.x = quat(0); - deltaTransf.rotation.y = quat(1); - deltaTransf.rotation.z = quat(2); - deltaTransf.rotation.w = quat(3); - tf2::Transform deltaOdomTf; - tf2::fromMsg(deltaTransf, deltaOdomTf); - // delta odom from sensor to base frame - tf2::Transform deltaOdomTf_base = - mSensor2BaseTransf.inverse() * deltaOdomTf * mSensor2BaseTransf; - - // Propagate Odom transform in time - mOdom2BaseTransf = mOdom2BaseTransf * deltaOdomTf_base; - - if (mTwoDMode) { - tf2::Vector3 tr_2d = mOdom2BaseTransf.getOrigin(); - tr_2d.setZ(mFixedZValue); - mOdom2BaseTransf.setOrigin(tr_2d); - - double roll, pitch, yaw; - tf2::Matrix3x3(mOdom2BaseTransf.getRotation()).getRPY(roll, pitch, yaw); - - tf2::Quaternion quat_2d; - quat_2d.setRPY(0.0, 0.0, yaw); - - mOdom2BaseTransf.setRotation(quat_2d); - } - -#if 0 //#ifndef NDEBUG // Enable for TF checking - double roll, pitch, yaw; - tf2::Matrix3x3(mOdom2BaseTransf.getRotation()).getRPY(roll, pitch, yaw); - - NODELET_DEBUG("+++ Odometry [%s -> %s] - {%.3f,%.3f,%.3f} {%.3f,%.3f,%.3f}", - mOdometryFrameId.c_str(), mBaseFrameId.c_str(), - mOdom2BaseTransf.getOrigin().x(), mOdom2BaseTransf.getOrigin().y(), mOdom2BaseTransf.getOrigin().z(), - roll * RAD2DEG, pitch * RAD2DEG, yaw * RAD2DEG); + imuTempMsg->header.frame_id = mImuFrameId; + float imu_temp; + sens_data.temperature.get(sl::SensorsData::TemperatureData::SENSOR_LOCATION::IMU, imu_temp); + imuTempMsg->temperature = static_cast(imu_temp); + imuTempMsg->variance = 0.0; + + sensors_data_published = true; + mPubImuTemp.publish(imuTempMsg); + } /*else { + NODELET_DEBUG("No new IMU temp."); + }*/ + + if (sens_data.barometer.is_available && new_baro_data) + { + lastTs_baro = ts_baro; + + if 
(pressSubNumber > 0) + { + sensor_msgs::FluidPressurePtr pressMsg = boost::make_shared(); + + pressMsg->header.stamp = ts_baro; + +#ifdef DEBUG_SENS_TS + static ros::Time old_ts; + if (old_ts == pressMsg->header.stamp) + { + NODELET_WARN_STREAM("Publishing BARO data with old timestamp " << old_ts); + } + old_ts = pressMsg->header.stamp; #endif + pressMsg->header.frame_id = mBaroFrameId; + pressMsg->fluid_pressure = sens_data.barometer.pressure; // Pascal + pressMsg->variance = 1.0585e-2; - // Publish odometry message - if (odomSubnumber > 0) { - publishOdom(mOdom2BaseTransf, deltaOdom, mFrameTimestamp); - } + sensors_data_published = true; + mPubPressure.publish(pressMsg); + } - mTrackingReady = true; - } - } else if (mFloorAlignment) { - NODELET_WARN_THROTTLE(5.0, "Odometry will be published as soon as the floor as been detected for the first time"); - } + if (tempLeftSubNumber > 0) + { + sensor_msgs::TemperaturePtr tempLeftMsg = boost::make_shared(); - } + tempLeftMsg->header.stamp = ts_baro; - // Publish the zed camera pose if someone has subscribed to - if (computeTracking) { - static sl::POSITIONAL_TRACKING_STATE oldStatus; - mTrackingStatus = mZed.getPosition(mLastZedPose, sl::REFERENCE_FRAME::WORLD); +#ifdef DEBUG_SENS_TS + static ros::Time old_ts; + if (old_ts == tempLeftMsg->header.stamp) + { + NODELET_WARN_STREAM("Publishing BARO data with old timestamp " << old_ts); + } + old_ts = tempLeftMsg->header.stamp; +#endif - sl::Translation translation = mLastZedPose.getTranslation(); - sl::Orientation quat = mLastZedPose.getOrientation(); + tempLeftMsg->header.frame_id = mTempLeftFrameId; + tempLeftMsg->temperature = static_cast(mTempLeft); + tempLeftMsg->variance = 0.0; -#if 0 //#ifndef NDEBUG // Enable for TF checking - double roll, pitch, yaw; - tf2::Matrix3x3(tf2::Quaternion(quat.ox, quat.oy, quat.oz, quat.ow)).getRPY(roll, pitch, yaw); + sensors_data_published = true; + mPubTempL.publish(tempLeftMsg); + } - NODELET_DEBUG("Sensor POSE [%s -> %s] - 
{%.2f,%.2f,%.2f} {%.2f,%.2f,%.2f}", - mLeftCamFrameId.c_str(), mMapFrameId.c_str(), - translation.x, translation.y, translation.z, - roll * RAD2DEG, pitch * RAD2DEG, yaw * RAD2DEG); + if (tempRightSubNumber > 0) + { + sensor_msgs::TemperaturePtr tempRightMsg = boost::make_shared(); - NODELET_DEBUG_STREAM("MAP -> Tracking Status: " << sl::toString(mTrackingStatus)); + tempRightMsg->header.stamp = ts_baro; +#ifdef DEBUG_SENS_TS + static ros::Time old_ts; + if (old_ts == tempRightMsg->header.stamp) + { + NODELET_WARN_STREAM("Publishing BARO data with old timestamp " << old_ts); + } + old_ts = tempRightMsg->header.stamp; #endif - if (mTrackingStatus == sl::POSITIONAL_TRACKING_STATE::OK || - mTrackingStatus == sl::POSITIONAL_TRACKING_STATE::SEARCHING /*|| status == sl::POSITIONAL_TRACKING_STATE::FPS_TOO_LOW*/) { - // Transform ZED pose in TF2 Transformation - geometry_msgs::Transform map2sensTransf; - - map2sensTransf.translation.x = translation(0); - map2sensTransf.translation.y = translation(1); - map2sensTransf.translation.z = translation(2); - map2sensTransf.rotation.x = quat(0); - map2sensTransf.rotation.y = quat(1); - map2sensTransf.rotation.z = quat(2); - map2sensTransf.rotation.w = quat(3); - tf2::Transform map_to_sens_transf; - tf2::fromMsg(map2sensTransf, map_to_sens_transf); - - mMap2BaseTransf = map_to_sens_transf * mSensor2BaseTransf; // Base position in map frame - - if (mTwoDMode) { - tf2::Vector3 tr_2d = mMap2BaseTransf.getOrigin(); - tr_2d.setZ(mFixedZValue); - mMap2BaseTransf.setOrigin(tr_2d); - - double roll, pitch, yaw; - tf2::Matrix3x3(mMap2BaseTransf.getRotation()).getRPY(roll, pitch, yaw); - - tf2::Quaternion quat_2d; - quat_2d.setRPY(0.0, 0.0, yaw); - - mMap2BaseTransf.setRotation(quat_2d); - } - -#if 0 //#ifndef NDEBUG // Enable for TF checking - double roll, pitch, yaw; - tf2::Matrix3x3(mMap2BaseTransf.getRotation()).getRPY(roll, pitch, yaw); - - NODELET_DEBUG("*** Base POSE [%s -> %s] - {%.3f,%.3f,%.3f} {%.3f,%.3f,%.3f}", - 
mMapFrameId.c_str(), mBaseFrameId.c_str(), - mMap2BaseTransf.getOrigin().x(), mMap2BaseTransf.getOrigin().y(), mMap2BaseTransf.getOrigin().z(), - roll * RAD2DEG, pitch * RAD2DEG, yaw * RAD2DEG); -#endif + tempRightMsg->header.frame_id = mTempRightFrameId; + tempRightMsg->temperature = static_cast(mTempRight); + tempRightMsg->variance = 0.0; - bool initOdom = false; + sensors_data_published = true; + mPubTempR.publish(tempRightMsg); + } + } /*else { + NODELET_DEBUG("No new BAROM. DATA"); + }*/ - if (!(mFloorAlignment)) { - initOdom = mInitOdomWithPose; - } else { - initOdom = (mTrackingStatus == sl::POSITIONAL_TRACKING_STATE::OK) & mInitOdomWithPose; - } + if (imu_MagSubNumber > 0) + { + if (sens_data.magnetometer.is_available && new_mag_data) + { + lastT_mag = ts_mag; - if (initOdom || mResetOdom) { + sensor_msgs::MagneticFieldPtr magMsg = boost::make_shared(); - NODELET_INFO("Odometry aligned to last tracking pose"); + magMsg->header.stamp = ts_mag; - // Propagate Odom transform in time - mOdom2BaseTransf = mMap2BaseTransf; - mMap2BaseTransf.setIdentity(); +#ifdef DEBUG_SENS_TS + static ros::Time old_ts; + if (old_ts == magMsg->header.stamp) + { + NODELET_WARN_STREAM("Publishing MAG data with old timestamp " << old_ts); + } + old_ts = magMsg->header.stamp; +#endif - if (odomSubnumber > 0) { - // Publish odometry message - publishOdom(mOdom2BaseTransf, mLastZedPose, mFrameTimestamp); - } + magMsg->header.frame_id = mMagFrameId; + magMsg->magnetic_field.x = sens_data.magnetometer.magnetic_field_calibrated.x * 1e-6; // Tesla + magMsg->magnetic_field.y = sens_data.magnetometer.magnetic_field_calibrated.y * 1e-6; // Tesla + magMsg->magnetic_field.z = sens_data.magnetometer.magnetic_field_calibrated.z * 1e-6; // Tesla + magMsg->magnetic_field_covariance[0] = 0.039e-6; + magMsg->magnetic_field_covariance[1] = 0.0f; + magMsg->magnetic_field_covariance[2] = 0.0f; + magMsg->magnetic_field_covariance[3] = 0.0f; + magMsg->magnetic_field_covariance[4] = 0.037e-6; + 
magMsg->magnetic_field_covariance[5] = 0.0f; + magMsg->magnetic_field_covariance[6] = 0.0f; + magMsg->magnetic_field_covariance[7] = 0.0f; + magMsg->magnetic_field_covariance[8] = 0.047e-6; + + sensors_data_published = true; + mPubImuMag.publish(magMsg); + } + } /*else { + NODELET_DEBUG("No new MAG. DATA"); + }*/ + + if (imu_SubNumber > 0 && new_imu_data) + { + lastTs_imu = ts_imu; + + sensor_msgs::ImuPtr imuMsg = boost::make_shared(); + + imuMsg->header.stamp = ts_imu; + +#ifdef DEBUG_SENS_TS + static ros::Time old_ts; + if (old_ts == imuMsg->header.stamp) + { + NODELET_WARN_STREAM("Publishing IMU data with old timestamp " << old_ts); + } + else + { + NODELET_INFO_STREAM("Publishing IMU data with new timestamp. Freq: " << 1. / (ts_imu.toSec() - old_ts.toSec())); + old_ts = imuMsg->header.stamp; + } +#endif - mInitOdomWithPose = false; - mResetOdom = false; - } else { - // Transformation from map to odometry frame - //mMap2OdomTransf = mOdom2BaseTransf.inverse() * mMap2BaseTransf; - mMap2OdomTransf = mMap2BaseTransf * mOdom2BaseTransf.inverse(); + imuMsg->header.frame_id = mImuFrameId; + + imuMsg->orientation.x = sens_data.imu.pose.getOrientation()[0]; + imuMsg->orientation.y = sens_data.imu.pose.getOrientation()[1]; + imuMsg->orientation.z = sens_data.imu.pose.getOrientation()[2]; + imuMsg->orientation.w = sens_data.imu.pose.getOrientation()[3]; + + imuMsg->angular_velocity.x = sens_data.imu.angular_velocity[0] * DEG2RAD; + imuMsg->angular_velocity.y = sens_data.imu.angular_velocity[1] * DEG2RAD; + imuMsg->angular_velocity.z = sens_data.imu.angular_velocity[2] * DEG2RAD; + + imuMsg->linear_acceleration.x = sens_data.imu.linear_acceleration[0]; + imuMsg->linear_acceleration.y = sens_data.imu.linear_acceleration[1]; + imuMsg->linear_acceleration.z = sens_data.imu.linear_acceleration[2]; + + for (int i = 0; i < 3; ++i) + { + int r = 0; + + if (i == 0) + { + r = 0; + } + else if (i == 1) + { + r = 1; + } + else + { + r = 2; + } + + imuMsg->orientation_covariance[i * 3 
+ 0] = sens_data.imu.pose_covariance.r[r * 3 + 0] * DEG2RAD * DEG2RAD; + imuMsg->orientation_covariance[i * 3 + 1] = sens_data.imu.pose_covariance.r[r * 3 + 1] * DEG2RAD * DEG2RAD; + imuMsg->orientation_covariance[i * 3 + 2] = sens_data.imu.pose_covariance.r[r * 3 + 2] * DEG2RAD * DEG2RAD; + + imuMsg->linear_acceleration_covariance[i * 3 + 0] = sens_data.imu.linear_acceleration_covariance.r[r * 3 + 0]; + imuMsg->linear_acceleration_covariance[i * 3 + 1] = sens_data.imu.linear_acceleration_covariance.r[r * 3 + 1]; + imuMsg->linear_acceleration_covariance[i * 3 + 2] = sens_data.imu.linear_acceleration_covariance.r[r * 3 + 2]; + + imuMsg->angular_velocity_covariance[i * 3 + 0] = + sens_data.imu.angular_velocity_covariance.r[r * 3 + 0] * DEG2RAD * DEG2RAD; + imuMsg->angular_velocity_covariance[i * 3 + 1] = + sens_data.imu.angular_velocity_covariance.r[r * 3 + 1] * DEG2RAD * DEG2RAD; + imuMsg->angular_velocity_covariance[i * 3 + 2] = + sens_data.imu.angular_velocity_covariance.r[r * 3 + 2] * DEG2RAD * DEG2RAD; + } + + sensors_data_published = true; + mPubImu.publish(imuMsg); + } /*else { + NODELET_DEBUG("No new IMU DATA"); + }*/ + + if (imu_RawSubNumber > 0 && new_imu_data) + { + lastTs_imu = ts_imu; + + sensor_msgs::ImuPtr imuRawMsg = boost::make_shared(); + + imuRawMsg->header.stamp = ts_imu; + imuRawMsg->header.frame_id = mImuFrameId; + imuRawMsg->angular_velocity.x = sens_data.imu.angular_velocity[0] * DEG2RAD; + imuRawMsg->angular_velocity.y = sens_data.imu.angular_velocity[1] * DEG2RAD; + imuRawMsg->angular_velocity.z = sens_data.imu.angular_velocity[2] * DEG2RAD; + imuRawMsg->linear_acceleration.x = sens_data.imu.linear_acceleration[0]; + imuRawMsg->linear_acceleration.y = sens_data.imu.linear_acceleration[1]; + imuRawMsg->linear_acceleration.z = sens_data.imu.linear_acceleration[2]; + + for (int i = 0; i < 3; ++i) + { + int r = 0; + + if (i == 0) + { + r = 0; + } + else if (i == 1) + { + r = 1; + } + else + { + r = 2; + } + + 
imuRawMsg->linear_acceleration_covariance[i * 3 + 0] = sens_data.imu.linear_acceleration_covariance.r[r * 3 + 0]; + imuRawMsg->linear_acceleration_covariance[i * 3 + 1] = sens_data.imu.linear_acceleration_covariance.r[r * 3 + 1]; + imuRawMsg->linear_acceleration_covariance[i * 3 + 2] = sens_data.imu.linear_acceleration_covariance.r[r * 3 + 2]; + imuRawMsg->angular_velocity_covariance[i * 3 + 0] = + sens_data.imu.angular_velocity_covariance.r[r * 3 + 0] * DEG2RAD * DEG2RAD; + imuRawMsg->angular_velocity_covariance[i * 3 + 1] = + sens_data.imu.angular_velocity_covariance.r[r * 3 + 1] * DEG2RAD * DEG2RAD; + imuRawMsg->angular_velocity_covariance[i * 3 + 2] = + sens_data.imu.angular_velocity_covariance.r[r * 3 + 2] * DEG2RAD * DEG2RAD; + } + + // Orientation data is not available in "data_raw" -> See ROS REP145 + // http://www.ros.org/reps/rep-0145.html#topics + imuRawMsg->orientation_covariance[0] = -1; + sensors_data_published = true; + mPubImuRaw.publish(imuRawMsg); + } + + // ----> Update Diagnostic + if (sensors_data_published) + { + // Publish freq calculation + static std::chrono::steady_clock::time_point last_time = std::chrono::steady_clock::now(); + std::chrono::steady_clock::time_point now = std::chrono::steady_clock::now(); -#if 0 //#ifndef NDEBUG // Enable for TF checking - double roll, pitch, yaw; - tf2::Matrix3x3(mMap2OdomTransf.getRotation()).getRPY(roll, pitch, yaw); + double elapsed_usec = std::chrono::duration_cast(now - last_time).count(); + last_time = now; - NODELET_DEBUG("+++ Diff [%s -> %s] - {%.3f,%.3f,%.3f} {%.3f,%.3f,%.3f}", - mMapFrameId.c_str(), mOdometryFrameId.c_str(), - mMap2OdomTransf.getOrigin().x(), mMap2OdomTransf.getOrigin().y(), mMap2OdomTransf.getOrigin().z(), - roll * RAD2DEG, pitch * RAD2DEG, yaw * RAD2DEG); -#endif - } + mSensPeriodMean_usec->addValue(elapsed_usec); + } + // <---- Update Diagnostic +} - // Publish Pose message - if ((poseSubnumber + poseCovSubnumber) > 0) { - publishPose(mFrameTimestamp); - } +void 
ZEDWrapperNodelet::device_poll_thread_func() +{ + ros::Rate loop_rate(mPubFrameRate); + + mRecording = false; + + mElabPeriodMean_sec.reset(new sl_tools::CSmartMean(mCamFrameRate)); + mGrabPeriodMean_usec.reset(new sl_tools::CSmartMean(mCamFrameRate)); + mVideoDepthPeriodMean_sec.reset(new sl_tools::CSmartMean(mCamFrameRate)); + mPcPeriodMean_usec.reset(new sl_tools::CSmartMean(mCamFrameRate)); + mObjDetPeriodMean_msec.reset(new sl_tools::CSmartMean(mCamFrameRate)); + + // Timestamp initialization + if (mSvoMode) + { + mFrameTimestamp = ros::Time::now(); + } + else + { + mFrameTimestamp = sl_tools::slTime2Ros(mZed.getTimestamp(sl::TIME_REFERENCE::CURRENT)); + } + mPrevFrameTimestamp = mFrameTimestamp; + + mPosTrackingStarted = false; + mMappingRunning = false; + mRecording = false; + + // Get the parameters of the ZED images + mCamWidth = mZed.getCameraInformation().camera_configuration.resolution.width; + mCamHeight = mZed.getCameraInformation().camera_configuration.resolution.height; + NODELET_DEBUG_STREAM("Original Camera grab frame size: " << mCamWidth << "x" << mCamHeight); + int pub_w, pub_h; + pub_w = static_cast(std::round(mCamWidth / mCustomDownscaleFactor)); + pub_h = static_cast(std::round(mCamHeight / mCustomDownscaleFactor)); + + if (pub_w > mCamWidth || pub_h > mCamHeight) + { + NODELET_WARN_STREAM("The publishing resolution (" + << pub_w << "x" << pub_h << ") cannot be higher than the grabbing resolution (" << mCamWidth + << "x" << mCamHeight << "). 
Using grab resolution for output messages."); + pub_w = mCamWidth; + pub_h = mCamHeight; + } + mMatResol = sl::Resolution(pub_w, pub_h); + NODELET_DEBUG_STREAM("Publishing frame size: " << mMatResol.width << "x" << mMatResol.height); + + // ----> Set Region of Interest + if (!mRoiParam.empty()) + { + NODELET_INFO("*** Setting ROI ***"); + sl::Resolution resol(mCamWidth, mCamHeight); + std::vector sl_poly; + std::string log_msg = parseRoiPoly(mRoiParam, sl_poly); + + // Create ROI mask + sl::Mat roi_mask(resol, sl::MAT_TYPE::U8_C1, sl::MEM::CPU); + + if (!sl_tools::generateROI(sl_poly, roi_mask)) + { + NODELET_WARN(" * Error generating the region of interest image mask."); + } + else + { + sl::ERROR_CODE err = mZed.setRegionOfInterest(roi_mask); + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM(" * Error while setting ZED SDK region of interest: " << sl::toString(err).c_str()); + } + else + { + NODELET_INFO(" * Region of Interest correctly set."); + } + } + } + // <---- Set Region of Interest + + // Create and fill the camera information messages + fillCamInfo(mZed, mLeftCamInfoMsg, mRightCamInfoMsg, mLeftCamOptFrameId, mRightCamOptFrameId); + fillCamInfo(mZed, mLeftCamInfoRawMsg, mRightCamInfoRawMsg, mLeftCamOptFrameId, mRightCamOptFrameId, true); + fillCamDepthInfo(mZed, mDepthCamInfoMsg, mLeftCamOptFrameId); + + // the reference camera is the Left one (next to the ZED logo) + mRgbCamInfoMsg = mLeftCamInfoMsg; + mRgbCamInfoRawMsg = mLeftCamInfoRawMsg; + + sl::RuntimeParameters runParams; + + // Main loop + while (mNhNs.ok()) + { + // ----> Count subscribers + uint32_t rgbSubnumber = mPubRgb.getNumSubscribers(); + uint32_t rgbRawSubnumber = mPubRawRgb.getNumSubscribers(); + uint32_t leftSubnumber = mPubLeft.getNumSubscribers(); + uint32_t leftRawSubnumber = mPubRawLeft.getNumSubscribers(); + uint32_t rightSubnumber = mPubRight.getNumSubscribers(); + uint32_t rightRawSubnumber = mPubRawRight.getNumSubscribers(); + uint32_t rgbGraySubnumber = 
mPubRgbGray.getNumSubscribers(); + uint32_t rgbGrayRawSubnumber = mPubRawRgbGray.getNumSubscribers(); + uint32_t leftGraySubnumber = mPubLeftGray.getNumSubscribers(); + uint32_t leftGrayRawSubnumber = mPubRawLeftGray.getNumSubscribers(); + uint32_t rightGraySubnumber = mPubRightGray.getNumSubscribers(); + uint32_t rightGrayRawSubnumber = mPubRawRightGray.getNumSubscribers(); + uint32_t depthSubnumber = 0; + uint32_t objDetSubnumber = 0; + uint32_t disparitySubnumber = 0; + uint32_t cloudSubnumber = 0; + uint32_t fusedCloudSubnumber = 0; + uint32_t poseSubnumber = 0; + uint32_t poseCovSubnumber = 0; + uint32_t odomSubnumber = 0; + uint32_t confMapSubnumber = 0; + uint32_t pathSubNumber = 0; + if (!mDepthDisabled) + { + depthSubnumber = mPubDepth.getNumSubscribers(); + disparitySubnumber = mPubDisparity.getNumSubscribers(); + cloudSubnumber = mPubCloud.getNumSubscribers(); + fusedCloudSubnumber = mPubFusedCloud.getNumSubscribers(); + poseSubnumber = mPubPose.getNumSubscribers(); + poseCovSubnumber = mPubPoseCov.getNumSubscribers(); + odomSubnumber = mPubOdom.getNumSubscribers(); + confMapSubnumber = mPubConfMap.getNumSubscribers(); + pathSubNumber = mPubMapPath.getNumSubscribers() + mPubOdomPath.getNumSubscribers(); + + if (mObjDetEnabled && mObjDetRunning) + { + objDetSubnumber = mPubObjDet.getNumSubscribers(); + } + } + uint32_t stereoSubNumber = mPubStereo.getNumSubscribers(); + uint32_t stereoRawSubNumber = mPubRawStereo.getNumSubscribers(); + // <---- Count subscribers + + mGrabActive = mRecording || mStreaming || mMappingEnabled || mObjDetEnabled || mPosTrackingEnabled || + mPosTrackingStarted || + ((rgbSubnumber + rgbRawSubnumber + leftSubnumber + leftRawSubnumber + rightSubnumber + + rightRawSubnumber + rgbGraySubnumber + rgbGrayRawSubnumber + leftGraySubnumber + + leftGrayRawSubnumber + rightGraySubnumber + rightGrayRawSubnumber + depthSubnumber + + disparitySubnumber + cloudSubnumber + poseSubnumber + poseCovSubnumber + odomSubnumber + + confMapSubnumber + 
pathSubNumber + stereoSubNumber + stereoRawSubNumber + objDetSubnumber) > 0); + + // Run the loop only if there is some subscribers or SVO is active + if (mGrabActive) + { + std::lock_guard lock(mPosTrkMutex); + + // Note: once tracking is started it is never stopped anymore to not lose tracking information + mPosTrackingRequired = + !mDepthDisabled && (mPosTrackingEnabled || mPosTrackingStarted || mMappingEnabled || mObjDetEnabled || + (mComputeDepth & mDepthStabilization) || poseSubnumber > 0 || poseCovSubnumber > 0 || + odomSubnumber > 0 || pathSubNumber > 0); + + // Start the tracking? + if (mPosTrackingRequired && !mPosTrackingStarted) + { + start_pos_tracking(); + } + + // Start the mapping? + mMappingMutex.lock(); + if (mMappingEnabled && !mMappingRunning) + { + start_3d_mapping(); + } + mMappingMutex.unlock(); + + // Start the object detection? + if (!mDepthDisabled) + { + mObjDetMutex.lock(); + if (mObjDetEnabled && !mObjDetRunning) + { + start_obj_detect(); + } + mObjDetMutex.unlock(); + } + + // Detect if one of the subscriber need to have the depth information + mComputeDepth = !mDepthDisabled && + (mPosTrackingRequired || + ((depthSubnumber + disparitySubnumber + cloudSubnumber + fusedCloudSubnumber + poseSubnumber + + poseCovSubnumber + odomSubnumber + confMapSubnumber + objDetSubnumber) > 0)); + + // ----> Depth runtime parameters + if (mComputeDepth) + { + runParams.confidence_threshold = mCamDepthConfidence; + runParams.texture_confidence_threshold = mCamDepthTextureConf; + runParams.enable_depth = true; // Ask to compute the depth + } + else + { + runParams.enable_depth = false; // Ask to not compute the depth + } + // <---- Depth runtime parameters + + std::chrono::steady_clock::time_point start_elab = std::chrono::steady_clock::now(); + + // ZED Grab + mGrabStatus = mZed.grab(runParams); + + // cout << toString(grab_status) << endl; + if (mGrabStatus != sl::ERROR_CODE::SUCCESS) + { + // Detect if a error occurred (for example: the zed have been 
disconnected) and re-initialize the ZED + + NODELET_INFO_STREAM_THROTTLE(1.0, "Camera grab error: " << sl::toString(mGrabStatus).c_str()); - mTrackingReady = true; + std::this_thread::sleep_for(std::chrono::milliseconds(1)); + if (mGrabStatus != sl::ERROR_CODE::CAMERA_REBOOTING) + { + std::this_thread::sleep_for(std::chrono::milliseconds(50)); + continue; + } + else if (mSvoMode && mGrabStatus == sl::ERROR_CODE::END_OF_SVOFILE_REACHED) + { + NODELET_WARN("SVO reached the end. The node will be stopped."); + mZed.close(); + exit(EXIT_SUCCESS); + } + else if (!mRemoteStreamAddr.empty()) + { + NODELET_ERROR("Remote stream problem. The node will be stopped."); + mZed.close(); + exit(EXIT_FAILURE); + } + else + { + if ((ros::Time::now() - mPrevFrameTimestamp).toSec() > 5 && !mSvoMode) + { + mCloseZedMutex.lock(); + if (mZed.isOpened()) + { + mZed.close(); + } + mCloseZedMutex.unlock(); + + mConnStatus = sl::ERROR_CODE::CAMERA_NOT_DETECTED; + + while (mConnStatus != sl::ERROR_CODE::SUCCESS) + { + if (!mNhNs.ok()) + { + mStopNode = true; + + std::lock_guard stop_lock(mCloseZedMutex); + NODELET_INFO_STREAM("Closing ZED " << mZedSerialNumber << "..."); + if (mRecording) + { + mRecording = false; + mZed.disableRecording(); + } + if (mZed.isOpened()) + { + mZed.close(); } + NODELET_INFO_STREAM("... 
ZED " << mZedSerialNumber << " closed."); + + NODELET_DEBUG("ZED pool thread finished"); + return; + } - oldStatus = mTrackingStatus; + mZedParams.input.setFromSerialNumber(mZedSerialNumber); + mConnStatus = mZed.open(mZedParams); // Try to initialize the ZED + NODELET_INFO_STREAM("Connection status: " << toString(mConnStatus)); + + mDiagUpdater.force_update(); + std::this_thread::sleep_for(std::chrono::milliseconds(1000)); } - // Publish pose tf only if enabled - if (mPublishTf) { - // Note, the frame is published, but its values will only change if - // someone has subscribed to odom - publishOdomFrame(mOdom2BaseTransf, mFrameTimestamp); // publish the base Frame in odometry frame + mPosTrackingStarted = false; - if (mPublishMapTf) { - // Note, the frame is published, but its values will only change if - // someone has subscribed to map - publishPoseFrame(mMap2OdomTransf, mFrameTimestamp); // publish the odometry Frame in map frame - } + // Start the tracking? + if ((mPosTrackingRequired) && !mPosTrackingStarted && !mDepthDisabled) + { + start_pos_tracking(); } + } + } + + mDiagUpdater.update(); + + continue; + } + + mFrameCount++; -#if 0 //#ifndef NDEBUG // Enable for TF checking - // Double check: map_to_pose must be equal to mMap2BaseTransf + // ----> Timestamp + if (mSvoMode) + { + mFrameTimestamp = ros::Time::now(); + } + else + { + mFrameTimestamp = sl_tools::slTime2Ros(mZed.getTimestamp(sl::TIME_REFERENCE::IMAGE)); + } + mPrevFrameTimestamp = mFrameTimestamp; + ros::Time stamp = mFrameTimestamp; // Fix processing Timestamp + // <---- Timestamp + + // Publish Color and Depth images + pubVideoDepth(); + + // ----> SVO recording + mRecMutex.lock(); + if (mRecording) + { + mRecStatus = mZed.getRecordingStatus(); + if (!mRecStatus.status) + { + NODELET_ERROR_THROTTLE(1.0, "Error saving frame to SVO"); + } + mDiagUpdater.force_update(); + } + mRecMutex.unlock(); + // <---- SVO recording + + // ----> Grab freq calculation + static 
std::chrono::steady_clock::time_point last_time = std::chrono::steady_clock::now(); + std::chrono::steady_clock::time_point now = std::chrono::steady_clock::now(); + double elapsed_usec = std::chrono::duration_cast(now - last_time).count(); + last_time = now; + mGrabPeriodMean_usec->addValue(elapsed_usec); + // NODELET_INFO_STREAM("Grab time: " << elapsed_usec / 1000 << " msec"); + // <---- Grab freq calculation + + // ----> Camera Settings + processCameraSettings(); + // <---- Camera Settings + + // ----> Point Cloud + if (!mDepthDisabled && cloudSubnumber > 0) + { + processPointcloud(mFrameTimestamp); + } + else + { + mPcPublishing = false; + } + // <---- Point Cloud + + // ----> Object Detection + mObjDetMutex.lock(); + if (mObjDetRunning && objDetSubnumber > 0) + { + processDetectedObjects(stamp); + } + mObjDetMutex.unlock(); + // <---- Object Detection + + // ----> Process Positional Tracking + if (!mDepthDisabled) + { + if (mPosTrackingStarted) + { + processOdometry(); + processPose(); + } + + // Publish `odom` and `map` TFs at the grab frequency + // RCLCPP_INFO(get_logger(), "Publishing TF -> threadFunc_zedGrab"); + publishTFs(mFrameTimestamp); + } + // <---- Process Positional Tracking + +#if 0 //#ifndef NDEBUG // Enable for TF checking \ + // Double check: map_to_pose must be equal to mMap2BaseTransf tf2::Transform map_to_base; @@ -3523,750 +4187,1614 @@ void ZEDWrapperNodelet::device_poll_thread_func() { NODELET_DEBUG("*******************************"); #endif - std::chrono::steady_clock::time_point end_elab = std::chrono::steady_clock::now(); - - double elab_usec = std::chrono::duration_cast(end_elab - start_elab).count(); - - double mean_elab_sec = mElabPeriodMean_sec->addValue(elab_usec / 1000000.); - - static int count_warn = 0; - - if (!loop_rate.sleep()) { - if (mean_elab_sec > (1. 
/ mCamFrameRate)) { - if (++count_warn > 10) { - NODELET_DEBUG_THROTTLE( - 1.0, - "Working thread is not synchronized with the Camera frame rate"); - NODELET_DEBUG_STREAM_THROTTLE( - 1.0, "Expected cycle time: " << loop_rate.expectedCycleTime() - << " - Real cycle time: " - << mean_elab_sec); - NODELET_WARN_STREAM_THROTTLE(10.0, "Elaboration takes longer (" << mean_elab_sec << " sec) than requested " - "by the FPS rate (" << loop_rate.expectedCycleTime() << " sec). Please consider to " - "lower the 'frame_rate' setting or to reduce the power requirements reducing the resolutions."); - } - - loop_rate.reset(); - } else { - count_warn = 0; - } - } - } else { - NODELET_DEBUG_THROTTLE(5.0, "No topics subscribed by users"); + std::chrono::steady_clock::time_point end_elab = std::chrono::steady_clock::now(); + + double elab_usec = std::chrono::duration_cast(end_elab - start_elab).count(); + + double mean_elab_sec = mElabPeriodMean_sec->addValue(elab_usec / 1000000.); + + if (!loop_rate.sleep()) + { + static int count_warn = 0; + if (mean_elab_sec > (1. / mPubFrameRate)) + { + + if (++count_warn > 10) + { + NODELET_DEBUG_THROTTLE(1.0, "Working thread is not synchronized with the Camera grab rate"); + NODELET_DEBUG_STREAM_THROTTLE(1.0, "Expected cycle time: " << loop_rate.expectedCycleTime() + << " - Real cycle time: " << mean_elab_sec); + NODELET_DEBUG_STREAM_THROTTLE(10.0, "Elaboration takes longer (" + << mean_elab_sec << " sec) than requested by the FPS rate (" + << loop_rate.expectedCycleTime() + << " sec). Please consider to " + "lower the 'general/pub_frame_rate' setting or to " + "reduce the power requirements by reducing the camera " + "resolutions."); + } + + loop_rate.reset(); + } + else + { + count_warn = 0; + } + } + } + else + { + NODELET_DEBUG_THROTTLE(5.0, "No processing required. 
Waiting for subscribers or modules activation."); - // Publish odometry tf only if enabled - if (mPublishTf) { - ros::Time t; + std::this_thread::sleep_for(std::chrono::milliseconds(10)); // No subscribers, we just wait + loop_rate.reset(); + } - if (mSvoMode) { - t = ros::Time::now(); - } else { - t = sl_tools::slTime2Ros(mZed.getTimestamp(sl::TIME_REFERENCE::CURRENT)); - } + mDiagUpdater.update(); + } // while loop - publishOdomFrame(mOdom2BaseTransf, mFrameTimestamp); // publish the base Frame in odometry frame + if (mSaveAreaMapOnClosing && mPosTrackingStarted) + { + saveAreaMap(mAreaMemDbPath); + } - if (mPublishMapTf) { - publishPoseFrame(mMap2OdomTransf, mFrameTimestamp); // publish the odometry Frame in map frame - } - } + mStopNode = true; // Stops other threads - std::this_thread::sleep_for( - std::chrono::milliseconds(10)); // No subscribers, we just wait - loop_rate.reset(); - } + std::lock_guard lock(mCloseZedMutex); + NODELET_DEBUG("Closing ZED"); - mDiagUpdater.update(); - } // while loop + if (mRecording) + { + mRecording = false; + mZed.disableRecording(); + } - mStopNode = true; // Stops other threads + mZed.close(); - std::lock_guard lock(mCloseZedMutex); - NODELET_DEBUG("Closing ZED"); + NODELET_DEBUG("ZED pool thread finished"); +} // namespace zed_nodelets - if(mRecording) { - mRecording=false; - mZed.disableRecording(); - } - mZed.close(); +void ZEDWrapperNodelet::processPointcloud(ros::Time ts) +{ + // Run the point cloud conversion asynchronously to avoid slowing down + // all the program + // Retrieve raw pointCloud data if latest Pointcloud is ready + std::unique_lock lock(mPcMutex, std::defer_lock); + + if (lock.try_lock()) + { + mZed.retrieveMeasure(mCloud, sl::MEASURE::XYZBGRA, sl::MEM::CPU, mMatResol); - NODELET_DEBUG("ZED pool thread finished"); + mPointCloudFrameId = mDepthFrameId; + mPointCloudTime = ts; + + // Signal Pointcloud thread that a new pointcloud is ready + mPcDataReadyCondVar.notify_one(); + mPcDataReady = true; + 
mPcPublishing = true; + } } -void ZEDWrapperNodelet::updateDiagnostic(diagnostic_updater::DiagnosticStatusWrapper& stat) { +void ZEDWrapperNodelet::processCameraSettings() +{ + + sl::VIDEO_SETTINGS setting; + sl::ERROR_CODE err; + + if (!mSvoMode && mFrameCount % 5 == 0) + { + // NODELET_DEBUG_STREAM( "[" << mFrameCount << "] device_poll_thread_func MUTEX LOCK"); + mDynParMutex.lock(); - if (mConnStatus != sl::ERROR_CODE::SUCCESS) { - stat.summary(diagnostic_msgs::DiagnosticStatus::ERROR, sl::toString(mConnStatus).c_str()); - return; + int value; + setting = sl::VIDEO_SETTINGS::BRIGHTNESS; + err = mZed.getCameraSettings(setting, value); + if (err == sl::ERROR_CODE::SUCCESS && value != mCamBrightness) + { + err = mZed.setCameraSettings(setting, mCamBrightness); + } + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); + } + else + { + NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamBrightness << " <- " << value); + mUpdateDynParams = true; + } + + setting = sl::VIDEO_SETTINGS::CONTRAST; + err = mZed.getCameraSettings(setting, value); + if (err == sl::ERROR_CODE::SUCCESS && value != mCamContrast) + { + err = mZed.setCameraSettings(setting, mCamContrast); + } + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); + } + else + { + NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamContrast << " <- " << value); + mUpdateDynParams = true; + } + + setting = sl::VIDEO_SETTINGS::HUE; + err = mZed.getCameraSettings(setting, value); + if (err == sl::ERROR_CODE::SUCCESS && value != mCamHue) + { + err = mZed.setCameraSettings(setting, mCamHue); + } + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); + } + else + { + NODELET_DEBUG_STREAM(sl::toString(setting) << " 
changed: " << mCamHue << " <- " << value); + mUpdateDynParams = true; + } + + setting = sl::VIDEO_SETTINGS::SATURATION; + err = mZed.getCameraSettings(setting, value); + if (err == sl::ERROR_CODE::SUCCESS && value != mCamSaturation) + { + err = mZed.setCameraSettings(setting, mCamSaturation); + } + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); + } + else + { + NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamSaturation << " <- " << value); + mUpdateDynParams = true; + } + + setting = sl::VIDEO_SETTINGS::SHARPNESS; + err = mZed.getCameraSettings(setting, value); + if (err == sl::ERROR_CODE::SUCCESS && value != mCamSharpness) + { + err = mZed.setCameraSettings(setting, mCamSharpness); + } + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); + } + else + { + NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamSharpness << " <- " << value); + mUpdateDynParams = true; + } + + setting = sl::VIDEO_SETTINGS::GAMMA; + err = mZed.getCameraSettings(setting, value); + if (err == sl::ERROR_CODE::SUCCESS && value != mCamGamma) + { + err = mZed.setCameraSettings(setting, mCamGamma); + } + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); + } + else + { + NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamGamma << " <- " << value); + mUpdateDynParams = true; + } + + if (mTriggerAutoExposure) + { + setting = sl::VIDEO_SETTINGS::AEC_AGC; + err = mZed.getCameraSettings(setting, value); + int aec_agc = (mCamAutoExposure ? 
1 : 0); + if (err == sl::ERROR_CODE::SUCCESS && value != aec_agc) + { + err = mZed.setCameraSettings(setting, aec_agc); + } + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); + } + else + { + NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << aec_agc << " <- " << value); + mUpdateDynParams = true; + } + } + if (!mCamAutoExposure) + { + setting = sl::VIDEO_SETTINGS::EXPOSURE; + err = mZed.getCameraSettings(setting, value); + if (err == sl::ERROR_CODE::SUCCESS && value != mCamExposure) + { + err = mZed.setCameraSettings(setting, mCamExposure); + } + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); + } + else + { + NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamExposure << " <- " << value); + mUpdateDynParams = true; + } + + setting = sl::VIDEO_SETTINGS::GAIN; + err = mZed.getCameraSettings(setting, value); + if (err == sl::ERROR_CODE::SUCCESS && value != mCamGain) + { + err = mZed.setCameraSettings(setting, mCamGain); + } + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); + } + else + { + NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamGain << " <- " << value); + mUpdateDynParams = true; + } + } + + if (mTriggerAutoWB) + { + setting = sl::VIDEO_SETTINGS::WHITEBALANCE_AUTO; + err = mZed.getCameraSettings(setting, value); + int wb_auto = (mCamAutoWB ? 
1 : 0); + if (err == sl::ERROR_CODE::SUCCESS && value != wb_auto) + { + err = mZed.setCameraSettings(setting, wb_auto); + } + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); + } + else + { + NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << wb_auto << " <- " << value); + mUpdateDynParams = true; + } + } + if (!mCamAutoWB) + { + setting = sl::VIDEO_SETTINGS::WHITEBALANCE_TEMPERATURE; + err = mZed.getCameraSettings(setting, value); + if (err == sl::ERROR_CODE::SUCCESS && value != mCamWB) + { + err = mZed.setCameraSettings(setting, mCamWB); + } + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Error setting parameter " << sl::toString(setting) << ": " << sl::toString(err)); + } + else + { + NODELET_DEBUG_STREAM(sl::toString(setting) << " changed: " << mCamWB << " <- " << value); + mUpdateDynParams = true; + } } + mDynParMutex.unlock(); + // NODELET_DEBUG_STREAM( "device_poll_thread_func MUTEX UNLOCK"); + } - if (mGrabActive) { - if (mGrabStatus == sl::ERROR_CODE::SUCCESS /*|| mGrabStatus == sl::ERROR_CODE::NOT_A_NEW_FRAME*/) { + if (mUpdateDynParams) + { + NODELET_DEBUG("Update Dynamic Parameters"); + updateDynamicReconfigure(); + } +} - stat.summary(diagnostic_msgs::DiagnosticStatus::OK, "Camera grabbing"); +void ZEDWrapperNodelet::callback_updateDiagnostic(diagnostic_updater::DiagnosticStatusWrapper& stat) +{ + if (mConnStatus != sl::ERROR_CODE::SUCCESS) + { + stat.summary(diagnostic_msgs::DiagnosticStatus::ERROR, sl::toString(mConnStatus).c_str()); + return; + } + + if (mGrabActive) + { + if (mGrabStatus == sl::ERROR_CODE::SUCCESS /*|| mGrabStatus == sl::ERROR_CODE::NOT_A_NEW_FRAME*/) + { + stat.summary(diagnostic_msgs::DiagnosticStatus::OK, "Camera grabbing"); + + double freq = 1000000. / mGrabPeriodMean_usec->getMean(); + double freq_perc = 100. 
* freq / mPubFrameRate; + stat.addf("Capture", "Mean Frequency: %.1f Hz (%.1f%%)", freq, freq_perc); + + stat.addf("General Processing", "Mean Time: %.3f sec (Max. %.3f sec)", mElabPeriodMean_sec->getMean(), + 1. / mPubFrameRate); + + if (mPublishingData) + { + freq = 1. / mVideoDepthPeriodMean_sec->getMean(); + freq_perc = 100. * freq / mPubFrameRate; + stat.addf("Video/Depth Publish", "Mean Frequency: %.1f Hz (%.1f%%)", freq, freq_perc); + } + + if (mSvoMode) + { + int frame = mZed.getSVOPosition(); + int totFrames = mZed.getSVONumberOfFrames(); + double svo_perc = 100. * (static_cast(frame) / totFrames); + + stat.addf("Playing SVO", "Frame: %d/%d (%.1f%%)", frame, totFrames, svo_perc); + } + + if (mComputeDepth) + { + stat.add("Depth status", "ACTIVE"); + + if (mPcPublishing) + { + freq = 1000000. / mPcPeriodMean_usec->getMean(); + freq_perc = 100. * freq / mPointCloudFreq; + stat.addf("Point Cloud", "Mean Frequency: %.1f Hz (%.1f%%)", freq, freq_perc); + } + else + { + stat.add("Point Cloud", "Topic not subscribed"); + } - double freq = 1000000. / mGrabPeriodMean_usec->getMean(); - double freq_perc = 100.*freq / mCamFrameRate; - stat.addf("Capture", "Mean Frequency: %.1f Hz (%.1f%%)", freq, freq_perc); + if (mFloorAlignment) + { + if (mInitOdomWithPose) + { + stat.add("Floor Detection", "NOT INITIALIZED"); + } + else + { + stat.add("Floor Detection", "INITIALIZED"); + } + } - stat.addf("Processing Time", "Mean time: %.3f sec (Max. %.3f sec)", mElabPeriodMean_sec->getMean(), 1. / mCamFrameRate); + if (mPosTrackingStarted) + { + stat.addf("Pos. Tracking status [Pose]", "%s", sl::toString(mPosTrackingStatusWorld).c_str()); + stat.addf("Pos. Tracking status [Odometry]", "%s", sl::toString(mPosTrackingStatusCamera).c_str()); + } + else + { + stat.add("Pos. Tracking status", "INACTIVE"); + } - if(mPublishingData) { - freq = 1. 
/ mVideoDepthPeriodMean_sec->getMean(); - freq_perc = 100.*freq / mVideoDepthFreq; - stat.addf("Video/Depth Publish", "Mean Frequency: %.1f Hz (%.1f%%)", freq, freq_perc); - } + if (mObjDetRunning) + { + freq = 1000. / mObjDetPeriodMean_msec->getMean(); + freq_perc = 100. * freq / mPubFrameRate; + stat.addf("Object detection", "Mean Frequency: %.3f Hz (%.1f%%)", freq, freq_perc); + } + else + { + stat.add("Object Detection", "INACTIVE"); + } + } + else + { + stat.add("Depth status", "INACTIVE"); + } + } + else + { + stat.summaryf(diagnostic_msgs::DiagnosticStatus::ERROR, "Camera error: %s", sl::toString(mGrabStatus).c_str()); + } + } + else + { + stat.summary(diagnostic_msgs::DiagnosticStatus::OK, "Waiting for data subscriber"); + stat.add("Capture", "INACTIVE"); + } + + if (mSensPublishing) + { + double freq = 1000000. / mSensPeriodMean_usec->getMean(); + double freq_perc = 100. * freq / mSensPubRate; + stat.addf("IMU", "Mean Frequency: %.1f Hz (%.1f%%)", freq, freq_perc); + } + else + { + stat.add("IMU", "Topics not subscribed"); + } + + if (mZedRealCamModel == sl::MODEL::ZED2 || mZedRealCamModel == sl::MODEL::ZED2i) + { + stat.addf("Left CMOS Temp.", "%.1f °C", mTempLeft); + stat.addf("Right CMOS Temp.", "%.1f °C", mTempRight); + + if (mTempLeft > 70.f || mTempRight > 70.f) + { + stat.summary(diagnostic_msgs::DiagnosticStatus::WARN, "Camera temperature"); + } + } + else + { + stat.add("Left CMOS Temp.", "N/A"); + stat.add("Right CMOS Temp.", "N/A"); + } + + if (mRecording) + { + if (!mRecStatus.status) + { + if (mGrabActive) + { + stat.add("SVO Recording", "ERROR"); + stat.summary(diagnostic_msgs::DiagnosticStatus::WARN, + "Error adding frames to SVO file while recording. 
Check " + "free disk space"); + } + else + { + stat.add("SVO Recording", "WAITING"); + } + } + else + { + stat.add("SVO Recording", "ACTIVE"); + stat.addf("SVO compression time", "%g msec", mRecStatus.average_compression_time); + stat.addf("SVO compression ratio", "%.1f%%", mRecStatus.average_compression_ratio); + } + } + else + { + stat.add("SVO Recording", "NOT ACTIVE"); + } +} - if( mSvoMode ) { - int frame = mZed.getSVOPosition(); - int totFrames = mZed.getSVONumberOfFrames(); - double svo_perc = 100.*(static_cast(frame)/totFrames); +bool ZEDWrapperNodelet::on_start_svo_recording(zed_interfaces::start_svo_recording::Request& req, + zed_interfaces::start_svo_recording::Response& res) +{ + std::lock_guard lock(mRecMutex); + + if (mRecording) + { + res.result = false; + res.info = "Recording was already active"; + return false; + } - stat.addf("Playing SVO", "Frame: %d/%d (%.1f%%)", frame,totFrames, svo_perc); - } + // Check filename + if (req.svo_filename.empty()) + { + req.svo_filename = "zed.svo"; + } - if (mComputeDepth) { - stat.add("Depth status", "ACTIVE"); + sl::ERROR_CODE err; - if (mPcPublishing) { - double freq = 1000000. / mPcPeriodMean_usec->getMean(); - double freq_perc = 100.*freq / mPointCloudFreq; - stat.addf("Point Cloud", "Mean Frequency: %.1f Hz (%.1f%%)", freq, freq_perc); - } else { - stat.add("Point Cloud", "Topic not subscribed"); - } + sl::RecordingParameters recParams; + recParams.compression_mode = mSvoComprMode; + recParams.video_filename = req.svo_filename.c_str(); + err = mZed.enableRecording(recParams); - if (mFloorAlignment) { - if (mInitOdomWithPose) { - stat.add("Floor Detection", "NOT INITIALIZED"); - } else { - stat.add("Floor Detection", "INITIALIZED"); - } - } + if (err == sl::ERROR_CODE::SVO_UNSUPPORTED_COMPRESSION) + { + recParams.compression_mode = mSvoComprMode == sl::SVO_COMPRESSION_MODE::H265 ? 
sl::SVO_COMPRESSION_MODE::H264 : + sl::SVO_COMPRESSION_MODE::H265; - if (mTrackingActivated) { - stat.addf("Tracking status", "%s", sl::toString(mTrackingStatus).c_str()); - } else { - stat.add("Tracking status", "INACTIVE"); - } + NODELET_WARN_STREAM("The chosen " << sl::toString(mSvoComprMode).c_str() << "mode is not available. Trying " + << sl::toString(recParams.compression_mode).c_str()); - if (mObjDetRunning) { - stat.addf("Object data processing", "%.3f sec", mObjDetPeriodMean_msec->getMean() / 1000.); - } else { - stat.add("Object Detection", "INACTIVE"); - } - } else { - stat.add("Depth status", "INACTIVE"); - } - } else { - stat.summaryf(diagnostic_msgs::DiagnosticStatus::ERROR, "Camera error: %s", sl::toString(mGrabStatus).c_str()); - } + err = mZed.enableRecording(recParams); - } else { - stat.summary(diagnostic_msgs::DiagnosticStatus::OK, "Waiting for data subscriber"); - stat.add("Capture", "INACTIVE"); - } + if (err == sl::ERROR_CODE::SVO_UNSUPPORTED_COMPRESSION) + { + NODELET_WARN_STREAM(sl::toString(recParams.compression_mode).c_str() + << "not available. Trying " << sl::toString(sl::SVO_COMPRESSION_MODE::H264).c_str()); + recParams.compression_mode = sl::SVO_COMPRESSION_MODE::H264; + err = mZed.enableRecording(recParams); - if (mSensPublishing) { - double freq = 1000000. 
/ mSensPeriodMean_usec->getMean(); - double freq_perc = 100.*freq / mSensPubRate; - stat.addf("IMU", "Mean Frequency: %.1f Hz (%.1f%%)", freq, freq_perc); - } else { - stat.add("IMU", "Topics not subscribed"); + if (err == sl::ERROR_CODE::SVO_UNSUPPORTED_COMPRESSION) + { + recParams.compression_mode = sl::SVO_COMPRESSION_MODE::LOSSLESS; + err = mZed.enableRecording(recParams); + } } + } - if( mSensPubRate > 0 && mZedRealCamModel == sl::MODEL::ZED2 ) { - stat.addf("Left CMOS Temp.", "%.1f °C", mTempLeft); - stat.addf("Right CMOS Temp.", "%.1f °C", mTempRight); + if (err != sl::ERROR_CODE::SUCCESS) + { + res.result = false; + res.info = sl::toString(err).c_str(); + mRecording = false; + return false; + } - if( mTempLeft>70.f || mTempRight>70.f ) { - stat.summary(diagnostic_msgs::DiagnosticStatus::WARN, "Camera temperature"); - } - } else { - stat.add("Left CMOS Temp.", "N/A"); - stat.add("Right CMOS Temp.", "N/A"); - } - - if (mRecording) { - if (!mRecStatus.status) { - if (mGrabActive) { - stat.add("SVO Recording", "ERROR"); - stat.summary(diagnostic_msgs::DiagnosticStatus::WARN, - "Error adding frames to SVO file while recording. 
Check free disk space"); - } else { - stat.add("SVO Recording", "WAITING"); - } - } else { - stat.add("SVO Recording", "ACTIVE"); - stat.addf("SVO compression time", "%g msec", mRecStatus.average_compression_time); - stat.addf("SVO compression ratio", "%.1f%%", mRecStatus.average_compression_ratio); - } - } else { - stat.add("SVO Recording", "NOT ACTIVE"); - } + mSvoComprMode = recParams.compression_mode; + mRecording = true; + res.info = "Recording started ("; + res.info += sl::toString(mSvoComprMode).c_str(); + res.info += ")"; + res.result = true; + + NODELET_INFO_STREAM("SVO recording STARTED: " << req.svo_filename << " (" << sl::toString(mSvoComprMode).c_str() + << ")"); + + return true; } -bool ZEDWrapperNodelet::on_start_svo_recording(zed_interfaces::start_svo_recording::Request& req, - zed_interfaces::start_svo_recording::Response& res) { - std::lock_guard lock(mRecMutex); +bool ZEDWrapperNodelet::on_stop_svo_recording(zed_interfaces::stop_svo_recording::Request& req, + zed_interfaces::stop_svo_recording::Response& res) +{ + std::lock_guard lock(mRecMutex); + + if (!mRecording) + { + res.done = false; + res.info = "Recording was not active"; + NODELET_WARN_STREAM("Can't stop SVO recording. 
Recording was not active"); + return false; + } - if (mRecording) { - res.result = false; - res.info = "Recording was just active"; - return false; - } + mZed.disableRecording(); + mRecording = false; + res.info = "Recording stopped"; + res.done = true; - // Check filename - if (req.svo_filename.empty()) { - req.svo_filename = "zed.svo"; - } + NODELET_INFO_STREAM("SVO recording STOPPED"); - sl::ERROR_CODE err; + return true; +} - sl::RecordingParameters recParams; - recParams.compression_mode = mSvoComprMode; - recParams.video_filename = req.svo_filename.c_str(); - err = mZed.enableRecording(recParams); +bool ZEDWrapperNodelet::on_start_remote_stream(zed_interfaces::start_remote_stream::Request& req, + zed_interfaces::start_remote_stream::Response& res) +{ + if (mStreaming) + { + res.result = false; + res.info = "SVO remote streaming was just active"; + return false; + } - if (err == sl::ERROR_CODE::SVO_UNSUPPORTED_COMPRESSION) { - recParams.compression_mode = mSvoComprMode == sl::SVO_COMPRESSION_MODE::H265 ? sl::SVO_COMPRESSION_MODE::H264 : - sl::SVO_COMPRESSION_MODE::H265; + sl::StreamingParameters params; + params.codec = static_cast(req.codec); + params.port = req.port; + params.bitrate = req.bitrate; + params.gop_size = req.gop_size; + params.adaptative_bitrate = req.adaptative_bitrate; - NODELET_WARN_STREAM("The chosen " << sl::toString(mSvoComprMode).c_str() << "mode is not available. Trying " << - sl::toString(recParams.compression_mode).c_str()); + if ((params.gop_size < -1) || (params.gop_size > 256)) + { + mStreaming = false; - err = mZed.enableRecording(recParams); + res.result = false; + res.info = "`gop_size` wrong ("; + res.info += params.gop_size; + res.info += "). Remote streaming not started"; - if (err == sl::ERROR_CODE::SVO_UNSUPPORTED_COMPRESSION) { - NODELET_WARN_STREAM(sl::toString(recParams.compression_mode).c_str() << "not available. 
Trying " << sl::toString( - sl::SVO_COMPRESSION_MODE::H264).c_str()); - recParams.compression_mode = sl::SVO_COMPRESSION_MODE::H264; - err = mZed.enableRecording(recParams); + NODELET_ERROR_STREAM(res.info); + return false; + } - if (err == sl::ERROR_CODE::SVO_UNSUPPORTED_COMPRESSION) { - recParams.compression_mode = sl::SVO_COMPRESSION_MODE::LOSSLESS; - err = mZed.enableRecording(recParams); - } - } - } + if (params.port % 2 != 0) + { + mStreaming = false; - if (err != sl::ERROR_CODE::SUCCESS) { - res.result = false; - res.info = sl::toString(err).c_str(); - mRecording = false; - return false; - } + res.result = false; + res.info = "`port` must be an even number. Remote streaming not started"; - mSvoComprMode = recParams.compression_mode; - mRecording = true; - res.info = "Recording started ("; - res.info += sl::toString(mSvoComprMode).c_str(); - res.info += ")"; - res.result = true; + NODELET_ERROR_STREAM(res.info); + return false; + } - NODELET_INFO_STREAM("SVO recording STARTED: " << req.svo_filename << " (" << sl::toString(mSvoComprMode).c_str() << ")"); + sl::ERROR_CODE err = mZed.enableStreaming(params); - return true; -} + if (err != sl::ERROR_CODE::SUCCESS) + { + mStreaming = false; -bool ZEDWrapperNodelet::on_stop_svo_recording(zed_interfaces::stop_svo_recording::Request& req, - zed_interfaces::stop_svo_recording::Response& res) { - std::lock_guard lock(mRecMutex); + res.result = false; + res.info = sl::toString(err).c_str(); - if (!mRecording) { - res.done = false; - res.info = "Recording was not active"; - return false; - } + NODELET_ERROR_STREAM("Remote streaming not started (" << res.info << ")"); - mZed.disableRecording(); - mRecording = false; - res.info = "Recording stopped"; - res.done = true; + return false; + } - NODELET_INFO_STREAM("SVO recording STOPPED"); + mStreaming = true; - return true; + NODELET_INFO_STREAM("Remote streaming STARTED"); + + res.result = true; + res.info = "Remote streaming STARTED"; + return true; } -bool 
ZEDWrapperNodelet::on_start_remote_stream(zed_interfaces::start_remote_stream::Request& req, - zed_interfaces::start_remote_stream::Response& res) { - if (mStreaming) { - res.result = false; - res.info = "SVO remote streaming was just active"; - return false; - } +bool ZEDWrapperNodelet::on_stop_remote_stream(zed_interfaces::stop_remote_stream::Request& req, + zed_interfaces::stop_remote_stream::Response& res) +{ + if (mStreaming) + { + mZed.disableStreaming(); + } + + mStreaming = false; + NODELET_INFO_STREAM("SVO remote streaming STOPPED"); + + res.done = true; + return true; +} - sl::StreamingParameters params; - params.codec = static_cast(req.codec); - params.port = req.port; - params.bitrate = req.bitrate; - params.gop_size = req.gop_size; - params.adaptative_bitrate = req.adaptative_bitrate; +bool ZEDWrapperNodelet::on_set_led_status(zed_interfaces::set_led_status::Request& req, + zed_interfaces::set_led_status::Response& res) +{ + if (mCamFwVersion < 1523) + { + NODELET_WARN_STREAM("To set the status of the blue LED the camera must be updated to FW 1523 or newer"); + return false; + } - if (params.gop_size < -1 || params.gop_size > 256) { + mZed.setCameraSettings(sl::VIDEO_SETTINGS::LED_STATUS, req.led_enabled ? 1 : 0); - mStreaming = false; + return true; +} - res.result = false; - res.info = "`gop_size` wrong ("; - res.info += params.gop_size; - res.info += "). 
Remote streaming not started"; +bool ZEDWrapperNodelet::on_toggle_led(zed_interfaces::toggle_led::Request& req, + zed_interfaces::toggle_led::Response& res) +{ + if (mCamFwVersion < 1523) + { + NODELET_WARN_STREAM("To set the status of the blue LED the camera must be updated to FW 1523 or newer"); + return false; + } - NODELET_ERROR_STREAM(res.info); - return false; - } + int status; + sl::ERROR_CODE err = mZed.getCameraSettings(sl::VIDEO_SETTINGS::LED_STATUS, status); + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Error getting the current status of the led: " << sl::toString(err).c_str()); + return false; + } - if (params.port % 2 != 0) { - mStreaming = false; + int new_status = status == 0 ? 1 : 0; + err = mZed.setCameraSettings(sl::VIDEO_SETTINGS::LED_STATUS, new_status); + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Error getting the current status of the led: " << sl::toString(err).c_str()); + return false; + } - res.result = false; - res.info = "`port` must be an even number. Remote streaming not started"; + res.new_led_status = new_status; - NODELET_ERROR_STREAM(res.info); - return false; - } + return true; +} - sl::ERROR_CODE err = mZed.enableStreaming(params); +bool ZEDWrapperNodelet::on_set_roi(zed_interfaces::set_roi::Request& req, zed_interfaces::set_roi::Response& res) +{ + NODELET_INFO("** Set ROI service called **"); + NODELET_INFO_STREAM(" * ROI string: " << req.roi.c_str()); - if (err != sl::ERROR_CODE::SUCCESS) { - mStreaming = false; + if (req.roi == "") + { + std::string err_msg = + "Error while setting ZED SDK region of interest: a vector of normalized points describing a " + "polygon is required. e.g. 
'[[0.5,0.25],[0.75,0.5],[0.5,0.75],[0.25,0.5]]'"; - res.result = false; - res.info = sl::toString(err).c_str(); + NODELET_WARN_STREAM(" * " << err_msg); - NODELET_ERROR_STREAM("Remote streaming not started (" << res.info << ")"); + res.message = err_msg; + res.success = false; + return false; + } - return false; - } + std::string error; + std::vector> parsed_poly = sl_tools::parseStringVector(req.roi, error); - mStreaming = true; + if (error != "") + { + std::string err_msg = "Error while setting ZED SDK region of interest: "; + err_msg += error; - NODELET_INFO_STREAM("Remote streaming STARTED"); + NODELET_WARN_STREAM(" * " << err_msg); - res.result = true; - res.info = "Remote streaming STARTED"; - return true; + res.message = err_msg; + res.success = false; + return false; + } + + // ----> Set Region of Interest + // Create mask + NODELET_INFO(" * Setting ROI"); + std::vector sl_poly; + //std::string log_msg = + parseRoiPoly(parsed_poly, sl_poly); + // NODELET_INFO_STREAM(" * Parsed ROI: " << log_msg.c_str()); + sl::Resolution resol(mCamWidth, mCamHeight); + sl::Mat roi_mask(resol, sl::MAT_TYPE::U8_C1, sl::MEM::CPU); + if (!sl_tools::generateROI(sl_poly, roi_mask)) + { + std::string err_msg = "Error generating the region of interest image mask. 
"; + err_msg += error; + + NODELET_WARN_STREAM(" * " << err_msg); + + res.message = err_msg; + res.success = false; + return false; + } + else + { + sl::ERROR_CODE err = mZed.setRegionOfInterest(roi_mask); + if (err != sl::ERROR_CODE::SUCCESS) + { + std::string err_msg = "Error while setting ZED SDK region of interest: "; + err_msg += sl::toString(err).c_str(); + + NODELET_WARN_STREAM(" * " << err_msg); + + res.message = err_msg; + res.success = false; + return false; + } + else + { + NODELET_INFO(" * Region of Interest correctly set."); + + res.message = "Region of Interest correctly set."; + res.success = true; + return true; + } + } + // <---- Set Region of Interest } -bool ZEDWrapperNodelet::on_stop_remote_stream(zed_interfaces::stop_remote_stream::Request& req, - zed_interfaces::stop_remote_stream::Response& res) { - if (mStreaming) { - mZed.disableStreaming(); - } +bool ZEDWrapperNodelet::on_reset_roi(zed_interfaces::reset_roi::Request& req, zed_interfaces::reset_roi::Response& res) +{ + NODELET_INFO("** Reset ROI service called **"); - mStreaming = false; - NODELET_INFO_STREAM("SVO remote streaming STOPPED"); + sl::Mat empty_roi; + sl::ERROR_CODE err = mZed.setRegionOfInterest(empty_roi); - res.done = true; + if (err != sl::ERROR_CODE::SUCCESS) + { + std::string err_msg = " * Error while resetting ZED SDK region of interest: "; + err_msg += sl::toString(err); + NODELET_WARN_STREAM(" * Error while resetting ZED SDK region of interest: " << err_msg); + res.reset_done = false; + return false; + } + else + { + NODELET_INFO(" * Region of Interest correctly reset."); + res.reset_done = true; return true; + } } -bool ZEDWrapperNodelet::on_set_led_status(zed_interfaces::set_led_status::Request& req, - zed_interfaces::set_led_status::Response& res) { - if (mCamFwVersion < 1523) { - NODELET_WARN_STREAM("To set the status of the blue LED the camera must be updated to FW 1523 or newer"); - return false; - } +bool 
ZEDWrapperNodelet::on_start_3d_mapping(zed_interfaces::start_3d_mapping::Request& req, + zed_interfaces::start_3d_mapping::Response& res) +{ + if (mMappingEnabled && mMappingRunning) + { + NODELET_WARN_STREAM("Spatial mapping was already running"); - mZed.setCameraSettings(sl::VIDEO_SETTINGS::LED_STATUS, req.led_enabled ? 1 : 0); + res.done = false; + return res.done; + } - return true; -} + mMappingRunning = false; -bool ZEDWrapperNodelet::on_toggle_led(zed_interfaces::toggle_led::Request& req, - zed_interfaces::toggle_led::Response& res) { - if (mCamFwVersion < 1523) { - NODELET_WARN_STREAM("To set the status of the blue LED the camera must be updated to FW 1523 or newer"); - return false; - } + mMappingRes = req.resolution; + mMaxMappingRange = req.max_mapping_range; + mFusedPcPubFreq = req.fused_pointcloud_freq; - int status = mZed.getCameraSettings(sl::VIDEO_SETTINGS::LED_STATUS); - int new_status = status == 0 ? 1 : 0; - mZed.setCameraSettings(sl::VIDEO_SETTINGS::LED_STATUS, new_status); + NODELET_DEBUG_STREAM(" * Received mapping resolution\t\t-> " << mMappingRes << " m"); + NODELET_DEBUG_STREAM(" * Received mapping max range\t-> " << mMaxMappingRange << " m"); + NODELET_DEBUG_STREAM(" * Received fused point cloud freq:\t-> " << mFusedPcPubFreq << " Hz"); - return (new_status == 1); + mMappingEnabled = true; + res.done = true; + + return res.done; } +bool ZEDWrapperNodelet::on_stop_3d_mapping(zed_interfaces::stop_3d_mapping::Request& req, + zed_interfaces::stop_3d_mapping::Response& res) +{ + if (mMappingEnabled) + { + mPubFusedCloud.shutdown(); + mMappingMutex.lock(); + stop_3d_mapping(); + mMappingMutex.unlock(); -bool ZEDWrapperNodelet::on_start_3d_mapping(zed_interfaces::start_3d_mapping::Request& req, - zed_interfaces::start_3d_mapping::Response& res) { - if( mMappingEnabled && mMappingRunning) { - NODELET_WARN_STREAM("Spatial mapping was just running"); + res.done = true; + } + else + { + res.done = false; + } - res.done = false; - return res.done; - } 
+ return res.done; +} - mMappingRunning = false; +bool ZEDWrapperNodelet::on_save_3d_map(zed_interfaces::save_3d_map::Request& req, + zed_interfaces::save_3d_map::Response& res) +{ + if (!mMappingEnabled) + { + res.result = false; + res.info = "3D Mapping was not active"; + NODELET_WARN_STREAM("Can't save 3D map. Mapping was not active"); + return false; + } - mMappingRes = req.resolution; - mMaxMappingRange = req.max_mapping_range; - mFusedPcPubFreq = req.fused_pointcloud_freq; + mMapSave = true; - NODELET_DEBUG_STREAM(" * Received mapping resolution\t\t-> " << mMappingRes << " m"); + std::lock_guard lock(mMappingMutex); + sl::String filename = req.map_filename.c_str(); + if (req.file_format < 0 || req.file_format > static_cast(sl::MESH_FILE_FORMAT::OBJ)) + { + res.result = false; + res.info = "File format not correct"; + NODELET_WARN_STREAM("Can't save 3D map. File format not correct"); + return false; + } + sl::MESH_FILE_FORMAT file_format = static_cast(req.file_format); + bool success = mFusedPC.save(filename, file_format); - NODELET_DEBUG_STREAM(" * Received mapping max range\t-> " << mMaxMappingRange << " m" ); + if (!success) + { + res.result = false; + res.info = "3D Map not saved"; + NODELET_ERROR_STREAM("3D Map not saved"); + return false; + } - NODELET_DEBUG_STREAM(" * Received fused point cloud freq:\t-> " << mFusedPcPubFreq << " Hz"); + res.info = "3D map saved"; + res.result = true; + return true; +} - mMappingEnabled = true; - res.done = true; +bool ZEDWrapperNodelet::on_enable_object_detection(std_srvs::SetBool::Request& req, std_srvs::SetBool::Response& res) +{ + NODELET_INFO("Called 'enable_object_detection' service"); - return res.done; -} + std::lock_guard lock(mObjDetMutex); -bool ZEDWrapperNodelet::on_stop_3d_mapping(zed_interfaces::stop_3d_mapping::Request& req, - zed_interfaces::stop_3d_mapping::Response& res) { + if (mZedRealCamModel == sl::MODEL::ZED) + { + mObjDetEnabled = false; + mObjDetRunning = false; - if( mMappingEnabled ) { - 
mPubFusedCloud.shutdown(); - mMappingMutex.lock(); - stop_3d_mapping(); - mMappingMutex.unlock(); + NODELET_ERROR_STREAM("Object detection not started. OD is not available for ZED camera model"); + res.message = "Object detection not started. OD is not available for ZED camera model"; + res.success = false; + return res.success; + } - res.done = true; - } else { - res.done = false; + if (req.data) + { + NODELET_INFO("Starting Object Detection"); + // Start + if (mObjDetEnabled && mObjDetRunning) + { + NODELET_WARN("Object Detection is already running"); + res.message = "Object Detection is already running"; + res.success = false; + return res.success; } - return res.done; + mObjDetEnabled = true; + + if (start_obj_detect()) + { + res.message = "Object Detection started"; + res.success = true; + return res.success; + } + else + { + res.message = "Error occurred starting Object Detection. See log for more info"; + res.success = false; + return res.success; + } + } + else + { + NODELET_INFO("Stopping Object Detection"); + // Stop + if (!mObjDetEnabled || !mObjDetRunning) + { + NODELET_WARN("Object Detection was not running"); + res.message = "Object Detection was not running"; + res.success = false; + return res.success; + } + + stop_obj_detect(); + + res.message = "Object Detection stopped"; + res.success = true; + return res.success; + } } -bool ZEDWrapperNodelet::on_start_object_detection(zed_interfaces::start_object_detection::Request& req, - zed_interfaces::start_object_detection::Response& res) { - if(mZedRealCamModel!=sl::MODEL::ZED2) { - mObjDetEnabled = false; - mObjDetRunning = false; +void ZEDWrapperNodelet::processDetectedObjects(ros::Time t) +{ + static std::chrono::steady_clock::time_point old_time = std::chrono::steady_clock::now(); - NODELET_ERROR_STREAM( "Object detection not started. 
OD is available only using a ZED2 camera model"); - return false; - } + sl::ObjectDetectionRuntimeParameters objectTracker_parameters_rt; + objectTracker_parameters_rt.detection_confidence_threshold = mObjDetConfidence; + objectTracker_parameters_rt.object_class_filter = mObjDetFilter; + + sl::Objects objects; + + sl::ERROR_CODE objDetRes = mZed.retrieveObjects(objects, objectTracker_parameters_rt); + + if (objDetRes != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN_STREAM("Object Detection error: " << sl::toString(objDetRes)); + return; + } + + if (!objects.is_new) + { + return; + } - if( mObjDetEnabled && mObjDetRunning) { - NODELET_WARN_STREAM("Object Detection was just running"); + // ----> Diagnostic information update + std::chrono::steady_clock::time_point now = std::chrono::steady_clock::now(); + double elapsed_msec = std::chrono::duration_cast(now - old_time).count(); + mObjDetPeriodMean_msec->addValue(elapsed_msec); + old_time = now; + // <---- Diagnostic information update - res.done = false; - return res.done; + NODELET_DEBUG_STREAM("Detected " << objects.object_list.size() << " objects"); + + size_t objCount = objects.object_list.size(); + + zed_interfaces::ObjectsStampedPtr objMsg = boost::make_shared(); + objMsg->header.stamp = t; + objMsg->header.frame_id = mLeftCamFrameId; + + objMsg->objects.resize(objCount); + + size_t idx = 0; + for (auto data : objects.object_list) + { + objMsg->objects[idx].label = sl::toString(data.label).c_str(); + objMsg->objects[idx].sublabel = sl::toString(data.sublabel).c_str(); + objMsg->objects[idx].label_id = data.raw_label; + objMsg->objects[idx].instance_id = data.id; + objMsg->objects[idx].confidence = data.confidence; + + memcpy(&(objMsg->objects[idx].position[0]), &(data.position[0]), 3 * sizeof(float)); + memcpy(&(objMsg->objects[idx].position_covariance[0]), &(data.position_covariance[0]), 6 * sizeof(float)); + memcpy(&(objMsg->objects[idx].velocity[0]), &(data.velocity[0]), 3 * sizeof(float)); + + 
objMsg->objects[idx].tracking_available = mObjDetTracking; + objMsg->objects[idx].tracking_state = static_cast(data.tracking_state); + // NODELET_INFO_STREAM( "[" << idx << "] Tracking: " << + // sl::toString(static_cast(data.tracking_state))); + objMsg->objects[idx].action_state = static_cast(data.action_state); + + if (data.bounding_box_2d.size() == 4) + { + memcpy(&(objMsg->objects[idx].bounding_box_2d.corners[0]), &(data.bounding_box_2d[0]), 8 * sizeof(unsigned int)); + } + if (data.bounding_box.size() == 8) + { + memcpy(&(objMsg->objects[idx].bounding_box_3d.corners[0]), &(data.bounding_box[0]), 24 * sizeof(float)); } - mObjDetRunning = false; + memcpy(&(objMsg->objects[idx].dimensions_3d[0]), &(data.dimensions[0]), 3 * sizeof(float)); - mObjDetConfidence = req.confidence; - mObjDetTracking = req.tracking; - mObjDetPeople = req.people; - mObjDetVehicles = req.vehicles; + // Body Detection is in a separate module in ZED SDK v4 + objMsg->objects[idx].skeleton_available = false; - NODELET_DEBUG_STREAM(" * Object min. 
confidence\t-> " << mObjDetConfidence); - NODELET_DEBUG_STREAM(" * Object tracking\t\t-> " << (mObjDetTracking?"ENABLED":"DISABLED")); - NODELET_DEBUG_STREAM(" * People detection\t\t-> " << (mObjDetPeople?"ENABLED":"DISABLED")); - NODELET_DEBUG_STREAM(" * Vehicles detection\t\t-> " << (mObjDetVehicles?"ENABLED":"DISABLED")); + // at the end of the loop + idx++; + } - mObjDetEnabled = true; - res.done = true; + mPubObjDet.publish(objMsg); +} - return res.done; +void ZEDWrapperNodelet::clickedPtCallback(geometry_msgs::PointStampedConstPtr msg) +{ + // ----> Check for result subscribers + uint32_t markerSubNumber = mPubMarker.getNumSubscribers(); + uint32_t planeSubNumber = mPubPlane.getNumSubscribers(); + + if ((markerSubNumber + planeSubNumber) == 0) + { + return; + } + // <---- Check for result subscribers + + ros::Time ts = ros::Time::now(); + + float X = msg->point.x; + float Y = msg->point.y; + float Z = msg->point.z; + + NODELET_INFO_STREAM("Clicked 3D point [X FW, Y LF, Z UP]: [" << X << "," << Y << "," << Z << "]"); + + // ----> Transform the point from `map` frame to `left_camera_optical_frame` + double camX, camY, camZ; + try + { + // Save the transformation + geometry_msgs::TransformStamped m2o = + mTfBuffer->lookupTransform(mLeftCamOptFrameId, msg->header.frame_id, ros::Time(0), ros::Duration(0.1)); + + NODELET_INFO("'%s' -> '%s': {%.3f,%.3f,%.3f} {%.3f,%.3f,%.3f,%.3f}", msg->header.frame_id.c_str(), + mLeftCamOptFrameId.c_str(), m2o.transform.translation.x, m2o.transform.translation.y, + m2o.transform.translation.z, m2o.transform.rotation.x, m2o.transform.rotation.y, + m2o.transform.rotation.z, m2o.transform.rotation.w); + + // Get the TF2 transformation + geometry_msgs::PointStamped ptCam; + + tf2::doTransform(*msg, ptCam, m2o); + + camX = ptCam.point.x; + camY = ptCam.point.y; + camZ = ptCam.point.z; + + NODELET_INFO("Point in camera coordinates [Z FW, X RG, Y DW]: {%.3f,%.3f,%.3f}", camX, camY, camZ); + } + catch (tf2::TransformException& ex) + { + 
NODELET_DEBUG_THROTTLE(1.0, "Transform error: %s", ex.what()); + NODELET_WARN_THROTTLE(1.0, "The tf from '%s' to '%s' is not available.", msg->header.frame_id.c_str(), + mLeftCamOptFrameId.c_str()); + + return; + } + // <---- Transform the point from `map` frame to `left_camera_optical_frame` + + // ----> Project the point into 2D image coordinates + sl::CalibrationParameters zedParam; + zedParam = mZed.getCameraInformation(mMatResol).camera_configuration.calibration_parameters; // ok + + float f_x = zedParam.left_cam.fx; + float f_y = zedParam.left_cam.fy; + float c_x = zedParam.left_cam.cx; + float c_y = zedParam.left_cam.cy; + + float out_scale_factor = static_cast(mMatResol.width) / mCamWidth; + + float u = ((camX / camZ) * f_x + c_x) / out_scale_factor; + float v = ((camY / camZ) * f_y + c_y) / out_scale_factor; + NODELET_INFO_STREAM("Clicked point image coordinates: [" << u << "," << v << "]"); + // <---- Project the point into 2D image coordinates + + // ----> Extract plane from clicked point + sl::Plane plane; + sl::ERROR_CODE err = mZed.findPlaneAtHit(sl::uint2(u, v), plane); + if (err != sl::ERROR_CODE::SUCCESS) + { + NODELET_WARN("Error extracting plane at point [%.3f,%.3f,%.3f]: %s", X, Y, Z, sl::toString(err).c_str()); + return; + } + + sl::float3 center = plane.getCenter(); + sl::float2 dims = plane.getExtents(); + + if (dims[0] == 0 || dims[1] == 0) + { + NODELET_INFO("Plane not found at point [%.3f,%.3f,%.3f]", X, Y, Z); + return; + } + + NODELET_INFO("Found plane at point [%.3f,%.3f,%.3f] -> Center: [%.3f,%.3f,%.3f], Dims: %.3fx%.3f", X, Y, Z, center.x, + center.y, center.z, dims[0], dims[1]); + // <---- Extract plane from clicked point + + if (markerSubNumber > 0) + { + // ----> Publish a blue sphere in the clicked point + visualization_msgs::MarkerPtr pt_marker = boost::make_shared(); + // Set the frame ID and timestamp. See the TF tutorials for information on these. 
+ static int hit_pt_id = 0; + pt_marker->header.stamp = ts; + // Set the marker action. Options are ADD and DELETE + pt_marker->action = visualization_msgs::Marker::ADD; + pt_marker->lifetime = ros::Duration(); + + // Set the namespace and id for this marker. This serves to create a unique ID + // Any marker sent with the same namespace and id will overwrite the old one + pt_marker->ns = "plane_hit_points"; + pt_marker->id = hit_pt_id++; + pt_marker->header.frame_id = mMapFrameId; + + // Set the marker type. + pt_marker->type = visualization_msgs::Marker::SPHERE; + + // Set the pose of the marker. This is a full 6DOF pose relative to the frame/time specified in the header + pt_marker->pose.position.x = X; + pt_marker->pose.position.y = Y; + pt_marker->pose.position.z = Z; + pt_marker->pose.orientation.x = 0.0; + pt_marker->pose.orientation.y = 0.0; + pt_marker->pose.orientation.z = 0.0; + pt_marker->pose.orientation.w = 1.0; + + // Set the scale of the marker -- 1x1x1 here means 1m on a side + pt_marker->scale.x = 0.025; + pt_marker->scale.y = 0.025; + pt_marker->scale.z = 0.025; + + // Set the color -- be sure to set alpha to something non-zero! + pt_marker->color.r = 0.2f; + pt_marker->color.g = 0.1f; + pt_marker->color.b = 0.75f; + pt_marker->color.a = 0.8; + + // Publish the marker + mPubMarker.publish(pt_marker); + // ----> Publish a blue sphere in the clicked point + + // ----> Publish the plane as green mesh + visualization_msgs::MarkerPtr plane_marker = boost::make_shared(); + // Set the frame ID and timestamp. See the TF tutorials for information on these. + static int plane_mesh_id = 0; + plane_marker->header.stamp = ts; + // Set the marker action. Options are ADD and DELETE + plane_marker->action = visualization_msgs::Marker::ADD; + plane_marker->lifetime = ros::Duration(); + + // Set the namespace and id for this marker. 
This serves to create a unique ID + // Any marker sent with the same namespace and id will overwrite the old one + plane_marker->ns = "plane_meshes"; + plane_marker->id = plane_mesh_id++; + plane_marker->header.frame_id = mLeftCamFrameId; + + // Set the marker type. + plane_marker->type = visualization_msgs::Marker::TRIANGLE_LIST; + + // Set the pose of the marker. This is a full 6DOF pose relative to the frame/time specified in the header + plane_marker->pose.position.x = 0; + plane_marker->pose.position.y = 0; + plane_marker->pose.position.z = 0; + plane_marker->pose.orientation.x = 0.0; + plane_marker->pose.orientation.y = 0.0; + plane_marker->pose.orientation.z = 0.0; + plane_marker->pose.orientation.w = 1.0; + + // Set the color -- be sure to set alpha to something non-zero! + plane_marker->color.r = 0.10f; + plane_marker->color.g = 0.75f; + plane_marker->color.b = 0.20f; + plane_marker->color.a = 0.75; + + // Set the scale of the marker -- 1x1x1 here means 1m on a side + plane_marker->scale.x = 1.0; + plane_marker->scale.y = 1.0; + plane_marker->scale.z = 1.0; + + sl::Mesh mesh = plane.extractMesh(); + size_t triangCount = mesh.getNumberOfTriangles(); + size_t ptCount = triangCount * 3; + plane_marker->points.resize(ptCount); + plane_marker->colors.resize(ptCount); + + size_t ptIdx = 0; + for (size_t t = 0; t < triangCount; t++) + { + for (int p = 0; p < 3; p++) + { + uint vIdx = mesh.triangles[t][p]; + plane_marker->points[ptIdx].x = mesh.vertices[vIdx][0]; + plane_marker->points[ptIdx].y = mesh.vertices[vIdx][1]; + plane_marker->points[ptIdx].z = mesh.vertices[vIdx][2]; + + // Set the color -- be sure to set alpha to something non-zero! 
+ plane_marker->colors[ptIdx].r = 0.10f; + plane_marker->colors[ptIdx].g = 0.75f; + plane_marker->colors[ptIdx].b = 0.20f; + plane_marker->colors[ptIdx].a = 0.75; + + ptIdx++; + } + } + + // Publish the marker + mPubMarker.publish(plane_marker); + // <---- Publish the plane as green mesh + } + + if (planeSubNumber > 0) + { + // ----> Publish the plane as custom message + + zed_interfaces::PlaneStampedPtr planeMsg = boost::make_shared(); + planeMsg->header.stamp = ts; + planeMsg->header.frame_id = mLeftCamFrameId; + + // Plane equation + sl::float4 sl_coeff = plane.getPlaneEquation(); + planeMsg->coefficients.coef[0] = static_cast(sl_coeff[0]); + planeMsg->coefficients.coef[1] = static_cast(sl_coeff[1]); + planeMsg->coefficients.coef[2] = static_cast(sl_coeff[2]); + planeMsg->coefficients.coef[3] = static_cast(sl_coeff[3]); + + // Plane Normal + sl::float3 sl_normal = plane.getNormal(); + planeMsg->normal.x = sl_normal[0]; + planeMsg->normal.y = sl_normal[1]; + planeMsg->normal.z = sl_normal[2]; + + // Plane Center + sl::float3 sl_center = plane.getCenter(); + planeMsg->center.x = sl_center[0]; + planeMsg->center.y = sl_center[1]; + planeMsg->center.z = sl_center[2]; + + // Plane extents + sl::float3 sl_extents = plane.getExtents(); + planeMsg->extents[0] = sl_extents[0]; + planeMsg->extents[1] = sl_extents[1]; + + // Plane pose + sl::Pose sl_pose = plane.getPose(); + sl::Orientation sl_rot = sl_pose.getOrientation(); + sl::Translation sl_tr = sl_pose.getTranslation(); + + planeMsg->pose.rotation.x = sl_rot.ox; + planeMsg->pose.rotation.y = sl_rot.oy; + planeMsg->pose.rotation.z = sl_rot.oz; + planeMsg->pose.rotation.w = sl_rot.ow; + + planeMsg->pose.translation.x = sl_tr.x; + planeMsg->pose.translation.y = sl_tr.y; + planeMsg->pose.translation.z = sl_tr.z; + + // Plane Bounds + std::vector sl_bounds = plane.getBounds(); + planeMsg->bounds.points.resize(sl_bounds.size()); + memcpy(planeMsg->bounds.points.data(), sl_bounds.data(), 3 * sl_bounds.size() * 
sizeof(float)); + + // Plane mesh + sl::Mesh sl_mesh = plane.extractMesh(); + size_t triangCount = sl_mesh.triangles.size(); + size_t ptsCount = sl_mesh.vertices.size(); + planeMsg->mesh.triangles.resize(triangCount); + planeMsg->mesh.vertices.resize(ptsCount); + + // memcpy not allowed because data types are different + for (size_t i = 0; i < triangCount; i++) + { + planeMsg->mesh.triangles[i].vertex_indices[0] = sl_mesh.triangles[i][0]; + planeMsg->mesh.triangles[i].vertex_indices[1] = sl_mesh.triangles[i][1]; + planeMsg->mesh.triangles[i].vertex_indices[2] = sl_mesh.triangles[i][2]; + } + + // memcpy not allowed because data types are different + for (size_t i = 0; i < ptsCount; i++) + { + planeMsg->mesh.vertices[i].x = sl_mesh.vertices[i][0]; + planeMsg->mesh.vertices[i].y = sl_mesh.vertices[i][1]; + planeMsg->mesh.vertices[i].z = sl_mesh.vertices[i][2]; + } + + mPubPlane.publish(planeMsg); + // <---- Publish the plane as custom message + } } -/* \brief Service callback to stop_object_detection service - */ -bool ZEDWrapperNodelet::on_stop_object_detection(zed_interfaces::stop_object_detection::Request& req, - zed_interfaces::stop_object_detection::Response& res) { - if( mObjDetEnabled ) { - mObjDetMutex.lock(); - stop_obj_detect(); - mObjDetMutex.unlock(); +void ZEDWrapperNodelet::publishPoseStatus() +{ + size_t statusSub = mPubPoseStatus.getNumSubscribers(); - res.done = true; - } else { - res.done = false; - } + if (statusSub > 0) + { + zed_interfaces::PosTrackStatusPtr msg = boost::make_shared(); + msg->status = static_cast(mPosTrackingStatusWorld); - return res.done; + mPubPoseStatus.publish(msg); + } +} + +void ZEDWrapperNodelet::publishOdomStatus() +{ + size_t statusSub = mPubOdomStatus.getNumSubscribers(); + + if (statusSub > 0) + { + zed_interfaces::PosTrackStatusPtr msg = boost::make_shared(); + msg->status = static_cast(mPosTrackingStatusCamera); + + mPubPoseStatus.publish(msg); + } } -void ZEDWrapperNodelet::detectObjects(bool publishObj, bool 
publishViz) { +void ZEDWrapperNodelet::processOdometry() +{ + if (!mSensor2BaseTransfValid) + { + getSens2BaseTransform(); + } + + if (!mSensor2CameraTransfValid) + { + getSens2CameraTransform(); + } + + if (!mCamera2BaseTransfValid) + { + getCamera2BaseTransform(); + } + + // if (!mInitOdomWithPose) { + sl::Pose deltaOdom; - sl::ObjectDetectionRuntimeParameters objectTracker_parameters_rt; - objectTracker_parameters_rt.detection_confidence_threshold = mObjDetConfidence; - objectTracker_parameters_rt.object_class_filter = mObjDetFilter; + mPosTrackingStatusCamera = mZed.getPosition(deltaOdom, sl::REFERENCE_FRAME::CAMERA); - sl::Objects objects; + publishOdomStatus(); - sl::ERROR_CODE objDetRes = mZed.retrieveObjects(objects, objectTracker_parameters_rt); + NODELET_DEBUG("delta ODOM [%s]:\n%s", sl::toString(mPosTrackingStatusCamera).c_str(), + deltaOdom.pose_data.getInfos().c_str()); - if (objDetRes != sl::ERROR_CODE::SUCCESS) { - NODELET_WARN_STREAM("Object Detection error: " << sl::toString(objDetRes)); - return; + if (mPosTrackingStatusCamera == sl::POSITIONAL_TRACKING_STATE::OK || + mPosTrackingStatusCamera == sl::POSITIONAL_TRACKING_STATE::SEARCHING || + mPosTrackingStatusCamera == sl::POSITIONAL_TRACKING_STATE::FPS_TOO_LOW) + { + sl::Translation translation = deltaOdom.getTranslation(); + sl::Orientation quat = deltaOdom.getOrientation(); + + // Transform ZED delta odom pose in TF2 Transformation + tf2::Transform deltaOdomTf; + deltaOdomTf.setOrigin(tf2::Vector3(translation.x, translation.y, translation.z)); + // w at the end in the constructor + deltaOdomTf.setRotation(tf2::Quaternion(quat.x, quat.y, quat.z, quat.w)); + + // delta odom from sensor to base frame + tf2::Transform deltaOdomTf_base = mSensor2BaseTransf.inverse() * deltaOdomTf * mSensor2BaseTransf; + + // Propagate Odom transform in time + mOdomMutex.lock(); // + mOdom2BaseTransf = mOdom2BaseTransf * deltaOdomTf_base; + + if (mTwoDMode) + { + tf2::Vector3 tr_2d = mOdom2BaseTransf.getOrigin(); + 
tr_2d.setZ(mFixedZValue); + mOdom2BaseTransf.setOrigin(tr_2d); + + double roll, pitch, yaw; + tf2::Matrix3x3(mOdom2BaseTransf.getRotation()).getRPY(roll, pitch, yaw); + + tf2::Quaternion quat_2d; + quat_2d.setRPY(0.0, 0.0, yaw); + + mOdom2BaseTransf.setRotation(quat_2d); } - // NODELET_DEBUG_STREAM("Detected " << objects.object_list.size() << " objects"); + double roll, pitch, yaw; + tf2::Matrix3x3(mOdom2BaseTransf.getRotation()).getRPY(roll, pitch, yaw); + + NODELET_DEBUG("+++ Odometry [%s -> %s] - {%.3f,%.3f,%.3f} {%.3f,%.3f,%.3f}", mOdomFrameId.c_str(), + mBaseFrameId.c_str(), mOdom2BaseTransf.getOrigin().x(), mOdom2BaseTransf.getOrigin().y(), + mOdom2BaseTransf.getOrigin().z(), roll * RAD2DEG, pitch * RAD2DEG, yaw * RAD2DEG); - size_t objCount = objects.object_list.size(); + // Publish odometry message + publishOdom(mOdom2BaseTransf, deltaOdom, mFrameTimestamp); + mPosTrackingReady = true; - zed_interfaces::Objects objMsg; + mOdomMutex.unlock(); // + } +} +void ZEDWrapperNodelet::processPose() +{ + if (!mSensor2BaseTransfValid) + { + getSens2BaseTransform(); + } - objMsg.objects.resize(objCount); + if (!mSensor2CameraTransfValid) + { + getSens2CameraTransform(); + } - std_msgs::Header header; - header.stamp = mFrameTimestamp; - //header.frame_id = mCameraFrameId; - header.frame_id = mLeftCamFrameId; + if (!mCamera2BaseTransfValid) + { + getCamera2BaseTransform(); + } - visualization_msgs::MarkerArray objMarkersMsg; - //objMarkersMsg.markers.resize(objCount * 3); + mPosTrackingStatusWorld = mZed.getPosition(mLastZedPose, sl::REFERENCE_FRAME::WORLD); - for (size_t i = 0; i < objCount; i++) { - sl::ObjectData data = objects.object_list[i]; + NODELET_DEBUG_STREAM("ZED Pose: " << mLastZedPose.pose_data.getInfos()); - if (publishObj) { - objMsg.objects[i].header = header; + publishPoseStatus(); - objMsg.objects[i].label = sl::toString(data.label).c_str(); - objMsg.objects[i].label_id = data.id; - objMsg.objects[i].confidence = data.confidence; + sl::Translation 
translation = mLastZedPose.getTranslation(); + sl::Orientation quat = mLastZedPose.getOrientation(); - objMsg.objects[i].tracking_state = static_cast(data.tracking_state); + if (quat.sum() == 0) + { + return; + } - objMsg.objects[i].position.x = data.position.x; - objMsg.objects[i].position.y = data.position.y; - objMsg.objects[i].position.z = data.position.z; + NODELET_DEBUG_STREAM("MAP -> Tracking Status: " << sl::toString(mPosTrackingStatusWorld).c_str()); - objMsg.objects[i].linear_vel.x = data.velocity.x; - objMsg.objects[i].linear_vel.y = data.velocity.y; - objMsg.objects[i].linear_vel.z = data.velocity.z; + NODELET_DEBUG("Sensor POSE [%s -> %s]:\n%s}", mLeftCamFrameId.c_str(), mMapFrameId.c_str(), + mLastZedPose.pose_data.getInfos().c_str()); - for (int c = 0; c < data.bounding_box_2d.size(); c++) { - objMsg.objects[i].bbox_2d[c].x = data.bounding_box_2d[c].x; - objMsg.objects[i].bbox_2d[c].y = data.bounding_box_2d[c].y; - objMsg.objects[i].bbox_2d[c].z = 0.0f; - } + if (mPosTrackingStatusWorld == sl::POSITIONAL_TRACKING_STATE::OK) + { + double roll, pitch, yaw; + tf2::Matrix3x3(tf2::Quaternion(quat.ox, quat.oy, quat.oz, quat.ow)).getRPY(roll, pitch, yaw); - for (int c = 0; c < data.bounding_box.size(); c++) { - objMsg.objects[i].bbox_3d[c].x = data.bounding_box[c].x; - objMsg.objects[i].bbox_3d[c].y = data.bounding_box[c].y; - objMsg.objects[i].bbox_3d[c].z = data.bounding_box[c].z; - } - } + tf2::Transform map_to_sens_transf; + map_to_sens_transf.setOrigin(tf2::Vector3(translation(0), translation(1), translation(2))); + map_to_sens_transf.setRotation(tf2::Quaternion(quat(0), quat(1), quat(2), quat(3))); - if (publishViz) { + mMap2BaseTransf = + mSensor2BaseTransf.inverse() * map_to_sens_transf * mSensor2BaseTransf; // Base position in map frame - if( data.bounding_box.size()!=8 ) { - continue; // No 3D information available - } + if (mTwoDMode) + { + tf2::Vector3 tr_2d = mMap2BaseTransf.getOrigin(); + tr_2d.setZ(mFixedZValue); + 
mMap2BaseTransf.setOrigin(tr_2d); - visualization_msgs::Marker bbx_marker; - - bbx_marker.header = header; - bbx_marker.ns = "bbox"; - bbx_marker.id = data.id; - bbx_marker.type = visualization_msgs::Marker::CUBE; - bbx_marker.action = visualization_msgs::Marker::ADD; - bbx_marker.pose.position.x = data.position.x; - bbx_marker.pose.position.y = data.position.y; - bbx_marker.pose.position.z = data.position.z; - bbx_marker.pose.orientation.x = 0.0; - bbx_marker.pose.orientation.y = 0.0; - bbx_marker.pose.orientation.z = 0.0; - bbx_marker.pose.orientation.w = 1.0; - - bbx_marker.scale.x = fabs(data.bounding_box[0].x - data.bounding_box[1].x); - bbx_marker.scale.y = fabs(data.bounding_box[0].y - data.bounding_box[3].y); - bbx_marker.scale.z = fabs(data.bounding_box[0].z - data.bounding_box[4].z); - sl::float3 color = generateColorClass(data.id); - bbx_marker.color.b = color.b; - bbx_marker.color.g = color.g; - bbx_marker.color.r = color.r; - bbx_marker.color.a = 0.4; - bbx_marker.lifetime = ros::Duration(0.3); - bbx_marker.frame_locked = true; - - objMarkersMsg.markers.push_back(bbx_marker); - - visualization_msgs::Marker label; - label.header = header; - label.lifetime = ros::Duration(0.3); - label.action = visualization_msgs::Marker::ADD; - label.id = data.id; - label.ns = "label"; - label.type = visualization_msgs::Marker::TEXT_VIEW_FACING; - label.scale.z = 0.1; - - label.color.r = 255-color.r; - label.color.g = 255-color.g; - label.color.b = 255-color.b; - label.color.a = 0.75f; - - label.pose.position.x = data.position.x; - label.pose.position.y = data.position.y; - label.pose.position.z = data.position.z+1.1*bbx_marker.scale.z/2; - - label.text = std::to_string(data.id) + ". 
" + std::string(sl::toString(data.label).c_str()); - - objMarkersMsg.markers.push_back(label); - - // visualization_msgs::Marker lines; - // visualization_msgs::Marker label; - // visualization_msgs::Marker spheres; - - // lines.pose.orientation.w = label.pose.orientation.w = spheres.pose.orientation.w = 1.0; - - // lines.header = header; - // lines.lifetime = ros::Duration(0.3); - // lines.action = visualization_msgs::Marker::ADD; - // lines.id = data.id; - // lines.ns = "bbox"; - // lines.type = visualization_msgs::Marker::LINE_LIST; - // lines.scale.x = 0.005; - - // label.header = header; - // label.lifetime = ros::Duration(0.3); - // label.action = visualization_msgs::Marker::ADD; - // label.id = data.id; - // label.ns = "label"; - // label.type = visualization_msgs::Marker::TEXT_VIEW_FACING; - // //label.scale.x = 0.05; - // //label.scale.y = 0.05; - // label.scale.z = 0.1; - - // spheres.header = header; - // spheres.lifetime = ros::Duration(0.3); - // spheres.action = visualization_msgs::Marker::ADD; - // spheres.id = data.id; - // spheres.ns = "spheres"; - // spheres.type = visualization_msgs::Marker::SPHERE_LIST; - // spheres.scale.x = 0.02; - // spheres.scale.y = 0.02; - // spheres.scale.z = 0.02; - - // spheres.color.r = data.tracking_state == sl::OBJECT_TRACKING_STATE::OK ? - // 0.1f : (data.tracking_state == sl::OBJECT_TRACKING_STATE::SEARCHING ? - // 0.9f : 0.3f); - // spheres.color.g = data.tracking_state == sl::OBJECT_TRACKING_STATE::OK ? - // 0.9f : (data.tracking_state == sl::OBJECT_TRACKING_STATE::SEARCHING ? - // 0.1f : 0.3f); - // spheres.color.b = data.tracking_state == sl::OBJECT_TRACKING_STATE::OK ? - // 0.1f : (data.tracking_state == sl::OBJECT_TRACKING_STATE::SEARCHING ? 
- // 0.1f : 0.3f); - // spheres.color.a = 0.75f; - - // sl::float3 color = generateColorClass(data.id); - - // lines.color.r = color.r; - // lines.color.g = color.g; - // lines.color.b = color.b; - // lines.color.a = 0.75f; - - // label.color.r = color.r; - // label.color.g = color.g; - // label.color.b = color.b; - // label.color.a = 0.75f; - - // label.pose.position.x = data.position.x; - // label.pose.position.y = data.position.y; - // label.pose.position.z = data.position.z; - - // label.text = std::to_string(data.id) + ". " + std::string(sl::toString(data.label).c_str()); - - // geometry_msgs::Point p0; - // geometry_msgs::Point p1; - - // // Centroid - // p0.x = data.position.x; - // p0.y = data.position.y; - // p0.z = data.position.z; - - // spheres.points.resize(9); - // spheres.points[8] = p0;; - - // lines.points.resize(24); - - // int linePtIdx = 0; - - // // Top square - // for (int v = 0; v < 3; v++) { - // lines.points[linePtIdx].x = data.bounding_box[v].x; - // lines.points[linePtIdx].y = data.bounding_box[v].y; - // lines.points[linePtIdx].z = data.bounding_box[v].z; - // linePtIdx++; - - // lines.points[linePtIdx].x = data.bounding_box[v + 1].x; - // lines.points[linePtIdx].y = data.bounding_box[v + 1].y; - // lines.points[linePtIdx].z = data.bounding_box[v + 1].z; - // linePtIdx++; - // } - - // lines.points[linePtIdx].x = data.bounding_box[3].x; - // lines.points[linePtIdx].y = data.bounding_box[3].y; - // lines.points[linePtIdx].z = data.bounding_box[3].z; - // linePtIdx++; - - // lines.points[linePtIdx].x = data.bounding_box[0].x; - // lines.points[linePtIdx].y = data.bounding_box[0].y; - // lines.points[linePtIdx].z = data.bounding_box[0].z; - // linePtIdx++; - - // // Bottom square - // for (int v = 4; v < 7; v++) { - // lines.points[linePtIdx].x = data.bounding_box[v].x; - // lines.points[linePtIdx].y = data.bounding_box[v].y; - // lines.points[linePtIdx].z = data.bounding_box[v].z; - // linePtIdx++; - - // lines.points[linePtIdx].x = 
data.bounding_box[v + 1].x; - // lines.points[linePtIdx].y = data.bounding_box[v + 1].y; - // lines.points[linePtIdx].z = data.bounding_box[v + 1].z; - // linePtIdx++; - // } - - // lines.points[linePtIdx].x = data.bounding_box[7].x; - // lines.points[linePtIdx].y = data.bounding_box[7].y; - // lines.points[linePtIdx].z = data.bounding_box[7].z; - // linePtIdx++; - - // lines.points[linePtIdx].x = data.bounding_box[4].x; - // lines.points[linePtIdx].y = data.bounding_box[4].y; - // lines.points[linePtIdx].z = data.bounding_box[4].z; - // linePtIdx++; - - // // Lateral lines and vertex spheres - // for (int v = 0; v < 4; v++) { - // lines.points[linePtIdx].x = data.bounding_box[v].x; - // lines.points[linePtIdx].y = data.bounding_box[v].y; - // lines.points[linePtIdx].z = data.bounding_box[v].z; - // linePtIdx++; - - // lines.points[linePtIdx].x = data.bounding_box[v + 4].x; - // lines.points[linePtIdx].y = data.bounding_box[v + 4].y; - // lines.points[linePtIdx].z = data.bounding_box[v + 4].z; - // linePtIdx++; - - // spheres.points[v * 2].x = data.bounding_box[v].x; - // spheres.points[v * 2].y = data.bounding_box[v].y; - // spheres.points[v * 2].z = data.bounding_box[v].z; - - // spheres.points[v * 2 + 1].x = data.bounding_box[v + 4].x; - // spheres.points[v * 2 + 1].y = data.bounding_box[v + 4].y; - // spheres.points[v * 2 + 1].z = data.bounding_box[v + 4].z; - // } - - // objMarkersMsg.markers[i * 3] = lines; - // objMarkersMsg.markers[i * 3 + 1] = label; - // objMarkersMsg.markers[i * 3 + 2] = spheres; - } - } + tf2::Matrix3x3(mMap2BaseTransf.getRotation()).getRPY(roll, pitch, yaw); - if (publishObj) { - mPubObjDet.publish(objMsg); + tf2::Quaternion quat_2d; + quat_2d.setRPY(0.0, 0.0, yaw); + + mMap2BaseTransf.setRotation(quat_2d); } - if (mPubObjDetViz) { - mPubObjDetViz.publish(objMarkersMsg); + // double roll, pitch, yaw; + tf2::Matrix3x3(mMap2BaseTransf.getRotation()).getRPY(roll, pitch, yaw); + + NODELET_DEBUG("*** Base POSE [%s -> %s] - {%.3f,%.3f,%.3f} 
{%.3f,%.3f,%.3f}", mMapFrameId.c_str(), + mBaseFrameId.c_str(), mMap2BaseTransf.getOrigin().x(), mMap2BaseTransf.getOrigin().y(), + mMap2BaseTransf.getOrigin().z(), roll * RAD2DEG, pitch * RAD2DEG, yaw * RAD2DEG); + + // Transformation from map to odometry frame + mOdomMutex.lock(); // + mMap2OdomTransf = mMap2BaseTransf * mOdom2BaseTransf.inverse(); + mOdomMutex.unlock(); // + + // double roll, pitch, yaw; + tf2::Matrix3x3(mMap2OdomTransf.getRotation()).getRPY(roll, pitch, yaw); + + NODELET_DEBUG("+++ Diff [%s -> %s] - {%.3f,%.3f,%.3f} {%.3f,%.3f,%.3f}", mMapFrameId.c_str(), mOdomFrameId.c_str(), + mMap2OdomTransf.getOrigin().x(), mMap2OdomTransf.getOrigin().y(), mMap2OdomTransf.getOrigin().z(), + roll * RAD2DEG, pitch * RAD2DEG, yaw * RAD2DEG); + //} + + // Publish Pose message + publishPose(); + mPosTrackingReady = true; + } +} + +void ZEDWrapperNodelet::publishTFs(ros::Time t) +{ + // NODELET_DEBUG("publishTFs"); + + // NODELET_DEBUG_STREAM("publishTFs - t type:" << + // t.get_clock_type()); + + if (!mPosTrackingReady) + { + return; + } + + if (t.isZero()) + { + NODELET_DEBUG("Time zero: not publishing TFs"); + return; + } + + // Publish pose tf only if enabled + if (mDepthMode != sl::DEPTH_MODE::NONE && mPublishTF) + { + publishOdomTF(t); // publish the base Frame in odometry frame + + if (mPublishMapTF) + { + publishPoseTF(t); // publish the odometry Frame in map frame } + } +} + +void ZEDWrapperNodelet::publishOdomTF(ros::Time t) +{ + // NODELET_DEBUG("publishOdomTF"); + + // ----> Avoid duplicated TF publishing + static ros::Time last_stamp; + + if (t == last_stamp) + { + return; + } + last_stamp = t; + // <---- Avoid duplicated TF publishing + + if (!mSensor2BaseTransfValid) + { + getSens2BaseTransform(); + } + + if (!mSensor2CameraTransfValid) + { + getSens2CameraTransform(); + } + + if (!mCamera2BaseTransfValid) + { + getCamera2BaseTransform(); + } + + geometry_msgs::TransformStamped transformStamped; + transformStamped.header.stamp = t; + + // 
NODELET_DEBUG_STREAM(get_logger(), "Odom TS: " << transformStamped.header.stamp); + + transformStamped.header.frame_id = mOdomFrameId; + transformStamped.child_frame_id = mBaseFrameId; + // conversion from Tranform to message + mOdomMutex.lock(); // + tf2::Vector3 translation = mOdom2BaseTransf.getOrigin(); + tf2::Quaternion quat = mOdom2BaseTransf.getRotation(); + mOdomMutex.unlock(); // + transformStamped.transform.translation.x = translation.x(); + transformStamped.transform.translation.y = translation.y(); + transformStamped.transform.translation.z = translation.z(); + transformStamped.transform.rotation.x = quat.x(); + transformStamped.transform.rotation.y = quat.y(); + transformStamped.transform.rotation.z = quat.z(); + transformStamped.transform.rotation.w = quat.w(); + + // Publish transformation + mTfBroadcaster.sendTransform(transformStamped); +} +void ZEDWrapperNodelet::publishPoseTF(ros::Time t) +{ + // NODELET_DEBUG("publishPoseTF"); + + // ----> Avoid duplicated TF publishing + static ros::Time last_stamp; + + if (t == last_stamp) + { + return; + } + last_stamp = t; + // <---- Avoid duplicated TF publishing + + if (!mSensor2BaseTransfValid) + { + getSens2BaseTransform(); + } + + if (!mSensor2CameraTransfValid) + { + getSens2CameraTransform(); + } + + if (!mCamera2BaseTransfValid) + { + getCamera2BaseTransform(); + } + + geometry_msgs::TransformStamped transformStamped; + transformStamped.header.stamp = t; + transformStamped.header.frame_id = mMapFrameId; + transformStamped.child_frame_id = mOdomFrameId; + // conversion from Tranform to message + tf2::Vector3 translation = mMap2OdomTransf.getOrigin(); + tf2::Quaternion quat = mMap2OdomTransf.getRotation(); + transformStamped.transform.translation.x = translation.x(); + transformStamped.transform.translation.y = translation.y(); + transformStamped.transform.translation.z = translation.z(); + transformStamped.transform.rotation.x = quat.x(); + transformStamped.transform.rotation.y = quat.y(); + 
transformStamped.transform.rotation.z = quat.z(); + transformStamped.transform.rotation.w = quat.w(); + + // Publish transformation + mTfBroadcaster.sendTransform(transformStamped); } -} // namespace +} // namespace zed_nodelets diff --git a/zed_ros/CMakeLists.txt b/zed_ros/CMakeLists.txt index b983ecce..b659e33e 100644 --- a/zed_ros/CMakeLists.txt +++ b/zed_ros/CMakeLists.txt @@ -1,4 +1,4 @@ cmake_minimum_required(VERSION 2.8.3) project(zed_ros) find_package(catkin REQUIRED) -catkin_metapackage() +catkin_metapackage() \ No newline at end of file diff --git a/zed_ros/package.xml b/zed_ros/package.xml index f82d8b33..61ae8670 100644 --- a/zed_ros/package.xml +++ b/zed_ros/package.xml @@ -1,7 +1,7 @@ zed_ros - 3.1.0 + 3.5.0 Stereolabs zed-ros-wrapper support meta package STEREOLABS diff --git a/zed_wrapper/.gitignore b/zed_wrapper/.gitignore index 7a8a3cd7..20e7ef7a 100644 --- a/zed_wrapper/.gitignore +++ b/zed_wrapper/.gitignore @@ -39,3 +39,7 @@ Makefile* # QtCtreator CMake CMakeLists.txt.user* +# VS code +.vscode +.vscode/* + diff --git a/zed_wrapper/CMakeLists.txt b/zed_wrapper/CMakeLists.txt index 2f0ea326..f0b7d62d 100644 --- a/zed_wrapper/CMakeLists.txt +++ b/zed_wrapper/CMakeLists.txt @@ -1,16 +1,16 @@ -cmake_minimum_required(VERSION 2.8.7) +cmake_minimum_required(VERSION 3.5) project(zed_wrapper) -#Fix for QtCretor -list(APPEND CMAKE_PREFIX_PATH "/opt/ros/$ENV{ROS_DISTRO}/") +## Generate symbols for IDE indexer +set(CMAKE_EXPORT_COMPILE_COMMANDS ON) # if CMAKE_BUILD_TYPE is not specified, take 'Release' as default IF(NOT CMAKE_BUILD_TYPE) SET(CMAKE_BUILD_TYPE Release) ENDIF(NOT CMAKE_BUILD_TYPE) -find_package(catkin COMPONENTS +find_package(catkin REQUIRED COMPONENTS roscpp rosconsole nodelet diff --git a/zed_wrapper/README.md b/zed_wrapper/README.md deleted file mode 100644 index df76353c..00000000 --- a/zed_wrapper/README.md +++ /dev/null @@ -1,113 +0,0 @@ -# Stereolabs ZED Camera - ROS Integration - -This package lets you use the ZED stereo camera with 
ROS. It outputs the camera left and right images, depth map, point cloud, pose information and supports the use of multiple ZED cameras. - -[More information](https://www.stereolabs.com/documentation/guides/using-zed-with-ros/introduction.html) - -## Getting started - -- First, download the latest version of the ZED SDK on [stereolabs.com](https://www.stereolabs.com/developers/) -- [Install](#build-the-program) the ZED ROS wrapper -- For more information, check out our [ROS documentation](https://www.stereolabs.com/documentation/guides/using-zed-with-ros/introduction.html). If you want to customize the wrapper, check the [ZED API documentation](https://www.stereolabs.com/developers/documentation/API/) - -### Prerequisites - -- Ubuntu 16.04 or newer (Ubuntu 18 recommended) -- [ZED SDK **≥ 3.0**](https://www.stereolabs.com/developers/) and its dependency [CUDA](https://developer.nvidia.com/cuda-downloads) -- [ROS Kinetic](http://wiki.ros.org/kinetic/Installation/Ubuntu) or [ROS Melodic](http://wiki.ros.org/melodic/Installation/Ubuntu) - -### Build the program - -The zed_ros_wrapper is a catkin package. 
It depends on the following ROS packages: - - - nav_msgs - - tf2_geometry_msgs - - message_runtime - - catkin - - roscpp - - stereo_msgs - - rosconsole - - robot_state_publisher - - urdf - - sensor_msgs - - image_transport - - diagnostic_updater - - dynamic_reconfigure - - tf2_ros - - message_generation - - nodelet - -Open a terminal, clone the repository, update the dependencies and build the packages: - - $ cd ~/catkin_ws/src - $ git clone https://github.com/stereolabs/zed-ros-wrapper.git - $ cd ../ - $ rosdep install --from-paths src --ignore-src -r -y - $ catkin_make -DCMAKE_BUILD_TYPE=Release - $ source ./devel/setup.bash - -### Run the program - -To launch the ZED node use: - - - ZED camera: `$ roslaunch zed_wrapper zed.launch` - - ZED Mini camera: `$ roslaunch zed_wrapper zedm.launch` - - ZED2 camera: `$ roslaunch zed_wrapper zed2.launch` - - To select the ZED from its serial number: - `$ roslaunch zed_wrapper zed.launch serial_number:=1010 #replace 1010 with the actual SN` - -### Parameters -To configure the features of the ZED node you can modify the configuration YAML files in the `params` folder. -A detailed description of each parameter is available in the [official online documentation](https://www.stereolabs.com/documentation/guides/using-zed-with-ros/introduction.html). - -### SVO recording -[SVO recording](https://www.stereolabs.com/docs/video/#video-recording) can be started and stopped while the ZED node is running using the service `start_svo_recording` and the service `stop_svo_recording`. -[More information](https://www.stereolabs.com/docs/ros/zed_node/#services) - -### Object Detection -The SDK v3.0 introduces the Object Detection and Tracking module. **The Object Detection module is available only with a ZED 2 camera**. -The Object Detection can be enabled automatically when the node start setting the parameter `object_detection/od_enabled` to `true` in the file `zed2.yaml`. 
-The Object Detection can be enabled/disabled manually calling the services `start_object_detection` and `stop_object_detection`. - -### Spatial Mapping -The Spatial Mapping can be enabled automatically when the node start setting the parameter `mapping/mapping_enabled` to `true` in the file `common.yaml`. -The Spatial Mapping can be enabled/disabled manually calling the services `start_3d_mapping` and `stop_3d_mapping`. - -### Diagnostic -The ZED node publishes diagnostic information that can be used by the robotic system using a [diagnostic_aggregator node](http://wiki.ros.org/diagnostic_aggregator). - -With the `rqt` plugin `Runtime monitor`, it is possible to retrieve all the diagnostic information, checking that the node -is working as expected. - -### 2D mode -For robots moving on a planar surface it is possible to activate the "2D mode" (parameter `tracking/two_d_mode` in `common.yaml`). -The value of the coordinate Z for odometry and pose will have a fixed value (parameter `tracking/fixed_z_value` in `common.yaml`). -Roll and pitch and relative velocities will be fixed to zero. - -### Diagnostic -The ZED node publishes diagnostic information that can be used by the robotic system using a [diagnostic_aggregator node](http://wiki.ros.org/diagnostic_aggregator). 
- -With the `rqt` plugin `Runtime monitor`, it is possible to retrieve all the diagnostic information, checking that the node is working as expected: - -![](../images/rqt_diagnostic.jpg) - -A brief explanation of each field: - - - `Component`: name of the diagnostic component - - `Message`: summary of the status of the ZED node - - `HardwareID`: Model of the ZED camera and its serial number - - `Capture`: grabbing frequency (if video or depth data are subscribed) and the percentage respect to the camera frame rate - - `Processing time`: time in seconds spent to elaborate data and the time limit to achieve max frame rate - - `Depth status`: indicates if the depth processing is performed - - `Point Cloud`: point cloud publishing frequency (if there is at least a subscriber) and the percentage respect to the camera frame rate - - `Floor Detection`: if the floor detection is enabled, indicates if the floor has been detected and the camera position correctly initialized - - `Tracking status`: indicates the status of the tracking, if enabled - - `IMU`: the publishing frequency of the IMU topics, if the camera is the ZED Mini and there is at least a subscriber - - -[Detailed information](https://www.stereolabs.com/documentation/guides/using-zed-with-ros/introduction.html) - - - - diff --git a/zed_wrapper/cfg/Zed.cfg b/zed_wrapper/cfg/Zed.cfg index 051c419e..9a11d9fe 100755 --- a/zed_wrapper/cfg/Zed.cfg +++ b/zed_wrapper/cfg/Zed.cfg @@ -6,7 +6,6 @@ from dynamic_reconfigure.parameter_generator_catkin import * gen = ParameterGenerator() group_general = gen.add_group("general") -group_general.add("pub_frame_rate", double_t, 0, "Video and Depth data frequency", 15.0, 0.1, 60.0); group_depth = gen.add_group("depth") group_depth.add("depth_confidence", int_t, 1, "Depth confidence threshold", 50, 1, 100) diff --git a/zed_wrapper/launch/include/zed_camera.launch.xml b/zed_wrapper/launch/include/zed_camera.launch.xml index f0736a92..9a50e661 100644 --- 
a/zed_wrapper/launch/include/zed_camera.launch.xml +++ b/zed_wrapper/launch/include/zed_camera.launch.xml @@ -1,6 +1,6 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/zed_wrapper/launch/zed_no_tf.launch b/zed_wrapper/launch/zed_no_tf.launch index c6514e7b..dbc59806 100644 --- a/zed_wrapper/launch/zed_no_tf.launch +++ b/zed_wrapper/launch/zed_no_tf.launch @@ -1,6 +1,6 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/zed_wrapper/launch/zedxm.launch b/zed_wrapper/launch/zedxm.launch new file mode 100644 index 00000000..adcc9e63 --- /dev/null +++ b/zed_wrapper/launch/zedxm.launch @@ -0,0 +1,57 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/zed_wrapper/nodelet_plugins.xml b/zed_wrapper/nodelet_plugins.xml deleted file mode 100644 index b956da92..00000000 --- a/zed_wrapper/nodelet_plugins.xml +++ /dev/null @@ -1,7 +0,0 @@ - - - This is the nodelet of ROS interface for Stereolabs ZED Camera. - - diff --git a/zed_wrapper/package.xml b/zed_wrapper/package.xml index 0388bfe3..d05b057c 100644 --- a/zed_wrapper/package.xml +++ b/zed_wrapper/package.xml @@ -1,7 +1,7 @@ zed_wrapper - 3.1.0 + 3.5.0 zed_wrapper is a ROS package containing the ZED wrapper node for the ZED library for interfacing with the ZED camera. The zed_wrapper_node is a simple loader for the ZEDWrapperNodelet that is the main interface between ROS and the ZED SDK. 
diff --git a/zed_wrapper/params/common.yaml b/zed_wrapper/params/common.yaml index cd8b97d2..ad73a711 100644 --- a/zed_wrapper/params/common.yaml +++ b/zed_wrapper/params/common.yaml @@ -1,54 +1,60 @@ -# params/common_yaml +# params/common.yaml # Common parameters to Stereolabs ZED and ZED mini cameras --- # Dynamic parameters cannot have a namespace -brightness: 4 # Dynamic -contrast: 4 # Dynamic -hue: 0 # Dynamic -saturation: 4 # Dynamic -sharpness: 4 # Dynamic -gamma: 8 # Dynamic - Requires SDK >=v3.1 -auto_exposure_gain: true # Dynamic -gain: 100 # Dynamic - works only if `auto_exposure_gain` is false -exposure: 100 # Dynamic - works only if `auto_exposure_gain` is false -auto_whitebalance: true # Dynamic -whitebalance_temperature: 42 # Dynamic - works only if `auto_whitebalance` is false -depth_confidence: 50 # Dynamic -depth_texture_conf: 100 # Dynamic -pub_frame_rate: 15.0 # Dynamic - frequency of publishing of video and depth data -point_cloud_freq: 15.0 # Dynamic - frequency of the pointcloud publishing (equal or less to `grab_frame_rate` value) +brightness: 4 # Dynamic +contrast: 4 # Dynamic +hue: 0 # Dynamic +saturation: 4 # Dynamic +sharpness: 4 # Dynamic +gamma: 8 # Dynamic - Requires SDK >=v3.1 +auto_exposure_gain: true # Dynamic +gain: 100 # Dynamic - works only if `auto_exposure_gain` is false +exposure: 100 # Dynamic - works only if `auto_exposure_gain` is false +auto_whitebalance: true # Dynamic +whitebalance_temperature: 42 # Dynamic - works only if `auto_whitebalance` is false +depth_confidence: 50 # Dynamic +depth_texture_conf: 100 # Dynamic +point_cloud_freq: 10.0 # Dynamic - frequency of the pointcloud publishing (equal or less to `grab_frame_rate` value) general: - camera_name: zed # A name for the camera (can be different from camera model and node name) + camera_name: zed # A name for the camera (can be different from camera model and node name and can be overwritten by the launch file) zed_id: 0 serial_number: 0 - resolution: 2 # '0': 
HD2K, '1': HD1080, '2': HD720, '3': VGA - grab_frame_rate: 30 # Frequency of frame grabbing for internal SDK operations gpu_id: -1 base_frame: 'base_link' # must be equal to the frame_id used in the URDF file - verbose: false # Enable info message by the ZED SDK + sdk_verbose: 1 # Set verbose level of the ZED SDK svo_compression: 2 # `0`: LOSSLESS, `1`: AVCHD, `2`: HEVC self_calib: true # enable/disable self calibration at starting - camera_flip: false + camera_flip: 'AUTO' # camera flip mode: 'OFF', 'ON', 'AUTO' + pub_resolution: 'CUSTOM' # The resolution used for output. 'NATIVE' to use the same `general.grab_resolution` - `CUSTOM` to apply the `general.pub_downscale_factor` downscale factory to reduce bandwidth in transmission + pub_downscale_factor: 2.0 # rescale factor used to rescale image before publishing when 'pub_resolution' is 'CUSTOM' + pub_frame_rate: 15.0 # frequency of publishing of video and depth data (see SDK API "InitParameters::grab_compute_capping_fps") + svo_realtime: true # if true the input SVO will be played trying to respect the original framerate eventually skipping frames, otherwise every frame will be processed respecting the `pub_frame_rate` setting + region_of_interest: '[]' # A polygon defining the ROI where the ZED SDK perform the processing ignoring the rest. Coordinates must be normalized to '1.0' to be resolution independent. + #region_of_interest: '[[0.25,0.33],[0.75,0.33],[0.75,0.5],[0.5,0.75],[0.25,0.5]]' # A polygon defining the ROI where the ZED SDK perform the processing ignoring the rest. Coordinates must be normalized to '1.0' to be resolution independent. + #region_of_interest: '[[0.25,0.25],[0.75,0.25],[0.75,0.75],[0.25,0.75]]' # A polygon defining the ROI where the ZED SDK perform the processing ignoring the rest. Coordinates must be normalized to '1.0' to be resolution independent. 
+ #region_of_interest: '[[0.5,0.25],[0.75,0.5],[0.5,0.75],[0.25,0.5]]' # A polygon defining the ROI where the ZED SDK perform the processing ignoring the rest. Coordinates must be normalized to '1.0' to be resolution independent. -video: - img_downsample_factor: 1.0 # Resample factor for images [0.01,1.0] The SDK works with native image sizes, but publishes rescaled image. - extrinsic_in_camera_frame: true # if `false` extrinsic parameter in `camera_info` will use ROS native frame (X FORWARD, Z UP) instead of the camera frame (Z FORWARD, Y DOWN) [`true` use old behavior as for version < v3.1] +#video: depth: - quality: 3 # '0': NONE, '1': PERFORMANCE, '2': QUALITY, '3': ULTRA - sensing_mode: 0 # '0': STANDARD, '1': FILL (not use FILL for robotic applications) - depth_stabilization: 1 # `0`: disabled, `1`: enabled - openni_depth_mode: 0 # '0': 32bit float meters, '1': 16bit uchar millimeters - depth_downsample_factor: 1.0 # Resample factor for depth data matrices [0.01,1.0] The SDK works with native data sizes, but publishes rescaled matrices (depth map, point cloud, ...) + depth_mode: 'ULTRA' # 'NONE', 'PERFORMANCE', 'QUALITY', 'ULTRA', 'NEURAL', `NEURAL_PLUS` + depth_stabilization: 1 # [0-100] - 0: Disabled + openni_depth_mode: false # 'false': 32bit float meter units, 'true': 16bit uchar millimeter units pos_tracking: + pos_tracking_enabled: true # True to enable positional tracking from start + pos_tracking_mode: 'GEN_2' # Matches the ZED SDK setting: 'GEN_1', 'GEN_2' + set_gravity_as_origin: true # If 'true' align the positional tracking world to imu gravity measurement. Keep the yaw from the user initial pose. + imu_fusion: true # enable/disable IMU fusion. When set to false, only the optical odometry will be used. 
publish_tf: true # publish `odom -> base_link` TF publish_map_tf: true # publish `map -> odom` TF - map_frame: 'map' - odometry_frame: 'odom' - area_memory_db_path: '' + map_frame: 'map' # main frame + odometry_frame: 'odom' # odometry frame + area_memory_db_path: '' # file loaded when the node starts to restore the "known visual features" map. + save_area_memory_db_on_exit: false # save the "known visual features" map when the node is correctly closed to the path indicated by `area_memory_db_path` area_memory: true # Enable to detect loop closure floor_alignment: false # Enable to automatically calculate camera/floor offset initial_base_pose: [0.0,0.0,0.0, 0.0,0.0,0.0] # Initial position of the `base_frame` -> [X, Y, Z, R, P, Y] @@ -57,11 +63,32 @@ pos_tracking: path_max_count: -1 # use '-1' for unlimited path size two_d_mode: false # Force navigation on a plane. If true the Z value will be fixed to "fixed_z_value", roll and pitch to zero fixed_z_value: 0.00 # Value to be used for Z coordinate if `two_d_mode` is true + depth_min_range: 0.0 # Set this value for removing fixed zones of the robot in the FoV of the camerafrom the visual odometry evaluation + set_as_static: false # If 'true' the camera will be static and not move in the environment mapping: - mapping_enabled: false # True to enable mapping and fused point cloud pubblication + mapping_enabled: false # True to enable mapping and fused point cloud publication resolution: 0.05 # maps resolution in meters [0.01f, 0.2f] max_mapping_range: -1 # maximum depth range while mapping in meters (-1 for automatic calculation) [2.0, 20.0] fused_pointcloud_freq: 1.0 # frequency of the publishing of the fused colored point cloud + clicked_point_topic: '/clicked_point' # Topic published by Rviz when a point of the cloud is clicked. Used for plane detection +sensors: + sensors_timestamp_sync: false # Synchronize Sensors messages timestamp with latest received frame + max_pub_rate: 200. 
# max frequency of publishing of sensors data. MAX: 400. - MIN: grab rate + publish_imu_tf: true # publish `IMU -> _left_camera_frame` TF +object_detection: + od_enabled: false # True to enable Object Detection [not available for ZED] + model: 'MULTI_CLASS_BOX_ACCURATE' # 'MULTI_CLASS_BOX_FAST', 'MULTI_CLASS_BOX_MEDIUM', 'MULTI_CLASS_BOX_ACCURATE', 'PERSON_HEAD_BOX_FAST', 'PERSON_HEAD_BOX_ACCURATE' + max_range: 15. # Maximum detection range + allow_reduced_precision_inference: true # Allow inference to run at a lower precision to improve runtime and memory usage + prediction_timeout: 0.5 # During this time [sec], the object will have OK state even if it is not detected. Set this parameter to 0 to disable SDK predictions + object_tracking_enabled: true # Enable/disable the tracking of the detected objects + mc_people: true # Enable/disable the detection of persons for 'MULTI_CLASS_BOX_X' models + mc_vehicle: true # Enable/disable the detection of vehicles for 'MULTI_CLASS_BOX_X' models + mc_bag: true # Enable/disable the detection of bags for 'MULTI_CLASS_BOX_X' models + mc_animal: true # Enable/disable the detection of animals for 'MULTI_CLASS_BOX_X' models + mc_electronics: true # Enable/disable the detection of electronic devices for 'MULTI_CLASS_BOX_X' models + mc_fruit_vegetable: true # Enable/disable the detection of fruits and vegetables for 'MULTI_CLASS_BOX_X' models + mc_sport: true # Enable/disable the detection of sport-related objects for 'MULTI_CLASS_BOX_X' models diff --git a/zed_wrapper/params/zed.yaml b/zed_wrapper/params/zed.yaml index 1594c7f7..15212700 100644 --- a/zed_wrapper/params/zed.yaml +++ b/zed_wrapper/params/zed.yaml @@ -4,7 +4,9 @@ general: camera_model: 'zed' + grab_resolution: 'HD720' # 'HD2K', 'HD1080', 'HD720', 'VGA', 'AUTO' + grab_frame_rate: 30 # Frequency of frame grabbing for internal SDK operations depth: - min_depth: 0.7 # Min: 0.3, Max: 3.0 - Default 0.7 - Note: reducing this value wil require more computational power and GPU 
memory - max_depth: 10.0 # Max: 40.0 + min_depth: 0.3 # Min: 0.3, Max: 3.0 - Default 0.7 - Note: reducing this value wil require more computational power and GPU memory + max_depth: 15.0 # Max: 40.0 diff --git a/zed_wrapper/params/zed2.yaml b/zed_wrapper/params/zed2.yaml index b3026fbf..e6f18f53 100644 --- a/zed_wrapper/params/zed2.yaml +++ b/zed_wrapper/params/zed2.yaml @@ -1,23 +1,13 @@ -# params/zed2_yaml +# params/zed2.yaml # Parameters for Stereolabs ZED2 camera --- general: camera_model: 'zed2' + grab_resolution: 'HD720' # 'HD2K', 'HD1080', 'HD720', 'VGA', 'AUTO' + grab_frame_rate: 30 # Frequency of frame grabbing for internal SDK operations depth: - min_depth: 0.7 # Min: 0.2, Max: 3.0 - Default 0.7 - Note: reducing this value wil require more computational power and GPU memory - max_depth: 20.0 # Max: 40.0 - -pos_tracking: - imu_fusion: true # enable/disable IMU fusion. When set to false, only the optical odometry will be used. - -sensors: - sensors_timestamp_sync: false # Synchronize Sensors messages timestamp with latest received frame - -object_detection: - od_enabled: false # True to enable Object Detection [only ZED 2] - confidence_threshold: 50 # Minimum value of the detection confidence of an object [0,100] - object_tracking_enabled: true # Enable/disable the tracking of the detected objects - people_detection: true # Enable/disable the detection of persons - vehicle_detection: false # Enable/disable the detection of vehicles + min_depth: 0.3 # Min: 0.2, Max: 3.0 - Default 0.7 - Note: reducing this value wil require more computational power and GPU memory + max_depth: 15.0 # Max: 40.0 + \ No newline at end of file diff --git a/zed_wrapper/params/zed2i.yaml b/zed_wrapper/params/zed2i.yaml new file mode 100644 index 00000000..2742f370 --- /dev/null +++ b/zed_wrapper/params/zed2i.yaml @@ -0,0 +1,12 @@ +# params/zed2i.yaml +# Parameters for Stereolabs ZED 2i camera +--- + +general: + camera_model: 'zed2i' + grab_resolution: 'HD720' # 'HD2K', 'HD1080', 
'HD720', 'VGA', 'AUTO' + grab_frame_rate: 30 # Frequency of frame grabbing for internal SDK operations + +depth: + min_depth: 0.3 # Min: 0.3, Max: 3.0 - Default 0.7 - Note: reducing this value wil require more computational power and GPU memory + max_depth: 15.0 # Max: 40.0 diff --git a/zed_wrapper/params/zedm.yaml b/zed_wrapper/params/zedm.yaml index 03d9e7d0..f328af8e 100644 --- a/zed_wrapper/params/zedm.yaml +++ b/zed_wrapper/params/zedm.yaml @@ -1,16 +1,12 @@ -# params/zedm_yaml +# params/zedm.yaml # Parameters for Stereolabs ZED mini camera --- general: camera_model: 'zedm' + grab_resolution: 'HD720' # 'HD2K', 'HD1080', 'HD720', 'VGA', 'AUTO' + grab_frame_rate: 30 # Frequency of frame grabbing for internal SDK operations depth: - min_depth: 0.35 # Min: 0.1, Max: 3.0 - Default 0.3 - Note: reducing this value wil require more computational power and GPU memory - max_depth: 10.0 # Max: 20.0 - -pos_tracking: - imu_fusion: true # enable/disable IMU fusion. When set to false, only the optical odometry will be used. 
- -sensors: - sensors_timestamp_sync: false # Synchronize Sensors messages timestamp with latest received frame + min_depth: 0.1 # Min: 0.1, Max: 3.0 - Default 0.3 - Note: reducing this value wil require more computational power and GPU memory + max_depth: 15.0 # Max: 20.0 diff --git a/zed_wrapper/params/zedx.yaml b/zed_wrapper/params/zedx.yaml new file mode 100644 index 00000000..9778857c --- /dev/null +++ b/zed_wrapper/params/zedx.yaml @@ -0,0 +1,13 @@ +# params/zedx.yaml +# Parameters for Stereolabs ZED X camera +--- + +general: + camera_model: 'zedx' + grab_resolution: 'HD1200' # HD1080, HD1200, SVGA, AUTO + grab_frame_rate: 60 # 120, 60, 30, 15 + +depth: + min_depth: 0.2 # Min: 0.2, Max: 3.0 - Default 0.7 - Note: reducing this value wil require more computational power and GPU memory + max_depth: 15.0 # Max: 40.0 + diff --git a/zed_wrapper/params/zedxm.yaml b/zed_wrapper/params/zedxm.yaml new file mode 100644 index 00000000..f3e91fac --- /dev/null +++ b/zed_wrapper/params/zedxm.yaml @@ -0,0 +1,12 @@ +# params/zedxm.yaml +# Parameters for Stereolabs ZED X Mini camera +--- + +general: + camera_model: 'zedxm' + grab_resolution: 'HD1200' # HD1080, HD1200, SVGA, AUTO + grab_frame_rate: 60 # 120, 60, 30, 15 + +depth: + min_depth: 0.1 # Min: 0.1, Max: 3.0 - Default 0.7 - Note: reducing this value wil require more computational power and GPU memory + max_depth: 15.0 # Max: 40.0 diff --git a/zed_wrapper/records/record_depth.sh b/zed_wrapper/records/record_depth.sh deleted file mode 100644 index b7f73c33..00000000 --- a/zed_wrapper/records/record_depth.sh +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/bash -rosbag record /camera/rgb/camera_info /camera/depth_registered/image_raw/compressedDepth /camera/rgb/image_rect_color/compressed /tf diff --git a/zed_wrapper/records/record_stereo.sh b/zed_wrapper/records/record_stereo.sh deleted file mode 100644 index 97d4cfcc..00000000 --- a/zed_wrapper/records/record_stereo.sh +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/bash -rosbag record 
/camera/left/camera_info /camera/right/camera_info /camera/left/image_raw/compressed /camera/right/image_raw/compressed /tf diff --git a/zed_wrapper/src/zed_wrapper_node.cpp b/zed_wrapper/src/zed_wrapper_node.cpp index 2134a00e..25235ab0 100644 --- a/zed_wrapper/src/zed_wrapper_node.cpp +++ b/zed_wrapper/src/zed_wrapper_node.cpp @@ -1,7 +1,6 @@ // ///////////////////////////////////////////////////////////////////////// - // -// Copyright (c) 2020, STEREOLABS. +// Copyright (c) 2023, STEREOLABS. // // All rights reserved. // @@ -22,16 +21,17 @@ #include #include -int main(int argc, char** argv) { - ros::init(argc, argv, "zed_camera_node"); +int main(int argc, char** argv) +{ + ros::init(argc, argv, "zed_camera_node"); - // Start the ZED Nodelet - nodelet::Loader nodelet; - nodelet::M_string remap(ros::names::getRemappings()); - nodelet::V_string nargv; - nodelet.load(ros::this_node::getName(), "zed_nodelets/ZEDWrapperNodelet", remap, nargv); + // Start the ZED Nodelet + nodelet::Loader nodelet; + nodelet::M_string remap(ros::names::getRemappings()); + nodelet::V_string nargv; + nodelet.load(ros::this_node::getName(), "zed_nodelets/ZEDWrapperNodelet", remap, nargv); - ros::spin(); + ros::spin(); - return 0; + return 0; } diff --git a/zed_wrapper/urdf/include/materials.urdf.xacro b/zed_wrapper/urdf/include/materials.urdf.xacro index ccbf7480..fdf32ce3 100644 --- a/zed_wrapper/urdf/include/materials.urdf.xacro +++ b/zed_wrapper/urdf/include/materials.urdf.xacro @@ -1,21 +1,19 @@ @@ -28,4 +26,13 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ + + + + + + + + diff --git a/zed_wrapper/urdf/models/zed.stl b/zed_wrapper/urdf/models/zed.stl deleted file mode 100644 index 913d2cd9..00000000 Binary files a/zed_wrapper/urdf/models/zed.stl and /dev/null differ diff --git a/zed_wrapper/urdf/models/zed2.stl b/zed_wrapper/urdf/models/zed2.stl deleted file mode 100644 index 913d2cd9..00000000 Binary files a/zed_wrapper/urdf/models/zed2.stl and /dev/null differ diff --git a/zed_wrapper/urdf/models/zedm.stl b/zed_wrapper/urdf/models/zedm.stl deleted file mode 100644 index 7048bc77..00000000 Binary files a/zed_wrapper/urdf/models/zedm.stl and /dev/null differ diff --git a/zed_wrapper/urdf/zed_descr.urdf.xacro b/zed_wrapper/urdf/zed_descr.urdf.xacro index 4d218f96..1ae2e0b8 100644 --- a/zed_wrapper/urdf/zed_descr.urdf.xacro +++ b/zed_wrapper/urdf/zed_descr.urdf.xacro @@ -1,141 +1,40 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + diff --git a/zed_wrapper/urdf/zed_macro.urdf.xacro b/zed_wrapper/urdf/zed_macro.urdf.xacro new file mode 100644 index 00000000..4cc9eebb --- /dev/null +++ b/zed_wrapper/urdf/zed_macro.urdf.xacro @@ -0,0 +1,172 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +