From bca487d37c447ff249e7601e1b698b1b591a8f34 Mon Sep 17 00:00:00 2001
From: Taekjin LEE
Date: Tue, 19 Nov 2024 14:06:14 +0900
Subject: [PATCH] feat(autoware_multi_object_tracker): new function to add odometry uncertainty (#9139)

* feat: add Odometry uncertainty to object tracking

Signed-off-by: Taekjin LEE

* feat: Add odometry heading uncertainty to object pose covariance

feat: Rotate object pose covariance matrix to account for yaw uncertainty

Rotate the object pose covariance matrix in the uncertainty_processor.cpp file
to account for the yaw uncertainty. This ensures that the covariance matrix
accurately represents the position uncertainty of the object.

Refactor the code to rotate the covariance matrix using Eigen's Rotation2D
class. The yaw uncertainty is added to the y-y element of the rotated
covariance matrix. Finally, update the object_pose_cov array with the updated
covariance values.

Closes #123

Signed-off-by: Taekjin LEE

* feat: Add odometry motion uncertainty to object pose covariance

Signed-off-by: Taekjin LEE

refactoring

Signed-off-by: Taekjin LEE

* feat: Update ego twist uncertainty to the object velocity uncertainty

Signed-off-by: Taekjin LEE

* feat: update object twist covariance by odometry yaw rate uncertainty

Signed-off-by: Taekjin LEE

* feat: move uncertainty modeling to input side

Signed-off-by: Taekjin LEE

* feat: add option to select odometry uncertainty

Signed-off-by: Taekjin LEE

* refactor: rename consider_odometry_uncertainty to enable_odometry_uncertainty

Signed-off-by: Taekjin LEE

* fix: transform to world first, add odometry covariance later

Signed-off-by: Taekjin LEE

style(pre-commit): autofix

Signed-off-by: Taekjin LEE

* feat: Add odometry heading uncertainty to object pose covariance

Signed-off-by: Taekjin LEE

---------

Signed-off-by: Taekjin LEE
---
 .../multi_object_tracker_node.param.yaml      |   1 +
 .../uncertainty/uncertainty_processor.hpp     |   3 +
 .../lib/uncertainty/uncertainty_processor.cpp | 101 ++++++++++++++++++
 .../autoware_multi_object_tracker/package.xml |   1 +
 .../src/multi_object_tracker_node.cpp         |  45 ++++++--
 .../src/multi_object_tracker_node.hpp         |   1 +
 .../src/processor/input_manager.cpp           |  11 +-
 7 files changed, 154 insertions(+), 9 deletions(-)
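The heading-uncertainty update described in the commit message rotates the 2x2 position covariance into the frame aligned with the ego-to-object bearing, inflates the tangential component by the squared distance times the yaw variance, and rotates back. A minimal standalone sketch of that operation, using Eigen only (the function name and the numeric values are illustrative, not taken from the patch):

```cpp
#include <Eigen/Dense>

#include <cmath>
#include <iostream>

// Inflate a 2x2 position covariance by the ego yaw uncertainty. The extra
// variance grows with the squared distance r2 and acts perpendicular to the
// bearing theta from the ego to the object, i.e. on the y-y element of the
// bearing-aligned covariance.
Eigen::Matrix2d addHeadingUncertainty(
  const Eigen::Matrix2d & cov, const double theta, const double r2, const double yaw_variance)
{
  const Eigen::Matrix2d rot = Eigen::Rotation2Dd(theta).toRotationMatrix();
  Eigen::Matrix2d cov_rot = rot.transpose() * cov * rot;  // bearing-aligned frame
  cov_rot(1, 1) += yaw_variance * r2;                     // tangential direction
  return rot * cov_rot * rot.transpose();                 // back to the map frame
}

int main()
{
  const Eigen::Matrix2d cov = Eigen::Matrix2d::Identity() * 0.09;  // 0.3 m standard deviation
  // Object 20 m away at a 45 deg bearing, ego yaw variance 1e-4 rad^2 -> +0.04 m^2 laterally.
  std::cout << addHeadingUncertainty(cov, M_PI / 4.0, 400.0, 1e-4) << std::endl;
  return 0;
}
```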
diff --git a/perception/autoware_multi_object_tracker/config/multi_object_tracker_node.param.yaml b/perception/autoware_multi_object_tracker/config/multi_object_tracker_node.param.yaml
index 09621a40c499f..7dd588ec8eeba 100644
--- a/perception/autoware_multi_object_tracker/config/multi_object_tracker_node.param.yaml
+++ b/perception/autoware_multi_object_tracker/config/multi_object_tracker_node.param.yaml
@@ -13,6 +13,7 @@
       publish_rate: 10.0
       world_frame_id: map
       enable_delay_compensation: false
+      consider_odometry_uncertainty: false

       # debug parameters
       publish_processing_time: false
diff --git a/perception/autoware_multi_object_tracker/include/autoware/multi_object_tracker/uncertainty/uncertainty_processor.hpp b/perception/autoware_multi_object_tracker/include/autoware/multi_object_tracker/uncertainty/uncertainty_processor.hpp
index c67e71b38a6f9..12bd865795b85 100644
--- a/perception/autoware_multi_object_tracker/include/autoware/multi_object_tracker/uncertainty/uncertainty_processor.hpp
+++ b/perception/autoware_multi_object_tracker/include/autoware/multi_object_tracker/uncertainty/uncertainty_processor.hpp
@@ -23,6 +23,7 @@
 #include
 #include
+#include <nav_msgs/msg/odometry.hpp>

 namespace autoware::multi_object_tracker
 {
@@ -34,6 +35,7 @@
 using autoware::multi_object_tracker::object_model::ObjectModel;
 using autoware_perception_msgs::msg::DetectedObject;
 using autoware_perception_msgs::msg::DetectedObjects;
 using autoware_perception_msgs::msg::ObjectClassification;
+using nav_msgs::msg::Odometry;

 ObjectModel decodeObjectModel(const ObjectClassification & object_class);
@@ -44,6 +46,7 @@ object_model::StateCovariance covarianceFromObjectClass(

 void normalizeUncertainty(DetectedObjects & detected_objects);

+void addOdometryUncertainty(const Odometry & odometry, DetectedObjects & detected_objects);
 }  // namespace uncertainty
 }  // namespace autoware::multi_object_tracker
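With this declaration the header exposes three free functions for uncertainty handling. A minimal sketch of how they combine, in the order the node wires them up later in this patch (the wrapper function below is illustrative and not part of the patch; the include path assumes the package's exported headers):

```cpp
#include <autoware/multi_object_tracker/uncertainty/uncertainty_processor.hpp>

#include <autoware_perception_msgs/msg/detected_objects.hpp>
#include <nav_msgs/msg/odometry.hpp>

namespace uncertainty = autoware::multi_object_tracker::uncertainty;

// `input` is assumed to be already transformed to the world frame, and `odometry`
// carries the ego pose/twist covariance that should be propagated to the objects.
autoware_perception_msgs::msg::DetectedObjects applyUncertainty(
  const nav_msgs::msg::Odometry & odometry,
  const autoware_perception_msgs::msg::DetectedObjects & input)
{
  // Fill per-class default covariances where the detection does not provide any.
  auto objects = uncertainty::modelUncertainty(input);
  // Inflate the object covariances with the ego pose/twist uncertainty.
  uncertainty::addOdometryUncertainty(odometry, objects);
  // Normalize the object uncertainty, as the node already did before this patch.
  uncertainty::normalizeUncertainty(objects);
  return objects;
}
```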
diff --git a/perception/autoware_multi_object_tracker/lib/uncertainty/uncertainty_processor.cpp b/perception/autoware_multi_object_tracker/lib/uncertainty/uncertainty_processor.cpp
index 840879f9bd7aa..3f0b854931842 100644
--- a/perception/autoware_multi_object_tracker/lib/uncertainty/uncertainty_processor.cpp
+++ b/perception/autoware_multi_object_tracker/lib/uncertainty/uncertainty_processor.cpp
@@ -17,6 +17,9 @@
 #include "autoware/multi_object_tracker/uncertainty/uncertainty_processor.hpp"

+#include <Eigen/Geometry>
+#include <tf2/utils.h>
+
 namespace autoware::multi_object_tracker
 {
 namespace uncertainty
@@ -135,5 +138,103 @@ void normalizeUncertainty(DetectedObjects & detected_objects)
   }
 }

+void addOdometryUncertainty(const Odometry & odometry, DetectedObjects & detected_objects)
+{
+  const auto & odom_pose = odometry.pose.pose;
+  const auto & odom_pose_cov = odometry.pose.covariance;
+  const auto & odom_twist = odometry.twist.twist;
+  const auto & odom_twist_cov = odometry.twist.covariance;
+
+  // ego motion uncertainty: velocity multiplied by the time offset to the measurement
+  const double ego_yaw = tf2::getYaw(odom_pose.orientation);
+  const double dt =
+    (rclcpp::Time(odometry.header.stamp) - rclcpp::Time(detected_objects.header.stamp)).seconds();
+  const double dt2 = dt * dt;
+  Eigen::MatrixXd m_rot_ego = Eigen::Rotation2D(ego_yaw).toRotationMatrix();
+  Eigen::MatrixXd m_cov_motion = Eigen::MatrixXd(2, 2);
+  m_cov_motion << odom_twist.linear.x * odom_twist.linear.x * dt2, 0, 0,
+    odom_twist.linear.y * odom_twist.linear.y * dt2;
+
+  // ego position uncertainty: position covariance + motion covariance
+  Eigen::MatrixXd m_cov_ego_pose = Eigen::MatrixXd(2, 2);
+  m_cov_ego_pose << odom_pose_cov[0], odom_pose_cov[1], odom_pose_cov[6], odom_pose_cov[7];
+  m_cov_ego_pose = m_cov_ego_pose + m_rot_ego * m_cov_motion * m_rot_ego.transpose();
+
+  // ego yaw uncertainty: yaw covariance + yaw motion covariance
+  const double & cov_ego_yaw = odom_pose_cov[35];
+  const double cov_yaw = cov_ego_yaw + odom_twist.angular.z * odom_twist.angular.z * dt2;
+
+  // ego velocity uncertainty: velocity covariance
+  Eigen::MatrixXd m_cov_ego_twist = Eigen::MatrixXd(2, 2);
+  m_cov_ego_twist << odom_twist_cov[0], odom_twist_cov[1], odom_twist_cov[6], odom_twist_cov[7];
+  const double & cov_yaw_rate = odom_twist_cov[35];
+
+  for (auto & object : detected_objects.objects) {
+    auto & object_pose = object.kinematics.pose_with_covariance.pose;
+    auto & object_pose_cov = object.kinematics.pose_with_covariance.covariance;
+    const double dx = object_pose.position.x - odom_pose.position.x;
+    const double dy = object_pose.position.y - odom_pose.position.y;
+    const double r2 = dx * dx + dy * dy;
+    const double theta = std::atan2(dy, dx);
+
+    // 1. add odometry position and motion uncertainty to the object position covariance
+    Eigen::MatrixXd m_pose_cov = Eigen::MatrixXd(2, 2);
+    m_pose_cov << object_pose_cov[XYZRPY_COV_IDX::X_X], object_pose_cov[XYZRPY_COV_IDX::X_Y],
+      object_pose_cov[XYZRPY_COV_IDX::Y_X], object_pose_cov[XYZRPY_COV_IDX::Y_Y];
+
+    // 1-a. add odometry position uncertainty to the object position covariance
+    // the object position and its covariance are expressed in the world frame (map)
+    m_pose_cov = m_pose_cov + m_cov_ego_pose;
+
+    // 1-b. add odometry heading uncertainty to the object position covariance
+    // the uncertainty is proportional to the distance, and its orientation is perpendicular
+    // to the vector from the ego to the object
+    {
+      const double cov_by_yaw = cov_ego_yaw * r2;
+      // rotate the covariance matrix, add the yaw uncertainty, and rotate back
+      Eigen::MatrixXd m_rot_theta = Eigen::Rotation2D(theta).toRotationMatrix();
+      Eigen::MatrixXd m_cov_rot = m_rot_theta.transpose() * m_pose_cov * m_rot_theta;
+      m_cov_rot(1, 1) += cov_by_yaw;  // yaw uncertainty is added to the y-y element
+      m_pose_cov = m_rot_theta * m_cov_rot * m_rot_theta.transpose();
+    }
+    // 1-c. add odometry yaw uncertainty to the object yaw covariance
+    object_pose_cov[XYZRPY_COV_IDX::YAW_YAW] += cov_yaw;  // yaw-yaw
+
+    // update the covariance matrix
+    object_pose_cov[XYZRPY_COV_IDX::X_X] = m_pose_cov(0, 0);
+    object_pose_cov[XYZRPY_COV_IDX::X_Y] = m_pose_cov(0, 1);
+    object_pose_cov[XYZRPY_COV_IDX::Y_X] = m_pose_cov(1, 0);
+    object_pose_cov[XYZRPY_COV_IDX::Y_Y] = m_pose_cov(1, 1);
+
+    // 2. add odometry velocity uncertainty to the object velocity covariance
+    auto & object_twist_cov = object.kinematics.twist_with_covariance.covariance;
+    Eigen::MatrixXd m_twist_cov = Eigen::MatrixXd(2, 2);
+    m_twist_cov << object_twist_cov[XYZRPY_COV_IDX::X_X], object_twist_cov[XYZRPY_COV_IDX::X_Y],
+      object_twist_cov[XYZRPY_COV_IDX::Y_X], object_twist_cov[XYZRPY_COV_IDX::Y_Y];
+
+    // 2-a. add odometry velocity uncertainty to the object linear twist covariance
+    {
+      const double obj_yaw = tf2::getYaw(object_pose.orientation);  // object yaw is in global frame
+      Eigen::MatrixXd m_rot_theta = Eigen::Rotation2D(obj_yaw - ego_yaw).toRotationMatrix();
+      m_twist_cov = m_twist_cov + m_rot_theta.transpose() * m_cov_ego_twist * m_rot_theta;
+    }
+
+    // 2-b. add odometry yaw rate uncertainty to the object linear twist covariance
+    {
+      const double cov_by_yaw_rate = cov_yaw_rate * r2;
+      Eigen::MatrixXd m_rot_theta = Eigen::Rotation2D(theta).toRotationMatrix();
+      Eigen::MatrixXd m_twist_cov_rot = m_rot_theta.transpose() * m_twist_cov * m_rot_theta;
+      m_twist_cov_rot(1, 1) += cov_by_yaw_rate;  // yaw rate uncertainty is added to y-y element
+      m_twist_cov = m_rot_theta * m_twist_cov_rot * m_rot_theta.transpose();
+    }
+
+    // update the covariance matrix
+    object_twist_cov[XYZRPY_COV_IDX::X_X] = m_twist_cov(0, 0);
+    object_twist_cov[XYZRPY_COV_IDX::X_Y] = m_twist_cov(0, 1);
+    object_twist_cov[XYZRPY_COV_IDX::Y_X] = m_twist_cov(1, 0);
+    object_twist_cov[XYZRPY_COV_IDX::Y_Y] = m_twist_cov(1, 1);
+  }
+}
+
 }  // namespace uncertainty
 }  // namespace autoware::multi_object_tracker
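For a sense of scale, plugging in the placeholder odometry the node constructs below (10 m/s assumed speed, a 1 ms stamp offset, 0.1 m^2 position variance, 1e-4 rad^2 yaw variance, 0.001 rad^2/s^2 yaw rate variance) gives the following contributions for an object 20 m away. The snippet just reproduces that arithmetic and is not part of the patch:

```cpp
#include <iostream>

int main()
{
  // Values mirror the modeled odometry in multi_object_tracker_node.cpp below.
  const double vx = 10.0;           // [m/s] assumed ego speed
  const double dt = 0.001;          // [s] stamp offset between odometry and measurement
  const double cov_ego_x = 0.1;     // [m^2] ego position variance
  const double cov_ego_yaw = 1e-4;  // [rad^2] ego yaw variance
  const double cov_yaw_rate = 0.001;  // [rad^2/s^2] ego yaw rate variance
  const double r = 20.0;            // [m] distance from the ego to the object

  const double motion_term = vx * vx * dt * dt;          // 1e-4 m^2, negligible at 1 ms
  const double heading_term = cov_ego_yaw * r * r;       // 0.04 m^2, perpendicular to the bearing
  const double yaw_rate_term = cov_yaw_rate * r * r;     // 0.4 m^2/s^2, added to the twist covariance

  std::cout << "position term: " << cov_ego_x << " m^2\n"
            << "motion term:   " << motion_term << " m^2\n"
            << "heading term:  " << heading_term << " m^2\n"
            << "yaw rate term: " << yaw_rate_term << " m^2/s^2\n";
  return 0;
}
```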
diff --git a/perception/autoware_multi_object_tracker/package.xml b/perception/autoware_multi_object_tracker/package.xml
index e16e4c0d2a836..5eb591d60133f 100644
--- a/perception/autoware_multi_object_tracker/package.xml
+++ b/perception/autoware_multi_object_tracker/package.xml
@@ -21,6 +21,7 @@
   <depend>eigen</depend>
   <depend>glog</depend>
   <depend>mussp</depend>
+  <depend>nav_msgs</depend>
   <depend>rclcpp</depend>
   <depend>rclcpp_components</depend>
   <depend>tf2</depend>
diff --git a/perception/autoware_multi_object_tracker/src/multi_object_tracker_node.cpp b/perception/autoware_multi_object_tracker/src/multi_object_tracker_node.cpp
index 24bf4ef9c123d..68a356cd91da5 100644
--- a/perception/autoware_multi_object_tracker/src/multi_object_tracker_node.cpp
+++ b/perception/autoware_multi_object_tracker/src/multi_object_tracker_node.cpp
@@ -87,6 +87,7 @@ MultiObjectTracker::MultiObjectTracker(const rclcpp::NodeOptions & node_options)
   double publish_rate = declare_parameter<double>("publish_rate");  // [hz]
   world_frame_id_ = declare_parameter<std::string>("world_frame_id");
   bool enable_delay_compensation{declare_parameter<bool>("enable_delay_compensation")};
+  enable_odometry_uncertainty_ = declare_parameter<bool>("consider_odometry_uncertainty");

   declare_parameter("selected_input_channels", std::vector<std::string>());
   std::vector<std::string> selected_input_channels =
@@ -274,19 +275,49 @@ void MultiObjectTracker::runProcess(
     return;
   }

-  // Model the object uncertainty if it is empty
-  DetectedObjects input_objects_with_uncertainty = uncertainty::modelUncertainty(input_objects);
-
-  // Normalize the object uncertainty
-  uncertainty::normalizeUncertainty(input_objects_with_uncertainty);
-
   // Transform the objects to the world frame
   DetectedObjects transformed_objects;
   if (!autoware::object_recognition_utils::transformObjects(
-        input_objects_with_uncertainty, world_frame_id_, tf_buffer_, transformed_objects)) {
+        input_objects, world_frame_id_, tf_buffer_, transformed_objects)) {
     return;
   }
+
+  // Add the modeled odometry uncertainty to the objects
+  if (enable_odometry_uncertainty_) {
+    // Create a modeled odometry message
+    nav_msgs::msg::Odometry odometry;
+    odometry.header.stamp = measurement_time + rclcpp::Duration::from_seconds(0.001);
+
+    // set odometry pose from self_transform
+    auto & odom_pose = odometry.pose.pose;
+    odom_pose.position.x = self_transform->translation.x;
+    odom_pose.position.y = self_transform->translation.y;
+    odom_pose.position.z = self_transform->translation.z;
+    odom_pose.orientation = self_transform->rotation;
+
+    // set odometry twist
+    auto & odom_twist = odometry.twist.twist;
+    odom_twist.linear.x = 10.0;  // m/s
+    odom_twist.linear.y = 0.1;   // m/s
+    odom_twist.angular.z = 0.1;  // rad/s
+
+    // model the uncertainty
+    auto & odom_pose_cov = odometry.pose.covariance;
+    odom_pose_cov[0] = 0.1;      // x-x
+    odom_pose_cov[7] = 0.1;      // y-y
+    odom_pose_cov[35] = 0.0001;  // yaw-yaw
+
+    auto & odom_twist_cov = odometry.twist.covariance;
+    odom_twist_cov[0] = 2.0;     // x-x [m^2/s^2]
+    odom_twist_cov[7] = 0.2;     // y-y [m^2/s^2]
+    odom_twist_cov[35] = 0.001;  // yaw-yaw [rad^2/s^2]
+
+    // Add the odometry uncertainty to the object uncertainty
+    uncertainty::addOdometryUncertainty(odometry, transformed_objects);
+  }
+
+  // Normalize the object uncertainty
+  uncertainty::normalizeUncertainty(transformed_objects);
+
   /* prediction */
   processor_->predict(measurement_time);
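The raw indices written above address the flattened 6x6 row-major pose/twist covariance of geometry_msgs, the same layout the XYZRPY_COV_IDX constants wrap in the library code. A small reference sketch of that mapping, purely illustrative:

```cpp
#include <cstddef>

// The ROS pose/twist covariance is a row-major 6x6 matrix over (x, y, z, roll, pitch, yaw).
// Element (row, col) sits at index row * 6 + col, so the values set in the node are:
//   odom_pose_cov[0]  -> (0, 0) x-x
//   odom_pose_cov[1]  -> (0, 1) x-y   (index 6 is the symmetric y-x entry)
//   odom_pose_cov[7]  -> (1, 1) y-y
//   odom_pose_cov[35] -> (5, 5) yaw-yaw
constexpr std::size_t covIndex(const std::size_t row, const std::size_t col)
{
  return row * 6 + col;
}

static_assert(covIndex(0, 0) == 0);
static_assert(covIndex(1, 1) == 7);
static_assert(covIndex(5, 5) == 35);
```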
diff --git a/perception/autoware_multi_object_tracker/src/multi_object_tracker_node.hpp b/perception/autoware_multi_object_tracker/src/multi_object_tracker_node.hpp
index 04d83ebd47acb..b9886ee3fc847 100644
--- a/perception/autoware_multi_object_tracker/src/multi_object_tracker_node.hpp
+++ b/perception/autoware_multi_object_tracker/src/multi_object_tracker_node.hpp
@@ -87,6 +87,7 @@ class MultiObjectTracker : public rclcpp::Node
   std::string world_frame_id_;  // tracking frame
   std::unique_ptr<DataAssociation> association_;
   std::unique_ptr<TrackerProcessor> processor_;
+  bool enable_odometry_uncertainty_;

   // input manager
   std::unique_ptr<InputManager> input_manager_;
diff --git a/perception/autoware_multi_object_tracker/src/processor/input_manager.cpp b/perception/autoware_multi_object_tracker/src/processor/input_manager.cpp
index decd2139c5b81..c9ee8dae9562a 100644
--- a/perception/autoware_multi_object_tracker/src/processor/input_manager.cpp
+++ b/perception/autoware_multi_object_tracker/src/processor/input_manager.cpp
@@ -14,6 +14,8 @@
 #include "input_manager.hpp"

+#include <autoware/multi_object_tracker/uncertainty/uncertainty_processor.hpp>
+
 #include

 namespace autoware::multi_object_tracker
@@ -49,8 +51,13 @@ void InputStream::init(const InputChannel & input_channel)
 void InputStream::onMessage(
   const autoware_perception_msgs::msg::DetectedObjects::ConstSharedPtr msg)
 {
-  const DetectedObjects objects = *msg;
-  objects_que_.push_back(objects);
+  const DetectedObjects & objects = *msg;
+
+  // Model the object uncertainty only if the detection does not provide it
+  DetectedObjects objects_with_uncertainty = uncertainty::modelUncertainty(objects);
+
+  // Move the objects with the modeled uncertainty into the objects queue
+  objects_que_.push_back(std::move(objects_with_uncertainty));
   while (objects_que_.size() > que_size_) {
     objects_que_.pop_front();
   }
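With uncertainty modeling now applied per message at the input side, the new addOdometryUncertainty step can be exercised in isolation. A possible unit-test sketch, not part of this patch; it assumes the package's usual gtest setup, and all field values are illustrative:

```cpp
#include <gtest/gtest.h>

#include <autoware/multi_object_tracker/uncertainty/uncertainty_processor.hpp>

#include <autoware_perception_msgs/msg/detected_objects.hpp>
#include <nav_msgs/msg/odometry.hpp>

TEST(UncertaintyProcessor, AddOdometryUncertaintyInflatesPositionCovariance)
{
  namespace uncertainty = autoware::multi_object_tracker::uncertainty;

  // Ego odometry with a known position and yaw variance.
  nav_msgs::msg::Odometry odometry;
  odometry.pose.pose.orientation.w = 1.0;
  odometry.pose.covariance[0] = 0.1;    // x-x
  odometry.pose.covariance[7] = 0.1;    // y-y
  odometry.pose.covariance[35] = 1e-4;  // yaw-yaw

  // One object 20 m ahead of the ego with a small initial covariance.
  autoware_perception_msgs::msg::DetectedObjects objects;
  autoware_perception_msgs::msg::DetectedObject object;
  object.kinematics.pose_with_covariance.pose.position.x = 20.0;
  object.kinematics.pose_with_covariance.pose.orientation.w = 1.0;
  object.kinematics.pose_with_covariance.covariance[0] = 0.01;  // x-x
  object.kinematics.pose_with_covariance.covariance[7] = 0.01;  // y-y
  objects.objects.push_back(object);

  uncertainty::addOdometryUncertainty(odometry, objects);

  // The ego position and heading uncertainty must inflate the object covariance.
  const auto & cov = objects.objects.front().kinematics.pose_with_covariance.covariance;
  EXPECT_GT(cov[0], 0.01);
  EXPECT_GT(cov[7], 0.01);
}
```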