diff --git a/perception/multi_object_tracker/CMakeLists.txt b/perception/multi_object_tracker/CMakeLists.txt new file mode 100644 index 0000000000000..c944af87e4bc5 --- /dev/null +++ b/perception/multi_object_tracker/CMakeLists.txt @@ -0,0 +1,70 @@ +cmake_minimum_required(VERSION 3.5) +project(multi_object_tracker) + +### Compile options +if(NOT CMAKE_CXX_STANDARD) + set(CMAKE_CXX_STANDARD 14) + set(CMAKE_CXX_STANDARD_REQUIRED ON) + set(CMAKE_CXX_EXTENSIONS OFF) +endif() +if(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES "Clang") + add_compile_options(-Wall -Wextra -Wpedantic -Werror) +endif() + +# Ignore -Wnonportable-include-path in Clang for mussp +if(CMAKE_CXX_COMPILER_ID MATCHES "Clang") + add_compile_options(-Wno-nonportable-include-path) +endif() + +### Find Packages +find_package(ament_cmake_auto REQUIRED) + +### Find Eigen Dependencies +find_package(eigen3_cmake_module REQUIRED) +find_package(Eigen3 REQUIRED) + +### Find dependencies listed in the package.xml +ament_auto_find_build_dependencies() + +# Generate exe file +set(MULTI_OBJECT_TRACKER_SRC + src/multi_object_tracker_core.cpp + src/utils/utils.cpp + src/tracker/model/tracker_base.cpp + src/tracker/model/big_vehicle_tracker.cpp + src/tracker/model/normal_vehicle_tracker.cpp + src/tracker/model/multiple_vehicle_tracker.cpp + src/tracker/model/bicycle_tracker.cpp + src/tracker/model/pedestrian_tracker.cpp + src/tracker/model/pedestrian_and_bicycle_tracker.cpp + src/tracker/model/unknown_tracker.cpp + src/data_association/data_association.cpp +) + +ament_auto_add_library(mu_successive_shortest_path SHARED + src/data_association/mu_successive_shortest_path/mu_successive_shortest_path_wrapper.cpp +) + +ament_auto_add_library(multi_object_tracker_node SHARED + ${MULTI_OBJECT_TRACKER_SRC} +) + +target_link_libraries(multi_object_tracker_node + mu_successive_shortest_path + Eigen3::Eigen +) + +rclcpp_components_register_node(multi_object_tracker_node + PLUGIN "MultiObjectTracker" + EXECUTABLE multi_object_tracker +) + +if(BUILD_TESTING) + find_package(ament_lint_auto REQUIRED) + ament_lint_auto_find_test_dependencies() +endif() + +ament_auto_package(INSTALL_TO_SHARE + launch + config +) diff --git a/perception/multi_object_tracker/README.md b/perception/multi_object_tracker/README.md new file mode 100644 index 0000000000000..e6111384f1476 --- /dev/null +++ b/perception/multi_object_tracker/README.md @@ -0,0 +1,131 @@ +# multi_object_tracker + +## Purpose + +The detection results are processed over time. The main purpose is to assign an ID to each detected object and to estimate its velocity. + +## Inner-workings / Algorithms + +This multi-object tracker consists of data association and an EKF. + +![multi_object_tracker_overview](image/multi_object_tracker_overview.svg) + +### Data association + +The data association performs maximum-score matching, formulated as a min-cost max-flow problem. +In this package, muSSP [1] is used as the solver. +In addition, when associating observations with trackers, the data association applies gates, such as the BEV area of the object, the Mahalanobis distance, and the maximum distance, depending on the class label. + +### EKF Tracker + +Models for pedestrians, bicycles (motorcycles), cars, and unknown objects are available. +The pedestrian and bicycle trackers run at the same time, each with its respective EKF model, to enable the transition between pedestrian and bicycle tracking; a simplified sketch of this parallel-model approach is shown below.
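The following is a minimal sketch of that parallel-model idea. It is a hypothetical simplification with made-up type and class names (`SimpleEkfTracker`, `PedestrianAndBicycleSketch`, `Detection`, `TrackedOutput`); it is not the implementation in this package, which uses `PedestrianTracker`, `BicycleTracker`, and full EKF state and covariance handling.

```cpp
// Hypothetical, simplified illustration of running two motion models in parallel
// and reporting the one that matches the latest classification.
#include <cstdint>

enum class Label : std::uint8_t { PEDESTRIAN, BICYCLE };

struct Detection      // stand-in for a detected object (position + label)
{
  Label label;
  double x;
  double y;
};

struct TrackedOutput  // stand-in for a tracked object (position + velocity + label)
{
  Label label;
  double x;
  double y;
  double vx;
};

class SimpleEkfTracker  // stand-in for an EKF-based single-model tracker
{
public:
  void predict(double dt) { x_ += vx_ * dt; }
  void measure(const Detection & det, double dt)
  {
    // Naive constant-velocity update; the real trackers run a full EKF update.
    if (dt > 0.0) {
      vx_ = (det.x - x_) / dt;
    }
    x_ = det.x;
    y_ = det.y;
  }
  TrackedOutput getOutput(Label label) const { return {label, x_, y_, vx_}; }

private:
  double x_{0.0};
  double y_{0.0};
  double vx_{0.0};
};

// Both models are predicted and updated with every measurement, and the output is
// taken from the model matching the latest label, so the track keeps its state
// when the classification switches between pedestrian and bicycle.
class PedestrianAndBicycleSketch
{
public:
  void predict(double dt)
  {
    pedestrian_.predict(dt);
    bicycle_.predict(dt);
  }
  void measure(const Detection & det, double dt)
  {
    pedestrian_.measure(det, dt);
    bicycle_.measure(det, dt);
    last_label_ = det.label;
  }
  TrackedOutput getOutput() const
  {
    return last_label_ == Label::BICYCLE ? bicycle_.getOutput(Label::BICYCLE)
                                         : pedestrian_.getOutput(Label::PEDESTRIAN);
  }

private:
  SimpleEkfTracker pedestrian_;
  SimpleEkfTracker bicycle_;
  Label last_label_{Label::PEDESTRIAN};
};
```

The `MultipleVehicleTracker` added in this package follows the same pattern for the vehicle case, holding a `NormalVehicleTracker` and a `BigVehicleTracker` side by side.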
+Big vehicles such as trucks and buses are difficult to distinguish from passenger cars, and their classification is often unstable. Therefore, separate models are prepared for passenger cars and big vehicles, and these models are run at the same time as the respective EKF models to ensure stable tracking. + + + +## Inputs / Outputs + +### Input + +| Name | Type | Description | +| --------- | ----------------------------------------------------- | ----------- | +| `~/input` | `autoware_auto_perception_msgs::msg::DetectedObjects` | obstacles | + +### Output + +| Name | Type | Description | +| ---------- | ----------------------------------------------------- | ------------------------------------------------ | +| `~/output` | `autoware_auto_perception_msgs::msg::TrackedObjects` | tracked obstacles with ID and estimated velocity | + +## Parameters + + + +### Core Parameters + +| Name | Type | Description | +| --------------------------- | ------ | --------------------------------------------------------------- | +| `can_assign_matrix` | double | Assignment table for data association | +| `max_dist_matrix` | double | Maximum distance table for data association | +| `max_area_matrix` | double | Maximum area table for data association | +| `min_area_matrix` | double | Minimum area table for data association | +| `max_rad_matrix` | double | Maximum angle table for data association | +| `world_frame_id` | string | tracking frame | +| `enable_delay_compensation` | bool | Estimate obstacles at current time considering detection delay | +| `publish_rate` | double | output rate in Hz when `enable_delay_compensation` is true | + +## Assumptions / Known limits + + + +## (Optional) Error detection and handling + + + +## (Optional) Performance characterization + +### Evaluation of muSSP + +According to our evaluation, muSSP is faster than the normal [SSP](src/data_association/successive_shortest_path) when the matrix size is more than 100. + +Execution time for varying matrix size at 95% sparsity. In real data, the sparsity was often around 95%. +![mussp_evaluation1](image/mussp_evaluation1.png) + +Execution time for varying the sparsity with matrix size 100. +![mussp_evaluation2](image/mussp_evaluation2.png) + +## (Optional) References/External links + +This package makes use of external code. + +| Name | License | Original Repository | +| -------------------------------------------------------------- | ------------------------------------------------------------------- | ------------------------------------ | +| [muSSP](src/data_association/mu_successive_shortest_path/impl) | [Apache 2.0](src/data_association/mu_successive_shortest_path/impl) | | + +[1] C. Wang, Y. Wang, Y. Wang, C.-t. Wu, and G.
Yu, “muSSP: Efficient +Min-cost Flow Algorithm for Multi-object Tracking,” NeurIPS, 2019 + +## (Optional) Future extensions / Unimplemented parts + + diff --git a/perception/multi_object_tracker/config/data_association_matrix.param.yaml b/perception/multi_object_tracker/config/data_association_matrix.param.yaml new file mode 100644 index 0000000000000..393bb949d048d --- /dev/null +++ b/perception/multi_object_tracker/config/data_association_matrix.param.yaml @@ -0,0 +1,52 @@ +/**: + ros__parameters: + can_assign_matrix: + #UNKNOWN, CAR, TRUCK, BUS, BICYCLE, MOTORBIKE, PEDESTRIAN, ANIMAL + [0, 0, 0, 0, 0, 0, 0, 0, #UNKNOWN + 1, 1, 1, 1, 0, 0, 0, 0, #CAR + 1, 1, 1, 1, 0, 0, 0, 0, #TRUCK + 1, 1, 1, 1, 0, 0, 0, 0, #BUS + 0, 0, 0, 0, 1, 1, 1, 0, #BICYCLE + 0, 0, 0, 0, 1, 1, 1, 0, #MOTORBIKE + 0, 0, 0, 0, 1, 1, 1, 0, #PEDESTRIAN + 0, 0, 0, 0, 0, 0, 0, 1] #ANIMAL + max_dist_matrix: + #UNKNOWN, CAR, TRUCK, BUS, BICYCLE, MOTORBIKE, PEDESTRIAN, ANIMAL + [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, #UNKNOWN + 4.0, 2.0, 5.0, 5.0, 1.0, 1.0, 1.0, 1.0, #CAR + 4.0, 2.0, 5.0, 5.0, 1.0, 1.0, 1.0, 1.0, #TRUCK + 4.0, 2.0, 5.0, 5.0, 1.0, 1.0, 1.0, 1.0, #BUS + 3.0, 1.0, 1.0, 1.0, 3.0, 3.0, 2.0, 1.0, #BICYCLE + 3.0, 1.0, 1.0, 1.0, 3.0, 3.0, 2.0, 1.0, #MOTORBIKE + 2.0, 1.0, 1.0, 1.0, 3.0, 3.0, 2.0, 1.0, #PEDESTRIAN + 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0] #ANIMAL + max_area_matrix: + #UNKNOWN, CAR, TRUCK, BUS, BICYCLE, MOTORBIKE, PEDESTRIAN, ANIMAL + [10000.00, 10000.00, 10000.00, 10000.00, 10000.00, 10000.00, 10000.00, 10000.00, #UNKNOWN + 12.10, 12.10, 19.75, 32.40, 10000.00, 10000.00, 10000.00, 10000.00, #CAR + 19.75, 12.10, 19.75, 32.40, 10000.00, 10000.00, 10000.00, 10000.00, #TRUCK + 32.40, 12.10, 19.75, 32.40, 10000.00, 10000.00, 10000.00, 10000.00, #BUS + 2.50, 10000.00, 10000.00, 10000.00, 2.50, 2.50, 1.00, 10000.00, #BICYCLE + 3.00, 10000.00, 10000.00, 10000.00, 2.50, 2.50, 1.00, 10000.00, #MOTORBIKE + 2.00, 10000.00, 10000.00, 10000.00, 1.50, 1.50, 1.00, 10000.00, #PEDESTRIAN + 2.00, 10000.00, 10000.00, 10000.00, 10000.00, 10000.00, 10000.00, 10000.00] #ANIMAL + min_area_matrix: + #UNKNOWN, CAR, TRUCK, BUS, BICYCLE, MOTORBIKE, PEDESTRIAN, ANIMAL + [ 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, #UNKNOWN + 3.600, 3.600, 6.000, 10.000, 0.000, 0.000, 0.000, 0.000, #CAR + 6.000, 3.600, 6.000, 10.000, 0.000, 0.000, 0.000, 0.000, #TRUCK + 10.000, 3.600, 6.000, 10.000, 0.000, 0.000, 0.000, 0.000, #BUS + 0.001, 0.000, 0.000, 0.000, 0.100, 0.100, 0.100, 0.000, #BICYCLE + 0.001, 0.000, 0.000, 0.000, 0.100, 0.100, 0.100, 0.000, #MOTORBIKE + 0.001, 0.000, 0.000, 0.000, 0.100, 0.100, 0.100, 0.000, #PEDESTRIAN + 0.500, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000] #ANIMAL + max_rad_matrix: # If value is greater than pi, it will be ignored. 
+ #UNKNOWN, CAR, TRUCK, BUS, BICYCLE, MOTORBIKE, PEDESTRIAN, ANIMAL + [3.150, 3.150, 3.150, 3.150, 3.150, 3.150, 3.150, 3.150, #UNKNOWN + 3.150, 1.047, 1.047, 1.047, 3.150, 3.150, 3.150, 3.150, #CAR + 3.150, 1.047, 1.047, 1.047, 3.150, 3.150, 3.150, 3.150, #TRUCK + 3.150, 1.047, 1.047, 1.047, 3.150, 3.150, 3.150, 3.150, #BUS + 3.150, 3.150, 3.150, 3.150, 3.150, 3.150, 3.150, 3.150, #BICYCLE + 3.150, 3.150, 3.150, 3.150, 3.150, 3.150, 3.150, 3.150, #MOTORBIKE + 3.150, 3.150, 3.150, 3.150, 3.150, 3.150, 3.150, 3.150, #PEDESTRIAN + 3.150, 3.150, 3.150, 3.150, 3.150, 3.150, 3.150, 3.150] #ANIMAL diff --git a/perception/multi_object_tracker/image/multi_object_tracker_overview.svg b/perception/multi_object_tracker/image/multi_object_tracker_overview.svg new file mode 100644 index 0000000000000..23fc0455db016 --- /dev/null +++ b/perception/multi_object_tracker/image/multi_object_tracker_overview.svg @@ -0,0 +1,3 @@ + + +
[multi_object_tracker_overview.svg diagram text: observations and trackers are matched by data association (optimization of the combination between trackers and observations); matched trackers are updated with the observation, unmatched trackers are only predicted, and unmatched observations generate new trackers; a tracker life-cycle check then deletes trackers that have not matched any observation for a certain period, does nothing for trackers whose match count is below a certain number, and otherwise sends the result.]
\ No newline at end of file diff --git a/perception/multi_object_tracker/image/mussp_evaluation1.png b/perception/multi_object_tracker/image/mussp_evaluation1.png new file mode 100644 index 0000000000000..b17b743051808 Binary files /dev/null and b/perception/multi_object_tracker/image/mussp_evaluation1.png differ diff --git a/perception/multi_object_tracker/image/mussp_evaluation2.png b/perception/multi_object_tracker/image/mussp_evaluation2.png new file mode 100644 index 0000000000000..91ed6d648d449 Binary files /dev/null and b/perception/multi_object_tracker/image/mussp_evaluation2.png differ diff --git a/perception/multi_object_tracker/include/multi_object_tracker/data_association/data_association.hpp b/perception/multi_object_tracker/include/multi_object_tracker/data_association/data_association.hpp new file mode 100644 index 0000000000000..23feca474fe4f --- /dev/null +++ b/perception/multi_object_tracker/include/multi_object_tracker/data_association/data_association.hpp @@ -0,0 +1,62 @@ +// Copyright 2020 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// +// Author: v1.0 Yukihiro Saito +// + +#ifndef MULTI_OBJECT_TRACKER__DATA_ASSOCIATION__DATA_ASSOCIATION_HPP_ +#define MULTI_OBJECT_TRACKER__DATA_ASSOCIATION__DATA_ASSOCIATION_HPP_ + +#include +#include +#include +#include + +#define EIGEN_MPL2_ONLY +#include "multi_object_tracker/data_association/solver/gnn_solver.hpp" +#include "multi_object_tracker/tracker/tracker.hpp" + +#include +#include + +#include + +class DataAssociation +{ +private: + Eigen::MatrixXi can_assign_matrix_; + Eigen::MatrixXd max_dist_matrix_; + Eigen::MatrixXd max_area_matrix_; + Eigen::MatrixXd min_area_matrix_; + Eigen::MatrixXd max_rad_matrix_; + const double score_threshold_; + std::unique_ptr gnn_solver_ptr_; + +public: + EIGEN_MAKE_ALIGNED_OPERATOR_NEW + DataAssociation( + std::vector can_assign_vector, std::vector max_dist_vector, + std::vector max_area_vector, std::vector min_area_vector, + std::vector max_rad_vector); + void assign( + const Eigen::MatrixXd & src, std::unordered_map & direct_assignment, + std::unordered_map & reverse_assignment); + Eigen::MatrixXd calcScoreMatrix( + const autoware_auto_perception_msgs::msg::DetectedObjects & measurements, + const std::list> & trackers); + virtual ~DataAssociation() {} +}; + +#endif // MULTI_OBJECT_TRACKER__DATA_ASSOCIATION__DATA_ASSOCIATION_HPP_ diff --git a/perception/multi_object_tracker/include/multi_object_tracker/data_association/solver/gnn_solver.hpp b/perception/multi_object_tracker/include/multi_object_tracker/data_association/solver/gnn_solver.hpp new file mode 100644 index 0000000000000..631fd73f8583d --- /dev/null +++ b/perception/multi_object_tracker/include/multi_object_tracker/data_association/solver/gnn_solver.hpp @@ -0,0 +1,22 @@ +// Copyright 2021 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef MULTI_OBJECT_TRACKER__DATA_ASSOCIATION__SOLVER__GNN_SOLVER_HPP_ +#define MULTI_OBJECT_TRACKER__DATA_ASSOCIATION__SOLVER__GNN_SOLVER_HPP_ + +#include "multi_object_tracker/data_association/solver/gnn_solver_interface.hpp" +#include "multi_object_tracker/data_association/solver/mu_successive_shortest_path.hpp" +#include "multi_object_tracker/data_association/solver/successive_shortest_path.hpp" + +#endif // MULTI_OBJECT_TRACKER__DATA_ASSOCIATION__SOLVER__GNN_SOLVER_HPP_ diff --git a/perception/multi_object_tracker/include/multi_object_tracker/data_association/solver/gnn_solver_interface.hpp b/perception/multi_object_tracker/include/multi_object_tracker/data_association/solver/gnn_solver_interface.hpp new file mode 100644 index 0000000000000..1fb508bcf12b2 --- /dev/null +++ b/perception/multi_object_tracker/include/multi_object_tracker/data_association/solver/gnn_solver_interface.hpp @@ -0,0 +1,35 @@ +// Copyright 2021 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef MULTI_OBJECT_TRACKER__DATA_ASSOCIATION__SOLVER__GNN_SOLVER_INTERFACE_HPP_ +#define MULTI_OBJECT_TRACKER__DATA_ASSOCIATION__SOLVER__GNN_SOLVER_INTERFACE_HPP_ + +#include +#include + +namespace gnn_solver +{ +class GnnSolverInterface +{ +public: + GnnSolverInterface() = default; + virtual ~GnnSolverInterface() = default; + + virtual void maximizeLinearAssignment( + const std::vector> & cost, std::unordered_map * direct_assignment, + std::unordered_map * reverse_assignment) = 0; +}; +} // namespace gnn_solver + +#endif // MULTI_OBJECT_TRACKER__DATA_ASSOCIATION__SOLVER__GNN_SOLVER_INTERFACE_HPP_ diff --git a/perception/multi_object_tracker/include/multi_object_tracker/data_association/solver/mu_successive_shortest_path.hpp b/perception/multi_object_tracker/include/multi_object_tracker/data_association/solver/mu_successive_shortest_path.hpp new file mode 100644 index 0000000000000..a4d9d727e6cc5 --- /dev/null +++ b/perception/multi_object_tracker/include/multi_object_tracker/data_association/solver/mu_successive_shortest_path.hpp @@ -0,0 +1,37 @@ +// Copyright 2021 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef MULTI_OBJECT_TRACKER__DATA_ASSOCIATION__SOLVER__MU_SUCCESSIVE_SHORTEST_PATH_HPP_ +#define MULTI_OBJECT_TRACKER__DATA_ASSOCIATION__SOLVER__MU_SUCCESSIVE_SHORTEST_PATH_HPP_ + +#include "multi_object_tracker/data_association/solver/gnn_solver_interface.hpp" + +#include +#include + +namespace gnn_solver +{ +class MuSSP : public GnnSolverInterface +{ +public: + MuSSP() = default; + ~MuSSP() = default; + + void maximizeLinearAssignment( + const std::vector> & cost, std::unordered_map * direct_assignment, + std::unordered_map * reverse_assignment) override; +}; +} // namespace gnn_solver + +#endif // MULTI_OBJECT_TRACKER__DATA_ASSOCIATION__SOLVER__MU_SUCCESSIVE_SHORTEST_PATH_HPP_ diff --git a/perception/multi_object_tracker/include/multi_object_tracker/data_association/solver/successive_shortest_path.hpp b/perception/multi_object_tracker/include/multi_object_tracker/data_association/solver/successive_shortest_path.hpp new file mode 100644 index 0000000000000..1c4b15cd68b59 --- /dev/null +++ b/perception/multi_object_tracker/include/multi_object_tracker/data_association/solver/successive_shortest_path.hpp @@ -0,0 +1,37 @@ +// Copyright 2021 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef MULTI_OBJECT_TRACKER__DATA_ASSOCIATION__SOLVER__SUCCESSIVE_SHORTEST_PATH_HPP_ +#define MULTI_OBJECT_TRACKER__DATA_ASSOCIATION__SOLVER__SUCCESSIVE_SHORTEST_PATH_HPP_ + +#include "multi_object_tracker/data_association/solver/gnn_solver_interface.hpp" + +#include +#include + +namespace gnn_solver +{ +class SSP : public GnnSolverInterface +{ +public: + SSP() = default; + ~SSP() = default; + + void maximizeLinearAssignment( + const std::vector> & cost, std::unordered_map * direct_assignment, + std::unordered_map * reverse_assignment) override; +}; +} // namespace gnn_solver + +#endif // MULTI_OBJECT_TRACKER__DATA_ASSOCIATION__SOLVER__SUCCESSIVE_SHORTEST_PATH_HPP_ diff --git a/perception/multi_object_tracker/include/multi_object_tracker/multi_object_tracker_core.hpp b/perception/multi_object_tracker/include/multi_object_tracker/multi_object_tracker_core.hpp new file mode 100644 index 0000000000000..d79180e4b2dd5 --- /dev/null +++ b/perception/multi_object_tracker/include/multi_object_tracker/multi_object_tracker_core.hpp @@ -0,0 +1,79 @@ +// Copyright 2020 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// +// +// Author: v1.0 Yukihiro Saito +/// + +#ifndef MULTI_OBJECT_TRACKER__MULTI_OBJECT_TRACKER_CORE_HPP_ +#define MULTI_OBJECT_TRACKER__MULTI_OBJECT_TRACKER_CORE_HPP_ + +#include "multi_object_tracker/data_association/data_association.hpp" +#include "multi_object_tracker/tracker/model/tracker_base.hpp" + +#include + +#include +#include +#include + +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +class MultiObjectTracker : public rclcpp::Node +{ +public: + explicit MultiObjectTracker(const rclcpp::NodeOptions & node_options); + +private: + rclcpp::Publisher::SharedPtr + tracked_objects_pub_; + rclcpp::Subscription::SharedPtr + detected_object_sub_; + rclcpp::TimerBase::SharedPtr publish_timer_; // publish timer + + tf2_ros::Buffer tf_buffer_; + tf2_ros::TransformListener tf_listener_; + + void onMeasurement( + const autoware_auto_perception_msgs::msg::DetectedObjects::ConstSharedPtr input_objects_msg); + void onTimer(); + + std::string world_frame_id_; // tracking frame + std::list> list_tracker_; + std::unique_ptr data_association_; + + void checkTrackerLifeCycle( + std::list> & list_tracker, const rclcpp::Time & time, + const geometry_msgs::msg::Transform & self_transform); + void sanitizeTracker( + std::list> & list_tracker, const rclcpp::Time & time); + std::shared_ptr createNewTracker( + const autoware_auto_perception_msgs::msg::DetectedObject & object, + const rclcpp::Time & time) const; + + void publish(const rclcpp::Time & time) const; + inline bool shouldTrackerPublish(const std::shared_ptr tracker) const; +}; + +#endif // MULTI_OBJECT_TRACKER__MULTI_OBJECT_TRACKER_CORE_HPP_ diff --git a/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/bicycle_tracker.hpp b/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/bicycle_tracker.hpp new file mode 100644 index 0000000000000..30e23c096ce65 --- /dev/null +++ b/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/bicycle_tracker.hpp @@ -0,0 +1,92 @@ +// Copyright 2020 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// +// +// Author: v1.0 Yukihiro Saito +// + +#ifndef MULTI_OBJECT_TRACKER__TRACKER__MODEL__BICYCLE_TRACKER_HPP_ +#define MULTI_OBJECT_TRACKER__TRACKER__MODEL__BICYCLE_TRACKER_HPP_ + +#include "multi_object_tracker/tracker/model/tracker_base.hpp" + +#include + +class BicycleTracker : public Tracker +{ +private: + autoware_auto_perception_msgs::msg::DetectedObject object_; + rclcpp::Logger logger_; + +private: + KalmanFilter ekf_; + rclcpp::Time last_update_time_; + enum IDX { + X = 0, + Y = 1, + YAW = 2, + VX = 3, + WZ = 4, + }; + struct EkfParams + { + char dim_x = 5; + float q_cov_x; + float q_cov_y; + float q_cov_yaw; + float q_cov_wz; + float q_cov_vx; + float p0_cov_vx; + float p0_cov_wz; + // if use_measurement_covariance_ is false, use following params + bool use_measurement_covariance; + float r_cov_x; + float r_cov_y; + float r_cov_yaw; + float p0_cov_x; + float p0_cov_y; + float p0_cov_yaw; + } ekf_params_; + + double max_vx_; + double max_wz_; + float z_; + +private: + struct BoundingBox + { + double width; + double length; + double height; + }; + BoundingBox bounding_box_; + +public: + BicycleTracker( + const rclcpp::Time & time, const autoware_auto_perception_msgs::msg::DetectedObject & object); + + bool predict(const rclcpp::Time & time) override; + bool predict(const double dt, KalmanFilter & ekf) const; + bool measure( + const autoware_auto_perception_msgs::msg::DetectedObject & object, + const rclcpp::Time & time) override; + bool measureWithPose(const autoware_auto_perception_msgs::msg::DetectedObject & object); + bool measureWithShape(const autoware_auto_perception_msgs::msg::DetectedObject & object); + bool getTrackedObject( + const rclcpp::Time & time, + autoware_auto_perception_msgs::msg::TrackedObject & object) const override; + virtual ~BicycleTracker() {} +}; + +#endif // MULTI_OBJECT_TRACKER__TRACKER__MODEL__BICYCLE_TRACKER_HPP_ diff --git a/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/big_vehicle_tracker.hpp b/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/big_vehicle_tracker.hpp new file mode 100644 index 0000000000000..991d9940a0f1e --- /dev/null +++ b/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/big_vehicle_tracker.hpp @@ -0,0 +1,91 @@ +// Copyright 2020 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// +// +// Author: v1.0 Yukihiro Saito +// + +#ifndef MULTI_OBJECT_TRACKER__TRACKER__MODEL__BIG_VEHICLE_TRACKER_HPP_ +#define MULTI_OBJECT_TRACKER__TRACKER__MODEL__BIG_VEHICLE_TRACKER_HPP_ + +#include "multi_object_tracker/tracker/model/tracker_base.hpp" + +#include + +class BigVehicleTracker : public Tracker +{ +private: + autoware_auto_perception_msgs::msg::DetectedObject object_; + rclcpp::Logger logger_; + +private: + KalmanFilter ekf_; + rclcpp::Time last_update_time_; + enum IDX { + X = 0, + Y = 1, + YAW = 2, + VX = 3, + WZ = 4, + }; + struct EkfParams + { + char dim_x = 5; + float q_cov_x; + float q_cov_y; + float q_cov_yaw; + float q_cov_wz; + float q_cov_vx; + float p0_cov_vx; + float p0_cov_wz; + // if use_measurement_covariance_ is false, use following params + bool use_measurement_covariance; + float r_cov_x; + float r_cov_y; + float r_cov_yaw; + float p0_cov_x; + float p0_cov_y; + float p0_cov_yaw; + } ekf_params_; + double max_vx_; + double max_wz_; + float z_; + +private: + struct BoundingBox + { + double width; + double length; + double height; + }; + BoundingBox bounding_box_; + +public: + BigVehicleTracker( + const rclcpp::Time & time, const autoware_auto_perception_msgs::msg::DetectedObject & object); + + bool predict(const rclcpp::Time & time) override; + bool predict(const double dt, KalmanFilter & ekf) const; + bool measure( + const autoware_auto_perception_msgs::msg::DetectedObject & object, + const rclcpp::Time & time) override; + bool measureWithPose(const autoware_auto_perception_msgs::msg::DetectedObject & object); + bool measureWithShape(const autoware_auto_perception_msgs::msg::DetectedObject & object); + bool getTrackedObject( + const rclcpp::Time & time, + autoware_auto_perception_msgs::msg::TrackedObject & object) const override; + virtual ~BigVehicleTracker() {} +}; + +#endif // MULTI_OBJECT_TRACKER__TRACKER__MODEL__BIG_VEHICLE_TRACKER_HPP_ diff --git a/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/multiple_vehicle_tracker.hpp b/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/multiple_vehicle_tracker.hpp new file mode 100644 index 0000000000000..144b1b6c9b8b4 --- /dev/null +++ b/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/multiple_vehicle_tracker.hpp @@ -0,0 +1,49 @@ +// Copyright 2020 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// +// +// Author: v1.0 Yukihiro Saito +// + +#ifndef MULTI_OBJECT_TRACKER__TRACKER__MODEL__MULTIPLE_VEHICLE_TRACKER_HPP_ +#define MULTI_OBJECT_TRACKER__TRACKER__MODEL__MULTIPLE_VEHICLE_TRACKER_HPP_ + +#include "multi_object_tracker/tracker/model/big_vehicle_tracker.hpp" +#include "multi_object_tracker/tracker/model/normal_vehicle_tracker.hpp" +#include "multi_object_tracker/tracker/model/tracker_base.hpp" + +#include +#include + +class MultipleVehicleTracker : public Tracker +{ +private: + NormalVehicleTracker normal_vehicle_tracker_; + BigVehicleTracker big_vehicle_tracker_; + +public: + MultipleVehicleTracker( + const rclcpp::Time & time, const autoware_auto_perception_msgs::msg::DetectedObject & object); + + bool predict(const rclcpp::Time & time) override; + bool measure( + const autoware_auto_perception_msgs::msg::DetectedObject & object, + const rclcpp::Time & time) override; + bool getTrackedObject( + const rclcpp::Time & time, + autoware_auto_perception_msgs::msg::TrackedObject & object) const override; + virtual ~MultipleVehicleTracker() {} +}; + +#endif // MULTI_OBJECT_TRACKER__TRACKER__MODEL__MULTIPLE_VEHICLE_TRACKER_HPP_ diff --git a/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/normal_vehicle_tracker.hpp b/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/normal_vehicle_tracker.hpp new file mode 100644 index 0000000000000..bdaef7309c916 --- /dev/null +++ b/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/normal_vehicle_tracker.hpp @@ -0,0 +1,92 @@ +// Copyright 2020 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// +// +// Author: v1.0 Yukihiro Saito +// + +#ifndef MULTI_OBJECT_TRACKER__TRACKER__MODEL__NORMAL_VEHICLE_TRACKER_HPP_ +#define MULTI_OBJECT_TRACKER__TRACKER__MODEL__NORMAL_VEHICLE_TRACKER_HPP_ + +#include "tracker_base.hpp" + +#include + +class NormalVehicleTracker : public Tracker +{ +private: + autoware_auto_perception_msgs::msg::DetectedObject object_; + rclcpp::Logger logger_; + +private: + KalmanFilter ekf_; + rclcpp::Time last_update_time_; + enum IDX { + X = 0, + Y = 1, + YAW = 2, + VX = 3, + WZ = 4, + }; + struct EkfParams + { + char dim_x = 5; + float q_cov_x; + float q_cov_y; + float q_cov_yaw; + float q_cov_wz; + float q_cov_vx; + float p0_cov_vx; + float p0_cov_wz; + // if use_measurement_covariance_ is false, use following params + bool use_measurement_covariance; + float r_cov_x; + float r_cov_y; + float r_cov_yaw; + float p0_cov_x; + float p0_cov_y; + float p0_cov_yaw; + } ekf_params_; + + double max_vx_; + double max_wz_; + float z_; + +private: + struct BoundingBox + { + double width; + double length; + double height; + }; + BoundingBox bounding_box_; + +public: + NormalVehicleTracker( + const rclcpp::Time & time, const autoware_auto_perception_msgs::msg::DetectedObject & object); + + bool predict(const rclcpp::Time & time) override; + bool predict(const double dt, KalmanFilter & ekf) const; + bool measure( + const autoware_auto_perception_msgs::msg::DetectedObject & object, + const rclcpp::Time & time) override; + bool measureWithPose(const autoware_auto_perception_msgs::msg::DetectedObject & object); + bool measureWithShape(const autoware_auto_perception_msgs::msg::DetectedObject & object); + bool getTrackedObject( + const rclcpp::Time & time, + autoware_auto_perception_msgs::msg::TrackedObject & object) const override; + virtual ~NormalVehicleTracker() {} +}; + +#endif // MULTI_OBJECT_TRACKER__TRACKER__MODEL__NORMAL_VEHICLE_TRACKER_HPP_ diff --git a/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/pedestrian_and_bicycle_tracker.hpp b/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/pedestrian_and_bicycle_tracker.hpp new file mode 100644 index 0000000000000..ca1e5b519d708 --- /dev/null +++ b/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/pedestrian_and_bicycle_tracker.hpp @@ -0,0 +1,48 @@ +// Copyright 2020 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// +// +// Author: v1.0 Yukihiro Saito +// + +#ifndef MULTI_OBJECT_TRACKER__TRACKER__MODEL__PEDESTRIAN_AND_BICYCLE_TRACKER_HPP_ +#define MULTI_OBJECT_TRACKER__TRACKER__MODEL__PEDESTRIAN_AND_BICYCLE_TRACKER_HPP_ + +#include "multi_object_tracker/tracker/model/bicycle_tracker.hpp" +#include "multi_object_tracker/tracker/model/pedestrian_tracker.hpp" +#include "multi_object_tracker/tracker/model/tracker_base.hpp" + +#include + +class PedestrianAndBicycleTracker : public Tracker +{ +private: + PedestrianTracker pedestrian_tracker_; + BicycleTracker bicycle_tracker_; + +public: + PedestrianAndBicycleTracker( + const rclcpp::Time & time, const autoware_auto_perception_msgs::msg::DetectedObject & object); + + bool predict(const rclcpp::Time & time) override; + bool measure( + const autoware_auto_perception_msgs::msg::DetectedObject & object, + const rclcpp::Time & time) override; + bool getTrackedObject( + const rclcpp::Time & time, + autoware_auto_perception_msgs::msg::TrackedObject & object) const override; + virtual ~PedestrianAndBicycleTracker() {} +}; + +#endif // MULTI_OBJECT_TRACKER__TRACKER__MODEL__PEDESTRIAN_AND_BICYCLE_TRACKER_HPP_ diff --git a/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/pedestrian_tracker.hpp b/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/pedestrian_tracker.hpp new file mode 100644 index 0000000000000..a8f1923bb68cb --- /dev/null +++ b/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/pedestrian_tracker.hpp @@ -0,0 +1,98 @@ +// Copyright 2020 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// +// +// Author: v1.0 Yukihiro Saito +// + +#ifndef MULTI_OBJECT_TRACKER__TRACKER__MODEL__PEDESTRIAN_TRACKER_HPP_ +#define MULTI_OBJECT_TRACKER__TRACKER__MODEL__PEDESTRIAN_TRACKER_HPP_ + +#include "multi_object_tracker/tracker/model/tracker_base.hpp" + +#include + +class PedestrianTracker : public Tracker +{ +private: + autoware_auto_perception_msgs::msg::DetectedObject object_; + rclcpp::Logger logger_; + +private: + KalmanFilter ekf_; + rclcpp::Time last_update_time_; + enum IDX { + X = 0, + Y = 1, + YAW = 2, + VX = 3, + WZ = 4, + }; + struct EkfParams + { + char dim_x = 5; + float q_cov_x; + float q_cov_y; + float q_cov_yaw; + float q_cov_wz; + float q_cov_vx; + float p0_cov_vx; + float p0_cov_wz; + // if use_measurement_covariance_ is false, use following params + bool use_measurement_covariance; + float r_cov_x; + float r_cov_y; + float r_cov_yaw; + float p0_cov_x; + float p0_cov_y; + float p0_cov_yaw; + } ekf_params_; + + double max_vx_; + double max_wz_; + float z_; + +private: + struct BoundingBox + { + double width; + double length; + double height; + }; + struct Cylinder + { + double width; + double height; + }; + BoundingBox bounding_box_; + Cylinder cylinder_; + +public: + PedestrianTracker( + const rclcpp::Time & time, const autoware_auto_perception_msgs::msg::DetectedObject & object); + + bool predict(const rclcpp::Time & time) override; + bool predict(const double dt, KalmanFilter & ekf) const; + bool measure( + const autoware_auto_perception_msgs::msg::DetectedObject & object, + const rclcpp::Time & time) override; + bool measureWithPose(const autoware_auto_perception_msgs::msg::DetectedObject & object); + bool measureWithShape(const autoware_auto_perception_msgs::msg::DetectedObject & object); + bool getTrackedObject( + const rclcpp::Time & time, + autoware_auto_perception_msgs::msg::TrackedObject & object) const override; + virtual ~PedestrianTracker() {} +}; + +#endif // MULTI_OBJECT_TRACKER__TRACKER__MODEL__PEDESTRIAN_TRACKER_HPP_ diff --git a/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/tracker_base.hpp b/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/tracker_base.hpp new file mode 100644 index 0000000000000..ed18f5b5ff777 --- /dev/null +++ b/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/tracker_base.hpp @@ -0,0 +1,93 @@ +// Copyright 2020 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// +// +// Author: v1.0 Yukihiro Saito +// + +#ifndef MULTI_OBJECT_TRACKER__TRACKER__MODEL__TRACKER_BASE_HPP_ +#define MULTI_OBJECT_TRACKER__TRACKER__MODEL__TRACKER_BASE_HPP_ + +#define EIGEN_MPL2_ONLY +#include "multi_object_tracker/utils/utils.hpp" + +#include +#include + +#include "autoware_auto_perception_msgs/msg/detected_object.hpp" +#include "autoware_auto_perception_msgs/msg/tracked_object.hpp" +#include "geometry_msgs/msg/point.hpp" +#include "unique_identifier_msgs/msg/uuid.hpp" + +#include + +class Tracker +{ +protected: + unique_identifier_msgs::msg::UUID getUUID() const { return uuid_; } + void setClassification( + const std::vector & classification) + { + classification_ = classification; + } + +private: + unique_identifier_msgs::msg::UUID uuid_; + std::vector classification_; + int no_measurement_count_; + int total_no_measurement_count_; + int total_measurement_count_; + rclcpp::Time last_update_with_measurement_time_; + +public: + Tracker( + const rclcpp::Time & time, + const std::vector & classification); + virtual ~Tracker() {} + bool updateWithMeasurement( + const autoware_auto_perception_msgs::msg::DetectedObject & object, + const rclcpp::Time & measurement_time); + bool updateWithoutMeasurement(); + std::vector getClassification() const + { + return classification_; + } + std::uint8_t getHighestProbLabel() const { return utils::getHighestProbLabel(classification_); } + int getNoMeasurementCount() const { return no_measurement_count_; } + int getTotalNoMeasurementCount() const { return total_no_measurement_count_; } + int getTotalMeasurementCount() const { return total_measurement_count_; } + double getElapsedTimeFromLastUpdate(const rclcpp::Time & current_time) const + { + return (current_time - last_update_with_measurement_time_).seconds(); + } + virtual geometry_msgs::msg::PoseWithCovariance getPoseWithCovariance( + const rclcpp::Time & time) const; + + /* + * Pure virtual function + */ + +protected: + virtual bool measure( + const autoware_auto_perception_msgs::msg::DetectedObject & object, + const rclcpp::Time & time) = 0; + +public: + virtual bool getTrackedObject( + const rclcpp::Time & time, + autoware_auto_perception_msgs::msg::TrackedObject & object) const = 0; + virtual bool predict(const rclcpp::Time & time) = 0; +}; + +#endif // MULTI_OBJECT_TRACKER__TRACKER__MODEL__TRACKER_BASE_HPP_ diff --git a/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/unknown_tracker.hpp b/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/unknown_tracker.hpp new file mode 100644 index 0000000000000..6007e20e8de6b --- /dev/null +++ b/perception/multi_object_tracker/include/multi_object_tracker/tracker/model/unknown_tracker.hpp @@ -0,0 +1,77 @@ +// Copyright 2020 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// +// +// v1.0 Yukihiro Saito +// + +#ifndef MULTI_OBJECT_TRACKER__TRACKER__MODEL__UNKNOWN_TRACKER_HPP_ +#define MULTI_OBJECT_TRACKER__TRACKER__MODEL__UNKNOWN_TRACKER_HPP_ + +#include "tracker_base.hpp" + +#include + +class UnknownTracker : public Tracker +{ +private: + autoware_auto_perception_msgs::msg::DetectedObject object_; + rclcpp::Logger logger_; + +private: + KalmanFilter ekf_; + rclcpp::Time last_update_time_; + enum IDX { + X = 0, + Y = 1, + VX = 2, + VY = 3, + }; + struct EkfParams + { + char dim_x = 4; + float q_cov_x; + float q_cov_y; + float q_cov_vx; + float q_cov_vy; + float p0_cov_vx; + float p0_cov_vy; + // if use_measurement_covariance_ is false, use following params + bool use_measurement_covariance; + float r_cov_x; + float r_cov_y; + float p0_cov_x; + float p0_cov_y; + } ekf_params_; + float max_vx_, max_vy_; + float z_; + +public: + UnknownTracker( + const rclcpp::Time & time, const autoware_auto_perception_msgs::msg::DetectedObject & object); + + bool predict(const rclcpp::Time & time) override; + bool predict(const double dt, KalmanFilter & ekf) const; + bool measure( + const autoware_auto_perception_msgs::msg::DetectedObject & object, + const rclcpp::Time & time) override; + bool measureWithPose(const autoware_auto_perception_msgs::msg::DetectedObject & object); + bool measureWithShape(const autoware_auto_perception_msgs::msg::DetectedObject & object); + bool getTrackedObject( + const rclcpp::Time & time, + autoware_auto_perception_msgs::msg::TrackedObject & object) const override; + virtual ~UnknownTracker() {} +}; + +#endif // MULTI_OBJECT_TRACKER__TRACKER__MODEL__UNKNOWN_TRACKER_HPP_ diff --git a/perception/multi_object_tracker/include/multi_object_tracker/tracker/tracker.hpp b/perception/multi_object_tracker/include/multi_object_tracker/tracker/tracker.hpp new file mode 100644 index 0000000000000..75560750f07b6 --- /dev/null +++ b/perception/multi_object_tracker/include/multi_object_tracker/tracker/tracker.hpp @@ -0,0 +1,31 @@ +// Copyright 2020 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// +// Author: v1.0 Yukihiro Saito +// + +#ifndef MULTI_OBJECT_TRACKER__TRACKER__TRACKER_HPP_ +#define MULTI_OBJECT_TRACKER__TRACKER__TRACKER_HPP_ + +#include "model/bicycle_tracker.hpp" +#include "model/big_vehicle_tracker.hpp" +#include "model/multiple_vehicle_tracker.hpp" +#include "model/normal_vehicle_tracker.hpp" +#include "model/pedestrian_and_bicycle_tracker.hpp" +#include "model/pedestrian_tracker.hpp" +#include "model/tracker_base.hpp" +#include "model/unknown_tracker.hpp" + +#endif // MULTI_OBJECT_TRACKER__TRACKER__TRACKER_HPP_ diff --git a/perception/multi_object_tracker/include/multi_object_tracker/utils/utils.hpp b/perception/multi_object_tracker/include/multi_object_tracker/utils/utils.hpp new file mode 100644 index 0000000000000..95f81275e3e97 --- /dev/null +++ b/perception/multi_object_tracker/include/multi_object_tracker/utils/utils.hpp @@ -0,0 +1,87 @@ +// Copyright 2020 Tier IV, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// +// Author: v1.0 Yukihiro Saito +// + +#ifndef MULTI_OBJECT_TRACKER__UTILS__UTILS_HPP_ +#define MULTI_OBJECT_TRACKER__UTILS__UTILS_HPP_ + +#include + +#include +#include +#include +#include +#include + +#include +#include + +namespace utils +{ +double getPolygonArea(const geometry_msgs::msg::Polygon & footprint); +double getRectangleArea(const geometry_msgs::msg::Vector3 & dimensions); +double getCircleArea(const geometry_msgs::msg::Vector3 & dimensions); +double getArea(const autoware_auto_perception_msgs::msg::Shape & shape); +double get2dIoU( + const autoware_auto_perception_msgs::msg::TrackedObject & object1, + const autoware_auto_perception_msgs::msg::TrackedObject & object2); +std::uint8_t getHighestProbLabel( + const std::vector & classification); +autoware_auto_perception_msgs::msg::TrackedObject toTrackedObject( + const autoware_auto_perception_msgs::msg::DetectedObject & detected_object); + +enum MSG_COV_IDX { + X_X = 0, + X_Y = 1, + X_Z = 2, + X_ROLL = 3, + X_PITCH = 4, + X_YAW = 5, + Y_X = 6, + Y_Y = 7, + Y_Z = 8, + Y_ROLL = 9, + Y_PITCH = 10, + Y_YAW = 11, + Z_X = 12, + Z_Y = 13, + Z_Z = 14, + Z_ROLL = 15, + Z_PITCH = 16, + Z_YAW = 17, + ROLL_X = 18, + ROLL_Y = 19, + ROLL_Z = 20, + ROLL_ROLL = 21, + ROLL_PITCH = 22, + ROLL_YAW = 23, + PITCH_X = 24, + PITCH_Y = 25, + PITCH_Z = 26, + PITCH_ROLL = 27, + PITCH_PITCH = 28, + PITCH_YAW = 29, + YAW_X = 30, + YAW_Y = 31, + YAW_Z = 32, + YAW_ROLL = 33, + YAW_PITCH = 34, + YAW_YAW = 35 +}; +} // namespace utils + +#endif // MULTI_OBJECT_TRACKER__UTILS__UTILS_HPP_ diff --git a/perception/multi_object_tracker/launch/multi_object_tracker.launch.xml b/perception/multi_object_tracker/launch/multi_object_tracker.launch.xml new file mode 100644 index 0000000000000..1baeb42d9b1d5 --- /dev/null +++ b/perception/multi_object_tracker/launch/multi_object_tracker.launch.xml @@ -0,0 +1,17 @@ + + + + + + + + + + + + + + + + + diff --git a/perception/multi_object_tracker/package.xml b/perception/multi_object_tracker/package.xml new file mode 100644 index 0000000000000..0b1214f625594 --- /dev/null +++ b/perception/multi_object_tracker/package.xml @@ -0,0 +1,32 @@ + + + multi_object_tracker + 1.0.0 + The ROS2 multi_object_tracker package + Yukihiro Saito + + Jilada Eccleston + Apache License 2.0 + + ament_cmake_auto + eigen3_cmake_module + + autoware_auto_perception_msgs + autoware_perception_msgs + autoware_utils + eigen + kalman_filter + mussp + rclcpp + rclcpp_components + tf2 + tf2_ros + unique_identifier_msgs + + ament_lint_auto + autoware_lint_common + + + ament_cmake + + diff --git a/perception/multi_object_tracker/src/data_association/data_association.cpp b/perception/multi_object_tracker/src/data_association/data_association.cpp new file mode 100644 index 0000000000000..3f3c523c73afa --- /dev/null +++ b/perception/multi_object_tracker/src/data_association/data_association.cpp @@ -0,0 +1,212 @@ +// Copyright 2020 Tier IV, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "multi_object_tracker/data_association/data_association.hpp" + +#include "multi_object_tracker/data_association/solver/gnn_solver.hpp" +#include "multi_object_tracker/utils/utils.hpp" + +#include +#include +#include +#include +#include + +namespace +{ +double getDistance( + const geometry_msgs::msg::Point & measurement, const geometry_msgs::msg::Point & tracker) +{ + const double diff_x = tracker.x - measurement.x; + const double diff_y = tracker.y - measurement.y; + // const double diff_z = tracker.z - measurement.z; + return std::sqrt(diff_x * diff_x + diff_y * diff_y); +} + +double getMahalanobisDistance( + const geometry_msgs::msg::Point & measurement, const geometry_msgs::msg::Point & tracker, + const Eigen::Matrix2d & covariance) +{ + Eigen::Vector2d measurement_point; + measurement_point << measurement.x, measurement.y; + Eigen::Vector2d tracker_point; + tracker_point << tracker.x, tracker.y; + Eigen::MatrixXd mahalanobis_squared = (measurement_point - tracker_point).transpose() * + covariance.inverse() * (measurement_point - tracker_point); + return std::sqrt(mahalanobis_squared(0)); +} + +Eigen::Matrix2d getXYCovariance(const geometry_msgs::msg::PoseWithCovariance & pose_covariance) +{ + Eigen::Matrix2d covariance; + covariance << pose_covariance.covariance[0], pose_covariance.covariance[1], + pose_covariance.covariance[6], pose_covariance.covariance[7]; + return covariance; +} + +double getFormedYawAngle( + const geometry_msgs::msg::Quaternion & measurement_quat, + const geometry_msgs::msg::Quaternion & tracker_quat, const bool distinguish_front_or_back = true) +{ + const double measurement_yaw = autoware_utils::normalizeRadian(tf2::getYaw(measurement_quat)); + const double tracker_yaw = autoware_utils::normalizeRadian(tf2::getYaw(tracker_quat)); + const double angle_range = distinguish_front_or_back ? M_PI : M_PI_2; + const double angle_step = distinguish_front_or_back ? 
2.0 * M_PI : M_PI; + // Fixed measurement_yaw to be in the range of +-90 or 180 degrees of X_t(IDX::YAW) + double measurement_fixed_yaw = measurement_yaw; + while (angle_range <= tracker_yaw - measurement_fixed_yaw) { + measurement_fixed_yaw = measurement_fixed_yaw + angle_step; + } + while (angle_range <= measurement_fixed_yaw - tracker_yaw) { + measurement_fixed_yaw = measurement_fixed_yaw - angle_step; + } + return std::fabs(measurement_fixed_yaw - tracker_yaw); +} +} // namespace + +DataAssociation::DataAssociation( + std::vector can_assign_vector, std::vector max_dist_vector, + std::vector max_area_vector, std::vector min_area_vector, + std::vector max_rad_vector) +: score_threshold_(0.01) +{ + { + const int assign_label_num = static_cast(std::sqrt(can_assign_vector.size())); + Eigen::Map can_assign_matrix_tmp( + can_assign_vector.data(), assign_label_num, assign_label_num); + can_assign_matrix_ = can_assign_matrix_tmp.transpose(); + } + { + const int max_dist_label_num = static_cast(std::sqrt(max_dist_vector.size())); + Eigen::Map max_dist_matrix_tmp( + max_dist_vector.data(), max_dist_label_num, max_dist_label_num); + max_dist_matrix_ = max_dist_matrix_tmp.transpose(); + } + { + const int max_area_label_num = static_cast(std::sqrt(max_area_vector.size())); + Eigen::Map max_area_matrix_tmp( + max_area_vector.data(), max_area_label_num, max_area_label_num); + max_area_matrix_ = max_area_matrix_tmp.transpose(); + } + { + const int min_area_label_num = static_cast(std::sqrt(min_area_vector.size())); + Eigen::Map min_area_matrix_tmp( + min_area_vector.data(), min_area_label_num, min_area_label_num); + min_area_matrix_ = min_area_matrix_tmp.transpose(); + } + { + const int max_rad_label_num = static_cast(std::sqrt(max_rad_vector.size())); + Eigen::Map max_rad_matrix_tmp( + max_rad_vector.data(), max_rad_label_num, max_rad_label_num); + max_rad_matrix_ = max_rad_matrix_tmp.transpose(); + } + + gnn_solver_ptr_ = std::make_unique(); +} + +void DataAssociation::assign( + const Eigen::MatrixXd & src, std::unordered_map & direct_assignment, + std::unordered_map & reverse_assignment) +{ + std::vector> score(src.rows()); + for (int row = 0; row < src.rows(); ++row) { + score.at(row).resize(src.cols()); + for (int col = 0; col < src.cols(); ++col) { + score.at(row).at(col) = src(row, col); + } + } + // Solve + gnn_solver_ptr_->maximizeLinearAssignment(score, &direct_assignment, &reverse_assignment); + + for (auto itr = direct_assignment.begin(); itr != direct_assignment.end();) { + if (src(itr->first, itr->second) < score_threshold_) { + itr = direct_assignment.erase(itr); + continue; + } else { + ++itr; + } + } + for (auto itr = reverse_assignment.begin(); itr != reverse_assignment.end();) { + if (src(itr->second, itr->first) < score_threshold_) { + itr = reverse_assignment.erase(itr); + continue; + } else { + ++itr; + } + } +} + +Eigen::MatrixXd DataAssociation::calcScoreMatrix( + const autoware_auto_perception_msgs::msg::DetectedObjects & measurements, + const std::list> & trackers) +{ + Eigen::MatrixXd score_matrix = + Eigen::MatrixXd::Zero(trackers.size(), measurements.objects.size()); + size_t tracker_idx = 0; + for (auto tracker_itr = trackers.begin(); tracker_itr != trackers.end(); + ++tracker_itr, ++tracker_idx) { + const std::uint8_t tracker_label = (*tracker_itr)->getHighestProbLabel(); + + for (size_t measurement_idx = 0; measurement_idx < measurements.objects.size(); + ++measurement_idx) { + const autoware_auto_perception_msgs::msg::DetectedObject & measurement_object = + 
measurements.objects.at(measurement_idx); + const std::uint8_t measurement_label = + utils::getHighestProbLabel(measurement_object.classification); + + double score = 0.0; + const geometry_msgs::msg::PoseWithCovariance tracker_pose_covariance = + (*tracker_itr)->getPoseWithCovariance(measurements.header.stamp); + if (can_assign_matrix_(tracker_label, measurement_label)) { + const double max_dist = max_dist_matrix_(tracker_label, measurement_label); + const double max_area = max_area_matrix_(tracker_label, measurement_label); + const double min_area = min_area_matrix_(tracker_label, measurement_label); + const double max_rad = max_rad_matrix_(tracker_label, measurement_label); + const double dist = getDistance( + measurement_object.kinematics.pose_with_covariance.pose.position, + tracker_pose_covariance.pose.position); + const double area = utils::getArea(measurement_object.shape); + score = (max_dist - std::min(dist, max_dist)) / max_dist; + + // dist gate + if (max_dist < dist) { + score = 0.0; + // area gate + } else if (area < min_area || max_area < area) { + score = 0.0; + // angle gate + } else if (std::fabs(max_rad) < M_PI) { + const double angle = getFormedYawAngle( + measurement_object.kinematics.pose_with_covariance.pose.orientation, + tracker_pose_covariance.pose.orientation, false); + if (std::fabs(max_rad) < std::fabs(angle)) { + score = 0.0; + } + // mahalanobis dist gate + } else if (score < score_threshold_) { + double mahalanobis_dist = getMahalanobisDistance( + measurements.objects.at(measurement_idx).kinematics.pose_with_covariance.pose.position, + tracker_pose_covariance.pose.position, getXYCovariance(tracker_pose_covariance)); + + if (2.448 /*95%*/ <= mahalanobis_dist) { + score = 0.0; + } + } + } + score_matrix(tracker_idx, measurement_idx) = score; + } + } + + return score_matrix; +} diff --git a/perception/multi_object_tracker/src/data_association/mu_successive_shortest_path/mu_successive_shortest_path_wrapper.cpp b/perception/multi_object_tracker/src/data_association/mu_successive_shortest_path/mu_successive_shortest_path_wrapper.cpp new file mode 100644 index 0000000000000..18779a7fffbdf --- /dev/null +++ b/perception/multi_object_tracker/src/data_association/mu_successive_shortest_path/mu_successive_shortest_path_wrapper.cpp @@ -0,0 +1,41 @@ +// Copyright 2021 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include "multi_object_tracker/data_association/solver/mu_successive_shortest_path.hpp" + +#include + +#include +#include +#include +#include +#include +#include +#include + +namespace gnn_solver +{ +void MuSSP::maximizeLinearAssignment( + const std::vector> & cost, std::unordered_map * direct_assignment, + std::unordered_map * reverse_assignment) +{ + // Terminate if the graph is empty + if (cost.size() == 0 || cost.at(0).size() == 0) { + return; + } + + // Solve DA by muSSP + solve_muSSP(cost, direct_assignment, reverse_assignment); +} +} // namespace gnn_solver diff --git a/perception/multi_object_tracker/src/data_association/successive_shortest_path/successive_shortest_path.cpp b/perception/multi_object_tracker/src/data_association/successive_shortest_path/successive_shortest_path.cpp new file mode 100644 index 0000000000000..0b52c09cb54b8 --- /dev/null +++ b/perception/multi_object_tracker/src/data_association/successive_shortest_path/successive_shortest_path.cpp @@ -0,0 +1,370 @@ +// Copyright 2020 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "multi_object_tracker/data_association/solver/successive_shortest_path.hpp" + +#include +#include +#include +#include +#include +#include +#include + +namespace gnn_solver +{ +struct ResidualEdge +{ + // Destination node + const int dst; + int capacity; + const double cost; + int flow; + // Access to the reverse edge by adjacency_list.at(dst).at(reverse) + const int reverse; + + // ResidualEdge() + // : dst(0), capacity(0), cost(0), flow(0), reverse(0) {} + + ResidualEdge(int dst, int capacity, double cost, int flow, int reverse) + : dst(dst), capacity(capacity), cost(cost), flow(flow), reverse(reverse) + { + } +}; + +void SSP::maximizeLinearAssignment( + const std::vector> & cost, std::unordered_map * direct_assignment, + std::unordered_map * reverse_assignment) +{ + // NOTE: Need to set as default arguments + bool sparse_cost = true; + // bool sparse_cost = false; + + // Hyperparameters + // double MAX_COST = 6; + const double MAX_COST = 10; + const double INF_DIST = 10000000; + const double EPS = 1e-5; + + // When there is no agents or no tasks, terminate + if (cost.size() == 0 || cost.at(0).size() == 0) { + return; + } + + // Construct a bipartite graph from the cost matrix + int n_agents = cost.size(); + int n_tasks = cost.at(0).size(); + + int n_dummies; + if (sparse_cost) { + n_dummies = n_agents; + } else { + n_dummies = 0; + } + + int source = 0; + int sink = n_agents + n_tasks + 1; + int n_nodes = n_agents + n_tasks + n_dummies + 2; + + // // Print cost matrix + // std::cout << std::endl; + // for (int agent = 0; agent < n_agents; agent++) + // { + // for (int task = 0; task < n_tasks; task++) + // { + // std::cout << cost.at(agent).at(task) << " "; + // } + // std::cout << std::endl; + // } + + // std::chrono::system_clock::time_point start_time, end_time; + // start_time = std::chrono::system_clock::now(); + + // Adjacency list of residual graph (index: nodes) + // - 0: source node + // - {1, 
..., n_agents}: agent nodes + // - {n_agents+1, ..., n_agents+n_tasks}: task nodes + // - n_agents+n_tasks+1: sink node + // - {n_agents+n_tasks+2, ..., n_agents+n_tasks+1+n_agents}: + // dummy node (when sparse_cost is true) + std::vector> adjacency_list(n_nodes); + + // Reserve memory + for (int v = 0; v < n_nodes; ++v) { + if (v == source) { + // Source + adjacency_list.at(v).reserve(n_agents); + } else if (v <= n_agents) { + // Agents + adjacency_list.at(v).reserve(n_tasks + 1 + 1); + } else if (v <= n_agents + n_tasks) { + // Tasks + adjacency_list.at(v).reserve(n_agents + 1); + } else if (v == sink) { + // Sink + adjacency_list.at(v).reserve(n_tasks + n_dummies); + } else { + // Dummies + adjacency_list.at(v).reserve(2); + } + } + + // Add edges form source + for (int agent = 0; agent < n_agents; ++agent) { + // From source to agent + adjacency_list.at(source).emplace_back(agent + 1, 1, 0, 0, adjacency_list.at(agent + 1).size()); + // From agent to source + adjacency_list.at(agent + 1).emplace_back( + source, 0, 0, 0, adjacency_list.at(source).size() - 1); + } + + // Add edges from agents + for (int agent = 0; agent < n_agents; ++agent) { + for (int task = 0; task < n_tasks; ++task) { + if (!sparse_cost || cost.at(agent).at(task) > EPS) { + // From agent to task + adjacency_list.at(agent + 1).emplace_back( + task + n_agents + 1, 1, MAX_COST - cost.at(agent).at(task), 0, + adjacency_list.at(task + n_agents + 1).size()); + + // From task to agent + adjacency_list.at(task + n_agents + 1) + .emplace_back( + agent + 1, 0, cost.at(agent).at(task) - MAX_COST, 0, + adjacency_list.at(agent + 1).size() - 1); + } + } + } + + // Add edges form tasks + for (int task = 0; task < n_tasks; ++task) { + // From task to sink + adjacency_list.at(task + n_agents + 1) + .emplace_back(sink, 1, 0, 0, adjacency_list.at(sink).size()); + + // From sink to task + adjacency_list.at(sink).emplace_back( + task + n_agents + 1, 0, 0, 0, adjacency_list.at(task + n_agents + 1).size() - 1); + } + + // Add edges from dummy + if (sparse_cost) { + for (int agent = 0; agent < n_agents; ++agent) { + // From agent to dummy + adjacency_list.at(agent + 1).emplace_back( + agent + n_agents + n_tasks + 2, 1, MAX_COST, 0, + adjacency_list.at(agent + n_agents + n_tasks + 2).size()); + + // From dummy to agent + adjacency_list.at(agent + n_agents + n_tasks + 2) + .emplace_back(agent + 1, 0, -MAX_COST, 0, adjacency_list.at(agent + 1).size() - 1); + + // From dummy to sink + adjacency_list.at(agent + n_agents + n_tasks + 2) + .emplace_back(sink, 1, 0, 0, adjacency_list.at(sink).size()); + + // From sink to dummy + adjacency_list.at(sink).emplace_back( + agent + n_agents + n_tasks + 2, 0, 0, 0, + adjacency_list.at(agent + n_agents + n_tasks + 2).size() - 1); + } + } + + // Maximum flow value + const int max_flow = std::min(n_agents, n_tasks); + + // Feasible potentials + std::vector potentials(n_nodes, 0); + + // Shortest path lengths + std::vector distances(n_nodes, INF_DIST); + + // Whether previously visited the node or not + std::vector is_visited(n_nodes, false); + + // Parent node () + std::vector> prev_values(n_nodes); + + for (int i = 0; i < max_flow; ++i) { + // Initialize priority queue () + std::priority_queue< + std::pair, std::vector>, + std::greater>> + p_queue; + + // Reset all trajectory states + if (i > 0) { + std::fill(distances.begin(), distances.end(), INF_DIST); + std::fill(is_visited.begin(), is_visited.end(), false); + } + + // Start trajectory from the source node + p_queue.push(std::make_pair(0, source)); + 
distances.at(source) = 0; + + while (!p_queue.empty()) { + // Get the next element + std::pair cur_elem = p_queue.top(); + // std::cout << "[pop]: (" << cur_elem.first << ", " << cur_elem.second << ")" << std::endl; + p_queue.pop(); + + double cur_node_dist = cur_elem.first; + int cur_node = cur_elem.second; + + // If already visited node, skip and continue + if (is_visited.at(cur_node)) { + continue; + } + assert(cur_node_dist == distances.at(cur_node)); + + // Mark as visited + is_visited.at(cur_node) = true; + // Update potential + potentials.at(cur_node) += cur_node_dist; + + // When reached to the sink node, terminate. + if (cur_node == sink) { + break; + } + + // Loop over the incident nodes(/edges) + for (auto it_incident_edge = adjacency_list.at(cur_node).cbegin(); + it_incident_edge != adjacency_list.at(cur_node).cend(); it_incident_edge++) { + // If the node is not visited and have capacity to increase flow, visit. + if (!is_visited.at(it_incident_edge->dst) && it_incident_edge->capacity > 0) { + // Calculate reduced cost + double reduced_cost = + it_incident_edge->cost + potentials.at(cur_node) - potentials.at(it_incident_edge->dst); + assert(reduced_cost >= 0); + if (distances.at(it_incident_edge->dst) > reduced_cost) { + distances.at(it_incident_edge->dst) = reduced_cost; + prev_values.at(it_incident_edge->dst) = + std::make_pair(cur_node, it_incident_edge - adjacency_list.at(cur_node).cbegin()); + // std::cout << "[push]: (" << reduced_cost << ", " << next_v << ")" << std::endl; + p_queue.push(std::make_pair(reduced_cost, it_incident_edge->dst)); + } + } + } + } + + // Shortest path length to sink is greater than MAX_COST, + // which means no non-dummy routes left, terminate + if (potentials.at(sink) >= MAX_COST) { + break; + } + + // Update potentials of unvisited nodes + for (int v = 0; v < n_nodes; ++v) { + if (!is_visited.at(v)) { + potentials.at(v) += distances.at(sink); + } + } + // //Print potentials + // for (int v = 0; v < n_nodes; ++v) + // { + // std::cout << potentials.at(v) << ", "; + // } + // std::cout << std::endl; + + // Increase/decrease flow and capacity along the shortest path from the source to the sink + int v = sink; + int prev_v; + while (v != source) { + ResidualEdge & e_forward = + adjacency_list.at(prev_values.at(v).first).at(prev_values.at(v).second); + assert(e_forward.dst == v); + ResidualEdge & e_backward = adjacency_list.at(v).at(e_forward.reverse); + prev_v = e_backward.dst; + + if (e_backward.flow == 0) { + // Increase flow + // State A + assert(e_forward.capacity == 1); + assert(e_forward.flow == 0); + assert(e_backward.capacity == 0); + assert(e_backward.flow == 0); + + e_forward.capacity -= 1; + e_forward.flow += 1; + e_backward.capacity += 1; + + // State B + assert(e_forward.capacity == 0); + assert(e_forward.flow == 1); + assert(e_backward.capacity == 1); + assert(e_backward.flow == 0); + } else { + // Decrease flow + // State B + assert(e_forward.capacity == 1); + assert(e_forward.flow == 0); + assert(e_backward.capacity == 0); + assert(e_backward.flow == 1); + + e_forward.capacity -= 1; + e_backward.capacity += 1; + e_backward.flow -= 1; + + // State A + assert(e_forward.capacity == 0); + assert(e_forward.flow == 0); + assert(e_backward.capacity == 1); + assert(e_backward.flow == 0); + } + + v = prev_v; + } + +#ifndef NDEBUG + // Check if the potentials are feasible potentials + for (int v = 0; v < n_nodes; ++v) { + for (auto it_incident_edge = adjacency_list.at(v).cbegin(); + it_incident_edge != adjacency_list.at(v).cend(); 
++it_incident_edge) { + if (it_incident_edge->capacity > 0) { + double reduced_cost = + it_incident_edge->cost + potentials.at(v) - potentials.at(it_incident_edge->dst); + assert(reduced_cost >= 0); + } + } + } +#endif + } + + // Output + for (int agent = 0; agent < n_agents; ++agent) { + for (auto it_incident_edge = adjacency_list.at(agent + 1).cbegin(); + it_incident_edge != adjacency_list.at(agent + 1).cend(); ++it_incident_edge) { + int task = it_incident_edge->dst - (n_agents + 1); + + // If the flow value is 1 and task is not dummy, assign the task to the agent. + if (it_incident_edge->flow == 1 && 0 <= task && task < n_tasks) { + (*direct_assignment)[agent] = task; + (*reverse_assignment)[task] = agent; + break; + } + } + } + +#ifndef NDEBUG + // Check if the result is valid assignment + for (int agent = 0; agent < n_agents; ++agent) { + if (direct_assignment->find(agent) != direct_assignment->cend()) { + int task = (*direct_assignment).at(agent); + assert(direct_assignment->at(agent) == task); + assert(reverse_assignment->at(task) == agent); + } + } +#endif +} +} // namespace gnn_solver diff --git a/perception/multi_object_tracker/src/multi_object_tracker_core.cpp b/perception/multi_object_tracker/src/multi_object_tracker_core.cpp new file mode 100644 index 0000000000000..d9f0d3ff44428 --- /dev/null +++ b/perception/multi_object_tracker/src/multi_object_tracker_core.cpp @@ -0,0 +1,386 @@ +// Copyright 2020 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
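+//
+// Node implementation: subscribes to DetectedObjects, transforms them into the
+// configured world frame, predicts every tracker to the measurement time, runs
+// data association, updates matched trackers, spawns trackers for unmatched
+// detections, prunes old and overlapping trackers, and publishes TrackedObjects
+// (per measurement, or from a timer when delay compensation is enabled).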
+// +// + +#include + +#include +#include + +#include +#include +#include +#include +#include +#include +#include + +#define EIGEN_MPL2_ONLY +#include "multi_object_tracker/multi_object_tracker_core.hpp" +#include "multi_object_tracker/utils/utils.hpp" + +#include +#include +#include + +using Label = autoware_auto_perception_msgs::msg::ObjectClassification; + +namespace +{ +boost::optional getTransform( + const tf2_ros::Buffer & tf_buffer, const std::string & source_frame_id, + const std::string & target_frame_id, const rclcpp::Time & time) +{ + try { + geometry_msgs::msg::TransformStamped self_transform_stamped; + self_transform_stamped = tf_buffer.lookupTransform( + /*target*/ target_frame_id, /*src*/ source_frame_id, time, + rclcpp::Duration::from_seconds(0.5)); + return self_transform_stamped.transform; + } catch (tf2::TransformException & ex) { + RCLCPP_WARN_STREAM(rclcpp::get_logger("multi_object_tracker"), ex.what()); + return boost::none; + } +} + +bool transformDetectedObjects( + const autoware_auto_perception_msgs::msg::DetectedObjects & input_msg, + const std::string & target_frame_id, const tf2_ros::Buffer & tf_buffer, + autoware_auto_perception_msgs::msg::DetectedObjects & output_msg) +{ + output_msg = input_msg; + + /* transform to world coordinate */ + if (input_msg.header.frame_id != target_frame_id) { + output_msg.header.frame_id = target_frame_id; + tf2::Transform tf_target2objects_world; + tf2::Transform tf_target2objects; + tf2::Transform tf_objects_world2objects; + { + const auto ros_target2objects_world = + getTransform(tf_buffer, input_msg.header.frame_id, target_frame_id, input_msg.header.stamp); + if (!ros_target2objects_world) { + return false; + } + tf2::fromMsg(*ros_target2objects_world, tf_target2objects_world); + } + for (size_t i = 0; i < output_msg.objects.size(); ++i) { + tf2::fromMsg( + output_msg.objects.at(i).kinematics.pose_with_covariance.pose, tf_objects_world2objects); + tf_target2objects = tf_target2objects_world * tf_objects_world2objects; + tf2::toMsg(tf_target2objects, output_msg.objects.at(i).kinematics.pose_with_covariance.pose); + // TODO(yukkysaito) transform covariance + } + } + return true; +} + +inline float getVelocity(const autoware_auto_perception_msgs::msg::TrackedObject & object) +{ + return std::hypot( + object.kinematics.twist_with_covariance.twist.linear.x, + object.kinematics.twist_with_covariance.twist.linear.y); +} + +inline geometry_msgs::msg::Pose getPose( + const autoware_auto_perception_msgs::msg::TrackedObject & object) +{ + return object.kinematics.pose_with_covariance.pose; +} + +float getXYSquareDistance( + const geometry_msgs::msg::Transform & self_transform, + const autoware_auto_perception_msgs::msg::TrackedObject & object) +{ + const auto object_pos = getPose(object).position; + const float x = self_transform.translation.x - object_pos.x; + const float y = self_transform.translation.y - object_pos.y; + return x * x + y * y; +} + +/** + * @brief If the tracker is stable at a low speed and has a vehicle type, it will keep + * tracking for a longer time to deal with detection lost due to occlusion, etc. + * @param tracker The tracker to be determined. + * @param time Target time to determine. + * @param self_transform Position of the vehicle at the target time. + * @return Result of deciding whether to leave tracker or not. 
+ */ +bool isSpecificAlivePattern( + const std::shared_ptr & tracker, const rclcpp::Time & time, + const geometry_msgs::msg::Transform & self_transform) +{ + autoware_auto_perception_msgs::msg::TrackedObject object; + tracker->getTrackedObject(time, object); + + constexpr float min_detection_rate = 0.2; + constexpr int min_measurement_count = 5; + constexpr float max_elapsed_time = 10.0; + constexpr float max_velocity = 1.0; + constexpr float max_distance = 100.0; + + const std::uint8_t label = tracker->getHighestProbLabel(); + + const float detection_rate = + tracker->getTotalMeasurementCount() / + (tracker->getTotalNoMeasurementCount() + tracker->getTotalMeasurementCount()); + + const bool big_vehicle = (label == Label::TRUCK || label == Label::BUS); + + const bool slow_velocity = getVelocity(object) < max_velocity; + + const bool high_confidence = + (min_detection_rate < detection_rate || + min_measurement_count < tracker->getTotalMeasurementCount()); + + const bool not_too_far = + getXYSquareDistance(self_transform, object) < max_distance * max_distance; + + const bool within_max_survival_period = + tracker->getElapsedTimeFromLastUpdate(time) < max_elapsed_time; + + const bool is_specific_alive_pattern = + high_confidence && big_vehicle && within_max_survival_period && not_too_far && slow_velocity; + + return is_specific_alive_pattern; +} + +} // namespace + +MultiObjectTracker::MultiObjectTracker(const rclcpp::NodeOptions & node_options) +: rclcpp::Node("multi_object_tracker", node_options), + tf_buffer_(this->get_clock()), + tf_listener_(tf_buffer_) +{ + // Create publishers and subscribers + detected_object_sub_ = create_subscription( + "input", rclcpp::QoS{1}, + std::bind(&MultiObjectTracker::onMeasurement, this, std::placeholders::_1)); + tracked_objects_pub_ = + create_publisher("output", rclcpp::QoS{1}); + + // Parameters + double publish_rate = declare_parameter("publish_rate", 30.0); + world_frame_id_ = declare_parameter("world_frame_id", "world"); + bool enable_delay_compensation{declare_parameter("enable_delay_compensation", false)}; + + auto cti = std::make_shared( + this->get_node_base_interface(), this->get_node_timers_interface()); + tf_buffer_.setCreateTimerInterface(cti); + + // Create ROS time based timer + if (enable_delay_compensation) { + auto timer_callback = std::bind(&MultiObjectTracker::onTimer, this); + auto period = std::chrono::duration_cast( + std::chrono::duration(1.0 / publish_rate)); + + publish_timer_ = std::make_shared>( + this->get_clock(), period, std::move(timer_callback), + this->get_node_base_interface()->get_context()); + this->get_node_timers_interface()->add_timer(publish_timer_, nullptr); + } + + const auto tmp = this->declare_parameter>("can_assign_matrix"); + const std::vector can_assign_matrix(tmp.begin(), tmp.end()); + + const auto max_dist_matrix = this->declare_parameter>("max_dist_matrix"); + const auto max_area_matrix = this->declare_parameter>("max_area_matrix"); + const auto min_area_matrix = this->declare_parameter>("min_area_matrix"); + const auto max_rad_matrix = this->declare_parameter>("max_rad_matrix"); + + data_association_ = std::make_unique( + can_assign_matrix, max_dist_matrix, max_area_matrix, min_area_matrix, max_rad_matrix); +} + +void MultiObjectTracker::onMeasurement( + const autoware_auto_perception_msgs::msg::DetectedObjects::ConstSharedPtr input_objects_msg) +{ + const auto self_transform = + getTransform(tf_buffer_, "base_link", world_frame_id_, input_objects_msg->header.stamp); + if (!self_transform) { + return; + 
} + + /* transform to world coordinate */ + autoware_auto_perception_msgs::msg::DetectedObjects transformed_objects; + if (!transformDetectedObjects( + *input_objects_msg, world_frame_id_, tf_buffer_, transformed_objects)) { + return; + } + /* tracker prediction */ + rclcpp::Time measurement_time = input_objects_msg->header.stamp; + for (auto itr = list_tracker_.begin(); itr != list_tracker_.end(); ++itr) { + (*itr)->predict(measurement_time); + } + + /* global nearest neighbor */ + std::unordered_map direct_assignment, reverse_assignment; + Eigen::MatrixXd score_matrix = data_association_->calcScoreMatrix( + transformed_objects, list_tracker_); // row : tracker, col : measurement + data_association_->assign(score_matrix, direct_assignment, reverse_assignment); + + /* tracker measurement update */ + int tracker_idx = 0; + for (auto tracker_itr = list_tracker_.begin(); tracker_itr != list_tracker_.end(); + ++tracker_itr, ++tracker_idx) { + if (direct_assignment.find(tracker_idx) != direct_assignment.end()) { // found + (*(tracker_itr)) + ->updateWithMeasurement( + transformed_objects.objects.at(direct_assignment.find(tracker_idx)->second), + measurement_time); + } else { // not found + (*(tracker_itr))->updateWithoutMeasurement(); + } + } + + /* life cycle check */ + checkTrackerLifeCycle(list_tracker_, measurement_time, *self_transform); + /* sanitize trackers */ + sanitizeTracker(list_tracker_, measurement_time); + + /* new tracker */ + for (size_t i = 0; i < transformed_objects.objects.size(); ++i) { + if (reverse_assignment.find(i) != reverse_assignment.end()) { // found + continue; + } + list_tracker_.push_back(createNewTracker(transformed_objects.objects.at(i), measurement_time)); + } + + if (publish_timer_ == nullptr) { + publish(measurement_time); + } +} + +std::shared_ptr MultiObjectTracker::createNewTracker( + const autoware_auto_perception_msgs::msg::DetectedObject & object, + const rclcpp::Time & time) const +{ + const std::uint8_t label = utils::getHighestProbLabel(object.classification); + if (label == Label::CAR || label == Label::TRUCK || label == Label::BUS) { + return std::make_shared(time, object); + } else if (label == Label::PEDESTRIAN) { + return std::make_shared(time, object); + } else if (label == Label::BICYCLE || label == Label::MOTORCYCLE) { + return std::make_shared(time, object); + } else { + return std::make_shared(time, object); + } +} + +void MultiObjectTracker::onTimer() +{ + rclcpp::Time current_time = this->now(); + const auto self_transform = getTransform(tf_buffer_, world_frame_id_, "base_link", current_time); + if (!self_transform) { + return; + } + + /* life cycle check */ + checkTrackerLifeCycle(list_tracker_, current_time, *self_transform); + /* sanitize trackers */ + sanitizeTracker(list_tracker_, current_time); + + // Publish + publish(current_time); +} + +void MultiObjectTracker::checkTrackerLifeCycle( + std::list> & list_tracker, const rclcpp::Time & time, + const geometry_msgs::msg::Transform & self_transform) +{ + /* params */ + constexpr float max_elapsed_time = 1.0; + + /* delete tracker */ + for (auto itr = list_tracker.begin(); itr != list_tracker.end(); ++itr) { + const bool is_old = max_elapsed_time < (*itr)->getElapsedTimeFromLastUpdate(time); + const bool is_specific_alive_pattern = isSpecificAlivePattern(*itr, time, self_transform); + if (is_old && !is_specific_alive_pattern) { + auto erase_itr = itr; + --itr; + list_tracker.erase(erase_itr); + } + } +} + +void MultiObjectTracker::sanitizeTracker( + std::list> & list_tracker, const 
rclcpp::Time & time) +{ + constexpr float min_iou = 0.1; + constexpr double distance_threshold = 5.0; + /* delete collision tracker */ + for (auto itr1 = list_tracker.begin(); itr1 != list_tracker.end(); ++itr1) { + autoware_auto_perception_msgs::msg::TrackedObject object1; + (*itr1)->getTrackedObject(time, object1); + for (auto itr2 = std::next(itr1); itr2 != list_tracker.end(); ++itr2) { + autoware_auto_perception_msgs::msg::TrackedObject object2; + (*itr2)->getTrackedObject(time, object2); + const double distance = std::hypot( + object1.kinematics.pose_with_covariance.pose.position.x - + object2.kinematics.pose_with_covariance.pose.position.x, + object1.kinematics.pose_with_covariance.pose.position.y - + object2.kinematics.pose_with_covariance.pose.position.y); + if (distance_threshold < distance) { + continue; + } + if (min_iou < utils::get2dIoU(object1, object2)) { + if ((*itr1)->getTotalMeasurementCount() < (*itr2)->getTotalMeasurementCount()) { + itr1 = list_tracker.erase(itr1); + --itr1; + break; + } else { + itr2 = list_tracker.erase(itr2); + --itr2; + } + } + } + } +} + +inline bool MultiObjectTracker::shouldTrackerPublish( + const std::shared_ptr tracker) const +{ + constexpr int measurement_count_threshold = 3; + if (tracker->getTotalMeasurementCount() < measurement_count_threshold) { + return false; + } + return true; +} + +void MultiObjectTracker::publish(const rclcpp::Time & time) const +{ + const auto subscriber_count = tracked_objects_pub_->get_subscription_count() + + tracked_objects_pub_->get_intra_process_subscription_count(); + if (subscriber_count < 1) { + return; + } + // Create output msg + autoware_auto_perception_msgs::msg::TrackedObjects output_msg; + output_msg.header.frame_id = world_frame_id_; + output_msg.header.stamp = time; + for (auto itr = list_tracker_.begin(); itr != list_tracker_.end(); ++itr) { + if (!shouldTrackerPublish(*itr)) { + continue; + } + autoware_auto_perception_msgs::msg::TrackedObject object; + (*itr)->getTrackedObject(time, object); + output_msg.objects.push_back(object); + } + + // Publish + tracked_objects_pub_->publish(output_msg); +} + +RCLCPP_COMPONENTS_REGISTER_NODE(MultiObjectTracker) diff --git a/perception/multi_object_tracker/src/tracker/model/bicycle_tracker.cpp b/perception/multi_object_tracker/src/tracker/model/bicycle_tracker.cpp new file mode 100644 index 0000000000000..5d34e995f2f27 --- /dev/null +++ b/perception/multi_object_tracker/src/tracker/model/bicycle_tracker.cpp @@ -0,0 +1,403 @@ +// Copyright 2020 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
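+//
+// EKF-based bicycle/motorcycle tracker. State: [x, y, yaw, vx, wz] with a
+// constant velocity / constant turn-rate motion model. Only the measured
+// position (x, y) is used in the update step; yaw, vx and wz are estimated
+// (the yaw measurement update is kept commented out below).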
+// +// +// Author: v1.0 Yukihiro Saito +// + +#include "multi_object_tracker/tracker/model/bicycle_tracker.hpp" + +#include "multi_object_tracker/utils/utils.hpp" + +#include + +#include +#include +#include +#include +#include + +#define EIGEN_MPL2_ONLY +#include +#include + +BicycleTracker::BicycleTracker( + const rclcpp::Time & time, const autoware_auto_perception_msgs::msg::DetectedObject & object) +: Tracker(time, object.classification), + logger_(rclcpp::get_logger("BicycleTracker")), + last_update_time_(time), + z_(object.kinematics.pose_with_covariance.pose.position.z) +{ + object_ = object; + + // initialize params + ekf_params_.use_measurement_covariance = false; + float q_stddev_x = 0.0; // [m/s] + float q_stddev_y = 0.0; // [m/s] + float q_stddev_yaw = autoware_utils::deg2rad(10); // [rad/s] + float q_stddev_vx = autoware_utils::kmph2mps(10); // [m/(s*s)] + float q_stddev_wz = autoware_utils::deg2rad(25); // [rad/(s*s)] + float r_stddev_x = 0.6; // [m] + float r_stddev_y = 0.4; // [m] + float r_stddev_yaw = autoware_utils::deg2rad(30); // [rad] + float p0_stddev_x = 0.8; // [m/s] + float p0_stddev_y = 0.5; // [m/s] + float p0_stddev_yaw = autoware_utils::deg2rad(1000); // [rad/s] + float p0_stddev_vx = autoware_utils::kmph2mps(1000); // [m/(s*s)] + float p0_stddev_wz = autoware_utils::deg2rad(10); // [rad/(s*s)] + ekf_params_.q_cov_x = std::pow(q_stddev_x, 2.0); + ekf_params_.q_cov_y = std::pow(q_stddev_y, 2.0); + ekf_params_.q_cov_yaw = std::pow(q_stddev_yaw, 2.0); + ekf_params_.q_cov_vx = std::pow(q_stddev_vx, 2.0); + ekf_params_.q_cov_wz = std::pow(q_stddev_wz, 2.0); + ekf_params_.r_cov_x = std::pow(r_stddev_x, 2.0); + ekf_params_.r_cov_y = std::pow(r_stddev_y, 2.0); + ekf_params_.r_cov_yaw = std::pow(r_stddev_yaw, 2.0); + ekf_params_.p0_cov_x = std::pow(p0_stddev_x, 2.0); + ekf_params_.p0_cov_y = std::pow(p0_stddev_y, 2.0); + ekf_params_.p0_cov_yaw = std::pow(p0_stddev_yaw, 2.0); + ekf_params_.p0_cov_vx = std::pow(p0_stddev_vx, 2.0); + ekf_params_.p0_cov_wz = std::pow(p0_stddev_wz, 2.0); + max_vx_ = autoware_utils::kmph2mps(100); // [m/s] + max_wz_ = autoware_utils::deg2rad(30); // [rad/s] + + // initialize X matrix + Eigen::MatrixXd X(ekf_params_.dim_x, 1); + X(IDX::X) = object.kinematics.pose_with_covariance.pose.position.x; + X(IDX::Y) = object.kinematics.pose_with_covariance.pose.position.y; + X(IDX::YAW) = tf2::getYaw(object.kinematics.pose_with_covariance.pose.orientation); + if (object.kinematics.has_twist) { + X(IDX::VX) = object.kinematics.twist_with_covariance.twist.linear.x; + X(IDX::WZ) = object.kinematics.twist_with_covariance.twist.angular.z; + } else { + X(IDX::VX) = 0.0; + X(IDX::WZ) = 0.0; + } + + // initialize P matrix + Eigen::MatrixXd P = Eigen::MatrixXd::Zero(ekf_params_.dim_x, ekf_params_.dim_x); + if ( + !ekf_params_.use_measurement_covariance || + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_X] == 0.0 || + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_Y] == 0.0 || + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::YAW_YAW] == 0.0) { + const double cos_yaw = std::cos(X(IDX::YAW)); + const double sin_yaw = std::sin(X(IDX::YAW)); + const double sin_2yaw = std::sin(2.0f * X(IDX::YAW)); + // Rotate the covariance matrix according to the vehicle yaw + // because p0_cov_x and y are in the vehicle coordinate system. 
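+  // For reference, the block below is the 2-D rotation of the initial covariance:
+  // with c = cos(yaw), s = sin(yaw),
+  //   P_xy = R(yaw) * diag(p0_cov_x, p0_cov_y) * R(yaw)^T
+  //        = [ p0_cov_x*c^2 + p0_cov_y*s^2          0.5*(p0_cov_x - p0_cov_y)*sin(2*yaw) ]
+  //          [ 0.5*(p0_cov_x - p0_cov_y)*sin(2*yaw)  p0_cov_x*s^2 + p0_cov_y*c^2         ]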
+ P(IDX::X, IDX::X) = + ekf_params_.p0_cov_x * cos_yaw * cos_yaw + ekf_params_.p0_cov_y * sin_yaw * sin_yaw; + P(IDX::X, IDX::Y) = 0.5f * (ekf_params_.p0_cov_x - ekf_params_.p0_cov_y) * sin_2yaw; + P(IDX::Y, IDX::Y) = + ekf_params_.p0_cov_x * sin_yaw * sin_yaw + ekf_params_.p0_cov_y * cos_yaw * cos_yaw; + P(IDX::Y, IDX::X) = P(IDX::X, IDX::Y); + P(IDX::YAW, IDX::YAW) = ekf_params_.p0_cov_yaw; + P(IDX::VX, IDX::VX) = ekf_params_.p0_cov_vx; + P(IDX::WZ, IDX::WZ) = ekf_params_.p0_cov_wz; + } else { + P(IDX::X, IDX::X) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_X]; + P(IDX::X, IDX::Y) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_Y]; + P(IDX::Y, IDX::Y) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_Y]; + P(IDX::Y, IDX::X) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_X]; + P(IDX::YAW, IDX::YAW) = + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::YAW_YAW]; + if (object.kinematics.has_twist_covariance) { + P(IDX::VX, IDX::VX) = + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::X_X]; + P(IDX::WZ, IDX::WZ) = + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::YAW_YAW]; + } else { + P(IDX::VX, IDX::VX) = ekf_params_.p0_cov_vx; + P(IDX::WZ, IDX::WZ) = ekf_params_.p0_cov_wz; + } + } + + if (object.shape.type == autoware_auto_perception_msgs::msg::Shape::BOUNDING_BOX) { + bounding_box_ = { + object.shape.dimensions.x, object.shape.dimensions.y, object.shape.dimensions.z}; + } else { + bounding_box_ = {1.0, 0.5, 1.7}; + } + ekf_.init(X, P); +} + +bool BicycleTracker::predict(const rclcpp::Time & time) +{ + const double dt = (time - last_update_time_).seconds(); + bool ret = predict(dt, ekf_); + if (ret) { + last_update_time_ = time; + } + return ret; +} + +bool BicycleTracker::predict(const double dt, KalmanFilter & ekf) const +{ + /* == Nonlinear model == + * + * x_{k+1} = x_k + vx_k * cos(yaw_k) * dt + * y_{k+1} = y_k + vx_k * sin(yaw_k) * dt + * yaw_{k+1} = yaw_k + (wz_k) * dt + * vx_{k+1} = vx_k + * wz_{k+1} = wz_k + * + */ + + /* == Linearized model == + * + * A = [ 1, 0, -vx*sin(yaw)*dt, cos(yaw)*dt, 0] + * [ 0, 1, vx*cos(yaw)*dt, sin(yaw)*dt, 0] + * [ 0, 0, 1, 0, dt] + * [ 0, 0, 0, 1, 0] + * [ 0, 0, 0, 0, 1] + */ + + // X t + Eigen::MatrixXd X_t(ekf_params_.dim_x, 1); // predicted state + ekf.getX(X_t); + const double cos_yaw = std::cos(X_t(IDX::YAW)); + const double sin_yaw = std::sin(X_t(IDX::YAW)); + const double sin_2yaw = std::sin(2.0f * X_t(IDX::YAW)); + + // X t+1 + Eigen::MatrixXd X_next_t(ekf_params_.dim_x, 1); // predicted state + X_next_t(IDX::X) = X_t(IDX::X) + X_t(IDX::VX) * cos_yaw * dt; // dx = v * cos(yaw) + X_next_t(IDX::Y) = X_t(IDX::Y) + X_t(IDX::VX) * sin_yaw * dt; // dy = v * sin(yaw) + X_next_t(IDX::YAW) = X_t(IDX::YAW) + (X_t(IDX::WZ)) * dt; // dyaw = omega + X_next_t(IDX::VX) = X_t(IDX::VX); + X_next_t(IDX::WZ) = X_t(IDX::WZ); + + // A + Eigen::MatrixXd A = Eigen::MatrixXd::Identity(ekf_params_.dim_x, ekf_params_.dim_x); + A(IDX::X, IDX::YAW) = -X_t(IDX::VX) * sin_yaw * dt; + A(IDX::X, IDX::VX) = cos_yaw * dt; + A(IDX::Y, IDX::YAW) = X_t(IDX::VX) * cos_yaw * dt; + A(IDX::Y, IDX::VX) = sin_yaw * dt; + A(IDX::YAW, IDX::WZ) = dt; + + // Q + Eigen::MatrixXd Q = Eigen::MatrixXd::Zero(ekf_params_.dim_x, ekf_params_.dim_x); + // Rotate the covariance matrix according to the vehicle yaw + // because q_cov_x and y are in the vehicle coordinate system. 
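+  // Same 2-D rotation as used for the initial covariance, applied to the process
+  // noise and discretized over the time step:
+  //   Q_xy = R(yaw) * diag(q_cov_x, q_cov_y) * R(yaw)^T * dt^2,
+  // with the yaw, vx and wz terms set to q_cov_* * dt^2 on the diagonal.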
+ Q(IDX::X, IDX::X) = + (ekf_params_.q_cov_x * cos_yaw * cos_yaw + ekf_params_.q_cov_y * sin_yaw * sin_yaw) * dt * dt; + Q(IDX::X, IDX::Y) = (0.5f * (ekf_params_.q_cov_x - ekf_params_.q_cov_y) * sin_2yaw) * dt * dt; + Q(IDX::Y, IDX::Y) = + (ekf_params_.q_cov_x * sin_yaw * sin_yaw + ekf_params_.q_cov_y * cos_yaw * cos_yaw) * dt * dt; + Q(IDX::Y, IDX::X) = Q(IDX::X, IDX::Y); + Q(IDX::YAW, IDX::YAW) = ekf_params_.q_cov_yaw * dt * dt; + Q(IDX::VX, IDX::VX) = ekf_params_.q_cov_vx * dt * dt; + Q(IDX::WZ, IDX::WZ) = ekf_params_.q_cov_wz * dt * dt; + Eigen::MatrixXd B = Eigen::MatrixXd::Zero(ekf_params_.dim_x, ekf_params_.dim_x); + Eigen::MatrixXd u = Eigen::MatrixXd::Zero(ekf_params_.dim_x, 1); + + if (!ekf.predict(X_next_t, A, Q)) { + RCLCPP_WARN(logger_, "Cannot predict"); + } + + return true; +} + +bool BicycleTracker::measureWithPose( + const autoware_auto_perception_msgs::msg::DetectedObject & object) +{ + constexpr int dim_y = 2; // pos x, pos y, depending on Pose output + // double measurement_yaw = + // autoware_utils::normalizeRadian(tf2::getYaw(object.state.pose_covariance.pose.orientation)); + // { + // Eigen::MatrixXd X_t(ekf_params_.dim_x, 1); + // ekf_.getX(X_t); + // while (M_PI_2 <= X_t(IDX::YAW) - measurement_yaw) { + // measurement_yaw = measurement_yaw + M_PI; + // } + // while (M_PI_2 <= measurement_yaw - X_t(IDX::YAW)) { + // measurement_yaw = measurement_yaw - M_PI; + // } + // float theta = std::acos( + // std::cos(X_t(IDX::YAW)) * std::cos(measurement_yaw) + + // std::sin(X_t(IDX::YAW)) * std::sin(measurement_yaw)); + // if (autoware_utils::deg2rad(60) < std::fabs(theta)) return false; + // } + + /* Set measurement matrix */ + Eigen::MatrixXd Y(dim_y, 1); + Y << object.kinematics.pose_with_covariance.pose.position.x, + object.kinematics.pose_with_covariance.pose.position.y; + + /* Set measurement matrix */ + Eigen::MatrixXd C = Eigen::MatrixXd::Zero(dim_y, ekf_params_.dim_x); + C(0, IDX::X) = 1.0; // for pos x + C(1, IDX::Y) = 1.0; // for pos y + // C(2, IDX::YAW) = 1.0; // for yaw + + /* Set measurement noise covariance */ + Eigen::MatrixXd R = Eigen::MatrixXd::Zero(dim_y, dim_y); + if ( + !ekf_params_.use_measurement_covariance || + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_X] == 0.0 || + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_Y] == 0.0) { + R(0, 0) = ekf_params_.r_cov_x; // x - x + R(0, 1) = 0.0; // x - y + R(1, 1) = ekf_params_.r_cov_y; // y - y + R(1, 0) = R(0, 1); // y - x + // R(2, 2) = ekf_params_.r_cov_yaw; // yaw - yaw + } else { + R(0, 0) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_X]; + R(0, 1) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_Y]; + // R(0, 2) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_YAW]; + R(1, 0) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_X]; + R(1, 1) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_Y]; + // R(1, 2) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_YAW]; + // R(2, 0) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::YAW_X]; + // R(2, 1) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::YAW_Y]; + // R(2, 2) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::YAW_YAW]; + } + if (!ekf_.update(Y, C, R)) { + RCLCPP_WARN(logger_, "Cannot update"); + } + + // normalize yaw and limit vx, wz + { + Eigen::MatrixXd X_t(ekf_params_.dim_x, 1); + 
Eigen::MatrixXd P_t(ekf_params_.dim_x, ekf_params_.dim_x); + ekf_.getX(X_t); + ekf_.getP(P_t); + X_t(IDX::YAW) = autoware_utils::normalizeRadian(X_t(IDX::YAW)); + if (!(-max_vx_ <= X_t(IDX::VX) && X_t(IDX::VX) <= max_vx_)) { + X_t(IDX::VX) = X_t(IDX::VX) < 0 ? -max_vx_ : max_vx_; + } + if (!(-max_wz_ <= X_t(IDX::WZ) && X_t(IDX::WZ) <= max_wz_)) { + X_t(IDX::WZ) = X_t(IDX::WZ) < 0 ? -max_wz_ : max_wz_; + } + ekf_.init(X_t, P_t); + } + + // position z + constexpr float gain = 0.9; + z_ = gain * z_ + (1.0 - gain) * object.kinematics.pose_with_covariance.pose.position.z; + + return true; +} + +bool BicycleTracker::measureWithShape( + const autoware_auto_perception_msgs::msg::DetectedObject & object) +{ + if (object.shape.type != autoware_auto_perception_msgs::msg::Shape::BOUNDING_BOX) { + return false; + } + constexpr float gain = 0.9; + + bounding_box_.width = gain * bounding_box_.width + (1.0 - gain) * object.shape.dimensions.x; + bounding_box_.length = gain * bounding_box_.length + (1.0 - gain) * object.shape.dimensions.y; + bounding_box_.height = gain * bounding_box_.height + (1.0 - gain) * object.shape.dimensions.z; + + return true; +} + +bool BicycleTracker::measure( + const autoware_auto_perception_msgs::msg::DetectedObject & object, const rclcpp::Time & time) +{ + object_ = object; + + if (0.01 /*10msec*/ < std::fabs((time - last_update_time_).seconds())) { + RCLCPP_WARN( + logger_, "There is a large gap between predicted time and measurement time. (%f)", + (time - last_update_time_).seconds()); + } + + measureWithPose(object); + measureWithShape(object); + + return true; +} + +bool BicycleTracker::getTrackedObject( + const rclcpp::Time & time, autoware_auto_perception_msgs::msg::TrackedObject & object) const +{ + object = utils::toTrackedObject(object_); + object.object_id = getUUID(); + object.classification = getClassification(); + + // predict kinematics + KalmanFilter tmp_ekf_for_no_update = ekf_; + const double dt = (time - last_update_time_).seconds(); + if (0.001 /*1msec*/ < dt) { + predict(dt, tmp_ekf_for_no_update); + } + Eigen::MatrixXd X_t(ekf_params_.dim_x, 1); // predicted state + Eigen::MatrixXd P(ekf_params_.dim_x, ekf_params_.dim_x); // predicted state + tmp_ekf_for_no_update.getX(X_t); + tmp_ekf_for_no_update.getP(P); + + // position + object.kinematics.pose_with_covariance.pose.position.x = X_t(IDX::X); + object.kinematics.pose_with_covariance.pose.position.y = X_t(IDX::Y); + object.kinematics.pose_with_covariance.pose.position.z = z_; + // quaternion + { + double roll, pitch, yaw; + tf2::Quaternion original_quaternion; + tf2::fromMsg(object_.kinematics.pose_with_covariance.pose.orientation, original_quaternion); + tf2::Matrix3x3(original_quaternion).getRPY(roll, pitch, yaw); + tf2::Quaternion filtered_quaternion; + filtered_quaternion.setRPY(roll, pitch, X_t(IDX::YAW)); + object.kinematics.pose_with_covariance.pose.orientation.x = filtered_quaternion.x(); + object.kinematics.pose_with_covariance.pose.orientation.y = filtered_quaternion.y(); + object.kinematics.pose_with_covariance.pose.orientation.z = filtered_quaternion.z(); + object.kinematics.pose_with_covariance.pose.orientation.w = filtered_quaternion.w(); + object.kinematics.orientation_availability = + autoware_auto_perception_msgs::msg::TrackedObjectKinematics::SIGN_UNKNOWN; + } + // position covariance + constexpr double z_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative + constexpr double r_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative + constexpr double p_cov = 0.1 * 0.1; // 
TODO(yukkysaito) Currently tentative + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_X] = P(IDX::X, IDX::X); + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_Y] = P(IDX::X, IDX::Y); + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_X] = P(IDX::Y, IDX::X); + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_Y] = P(IDX::Y, IDX::Y); + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Z_Z] = z_cov; + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::ROLL_ROLL] = r_cov; + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::PITCH_PITCH] = p_cov; + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::YAW_YAW] = + P(IDX::YAW, IDX::YAW); + + // twist + object.kinematics.twist_with_covariance.twist.linear.x = X_t(IDX::VX); + object.kinematics.twist_with_covariance.twist.angular.z = X_t(IDX::WZ); + // twist covariance + constexpr double vy_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative + constexpr double vz_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative + constexpr double wx_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative + constexpr double wy_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::X_X] = P(IDX::VX, IDX::VX); + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::Y_Y] = vy_cov; + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::Z_Z] = vz_cov; + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::X_YAW] = + P(IDX::VX, IDX::WZ); + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::YAW_X] = + P(IDX::WZ, IDX::VX); + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::ROLL_ROLL] = wx_cov; + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::PITCH_PITCH] = wy_cov; + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::YAW_YAW] = + P(IDX::WZ, IDX::WZ); + + // set shape + object.shape.dimensions.x = bounding_box_.width; + object.shape.dimensions.y = bounding_box_.length; + object.shape.dimensions.z = bounding_box_.height; + + return true; +} diff --git a/perception/multi_object_tracker/src/tracker/model/big_vehicle_tracker.cpp b/perception/multi_object_tracker/src/tracker/model/big_vehicle_tracker.cpp new file mode 100644 index 0000000000000..19cee1613a2c5 --- /dev/null +++ b/perception/multi_object_tracker/src/tracker/model/big_vehicle_tracker.cpp @@ -0,0 +1,421 @@ +// Copyright 2020 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
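+//
+// EKF tracker for large vehicles (truck/bus). Same 5-state model as the other
+// vehicle trackers ([x, y, yaw, vx, wz]), but the measurement update also uses
+// the detected yaw, the position measurement noise is widened for CAR-labeled
+// inputs, and the bounding box falls back to 2.0 x 7.0 x 2.0 m when no
+// BOUNDING_BOX shape is provided.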
+// +// +// Author: v1.0 Yukihiro Saito +// + +#include +#include +#include +#include +#include + +#define EIGEN_MPL2_ONLY +#include "multi_object_tracker/tracker/model/big_vehicle_tracker.hpp" +#include "multi_object_tracker/utils/utils.hpp" + +#include +#include +#include + +BigVehicleTracker::BigVehicleTracker( + const rclcpp::Time & time, const autoware_auto_perception_msgs::msg::DetectedObject & object) +: Tracker(time, object.classification), + logger_(rclcpp::get_logger("BigVehicleTracker")), + last_update_time_(time), + z_(object.kinematics.pose_with_covariance.pose.position.z) +{ + object_ = object; + + // initialize params + ekf_params_.use_measurement_covariance = false; + float q_stddev_x = 0.0; // [m/s] + float q_stddev_y = 0.0; // [m/s] + float q_stddev_yaw = autoware_utils::deg2rad(20); // [rad/s] + float q_stddev_vx = autoware_utils::kmph2mps(10); // [m/(s*s)] + float q_stddev_wz = autoware_utils::deg2rad(20); // [rad/(s*s)] + float r_stddev_x = 1.5; // [m] + float r_stddev_y = 0.5; // [m] + float r_stddev_yaw = autoware_utils::deg2rad(30); // [rad] + float p0_stddev_x = 1.5; // [m] + float p0_stddev_y = 0.5; // [m] + float p0_stddev_yaw = autoware_utils::deg2rad(30); // [rad] + float p0_stddev_vx = autoware_utils::kmph2mps(1000); // [m/s] + float p0_stddev_wz = autoware_utils::deg2rad(10); // [rad/s] + ekf_params_.q_cov_x = std::pow(q_stddev_x, 2.0); + ekf_params_.q_cov_y = std::pow(q_stddev_y, 2.0); + ekf_params_.q_cov_yaw = std::pow(q_stddev_yaw, 2.0); + ekf_params_.q_cov_vx = std::pow(q_stddev_vx, 2.0); + ekf_params_.q_cov_wz = std::pow(q_stddev_wz, 2.0); + ekf_params_.r_cov_x = std::pow(r_stddev_x, 2.0); + ekf_params_.r_cov_y = std::pow(r_stddev_y, 2.0); + ekf_params_.r_cov_yaw = std::pow(r_stddev_yaw, 2.0); + ekf_params_.p0_cov_x = std::pow(p0_stddev_x, 2.0); + ekf_params_.p0_cov_y = std::pow(p0_stddev_y, 2.0); + ekf_params_.p0_cov_yaw = std::pow(p0_stddev_yaw, 2.0); + ekf_params_.p0_cov_vx = std::pow(p0_stddev_vx, 2.0); + ekf_params_.p0_cov_wz = std::pow(p0_stddev_wz, 2.0); + max_vx_ = autoware_utils::kmph2mps(100); // [m/s] + max_wz_ = autoware_utils::deg2rad(30); // [rad/s] + + // initialize X matrix + Eigen::MatrixXd X(ekf_params_.dim_x, 1); + X(IDX::X) = object.kinematics.pose_with_covariance.pose.position.x; + X(IDX::Y) = object.kinematics.pose_with_covariance.pose.position.y; + X(IDX::YAW) = tf2::getYaw(object.kinematics.pose_with_covariance.pose.orientation); + if (object.kinematics.has_twist) { + X(IDX::VX) = object.kinematics.twist_with_covariance.twist.linear.x; + X(IDX::WZ) = object.kinematics.twist_with_covariance.twist.angular.z; + } else { + X(IDX::VX) = 0.0; + X(IDX::WZ) = 0.0; + } + + // initialize P matrix + Eigen::MatrixXd P = Eigen::MatrixXd::Zero(ekf_params_.dim_x, ekf_params_.dim_x); + if ( + !ekf_params_.use_measurement_covariance || + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_X] == 0.0 || + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_Y] == 0.0 || + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::YAW_YAW] == 0.0) { + const double cos_yaw = std::cos(X(IDX::YAW)); + const double sin_yaw = std::sin(X(IDX::YAW)); + const double sin_2yaw = std::sin(2.0f * X(IDX::YAW)); + // Rotate the covariance matrix according to the vehicle yaw + // because p0_cov_x and y are in the vehicle coordinate system. 
+ P(IDX::X, IDX::X) = + ekf_params_.p0_cov_x * cos_yaw * cos_yaw + ekf_params_.p0_cov_y * sin_yaw * sin_yaw; + P(IDX::X, IDX::Y) = 0.5f * (ekf_params_.p0_cov_x - ekf_params_.p0_cov_y) * sin_2yaw; + P(IDX::Y, IDX::Y) = + ekf_params_.p0_cov_x * sin_yaw * sin_yaw + ekf_params_.p0_cov_y * cos_yaw * cos_yaw; + P(IDX::Y, IDX::X) = P(IDX::X, IDX::Y); + P(IDX::YAW, IDX::YAW) = ekf_params_.p0_cov_yaw; + P(IDX::VX, IDX::VX) = ekf_params_.p0_cov_vx; + P(IDX::WZ, IDX::WZ) = ekf_params_.p0_cov_wz; + } else { + P(IDX::X, IDX::X) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_X]; + P(IDX::X, IDX::Y) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_Y]; + P(IDX::Y, IDX::Y) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_Y]; + P(IDX::Y, IDX::X) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_X]; + P(IDX::YAW, IDX::YAW) = + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::YAW_YAW]; + if (object.kinematics.has_twist_covariance) { + P(IDX::VX, IDX::VX) = + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::X_X]; + P(IDX::WZ, IDX::WZ) = + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::YAW_YAW]; + } else { + P(IDX::VX, IDX::VX) = ekf_params_.p0_cov_vx; + P(IDX::WZ, IDX::WZ) = ekf_params_.p0_cov_wz; + } + } + + if (object.shape.type == autoware_auto_perception_msgs::msg::Shape::BOUNDING_BOX) { + bounding_box_ = { + object.shape.dimensions.x, object.shape.dimensions.y, object.shape.dimensions.z}; + } else { + bounding_box_ = {2.0, 7.0, 2.0}; + } + ekf_.init(X, P); +} + +bool BigVehicleTracker::predict(const rclcpp::Time & time) +{ + const double dt = (time - last_update_time_).seconds(); + bool ret = predict(dt, ekf_); + if (ret) { + last_update_time_ = time; + } + return ret; +} + +bool BigVehicleTracker::predict(const double dt, KalmanFilter & ekf) const +{ + /* == Nonlinear model == + * + * x_{k+1} = x_k + vx_k * cos(yaw_k) * dt + * y_{k+1} = y_k + vx_k * sin(yaw_k) * dt + * yaw_{k+1} = yaw_k + (wz_k) * dt + * vx_{k+1} = vx_k + * wz_{k+1} = wz_k + * + */ + + /* == Linearized model == + * + * A = [ 1, 0, -vx*sin(yaw)*dt, cos(yaw)*dt, 0] + * [ 0, 1, vx*cos(yaw)*dt, sin(yaw)*dt, 0] + * [ 0, 0, 1, 0, dt] + * [ 0, 0, 0, 1, 0] + * [ 0, 0, 0, 0, 1] + */ + + // X t + Eigen::MatrixXd X_t(ekf_params_.dim_x, 1); // predicted state + ekf.getX(X_t); + const double cos_yaw = std::cos(X_t(IDX::YAW)); + const double sin_yaw = std::sin(X_t(IDX::YAW)); + const double sin_2yaw = std::sin(2.0f * X_t(IDX::YAW)); + + // X t+1 + Eigen::MatrixXd X_next_t(ekf_params_.dim_x, 1); // predicted state + X_next_t(IDX::X) = X_t(IDX::X) + X_t(IDX::VX) * cos_yaw * dt; // dx = v * cos(yaw) + X_next_t(IDX::Y) = X_t(IDX::Y) + X_t(IDX::VX) * sin_yaw * dt; // dy = v * sin(yaw) + X_next_t(IDX::YAW) = X_t(IDX::YAW) + (X_t(IDX::WZ)) * dt; // dyaw = omega + X_next_t(IDX::VX) = X_t(IDX::VX); + X_next_t(IDX::WZ) = X_t(IDX::WZ); + + // A + Eigen::MatrixXd A = Eigen::MatrixXd::Identity(ekf_params_.dim_x, ekf_params_.dim_x); + A(IDX::X, IDX::YAW) = -X_t(IDX::VX) * sin_yaw * dt; + A(IDX::X, IDX::VX) = cos_yaw * dt; + A(IDX::Y, IDX::YAW) = X_t(IDX::VX) * cos_yaw * dt; + A(IDX::Y, IDX::VX) = sin_yaw * dt; + A(IDX::YAW, IDX::WZ) = dt; + + // Q + Eigen::MatrixXd Q = Eigen::MatrixXd::Zero(ekf_params_.dim_x, ekf_params_.dim_x); + // Rotate the covariance matrix according to the vehicle yaw + // because q_cov_x and y are in the vehicle coordinate system. 
+ Q(IDX::X, IDX::X) = + (ekf_params_.q_cov_x * cos_yaw * cos_yaw + ekf_params_.q_cov_y * sin_yaw * sin_yaw) * dt * dt; + Q(IDX::X, IDX::Y) = (0.5f * (ekf_params_.q_cov_x - ekf_params_.q_cov_y) * sin_2yaw) * dt * dt; + Q(IDX::Y, IDX::Y) = + (ekf_params_.q_cov_x * sin_yaw * sin_yaw + ekf_params_.q_cov_y * cos_yaw * cos_yaw) * dt * dt; + Q(IDX::Y, IDX::X) = Q(IDX::X, IDX::Y); + Q(IDX::YAW, IDX::YAW) = ekf_params_.q_cov_yaw * dt * dt; + Q(IDX::VX, IDX::VX) = ekf_params_.q_cov_vx * dt * dt; + Q(IDX::WZ, IDX::WZ) = ekf_params_.q_cov_wz * dt * dt; + Eigen::MatrixXd B = Eigen::MatrixXd::Zero(ekf_params_.dim_x, ekf_params_.dim_x); + Eigen::MatrixXd u = Eigen::MatrixXd::Zero(ekf_params_.dim_x, 1); + + if (!ekf.predict(X_next_t, A, Q)) { + RCLCPP_WARN(logger_, "Cannot predict"); + } + + return true; +} + +bool BigVehicleTracker::measureWithPose( + const autoware_auto_perception_msgs::msg::DetectedObject & object) +{ + using Label = autoware_auto_perception_msgs::msg::ObjectClassification; + + float r_cov_x; + float r_cov_y; + const uint8_t label = utils::getHighestProbLabel(object.classification); + + if (label == Label::CAR) { + constexpr float r_stddev_x = 8.0; // [m] + constexpr float r_stddev_y = 0.8; // [m] + r_cov_x = std::pow(r_stddev_x, 2.0); + r_cov_y = std::pow(r_stddev_y, 2.0); + } else if (label == Label::TRUCK || label == Label::BUS) { + r_cov_x = ekf_params_.r_cov_x; + r_cov_y = ekf_params_.r_cov_y; + } else { + r_cov_x = ekf_params_.r_cov_x; + r_cov_y = ekf_params_.r_cov_y; + } + + constexpr int dim_y = 3; // pos x, pos y, yaw, depending on Pose output + double measurement_yaw = autoware_utils::normalizeRadian( + tf2::getYaw(object.kinematics.pose_with_covariance.pose.orientation)); + { + Eigen::MatrixXd X_t(ekf_params_.dim_x, 1); + ekf_.getX(X_t); + // Fixed measurement_yaw to be in the range of +-90 degrees of X_t(IDX::YAW) + while (M_PI_2 <= X_t(IDX::YAW) - measurement_yaw) { + measurement_yaw = measurement_yaw + M_PI; + } + while (M_PI_2 <= measurement_yaw - X_t(IDX::YAW)) { + measurement_yaw = measurement_yaw - M_PI; + } + } + + /* Set measurement matrix */ + Eigen::MatrixXd Y(dim_y, 1); + Y << object.kinematics.pose_with_covariance.pose.position.x, + object.kinematics.pose_with_covariance.pose.position.y, measurement_yaw; + + /* Set measurement matrix */ + Eigen::MatrixXd C = Eigen::MatrixXd::Zero(dim_y, ekf_params_.dim_x); + C(0, IDX::X) = 1.0; // for pos x + C(1, IDX::Y) = 1.0; // for pos y + C(2, IDX::YAW) = 1.0; // for yaw + + /* Set measurement noise covariance */ + Eigen::MatrixXd R = Eigen::MatrixXd::Zero(dim_y, dim_y); + if ( + !ekf_params_.use_measurement_covariance || + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_X] == 0.0 || + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_Y] == 0.0 || + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::YAW_YAW] == 0.0) { + const double cos_yaw = std::cos(measurement_yaw); + const double sin_yaw = std::sin(measurement_yaw); + const double sin_2yaw = std::sin(2.0f * measurement_yaw); + R(0, 0) = r_cov_x * cos_yaw * cos_yaw + r_cov_y * sin_yaw * sin_yaw; // x - x + R(0, 1) = 0.5f * (r_cov_x - r_cov_y) * sin_2yaw; // x - y + R(1, 1) = r_cov_x * sin_yaw * sin_yaw + r_cov_y * cos_yaw * cos_yaw; // y - y + R(1, 0) = R(0, 1); // y - x + R(2, 2) = ekf_params_.r_cov_yaw; // yaw - yaw + } else { + R(0, 0) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_X]; + R(0, 1) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_Y]; + 
R(0, 2) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_YAW]; + R(1, 0) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_X]; + R(1, 1) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_Y]; + R(1, 2) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_YAW]; + R(2, 0) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::YAW_X]; + R(2, 1) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::YAW_Y]; + R(2, 2) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::YAW_YAW]; + } + if (!ekf_.update(Y, C, R)) { + RCLCPP_WARN(logger_, "Cannot update"); + } + + // normalize yaw and limit vx, wz + { + Eigen::MatrixXd X_t(ekf_params_.dim_x, 1); + Eigen::MatrixXd P_t(ekf_params_.dim_x, ekf_params_.dim_x); + ekf_.getX(X_t); + ekf_.getP(P_t); + X_t(IDX::YAW) = autoware_utils::normalizeRadian(X_t(IDX::YAW)); + if (!(-max_vx_ <= X_t(IDX::VX) && X_t(IDX::VX) <= max_vx_)) { + X_t(IDX::VX) = X_t(IDX::VX) < 0 ? -max_vx_ : max_vx_; + } + if (!(-max_wz_ <= X_t(IDX::WZ) && X_t(IDX::WZ) <= max_wz_)) { + X_t(IDX::WZ) = X_t(IDX::WZ) < 0 ? -max_wz_ : max_wz_; + } + ekf_.init(X_t, P_t); + } + + // position z + constexpr float gain = 0.9; + z_ = gain * z_ + (1.0 - gain) * object.kinematics.pose_with_covariance.pose.position.z; + + return true; +} + +bool BigVehicleTracker::measureWithShape( + const autoware_auto_perception_msgs::msg::DetectedObject & object) +{ + if (object.shape.type != autoware_auto_perception_msgs::msg::Shape::BOUNDING_BOX) { + return false; + } + constexpr float gain = 0.9; + + bounding_box_.width = gain * bounding_box_.width + (1.0 - gain) * object.shape.dimensions.x; + bounding_box_.length = gain * bounding_box_.length + (1.0 - gain) * object.shape.dimensions.y; + bounding_box_.height = gain * bounding_box_.height + (1.0 - gain) * object.shape.dimensions.z; + + return true; +} + +bool BigVehicleTracker::measure( + const autoware_auto_perception_msgs::msg::DetectedObject & object, const rclcpp::Time & time) +{ + object_ = object; + + if (0.01 /*10msec*/ < std::fabs((time - last_update_time_).seconds())) { + RCLCPP_WARN( + logger_, "There is a large gap between predicted time and measurement time. 
(%f)", + (time - last_update_time_).seconds()); + } + + measureWithPose(object); + measureWithShape(object); + + return true; +} + +bool BigVehicleTracker::getTrackedObject( + const rclcpp::Time & time, autoware_auto_perception_msgs::msg::TrackedObject & object) const +{ + object = utils::toTrackedObject(object_); + object.object_id = getUUID(); + object.classification = getClassification(); + + // predict state + KalmanFilter tmp_ekf_for_no_update = ekf_; + const double dt = (time - last_update_time_).seconds(); + if (0.001 /*1msec*/ < dt) { + predict(dt, tmp_ekf_for_no_update); + } + Eigen::MatrixXd X_t(ekf_params_.dim_x, 1); // predicted state + Eigen::MatrixXd P(ekf_params_.dim_x, ekf_params_.dim_x); // predicted state + tmp_ekf_for_no_update.getX(X_t); + tmp_ekf_for_no_update.getP(P); + + // position + object.kinematics.pose_with_covariance.pose.position.x = X_t(IDX::X); + object.kinematics.pose_with_covariance.pose.position.y = X_t(IDX::Y); + object.kinematics.pose_with_covariance.pose.position.z = z_; + // quaternion + { + double roll, pitch, yaw; + tf2::Quaternion original_quaternion; + tf2::fromMsg(object_.kinematics.pose_with_covariance.pose.orientation, original_quaternion); + tf2::Matrix3x3(original_quaternion).getRPY(roll, pitch, yaw); + tf2::Quaternion filtered_quaternion; + filtered_quaternion.setRPY(roll, pitch, X_t(IDX::YAW)); + object.kinematics.pose_with_covariance.pose.orientation.x = filtered_quaternion.x(); + object.kinematics.pose_with_covariance.pose.orientation.y = filtered_quaternion.y(); + object.kinematics.pose_with_covariance.pose.orientation.z = filtered_quaternion.z(); + object.kinematics.pose_with_covariance.pose.orientation.w = filtered_quaternion.w(); + object.kinematics.orientation_availability = + autoware_auto_perception_msgs::msg::TrackedObjectKinematics::SIGN_UNKNOWN; + } + // position covariance + constexpr double z_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative + constexpr double r_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative + constexpr double p_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_X] = P(IDX::X, IDX::X); + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_Y] = P(IDX::X, IDX::Y); + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_X] = P(IDX::Y, IDX::X); + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_Y] = P(IDX::Y, IDX::Y); + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Z_Z] = z_cov; + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::ROLL_ROLL] = r_cov; + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::PITCH_PITCH] = p_cov; + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::YAW_YAW] = + P(IDX::YAW, IDX::YAW); + + // twist + object.kinematics.twist_with_covariance.twist.linear.x = X_t(IDX::VX); + object.kinematics.twist_with_covariance.twist.angular.z = X_t(IDX::WZ); + // twist covariance + constexpr double vy_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative + constexpr double vz_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative + constexpr double wx_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative + constexpr double wy_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::X_X] = P(IDX::VX, IDX::VX); + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::Y_Y] = 
vy_cov; + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::Z_Z] = vz_cov; + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::X_YAW] = + P(IDX::VX, IDX::WZ); + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::YAW_X] = + P(IDX::WZ, IDX::VX); + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::ROLL_ROLL] = wx_cov; + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::PITCH_PITCH] = wy_cov; + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::YAW_YAW] = + P(IDX::WZ, IDX::WZ); + + // set shape + object.shape.dimensions.x = bounding_box_.width; + object.shape.dimensions.y = bounding_box_.length; + object.shape.dimensions.z = bounding_box_.height; + + return true; +} diff --git a/perception/multi_object_tracker/src/tracker/model/multiple_vehicle_tracker.cpp b/perception/multi_object_tracker/src/tracker/model/multiple_vehicle_tracker.cpp new file mode 100644 index 0000000000000..1444ede87bf38 --- /dev/null +++ b/perception/multi_object_tracker/src/tracker/model/multiple_vehicle_tracker.cpp @@ -0,0 +1,61 @@ +// Copyright 2020 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// +// Author: v1.0 Yukihiro Saito +// + +#include "multi_object_tracker/tracker/model/multiple_vehicle_tracker.hpp" + +#include + +MultipleVehicleTracker::MultipleVehicleTracker( + const rclcpp::Time & time, const autoware_auto_perception_msgs::msg::DetectedObject & object) +: Tracker(time, object.classification), + normal_vehicle_tracker_(time, object), + big_vehicle_tracker_(time, object) +{ +} + +bool MultipleVehicleTracker::predict(const rclcpp::Time & time) +{ + big_vehicle_tracker_.predict(time); + normal_vehicle_tracker_.predict(time); + return true; +} + +bool MultipleVehicleTracker::measure( + const autoware_auto_perception_msgs::msg::DetectedObject & object, const rclcpp::Time & time) +{ + big_vehicle_tracker_.measure(object, time); + normal_vehicle_tracker_.measure(object, time); + setClassification(object.classification); + return true; +} + +bool MultipleVehicleTracker::getTrackedObject( + const rclcpp::Time & time, autoware_auto_perception_msgs::msg::TrackedObject & object) const +{ + using Label = autoware_auto_perception_msgs::msg::ObjectClassification; + const uint8_t label = getHighestProbLabel(); + + if (label == Label::CAR) { + normal_vehicle_tracker_.getTrackedObject(time, object); + } else if (label == Label::BUS || label == Label::TRUCK) { + big_vehicle_tracker_.getTrackedObject(time, object); + } + object.object_id = getUUID(); + object.classification = getClassification(); + return true; +} diff --git a/perception/multi_object_tracker/src/tracker/model/normal_vehicle_tracker.cpp b/perception/multi_object_tracker/src/tracker/model/normal_vehicle_tracker.cpp new file mode 100644 index 0000000000000..907a28c9b5582 --- /dev/null +++ b/perception/multi_object_tracker/src/tracker/model/normal_vehicle_tracker.cpp @@ -0,0 +1,421 @@ +// Copyright 2020 Tier IV, 
Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// +// Author: v1.0 Yukihiro Saito +// + +#include +#include +#include +#include +#include + +#define EIGEN_MPL2_ONLY +#include "multi_object_tracker/tracker/model/normal_vehicle_tracker.hpp" +#include "multi_object_tracker/utils/utils.hpp" + +#include +#include +#include + +NormalVehicleTracker::NormalVehicleTracker( + const rclcpp::Time & time, const autoware_auto_perception_msgs::msg::DetectedObject & object) +: Tracker(time, object.classification), + logger_(rclcpp::get_logger("NormalVehicleTracker")), + last_update_time_(time), + z_(object.kinematics.pose_with_covariance.pose.position.z) +{ + object_ = object; + + // initialize params + ekf_params_.use_measurement_covariance = false; + float q_stddev_x = 0.0; // object coordinate [m/s] + float q_stddev_y = 0.0; // object coordinate [m/s] + float q_stddev_yaw = autoware_utils::deg2rad(20); // map coordinate[rad/s] + float q_stddev_vx = autoware_utils::kmph2mps(10); // object coordinate [m/(s*s)] + float q_stddev_wz = autoware_utils::deg2rad(20); // object coordinate [rad/(s*s)] + float r_stddev_x = 1.0; // object coordinate [m] + float r_stddev_y = 0.3; // object coordinate [m] + float r_stddev_yaw = autoware_utils::deg2rad(30); // map coordinate [rad] + float p0_stddev_x = 1.0; // object coordinate [m/s] + float p0_stddev_y = 0.3; // object coordinate [m/s] + float p0_stddev_yaw = autoware_utils::deg2rad(30); // map coordinate [rad] + float p0_stddev_vx = autoware_utils::kmph2mps(1000); // object coordinate [m/s] + float p0_stddev_wz = autoware_utils::deg2rad(10); // object coordinate [rad/s] + ekf_params_.q_cov_x = std::pow(q_stddev_x, 2.0); + ekf_params_.q_cov_y = std::pow(q_stddev_y, 2.0); + ekf_params_.q_cov_yaw = std::pow(q_stddev_yaw, 2.0); + ekf_params_.q_cov_vx = std::pow(q_stddev_vx, 2.0); + ekf_params_.q_cov_wz = std::pow(q_stddev_wz, 2.0); + ekf_params_.r_cov_x = std::pow(r_stddev_x, 2.0); + ekf_params_.r_cov_y = std::pow(r_stddev_y, 2.0); + ekf_params_.r_cov_yaw = std::pow(r_stddev_yaw, 2.0); + ekf_params_.p0_cov_x = std::pow(p0_stddev_x, 2.0); + ekf_params_.p0_cov_y = std::pow(p0_stddev_y, 2.0); + ekf_params_.p0_cov_yaw = std::pow(p0_stddev_yaw, 2.0); + ekf_params_.p0_cov_vx = std::pow(p0_stddev_vx, 2.0); + ekf_params_.p0_cov_wz = std::pow(p0_stddev_wz, 2.0); + max_vx_ = autoware_utils::kmph2mps(100); // [m/s] + max_wz_ = autoware_utils::deg2rad(30); // [rad/s] + + // initialize X matrix + Eigen::MatrixXd X(ekf_params_.dim_x, 1); + X(IDX::X) = object.kinematics.pose_with_covariance.pose.position.x; + X(IDX::Y) = object.kinematics.pose_with_covariance.pose.position.y; + X(IDX::YAW) = tf2::getYaw(object.kinematics.pose_with_covariance.pose.orientation); + if (object.kinematics.has_twist) { + X(IDX::VX) = object.kinematics.twist_with_covariance.twist.linear.x; + X(IDX::WZ) = object.kinematics.twist_with_covariance.twist.angular.z; + } else { + X(IDX::VX) = 0.0; + X(IDX::WZ) = 0.0; + } + + // initialize P matrix + Eigen::MatrixXd P = 
Eigen::MatrixXd::Zero(ekf_params_.dim_x, ekf_params_.dim_x); + if ( + !ekf_params_.use_measurement_covariance || + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_X] == 0.0 || + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_Y] == 0.0 || + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::YAW_YAW] == 0.0) { + const double cos_yaw = std::cos(X(IDX::YAW)); + const double sin_yaw = std::sin(X(IDX::YAW)); + const double sin_2yaw = std::sin(2.0f * X(IDX::YAW)); + // Rotate the covariance matrix according to the vehicle yaw + // because p0_cov_x and y are in the vehicle coordinate system. + P(IDX::X, IDX::X) = + ekf_params_.p0_cov_x * cos_yaw * cos_yaw + ekf_params_.p0_cov_y * sin_yaw * sin_yaw; + P(IDX::X, IDX::Y) = 0.5f * (ekf_params_.p0_cov_x - ekf_params_.p0_cov_y) * sin_2yaw; + P(IDX::Y, IDX::Y) = + ekf_params_.p0_cov_x * sin_yaw * sin_yaw + ekf_params_.p0_cov_y * cos_yaw * cos_yaw; + P(IDX::Y, IDX::X) = P(IDX::X, IDX::Y); + P(IDX::YAW, IDX::YAW) = ekf_params_.p0_cov_yaw; + P(IDX::VX, IDX::VX) = ekf_params_.p0_cov_vx; + P(IDX::WZ, IDX::WZ) = ekf_params_.p0_cov_wz; + } else { + P(IDX::X, IDX::X) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_X]; + P(IDX::X, IDX::Y) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_Y]; + P(IDX::Y, IDX::Y) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_Y]; + P(IDX::Y, IDX::X) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_X]; + P(IDX::YAW, IDX::YAW) = + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::YAW_YAW]; + if (object.kinematics.has_twist_covariance) { + P(IDX::VX, IDX::VX) = + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::X_X]; + P(IDX::WZ, IDX::WZ) = + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::YAW_YAW]; + } else { + P(IDX::VX, IDX::VX) = ekf_params_.p0_cov_vx; + P(IDX::WZ, IDX::WZ) = ekf_params_.p0_cov_wz; + } + } + + if (object.shape.type == autoware_auto_perception_msgs::msg::Shape::BOUNDING_BOX) { + bounding_box_ = { + object.shape.dimensions.x, object.shape.dimensions.y, object.shape.dimensions.z}; + } else { + bounding_box_ = {1.7, 4.0, 2.0}; + } + ekf_.init(X, P); +} + +bool NormalVehicleTracker::predict(const rclcpp::Time & time) +{ + const double dt = (time - last_update_time_).seconds(); + bool ret = predict(dt, ekf_); + if (ret) { + last_update_time_ = time; + } + return ret; +} + +bool NormalVehicleTracker::predict(const double dt, KalmanFilter & ekf) const +{ + /* == Nonlinear model == + * + * x_{k+1} = x_k + vx_k * cos(yaw_k) * dt + * y_{k+1} = y_k + vx_k * sin(yaw_k) * dt + * yaw_{k+1} = yaw_k + (wz_k) * dt + * vx_{k+1} = vx_k + * wz_{k+1} = wz_k + * + */ + + /* == Linearized model == + * + * A = [ 1, 0, -vx*sin(yaw)*dt, cos(yaw)*dt, 0] + * [ 0, 1, vx*cos(yaw)*dt, sin(yaw)*dt, 0] + * [ 0, 0, 1, 0, dt] + * [ 0, 0, 0, 1, 0] + * [ 0, 0, 0, 0, 1] + */ + + // X t + Eigen::MatrixXd X_t(ekf_params_.dim_x, 1); // predicted state + ekf.getX(X_t); + const double cos_yaw = std::cos(X_t(IDX::YAW)); + const double sin_yaw = std::sin(X_t(IDX::YAW)); + const double sin_2yaw = std::sin(2.0f * X_t(IDX::YAW)); + + // X t+1 + Eigen::MatrixXd X_next_t(ekf_params_.dim_x, 1); // predicted state + X_next_t(IDX::X) = X_t(IDX::X) + X_t(IDX::VX) * cos_yaw * dt; // dx = v * cos(yaw) + X_next_t(IDX::Y) = X_t(IDX::Y) + X_t(IDX::VX) * sin_yaw * dt; // dy = v * sin(yaw) + X_next_t(IDX::YAW) = X_t(IDX::YAW) + (X_t(IDX::WZ)) * 
dt; // dyaw = omega + X_next_t(IDX::VX) = X_t(IDX::VX); + X_next_t(IDX::WZ) = X_t(IDX::WZ); + + // A + Eigen::MatrixXd A = Eigen::MatrixXd::Identity(ekf_params_.dim_x, ekf_params_.dim_x); + A(IDX::X, IDX::YAW) = -X_t(IDX::VX) * sin_yaw * dt; + A(IDX::X, IDX::VX) = cos_yaw * dt; + A(IDX::Y, IDX::YAW) = X_t(IDX::VX) * cos_yaw * dt; + A(IDX::Y, IDX::VX) = sin_yaw * dt; + A(IDX::YAW, IDX::WZ) = dt; + + // Q + Eigen::MatrixXd Q = Eigen::MatrixXd::Zero(ekf_params_.dim_x, ekf_params_.dim_x); + // Rotate the covariance matrix according to the vehicle yaw + // because q_cov_x and y are in the vehicle coordinate system. + Q(IDX::X, IDX::X) = + (ekf_params_.q_cov_x * cos_yaw * cos_yaw + ekf_params_.q_cov_y * sin_yaw * sin_yaw) * dt * dt; + Q(IDX::X, IDX::Y) = (0.5f * (ekf_params_.q_cov_x - ekf_params_.q_cov_y) * sin_2yaw) * dt * dt; + Q(IDX::Y, IDX::Y) = + (ekf_params_.q_cov_x * sin_yaw * sin_yaw + ekf_params_.q_cov_y * cos_yaw * cos_yaw) * dt * dt; + Q(IDX::Y, IDX::X) = Q(IDX::X, IDX::Y); + Q(IDX::YAW, IDX::YAW) = ekf_params_.q_cov_yaw * dt * dt; + Q(IDX::VX, IDX::VX) = ekf_params_.q_cov_vx * dt * dt; + Q(IDX::WZ, IDX::WZ) = ekf_params_.q_cov_wz * dt * dt; + Eigen::MatrixXd B = Eigen::MatrixXd::Zero(ekf_params_.dim_x, ekf_params_.dim_x); + Eigen::MatrixXd u = Eigen::MatrixXd::Zero(ekf_params_.dim_x, 1); + + if (!ekf.predict(X_next_t, A, Q)) { + RCLCPP_WARN(logger_, "Cannot predict"); + } + + return true; +} + +bool NormalVehicleTracker::measureWithPose( + const autoware_auto_perception_msgs::msg::DetectedObject & object) +{ + using Label = autoware_auto_perception_msgs::msg::ObjectClassification; + + float r_cov_x; + float r_cov_y; + const uint8_t label = utils::getHighestProbLabel(object.classification); + + if (label == Label::CAR) { + r_cov_x = ekf_params_.r_cov_x; + r_cov_y = ekf_params_.r_cov_y; + } else if (label == Label::TRUCK || label == Label::BUS) { + constexpr float r_stddev_x = 8.0; // [m] + constexpr float r_stddev_y = 0.8; // [m] + r_cov_x = std::pow(r_stddev_x, 2.0); + r_cov_y = std::pow(r_stddev_y, 2.0); + } else { + r_cov_x = ekf_params_.r_cov_x; + r_cov_y = ekf_params_.r_cov_y; + } + + constexpr int dim_y = 3; // pos x, pos y, yaw, depending on Pose output + double measurement_yaw = autoware_utils::normalizeRadian( + tf2::getYaw(object.kinematics.pose_with_covariance.pose.orientation)); + { + Eigen::MatrixXd X_t(ekf_params_.dim_x, 1); + ekf_.getX(X_t); + // Fixed measurement_yaw to be in the range of +-90 degrees of X_t(IDX::YAW) + while (M_PI_2 <= X_t(IDX::YAW) - measurement_yaw) { + measurement_yaw = measurement_yaw + M_PI; + } + while (M_PI_2 <= measurement_yaw - X_t(IDX::YAW)) { + measurement_yaw = measurement_yaw - M_PI; + } + } + + /* Set measurement matrix */ + Eigen::MatrixXd Y(dim_y, 1); + Y << object.kinematics.pose_with_covariance.pose.position.x, + object.kinematics.pose_with_covariance.pose.position.y, measurement_yaw; + + /* Set measurement matrix */ + Eigen::MatrixXd C = Eigen::MatrixXd::Zero(dim_y, ekf_params_.dim_x); + C(0, IDX::X) = 1.0; // for pos x + C(1, IDX::Y) = 1.0; // for pos y + C(2, IDX::YAW) = 1.0; // for yaw + + /* Set measurement noise covariance */ + Eigen::MatrixXd R = Eigen::MatrixXd::Zero(dim_y, dim_y); + if ( + !ekf_params_.use_measurement_covariance || + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_X] == 0.0 || + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_Y] == 0.0 || + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::YAW_YAW] == 0.0) { + const double cos_yaw = 
std::cos(measurement_yaw); + const double sin_yaw = std::sin(measurement_yaw); + const double sin_2yaw = std::sin(2.0f * measurement_yaw); + R(0, 0) = r_cov_x * cos_yaw * cos_yaw + r_cov_y * sin_yaw * sin_yaw; // x - x + R(0, 1) = 0.5f * (r_cov_x - r_cov_y) * sin_2yaw; // x - y + R(1, 1) = r_cov_x * sin_yaw * sin_yaw + r_cov_y * cos_yaw * cos_yaw; // y - y + R(1, 0) = R(0, 1); // y - x + R(2, 2) = ekf_params_.r_cov_yaw; // yaw - yaw + } else { + R(0, 0) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_X]; + R(0, 1) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_Y]; + R(0, 2) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_YAW]; + R(1, 0) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_X]; + R(1, 1) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_Y]; + R(1, 2) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_YAW]; + R(2, 0) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::YAW_X]; + R(2, 1) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::YAW_Y]; + R(2, 2) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::YAW_YAW]; + } + if (!ekf_.update(Y, C, R)) { + RCLCPP_WARN(logger_, "Cannot update"); + } + + // normalize yaw and limit vx, wz + { + Eigen::MatrixXd X_t(ekf_params_.dim_x, 1); + Eigen::MatrixXd P_t(ekf_params_.dim_x, ekf_params_.dim_x); + ekf_.getX(X_t); + ekf_.getP(P_t); + X_t(IDX::YAW) = autoware_utils::normalizeRadian(X_t(IDX::YAW)); + if (!(-max_vx_ <= X_t(IDX::VX) && X_t(IDX::VX) <= max_vx_)) { + X_t(IDX::VX) = X_t(IDX::VX) < 0 ? -max_vx_ : max_vx_; + } + if (!(-max_wz_ <= X_t(IDX::WZ) && X_t(IDX::WZ) <= max_wz_)) { + X_t(IDX::WZ) = X_t(IDX::WZ) < 0 ? -max_wz_ : max_wz_; + } + ekf_.init(X_t, P_t); + } + + // position z + constexpr float gain = 0.9; + z_ = gain * z_ + (1.0 - gain) * object.kinematics.pose_with_covariance.pose.position.z; + + return true; +} + +bool NormalVehicleTracker::measureWithShape( + const autoware_auto_perception_msgs::msg::DetectedObject & object) +{ + if (object.shape.type != autoware_auto_perception_msgs::msg::Shape::BOUNDING_BOX) { + return false; + } + constexpr float gain = 0.9; + + bounding_box_.width = gain * bounding_box_.width + (1.0 - gain) * object.shape.dimensions.x; + bounding_box_.length = gain * bounding_box_.length + (1.0 - gain) * object.shape.dimensions.y; + bounding_box_.height = gain * bounding_box_.height + (1.0 - gain) * object.shape.dimensions.z; + + return true; +} + +bool NormalVehicleTracker::measure( + const autoware_auto_perception_msgs::msg::DetectedObject & object, const rclcpp::Time & time) +{ + object_ = object; + + if (0.01 /*10msec*/ < std::fabs((time - last_update_time_).seconds())) { + RCLCPP_WARN( + logger_, "There is a large gap between predicted time and measurement time. 
(%f)", + (time - last_update_time_).seconds()); + } + + measureWithPose(object); + measureWithShape(object); + + return true; +} + +bool NormalVehicleTracker::getTrackedObject( + const rclcpp::Time & time, autoware_auto_perception_msgs::msg::TrackedObject & object) const +{ + object = utils::toTrackedObject(object_); + object.object_id = getUUID(); + object.classification = getClassification(); + + // predict kinematics + KalmanFilter tmp_ekf_for_no_update = ekf_; + const double dt = (time - last_update_time_).seconds(); + if (0.001 /*1msec*/ < dt) { + predict(dt, tmp_ekf_for_no_update); + } + Eigen::MatrixXd X_t(ekf_params_.dim_x, 1); // predicted state + Eigen::MatrixXd P(ekf_params_.dim_x, ekf_params_.dim_x); // predicted state + tmp_ekf_for_no_update.getX(X_t); + tmp_ekf_for_no_update.getP(P); + + // position + object.kinematics.pose_with_covariance.pose.position.x = X_t(IDX::X); + object.kinematics.pose_with_covariance.pose.position.y = X_t(IDX::Y); + object.kinematics.pose_with_covariance.pose.position.z = z_; + // quaternion + { + double roll, pitch, yaw; + tf2::Quaternion original_quaternion; + tf2::fromMsg(object_.kinematics.pose_with_covariance.pose.orientation, original_quaternion); + tf2::Matrix3x3(original_quaternion).getRPY(roll, pitch, yaw); + tf2::Quaternion filtered_quaternion; + filtered_quaternion.setRPY(roll, pitch, X_t(IDX::YAW)); + object.kinematics.pose_with_covariance.pose.orientation.x = filtered_quaternion.x(); + object.kinematics.pose_with_covariance.pose.orientation.y = filtered_quaternion.y(); + object.kinematics.pose_with_covariance.pose.orientation.z = filtered_quaternion.z(); + object.kinematics.pose_with_covariance.pose.orientation.w = filtered_quaternion.w(); + object.kinematics.orientation_availability = + autoware_auto_perception_msgs::msg::TrackedObjectKinematics::SIGN_UNKNOWN; + } + // position covariance + constexpr double z_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative + constexpr double r_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative + constexpr double p_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_X] = P(IDX::X, IDX::X); + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_Y] = P(IDX::X, IDX::Y); + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_X] = P(IDX::Y, IDX::X); + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_Y] = P(IDX::Y, IDX::Y); + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Z_Z] = z_cov; + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::ROLL_ROLL] = r_cov; + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::PITCH_PITCH] = p_cov; + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::YAW_YAW] = + P(IDX::YAW, IDX::YAW); + + // twist + object.kinematics.twist_with_covariance.twist.linear.x = X_t(IDX::VX); + object.kinematics.twist_with_covariance.twist.angular.z = X_t(IDX::WZ); + // twist covariance + constexpr double vy_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative + constexpr double vz_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative + constexpr double wx_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative + constexpr double wy_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::X_X] = P(IDX::VX, IDX::VX); + 
object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::Y_Y] = vy_cov; + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::Z_Z] = vz_cov; + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::X_YAW] = + P(IDX::VX, IDX::WZ); + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::YAW_X] = + P(IDX::WZ, IDX::VX); + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::ROLL_ROLL] = wx_cov; + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::PITCH_PITCH] = wy_cov; + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::YAW_YAW] = + P(IDX::WZ, IDX::WZ); + + // set shape + object.shape.dimensions.x = bounding_box_.width; + object.shape.dimensions.y = bounding_box_.length; + object.shape.dimensions.z = bounding_box_.height; + + return true; +} diff --git a/perception/multi_object_tracker/src/tracker/model/pedestrian_and_bicycle_tracker.cpp b/perception/multi_object_tracker/src/tracker/model/pedestrian_and_bicycle_tracker.cpp new file mode 100644 index 0000000000000..74d1d254bdbb8 --- /dev/null +++ b/perception/multi_object_tracker/src/tracker/model/pedestrian_and_bicycle_tracker.cpp @@ -0,0 +1,61 @@ +// Copyright 2020 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
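Detected bounding boxes carry a 180-degree heading ambiguity, so measureWithPose above shifts the measured yaw by multiples of pi until it lies within 90 degrees of the track's predicted yaw before running the update. A small sketch of that correction with made-up angles:

```cpp
#include <cmath>
#include <cstdio>

int main()
{
  const double predicted_yaw = 0.2;            // [rad], e.g. X_t(IDX::YAW) from the EKF
  double measurement_yaw = 0.2 + M_PI - 0.05;  // hypothetical detection reported "backwards"

  // Same folding loop as in measureWithPose: bring the measurement to within +-pi/2
  // of the prediction by adding or subtracting pi.
  while (M_PI_2 <= predicted_yaw - measurement_yaw) {
    measurement_yaw += M_PI;
  }
  while (M_PI_2 <= measurement_yaw - predicted_yaw) {
    measurement_yaw -= M_PI;
  }

  std::printf("corrected yaw: %.3f rad (prediction: %.3f rad)\n", measurement_yaw, predicted_yaw);
  return 0;
}
```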
+// +// +// Author: v1.0 Yukihiro Saito +// + +#include "multi_object_tracker/tracker/model/pedestrian_and_bicycle_tracker.hpp" + +#include + +PedestrianAndBicycleTracker::PedestrianAndBicycleTracker( + const rclcpp::Time & time, const autoware_auto_perception_msgs::msg::DetectedObject & object) +: Tracker(time, object.classification), + pedestrian_tracker_(time, object), + bicycle_tracker_(time, object) +{ +} + +bool PedestrianAndBicycleTracker::predict(const rclcpp::Time & time) +{ + pedestrian_tracker_.predict(time); + bicycle_tracker_.predict(time); + return true; +} + +bool PedestrianAndBicycleTracker::measure( + const autoware_auto_perception_msgs::msg::DetectedObject & object, const rclcpp::Time & time) +{ + pedestrian_tracker_.measure(object, time); + bicycle_tracker_.measure(object, time); + setClassification(object.classification); + return true; +} + +bool PedestrianAndBicycleTracker::getTrackedObject( + const rclcpp::Time & time, autoware_auto_perception_msgs::msg::TrackedObject & object) const +{ + using Label = autoware_auto_perception_msgs::msg::ObjectClassification; + const uint8_t label = getHighestProbLabel(); + + if (label == Label::PEDESTRIAN) { + pedestrian_tracker_.getTrackedObject(time, object); + } else if (label == Label::BICYCLE || label == Label::MOTORCYCLE) { + bicycle_tracker_.getTrackedObject(time, object); + } + object.object_id = getUUID(); + object.classification = getClassification(); + return true; +} diff --git a/perception/multi_object_tracker/src/tracker/model/pedestrian_tracker.cpp b/perception/multi_object_tracker/src/tracker/model/pedestrian_tracker.cpp new file mode 100644 index 0000000000000..88bd5f44488c4 --- /dev/null +++ b/perception/multi_object_tracker/src/tracker/model/pedestrian_tracker.cpp @@ -0,0 +1,409 @@ +// Copyright 2020 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
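Both MultipleVehicleTracker and the PedestrianAndBicycleTracker above follow the same pattern: every cycle they predict and update two class-specific filters in parallel, and only the filter matching the current highest-probability label is used for the published state. A deliberately simplified, hypothetical illustration of that pattern, where DummyFilter stands in for the real EKF-based trackers:

```cpp
#include <cstdint>
#include <iostream>

// Hypothetical stand-in for PedestrianTracker / BicycleTracker.
struct DummyFilter
{
  double x{0.0};
  void predict(double dt) { x += dt; }           // placeholder dynamics
  void measure(double z) { x = 0.5 * (x + z); }  // placeholder update
};

int main()
{
  constexpr std::uint8_t PEDESTRIAN = 0;
  constexpr std::uint8_t BICYCLE = 1;

  DummyFilter pedestrian_model;
  DummyFilter bicycle_model;
  const std::uint8_t best_label = BICYCLE;  // e.g. from getHighestProbLabel()

  // Both hypotheses are kept alive every cycle...
  pedestrian_model.predict(0.1);
  bicycle_model.predict(0.1);
  pedestrian_model.measure(1.0);
  bicycle_model.measure(1.0);

  // ...but only the one matching the current label is reported.
  const DummyFilter & selected = (best_label == PEDESTRIAN) ? pedestrian_model : bicycle_model;
  std::cout << "published state: " << selected.x << std::endl;
  return 0;
}
```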
+// +// +// Author: v1.0 Yukihiro Saito +// + +#include "multi_object_tracker/tracker/model/pedestrian_tracker.hpp" + +#include "multi_object_tracker/utils/utils.hpp" + +#include + +#include +#include +#include +#include +#include + +#define EIGEN_MPL2_ONLY +#include +#include + +PedestrianTracker::PedestrianTracker( + const rclcpp::Time & time, const autoware_auto_perception_msgs::msg::DetectedObject & object) +: Tracker(time, object.classification), + logger_(rclcpp::get_logger("PedestrianTracker")), + last_update_time_(time), + z_(object.kinematics.pose_with_covariance.pose.position.z) +{ + object_ = object; + + // initialize params + ekf_params_.use_measurement_covariance = false; + float q_stddev_x = 0.0; // [m/s] + float q_stddev_y = 0.0; // [m/s] + float q_stddev_yaw = autoware_utils::deg2rad(20); // [rad/s] + float q_stddev_vx = autoware_utils::kmph2mps(5); // [m/(s*s)] + float q_stddev_wz = autoware_utils::deg2rad(20); // [rad/(s*s)] + float r_stddev_x = 0.4; // [m] + float r_stddev_y = 0.4; // [m] + float r_stddev_yaw = autoware_utils::deg2rad(30); // [rad] + float p0_stddev_x = 1.0; // [m/s] + float p0_stddev_y = 1.0; // [m/s] + float p0_stddev_yaw = autoware_utils::deg2rad(1000); // [rad/s] + float p0_stddev_vx = autoware_utils::kmph2mps(5); // [m/(s*s)] + float p0_stddev_wz = autoware_utils::deg2rad(10); // [rad/(s*s)] + ekf_params_.q_cov_x = std::pow(q_stddev_x, 2.0); + ekf_params_.q_cov_y = std::pow(q_stddev_y, 2.0); + ekf_params_.q_cov_yaw = std::pow(q_stddev_yaw, 2.0); + ekf_params_.q_cov_vx = std::pow(q_stddev_vx, 2.0); + ekf_params_.q_cov_wz = std::pow(q_stddev_wz, 2.0); + ekf_params_.r_cov_x = std::pow(r_stddev_x, 2.0); + ekf_params_.r_cov_y = std::pow(r_stddev_y, 2.0); + ekf_params_.r_cov_yaw = std::pow(r_stddev_yaw, 2.0); + ekf_params_.p0_cov_x = std::pow(p0_stddev_x, 2.0); + ekf_params_.p0_cov_y = std::pow(p0_stddev_y, 2.0); + ekf_params_.p0_cov_yaw = std::pow(p0_stddev_yaw, 2.0); + ekf_params_.p0_cov_vx = std::pow(p0_stddev_vx, 2.0); + ekf_params_.p0_cov_wz = std::pow(p0_stddev_wz, 2.0); + max_vx_ = autoware_utils::kmph2mps(10); // [m/s] + max_wz_ = autoware_utils::deg2rad(30); // [rad/s] + + // initialize X matrix + Eigen::MatrixXd X(ekf_params_.dim_x, 1); + X(IDX::X) = object.kinematics.pose_with_covariance.pose.position.x; + X(IDX::Y) = object.kinematics.pose_with_covariance.pose.position.y; + X(IDX::YAW) = tf2::getYaw(object.kinematics.pose_with_covariance.pose.orientation); + if (object.kinematics.has_twist) { + X(IDX::VX) = object.kinematics.twist_with_covariance.twist.linear.x; + X(IDX::WZ) = object.kinematics.twist_with_covariance.twist.angular.z; + } else { + X(IDX::VX) = 0.0; + X(IDX::WZ) = 0.0; + } + + // initialize P matrix + Eigen::MatrixXd P = Eigen::MatrixXd::Zero(ekf_params_.dim_x, ekf_params_.dim_x); + if ( + !ekf_params_.use_measurement_covariance || + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_X] == 0.0 || + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_Y] == 0.0 || + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::YAW_YAW] == 0.0) { + const double cos_yaw = std::cos(X(IDX::YAW)); + const double sin_yaw = std::sin(X(IDX::YAW)); + const double sin_2yaw = std::sin(2.0f * X(IDX::YAW)); + // Rotate the covariance matrix according to the vehicle yaw + // because p0_cov_x and y are in the vehicle coordinate system. 
+ P(IDX::X, IDX::X) = + ekf_params_.p0_cov_x * cos_yaw * cos_yaw + ekf_params_.p0_cov_y * sin_yaw * sin_yaw; + P(IDX::X, IDX::Y) = 0.5f * (ekf_params_.p0_cov_x - ekf_params_.p0_cov_y) * sin_2yaw; + P(IDX::Y, IDX::Y) = + ekf_params_.p0_cov_x * sin_yaw * sin_yaw + ekf_params_.p0_cov_y * cos_yaw * cos_yaw; + P(IDX::Y, IDX::X) = P(IDX::X, IDX::Y); + P(IDX::YAW, IDX::YAW) = ekf_params_.p0_cov_yaw; + P(IDX::VX, IDX::VX) = ekf_params_.p0_cov_vx; + P(IDX::WZ, IDX::WZ) = ekf_params_.p0_cov_wz; + } else { + P(IDX::X, IDX::X) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_X]; + P(IDX::X, IDX::Y) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_Y]; + P(IDX::Y, IDX::Y) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_Y]; + P(IDX::Y, IDX::X) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_X]; + P(IDX::YAW, IDX::YAW) = + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::YAW_YAW]; + if (object.kinematics.has_twist_covariance) { + P(IDX::VX, IDX::VX) = + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::X_X]; + P(IDX::WZ, IDX::WZ) = + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::YAW_YAW]; + } else { + P(IDX::VX, IDX::VX) = ekf_params_.p0_cov_vx; + P(IDX::WZ, IDX::WZ) = ekf_params_.p0_cov_wz; + } + } + + bounding_box_ = {0.5, 0.5, 1.7}; + cylinder_ = {0.3, 1.7}; + if (object.shape.type == autoware_auto_perception_msgs::msg::Shape::BOUNDING_BOX) { + bounding_box_ = { + object.shape.dimensions.x, object.shape.dimensions.y, object.shape.dimensions.z}; + } else if (object.shape.type == autoware_auto_perception_msgs::msg::Shape::CYLINDER) { + cylinder_ = {object.shape.dimensions.x, object.shape.dimensions.z}; + } + + ekf_.init(X, P); +} + +bool PedestrianTracker::predict(const rclcpp::Time & time) +{ + const double dt = (time - last_update_time_).seconds(); + bool ret = predict(dt, ekf_); + if (ret) { + last_update_time_ = time; + } + return ret; +} + +bool PedestrianTracker::predict(const double dt, KalmanFilter & ekf) const +{ + /* == Nonlinear model == + * + * x_{k+1} = x_k + vx_k * cos(yaw_k) * dt + * y_{k+1} = y_k + vx_k * sin(yaw_k) * dt + * yaw_{k+1} = yaw_k + (wz_k) * dt + * vx_{k+1} = vx_k + * wz_{k+1} = wz_k + * + */ + + /* == Linearized model == + * + * A = [ 1, 0, -vx*sin(yaw)*dt, cos(yaw)*dt, 0] + * [ 0, 1, vx*cos(yaw)*dt, sin(yaw)*dt, 0] + * [ 0, 0, 1, 0, dt] + * [ 0, 0, 0, 1, 0] + * [ 0, 0, 0, 0, 1] + */ + + // X t + Eigen::MatrixXd X_t(ekf_params_.dim_x, 1); // predicted state + ekf.getX(X_t); + const double cos_yaw = std::cos(X_t(IDX::YAW)); + const double sin_yaw = std::sin(X_t(IDX::YAW)); + const double sin_2yaw = std::sin(2.0f * X_t(IDX::YAW)); + + // X t+1 + Eigen::MatrixXd X_next_t(ekf_params_.dim_x, 1); // predicted state + X_next_t(IDX::X) = X_t(IDX::X) + X_t(IDX::VX) * cos_yaw * dt; // dx = v * cos(yaw) + X_next_t(IDX::Y) = X_t(IDX::Y) + X_t(IDX::VX) * sin_yaw * dt; // dy = v * sin(yaw) + X_next_t(IDX::YAW) = X_t(IDX::YAW) + (X_t(IDX::WZ)) * dt; // dyaw = omega + X_next_t(IDX::VX) = X_t(IDX::VX); + X_next_t(IDX::WZ) = X_t(IDX::WZ); + + // A + Eigen::MatrixXd A = Eigen::MatrixXd::Identity(ekf_params_.dim_x, ekf_params_.dim_x); + A(IDX::X, IDX::YAW) = -X_t(IDX::VX) * sin_yaw * dt; + A(IDX::X, IDX::VX) = cos_yaw * dt; + A(IDX::Y, IDX::YAW) = X_t(IDX::VX) * cos_yaw * dt; + A(IDX::Y, IDX::VX) = sin_yaw * dt; + A(IDX::YAW, IDX::WZ) = dt; + + // Q + Eigen::MatrixXd Q = Eigen::MatrixXd::Zero(ekf_params_.dim_x, 
ekf_params_.dim_x); + // Rotate the covariance matrix according to the vehicle yaw + // because q_cov_x and y are in the vehicle coordinate system. + Q(IDX::X, IDX::X) = + (ekf_params_.q_cov_x * cos_yaw * cos_yaw + ekf_params_.q_cov_y * sin_yaw * sin_yaw) * dt * dt; + Q(IDX::X, IDX::Y) = (0.5f * (ekf_params_.q_cov_x - ekf_params_.q_cov_y) * sin_2yaw) * dt * dt; + Q(IDX::Y, IDX::Y) = + (ekf_params_.q_cov_x * sin_yaw * sin_yaw + ekf_params_.q_cov_y * cos_yaw * cos_yaw) * dt * dt; + Q(IDX::Y, IDX::X) = Q(IDX::X, IDX::Y); + Q(IDX::YAW, IDX::YAW) = ekf_params_.q_cov_yaw * dt * dt; + Q(IDX::VX, IDX::VX) = ekf_params_.q_cov_vx * dt * dt; + Q(IDX::WZ, IDX::WZ) = ekf_params_.q_cov_wz * dt * dt; + Eigen::MatrixXd B = Eigen::MatrixXd::Zero(ekf_params_.dim_x, ekf_params_.dim_x); + Eigen::MatrixXd u = Eigen::MatrixXd::Zero(ekf_params_.dim_x, 1); + + if (!ekf.predict(X_next_t, A, Q)) { + RCLCPP_WARN(logger_, "Cannot predict"); + } + + return true; +} + +bool PedestrianTracker::measureWithPose( + const autoware_auto_perception_msgs::msg::DetectedObject & object) +{ + constexpr int dim_y = 2; // pos x, pos y depending on Pose output + // double measurement_yaw = + // autoware_utils::normalizeRadian(tf2::getYaw(object.state.pose_covariance.pose.orientation)); + // { + // Eigen::MatrixXd X_t(ekf_params_.dim_x, 1); + // ekf_.getX(X_t); + // while (M_PI_2 <= X_t(IDX::YAW) - measurement_yaw) { + // measurement_yaw = measurement_yaw + M_PI; + // } + // while (M_PI_2 <= measurement_yaw - X_t(IDX::YAW)) { + // measurement_yaw = measurement_yaw - M_PI; + // } + // float theta = std::acos( + // std::cos(X_t(IDX::YAW)) * std::cos(measurement_yaw) + + // std::sin(X_t(IDX::YAW)) * std::sin(measurement_yaw)); + // if (autoware_utils::deg2rad(60) < std::fabs(theta)) return false; + // } + + /* Set measurement matrix */ + Eigen::MatrixXd Y(dim_y, 1); + Y << object.kinematics.pose_with_covariance.pose.position.x, + object.kinematics.pose_with_covariance.pose.position.y; + + /* Set measurement matrix */ + Eigen::MatrixXd C = Eigen::MatrixXd::Zero(dim_y, ekf_params_.dim_x); + C(0, IDX::X) = 1.0; // for pos x + C(1, IDX::Y) = 1.0; // for pos y + // C(2, IDX::YAW) = 1.0; // for yaw + + /* Set measurement noise covariance */ + Eigen::MatrixXd R = Eigen::MatrixXd::Zero(dim_y, dim_y); + if ( + !ekf_params_.use_measurement_covariance || + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_X] == 0.0 || + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_Y] == 0.0) { + R(0, 0) = ekf_params_.r_cov_x; // x - x + R(0, 1) = 0.0; // x - y + R(1, 1) = ekf_params_.r_cov_y; // y - y + R(1, 0) = R(0, 1); // y - x + // R(2, 2) = ekf_params_.r_cov_yaw; // yaw - yaw + } else { + R(0, 0) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_X]; + R(0, 1) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_Y]; + // R(0, 2) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_YAW]; + R(1, 0) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_X]; + R(1, 1) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_Y]; + // R(1, 2) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_YAW]; + // R(2, 0) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::YAW_X]; + // R(2, 1) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::YAW_Y]; + // R(2, 2) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::YAW_YAW]; + } + if 
(!ekf_.update(Y, C, R)) { + RCLCPP_WARN(logger_, "Cannot update"); + } + + // normalize yaw and limit vx, wz + { + Eigen::MatrixXd X_t(ekf_params_.dim_x, 1); + Eigen::MatrixXd P_t(ekf_params_.dim_x, ekf_params_.dim_x); + ekf_.getX(X_t); + ekf_.getP(P_t); + X_t(IDX::YAW) = autoware_utils::normalizeRadian(X_t(IDX::YAW)); + if (!(-max_vx_ <= X_t(IDX::VX) && X_t(IDX::VX) <= max_vx_)) { + X_t(IDX::VX) = X_t(IDX::VX) < 0 ? -max_vx_ : max_vx_; + } + if (!(-max_wz_ <= X_t(IDX::WZ) && X_t(IDX::WZ) <= max_wz_)) { + X_t(IDX::WZ) = X_t(IDX::WZ) < 0 ? -max_wz_ : max_wz_; + } + ekf_.init(X_t, P_t); + } + + // position z + constexpr float gain = 0.9; + z_ = gain * z_ + (1.0 - gain) * object.kinematics.pose_with_covariance.pose.position.z; + + return true; +} + +bool PedestrianTracker::measureWithShape( + const autoware_auto_perception_msgs::msg::DetectedObject & object) +{ + constexpr float gain = 0.9; + if (object.shape.type == autoware_auto_perception_msgs::msg::Shape::BOUNDING_BOX) { + bounding_box_.width = gain * bounding_box_.width + (1.0 - gain) * object.shape.dimensions.x; + bounding_box_.length = gain * bounding_box_.length + (1.0 - gain) * object.shape.dimensions.y; + bounding_box_.height = gain * bounding_box_.height + (1.0 - gain) * object.shape.dimensions.z; + } else if (object.shape.type == autoware_auto_perception_msgs::msg::Shape::CYLINDER) { + cylinder_.width = gain * cylinder_.width + (1.0 - gain) * object.shape.dimensions.x; + cylinder_.height = gain * cylinder_.height + (1.0 - gain) * object.shape.dimensions.z; + } else { + return false; + } + + return true; +} + +bool PedestrianTracker::measure( + const autoware_auto_perception_msgs::msg::DetectedObject & object, const rclcpp::Time & time) +{ + object_ = object; + + if (0.01 /*10msec*/ < std::fabs((time - last_update_time_).seconds())) { + RCLCPP_WARN( + logger_, "There is a large gap between predicted time and measurement time. 
(%f)", + (time - last_update_time_).seconds()); + } + + measureWithPose(object); + measureWithShape(object); + + return true; +} + +bool PedestrianTracker::getTrackedObject( + const rclcpp::Time & time, autoware_auto_perception_msgs::msg::TrackedObject & object) const +{ + object = utils::toTrackedObject(object_); + object.object_id = getUUID(); + object.classification = getClassification(); + + // predict kinematics + KalmanFilter tmp_ekf_for_no_update = ekf_; + const double dt = (time - last_update_time_).seconds(); + if (0.001 /*1msec*/ < dt) { + predict(dt, tmp_ekf_for_no_update); + } + Eigen::MatrixXd X_t(ekf_params_.dim_x, 1); // predicted state + Eigen::MatrixXd P(ekf_params_.dim_x, ekf_params_.dim_x); // predicted state + tmp_ekf_for_no_update.getX(X_t); + tmp_ekf_for_no_update.getP(P); + + // position + object.kinematics.pose_with_covariance.pose.position.x = X_t(IDX::X); + object.kinematics.pose_with_covariance.pose.position.y = X_t(IDX::Y); + object.kinematics.pose_with_covariance.pose.position.z = z_; + // quaternion + { + double roll, pitch, yaw; + tf2::Quaternion original_quaternion; + tf2::fromMsg(object_.kinematics.pose_with_covariance.pose.orientation, original_quaternion); + tf2::Matrix3x3(original_quaternion).getRPY(roll, pitch, yaw); + tf2::Quaternion filtered_quaternion; + filtered_quaternion.setRPY(roll, pitch, X_t(IDX::YAW)); + object.kinematics.pose_with_covariance.pose.orientation.x = filtered_quaternion.x(); + object.kinematics.pose_with_covariance.pose.orientation.y = filtered_quaternion.y(); + object.kinematics.pose_with_covariance.pose.orientation.z = filtered_quaternion.z(); + object.kinematics.pose_with_covariance.pose.orientation.w = filtered_quaternion.w(); + object.kinematics.orientation_availability = + autoware_auto_perception_msgs::msg::TrackedObjectKinematics::SIGN_UNKNOWN; + } + // position covariance + constexpr double z_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative + constexpr double r_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative + constexpr double p_cov = 0.1 * 0.1; // TOTODO(yukkysaito)DO Currently tentative + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_X] = P(IDX::X, IDX::X); + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_Y] = P(IDX::X, IDX::Y); + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_X] = P(IDX::Y, IDX::X); + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_Y] = P(IDX::Y, IDX::Y); + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Z_Z] = z_cov; + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::ROLL_ROLL] = r_cov; + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::PITCH_PITCH] = p_cov; + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::YAW_YAW] = + P(IDX::YAW, IDX::YAW); + + // twist + object.kinematics.twist_with_covariance.twist.linear.x = X_t(IDX::VX); + object.kinematics.twist_with_covariance.twist.angular.z = X_t(IDX::WZ); + // twist covariance + constexpr double vy_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative + constexpr double vz_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative + constexpr double wx_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative + constexpr double wy_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::X_X] = P(IDX::VX, IDX::VX); + 
object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::Y_Y] = vy_cov; + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::Z_Z] = vz_cov; + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::X_YAW] = + P(IDX::VX, IDX::WZ); + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::YAW_X] = + P(IDX::WZ, IDX::VX); + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::ROLL_ROLL] = wx_cov; + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::PITCH_PITCH] = wy_cov; + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::YAW_YAW] = + P(IDX::WZ, IDX::WZ); + + // set shape + object.shape.dimensions.x = bounding_box_.width; + object.shape.dimensions.y = bounding_box_.length; + object.shape.dimensions.z = bounding_box_.height; + + return true; +} diff --git a/perception/multi_object_tracker/src/tracker/model/tracker_base.cpp b/perception/multi_object_tracker/src/tracker/model/tracker_base.cpp new file mode 100644 index 0000000000000..82db76d9901e2 --- /dev/null +++ b/perception/multi_object_tracker/src/tracker/model/tracker_base.cpp @@ -0,0 +1,63 @@ +// Copyright 2020 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// + +#include "multi_object_tracker/tracker/model/tracker_base.hpp" + +#include "multi_object_tracker/utils/utils.hpp" + +#include +#include + +Tracker::Tracker( + const rclcpp::Time & time, + const std::vector & classification) +: classification_(classification), + no_measurement_count_(0), + total_no_measurement_count_(0), + total_measurement_count_(1), + last_update_with_measurement_time_(time) +{ + // Generate random number + std::mt19937 gen(std::random_device{}()); + std::independent_bits_engine bit_eng(gen); + std::generate(uuid_.uuid.begin(), uuid_.uuid.end(), bit_eng); +} + +bool Tracker::updateWithMeasurement( + const autoware_auto_perception_msgs::msg::DetectedObject & object, + const rclcpp::Time & measurement_time) +{ + no_measurement_count_ = 0; + ++total_measurement_count_; + last_update_with_measurement_time_ = measurement_time; + measure(object, measurement_time); + return true; +} + +bool Tracker::updateWithoutMeasurement() +{ + ++no_measurement_count_; + ++total_no_measurement_count_; + return true; +} + +geometry_msgs::msg::PoseWithCovariance Tracker::getPoseWithCovariance( + const rclcpp::Time & time) const +{ + autoware_auto_perception_msgs::msg::TrackedObject object; + getTrackedObject(time, object); + return object.kinematics.pose_with_covariance; +} diff --git a/perception/multi_object_tracker/src/tracker/model/unknown_tracker.cpp b/perception/multi_object_tracker/src/tracker/model/unknown_tracker.cpp new file mode 100644 index 0000000000000..ebc484b84ef76 --- /dev/null +++ b/perception/multi_object_tracker/src/tracker/model/unknown_tracker.cpp @@ -0,0 +1,298 @@ +// Copyright 2020 Tier IV, Inc. 
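The Tracker base class above assigns each new track a random 16-byte ID by seeding a Mersenne Twister from std::random_device and drawing one byte per element. The following standalone sketch shows the same idea; note that it uses a strictly portable result type for std::independent_bits_engine rather than mirroring the tracker's exact template arguments.

```cpp
#include <algorithm>
#include <array>
#include <cstdint>
#include <cstdio>
#include <random>

int main()
{
  std::array<std::uint8_t, 16> uuid{};  // analogous to the uuid_.uuid field filled in the constructor

  std::mt19937 gen(std::random_device{}());
  std::independent_bits_engine<std::mt19937, 8, unsigned int> bit_eng(gen);

  std::generate(uuid.begin(), uuid.end(), [&bit_eng]() {
    return static_cast<std::uint8_t>(bit_eng());  // one random byte per element
  });

  for (const auto byte : uuid) {
    std::printf("%02x", static_cast<unsigned>(byte));
  }
  std::printf("\n");
  return 0;
}
```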
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include +#include +#include +#include +#include +#define EIGEN_MPL2_ONLY +#include "multi_object_tracker/tracker/model/unknown_tracker.hpp" +#include "multi_object_tracker/utils/utils.hpp" + +#include +#include +#include + +UnknownTracker::UnknownTracker( + const rclcpp::Time & time, const autoware_auto_perception_msgs::msg::DetectedObject & object) +: Tracker(time, object.classification), + logger_(rclcpp::get_logger("UnknownTracker")), + last_update_time_(time), + z_(object.kinematics.pose_with_covariance.pose.position.z) +{ + object_ = object; + + // initialize params + ekf_params_.use_measurement_covariance = false; + float q_stddev_x = 0.0; // [m/s] + float q_stddev_y = 0.0; // [m/s] + float q_stddev_vx = autoware_utils::kmph2mps(0.1); // [m/(s*s)] + float q_stddev_vy = autoware_utils::kmph2mps(0.1); // [m/(s*s)] + float r_stddev_x = 0.4; // [m] + float r_stddev_y = 0.4; // [m] + float p0_stddev_x = 1.0; // [m/s] + float p0_stddev_y = 1.0; // [m/s] + float p0_stddev_vx = autoware_utils::kmph2mps(0.1); // [m/(s*s)] + float p0_stddev_vy = autoware_utils::kmph2mps(0.1); // [m/(s*s)] + ekf_params_.q_cov_x = std::pow(q_stddev_x, 2.0); + ekf_params_.q_cov_y = std::pow(q_stddev_y, 2.0); + ekf_params_.q_cov_vx = std::pow(q_stddev_vx, 2.0); + ekf_params_.q_cov_vy = std::pow(q_stddev_vy, 2.0); + ekf_params_.r_cov_x = std::pow(r_stddev_x, 2.0); + ekf_params_.r_cov_y = std::pow(r_stddev_y, 2.0); + ekf_params_.p0_cov_x = std::pow(p0_stddev_x, 2.0); + ekf_params_.p0_cov_y = std::pow(p0_stddev_y, 2.0); + ekf_params_.p0_cov_vx = std::pow(p0_stddev_vx, 2.0); + ekf_params_.p0_cov_vy = std::pow(p0_stddev_vy, 2.0); + max_vx_ = autoware_utils::kmph2mps(5); // [m/s] + max_vy_ = autoware_utils::kmph2mps(5); // [m/s] + + // initialize X matrix + Eigen::MatrixXd X(ekf_params_.dim_x, 1); + X(IDX::X) = object.kinematics.pose_with_covariance.pose.position.x; + X(IDX::Y) = object.kinematics.pose_with_covariance.pose.position.y; + if (object.kinematics.has_twist) { + X(IDX::VX) = object.kinematics.twist_with_covariance.twist.linear.x; + X(IDX::VY) = object.kinematics.twist_with_covariance.twist.linear.y; + } else { + X(IDX::VX) = 0.0; + X(IDX::VY) = 0.0; + } + + // initialize P matrix + Eigen::MatrixXd P = Eigen::MatrixXd::Zero(ekf_params_.dim_x, ekf_params_.dim_x); + if ( + !ekf_params_.use_measurement_covariance || + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_X] == 0.0 || + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_Y] == 0.0) { + // Rotate the covariance matrix according to the vehicle yaw + // because p0_cov_x and y are in the vehicle coordinate system. 
+ P(IDX::X, IDX::X) = ekf_params_.p0_cov_x; + P(IDX::X, IDX::Y) = 0.0; + P(IDX::Y, IDX::Y) = ekf_params_.p0_cov_y; + P(IDX::Y, IDX::X) = P(IDX::X, IDX::Y); + P(IDX::VX, IDX::VX) = ekf_params_.p0_cov_vx; + P(IDX::VY, IDX::VY) = ekf_params_.p0_cov_vy; + } else { + P(IDX::X, IDX::X) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_X]; + P(IDX::X, IDX::Y) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_Y]; + P(IDX::Y, IDX::Y) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_Y]; + P(IDX::Y, IDX::X) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_X]; + if (object.kinematics.has_twist_covariance) { + P(IDX::VX, IDX::VX) = + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::X_X]; + P(IDX::VY, IDX::VY) = + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::Y_Y]; + } else { + P(IDX::VX, IDX::VX) = ekf_params_.p0_cov_vx; + P(IDX::VY, IDX::VY) = ekf_params_.p0_cov_vy; + } + } + + ekf_.init(X, P); +} + +bool UnknownTracker::predict(const rclcpp::Time & time) +{ + const double dt = (time - last_update_time_).seconds(); + bool ret = predict(dt, ekf_); + if (ret) { + last_update_time_ = time; + } + return ret; +} + +bool UnknownTracker::predict(const double dt, KalmanFilter & ekf) const +{ + /* == Nonlinear model == + * + * x_{k+1} = x_k + vx_k * dt + * y_{k+1} = y_k + vx_k * dt + * vx_{k+1} = vx_k + * vy_{k+1} = vy_k + * + */ + + /* == Linearized model == + * + * A = [ 1, 0, dt, 0] + * [ 0, 1, 0, dt] + * [ 0, 0, 1, 0] + * [ 0, 0, 0, 1] + */ + + // X t + Eigen::MatrixXd X_t(ekf_params_.dim_x, 1); // predicted state + ekf.getX(X_t); + + // X t+1 + Eigen::MatrixXd X_next_t(ekf_params_.dim_x, 1); // predicted state + X_next_t(IDX::X) = X_t(IDX::X) + X_t(IDX::VX) * dt; + X_next_t(IDX::Y) = X_t(IDX::Y) + X_t(IDX::VY) * dt; + X_next_t(IDX::VX) = X_t(IDX::VX); + X_next_t(IDX::VY) = X_t(IDX::VY); + + // A + Eigen::MatrixXd A = Eigen::MatrixXd::Identity(ekf_params_.dim_x, ekf_params_.dim_x); + A(IDX::X, IDX::VX) = dt; + A(IDX::Y, IDX::VY) = dt; + + // Q + Eigen::MatrixXd Q = Eigen::MatrixXd::Zero(ekf_params_.dim_x, ekf_params_.dim_x); + // Rotate the covariance matrix according to the vehicle yaw + // because q_cov_x and y are in the vehicle coordinate system. 
+ Q(IDX::X, IDX::X) = ekf_params_.q_cov_x * dt * dt; + Q(IDX::X, IDX::Y) = 0.0; + Q(IDX::Y, IDX::Y) = ekf_params_.q_cov_y * dt * dt; + Q(IDX::Y, IDX::X) = Q(IDX::X, IDX::Y); + Q(IDX::VX, IDX::VX) = ekf_params_.q_cov_vx * dt * dt; + Q(IDX::VY, IDX::VY) = ekf_params_.q_cov_vy * dt * dt; + Eigen::MatrixXd B = Eigen::MatrixXd::Zero(ekf_params_.dim_x, ekf_params_.dim_x); + Eigen::MatrixXd u = Eigen::MatrixXd::Zero(ekf_params_.dim_x, 1); + + if (!ekf.predict(X_next_t, A, Q)) { + RCLCPP_WARN(logger_, "Pedestrian : Cannot predict"); + } + + return true; +} + +bool UnknownTracker::measureWithPose( + const autoware_auto_perception_msgs::msg::DetectedObject & object) +{ + constexpr int dim_y = 2; // pos x, pos y depending on Pose output + + /* Set measurement matrix */ + Eigen::MatrixXd Y(dim_y, 1); + Y << object.kinematics.pose_with_covariance.pose.position.x, + object.kinematics.pose_with_covariance.pose.position.y; + + /* Set measurement matrix */ + Eigen::MatrixXd C = Eigen::MatrixXd::Zero(dim_y, ekf_params_.dim_x); + C(0, IDX::X) = 1.0; // for pos x + C(1, IDX::Y) = 1.0; // for pos y + + /* Set measurement noise covariance */ + Eigen::MatrixXd R = Eigen::MatrixXd::Zero(dim_y, dim_y); + if ( + !ekf_params_.use_measurement_covariance || + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_X] == 0.0 || + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_Y] == 0.0) { + R(0, 0) = ekf_params_.r_cov_x; // x - x + R(0, 1) = 0.0; // x - y + R(1, 1) = ekf_params_.r_cov_y; // y - y + R(1, 0) = R(0, 1); // y - x + } else { + R(0, 0) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_X]; + R(0, 1) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_Y]; + R(1, 0) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_X]; + R(1, 1) = object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_Y]; + } + if (!ekf_.update(Y, C, R)) { + RCLCPP_WARN(logger_, "Pedestrian : Cannot update"); + } + + // limit vx, vy + { + Eigen::MatrixXd X_t(ekf_params_.dim_x, 1); + Eigen::MatrixXd P_t(ekf_params_.dim_x, ekf_params_.dim_x); + ekf_.getX(X_t); + ekf_.getP(P_t); + if (!(-max_vx_ <= X_t(IDX::VX) && X_t(IDX::VX) <= max_vx_)) { + X_t(IDX::VX) = X_t(IDX::VX) < 0 ? -max_vx_ : max_vx_; + } + if (!(-max_vy_ <= X_t(IDX::VY) && X_t(IDX::VY) <= max_vy_)) { + X_t(IDX::VY) = X_t(IDX::VY) < 0 ? -max_vy_ : max_vy_; + } + ekf_.init(X_t, P_t); + } + + // position z + constexpr float gain = 0.9; + z_ = gain * z_ + (1.0 - gain) * object.kinematics.pose_with_covariance.pose.position.z; + + return true; +} + +bool UnknownTracker::measure( + const autoware_auto_perception_msgs::msg::DetectedObject & object, const rclcpp::Time & time) +{ + object_ = object; + + if (0.01 /*10msec*/ < std::fabs((time - last_update_time_).seconds())) { + RCLCPP_WARN( + logger_, + "Pedestrian : There is a large gap between predicted time and measurement time. 
(%f)", + (time - last_update_time_).seconds()); + } + + measureWithPose(object); + + return true; +} + +bool UnknownTracker::getTrackedObject( + const rclcpp::Time & time, autoware_auto_perception_msgs::msg::TrackedObject & object) const +{ + object = utils::toTrackedObject(object_); + object.object_id = getUUID(); + object.classification = getClassification(); + + // predict kinematics + KalmanFilter tmp_ekf_for_no_update = ekf_; + const double dt = (time - last_update_time_).seconds(); + if (0.001 /*1msec*/ < dt) { + predict(dt, tmp_ekf_for_no_update); + } + Eigen::MatrixXd X_t(ekf_params_.dim_x, 1); // predicted state + Eigen::MatrixXd P(ekf_params_.dim_x, ekf_params_.dim_x); // predicted state + tmp_ekf_for_no_update.getX(X_t); + tmp_ekf_for_no_update.getP(P); + + // position + object.kinematics.pose_with_covariance.pose.position.x = X_t(IDX::X); + object.kinematics.pose_with_covariance.pose.position.y = X_t(IDX::Y); + object.kinematics.pose_with_covariance.pose.position.z = z_; + // position covariance + constexpr double z_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative + constexpr double r_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative + constexpr double p_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative + constexpr double yaw_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_X] = P(IDX::X, IDX::X); + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::X_Y] = P(IDX::X, IDX::Y); + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_X] = P(IDX::Y, IDX::X); + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Y_Y] = P(IDX::Y, IDX::Y); + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::Z_Z] = z_cov; + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::ROLL_ROLL] = r_cov; + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::PITCH_PITCH] = p_cov; + object.kinematics.pose_with_covariance.covariance[utils::MSG_COV_IDX::YAW_YAW] = yaw_cov; + + // twist + object.kinematics.twist_with_covariance.twist.linear.x = X_t(IDX::VX); + object.kinematics.twist_with_covariance.twist.linear.y = X_t(IDX::VY); + // twist covariance + constexpr double vz_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative + constexpr double wx_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative + constexpr double wy_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative + constexpr double wz_cov = 0.1 * 0.1; // TODO(yukkysaito) Currently tentative + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::X_X] = P(IDX::VX, IDX::VX); + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::Y_Y] = P(IDX::VY, IDX::VY); + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::Z_Z] = vz_cov; + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::ROLL_ROLL] = wx_cov; + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::PITCH_PITCH] = wy_cov; + object.kinematics.twist_with_covariance.covariance[utils::MSG_COV_IDX::YAW_YAW] = wz_cov; + + return true; +} diff --git a/perception/multi_object_tracker/src/utils/utils.cpp b/perception/multi_object_tracker/src/utils/utils.cpp new file mode 100644 index 0000000000000..723262f551136 --- /dev/null +++ b/perception/multi_object_tracker/src/utils/utils.cpp @@ -0,0 +1,218 @@ +// Copyright 2020 Tier IV, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// +// Author: v1.0 Yukihiro Saito +// + +#include "multi_object_tracker/utils/utils.hpp" + +#include + +#include +#include + +#include +#include + +namespace utils +{ +void toPolygon2d( + const autoware_auto_perception_msgs::msg::TrackedObject & object, + autoware_utils::Polygon2d & output); +bool isClockWise(const autoware_utils::Polygon2d & polygon); +autoware_utils::Polygon2d inverseClockWise(const autoware_utils::Polygon2d & polygon); + +double getArea(const autoware_auto_perception_msgs::msg::Shape & shape) +{ + double area = 0.0; + if (shape.type == autoware_auto_perception_msgs::msg::Shape::BOUNDING_BOX) { + area = getRectangleArea(shape.dimensions); + } else if (shape.type == autoware_auto_perception_msgs::msg::Shape::CYLINDER) { + area = getCircleArea(shape.dimensions); + } else if (shape.type == autoware_auto_perception_msgs::msg::Shape::POLYGON) { + area = getPolygonArea(shape.footprint); + } + return area; +} + +double getPolygonArea(const geometry_msgs::msg::Polygon & footprint) +{ + double area = 0.0; + + for (int i = 0; i < static_cast(footprint.points.size()); ++i) { + int j = (i + 1) % static_cast(footprint.points.size()); + area += 0.5 * (footprint.points.at(i).x * footprint.points.at(j).y - + footprint.points.at(j).x * footprint.points.at(i).y); + } + + return area; +} + +double getRectangleArea(const geometry_msgs::msg::Vector3 & dimensions) +{ + return static_cast(dimensions.x * dimensions.y); +} + +double getCircleArea(const geometry_msgs::msg::Vector3 & dimensions) +{ + return static_cast((dimensions.x / 2.0) * (dimensions.x / 2.0) * M_PI); +} + +double get2dIoU( + const autoware_auto_perception_msgs::msg::TrackedObject & object1, + const autoware_auto_perception_msgs::msg::TrackedObject & object2) +{ + autoware_utils::Polygon2d polygon1, polygon2; + toPolygon2d(object1, polygon1); + toPolygon2d(object2, polygon2); + + std::vector union_polygons; + std::vector intersection_polygons; + boost::geometry::union_(polygon1, polygon2, union_polygons); + boost::geometry::intersection(polygon1, polygon2, intersection_polygons); + + double union_area = 0.0; + double intersection_area = 0.0; + for (const auto & union_polygon : union_polygons) { + union_area += boost::geometry::area(union_polygon); + } + for (const auto & intersection_polygon : intersection_polygons) { + intersection_area += boost::geometry::area(intersection_polygon); + } + double iou; + if (union_area < 0.01) { + iou = 0.0f; + } + iou = std::min(1.0, intersection_area / union_area); + return iou; +} + +autoware_utils::Polygon2d inverseClockWise(const autoware_utils::Polygon2d & polygon) +{ + autoware_utils::Polygon2d inverted_polygon; + for (int i = polygon.outer().size() - 1; 0 <= i; --i) { + inverted_polygon.outer().push_back(polygon.outer().at(i)); + } + return inverted_polygon; +} + +bool isClockWise(const autoware_utils::Polygon2d & polygon) +{ + const int n = polygon.outer().size(); + const double x_offset = polygon.outer().at(0).x(); + const 
+
+bool isClockWise(const autoware_utils::Polygon2d & polygon)
+{
+  // Signed shoelace sum relative to the first vertex; a negative sum means clockwise.
+  const int n = polygon.outer().size();
+  const double x_offset = polygon.outer().at(0).x();
+  const double y_offset = polygon.outer().at(0).y();
+  double sum = 0.0;
+  for (std::size_t i = 0; i < polygon.outer().size(); ++i) {
+    sum +=
+      (polygon.outer().at(i).x() - x_offset) * (polygon.outer().at((i + 1) % n).y() - y_offset) -
+      (polygon.outer().at(i).y() - y_offset) * (polygon.outer().at((i + 1) % n).x() - x_offset);
+  }
+
+  return sum < 0.0;
+}
+
+void toPolygon2d(
+  const autoware_auto_perception_msgs::msg::TrackedObject & object,
+  autoware_utils::Polygon2d & output)
+{
+  if (object.shape.type == autoware_auto_perception_msgs::msg::Shape::BOUNDING_BOX) {
+    // Rotate the four box corners by the object yaw and translate them to the object position.
+    const auto & pose = object.kinematics.pose_with_covariance.pose;
+    double yaw = autoware_utils::normalizeRadian(tf2::getYaw(pose.orientation));
+    Eigen::Matrix2d rotation;
+    rotation << std::cos(yaw), -std::sin(yaw), std::sin(yaw), std::cos(yaw);
+    Eigen::Vector2d offset0, offset1, offset2, offset3;
+    offset0 = rotation *
+              Eigen::Vector2d(object.shape.dimensions.x * 0.5f, object.shape.dimensions.y * 0.5f);
+    offset1 = rotation *
+              Eigen::Vector2d(object.shape.dimensions.x * 0.5f, -object.shape.dimensions.y * 0.5f);
+    offset2 = rotation *
+              Eigen::Vector2d(-object.shape.dimensions.x * 0.5f, -object.shape.dimensions.y * 0.5f);
+    offset3 = rotation *
+              Eigen::Vector2d(-object.shape.dimensions.x * 0.5f, object.shape.dimensions.y * 0.5f);
+    output.outer().push_back(boost::geometry::make<autoware_utils::Point2d>(
+      pose.position.x + offset0.x(), pose.position.y + offset0.y()));
+    output.outer().push_back(boost::geometry::make<autoware_utils::Point2d>(
+      pose.position.x + offset1.x(), pose.position.y + offset1.y()));
+    output.outer().push_back(boost::geometry::make<autoware_utils::Point2d>(
+      pose.position.x + offset2.x(), pose.position.y + offset2.y()));
+    output.outer().push_back(boost::geometry::make<autoware_utils::Point2d>(
+      pose.position.x + offset3.x(), pose.position.y + offset3.y()));
+    output.outer().push_back(output.outer().front());
+  } else if (object.shape.type == autoware_auto_perception_msgs::msg::Shape::CYLINDER) {
+    // Approximate the cylinder footprint with a regular hexagon around the object center.
+    const auto & center = object.kinematics.pose_with_covariance.pose.position;
+    const auto & radius = object.shape.dimensions.x * 0.5;
+    constexpr int n = 6;
+    for (int i = 0; i < n; ++i) {
+      Eigen::Vector2d point;
+      point.x() = std::cos(
+                    (static_cast<double>(i) / static_cast<double>(n)) * 2.0 * M_PI +
+                    M_PI / static_cast<double>(n)) *
+                    radius +
+                  center.x;
+      point.y() = std::sin(
+                    (static_cast<double>(i) / static_cast<double>(n)) * 2.0 * M_PI +
+                    M_PI / static_cast<double>(n)) *
+                    radius +
+                  center.y;
+      output.outer().push_back(
+        boost::geometry::make<autoware_utils::Point2d>(point.x(), point.y()));
+    }
+    output.outer().push_back(output.outer().front());
+  } else if (object.shape.type == autoware_auto_perception_msgs::msg::Shape::POLYGON) {
+    const auto & pose = object.kinematics.pose_with_covariance.pose;
+    // The yaw is intentionally not applied here; the footprint points are only translated.
+    for (const auto & point : object.shape.footprint.points) {
+      output.outer().push_back(boost::geometry::make<autoware_utils::Point2d>(
+        pose.position.x + point.x, pose.position.y + point.y));
+    }
+    output.outer().push_back(output.outer().front());
+  }
+  output = isClockWise(output) ? output : inverseClockWise(output);
+}
+
+std::uint8_t getHighestProbLabel(
+  const std::vector<autoware_auto_perception_msgs::msg::ObjectClassification> & classification)
+{
+  std::uint8_t label = autoware_auto_perception_msgs::msg::ObjectClassification::UNKNOWN;
+  float highest_prob = 0.0;
+  for (const auto & _class : classification) {
+    if (highest_prob < _class.probability) {
+      highest_prob = _class.probability;
+      label = _class.label;
+    }
+  }
+  return label;
+}
+
+autoware_auto_perception_msgs::msg::TrackedObject toTrackedObject(
+  const autoware_auto_perception_msgs::msg::DetectedObject & detected_object)
+{
+  autoware_auto_perception_msgs::msg::TrackedObject tracked_object;
+  tracked_object.existence_probability = detected_object.existence_probability;
+
+  tracked_object.classification = detected_object.classification;
+
+  tracked_object.kinematics.pose_with_covariance = detected_object.kinematics.pose_with_covariance;
+  tracked_object.kinematics.twist_with_covariance =
+    detected_object.kinematics.twist_with_covariance;
+
+  tracked_object.shape = detected_object.shape;
+  return tracked_object;
+}
+
+}  // namespace utils