From 7da1565dd70ab011538bbf0a89e163a853b73e3e Mon Sep 17 00:00:00 2001 From: Takeshi Miura <57553950+1222-takeshi@users.noreply.github.com> Date: Fri, 3 Dec 2021 17:09:55 +0900 Subject: [PATCH] feat: add traffic_light_recognition packages (#94) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * release v0.4.0 * check if gdown command exists (#707) * Add nodelets of tlr nodes (#715) * Add classifier nodelet Signed-off-by: Daisuke Nishimatsu * Replace boost::shared_ptr into std::shared_ptr Signed-off-by: Daisuke Nishimatsu * Add lock guard Signed-off-by: Daisuke Nishimatsu * Add detetcor nodelet Signed-off-by: Daisuke Nishimatsu * Integrate main into node Signed-off-by: Daisuke Nishimatsu * Add SubscriberStatusCallback Signed-off-by: Daisuke Nishimatsu * add image_transport_decompresser nodelet Signed-off-by: Yukihiro Saito * Add visualizer nodelet Signed-off-by: Daisuke Nishimatsu * fixed bug Signed-off-by: Yukihiro Saito * Fix plugin name Signed-off-by: Daisuke Nishimatsu * Launch nodelet Signed-off-by: Daisuke Nishimatsu * Fix classifier constructor Signed-off-by: Daisuke Nishimatsu * add decompresser node Signed-off-by: Yukihiro Saito * fix typo Signed-off-by: Yukihiro Saito * fixed bug Signed-off-by: Yukihiro Saito * fixed bug Signed-off-by: Yukihiro Saito * cosmetic change Signed-off-by: Yukihiro Saito * add param Signed-off-by: Yukihiro Saito * fix bug Signed-off-by: Yukihiro Saito * Fix build warning Signed-off-by: Daisuke Nishimatsu * change rgb Signed-off-by: Yukihiro Saito * change rgb Signed-off-by: Yukihiro Saito Co-authored-by: Yukihiro Saito * remove libutils dependency when unable gpu (#761) * Fix typo cliped -> clipped (#776) Signed-off-by: Kosuke Takeuchi * install launch when disable gpu (#829) * add launch and xml install when disable gpu * remove unnecessary install * change raw pointer to vector and shared_ptr (#817) * change raw pointer to vector and shared_ptr * fix bug * Fix/cublas dependency (#849) * 
fix cublas depencency * fix cublas depencency * remove ROS1 packages temporarily Signed-off-by: mitsudome-r * Revert "remove ROS1 packages temporarily" This reverts commit 9b343cf4c11815735a176ef31f9e6e9948f20c74. Signed-off-by: mitsudome-r * add COLCON_IGNORE to ros1 packages Signed-off-by: mitsudome-r * Rename launch files to launch.xml (#28) * Port traffic light classifier (#70) * port package.xml and CMakeLists.txt Signed-off-by: mitsudome-r * port ROS messages Signed-off-by: mitsudome-r * port to ros2 Signed-off-by: mitsudome-r * make wget buildtool_depend Signed-off-by: mitsudome-r * remove main function Signed-off-by: mitsudome-r * add connectCb as a timer callback Signed-off-by: mitsudome-r * fix indentation Signed-off-by: mitsudome-r * remove catkin includes and libraries from CMakeLists.txt Signed-off-by: mitsudome-r * [traffic_light_ssd_fine_detector] port to ROS2 (#113) Signed-off-by: mitsudome-r * Ros2 port traffic light map (#99) * remove colcon ignore * ported CMakelists * port package * remove colcon ignore * ported CMakelists * port package * ported traffic_light_map_detector to ROS2 - compiling * port launch and config files, tidied up * change rclcpp duration arguments * launch file corrections * lookupTransform uses exact timestamp * Convert calls of Duration to Duration::from_seconds where appropriate (#131) * Rename h files to hpp (#142) * Change includes * Rename files * Adjustments to make things compile * Other packages * Adjust copyright notice on 532 out of 699 source files (#143) * Use quotes for includes where appropriate (#144) * Use quotes for includes where appropriate * Fix lint tests * Make tests pass hopefully * fixing trasient_local in ROS2 packages (#160) * adding linters in traffic_light_map_based_detector (#179) * traffic_light_classifier: Fix engine save dir (#250) Signed-off-by: wep21 * traffic light map based detector: Fix launch (#253) Signed-off-by: wep21 * Ros2 v0.8.0 traffic light map based detector (#262) * change 
alpha (#903) Signed-off-by: Yukihiro Saito * fix typos in perception (#862) Co-authored-by: Yukihiro Saito Co-authored-by: Kazuki Miyahara * Ros2 v0.8.0 traffic light classifier (#261) * fix typos in perception (#862) * update README.md in perception (#1007) * update traffic light recognition model (#1086) * update traffic light recognition model * download model when hash has changed * fix CMakeLists * udpate tl model to scale ai dataset one (#1118) Co-authored-by: Kazuki Miyahara Co-authored-by: Satoshi Tanaka Co-authored-by: Taichi Higashide * Ros2 v0.8.0 traffic light ssd fine detector (#260) * fix typos in perception (#862) * update README.md in perception (#1007) * update traffic light recognition model (#1086) * update traffic light recognition model * download model when hash has changed * fix CMakeLists * udpate tl model to scale ai dataset one (#1118) Co-authored-by: Kazuki Miyahara Co-authored-by: Satoshi Tanaka Co-authored-by: Taichi Higashide * Rename ROS-related .yaml to .param.yaml (#352) * Rename ROS-related .yaml to .param.yaml Signed-off-by: Kenji Miyake * Remove prefix 'default_' of yaml files Signed-off-by: Kenji Miyake * Rename vehicle_info.yaml to vehicle_info.param.yaml Signed-off-by: Kenji Miyake * Rename diagnostic_aggregator's param files Signed-off-by: Kenji Miyake * Fix overlooked parameters Signed-off-by: Kenji Miyake * Ros2 v0.8.0 beta rm std msgs ssd traffic light (#395) (#400) * rm std_msgs float32 * fix typo Co-authored-by: taikitanaka3 <65527974+taikitanaka3@users.noreply.github.com> * add use_sim-time option (#454) * Fix rolling build errors (#1225) * Add missing include files Signed-off-by: Kenji Miyake * Replace rclcpp::Duration Signed-off-by: Kenji Miyake * Use reference for exceptions Signed-off-by: Kenji Miyake * Use from_seconds Signed-off-by: Kenji Miyake * Sync public repo (#1228) * [simple_planning_simulator] add readme (#424) * add readme of simple_planning_simulator Signed-off-by: Takamasa Horibe * Update 
simulator/simple_planning_simulator/README.md * set transit_margin_time to intersect. planner (#460) * Fix pose2twist (#462) Signed-off-by: Takagi, Isamu * Ros2 vehicle info param server (#447) * add vehicle_info_param_server * update vehicle info * apply format * fix bug * skip unnecessary search * delete vehicle param file * fix bug * Ros2 fix topic name part2 (#425) * Fix topic name of traffic_light_classifier Signed-off-by: Takagi, Isamu * Fix topic name of traffic_light_visualization Signed-off-by: Takagi, Isamu * Fix topic name of traffic_light_ssd_fine_detector Signed-off-by: Takagi, Isamu * Fix topic name of traffic_light_map_based_detector Signed-off-by: Takagi, Isamu * Fix lint traffic_light_recognition Signed-off-by: Takagi, Isamu * Fix lint traffic_light_ssd_fine_detector Signed-off-by: Takagi, Isamu * Fix lint traffic_light_classifier Signed-off-by: Takagi, Isamu * Fix lint traffic_light_classifier Signed-off-by: Takagi, Isamu * Fix lint traffic_light_ssd_fine_detector Signed-off-by: Takagi, Isamu * Fix issues in hdd_reader (#466) * Fix some issues detected by Coverity Scan and Clang-Tidy * Update launch command * Add more `close(new_sock)` * Simplify the definitions of struct * fix: re-construct laneletMapLayer for reindex RTree (#463) * Rviz overlay render fix (#461) * Moved painiting in SteeringAngle plugin to update() Signed-off-by: Adam Dabrowski * super class now back to MFD Signed-off-by: Adam Dabrowski * uncrustified Signed-off-by: Adam Dabrowski * acquire data in mutex Signed-off-by: Adam Dabrowski * back to RTD as superclass Signed-off-by: Adam Dabrowski * Rviz overlay render in update (#465) * Moved painiting in SteeringAngle plugin to update() Signed-off-by: Adam Dabrowski * super class now back to MFD Signed-off-by: Adam Dabrowski * uncrustified Signed-off-by: Adam Dabrowski * acquire data in mutex Signed-off-by: Adam Dabrowski * removed unnecessary includes and some dead code Signed-off-by: Adam Dabrowski * Adepted remaining vehicle 
plugin classes to render-in-update concept. Returned to MFD superclass Signed-off-by: Adam Dabrowski * restored RTD superclass Signed-off-by: Adam Dabrowski Co-authored-by: Takamasa Horibe Co-authored-by: tkimura4 Co-authored-by: Takagi, Isamu <43976882+isamu-takagi@users.noreply.github.com> Co-authored-by: Kazuki Miyahara Co-authored-by: Makoto Tokunaga Co-authored-by: Adam DÄ…browski * Unify Apache-2.0 license name (#1242) * Remove use_sim_time for set_parameter (#1260) Signed-off-by: wep21 * handle wrong route (#1312) Signed-off-by: wep21 Co-authored-by: Yukihiro Saito * Dealing with wrong image size (#1320) * Dealing with wrong image size * Fix typo Signed-off-by: wep21 Co-authored-by: Yukihiro Saito * Perception components (#1368) * [bev_optical_flow]: component node Signed-off-by: wep21 * [object_merger]: component node Signed-off-by: wep21 * [object_range_splitter]: component node Signed-off-by: wep21 * [shape_estimation]: component node Signed-off-by: wep21 * [map_based_prediction]: component node Signed-off-by: wep21 * [naive_path_prediction]: component node Signed-off-by: wep21 * [roi_image_saver]: component node Signed-off-by: wep21 * [lidar_apollo_instance_segmentation]: component node Signed-off-by: wep21 * [object_flow_fusion]: component node Signed-off-by: wep21 * [traffic_light_map_based_detector]: component node Signed-off-by: wep21 * [dynamic_object_visualization]: component node Signed-off-by: wep21 * Fix typo Signed-off-by: wep21 * Use sensor data qos for traffic light recognition (#1440) Signed-off-by: wep21 * Fix/ssd fine detector (#1421) (#1468) * modify fitInFrame * cosmetic change * bug fix * bug fix * cosmetic change Co-authored-by: Yukihiro Saito * Feature/tl map based detector ros2 (#1475) * refactor and rectified distortion (#1397) * refactor and rectified distortion * fix typo * cosmetic change * add readme * cosmetic change * cosmetic change * update readme * Update 
perception/traffic_light_recognition/traffic_light_map_based_detector/src/node.cpp Co-authored-by: Akihito Ohsato * use image geometry (#1439) Co-authored-by: Akihito Ohsato * Apply lint Signed-off-by: wep21 Co-authored-by: Yukihiro Saito Co-authored-by: Akihito Ohsato * Add pre-commit (#1560) * add pre-commit * add pre-commit-config * add additional settings for private repository * use default pre-commit-config * update pre-commit setting * Ignore whitespace for line breaks in markdown * Update .github/workflows/pre-commit.yml Co-authored-by: Kazuki Miyahara * exclude svg * remove pretty-format-json * add double-quote-string-fixer * consider COLCON_IGNORE file when seaching modified package * format file * pre-commit fixes * Update pre-commit.yml * Update .pre-commit-config.yaml Co-authored-by: Kazuki Miyahara Co-authored-by: pre-commit Co-authored-by: Kenji Miyake <31987104+kenji-miyake@users.noreply.github.com> * Fix build error with TensorRT v8 (#1612) * Fix build error with TensorRT v8 Signed-off-by: wep21 * Fix typo Signed-off-by: wep21 * Add markdownlint and prettier (#1661) * Add markdownlint and prettier Signed-off-by: Kenji Miyake * Ignore .param.yaml Signed-off-by: Kenji Miyake * Apply format Signed-off-by: Kenji Miyake * suppress warnings for declare parameters (#1724) * fix for lanelet2_extension * fix for traffic light ssd fine detector * fix for topic_state_monitor * fix for dummy diag publisher * fix for remote cmd converter * fix for vehicle_info_util * fix for multi object tracker * fix for freespace planner * fix for autoware_error_monitor * add Werror for multi object tracker * fix for multi object tracker * add Werror for liraffic light ssd fine detector * add Werror for topic state monitor * add Werror * add Werror * add Werror * add Werror * fix style * suppress warnings for traffic light classifier (#1762) * add Werror * fix type * fix unused * Fix -Wunused-parameter (#1836) * Fix -Wunused-parameter Signed-off-by: Kenji Miyake * Fix mistake 
Signed-off-by: Kenji Miyake * fix spell * Fix lint issues Signed-off-by: Kenji Miyake * Ignore flake8 warnings Signed-off-by: Kenji Miyake Co-authored-by: Hiroki OTA * Fix compiler warnings (#1837) * Fix -Wunused-private-field Signed-off-by: Kenji Miyake * Fix -Wunused-variable Signed-off-by: Kenji Miyake * Fix -Wformat-security Signed-off-by: Kenji Miyake * Fix -Winvalid-constexpr Signed-off-by: Kenji Miyake * Fix -Wdelete-non-abstract-non-virtual-dtor Signed-off-by: Kenji Miyake * Fix -Wdelete-abstract-non-virtual-dtor Signed-off-by: Kenji Miyake * Fix -Winconsistent-missing-override Signed-off-by: Kenji Miyake * Fix -Wrange-loop-construct Signed-off-by: Kenji Miyake * Fix "invalid application of 'sizeof' to an incomplete type" Signed-off-by: Kenji Miyake * Ignore -Wgnu-anonymous-struct and -Wnested-anon-types Signed-off-by: Kenji Miyake * Fix lint Signed-off-by: Kenji Miyake * Ignore -Wno-deprecated-declarations in CUDA-related packages Signed-off-by: Kenji Miyake * Fix mistake Signed-off-by: Kenji Miyake * Fix -Wunused-parameter Signed-off-by: Kenji Miyake * Fix clang warnings (#1859) * Fix -Wreturn-std-move Signed-off-by: Kenji Miyake * Fix -Wunused-private-field Signed-off-by: Kenji Miyake * Ignore -Wnonportable-include-path for mussp Signed-off-by: Kenji Miyake * Fix -Wunused-const-variable Signed-off-by: Kenji Miyake * Fix "can not be used when making a shared object" Signed-off-by: Kenji Miyake * Invoke code formatter at pre-commit (#1935) * Run ament_uncrustify at pre-commit * Reformat existing files * Fix copyright and cpplint errors Signed-off-by: Kenji Miyake Co-authored-by: Kenji Miyake * add sort-package-xml hook in pre-commit (#1881) * add sort xml hook in pre-commit * change retval to exit_status * rename * add prettier plugin-xml * use early return * add license note * add tier4 license * restore prettier * change license order * move local hooks to public repo * move prettier-xml to pre-commit-hooks-ros * update version for bug-fix * apply 
pre-commit * Fix readme traffic light map based detector (#2282) * fix type of ~input/route in readme * fix some grammar mistakes * Change formatter to clang-format and black (#2332) * Revert "Temporarily comment out pre-commit hooks" This reverts commit 748e9cdb145ce12f8b520bcbd97f5ff899fc28a3. * Replace ament_lint_common with autoware_lint_common Signed-off-by: Kenji Miyake * Remove ament_cmake_uncrustify and ament_clang_format Signed-off-by: Kenji Miyake * Apply Black Signed-off-by: Kenji Miyake * Apply clang-format Signed-off-by: Kenji Miyake * Fix build errors Signed-off-by: Kenji Miyake * Fix for cpplint * Fix include double quotes to angle brackets Signed-off-by: Kenji Miyake * Apply clang-format Signed-off-by: Kenji Miyake * Fix build errors Signed-off-by: Kenji Miyake * Add COLCON_IGNORE (#500) Signed-off-by: Kenji Miyake * [traffic_light_ssd_fine_detector] support autoware auto msgs (#514) * remove COLCON_IGNORE * support autoware auto msgs * fix sort pacakagge * [traffic_light_map_based_detector] support autoware auto msgs (#511) * [traffic_light_classifier] support autoware auto msgs (#504) * update README of traffic_light_classifier (#649) * update README of traffic_light_classifier * Update label explanation (#652) * Update README.md * fix format * fix markdown lint * update readme Co-authored-by: Taichi Higashide * add readme for traffic light ssd fine detector (#637) * declare variable before subscriber registration (#705) * update traffic light classifier readme (#726) * Update traffic light topic name (#729) * Update traffic light topic name Signed-off-by: wep21 * Update traffic light topic name in perception Signed-off-by: wep21 Co-authored-by: mitsudome-r Co-authored-by: Daichi Murakami Co-authored-by: Daisuke Nishimatsu <42202095+wep21@users.noreply.github.com> Co-authored-by: Yukihiro Saito Co-authored-by: Taichi Higashide Co-authored-by: Kosuke Takeuchi Co-authored-by: Nikolai Morin Co-authored-by: Ryohsuke Mitsudome 
<43976834+mitsudome-r@users.noreply.github.com> Co-authored-by: simon-t4 <49227606+simon-t4@users.noreply.github.com> Co-authored-by: nik-tier4 <71747268+nik-tier4@users.noreply.github.com> Co-authored-by: Kazuki Miyahara Co-authored-by: Satoshi Tanaka Co-authored-by: Kenji Miyake <31987104+kenji-miyake@users.noreply.github.com> Co-authored-by: taikitanaka3 <65527974+taikitanaka3@users.noreply.github.com> Co-authored-by: tkimura4 Co-authored-by: Takamasa Horibe Co-authored-by: Takagi, Isamu <43976882+isamu-takagi@users.noreply.github.com> Co-authored-by: Makoto Tokunaga Co-authored-by: Adam DÄ…browski Co-authored-by: Akihito Ohsato Co-authored-by: Keisuke Shima <19993104+KeisukeShima@users.noreply.github.com> Co-authored-by: pre-commit Co-authored-by: Hiroki OTA Co-authored-by: Takeshi Ishita Co-authored-by: Kenji Miyake Co-authored-by: kminoda <44218668+kminoda@users.noreply.github.com> Co-authored-by: Yusuke Muramatsu --- .../traffic_light_classifier/.gitignore | 1 + .../traffic_light_classifier/CMakeLists.txt | 184 +++++++ perception/traffic_light_classifier/README.md | 131 +++++ .../cfg/HSVFilter.cfg | 29 + .../image/TrafficLightDataStructure.jpg | Bin 0 -> 48292 bytes .../classifier_interface.hpp | 36 ++ .../cnn_classifier.hpp | 113 ++++ .../color_classifier.hpp | 94 ++++ .../traffic_light_classifier/nodelet.hpp | 89 ++++ .../traffic_light_classifier.launch.xml | 26 + .../traffic_light_classifier/package.xml | 25 + .../src/cnn_classifier.cpp | 265 ++++++++++ .../src/color_classifier.cpp | 248 +++++++++ .../traffic_light_classifier/src/nodelet.cpp | 122 +++++ .../utils/trt_common.cpp | 149 ++++++ .../utils/trt_common.hpp | 146 ++++++ .../CMakeLists.txt | 47 ++ .../README.md | 37 ++ ...raffic_light_map_based_detector.param.yaml | 7 + ...raffic_light_map_based_detector_result.svg | 3 + .../traffic_light_map_based_detector/node.hpp | 131 +++++ ...raffic_light_map_based_detector.launch.xml | 18 + .../package.xml | 32 ++ .../src/node.cpp | 494 ++++++++++++++++++ 
.../.gitignore | 1 + .../CMakeLists.txt | 168 ++++++ .../traffic_light_ssd_fine_detector/README.md | 63 +++ .../nodelet.hpp | 114 ++++ ...traffic_light_ssd_fine_detector.launch.xml | 37 ++ .../lib/include/cuda_utils.hpp | 75 +++ .../lib/include/trt_ssd.hpp | 100 ++++ .../lib/src/trt_ssd.cpp | 190 +++++++ .../package.xml | 36 ++ .../src/nodelet.cpp | 361 +++++++++++++ .../traffic_light_ssd_fine_detector.xml | 10 + 35 files changed, 3582 insertions(+) create mode 100644 perception/traffic_light_classifier/.gitignore create mode 100644 perception/traffic_light_classifier/CMakeLists.txt create mode 100644 perception/traffic_light_classifier/README.md create mode 100755 perception/traffic_light_classifier/cfg/HSVFilter.cfg create mode 100644 perception/traffic_light_classifier/image/TrafficLightDataStructure.jpg create mode 100644 perception/traffic_light_classifier/include/traffic_light_classifier/classifier_interface.hpp create mode 100644 perception/traffic_light_classifier/include/traffic_light_classifier/cnn_classifier.hpp create mode 100644 perception/traffic_light_classifier/include/traffic_light_classifier/color_classifier.hpp create mode 100644 perception/traffic_light_classifier/include/traffic_light_classifier/nodelet.hpp create mode 100644 perception/traffic_light_classifier/launch/traffic_light_classifier.launch.xml create mode 100644 perception/traffic_light_classifier/package.xml create mode 100644 perception/traffic_light_classifier/src/cnn_classifier.cpp create mode 100644 perception/traffic_light_classifier/src/color_classifier.cpp create mode 100644 perception/traffic_light_classifier/src/nodelet.cpp create mode 100644 perception/traffic_light_classifier/utils/trt_common.cpp create mode 100644 perception/traffic_light_classifier/utils/trt_common.hpp create mode 100644 perception/traffic_light_map_based_detector/CMakeLists.txt create mode 100644 perception/traffic_light_map_based_detector/README.md create mode 100644 
perception/traffic_light_map_based_detector/config/traffic_light_map_based_detector.param.yaml create mode 100644 perception/traffic_light_map_based_detector/docs/traffic_light_map_based_detector_result.svg create mode 100644 perception/traffic_light_map_based_detector/include/traffic_light_map_based_detector/node.hpp create mode 100644 perception/traffic_light_map_based_detector/launch/traffic_light_map_based_detector.launch.xml create mode 100644 perception/traffic_light_map_based_detector/package.xml create mode 100644 perception/traffic_light_map_based_detector/src/node.cpp create mode 100644 perception/traffic_light_ssd_fine_detector/.gitignore create mode 100644 perception/traffic_light_ssd_fine_detector/CMakeLists.txt create mode 100644 perception/traffic_light_ssd_fine_detector/README.md create mode 100644 perception/traffic_light_ssd_fine_detector/include/traffic_light_ssd_fine_detector/nodelet.hpp create mode 100644 perception/traffic_light_ssd_fine_detector/launch/traffic_light_ssd_fine_detector.launch.xml create mode 100644 perception/traffic_light_ssd_fine_detector/lib/include/cuda_utils.hpp create mode 100644 perception/traffic_light_ssd_fine_detector/lib/include/trt_ssd.hpp create mode 100644 perception/traffic_light_ssd_fine_detector/lib/src/trt_ssd.cpp create mode 100644 perception/traffic_light_ssd_fine_detector/package.xml create mode 100644 perception/traffic_light_ssd_fine_detector/src/nodelet.cpp create mode 100644 perception/traffic_light_ssd_fine_detector/traffic_light_ssd_fine_detector.xml diff --git a/perception/traffic_light_classifier/.gitignore b/perception/traffic_light_classifier/.gitignore new file mode 100644 index 0000000000000..60baa9cb833f9 --- /dev/null +++ b/perception/traffic_light_classifier/.gitignore @@ -0,0 +1 @@ +data/* diff --git a/perception/traffic_light_classifier/CMakeLists.txt b/perception/traffic_light_classifier/CMakeLists.txt new file mode 100644 index 0000000000000..4e591fa478c39 --- /dev/null +++ 
b/perception/traffic_light_classifier/CMakeLists.txt @@ -0,0 +1,184 @@ +cmake_minimum_required(VERSION 3.5) +project(traffic_light_classifier) + +if(NOT CMAKE_CXX_STANDARD) + set(CMAKE_CXX_STANDARD 14) +endif() +if(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES "Clang") + add_compile_options(-Wall -Wextra -Wpedantic -Wno-deprecated-declarations -Werror) +endif() + +option(CUDA_VERBOSE "Verbose output of CUDA modules" OFF) +# set flags for CUDA availability +option(CUDA_AVAIL "CUDA available" OFF) +find_package(CUDA) +if (CUDA_FOUND) + find_library(CUBLAS_LIBRARIES cublas HINTS + ${CUDA_TOOLKIT_ROOT_DIR}/lib64 + ${CUDA_TOOLKIT_ROOT_DIR}/lib + ) + if (CUDA_VERBOSE) + message(STATUS "CUDA is available!") + message(STATUS "CUDA Libs: ${CUDA_LIBRARIES}") + message(STATUS "CUDA Headers: ${CUDA_INCLUDE_DIRS}") + endif () + set(CUDA_AVAIL ON) +else() + message(STATUS "CUDA NOT FOUND") + set(CUDA_AVAIL OFF) +endif (CUDA_FOUND) + +# set flags for TensorRT availability +option(TRT_AVAIL "TensorRT available" OFF) +# try to find the tensorRT modules +find_library(NVINFER NAMES nvinfer) +find_library(NVONNXPARSER nvonnxparser) +find_library(NVINFER_PLUGIN NAMES nvinfer_plugin) +if(NVINFER AND NVONNXPARSER AND NVINFER_PLUGIN) + if (CUDA_VERBOSE) + message(STATUS "TensorRT is available!") + message(STATUS "NVINFER: ${NVINFER}") + message(STATUS "NVPARSERS: ${NVPARSERS}") + message(STATUS "NVINFER_PLUGIN: ${NVINFER_PLUGIN}") + message(STATUS "NVONNXPARSER: ${NVONNXPARSER}") + endif () + set(TRT_AVAIL ON) +else() + message(STATUS "TensorRT is NOT Available") + set(TRT_AVAIL OFF) +endif() + +# set flags for CUDNN availability +option(CUDNN_AVAIL "CUDNN available" OFF) +# try to find the CUDNN module +find_library(CUDNN_LIBRARY + NAMES libcudnn.so${__cudnn_ver_suffix} libcudnn${__cudnn_ver_suffix}.dylib ${__cudnn_lib_win_name} + PATHS $ENV{LD_LIBRARY_PATH} ${__libpath_cudart} ${CUDNN_ROOT_DIR} ${PC_CUDNN_LIBRARY_DIRS} ${CMAKE_INSTALL_PREFIX} + PATH_SUFFIXES lib lib64 bin + 
DOC "CUDNN library." ) +if(CUDNN_LIBRARY) + if (CUDA_VERBOSE) + message(STATUS "CUDNN is available!") + message(STATUS "CUDNN_LIBRARY: ${CUDNN_LIBRARY}") + endif () + set(CUDNN_AVAIL ON) + +else() + message(STATUS "CUDNN is NOT Available") + set(CUDNN_AVAIL OFF) +endif() + +# Download caffemodel and prototxt +set(PRETRAINED_MODEL_LINK "https://drive.google.com/uc?id=15CQceCn9TZDU6huKJacQvUnDiLHionb3") +set(PRETRAINED_MODEL_HASH 7dc31c696b0400ddfc2cc5521586fa51) +set(LAMP_LABEL_LINK "https://drive.google.com/uc?id=1D7n3oGSWLkWgxET6PcWqEzOhmmPcqM52") +set(LAMP_LABEL_HASH 20167c8e9a1f9d2ec7b0b0088c4100f0) + +find_program(GDOWN_AVAIL "gdown") +if (NOT GDOWN_AVAIL) + message(STATUS "gdown: command not found. External files could not be downloaded.") +endif() +set(PATH "${CMAKE_CURRENT_SOURCE_DIR}/data") +if (NOT EXISTS "${PATH}") + execute_process(COMMAND mkdir -p ${PATH}) +endif() +set(FILE "${PATH}/traffic_light_classifier_mobilenetv2.onnx") +message(STATUS "Checking and downloading traffic_light_classifier_mobilenetv2.onnx") +if (EXISTS "${FILE}") + file(MD5 "${FILE}" EXISTING_FILE_HASH) + if (NOT "${PRETRAINED_MODEL_HASH}" EQUAL "${EXISTING_FILE_HASH}") + message(STATUS "... file hash changed. Downloading now ...") + execute_process(COMMAND gdown --quiet "${PRETRAINED_MODEL_LINK}" -O ${PATH}/traffic_light_classifier_mobilenetv2.onnx) + endif() +else() + message(STATUS "... file does not exist. Downloading now ...") + execute_process(COMMAND gdown --quiet "${PRETRAINED_MODEL_LINK}" -O ${PATH}/traffic_light_classifier_mobilenetv2.onnx) +endif() + +set(FILE "${PATH}/lamp_labels.txt") +message(STATUS "Checking and downloading lamp_labels.txt") +if (EXISTS "${FILE}") + file(MD5 "${FILE}" EXISTING_FILE_HASH) + if (NOT "${LAMP_LABEL_HASH}" EQUAL "${EXISTING_FILE_HASH}") + message(STATUS "... file does not exist. Downloading now ...") + execute_process(COMMAND gdown --quiet "${LAMP_LABEL_LINK}" -O ${PATH}/lamp_labels.txt) + endif() +else() + message(STATUS "... 
file does not exist. Downloading now ...") + execute_process(COMMAND gdown --quiet "${LAMP_LABEL_LINK}" -O ${PATH}/lamp_labels.txt) +endif() + +find_package(ament_cmake_auto REQUIRED) +ament_auto_find_build_dependencies() +find_package(OpenCV REQUIRED) +find_package(Boost REQUIRED COMPONENTS filesystem) + +if(TRT_AVAIL AND CUDA_AVAIL AND CUDNN_AVAIL) + add_definitions(-DENABLE_GPU) + + include_directories( + utils + ${OpenCV_INCLUDE_DIRS} + ${CUDA_INCLUDE_DIRS} + ) + + ament_auto_add_library(libutils SHARED + utils/trt_common.cpp + ) + target_link_libraries(libutils + ${OpenCV_LIBRARIES} + ${NVINFER} + ${NVONNXPARSER} + ${NVINFER_PLUGIN} + ${CUDA_LIBRARIES} + ${CUBLAS_LIBRARIES} + ${CUDA_curand_LIBRARY} + ${CUDNN_LIBRARY} + ${Boost_LIBRARIES} + ) + + ament_auto_add_library(traffic_light_classifier_nodelet SHARED + src/color_classifier.cpp + src/cnn_classifier.cpp + src/nodelet.cpp + ) + target_link_libraries(traffic_light_classifier_nodelet + libutils + ${OpenCV_LIBRARIES} + ) + rclcpp_components_register_node(traffic_light_classifier_nodelet + PLUGIN "traffic_light::TrafficLightClassifierNodelet" + EXECUTABLE traffic_light_classifier_node + ) + + ament_auto_package(INSTALL_TO_SHARE + data + launch + ) + +else() + message(STATUS "CUDA and/or TensorRT were not found. 
build only color classifier") + + include_directories( + ${OpenCV_INCLUDE_DIRS} + ) + + ament_auto_add_library(traffic_light_classifier_nodelet SHARED + src/color_classifier.cpp + src/nodelet.cpp + ) + target_link_libraries(traffic_light_classifier_nodelet + ${OpenCV_LIBRARIES} + ) + + rclcpp_components_register_node(traffic_light_classifier_nodelet + PLUGIN "traffic_light::TrafficLightClassifierNodelet" + EXECUTABLE traffic_light_classifier_node + ) + + ament_auto_package(INSTALL_TO_SHARE + data + launch + ) + +endif() diff --git a/perception/traffic_light_classifier/README.md b/perception/traffic_light_classifier/README.md new file mode 100644 index 0000000000000..6b07c73e93b3d --- /dev/null +++ b/perception/traffic_light_classifier/README.md @@ -0,0 +1,131 @@ +# traffic_light_classifier + +## Purpose + +traffic_light_classifier is a package for classifying traffic light labels using cropped image around a traffic light. This package has two classifier models: `cnn_classifier` and `hsv_classifier`. + +## Inner-workings / Algorithms + +### cnn_classifier + +Traffic light labels are classified by MobileNetV2. + +### hsv_classifier + +Traffic light colors (green, yellow and red) are classified in HSV model. + +### About Label + +The message type is designed to comply with the unified road signs proposed at the [Vienna Convention](https://en.wikipedia.org/wiki/Vienna_Convention_on_Road_Signs_and_Signals#Traffic_lights). This idea has been also proposed in [Autoware.Auto](https://gitlab.com/autowarefoundation/autoware.auto/autoware_auto_msgs/-/merge_requests/16). + +There are rules for naming labels that nodes receive. One traffic light is represented by the following character string separated by commas. `color1-shape1, color2-shape2` . + +For example, the simple red and red cross traffic light label must be expressed as "red-circle, red-cross". 
+ +These colors and shapes are assigned to the message as follows: +![TrafficLightDataStructure.jpg](./image/TrafficLightDataStructure.jpg) + +## Inputs / Outputs + +### Input + +| Name | Type | Description | +| --------------- | ---------------------------------------------------------- | ---------------------- | +| `~/input/image` | `sensor_msgs::msg::Image` | input image | +| `~/input/rois` | `autoware_auto_perception_msgs::msg::TrafficLightRoiArray` | rois of traffic lights | + +### Output + +| Name | Type | Description | +| -------------------------- | -------------------------------------------------------- | ------------------- | +| `~/output/traffic_signals` | `autoware_auto_perception_msgs::msg::TrafficSignalArray` | classified signals | +| `~/output/debug/image` | `sensor_msgs::msg::Image` | image for debugging | + +## Parameters + +### Node Parameters + +| Name | Type | Description | +| ----------------- | ---- | ------------------------------------------- | +| `classifier_type` | int | if the value is `1`, cnn_classifier is used | + +### Core Parameters + +#### cnn_classifier + +| Name | Type | Description | +| ----------------- | ---- | ------------------------------------ | +| `model_file_path` | str | path to the model file | +| `label_file_path` | str | path to the label file | +| `precision` | str | TensorRT precision, `fp16` or `int8` | +| `input_c` | str | the channel size of an input image | +| `input_h` | str | the height of an input image | +| `input_w` | str | the width of an input image | + +#### hsv_classifier + +| Name | Type | Description | +| -------------- | ---- | ---------------------------------------------- | +| `green_min_h` | int | the minimum hue of green color | +| `green_min_s` | int | the minimum saturation of green color | +| `green_min_v` | int | the minimum value (brightness) of green color | +| `green_max_h` | int | the maximum hue of green color | +| `green_max_s` | int | the maximum saturation of green color | +| 
`green_max_v` | int | the maximum value (brightness) of green color | +| `yellow_min_h` | int | the minimum hue of yellow color | +| `yellow_min_s` | int | the minimum saturation of yellow color | +| `yellow_min_v` | int | the minimum value (brightness) of yellow color | +| `yellow_max_h` | int | the maximum hue of yellow color | +| `yellow_max_s` | int | the maximum saturation of yellow color | +| `yellow_max_v` | int | the maximum value (brightness) of yellow color | +| `red_min_h` | int | the minimum hue of red color | +| `red_min_s` | int | the minimum saturation of red color | +| `red_min_v` | int | the minimum value (brightness) of red color | +| `red_max_h` | int | the maximum hue of red color | +| `red_max_s` | int | the maximum saturation of red color | +| `red_max_v` | int | the maximum value (brightness) of red color | + +## Assumptions / Known limits + + + +## (Optional) Error detection and handling + + + +## (Optional) Performance characterization + + + +## References/External links + +[1] M. Sandler, A. Howard, M. Zhu, A. Zhmoginov and L. Chen, "MobileNetV2: Inverted Residuals and Linear Bottlenecks," 2018 IEEE/CVF Conference on Computer Vision and Pattern Recognition, Salt Lake City, UT, 2018, pp. 4510-4520, doi: 10.1109/CVPR.2018.00474. + +## (Optional) Future extensions / Unimplemented parts + + diff --git a/perception/traffic_light_classifier/cfg/HSVFilter.cfg b/perception/traffic_light_classifier/cfg/HSVFilter.cfg new file mode 100755 index 0000000000000..a662704741d2a --- /dev/null +++ b/perception/traffic_light_classifier/cfg/HSVFilter.cfg @@ -0,0 +1,29 @@ +#! 
/usr/bin/env python + +# set up parameters that we care about +PACKAGE = 'traffic_light_classifier' + +from dynamic_reconfigure.parameter_generator_catkin import * + +gen = ParameterGenerator () +# def add (self, name, paramtype, level, description, default = None, min = None, max = None, edit_method = ""): +gen.add ("green_min_h", int_t, 0, "min h green", 50, 0, 180) +gen.add ("green_max_h", int_t, 0, "max h green", 120, 0, 180) +gen.add ("green_min_s", int_t, 0, "min s green", 100, 0, 255) +gen.add ("green_max_s", int_t, 0, "max s green", 200, 0, 255) +gen.add ("green_min_v", int_t, 0, "min v green", 150, 0, 255) +gen.add ("green_max_v", int_t, 0, "max v green", 255, 0, 255) +gen.add ("yellow_min_h", int_t, 0, "min h yellow", 0, 0, 180) +gen.add ("yellow_max_h", int_t, 0, "max h yellow", 50, 0, 180) +gen.add ("yellow_min_s", int_t, 0, "min s yellow", 80, 0, 255) +gen.add ("yellow_max_s", int_t, 0, "max s yellow", 200, 0, 255) +gen.add ("yellow_min_v", int_t, 0, "min v yellow", 150, 0, 255) +gen.add ("yellow_max_v", int_t, 0, "max v yellow", 255, 0, 255) +gen.add ("red_min_h", int_t, 0, "min h red", 160, 0, 180) +gen.add ("red_max_h", int_t, 0, "max h red", 180, 0, 180) +gen.add ("red_min_s", int_t, 0, "min s red", 100, 0, 255) +gen.add ("red_max_s", int_t, 0, "max s red", 255, 0, 255) +gen.add ("red_min_v", int_t, 0, "min v red", 150, 0, 255) +gen.add ("red_max_v", int_t, 0, "max v red", 255, 0, 255) + +exit (gen.generate (PACKAGE, "traffic_light_classifier", "HSVFilter")) diff --git a/perception/traffic_light_classifier/image/TrafficLightDataStructure.jpg b/perception/traffic_light_classifier/image/TrafficLightDataStructure.jpg new file mode 100644 index 0000000000000000000000000000000000000000..52bac72b28d5c20f241ba106a9203e22b49f0d84 GIT binary patch literal 48292 zcmeFZ1yo#Jwk>>$LW2_^xDyB#+zAjogy0a|-62?TNFX7F2MO-(?h@P`g1fsr{FUy$ zx4)$C?eE(E#(iVFVjS%3+WV{{d(AcH-1YF|VG+Q1B`zfnfM5Us1pNXYW&jZY5gr}^ 
z9u5%!0RagK@ev9(Dhe_(3O?3jbZlY*QW9bUA|f&>CK@sd21+6#TCQgd%q;Be?4&e2 z0^F?pOl<6|KRW>-At9k4qu`;U;<1twk+c4%Uk}XyCL#YOk6@z>Xo9B zvWn_kHFX0+BV!X&GjoS`j!w=luJ8RmeDn|a^f@p*;%j77bj-Kd)U@=B%&hF3+|si0 zipr|$n%eKJZS5VMUEMt+qhsR}lRu`WmzGyn*VZ>Sx3-UtPfpLyFD|dHf5rs@z<&)3 z`u(qg{T3G{G%greSTHQY&$vJ^&d?2v2@6Ng0*@sukDzDwn1a;@5&LCWN=XY6C7Z$_ zj=ufyBU~!>CF-M}q5UPY|5#wY|E0+OHL$F|5>i5WPBNfN@>nVxJqeQvIEaXj+?1P|X8xIF+b z&odqXFUF?JJ@euJ*W!Qte2f#$sSrv<%^5_k@A?r8zh0d4 z0=p|2x^9W|sy@O8fcSI3j>I&_18@y*)^y1n@c=-EK!4Ifkk`FWtj#?N)dR4??vbYR z03;j7Jpg`4f|vckZ=?O*mFeUIu!*1f04#o&y=VJ>BmZwc+YRP|v04;UlhsuXvkTtu zgs;crh+oY=0PiIlc~&~V!RA$p4!#bNwF`A401EDk?CZNz%uO3>J~oMk#+Di7sx6dt z!lu9Wdyeu+LF_sYuseV#_he<86>lSEd=az5@8iNeJR+6z5D;FxNJjpuj7oQ|k!={b zNjI+{7|E^uVG*Ns$_LiNZn|g)B!(^bzh+a@CL}B;wD7wq^rGPcd*-jvRk;`vBx|GLA-YU%e@N zKixbkt8rovC&ugOTx|-bxvy9w-Utz8aKZ)`Qh>g|IVy?8r!(XugsH&%d;|OJc4sRF5dMs>gswz(5!YD#lV$sig3}t~ zP;;))cG4LPvClMU!HbfRZ}Op=?a^UXv71JtHqYJZSLWdH>ibn(_q8Y(u)E60STHoe zu&6ZxL&0O9nJx8p{~Dw7e%_VADh5>-6>W(z@O)gpg%bfzx#IR?b_@1i*jHifK~zEU zbh*A#i|4x*pC{_+^R&$m+=#tLE)8|YXJ&xje9 zn0;|YQI(%*=az`*hq(9TT@1zVDf;;Zrlh-Qs2)8J*tfB=+*hC4XfpigQsu@PA7vi%lazXRK;a}IGov8Qsad&Yu(G77UF*D1>ai} zN9xKP)QoghUXF~fD!3=ytdc|7W}Kys+44F*z;Ah!d)gVt;@)L zCgFyb8~7>CkjO)Lo&6&I(|iw+@6)GTwQKt+gk}V8)AKfUH|AHuZGq>55tKYPY1w$) z07!_W-^0TuOPynU^y^18rO20^3@sAC+vS5xP5iNKxr=g5&8aTrU)k_`|@#Hm8xpz z4T+ldG@}~HT@fX{<3)e|?a zn4w77gDNqdwna}4lIW~E-yc^H>e{ad)p0~!cY{Okz{P+P3HbvMYA90DP>!%^O^K5y z1c#H5w==En2kT%SU80^2QRdoU5bACwIt0-SsTf?&h*w2j&TCJ%SlKYJNHE6;#v4H- ztHFr@!8Kq3T5x)5&BCQDKYi_#R}~|hJx`IirX*eLs*wuVOb z^CI>xZ8b(Lnfw_vJg_e}6&SL3mW-AgxJrI>mmhmrY=Qh~w}0j>^10*j{ZPpjB+j7@ zuWJrs=*gk5u?%;L%PVv0iJxeprL7d)Ke5ljU^r_qO9&^WjJnAN8A5Orb1>-CCjBIZ zbDm~n;ReV%nCrN;l6Q6=->pOhv&GxJ^(4D2OEawsld#V5u%1A7AMNCXAO23~p5?%@ z5)N!6j)69@RVg!|hb$8(=5}YSA;2*rD*l=2^^bX<(S$&09I8b$-?ZAxkCq%MSzED= z9l3cnIDO&M-S|(RAx#gb@MSc^CZq)dyV-??mj@yh{%Rc3I~K%M4%5kIe*4F9D6?oR z9S)=19LCS_FYP^EG4cxNPZh3_QEAjrCEE((@Q{>)>cs4wbuifFt+1!H`gp@Eb}S^` 
zrq2}1JZ-+?(N4!~=~N#bbC)to-_9JlervTTO;aE4`V_36mwPu~eN}tC=$woiOYd88 z`875(F+Kq01<`k$efSu`rL=2lFZvJ`ZQWD~$U#1^h9sx}C%(cm z;(Ab(1Vw=2Hey%SgpU7bn@5|g@exKyf)~vNuYjJ;h~<)CxbZLsfn{6J1JKpfQ(-~3 zaxdN>cwcYbbjnSy@>_-Rd&Tj8`0t*>!P~%C!8?~0C@6h2o?nClJ?NaY2VfFHdJQBa zPe#kXosxCtp`fKSBl3x;5SKm9-#CJ0$?axW6l&BAS*zFkcD~y&lCQMk0>rNH7h8Vfp#XT1q|-)4iXL0x~Z8Q#tI`B9W1#6~y%Hs@nY(3_WM-DKx7P@2XI4Q+s}1#>THF{H=JgzbRFuTQwqhU|8P(m;>I!42V=A zu9UHUlEc>IwNW>63G-`(1WJVC#VArFNQh~dY|X@Oqxy#l=}$q#<3o}V1e^VV0~+hu z@VJs=6mOwzA=p^JGjbWHF|eIvUc5SX8b8zYt0knXhUjq}{yqp?v!gJ-Bj1@d$WbcE z1V8pkQ==Qt#Gpc=4}M=(xSvxHbw=D0sHrjCjbg##Z1wd9oULYAfi4Bm>FT<4KiY8l z&L>MNE2~Ufe0+*WTsgBf&1W#)lo2|g^&6%m^oKPCS|F^}!d-NPw&w$EKTgxDk2n&F zqoj0eF~p5wbiMOp{O|!_gVF#^sGG?6O9*O<8~do{tR`+0J_X$)cb{wT8D|VWcq;Ta z@U;C)bzkbH$fHa`UK`lX;3^w@^8=T=>V`CKMCtOP@d9QEjxf@3geC0Om?`97KwRW; zohdAMEZAkIU4euw_Iw-B5`~=UgA8nG8gcGX{fEe|K}2{~-W%oKIhgtLL#=Ncfx6p^ z$>Kn6X)wOzW@lnkp}t=$?!?ec`V&?8ZtD^TMI&UUHbY`n1iD=`iBW9eY!dTbuUR4E z)JCFl1xB0ddZaWwPy!~UG@u!6pq{a`er4BC-kX(uum(=K6K`b;gPqJ0nxabJHO@P$MTw|H=?h-`CjL?h`!`EOaq7J z#Ru1KK;&Q#FB1p6s8fA6+sQtBd#%ghY1XD!>JTnT1P;E$?oGl>l_U$sr;P7^lpT+r zKwuaY1K1;?aqFT}`t>sK z7arEsy%?TaiiaFz5Hm>?E;-1EcUUisRauWj^#az(#>SI!$PRyPF4>{kvL3fAp z0-QFw9tFt+VOZTf@X$@*z*u>jqg4(fspaGlvJ=w}C7l>+Jp3-SH++llcGS&Nl<2&% zQ2+!$4rW*k8vqc51OG-xu-1z_0AxSBcCH5>fLD|S>ODv3w|;sLK+%ri9c^%hDfrt_ z^vBF2fr?xW?RGPcx-zPe$1Dmxs&rkO+pRoH#qTl1?5~@AIb@!0am?{y*{;mS+<>Ya@~+p8ar%z1Bc9h~;!=P(|x1RCW5H=G}FHBce9R0Xwq4{-rw@ ziC}+PAmc+xx%?~V{Q(}+V0d^CQDQSu$Cty#+e^6_o%xK4wTUDf8wP!WdqoM`lSC2S z4X1ftgPa^k(vMuzk?ScOou5AVt`kSs>LvOvl<|K1IqG&+l3sAY8-rSj~Vmr zU5Cm_Mc=a9BfWT|k`mnpcz_(*Q%HqJwkClO_{4jo zh#YP$sF_iX3y%ct#(t{6y$pSG7!z{9Wne??Jrkc;p+DOCq-q}TupwyYi*&-HeI(K~ zG#U(gGJ3yeT@jRHUQCq=tykO2^W>Ep;ei1+&hoj%gxH0*mm2z@?(0N230u-SA&9i&N_M z4S!=?wuugf@wZMmb|v0) z!K96wM;{%j66ys-67O6nsHype~ z!-%HVr4N&l8pBllOykyzL6!&xb@5(KTRU$)wDG*6OZbB}2Z8qU61-~8=cYwCg@8@D z1iMvvO&5uJ4*o_&m{vbFXyZH)Y3yBr>OB_zL4;g`|Ni=2K|w)%3LA9FL!5L-sL!;Q z_3EhgLNYzxe1Sdsu6s_~g`C7pr>;J{Kg2R3aO&@woVA|q0iehXc>uJTy*6QfD}AAm 
zfVEI?)nfDk*y-4L07Rg*@&Cn;0ag7M1~t{2wl{~mF*PchrCgc`_VMFm$%VN=<5Pqp z^9Fg%84*|O^9xLd;_6mP6hAd-hZp|Kgj)nw{P7Y_8;nm=qvFk(z#}>a!tjLUp_6DW$uA^Dn>TXHnk5$fV`?qZ+i2Qs(TmahPF71QV#j54SC%i_nb=wo z=x1=M^5^%5N5)6?H9jUzQO&@HeMV2f(_$l=K5kkLVI9({bbN1{WHJ1`;qxGA!dxdo4_546eLnw5Q?kG@MdW5I!2 zxnIW4#nSa%_FF(~CYcofk}!D4u3ZW@z@ER?l<~d;Win*r=}=w5McXApyxrs|HfUHZ zQ>`^#2+|=!Zi3IlG&ni+?bB($iQ47+&NnqtTjNG%k0^X#vNgGNZPOy^5Cq^sE8%v} z-zDf@=~veTI&wXu??%y}vT()Aer+@ckC+UP;EQxmVc~hcRK&PaQK^xcz(i(Z;bLtbsR9+E$wz6Oo#gGPQiRvFnn+)@}4dvI80E#?*Eg6zg@o$(fGf1^)Fri@Eon&Ei%?W+?Wrzg&|NcY&BT|wDs?&lW8_ttnrK%=L9pJ$wPR_ZNx;bi35utv{2zF60HB_Ut#n*Zx z0-h!CFz7XT5y3XM750sBHY%~pE@E!w)JA#JBZLa9Us~8?x2*NTW+@ThYOoH4_df6e zNYE~;_r0I8W=uyXw2RmE<%NFn`Hf`nEYww2)E!tjV}H1QW(Dy&h&KO+g9OvJ2_B;c z+&ASv0H>+E(2@@WTJmv{Uf-8J03)Av{#u|u2Jh}R2Rv4xmrisre z>;gXb93tL+5L=3 zA6UgA>9F9qBE8mdNxz{iO4NnR1gScb&aDs;|6Y0M7nuNF^^?~sA^M${+5@m9>X@gd z{gXSh6g;~#c>ppnCkg;ov(&1O>#?H2RQM)N&GOqa#;6Vhg!!laFZW-JP+1TpWySP{ z?Q>^R8N40O4A-k2=ny87PpUokM&>!22}MXj@nFfb<5#I1nSDJ&@fo?!fLo4Lc1;L> z;aAP=Hx*1K(?Z>|_+vg3Y!5}CWu%FsG6*)SH!_$}05<-l>b z=lG`T!8vGg7S0UO;T=?%cTw`?II~>g)i0GyQ2m0(b3C%u&7yV$cDG<q0XbocR#Ea_4Q(d#^i}Ij>Dy z=x$A~uE{JYh76j#4}k9eGllKXy8rjy53d@~v`WZz=cVnnB^n8R3Q2xGg$rkQuF$6t zOQql!VEadq`(ODTCvqw23Wwe6`pEAA5W<0uh7V2OiW1QDjVE{t`bQ-F$3Ywhv&0;V zwlShijp=b_A9uEPtm4g(q+u2cFK)y(7-jb`i6`aIwY&MsM}OJ}_7m^(#@vFJ8q~yF=Vkz(VyRm#(;M3C=Pyo-FS&}jO$*bG69#wIk-)2F9@3z> zS5@DED>9{1+O#=W>{9R^piK?@%k~==V)0vsOGYU|KgoLI7PkYP^cO|_u9dFGu;xtF zgX-xU)vVG*HCGsvyajpv{+K9+6HWIDRufH-uwQBYI8_Ey4IzA<+q~}hn4#WXKoz#d zQW;6yUeXkd&SOwv7>4sD_vq<}ur8sEqEdMhvPJENf7%2iV-d;54JGbl!qTD*H0NXQF(-0_a!cH}F~whY-LBA33ZZx3QT=P2;rpsPCL;e_BaowI+*#7kgH7 zf6PYY->JNd4wZxaQ=8oo56@2e4`l(BhvJKa_|kT7Yu?AV>l%oolZP8DZ+=`;k=pGaA@h|mYMz~8R=f;{7Gc_UhYrL;q)jLb{l{T1nwC_EkY z(KU%D`2Dq_jb=QSp``pIvexuG?)1xIhgU(T0eXb-aSgOME~ls ztXep07keH^3+|~mOr#jcy-*1pIFP82A&@kDq8cW4;*p9=X}Ga0q9 z*)vH}#vKxSpR-n<&;MD2sYa&GpQq!!gLc0d;EKYFo0TPT&KGBZx^`ZaA&`ugFX>C` zzP=a_huMYx0BEWfDOkbz=^EOiG6Fb}%Qjayjt;YGuO-IFDWFtD+rlxO1?ky};Jvpg 
zSOp6D{|^BEKOF)l&@x;CMV6s3hv1p96Zazo1zA0{q{9eP(o>>QuR|umBPf`nr@zOt z0^j>Vtu=6+_v~KZWs<#y9AqAVW@rJ@;mu*1Uvw!|j(&{>HR?Pey@XElNSrl4o}UIAO|KP+TdHYdaqDONldv7UqYeMb0sRq| z{s(HLzn5hHSU>%??|1n94_=Eu)OWvkhEm{l;lw-_Tv^5PN;Y->(Zb~phxxeHt0A2& zXQzOBjqgWytcd9&h4*vmQ*()BFM}mg$gpT(To?Vf5qw=HB-07i@C-54V9d&4DC?Z# zUT;P-*q`5zknC*U)m=}_{nXN6&|yhV0T?{!({YTLkuQQW4SH@K!WlijH)gbVw?aI5 zmzY+SVbQ{`Do1rFSt>?0TNpqV2!#LT=KQT%DgB)o2YP-v(dA49srBWt6OMBz!{ydB z9ZC?)D}uq_nxvarJq>%6M`hVuO)U&=D*4va%y^T?&S!g>MixtQ=tJwiW_5El=q+ zMC_^?I;Rbe*B`^_(F{_Ci?eh`R&PO26h}ozm)wjiDg(!@HrPJ+a)=qTl-dIu0fM20 znl4di45^8Hn}zchHmpXq_rBWV#c-Cda$i~^^6OgQ;q z--yjtd^|YJM?rI;{Q*F9{-n<(9~VjBkI@4L2wuAd3%&w&vhxej_#~I(b|gE4R9H^*%Zdz5E5ZvkTx&tPl7%vWr@eJ?V>$?#nZTU^kusv#z2W_R zWX=Yc3-t|jng}Smcz<>$unB(gi||{L_4NVxuBh9=1J8aF)xmRfXm_?;!GFlxNqfb0 z_uUJP??w)`TuH}Nc--(N(d4>v>&o$bXHQ~UpI91&=35Wu@x7Ku?aUh#EIj~;ITu`aOBWuViKmU0CYO`@`#Up}TSDlm zq09n^{KirLAVdE-NnH<}S}8J1pe#0&&fbLoHFsD*c>{WigSVUo1%EfS1hTd#v`Z){ zNxGj`C|$(nf}j`P7EP8Lhw|{gq_y{4f4G$(H{VD38?_X3iUFbO8kr-dpn?#un`8mQ zp1%KVG6bE`({clx=@tsQ&6U>pVi5zLmccFom$SddciC%qm?U6I;lb^POH=<6_r-UR z4Q5#~L+b-;klbEh&0HR$W!Bz)P_ZHUvnn2-_l=CL90T7oGT>ia!7|E%Wor zNdW2^f;7Wtx!YCvc8`NGi`AFQBi)xhu_Ad|XSHK05v04v6RID2Vse$xh`PH&WeRg!e z@>Esf?lYdPK}By+a&1uDR%Prbgs!BX<2UmXvC68!3oc5Wu?-1p-?OGNKJ^?BDM;fX zKhxX?eBGXLL#<1j=8=VQIq2wH*P27kL1yQ#yUK7;*QD^RIU$DR?YOm0X;|u1hazig zn$ot7N_C0)?#;QNM^6F+?Um4u5xxswmkC_7+1Joyk|7&&pGZ%bX zxW}}@xEk~^!ld^(F@i^Uvv6NOkt#uV<(9=*#aEiPetNC%&9?uW#7CeU>8_ZG5Xy#3 zU(jjI%GGkvX+9=aO`?58VQJFWW@s`>6z^FMV#L8h_E|e%A*t(v^PqMVG}RXV2`iC&MD{ z5Hq&s6dydF^vYc7+3|&ts*|&O^M}*xjp+}OGre;*6bA#lkyk7`gI<lu=}kO+FcF-Rzn->p+@|TRMtsJI-ZTEc%T8QuST36_t5OD^B!v*pTD8#z8EXs zfP3eqW(!SpsnQEmYG%)K-eqqv5>ML*By%7wOCs?+=6~F&u>X!a>oKHN1#OexzxXx&ntk+2Ct%x0J z=ZCX0l;Q5uktI?CaD^{3*)`^t20Lg?5aWP}(ytq!lU!g?IoKM_a4oWq+_|S2RaAaF z)8$mToA;jJq^6!eSAwbi6ZaqrIyUO#OU`o(4>JP@8&#ma_gQnl%!*rg)_4@|EM*V~ zIdQ$Ro@?nOqok&qY(N|!ZIwNG>T_3!Yl_GC9mQezBj7sj+z8k7^{rOh%@oB|w67OP zbJY`fTw4$S1MPl$b>W7{$jb1pd!*GsC)0eUH-DL2b<~MmtlHbMvcwA2NL}F;oT;6A 
zjD-Ef4Ovj+z-tf07vphyi+AGdj*pZ5nLYJ8W7`V@wtl!zqAcd#yv?S)6|lLu56HR| zsEgW>u5^`REW_s**Ha|&R^%JNDT)a2ea;`{x)_p3s;(X7m4y|ebMtMEs44NtC zGQiJB_5O2f5It1qK1Pf28a-RM|0<=rzn@xCnz*koDknZt1Ew*DB(14cTC?wIuYXsp zZYB3w^b^9j%G2&R(&)~Ft4r|yjIv_P zS%%dIFATC3LI`J;3~P12AJ`5XWD zSGh@XJS~Xz?-q#+9|1X}ftWw~ChR5j3&YAsIEkfSmWD(SGdGhsB(^!ciEC4so)MuQ z|KwAshP%)gZ)w%jA7JU84$A{Duj(dF(@2%_Rf%f2=doGaPuco_c=&H~;Z$|d-%<$! z)jc>YHP!aRd$3Fs0}?2$vsh@RF!i2vWzH2rqrFipcoK;?Wc&2ZCMd0Bi{AFU#t|}O zHc#uy!p;0z-6Oca=#r>R5DE#%_PlXOe6MS#VcS;xvgDb4E;DkCzGwQe`Bz@XuaE18 zKP&&>CT(;lGe(&a_tXqU#|BBrPGr=MQ)ztK2%MMxTwv8Gc08BHKuAMfMatNFI7K$aFGP6;*ntTRJe3^2Gf#(Myko(z-P^i{IxZe+?ksGC_4d1?=}_QKN6 zyIr3QuBiQ10+xfm&tI}mN{+fqnP``-ngKi5TtU5#wKtaWQ3yjX~ey$j#`AKLt%H9k7%4q%rn+cHq)_m#=dbc8N zHC;ZBcmP7=|A8|V{Dm(SLinxyuTlI)$%g!qjvWI1+V=M_{z$X_Z;BXHHjzPSJiZtt zt7u_T(gd@=a+>GF>(Q7*m`~Tf%9kD`&TvesV$gk`kc(I^oTkYw>F9=pcdAtL(o01B zFP4`7Kc~UJO^g4=5M(a05;Q*NcwVP(3&3``wnQw?)`9x)#x!D6u;3p=01K6pdZ}k# z$mWX@WGbVXR;#)mtsUq*P@MY39|a_NLW)2b*mOor3u7*BB*xH(Bh*c`2$JWA7g}16 zu;?t#O{m_9g0)9~(cu5A1MyozEQbLFtUF|1J|29PG~kW7uOHF}8)Z7iNAb)U*dRia z6}gf-mgH|CLg`D!VQ8i|DHd?KRw|c+eM4izOfT*i6yz)0D25LI05G$cM(QJiD)%~5 z@c38aBd(O>1-i$Xu? zJ=5=4wwmWS2Sp|}l(;TRB?mJ1gXK`73`vF$e>^?6P|fNpe$X#pRTiSr8J=BVl~Yil zasN{N$(Hlw{zTho}JL!lX$Kzy%x=Tx-Ioj&*ony&uC>gt!QB*ihR(Y-Db0|?f8Z*ksLt%~B3QGJT(c|2_UzFg3Y^u+X=*qmhd31@|*2I{kcm7Piz z;cMEgOCM<$3uR^42oq+&`3e;PM zEJWN+POemj0pL@e7vUXtL_4HknzYJqTHJ-W=59x;TujPMQ%dSdRMVoJad(UZcl2Cp zM>L<#8U))3+^bX<3?p9GhUr;2ss@cX3X{N!n9{ycbqME~WfLM)-TB)gD5ZqBHGV5lF3vsEy{QMOo0he`(y$9lLyNvd;3=xie``P?A z(Vz1-mw!Z(B}^PrdAnh=g7=bMo!0`vrsUNxBSgbfwhL1NcGO5Dhn|yhUTLu|3qKY~ zibAl)i5`~v9=R2c*vV0)ru}rKl_2gSaB5A*SiYLH@6lg;c7N(p`%l{7AcGim{Owms zx-EnKgQO|RNVM4>ZYuWz6_CGD1=ZOlzOf7I;2xBpi3KH8VwGeKv~)=IocQwEdn#W2 z)Cr`qlgM3vL>kkW0?a(k0`Cpqou*i&Y6ZYS3?ohS+f0gR5>J6tmoF5bvA?lZA@Fpn zizf9Q_p~!j!tNW*RFNbvo}BI}(B(3X%OyL3vCBbN_%ppspS*KUu!%xv4G>0)wqA;n z9rs0)v8Jd#g2{^ri6a(XTuIz0!ePV)g)C*K*`Sz?e1+>sdgOmjTSJ;$;3$`iBlHL! 
zAcix#{MBj$r}bv=M&ea{$q9c`dGp~xhts|STkxHE)~^6Vr2tw*{8QlYmk-bQr=aF% zA0KgwKEtiGzR1eDGBFL|d?WG3mVaK5Ew<#BzQ>J6-~*5E6`4b2SWB}7x0{2+(U*~K z&DQ{<-{H!yBdV7}y+ywsxuSSN&Y*{pz}{VY*mRE(5^9?G+n71tZ-gQ7i3xeyfxqdN zzY*jA5fkliBJe-acwzoK0`X757^Mi)Jyxog*or!M`(0Bti*V zw|n6-ugSBGpF{=+vana?oLbq}Cm8i&oyxO_P!$Qk_G=>XBztL9l~dVN{+AQRj=7>4 zlynSRHxl8jw_2Oi%B4TlJrXBnD=PgZ6Np8{3{4(kUv!I{^|Q(dDnzZ%U!{$<_m`oT zivnWhTr(ymm;DHhe0RZ%Zi*vlk;bgg=xqQ$RJImkwcxv0!b^N94@uzY3sA-9wT9AT?kLnF3*QRm7`eTFw=-Bejdan)-xSxwKxn#<_=?C7 zZK_?pa=f5$twPedYFf`VbOjGl{w*`Vf3FY3&M#iSxM_(nUcw)-2qDVk085Oyg&*SBQg>?3Z z;@26(NR(uEbf`4qqo;i06{A*Bh~N5xK~!~#`F&DUhdeYD+J9YvLqBL><=|-Lz)w$A z+1HQEb;yE8h4!t5^2LYi*b*oy_hpYs;1nqnBC^@02q^D8`bJgXti_e!9I-a#_gxb?}A|nYAxD6oStB`@vK2P1y zt~D(=Z z`b&F-(Pdwr=i2!7t&H7ff>XM=8D2-js^(GO&JAD>Eb0t2dz+juS|e>*$;PMujy-=d zYW(k>6Jbir1?Ny-j^2A5P&J>szY;t(7KE;-&W6geU!WST|M~=v#7$Sb#;M`ojo|+g z?)DEhp^?aGZmkhEO1f}NNw|>Ye&fqOOo!_B;t`@puL6&1BaQSzo6lh75Lg}i#kaKt z$f|sKrVo0_ai=tL?9B-a90 zoz5l#q>C%Ux!yuQzlPMFKb);{F~1&{>M)~vlhvGNs64}T)xuGpP+Z9s^2EMUE^+_~ z0gomY=(WMo6tRG7?@08qPQlX1S7iSXL1pLN{8WN9XOYX3*SPBl`?GbCgE6@SN_ZVx-_P^xg;%8y#Bea95uoLrA6i4pkxJa~O3XLm*-99VWFs?lsSj-j& z^9;xfUEn-!P3d>4q;OH$r&u%aUro=eTh}`-wj@dSK+gb7W_BhEk7KO%IS?CLV7J(j zh@@-$)+)QUTe%SsxU;6DvNh*7Z_YGAqt)GJd+~x9o+zT<;Da};?{>83iCO2zsjG0e zY#8K4gCQ`i?}CK4LgK*04)W8|X@mT>Q?t|E0_8G~(%IlmN9mx8x`LG@#^hiT+)v>De2OF@L#o;LkuZwRN^wR5s z>k{E?y5Zw8>YbgVjhqrn8s@sm-}+rR6Pe)OvMJ1TW>jp%Sx@ZQ@f*vRv@iu#F~ECn zeLopJNk%i*(omHMqpnGKu51;iABfGlZXzGLuN0aQ{NmgWVlV#AbIJ`$TbbRJ$}~+2 z8r?9p9iMxob?pRAb@s=foiA6AjlP>KF-;D9S`o`3Qc{Yqq-X2!0}b&s%gb=eqioMp z_Q3fnW`4$bg|KqNde;k=O^B%HGU|FtVcGdax71hKt&f((sOMmVdnLveSiEW-Qq?ex z=rn@S>|LK_FsBX$xGH(LiCN8`j95|yy2OsKMhsM8uB)-`eNndPNmF1A6)2uQugjWeK*}@V1tD#y5|2Go5 z;&+z&uj2F1PoemCwtrBae70y#3w5pf zoSiOS|0Q0ZLa`hi33>-Li1;8{akT0oKCx;A^1C-R~|ZV zD(iwr+;&_FsXBfn3gN9T+Iq->C(5k(H8R_(GM~x!N3L>46Q)0U+{fa3wc$;55UdpP z9;jG4xPph$$~u!t*Yecn6QmyxtgOr3J{Rm$L~%6M&2}|^yt_&C;3m66%$jk@6RRn{ zu{r;MDZIfC$_P_sf(U~X9Uvk4v|!`F)Y^fVixAVQ^NuyPT5{T1mdOz7`^bTr%O#u4 
zD&PKh=dt@{Sa5FNUx-o}6$hYnT4S3OY4ZTPwD}v8?|R+b%&aSrxy9P_CA;IT9J$w| zI#k03t8;9M?%M@EcxpcY6|=7Rb3sj%l^*x&EC)hQX!8g6_IT+n)NGy?Fix*=C9~zA zaCaj>f+YlaNGf11(_U-6v{2jMOOc~6HFWKGipGj~zBO2#jG9Z&^c>U7es-JLqmar< zz;~8C7|;d{IE==D8XCR6_O+x~|?ir^P(>rgH9 zUPD^9Ii9hJ47%z(+<;u01peKC*#tGimaj#Ue_o3mr-br}ogkrY8NhC$QWe!MONvgX z8?|7&=IggfC%rIx17il}b`TGpz3fG9n)viaiTK`JrIn1@aNa)Iv1AQ^=8Ww5IL z`wH5RD|9NuP!>P`E!kkC{`&VeQ^p!!$AVg7I-Fon`aWKq_CA~Oc8F@h>n@J2>p-OR3l&BPmEchCREyf*#||H-tg2BXm=}I# ztP4TttH$f%AxiuT!oD!Qu1HKHQpK7P!5->TnCqqFf#l3U!anGQqOtNBmll+398`K# za=+%1r;5;cvgm%&b+2fUOxuh(fQGJ&Q)N&j?3JsjF0P0CxZeDA6JIS6H!?(*=wi@a z*H3*6^lfP@RuKJd{$V}Vl~BEcfjy~6@LAU1r1<|n^8fEBmVbND{|&1CkLz##AbBdV zkV}E7(BK%w>`N>1dy(2Q@W=<4e=K`bj>fK?)`{2S)iS5V7~pxNIq$@l$*ZS_O?{8x z+*08{n;Os{iiKJ}mpNpx+`i1YYKYX;gzr0skE}5nNREB_|HwP*u&DlZ?GGR!ol1ku z(4lm<#DIh-4T^MkN`oLE3=PsC-O?@H-7s`XNFyleyZr65_g2r|?z8tk=e^GLzJGWj z1G8og*P8G8KKFCqpNB@31=BZ>;`U{(?^3q&siK0mV&J)@;fEUeYHBnsG2-YEgNZYW zEu`eeac;>MC1!!WC5Dq;dpiP5_e@ndbnamC#6ql4Lk&`+7Pi$Z=FROqgH!pdECgrP zS&}pfc2{bL?1gfKh1sJ!t-gV*%_R4zjyS5mfu#A;z7$?oG+2|RV!L)1D=qD8!5qN9o*{e^{{j0k#)~sFtVABe!BUHD)Vmvn=@3C+oak zj(n--$%B~z;2PJ{YuISCY`kDaxNGJ%|YVo>y{Sr@%ysDB{ z3ptWkO+w#M_C>-SS6`pbkJk6=UiWwm6{gjD=1k={ECh|{l$f0%jL3^ASYYmb4kihhJE73|L?~`88OPqY9 zOVK9~*nONJR2q})hw7PbT>Vd|TYpRW`gavC#CNs6Y*t1!07@C1cO;Y)D3ymsOx(jz zx}c&{L8_2%AWtu~Yw=>zzEPl(j?KL~Y_!jWi$QLzWmf+58U9a-*YhA>Qb@Uv((EF0 z6snF4O^>$}cTQ1BMogdz2bvI`7>R%ZhOqRdLbTsl)da~6mqT24t^Mp7^CvCdxUwaU zc`b!BCRaFpYb(4R^?XA8!b9MdDMc#3fz_x2Nqg5~I)D;s-N$-LHFvgVO6s{qL9wtm zb1kM$a$&H=fIubpAu=OX0jjlwGN5o~OF$-<(%%3{ndN)YLyQz*5Z-Rrjn(IwJzIiz zN1BO4LmpDJ^hVZo@5wLtTB@JKqlrqfL*l_^ zQtRFyyqDWxrlqu`g&KqcKn^8l0n>w=Lj(V?K}f^dZ%Mh*ixf4c5Z~l;6~$=6r|2M! 
zI3Vlfk1>~H&Uhuvx=@6tyC`FCrAs<5Z<&ptYSdrirLQ_;iS`peJ z*1Eo?gtF5)q~a;EM|palC~KRRYYo9_^tusys(bXmfz&8`1z}#621vOS_HMcV;7@TI zCFDgDOwyqqP8Mvf|HhcaFxq*y+KsBw0~5PRky)$!!)Ph6;o?;I25Nnkc4Zu1X#o@> zm_WSg1^qV=$sYlyJ<@NWYa}3!L=5a%x=b&nn}OoxdwvN8-}@->dw%I39;9b_M#(%; z%YpmSP1_R;{INql{nkBYDYiRL&!v~Z3f03$=f_InU$HxGoP=SCarRK5sHVaB^xO2& zB-D<|sOXs^Ui?BM$U6)L@Bp}m*)*@Umxh7K#j@kY2(su;xVU1i>@2M+*qaxo-#}g% zuB$c$c~oO1|JPEqs!n`iU9oOx2`@OrwrCA}5>z<6V$Ji-kHkfy9k3>H7z`C)Ywv}; zwLsNx%7{Ecggd~RQakuuX{y8~7sf0fO<>M5o)J(yh7v|eXY6rv*xchyixjfEle7bO zRR8pX_OyPX3?yNQ5Dca^n1I=dk?+DOzK&U4g`kECsQ^~4E+4n1h8FRUO zLS^X7laSfy4){Zx+`Ohhac@p>4C&>tR!x}O+J59IwQB9Dm4L;^GT6oJ{xo%4x!*D@ zlQOe88{|;08kkA&)qcu~aYdJnC&a=ZVnl{Vhz!#2An@-&&5_YjD&AJFiNAQ)YV(14 zL4ZL`Vu!%PhMCaIArK!S0>|TWclMLkvkcy7lRWr5v|<2*u%M%qJk)vJOaEQGI-Ys_ z9ceB~xjEjq$9q^Gd$wUI(nF~BcAX&gO4$BM2i}g#jJyFkPdI{RD#us_$Ainzc8vFH z^k0W5YrkB0RoQuNypd01F^?D$%Bhj}W;Bv!{RR@o*t?bd1(jTnGLFJRzP9m_Tp*w* zo*&!?6}SBUw#HJd3bw!0aA<2S&J7A_E|J@#7`3)8zoZmtR7URmg`4zGo2ULQqtu^^ zyUgqE5R+%qY9Y(sRb;r`$BtKpYUNc*^E-(O(>DCYGv7evTi$0za(`+_fqcXPGUmH7 z8eUS+F(&@}w*U2$w(Rv9s(B`UBZ{D2fOU-nxAGn$RSDi&Y<2R6M{@gsTVYN4Z_~;YJ zgay%1g&kr11L5j4f?)jHqEnC7VvVWV(vR=v^9`~4SNutz{{>Po^)tfYCrSQSIsc!L z^oitEd6r(xITHgN{kP8=5~VNzyBHpz=jU>nU=(PRB}R z)U<6awG7N~cTAU30wqt>c;mys@M|7pRD8R@TaNr>GN?5&4%z1-cgx@V^|=Lb z6B&gYKTw4cmM@c&Bl&+Wr2M)6SPZg{nBAGHyiC~5ip-eQG9HeqsFYq<#i1L+eQ!*G zvw#$O1U1XqL7bKw%Nk!4eLo-k>}yW|IitR&nAKbwA3xFoFMa4~j56xomA54amEpIk zcDGqG%BrD>D+}m-Sao6rL~rbM*n6m4rb>4bRriLf|ZM7rMFG|o1YX8 z7sH)Q;f^lvpBE*1mIW9_X*?1pk`+tiMJe<;s`T1kmIb~vUwt))e`lTg2bW-T7!w&2?1P>&l2)BDL9l0XRyag?Q* zUccUObHup+;N#O!DF9JpC8BoybOa^#5BXG0VYgywDTQrJEdWn)Vd(WoRbf0OV?KJ} z*L5g9^z~Dfm(T(H`M_lR7r|OGRI19dR|QmGWi6XG1G53QWq;ZYuX-^{aS=7HR$HVr ze;X`!;v&uA$q0)26@=&cNjpG!X*C-D1|kDYiDdWBSMt4=%Qt~s_@5UTtoWZvIw5u{VAVo(fviL<;)Siv z_z}B5D4h5=?BM^jxS}e*y-30`c@N*M27x#-#ovNrDfg4wPVx8x>tskvU2qP~^4dE$ zoA4`lE0ER?+%F{TDGCcEXu999QX<%nEAPN{s{;xaK@WnlH&7_arTx5vw@?Fo9>473 zMSd;=s!RcQ#m}Wc=KosFdF+d^0$6wG-8D&#eR?Nt@8jG+C4lY=V-V8!9kG%0zIsnSgfwA3<78 
zEwm-%z1GW<=SsB~yAH(HxBwIAM@NbnY1Om+3wU?W<)e3^BC38Y{wMrHs9R=VnD#7~ zx>GjLbc-uD$@R`IrXx*L$&MWHwA!rh(`Y!!^J`hWiTo%~&_iqToO|-UoKZZW#qS@9 zQFcms9As&mCyvjXC7LxQDJ$^cd9u7rk2Ol$8|CaNDaTEa^1_~K0aG?%*R9yxp-Xeg zH9NaouFZOSGRyHSYLuDAjXohBqhWyGs?+X^+d9&O@sY@D%lnC&PpWIQZ$IQ^6+E{f zi^aUl>Z3S@Ly~Lm1s>zlPlMkbZLQ~I)8<1%FIg2^Hi|Jkg`l6zg`-*U-rya@@Fwad zJ#NP9HKjG3O;jrH3T@d$cR?XpYc}86RV@|)>td8(O+{K)XMH)&iXVKuFVxd}5dxYV zZB%&Y`l^pBOmXo`FQ|9D+If?2c-@<+$=lN+M?=ZeT|nHzIuL5;6_S}>$}`?bRVez} z<6icX*1QZ+)%8T;D?42`BLf5WM=^S`w9!89CyRCc&1kOePpghF?~SXIPsQyt&&gzayAW29i8coz%zu=<; zHL^fk5Is@CN|T5U{*Jv=j<^?BkP{hBWyVC~U><7@{_4XBKMVnUqKXqtUtmxLyaIfL z63-UxP(TY$VpfZ!i?;v;@-ST)2;o}vnj9X$;)bPzS!}{OlV!mvHoSz)jvavmEjaUg zB=8O2Rxr7vr0fxLvk&(IHWu54jSZP{;_WEY7UYj~>?yOz{)R6Nl9f?i@;<$Ej$#cv z?FrH;ll4*)43gMPzuAc%&ZdhkHLHhLn0=wq1BX1X?Q@pR81>P^MicfFrw~W#zwbf6 zDp-n7k~?-_O(%bzqdnE|1ZOsJGK#P5?utJ-ReT-)$9gJcjd{2Q+^8}#CR*WeusHVE z4-I;Zo&oj~!}NmTyaas~_;bBdEb6jqrN}fPiN6ILsUP02g|nlb+&S904lx z$R^-r{uN1C^eeOSFOHMHw_g0dxcb2^%~agsa^KtZNMwe>|T4gC+eahi&bee3w%>fu6 z*=ON;WC_=iZ0<)Ozds_%itano5}125!tp3#Z#cQ&iI;a!98xa135B_Z4SqLwBMxoY zgnb{xn^vNM35;xft^|hIa=2|vC#`f;0@n?F!lJ776EGw3eb6HV*sJubOBJP+xBIMj zCJ$(jRTp9`CDa|P`=U6CIK*BIPb`n1yxDOcZ7jI4cN0%oSauRaeV**oTx>80DChyF z?3ki7vK`V(bX(7)DO_75hs#cj-MCT&q!Zv>a{~#QhTgO;4}|UQhAdcxxN7d;v8C_P zcE{i=mwS}sVRX24**<)Au}KIzzmmPPKeZ^xoWT-R(2E)Q;;hlWJa&`(ZC&rO3E%Fl zkRnD;+!;qTjYO1eU*SigKUAR4n1Qs8$mJU_|A|5C_G*7 zyn6{OWMcY$k*tPJw}v1uMr7Bj$ADR`2%C>-z-}(bK@4eUT6tzELFGwLu;7`4jAIVcEiw%wi^;JZMtIwNn&8k0L#FfT&V!&(m|4?j%5HC9S_(LE5a zmjH<(;c=XYJ&h1FA?a??Ef7#hnwi;+H%Wm+DXNp*!Z@0SH=ve48=+Z-p5D`CTg=Y@w<$ zrq63EZs48B97#O=$zyf%C^F-zj|kd(W>zG|du0iSB6Vu%7?nT3u>V%!|L;8ZZ!rY_ z^1`!tMq!zDv3gI#mCYuUQA}QEnT)VZ4jBdry%s4ZVenSv2Kd-A`!-J6ioH%Bf%b;6 z6c+F&w*eMkU_1DOP3{-UD+7|8G2<d#$Ick!2y4ZfGz>H5TEYIG?M``1iqadaYir`z%U3zcU)jonQPVPVbhfpecHS*$ zXbt5-#aWnIp`SuwfX8X@Am~18=RnzWNk}mF#B-yWx-bKnjGFLCtj=cWgEgUqjY)1< zZqwbc4$B^DeZ%85g4^VQ%gM_)u@WbdL%Y+GX1Va0u3j5-ny7A&V{b3g96XmHw6F7N 
z`Sq9#cw3A-Mk}y_H6^C|v9fh#?|s=jpN{L%4l}Q#KRTqkE|-`!!+8mIKG?vu&}w9b zV&}>w&rFk1dU{1U9l^f3fvD-*Are6|eT=O=JzJ(o5jMEyJZB2zk2B{bjct5p+a)(V zGGR9_z}FMpRq2a53!SzmiTtBFZuY2QNyy^&swtX|g@Z0qJP_aj4NJgkOWY|Ju1Fv# zG^1L*S;E0L2(l;bD&$I}E4>vTF-xQUtlUWEpeOO^+;BQZvb{V^MslJkF?z=aGa}N^ zo;PTM+#vj|d}{Nqm##S=Xy2K5&dQU!Kqh^6TPYnIs>WBmT$8HKBcmS2-*~R+t58m) zX3P(E74KbNcIh-Bdu1Vb7PXvkugZDsjjX-)aPqzIw6_t|6vybWuwmDpl59;$Mqs25 z7x73eEn)0s9fe2?Xy=D}D@XA+(7MG00FeUHm6E9dp??V&>LYp6XyyHxA7Cc`y9ED# zioWE%g%bW%+7agh0my0IruzE$JJav4*0%3*Yv>l$XC$DNgfDuSd4rdNbmId=OA%#m zh*;nSlHmLCDNTyIYC0dd$MoX+qFntycPEVb+z0SfoB%=(2k1z($O7LKw7+xp0_I^% zn3CE)fEd=yMx#3Kv%>_us*G8kN!IWmW{iAhIwyK2iW^Ydc(SaK5gfxD9oBA2pL&(7 zjGXql$Zwzk^`;|LSPJE3jz)LGv3XTEj4MUzOj{0ce=e~4 zU*+qP$zW+IpGErH9av#e{dfwh%*9lX#!o%*wOIwm;L?pw_c765g;xyNMF`l6GEG%A zYbWsBV)Cjh_%Yy@5d25w_Z!vfcZ$~E{MmnpRQCPADC!sCL>CwhqEkS-@U}O=wl04D zg*Q=k$#nmfrT1*4oWacn<2gW!1y<*ut$07;Wn;dVw!O44VmMUy@!hBaThuiG5>`GO z>_`ORsmbvZB+CLwiAp~ucTtLo{*japU}Mz)!R!Z^;(rqK0VIaz_rAbikd8m6e1Y@& zt)LI^c>cT2!eUhy`7P6@muB?occC}j+2p}277*)_P!1AB@hek=i9d3a03>}n&wDzo z^yam&`IKri00w@2?Ft97)(!v{`+rz<{~eeJcy+h<@G59gaVmpGq1-yYFmpeY1xgS3 zQmTFw_}1xZA{SpQacn7jl43)RGnCp^J-S=`li28}E3;}M#UoA5xIkrF90TZ@u-efn zqh&(hTqa@&Uk_Zg#JoG8(u`%(eeUhN0NrE6V~5n#E61!Ox&Sv9p&GlAvw2yPou=0- z*QQ7?a$JD(vi0HiWKP|9PN2*Eb*h*&x#e+%rl`lHbI^v|g_3We_C!vzM2|O|HAy@e z$b563xezuJr}9Pe^ZL2U@91`$4ue#Qvt4O5!TKe+)FDgLjoeNz>Js)JLon1vDnK}rea+^)=q*JqnK7J85xx zpRs}4#NaFDgef71B8wZ!hRV8$BAs~+?tv-W_NrYY?ViU54h{66)LQ!EbR6$@je$0J zU&9)oWW#kW>>gIv?{%f_N1|l{^ChJ5rAsgjXp{6vcErg;Hb19mEt?Twuqy5t*NyH4 zzVF(YZ+D`Fci-cu@AnL&MjUGnw7YK& z-ma}N*3P7uYkNPB~ct}G1L3T@bmDfY7rD&y8Tb~VXB zY?4+W=eddyRijhecE*mYiA+qVboqN#Gh+&f-~ zaWaSN5rP$+CY~;4*1Psv#2+0M-B-k@0zi%f88L+*@f|F~c{7yJ!~^N4HMLZnEg~AZ zXD>#mq{%PV?X*5CBfFFJRADfbyqm`t|DvInPfjHA3RnCKk$!olchrH#E6<(-*ywgW z!#vP78B$vNURM226<&|ekF2Fmo;YmkEqp1PCJeSMcQeA5C`Xhe1QVYQ(*TsU!lb~5 zFwMw&q2Y@p5Ku-B$gXwB(^KGMV)?w0eKuSOk{9%plcJd;4(!AEX?sH7hgAh1)JyI1 zv5>Z@yT9IEONhbV;p6@Os6G%vFlz3j%hgxodm~+PcL5Qo(1f3XZ<(@oSsBG9@W5)k 
zos`APc+-Nx2Mxm>OtL*zskUS0Q9+UHtHDmEg6qQxz(@ne(JF@}z>3o=n7;1)K5vEr7$>Fb#ba=T{tHbr3Jl{1E9|TZ6`Jma?!0rA9lj?!X#Wj_%WYBSeuiOeq|~_KK8=Sw zaW=+g=xA$Sgd?fIoP27>9vJDeUs^iviIXE~3hP3?cQjG)$X*-wl2ejqr=!U;lGv+rgCTxe4#R;@#M+ME=_5y)6uG3%#H3Rn*mkp`8? zJu&*{W{WAqf5l|Q%>2u+&N>1;VhJ3rs+gtPqo-Ez{ek?8 zuI2DC>8m^>UFO;t zQBr04%p$25A}?dcmgs>`wHs3MPYu=$){GSiuocPSZX8 zHtk>%gJ@F{35P1^^0GJjvaN;Gt`o@8FmTdETQ?y@!Q=SO6T;y32N3)A@M4e!Tu1xa zT+tzBhCaVU7DqC!_&TmQRe+p+=w2Tq+`iFK=&l=tBWZ*;=3@L~c0?CrJ zv<#uR@sV^sOGj~IH%w#Yc$t!!*zJvOX73uz-Kz%RJ%+ZXVOG6Gz zmSA?hYt9B;n@@!bLxrN%O#c9~V;0i`1ek+s@B25=?Z7nxltMl709&ZEK$j--ZJWo_o41e@7Nm9!Do*CQM z>1*L{P4YHBXL2U580d)b>)WS(==u)+Jl+s}Y5(9duoH>-dOZ2YfY39AZ(jHPjb48$ z&R2jpPdP?Igt?Oqo4mC~UxK=B=C{f-SL1-H)Y`}dxm=0ez|(PT%(^*7CZ zPbhPD9OTAm>I($|z3XqUUo#;1X_6UJ>tU2vuOcXO1|CG%iXur|r|+`p>j>I_?)066 zQa*S|(7G?Y1?>pri0paXD%iI-7p`pUcA=bRYQvhvp4;Qmp;I&1^>yWLI&gXpm1u=!)QY#NRA%)4!+^Oh6!YR&Il3H6yVd7n)IcADd_pgFy|i_8_ zcC*vnIh$+(-Aydg0A!gDi?@pe73j{)K$k*c_xM1tG_4D4E7CEyN1<5+(?rCF*Ae{a zQe3+RY9V>WHQt;^EobzK0eS{?Z5!cDHa`|ZHD)i6--@Ulzl>J>@s0}C=#n#zgeoJWp)JxYrj|*CpP1h4KGLK-j+e+b`YUPVukGntMj!{P%>g-F= z2BQM7YdzxB%=gq|Y3Q>F?;*8!x){N(HzViK?gyTO$q`V|G~!|gu6}om+*AT4YyFS3 zv9H|2pVaVe9uEt5-@>$lyht$k3Ly%v*>R}sa-iFsAKInBpcaFLUrzpZux ziI#*!&MXX-XSuN$uwA3@d{=>d(Cu{s%{KGMtWxwpbIs1MD3 z#~sCU&?+Cuiy<4vvirc|XkZaKy|*AkptHgnS1e;zP7ssdD6>#SWydnU{qMFu_s5@zNY5H zc~sDB0))x_RqPA@*3DA8)*j-5((G+^A*uam{BRkgm<5hra0F4^kP><(#d}mj1oFG% z^A^)ngATUAHujNOcJMff4^ORKM8**MDc7p3C($b3xLRwi^0TQf#4dd~cx?n&%Z6~! 
z(=S$wvL_q;B%(Xoz|{&R*zH@M06dM67i-R**MRRp{x9KOVz^}kH-$G|Lu`{|0EL4M z+HxxM<3#UdhHo5HE(UHd?1U?WGX7_PD7}f%bjFLbY*(A@c z-dn>Lzix|}y$zKnM7>u-SvU-)daVU4+CdLDU$|+8G(70;L*s7&*Me?*#xn!?Vwk8w z_%dbm^ylMSe{)#MKbR>qVT{hvfdE8HpG>i=PjJa!G9;+UzcHgxSwJwNHknH-{V5^B zoo);%?Ut?Km zwBJzhX!?ci+oagLAKs((GC9isB{}l1`ObpVCl9!>MC1##iSJdV^?OqpB*-Ni)f&-`}@(#r`E`gxK(tvGkk8v8y@=Z`mt_I^&*Oci`yJ zUwrjZqyYvI>5%l;K^BX32g-#vA0EX2qi&D;p}Rro(NxCx{^^}0sKx6hH$%TA52I%s zuewj-RI)V;uR0?7q*3*p*skPnsc2^xxaFwEF0A8_J}g`FTE&eJ2m7I}bXn?HOxf=6 zOfju&B-${RMc|%W(dz)y8vzR!|F#&(m$;hd*<&mf&GqoX;L!|b(YU@je-5;0<_OMd zm}{4mpp5FoyT*nJDns?!CqmA^AQ&|b=N>RWV@ssNNQB$@`64Ct*uaZUc*FpnnA-fh zUw@?mYebPa2`VZ{E@1FA>u5oSQwgg^ghmGPsVE(;A99AK3XGs&1908hnvD|bRANKl zM&EUQc&bOy8y^uJFnymL?Cok2x{Qq{LUU#8vdtbXlU1U!i!S41ADl&<$Q|YkT@wWZ zhD$>^nvH~^$u8%0f>x3>a5IH@(^MU)IITko*ZQT-grscui|8ji_xVVTOw)hl8M_KgyOmw0|Kro-+W;aFmnwEk!Ep~YWM=xcUn62PCY&rSjj7Z{_E~kkUCEI)3 zOuP4Wx7MO=MHm1`&#JOjh^mfHtwza`iD{h5@%UkBn^|FGbZAI#@Y_B|473= zA6!vj1j7@?Gv@AXV!ijqv0Ey(AjV6WZS`D6EP_50Mp$;bc@~@UqY<5`Pz;$!@kGV1 zbJ7^L{RT6fW=!+->SIMwQlP(k`zP!6e+>Wn(Z22I-S)F}8&s1oX=j()PgInuvkOw6 zm*yu~cQ?+g(AG|>J0+%AH=#0te16XQ7PrycHPwAt%{j9G{#^4MJu!-kgLn8$w|7NX zBfM^j_{!YE(?p@lm)dXovFyl;r)Kd&-s=LO+3(}Yelmws9sXnt2W+lIozuGulMe-8 z(3YVjThxzFLdO48y8bfg_Ls%$2gBOGvPJ*(Isa{2_CL1mzcq7!ro*F=i*Gop)tR|c zdnCbWtvYFKRS35bvBkHfZlS0YA>7jQUo0CNR~|zd6gH?Yr*ej?&m7@Av=)CZ#n)Z( zfB{i`+*>l%Hi$mD1yYBjlof^G$B}#xSDq~ zlH)e>ar4$dhG}B=z0H@fabBO|d(TEKQ5F3%xKxa2-$THft|@v?+-sbo!*w?0gLzlo zry-%{Zf>IPz??b*>uFOKEi*bJyfn_u;7A|KwZbTD;ijE~-;F~RBeL`mrfnK5?kI0F z@{`iv3QqvR9)k8AU`F&vxxx-@@GW&U&N{yx0`hgYHdzck?>*|~`DPv)OMYmn0skUsd7f8rQZ^JgfR{AmkpC7$(~C9)%2v&EsHz$nxX3+MmbGewR|4|E-^A zK|oMGqp~A#N~#I*ZyJnj6Rv++E6aZ|jQk4|u-}fo697NJ zTo0hkCApD!8_tqMW)c^mxZz>*q)Wi?bxA#faGf3wG6F$>{UqmL%nxnre>j*dQqR zbKYAffkK#u7sk2ulTH>254UMPqP`upsonrAcyadU>3->Jw(OLy#4oy01)rEEdUnL) zu-rRr~w2BVnSf_zolH>weWAGNlfr?kjYT=M)^+g9JSFy=-$awDV78 z?*yqHs;-~Fcc$VoOU|^42WV7@7ct^{DPTH>!cFFa@`3&<8g&4x=A|UG`f=TJJc2-q 
z)&un`Mib6nRqSp^HgQqJa3xM>dodE@6@;OkR+^MVTKm^~fyG0-Y|(&GC}wBVQ)y(WjkV@wv49P`vpw z5E&QXJ^`PZ6nAAQm7c}6+$hOhfv29IEPYRJr3`-qk^d2|zrp)-|NH<*a18+oF4$wh zqjVa7^#w?8nE>gnZifHZ0=(?Mao>6VmGgUoE5^G`7)WmkpWeSB0a}wseSa)RE0o_8 zTo>^-nET#eH>eK7HNA&Df%F!Ni|Cd8_nUO-SDB*b%@4q0=`Wae!~Z{fGE^0NB3U@s zul7a1*265cuNg%~jU0s683*E`Q+Cxqbbo4?iNa9?HqV1ZL`@y)&Z9bFE32`Rnb^yr zA#wk_r2RLye_1RDtwL*j>B#TWPYe?u=;@%ov6tPky}gBg=#9;&wLrT_cfDF4U^-*O zH_Rvcd_)V>1Fz$eMRUMlrHV(3;MqV)IlI0XTiH`y?~lu5HwA&Z${kJX!VqjE;m*U+G-a` zMeRNw<3KDtv?hji3G`@F$G(AbhvRFWIZmq0A)=TaDlZ>mu#Z~I2LuIw)7b^M;dBSu0a&~pK(>Pf7nAE6;8 zB_B;WcWDS|_Q7kqxc)~s!r-|rh}zkSzE*FyN-VSerWrRwl&$2Scpo^}T0 z@lq=hJssJDZI+%lU4)LcPGv&Po%xAYG{8pdq#eem%5x77e4Vg3hz(yW#PzjNRg}zy$1%)@ zf^ipaBZ0nb4}xs#n}aAbUe2I9gkk z;X!w&)ux=|NV)1?9nJM}_v<{T5sit*u_mVQ)$I*?8ynqbx~Xsnd;iQ>MSisBWI41flDv6}MGL-^=f zvCuJhSDKqAAuH}fBiVH(AN(vsgo*q-Zk(Vm3AKT9&^dGKoN<-Y5}ydg2(Hk@X9EUc zq|}$o>T-*ShwZ~IoM!Ct4ICujK(1dSdQ;9xy7{@|MccJ8N=dMs-Nt*c4evy@5I7Jw zHYc^_skPxkXtwsRP`1RMJ8kwZu(^u)q8 zp&n|BcPF^=b2JlCsvA>H-#N9PKehpFK!rZHKKJ-stl3Dh^68CqRlIy~2G1dUuOCwi zZ@?Veo+nlOlL~KuBd_Oh?!!&T!IKSNj*?l9+ne^Gq@a_uuX0y1E~~NCfW>0W>WShj zcq?D53ZvCWV}vvRUR8P++S<~GL+5OVd{f2yh9}lV`}-_LNg%fzMopxnE?H%zI zw8OJx|6NkU2YpeHqdJyLLn$L^e#?}M8K*4^Ny#yqk9Iz`DIu>PPl2TRae2UmjkCdN|)}6Wm#URF>i@BPM zk9gqPZLl=H_{xuK##T21w`9`+^zztkSM;9B=wH^{Uul~EC5k4|(zT=iG-8%MzH)~T z;%b5Jfd1RP%MKq-c(%6C)tGE&6O3I9|N0%(`A%~R<~B`OC^j)AP80ns^#L*c)Y~aj zuTLD2<0D>#ARSxCmq*_~X+uR*1`Sz@4_~`HqSE)in9Tjl3I0EOQ~1S8QC{_fLkGB? 
zklO~Y>+ZcZrwxttjG3zlsafZXo#wv8)Bv-vywI~itnEwmAVu(M_ftHPqDsd znX~@@6cd%k#=Zy3(N!B`UNJtPPdWClrZ-ZVXD~)GrKcFq%_|_a2`(zFtYynOF^Jq- zO~^ckYRGta zPE8jXlXe*^tt@irK?V^|H`CNUrqBmF*1p5%8ns-io3)NwY?Gx|ue9x1Mb+(PbFTgf ze}DjCr>Dr7aJ+t48KrwSN{m$~!HV^$?2e-u%F^37Vyq!?3RIw?`Bc_Cdsk7TBz1MU z^`yr@wZracJS*4I@TQsv~BkbPb*t5t}sI(Ya`>57S zLg8|stahn(ip}wp8{I_FiXV5>EqvOcts{spHdK|m#9{oFL29kvNk?Dgj*8jn+_UU} zg881dPayw~M&hd0GV$M&jQ~29s6mHIvo3^TeRu&Tu|}nmd9DV`^we0LGSpUSR|?n& zjc=(%BYr?`_gebNblHIzS*02#{B(j~(ERKHS(Yo|tvXt?*aT^9V{tNQ*LW-WN&CC$ z5;i&>+gXX}ewLvAGye@Mv4FQC^J%9+wR6+pYEvZpkQN*KSU>wInLWIY8y%YGrxh%L zoWy+FwcUZj5B+`BKVPc}%cFE4&`r!yEueGUiNnn2_$vlAU7O*z$CG zWZ0#@@23{}!(vDj6rH2HnuE;wBoMWRc(d7CZm=?#=1A=wt{$17vCw8TZ7X-F81b02 zddG!nNHRQwbkxsrQ8su}Se#{S~ZoRdpgZM&kqp=QyKXn=fU|5kc zoNK9p4<$t30&n)uOY{G)DIZHoWKqDVBkm z4aMveD+`ME_bE3b7@wRlfT{OdNe+}gOdJ%SIt7p}uzZmsaSp9p+$7c~ClV^*$fA0j z%HAF1BrlA(xRT}f(bO_va(Le^0@?+QKpq<7n|HVp((}=L)Aq%Aqu6SHH3Y6f2i3J| zK^lyyL(BM}W6ehoo*-?uSV_{B?}a?9KGw`~`Ua9gG-AWvmBY3>;k7&oT?=`mIAIe9 z-2LklNmR7^gC>IdVxN*A9Z-3J{(5koSQ?daZ|aCa_}||5^5qW zsM^0rao@YL=28FIC+k=m==q}ww(O(Z z#34Hw>pQ8(RPtuz1V(_ zUX``YzRl=Z)-%M$M(R-OIq!oQt_MQe*7_6W2Sp7SAb5|}<)#_9A3#@tDPzB^tzsm< zBxkFBb$I{JlhZ}2yx?j9dFIT-k;SZ8$@lm$Rpkvnr(;a0y3fsdipzndyn zr2R(t=w?*3*svzZaSd{p>EwYMDQ&RTI>)|FAc}B9Ap4L^ZeIRgmAv;?nKYF^$7B8% z;6go*nRSFTO9Nm)fmu|t#JW>!k%D^1)S2~r?K@F+?l$JTdJ!Et(8n4!NY=|Gik#C_ za+6WsJY8ngVY5C?Po3D~j35RdDLD9q(bf%>k$F}LpL}^(T7%R)s8!YNZ3C!Q`tyT! 
zX87}SOWfsB^TxSHWHgxHv^Ae+R8uO0^(j)Apmsoqr z*AaS#K@>zpDcMU(cHA4;aOoPJC}iU|f`)3!eJzwSD(ulcpZ+rA@b72NVu3E)-yz-q zOOG4Qk$);TC1*(EjAG?h*xUPo_(y8LL(8Hme+; zSAvKYL`;fjqW4kxAfH>f5WLPmCkejY-@k`tM?Vloh{YU{?lB{#h)Vu>{bh(O-RQeI z3kz-2FY^U*9~URe`ShJDI7$|6U#7kE+f=NB4UNDtBHhgH&SpmOgrcexM$MfB^p#TI zAzsFtN`6Vv8Lc61h1sQXXG*j>yZ0%3x6jJB3nWkPLGA&zGzUn)n^kC-Q~&(!Tm$*q zxJb-*I_aN#GQaqcGtvYvrH{M;(Ty|XR8=)xRB~h8{k!$>=gRzlHl-+Es?lHI>i%SW z*-*2jnAR5~ijeajFy^yMLF$*5JF&|VV4S;;-fm_&H`?srNoT0sQf`gsm!NaiPQN{& zJnkWwCz(e(@$uH<0?!H)xM7N)XGCk*K8Lm)>Nu6+%X$VR|H&(Ednf5;G9wzdo?x~t ze@2GzlpfAn;%*9TCGMQWr)zDTW)HNo6laHKH7EG32IFkr9xL%}a*yz#xb&drYH2H- zDtxLPL1h<)<(BQ<^8=g0lU)|w6OaP#VsMs>ydsEbp_u7n&V=o($_Bej6ay7l*L*U0 z1P=ICsNHyKm3;(|SL_rCv!hMT!0#naw5V$K!FS&9_&?hoaHsIIdvni+Y^TR=SKjMA zzw*QzSTaXf3UJPx&=Gm#UG0LVd)a-A#~7baj5@)7<8$ML7XIb(52JuP`j$rCbuF&1 zuG*+_JxEjV$kRhgD+0^~jEW~;-e-BtZ!WYR#XB_ujCA<<2~;@X0k~BdxQ|R^-305k zbG+&nGT5I2-s$a@zwvzO63@dVRj=0WW`jbo7>5<(j9k1;aOVeFfsAT`-p z_K1+Pk@iugsinX>jXpT8nQ=eh;uU8Vp;uzrM~aR)&J@3RW#Vr6dy?uq-j%$~d;2~i z@@-mQ#_Yw5Q$m2}#{n00u+Dg$zmoao-`NHAlYlckD2qH^gN|cEJEIR{k;hfwA~yWX zFu+UaP|xTCUb~EPKHu6X4Q6fB0|PJ8)G<2ey=y(@`@gx}_~+8%iC=jWeGCuk%}=}I zsnfzaw^q>6y7=$oCTqUOXFx zct9gyw;=1S2;+oZOTR9-x#Hz!L-|klPS#w>|G|Hs{Wkln%=?f< zL;ldyBnM+^MLvCVz_SjW9Amk794=-)9`3 z{NvhR5ymMsjyV;5mxCwnwcq?K{^}I|Yl|zs%BEBnAGuZA`TOI0wo3PfS{{zCn>@d* oou$FJ@z{hT6WzKdF5p_Vn1S&ES1JAzWwD;WLB+E?*#F-I04`np(f|Me literal 0 HcmV?d00001 diff --git a/perception/traffic_light_classifier/include/traffic_light_classifier/classifier_interface.hpp b/perception/traffic_light_classifier/include/traffic_light_classifier/classifier_interface.hpp new file mode 100644 index 0000000000000..e60042d2257ad --- /dev/null +++ b/perception/traffic_light_classifier/include/traffic_light_classifier/classifier_interface.hpp @@ -0,0 +1,36 @@ +// Copyright 2020 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef TRAFFIC_LIGHT_CLASSIFIER__CLASSIFIER_INTERFACE_HPP_ +#define TRAFFIC_LIGHT_CLASSIFIER__CLASSIFIER_INTERFACE_HPP_ + +#include +#include + +#include + +#include + +namespace traffic_light +{ +class ClassifierInterface +{ +public: + virtual bool getTrafficSignal( + const cv::Mat & input_image, + autoware_auto_perception_msgs::msg::TrafficSignal & traffic_signal) = 0; +}; +} // namespace traffic_light + +#endif // TRAFFIC_LIGHT_CLASSIFIER__CLASSIFIER_INTERFACE_HPP_ diff --git a/perception/traffic_light_classifier/include/traffic_light_classifier/cnn_classifier.hpp b/perception/traffic_light_classifier/include/traffic_light_classifier/cnn_classifier.hpp new file mode 100644 index 0000000000000..ef91f58310caa --- /dev/null +++ b/perception/traffic_light_classifier/include/traffic_light_classifier/cnn_classifier.hpp @@ -0,0 +1,113 @@ +// Copyright 2020 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#ifndef TRAFFIC_LIGHT_CLASSIFIER__CNN_CLASSIFIER_HPP_ +#define TRAFFIC_LIGHT_CLASSIFIER__CNN_CLASSIFIER_HPP_ + +#include "traffic_light_classifier/classifier_interface.hpp" + +#include +#include +#include +#include +#include + +#include + +#include + +#include +#include +#include +#include + +namespace traffic_light +{ +class CNNClassifier : public ClassifierInterface +{ +public: + explicit CNNClassifier(rclcpp::Node * node_ptr); + virtual ~CNNClassifier() = default; + + bool getTrafficSignal( + const cv::Mat & input_image, + autoware_auto_perception_msgs::msg::TrafficSignal & traffic_signal) override; + +private: + void preProcess(cv::Mat & image, std::vector & tensor, bool normalize = true); + bool postProcess( + std::vector & output_data_host, + autoware_auto_perception_msgs::msg::TrafficSignal & traffic_signal); + bool readLabelfile(std::string filepath, std::vector & labels); + bool isColorLabel(const std::string label); + void calcSoftmax(std::vector & data, std::vector & probs, int num_output); + std::vector argsort(std::vector & tensor, int num_output); + void outputDebugImage( + cv::Mat & debug_image, + const autoware_auto_perception_msgs::msg::TrafficSignal & traffic_signal); + +private: + std::map state2label_{ + // color + {autoware_auto_perception_msgs::msg::TrafficLight::RED, "red"}, + {autoware_auto_perception_msgs::msg::TrafficLight::AMBER, "yellow"}, + {autoware_auto_perception_msgs::msg::TrafficLight::GREEN, "green"}, + {autoware_auto_perception_msgs::msg::TrafficLight::WHITE, "white"}, + // shape + {autoware_auto_perception_msgs::msg::TrafficLight::CIRCLE, "circle"}, + {autoware_auto_perception_msgs::msg::TrafficLight::LEFT_ARROW, "left"}, + {autoware_auto_perception_msgs::msg::TrafficLight::RIGHT_ARROW, "right"}, + {autoware_auto_perception_msgs::msg::TrafficLight::UP_ARROW, "straight"}, + {autoware_auto_perception_msgs::msg::TrafficLight::DOWN_ARROW, "down"}, + {autoware_auto_perception_msgs::msg::TrafficLight::DOWN_LEFT_ARROW, "down_left"}, 
+ {autoware_auto_perception_msgs::msg::TrafficLight::DOWN_RIGHT_ARROW, "down_right"}, + {autoware_auto_perception_msgs::msg::TrafficLight::CROSS, "cross"}, + // other + {autoware_auto_perception_msgs::msg::TrafficLight::UNKNOWN, "unknown"}, + }; + + std::map label2state_{ + // color + {"red", autoware_auto_perception_msgs::msg::TrafficLight::RED}, + {"yellow", autoware_auto_perception_msgs::msg::TrafficLight::AMBER}, + {"green", autoware_auto_perception_msgs::msg::TrafficLight::GREEN}, + {"white", autoware_auto_perception_msgs::msg::TrafficLight::WHITE}, + // shape + {"circle", autoware_auto_perception_msgs::msg::TrafficLight::CIRCLE}, + {"left", autoware_auto_perception_msgs::msg::TrafficLight::LEFT_ARROW}, + {"right", autoware_auto_perception_msgs::msg::TrafficLight::RIGHT_ARROW}, + {"straight", autoware_auto_perception_msgs::msg::TrafficLight::UP_ARROW}, + {"down", autoware_auto_perception_msgs::msg::TrafficLight::DOWN_ARROW}, + {"down_left", autoware_auto_perception_msgs::msg::TrafficLight::DOWN_LEFT_ARROW}, + {"down_right", autoware_auto_perception_msgs::msg::TrafficLight::DOWN_RIGHT_ARROW}, + {"cross", autoware_auto_perception_msgs::msg::TrafficLight::CROSS}, + // other + {"unknown", autoware_auto_perception_msgs::msg::TrafficLight::UNKNOWN}, + }; + + rclcpp::Node * node_ptr_; + + std::shared_ptr trt_; + image_transport::Publisher image_pub_; + std::vector labels_; + std::vector mean_{0.242, 0.193, 0.201}; + std::vector std_{1.0, 1.0, 1.0}; + int input_c_; + int input_h_; + int input_w_; +}; + +} // namespace traffic_light + +#endif // TRAFFIC_LIGHT_CLASSIFIER__CNN_CLASSIFIER_HPP_ diff --git a/perception/traffic_light_classifier/include/traffic_light_classifier/color_classifier.hpp b/perception/traffic_light_classifier/include/traffic_light_classifier/color_classifier.hpp new file mode 100644 index 0000000000000..27548c9bd92c9 --- /dev/null +++ b/perception/traffic_light_classifier/include/traffic_light_classifier/color_classifier.hpp @@ -0,0 +1,94 @@ +// 
Copyright 2020 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef TRAFFIC_LIGHT_CLASSIFIER__COLOR_CLASSIFIER_HPP_ +#define TRAFFIC_LIGHT_CLASSIFIER__COLOR_CLASSIFIER_HPP_ + +#include "traffic_light_classifier/classifier_interface.hpp" + +#include +#include +#include +#include + +#include + +#include + +#include + +namespace traffic_light +{ +struct HSVConfig +{ + int green_min_h; + int green_min_s; + int green_min_v; + int green_max_h; + int green_max_s; + int green_max_v; + int yellow_min_h; + int yellow_min_s; + int yellow_min_v; + int yellow_max_h; + int yellow_max_s; + int yellow_max_v; + int red_min_h; + int red_min_s; + int red_min_v; + int red_max_h; + int red_max_s; + int red_max_v; +}; + +class ColorClassifier : public ClassifierInterface +{ +public: + explicit ColorClassifier(rclcpp::Node * node_ptr); + virtual ~ColorClassifier() = default; + + bool getTrafficSignal( + const cv::Mat & input_image, + autoware_auto_perception_msgs::msg::TrafficSignal & traffic_signal) override; + +private: + bool filterHSV( + const cv::Mat & input_image, cv::Mat & green_image, cv::Mat & yellow_image, + cv::Mat & red_image); + rcl_interfaces::msg::SetParametersResult parametersCallback( + const std::vector & parameters); + +private: + enum HSV { + Hue = 0, + Sat = 1, + Val = 2, + }; + image_transport::Publisher image_pub_; + + rclcpp::node_interfaces::OnSetParametersCallbackHandle::SharedPtr set_param_res_; + rclcpp::Node * node_ptr_; + + 
HSVConfig hsv_config_; + cv::Scalar min_hsv_green_; + cv::Scalar max_hsv_green_; + cv::Scalar min_hsv_yellow_; + cv::Scalar max_hsv_yellow_; + cv::Scalar min_hsv_red_; + cv::Scalar max_hsv_red_; +}; + +} // namespace traffic_light + +#endif // TRAFFIC_LIGHT_CLASSIFIER__COLOR_CLASSIFIER_HPP_ diff --git a/perception/traffic_light_classifier/include/traffic_light_classifier/nodelet.hpp b/perception/traffic_light_classifier/include/traffic_light_classifier/nodelet.hpp new file mode 100644 index 0000000000000..c04e610eb939b --- /dev/null +++ b/perception/traffic_light_classifier/include/traffic_light_classifier/nodelet.hpp @@ -0,0 +1,89 @@ +// Copyright 2020 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#ifndef TRAFFIC_LIGHT_CLASSIFIER__NODELET_HPP_ +#define TRAFFIC_LIGHT_CLASSIFIER__NODELET_HPP_ + +#include "traffic_light_classifier/classifier_interface.hpp" + +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include + +#include +#include + +#if ENABLE_GPU +#include "traffic_light_classifier/cnn_classifier.hpp" +#endif + +#include "traffic_light_classifier/color_classifier.hpp" + +#include +#include + +namespace traffic_light +{ +class TrafficLightClassifierNodelet : public rclcpp::Node +{ +public: + explicit TrafficLightClassifierNodelet(const rclcpp::NodeOptions & options); + void imageRoiCallback( + const sensor_msgs::msg::Image::ConstSharedPtr & input_image_msg, + const autoware_auto_perception_msgs::msg::TrafficLightRoiArray::ConstSharedPtr & + input_rois_msg); + +private: + enum ClassifierType { + HSVFilter = 0, + CNN = 1, + }; + void connectCb(); + + rclcpp::TimerBase::SharedPtr timer_; + image_transport::SubscriberFilter image_sub_; + message_filters::Subscriber roi_sub_; + typedef message_filters::sync_policies::ExactTime< + sensor_msgs::msg::Image, autoware_auto_perception_msgs::msg::TrafficLightRoiArray> + SyncPolicy; + typedef message_filters::Synchronizer Sync; + std::shared_ptr sync_; + typedef message_filters::sync_policies::ApproximateTime< + sensor_msgs::msg::Image, autoware_auto_perception_msgs::msg::TrafficLightRoiArray> + ApproximateSyncPolicy; + typedef message_filters::Synchronizer ApproximateSync; + std::shared_ptr approximate_sync_; + bool is_approximate_sync_; + rclcpp::Publisher::SharedPtr + traffic_signal_array_pub_; + std::shared_ptr classifier_ptr_; +}; + +} // namespace traffic_light + +#endif // TRAFFIC_LIGHT_CLASSIFIER__NODELET_HPP_ diff --git a/perception/traffic_light_classifier/launch/traffic_light_classifier.launch.xml b/perception/traffic_light_classifier/launch/traffic_light_classifier.launch.xml new file mode 100644 index 
0000000000000..eb2d2951cf3db --- /dev/null +++ b/perception/traffic_light_classifier/launch/traffic_light_classifier.launch.xml @@ -0,0 +1,26 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/perception/traffic_light_classifier/package.xml b/perception/traffic_light_classifier/package.xml new file mode 100644 index 0000000000000..6f5bf6146b999 --- /dev/null +++ b/perception/traffic_light_classifier/package.xml @@ -0,0 +1,25 @@ + + + traffic_light_classifier + 0.1.0 + The traffic_light_classifier package + + Yukihiro Saito + Apache License 2.0 + + ament_cmake_auto + wget + + autoware_auto_perception_msgs + cv_bridge + image_transport + libboost-filesystem-dev + message_filters + rclcpp + rclcpp_components + sensor_msgs + + + ament_cmake + + diff --git a/perception/traffic_light_classifier/src/cnn_classifier.cpp b/perception/traffic_light_classifier/src/cnn_classifier.cpp new file mode 100644 index 0000000000000..40f1757e6329f --- /dev/null +++ b/perception/traffic_light_classifier/src/cnn_classifier.cpp @@ -0,0 +1,265 @@ +// Copyright 2020 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include "traffic_light_classifier/cnn_classifier.hpp" + +#include + +#include +#include + +#include +#include +#include + +namespace traffic_light +{ +CNNClassifier::CNNClassifier(rclcpp::Node * node_ptr) : node_ptr_(node_ptr) +{ + image_pub_ = image_transport::create_publisher( + node_ptr_, "~/output/debug/image", rclcpp::QoS{1}.get_rmw_qos_profile()); + + std::string precision; + std::string label_file_path; + std::string model_file_path; + precision = node_ptr_->declare_parameter("precision", "fp16"); + label_file_path = node_ptr_->declare_parameter("label_file_path", "labels.txt"); + model_file_path = node_ptr_->declare_parameter("model_file_path", "model.onnx"); + input_c_ = node_ptr_->declare_parameter("input_c", 3); + input_h_ = node_ptr_->declare_parameter("input_h", 224); + input_w_ = node_ptr_->declare_parameter("input_w", 224); + + readLabelfile(label_file_path, labels_); + + std::string cache_dir = + ament_index_cpp::get_package_share_directory("traffic_light_classifier") + "/data"; + trt_ = std::make_shared(model_file_path, cache_dir, precision); + trt_->setup(); +} + +bool CNNClassifier::getTrafficSignal( + const cv::Mat & input_image, autoware_auto_perception_msgs::msg::TrafficSignal & traffic_signal) +{ + if (!trt_->isInitialized()) { + RCLCPP_WARN(node_ptr_->get_logger(), "failed to init tensorrt"); + return false; + } + + int num_input = trt_->getNumInput(); + int num_output = trt_->getNumOutput(); + + std::vector input_data_host(num_input); + + cv::Mat image = input_image.clone(); + preProcess(image, input_data_host, true); + + auto input_data_device = Tn::make_unique(num_input); + cudaMemcpy( + input_data_device.get(), input_data_host.data(), num_input * sizeof(float), + cudaMemcpyHostToDevice); + + auto output_data_device = Tn::make_unique(num_output); + + // do inference + std::vector bindings = {input_data_device.get(), output_data_device.get()}; + + trt_->context_->executeV2(bindings.data()); + + std::vector output_data_host(num_output); 
+ cudaMemcpy( + output_data_host.data(), output_data_device.get(), num_output * sizeof(float), + cudaMemcpyDeviceToHost); + + postProcess(output_data_host, traffic_signal); + + /* debug */ + if (0 < image_pub_.getNumSubscribers()) { + cv::Mat debug_image = input_image.clone(); + outputDebugImage(debug_image, traffic_signal); + } + + return true; +} + +void CNNClassifier::outputDebugImage( + cv::Mat & debug_image, const autoware_auto_perception_msgs::msg::TrafficSignal & traffic_signal) +{ + float probability; + std::string label; + for (std::size_t i = 0; i < traffic_signal.lights.size(); i++) { + auto light = traffic_signal.lights.at(i); + const auto light_label = state2label_[light.color] + "-" + state2label_[light.shape]; + label += light_label; + // all lamp confidence are the same + probability = light.confidence; + if (i < traffic_signal.lights.size() - 1) { + label += ","; + } + } + + const int expand_w = 200; + const int expand_h = + std::max(static_cast((expand_w * debug_image.rows) / debug_image.cols), 1); + + cv::resize(debug_image, debug_image, cv::Size(expand_w, expand_h)); + cv::Mat text_img(cv::Size(expand_w, 50), CV_8UC3, cv::Scalar(0, 0, 0)); + std::string text = label + " " + std::to_string(probability); + cv::putText( + text_img, text, cv::Point(5, 25), cv::FONT_HERSHEY_COMPLEX, 0.5, cv::Scalar(0, 255, 0), 1); + cv::vconcat(debug_image, text_img, debug_image); + + const auto debug_image_msg = + cv_bridge::CvImage(std_msgs::msg::Header(), "rgb8", debug_image).toImageMsg(); + image_pub_.publish(debug_image_msg); +} + +void CNNClassifier::preProcess(cv::Mat & image, std::vector & input_tensor, bool normalize) +{ + /* normalize */ + /* ((channel[0] / 255) - mean[0]) / std[0] */ + + // cv::cvtColor(image, image, cv::COLOR_BGR2RGB, 3); + cv::resize(image, image, cv::Size(input_w_, input_h_)); + + const size_t strides_cv[3] = { + static_cast(input_w_ * input_c_), static_cast(input_c_), 1}; + const size_t strides[3] = { + static_cast(input_h_ * 
input_w_), static_cast(input_w_), 1}; + + for (int i = 0; i < input_h_; i++) { + for (int j = 0; j < input_w_; j++) { + for (int k = 0; k < input_c_; k++) { + const size_t offset_cv = i * strides_cv[0] + j * strides_cv[1] + k * strides_cv[2]; + const size_t offset = k * strides[0] + i * strides[1] + j * strides[2]; + if (normalize) { + input_tensor[offset] = + ((static_cast(image.data[offset_cv]) / 255) - mean_[k]) / std_[k]; + } else { + input_tensor[offset] = static_cast(image.data[offset_cv]); + } + } + } + } +} + +bool CNNClassifier::postProcess( + std::vector & output_tensor, + autoware_auto_perception_msgs::msg::TrafficSignal & traffic_signal) +{ + std::vector probs; + int num_output = trt_->getNumOutput(); + calcSoftmax(output_tensor, probs, num_output); + std::vector sorted_indices = argsort(output_tensor, num_output); + + // ROS_INFO("label: %s, score: %.2f\%", + // labels_[sorted_indices[0]].c_str(), + // probs[sorted_indices[0]] * 100); + + std::string match_label = labels_[sorted_indices[0]]; + float probability = probs[sorted_indices[0]]; + + // label names are assumed to be comma-separated to represent each lamp + // e.g. + // match_label: "red","red-cross","right" + // split_label: ["red","red-cross","right"] + // if shape doesn't have color suffix, set GREEN to color state. + // if color doesn't have shape suffix, set CIRCLE to shape state. 
+ std::vector split_label; + boost::algorithm::split(split_label, match_label, boost::is_any_of(",")); + for (auto label : split_label) { + if (label2state_.find(label) == label2state_.end()) { + RCLCPP_DEBUG( + node_ptr_->get_logger(), "cnn_classifier does not have a key [%s]", label.c_str()); + continue; + } + autoware_auto_perception_msgs::msg::TrafficLight light; + if (label.find("-") != std::string::npos) { + // found "-" delimiter in label string + std::vector color_and_shape; + boost::algorithm::split(color_and_shape, label, boost::is_any_of("-")); + light.color = label2state_[color_and_shape.at(0)]; + light.shape = label2state_[color_and_shape.at(1)]; + } else { + if (label == state2label_[autoware_auto_perception_msgs::msg::TrafficLight::UNKNOWN]) { + light.color = autoware_auto_perception_msgs::msg::TrafficLight::UNKNOWN; + light.shape = autoware_auto_perception_msgs::msg::TrafficLight::UNKNOWN; + } else if (isColorLabel(label)) { + light.color = label2state_[label]; + light.shape = autoware_auto_perception_msgs::msg::TrafficLight::CIRCLE; + } else { + light.color = autoware_auto_perception_msgs::msg::TrafficLight::GREEN; + light.shape = label2state_[label]; + } + } + light.confidence = probability; + traffic_signal.lights.push_back(light); + } + + return true; +} + +bool CNNClassifier::readLabelfile(std::string filepath, std::vector & labels) +{ + std::ifstream labelsFile(filepath); + if (!labelsFile.is_open()) { + RCLCPP_ERROR(node_ptr_->get_logger(), "Could not open label file. 
[%s]", filepath.c_str()); + return false; + } + std::string label; + while (getline(labelsFile, label)) { + labels.push_back(label); + } + return true; +} + +void CNNClassifier::calcSoftmax( + std::vector & data, std::vector & probs, int num_output) +{ + float exp_sum = 0.0; + for (int i = 0; i < num_output; ++i) { + exp_sum += exp(data[i]); + } + + for (int i = 0; i < num_output; ++i) { + probs.push_back(exp(data[i]) / exp_sum); + } +} + +std::vector CNNClassifier::argsort(std::vector & tensor, int num_output) +{ + std::vector indices(num_output); + for (int i = 0; i < num_output; i++) { + indices[i] = i; + } + std::sort(indices.begin(), indices.begin() + num_output, [tensor](size_t idx1, size_t idx2) { + return tensor[idx1] > tensor[idx2]; + }); + + return indices; +} + +bool CNNClassifier::isColorLabel(const std::string label) +{ + using autoware_auto_perception_msgs::msg::TrafficSignal; + if ( + label == state2label_[autoware_auto_perception_msgs::msg::TrafficLight::GREEN] || + label == state2label_[autoware_auto_perception_msgs::msg::TrafficLight::AMBER] || + label == state2label_[autoware_auto_perception_msgs::msg::TrafficLight::RED] || + label == state2label_[autoware_auto_perception_msgs::msg::TrafficLight::WHITE]) { + return true; + } + return false; +} + +} // namespace traffic_light diff --git a/perception/traffic_light_classifier/src/color_classifier.cpp b/perception/traffic_light_classifier/src/color_classifier.cpp new file mode 100644 index 0000000000000..0267b3cd3c3ac --- /dev/null +++ b/perception/traffic_light_classifier/src/color_classifier.cpp @@ -0,0 +1,248 @@ +// Copyright 2020 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +#include "traffic_light_classifier/color_classifier.hpp" + +#include + +#include +#include +#include + +namespace traffic_light +{ +ColorClassifier::ColorClassifier(rclcpp::Node * node_ptr) : node_ptr_(node_ptr) +{ + using std::placeholders::_1; + image_pub_ = image_transport::create_publisher( + node_ptr_, "~/debug/image", rclcpp::QoS{1}.get_rmw_qos_profile()); + + hsv_config_.green_min_h = node_ptr_->declare_parameter("green_min_h", 50); + hsv_config_.green_min_s = node_ptr_->declare_parameter("green_min_s", 100); + hsv_config_.green_min_v = node_ptr_->declare_parameter("green_min_v", 150); + hsv_config_.green_max_h = node_ptr_->declare_parameter("green_max_h", 120); + hsv_config_.green_max_s = node_ptr_->declare_parameter("green_max_s", 200); + hsv_config_.green_max_v = node_ptr_->declare_parameter("green_max_v", 255); + hsv_config_.yellow_min_h = node_ptr_->declare_parameter("yellow_min_h", 0); + hsv_config_.yellow_min_s = node_ptr_->declare_parameter("yellow_min_s", 80); + hsv_config_.yellow_min_v = node_ptr_->declare_parameter("yellow_min_v", 150); + hsv_config_.yellow_max_h = node_ptr_->declare_parameter("yellow_max_h", 50); + hsv_config_.yellow_max_s = node_ptr_->declare_parameter("yellow_max_s", 200); + hsv_config_.yellow_max_v = node_ptr_->declare_parameter("yellow_max_v", 255); + hsv_config_.red_min_h = node_ptr_->declare_parameter("red_min_h", 160); + hsv_config_.red_min_s = node_ptr_->declare_parameter("red_min_s", 100); + hsv_config_.red_min_v = node_ptr_->declare_parameter("red_min_v", 150); + hsv_config_.red_max_h = 
node_ptr_->declare_parameter("red_max_h", 180); + hsv_config_.red_max_s = node_ptr_->declare_parameter("red_max_s", 255); + hsv_config_.red_max_v = node_ptr_->declare_parameter("red_max_v", 255); + + // set parameter callback + set_param_res_ = node_ptr_->add_on_set_parameters_callback( + std::bind(&ColorClassifier::parametersCallback, this, _1)); +} + +bool ColorClassifier::getTrafficSignal( + const cv::Mat & input_image, autoware_auto_perception_msgs::msg::TrafficSignal & traffic_signal) +{ + cv::Mat green_image; + cv::Mat yellow_image; + cv::Mat red_image; + filterHSV(input_image, green_image, yellow_image, red_image); + // binarize + cv::Mat green_bin_image; + cv::Mat yellow_bin_image; + cv::Mat red_bin_image; + const int bin_threshold = 127; + cv::threshold(green_image, green_bin_image, bin_threshold, 255, cv::THRESH_BINARY); + cv::threshold(yellow_image, yellow_bin_image, bin_threshold, 255, cv::THRESH_BINARY); + cv::threshold(red_image, red_bin_image, bin_threshold, 255, cv::THRESH_BINARY); + // filter noise + cv::Mat green_filtered_bin_image; + cv::Mat yellow_filtered_bin_image; + cv::Mat red_filtered_bin_image; + cv::Mat element4 = (cv::Mat_(3, 3) << 0, 1, 0, 1, 1, 1, 0, 1, 0); + cv::erode(green_bin_image, green_filtered_bin_image, element4, cv::Point(-1, -1), 1); + cv::erode(yellow_bin_image, yellow_filtered_bin_image, element4, cv::Point(-1, -1), 1); + cv::erode(red_bin_image, red_filtered_bin_image, element4, cv::Point(-1, -1), 1); + cv::dilate(green_filtered_bin_image, green_filtered_bin_image, cv::Mat(), cv::Point(-1, -1), 1); + cv::dilate(yellow_filtered_bin_image, yellow_filtered_bin_image, cv::Mat(), cv::Point(-1, -1), 1); + cv::dilate(red_filtered_bin_image, red_filtered_bin_image, cv::Mat(), cv::Point(-1, -1), 1); + + /* debug */ +#if 1 + if (0 < image_pub_.getNumSubscribers()) { + cv::Mat debug_raw_image; + cv::Mat debug_green_image; + cv::Mat debug_yellow_image; + cv::Mat debug_red_image; + cv::hconcat(input_image, input_image, 
debug_raw_image); + cv::hconcat(debug_raw_image, input_image, debug_raw_image); + cv::hconcat(green_image, green_bin_image, debug_green_image); + cv::hconcat(debug_green_image, green_filtered_bin_image, debug_green_image); + cv::hconcat(yellow_image, yellow_bin_image, debug_yellow_image); + cv::hconcat(debug_yellow_image, yellow_filtered_bin_image, debug_yellow_image); + cv::hconcat(red_image, red_bin_image, debug_red_image); + cv::hconcat(debug_red_image, red_filtered_bin_image, debug_red_image); + + cv::Mat debug_image; + cv::vconcat(debug_green_image, debug_yellow_image, debug_image); + cv::vconcat(debug_image, debug_red_image, debug_image); + cv::cvtColor(debug_image, debug_image, cv::COLOR_GRAY2RGB); + cv::vconcat(debug_raw_image, debug_image, debug_image); + const int width = input_image.cols; + const int height = input_image.rows; + cv::line( + debug_image, cv::Point(0, 0), cv::Point(debug_image.cols, 0), cv::Scalar(255, 255, 255), 1, + CV_AA, 0); + cv::line( + debug_image, cv::Point(0, height), cv::Point(debug_image.cols, height), + cv::Scalar(255, 255, 255), 1, CV_AA, 0); + cv::line( + debug_image, cv::Point(0, height * 2), cv::Point(debug_image.cols, height * 2), + cv::Scalar(255, 255, 255), 1, CV_AA, 0); + cv::line( + debug_image, cv::Point(0, height * 3), cv::Point(debug_image.cols, height * 3), + cv::Scalar(255, 255, 255), 1, CV_AA, 0); + + cv::line( + debug_image, cv::Point(0, 0), cv::Point(0, debug_image.rows), cv::Scalar(255, 255, 255), 1, + CV_AA, 0); + cv::line( + debug_image, cv::Point(width, 0), cv::Point(width, debug_image.rows), + cv::Scalar(255, 255, 255), 1, CV_AA, 0); + cv::line( + debug_image, cv::Point(width * 2, 0), cv::Point(width * 2, debug_image.rows), + cv::Scalar(255, 255, 255), 1, CV_AA, 0); + cv::line( + debug_image, cv::Point(width * 3, 0), cv::Point(width * 3, debug_image.rows), + cv::Scalar(255, 255, 255), 1, CV_AA, 0); + + cv::putText( + debug_image, "green", cv::Point(0, height * 1.5), cv::FONT_HERSHEY_SIMPLEX, 1.0, + 
cv::Scalar(255, 255, 255), 1, CV_AA); + cv::putText( + debug_image, "yellow", cv::Point(0, height * 2.5), cv::FONT_HERSHEY_SIMPLEX, 1.0, + cv::Scalar(255, 255, 255), 1, CV_AA); + cv::putText( + debug_image, "red", cv::Point(0, height * 3.5), cv::FONT_HERSHEY_SIMPLEX, 1.0, + cv::Scalar(255, 255, 255), 1, CV_AA); + const auto debug_image_msg = + cv_bridge::CvImage(std_msgs::msg::Header(), "bgr8", debug_image).toImageMsg(); + image_pub_.publish(debug_image_msg); + } +#endif + /* --- */ + + const int green_pixel_num = cv::countNonZero(green_filtered_bin_image); + const int yellow_pixel_num = cv::countNonZero(yellow_filtered_bin_image); + const int red_pixel_num = cv::countNonZero(red_filtered_bin_image); + const double green_ratio = + static_cast(green_pixel_num) / + static_cast(green_filtered_bin_image.rows * green_filtered_bin_image.cols); + const double yellow_ratio = + static_cast(yellow_pixel_num) / + static_cast(yellow_filtered_bin_image.rows * yellow_filtered_bin_image.cols); + const double red_ratio = + static_cast(red_pixel_num) / + static_cast(red_filtered_bin_image.rows * red_filtered_bin_image.cols); + + if (yellow_ratio < green_ratio && red_ratio < green_ratio) { + autoware_auto_perception_msgs::msg::TrafficLight light; + light.color = autoware_auto_perception_msgs::msg::TrafficLight::GREEN; + light.confidence = std::min(1.0, static_cast(green_pixel_num) / (20.0 * 20.0)); + traffic_signal.lights.push_back(light); + } else if (green_ratio < yellow_ratio && red_ratio < yellow_ratio) { + autoware_auto_perception_msgs::msg::TrafficLight light; + light.color = autoware_auto_perception_msgs::msg::TrafficLight::AMBER; + light.confidence = std::min(1.0, static_cast(yellow_pixel_num) / (20.0 * 20.0)); + traffic_signal.lights.push_back(light); + } else if (green_ratio < red_ratio && yellow_ratio < red_ratio) { + autoware_auto_perception_msgs::msg::TrafficLight light; + light.color = ::autoware_auto_perception_msgs::msg::TrafficLight::RED; + light.confidence = 
std::min(1.0, static_cast(red_pixel_num) / (20.0 * 20.0)); + traffic_signal.lights.push_back(light); + } else { + autoware_auto_perception_msgs::msg::TrafficLight light; + light.color = ::autoware_auto_perception_msgs::msg::TrafficLight::UNKNOWN; + light.confidence = 0.0; + traffic_signal.lights.push_back(light); + } + return true; +} + +bool ColorClassifier::filterHSV( + const cv::Mat & input_image, cv::Mat & green_image, cv::Mat & yellow_image, cv::Mat & red_image) +{ + cv::Mat hsv_image; + cv::cvtColor(input_image, hsv_image, cv::COLOR_BGR2HSV); + try { + cv::inRange(hsv_image, min_hsv_green_, max_hsv_green_, green_image); + cv::inRange(hsv_image, min_hsv_yellow_, max_hsv_yellow_, yellow_image); + cv::inRange(hsv_image, min_hsv_red_, max_hsv_red_, red_image); + } catch (cv::Exception & e) { + RCLCPP_ERROR(node_ptr_->get_logger(), "failed to filter image by hsv value : %s", e.what()); + return false; + } + return true; +} +rcl_interfaces::msg::SetParametersResult ColorClassifier::parametersCallback( + const std::vector & parameters) +{ + auto update_param = [&](const std::string & name, int & v) { + auto it = std::find_if( + parameters.cbegin(), parameters.cend(), + [&name](const rclcpp::Parameter & parameter) { return parameter.get_name() == name; }); + if (it != parameters.cend()) { + v = it->as_int(); + return true; + } + return false; + }; + + update_param("green_min_h", hsv_config_.green_min_h); + update_param("green_min_s", hsv_config_.green_min_s); + update_param("green_min_v", hsv_config_.green_min_v); + update_param("green_max_h", hsv_config_.green_max_h); + update_param("green_max_s", hsv_config_.green_max_s); + update_param("green_max_v", hsv_config_.green_max_v); + update_param("yellow_min_h", hsv_config_.yellow_min_h); + update_param("yellow_min_s", hsv_config_.yellow_min_s); + update_param("yellow_min_v", hsv_config_.yellow_min_v); + update_param("yellow_max_h", hsv_config_.yellow_max_h); + update_param("yellow_max_s", hsv_config_.yellow_max_s); + 
update_param("yellow_max_v", hsv_config_.yellow_max_v); + update_param("red_min_h", hsv_config_.red_min_h); + update_param("red_min_s", hsv_config_.red_min_s); + update_param("red_min_v", hsv_config_.red_min_v); + update_param("red_max_h", hsv_config_.red_max_h); + update_param("red_max_s", hsv_config_.red_max_s); + update_param("red_max_v", hsv_config_.red_max_v); + + min_hsv_green_ = + cv::Scalar(hsv_config_.green_min_h, hsv_config_.green_min_s, hsv_config_.green_min_v); + max_hsv_green_ = + cv::Scalar(hsv_config_.green_max_h, hsv_config_.green_max_s, hsv_config_.green_max_v); + min_hsv_yellow_ = + cv::Scalar(hsv_config_.yellow_min_h, hsv_config_.yellow_min_s, hsv_config_.yellow_min_v); + max_hsv_yellow_ = + cv::Scalar(hsv_config_.yellow_max_h, hsv_config_.yellow_max_s, hsv_config_.yellow_max_v); + min_hsv_red_ = cv::Scalar(hsv_config_.red_min_h, hsv_config_.red_min_s, hsv_config_.red_min_v); + max_hsv_red_ = cv::Scalar(hsv_config_.red_max_h, hsv_config_.red_max_s, hsv_config_.red_max_v); + + rcl_interfaces::msg::SetParametersResult result; + result.successful = true; + result.reason = "success"; + return result; +} + +} // namespace traffic_light diff --git a/perception/traffic_light_classifier/src/nodelet.cpp b/perception/traffic_light_classifier/src/nodelet.cpp new file mode 100644 index 0000000000000..768407be3964e --- /dev/null +++ b/perception/traffic_light_classifier/src/nodelet.cpp @@ -0,0 +1,122 @@ +// Copyright 2020 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. +#include "traffic_light_classifier/nodelet.hpp" + +#include +#include +#include +#include + +namespace traffic_light +{ +TrafficLightClassifierNodelet::TrafficLightClassifierNodelet(const rclcpp::NodeOptions & options) +: Node("traffic_light_classifier_node", options) +{ + using std::placeholders::_1; + using std::placeholders::_2; + is_approximate_sync_ = this->declare_parameter("approximate_sync", false); + if (is_approximate_sync_) { + approximate_sync_.reset(new ApproximateSync(ApproximateSyncPolicy(10), image_sub_, roi_sub_)); + approximate_sync_->registerCallback( + std::bind(&TrafficLightClassifierNodelet::imageRoiCallback, this, _1, _2)); + } else { + sync_.reset(new Sync(SyncPolicy(10), image_sub_, roi_sub_)); + sync_->registerCallback( + std::bind(&TrafficLightClassifierNodelet::imageRoiCallback, this, _1, _2)); + } + + traffic_signal_array_pub_ = + this->create_publisher( + "~/output/traffic_signals", rclcpp::QoS{1}); + + // + auto timer_callback = std::bind(&TrafficLightClassifierNodelet::connectCb, this); + const auto period_s = 0.1; + const auto period_ns = + std::chrono::duration_cast(std::chrono::duration(period_s)); + timer_ = std::make_shared>( + this->get_clock(), period_ns, std::move(timer_callback), + this->get_node_base_interface()->get_context()); + this->get_node_timers_interface()->add_timer(timer_, nullptr); + + int classifier_type = this->declare_parameter( + "classifier_type", static_cast(TrafficLightClassifierNodelet::ClassifierType::HSVFilter)); + if (classifier_type == TrafficLightClassifierNodelet::ClassifierType::HSVFilter) { + classifier_ptr_ = std::make_shared(this); + } else if (classifier_type == TrafficLightClassifierNodelet::ClassifierType::CNN) { +#if ENABLE_GPU + classifier_ptr_ = std::make_shared(this); +#else + RCLCPP_ERROR( + this->get_logger(), "please install CUDA, CUDNN and TensorRT to use cnn classifier"); 
+#endif + } +} + +void TrafficLightClassifierNodelet::connectCb() +{ + // set callbacks only when there are subscribers to this node + if ( + traffic_signal_array_pub_->get_subscription_count() == 0 && + traffic_signal_array_pub_->get_intra_process_subscription_count() == 0) { + image_sub_.unsubscribe(); + roi_sub_.unsubscribe(); + } else if (!image_sub_.getSubscriber()) { + image_sub_.subscribe(this, "~/input/image", "raw", rmw_qos_profile_sensor_data); + roi_sub_.subscribe(this, "~/input/rois", rclcpp::QoS{1}.get_rmw_qos_profile()); + } +} + +void TrafficLightClassifierNodelet::imageRoiCallback( + const sensor_msgs::msg::Image::ConstSharedPtr & input_image_msg, + const autoware_auto_perception_msgs::msg::TrafficLightRoiArray::ConstSharedPtr & input_rois_msg) +{ + if (classifier_ptr_.use_count() == 0) { + return; + } + + cv_bridge::CvImagePtr cv_ptr; + try { + cv_ptr = cv_bridge::toCvCopy(input_image_msg, sensor_msgs::image_encodings::RGB8); + } catch (cv_bridge::Exception & e) { + RCLCPP_ERROR( + this->get_logger(), "Could not convert from '%s' to 'rgb8'.", + input_image_msg->encoding.c_str()); + } + + autoware_auto_perception_msgs::msg::TrafficSignalArray output_msg; + + for (size_t i = 0; i < input_rois_msg->rois.size(); ++i) { + const sensor_msgs::msg::RegionOfInterest & roi = input_rois_msg->rois.at(i).roi; + cv::Mat clipped_image( + cv_ptr->image, cv::Rect(roi.x_offset, roi.y_offset, roi.width, roi.height)); + + autoware_auto_perception_msgs::msg::TrafficSignal traffic_signal; + traffic_signal.map_primitive_id = input_rois_msg->rois.at(i).id; + if (!classifier_ptr_->getTrafficSignal(clipped_image, traffic_signal)) { + RCLCPP_ERROR(this->get_logger(), "failed classify image, abort callback"); + return; + } + output_msg.signals.push_back(traffic_signal); + } + + output_msg.header = input_image_msg->header; + traffic_signal_array_pub_->publish(output_msg); +} + +} // namespace traffic_light + +#include + 
+RCLCPP_COMPONENTS_REGISTER_NODE(traffic_light::TrafficLightClassifierNodelet) diff --git a/perception/traffic_light_classifier/utils/trt_common.cpp b/perception/traffic_light_classifier/utils/trt_common.cpp new file mode 100644 index 0000000000000..6e10d7d71ab57 --- /dev/null +++ b/perception/traffic_light_classifier/utils/trt_common.cpp @@ -0,0 +1,149 @@ +// Copyright 2020 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include + +#include +#include + +namespace Tn +{ +void check_error(const ::cudaError_t e, decltype(__FILE__) f, decltype(__LINE__) n) +{ + if (e != ::cudaSuccess) { + std::stringstream s; + s << ::cudaGetErrorName(e) << " (" << e << ")@" << f << "#L" << n << ": " + << ::cudaGetErrorString(e); + throw std::runtime_error{s.str()}; + } +} + +TrtCommon::TrtCommon(std::string model_path, std::string cache_dir, std::string precision) +: model_file_path_(model_path), + cache_dir_(cache_dir), + precision_(precision), + input_name_("input_0"), + output_name_("output_0"), + is_initialized_(false), + max_batch_size_(1) +{ +} + +void TrtCommon::setup() +{ + const boost::filesystem::path path(model_file_path_); + std::string extension = path.extension().string(); + + if (boost::filesystem::exists(path)) { + if (extension == ".engine") { + loadEngine(model_file_path_); + } else if (extension == ".onnx") { + std::string cache_engine_path = cache_dir_ + "/" + path.stem().string() + ".engine"; + const boost::filesystem::path 
cache_path(cache_engine_path); + if (boost::filesystem::exists(cache_path)) { + loadEngine(cache_engine_path); + } else { + logger_.log(nvinfer1::ILogger::Severity::kINFO, "start build engine"); + buildEngineFromOnnx(model_file_path_, cache_engine_path); + logger_.log(nvinfer1::ILogger::Severity::kINFO, "end build engine"); + } + } else { + is_initialized_ = false; + return; + } + } else { + is_initialized_ = false; + return; + } + + context_ = UniquePtr(engine_->createExecutionContext()); + input_dims_ = engine_->getBindingDimensions(getInputBindingIndex()); + output_dims_ = engine_->getBindingDimensions(getOutputBindingIndex()); + is_initialized_ = true; +} + +bool TrtCommon::loadEngine(std::string engine_file_path) +{ + std::ifstream engine_file(engine_file_path); + std::stringstream engine_buffer; + engine_buffer << engine_file.rdbuf(); + std::string engine_str = engine_buffer.str(); + runtime_ = UniquePtr(nvinfer1::createInferRuntime(logger_)); + engine_ = UniquePtr(runtime_->deserializeCudaEngine( + reinterpret_cast(engine_str.data()), engine_str.size(), nullptr)); + return true; +} + +bool TrtCommon::buildEngineFromOnnx(std::string onnx_file_path, std::string output_engine_file_path) +{ + auto builder = UniquePtr(nvinfer1::createInferBuilder(logger_)); + const auto explicitBatch = + 1U << static_cast(nvinfer1::NetworkDefinitionCreationFlag::kEXPLICIT_BATCH); + auto network = UniquePtr(builder->createNetworkV2(explicitBatch)); + auto config = UniquePtr(builder->createBuilderConfig()); + + auto parser = UniquePtr(nvonnxparser::createParser(*network, logger_)); + if (!parser->parseFromFile( + onnx_file_path.c_str(), static_cast(nvinfer1::ILogger::Severity::kERROR))) { + return false; + } + + builder->setMaxBatchSize(max_batch_size_); + config->setMaxWorkspaceSize(16 << 20); + + if (precision_ == "fp16") { + config->setFlag(nvinfer1::BuilderFlag::kFP16); + } else if (precision_ == "int8") { + config->setFlag(nvinfer1::BuilderFlag::kINT8); + } else { + return 
false; + } + + engine_ = UniquePtr(builder->buildEngineWithConfig(*network, *config)); + if (!engine_) { + return false; + } + + // save engine + nvinfer1::IHostMemory * data = engine_->serialize(); + std::ofstream file; + file.open(output_engine_file_path, std::ios::binary | std::ios::out); + if (!file.is_open()) { + return false; + } + file.write((const char *)data->data(), data->size()); + file.close(); + + return true; +} + +bool TrtCommon::isInitialized() { return is_initialized_; } + +int TrtCommon::getNumInput() +{ + return std::accumulate( + input_dims_.d, input_dims_.d + input_dims_.nbDims, 1, std::multiplies()); +} + +int TrtCommon::getNumOutput() +{ + return std::accumulate( + output_dims_.d, output_dims_.d + output_dims_.nbDims, 1, std::multiplies()); +} + +int TrtCommon::getInputBindingIndex() { return engine_->getBindingIndex(input_name_.c_str()); } + +int TrtCommon::getOutputBindingIndex() { return engine_->getBindingIndex(output_name_.c_str()); } + +} // namespace Tn diff --git a/perception/traffic_light_classifier/utils/trt_common.hpp b/perception/traffic_light_classifier/utils/trt_common.hpp new file mode 100644 index 0000000000000..269f7b11c6716 --- /dev/null +++ b/perception/traffic_light_classifier/utils/trt_common.hpp @@ -0,0 +1,146 @@ +// Copyright 2020 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#ifndef PERCEPTION__TRAFFIC_LIGHT_RECOGNITION__TRAFFIC_LIGHT_CLASSIFIER__UTILS__TRT_COMMON_HPP_ +#define PERCEPTION__TRAFFIC_LIGHT_RECOGNITION__TRAFFIC_LIGHT_CLASSIFIER__UTILS__TRT_COMMON_HPP_ + +#include +#include + +#include + +#include <./cudnn.h> +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include + +#define CHECK_CUDA_ERROR(e) (Tn::check_error(e, __FILE__, __LINE__)) + +namespace Tn +{ +class Logger : public nvinfer1::ILogger +{ +public: + Logger() : Logger(Severity::kINFO) {} + + explicit Logger(Severity severity) : reportableSeverity(severity) {} + + void log(Severity severity, const char * msg) noexcept override + { + // suppress messages with severity enum value greater than the reportable + if (severity > reportableSeverity) { + return; + } + + switch (severity) { + case Severity::kINTERNAL_ERROR: + std::cerr << "[TRT_COMMON][INTERNAL_ERROR]: "; + break; + case Severity::kERROR: + std::cerr << "[TRT_COMMON][ERROR]: "; + break; + case Severity::kWARNING: + std::cerr << "[TRT_COMMON][WARNING]: "; + break; + case Severity::kINFO: + std::cerr << "[TRT_COMMON][INFO]: "; + break; + default: + std::cerr << "[TRT_COMMON][UNKNOWN]: "; + break; + } + std::cerr << msg << std::endl; + } + + Severity reportableSeverity{Severity::kWARNING}; +}; + +void check_error(const ::cudaError_t e, decltype(__FILE__) f, decltype(__LINE__) n); + +struct InferDeleter +{ + void operator()(void * p) const { ::cudaFree(p); } +}; + +template +using UniquePtr = std::unique_ptr; + +// auto array = Tn::make_unique(n); +// ::cudaMemcpy(array.get(), src_array, sizeof(float)*n, ::cudaMemcpyHostToDevice); +template +typename std::enable_if::value, Tn::UniquePtr>::type make_unique( + const std::size_t n) +{ + using U = typename std::remove_extent::type; + U * p; + ::cudaMalloc(reinterpret_cast(&p), sizeof(U) * n); + return Tn::UniquePtr{p}; +} + +// auto value = Tn::make_unique(); +// ::cudaMemcpy(value.get(), src_value, 
sizeof(my_class), ::cudaMemcpyHostToDevice); +template +Tn::UniquePtr make_unique() +{ + T * p; + ::cudaMalloc(reinterpret_cast(&p), sizeof(T)); + return Tn::UniquePtr{p}; +} + +class TrtCommon +{ +public: + TrtCommon(std::string model_path, std::string cache_dir, std::string precision); + ~TrtCommon() {} + + bool loadEngine(std::string engine_file_path); + bool buildEngineFromOnnx(std::string onnx_file_path, std::string output_engine_file_path); + void setup(); + + bool isInitialized(); + int getNumInput(); + int getNumOutput(); + int getInputBindingIndex(); + int getOutputBindingIndex(); + + UniquePtr context_; + +private: + Logger logger_; + std::string model_file_path_; + UniquePtr runtime_; + UniquePtr engine_; + + nvinfer1::Dims input_dims_; + nvinfer1::Dims output_dims_; + std::string cache_dir_; + std::string precision_; + std::string input_name_; + std::string output_name_; + bool is_initialized_; + size_t max_batch_size_; +}; + +} // namespace Tn + +#endif // PERCEPTION__TRAFFIC_LIGHT_RECOGNITION__TRAFFIC_LIGHT_CLASSIFIER__UTILS__TRT_COMMON_HPP_ diff --git a/perception/traffic_light_map_based_detector/CMakeLists.txt b/perception/traffic_light_map_based_detector/CMakeLists.txt new file mode 100644 index 0000000000000..054b75f7b5c99 --- /dev/null +++ b/perception/traffic_light_map_based_detector/CMakeLists.txt @@ -0,0 +1,47 @@ +cmake_minimum_required(VERSION 3.5) +project(traffic_light_map_based_detector) + +if(NOT CMAKE_CXX_STANDARD) + set(CMAKE_CXX_STANDARD 14) + set(CMAKE_CXX_STANDARD_REQUIRED ON) + set(CMAKE_CXX_EXTENSIONS OFF) +endif() +if(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES "Clang") + add_compile_options(-Wall -Wextra -Wpedantic -Werror) +endif() + +find_package(ament_cmake_auto REQUIRED) +ament_auto_find_build_dependencies() + +find_package(Eigen3 REQUIRED) + +########### +## Build ## +########### + +include_directories( + ${EIGEN3_INCLUDE_DIR} +) + +ament_auto_add_library(traffic_light_map_based_detector SHARED + src/node.cpp 
+) + +rclcpp_components_register_node(traffic_light_map_based_detector + PLUGIN "traffic_light::MapBasedDetector" + EXECUTABLE traffic_light_map_based_detector_node +) + +if(BUILD_TESTING) + find_package(ament_lint_auto REQUIRED) + ament_lint_auto_find_test_dependencies() +endif() + +############# +## Install ## +############# + +ament_auto_package(INSTALL_TO_SHARE + launch + config +) diff --git a/perception/traffic_light_map_based_detector/README.md b/perception/traffic_light_map_based_detector/README.md new file mode 100644 index 0000000000000..19e8dddf60e01 --- /dev/null +++ b/perception/traffic_light_map_based_detector/README.md @@ -0,0 +1,37 @@ +# The `traffic_light_map_based_detector` Package + +## Overview + +`traffic_light_map_based_detector` calculates where the traffic lights will appear in the image based on the HD map. + +Calibration and vibration errors can be entered as parameters, and the size of the detected RegionOfInterest will change according to the error. + +![traffic_light_map_based_detector_result](./docs/traffic_light_map_based_detector_result.svg) + +If the node receives route information, it only looks at traffic lights on that route. +If the node receives no route information, it looks at all traffic lights within a radius of 200 meters whose angle between the traffic light and the camera is less than 40 degrees. 
+ +## Input topics + +| Name | Type | Description | +| -------------------- | ---------------------------------------- | ----------------------- | +| `~input/vector_map` | autoware_auto_mapping_msgs::HADMapBin | vector map | +| `~input/camera_info` | sensor_msgs::CameraInfo | target camera parameter | +| `~input/route` | autoware_auto_planning_msgs::HADMapRoute | optional: route | + +## Output topics + +| Name | Type | Description | +| ---------------- | --------------------------------------------------- | -------------------------------------------------------------------- | +| `~output/rois` | autoware_auto_perception_msgs::TrafficLightRoiArray | location of traffic lights in image corresponding to the camera info | +| `~debug/markers` | visualization_msgs::MarkerArray | visualization to debug | + +## Node parameters + +| Parameter | Type | Description | +| ---------------------- | ------ | ----------------------------------------------------------- | +| `max_vibration_pitch` | double | Maximum error in pitch direction. If -5~+5, it will be 10. | +| `max_vibration_yaw` | double | Maximum error in yaw direction. If -5~+5, it will be 10. | +| `max_vibration_height` | double | Maximum error in height direction. If -5~+5, it will be 10. | +| `max_vibration_width` | double | Maximum error in width direction. If -5~+5, it will be 10. | +| `max_vibration_depth` | double | Maximum error in depth direction. If -5~+5, it will be 10. 
| diff --git a/perception/traffic_light_map_based_detector/config/traffic_light_map_based_detector.param.yaml b/perception/traffic_light_map_based_detector/config/traffic_light_map_based_detector.param.yaml new file mode 100644 index 0000000000000..38fc290464857 --- /dev/null +++ b/perception/traffic_light_map_based_detector/config/traffic_light_map_based_detector.param.yaml @@ -0,0 +1,7 @@ +/**: + ros__parameters: + max_vibration_pitch: 0.01745329251 # -0.5 ~ 0.5 deg + max_vibration_yaw: 0.01745329251 # -0.5 ~ 0.5 deg + max_vibration_height: 0.1 # -0.05 ~ 0.05 m + max_vibration_width: 0.1 # -0.05 ~ 0.05 m + max_vibration_depth: 0.1 # -0.05 ~ 0.05 m diff --git a/perception/traffic_light_map_based_detector/docs/traffic_light_map_based_detector_result.svg b/perception/traffic_light_map_based_detector/docs/traffic_light_map_based_detector_result.svg new file mode 100644 index 0000000000000..b3f5c95df415d --- /dev/null +++ b/perception/traffic_light_map_based_detector/docs/traffic_light_map_based_detector_result.svg @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/perception/traffic_light_map_based_detector/include/traffic_light_map_based_detector/node.hpp b/perception/traffic_light_map_based_detector/include/traffic_light_map_based_detector/node.hpp new file mode 100644 index 0000000000000..860688b1b94d0 --- /dev/null +++ b/perception/traffic_light_map_based_detector/include/traffic_light_map_based_detector/node.hpp @@ -0,0 +1,131 @@ +// Copyright 2020 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. +/* + * Copyright 2019 Autoware Foundation. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Authors: Yukihiro Saito + * + */ + +#ifndef TRAFFIC_LIGHT_MAP_BASED_DETECTOR__NODE_HPP_ +#define TRAFFIC_LIGHT_MAP_BASED_DETECTOR__NODE_HPP_ + +#include +#include +#include + +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include + +#include +#include +#include + +namespace traffic_light +{ +class MapBasedDetector : public rclcpp::Node +{ +public: + explicit MapBasedDetector(const rclcpp::NodeOptions & node_options); + +private: + struct Config + { + double max_vibration_pitch; + double max_vibration_yaw; + double max_vibration_height; + double max_vibration_width; + double max_vibration_depth; + }; + + struct IdLessThan + { + bool operator()( + const lanelet::ConstLineString3d & left, const lanelet::ConstLineString3d & right) const + { + return left.id() < right.id(); + } + }; + +private: + rclcpp::Subscription::SharedPtr map_sub_; + rclcpp::Subscription::SharedPtr camera_info_sub_; + rclcpp::Subscription::SharedPtr route_sub_; + + rclcpp::Publisher::SharedPtr roi_pub_; + rclcpp::Publisher::SharedPtr viz_pub_; + + tf2_ros::Buffer tf_buffer_; + tf2_ros::TransformListener tf_listener_; + + using TrafficLightSet = std::set; + + std::shared_ptr all_traffic_lights_ptr_; + 
std::shared_ptr route_traffic_lights_ptr_; + + lanelet::LaneletMapPtr lanelet_map_ptr_; + lanelet::traffic_rules::TrafficRulesPtr traffic_rules_ptr_; + lanelet::routing::RoutingGraphPtr routing_graph_ptr_; + Config config_; + + void mapCallback(const autoware_auto_mapping_msgs::msg::HADMapBin::ConstSharedPtr input_msg); + void cameraInfoCallback(const sensor_msgs::msg::CameraInfo::ConstSharedPtr input_msg); + void routeCallback(const autoware_auto_planning_msgs::msg::HADMapRoute::ConstSharedPtr input_msg); + void getVisibleTrafficLights( + const TrafficLightSet & all_traffic_lights, const geometry_msgs::msg::Pose & camera_pose, + const image_geometry::PinholeCameraModel & pinhole_camera_model, + std::vector & visible_traffic_lights); + bool isInDistanceRange( + const geometry_msgs::msg::Point & tl_point, const geometry_msgs::msg::Point & camera_point, + const double max_distance_range) const; + bool isInAngleRange( + const double & tl_yaw, const double & camera_yaw, const double max_angle_range) const; + bool isInImageFrame( + const image_geometry::PinholeCameraModel & pinhole_camera_model, + const geometry_msgs::msg::Point & point) const; + bool getTrafficLightRoi( + const geometry_msgs::msg::Pose & camera_pose, + const image_geometry::PinholeCameraModel & pinhole_camera_model, + const lanelet::ConstLineString3d traffic_light, const Config & config, + autoware_auto_perception_msgs::msg::TrafficLightRoi & tl_roi); + void publishVisibleTrafficLights( + const geometry_msgs::msg::PoseStamped camera_pose_stamped, + const std::vector & visible_traffic_lights, + const rclcpp::Publisher::SharedPtr pub); +}; +} // namespace traffic_light +#endif // TRAFFIC_LIGHT_MAP_BASED_DETECTOR__NODE_HPP_ diff --git a/perception/traffic_light_map_based_detector/launch/traffic_light_map_based_detector.launch.xml b/perception/traffic_light_map_based_detector/launch/traffic_light_map_based_detector.launch.xml new file mode 100644 index 0000000000000..bb8804d2f006f --- /dev/null +++ 
b/perception/traffic_light_map_based_detector/launch/traffic_light_map_based_detector.launch.xml @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + + + diff --git a/perception/traffic_light_map_based_detector/package.xml b/perception/traffic_light_map_based_detector/package.xml new file mode 100644 index 0000000000000..268f9f19a9e5e --- /dev/null +++ b/perception/traffic_light_map_based_detector/package.xml @@ -0,0 +1,32 @@ + + + traffic_light_map_based_detector + 0.1.0 + The traffic_light_map_based_detector package + + Yukihiro Saito + Apache License 2.0 + + ament_cmake_auto + + autoware_auto_mapping_msgs + autoware_auto_perception_msgs + autoware_auto_planning_msgs + autoware_utils + geometry_msgs + image_geometry + lanelet2_extension + rclcpp + sensor_msgs + tf2 + tf2_ros + + + ament_lint_auto + autoware_lint_common + + + + ament_cmake + + diff --git a/perception/traffic_light_map_based_detector/src/node.cpp b/perception/traffic_light_map_based_detector/src/node.cpp new file mode 100644 index 0000000000000..39e8d87471073 --- /dev/null +++ b/perception/traffic_light_map_based_detector/src/node.cpp @@ -0,0 +1,494 @@ +// Copyright 2020 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +/* + * Copyright 2019 Autoware Foundation. All rights reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Authors: Yukihiro Saito + * + */ + +#include "traffic_light_map_based_detector/node.hpp" + +#include +#include +#include +#include +#include + +#include + +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +#define EIGEN_MPL2_ONLY +#include +#include + +namespace +{ +cv::Point2d calcRawImagePointFromPoint3D( + const image_geometry::PinholeCameraModel & pinhole_camera_model, const cv::Point3d & point3d) +{ + cv::Point2d rectified_image_point = pinhole_camera_model.project3dToPixel(point3d); + return pinhole_camera_model.unrectifyPoint(rectified_image_point); +} + +cv::Point2d calcRawImagePointFromPoint3D( + const image_geometry::PinholeCameraModel & pinhole_camera_model, + const geometry_msgs::msg::Point & point3d) +{ + return calcRawImagePointFromPoint3D( + pinhole_camera_model, cv::Point3d(point3d.x, point3d.y, point3d.z)); +} + +void roundInImageFrame( + const image_geometry::PinholeCameraModel & pinhole_camera_model, cv::Point2d & point) +{ + const sensor_msgs::msg::CameraInfo camera_info = pinhole_camera_model.cameraInfo(); + point.x = + std::max(std::min(point.x, static_cast(static_cast(camera_info.width) - 1)), 0.0); + point.y = + std::max(std::min(point.y, static_cast(static_cast(camera_info.height) - 1)), 0.0); +} +} // namespace + +namespace traffic_light +{ +MapBasedDetector::MapBasedDetector(const rclcpp::NodeOptions & node_options) +: Node("traffic_light_map_based_detector", node_options), + tf_buffer_(this->get_clock()), + tf_listener_(tf_buffer_) +{ + 
using std::placeholders::_1; + + // subscribers + map_sub_ = create_subscription( + "~/input/vector_map", rclcpp::QoS{1}.transient_local(), + std::bind(&MapBasedDetector::mapCallback, this, _1)); + camera_info_sub_ = create_subscription( + "~/input/camera_info", rclcpp::SensorDataQoS(), + std::bind(&MapBasedDetector::cameraInfoCallback, this, _1)); + route_sub_ = create_subscription( + "~/input/route", 1, std::bind(&MapBasedDetector::routeCallback, this, _1)); + + // publishers + roi_pub_ = this->create_publisher( + "~/output/rois", 1); + viz_pub_ = this->create_publisher("~/debug/markers", 1); + + // parameter declaration needs default values: are 0.0 good defaults for this? + config_.max_vibration_pitch = declare_parameter("max_vibration_pitch", 0.0); + config_.max_vibration_yaw = declare_parameter("max_vibration_yaw", 0.0); + config_.max_vibration_height = declare_parameter("max_vibration_height", 0.0); + config_.max_vibration_width = declare_parameter("max_vibration_width", 0.0); + config_.max_vibration_depth = declare_parameter("max_vibration_depth", 0.0); +} + +void MapBasedDetector::cameraInfoCallback( + const sensor_msgs::msg::CameraInfo::ConstSharedPtr input_msg) +{ + if (all_traffic_lights_ptr_ == nullptr && route_traffic_lights_ptr_ == nullptr) { + return; + } + + image_geometry::PinholeCameraModel pinhole_camera_model; + pinhole_camera_model.fromCameraInfo(*input_msg); + + autoware_auto_perception_msgs::msg::TrafficLightRoiArray output_msg; + output_msg.header = input_msg->header; + + /* Camera pose */ + geometry_msgs::msg::PoseStamped camera_pose_stamped; + try { + geometry_msgs::msg::TransformStamped transform; + transform = tf_buffer_.lookupTransform( + "map", input_msg->header.frame_id, input_msg->header.stamp, + rclcpp::Duration::from_seconds(0.2)); + camera_pose_stamped.header = input_msg->header; + camera_pose_stamped.pose = autoware_utils::transform2pose(transform.transform); + } catch (tf2::TransformException & ex) { + RCLCPP_WARN_THROTTLE( 
get_logger(), *get_clock(), 5000, "cannot get transform from map frame to camera frame"); + return; + } + + /* + * visible_traffic_lights : for each traffic light in map check if in range and in view angle of + * camera + */ + std::vector visible_traffic_lights; + // If get a route, use only traffic lights on the route. + if (route_traffic_lights_ptr_ != nullptr) { + getVisibleTrafficLights( + *route_traffic_lights_ptr_, camera_pose_stamped.pose, pinhole_camera_model, + visible_traffic_lights); + // If don't get a route, use the traffic lights around ego vehicle. + } else if (all_traffic_lights_ptr_ != nullptr) { + getVisibleTrafficLights( + *all_traffic_lights_ptr_, camera_pose_stamped.pose, pinhole_camera_model, + visible_traffic_lights); + // This shouldn't run. + } else { + return; + } + + /* + * Get the ROI from the lanelet and the intrinsic matrix of camera to determine where it appears + * in image. + */ + for (const auto & traffic_light : visible_traffic_lights) { + autoware_auto_perception_msgs::msg::TrafficLightRoi tl_roi; + if (!getTrafficLightRoi( + camera_pose_stamped.pose, pinhole_camera_model, traffic_light, config_, tl_roi)) { + continue; + } + output_msg.rois.push_back(tl_roi); + } + roi_pub_->publish(output_msg); + publishVisibleTrafficLights(camera_pose_stamped, visible_traffic_lights, viz_pub_); +} + +bool MapBasedDetector::getTrafficLightRoi( + const geometry_msgs::msg::Pose & camera_pose, + const image_geometry::PinholeCameraModel & pinhole_camera_model, + const lanelet::ConstLineString3d traffic_light, const Config & config, + autoware_auto_perception_msgs::msg::TrafficLightRoi & tl_roi) +{ + const double tl_height = traffic_light.attributeOr("height", 0.0); + const auto & tl_left_down_point = traffic_light.front(); + const auto & tl_right_down_point = traffic_light.back(); + + tf2::Transform tf_map2camera( + tf2::Quaternion( + camera_pose.orientation.x, camera_pose.orientation.y, camera_pose.orientation.z, + camera_pose.orientation.w), + 
tf2::Vector3(camera_pose.position.x, camera_pose.position.y, camera_pose.position.z)); + // id + tl_roi.id = traffic_light.id(); + + // for roi.x_offset and roi.y_offset + { + tf2::Transform tf_map2tl( + tf2::Quaternion(0, 0, 0, 1), + tf2::Vector3( + tl_left_down_point.x(), tl_left_down_point.y(), tl_left_down_point.z() + tl_height)); + tf2::Transform tf_camera2tl; + tf_camera2tl = tf_map2camera.inverse() * tf_map2tl; + // max vibration + const double max_vibration_x = + std::sin(config.max_vibration_yaw * 0.5) * tf_camera2tl.getOrigin().z() + + config.max_vibration_width * 0.5; + const double max_vibration_y = + std::sin(config.max_vibration_pitch * 0.5) * tf_camera2tl.getOrigin().z() + + config.max_vibration_height * 0.5; + const double max_vibration_z = config.max_vibration_depth * 0.5; + // target position in camera coordinate + geometry_msgs::msg::Point point3d; + point3d.x = tf_camera2tl.getOrigin().x() - max_vibration_x; + point3d.y = tf_camera2tl.getOrigin().y() - max_vibration_y; + point3d.z = tf_camera2tl.getOrigin().z() - max_vibration_z; + if (point3d.z <= 0.0) { + return false; + } + cv::Point2d point2d = calcRawImagePointFromPoint3D(pinhole_camera_model, point3d); + roundInImageFrame(pinhole_camera_model, point2d); + tl_roi.roi.x_offset = point2d.x; + tl_roi.roi.y_offset = point2d.y; + } + + // for roi.width and roi.height + { + tf2::Transform tf_map2tl( + tf2::Quaternion(0, 0, 0, 1), + tf2::Vector3(tl_right_down_point.x(), tl_right_down_point.y(), tl_right_down_point.z())); + tf2::Transform tf_camera2tl; + tf_camera2tl = tf_map2camera.inverse() * tf_map2tl; + // max vibration + const double max_vibration_x = + std::sin(config.max_vibration_yaw * 0.5) * tf_camera2tl.getOrigin().z() + + config.max_vibration_width * 0.5; + const double max_vibration_y = + std::sin(config.max_vibration_pitch * 0.5) * tf_camera2tl.getOrigin().z() + + config.max_vibration_height * 0.5; + const double max_vibration_z = config.max_vibration_depth * 0.5; + // target position 
in camera coordinate + geometry_msgs::msg::Point point3d; + point3d.x = tf_camera2tl.getOrigin().x() + max_vibration_x; + point3d.y = tf_camera2tl.getOrigin().y() + max_vibration_y; + point3d.z = tf_camera2tl.getOrigin().z() - max_vibration_z; + if (point3d.z <= 0.0) { + return false; + } + cv::Point2d point2d = calcRawImagePointFromPoint3D(pinhole_camera_model, point3d); + roundInImageFrame(pinhole_camera_model, point2d); + tl_roi.roi.width = point2d.x - tl_roi.roi.x_offset; + tl_roi.roi.height = point2d.y - tl_roi.roi.y_offset; + if (tl_roi.roi.width < 1 || tl_roi.roi.height < 1) { + return false; + } + } + return true; +} + +void MapBasedDetector::mapCallback( + const autoware_auto_mapping_msgs::msg::HADMapBin::ConstSharedPtr input_msg) +{ + lanelet_map_ptr_ = std::make_shared(); + + lanelet::utils::conversion::fromBinMsg(*input_msg, lanelet_map_ptr_); + lanelet::ConstLanelets all_lanelets = lanelet::utils::query::laneletLayer(lanelet_map_ptr_); + std::vector all_lanelet_traffic_lights = + lanelet::utils::query::autowareTrafficLights(all_lanelets); + all_traffic_lights_ptr_ = std::make_shared(); + for (auto tl_itr = all_lanelet_traffic_lights.begin(); tl_itr != all_lanelet_traffic_lights.end(); + ++tl_itr) { + lanelet::AutowareTrafficLightConstPtr tl = *tl_itr; + + auto lights = tl->trafficLights(); + for (auto lsp : lights) { + if (!lsp.isLineString()) { // traffic lights must be linestrings + continue; + } + all_traffic_lights_ptr_->insert(static_cast(lsp)); + } + } +} + +void MapBasedDetector::routeCallback( + const autoware_auto_planning_msgs::msg::HADMapRoute::ConstSharedPtr input_msg) +{ + if (lanelet_map_ptr_ == nullptr) { + RCLCPP_WARN(get_logger(), "cannot set traffic light in route because don't receive map"); + return; + } + lanelet::ConstLanelets route_lanelets; + for (const auto & segment : input_msg->segments) { + for (const auto & primitive : segment.primitives) { + try { + 
route_lanelets.push_back(lanelet_map_ptr_->laneletLayer.get(primitive.id)); + } catch (const lanelet::NoSuchPrimitiveError & ex) { + RCLCPP_ERROR(get_logger(), "%s", ex.what()); + return; + } + } + } + std::vector route_lanelet_traffic_lights = + lanelet::utils::query::autowareTrafficLights(route_lanelets); + route_traffic_lights_ptr_ = std::make_shared(); + for (auto tl_itr = route_lanelet_traffic_lights.begin(); + tl_itr != route_lanelet_traffic_lights.end(); ++tl_itr) { + lanelet::AutowareTrafficLightConstPtr tl = *tl_itr; + + auto lights = tl->trafficLights(); + for (auto lsp : lights) { + if (!lsp.isLineString()) { // traffic lights must be linestrings + continue; + } + route_traffic_lights_ptr_->insert(static_cast(lsp)); + } + } +} + +void MapBasedDetector::getVisibleTrafficLights( + const MapBasedDetector::TrafficLightSet & all_traffic_lights, + const geometry_msgs::msg::Pose & camera_pose, + const image_geometry::PinholeCameraModel & pinhole_camera_model, + std::vector & visible_traffic_lights) +{ + for (const auto & traffic_light : all_traffic_lights) { + const auto & tl_left_down_point = traffic_light.front(); + const auto & tl_right_down_point = traffic_light.back(); + const double tl_height = traffic_light.attributeOr("height", 0.0); + + // check distance range + geometry_msgs::msg::Point tl_central_point; + tl_central_point.x = (tl_right_down_point.x() + tl_left_down_point.x()) / 2.0; + tl_central_point.y = (tl_right_down_point.y() + tl_left_down_point.y()) / 2.0; + tl_central_point.z = (tl_right_down_point.z() + tl_left_down_point.z() + tl_height) / 2.0; + constexpr double max_distance_range = 200.0; + if (!isInDistanceRange(tl_central_point, camera_pose.position, max_distance_range)) { + continue; + } + + // check angle range + const double tl_yaw = autoware_utils::normalizeRadian( + std::atan2( + tl_right_down_point.y() - tl_left_down_point.y(), + tl_right_down_point.x() - tl_left_down_point.x()) + + M_PI_2); + constexpr double max_angle_range = 
autoware_utils::deg2rad(40.0); + + // get direction of z axis + tf2::Vector3 camera_z_dir(0, 0, 1); + tf2::Matrix3x3 camera_rotation_matrix(tf2::Quaternion( + camera_pose.orientation.x, camera_pose.orientation.y, camera_pose.orientation.z, + camera_pose.orientation.w)); + camera_z_dir = camera_rotation_matrix * camera_z_dir; + double camera_yaw = std::atan2(camera_z_dir.y(), camera_z_dir.x()); + camera_yaw = autoware_utils::normalizeRadian(camera_yaw); + if (!isInAngleRange(tl_yaw, camera_yaw, max_angle_range)) { + continue; + } + + // check within image frame + tf2::Transform tf_map2camera( + tf2::Quaternion( + camera_pose.orientation.x, camera_pose.orientation.y, camera_pose.orientation.z, + camera_pose.orientation.w), + tf2::Vector3(camera_pose.position.x, camera_pose.position.y, camera_pose.position.z)); + tf2::Transform tf_map2tl( + tf2::Quaternion(0, 0, 0, 1), + tf2::Vector3(tl_central_point.x, tl_central_point.y, tl_central_point.z)); + tf2::Transform tf_camera2tl; + tf_camera2tl = tf_map2camera.inverse() * tf_map2tl; + + geometry_msgs::msg::Point camera2tl_point; + camera2tl_point.x = tf_camera2tl.getOrigin().x(); + camera2tl_point.y = tf_camera2tl.getOrigin().y(); + camera2tl_point.z = tf_camera2tl.getOrigin().z(); + if (!isInImageFrame(pinhole_camera_model, camera2tl_point)) { + continue; + } + visible_traffic_lights.push_back(traffic_light); + } +} + +bool MapBasedDetector::isInDistanceRange( + const geometry_msgs::msg::Point & tl_point, const geometry_msgs::msg::Point & camera_point, + const double max_distance_range) const +{ + const double sq_dist = (tl_point.x - camera_point.x) * (tl_point.x - camera_point.x) + + (tl_point.y - camera_point.y) * (tl_point.y - camera_point.y); + return sq_dist < (max_distance_range * max_distance_range); +} + +bool MapBasedDetector::isInAngleRange( + const double & tl_yaw, const double & camera_yaw, const double max_angle_range) const +{ + Eigen::Vector2d vec1, vec2; + vec1 << std::cos(tl_yaw), std::sin(tl_yaw); + vec2 
<< std::cos(camera_yaw), std::sin(camera_yaw); + const double diff_angle = std::acos(vec1.dot(vec2)); + return std::fabs(diff_angle) < max_angle_range; +} + +bool MapBasedDetector::isInImageFrame( + const image_geometry::PinholeCameraModel & pinhole_camera_model, + const geometry_msgs::msg::Point & point) const +{ + if (point.z <= 0.0) { + return false; + } + + cv::Point2d point2d = calcRawImagePointFromPoint3D(pinhole_camera_model, point); + if (0 <= point2d.x && point2d.x < pinhole_camera_model.cameraInfo().width) { + if (0 <= point2d.y && point2d.y < pinhole_camera_model.cameraInfo().height) { + return true; + } + } + return false; +} + +void MapBasedDetector::publishVisibleTrafficLights( + const geometry_msgs::msg::PoseStamped camera_pose_stamped, + const std::vector & visible_traffic_lights, + const rclcpp::Publisher::SharedPtr pub) +{ + visualization_msgs::msg::MarkerArray output_msg; + for (const auto & traffic_light : visible_traffic_lights) { + const auto & tl_left_down_point = traffic_light.front(); + const auto & tl_right_down_point = traffic_light.back(); + const double tl_height = traffic_light.attributeOr("height", 0.0); + const int id = traffic_light.id(); + + geometry_msgs::msg::Point tl_central_point; + tl_central_point.x = (tl_right_down_point.x() + tl_left_down_point.x()) / 2.0; + tl_central_point.y = (tl_right_down_point.y() + tl_left_down_point.y()) / 2.0; + tl_central_point.z = (tl_right_down_point.z() + tl_left_down_point.z() + tl_height) / 2.0; + + visualization_msgs::msg::Marker marker; + + tf2::Transform tf_map2camera( + tf2::Quaternion( + camera_pose_stamped.pose.orientation.x, camera_pose_stamped.pose.orientation.y, + camera_pose_stamped.pose.orientation.z, camera_pose_stamped.pose.orientation.w), + tf2::Vector3( + camera_pose_stamped.pose.position.x, camera_pose_stamped.pose.position.y, + camera_pose_stamped.pose.position.z)); + tf2::Transform tf_map2tl( + tf2::Quaternion(0, 0, 0, 1), + tf2::Vector3(tl_central_point.x, 
tl_central_point.y, tl_central_point.z)); + tf2::Transform tf_camera2tl; + tf_camera2tl = tf_map2camera.inverse() * tf_map2tl; + + marker.header = camera_pose_stamped.header; + marker.id = id; + marker.type = visualization_msgs::msg::Marker::LINE_LIST; + marker.ns = std::string("beam"); + marker.scale.x = 0.05; + marker.action = visualization_msgs::msg::Marker::MODIFY; + marker.pose.position.x = 0.0; + marker.pose.position.y = 0.0; + marker.pose.position.z = 0.0; + marker.pose.orientation.x = 0.0; + marker.pose.orientation.y = 0.0; + marker.pose.orientation.z = 0.0; + marker.pose.orientation.w = 1.0; + geometry_msgs::msg::Point point; + point.x = 0.0; + point.y = 0.0; + point.z = 0.0; + marker.points.push_back(point); + point.x = tf_camera2tl.getOrigin().x(); + point.y = tf_camera2tl.getOrigin().y(); + point.z = tf_camera2tl.getOrigin().z(); + marker.points.push_back(point); + + marker.lifetime = rclcpp::Duration::from_seconds(0.2); + marker.color.a = 0.999; // Don't forget to set the alpha! 
+ marker.color.r = 0.0; + marker.color.g = 1.0; + marker.color.b = 0.0; + + output_msg.markers.push_back(marker); + } + pub->publish(output_msg); +} +} // namespace traffic_light + +#include +RCLCPP_COMPONENTS_REGISTER_NODE(traffic_light::MapBasedDetector) diff --git a/perception/traffic_light_ssd_fine_detector/.gitignore b/perception/traffic_light_ssd_fine_detector/.gitignore new file mode 100644 index 0000000000000..8fce603003c1e --- /dev/null +++ b/perception/traffic_light_ssd_fine_detector/.gitignore @@ -0,0 +1 @@ +data/ diff --git a/perception/traffic_light_ssd_fine_detector/CMakeLists.txt b/perception/traffic_light_ssd_fine_detector/CMakeLists.txt new file mode 100644 index 0000000000000..7fc6ad12675f2 --- /dev/null +++ b/perception/traffic_light_ssd_fine_detector/CMakeLists.txt @@ -0,0 +1,168 @@ +cmake_minimum_required(VERSION 3.5) + +if(NOT CMAKE_CXX_STANDARD) + set(CMAKE_CXX_STANDARD 14) + set(CMAKE_CXX_STANDARD_REQUIRED ON) + set(CMAKE_CXX_EXTENSIONS OFF) +endif() +if(CMAKE_COMPILER_IS_GNUCXX OR CMAKE_CXX_COMPILER_ID MATCHES "Clang") + add_compile_options(-Wall -Wextra -Wpedantic -Werror) +endif() + +project(traffic_light_ssd_fine_detector) + +option(CUDA_VERBOSE "Verbose output of CUDA modules" OFF) + +find_package(ament_cmake_auto REQUIRED) +ament_auto_find_build_dependencies() + +find_package(OpenCV REQUIRED) + +# set flags for CUDA availability +option(CUDA_AVAIL "CUDA available" OFF) +find_package(CUDA) +if (CUDA_FOUND) + find_library(CUBLAS_LIBRARIES cublas HINTS + ${CUDA_TOOLKIT_ROOT_DIR}/lib64 + ${CUDA_TOOLKIT_ROOT_DIR}/lib + ) + if (CUDA_VERBOSE) + message(STATUS "CUDA is available!") + message(STATUS "CUDA Libs: ${CUDA_LIBRARIES}") + message(STATUS "CUDA Headers: ${CUDA_INCLUDE_DIRS}") + endif () + set(CUDA_AVAIL ON) +else() + message(STATUS "CUDA NOT FOUND") + set(CUDA_AVAIL OFF) +endif (CUDA_FOUND) + +# set flags for TensorRT availability +option(TRT_AVAIL "TensorRT available" OFF) +# try to find the tensorRT modules +find_library(NVINFER 
NAMES nvinfer) +find_library(NVONNXPARSER nvonnxparser) +find_library(NVINFER_PLUGIN NAMES nvinfer_plugin) +if(NVINFER AND NVONNXPARSER AND NVINFER_PLUGIN) + if (CUDA_VERBOSE) + message(STATUS "TensorRT is available!") + message(STATUS "NVINFER: ${NVINFER}") + message(STATUS "NVPARSERS: ${NVPARSERS}") + message(STATUS "NVINFER_PLUGIN: ${NVINFER_PLUGIN}") + message(STATUS "NVONNXPARSER: ${NVONNXPARSER}") + endif () + set(TRT_AVAIL ON) +else() + message(STATUS "TensorRT is NOT Available") + set(TRT_AVAIL OFF) +endif() + +# set flags for CUDNN availability +option(CUDNN_AVAIL "CUDNN available" OFF) +# try to find the CUDNN module +find_library(CUDNN_LIBRARY +NAMES libcudnn.so${__cudnn_ver_suffix} libcudnn${__cudnn_ver_suffix}.dylib ${__cudnn_lib_win_name} +PATHS $ENV{LD_LIBRARY_PATH} ${__libpath_cudart} ${CUDNN_ROOT_DIR} ${PC_CUDNN_LIBRARY_DIRS} ${CMAKE_INSTALL_PREFIX} +PATH_SUFFIXES lib lib64 bin +DOC "CUDNN library." ) +if(CUDNN_LIBRARY) + if (CUDA_VERBOSE) + message(STATUS "CUDNN is available!") + message(STATUS "CUDNN_LIBRARY: ${CUDNN_LIBRARY}") + endif () + set(CUDNN_AVAIL ON) +else() + message(STATUS "CUDNN is NOT Available") + set(CUDNN_AVAIL OFF) +endif() + +# Download caffemodel and prototxt +set(PRETRAINED_MODEL_LINK "https://drive.google.com/uc?id=1USFDPRH9JrVdGoqt27qHjRgittwc0kcO") +set(PRETRAINED_MODEL_HASH 34ce7f2cbacbf6da8bc35769f027b73f) +set(LAMP_LABEL_LINK "https://drive.google.com/uc?id=1hPcKvKgKz0fqEo0cNAXH7roEletqZErL") +set(LAMP_LABEL_HASH e9f45efb02f2a9aa8ac27b3d5c164905) + +find_program(GDOWN_AVAIL "gdown") +if (NOT GDOWN_AVAIL) + message(STATUS "gdown: command not found. 
External files could not be downloaded.") +endif() +set(PATH "${CMAKE_CURRENT_SOURCE_DIR}/data") +if (NOT EXISTS "${PATH}") + execute_process(COMMAND mkdir -p ${PATH}) +endif() + +set(FILE "${PATH}/mb2-ssd-lite-tlr.onnx") +message(STATUS "Checking and downloading mb2-ssd-lite-tlr.onnx") +if (EXISTS "${FILE}") + file(MD5 "${FILE}" EXISTING_FILE_HASH) + if (NOT "${PRETRAINED_MODEL_HASH}" EQUAL "${EXISTING_FILE_HASH}") + message(STATUS "... file hash changed. Downloading now ...") + execute_process(COMMAND gdown --quiet "${PRETRAINED_MODEL_LINK}" -O ${PATH}/mb2-ssd-lite-tlr.onnx) + endif() +else() + message(STATUS "... file does not exist. Downloading now ...") + execute_process(COMMAND gdown --quiet "${PRETRAINED_MODEL_LINK}" -O ${PATH}/mb2-ssd-lite-tlr.onnx) +endif() + +set(FILE "${PATH}/voc_labels_tl.txt") +message(STATUS "Checking and downloading voc_labels_tl.txt") +if (EXISTS "${FILE}") + file(MD5 "${FILE}" EXISTING_FILE_HASH) + if (NOT "${LAMP_LABEL_HASH}" EQUAL "${EXISTING_FILE_HASH}") + message(STATUS "... file does not exist. Downloading now ...") + execute_process(COMMAND gdown --quiet "${LAMP_LABEL_LINK}" -O ${PATH}/voc_labels_tl.txt) + endif() +else() + message(STATUS "... file does not exist. 
Downloading now ...") + execute_process(COMMAND gdown --quiet "${LAMP_LABEL_LINK}" -O ${PATH}/voc_labels_tl.txt) +endif() + +if(TRT_AVAIL AND CUDA_AVAIL AND CUDNN_AVAIL) + include_directories( + ${OpenCV_INCLUDE_DIRS} + ${CUDA_INCLUDE_DIRS} + ) + + set(CMAKE_CXX_FLAGS "-O2 -Wall -Wunused-variable ${CMAKE_CXX_FLAGS} -fpic -std=c++11 -pthread") + + ### ssd ### + ament_auto_add_library(ssd SHARED + lib/src/trt_ssd.cpp + ) + + target_include_directories(ssd PUBLIC + lib/include + ) + + target_link_libraries(ssd + ${NVINFER} + ${NVONNXPARSER} + ${NVINFER_PLUGIN} + ${CUDA_LIBRARIES} + ${CUBLAS_LIBRARIES} + ${CUDA_curand_LIBRARY} + ${CUDNN_LIBRARY} + ) + + ament_auto_add_library(traffic_light_ssd_fine_detector_nodelet SHARED + src/nodelet.cpp + ) + + target_link_libraries(traffic_light_ssd_fine_detector_nodelet + ${OpenCV_LIB} + ssd + ) + + rclcpp_components_register_node(traffic_light_ssd_fine_detector_nodelet + PLUGIN "traffic_light::TrafficLightSSDFineDetectorNodelet" + EXECUTABLE traffic_light_ssd_fine_detector_node + ) + + ament_auto_package(INSTALL_TO_SHARE + data + launch + ) + +else() + message(STATUS "TrafficLightSSDFineDetector won't be built, CUDA and/or TensorRT were not found.") +endif() diff --git a/perception/traffic_light_ssd_fine_detector/README.md b/perception/traffic_light_ssd_fine_detector/README.md new file mode 100644 index 0000000000000..91fe13f3b6899 --- /dev/null +++ b/perception/traffic_light_ssd_fine_detector/README.md @@ -0,0 +1,63 @@ +# traffic_light_ssd_fine_detector + +## Purpose + +It is a package for traffic light detection using MobileNetV2 and SSDLite. + +The trained model is based on [pytorch-ssd](https://github.com/qfgaohao/pytorch-ssd). + +## Inner-workings / Algorithms + +Based on the camera image and the global ROI array detected by `map_based_detection` node, a CNN-based detection method enables highly accurate traffic light detection. 
+ +## Inputs / Outputs + +### Input + +| Name | Type | Description | +| --------------- | ---------------------------------------------------------- | ------------------------------------------------ | +| `~/input/image` | `sensor_msgs/Image` | The full size camera image | +| `~/input/rois` | `autoware_auto_perception_msgs::msg::TrafficLightRoiArray` | The array of ROIs detected by map_based_detector | + +### Output + +| Name | Type | Description | +| --------------------- | ---------------------------------------------------------- | ---------------------------- | +| `~/output/rois` | `autoware_auto_perception_msgs::msg::TrafficLightRoiArray` | The detected accurate rois | +| `~/debug/exe_time_ms` | `autoware_debug_msgs::msg::Float32Stamped` | The time taken for inference | + +## Parameters + +### Core Parameters + +| Name | Type | Default Value | Description | +| -------------- | ------------------- | ------------- | ------------------------------------------------------------------------------- | +| `score_thresh` | double | 0.7 | If the objectness score is less than this value, the object is ignored | +| `mean` | std::vector | [0.5,0.5,0.5] | Average value of the normalized values of the image data used for training | +| `std` | std::vector | [0.5,0.5,0.5] | Standard deviation of the normalized values of the image data used for training | + +### Node Parameters + +| Name | Type | Default Value | Description | +| ------------------ | ------ | ------------------------------ | -------------------------------------------------------------------- | +| `onnx_file` | string | "./data/mb2-ssd-lite-tlr.onnx" | The onnx file name for yolo model | +| `label_file` | string | "./data/voc_labels_tl.txt" | The label file with label names for detected objects written on it | +| `mode` | string | "FP32" | The inference mode: "FP32", "FP16", "INT8" | +| `max_batch_size` | int | 8 | The size of the batch processed at one time by inference by TensorRT | +| `approximate_sync` | 
bool | false | Flag for whether to ues approximate sync policy | + +## Assumptions / Known limits + +## Onnx model + +- + +## Reference repositories + +pytorch-ssd github repository + +- + +MobileNetV2 + +- M. Sandler, A. Howard, M. Zhu, A. Zhmoginov and L. Chen, "MobileNetV2: Inverted Residuals and Linear Bottlenecks," 2018 IEEE/CVF Conference on Computer Vision and Pattern Recognition, Salt Lake City, UT, 2018, pp. 4510-4520, doi: 10.1109/CVPR.2018.00474. diff --git a/perception/traffic_light_ssd_fine_detector/include/traffic_light_ssd_fine_detector/nodelet.hpp b/perception/traffic_light_ssd_fine_detector/include/traffic_light_ssd_fine_detector/nodelet.hpp new file mode 100644 index 0000000000000..73065cb924552 --- /dev/null +++ b/perception/traffic_light_ssd_fine_detector/include/traffic_light_ssd_fine_detector/nodelet.hpp @@ -0,0 +1,114 @@ +// Copyright 2020 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#ifndef TRAFFIC_LIGHT_SSD_FINE_DETECTOR__NODELET_HPP_ +#define TRAFFIC_LIGHT_SSD_FINE_DETECTOR__NODELET_HPP_ + +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include + +typedef struct Detection +{ + float x, y, w, h, prob; +} Detection; + +namespace traffic_light +{ +class TrafficLightSSDFineDetectorNodelet : public rclcpp::Node +{ +public: + explicit TrafficLightSSDFineDetectorNodelet(const rclcpp::NodeOptions & options); + void connectCb(); + void callback( + const sensor_msgs::msg::Image::ConstSharedPtr image_msg, + const autoware_auto_perception_msgs::msg::TrafficLightRoiArray::ConstSharedPtr + traffic_light_roi_msg); + +private: + bool cvMat2CnnInput( + const std::vector & in_imgs, const int num_rois, std::vector & data); + bool cnnOutput2BoxDetection( + const float * scores, const float * boxes, const int tlr_id, + const std::vector & in_imgs, const int num_rois, std::vector & detections); + bool rosMsg2CvMat(const sensor_msgs::msg::Image::ConstSharedPtr image_msg, cv::Mat & image); + bool fitInFrame(cv::Point & lt, cv::Point & rb, const cv::Size & size); + void cvRect2TlRoiMsg( + const cv::Rect & rect, const int32_t id, + autoware_auto_perception_msgs::msg::TrafficLightRoi & tl_roi); + bool readLabelFile(std::string filepath, std::vector & labels); + bool getTlrIdFromLabel(const std::vector & labels, int & tlr_id); + + // variables + std::shared_ptr image_transport_; + image_transport::SubscriberFilter image_sub_; + message_filters::Subscriber roi_sub_; + std::mutex connect_mutex_; + rclcpp::Publisher::SharedPtr + output_roi_pub_; + rclcpp::Publisher::SharedPtr exe_time_pub_; + rclcpp::TimerBase::SharedPtr timer_; + + typedef message_filters::sync_policies::ExactTime< + sensor_msgs::msg::Image, autoware_auto_perception_msgs::msg::TrafficLightRoiArray> + SyncPolicy; + typedef 
message_filters::Synchronizer Sync; + std::shared_ptr sync_; + + typedef message_filters::sync_policies::ApproximateTime< + sensor_msgs::msg::Image, autoware_auto_perception_msgs::msg::TrafficLightRoiArray> + ApproximateSyncPolicy; + typedef message_filters::Synchronizer ApproximateSync; + std::shared_ptr approximate_sync_; + + bool is_approximate_sync_; + double score_thresh_; + + int tlr_id_; + int channel_; + int width_; + int height_; + int class_num_; + int detection_per_class_; + + std::vector mean_; + std::vector std_; + + std::unique_ptr net_ptr_; +}; // TrafficLightSSDFineDetectorNodelet + +} // namespace traffic_light + +#endif // TRAFFIC_LIGHT_SSD_FINE_DETECTOR__NODELET_HPP_ diff --git a/perception/traffic_light_ssd_fine_detector/launch/traffic_light_ssd_fine_detector.launch.xml b/perception/traffic_light_ssd_fine_detector/launch/traffic_light_ssd_fine_detector.launch.xml new file mode 100644 index 0000000000000..03180319f7aa9 --- /dev/null +++ b/perception/traffic_light_ssd_fine_detector/launch/traffic_light_ssd_fine_detector.launch.xml @@ -0,0 +1,37 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/perception/traffic_light_ssd_fine_detector/lib/include/cuda_utils.hpp b/perception/traffic_light_ssd_fine_detector/lib/include/cuda_utils.hpp new file mode 100644 index 0000000000000..79984ecba8e3d --- /dev/null +++ b/perception/traffic_light_ssd_fine_detector/lib/include/cuda_utils.hpp @@ -0,0 +1,75 @@ +// Copyright 2020 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +/* + * This code is licensed under CC0 1.0 Universal (Public Domain). + * You can use this without any limitation. + * https://creativecommons.org/publicdomain/zero/1.0/deed.en + */ + +#ifndef CUDA_UTILS_HPP_ +#define CUDA_UTILS_HPP_ + +#include <./cuda_runtime_api.h> + +#include +#include +#include +#include + +#define CHECK_CUDA_ERROR(e) (cuda::check_error(e, __FILE__, __LINE__)) + +namespace cuda +{ +void check_error(const ::cudaError_t e, decltype(__FILE__) f, decltype(__LINE__) n) +{ + if (e != ::cudaSuccess) { + std::stringstream s; + s << ::cudaGetErrorName(e) << " (" << e << ")@" << f << "#L" << n << ": " + << ::cudaGetErrorString(e); + throw std::runtime_error{s.str()}; + } +} + +struct deleter +{ + void operator()(void * p) const { CHECK_CUDA_ERROR(::cudaFree(p)); } +}; +template +using unique_ptr = std::unique_ptr; + +// auto array = cuda::make_unique(n); +// ::cudaMemcpy(array.get(), src_array, sizeof(float)*n, ::cudaMemcpyHostToDevice); +template +typename std::enable_if::value, cuda::unique_ptr>::type make_unique( + const std::size_t n) +{ + using U = typename std::remove_extent::type; + U * p; + CHECK_CUDA_ERROR(::cudaMalloc(reinterpret_cast(&p), sizeof(U) * n)); + return cuda::unique_ptr{p}; +} + +// auto value = cuda::make_unique(); +// ::cudaMemcpy(value.get(), src_value, sizeof(my_class), ::cudaMemcpyHostToDevice); +template +cuda::unique_ptr make_unique() +{ + T * p; + CHECK_CUDA_ERROR(::cudaMalloc(reinterpret_cast(&p), sizeof(T))); + return cuda::unique_ptr{p}; +} +} // namespace cuda + +#endif // CUDA_UTILS_HPP_ diff --git a/perception/traffic_light_ssd_fine_detector/lib/include/trt_ssd.hpp b/perception/traffic_light_ssd_fine_detector/lib/include/trt_ssd.hpp new file mode 100644 index 0000000000000..bbe5ae8da513c --- /dev/null +++ b/perception/traffic_light_ssd_fine_detector/lib/include/trt_ssd.hpp @@ -0,0 +1,100 @@ +// Copyright 2020 
Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef TRT_SSD_HPP_ +#define TRT_SSD_HPP_ + +#include <./cuda_runtime.h> +#include + +#include +#include +#include +#include + +namespace ssd +{ +struct Deleter +{ + template + void operator()(T * obj) const + { + if (obj) { + obj->destroy(); + } + } +}; + +template +using unique_ptr = std::unique_ptr; + +class Logger : public nvinfer1::ILogger +{ +public: + explicit Logger(bool verbose) : verbose_(verbose) {} + + void log(Severity severity, const char * msg) noexcept override + { + if (verbose_ || ((severity != Severity::kINFO) && (severity != Severity::kVERBOSE))) { + std::cout << msg << std::endl; + } + } + +private: + bool verbose_{false}; +}; + +class Net +{ +public: + // Create engine from engine path + explicit Net(const std::string & engine_path, bool verbose = false); + + // Create engine from serialized onnx model + Net( + const std::string & onnx_file_path, const std::string & precision, const int max_batch_size, + bool verbose = false, size_t workspace_size = (1ULL << 30)); + + ~Net(); + + // Save model to path + void save(const std::string & path); + + // Infer using pre-allocated GPU buffers {data, scores, boxes} + void infer(std::vector & buffers, const int batch_size); + + // Get (c, h, w) size of the fixed input + std::vector getInputSize(); + + std::vector getOutputScoreSize(); + + // Get max allowed batch size + int getMaxBatchSize(); + + // Get max number of detections + 
int getMaxDetections(); + +private: + unique_ptr runtime_ = nullptr; + unique_ptr engine_ = nullptr; + unique_ptr context_ = nullptr; + cudaStream_t stream_ = nullptr; + + void load(const std::string & path); + void prepare(); +}; + +} // namespace ssd + +#endif // TRT_SSD_HPP_ diff --git a/perception/traffic_light_ssd_fine_detector/lib/src/trt_ssd.cpp b/perception/traffic_light_ssd_fine_detector/lib/src/trt_ssd.cpp new file mode 100644 index 0000000000000..766de7eb4cde1 --- /dev/null +++ b/perception/traffic_light_ssd_fine_detector/lib/src/trt_ssd.cpp @@ -0,0 +1,190 @@ +// Copyright 2020 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include + +#include +#include + +#include +#include +#include +#include +#include + +namespace ssd +{ +void Net::load(const std::string & path) +{ + std::ifstream file(path, std::ios::in | std::ios::binary); + file.seekg(0, file.end); + size_t size = file.tellg(); + file.seekg(0, file.beg); + + char * buffer = new char[size]; + file.read(buffer, size); + file.close(); + if (runtime_) { + engine_ = + unique_ptr(runtime_->deserializeCudaEngine(buffer, size, nullptr)); + } + delete[] buffer; +} + +void Net::prepare() +{ + if (engine_) { + context_ = unique_ptr(engine_->createExecutionContext()); + } + cudaStreamCreate(&stream_); +} + +Net::Net(const std::string & path, bool verbose) +{ + Logger logger(verbose); + runtime_ = unique_ptr(nvinfer1::createInferRuntime(logger)); + load(path); + prepare(); +} + +Net::~Net() +{ + if (stream_) { + cudaStreamDestroy(stream_); + } +} + +Net::Net( + const std::string & onnx_file_path, const std::string & precision, const int max_batch_size, + bool verbose, size_t workspace_size) +{ + Logger logger(verbose); + runtime_ = unique_ptr(nvinfer1::createInferRuntime(logger)); + if (!runtime_) { + std::cout << "Fail to create runtime" << std::endl; + return; + } + bool fp16 = precision.compare("FP16") == 0; + bool int8 = precision.compare("INT8") == 0; + + // Create builder + auto builder = unique_ptr(nvinfer1::createInferBuilder(logger)); + if (!builder) { + std::cout << "Fail to create builder" << std::endl; + return; + } + auto config = unique_ptr(builder->createBuilderConfig()); + if (!config) { + std::cout << "Fail to create builder config" << std::endl; + return; + } + // Allow use of FP16 layers when running in INT8 + if (fp16 || int8) { + config->setFlag(nvinfer1::BuilderFlag::kFP16); + } + config->setMaxWorkspaceSize(workspace_size); + + // Parse ONNX FCN + std::cout << "Building " << precision << " core model..." 
<< std::endl; + const auto flag = + 1U << static_cast(nvinfer1::NetworkDefinitionCreationFlag::kEXPLICIT_BATCH); + auto network = unique_ptr(builder->createNetworkV2(flag)); + if (!network) { + std::cout << "Fail to create network" << std::endl; + return; + } + auto parser = unique_ptr(nvonnxparser::createParser(*network, logger)); + if (!parser) { + std::cout << "Fail to create parser" << std::endl; + return; + } + parser->parseFromFile( + onnx_file_path.c_str(), static_cast(nvinfer1::ILogger::Severity::kERROR)); + + // TODO(someone): int8 calibrator + /* std::unique_ptr calib; + if (int8) { + config->setFlag(BuilderFlag::kINT8); + ImageStream stream(batch, inputDims, calibration_images); + calib = std::unique_ptr(new Int8EntropyCalibrator(stream, model_name, + calibration_table)); config->setInt8Calibrator(calib.get()); + }*/ + + // create profile + auto profile = builder->createOptimizationProfile(); + profile->setDimensions( + network->getInput(0)->getName(), nvinfer1::OptProfileSelector::kMIN, + nvinfer1::Dims4{1, 3, 300, 300}); + profile->setDimensions( + network->getInput(0)->getName(), nvinfer1::OptProfileSelector::kOPT, + nvinfer1::Dims4{max_batch_size, 3, 300, 300}); + profile->setDimensions( + network->getInput(0)->getName(), nvinfer1::OptProfileSelector::kMAX, + nvinfer1::Dims4{max_batch_size, 3, 300, 300}); + config->addOptimizationProfile(profile); + + // Build engine + std::cout << "Applying optimizations and building TRT CUDA engine..." << std::endl; + engine_ = unique_ptr(builder->buildEngineWithConfig(*network, *config)); + if (!engine_) { + std::cout << "Fail to create engine" << std::endl; + return; + } + context_ = unique_ptr(engine_->createExecutionContext()); + if (!context_) { + std::cout << "Fail to create context" << std::endl; + return; + } +} + +void Net::save(const std::string & path) +{ + std::cout << "Writing to " << path << "..." 
<< std::endl; + auto serialized = unique_ptr(engine_->serialize()); + std::ofstream file(path, std::ios::out | std::ios::binary); + file.write(reinterpret_cast(serialized->data()), serialized->size()); +} + +void Net::infer(std::vector & buffers, const int batch_size) +{ + if (!context_) { + throw std::runtime_error("Fail to create context"); + } + auto input_dims = engine_->getBindingDimensions(0); + context_->setBindingDimensions( + 0, nvinfer1::Dims4(batch_size, input_dims.d[1], input_dims.d[2], input_dims.d[3])); + context_->enqueueV2(buffers.data(), stream_, nullptr); + cudaStreamSynchronize(stream_); +} + +std::vector Net::getInputSize() +{ + auto dims = engine_->getBindingDimensions(0); + return {dims.d[1], dims.d[2], dims.d[3]}; +} + +std::vector Net::getOutputScoreSize() +{ + auto dims = engine_->getBindingDimensions(1); + return {dims.d[1], dims.d[2]}; +} + +int Net::getMaxBatchSize() +{ + return engine_->getProfileDimensions(0, 0, nvinfer1::OptProfileSelector::kMAX).d[0]; +} + +int Net::getMaxDetections() { return engine_->getBindingDimensions(1).d[1]; } + +} // namespace ssd diff --git a/perception/traffic_light_ssd_fine_detector/package.xml b/perception/traffic_light_ssd_fine_detector/package.xml new file mode 100644 index 0000000000000..d78e59e77916b --- /dev/null +++ b/perception/traffic_light_ssd_fine_detector/package.xml @@ -0,0 +1,36 @@ + + + + traffic_light_ssd_fine_detector + 0.1.0 + The traffic_light_ssd_fine_detector package + + + + + Daisuke Nishimatsu + + + + + + Apache License 2.0 + + ament_cmake_auto + + autoware_auto_perception_msgs + autoware_debug_msgs + cv_bridge + image_transport + message_filters + rclcpp + rclcpp_components + sensor_msgs + + autoware_lint_common + + + + ament_cmake + + diff --git a/perception/traffic_light_ssd_fine_detector/src/nodelet.cpp b/perception/traffic_light_ssd_fine_detector/src/nodelet.cpp new file mode 100644 index 0000000000000..8578d3ebbc099 --- /dev/null +++ 
b/perception/traffic_light_ssd_fine_detector/src/nodelet.cpp @@ -0,0 +1,361 @@ +// Copyright 2020 Tier IV, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "traffic_light_ssd_fine_detector/nodelet.hpp" + +#include +#include + +#include +#include +#include +#include + +namespace traffic_light +{ +inline std::vector toFloatVector(const std::vector double_vector) +{ + return std::vector(double_vector.begin(), double_vector.end()); +} + +TrafficLightSSDFineDetectorNodelet::TrafficLightSSDFineDetectorNodelet( + const rclcpp::NodeOptions & options) +: Node("traffic_light_ssd_fine_detector_node", options) +{ + using std::placeholders::_1; + using std::placeholders::_2; + + std::string package_path = + ament_index_cpp::get_package_share_directory("traffic_light_ssd_fine_detector"); + std::string data_path = package_path + "/data/"; + std::string engine_path = package_path + "/data/mb2-ssd-lite.engine"; + std::ifstream fs(engine_path); + std::vector labels; + const int max_batch_size = this->declare_parameter("max_batch_size", 8); + const std::string onnx_file = this->declare_parameter("onnx_file"); + const std::string label_file = this->declare_parameter("label_file"); + const std::string mode = this->declare_parameter("mode", "FP32"); + + if (readLabelFile(label_file, labels)) { + if (!getTlrIdFromLabel(labels, tlr_id_)) { + RCLCPP_ERROR(this->get_logger(), "Could not find tlr id"); + } + } + + if (fs.is_open()) { + RCLCPP_INFO(this->get_logger(), 
"Found %s", engine_path.c_str()); + net_ptr_.reset(new ssd::Net(engine_path, false)); + if (max_batch_size != net_ptr_->getMaxBatchSize()) { + RCLCPP_INFO( + this->get_logger(), + "Required max batch size %d does not correspond to Profile max batch size %d. Rebuild " + "engine " + "from onnx", + max_batch_size, net_ptr_->getMaxBatchSize()); + net_ptr_.reset(new ssd::Net(onnx_file, mode, max_batch_size)); + net_ptr_->save(engine_path); + } + } else { + RCLCPP_INFO( + this->get_logger(), "Could not find %s, try making TensorRT engine from onnx", + engine_path.c_str()); + net_ptr_.reset(new ssd::Net(onnx_file, mode, max_batch_size)); + net_ptr_->save(engine_path); + } + channel_ = net_ptr_->getInputSize()[0]; + width_ = net_ptr_->getInputSize()[1]; + height_ = net_ptr_->getInputSize()[2]; + detection_per_class_ = net_ptr_->getOutputScoreSize()[0]; + class_num_ = net_ptr_->getOutputScoreSize()[1]; + + is_approximate_sync_ = this->declare_parameter("approximate_sync", false); + score_thresh_ = this->declare_parameter("score_thresh", 0.7); + mean_ = toFloatVector(this->declare_parameter("mean", std::vector({0.5, 0.5, 0.5}))); + std_ = toFloatVector(this->declare_parameter("std", std::vector({0.5, 0.5, 0.5}))); + + auto timer_callback = std::bind(&TrafficLightSSDFineDetectorNodelet::connectCb, this); + const auto period_s = 0.1; + const auto period_ns = + std::chrono::duration_cast(std::chrono::duration(period_s)); + timer_ = std::make_shared>( + this->get_clock(), period_ns, std::move(timer_callback), + this->get_node_base_interface()->get_context()); + this->get_node_timers_interface()->add_timer(timer_, nullptr); + + std::lock_guard lock(connect_mutex_); + output_roi_pub_ = + this->create_publisher( + "~/output/rois", 1); + exe_time_pub_ = + this->create_publisher("~/debug/exe_time_ms", 1); + if (is_approximate_sync_) { + approximate_sync_.reset(new ApproximateSync(ApproximateSyncPolicy(10), image_sub_, roi_sub_)); + approximate_sync_->registerCallback( + 
std::bind(&TrafficLightSSDFineDetectorNodelet::callback, this, _1, _2)); + } else { + sync_.reset(new Sync(SyncPolicy(10), image_sub_, roi_sub_)); + sync_->registerCallback(std::bind(&TrafficLightSSDFineDetectorNodelet::callback, this, _1, _2)); + } +} + +void TrafficLightSSDFineDetectorNodelet::connectCb() +{ + std::lock_guard lock(connect_mutex_); + if (output_roi_pub_->get_subscription_count() == 0) { + image_sub_.unsubscribe(); + roi_sub_.unsubscribe(); + } else if (!image_sub_.getSubscriber()) { + image_sub_.subscribe(this, "~/input/image", "raw", rmw_qos_profile_sensor_data); + roi_sub_.subscribe(this, "~/input/rois", rclcpp::QoS{1}.get_rmw_qos_profile()); + } +} + +void TrafficLightSSDFineDetectorNodelet::callback( + const sensor_msgs::msg::Image::ConstSharedPtr in_image_msg, + const autoware_auto_perception_msgs::msg::TrafficLightRoiArray::ConstSharedPtr in_roi_msg) +{ + if (in_image_msg->width < 2 || in_image_msg->height < 2) { + return; + } + + using std::chrono::high_resolution_clock; + using std::chrono::milliseconds; + const auto exe_start_time = high_resolution_clock::now(); + cv::Mat original_image; + autoware_auto_perception_msgs::msg::TrafficLightRoiArray out_rois; + + rosMsg2CvMat(in_image_msg, original_image); + int num_rois = in_roi_msg->rois.size(); + int batch_count = 0; + const int batch_size = net_ptr_->getMaxBatchSize(); + while (num_rois != 0) { + const int num_infer = (num_rois / batch_size > 0) ? 
batch_size : num_rois % batch_size; + auto data_d = cuda::make_unique(num_infer * channel_ * width_ * height_); + auto scores_d = cuda::make_unique(num_infer * detection_per_class_ * class_num_); + auto boxes_d = cuda::make_unique(num_infer * detection_per_class_ * 4); + std::vector buffers = {data_d.get(), scores_d.get(), boxes_d.get()}; + std::vector lts, rbs; + std::vector cropped_imgs; + + for (int i = 0; i < num_infer; ++i) { + int roi_index = i + batch_count * batch_size; + lts.push_back(cv::Point( + in_roi_msg->rois.at(roi_index).roi.x_offset, in_roi_msg->rois.at(roi_index).roi.y_offset)); + rbs.push_back(cv::Point( + in_roi_msg->rois.at(roi_index).roi.x_offset + in_roi_msg->rois.at(roi_index).roi.width, + in_roi_msg->rois.at(roi_index).roi.y_offset + in_roi_msg->rois.at(roi_index).roi.height)); + fitInFrame(lts.at(i), rbs.at(i), cv::Size(original_image.size())); + cropped_imgs.push_back(cv::Mat(original_image, cv::Rect(lts.at(i), rbs.at(i)))); + } + + std::vector data(num_infer * channel_ * width_ * height_); + if (!cvMat2CnnInput(cropped_imgs, num_infer, data)) { + RCLCPP_ERROR(this->get_logger(), "Fail to preprocess image"); + return; + } + + cudaMemcpy(data_d.get(), data.data(), data.size() * sizeof(float), cudaMemcpyHostToDevice); + + try { + net_ptr_->infer(buffers, num_infer); + } catch (std::exception & e) { + RCLCPP_ERROR(this->get_logger(), "%s", e.what()); + return; + } + + auto scores = std::make_unique(num_infer * detection_per_class_ * class_num_); + auto boxes = std::make_unique(num_infer * detection_per_class_ * 4); + cudaMemcpy( + scores.get(), scores_d.get(), sizeof(float) * num_infer * detection_per_class_ * class_num_, + cudaMemcpyDeviceToHost); + cudaMemcpy( + boxes.get(), boxes_d.get(), sizeof(float) * num_infer * detection_per_class_ * 4, + cudaMemcpyDeviceToHost); + // Get Output + std::vector detections; + if (!cnnOutput2BoxDetection( + scores.get(), boxes.get(), tlr_id_, cropped_imgs, num_infer, detections)) { + 
RCLCPP_ERROR(this->get_logger(), "Fail to postprocess image"); + return; + } + + for (int i = 0; i < num_infer; ++i) { + if (detections.at(i).prob > score_thresh_) { + cv::Point lt_roi = + cv::Point(lts.at(i).x + detections.at(i).x, lts.at(i).y + detections.at(i).y); + cv::Point rb_roi = cv::Point( + lts.at(i).x + detections.at(i).x + detections.at(i).w, + lts.at(i).y + detections.at(i).y + detections.at(i).h); + fitInFrame(lt_roi, rb_roi, cv::Size(original_image.size())); + autoware_auto_perception_msgs::msg::TrafficLightRoi tl_roi; + cvRect2TlRoiMsg( + cv::Rect(lt_roi, rb_roi), in_roi_msg->rois.at(i + batch_count * batch_size).id, tl_roi); + out_rois.rois.push_back(tl_roi); + } + } + num_rois -= num_infer; + ++batch_count; + } + out_rois.header = in_roi_msg->header; + output_roi_pub_->publish(out_rois); + const auto exe_end_time = high_resolution_clock::now(); + const double exe_time = + std::chrono::duration_cast(exe_end_time - exe_start_time).count(); + autoware_debug_msgs::msg::Float32Stamped exe_time_msg; + exe_time_msg.data = exe_time; + exe_time_msg.stamp = this->now(); + exe_time_pub_->publish(exe_time_msg); +} + +bool TrafficLightSSDFineDetectorNodelet::cvMat2CnnInput( + const std::vector & in_imgs, const int num_rois, std::vector & data) +{ + for (int i = 0; i < num_rois; ++i) { + // cv::Mat rgb; + // cv::cvtColor(in_imgs.at(i), rgb, CV_BGR2RGB); + cv::Mat resized; + cv::resize(in_imgs.at(i), resized, cv::Size(width_, height_)); + + cv::Mat pixels; + resized.convertTo(pixels, CV_32FC3, 1.0 / 255, 0); + std::vector img; + if (pixels.isContinuous()) { + img.assign( + reinterpret_cast(pixels.datastart), + reinterpret_cast(pixels.dataend)); + } else { + return false; + } + + for (int c = 0; c < channel_; ++c) { + for (int j = 0, hw = width_ * height_; j < hw; ++j) { + data[i * channel_ * width_ * height_ + c * hw + j] = + (img[channel_ * j + 2 - c] - mean_[c]) / std_[c]; + } + } + } + return true; +} + +bool 
TrafficLightSSDFineDetectorNodelet::cnnOutput2BoxDetection( + const float * scores, const float * boxes, const int tlr_id, const std::vector & in_imgs, + const int num_rois, std::vector & detections) +{ + if (tlr_id > class_num_ - 1) { + return false; + } + for (int i = 0; i < num_rois; ++i) { + std::vector tlr_scores; + Detection det; + for (int j = 0; j < detection_per_class_; ++j) { + tlr_scores.push_back(scores[i * detection_per_class_ * class_num_ + tlr_id + j * class_num_]); + } + std::vector::iterator iter = std::max_element(tlr_scores.begin(), tlr_scores.end()); + size_t index = std::distance(tlr_scores.begin(), iter); + size_t box_index = i * detection_per_class_ * 4 + index * 4; + cv::Point lt, rb; + lt.x = boxes[box_index] * in_imgs.at(i).cols; + lt.y = boxes[box_index + 1] * in_imgs.at(i).rows; + rb.x = boxes[box_index + 2] * in_imgs.at(i).cols; + rb.y = boxes[box_index + 3] * in_imgs.at(i).rows; + fitInFrame(lt, rb, cv::Size(in_imgs.at(i).cols, in_imgs.at(i).rows)); + det.x = lt.x; + det.y = lt.y; + det.w = rb.x - lt.x; + det.h = rb.y - lt.y; + + det.prob = tlr_scores[index]; + detections.push_back(det); + } + return true; +} + +bool TrafficLightSSDFineDetectorNodelet::rosMsg2CvMat( + const sensor_msgs::msg::Image::ConstSharedPtr image_msg, cv::Mat & image) +{ + try { + cv_bridge::CvImagePtr cv_image = cv_bridge::toCvCopy(image_msg, "rgb8"); + image = cv_image->image; + } catch (cv_bridge::Exception & e) { + RCLCPP_ERROR( + this->get_logger(), "Failed to convert sensor_msgs::msg::Image to cv::Mat \n%s", e.what()); + return false; + } + + return true; +} + +bool TrafficLightSSDFineDetectorNodelet::fitInFrame( + cv::Point & lt, cv::Point & rb, const cv::Size & size) +{ + const int width = static_cast(size.width); + const int height = static_cast(size.height); + { + const int x_min = 0, x_max = width - 2; + const int y_min = 0, y_max = height - 2; + lt.x = std::min(std::max(lt.x, x_min), x_max); + lt.y = std::min(std::max(lt.y, y_min), y_max); + } + { + 
const int x_min = lt.x + 1, x_max = width - 1; + const int y_min = lt.y + 1, y_max = height - 1; + rb.x = std::min(std::max(rb.x, x_min), x_max); + rb.y = std::min(std::max(rb.y, y_min), y_max); + } + + return true; +} + +void TrafficLightSSDFineDetectorNodelet::cvRect2TlRoiMsg( + const cv::Rect & rect, const int32_t id, + autoware_auto_perception_msgs::msg::TrafficLightRoi & tl_roi) +{ + tl_roi.id = id; + tl_roi.roi.x_offset = rect.x; + tl_roi.roi.y_offset = rect.y; + tl_roi.roi.width = rect.width; + tl_roi.roi.height = rect.height; +} + +bool TrafficLightSSDFineDetectorNodelet::readLabelFile( + std::string filepath, std::vector & labels) +{ + std::ifstream labelsFile(filepath); + if (!labelsFile.is_open()) { + RCLCPP_ERROR(this->get_logger(), "Could not open label file. [%s]", filepath.c_str()); + return false; + } + std::string label; + while (getline(labelsFile, label)) { + labels.push_back(label); + } + return true; +} + +bool TrafficLightSSDFineDetectorNodelet::getTlrIdFromLabel( + const std::vector & labels, int & tlr_id) +{ + for (size_t i = 0; i < labels.size(); ++i) { + if (labels.at(i) == "traffic_light") { + tlr_id = i; + return true; + } + } + return false; +} + +} // namespace traffic_light + +#include +RCLCPP_COMPONENTS_REGISTER_NODE(traffic_light::TrafficLightSSDFineDetectorNodelet) diff --git a/perception/traffic_light_ssd_fine_detector/traffic_light_ssd_fine_detector.xml b/perception/traffic_light_ssd_fine_detector/traffic_light_ssd_fine_detector.xml new file mode 100644 index 0000000000000..f92034cfde57d --- /dev/null +++ b/perception/traffic_light_ssd_fine_detector/traffic_light_ssd_fine_detector.xml @@ -0,0 +1,10 @@ + + + + + + + +