A ROS2-based human detection system combining RGB camera and LiDAR sensor fusion.
- RGB Camera
- LiDAR
- ROS2 Jazzy
git clone <this-repo-url>
cd smartpole_hps_ws

# Outside the workspace
git clone https://github.com/tier4/nebula.git
cd nebula
# Import dependencies
vcs import < build_depends-${ROS_DISTRO}.repos
rosdep install --from-paths . --ignore-src -y
# Build Nebula
colcon build --symlink-install --cmake-args \
-DCMAKE_BUILD_TYPE=Release \
  -DCMAKE_EXPORT_COMPILE_COMMANDS=1

Download livehps.t7 from Google Drive and place it in:
smartpole_hps_ws/src/livehps_ros2/resource/
Download the SMPL model from Google Drive and extract it. Place the extracted smpl folder in:
smartpole_hps_ws/src/livehps_ros2/resource/
sudo apt install ros-jazzy-usb-cam

Note: Run each of the following steps in a separate terminal.
# Outside the workspace
cd <BASE_DIR>/nebula
source install/setup.bash
# Change PandarQT128 to match your LiDAR model
ros2 launch nebula nebula_launch.py sensor_model:=PandarQT128

# Adjust pixel_format, image_width, and image_height to match your camera
ros2 run usb_cam usb_cam_node_exe --ros-args \
-p pixel_format:="uyvy2rgb" \
-p image_width:=1920 \
  -p image_height:=1280

# In the workspace (smartpole_hps_ws)
source install/setup.bash
ros2 launch lidar_human_tracker tracker.launch.py

# In the workspace (smartpole_hps_ws)
source install/setup.bash
ros2 launch livehps_ros2 livehps.launch.py

# In the workspace (smartpole_hps_ws)
source install/setup.bash
# Adjust all parameters to match your setup
ros2 run lidar_colorizer lidar_colorizer --ros-args \
-p image_topic:=/image_raw \
-p lidar_topic:=/pandar_points \
-p output_topic:=/pandar_points_colored \
-p extrinsics_file:=<BASE_DIR>/smartpole_hps_ws/example_manual_extrinsics.yaml \
-p camera_info_file:=<BASE_DIR>/smartpole_hps_ws/example_camera_info.yaml