Repository: StephLin/LIO-SEGMOT Branch: master Commit: 9d8ad1398d44 Files: 48 Total size: 381.1 KB Directory structure: gitextract_j229x4hx/ ├── .clang-format ├── .github/ │ └── stale.yml ├── .gitignore ├── CITATION.bib ├── CMakeLists.txt ├── LICENSE ├── README.md ├── config/ │ ├── doc/ │ │ └── kitti2bag/ │ │ ├── README.md │ │ └── kitti2bag.py │ ├── params.yaml │ ├── params_hsinchu.yaml │ └── params_kitti.yaml ├── include/ │ ├── factor.h │ ├── solver.h │ └── utility.h ├── launch/ │ ├── debug.launch │ ├── gdbserver_debug.launch │ ├── include/ │ │ ├── config/ │ │ │ ├── robot.urdf.xacro │ │ │ └── rviz.rviz │ │ ├── module_loam.launch │ │ ├── module_loam_debug.launch │ │ ├── module_loam_gdbserver_debug.launch │ │ ├── module_navsat.launch │ │ ├── module_robot_state_publisher.launch │ │ ├── module_rviz.launch │ │ └── rosconsole/ │ │ ├── rosconsole_error.conf │ │ ├── rosconsole_info.conf │ │ └── rosconsole_warn.conf │ ├── run.launch │ ├── run_hsinchu.launch │ └── run_kitti.launch ├── msg/ │ ├── Diagnosis.msg │ ├── ObjectState.msg │ ├── ObjectStateArray.msg │ ├── cloud_info.msg │ └── flags.msg ├── package.xml ├── scripts/ │ └── diagnose.py ├── src/ │ ├── factor.cpp │ ├── featureExtraction.cpp │ ├── imageProjection.cpp │ ├── imuPreintegration.cpp │ ├── mapOptimization.cpp │ ├── offlineBagPlayer.cpp │ └── solver.cpp └── srv/ ├── detection.srv ├── save_estimation_result.srv └── save_map.srv ================================================ FILE CONTENTS ================================================ ================================================ FILE: .clang-format ================================================ --- Language: Cpp # BasedOnStyle: Google AccessModifierOffset: -1 AlignAfterOpenBracket: Align AlignConsecutiveAssignments: true AlignConsecutiveDeclarations: false AlignEscapedNewlines: Left AlignOperands: true AlignTrailingComments: true AllowAllParametersOfDeclarationOnNextLine: true AllowShortBlocksOnASingleLine: false AllowShortCaseLabelsOnASingleLine: false 
AllowShortFunctionsOnASingleLine: All AllowShortIfStatementsOnASingleLine: true AllowShortLoopsOnASingleLine: true AlwaysBreakAfterDefinitionReturnType: None AlwaysBreakAfterReturnType: None AlwaysBreakBeforeMultilineStrings: true AlwaysBreakTemplateDeclarations: true BinPackArguments: true BinPackParameters: true BraceWrapping: AfterClass: false AfterControlStatement: false AfterEnum: false AfterFunction: false AfterNamespace: false AfterObjCDeclaration: false AfterStruct: false AfterUnion: false AfterExternBlock: false BeforeCatch: false BeforeElse: false IndentBraces: false SplitEmptyFunction: true SplitEmptyRecord: true SplitEmptyNamespace: true BreakBeforeBinaryOperators: None BreakBeforeBraces: Attach BreakBeforeInheritanceComma: false BreakBeforeTernaryOperators: true BreakConstructorInitializersBeforeComma: false BreakConstructorInitializers: BeforeColon BreakAfterJavaFieldAnnotations: false BreakStringLiterals: true ColumnLimit: 0 CommentPragmas: '^ IWYU pragma:' CompactNamespaces: false ConstructorInitializerAllOnOneLineOrOnePerLine: true ConstructorInitializerIndentWidth: 4 ContinuationIndentWidth: 4 Cpp11BracedListStyle: true DerivePointerAlignment: true DisableFormat: false ExperimentalAutoDetectBinPacking: false FixNamespaceComments: true ForEachMacros: - foreach - Q_FOREACH - BOOST_FOREACH IncludeBlocks: Preserve IncludeCategories: - Regex: '^' Priority: 2 - Regex: '^<.*\.h>' Priority: 1 - Regex: '^<.*' Priority: 2 - Regex: '.*' Priority: 3 IncludeIsMainRegex: '([-_](test|unittest))?$' IndentCaseLabels: true IndentPPDirectives: None IndentWidth: 2 IndentWrappedFunctionNames: false JavaScriptQuotes: Leave JavaScriptWrapImports: true KeepEmptyLinesAtTheStartOfBlocks: false MacroBlockBegin: '' MacroBlockEnd: '' MaxEmptyLinesToKeep: 1 NamespaceIndentation: None ObjCBlockIndentWidth: 2 ObjCSpaceAfterProperty: false ObjCSpaceBeforeProtocolList: false PenaltyBreakAssignment: 2 PenaltyBreakBeforeFirstCallParameter: 1 PenaltyBreakComment: 300 
PenaltyBreakFirstLessLess: 120 PenaltyBreakString: 1000 PenaltyExcessCharacter: 1000000 PenaltyReturnTypeOnItsOwnLine: 200 PointerAlignment: Left RawStringFormats: - Delimiters: [pb] Language: TextProto BasedOnStyle: google ReflowComments: true SortIncludes: true SortUsingDeclarations: true SpaceAfterCStyleCast: false SpaceAfterTemplateKeyword: true SpaceBeforeAssignmentOperators: true SpaceBeforeParens: ControlStatements SpaceInEmptyParentheses: false SpacesBeforeTrailingComments: 2 SpacesInAngles: false SpacesInContainerLiterals: true SpacesInCStyleCastParentheses: false SpacesInParentheses: false SpacesInSquareBrackets: false Standard: Auto TabWidth: 8 UseTab: Never ... ================================================ FILE: .github/stale.yml ================================================ # Number of days of inactivity before an issue becomes stale daysUntilStale: 14 # Number of days of inactivity before a stale issue is closed daysUntilClose: 1 # Issues with these labels will never be considered stale exemptLabels: - pinned - security # Label to use when marking an issue as stale staleLabel: stale # Comment to post when marking an issue as stale. Set to `false` to disable markComment: > This issue has been automatically marked as stale because it has not had recent activity. It will be closed if no further activity occurs. Thank you for your contributions. # Comment to post when closing a stale issue. 
Set to `false` to disable closeComment: false ================================================ FILE: .gitignore ================================================ devel/ logs/ build/ bin/ lib/ msg_gen/ srv_gen/ msg/*Action.msg msg/*ActionFeedback.msg msg/*ActionGoal.msg msg/*ActionResult.msg msg/*Feedback.msg msg/*Goal.msg msg/*Result.msg msg/_*.py build_isolated/ devel_isolated/ # Generated by dynamic reconfigure *.cfgc /cfg/cpp/ /cfg/*.py # Ignore generated docs *.dox *.wikidoc # eclipse stuff .project .cproject # qcreator stuff CMakeLists.txt.user srv/_*.py *.pcd *.pyc qtcreator-* *.user /planning/cfg /planning/docs /planning/src *~ # Emacs .#* # Catkin custom files CATKIN_IGNORE # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] *$py.class # C extensions *.so # Distribution / packaging .Python build/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ lib/ lib64/ parts/ sdist/ var/ wheels/ share/python-wheels/ *.egg-info/ .installed.cfg *.egg MANIFEST # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. *.manifest *.spec # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .nox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *.cover *.py,cover .hypothesis/ .pytest_cache/ cover/ # Translations *.mo *.pot # Django stuff: *.log local_settings.py db.sqlite3 db.sqlite3-journal # Flask stuff: instance/ .webassets-cache # Scrapy stuff: .scrapy # Sphinx documentation docs/_build/ # PyBuilder .pybuilder/ target/ # Jupyter Notebook .ipynb_checkpoints # IPython profile_default/ ipython_config.py # pyenv # For a library or package, you might want to ignore these files since the code is # intended to run in multiple environments; otherwise, check them in: # .python-version # pipenv # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 
# However, in case of collaboration, if having platform-specific dependencies or dependencies # having no cross-platform support, pipenv may install dependencies that don't work, or not # install all needed dependencies. #Pipfile.lock # PEP 582; used by e.g. github.com/David-OConnor/pyflow __pypackages__/ # Celery stuff celerybeat-schedule celerybeat.pid # SageMath parsed files *.sage.py # Environments .env .venv env/ venv/ ENV/ env.bak/ venv.bak/ # Spyder project settings .spyderproject .spyproject # Rope project settings .ropeproject # mkdocs documentation /site # mypy .mypy_cache/ .dmypy.json dmypy.json # Pyre type checker .pyre/ # pytype static type analyzer .pytype/ # Cython debug symbols cython_debug/ # PyCharm # JetBrains specific template is maintainted in a separate JetBrains.gitignore that can # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore # and can be added to the global gitignore or merged into this file. For a more nuclear # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
#.idea/ # VSCode .vscode ================================================ FILE: CITATION.bib ================================================ @article{lin2023lio-segmot, title={Asynchronous State Estimation of Simultaneous Ego-motion Estimation and Multiple Object Tracking for LiDAR-Inertial Odometry}, author={Lin, Yu-Kai and Lin, Wen-Chieh and Wang, Chieh-Chih}, booktitle = {2023 International Conference on Robotics and Automation, {ICRA} 2023, London, UK, May 2023}, pages = {10616--10622}, year = {2023}, } ================================================ FILE: CMakeLists.txt ================================================ cmake_minimum_required(VERSION 2.8.3) project(lio_segmot) set(CMAKE_BUILD_TYPE "Release") set(CMAKE_CXX_FLAGS "-std=c++11") set(CMAKE_CXX_FLAGS_RELEASE "-O3 -Wall -g0 -pthread") find_package(catkin REQUIRED COMPONENTS tf roscpp rospy cv_bridge rosbag # pcl library pcl_conversions # msgs std_msgs sensor_msgs geometry_msgs nav_msgs message_generation visualization_msgs jsk_recognition_msgs jsk_topic_tools ) find_package(OpenMP REQUIRED) find_package(PCL REQUIRED QUIET) find_package(OpenCV REQUIRED QUIET) find_package(GTSAM REQUIRED QUIET) add_message_files( DIRECTORY msg FILES cloud_info.msg ObjectState.msg ObjectStateArray.msg Diagnosis.msg flags.msg ) add_service_files( DIRECTORY srv FILES save_map.srv detection.srv save_estimation_result.srv ) generate_messages( DEPENDENCIES geometry_msgs std_msgs nav_msgs sensor_msgs jsk_recognition_msgs ) catkin_package( INCLUDE_DIRS include DEPENDS PCL GTSAM CATKIN_DEPENDS std_msgs nav_msgs geometry_msgs sensor_msgs message_runtime message_generation visualization_msgs jsk_recognition_msgs jsk_topic_tools ) # include directories include_directories( include ${catkin_INCLUDE_DIRS} ${PCL_INCLUDE_DIRS} ${OpenCV_INCLUDE_DIRS} ${GTSAM_INCLUDE_DIR} ) # link directories link_directories( include ${PCL_LIBRARY_DIRS} ${OpenCV_LIBRARY_DIRS} ${GTSAM_LIBRARY_DIRS} ) ########### ## Build ## ########### # Range Image 
Projection add_executable(${PROJECT_NAME}_imageProjection src/imageProjection.cpp) add_dependencies(${PROJECT_NAME}_imageProjection ${catkin_EXPORTED_TARGETS} ${PROJECT_NAME}_generate_messages_cpp) target_link_libraries(${PROJECT_NAME}_imageProjection ${catkin_LIBRARIES} ${PCL_LIBRARIES} ${OpenCV_LIBRARIES}) # Feature Association add_executable(${PROJECT_NAME}_featureExtraction src/featureExtraction.cpp) add_dependencies(${PROJECT_NAME}_featureExtraction ${catkin_EXPORTED_TARGETS} ${PROJECT_NAME}_generate_messages_cpp) target_link_libraries(${PROJECT_NAME}_featureExtraction ${catkin_LIBRARIES} ${PCL_LIBRARIES} ${OpenCV_LIBRARIES}) # Mapping Optimization add_executable(${PROJECT_NAME}_mapOptimization src/mapOptimization.cpp src/factor.cpp src/solver.cpp) add_dependencies(${PROJECT_NAME}_mapOptimization ${catkin_EXPORTED_TARGETS} ${PROJECT_NAME}_generate_messages_cpp) target_compile_options(${PROJECT_NAME}_mapOptimization PRIVATE ${OpenMP_CXX_FLAGS}) target_link_libraries(${PROJECT_NAME}_mapOptimization ${catkin_LIBRARIES} ${PCL_LIBRARIES} ${OpenCV_LIBRARIES} ${OpenMP_CXX_FLAGS} gtsam) # IMU Preintegration add_executable(${PROJECT_NAME}_imuPreintegration src/imuPreintegration.cpp) target_link_libraries(${PROJECT_NAME}_imuPreintegration ${catkin_LIBRARIES} ${PCL_LIBRARIES} ${OpenCV_LIBRARIES} gtsam) # Offline ROSBag Player add_executable(${PROJECT_NAME}_offlineBagPlayer src/offlineBagPlayer.cpp) target_link_libraries(${PROJECT_NAME}_offlineBagPlayer ${catkin_LIBRARIES} ${PCL_LIBRARIES} ${OpenCV_LIBRARIES} gtsam) ================================================ FILE: LICENSE ================================================ BSD 3-Clause License Copyright (c) 2020, Tixiao Shan Copyright (c) 2022-2023, Yu-Kai Lin All rights reserved. 
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ================================================ FILE: README.md ================================================ # LIO-SEGMOT The official implementation of LIO-SEGMOT (**L**iDAR-**I**nertial **O**dometry via **S**imultaneous **Eg**o-motion Estimation and **M**ultiple **O**bject **T**racking), an optimization-based odometry approach targeted for dynamic environments. LIO-SEGMOT can provide continuous object tracking results while preserving the keyframe selection mechanism in the odometry system. 
This work is accepted for publication in [ICRA 2023](https://www.icra2023.org/). ![TEASER_GIF](./docs/_static/images/LIO-SEGMOT_KITTI_teaser.GIF) You can check out [our video](https://youtu.be/5HtnDFPerVo) to understand the main idea of LIO-SEGMOT. For more, please refer to our paper: - Yu-Kai Lin, Wen-Chieh Lin, Chieh-Chih Wang, **Asynchronous State Estimation of Simultaneous Ego-motion Estimation and Multiple Object Tracking for LiDAR-Inertial Odometry**. _2023 International Conference on Robotics and Automation (ICRA)_, pp. 10616--10622, May 2023. ([paper](https://doi.org/10.1109/ICRA48891.2023.10161269)) ([preprint](https://gpl.cs.nctu.edu.tw/Steve-Lin/LIO-SEGMOT/preprint.pdf)) ([code](https://github.com/StephLin/LIO-SEGMOT)) ([video](https://youtu.be/5HtnDFPerVo)) If you use this project in your research, please cite: ```bibtex @article{lin2023lio-segmot, title={Asynchronous State Estimation of Simultaneous Ego-motion Estimation and Multiple Object Tracking for LiDAR-Inertial Odometry}, author={Lin, Yu-Kai and Lin, Wen-Chieh and Wang, Chieh-Chih}, booktitle = {2023 International Conference on Robotics and Automation, {ICRA} 2023, London, UK, May 2023}, pages = {10616--10622}, year = {2023}, } ``` - [:newspaper: Poster](#newspaper-poster) - [:gear: Installation](#gear-installation) - [Step 1. Preparing the Dependencies](#step-1-preparing-the-dependencies) - [Step 2. Building the LIO-SEGMOT Project](#step-2-building-the-lio-segmot-project) - [Step 3. 
Preparing Object Detection Services](#step-3-preparing-object-detection-services) - [:card_file_box: Sample Datasets](#card_file_box-sample-datasets) - [:running_man: Run](#running_man-run) - [:wheelchair: Services of LIO-SEGMOT](#wheelchair-services-of-lio-segmot) - [`/lio_segmot/save_map`](#lio_segmotsave_map) - [`/lio_segmot/save_estimation_result`](#lio_segmotsave_estimation_result) - [:memo: Remarks](#memo-remarks) - [Hyperparameters](#hyperparameters) - [Hierarchical Criterion](#hierarchical-criterion) - [Factor Graph Optimization](#factor-graph-optimization) - [High-speed Moving Object Supports in Early Steps](#high-speed-moving-object-supports-in-early-steps) - [Lifecycle Management of Tracking Objects](#lifecycle-management-of-tracking-objects) - [Limitations of LIO-SEGMOT](#limitations-of-lio-segmot) - [Possible Future Research Directions](#possible-future-research-directions) - [:gift: Acknowledgement](#gift-acknowledgement) ## :newspaper: Poster ![Poster](./docs/_static/images/poster.png) ## :gear: Installation The project is originally developed in **Ubuntu 18.04**, and the following instruction supposes that you are using Ubuntu 18.04 as well. I am not sure if it also works with other Ubuntu versions or other Linux distributions, but maybe you can give it a try :+1: Also, please feel free to open an issue if you encounter any problems of the following instruction. ### Step 1. Preparing the Dependencies Please prepare the following packages or libraries used in LIO-SEGMOT: 1. [ROS Melodic](http://wiki.ros.org/melodic/Installation/Ubuntu) (w/ Desktop-Full Install) and the following dependencies: ```bash #!/bin/bash sudo apt update sudo apt install -y "ros-${ROS_DISTRO}-navigation" \ "ros-${ROS_DISTRO}-robot-localization" \ "ros-${ROS_DISTRO}-robot-state-publisher" \ "ros-${ROS_DISTRO}-jsk-recognition-msgs" \ "ros-${ROS_DISTRO}-jsk-rviz-plugins" ``` 2. 
[gtsam 4.0.3](https://github.com/borglab/gtsam/tree/4.0.3) ```bash #!/bin/bash cd ~ git clone -b 4.0.3 https://github.com/borglab/gtsam && cd gtsam mkdir build && cd build cmake .. sudo make install ``` ### Step 2. Building the LIO-SEGMOT Project You can use the following command to build the project: ```bash #!/bin/bash source "/opt/ros/${ROS_DISTRO}/setup.bash" mkdir -p ~/catkin_ws/src cd ~/catkin_ws/src git clone git@github.com:StephLin/LIO-SEGMOT.git cd .. catkin_make ``` ### Step 3. Preparing Object Detection Services We provide two object detection services for LIO-SEGMOT: - [StephLin/SE-SSD-ROS (Apache-2.0 license)](https://github.com/StephLin/SE-SSD-ROS) (based on [Vegeta2020/SE-SSD](https://github.com/Vegeta2020/SE-SSD)) - [StephLin/livox_detection_lio_segmot (GPL-3.0 license)](https://github.com/StephLin/livox_detection_lio_segmot) (based on [Livox-SDK/livox_detection](https://github.com/Livox-SDK/livox_detection)) Please refer to their installation instructions accordingly. ## :card_file_box: Sample Datasets We provide the following pre-built bag files for KITTI raw sequences and the Hsinchu dataset (GuangfuRoad sequence): | Dataset | Sequence | Bag File | Ground Truth Trajectory | | ------- | ----------- | ---------------------------------------------------- | ---------------------------------------------------- | | KITTI | 0926-0009 | [bag](http://140.113.150.180:5000/sharing/BrHtqyElq) | [tum](http://140.113.150.180:5000/sharing/HGebyDSCR) | | KITTI | 0926-0013 | [bag](http://140.113.150.180:5000/sharing/AQQa3kMSH) | [tum](http://140.113.150.180:5000/sharing/gREpr4xdI) | | KITTI | 0926-0014 | [bag](http://140.113.150.180:5000/sharing/HOgV5T79H) | [tum](http://140.113.150.180:5000/sharing/POuFRJBQI) | | KITTI | 0926-0015 | [bag](http://140.113.150.180:5000/sharing/XnoNLKSUQ) | [tum](http://140.113.150.180:5000/sharing/RBw1BeftU) | | KITTI | 0926-0032 | [bag](http://140.113.150.180:5000/sharing/ikbtkpWve) | 
[tum](http://140.113.150.180:5000/sharing/aQdaEnVjc) | | KITTI | 0926-0051 | [bag](http://140.113.150.180:5000/sharing/N1o9NcgU4) | [tum](http://140.113.150.180:5000/sharing/Wzu8QWoEC) | | KITTI | 0926-0101 | [bag](http://140.113.150.180:5000/sharing/lhhohwfJT) | [tum](http://140.113.150.180:5000/sharing/JCOaJHw04) | | Hsinchu | GuangfuRoad | [bag](http://gofile.me/56KxB/mz6HOYOMG) | [tum](http://gofile.me/56KxB/t5smaOAZk) | If you cannot download sample datasets from above links, please refer to [this alternative link (Google Drive)](https://drive.google.com/drive/folders/17QwfTMCv-2XdgOLxGdrnCwybcSYQhs7S?usp=drive_link). Ground truth robot trajectories (based on GPS data provided by KITTI) are stored as [the TUM format](https://vision.in.tum.de/data/datasets/rgbd-dataset/file_formats#ground-truth_trajectories). Each row has 8 components containing timestamps (sec), xyz-position (meter), and xyzw-orientation (quaternion): ``` timestamp x y z qx qy qz qw ``` ## :running_man: Run Please follow the steps to execute LIO-SEGMOT properly: 1. (Optional) Launch the ROS core: ```bash roscore ``` 2. Launch the core LIO-SEGMOT service: ```bash #!/bin/bash # Please select one of the following configs to launch the service properly: # 1. With KITTI configuration roslaunch lio_segmot run_kitti.launch # 2. With Hsinchu configuration roslaunch lio_segmot run_hsinchu.launch # 3. Undefined (same as KITTI configuration) roslaunch lio_segmot run.launch ``` 3. Launch the selected object detection service: ```bash #!/bin/bash # SE-SSD-ROS & livox_detection_lio_segmot # Please check their documentation to see how they are launched ``` 4. Start the customized ROS bag player: ```bash #!/bin/bash rosrun lio_segmot lio_segmot_offlineBagPlayer _bag_filename:="path/to/your/sequence.bag" ``` The default registered LiDAR and IMU topics are `/points_raw` and `/imu_raw`, respectively. If you want to register other LiDAR/IMU topics, please add additional options `_lidar_topic` and `_imu_topic`. 
For example, if you are using the GuangfuRoad sequence (`/velodyne_points` and `/imu/data` for LiDAR and IMU topics, respectively): ```bash rosrun lio_segmot lio_segmot_offlineBagPlayer _bag_filename:="GuangfuRoad-06-13.bag" \ _lidar_topic:="/velodyne_points" \ _imu_topic:="/imu/data" ``` ## :wheelchair: Services of LIO-SEGMOT ### `/lio_segmot/save_map` ```txt Usage: rosservice call /lio_segmot/save_map [RESOLUTION] [OUTPUT_DIR] Example: rosservice call /lio_segmot/save_map 0.2 /path/to/a/directory/ ``` This service saves LiDAR map to the local machine. ### `/lio_segmot/save_estimation_result` ```txt Usage: rosservice call /lio_segmot/save_estimation_result ``` This service outputs current estimation results including - `nav_msgs::Path robotTrajectory`: The robot trajectory - $\color{gray}\textsf{(INTERNAL USE)}$ `nav_msgs::Path[] objectTrajectories`: Trajectories for each object (indexed by the factor graph) - $\color{gray}\textsf{(INTERNAL USE)}$ `nav_msgs::Path[] objectVelocities`: Linear and angular velocities for each object (indexed by the factor graph) - `nav_msgs::Path[] trackingObjectTrajectories`: Trajectories for each object (indexed by LIO-SEGMOT) - `nav_msgs::Path[] trackingObjectVelocities`: Linear and angular velocities for each object (indexed by LIO-SEGMOT) - `lio_segmot::ObjectStateArray[] trackingObjectStates`: States for each object during its lifetime (indexed by LIO-SEGMOT) - $\color{gray}\textsf{(INTERNAL USE)}$ `lio_segmot::flags[] objectFlags`: Flags for each object during its lifetime (indexed by the factor graph) - `lio_segmot::flags[] trackingObjectFlags`: Flags for each object during its lifetime (indexed by LIO-SEGMOT) in which custom types `lio_segmot::ObjectStateArray` (underlying `lio_segmot::ObjectState`) and `lio_segmot::flags` are given by - `lio_segmot::ObjectStateArray` ```cpp Header header lio_segmot::ObjectState[] objects ``` - `lio_segmot::ObjectState` ```cpp Header header // The corresponding detection (measurement) 
jsk_recognition_msgs::BoundingBox detection // States of object pose and velocity in the factor graph geometry_msgs::Pose pose geometry_msgs::Pose velocity // Residual and innovation of the tightly-coupled detection factor bool hasTightlyCoupledDetectionError float64 tightlyCoupledDetectionError // Residual float64 initialTightlyCoupledDetectionError // Innovation // Residual and innovation of the loosely-coupled detection factor bool hasLooselyCoupledDetectionError float64 looselyCoupledDetectionError // Residual float64 initialLooselyCoupledDetectionError // Innovation // Residual and innovation of the smooth movement factor bool hasMotionError float64 motionError // Residual float64 initialMotionError // Innovation int32 index // Object index int32 lostCount // Counter of losing detections float64 confidence // Detection's confidence score (given by detection methods) bool isTightlyCoupled // Is the object tightly-coupled at this moment? bool isFirst // Is the object just initialized at this moment? ``` - `lio_segmot/flags` ```cpp // Flags of the object in its lifetime int32[] flags // 1: the object is tightly-coupled // 0: the object is loosely-coupled ``` ## :memo: Remarks ### Hyperparameters There are various covariance matrices designed for the hirarchical criterion (innovation filtering) and factor graph optimization (increments of LIO-SEGMOT w.r.t. LIO-SAM) in LIO-SEGMOT. This section is going to explain them. > All covariance matrices in settings are expressed as diagonal vectors. Taking [the KITTI configuration](./config/params_kitti.yaml) for example, we have the following settings: #### Hierarchical Criterion This section collects settings for the hierarchical criterion. It can be viewed as a kind of innovation filtering. 
In brief, the criterion is designed to progressively make the following decisions when a new detection $\boldsymbol{z}\in SE(3)$ is coming into the system: | ID | Description | | -------- | --------------------------------------------------------------------------- | | **(Q1)** | Does the detection belong to any existing object $\boldsymbol{x}_{t,i}$? | | **(Q2)** | If Q1 holds, does $\boldsymbol{z}$ follows the $i$-th object's motion? | | **(Q3)** | If Q1 and Q2 holds, should the tightly-coupled detection factor be applied? | The first two questions **(Q1)** and **(Q2)** are determined by using the Mahalanobis distance of the error vector, $$ \Big\Vert\text{ detection error of }\boldsymbol{z}\text{ and the }i\text{-th object } \boldsymbol{x} _{t,i}\text{ }\Big\Vert _{\Sigma} \leq \varepsilon, $$ with given covariance with given covariance matrices $\Sigma\in\{\Sigma_ {\text{Q}_ 1},\Sigma_ {\text{Q}_ 2}\}\subsetneq\mathbb{R}^{6\times 6}$ and a threshold $\varepsilon>0$. We assume that $\Sigma_ {\text{Q}_ 2}-\Sigma_ {\text{Q}_ 1}$ is positive semidefinite (PSD), i.e., $\Sigma_ {\text{Q}_ 2}-\Sigma_{\text{Q}_ 1} \succeq 0$, to prevent ambiguity of the hierarchical criterion that **(Q2)** holds but **(Q1)** does not hold. Two spatial information-based tests are conducted to determine **(Q3)**, which are the detection constraint and the velocity constraint: - **(Detection Constraint)** The above equation holds with another given covariance matrix $\Sigma_ {\text{Q}_ {3,1}}$ that satisfies $\Sigma_ {\text{Q}_ {3,1}}-\Sigma_ {\text{Q}_ {2}} \succeq 0$. - **(Velocity constraint)** The variance of velocities in previous steps is small enough. 
That is, $$\frac{1}{N}\sum_ {s=1}^{N} \Big\Vert \text{Log}(\boldsymbol{v}_ {t-s,i}) - \text{Log}(\bar{\boldsymbol{v}}_ {t,i}) \Big\Vert_ {\Sigma_{Q_ {3,2}}}^2 \leq \varepsilon$$ with a given covariance matrix $\Sigma_{Q_{3,2}}$, where $N$ is the fixed number of previous velocities of object states and $\bar{\boldsymbol{v}}_{t,i}\in SE(3)$ is the mean of the $N$ previous velocities. If **(Q1)** holds for the detection $\boldsymbol{z}$ and the corresponding $i$-th object, the new state of the $i$-th object along with a loosely-coupled detection factor would be added to the factor graph. Furthermore, if **(Q2)** holds, a constant velocity factor and a smooth movement factor would be also added to the factor graph. Finally, if **(Q3)** holds, the loosely-coupled detection factor would be replaced with a tightly-coupled detection factor. It means that the $i$-th object are regarded as a reliable object that are suitable to refine the odometry. | Notation | Setting | Description | Default Value | | ---------------------------------- | --------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -------------------------------------------------- | | $\varepsilon$ | `detectionMatchThreshold` | The threshold to classify all Mahalanobis distances in the hirarchical criterion (except for the tightly-coupled detection factor). | 19.5 | | $\Sigma_{\text{Q}_1}$ | `dataAssociationVarianceVector` | The covariance matrix to determine if a detection belongs to a given object. This covariance is used to maintain tracking ID. | `[3.0e-4, 3.0e-4, 3.0e-4, 5.0e-2, 3.0e-2, 3.0e-2]` | | $\Sigma_{\text{Q}_2}$ | `looselyCoupledMatchingVarianceVector` | The covariance matrix to determine if a detection follows the object's motion. 
| `[1.0e-4, 1.0e-4, 1.0e-4, 2.0e-3, 2.0e-3, 2.0e-3]` | | $\varepsilon^\prime$ | `tightCouplingDetectionErrorThreshold` | The threshold to classify the Mahalanobis distance in the detection constraint of the tightly-coupled detection factor checks. | 26.0 | | $\Sigma_{\text{Q}_{3,1}}^\prime$ | `tightlyCoupledMatchingVarianceVector` | The covariance to determine if a detection satisfies the detection constraint in the tightly-coupled detection factor checks. | `[8.0e-6, 8.0e-6, 8.0e-6, 1.0e-4, 1.0e-4, 1.0e-4]` | | $N$ | `numberOfVelocityConsistencySteps` | The number of samples used in velocity constraint of the tightly-coupled detection checks. | 4 | | $N^\prime$ | `numberOfPreLooseCouplingSteps` | The number of steps that objects should only use loosely-coupled detection factors (due to velocity constraint of the tightly-coupled detection checks, see [below](#high-speed-moving-object-supports-in-early-steps) for more information). | 6 | | $\sigma_{\text{Q}_{3,2}}^\text{A}$ | `objectAngularVelocityConsistencyVarianceThreshold` | The angular part of the covariance matrix to determine if the object satisfies the velocity constraint in the tightly-coupled detection factor checks. | 1.0e-5 | | $\sigma_{\text{Q}_{3,2}}^\text{L}$ | `objectLinearVelocityConsistencyVarianceThreshold` | The linear part of the covariance matrix to determine if the object satisfies the velocity constraint in the tightly-coupled detection factor checks. | 1.0e-2 | For engineering purposes, we use two thresholds $\varepsilon$ and $\varepsilon^\prime$ in the implementation. In addition, we decouple the angular part and the linear part of $\Sigma_{\text{Q}_{3,2}}$. 
The following equations are shown to coincide with the expression used in our paper: $$ \begin{aligned} \displaystyle\Sigma _{\text{Q} _{3,1}} &= \left(\frac{\varepsilon^\prime}{\varepsilon}\right)^2 \cdot \displaystyle\Sigma _{\text{Q} _{3,1}}^\prime, \\ \displaystyle\Sigma _{\text{Q} _{3,2}}^\prime &= \begin{bmatrix}\sigma _{\text{Q} _{3,2}}^\text{A} \\ & \sigma _{\text{Q} _{3,2}}^\text{A} \\ && \sigma _{\text{Q} _{3,2}}^\text{A} \\ &&& \sigma _{\text{Q} _{3,2}}^\text{L} \\ &&&& \sigma _{\text{Q} _{3,2}}^\text{L} \\ &&&&& \sigma _{\text{Q} _{3,2}}^\text{L} \end{bmatrix}, \\ \displaystyle\Sigma _{\text{Q} _{3,2}} &= \frac{1}{\varepsilon^2} \cdot \displaystyle\Sigma _{\text{Q} _{3,2}}^\prime. \end{aligned} $$ #### Factor Graph Optimization Covariance matrices used in factor graph optimization. Different from the above section, they are essential to "balance" different types of measurements in a unified factor graph. Those matrices are relatively rare to be modified. | Notation | Setting | Description | Default Value | | -------------------- | ---------------------------------------- | --------------------------------------------------------------------------------------------- | -------------------------------------------------- | | $\Sigma_{\text{C}}$ | `constantVelocityDiagonalVarianceVector` | The covariance matrix of constant velocity factors used in factor graph optimization. | `[2.0e-4, 2.0e-4, 1.0e-3, 2.0e-1, 1.0e-1, 1.0e-1]` | | $\Sigma_{\text{M}}$ | `motionDiagonalVarianceVector` | The covariance matrix of smooth movement factors used in factor graph optimization. | `[2.0e-4, 2.0e-4, 1.0e-3, 1.0e-1, 1.0e-2, 1.0e-2]` | | $\Sigma_{\text{LC}}$ | `looselyCoupledDetectionVarianceVector` | The covariance matrix of loosely-coupled detection factors used in factor graph optimization. 
| `[2.0e-4, 2.0e-4, 2.0e-4, 1.5e-3, 1.5e-3, 1.5e-3]` | | $\Sigma_{\text{TC}}$ | `tightlyCoupledDetectionVarianceVector` | The covariance matrix of tightly-coupled detection factors used in factor graph optimization. | `[2.0e-4, 2.0e-4, 2.0e-4, 1.5e-3, 1.5e-3, 1.5e-3]` | #### High-speed Moving Object Supports in Early Steps As all tracking objects' velocities are initialized with zero-speed, it may be hard to associate detections in different moments for high-speed moving objects in the early stage. To mitigate this issue, a larger covariance matrix for **(Q2)** in the hierarchical criterion is used in the first few steps to accommodate objects that are moving fast. Since those steps are used to figure out the initial speed of an object, we do not count them toward the velocity constraint in the tightly-coupled detection factor checks. Therefore, we have $N^\prime = N + N^\text{E}$. | Notation | Setting | Description | Default Value | | ------------------------------ | --------------------------------------------- | --------------------------------------------------------------------------------------------- | -------------------------------------------------- | | $N^\text{E}$ | `numberOfEarlySteps` | Number of the first steps to accommodate high-speed moving objects. | 2 | | $\Sigma_{\text{Q}_2}^\text{E}$ | `earlyLooselyCoupledMatchingVarianceVector` | The covariance matrix to determine if a detection follows the object's motion. | `[3.0e-4, 3.0e-4, 3.0e-4, 5.0e-2, 5.0e-3, 5.0e-3]` | | $\Sigma_{\text{C}}^\text{E}$ | `earlyConstantVelocityDiagonalVarianceVector` | The covariance matrix of constant velocity factors used in factor graph optimization during the early steps. | `[2.0e-4, 2.0e-4, 1.0e-3, 2.0e-1, 1.0e-1, 1.0e-1]` | #### Lifecycle Management of Tracking Objects In real-world applications, it is likely that detections of tracked objects will be lost due to occlusion or other complicated environmental circumstances. 
To mitigate frequent ID-switching in multiple object tracking due to the above issue, we still track each object for a little while, even though it does not have any corresponding detections. | Notation | Setting | Description | Default Value | | ------------ | ---------------------------- | ------------------------------------------------------------------------------------------ | ------------- | | $N^\text{L}$ | `trackingStepsForLostObject` | Number of steps that LIO-SEGMOT still keeps an object with missing detections in the system. | 3 | ### Limitations of LIO-SEGMOT Currently, most state-of-the-art object detection approaches (e.g., [SE-SSD](https://arxiv.org/abs/2104.09804), [PointPillars](https://arxiv.org/abs/1812.05784), [PointRCNN](https://arxiv.org/abs/1812.04244), [PV-RCNN](https://arxiv.org/abs/1912.13192), [SPG](https://arxiv.org/abs/2108.06709), and [ST3D](https://arxiv.org/abs/2103.05346)) are constructed under machine learning-based neural network architectures, while the issue of domain adaptation for data-driven object detection approaches still remains a challenging open problem targeted by recent research (e.g., [SPG](https://arxiv.org/abs/2108.06709) and [ST3D](https://arxiv.org/abs/2103.05346)). That is, the performance of object detection models varies under different geographic appearances or weather conditions. [The model weight of SE-SSD](https://github.com/Vegeta2020/SE-SSD) used in LIO-SEGMOT is trained on a subset of the KITTI dataset, and thus it might work well in other subsets of the KITTI dataset. However, we can observe that there are numerous false positive detections in the Hsinchu dataset. This forces us to choose different detection models (i.e., [PointPillars w/ Livox's model weights](https://github.com/Livox-SDK/livox_detection)) when experimenting with LIO-SEGMOT on different real-world datasets. 
In addition, since the model still cannot produce detection results in the Hsinchu dataset as good as those SE-SSD produces in the KITTI dataset, we need to use a stricter criterion for the detection constraint in the tightly-coupled detection factor checks (by decreasing $\varepsilon^\prime$ from 26.0 to 19.0). This points out the first limitation of LIO-SEGMOT. That is, covariance matrices and thresholds related to object detections (mainly hyperparameters in the hierarchical criterion) are required to be adjusted according to the stability of object detections. Although this affects the generalization capability of the proposed method, we believe that the problem can be mitigated with breakthroughs in domain adaptation for 3-D object detection. The second limitation of LIO-SEGMOT is related to the motion model of tracking objects. If an object does not move at constant velocity, LIO-SEGMOT may miscalculate the object velocity, leading to an inaccurate predicted object pose in the subsequent state. The main reason is that objects' velocities are supposed to be steady and constant in LIO-SEGMOT. This could be resolved by introducing multiple motion models to LIO-SEGMOT and adaptively selecting proper models during the factor graph optimization, in which the concept is similar to an interacting multiple model (IMM) in filtering-based object tracking approaches. ### Possible Future Research Directions There are two possible future research directions of LIO-SEGMOT: - The optimization problem of LIO-SEGMOT produces a multi-robot architecture that may break the efficiency of maintaining single-root Bayes trees in iSAM2. It causes a non-negligible computational cost, especially when there are lots of dynamic objects coupled in the system. In future research, we would like to overcome the bottleneck by introducing the [multi-robot iSAM2 (MR-iSAM2)](http://doi.org/10.1109/iros51168.2021.9636687) algorithm. 
- In addition, rule-based coupling conditions make the proposed method lack the ability to explore global optimality when considering combinatorial ambiguity as unknown integer variables. It raises a complicated mixed-integer programming (MIP) problem, whereas a recent work called [multi-hypothesis iSAM (MH-iSAM2)](https://www.cs.cmu.edu/~kaess/pub/Hsiao19icra.pdf) motivates us to investigate the challenging problem in the future. ## :gift: Acknowledgement The project is mainly developed based on [Tixiao Shan](https://github.com/TixiaoShan)'s excellent work [LIO-SAM](https://github.com/TixiaoShan/LIO-SAM), which helped me a lot in constructing LIO-SEGMOT. I would like to express my sincere thanks first. In addition, I would like to thank [Wu Zheng](https://github.com/Vegeta2020) and [Livox](https://github.com/Livox-SDK) for developing and releasing their awesome object detection modules [SE-SSD](https://github.com/Vegeta2020/SE-SSD) and [Livox Detection](https://github.com/Livox-SDK/livox_detection) respectively. ================================================ FILE: config/doc/kitti2bag/README.md ================================================ # kitti2bag ## How to run it? ```bash wget https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0084/2011_09_26_drive_0084_sync.zip wget https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0084/2011_09_26_drive_0084_extract.zip wget https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_calib.zip unzip 2011_09_26_drive_0084_sync.zip unzip 2011_09_26_drive_0084_extract.zip unzip 2011_09_26_calib.zip python kitti2bag.py -t 2011_09_26 -r 0084 raw_synced . ``` That's it. You have a bag that contains your data. Other source files can be found on the [KITTI raw data](http://www.cvlibs.net/datasets/kitti/raw_data.php) page. 
================================================ FILE: config/doc/kitti2bag/kitti2bag.py ================================================ #!env python # -*- coding: utf-8 -*- import sys try: import pykitti except ImportError as e: print('Could not load module \'pykitti\'. Please run `pip install pykitti`') sys.exit(1) import argparse import os from datetime import datetime import cv2 import numpy as np import rosbag import rospy import sensor_msgs.point_cloud2 as pcl2 import tf from cv_bridge import CvBridge from geometry_msgs.msg import Transform, TransformStamped, TwistStamped from sensor_msgs.msg import CameraInfo, Imu, NavSatFix, PointField from std_msgs.msg import Header from tf2_msgs.msg import TFMessage from tqdm import tqdm def save_imu_data(bag, kitti, imu_frame_id, topic): print("Exporting IMU") for timestamp, oxts in zip(kitti.timestamps, kitti.oxts): q = tf.transformations.quaternion_from_euler(oxts.packet.roll, oxts.packet.pitch, oxts.packet.yaw) imu = Imu() imu.header.frame_id = imu_frame_id imu.header.stamp = rospy.Time.from_sec(float(timestamp.strftime("%s.%f"))) imu.orientation.x = q[0] imu.orientation.y = q[1] imu.orientation.z = q[2] imu.orientation.w = q[3] imu.linear_acceleration.x = oxts.packet.af imu.linear_acceleration.y = oxts.packet.al imu.linear_acceleration.z = oxts.packet.au imu.angular_velocity.x = oxts.packet.wf imu.angular_velocity.y = oxts.packet.wl imu.angular_velocity.z = oxts.packet.wu bag.write(topic, imu, t=imu.header.stamp) def save_imu_data_raw(bag, kitti, imu_frame_id, topic): print("Exporting IMU Raw") synced_path = kitti.data_path unsynced_path = synced_path.replace('sync', 'extract') imu_path = os.path.join(unsynced_path, 'oxts') # read time stamp (convert to ros seconds format) ambiguous_index = None with open(os.path.join(imu_path, 'timestamps.txt')) as f: lines = f.readlines() imu_datetimes = [] imu_status = [] for line in lines: if len(line) == 1: continue timestamp = datetime.strptime(line[:-4], '%Y-%m-%d 
%H:%M:%S.%f') imu_datetime = float(timestamp.strftime('%s.%f')) if len(imu_datetimes) == 0: pass elif imu_datetimes[-1] > imu_datetime: if ambiguous_index is None: ambiguous_index = np.argwhere(np.array(imu_datetimes) < imu_datetime).flatten()[-1] + 1 for i in range(ambiguous_index, len(imu_datetimes)): imu_status[i] = False if len(imu_datetimes) > 0 and imu_datetime - imu_datetimes[-1] >= 0.02: ambiguous_index = len(imu_datetimes) imu_datetimes.append(imu_datetime) imu_status.append(True) # # fix imu time using a linear model (may not be ideal, ^_^) # imu_index = np.asarray(range(len(imu_datetimes)), dtype=np.float64) # z = np.polyfit(imu_index, imu_datetimes, 1) # imu_datetimes_new = z[0] * imu_index + z[1] # imu_datetimes = imu_datetimes_new.tolist() # assert((np.diff(imu_datetimes) > 0).all()) # get all imu data imu_data_dir = os.path.join(imu_path, 'data') imu_filenames = sorted(os.listdir(imu_data_dir)) imu_data = [None] * len(imu_filenames) for i, imu_file in enumerate(imu_filenames): imu_data_file = open(os.path.join(imu_data_dir, imu_file), "r") for line in imu_data_file: if len(line) == 1: continue stripped_line = line.strip() line_list = stripped_line.split() imu_data[i] = line_list if len(imu_data) > len(imu_datetimes): imu_data = imu_data[:len(imu_datetimes)] assert len(imu_datetimes) == len(imu_data) print("Max IMU time interval: " + str(np.diff(np.array(imu_datetimes)[np.argwhere(imu_status).flatten()]).max())) for timestamp, data, status in zip(imu_datetimes, imu_data, imu_status): if not status: continue roll, pitch, yaw = float(data[3]), float(data[4]), float(data[5]), q = tf.transformations.quaternion_from_euler(roll, pitch, yaw) imu = Imu() imu.header.frame_id = imu_frame_id imu.header.stamp = rospy.Time.from_sec(timestamp) imu.orientation.x = q[0] imu.orientation.y = q[1] imu.orientation.z = q[2] imu.orientation.w = q[3] imu.linear_acceleration.x = float(data[11]) imu.linear_acceleration.y = float(data[12]) imu.linear_acceleration.z = 
float(data[13]) imu.angular_velocity.x = float(data[17]) imu.angular_velocity.y = float(data[18]) imu.angular_velocity.z = float(data[19]) bag.write(topic, imu, t=imu.header.stamp) imu.header.frame_id = 'imu_enu_link' bag.write('/imu_correct', imu, t=imu.header.stamp) # for LIO-SAM GPS def save_dynamic_tf(bag, kitti, kitti_type, initial_time): print("Exporting time dependent transformations") if kitti_type.find("raw") != -1: for timestamp, oxts in zip(kitti.timestamps, kitti.oxts): tf_oxts_msg = TFMessage() tf_oxts_transform = TransformStamped() tf_oxts_transform.header.stamp = rospy.Time.from_sec(float(timestamp.strftime("%s.%f"))) tf_oxts_transform.header.frame_id = 'world' tf_oxts_transform.child_frame_id = 'base_link' transform = (oxts.T_w_imu) t = transform[0:3, 3] q = tf.transformations.quaternion_from_matrix(transform) oxts_tf = Transform() oxts_tf.translation.x = t[0] oxts_tf.translation.y = t[1] oxts_tf.translation.z = t[2] oxts_tf.rotation.x = q[0] oxts_tf.rotation.y = q[1] oxts_tf.rotation.z = q[2] oxts_tf.rotation.w = q[3] tf_oxts_transform.transform = oxts_tf tf_oxts_msg.transforms.append(tf_oxts_transform) bag.write('/tf', tf_oxts_msg, tf_oxts_msg.transforms[0].header.stamp) elif kitti_type.find("odom") != -1: timestamps = map(lambda x: initial_time + x.total_seconds(), kitti.timestamps) for timestamp, tf_matrix in zip(timestamps, kitti.T_w_cam0): tf_msg = TFMessage() tf_stamped = TransformStamped() tf_stamped.header.stamp = rospy.Time.from_sec(timestamp) tf_stamped.header.frame_id = 'world' tf_stamped.child_frame_id = 'camera_left' t = tf_matrix[0:3, 3] q = tf.transformations.quaternion_from_matrix(tf_matrix) transform = Transform() transform.translation.x = t[0] transform.translation.y = t[1] transform.translation.z = t[2] transform.rotation.x = q[0] transform.rotation.y = q[1] transform.rotation.z = q[2] transform.rotation.w = q[3] tf_stamped.transform = transform tf_msg.transforms.append(tf_stamped) bag.write('/tf', tf_msg, 
tf_msg.transforms[0].header.stamp) def save_dynamic_tf_as_tum_file(filename, kitti, kitti_type, initial_time): print("Exporting time dependent transformations as the tum format") with open(filename, 'w') as file: if kitti_type.find("raw") != -1: for timestamp, oxts in zip(kitti.timestamps, kitti.oxts): tf_oxts_msg = TFMessage() tf_oxts_transform = TransformStamped() tf_oxts_transform.header.stamp = rospy.Time.from_sec(float(timestamp.strftime("%s.%f"))) tf_oxts_transform.header.frame_id = 'world' tf_oxts_transform.child_frame_id = 'base_link' transform = (oxts.T_w_imu) t = transform[0:3, 3] q = tf.transformations.quaternion_from_matrix(transform) oxts_tf = Transform() oxts_tf.translation.x = t[0] oxts_tf.translation.y = t[1] oxts_tf.translation.z = t[2] oxts_tf.rotation.x = q[0] oxts_tf.rotation.y = q[1] oxts_tf.rotation.z = q[2] oxts_tf.rotation.w = q[3] tum_stamp = '%s %f %f %f %f %f %f %f\n' tum_stamp = tum_stamp % (timestamp.strftime('%s.%f'), t[0], t[1], t[2], q[0], q[1], q[2], q[3]) file.write(tum_stamp) elif kitti_type.find("odom") != -1: timestamps = map(lambda x: initial_time + x.total_seconds(), kitti.timestamps) for timestamp, tf_matrix in zip(timestamps, kitti.T_w_cam0): tf_msg = TFMessage() tf_stamped = TransformStamped() tf_stamped.header.stamp = rospy.Time.from_sec(timestamp) tf_stamped.header.frame_id = 'world' tf_stamped.child_frame_id = 'camera_left' t = tf_matrix[0:3, 3] q = tf.transformations.quaternion_from_matrix(tf_matrix) transform = Transform() transform.translation.x = t[0] transform.translation.y = t[1] transform.translation.z = t[2] transform.rotation.x = q[0] transform.rotation.y = q[1] transform.rotation.z = q[2] transform.rotation.w = q[3] tum_stamp = '%s %f %f %f %f %f %f %f\n' tum_stamp = tum_stamp % (timestamp.strftime('%s.%f'), t[0], t[1], t[2], q[0], q[1], q[2], q[3]) file.write(tum_stamp) def save_camera_data(bag, kitti_type, kitti, util, bridge, camera, camera_frame_id, topic, initial_time): print("Exporting camera 
{}".format(camera)) if kitti_type.find("raw") != -1: camera_pad = '{0:02d}'.format(camera) image_dir = os.path.join(kitti.data_path, 'image_{}'.format(camera_pad)) image_path = os.path.join(image_dir, 'data') image_filenames = sorted(os.listdir(image_path)) with open(os.path.join(image_dir, 'timestamps.txt')) as f: image_datetimes = map(lambda x: datetime.strptime(x[:-4], '%Y-%m-%d %H:%M:%S.%f'), f.readlines()) calib = CameraInfo() calib.header.frame_id = camera_frame_id calib.width, calib.height = tuple(util['S_rect_{}'.format(camera_pad)].tolist()) calib.distortion_model = 'plumb_bob' calib.K = util['K_{}'.format(camera_pad)] calib.R = util['R_rect_{}'.format(camera_pad)] calib.D = util['D_{}'.format(camera_pad)] calib.P = util['P_rect_{}'.format(camera_pad)] elif kitti_type.find("odom") != -1: camera_pad = '{0:01d}'.format(camera) image_path = os.path.join(kitti.sequence_path, 'image_{}'.format(camera_pad)) image_filenames = sorted(os.listdir(image_path)) image_datetimes = map(lambda x: initial_time + x.total_seconds(), kitti.timestamps) calib = CameraInfo() calib.header.frame_id = camera_frame_id calib.P = util['P{}'.format(camera_pad)] iterable = zip(image_datetimes, image_filenames) for dt, filename in tqdm(iterable, total=len(image_filenames)): image_filename = os.path.join(image_path, filename) cv_image = cv2.imread(image_filename) calib.height, calib.width = cv_image.shape[:2] if camera in (0, 1): cv_image = cv2.cvtColor(cv_image, cv2.COLOR_BGR2GRAY) encoding = "mono8" if camera in (0, 1) else "bgr8" image_message = bridge.cv2_to_imgmsg(cv_image, encoding=encoding) image_message.header.frame_id = camera_frame_id if kitti_type.find("raw") != -1: image_message.header.stamp = rospy.Time.from_sec(float(datetime.strftime(dt, "%s.%f"))) topic_ext = "/image_raw" elif kitti_type.find("odom") != -1: image_message.header.stamp = rospy.Time.from_sec(dt) topic_ext = "/image_rect" calib.header.stamp = image_message.header.stamp bag.write(topic + topic_ext, 
image_message, t = image_message.header.stamp) bag.write(topic + '/camera_info', calib, t = calib.header.stamp) def save_velo_data(bag, kitti, velo_frame_id, topic): print("Exporting velodyne data") velo_path = os.path.join(kitti.data_path, 'velodyne_points') velo_data_dir = os.path.join(velo_path, 'data') velo_filenames = sorted(os.listdir(velo_data_dir)) with open(os.path.join(velo_path, 'timestamps.txt')) as f: lines = f.readlines() velo_datetimes = [] for line in lines: if len(line) == 1: continue dt = datetime.strptime(line[:-4], '%Y-%m-%d %H:%M:%S.%f') velo_datetimes.append(dt) iterable = zip(velo_datetimes, velo_filenames) count = 0 for dt, filename in tqdm(iterable, total=len(velo_filenames)): if dt is None: continue velo_filename = os.path.join(velo_data_dir, filename) # read binary data scan = (np.fromfile(velo_filename, dtype=np.float32)).reshape(-1, 4) # get ring channel depth = np.linalg.norm(scan, 2, axis=1) pitch = np.arcsin(scan[:, 2] / depth) # arcsin(z, depth) fov_down = -24.8 / 180.0 * np.pi fov = (abs(-24.8) + abs(2.0)) / 180.0 * np.pi proj_y = (pitch + abs(fov_down)) / fov # in [0.0, 1.0] proj_y *= 64 # in [0.0, H] proj_y = np.floor(proj_y) proj_y = np.minimum(64 - 1, proj_y) proj_y = np.maximum(0, proj_y).astype(np.int32) # in [0,H-1] proj_y = proj_y.reshape(-1, 1) scan = np.concatenate((scan,proj_y), axis=1) scan = scan.tolist() for i in range(len(scan)): scan[i][-1] = int(scan[i][-1]) # create header header = Header() header.frame_id = velo_frame_id header.stamp = rospy.Time.from_sec(float(datetime.strftime(dt, "%s.%f"))) # fill pcl msg fields = [PointField('x', 0, PointField.FLOAT32, 1), PointField('y', 4, PointField.FLOAT32, 1), PointField('z', 8, PointField.FLOAT32, 1), PointField('intensity', 12, PointField.FLOAT32, 1), PointField('ring', 16, PointField.UINT16, 1)] pcl_msg = pcl2.create_cloud(header, fields, scan) pcl_msg.is_dense = True # print(pcl_msg) bag.write(topic, pcl_msg, t=pcl_msg.header.stamp) # count += 1 # if count > 200: # 
break def get_static_transform(from_frame_id, to_frame_id, transform): t = transform[0:3, 3] q = tf.transformations.quaternion_from_matrix(transform) tf_msg = TransformStamped() tf_msg.header.frame_id = from_frame_id tf_msg.child_frame_id = to_frame_id tf_msg.transform.translation.x = float(t[0]) tf_msg.transform.translation.y = float(t[1]) tf_msg.transform.translation.z = float(t[2]) tf_msg.transform.rotation.x = float(q[0]) tf_msg.transform.rotation.y = float(q[1]) tf_msg.transform.rotation.z = float(q[2]) tf_msg.transform.rotation.w = float(q[3]) return tf_msg def inv(transform): "Invert rigid body transformation matrix" R = transform[0:3, 0:3] t = transform[0:3, 3] t_inv = -1 * R.T.dot(t) transform_inv = np.eye(4) transform_inv[0:3, 0:3] = R.T transform_inv[0:3, 3] = t_inv return transform_inv def save_static_transforms(bag, transforms, timestamps): print("Exporting static transformations") tfm = TFMessage() for transform in transforms: t = get_static_transform(from_frame_id=transform[0], to_frame_id=transform[1], transform=transform[2]) tfm.transforms.append(t) for timestamp in timestamps: time = rospy.Time.from_sec(float(timestamp.strftime("%s.%f"))) for i in range(len(tfm.transforms)): tfm.transforms[i].header.stamp = time bag.write('/tf_static', tfm, t=time) def save_gps_fix_data(bag, kitti, gps_frame_id, topic): for timestamp, oxts in zip(kitti.timestamps, kitti.oxts): navsatfix_msg = NavSatFix() navsatfix_msg.header.frame_id = gps_frame_id navsatfix_msg.header.stamp = rospy.Time.from_sec(float(timestamp.strftime("%s.%f"))) navsatfix_msg.latitude = oxts.packet.lat navsatfix_msg.longitude = oxts.packet.lon navsatfix_msg.altitude = oxts.packet.alt navsatfix_msg.status.service = 1 bag.write(topic, navsatfix_msg, t=navsatfix_msg.header.stamp) def save_gps_vel_data(bag, kitti, gps_frame_id, topic): for timestamp, oxts in zip(kitti.timestamps, kitti.oxts): twist_msg = TwistStamped() twist_msg.header.frame_id = gps_frame_id twist_msg.header.stamp = 
rospy.Time.from_sec(float(timestamp.strftime("%s.%f"))) twist_msg.twist.linear.x = oxts.packet.vf twist_msg.twist.linear.y = oxts.packet.vl twist_msg.twist.linear.z = oxts.packet.vu twist_msg.twist.angular.x = oxts.packet.wf twist_msg.twist.angular.y = oxts.packet.wl twist_msg.twist.angular.z = oxts.packet.wu bag.write(topic, twist_msg, t=twist_msg.header.stamp) if __name__ == "__main__": parser = argparse.ArgumentParser(description = "Convert KITTI dataset to ROS bag file the easy way!") # Accepted argument values kitti_types = ["raw_synced", "odom_color", "odom_gray"] odometry_sequences = [] for s in range(22): odometry_sequences.append(str(s).zfill(2)) parser.add_argument("kitti_type", choices = kitti_types, help = "KITTI dataset type") parser.add_argument("dir", nargs = "?", default = os.getcwd(), help = "base directory of the dataset, if no directory passed the deafult is current working directory") parser.add_argument("-t", "--date", help = "date of the raw dataset (i.e. 2011_09_26), option is only for RAW datasets.") parser.add_argument("-r", "--drive", help = "drive number of the raw dataset (i.e. 0001), option is only for RAW datasets.") parser.add_argument("-s", "--sequence", choices = odometry_sequences,help = "sequence of the odometry dataset (between 00 - 21), option is only for ODOMETRY datasets.") args = parser.parse_args() bridge = CvBridge() compression = rosbag.Compression.NONE # compression = rosbag.Compression.BZ2 # compression = rosbag.Compression.LZ4 # CAMERAS cameras = [ (0, 'camera_gray_left', '/kitti/camera_gray_left'), (1, 'camera_gray_right', '/kitti/camera_gray_right'), (2, 'camera_color_left', '/kitti/camera_color_left'), (3, 'camera_color_right', '/kitti/camera_color_right') ] if args.kitti_type.find("raw") != -1: if args.date == None: print("Date option is not given. 
It is mandatory for raw dataset.") print("Usage for raw dataset: kitti2bag raw_synced [dir] -t -r ") sys.exit(1) elif args.drive == None: print("Drive option is not given. It is mandatory for raw dataset.") print("Usage for raw dataset: kitti2bag raw_synced [dir] -t -r ") sys.exit(1) bag = rosbag.Bag("kitti_{}_drive_{}_{}.bag".format(args.date, args.drive, args.kitti_type[4:]), 'w', compression=compression) tum_filename = "kitti_{}_drive_{}_{}.tum".format(args.date, args.drive, args.kitti_type[4:]) kitti = pykitti.raw(args.dir, args.date, args.drive) if not os.path.exists(kitti.data_path): print('Path {} does not exists. Exiting.'.format(kitti.data_path)) sys.exit(1) if len(kitti.timestamps) == 0: print('Dataset is empty? Exiting.') sys.exit(1) try: # IMU imu_frame_id = 'imu_link' imu_topic = '/kitti/oxts/imu' imu_raw_topic = '/imu_raw' gps_fix_topic = '/gps/fix' gps_vel_topic = '/gps/vel' velo_frame_id = 'velodyne' velo_topic = '/points_raw' T_base_link_to_imu = np.eye(4, 4) T_base_link_to_imu[0:3, 3] = [-2.71/2.0-0.05, 0.32, 0.93] # tf_static transforms = [ ('base_link', imu_frame_id, T_base_link_to_imu), (imu_frame_id, velo_frame_id, inv(kitti.calib.T_velo_imu)), (imu_frame_id, cameras[0][1], inv(kitti.calib.T_cam0_imu)), (imu_frame_id, cameras[1][1], inv(kitti.calib.T_cam1_imu)), (imu_frame_id, cameras[2][1], inv(kitti.calib.T_cam2_imu)), (imu_frame_id, cameras[3][1], inv(kitti.calib.T_cam3_imu)) ] util = pykitti.utils.read_calib_file(os.path.join(kitti.calib_path, 'calib_cam_to_cam.txt')) # Export save_static_transforms(bag, transforms, kitti.timestamps) save_dynamic_tf(bag, kitti, args.kitti_type, initial_time=None) save_dynamic_tf_as_tum_file(tum_filename, kitti, args.kitti_type, initial_time=None) # save_imu_data(bag, kitti, imu_frame_id, imu_topic) save_imu_data_raw(bag, kitti, imu_frame_id, imu_raw_topic) save_gps_fix_data(bag, kitti, imu_frame_id, gps_fix_topic) save_gps_vel_data(bag, kitti, imu_frame_id, gps_vel_topic) for camera in cameras: 
save_camera_data(bag, args.kitti_type, kitti, util, bridge, camera=camera[0], camera_frame_id=camera[1], topic=camera[2], initial_time=None) break save_velo_data(bag, kitti, velo_frame_id, velo_topic) finally: print("## OVERVIEW ##") print(bag) bag.close() elif args.kitti_type.find("odom") != -1: if args.sequence == None: print("Sequence option is not given. It is mandatory for odometry dataset.") print("Usage for odometry dataset: kitti2bag {odom_color, odom_gray} [dir] -s ") sys.exit(1) bag = rosbag.Bag("kitti_data_odometry_{}_sequence_{}.bag".format(args.kitti_type[5:],args.sequence), 'w', compression=compression) kitti = pykitti.odometry(args.dir, args.sequence) if not os.path.exists(kitti.sequence_path): print('Path {} does not exists. Exiting.'.format(kitti.sequence_path)) sys.exit(1) kitti.load_calib() kitti.load_timestamps() if len(kitti.timestamps) == 0: print('Dataset is empty? Exiting.') sys.exit(1) if args.sequence in odometry_sequences[:11]: print("Odometry dataset sequence {} has ground truth information (poses).".format(args.sequence)) kitti.load_poses() try: util = pykitti.utils.read_calib_file(os.path.join(args.dir,'sequences',args.sequence, 'calib.txt')) current_epoch = (datetime.utcnow() - datetime(1970, 1, 1)).total_seconds() # Export if args.kitti_type.find("gray") != -1: used_cameras = cameras[:2] elif args.kitti_type.find("color") != -1: used_cameras = cameras[-2:] save_dynamic_tf(bag, kitti, args.kitti_type, initial_time=current_epoch) for camera in used_cameras: save_camera_data(bag, args.kitti_type, kitti, util, bridge, camera=camera[0], camera_frame_id=camera[1], topic=camera[2], initial_time=current_epoch) finally: print("## OVERVIEW ##") print(bag) bag.close() ================================================ FILE: config/params.yaml ================================================ lio_segmot: # Topics pointCloudTopic: "points_raw" # Point cloud data imuTopic: "imu_raw" # IMU data odomTopic: "odometry/imu" # IMU pre-preintegration 
odometry, same frequency as IMU gpsTopic: "odometry/gpsz" # GPS odometry topic from navsat, see module_navsat.launch file # Frames lidarFrame: "base_link" baselinkFrame: "base_link" odometryFrame: "odom" mapFrame: "map" # GPS Settings useImuHeadingInitialization: false # if using GPS data, set to "true" useGpsElevation: false # if GPS elevation is bad, set to "false" gpsCovThreshold: 2.0 # m^2, threshold for using GPS data poseCovThreshold: 25.0 # m^2, threshold for using GPS data # Export settings savePCD: false # https://github.com/TixiaoShan/LIO-SAM/issues/3 savePCDDirectory: "/Downloads/LOAM/" # in your home folder, starts and ends with "/". Warning: the code deletes "LOAM" folder then recreates it. See "mapOptimization" for implementation # Sensor Settings sensor: velodyne # lidar sensor type, either 'velodyne' or 'ouster' N_SCAN: 64 # number of lidar channel (i.e., 16, 32, 64, 128) Horizon_SCAN: 1800 # lidar horizontal resolution (Velodyne:1800, Ouster:512,1024,2048) downsampleRate: 1 # default: 1. Downsample your data if too many points. 
i.e., 16 = 64 / 4, 16 = 16 / 1 lidarMinRange: 1.0 # default: 1.0, minimum lidar range to be used lidarMaxRange: 1000.0 # default: 1000.0, maximum lidar range to be used # IMU Settings imuAccNoise: 3.9939570888238808e-03 imuGyrNoise: 1.5636343949698187e-03 imuAccBiasN: 6.4356659353532566e-05 imuGyrBiasN: 3.5640318696367613e-05 imuGravity: 9.80511 imuRPYWeight: 0.01 # Extrinsics (lidar -> IMU, KITTI) extrinsicTrans: [-8.086759e-01, 3.195559e-01, -7.997231e-01] extrinsicRot: [ 9.999976e-01, 7.553071e-04, -2.035826e-03, -7.854027e-04, 9.998898e-01, -1.482298e-02, 2.024406e-03, 1.482454e-02, 9.998881e-01] extrinsicRPY: [ 9.999976e-01, 7.553071e-04, -2.035826e-03, -7.854027e-04, 9.998898e-01, -1.482298e-02, 2.024406e-03, 1.482454e-02, 9.998881e-01] # Extrinsics (lidar -> IMU, ITRI) # extrinsicTrans: [0, 0, 0] # extrinsicRot: [1, 0, 0, # 0, 1, 0, # 0, 0, 1] # extrinsicRPY: [1, 0, 0, # 0, 1, 0, # 0, 0, 1] # Extrinsics (lidar -> IMU, UrbanLoco) # extrinsicTrans: [0, 0, -0.0762] # extrinsicRot: [ 0, 1, 0, # -1, 0, 0, # 0, 0, 1] # extrinsicRPY: [ 0, 1, 0, # -1, 0, 0, # 0, 0, 1] # LOAM feature threshold edgeThreshold: 1.0 surfThreshold: 0.1 edgeFeatureMinValidNum: 10 surfFeatureMinValidNum: 100 # voxel filter paprams odometrySurfLeafSize: 0.4 # default: 0.4 - outdoor, 0.2 - indoor mappingCornerLeafSize: 0.2 # default: 0.2 - outdoor, 0.1 - indoor mappingSurfLeafSize: 0.4 # default: 0.4 - outdoor, 0.2 - indoor # robot motion constraint (in case you are using a 2D robot) z_tollerance: 1000 # meters rotation_tollerance: 1000 # radians # CPU Params numberOfCores: 8 # number of cores for mapping optimization mappingProcessInterval: 0.09 # seconds, regulate mapping frequency # Surrounding map surroundingkeyframeAddingDistThreshold: 1.0 # meters, regulate keyframe adding threshold surroundingkeyframeAddingAngleThreshold: 0.2 # radians, regulate keyframe adding threshold surroundingKeyframeDensity: 2.0 # meters, downsample surrounding keyframe poses surroundingKeyframeSearchRadius: 
50.0 # meters, within n meters scan-to-map optimization (when loop closure disabled) # Loop closure loopClosureEnableFlag: true loopClosureFrequency: 1.0 # Hz, regulate loop closure constraint add frequency surroundingKeyframeSize: 50 # submap size (when loop closure enabled) historyKeyframeSearchRadius: 15.0 # meters, key frame that is within n meters from current pose will be considered for loop closure historyKeyframeSearchTimeDiff: 30.0 # seconds, key frame that is n seconds older will be considered for loop closure historyKeyframeSearchNum: 25 # number of history key frames will be fused into a submap for loop closure historyKeyframeFitnessScore: 0.3 # icp threshold, the smaller the better alignment # Visualization globalMapVisualizationSearchRadius: 1000.0 # meters, global map visualization radius globalMapVisualizationPoseDensity: 10.0 # meters, global map visualization keyframe density globalMapVisualizationLeafSize: 1.0 # meters, global map visualization cloud density # Dynamic object data association detectionMatchThreshold: 19.5 dataAssociationVarianceVector: [3.0e-4, 3.0e-4, 3.0e-4, 5.0e-2, 3.0e-2, 3.0e-2] earlyLooselyCoupledMatchingVarianceVector: [3.0e-4, 3.0e-4, 3.0e-4, 5.0e-2, 5.0e-3, 5.0e-3] looselyCoupledMatchingVarianceVector: [1.0e-4, 1.0e-4, 1.0e-4, 2.0e-3, 2.0e-3, 2.0e-3] tightlyCoupledMatchingVarianceVector: [8.0e-6, 8.0e-6, 8.0e-6, 1.0e-4, 1.0e-4, 1.0e-4] # tightlyCoupledMatchingVarianceVector: [2.0e-7, 2.0e-7, 2.0e-7, 1.5e-5, 1.5e-5, 1.5e-5] # Factor covariance matrices (as diagonal vectors) # priorOdometryDiagonalVarianceVector: [1.0e-2, 1.0e-2, M_PI*M_PI, 1.0e+8, 1.0e+8, 1.0e+8] # odometryDiagonalVarianceVector: [1.0e-6, 1.0e-6, 1.0e-6, 1.0e-4, 1.0e-4, 1.0e-4] earlyConstantVelocityDiagonalVarianceVector: [2.0e-4, 2.0e-4, 1.0e-3, 2.0e-1, 1.0e-1, 1.0e-1] constantVelocityDiagonalVarianceVector: [2.0e-4, 2.0e-4, 1.0e-3, 2.0e-1, 1.0e-1, 1.0e-1] motionDiagonalVarianceVector: [2.0e-4, 2.0e-4, 1.0e-3, 1.0e-1, 1.0e-2, 1.0e-2] 
looselyCoupledDetectionVarianceVector: [2.0e-4, 2.0e-4, 2.0e-4, 1.5e-3, 1.5e-3, 1.5e-3] tightlyCoupledDetectionVarianceVector: [2.0e-4, 2.0e-4, 2.0e-4, 1.5e-3, 1.5e-3, 1.5e-3] numberOfEarlySteps: 2 numberOfPreLooseCouplingSteps: 6 numberOfVelocityConsistencySteps: 4 numberOfInterLooseCouplingSteps: 0 tightCouplingDetectionErrorThreshold: 26.0 objectAngularVelocityConsistencyVarianceThreshold: 1.0e-5 # rad^2 objectLinearVelocityConsistencyVarianceThreshold: 1.0e-2 # m^2 # Tracking trackingStepsForLostObject: 3 # Navsat (convert GPS coordinates to Cartesian) navsat: frequency: 50 wait_for_datum: false delay: 0.0 magnetic_declination_radians: 0 yaw_offset: 0 zero_altitude: true broadcast_utm_transform: false broadcast_utm_transform_as_parent_frame: false publish_filtered_gps: false # EKF for Navsat ekf_gps: publish_tf: false map_frame: map odom_frame: odom base_link_frame: base_link world_frame: odom frequency: 50 two_d_mode: false sensor_timeout: 0.01 # ------------------------------------- # External IMU: # ------------------------------------- imu0: imu_correct # make sure the input is aligned with ROS REP105. "imu_correct" is manually transformed by myself. 
EKF can also transform the data using tf between your imu and base_link imu0_config: [false, false, false, true, true, true, false, false, false, false, false, true, true, true, true] imu0_differential: false imu0_queue_size: 50 imu0_remove_gravitational_acceleration: true # ------------------------------------- # Odometry (From Navsat): # ------------------------------------- odom0: odometry/gps odom0_config: [true, true, true, false, false, false, false, false, false, false, false, false, false, false, false] odom0_differential: false odom0_queue_size: 10 # x y z r p y x_dot y_dot z_dot r_dot p_dot y_dot x_ddot y_ddot z_ddot process_noise_covariance: [ 1.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.04, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.015] ================================================ FILE: config/params_hsinchu.yaml ================================================ lio_segmot: # Topics pointCloudTopic: "points_raw" # Point cloud data imuTopic: "imu_raw" # IMU data odomTopic: "odometry/imu" # IMU pre-preintegration odometry, same frequency as IMU gpsTopic: "odometry/gpsz" # GPS odometry topic from navsat, see module_navsat.launch file # Frames lidarFrame: "base_link" baselinkFrame: "base_link" odometryFrame: "odom" mapFrame: "map" # GPS Settings useImuHeadingInitialization: false # if using GPS data, set to "true" useGpsElevation: false # if GPS 
elevation is bad, set to "false" gpsCovThreshold: 2.0 # m^2, threshold for using GPS data poseCovThreshold: 25.0 # m^2, threshold for using GPS data # Export settings savePCD: false # https://github.com/TixiaoShan/LIO-SAM/issues/3 savePCDDirectory: "/Downloads/LOAM/" # in your home folder, starts and ends with "/". Warning: the code deletes "LOAM" folder then recreates it. See "mapOptimization" for implementation # Sensor Settings sensor: velodyne # lidar sensor type, either 'velodyne' or 'ouster' N_SCAN: 64 # number of lidar channel (i.e., 16, 32, 64, 128) Horizon_SCAN: 1800 # lidar horizontal resolution (Velodyne:1800, Ouster:512,1024,2048) downsampleRate: 1 # default: 1. Downsample your data if too many points. i.e., 16 = 64 / 4, 16 = 16 / 1 lidarMinRange: 1.0 # default: 1.0, minimum lidar range to be used lidarMaxRange: 1000.0 # default: 1000.0, maximum lidar range to be used # IMU Settings imuAccNoise: 3.9939570888238808e-03 imuGyrNoise: 1.5636343949698187e-03 imuAccBiasN: 6.4356659353532566e-05 imuGyrBiasN: 3.5640318696367613e-05 imuGravity: 9.80511 imuRPYWeight: 0.01 # Extrinsics (lidar -> IMU, KITTI) # extrinsicTrans: [-8.086759e-01, 3.195559e-01, -7.997231e-01] # extrinsicRot: [ 9.999976e-01, 7.553071e-04, -2.035826e-03, # -7.854027e-04, 9.998898e-01, -1.482298e-02, # 2.024406e-03, 1.482454e-02, 9.998881e-01] # extrinsicRPY: [ 9.999976e-01, 7.553071e-04, -2.035826e-03, # -7.854027e-04, 9.998898e-01, -1.482298e-02, # 2.024406e-03, 1.482454e-02, 9.998881e-01] # Extrinsics (lidar -> IMU, ITRI) extrinsicTrans: [0, 0, 0] extrinsicRot: [1, 0, 0, 0, 1, 0, 0, 0, 1] extrinsicRPY: [1, 0, 0, 0, 1, 0, 0, 0, 1] # Extrinsics (lidar -> IMU, UrbanLoco) # extrinsicTrans: [0, 0, -0.0762] # extrinsicRot: [ 0, 1, 0, # -1, 0, 0, # 0, 0, 1] # extrinsicRPY: [ 0, 1, 0, # -1, 0, 0, # 0, 0, 1] # LOAM feature threshold edgeThreshold: 1.0 surfThreshold: 0.1 edgeFeatureMinValidNum: 10 surfFeatureMinValidNum: 100 # voxel filter paprams odometrySurfLeafSize: 0.4 # default: 0.4 - 
outdoor, 0.2 - indoor mappingCornerLeafSize: 0.2 # default: 0.2 - outdoor, 0.1 - indoor mappingSurfLeafSize: 0.4 # default: 0.4 - outdoor, 0.2 - indoor # robot motion constraint (in case you are using a 2D robot) z_tollerance: 1000 # meters rotation_tollerance: 1000 # radians # CPU Params numberOfCores: 8 # number of cores for mapping optimization mappingProcessInterval: 0.09 # seconds, regulate mapping frequency # Surrounding map surroundingkeyframeAddingDistThreshold: 1.0 # meters, regulate keyframe adding threshold surroundingkeyframeAddingAngleThreshold: 0.2 # radians, regulate keyframe adding threshold surroundingKeyframeDensity: 2.0 # meters, downsample surrounding keyframe poses surroundingKeyframeSearchRadius: 50.0 # meters, within n meters scan-to-map optimization (when loop closure disabled) # Loop closure loopClosureEnableFlag: true loopClosureFrequency: 1.0 # Hz, regulate loop closure constraint add frequency surroundingKeyframeSize: 50 # submap size (when loop closure enabled) historyKeyframeSearchRadius: 15.0 # meters, key frame that is within n meters from current pose will be considerd for loop closure historyKeyframeSearchTimeDiff: 30.0 # seconds, key frame that is n seconds older will be considered for loop closure historyKeyframeSearchNum: 25 # number of hostory key frames will be fused into a submap for loop closure historyKeyframeFitnessScore: 0.3 # icp threshold, the smaller the better alignment # Visualization globalMapVisualizationSearchRadius: 1000.0 # meters, global map visualization radius globalMapVisualizationPoseDensity: 10.0 # meters, global map visualization keyframe density globalMapVisualizationLeafSize: 1.0 # meters, global map visualization cloud density # Dynamic object data association detectionMatchThreshold: 19.5 dataAssociationVarianceVector: [3.0e-4, 3.0e-4, 3.0e-4, 5.0e-2, 3.0e-2, 3.0e-2] earlyLooselyCoupledMatchingVarianceVector: [3.0e-4, 3.0e-4, 3.0e-4, 5.0e-2, 5.0e-3, 5.0e-3] looselyCoupledMatchingVarianceVector: 
[1.0e-4, 1.0e-4, 1.0e-4, 2.0e-3, 2.0e-3, 2.0e-3] tightlyCoupledMatchingVarianceVector: [8.0e-6, 8.0e-6, 8.0e-6, 1.0e-4, 1.0e-4, 1.0e-4] # tightlyCoupledMatchingVarianceVector: [2.0e-7, 2.0e-7, 2.0e-7, 1.5e-5, 1.5e-5, 1.5e-5] # Factor covariance matrices (as diagonal vectors) # priorOdometryDiagonalVarianceVector: [1.0e-2, 1.0e-2, M_PI*M_PI, 1.0e+8, 1.0e+8, 1.0e+8] # odometryDiagonalVarianceVector: [1.0e-6, 1.0e-6, 1.0e-6, 1.0e-4, 1.0e-4, 1.0e-4] earlyConstantVelocityDiagonalVarianceVector: [2.0e-4, 2.0e-4, 1.0e-3, 2.0e-1, 1.0e-1, 1.0e-1] constantVelocityDiagonalVarianceVector: [2.0e-4, 2.0e-4, 1.0e-3, 2.0e-1, 1.0e-1, 1.0e-1] motionDiagonalVarianceVector: [2.0e-4, 2.0e-4, 1.0e-3, 1.0e-1, 1.0e-2, 1.0e-2] looselyCoupledDetectionVarianceVector: [2.0e-4, 2.0e-4, 2.0e-4, 1.5e-3, 1.5e-3, 1.5e-3] tightlyCoupledDetectionVarianceVector: [2.0e-4, 2.0e-4, 2.0e-4, 1.5e-3, 1.5e-3, 1.5e-3] numberOfEarlySteps: 2 numberOfPreLooseCouplingSteps: 6 numberOfVelocityConsistencySteps: 4 numberOfInterLooseCouplingSteps: 0 tightCouplingDetectionErrorThreshold: 19.5 objectAngularVelocityConsistencyVarianceThreshold: 1.0e-5 # rad^2 objectLinearVelocityConsistencyVarianceThreshold: 1.0e-2 # m^2 # Tracking trackingStepsForLostObject: 3 # Navsat (convert GPS coordinates to Cartesian) navsat: frequency: 50 wait_for_datum: false delay: 0.0 magnetic_declination_radians: 0 yaw_offset: 0 zero_altitude: true broadcast_utm_transform: false broadcast_utm_transform_as_parent_frame: false publish_filtered_gps: false # EKF for Navsat ekf_gps: publish_tf: false map_frame: map odom_frame: odom base_link_frame: base_link world_frame: odom frequency: 50 two_d_mode: false sensor_timeout: 0.01 # ------------------------------------- # External IMU: # ------------------------------------- imu0: imu_correct # make sure the input is aligned with ROS REP105. "imu_correct" is manually transformed by myself. 
EKF can also transform the data using tf between your imu and base_link imu0_config: [false, false, false, true, true, true, false, false, false, false, false, true, true, true, true] imu0_differential: false imu0_queue_size: 50 imu0_remove_gravitational_acceleration: true # ------------------------------------- # Odometry (From Navsat): # ------------------------------------- odom0: odometry/gps odom0_config: [true, true, true, false, false, false, false, false, false, false, false, false, false, false, false] odom0_differential: false odom0_queue_size: 10 # x y z r p y x_dot y_dot z_dot r_dot p_dot y_dot x_ddot y_ddot z_ddot process_noise_covariance: [ 1.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.04, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.015] ================================================ FILE: config/params_kitti.yaml ================================================ lio_segmot: # Topics pointCloudTopic: "points_raw" # Point cloud data imuTopic: "imu_raw" # IMU data odomTopic: "odometry/imu" # IMU pre-preintegration odometry, same frequency as IMU gpsTopic: "odometry/gpsz" # GPS odometry topic from navsat, see module_navsat.launch file # Frames lidarFrame: "base_link" baselinkFrame: "base_link" odometryFrame: "odom" mapFrame: "map" # GPS Settings useImuHeadingInitialization: false # if using GPS data, set to "true" useGpsElevation: false # if GPS 
elevation is bad, set to "false" gpsCovThreshold: 2.0 # m^2, threshold for using GPS data poseCovThreshold: 25.0 # m^2, threshold for using GPS data # Export settings savePCD: false # https://github.com/TixiaoShan/LIO-SAM/issues/3 savePCDDirectory: "/Downloads/LOAM/" # in your home folder, starts and ends with "/". Warning: the code deletes "LOAM" folder then recreates it. See "mapOptimization" for implementation # Sensor Settings sensor: velodyne # lidar sensor type, either 'velodyne' or 'ouster' N_SCAN: 64 # number of lidar channel (i.e., 16, 32, 64, 128) Horizon_SCAN: 1800 # lidar horizontal resolution (Velodyne:1800, Ouster:512,1024,2048) downsampleRate: 1 # default: 1. Downsample your data if too many points. i.e., 16 = 64 / 4, 16 = 16 / 1 lidarMinRange: 1.0 # default: 1.0, minimum lidar range to be used lidarMaxRange: 1000.0 # default: 1000.0, maximum lidar range to be used # IMU Settings imuAccNoise: 3.9939570888238808e-03 imuGyrNoise: 1.5636343949698187e-03 imuAccBiasN: 6.4356659353532566e-05 imuGyrBiasN: 3.5640318696367613e-05 imuGravity: 9.80511 imuRPYWeight: 0.01 # Extrinsics (lidar -> IMU, KITTI) extrinsicTrans: [-8.086759e-01, 3.195559e-01, -7.997231e-01] extrinsicRot: [ 9.999976e-01, 7.553071e-04, -2.035826e-03, -7.854027e-04, 9.998898e-01, -1.482298e-02, 2.024406e-03, 1.482454e-02, 9.998881e-01] extrinsicRPY: [ 9.999976e-01, 7.553071e-04, -2.035826e-03, -7.854027e-04, 9.998898e-01, -1.482298e-02, 2.024406e-03, 1.482454e-02, 9.998881e-01] # Extrinsics (lidar -> IMU, ITRI) # extrinsicTrans: [0, 0, 0] # extrinsicRot: [1, 0, 0, # 0, 1, 0, # 0, 0, 1] # extrinsicRPY: [1, 0, 0, # 0, 1, 0, # 0, 0, 1] # Extrinsics (lidar -> IMU, UrbanLoco) # extrinsicTrans: [0, 0, -0.0762] # extrinsicRot: [ 0, 1, 0, # -1, 0, 0, # 0, 0, 1] # extrinsicRPY: [ 0, 1, 0, # -1, 0, 0, # 0, 0, 1] # LOAM feature threshold edgeThreshold: 1.0 surfThreshold: 0.1 edgeFeatureMinValidNum: 10 surfFeatureMinValidNum: 100 # voxel filter paprams odometrySurfLeafSize: 0.4 # default: 0.4 - 
outdoor, 0.2 - indoor mappingCornerLeafSize: 0.2 # default: 0.2 - outdoor, 0.1 - indoor mappingSurfLeafSize: 0.4 # default: 0.4 - outdoor, 0.2 - indoor # robot motion constraint (in case you are using a 2D robot) z_tollerance: 1000 # meters rotation_tollerance: 1000 # radians # CPU Params numberOfCores: 8 # number of cores for mapping optimization mappingProcessInterval: 0.09 # seconds, regulate mapping frequency # Surrounding map surroundingkeyframeAddingDistThreshold: 1.0 # meters, regulate keyframe adding threshold surroundingkeyframeAddingAngleThreshold: 0.2 # radians, regulate keyframe adding threshold surroundingKeyframeDensity: 2.0 # meters, downsample surrounding keyframe poses surroundingKeyframeSearchRadius: 50.0 # meters, within n meters scan-to-map optimization (when loop closure disabled) # Loop closure loopClosureEnableFlag: true loopClosureFrequency: 1.0 # Hz, regulate loop closure constraint add frequency surroundingKeyframeSize: 50 # submap size (when loop closure enabled) historyKeyframeSearchRadius: 15.0 # meters, key frame that is within n meters from current pose will be considerd for loop closure historyKeyframeSearchTimeDiff: 30.0 # seconds, key frame that is n seconds older will be considered for loop closure historyKeyframeSearchNum: 25 # number of hostory key frames will be fused into a submap for loop closure historyKeyframeFitnessScore: 0.3 # icp threshold, the smaller the better alignment # Visualization globalMapVisualizationSearchRadius: 1000.0 # meters, global map visualization radius globalMapVisualizationPoseDensity: 10.0 # meters, global map visualization keyframe density globalMapVisualizationLeafSize: 1.0 # meters, global map visualization cloud density # Dynamic object data association detectionMatchThreshold: 19.5 dataAssociationVarianceVector: [3.0e-4, 3.0e-4, 3.0e-4, 5.0e-2, 3.0e-2, 3.0e-2] earlyLooselyCoupledMatchingVarianceVector: [3.0e-4, 3.0e-4, 3.0e-4, 5.0e-2, 5.0e-3, 5.0e-3] looselyCoupledMatchingVarianceVector: 
[1.0e-4, 1.0e-4, 1.0e-4, 2.0e-3, 2.0e-3, 2.0e-3] tightlyCoupledMatchingVarianceVector: [8.0e-6, 8.0e-6, 8.0e-6, 1.0e-4, 1.0e-4, 1.0e-4] # tightlyCoupledMatchingVarianceVector: [2.0e-7, 2.0e-7, 2.0e-7, 1.5e-5, 1.5e-5, 1.5e-5] # Factor covariance matrices (as diagonal vectors) # priorOdometryDiagonalVarianceVector: [1.0e-2, 1.0e-2, M_PI*M_PI, 1.0e+8, 1.0e+8, 1.0e+8] # odometryDiagonalVarianceVector: [1.0e-6, 1.0e-6, 1.0e-6, 1.0e-4, 1.0e-4, 1.0e-4] earlyConstantVelocityDiagonalVarianceVector: [2.0e-4, 2.0e-4, 1.0e-3, 2.0e-1, 1.0e-1, 1.0e-1] constantVelocityDiagonalVarianceVector: [2.0e-4, 2.0e-4, 1.0e-3, 2.0e-1, 1.0e-1, 1.0e-1] motionDiagonalVarianceVector: [2.0e-4, 2.0e-4, 1.0e-3, 1.0e-1, 1.0e-2, 1.0e-2] looselyCoupledDetectionVarianceVector: [2.0e-4, 2.0e-4, 2.0e-4, 1.5e-3, 1.5e-3, 1.5e-3] tightlyCoupledDetectionVarianceVector: [2.0e-4, 2.0e-4, 2.0e-4, 1.5e-3, 1.5e-3, 1.5e-3] numberOfEarlySteps: 2 numberOfPreLooseCouplingSteps: 6 numberOfVelocityConsistencySteps: 4 numberOfInterLooseCouplingSteps: 0 tightCouplingDetectionErrorThreshold: 26.0 objectAngularVelocityConsistencyVarianceThreshold: 1.0e-5 # rad^2 objectLinearVelocityConsistencyVarianceThreshold: 1.0e-2 # m^2 # Tracking trackingStepsForLostObject: 3 # Navsat (convert GPS coordinates to Cartesian) navsat: frequency: 50 wait_for_datum: false delay: 0.0 magnetic_declination_radians: 0 yaw_offset: 0 zero_altitude: true broadcast_utm_transform: false broadcast_utm_transform_as_parent_frame: false publish_filtered_gps: false # EKF for Navsat ekf_gps: publish_tf: false map_frame: map odom_frame: odom base_link_frame: base_link world_frame: odom frequency: 50 two_d_mode: false sensor_timeout: 0.01 # ------------------------------------- # External IMU: # ------------------------------------- imu0: imu_correct # make sure the input is aligned with ROS REP105. "imu_correct" is manually transformed by myself. 
EKF can also transform the data using tf between your imu and base_link imu0_config: [false, false, false, true, true, true, false, false, false, false, false, true, true, true, true] imu0_differential: false imu0_queue_size: 50 imu0_remove_gravitational_acceleration: true # ------------------------------------- # Odometry (From Navsat): # ------------------------------------- odom0: odometry/gps odom0_config: [true, true, true, false, false, false, false, false, false, false, false, false, false, false, false] odom0_differential: false odom0_queue_size: 10 # x y z r p y x_dot y_dot z_dot r_dot p_dot y_dot x_ddot y_ddot z_ddot process_noise_covariance: [ 1.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.03, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.04, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.015] ================================================ FILE: include/factor.h ================================================ // Copyright 2021 Yu-Kai Lin. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. 
#pragma once #include #include #include #include #include #include #include #include #include #include class MaxMixturePoint3 : public gtsam::Point3 {}; class Detection { public: using BoundingBox = jsk_recognition_msgs::BoundingBox; protected: BoundingBox box; gtsam::Pose3 pose; gtsam::Vector6 variances; public: Detection(BoundingBox box, gtsam::Vector6 variances); const BoundingBox getBoundingBox() const { return this->box; } const gtsam::Pose3 getPose() const { return this->pose; }; gtsam::noiseModel::Diagonal::shared_ptr getDiagonal() const { return gtsam::noiseModel::Diagonal::Variances(this->variances); } const double error(const gtsam::Pose3 x) const; }; std::tuple getDetectionIndexAndError(const gtsam::Pose3 &d, std::vector detections); class TightlyCoupledDetectionFactor : public gtsam::NoiseModelFactor2 { private: using This = TightlyCoupledDetectionFactor; using Base = gtsam::NoiseModelFactor2; protected: std::vector detections; std::vector noiseModels; mutable int cachedDetectionIndex = -1; public: /** Constructor */ TightlyCoupledDetectionFactor(gtsam::Key robotPoseKey, gtsam::Key objectPoseKey, std::vector detections) : Base(boost::shared_ptr(), robotPoseKey, objectPoseKey), detections(detections) { for (const auto &detection : detections) { this->noiseModels.push_back(detection.getDiagonal()); } } /** Contructor with cached detection index */ TightlyCoupledDetectionFactor(gtsam::Key robotPoseKey, gtsam::Key objectPoseKey, std::vector detections, int cachedDetectionIndex) : Base(boost::shared_ptr(), robotPoseKey, objectPoseKey), detections(detections), cachedDetectionIndex(cachedDetectionIndex) { for (const auto &detection : detections) { this->noiseModels.push_back(detection.getDiagonal()); } } /** print */ virtual void print(const std::string &s, const gtsam::KeyFormatter &keyFormatter = gtsam::DefaultKeyFormatter) const { std::cout << s << "TightlyCoupledDetectionFactor(" << keyFormatter(this->robotPoseKey()) << ", " << 
keyFormatter(this->objectPoseKey()) << ")\n"; } /** equals */ virtual bool equals(const NonlinearFactor &expected, double tol = 1e-9) const { const This *e = dynamic_cast(&expected); return e != NULL && Base::equals(*e, tol); } gtsam::Key robotPoseKey() const { return key1(); } gtsam::Key objectPoseKey() const { return key2(); } const std::vector &getDetections() const { return this->detections; } const int getCachedDetectionIndex() const { return this->cachedDetectionIndex; } virtual gtsam::Vector evaluateError(const gtsam::Pose3 &robotPose, const gtsam::Pose3 &objectPose, const gtsam::Pose3 &measured, boost::optional H1 = boost::none, boost::optional H2 = boost::none) const; virtual gtsam::Vector evaluateError(const gtsam::Pose3 &robotPose, const gtsam::Pose3 &objectPose, boost::optional H1 = boost::none, boost::optional H2 = boost::none) const override { throw std::runtime_error("This evaluateError function is revoked."); } virtual gtsam::Vector unwhitenedError(const gtsam::Pose3 &measured, const gtsam::Values &x, boost::optional &> H = boost::none) const; virtual gtsam::Vector unwhitenedError(const gtsam::Values &x, boost::optional &> H = boost::none) const override { throw std::runtime_error("This unwhitenedError function is revoked."); }; virtual boost::shared_ptr linearize(const gtsam::Values &c) const override; virtual double error(const gtsam::Values &c) const override; std::size_t size() const { return 2; } virtual size_t dim() const override { return 6; } }; class LooselyCoupledDetectionFactor : public gtsam::NoiseModelFactor1 { private: using This = LooselyCoupledDetectionFactor; using Base = gtsam::NoiseModelFactor1; protected: std::vector detections; std::vector noiseModels; gtsam::Key robotPoseKey_; mutable int cachedDetectionIndex = -1; public: /** Constructor */ LooselyCoupledDetectionFactor(gtsam::Key robotPoseKey, gtsam::Key objectPoseKey, std::vector detections) : Base(boost::shared_ptr(), objectPoseKey), robotPoseKey_(robotPoseKey), 
detections(detections) { for (const auto &detection : detections) { this->noiseModels.push_back(detection.getDiagonal()); } } /** Contructor with cached detection index */ LooselyCoupledDetectionFactor(gtsam::Key robotPoseKey, gtsam::Key objectPoseKey, std::vector detections, int cachedDetectionIndex) : Base(boost::shared_ptr(), objectPoseKey), robotPoseKey_(robotPoseKey), detections(detections), cachedDetectionIndex(cachedDetectionIndex) { for (const auto &detection : detections) { this->noiseModels.push_back(detection.getDiagonal()); } } /** print */ virtual void print(const std::string &s, const gtsam::KeyFormatter &keyFormatter = gtsam::DefaultKeyFormatter) const { std::cout << s << "LooselyCoupledDetectionFactor(" << keyFormatter(this->robotPoseKey()) << "," << keyFormatter(this->objectPoseKey()) << ")\n"; } /** equals */ virtual bool equals(const NonlinearFactor &expected, double tol = 1e-9) const { const This *e = dynamic_cast(&expected); return e != NULL && Base::equals(*e, tol); } gtsam::Key robotPoseKey() const { return this->robotPoseKey_; } gtsam::Key objectPoseKey() const { return this->key(); } const std::vector &getDetections() const { return this->detections; } const int getCachedDetectionIndex() const { return this->cachedDetectionIndex; } virtual gtsam::Vector evaluateError(const gtsam::Pose3 &robotPose, const gtsam::Pose3 &objectPose, const gtsam::Pose3 &measured, boost::optional H1 = boost::none) const; virtual gtsam::Vector evaluateError(const gtsam::Pose3 &x, boost::optional H = boost::none) const override { throw std::runtime_error("This evaluateError function is revoked."); } virtual gtsam::Vector unwhitenedError(const gtsam::Pose3 &measured, const gtsam::Values &x, boost::optional &> H = boost::none) const; virtual gtsam::Vector unwhitenedError(const gtsam::Values &x, boost::optional &> H = boost::none) const override { throw std::runtime_error("This unwhitenedError function is revoked."); }; virtual boost::shared_ptr linearize(const 
gtsam::Values &c) const override; virtual double error(const gtsam::Values &c) const override; std::size_t size() const { return 1; } virtual size_t dim() const override { return 6; } }; class ConstantVelocityFactor : public gtsam::BetweenFactor { private: using This = ConstantVelocityFactor; using Base = gtsam::BetweenFactor; public: ConstantVelocityFactor(gtsam::Key key1, gtsam::Key key2, const gtsam::SharedNoiseModel &model = nullptr) : Base(key1, key2, gtsam::Pose3::identity(), model) { } virtual ~ConstantVelocityFactor() {} ///@name Testable ///@{ /** print */ virtual void print(const std::string &s, const gtsam::KeyFormatter &keyFormatter = gtsam::DefaultKeyFormatter) const { std::cout << s << "ConstantVelocityFactor(" << keyFormatter(this->key1()) << "," << keyFormatter(this->key2()) << ")\n"; this->noiseModel_->print(" noise model: "); } /** equals */ virtual bool equals(const NonlinearFactor &expected, double tol = 1e-9) const { const This *e = dynamic_cast(&expected); return e != NULL && Base::equals(*e, tol); } }; class StablePoseFactor : public gtsam::NoiseModelFactor3 { private: using This = StablePoseFactor; using Base = gtsam::NoiseModelFactor3; protected: double deltaTime; public: /** Constructor */ StablePoseFactor(gtsam::Key previousPoseKey, gtsam::Key velocityKey, gtsam::Key nextPoseKey, double deltaTime, const gtsam::SharedNoiseModel &model = nullptr) : Base(model, previousPoseKey, velocityKey, nextPoseKey), deltaTime(deltaTime) { } /** print */ virtual void print(const std::string &s, const gtsam::KeyFormatter &keyFormatter = gtsam::DefaultKeyFormatter) const { std::cout << s << "StablePoseFactor(" << keyFormatter(this->key1()) << "," << keyFormatter(this->key2()) << "," << keyFormatter(this->key3()) << ")" << " dt = " << this->deltaTime << '\n'; this->noiseModel_->print(" noise model: "); } /** equals */ virtual bool equals(const NonlinearFactor &expected, double tol = 1e-9) const { const This *e = dynamic_cast(&expected); return e != NULL && 
Base::equals(*e, tol); } inline gtsam::Key previousPoseKey() const { return key1(); } inline gtsam::Key velocityKey() const { return key2(); } inline gtsam::Key nextPoseKey() const { return key3(); } virtual gtsam::Vector evaluateError(const gtsam::Pose3 &previousPose, const gtsam::Pose3 &velocity, const gtsam::Pose3 &nextPose, boost::optional H1 = boost::none, boost::optional H2 = boost::none, boost::optional H3 = boost::none) const override; }; ================================================ FILE: include/solver.h ================================================ // Copyright 2022 Yu-Kai Lin. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. #pragma once #include #include "factor.h" class MaxMixtureISAM2 : public gtsam::ISAM2 { public: explicit MaxMixtureISAM2(const gtsam::ISAM2Params& params) : gtsam::ISAM2(params){}; MaxMixtureISAM2() : gtsam::ISAM2(){}; virtual gtsam::ISAM2Result update( const gtsam::NonlinearFactorGraph& newFactors = gtsam::NonlinearFactorGraph(), const gtsam::Values& newTheta = gtsam::Values(), const gtsam::FactorIndices& removeFactorIndices = gtsam::FactorIndices(), const boost::optional >& constrainedKeys = boost::none, const boost::optional >& noRelinKeys = boost::none, const boost::optional >& extraReelimKeys = boost::none, bool force_relinearize = false) override; /** * Add variable, update, and relinearize if need; almost everything is same as * the original ISAM2 solver, except for the relinearization condition for * max-mixture models. 
*/ virtual gtsam::ISAM2Result update(const gtsam::NonlinearFactorGraph& newFactors, const gtsam::Values& newTheta, const gtsam::ISAM2UpdateParams& updateParams) override; }; ================================================ FILE: include/utility.h ================================================ #pragma once #ifndef _UTILITY_LIDAR_ODOMETRY_H_ #define _UTILITY_LIDAR_ODOMETRY_H_ #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include #include using namespace std; typedef pcl::PointXYZI PointType; enum class SensorType { VELODYNE, OUSTER }; class ParamServer { public: ros::NodeHandle nh; std::string robot_id; // Topics string pointCloudTopic; string imuTopic; string odomTopic; string gpsTopic; // Frames string lidarFrame; string baselinkFrame; string odometryFrame; string mapFrame; // GPS Settings bool useImuHeadingInitialization; bool useGpsElevation; float gpsCovThreshold; float poseCovThreshold; // Save pcd bool savePCD; string savePCDDirectory; // Lidar Sensor Configuration SensorType sensor; int N_SCAN; int Horizon_SCAN; int downsampleRate; float lidarMinRange; float lidarMaxRange; // IMU float imuAccNoise; float imuGyrNoise; float imuAccBiasN; float imuGyrBiasN; float imuGravity; float imuRPYWeight; vector extRotV; vector extRPYV; vector extTransV; Eigen::Matrix3d extRot; Eigen::Matrix3d extRPY; Eigen::Vector3d extTrans; Eigen::Quaterniond extQRPY; // LOAM float edgeThreshold; float surfThreshold; int edgeFeatureMinValidNum; int surfFeatureMinValidNum; // voxel filter paprams float odometrySurfLeafSize; float mappingCornerLeafSize; float mappingSurfLeafSize; float z_tollerance; float rotation_tollerance; // CPU Params int 
numberOfCores; double mappingProcessInterval; // Surrounding map float surroundingkeyframeAddingDistThreshold; float surroundingkeyframeAddingAngleThreshold; float surroundingKeyframeDensity; float surroundingKeyframeSearchRadius; // Loop closure bool loopClosureEnableFlag; float loopClosureFrequency; int surroundingKeyframeSize; float historyKeyframeSearchRadius; float historyKeyframeSearchTimeDiff; int historyKeyframeSearchNum; float historyKeyframeFitnessScore; // global map visualization radius float globalMapVisualizationSearchRadius; float globalMapVisualizationPoseDensity; float globalMapVisualizationLeafSize; // Dynamic object data association float detectionMatchThreshold; vector dataAssociationVarianceVector; vector earlyLooselyCoupledMatchingVarianceVector; vector looselyCoupledMatchingVarianceVector; vector tightlyCoupledMatchingVarianceVector; Eigen::Matrix dataAssociationVarianceEigenVector; Eigen::Matrix earlyLooselyCoupledMatchingVarianceEigenVector; Eigen::Matrix looselyCoupledMatchingVarianceEigenVector; Eigen::Matrix tightlyCoupledMatchingVarianceEigenVector; // Factor covariance matrices (presented as diagonal vectors) vector priorOdometryDiagonalVarianceVector; vector odometryDiagonalVarianceVector; vector earlyConstantVelocityDiagonalVarianceVector; vector constantVelocityDiagonalVarianceVector; vector motionDiagonalVarianceVector; vector looselyCoupledDetectionVarianceVector; vector tightlyCoupledDetectionVarianceVector; Eigen::Matrix priorOdometryDiagonalVarianceEigenVector; Eigen::Matrix odometryDiagonalVarianceEigenVector; Eigen::Matrix earlyConstantVelocityDiagonalVarianceEigenVector; Eigen::Matrix constantVelocityDiagonalVarianceEigenVector; Eigen::Matrix motionDiagonalVarianceEigenVector; Eigen::Matrix looselyCoupledDetectionVarianceEigenVector; Eigen::Matrix tightlyCoupledDetectionVarianceEigenVector; // Gentle coupling options int numberOfEarlySteps; int numberOfPreLooseCouplingSteps; int numberOfVelocityConsistencySteps; int 
numberOfInterLooseCouplingSteps; float tightCouplingDetectionErrorThreshold; float objectAngularVelocityConsistencyVarianceThreshold; float objectLinearVelocityConsistencyVarianceThreshold; // Tracking int trackingStepsForLostObject; ParamServer() { nh.param("/robot_id", robot_id, "roboat"); nh.param("lio_segmot/pointCloudTopic", pointCloudTopic, "points_raw"); nh.param("lio_segmot/imuTopic", imuTopic, "imu_correct"); nh.param("lio_segmot/odomTopic", odomTopic, "odometry/imu"); nh.param("lio_segmot/gpsTopic", gpsTopic, "odometry/gps"); nh.param("lio_segmot/lidarFrame", lidarFrame, "base_link"); nh.param("lio_segmot/baselinkFrame", baselinkFrame, "base_link"); nh.param("lio_segmot/odometryFrame", odometryFrame, "odom"); nh.param("lio_segmot/mapFrame", mapFrame, "map"); nh.param("lio_segmot/useImuHeadingInitialization", useImuHeadingInitialization, false); nh.param("lio_segmot/useGpsElevation", useGpsElevation, false); nh.param("lio_segmot/gpsCovThreshold", gpsCovThreshold, 2.0); nh.param("lio_segmot/poseCovThreshold", poseCovThreshold, 25.0); nh.param("lio_segmot/savePCD", savePCD, false); nh.param("lio_segmot/savePCDDirectory", savePCDDirectory, "/Downloads/LOAM/"); std::string sensorStr; nh.param("lio_segmot/sensor", sensorStr, ""); if (sensorStr == "velodyne") { sensor = SensorType::VELODYNE; } else if (sensorStr == "ouster") { sensor = SensorType::OUSTER; } else { ROS_ERROR_STREAM( "Invalid sensor type (must be either 'velodyne' or 'ouster'): " << sensorStr); ros::shutdown(); } nh.param("lio_segmot/N_SCAN", N_SCAN, 16); nh.param("lio_segmot/Horizon_SCAN", Horizon_SCAN, 1800); nh.param("lio_segmot/downsampleRate", downsampleRate, 1); nh.param("lio_segmot/lidarMinRange", lidarMinRange, 1.0); nh.param("lio_segmot/lidarMaxRange", lidarMaxRange, 1000.0); nh.param("lio_segmot/imuAccNoise", imuAccNoise, 0.01); nh.param("lio_segmot/imuGyrNoise", imuGyrNoise, 0.001); nh.param("lio_segmot/imuAccBiasN", imuAccBiasN, 0.0002); nh.param("lio_segmot/imuGyrBiasN", imuGyrBiasN, 
0.00003); nh.param("lio_segmot/imuGravity", imuGravity, 9.80511); nh.param("lio_segmot/imuRPYWeight", imuRPYWeight, 0.01); nh.param>("lio_segmot/extrinsicRot", extRotV, vector()); nh.param>("lio_segmot/extrinsicRPY", extRPYV, vector()); nh.param>("lio_segmot/extrinsicTrans", extTransV, vector()); extRot = Eigen::Map>(extRotV.data(), 3, 3); extRPY = Eigen::Map>(extRPYV.data(), 3, 3); extTrans = Eigen::Map>(extTransV.data(), 3, 1); extQRPY = Eigen::Quaterniond(extRPY); nh.param("lio_segmot/edgeThreshold", edgeThreshold, 0.1); nh.param("lio_segmot/surfThreshold", surfThreshold, 0.1); nh.param("lio_segmot/edgeFeatureMinValidNum", edgeFeatureMinValidNum, 10); nh.param("lio_segmot/surfFeatureMinValidNum", surfFeatureMinValidNum, 100); nh.param("lio_segmot/odometrySurfLeafSize", odometrySurfLeafSize, 0.2); nh.param("lio_segmot/mappingCornerLeafSize", mappingCornerLeafSize, 0.2); nh.param("lio_segmot/mappingSurfLeafSize", mappingSurfLeafSize, 0.2); nh.param("lio_segmot/z_tollerance", z_tollerance, FLT_MAX); nh.param("lio_segmot/rotation_tollerance", rotation_tollerance, FLT_MAX); nh.param("lio_segmot/numberOfCores", numberOfCores, 2); nh.param("lio_segmot/mappingProcessInterval", mappingProcessInterval, 0.15); nh.param("lio_segmot/surroundingkeyframeAddingDistThreshold", surroundingkeyframeAddingDistThreshold, 1.0); nh.param("lio_segmot/surroundingkeyframeAddingAngleThreshold", surroundingkeyframeAddingAngleThreshold, 0.2); nh.param("lio_segmot/surroundingKeyframeDensity", surroundingKeyframeDensity, 1.0); nh.param("lio_segmot/surroundingKeyframeSearchRadius", surroundingKeyframeSearchRadius, 50.0); nh.param("lio_segmot/loopClosureEnableFlag", loopClosureEnableFlag, false); nh.param("lio_segmot/loopClosureFrequency", loopClosureFrequency, 1.0); nh.param("lio_segmot/surroundingKeyframeSize", surroundingKeyframeSize, 50); nh.param("lio_segmot/historyKeyframeSearchRadius", historyKeyframeSearchRadius, 10.0); nh.param("lio_segmot/historyKeyframeSearchTimeDiff", 
historyKeyframeSearchTimeDiff, 30.0); nh.param("lio_segmot/historyKeyframeSearchNum", historyKeyframeSearchNum, 25); nh.param("lio_segmot/historyKeyframeFitnessScore", historyKeyframeFitnessScore, 0.3); nh.param("lio_segmot/globalMapVisualizationSearchRadius", globalMapVisualizationSearchRadius, 1e3); nh.param("lio_segmot/globalMapVisualizationPoseDensity", globalMapVisualizationPoseDensity, 10.0); nh.param("lio_segmot/globalMapVisualizationLeafSize", globalMapVisualizationLeafSize, 1.0); nh.param("lio_segmot/detectionMatchThreshold", detectionMatchThreshold, 19.5); nh.param>("lio_segmot/dataAssociationVarianceVector", dataAssociationVarianceVector, {1e-4, 1e-4, 1e-4, 1e-2, 2e-3, 2e-3}); nh.param>("lio_segmot/earlyLooselyCoupledMatchingVarianceVector", earlyLooselyCoupledMatchingVarianceVector, {1e-4, 1e-4, 1e-4, 1e-2, 2e-3, 2e-3}); nh.param>("lio_segmot/looselyCoupledMatchingVarianceVector", looselyCoupledMatchingVarianceVector, {1e-4, 1e-4, 1e-4, 1e-2, 2e-3, 2e-3}); nh.param>("lio_segmot/tightlyCoupledMatchingVarianceVector", tightlyCoupledMatchingVarianceVector, {1e-4, 1e-4, 1e-4, 1e-2, 2e-3, 2e-3}); nh.param>("lio_segmot/priorOdometryDiagonalVarianceVector", priorOdometryDiagonalVarianceVector, {1e-2, 1e-2, M_PI * M_PI, 1e8, 1e8, 1e8}); nh.param>("lio_segmot/odometryDiagonalVarianceVector", odometryDiagonalVarianceVector, {1e-6, 1e-6, 1e-6, 1e-4, 1e-4, 1e-4}); nh.param>("lio_segmot/earlyConstantVelocityDiagonalVarianceVector", earlyConstantVelocityDiagonalVarianceVector, {1e-3, 1e-3, 1e-3, 2e-1, 1e-1, 1e-1}); nh.param>("lio_segmot/constantVelocityDiagonalVarianceVector", constantVelocityDiagonalVarianceVector, {1e-3, 1e-3, 1e-3, 2e-1, 1e-1, 1e-1}); nh.param>("lio_segmot/motionDiagonalVarianceVector", motionDiagonalVarianceVector, {1e-4, 1e-4, 1e-2, 1e-1, 1e-2, 1e-2}); nh.param>("lio_segmot/looselyCoupledDetectionVarianceVector", looselyCoupledDetectionVarianceVector, {1e-4, 1e-4, 1e-4, 1e-2, 2e-3, 2e-3}); 
nh.param>("lio_segmot/tightlyCoupledDetectionVarianceVector", tightlyCoupledDetectionVarianceVector, {1e-4, 1e-4, 1e-4, 1e-2, 2e-3, 2e-3}); dataAssociationVarianceEigenVector = Eigen::Map>(dataAssociationVarianceVector.data()); earlyLooselyCoupledMatchingVarianceEigenVector = Eigen::Map>(earlyLooselyCoupledMatchingVarianceVector.data()); looselyCoupledMatchingVarianceEigenVector = Eigen::Map>(looselyCoupledMatchingVarianceVector.data()); tightlyCoupledMatchingVarianceEigenVector = Eigen::Map>(tightlyCoupledMatchingVarianceVector.data()); priorOdometryDiagonalVarianceEigenVector = Eigen::Map>(priorOdometryDiagonalVarianceVector.data()); odometryDiagonalVarianceEigenVector = Eigen::Map>(odometryDiagonalVarianceVector.data()); earlyConstantVelocityDiagonalVarianceEigenVector = Eigen::Map>(earlyConstantVelocityDiagonalVarianceVector.data()); constantVelocityDiagonalVarianceEigenVector = Eigen::Map>(constantVelocityDiagonalVarianceVector.data()); motionDiagonalVarianceEigenVector = Eigen::Map>(motionDiagonalVarianceVector.data()); looselyCoupledDetectionVarianceEigenVector = Eigen::Map>(looselyCoupledDetectionVarianceVector.data()); tightlyCoupledDetectionVarianceEigenVector = Eigen::Map>(tightlyCoupledDetectionVarianceVector.data()); nh.param("lio_segmot/numberOfEarlySteps", numberOfEarlySteps, 1); nh.param("lio_segmot/numberOfPreLooseCouplingSteps", numberOfPreLooseCouplingSteps, 10); nh.param("lio_segmot/numberOfVelocityConsistencySteps", numberOfVelocityConsistencySteps, 4); nh.param("lio_segmot/numberOfInterLooseCouplingSteps", numberOfInterLooseCouplingSteps, 0); nh.param("lio_segmot/tightCouplingDetectionErrorThreshold", tightCouplingDetectionErrorThreshold, 500.0); nh.param("lio_segmot/objectAngularVelocityConsistencyVarianceThreshold", objectAngularVelocityConsistencyVarianceThreshold, 1e-5); nh.param("lio_segmot/objectLinearVelocityConsistencyVarianceThreshold", objectLinearVelocityConsistencyVarianceThreshold, 1e-2); 
nh.param("lio_segmot/trackingStepsForLostObject", trackingStepsForLostObject, 3); usleep(100); } sensor_msgs::Imu imuConverter(const sensor_msgs::Imu &imu_in) { sensor_msgs::Imu imu_out = imu_in; // rotate acceleration Eigen::Vector3d acc(imu_in.linear_acceleration.x, imu_in.linear_acceleration.y, imu_in.linear_acceleration.z); acc = extRot * acc; imu_out.linear_acceleration.x = acc.x(); imu_out.linear_acceleration.y = acc.y(); imu_out.linear_acceleration.z = acc.z(); // rotate gyroscope Eigen::Vector3d gyr(imu_in.angular_velocity.x, imu_in.angular_velocity.y, imu_in.angular_velocity.z); gyr = extRot * gyr; imu_out.angular_velocity.x = gyr.x(); imu_out.angular_velocity.y = gyr.y(); imu_out.angular_velocity.z = gyr.z(); // rotate roll pitch yaw Eigen::Quaterniond q_from(imu_in.orientation.w, imu_in.orientation.x, imu_in.orientation.y, imu_in.orientation.z); Eigen::Quaterniond q_final = q_from * extQRPY; imu_out.orientation.x = q_final.x(); imu_out.orientation.y = q_final.y(); imu_out.orientation.z = q_final.z(); imu_out.orientation.w = q_final.w(); if (sqrt(q_final.x() * q_final.x() + q_final.y() * q_final.y() + q_final.z() * q_final.z() + q_final.w() * q_final.w()) < 0.1) { ROS_ERROR("Invalid quaternion, please use a 9-axis IMU!"); ros::shutdown(); } return imu_out; } }; sensor_msgs::PointCloud2 publishCloud(ros::Publisher *thisPub, pcl::PointCloud::Ptr thisCloud, ros::Time thisStamp, std::string thisFrame) { sensor_msgs::PointCloud2 tempCloud; pcl::toROSMsg(*thisCloud, tempCloud); tempCloud.header.stamp = thisStamp; tempCloud.header.frame_id = thisFrame; if (thisPub->getNumSubscribers() != 0) thisPub->publish(tempCloud); return tempCloud; } template double ROS_TIME(T msg) { return msg->header.stamp.toSec(); } template void imuAngular2rosAngular(sensor_msgs::Imu *thisImuMsg, T *angular_x, T *angular_y, T *angular_z) { *angular_x = thisImuMsg->angular_velocity.x; *angular_y = thisImuMsg->angular_velocity.y; *angular_z = thisImuMsg->angular_velocity.z; } template 
void imuAccel2rosAccel(sensor_msgs::Imu *thisImuMsg, T *acc_x, T *acc_y, T *acc_z) { *acc_x = thisImuMsg->linear_acceleration.x; *acc_y = thisImuMsg->linear_acceleration.y; *acc_z = thisImuMsg->linear_acceleration.z; } template void imuRPY2rosRPY(sensor_msgs::Imu *thisImuMsg, T *rosRoll, T *rosPitch, T *rosYaw) { double imuRoll, imuPitch, imuYaw; tf::Quaternion orientation; tf::quaternionMsgToTF(thisImuMsg->orientation, orientation); tf::Matrix3x3(orientation).getRPY(imuRoll, imuPitch, imuYaw); *rosRoll = imuRoll; *rosPitch = imuPitch; *rosYaw = imuYaw; } float pointDistance(PointType p) { return sqrt(p.x * p.x + p.y * p.y + p.z * p.z); } float pointDistance(PointType p1, PointType p2) { return sqrt((p1.x - p2.x) * (p1.x - p2.x) + (p1.y - p2.y) * (p1.y - p2.y) + (p1.z - p2.z) * (p1.z - p2.z)); } #endif ================================================ FILE: launch/debug.launch ================================================ ================================================ FILE: launch/gdbserver_debug.launch ================================================ ================================================ FILE: launch/include/config/robot.urdf.xacro ================================================ ================================================ FILE: launch/include/config/rviz.rviz ================================================ Panels: - Class: rviz/Displays Help Height: 191 Name: Displays Property Tree Widget: Expanded: - /Global Options1 - /Status1 - /links1 - /Odometry1 - /Point Cloud1 - /Feature Cloud1 - /Mapping1 - /Mapping1/Map (global)1 - /Tracking1 - /Tracking1/Dynamic Objects1 Splitter Ratio: 0.5940054655075073 Tree Height: 686 - Class: rviz/Selection Name: Selection - Class: rviz/Views Expanded: - /Current View1 Name: Views Splitter Ratio: 0.5 - Class: rviz/Tool Properties Expanded: ~ Name: Tool Properties Splitter Ratio: 0.5 Preferences: PromptSaveOnExit: true Toolbars: toolButtonStyle: 2 Visualization Manager: Class: "" Displays: - Class: rviz/Group 
Displays: - Class: rviz/TF Enabled: false Frame Timeout: 999 Frames: All Enabled: false Marker Scale: 3 Name: TF Show Arrows: true Show Axes: true Show Names: true Tree: {} Update Interval: 0 Value: false - Class: rviz/Axes Enabled: true Length: 2 Name: map Radius: 0.5 Reference Frame: map Value: true - Class: rviz/Axes Enabled: true Length: 1 Name: base_link Radius: 0.30000001192092896 Reference Frame: base_link Value: true - Class: rviz/Axes Enabled: true Length: 1 Name: lidar_link Radius: 0.10000000149011612 Reference Frame: lidar_link Value: true Enabled: true Name: links - Class: rviz/Group Displays: - Angle Tolerance: 0.10000000149011612 Class: rviz/Odometry Covariance: Orientation: Alpha: 0.5 Color: 255; 255; 127 Color Style: Unique Frame: Local Offset: 1 Scale: 1 Value: true Position: Alpha: 0.30000001192092896 Color: 204; 51; 204 Scale: 1 Value: true Value: false Enabled: true Keep: 300 Name: Odom IMU Position Tolerance: 0.10000000149011612 Shape: Alpha: 1 Axes Length: 0.5 Axes Radius: 0.10000000149011612 Color: 255; 25; 0 Head Length: 0.30000001192092896 Head Radius: 0.10000000149011612 Shaft Length: 1 Shaft Radius: 0.05000000074505806 Value: Axes Topic: /odometry/imu Unreliable: false Value: true - Angle Tolerance: 0.10000000149011612 Class: rviz/Odometry Covariance: Orientation: Alpha: 0.5 Color: 255; 255; 127 Color Style: Unique Frame: Local Offset: 1 Scale: 1 Value: true Position: Alpha: 0.30000001192092896 Color: 204; 51; 204 Scale: 1 Value: true Value: false Enabled: true Keep: 300 Name: Odom GPS Position Tolerance: 0.10000000149011612 Shape: Alpha: 1 Axes Length: 1 Axes Radius: 0.30000001192092896 Color: 255; 25; 0 Head Length: 0.30000001192092896 Head Radius: 0.10000000149011612 Shaft Length: 1 Shaft Radius: 0.05000000074505806 Value: Axes Topic: /odometry/gps Unreliable: false Value: true - Angle Tolerance: 0.10000000149011612 Class: rviz/Odometry Covariance: Orientation: Alpha: 0.5 Color: 255; 255; 127 Color Style: Unique Frame: Local Offset: 1 
Scale: 1 Value: true Position: Alpha: 0.30000001192092896 Color: 204; 51; 204 Scale: 1 Value: true Value: false Enabled: false Keep: 300 Name: Odom lidar Position Tolerance: 0.10000000149011612 Shape: Alpha: 1 Axes Length: 0.25 Axes Radius: 0.10000000149011612 Color: 255; 25; 0 Head Length: 0.30000001192092896 Head Radius: 0.10000000149011612 Shaft Length: 1 Shaft Radius: 0.05000000074505806 Value: Axes Topic: /lio_segmot/mapping/odometry Unreliable: false Value: false Enabled: false Name: Odometry - Class: rviz/Group Displays: - Alpha: 1 Autocompute Intensity Bounds: false Autocompute Value Bounds: Max Value: 10 Min Value: -10 Value: true Axis: Z Channel Name: intensity Class: rviz/PointCloud2 Color: 255; 255; 255 Color Transformer: Intensity Decay Time: 0 Enabled: true Invert Rainbow: false Max Color: 191; 64; 64 Max Intensity: 0.9900000095367432 Min Color: 191; 64; 64 Min Intensity: 0 Name: Velodyne Position Transformer: XYZ Queue Size: 10 Selectable: false Size (Pixels): 2 Size (m): 0.009999999776482582 Style: Points Topic: /lio_segmot/mapping/cloud_deskewed Unreliable: false Use Fixed Frame: true Use rainbow: false Value: true - Alpha: 1 Autocompute Intensity Bounds: true Autocompute Value Bounds: Max Value: 10 Min Value: -10 Value: true Axis: Z Channel Name: intensity Class: rviz/PointCloud2 Color: 255; 255; 255 Color Transformer: Intensity Decay Time: 0 Enabled: false Invert Rainbow: false Max Color: 255; 255; 255 Min Color: 0; 0; 0 Name: Reg Cloud Position Transformer: XYZ Queue Size: 10 Selectable: true Size (Pixels): 3 Size (m): 0.009999999776482582 Style: Points Topic: /lio_segmot/mapping/cloud_registered Unreliable: false Use Fixed Frame: true Use rainbow: true Value: false - Class: jsk_rviz_plugin/BoundingBoxArray Enabled: true Name: Detections Topic: /lio_segmot/mapping/detections Unreliable: false Value: true alpha: 0.6000000238418579 color: 171; 255; 152 coloring: Flat color line width: 0.15000000596046448 only edge: false show coords: false 
Enabled: true Name: Point Cloud - Class: rviz/Group Displays: - Alpha: 1 Autocompute Intensity Bounds: true Autocompute Value Bounds: Max Value: 10 Min Value: -10 Value: true Axis: Z Channel Name: intensity Class: rviz/PointCloud2 Color: 0; 255; 0 Color Transformer: FlatColor Decay Time: 0 Enabled: true Invert Rainbow: false Max Color: 255; 255; 255 Min Color: 0; 0; 0 Name: Edge Feature Position Transformer: XYZ Queue Size: 10 Selectable: true Size (Pixels): 5 Size (m): 0.009999999776482582 Style: Points Topic: /lio_segmot/feature/cloud_corner Unreliable: false Use Fixed Frame: true Use rainbow: true Value: true - Alpha: 1 Autocompute Intensity Bounds: true Autocompute Value Bounds: Max Value: 10 Min Value: -10 Value: true Axis: Z Channel Name: intensity Class: rviz/PointCloud2 Color: 255; 85; 255 Color Transformer: FlatColor Decay Time: 0 Enabled: true Invert Rainbow: false Max Color: 255; 255; 255 Min Color: 0; 0; 0 Name: Plannar Feature Position Transformer: XYZ Queue Size: 10 Selectable: true Size (Pixels): 3 Size (m): 0.009999999776482582 Style: Points Topic: /lio_segmot/feature/cloud_surface Unreliable: false Use Fixed Frame: true Use rainbow: true Value: true Enabled: false Name: Feature Cloud - Class: rviz/Group Displays: - Alpha: 0.30000001192092896 Autocompute Intensity Bounds: false Autocompute Value Bounds: Max Value: 7.4858574867248535 Min Value: -1.2309398651123047 Value: true Axis: Z Channel Name: intensity Class: rviz/PointCloud2 Color: 255; 255; 255 Color Transformer: Intensity Decay Time: 300 Enabled: false Invert Rainbow: false Max Color: 46; 52; 54 Max Intensity: 0.9900000095367432 Min Color: 46; 52; 54 Min Intensity: 0 Name: Map (cloud) Position Transformer: XYZ Queue Size: 10 Selectable: false Size (Pixels): 2 Size (m): 0.009999999776482582 Style: Points Topic: /lio_segmot/mapping/cloud_registered Unreliable: false Use Fixed Frame: true Use rainbow: false Value: false - Alpha: 1 Autocompute Intensity Bounds: true Autocompute Value Bounds: Max 
Value: 36.61034393310547 Min Value: -2.3476977348327637 Value: true Axis: Z Channel Name: intensity Class: rviz/PointCloud2 Color: 255; 255; 255 Color Transformer: AxisColor Decay Time: 0 Enabled: false Invert Rainbow: false Max Color: 255; 255; 255 Min Color: 0; 0; 0 Name: Map (local) Position Transformer: XYZ Queue Size: 10 Selectable: true Size (Pixels): 2 Size (m): 0.10000000149011612 Style: Flat Squares Topic: /lio_segmot/mapping/map_local Unreliable: false Use Fixed Frame: true Use rainbow: true Value: false - Alpha: 1 Autocompute Intensity Bounds: true Autocompute Value Bounds: Max Value: 10 Min Value: -10 Value: true Axis: Z Channel Name: intensity Class: rviz/PointCloud2 Color: 255; 255; 255 Color Transformer: Intensity Decay Time: 0 Enabled: true Invert Rainbow: false Max Color: 75; 75; 75 Min Color: 75; 75; 75 Name: Map (global) Position Transformer: XYZ Queue Size: 10 Selectable: true Size (Pixels): 2 Size (m): 0.009999999776482582 Style: Points Topic: /lio_segmot/mapping/map_global Unreliable: false Use Fixed Frame: true Use rainbow: false Value: true - Alpha: 1 Buffer Length: 1 Class: rviz/Path Color: 85; 255; 255 Enabled: true Head Diameter: 0.30000001192092896 Head Length: 0.20000000298023224 Length: 0.30000001192092896 Line Style: Billboards Line Width: 0.10000000149011612 Name: Path (global) Offset: X: 0 Y: 0 Z: 0 Pose Color: 255; 85; 255 Pose Style: None Radius: 0.029999999329447746 Shaft Diameter: 0.10000000149011612 Shaft Length: 0.10000000149011612 Topic: /lio_segmot/mapping/path Unreliable: false Value: true - Alpha: 1 Buffer Length: 1 Class: rviz/Path Color: 255; 85; 255 Enabled: true Head Diameter: 0.30000001192092896 Head Length: 0.20000000298023224 Length: 0.30000001192092896 Line Style: Billboards Line Width: 0.10000000149011612 Name: Path (local) Offset: X: 0 Y: 0 Z: 0 Pose Color: 255; 85; 255 Pose Style: None Radius: 0.029999999329447746 Shaft Diameter: 0.10000000149011612 Shaft Length: 0.10000000149011612 Topic: /lio_segmot/imu/path 
Unreliable: false Value: true - Alpha: 1 Autocompute Intensity Bounds: true Autocompute Value Bounds: Max Value: 20.802837371826172 Min Value: -0.03934507071971893 Value: true Axis: Z Channel Name: intensity Class: rviz/PointCloud2 Color: 255; 255; 255 Color Transformer: AxisColor Decay Time: 0 Enabled: false Invert Rainbow: false Max Color: 255; 255; 255 Min Color: 0; 0; 0 Name: Loop closure Position Transformer: XYZ Queue Size: 10 Selectable: true Size (Pixels): 2 Size (m): 0.30000001192092896 Style: Flat Squares Topic: /lio_segmot/mapping/icp_loop_closure_corrected_cloud Unreliable: false Use Fixed Frame: true Use rainbow: true Value: false - Class: rviz/MarkerArray Enabled: true Marker Topic: /lio_segmot/mapping/loop_closure_constraints Name: Loop constraint Namespaces: {} Queue Size: 100 Value: true - Class: jsk_rviz_plugin/BoundingBoxArray Enabled: false Name: Dynamic Objects Topic: /lio_segmot/mapping/objects Unreliable: false Value: false alpha: 0.800000011920929 color: 25; 255; 0 coloring: Label line width: 0.15000000596046448 only edge: true show coords: true - Class: rviz/MarkerArray Enabled: false Marker Topic: /lio_segmot/mapping/object_paths Name: Dynamic Object Paths Namespaces: {} Queue Size: 100 Value: false - Class: rviz/MarkerArray Enabled: false Marker Topic: /lio_segmot/mapping/object_labels Name: Dynamic Object Labels Namespaces: {} Queue Size: 100 Value: false - Class: rviz/MarkerArray Enabled: false Marker Topic: /lio_segmot/mapping/object_velocities Name: Dynamic Object Predicted Path Namespaces: {} Queue Size: 100 Value: false - Class: rviz/Marker Enabled: true Marker Topic: /lio_segmot/mapping/tightly_coupled_object_points Name: Tightly-coupled nodes Namespaces: {} Queue Size: 100 Value: true Enabled: true Name: Mapping - Class: rviz/Group Displays: - Class: jsk_rviz_plugin/BoundingBoxArray Enabled: true Name: Dynamic Objects Topic: /lio_segmot/tracking/objects Unreliable: false Value: true alpha: 0.800000011920929 color: 25; 255; 0 
coloring: Label line width: 0.15000000596046448 only edge: true show coords: false - Class: rviz/MarkerArray Enabled: true Marker Topic: /lio_segmot/tracking/object_paths Name: Dynamic Object Paths Namespaces: {} Queue Size: 100 Value: true - Class: rviz/MarkerArray Enabled: false Marker Topic: /lio_segmot/tracking/object_labels Name: Dynamic Object Labels Namespaces: {} Queue Size: 100 Value: false - Class: rviz/MarkerArray Enabled: true Marker Topic: /lio_segmot/tracking/object_velocities Name: Dynamic Object Predicted Path Namespaces: {} Queue Size: 100 Value: true - Class: rviz/MarkerArray Enabled: true Marker Topic: /lio_segmot/tracking/object_velocity_arrows Name: Dynamic Object Predicted Path Arrow Namespaces: {} Queue Size: 100 Value: true Enabled: true Name: Tracking Enabled: true Global Options: Background Color: 0; 0; 0 Default Light: true Fixed Frame: map Frame Rate: 30 Name: root Tools: - Class: rviz/Interact Hide Inactive Objects: true - Class: rviz/FocusCamera - Class: rviz/Measure Value: true Views: Current: Class: rviz/Orbit Distance: 182.35630798339844 Enable Stereo Rendering: Stereo Eye Separation: 0.05999999865889549 Stereo Focal Distance: 1 Swap Stereo Eyes: false Value: false Focal Point: X: 68.5766372680664 Y: 25.248287200927734 Z: -78.56790161132812 Focal Shape Fixed Size: false Focal Shape Size: 0.05000000074505806 Invert Z Axis: false Name: Current View Near Clip Distance: 0.009999999776482582 Pitch: 0.8203978538513184 Target Frame: base_link Value: Orbit (rviz) Yaw: 3.4916627407073975 Saved: ~ Window Geometry: Displays: collapsed: false Height: 1022 Hide Left Dock: false Hide Right Dock: true QMainWindow State: 
000000ff00000000fd000000040000000000000171000003a8fc0200000009fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000005c00fffffffb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb0000000a0049006d006100670065000000002e000001340000000000000000fb000000100044006900730070006c006100790073010000003b000003a8000000c700fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730000000286000000a70000005c00ffffff000000010000011100000435fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073000000002e00000435000000a000fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e10000019700000003000004b000000040fc0100000002fb0000000800540069006d00650000000000000004b00000000000000000fb0000000800540069006d00650100000000000004500000000000000000000005c2000003a800000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000 Selection: collapsed: false Tool Properties: collapsed: false Views: collapsed: true Width: 1849 X: 71 Y: 28 ================================================ FILE: launch/include/module_loam.launch ================================================ ================================================ FILE: launch/include/module_loam_debug.launch ================================================ ================================================ FILE: 
launch/include/module_loam_gdbserver_debug.launch ================================================ ================================================ FILE: launch/include/module_navsat.launch ================================================ ================================================ FILE: launch/include/module_robot_state_publisher.launch ================================================ ================================================ FILE: launch/include/module_rviz.launch ================================================ ================================================ FILE: launch/include/rosconsole/rosconsole_error.conf ================================================ # Set the default ros output to error and higher log4j.logger.ros = ERROR ================================================ FILE: launch/include/rosconsole/rosconsole_info.conf ================================================ # Set the default ros output to info and higher log4j.logger.ros = INFO ================================================ FILE: launch/include/rosconsole/rosconsole_warn.conf ================================================ # Set the default ros output to warning and higher log4j.logger.ros = WARN ================================================ FILE: launch/run.launch ================================================ ================================================ FILE: launch/run_hsinchu.launch ================================================ ================================================ FILE: launch/run_kitti.launch ================================================ ================================================ FILE: msg/Diagnosis.msg ================================================ Header header float64 computationalTime int32 numberOfDetections int32 numberOfTightlyCoupledObjects ================================================ FILE: msg/ObjectState.msg ================================================ Header header jsk_recognition_msgs/BoundingBox detection
geometry_msgs/Pose pose geometry_msgs/Pose velocity bool hasTightlyCoupledDetectionError float64 tightlyCoupledDetectionError float64 initialTightlyCoupledDetectionError bool hasLooselyCoupledDetectionError float64 looselyCoupledDetectionError float64 initialLooselyCoupledDetectionError bool hasMotionError float64 motionError float64 initialMotionError int32 index int32 lostCount float64 confidence bool isTightlyCoupled bool isFirst ================================================ FILE: msg/ObjectStateArray.msg ================================================ Header header lio_segmot/ObjectState[] objects ================================================ FILE: msg/cloud_info.msg ================================================ # Cloud Info Header header int32[] startRingIndex int32[] endRingIndex int32[] pointColInd # point column index in range image float32[] pointRange # point range int64 imuAvailable int64 odomAvailable # Attitude for LOAM initialization float32 imuRollInit float32 imuPitchInit float32 imuYawInit # Initial guess from imu pre-integration float32 initialGuessX float32 initialGuessY float32 initialGuessZ float32 initialGuessRoll float32 initialGuessPitch float32 initialGuessYaw # Point cloud messages sensor_msgs/PointCloud2 cloud_raw # original cloud without any processing sensor_msgs/PointCloud2 cloud_deskewed # original cloud deskewed sensor_msgs/PointCloud2 cloud_corner # extracted corner feature sensor_msgs/PointCloud2 cloud_surface # extracted surface feature ================================================ FILE: msg/flags.msg ================================================ int32[] flags ================================================ FILE: package.xml ================================================ lio_segmot 1.0.0 Lidar Odometry Tixiao Shan TODO Tixiao Shan catkin roscpp roscpp rospy rospy rosbag rosbag tf tf cv_bridge cv_bridge pcl_conversions pcl_conversions std_msgs std_msgs sensor_msgs sensor_msgs geometry_msgs geometry_msgs nav_msgs 
nav_msgs visualization_msgs visualization_msgs message_generation message_generation message_runtime message_runtime GTSAM GTSAM jsk_recognition_msgs jsk_recognition_msgs jsk_topic_tools jsk_topic_tools ================================================ FILE: scripts/diagnose.py ================================================ import copy import multiprocessing as mp from typing import Dict, List, Tuple import dash import dash_bootstrap_components as dbc import numpy as np import plotly.graph_objects as go import rospy from dash import dcc, html from dash.dependencies import Input, Output from lio_segmot.msg import ObjectState, ObjectStateArray from std_msgs.msg import Header # ------------------------------------------------------------------------------------------------ # # Environmental Variables # # ------------------------------------------------------------------------------------------------ # RECENT_HISTORY_LENGTH = 15 # ------------------------------------------------------------------------------------------------ # # Utils # # ------------------------------------------------------------------------------------------------ # def get_color(index): colormap = [ (0.121569, 0.466667, 0.705882), (0.682353, 0.780392, 0.909804), (1.000000, 0.498039, 0.054902), (1.000000, 0.733333, 0.470588), (0.172549, 0.627451, 0.172549), (0.596078, 0.874510, 0.541176), (0.839216, 0.152941, 0.156863), (1.000000, 0.596078, 0.588235), (0.580392, 0.403922, 0.741176), (0.772549, 0.690196, 0.835294), (0.549020, 0.337255, 0.294118), (0.768627, 0.611765, 0.580392), (0.890196, 0.466667, 0.760784), (0.968627, 0.713725, 0.823529), (0.498039, 0.498039, 0.498039), (0.780392, 0.780392, 0.780392), (0.737255, 0.741176, 0.133333), (0.858824, 0.858824, 0.552941), (0.090196, 0.745098, 0.811765), (0.619608, 0.854902, 0.898039), ] colormap = [(int(i * 255), int(j * 255), int(k * 255)) for (i, j, k) in colormap] color = colormap[index % 20] return "rgba({}, {}, {}, 1)".format(color[0], color[1], 
color[2]) # ------------------------------------------------------------------------------------------------ # # Data # # ------------------------------------------------------------------------------------------------ # class DynamicObject: def __init__(self) -> None: self.states: Dict[int, ObjectState] = {} # {stamp index}: {object state} self.index: int = -1 self.name: str = "" self.color: str = "rgba(0, 0, 0, 1)" def get_confidence_score(self, index: int) -> float: if index not in self.states.keys(): raise ValueError("Invalid index") return self.states[index].detection.value def get_initial_tightly_coupled_detection_error(self, index: int) -> float: if index not in self.states.keys(): raise ValueError("Invalid index") return self.states[index].initialTightlyCoupledDetectionError def get_initial_loosely_coupled_detection_error(self, index: int) -> float: if index not in self.states.keys(): raise ValueError("Invalid index") return self.states[index].initialLooselyCoupledDetectionError def get_tightly_coupled_detection_error(self, index: int) -> float: if index not in self.states.keys(): raise ValueError("Invalid index") return self.states[index].tightlyCoupledDetectionError def get_loosely_coupled_detection_error(self, index: int) -> float: if index not in self.states.keys(): raise ValueError("Invalid index") return self.states[index].looselyCoupledDetectionError def get_initial_motion_error(self, index: int) -> float: if index not in self.states.keys(): raise ValueError("Invalid index") return self.states[index].initialMotionError def get_motion_error(self, index: int) -> float: if index not in self.states.keys(): raise ValueError("Invalid index") return self.states[index].motionError def recent_indices(self, start_index: int, value_type: str = "") -> np.ndarray: indices = np.array(list(self.states.keys())) indices = indices[np.where(indices >= start_index)[0]] if value_type in ["", "confidence_score"]: return indices elif value_type in [ 
"initial_tightly_coupled_detection_error", "tightly_coupled_detection_error", ]: return indices[ np.where( np.array( [ 1 if self.states[idx].hasTightlyCoupledDetectionError else 0 for idx in indices ] ) )[0] ] elif value_type in [ "initial_loosely_coupled_detection_error", "loosely_coupled_detection_error", ]: return indices[ np.where( np.array( [ 1 if self.states[idx].hasLooselyCoupledDetectionError else 0 for idx in indices ] ) )[0] ] elif value_type in ["initial_motion_error", "motion_error"]: return indices[ np.where( np.array( [1 if self.states[idx].hasMotionError else 0 for idx in indices] ) )[0] ] else: raise ValueError("Invalid value type") def has_recent_data(self, start_index: int) -> bool: return len(self.recent_indices(start_index)) > 0 def get_recent_values_plot_data( self, value_type: str, start_index: int, stamps: np.ndarray ) -> go.Scatter: indices = self.recent_indices(start_index, value_type) if len(indices) == 0: return go.Scatter(x=[], y=[], name=self.name) x = stamps[indices] y = None if value_type == "confidence_score": y = [self.states[i].detection.value for i in indices] elif value_type == "initial_loosely_coupled_detection_error": y = [self.states[i].initialLooselyCoupledDetectionError for i in indices] elif value_type == "initial_tightly_coupled_detection_error": y = [self.states[i].initialTightlyCoupledDetectionError for i in indices] elif value_type == "loosely_coupled_detection_error": y = [self.states[i].looselyCoupledDetectionError for i in indices] elif value_type == "tightly_coupled_detection_error": y = [self.states[i].tightlyCoupledDetectionError for i in indices] elif value_type == "initial_motion_error": y = [self.states[i].initialMotionError for i in indices] elif value_type == "motion_error": y = [self.states[i].motionError for i in indices] else: raise ValueError("Invalid value type") return go.Scatter(x=x, y=y, name=self.name, marker=dict(color=self.color)) class Data: def __init__(self) -> None: self.objects: Dict[int, 
DynamicObject] = {} self.tightly_coupled_objects: Dict[int, DynamicObject] = {} self.start_time = -1 self.stamps: List[float] = [] self.stamps_hash: List[str] = [] def add_new_object_state(self, object_state: ObjectState) -> None: stamp_index = self.update_time_stamp(object_state.header) if object_state.index not in self.objects.keys(): self.objects[object_state.index] = DynamicObject() self.objects[object_state.index].index = object_state.index self.objects[object_state.index].name = "Object {}".format( object_state.index ) self.objects[object_state.index].color = get_color(object_state.index) self.objects[object_state.index].states[stamp_index] = copy.deepcopy( object_state ) if object_state.isTightlyCoupled: if object_state.index not in self.tightly_coupled_objects.keys(): self.tightly_coupled_objects[object_state.index] = DynamicObject() self.tightly_coupled_objects[ object_state.index ].index = object_state.index self.tightly_coupled_objects[ object_state.index ].name = "Object {}".format(object_state.index) self.tightly_coupled_objects[object_state.index].color = get_color( object_state.index ) self.tightly_coupled_objects[object_state.index].states[ stamp_index ] = copy.deepcopy(object_state) def update_time_stamp(self, header: Header) -> int: stamp = header.stamp.to_sec() if self.start_time < 0: self.start_time = stamp stamp = stamp - self.start_time stamp_hash = "{}.{}".format(header.stamp.secs, header.stamp.nsecs) # TODO: Check ordering of time stamps if stamp_hash not in self.stamps_hash: self.stamps.append(stamp) self.stamps_hash.append(stamp_hash) return len(self.stamps) - 1 else: length = len(self.stamps) return next( ( length - i - 1 for i, e in enumerate(self.stamps_hash[::-1]) if e == stamp_hash ) ) @property def number_of_stamps(self): return len(self.stamps) def get_recent_values_plot_data( self, value_type: str, length: int, only_tightly_coupled: bool = False ) -> List[go.Scatter]: start_index = max(self.number_of_stamps - length, 0) np_stamps = 
np.array(self.stamps) available_object_states = ( self.tightly_coupled_objects.values() if only_tightly_coupled else self.objects.values() ) return [ d.get_recent_values_plot_data(value_type, start_index, np_stamps) for d in available_object_states if d.has_recent_data(start_index) ] def get_recent_plot_xaxis_range(self, length: int) -> Tuple[float, float]: start_index = max(self.number_of_stamps - length, 0) end_stamp = max(self.stamps[-1], self.stamps[start_index] + length * 0.1) + 0.2 return self.stamps[start_index], end_stamp data = Data() queue = mp.Queue(maxsize=5) confidence_score_min_y = 0.0 confidence_score_max_y = 0.55 initial_detection_error_min_y = 0.0 initial_detection_error_max_y = 200 detection_error_min_y = 0.0 detection_error_max_y = 20 initial_motion_error_min_y = 0.0 initial_motion_error_max_y = 5.0 motion_error_min_y = 0.0 motion_error_max_y = 20 # ------------------------------------------------------------------------------------------------ # # ROS # # ------------------------------------------------------------------------------------------------ # class Callback: def __init__(self, queue: mp.Queue) -> None: self.queue: mp.Queue = queue def __call__(self, msg: ObjectStateArray) -> None: queue.put(msg) def ros_main(): callback = Callback(queue) rospy.init_node("lio_segmotmot_diagnose", anonymous=True) rospy.Subscriber("lio_segmot/mapping/object_states", ObjectStateArray, callback) rospy.spin() # ------------------------------------------------------------------------------------------------ # # Dash # # ------------------------------------------------------------------------------------------------ # app = dash.Dash(__name__, external_stylesheets=[dbc.themes.FLATLY]) app.layout = html.Div( html.Div( [ dbc.NavbarSimple( children=[], brand="Coupling LIO-SAM and MOT", brand_href="#", color="primary", dark=True, ), dbc.Toast( "Data is reset!", id="data-reset-toast", header="Notification", is_open=False, dismissable=True, icon="info", # top: 66 
positions the toast below the navbar style={ "position": "fixed", "top": 66, "right": 10, "width": 350, "z-index": "100", }, ), html.Div( [ dbc.Row( [ dbc.Col( [html.H3("SE-SSD's Confidence Score")], class_name="col-6", ), dbc.Col( [ dbc.InputGroup( [ dbc.InputGroupText("min(y)"), dbc.Input( type="number", id="confidence-score-min-y", placeholder="min(y)", value=confidence_score_min_y, ), ] ), ], class_name="col-3", ), dbc.Col( [ dbc.InputGroup( [ dbc.InputGroupText("max(y)"), dbc.Input( type="number", id="confidence-score-max-y", placeholder="max(y)", value=confidence_score_max_y, ), ] ), ], class_name="col-3", ), ], style={"margin-top": "30px"}, ), dbc.Row( [ dbc.Col( dcc.Graph(id="detection-score-all"), class_name="col-lg-6 col-12", ), dbc.Col( dcc.Graph(id="detection-score-tightly-coupled"), class_name="col-lg-6 col-12", ), ] ), html.Hr(), dbc.Row( [ dbc.Col( [html.H3("Initial Detection Error")], class_name="col-6" ), dbc.Col( [ dbc.InputGroup( [ dbc.InputGroupText("min(y)"), dbc.Input( type="number", id="initial-detection-error-min-y", placeholder="min(y)", value=initial_detection_error_min_y, ), ] ), ], class_name="col-3", ), dbc.Col( [ dbc.InputGroup( [ dbc.InputGroupText("max(y)"), dbc.Input( type="number", id="initial-detection-error-max-y", placeholder="max(y)", value=initial_detection_error_max_y, ), ] ), ], class_name="col-3", ), ], style={"margin-top": "30px"}, ), dbc.Row( [ dbc.Col( dcc.Graph(id="initial-loosely-coupled-detection-error"), class_name="col-lg-6 col-12", ), dbc.Col( dcc.Graph(id="initial-tightly-coupled-detection-error"), class_name="col-lg-6 col-12", ), ] ), html.Hr(), dbc.Row( [ dbc.Col([html.H3("Detection Error")], class_name="col-6"), dbc.Col( [ dbc.InputGroup( [ dbc.InputGroupText("min(y)"), dbc.Input( type="number", id="detection-error-min-y", placeholder="min(y)", value=detection_error_min_y, ), ] ), ], class_name="col-3", ), dbc.Col( [ dbc.InputGroup( [ dbc.InputGroupText("max(y)"), dbc.Input( type="number", 
id="detection-error-max-y", placeholder="max(y)", value=detection_error_max_y, ), ] ), ], class_name="col-3", ), ], style={"margin-top": "30px"}, ), dbc.Row( [ dbc.Col( dcc.Graph(id="loosely-coupled-detection-error"), class_name="col-lg-6 col-12", ), dbc.Col( dcc.Graph(id="tightly-coupled-detection-error"), class_name="col-lg-6 col-12", ), ] ), # html.Hr(), # dbc.Row( # [ # dbc.Col( # [html.H3("Initial Motion Error")], class_name="col-6" # ), # dbc.Col( # [ # dbc.InputGroup( # [ # dbc.InputGroupText("min(y)"), # dbc.Input( # type="number", # id="initial-motion-error-min-y", # placeholder="min(y)", # value=initial_motion_error_min_y, # ), # ] # ), # ], # class_name="col-3", # ), # dbc.Col( # [ # dbc.InputGroup( # [ # dbc.InputGroupText("max(y)"), # dbc.Input( # type="number", # id="initial-motion-error-max-y", # placeholder="max(y)", # value=initial_motion_error_max_y, # ), # ] # ), # ], # class_name="col-3", # ), # ], # style={"margin-top": "30px"}, # ), # dbc.Row( # [ # dbc.Col( # dcc.Graph(id="initial-motion-error-all"), # class_name="col-lg-6 col-12", # ), # dbc.Col( # dcc.Graph(id="initial-motion-error-tightly-coupled"), # class_name="col-lg-6 col-12", # ), # ] # ), html.Hr(), dbc.Row( [ dbc.Col([html.H3("Motion Error")], class_name="col-6"), dbc.Col( [ dbc.InputGroup( [ dbc.InputGroupText("min(y)"), dbc.Input( type="number", id="motion-error-min-y", placeholder="min(y)", value=motion_error_min_y, ), ] ), ], class_name="col-3", ), dbc.Col( [ dbc.InputGroup( [ dbc.InputGroupText("max(y)"), dbc.Input( type="number", id="motion-error-max-y", placeholder="max(y)", value=motion_error_max_y, ), ] ), ], class_name="col-3", ), ], style={"margin-top": "30px"}, ), dbc.Row( [ dbc.Col( dcc.Graph(id="motion-error-all"), class_name="col-lg-6 col-12", ), dbc.Col( dcc.Graph(id="motion-error-tightly-coupled"), class_name="col-lg-6 col-12", ), ] ), html.Hr(), html.Div( [ dbc.Button( "Reset", id="reset-button", color="danger", n_clicks=0, className="me-1", ), ] ), ], 
className="container", ), dcc.Interval( id="interval-component", interval=1 * 200, # in milliseconds n_intervals=0, ), ] ) ) @app.callback( Output("data-reset-toast", "is_open"), Input("reset-button", "n_clicks"), ) def reset(n): global data data = Data() return True @app.callback( Output("detection-score-all", "figure"), Output("detection-score-tightly-coupled", "figure"), Output("initial-loosely-coupled-detection-error", "figure"), Output("initial-tightly-coupled-detection-error", "figure"), Output("loosely-coupled-detection-error", "figure"), Output("tightly-coupled-detection-error", "figure"), # Output("initial-motion-error-all", "figure"), # Output("initial-motion-error-tightly-coupled", "figure"), Output("motion-error-all", "figure"), Output("motion-error-tightly-coupled", "figure"), Input("interval-component", "n_intervals"), Input("confidence-score-min-y", "value"), Input("confidence-score-max-y", "value"), Input("initial-detection-error-min-y", "value"), Input("initial-detection-error-max-y", "value"), Input("detection-error-min-y", "value"), Input("detection-error-max-y", "value"), # Input("initial-motion-error-min-y", "value"), # Input("initial-motion-error-max-y", "value"), Input("motion-error-min-y", "value"), Input("motion-error-max-y", "value"), ) def update( n, c_min_y, c_max_y, i_d_min_y, i_d_max_y, d_min_y, d_max_y, # i_m_min_y, # i_m_max_y, m_min_y, m_max_y, ): global confidence_score_min_y global confidence_score_max_y global initial_detection_error_min_y global initial_detection_error_max_y global detection_error_min_y global detection_error_max_y global initial_motion_error_min_y global initial_motion_error_max_y global motion_error_min_y global motion_error_max_y changed = False if confidence_score_min_y != c_min_y: changed = True confidence_score_min_y = c_min_y if confidence_score_max_y != c_max_y: changed = True confidence_score_max_y = c_max_y if initial_detection_error_min_y != i_d_min_y: changed = True initial_detection_error_min_y = 
i_d_min_y if initial_detection_error_max_y != i_d_max_y: changed = True initial_detection_error_max_y = i_d_max_y if detection_error_min_y != d_min_y: changed = True detection_error_min_y = d_min_y if detection_error_max_y != d_max_y: changed = True detection_error_max_y = d_max_y # if initial_motion_error_min_y != i_m_min_y: # changed = True # initial_motion_error_min_y = i_m_min_y # if initial_motion_error_max_y != i_m_max_y: # changed = True # initial_motion_error_max_y = i_m_max_y if motion_error_min_y != m_min_y: changed = True motion_error_min_y = m_min_y if motion_error_max_y != m_max_y: changed = True motion_error_max_y = m_max_y if queue.empty() and not changed: return [dash.no_update] * 8 elif changed and len(data.objects) == 0: return [dash.no_update] * 8 while not queue.empty(): msg: ObjectStateArray = queue.get() for object_state in msg.objects: data.add_new_object_state(object_state) confidence_score_all = go.Figure( data=[ go.Scatter( x=[data.stamps[-1]] * 2, y=[confidence_score_min_y, confidence_score_max_y], marker={"color": "red"}, ) ] + data.get_recent_values_plot_data( "confidence_score", RECENT_HISTORY_LENGTH, False ), layout_xaxis_range=data.get_recent_plot_xaxis_range(RECENT_HISTORY_LENGTH), layout_yaxis_range=[0, 0.55], ) confidence_score_all.update_layout( title="All Objects", xaxis_title="Time Stamp", yaxis_title="Confidence Score", showlegend=True, ) confidence_score_tightly_coupled = go.Figure( data=[ go.Scatter( x=[data.stamps[-1]] * 2, y=[confidence_score_min_y - 1, confidence_score_max_y + 1], marker={"color": "red"}, ) ] + data.get_recent_values_plot_data( "confidence_score", RECENT_HISTORY_LENGTH, True ), layout_xaxis_range=data.get_recent_plot_xaxis_range(RECENT_HISTORY_LENGTH), layout_yaxis_range=[confidence_score_min_y, confidence_score_max_y], ) confidence_score_tightly_coupled.update_layout( title="Tightly-coupled Objects", xaxis_title="Time Stamp", yaxis_title="Confidence Score", showlegend=True, ) 
initial_loosely_coupled_detection_error = go.Figure( data=[ go.Scatter( x=[data.stamps[-1]] * 2, y=[ initial_detection_error_min_y - 1, initial_detection_error_max_y + 1, ], marker={"color": "red"}, ) ] + data.get_recent_values_plot_data( "initial_loosely_coupled_detection_error", RECENT_HISTORY_LENGTH, False ), layout_xaxis_range=data.get_recent_plot_xaxis_range(RECENT_HISTORY_LENGTH), layout_yaxis_range=[ initial_detection_error_min_y, initial_detection_error_max_y, ], ) initial_loosely_coupled_detection_error.update_layout( title="Loosely-coupled Objects", xaxis_title="Time Stamp", yaxis_title="Loosely-coupled Detection Error", showlegend=True, ) initial_tightly_coupled_detection_error = go.Figure( data=[ go.Scatter( x=[data.stamps[-1]] * 2, y=[ initial_detection_error_min_y - 1, initial_detection_error_max_y + 1, ], marker={"color": "red"}, ) ] + data.get_recent_values_plot_data( "initial_tightly_coupled_detection_error", RECENT_HISTORY_LENGTH, False ), layout_xaxis_range=data.get_recent_plot_xaxis_range(RECENT_HISTORY_LENGTH), layout_yaxis_range=[ initial_detection_error_min_y, initial_detection_error_max_y, ], ) initial_tightly_coupled_detection_error.update_layout( title="Tightly-coupled Objects", xaxis_title="Time Stamp", yaxis_title="Tightly-coupled Detection Error", showlegend=True, ) loosely_coupled_detection_error = go.Figure( data=[ go.Scatter( x=[data.stamps[-1]] * 2, y=[detection_error_min_y - 1, detection_error_max_y + 1], marker={"color": "red"}, ) ] + data.get_recent_values_plot_data( "loosely_coupled_detection_error", RECENT_HISTORY_LENGTH, False ), layout_xaxis_range=data.get_recent_plot_xaxis_range(RECENT_HISTORY_LENGTH), layout_yaxis_range=[detection_error_min_y, detection_error_max_y], ) loosely_coupled_detection_error.update_layout( title="Loosely-coupled Objects", xaxis_title="Time Stamp", yaxis_title="Loosely-coupled Detection Error", showlegend=True, ) tightly_coupled_detection_error = go.Figure( data=[ go.Scatter( x=[data.stamps[-1]] * 
2, y=[detection_error_min_y - 1, detection_error_max_y + 1], marker={"color": "red"}, ) ] + data.get_recent_values_plot_data( "tightly_coupled_detection_error", RECENT_HISTORY_LENGTH, False ), layout_xaxis_range=data.get_recent_plot_xaxis_range(RECENT_HISTORY_LENGTH), layout_yaxis_range=[detection_error_min_y, detection_error_max_y], ) tightly_coupled_detection_error.update_layout( title="Tightly-coupled Objects", xaxis_title="Time Stamp", yaxis_title="Tightly-coupled Detection Error", showlegend=True, ) # initial_motion_error_all = go.Figure( # data=[ # go.Scatter( # x=[data.stamps[-1]] * 2, # y=[initial_motion_error_min_y - 1, initial_motion_error_max_y + 1], # marker={"color": "red"}, # ) # ] # + data.get_recent_values_plot_data( # "initial_motion_error", RECENT_HISTORY_LENGTH, False # ), # layout_xaxis_range=data.get_recent_plot_xaxis_range(RECENT_HISTORY_LENGTH), # layout_yaxis_range=[initial_motion_error_min_y, initial_motion_error_max_y], # ) # initial_motion_error_all.update_layout( # title="All Objects", # xaxis_title="Time Stamp", # yaxis_title="Motion Error", # showlegend=True, # ) # initial_motion_error_tightly_coupled = go.Figure( # data=[ # go.Scatter( # x=[data.stamps[-1]] * 2, # y=[initial_motion_error_min_y - 1, initial_motion_error_max_y + 1], # marker={"color": "red"}, # ) # ] # + data.get_recent_values_plot_data( # "initial_motion_error", RECENT_HISTORY_LENGTH, True # ), # layout_xaxis_range=data.get_recent_plot_xaxis_range(RECENT_HISTORY_LENGTH), # layout_yaxis_range=[initial_motion_error_min_y, initial_motion_error_max_y], # ) # initial_motion_error_tightly_coupled.update_layout( # title="Tightly-coupled Objects", # xaxis_title="Time Stamp", # yaxis_title="Motion Error", # showlegend=True, # ) motion_error_all = go.Figure( data=[ go.Scatter( x=[data.stamps[-1]] * 2, y=[motion_error_min_y - 1, motion_error_max_y + 1], marker={"color": "red"}, ) ] + data.get_recent_values_plot_data( "motion_error", RECENT_HISTORY_LENGTH, False ), 
layout_xaxis_range=data.get_recent_plot_xaxis_range(RECENT_HISTORY_LENGTH), layout_yaxis_range=[motion_error_min_y, motion_error_max_y], ) motion_error_all.update_layout( title="All Objects", xaxis_title="Time Stamp", yaxis_title="Motion Error", showlegend=True, ) motion_error_tightly_coupled = go.Figure( data=[ go.Scatter( x=[data.stamps[-1]] * 2, y=[motion_error_min_y - 1, motion_error_max_y + 1], marker={"color": "red"}, ) ] + data.get_recent_values_plot_data("motion_error", RECENT_HISTORY_LENGTH, True), layout_xaxis_range=data.get_recent_plot_xaxis_range(RECENT_HISTORY_LENGTH), layout_yaxis_range=[motion_error_min_y, motion_error_max_y], ) motion_error_tightly_coupled.update_layout( title="Tightly-coupled Objects", xaxis_title="Time Stamp", yaxis_title="Motion Error", showlegend=True, ) return ( confidence_score_all, confidence_score_tightly_coupled, initial_loosely_coupled_detection_error, initial_tightly_coupled_detection_error, loosely_coupled_detection_error, tightly_coupled_detection_error, # initial_motion_error_all, # initial_motion_error_tightly_coupled, motion_error_all, motion_error_tightly_coupled, ) if __name__ == "__main__": ros_main_process = mp.Process(target=ros_main) ros_main_process.start() app.run_server(debug=True) ros_main_process.join() ================================================ FILE: src/factor.cpp ================================================ #include "factor.h" #include #include #include #include #include #include #include /* -------------------------------------------------------------------------- */ /* Detection */ /* -------------------------------------------------------------------------- */ Detection::Detection(Detection::BoundingBox box, gtsam::Vector6 variances) : box(box), variances(variances) { auto p = box.pose.position; auto q = box.pose.orientation; this->pose = gtsam::Pose3(gtsam::Rot3(q.w, q.x, q.y, q.z), gtsam::Point3(p.x, p.y, p.z)); } /* 
-------------------------------------------------------------------------- */ const double Detection::error(const gtsam::Pose3 x) const { gtsam::Vector errorVector = gtsam::traits::Local(this->pose, x); return sqrt(this->getDiagonal()->Mahalanobis(errorVector)); } /* -------------------------------------------------------------------------- */ std::tuple getDetectionIndexAndError(const gtsam::Pose3 &d, std::vector detections) { if (detections.size() == 0) { throw std::runtime_error("Does not receive any detection."); } size_t idx = 0; double error = detections[0].error(d); // Figure out the optimal detection index for (size_t i = 1; i < detections.size(); ++i) { double error_i = detections[i].error(d); if (error_i < error) { idx = i; error = error_i; } } return std::make_tuple(idx, error); } /* -------------------------------------------------------------------------- */ /* Tightly-Coupled Detection Factor */ /* -------------------------------------------------------------------------- */ gtsam::Vector TightlyCoupledDetectionFactor::evaluateError(const gtsam::Pose3 &robotPose, const gtsam::Pose3 &objectPose, const gtsam::Pose3 &measured, boost::optional H1, boost::optional H2) const { gtsam::Pose3 hx = gtsam::traits::Between(robotPose, objectPose, H1, H2); return gtsam::traits::Local(measured, hx); } /* -------------------------------------------------------------------------- */ gtsam::Vector TightlyCoupledDetectionFactor::unwhitenedError(const gtsam::Pose3 &measured, const gtsam::Values &x, boost::optional &> H) const { if (this->active(x)) { const gtsam::Pose3 &x1 = x.at(robotPoseKey()); const gtsam::Pose3 &x2 = x.at(objectPoseKey()); if (H) { return evaluateError(x1, x2, measured, (*H)[0], (*H)[1]); } else { return evaluateError(x1, x2, measured); } } else { return gtsam::Vector::Zero(this->dim()); } } /* -------------------------------------------------------------------------- */ boost::shared_ptr TightlyCoupledDetectionFactor::linearize(const gtsam::Values 
&c) const { // Only linearize if the factor is active if (!active(c)) return boost::shared_ptr(); // Determine which detection is used to generate the factor function, a.k.a. // the Max-Mixture model double error; const auto robotPose = c.at(this->robotPoseKey()); const auto objectPose = c.at(this->objectPoseKey()); std::tie(cachedDetectionIndex, error) = getDetectionIndexAndError(robotPose.inverse() * objectPose, this->detections); auto measured = this->detections[cachedDetectionIndex].getPose(); auto noiseModel = this->noiseModels[cachedDetectionIndex]; // Call evaluate error to get Jacobians and RHS vector b std::vector A(size()); gtsam::Vector b = -unwhitenedError(measured, c, A); if (noiseModel && b.size() != noiseModel->dim()) throw std::invalid_argument( boost::str( boost::format( "NoiseModelFactor: NoiseModel has dimension %1% instead of %2%.") % noiseModel->dim() % b.size())); // Whiten the corresponding system now if (noiseModel) noiseModel->WhitenSystem(A, b); // Fill in terms, needed to create JacobianFactor below std::vector > terms(size()); for (size_t j = 0; j < size(); ++j) { terms[j].first = keys()[j]; terms[j].second.swap(A[j]); } // TODO pass unwhitened + noise model to Gaussian factor using gtsam::noiseModel::Constrained; if (noiseModel && noiseModel->isConstrained()) return gtsam::GaussianFactor::shared_ptr( new gtsam::JacobianFactor(terms, b, boost::static_pointer_cast(noiseModel)->unit())); else return gtsam::GaussianFactor::shared_ptr(new gtsam::JacobianFactor(terms, b)); } /* -------------------------------------------------------------------------- */ double TightlyCoupledDetectionFactor::error(const gtsam::Values &c) const { if (active(c)) { // Determine which detection is used to generate the factor function, a.k.a. 
// the Max-Mixture model size_t index; double error; const auto robotPose = c.at(this->robotPoseKey()); const auto objectPose = c.at(this->objectPoseKey()); std::tie(index, error) = getDetectionIndexAndError(robotPose.inverse() * objectPose, this->detections); auto measured = this->detections[index].getPose(); auto noiseModel = this->noiseModels[index]; const gtsam::Vector b = unwhitenedError(measured, c); if (noiseModel && b.size() != noiseModel->dim()) throw std::invalid_argument( boost::str( boost::format( "NoiseModelFactor: NoiseModel has dimension %1% instead of %2%.") % noiseModel->dim() % b.size())); return 0.5 * noiseModel->distance(b); } else { return 0.0; } } /* -------------------------------------------------------------------------- */ /* Loosely-Coupled Detection Factor */ /* -------------------------------------------------------------------------- */ gtsam::Vector LooselyCoupledDetectionFactor::evaluateError(const gtsam::Pose3 &robotPose, const gtsam::Pose3 &objectPose, const gtsam::Pose3 &measured, boost::optional H1) const { gtsam::Pose3 hx = gtsam::traits::Between(robotPose, objectPose, boost::none, H1); return gtsam::traits::Local(measured, hx); } /* -------------------------------------------------------------------------- */ gtsam::Vector LooselyCoupledDetectionFactor::unwhitenedError(const gtsam::Pose3 &measured, const gtsam::Values &x, boost::optional &> H) const { if (this->active(x)) { const gtsam::Pose3 &x1 = x.at(this->robotPoseKey()); const gtsam::Pose3 &x2 = x.at(this->objectPoseKey()); if (H) { return evaluateError(x1, x2, measured, (*H)[0]); } else { return evaluateError(x1, x2, measured); } } else { return gtsam::Vector::Zero(this->dim()); } } /* -------------------------------------------------------------------------- */ boost::shared_ptr LooselyCoupledDetectionFactor::linearize(const gtsam::Values &c) const { // Only linearize if the factor is active if (!active(c)) return boost::shared_ptr(); // Determine which detection is used 
to generate the factor function, a.k.a. // the Max-Mixture model double error; const auto robotPose = c.at(this->robotPoseKey()); const auto objectPose = c.at(this->objectPoseKey()); std::tie(cachedDetectionIndex, error) = getDetectionIndexAndError(robotPose.inverse() * objectPose, this->detections); auto measured = this->detections[cachedDetectionIndex].getPose(); auto noiseModel = this->noiseModels[cachedDetectionIndex]; // Call evaluate error to get Jacobians and RHS vector b std::vector A(size()); gtsam::Vector b = -unwhitenedError(measured, c, A); if (noiseModel && b.size() != noiseModel->dim()) throw std::invalid_argument( boost::str( boost::format( "NoiseModelFactor: NoiseModel has dimension %1% instead of %2%.") % noiseModel->dim() % b.size())); // Whiten the corresponding system now if (noiseModel) noiseModel->WhitenSystem(A, b); // Fill in terms, needed to create JacobianFactor below std::vector > terms(size()); for (size_t j = 0; j < size(); ++j) { terms[j].first = keys()[j]; terms[j].second.swap(A[j]); } // TODO pass unwhitened + noise model to Gaussian factor using gtsam::noiseModel::Constrained; if (noiseModel && noiseModel->isConstrained()) return gtsam::GaussianFactor::shared_ptr( new gtsam::JacobianFactor(terms, b, boost::static_pointer_cast(noiseModel)->unit())); else return gtsam::GaussianFactor::shared_ptr(new gtsam::JacobianFactor(terms, b)); } /* -------------------------------------------------------------------------- */ double LooselyCoupledDetectionFactor::error(const gtsam::Values &c) const { if (active(c)) { // Determine which detection is used to generate the factor function, a.k.a. 
// the Max-Mixture model size_t index; double error; const auto robotPose = c.at(this->robotPoseKey()); const auto objectPose = c.at(this->objectPoseKey()); std::tie(index, error) = getDetectionIndexAndError(robotPose.inverse() * objectPose, this->detections); auto measured = this->detections[index].getPose(); auto noiseModel = this->noiseModels[index]; const gtsam::Vector b = unwhitenedError(measured, c); if (noiseModel && b.size() != noiseModel->dim()) throw std::invalid_argument( boost::str( boost::format( "NoiseModelFactor: NoiseModel has dimension %1% instead of %2%.") % noiseModel->dim() % b.size())); return 0.5 * noiseModel->distance(b); } else { return 0.0; } } /* -------------------------------------------------------------------------- */ /* Stable Pose Factor */ /* -------------------------------------------------------------------------- */ gtsam::Vector StablePoseFactor::evaluateError(const gtsam::Pose3 &previousPose, const gtsam::Pose3 &velocity, const gtsam::Pose3 &nextPose, boost::optional H1, boost::optional H2, boost::optional H3) const { auto identity = gtsam::Pose3::identity(); auto deltaPoseVec = gtsam::traits::Local(identity, velocity) * this->deltaTime; auto deltaPose = gtsam::traits::Retract(identity, deltaPoseVec); gtsam::Pose3 hx = nextPose.inverse() * previousPose * deltaPose; if (H1) *H1 = deltaPose.inverse().AdjointMap(); if (H2) *H2 = this->deltaTime * gtsam::Pose3::ExpmapDerivative(deltaPoseVec) * gtsam::Pose3::LogmapDerivative(velocity); if (H3) *H3 = -hx.inverse().AdjointMap(); return gtsam::traits::Local(identity, hx); } ================================================ FILE: src/featureExtraction.cpp ================================================ #include "lio_segmot/cloud_info.h" #include "utility.h" struct smoothness_t { float value; size_t ind; }; struct by_value { bool operator()(smoothness_t const &left, smoothness_t const &right) { return left.value < right.value; } }; class FeatureExtraction : public ParamServer { public: 
ros::Subscriber subLaserCloudInfo; ros::Publisher pubLaserCloudInfo; ros::Publisher pubCornerPoints; ros::Publisher pubSurfacePoints; pcl::PointCloud::Ptr extractedCloud; pcl::PointCloud::Ptr cornerCloud; pcl::PointCloud::Ptr surfaceCloud; pcl::VoxelGrid downSizeFilter; lio_segmot::cloud_info cloudInfo; std_msgs::Header cloudHeader; std::vector cloudSmoothness; float *cloudCurvature; int *cloudNeighborPicked; int *cloudLabel; FeatureExtraction() { subLaserCloudInfo = nh.subscribe("lio_segmot/deskew/cloud_info", 1, &FeatureExtraction::laserCloudInfoHandler, this, ros::TransportHints().tcpNoDelay()); pubLaserCloudInfo = nh.advertise("lio_segmot/feature/cloud_info", 1); pubCornerPoints = nh.advertise("lio_segmot/feature/cloud_corner", 1); pubSurfacePoints = nh.advertise("lio_segmot/feature/cloud_surface", 1); initializationValue(); } void initializationValue() { cloudSmoothness.resize(N_SCAN * Horizon_SCAN); downSizeFilter.setLeafSize(odometrySurfLeafSize, odometrySurfLeafSize, odometrySurfLeafSize); extractedCloud.reset(new pcl::PointCloud()); cornerCloud.reset(new pcl::PointCloud()); surfaceCloud.reset(new pcl::PointCloud()); cloudCurvature = new float[N_SCAN * Horizon_SCAN]; cloudNeighborPicked = new int[N_SCAN * Horizon_SCAN]; cloudLabel = new int[N_SCAN * Horizon_SCAN]; } void laserCloudInfoHandler(const lio_segmot::cloud_infoConstPtr &msgIn) { cloudInfo = *msgIn; // new cloud info cloudHeader = msgIn->header; // new cloud header pcl::fromROSMsg(msgIn->cloud_deskewed, *extractedCloud); // new cloud for extraction calculateSmoothness(); markOccludedPoints(); extractFeatures(); publishFeatureCloud(); } void calculateSmoothness() { int cloudSize = extractedCloud->points.size(); for (int i = 5; i < cloudSize - 5; i++) { float diffRange = cloudInfo.pointRange[i - 5] + cloudInfo.pointRange[i - 4] + cloudInfo.pointRange[i - 3] + cloudInfo.pointRange[i - 2] + cloudInfo.pointRange[i - 1] - cloudInfo.pointRange[i] * 10 + cloudInfo.pointRange[i + 1] + cloudInfo.pointRange[i 
+ 2] + cloudInfo.pointRange[i + 3] + cloudInfo.pointRange[i + 4] + cloudInfo.pointRange[i + 5]; cloudCurvature[i] = diffRange * diffRange; //diffX * diffX + diffY * diffY + diffZ * diffZ; cloudNeighborPicked[i] = 0; cloudLabel[i] = 0; // cloudSmoothness for sorting cloudSmoothness[i].value = cloudCurvature[i]; cloudSmoothness[i].ind = i; } } void markOccludedPoints() { int cloudSize = extractedCloud->points.size(); // mark occluded points and parallel beam points for (int i = 5; i < cloudSize - 6; ++i) { // occluded points float depth1 = cloudInfo.pointRange[i]; float depth2 = cloudInfo.pointRange[i + 1]; int columnDiff = std::abs(int(cloudInfo.pointColInd[i + 1] - cloudInfo.pointColInd[i])); if (columnDiff < 10) { // 10 pixel diff in range image if (depth1 - depth2 > 0.3) { cloudNeighborPicked[i - 5] = 1; cloudNeighborPicked[i - 4] = 1; cloudNeighborPicked[i - 3] = 1; cloudNeighborPicked[i - 2] = 1; cloudNeighborPicked[i - 1] = 1; cloudNeighborPicked[i] = 1; } else if (depth2 - depth1 > 0.3) { cloudNeighborPicked[i + 1] = 1; cloudNeighborPicked[i + 2] = 1; cloudNeighborPicked[i + 3] = 1; cloudNeighborPicked[i + 4] = 1; cloudNeighborPicked[i + 5] = 1; cloudNeighborPicked[i + 6] = 1; } } // parallel beam float diff1 = std::abs(float(cloudInfo.pointRange[i - 1] - cloudInfo.pointRange[i])); float diff2 = std::abs(float(cloudInfo.pointRange[i + 1] - cloudInfo.pointRange[i])); if (diff1 > 0.02 * cloudInfo.pointRange[i] && diff2 > 0.02 * cloudInfo.pointRange[i]) cloudNeighborPicked[i] = 1; } } void extractFeatures() { cornerCloud->clear(); surfaceCloud->clear(); pcl::PointCloud::Ptr surfaceCloudScan(new pcl::PointCloud()); pcl::PointCloud::Ptr surfaceCloudScanDS(new pcl::PointCloud()); for (int i = 0; i < N_SCAN; i++) { surfaceCloudScan->clear(); for (int j = 0; j < 6; j++) { int sp = (cloudInfo.startRingIndex[i] * (6 - j) + cloudInfo.endRingIndex[i] * j) / 6; int ep = (cloudInfo.startRingIndex[i] * (5 - j) + cloudInfo.endRingIndex[i] * (j + 1)) / 6 - 1; if (sp >= ep) 
continue; std::sort(cloudSmoothness.begin() + sp, cloudSmoothness.begin() + ep, by_value()); int largestPickedNum = 0; for (int k = ep; k >= sp; k--) { int ind = cloudSmoothness[k].ind; if (cloudNeighborPicked[ind] == 0 && cloudCurvature[ind] > edgeThreshold) { largestPickedNum++; if (largestPickedNum <= 20) { cloudLabel[ind] = 1; cornerCloud->push_back(extractedCloud->points[ind]); } else { break; } cloudNeighborPicked[ind] = 1; for (int l = 1; l <= 5; l++) { int columnDiff = std::abs(int(cloudInfo.pointColInd[ind + l] - cloudInfo.pointColInd[ind + l - 1])); if (columnDiff > 10) break; cloudNeighborPicked[ind + l] = 1; } for (int l = -1; l >= -5; l--) { int columnDiff = std::abs(int(cloudInfo.pointColInd[ind + l] - cloudInfo.pointColInd[ind + l + 1])); if (columnDiff > 10) break; cloudNeighborPicked[ind + l] = 1; } } } for (int k = sp; k <= ep; k++) { int ind = cloudSmoothness[k].ind; if (cloudNeighborPicked[ind] == 0 && cloudCurvature[ind] < surfThreshold) { cloudLabel[ind] = -1; cloudNeighborPicked[ind] = 1; for (int l = 1; l <= 5; l++) { int columnDiff = std::abs(int(cloudInfo.pointColInd[ind + l] - cloudInfo.pointColInd[ind + l - 1])); if (columnDiff > 10) break; cloudNeighborPicked[ind + l] = 1; } for (int l = -1; l >= -5; l--) { int columnDiff = std::abs(int(cloudInfo.pointColInd[ind + l] - cloudInfo.pointColInd[ind + l + 1])); if (columnDiff > 10) break; cloudNeighborPicked[ind + l] = 1; } } } for (int k = sp; k <= ep; k++) { if (cloudLabel[k] <= 0) { surfaceCloudScan->push_back(extractedCloud->points[k]); } } } surfaceCloudScanDS->clear(); downSizeFilter.setInputCloud(surfaceCloudScan); downSizeFilter.filter(*surfaceCloudScanDS); *surfaceCloud += *surfaceCloudScanDS; } } void freeCloudInfoMemory() { cloudInfo.startRingIndex.clear(); cloudInfo.endRingIndex.clear(); cloudInfo.pointColInd.clear(); cloudInfo.pointRange.clear(); } void publishFeatureCloud() { // free cloud info memory freeCloudInfoMemory(); // save newly extracted features 
cloudInfo.cloud_corner = publishCloud(&pubCornerPoints, cornerCloud, cloudHeader.stamp, lidarFrame); cloudInfo.cloud_surface = publishCloud(&pubSurfacePoints, surfaceCloud, cloudHeader.stamp, lidarFrame); // publish to mapOptimization pubLaserCloudInfo.publish(cloudInfo); } }; int main(int argc, char **argv) { ros::init(argc, argv, "lio_segmot"); FeatureExtraction FE; ROS_INFO("\033[1;32m----> Feature Extraction Started.\033[0m"); ros::spin(); return 0; } ================================================ FILE: src/imageProjection.cpp ================================================ #include "lio_segmot/cloud_info.h" #include "utility.h" struct VelodynePointXYZIRT { PCL_ADD_POINT4D PCL_ADD_INTENSITY; uint16_t ring; float time; EIGEN_MAKE_ALIGNED_OPERATOR_NEW } EIGEN_ALIGN16; POINT_CLOUD_REGISTER_POINT_STRUCT(VelodynePointXYZIRT, (float, x, x)(float, y, y)(float, z, z)(float, intensity, intensity)(uint16_t, ring, ring)(float, time, time)) struct OusterPointXYZIRT { PCL_ADD_POINT4D; float intensity; uint32_t t; uint16_t reflectivity; uint8_t ring; uint16_t noise; uint32_t range; EIGEN_MAKE_ALIGNED_OPERATOR_NEW } EIGEN_ALIGN16; POINT_CLOUD_REGISTER_POINT_STRUCT(OusterPointXYZIRT, (float, x, x)(float, y, y)(float, z, z)(float, intensity, intensity)(uint32_t, t, t)(uint16_t, reflectivity, reflectivity)(uint8_t, ring, ring)(uint16_t, noise, noise)(uint32_t, range, range)) // Use the Velodyne point format as a common representation using PointXYZIRT = VelodynePointXYZIRT; const int queueLength = 2000; class ImageProjection : public ParamServer { private: std::mutex imuLock; std::mutex odoLock; ros::Subscriber subLaserCloud; ros::Publisher pubLaserCloud; ros::Publisher pubExtractedCloud; ros::Publisher pubLaserCloudInfo; ros::Publisher pubReady; ros::Subscriber subImu; std::deque imuQueue; ros::Subscriber subOdom; std::deque odomQueue; std::deque cloudQueue; sensor_msgs::PointCloud2 currentCloudMsg; double *imuTime = new double[queueLength]; double *imuRotX = new 
double[queueLength]; double *imuRotY = new double[queueLength]; double *imuRotZ = new double[queueLength]; int imuPointerCur; bool firstPointFlag; Eigen::Affine3f transStartInverse; pcl::PointCloud::Ptr laserCloudIn; pcl::PointCloud::Ptr rawCloudIn; pcl::PointCloud::Ptr tmpOusterCloudIn; pcl::PointCloud::Ptr rawCloud; pcl::PointCloud::Ptr fullCloud; pcl::PointCloud::Ptr extractedCloud; int deskewFlag; cv::Mat rangeMat; bool odomDeskewFlag; float odomIncreX; float odomIncreY; float odomIncreZ; lio_segmot::cloud_info cloudInfo; double timeScanCur; double timeScanEnd; std_msgs::Header cloudHeader; public: ImageProjection() : deskewFlag(0) { subImu = nh.subscribe(imuTopic, 2000, &ImageProjection::imuHandler, this, ros::TransportHints().tcpNoDelay()); subOdom = nh.subscribe(odomTopic + "_incremental", 2000, &ImageProjection::odometryHandler, this, ros::TransportHints().tcpNoDelay()); subLaserCloud = nh.subscribe(pointCloudTopic, 5, &ImageProjection::cloudHandler, this, ros::TransportHints().tcpNoDelay()); pubExtractedCloud = nh.advertise("lio_segmot/deskew/cloud_deskewed", 1); pubLaserCloudInfo = nh.advertise("lio_segmot/deskew/cloud_info", 1); pubReady = nh.advertise("lio_segmot/ready", 1); allocateMemory(); resetParameters(); pcl::console::setVerbosityLevel(pcl::console::L_ERROR); } void allocateMemory() { laserCloudIn.reset(new pcl::PointCloud()); rawCloudIn.reset(new pcl::PointCloud()); tmpOusterCloudIn.reset(new pcl::PointCloud()); rawCloud.reset(new pcl::PointCloud()); fullCloud.reset(new pcl::PointCloud()); extractedCloud.reset(new pcl::PointCloud()); fullCloud->points.resize(N_SCAN * Horizon_SCAN); cloudInfo.startRingIndex.assign(N_SCAN, 0); cloudInfo.endRingIndex.assign(N_SCAN, 0); cloudInfo.pointColInd.assign(N_SCAN * Horizon_SCAN, 0); cloudInfo.pointRange.assign(N_SCAN * Horizon_SCAN, 0); resetParameters(); } void resetParameters() { laserCloudIn->clear(); extractedCloud->clear(); // reset range matrix for range image projection rangeMat = cv::Mat(N_SCAN, 
Horizon_SCAN, CV_32F, cv::Scalar::all(FLT_MAX)); imuPointerCur = 0; firstPointFlag = true; odomDeskewFlag = false; for (int i = 0; i < queueLength; ++i) { imuTime[i] = 0; imuRotX[i] = 0; imuRotY[i] = 0; imuRotZ[i] = 0; } } ~ImageProjection() {} void imuHandler(const sensor_msgs::Imu::ConstPtr &imuMsg) { sensor_msgs::Imu thisImu = imuConverter(*imuMsg); std::lock_guard lock1(imuLock); imuQueue.push_back(thisImu); // debug IMU data // cout << std::setprecision(6); // cout << "IMU acc: " << endl; // cout << "x: " << thisImu.linear_acceleration.x << // ", y: " << thisImu.linear_acceleration.y << // ", z: " << thisImu.linear_acceleration.z << endl; // cout << "IMU gyro: " << endl; // cout << "x: " << thisImu.angular_velocity.x << // ", y: " << thisImu.angular_velocity.y << // ", z: " << thisImu.angular_velocity.z << endl; // double imuRoll, imuPitch, imuYaw; // tf::Quaternion orientation; // tf::quaternionMsgToTF(thisImu.orientation, orientation); // tf::Matrix3x3(orientation).getRPY(imuRoll, imuPitch, imuYaw); // cout << "IMU roll pitch yaw: " << endl; // cout << "roll: " << imuRoll << ", pitch: " << imuPitch << ", yaw: " << imuYaw << endl << endl; } void odometryHandler(const nav_msgs::Odometry::ConstPtr &odometryMsg) { std::lock_guard lock2(odoLock); odomQueue.push_back(*odometryMsg); } void cloudHandler(const sensor_msgs::PointCloud2ConstPtr &laserCloudMsg) { if (!cachePointCloud(laserCloudMsg) || !deskewInfo()) { pubReady.publish(std_msgs::Empty()); return; } projectPointCloud(); cloudExtraction(); publishClouds(); resetParameters(); } bool cachePointCloud(const sensor_msgs::PointCloud2ConstPtr &laserCloudMsg) { // cache point cloud cloudQueue.push_back(*laserCloudMsg); if (cloudQueue.size() <= 2) return false; // convert cloud currentCloudMsg = std::move(cloudQueue.front()); cloudQueue.pop_front(); if (sensor == SensorType::VELODYNE) { pcl::moveFromROSMsg(currentCloudMsg, *rawCloudIn); *laserCloudIn += *rawCloudIn; rawCloud->clear(); for (const auto &p : 
*rawCloudIn) { PointType xyzi; xyzi.x = p.x; xyzi.y = p.y; xyzi.z = p.z; xyzi.intensity = p.intensity; rawCloud->push_back(xyzi); } } else if (sensor == SensorType::OUSTER) { // Convert to Velodyne format pcl::moveFromROSMsg(currentCloudMsg, *tmpOusterCloudIn); laserCloudIn->points.resize(tmpOusterCloudIn->size()); laserCloudIn->is_dense = tmpOusterCloudIn->is_dense; for (size_t i = 0; i < tmpOusterCloudIn->size(); i++) { auto &src = tmpOusterCloudIn->points[i]; auto &dst = laserCloudIn->points[i]; dst.x = src.x; dst.y = src.y; dst.z = src.z; dst.intensity = src.intensity; dst.ring = src.ring; dst.time = src.t * 1e-9f; } } else { ROS_ERROR_STREAM("Unknown sensor type: " << int(sensor)); ros::shutdown(); } // get timestamp cloudHeader = currentCloudMsg.header; timeScanCur = cloudHeader.stamp.toSec(); timeScanEnd = timeScanCur + laserCloudIn->points.back().time; // check dense flag if (laserCloudIn->is_dense == false) { ROS_ERROR("Point cloud is not in dense format, please remove NaN points first!"); ros::shutdown(); } // check ring channel static int ringFlag = 0; if (ringFlag == 0) { ringFlag = -1; for (int i = 0; i < (int)currentCloudMsg.fields.size(); ++i) { if (currentCloudMsg.fields[i].name == "ring") { ringFlag = 1; break; } } if (ringFlag == -1) { ROS_ERROR("Point cloud ring channel not available, please configure your point cloud data!"); ros::shutdown(); } } // check point time if (deskewFlag == 0) { deskewFlag = -1; for (auto &field : currentCloudMsg.fields) { if (field.name == "time" || field.name == "t") { deskewFlag = 1; break; } } if (deskewFlag == -1) ROS_WARN("Point cloud timestamp not available, deskew function disabled, system will drift significantly!"); } return true; } bool deskewInfo() { std::lock_guard lock1(imuLock); std::lock_guard lock2(odoLock); // make sure IMU data available for the scan if (imuQueue.empty() || imuQueue.front().header.stamp.toSec() > timeScanCur || imuQueue.back().header.stamp.toSec() < timeScanEnd) { ROS_DEBUG("Waiting 
for IMU data ..."); return false; } imuDeskewInfo(); odomDeskewInfo(); return true; } void imuDeskewInfo() { cloudInfo.imuAvailable = false; while (!imuQueue.empty()) { if (imuQueue.front().header.stamp.toSec() < timeScanCur - 0.01) imuQueue.pop_front(); else break; } if (imuQueue.empty()) return; imuPointerCur = 0; for (int i = 0; i < (int)imuQueue.size(); ++i) { sensor_msgs::Imu thisImuMsg = imuQueue[i]; double currentImuTime = thisImuMsg.header.stamp.toSec(); // get roll, pitch, and yaw estimation for this scan if (currentImuTime <= timeScanCur) imuRPY2rosRPY(&thisImuMsg, &cloudInfo.imuRollInit, &cloudInfo.imuPitchInit, &cloudInfo.imuYawInit); if (currentImuTime > timeScanEnd + 0.01) break; if (imuPointerCur == 0) { imuRotX[0] = 0; imuRotY[0] = 0; imuRotZ[0] = 0; imuTime[0] = currentImuTime; ++imuPointerCur; continue; } // get angular velocity double angular_x, angular_y, angular_z; imuAngular2rosAngular(&thisImuMsg, &angular_x, &angular_y, &angular_z); // integrate rotation double timeDiff = currentImuTime - imuTime[imuPointerCur - 1]; imuRotX[imuPointerCur] = imuRotX[imuPointerCur - 1] + angular_x * timeDiff; imuRotY[imuPointerCur] = imuRotY[imuPointerCur - 1] + angular_y * timeDiff; imuRotZ[imuPointerCur] = imuRotZ[imuPointerCur - 1] + angular_z * timeDiff; imuTime[imuPointerCur] = currentImuTime; ++imuPointerCur; } --imuPointerCur; if (imuPointerCur <= 0) return; cloudInfo.imuAvailable = true; } void odomDeskewInfo() { cloudInfo.odomAvailable = false; while (!odomQueue.empty()) { if (odomQueue.front().header.stamp.toSec() < timeScanCur - 0.01) odomQueue.pop_front(); else break; } if (odomQueue.empty()) return; if (odomQueue.front().header.stamp.toSec() > timeScanCur) return; // get start odometry at the beinning of the scan nav_msgs::Odometry startOdomMsg; for (int i = 0; i < (int)odomQueue.size(); ++i) { startOdomMsg = odomQueue[i]; if (ROS_TIME(&startOdomMsg) < timeScanCur) continue; else break; } tf::Quaternion orientation; 
tf::quaternionMsgToTF(startOdomMsg.pose.pose.orientation, orientation); double roll, pitch, yaw; tf::Matrix3x3(orientation).getRPY(roll, pitch, yaw); // Initial guess used in mapOptimization cloudInfo.initialGuessX = startOdomMsg.pose.pose.position.x; cloudInfo.initialGuessY = startOdomMsg.pose.pose.position.y; cloudInfo.initialGuessZ = startOdomMsg.pose.pose.position.z; cloudInfo.initialGuessRoll = roll; cloudInfo.initialGuessPitch = pitch; cloudInfo.initialGuessYaw = yaw; cloudInfo.odomAvailable = true; // get end odometry at the end of the scan odomDeskewFlag = false; if (odomQueue.back().header.stamp.toSec() < timeScanEnd) return; nav_msgs::Odometry endOdomMsg; for (int i = 0; i < (int)odomQueue.size(); ++i) { endOdomMsg = odomQueue[i]; if (ROS_TIME(&endOdomMsg) < timeScanEnd) continue; else break; } if (int(round(startOdomMsg.pose.covariance[0])) != int(round(endOdomMsg.pose.covariance[0]))) return; Eigen::Affine3f transBegin = pcl::getTransformation(startOdomMsg.pose.pose.position.x, startOdomMsg.pose.pose.position.y, startOdomMsg.pose.pose.position.z, roll, pitch, yaw); tf::quaternionMsgToTF(endOdomMsg.pose.pose.orientation, orientation); tf::Matrix3x3(orientation).getRPY(roll, pitch, yaw); Eigen::Affine3f transEnd = pcl::getTransformation(endOdomMsg.pose.pose.position.x, endOdomMsg.pose.pose.position.y, endOdomMsg.pose.pose.position.z, roll, pitch, yaw); Eigen::Affine3f transBt = transBegin.inverse() * transEnd; float rollIncre, pitchIncre, yawIncre; pcl::getTranslationAndEulerAngles(transBt, odomIncreX, odomIncreY, odomIncreZ, rollIncre, pitchIncre, yawIncre); odomDeskewFlag = true; } void findRotation(double pointTime, float *rotXCur, float *rotYCur, float *rotZCur) { *rotXCur = 0; *rotYCur = 0; *rotZCur = 0; int imuPointerFront = 0; while (imuPointerFront < imuPointerCur) { if (pointTime < imuTime[imuPointerFront]) break; ++imuPointerFront; } if (pointTime > imuTime[imuPointerFront] || imuPointerFront == 0) { *rotXCur = imuRotX[imuPointerFront]; *rotYCur 
= imuRotY[imuPointerFront]; *rotZCur = imuRotZ[imuPointerFront]; } else { int imuPointerBack = imuPointerFront - 1; double ratioFront = (pointTime - imuTime[imuPointerBack]) / (imuTime[imuPointerFront] - imuTime[imuPointerBack]); double ratioBack = (imuTime[imuPointerFront] - pointTime) / (imuTime[imuPointerFront] - imuTime[imuPointerBack]); *rotXCur = imuRotX[imuPointerFront] * ratioFront + imuRotX[imuPointerBack] * ratioBack; *rotYCur = imuRotY[imuPointerFront] * ratioFront + imuRotY[imuPointerBack] * ratioBack; *rotZCur = imuRotZ[imuPointerFront] * ratioFront + imuRotZ[imuPointerBack] * ratioBack; } } void findPosition(double relTime, float *posXCur, float *posYCur, float *posZCur) { *posXCur = 0; *posYCur = 0; *posZCur = 0; // If the sensor moves relatively slow, like walking speed, positional deskew seems to have little benefits. Thus code below is commented. // if (cloudInfo.odomAvailable == false || odomDeskewFlag == false) // return; // float ratio = relTime / (timeScanEnd - timeScanCur); // *posXCur = ratio * odomIncreX; // *posYCur = ratio * odomIncreY; // *posZCur = ratio * odomIncreZ; } PointType deskewPoint(PointType *point, double relTime) { if (deskewFlag == -1 || cloudInfo.imuAvailable == false) return *point; double pointTime = timeScanCur + relTime; float rotXCur, rotYCur, rotZCur; findRotation(pointTime, &rotXCur, &rotYCur, &rotZCur); float posXCur, posYCur, posZCur; findPosition(relTime, &posXCur, &posYCur, &posZCur); if (firstPointFlag == true) { transStartInverse = (pcl::getTransformation(posXCur, posYCur, posZCur, rotXCur, rotYCur, rotZCur)).inverse(); firstPointFlag = false; } // transform points to start Eigen::Affine3f transFinal = pcl::getTransformation(posXCur, posYCur, posZCur, rotXCur, rotYCur, rotZCur); Eigen::Affine3f transBt = transStartInverse * transFinal; PointType newPoint; newPoint.x = transBt(0, 0) * point->x + transBt(0, 1) * point->y + transBt(0, 2) * point->z + transBt(0, 3); newPoint.y = transBt(1, 0) * point->x + 
transBt(1, 1) * point->y + transBt(1, 2) * point->z + transBt(1, 3); newPoint.z = transBt(2, 0) * point->x + transBt(2, 1) * point->y + transBt(2, 2) * point->z + transBt(2, 3); newPoint.intensity = point->intensity; return newPoint; } void projectPointCloud() { int cloudSize = laserCloudIn->points.size(); // range image projection for (int i = 0; i < cloudSize; ++i) { PointType thisPoint; thisPoint.x = laserCloudIn->points[i].x; thisPoint.y = laserCloudIn->points[i].y; thisPoint.z = laserCloudIn->points[i].z; thisPoint.intensity = laserCloudIn->points[i].intensity; float range = pointDistance(thisPoint); if (range < lidarMinRange || range > lidarMaxRange) continue; int rowIdn = laserCloudIn->points[i].ring; if (rowIdn < 0 || rowIdn >= N_SCAN) continue; if (rowIdn % downsampleRate != 0) continue; float horizonAngle = atan2(thisPoint.x, thisPoint.y) * 180 / M_PI; static float ang_res_x = 360.0 / float(Horizon_SCAN); int columnIdn = -round((horizonAngle - 90.0) / ang_res_x) + Horizon_SCAN / 2; if (columnIdn >= Horizon_SCAN) columnIdn -= Horizon_SCAN; if (columnIdn < 0 || columnIdn >= Horizon_SCAN) continue; if (rangeMat.at(rowIdn, columnIdn) != FLT_MAX) continue; thisPoint = deskewPoint(&thisPoint, laserCloudIn->points[i].time); rangeMat.at(rowIdn, columnIdn) = range; int index = columnIdn + rowIdn * Horizon_SCAN; fullCloud->points[index] = thisPoint; } } void cloudExtraction() { int count = 0; // extract segmented cloud for lidar odometry for (int i = 0; i < N_SCAN; ++i) { cloudInfo.startRingIndex[i] = count - 1 + 5; for (int j = 0; j < Horizon_SCAN; ++j) { if (rangeMat.at(i, j) != FLT_MAX) { // mark the points' column index for marking occlusion later cloudInfo.pointColInd[count] = j; // save range info cloudInfo.pointRange[count] = rangeMat.at(i, j); // save extracted cloud extractedCloud->push_back(fullCloud->points[j + i * Horizon_SCAN]); // size of extracted cloud ++count; } } cloudInfo.endRingIndex[i] = count - 1 - 5; } } void publishClouds() { 
cloudInfo.header = cloudHeader; cloudInfo.cloud_raw = publishCloud(&pubExtractedCloud, rawCloud, cloudHeader.stamp, lidarFrame); cloudInfo.cloud_deskewed = publishCloud(&pubExtractedCloud, extractedCloud, cloudHeader.stamp, lidarFrame); pubLaserCloudInfo.publish(cloudInfo); } }; int main(int argc, char **argv) { ros::init(argc, argv, "lio_segmot"); ImageProjection IP; ROS_INFO("\033[1;32m----> Image Projection Started.\033[0m"); ros::MultiThreadedSpinner spinner(3); spinner.spin(); return 0; } ================================================ FILE: src/imuPreintegration.cpp ================================================ #include "utility.h" #include #include #include #include #include #include #include #include #include #include #include #include #include #include using gtsam::symbol_shorthand::B; // Bias (ax,ay,az,gx,gy,gz) using gtsam::symbol_shorthand::V; // Vel (xdot,ydot,zdot) using gtsam::symbol_shorthand::X; // Pose3 (x,y,z,r,p,y) class TransformFusion : public ParamServer { public: std::mutex mtx; ros::Subscriber subImuOdometry; ros::Subscriber subLaserOdometry; ros::Publisher pubImuOdometry; ros::Publisher pubImuPath; Eigen::Affine3f lidarOdomAffine; Eigen::Affine3f imuOdomAffineFront; Eigen::Affine3f imuOdomAffineBack; tf::TransformListener tfListener; tf::StampedTransform lidar2Baselink; double lidarOdomTime = -1; deque imuOdomQueue; TransformFusion() { if (lidarFrame != baselinkFrame) { try { tfListener.waitForTransform(lidarFrame, baselinkFrame, ros::Time(0), ros::Duration(3.0)); tfListener.lookupTransform(lidarFrame, baselinkFrame, ros::Time(0), lidar2Baselink); } catch (tf::TransformException ex) { ROS_ERROR("%s", ex.what()); } } subLaserOdometry = nh.subscribe("lio_segmot/mapping/odometry", 5, &TransformFusion::lidarOdometryHandler, this, ros::TransportHints().tcpNoDelay()); subImuOdometry = nh.subscribe(odomTopic + "_incremental", 2000, &TransformFusion::imuOdometryHandler, this, ros::TransportHints().tcpNoDelay()); pubImuOdometry = 
nh.advertise(odomTopic, 2000); pubImuPath = nh.advertise("lio_segmot/imu/path", 1); } Eigen::Affine3f odom2affine(nav_msgs::Odometry odom) { double x, y, z, roll, pitch, yaw; x = odom.pose.pose.position.x; y = odom.pose.pose.position.y; z = odom.pose.pose.position.z; tf::Quaternion orientation; tf::quaternionMsgToTF(odom.pose.pose.orientation, orientation); tf::Matrix3x3(orientation).getRPY(roll, pitch, yaw); return pcl::getTransformation(x, y, z, roll, pitch, yaw); } void lidarOdometryHandler(const nav_msgs::Odometry::ConstPtr& odomMsg) { std::lock_guard lock(mtx); lidarOdomAffine = odom2affine(*odomMsg); lidarOdomTime = odomMsg->header.stamp.toSec(); } void imuOdometryHandler(const nav_msgs::Odometry::ConstPtr& odomMsg) { // static tf static tf::TransformBroadcaster tfMap2Odom; static tf::Transform map_to_odom = tf::Transform(tf::createQuaternionFromRPY(0, 0, 0), tf::Vector3(0, 0, 0)); tfMap2Odom.sendTransform(tf::StampedTransform(map_to_odom, odomMsg->header.stamp, mapFrame, odometryFrame)); std::lock_guard lock(mtx); imuOdomQueue.push_back(*odomMsg); // get latest odometry (at current IMU stamp) if (lidarOdomTime == -1) return; while (!imuOdomQueue.empty()) { if (imuOdomQueue.front().header.stamp.toSec() <= lidarOdomTime) imuOdomQueue.pop_front(); else break; } Eigen::Affine3f imuOdomAffineFront = odom2affine(imuOdomQueue.front()); Eigen::Affine3f imuOdomAffineBack = odom2affine(imuOdomQueue.back()); Eigen::Affine3f imuOdomAffineIncre = imuOdomAffineFront.inverse() * imuOdomAffineBack; Eigen::Affine3f imuOdomAffineLast = lidarOdomAffine * imuOdomAffineIncre; float x, y, z, roll, pitch, yaw; pcl::getTranslationAndEulerAngles(imuOdomAffineLast, x, y, z, roll, pitch, yaw); // publish latest odometry nav_msgs::Odometry laserOdometry = imuOdomQueue.back(); laserOdometry.pose.pose.position.x = x; laserOdometry.pose.pose.position.y = y; laserOdometry.pose.pose.position.z = z; laserOdometry.pose.pose.orientation = tf::createQuaternionMsgFromRollPitchYaw(roll, pitch, 
yaw); pubImuOdometry.publish(laserOdometry); // publish tf static tf::TransformBroadcaster tfOdom2BaseLink; tf::Transform tCur; tf::poseMsgToTF(laserOdometry.pose.pose, tCur); if (lidarFrame != baselinkFrame) tCur = tCur * lidar2Baselink; tf::StampedTransform odom_2_baselink = tf::StampedTransform(tCur, odomMsg->header.stamp, odometryFrame, baselinkFrame); tfOdom2BaseLink.sendTransform(odom_2_baselink); // publish IMU path static nav_msgs::Path imuPath; static double last_path_time = -1; double imuTime = imuOdomQueue.back().header.stamp.toSec(); if (imuTime - last_path_time > 0.1) { last_path_time = imuTime; geometry_msgs::PoseStamped pose_stamped; pose_stamped.header.stamp = imuOdomQueue.back().header.stamp; pose_stamped.header.frame_id = odometryFrame; pose_stamped.pose = laserOdometry.pose.pose; imuPath.poses.push_back(pose_stamped); while (!imuPath.poses.empty() && imuPath.poses.front().header.stamp.toSec() < lidarOdomTime - 1.0) imuPath.poses.erase(imuPath.poses.begin()); if (pubImuPath.getNumSubscribers() != 0) { imuPath.header.stamp = imuOdomQueue.back().header.stamp; imuPath.header.frame_id = odometryFrame; pubImuPath.publish(imuPath); } } } }; class IMUPreintegration : public ParamServer { public: std::mutex mtx; ros::Subscriber subImu; ros::Subscriber subOdometry; ros::Publisher pubImuOdometry; bool systemInitialized = false; gtsam::noiseModel::Diagonal::shared_ptr priorPoseNoise; gtsam::noiseModel::Diagonal::shared_ptr priorVelNoise; gtsam::noiseModel::Diagonal::shared_ptr priorBiasNoise; gtsam::noiseModel::Diagonal::shared_ptr correctionNoise; gtsam::noiseModel::Diagonal::shared_ptr correctionNoise2; gtsam::Vector noiseModelBetweenBias; gtsam::PreintegratedImuMeasurements* imuIntegratorOpt_; gtsam::PreintegratedImuMeasurements* imuIntegratorImu_; std::deque imuQueOpt; std::deque imuQueImu; gtsam::Pose3 prevPose_; gtsam::Vector3 prevVel_; gtsam::NavState prevState_; gtsam::imuBias::ConstantBias prevBias_; gtsam::NavState prevStateOdom; 
gtsam::imuBias::ConstantBias prevBiasOdom; bool doneFirstOpt = false; double lastImuT_imu = -1; double lastImuT_opt = -1; gtsam::ISAM2 optimizer; gtsam::NonlinearFactorGraph graphFactors; gtsam::Values graphValues; const double delta_t = 0; int key = 1; gtsam::Pose3 imu2Lidar = gtsam::Pose3(gtsam::Rot3(1, 0, 0, 0), gtsam::Point3(-extTrans.x(), -extTrans.y(), -extTrans.z())); gtsam::Pose3 lidar2Imu = gtsam::Pose3(gtsam::Rot3(1, 0, 0, 0), gtsam::Point3(extTrans.x(), extTrans.y(), extTrans.z())); IMUPreintegration() { subImu = nh.subscribe(imuTopic, 2000, &IMUPreintegration::imuHandler, this, ros::TransportHints().tcpNoDelay()); subOdometry = nh.subscribe("lio_segmot/mapping/odometry_incremental", 5, &IMUPreintegration::odometryHandler, this, ros::TransportHints().tcpNoDelay()); pubImuOdometry = nh.advertise(odomTopic + "_incremental", 2000); boost::shared_ptr p = gtsam::PreintegrationParams::MakeSharedU(imuGravity); p->accelerometerCovariance = gtsam::Matrix33::Identity(3, 3) * pow(imuAccNoise, 2); // acc white noise in continuous p->gyroscopeCovariance = gtsam::Matrix33::Identity(3, 3) * pow(imuGyrNoise, 2); // gyro white noise in continuous p->integrationCovariance = gtsam::Matrix33::Identity(3, 3) * pow(1e-4, 2); // error committed in integrating position from velocities gtsam::imuBias::ConstantBias prior_imu_bias((gtsam::Vector(6) << 0, 0, 0, 0, 0, 0).finished()); ; // assume zero initial bias priorPoseNoise = gtsam::noiseModel::Diagonal::Sigmas((gtsam::Vector(6) << 1e-2, 1e-2, 1e-2, 1e-2, 1e-2, 1e-2).finished()); // rad,rad,rad,m, m, m priorVelNoise = gtsam::noiseModel::Isotropic::Sigma(3, 1e4); // m/s priorBiasNoise = gtsam::noiseModel::Isotropic::Sigma(6, 1e-3); // 1e-2 ~ 1e-3 seems to be good correctionNoise = gtsam::noiseModel::Diagonal::Sigmas((gtsam::Vector(6) << 0.05, 0.05, 0.05, 0.1, 0.1, 0.1).finished()); // rad,rad,rad,m, m, m correctionNoise2 = gtsam::noiseModel::Diagonal::Sigmas((gtsam::Vector(6) << 1, 1, 1, 1, 1, 1).finished()); // rad,rad,rad,m, m, 
m noiseModelBetweenBias = (gtsam::Vector(6) << imuAccBiasN, imuAccBiasN, imuAccBiasN, imuGyrBiasN, imuGyrBiasN, imuGyrBiasN).finished(); imuIntegratorImu_ = new gtsam::PreintegratedImuMeasurements(p, prior_imu_bias); // setting up the IMU integration for IMU message thread imuIntegratorOpt_ = new gtsam::PreintegratedImuMeasurements(p, prior_imu_bias); // setting up the IMU integration for optimization } void resetOptimization() { gtsam::ISAM2Params optParameters; optParameters.relinearizeThreshold = 0.1; optParameters.relinearizeSkip = 1; optimizer = gtsam::ISAM2(optParameters); gtsam::NonlinearFactorGraph newGraphFactors; graphFactors = newGraphFactors; gtsam::Values NewGraphValues; graphValues = NewGraphValues; } void resetParams() { lastImuT_imu = -1; doneFirstOpt = false; systemInitialized = false; } void odometryHandler(const nav_msgs::Odometry::ConstPtr& odomMsg) { std::lock_guard lock(mtx); double currentCorrectionTime = ROS_TIME(odomMsg); // make sure we have imu data to integrate if (imuQueOpt.empty()) return; float p_x = odomMsg->pose.pose.position.x; float p_y = odomMsg->pose.pose.position.y; float p_z = odomMsg->pose.pose.position.z; float r_x = odomMsg->pose.pose.orientation.x; float r_y = odomMsg->pose.pose.orientation.y; float r_z = odomMsg->pose.pose.orientation.z; float r_w = odomMsg->pose.pose.orientation.w; bool degenerate = (int)odomMsg->pose.covariance[0] == 1 ? true : false; gtsam::Pose3 lidarPose = gtsam::Pose3(gtsam::Rot3::Quaternion(r_w, r_x, r_y, r_z), gtsam::Point3(p_x, p_y, p_z)); // 0. 
initialize system if (systemInitialized == false) { resetOptimization(); // pop old IMU message while (!imuQueOpt.empty()) { if (ROS_TIME(&imuQueOpt.front()) < currentCorrectionTime - delta_t) { lastImuT_opt = ROS_TIME(&imuQueOpt.front()); imuQueOpt.pop_front(); } else break; } // initial pose prevPose_ = lidarPose.compose(lidar2Imu); gtsam::PriorFactor priorPose(X(0), prevPose_, priorPoseNoise); graphFactors.add(priorPose); // initial velocity prevVel_ = gtsam::Vector3(0, 0, 0); gtsam::PriorFactor priorVel(V(0), prevVel_, priorVelNoise); graphFactors.add(priorVel); // initial bias prevBias_ = gtsam::imuBias::ConstantBias(); gtsam::PriorFactor priorBias(B(0), prevBias_, priorBiasNoise); graphFactors.add(priorBias); // add values graphValues.insert(X(0), prevPose_); graphValues.insert(V(0), prevVel_); graphValues.insert(B(0), prevBias_); // optimize once optimizer.update(graphFactors, graphValues); graphFactors.resize(0); graphValues.clear(); imuIntegratorImu_->resetIntegrationAndSetBias(prevBias_); imuIntegratorOpt_->resetIntegrationAndSetBias(prevBias_); key = 1; systemInitialized = true; return; } // reset graph for speed if (key == 100) { // get updated noise before reset gtsam::noiseModel::Gaussian::shared_ptr updatedPoseNoise = gtsam::noiseModel::Gaussian::Covariance(optimizer.marginalCovariance(X(key - 1))); gtsam::noiseModel::Gaussian::shared_ptr updatedVelNoise = gtsam::noiseModel::Gaussian::Covariance(optimizer.marginalCovariance(V(key - 1))); gtsam::noiseModel::Gaussian::shared_ptr updatedBiasNoise = gtsam::noiseModel::Gaussian::Covariance(optimizer.marginalCovariance(B(key - 1))); // reset graph resetOptimization(); // add pose gtsam::PriorFactor priorPose(X(0), prevPose_, updatedPoseNoise); graphFactors.add(priorPose); // add velocity gtsam::PriorFactor priorVel(V(0), prevVel_, updatedVelNoise); graphFactors.add(priorVel); // add bias gtsam::PriorFactor priorBias(B(0), prevBias_, updatedBiasNoise); graphFactors.add(priorBias); // add values 
graphValues.insert(X(0), prevPose_); graphValues.insert(V(0), prevVel_); graphValues.insert(B(0), prevBias_); // optimize once optimizer.update(graphFactors, graphValues); graphFactors.resize(0); graphValues.clear(); key = 1; } // 1. integrate imu data and optimize while (!imuQueOpt.empty()) { // pop and integrate imu data that is between two optimizations sensor_msgs::Imu* thisImu = &imuQueOpt.front(); double imuTime = ROS_TIME(thisImu); if (imuTime < currentCorrectionTime - delta_t) { double dt = (lastImuT_opt < 0) ? (1.0 / 500.0) : (imuTime - lastImuT_opt); imuIntegratorOpt_->integrateMeasurement( gtsam::Vector3(thisImu->linear_acceleration.x, thisImu->linear_acceleration.y, thisImu->linear_acceleration.z), gtsam::Vector3(thisImu->angular_velocity.x, thisImu->angular_velocity.y, thisImu->angular_velocity.z), dt); lastImuT_opt = imuTime; imuQueOpt.pop_front(); } else break; } // add imu factor to graph const gtsam::PreintegratedImuMeasurements& preint_imu = dynamic_cast(*imuIntegratorOpt_); gtsam::ImuFactor imu_factor(X(key - 1), V(key - 1), X(key), V(key), B(key - 1), preint_imu); graphFactors.add(imu_factor); // add imu bias between factor graphFactors.add(gtsam::BetweenFactor(B(key - 1), B(key), gtsam::imuBias::ConstantBias(), gtsam::noiseModel::Diagonal::Sigmas(sqrt(imuIntegratorOpt_->deltaTij()) * noiseModelBetweenBias))); // add pose factor gtsam::Pose3 curPose = lidarPose.compose(lidar2Imu); gtsam::PriorFactor pose_factor(X(key), curPose, degenerate ? correctionNoise2 : correctionNoise); graphFactors.add(pose_factor); // insert predicted values gtsam::NavState propState_ = imuIntegratorOpt_->predict(prevState_, prevBias_); graphValues.insert(X(key), propState_.pose()); graphValues.insert(V(key), propState_.v()); graphValues.insert(B(key), prevBias_); // optimize optimizer.update(graphFactors, graphValues); optimizer.update(); graphFactors.resize(0); graphValues.clear(); // Overwrite the beginning of the preintegration for the next step. 
gtsam::Values result = optimizer.calculateEstimate(); prevPose_ = result.at(X(key)); prevVel_ = result.at(V(key)); prevState_ = gtsam::NavState(prevPose_, prevVel_); prevBias_ = result.at(B(key)); // Reset the optimization preintegration object. imuIntegratorOpt_->resetIntegrationAndSetBias(prevBias_); // check optimization if (failureDetection(prevVel_, prevBias_)) { resetParams(); return; } // 2. after optiization, re-propagate imu odometry preintegration prevStateOdom = prevState_; prevBiasOdom = prevBias_; // first pop imu message older than current correction data double lastImuQT = -1; while (!imuQueImu.empty() && ROS_TIME(&imuQueImu.front()) < currentCorrectionTime - delta_t) { lastImuQT = ROS_TIME(&imuQueImu.front()); imuQueImu.pop_front(); } // repropogate if (!imuQueImu.empty()) { // reset bias use the newly optimized bias imuIntegratorImu_->resetIntegrationAndSetBias(prevBiasOdom); // integrate imu message from the beginning of this optimization for (int i = 0; i < (int)imuQueImu.size(); ++i) { sensor_msgs::Imu* thisImu = &imuQueImu[i]; double imuTime = ROS_TIME(thisImu); double dt = (lastImuQT < 0) ? 
(1.0 / 500.0) : (imuTime - lastImuQT); imuIntegratorImu_->integrateMeasurement(gtsam::Vector3(thisImu->linear_acceleration.x, thisImu->linear_acceleration.y, thisImu->linear_acceleration.z), gtsam::Vector3(thisImu->angular_velocity.x, thisImu->angular_velocity.y, thisImu->angular_velocity.z), dt); lastImuQT = imuTime; } } ++key; doneFirstOpt = true; } bool failureDetection(const gtsam::Vector3& velCur, const gtsam::imuBias::ConstantBias& biasCur) { Eigen::Vector3f vel(velCur.x(), velCur.y(), velCur.z()); if (vel.norm() > 30) { ROS_WARN("Large velocity, reset IMU-preintegration!"); return true; } Eigen::Vector3f ba(biasCur.accelerometer().x(), biasCur.accelerometer().y(), biasCur.accelerometer().z()); Eigen::Vector3f bg(biasCur.gyroscope().x(), biasCur.gyroscope().y(), biasCur.gyroscope().z()); if (ba.norm() > 1.0 || bg.norm() > 1.0) { ROS_WARN("Large bias, reset IMU-preintegration!"); return true; } return false; } void imuHandler(const sensor_msgs::Imu::ConstPtr& imu_raw) { std::lock_guard lock(mtx); sensor_msgs::Imu thisImu = imuConverter(*imu_raw); imuQueOpt.push_back(thisImu); imuQueImu.push_back(thisImu); if (doneFirstOpt == false) return; double imuTime = ROS_TIME(&thisImu); double dt = (lastImuT_imu < 0) ? 
(1.0 / 500.0) : (imuTime - lastImuT_imu); lastImuT_imu = imuTime; // integrate this single imu message imuIntegratorImu_->integrateMeasurement(gtsam::Vector3(thisImu.linear_acceleration.x, thisImu.linear_acceleration.y, thisImu.linear_acceleration.z), gtsam::Vector3(thisImu.angular_velocity.x, thisImu.angular_velocity.y, thisImu.angular_velocity.z), dt); // predict odometry gtsam::NavState currentState = imuIntegratorImu_->predict(prevStateOdom, prevBiasOdom); // publish odometry nav_msgs::Odometry odometry; odometry.header.stamp = thisImu.header.stamp; odometry.header.frame_id = odometryFrame; odometry.child_frame_id = "odom_imu"; // transform imu pose to ldiar gtsam::Pose3 imuPose = gtsam::Pose3(currentState.quaternion(), currentState.position()); gtsam::Pose3 lidarPose = imuPose.compose(imu2Lidar); odometry.pose.pose.position.x = lidarPose.translation().x(); odometry.pose.pose.position.y = lidarPose.translation().y(); odometry.pose.pose.position.z = lidarPose.translation().z(); odometry.pose.pose.orientation.x = lidarPose.rotation().toQuaternion().x(); odometry.pose.pose.orientation.y = lidarPose.rotation().toQuaternion().y(); odometry.pose.pose.orientation.z = lidarPose.rotation().toQuaternion().z(); odometry.pose.pose.orientation.w = lidarPose.rotation().toQuaternion().w(); odometry.twist.twist.linear.x = currentState.velocity().x(); odometry.twist.twist.linear.y = currentState.velocity().y(); odometry.twist.twist.linear.z = currentState.velocity().z(); odometry.twist.twist.angular.x = thisImu.angular_velocity.x + prevBiasOdom.gyroscope().x(); odometry.twist.twist.angular.y = thisImu.angular_velocity.y + prevBiasOdom.gyroscope().y(); odometry.twist.twist.angular.z = thisImu.angular_velocity.z + prevBiasOdom.gyroscope().z(); pubImuOdometry.publish(odometry); } }; int main(int argc, char** argv) { ros::init(argc, argv, "roboat_loam"); IMUPreintegration ImuP; TransformFusion TF; ROS_INFO("\033[1;32m----> IMU Preintegration Started.\033[0m"); 
ros::MultiThreadedSpinner spinner(4); spinner.spin(); return 0; } ================================================ FILE: src/mapOptimization.cpp ================================================ #include #include "factor.h" #include "lio_segmot/Diagnosis.h" #include "lio_segmot/ObjectStateArray.h" #include "lio_segmot/cloud_info.h" #include "lio_segmot/detection.h" #include "lio_segmot/flags.h" #include "lio_segmot/save_estimation_result.h" #include "lio_segmot/save_map.h" #include "solver.h" #include "utility.h" #include #include #include #include #include #include #include #include #include #include #include #include #include // #define ENABLE_COMPACT_VERSION_OF_FACTOR_GRAPH // #define MAP_OPTIMIZATION_DEBUG #define ENABLE_SIMULTANEOUS_LOCALIZATION_AND_TRACKING #define ENABLE_ASYNCHRONOUS_STATE_ESTIMATE_FOR_SLOT // #define ENABLE_MINIMAL_MEMORY_USAGE using namespace gtsam; using symbol_shorthand::B; // Bias (ax,ay,az,gx,gy,gz) using symbol_shorthand::G; // GPS pose using symbol_shorthand::V; // Vel (xdot,ydot,zdot) using symbol_shorthand::X; // Pose3 (x,y,z,r,p,y) using BoundingBox = jsk_recognition_msgs::BoundingBox; using BoundingBoxPtr = jsk_recognition_msgs::BoundingBoxPtr; using BoundingBoxConstPtr = jsk_recognition_msgs::BoundingBoxConstPtr; using BoundingBoxArray = jsk_recognition_msgs::BoundingBoxArray; using BoundingBoxArrayPtr = jsk_recognition_msgs::BoundingBoxArrayPtr; using BoundingBoxArrayConstPtr = jsk_recognition_msgs::BoundingBoxArrayConstPtr; /* * A point cloud type that has 6D pose info ([x,y,z,roll,pitch,yaw] intensity is time stamp) */ struct PointXYZIRPYT { PCL_ADD_POINT4D PCL_ADD_INTENSITY; // preferred way of adding a XYZ+padding float roll; float pitch; float yaw; double time; EIGEN_MAKE_ALIGNED_OPERATOR_NEW // make sure our new allocators are aligned } EIGEN_ALIGN16; // enforce SSE padding for correct memory alignment POINT_CLOUD_REGISTER_POINT_STRUCT(PointXYZIRPYT, (float, x, x)(float, y, y)(float, z, z)(float, intensity, 
intensity)(float, roll, roll)(float, pitch, pitch)(float, yaw, yaw)(double, time, time)) typedef PointXYZIRPYT PointTypePose; geometry_msgs::Pose gtsamPose2ROSPose(const Pose3& pose) { geometry_msgs::Pose p; auto trans = pose.translation(); p.position.x = trans.x(); p.position.y = trans.y(); p.position.z = trans.z(); auto quat = pose.rotation().toQuaternion(); p.orientation.w = quat.w(); p.orientation.x = quat.x(); p.orientation.y = quat.y(); p.orientation.z = quat.z(); return p; } class ObjectState { public: Pose3 pose = Pose3::identity(); Pose3 velocity = Pose3::identity(); uint64_t poseNodeIndex = 0; uint64_t velocityNodeIndex = 0; uint64_t objectIndex = 0; uint64_t objectIndexForTracking = 0; int lostCount = 0; int trackScore = 0; ros::Time timestamp = ros::Time(); BoundingBox box = BoundingBox(); BoundingBox detection = BoundingBox(); double confidence = 0; bool isTightlyCoupled = false; bool isFirst = false; TightlyCoupledDetectionFactor::shared_ptr tightlyCoupledDetectionFactorPtr = nullptr; LooselyCoupledDetectionFactor::shared_ptr looselyCoupledDetectionFactorPtr = nullptr; StablePoseFactor::shared_ptr motionFactorPtr = nullptr; double initialDetectionError = 0; double initialMotionError = 0; std::vector previousVelocityNodeIndices; ObjectState(Pose3 pose = Pose3::identity(), Pose3 velocity = Pose3::identity(), uint64_t poseNodeIndex = 0, uint64_t velocityNodeIndex = 0, uint64_t objectIndex = 0, uint64_t objectIndexForTracking = 0, int lostCount = 0, int trackScore = 0, ros::Time timestamp = ros::Time(), BoundingBox box = BoundingBox(), BoundingBox detection = BoundingBox(), double confidence = 0, bool isTightlyCoupled = false, bool isFirst = false, std::vector previousVelocityNodeIndices = std::vector()) : pose(pose), velocity(velocity), poseNodeIndex(poseNodeIndex), velocityNodeIndex(velocityNodeIndex), objectIndex(objectIndex), objectIndexForTracking(objectIndexForTracking), lostCount(lostCount), trackScore(trackScore), timestamp(timestamp), box(box), 
detection(detection), confidence(confidence), isTightlyCoupled(isTightlyCoupled), isFirst(isFirst), previousVelocityNodeIndices(previousVelocityNodeIndices) { } ObjectState clone() const { return ObjectState(pose, velocity, poseNodeIndex, velocityNodeIndex, objectIndex, objectIndexForTracking, lostCount, trackScore, timestamp, box, detection, confidence, isTightlyCoupled, isFirst, previousVelocityNodeIndices); } bool isTurning(float threshold) const { auto rot = gtsam::traits::Local(gtsam::Rot3::identity(), this->velocity.rotation()); return rot.maxCoeff() > threshold; } bool isMovingFast(float threshold) const { auto v = gtsam::traits::Local(gtsam::Pose3::identity(), this->velocity); return sqrt(pow(v(3), 2) + pow(v(4), 2) + pow(v(5), 2)) > threshold; } bool velocityIsConsistent(int samplingSize, Values& currentEstimates, double angleThreshold, double velocityThreshold) const { int size = previousVelocityNodeIndices.size(); if (size < samplingSize) return false; Eigen::VectorXd angles = Eigen::VectorXd::Zero(samplingSize); Eigen::VectorXd velocities = Eigen::VectorXd::Zero(samplingSize); std::vector vs; gtsam::Vector6 vMean = gtsam::Vector6::Zero(); for (int i = 0; i < samplingSize; ++i) { auto vi = currentEstimates.at(previousVelocityNodeIndices[size - i - 1]); auto v = gtsam::traits::Local(gtsam::Pose3::identity(), vi); angles(i) = sqrt(pow(v(0), 2) + pow(v(1), 2) + pow(v(2), 2)); velocities(i) = sqrt(pow(v(3), 2) + pow(v(4), 2) + pow(v(5), 2)); vs.push_back(v); vMean += v; } vMean /= samplingSize; gtsam::Matrix6 covariance = gtsam::Matrix6::Zero(); covariance(0, 0) = covariance(1, 1) = covariance(2, 2) = angleThreshold; covariance(3, 3) = covariance(4, 4) = covariance(5, 5) = velocityThreshold; auto covarianceInverse = covariance.inverse(); double error = 0.0; for (int i = 0; i < samplingSize; ++i) { auto v = vs[i] - vMean; error += v.transpose() * covarianceInverse * v; } error /= samplingSize; double angleVar = (angles.array() - angles.mean()).pow(2).mean(); 
double velocityVar = (velocities.array() - velocities.mean()).pow(2).mean(); // return angleVar < angleThreshold && velocityVar < velocityThreshold; return error < 1.0 * 1.0; } }; class Timer { private: std::chrono::time_point start; std::chrono::time_point end; public: Timer() { start = std::chrono::high_resolution_clock::now(); } void reset() { start = std::chrono::high_resolution_clock::now(); } void stop() { end = std::chrono::high_resolution_clock::now(); } double elapsed() const { return std::chrono::duration_cast(end - start).count(); } }; class mapOptimization : public ParamServer { public: // gtsam NonlinearFactorGraph gtSAMgraph; NonlinearFactorGraph gtSAMgraphForLooselyCoupledObjects; Values initialEstimate; Values initialEstimateForLooselyCoupledObjects; Values initialEstimateForAnalysis; Values optimizedEstimate; MaxMixtureISAM2* isam; Values isamCurrentEstimate; Eigen::MatrixXd poseCovariance; ros::Publisher pubLaserCloudSurround; ros::Publisher pubLaserOdometryGlobal; ros::Publisher pubLaserOdometryIncremental; ros::Publisher pubKeyPoses; ros::Publisher pubPath; ros::Publisher pubKeyFrameCloud; ros::Publisher pubHistoryKeyFrames; ros::Publisher pubIcpKeyFrames; ros::Publisher pubRecentKeyFrames; ros::Publisher pubRecentKeyFrame; ros::Publisher pubCloudRegisteredRaw; ros::Publisher pubLoopConstraintEdge; ros::Subscriber subCloud; ros::Subscriber subGPS; ros::Subscriber subLoop; ros::Publisher pubDetection; ros::Publisher pubLaserCloudDeskewed; ros::Publisher pubObjects; ros::Publisher pubObjectPaths; ros::Publisher pubTightlyCoupledObjectPoints; ros::Publisher pubObjectLabels; ros::Publisher pubObjectVelocities; ros::Publisher pubObjectVelocityArrows; ros::Publisher pubObjectStates; ros::Publisher pubTrackingObjects; ros::Publisher pubTrackingObjectPaths; ros::Publisher pubTrackingObjectLabels; ros::Publisher pubTrackingObjectVelocities; ros::Publisher pubTrackingObjectVelocityArrows; ros::Publisher pubDiagnosis; ros::Publisher pubReady; 
// ---------------------------------------------------------------------------
// NOTE(review): this region is reproduced verbatim from the repository
// extract.  The extraction collapsed newlines — so each inline `//` comment
// below swallows the code that followed it on the original lines — and
// stripped every `<...>` token (e.g. std::deque<nav_msgs::Odometry>,
// vector<pcl::PointCloud<PointType>::Ptr>, pcl::KdTreeFLANN<PointType>::Ptr,
// pcl::VoxelGrid<PointType>, nh.advertise<T>/subscribe<T>/serviceClient<T>).
// The missing tokens must be restored from the original repository before
// this file can compile; they are NOT reconstructed here because the
// SEGMOT-specific element types (detection/matching vectors, the `objects`
// container) cannot be proven from this extract alone.
// ---------------------------------------------------------------------------
// Remaining mapOptimization members: save-map services, the detection
// service client, GPS queue, keyframe clouds, and scan-matching buffers.
ros::ServiceServer srvSaveMap; ros::ServiceServer srvSaveEstimationResult; ros::ServiceClient detectionClient; lio_segmot::detection detectionSrv; std::deque gpsQueue; lio_segmot::cloud_info cloudInfo; vector::Ptr> cornerCloudKeyFrames; vector::Ptr> surfCloudKeyFrames; pcl::PointCloud::Ptr cloudKeyPoses3D; pcl::PointCloud::Ptr cloudKeyPoses6D; pcl::PointCloud::Ptr copy_cloudKeyPoses3D; pcl::PointCloud::Ptr copy_cloudKeyPoses6D; std::vector keyPoseIndices; pcl::PointCloud::Ptr laserCloudCornerLast; // corner feature set from odoOptimization pcl::PointCloud::Ptr laserCloudSurfLast; // surf feature set from odoOptimization pcl::PointCloud::Ptr laserCloudCornerLastDS; // downsampled corner featuer set from odoOptimization pcl::PointCloud::Ptr laserCloudSurfLastDS; // downsampled surf featuer set from odoOptimization pcl::PointCloud::Ptr laserCloudOri; pcl::PointCloud::Ptr coeffSel; std::vector laserCloudOriCornerVec; // corner point holder for parallel computation std::vector coeffSelCornerVec; std::vector laserCloudOriCornerFlag; std::vector laserCloudOriSurfVec; // surf point holder for parallel computation std::vector coeffSelSurfVec; std::vector laserCloudOriSurfFlag; map, pcl::PointCloud>> laserCloudMapContainer; pcl::PointCloud::Ptr laserCloudCornerFromMap; pcl::PointCloud::Ptr laserCloudSurfFromMap; pcl::PointCloud::Ptr laserCloudCornerFromMapDS; pcl::PointCloud::Ptr laserCloudSurfFromMapDS; pcl::KdTreeFLANN::Ptr kdtreeCornerFromMap; pcl::KdTreeFLANN::Ptr kdtreeSurfFromMap; pcl::KdTreeFLANN::Ptr kdtreeSurroundingKeyPoses; pcl::KdTreeFLANN::Ptr kdtreeHistoryKeyPoses; pcl::VoxelGrid downSizeFilterCorner; pcl::VoxelGrid downSizeFilterSurf; pcl::VoxelGrid downSizeFilterICP; pcl::VoxelGrid downSizeFilterSurroundingKeyPoses; // for surrounding key poses of scan-to-map optimization ros::Time timeLaserInfoStamp; double timeLaserInfoCur; double deltaTime; float transformTobeMapped[6]; std::mutex mtx; std::mutex mtxLoopInfo; bool isDegenerate = false; cv::Mat matP; int
// Scan-to-map counters, loop-closure queues, and the SEGMOT object-tracking
// state (detections, per-frame object maps, visualization markers), then the
// constructor begins: ISAM2 setup and the first batch of publishers.
laserCloudCornerFromMapDSNum = 0; int laserCloudSurfFromMapDSNum = 0; int laserCloudCornerLastDSNum = 0; int laserCloudSurfLastDSNum = 0; bool aLoopIsClosed = false; map loopIndexContainer; // from new to old vector> loopIndexQueue; vector loopPoseQueue; vector loopNoiseQueue; deque loopInfoVec; nav_msgs::Path globalPath; Eigen::Affine3f transPointAssociateToMap; Eigen::Affine3f incrementalOdometryAffineFront; Eigen::Affine3f incrementalOdometryAffineBack; BoundingBoxArrayPtr detections; std::vector detectionVector; std::vector tightlyCoupledDetectionVector; std::vector earlyLooselyCoupledMatchingVector; std::vector looselyCoupledMatchingVector; std::vector tightlyCoupledMatchingVector; std::vector dataAssociationVector; bool detectionIsActive = false; std::vector> objects; visualization_msgs::MarkerArray objectPaths; visualization_msgs::Marker tightlyCoupledObjectPoints; visualization_msgs::MarkerArray objectLabels; visualization_msgs::MarkerArray objectVelocities; visualization_msgs::MarkerArray objectVelocityArrows; visualization_msgs::MarkerArray trackingObjectPaths; visualization_msgs::MarkerArray trackingObjectLabels; visualization_msgs::MarkerArray trackingObjectVelocities; visualization_msgs::MarkerArray trackingObjectVelocityArrows; lio_segmot::ObjectStateArray objectStates; uint64_t numberOfRegisteredObjects = 0; uint64_t numberOfTrackingObjects = 0; bool anyObjectIsTightlyCoupled = false; uint64_t numberOfNodes = 0; Timer timer; int numberOfTightlyCoupledObjectsAtThisMoment = 0; mapOptimization() { ISAM2Params parameters; parameters.relinearizeThreshold = 0.1; parameters.relinearizeSkip = 1; isam = new MaxMixtureISAM2(parameters); pubKeyPoses = nh.advertise("lio_segmot/mapping/trajectory", 1); pubLaserCloudSurround = nh.advertise("lio_segmot/mapping/map_global", 1); pubLaserOdometryGlobal = nh.advertise("lio_segmot/mapping/odometry", 1); pubLaserOdometryIncremental = nh.advertise("lio_segmot/mapping/odometry_incremental", 1); pubPath =
// Constructor (continued): subscriber/service wiring and the remaining
// mapping/detection publishers.
nh.advertise("lio_segmot/mapping/path", 1); subCloud = nh.subscribe("lio_segmot/feature/cloud_info", 1, &mapOptimization::laserCloudInfoHandler, this, ros::TransportHints().tcpNoDelay()); subGPS = nh.subscribe(gpsTopic, 200, &mapOptimization::gpsHandler, this, ros::TransportHints().tcpNoDelay()); subLoop = nh.subscribe("lio_loop/loop_closure_detection", 1, &mapOptimization::loopInfoHandler, this, ros::TransportHints().tcpNoDelay()); srvSaveMap = nh.advertiseService("lio_segmot/save_map", &mapOptimization::saveMapService, this); srvSaveEstimationResult = nh.advertiseService("lio_segmot/save_estimation_result", &mapOptimization::saveEstimationResultService, this); detectionClient = nh.serviceClient("lio_segmot_detector"); pubHistoryKeyFrames = nh.advertise("lio_segmot/mapping/icp_loop_closure_history_cloud", 1); pubIcpKeyFrames = nh.advertise("lio_segmot/mapping/icp_loop_closure_corrected_cloud", 1); pubLoopConstraintEdge = nh.advertise("/lio_segmot/mapping/loop_closure_constraints", 1); pubRecentKeyFrames = nh.advertise("lio_segmot/mapping/map_local", 1); pubRecentKeyFrame = nh.advertise("lio_segmot/mapping/cloud_registered", 1); pubCloudRegisteredRaw = nh.advertise("lio_segmot/mapping/cloud_registered_raw", 1); pubDetection = nh.advertise("lio_segmot/mapping/detections", 1); pubLaserCloudDeskewed = nh.advertise("lio_segmot/mapping/cloud_deskewed", 1); pubObjects = nh.advertise("lio_segmot/mapping/objects", 1); pubObjectPaths = nh.advertise("lio_segmot/mapping/object_paths", 1); pubTightlyCoupledObjectPoints = nh.advertise("lio_segmot/mapping/tightly_coupled_object_points", 1); pubObjectLabels = nh.advertise("lio_segmot/mapping/object_labels", 1); pubObjectVelocities = nh.advertise("lio_segmot/mapping/object_velocities", 1); pubObjectVelocityArrows = nh.advertise("lio_segmot/mapping/object_velocity_arrows", 1); pubObjectStates = nh.advertise("lio_segmot/mapping/object_states", 1); pubTrackingObjects = nh.advertise("lio_segmot/tracking/objects", 1);
// Constructor (continued): tracking publishers, voxel-filter leaf sizes, and
// the call to allocateMemory(); then allocateMemory() begins (pre-allocates
// all point-cloud buffers and kd-trees).
pubTrackingObjectPaths = nh.advertise("lio_segmot/tracking/object_paths", 1); pubTrackingObjectLabels = nh.advertise("lio_segmot/tracking/object_labels", 1); pubTrackingObjectVelocities = nh.advertise("lio_segmot/tracking/object_velocities", 1); pubTrackingObjectVelocityArrows = nh.advertise("lio_segmot/tracking/object_velocity_arrows", 1); pubDiagnosis = nh.advertise("lio_segmot/diagnosis", 1); pubReady = nh.advertise("lio_segmot/ready", 1); downSizeFilterCorner.setLeafSize(mappingCornerLeafSize, mappingCornerLeafSize, mappingCornerLeafSize); downSizeFilterSurf.setLeafSize(mappingSurfLeafSize, mappingSurfLeafSize, mappingSurfLeafSize); downSizeFilterICP.setLeafSize(mappingSurfLeafSize, mappingSurfLeafSize, mappingSurfLeafSize); downSizeFilterSurroundingKeyPoses.setLeafSize(surroundingKeyframeDensity, surroundingKeyframeDensity, surroundingKeyframeDensity); // for surrounding key poses of scan-to-map optimization allocateMemory(); } void allocateMemory() { cloudKeyPoses3D.reset(new pcl::PointCloud()); cloudKeyPoses6D.reset(new pcl::PointCloud()); copy_cloudKeyPoses3D.reset(new pcl::PointCloud()); copy_cloudKeyPoses6D.reset(new pcl::PointCloud()); kdtreeSurroundingKeyPoses.reset(new pcl::KdTreeFLANN()); kdtreeHistoryKeyPoses.reset(new pcl::KdTreeFLANN()); laserCloudCornerLast.reset(new pcl::PointCloud()); // corner feature set from odoOptimization laserCloudSurfLast.reset(new pcl::PointCloud()); // surf feature set from odoOptimization laserCloudCornerLastDS.reset(new pcl::PointCloud()); // downsampled corner featuer set from odoOptimization laserCloudSurfLastDS.reset(new pcl::PointCloud()); // downsampled surf featuer set from odoOptimization laserCloudOri.reset(new pcl::PointCloud()); coeffSel.reset(new pcl::PointCloud()); laserCloudOriCornerVec.resize(N_SCAN * Horizon_SCAN); coeffSelCornerVec.resize(N_SCAN * Horizon_SCAN); laserCloudOriCornerFlag.resize(N_SCAN * Horizon_SCAN); laserCloudOriSurfVec.resize(N_SCAN * Horizon_SCAN); coeffSelSurfVec.resize(N_SCAN *
// allocateMemory() (continued) and laserCloudInfoHandler(): the main lidar
// callback — copies the incoming cloud_info, throttles by
// mappingProcessInterval, launches getDetections() on a worker thread and
// runs the scan-to-map pipeline.
Horizon_SCAN); laserCloudOriSurfFlag.resize(N_SCAN * Horizon_SCAN); std::fill(laserCloudOriCornerFlag.begin(), laserCloudOriCornerFlag.end(), false); std::fill(laserCloudOriSurfFlag.begin(), laserCloudOriSurfFlag.end(), false); laserCloudCornerFromMap.reset(new pcl::PointCloud()); laserCloudSurfFromMap.reset(new pcl::PointCloud()); laserCloudCornerFromMapDS.reset(new pcl::PointCloud()); laserCloudSurfFromMapDS.reset(new pcl::PointCloud()); kdtreeCornerFromMap.reset(new pcl::KdTreeFLANN()); kdtreeSurfFromMap.reset(new pcl::KdTreeFLANN()); for (int i = 0; i < 6; ++i) { transformTobeMapped[i] = 0; } matP = cv::Mat(6, 6, CV_32F, cv::Scalar::all(0)); detections.reset(new BoundingBoxArray()); tightlyCoupledObjectPoints.action = visualization_msgs::Marker::ADD; tightlyCoupledObjectPoints.type = visualization_msgs::Marker::SPHERE_LIST; tightlyCoupledObjectPoints.color.a = 0.4; tightlyCoupledObjectPoints.color.r = 1.0; tightlyCoupledObjectPoints.color.g = 1.0; tightlyCoupledObjectPoints.color.b = 1.0; tightlyCoupledObjectPoints.scale.x = 1.0; tightlyCoupledObjectPoints.scale.y = 1.0; tightlyCoupledObjectPoints.scale.z = 1.0; tightlyCoupledObjectPoints.pose.orientation.w = 1.0; } void laserCloudInfoHandler(const lio_segmot::cloud_infoConstPtr& msgIn) { // extract time stamp timeLaserInfoStamp = msgIn->header.stamp; timeLaserInfoCur = msgIn->header.stamp.toSec(); // extract info and feature cloud cloudInfo = *msgIn; pcl::fromROSMsg(msgIn->cloud_corner, *laserCloudCornerLast); pcl::fromROSMsg(msgIn->cloud_surface, *laserCloudSurfLast); std::lock_guard lock(mtx); timer.reset(); numberOfTightlyCoupledObjectsAtThisMoment = 0; static double timeLastProcessing = -1; if (timeLaserInfoCur - timeLastProcessing >= mappingProcessInterval) { #ifdef ENABLE_SIMULTANEOUS_LOCALIZATION_AND_TRACKING std::thread t(&mapOptimization::getDetections, this); deltaTime = timeLaserInfoCur - timeLastProcessing; timeLastProcessing = timeLaserInfoCur; #endif updateInitialGuess();
extractSurroundingKeyFrames(); downsampleCurrentScan(); scan2MapOptimization(); #ifdef ENABLE_SIMULTANEOUS_LOCALIZATION_AND_TRACKING t.join(); #endif saveKeyFramesAndFactor(); correctPoses(); timer.stop(); publishOdometry(); publishFrames(); } pubReady.publish(std_msgs::Empty()); } void getDetections() { detectionIsActive = false; detectionSrv.request.cloud = cloudInfo.cloud_raw; if (detectionClient.call(detectionSrv)) { *detections = detectionSrv.response.detections; detectionIsActive = true; } } void gpsHandler(const nav_msgs::Odometry::ConstPtr& gpsMsg) { gpsQueue.push_back(*gpsMsg); } void pointAssociateToMap(PointType const* const pi, PointType* const po) { po->x = transPointAssociateToMap(0, 0) * pi->x + transPointAssociateToMap(0, 1) * pi->y + transPointAssociateToMap(0, 2) * pi->z + transPointAssociateToMap(0, 3); po->y = transPointAssociateToMap(1, 0) * pi->x + transPointAssociateToMap(1, 1) * pi->y + transPointAssociateToMap(1, 2) * pi->z + transPointAssociateToMap(1, 3); po->z = transPointAssociateToMap(2, 0) * pi->x + transPointAssociateToMap(2, 1) * pi->y + transPointAssociateToMap(2, 2) * pi->z + transPointAssociateToMap(2, 3); po->intensity = pi->intensity; } pcl::PointCloud::Ptr transformPointCloud(pcl::PointCloud::Ptr cloudIn, PointTypePose* transformIn) { pcl::PointCloud::Ptr cloudOut(new pcl::PointCloud()); int cloudSize = cloudIn->size(); cloudOut->resize(cloudSize); Eigen::Affine3f transCur = pcl::getTransformation(transformIn->x, transformIn->y, transformIn->z, transformIn->roll, transformIn->pitch, transformIn->yaw); #pragma omp parallel for num_threads(numberOfCores) for (int i = 0; i < cloudSize; ++i) { const auto& pointFrom = cloudIn->points[i]; cloudOut->points[i].x = transCur(0, 0) * pointFrom.x + transCur(0, 1) * pointFrom.y + transCur(0, 2) * pointFrom.z + transCur(0, 3); cloudOut->points[i].y = transCur(1, 0) * pointFrom.x + transCur(1, 1) * pointFrom.y + transCur(1, 2) * pointFrom.z + transCur(1, 3); cloudOut->points[i].z = 
transCur(2, 0) * pointFrom.x + transCur(2, 1) * pointFrom.y + transCur(2, 2) * pointFrom.z + transCur(2, 3); cloudOut->points[i].intensity = pointFrom.intensity; } return cloudOut; } gtsam::Pose3 pclPointTogtsamPose3(PointTypePose thisPoint) { return gtsam::Pose3(gtsam::Rot3::RzRyRx(double(thisPoint.roll), double(thisPoint.pitch), double(thisPoint.yaw)), gtsam::Point3(double(thisPoint.x), double(thisPoint.y), double(thisPoint.z))); } gtsam::Pose3 trans2gtsamPose(float transformIn[]) { return gtsam::Pose3(gtsam::Rot3::RzRyRx(transformIn[0], transformIn[1], transformIn[2]), gtsam::Point3(transformIn[3], transformIn[4], transformIn[5])); } Eigen::Affine3f pclPointToAffine3f(PointTypePose thisPoint) { return pcl::getTransformation(thisPoint.x, thisPoint.y, thisPoint.z, thisPoint.roll, thisPoint.pitch, thisPoint.yaw); } Eigen::Affine3f trans2Affine3f(float transformIn[]) { return pcl::getTransformation(transformIn[3], transformIn[4], transformIn[5], transformIn[0], transformIn[1], transformIn[2]); } PointTypePose trans2PointTypePose(float transformIn[]) { PointTypePose thisPose6D; thisPose6D.x = transformIn[3]; thisPose6D.y = transformIn[4]; thisPose6D.z = transformIn[5]; thisPose6D.roll = transformIn[0]; thisPose6D.pitch = transformIn[1]; thisPose6D.yaw = transformIn[2]; return thisPose6D; } bool saveMapService(lio_segmot::save_mapRequest& req, lio_segmot::save_mapResponse& res) { string saveMapDirectory; cout << "****************************************************" << endl; cout << "Saving map to pcd files ..." 
<< endl; if (req.destination.empty()) saveMapDirectory = std::getenv("HOME") + savePCDDirectory; else saveMapDirectory = std::getenv("HOME") + req.destination; cout << "Save destination: " << saveMapDirectory << endl; // create directory and remove old files; int unused = system((std::string("exec rm -r ") + saveMapDirectory).c_str()); unused = system((std::string("mkdir -p ") + saveMapDirectory).c_str()); // save key frame transformations pcl::io::savePCDFileBinary(saveMapDirectory + "/trajectory.pcd", *cloudKeyPoses3D); pcl::io::savePCDFileBinary(saveMapDirectory + "/transformations.pcd", *cloudKeyPoses6D); // extract global point cloud map pcl::PointCloud::Ptr globalCornerCloud(new pcl::PointCloud()); pcl::PointCloud::Ptr globalCornerCloudDS(new pcl::PointCloud()); pcl::PointCloud::Ptr globalSurfCloud(new pcl::PointCloud()); pcl::PointCloud::Ptr globalSurfCloudDS(new pcl::PointCloud()); pcl::PointCloud::Ptr globalMapCloud(new pcl::PointCloud()); for (int i = 0; i < (int)cloudKeyPoses3D->size(); i++) { *globalCornerCloud += *transformPointCloud(cornerCloudKeyFrames[i], &cloudKeyPoses6D->points[i]); *globalSurfCloud += *transformPointCloud(surfCloudKeyFrames[i], &cloudKeyPoses6D->points[i]); cout << "\r" << std::flush << "Processing feature cloud " << i << " of " << cloudKeyPoses6D->size() << " ..."; } if (req.resolution != 0) { cout << "\n\nSave resolution: " << req.resolution << endl; // down-sample and save corner cloud downSizeFilterCorner.setInputCloud(globalCornerCloud); downSizeFilterCorner.setLeafSize(req.resolution, req.resolution, req.resolution); downSizeFilterCorner.filter(*globalCornerCloudDS); pcl::io::savePCDFileBinary(saveMapDirectory + "/CornerMap.pcd", *globalCornerCloudDS); // down-sample and save surf cloud downSizeFilterSurf.setInputCloud(globalSurfCloud); downSizeFilterSurf.setLeafSize(req.resolution, req.resolution, req.resolution); downSizeFilterSurf.filter(*globalSurfCloudDS); pcl::io::savePCDFileBinary(saveMapDirectory + "/SurfMap.pcd", 
*globalSurfCloudDS); } else {
  // save corner cloud
  pcl::io::savePCDFileBinary(saveMapDirectory + "/CornerMap.pcd", *globalCornerCloud);
  // save surf cloud
  pcl::io::savePCDFileBinary(saveMapDirectory + "/SurfMap.pcd", *globalSurfCloud);
}

// save global point cloud map
*globalMapCloud += *globalCornerCloud;
*globalMapCloud += *globalSurfCloud;
int ret = pcl::io::savePCDFileBinary(saveMapDirectory + "/GlobalMap.pcd", *globalMapCloud);
res.success = ret == 0;

// restore the regular mapping leaf sizes (the save path above may have
// re-configured the shared voxel filters with the requested resolution)
downSizeFilterCorner.setLeafSize(mappingCornerLeafSize, mappingCornerLeafSize, mappingCornerLeafSize);
downSizeFilterSurf.setLeafSize(mappingSurfLeafSize, mappingSurfLeafSize, mappingSurfLeafSize);

cout << "****************************************************" << endl;
cout << "Saving map to pcd files completed\n" << endl;

return true;
}

// Service handler: copy the full estimation result (robot trajectory,
// per-object trajectories/velocities/states and tight/loose coupling flags)
// into the response, reading the latest values from isamCurrentEstimate.
// NOTE(review): extraction stripped template arguments throughout this file;
// the std::vector(...) constructions and isamCurrentEstimate.at(...) calls
// below are missing their element/value types (presumably the respective
// message types and gtsam::Pose3) — restore from the upstream source.
bool saveEstimationResultService(lio_segmot::save_estimation_resultRequest& req, lio_segmot::save_estimation_resultResponse& res) {
  res.robotTrajectory = globalPath;
  res.objectTrajectories = std::vector(numberOfRegisteredObjects, nav_msgs::Path());
  res.objectVelocities = std::vector(numberOfRegisteredObjects, nav_msgs::Path());
  res.trackingObjectTrajectories = std::vector(numberOfTrackingObjects, nav_msgs::Path());
  res.trackingObjectVelocities = std::vector(numberOfTrackingObjects, nav_msgs::Path());
  res.trackingObjectStates = std::vector(numberOfTrackingObjects, lio_segmot::ObjectStateArray());
  res.objectFlags = std::vector(numberOfRegisteredObjects, lio_segmot::flags());
  res.trackingObjectFlags = std::vector(numberOfTrackingObjects, lio_segmot::flags());

  for (int t = 0; t < objects.size(); ++t) {
    for (const auto& pairedObject : objects[t]) {
      const auto& object = pairedObject.second;
      // skip objects currently marked lost at this time step
      if (object.lostCount > 0) continue;

      geometry_msgs::PoseStamped ps;
      ps.header.frame_id = odometryFrame;
      ps.header.stamp = object.timestamp;
      ps.pose = gtsamPose2ROSPose(isamCurrentEstimate.at(object.poseNodeIndex));
      res.objectTrajectories[object.objectIndex].poses.push_back(ps);
      res.trackingObjectTrajectories[object.objectIndexForTracking].poses.push_back(ps);

      // velocity is also stored as a pose node (twist parameterized on SE(3))
      ps.pose = gtsamPose2ROSPose(isamCurrentEstimate.at(object.velocityNodeIndex));
      res.objectVelocities[object.objectIndex].poses.push_back(ps);
      res.trackingObjectVelocities[object.objectIndexForTracking].poses.push_back(ps);

      // 1 = tightly coupled into the robot factor graph at this stamp, 0 = loose
      res.objectFlags[object.objectIndex].flags.push_back(object.isTightlyCoupled ? 1 : 0);
      res.trackingObjectFlags[object.objectIndexForTracking].flags.push_back(object.isTightlyCoupled ? 1 : 0);

      lio_segmot::ObjectState state;
      state.header.frame_id = odometryFrame;
      state.header.stamp = object.timestamp;
      state.pose = gtsamPose2ROSPose(isamCurrentEstimate.at(object.poseNodeIndex));
      state.velocity = gtsamPose2ROSPose(isamCurrentEstimate.at(object.velocityNodeIndex));
      state.detection = object.detection;
      res.trackingObjectStates[object.objectIndexForTracking].objects.push_back(state);
    }
  }
  return true;
}

// Background thread: publish the global map at 0.2 Hz while ROS is alive; on
// shutdown, optionally dump the map to PCD files via saveMapService.
void visualizeGlobalMapThread() {
  ros::Rate rate(0.2);
  while (ros::ok()) {
    rate.sleep();
    publishGlobalMap();
  }

  if (savePCD == false)
    return;

  lio_segmot::save_mapRequest req;
  lio_segmot::save_mapResponse res;
  if (!saveMapService(req, res)) {
    cout << "Fail to save map" << endl;
  }
}

// Publish a down-sampled global map assembled from key frames near the most
// recent pose (radius search + voxel filtering).
// NOTE(review): pcl::KdTreeFLANN / pcl::PointCloud below lost their
// <PointType> template arguments during extraction.
void publishGlobalMap() {
  if (pubLaserCloudSurround.getNumSubscribers() == 0)
    return;

  if (cloudKeyPoses3D->points.empty() == true)
    return;

  pcl::KdTreeFLANN::Ptr kdtreeGlobalMap(new pcl::KdTreeFLANN());
  ;
  pcl::PointCloud::Ptr globalMapKeyPoses(new pcl::PointCloud());
  pcl::PointCloud::Ptr globalMapKeyPosesDS(new pcl::PointCloud());
  pcl::PointCloud::Ptr globalMapKeyFrames(new pcl::PointCloud());
  pcl::PointCloud::Ptr globalMapKeyFramesDS(new pcl::PointCloud());

  // kd-tree to find near key frames to visualize
  std::vector pointSearchIndGlobalMap;
  std::vector pointSearchSqDisGlobalMap;
  // search near key frames to visualize
  mtx.lock();
  kdtreeGlobalMap->setInputCloud(cloudKeyPoses3D);
  kdtreeGlobalMap->radiusSearch(cloudKeyPoses3D->back(), globalMapVisualizationSearchRadius, pointSearchIndGlobalMap,
pointSearchSqDisGlobalMap, 0);
mtx.unlock();

for (int i = 0; i < (int)pointSearchIndGlobalMap.size(); ++i)
  globalMapKeyPoses->push_back(cloudKeyPoses3D->points[pointSearchIndGlobalMap[i]]);

// downsample near selected key frames
// NOTE(review): pcl::VoxelGrid lost its <PointType> template argument here.
pcl::VoxelGrid downSizeFilterGlobalMapKeyPoses;  // for global map visualization
downSizeFilterGlobalMapKeyPoses.setLeafSize(globalMapVisualizationPoseDensity, globalMapVisualizationPoseDensity, globalMapVisualizationPoseDensity);  // for global map visualization
downSizeFilterGlobalMapKeyPoses.setInputCloud(globalMapKeyPoses);
downSizeFilterGlobalMapKeyPoses.filter(*globalMapKeyPosesDS);
// re-attach the key-frame index (stored in .intensity) that voxel filtering averaged away
for (auto& pt : globalMapKeyPosesDS->points) {
  kdtreeGlobalMap->nearestKSearch(pt, 1, pointSearchIndGlobalMap, pointSearchSqDisGlobalMap);
  pt.intensity = cloudKeyPoses3D->points[pointSearchIndGlobalMap[0]].intensity;
}

// extract visualized and downsampled key frames
for (int i = 0; i < (int)globalMapKeyPosesDS->size(); ++i) {
  if (pointDistance(globalMapKeyPosesDS->points[i], cloudKeyPoses3D->back()) > globalMapVisualizationSearchRadius)
    continue;
  int thisKeyInd = (int)globalMapKeyPosesDS->points[i].intensity;
  *globalMapKeyFrames += *transformPointCloud(cornerCloudKeyFrames[thisKeyInd], &cloudKeyPoses6D->points[thisKeyInd]);
  *globalMapKeyFrames += *transformPointCloud(surfCloudKeyFrames[thisKeyInd], &cloudKeyPoses6D->points[thisKeyInd]);
}
// downsample visualized points
pcl::VoxelGrid downSizeFilterGlobalMapKeyFrames;  // for global map visualization
downSizeFilterGlobalMapKeyFrames.setLeafSize(globalMapVisualizationLeafSize, globalMapVisualizationLeafSize, globalMapVisualizationLeafSize);  // for global map visualization
downSizeFilterGlobalMapKeyFrames.setInputCloud(globalMapKeyFrames);
downSizeFilterGlobalMapKeyFrames.filter(*globalMapKeyFramesDS);
publishCloud(&pubLaserCloudSurround, globalMapKeyFramesDS, timeLaserInfoStamp, odometryFrame);
}

// Background thread: run loop-closure detection and its visualization at the
// configured frequency while ROS is alive.
void loopClosureThread() {
  if (loopClosureEnableFlag == false)
    return;

  ros::Rate rate(loopClosureFrequency);
  while (ros::ok()) {
    rate.sleep();
    performLoopClosure();
    visualizeLoopClosure();
  }
}

// Buffer externally supplied loop candidates (a pair of timestamps per
// message); keep only the five most recent entries.
// NOTE(review): std::lock_guard lost its template argument (std::mutex).
void loopInfoHandler(const std_msgs::Float64MultiArray::ConstPtr& loopMsg) {
  std::lock_guard lock(mtxLoopInfo);
  if (loopMsg->data.size() != 2)
    return;

  loopInfoVec.push_back(*loopMsg);

  while (loopInfoVec.size() > 5)
    loopInfoVec.pop_front();
}

// Find a loop candidate (external first, then distance-based), verify it by
// ICP between the current key frame and a submap around the matched history
// key frame, and queue the resulting relative-pose constraint for the graph.
// NOTE(review): pcl::PointCloud / pcl::IterativeClosestPoint below lost their
// template arguments during extraction.
void performLoopClosure() {
  if (cloudKeyPoses3D->points.empty() == true)
    return;

  // snapshot the key-pose clouds under the lock so ICP can run lock-free
  mtx.lock();
  *copy_cloudKeyPoses3D = *cloudKeyPoses3D;
  *copy_cloudKeyPoses6D = *cloudKeyPoses6D;
  mtx.unlock();

  // find keys
  int loopKeyCur;
  int loopKeyPre;
  if (detectLoopClosureExternal(&loopKeyCur, &loopKeyPre) == false)
    if (detectLoopClosureDistance(&loopKeyCur, &loopKeyPre) == false)
      return;

  // extract cloud
  pcl::PointCloud::Ptr cureKeyframeCloud(new pcl::PointCloud());
  pcl::PointCloud::Ptr prevKeyframeCloud(new pcl::PointCloud());
  {
    loopFindNearKeyframes(cureKeyframeCloud, loopKeyCur, 0);
    loopFindNearKeyframes(prevKeyframeCloud, loopKeyPre, historyKeyframeSearchNum);
    if (cureKeyframeCloud->size() < 300 || prevKeyframeCloud->size() < 1000)
      return;
    if (pubHistoryKeyFrames.getNumSubscribers() != 0)
      publishCloud(&pubHistoryKeyFrames, prevKeyframeCloud, timeLaserInfoStamp, odometryFrame);
  }

  // ICP Settings
  static pcl::IterativeClosestPoint icp;
  icp.setMaxCorrespondenceDistance(historyKeyframeSearchRadius * 2);
  icp.setMaximumIterations(100);
  icp.setTransformationEpsilon(1e-6);
  icp.setEuclideanFitnessEpsilon(1e-6);
  icp.setRANSACIterations(0);

  // Align clouds
  icp.setInputSource(cureKeyframeCloud);
  icp.setInputTarget(prevKeyframeCloud);
  pcl::PointCloud::Ptr unused_result(new pcl::PointCloud());
  icp.align(*unused_result);

  // reject the loop if ICP diverged or the fitness is above threshold
  if (icp.hasConverged() == false || icp.getFitnessScore() > historyKeyframeFitnessScore)
    return;

  // publish corrected cloud
  if (pubIcpKeyFrames.getNumSubscribers() != 0) {
    pcl::PointCloud::Ptr closed_cloud(new pcl::PointCloud());
    pcl::transformPointCloud(*cureKeyframeCloud, *closed_cloud, icp.getFinalTransformation());
    publishCloud(&pubIcpKeyFrames,
closed_cloud, timeLaserInfoStamp, odometryFrame);
  }

  // Get pose transformation
  float x, y, z, roll, pitch, yaw;
  Eigen::Affine3f correctionLidarFrame;
  correctionLidarFrame = icp.getFinalTransformation();
  // transform from world origin to wrong pose
  Eigen::Affine3f tWrong = pclPointToAffine3f(copy_cloudKeyPoses6D->points[loopKeyCur]);
  // transform from world origin to corrected pose
  Eigen::Affine3f tCorrect = correctionLidarFrame * tWrong;  // pre-multiplying -> successive rotation about a fixed frame
  pcl::getTranslationAndEulerAngles(tCorrect, x, y, z, roll, pitch, yaw);
  gtsam::Pose3 poseFrom = Pose3(Rot3::RzRyRx(roll, pitch, yaw), Point3(x, y, z));
  gtsam::Pose3 poseTo = pclPointTogtsamPose3(copy_cloudKeyPoses6D->points[loopKeyPre]);
  // isotropic noise scaled by the ICP fitness score
  gtsam::Vector Vector6(6);
  float noiseScore = icp.getFitnessScore();
  Vector6 << noiseScore, noiseScore, noiseScore, noiseScore, noiseScore, noiseScore;
  noiseModel::Diagonal::shared_ptr constraintNoise = noiseModel::Diagonal::Variances(Vector6);

  // Add pose constraint
  mtx.lock();
  loopIndexQueue.push_back(make_pair(loopKeyCur, loopKeyPre));
  loopPoseQueue.push_back(poseFrom.between(poseTo));
  loopNoiseQueue.push_back(constraintNoise);
  mtx.unlock();

  // add loop constriant
  loopIndexContainer[loopKeyCur] = loopKeyPre;
}

// Distance-based loop detection: match the latest key frame against the
// closest history key frame that is far enough away in time.
// Returns true and fills latestID/closestID on success.
// NOTE(review): the two std::vector declarations lost their element types
// (int / float) during extraction.
bool detectLoopClosureDistance(int* latestID, int* closestID) {
  int loopKeyCur = copy_cloudKeyPoses3D->size() - 1;
  int loopKeyPre = -1;

  // check loop constraint added before
  auto it = loopIndexContainer.find(loopKeyCur);
  if (it != loopIndexContainer.end())
    return false;

  // find the closest history key frame
  std::vector pointSearchIndLoop;
  std::vector pointSearchSqDisLoop;
  kdtreeHistoryKeyPoses->setInputCloud(copy_cloudKeyPoses3D);
  kdtreeHistoryKeyPoses->radiusSearch(copy_cloudKeyPoses3D->back(), historyKeyframeSearchRadius, pointSearchIndLoop, pointSearchSqDisLoop, 0);

  for (int i = 0; i < (int)pointSearchIndLoop.size(); ++i) {
    int id = pointSearchIndLoop[i];
    // only accept candidates sufficiently separated in time from "now"
    if (abs(copy_cloudKeyPoses6D->points[id].time - timeLaserInfoCur) > historyKeyframeSearchTimeDiff) {
      loopKeyPre = id;
      break;
    }
  }

  if (loopKeyPre == -1 || loopKeyCur == loopKeyPre)
    return false;

  *latestID = loopKeyCur;
  *closestID = loopKeyPre;

  return true;
}

// Loop detection from externally published timestamp pairs (loopInfoVec);
// maps each timestamp to the nearest key-frame index.
bool detectLoopClosureExternal(int* latestID, int* closestID) {
  // this function is not used yet, please ignore it
  int loopKeyCur = -1;
  int loopKeyPre = -1;

  std::lock_guard lock(mtxLoopInfo);
  if (loopInfoVec.empty())
    return false;

  double loopTimeCur = loopInfoVec.front().data[0];
  double loopTimePre = loopInfoVec.front().data[1];
  loopInfoVec.pop_front();

  if (abs(loopTimeCur - loopTimePre) < historyKeyframeSearchTimeDiff)
    return false;

  int cloudSize = copy_cloudKeyPoses6D->size();
  if (cloudSize < 2)
    return false;

  // latest key
  loopKeyCur = cloudSize - 1;
  for (int i = cloudSize - 1; i >= 0; --i) {
    if (copy_cloudKeyPoses6D->points[i].time >= loopTimeCur)
      loopKeyCur = round(copy_cloudKeyPoses6D->points[i].intensity);
    else
      break;
  }

  // previous key
  loopKeyPre = 0;
  for (int i = 0; i < cloudSize; ++i) {
    if (copy_cloudKeyPoses6D->points[i].time <= loopTimePre)
      loopKeyPre = round(copy_cloudKeyPoses6D->points[i].intensity);
    else
      break;
  }

  if (loopKeyCur == loopKeyPre)
    return false;

  auto it = loopIndexContainer.find(loopKeyCur);
  if (it != loopIndexContainer.end())
    return false;

  *latestID = loopKeyCur;
  *closestID = loopKeyPre;

  return true;
}

// Assemble a local submap of +/- searchNum key frames around `key`, transformed
// into the map frame, then voxel-downsample it in place.
// NOTE(review): the '&copy_...' arguments below were mangled into '©_...' by
// HTML-entity decoding during extraction; they are the addresses of
// copy_cloudKeyPoses6D->points[keyNear].
void loopFindNearKeyframes(pcl::PointCloud::Ptr& nearKeyframes, const int& key, const int& searchNum) {
  // extract near keyframes
  nearKeyframes->clear();
  int cloudSize = copy_cloudKeyPoses6D->size();
  for (int i = -searchNum; i <= searchNum; ++i) {
    int keyNear = key + i;
    if (keyNear < 0 || keyNear >= cloudSize)
      continue;
    *nearKeyframes += *transformPointCloud(cornerCloudKeyFrames[keyNear], ©_cloudKeyPoses6D->points[keyNear]);
    *nearKeyframes += *transformPointCloud(surfCloudKeyFrames[keyNear], ©_cloudKeyPoses6D->points[keyNear]);
  }

  if (nearKeyframes->empty())
    return;

  // downsample near keyframes
  pcl::PointCloud::Ptr cloud_temp(new pcl::PointCloud());
downSizeFilterICP.setInputCloud(nearKeyframes);
  downSizeFilterICP.filter(*cloud_temp);
  *nearKeyframes = *cloud_temp;
}

// Publish RViz markers for all accepted loop closures: spheres on the paired
// key poses and line segments connecting each pair.
void visualizeLoopClosure() {
  if (loopIndexContainer.empty())
    return;

  visualization_msgs::MarkerArray markerArray;
  // loop nodes
  visualization_msgs::Marker markerNode;
  markerNode.header.frame_id = odometryFrame;
  markerNode.header.stamp = timeLaserInfoStamp;
  markerNode.action = visualization_msgs::Marker::ADD;
  markerNode.type = visualization_msgs::Marker::SPHERE_LIST;
  markerNode.ns = "loop_nodes";
  markerNode.id = 0;
  markerNode.pose.orientation.w = 1;
  markerNode.scale.x = 0.3;
  markerNode.scale.y = 0.3;
  markerNode.scale.z = 0.3;
  markerNode.color.r = 0;
  markerNode.color.g = 0.8;
  markerNode.color.b = 1;
  markerNode.color.a = 1;
  // loop edges
  visualization_msgs::Marker markerEdge;
  markerEdge.header.frame_id = odometryFrame;
  markerEdge.header.stamp = timeLaserInfoStamp;
  markerEdge.action = visualization_msgs::Marker::ADD;
  markerEdge.type = visualization_msgs::Marker::LINE_LIST;
  markerEdge.ns = "loop_edges";
  markerEdge.id = 1;
  markerEdge.pose.orientation.w = 1;
  markerEdge.scale.x = 0.1;
  markerEdge.color.r = 0.9;
  markerEdge.color.g = 0.9;
  markerEdge.color.b = 0;
  markerEdge.color.a = 1;

  for (auto it = loopIndexContainer.begin(); it != loopIndexContainer.end(); ++it) {
    int key_cur = it->first;
    int key_pre = it->second;
    geometry_msgs::Point p;
    p.x = copy_cloudKeyPoses6D->points[key_cur].x;
    p.y = copy_cloudKeyPoses6D->points[key_cur].y;
    p.z = copy_cloudKeyPoses6D->points[key_cur].z;
    markerNode.points.push_back(p);
    markerEdge.points.push_back(p);
    p.x = copy_cloudKeyPoses6D->points[key_pre].x;
    p.y = copy_cloudKeyPoses6D->points[key_pre].y;
    p.z = copy_cloudKeyPoses6D->points[key_pre].z;
    markerNode.points.push_back(p);
    markerEdge.points.push_back(p);
  }

  markerArray.markers.push_back(markerNode);
  markerArray.markers.push_back(markerEdge);
  pubLoopConstraintEdge.publish(markerArray);
}

// Seed transformTobeMapped for the scan-to-map optimization: on the very
// first frame use the IMU attitude; afterwards prefer the odometry
// (pre-integration) initial guess and fall back to IMU rotation increments.
void updateInitialGuess() {
  // save current transformation before any processing
  incrementalOdometryAffineFront = trans2Affine3f(transformTobeMapped);

  static Eigen::Affine3f lastImuTransformation;
  // initialization
  if (cloudKeyPoses3D->points.empty()) {
    transformTobeMapped[0] = cloudInfo.imuRollInit;
    transformTobeMapped[1] = cloudInfo.imuPitchInit;
    transformTobeMapped[2] = cloudInfo.imuYawInit;

    // optionally ignore the IMU heading (e.g. magnetometer unreliable)
    if (!useImuHeadingInitialization)
      transformTobeMapped[2] = 0;

    lastImuTransformation = pcl::getTransformation(0, 0, 0, cloudInfo.imuRollInit, cloudInfo.imuPitchInit, cloudInfo.imuYawInit);  // save imu before return;
    return;
  }

  // use imu pre-integration estimation for pose guess
  static bool lastImuPreTransAvailable = false;
  static Eigen::Affine3f lastImuPreTransformation;
  if (cloudInfo.odomAvailable == true) {
    Eigen::Affine3f transBack = pcl::getTransformation(cloudInfo.initialGuessX, cloudInfo.initialGuessY, cloudInfo.initialGuessZ, cloudInfo.initialGuessRoll, cloudInfo.initialGuessPitch, cloudInfo.initialGuessYaw);
    if (lastImuPreTransAvailable == false) {
      lastImuPreTransformation = transBack;
      lastImuPreTransAvailable = true;
    } else {
      // apply the incremental odometry motion on top of the current estimate
      Eigen::Affine3f transIncre = lastImuPreTransformation.inverse() * transBack;
      Eigen::Affine3f transTobe = trans2Affine3f(transformTobeMapped);
      Eigen::Affine3f transFinal = transTobe * transIncre;
      pcl::getTranslationAndEulerAngles(transFinal, transformTobeMapped[3], transformTobeMapped[4], transformTobeMapped[5], transformTobeMapped[0], transformTobeMapped[1], transformTobeMapped[2]);

      lastImuPreTransformation = transBack;

      lastImuTransformation = pcl::getTransformation(0, 0, 0, cloudInfo.imuRollInit, cloudInfo.imuPitchInit, cloudInfo.imuYawInit);  // save imu before return;
      return;
    }
  }

  // use imu incremental estimation for pose guess (only rotation)
  if (cloudInfo.imuAvailable == true) {
    Eigen::Affine3f transBack = pcl::getTransformation(0, 0, 0, cloudInfo.imuRollInit, cloudInfo.imuPitchInit, cloudInfo.imuYawInit);
    Eigen::Affine3f transIncre = lastImuTransformation.inverse() * transBack;

    Eigen::Affine3f transTobe =
trans2Affine3f(transformTobeMapped);
    Eigen::Affine3f transFinal = transTobe * transIncre;
    pcl::getTranslationAndEulerAngles(transFinal, transformTobeMapped[3], transformTobeMapped[4], transformTobeMapped[5], transformTobeMapped[0], transformTobeMapped[1], transformTobeMapped[2]);

    lastImuTransformation = pcl::getTransformation(0, 0, 0, cloudInfo.imuRollInit, cloudInfo.imuPitchInit, cloudInfo.imuYawInit);  // save imu before return;
    return;
  }
}

// Collect the most recent key poses (up to surroundingKeyframeSize) and build
// the local map from them.
// NOTE(review): pcl::PointCloud below lost its <PointType> template argument.
void extractForLoopClosure() {
  pcl::PointCloud::Ptr cloudToExtract(new pcl::PointCloud());
  int numPoses = cloudKeyPoses3D->size();
  for (int i = numPoses - 1; i >= 0; --i) {
    if ((int)cloudToExtract->size() <= surroundingKeyframeSize)
      cloudToExtract->push_back(cloudKeyPoses3D->points[i]);
    else
      break;
  }

  extractCloud(cloudToExtract);
}

// Select key poses within the surrounding search radius of the latest pose,
// downsample them, and always include key frames from the last 10 s (covers
// in-place rotation), then build the local map via extractCloud().
// NOTE(review): the std::vector declarations lost their element types.
void extractNearby() {
  pcl::PointCloud::Ptr surroundingKeyPoses(new pcl::PointCloud());
  pcl::PointCloud::Ptr surroundingKeyPosesDS(new pcl::PointCloud());
  std::vector pointSearchInd;
  std::vector pointSearchSqDis;

  // extract all the nearby key poses and downsample them
  kdtreeSurroundingKeyPoses->setInputCloud(cloudKeyPoses3D);  // create kd-tree
  kdtreeSurroundingKeyPoses->radiusSearch(cloudKeyPoses3D->back(), (double)surroundingKeyframeSearchRadius, pointSearchInd, pointSearchSqDis);
  for (int i = 0; i < (int)pointSearchInd.size(); ++i) {
    int id = pointSearchInd[i];
    surroundingKeyPoses->push_back(cloudKeyPoses3D->points[id]);
  }

  downSizeFilterSurroundingKeyPoses.setInputCloud(surroundingKeyPoses);
  downSizeFilterSurroundingKeyPoses.filter(*surroundingKeyPosesDS);
  // restore the key-frame index (intensity) destroyed by voxel averaging
  for (auto& pt : surroundingKeyPosesDS->points) {
    kdtreeSurroundingKeyPoses->nearestKSearch(pt, 1, pointSearchInd, pointSearchSqDis);
    pt.intensity = cloudKeyPoses3D->points[pointSearchInd[0]].intensity;
  }

  // also extract some latest key frames in case the robot rotates in one position
  int numPoses = cloudKeyPoses3D->size();
  for (int i = numPoses - 1; i >= 0; --i) {
    if (timeLaserInfoCur - cloudKeyPoses6D->points[i].time < 10.0)
      surroundingKeyPosesDS->push_back(cloudKeyPoses3D->points[i]);
    else
      break;
  }

  extractCloud(surroundingKeyPosesDS);
}

// Fuse the corner/surf clouds of the selected key frames into the local map,
// with a cache (laserCloudMapContainer) of already-transformed key frames.
void extractCloud(pcl::PointCloud::Ptr cloudToExtract) {
  // fuse the map
  laserCloudCornerFromMap->clear();
  laserCloudSurfFromMap->clear();
  for (int i = 0; i < (int)cloudToExtract->size(); ++i) {
    if (pointDistance(cloudToExtract->points[i], cloudKeyPoses3D->back()) > surroundingKeyframeSearchRadius)
      continue;

    int thisKeyInd = (int)cloudToExtract->points[i].intensity;
    if (laserCloudMapContainer.find(thisKeyInd) != laserCloudMapContainer.end()) {
      // transformed cloud available
      *laserCloudCornerFromMap += laserCloudMapContainer[thisKeyInd].first;
      *laserCloudSurfFromMap += laserCloudMapContainer[thisKeyInd].second;
    } else {
      // transformed cloud not available
      pcl::PointCloud laserCloudCornerTemp = *transformPointCloud(cornerCloudKeyFrames[thisKeyInd], &cloudKeyPoses6D->points[thisKeyInd]);
      pcl::PointCloud laserCloudSurfTemp = *transformPointCloud(surfCloudKeyFrames[thisKeyInd], &cloudKeyPoses6D->points[thisKeyInd]);
      *laserCloudCornerFromMap += laserCloudCornerTemp;
      *laserCloudSurfFromMap += laserCloudSurfTemp;
      laserCloudMapContainer[thisKeyInd] = make_pair(laserCloudCornerTemp, laserCloudSurfTemp);
    }
  }

  // Downsample the surrounding corner key frames (or map)
  downSizeFilterCorner.setInputCloud(laserCloudCornerFromMap);
  downSizeFilterCorner.filter(*laserCloudCornerFromMapDS);
  laserCloudCornerFromMapDSNum = laserCloudCornerFromMapDS->size();
  // Downsample the surrounding surf key frames (or map)
  downSizeFilterSurf.setInputCloud(laserCloudSurfFromMap);
  downSizeFilterSurf.filter(*laserCloudSurfFromMapDS);
  laserCloudSurfFromMapDSNum = laserCloudSurfFromMapDS->size();

  // clear map cache if too large
  if (laserCloudMapContainer.size() > 1000)
    laserCloudMapContainer.clear();
}

// Entry point for local-map extraction; the loop-closure variant is disabled.
void extractSurroundingKeyFrames() {
  if (cloudKeyPoses3D->points.empty() == true)
    return;

  // if (loopClosureEnableFlag == true)
  // {
  //     extractForLoopClosure();
  // } else {
  //     extractNearby();
// }
  extractNearby();
}

// Voxel-downsample the current scan's corner and surf feature clouds and
// record the resulting point counts.
void downsampleCurrentScan() {
  // Downsample cloud from current scan
  laserCloudCornerLastDS->clear();
  downSizeFilterCorner.setInputCloud(laserCloudCornerLast);
  downSizeFilterCorner.filter(*laserCloudCornerLastDS);
  laserCloudCornerLastDSNum = laserCloudCornerLastDS->size();

  laserCloudSurfLastDS->clear();
  downSizeFilterSurf.setInputCloud(laserCloudSurfLast);
  downSizeFilterSurf.filter(*laserCloudSurfLastDS);
  laserCloudSurfLastDSNum = laserCloudSurfLastDS->size();
}

// Refresh the scan->map transform used by pointAssociateToMap().
void updatePointAssociateToMap() {
  transPointAssociateToMap = trans2Affine3f(transformTobeMapped);
}

// For each downsampled corner point: find its 5 nearest map corners, fit a
// line via the eigen-decomposition of the neighborhood covariance, and emit a
// point-to-line residual/coefficient when the neighborhood is line-like.
// NOTE(review): std::vector and the cv::Mat .at(...) accessors below lost
// their template arguments (int/float) during extraction.
void cornerOptimization() {
  updatePointAssociateToMap();

#pragma omp parallel for num_threads(numberOfCores)
  for (int i = 0; i < laserCloudCornerLastDSNum; i++) {
    PointType pointOri, pointSel, coeff;
    std::vector pointSearchInd;
    std::vector pointSearchSqDis;

    pointOri = laserCloudCornerLastDS->points[i];
    pointAssociateToMap(&pointOri, &pointSel);
    kdtreeCornerFromMap->nearestKSearch(pointSel, 5, pointSearchInd, pointSearchSqDis);

    cv::Mat matA1(3, 3, CV_32F, cv::Scalar::all(0));
    cv::Mat matD1(1, 3, CV_32F, cv::Scalar::all(0));
    cv::Mat matV1(3, 3, CV_32F, cv::Scalar::all(0));

    // only use neighborhoods whose farthest neighbor is within 1 m
    if (pointSearchSqDis[4] < 1.0) {
      float cx = 0, cy = 0, cz = 0;
      for (int j = 0; j < 5; j++) {
        cx += laserCloudCornerFromMapDS->points[pointSearchInd[j]].x;
        cy += laserCloudCornerFromMapDS->points[pointSearchInd[j]].y;
        cz += laserCloudCornerFromMapDS->points[pointSearchInd[j]].z;
      }
      cx /= 5;
      cy /= 5;
      cz /= 5;

      // covariance of the 5-point neighborhood
      float a11 = 0, a12 = 0, a13 = 0, a22 = 0, a23 = 0, a33 = 0;
      for (int j = 0; j < 5; j++) {
        float ax = laserCloudCornerFromMapDS->points[pointSearchInd[j]].x - cx;
        float ay = laserCloudCornerFromMapDS->points[pointSearchInd[j]].y - cy;
        float az = laserCloudCornerFromMapDS->points[pointSearchInd[j]].z - cz;

        a11 += ax * ax;
        a12 += ax * ay;
        a13 += ax * az;
        a22 += ay * ay;
        a23 += ay * az;
        a33 += az * az;
      }
      a11 /= 5;
      a12 /= 5;
      a13 /= 5;
      a22 /= 5;
      a23 /= 5;
      a33 /= 5;

      matA1.at(0, 0) = a11;
      matA1.at(0, 1) = a12;
      matA1.at(0, 2) = a13;
      matA1.at(1, 0) = a12;
      matA1.at(1, 1) = a22;
      matA1.at(1, 2) = a23;
      matA1.at(2, 0) = a13;
      matA1.at(2, 1) = a23;
      matA1.at(2, 2) = a33;

      cv::eigen(matA1, matD1, matV1);

      // line-like if the largest eigenvalue dominates the second one
      if (matD1.at(0, 0) > 3 * matD1.at(0, 1)) {
        float x0 = pointSel.x;
        float y0 = pointSel.y;
        float z0 = pointSel.z;
        // two points on the fitted line, +/-0.1 m along the dominant eigenvector
        float x1 = cx + 0.1 * matV1.at(0, 0);
        float y1 = cy + 0.1 * matV1.at(0, 1);
        float z1 = cz + 0.1 * matV1.at(0, 2);
        float x2 = cx - 0.1 * matV1.at(0, 0);
        float y2 = cy - 0.1 * matV1.at(0, 1);
        float z2 = cz - 0.1 * matV1.at(0, 2);

        // clang-format off
        float a012 = sqrt(((x0 - x1) * (y0 - y2) - (x0 - x2) * (y0 - y1)) * ((x0 - x1) * (y0 - y2) - (x0 - x2) * (y0 - y1)) + ((x0 - x1) * (z0 - z2) - (x0 - x2) * (z0 - z1)) * ((x0 - x1) * (z0 - z2) - (x0 - x2) * (z0 - z1)) + ((y0 - y1) * (z0 - z2) - (y0 - y2) * (z0 - z1)) * ((y0 - y1) * (z0 - z2) - (y0 - y2) * (z0 - z1)));

        float l12 = sqrt((x1 - x2) * (x1 - x2) + (y1 - y2) * (y1 - y2) + (z1 - z2) * (z1 - z2));

        float la = ((y1 - y2) * ((x0 - x1) * (y0 - y2) - (x0 - x2) * (y0 - y1)) + (z1 - z2) * ((x0 - x1) * (z0 - z2) - (x0 - x2) * (z0 - z1))) / a012 / l12;

        float lb = -((x1 - x2) * ((x0 - x1) * (y0 - y2) - (x0 - x2) * (y0 - y1)) - (z1 - z2) * ((y0 - y1) * (z0 - z2) - (y0 - y2) * (z0 - z1))) / a012 / l12;

        float lc = -((x1 - x2) * ((x0 - x1) * (z0 - z2) - (x0 - x2) * (z0 - z1)) + (y1 - y2) * ((y0 - y1) * (z0 - z2) - (y0 - y2) * (z0 - z1))) / a012 / l12;
        // clang-format on

        // point-to-line distance and distance-based weight
        float ld2 = a012 / l12;

        float s = 1 - 0.9 * fabs(ld2);

        coeff.x = s * la;
        coeff.y = s * lb;
        coeff.z = s * lc;
        coeff.intensity = s * ld2;

        if (s > 0.1) {
          laserCloudOriCornerVec[i] = pointOri;
          coeffSelCornerVec[i] = coeff;
          laserCloudOriCornerFlag[i] = true;
        }
      }
    }
  }
}

// For each downsampled surf point: fit a plane to its 5 nearest map surf
// points and emit a point-to-plane residual/coefficient when the fit is valid.
// NOTE(review): the std::vector declarations lost their element types.
void surfOptimization() {
  updatePointAssociateToMap();

#pragma omp parallel for num_threads(numberOfCores)
  for (int i = 0; i < laserCloudSurfLastDSNum; i++) {
    PointType pointOri, pointSel, coeff;
    std::vector pointSearchInd;
    std::vector pointSearchSqDis;

    pointOri = laserCloudSurfLastDS->points[i];
pointAssociateToMap(&pointOri, &pointSel);
    kdtreeSurfFromMap->nearestKSearch(pointSel, 5, pointSearchInd, pointSearchSqDis);

    // solve A * [pa pb pc]^T = -1 for the plane through the 5 neighbors
    // NOTE(review): the Eigen::Matrix declarations below lost their template
    // arguments (5x3 float and 5x1 float) during extraction.
    Eigen::Matrix matA0;
    Eigen::Matrix matB0;
    Eigen::Vector3f matX0;

    matA0.setZero();
    matB0.fill(-1);
    matX0.setZero();

    if (pointSearchSqDis[4] < 1.0) {
      for (int j = 0; j < 5; j++) {
        matA0(j, 0) = laserCloudSurfFromMapDS->points[pointSearchInd[j]].x;
        matA0(j, 1) = laserCloudSurfFromMapDS->points[pointSearchInd[j]].y;
        matA0(j, 2) = laserCloudSurfFromMapDS->points[pointSearchInd[j]].z;
      }

      matX0 = matA0.colPivHouseholderQr().solve(matB0);

      float pa = matX0(0, 0);
      float pb = matX0(1, 0);
      float pc = matX0(2, 0);
      float pd = 1;

      // normalize the plane equation
      float ps = sqrt(pa * pa + pb * pb + pc * pc);
      pa /= ps;
      pb /= ps;
      pc /= ps;
      pd /= ps;

      // reject the plane if any neighbor is farther than 0.2 m from it
      bool planeValid = true;
      for (int j = 0; j < 5; j++) {
        if (fabs(pa * laserCloudSurfFromMapDS->points[pointSearchInd[j]].x + pb * laserCloudSurfFromMapDS->points[pointSearchInd[j]].y + pc * laserCloudSurfFromMapDS->points[pointSearchInd[j]].z + pd) > 0.2) {
          planeValid = false;
          break;
        }
      }

      if (planeValid) {
        float pd2 = pa * pointSel.x + pb * pointSel.y + pc * pointSel.z + pd;

        float s = 1 - 0.9 * fabs(pd2) / sqrt(sqrt(pointSel.x * pointSel.x + pointSel.y * pointSel.y + pointSel.z * pointSel.z));

        coeff.x = s * pa;
        coeff.y = s * pb;
        coeff.z = s * pc;
        coeff.intensity = s * pd2;

        if (s > 0.1) {
          laserCloudOriSurfVec[i] = pointOri;
          coeffSelSurfVec[i] = coeff;
          laserCloudOriSurfFlag[i] = true;
        }
      }
    }
  }
}

// Gather the per-point residuals selected by cornerOptimization() and
// surfOptimization() into laserCloudOri/coeffSel and reset the flags.
void combineOptimizationCoeffs() {
  // combine corner coeffs
  for (int i = 0; i < laserCloudCornerLastDSNum; ++i) {
    if (laserCloudOriCornerFlag[i] == true) {
      laserCloudOri->push_back(laserCloudOriCornerVec[i]);
      coeffSel->push_back(coeffSelCornerVec[i]);
    }
  }
  // combine surf coeffs
  for (int i = 0; i < laserCloudSurfLastDSNum; ++i) {
    if (laserCloudOriSurfFlag[i] == true) {
      laserCloudOri->push_back(laserCloudOriSurfVec[i]);
      coeffSel->push_back(coeffSelSurfVec[i]);
    }
  }
  // reset flag for next iteration
  std::fill(laserCloudOriCornerFlag.begin(), laserCloudOriCornerFlag.end(), false);
  std::fill(laserCloudOriSurfFlag.begin(), laserCloudOriSurfFlag.end(), false);
}

// One Gauss-Newton step of the scan-to-map optimization (LOAM-style, in the
// camera convention); returns true once the update is small enough.
// NOTE(review): the cv::Mat .at(...) accessors below lost their <float>
// template arguments during extraction.
bool LMOptimization(int iterCount) {
  // This optimization is from the original loam_velodyne by Ji Zhang, need to cope with coordinate transformation
  // lidar <- camera      ---     camera <- lidar
  // x = z                ---     x = y
  // y = x                ---     y = z
  // z = y                ---     z = x
  // roll = yaw           ---     roll = pitch
  // pitch = roll         ---     pitch = yaw
  // yaw = pitch          ---     yaw = roll

  // lidar -> camera
  float srx = sin(transformTobeMapped[1]);
  float crx = cos(transformTobeMapped[1]);
  float sry = sin(transformTobeMapped[2]);
  float cry = cos(transformTobeMapped[2]);
  float srz = sin(transformTobeMapped[0]);
  float crz = cos(transformTobeMapped[0]);

  // need enough correspondences for a well-conditioned solve
  int laserCloudSelNum = laserCloudOri->size();
  if (laserCloudSelNum < 50) {
    return false;
  }

  cv::Mat matA(laserCloudSelNum, 6, CV_32F, cv::Scalar::all(0));
  cv::Mat matAt(6, laserCloudSelNum, CV_32F, cv::Scalar::all(0));
  cv::Mat matAtA(6, 6, CV_32F, cv::Scalar::all(0));
  cv::Mat matB(laserCloudSelNum, 1, CV_32F, cv::Scalar::all(0));
  cv::Mat matAtB(6, 1, CV_32F, cv::Scalar::all(0));
  cv::Mat matX(6, 1, CV_32F, cv::Scalar::all(0));

  PointType pointOri, coeff;

  for (int i = 0; i < laserCloudSelNum; i++) {
    // lidar -> camera
    pointOri.x = laserCloudOri->points[i].y;
    pointOri.y = laserCloudOri->points[i].z;
    pointOri.z = laserCloudOri->points[i].x;
    // lidar -> camera
    coeff.x = coeffSel->points[i].y;
    coeff.y = coeffSel->points[i].z;
    coeff.z = coeffSel->points[i].x;
    coeff.intensity = coeffSel->points[i].intensity;
    // in camera
    float arx = (crx * sry * srz * pointOri.x + crx * crz * sry * pointOri.y - srx * sry * pointOri.z) * coeff.x + (-srx * srz * pointOri.x - crz * srx * pointOri.y - crx * pointOri.z) * coeff.y + (crx * cry * srz * pointOri.x + crx * cry * crz * pointOri.y - cry * srx * pointOri.z) * coeff.z;

    float ary = ((cry * srx * srz - crz * sry) * pointOri.x + (sry * srz + cry * crz * srx) * pointOri.y + crx * cry * pointOri.z) * coeff.x + ((-cry * crz - srx *
sry * srz) * pointOri.x + (cry * srz - crz * srx * sry) * pointOri.y - crx * sry * pointOri.z) * coeff.z;

    float arz = ((crz * srx * sry - cry * srz) * pointOri.x + (-cry * crz - srx * sry * srz) * pointOri.y) * coeff.x + (crx * crz * pointOri.x - crx * srz * pointOri.y) * coeff.y + ((sry * srz + cry * crz * srx) * pointOri.x + (crz * sry - cry * srx * srz) * pointOri.y) * coeff.z;
    // lidar -> camera
    matA.at(i, 0) = arz;
    matA.at(i, 1) = arx;
    matA.at(i, 2) = ary;
    matA.at(i, 3) = coeff.z;
    matA.at(i, 4) = coeff.x;
    matA.at(i, 5) = coeff.y;
    matB.at(i, 0) = -coeff.intensity;
  }

  // normal equations: (A^T A) x = A^T b, solved via QR
  cv::transpose(matA, matAt);
  matAtA = matAt * matA;
  matAtB = matAt * matB;
  cv::solve(matAtA, matAtB, matX, cv::DECOMP_QR);

  // on the first iteration, detect degenerate directions of A^T A and build a
  // projection matrix that zeroes updates along them
  if (iterCount == 0) {
    cv::Mat matE(1, 6, CV_32F, cv::Scalar::all(0));
    cv::Mat matV(6, 6, CV_32F, cv::Scalar::all(0));
    cv::Mat matV2(6, 6, CV_32F, cv::Scalar::all(0));

    cv::eigen(matAtA, matE, matV);
    matV.copyTo(matV2);

    isDegenerate = false;
    float eignThre[6] = {100, 100, 100, 100, 100, 100};
    for (int i = 5; i >= 0; i--) {
      if (matE.at(0, i) < eignThre[i]) {
        for (int j = 0; j < 6; j++) {
          matV2.at(i, j) = 0;
        }
        isDegenerate = true;
      } else {
        break;
      }
    }
    matP = matV.inv() * matV2;
  }

  if (isDegenerate) {
    cv::Mat matX2(6, 1, CV_32F, cv::Scalar::all(0));
    matX.copyTo(matX2);
    matX = matP * matX2;
  }

  transformTobeMapped[0] += matX.at(0, 0);
  transformTobeMapped[1] += matX.at(1, 0);
  transformTobeMapped[2] += matX.at(2, 0);
  transformTobeMapped[3] += matX.at(3, 0);
  transformTobeMapped[4] += matX.at(4, 0);
  transformTobeMapped[5] += matX.at(5, 0);

  // convergence check: rotation update in degrees, translation in cm
  float deltaR = sqrt(
      pow(pcl::rad2deg(matX.at(0, 0)), 2) + pow(pcl::rad2deg(matX.at(1, 0)), 2) + pow(pcl::rad2deg(matX.at(2, 0)), 2));
  float deltaT = sqrt(
      pow(matX.at(3, 0) * 100, 2) + pow(matX.at(4, 0) * 100, 2) + pow(matX.at(5, 0) * 100, 2));

  if (deltaR < 0.05 && deltaT < 0.05) {
    return true;  // converged
  }
  return false;  // keep optimizing
}

// Iterate the scan-to-map Gauss-Newton optimization (up to 30 iterations),
// then fuse the IMU attitude into the result via transformUpdate().
void scan2MapOptimization() {
  if (cloudKeyPoses3D->points.empty())
    return;

  if (laserCloudCornerLastDSNum > edgeFeatureMinValidNum && laserCloudSurfLastDSNum > surfFeatureMinValidNum) {
    kdtreeCornerFromMap->setInputCloud(laserCloudCornerFromMapDS);
    kdtreeSurfFromMap->setInputCloud(laserCloudSurfFromMapDS);

    for (int iterCount = 0; iterCount < 30; iterCount++) {
      laserCloudOri->clear();
      coeffSel->clear();

      cornerOptimization();
      surfOptimization();

      combineOptimizationCoeffs();

      if (LMOptimization(iterCount) == true)
        break;
    }

    transformUpdate();
  } else {
    ROS_WARN("Not enough features! Only %d edge and %d planar features available.", laserCloudCornerLastDSNum, laserCloudSurfLastDSNum);
  }
}

// Blend the optimized roll/pitch with the IMU attitude (spherical
// interpolation, weight imuRPYWeight) and clamp roll/pitch/z to their limits.
void transformUpdate() {
  if (cloudInfo.imuAvailable == true) {
    // only trust the IMU attitude away from the +/-90 deg pitch singularity
    if (std::abs(cloudInfo.imuPitchInit) < 1.4) {
      double imuWeight = imuRPYWeight;
      tf::Quaternion imuQuaternion;
      tf::Quaternion transformQuaternion;
      double rollMid, pitchMid, yawMid;

      // slerp roll
      transformQuaternion.setRPY(transformTobeMapped[0], 0, 0);
      imuQuaternion.setRPY(cloudInfo.imuRollInit, 0, 0);
      tf::Matrix3x3(transformQuaternion.slerp(imuQuaternion, imuWeight)).getRPY(rollMid, pitchMid, yawMid);
      transformTobeMapped[0] = rollMid;

      // slerp pitch
      transformQuaternion.setRPY(0, transformTobeMapped[1], 0);
      imuQuaternion.setRPY(0, cloudInfo.imuPitchInit, 0);
      tf::Matrix3x3(transformQuaternion.slerp(imuQuaternion, imuWeight)).getRPY(rollMid, pitchMid, yawMid);
      transformTobeMapped[1] = pitchMid;
    }
  }

  transformTobeMapped[0] = constraintTransformation(transformTobeMapped[0], rotation_tollerance);
  transformTobeMapped[1] = constraintTransformation(transformTobeMapped[1], rotation_tollerance);
  transformTobeMapped[5] = constraintTransformation(transformTobeMapped[5], z_tollerance);

  incrementalOdometryAffineBack = trans2Affine3f(transformTobeMapped);
}

// Clamp `value` to the symmetric interval [-limit, limit].
float constraintTransformation(float value, float limit) {
  if (value < -limit)
    value = -limit;
  if (value > limit)
    value = limit;

  return value;
}

// Decide whether the current pose is far enough (in angle or distance) from
// the last key frame to warrant saving a new one.
bool saveFrame() {
  if (cloudKeyPoses3D->points.empty())
    return true;

  Eigen::Affine3f transStart = pclPointToAffine3f(cloudKeyPoses6D->back());
Eigen::Affine3f transFinal = pcl::getTransformation(transformTobeMapped[3], transformTobeMapped[4], transformTobeMapped[5], transformTobeMapped[0], transformTobeMapped[1], transformTobeMapped[2]);
  Eigen::Affine3f transBetween = transStart.inverse() * transFinal;
  float x, y, z, roll, pitch, yaw;
  pcl::getTranslationAndEulerAngles(transBetween, x, y, z, roll, pitch, yaw);

  // keep the frame only if motion since the last key frame exceeds a threshold
  if (abs(roll) < surroundingkeyframeAddingAngleThreshold && abs(pitch) < surroundingkeyframeAddingAngleThreshold && abs(yaw) < surroundingkeyframeAddingAngleThreshold && sqrt(x * x + y * y + z * z) < surroundingkeyframeAddingDistThreshold)
    return false;

  return true;
}

// Add the ego-motion factor: a prior on the very first key pose, otherwise a
// between-factor from the previous key pose to the current one. Node indices
// are allocated from the shared numberOfNodes counter (poses and object nodes
// share one index space).
// NOTE(review): PriorFactor/BetweenFactor lost their <Pose3> template
// arguments during extraction.
void addOdomFactor() {
  if (cloudKeyPoses3D->points.empty()) {
    auto currentKeyIndex = numberOfNodes++;  // 0
    keyPoseIndices.push_back(currentKeyIndex);

    noiseModel::Diagonal::shared_ptr priorNoise = noiseModel::Diagonal::Variances(priorOdometryDiagonalVarianceEigenVector);  // rad*rad, meter*meter
    gtSAMgraph.add(PriorFactor(0, trans2gtsamPose(transformTobeMapped), priorNoise));
    initialEstimate.insert(currentKeyIndex, trans2gtsamPose(transformTobeMapped));
    initialEstimateForAnalysis.insert(currentKeyIndex, trans2gtsamPose(transformTobeMapped));
  } else {
    auto previousKeyIndex = keyPoseIndices.back();
    auto currentKeyIndex = numberOfNodes++;
    keyPoseIndices.push_back(currentKeyIndex);

    noiseModel::Diagonal::shared_ptr odometryNoise = noiseModel::Diagonal::Variances(odometryDiagonalVarianceEigenVector);
    gtsam::Pose3 poseFrom = pclPointTogtsamPose3(cloudKeyPoses6D->points.back());
    gtsam::Pose3 poseTo = trans2gtsamPose(transformTobeMapped);
    gtSAMgraph.add(BetweenFactor(previousKeyIndex, currentKeyIndex, poseFrom.between(poseTo), odometryNoise));
    initialEstimate.insert(currentKeyIndex, poseTo);
    initialEstimateForAnalysis.insert(currentKeyIndex, poseTo);
  }
}

// Add a GPS unary factor when the pose covariance is large, the fix is recent
// (+/-0.2 s), sufficiently low-noise, and at least 5 m from the last GPS
// factor. Elevation is optionally replaced by the LiDAR z estimate.
void addGPSFactor() {
  if (gpsQueue.empty())
    return;

  // wait for system initialized and settles down
  if (cloudKeyPoses3D->points.empty())
    return;
  else {
    if (pointDistance(cloudKeyPoses3D->front(), cloudKeyPoses3D->back()) < 5.0)
      return;
  }

  // pose covariance small, no need to correct
  if (poseCovariance(3, 3) < poseCovThreshold && poseCovariance(4, 4) < poseCovThreshold)
    return;

  // last gps position
  static PointType lastGPSPoint;

  while (!gpsQueue.empty()) {
    if (gpsQueue.front().header.stamp.toSec() < timeLaserInfoCur - 0.2) {
      // message too old
      gpsQueue.pop_front();
    } else if (gpsQueue.front().header.stamp.toSec() > timeLaserInfoCur + 0.2) {
      // message too new
      break;
    } else {
      nav_msgs::Odometry thisGPS = gpsQueue.front();
      gpsQueue.pop_front();

      // GPS too noisy, skip
      float noise_x = thisGPS.pose.covariance[0];
      float noise_y = thisGPS.pose.covariance[7];
      float noise_z = thisGPS.pose.covariance[14];
      if (noise_x > gpsCovThreshold || noise_y > gpsCovThreshold)
        continue;

      float gps_x = thisGPS.pose.pose.position.x;
      float gps_y = thisGPS.pose.pose.position.y;
      float gps_z = thisGPS.pose.pose.position.z;
      if (!useGpsElevation) {
        gps_z = transformTobeMapped[5];
        noise_z = 0.01;
      }

      // GPS not properly initialized (0,0,0)
      if (abs(gps_x) < 1e-6 && abs(gps_y) < 1e-6)
        continue;

      // Add GPS every a few meters
      PointType curGPSPoint;
      curGPSPoint.x = gps_x;
      curGPSPoint.y = gps_y;
      curGPSPoint.z = gps_z;
      if (pointDistance(curGPSPoint, lastGPSPoint) < 5.0)
        continue;
      else
        lastGPSPoint = curGPSPoint;

      gtsam::Vector Vector3(3);
      Vector3 << max(noise_x, 1.0f), max(noise_y, 1.0f), max(noise_z, 1.0f);
      noiseModel::Diagonal::shared_ptr gps_noise = noiseModel::Diagonal::Variances(Vector3);
      // NOTE(review): the factor is keyed on cloudKeyPoses3D->size(); with the
      // shared numberOfNodes index space used by addOdomFactor this may not be
      // the current key-pose node index — verify against the upstream source.
      gtsam::GPSFactor gps_factor(cloudKeyPoses3D->size(), gtsam::Point3(gps_x, gps_y, gps_z), gps_noise);
      gtSAMgraph.add(gps_factor);

      aLoopIsClosed = true;
      break;
    }
  }
}

// Move the queued loop-closure constraints into the factor graph and clear
// the queues; marks aLoopIsClosed so the pose graph is re-propagated.
// NOTE(review): BetweenFactor lost its <Pose3> template argument.
void addLoopFactor() {
  if (loopIndexQueue.empty())
    return;

  for (int i = 0; i < (int)loopIndexQueue.size(); ++i) {
    int indexFrom = loopIndexQueue[i].first;
    int indexTo = loopIndexQueue[i].second;
    gtsam::Pose3 poseBetween = loopPoseQueue[i];
    gtsam::noiseModel::Diagonal::shared_ptr noiseBetween = loopNoiseQueue[i];
    gtSAMgraph.add(BetweenFactor(indexFrom, indexTo, poseBetween, noiseBetween));
  }

  loopIndexQueue.clear();
  loopPoseQueue.clear();
  loopNoiseQueue.clear();
  aLoopIsClosed = true;
}

// Predict every tracked object one step forward with the constant-velocity
// model and allocate fresh pose/velocity nodes for objects that are not lost.
// NOTE(review): the std::map declaration and gtsam::traits::Local/Retract
// calls below lost their template arguments (presumably the object bookkeeping
// type and gtsam::Pose3) during extraction.
void propagateObjectPoses() {
  std::map nextObjects;

  // Only triggered in the beginning.
  if (objects.empty()) {
    objects.push_back(nextObjects);
    return;
  }

#ifdef MAP_OPTIMIZATION_DEBUG
  std::cout << "DELTA_TIME :: " << deltaTime << "\n\n";
#endif

  for (const auto& pairedObject : objects.back()) {
    // Drop the object if it is lost for a long time
    if (pairedObject.second.lostCount > trackingStepsForLostObject) continue;

    // Propagate the object using the constant velocity model as well as
    // register a new variable node for the object
    auto identity = Pose3::identity();
    auto nextObject = pairedObject.second.clone();
    // scale the velocity (a tangent-space increment) by the elapsed time and
    // retract it back onto SE(3)
    auto deltaPoseVec = gtsam::traits::Local(identity, nextObject.velocity) * deltaTime;
    auto deltaPose = gtsam::traits::Retract(identity, deltaPoseVec);
    nextObject.pose = nextObject.pose * deltaPose;
    nextObject.isFirst = false;
    nextObject.timestamp = timeLaserInfoStamp;

    if (pairedObject.second.lostCount == 0) {
      nextObject.poseNodeIndex = numberOfNodes++;
      nextObject.velocityNodeIndex = numberOfNodes++;

      initialEstimate.insert(nextObject.poseNodeIndex, nextObject.pose);
      initialEstimate.insert(nextObject.velocityNodeIndex, nextObject.velocity);
      initialEstimateForAnalysis.insert(nextObject.poseNodeIndex, nextObject.pose);
      initialEstimateForAnalysis.insert(nextObject.velocityNodeIndex, nextObject.velocity);

      nextObject.previousVelocityNodeIndices.push_back(pairedObject.second.velocityNodeIndex);
    } else {
      // lost at this stamp: no graph nodes are created
      nextObject.poseNodeIndex = -1;
      nextObject.velocityNodeIndex = -1;
    }

    nextObjects[pairedObject.first] = nextObject;
  }
  objects.push_back(nextObjects);

#ifdef ENABLE_MINIMAL_MEMORY_USAGE
  // keep only as much history as the coupling/consistency windows require
  if (objects.size() > 2 && objects.size() > numberOfPreLooseCouplingSteps + 1 && objects.size() > numberOfVelocityConsistencySteps + 1) {
    objects.erase(objects.begin());
  }
#endif
}

void addConstantVelocityFactor() {
  // Skip the process if there is no enough
time stamps for adding constant // velocity factors if (objects.size() < 2) return; for (const auto& pairedObject : objects.back()) { // Skip if the object is lost at this stamp or is a new object if (pairedObject.second.isFirst) continue; if (pairedObject.second.lostCount > 0) continue; auto noiseModel = noiseModel::Diagonal::Variances(constantVelocityDiagonalVarianceEigenVector); auto earlyNoiseModel = noiseModel::Diagonal::Variances(earlyConstantVelocityDiagonalVarianceEigenVector); auto currentObject = pairedObject.second; auto objectIndex = currentObject.objectIndex; auto previousObject = objects[objects.size() - 2][objectIndex]; if (pairedObject.second.isTightlyCoupled) { gtSAMgraph.add(ConstantVelocityFactor(previousObject.velocityNodeIndex, currentObject.velocityNodeIndex, noiseModel)); } else { if (objectPaths.markers[pairedObject.second.objectIndex].points.size() <= numberOfEarlySteps) { gtSAMgraphForLooselyCoupledObjects.add(ConstantVelocityFactor(previousObject.velocityNodeIndex, currentObject.velocityNodeIndex, earlyNoiseModel)); } else { gtSAMgraphForLooselyCoupledObjects.add(ConstantVelocityFactor(previousObject.velocityNodeIndex, currentObject.velocityNodeIndex, noiseModel)); } } } } void addStablePoseFactor() { // Skip the process if there is no enough time stamps for adding constant // velocity factors if (objects.size() < 2) return; for (auto& pairedObject : objects.back()) { // Skip if the object is lost at this stamp or is a new object if (pairedObject.second.isFirst) continue; if (pairedObject.second.lostCount > 0) continue; auto noise = noiseModel::Diagonal::Variances(motionDiagonalVarianceEigenVector); auto& currentObject = pairedObject.second; auto objectIndex = currentObject.objectIndex; auto previousObject = objects[objects.size() - 2][objectIndex]; if (pairedObject.second.isTightlyCoupled) { gtSAMgraph.add(StablePoseFactor(previousObject.poseNodeIndex, #ifdef ENABLE_COMPACT_VERSION_OF_FACTOR_GRAPH currentObject.velocityNodeIndex, #else 
previousObject.velocityNodeIndex, #endif currentObject.poseNodeIndex, deltaTime, noise)); } else { gtSAMgraphForLooselyCoupledObjects.add(StablePoseFactor(previousObject.poseNodeIndex, #ifdef ENABLE_COMPACT_VERSION_OF_FACTOR_GRAPH currentObject.velocityNodeIndex, #else previousObject.velocityNodeIndex, #endif currentObject.poseNodeIndex, deltaTime, noise)); } currentObject.motionFactorPtr = boost::make_shared(previousObject.poseNodeIndex, #ifdef ENABLE_COMPACT_VERSION_OF_FACTOR_GRAPH currentObject.velocityNodeIndex, #else previousObject.velocityNodeIndex, #endif currentObject.poseNodeIndex, deltaTime, noise); initialEstimateForAnalysis.insert(previousObject.poseNodeIndex, previousObject.pose); initialEstimateForAnalysis.insert(previousObject.velocityNodeIndex, previousObject.velocity); currentObject.initialMotionError = currentObject.motionFactorPtr->error(initialEstimateForAnalysis); } } void addDetectionFactor(bool requiredMockDetection = false) { anyObjectIsTightlyCoupled = false; if (detections->boxes.size() == 0 && objects.size() == 0) { // Skip the process if there is no detection and no active moving objects return; } else if (detections->boxes.size() == 0 && objects.size() > 0) { // Set every moving object as lost if there is no detection for (auto& pairedObject : objects.back()) { auto& object = pairedObject.second; ++object.lostCount; object.confidence = 0.0; objectPaths.markers[object.objectIndex].scale.x = 0.3; objectPaths.markers[object.objectIndex].scale.y = 0.3; objectPaths.markers[object.objectIndex].scale.z = 0.3; } return; } // After the above condition, the following process will suppose that there // exist at least one detection in this step // Initialize an indicator matrix for data association of objects and // detections, where we secretly add a row to the matrix in case of no // active objects at the current stamp Eigen::MatrixXi indicator = Eigen::MatrixXi::Zero(objects.back().size() + 1, detections->boxes.size()); std::vector 
trackingObjectIndices(detections->boxes.size(), -1); // Create a vector of Detections. auto smallEgoMotion = Pose3(Rot3::RzRyRx(0, 0, 0), Point3(0, 0, 0)); if (requiredMockDetection) { Eigen::Affine3f transStart = pclPointToAffine3f(cloudKeyPoses6D->back()); Eigen::Affine3f transFinal = pcl::getTransformation(transformTobeMapped[3], transformTobeMapped[4], transformTobeMapped[5], transformTobeMapped[0], transformTobeMapped[1], transformTobeMapped[2]); Eigen::Affine3f transBetween = transStart.inverse() * transFinal; float x, y, z, roll, pitch, yaw; pcl::getTranslationAndEulerAngles(transBetween, x, y, z, roll, pitch, yaw); smallEgoMotion = Pose3(Rot3::RzRyRx(roll, pitch, yaw), Point3(x, y, z)); } detectionVector.clear(); tightlyCoupledDetectionVector.clear(); earlyLooselyCoupledMatchingVector.clear(); looselyCoupledMatchingVector.clear(); tightlyCoupledMatchingVector.clear(); dataAssociationVector.clear(); for (auto box : detections->boxes) { if (requiredMockDetection) { auto pose = Pose3(Rot3::Quaternion(box.pose.orientation.w, box.pose.orientation.x, box.pose.orientation.y, box.pose.orientation.z), Point3(box.pose.position.x, box.pose.position.y, box.pose.position.z)); pose = smallEgoMotion * pose; auto quat = pose.rotation().toQuaternion(); box.pose.orientation.w = quat.w(); box.pose.orientation.x = quat.x(); box.pose.orientation.y = quat.y(); box.pose.orientation.z = quat.z(); auto pos = pose.translation(); box.pose.position.x = pos.x(); box.pose.position.y = pos.y(); box.pose.position.z = pos.z(); } detectionVector.emplace_back(box, looselyCoupledDetectionVarianceEigenVector); tightlyCoupledDetectionVector.emplace_back(box, tightlyCoupledDetectionVarianceEigenVector); earlyLooselyCoupledMatchingVector.emplace_back(box, earlyLooselyCoupledMatchingVarianceEigenVector); looselyCoupledMatchingVector.emplace_back(box, looselyCoupledMatchingVarianceEigenVector); tightlyCoupledMatchingVector.emplace_back(box, tightlyCoupledMatchingVarianceEigenVector); 
dataAssociationVector.emplace_back(box, dataAssociationVarianceEigenVector); } auto egoPoseKey = keyPoseIndices.back(); auto egoPose = initialEstimate.at(egoPoseKey); auto invEgoPose = egoPose.inverse(); // Perform the data association for each object, and determine if this // object is lost at this stamp #ifdef MAP_OPTIMIZATION_DEBUG std::cout << "OBJECT_INDICES ::\n"; #endif size_t i = 0; // object index with respect to the indicator matrix for (auto& pairedObject : objects.back()) { auto& object = pairedObject.second; size_t j; // detection index with respect to the indicator matrix double error; size_t dataAssociationJ; double dataAssociationError; #ifdef MAP_OPTIMIZATION_DEBUG std::cout << object.objectIndex << ' '; #endif auto&& predictedPose = invEgoPose * object.pose; if (object.trackScore >= numberOfPreLooseCouplingSteps) { // std::tie(j, error) = getDetectionIndexAndError(predictedPose, tightlyCoupledMatchingVector); std::tie(j, error) = getDetectionIndexAndError(predictedPose, looselyCoupledMatchingVector); } else if (objectPaths.markers[object.objectIndex].points.size() <= numberOfEarlySteps) { // Set the detection factor error to the early-loosely-coupled detection // error for new objects, as they need some chances tp obtain velocity std::tie(j, error) = getDetectionIndexAndError(predictedPose, earlyLooselyCoupledMatchingVector); } else { std::tie(j, error) = getDetectionIndexAndError(predictedPose, looselyCoupledMatchingVector); } std::tie(dataAssociationJ, dataAssociationError) = getDetectionIndexAndError(predictedPose, dataAssociationVector); // TODO: Increase code readability if (error < detectionMatchThreshold) { // found // If the object is lost, we still need to create a new moving object // for it. 
In this way we would temporarily skip it and later we will // add the object properly if (object.lostCount > 0) { trackingObjectIndices[j] = object.objectIndexForTracking; // A tricky part: set the lostCount to a large number so that the // system will subtly remove this ``retired'' object object.lostCount = std::numeric_limits::max(); } else { indicator(i, j) = 1; object.lostCount = 0; // The maximum of `trackScore` is `numberOfPreLooseCouplingSteps` + 1 if (object.trackScore <= numberOfPreLooseCouplingSteps) { ++object.trackScore; } object.detection = detections->boxes[j]; if (object.trackScore >= numberOfPreLooseCouplingSteps + 1) { // Use a tighter threshold to determine if this detection is good // for coupling. If this detection does not meet the requirement, // the object will be loosely-coupled, and the corresponding // `trackScore` will be deducted. auto tightlyCoupledDetectionFactorPtr = boost::make_shared(egoPoseKey, object.poseNodeIndex, tightlyCoupledDetectionVector); // auto&& detectionError = tightlyCoupledDetectionFactorPtr->error(initialEstimateForAnalysis); double detectionError; std::tie(j, detectionError) = getDetectionIndexAndError(predictedPose, tightlyCoupledMatchingVector); auto spatialConsistencyTest = detectionError <= tightCouplingDetectionErrorThreshold; auto temporalConsistencyTest = object.velocityIsConsistent(numberOfVelocityConsistencySteps, isamCurrentEstimate, objectAngularVelocityConsistencyVarianceThreshold, objectLinearVelocityConsistencyVarianceThreshold); if (spatialConsistencyTest && temporalConsistencyTest) { ++numberOfTightlyCoupledObjectsAtThisMoment; anyObjectIsTightlyCoupled = true; object.isTightlyCoupled = true; gtSAMgraph.add(TightlyCoupledDetectionFactor(egoPoseKey, object.poseNodeIndex, tightlyCoupledDetectionVector, j)); object.tightlyCoupledDetectionFactorPtr = tightlyCoupledDetectionFactorPtr; object.initialDetectionError = detectionError; } else { object.trackScore -= numberOfInterLooseCouplingSteps; 
object.isTightlyCoupled = false; initialEstimateForLooselyCoupledObjects.insert(object.poseNodeIndex, initialEstimate.at(object.poseNodeIndex)); initialEstimateForLooselyCoupledObjects.insert(object.velocityNodeIndex, initialEstimate.at(object.velocityNodeIndex)); initialEstimate.erase(object.poseNodeIndex); initialEstimate.erase(object.velocityNodeIndex); gtSAMgraphForLooselyCoupledObjects.add(LooselyCoupledDetectionFactor(egoPoseKey, object.poseNodeIndex, detectionVector, j)); object.looselyCoupledDetectionFactorPtr = boost::make_shared(egoPoseKey, object.poseNodeIndex, detectionVector); object.initialDetectionError = object.looselyCoupledDetectionFactorPtr->error(initialEstimateForAnalysis); } } else { // Pre-loosely coupled detection to stabilize the object velocity object.isTightlyCoupled = false; initialEstimateForLooselyCoupledObjects.insert(object.poseNodeIndex, initialEstimate.at(object.poseNodeIndex)); initialEstimateForLooselyCoupledObjects.insert(object.velocityNodeIndex, initialEstimate.at(object.velocityNodeIndex)); initialEstimate.erase(object.poseNodeIndex); initialEstimate.erase(object.velocityNodeIndex); gtSAMgraphForLooselyCoupledObjects.add(LooselyCoupledDetectionFactor(egoPoseKey, object.poseNodeIndex, detectionVector, j)); object.looselyCoupledDetectionFactorPtr = boost::make_shared(egoPoseKey, object.poseNodeIndex, detectionVector); object.initialDetectionError = object.looselyCoupledDetectionFactorPtr->error(initialEstimateForAnalysis); } } } else { // lost ++object.lostCount; object.confidence = 0.0; object.trackScore = 0.0; objectPaths.markers[object.objectIndex].scale.x = 0.3; objectPaths.markers[object.objectIndex].scale.y = 0.3; objectPaths.markers[object.objectIndex].scale.z = 0.3; // Use a larger threshold to track object without association in the // factor graph. They are different objects in the factor graph, but in // the tracking (visualization) system, they are the same object. 
if (dataAssociationError < detectionMatchThreshold) { trackingObjectIndices[dataAssociationJ] = object.objectIndexForTracking; // A tricky part: set the lostCount to a large number so that the // system will subtly remove this ``retired'' object object.lostCount = std::numeric_limits::max(); } else { trackingObjectPaths.markers[object.objectIndexForTracking].scale.x = 0.3; trackingObjectPaths.markers[object.objectIndexForTracking].scale.y = 0.3; trackingObjectPaths.markers[object.objectIndexForTracking].scale.z = 0.3; } } ++i; } #ifdef MAP_OPTIMIZATION_DEBUG cout << "\n\n" << "INDICATOR ::\n" << indicator << "\n\n"; #endif // Register a new dynamic object if the detection does not belong to any // current active objects for (size_t idx = 0; idx < detectionVector.size(); ++idx) { if (indicator.col(idx).sum() == 0) { // Initialize the object ObjectState object; object.detection = detections->boxes[idx]; object.pose = egoPose * detectionVector[idx].getPose(); object.velocity = Pose3::identity(); object.poseNodeIndex = numberOfNodes++; object.velocityNodeIndex = numberOfNodes++; object.objectIndex = numberOfRegisteredObjects++; object.isFirst = true; object.timestamp = timeLaserInfoStamp; if (trackingObjectIndices[idx] < 0) { object.objectIndexForTracking = numberOfTrackingObjects++; // Initialize a path object (marker) for visualizing path visualization_msgs::Marker marker; marker.id = object.objectIndexForTracking; marker.type = visualization_msgs::Marker::SPHERE_LIST; std_msgs::ColorRGBA color = jsk_topic_tools::colorCategory20(object.objectIndexForTracking); marker.color.a = 1.0; marker.color.r = color.r; marker.color.g = color.g; marker.color.b = color.b; marker.scale.x = 0.6; marker.scale.y = 0.6; marker.scale.z = 0.6; marker.pose.orientation = tf::createQuaternionMsgFromYaw(0); trackingObjectPaths.markers.push_back(marker); visualization_msgs::Marker labelMarker; labelMarker.id = object.objectIndexForTracking; labelMarker.type = 
visualization_msgs::Marker::TEXT_VIEW_FACING; labelMarker.color.a = 1.0; labelMarker.color.r = color.r; labelMarker.color.g = color.g; labelMarker.color.b = color.b; labelMarker.scale.z = 1.2; labelMarker.text = "Object " + std::to_string(object.objectIndexForTracking); trackingObjectLabels.markers.push_back(labelMarker); visualization_msgs::Marker velocityMarker; velocityMarker.id = object.objectIndexForTracking; velocityMarker.type = visualization_msgs::Marker::LINE_STRIP; velocityMarker.color.a = 0.7; velocityMarker.color.r = color.r; velocityMarker.color.g = color.g; velocityMarker.color.b = color.b; velocityMarker.scale.x = 0.4; velocityMarker.scale.y = 0.4; velocityMarker.scale.z = 0.4; velocityMarker.pose.orientation = tf::createQuaternionMsgFromYaw(0); trackingObjectVelocities.markers.push_back(velocityMarker); velocityMarker.type = visualization_msgs::Marker::ARROW; trackingObjectVelocityArrows.markers.push_back(velocityMarker); } else { object.objectIndexForTracking = trackingObjectIndices[idx]; trackingObjectPaths.markers[object.objectIndexForTracking].scale.x = 0.6; trackingObjectPaths.markers[object.objectIndexForTracking].scale.y = 0.6; trackingObjectPaths.markers[object.objectIndexForTracking].scale.z = 0.6; trackingObjectLabels.markers[object.objectIndexForTracking].text = "Object " + std::to_string(object.objectIndexForTracking); } // TODO: Propagate the bounding box in the post-processing object.box = detectionVector[idx].getBoundingBox(); objects.back()[object.objectIndex] = object; // Initialize a path object (marker) for visualizing path visualization_msgs::Marker marker; marker.id = object.objectIndex; marker.type = visualization_msgs::Marker::SPHERE_LIST; std_msgs::ColorRGBA color = jsk_topic_tools::colorCategory20(object.objectIndex); marker.color.a = 1.0; marker.color.r = color.r; marker.color.g = color.g; marker.color.b = color.b; marker.scale.x = 0.6; marker.scale.y = 0.6; marker.scale.z = 0.6; marker.pose.orientation = 
tf::createQuaternionMsgFromYaw(0); objectPaths.markers.push_back(marker); visualization_msgs::Marker labelMarker; labelMarker.id = object.objectIndex; labelMarker.type = visualization_msgs::Marker::TEXT_VIEW_FACING; labelMarker.color.a = 1.0; labelMarker.color.r = color.r; labelMarker.color.g = color.g; labelMarker.color.b = color.b; labelMarker.scale.z = 1.2; labelMarker.text = "Object " + std::to_string(object.objectIndex); objectLabels.markers.push_back(labelMarker); visualization_msgs::Marker velocityMarker; velocityMarker.id = object.objectIndex; velocityMarker.type = visualization_msgs::Marker::LINE_STRIP; velocityMarker.color.a = 0.7; velocityMarker.color.r = color.r; velocityMarker.color.g = color.g; velocityMarker.color.b = color.b; velocityMarker.scale.x = 0.4; velocityMarker.scale.y = 0.4; velocityMarker.scale.z = 0.4; velocityMarker.pose.orientation = tf::createQuaternionMsgFromYaw(0); objectVelocities.markers.push_back(velocityMarker); velocityMarker.type = visualization_msgs::Marker::ARROW; objectVelocityArrows.markers.push_back(velocityMarker); initialEstimateForLooselyCoupledObjects.insert(object.poseNodeIndex, object.pose); initialEstimateForLooselyCoupledObjects.insert(object.velocityNodeIndex, object.velocity); initialEstimateForAnalysis.insert(object.poseNodeIndex, object.pose); initialEstimateForAnalysis.insert(object.velocityNodeIndex, object.velocity); // detection factor gtSAMgraphForLooselyCoupledObjects.add(LooselyCoupledDetectionFactor(egoPoseKey, object.poseNodeIndex, detectionVector, idx)); objects.back()[object.objectIndex].looselyCoupledDetectionFactorPtr = boost::make_shared(egoPoseKey, object.poseNodeIndex, detectionVector); #ifndef ENABLE_COMPACT_VERSION_OF_FACTOR_GRAPH // prior velocity factor (the noise should be large) auto noise = noiseModel::Diagonal::Variances((Vector(6) << 1e-2, 1e-2, 1e0, 1e8, 1e2, 1e2).finished()); gtSAMgraphForLooselyCoupledObjects.add(PriorFactor(object.velocityNodeIndex, object.velocity, noise)); #endif 
} } } void saveKeyFramesAndFactor() { bool requiredSaveFrame = saveFrame(); #ifdef ENABLE_SIMULTANEOUS_LOCALIZATION_AND_TRACKING if (requiredSaveFrame) { // odom factor addOdomFactor(); // gps factor addGPSFactor(); // loop factor addLoopFactor(); } else { #ifdef ENABLE_ASYNCHRONOUS_STATE_ESTIMATE_FOR_SLOT // add the latest ego-pose to the initial guess set auto egoPose6D = cloudKeyPoses6D->back(); Pose3 latestEgoPose = Pose3(Rot3::RzRyRx((Vector3() << egoPose6D.roll, egoPose6D.pitch, egoPose6D.yaw).finished()), Point3((Vector3() << egoPose6D.x, egoPose6D.y, egoPose6D.z).finished())); initialEstimate.insert(keyPoseIndices.back(), latestEgoPose); initialEstimateForAnalysis.insert(keyPoseIndices.back(), latestEgoPose); #else return; #endif } #else // LIO-SAM if (!requiredSaveFrame) return; // odom factor addOdomFactor(); // gps factor addGPSFactor(); // loop factor addLoopFactor(); #endif #ifdef ENABLE_SIMULTANEOUS_LOCALIZATION_AND_TRACKING // perform dynamic object propagation propagateObjectPoses(); // detection factor (for multi-object tracking tracking) addDetectionFactor(!requiredSaveFrame); // // constant velocity factor (for multi-object tracking) addConstantVelocityFactor(); // stable pose factor (for multi-object tracking) addStablePoseFactor(); #endif #ifdef MAP_OPTIMIZATION_DEBUG std::cout << "****************************************************" << endl; gtSAMgraph.print("GTSAM Graph:\n"); #endif if (!requiredSaveFrame) { initialEstimate.erase(keyPoseIndices.back()); initialEstimateForAnalysis.erase(keyPoseIndices.back()); } // update iSAM isam->update(gtSAMgraph, initialEstimate); isam->update(); if (aLoopIsClosed == true) { isam->update(); isam->update(); isam->update(); isam->update(); isam->update(); } gtSAMgraph.resize(0); initialEstimate.clear(); initialEstimateForAnalysis.clear(); #ifdef ENABLE_SIMULTANEOUS_LOCALIZATION_AND_TRACKING if (!gtSAMgraphForLooselyCoupledObjects.empty()) { isam->update(gtSAMgraphForLooselyCoupledObjects, 
initialEstimateForLooselyCoupledObjects); isam->update(); } gtSAMgraphForLooselyCoupledObjects.resize(0); initialEstimateForLooselyCoupledObjects.clear(); #endif isamCurrentEstimate = isam->calculateEstimate(); if (requiredSaveFrame) { // save key poses PointType thisPose3D; PointTypePose thisPose6D; Pose3 latestEstimate; latestEstimate = isamCurrentEstimate.at(keyPoseIndices.back()); // cout << "****************************************************" << endl; // isamCurrentEstimate.print("Current estimate: "); thisPose3D.x = latestEstimate.translation().x(); thisPose3D.y = latestEstimate.translation().y(); thisPose3D.z = latestEstimate.translation().z(); thisPose3D.intensity = cloudKeyPoses3D->size(); // this can be used as index cloudKeyPoses3D->push_back(thisPose3D); thisPose6D.x = thisPose3D.x; thisPose6D.y = thisPose3D.y; thisPose6D.z = thisPose3D.z; thisPose6D.intensity = thisPose3D.intensity; // this can be used as index thisPose6D.roll = latestEstimate.rotation().roll(); thisPose6D.pitch = latestEstimate.rotation().pitch(); thisPose6D.yaw = latestEstimate.rotation().yaw(); thisPose6D.time = timeLaserInfoCur; cloudKeyPoses6D->push_back(thisPose6D); // cout << "****************************************************" << endl; // cout << "Pose covariance:" << endl; // cout << isam->marginalCovariance(isamCurrentEstimate.size()-1) << endl << endl; poseCovariance = isam->marginalCovariance(keyPoseIndices.back()); // save updated transform transformTobeMapped[0] = latestEstimate.rotation().roll(); transformTobeMapped[1] = latestEstimate.rotation().pitch(); transformTobeMapped[2] = latestEstimate.rotation().yaw(); transformTobeMapped[3] = latestEstimate.translation().x(); transformTobeMapped[4] = latestEstimate.translation().y(); transformTobeMapped[5] = latestEstimate.translation().z(); // save all the received edge and surf points pcl::PointCloud::Ptr thisCornerKeyFrame(new pcl::PointCloud()); pcl::PointCloud::Ptr thisSurfKeyFrame(new pcl::PointCloud()); 
pcl::copyPointCloud(*laserCloudCornerLastDS, *thisCornerKeyFrame); pcl::copyPointCloud(*laserCloudSurfLastDS, *thisSurfKeyFrame); // save key frame cloud cornerCloudKeyFrames.push_back(thisCornerKeyFrame); surfCloudKeyFrames.push_back(thisSurfKeyFrame); // save path for visualization updatePath(thisPose6D); } // save dynamic objects if (objects.size() > 0) { size_t i = 0; for (auto& pairedObject : objects.back()) { auto& object = pairedObject.second; // Since lost objects do not join the optimization, skip the value // update for them if (object.lostCount > 0) continue; #ifdef MAP_OPTIMIZATION_DEBUG std::cout << "(OBJECT " << object.objectIndex << ") [BEFORE]\nPOSITION ::\n" << object.pose << "\n" << "VELOCITY ::\n" << object.velocity << "\n"; #endif object.pose = isamCurrentEstimate.at(object.poseNodeIndex); #ifdef ENABLE_COMPACT_VERSION_OF_FACTOR_GRAPH if (!object.isFirst) { object.velocity = isamCurrentEstimate.at(object.velocityNodeIndex); } #else object.velocity = isamCurrentEstimate.at(object.velocityNodeIndex); #endif #ifdef MAP_OPTIMIZATION_DEBUG std::cout << "(OBJECT " << object.objectIndex << ") [AFTER ]\nPOSITION ::\n" << object.pose << "\n" << "VELOCITY ::\n" << object.velocity << "\n\n"; #endif // TODO: Reproduce the post-processing of tracking (refer to FG-3DMOT) // // Check if there is any detection belongs to the object. 
// int index; // double error; // std::tie(index, error) = getDetectionIndexAndError(object.pose, detectionVector); // if (error < detectionMatchThreshold) { // found // object.box = detections->boxes[index]; auto p = object.pose.translation(); object.box.pose.position.x = p.x(); object.box.pose.position.y = p.y(); object.box.pose.position.z = p.z(); auto r = object.pose.rotation(); object.box.pose.orientation = tf::createQuaternionMsgFromRollPitchYaw(r.roll(), r.pitch(), r.yaw()); object.box.header.frame_id = odometryFrame; object.box.label = object.objectIndex; // object.confidence = object.box.value; // } else { // lost // object.confidence = 0; // } ++i; } } } void correctPoses() { if (cloudKeyPoses3D->points.empty()) return; if (aLoopIsClosed || anyObjectIsTightlyCoupled) { // clear map cache laserCloudMapContainer.clear(); // clear path globalPath.poses.clear(); // update key poses int numPoses = keyPoseIndices.size(); for (int i = 0; i < numPoses; ++i) { auto poseIndex = keyPoseIndices[i]; cloudKeyPoses3D->points[i].x = isamCurrentEstimate.at(poseIndex).translation().x(); cloudKeyPoses3D->points[i].y = isamCurrentEstimate.at(poseIndex).translation().y(); cloudKeyPoses3D->points[i].z = isamCurrentEstimate.at(poseIndex).translation().z(); cloudKeyPoses6D->points[i].x = cloudKeyPoses3D->points[i].x; cloudKeyPoses6D->points[i].y = cloudKeyPoses3D->points[i].y; cloudKeyPoses6D->points[i].z = cloudKeyPoses3D->points[i].z; cloudKeyPoses6D->points[i].roll = isamCurrentEstimate.at(poseIndex).rotation().roll(); cloudKeyPoses6D->points[i].pitch = isamCurrentEstimate.at(poseIndex).rotation().pitch(); cloudKeyPoses6D->points[i].yaw = isamCurrentEstimate.at(poseIndex).rotation().yaw(); updatePath(cloudKeyPoses6D->points[i]); } aLoopIsClosed = false; } } void updatePath(const PointTypePose& pose_in) { geometry_msgs::PoseStamped pose_stamped; pose_stamped.header.stamp = ros::Time().fromSec(pose_in.time); pose_stamped.header.frame_id = odometryFrame; 
pose_stamped.pose.position.x = pose_in.x; pose_stamped.pose.position.y = pose_in.y; pose_stamped.pose.position.z = pose_in.z; tf::Quaternion q = tf::createQuaternionFromRPY(pose_in.roll, pose_in.pitch, pose_in.yaw); pose_stamped.pose.orientation.x = q.x(); pose_stamped.pose.orientation.y = q.y(); pose_stamped.pose.orientation.z = q.z(); pose_stamped.pose.orientation.w = q.w(); globalPath.poses.push_back(pose_stamped); } void publishOdometry() { // Publish odometry for ROS (global) nav_msgs::Odometry laserOdometryROS; laserOdometryROS.header.stamp = timeLaserInfoStamp; laserOdometryROS.header.frame_id = odometryFrame; laserOdometryROS.child_frame_id = "odom_mapping"; laserOdometryROS.pose.pose.position.x = transformTobeMapped[3]; laserOdometryROS.pose.pose.position.y = transformTobeMapped[4]; laserOdometryROS.pose.pose.position.z = transformTobeMapped[5]; laserOdometryROS.pose.pose.orientation = tf::createQuaternionMsgFromRollPitchYaw(transformTobeMapped[0], transformTobeMapped[1], transformTobeMapped[2]); pubLaserOdometryGlobal.publish(laserOdometryROS); // Publish TF static tf::TransformBroadcaster br; tf::Transform t_odom_to_lidar = tf::Transform(tf::createQuaternionFromRPY(transformTobeMapped[0], transformTobeMapped[1], transformTobeMapped[2]), tf::Vector3(transformTobeMapped[3], transformTobeMapped[4], transformTobeMapped[5])); tf::StampedTransform trans_odom_to_lidar = tf::StampedTransform(t_odom_to_lidar, timeLaserInfoStamp, odometryFrame, "lidar_link"); br.sendTransform(trans_odom_to_lidar); // Publish odometry for ROS (incremental) static bool lastIncreOdomPubFlag = false; static nav_msgs::Odometry laserOdomIncremental; // incremental odometry msg static Eigen::Affine3f increOdomAffine; // incremental odometry in affine if (lastIncreOdomPubFlag == false) { lastIncreOdomPubFlag = true; laserOdomIncremental = laserOdometryROS; increOdomAffine = trans2Affine3f(transformTobeMapped); } else { Eigen::Affine3f affineIncre = incrementalOdometryAffineFront.inverse() 
// (continuation) Tail of the incremental-odometry computation: the per-scan
// incremental affine is chained onto the accumulated odometry and decomposed
// into x/y/z + roll/pitch/yaw for publishing.
* incrementalOdometryAffineBack;
increOdomAffine = increOdomAffine * affineIncre;
float x, y, z, roll, pitch, yaw;
pcl::getTranslationAndEulerAngles(increOdomAffine, x, y, z, roll, pitch, yaw);
if (cloudInfo.imuAvailable == true) {
  // Blend roll/pitch toward the IMU attitude, but only when the IMU pitch is
  // within 1.4 rad (~80 deg) of level.
  if (std::abs(cloudInfo.imuPitchInit) < 1.4) {
    double imuWeight = 0.1;  // IMU contribution to the slerp blend
    tf::Quaternion imuQuaternion;
    tf::Quaternion transformQuaternion;
    double rollMid, pitchMid, yawMid;
    // slerp roll
    transformQuaternion.setRPY(roll, 0, 0);
    imuQuaternion.setRPY(cloudInfo.imuRollInit, 0, 0);
    tf::Matrix3x3(transformQuaternion.slerp(imuQuaternion, imuWeight)).getRPY(rollMid, pitchMid, yawMid);
    roll = rollMid;
    // slerp pitch
    transformQuaternion.setRPY(0, pitch, 0);
    imuQuaternion.setRPY(0, cloudInfo.imuPitchInit, 0);
    tf::Matrix3x3(transformQuaternion.slerp(imuQuaternion, imuWeight)).getRPY(rollMid, pitchMid, yawMid);
    pitch = pitchMid;
  }
}
laserOdomIncremental.header.stamp    = timeLaserInfoStamp;
laserOdomIncremental.header.frame_id = odometryFrame;
laserOdomIncremental.child_frame_id  = "odom_mapping";
laserOdomIncremental.pose.pose.position.x  = x;
laserOdomIncremental.pose.pose.position.y  = y;
laserOdomIncremental.pose.pose.position.z  = z;
laserOdomIncremental.pose.pose.orientation = tf::createQuaternionMsgFromRollPitchYaw(roll, pitch, yaw);
// covariance[0] doubles as a degeneracy flag for downstream consumers.
if (isDegenerate)
  laserOdomIncremental.pose.covariance[0] = 1;
else
  laserOdomIncremental.pose.covariance[0] = 0;
}
pubLaserOdometryIncremental.publish(laserOdomIncremental);
}

// Publishes all per-frame outputs of the map-optimization node: key poses, the
// surrounding (local) map, registered clouds, the global path, raw detections,
// per-object RViz markers and object states, and a Diagnosis summary message.
void publishFrames() {
  if (cloudKeyPoses3D->points.empty()) return;
  // publish key poses
  publishCloud(&pubKeyPoses, cloudKeyPoses3D, timeLaserInfoStamp, odometryFrame);
  // Publish surrounding key frames
  publishCloud(&pubRecentKeyFrames, laserCloudSurfFromMapDS, timeLaserInfoStamp, odometryFrame);
  // publish registered key frame
  if (pubRecentKeyFrame.getNumSubscribers() != 0) {
    // NOTE(review): template arguments (likely <PointType>) appear to have been
    // stripped from this file by extraction; restore before compiling.
    pcl::PointCloud::Ptr cloudOut(new pcl::PointCloud());
    PointTypePose thisPose6D = trans2PointTypePose(transformTobeMapped);
    *cloudOut += *transformPointCloud(laserCloudCornerLastDS, &thisPose6D);
    *cloudOut += *transformPointCloud(laserCloudSurfLastDS, &thisPose6D);
    publishCloud(&pubRecentKeyFrame, cloudOut, timeLaserInfoStamp, odometryFrame);
  }
  // publish registered high-res raw cloud
  if (pubCloudRegisteredRaw.getNumSubscribers() != 0) {
    pcl::PointCloud::Ptr cloudOut(new pcl::PointCloud());
    pcl::fromROSMsg(cloudInfo.cloud_deskewed, *cloudOut);
    PointTypePose thisPose6D = trans2PointTypePose(transformTobeMapped);
    *cloudOut = *transformPointCloud(cloudOut, &thisPose6D);
    publishCloud(&pubCloudRegisteredRaw, cloudOut, timeLaserInfoStamp, odometryFrame);
  }
  // publish path
  if (pubPath.getNumSubscribers() != 0) {
    globalPath.header.stamp    = timeLaserInfoStamp;
    globalPath.header.frame_id = odometryFrame;
    pubPath.publish(globalPath);
  }
  // publish detections
  if (pubDetection.getNumSubscribers() != 0 && detectionIsActive) {
    pubDetection.publish(detections);
  }
  if (pubLaserCloudDeskewed.getNumSubscribers() != 0) {
    cloudInfo.header.stamp = timeLaserInfoStamp;
    pubLaserCloudDeskewed.publish(cloudInfo.cloud_deskewed);
  }
  // publish dynamic objects
  if (detectionIsActive) {
    BoundingBoxArray objectMessage;
    BoundingBoxArray trackingObjectMessage;
    objectMessage.header          = detections->header;
    objectMessage.header.frame_id = odometryFrame;
    objectMessage.header.stamp    = timeLaserInfoStamp;
    trackingObjectMessage.header          = detections->header;
    trackingObjectMessage.header.frame_id = odometryFrame;
    trackingObjectMessage.header.stamp    = timeLaserInfoStamp;
    tightlyCoupledObjectPoints.header.frame_id = odometryFrame;
    tightlyCoupledObjectPoints.header.stamp    = timeLaserInfoStamp;
    // Reset the per-frame velocity markers before refilling them below.
    for (auto& marker : objectVelocities.markers) {
      marker.points.clear();
    }
    for (auto& marker : objectVelocityArrows.markers) {
      marker.points.clear();
      marker.scale.x = 0;
      marker.scale.y = 0;
      marker.scale.z = 0;
    }
    for (auto& marker : trackingObjectVelocities.markers) {
      marker.points.clear();
    }
    for (auto& marker : trackingObjectVelocityArrows.markers) {
      marker.points.clear();
      marker.scale.x = 0;
      marker.scale.y = 0;
      marker.scale.z = 0;
    }
    objectStates.objects.clear();
    objectStates.header.frame_id = odometryFrame;
    objectStates.header.stamp    = timeLaserInfoStamp;
    // NOTE(review): element type stripped here as well (presumably
    // std::vector<bool>) -- confirm against the repository source.
    std::vector trackingObjectIsActive(numberOfTrackingObjects, false);
    for (auto& pairedObject : objects.back()) {
      auto& object = pairedObject.second;
      // Only publish active objects
      if (object.lostCount == 0) {
        // TODO: A better data structure to present moving object with path
        // Mark the tracking object as an active instance
        trackingObjectIsActive[object.objectIndexForTracking] = true;
        // Bounding box
        object.box.header.stamp = timeLaserInfoStamp;
        objectMessage.boxes.push_back(object.box);
        trackingObjectMessage.boxes.push_back(object.box);
        trackingObjectMessage.boxes.back().label = object.objectIndexForTracking;
        // Path
        geometry_msgs::Point point;
        point.x = object.box.pose.position.x;
        point.y = object.box.pose.position.y;
        point.z = object.box.pose.position.z;
        objectPaths.markers[object.objectIndex].points.push_back(point);
        objectPaths.markers[object.objectIndex].header.frame_id = odometryFrame;
        objectPaths.markers[object.objectIndex].header.stamp    = timeLaserInfoStamp;
        trackingObjectPaths.markers[object.objectIndexForTracking].points.push_back(point);
        trackingObjectPaths.markers[object.objectIndexForTracking].header.frame_id = odometryFrame;
        trackingObjectPaths.markers[object.objectIndexForTracking].header.stamp    = timeLaserInfoStamp;
        // Tightly-coupled nodes (poses)
        if (object.isTightlyCoupled) {
          tightlyCoupledObjectPoints.points.push_back(point);
        }
        // Label (floats 2 m above the box center)
        objectLabels.markers[object.objectIndex].pose.position.x = object.box.pose.position.x;
        objectLabels.markers[object.objectIndex].pose.position.y = object.box.pose.position.y;
        objectLabels.markers[object.objectIndex].pose.position.z = object.box.pose.position.z + 2.0;
        objectLabels.markers[object.objectIndex].header.frame_id = odometryFrame;
        objectLabels.markers[object.objectIndex].header.stamp    = timeLaserInfoStamp;
        trackingObjectLabels.markers[object.objectIndexForTracking].pose.position.x = object.box.pose.position.x;
        trackingObjectLabels.markers[object.objectIndexForTracking].pose.position.y = object.box.pose.position.y;
        trackingObjectLabels.markers[object.objectIndexForTracking].pose.position.z = object.box.pose.position.z + 2.0;
        trackingObjectLabels.markers[object.objectIndexForTracking].header.frame_id = odometryFrame;
        trackingObjectLabels.markers[object.objectIndexForTracking].header.stamp    = timeLaserInfoStamp;
        // Velocity (prediction of path)
        objectVelocities.markers[object.objectIndex].header.frame_id = odometryFrame;
        objectVelocities.markers[object.objectIndex].header.stamp    = timeLaserInfoStamp;
        objectVelocityArrows.markers[object.objectIndex].header.frame_id = odometryFrame;
        objectVelocityArrows.markers[object.objectIndex].header.stamp    = timeLaserInfoStamp;
        objectVelocityArrows.markers[object.objectIndex].scale.x = 0.4;
        objectVelocityArrows.markers[object.objectIndex].scale.y = 0.8;
        objectVelocityArrows.markers[object.objectIndex].scale.z = 1.0;
        trackingObjectVelocities.markers[object.objectIndexForTracking].header.frame_id = odometryFrame;
        trackingObjectVelocities.markers[object.objectIndexForTracking].header.stamp    = timeLaserInfoStamp;
        trackingObjectVelocityArrows.markers[object.objectIndexForTracking].header.frame_id = odometryFrame;
        trackingObjectVelocityArrows.markers[object.objectIndexForTracking].header.stamp    = timeLaserInfoStamp;
        trackingObjectVelocityArrows.markers[object.objectIndexForTracking].scale.x = 0.4;
        trackingObjectVelocityArrows.markers[object.objectIndexForTracking].scale.y = 0.8;
        trackingObjectVelocityArrows.markers[object.objectIndexForTracking].scale.z = 1.0;
        // .. Compute the delta pose with respect to the delta time
        auto identity = gtsam::Pose3::identity();
        // NOTE(review): the 0.1 factor presumably scales the estimated twist to
        // one prediction time step -- confirm against the LiDAR frame period.
        auto deltaPoseVec = gtsam::traits::Local(identity, object.velocity) * 0.1;
        auto deltaPose    = gtsam::traits::Retract(identity, deltaPoseVec);
        auto nextPose     = object.pose;
        // .. Object position at relative time stamp from 1 to 5
        for (int timeStamp = 1; timeStamp <= 5; ++timeStamp) {
          nextPose = nextPose * deltaPose;
          point.x  = nextPose.translation().x();
          point.y  = nextPose.translation().y();
          point.z  = nextPose.translation().z();
          // Steps 1-4 form the predicted path line; steps 4-5 form the arrow tip.
          if (timeStamp <= 4) {
            objectVelocities.markers[object.objectIndex].points.push_back(point);
            trackingObjectVelocities.markers[object.objectIndexForTracking].points.push_back(point);
          }
          if (timeStamp >= 4) {
            objectVelocityArrows.markers[object.objectIndex].points.push_back(point);
            trackingObjectVelocityArrows.markers[object.objectIndexForTracking].points.push_back(point);
          }
        }
        // Diagnosis
        lio_segmot::ObjectState state;
        state.header.frame_id = odometryFrame;
        state.header.stamp    = timeLaserInfoStamp;
        state.detection       = object.detection;
        state.pose            = object.box.pose;
        state.velocity.position.x    = object.velocity.translation().x();
        state.velocity.position.y    = object.velocity.translation().y();
        state.velocity.position.z    = object.velocity.translation().z();
        state.velocity.orientation.x = object.velocity.rotation().quaternion().x();
        state.velocity.orientation.y = object.velocity.rotation().quaternion().y();
        state.velocity.orientation.z = object.velocity.rotation().quaternion().z();
        state.velocity.orientation.w = object.velocity.rotation().quaternion().w();
        state.index            = object.objectIndex;
        state.lostCount        = object.lostCount;
        state.confidence       = object.confidence;
        state.isTightlyCoupled = object.isTightlyCoupled;
        state.isFirst          = object.isFirst;
        // Current factor errors evaluated at the latest iSAM2 estimate, when the
        // corresponding factor exists for this object.
        state.hasTightlyCoupledDetectionError = false;
        if (object.tightlyCoupledDetectionFactorPtr) {
          state.hasTightlyCoupledDetectionError = true;
          state.tightlyCoupledDetectionError =
              object.tightlyCoupledDetectionFactorPtr->error(isamCurrentEstimate);
          state.initialTightlyCoupledDetectionError = object.initialDetectionError;
        }
        state.hasLooselyCoupledDetectionError = false;
        if (object.looselyCoupledDetectionFactorPtr) {
          state.hasLooselyCoupledDetectionError = true;
          state.looselyCoupledDetectionError =
              object.looselyCoupledDetectionFactorPtr->error(isamCurrentEstimate);
          state.initialLooselyCoupledDetectionError = object.initialDetectionError;
        }
        state.hasMotionError = false;
        if (object.motionFactorPtr) {
          state.hasMotionError     = true;
          state.motionError        = object.motionFactorPtr->error(isamCurrentEstimate);
          state.initialMotionError = object.initialMotionError;
        }
        objectStates.objects.push_back(state);
      }
    }
    // Hide moving object if it only appears one time and it is not active
    for (auto& pairedObject : objects.back()) {
      auto& object = pairedObject.second;
      auto& index  = object.objectIndexForTracking;
      if (!trackingObjectIsActive[index] && object.lostCount != 0) {
        if (trackingObjectPaths.markers[index].points.size() <= 1) {
          trackingObjectPaths.markers[index].points.clear();
          trackingObjectLabels.markers[index].text = "";
        }
      }
    }
    pubObjects.publish(objectMessage);
    pubObjectPaths.publish(objectPaths);
    pubTightlyCoupledObjectPoints.publish(tightlyCoupledObjectPoints);
    pubObjectLabels.publish(objectLabels);
    pubObjectVelocities.publish(objectVelocities);
    pubObjectVelocityArrows.publish(objectVelocityArrows);
    pubTrackingObjects.publish(trackingObjectMessage);
    pubTrackingObjectPaths.publish(trackingObjectPaths);
    pubTrackingObjectLabels.publish(trackingObjectLabels);
    pubTrackingObjectVelocities.publish(trackingObjectVelocities);
    pubTrackingObjectVelocityArrows.publish(trackingObjectVelocityArrows);
    pubObjectStates.publish(objectStates);
  }
  // Per-frame diagnosis summary (detection count, runtime, coupling count).
  lio_segmot::Diagnosis diagnosis;
  diagnosis.header.frame_id   = odometryFrame;
  diagnosis.header.stamp      = timeLaserInfoStamp;
  diagnosis.numberOfDetections = detections ? detections->boxes.size() : 0;
  diagnosis.computationalTime  = timer.elapsed();
  diagnosis.numberOfTightlyCoupledObjects = numberOfTightlyCoupledObjectsAtThisMoment;
  pubDiagnosis.publish(diagnosis);
}
};

// Entry point: starts the map-optimization node plus the loop-closure and
// global-map visualization worker threads.
int main(int argc, char** argv) {
  ros::init(argc, argv, "lio_segmot");
  mapOptimization MO;
  ROS_INFO("\033[1;32m----> Map Optimization Started.\033[0m");
  std::thread loopthread(&mapOptimization::loopClosureThread, &MO);
  std::thread visualizeMapThread(&mapOptimization::visualizeGlobalMapThread, &MO);
  ros::spin();
  loopthread.join();
  visualizeMapThread.join();
  return 0;
}

================================================ FILE: src/offlineBagPlayer.cpp ================================================

// Offline bag player: replays IMU/LiDAR messages "patch" by "patch", advancing
// one LiDAR scan each time the odometry node signals readiness on
// "lio_segmot/ready".
#include "utility.h"

// NOTE(review): include targets and template arguments were stripped from this
// file by extraction; restore them before compiling.
#include
#include

// Each patch holds the IMU messages up to and including one LiDAR scan.
std::queue> patchedInstances;
float base_rate;            // playback speed multiplier
std::string bag_filename;
std::string imu_topic;
std::string lidar_topic;
std::string pub_imu_topic;
std::string pub_lidar_topic;
ros::Publisher pubImu;
ros::Publisher pubLiDAR;
ros::Time currentTime;      // bag time of the last replayed message

// Replays one patch of bag messages, sleeping (bag-time gap / base_rate) in
// wall time between consecutive messages to approximate the original timing.
void playInstances(const std::vector& instances) {
  for (auto& instance : instances) {
    auto duration = instance.getTime() - currentTime;
    if (duration.toSec() > 0) {
      ros::WallDuration(duration.toSec() / base_rate).sleep();
    }
    auto imuMsg = instance.instantiate();
    if (imuMsg != nullptr) {
      pubImu.publish(imuMsg);
    }
    auto lidarMsg = instance.instantiate();
    if (lidarMsg != nullptr) {
      pubLiDAR.publish(lidarMsg);
    }
    currentTime = instance.getTime();
  }
}

// Triggered by "lio_segmot/ready": plays the next pending patch.
// NOTE(review): front()/pop() are called without an emptiness check -- this
// assumes the odometry node never signals more times than there are patches.
void odometryIsDoneCallback(const std_msgs::EmptyConstPtr& msg) {
  playInstances(patchedInstances.front());
  patchedInstances.pop();
  ROS_INFO("Remaining patches: %lu", patchedInstances.size());
}

int main(int argc, char* argv[]) {
  ros::init(argc, argv, "offline_bag_player");
  ros::NodeHandle _nh("~");
  ros::NodeHandle nh;
  _nh.param("base_rate", base_rate, 1.0);
  _nh.param("bag_filename", bag_filename, "");
  _nh.param("imu_topic", imu_topic, "/imu_raw");
  _nh.param("lidar_topic", lidar_topic, "/points_raw");
  _nh.param("pub_imu_topic", pub_imu_topic, "/imu_raw");
_nh.param("pub_lidar_topic", pub_lidar_topic, "/points_raw");

// Replay the next patch whenever the odometry pipeline reports it is ready.
ros::Subscriber sub = nh.subscribe("lio_segmot/ready", 10, &odometryIsDoneCallback);
pubImu   = nh.advertise(pub_imu_topic, 2000);
pubLiDAR = nh.advertise(pub_lidar_topic, 1);
if (bag_filename.empty()) {
  ROS_ERROR("bag_filename is empty");
  return -1;
}
// Read and process the bag file
rosbag::Bag bag;
ROS_INFO("Opening bag file: %s", bag_filename.c_str());
bag.open(bag_filename, rosbag::bagmode::Read);
std::vector topics;
topics.push_back(imu_topic);
topics.push_back(lidar_topic);
rosbag::View view(bag, rosbag::TopicQuery(topics));
std::vector instances;
bool timeIsInitialized = false;
// Group messages into patches; each patch ends with exactly one LiDAR message.
BOOST_FOREACH (rosbag::MessageInstance const m, view) {
  instances.push_back(m);
  if (m.getTopic() == lidar_topic) {
    patchedInstances.push(instances);
    instances.clear();
  }
  if (!timeIsInitialized) {
    currentTime        = m.getTime();
    timeIsInitialized  = true;
  }
}
// Make sure that the system is ready to subscribe the data
ROS_INFO("Pending ...");
while (pubImu.getNumSubscribers() < 2 || pubLiDAR.getNumSubscribers() < 1) {
  ros::spinOnce();
}
ROS_INFO("Start playing the bag ...");
// Play the first patch of the data
playInstances(patchedInstances.front());
patchedInstances.pop();
// Remaining patches are played from odometryIsDoneCallback via spinOnce().
while (!patchedInstances.empty() && ros::ok()) {
  ros::spinOnce();
}
bag.close();
return 0;
}

================================================ FILE: src/solver.cpp ================================================

#include "solver.h"

// NOTE(review): the include target and all template arguments below were
// stripped by extraction; restore them before compiling.
#include

// Scans all factors for max-mixture detection factors whose selected mixture
// component would change under the proposed delta, and returns the keys that
// must therefore be relinearized. Those keys are also added to *markedKeys.
gtsam::KeySet gatherMaxMixtureRelinearizationKeys(const gtsam::NonlinearFactorGraph nonlinearFactors,
                                                  const gtsam::Values theta,
                                                  const gtsam::VectorValues delta,
                                                  gtsam::KeySet* markedKeys) {
  gtsam::KeySet relinKeys;
  LooselyCoupledDetectionFactor* lcdf;
  TightlyCoupledDetectionFactor* tcdf;
  for (const auto& factor : nonlinearFactors) {
    // Identify which (if either) max-mixture factor type this is.
    lcdf = dynamic_cast(factor.get());
    tcdf = dynamic_cast(factor.get());
    // Skip if the factor is not based on a max-mixture model.
// Currently we only support max-mixture models for
// ``LooselyCoupledDetectionFactor`` and ``TightlyCoupledDetectionFactor``
if (lcdf == nullptr && tcdf == nullptr) continue;
int index;
int cachedIndex;
double error;
// NOTE(review): template/element types stripped by extraction below.
const std::vector* detections;
gtsam::Key robotPoseKey;
gtsam::Key objectPoseKey;
gtsam::Pose3 robotPose;
gtsam::Pose3 objectPose;
if (lcdf != nullptr) {
  detections   = &lcdf->getDetections();
  robotPoseKey = lcdf->robotPoseKey();
  objectPoseKey = lcdf->objectPoseKey();
  cachedIndex  = lcdf->getCachedDetectionIndex();
} else {  // tcdf != nullptr
  detections   = &tcdf->getDetections();
  robotPoseKey = tcdf->robotPoseKey();
  objectPoseKey = tcdf->objectPoseKey();
  cachedIndex  = tcdf->getCachedDetectionIndex();
}
// Apply the proposed delta to both poses, then re-evaluate which detection
// (mixture component) the factor would select at the updated linearization point.
robotPose  = gtsam::traits::Retract(theta.at(robotPoseKey), delta.at(robotPoseKey));
objectPose = gtsam::traits::Retract(theta.at(objectPoseKey), delta.at(objectPoseKey));
std::tie(index, error) = getDetectionIndexAndError(robotPose.inverse() * objectPose, *detections);
if (index != cachedIndex) {
  // The active mixture component changed: both involved keys must relinearize.
  relinKeys.insert(robotPoseKey);
  relinKeys.insert(objectPoseKey);
  markedKeys->insert(robotPoseKey);
  markedKeys->insert(objectPoseKey);
}
}
return relinKeys;
}

// Convenience overload: packs the individual arguments into a
// gtsam::ISAM2UpdateParams and forwards to the main update() below.
gtsam::ISAM2Result MaxMixtureISAM2::update(
    const gtsam::NonlinearFactorGraph& newFactors,
    const gtsam::Values& newTheta,
    const gtsam::FactorIndices& removeFactorIndices,
    const boost::optional >& constrainedKeys,
    const boost::optional >& noRelinKeys,
    const boost::optional >& extraReelimKeys,
    bool force_relinearize) {
  gtsam::ISAM2UpdateParams params;
  params.constrainedKeys     = constrainedKeys;
  params.extraReelimKeys     = extraReelimKeys;
  params.force_relinearize   = force_relinearize;
  params.noRelinKeys         = noRelinKeys;
  params.removeFactorIndices = removeFactorIndices;
  return update(newFactors, newTheta, params);
}

// Max-mixture-aware variant of gtsam::ISAM2::update(): follows the stock iSAM2
// update steps, except that step 4 additionally marks for relinearization any
// keys whose max-mixture component changed (via
// gatherMaxMixtureRelinearizationKeys above).
gtsam::ISAM2Result MaxMixtureISAM2::update(const gtsam::NonlinearFactorGraph& newFactors,
                                           const gtsam::Values& newTheta,
                                           const gtsam::ISAM2UpdateParams& updateParams) {
  gttic(ISAM2_update);
  this->update_count_ += 1;
  gtsam::UpdateImpl::LogStartingUpdate(newFactors, *this);
  gtsam::ISAM2Result result(params_.enableDetailedResults);
  gtsam::UpdateImpl update(params_, updateParams);
  // Update delta if we need it to check relinearization later
  if (update.relinarizationNeeded(update_count_))
    updateDelta(updateParams.forceFullSolve);
  // 1. Add any new factors \Factors:=\Factors\cup\Factors'.
  update.pushBackFactors(newFactors, &nonlinearFactors_, &linearFactors_,
                         &variableIndex_, &result.newFactorsIndices,
                         &result.keysWithRemovedFactors);
  update.computeUnusedKeys(newFactors, variableIndex_,
                           result.keysWithRemovedFactors, &result.unusedKeys);
  // 2. Initialize any new variables \Theta_{new} and add
  // \Theta:=\Theta\cup\Theta_{new}.
  addVariables(newTheta, result.details());
  if (params_.evaluateNonlinearError)
    update.error(nonlinearFactors_, calculateEstimate(), &result.errorBefore);
  // 3. Mark linear update
  update.gatherInvolvedKeys(newFactors, nonlinearFactors_,
                            result.keysWithRemovedFactors, &result.markedKeys);
  update.updateKeys(result.markedKeys, &result);
  gtsam::KeySet relinKeys;
  result.variablesRelinearized = 0;
  if (update.relinarizationNeeded(update_count_)) {
    // 4. Mark keys in \Delta above threshold \beta:
    relinKeys = update.gatherRelinearizeKeys(roots_, delta_, fixedVariables_, &result.markedKeys);
    // also mark keys whose max-mixture model is changed
    relinKeys.merge(gatherMaxMixtureRelinearizationKeys(nonlinearFactors_, theta_, delta_, &result.markedKeys));
    update.recordRelinearizeDetail(relinKeys, result.details());
    if (!relinKeys.empty()) {
      // 5. Mark cliques that involve marked variables \Theta_{J} and ancestors.
      update.findFluid(roots_, relinKeys, &result.markedKeys, result.details());
      // 6. Update linearization point for marked variables:
      // \Theta_{J}:=\Theta_{J}+\Delta_{J}.
      gtsam::UpdateImpl::ExpmapMasked(delta_, relinKeys, &theta_);
    }
    result.variablesRelinearized = result.markedKeys.size();
  }
  // 7. Linearize new factors
  update.linearizeNewFactors(newFactors, theta_, nonlinearFactors_.size(),
                             result.newFactorsIndices, &linearFactors_);
  update.augmentVariableIndex(newFactors, result.newFactorsIndices, &variableIndex_);
  // 8. Redo top of Bayes tree and update data structures
  recalculate(updateParams, relinKeys, &result);
  if (!result.unusedKeys.empty()) removeVariables(result.unusedKeys);
  result.cliques = this->nodes().size();
  if (params_.evaluateNonlinearError)
    update.error(nonlinearFactors_, calculateEstimate(), &result.errorAfter);
  return result;
}

================================================ FILE: srv/detection.srv ================================================

# Request: one deskewed LiDAR point cloud.
sensor_msgs/PointCloud2 cloud
---
# Response: detected object bounding boxes for that cloud.
jsk_recognition_msgs/BoundingBoxArray detections

================================================ FILE: srv/save_estimation_result.srv ================================================

# Empty request; the response bundles the full SLAM + object-tracking result.
---
nav_msgs/Path robotTrajectory
nav_msgs/Path[] objectTrajectories
nav_msgs/Path[] objectVelocities
nav_msgs/Path[] trackingObjectTrajectories
nav_msgs/Path[] trackingObjectVelocities
lio_segmot/ObjectStateArray[] trackingObjectStates
lio_segmot/flags[] objectFlags
lio_segmot/flags[] trackingObjectFlags

================================================ FILE: srv/save_map.srv ================================================

# Save the aggregated map at the given resolution to the destination path.
float32 resolution
string destination
---
bool success