Showing preview only (276K chars total). Download the full file or copy to clipboard to get everything.
Repository: Gaochao-hit/LIO-SAM_based_relocalization
Branch: master
Commit: b162b829660d
Files: 27
Total size: 264.3 KB
Directory structure:
gitextract_uwlmj5r8/
├── .github/
│ └── stale.yml
├── CMakeLists.txt
├── LICENSE
├── README.md
├── config/
│ ├── doc/
│ │ └── kitti2bag/
│ │ ├── README.md
│ │ └── kitti2bag.py
│ └── params.yaml
├── include/
│ └── utility.h
├── launch/
│ ├── include/
│ │ ├── config/
│ │ │ ├── robot.urdf.xacro
│ │ │ └── rviz.rviz
│ │ ├── module_loam.launch
│ │ ├── module_navsat.launch
│ │ ├── module_relocolize.launch
│ │ ├── module_robot_state_publisher.launch
│ │ ├── module_rviz.launch
│ │ └── rosconsole/
│ │ ├── rosconsole_error.conf
│ │ ├── rosconsole_info.conf
│ │ └── rosconsole_warn.conf
│ ├── run.launch
│ └── run_relocalize.launch
├── msg/
│ └── cloud_info.msg
├── package.xml
└── src/
├── featureExtraction.cpp
├── globalLocalize.cpp
├── imageProjection.cpp
├── imuPreintegration.cpp
└── mapOptmization.cpp
================================================
FILE CONTENTS
================================================
================================================
FILE: .github/stale.yml
================================================
# Number of days of inactivity before an issue becomes stale
daysUntilStale: 14
# Number of days of inactivity before a stale issue is closed
daysUntilClose: 1
# Issues with these labels will never be considered stale
exemptLabels:
- pinned
- security
# Label to use when marking an issue as stale
staleLabel: stale
# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
This issue has been automatically marked as stale because it has not had
recent activity. It will be closed if no further activity occurs. Thank you
for your contributions.
# Comment to post when closing a stale issue. Set to `false` to disable
closeComment: false
================================================
FILE: CMakeLists.txt
================================================
cmake_minimum_required(VERSION 2.8.3)
project(lio_sam)

# Project-wide build configuration. Directory-scoped flags/includes are kept
# for compatibility with the catkin workflow used by the rest of the package.
set(CMAKE_BUILD_TYPE "Release")
set(CMAKE_CXX_FLAGS "-std=c++11")
set(CMAKE_CXX_FLAGS_RELEASE "-O3 -Wall -g -pthread")

find_package(catkin REQUIRED COMPONENTS
  tf
  roscpp
  rospy
  cv_bridge
  # pcl library
  pcl_conversions
  # msgs
  std_msgs
  sensor_msgs
  geometry_msgs
  nav_msgs
  message_generation
)

find_package(OpenMP REQUIRED)
find_package(PCL REQUIRED QUIET)
find_package(OpenCV REQUIRED QUIET)
find_package(GTSAM REQUIRED QUIET)

# Custom message carrying the deskewed cloud plus feature/odometry metadata.
add_message_files(
  DIRECTORY msg
  FILES
  cloud_info.msg
)

generate_messages(
  DEPENDENCIES
  geometry_msgs
  std_msgs
  nav_msgs
  sensor_msgs
)

catkin_package(
  INCLUDE_DIRS include
  DEPENDS PCL GTSAM
  CATKIN_DEPENDS
  std_msgs
  nav_msgs
  geometry_msgs
  sensor_msgs
  message_runtime
  message_generation
)

# include directories
include_directories(
  include
  ${catkin_INCLUDE_DIRS}
  ${PCL_INCLUDE_DIRS}
  ${OpenCV_INCLUDE_DIRS}
  ${GTSAM_INCLUDE_DIR}
)

# link directories
# (FIX: the project's own "include" directory was listed here; it holds
# headers, not libraries, and belongs only in include_directories above.)
link_directories(
  ${PCL_LIBRARY_DIRS}
  ${OpenCV_LIBRARY_DIRS}
  ${GTSAM_LIBRARY_DIRS}
)

###########
## Build ##
###########

# Range Image Projection
add_executable(${PROJECT_NAME}_imageProjection src/imageProjection.cpp)
add_dependencies(${PROJECT_NAME}_imageProjection ${catkin_EXPORTED_TARGETS} ${PROJECT_NAME}_generate_messages_cpp)
target_link_libraries(${PROJECT_NAME}_imageProjection ${catkin_LIBRARIES} ${PCL_LIBRARIES} ${OpenCV_LIBRARIES})

# Feature Association
add_executable(${PROJECT_NAME}_featureExtraction src/featureExtraction.cpp)
add_dependencies(${PROJECT_NAME}_featureExtraction ${catkin_EXPORTED_TARGETS} ${PROJECT_NAME}_generate_messages_cpp)
target_link_libraries(${PROJECT_NAME}_featureExtraction ${catkin_LIBRARIES} ${PCL_LIBRARIES} ${OpenCV_LIBRARIES})

# Mapping Optimization
add_executable(${PROJECT_NAME}_mapOptmization src/mapOptmization.cpp)
add_dependencies(${PROJECT_NAME}_mapOptmization ${catkin_EXPORTED_TARGETS} ${PROJECT_NAME}_generate_messages_cpp)
target_compile_options(${PROJECT_NAME}_mapOptmization PRIVATE ${OpenMP_CXX_FLAGS})
target_link_libraries(${PROJECT_NAME}_mapOptmization ${catkin_LIBRARIES} ${PCL_LIBRARIES} ${OpenCV_LIBRARIES} ${OpenMP_CXX_FLAGS} gtsam)

# IMU Preintegration
add_executable(${PROJECT_NAME}_imuPreintegration src/imuPreintegration.cpp)
# FIX: this was the only executable without an add_dependencies on the
# generated-message targets; if its translation unit pulls in a generated
# header (directly or via utility.h), a clean parallel build could race
# message generation. Adding it is harmless and makes all targets consistent.
add_dependencies(${PROJECT_NAME}_imuPreintegration ${catkin_EXPORTED_TARGETS} ${PROJECT_NAME}_generate_messages_cpp)
target_link_libraries(${PROJECT_NAME}_imuPreintegration ${catkin_LIBRARIES} ${PCL_LIBRARIES} ${OpenCV_LIBRARIES} gtsam)

# Global Localization (relocalization against a previously saved map)
add_executable(${PROJECT_NAME}_globalLocalize src/globalLocalize.cpp)
add_dependencies(${PROJECT_NAME}_globalLocalize ${catkin_EXPORTED_TARGETS} ${PROJECT_NAME}_generate_messages_cpp)
target_compile_options(${PROJECT_NAME}_globalLocalize PRIVATE ${OpenMP_CXX_FLAGS})
target_link_libraries(${PROJECT_NAME}_globalLocalize ${catkin_LIBRARIES} ${PCL_LIBRARIES} ${OpenCV_LIBRARIES} ${OpenMP_CXX_FLAGS} gtsam)
================================================
FILE: LICENSE
================================================
BSD 3-Clause License
Copyright (c) 2020, Tixiao Shan
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
================================================
FILE: README.md
================================================
A simple version of a system that can relocalize in a previously built map is developed in this repository. The system is based on LIO-SAM.
The repository is developed based on the original version of LIO-SAM, in which GPS is not fused.
## Run the package
1. Make sure the map should be saved in the right folder:
```
Firstly, you need to run LIO-SAM, and then save the map in the default folder
```
2. Run the launch file:
```
roslaunch lio_sam run_relocalize.launch
```
3. Play existing bag files:
```
rosbag play your-bag.bag
```
-A video of the demonstration of the method can be found on [YouTube](https://youtu.be/PRsH8SpuSIc)
## Notes
- **Initialization:** During the initialization stage, it is best to keep the robot still. Alternatively, if you play bags, first play the bag for about 0.5 s, then pause it until the initialization succeeds. The initialization method requires you to provide initial guesses by hand in Rviz.
******************************************************************************************************************************************************************
# LIO-SAM
**A real-time lidar-inertial odometry package. We strongly recommend the users read this document thoroughly and test the package with the provided dataset first. A video of the demonstration of the method can be found on [YouTube](https://www.youtube.com/watch?v=A0H8CoORZJU).**
<p align='center'>
<img src="./config/doc/demo.gif" alt="drawing" width="800"/>
</p>
<p align='center'>
<img src="./config/doc/device-hand-2.png" alt="drawing" width="200"/>
<img src="./config/doc/device-hand.png" alt="drawing" width="200"/>
<img src="./config/doc/device-jackal.png" alt="drawing" width="200"/>
<img src="./config/doc/device-boat.png" alt="drawing" width="200"/>
</p>
## Menu
- [**System architecture**](#system-architecture)
- [**Package dependency**](#dependency)
- [**Package install**](#install)
- [**Prepare lidar data**](#prepare-lidar-data) (must read)
- [**Prepare IMU data**](#prepare-imu-data) (must read)
- [**Sample datasets**](#sample-datasets)
- [**Run the package**](#run-the-package)
- [**Other notes**](#other-notes)
- [**Paper**](#paper)
- [**TODO**](#todo)
- [**Acknowledgement**](#acknowledgement)
## System architecture
<p align='center'>
<img src="./config/doc/system.png" alt="drawing" width="800"/>
</p>
We design a system that maintains two graphs and runs up to 10x faster than real-time.
- The factor graph in "mapOptimization.cpp" optimizes lidar odometry factor and GPS factor. This factor graph is maintained consistently throughout the whole test.
- The factor graph in "imuPreintegration.cpp" optimizes IMU and lidar odometry factor and estimates IMU bias. This factor graph is reset periodically and guarantees real-time odometry estimation at IMU frequency.
## Dependency
- [ROS](http://wiki.ros.org/ROS/Installation) (tested with Kinetic and Melodic)
```
sudo apt-get install -y ros-kinetic-navigation
sudo apt-get install -y ros-kinetic-robot-localization
sudo apt-get install -y ros-kinetic-robot-state-publisher
```
- [gtsam](https://github.com/borglab/gtsam/releases) (Georgia Tech Smoothing and Mapping library)
```
wget -O ~/Downloads/gtsam.zip https://github.com/borglab/gtsam/archive/4.0.2.zip
cd ~/Downloads/ && unzip gtsam.zip -d ~/Downloads/
cd ~/Downloads/gtsam-4.0.2/
mkdir build && cd build
cmake -DGTSAM_BUILD_WITH_MARCH_NATIVE=OFF ..
sudo make install -j8
```
## Install
Use the following commands to download and compile the package.
```
cd ~/catkin_ws/src
git clone https://github.com/TixiaoShan/LIO-SAM.git
cd ..
catkin_make
```
## Prepare lidar data
The user needs to prepare the point cloud data in the correct format for cloud deskewing, which is mainly done in "imageProjection.cpp". The two requirements are:
- **Provide point time stamp**. LIO-SAM uses IMU data to perform point cloud deskew. Thus, the relative point time in a scan needs to be known. The up-to-date Velodyne ROS driver should output this information directly. Here, we assume the point time channel is called "time." The definition of the point type is located at the top of the "imageProjection.cpp." "deskewPoint()" function utilizes this relative time to obtain the transformation of this point relative to the beginning of the scan. When the lidar rotates at 10Hz, the timestamp of a point should vary between 0 and 0.1 seconds. If you are using other lidar sensors, you may need to change the name of this time channel and make sure that it is the relative time in a scan.
- **Provide point ring number**. LIO-SAM uses this information to organize the point correctly in a matrix. The ring number indicates which channel of the sensor that this point belongs to. The definition of the point type is located at the top of "imageProjection.cpp." The up-to-date Velodyne ROS driver should output this information directly. Again, if you are using other lidar sensors, you may need to rename this information. Note that only mechanical lidars are supported by the package currently.
## Prepare IMU data
- **IMU requirement**. Like the original LOAM implementation, LIO-SAM only works with a 9-axis IMU, which gives roll, pitch, and yaw estimation. The roll and pitch estimation is mainly used to initialize the system at the correct attitude. The yaw estimation initializes the system at the right heading when using GPS data. Theoretically, an initialization procedure like VINS-Mono will enable LIO-SAM to work with a 6-axis IMU. The performance of the system largely depends on the quality of the IMU measurements. The higher the IMU data rate, the better the system accuracy. We use Microstrain 3DM-GX5-25, which outputs data at 500Hz. We recommend using an IMU that gives at least a 200Hz output rate. Note that the internal IMU of Ouster lidar is not usable due to high vibration.
- **IMU alignment**. LIO-SAM transforms IMU raw data from the IMU frame to the Lidar frame, which follows the ROS REP-105 convention (x - forward, y - left, z - upward). To make the system function properly, the correct extrinsic transformation needs to be provided in "params.yaml" file. Using our setup as an example:
- we need to set the readings of x-z acceleration and gyro negative to transform the IMU data in the lidar frame, which is indicated by "extrinsicRot" in "params.yaml."
- The transformation of attitude readings is slightly different. We rotate the attitude measurements by -90 degrees around "lidar-z" axis and get the corresponding roll, pitch, and yaw readings in the lidar frame. This transformation is indicated by "extrinsicRPY" in "params.yaml."
- **IMU debug**. It's strongly recommended that the user uncomment the debug lines in "imuHandler()" of "imageProjection.cpp" and test the output of the transformed IMU data. The user can rotate the sensor suite to check whether the readings correspond to the sensor's movement.
<p align='center'>
<img src="./config/doc/imu-transform.png" alt="drawing" width="800"/>
</p>
## Sample datasets
* Download some sample datasets to test the functionality of the package. The datasets below are configured to run using the default settings:
- [**Walking dataset**](https://drive.google.com/file/d/1HN5fYPXEHbDq0E5JtbQPkCHIHUoTFFnN/view?usp=sharing)
- [**Garden dataset**](https://drive.google.com/file/d/1q6yuVhyJbkUBhut9yhfox2WdV4VZ9BZX/view?usp=sharing)
- [**Park dataset**](https://drive.google.com/file/d/19PZieaJaVkXDs2ZromaHTxYoq0zkiHae/view?usp=sharing)
* The datasets below need the parameters to be configured. In these datasets, the point cloud topic is "points_raw." The IMU topic is "imu_correct," which gives the IMU data in ROS REP105 standard. Because no IMU transformation is needed for this dataset, the following configurations need to be changed to run this dataset successfully:
- The "imuTopic" parameter in "config/params.yaml" needs to be set to "imu_correct".
- The "extrinsicRot" and "extrinsicRPY" in "config/params.yaml" needs to be set as identity matrices.
- [**Rotation dataset**](https://drive.google.com/file/d/1V4ijY4PgLdjKmdzcQ18Xu7VdcHo2UaWI/view?usp=sharing)
- [**Campus dataset (large)**](https://drive.google.com/file/d/1q4Sf7s2veVc7bs08Qeha3stOiwsytopL/view?usp=sharing)
- [**Campus dataset (small)**](https://drive.google.com/file/d/1_V-cFMTQ4RO-_16mU9YPUE8ozsPeddCv/view?usp=sharing)
* Ouster (OS1-128) dataset. No extrinsics need to be changed for this dataset if you are using the default settings. Please follow the Ouster notes below to configure the package to run with Ouster data. A video of the dataset can be found on [YouTube](https://youtu.be/O7fKgZQzkEo):
- [**Rooftop dataset**](https://drive.google.com/file/d/1Qy2rZdPudFhDbATPpblioBb8fRtjDGQj/view?usp=sharing)
* KITTI dataset. The extrinsics can be found in the Notes KITTI section below. To generate more bags using other KITTI raw data, you can use the python script provided in "config/doc/kitti2bag".
- [**2011_09_30_drive_0028**](https://drive.google.com/file/d/12h3ooRAZVTjoMrf3uv1_KriEXm33kHc7/view?usp=sharing)
## Run the package
1. Run the launch file:
```
roslaunch lio_sam run.launch
```
2. Play existing bag files:
```
rosbag play your-bag.bag -r 3
```
## Other notes
- **Loop closure:** The loop function here gives an example of proof of concept. It is directly adapted from LeGO-LOAM loop closure. For more advanced loop closure implementation, please refer to [ScanContext](https://github.com/irapkaist/SC-LeGO-LOAM). Set the "loopClosureEnableFlag" in "params.yaml" to "true" to test the loop closure function. In Rviz, uncheck "Map (cloud)" and check "Map (global)". This is because the visualized map - "Map (cloud)" - is simply a stack of point clouds in Rviz. Their postion will not be updated after pose correction. The loop closure function here is simply adapted from LeGO-LOAM, which is an ICP-based method. Because ICP runs pretty slow, it is suggested that the playback speed is set to be "-r 1". You can try the Campus dataset (large) for testing. The loop closure happens when the sensor returns back to the original starting location.
<p align='center'>
<img src="./config/doc/loop-closure.gif" alt="drawing" width="400"/>
</p>
- **Using GPS:** The park dataset is provided for testing LIO-SAM with GPS data. This dataset is gathered by [Yewei Huang](https://robustfieldautonomylab.github.io/people.html). To enable the GPS function, change "gpsTopic" in "params.yaml" to "odometry/gps". In Rviz, uncheck "Map (cloud)" and check "Map (global)". Also check "Odom GPS", which visualizes the GPS odometry. "gpsCovThreshold" can be adjusted to filter bad GPS readings. "poseCovThreshold" can be used to adjust the frequency of adding GPS factor to the graph. For example, you will notice the trajectory is constantly corrected by GPS when you set "poseCovThreshold" to 1.0. Because of the heavy iSAM optimization, it's recommended that the playback speed is "-r 1".
<p align='center'>
<img src="./config/doc/gps-demo.gif" alt="drawing" width="400"/>
</p>
- **KITTI:** Since LIO-SAM needs a high-frequency IMU to function properly, we need to use KITTI raw data for testing. One problem that remains unsolved is that the intrinsics of the IMU are unknown, which has a big impact on the accuracy of LIO-SAM. Download the provided sample data and make the following changes in "params.yaml":
- extrinsicTrans: [-8.086759e-01, 3.195559e-01, -7.997231e-01]
- extrinsicRot: [9.999976e-01, 7.553071e-04, -2.035826e-03, -7.854027e-04, 9.998898e-01, -1.482298e-02, 2.024406e-03, 1.482454e-02, 9.998881e-01]
- extrinsicRPY: [9.999976e-01, 7.553071e-04, -2.035826e-03, -7.854027e-04, 9.998898e-01, -1.482298e-02, 2.024406e-03, 1.482454e-02, 9.998881e-01]
- N_SCAN: 64
- downsampleRate: 2 or 4
- loopClosureEnableFlag: true or false
<p align='center'>
<img src="./config/doc/kitti-map.png" alt="drawing" width="300"/>
<img src="./config/doc/kitti-demo.gif" alt="drawing" width="300"/>
</p>
- **Ouster lidar:** To make LIO-SAM work with Ouster lidar, some preparations needs to be done on hardware and software level.
- Hardware:
- Use an external IMU. LIO-SAM does not work with the internal 6-axis IMU of Ouster lidar. You need to attach a 9-axis IMU to the lidar and perform data-gathering.
- Configure the driver. Change "timestamp_mode" in your Ouster launch file to "TIME_FROM_PTP_1588" so you can have ROS format timestamp for the point clouds.
- Software:
- Change "timeField" in "params.yaml" to "t". "t" is the point timestamp in a scan for Ouster lidar.
- Change "N_SCAN" and "Horizon_SCAN" in "params.yaml" according to your lidar, i.e., N_SCAN=128, Horizon_SCAN=1024.
- Comment the point definition for Velodyne on top of "imageProjection.cpp".
- Uncomment the point definition for Ouster on top of "imageProjection.cpp".
- Comment line "deskewPoint(&thisPoint, laserCloudIn->points[i].time)" in "imageProjection.cpp".
- Uncomment line "deskewPoint(&thisPoint, (float)laserCloudIn->points[i].t / 1000000000.0" in "imageProjection.cpp".
- Run "catkin_make" to re-compile the package.
<p align='center'>
<img src="./config/doc/ouster-device.jpg" alt="drawing" width="300"/>
<img src="./config/doc/ouster-demo.gif" alt="drawing" width="300"/>
</p>
## Paper
Thank you for citing [LIO-SAM (IROS-2020)](./config/doc/paper.pdf) if you use any of this code.
```
@inproceedings{liosam2020shan,
title={LIO-SAM: Tightly-coupled Lidar Inertial Odometry via Smoothing and Mapping},
author={Shan, Tixiao and Englot, Brendan and Meyers, Drew and Wang, Wei and Ratti, Carlo and Rus Daniela},
journal={arXiv preprint arXiv:2007.00258}
year={2020}
}
```
Part of the code is adapted from [LeGO-LOAM](https://github.com/RobustFieldAutonomyLab/LeGO-LOAM).
```
@inproceedings{legoloam2018shan,
title={LeGO-LOAM: Lightweight and Ground-Optimized Lidar Odometry and Mapping on Variable Terrain},
author={Shan, Tixiao and Englot, Brendan},
booktitle={IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)},
pages={4758-4765},
year={2018},
organization={IEEE}
}
```
## TODO
- [ ] Add loop closure visualization and fix potential bug
## Acknowledgement
- LIO-SAM is based on LOAM (J. Zhang and S. Singh. LOAM: Lidar Odometry and Mapping in Real-time).
================================================
FILE: config/doc/kitti2bag/README.md
================================================
# kitti2bag
## How to run it?
```bash
wget https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0084/2011_09_26_drive_0084_sync.zip
wget https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0084/2011_09_26_drive_0084_extract.zip
wget https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_calib.zip
unzip 2011_09_26_drive_0084_sync.zip
unzip 2011_09_26_drive_0084_extract.zip
unzip 2011_09_26_calib.zip
python kitti2bag.py -t 2011_09_26 -r 0084 raw_synced .
```
That's it. You have a bag that contains your data.
Other source files can be found at [KITTI raw data](http://www.cvlibs.net/datasets/kitti/raw_data.php) page.
================================================
FILE: config/doc/kitti2bag/kitti2bag.py
================================================
#!env python
# -*- coding: utf-8 -*-
import sys
try:
import pykitti
except ImportError as e:
print('Could not load module \'pykitti\'. Please run `pip install pykitti`')
sys.exit(1)
import tf
import os
import cv2
import rospy
import rosbag
import progressbar
from tf2_msgs.msg import TFMessage
from datetime import datetime
from std_msgs.msg import Header
from sensor_msgs.msg import CameraInfo, Imu, PointField, NavSatFix
import sensor_msgs.point_cloud2 as pcl2
from geometry_msgs.msg import TransformStamped, TwistStamped, Transform
from cv_bridge import CvBridge
import numpy as np
import argparse
def save_imu_data(bag, kitti, imu_frame_id, topic):
    """Write the synced OXTS readings to `bag` as sensor_msgs/Imu on `topic`.

    Orientation comes from the OXTS roll/pitch/yaw converted to a quaternion;
    linear acceleration and angular velocity use the forward/left/up (f, l, u)
    packet fields. Each message is stamped with the OXTS timestamp.
    """
    print("Exporting IMU")
    for stamp_dt, oxts in zip(kitti.timestamps, kitti.oxts):
        packet = oxts.packet
        quat = tf.transformations.quaternion_from_euler(
            packet.roll, packet.pitch, packet.yaw)
        msg = Imu()
        msg.header.frame_id = imu_frame_id
        msg.header.stamp = rospy.Time.from_sec(float(stamp_dt.strftime("%s.%f")))
        msg.orientation.x, msg.orientation.y = quat[0], quat[1]
        msg.orientation.z, msg.orientation.w = quat[2], quat[3]
        msg.linear_acceleration.x = packet.af
        msg.linear_acceleration.y = packet.al
        msg.linear_acceleration.z = packet.au
        msg.angular_velocity.x = packet.wf
        msg.angular_velocity.y = packet.wl
        msg.angular_velocity.z = packet.wu
        bag.write(topic, msg, t=msg.header.stamp)
def save_imu_data_raw(bag, kitti, imu_frame_id, topic):
    """Export the raw (unsynced/extract) OXTS IMU stream to the bag.

    Timestamps are read from the 'extract' dataset and re-fit with a linear
    model, since the raw OXTS stamps jitter. Each measurement is written to
    `topic`, and a second copy re-stamped with frame_id 'imu_enu_link' is
    written to '/imu_correct' for LIO-SAM's GPS pipeline.
    """
    print("Exporting IMU Raw")
    synced_path = kitti.data_path
    unsynced_path = synced_path.replace('sync', 'extract')
    imu_path = os.path.join(unsynced_path, 'oxts')

    # read time stamps (convert to ros seconds format)
    with open(os.path.join(imu_path, 'timestamps.txt')) as f:
        lines = f.readlines()
    imu_datetimes = []
    for line in lines:
        if len(line) == 1:  # skip blank lines
            continue
        timestamp = datetime.strptime(line[:-4], '%Y-%m-%d %H:%M:%S.%f')
        imu_datetimes.append(float(timestamp.strftime("%s.%f")))

    # fix imu time using a linear model (may not be ideal, ^_^)
    imu_index = np.asarray(range(len(imu_datetimes)), dtype=np.float64)
    z = np.polyfit(imu_index, imu_datetimes, 1)
    imu_datetimes = (z[0] * imu_index + z[1]).tolist()

    # get all imu data; each per-measurement file carries one record line
    # (the loop keeps the last non-empty line of each file).
    imu_data_dir = os.path.join(imu_path, 'data')
    imu_filenames = sorted(os.listdir(imu_data_dir))
    imu_data = [None] * len(imu_filenames)
    for i, imu_file in enumerate(imu_filenames):
        # BUG FIX: the original opened each file without closing it,
        # leaking one file handle per IMU measurement.
        with open(os.path.join(imu_data_dir, imu_file), "r") as imu_data_file:
            for line in imu_data_file:
                if len(line) == 1:
                    continue
                imu_data[i] = line.strip().split()

    assert len(imu_datetimes) == len(imu_data)

    for timestamp, data in zip(imu_datetimes, imu_data):
        # OXTS record layout: [3..5] = roll/pitch/yaw, [11..13] = af/al/au,
        # [17..19] = wf/wl/wu
        roll, pitch, yaw = float(data[3]), float(data[4]), float(data[5])
        q = tf.transformations.quaternion_from_euler(roll, pitch, yaw)
        imu = Imu()
        imu.header.frame_id = imu_frame_id
        imu.header.stamp = rospy.Time.from_sec(timestamp)
        imu.orientation.x = q[0]
        imu.orientation.y = q[1]
        imu.orientation.z = q[2]
        imu.orientation.w = q[3]
        imu.linear_acceleration.x = float(data[11])
        imu.linear_acceleration.y = float(data[12])
        imu.linear_acceleration.z = float(data[13])
        imu.angular_velocity.x = float(data[17])
        imu.angular_velocity.y = float(data[18])
        imu.angular_velocity.z = float(data[19])
        bag.write(topic, imu, t=imu.header.stamp)
        # re-stamped copy in the ENU frame for LIO-SAM GPS
        imu.header.frame_id = 'imu_enu_link'
        bag.write('/imu_correct', imu, t=imu.header.stamp)
def save_dynamic_tf(bag, kitti, kitti_type, initial_time):
    """Write the time-varying pose of the sensor platform to /tf.

    For "raw" dataset types the OXTS world->IMU pose (oxts.T_w_imu) is
    published as world->base_link; for "odom" types the left-camera
    trajectory (kitti.T_w_cam0) is published as world->camera_left.
    Any other kitti_type is silently ignored.
    """
    print("Exporting time dependent transformations")
    if kitti_type.find("raw") != -1:
        for timestamp, oxts in zip(kitti.timestamps, kitti.oxts):
            tf_oxts_msg = TFMessage()
            tf_oxts_transform = TransformStamped()
            tf_oxts_transform.header.stamp = rospy.Time.from_sec(float(timestamp.strftime("%s.%f")))
            tf_oxts_transform.header.frame_id = 'world'
            tf_oxts_transform.child_frame_id = 'base_link'
            # 4x4 homogeneous world->IMU matrix from the OXTS packet
            transform = (oxts.T_w_imu)
            t = transform[0:3, 3]
            q = tf.transformations.quaternion_from_matrix(transform)
            oxts_tf = Transform()
            oxts_tf.translation.x = t[0]
            oxts_tf.translation.y = t[1]
            oxts_tf.translation.z = t[2]
            oxts_tf.rotation.x = q[0]
            oxts_tf.rotation.y = q[1]
            oxts_tf.rotation.z = q[2]
            oxts_tf.rotation.w = q[3]
            tf_oxts_transform.transform = oxts_tf
            tf_oxts_msg.transforms.append(tf_oxts_transform)
            bag.write('/tf', tf_oxts_msg, tf_oxts_msg.transforms[0].header.stamp)
    elif kitti_type.find("odom") != -1:
        # odometry stamps are relative; offset them by initial_time
        timestamps = map(lambda x: initial_time + x.total_seconds(), kitti.timestamps)
        for timestamp, tf_matrix in zip(timestamps, kitti.T_w_cam0):
            tf_msg = TFMessage()
            tf_stamped = TransformStamped()
            tf_stamped.header.stamp = rospy.Time.from_sec(timestamp)
            tf_stamped.header.frame_id = 'world'
            tf_stamped.child_frame_id = 'camera_left'
            t = tf_matrix[0:3, 3]
            q = tf.transformations.quaternion_from_matrix(tf_matrix)
            transform = Transform()
            transform.translation.x = t[0]
            transform.translation.y = t[1]
            transform.translation.z = t[2]
            transform.rotation.x = q[0]
            transform.rotation.y = q[1]
            transform.rotation.z = q[2]
            transform.rotation.w = q[3]
            tf_stamped.transform = transform
            tf_msg.transforms.append(tf_stamped)
            bag.write('/tf', tf_msg, tf_msg.transforms[0].header.stamp)
def save_camera_data(bag, kitti_type, kitti, util, bridge, camera, camera_frame_id, topic, initial_time):
    """Export one camera stream (images plus CameraInfo) to the bag.

    "raw" types read full calibration (K/R/D/P) from `util` and stamp images
    from the per-camera timestamps.txt; "odom" types read only P and derive
    stamps from initial_time plus the sequence offsets. Cameras 0 and 1 are
    the grayscale pair and are written as mono8; others as bgr8.
    """
    print("Exporting camera {}".format(camera))
    if kitti_type.find("raw") != -1:
        camera_pad = '{0:02d}'.format(camera)
        image_dir = os.path.join(kitti.data_path, 'image_{}'.format(camera_pad))
        image_path = os.path.join(image_dir, 'data')
        image_filenames = sorted(os.listdir(image_path))
        with open(os.path.join(image_dir, 'timestamps.txt')) as f:
            image_datetimes = map(lambda x: datetime.strptime(x[:-4], '%Y-%m-%d %H:%M:%S.%f'), f.readlines())
        calib = CameraInfo()
        calib.header.frame_id = camera_frame_id
        calib.width, calib.height = tuple(util['S_rect_{}'.format(camera_pad)].tolist())
        calib.distortion_model = 'plumb_bob'
        calib.K = util['K_{}'.format(camera_pad)]
        calib.R = util['R_rect_{}'.format(camera_pad)]
        calib.D = util['D_{}'.format(camera_pad)]
        calib.P = util['P_rect_{}'.format(camera_pad)]
    elif kitti_type.find("odom") != -1:
        camera_pad = '{0:01d}'.format(camera)
        image_path = os.path.join(kitti.sequence_path, 'image_{}'.format(camera_pad))
        image_filenames = sorted(os.listdir(image_path))
        image_datetimes = map(lambda x: initial_time + x.total_seconds(), kitti.timestamps)
        calib = CameraInfo()
        calib.header.frame_id = camera_frame_id
        calib.P = util['P{}'.format(camera_pad)]
    # NOTE(review): calib / image_path / image_datetimes are only bound when
    # kitti_type contains "raw" or "odom"; any other value would raise
    # NameError below — confirm callers never pass other types.
    iterable = zip(image_datetimes, image_filenames)
    bar = progressbar.ProgressBar()
    for dt, filename in bar(iterable):
        image_filename = os.path.join(image_path, filename)
        cv_image = cv2.imread(image_filename)
        # use the actual decoded image size (overrides S_rect for "raw")
        calib.height, calib.width = cv_image.shape[:2]
        if camera in (0, 1):
            # grayscale cameras: collapse the BGR read back to mono
            cv_image = cv2.cvtColor(cv_image, cv2.COLOR_BGR2GRAY)
        encoding = "mono8" if camera in (0, 1) else "bgr8"
        image_message = bridge.cv2_to_imgmsg(cv_image, encoding=encoding)
        image_message.header.frame_id = camera_frame_id
        if kitti_type.find("raw") != -1:
            image_message.header.stamp = rospy.Time.from_sec(float(datetime.strftime(dt, "%s.%f")))
            topic_ext = "/image_raw"
        elif kitti_type.find("odom") != -1:
            image_message.header.stamp = rospy.Time.from_sec(dt)
            topic_ext = "/image_rect"
        calib.header.stamp = image_message.header.stamp
        bag.write(topic + topic_ext, image_message, t = image_message.header.stamp)
        bag.write(topic + '/camera_info', calib, t = calib.header.stamp)
def save_velo_data(bag, kitti, velo_frame_id, topic):
    """Export Velodyne scans to the bag as sensor_msgs/PointCloud2.

    Each .bin scan holds float32 (x, y, z, intensity) quadruples. A 'ring'
    channel is synthesized from each point's elevation angle assuming the
    HDL-64E layout (64 rings spanning +2.0 .. -24.8 degrees), because
    downstream consumers (e.g. LIO-SAM) require a ring field.
    """
    print("Exporting velodyne data")
    velo_path = os.path.join(kitti.data_path, 'velodyne_points')
    velo_data_dir = os.path.join(velo_path, 'data')
    velo_filenames = sorted(os.listdir(velo_data_dir))
    with open(os.path.join(velo_path, 'timestamps.txt')) as f:
        lines = f.readlines()
    velo_datetimes = []
    for line in lines:
        if len(line) == 1:  # skip blank lines
            continue
        dt = datetime.strptime(line[:-4], '%Y-%m-%d %H:%M:%S.%f')
        velo_datetimes.append(dt)

    iterable = zip(velo_datetimes, velo_filenames)
    bar = progressbar.ProgressBar()
    for dt, filename in bar(iterable):
        if dt is None:
            continue
        velo_filename = os.path.join(velo_data_dir, filename)

        # read binary data: one float32 (x, y, z, intensity) per point
        scan = (np.fromfile(velo_filename, dtype=np.float32)).reshape(-1, 4)

        # get ring channel from the elevation angle.
        # BUG FIX: range must come from x, y, z only; the original normed
        # all 4 columns, letting intensity leak into the elevation estimate.
        depth = np.linalg.norm(scan[:, 0:3], 2, axis=1)
        pitch = np.arcsin(scan[:, 2] / depth)  # arcsin(z / range)
        fov_down = -24.8 / 180.0 * np.pi
        fov = (abs(-24.8) + abs(2.0)) / 180.0 * np.pi
        proj_y = (pitch + abs(fov_down)) / fov           # in [0.0, 1.0]
        proj_y *= 64                                     # in [0.0, H]
        proj_y = np.floor(proj_y)
        proj_y = np.minimum(64 - 1, proj_y)
        proj_y = np.maximum(0, proj_y).astype(np.int32)  # in [0, H-1]
        proj_y = proj_y.reshape(-1, 1)
        scan = np.concatenate((scan, proj_y), axis=1)

        # create header
        header = Header()
        header.frame_id = velo_frame_id
        header.stamp = rospy.Time.from_sec(float(datetime.strftime(dt, "%s.%f")))

        # fill pcl msg
        fields = [PointField('x', 0, PointField.FLOAT32, 1),
                  PointField('y', 4, PointField.FLOAT32, 1),
                  PointField('z', 8, PointField.FLOAT32, 1),
                  PointField('intensity', 12, PointField.FLOAT32, 1),
                  PointField('ring', 16, PointField.UINT16, 1)]
        pcl_msg = pcl2.create_cloud(header, fields, scan)
        pcl_msg.is_dense = True
        bag.write(topic, pcl_msg, t=pcl_msg.header.stamp)
def get_static_transform(from_frame_id, to_frame_id, transform):
    """Build a geometry_msgs/TransformStamped from a 4x4 homogeneous matrix."""
    translation = transform[0:3, 3]
    rotation = tf.transformations.quaternion_from_matrix(transform)
    tf_msg = TransformStamped()
    tf_msg.header.frame_id = from_frame_id
    tf_msg.child_frame_id = to_frame_id
    # copy the translation vector and quaternion component-by-component
    for axis, value in zip('xyz', translation):
        setattr(tf_msg.transform.translation, axis, float(value))
    for axis, value in zip('xyzw', rotation):
        setattr(tf_msg.transform.rotation, axis, float(value))
    return tf_msg
def inv(transform):
    """Invert a rigid-body transformation matrix.

    For T = [R | t; 0 | 1], the inverse is [R^T | -R^T t; 0 | 1], which
    avoids a general 4x4 matrix inversion.
    """
    rotation = transform[0:3, 0:3]
    translation = transform[0:3, 3]
    inverse = np.eye(4)
    inverse[0:3, 0:3] = rotation.T
    inverse[0:3, 3] = -rotation.T.dot(translation)
    return inverse
def save_static_transforms(bag, transforms, timestamps):
    """Write the fixed sensor-to-sensor transforms to /tf_static.

    `transforms` is a sequence of (from_frame, to_frame, 4x4 matrix) tuples;
    the same TFMessage is re-stamped and re-written at every timestamp.
    """
    print("Exporting static transformations")
    tfm = TFMessage()
    for from_id, to_id, matrix in transforms:
        stamped = get_static_transform(from_frame_id=from_id, to_frame_id=to_id, transform=matrix)
        tfm.transforms.append(stamped)
    for timestamp in timestamps:
        stamp = rospy.Time.from_sec(float(timestamp.strftime("%s.%f")))
        for entry in tfm.transforms:
            entry.header.stamp = stamp
        bag.write('/tf_static', tfm, t=stamp)
def save_gps_fix_data(bag, kitti, gps_frame_id, topic):
    """Export OXTS latitude/longitude/altitude readings as NavSatFix messages."""
    for timestamp, oxts in zip(kitti.timestamps, kitti.oxts):
        msg = NavSatFix()
        msg.header.frame_id = gps_frame_id
        msg.header.stamp = rospy.Time.from_sec(float(timestamp.strftime("%s.%f")))
        msg.latitude = oxts.packet.lat
        msg.longitude = oxts.packet.lon
        msg.altitude = oxts.packet.alt
        msg.status.service = 1
        bag.write(topic, msg, t=msg.header.stamp)
def save_gps_vel_data(bag, kitti, gps_frame_id, topic):
    """Export OXTS body-frame velocities (linear + angular) as TwistStamped messages."""
    for timestamp, oxts in zip(kitti.timestamps, kitti.oxts):
        msg = TwistStamped()
        msg.header.frame_id = gps_frame_id
        msg.header.stamp = rospy.Time.from_sec(float(timestamp.strftime("%s.%f")))
        # forward / leftward / upward linear velocity
        msg.twist.linear.x = oxts.packet.vf
        msg.twist.linear.y = oxts.packet.vl
        msg.twist.linear.z = oxts.packet.vu
        # angular rates about the same body axes
        msg.twist.angular.x = oxts.packet.wf
        msg.twist.angular.y = oxts.packet.wl
        msg.twist.angular.z = oxts.packet.wu
        bag.write(topic, msg, t=msg.header.stamp)
if __name__ == "__main__":
    # Command-line entry point: convert a KITTI raw or odometry dataset into a rosbag.
    parser = argparse.ArgumentParser(description = "Convert KITTI dataset to ROS bag file the easy way!")

    # Accepted argument values
    kitti_types = ["raw_synced", "odom_color", "odom_gray"]
    # Odometry sequences are named "00" .. "21".
    odometry_sequences = []
    for s in range(22):
        odometry_sequences.append(str(s).zfill(2))

    parser.add_argument("kitti_type", choices = kitti_types, help = "KITTI dataset type")
    parser.add_argument("dir", nargs = "?", default = os.getcwd(), help = "base directory of the dataset, if no directory passed the deafult is current working directory")
    parser.add_argument("-t", "--date", help = "date of the raw dataset (i.e. 2011_09_26), option is only for RAW datasets.")
    parser.add_argument("-r", "--drive", help = "drive number of the raw dataset (i.e. 0001), option is only for RAW datasets.")
    parser.add_argument("-s", "--sequence", choices = odometry_sequences,help = "sequence of the odometry dataset (between 00 - 21), option is only for ODOMETRY datasets.")
    args = parser.parse_args()

    bridge = CvBridge()
    # Bag compression disabled by default; alternatives left for manual switching.
    compression = rosbag.Compression.NONE
    # compression = rosbag.Compression.BZ2
    # compression = rosbag.Compression.LZ4

    # CAMERAS: (pykitti camera index, frame id, topic)
    cameras = [
        (0, 'camera_gray_left', '/kitti/camera_gray_left'),
        (1, 'camera_gray_right', '/kitti/camera_gray_right'),
        (2, 'camera_color_left', '/kitti/camera_color_left'),
        (3, 'camera_color_right', '/kitti/camera_color_right')
    ]

    if args.kitti_type.find("raw") != -1:
        # RAW datasets require both --date and --drive.
        if args.date == None:
            print("Date option is not given. It is mandatory for raw dataset.")
            print("Usage for raw dataset: kitti2bag raw_synced [dir] -t <date> -r <drive>")
            sys.exit(1)
        elif args.drive == None:
            print("Drive option is not given. It is mandatory for raw dataset.")
            print("Usage for raw dataset: kitti2bag raw_synced [dir] -t <date> -r <drive>")
            sys.exit(1)

        # e.g. kitti_2011_09_26_drive_0001_synced.bag
        bag = rosbag.Bag("kitti_{}_drive_{}_{}.bag".format(args.date, args.drive, args.kitti_type[4:]), 'w', compression=compression)
        kitti = pykitti.raw(args.dir, args.date, args.drive)
        if not os.path.exists(kitti.data_path):
            print('Path {} does not exists. Exiting.'.format(kitti.data_path))
            sys.exit(1)

        if len(kitti.timestamps) == 0:
            print('Dataset is empty? Exiting.')
            sys.exit(1)

        try:
            # IMU / GPS / lidar frame ids and output topics
            imu_frame_id = 'imu_link'
            imu_topic = '/kitti/oxts/imu'
            imu_raw_topic = '/imu_raw'
            gps_fix_topic = '/gps/fix'
            gps_vel_topic = '/gps/vel'
            velo_frame_id = 'velodyne'
            velo_topic = '/points_raw'

            # Hand-measured base_link -> IMU offset (meters) — TODO confirm against vehicle specs.
            T_base_link_to_imu = np.eye(4, 4)
            T_base_link_to_imu[0:3, 3] = [-2.71/2.0-0.05, 0.32, 0.93]

            # tf_static tree: base_link -> imu -> {velodyne, cameras}
            transforms = [
                ('base_link', imu_frame_id, T_base_link_to_imu),
                (imu_frame_id, velo_frame_id, inv(kitti.calib.T_velo_imu)),
                (imu_frame_id, cameras[0][1], inv(kitti.calib.T_cam0_imu)),
                (imu_frame_id, cameras[1][1], inv(kitti.calib.T_cam1_imu)),
                (imu_frame_id, cameras[2][1], inv(kitti.calib.T_cam2_imu)),
                (imu_frame_id, cameras[3][1], inv(kitti.calib.T_cam3_imu))
            ]

            util = pykitti.utils.read_calib_file(os.path.join(kitti.calib_path, 'calib_cam_to_cam.txt'))

            # Export — static TF / dynamic TF / filtered IMU exports intentionally disabled here.
            # save_static_transforms(bag, transforms, kitti.timestamps)
            # save_dynamic_tf(bag, kitti, args.kitti_type, initial_time=None)
            # save_imu_data(bag, kitti, imu_frame_id, imu_topic)
            save_imu_data_raw(bag, kitti, imu_frame_id, imu_raw_topic)
            save_gps_fix_data(bag, kitti, imu_frame_id, gps_fix_topic)
            save_gps_vel_data(bag, kitti, imu_frame_id, gps_vel_topic)
            for camera in cameras:
                save_camera_data(bag, args.kitti_type, kitti, util, bridge, camera=camera[0], camera_frame_id=camera[1], topic=camera[2], initial_time=None)
                # NOTE(review): this break exports only camera 0 (gray left) — confirm intended.
                break
            save_velo_data(bag, kitti, velo_frame_id, velo_topic)

        finally:
            # Always print the bag summary and close the file, even on error.
            print("## OVERVIEW ##")
            print(bag)
            bag.close()

    elif args.kitti_type.find("odom") != -1:
        # ODOMETRY datasets require --sequence.
        if args.sequence == None:
            print("Sequence option is not given. It is mandatory for odometry dataset.")
            print("Usage for odometry dataset: kitti2bag {odom_color, odom_gray} [dir] -s <sequence>")
            sys.exit(1)

        bag = rosbag.Bag("kitti_data_odometry_{}_sequence_{}.bag".format(args.kitti_type[5:],args.sequence), 'w', compression=compression)

        kitti = pykitti.odometry(args.dir, args.sequence)
        if not os.path.exists(kitti.sequence_path):
            print('Path {} does not exists. Exiting.'.format(kitti.sequence_path))
            sys.exit(1)

        kitti.load_calib()
        kitti.load_timestamps()

        if len(kitti.timestamps) == 0:
            print('Dataset is empty? Exiting.')
            sys.exit(1)

        # Only sequences 00-10 ship with ground-truth poses.
        if args.sequence in odometry_sequences[:11]:
            print("Odometry dataset sequence {} has ground truth information (poses).".format(args.sequence))
            kitti.load_poses()

        try:
            util = pykitti.utils.read_calib_file(os.path.join(args.dir,'sequences',args.sequence, 'calib.txt'))
            # Odometry timestamps are relative; anchor them at the current epoch.
            current_epoch = (datetime.utcnow() - datetime(1970, 1, 1)).total_seconds()
            # Export
            if args.kitti_type.find("gray") != -1:
                used_cameras = cameras[:2]
            elif args.kitti_type.find("color") != -1:
                used_cameras = cameras[-2:]

            save_dynamic_tf(bag, kitti, args.kitti_type, initial_time=current_epoch)
            for camera in used_cameras:
                save_camera_data(bag, args.kitti_type, kitti, util, bridge, camera=camera[0], camera_frame_id=camera[1], topic=camera[2], initial_time=current_epoch)

        finally:
            print("## OVERVIEW ##")
            print(bag)
            bag.close()
================================================
FILE: config/params.yaml
================================================
lio_sam:
# Topics
pointCloudTopic: "points_raw" # Point cloud data
imuTopic: "imu_correct" # IMU data
odomTopic: "odometry/imu" # IMU pre-preintegration odometry, same frequency as IMU
gpsTopic: "odometry/gpsz" # GPS odometry topic from navsat, see module_navsat.launch file
# GPS Settings
useImuHeadingInitialization: true # if using GPS data, set to "true"
useGpsElevation: false # if GPS elevation is bad, set to "false"
gpsCovThreshold: 2.0 # m^2, threshold for using GPS data
poseCovThreshold: 25.0 # m^2, threshold for using GPS data
# Export settings
savePCD: false # https://github.com/TixiaoShan/LIO-SAM/issues/3
savePCDDirectory: "/Downloads/LOAM/" # in your home folder, starts and ends with "/". Warning: the code deletes "LOAM" folder then recreates it. See "mapOptimization" for implementation
# Sensor Settings
N_SCAN: 16 # number of lidar channel (i.e., 16, 32, 64, 128)
Horizon_SCAN: 1800 # lidar horizontal resolution (Velodyne:1800, Ouster:512,1024,2048)
timeField: "time" # point timestamp field, Velodyne - "time", Ouster - "t"
downsampleRate: 1 # default: 1. Downsample your data if too many points. i.e., 16 = 64 / 4, 16 = 16 / 1
# IMU Settings
imuAccNoise: 3.9939570888238808e-03
imuGyrNoise: 1.5636343949698187e-03
imuAccBiasN: 6.4356659353532566e-05
imuGyrBiasN: 3.5640318696367613e-05
imuGravity: 9.80511
# Extrinsics (lidar -> IMU)
extrinsicTrans: [0.0, 0.0, 0.0]
extrinsicRot: [1, 0, 0,
0, 1, 0,
0, 0, 1]
extrinsicRPY: [1, 0, 0,
0, 1, 0,
0, 0, 1]
# extrinsicRot: [1, 0, 0,
# 0, 1, 0,
# 0, 0, 1]
# extrinsicRPY: [1, 0, 0,
# 0, 1, 0,
# 0, 0, 1]
# LOAM feature threshold
edgeThreshold: 1.0
surfThreshold: 0.1
edgeFeatureMinValidNum: 10
surfFeatureMinValidNum: 100
# voxel filter params
odometrySurfLeafSize: 0.4 # default: 0.4
mappingCornerLeafSize: 0.2 # default: 0.2
mappingSurfLeafSize: 0.4 # default: 0.4
# robot motion constraint (in case you are using a 2D robot)
z_tollerance: 1000 # meters
rotation_tollerance: 1000 # radians
# CPU Params
numberOfCores: 4 # number of cores for mapping optimization
mappingProcessInterval: 0.15 # seconds, regulate mapping frequency
# Surrounding map
surroundingkeyframeAddingDistThreshold: 1.0 # meters, regulate keyframe adding threshold
surroundingkeyframeAddingAngleThreshold: 0.2 # radians, regulate keyframe adding threshold
surroundingKeyframeDensity: 2.0 # meters, downsample surrounding keyframe poses
surroundingKeyframeSearchRadius: 50.0 # meters, within n meters scan-to-map optimization (when loop closure disabled)
# Loop closure
loopClosureEnableFlag: false
surroundingKeyframeSize: 25 # submap size (when loop closure enabled)
historyKeyframeSearchRadius: 15.0 # meters, key frame that is within n meters from current pose will be considered for loop closure
historyKeyframeSearchTimeDiff: 30.0 # seconds, key frame that is n seconds older will be considered for loop closure
historyKeyframeSearchNum: 25 # number of history key frames will be fused into a submap for loop closure
historyKeyframeFitnessScore: 0.3 # icp threshold, the smaller the better alignment
# Visualization
globalMapVisualizationSearchRadius: 1000.0 # meters, global map visualization radius
globalMapVisualizationPoseDensity: 10.0 # meters, global map visualization keyframe density
globalMapVisualizationLeafSize: 1.0 # meters, global map visualization cloud density
# Navsat (convert GPS coordinates to Cartesian)
navsat:
frequency: 50
wait_for_datum: false
delay: 0.0
magnetic_declination_radians: 0
yaw_offset: 0
zero_altitude: true
broadcast_utm_transform: false
broadcast_utm_transform_as_parent_frame: false
publish_filtered_gps: false
# EKF for Navsat
ekf_gps:
publish_tf: false
map_frame: map
odom_frame: odom
base_link_frame: base_link
world_frame: odom
frequency: 50
two_d_mode: false
sensor_timeout: 0.01
# -------------------------------------
# External IMU:
# -------------------------------------
imu0: imu_correct
# make sure the input is aligned with ROS REP105. "imu_correct" is manually transformed by myself. EKF can also transform the data using tf between your imu and base_link
imu0_config: [false, false, false,
true, true, true,
false, false, false,
false, false, true,
true, true, true]
imu0_differential: false
imu0_queue_size: 50
imu0_remove_gravitational_acceleration: true
# -------------------------------------
# Odometry (From Navsat):
# -------------------------------------
odom0: odometry/gps
odom0_config: [true, true, true,
false, false, false,
false, false, false,
false, false, false,
false, false, false]
odom0_differential: false
odom0_queue_size: 10
# x y z r p y x_dot y_dot z_dot r_dot p_dot y_dot x_ddot y_ddot z_ddot
process_noise_covariance: [ 1.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 10.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0.03, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0.03, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0.1, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0.25, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0.25, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0.04, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.5, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.01, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.015]
================================================
FILE: include/utility.h
================================================
#pragma once
#ifndef _UTILITY_LIDAR_ODOMETRY_H_
#define _UTILITY_LIDAR_ODOMETRY_H_
#include <ros/ros.h>
#include <std_msgs/Header.h>
#include <sensor_msgs/Imu.h>
#include <sensor_msgs/PointCloud2.h>
#include <sensor_msgs/NavSatFix.h>
#include <nav_msgs/Odometry.h>
#include <nav_msgs/Path.h>
#include <opencv/cv.h>
#include <pcl/point_cloud.h>
#include <pcl/point_types.h>
#include <pcl/search/impl/search.hpp>
#include <pcl/range_image/range_image.h>
#include <pcl/kdtree/kdtree_flann.h>
#include <pcl/common/common.h>
#include <pcl/common/transforms.h>
#include <pcl/registration/icp.h>
#include <pcl/registration/ndt.h>
#include <pcl/io/pcd_io.h>
#include <pcl/filters/filter.h>
#include <pcl/filters/voxel_grid.h>
#include <pcl/filters/crop_box.h>
#include <pcl_conversions/pcl_conversions.h>
#include <tf/LinearMath/Quaternion.h>
#include <tf/transform_listener.h>
#include <tf/transform_datatypes.h>
#include <tf/transform_broadcaster.h>
#include <vector>
#include <cmath>
#include <algorithm>
#include <queue>
#include <deque>
#include <iostream>
#include <fstream>
#include <ctime>
#include <cfloat>
#include <iterator>
#include <sstream>
#include <string>
#include <limits>
#include <iomanip>
#include <array>
#include <thread>
#include <mutex>
using namespace std;
typedef pcl::PointXYZI PointType;
// Base class shared by all LIO-SAM nodes: loads every ROS parameter once in the
// constructor and exposes them as public members, plus an IMU frame converter.
class ParamServer
{
public:

    ros::NodeHandle nh;

    std::string robot_id;

    // Topics
    string pointCloudTopic;   // raw lidar points
    string imuTopic;          // raw IMU data
    string odomTopic;         // IMU pre-integration odometry
    string gpsTopic;          // GPS odometry from navsat

    // GPS Settings
    bool useImuHeadingInitialization;
    bool useGpsElevation;
    float gpsCovThreshold;    // m^2, reject GPS above this covariance
    float poseCovThreshold;   // m^2, use GPS only when pose covariance exceeds this

    // Save pcd
    bool savePCD;
    string savePCDDirectory;

    // Velodyne Sensor Configuration: Velodyne
    int N_SCAN;               // number of lidar rings
    int Horizon_SCAN;         // horizontal resolution
    string timeField;         // per-point timestamp field name ("time" or "t")
    int downsampleRate;

    // IMU noise / bias parameters (fed to GTSAM pre-integration elsewhere)
    float imuAccNoise;
    float imuGyrNoise;
    float imuAccBiasN;
    float imuGyrBiasN;
    float imuGravity;
    vector<double> extRotV;   // raw 3x3 extrinsic rotation from params
    vector<double> extRPYV;   // raw 3x3 extrinsic RPY rotation from params
    vector<double> extTransV; // raw 3x1 extrinsic translation from params
    Eigen::Matrix3d extRot;   // lidar -> IMU rotation (for acc/gyro)
    Eigen::Matrix3d extRPY;   // lidar -> IMU rotation (for orientation)
    Eigen::Vector3d extTrans;
    Eigen::Quaterniond extQRPY;

    // LOAM feature extraction thresholds
    float edgeThreshold;
    float surfThreshold;
    int edgeFeatureMinValidNum;
    int surfFeatureMinValidNum;

    // voxel filter params
    float odometrySurfLeafSize;
    float mappingCornerLeafSize;
    float mappingSurfLeafSize ;

    // motion constraints for 2D robots (radians / meters)
    float z_tollerance;
    float rotation_tollerance;

    // CPU Params
    int numberOfCores;
    double mappingProcessInterval;  // seconds between mapping runs

    // Surrounding map
    float surroundingkeyframeAddingDistThreshold;
    float surroundingkeyframeAddingAngleThreshold;
    float surroundingKeyframeDensity;
    float surroundingKeyframeSearchRadius;

    // Loop closure
    bool loopClosureEnableFlag;
    int surroundingKeyframeSize;
    float historyKeyframeSearchRadius;
    float historyKeyframeSearchTimeDiff;
    int historyKeyframeSearchNum;
    float historyKeyframeFitnessScore;

    // global map visualization radius
    float globalMapVisualizationSearchRadius;
    float globalMapVisualizationPoseDensity;
    float globalMapVisualizationLeafSize;

    // Reads every parameter under "lio_sam/" with the listed defaults.
    ParamServer()
    {
        nh.param<std::string>("/robot_id", robot_id, "roboat");

        nh.param<std::string>("lio_sam/pointCloudTopic", pointCloudTopic, "points_raw");
        nh.param<std::string>("lio_sam/imuTopic", imuTopic, "imu_correct");
        nh.param<std::string>("lio_sam/odomTopic", odomTopic, "odometry/imu");
        nh.param<std::string>("lio_sam/gpsTopic", gpsTopic, "odometry/gps");

        nh.param<bool>("lio_sam/useImuHeadingInitialization", useImuHeadingInitialization, false);
        nh.param<bool>("lio_sam/useGpsElevation", useGpsElevation, false);
        nh.param<float>("lio_sam/gpsCovThreshold", gpsCovThreshold, 2.0);
        nh.param<float>("lio_sam/poseCovThreshold", poseCovThreshold, 25.0);

        nh.param<bool>("lio_sam/savePCD", savePCD, false);
        nh.param<std::string>("lio_sam/savePCDDirectory", savePCDDirectory, "/Downloads/LOAM/");

        nh.param<int>("lio_sam/N_SCAN", N_SCAN, 16);
        nh.param<int>("lio_sam/Horizon_SCAN", Horizon_SCAN, 1800);
        nh.param<std::string>("lio_sam/timeField", timeField, "time");
        nh.param<int>("lio_sam/downsampleRate", downsampleRate, 1);

        nh.param<float>("lio_sam/imuAccNoise", imuAccNoise, 0.01);
        nh.param<float>("lio_sam/imuGyrNoise", imuGyrNoise, 0.001);
        nh.param<float>("lio_sam/imuAccBiasN", imuAccBiasN, 0.0002);
        nh.param<float>("lio_sam/imuGyrBiasN", imuGyrBiasN, 0.00003);
        nh.param<float>("lio_sam/imuGravity", imuGravity, 9.80511);
        nh.param<vector<double>>("lio_sam/extrinsicRot", extRotV, vector<double>());
        nh.param<vector<double>>("lio_sam/extrinsicRPY", extRPYV, vector<double>());
        nh.param<vector<double>>("lio_sam/extrinsicTrans", extTransV, vector<double>());
        // Map the flat row-major vectors into Eigen matrices.
        // NOTE(review): if the params are missing, the vectors are empty and these
        // Maps read out-of-bounds memory — relies on params.yaml always providing them.
        extRot = Eigen::Map<const Eigen::Matrix<double, -1, -1, Eigen::RowMajor>>(extRotV.data(), 3, 3);
        extRPY = Eigen::Map<const Eigen::Matrix<double, -1, -1, Eigen::RowMajor>>(extRPYV.data(), 3, 3);
        extTrans = Eigen::Map<const Eigen::Matrix<double, -1, -1, Eigen::RowMajor>>(extTransV.data(), 3, 1);
        extQRPY = Eigen::Quaterniond(extRPY);

        nh.param<float>("lio_sam/edgeThreshold", edgeThreshold, 0.1);
        nh.param<float>("lio_sam/surfThreshold", surfThreshold, 0.1);
        nh.param<int>("lio_sam/edgeFeatureMinValidNum", edgeFeatureMinValidNum, 10);
        nh.param<int>("lio_sam/surfFeatureMinValidNum", surfFeatureMinValidNum, 100);

        nh.param<float>("lio_sam/odometrySurfLeafSize", odometrySurfLeafSize, 0.2);
        nh.param<float>("lio_sam/mappingCornerLeafSize", mappingCornerLeafSize, 0.2);
        nh.param<float>("lio_sam/mappingSurfLeafSize", mappingSurfLeafSize, 0.2);

        nh.param<float>("lio_sam/z_tollerance", z_tollerance, FLT_MAX);
        nh.param<float>("lio_sam/rotation_tollerance", rotation_tollerance, FLT_MAX);

        nh.param<int>("lio_sam/numberOfCores", numberOfCores, 2);
        nh.param<double>("lio_sam/mappingProcessInterval", mappingProcessInterval, 0.15);

        nh.param<float>("lio_sam/surroundingkeyframeAddingDistThreshold", surroundingkeyframeAddingDistThreshold, 1.0);
        nh.param<float>("lio_sam/surroundingkeyframeAddingAngleThreshold", surroundingkeyframeAddingAngleThreshold, 0.2);
        nh.param<float>("lio_sam/surroundingKeyframeDensity", surroundingKeyframeDensity, 1.0);
        nh.param<float>("lio_sam/surroundingKeyframeSearchRadius", surroundingKeyframeSearchRadius, 50.0);

        nh.param<bool>("lio_sam/loopClosureEnableFlag", loopClosureEnableFlag, false);
        nh.param<int>("lio_sam/surroundingKeyframeSize", surroundingKeyframeSize, 50);
        nh.param<float>("lio_sam/historyKeyframeSearchRadius", historyKeyframeSearchRadius, 10.0);
        nh.param<float>("lio_sam/historyKeyframeSearchTimeDiff", historyKeyframeSearchTimeDiff, 30.0);
        nh.param<int>("lio_sam/historyKeyframeSearchNum", historyKeyframeSearchNum, 25);
        nh.param<float>("lio_sam/historyKeyframeFitnessScore", historyKeyframeFitnessScore, 0.3);

        nh.param<float>("lio_sam/globalMapVisualizationSearchRadius", globalMapVisualizationSearchRadius, 1e3);
        nh.param<float>("lio_sam/globalMapVisualizationPoseDensity", globalMapVisualizationPoseDensity, 10.0);
        nh.param<float>("lio_sam/globalMapVisualizationLeafSize", globalMapVisualizationLeafSize, 1.0);

        usleep(100);
    }

    //gc: tranform to lidar frame
    // Rotates IMU acceleration/gyro readings by extRot and the orientation
    // quaternion by extQRPY, then sanity-checks the result.
    sensor_msgs::Imu imuConverter(const sensor_msgs::Imu& imu_in)
    {
        sensor_msgs::Imu imu_out = imu_in;
        // rotate acceleration
        Eigen::Vector3d acc(imu_in.linear_acceleration.x, imu_in.linear_acceleration.y, imu_in.linear_acceleration.z);
        acc = extRot * acc;
        imu_out.linear_acceleration.x = acc.x();
        imu_out.linear_acceleration.y = acc.y();
        imu_out.linear_acceleration.z = acc.z();
        // rotate gyroscope
        Eigen::Vector3d gyr(imu_in.angular_velocity.x, imu_in.angular_velocity.y, imu_in.angular_velocity.z);
        gyr = extRot * gyr;
        imu_out.angular_velocity.x = gyr.x();
        imu_out.angular_velocity.y = gyr.y();
        imu_out.angular_velocity.z = gyr.z();
        // rotate roll pitch yaw
        // NOTE(review): right-multiplying by extQRPY applies the extrinsic in the
        // body frame — confirm the multiplication order matches the extrinsic convention.
        Eigen::Quaterniond q_from(imu_in.orientation.w, imu_in.orientation.x, imu_in.orientation.y, imu_in.orientation.z);
        Eigen::Quaterniond q_final = q_from * extQRPY;
        imu_out.orientation.x = q_final.x();
        imu_out.orientation.y = q_final.y();
        imu_out.orientation.z = q_final.z();
        imu_out.orientation.w = q_final.w();

        // A near-zero-norm quaternion means the IMU never filled in orientation
        // (6-axis device) — LIO-SAM needs absolute orientation, so abort.
        if (sqrt(q_final.x()*q_final.x() + q_final.y()*q_final.y() + q_final.z()*q_final.z() + q_final.w()*q_final.w()) < 0.1)
        {
            ROS_ERROR("Invalid quaternion, please use a 9-axis IMU!");
            ros::shutdown();
        }

        return imu_out;
    }
};
// Convert a PCL cloud to a ROS message, stamp it, and publish it when there is
// at least one subscriber. The converted message is returned in all cases.
sensor_msgs::PointCloud2 publishCloud(ros::Publisher *thisPub, pcl::PointCloud<PointType>::Ptr thisCloud, ros::Time thisStamp, std::string thisFrame)
{
    sensor_msgs::PointCloud2 rosCloud;
    pcl::toROSMsg(*thisCloud, rosCloud);
    rosCloud.header.stamp = thisStamp;
    rosCloud.header.frame_id = thisFrame;
    // Skip the publish (but not the conversion) when nobody is listening.
    if (thisPub->getNumSubscribers() != 0)
        thisPub->publish(rosCloud);
    return rosCloud;
}
// Return the header timestamp of any stamped ROS message (pointer-like) in seconds.
template<typename T>
double ROS_TIME(T msg)
{
    const auto stamp = msg->header.stamp;
    return stamp.toSec();
}
// Copy the gyroscope (angular velocity) readings of an IMU message into the
// caller-provided scalar outputs.
template<typename T>
void imuAngular2rosAngular(sensor_msgs::Imu *thisImuMsg, T *angular_x, T *angular_y, T *angular_z)
{
    const auto &vel = thisImuMsg->angular_velocity;
    *angular_x = vel.x;
    *angular_y = vel.y;
    *angular_z = vel.z;
}
// Copy the accelerometer (linear acceleration) readings of an IMU message into
// the caller-provided scalar outputs.
template<typename T>
void imuAccel2rosAccel(sensor_msgs::Imu *thisImuMsg, T *acc_x, T *acc_y, T *acc_z)
{
    const auto &acc = thisImuMsg->linear_acceleration;
    *acc_x = acc.x;
    *acc_y = acc.y;
    *acc_z = acc.z;
}
// Convert the IMU message's orientation quaternion into roll/pitch/yaw Euler
// angles, written to the caller-provided outputs.
template<typename T>
void imuRPY2rosRPY(sensor_msgs::Imu *thisImuMsg, T *rosRoll, T *rosPitch, T *rosYaw)
{
    tf::Quaternion quat;
    tf::quaternionMsgToTF(thisImuMsg->orientation, quat);

    double roll, pitch, yaw;
    tf::Matrix3x3(quat).getRPY(roll, pitch, yaw);

    *rosRoll = roll;
    *rosPitch = pitch;
    *rosYaw = yaw;
}
// Euclidean norm of a point, i.e. its distance from the sensor origin.
float pointDistance(PointType p)
{
    const float squared = p.x * p.x + p.y * p.y + p.z * p.z;
    return sqrt(squared);
}
// Euclidean distance between two points.
float pointDistance(PointType p1, PointType p2)
{
    const float dx = p1.x - p2.x;
    const float dy = p1.y - p2.y;
    const float dz = p1.z - p2.z;
    return sqrt(dx * dx + dy * dy + dz * dz);
}
#endif
================================================
FILE: launch/include/config/robot.urdf.xacro
================================================
<?xml version="1.0"?>
<robot name="roboat" xmlns:xacro="http://roboat.org">
<xacro:property name="PI" value="3.1415926535897931" />
<link name="base_link"></link>
<joint name="base_link_joint" type="fixed">
<parent link="base_link"/>
<child link="chassis_link" />
<origin xyz="0 0 0" rpy="0 0 0" />
</joint>
<link name="chassis_link"></link>
<link name="imu_link"> </link>
<joint name="imu_joint" type="fixed">
<parent link="chassis_link" />
<child link="imu_link" />
<origin xyz="0 -0.1 0" rpy="${PI} 0 ${PI}" />
</joint>
<link name="imu_enu_link"> </link>
<joint name="imu_enu_joint" type="fixed">
<parent link="imu_link" />
<child link="imu_enu_link" />
<origin xyz="0 0 0" rpy="${PI} 0 ${PI}" />
</joint>
<link name="velodyne"> </link>
<joint name="velodyne_joint" type="fixed">
<parent link="chassis_link" />
<child link="velodyne" />
<origin xyz="0 0 0" rpy="0 0 0" />
</joint>
<link name="realsense_link"> </link>
<joint name="realsense_joint" type="fixed">
<parent link="chassis_link" />
<child link="realsense_link" />
<origin xyz="0.1 0 -0.1" rpy="0 0 0" />
</joint>
<link name="navsat_link"> </link>
<joint name="navsat_joint" type="fixed">
<parent link="chassis_link" />
<child link="navsat_link" />
<origin xyz="-0.2 0 0.2" rpy="0 0 0" />
</joint>
<link name="loam_camera"> </link>
<joint name="_loam_camera_joint" type="fixed">
<parent link="chassis_link" />
<child link="loam_camera" />
<origin xyz="0 0 0" rpy="1.570796 0 1.570796" />
</joint>
</robot>
================================================
FILE: launch/include/config/rviz.rviz
================================================
Panels:
- Class: rviz/Displays
Help Height: 70
Name: Displays
Property Tree Widget:
Expanded:
- /links1
- /Odometry1
- /Point Cloud1
- /Point Cloud1/Velodyne1
- /Feature Cloud1
- /Mapping1
- /Grid1
- /PointCloud21
- /PointCloud22
- /PointCloud23
Splitter Ratio: 0.5600000023841858
Tree Height: 292
- Class: rviz/Selection
Name: Selection
- Class: rviz/Views
Expanded:
- /Current View1
Name: Views
Splitter Ratio: 0.5
- Class: rviz/Tool Properties
Expanded:
- /2D Pose Estimate1
Name: Tool Properties
Splitter Ratio: 0.5
- Class: rviz/Views
Expanded:
- /Current View1
Name: Views
Splitter Ratio: 0.5
Preferences:
PromptSaveOnExit: true
Toolbars:
toolButtonStyle: 2
Visualization Manager:
Class: ""
Displays:
- Class: rviz/Group
Displays:
- Class: rviz/TF
Enabled: false
Frame Timeout: 999
Frames:
All Enabled: false
Marker Scale: 3
Name: TF
Show Arrows: true
Show Axes: true
Show Names: true
Tree:
{}
Update Interval: 0
Value: false
- Class: rviz/Axes
Enabled: true
Length: 2
Name: map
Radius: 0.5
Reference Frame: map
Value: true
- Class: rviz/Axes
Enabled: true
Length: 1
Name: base_link
Radius: 0.30000001192092896
Reference Frame: base_link
Value: true
Enabled: true
Name: links
- Class: rviz/Group
Displays:
- Angle Tolerance: 0.10000000149011612
Class: rviz/Odometry
Covariance:
Orientation:
Alpha: 0.5
Color: 255; 255; 127
Color Style: Unique
Frame: Local
Offset: 1
Scale: 1
Value: true
Position:
Alpha: 0.30000001192092896
Color: 204; 51; 204
Scale: 1
Value: true
Value: false
Enabled: false
Keep: 300
Name: Odo IMU
Position Tolerance: 0.10000000149011612
Shape:
Alpha: 1
Axes Length: 0.25
Axes Radius: 0.10000000149011612
Color: 255; 25; 0
Head Length: 0.30000001192092896
Head Radius: 0.10000000149011612
Shaft Length: 1
Shaft Radius: 0.05000000074505806
Value: Axes
Topic: /odometry/imu
Unreliable: false
Value: false
- Angle Tolerance: 0.10000000149011612
Class: rviz/Odometry
Covariance:
Orientation:
Alpha: 0.5
Color: 255; 255; 127
Color Style: Unique
Frame: Local
Offset: 1
Scale: 1
Value: true
Position:
Alpha: 0.30000001192092896
Color: 204; 51; 204
Scale: 1
Value: true
Value: false
Enabled: true
Keep: 300
Name: Odom GPS
Position Tolerance: 0.10000000149011612
Shape:
Alpha: 1
Axes Length: 1
Axes Radius: 0.30000001192092896
Color: 255; 25; 0
Head Length: 0.30000001192092896
Head Radius: 0.10000000149011612
Shaft Length: 1
Shaft Radius: 0.05000000074505806
Value: Axes
Topic: /odometry/gps
Unreliable: false
Value: true
- Angle Tolerance: 0.10000000149011612
Class: rviz/Odometry
Covariance:
Orientation:
Alpha: 0.5
Color: 255; 255; 127
Color Style: Unique
Frame: Local
Offset: 1
Scale: 1
Value: true
Position:
Alpha: 0.30000001192092896
Color: 204; 51; 204
Scale: 1
Value: true
Value: false
Enabled: false
Keep: 300
Name: Odom lidar
Position Tolerance: 0.10000000149011612
Shape:
Alpha: 1
Axes Length: 0.25
Axes Radius: 0.10000000149011612
Color: 255; 25; 0
Head Length: 0.30000001192092896
Head Radius: 0.10000000149011612
Shaft Length: 1
Shaft Radius: 0.05000000074505806
Value: Axes
Topic: /lio_sam/mapping/odometry
Unreliable: false
Value: false
Enabled: false
Name: Odometry
- Class: rviz/Group
Displays:
- Alpha: 1
Autocompute Intensity Bounds: true
Autocompute Value Bounds:
Max Value: 18.805150985717773
Min Value: -3.3611395359039307
Value: true
Axis: Z
Channel Name: intensity
Class: rviz/PointCloud2
Color: 255; 255; 255
Color Transformer: FlatColor
Decay Time: 0
Enabled: true
Invert Rainbow: false
Max Color: 255; 255; 255
Max Intensity: 255
Min Color: 0; 0; 0
Min Intensity: 0
Name: Velodyne
Position Transformer: XYZ
Queue Size: 10
Selectable: false
Size (Pixels): 2
Size (m): 0.009999999776482582
Style: Points
Topic: /lio_sam/deskew/cloud_deskewed
Unreliable: false
Use Fixed Frame: true
Use rainbow: true
Value: true
- Alpha: 1
Autocompute Intensity Bounds: true
Autocompute Value Bounds:
Max Value: 10
Min Value: -10
Value: true
Axis: Z
Channel Name: intensity
Class: rviz/PointCloud2
Color: 255; 255; 255
Color Transformer: Intensity
Decay Time: 0
Enabled: false
Invert Rainbow: false
Max Color: 255; 255; 255
Max Intensity: 0.7900000214576721
Min Color: 0; 0; 0
Min Intensity: 0
Name: Reg Cloud
Position Transformer: XYZ
Queue Size: 10
Selectable: true
Size (Pixels): 3
Size (m): 0.009999999776482582
Style: Points
Topic: /lio_sam/mapping/cloud_registered
Unreliable: false
Use Fixed Frame: true
Use rainbow: true
Value: false
Enabled: true
Name: Point Cloud
- Class: rviz/Group
Displays:
- Alpha: 1
Autocompute Intensity Bounds: true
Autocompute Value Bounds:
Max Value: 10
Min Value: -10
Value: true
Axis: Z
Channel Name: intensity
Class: rviz/PointCloud2
Color: 0; 255; 0
Color Transformer: FlatColor
Decay Time: 0
Enabled: true
Invert Rainbow: false
Max Color: 255; 255; 255
Max Intensity: 15.077729225158691
Min Color: 0; 0; 0
Min Intensity: 0.019985778257250786
Name: Edge Feature
Position Transformer: XYZ
Queue Size: 10
Selectable: true
Size (Pixels): 5
Size (m): 0.009999999776482582
Style: Points
Topic: /lio_sam/feature/cloud_corner
Unreliable: false
Use Fixed Frame: true
Use rainbow: true
Value: true
- Alpha: 1
Autocompute Intensity Bounds: true
Autocompute Value Bounds:
Max Value: 10
Min Value: -10
Value: true
Axis: Z
Channel Name: intensity
Class: rviz/PointCloud2
Color: 255; 85; 255
Color Transformer: FlatColor
Decay Time: 0
Enabled: true
Invert Rainbow: false
Max Color: 255; 255; 255
Max Intensity: 15.100607872009277
Min Color: 0; 0; 0
Min Intensity: 0.0007188677554950118
Name: Plannar Feature
Position Transformer: XYZ
Queue Size: 10
Selectable: true
Size (Pixels): 3
Size (m): 0.009999999776482582
Style: Points
Topic: /lio_sam/feature/cloud_surface
Unreliable: false
Use Fixed Frame: true
Use rainbow: true
Value: true
Enabled: false
Name: Feature Cloud
- Class: rviz/Group
Displays:
- Alpha: 0.30000001192092896
Autocompute Intensity Bounds: true
Autocompute Value Bounds:
Max Value: 7.4858574867248535
Min Value: -1.2309398651123047
Value: true
Axis: Z
Channel Name: intensity
Class: rviz/PointCloud2
Color: 255; 255; 255
Color Transformer: Intensity
Decay Time: 500
Enabled: false
Invert Rainbow: false
Max Color: 255; 255; 255
Max Intensity: 101.66666412353516
Min Color: 0; 0; 0
Min Intensity: 1
Name: Map (cloud)
Position Transformer: XYZ
Queue Size: 10
Selectable: false
Size (Pixels): 2
Size (m): 0.009999999776482582
Style: Points
Topic: /lio_sam/mapping/cloud_registered
Unreliable: false
Use Fixed Frame: true
Use rainbow: true
Value: false
- Alpha: 1
Autocompute Intensity Bounds: true
Autocompute Value Bounds:
Max Value: 23.98253059387207
Min Value: -2.447427749633789
Value: true
Axis: Z
Channel Name: intensity
Class: rviz/PointCloud2
Color: 255; 255; 255
Color Transformer: AxisColor
Decay Time: 0
Enabled: false
Invert Rainbow: false
Max Color: 255; 255; 255
Max Intensity: 15.100604057312012
Min Color: 0; 0; 0
Min Intensity: 0.014545874670147896
Name: Map (local)
Position Transformer: XYZ
Queue Size: 10
Selectable: true
Size (Pixels): 2
Size (m): 0.10000000149011612
Style: Flat Squares
Topic: /lio_sam/mapping/map_local
Unreliable: false
Use Fixed Frame: true
Use rainbow: true
Value: false
- Alpha: 1
Autocompute Intensity Bounds: true
Autocompute Value Bounds:
Max Value: 10
Min Value: -10
Value: true
Axis: Z
Channel Name: intensity
Class: rviz/PointCloud2
Color: 255; 255; 255
Color Transformer: Intensity
Decay Time: 0
Enabled: false
Invert Rainbow: false
Max Color: 255; 255; 255
Max Intensity: 15.100096702575684
Min Color: 0; 0; 0
Min Intensity: 0.007923072203993797
Name: Map (global)
Position Transformer: XYZ
Queue Size: 10
Selectable: true
Size (Pixels): 2
Size (m): 0.009999999776482582
Style: Points
Topic: /lio_sam/mapping/map_global
Unreliable: false
Use Fixed Frame: true
Use rainbow: true
Value: false
- Alpha: 1
Buffer Length: 1
Class: rviz/Path
Color: 85; 255; 255
Enabled: true
Head Diameter: 0.30000001192092896
Head Length: 0.20000000298023224
Length: 0.30000001192092896
Line Style: Billboards
Line Width: 0.10000000149011612
Name: Path (global)
Offset:
X: 0
Y: 0
Z: 0
Pose Color: 255; 85; 255
Pose Style: None
Radius: 0.029999999329447746
Shaft Diameter: 0.10000000149011612
Shaft Length: 0.10000000149011612
Topic: /lio_sam/mapping/path
Unreliable: false
Value: true
- Alpha: 1
Buffer Length: 1
Class: rviz/Path
Color: 255; 0; 127
Enabled: true
Head Diameter: 0.30000001192092896
Head Length: 0.20000000298023224
Length: 0.30000001192092896
Line Style: Billboards
Line Width: 0.10000000149011612
Name: Path (local)
Offset:
X: 0
Y: 0
Z: 0
Pose Color: 255; 85; 255
Pose Style: None
Radius: 0.029999999329447746
Shaft Diameter: 0.10000000149011612
Shaft Length: 0.10000000149011612
Topic: /lio_sam/imu/path
Unreliable: false
Value: true
Enabled: true
Name: Mapping
- Alpha: 0.5
Cell Size: 1
Class: rviz/Grid
Color: 160; 160; 164
Enabled: true
Line Style:
Line Width: 0.029999999329447746
Value: Lines
Name: Grid
Normal Cell Count: 0
Offset:
X: 0
Y: 0
Z: 0
Plane: XY
Plane Cell Count: 200
Reference Frame: <Fixed Frame>
Value: true
- Alpha: 1
Autocompute Intensity Bounds: true
Autocompute Value Bounds:
Max Value: 10
Min Value: -10
Value: true
Axis: Z
Channel Name: x
Class: rviz/PointCloud2
Color: 255; 255; 255
Color Transformer: Intensity
Decay Time: 0
Enabled: true
Invert Rainbow: false
Max Color: 255; 255; 255
Max Intensity: 182.80648803710938
Min Color: 0; 0; 0
Min Intensity: -118.49858856201172
Name: PointCloud2
Position Transformer: XYZ
Queue Size: 10
Selectable: true
Size (Pixels): 3
Size (m): 0.20000000298023224
Style: Flat Squares
Topic: /lio_sam/mapping/cloud_map_map
Unreliable: false
Use Fixed Frame: true
Use rainbow: false
Value: true
- Alpha: 1
Autocompute Intensity Bounds: true
Autocompute Value Bounds:
Max Value: 10
Min Value: -10
Value: true
Axis: Z
Channel Name: intensity
Class: rviz/PointCloud2
Color: 255; 255; 255
Color Transformer: Intensity
Decay Time: 0
Enabled: true
Invert Rainbow: false
Max Color: 255; 255; 255
Max Intensity: 120
Min Color: 0; 0; 0
Min Intensity: 0
Name: PointCloud2
Position Transformer: XYZ
Queue Size: 10
Selectable: true
Size (Pixels): 3
Size (m): 0.5
Style: Flat Squares
Topic: /lio_sam/mapping/lasercloud_in_world
Unreliable: false
Use Fixed Frame: true
Use rainbow: true
Value: true
- Alpha: 1
Autocompute Intensity Bounds: true
Autocompute Value Bounds:
Max Value: 19.07742691040039
Min Value: -7.043781757354736
Value: true
Axis: Z
Channel Name: intensity
Class: rviz/PointCloud2
Color: 255; 255; 255
Color Transformer: AxisColor
Decay Time: 0
Enabled: true
Invert Rainbow: false
Max Color: 255; 255; 255
Max Intensity: -999999
Min Color: 0; 0; 0
Min Intensity: 999999
Name: PointCloud2
Position Transformer: XYZ
Queue Size: 10
Selectable: true
Size (Pixels): 3
Size (m): 0.30000001192092896
Style: Flat Squares
Topic: /lio_sam/mapping/laserclouinmapframe
Unreliable: false
Use Fixed Frame: true
Use rainbow: true
Value: true
Enabled: true
Global Options:
Background Color: 0; 0; 0
Default Light: true
Fixed Frame: map
Frame Rate: 30
Name: root
Tools:
- Class: rviz/Interact
Hide Inactive Objects: true
- Class: rviz/FocusCamera
- Class: rviz/Measure
- Class: rviz/SetInitialPose
Theta std deviation: 0.2617993950843811
Topic: initialpose
X std deviation: 0.5
Y std deviation: 0.5
Value: true
Views:
Current:
Class: rviz/Orbit
Distance: 133.46669006347656
Enable Stereo Rendering:
Stereo Eye Separation: 0.05999999865889549
Stereo Focal Distance: 1
Swap Stereo Eyes: false
Value: false
Focal Point:
X: 5.347261428833008
Y: 2.5245871543884277
Z: -9.126422882080078
Focal Shape Fixed Size: false
Focal Shape Size: 0.05000000074505806
Invert Z Axis: false
Name: Current View
Near Clip Distance: 0.009999999776482582
Pitch: 1.1797966957092285
Target Frame: base_link
Value: Orbit (rviz)
Yaw: 4.47352409362793
Saved: ~
Window Geometry:
Displays:
collapsed: false
Height: 713
Hide Left Dock: false
Hide Right Dock: true
QMainWindow State: 000000ff00000000fd0000000300000000000001a30000026ffc020000000afb0000001200530065006c0065006300740069006f006e00000001e10000009b0000005c00fffffffb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb0000000a0049006d006100670065000000002e000001340000000000000000fb000000100044006900730070006c006100790073010000003d000001a7000000c900fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730000000286000000a70000005c00fffffffb0000000a0056006900650077007301000001ea000000c2000000a400ffffff000000010000011100000435fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073000000002e00000435000000a400fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b2000000000000000000000003000004b000000040fc0100000002fb0000000800540069006d00650000000000000004b00000000000000000fb0000000800540069006d006501000000000000045000000000000000000000036a0000026f00000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000
Selection:
collapsed: false
Tool Properties:
collapsed: false
Views:
collapsed: false
Width: 1299
X: 67
Y: 27
================================================
FILE: launch/include/module_loam.launch
================================================
<launch>
<arg name="project" default="lio_sam"/>
<node pkg="$(arg project)" type="$(arg project)_imuPreintegration" name="$(arg project)_imuPreintegration" output="screen" respawn="true"/>
<node pkg="$(arg project)" type="$(arg project)_imageProjection" name="$(arg project)_imageProjection" output="screen" respawn="true"/>
<node pkg="$(arg project)" type="$(arg project)_featureExtraction" name="$(arg project)_featureExtraction" output="screen" respawn="true"/>
<node pkg="$(arg project)" type="$(arg project)_mapOptmization" name="$(arg project)_mapOptmization" output="screen" respawn="true"/>
</launch>
================================================
FILE: launch/include/module_navsat.launch
================================================
<launch>
<arg name="project" default="lio_sam"/>
<env name="ROSCONSOLE_CONFIG_FILE" value="$(find lio_sam)/launch/include/rosconsole/rosconsole_error.conf"/>
<!-- EKF GPS-->
<node pkg="robot_localization" type="ekf_localization_node" name="ekf_gps" respawn="true">
<remap from="odometry/filtered" to="odometry/navsat" />
</node>
<!-- Navsat -->
<node pkg="robot_localization" type="navsat_transform_node" name="navsat" respawn="true">
<!-- <rosparam param="datum">[42.35893211, -71.09345588, 0.0, world, base_link]</rosparam> -->
<remap from="imu/data" to="imu_correct" />
<remap from="gps/fix" to="gps/fix" />
<remap from="odometry/filtered" to="odometry/navsat" />
</node>
</launch>
================================================
FILE: launch/include/module_relocolize.launch
================================================
<launch>
<arg name="project" default="lio_sam"/>
<node pkg="$(arg project)" type="$(arg project)_imuPreintegration" name="$(arg project)_imuPreintegration" output="screen" respawn="true"/>
<node pkg="$(arg project)" type="$(arg project)_imageProjection" name="$(arg project)_imageProjection" output="screen" respawn="true"/>
<node pkg="$(arg project)" type="$(arg project)_featureExtraction" name="$(arg project)_featureExtraction" output="screen" respawn="true"/>
<node pkg="$(arg project)" type="$(arg project)_globalLocalize" name="$(arg project)_mapOptmization" output="screen" respawn="true"/>
</launch>
================================================
FILE: launch/include/module_robot_state_publisher.launch
================================================
<launch>
<arg name="project" default="lio_sam"/>
<param name="robot_description" command="$(find xacro)/xacro $(find lio_sam)/launch/include/config/robot.urdf.xacro --inorder" />
<node name="robot_state_publisher" pkg="robot_state_publisher" type="robot_state_publisher" respawn="true">
<!-- <param name="tf_prefix" value="$(env ROS_HOSTNAME)"/> -->
</node>
</launch>
================================================
FILE: launch/include/module_rviz.launch
================================================
<launch>
<arg name="project" default="lio_sam"/>
<!--- Run Rviz-->
<node pkg="rviz" type="rviz" name="$(arg project)_rviz" args="-d $(find lio_sam)/launch/include/config/rviz.rviz" />
</launch>
================================================
FILE: launch/include/rosconsole/rosconsole_error.conf
================================================
# Set the default ros output to error and higher
log4j.logger.ros = ERROR
================================================
FILE: launch/include/rosconsole/rosconsole_info.conf
================================================
# Set the default ros output to info and higher
log4j.logger.ros = INFO
================================================
FILE: launch/include/rosconsole/rosconsole_warn.conf
================================================
# Set the default ros output to warning and higher
log4j.logger.ros = WARN
================================================
FILE: launch/run.launch
================================================
<launch>
<arg name="project" default="lio_sam"/>
<!-- Parameters -->
<rosparam file="$(find lio_sam)/config/params.yaml" command="load" />
<!--- LOAM -->
<include file="$(find lio_sam)/launch/include/module_loam.launch" />
<!--- Robot State TF -->
<include file="$(find lio_sam)/launch/include/module_robot_state_publisher.launch" />
<!--- Run Navsat -->
<include file="$(find lio_sam)/launch/include/module_navsat.launch" />
<!--- Run Rviz-->
<include file="$(find lio_sam)/launch/include/module_rviz.launch" />
</launch>
================================================
FILE: launch/run_relocalize.launch
================================================
<launch>
<arg name="project" default="lio_sam"/>
<!-- Parameters -->
<rosparam file="$(find lio_sam)/config/params.yaml" command="load" />
<!--- LOAM -->
<include file="$(find lio_sam)/launch/include/module_relocolize.launch" />
<!--- Robot State TF -->
<include file="$(find lio_sam)/launch/include/module_robot_state_publisher.launch" />
<!--- Run Navsat -->
<include file="$(find lio_sam)/launch/include/module_navsat.launch" />
<!--- Run Rviz-->
<include file="$(find lio_sam)/launch/include/module_rviz.launch" />
</launch>
================================================
FILE: msg/cloud_info.msg
================================================
# Cloud Info
Header header
int32[] startRingIndex
int32[] endRingIndex
int32[] pointColInd # point column index in range image
float32[] pointRange # point range
int64 imuAvailable
int64 odomAvailable
# Attitude for LOAM initialization
float32 imuRollInit
float32 imuPitchInit
float32 imuYawInit
# Initial guess from imu pre-integration
float32 initialGuessX
float32 initialGuessY
float32 initialGuessZ
float32 initialGuessRoll
float32 initialGuessPitch
float32 initialGuessYaw
# Preintegration reset ID
int64 imuPreintegrationResetId
# Point cloud messages
sensor_msgs/PointCloud2 cloud_deskewed # original cloud deskewed
sensor_msgs/PointCloud2 cloud_corner # extracted corner feature
sensor_msgs/PointCloud2 cloud_surface # extracted surface feature
================================================
FILE: package.xml
================================================
<?xml version="1.0"?>
<package>
<name>lio_sam</name>
<version>1.0.0</version>
<description>Lidar Odometry</description>
<maintainer email="shant@mit.edu">Tixiao Shan</maintainer>
<license>TODO</license>
<author>Tixiao Shan</author>
<buildtool_depend>catkin</buildtool_depend>
<build_depend>roscpp</build_depend>
<run_depend>roscpp</run_depend>
<build_depend>rospy</build_depend>
<run_depend>rospy</run_depend>
<build_depend>tf</build_depend>
<run_depend>tf</run_depend>
<build_depend>cv_bridge</build_depend>
<run_depend>cv_bridge</run_depend>
<build_depend>pcl_conversions</build_depend>
<run_depend>pcl_conversions</run_depend>
<build_depend>std_msgs</build_depend>
<run_depend>std_msgs</run_depend>
<build_depend>sensor_msgs</build_depend>
<run_depend>sensor_msgs</run_depend>
<build_depend>geometry_msgs</build_depend>
<run_depend>geometry_msgs</run_depend>
<build_depend>nav_msgs</build_depend>
<run_depend>nav_msgs</run_depend>
<build_depend>message_generation</build_depend>
<run_depend>message_generation</run_depend>
<build_depend>message_runtime</build_depend>
<run_depend>message_runtime</run_depend>
<build_depend>GTSAM</build_depend>
<run_depend>GTSAM</run_depend>
</package>
================================================
FILE: src/featureExtraction.cpp
================================================
#include "utility.h"
#include "lio_sam/cloud_info.h"
// Pairs a point's curvature ("smoothness") value with its index in the
// extracted cloud, so points can be sorted by curvature per ring section
// while keeping track of their original position.
struct smoothness_t{
    float value;  // curvature computed in calculateSmoothness()
    size_t ind;   // index of the point in the extracted (deskewed) cloud
};
// Strict weak ordering for smoothness_t: ascending curvature value
// (the stored index does not participate in the comparison).
struct by_value{
    bool operator()(smoothness_t const &left, smoothness_t const &right) {
        return right.value > left.value;
    }
};
// Feature-extraction node: consumes the deskewed cloud produced by the
// imageProjection node, computes a per-point curvature ("smoothness")
// measure, rejects unreliable points (occlusion edges, beams nearly
// parallel to a surface), and splits the scan into edge ("corner") and
// planar ("surface") feature clouds that are republished for the
// mapping / localization stage.
class FeatureExtraction : public ParamServer
{

public:

    ros::Subscriber subLaserCloudInfo;  // in:  deskewed cloud + scan metadata
    ros::Publisher pubLaserCloudInfo;   // out: cloud_info augmented with features
    ros::Publisher pubCornerPoints;     // out: corner features (visualization)
    ros::Publisher pubSurfacePoints;    // out: surface features (visualization)

    pcl::PointCloud<PointType>::Ptr extractedCloud; // deskewed cloud of the current scan
    pcl::PointCloud<PointType>::Ptr cornerCloud;    // corner features of the current scan
    pcl::PointCloud<PointType>::Ptr surfaceCloud;   // surface features of the current scan

    pcl::VoxelGrid<PointType> downSizeFilter;       // applied to surface candidates only

    lio_sam::cloud_info cloudInfo;  // latest message (ranges, column indices, ring bounds)
    std_msgs::Header cloudHeader;   // header of the latest message

    std::vector<smoothness_t> cloudSmoothness;  // (curvature, index) pairs, sorted per ring section

    // Flat per-point working arrays, sized N_SCAN*Horizon_SCAN in
    // initializationValue() and kept for the lifetime of the node
    // (allocated once, intentionally never freed).
    float *cloudCurvature;
    int *cloudNeighborPicked;   // 1 = point excluded from feature selection
    int *cloudLabel;            // 1 = corner, -1 = surface, 0 = unclassified

    FeatureExtraction()
    {
        // Input: deskewed cloud info from imageProjection.
        subLaserCloudInfo = nh.subscribe<lio_sam::cloud_info>("lio_sam/deskew/cloud_info", 10, &FeatureExtraction::laserCloudInfoHandler, this, ros::TransportHints().tcpNoDelay());

        // Outputs: augmented cloud info plus the individual feature clouds.
        pubLaserCloudInfo = nh.advertise<lio_sam::cloud_info> ("lio_sam/feature/cloud_info", 10);
        pubCornerPoints = nh.advertise<sensor_msgs::PointCloud2>("lio_sam/feature/cloud_corner", 1);
        pubSurfacePoints = nh.advertise<sensor_msgs::PointCloud2>("lio_sam/feature/cloud_surface", 1);

        initializationValue();
    }

    // Allocate all per-scan buffers and configure the surface down-sampling
    // filter from the odometrySurfLeafSize parameter.
    void initializationValue()
    {
        cloudSmoothness.resize(N_SCAN*Horizon_SCAN);

        downSizeFilter.setLeafSize(odometrySurfLeafSize, odometrySurfLeafSize, odometrySurfLeafSize);

        extractedCloud.reset(new pcl::PointCloud<PointType>());
        cornerCloud.reset(new pcl::PointCloud<PointType>());
        surfaceCloud.reset(new pcl::PointCloud<PointType>());

        cloudCurvature = new float[N_SCAN*Horizon_SCAN];
        cloudNeighborPicked = new int[N_SCAN*Horizon_SCAN];
        cloudLabel = new int[N_SCAN*Horizon_SCAN];
    }

    // Per-scan pipeline: curvature -> reject unreliable points -> extract
    // features -> publish.
    void laserCloudInfoHandler(const lio_sam::cloud_infoConstPtr& msgIn)
    {
        cloudInfo = *msgIn; // new cloud info
        cloudHeader = msgIn->header; // new cloud header
        pcl::fromROSMsg(msgIn->cloud_deskewed, *extractedCloud); // new cloud for extraction

        calculateSmoothness();   // curvature of every point

        markOccludedPoints();    // flag occluded / parallel-beam points as unusable

        extractFeatures();       // classify corner and surface features

        publishFeatureCloud();
    }

    // Curvature of each point: squared sum of range differences to its 10
    // same-scan-line neighbours (the classic LOAM smoothness measure).
    // Large values indicate edges, small values indicate planar surfaces.
    void calculateSmoothness()
    {
        int cloudSize = extractedCloud->points.size();
        // skip 5 points at each end so the 10-neighbour window stays in bounds
        for (int i = 5; i < cloudSize - 5; i++)
        {
            float diffRange = cloudInfo.pointRange[i-5] + cloudInfo.pointRange[i-4]
                            + cloudInfo.pointRange[i-3] + cloudInfo.pointRange[i-2]
                            + cloudInfo.pointRange[i-1] - cloudInfo.pointRange[i] * 10
                            + cloudInfo.pointRange[i+1] + cloudInfo.pointRange[i+2]
                            + cloudInfo.pointRange[i+3] + cloudInfo.pointRange[i+4]
                            + cloudInfo.pointRange[i+5];

            cloudCurvature[i] = diffRange*diffRange;//diffX * diffX + diffY * diffY + diffZ * diffZ;

            // reset per-point selection state for this scan
            cloudNeighborPicked[i] = 0;
            cloudLabel[i] = 0;
            // cloudSmoothness for sorting
            cloudSmoothness[i].value = cloudCurvature[i];
            cloudSmoothness[i].ind = i;
        }
    }

    // Exclude points whose curvature is unreliable:
    //  - points next to a depth discontinuity (the farther side may merely be
    //    occluded by the nearer object rather than a real edge), and
    //  - points on surfaces nearly parallel to the laser beam.
    void markOccludedPoints()
    {
        int cloudSize = extractedCloud->points.size();
        // mark occluded points and parallel beam points
        for (int i = 5; i < cloudSize - 6; ++i)
        {
            // occluded points
            float depth1 = cloudInfo.pointRange[i];
            float depth2 = cloudInfo.pointRange[i+1];
            int columnDiff = std::abs(int(cloudInfo.pointColInd[i+1] - cloudInfo.pointColInd[i]));

            if (columnDiff < 10){
                // 10 pixel diff in range image
                // mark the points on the farther (possibly occluded) side of a
                // >0.3 m range jump between adjacent columns
                if (depth1 - depth2 > 0.3){
                    cloudNeighborPicked[i - 5] = 1;
                    cloudNeighborPicked[i - 4] = 1;
                    cloudNeighborPicked[i - 3] = 1;
                    cloudNeighborPicked[i - 2] = 1;
                    cloudNeighborPicked[i - 1] = 1;
                    cloudNeighborPicked[i] = 1;
                }else if (depth2 - depth1 > 0.3){
                    cloudNeighborPicked[i + 1] = 1;
                    cloudNeighborPicked[i + 2] = 1;
                    cloudNeighborPicked[i + 3] = 1;
                    cloudNeighborPicked[i + 4] = 1;
                    cloudNeighborPicked[i + 5] = 1;
                    cloudNeighborPicked[i + 6] = 1;
                }
            }
            // parallel beam
            // points on local planar surfaces that are roughly parallel to the
            // laser beam: both neighbours differ by more than 2% of the range
            float diff1 = std::abs(float(cloudInfo.pointRange[i-1] - cloudInfo.pointRange[i]));
            float diff2 = std::abs(float(cloudInfo.pointRange[i+1] - cloudInfo.pointRange[i]));

            if (diff1 > 0.02 * cloudInfo.pointRange[i] && diff2 > 0.02 * cloudInfo.pointRange[i])
                cloudNeighborPicked[i] = 1;
        }
    }

    // Classify points of the current scan into corner (edge) and surface
    // (planar) features, ring by ring. Each ring is split into 6 equal
    // sections so features are spread evenly across the horizontal FOV.
    void extractFeatures()
    {
        cornerCloud->clear();
        surfaceCloud->clear();

        pcl::PointCloud<PointType>::Ptr surfaceCloudScan(new pcl::PointCloud<PointType>());
        pcl::PointCloud<PointType>::Ptr surfaceCloudScanDS(new pcl::PointCloud<PointType>());

        // for every ring
        for (int i = 0; i < N_SCAN; i++)
        {
            surfaceCloudScan->clear();

            // for every one of the 6 sections of the ring
            for (int j = 0; j < 6; j++)
            {
                // sp/ep: first/last point index of this section
                int sp = (cloudInfo.startRingIndex[i] * (6 - j) + cloudInfo.endRingIndex[i] * j) / 6;
                int ep = (cloudInfo.startRingIndex[i] * (5 - j) + cloudInfo.endRingIndex[i] * (j + 1)) / 6 - 1;

                if (sp >= ep)
                    continue;

                // sort the section by ascending curvature.
                // NOTE(review): the half-open sort range excludes index ep, so
                // the section's last entry is never repositioned — this matches
                // upstream LIO-SAM; confirm it is intentional.
                std::sort(cloudSmoothness.begin()+sp, cloudSmoothness.begin()+ep, by_value());

                int largestPickedNum = 0;
                // corners: walk from highest curvature downward
                for (int k = ep; k >= sp; k--)
                {
                    int ind = cloudSmoothness[k].ind;
                    if (cloudNeighborPicked[ind] == 0 && cloudCurvature[ind] > edgeThreshold)
                    {
                        largestPickedNum++;
                        if (largestPickedNum <= 20){
                            // keep at most 20 corners per section
                            cloudLabel[ind] = 1;
                            cornerCloud->push_back(extractedCloud->points[ind]);
                        } else {
                            break;
                        }

                        cloudNeighborPicked[ind] = 1;
                        // suppress up to 5 neighbours on each side so corners
                        // do not cluster (stop at a column gap > 10)
                        for (int l = 1; l <= 5; l++)
                        {
                            int columnDiff = std::abs(int(cloudInfo.pointColInd[ind + l] - cloudInfo.pointColInd[ind + l - 1]));
                            if (columnDiff > 10)
                                break;
                            cloudNeighborPicked[ind + l] = 1;
                        }
                        for (int l = -1; l >= -5; l--)
                        {
                            int columnDiff = std::abs(int(cloudInfo.pointColInd[ind + l] - cloudInfo.pointColInd[ind + l + 1]));
                            if (columnDiff > 10)
                                break;
                            cloudNeighborPicked[ind + l] = 1;
                        }
                    }
                }

                // surfaces: walk from lowest curvature upward; mark flat
                // points and suppress their neighbours the same way
                for (int k = sp; k <= ep; k++)
                {
                    int ind = cloudSmoothness[k].ind;
                    if (cloudNeighborPicked[ind] == 0 && cloudCurvature[ind] < surfThreshold)
                    {

                        cloudLabel[ind] = -1;
                        cloudNeighborPicked[ind] = 1;

                        for (int l = 1; l <= 5; l++) {

                            int columnDiff = std::abs(int(cloudInfo.pointColInd[ind + l] - cloudInfo.pointColInd[ind + l - 1]));
                            if (columnDiff > 10)
                                break;

                            cloudNeighborPicked[ind + l] = 1;
                        }
                        for (int l = -1; l >= -5; l--) {

                            int columnDiff = std::abs(int(cloudInfo.pointColInd[ind + l] - cloudInfo.pointColInd[ind + l + 1]));
                            if (columnDiff > 10)
                                break;

                            cloudNeighborPicked[ind + l] = 1;
                        }
                    }
                }

                // every point not labelled as a corner (label <= 0) is kept as
                // a surface candidate; note k indexes the range image directly
                for (int k = sp; k <= ep; k++)
                {
                    if (cloudLabel[k] <= 0){
                        surfaceCloudScan->push_back(extractedCloud->points[k]);
                    }
                }
            }

            // only surface points are down-sampled; corners are kept as-is
            surfaceCloudScanDS->clear();
            downSizeFilter.setInputCloud(surfaceCloudScan);
            downSizeFilter.filter(*surfaceCloudScanDS);

            *surfaceCloud += *surfaceCloudScanDS;
        }
    }

    // Drop the bulky per-point metadata arrays that downstream consumers do
    // not need, keeping the republished cloud_info message small.
    void freeCloudInfoMemory()
    {
        cloudInfo.startRingIndex.clear();
        cloudInfo.endRingIndex.clear();
        cloudInfo.pointColInd.clear();
        cloudInfo.pointRange.clear();
    }

    // Publish the corner/surface clouds and the augmented cloud_info message.
    void publishFeatureCloud()
    {
        // free cloud info memory
        freeCloudInfoMemory();
        // save newly extracted features
        cloudInfo.cloud_corner = publishCloud(&pubCornerPoints, cornerCloud, cloudHeader.stamp, "base_link");
        cloudInfo.cloud_surface = publishCloud(&pubSurfacePoints, surfaceCloud, cloudHeader.stamp, "base_link");
        // publish to mapOptimization
        pubLaserCloudInfo.publish(cloudInfo);
    }
};
// Node entry point: construct the feature-extraction pipeline (which wires
// up its own subscribers/publishers) and hand control to the ROS event loop.
int main(int argc, char** argv)
{
    ros::init(argc, argv, "lio_sam");

    FeatureExtraction featureExtractionNode;

    ROS_INFO("\033[1;32m----> Feature Extraction Started.\033[0m");

    ros::spin();

    return 0;
}
================================================
FILE: src/globalLocalize.cpp
================================================
//
// Created by gc2 on 20-7-20.
//
#include "utility.h"
#include "lio_sam/cloud_info.h"
#include <gtsam/geometry/Rot3.h>
#include <gtsam/geometry/Pose3.h>
#include <gtsam/slam/PriorFactor.h>
#include <gtsam/slam/BetweenFactor.h>
#include <gtsam/navigation/GPSFactor.h>
#include <gtsam/navigation/ImuFactor.h>
#include <gtsam/navigation/CombinedImuFactor.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
#include <gtsam/nonlinear/Marginals.h>
#include <gtsam/nonlinear/Values.h>
#include <gtsam/inference/Symbol.h>
#include <geometry_msgs/PoseWithCovarianceStamped.h>
#include <gtsam/nonlinear/ISAM2.h>
using namespace gtsam;
using symbol_shorthand::X; // Pose3 (x,y,z,r,p,y)
using symbol_shorthand::V; // Vel (xdot,ydot,zdot)
using symbol_shorthand::B; // Bias (ax,ay,az,gx,gy,gz)
using symbol_shorthand::G; // GPS pose
/*
 * A point cloud type that has 6D pose info ([x,y,z,roll,pitch,yaw];
 * intensity is used as a time stamp / keyframe index — see usage below)
 */
struct PointXYZIRPYT
{
    PCL_ADD_POINT4D
    PCL_ADD_INTENSITY;                  // preferred way of adding a XYZ+padding
    float roll;                         // orientation, radians (used with pcl::getTransformation)
    float pitch;
    float yaw;
    double time;                        // acquisition time, seconds
    EIGEN_MAKE_ALIGNED_OPERATOR_NEW     // make sure our new allocators are aligned
} EIGEN_ALIGN16;                        // enforce SSE padding for correct memory alignment

// Register the custom point type with PCL's serialization machinery so it
// can be stored in pcl::PointCloud and converted to/from ROS messages.
POINT_CLOUD_REGISTER_POINT_STRUCT (PointXYZIRPYT,
                                   (float, x, x) (float, y, y)
                                   (float, z, z) (float, intensity, intensity)
                                   (float, roll, roll) (float, pitch, pitch) (float, yaw, yaw)
                                   (double, time, time))

typedef PointXYZIRPYT PointTypePose;
class mapOptimization : public ParamServer
{
public:
// gtsam
NonlinearFactorGraph gtSAMgraph;
Values initialEstimate;
Values optimizedEstimate;
ISAM2 *isam;
Values isamCurrentEstimate;
Eigen::MatrixXd poseCovariance;
ros::Publisher pubLaserCloudSurround;
ros::Publisher pubOdomAftMappedROS;
ros::Publisher pubKeyPoses;
ros::Publisher pubPath;
ros::Publisher pubHistoryKeyFrames;
ros::Publisher pubIcpKeyFrames;
ros::Publisher pubRecentKeyFrames;
ros::Publisher pubRecentKeyFrame;
ros::Publisher pubCloudRegisteredRaw;
ros::Subscriber subLaserCloudInfo;
ros::Subscriber subGPS;
std::deque<nav_msgs::Odometry> gpsQueue;
lio_sam::cloud_info cloudInfo;
vector<pcl::PointCloud<PointType>::Ptr> cornerCloudKeyFrames;
vector<pcl::PointCloud<PointType>::Ptr> surfCloudKeyFrames;
pcl::PointCloud<PointType>::Ptr cloudKeyPoses3D;//gc: can be used to illustrate the path of odometry // keep
pcl::PointCloud<PointTypePose>::Ptr cloudKeyPoses6D;//gc: can be used to illustrate the path of odometry //keep
//addded**********************************by gc
std::mutex mtxWin;
std::vector<PointType> win_cloudKeyPoses3D;
std::vector<PointTypePose> win_cloudKeyPoses6D;
std::vector<pcl::PointCloud<PointType>::Ptr> win_cornerCloudKeyFrames;
std::vector<pcl::PointCloud<PointType>::Ptr> win_surfCloudKeyFrames;
//added***********************************by gc
pcl::PointCloud<PointType>::Ptr laserCloudCornerLast; // corner feature set from odoOptimization
pcl::PointCloud<PointType>::Ptr laserCloudSurfLast; // surf feature set from odoOptimization
pcl::PointCloud<PointType>::Ptr laserCloudCornerLastDS; // downsampled corner featuer set from odoOptimization
pcl::PointCloud<PointType>::Ptr laserCloudSurfLastDS; // downsampled surf featuer set from odoOptimization
pcl::PointCloud<PointType>::Ptr laserCloudOri;
pcl::PointCloud<PointType>::Ptr coeffSel;
std::vector<PointType> laserCloudOriCornerVec; // corner point holder for parallel computation
std::vector<PointType> coeffSelCornerVec;
std::vector<bool> laserCloudOriCornerFlag;
std::vector<PointType> laserCloudOriSurfVec; // surf point holder for parallel computation
std::vector<PointType> coeffSelSurfVec;
std::vector<bool> laserCloudOriSurfFlag;
pcl::PointCloud<PointType>::Ptr laserCloudCornerFromMap;
pcl::PointCloud<PointType>::Ptr laserCloudSurfFromMap;
pcl::PointCloud<PointType>::Ptr laserCloudCornerFromMapDS;
pcl::PointCloud<PointType>::Ptr laserCloudSurfFromMapDS;
pcl::KdTreeFLANN<PointType>::Ptr kdtreeCornerFromMap;
pcl::KdTreeFLANN<PointType>::Ptr kdtreeSurfFromMap;
//pcl::KdTreeFLANN<PointType>::Ptr kdtreeSurroundingKeyPoses;
//pcl::KdTreeFLANN<PointType>::Ptr kdtreeHistoryKeyPoses;
pcl::PointCloud<PointType>::Ptr latestKeyFrameCloud;
pcl::PointCloud<PointType>::Ptr nearHistoryKeyFrameCloud;
pcl::VoxelGrid<PointType> downSizeFilterCorner;
pcl::VoxelGrid<PointType> downSizeFilterSurf;
pcl::VoxelGrid<PointType> downSizeFilterICP;
pcl::VoxelGrid<PointType> downSizeFilterSurroundingKeyPoses; // for surrounding key poses of scan-to-map optimization
ros::Time timeLaserInfoStamp;
double timeLaserCloudInfoLast;
float transformTobeMapped[6];
std::mutex mtx;
double timeLastProcessing = -1;
bool isDegenerate = false;
Eigen::Matrix<float, 6, 6> matP;
int winSize = 30;
int laserCloudCornerFromMapDSNum = 0;
int laserCloudSurfFromMapDSNum = 0;
int laserCloudCornerLastDSNum = 0;
int laserCloudSurfLastDSNum = 0;
bool aLoopIsClosed = false;
int imuPreintegrationResetId = 0;
nav_msgs::Path globalPath;
Eigen::Affine3f transPointAssociateToMap;
/*************added by gc*****************/
pcl::PointCloud<PointType>::Ptr cloudGlobalMap;
pcl::PointCloud<PointType>::Ptr cloudGlobalMapDS;
pcl::PointCloud<PointType>::Ptr cloudScanForInitialize;
ros::Subscriber subIniPoseFromRviz;
ros::Publisher pubLaserCloudInWorld;
ros::Publisher pubMapWorld;
//ros::Publisher fortest_publasercloudINWorld;
float transformInTheWorld[6];// the pose in the world, i.e. the prebuilt map
float tranformOdomToWorld[6];
int globalLocaSkipFrames = 3;
int frameNum = 1;
tf::TransformBroadcaster tfOdom2Map;
std::mutex mtxtranformOdomToWorld;
std::mutex mtx_general;
bool globalLocalizeInitialiized = false;
ros::Subscriber subImu;
enum InitializedFlag
{
NonInitialized,
Initializing,
Initialized
};
InitializedFlag initializedFlag;
geometry_msgs::PoseStamped poseOdomToMap;
ros::Publisher pubOdomToMapPose;
/*************added by gc******************/
    // Set up the ISAM2 optimizer, all ROS publishers/subscribers, and the
    // voxel down-sampling filters, then allocate working memory
    // (allocateMemory() also loads the prebuilt global map).
    mapOptimization()
    {
        //std::cout << "come in" << std::endl;
        // ISAM2 incremental smoothing configuration
        ISAM2Params parameters;
        parameters.relinearizeThreshold = 0.1;
        parameters.relinearizeSkip = 1;
        isam = new ISAM2(parameters);

        pubKeyPoses = nh.advertise<sensor_msgs::PointCloud2>("lio_sam/mapping/trajectory", 1);
        pubLaserCloudSurround = nh.advertise<sensor_msgs::PointCloud2>("lio_sam/mapping/map_global", 1);
        pubOdomAftMappedROS = nh.advertise<nav_msgs::Odometry> ("lio_sam/mapping/odometry", 1);
        pubPath = nh.advertise<nav_msgs::Path>("lio_sam/mapping/path", 1);

        // inputs: feature clouds from featureExtraction, and GPS odometry
        subLaserCloudInfo = nh.subscribe<lio_sam::cloud_info>("lio_sam/feature/cloud_info", 10, &mapOptimization::laserCloudInfoHandler, this, ros::TransportHints().tcpNoDelay());
        subGPS = nh.subscribe<nav_msgs::Odometry> (gpsTopic, 200, &mapOptimization::gpsHandler, this, ros::TransportHints().tcpNoDelay());
        //std::cout << "come in2" << std::endl;

        //added ******************by gc
        // relocalization I/O: initial pose guess from rviz ("2D Pose Estimate"),
        // plus the global map, localized cloud, and odom->map pose for output
        subIniPoseFromRviz = nh.subscribe("/initialpose", 8, &mapOptimization::initialpose_callback, this);
        pubMapWorld = nh.advertise<sensor_msgs::PointCloud2>("lio_sam/mapping/cloud_map_map",1);//
        //fortest_publasercloudINWorld = nh.advertise<sensor_msgs::PointCloud2>("lio_sam/mapping/laserclouinmapframe",1);
        pubLaserCloudInWorld = nh.advertise<sensor_msgs::PointCloud2>("lio_sam/mapping/lasercloud_in_world", 1);//added
        pubOdomToMapPose = nh.advertise<geometry_msgs::PoseStamped>("lio_sam/mapping/pose_odomTo_map", 1);
        //subImu = nh.subscribe<sensor_msgs::Imu> (imuTopic, 200, &mapOptimization::imuHandler, this, ros::TransportHints().tcpNoDelay());
        //added ******************by gc

        pubHistoryKeyFrames = nh.advertise<sensor_msgs::PointCloud2>("lio_sam/mapping/icp_loop_closure_history_cloud", 1);
        pubIcpKeyFrames = nh.advertise<sensor_msgs::PointCloud2>("lio_sam/mapping/icp_loop_closure_corrected_cloud", 1);
        pubRecentKeyFrames = nh.advertise<sensor_msgs::PointCloud2>("lio_sam/mapping/map_local", 1);
        pubRecentKeyFrame = nh.advertise<sensor_msgs::PointCloud2>("lio_sam/mapping/cloud_registered", 1);
        pubCloudRegisteredRaw = nh.advertise<sensor_msgs::PointCloud2>("lio_sam/mapping/cloud_registered_raw", 1);

        // leaf sizes come from ROS parameters (ParamServer base class)
        downSizeFilterCorner.setLeafSize(mappingCornerLeafSize, mappingCornerLeafSize, mappingCornerLeafSize);
        downSizeFilterSurf.setLeafSize(mappingSurfLeafSize, mappingSurfLeafSize, mappingSurfLeafSize);
        downSizeFilterICP.setLeafSize(mappingSurfLeafSize, mappingSurfLeafSize, mappingSurfLeafSize);
        downSizeFilterSurroundingKeyPoses.setLeafSize(surroundingKeyframeDensity, surroundingKeyframeDensity, surroundingKeyframeDensity); // for surrounding key poses of scan-to-map optimization
        //std::cout << "come in3" << std::endl;

        allocateMemory();
    }
void allocateMemory()
{
cloudGlobalMap.reset(new pcl::PointCloud<PointType>());//addded by gc
cloudGlobalMapDS.reset(new pcl::PointCloud<PointType>());//added
cloudScanForInitialize.reset(new pcl::PointCloud<PointType>());
resetLIO();
//added by gc
for (int i = 0; i < 6; ++i){
transformInTheWorld[i] = 0;
}
for (int i = 0; i < 6; ++i){
tranformOdomToWorld[i] = 0;
}
initializedFlag = NonInitialized;
cloudGlobalLoad();//added by gc
//added by gc
}
void resetLIO()
{
cloudKeyPoses3D.reset(new pcl::PointCloud<PointType>());
cloudKeyPoses6D.reset(new pcl::PointCloud<PointTypePose>());
//kdtreeSurroundingKeyPoses.reset(new pcl::KdTreeFLANN<PointType>());
//kdtreeHistoryKeyPoses.reset(new pcl::KdTreeFLANN<PointType>());
laserCloudCornerLast.reset(new pcl::PointCloud<PointType>()); // corner feature set from odoOptimization
laserCloudSurfLast.reset(new pcl::PointCloud<PointType>()); // surf feature set from odoOptimization
laserCloudCornerLastDS.reset(new pcl::PointCloud<PointType>()); // downsampled corner featuer set from odoOptimization
laserCloudSurfLastDS.reset(new pcl::PointCloud<PointType>()); // downsampled surf featuer set from odoOptimization
laserCloudOri.reset(new pcl::PointCloud<PointType>());
coeffSel.reset(new pcl::PointCloud<PointType>());
laserCloudOriCornerVec.resize(N_SCAN * Horizon_SCAN);
coeffSelCornerVec.resize(N_SCAN * Horizon_SCAN);
laserCloudOriCornerFlag.resize(N_SCAN * Horizon_SCAN);
laserCloudOriSurfVec.resize(N_SCAN * Horizon_SCAN);
coeffSelSurfVec.resize(N_SCAN * Horizon_SCAN);
laserCloudOriSurfFlag.resize(N_SCAN * Horizon_SCAN);
std::fill(laserCloudOriCornerFlag.begin(), laserCloudOriCornerFlag.end(), false);
std::fill(laserCloudOriSurfFlag.begin(), laserCloudOriSurfFlag.end(), false);
laserCloudCornerFromMap.reset(new pcl::PointCloud<PointType>());
laserCloudSurfFromMap.reset(new pcl::PointCloud<PointType>());
laserCloudCornerFromMapDS.reset(new pcl::PointCloud<PointType>());
laserCloudSurfFromMapDS.reset(new pcl::PointCloud<PointType>());
kdtreeCornerFromMap.reset(new pcl::KdTreeFLANN<PointType>());
kdtreeSurfFromMap.reset(new pcl::KdTreeFLANN<PointType>());
latestKeyFrameCloud.reset(new pcl::PointCloud<PointType>());
nearHistoryKeyFrameCloud.reset(new pcl::PointCloud<PointType>());
for (int i = 0; i < 6; ++i){
transformTobeMapped[i] = 0;
}
matP.setZero();
}
    // Main per-scan callback. Until global localization has produced an
    // initial pose, the first scan is buffered for the initializer and the
    // LIO pipeline is held off; afterwards the regular LIO-SAM mapping
    // pipeline runs, rate-limited by mappingProcessInterval.
    void laserCloudInfoHandler(const lio_sam::cloud_infoConstPtr& msgIn)
    {
        // extract time stamp
        //added
        timeLaserInfoStamp = msgIn->header.stamp;
        timeLaserCloudInfoLast = msgIn->header.stamp.toSec();

        // extract info and feature cloud
        cloudInfo = *msgIn;
        pcl::fromROSMsg(msgIn->cloud_corner, *laserCloudCornerLast);
        pcl::fromROSMsg(msgIn->cloud_surface, *laserCloudSurfLast);

        /************************************added by gc*****************************/
        //if the system is not initialized, buffer the first scan for the system to initialize
        //the LIO system starts working only when the localization initializing is finished
        if(initializedFlag == NonInitialized || initializedFlag == Initializing)
        {
            if(cloudScanForInitialize->points.size() == 0)
            {
                // keep one downsampled scan (corners + surfaces) as the
                // candidate cloud for global localization
                downsampleCurrentScan();
                mtx_general.lock();
                *cloudScanForInitialize += *laserCloudCornerLastDS;
                *cloudScanForInitialize += *laserCloudSurfLastDS;
                mtx_general.unlock();

                laserCloudCornerLastDS->clear();
                laserCloudSurfLastDS->clear();
                laserCloudCornerLastDSNum = 0;
                laserCloudSurfLastDSNum = 0;

                // seed the attitude from the IMU reading of this scan
                transformTobeMapped[0] = cloudInfo.imuRollInit;
                transformTobeMapped[1] = cloudInfo.imuPitchInit;
                transformTobeMapped[2] = cloudInfo.imuYawInit;
                if (!useImuHeadingInitialization)//gc: if not use the heading of init_IMU as Initialization
                    transformTobeMapped[2] = 0;
            }
            return;
        }
        frameNum++;
        /************************************added by gc*****************************/

        std::lock_guard<std::mutex> lock(mtx);

        if (timeLaserCloudInfoLast - timeLastProcessing >= mappingProcessInterval) {//gc: control the rate of the mapping process

            timeLastProcessing = timeLaserCloudInfoLast;

            updateInitialGuess();//gc: update initial value for states

            extractSurroundingKeyFrames();//gc:

            downsampleCurrentScan();//gc: down sample the current corner points and surface points

            scan2MapOptimization();//gc: calculate the transformation using the lidar measurement with the IMU preintegration as initial values
            //and then interpolate roll and pitch angle using IMU measurement and above measurement

            saveKeyFramesAndFactor();//gc: save corner cloud and surface cloud of this scan, and add odom and GPS factors

            //correctPoses();

            publishOdometry();

            publishFrames();
        }
    }
// Buffer every incoming GPS odometry message; consumers drain gpsQueue later.
void gpsHandler(const nav_msgs::Odometry::ConstPtr& gpsMsg)
{
    gpsQueue.emplace_back(*gpsMsg);
}
// Transform one point from the sensor frame into the map frame using the
// cached pose transPointAssociateToMap (refreshed by updatePointAssociateToMap()).
void pointAssociateToMap(PointType const * const pi, PointType * const po)
{
    const float x = pi->x, y = pi->y, z = pi->z;
    const auto &T = transPointAssociateToMap;
    po->x = T(0,0) * x + T(0,1) * y + T(0,2) * z + T(0,3);
    po->y = T(1,0) * x + T(1,1) * y + T(1,2) * z + T(1,3);
    po->z = T(2,0) * x + T(2,1) * y + T(2,2) * z + T(2,3);
    po->intensity = pi->intensity;
}
// Return a copy of cloudIn transformed by the 6-DoF pose transformIn
// (rotation applied first, then translation; intensity is carried over).
pcl::PointCloud<PointType>::Ptr transformPointCloud(pcl::PointCloud<PointType>::Ptr cloudIn, PointTypePose* transformIn)
{
    pcl::PointCloud<PointType>::Ptr cloudOut(new pcl::PointCloud<PointType>());
    const int numPts = cloudIn->size();
    cloudOut->resize(numPts);
    const Eigen::Affine3f tf = pcl::getTransformation(transformIn->x, transformIn->y, transformIn->z,
                                                      transformIn->roll, transformIn->pitch, transformIn->yaw);
    for (int idx = 0; idx < numPts; ++idx)
    {
        const PointType &src = cloudIn->points[idx];
        PointType &dst = cloudOut->points[idx];
        dst.x = tf(0,0) * src.x + tf(0,1) * src.y + tf(0,2) * src.z + tf(0,3);
        dst.y = tf(1,0) * src.x + tf(1,1) * src.y + tf(1,2) * src.z + tf(1,3);
        dst.z = tf(2,0) * src.x + tf(2,1) * src.y + tf(2,2) * src.z + tf(2,3);
        dst.intensity = src.intensity;
    }
    return cloudOut;
}
// Convert a 6-DoF key-pose point into a GTSAM Pose3.
gtsam::Pose3 pclPointTogtsamPose3(PointTypePose thisPoint)
{
    gtsam::Rot3 rotation = gtsam::Rot3::RzRyRx(static_cast<double>(thisPoint.roll),
                                               static_cast<double>(thisPoint.pitch),
                                               static_cast<double>(thisPoint.yaw));
    gtsam::Point3 translation(static_cast<double>(thisPoint.x),
                              static_cast<double>(thisPoint.y),
                              static_cast<double>(thisPoint.z));
    return gtsam::Pose3(rotation, translation);
}
// Convert a [roll, pitch, yaw, x, y, z] state array into a GTSAM Pose3.
gtsam::Pose3 trans2gtsamPose(float transformIn[])
{
    gtsam::Rot3 rotation = gtsam::Rot3::RzRyRx(transformIn[0], transformIn[1], transformIn[2]);
    gtsam::Point3 translation(transformIn[3], transformIn[4], transformIn[5]);
    return gtsam::Pose3(rotation, translation);
}
// Convert a 6-DoF key-pose point into an Eigen affine transform.
Eigen::Affine3f pclPointToAffine3f(PointTypePose thisPoint)
{
    return pcl::getTransformation(thisPoint.x, thisPoint.y, thisPoint.z,
                                  thisPoint.roll, thisPoint.pitch, thisPoint.yaw);
}
// Convert a [roll, pitch, yaw, x, y, z] state array into an Eigen transform.
// Note the index order: pcl::getTransformation takes (x, y, z, roll, pitch, yaw).
Eigen::Affine3f trans2Affine3f(float transformIn[])
{
    return pcl::getTransformation(transformIn[3], transformIn[4], transformIn[5],
                                  transformIn[0], transformIn[1], transformIn[2]);
}
// Pack a [roll, pitch, yaw, x, y, z] state array into a PointTypePose.
PointTypePose trans2PointTypePose(float transformIn[])
{
    PointTypePose pose;
    pose.roll  = transformIn[0];
    pose.pitch = transformIn[1];
    pose.yaw   = transformIn[2];
    pose.x = transformIn[3];
    pose.y = transformIn[4];
    pose.z = transformIn[5];
    return pose;
}
// Produce the initial 6-DoF guess (transformTobeMapped) for scan-to-map
// optimization. Priority: (1) first frame -> IMU attitude only;
// (2) IMU-preintegration odometry if available and not reset since;
// (3) rotation-only prediction from the IMU attitude increment.
void updateInitialGuess()
{
    // Static: keeps the IMU attitude of the previous call so the increment
    // between two consecutive scans can be computed.
    static Eigen::Affine3f lastImuTransformation;//gc: note that this is static type
    // initialization: no key pose yet, seed attitude from the IMU
    if (cloudKeyPoses3D->points.empty())//gc: there is no key pose, initialize
    {
        transformTobeMapped[0] = cloudInfo.imuRollInit;
        transformTobeMapped[1] = cloudInfo.imuPitchInit;
        transformTobeMapped[2] = cloudInfo.imuYawInit;
        if (!useImuHeadingInitialization)//gc: if not use the heading of init_IMU as Initialization
            transformTobeMapped[2] = 0;
        lastImuTransformation = pcl::getTransformation(0, 0, 0, cloudInfo.imuRollInit, cloudInfo.imuPitchInit, cloudInfo.imuYawInit); // save imu before return;
        return;
    }
    // use imu pre-integration estimation for pose guess (full 6-DoF); only
    // trusted when the preintegration reset counter still matches ours
    if (cloudInfo.odomAvailable == true && cloudInfo.imuPreintegrationResetId == imuPreintegrationResetId)
    {
        transformTobeMapped[0] = cloudInfo.initialGuessRoll;
        transformTobeMapped[1] = cloudInfo.initialGuessPitch;
        transformTobeMapped[2] = cloudInfo.initialGuessYaw;
        transformTobeMapped[3] = cloudInfo.initialGuessX;
        transformTobeMapped[4] = cloudInfo.initialGuessY;
        transformTobeMapped[5] = cloudInfo.initialGuessZ;
        lastImuTransformation = pcl::getTransformation(0, 0, 0, cloudInfo.imuRollInit, cloudInfo.imuPitchInit, cloudInfo.imuYawInit); // save imu before return;
        return;
    }
    // use imu incremental estimation for pose guess (only rotation): apply
    // the attitude change since the last scan to the previous pose
    if (cloudInfo.imuAvailable == true)
    {
        Eigen::Affine3f transBack = pcl::getTransformation(0, 0, 0, cloudInfo.imuRollInit, cloudInfo.imuPitchInit, cloudInfo.imuYawInit);
        Eigen::Affine3f transIncre = lastImuTransformation.inverse() * transBack;//gc: the transform of IMU between two scans
        Eigen::Affine3f transTobe = trans2Affine3f(transformTobeMapped);
        Eigen::Affine3f transFinal = transTobe * transIncre;
        pcl::getTranslationAndEulerAngles(transFinal, transformTobeMapped[3], transformTobeMapped[4], transformTobeMapped[5],
                                          transformTobeMapped[0], transformTobeMapped[1], transformTobeMapped[2]);
        lastImuTransformation = pcl::getTransformation(0, 0, 0, cloudInfo.imuRollInit, cloudInfo.imuPitchInit, cloudInfo.imuYawInit); // save imu before return;
        return;
    }
}
void extractForLoopClosure()
{
//change-1
/**************gc added**************/
//{
//in this place the maximum of numPoses is winSize
pcl::PointCloud<PointType>::Ptr cloudToExtract(new pcl::PointCloud<PointType>());
int numPoses = win_cloudKeyPoses3D.size();
for (int i = numPoses-1; i >=0; --i)
{
cloudToExtract->push_back(win_cloudKeyPoses3D[i]);
}
extractCloud(cloudToExtract);
// }
/**************gc added**************/
// {
// pcl::PointCloud<PointType>::Ptr cloudToExtract(new pcl::PointCloud<PointType>());
// int numPoses = cloudKeyPoses3D->size();
// for (int i = numPoses-1; i >= 0; --i)
// {
// if ((int)cloudToExtract->size() <= surroundingKeyframeSize)
// cloudToExtract->push_back(cloudKeyPoses3D->points[i]);
// else
// break;
// }
//
// extractCloud(cloudToExtract);
// }
}
//gc:search nearby key poses and downsample them and then extract the local map points
// void extractNearby()
// {
//
// }
//gc: extract the nearby Map points
// Build the local corner/surface maps from the sliding-window key frames:
// transform every key frame's features into the map frame (in parallel),
// fuse them, then downsample the fused clouds into *FromMapDS.
void extractCloud(pcl::PointCloud<PointType>::Ptr cloudToExtract)
{
    const int numFrames = (int)cloudToExtract->size();
    std::vector<pcl::PointCloud<PointType>> cornerClouds(numFrames);
    std::vector<pcl::PointCloud<PointType>> surfClouds(numFrames);
    // Transform each window key frame into the map frame.
    #pragma omp parallel for num_threads(numberOfCores)
    for (int i = 0; i < numFrames; ++i)
    {
        PointTypePose pose6D = win_cloudKeyPoses6D[i];
        cornerClouds[i] = *transformPointCloud(win_cornerCloudKeyFrames[i], &pose6D);
        surfClouds[i] = *transformPointCloud(win_surfCloudKeyFrames[i], &pose6D);
    }
    // Fuse all transformed frames into one local map per feature type
    // (serial: += on the shared clouds is not thread-safe).
    laserCloudCornerFromMap->clear();
    laserCloudSurfFromMap->clear();
    for (int i = 0; i < numFrames; ++i)
    {
        *laserCloudCornerFromMap += cornerClouds[i];
        *laserCloudSurfFromMap += surfClouds[i];
    }
    // Downsample the fused corner map.
    downSizeFilterCorner.setInputCloud(laserCloudCornerFromMap);
    downSizeFilterCorner.filter(*laserCloudCornerFromMapDS);
    laserCloudCornerFromMapDSNum = laserCloudCornerFromMapDS->size();
    // Downsample the fused surface map.
    downSizeFilterSurf.setInputCloud(laserCloudSurfFromMap);
    downSizeFilterSurf.filter(*laserCloudSurfFromMapDS);
    laserCloudSurfFromMapDSNum = laserCloudSurfFromMapDS->size();
}
// Assemble the local map around the current pose. Nothing to do until at
// least one key pose exists; otherwise delegate to the window extraction
// (extractForLoopClosure — the name is misleading, see that function).
void extractSurroundingKeyFrames()
{
    if (cloudKeyPoses3D->points.empty() == true)
        return;
    extractForLoopClosure();
}
// Voxel-downsample the current scan's corner and surface feature clouds and
// cache their point counts.
void downsampleCurrentScan()
{
    // Corner features.
    laserCloudCornerLastDS->clear();
    downSizeFilterCorner.setInputCloud(laserCloudCornerLast);
    downSizeFilterCorner.filter(*laserCloudCornerLastDS);
    laserCloudCornerLastDSNum = laserCloudCornerLastDS->size();
    // Surface features.
    laserCloudSurfLastDS->clear();
    downSizeFilterSurf.setInputCloud(laserCloudSurfLast);
    downSizeFilterSurf.filter(*laserCloudSurfLastDS);
    laserCloudSurfLastDSNum = laserCloudSurfLastDS->size();
}
// Refresh the cached scan-to-map transform from the current state estimate;
// pointAssociateToMap() reads this cached value.
void updatePointAssociateToMap()
{
    transPointAssociateToMap = trans2Affine3f(transformTobeMapped);
}
// For each downsampled corner point: find its 5 nearest neighbours in the
// corner map, fit a 3-D line via eigen-decomposition of the neighbourhood
// covariance, and — if the neighbourhood is line-like — store a weighted
// point-to-line residual in coeffSelCornerVec for LMOptimization().
void cornerOptimization()
{
    updatePointAssociateToMap();
    #pragma omp parallel for num_threads(numberOfCores)
    //gc: for every corner point
    for (int i = 0; i < laserCloudCornerLastDSNum; i++)
    {
        PointType pointOri, pointSel, coeff;
        std::vector<int> pointSearchInd;
        std::vector<float> pointSearchSqDis;
        pointOri = laserCloudCornerLastDS->points[i];
        //gc: calculate its location in the map using the prediction pose
        pointAssociateToMap(&pointOri, &pointSel);
        kdtreeCornerFromMap->nearestKSearch(pointSel, 5, pointSearchInd, pointSearchSqDis);
        cv::Mat matA1(3, 3, CV_32F, cv::Scalar::all(0)); // covariance of the 5 neighbours
        cv::Mat matD1(1, 3, CV_32F, cv::Scalar::all(0)); // its eigenvalues
        cv::Mat matV1(3, 3, CV_32F, cv::Scalar::all(0)); // its eigenvectors
        // Accept the neighbourhood only when the farthest of the 5
        // neighbours lies within 1 m (squared distance).
        if (pointSearchSqDis[4] < 1.0) {
            float cx = 0, cy = 0, cz = 0;
            for (int j = 0; j < 5; j++) {
                cx += laserCloudCornerFromMapDS->points[pointSearchInd[j]].x;
                cy += laserCloudCornerFromMapDS->points[pointSearchInd[j]].y;
                cz += laserCloudCornerFromMapDS->points[pointSearchInd[j]].z;
            }
            //gc: the average coordinate of the nearest points (centroid)
            cx /= 5; cy /= 5; cz /= 5;
            // Accumulate the (scaled) covariance entries of the neighbours.
            float a11 = 0, a12 = 0, a13 = 0, a22 = 0, a23 = 0, a33 = 0;
            for (int j = 0; j < 5; j++) {
                float ax = laserCloudCornerFromMapDS->points[pointSearchInd[j]].x - cx;
                float ay = laserCloudCornerFromMapDS->points[pointSearchInd[j]].y - cy;
                float az = laserCloudCornerFromMapDS->points[pointSearchInd[j]].z - cz;
                a11 += ax * ax; a12 += ax * ay; a13 += ax * az;
                a22 += ay * ay; a23 += ay * az;
                a33 += az * az;
            }
            a11 /= 5; a12 /= 5; a13 /= 5; a22 /= 5; a23 /= 5; a33 /= 5;
            matA1.at<float>(0, 0) = a11; matA1.at<float>(0, 1) = a12; matA1.at<float>(0, 2) = a13;
            matA1.at<float>(1, 0) = a12; matA1.at<float>(1, 1) = a22; matA1.at<float>(1, 2) = a23;
            matA1.at<float>(2, 0) = a13; matA1.at<float>(2, 1) = a23; matA1.at<float>(2, 2) = a33;
            cv::eigen(matA1, matD1, matV1);
            // Line-like check: dominant eigenvalue clearly separated from
            // the second (points spread mainly along one direction).
            if (matD1.at<float>(0, 0) > 3 * matD1.at<float>(0, 1)) {
                float x0 = pointSel.x;
                float y0 = pointSel.y;
                float z0 = pointSel.z;
                // Two points on the fitted line, +/- 0.1 m from the centroid
                // along the principal eigenvector.
                float x1 = cx + 0.1 * matV1.at<float>(0, 0);
                float y1 = cy + 0.1 * matV1.at<float>(0, 1);
                float z1 = cz + 0.1 * matV1.at<float>(0, 2);
                float x2 = cx - 0.1 * matV1.at<float>(0, 0);
                float y2 = cy - 0.1 * matV1.at<float>(0, 1);
                float z2 = cz - 0.1 * matV1.at<float>(0, 2);
                // a012: magnitude of the cross product (p0-p1) x (p0-p2);
                // l12: length of the segment p1-p2. Distance = a012 / l12.
                float a012 = sqrt(((x0 - x1)*(y0 - y2) - (x0 - x2)*(y0 - y1)) * ((x0 - x1)*(y0 - y2) - (x0 - x2)*(y0 - y1))
                                + ((x0 - x1)*(z0 - z2) - (x0 - x2)*(z0 - z1)) * ((x0 - x1)*(z0 - z2) - (x0 - x2)*(z0 - z1))
                                + ((y0 - y1)*(z0 - z2) - (y0 - y2)*(z0 - z1)) * ((y0 - y1)*(z0 - z2) - (y0 - y2)*(z0 - z1)));
                float l12 = sqrt((x1 - x2)*(x1 - x2) + (y1 - y2)*(y1 - y2) + (z1 - z2)*(z1 - z2));
                // (la, lb, lc): gradient of the point-to-line distance w.r.t. p0.
                float la = ((y1 - y2)*((x0 - x1)*(y0 - y2) - (x0 - x2)*(y0 - y1))
                          + (z1 - z2)*((x0 - x1)*(z0 - z2) - (x0 - x2)*(z0 - z1))) / a012 / l12;
                float lb = -((x1 - x2)*((x0 - x1)*(y0 - y2) - (x0 - x2)*(y0 - y1))
                           - (z1 - z2)*((y0 - y1)*(z0 - z2) - (y0 - y2)*(z0 - z1))) / a012 / l12;
                float lc = -((x1 - x2)*((x0 - x1)*(z0 - z2) - (x0 - x2)*(z0 - z1))
                           + (y1 - y2)*((y0 - y1)*(z0 - z2) - (y0 - y2)*(z0 - z1))) / a012 / l12;
                // Point-to-line distance of the current point.
                float ld2 = a012 / l12;
                // Distance-based weight: nearer points contribute more.
                float s = 1 - 0.9 * fabs(ld2);
                coeff.x = s * la;
                coeff.y = s * lb;
                coeff.z = s * lc;
                coeff.intensity = s * ld2;
                // Reject residuals that are too far from the line.
                if (s > 0.1) {
                    laserCloudOriCornerVec[i] = pointOri;
                    coeffSelCornerVec[i] = coeff;
                    laserCloudOriCornerFlag[i] = true;
                }
            }
        }
    }
}
// For each downsampled surface point: fit a plane to its 5 nearest map
// neighbours by solving A*x = -1 (plane through the points), validate the
// fit, and store a weighted point-to-plane residual in coeffSelSurfVec.
void surfOptimization()
{
    updatePointAssociateToMap();
    #pragma omp parallel for num_threads(numberOfCores)
    for (int i = 0; i < laserCloudSurfLastDSNum; i++)
    {
        PointType pointOri, pointSel, coeff;
        std::vector<int> pointSearchInd;
        std::vector<float> pointSearchSqDis;
        pointOri = laserCloudSurfLastDS->points[i];
        // Predicted location of the point in the map frame.
        pointAssociateToMap(&pointOri, &pointSel);
        kdtreeSurfFromMap->nearestKSearch(pointSel, 5, pointSearchInd, pointSearchSqDis);
        Eigen::Matrix<float, 5, 3> matA0; // the 5 neighbour coordinates
        Eigen::Matrix<float, 5, 1> matB0; // RHS, all -1 (plane ax+by+cz+1=0)
        Eigen::Vector3f matX0;            // fitted plane coefficients (a,b,c)
        matA0.setZero();
        matB0.fill(-1);
        matX0.setZero();
        // Accept only if the farthest of the 5 neighbours is within 1 m.
        if (pointSearchSqDis[4] < 1.0) {
            for (int j = 0; j < 5; j++) {
                matA0(j, 0) = laserCloudSurfFromMapDS->points[pointSearchInd[j]].x;
                matA0(j, 1) = laserCloudSurfFromMapDS->points[pointSearchInd[j]].y;
                matA0(j, 2) = laserCloudSurfFromMapDS->points[pointSearchInd[j]].z;
            }
            // Least-squares plane fit.
            matX0 = matA0.colPivHouseholderQr().solve(matB0);
            float pa = matX0(0, 0);
            float pb = matX0(1, 0);
            float pc = matX0(2, 0);
            float pd = 1;
            // Normalize so (pa, pb, pc) is a unit normal.
            float ps = sqrt(pa * pa + pb * pb + pc * pc);
            pa /= ps; pb /= ps; pc /= ps; pd /= ps;
            // Reject the plane if any neighbour deviates more than 0.2 m.
            bool planeValid = true;
            for (int j = 0; j < 5; j++) {
                if (fabs(pa * laserCloudSurfFromMapDS->points[pointSearchInd[j]].x +
                         pb * laserCloudSurfFromMapDS->points[pointSearchInd[j]].y +
                         pc * laserCloudSurfFromMapDS->points[pointSearchInd[j]].z + pd) > 0.2) {
                    planeValid = false;
                    break;
                }
            }
            if (planeValid) {
                // Signed point-to-plane distance of the current point.
                float pd2 = pa * pointSel.x + pb * pointSel.y + pc * pointSel.z + pd;
                // Weight shrinks with distance, scaled by the point's range.
                float s = 1 - 0.9 * fabs(pd2) / sqrt(sqrt(pointSel.x * pointSel.x
                        + pointSel.y * pointSel.y + pointSel.z * pointSel.z));
                coeff.x = s * pa;
                coeff.y = s * pb;
                coeff.z = s * pc;
                coeff.intensity = s * pd2;
                if (s > 0.1) {
                    laserCloudOriSurfVec[i] = pointOri;
                    coeffSelSurfVec[i] = coeff;
                    laserCloudOriSurfFlag[i] = true;
                }
            }
        }
    }
}
// Gather the corner and surface residuals flagged valid by the two
// *Optimization() passes into laserCloudOri/coeffSel, then reset the flags
// for the next iteration.
void combineOptimizationCoeffs()
{
    // Valid corner residuals.
    for (int i = 0; i < laserCloudCornerLastDSNum; ++i) {
        if (!laserCloudOriCornerFlag[i])
            continue;
        laserCloudOri->push_back(laserCloudOriCornerVec[i]);
        coeffSel->push_back(coeffSelCornerVec[i]);
    }
    // Valid surface residuals.
    for (int i = 0; i < laserCloudSurfLastDSNum; ++i) {
        if (!laserCloudOriSurfFlag[i])
            continue;
        laserCloudOri->push_back(laserCloudOriSurfVec[i]);
        coeffSel->push_back(coeffSelSurfVec[i]);
    }
    // Clear the flags so the next iteration starts fresh.
    std::fill(laserCloudOriCornerFlag.begin(), laserCloudOriCornerFlag.end(), false);
    std::fill(laserCloudOriSurfFlag.begin(), laserCloudOriSurfFlag.end(), false);
}
// One Gauss-Newton step over the collected residuals, updating
// transformTobeMapped in place. Returns true once the pose update is small
// enough to declare convergence. Inherited from loam_velodyne, so the
// Jacobians are written in the original "camera" axis convention (see the
// mapping table below).
bool LMOptimization(int iterCount)
{
    // This optimization is from the original loam_velodyne by Ji Zhang, need to cope with coordinate transformation
    // lidar <- camera      ---     camera <- lidar
    // x = z                ---     x = y
    // y = x                ---     y = z
    // z = y                ---     z = x
    // roll = yaw           ---     roll = pitch
    // pitch = roll         ---     pitch = yaw
    // yaw = pitch          ---     yaw = roll
    // lidar -> camera: precompute sines/cosines of the current attitude
    float srx = sin(transformTobeMapped[1]);
    float crx = cos(transformTobeMapped[1]);
    float sry = sin(transformTobeMapped[2]);
    float cry = cos(transformTobeMapped[2]);
    float srz = sin(transformTobeMapped[0]);
    float crz = cos(transformTobeMapped[0]);
    // Too few residuals -> skip this iteration (not converged).
    int laserCloudSelNum = laserCloudOri->size();
    if (laserCloudSelNum < 50) {
        return false;
    }
    cv::Mat matA(laserCloudSelNum, 6, CV_32F, cv::Scalar::all(0));  // Jacobian
    cv::Mat matAt(6, laserCloudSelNum, CV_32F, cv::Scalar::all(0)); // J^T
    cv::Mat matAtA(6, 6, CV_32F, cv::Scalar::all(0));               // J^T J
    cv::Mat matB(laserCloudSelNum, 1, CV_32F, cv::Scalar::all(0));  // -residuals
    cv::Mat matAtB(6, 1, CV_32F, cv::Scalar::all(0));               // J^T b
    cv::Mat matX(6, 1, CV_32F, cv::Scalar::all(0));                 // pose increment
    cv::Mat matP(6, 6, CV_32F, cv::Scalar::all(0));                 // degeneracy projection
    PointType pointOri, coeff;
    for (int i = 0; i < laserCloudSelNum; i++) {
        // lidar -> camera: remap point axes into the camera convention
        pointOri.x = laserCloudOri->points[i].y;
        pointOri.y = laserCloudOri->points[i].z;
        pointOri.z = laserCloudOri->points[i].x;
        // lidar -> camera: remap the residual gradient likewise
        coeff.x = coeffSel->points[i].y;
        coeff.y = coeffSel->points[i].z;
        coeff.z = coeffSel->points[i].x;
        coeff.intensity = coeffSel->points[i].intensity;
        // in camera frame: partial derivatives of the residual w.r.t. the
        // three rotation angles
        float arx = (crx*sry*srz*pointOri.x + crx*crz*sry*pointOri.y - srx*sry*pointOri.z) * coeff.x
                  + (-srx*srz*pointOri.x - crz*srx*pointOri.y - crx*pointOri.z) * coeff.y
                  + (crx*cry*srz*pointOri.x + crx*cry*crz*pointOri.y - cry*srx*pointOri.z) * coeff.z;
        float ary = ((cry*srx*srz - crz*sry)*pointOri.x
                  + (sry*srz + cry*crz*srx)*pointOri.y + crx*cry*pointOri.z) * coeff.x
                  + ((-cry*crz - srx*sry*srz)*pointOri.x
                  + (cry*srz - crz*srx*sry)*pointOri.y - crx*sry*pointOri.z) * coeff.z;
        float arz = ((crz*srx*sry - cry*srz)*pointOri.x + (-cry*crz-srx*sry*srz)*pointOri.y)*coeff.x
                  + (crx*crz*pointOri.x - crx*srz*pointOri.y) * coeff.y
                  + ((sry*srz + cry*crz*srx)*pointOri.x + (crz*sry-cry*srx*srz)*pointOri.y)*coeff.z;
        // camera -> lidar: reorder derivatives back into lidar convention
        matA.at<float>(i, 0) = arz;
        matA.at<float>(i, 1) = arx;
        matA.at<float>(i, 2) = ary;
        matA.at<float>(i, 3) = coeff.z;
        matA.at<float>(i, 4) = coeff.x;
        matA.at<float>(i, 5) = coeff.y;
        matB.at<float>(i, 0) = -coeff.intensity;
    }
    // Solve the normal equations (J^T J) dx = J^T b.
    cv::transpose(matA, matAt);
    matAtA = matAt * matA;
    matAtB = matAt * matB;
    cv::solve(matAtA, matAtB, matX, cv::DECOMP_QR);
    // On the first iteration, detect degenerate directions (eigenvalues of
    // J^T J below threshold) and build a projection that zeroes updates
    // along them.
    if (iterCount == 0) {
        cv::Mat matE(1, 6, CV_32F, cv::Scalar::all(0));
        cv::Mat matV(6, 6, CV_32F, cv::Scalar::all(0));
        cv::Mat matV2(6, 6, CV_32F, cv::Scalar::all(0));
        cv::eigen(matAtA, matE, matV);
        matV.copyTo(matV2);
        isDegenerate = false;
        float eignThre[6] = {100, 100, 100, 100, 100, 100};
        for (int i = 5; i >= 0; i--) {
            if (matE.at<float>(0, i) < eignThre[i]) {
                for (int j = 0; j < 6; j++) {
                    matV2.at<float>(i, j) = 0;
                }
                isDegenerate = true;
            } else {
                break;
            }
        }
        matP = matV.inv() * matV2;
    }
    // Project the increment away from degenerate directions if needed.
    if (isDegenerate) {
        cv::Mat matX2(6, 1, CV_32F, cv::Scalar::all(0));
        matX.copyTo(matX2);
        matX = matP * matX2;
    }
    // Apply the pose increment.
    transformTobeMapped[0] += matX.at<float>(0, 0);
    transformTobeMapped[1] += matX.at<float>(1, 0);
    transformTobeMapped[2] += matX.at<float>(2, 0);
    transformTobeMapped[3] += matX.at<float>(3, 0);
    transformTobeMapped[4] += matX.at<float>(4, 0);
    transformTobeMapped[5] += matX.at<float>(5, 0);
    // Convergence test: rotation change in degrees, translation in cm.
    float deltaR = sqrt(
                    pow(pcl::rad2deg(matX.at<float>(0, 0)), 2) +
                    pow(pcl::rad2deg(matX.at<float>(1, 0)), 2) +
                    pow(pcl::rad2deg(matX.at<float>(2, 0)), 2));
    float deltaT = sqrt(
                    pow(matX.at<float>(3, 0) * 100, 2) +
                    pow(matX.at<float>(4, 0) * 100, 2) +
                    pow(matX.at<float>(5, 0) * 100, 2));
    if (deltaR < 0.05 && deltaT < 0.05) {
        return true; // converged
    }
    return false; // keep optimizing
}
// Iteratively align the current scan's features against the local map
// (up to 30 Gauss-Newton iterations), then blend roll/pitch with the IMU.
void scan2MapOptimization()
{
    if (cloudKeyPoses3D->points.empty())
        return;
    // Guard clause: bail out when there are not enough features to align.
    if (laserCloudCornerLastDSNum <= edgeFeatureMinValidNum || laserCloudSurfLastDSNum <= surfFeatureMinValidNum)
    {
        ROS_WARN("Not enough features! Only %d edge and %d planar features available.", laserCloudCornerLastDSNum, laserCloudSurfLastDSNum);
        return;
    }
    // Build the kd-trees over the downsampled local maps once per scan.
    kdtreeCornerFromMap->setInputCloud(laserCloudCornerFromMapDS);
    kdtreeSurfFromMap->setInputCloud(laserCloudSurfFromMapDS);
    for (int iterCount = 0; iterCount < 30; ++iterCount)
    {
        laserCloudOri->clear();
        coeffSel->clear();
        cornerOptimization();   // point-to-line residuals
        surfOptimization();     // point-to-plane residuals
        combineOptimizationCoeffs();
        if (LMOptimization(iterCount))
            break;              // converged
    }
    // Fuse roll/pitch with the IMU and clamp the state.
    transformUpdate();
}
//gc: interpolate the roll and pitch angle using the IMU measurement and Lidar calculation
void transformUpdate()
{
if (cloudInfo.imuAvailable == true)
{
if (std::abs(cloudInfo.imuPitchInit) < 1.4)
{
double imuWeight = 0.01;
tf::Quaternion imuQuaternion;
tf::Quaternion transformQuaternion;
double rollMid, pitchMid, yawMid;
// slerp roll
transformQuaternion.setRPY(transformTobeMapped[0], 0, 0);
imuQuaternion.setRPY(cloudInfo.imuRollInit, 0, 0);
//gc: interpolate between Imu roll measurement and angle from lidar calculation
tf::Matrix3x3(transformQuaternion.slerp(imuQuaternion, imuWeight)).getRPY(rollMid, pitchMid, yawMid);
transformTobeMapped[0] = rollMid;
// slerp pitch
transformQuaternion.setRPY(0, transformTobeMapped[1], 0);
imuQuaternion.setRPY(0, cloudInfo.imuPitchInit, 0);
//gc: interpolate between Imu roll measurement and angle from lidar calculation
tf::Matrix3x3(transformQuaternion.slerp(imuQuaternion, imuWeight)).getRPY(rollMid, pitchMid, yawMid);
transformTobeMapped[1] = pitchMid;
}
}
transformTobeMapped[0] = constraintTransformation(transformTobeMapped[0], rotation_tollerance);
transformTobeMapped[1] = constraintTransformation(transformTobeMapped[1], rotation_tollerance);
transformTobeMapped[5] = constraintTransformation(transformTobeMapped[5], z_tollerance);
}
// Clamp value to the symmetric interval [-limit, limit].
float constraintTransformation(float value, float limit)
{
    return value < -limit ? -limit
         : value >  limit ?  limit
         : value;
}
bool saveFrame()
{
if (cloudKeyPoses3D->points.empty())
return true;
Eigen::Affine3f transStart = pclPointToAffine3f(cloudKeyPoses6D->back());
Eigen::Affine3f transFinal = pcl::getTransformation(transformTobeMapped[3], transformTobeMapped[4], transformTobeMapped[5],
transformTobeMapped[0], transformTobeMapped[1], transformTobeMapped[2]);
Eigen::Affine3f transBetween = transStart.inverse() * transFinal;
float x, y, z, roll, pitch, yaw;
pcl::getTranslationAndEulerAngles(transBetween, x, y, z, roll, pitch, yaw);
//gc: judge whther should generate key pose
if (abs(roll) < surroundingkeyframeAddingAngleThreshold &&
abs(pitch) < surroundingkeyframeAddingAngleThreshold &&
abs(yaw) < surroundingkeyframeAddingAngleThreshold &&
sqrt(x*x + y*y + z*z) < surroundingkeyframeAddingDistThreshold)
return false;
return true;
}
// Add the odometry constraint for the current pose to the factor graph:
// a prior on the very first pose, a between factor on every later one.
void addOdomFactor()
{
    if (cloudKeyPoses3D->points.empty())
    {
        // First key pose: anchor roll/pitch tightly, leave yaw and the
        // translation very loose.
        noiseModel::Diagonal::shared_ptr priorNoise = noiseModel::Diagonal::Variances((Vector(6) << 1e-2, 1e-2, M_PI*M_PI, 1e8, 1e8, 1e8).finished()); // rad*rad, meter*meter
        gtSAMgraph.add(PriorFactor<Pose3>(0, trans2gtsamPose(transformTobeMapped), priorNoise));
        initialEstimate.insert(0, trans2gtsamPose(transformTobeMapped));
        return;
    }
    // Subsequent poses: tie the new pose to the previous key pose.
    noiseModel::Diagonal::shared_ptr odometryNoise = noiseModel::Diagonal::Variances((Vector(6) << 1e-6, 1e-6, 1e-6, 1e-4, 1e-4, 1e-4).finished());
    gtsam::Pose3 poseFrom = pclPointTogtsamPose3(cloudKeyPoses6D->points.back());
    gtsam::Pose3 poseTo = trans2gtsamPose(transformTobeMapped);
    const int newIndex = cloudKeyPoses3D->size();
    gtSAMgraph.add(BetweenFactor<Pose3>(newIndex - 1, newIndex, poseFrom.between(poseTo), odometryNoise));
    initialEstimate.insert(newIndex, poseTo);
}
// If the pose moved enough (saveFrame()), add an odometry factor, run iSAM,
// and store the optimized pose plus the current feature clouds as a new key
// frame — both in the global history and in the fixed-size sliding window
// used for local-map extraction.
void saveKeyFramesAndFactor()
{
    // Skip if the motion since the last key frame is below the thresholds.
    if (saveFrame() == false)
        return;
    // odom factor
    addOdomFactor();
    // update iSAM with the new factor and the initial estimate
    isam->update(gtSAMgraph, initialEstimate);
    isam->update();
    // The graph/values were consumed by iSAM; clear them for the next frame.
    gtSAMgraph.resize(0);
    initialEstimate.clear();
    //save key poses
    PointType thisPose3D;
    PointTypePose thisPose6D;
    Pose3 latestEstimate;
    isamCurrentEstimate = isam->calculateEstimate();
    latestEstimate = isamCurrentEstimate.at<Pose3>(isamCurrentEstimate.size()-1);
    // cout << "****************************************************" << endl;
    // isamCurrentEstimate.print("Current estimate: ");
    // cloudKeyPoses3D holds key-pose positions for nearest-pose queries;
    // intensity stores the key-frame index.
    thisPose3D.x = latestEstimate.translation().x();
    thisPose3D.y = latestEstimate.translation().y();
    thisPose3D.z = latestEstimate.translation().z();
    thisPose3D.intensity = cloudKeyPoses3D->size(); // this can be used as index
    cloudKeyPoses3D->push_back(thisPose3D);
    thisPose6D.x = thisPose3D.x;
    thisPose6D.y = thisPose3D.y;
    thisPose6D.z = thisPose3D.z;
    thisPose6D.intensity = thisPose3D.intensity ; // this can be used as index
    thisPose6D.roll  = latestEstimate.rotation().roll();
    thisPose6D.pitch = latestEstimate.rotation().pitch();
    thisPose6D.yaw   = latestEstimate.rotation().yaw();
    thisPose6D.time = timeLaserCloudInfoLast;
    cloudKeyPoses6D->push_back(thisPose6D);
    // Maintain the sliding window of the last winSize key poses; the window
    // containers are shared with other threads, hence the mutex.
    mtxWin.lock();
    win_cloudKeyPoses3D.push_back(thisPose3D);
    win_cloudKeyPoses6D.push_back(thisPose6D);
    if(win_cloudKeyPoses3D.size() > winSize)
    {
        win_cloudKeyPoses3D.erase(win_cloudKeyPoses3D.begin());
        win_cloudKeyPoses6D.erase(win_cloudKeyPoses6D.begin());
    }
    // cout << "****************************************************" << endl;
    // cout << "Pose covariance:" << endl;
    // cout << isam->marginalCovariance(isamCurrentEstimate.size()-1) << endl << endl;
    poseCovariance = isam->marginalCovariance(isamCurrentEstimate.size()-1);
    // save updated transform back into the working state
    transformTobeMapped[0] = latestEstimate.rotation().roll();
    transformTobeMapped[1] = latestEstimate.rotation().pitch();
    transformTobeMapped[2] = latestEstimate.rotation().yaw();
    transformTobeMapped[3] = latestEstimate.translation().x();
    transformTobeMapped[4] = latestEstimate.translation().y();
    transformTobeMapped[5] = latestEstimate.translation().z();
    // save all the received edge and surf points of this key frame
    pcl::PointCloud<PointType>::Ptr thisCornerKeyFrame(new pcl::PointCloud<PointType>());
    pcl::PointCloud<PointType>::Ptr thisSurfKeyFrame(new pcl::PointCloud<PointType>());
    pcl::copyPointCloud(*laserCloudCornerLastDS,  *thisCornerKeyFrame);
    pcl::copyPointCloud(*laserCloudSurfLastDS,    *thisSurfKeyFrame);
    // save key frame cloud in the global history
    cornerCloudKeyFrames.push_back(thisCornerKeyFrame);
    surfCloudKeyFrames.push_back(thisSurfKeyFrame);
    // and in the sliding window (same winSize bound as the poses above)
    win_cornerCloudKeyFrames.push_back(thisCornerKeyFrame);
    win_surfCloudKeyFrames.push_back(thisSurfKeyFrame);
    if(win_cornerCloudKeyFrames.size() > winSize)
    {
        win_cornerCloudKeyFrames.erase(win_cornerCloudKeyFrames.begin());
        win_surfCloudKeyFrames.erase(win_surfCloudKeyFrames.begin());
    }
    mtxWin.unlock();
    // save path for visualization
    updatePath(thisPose6D);
}
void updatePath(const PointTypePose& pose_in)
{
geometry_msgs::PoseStamped pose_stamped;
pose_stamped.header.stamp = ros::Time().fromSec(pose_in.time);
pose_stamped.header.frame_id = "odom";
pose_stamped.pose.position.x = pose_in.x;
pose_stamped.pose.position.y = pose_in.y;
pose_stamped.pose.position.z = pose_in.z;
tf::Quaternion q = tf::createQuaternionFromRPY(pose_in.roll, pose_in.pitch, pose_in.yaw);
pose_stamped.pose.orientation.x = q.x();
pose_stamped.pose.orientation.y = q.y();
pose_stamped.pose.orientation.z = q.z();
pose_stamped.pose.orientation.w = q.w();
globalPath.poses.push_back(pose_stamped);
}
void publishOdometry()
{
// Publish odometry for ROS
nav_msgs::Odometry laserOdometryROS;
laserOdometryROS.header.stamp = timeLaserInfoStamp;
laserOdometryROS.header.frame_id = "odom";
laserOdometryROS.child_frame_id = "odom_mapping";
laserOdometryROS.pose.pose.position.x = transformTobeMapped[3];
laserOdometryROS.pose.pose.position.y = transformTobeMapped[4];
laserOdometryROS.pose.pose.position.z = transformTobeMapped[5];
laserOdometryROS.pose.pose.orientation = tf::createQuaternionMsgFromRollPitchYaw(transformTobeMapped[0], transformTobeMapped[1], transformTobeMapped[2]);
laserOdometryROS.pose.covariance[0] = double(imuPreintegrationResetId);
pubOdomAftMappedROS.publish(laserOdometryROS);
}
// Publish visualization topics: key poses, the local surface map, the
// registered feature scan (odom frame), the scan transformed into the map
// frame via the current odom->map estimate, the registered raw cloud, and
// the global path. Expensive clouds are only built when subscribed to.
void publishFrames()
{
    if (cloudKeyPoses3D->points.empty())
        return;
    // publish key poses
    publishCloud(&pubKeyPoses, cloudKeyPoses3D, timeLaserInfoStamp, "odom");
    // Publish surrounding key frames (the downsampled local surface map)
    publishCloud(&pubRecentKeyFrames, laserCloudSurfFromMapDS, timeLaserInfoStamp, "odom");
    // publish registered key frame (feature points in the odom frame)
    if (pubRecentKeyFrame.getNumSubscribers() != 0)
    {
        pcl::PointCloud<PointType>::Ptr cloudOut(new pcl::PointCloud<PointType>());
        PointTypePose thisPose6D = trans2PointTypePose(transformTobeMapped);
        *cloudOut += *transformPointCloud(laserCloudCornerLastDS,  &thisPose6D);
        *cloudOut += *transformPointCloud(laserCloudSurfLastDS,    &thisPose6D);
        publishCloud(&pubRecentKeyFrame, cloudOut, timeLaserInfoStamp, "odom");
    }
    // Publish the current scan in the prior-map ("map") frame by chaining
    // the odom pose with the odom->map transform from global localization.
    if(pubLaserCloudInWorld.getNumSubscribers() != 0)
    {
        pcl::PointCloud<PointType>::Ptr cloudInBase(new pcl::PointCloud<PointType>());
        pcl::PointCloud<PointType>::Ptr cloudOutInWorld(new pcl::PointCloud<PointType>());
        PointTypePose thisPose6DInOdom = trans2PointTypePose(transformTobeMapped);
        Eigen::Affine3f T_thisPose6DInOdom = pclPointToAffine3f(thisPose6DInOdom);
        // tranformOdomToWorld is written by the localize thread; lock it.
        mtxtranformOdomToWorld.lock();
        PointTypePose pose_Odom_Map = trans2PointTypePose(tranformOdomToWorld);
        mtxtranformOdomToWorld.unlock();
        Eigen::Affine3f T_pose_Odom_Map = pclPointToAffine3f(pose_Odom_Map);
        // map <- odom <- base
        Eigen::Affine3f T_poseInMap = T_pose_Odom_Map * T_thisPose6DInOdom;
        *cloudInBase += *laserCloudCornerLastDS;
        *cloudInBase += *laserCloudSurfLastDS;
        pcl::transformPointCloud(*cloudInBase, *cloudOutInWorld, T_poseInMap.matrix());
        publishCloud(&pubLaserCloudInWorld, cloudOutInWorld, timeLaserInfoStamp, "map");
    }
    // publish registered high-res raw cloud (whole deskewed scan)
    if (pubCloudRegisteredRaw.getNumSubscribers() != 0)
    {
        pcl::PointCloud<PointType>::Ptr cloudOut(new pcl::PointCloud<PointType>());
        pcl::fromROSMsg(cloudInfo.cloud_deskewed, *cloudOut);
        PointTypePose thisPose6D = trans2PointTypePose(transformTobeMapped);
        *cloudOut = *transformPointCloud(cloudOut,  &thisPose6D);
        publishCloud(&pubCloudRegisteredRaw, cloudOut, timeLaserInfoStamp, "odom");
    }
    // publish path
    if (pubPath.getNumSubscribers() != 0)
    {
        globalPath.header.stamp = timeLaserInfoStamp;
        globalPath.header.frame_id = "odom";
        pubPath.publish(globalPath);
    }
}
/*************added by gc*****Todo: (1) ICP or matching point to edge and surface? (2) global_pcd or whole keyframes************/
void cloudGlobalLoad()
{
pcl::io::loadPCDFile(std::getenv("HOME") + savePCDDirectory + "cloudGlobal.pcd", *cloudGlobalMap);
pcl::PointCloud<PointType>::Ptr cloud_temp(new pcl::PointCloud<PointType>());
downSizeFilterICP.setInputCloud(cloudGlobalMap);
downSizeFilterICP.filter(*cloud_temp);
//*cloudGlobalMap = *cloud_temp;
*cloudGlobalMapDS = *cloud_temp;
std::cout << "test 0.01 the size of global cloud: " << cloudGlobalMap->points.size() << std::endl;
std::cout << "test 0.02 the size of global map after filter: " << cloudGlobalMapDS->points.size() << std::endl;
}
// Background relocalization loop (runs on its own std::thread until shutdown).
// State machine on initializedFlag:
//   NonInitialized -> attempt NDT+ICP initialization against the global map;
//   Initializing   -> a previous attempt failed, wait for a new /initialpose guess;
//   Initialized    -> every 10 s, refine the odom->map transform by global scan match.
void globalLocalizeThread()
{
    while (ros::ok())
    {
        if (initializedFlag == NonInitialized)
        {
            // No accepted pose in the map yet: try to initialize.
            ICPLocalizeInitialize();
            continue;
        }

        if (initializedFlag == Initializing)
        {
            // Do nothing but prompt; a fresh guess resets the flag elsewhere.
            std::cout << "Offer A New Guess Please " << std::endl;
            ros::Duration(1.0).sleep();
            continue;
        }

        // Initialized: periodic global refinement (timing kept for profiling).
        ros::Duration(10.0).sleep();
        double t_start = ros::Time::now().toSec();
        ICPscanMatchGlobal();
        double t_end = ros::Time::now().toSec();
    }
}
// Initial global localization. Aligns the accumulated initialization scan
// (cloudScanForInitialize) against the downsampled global map: a coarse NDT
// pass seeded by the user guess in transformInTheWorld, then ICP refinement
// seeded by the NDT result. On success: updates transformInTheWorld and
// tranformOdomToWorld, publishes the odom->map pose, and sets Initialized.
// On failure: sets Initializing so the user can supply a new /initialpose.
void ICPLocalizeInitialize()
{
    pcl::PointCloud<PointType>::Ptr laserCloudIn(new pcl::PointCloud<PointType>());

    // Snapshot the accumulated scan under its lock.
    mtx_general.lock();
    *laserCloudIn += *cloudScanForInitialize;
    mtx_general.unlock();
    //publishCloud(&fortest_publasercloudINWorld, laserCloudIn, timeLaserInfoStamp, "map");

    // Nothing accumulated yet — retry on the next loop iteration.
    if(laserCloudIn->points.size() == 0)
        return;
    //cloudScanForInitialize->clear();
    std::cout << "the size of incoming lasercloud: " << laserCloudIn->points.size() << std::endl;

    // Coarse matcher: NDT tolerates a rough initial guess better than ICP.
    pcl::NormalDistributionsTransform<PointType, PointType> ndt;
    ndt.setTransformationEpsilon(0.01);
    ndt.setResolution(1.0);

    // Fine matcher: ICP refines the NDT output.
    pcl::IterativeClosestPoint<PointType, PointType> icp;
    icp.setMaxCorrespondenceDistance(100);
    icp.setMaximumIterations(100);
    icp.setTransformationEpsilon(1e-6);
    icp.setEuclideanFitnessEpsilon(1e-6);
    icp.setRANSACIterations(0);

    ndt.setInputSource(laserCloudIn);
    ndt.setInputTarget(cloudGlobalMapDS);
    pcl::PointCloud<PointType>::Ptr unused_result_0(new pcl::PointCloud<PointType>());

    // Seed NDT with the manual guess (map frame).
    PointTypePose thisPose6DInWorld = trans2PointTypePose(transformInTheWorld);
    Eigen::Affine3f T_thisPose6DInWorld = pclPointToAffine3f(thisPose6DInWorld);
    ndt.align(*unused_result_0, T_thisPose6DInWorld.matrix());

    //use the outcome of ndt as the initial guess for ICP
    icp.setInputSource(laserCloudIn);
    icp.setInputTarget(cloudGlobalMapDS);
    pcl::PointCloud<PointType>::Ptr unused_result(new pcl::PointCloud<PointType>());
    icp.align(*unused_result, ndt.getFinalTransformation());

    std::cout << "the pose before initializing is: x" << transformInTheWorld[3] << " y" << transformInTheWorld[4]
              << " z" << transformInTheWorld[5] <<std::endl;
    std::cout << "the pose in odom before initializing is: x" << tranformOdomToWorld[3] << " y" << tranformOdomToWorld[4]
              << " z" << tranformOdomToWorld[5] <<std::endl;
    std::cout << "the icp score in initializing process is: " << icp.getFitnessScore() << std::endl;
    std::cout << "the pose after initializing process is: "<< icp.getFinalTransformation() << std::endl;

    // Pose of this scan in the odom frame.
    // NOTE(review): transformTobeMapped is read without a lock here while the
    // mapping thread may write it — verify this race is acceptable.
    PointTypePose thisPose6DInOdom = trans2PointTypePose(transformTobeMapped);
    std::cout<< "transformTobeMapped X_Y_Z: " << transformTobeMapped[3] << " " << transformTobeMapped[4] << " " << transformTobeMapped[5] << std::endl;
    Eigen::Affine3f T_thisPose6DInOdom = pclPointToAffine3f(thisPose6DInOdom);

    // ICP result = pose of the scan in the map frame.
    Eigen::Affine3f T_thisPose6DInMap;
    T_thisPose6DInMap = icp.getFinalTransformation();
    float x_g, y_g, z_g, R_g, P_g, Y_g;
    pcl::getTranslationAndEulerAngles (T_thisPose6DInMap, x_g, y_g, z_g, R_g, P_g, Y_g);
    // transformInTheWorld layout is [roll, pitch, yaw, x, y, z].
    transformInTheWorld[0] = R_g;
    transformInTheWorld[1] = P_g;
    transformInTheWorld[2] = Y_g;
    transformInTheWorld[3] = x_g;
    transformInTheWorld[4] = y_g;
    transformInTheWorld[5] = z_g;

    // map_T_odom = map_T_scan * inverse(odom_T_scan)
    Eigen::Affine3f transOdomToMap = T_thisPose6DInMap * T_thisPose6DInOdom.inverse();
    float deltax, deltay, deltaz, deltaR, deltaP, deltaY;
    pcl::getTranslationAndEulerAngles (transOdomToMap, deltax, deltay, deltaz, deltaR, deltaP, deltaY);

    mtxtranformOdomToWorld.lock();
    //renew tranformOdomToWorld
    tranformOdomToWorld[0] = deltaR;
    tranformOdomToWorld[1] = deltaP;
    tranformOdomToWorld[2] = deltaY;
    tranformOdomToWorld[3] = deltax;
    tranformOdomToWorld[4] = deltay;
    tranformOdomToWorld[5] = deltaz;
    mtxtranformOdomToWorld.unlock();

    std::cout << "the pose of odom relative to Map: x" << tranformOdomToWorld[3] << " y" << tranformOdomToWorld[4]
              << " z" << tranformOdomToWorld[5] <<std::endl;

    // Visualization: the aligned scan and the global map, both in the map frame.
    publishCloud(&pubLaserCloudInWorld, unused_result, timeLaserInfoStamp, "map");
    publishCloud(&pubMapWorld, cloudGlobalMapDS, timeLaserInfoStamp, "map");

    // Accept only a converged, low-residual alignment.
    if (icp.hasConverged() == false || icp.getFitnessScore() > historyKeyframeFitnessScore)
    {
        initializedFlag = Initializing;
        std::cout << "Initializing Fail" << std::endl;
        return;
    } else{
        initializedFlag = Initialized;
        std::cout << "Initializing Succeed" << std::endl;
        // Broadcast the accepted odom->map correction for downstream consumers.
        geometry_msgs::PoseStamped pose_odomTo_map;
        tf::Quaternion q_odomTo_map = tf::createQuaternionFromRPY(deltaR, deltaP, deltaY);
        pose_odomTo_map.header.stamp = timeLaserInfoStamp;
        pose_odomTo_map.header.frame_id = "map";
        pose_odomTo_map.pose.position.x = deltax; pose_odomTo_map.pose.position.y = deltay; pose_odomTo_map.pose.position.z = deltaz;
        pose_odomTo_map.pose.orientation.x = q_odomTo_map.x();
        pose_odomTo_map.pose.orientation.y = q_odomTo_map.y();
        pose_odomTo_map.pose.orientation.z = q_odomTo_map.z();
        pose_odomTo_map.pose.orientation.w = q_odomTo_map.w();
        pubOdomToMapPose.publish(pose_odomTo_map);
    }
    //cloudScanForInitialize.reset(new pcl::PointCloud<PointType>());
}
// Periodic global refinement once initialized. Transforms the newest key-frame
// clouds (corner + surface) into the odom frame, then aligns that composite
// scan against the downsampled global map with NDT (seeded by the current
// tranformOdomToWorld) followed by ICP. The result always overwrites
// tranformOdomToWorld; the odom->map pose is published only when the
// alignment converged with a low fitness score.
void ICPscanMatchGlobal()
{
    //initializing
    /*
    if(initializedFlag == NonInitialized)
    {
        ICPLocalizeInitialize();
        return;
    }*/
    // No key frames yet — nothing to match.
    if (cloudKeyPoses3D->points.empty() == true)
        return;

    //change-5
    /******************added by gc************************/
    // Build the source cloud from the latest key frame in the sliding window,
    // expressed in the odom frame; the window is shared, so hold its lock.
    mtxWin.lock();
    int latestFrameIDGlobalLocalize;
    latestFrameIDGlobalLocalize = win_cloudKeyPoses3D.size() - 1;
    pcl::PointCloud<PointType>::Ptr latestCloudIn(new pcl::PointCloud<PointType>());
    *latestCloudIn += *transformPointCloud(win_cornerCloudKeyFrames[latestFrameIDGlobalLocalize], &win_cloudKeyPoses6D[latestFrameIDGlobalLocalize]);
    *latestCloudIn += *transformPointCloud(win_surfCloudKeyFrames[latestFrameIDGlobalLocalize], &win_cloudKeyPoses6D[latestFrameIDGlobalLocalize]);
    std::cout << "the size of input cloud: " << latestCloudIn->points.size() <<std::endl;
    mtxWin.unlock();
    /******************added by gc************************/

    //    int latestFrameIDGlobalLocalize;
    //    latestFrameIDGlobalLocalize = cloudKeyPoses3D->size() - 1;
    //
    //
    //    //latest Frame cloudpoints In the odom Frame
    //
    //    pcl::PointCloud<PointType>::Ptr latestCloudIn(new pcl::PointCloud<PointType>());
    //    *latestCloudIn += *transformPointCloud(cornerCloudKeyFrames[latestFrameIDGlobalLocalize], &cloudKeyPoses6D->points[latestFrameIDGlobalLocalize]);
    //    *latestCloudIn += *transformPointCloud(surfCloudKeyFrames[latestFrameIDGlobalLocalize], &cloudKeyPoses6D->points[latestFrameIDGlobalLocalize]);
    //    std::cout << "the size of input cloud: " << latestCloudIn->points.size() <<std::endl;

    // Coarse matcher (NDT) and fine matcher (ICP), same settings as in
    // ICPLocalizeInitialize.
    pcl::NormalDistributionsTransform<PointType, PointType> ndt;
    ndt.setTransformationEpsilon(0.01);
    ndt.setResolution(1.0);

    pcl::IterativeClosestPoint<PointType, PointType> icp;
    icp.setMaxCorrespondenceDistance(100);
    icp.setMaximumIterations(100);
    icp.setTransformationEpsilon(1e-6);
    icp.setEuclideanFitnessEpsilon(1e-6);
    icp.setRANSACIterations(0);

    // Align cloud
    //3. calculate the tranform of odom relative to world
    //Eigen::Affine3f transodomToWorld_init = pcl::getTransformation(0,0,0,0,0,0);
    // Seed with the current odom->map estimate (read under its lock).
    mtxtranformOdomToWorld.lock();
    Eigen::Affine3f transodomToWorld_init = pcl::getTransformation(tranformOdomToWorld[3], tranformOdomToWorld[4],tranformOdomToWorld[5],tranformOdomToWorld[0],tranformOdomToWorld[1],tranformOdomToWorld[2]);
    mtxtranformOdomToWorld.unlock();

    Eigen::Matrix4f matricInitGuess = transodomToWorld_init.matrix();
    //std::cout << "matricInitGuess: " << matricInitGuess << std::endl;

    //Firstly perform ndt in coarse resolution
    ndt.setInputSource(latestCloudIn);
    ndt.setInputTarget(cloudGlobalMapDS);
    pcl::PointCloud<PointType>::Ptr unused_result_0(new pcl::PointCloud<PointType>());
    ndt.align(*unused_result_0, matricInitGuess);

    //use the outcome of ndt as the initial guess for ICP
    icp.setInputSource(latestCloudIn);
    icp.setInputTarget(cloudGlobalMapDS);
    pcl::PointCloud<PointType>::Ptr unused_result(new pcl::PointCloud<PointType>());
    icp.align(*unused_result, ndt.getFinalTransformation());

    std::cout << "ICP converg flag:" << icp.hasConverged() << ". Fitness score: " << icp.getFitnessScore() << std::endl << std::endl;

    //if (icp.hasConverged() == false || icp.getFitnessScore() > historyKeyframeFitnessScore)
    //    return;

    // NOTE(review): the odom->map estimate is updated even when ICP did not
    // converge (the early return above is commented out) — confirm intended.
    Eigen::Affine3f transodomToWorld_New;
    transodomToWorld_New = icp.getFinalTransformation();
    float x, y, z, roll, pitch, yaw;
    pcl::getTranslationAndEulerAngles (transodomToWorld_New, x, y, z, roll, pitch, yaw);

    //std::cout << " in test 0.03: deltaX = " << x << " deltay = " << y << " deltaZ = " << z << std::endl;
    mtxtranformOdomToWorld.lock();
    //renew tranformOdomToWorld
    tranformOdomToWorld[0] = roll;
    tranformOdomToWorld[1] = pitch;
    tranformOdomToWorld[2] = yaw;
    tranformOdomToWorld[3] = x;
    tranformOdomToWorld[4] = y;
    tranformOdomToWorld[5] = z;
    mtxtranformOdomToWorld.unlock();

    //publish the laserpointcloud in world frame
    //publish global map
    publishCloud(&pubMapWorld, cloudGlobalMapDS, timeLaserInfoStamp, "map");//publish world map

    // Broadcast the refined odom->map pose only for accepted alignments.
    if (icp.hasConverged() == true && icp.getFitnessScore() < historyKeyframeFitnessScore)
    {
        geometry_msgs::PoseStamped pose_odomTo_map;
        tf::Quaternion q_odomTo_map = tf::createQuaternionFromRPY(roll, pitch, yaw);
        pose_odomTo_map.header.stamp = timeLaserInfoStamp;
        pose_odomTo_map.header.frame_id = "map";
        pose_odomTo_map.pose.position.x = x; pose_odomTo_map.pose.position.y = y; pose_odomTo_map.pose.position.z = z;
        pose_odomTo_map.pose.orientation.x = q_odomTo_map.x();
        pose_odomTo_map.pose.orientation.y = q_odomTo_map.y();
        pose_odomTo_map.pose.orientation.z = q_odomTo_map.z();
        pose_odomTo_map.pose.orientation.w = q_odomTo_map.w();
        pubOdomToMapPose.publish(pose_odomTo_map);
    }
    //publish the trsformation between map and odom
}
// Placeholder: presumably intended to load individual key frames rather than a
// single global PCD; currently a no-op stub.
void keyFramesLoad()
{
}
// Callback for the RViz "/initialpose" guess. Records the user-supplied pose in
// the map frame (transformInTheWorld), derives a provisional odom->map
// transform from the current odom pose, and sets initializedFlag back to
// NonInitialized so the relocalization thread retries NDT+ICP initialization.
void initialpose_callback(const geometry_msgs::PoseWithCovarianceStampedConstPtr& pose_msg)
{
    // Once localization succeeded, later guesses are ignored.
    if (initializedFlag == Initialized)
        return;

    // Translation of the guess (map frame).
    float guess_x = pose_msg->pose.pose.position.x;
    float guess_y = pose_msg->pose.pose.position.y;
    float guess_z = pose_msg->pose.pose.position.z;

    // Orientation of the guess, converted quaternion -> roll/pitch/yaw.
    tf::Quaternion guess_q;
    guess_q.setX(pose_msg->pose.pose.orientation.x);
    guess_q.setY(pose_msg->pose.pose.orientation.y);
    guess_q.setZ(pose_msg->pose.pose.orientation.z);
    guess_q.setW(pose_msg->pose.pose.orientation.w);
    double guess_roll, guess_pitch, guess_yaw;
    tf::Matrix3x3(guess_q).getRPY(guess_roll, guess_pitch, guess_yaw);

    // Store as [roll, pitch, yaw, x, y, z] — the layout the matchers expect.
    transformInTheWorld[0] = guess_roll;
    transformInTheWorld[1] = guess_pitch;
    transformInTheWorld[2] = guess_yaw;
    transformInTheWorld[3] = guess_x;
    transformInTheWorld[4] = guess_y;
    transformInTheWorld[5] = guess_z;

    // map_T_odom = map_T_base * inverse(odom_T_base).
    PointTypePose poseInWorld = trans2PointTypePose(transformInTheWorld);
    Eigen::Affine3f T_poseInWorld = pclPointToAffine3f(poseInWorld);
    PointTypePose poseInOdom = trans2PointTypePose(transformTobeMapped);
    Eigen::Affine3f T_poseInOdom = pclPointToAffine3f(poseInOdom);
    Eigen::Affine3f T_odomToMap = T_poseInWorld * T_poseInOdom.inverse();

    float delta_x, delta_y, delta_z, delta_roll, delta_pitch, delta_yaw;
    pcl::getTranslationAndEulerAngles(T_odomToMap, delta_x, delta_y, delta_z, delta_roll, delta_pitch, delta_yaw);

    // Keep the shared odom->map estimate coherent for the other threads.
    mtxtranformOdomToWorld.lock();
    tranformOdomToWorld[0] = delta_roll;
    tranformOdomToWorld[1] = delta_pitch;
    tranformOdomToWorld[2] = delta_yaw;
    tranformOdomToWorld[3] = delta_x;
    tranformOdomToWorld[4] = delta_y;
    tranformOdomToWorld[5] = delta_z;
    mtxtranformOdomToWorld.unlock();

    // Trigger a fresh initialization attempt with this guess.
    initializedFlag = NonInitialized;
}
/*************added by gc******************/
};
// Entry point for the map-optimization / relocalization node.
int main(int argc, char** argv)
{
    ros::init(argc, argv, "lio_sam");

    mapOptimization MO;
    ROS_INFO("\033[1;32m----> Map Optimization Started.\033[0m");

    // Global relocalization runs in the background while ROS services callbacks.
    std::thread relocalizeThread(&mapOptimization::globalLocalizeThread, &MO);

    ros::spin();

    relocalizeThread.join();
    return 0;
}
================================================
FILE: src/imageProjection.cpp
================================================
#include "utility.h"
#include "lio_sam/cloud_info.h"
// Velodyne
// Custom PCL point type carrying the per-point fields needed for deskewing:
// xyz + intensity, plus the laser ring index and the point's time offset.
struct PointXYZIRT
{
    PCL_ADD_POINT4D
    PCL_ADD_INTENSITY;
    uint16_t ring;  // laser ring index; used as the row of the range image
    float time;     // time offset relative to the scan start (see deskewPoint)
    EIGEN_MAKE_ALIGNED_OPERATOR_NEW
} EIGEN_ALIGN16;
// Register the custom type so pcl::fromROSMsg can map the PointCloud2 fields.
POINT_CLOUD_REGISTER_POINT_STRUCT (PointXYZIRT,
    (float, x, x) (float, y, y) (float, z, z) (float, intensity, intensity)
    (uint16_t, ring, ring) (float, time, time)
)
// Ouster
// struct PointXYZIRT {
// PCL_ADD_POINT4D;
// float intensity;
// uint32_t t;
// uint16_t reflectivity;
// uint8_t ring;
// uint16_t noise;
// uint32_t range;
// EIGEN_MAKE_ALIGNED_OPERATOR_NEW
// }EIGEN_ALIGN16;
// POINT_CLOUD_REGISTER_POINT_STRUCT(PointXYZIRT,
// (float, x, x) (float, y, y) (float, z, z) (float, intensity, intensity)
// (uint32_t, t, t) (uint16_t, reflectivity, reflectivity)
// (uint8_t, ring, ring) (uint16_t, noise, noise) (uint32_t, range, range)
// )
const int queueLength = 500;
// Deskews each incoming lidar scan using IMU rotation (and optionally the IMU
// pre-integration odometry), projects it into an N_SCAN x Horizon_SCAN range
// image, and publishes the extracted cloud plus per-ring index metadata as a
// lio_sam::cloud_info message for downstream feature extraction.
class ImageProjection : public ParamServer
{
private:
    // Guard the IMU / odometry queues against concurrent callback access.
    std::mutex imuLock;
    std::mutex odoLock;

    ros::Subscriber subLaserCloud;
    ros::Publisher pubLaserCloud;
    ros::Publisher pubExtractedCloud;
    ros::Publisher pubLaserCloudInfo;

    ros::Subscriber subImu; //gc: IMU data
    std::deque<sensor_msgs::Imu> imuQueue;

    ros::Subscriber subOdom;//gc: IMU pre-preintegration odometry
    std::deque<nav_msgs::Odometry> odomQueue;

    std::deque<sensor_msgs::PointCloud2> cloudQueue;
    sensor_msgs::PointCloud2 currentCloudMsg;

    // Integrated IMU rotation samples covering the current scan; indices
    // [0, imuPointerCur] are valid (ring-less flat arrays of queueLength).
    double *imuTime = new double[queueLength];
    double *imuRotX = new double[queueLength];
    double *imuRotY = new double[queueLength];
    double *imuRotZ = new double[queueLength];

    int imuPointerCur;
    bool firstPointFlag;               // true until the first deskewed point fixes the scan-start transform
    Eigen::Affine3f transStartInverse; // inverse transform at the first point of the scan

    pcl::PointCloud<PointXYZIRT>::Ptr laserCloudIn;
    pcl::PointCloud<PointType>::Ptr   fullCloud;      // dense range-image-ordered cloud
    pcl::PointCloud<PointType>::Ptr   extractedCloud; // valid points only, ring by ring

    int deskewFlag;    // 1: point time field available, -1: deskew disabled, 0: not yet checked
    cv::Mat rangeMat;  // range image; FLT_MAX marks empty pixels

    bool odomDeskewFlag;
    float odomIncreX;  // translation increment over the scan from odometry
    float odomIncreY;
    float odomIncreZ;

    lio_sam::cloud_info cloudInfo;
    double timeScanCur;   // stamp of the current scan
    double timeScanNext;  // stamp of the next queued scan (scan end bound)
    std_msgs::Header cloudHeader;

public:
    // Wires up subscribers/publishers and allocates the fixed-size buffers.
    ImageProjection():
    deskewFlag(0)
    {
        subImu        = nh.subscribe<sensor_msgs::Imu>(imuTopic, 2000, &ImageProjection::imuHandler, this, ros::TransportHints().tcpNoDelay());
        subOdom       = nh.subscribe<nav_msgs::Odometry>(odomTopic, 2000, &ImageProjection::odometryHandler, this, ros::TransportHints().tcpNoDelay());
        subLaserCloud = nh.subscribe<sensor_msgs::PointCloud2>(pointCloudTopic, 5, &ImageProjection::cloudHandler, this, ros::TransportHints().tcpNoDelay());

        pubExtractedCloud = nh.advertise<sensor_msgs::PointCloud2> ("lio_sam/deskew/cloud_deskewed", 1);
        pubLaserCloudInfo = nh.advertise<lio_sam::cloud_info> ("lio_sam/deskew/cloud_info", 10);

        allocateMemory();
        resetParameters();

        pcl::console::setVerbosityLevel(pcl::console::L_ERROR);
    }

    // One-time allocation of clouds and cloud_info vectors (sized by N_SCAN /
    // Horizon_SCAN from the parameter server).
    void allocateMemory()
    {
        laserCloudIn.reset(new pcl::PointCloud<PointXYZIRT>());
        fullCloud.reset(new pcl::PointCloud<PointType>());
        extractedCloud.reset(new pcl::PointCloud<PointType>());

        fullCloud->points.resize(N_SCAN*Horizon_SCAN);

        cloudInfo.startRingIndex.assign(N_SCAN, 0);
        cloudInfo.endRingIndex.assign(N_SCAN, 0);

        cloudInfo.pointColInd.assign(N_SCAN*Horizon_SCAN, 0);
        cloudInfo.pointRange.assign(N_SCAN*Horizon_SCAN, 0);

        resetParameters();
    }

    // Per-scan reset: clears clouds, the range image, and the integrated IMU
    // rotation buffers.
    void resetParameters()
    {
        laserCloudIn->clear();
        extractedCloud->clear();
        // reset range matrix for range image projection
        rangeMat = cv::Mat(N_SCAN, Horizon_SCAN, CV_32F, cv::Scalar::all(FLT_MAX));

        imuPointerCur = 0;
        firstPointFlag = true;
        odomDeskewFlag = false;

        for (int i = 0; i < queueLength; ++i)
        {
            imuTime[i] = 0;
            imuRotX[i] = 0;
            imuRotY[i] = 0;
            imuRotZ[i] = 0;
        }
    }

    ~ImageProjection(){}

    // Converts the IMU sample into the lidar frame and queues it.
    void imuHandler(const sensor_msgs::Imu::ConstPtr& imuMsg)
    {
        sensor_msgs::Imu thisImu = imuConverter(*imuMsg);

        std::lock_guard<std::mutex> lock1(imuLock);
        imuQueue.push_back(thisImu);

        // debug IMU data
        // cout << std::setprecision(6);
        // cout << "IMU acc: " << endl;
        // cout << "x: " << thisImu.linear_acceleration.x <<
        //       ", y: " << thisImu.linear_acceleration.y <<
        //       ", z: " << thisImu.linear_acceleration.z << endl;
        // cout << "IMU gyro: " << endl;
        // cout << "x: " << thisImu.angular_velocity.x <<
        //       ", y: " << thisImu.angular_velocity.y <<
        //       ", z: " << thisImu.angular_velocity.z << endl;
        // double imuRoll, imuPitch, imuYaw;
        // tf::Quaternion orientation;
        // tf::quaternionMsgToTF(thisImu.orientation, orientation);
        // tf::Matrix3x3(orientation).getRPY(imuRoll, imuPitch, imuYaw);
        // cout << "IMU roll pitch yaw: " << endl;
        // cout << "roll: " << imuRoll << ", pitch: " << imuPitch << ", yaw: " << imuYaw << endl << endl;
    }

    //gc: IMu prediction
    // Queues the IMU pre-integration odometry used for the initial pose guess.
    void odometryHandler(const nav_msgs::Odometry::ConstPtr& odometryMsg)
    {
        std::lock_guard<std::mutex> lock2(odoLock);
        odomQueue.push_back(*odometryMsg);
    }

    // Main pipeline for every incoming scan: cache -> deskew info -> project
    // -> extract -> publish -> reset.
    void cloudHandler(const sensor_msgs::PointCloud2ConstPtr& laserCloudMsg)
    {
        if (!cachePointCloud(laserCloudMsg))//gc: cache pointclouds and make format-checking
            return;

        if (!deskewInfo())//gc:calculate to get the relative transformation betwwen the start of the scan and the time when the imu came
            return;

        projectPointCloud();//gc: deskew the pointcloud and project the pointcloud into one image

        cloudExtraction();

        publishClouds();

        resetParameters();
    }

    //gc: cashe pointclouds and make format-checking
    // Buffers scans (keeping >2 queued so timeScanNext is known), converts the
    // oldest to PCL, and verifies the cloud is dense with ring/time fields.
    bool cachePointCloud(const sensor_msgs::PointCloud2ConstPtr& laserCloudMsg)
    {
        // cache point cloud
        cloudQueue.push_back(*laserCloudMsg);

        if (cloudQueue.size() <= 2)
            return false;
        else
        {
            currentCloudMsg = cloudQueue.front();
            cloudQueue.pop_front();

            cloudHeader = currentCloudMsg.header;
            timeScanCur = cloudHeader.stamp.toSec();
            // Next queued scan's stamp bounds the end of the current scan.
            timeScanNext = cloudQueue.front().header.stamp.toSec();
        }

        // convert cloud
        pcl::fromROSMsg(currentCloudMsg, *laserCloudIn);

        // check dense flag
        if (laserCloudIn->is_dense == false)
        {
            ROS_ERROR("Point cloud is not in dense format, please remove NaN points first!");
            ros::shutdown();
        }

        // check ring channel (fatal if absent — projection needs it)
        static int ringFlag = 0;
        if (ringFlag == 0)
        {
            ringFlag = -1;
            for (int i = 0; i < (int)currentCloudMsg.fields.size(); ++i)
            {
                if (currentCloudMsg.fields[i].name == "ring")
                {
                    ringFlag = 1;
                    break;
                }
            }
            if (ringFlag == -1)
            {
                ROS_ERROR("Point cloud ring channel not available, please configure your point cloud data!");
                ros::shutdown();
            }
        }

        // check point time (non-fatal — deskew is merely disabled)
        if (deskewFlag == 0)
        {
            deskewFlag = -1;
            for (int i = 0; i < (int)currentCloudMsg.fields.size(); ++i)
            {
                if (currentCloudMsg.fields[i].name == timeField)
                {
                    deskewFlag = 1;
                    break;
                }
            }
            if (deskewFlag == -1)
                ROS_WARN("Point cloud timestamp not available, deskew function disabled, system will drift significantly!");
        }

        return true;
    }

    // Requires IMU data spanning [timeScanCur, timeScanNext] before computing
    // the per-scan deskew information.
    bool deskewInfo()
    {
        std::lock_guard<std::mutex> lock1(imuLock);
        std::lock_guard<std::mutex> lock2(odoLock);

        // make sure IMU data available for the scan
        if (imuQueue.empty() || imuQueue.front().header.stamp.toSec() > timeScanCur || imuQueue.back().header.stamp.toSec() < timeScanNext)
        {
            ROS_DEBUG("Waiting for IMU data ...");
            return false;
        }

        imuDeskewInfo();

        odomDeskewInfo();

        return true;
    }

    // Integrates gyro rates over the scan interval into the imuRot* buffers
    // and records the scan-start attitude in cloudInfo.imu*Init.
    void imuDeskewInfo()
    {
        cloudInfo.imuAvailable = false;

        // Drop IMU samples older than just before the scan start.
        while (!imuQueue.empty())
        {
            if (imuQueue.front().header.stamp.toSec() < timeScanCur - 0.01)
                imuQueue.pop_front();
            else
                break;
        }

        if (imuQueue.empty())
            return;

        imuPointerCur = 0;

        for (int i = 0; i < (int)imuQueue.size(); ++i)
        {
            sensor_msgs::Imu thisImuMsg = imuQueue[i];
            double currentImuTime = thisImuMsg.header.stamp.toSec();

            // get roll, pitch, and yaw estimation for this scan
            //gc: not sychronized, something maybe should be done
            if (currentImuTime <= timeScanCur)
                imuRPY2rosRPY(&thisImuMsg, &cloudInfo.imuRollInit, &cloudInfo.imuPitchInit, &cloudInfo.imuYawInit);

            if (currentImuTime > timeScanNext + 0.01)//gc: integrate between this scan and next scan
                break;

            // First sample anchors the rotation at zero.
            if (imuPointerCur == 0){
                imuRotX[0] = 0;
                imuRotY[0] = 0;
                imuRotZ[0] = 0;
                imuTime[0] = currentImuTime;
                ++imuPointerCur;
                continue;
            }

            // get angular velocity
            double angular_x, angular_y, angular_z;
            imuAngular2rosAngular(&thisImuMsg, &angular_x, &angular_y, &angular_z);

            // integrate rotation (simple forward Euler of gyro rates)
            double timeDiff = currentImuTime - imuTime[imuPointerCur-1];
            imuRotX[imuPointerCur] = imuRotX[imuPointerCur-1] + angular_x * timeDiff;
            imuRotY[imuPointerCur] = imuRotY[imuPointerCur-1] + angular_y * timeDiff;
            imuRotZ[imuPointerCur] = imuRotZ[imuPointerCur-1] + angular_z * timeDiff;
            imuTime[imuPointerCur] = currentImuTime;
            ++imuPointerCur;
        }

        --imuPointerCur;

        if (imuPointerCur <= 0)
            return;

        cloudInfo.imuAvailable = true;
    }

    // Extracts the scan-start pose from the pre-integration odometry (used as
    // the mapping initial guess) and the translation increment over the scan.
    void odomDeskewInfo()
    {
        cloudInfo.odomAvailable = false;

        while (!odomQueue.empty())
        {
            if (odomQueue.front().header.stamp.toSec() < timeScanCur - 0.01)
                odomQueue.pop_front();
            else
                break;
        }

        if (odomQueue.empty())
            return;

        if (odomQueue.front().header.stamp.toSec() > timeScanCur)
            return;

        // get start odometry at the beinning of the scan
        nav_msgs::Odometry startOdomMsg;

        for (int i = 0; i < (int)odomQueue.size(); ++i)
        {
            startOdomMsg = odomQueue[i];

            if (ROS_TIME(&startOdomMsg) < timeScanCur)
                continue;
            else
                break;
        }

        tf::Quaternion orientation;
        tf::quaternionMsgToTF(startOdomMsg.pose.pose.orientation, orientation);

        double roll, pitch, yaw;
        tf::Matrix3x3(orientation).getRPY(roll, pitch, yaw);

        // Initial guess used in mapOptimization
        cloudInfo.initialGuessX = startOdomMsg.pose.pose.position.x;
        cloudInfo.initialGuessY = startOdomMsg.pose.pose.position.y;
        cloudInfo.initialGuessZ = startOdomMsg.pose.pose.position.z;
        cloudInfo.initialGuessRoll  = roll;
        cloudInfo.initialGuessPitch = pitch;
        cloudInfo.initialGuessYaw   = yaw;
        // Reset id is smuggled through covariance[0] by the odometry producer.
        cloudInfo.imuPreintegrationResetId = round(startOdomMsg.pose.covariance[0]);

        cloudInfo.odomAvailable = true;

        // get end odometry at the end of the scan
        odomDeskewFlag = false;

        if (odomQueue.back().header.stamp.toSec() < timeScanNext)
            return;

        nav_msgs::Odometry endOdomMsg;

        for (int i = 0; i < (int)odomQueue.size(); ++i)
        {
            endOdomMsg = odomQueue[i];

            if (ROS_TIME(&endOdomMsg) < timeScanNext)
                continue;
            else
                break;
        }

        // Start/end must come from the same pre-integration epoch.
        if (int(round(startOdomMsg.pose.covariance[0])) != int(round(endOdomMsg.pose.covariance[0])))
            return;

        Eigen::Affine3f transBegin = pcl::getTransformation(startOdomMsg.pose.pose.position.x, startOdomMsg.pose.pose.position.y, startOdomMsg.pose.pose.position.z, roll, pitch, yaw);

        tf::quaternionMsgToTF(endOdomMsg.pose.pose.orientation, orientation);
        tf::Matrix3x3(orientation).getRPY(roll, pitch, yaw);
        Eigen::Affine3f transEnd = pcl::getTransformation(endOdomMsg.pose.pose.position.x, endOdomMsg.pose.pose.position.y, endOdomMsg.pose.pose.position.z, roll, pitch, yaw);

        //gc: the relative transforme between the end snad the start
        Eigen::Affine3f transBt = transBegin.inverse() * transEnd;

        float rollIncre, pitchIncre, yawIncre;
        //gc: get the transform difference in the direction of X, Y and Z
        pcl::getTranslationAndEulerAngles(transBt, odomIncreX, odomIncreY, odomIncreZ, rollIncre, pitchIncre, yawIncre);

        odomDeskewFlag = true;
    }

    // Looks up (or linearly interpolates) the integrated IMU rotation at
    // pointTime from the imuRot* buffers.
    void findRotation(double pointTime, float *rotXCur, float *rotYCur, float *rotZCur)
    {
        *rotXCur = 0; *rotYCur = 0; *rotZCur = 0;

        int imuPointerFront = 0;
        while (imuPointerFront < imuPointerCur)
        {
            if (pointTime < imuTime[imuPointerFront])
                break;
            ++imuPointerFront;
        }

        // Outside the integrated interval: take the nearest sample directly;
        // otherwise interpolate between the bracketing samples.
        if (pointTime > imuTime[imuPointerFront] || imuPointerFront == 0)
        {
            *rotXCur = imuRotX[imuPointerFront];
            *rotYCur = imuRotY[imuPointerFront];
            *rotZCur = imuRotZ[imuPointerFront];
        } else {
            int imuPointerBack = imuPointerFront - 1;
            double ratioFront = (pointTime - imuTime[imuPointerBack]) / (imuTime[imuPointerFront] - imuTime[imuPointerBack]);
            double ratioBack = (imuTime[imuPointerFront] - pointTime) / (imuTime[imuPointerFront] - imuTime[imuPointerBack]);
            *rotXCur = imuRotX[imuPointerFront] * ratioFront + imuRotX[imuPointerBack] * ratioBack;
            *rotYCur = imuRotY[imuPointerFront] * ratioFront + imuRotY[imuPointerBack] * ratioBack;
            *rotZCur = imuRotZ[imuPointerFront] * ratioFront + imuRotZ[imuPointerBack] * ratioBack;
        }
    }

    // Positional deskew — intentionally disabled (returns zeros); see comment.
    void findPosition(double relTime, float *posXCur, float *posYCur, float *posZCur)
    {
        *posXCur = 0; *posYCur = 0; *posZCur = 0;

        // If the sensor moves relatively slow, like walking speed, positional deskew seems to have little benefits. Thus code below is commented.

        // if (cloudInfo.odomAvailable == false || odomDeskewFlag == false)
        //     return;

        // float ratio = relTime / (timeScanNext - timeScanCur);

        // *posXCur = ratio * odomIncreX;
        // *posYCur = ratio * odomIncreY;
        // *posZCur = ratio * odomIncreZ;
    }

    // Motion-compensates a single point: transforms it from its capture time
    // back into the frame of the first point of the scan.
    PointType deskewPoint(PointType *point, double relTime)
    {
        if (deskewFlag == -1 || cloudInfo.imuAvailable == false)
            return *point;

        double pointTime = timeScanCur + relTime;

        float rotXCur, rotYCur, rotZCur;
        findRotation(pointTime, &rotXCur, &rotYCur, &rotZCur);//gc: Interpolation to find the rotation of lidar relative the scan start

        float posXCur, posYCur, posZCur;
        findPosition(relTime, &posXCur, &posYCur, &posZCur);

        // The first deskewed point defines the reference (scan-start) frame.
        if (firstPointFlag == true)
        {
            transStartInverse = (pcl::getTransformation(posXCur, posYCur, posZCur, rotXCur, rotYCur, rotZCur)).inverse();
            firstPointFlag = false;
        }

        // transform points to start
        Eigen::Affine3f transFinal = pcl::getTransformation(posXCur, posYCur, posZCur, rotXCur, rotYCur, rotZCur);
        Eigen::Affine3f transBt = transStartInverse * transFinal;

        PointType newPoint;
        newPoint.x = transBt(0,0) * point->x + transBt(0,1) * point->y + transBt(0,2) * point->z + transBt(0,3);
        newPoint.y = transBt(1,0) * point->x + transBt(1,1) * point->y + transBt(1,2) * point->z + transBt(1,3);
        newPoint.z = transBt(2,0) * point->x + transBt(2,1) * point->y + transBt(2,2) * point->z + transBt(2,3);
        newPoint.intensity = point->intensity;

        return newPoint;
    }

    // Deskews every point and writes it into the range image: row = ring,
    // column = horizontal angle bin. First hit per pixel wins.
    void projectPointCloud()
    {
        int cloudSize = laserCloudIn->points.size();
        // range image projection
        for (int i = 0; i < cloudSize; ++i)
        {
            PointType thisPoint;
            thisPoint.x = laserCloudIn->points[i].x;
            thisPoint.y = laserCloudIn->points[i].y;
            thisPoint.z = laserCloudIn->points[i].z;
            thisPoint.intensity = laserCloudIn->points[i].intensity;

            int rowIdn = laserCloudIn->points[i].ring;
            if (rowIdn < 0 || rowIdn >= N_SCAN)
                continue;

            // Optionally skip rings to reduce vertical resolution.
            if (rowIdn % downsampleRate != 0)
                continue;

            float horizonAngle = atan2(thisPoint.x, thisPoint.y) * 180 / M_PI;

            static float ang_res_x = 360.0/float(Horizon_SCAN);//gc: resolution in the direction
            int columnIdn = -round((horizonAngle-90.0)/ang_res_x) + Horizon_SCAN/2;
            if (columnIdn >= Horizon_SCAN)
                columnIdn -= Horizon_SCAN;

            if (columnIdn < 0 || columnIdn >= Horizon_SCAN)
                continue;

            float range = pointDistance(thisPoint);

            // Too close to the sensor — likely self-returns.
            if (range < 1.0)
                continue;

            // Pixel already filled by an earlier point.
            if (rangeMat.at<float>(rowIdn, columnIdn) != FLT_MAX)
                continue;

            // for the amsterdam dataset
            // if (range < 6.0 && rowIdn <= 7 && (columnIdn >= 1600 || columnIdn <= 200))
            //     continue;
            // if (thisPoint.z < -2.0)
            //     continue;

            thisPoint = deskewPoint(&thisPoint, laserCloudIn->points[i].time); // Velodyne
            // thisPoint = deskewPoint(&thisPoint, (float)laserCloudIn->points[i].t / 1000000000.0); // Ouster

            rangeMat.at<float>(rowIdn, columnIdn) = pointDistance(thisPoint);

            int index = columnIdn + rowIdn * Horizon_SCAN;
            fullCloud->points[index] = thisPoint;
        }
    }

    // Flattens valid range-image pixels into extractedCloud, recording
    // per-ring start/end indices (with a 5-point margin for the feature
    // extractor's neighborhood) plus each point's column and range.
    void cloudExtraction()
    {
        int count = 0;
        // extract segmented cloud for lidar odometry
        for (int i = 0; i < N_SCAN; ++i)
        {
            cloudInfo.startRingIndex[i] = count - 1 + 5;//gc: the start index of every ring

            for (int j = 0; j < Horizon_SCAN; ++j)
            {
                if (rangeMat.at<float>(i,j) != FLT_MAX)
                {
                    // mark the points' column index for marking occlusion later
                    cloudInfo.pointColInd[count] = j;//gc: the colume index of every point
                    // save range info
                    cloudInfo.pointRange[count] = rangeMat.at<float>(i,j);
                    // save extracted cloud
                    extractedCloud->push_back(fullCloud->points[j + i*Horizon_SCAN]);
                    // size of extracted cloud
                    ++count;
                }
            }
            cloudInfo.endRingIndex[i] = count -1 - 5;//gc: the end index of every ring
        }
    }

    // Publishes the deskewed cloud (frame "base_link") and the full cloud_info.
    void publishClouds()
    {
        cloudInfo.header = cloudHeader;
        cloudInfo.cloud_deskewed  = publishCloud(&pubExtractedCloud, extractedCloud, cloudHeader.stamp, "base_link");
        pubLaserCloudInfo.publish(cloudInfo);
    }
};
int main(int argc, char** argv)
{
ros::init(argc, argv, "lio_sam");
ImageProjection IP;
ROS_INFO("\033[1;32m----> Image Projection Started.\033[0m");
ros::MultiThreadedSpinner spinner(3);
spinner.spin();
return 0;
}
================================================
FILE: src/imuPreintegration.cpp
================================================
#include "utility.h"
#include <gtsam/geometry/Rot3.h>
#include <gtsam/geometry/Pose3.h>
#include <gtsam/slam/PriorFactor.h>
#include <gtsam/slam/BetweenFactor.h>
#include <gtsam/navigation/GPSFactor.h>
#include <gtsam/navigation/ImuFactor.h>
#include <gtsam/navigation/CombinedImuFactor.h>
#include <gtsam/nonlinear/NonlinearFactorGraph.h>
#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>
#include <gtsam/nonlinear/Marginals.h>
#include <gtsam/nonlinear/Values.h>
#include <gtsam/inference/Symbol.h>
#include <gtsam/nonlinear/ISAM2.h>
#include <gtsam_unstable/nonlinear/IncrementalFixedLagSmoother.h>
using gtsam::symbol_shorthand::X; // Pose3 (x,y,z,r,p,y)
using gtsam::symbol_shorthand::V; // Vel (xdot,ydot,zdot)
using gtsam::symbol_shorthand::B; // Bias (ax,ay,az,gx,gy,gz)
class IMUPreintegration : public ParamServer
{
public:
ros::Subscriber subImu;
ros::Subscriber subOdometry;
ros::Publisher pubImuOdometry;
ros::Publisher pubImuPath;
// map -> odom
tf::Transform map_to_odom;
tf::TransformBroadcaster tfMap2Odom;
// odom -> base_link
tf::TransformBroadcaster tfOdom2BaseLink;
bool systemInitialized = false;
gtsam::noiseModel::Diagonal::shared_ptr priorPoseNoise;
gtsam::noiseModel::Diagonal::shared_ptr priorVelNoise;
gtsam::noiseModel::Diagonal::shared_ptr priorBiasNoise;
gtsam::noiseModel::Diagonal::shared_ptr correctionNoise;
gtsam::Vector noiseModelBetweenBias;
gtsam::PreintegratedImuMeasurements *imuIntegratorOpt_;
gtsam::PreintegratedImuMeasurements *imuIntegratorImu_;
std::deque<sensor_msgs::Imu> imuQueOpt;
std::deque<sensor_msgs::Imu> imuQueImu;
gtsam::Pose3 prevPose_;
gtsam::Vector3 prevVel_;
gtsam::NavState prevState_;
gtsam::imuBias::ConstantBias prevBias_;
gtsam::NavState prevStateOdom;
gtsam::imuBias::ConstantBias prevBiasOdom;
bool doneFirstOpt = false;
double lastImuT_imu = -1;
double lastImuT_opt = -1;
gtsam::ISAM2 optimizer;
gtsam::NonlinearFactorGraph graphFactors;
gtsam::Values graphValues;
const double delta_t = 0;
int key = 1;
int imuPreintegrationResetId = 0;
gtsam::Pose3 imu2Lidar = gtsam::Pose3(gtsam::Rot3(1, 0, 0, 0), gtsam::Point3(-extTrans.x(), -extTrans.y(), -extTrans.z()));
gtsam::Pose3 lidar2Imu = gtsam::Pose3(gtsam::Rot3(1, 0, 0, 0), gtsam::Point3(extTrans.x(), extTrans.y(), extTrans.z()));;
//added ***************************** by gc
ros::Subscriber subPoseOdomToMap;
geometry_msgs::PoseStamped poseOdomToMap;
//added ***************************** by gc
// Sets up ROS I/O, the map->odom transform cache, GTSAM noise models, and the
// two IMU pre-integrators (one for the optimizer thread, one for the
// high-rate IMU odometry thread).
IMUPreintegration()
{
    //added***********gc
    // Receives the odom->map correction published by the localization module.
    subPoseOdomToMap = nh.subscribe<geometry_msgs::PoseStamped>("lio_sam/mapping/pose_odomTo_map", 1,&IMUPreintegration::odomToMapPoseHandler, this, ros::TransportHints().tcpNoDelay());
    //added***********gc
    subImu      = nh.subscribe<sensor_msgs::Imu>  (imuTopic, 2000, &IMUPreintegration::imuHandler, this, ros::TransportHints().tcpNoDelay());
    subOdometry = nh.subscribe<nav_msgs::Odometry>("lio_sam/mapping/odometry", 5, &IMUPreintegration::odometryHandler, this, ros::TransportHints().tcpNoDelay());

    pubImuOdometry = nh.advertise<nav_msgs::Odometry> (odomTopic, 2000);
    pubImuPath     = nh.advertise<nav_msgs::Path>     ("lio_sam/imu/path", 1);

    // Identity until the first odom->map correction arrives.
    map_to_odom = tf::Transform(tf::createQuaternionFromRPY(0, 0, 0), tf::Vector3(0, 0, 0));

    boost::shared_ptr<gtsam::PreintegrationParams> p = gtsam::PreintegrationParams::MakeSharedU(imuGravity);
    p->accelerometerCovariance  = gtsam::Matrix33::Identity(3,3) * pow(imuAccNoise, 2); // acc white noise in continuous
    p->gyroscopeCovariance      = gtsam::Matrix33::Identity(3,3) * pow(imuGyrNoise, 2); // gyro white noise in continuous
    p->integrationCovariance    = gtsam::Matrix33::Identity(3,3) * pow(1e-4, 2); // error committed in integrating position from velocities
    gtsam::imuBias::ConstantBias prior_imu_bias((gtsam::Vector(6) << 0, 0, 0, 0, 0, 0).finished());; // assume zero initial bias

    priorPoseNoise  = gtsam::noiseModel::Diagonal::Sigmas((gtsam::Vector(6) << 1e-2, 1e-2, 1e-2, 1e-2, 1e-2, 1e-2).finished()); // rad,rad,rad,m, m, m
    priorVelNoise   = gtsam::noiseModel::Isotropic::Sigma(3, 1e4); // m/s
    priorBiasNoise  = gtsam::noiseModel::Isotropic::Sigma(6, 1e-3); // 1e-2 ~ 1e-3 seems to be good
    correctionNoise = gtsam::noiseModel::Isotropic::Sigma(6, 1e-2); // meter
    noiseModelBetweenBias = (gtsam::Vector(6) << imuAccBiasN, imuAccBiasN, imuAccBiasN, imuGyrBiasN, imuGyrBiasN, imuGyrBiasN).finished();

    imuIntegratorImu_ = new gtsam::PreintegratedImuMeasurements(p, prior_imu_bias); // setting up the IMU integration for IMU message thread
    imuIntegratorOpt_ = new gtsam::PreintegratedImuMeasurements(p, prior_imu_bias); // setting up the IMU integration for optimization
}
void odomToMapPoseHandler(const geometry_msgs::PoseStamped::ConstPtr& poseOdomToMapmsg)
{
tf::Quaternion q_tem;
q_tem.setX(poseOdomToMapmsg->pose.orientation.x);
q_tem.setY(poseOdomToMapmsg->pose.orientation.y);
q_tem.setZ(poseOdomToMapmsg->pose.orientation.z);
q_tem.setW(poseOdomToMapmsg->pose.orientation.w);
tf::Vector3 p_tem(poseOdomToMapmsg->pose.position.x, poseOdomToMapmsg->pose.position.y, poseOdomToMapmsg->pose.position.z);
map_to_odom = tf::Transform(q_tem, p_tem);
}
void resetOptimization()
{
gtsam::ISAM2Params optParameters;
optParameters.relinearizeThreshold = 0.1;
optParameters.relinearizeSkip = 1;
optimizer = gtsam::ISAM2(optParameters);
gtsam::NonlinearFactorGraph newGraphFactors;
graphFactors = newGraphFactors;
gtsam::Values NewGraphValues;
graphValues = NewGraphValues;
}
void resetParams()
{
lastImuT_imu = -1;
doneFirstOpt = false;
systemInitialized = false;
}
    //gc: handles the lidar odometry computed by the map-optimization module
void odometryHandler(const nav_msgs::Odometry::ConstPtr& odomMsg)
{
double currentCorrectionTime = ROS_TIME(odomMsg);
// make sure we have imu data to integrate
if (imuQueOpt.empty())
return;
float p_x = odomMsg->pose.pose.position.x;
float p_y = odomMsg->pose.pose.position.y;
float p_z = odomMsg->pose.pose.position.z;
float r_x = odomMsg->pose.pose.orientation.x;
float r_y = odomMsg->pose.pose.orientation.y;
float r_z = odomMsg->pose.pose.orientation.z;
float r_w = odomMsg->pose.pose.orientation.w;
int currentResetId = round(odomMsg->pose.covariance[0]);
gtsam::Pose3 lidarPose = gtsam::Pose3(gtsam::Rot3::Quaternion(r_w, r_x, r_y, r_z), gtsam::Point3(p_x, p_y, p_z));
// correction pose jumped, reset imu pre-integration
if (currentResetId != imuPreintegrationResetId)
{
resetParams();
imuPreintegrationResetId = currentResetId;
return;
}
// 0. initialize system
if (systemInitialized == false)
{
resetOptimization();
// pop old IMU message
while (!imuQueOpt.empty())
{
if (ROS_TIME(&imuQueOpt.front()) < currentCorrectionTime - delta_t)
{
lastImuT_opt = ROS_TIME(&imuQueOpt.front());
imuQueOpt.pop_front();
}
else
break;
}
// initial pose
prevPose_ = lidarPose.compose(lidar2Imu);
// 构造因子
gtsam::PriorFactor<gtsam::Pose3> priorPose(X(0), prevPose_, priorPoseNoise);
//gc: add a factor
graphFactors.add(priorPose);
// 构造因子
// initial velocity
prevVel_ = gtsam::Vector3(0, 0, 0);
gtsam::PriorFactor<gtsam::Vector3> priorVel(V(0), prevVel_, priorVelNoise);
graphFactors.add(priorVel);
// initial bias
// 构造因子
prevBias_ = gtsam::imuBias::ConstantBias();
gtsam::PriorFactor<gtsam::imuBias::ConstantBias> priorBias(B(0), prevBias_, priorBiasNoise);
graphFactors.add(priorBias);
// add values
// gc: insert(key,初始值)
graphValues.insert(X(0), prevPose_);
graphValues.insert(V(0), prevVel_);
graphValues.insert(B(0), prevBias_);
// optimize once
optimizer.update(graphFactors, graphValues);
graphFactors.resize(0);
graphValues.clear();
imuIntegratorImu_->resetIntegrationAndSetBias(prevBias_);
imuIntegratorOpt_->resetIntegrationAndSetBias(prevBias_);
key = 1;
systemInitialized = true;
return;
}
// reset graph for speed
if (key == 100)
{
// get updated noise before reset
gtsam::noiseModel::Gaussian::shared_ptr updatedPoseNoise = gtsam::noiseModel::Gaussian::Covariance(optimizer.marginalCovariance(X(key-1)));
gtsam::noiseModel::Gaussian::shared_ptr updatedVelNoise = gtsam::noiseModel::Gaussian::Covariance(optimizer.marginalCovariance(V(key-1)));
gtsam::noiseModel::Gaussian::shared_ptr updatedBiasNoise = gtsam::noiseModel::Gaussian::Covariance(optimizer.marginalCovariance(B(key-1)));
// reset graph
resetOptimization();
// add pose
gtsam::PriorFactor<gtsam::Pose3> priorPose(X(0), prevPose_, updatedPoseNoise);
graphFactors.add(priorPose);
// add velocity
gtsam::PriorFactor<gtsam::Vector3> priorVel(V(0), prevVel_, updatedVelNoise);
graphFactors.add(priorVel);
// add bias
gtsam::PriorFactor<gtsam::imuBias::ConstantBias> priorBias(B(0), prevBias_, updatedBiasNoise);
graphFactors.add(priorBias);
// add values
graphValues.insert(X(0), prevPose_);
graphValues.insert(V(0), prevVel_);
graphValues.insert(B(0), prevBias_);
// optimize once
optimizer.update(graphFactors, graphValues);
graphFactors.resize(0);
graphValues.clear();
key = 1;
}
// 1. integrate imu data and optimize
while (!imuQueOpt.empty())
{
// pop and integrate imu data that is between two optimizations
sensor_msgs::Imu *thisImu = &imuQueOpt.front();
double imuTime = ROS_TIME(thisImu);
if (imuTime < currentCorrectionTime - delta_t)
{
double dt = (lastImuT_opt < 0) ? (1.0 / 500.0) : (imuTime - lastImuT_opt);
imuIntegratorOpt_->integrateMeasurement(
gtsam::Vector3(thisImu->linear_acceleration.x, thisImu->linear_acceleration.y, thisImu->linear_acceleration.z),
gtsam::Vector3(thisImu->angular_velocity.x, thisImu->angular_velocity.y, thisImu->angular_velocity.z), dt);
lastImuT_opt = imuTime;
imuQueOpt.pop_front();
}
else
break;
}
// add imu factor to graph
const gtsam::PreintegratedImuMeasurements& preint_imu = dynamic_cast<const gtsam::PreintegratedImuMeasurements&>(*imuIntegratorOpt_);
//gc: 状态变量pose 状态变量velo
gitextract_uwlmj5r8/
├── .github/
│ └── stale.yml
├── CMakeLists.txt
├── LICENSE
├── README.md
├── config/
│ ├── doc/
│ │ └── kitti2bag/
│ │ ├── README.md
│ │ └── kitti2bag.py
│ └── params.yaml
├── include/
│ └── utility.h
├── launch/
│ ├── include/
│ │ ├── config/
│ │ │ ├── robot.urdf.xacro
│ │ │ └── rviz.rviz
│ │ ├── module_loam.launch
│ │ ├── module_navsat.launch
│ │ ├── module_relocolize.launch
│ │ ├── module_robot_state_publisher.launch
│ │ ├── module_rviz.launch
│ │ └── rosconsole/
│ │ ├── rosconsole_error.conf
│ │ ├── rosconsole_info.conf
│ │ └── rosconsole_warn.conf
│ ├── run.launch
│ └── run_relocalize.launch
├── msg/
│ └── cloud_info.msg
├── package.xml
└── src/
├── featureExtraction.cpp
├── globalLocalize.cpp
├── imageProjection.cpp
├── imuPreintegration.cpp
└── mapOptmization.cpp
SYMBOL INDEX (136 symbols across 7 files)
FILE: config/doc/kitti2bag/kitti2bag.py
function save_imu_data (line 28) | def save_imu_data(bag, kitti, imu_frame_id, topic):
function save_imu_data_raw (line 47) | def save_imu_data_raw(bag, kitti, imu_frame_id, topic):
function save_dynamic_tf (line 105) | def save_dynamic_tf(bag, kitti, kitti_type, initial_time):
function save_camera_data (line 161) | def save_camera_data(bag, kitti_type, kitti, util, bridge, camera, camer...
function save_velo_data (line 211) | def save_velo_data(bag, kitti, velo_frame_id, topic):
function get_static_transform (line 273) | def get_static_transform(from_frame_id, to_frame_id, transform):
function inv (line 289) | def inv(transform):
function save_static_transforms (line 300) | def save_static_transforms(bag, transforms, timestamps):
function save_gps_fix_data (line 313) | def save_gps_fix_data(bag, kitti, gps_frame_id, topic):
function save_gps_vel_data (line 325) | def save_gps_vel_data(bag, kitti, gps_frame_id, topic):
FILE: include/utility.h
type pcl (line 56) | typedef pcl::PointXYZI PointType;
function class (line 58) | class ParamServer
function pointDistance (line 291) | float pointDistance(PointType p)
function pointDistance (line 297) | float pointDistance(PointType p1, PointType p2)
FILE: src/featureExtraction.cpp
type smoothness_t (line 4) | struct smoothness_t{
type by_value (line 9) | struct by_value{
class FeatureExtraction (line 15) | class FeatureExtraction : public ParamServer
method FeatureExtraction (line 40) | FeatureExtraction()
method initializationValue (line 51) | void initializationValue()
method laserCloudInfoHandler (line 66) | void laserCloudInfoHandler(const lio_sam::cloud_infoConstPtr& msgIn)
method calculateSmoothness (line 81) | void calculateSmoothness()
method markOccludedPoints (line 103) | void markOccludedPoints()
method extractFeatures (line 142) | void extractFeatures()
method freeCloudInfoMemory (line 241) | void freeCloudInfoMemory()
method publishFeatureCloud (line 249) | void publishFeatureCloud()
function main (line 262) | int main(int argc, char** argv)
FILE: src/globalLocalize.cpp
type PointXYZIRPYT (line 34) | struct PointXYZIRPYT
class mapOptimization (line 55) | class mapOptimization : public ParamServer
type InitializedFlag (line 180) | enum InitializedFlag
method mapOptimization (line 194) | mapOptimization()
method allocateMemory (line 236) | void allocateMemory()
method resetLIO (line 254) | void resetLIO()
method laserCloudInfoHandler (line 298) | void laserCloudInfoHandler(const lio_sam::cloud_infoConstPtr& msgIn)
method gpsHandler (line 376) | void gpsHandler(const nav_msgs::Odometry::ConstPtr& gpsMsg)
method pointAssociateToMap (line 381) | void pointAssociateToMap(PointType const * const pi, PointType * const...
method transformPointCloud (line 389) | pcl::PointCloud<PointType>::Ptr transformPointCloud(pcl::PointCloud<Po...
method pclPointTogtsamPose3 (line 411) | gtsam::Pose3 pclPointTogtsamPose3(PointTypePose thisPoint)
method trans2gtsamPose (line 417) | gtsam::Pose3 trans2gtsamPose(float transformIn[])
method pclPointToAffine3f (line 423) | Eigen::Affine3f pclPointToAffine3f(PointTypePose thisPoint)
method trans2Affine3f (line 428) | Eigen::Affine3f trans2Affine3f(float transformIn[])
method PointTypePose (line 433) | PointTypePose trans2PointTypePose(float transformIn[])
method updateInitialGuess (line 445) | void updateInitialGuess()
method extractForLoopClosure (line 496) | void extractForLoopClosure()
method extractCloud (line 535) | void extractCloud(pcl::PointCloud<PointType>::Ptr cloudToExtract)
method extractSurroundingKeyFrames (line 621) | void extractSurroundingKeyFrames()
method downsampleCurrentScan (line 635) | void downsampleCurrentScan()
method updatePointAssociateToMap (line 649) | void updatePointAssociateToMap()
method cornerOptimization (line 654) | void cornerOptimization()
method surfOptimization (line 749) | void surfOptimization()
method combineOptimizationCoeffs (line 820) | void combineOptimizationCoeffs()
method LMOptimization (line 841) | bool LMOptimization(int iterCount)
method scan2MapOptimization (line 965) | void scan2MapOptimization()
method transformUpdate (line 1000) | void transformUpdate()
method constraintTransformation (line 1032) | float constraintTransformation(float value, float limit)
method saveFrame (line 1042) | bool saveFrame()
method addOdomFactor (line 1063) | void addOdomFactor()
method saveKeyFramesAndFactor (line 1083) | void saveKeyFramesAndFactor()
method updatePath (line 1193) | void updatePath(const PointTypePose& pose_in)
method publishOdometry (line 1210) | void publishOdometry()
method publishFrames (line 1225) | void publishFrames()
method cloudGlobalLoad (line 1284) | void cloudGlobalLoad()
method globalLocalizeThread (line 1298) | void globalLocalizeThread()
method ICPLocalizeInitialize (line 1333) | void ICPLocalizeInitialize()
method ICPscanMatchGlobal (line 1441) | void ICPscanMatchGlobal()
method keyFramesLoad (line 1562) | void keyFramesLoad()
method initialpose_callback (line 1568) | void initialpose_callback(const geometry_msgs::PoseWithCovarianceStamp...
function main (line 1627) | int main(int argc, char** argv)
FILE: src/imageProjection.cpp
type PointXYZIRT (line 6) | struct PointXYZIRT
class ImageProjection (line 40) | class ImageProjection : public ParamServer
method ImageProjection (line 90) | ImageProjection():
method allocateMemory (line 106) | void allocateMemory()
method resetParameters (line 123) | void resetParameters()
method imuHandler (line 145) | void imuHandler(const sensor_msgs::Imu::ConstPtr& imuMsg)
method odometryHandler (line 170) | void odometryHandler(const nav_msgs::Odometry::ConstPtr& odometryMsg)
method cloudHandler (line 176) | void cloudHandler(const sensor_msgs::PointCloud2ConstPtr& laserCloudMsg)
method cachePointCloud (line 193) | bool cachePointCloud(const sensor_msgs::PointCloud2ConstPtr& laserClou...
method deskewInfo (line 259) | bool deskewInfo()
method imuDeskewInfo (line 278) | void imuDeskewInfo()
method odomDeskewInfo (line 338) | void odomDeskewInfo()
method findRotation (line 422) | void findRotation(double pointTime, float *rotXCur, float *rotYCur, fl...
method findPosition (line 449) | void findPosition(double relTime, float *posXCur, float *posYCur, floa...
method PointType (line 465) | PointType deskewPoint(PointType *point, double relTime)
method projectPointCloud (line 497) | void projectPointCloud()
method cloudExtraction (line 550) | void cloudExtraction()
method publishClouds (line 576) | void publishClouds()
function main (line 584) | int main(int argc, char** argv)
FILE: src/imuPreintegration.cpp
class IMUPreintegration (line 23) | class IMUPreintegration : public ParamServer
method IMUPreintegration (line 83) | IMUPreintegration()
method odomToMapPoseHandler (line 113) | void odomToMapPoseHandler(const geometry_msgs::PoseStamped::ConstPtr& ...
method resetOptimization (line 127) | void resetOptimization()
method resetParams (line 141) | void resetParams()
method odometryHandler (line 148) | void odometryHandler(const nav_msgs::Odometry::ConstPtr& odomMsg)
method failureDetection (line 349) | bool failureDetection(const gtsam::Vector3& velCur, const gtsam::imuBi...
method imuHandler (line 369) | void imuHandler(const sensor_msgs::Imu::ConstPtr& imu_raw)
function main (line 452) | int main(int argc, char** argv)
FILE: src/mapOptmization.cpp
type PointXYZIRPYT (line 29) | struct PointXYZIRPYT
class mapOptimization (line 49) | class mapOptimization : public ParamServer
method mapOptimization (line 143) | mapOptimization()
method allocateMemory (line 173) | void allocateMemory()
method laserCloudInfoHandler (line 217) | void laserCloudInfoHandler(const lio_sam::cloud_infoConstPtr& msgIn)
method gpsHandler (line 253) | void gpsHandler(const nav_msgs::Odometry::ConstPtr& gpsMsg)
method pointAssociateToMap (line 258) | void pointAssociateToMap(PointType const * const pi, PointType * const...
method transformPointCloud (line 266) | pcl::PointCloud<PointType>::Ptr transformPointCloud(pcl::PointCloud<Po...
method pclPointTogtsamPose3 (line 288) | gtsam::Pose3 pclPointTogtsamPose3(PointTypePose thisPoint)
method trans2gtsamPose (line 294) | gtsam::Pose3 trans2gtsamPose(float transformIn[])
method pclPointToAffine3f (line 300) | Eigen::Affine3f pclPointToAffine3f(PointTypePose thisPoint)
method trans2Affine3f (line 305) | Eigen::Affine3f trans2Affine3f(float transformIn[])
method PointTypePose (line 310) | PointTypePose trans2PointTypePose(float transformIn[])
method visualizeGlobalMapThread (line 337) | void visualizeGlobalMapThread()
method publishGlobalMap (line 384) | void publishGlobalMap()
method loopClosureThread (line 442) | void loopClosureThread()
method detectLoopClosure (line 455) | bool detectLoopClosure(int *latestID, int *closestID)
method performLoopClosure (line 516) | void performLoopClosure()
method updateInitialGuess (line 612) | void updateInitialGuess()
method extractForLoopClosure (line 685) | void extractForLoopClosure()
method extractNearby (line 700) | void extractNearby()
method extractCloud (line 733) | void extractCloud(pcl::PointCloud<PointType>::Ptr cloudToExtract)
method extractSurroundingKeyFrames (line 772) | void extractSurroundingKeyFrames()
method downsampleCurrentScan (line 785) | void downsampleCurrentScan()
method updatePointAssociateToMap (line 799) | void updatePointAssociateToMap()
method cornerOptimization (line 804) | void cornerOptimization()
method surfOptimization (line 899) | void surfOptimization()
method combineOptimizationCoeffs (line 970) | void combineOptimizationCoeffs()
method LMOptimization (line 991) | bool LMOptimization(int iterCount)
method scan2MapOptimization (line 1115) | void scan2MapOptimization()
method transformUpdate (line 1147) | void transformUpdate()
method constraintTransformation (line 1179) | float constraintTransformation(float value, float limit)
method saveFrame (line 1189) | bool saveFrame()
method addOdomFactor (line 1210) | void addOdomFactor()
method addGPSFactor (line 1228) | void addGPSFactor()
method saveKeyFramesAndFactor (line 1310) | void saveKeyFramesAndFactor()
method correctPoses (line 1388) | void correctPoses()
method updatePath (line 1421) | void updatePath(const PointTypePose& pose_in)
method publishOdometry (line 1438) | void publishOdometry()
method publishFrames (line 1453) | void publishFrames()
function main (line 1492) | int main(int argc, char** argv)
Condensed preview — 27 files, each showing path, character count, and a content snippet. Download the .json file or copy for the full structured content (280K chars).
[
{
"path": ".github/stale.yml",
"chars": 682,
"preview": "# Number of days of inactivity before an issue becomes stale\ndaysUntilStale: 14\n# Number of days of inactivity before a "
},
{
"path": "CMakeLists.txt",
"chars": 2858,
"preview": "cmake_minimum_required(VERSION 2.8.3)\nproject(lio_sam)\n\nset(CMAKE_BUILD_TYPE \"Release\")\nset(CMAKE_CXX_FLAGS \"-std=c++11\""
},
{
"path": "LICENSE",
"chars": 1512,
"preview": "BSD 3-Clause License\n \nCopyright (c) 2020, Tixiao Shan\nAll rights reserved.\n\nRedistribution and use in source and binary"
},
{
"path": "README.md",
"chars": 14515,
"preview": "A simple version of system that can relocalize in a built map is developed in this repository. The sysytem is bsed on LI"
},
{
"path": "config/doc/kitti2bag/README.md",
"chars": 680,
"preview": "# kitti2bag\n\n## How to run it?\n\n```bash\nwget https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0"
},
{
"path": "config/doc/kitti2bag/kitti2bag.py",
"chars": 19929,
"preview": "#!env python\n# -*- coding: utf-8 -*-\n\nimport sys\n\ntry:\n import pykitti\nexcept ImportError as e:\n print('Could not "
},
{
"path": "config/params.yaml",
"chars": 7584,
"preview": "lio_sam:\n\n # Topics\n pointCloudTopic: \"points_raw\" # Point cloud data\n imuTopic: \"imu_correct\" "
},
{
"path": "include/utility.h",
"chars": 10656,
"preview": "#pragma once\n#ifndef _UTILITY_LIDAR_ODOMETRY_H_\n#define _UTILITY_LIDAR_ODOMETRY_H_\n\n#include <ros/ros.h>\n\n#include <std_"
},
{
"path": "launch/include/config/robot.urdf.xacro",
"chars": 1611,
"preview": "<?xml version=\"1.0\"?>\n<robot name=\"roboat\" xmlns:xacro=\"http://roboat.org\">\n <xacro:property name=\"PI\" value=\"3.1415926"
},
{
"path": "launch/include/config/rviz.rviz",
"chars": 18708,
"preview": "Panels:\n - Class: rviz/Displays\n Help Height: 70\n Name: Displays\n Property Tree Widget:\n Expanded:\n "
},
{
"path": "launch/include/module_loam.launch",
"chars": 682,
"preview": "<launch>\n\n <arg name=\"project\" default=\"lio_sam\"/>\n \n <node pkg=\"$(arg project)\" type=\"$(arg project)_imuPreint"
},
{
"path": "launch/include/module_navsat.launch",
"chars": 765,
"preview": "<launch>\n\n <arg name=\"project\" default=\"lio_sam\"/>\n\n <env name=\"ROSCONSOLE_CONFIG_FILE\" value=\"$(find lio_sam)/lau"
},
{
"path": "launch/include/module_relocolize.launch",
"chars": 683,
"preview": "<launch>\n\n <arg name=\"project\" default=\"lio_sam\"/>\n \n <node pkg=\"$(arg project)\" type=\"$(arg project)_imuPreint"
},
{
"path": "launch/include/module_robot_state_publisher.launch",
"chars": 394,
"preview": "<launch>\n\n\t<arg name=\"project\" default=\"lio_sam\"/>\n\n <param name=\"robot_description\" command=\"$(find xacro)/xacro $(f"
},
{
"path": "launch/include/module_rviz.launch",
"chars": 209,
"preview": "<launch>\n\n <arg name=\"project\" default=\"lio_sam\"/>\n\n <!--- Run Rviz-->\n <node pkg=\"rviz\" type=\"rviz\" name=\"$(ar"
},
{
"path": "launch/include/rosconsole/rosconsole_error.conf",
"chars": 75,
"preview": "# Set the default ros output to warning and higher\nlog4j.logger.ros = ERROR"
},
{
"path": "launch/include/rosconsole/rosconsole_info.conf",
"chars": 74,
"preview": "# Set the default ros output to warning and higher\nlog4j.logger.ros = INFO"
},
{
"path": "launch/include/rosconsole/rosconsole_warn.conf",
"chars": 74,
"preview": "# Set the default ros output to warning and higher\nlog4j.logger.ros = WARN"
},
{
"path": "launch/run.launch",
"chars": 578,
"preview": "<launch>\n\n <arg name=\"project\" default=\"lio_sam\"/>\n \n <!-- Parameters -->\n <rosparam file=\"$(find lio_sam)/c"
},
{
"path": "launch/run_relocalize.launch",
"chars": 584,
"preview": "<launch>\n\n <arg name=\"project\" default=\"lio_sam\"/>\n \n <!-- Parameters -->\n <rosparam file=\"$(find lio_sam)/c"
},
{
"path": "msg/cloud_info.msg",
"chars": 768,
"preview": "# Cloud Info\nHeader header \n\nint32[] startRingIndex\nint32[] endRingIndex\n\nint32[] pointColInd # point column index in r"
},
{
"path": "package.xml",
"chars": 1265,
"preview": "<?xml version=\"1.0\"?>\n<package>\n <name>lio_sam</name>\n <version>1.0.0</version>\n <description>Lidar Odometry</descrip"
},
{
"path": "src/featureExtraction.cpp",
"chars": 10003,
"preview": "#include \"utility.h\"\n#include \"lio_sam/cloud_info.h\"\n\nstruct smoothness_t{ \n float value;\n size_t ind;\n};\n\nstruct "
},
{
"path": "src/globalLocalize.cpp",
"chars": 69820,
"preview": "//\n// Created by gc2 on 20-7-20.\n//\n\n#include \"utility.h\"\n#include \"lio_sam/cloud_info.h\"\n\n#include <gtsam/geometry/Rot3"
},
{
"path": "src/imageProjection.cpp",
"chars": 20143,
"preview": "#include \"utility.h\"\n#include \"lio_sam/cloud_info.h\"\n\n// Velodyne\n//PCL自定义点类型\nstruct PointXYZIRT\n{\n PCL_ADD_POINT4D\n "
},
{
"path": "src/imuPreintegration.cpp",
"chars": 20056,
"preview": "#include \"utility.h\"\n\n#include <gtsam/geometry/Rot3.h>\n#include <gtsam/geometry/Pose3.h>\n#include <gtsam/slam/PriorFacto"
},
{
"path": "src/mapOptmization.cpp",
"chars": 65697,
"preview": "#include \"utility.h\"\n#include \"lio_sam/cloud_info.h\"\n\n#include <gtsam/geometry/Rot3.h>\n#include <gtsam/geometry/Pose3.h>"
}
]
About this extraction
This page contains the full source code of the Gaochao-hit/LIO-SAM_based_relocalization GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 27 files (264.3 KB), approximately 72.4k tokens, and a symbol index with 136 extracted functions, classes, methods, constants, and types. Use this with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.
Extracted by GitExtract — free GitHub repo to text converter for AI. Built by Nikandr Surkov.