[
  {
    "path": ".github/stale.yml",
    "content": "# Number of days of inactivity before an issue becomes stale\ndaysUntilStale: 14\n# Number of days of inactivity before a stale issue is closed\ndaysUntilClose: 1\n# Issues with these labels will never be considered stale\nexemptLabels:\n  - pinned\n  - security\n# Label to use when marking an issue as stale\nstaleLabel: stale\n# Comment to post when marking an issue as stale. Set to `false` to disable\nmarkComment: >\n  This issue has been automatically marked as stale because it has not had\n  recent activity. It will be closed if no further activity occurs. Thank you\n  for your contributions.\n# Comment to post when closing a stale issue. Set to `false` to disable\ncloseComment: false\n"
  },
  {
    "path": "CMakeLists.txt",
    "content": "cmake_minimum_required(VERSION 2.8.3)\nproject(lio_sam)\n\nset(CMAKE_BUILD_TYPE \"Release\")\nset(CMAKE_CXX_FLAGS \"-std=c++11\")\nset(CMAKE_CXX_FLAGS_RELEASE \"-O3 -Wall -g -pthread\")\n\nfind_package(catkin REQUIRED COMPONENTS\n  tf\n  roscpp\n  rospy\n  cv_bridge\n  # pcl library\n  pcl_conversions\n  # msgs\n  std_msgs\n  sensor_msgs\n  geometry_msgs\n  nav_msgs\n  message_generation\n)\n\nfind_package(OpenMP REQUIRED)\nfind_package(PCL REQUIRED QUIET)\nfind_package(OpenCV REQUIRED QUIET)\nfind_package(GTSAM REQUIRED QUIET)\n\nadd_message_files(\n  DIRECTORY msg\n  FILES\n  cloud_info.msg\n)\n\ngenerate_messages(\n  DEPENDENCIES\n  geometry_msgs\n  std_msgs\n  nav_msgs\n  sensor_msgs\n)\n\ncatkin_package(\n  INCLUDE_DIRS include\n  DEPENDS PCL GTSAM\n\n  CATKIN_DEPENDS \n  std_msgs\n  nav_msgs\n  geometry_msgs\n  sensor_msgs\n  message_runtime \n  message_generation\n)\n\n# include directories\ninclude_directories(\n\tinclude\n\t${catkin_INCLUDE_DIRS}\n\t${PCL_INCLUDE_DIRS}\n  ${OpenCV_INCLUDE_DIRS}\n\t${GTSAM_INCLUDE_DIR}\n)\n\n# link directories\nlink_directories(\n\tinclude\n\t${PCL_LIBRARY_DIRS}\n  ${OpenCV_LIBRARY_DIRS}\n  ${GTSAM_LIBRARY_DIRS}\n)\n\n###########\n## Build ##\n###########\n\n# Range Image Projection\nadd_executable(${PROJECT_NAME}_imageProjection src/imageProjection.cpp)\nadd_dependencies(${PROJECT_NAME}_imageProjection ${catkin_EXPORTED_TARGETS} ${PROJECT_NAME}_generate_messages_cpp)\ntarget_link_libraries(${PROJECT_NAME}_imageProjection ${catkin_LIBRARIES} ${PCL_LIBRARIES} ${OpenCV_LIBRARIES})\n\n# Feature Association\nadd_executable(${PROJECT_NAME}_featureExtraction src/featureExtraction.cpp)\nadd_dependencies(${PROJECT_NAME}_featureExtraction ${catkin_EXPORTED_TARGETS} ${PROJECT_NAME}_generate_messages_cpp)\ntarget_link_libraries(${PROJECT_NAME}_featureExtraction ${catkin_LIBRARIES} ${PCL_LIBRARIES} ${OpenCV_LIBRARIES})\n\n# Mapping Optimization\nadd_executable(${PROJECT_NAME}_mapOptmization 
src/mapOptmization.cpp)\nadd_dependencies(${PROJECT_NAME}_mapOptmization ${catkin_EXPORTED_TARGETS} ${PROJECT_NAME}_generate_messages_cpp)\ntarget_compile_options(${PROJECT_NAME}_mapOptmization PRIVATE ${OpenMP_CXX_FLAGS})\ntarget_link_libraries(${PROJECT_NAME}_mapOptmization ${catkin_LIBRARIES} ${PCL_LIBRARIES} ${OpenCV_LIBRARIES} ${OpenMP_CXX_FLAGS} gtsam)\n\n# IMU Preintegration\nadd_executable(${PROJECT_NAME}_imuPreintegration src/imuPreintegration.cpp)\ntarget_link_libraries(${PROJECT_NAME}_imuPreintegration ${catkin_LIBRARIES} ${PCL_LIBRARIES} ${OpenCV_LIBRARIES} gtsam)\n\n#Global Localization\nadd_executable(${PROJECT_NAME}_globalLocalize src/globalLocalize.cpp)\nadd_dependencies(${PROJECT_NAME}_globalLocalize ${catkin_EXPORTED_TARGETS} ${PROJECT_NAME}_generate_messages_cpp)\ntarget_compile_options(${PROJECT_NAME}_globalLocalize PRIVATE ${OpenMP_CXX_FLAGS})\ntarget_link_libraries(${PROJECT_NAME}_globalLocalize ${catkin_LIBRARIES} ${PCL_LIBRARIES} ${OpenCV_LIBRARIES} ${OpenMP_CXX_FLAGS} gtsam)\n"
  },
  {
    "path": "LICENSE",
    "content": "BSD 3-Clause License\n \nCopyright (c) 2020, Tixiao Shan\nAll rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are met:\n\n* Redistributions of source code must retain the above copyright notice, this\n  list of conditions and the following disclaimer.\n\n* Redistributions in binary form must reproduce the above copyright notice,\n  this list of conditions and the following disclaimer in the documentation\n  and/or other materials provided with the distribution.\n\n* Neither the name of the copyright holder nor the names of its\n  contributors may be used to endorse or promote products derived from\n  this software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"\nAND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\nIMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\nDISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\nFOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\nDAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\nSERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\nCAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\nOR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n"
  },
  {
    "path": "README.md",
    "content": "A simple version of a system that can relocalize in a built map is developed in this repository. The system is based on LIO-SAM.\nThe repository is developed based on the original version of LIO-SAM in which the GPS is not fused.\n## Run the package\n1. Make sure the map is saved in the right folder:\n```\nFirstly, you need to run LIO-SAM, and then save the map in the default folder\n```\n\n2. Run the launch file:\n```\nroslaunch lio_sam run_relocalize.launch\n```\n\n3. Play existing bag files:\n```\nrosbag play your-bag.bag\n```\n\n - A video of the demonstration of the method can be found on [YouTube](https://youtu.be/PRsH8SpuSIc)\n ## Notes\n\n  - **Initialization:** During the initialization stage, it is best to keep the robot still. Or if you play bags, first play the bag for about 0.5s, and then pause the bag until the initialization succeeds. The initialization method requires you to give it initial guesses by hand in Rviz.\n\n******************************************************************************************************************************************************************\n# LIO-SAM\n\n**A real-time lidar-inertial odometry package. We strongly recommend the users read this document thoroughly and test the package with the provided dataset first. 
A video of the demonstration of the method can be found on [YouTube](https://www.youtube.com/watch?v=A0H8CoORZJU).**\n\n<p align='center'>\n    <img src=\"./config/doc/demo.gif\" alt=\"drawing\" width=\"800\"/>\n</p>\n\n<p align='center'>\n    <img src=\"./config/doc/device-hand-2.png\" alt=\"drawing\" width=\"200\"/>\n    <img src=\"./config/doc/device-hand.png\" alt=\"drawing\" width=\"200\"/>\n    <img src=\"./config/doc/device-jackal.png\" alt=\"drawing\" width=\"200\"/>\n    <img src=\"./config/doc/device-boat.png\" alt=\"drawing\" width=\"200\"/>\n</p>\n\n## Menu\n\n  - [**System architecture**](#system-architecture)\n\n  - [**Package dependency**](#dependency)\n\n  - [**Package install**](#install)\n\n  - [**Prepare lidar data**](#prepare-lidar-data) (must read)\n\n  - [**Prepare IMU data**](#prepare-imu-data) (must read)\n\n  - [**Sample datasets**](#sample-datasets)\n\n  - [**Run the package**](#run-the-package)\n\n  - [**Other notes**](#other-notes)\n\n  - [**Paper**](#paper)\n\n  - [**TODO**](#todo)\n\n  - [**Acknowledgement**](#acknowledgement)\n\n## System architecture\n\n<p align='center'>\n    <img src=\"./config/doc/system.png\" alt=\"drawing\" width=\"800\"/>\n</p>\n\nWe design a system that maintains two graphs and runs up to 10x faster than real-time. \n  - The factor graph in \"mapOptimization.cpp\" optimizes lidar odometry factor and GPS factor. This factor graph is maintained consistently throughout the whole test. \n  - The factor graph in \"imuPreintegration.cpp\" optimizes IMU and lidar odometry factor and estimates IMU bias. 
This factor graph is reset periodically and guarantees real-time odometry estimation at IMU frequency.\n\n## Dependency\n\n- [ROS](http://wiki.ros.org/ROS/Installation) (tested with Kinetic and Melodic)\n  ```\n  sudo apt-get install -y ros-kinetic-navigation\n  sudo apt-get install -y ros-kinetic-robot-localization\n  sudo apt-get install -y ros-kinetic-robot-state-publisher\n  ```\n- [gtsam](https://github.com/borglab/gtsam/releases) (Georgia Tech Smoothing and Mapping library)\n  ```\n  wget -O ~/Downloads/gtsam.zip https://github.com/borglab/gtsam/archive/4.0.2.zip\n  cd ~/Downloads/ && unzip gtsam.zip -d ~/Downloads/\n  cd ~/Downloads/gtsam-4.0.2/\n  mkdir build && cd build\n  cmake -DGTSAM_BUILD_WITH_MARCH_NATIVE=OFF ..\n  sudo make install -j8\n  ```\n\n## Install\n\nUse the following commands to download and compile the package.\n\n```\ncd ~/catkin_ws/src\ngit clone https://github.com/TixiaoShan/LIO-SAM.git\ncd ..\ncatkin_make\n```\n\n## Prepare lidar data\n\nThe user needs to prepare the point cloud data in the correct format for cloud deskewing, which is mainly done in \"imageProjection.cpp\". The two requirements are:\n  - **Provide point time stamp**. LIO-SAM uses IMU data to perform point cloud deskew. Thus, the relative point time in a scan needs to be known. The up-to-date Velodyne ROS driver should output this information directly. Here, we assume the point time channel is called \"time.\" The definition of the point type is located at the top of the \"imageProjection.cpp.\" \"deskewPoint()\" function utilizes this relative time to obtain the transformation of this point relative to the beginning of the scan. When the lidar rotates at 10Hz, the timestamp of a point should vary between 0 and 0.1 seconds. If you are using other lidar sensors, you may need to change the name of this time channel and make sure that it is the relative time in a scan.\n  - **Provide point ring number**. 
LIO-SAM uses this information to organize the point correctly in a matrix. The ring number indicates which channel of the sensor that this point belongs to. The definition of the point type is located at the top of \"imageProjection.cpp.\" The up-to-date Velodyne ROS driver should output this information directly. Again, if you are using other lidar sensors, you may need to rename this information. Note that only mechanical lidars are supported by the package currently. \n\n## Prepare IMU data\n\n  - **IMU requirement**. Like the original LOAM implementation, LIO-SAM only works with a 9-axis IMU, which gives roll, pitch, and yaw estimation. The roll and pitch estimation is mainly used to initialize the system at the correct attitude. The yaw estimation initializes the system at the right heading when using GPS data. Theoretically, an initialization procedure like VINS-Mono will enable LIO-SAM to work with a 6-axis IMU. The performance of the system largely depends on the quality of the IMU measurements. The higher the IMU data rate, the better the system accuracy. We use Microstrain 3DM-GX5-25, which outputs data at 500Hz. We recommend using an IMU that gives at least a 200Hz output rate. Note that the internal IMU of Ouster lidar is not usable due to high vibration.\n\n  - **IMU alignment**. LIO-SAM transforms IMU raw data from the IMU frame to the Lidar frame, which follows the ROS REP-105 convention (x - forward, y - left, z - upward). To make the system function properly, the correct extrinsic transformation needs to be provided in \"params.yaml\" file. Using our setup as an example:\n    - we need to set the readings of x-z acceleration and gyro negative to transform the IMU data in the lidar frame, which is indicated by \"extrinsicRot\" in \"params.yaml.\" \n    - The transformation of attitude readings is slightly different. 
We rotate the attitude measurements by -90 degrees around \"lidar-z\" axis and get the corresponding roll, pitch, and yaw readings in the lidar frame. This transformation is indicated by \"extrinsicRPY\" in \"params.yaml.\"\n\n  - **IMU debug**. It's strongly recommended that the user uncomment the debug lines in \"imuHandler()\" of \"imageProjection.cpp\" and test the output of the transformed IMU data. The user can rotate the sensor suite to check whether the readings correspond to the sensor's movement.\n\n<p align='center'>\n    <img src=\"./config/doc/imu-transform.png\" alt=\"drawing\" width=\"800\"/>\n</p>\n\n## Sample datasets\n\n  * Download some sample datasets to test the functionality of the package. The datasets below is configured to run using the default settings:\n    - [**Walking dataset**](https://drive.google.com/file/d/1HN5fYPXEHbDq0E5JtbQPkCHIHUoTFFnN/view?usp=sharing)\n    - [**Garden dataset**](https://drive.google.com/file/d/1q6yuVhyJbkUBhut9yhfox2WdV4VZ9BZX/view?usp=sharing)\n    - [**Park dataset**](https://drive.google.com/file/d/19PZieaJaVkXDs2ZromaHTxYoq0zkiHae/view?usp=sharing)\n\n  * The datasets below need the parameters to be configured. In these datasets, the point cloud topic is \"points_raw.\" The IMU topic is \"imu_correct,\" which gives the IMU data in ROS REP105 standard. 
Because no IMU transformation is needed for this dataset, the following configurations need to be changed to run this dataset successfully:\n    - The \"imuTopic\" parameter in \"config/params.yaml\" needs to be set to \"imu_correct\".\n    - The \"extrinsicRot\" and \"extrinsicRPY\" in \"config/params.yaml\" needs to be set as identity matrices.\n      - [**Rotation dataset**](https://drive.google.com/file/d/1V4ijY4PgLdjKmdzcQ18Xu7VdcHo2UaWI/view?usp=sharing)\n      - [**Campus dataset (large)**](https://drive.google.com/file/d/1q4Sf7s2veVc7bs08Qeha3stOiwsytopL/view?usp=sharing)\n      - [**Campus dataset (small)**](https://drive.google.com/file/d/1_V-cFMTQ4RO-_16mU9YPUE8ozsPeddCv/view?usp=sharing)\n\n  * Ouster (OS1-128) dataset. No extrinsics need to be changed for this dataset if you are using the default settings. Please follow the Ouster notes below to configure the package to run with Ouster data. A video of the dataset can be found on [YouTube](https://youtu.be/O7fKgZQzkEo):\n    - [**Rooftop dataset**](https://drive.google.com/file/d/1Qy2rZdPudFhDbATPpblioBb8fRtjDGQj/view?usp=sharing)\n\n  * KITTI dataset. The extrinsics can be found in the Notes KITTI section below. To generate more bags using other KITTI raw data, you can use the python script provided in \"config/doc/kitti2bag\".\n    - [**2011_09_30_drive_0028**](https://drive.google.com/file/d/12h3ooRAZVTjoMrf3uv1_KriEXm33kHc7/view?usp=sharing)\n\n## Run the package\n\n1. Run the launch file:\n```\nroslaunch lio_sam run.launch\n```\n\n2. Play existing bag files:\n```\nrosbag play your-bag.bag -r 3\n```\n\n## Other notes\n\n  - **Loop closure:** The loop function here gives an example of proof of concept. It is directly adapted from LeGO-LOAM loop closure. For more advanced loop closure implementation, please refer to [ScanContext](https://github.com/irapkaist/SC-LeGO-LOAM). Set the \"loopClosureEnableFlag\" in \"params.yaml\" to \"true\" to test the loop closure function. 
In Rviz, uncheck \"Map (cloud)\" and check \"Map (global)\". This is because the visualized map - \"Map (cloud)\" - is simply a stack of point clouds in Rviz. Their position will not be updated after pose correction. The loop closure function here is simply adapted from LeGO-LOAM, which is an ICP-based method. Because ICP runs pretty slow, it is suggested that the playback speed is set to be \"-r 1\". You can try the Campus dataset (large) for testing. The loop closure happens when the sensor returns to the original starting location.\n\n<p align='center'>\n    <img src=\"./config/doc/loop-closure.gif\" alt=\"drawing\" width=\"400\"/>\n</p>\n\n  - **Using GPS:** The park dataset is provided for testing LIO-SAM with GPS data. This dataset is gathered by [Yewei Huang](https://robustfieldautonomylab.github.io/people.html). To enable the GPS function, change \"gpsTopic\" in \"params.yaml\" to \"odometry/gps\". In Rviz, uncheck \"Map (cloud)\" and check \"Map (global)\". Also check \"Odom GPS\", which visualizes the GPS odometry. \"gpsCovThreshold\" can be adjusted to filter bad GPS readings. \"poseCovThreshold\" can be used to adjust the frequency of adding GPS factor to the graph. For example, you will notice the trajectory is constantly corrected by GPS when you set \"poseCovThreshold\" to 1.0. Because of the heavy iSAM optimization, it's recommended that the playback speed is \"-r 1\".\n\n<p align='center'>\n    <img src=\"./config/doc/gps-demo.gif\" alt=\"drawing\" width=\"400\"/>\n</p>\n\n  - **KITTI:** Since LIO-SAM needs a high-frequency IMU to function properly, we need to use KITTI raw data for testing. One problem that remains unsolved is that the intrinsics of the IMU are unknown, which has a big impact on the accuracy of LIO-SAM. 
Download the provided sample data and make the following changes in \"params.yaml\":\n    - extrinsicTrans: [-8.086759e-01, 3.195559e-01, -7.997231e-01] \n    - extrinsicRot: [9.999976e-01, 7.553071e-04, -2.035826e-03, -7.854027e-04, 9.998898e-01, -1.482298e-02, 2.024406e-03, 1.482454e-02, 9.998881e-01]\n    - extrinsicRPY: [9.999976e-01, 7.553071e-04, -2.035826e-03, -7.854027e-04, 9.998898e-01, -1.482298e-02, 2.024406e-03, 1.482454e-02, 9.998881e-01]\n    - N_SCAN: 64\n    - downsampleRate: 2 or 4\n    - loopClosureEnableFlag: true or false\n\n<p align='center'>\n    <img src=\"./config/doc/kitti-map.png\" alt=\"drawing\" width=\"300\"/>\n    <img src=\"./config/doc/kitti-demo.gif\" alt=\"drawing\" width=\"300\"/>\n</p>\n\n  - **Ouster lidar:** To make LIO-SAM work with Ouster lidar, some preparations needs to be done on hardware and software level.\n    - Hardware:\n      - Use an external IMU. LIO-SAM does not work with the internal 6-axis IMU of Ouster lidar. You need to attach a 9-axis IMU to the lidar and perform data-gathering.\n      - Configure the driver. Change \"timestamp_mode\" in your Ouster launch file to \"TIME_FROM_PTP_1588\" so you can have ROS format timestamp for the point clouds.\n    - Software:\n      - Change \"timeField\" in \"params.yaml\" to \"t\". 
\"t\" is the point timestamp in a scan for Ouster lidar.\n      - Change \"N_SCAN\" and \"Horizon_SCAN\" in \"params.yaml\" according to your lidar, i.e., N_SCAN=128, Horizon_SCAN=1024.\n      - Comment the point definition for Velodyne on top of \"imageProjection.cpp\".\n      - Uncomment the point definition for Ouster on top of \"imageProjection.cpp\".\n      - Comment line \"deskewPoint(&thisPoint, laserCloudIn->points[i].time)\" in \"imageProjection.cpp\".\n      - Uncomment line \"deskewPoint(&thisPoint, (float)laserCloudIn->points[i].t / 1000000000.0\" in \"imageProjection.cpp\".\n      - Run \"catkin_make\" to re-compile the package.\n\n<p align='center'>\n    <img src=\"./config/doc/ouster-device.jpg\" alt=\"drawing\" width=\"300\"/>\n    <img src=\"./config/doc/ouster-demo.gif\" alt=\"drawing\" width=\"300\"/>\n</p>\n\n## Paper \n\nThank you for citing [LIO-SAM (IROS-2020)](./config/doc/paper.pdf) if you use any of this code. \n```\n@inproceedings{liosam2020shan,\n  title={LIO-SAM: Tightly-coupled Lidar Inertial Odometry via Smoothing and Mapping},\n  author={Shan, Tixiao and Englot, Brendan and Meyers, Drew and Wang, Wei and Ratti, Carlo and Rus Daniela},\n  journal={arXiv preprint arXiv:2007.00258}\n  year={2020}\n}\n```\n\nPart of the code is adapted from [LeGO-LOAM](https://github.com/RobustFieldAutonomyLab/LeGO-LOAM).\n```\n@inproceedings{legoloam2018shan,\n  title={LeGO-LOAM: Lightweight and Ground-Optimized Lidar Odometry and Mapping on Variable Terrain},\n  author={Shan, Tixiao and Englot, Brendan},\n  booktitle={IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)},\n  pages={4758-4765},\n  year={2018},\n  organization={IEEE}\n}\n```\n\n## TODO\n\n  - [ ] Add loop closure visualization and fix potential bug\n\n## Acknowledgement\n\n  - LIO-SAM is based on LOAM (J. Zhang and S. Singh. LOAM: Lidar Odometry and Mapping in Real-time).\n"
  },
  {
    "path": "config/doc/kitti2bag/README.md",
    "content": "# kitti2bag\n\n## How to run it?\n\n```bash\nwget https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0084/2011_09_26_drive_0084_sync.zip\nwget https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_drive_0084/2011_09_26_drive_0084_extract.zip\nwget https://s3.eu-central-1.amazonaws.com/avg-kitti/raw_data/2011_09_26_calib.zip\nunzip 2011_09_26_drive_0084_sync.zip\nunzip 2011_09_26_drive_0084_extract.zip\nunzip 2011_09_26_calib.zip\npython kitti2bag.py -t 2011_09_26 -r 0084 raw_synced .\n```\n\nThat's it. You have a bag that contains your data.\n\nOther source files can be found at [KITTI raw data](http://www.cvlibs.net/datasets/kitti/raw_data.php) page."
  },
  {
    "path": "config/doc/kitti2bag/kitti2bag.py",
    "content": "#!env python\n# -*- coding: utf-8 -*-\n\nimport sys\n\ntry:\n    import pykitti\nexcept ImportError as e:\n    print('Could not load module \\'pykitti\\'. Please run `pip install pykitti`')\n    sys.exit(1)\n\nimport tf\nimport os\nimport cv2\nimport rospy\nimport rosbag\nimport progressbar\nfrom tf2_msgs.msg import TFMessage\nfrom datetime import datetime\nfrom std_msgs.msg import Header\nfrom sensor_msgs.msg import CameraInfo, Imu, PointField, NavSatFix\nimport sensor_msgs.point_cloud2 as pcl2\nfrom geometry_msgs.msg import TransformStamped, TwistStamped, Transform\nfrom cv_bridge import CvBridge\nimport numpy as np\nimport argparse\n\ndef save_imu_data(bag, kitti, imu_frame_id, topic):\n    print(\"Exporting IMU\")\n    for timestamp, oxts in zip(kitti.timestamps, kitti.oxts):\n        q = tf.transformations.quaternion_from_euler(oxts.packet.roll, oxts.packet.pitch, oxts.packet.yaw)\n        imu = Imu()\n        imu.header.frame_id = imu_frame_id\n        imu.header.stamp = rospy.Time.from_sec(float(timestamp.strftime(\"%s.%f\")))\n        imu.orientation.x = q[0]\n        imu.orientation.y = q[1]\n        imu.orientation.z = q[2]\n        imu.orientation.w = q[3]\n        imu.linear_acceleration.x = oxts.packet.af\n        imu.linear_acceleration.y = oxts.packet.al\n        imu.linear_acceleration.z = oxts.packet.au\n        imu.angular_velocity.x = oxts.packet.wf\n        imu.angular_velocity.y = oxts.packet.wl\n        imu.angular_velocity.z = oxts.packet.wu\n        bag.write(topic, imu, t=imu.header.stamp)\n\ndef save_imu_data_raw(bag, kitti, imu_frame_id, topic):\n    print(\"Exporting IMU Raw\")\n    synced_path = kitti.data_path\n    unsynced_path = synced_path.replace('sync', 'extract')\n    imu_path = os.path.join(unsynced_path, 'oxts')\n\n    # read time stamp (convert to ros seconds format)\n    with open(os.path.join(imu_path, 'timestamps.txt')) as f:\n        lines = f.readlines()\n        imu_datetimes = []\n        for line in 
lines:\n            if len(line) == 1:\n                continue\n            timestamp = datetime.strptime(line[:-4], '%Y-%m-%d %H:%M:%S.%f')\n            imu_datetimes.append(float(timestamp.strftime(\"%s.%f\")))\n\n    # fix imu time using a linear model (may not be ideal, ^_^)\n    imu_index = np.asarray(range(len(imu_datetimes)), dtype=np.float64)\n    z = np.polyfit(imu_index, imu_datetimes, 1)\n    imu_datetimes_new = z[0] * imu_index + z[1]\n    imu_datetimes = imu_datetimes_new.tolist()\n\n    # get all imu data\n    imu_data_dir = os.path.join(imu_path, 'data')\n    imu_filenames = sorted(os.listdir(imu_data_dir))\n    imu_data = [None] * len(imu_filenames)\n    for i, imu_file in enumerate(imu_filenames):\n        imu_data_file = open(os.path.join(imu_data_dir, imu_file), \"r\")\n        for line in imu_data_file:\n            if len(line) == 1:\n                continue\n            stripped_line = line.strip()\n            line_list = stripped_line.split()\n            imu_data[i] = line_list\n\n    assert len(imu_datetimes) == len(imu_data)\n    \n    for timestamp, data in zip(imu_datetimes, imu_data):\n        roll, pitch, yaw = float(data[3]), float(data[4]), float(data[5]), \n        q = tf.transformations.quaternion_from_euler(roll, pitch, yaw)\n        imu = Imu()\n        imu.header.frame_id = imu_frame_id\n        imu.header.stamp = rospy.Time.from_sec(timestamp)\n        imu.orientation.x = q[0]\n        imu.orientation.y = q[1]\n        imu.orientation.z = q[2]\n        imu.orientation.w = q[3]\n        imu.linear_acceleration.x = float(data[11])\n        imu.linear_acceleration.y = float(data[12])\n        imu.linear_acceleration.z = float(data[13])\n        imu.angular_velocity.x = float(data[17])\n        imu.angular_velocity.y = float(data[18])\n        imu.angular_velocity.z = float(data[19])\n        bag.write(topic, imu, t=imu.header.stamp)\n\n        imu.header.frame_id = 'imu_enu_link'\n        bag.write('/imu_correct', imu, 
t=imu.header.stamp) # for LIO-SAM GPS\n\ndef save_dynamic_tf(bag, kitti, kitti_type, initial_time):\n    print(\"Exporting time dependent transformations\")\n    if kitti_type.find(\"raw\") != -1:\n        for timestamp, oxts in zip(kitti.timestamps, kitti.oxts):\n            tf_oxts_msg = TFMessage()\n            tf_oxts_transform = TransformStamped()\n            tf_oxts_transform.header.stamp = rospy.Time.from_sec(float(timestamp.strftime(\"%s.%f\")))\n            tf_oxts_transform.header.frame_id = 'world'\n            tf_oxts_transform.child_frame_id = 'base_link'\n\n            transform = (oxts.T_w_imu)\n            t = transform[0:3, 3]\n            q = tf.transformations.quaternion_from_matrix(transform)\n            oxts_tf = Transform()\n\n            oxts_tf.translation.x = t[0]\n            oxts_tf.translation.y = t[1]\n            oxts_tf.translation.z = t[2]\n\n            oxts_tf.rotation.x = q[0]\n            oxts_tf.rotation.y = q[1]\n            oxts_tf.rotation.z = q[2]\n            oxts_tf.rotation.w = q[3]\n\n            tf_oxts_transform.transform = oxts_tf\n            tf_oxts_msg.transforms.append(tf_oxts_transform)\n\n            bag.write('/tf', tf_oxts_msg, tf_oxts_msg.transforms[0].header.stamp)\n\n    elif kitti_type.find(\"odom\") != -1:\n        timestamps = map(lambda x: initial_time + x.total_seconds(), kitti.timestamps)\n        for timestamp, tf_matrix in zip(timestamps, kitti.T_w_cam0):\n            tf_msg = TFMessage()\n            tf_stamped = TransformStamped()\n            tf_stamped.header.stamp = rospy.Time.from_sec(timestamp)\n            tf_stamped.header.frame_id = 'world'\n            tf_stamped.child_frame_id = 'camera_left'\n            \n            t = tf_matrix[0:3, 3]\n            q = tf.transformations.quaternion_from_matrix(tf_matrix)\n            transform = Transform()\n\n            transform.translation.x = t[0]\n            transform.translation.y = t[1]\n            transform.translation.z = t[2]\n\n      
      transform.rotation.x = q[0]\n            transform.rotation.y = q[1]\n            transform.rotation.z = q[2]\n            transform.rotation.w = q[3]\n\n            tf_stamped.transform = transform\n            tf_msg.transforms.append(tf_stamped)\n\n            bag.write('/tf', tf_msg, tf_msg.transforms[0].header.stamp)\n\ndef save_camera_data(bag, kitti_type, kitti, util, bridge, camera, camera_frame_id, topic, initial_time):\n    print(\"Exporting camera {}\".format(camera))\n    if kitti_type.find(\"raw\") != -1:\n        camera_pad = '{0:02d}'.format(camera)\n        image_dir = os.path.join(kitti.data_path, 'image_{}'.format(camera_pad))\n        image_path = os.path.join(image_dir, 'data')\n        image_filenames = sorted(os.listdir(image_path))\n        with open(os.path.join(image_dir, 'timestamps.txt')) as f:\n            image_datetimes = map(lambda x: datetime.strptime(x[:-4], '%Y-%m-%d %H:%M:%S.%f'), f.readlines())\n        \n        calib = CameraInfo()\n        calib.header.frame_id = camera_frame_id\n        calib.width, calib.height = tuple(util['S_rect_{}'.format(camera_pad)].tolist())\n        calib.distortion_model = 'plumb_bob'\n        calib.K = util['K_{}'.format(camera_pad)]\n        calib.R = util['R_rect_{}'.format(camera_pad)]\n        calib.D = util['D_{}'.format(camera_pad)]\n        calib.P = util['P_rect_{}'.format(camera_pad)]\n            \n    elif kitti_type.find(\"odom\") != -1:\n        camera_pad = '{0:01d}'.format(camera)\n        image_path = os.path.join(kitti.sequence_path, 'image_{}'.format(camera_pad))\n        image_filenames = sorted(os.listdir(image_path))\n        image_datetimes = map(lambda x: initial_time + x.total_seconds(), kitti.timestamps)\n        \n        calib = CameraInfo()\n        calib.header.frame_id = camera_frame_id\n        calib.P = util['P{}'.format(camera_pad)]\n    \n    iterable = zip(image_datetimes, image_filenames)\n    bar = progressbar.ProgressBar()\n    for dt, filename in 
bar(iterable):\n        image_filename = os.path.join(image_path, filename)\n        cv_image = cv2.imread(image_filename)\n        calib.height, calib.width = cv_image.shape[:2]\n        if camera in (0, 1):\n            cv_image = cv2.cvtColor(cv_image, cv2.COLOR_BGR2GRAY)\n        encoding = \"mono8\" if camera in (0, 1) else \"bgr8\"\n        image_message = bridge.cv2_to_imgmsg(cv_image, encoding=encoding)\n        image_message.header.frame_id = camera_frame_id\n        if kitti_type.find(\"raw\") != -1:\n            image_message.header.stamp = rospy.Time.from_sec(float(datetime.strftime(dt, \"%s.%f\")))\n            topic_ext = \"/image_raw\"\n        elif kitti_type.find(\"odom\") != -1:\n            image_message.header.stamp = rospy.Time.from_sec(dt)\n            topic_ext = \"/image_rect\"\n        calib.header.stamp = image_message.header.stamp\n        bag.write(topic + topic_ext, image_message, t = image_message.header.stamp)\n        bag.write(topic + '/camera_info', calib, t = calib.header.stamp) \n        \ndef save_velo_data(bag, kitti, velo_frame_id, topic):\n    print(\"Exporting velodyne data\")\n    velo_path = os.path.join(kitti.data_path, 'velodyne_points')\n    velo_data_dir = os.path.join(velo_path, 'data')\n    velo_filenames = sorted(os.listdir(velo_data_dir))\n    with open(os.path.join(velo_path, 'timestamps.txt')) as f:\n        lines = f.readlines()\n        velo_datetimes = []\n        for line in lines:\n            if len(line) == 1:\n                continue\n            dt = datetime.strptime(line[:-4], '%Y-%m-%d %H:%M:%S.%f')\n            velo_datetimes.append(dt)\n\n    iterable = zip(velo_datetimes, velo_filenames)\n    bar = progressbar.ProgressBar()\n\n    count = 0\n\n    for dt, filename in bar(iterable):\n        if dt is None:\n            continue\n\n        velo_filename = os.path.join(velo_data_dir, filename)\n\n        # read binary data\n        scan = (np.fromfile(velo_filename, dtype=np.float32)).reshape(-1, 
4)\n\n        # get ring channel\n        depth = np.linalg.norm(scan, 2, axis=1)\n        pitch = np.arcsin(scan[:, 2] / depth) # arcsin(z, depth)\n        fov_down = -24.8 / 180.0 * np.pi\n        fov = (abs(-24.8) + abs(2.0)) / 180.0 * np.pi\n        proj_y = (pitch + abs(fov_down)) / fov  # in [0.0, 1.0]\n        proj_y *= 64  # in [0.0, H]\n        proj_y = np.floor(proj_y)\n        proj_y = np.minimum(64 - 1, proj_y)\n        proj_y = np.maximum(0, proj_y).astype(np.int32)  # in [0,H-1]\n        proj_y = proj_y.reshape(-1, 1)\n        scan = np.concatenate((scan,proj_y), axis=1)\n\n        # create header\n        header = Header()\n        header.frame_id = velo_frame_id\n        header.stamp = rospy.Time.from_sec(float(datetime.strftime(dt, \"%s.%f\")))\n\n        # fill pcl msg\n        fields = [PointField('x', 0, PointField.FLOAT32, 1),\n                  PointField('y', 4, PointField.FLOAT32, 1),\n                  PointField('z', 8, PointField.FLOAT32, 1),\n                  PointField('intensity', 12, PointField.FLOAT32, 1),\n                  PointField('ring', 16, PointField.UINT16, 1)]\n        pcl_msg = pcl2.create_cloud(header, fields, scan)\n        pcl_msg.is_dense = True\n        # print(pcl_msg)\n\n        bag.write(topic, pcl_msg, t=pcl_msg.header.stamp)\n\n        # count += 1\n        # if count > 200:\n        #     break\n\ndef get_static_transform(from_frame_id, to_frame_id, transform):\n    t = transform[0:3, 3]\n    q = tf.transformations.quaternion_from_matrix(transform)\n    tf_msg = TransformStamped()\n    tf_msg.header.frame_id = from_frame_id\n    tf_msg.child_frame_id = to_frame_id\n    tf_msg.transform.translation.x = float(t[0])\n    tf_msg.transform.translation.y = float(t[1])\n    tf_msg.transform.translation.z = float(t[2])\n    tf_msg.transform.rotation.x = float(q[0])\n    tf_msg.transform.rotation.y = float(q[1])\n    tf_msg.transform.rotation.z = float(q[2])\n    tf_msg.transform.rotation.w = float(q[3])\n    return 
tf_msg\n\n\ndef inv(transform):\n    \"Invert rigid body transformation matrix\"\n    R = transform[0:3, 0:3]\n    t = transform[0:3, 3]\n    t_inv = -1 * R.T.dot(t)\n    transform_inv = np.eye(4)\n    transform_inv[0:3, 0:3] = R.T\n    transform_inv[0:3, 3] = t_inv\n    return transform_inv\n\n\ndef save_static_transforms(bag, transforms, timestamps):\n    print(\"Exporting static transformations\")\n    tfm = TFMessage()\n    for transform in transforms:\n        t = get_static_transform(from_frame_id=transform[0], to_frame_id=transform[1], transform=transform[2])\n        tfm.transforms.append(t)\n    for timestamp in timestamps:\n        time = rospy.Time.from_sec(float(timestamp.strftime(\"%s.%f\")))\n        for i in range(len(tfm.transforms)):\n            tfm.transforms[i].header.stamp = time\n        bag.write('/tf_static', tfm, t=time)\n\n\ndef save_gps_fix_data(bag, kitti, gps_frame_id, topic):\n    for timestamp, oxts in zip(kitti.timestamps, kitti.oxts):\n        navsatfix_msg = NavSatFix()\n        navsatfix_msg.header.frame_id = gps_frame_id\n        navsatfix_msg.header.stamp = rospy.Time.from_sec(float(timestamp.strftime(\"%s.%f\")))\n        navsatfix_msg.latitude = oxts.packet.lat\n        navsatfix_msg.longitude = oxts.packet.lon\n        navsatfix_msg.altitude = oxts.packet.alt\n        navsatfix_msg.status.service = 1\n        bag.write(topic, navsatfix_msg, t=navsatfix_msg.header.stamp)\n\n\ndef save_gps_vel_data(bag, kitti, gps_frame_id, topic):\n    for timestamp, oxts in zip(kitti.timestamps, kitti.oxts):\n        twist_msg = TwistStamped()\n        twist_msg.header.frame_id = gps_frame_id\n        twist_msg.header.stamp = rospy.Time.from_sec(float(timestamp.strftime(\"%s.%f\")))\n        twist_msg.twist.linear.x = oxts.packet.vf\n        twist_msg.twist.linear.y = oxts.packet.vl\n        twist_msg.twist.linear.z = oxts.packet.vu\n        twist_msg.twist.angular.x = oxts.packet.wf\n        twist_msg.twist.angular.y = oxts.packet.wl\n       
 twist_msg.twist.angular.z = oxts.packet.wu\n        bag.write(topic, twist_msg, t=twist_msg.header.stamp)\n\n\nif __name__ == \"__main__\":\n    parser = argparse.ArgumentParser(description = \"Convert KITTI dataset to ROS bag file the easy way!\")\n    # Accepted argument values\n    kitti_types = [\"raw_synced\", \"odom_color\", \"odom_gray\"]\n    odometry_sequences = []\n    for s in range(22):\n        odometry_sequences.append(str(s).zfill(2))\n    \n    parser.add_argument(\"kitti_type\", choices = kitti_types, help = \"KITTI dataset type\")\n    parser.add_argument(\"dir\", nargs = \"?\", default = os.getcwd(), help = \"base directory of the dataset, if no directory passed the deafult is current working directory\")\n    parser.add_argument(\"-t\", \"--date\", help = \"date of the raw dataset (i.e. 2011_09_26), option is only for RAW datasets.\")\n    parser.add_argument(\"-r\", \"--drive\", help = \"drive number of the raw dataset (i.e. 0001), option is only for RAW datasets.\")\n    parser.add_argument(\"-s\", \"--sequence\", choices = odometry_sequences,help = \"sequence of the odometry dataset (between 00 - 21), option is only for ODOMETRY datasets.\")\n    args = parser.parse_args()\n\n    bridge = CvBridge()\n    compression = rosbag.Compression.NONE\n    # compression = rosbag.Compression.BZ2\n    # compression = rosbag.Compression.LZ4\n    \n    # CAMERAS\n    cameras = [\n        (0, 'camera_gray_left', '/kitti/camera_gray_left'),\n        (1, 'camera_gray_right', '/kitti/camera_gray_right'),\n        (2, 'camera_color_left', '/kitti/camera_color_left'),\n        (3, 'camera_color_right', '/kitti/camera_color_right')\n    ]\n\n    if args.kitti_type.find(\"raw\") != -1:\n    \n        if args.date == None:\n            print(\"Date option is not given. 
It is mandatory for raw dataset.\")\n            print(\"Usage for raw dataset: kitti2bag raw_synced [dir] -t <date> -r <drive>\")\n            sys.exit(1)\n        elif args.drive == None:\n            print(\"Drive option is not given. It is mandatory for raw dataset.\")\n            print(\"Usage for raw dataset: kitti2bag raw_synced [dir] -t <date> -r <drive>\")\n            sys.exit(1)\n        \n        bag = rosbag.Bag(\"kitti_{}_drive_{}_{}.bag\".format(args.date, args.drive, args.kitti_type[4:]), 'w', compression=compression)\n        kitti = pykitti.raw(args.dir, args.date, args.drive)\n        if not os.path.exists(kitti.data_path):\n            print('Path {} does not exists. Exiting.'.format(kitti.data_path))\n            sys.exit(1)\n\n        if len(kitti.timestamps) == 0:\n            print('Dataset is empty? Exiting.')\n            sys.exit(1)\n\n        try:\n            # IMU\n            imu_frame_id = 'imu_link'\n            imu_topic = '/kitti/oxts/imu'\n            imu_raw_topic = '/imu_raw'\n            gps_fix_topic = '/gps/fix'\n            gps_vel_topic = '/gps/vel'\n            velo_frame_id = 'velodyne'\n            velo_topic = '/points_raw'\n\n            T_base_link_to_imu = np.eye(4, 4)\n            T_base_link_to_imu[0:3, 3] = [-2.71/2.0-0.05, 0.32, 0.93]\n\n            # tf_static\n            transforms = [\n                ('base_link', imu_frame_id, T_base_link_to_imu),\n                (imu_frame_id, velo_frame_id, inv(kitti.calib.T_velo_imu)),\n                (imu_frame_id, cameras[0][1], inv(kitti.calib.T_cam0_imu)),\n                (imu_frame_id, cameras[1][1], inv(kitti.calib.T_cam1_imu)),\n                (imu_frame_id, cameras[2][1], inv(kitti.calib.T_cam2_imu)),\n                (imu_frame_id, cameras[3][1], inv(kitti.calib.T_cam3_imu))\n            ]\n\n            util = pykitti.utils.read_calib_file(os.path.join(kitti.calib_path, 'calib_cam_to_cam.txt'))\n\n            # Export\n            # 
save_static_transforms(bag, transforms, kitti.timestamps)\n            # save_dynamic_tf(bag, kitti, args.kitti_type, initial_time=None)\n            # save_imu_data(bag, kitti, imu_frame_id, imu_topic)\n            save_imu_data_raw(bag, kitti, imu_frame_id, imu_raw_topic)\n            save_gps_fix_data(bag, kitti, imu_frame_id, gps_fix_topic)\n            save_gps_vel_data(bag, kitti, imu_frame_id, gps_vel_topic)\n            for camera in cameras:\n                save_camera_data(bag, args.kitti_type, kitti, util, bridge, camera=camera[0], camera_frame_id=camera[1], topic=camera[2], initial_time=None)\n                break\n            save_velo_data(bag, kitti, velo_frame_id, velo_topic)\n\n        finally:\n            print(\"## OVERVIEW ##\")\n            print(bag)\n            bag.close()\n            \n    elif args.kitti_type.find(\"odom\") != -1:\n        \n        if args.sequence == None:\n            print(\"Sequence option is not given. It is mandatory for odometry dataset.\")\n            print(\"Usage for odometry dataset: kitti2bag {odom_color, odom_gray} [dir] -s <sequence>\")\n            sys.exit(1)\n            \n        bag = rosbag.Bag(\"kitti_data_odometry_{}_sequence_{}.bag\".format(args.kitti_type[5:],args.sequence), 'w', compression=compression)\n        \n        kitti = pykitti.odometry(args.dir, args.sequence)\n        if not os.path.exists(kitti.sequence_path):\n            print('Path {} does not exists. Exiting.'.format(kitti.sequence_path))\n            sys.exit(1)\n\n        kitti.load_calib()         \n        kitti.load_timestamps() \n             \n        if len(kitti.timestamps) == 0:\n            print('Dataset is empty? 
Exiting.')\n            sys.exit(1)\n            \n        if args.sequence in odometry_sequences[:11]:\n            print(\"Odometry dataset sequence {} has ground truth information (poses).\".format(args.sequence))\n            kitti.load_poses()\n\n        try:\n            util = pykitti.utils.read_calib_file(os.path.join(args.dir,'sequences',args.sequence, 'calib.txt'))\n            current_epoch = (datetime.utcnow() - datetime(1970, 1, 1)).total_seconds()\n            # Export\n            if args.kitti_type.find(\"gray\") != -1:\n                used_cameras = cameras[:2]\n            elif args.kitti_type.find(\"color\") != -1:\n                used_cameras = cameras[-2:]\n\n            save_dynamic_tf(bag, kitti, args.kitti_type, initial_time=current_epoch)\n            for camera in used_cameras:\n                save_camera_data(bag, args.kitti_type, kitti, util, bridge, camera=camera[0], camera_frame_id=camera[1], topic=camera[2], initial_time=current_epoch)\n\n        finally:\n            print(\"## OVERVIEW ##\")\n            print(bag)\n            bag.close()\n\n"
  },
  {
    "path": "config/params.yaml",
    "content": "lio_sam:\n\n  # Topics\n  pointCloudTopic: \"points_raw\"               # Point cloud data\n  imuTopic: \"imu_correct\"                         # IMU data\n  odomTopic: \"odometry/imu\"                   # IMU pre-preintegration odometry, same frequency as IMU\n  gpsTopic: \"odometry/gpsz\"                   # GPS odometry topic from navsat, see module_navsat.launch file\n\n  # GPS Settings\n  useImuHeadingInitialization: true           # if using GPS data, set to \"true\"\n  useGpsElevation: false                      # if GPS elevation is bad, set to \"false\"\n  gpsCovThreshold: 2.0                        # m^2, threshold for using GPS data\n  poseCovThreshold: 25.0                      # m^2, threshold for using GPS data\n  \n  # Export settings\n  savePCD: false                              # https://github.com/TixiaoShan/LIO-SAM/issues/3\n  savePCDDirectory: \"/Downloads/LOAM/\"        # in your home folder, starts and ends with \"/\". Warning: the code deletes \"LOAM\" folder then recreates it. See \"mapOptimization\" for implementation\n\n  # Sensor Settings\n  N_SCAN: 16                                  # number of lidar channel (i.e., 16, 32, 64, 128)\n  Horizon_SCAN: 1800                          # lidar horizontal resolution (Velodyne:1800, Ouster:512,1024,2048)\n  timeField: \"time\"                           # point timestamp field, Velodyne - \"time\", Ouster - \"t\"\n  downsampleRate: 1                           # default: 1. Downsample your data if too many points. 
i.e., 16 = 64 / 4, 16 = 16 / 1 \n\n  # IMU Settings\n  imuAccNoise: 3.9939570888238808e-03\n  imuGyrNoise: 1.5636343949698187e-03\n  imuAccBiasN: 6.4356659353532566e-05\n  imuGyrBiasN: 3.5640318696367613e-05\n  imuGravity: 9.80511\n\n  # Extrinsics (lidar -> IMU)\n  extrinsicTrans: [0.0, 0.0, 0.0]\n  extrinsicRot: [1, 0, 0,\n                  0, 1, 0,\n                  0, 0, 1]\n  extrinsicRPY: [1,  0, 0,\n                 0, 1, 0,\n                  0, 0, 1]\n  # extrinsicRot: [1, 0, 0,\n  #                 0, 1, 0,\n  #                 0, 0, 1]\n  # extrinsicRPY: [1, 0, 0,\n  #                 0, 1, 0,\n  #                 0, 0, 1]\n\n  # LOAM feature threshold\n  edgeThreshold: 1.0\n  surfThreshold: 0.1\n  edgeFeatureMinValidNum: 10\n  surfFeatureMinValidNum: 100\n\n  # voxel filter paprams\n  odometrySurfLeafSize: 0.4                     # default: 0.4\n  mappingCornerLeafSize: 0.2                    # default: 0.2\n  mappingSurfLeafSize: 0.4                      # default: 0.4\n\n  # robot motion constraint (in case you are using a 2D robot)\n  z_tollerance: 1000                            # meters\n  rotation_tollerance: 1000                     # radians\n\n  # CPU Params\n  numberOfCores: 4                              # number of cores for mapping optimization\n  mappingProcessInterval: 0.15                  # seconds, regulate mapping frequency\n\n  # Surrounding map\n  surroundingkeyframeAddingDistThreshold: 1.0   # meters, regulate keyframe adding threshold\n  surroundingkeyframeAddingAngleThreshold: 0.2  # radians, regulate keyframe adding threshold\n  surroundingKeyframeDensity: 2.0               # meters, downsample surrounding keyframe poses   \n  surroundingKeyframeSearchRadius: 50.0         # meters, within n meters scan-to-map optimization (when loop closure disabled)\n\n  # Loop closure\n  loopClosureEnableFlag: false\n  surroundingKeyframeSize: 25                   # submap size (when loop closure enabled)\n  historyKeyframeSearchRadius: 15.0  
           # meters, key frame that is within n meters from current pose will be considerd for loop closure\n  historyKeyframeSearchTimeDiff: 30.0           # seconds, key frame that is n seconds older will be considered for loop closure\n  historyKeyframeSearchNum: 25                  # number of hostory key frames will be fused into a submap for loop closure\n  historyKeyframeFitnessScore: 0.3              # icp threshold, the smaller the better alignment\n\n  # Visualization\n  globalMapVisualizationSearchRadius: 1000.0    # meters, global map visualization radius\n  globalMapVisualizationPoseDensity: 10.0       # meters, global map visualization keyframe density\n  globalMapVisualizationLeafSize: 1.0           # meters, global map visualization cloud density\n\n\n\n\n# Navsat (convert GPS coordinates to Cartesian)\nnavsat:\n  frequency: 50\n  wait_for_datum: false\n  delay: 0.0\n  magnetic_declination_radians: 0\n  yaw_offset: 0\n  zero_altitude: true\n  broadcast_utm_transform: false\n  broadcast_utm_transform_as_parent_frame: false\n  publish_filtered_gps: false\n\n# EKF for Navsat\nekf_gps:\n  publish_tf: false\n  map_frame: map\n  odom_frame: odom\n  base_link_frame: base_link\n  world_frame: odom\n\n  frequency: 50\n  two_d_mode: false\n  sensor_timeout: 0.01\n  # -------------------------------------\n  # External IMU:\n  # -------------------------------------\n  imu0: imu_correct\n  # make sure the input is aligned with ROS REP105. \"imu_correct\" is manually transformed by myself. 
EKF can also transform the data using tf between your imu and base_link\n  imu0_config: [false, false, false,\n                true,  true,  true,\n                false, false, false,\n                false, false, true,\n                true,  true,  true]\n  imu0_differential: false\n  imu0_queue_size: 50 \n  imu0_remove_gravitational_acceleration: true\n  # -------------------------------------\n  # Odometry (From Navsat):\n  # -------------------------------------\n  odom0: odometry/gps\n  odom0_config: [true,  true,  true,\n                 false, false, false,\n                 false, false, false,\n                 false, false, false,\n                 false, false, false]\n  odom0_differential: false\n  odom0_queue_size: 10\n\n  #                            x     y     z     r     p     y   x_dot  y_dot  z_dot  r_dot p_dot y_dot x_ddot y_ddot z_ddot\n  process_noise_covariance: [  1.0,  0,    0,    0,    0,    0,    0,     0,     0,     0,    0,    0,    0,    0,      0,\n                               0,    1.0,  0,    0,    0,    0,    0,     0,     0,     0,    0,    0,    0,    0,      0,\n                               0,    0,    10.0, 0,    0,    0,    0,     0,     0,     0,    0,    0,    0,    0,      0,\n                               0,    0,    0,    0.03, 0,    0,    0,     0,     0,     0,    0,    0,    0,    0,      0,\n                               0,    0,    0,    0,    0.03, 0,    0,     0,     0,     0,    0,    0,    0,    0,      0,\n                               0,    0,    0,    0,    0,    0.1,  0,     0,     0,     0,    0,    0,    0,    0,      0,\n                               0,    0,    0,    0,    0,    0,    0.25,  0,     0,     0,    0,    0,    0,    0,      0,\n                               0,    0,    0,    0,    0,    0,    0,     0.25,  0,     0,    0,    0,    0,    0,      0,\n                               0,    0,    0,    0,    0,    0,    0,     0,     0.04,  0,    0,    0,    0,    0,      0,\n           
                    0,    0,    0,    0,    0,    0,    0,     0,     0,     0.01, 0,    0,    0,    0,      0,\n                               0,    0,    0,    0,    0,    0,    0,     0,     0,     0,    0.01, 0,    0,    0,      0,\n                               0,    0,    0,    0,    0,    0,    0,     0,     0,     0,    0,    0.5,  0,    0,      0,\n                               0,    0,    0,    0,    0,    0,    0,     0,     0,     0,    0,    0,    0.01, 0,      0,\n                               0,    0,    0,    0,    0,    0,    0,     0,     0,     0,    0,    0,    0,    0.01,   0,\n                               0,    0,    0,    0,    0,    0,    0,     0,     0,     0,    0,    0,    0,    0,      0.015]\n"
  },
  {
    "path": "include/utility.h",
    "content": "#pragma once\n#ifndef _UTILITY_LIDAR_ODOMETRY_H_\n#define _UTILITY_LIDAR_ODOMETRY_H_\n\n#include <ros/ros.h>\n\n#include <std_msgs/Header.h>\n#include <sensor_msgs/Imu.h>\n#include <sensor_msgs/PointCloud2.h>\n#include <sensor_msgs/NavSatFix.h>\n#include <nav_msgs/Odometry.h>\n#include <nav_msgs/Path.h>\n\n#include <opencv/cv.h>\n\n#include <pcl/point_cloud.h>\n#include <pcl/point_types.h>\n#include <pcl/search/impl/search.hpp>\n#include <pcl/range_image/range_image.h>\n#include <pcl/kdtree/kdtree_flann.h>\n#include <pcl/common/common.h>\n#include <pcl/common/transforms.h>\n#include <pcl/registration/icp.h>\n#include <pcl/registration/ndt.h>\n#include <pcl/io/pcd_io.h>\n#include <pcl/filters/filter.h>\n#include <pcl/filters/voxel_grid.h>\n#include <pcl/filters/crop_box.h> \n#include <pcl_conversions/pcl_conversions.h>\n\n#include <tf/LinearMath/Quaternion.h>\n#include <tf/transform_listener.h>\n#include <tf/transform_datatypes.h>\n#include <tf/transform_broadcaster.h>\n \n#include <vector>\n#include <cmath>\n#include <algorithm>\n#include <queue>\n#include <deque>\n#include <iostream>\n#include <fstream>\n#include <ctime>\n#include <cfloat>\n#include <iterator>\n#include <sstream>\n#include <string>\n#include <limits>\n#include <iomanip>\n#include <array>\n#include <thread>\n#include <mutex>\n\nusing namespace std;\n\ntypedef pcl::PointXYZI PointType;\n\nclass ParamServer\n{\npublic:\n\n    ros::NodeHandle nh;\n\n    std::string robot_id;\n\n    string pointCloudTopic;\n    string imuTopic;\n    string odomTopic;\n    string gpsTopic;\n\n    // GPS Settings\n    bool useImuHeadingInitialization;\n    bool useGpsElevation;\n    float gpsCovThreshold;\n    float poseCovThreshold;\n\n    // Save pcd\n    bool savePCD;\n    string savePCDDirectory;\n\n    // Velodyne Sensor Configuration: Velodyne\n    int N_SCAN;\n    int Horizon_SCAN;\n    string timeField;\n    int downsampleRate;\n\n    // IMU\n    float imuAccNoise;\n    float imuGyrNoise;\n    
float imuAccBiasN;\n    float imuGyrBiasN;\n    float imuGravity;\n    vector<double> extRotV;\n    vector<double> extRPYV;\n    vector<double> extTransV;\n    Eigen::Matrix3d extRot;\n    Eigen::Matrix3d extRPY;\n    Eigen::Vector3d extTrans;\n    Eigen::Quaterniond extQRPY;\n\n    // LOAM\n    float edgeThreshold;\n    float surfThreshold;\n    int edgeFeatureMinValidNum;\n    int surfFeatureMinValidNum;\n\n    // voxel filter paprams\n    float odometrySurfLeafSize;\n    float mappingCornerLeafSize;\n    float mappingSurfLeafSize ;\n\n    float z_tollerance; \n    float rotation_tollerance;\n\n    // CPU Params\n    int numberOfCores;\n    double mappingProcessInterval;\n\n    // Surrounding map\n    float surroundingkeyframeAddingDistThreshold; \n    float surroundingkeyframeAddingAngleThreshold; \n    float surroundingKeyframeDensity;\n    float surroundingKeyframeSearchRadius;\n    \n    // Loop closure\n    bool loopClosureEnableFlag;\n    int   surroundingKeyframeSize;\n    float historyKeyframeSearchRadius;\n    float historyKeyframeSearchTimeDiff;\n    int   historyKeyframeSearchNum;\n    float historyKeyframeFitnessScore;\n\n    // global map visualization radius\n    float globalMapVisualizationSearchRadius;\n    float globalMapVisualizationPoseDensity;\n    float globalMapVisualizationLeafSize;\n\n    ParamServer()\n    {\n        nh.param<std::string>(\"/robot_id\", robot_id, \"roboat\");\n\n        nh.param<std::string>(\"lio_sam/pointCloudTopic\", pointCloudTopic, \"points_raw\");\n        nh.param<std::string>(\"lio_sam/imuTopic\", imuTopic, \"imu_correct\");\n        nh.param<std::string>(\"lio_sam/odomTopic\", odomTopic, \"odometry/imu\");\n        nh.param<std::string>(\"lio_sam/gpsTopic\", gpsTopic, \"odometry/gps\");\n\n        nh.param<bool>(\"lio_sam/useImuHeadingInitialization\", useImuHeadingInitialization, false);\n        nh.param<bool>(\"lio_sam/useGpsElevation\", useGpsElevation, false);\n        
nh.param<float>(\"lio_sam/gpsCovThreshold\", gpsCovThreshold, 2.0);\n        nh.param<float>(\"lio_sam/poseCovThreshold\", poseCovThreshold, 25.0);\n\n        nh.param<bool>(\"lio_sam/savePCD\", savePCD, false);\n        nh.param<std::string>(\"lio_sam/savePCDDirectory\", savePCDDirectory, \"/Downloads/LOAM/\");\n\n        nh.param<int>(\"lio_sam/N_SCAN\", N_SCAN, 16);\n        nh.param<int>(\"lio_sam/Horizon_SCAN\", Horizon_SCAN, 1800);\n        nh.param<std::string>(\"lio_sam/timeField\", timeField, \"time\");\n        nh.param<int>(\"lio_sam/downsampleRate\", downsampleRate, 1);\n\n        nh.param<float>(\"lio_sam/imuAccNoise\", imuAccNoise, 0.01);\n        nh.param<float>(\"lio_sam/imuGyrNoise\", imuGyrNoise, 0.001);\n        nh.param<float>(\"lio_sam/imuAccBiasN\", imuAccBiasN, 0.0002);\n        nh.param<float>(\"lio_sam/imuGyrBiasN\", imuGyrBiasN, 0.00003);\n        nh.param<float>(\"lio_sam/imuGravity\", imuGravity, 9.80511);\n        nh.param<vector<double>>(\"lio_sam/extrinsicRot\", extRotV, vector<double>());\n        nh.param<vector<double>>(\"lio_sam/extrinsicRPY\", extRPYV, vector<double>());\n        nh.param<vector<double>>(\"lio_sam/extrinsicTrans\", extTransV, vector<double>());\n        extRot = Eigen::Map<const Eigen::Matrix<double, -1, -1, Eigen::RowMajor>>(extRotV.data(), 3, 3);\n        extRPY = Eigen::Map<const Eigen::Matrix<double, -1, -1, Eigen::RowMajor>>(extRPYV.data(), 3, 3);\n        extTrans = Eigen::Map<const Eigen::Matrix<double, -1, -1, Eigen::RowMajor>>(extTransV.data(), 3, 1);\n        extQRPY = Eigen::Quaterniond(extRPY);\n\n        nh.param<float>(\"lio_sam/edgeThreshold\", edgeThreshold, 0.1);\n        nh.param<float>(\"lio_sam/surfThreshold\", surfThreshold, 0.1);\n        nh.param<int>(\"lio_sam/edgeFeatureMinValidNum\", edgeFeatureMinValidNum, 10);\n        nh.param<int>(\"lio_sam/surfFeatureMinValidNum\", surfFeatureMinValidNum, 100);\n\n        nh.param<float>(\"lio_sam/odometrySurfLeafSize\", odometrySurfLeafSize, 
0.2);\n        nh.param<float>(\"lio_sam/mappingCornerLeafSize\", mappingCornerLeafSize, 0.2);\n        nh.param<float>(\"lio_sam/mappingSurfLeafSize\", mappingSurfLeafSize, 0.2);\n\n        nh.param<float>(\"lio_sam/z_tollerance\", z_tollerance, FLT_MAX);\n        nh.param<float>(\"lio_sam/rotation_tollerance\", rotation_tollerance, FLT_MAX);\n\n        nh.param<int>(\"lio_sam/numberOfCores\", numberOfCores, 2);\n        nh.param<double>(\"lio_sam/mappingProcessInterval\", mappingProcessInterval, 0.15);\n\n        nh.param<float>(\"lio_sam/surroundingkeyframeAddingDistThreshold\", surroundingkeyframeAddingDistThreshold, 1.0);\n        nh.param<float>(\"lio_sam/surroundingkeyframeAddingAngleThreshold\", surroundingkeyframeAddingAngleThreshold, 0.2);\n        nh.param<float>(\"lio_sam/surroundingKeyframeDensity\", surroundingKeyframeDensity, 1.0);\n        nh.param<float>(\"lio_sam/surroundingKeyframeSearchRadius\", surroundingKeyframeSearchRadius, 50.0);\n\n        nh.param<bool>(\"lio_sam/loopClosureEnableFlag\", loopClosureEnableFlag, false);\n        nh.param<int>(\"lio_sam/surroundingKeyframeSize\", surroundingKeyframeSize, 50);\n        nh.param<float>(\"lio_sam/historyKeyframeSearchRadius\", historyKeyframeSearchRadius, 10.0);\n        nh.param<float>(\"lio_sam/historyKeyframeSearchTimeDiff\", historyKeyframeSearchTimeDiff, 30.0);\n        nh.param<int>(\"lio_sam/historyKeyframeSearchNum\", historyKeyframeSearchNum, 25);\n        nh.param<float>(\"lio_sam/historyKeyframeFitnessScore\", historyKeyframeFitnessScore, 0.3);\n\n        nh.param<float>(\"lio_sam/globalMapVisualizationSearchRadius\", globalMapVisualizationSearchRadius, 1e3);\n        nh.param<float>(\"lio_sam/globalMapVisualizationPoseDensity\", globalMapVisualizationPoseDensity, 10.0);\n        nh.param<float>(\"lio_sam/globalMapVisualizationLeafSize\", globalMapVisualizationLeafSize, 1.0);\n\n        usleep(100);\n    }\n    //gc: tranform to lidar frame\n    sensor_msgs::Imu imuConverter(const 
sensor_msgs::Imu& imu_in)\n    {\n        sensor_msgs::Imu imu_out = imu_in;\n        // rotate acceleration\n        Eigen::Vector3d acc(imu_in.linear_acceleration.x, imu_in.linear_acceleration.y, imu_in.linear_acceleration.z);\n        acc = extRot * acc;\n        imu_out.linear_acceleration.x = acc.x();\n        imu_out.linear_acceleration.y = acc.y();\n        imu_out.linear_acceleration.z = acc.z();\n        // rotate gyroscope\n        Eigen::Vector3d gyr(imu_in.angular_velocity.x, imu_in.angular_velocity.y, imu_in.angular_velocity.z);\n        gyr = extRot * gyr;\n        imu_out.angular_velocity.x = gyr.x();\n        imu_out.angular_velocity.y = gyr.y();\n        imu_out.angular_velocity.z = gyr.z();\n        // rotate roll pitch yaw\n        Eigen::Quaterniond q_from(imu_in.orientation.w, imu_in.orientation.x, imu_in.orientation.y, imu_in.orientation.z);\n        Eigen::Quaterniond q_final = q_from * extQRPY;\n        imu_out.orientation.x = q_final.x();\n        imu_out.orientation.y = q_final.y();\n        imu_out.orientation.z = q_final.z();\n        imu_out.orientation.w = q_final.w();\n\n        if (sqrt(q_final.x()*q_final.x() + q_final.y()*q_final.y() + q_final.z()*q_final.z() + q_final.w()*q_final.w()) < 0.1)\n        {\n            ROS_ERROR(\"Invalid quaternion, please use a 9-axis IMU!\");\n            ros::shutdown();\n        }\n\n        return imu_out;\n    }\n};\n\n\nsensor_msgs::PointCloud2 publishCloud(ros::Publisher *thisPub, pcl::PointCloud<PointType>::Ptr thisCloud, ros::Time thisStamp, std::string thisFrame)\n{\n    sensor_msgs::PointCloud2 tempCloud;\n    pcl::toROSMsg(*thisCloud, tempCloud);\n    tempCloud.header.stamp = thisStamp;\n    tempCloud.header.frame_id = thisFrame;\n    if (thisPub->getNumSubscribers() != 0)\n        thisPub->publish(tempCloud);\n    return tempCloud;\n}\n\ntemplate<typename T>\ndouble ROS_TIME(T msg)\n{\n    return msg->header.stamp.toSec();\n}\n\n\ntemplate<typename T>\nvoid 
imuAngular2rosAngular(sensor_msgs::Imu *thisImuMsg, T *angular_x, T *angular_y, T *angular_z)\n{\n    *angular_x = thisImuMsg->angular_velocity.x;\n    *angular_y = thisImuMsg->angular_velocity.y;\n    *angular_z = thisImuMsg->angular_velocity.z;\n}\n\n\ntemplate<typename T>\nvoid imuAccel2rosAccel(sensor_msgs::Imu *thisImuMsg, T *acc_x, T *acc_y, T *acc_z)\n{\n    *acc_x = thisImuMsg->linear_acceleration.x;\n    *acc_y = thisImuMsg->linear_acceleration.y;\n    *acc_z = thisImuMsg->linear_acceleration.z;\n}\n\n\ntemplate<typename T>\nvoid imuRPY2rosRPY(sensor_msgs::Imu *thisImuMsg, T *rosRoll, T *rosPitch, T *rosYaw)\n{\n    double imuRoll, imuPitch, imuYaw;\n    tf::Quaternion orientation;\n    tf::quaternionMsgToTF(thisImuMsg->orientation, orientation);\n    tf::Matrix3x3(orientation).getRPY(imuRoll, imuPitch, imuYaw);\n\n    *rosRoll = imuRoll;\n    *rosPitch = imuPitch;\n    *rosYaw = imuYaw;\n}\n\n\nfloat pointDistance(PointType p)\n{\n    return sqrt(p.x*p.x + p.y*p.y + p.z*p.z);\n}\n\n\nfloat pointDistance(PointType p1, PointType p2)\n{\n    return sqrt((p1.x-p2.x)*(p1.x-p2.x) + (p1.y-p2.y)*(p1.y-p2.y) + (p1.z-p2.z)*(p1.z-p2.z));\n}\n\n#endif"
  },
  {
    "path": "launch/include/config/robot.urdf.xacro",
    "content": "<?xml version=\"1.0\"?>\n<robot name=\"roboat\" xmlns:xacro=\"http://roboat.org\">\n  <xacro:property name=\"PI\" value=\"3.1415926535897931\" />\n\n  <link name=\"base_link\"></link>\n\n  <joint name=\"base_link_joint\" type=\"fixed\">\n    <parent link=\"base_link\"/>\n    <child link=\"chassis_link\" />\n    <origin xyz=\"0 0 0\" rpy=\"0 0 0\" />\n  </joint>\n\n  <link name=\"chassis_link\"></link>\n  \n  <link name=\"imu_link\"> </link>\n  <joint name=\"imu_joint\" type=\"fixed\">\n    <parent link=\"chassis_link\" />\n    <child link=\"imu_link\" />\n    <origin xyz=\"0 -0.1 0\" rpy=\"${PI} 0 ${PI}\" />\n  </joint>\n\n  <link name=\"imu_enu_link\"> </link>\n  <joint name=\"imu_enu_joint\" type=\"fixed\">\n    <parent link=\"imu_link\" />\n    <child link=\"imu_enu_link\" />\n    <origin xyz=\"0 0 0\" rpy=\"${PI} 0 ${PI}\" />\n  </joint>\n\n  <link name=\"velodyne\"> </link>\n  <joint name=\"velodyne_joint\" type=\"fixed\">\n    <parent link=\"chassis_link\" />\n    <child link=\"velodyne\" />\n    <origin xyz=\"0 0 0\" rpy=\"0 0 0\" />\n  </joint>\n\n  <link name=\"realsense_link\"> </link>\n  <joint name=\"realsense_joint\" type=\"fixed\">\n    <parent link=\"chassis_link\" />\n    <child link=\"realsense_link\" />\n    <origin xyz=\"0.1 0 -0.1\" rpy=\"0 0 0\" />\n  </joint>\n\n  <link name=\"navsat_link\"> </link>\n  <joint name=\"navsat_joint\" type=\"fixed\">\n    <parent link=\"chassis_link\" />\n    <child link=\"navsat_link\" />\n    <origin xyz=\"-0.2 0 0.2\" rpy=\"0 0 0\" />\n  </joint>\n\n  <link name=\"loam_camera\"> </link>\n  <joint name=\"_loam_camera_joint\" type=\"fixed\">\n    <parent link=\"chassis_link\" />\n    <child link=\"loam_camera\" />\n    <origin xyz=\"0 0 0\" rpy=\"1.570796 0 1.570796\" />\n  </joint>\n\n</robot>\n"
  },
  {
    "path": "launch/include/config/rviz.rviz",
    "content": "Panels:\n  - Class: rviz/Displays\n    Help Height: 70\n    Name: Displays\n    Property Tree Widget:\n      Expanded:\n        - /links1\n        - /Odometry1\n        - /Point Cloud1\n        - /Point Cloud1/Velodyne1\n        - /Feature Cloud1\n        - /Mapping1\n        - /Grid1\n        - /PointCloud21\n        - /PointCloud22\n        - /PointCloud23\n      Splitter Ratio: 0.5600000023841858\n    Tree Height: 292\n  - Class: rviz/Selection\n    Name: Selection\n  - Class: rviz/Views\n    Expanded:\n      - /Current View1\n    Name: Views\n    Splitter Ratio: 0.5\n  - Class: rviz/Tool Properties\n    Expanded:\n      - /2D Pose Estimate1\n    Name: Tool Properties\n    Splitter Ratio: 0.5\n  - Class: rviz/Views\n    Expanded:\n      - /Current View1\n    Name: Views\n    Splitter Ratio: 0.5\nPreferences:\n  PromptSaveOnExit: true\nToolbars:\n  toolButtonStyle: 2\nVisualization Manager:\n  Class: \"\"\n  Displays:\n    - Class: rviz/Group\n      Displays:\n        - Class: rviz/TF\n          Enabled: false\n          Frame Timeout: 999\n          Frames:\n            All Enabled: false\n          Marker Scale: 3\n          Name: TF\n          Show Arrows: true\n          Show Axes: true\n          Show Names: true\n          Tree:\n            {}\n          Update Interval: 0\n          Value: false\n        - Class: rviz/Axes\n          Enabled: true\n          Length: 2\n          Name: map\n          Radius: 0.5\n          Reference Frame: map\n          Value: true\n        - Class: rviz/Axes\n          Enabled: true\n          Length: 1\n          Name: base_link\n          Radius: 0.30000001192092896\n          Reference Frame: base_link\n          Value: true\n      Enabled: true\n      Name: links\n    - Class: rviz/Group\n      Displays:\n        - Angle Tolerance: 0.10000000149011612\n          Class: rviz/Odometry\n          Covariance:\n            Orientation:\n              Alpha: 0.5\n              Color: 255; 255; 127\n         
     Color Style: Unique\n              Frame: Local\n              Offset: 1\n              Scale: 1\n              Value: true\n            Position:\n              Alpha: 0.30000001192092896\n              Color: 204; 51; 204\n              Scale: 1\n              Value: true\n            Value: false\n          Enabled: false\n          Keep: 300\n          Name: Odo IMU\n          Position Tolerance: 0.10000000149011612\n          Shape:\n            Alpha: 1\n            Axes Length: 0.25\n            Axes Radius: 0.10000000149011612\n            Color: 255; 25; 0\n            Head Length: 0.30000001192092896\n            Head Radius: 0.10000000149011612\n            Shaft Length: 1\n            Shaft Radius: 0.05000000074505806\n            Value: Axes\n          Topic: /odometry/imu\n          Unreliable: false\n          Value: false\n        - Angle Tolerance: 0.10000000149011612\n          Class: rviz/Odometry\n          Covariance:\n            Orientation:\n              Alpha: 0.5\n              Color: 255; 255; 127\n              Color Style: Unique\n              Frame: Local\n              Offset: 1\n              Scale: 1\n              Value: true\n            Position:\n              Alpha: 0.30000001192092896\n              Color: 204; 51; 204\n              Scale: 1\n              Value: true\n            Value: false\n          Enabled: true\n          Keep: 300\n          Name: Odom GPS\n          Position Tolerance: 0.10000000149011612\n          Shape:\n            Alpha: 1\n            Axes Length: 1\n            Axes Radius: 0.30000001192092896\n            Color: 255; 25; 0\n            Head Length: 0.30000001192092896\n            Head Radius: 0.10000000149011612\n            Shaft Length: 1\n            Shaft Radius: 0.05000000074505806\n            Value: Axes\n          Topic: /odometry/gps\n          Unreliable: false\n          Value: true\n        - Angle Tolerance: 0.10000000149011612\n          Class: rviz/Odometry\n          
Covariance:\n            Orientation:\n              Alpha: 0.5\n              Color: 255; 255; 127\n              Color Style: Unique\n              Frame: Local\n              Offset: 1\n              Scale: 1\n              Value: true\n            Position:\n              Alpha: 0.30000001192092896\n              Color: 204; 51; 204\n              Scale: 1\n              Value: true\n            Value: false\n          Enabled: false\n          Keep: 300\n          Name: Odom lidar\n          Position Tolerance: 0.10000000149011612\n          Shape:\n            Alpha: 1\n            Axes Length: 0.25\n            Axes Radius: 0.10000000149011612\n            Color: 255; 25; 0\n            Head Length: 0.30000001192092896\n            Head Radius: 0.10000000149011612\n            Shaft Length: 1\n            Shaft Radius: 0.05000000074505806\n            Value: Axes\n          Topic: /lio_sam/mapping/odometry\n          Unreliable: false\n          Value: false\n      Enabled: false\n      Name: Odometry\n    - Class: rviz/Group\n      Displays:\n        - Alpha: 1\n          Autocompute Intensity Bounds: true\n          Autocompute Value Bounds:\n            Max Value: 18.805150985717773\n            Min Value: -3.3611395359039307\n            Value: true\n          Axis: Z\n          Channel Name: intensity\n          Class: rviz/PointCloud2\n          Color: 255; 255; 255\n          Color Transformer: FlatColor\n          Decay Time: 0\n          Enabled: true\n          Invert Rainbow: false\n          Max Color: 255; 255; 255\n          Max Intensity: 255\n          Min Color: 0; 0; 0\n          Min Intensity: 0\n          Name: Velodyne\n          Position Transformer: XYZ\n          Queue Size: 10\n          Selectable: false\n          Size (Pixels): 2\n          Size (m): 0.009999999776482582\n          Style: Points\n          Topic: /lio_sam/deskew/cloud_deskewed\n          Unreliable: false\n          Use Fixed Frame: true\n          Use rainbow: 
true\n          Value: true\n        - Alpha: 1\n          Autocompute Intensity Bounds: true\n          Autocompute Value Bounds:\n            Max Value: 10\n            Min Value: -10\n            Value: true\n          Axis: Z\n          Channel Name: intensity\n          Class: rviz/PointCloud2\n          Color: 255; 255; 255\n          Color Transformer: Intensity\n          Decay Time: 0\n          Enabled: false\n          Invert Rainbow: false\n          Max Color: 255; 255; 255\n          Max Intensity: 0.7900000214576721\n          Min Color: 0; 0; 0\n          Min Intensity: 0\n          Name: Reg Cloud\n          Position Transformer: XYZ\n          Queue Size: 10\n          Selectable: true\n          Size (Pixels): 3\n          Size (m): 0.009999999776482582\n          Style: Points\n          Topic: /lio_sam/mapping/cloud_registered\n          Unreliable: false\n          Use Fixed Frame: true\n          Use rainbow: true\n          Value: false\n      Enabled: true\n      Name: Point Cloud\n    - Class: rviz/Group\n      Displays:\n        - Alpha: 1\n          Autocompute Intensity Bounds: true\n          Autocompute Value Bounds:\n            Max Value: 10\n            Min Value: -10\n            Value: true\n          Axis: Z\n          Channel Name: intensity\n          Class: rviz/PointCloud2\n          Color: 0; 255; 0\n          Color Transformer: FlatColor\n          Decay Time: 0\n          Enabled: true\n          Invert Rainbow: false\n          Max Color: 255; 255; 255\n          Max Intensity: 15.077729225158691\n          Min Color: 0; 0; 0\n          Min Intensity: 0.019985778257250786\n          Name: Edge Feature\n          Position Transformer: XYZ\n          Queue Size: 10\n          Selectable: true\n          Size (Pixels): 5\n          Size (m): 0.009999999776482582\n          Style: Points\n          Topic: /lio_sam/feature/cloud_corner\n          Unreliable: false\n          Use Fixed Frame: true\n          Use rainbow: 
true\n          Value: true\n        - Alpha: 1\n          Autocompute Intensity Bounds: true\n          Autocompute Value Bounds:\n            Max Value: 10\n            Min Value: -10\n            Value: true\n          Axis: Z\n          Channel Name: intensity\n          Class: rviz/PointCloud2\n          Color: 255; 85; 255\n          Color Transformer: FlatColor\n          Decay Time: 0\n          Enabled: true\n          Invert Rainbow: false\n          Max Color: 255; 255; 255\n          Max Intensity: 15.100607872009277\n          Min Color: 0; 0; 0\n          Min Intensity: 0.0007188677554950118\n          Name: Plannar Feature\n          Position Transformer: XYZ\n          Queue Size: 10\n          Selectable: true\n          Size (Pixels): 3\n          Size (m): 0.009999999776482582\n          Style: Points\n          Topic: /lio_sam/feature/cloud_surface\n          Unreliable: false\n          Use Fixed Frame: true\n          Use rainbow: true\n          Value: true\n      Enabled: false\n      Name: Feature Cloud\n    - Class: rviz/Group\n      Displays:\n        - Alpha: 0.30000001192092896\n          Autocompute Intensity Bounds: true\n          Autocompute Value Bounds:\n            Max Value: 7.4858574867248535\n            Min Value: -1.2309398651123047\n            Value: true\n          Axis: Z\n          Channel Name: intensity\n          Class: rviz/PointCloud2\n          Color: 255; 255; 255\n          Color Transformer: Intensity\n          Decay Time: 500\n          Enabled: false\n          Invert Rainbow: false\n          Max Color: 255; 255; 255\n          Max Intensity: 101.66666412353516\n          Min Color: 0; 0; 0\n          Min Intensity: 1\n          Name: Map (cloud)\n          Position Transformer: XYZ\n          Queue Size: 10\n          Selectable: false\n          Size (Pixels): 2\n          Size (m): 0.009999999776482582\n          Style: Points\n          Topic: /lio_sam/mapping/cloud_registered\n          Unreliable: 
false\n          Use Fixed Frame: true\n          Use rainbow: true\n          Value: false\n        - Alpha: 1\n          Autocompute Intensity Bounds: true\n          Autocompute Value Bounds:\n            Max Value: 23.98253059387207\n            Min Value: -2.447427749633789\n            Value: true\n          Axis: Z\n          Channel Name: intensity\n          Class: rviz/PointCloud2\n          Color: 255; 255; 255\n          Color Transformer: AxisColor\n          Decay Time: 0\n          Enabled: false\n          Invert Rainbow: false\n          Max Color: 255; 255; 255\n          Max Intensity: 15.100604057312012\n          Min Color: 0; 0; 0\n          Min Intensity: 0.014545874670147896\n          Name: Map (local)\n          Position Transformer: XYZ\n          Queue Size: 10\n          Selectable: true\n          Size (Pixels): 2\n          Size (m): 0.10000000149011612\n          Style: Flat Squares\n          Topic: /lio_sam/mapping/map_local\n          Unreliable: false\n          Use Fixed Frame: true\n          Use rainbow: true\n          Value: false\n        - Alpha: 1\n          Autocompute Intensity Bounds: true\n          Autocompute Value Bounds:\n            Max Value: 10\n            Min Value: -10\n            Value: true\n          Axis: Z\n          Channel Name: intensity\n          Class: rviz/PointCloud2\n          Color: 255; 255; 255\n          Color Transformer: Intensity\n          Decay Time: 0\n          Enabled: false\n          Invert Rainbow: false\n          Max Color: 255; 255; 255\n          Max Intensity: 15.100096702575684\n          Min Color: 0; 0; 0\n          Min Intensity: 0.007923072203993797\n          Name: Map (global)\n          Position Transformer: XYZ\n          Queue Size: 10\n          Selectable: true\n          Size (Pixels): 2\n          Size (m): 0.009999999776482582\n          Style: Points\n          Topic: /lio_sam/mapping/map_global\n          Unreliable: false\n          Use Fixed Frame: true\n 
         Use rainbow: true\n          Value: false\n        - Alpha: 1\n          Buffer Length: 1\n          Class: rviz/Path\n          Color: 85; 255; 255\n          Enabled: true\n          Head Diameter: 0.30000001192092896\n          Head Length: 0.20000000298023224\n          Length: 0.30000001192092896\n          Line Style: Billboards\n          Line Width: 0.10000000149011612\n          Name: Path (global)\n          Offset:\n            X: 0\n            Y: 0\n            Z: 0\n          Pose Color: 255; 85; 255\n          Pose Style: None\n          Radius: 0.029999999329447746\n          Shaft Diameter: 0.10000000149011612\n          Shaft Length: 0.10000000149011612\n          Topic: /lio_sam/mapping/path\n          Unreliable: false\n          Value: true\n        - Alpha: 1\n          Buffer Length: 1\n          Class: rviz/Path\n          Color: 255; 0; 127\n          Enabled: true\n          Head Diameter: 0.30000001192092896\n          Head Length: 0.20000000298023224\n          Length: 0.30000001192092896\n          Line Style: Billboards\n          Line Width: 0.10000000149011612\n          Name: Path (local)\n          Offset:\n            X: 0\n            Y: 0\n            Z: 0\n          Pose Color: 255; 85; 255\n          Pose Style: None\n          Radius: 0.029999999329447746\n          Shaft Diameter: 0.10000000149011612\n          Shaft Length: 0.10000000149011612\n          Topic: /lio_sam/imu/path\n          Unreliable: false\n          Value: true\n      Enabled: true\n      Name: Mapping\n    - Alpha: 0.5\n      Cell Size: 1\n      Class: rviz/Grid\n      Color: 160; 160; 164\n      Enabled: true\n      Line Style:\n        Line Width: 0.029999999329447746\n        Value: Lines\n      Name: Grid\n      Normal Cell Count: 0\n      Offset:\n        X: 0\n        Y: 0\n        Z: 0\n      Plane: XY\n      Plane Cell Count: 200\n      Reference Frame: <Fixed Frame>\n      Value: true\n    - Alpha: 1\n      Autocompute Intensity Bounds: 
true\n      Autocompute Value Bounds:\n        Max Value: 10\n        Min Value: -10\n        Value: true\n      Axis: Z\n      Channel Name: x\n      Class: rviz/PointCloud2\n      Color: 255; 255; 255\n      Color Transformer: Intensity\n      Decay Time: 0\n      Enabled: true\n      Invert Rainbow: false\n      Max Color: 255; 255; 255\n      Max Intensity: 182.80648803710938\n      Min Color: 0; 0; 0\n      Min Intensity: -118.49858856201172\n      Name: PointCloud2\n      Position Transformer: XYZ\n      Queue Size: 10\n      Selectable: true\n      Size (Pixels): 3\n      Size (m): 0.20000000298023224\n      Style: Flat Squares\n      Topic: /lio_sam/mapping/cloud_map_map\n      Unreliable: false\n      Use Fixed Frame: true\n      Use rainbow: false\n      Value: true\n    - Alpha: 1\n      Autocompute Intensity Bounds: true\n      Autocompute Value Bounds:\n        Max Value: 10\n        Min Value: -10\n        Value: true\n      Axis: Z\n      Channel Name: intensity\n      Class: rviz/PointCloud2\n      Color: 255; 255; 255\n      Color Transformer: Intensity\n      Decay Time: 0\n      Enabled: true\n      Invert Rainbow: false\n      Max Color: 255; 255; 255\n      Max Intensity: 120\n      Min Color: 0; 0; 0\n      Min Intensity: 0\n      Name: PointCloud2\n      Position Transformer: XYZ\n      Queue Size: 10\n      Selectable: true\n      Size (Pixels): 3\n      Size (m): 0.5\n      Style: Flat Squares\n      Topic: /lio_sam/mapping/lasercloud_in_world\n      Unreliable: false\n      Use Fixed Frame: true\n      Use rainbow: true\n      Value: true\n    - Alpha: 1\n      Autocompute Intensity Bounds: true\n      Autocompute Value Bounds:\n        Max Value: 19.07742691040039\n        Min Value: -7.043781757354736\n        Value: true\n      Axis: Z\n      Channel Name: intensity\n      Class: rviz/PointCloud2\n      Color: 255; 255; 255\n      Color Transformer: AxisColor\n      Decay Time: 0\n      Enabled: true\n      Invert Rainbow: false\n      
Max Color: 255; 255; 255\n      Max Intensity: -999999\n      Min Color: 0; 0; 0\n      Min Intensity: 999999\n      Name: PointCloud2\n      Position Transformer: XYZ\n      Queue Size: 10\n      Selectable: true\n      Size (Pixels): 3\n      Size (m): 0.30000001192092896\n      Style: Flat Squares\n      Topic: /lio_sam/mapping/laserclouinmapframe\n      Unreliable: false\n      Use Fixed Frame: true\n      Use rainbow: true\n      Value: true\n  Enabled: true\n  Global Options:\n    Background Color: 0; 0; 0\n    Default Light: true\n    Fixed Frame: map\n    Frame Rate: 30\n  Name: root\n  Tools:\n    - Class: rviz/Interact\n      Hide Inactive Objects: true\n    - Class: rviz/FocusCamera\n    - Class: rviz/Measure\n    - Class: rviz/SetInitialPose\n      Theta std deviation: 0.2617993950843811\n      Topic: initialpose\n      X std deviation: 0.5\n      Y std deviation: 0.5\n  Value: true\n  Views:\n    Current:\n      Class: rviz/Orbit\n      Distance: 133.46669006347656\n      Enable Stereo Rendering:\n        Stereo Eye Separation: 0.05999999865889549\n        Stereo Focal Distance: 1\n        Swap Stereo Eyes: false\n        Value: false\n      Focal Point:\n        X: 5.347261428833008\n        Y: 2.5245871543884277\n        Z: -9.126422882080078\n      Focal Shape Fixed Size: false\n      Focal Shape Size: 0.05000000074505806\n      Invert Z Axis: false\n      Name: Current View\n      Near Clip Distance: 0.009999999776482582\n      Pitch: 1.1797966957092285\n      Target Frame: base_link\n      Value: Orbit (rviz)\n      Yaw: 4.47352409362793\n    Saved: ~\nWindow Geometry:\n  Displays:\n    collapsed: false\n  Height: 713\n  Hide Left Dock: false\n  Hide Right Dock: true\n  QMainWindow State: 
000000ff00000000fd0000000300000000000001a30000026ffc020000000afb0000001200530065006c0065006300740069006f006e00000001e10000009b0000005c00fffffffb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb0000000a0049006d006100670065000000002e000001340000000000000000fb000000100044006900730070006c006100790073010000003d000001a7000000c900fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730000000286000000a70000005c00fffffffb0000000a0056006900650077007301000001ea000000c2000000a400ffffff000000010000011100000435fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073000000002e00000435000000a400fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b2000000000000000000000003000004b000000040fc0100000002fb0000000800540069006d00650000000000000004b00000000000000000fb0000000800540069006d006501000000000000045000000000000000000000036a0000026f00000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000\n  Selection:\n    collapsed: false\n  Tool Properties:\n    collapsed: false\n  Views:\n    collapsed: false\n  Width: 1299\n  X: 67\n  Y: 27\n"
  },
  {
    "path": "launch/include/module_loam.launch",
    "content": "<launch>\n\n    <arg name=\"project\" default=\"lio_sam\"/>\n    \n    <node pkg=\"$(arg project)\" type=\"$(arg project)_imuPreintegration\"   name=\"$(arg project)_imuPreintegration\"    output=\"screen\" \trespawn=\"true\"/>\n    <node pkg=\"$(arg project)\" type=\"$(arg project)_imageProjection\"     name=\"$(arg project)_imageProjection\"      output=\"screen\"     respawn=\"true\"/>\n    <node pkg=\"$(arg project)\" type=\"$(arg project)_featureExtraction\"   name=\"$(arg project)_featureExtraction\"    output=\"screen\"     respawn=\"true\"/>\n    <node pkg=\"$(arg project)\" type=\"$(arg project)_mapOptmization\"      name=\"$(arg project)_mapOptmization\"       output=\"screen\"     respawn=\"true\"/>\n    \n</launch>"
  },
  {
    "path": "launch/include/module_navsat.launch",
    "content": "<launch>\n\n    <arg name=\"project\" default=\"lio_sam\"/>\n\n    <env name=\"ROSCONSOLE_CONFIG_FILE\" value=\"$(find lio_sam)/launch/include/rosconsole/rosconsole_error.conf\"/>\n    \n    <!-- EKF GPS-->\n    <node pkg=\"robot_localization\" type=\"ekf_localization_node\" name=\"ekf_gps\" respawn=\"true\">\n        <remap from=\"odometry/filtered\" to=\"odometry/navsat\" />\n    </node>\n\n    <!-- Navsat -->\n    <node pkg=\"robot_localization\" type=\"navsat_transform_node\" name=\"navsat\" respawn=\"true\">\n        <!-- <rosparam param=\"datum\">[42.35893211, -71.09345588, 0.0, world, base_link]</rosparam> -->\n        <remap from=\"imu/data\" to=\"imu_correct\" />\n        <remap from=\"gps/fix\" to=\"gps/fix\" />\n        <remap from=\"odometry/filtered\" to=\"odometry/navsat\" />\n    </node>\n\n</launch>"
  },
  {
    "path": "launch/include/module_relocolize.launch",
    "content": "<launch>\n\n    <arg name=\"project\" default=\"lio_sam\"/>\n    \n    <node pkg=\"$(arg project)\" type=\"$(arg project)_imuPreintegration\"   name=\"$(arg project)_imuPreintegration\"    output=\"screen\" \trespawn=\"true\"/>\n    <node pkg=\"$(arg project)\" type=\"$(arg project)_imageProjection\"     name=\"$(arg project)_imageProjection\"      output=\"screen\"     respawn=\"true\"/>\n    <node pkg=\"$(arg project)\" type=\"$(arg project)_featureExtraction\"   name=\"$(arg project)_featureExtraction\"    output=\"screen\"     respawn=\"true\"/>\n    <node pkg=\"$(arg project)\" type=\"$(arg project)_globalLocalize\"      name=\"$(arg project)_mapOptmization\"       output=\"screen\"     respawn=\"true\"/>\n    \n</launch>\n"
  },
  {
    "path": "launch/include/module_robot_state_publisher.launch",
    "content": "<launch>\n\n\t<arg name=\"project\" default=\"lio_sam\"/>\n\n    <param name=\"robot_description\" command=\"$(find xacro)/xacro $(find lio_sam)/launch/include/config/robot.urdf.xacro --inorder\" />\n\n    <node name=\"robot_state_publisher\" pkg=\"robot_state_publisher\" type=\"robot_state_publisher\" respawn=\"true\">\n        <!-- <param name=\"tf_prefix\" value=\"$(env ROS_HOSTNAME)\"/> -->\n    </node>\n  \n</launch>"
  },
  {
    "path": "launch/include/module_rviz.launch",
    "content": "<launch>\n\n    <arg name=\"project\" default=\"lio_sam\"/>\n\n    <!--- Run Rviz-->\n    <node pkg=\"rviz\" type=\"rviz\" name=\"$(arg project)_rviz\" args=\"-d $(find lio_sam)/launch/include/config/rviz.rviz\" />\n\n</launch>\n"
  },
  {
    "path": "launch/include/rosconsole/rosconsole_error.conf",
    "content": "# Set the default ros output to warning and higher\nlog4j.logger.ros = ERROR"
  },
  {
    "path": "launch/include/rosconsole/rosconsole_info.conf",
    "content": "# Set the default ros output to warning and higher\nlog4j.logger.ros = INFO"
  },
  {
    "path": "launch/include/rosconsole/rosconsole_warn.conf",
    "content": "# Set the default ros output to warning and higher\nlog4j.logger.ros = WARN"
  },
  {
    "path": "launch/run.launch",
    "content": "<launch>\n\n    <arg name=\"project\" default=\"lio_sam\"/>\n    \n    <!-- Parameters -->\n    <rosparam file=\"$(find lio_sam)/config/params.yaml\" command=\"load\" />\n\n    <!--- LOAM -->\n    <include file=\"$(find lio_sam)/launch/include/module_loam.launch\" />\n\n    <!--- Robot State TF -->\n    <include file=\"$(find lio_sam)/launch/include/module_robot_state_publisher.launch\" />\n\n    <!--- Run Navsat -->\n    <include file=\"$(find lio_sam)/launch/include/module_navsat.launch\" />\n\n    <!--- Run Rviz-->\n    <include file=\"$(find lio_sam)/launch/include/module_rviz.launch\" />\n\n</launch>\n"
  },
  {
    "path": "launch/run_relocalize.launch",
    "content": "<launch>\n\n    <arg name=\"project\" default=\"lio_sam\"/>\n    \n    <!-- Parameters -->\n    <rosparam file=\"$(find lio_sam)/config/params.yaml\" command=\"load\" />\n\n    <!--- LOAM -->\n    <include file=\"$(find lio_sam)/launch/include/module_relocolize.launch\" />\n\n    <!--- Robot State TF -->\n    <include file=\"$(find lio_sam)/launch/include/module_robot_state_publisher.launch\" />\n\n    <!--- Run Navsat -->\n    <include file=\"$(find lio_sam)/launch/include/module_navsat.launch\" />\n\n    <!--- Run Rviz-->\n    <include file=\"$(find lio_sam)/launch/include/module_rviz.launch\" />\n\n</launch>\n"
  },
  {
    "path": "msg/cloud_info.msg",
    "content": "# Cloud Info\nHeader header \n\nint32[] startRingIndex\nint32[] endRingIndex\n\nint32[]  pointColInd # point column index in range image\nfloat32[] pointRange # point range \n\nint64 imuAvailable\nint64 odomAvailable\n\n# Attitude for LOAM initialization\nfloat32 imuRollInit\nfloat32 imuPitchInit\nfloat32 imuYawInit\n\n# Initial guess from imu pre-integration\nfloat32 initialGuessX\nfloat32 initialGuessY\nfloat32 initialGuessZ\nfloat32 initialGuessRoll\nfloat32 initialGuessPitch\nfloat32 initialGuessYaw\n\n# Preintegration reset ID\nint64 imuPreintegrationResetId\n\n# Point cloud messages\nsensor_msgs/PointCloud2 cloud_deskewed  # original cloud deskewed\nsensor_msgs/PointCloud2 cloud_corner    # extracted corner feature\nsensor_msgs/PointCloud2 cloud_surface   # extracted surface feature"
  },
  {
    "path": "package.xml",
    "content": "<?xml version=\"1.0\"?>\n<package>\n  <name>lio_sam</name>\n  <version>1.0.0</version>\n  <description>Lidar Odometry</description>\n\n  <maintainer email=\"shant@mit.edu\">Tixiao Shan</maintainer>\n\n  <license>TODO</license>\n\n  <author>Tixiao Shan</author>\n\n  <buildtool_depend>catkin</buildtool_depend>\n\n  <build_depend>roscpp</build_depend>\n  <run_depend>roscpp</run_depend>\n  <build_depend>rospy</build_depend>\n  <run_depend>rospy</run_depend>\n  \n  <build_depend>tf</build_depend>\n  <run_depend>tf</run_depend>\n\n  <build_depend>cv_bridge</build_depend>\n  <run_depend>cv_bridge</run_depend>\n\n  <build_depend>pcl_conversions</build_depend>\n  <run_depend>pcl_conversions</run_depend>\n\n  <build_depend>std_msgs</build_depend>\n  <run_depend>std_msgs</run_depend>\n  <build_depend>sensor_msgs</build_depend>\n  <run_depend>sensor_msgs</run_depend>\n  <build_depend>geometry_msgs</build_depend>\n  <run_depend>geometry_msgs</run_depend>\n  <build_depend>nav_msgs</build_depend>\n  <run_depend>nav_msgs</run_depend>\n\n  <build_depend>message_generation</build_depend>\n  <run_depend>message_generation</run_depend>\n  <build_depend>message_runtime</build_depend>\n  <run_depend>message_runtime</run_depend>\n\n  <build_depend>GTSAM</build_depend>\n  <run_depend>GTSAM</run_depend>\n\n</package>\n"
  },
  {
    "path": "src/featureExtraction.cpp",
    "content": "#include \"utility.h\"\n#include \"lio_sam/cloud_info.h\"\n\nstruct smoothness_t{ \n    float value;\n    size_t ind;\n};\n\nstruct by_value{ \n    bool operator()(smoothness_t const &left, smoothness_t const &right) { \n        return left.value < right.value;\n    }\n};\n\nclass FeatureExtraction : public ParamServer\n{\n\npublic:\n\n    ros::Subscriber subLaserCloudInfo;\n\n    ros::Publisher pubLaserCloudInfo;\n    ros::Publisher pubCornerPoints;\n    ros::Publisher pubSurfacePoints;\n\n    pcl::PointCloud<PointType>::Ptr extractedCloud;\n    pcl::PointCloud<PointType>::Ptr cornerCloud;\n    pcl::PointCloud<PointType>::Ptr surfaceCloud;\n\n    pcl::VoxelGrid<PointType> downSizeFilter;\n\n    lio_sam::cloud_info cloudInfo;\n    std_msgs::Header cloudHeader;\n\n    std::vector<smoothness_t> cloudSmoothness;\n    float *cloudCurvature;\n    int *cloudNeighborPicked;\n    int *cloudLabel;\n\n    FeatureExtraction()\n    {\n        subLaserCloudInfo = nh.subscribe<lio_sam::cloud_info>(\"lio_sam/deskew/cloud_info\", 10, &FeatureExtraction::laserCloudInfoHandler, this, ros::TransportHints().tcpNoDelay());\n\n        pubLaserCloudInfo = nh.advertise<lio_sam::cloud_info> (\"lio_sam/feature/cloud_info\", 10);\n        pubCornerPoints = nh.advertise<sensor_msgs::PointCloud2>(\"lio_sam/feature/cloud_corner\", 1);\n        pubSurfacePoints = nh.advertise<sensor_msgs::PointCloud2>(\"lio_sam/feature/cloud_surface\", 1);\n        \n        initializationValue();\n    }\n\n    void initializationValue()\n    {\n        cloudSmoothness.resize(N_SCAN*Horizon_SCAN);\n\n        downSizeFilter.setLeafSize(odometrySurfLeafSize, odometrySurfLeafSize, odometrySurfLeafSize);\n\n        extractedCloud.reset(new pcl::PointCloud<PointType>());\n        cornerCloud.reset(new pcl::PointCloud<PointType>());\n        surfaceCloud.reset(new pcl::PointCloud<PointType>());\n\n        cloudCurvature = new float[N_SCAN*Horizon_SCAN];\n        cloudNeighborPicked = new 
int[N_SCAN*Horizon_SCAN];\n        cloudLabel = new int[N_SCAN*Horizon_SCAN];\n    }\n\n    void laserCloudInfoHandler(const lio_sam::cloud_infoConstPtr& msgIn)\n    {\n        cloudInfo = *msgIn; // new cloud info\n        cloudHeader = msgIn->header; // new cloud header\n        pcl::fromROSMsg(msgIn->cloud_deskewed, *extractedCloud); // new cloud for extraction\n\n        calculateSmoothness();//gc: calculate curvature\n\n        markOccludedPoints();//gc: check unstable points\n\n        extractFeatures();//gc: extract features\n\n        publishFeatureCloud();\n    }\n    //gc:\n    void calculateSmoothness()\n    {\n        int cloudSize = extractedCloud->points.size();\n        for (int i = 5; i < cloudSize - 5; i++)\n        {\n            float diffRange = cloudInfo.pointRange[i-5] + cloudInfo.pointRange[i-4]\n                            + cloudInfo.pointRange[i-3] + cloudInfo.pointRange[i-2]\n                            + cloudInfo.pointRange[i-1] - cloudInfo.pointRange[i] * 10\n                            + cloudInfo.pointRange[i+1] + cloudInfo.pointRange[i+2]\n                            + cloudInfo.pointRange[i+3] + cloudInfo.pointRange[i+4]\n                            + cloudInfo.pointRange[i+5];            \n\n            cloudCurvature[i] = diffRange*diffRange;//diffX * diffX + diffY * diffY + diffZ * diffZ;\n\n            cloudNeighborPicked[i] = 0;\n            cloudLabel[i] = 0;\n            // cloudSmoothness for sorting\n            cloudSmoothness[i].value = cloudCurvature[i];\n            cloudSmoothness[i].ind = i;\n        }\n    }\n\n    void markOccludedPoints()\n    {\n        int cloudSize = extractedCloud->points.size();\n        // mark occluded points and parallel beam points\n        for (int i = 5; i < cloudSize - 6; ++i)\n        {\n            // occluded points\n            float depth1 = cloudInfo.pointRange[i];\n            float depth2 = cloudInfo.pointRange[i+1];\n            int columnDiff = 
std::abs(int(cloudInfo.pointColInd[i+1] - cloudInfo.pointColInd[i]));\n\n            if (columnDiff < 10){\n                // 10 pixel diff in range image\n                if (depth1 - depth2 > 0.3){\n                    cloudNeighborPicked[i - 5] = 1;\n                    cloudNeighborPicked[i - 4] = 1;\n                    cloudNeighborPicked[i - 3] = 1;\n                    cloudNeighborPicked[i - 2] = 1;\n                    cloudNeighborPicked[i - 1] = 1;\n                    cloudNeighborPicked[i] = 1;\n                }else if (depth2 - depth1 > 0.3){\n                    cloudNeighborPicked[i + 1] = 1;\n                    cloudNeighborPicked[i + 2] = 1;\n                    cloudNeighborPicked[i + 3] = 1;\n                    cloudNeighborPicked[i + 4] = 1;\n                    cloudNeighborPicked[i + 5] = 1;\n                    cloudNeighborPicked[i + 6] = 1;\n                }\n            }\n            // parallel beam\n            //gc:points on local planar surfaces that are roughly parallel to laser beam\n            float diff1 = std::abs(float(cloudInfo.pointRange[i-1] - cloudInfo.pointRange[i]));\n            float diff2 = std::abs(float(cloudInfo.pointRange[i+1] - cloudInfo.pointRange[i]));\n\n            if (diff1 > 0.02 * cloudInfo.pointRange[i] && diff2 > 0.02 * cloudInfo.pointRange[i])\n                cloudNeighborPicked[i] = 1;\n        }\n    }\n\n    void extractFeatures()\n    {\n        cornerCloud->clear();\n        surfaceCloud->clear();\n\n        pcl::PointCloud<PointType>::Ptr surfaceCloudScan(new pcl::PointCloud<PointType>());\n        pcl::PointCloud<PointType>::Ptr surfaceCloudScanDS(new pcl::PointCloud<PointType>());\n        //gc: for every ring\n        for (int i = 0; i < N_SCAN; i++)\n        {\n            surfaceCloudScan->clear();\n            //gc: for every section\n            for (int j = 0; j < 6; j++)\n            {\n\n                int sp = (cloudInfo.startRingIndex[i] * (6 - j) + cloudInfo.endRingIndex[i] * 
j) / 6;\n                int ep = (cloudInfo.startRingIndex[i] * (5 - j) + cloudInfo.endRingIndex[i] * (j + 1)) / 6 - 1;\n\n                if (sp >= ep)\n                    continue;\n\n                std::sort(cloudSmoothness.begin()+sp, cloudSmoothness.begin()+ep, by_value());\n\n                int largestPickedNum = 0;\n                for (int k = ep; k >= sp; k--)\n                {\n                    int ind = cloudSmoothness[k].ind;\n                    if (cloudNeighborPicked[ind] == 0 && cloudCurvature[ind] > edgeThreshold)\n                    {\n                        largestPickedNum++;\n                        if (largestPickedNum <= 20){\n                            cloudLabel[ind] = 1;\n                            cornerCloud->push_back(extractedCloud->points[ind]);\n                        } else {\n                            break;\n                        }\n\n                        cloudNeighborPicked[ind] = 1;\n                        for (int l = 1; l <= 5; l++)\n                        {\n                            int columnDiff = std::abs(int(cloudInfo.pointColInd[ind + l] - cloudInfo.pointColInd[ind + l - 1]));\n                            if (columnDiff > 10)\n                                break;\n                            cloudNeighborPicked[ind + l] = 1;\n                        }\n                        for (int l = -1; l >= -5; l--)\n                        {\n                            int columnDiff = std::abs(int(cloudInfo.pointColInd[ind + l] - cloudInfo.pointColInd[ind + l + 1]));\n                            if (columnDiff > 10)\n                                break;\n                            cloudNeighborPicked[ind + l] = 1;\n                        }\n                    }\n                }\n\n                for (int k = sp; k <= ep; k++)\n                {\n                    int ind = cloudSmoothness[k].ind;\n                    if (cloudNeighborPicked[ind] == 0 && cloudCurvature[ind] < surfThreshold)\n 
                   {\n\n                        cloudLabel[ind] = -1;\n                        cloudNeighborPicked[ind] = 1;\n\n                        for (int l = 1; l <= 5; l++) {\n\n                            int columnDiff = std::abs(int(cloudInfo.pointColInd[ind + l] - cloudInfo.pointColInd[ind + l - 1]));\n                            if (columnDiff > 10)\n                                break;\n\n                            cloudNeighborPicked[ind + l] = 1;\n                        }\n                        for (int l = -1; l >= -5; l--) {\n\n                            int columnDiff = std::abs(int(cloudInfo.pointColInd[ind + l] - cloudInfo.pointColInd[ind + l + 1]));\n                            if (columnDiff > 10)\n                                break;\n\n                            cloudNeighborPicked[ind + l] = 1;\n                        }\n                    }\n                }\n\n                for (int k = sp; k <= ep; k++)\n                {\n                    if (cloudLabel[k] <= 0){\n                        surfaceCloudScan->push_back(extractedCloud->points[k]);\n                    }\n                }\n            }\n\n            surfaceCloudScanDS->clear();\n            downSizeFilter.setInputCloud(surfaceCloudScan);//gc: only surface points should be down sample\n            downSizeFilter.filter(*surfaceCloudScanDS);\n\n            *surfaceCloud += *surfaceCloudScanDS;\n        }\n    }\n\n    void freeCloudInfoMemory()\n    {\n        cloudInfo.startRingIndex.clear();\n        cloudInfo.endRingIndex.clear();\n        cloudInfo.pointColInd.clear();\n        cloudInfo.pointRange.clear();\n    }\n\n    void publishFeatureCloud()\n    {\n        // free cloud info memory\n        freeCloudInfoMemory();\n        // save newly extracted features\n        cloudInfo.cloud_corner  = publishCloud(&pubCornerPoints,  cornerCloud,  cloudHeader.stamp, \"base_link\");\n        cloudInfo.cloud_surface = publishCloud(&pubSurfacePoints, 
surfaceCloud, cloudHeader.stamp, \"base_link\");\n        // publish to mapOptimization\n        pubLaserCloudInfo.publish(cloudInfo);\n    }\n};\n\n\nint main(int argc, char** argv)\n{\n    ros::init(argc, argv, \"lio_sam\");\n\n    FeatureExtraction FE;\n\n    ROS_INFO(\"\\033[1;32m----> Feature Extraction Started.\\033[0m\");\n   \n    ros::spin();\n\n    return 0;\n}"
  },
  {
    "path": "src/globalLocalize.cpp",
    "content": "//\n// Created by gc2 on 20-7-20.\n//\n\n#include \"utility.h\"\n#include \"lio_sam/cloud_info.h\"\n\n#include <gtsam/geometry/Rot3.h>\n#include <gtsam/geometry/Pose3.h>\n#include <gtsam/slam/PriorFactor.h>\n#include <gtsam/slam/BetweenFactor.h>\n#include <gtsam/navigation/GPSFactor.h>\n#include <gtsam/navigation/ImuFactor.h>\n#include <gtsam/navigation/CombinedImuFactor.h>\n#include <gtsam/nonlinear/NonlinearFactorGraph.h>\n#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>\n#include <gtsam/nonlinear/Marginals.h>\n#include <gtsam/nonlinear/Values.h>\n#include <gtsam/inference/Symbol.h>\n#include <geometry_msgs/PoseWithCovarianceStamped.h>\n\n#include <gtsam/nonlinear/ISAM2.h>\n\nusing namespace gtsam;\n\nusing symbol_shorthand::X; // Pose3 (x,y,z,r,p,y)\nusing symbol_shorthand::V; // Vel   (xdot,ydot,zdot)\nusing symbol_shorthand::B; // Bias  (ax,ay,az,gx,gy,gz)\nusing symbol_shorthand::G; // GPS pose\n\n/*\n    * A point cloud type that has 6D pose info ([x,y,z,roll,pitch,yaw] intensity is time stamp)\n    */\nstruct PointXYZIRPYT\n{\n    PCL_ADD_POINT4D\n            PCL_ADD_INTENSITY;                  // preferred way of adding a XYZ+padding\n    float roll;\n    float pitch;\n    float yaw;\n    double time;\n    EIGEN_MAKE_ALIGNED_OPERATOR_NEW   // make sure our new allocators are aligned\n} EIGEN_ALIGN16;                    // enforce SSE padding for correct memory alignment\n\n\nPOINT_CLOUD_REGISTER_POINT_STRUCT (PointXYZIRPYT,\n(float, x, x) (float, y, y)\n(float, z, z) (float, intensity, intensity)\n(float, roll, roll) (float, pitch, pitch) (float, yaw, yaw)\n(double, time, time))\n\ntypedef PointXYZIRPYT  PointTypePose;\n\n\nclass mapOptimization : public ParamServer\n{\n\npublic:\n\n    // gtsam\n    NonlinearFactorGraph gtSAMgraph;\n    Values initialEstimate;\n    Values optimizedEstimate;\n    ISAM2 *isam;\n    Values isamCurrentEstimate;\n    Eigen::MatrixXd poseCovariance;\n\n    ros::Publisher pubLaserCloudSurround;\n    
ros::Publisher pubOdomAftMappedROS;\n    ros::Publisher pubKeyPoses;\n    ros::Publisher pubPath;\n\n    ros::Publisher pubHistoryKeyFrames;\n    ros::Publisher pubIcpKeyFrames;\n    ros::Publisher pubRecentKeyFrames;\n    ros::Publisher pubRecentKeyFrame;\n    ros::Publisher pubCloudRegisteredRaw;\n\n    ros::Subscriber subLaserCloudInfo;\n    ros::Subscriber subGPS;\n\n    std::deque<nav_msgs::Odometry> gpsQueue;\n    lio_sam::cloud_info cloudInfo;\n\n    vector<pcl::PointCloud<PointType>::Ptr> cornerCloudKeyFrames;\n    vector<pcl::PointCloud<PointType>::Ptr> surfCloudKeyFrames;\n\n    pcl::PointCloud<PointType>::Ptr cloudKeyPoses3D;//gc: can be used to illustrate the path of odometry // keep\n    pcl::PointCloud<PointTypePose>::Ptr cloudKeyPoses6D;//gc: can be used to illustrate the path of odometry //keep\n    //addded**********************************by gc\n    std::mutex mtxWin;\n    std::vector<PointType> win_cloudKeyPoses3D;\n    std::vector<PointTypePose> win_cloudKeyPoses6D;\n\n    std::vector<pcl::PointCloud<PointType>::Ptr> win_cornerCloudKeyFrames;\n    std::vector<pcl::PointCloud<PointType>::Ptr> win_surfCloudKeyFrames;\n    //added***********************************by gc\n\n\n    pcl::PointCloud<PointType>::Ptr laserCloudCornerLast; // corner feature set from odoOptimization\n    pcl::PointCloud<PointType>::Ptr laserCloudSurfLast; // surf feature set from odoOptimization\n    pcl::PointCloud<PointType>::Ptr laserCloudCornerLastDS; // downsampled corner featuer set from odoOptimization\n    pcl::PointCloud<PointType>::Ptr laserCloudSurfLastDS; // downsampled surf featuer set from odoOptimization\n\n    pcl::PointCloud<PointType>::Ptr laserCloudOri;\n    pcl::PointCloud<PointType>::Ptr coeffSel;\n\n    std::vector<PointType> laserCloudOriCornerVec; // corner point holder for parallel computation\n    std::vector<PointType> coeffSelCornerVec;\n    std::vector<bool> laserCloudOriCornerFlag;\n    std::vector<PointType> laserCloudOriSurfVec; // surf point 
holder for parallel computation\n    std::vector<PointType> coeffSelSurfVec;\n    std::vector<bool> laserCloudOriSurfFlag;\n\n    pcl::PointCloud<PointType>::Ptr laserCloudCornerFromMap;\n    pcl::PointCloud<PointType>::Ptr laserCloudSurfFromMap;\n    pcl::PointCloud<PointType>::Ptr laserCloudCornerFromMapDS;\n    pcl::PointCloud<PointType>::Ptr laserCloudSurfFromMapDS;\n\n    pcl::KdTreeFLANN<PointType>::Ptr kdtreeCornerFromMap;\n    pcl::KdTreeFLANN<PointType>::Ptr kdtreeSurfFromMap;\n\n    //pcl::KdTreeFLANN<PointType>::Ptr kdtreeSurroundingKeyPoses;\n    //pcl::KdTreeFLANN<PointType>::Ptr kdtreeHistoryKeyPoses;\n\n    pcl::PointCloud<PointType>::Ptr latestKeyFrameCloud;\n    pcl::PointCloud<PointType>::Ptr nearHistoryKeyFrameCloud;\n\n    pcl::VoxelGrid<PointType> downSizeFilterCorner;\n    pcl::VoxelGrid<PointType> downSizeFilterSurf;\n    pcl::VoxelGrid<PointType> downSizeFilterICP;\n    pcl::VoxelGrid<PointType> downSizeFilterSurroundingKeyPoses; // for surrounding key poses of scan-to-map optimization\n\n    ros::Time timeLaserInfoStamp;\n    double timeLaserCloudInfoLast;\n\n    float transformTobeMapped[6];\n\n    std::mutex mtx;\n\n    double timeLastProcessing = -1;\n\n    bool isDegenerate = false;\n    Eigen::Matrix<float, 6, 6> matP;\n\n    int winSize = 30;\n    int laserCloudCornerFromMapDSNum = 0;\n    int laserCloudSurfFromMapDSNum = 0;\n    int laserCloudCornerLastDSNum = 0;\n    int laserCloudSurfLastDSNum = 0;\n\n    bool aLoopIsClosed = false;\n    int imuPreintegrationResetId = 0;\n\n    nav_msgs::Path globalPath;\n\n    Eigen::Affine3f transPointAssociateToMap;\n\n    /*************added by gc*****************/\n    pcl::PointCloud<PointType>::Ptr cloudGlobalMap;\n    pcl::PointCloud<PointType>::Ptr cloudGlobalMapDS;\n    pcl::PointCloud<PointType>::Ptr cloudScanForInitialize;\n\n    ros::Subscriber subIniPoseFromRviz;\n    ros::Publisher pubLaserCloudInWorld;\n    ros::Publisher pubMapWorld;\n    //ros::Publisher 
fortest_publasercloudINWorld;\n\n    float transformInTheWorld[6];// the pose in the world, i.e. the prebuilt map\n    float tranformOdomToWorld[6];\n    int globalLocaSkipFrames = 3;\n    int frameNum = 1;\n    tf::TransformBroadcaster tfOdom2Map;\n    std::mutex mtxtranformOdomToWorld;\n    std::mutex mtx_general;\n    bool globalLocalizeInitialiized = false;\n\n    ros::Subscriber subImu;\n\n    enum InitializedFlag\n    {\n        NonInitialized,\n        Initializing,\n        Initialized\n    };\n    InitializedFlag initializedFlag;\n\n    geometry_msgs::PoseStamped poseOdomToMap;\n    ros::Publisher pubOdomToMapPose;\n\n\n    /*************added by gc******************/\n\n    mapOptimization()\n    {\n\t//std::cout << \"come in\" << std::endl;\n        ISAM2Params parameters;\n        parameters.relinearizeThreshold = 0.1;\n        parameters.relinearizeSkip = 1;\n        isam = new ISAM2(parameters);\n\n        pubKeyPoses = nh.advertise<sensor_msgs::PointCloud2>(\"lio_sam/mapping/trajectory\", 1);\n        pubLaserCloudSurround = nh.advertise<sensor_msgs::PointCloud2>(\"lio_sam/mapping/map_global\", 1);\n        pubOdomAftMappedROS = nh.advertise<nav_msgs::Odometry> (\"lio_sam/mapping/odometry\", 1);\n        pubPath = nh.advertise<nav_msgs::Path>(\"lio_sam/mapping/path\", 1);\n\n        subLaserCloudInfo = nh.subscribe<lio_sam::cloud_info>(\"lio_sam/feature/cloud_info\", 10, &mapOptimization::laserCloudInfoHandler, this, ros::TransportHints().tcpNoDelay());\n        subGPS = nh.subscribe<nav_msgs::Odometry> (gpsTopic, 200, &mapOptimization::gpsHandler, this, ros::TransportHints().tcpNoDelay());\n\t//std::cout << \"come in2\" << std::endl;\n        //added ******************by gc\n        subIniPoseFromRviz = nh.subscribe(\"/initialpose\", 8, &mapOptimization::initialpose_callback, this);\n        pubMapWorld = nh.advertise<sensor_msgs::PointCloud2>(\"lio_sam/mapping/cloud_map_map\",1);//\n        //fortest_publasercloudINWorld = 
nh.advertise<sensor_msgs::PointCloud2>(\"lio_sam/mapping/laserclouinmapframe\",1);\n        pubLaserCloudInWorld = nh.advertise<sensor_msgs::PointCloud2>(\"lio_sam/mapping/lasercloud_in_world\", 1);//added\n        pubOdomToMapPose = nh.advertise<geometry_msgs::PoseStamped>(\"lio_sam/mapping/pose_odomTo_map\", 1);\n\n        //subImu      = nh.subscribe<sensor_msgs::Imu>  (imuTopic,  200, &mapOptimization::imuHandler,      this, ros::TransportHints().tcpNoDelay());\n        //added ******************by gc\n\n        pubHistoryKeyFrames = nh.advertise<sensor_msgs::PointCloud2>(\"lio_sam/mapping/icp_loop_closure_history_cloud\", 1);\n        pubIcpKeyFrames = nh.advertise<sensor_msgs::PointCloud2>(\"lio_sam/mapping/icp_loop_closure_corrected_cloud\", 1);\n\n        pubRecentKeyFrames = nh.advertise<sensor_msgs::PointCloud2>(\"lio_sam/mapping/map_local\", 1);\n        pubRecentKeyFrame = nh.advertise<sensor_msgs::PointCloud2>(\"lio_sam/mapping/cloud_registered\", 1);\n        pubCloudRegisteredRaw = nh.advertise<sensor_msgs::PointCloud2>(\"lio_sam/mapping/cloud_registered_raw\", 1);\n\n        downSizeFilterCorner.setLeafSize(mappingCornerLeafSize, mappingCornerLeafSize, mappingCornerLeafSize);\n        downSizeFilterSurf.setLeafSize(mappingSurfLeafSize, mappingSurfLeafSize, mappingSurfLeafSize);\n        downSizeFilterICP.setLeafSize(mappingSurfLeafSize, mappingSurfLeafSize, mappingSurfLeafSize);\n        downSizeFilterSurroundingKeyPoses.setLeafSize(surroundingKeyframeDensity, surroundingKeyframeDensity, surroundingKeyframeDensity); // for surrounding key poses of scan-to-map optimization\n\t//std::cout << \"come in3\" << std::endl;\n\n        allocateMemory();\n    }\n\n    void allocateMemory()\n    {\n        cloudGlobalMap.reset(new pcl::PointCloud<PointType>());//addded by gc\n\t    cloudGlobalMapDS.reset(new pcl::PointCloud<PointType>());//added\n        cloudScanForInitialize.reset(new pcl::PointCloud<PointType>());\n        resetLIO();\n        //added by 
gc\n        for (int i = 0; i < 6; ++i){\n            transformInTheWorld[i] = 0;\n        }\n\n        for (int i = 0; i < 6; ++i){\n            tranformOdomToWorld[i] = 0;\n        }\n        initializedFlag = NonInitialized;\n        cloudGlobalLoad();//added by gc\n        //added by gc\n    }\n    void resetLIO()\n    {\n        cloudKeyPoses3D.reset(new pcl::PointCloud<PointType>());\n        cloudKeyPoses6D.reset(new pcl::PointCloud<PointTypePose>());\n\n        //kdtreeSurroundingKeyPoses.reset(new pcl::KdTreeFLANN<PointType>());\n        //kdtreeHistoryKeyPoses.reset(new pcl::KdTreeFLANN<PointType>());\n\n        laserCloudCornerLast.reset(new pcl::PointCloud<PointType>()); // corner feature set from odoOptimization\n        laserCloudSurfLast.reset(new pcl::PointCloud<PointType>()); // surf feature set from odoOptimization\n        laserCloudCornerLastDS.reset(new pcl::PointCloud<PointType>()); // downsampled corner featuer set from odoOptimization\n        laserCloudSurfLastDS.reset(new pcl::PointCloud<PointType>()); // downsampled surf featuer set from odoOptimization\n\n        laserCloudOri.reset(new pcl::PointCloud<PointType>());\n        coeffSel.reset(new pcl::PointCloud<PointType>());\n\n        laserCloudOriCornerVec.resize(N_SCAN * Horizon_SCAN);\n        coeffSelCornerVec.resize(N_SCAN * Horizon_SCAN);\n        laserCloudOriCornerFlag.resize(N_SCAN * Horizon_SCAN);\n        laserCloudOriSurfVec.resize(N_SCAN * Horizon_SCAN);\n        coeffSelSurfVec.resize(N_SCAN * Horizon_SCAN);\n        laserCloudOriSurfFlag.resize(N_SCAN * Horizon_SCAN);\n\n        std::fill(laserCloudOriCornerFlag.begin(), laserCloudOriCornerFlag.end(), false);\n        std::fill(laserCloudOriSurfFlag.begin(), laserCloudOriSurfFlag.end(), false);\n\n        laserCloudCornerFromMap.reset(new pcl::PointCloud<PointType>());\n        laserCloudSurfFromMap.reset(new pcl::PointCloud<PointType>());\n        laserCloudCornerFromMapDS.reset(new pcl::PointCloud<PointType>());\n       
 laserCloudSurfFromMapDS.reset(new pcl::PointCloud<PointType>());\n\n        kdtreeCornerFromMap.reset(new pcl::KdTreeFLANN<PointType>());\n        kdtreeSurfFromMap.reset(new pcl::KdTreeFLANN<PointType>());\n\n        latestKeyFrameCloud.reset(new pcl::PointCloud<PointType>());\n        nearHistoryKeyFrameCloud.reset(new pcl::PointCloud<PointType>());\n\n        for (int i = 0; i < 6; ++i){\n            transformTobeMapped[i] = 0;\n        }\n\n        matP.setZero();\n    }\n\n    void laserCloudInfoHandler(const lio_sam::cloud_infoConstPtr& msgIn)\n    {\n\n        // extract time stamp\n        //added\n\n        timeLaserInfoStamp = msgIn->header.stamp;\n        timeLaserCloudInfoLast = msgIn->header.stamp.toSec();\n\n        // extract info and feature cloud\n        cloudInfo = *msgIn;\n        pcl::fromROSMsg(msgIn->cloud_corner,  *laserCloudCornerLast);\n        pcl::fromROSMsg(msgIn->cloud_surface, *laserCloudSurfLast);\n\n        /************************************added by gc*****************************/\n\n        //if the sysytem is not initialized ffer the first scan for the system to initialize\n        //the LIO system stsrt working only when the localization initializing is finished\n         if(initializedFlag == NonInitialized || initializedFlag == Initializing)\n         {\n             if(cloudScanForInitialize->points.size() == 0)\n             {\n                 downsampleCurrentScan();\n                 mtx_general.lock();\n                 *cloudScanForInitialize += *laserCloudCornerLastDS;\n                 *cloudScanForInitialize += *laserCloudSurfLastDS;\n                 mtx_general.unlock();\n\t\t        laserCloudCornerLastDS->clear();\n\t\t        laserCloudSurfLastDS->clear();\n\t\t        laserCloudCornerLastDSNum = 0;\n\t\t        laserCloudSurfLastDSNum = 0;\n\n                 transformTobeMapped[0] = cloudInfo.imuRollInit;\n                 transformTobeMapped[1] = cloudInfo.imuPitchInit;\n                 
transformTobeMapped[2] = cloudInfo.imuYawInit;\n                 if (!useImuHeadingInitialization)//gc: if not use the heading of init_IMU as Initialization\n                     transformTobeMapped[2] = 0;\n                \n             }\n\t\t\n             return;\n         }\n\n        frameNum++;\n\n\n\n\n        /************************************added by gc*****************************/\n\n\n        std::lock_guard<std::mutex> lock(mtx);\n\n        if (timeLaserCloudInfoLast - timeLastProcessing >= mappingProcessInterval) {//gc:control the rate of mapping process\n\n            timeLastProcessing = timeLaserCloudInfoLast;\n\n            updateInitialGuess();//gc: update initial value for states\n\n            extractSurroundingKeyFrames();//gc:\n\n            downsampleCurrentScan();//gc:down sample the current corner points and surface points\n\n            scan2MapOptimization();//gc: calculate the tranformtion using lidar measurement with the Imu preintegration as initial values\n            //and then interpolate roll and pitch angle using IMU measurement and above measurement\n\n\n\n            saveKeyFramesAndFactor();//gc: save corner cloud and surface cloud of this scan, and add odom and GPS factors\n\n            //correctPoses();\n\n            publishOdometry();\n\n            publishFrames();\n        }\n    }\n\n    void gpsHandler(const nav_msgs::Odometry::ConstPtr& gpsMsg)\n    {\n        gpsQueue.push_back(*gpsMsg);\n    }\n\n    void pointAssociateToMap(PointType const * const pi, PointType * const po)\n    {\n        po->x = transPointAssociateToMap(0,0) * pi->x + transPointAssociateToMap(0,1) * pi->y + transPointAssociateToMap(0,2) * pi->z + transPointAssociateToMap(0,3);\n        po->y = transPointAssociateToMap(1,0) * pi->x + transPointAssociateToMap(1,1) * pi->y + transPointAssociateToMap(1,2) * pi->z + transPointAssociateToMap(1,3);\n        po->z = transPointAssociateToMap(2,0) * pi->x + transPointAssociateToMap(2,1) * pi->y + 
transPointAssociateToMap(2,2) * pi->z + transPointAssociateToMap(2,3);\n        po->intensity = pi->intensity;\n    }\n\n    pcl::PointCloud<PointType>::Ptr transformPointCloud(pcl::PointCloud<PointType>::Ptr cloudIn, PointTypePose* transformIn)\n    {\n        pcl::PointCloud<PointType>::Ptr cloudOut(new pcl::PointCloud<PointType>());\n\n        PointType *pointFrom;\n\n        int cloudSize = cloudIn->size();\n        cloudOut->resize(cloudSize);\n\n        Eigen::Affine3f transCur = pcl::getTransformation(transformIn->x, transformIn->y, transformIn->z, transformIn->roll, transformIn->pitch, transformIn->yaw);\n\n        for (int i = 0; i < cloudSize; ++i){\n\n            pointFrom = &cloudIn->points[i];\n            cloudOut->points[i].x = transCur(0,0) * pointFrom->x + transCur(0,1) * pointFrom->y + transCur(0,2) * pointFrom->z + transCur(0,3);\n            cloudOut->points[i].y = transCur(1,0) * pointFrom->x + transCur(1,1) * pointFrom->y + transCur(1,2) * pointFrom->z + transCur(1,3);\n            cloudOut->points[i].z = transCur(2,0) * pointFrom->x + transCur(2,1) * pointFrom->y + transCur(2,2) * pointFrom->z + transCur(2,3);\n            cloudOut->points[i].intensity = pointFrom->intensity;\n        }\n        return cloudOut;\n    }\n\n    gtsam::Pose3 pclPointTogtsamPose3(PointTypePose thisPoint)\n    {\n        return gtsam::Pose3(gtsam::Rot3::RzRyRx(double(thisPoint.roll), double(thisPoint.pitch), double(thisPoint.yaw)),\n                            gtsam::Point3(double(thisPoint.x),    double(thisPoint.y),     double(thisPoint.z)));\n    }\n\n    gtsam::Pose3 trans2gtsamPose(float transformIn[])\n    {\n        return gtsam::Pose3(gtsam::Rot3::RzRyRx(transformIn[0], transformIn[1], transformIn[2]),\n                            gtsam::Point3(transformIn[3], transformIn[4], transformIn[5]));\n    }\n\n    Eigen::Affine3f pclPointToAffine3f(PointTypePose thisPoint)\n    {\n        return pcl::getTransformation(thisPoint.x, thisPoint.y, thisPoint.z, 
thisPoint.roll, thisPoint.pitch, thisPoint.yaw);\n    }\n\n    Eigen::Affine3f trans2Affine3f(float transformIn[])\n    {\n        return pcl::getTransformation(transformIn[3], transformIn[4], transformIn[5], transformIn[0], transformIn[1], transformIn[2]);\n    }\n\n    PointTypePose trans2PointTypePose(float transformIn[])\n    {\n        PointTypePose thisPose6D;\n        thisPose6D.x = transformIn[3];\n        thisPose6D.y = transformIn[4];\n        thisPose6D.z = transformIn[5];\n        thisPose6D.roll  = transformIn[0];\n        thisPose6D.pitch = transformIn[1];\n        thisPose6D.yaw   = transformIn[2];\n        return thisPose6D;\n    }\n\n    void updateInitialGuess()\n    {\n        static Eigen::Affine3f lastImuTransformation;//gc: note that this is static type\n        // initialization\n        if (cloudKeyPoses3D->points.empty())//gc: there is no key pose 初始化\n        {\n            transformTobeMapped[0] = cloudInfo.imuRollInit;\n            transformTobeMapped[1] = cloudInfo.imuPitchInit;\n            transformTobeMapped[2] = cloudInfo.imuYawInit;\n\n            if (!useImuHeadingInitialization)//gc: if not use the heading of init_IMU as Initialization\n                transformTobeMapped[2] = 0;\n\n            lastImuTransformation = pcl::getTransformation(0, 0, 0, cloudInfo.imuRollInit, cloudInfo.imuPitchInit, cloudInfo.imuYawInit); // save imu before return;\n            return;\n        }\n\n        // use imu pre-integration estimation for pose guess\n        if (cloudInfo.odomAvailable == true && cloudInfo.imuPreintegrationResetId == imuPreintegrationResetId)\n        {\n            transformTobeMapped[0] = cloudInfo.initialGuessRoll;\n            transformTobeMapped[1] = cloudInfo.initialGuessPitch;\n            transformTobeMapped[2] = cloudInfo.initialGuessYaw;\n\n            transformTobeMapped[3] = cloudInfo.initialGuessX;\n            transformTobeMapped[4] = cloudInfo.initialGuessY;\n            transformTobeMapped[5] = 
cloudInfo.initialGuessZ;\n\n            lastImuTransformation = pcl::getTransformation(0, 0, 0, cloudInfo.imuRollInit, cloudInfo.imuPitchInit, cloudInfo.imuYawInit); // save imu before return;\n            return;\n        }\n\n\n        // }\n\n        // use imu incremental estimation for pose guess (only rotation)\n        if (cloudInfo.imuAvailable == true)\n        {\n            Eigen::Affine3f transBack = pcl::getTransformation(0, 0, 0, cloudInfo.imuRollInit, cloudInfo.imuPitchInit, cloudInfo.imuYawInit);\n            Eigen::Affine3f transIncre = lastImuTransformation.inverse() * transBack;//gc: the transform of IMU between two scans\n\n            Eigen::Affine3f transTobe = trans2Affine3f(transformTobeMapped);\n            Eigen::Affine3f transFinal = transTobe * transIncre;\n            pcl::getTranslationAndEulerAngles(transFinal, transformTobeMapped[3], transformTobeMapped[4], transformTobeMapped[5],\n                                              transformTobeMapped[0], transformTobeMapped[1], transformTobeMapped[2]);\n\n            lastImuTransformation = pcl::getTransformation(0, 0, 0, cloudInfo.imuRollInit, cloudInfo.imuPitchInit, cloudInfo.imuYawInit); // save imu before return;\n            return;\n        }\n    }\n\n    void extractForLoopClosure()\n    {\n        //change-1\n        /**************gc added**************/\n        //{\n            //in this place the maximum of numPoses is winSize\n            pcl::PointCloud<PointType>::Ptr cloudToExtract(new pcl::PointCloud<PointType>());\n            int numPoses = win_cloudKeyPoses3D.size();\n            for (int i =  numPoses-1; i >=0; --i)\n            {\n                cloudToExtract->push_back(win_cloudKeyPoses3D[i]);\n\n            }\n            extractCloud(cloudToExtract);\n       // }\n        /**************gc added**************/\n\n\n//        {\n//            pcl::PointCloud<PointType>::Ptr cloudToExtract(new pcl::PointCloud<PointType>());\n//            int numPoses = 
cloudKeyPoses3D->size();\n//            for (int i = numPoses-1; i >= 0; --i)\n//            {\n//                if ((int)cloudToExtract->size() <= surroundingKeyframeSize)\n//                    cloudToExtract->push_back(cloudKeyPoses3D->points[i]);\n//                else\n//                    break;\n//            }\n//\n//            extractCloud(cloudToExtract);\n//        }\n\n    }\n    //gc:search nearby key poses and downsample them and then extract the local map points\n//    void extractNearby()\n//    {\n//\n//    }\n    //gc: extract the nearby Map points\n    void extractCloud(pcl::PointCloud<PointType>::Ptr cloudToExtract)\n    {\n        //change-2\n\n        /**************gc added**************/\n\t\t//std::cout << \"cloudToExtract size: \" << cloudToExtract->size() << std::endl;\n\n            std::vector<pcl::PointCloud<PointType>> laserCloudCornerSurroundingVec;\n            std::vector<pcl::PointCloud<PointType>> laserCloudSurfSurroundingVec;\n\n            laserCloudCornerSurroundingVec.resize(cloudToExtract->size());\n            laserCloudSurfSurroundingVec.resize(cloudToExtract->size());\n\n            // extract surrounding map\n#pragma omp parallel for num_threads(numberOfCores)\n            for (int i = 0; i < (int)cloudToExtract->size(); ++i)\n            {\n                PointTypePose thisPose6D;\n                thisPose6D = win_cloudKeyPoses6D[i];\n                laserCloudCornerSurroundingVec[i]  = *transformPointCloud(win_cornerCloudKeyFrames[i],  &thisPose6D);\n                laserCloudSurfSurroundingVec[i]    = *transformPointCloud(win_surfCloudKeyFrames[i],    &thisPose6D);\n            }\n\n\n            // fuse the map\n            laserCloudCornerFromMap->clear();\n            laserCloudSurfFromMap->clear();\n            for (int i = 0; i < (int)cloudToExtract->size(); ++i)\n            {\n                *laserCloudCornerFromMap += laserCloudCornerSurroundingVec[i];\n                *laserCloudSurfFromMap   += 
laserCloudSurfSurroundingVec[i];\n            }\n\n            // Downsample the surrounding corner key frames (or map)\n            downSizeFilterCorner.setInputCloud(laserCloudCornerFromMap);\n            downSizeFilterCorner.filter(*laserCloudCornerFromMapDS);\n            laserCloudCornerFromMapDSNum = laserCloudCornerFromMapDS->size();\n\t//std::cout << \"the size of laserCloudCornerFromMapDS: \" << laserCloudCornerFromMapDSNum << std::endl;\n            // Downsample the surrounding surf key frames (or map)\n            downSizeFilterSurf.setInputCloud(laserCloudSurfFromMap);\n            downSizeFilterSurf.filter(*laserCloudSurfFromMapDS);\n            laserCloudSurfFromMapDSNum = laserCloudSurfFromMapDS->size();\n\n        /**************gc added**************/\n\n\n//        {\n//            std::vector<pcl::PointCloud<PointType>> laserCloudCornerSurroundingVec;\n//            std::vector<pcl::PointCloud<PointType>> laserCloudSurfSurroundingVec;\n//\n//            laserCloudCornerSurroundingVec.resize(cloudToExtract->size());\n//            laserCloudSurfSurroundingVec.resize(cloudToExtract->size());\n//\n//            // extract surrounding map\n//#pragma omp parallel for num_threads(numberOfCores)\n//            for (int i = 0; i < (int)cloudToExtract->size(); ++i)\n//            {\n//                if (pointDistance(cloudToExtract->points[i], cloudKeyPoses3D->back()) > surroundingKeyframeSearchRadius)\n//                    continue;\n//                int thisKeyInd = (int)cloudToExtract->points[i].intensity;//gc: the index of this key frame\n//                //gc: tranform the corner points and surfpoints of the nearby keyFrames into the world frame\n//                laserCloudCornerSurroundingVec[i]  = *transformPointCloud(cornerCloudKeyFrames[thisKeyInd],  &cloudKeyPoses6D->points[thisKeyInd]);\n//                laserCloudSurfSurroundingVec[i]    = *transformPointCloud(surfCloudKeyFrames[thisKeyInd],    &cloudKeyPoses6D->points[thisKeyInd]);\n// 
           }\n//\n//            // fuse the map\n//            laserCloudCornerFromMap->clear();\n//            laserCloudSurfFromMap->clear();\n//            for (int i = 0; i < (int)cloudToExtract->size(); ++i)\n//            {\n//                *laserCloudCornerFromMap += laserCloudCornerSurroundingVec[i];\n//                *laserCloudSurfFromMap   += laserCloudSurfSurroundingVec[i];\n//            }\n//\n//            // Downsample the surrounding corner key frames (or map)\n//            downSizeFilterCorner.setInputCloud(laserCloudCornerFromMap);\n//            downSizeFilterCorner.filter(*laserCloudCornerFromMapDS);\n//            laserCloudCornerFromMapDSNum = laserCloudCornerFromMapDS->size();\n//            // Downsample the surrounding surf key frames (or map)\n//            downSizeFilterSurf.setInputCloud(laserCloudSurfFromMap);\n//            downSizeFilterSurf.filter(*laserCloudSurfFromMapDS);\n//            laserCloudSurfFromMapDSNum = laserCloudSurfFromMapDS->size();\n//        }\n\n    }\n\n    void extractSurroundingKeyFrames()\n    {\n        if (cloudKeyPoses3D->points.empty() == true)\n            return;\n\n        //if (loopClosureEnableFlag == true)//gc:TODO: a little weired: Loop closure should search the whole map while\n        //{\n\t\t//std::cout << \"the size of cloudKeyPoses3D: \" << cloudKeyPoses3D->points.size() << std::endl;\n            extractForLoopClosure(); //gc: the name is misleading\n        //} else {\n           // extractNearby();\n        //}\n    }\n\n    void downsampleCurrentScan()\n    {\n        // Downsample cloud from current scan\n        laserCloudCornerLastDS->clear();\n        downSizeFilterCorner.setInputCloud(laserCloudCornerLast);\n        downSizeFilterCorner.filter(*laserCloudCornerLastDS);\n        laserCloudCornerLastDSNum = laserCloudCornerLastDS->size();\n\n        laserCloudSurfLastDS->clear();\n        downSizeFilterSurf.setInputCloud(laserCloudSurfLast);\n        
downSizeFilterSurf.filter(*laserCloudSurfLastDS);\n        laserCloudSurfLastDSNum = laserCloudSurfLastDS->size();\n    }\n\n    void updatePointAssociateToMap()\n    {\n        transPointAssociateToMap = trans2Affine3f(transformTobeMapped);\n    }\n\n    void cornerOptimization()\n    {\n        updatePointAssociateToMap();\n\n#pragma omp parallel for num_threads(numberOfCores)\n        //gc: for every corner point\n        for (int i = 0; i < laserCloudCornerLastDSNum; i++)\n        {\n            PointType pointOri, pointSel, coeff;\n            std::vector<int> pointSearchInd;\n            std::vector<float> pointSearchSqDis;\n\n            pointOri = laserCloudCornerLastDS->points[i];\n            //gc: calculate its location in the map using the prediction pose\n            pointAssociateToMap(&pointOri, &pointSel);\n            kdtreeCornerFromMap->nearestKSearch(pointSel, 5, pointSearchInd, pointSearchSqDis);\n\n            cv::Mat matA1(3, 3, CV_32F, cv::Scalar::all(0));\n            cv::Mat matD1(1, 3, CV_32F, cv::Scalar::all(0));\n            cv::Mat matV1(3, 3, CV_32F, cv::Scalar::all(0));\n\n            if (pointSearchSqDis[4] < 1.0) {\n                float cx = 0, cy = 0, cz = 0;\n                for (int j = 0; j < 5; j++) {\n                    cx += laserCloudCornerFromMapDS->points[pointSearchInd[j]].x;\n                    cy += laserCloudCornerFromMapDS->points[pointSearchInd[j]].y;\n                    cz += laserCloudCornerFromMapDS->points[pointSearchInd[j]].z;\n                }\n                //gc: the average coordinate of the most nearest points\n                cx /= 5; cy /= 5;  cz /= 5;\n\n                float a11 = 0, a12 = 0, a13 = 0, a22 = 0, a23 = 0, a33 = 0;\n                for (int j = 0; j < 5; j++) {\n                    float ax = laserCloudCornerFromMapDS->points[pointSearchInd[j]].x - cx;\n                    float ay = laserCloudCornerFromMapDS->points[pointSearchInd[j]].y - cy;\n                    float az = 
laserCloudCornerFromMapDS->points[pointSearchInd[j]].z - cz;\n\n                    a11 += ax * ax; a12 += ax * ay; a13 += ax * az;\n                    a22 += ay * ay; a23 += ay * az;\n                    a33 += az * az;\n                }\n                a11 /= 5; a12 /= 5; a13 /= 5; a22 /= 5; a23 /= 5; a33 /= 5;\n\n                matA1.at<float>(0, 0) = a11; matA1.at<float>(0, 1) = a12; matA1.at<float>(0, 2) = a13;\n                matA1.at<float>(1, 0) = a12; matA1.at<float>(1, 1) = a22; matA1.at<float>(1, 2) = a23;\n                matA1.at<float>(2, 0) = a13; matA1.at<float>(2, 1) = a23; matA1.at<float>(2, 2) = a33;\n\n                cv::eigen(matA1, matD1, matV1);\n\n                if (matD1.at<float>(0, 0) > 3 * matD1.at<float>(0, 1)) {\n\n                    float x0 = pointSel.x;\n                    float y0 = pointSel.y;\n                    float z0 = pointSel.z;\n                    float x1 = cx + 0.1 * matV1.at<float>(0, 0);\n                    float y1 = cy + 0.1 * matV1.at<float>(0, 1);\n                    float z1 = cz + 0.1 * matV1.at<float>(0, 2);\n                    float x2 = cx - 0.1 * matV1.at<float>(0, 0);\n                    float y2 = cy - 0.1 * matV1.at<float>(0, 1);\n                    float z2 = cz - 0.1 * matV1.at<float>(0, 2);\n\n                    float a012 = sqrt(((x0 - x1)*(y0 - y2) - (x0 - x2)*(y0 - y1)) * ((x0 - x1)*(y0 - y2) - (x0 - x2)*(y0 - y1))\n                                      + ((x0 - x1)*(z0 - z2) - (x0 - x2)*(z0 - z1)) * ((x0 - x1)*(z0 - z2) - (x0 - x2)*(z0 - z1))\n                                      + ((y0 - y1)*(z0 - z2) - (y0 - y2)*(z0 - z1)) * ((y0 - y1)*(z0 - z2) - (y0 - y2)*(z0 - z1)));\n\n                    float l12 = sqrt((x1 - x2)*(x1 - x2) + (y1 - y2)*(y1 - y2) + (z1 - z2)*(z1 - z2));\n\n                    float la = ((y1 - y2)*((x0 - x1)*(y0 - y2) - (x0 - x2)*(y0 - y1))\n                                + (z1 - z2)*((x0 - x1)*(z0 - z2) - (x0 - x2)*(z0 - z1))) / a012 / l12;\n\n              
    // Build point-to-plane residuals for every downsampled surface point.
    // For each point: find its 5 nearest map neighbors, fit a plane to them,
    // and if the fit is good, store the weighted plane normal + distance as
    // the optimization coefficient for LMOptimization().
    void surfOptimization()
    {
        updatePointAssociateToMap();

#pragma omp parallel for num_threads(numberOfCores)
        for (int i = 0; i < laserCloudSurfLastDSNum; i++)
        {
            PointType pointOri, pointSel, coeff;
            std::vector<int> pointSearchInd;
            std::vector<float> pointSearchSqDis;

            // Transform the current-scan point into the map frame, then
            // look up its 5 nearest neighbors in the surface map kd-tree.
            pointOri = laserCloudSurfLastDS->points[i];
            pointAssociateToMap(&pointOri, &pointSel);
            kdtreeSurfFromMap->nearestKSearch(pointSel, 5, pointSearchInd, pointSearchSqDis);

            // Plane fit: solve A0 * n = -1 for the (unnormalized) plane normal n,
            // where each row of A0 is one neighbor's coordinates.
            Eigen::Matrix<float, 5, 3> matA0;
            Eigen::Matrix<float, 5, 1> matB0;
            Eigen::Vector3f matX0;

            matA0.setZero();
            matB0.fill(-1);
            matX0.setZero();

            // Only use neighborhoods where even the farthest of the 5 neighbors
            // is within 1.0 squared distance (presumably meters^2 — TODO confirm units).
            if (pointSearchSqDis[4] < 1.0) {
                for (int j = 0; j < 5; j++) {
                    matA0(j, 0) = laserCloudSurfFromMapDS->points[pointSearchInd[j]].x;
                    matA0(j, 1) = laserCloudSurfFromMapDS->points[pointSearchInd[j]].y;
                    matA0(j, 2) = laserCloudSurfFromMapDS->points[pointSearchInd[j]].z;
                }

                matX0 = matA0.colPivHouseholderQr().solve(matB0);

                // Plane equation pa*x + pb*y + pc*z + pd = 0, normalized so
                // (pa, pb, pc) is a unit normal.
                float pa = matX0(0, 0);
                float pb = matX0(1, 0);
                float pc = matX0(2, 0);
                float pd = 1;

                float ps = sqrt(pa * pa + pb * pb + pc * pc);
                pa /= ps; pb /= ps; pc /= ps; pd /= ps;

                // Reject the plane if any of the 5 neighbors lies more than 0.2
                // from it — the neighborhood is not planar enough.
                bool planeValid = true;
                for (int j = 0; j < 5; j++) {
                    if (fabs(pa * laserCloudSurfFromMapDS->points[pointSearchInd[j]].x +
                             pb * laserCloudSurfFromMapDS->points[pointSearchInd[j]].y +
                             pc * laserCloudSurfFromMapDS->points[pointSearchInd[j]].z + pd) > 0.2) {
                        planeValid = false;
                        break;
                    }
                }

                if (planeValid) {
                    // Signed point-to-plane distance of the query point.
                    float pd2 = pa * pointSel.x + pb * pointSel.y + pc * pointSel.z + pd;

                    // Weight: shrinks with the residual, grows with range
                    // (the sqrt(sqrt(range)) term down-weights near points less).
                    float s = 1 - 0.9 * fabs(pd2) / sqrt(sqrt(pointSel.x * pointSel.x
                                                              + pointSel.y * pointSel.y + pointSel.z * pointSel.z));

                    // Coefficient = weighted normal; intensity carries the weighted residual.
                    coeff.x = s * pa;
                    coeff.y = s * pb;
                    coeff.z = s * pc;
                    coeff.intensity = s * pd2;

                    // Keep only confident correspondences; per-index writes keep
                    // the OpenMP loop race-free.
                    if (s > 0.1) {
                        laserCloudOriSurfVec[i] = pointOri;
                        coeffSelSurfVec[i] = coeff;
                        laserCloudOriSurfFlag[i] = true;
                    }
                }
            }
        }
    }
   coeffSel->push_back(coeffSelCornerVec[i]);\n            }\n        }\n        // combine surf coeffs\n        for (int i = 0; i < laserCloudSurfLastDSNum; ++i){\n            if (laserCloudOriSurfFlag[i] == true){\n                laserCloudOri->push_back(laserCloudOriSurfVec[i]);\n                coeffSel->push_back(coeffSelSurfVec[i]);\n            }\n        }\n        // reset flag for next iteration\n        std::fill(laserCloudOriCornerFlag.begin(), laserCloudOriCornerFlag.end(), false);\n        std::fill(laserCloudOriSurfFlag.begin(), laserCloudOriSurfFlag.end(), false);\n    }\n\n    bool LMOptimization(int iterCount)\n    {\n        // This optimization is from the original loam_velodyne by Ji Zhang, need to cope with coordinate transformation\n        // lidar <- camera      ---     camera <- lidar\n        // x = z                ---     x = y\n        // y = x                ---     y = z\n        // z = y                ---     z = x\n        // roll = yaw           ---     roll = pitch\n        // pitch = roll         ---     pitch = yaw\n        // yaw = pitch          ---     yaw = roll\n\n        // lidar -> camera\n        float srx = sin(transformTobeMapped[1]);\n        float crx = cos(transformTobeMapped[1]);\n        float sry = sin(transformTobeMapped[2]);\n        float cry = cos(transformTobeMapped[2]);\n        float srz = sin(transformTobeMapped[0]);\n        float crz = cos(transformTobeMapped[0]);\n\n        int laserCloudSelNum = laserCloudOri->size();\n        if (laserCloudSelNum < 50) {\n            return false;\n        }\n\n        cv::Mat matA(laserCloudSelNum, 6, CV_32F, cv::Scalar::all(0));\n        cv::Mat matAt(6, laserCloudSelNum, CV_32F, cv::Scalar::all(0));\n        cv::Mat matAtA(6, 6, CV_32F, cv::Scalar::all(0));\n        cv::Mat matB(laserCloudSelNum, 1, CV_32F, cv::Scalar::all(0));\n        cv::Mat matAtB(6, 1, CV_32F, cv::Scalar::all(0));\n        cv::Mat matX(6, 1, CV_32F, cv::Scalar::all(0));\n        cv::Mat 
    // One Gauss-Newton step of scan-to-map pose refinement.
    // Returns true when the update is small enough to declare convergence
    // (caller stops iterating), false to keep iterating. Also returns false
    // early when fewer than 50 correspondences are available.
    bool LMOptimization(int iterCount)
    {
        // This optimization is from the original loam_velodyne by Ji Zhang, need to cope with coordinate transformation
        // lidar <- camera      ---     camera <- lidar
        // x = z                ---     x = y
        // y = x                ---     y = z
        // z = y                ---     z = x
        // roll = yaw           ---     roll = pitch
        // pitch = roll         ---     pitch = yaw
        // yaw = pitch          ---     yaw = roll

        // lidar -> camera: pre-compute sines/cosines of the current pose,
        // remapped into the camera-frame axis convention described above.
        float srx = sin(transformTobeMapped[1]);
        float crx = cos(transformTobeMapped[1]);
        float sry = sin(transformTobeMapped[2]);
        float cry = cos(transformTobeMapped[2]);
        float srz = sin(transformTobeMapped[0]);
        float crz = cos(transformTobeMapped[0]);

        // Not enough correspondences for a well-conditioned 6-DoF solve.
        int laserCloudSelNum = laserCloudOri->size();
        if (laserCloudSelNum < 50) {
            return false;
        }

        // Normal equations: solve (A^T A) x = A^T b for the 6-vector pose update.
        cv::Mat matA(laserCloudSelNum, 6, CV_32F, cv::Scalar::all(0));
        cv::Mat matAt(6, laserCloudSelNum, CV_32F, cv::Scalar::all(0));
        cv::Mat matAtA(6, 6, CV_32F, cv::Scalar::all(0));
        cv::Mat matB(laserCloudSelNum, 1, CV_32F, cv::Scalar::all(0));
        cv::Mat matAtB(6, 1, CV_32F, cv::Scalar::all(0));
        cv::Mat matX(6, 1, CV_32F, cv::Scalar::all(0));
        cv::Mat matP(6, 6, CV_32F, cv::Scalar::all(0));

        PointType pointOri, coeff;

        for (int i = 0; i < laserCloudSelNum; i++) {
            // lidar -> camera: permute point axes into the camera convention.
            pointOri.x = laserCloudOri->points[i].y;
            pointOri.y = laserCloudOri->points[i].z;
            pointOri.z = laserCloudOri->points[i].x;
            // lidar -> camera: same permutation for the residual gradient.
            coeff.x = coeffSel->points[i].y;
            coeff.y = coeffSel->points[i].z;
            coeff.z = coeffSel->points[i].x;
            coeff.intensity = coeffSel->points[i].intensity;
            // in camera: analytic partial derivatives of the residual w.r.t.
            // the three rotation angles (Jacobian rows).
            float arx = (crx*sry*srz*pointOri.x + crx*crz*sry*pointOri.y - srx*sry*pointOri.z) * coeff.x
                        + (-srx*srz*pointOri.x - crz*srx*pointOri.y - crx*pointOri.z) * coeff.y
                        + (crx*cry*srz*pointOri.x + crx*cry*crz*pointOri.y - cry*srx*pointOri.z) * coeff.z;

            float ary = ((cry*srx*srz - crz*sry)*pointOri.x
                         + (sry*srz + cry*crz*srx)*pointOri.y + crx*cry*pointOri.z) * coeff.x
                        + ((-cry*crz - srx*sry*srz)*pointOri.x
                           + (cry*srz - crz*srx*sry)*pointOri.y - crx*sry*pointOri.z) * coeff.z;

            float arz = ((crz*srx*sry - cry*srz)*pointOri.x + (-cry*crz-srx*sry*srz)*pointOri.y)*coeff.x
                        + (crx*crz*pointOri.x - crx*srz*pointOri.y) * coeff.y
                        + ((sry*srz + cry*crz*srx)*pointOri.x + (crz*sry-cry*srx*srz)*pointOri.y)*coeff.z;
            // lidar -> camera: permute the Jacobian columns back into the
            // lidar-frame state ordering [roll, pitch, yaw, x, y, z].
            matA.at<float>(i, 0) = arz;
            matA.at<float>(i, 1) = arx;
            matA.at<float>(i, 2) = ary;
            matA.at<float>(i, 3) = coeff.z;
            matA.at<float>(i, 4) = coeff.x;
            matA.at<float>(i, 5) = coeff.y;
            matB.at<float>(i, 0) = -coeff.intensity;
        }

        cv::transpose(matA, matAt);
        matAtA = matAt * matA;
        matAtB = matAt * matB;
        cv::solve(matAtA, matAtB, matX, cv::DECOMP_QR);

        // On the first iteration only: detect degenerate directions of the
        // Hessian (eigenvalue below threshold) and build a projection matrix
        // that zeroes updates along them (Zhang & Singh's technique).
        if (iterCount == 0) {

            cv::Mat matE(1, 6, CV_32F, cv::Scalar::all(0));
            cv::Mat matV(6, 6, CV_32F, cv::Scalar::all(0));
            cv::Mat matV2(6, 6, CV_32F, cv::Scalar::all(0));

            cv::eigen(matAtA, matE, matV);
            matV.copyTo(matV2);

            isDegenerate = false;
            float eignThre[6] = {100, 100, 100, 100, 100, 100};
            for (int i = 5; i >= 0; i--) {
                if (matE.at<float>(0, i) < eignThre[i]) {
                    for (int j = 0; j < 6; j++) {
                        matV2.at<float>(i, j) = 0;
                    }
                    isDegenerate = true;
                } else {
                    break;
                }
            }
            matP = matV.inv() * matV2;
        }

        // Project the update so it contributes nothing along degenerate axes.
        if (isDegenerate) {
            cv::Mat matX2(6, 1, CV_32F, cv::Scalar::all(0));
            matX.copyTo(matX2);
            matX = matP * matX2;
        }

        // Apply the pose increment.
        transformTobeMapped[0] += matX.at<float>(0, 0);
        transformTobeMapped[1] += matX.at<float>(1, 0);
        transformTobeMapped[2] += matX.at<float>(2, 0);
        transformTobeMapped[3] += matX.at<float>(3, 0);
        transformTobeMapped[4] += matX.at<float>(4, 0);
        transformTobeMapped[5] += matX.at<float>(5, 0);

        // Convergence test: rotation increment in degrees, translation in cm.
        float deltaR = sqrt(
                pow(pcl::rad2deg(matX.at<float>(0, 0)), 2) +
                pow(pcl::rad2deg(matX.at<float>(1, 0)), 2) +
                pow(pcl::rad2deg(matX.at<float>(2, 0)), 2));
        float deltaT = sqrt(
                pow(matX.at<float>(3, 0) * 100, 2) +
                pow(matX.at<float>(4, 0) * 100, 2) +
                pow(matX.at<float>(5, 0) * 100, 2));

        if (deltaR < 0.05 && deltaT < 0.05) {
            return true; // converged
        }
        return false; // keep optimizing
    }
(cloudKeyPoses3D->points.empty())\n            return;\n\n        if (laserCloudCornerLastDSNum > edgeFeatureMinValidNum && laserCloudSurfLastDSNum > surfFeatureMinValidNum)\n        {\n            //std::cout << \"kdtree input 0.01: \" << std::endl;\n            kdtreeCornerFromMap->setInputCloud(laserCloudCornerFromMapDS);\n            kdtreeSurfFromMap->setInputCloud(laserCloudSurfFromMapDS);\n            //std::cout << \"kdtree input 0.02: \" << std::endl;\n\n\n            for (int iterCount = 0; iterCount < 30; iterCount++)\n            {\n                laserCloudOri->clear();\n                coeffSel->clear();\n                //gc: calculate some coeff and judge whether tho point is valid corner point\n                cornerOptimization();\n                //gc: calculate some coeff and judge whether tho point is valid surface point\n                surfOptimization();\n\n                combineOptimizationCoeffs();\n\n                //gc: the true iteration steps, calculate the transform\n                if (LMOptimization(iterCount) == true)\n                    break;\n            }\n            //gc: interpolate the roll and pitch angle using the IMU measurement and Lidar calculation\n            transformUpdate();\n        } else {\n            ROS_WARN(\"Not enough features! 
Only %d edge and %d planar features available.\", laserCloudCornerLastDSNum, laserCloudSurfLastDSNum);\n        }\n    }\n    //gc: interpolate the roll and pitch angle using the IMU measurement and Lidar calculation\n    void transformUpdate()\n    {\n        if (cloudInfo.imuAvailable == true)\n        {\n            if (std::abs(cloudInfo.imuPitchInit) < 1.4)\n            {\n                double imuWeight = 0.01;\n                tf::Quaternion imuQuaternion;\n                tf::Quaternion transformQuaternion;\n                double rollMid, pitchMid, yawMid;\n\n                // slerp roll\n                transformQuaternion.setRPY(transformTobeMapped[0], 0, 0);\n                imuQuaternion.setRPY(cloudInfo.imuRollInit, 0, 0);\n                //gc: interpolate between Imu roll measurement and angle from lidar calculation\n                tf::Matrix3x3(transformQuaternion.slerp(imuQuaternion, imuWeight)).getRPY(rollMid, pitchMid, yawMid);\n                transformTobeMapped[0] = rollMid;\n\n                // slerp pitch\n                transformQuaternion.setRPY(0, transformTobeMapped[1], 0);\n                imuQuaternion.setRPY(0, cloudInfo.imuPitchInit, 0);\n                //gc: interpolate between Imu roll measurement and angle from lidar calculation\n                tf::Matrix3x3(transformQuaternion.slerp(imuQuaternion, imuWeight)).getRPY(rollMid, pitchMid, yawMid);\n                transformTobeMapped[1] = pitchMid;\n            }\n        }\n\n        transformTobeMapped[0] = constraintTransformation(transformTobeMapped[0], rotation_tollerance);\n        transformTobeMapped[1] = constraintTransformation(transformTobeMapped[1], rotation_tollerance);\n        transformTobeMapped[5] = constraintTransformation(transformTobeMapped[5], z_tollerance);\n    }\n\n    float constraintTransformation(float value, float limit)\n    {\n        if (value < -limit)\n            value = -limit;\n        if (value > limit)\n            value = limit;\n\n       
 return value;\n    }\n\n    bool saveFrame()\n    {\n        if (cloudKeyPoses3D->points.empty())\n            return true;\n\n        Eigen::Affine3f transStart = pclPointToAffine3f(cloudKeyPoses6D->back());\n        Eigen::Affine3f transFinal = pcl::getTransformation(transformTobeMapped[3], transformTobeMapped[4], transformTobeMapped[5],\n                                                            transformTobeMapped[0], transformTobeMapped[1], transformTobeMapped[2]);\n        Eigen::Affine3f transBetween = transStart.inverse() * transFinal;\n        float x, y, z, roll, pitch, yaw;\n        pcl::getTranslationAndEulerAngles(transBetween, x, y, z, roll, pitch, yaw);\n        //gc: judge whther should generate key pose\n        if (abs(roll)  < surroundingkeyframeAddingAngleThreshold &&\n            abs(pitch) < surroundingkeyframeAddingAngleThreshold &&\n            abs(yaw)   < surroundingkeyframeAddingAngleThreshold &&\n            sqrt(x*x + y*y + z*z) < surroundingkeyframeAddingDistThreshold)\n            return false;\n\n        return true;\n    }\n\n    void addOdomFactor()\n    {\n        //gc: the first key pose\n        if (cloudKeyPoses3D->points.empty())\n        {\n            noiseModel::Diagonal::shared_ptr priorNoise = noiseModel::Diagonal::Variances((Vector(6) << 1e-2, 1e-2, M_PI*M_PI, 1e8, 1e8, 1e8).finished()); // rad*rad, meter*meter\n            gtSAMgraph.add(PriorFactor<Pose3>(0, trans2gtsamPose(transformTobeMapped), priorNoise));\n            initialEstimate.insert(0, trans2gtsamPose(transformTobeMapped));\n        }else{\n            noiseModel::Diagonal::shared_ptr odometryNoise = noiseModel::Diagonal::Variances((Vector(6) << 1e-6, 1e-6, 1e-6, 1e-4, 1e-4, 1e-4).finished());\n            gtsam::Pose3 poseFrom = pclPointTogtsamPose3(cloudKeyPoses6D->points.back());\n            gtsam::Pose3 poseTo   = trans2gtsamPose(transformTobeMapped);\n            //gc: add constraint between current pose and previous pose\n            
gtSAMgraph.add(BetweenFactor<Pose3>(cloudKeyPoses3D->size()-1, cloudKeyPoses3D->size(), poseFrom.between(poseTo), odometryNoise));\n            initialEstimate.insert(cloudKeyPoses3D->size(), poseTo);\n        }\n    }\n\n\n\n    void saveKeyFramesAndFactor()\n    {\n        //gc: judge whther should generate key pose\n        if (saveFrame() == false)\n            return;\n\n        // odom factor\n        //gc: add odom factor in the graph\n\n        addOdomFactor();\n\n        // update iSAM\n        isam->update(gtSAMgraph, initialEstimate);\n        isam->update();\n\n        gtSAMgraph.resize(0);\n        initialEstimate.clear();\n\n        //save key poses\n        PointType thisPose3D;\n        PointTypePose thisPose6D;\n        Pose3 latestEstimate;\n\n        isamCurrentEstimate = isam->calculateEstimate();\n        latestEstimate = isamCurrentEstimate.at<Pose3>(isamCurrentEstimate.size()-1);\n        // cout << \"****************************************************\" << endl;\n        // isamCurrentEstimate.print(\"Current estimate: \");\n\n        //gc:cloudKeyPoses3D can be used to calculate the nearest key frames\n        thisPose3D.x = latestEstimate.translation().x();\n        thisPose3D.y = latestEstimate.translation().y();\n        thisPose3D.z = latestEstimate.translation().z();\n        thisPose3D.intensity = cloudKeyPoses3D->size(); // this can be used as index\n        cloudKeyPoses3D->push_back(thisPose3D);\n\n\n        thisPose6D.x = thisPose3D.x;\n        thisPose6D.y = thisPose3D.y;\n        thisPose6D.z = thisPose3D.z;\n        thisPose6D.intensity = thisPose3D.intensity ; // this can be used as index\n        thisPose6D.roll  = latestEstimate.rotation().roll();\n        thisPose6D.pitch = latestEstimate.rotation().pitch();\n        thisPose6D.yaw   = latestEstimate.rotation().yaw();\n        thisPose6D.time = timeLaserCloudInfoLast;\n        cloudKeyPoses6D->push_back(thisPose6D);\n\n        //change-3\n        /*added    gc*/\n        
mtxWin.lock();\n\t//std::cout <<\"in saveKeyFramesAndFactor(): the size of cloudKeyPoses3D is: \" << cloudKeyPoses3D->points.size() << std::endl;\n        \n        \n            \n            win_cloudKeyPoses3D.push_back(thisPose3D);\n            win_cloudKeyPoses6D.push_back(thisPose6D);\n\t\tif(win_cloudKeyPoses3D.size() > winSize)\n\t\t{\n\t\t\twin_cloudKeyPoses3D.erase(win_cloudKeyPoses3D.begin());\n\t\t\twin_cloudKeyPoses6D.erase(win_cloudKeyPoses6D.begin());\n\t\t}\n\t\t\n        \n        /*added    gc*/\n\n        // cout << \"****************************************************\" << endl;\n        // cout << \"Pose covariance:\" << endl;\n        // cout << isam->marginalCovariance(isamCurrentEstimate.size()-1) << endl << endl;\n        poseCovariance = isam->marginalCovariance(isamCurrentEstimate.size()-1);\n\n        // save updated transform\n        transformTobeMapped[0] = latestEstimate.rotation().roll();\n        transformTobeMapped[1] = latestEstimate.rotation().pitch();\n        transformTobeMapped[2] = latestEstimate.rotation().yaw();\n        transformTobeMapped[3] = latestEstimate.translation().x();\n        transformTobeMapped[4] = latestEstimate.translation().y();\n        transformTobeMapped[5] = latestEstimate.translation().z();\n\n        // save all the received edge and surf points\n        pcl::PointCloud<PointType>::Ptr thisCornerKeyFrame(new pcl::PointCloud<PointType>());\n        pcl::PointCloud<PointType>::Ptr thisSurfKeyFrame(new pcl::PointCloud<PointType>());\n        //gc:\n        pcl::copyPointCloud(*laserCloudCornerLastDS,  *thisCornerKeyFrame);\n        pcl::copyPointCloud(*laserCloudSurfLastDS,    *thisSurfKeyFrame);\n\n        // save key frame cloud\n        cornerCloudKeyFrames.push_back(thisCornerKeyFrame);\n        surfCloudKeyFrames.push_back(thisSurfKeyFrame);\n\n        //change-4\n        /*added    gc*/\n\n        \n            win_cornerCloudKeyFrames.push_back(thisCornerKeyFrame);\n            
win_surfCloudKeyFrames.push_back(thisSurfKeyFrame);\n        \tif(win_cornerCloudKeyFrames.size() > winSize)\n\t\t{\n\t\t\twin_cornerCloudKeyFrames.erase(win_cornerCloudKeyFrames.begin());\n\t\t\twin_surfCloudKeyFrames.erase(win_surfCloudKeyFrames.begin());\n\t\t}\n\t\t\n        \n        mtxWin.unlock();\n        /*added    gc*/\n\n        // save path for visualization\n        updatePath(thisPose6D);\n    }\n\n\n\n    void updatePath(const PointTypePose& pose_in)\n    {\n        geometry_msgs::PoseStamped pose_stamped;\n        pose_stamped.header.stamp = ros::Time().fromSec(pose_in.time);\n        pose_stamped.header.frame_id = \"odom\";\n        pose_stamped.pose.position.x = pose_in.x;\n        pose_stamped.pose.position.y = pose_in.y;\n        pose_stamped.pose.position.z = pose_in.z;\n        tf::Quaternion q = tf::createQuaternionFromRPY(pose_in.roll, pose_in.pitch, pose_in.yaw);\n        pose_stamped.pose.orientation.x = q.x();\n        pose_stamped.pose.orientation.y = q.y();\n        pose_stamped.pose.orientation.z = q.z();\n        pose_stamped.pose.orientation.w = q.w();\n\n        globalPath.poses.push_back(pose_stamped);\n    }\n\n    void publishOdometry()\n    {\n        // Publish odometry for ROS\n        nav_msgs::Odometry laserOdometryROS;\n        laserOdometryROS.header.stamp = timeLaserInfoStamp;\n        laserOdometryROS.header.frame_id = \"odom\";\n        laserOdometryROS.child_frame_id = \"odom_mapping\";\n        laserOdometryROS.pose.pose.position.x = transformTobeMapped[3];\n        laserOdometryROS.pose.pose.position.y = transformTobeMapped[4];\n        laserOdometryROS.pose.pose.position.z = transformTobeMapped[5];\n        laserOdometryROS.pose.pose.orientation = tf::createQuaternionMsgFromRollPitchYaw(transformTobeMapped[0], transformTobeMapped[1], transformTobeMapped[2]);\n        laserOdometryROS.pose.covariance[0] = double(imuPreintegrationResetId);\n        pubOdomAftMappedROS.publish(laserOdometryROS);\n    }\n\n    void 
    // Publish visualization topics for the current frame: key poses, the
    // surrounding map, the registered feature cloud (odom frame), the feature
    // cloud re-expressed in the global map frame, the registered raw cloud,
    // and the accumulated path. Subscriber counts gate the expensive topics.
    void publishFrames()
    {
        if (cloudKeyPoses3D->points.empty())
            return;
        // publish key poses
        publishCloud(&pubKeyPoses, cloudKeyPoses3D, timeLaserInfoStamp, "odom");
        // Publish surrounding key frames
        publishCloud(&pubRecentKeyFrames, laserCloudSurfFromMapDS, timeLaserInfoStamp, "odom");
        // publish registered key frame
        //gc: feature points
        if (pubRecentKeyFrame.getNumSubscribers() != 0)
        {
            // Corner + surface features transformed by the current pose.
            pcl::PointCloud<PointType>::Ptr cloudOut(new pcl::PointCloud<PointType>());
            PointTypePose thisPose6D = trans2PointTypePose(transformTobeMapped);
            *cloudOut += *transformPointCloud(laserCloudCornerLastDS,  &thisPose6D);
            *cloudOut += *transformPointCloud(laserCloudSurfLastDS,    &thisPose6D);
            publishCloud(&pubRecentKeyFrame, cloudOut, timeLaserInfoStamp, "odom");
        }
        //added *****************by gc
        if(pubLaserCloudInWorld.getNumSubscribers() != 0)
        {
            // Compose map<-odom (shared state, read under lock) with
            // odom<-body to express the scan in the global map frame.
            pcl::PointCloud<PointType>::Ptr cloudInBase(new pcl::PointCloud<PointType>());
            pcl::PointCloud<PointType>::Ptr cloudOutInWorld(new pcl::PointCloud<PointType>());
            PointTypePose thisPose6DInOdom = trans2PointTypePose(transformTobeMapped);
            Eigen::Affine3f T_thisPose6DInOdom = pclPointToAffine3f(thisPose6DInOdom);
            mtxtranformOdomToWorld.lock();
            PointTypePose pose_Odom_Map = trans2PointTypePose(tranformOdomToWorld);
            mtxtranformOdomToWorld.unlock();
            Eigen::Affine3f T_pose_Odom_Map = pclPointToAffine3f(pose_Odom_Map);

            Eigen::Affine3f T_poseInMap = T_pose_Odom_Map * T_thisPose6DInOdom;
            *cloudInBase += *laserCloudCornerLastDS;
            *cloudInBase += *laserCloudSurfLastDS;
            pcl::transformPointCloud(*cloudInBase, *cloudOutInWorld, T_poseInMap.matrix());
            publishCloud(&pubLaserCloudInWorld, cloudOutInWorld, timeLaserInfoStamp, "map");
        }


        //added *********************by gc
        // publish registered high-res raw cloud
        //gc: whole point_cloud of the scan
        if (pubCloudRegisteredRaw.getNumSubscribers() != 0)
        {
            pcl::PointCloud<PointType>::Ptr cloudOut(new pcl::PointCloud<PointType>());
            pcl::fromROSMsg(cloudInfo.cloud_deskewed, *cloudOut);
            PointTypePose thisPose6D = trans2PointTypePose(transformTobeMapped);
            *cloudOut = *transformPointCloud(cloudOut,  &thisPose6D);
            publishCloud(&pubCloudRegisteredRaw, cloudOut, timeLaserInfoStamp, "odom");
        }
        // publish path
        if (pubPath.getNumSubscribers() != 0)
        {
            globalPath.header.stamp = timeLaserInfoStamp;
            globalPath.header.frame_id = "odom";
            pubPath.publish(globalPath);
        }
    }
(2) global_pcd or whole keyframes************/\n    void cloudGlobalLoad()\n    {\n        pcl::io::loadPCDFile(std::getenv(\"HOME\") + savePCDDirectory + \"cloudGlobal.pcd\", *cloudGlobalMap);\n\n        pcl::PointCloud<PointType>::Ptr cloud_temp(new pcl::PointCloud<PointType>());\n        downSizeFilterICP.setInputCloud(cloudGlobalMap);\n        downSizeFilterICP.filter(*cloud_temp);\n        //*cloudGlobalMap = *cloud_temp;\n        *cloudGlobalMapDS = *cloud_temp;\n\n        std::cout << \"test 0.01  the size of global cloud: \" << cloudGlobalMap->points.size() << std::endl;\n        std::cout << \"test 0.02  the size of global map after filter: \" << cloudGlobalMapDS->points.size() << std::endl;\n    }\n\n    void globalLocalizeThread()\n    {\n\n        //ros::Rate rate(0.2);\n        while (ros::ok())\n        {\n            //avoid ICP using the same initial guess for many times\n            if(initializedFlag == NonInitialized)\n            {\n                ICPLocalizeInitialize();\n\n            }\n            else if(initializedFlag == Initializing)\n            {\n                std::cout << \"Offer A New Guess Please \" << std::endl;//do nothing, wait for a new initial guess\n                ros::Duration(1.0).sleep();\n            }\n            else\n            {\n\t\t        ros::Duration(10.0).sleep();\n\n                double t_start = ros::Time::now().toSec();\n                ICPscanMatchGlobal();\n                double t_end = ros::Time::now().toSec();\n                //std::cout << \"ICP time consuming: \" << t_end-t_start;\n                \n            }\n\n\n\n            //rate.sleep();\n            //}\n        }\n    }\n\n    void ICPLocalizeInitialize()\n    {\n        pcl::PointCloud<PointType>::Ptr laserCloudIn(new pcl::PointCloud<PointType>());\n\n        mtx_general.lock();\n        *laserCloudIn += *cloudScanForInitialize;\n        mtx_general.unlock();\n\n        //publishCloud(&fortest_publasercloudINWorld, laserCloudIn, 
    // One-shot global localization: align the accumulated initialization scan
    // against the downsampled global map (coarse NDT seeded by the user guess,
    // refined by ICP), update transformInTheWorld and tranformOdomToWorld, and
    // publish the map<-odom pose. On poor fitness, flag for a new manual guess.
    void ICPLocalizeInitialize()
    {
        pcl::PointCloud<PointType>::Ptr laserCloudIn(new pcl::PointCloud<PointType>());

        // Snapshot the shared initialization scan under lock.
        mtx_general.lock();
        *laserCloudIn += *cloudScanForInitialize;
        mtx_general.unlock();

        //publishCloud(&fortest_publasercloudINWorld, laserCloudIn, timeLaserInfoStamp, "map");

        if(laserCloudIn->points.size() == 0)
            return;
        //cloudScanForInitialize->clear();
        std::cout << "the size of incoming lasercloud: " << laserCloudIn->points.size() << std::endl;

        // Coarse aligner: NDT at 1.0 resolution.
        pcl::NormalDistributionsTransform<PointType, PointType> ndt;
        ndt.setTransformationEpsilon(0.01);
        ndt.setResolution(1.0);


        // Fine aligner: ICP (RANSAC disabled).
        pcl::IterativeClosestPoint<PointType, PointType> icp;
        icp.setMaxCorrespondenceDistance(100);
        icp.setMaximumIterations(100);
        icp.setTransformationEpsilon(1e-6);
        icp.setEuclideanFitnessEpsilon(1e-6);
        icp.setRANSACIterations(0);

        ndt.setInputSource(laserCloudIn);
        ndt.setInputTarget(cloudGlobalMapDS);
        pcl::PointCloud<PointType>::Ptr unused_result_0(new pcl::PointCloud<PointType>());

        // Seed NDT with the externally provided pose-in-world guess.
        PointTypePose thisPose6DInWorld = trans2PointTypePose(transformInTheWorld);
        Eigen::Affine3f T_thisPose6DInWorld = pclPointToAffine3f(thisPose6DInWorld);
        ndt.align(*unused_result_0, T_thisPose6DInWorld.matrix());


        //use the outcome of ndt as the initial guess for ICP
        icp.setInputSource(laserCloudIn);
        icp.setInputTarget(cloudGlobalMapDS);
        pcl::PointCloud<PointType>::Ptr unused_result(new pcl::PointCloud<PointType>());
        icp.align(*unused_result, ndt.getFinalTransformation());
        std::cout << "the pose before initializing is: x" << transformInTheWorld[3] << " y" << transformInTheWorld[4]
                  << " z" << transformInTheWorld[5] <<std::endl;
        std::cout << "the pose in odom before initializing is: x" << tranformOdomToWorld[3] << " y" << tranformOdomToWorld[4]
                  << " z" << tranformOdomToWorld[5] <<std::endl;
        std::cout << "the icp score in initializing process is: " << icp.getFitnessScore() << std::endl;
        std::cout << "the pose after initializing process is: "<< icp.getFinalTransformation() << std::endl;

        PointTypePose thisPose6DInOdom = trans2PointTypePose(transformTobeMapped);
        std::cout<< "transformTobeMapped X_Y_Z: " << transformTobeMapped[3] << " " << transformTobeMapped[4] << " " << transformTobeMapped[5] << std::endl;
        Eigen::Affine3f T_thisPose6DInOdom = pclPointToAffine3f(thisPose6DInOdom);

        // Store the refined body pose in the world (map) frame.
        Eigen::Affine3f T_thisPose6DInMap;
        T_thisPose6DInMap = icp.getFinalTransformation();
        float x_g, y_g, z_g, R_g, P_g, Y_g;
        pcl::getTranslationAndEulerAngles (T_thisPose6DInMap, x_g, y_g, z_g, R_g, P_g, Y_g);
        transformInTheWorld[0] = R_g;
        transformInTheWorld[1] = P_g;
        transformInTheWorld[2] = Y_g;
        transformInTheWorld[3] = x_g;
        transformInTheWorld[4] = y_g;
        transformInTheWorld[5] = z_g;


        // map<-odom = (map<-body) * (body<-odom).
        Eigen::Affine3f transOdomToMap = T_thisPose6DInMap * T_thisPose6DInOdom.inverse();
        float deltax, deltay, deltaz, deltaR, deltaP, deltaY;
        pcl::getTranslationAndEulerAngles (transOdomToMap, deltax, deltay, deltaz, deltaR, deltaP, deltaY);

        // Publish the new map<-odom estimate to shared state under lock.
        mtxtranformOdomToWorld.lock();
            //renew tranformOdomToWorld
        tranformOdomToWorld[0] = deltaR;
        tranformOdomToWorld[1] = deltaP;
        tranformOdomToWorld[2] = deltaY;
        tranformOdomToWorld[3] = deltax;
        tranformOdomToWorld[4] = deltay;
        tranformOdomToWorld[5] = deltaz;
        mtxtranformOdomToWorld.unlock();
        std::cout << "the pose of odom relative to Map: x" << tranformOdomToWorld[3] << " y" << tranformOdomToWorld[4]
                  << " z" << tranformOdomToWorld[5] <<std::endl;
        publishCloud(&pubLaserCloudInWorld, unused_result, timeLaserInfoStamp, "map");
        publishCloud(&pubMapWorld, cloudGlobalMapDS, timeLaserInfoStamp, "map");

        // Accept/reject by ICP fitness against the configured threshold.
        if (icp.hasConverged() == false || icp.getFitnessScore() > historyKeyframeFitnessScore)
        {
            initializedFlag = Initializing;
            std::cout << "Initializing Fail" << std::endl;
            return;
        } else{
            initializedFlag = Initialized;
            std::cout << "Initializing Succeed" << std::endl;
            geometry_msgs::PoseStamped pose_odomTo_map;
            tf::Quaternion q_odomTo_map = tf::createQuaternionFromRPY(deltaR, deltaP, deltaY);

            pose_odomTo_map.header.stamp = timeLaserInfoStamp;
            pose_odomTo_map.header.frame_id = "map";
            pose_odomTo_map.pose.position.x = deltax; pose_odomTo_map.pose.position.y = deltay; pose_odomTo_map.pose.position.z = deltaz;
            pose_odomTo_map.pose.orientation.x = q_odomTo_map.x();
            pose_odomTo_map.pose.orientation.y = q_odomTo_map.y();
            pose_odomTo_map.pose.orientation.z = q_odomTo_map.z();
            pose_odomTo_map.pose.orientation.w = q_odomTo_map.w();
            pubOdomToMapPose.publish(pose_odomTo_map);

        }

        //cloudScanForInitialize.reset(new pcl::PointCloud<PointType>());

    }
Initializing;\n            std::cout << \"Initializing Fail\" << std::endl;\n            return;\n        } else{\n            initializedFlag = Initialized;\n            std::cout << \"Initializing Succeed\" << std::endl;\n            geometry_msgs::PoseStamped pose_odomTo_map;\n            tf::Quaternion q_odomTo_map = tf::createQuaternionFromRPY(deltaR, deltaP, deltaY);\n\n            pose_odomTo_map.header.stamp = timeLaserInfoStamp;\n            pose_odomTo_map.header.frame_id = \"map\";\n            pose_odomTo_map.pose.position.x = deltax; pose_odomTo_map.pose.position.y = deltay; pose_odomTo_map.pose.position.z = deltaz;\n            pose_odomTo_map.pose.orientation.x = q_odomTo_map.x();\n            pose_odomTo_map.pose.orientation.y = q_odomTo_map.y();\n            pose_odomTo_map.pose.orientation.z = q_odomTo_map.z();\n            pose_odomTo_map.pose.orientation.w = q_odomTo_map.w();\n            pubOdomToMapPose.publish(pose_odomTo_map);\n\n        }\n\n        //cloudScanForInitialize.reset(new pcl::PointCloud<PointType>());\n\n    }\n\n    void ICPscanMatchGlobal()\n    {\n\t    //initializing\n/*\n        if(initializedFlag == NonInitialized)\n        {\n            ICPLocalizeInitialize();\n            return;\n        }*/\n\n\t    if (cloudKeyPoses3D->points.empty() == true)\n            return;\n\n        //change-5\n        /******************added by gc************************/\n\n            mtxWin.lock();\n            int latestFrameIDGlobalLocalize;\n            latestFrameIDGlobalLocalize = win_cloudKeyPoses3D.size() - 1;\n\n            pcl::PointCloud<PointType>::Ptr latestCloudIn(new pcl::PointCloud<PointType>());\n            *latestCloudIn += *transformPointCloud(win_cornerCloudKeyFrames[latestFrameIDGlobalLocalize], &win_cloudKeyPoses6D[latestFrameIDGlobalLocalize]);\n            *latestCloudIn += *transformPointCloud(win_surfCloudKeyFrames[latestFrameIDGlobalLocalize],   &win_cloudKeyPoses6D[latestFrameIDGlobalLocalize]);\n            
std::cout << \"the size of input cloud: \" << latestCloudIn->points.size() <<std::endl;\n\n            mtxWin.unlock();\n\n        /******************added by gc************************/\n\n\n//        int latestFrameIDGlobalLocalize;\n//        latestFrameIDGlobalLocalize = cloudKeyPoses3D->size() - 1;\n//\n//\n//        //latest Frame cloudpoints In the odom Frame\n//\n//        pcl::PointCloud<PointType>::Ptr latestCloudIn(new pcl::PointCloud<PointType>());\n//        *latestCloudIn += *transformPointCloud(cornerCloudKeyFrames[latestFrameIDGlobalLocalize], &cloudKeyPoses6D->points[latestFrameIDGlobalLocalize]);\n//        *latestCloudIn += *transformPointCloud(surfCloudKeyFrames[latestFrameIDGlobalLocalize],   &cloudKeyPoses6D->points[latestFrameIDGlobalLocalize]);\n//        std::cout << \"the size of input cloud: \" << latestCloudIn->points.size() <<std::endl;\n\n        pcl::NormalDistributionsTransform<PointType, PointType> ndt;\n        ndt.setTransformationEpsilon(0.01);\n        ndt.setResolution(1.0);\n\n\n        pcl::IterativeClosestPoint<PointType, PointType> icp;\n        icp.setMaxCorrespondenceDistance(100);\n        icp.setMaximumIterations(100);\n        icp.setTransformationEpsilon(1e-6);\n        icp.setEuclideanFitnessEpsilon(1e-6);\n        icp.setRANSACIterations(0);\n\n        // Align cloud\n        //3. 
calculate the tranform of odom relative to world\n\t//Eigen::Affine3f transodomToWorld_init = pcl::getTransformation(0,0,0,0,0,0);\n        mtxtranformOdomToWorld.lock();\n        Eigen::Affine3f transodomToWorld_init = pcl::getTransformation(tranformOdomToWorld[3], tranformOdomToWorld[4],tranformOdomToWorld[5],tranformOdomToWorld[0],tranformOdomToWorld[1],tranformOdomToWorld[2]);\n        mtxtranformOdomToWorld.unlock();\n\n        Eigen::Matrix4f matricInitGuess = transodomToWorld_init.matrix();\n\t    //std::cout << \"matricInitGuess: \" << matricInitGuess << std::endl;\n        //Firstly perform ndt in coarse resolution\n        ndt.setInputSource(latestCloudIn);\n        ndt.setInputTarget(cloudGlobalMapDS);\n        pcl::PointCloud<PointType>::Ptr unused_result_0(new pcl::PointCloud<PointType>());\n        ndt.align(*unused_result_0, matricInitGuess);\n        //use the outcome of ndt as the initial guess for ICP\n        icp.setInputSource(latestCloudIn);\n        icp.setInputTarget(cloudGlobalMapDS);\n        pcl::PointCloud<PointType>::Ptr unused_result(new pcl::PointCloud<PointType>());\n        icp.align(*unused_result, ndt.getFinalTransformation());\n\n        std::cout << \"ICP converg flag:\" << icp.hasConverged() << \". 
Fitness score: \" << icp.getFitnessScore() << std::endl << std::endl;\n\n\n        //if (icp.hasConverged() == false || icp.getFitnessScore() > historyKeyframeFitnessScore)\n           // return;\n\n        Eigen::Affine3f transodomToWorld_New;\n        transodomToWorld_New = icp.getFinalTransformation();\n        float x, y, z, roll, pitch, yaw;\n        pcl::getTranslationAndEulerAngles (transodomToWorld_New, x, y, z, roll, pitch, yaw);\n        //std::cout << \" in test 0.03: deltaX = \" << x << \" deltay = \" << y << \" deltaZ = \" << z << std::endl;\n\n        mtxtranformOdomToWorld.lock();\n        //renew tranformOdomToWorld\n        tranformOdomToWorld[0] = roll;\n        tranformOdomToWorld[1] = pitch;\n        tranformOdomToWorld[2] = yaw;\n        tranformOdomToWorld[3] = x;\n        tranformOdomToWorld[4] = y;\n        tranformOdomToWorld[5] = z;\n        mtxtranformOdomToWorld.unlock();\n        //publish the laserpointcloud in world frame\n\n        //publish global map\n        publishCloud(&pubMapWorld, cloudGlobalMapDS, timeLaserInfoStamp, \"map\");//publish world map\n\n        if (icp.hasConverged() == true && icp.getFitnessScore() < historyKeyframeFitnessScore)\n        {\n            geometry_msgs::PoseStamped pose_odomTo_map;\n            tf::Quaternion q_odomTo_map = tf::createQuaternionFromRPY(roll, pitch, yaw);\n\n            pose_odomTo_map.header.stamp = timeLaserInfoStamp;\n            pose_odomTo_map.header.frame_id = \"map\";\n            pose_odomTo_map.pose.position.x = x; pose_odomTo_map.pose.position.y = y; pose_odomTo_map.pose.position.z = z;\n            pose_odomTo_map.pose.orientation.x = q_odomTo_map.x();\n            pose_odomTo_map.pose.orientation.y = q_odomTo_map.y();\n            pose_odomTo_map.pose.orientation.z = q_odomTo_map.z();\n            pose_odomTo_map.pose.orientation.w = q_odomTo_map.w();\n            pubOdomToMapPose.publish(pose_odomTo_map);\n        }\n\n\n        //publish the trsformation between map and 
odom\n\n    }\n\n\n\n    void keyFramesLoad()\n    {\n\n    }\n\n\n    void initialpose_callback(const geometry_msgs::PoseWithCovarianceStampedConstPtr& pose_msg)\n    {\n        //first calculate global pose\n        //x-y-z\n        if(initializedFlag == Initialized)\n            return;\n\n        float x = pose_msg->pose.pose.position.x;\n        float y = pose_msg->pose.pose.position.y;\n        float z = pose_msg->pose.pose.position.z;\n\n        //roll-pitch-yaw\n        tf::Quaternion q_global;\n        double roll_global; double pitch_global; double yaw_global;\n\n        q_global.setX(pose_msg->pose.pose.orientation.x);\n        q_global.setY(pose_msg->pose.pose.orientation.y);\n        q_global.setZ(pose_msg->pose.pose.orientation.z);\n        q_global.setW(pose_msg->pose.pose.orientation.w);\n\n        tf::Matrix3x3(q_global).getRPY(roll_global, pitch_global, yaw_global);\n        //global transformation\n        transformInTheWorld[0] = roll_global;\n        transformInTheWorld[1] = pitch_global;\n        transformInTheWorld[2] = yaw_global;\n        transformInTheWorld[3] = x;\n        transformInTheWorld[4] = y;\n        transformInTheWorld[5] = z;\n        PointTypePose thisPose6DInWorld = trans2PointTypePose(transformInTheWorld);\n        Eigen::Affine3f T_thisPose6DInWorld = pclPointToAffine3f(thisPose6DInWorld);\n        //Odom transformation\n        PointTypePose thisPose6DInOdom = trans2PointTypePose(transformTobeMapped);\n        Eigen::Affine3f T_thisPose6DInOdom = pclPointToAffine3f(thisPose6DInOdom);\n        //transformation: Odom to Map\n        Eigen::Affine3f T_OdomToMap = T_thisPose6DInWorld * T_thisPose6DInOdom.inverse();\n        float delta_x, delta_y, delta_z, delta_roll, delta_pitch, delta_yaw;\n        pcl::getTranslationAndEulerAngles (T_OdomToMap, delta_x, delta_y, delta_z, delta_roll, delta_pitch, delta_yaw);\n\n        mtxtranformOdomToWorld.lock();\n        //keep for co-operate the initializing and lio, not useful for the 
present\n        tranformOdomToWorld[0] = delta_roll;\n        tranformOdomToWorld[1] = delta_pitch;\n        tranformOdomToWorld[2] = delta_yaw;\n        tranformOdomToWorld[3] = delta_x;\n        tranformOdomToWorld[4] = delta_y;\n        tranformOdomToWorld[5] = delta_z;\n\n        mtxtranformOdomToWorld.unlock();\n        initializedFlag = NonInitialized;\n\n        //globalLocalizeInitialiized = false;\n\n    }\n\n    /*************added by gc******************/\n\n};\n\n\nint main(int argc, char** argv)\n{\n    ros::init(argc, argv, \"lio_sam\");\n\n    mapOptimization MO;\n\n    ROS_INFO(\"\\033[1;32m----> Map Optimization Started.\\033[0m\");\n\n    //std::thread loopthread(&mapOptimization::loopClosureThread, &MO);\n    //std::thread visualizeMapThread(&mapOptimization::visualizeGlobalMapThread, &MO);\n    std::thread localizeInWorldThread(&mapOptimization::globalLocalizeThread, &MO);\n\n    ros::spin();\n\n    //loopthread.join();\n    //visualizeMapThread.join();\n    localizeInWorldThread.join();\n\n    return 0;\n}\n"
  },
  {
    "path": "src/imageProjection.cpp",
    "content": "#include \"utility.h\"\n#include \"lio_sam/cloud_info.h\"\n\n// Velodyne\n//PCL自定义点类型\nstruct PointXYZIRT\n{\n    PCL_ADD_POINT4D\n    PCL_ADD_INTENSITY;\n    uint16_t ring;\n    float time;\n    EIGEN_MAKE_ALIGNED_OPERATOR_NEW\n} EIGEN_ALIGN16;\n\nPOINT_CLOUD_REGISTER_POINT_STRUCT (PointXYZIRT,  \n    (float, x, x) (float, y, y) (float, z, z) (float, intensity, intensity)\n    (uint16_t, ring, ring) (float, time, time)\n)\n\n// Ouster\n// struct PointXYZIRT {\n//     PCL_ADD_POINT4D;\n//     float intensity;\n//     uint32_t t;\n//     uint16_t reflectivity;\n//     uint8_t ring;\n//     uint16_t noise;\n//     uint32_t range;\n//     EIGEN_MAKE_ALIGNED_OPERATOR_NEW\n// }EIGEN_ALIGN16;\n\n// POINT_CLOUD_REGISTER_POINT_STRUCT(PointXYZIRT,\n//     (float, x, x) (float, y, y) (float, z, z) (float, intensity, intensity)\n//     (uint32_t, t, t) (uint16_t, reflectivity, reflectivity)\n//     (uint8_t, ring, ring) (uint16_t, noise, noise) (uint32_t, range, range)\n// )\n\nconst int queueLength = 500;\n\nclass ImageProjection : public ParamServer\n{\nprivate:\n\n    std::mutex imuLock;\n    std::mutex odoLock;\n\n    ros::Subscriber subLaserCloud;\n    ros::Publisher  pubLaserCloud;\n    \n    ros::Publisher pubExtractedCloud;\n    ros::Publisher pubLaserCloudInfo;\n\n    ros::Subscriber subImu; //gc: IMU data\n    std::deque<sensor_msgs::Imu> imuQueue;\n\n    ros::Subscriber subOdom;//gc: IMU pre-preintegration odometry\n    std::deque<nav_msgs::Odometry> odomQueue;\n\n    std::deque<sensor_msgs::PointCloud2> cloudQueue;\n    sensor_msgs::PointCloud2 currentCloudMsg;\n    \n    double *imuTime = new double[queueLength];\n    double *imuRotX = new double[queueLength];\n    double *imuRotY = new double[queueLength];\n    double *imuRotZ = new double[queueLength];\n\n    int imuPointerCur;\n    bool firstPointFlag;\n    Eigen::Affine3f transStartInverse;\n\n    pcl::PointCloud<PointXYZIRT>::Ptr laserCloudIn;\n    pcl::PointCloud<PointType>::Ptr   
fullCloud;\n    pcl::PointCloud<PointType>::Ptr   extractedCloud;\n\n    int deskewFlag;\n    cv::Mat rangeMat;\n\n    bool odomDeskewFlag;\n    float odomIncreX;\n    float odomIncreY;\n    float odomIncreZ;\n\n    lio_sam::cloud_info cloudInfo;\n    double timeScanCur;\n    double timeScanNext;\n    std_msgs::Header cloudHeader;\n\n\npublic:\n    ImageProjection():\n    deskewFlag(0)\n    {\n        subImu        = nh.subscribe<sensor_msgs::Imu>(imuTopic, 2000, &ImageProjection::imuHandler, this, ros::TransportHints().tcpNoDelay());\n        subOdom       = nh.subscribe<nav_msgs::Odometry>(odomTopic, 2000, &ImageProjection::odometryHandler, this, ros::TransportHints().tcpNoDelay());\n        subLaserCloud = nh.subscribe<sensor_msgs::PointCloud2>(pointCloudTopic, 5, &ImageProjection::cloudHandler, this, ros::TransportHints().tcpNoDelay());\n\n        pubExtractedCloud = nh.advertise<sensor_msgs::PointCloud2> (\"lio_sam/deskew/cloud_deskewed\", 1);\n        pubLaserCloudInfo = nh.advertise<lio_sam::cloud_info> (\"lio_sam/deskew/cloud_info\", 10);\n\n        allocateMemory();\n        resetParameters();\n\n        pcl::console::setVerbosityLevel(pcl::console::L_ERROR);\n    }\n\n    void allocateMemory()\n    {\n        laserCloudIn.reset(new pcl::PointCloud<PointXYZIRT>());\n        fullCloud.reset(new pcl::PointCloud<PointType>());\n        extractedCloud.reset(new pcl::PointCloud<PointType>());\n\n        fullCloud->points.resize(N_SCAN*Horizon_SCAN);\n\n        cloudInfo.startRingIndex.assign(N_SCAN, 0);\n        cloudInfo.endRingIndex.assign(N_SCAN, 0);\n\n        cloudInfo.pointColInd.assign(N_SCAN*Horizon_SCAN, 0);\n        cloudInfo.pointRange.assign(N_SCAN*Horizon_SCAN, 0);\n\n        resetParameters();\n    }\n\n    void resetParameters()\n    {\n        laserCloudIn->clear();\n        extractedCloud->clear();\n        // reset range matrix for range image projection\n        rangeMat = cv::Mat(N_SCAN, Horizon_SCAN, CV_32F, cv::Scalar::all(FLT_MAX));\n\n   
     imuPointerCur = 0;\n        firstPointFlag = true;\n        odomDeskewFlag = false;\n\n        for (int i = 0; i < queueLength; ++i)\n        {\n            imuTime[i] = 0;\n            imuRotX[i] = 0;\n            imuRotY[i] = 0;\n            imuRotZ[i] = 0;\n        }\n    }\n\n    ~ImageProjection(){}\n\n    void imuHandler(const sensor_msgs::Imu::ConstPtr& imuMsg)\n    {\n        sensor_msgs::Imu thisImu = imuConverter(*imuMsg);\n\n        std::lock_guard<std::mutex> lock1(imuLock);\n        imuQueue.push_back(thisImu);\n\n        // debug IMU data\n        // cout << std::setprecision(6);\n        // cout << \"IMU acc: \" << endl;\n        // cout << \"x: \" << thisImu.linear_acceleration.x << \n        //       \", y: \" << thisImu.linear_acceleration.y << \n        //       \", z: \" << thisImu.linear_acceleration.z << endl;\n        // cout << \"IMU gyro: \" << endl;\n        // cout << \"x: \" << thisImu.angular_velocity.x << \n        //       \", y: \" << thisImu.angular_velocity.y << \n        //       \", z: \" << thisImu.angular_velocity.z << endl;\n        // double imuRoll, imuPitch, imuYaw;\n        // tf::Quaternion orientation;\n        // tf::quaternionMsgToTF(thisImu.orientation, orientation);\n        // tf::Matrix3x3(orientation).getRPY(imuRoll, imuPitch, imuYaw);\n        // cout << \"IMU roll pitch yaw: \" << endl;\n        // cout << \"roll: \" << imuRoll << \", pitch: \" << imuPitch << \", yaw: \" << imuYaw << endl << endl;\n    }\n    //gc: IMu prediction\n    void odometryHandler(const nav_msgs::Odometry::ConstPtr& odometryMsg)\n    {\n        std::lock_guard<std::mutex> lock2(odoLock);\n        odomQueue.push_back(*odometryMsg);\n    }\n\n    void cloudHandler(const sensor_msgs::PointCloud2ConstPtr& laserCloudMsg)\n    {\n        if (!cachePointCloud(laserCloudMsg))//gc: cache pointclouds and make format-checking\n            return;\n\n        if (!deskewInfo())//gc:calculate to get the relative transformation betwwen the start 
of the scan and the time when the imu came\n            return;\n\n        projectPointCloud();//gc: deskew the pointcloud and project the pointcloud into one image\n\n        cloudExtraction();\n\n        publishClouds();\n\n        resetParameters();\n    }\n    //gc: cashe pointclouds and make format-checking\n    bool cachePointCloud(const sensor_msgs::PointCloud2ConstPtr& laserCloudMsg)\n    {\n        // cache point cloud\n        cloudQueue.push_back(*laserCloudMsg);\n\n        if (cloudQueue.size() <= 2)\n            return false;\n        else\n        {\n            currentCloudMsg = cloudQueue.front();\n            cloudQueue.pop_front();\n\n            cloudHeader = currentCloudMsg.header;\n            timeScanCur = cloudHeader.stamp.toSec();\n            timeScanNext = cloudQueue.front().header.stamp.toSec();\n        }\n\n        // convert cloud\n        pcl::fromROSMsg(currentCloudMsg, *laserCloudIn);\n\n        // check dense flag\n        if (laserCloudIn->is_dense == false)\n        {\n            ROS_ERROR(\"Point cloud is not in dense format, please remove NaN points first!\");\n            ros::shutdown();\n        }\n\n        // check ring channel\n        static int ringFlag = 0;\n        if (ringFlag == 0)\n        {\n            ringFlag = -1;\n            for (int i = 0; i < (int)currentCloudMsg.fields.size(); ++i)\n            {\n                if (currentCloudMsg.fields[i].name == \"ring\")\n                {\n                    ringFlag = 1;\n                    break;\n                }\n            }\n            if (ringFlag == -1)\n            {\n                ROS_ERROR(\"Point cloud ring channel not available, please configure your point cloud data!\");\n                ros::shutdown();\n            }\n        }   \n\n        // check point time\n        if (deskewFlag == 0)\n        {\n            deskewFlag = -1;\n            for (int i = 0; i < (int)currentCloudMsg.fields.size(); ++i)\n            {\n                if 
(currentCloudMsg.fields[i].name == timeField)\n                {\n                    deskewFlag = 1;\n                    break;\n                }\n            }\n            if (deskewFlag == -1)\n                ROS_WARN(\"Point cloud timestamp not available, deskew function disabled, system will drift significantly!\");\n        }\n\n        return true;\n    }\n\n    bool deskewInfo()\n    {\n        std::lock_guard<std::mutex> lock1(imuLock);\n        std::lock_guard<std::mutex> lock2(odoLock);\n\n        // make sure IMU data available for the scan\n        if (imuQueue.empty() || imuQueue.front().header.stamp.toSec() > timeScanCur || imuQueue.back().header.stamp.toSec() < timeScanNext)\n        {\n            ROS_DEBUG(\"Waiting for IMU data ...\");\n            return false;\n        }\n\n        imuDeskewInfo();\n\n        odomDeskewInfo();\n\n        return true;\n    }\n\n    void imuDeskewInfo()\n    {\n        cloudInfo.imuAvailable = false;\n\n        while (!imuQueue.empty())\n        {\n            if (imuQueue.front().header.stamp.toSec() < timeScanCur - 0.01)\n                imuQueue.pop_front();\n            else\n                break;\n        }\n\n        if (imuQueue.empty())\n            return;\n\n        imuPointerCur = 0;\n\n        for (int i = 0; i < (int)imuQueue.size(); ++i)\n        {\n            sensor_msgs::Imu thisImuMsg = imuQueue[i];\n            double currentImuTime = thisImuMsg.header.stamp.toSec();\n\n            // get roll, pitch, and yaw estimation for this scan\n            //gc: not sychronized, something maybe should be done\n            if (currentImuTime <= timeScanCur)\n                imuRPY2rosRPY(&thisImuMsg, &cloudInfo.imuRollInit, &cloudInfo.imuPitchInit, &cloudInfo.imuYawInit);\n\n            if (currentImuTime > timeScanNext + 0.01)//gc: integrate between this scan and next scan\n                break;\n\n            if (imuPointerCur == 0){\n                imuRotX[0] = 0;\n                imuRotY[0] = 
0;\n                imuRotZ[0] = 0;\n                imuTime[0] = currentImuTime;\n                ++imuPointerCur;\n                continue;\n            }\n\n            // get angular velocity\n            double angular_x, angular_y, angular_z;\n            imuAngular2rosAngular(&thisImuMsg, &angular_x, &angular_y, &angular_z);\n\n            // integrate rotation\n            double timeDiff = currentImuTime - imuTime[imuPointerCur-1];\n            imuRotX[imuPointerCur] = imuRotX[imuPointerCur-1] + angular_x * timeDiff;\n            imuRotY[imuPointerCur] = imuRotY[imuPointerCur-1] + angular_y * timeDiff;\n            imuRotZ[imuPointerCur] = imuRotZ[imuPointerCur-1] + angular_z * timeDiff;\n            imuTime[imuPointerCur] = currentImuTime;\n            ++imuPointerCur;\n        }\n\n        --imuPointerCur;\n\n        if (imuPointerCur <= 0)\n            return;\n\n        cloudInfo.imuAvailable = true;\n    }\n\n    void odomDeskewInfo()\n    {\n        cloudInfo.odomAvailable = false;\n\n        while (!odomQueue.empty())\n        {\n            if (odomQueue.front().header.stamp.toSec() < timeScanCur - 0.01)\n                odomQueue.pop_front();\n            else\n                break;\n        }\n\n        if (odomQueue.empty())\n            return;\n\n        if (odomQueue.front().header.stamp.toSec() > timeScanCur)\n            return;\n\n        // get start odometry at the beinning of the scan\n        nav_msgs::Odometry startOdomMsg;\n\n        for (int i = 0; i < (int)odomQueue.size(); ++i)\n        {\n            startOdomMsg = odomQueue[i];\n\n            if (ROS_TIME(&startOdomMsg) < timeScanCur)\n                continue;\n            else\n                break;\n        }\n\n        tf::Quaternion orientation;\n        tf::quaternionMsgToTF(startOdomMsg.pose.pose.orientation, orientation);\n\n        double roll, pitch, yaw;\n        tf::Matrix3x3(orientation).getRPY(roll, pitch, yaw);\n\n        // Initial guess used in 
mapOptimization\n        cloudInfo.initialGuessX = startOdomMsg.pose.pose.position.x;\n        cloudInfo.initialGuessY = startOdomMsg.pose.pose.position.y;\n        cloudInfo.initialGuessZ = startOdomMsg.pose.pose.position.z;\n        cloudInfo.initialGuessRoll  = roll;\n        cloudInfo.initialGuessPitch = pitch;\n        cloudInfo.initialGuessYaw   = yaw;\n        cloudInfo.imuPreintegrationResetId = round(startOdomMsg.pose.covariance[0]);\n\n        cloudInfo.odomAvailable = true;\n\n        // get end odometry at the end of the scan\n        odomDeskewFlag = false;\n\n        if (odomQueue.back().header.stamp.toSec() < timeScanNext)\n            return;\n\n        nav_msgs::Odometry endOdomMsg;\n\n        for (int i = 0; i < (int)odomQueue.size(); ++i)\n        {\n            endOdomMsg = odomQueue[i];\n\n            if (ROS_TIME(&endOdomMsg) < timeScanNext)\n                continue;\n            else\n                break;\n        }\n\n        if (int(round(startOdomMsg.pose.covariance[0])) != int(round(endOdomMsg.pose.covariance[0])))\n            return;\n\n        Eigen::Affine3f transBegin = pcl::getTransformation(startOdomMsg.pose.pose.position.x, startOdomMsg.pose.pose.position.y, startOdomMsg.pose.pose.position.z, roll, pitch, yaw);\n\n        tf::quaternionMsgToTF(endOdomMsg.pose.pose.orientation, orientation);\n        tf::Matrix3x3(orientation).getRPY(roll, pitch, yaw);\n        Eigen::Affine3f transEnd = pcl::getTransformation(endOdomMsg.pose.pose.position.x, endOdomMsg.pose.pose.position.y, endOdomMsg.pose.pose.position.z, roll, pitch, yaw);\n        //gc: the relative transforme between the end snad  the start\n        Eigen::Affine3f transBt = transBegin.inverse() * transEnd;\n\n        float rollIncre, pitchIncre, yawIncre;\n        //gc: get the transform difference in the direction of X, Y and Z\n        pcl::getTranslationAndEulerAngles(transBt, odomIncreX, odomIncreY, odomIncreZ, rollIncre, pitchIncre, yawIncre);\n\n        
odomDeskewFlag = true;\n    }\n\n    void findRotation(double pointTime, float *rotXCur, float *rotYCur, float *rotZCur)\n    {\n        *rotXCur = 0; *rotYCur = 0; *rotZCur = 0;\n\n        int imuPointerFront = 0;\n        while (imuPointerFront < imuPointerCur)\n        {\n            if (pointTime < imuTime[imuPointerFront])\n                break;\n            ++imuPointerFront;\n        }\n\n        if (pointTime > imuTime[imuPointerFront] || imuPointerFront == 0)\n        {\n            *rotXCur = imuRotX[imuPointerFront];\n            *rotYCur = imuRotY[imuPointerFront];\n            *rotZCur = imuRotZ[imuPointerFront];\n        } else {\n            int imuPointerBack = imuPointerFront - 1;\n            double ratioFront = (pointTime - imuTime[imuPointerBack]) / (imuTime[imuPointerFront] - imuTime[imuPointerBack]);\n            double ratioBack = (imuTime[imuPointerFront] - pointTime) / (imuTime[imuPointerFront] - imuTime[imuPointerBack]);\n            *rotXCur = imuRotX[imuPointerFront] * ratioFront + imuRotX[imuPointerBack] * ratioBack;\n            *rotYCur = imuRotY[imuPointerFront] * ratioFront + imuRotY[imuPointerBack] * ratioBack;\n            *rotZCur = imuRotZ[imuPointerFront] * ratioFront + imuRotZ[imuPointerBack] * ratioBack;\n        }\n    }\n\n    void findPosition(double relTime, float *posXCur, float *posYCur, float *posZCur)\n    {\n        *posXCur = 0; *posYCur = 0; *posZCur = 0;\n\n        // If the sensor moves relatively slow, like walking speed, positional deskew seems to have little benefits. 
Thus code below is commented.\n\n        // if (cloudInfo.odomAvailable == false || odomDeskewFlag == false)\n        //     return;\n\n        // float ratio = relTime / (timeScanNext - timeScanCur);\n\n        // *posXCur = ratio * odomIncreX;\n        // *posYCur = ratio * odomIncreY;\n        // *posZCur = ratio * odomIncreZ;\n    }\n\n    PointType deskewPoint(PointType *point, double relTime)\n    {\n        if (deskewFlag == -1 || cloudInfo.imuAvailable == false)\n            return *point;\n\n        double pointTime = timeScanCur + relTime;\n\n        float rotXCur, rotYCur, rotZCur;\n        findRotation(pointTime, &rotXCur, &rotYCur, &rotZCur);//gc: Interpolation to find the rotation of lidar relative the scan start\n\n        float posXCur, posYCur, posZCur;\n        findPosition(relTime, &posXCur, &posYCur, &posZCur);\n\n        if (firstPointFlag == true)\n        {\n            transStartInverse = (pcl::getTransformation(posXCur, posYCur, posZCur, rotXCur, rotYCur, rotZCur)).inverse();\n            firstPointFlag = false;\n        }\n\n        // transform points to start\n        Eigen::Affine3f transFinal = pcl::getTransformation(posXCur, posYCur, posZCur, rotXCur, rotYCur, rotZCur);\n        Eigen::Affine3f transBt = transStartInverse * transFinal;\n\n        PointType newPoint;\n        newPoint.x = transBt(0,0) * point->x + transBt(0,1) * point->y + transBt(0,2) * point->z + transBt(0,3);\n        newPoint.y = transBt(1,0) * point->x + transBt(1,1) * point->y + transBt(1,2) * point->z + transBt(1,3);\n        newPoint.z = transBt(2,0) * point->x + transBt(2,1) * point->y + transBt(2,2) * point->z + transBt(2,3);\n        newPoint.intensity = point->intensity;\n\n        return newPoint;\n    }\n\n    void projectPointCloud()\n    {\n        int cloudSize = laserCloudIn->points.size();\n        // range image projection\n        for (int i = 0; i < cloudSize; ++i)\n        {\n            PointType thisPoint;\n            thisPoint.x = 
laserCloudIn->points[i].x;\n            thisPoint.y = laserCloudIn->points[i].y;\n            thisPoint.z = laserCloudIn->points[i].z;\n            thisPoint.intensity = laserCloudIn->points[i].intensity;\n\n            int rowIdn = laserCloudIn->points[i].ring;\n            if (rowIdn < 0 || rowIdn >= N_SCAN)\n                continue;\n\n            if (rowIdn % downsampleRate != 0)\n                continue;\n\n            float horizonAngle = atan2(thisPoint.x, thisPoint.y) * 180 / M_PI;\n\n            static float ang_res_x = 360.0/float(Horizon_SCAN);//gc: resolution in the direction\n            int columnIdn = -round((horizonAngle-90.0)/ang_res_x) + Horizon_SCAN/2;\n            if (columnIdn >= Horizon_SCAN)\n                columnIdn -= Horizon_SCAN;\n\n            if (columnIdn < 0 || columnIdn >= Horizon_SCAN)\n                continue;\n\n            float range = pointDistance(thisPoint);\n            \n            if (range < 1.0)\n                continue;\n\n            if (rangeMat.at<float>(rowIdn, columnIdn) != FLT_MAX)\n                continue;\n\n            // for the amsterdam dataset\n            // if (range < 6.0 && rowIdn <= 7 && (columnIdn >= 1600 || columnIdn <= 200))\n            //     continue;\n            // if (thisPoint.z < -2.0)\n            //     continue;\n\n            thisPoint = deskewPoint(&thisPoint, laserCloudIn->points[i].time); // Velodyne\n            // thisPoint = deskewPoint(&thisPoint, (float)laserCloudIn->points[i].t / 1000000000.0); // Ouster\n\n            rangeMat.at<float>(rowIdn, columnIdn) = pointDistance(thisPoint);\n\n            int index = columnIdn  + rowIdn * Horizon_SCAN;\n            fullCloud->points[index] = thisPoint;\n        }\n    }\n\n    void cloudExtraction()\n    {\n        int count = 0;\n        // extract segmented cloud for lidar odometry\n        for (int i = 0; i < N_SCAN; ++i)\n        {\n            cloudInfo.startRingIndex[i] = count - 1 + 5;//gc: the start index of every 
ring\n\n            for (int j = 0; j < Horizon_SCAN; ++j)\n            {\n                if (rangeMat.at<float>(i,j) != FLT_MAX)\n                {\n                    // mark the points' column index for marking occlusion later\n                    cloudInfo.pointColInd[count] = j;//gc: the colume index of every point\n                    // save range info\n                    cloudInfo.pointRange[count] = rangeMat.at<float>(i,j);\n                    // save extracted cloud\n                    extractedCloud->push_back(fullCloud->points[j + i*Horizon_SCAN]);\n                    // size of extracted cloud\n                    ++count;\n                }\n            }\n            cloudInfo.endRingIndex[i] = count -1 - 5;//gc: the end index of every ring\n        }\n    }\n    \n    void publishClouds()\n    {\n        cloudInfo.header = cloudHeader;\n        cloudInfo.cloud_deskewed  = publishCloud(&pubExtractedCloud, extractedCloud, cloudHeader.stamp, \"base_link\");\n        pubLaserCloudInfo.publish(cloudInfo);\n    }\n};\n\nint main(int argc, char** argv)\n{\n    ros::init(argc, argv, \"lio_sam\");\n\n    ImageProjection IP;\n    \n    ROS_INFO(\"\\033[1;32m----> Image Projection Started.\\033[0m\");\n\n    ros::MultiThreadedSpinner spinner(3);\n    spinner.spin();\n    \n    return 0;\n}"
  },
  {
    "path": "src/imuPreintegration.cpp",
    "content": "#include \"utility.h\"\n\n#include <gtsam/geometry/Rot3.h>\n#include <gtsam/geometry/Pose3.h>\n#include <gtsam/slam/PriorFactor.h>\n#include <gtsam/slam/BetweenFactor.h>\n#include <gtsam/navigation/GPSFactor.h>\n#include <gtsam/navigation/ImuFactor.h>\n#include <gtsam/navigation/CombinedImuFactor.h>\n#include <gtsam/nonlinear/NonlinearFactorGraph.h>\n#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>\n#include <gtsam/nonlinear/Marginals.h>\n#include <gtsam/nonlinear/Values.h>\n#include <gtsam/inference/Symbol.h>\n\n#include <gtsam/nonlinear/ISAM2.h>\n#include <gtsam_unstable/nonlinear/IncrementalFixedLagSmoother.h>\n\nusing gtsam::symbol_shorthand::X; // Pose3 (x,y,z,r,p,y)\nusing gtsam::symbol_shorthand::V; // Vel   (xdot,ydot,zdot)\nusing gtsam::symbol_shorthand::B; // Bias  (ax,ay,az,gx,gy,gz)\n\nclass IMUPreintegration : public ParamServer\n{\npublic:\n\n    ros::Subscriber subImu;\n    ros::Subscriber subOdometry;\n    ros::Publisher pubImuOdometry;\n    ros::Publisher pubImuPath;\n\n    // map -> odom\n    tf::Transform map_to_odom;\n    tf::TransformBroadcaster tfMap2Odom;\n    // odom -> base_link\n    tf::TransformBroadcaster tfOdom2BaseLink;\n\n    bool systemInitialized = false;\n\n    gtsam::noiseModel::Diagonal::shared_ptr priorPoseNoise;\n    gtsam::noiseModel::Diagonal::shared_ptr priorVelNoise;\n    gtsam::noiseModel::Diagonal::shared_ptr priorBiasNoise;\n    gtsam::noiseModel::Diagonal::shared_ptr correctionNoise;\n    gtsam::Vector noiseModelBetweenBias;\n\n\n    gtsam::PreintegratedImuMeasurements *imuIntegratorOpt_;\n    gtsam::PreintegratedImuMeasurements *imuIntegratorImu_;\n\n    std::deque<sensor_msgs::Imu> imuQueOpt;\n    std::deque<sensor_msgs::Imu> imuQueImu;\n\n    gtsam::Pose3 prevPose_;\n    gtsam::Vector3 prevVel_;\n    gtsam::NavState prevState_;\n    gtsam::imuBias::ConstantBias prevBias_;\n\n    gtsam::NavState prevStateOdom;\n    gtsam::imuBias::ConstantBias prevBiasOdom;\n\n    bool doneFirstOpt = false;\n   
 double lastImuT_imu = -1;\n    double lastImuT_opt = -1;\n\n    gtsam::ISAM2 optimizer;\n    gtsam::NonlinearFactorGraph graphFactors;\n    gtsam::Values graphValues;\n\n    const double delta_t = 0;\n\n    int key = 1;\n    int imuPreintegrationResetId = 0;\n\n    gtsam::Pose3 imu2Lidar = gtsam::Pose3(gtsam::Rot3(1, 0, 0, 0), gtsam::Point3(-extTrans.x(), -extTrans.y(), -extTrans.z()));\n    gtsam::Pose3 lidar2Imu = gtsam::Pose3(gtsam::Rot3(1, 0, 0, 0), gtsam::Point3(extTrans.x(), extTrans.y(), extTrans.z()));;\n\n    //added *****************************  by gc\n    ros::Subscriber subPoseOdomToMap;\n    geometry_msgs::PoseStamped poseOdomToMap;\n\n    //added *****************************  by gc\n\n    IMUPreintegration()\n    {\n        //added***********gc\n        subPoseOdomToMap = nh.subscribe<geometry_msgs::PoseStamped>(\"lio_sam/mapping/pose_odomTo_map\", 1,&IMUPreintegration::odomToMapPoseHandler,      this, ros::TransportHints().tcpNoDelay());\n        //added***********gc\n\n        subImu      = nh.subscribe<sensor_msgs::Imu>  (imuTopic,                   2000, &IMUPreintegration::imuHandler,      this, ros::TransportHints().tcpNoDelay());\n        subOdometry = nh.subscribe<nav_msgs::Odometry>(\"lio_sam/mapping/odometry\", 5,    &IMUPreintegration::odometryHandler, this, ros::TransportHints().tcpNoDelay());\n\n        pubImuOdometry = nh.advertise<nav_msgs::Odometry> (odomTopic, 2000);\n        pubImuPath     = nh.advertise<nav_msgs::Path>     (\"lio_sam/imu/path\", 1);\n\n        map_to_odom    = tf::Transform(tf::createQuaternionFromRPY(0, 0, 0), tf::Vector3(0, 0, 0));\n\n        boost::shared_ptr<gtsam::PreintegrationParams> p = gtsam::PreintegrationParams::MakeSharedU(imuGravity);\n        p->accelerometerCovariance  = gtsam::Matrix33::Identity(3,3) * pow(imuAccNoise, 2); // acc white noise in continuous\n        p->gyroscopeCovariance      = gtsam::Matrix33::Identity(3,3) * pow(imuGyrNoise, 2); // gyro white noise in continuous\n        
p->integrationCovariance    = gtsam::Matrix33::Identity(3,3) * pow(1e-4, 2); // error committed in integrating position from velocities\n        gtsam::imuBias::ConstantBias prior_imu_bias((gtsam::Vector(6) << 0, 0, 0, 0, 0, 0).finished());; // assume zero initial bias\n\n        priorPoseNoise  = gtsam::noiseModel::Diagonal::Sigmas((gtsam::Vector(6) << 1e-2, 1e-2, 1e-2, 1e-2, 1e-2, 1e-2).finished()); // rad,rad,rad,m, m, m\n        priorVelNoise   = gtsam::noiseModel::Isotropic::Sigma(3, 1e4); // m/s\n        priorBiasNoise  = gtsam::noiseModel::Isotropic::Sigma(6, 1e-3); // 1e-2 ~ 1e-3 seems to be good\n        correctionNoise = gtsam::noiseModel::Isotropic::Sigma(6, 1e-2); // meter\n        noiseModelBetweenBias = (gtsam::Vector(6) << imuAccBiasN, imuAccBiasN, imuAccBiasN, imuGyrBiasN, imuGyrBiasN, imuGyrBiasN).finished();\n        \n        imuIntegratorImu_ = new gtsam::PreintegratedImuMeasurements(p, prior_imu_bias); // setting up the IMU integration for IMU message thread\n        imuIntegratorOpt_ = new gtsam::PreintegratedImuMeasurements(p, prior_imu_bias); // setting up the IMU integration for optimization\n    }\n\n    void odomToMapPoseHandler(const geometry_msgs::PoseStamped::ConstPtr& poseOdomToMapmsg)\n    {\n        tf::Quaternion q_tem;\n        q_tem.setX(poseOdomToMapmsg->pose.orientation.x);\n        q_tem.setY(poseOdomToMapmsg->pose.orientation.y);\n        q_tem.setZ(poseOdomToMapmsg->pose.orientation.z);\n        q_tem.setW(poseOdomToMapmsg->pose.orientation.w);\n        tf::Vector3 p_tem(poseOdomToMapmsg->pose.position.x, poseOdomToMapmsg->pose.position.y, poseOdomToMapmsg->pose.position.z);\n\n        map_to_odom    = tf::Transform(q_tem, p_tem);\n\n    }\n\n\n    void resetOptimization()\n    {\n        gtsam::ISAM2Params optParameters;\n        optParameters.relinearizeThreshold = 0.1;\n        optParameters.relinearizeSkip = 1;\n        optimizer = gtsam::ISAM2(optParameters);\n\n        gtsam::NonlinearFactorGraph newGraphFactors;\n     
   graphFactors = newGraphFactors;\n\n        gtsam::Values NewGraphValues;\n        graphValues = NewGraphValues;\n    }\n\n    void resetParams()\n    {\n        lastImuT_imu = -1;\n        doneFirstOpt = false;\n        systemInitialized = false;\n    }\n    //gc: the odometry computed by the mapoptimization module\n    void odometryHandler(const nav_msgs::Odometry::ConstPtr& odomMsg)\n    {\n        double currentCorrectionTime = ROS_TIME(odomMsg);\n\n        // make sure we have imu data to integrate\n        if (imuQueOpt.empty())\n            return;\n\n        float p_x = odomMsg->pose.pose.position.x;\n        float p_y = odomMsg->pose.pose.position.y;\n        float p_z = odomMsg->pose.pose.position.z;\n        float r_x = odomMsg->pose.pose.orientation.x;\n        float r_y = odomMsg->pose.pose.orientation.y;\n        float r_z = odomMsg->pose.pose.orientation.z;\n        float r_w = odomMsg->pose.pose.orientation.w;\n        int currentResetId = round(odomMsg->pose.covariance[0]);\n        gtsam::Pose3 lidarPose = gtsam::Pose3(gtsam::Rot3::Quaternion(r_w, r_x, r_y, r_z), gtsam::Point3(p_x, p_y, p_z));\n\n        // correction pose jumped, reset imu pre-integration\n        if (currentResetId != imuPreintegrationResetId)\n        {\n            resetParams();\n            imuPreintegrationResetId = currentResetId;\n            return;\n        }\n\n\n        // 0. 
initialize system\n        if (systemInitialized == false)\n        {\n            resetOptimization();\n\n            // pop old IMU message\n            while (!imuQueOpt.empty())\n            {\n                if (ROS_TIME(&imuQueOpt.front()) < currentCorrectionTime - delta_t)\n                {\n                    lastImuT_opt = ROS_TIME(&imuQueOpt.front());\n                    imuQueOpt.pop_front();\n                }\n                else\n                    break;\n            }\n            // initial pose\n            prevPose_ = lidarPose.compose(lidar2Imu);\n            // 构造因子\n            gtsam::PriorFactor<gtsam::Pose3> priorPose(X(0), prevPose_, priorPoseNoise);\n            //gc: add a factor\n            graphFactors.add(priorPose);\n            // 构造因子\n            // initial velocity\n            prevVel_ = gtsam::Vector3(0, 0, 0);\n            gtsam::PriorFactor<gtsam::Vector3> priorVel(V(0), prevVel_, priorVelNoise);\n            graphFactors.add(priorVel);\n            // initial bias\n            // 构造因子\n            prevBias_ = gtsam::imuBias::ConstantBias();\n            gtsam::PriorFactor<gtsam::imuBias::ConstantBias> priorBias(B(0), prevBias_, priorBiasNoise);\n            graphFactors.add(priorBias);\n            // add values\n            // gc: insert(key,初始值)\n            graphValues.insert(X(0), prevPose_);\n            graphValues.insert(V(0), prevVel_);\n            graphValues.insert(B(0), prevBias_);\n            // optimize once\n            optimizer.update(graphFactors, graphValues);\n            graphFactors.resize(0);\n            graphValues.clear();\n\n            imuIntegratorImu_->resetIntegrationAndSetBias(prevBias_);\n            imuIntegratorOpt_->resetIntegrationAndSetBias(prevBias_);\n            \n            key = 1;\n            systemInitialized = true;\n            return;\n        }\n\n\n        // reset graph for speed\n        if (key == 100)\n        {\n            // get updated noise before reset\n    
        gtsam::noiseModel::Gaussian::shared_ptr updatedPoseNoise = gtsam::noiseModel::Gaussian::Covariance(optimizer.marginalCovariance(X(key-1)));\n            gtsam::noiseModel::Gaussian::shared_ptr updatedVelNoise  = gtsam::noiseModel::Gaussian::Covariance(optimizer.marginalCovariance(V(key-1)));\n            gtsam::noiseModel::Gaussian::shared_ptr updatedBiasNoise = gtsam::noiseModel::Gaussian::Covariance(optimizer.marginalCovariance(B(key-1)));\n            // reset graph\n            resetOptimization();\n            // add pose\n            gtsam::PriorFactor<gtsam::Pose3> priorPose(X(0), prevPose_, updatedPoseNoise);\n            graphFactors.add(priorPose);\n            // add velocity\n            gtsam::PriorFactor<gtsam::Vector3> priorVel(V(0), prevVel_, updatedVelNoise);\n            graphFactors.add(priorVel);\n            // add bias\n            gtsam::PriorFactor<gtsam::imuBias::ConstantBias> priorBias(B(0), prevBias_, updatedBiasNoise);\n            graphFactors.add(priorBias);\n            // add values\n            graphValues.insert(X(0), prevPose_);\n            graphValues.insert(V(0), prevVel_);\n            graphValues.insert(B(0), prevBias_);\n            // optimize once\n            optimizer.update(graphFactors, graphValues);\n            graphFactors.resize(0);\n            graphValues.clear();\n\n            key = 1;\n        }\n\n\n        // 1. integrate imu data and optimize\n        while (!imuQueOpt.empty())\n        {\n            // pop and integrate imu data that is between two optimizations\n            sensor_msgs::Imu *thisImu = &imuQueOpt.front();\n            double imuTime = ROS_TIME(thisImu);\n            if (imuTime < currentCorrectionTime - delta_t)\n            {\n                double dt = (lastImuT_opt < 0) ? 
(1.0 / 500.0) : (imuTime - lastImuT_opt);\n                imuIntegratorOpt_->integrateMeasurement(\n                        gtsam::Vector3(thisImu->linear_acceleration.x, thisImu->linear_acceleration.y, thisImu->linear_acceleration.z),\n                        gtsam::Vector3(thisImu->angular_velocity.x,    thisImu->angular_velocity.y,    thisImu->angular_velocity.z), dt);\n                \n                lastImuT_opt = imuTime;\n                imuQueOpt.pop_front();\n            }\n            else\n                break;\n        }\n        // add imu factor to graph\n        const gtsam::PreintegratedImuMeasurements& preint_imu = dynamic_cast<const gtsam::PreintegratedImuMeasurements&>(*imuIntegratorOpt_);\n        //gc:                       状态变量pose 状态变量velo  状态变量  状态变量   状态变量   measurement\n        gtsam::ImuFactor imu_factor(X(key - 1), V(key - 1), X(key), V(key), B(key - 1), preint_imu);\n        graphFactors.add(imu_factor);\n        // add imu bias between factor\n        //gc: BetweenFactor illustrate the relationship between two state\n        graphFactors.add(gtsam::BetweenFactor<gtsam::imuBias::ConstantBias>(B(key - 1), B(key), gtsam::imuBias::ConstantBias(),\n                         gtsam::noiseModel::Diagonal::Sigmas(sqrt(imuIntegratorOpt_->deltaTij()) * noiseModelBetweenBias)));\n        // add pose factor\n        gtsam::Pose3 curPose = lidarPose.compose(lidar2Imu);\n        gtsam::PriorFactor<gtsam::Pose3> pose_factor(X(key), curPose, correctionNoise);\n        graphFactors.add(pose_factor);\n        // insert predicted values\n        gtsam::NavState propState_ = imuIntegratorOpt_->predict(prevState_, prevBias_);\n        //gc: set the initial value\n        graphValues.insert(X(key), propState_.pose());\n        graphValues.insert(V(key), propState_.v());\n        graphValues.insert(B(key), prevBias_);\n        // optimize\n        optimizer.update(graphFactors, graphValues);\n        optimizer.update();\n        graphFactors.resize(0);\n   
     graphValues.clear();\n        // Overwrite the beginning of the preintegration for the next step.\n        gtsam::Values result = optimizer.calculateEstimate();\n        prevPose_  = result.at<gtsam::Pose3>(X(key));\n        prevVel_   = result.at<gtsam::Vector3>(V(key));\n        prevState_ = gtsam::NavState(prevPose_, prevVel_);\n        prevBias_  = result.at<gtsam::imuBias::ConstantBias>(B(key));\n        // Reset the optimization preintegration object.\n        imuIntegratorOpt_->resetIntegrationAndSetBias(prevBias_);\n        // check optimization\n        if (failureDetection(prevVel_, prevBias_))\n        {\n            resetParams();\n            return;\n        }\n\n\n        // 2. after optimization, re-propagate imu odometry preintegration\n        prevStateOdom = prevState_;\n        prevBiasOdom  = prevBias_;\n        // first pop imu message older than current correction data\n        double lastImuQT = -1;\n        while (!imuQueImu.empty() && ROS_TIME(&imuQueImu.front()) < currentCorrectionTime - delta_t)\n        {\n            lastImuQT = ROS_TIME(&imuQueImu.front());\n            imuQueImu.pop_front();\n        }\n        // repropagate\n        //gc: to get more correct imu_odom\n        if (!imuQueImu.empty())\n        {\n            // reset bias use the newly optimized bias\n            imuIntegratorImu_->resetIntegrationAndSetBias(prevBiasOdom);\n            // integrate imu message from the beginning of this optimization\n            for (int i = 0; i < (int)imuQueImu.size(); ++i)\n            {\n                sensor_msgs::Imu *thisImu = &imuQueImu[i];\n                double imuTime = ROS_TIME(thisImu);\n                double dt = (lastImuQT < 0) ? 
(1.0 / 500.0) :(imuTime - lastImuQT);\n\n                imuIntegratorImu_->integrateMeasurement(gtsam::Vector3(thisImu->linear_acceleration.x, thisImu->linear_acceleration.y, thisImu->linear_acceleration.z),\n                                                        gtsam::Vector3(thisImu->angular_velocity.x,    thisImu->angular_velocity.y,    thisImu->angular_velocity.z), dt);\n                lastImuQT = imuTime;\n            }\n        }\n\n        ++key;\n        doneFirstOpt = true;\n    }\n\n    bool failureDetection(const gtsam::Vector3& velCur, const gtsam::imuBias::ConstantBias& biasCur)\n    {\n        Eigen::Vector3f vel(velCur.x(), velCur.y(), velCur.z());\n        if (vel.norm() > 30)\n        {\n            ROS_WARN(\"Large velocity, reset IMU-preintegration!\");\n            return true;\n        }\n\n        Eigen::Vector3f ba(biasCur.accelerometer().x(), biasCur.accelerometer().y(), biasCur.accelerometer().z());\n        Eigen::Vector3f bg(biasCur.gyroscope().x(), biasCur.gyroscope().y(), biasCur.gyroscope().z());\n        if (ba.norm() > 1.0 || bg.norm() > 1.0)\n        {\n            ROS_WARN(\"Large bias, reset IMU-preintegration!\");\n            return true;\n        }\n\n        return false;\n    }\n\n    void imuHandler(const sensor_msgs::Imu::ConstPtr& imu_raw)\n    {\n        sensor_msgs::Imu thisImu = imuConverter(*imu_raw);\n        // publish static tf\n        //gc: the tf of map relative to odom  Todo: something can be done here to do localizaing in built map\n        //todo: noted by gc\n        tfMap2Odom.sendTransform(tf::StampedTransform(map_to_odom, thisImu.header.stamp, \"map\", \"odom\"));\n\n        imuQueOpt.push_back(thisImu);\n        imuQueImu.push_back(thisImu);\n\n        if (doneFirstOpt == false)\n            return;\n\n        double imuTime = ROS_TIME(&thisImu);\n        double dt = (lastImuT_imu < 0) ? 
(1.0 / 500.0) : (imuTime - lastImuT_imu);//gc: first IMU or parameter was reset\n        lastImuT_imu = imuTime;\n\n        // integrate this single imu message\n        imuIntegratorImu_->integrateMeasurement(gtsam::Vector3(thisImu.linear_acceleration.x, thisImu.linear_acceleration.y, thisImu.linear_acceleration.z),\n                                                gtsam::Vector3(thisImu.angular_velocity.x,    thisImu.angular_velocity.y,    thisImu.angular_velocity.z), dt);\n\n        // predict odometry\n        //gc: prevStateOdom is &\n        gtsam::NavState currentState = imuIntegratorImu_->predict(prevStateOdom, prevBiasOdom);\n\n        // publish odometry\n        nav_msgs::Odometry odometry;\n        odometry.header.stamp = thisImu.header.stamp;\n        odometry.header.frame_id = \"odom\";\n        odometry.child_frame_id = \"odom_imu\";\n\n        // transform imu pose to lidar\n        gtsam::Pose3 imuPose = gtsam::Pose3(currentState.quaternion(), currentState.position());\n        gtsam::Pose3 lidarPose = imuPose.compose(imu2Lidar);\n\n        odometry.pose.pose.position.x = lidarPose.translation().x();\n        odometry.pose.pose.position.y = lidarPose.translation().y();\n        odometry.pose.pose.position.z = lidarPose.translation().z();\n        odometry.pose.pose.orientation.x = lidarPose.rotation().toQuaternion().x();\n        odometry.pose.pose.orientation.y = lidarPose.rotation().toQuaternion().y();\n        odometry.pose.pose.orientation.z = lidarPose.rotation().toQuaternion().z();\n        odometry.pose.pose.orientation.w = lidarPose.rotation().toQuaternion().w();\n        \n        odometry.twist.twist.linear.x = currentState.velocity().x();\n        odometry.twist.twist.linear.y = currentState.velocity().y();\n        odometry.twist.twist.linear.z = currentState.velocity().z();\n        odometry.twist.twist.angular.x = thisImu.angular_velocity.x + prevBiasOdom.gyroscope().x();\n        odometry.twist.twist.angular.y = 
thisImu.angular_velocity.y + prevBiasOdom.gyroscope().y();\n        odometry.twist.twist.angular.z = thisImu.angular_velocity.z + prevBiasOdom.gyroscope().z();\n        odometry.pose.covariance[0] = double(imuPreintegrationResetId);\n        pubImuOdometry.publish(odometry);//gc: publish the state predicted by IMu\n\n        // publish imu path\n        static nav_msgs::Path imuPath;\n        static double last_path_time = -1;\n        if (imuTime - last_path_time > 0.1)\n        {\n            last_path_time = imuTime;\n            geometry_msgs::PoseStamped pose_stamped;\n            pose_stamped.header.stamp = thisImu.header.stamp;\n            pose_stamped.header.frame_id = \"odom\";\n            pose_stamped.pose = odometry.pose.pose;\n            imuPath.poses.push_back(pose_stamped);\n            while(!imuPath.poses.empty() && abs(imuPath.poses.front().header.stamp.toSec() - imuPath.poses.back().header.stamp.toSec()) > 3.0)\n                imuPath.poses.erase(imuPath.poses.begin());\n            if (pubImuPath.getNumSubscribers() != 0)\n            {\n                imuPath.header.stamp = thisImu.header.stamp;\n                imuPath.header.frame_id = \"odom\";\n                pubImuPath.publish(imuPath);\n            }\n        }\n\n        // publish transformation\n        tf::Transform tCur;\n        tf::poseMsgToTF(odometry.pose.pose, tCur);\n        tf::StampedTransform odom_2_baselink = tf::StampedTransform(tCur, thisImu.header.stamp, \"odom\", \"base_link\");\n        tfOdom2BaseLink.sendTransform(odom_2_baselink);\n    }\n};\n\n\nint main(int argc, char** argv)\n{\n    ros::init(argc, argv, \"roboat_loam\");\n    \n    IMUPreintegration ImuP;\n\n    ROS_INFO(\"\\033[1;32m----> IMU Preintegration Started.\\033[0m\");\n    \n    ros::spin();\n    \n    return 0;\n}\n"
  },
  {
    "path": "src/mapOptmization.cpp",
    "content": "#include \"utility.h\"\n#include \"lio_sam/cloud_info.h\"\n\n#include <gtsam/geometry/Rot3.h>\n#include <gtsam/geometry/Pose3.h>\n#include <gtsam/slam/PriorFactor.h>\n#include <gtsam/slam/BetweenFactor.h>\n#include <gtsam/navigation/GPSFactor.h>\n#include <gtsam/navigation/ImuFactor.h>\n#include <gtsam/navigation/CombinedImuFactor.h>\n#include <gtsam/nonlinear/NonlinearFactorGraph.h>\n#include <gtsam/nonlinear/LevenbergMarquardtOptimizer.h>\n#include <gtsam/nonlinear/Marginals.h>\n#include <gtsam/nonlinear/Values.h>\n#include <gtsam/inference/Symbol.h>\n\n#include <gtsam/nonlinear/ISAM2.h>\n\nusing namespace gtsam;\n\nusing symbol_shorthand::X; // Pose3 (x,y,z,r,p,y)\nusing symbol_shorthand::V; // Vel   (xdot,ydot,zdot)\nusing symbol_shorthand::B; // Bias  (ax,ay,az,gx,gy,gz)\nusing symbol_shorthand::G; // GPS pose\n\n/*\n    * A point cloud type that has 6D pose info ([x,y,z,roll,pitch,yaw] intensity is time stamp)\n    */\nstruct PointXYZIRPYT\n{\n    PCL_ADD_POINT4D\n    PCL_ADD_INTENSITY;                  // preferred way of adding a XYZ+padding\n    float roll;\n    float pitch;\n    float yaw;\n    double time;\n    EIGEN_MAKE_ALIGNED_OPERATOR_NEW   // make sure our new allocators are aligned\n} EIGEN_ALIGN16;                    // enforce SSE padding for correct memory alignment\n\nPOINT_CLOUD_REGISTER_POINT_STRUCT (PointXYZIRPYT,\n                                   (float, x, x) (float, y, y)\n                                   (float, z, z) (float, intensity, intensity)\n                                   (float, roll, roll) (float, pitch, pitch) (float, yaw, yaw)\n                                   (double, time, time))\n\ntypedef PointXYZIRPYT  PointTypePose;\n\n\nclass mapOptimization : public ParamServer\n{\n\npublic:\n\n    // gtsam\n    NonlinearFactorGraph gtSAMgraph;\n    Values initialEstimate;\n    Values optimizedEstimate;\n    ISAM2 *isam;\n    Values isamCurrentEstimate;\n    Eigen::MatrixXd poseCovariance;\n\n    
ros::Publisher pubLaserCloudSurround;\n    ros::Publisher pubOdomAftMappedROS;\n    ros::Publisher pubKeyPoses;\n    ros::Publisher pubPath;\n\n    ros::Publisher pubHistoryKeyFrames;\n    ros::Publisher pubIcpKeyFrames;\n    ros::Publisher pubRecentKeyFrames;\n    ros::Publisher pubRecentKeyFrame;\n    ros::Publisher pubCloudRegisteredRaw;\n\n    ros::Subscriber subLaserCloudInfo;\n    ros::Subscriber subGPS;\n\n    std::deque<nav_msgs::Odometry> gpsQueue;\n    lio_sam::cloud_info cloudInfo;\n\n    vector<pcl::PointCloud<PointType>::Ptr> cornerCloudKeyFrames;\n    vector<pcl::PointCloud<PointType>::Ptr> surfCloudKeyFrames;\n    \n    pcl::PointCloud<PointType>::Ptr cloudKeyPoses3D;\n    pcl::PointCloud<PointTypePose>::Ptr cloudKeyPoses6D;\n\n    pcl::PointCloud<PointType>::Ptr laserCloudCornerLast; // corner feature set from odoOptimization\n    pcl::PointCloud<PointType>::Ptr laserCloudSurfLast; // surf feature set from odoOptimization\n    pcl::PointCloud<PointType>::Ptr laserCloudCornerLastDS; // downsampled corner featuer set from odoOptimization\n    pcl::PointCloud<PointType>::Ptr laserCloudSurfLastDS; // downsampled surf featuer set from odoOptimization\n\n    pcl::PointCloud<PointType>::Ptr laserCloudOri;\n    pcl::PointCloud<PointType>::Ptr coeffSel;\n\n    std::vector<PointType> laserCloudOriCornerVec; // corner point holder for parallel computation\n    std::vector<PointType> coeffSelCornerVec;\n    std::vector<bool> laserCloudOriCornerFlag;\n    std::vector<PointType> laserCloudOriSurfVec; // surf point holder for parallel computation\n    std::vector<PointType> coeffSelSurfVec;\n    std::vector<bool> laserCloudOriSurfFlag;\n\n    pcl::PointCloud<PointType>::Ptr laserCloudCornerFromMap;\n    pcl::PointCloud<PointType>::Ptr laserCloudSurfFromMap;\n    pcl::PointCloud<PointType>::Ptr laserCloudCornerFromMapDS;\n    pcl::PointCloud<PointType>::Ptr laserCloudSurfFromMapDS;\n\n    pcl::KdTreeFLANN<PointType>::Ptr kdtreeCornerFromMap;\n    
pcl::KdTreeFLANN<PointType>::Ptr kdtreeSurfFromMap;\n\n    pcl::KdTreeFLANN<PointType>::Ptr kdtreeSurroundingKeyPoses;\n    pcl::KdTreeFLANN<PointType>::Ptr kdtreeHistoryKeyPoses;\n\n    pcl::PointCloud<PointType>::Ptr latestKeyFrameCloud;\n    pcl::PointCloud<PointType>::Ptr nearHistoryKeyFrameCloud;\n\n    pcl::VoxelGrid<PointType> downSizeFilterCorner;\n    pcl::VoxelGrid<PointType> downSizeFilterSurf;\n    pcl::VoxelGrid<PointType> downSizeFilterICP;\n    pcl::VoxelGrid<PointType> downSizeFilterSurroundingKeyPoses; // for surrounding key poses of scan-to-map optimization\n    \n    ros::Time timeLaserInfoStamp;\n    double timeLaserCloudInfoLast;\n\n    float transformTobeMapped[6];\n\n    std::mutex mtx;\n\n    double timeLastProcessing = -1;\n\n    bool isDegenerate = false;\n    Eigen::Matrix<float, 6, 6> matP;\n\n    int laserCloudCornerFromMapDSNum = 0;\n    int laserCloudSurfFromMapDSNum = 0;\n    int laserCloudCornerLastDSNum = 0;\n    int laserCloudSurfLastDSNum = 0;\n\n    bool aLoopIsClosed = false;\n    int imuPreintegrationResetId = 0;\n\n    nav_msgs::Path globalPath;\n\n    Eigen::Affine3f transPointAssociateToMap;\n\n    mapOptimization()\n    {\n        ISAM2Params parameters;\n        parameters.relinearizeThreshold = 0.1;\n        parameters.relinearizeSkip = 1;\n        isam = new ISAM2(parameters);\n\n        pubKeyPoses = nh.advertise<sensor_msgs::PointCloud2>(\"lio_sam/mapping/trajectory\", 1);\n        pubLaserCloudSurround = nh.advertise<sensor_msgs::PointCloud2>(\"lio_sam/mapping/map_global\", 1);\n        pubOdomAftMappedROS = nh.advertise<nav_msgs::Odometry> (\"lio_sam/mapping/odometry\", 1);\n        pubPath = nh.advertise<nav_msgs::Path>(\"lio_sam/mapping/path\", 1);\n\n        subLaserCloudInfo = nh.subscribe<lio_sam::cloud_info>(\"lio_sam/feature/cloud_info\", 10, &mapOptimization::laserCloudInfoHandler, this, ros::TransportHints().tcpNoDelay());\n        subGPS = nh.subscribe<nav_msgs::Odometry> (gpsTopic, 200, 
&mapOptimization::gpsHandler, this, ros::TransportHints().tcpNoDelay());\n\n        pubHistoryKeyFrames = nh.advertise<sensor_msgs::PointCloud2>(\"lio_sam/mapping/icp_loop_closure_history_cloud\", 1);\n        pubIcpKeyFrames = nh.advertise<sensor_msgs::PointCloud2>(\"lio_sam/mapping/icp_loop_closure_corrected_cloud\", 1);\n\n        pubRecentKeyFrames = nh.advertise<sensor_msgs::PointCloud2>(\"lio_sam/mapping/map_local\", 1);\n        pubRecentKeyFrame = nh.advertise<sensor_msgs::PointCloud2>(\"lio_sam/mapping/cloud_registered\", 1);\n        pubCloudRegisteredRaw = nh.advertise<sensor_msgs::PointCloud2>(\"lio_sam/mapping/cloud_registered_raw\", 1);\n\n        downSizeFilterCorner.setLeafSize(mappingCornerLeafSize, mappingCornerLeafSize, mappingCornerLeafSize);\n        downSizeFilterSurf.setLeafSize(mappingSurfLeafSize, mappingSurfLeafSize, mappingSurfLeafSize);\n        downSizeFilterICP.setLeafSize(mappingSurfLeafSize, mappingSurfLeafSize, mappingSurfLeafSize);\n        downSizeFilterSurroundingKeyPoses.setLeafSize(surroundingKeyframeDensity, surroundingKeyframeDensity, surroundingKeyframeDensity); // for surrounding key poses of scan-to-map optimization\n\n        allocateMemory();\n    }\n\n    void allocateMemory()\n    {\n        cloudKeyPoses3D.reset(new pcl::PointCloud<PointType>());\n        cloudKeyPoses6D.reset(new pcl::PointCloud<PointTypePose>());\n\n        kdtreeSurroundingKeyPoses.reset(new pcl::KdTreeFLANN<PointType>());\n        kdtreeHistoryKeyPoses.reset(new pcl::KdTreeFLANN<PointType>());\n\n        laserCloudCornerLast.reset(new pcl::PointCloud<PointType>()); // corner feature set from odoOptimization\n        laserCloudSurfLast.reset(new pcl::PointCloud<PointType>()); // surf feature set from odoOptimization\n        laserCloudCornerLastDS.reset(new pcl::PointCloud<PointType>()); // downsampled corner featuer set from odoOptimization\n        laserCloudSurfLastDS.reset(new pcl::PointCloud<PointType>()); // downsampled surf featuer set from 
odoOptimization\n\n        laserCloudOri.reset(new pcl::PointCloud<PointType>());\n        coeffSel.reset(new pcl::PointCloud<PointType>());\n\n        laserCloudOriCornerVec.resize(N_SCAN * Horizon_SCAN);\n        coeffSelCornerVec.resize(N_SCAN * Horizon_SCAN);\n        laserCloudOriCornerFlag.resize(N_SCAN * Horizon_SCAN);\n        laserCloudOriSurfVec.resize(N_SCAN * Horizon_SCAN);\n        coeffSelSurfVec.resize(N_SCAN * Horizon_SCAN);\n        laserCloudOriSurfFlag.resize(N_SCAN * Horizon_SCAN);\n\n        std::fill(laserCloudOriCornerFlag.begin(), laserCloudOriCornerFlag.end(), false);\n        std::fill(laserCloudOriSurfFlag.begin(), laserCloudOriSurfFlag.end(), false);\n\n        laserCloudCornerFromMap.reset(new pcl::PointCloud<PointType>());\n        laserCloudSurfFromMap.reset(new pcl::PointCloud<PointType>());\n        laserCloudCornerFromMapDS.reset(new pcl::PointCloud<PointType>());\n        laserCloudSurfFromMapDS.reset(new pcl::PointCloud<PointType>());\n\n        kdtreeCornerFromMap.reset(new pcl::KdTreeFLANN<PointType>());\n        kdtreeSurfFromMap.reset(new pcl::KdTreeFLANN<PointType>());\n\n        latestKeyFrameCloud.reset(new pcl::PointCloud<PointType>());\n        nearHistoryKeyFrameCloud.reset(new pcl::PointCloud<PointType>());\n\n        for (int i = 0; i < 6; ++i){\n            transformTobeMapped[i] = 0;\n        }\n\n        matP.setZero();\n    }\n\n    void laserCloudInfoHandler(const lio_sam::cloud_infoConstPtr& msgIn)\n    {\n        // extract time stamp\n        timeLaserInfoStamp = msgIn->header.stamp;\n        timeLaserCloudInfoLast = msgIn->header.stamp.toSec();\n\n        // extract info and feature cloud\n        cloudInfo = *msgIn;\n        pcl::fromROSMsg(msgIn->cloud_corner,  *laserCloudCornerLast);\n        pcl::fromROSMsg(msgIn->cloud_surface, *laserCloudSurfLast);\n\n        std::lock_guard<std::mutex> lock(mtx);\n\n        if (timeLaserCloudInfoLast - timeLastProcessing >= mappingProcessInterval) {//gc:control the 
rate of mapping process\n\n            timeLastProcessing = timeLaserCloudInfoLast;\n\n            updateInitialGuess();//gc: update initial value for states\n\n            extractSurroundingKeyFrames();//gc:\n\n            downsampleCurrentScan();//gc:down sample the current corner points and surface points\n\n            scan2MapOptimization();//gc: calculate the transformation using lidar measurement with the Imu preintegration as initial values\n                                    //and then interpolate roll and pitch angle using IMU measurement and above measurement\n\n            saveKeyFramesAndFactor();//gc: save corner cloud and surface cloud of this scan, and add odom and GPS factors\n\n            correctPoses();\n\n            publishOdometry();\n\n            publishFrames();\n        }\n    }\n\n    void gpsHandler(const nav_msgs::Odometry::ConstPtr& gpsMsg)\n    {\n        gpsQueue.push_back(*gpsMsg);\n    }\n\n    void pointAssociateToMap(PointType const * const pi, PointType * const po)\n    {\n        po->x = transPointAssociateToMap(0,0) * pi->x + transPointAssociateToMap(0,1) * pi->y + transPointAssociateToMap(0,2) * pi->z + transPointAssociateToMap(0,3);\n        po->y = transPointAssociateToMap(1,0) * pi->x + transPointAssociateToMap(1,1) * pi->y + transPointAssociateToMap(1,2) * pi->z + transPointAssociateToMap(1,3);\n        po->z = transPointAssociateToMap(2,0) * pi->x + transPointAssociateToMap(2,1) * pi->y + transPointAssociateToMap(2,2) * pi->z + transPointAssociateToMap(2,3);\n        po->intensity = pi->intensity;\n    }\n\n    pcl::PointCloud<PointType>::Ptr transformPointCloud(pcl::PointCloud<PointType>::Ptr cloudIn, PointTypePose* transformIn)\n    {\n        pcl::PointCloud<PointType>::Ptr cloudOut(new pcl::PointCloud<PointType>());\n\n        PointType *pointFrom;\n\n        int cloudSize = cloudIn->size();\n        cloudOut->resize(cloudSize);\n\n        Eigen::Affine3f transCur = pcl::getTransformation(transformIn->x, 
transformIn->y, transformIn->z, transformIn->roll, transformIn->pitch, transformIn->yaw);\n        \n        for (int i = 0; i < cloudSize; ++i){\n\n            pointFrom = &cloudIn->points[i];\n            cloudOut->points[i].x = transCur(0,0) * pointFrom->x + transCur(0,1) * pointFrom->y + transCur(0,2) * pointFrom->z + transCur(0,3);\n            cloudOut->points[i].y = transCur(1,0) * pointFrom->x + transCur(1,1) * pointFrom->y + transCur(1,2) * pointFrom->z + transCur(1,3);\n            cloudOut->points[i].z = transCur(2,0) * pointFrom->x + transCur(2,1) * pointFrom->y + transCur(2,2) * pointFrom->z + transCur(2,3);\n            cloudOut->points[i].intensity = pointFrom->intensity;\n        }\n        return cloudOut;\n    }\n\n    gtsam::Pose3 pclPointTogtsamPose3(PointTypePose thisPoint)\n    {\n        return gtsam::Pose3(gtsam::Rot3::RzRyRx(double(thisPoint.roll), double(thisPoint.pitch), double(thisPoint.yaw)),\n                                  gtsam::Point3(double(thisPoint.x),    double(thisPoint.y),     double(thisPoint.z)));\n    }\n\n    gtsam::Pose3 trans2gtsamPose(float transformIn[])\n    {\n        return gtsam::Pose3(gtsam::Rot3::RzRyRx(transformIn[0], transformIn[1], transformIn[2]), \n                                  gtsam::Point3(transformIn[3], transformIn[4], transformIn[5]));\n    }\n\n    Eigen::Affine3f pclPointToAffine3f(PointTypePose thisPoint)\n    { \n        return pcl::getTransformation(thisPoint.x, thisPoint.y, thisPoint.z, thisPoint.roll, thisPoint.pitch, thisPoint.yaw);\n    }\n\n    Eigen::Affine3f trans2Affine3f(float transformIn[])\n    {\n        return pcl::getTransformation(transformIn[3], transformIn[4], transformIn[5], transformIn[0], transformIn[1], transformIn[2]);\n    }\n\n    PointTypePose trans2PointTypePose(float transformIn[])\n    {\n        PointTypePose thisPose6D;\n        thisPose6D.x = transformIn[3];\n        thisPose6D.y = transformIn[4];\n        thisPose6D.z = transformIn[5];\n        thisPose6D.roll  
= transformIn[0];\n        thisPose6D.pitch = transformIn[1];\n        thisPose6D.yaw   = transformIn[2];\n        return thisPose6D;\n    }\n\n    \n\n\n\n\n\n\n\n\n\n\n\n\n\n\n    void visualizeGlobalMapThread()\n    {\n        ros::Rate rate(0.2);\n        while (ros::ok()){\n            rate.sleep();\n            publishGlobalMap();\n        }\n\n        if (savePCD == false)\n            return;\n\n        cout << \"****************************************************\" << endl;\n        cout << \"Saving map to pcd files ...\" << endl;\n        // create directory and remove old files;\n        savePCDDirectory = std::getenv(\"HOME\") + savePCDDirectory;\n        int unused = system((std::string(\"exec rm -r \") + savePCDDirectory).c_str());\n        unused = system((std::string(\"mkdir \") + savePCDDirectory).c_str());\n        // save key frame transformations\n        pcl::io::savePCDFileASCII(savePCDDirectory + \"trajectory.pcd\", *cloudKeyPoses3D);\n        pcl::io::savePCDFileASCII(savePCDDirectory + \"transformations.pcd\", *cloudKeyPoses6D);\n        // extract global point cloud map        \n        pcl::PointCloud<PointType>::Ptr globalCornerCloud(new pcl::PointCloud<PointType>());\n        pcl::PointCloud<PointType>::Ptr globalCornerCloudDS(new pcl::PointCloud<PointType>());\n        pcl::PointCloud<PointType>::Ptr globalSurfCloud(new pcl::PointCloud<PointType>());\n        pcl::PointCloud<PointType>::Ptr globalSurfCloudDS(new pcl::PointCloud<PointType>());\n        pcl::PointCloud<PointType>::Ptr globalMapCloud(new pcl::PointCloud<PointType>());\n        for (int i = 0; i < (int)cloudKeyPoses3D->size(); i++) {\n            *globalCornerCloud += *transformPointCloud(cornerCloudKeyFrames[i],  &cloudKeyPoses6D->points[i]);\n            *globalSurfCloud   += *transformPointCloud(surfCloudKeyFrames[i],    &cloudKeyPoses6D->points[i]);\n            cout << \"\\r\" << std::flush << \"Processing feature cloud \" << i << \" of \" << cloudKeyPoses6D->size() 
<< \" ...\";\n        }\n        // down-sample and save corner cloud\n        downSizeFilterCorner.setInputCloud(globalCornerCloud);\n        downSizeFilterCorner.filter(*globalCornerCloudDS);\n        pcl::io::savePCDFileASCII(savePCDDirectory + \"cloudCorner.pcd\", *globalCornerCloudDS);\n        // down-sample and save surf cloud\n        downSizeFilterSurf.setInputCloud(globalSurfCloud);\n        downSizeFilterSurf.filter(*globalSurfCloudDS);\n        pcl::io::savePCDFileASCII(savePCDDirectory + \"cloudSurf.pcd\", *globalSurfCloudDS);\n        // down-sample and save global point cloud map\n        *globalMapCloud += *globalCornerCloud;\n        *globalMapCloud += *globalSurfCloud;\n        pcl::io::savePCDFileASCII(savePCDDirectory + \"cloudGlobal.pcd\", *globalMapCloud);\n        cout << \"****************************************************\" << endl;\n        cout << \"Saving map to pcd files completed\" << endl;\n    }\n\n    void publishGlobalMap()\n    {\n        if (pubLaserCloudSurround.getNumSubscribers() == 0)\n            return;\n\n        if (cloudKeyPoses3D->points.empty() == true)\n            return;\n\n        pcl::KdTreeFLANN<PointType>::Ptr kdtreeGlobalMap(new pcl::KdTreeFLANN<PointType>());;\n        pcl::PointCloud<PointType>::Ptr globalMapKeyPoses(new pcl::PointCloud<PointType>());\n        pcl::PointCloud<PointType>::Ptr globalMapKeyPosesDS(new pcl::PointCloud<PointType>());\n        pcl::PointCloud<PointType>::Ptr globalMapKeyFrames(new pcl::PointCloud<PointType>());\n        pcl::PointCloud<PointType>::Ptr globalMapKeyFramesDS(new pcl::PointCloud<PointType>());\n\n        // kd-tree to find near key frames to visualize\n        std::vector<int> pointSearchIndGlobalMap;\n        std::vector<float> pointSearchSqDisGlobalMap;\n        // search near key frames to visualize\n        mtx.lock();\n        kdtreeGlobalMap->setInputCloud(cloudKeyPoses3D);\n        kdtreeGlobalMap->radiusSearch(cloudKeyPoses3D->back(), 
globalMapVisualizationSearchRadius, pointSearchIndGlobalMap, pointSearchSqDisGlobalMap, 0);\n        mtx.unlock();\n\n        for (int i = 0; i < (int)pointSearchIndGlobalMap.size(); ++i)\n            globalMapKeyPoses->push_back(cloudKeyPoses3D->points[pointSearchIndGlobalMap[i]]);\n        // downsample near selected key frames\n        pcl::VoxelGrid<PointType> downSizeFilterGlobalMapKeyPoses; // for global map visualization\n        downSizeFilterGlobalMapKeyPoses.setLeafSize(globalMapVisualizationPoseDensity, globalMapVisualizationPoseDensity, globalMapVisualizationPoseDensity); // for global map visualization\n        downSizeFilterGlobalMapKeyPoses.setInputCloud(globalMapKeyPoses);\n        downSizeFilterGlobalMapKeyPoses.filter(*globalMapKeyPosesDS);\n\n        // extract visualized and downsampled key frames\n        for (int i = 0; i < (int)globalMapKeyPosesDS->size(); ++i){\n            if (pointDistance(globalMapKeyPosesDS->points[i], cloudKeyPoses3D->back()) > globalMapVisualizationSearchRadius)\n                continue;\n            int thisKeyInd = (int)globalMapKeyPosesDS->points[i].intensity;\n            *globalMapKeyFrames += *transformPointCloud(cornerCloudKeyFrames[thisKeyInd],  &cloudKeyPoses6D->points[thisKeyInd]);\n            *globalMapKeyFrames += *transformPointCloud(surfCloudKeyFrames[thisKeyInd],    &cloudKeyPoses6D->points[thisKeyInd]);\n        }\n        // downsample visualized points\n        pcl::VoxelGrid<PointType> downSizeFilterGlobalMapKeyFrames; // for global map visualization\n        downSizeFilterGlobalMapKeyFrames.setLeafSize(globalMapVisualizationLeafSize, globalMapVisualizationLeafSize, globalMapVisualizationLeafSize); // for global map visualization\n        downSizeFilterGlobalMapKeyFrames.setInputCloud(globalMapKeyFrames);\n        downSizeFilterGlobalMapKeyFrames.filter(*globalMapKeyFramesDS);\n        publishCloud(&pubLaserCloudSurround, globalMapKeyFramesDS, timeLaserInfoStamp, \"odom\");    \n    
}\n\n\n\n\n\n\n\n\n\n\n\n\n    void loopClosureThread()\n    {\n        if (loopClosureEnableFlag == false)\n            return;\n\n        ros::Rate rate(0.1);\n        while (ros::ok())\n        {\n            rate.sleep();\n            performLoopClosure();\n        }\n    }\n\n    bool detectLoopClosure(int *latestID, int *closestID)\n    {\n        int latestFrameIDLoopCloure;\n        int closestHistoryFrameID;\n\n        latestKeyFrameCloud->clear();\n        nearHistoryKeyFrameCloud->clear();\n\n        std::lock_guard<std::mutex> lock(mtx);\n\n        // find the closest history key frame\n        std::vector<int> pointSearchIndLoop;\n        std::vector<float> pointSearchSqDisLoop;\n        kdtreeHistoryKeyPoses->setInputCloud(cloudKeyPoses3D);\n        //gc:search the last keyframe's nearest keyframes\n        kdtreeHistoryKeyPoses->radiusSearch(cloudKeyPoses3D->back(), historyKeyframeSearchRadius, pointSearchIndLoop, pointSearchSqDisLoop, 0);\n        \n        closestHistoryFrameID = -1;\n        for (int i = 0; i < (int)pointSearchIndLoop.size(); ++i)\n        {\n            int id = pointSearchIndLoop[i];\n            //gc: find the nearest keyframe whose time is not close\n            if (abs(cloudKeyPoses6D->points[id].time - timeLaserCloudInfoLast) > historyKeyframeSearchTimeDiff)\n            {\n                closestHistoryFrameID = id;\n                break;\n            }\n        }\n\n        if (closestHistoryFrameID == -1)\n            return false;\n        //gc: if the closest keyframe is\n        if ((int)cloudKeyPoses3D->size() - 1 == closestHistoryFrameID)\n            return false;\n\n        // save latest key frames\n        latestFrameIDLoopCloure = cloudKeyPoses3D->size() - 1;\n        *latestKeyFrameCloud += *transformPointCloud(cornerCloudKeyFrames[latestFrameIDLoopCloure], &cloudKeyPoses6D->points[latestFrameIDLoopCloure]);\n        *latestKeyFrameCloud += *transformPointCloud(surfCloudKeyFrames[latestFrameIDLoopCloure],   
&cloudKeyPoses6D->points[latestFrameIDLoopCloure]);\n\n        // save history near key frames\n        //gc: combine the time_close frame of the nearset frame\n        bool nearFrameAvailable = false;\n        for (int j = -historyKeyframeSearchNum; j <= historyKeyframeSearchNum; ++j)\n        {\n            if (closestHistoryFrameID + j < 0 || closestHistoryFrameID + j > latestFrameIDLoopCloure)\n                continue;\n            *nearHistoryKeyFrameCloud += *transformPointCloud(cornerCloudKeyFrames[closestHistoryFrameID+j], &cloudKeyPoses6D->points[closestHistoryFrameID+j]);\n            *nearHistoryKeyFrameCloud += *transformPointCloud(surfCloudKeyFrames[closestHistoryFrameID+j],   &cloudKeyPoses6D->points[closestHistoryFrameID+j]);\n            nearFrameAvailable = true;\n        }\n\n        if (nearFrameAvailable == false)\n            return false;\n\n        *latestID = latestFrameIDLoopCloure;\n        *closestID = closestHistoryFrameID;\n\n        return true;\n    }\n\n    void performLoopClosure()\n    {\n        if (cloudKeyPoses3D->points.empty() == true)\n            return;\n\n        int latestFrameIDLoopCloure;\n        int closestHistoryFrameID;\n        if (detectLoopClosure(&latestFrameIDLoopCloure, &closestHistoryFrameID) == false)\n            return;\n\n\n        // ICP Settings\n        pcl::IterativeClosestPoint<PointType, PointType> icp;\n        icp.setMaxCorrespondenceDistance(100);\n        icp.setMaximumIterations(100);\n        icp.setTransformationEpsilon(1e-6);\n        icp.setEuclideanFitnessEpsilon(1e-6);\n        icp.setRANSACIterations(0);\n\n        // Downsample map cloud\n        pcl::PointCloud<PointType>::Ptr cloud_temp(new pcl::PointCloud<PointType>());\n        downSizeFilterICP.setInputCloud(nearHistoryKeyFrameCloud);\n        downSizeFilterICP.filter(*cloud_temp);\n        *nearHistoryKeyFrameCloud = *cloud_temp;\n        // publish history near key frames\n        publishCloud(&pubHistoryKeyFrames, 
nearHistoryKeyFrameCloud, timeLaserInfoStamp, \"odom\");\n\n        // Align clouds\n//        icp.setin\n        icp.setInputSource(latestKeyFrameCloud);\n        icp.setInputTarget(nearHistoryKeyFrameCloud);\n        pcl::PointCloud<PointType>::Ptr unused_result(new pcl::PointCloud<PointType>());\n        icp.align(*unused_result);\n\n        // std::cout << \"ICP converg flag:\" << icp.hasConverged() << \". Fitness score: \" << icp.getFitnessScore() << std::endl;    \n        if (icp.hasConverged() == false || icp.getFitnessScore() > historyKeyframeFitnessScore)\n            return;\n\n        // publish corrected cloud\n        if (pubIcpKeyFrames.getNumSubscribers() != 0){\n            pcl::PointCloud<PointType>::Ptr closed_cloud(new pcl::PointCloud<PointType>());\n            pcl::transformPointCloud(*latestKeyFrameCloud, *closed_cloud, icp.getFinalTransformation());\n            publishCloud(&pubIcpKeyFrames, closed_cloud, timeLaserInfoStamp, \"odom\");\n        }\n\n        // Get pose transformation\n        float x, y, z, roll, pitch, yaw;\n        Eigen::Affine3f correctionLidarFrame;\n        //gc: realtive transformation\n        correctionLidarFrame = icp.getFinalTransformation();\n        // transform from world origin to wrong pose\n        Eigen::Affine3f tWrong = pclPointToAffine3f(cloudKeyPoses6D->points[latestFrameIDLoopCloure]);\n        // transform from world origin to corrected pose\n        Eigen::Affine3f tCorrect = correctionLidarFrame * tWrong;// pre-multiplying -> successive rotation about a fixed frame\n        pcl::getTranslationAndEulerAngles (tCorrect, x, y, z, roll, pitch, yaw);\n\n        gtsam::Pose3 poseFrom = Pose3(Rot3::RzRyRx(roll, pitch, yaw), Point3(x, y, z));\n        gtsam::Pose3 poseTo = pclPointTogtsamPose3(cloudKeyPoses6D->points[closestHistoryFrameID]);\n\n        gtsam::Vector Vector6(6);\n        float noiseScore = icp.getFitnessScore();//gc: noise right??\n        Vector6 << noiseScore, noiseScore, noiseScore, 
noiseScore, noiseScore, noiseScore;\n        noiseModel::Diagonal::shared_ptr constraintNoise = noiseModel::Diagonal::Variances(Vector6);\n\n        // Add pose constraint\n        std::lock_guard<std::mutex> lock(mtx);\n        gtSAMgraph.add(BetweenFactor<Pose3>(latestFrameIDLoopCloure, closestHistoryFrameID, poseFrom.between(poseTo), constraintNoise));\n        isam->update(gtSAMgraph);\n        isam->update();\n        gtSAMgraph.resize(0);\n\n        isamCurrentEstimate = isam->calculateEstimate();\n        Pose3 latestEstimate = isamCurrentEstimate.at<Pose3>(isamCurrentEstimate.size()-1);\n\n        transformTobeMapped[0] = latestEstimate.rotation().roll();\n        transformTobeMapped[1] = latestEstimate.rotation().pitch();\n        transformTobeMapped[2] = latestEstimate.rotation().yaw();\n        transformTobeMapped[3] = latestEstimate.translation().x();\n        transformTobeMapped[4] = latestEstimate.translation().y();\n        transformTobeMapped[5] = latestEstimate.translation().z();\n\n        correctPoses();\n\n        aLoopIsClosed = true;\n    }\n\n\n\n\n\n\n\n    \n\n\n\n    void updateInitialGuess()\n    {\n        static Eigen::Affine3f lastImuTransformation;//gc: note that this is static type\n        // initialization\n        if (cloudKeyPoses3D->points.empty())//gc: there is no key pose 初始化\n        {\n            transformTobeMapped[0] = cloudInfo.imuRollInit;\n            transformTobeMapped[1] = cloudInfo.imuPitchInit;\n            transformTobeMapped[2] = cloudInfo.imuYawInit;\n\n            if (!useImuHeadingInitialization)//gc: if not use the heading of init_IMU as Initialization\n                transformTobeMapped[2] = 0;\n\n            lastImuTransformation = pcl::getTransformation(0, 0, 0, cloudInfo.imuRollInit, cloudInfo.imuPitchInit, cloudInfo.imuYawInit); // save imu before return;\n            return;\n        }\n\n        // use imu pre-integration estimation for pose guess\n        if (cloudInfo.odomAvailable == true && 
cloudInfo.imuPreintegrationResetId == imuPreintegrationResetId)\n        { \n            transformTobeMapped[0] = cloudInfo.initialGuessRoll;\n            transformTobeMapped[1] = cloudInfo.initialGuessPitch;\n            transformTobeMapped[2] = cloudInfo.initialGuessYaw;\n\n            transformTobeMapped[3] = cloudInfo.initialGuessX;\n            transformTobeMapped[4] = cloudInfo.initialGuessY;\n            transformTobeMapped[5] = cloudInfo.initialGuessZ;\n\n            lastImuTransformation = pcl::getTransformation(0, 0, 0, cloudInfo.imuRollInit, cloudInfo.imuPitchInit, cloudInfo.imuYawInit); // save imu before return;\n            return;\n        }\n\n        // use previous pose for pose guess\n        // if (cloudKeyPoses6D->points.size() >= 2)\n        // {\n        //     int oldId = cloudKeyPoses6D->points.size() - 2;\n        //     int preId = cloudKeyPoses6D->points.size() - 1;\n        //     Eigen::Affine3f transOld = pclPointToAffine3f(cloudKeyPoses6D->points[oldId]);\n        //     Eigen::Affine3f transPre = pclPointToAffine3f(cloudKeyPoses6D->points[preId]);\n        //     double deltaTimePre = cloudKeyPoses6D->points[preId].time - cloudKeyPoses6D->points[oldId].time;\n        //     double deltaTimeNow = timeLaserCloudInfoLast - cloudKeyPoses6D->points[preId].time;\n        //     double alpha = deltaTimeNow / deltaTimePre;\n\n        //     Eigen::Affine3f transIncPre = transOld.inverse() * transPre;\n        //     float x, y, z, roll, pitch, yaw;\n        //     pcl::getTranslationAndEulerAngles (transIncPre, x, y, z, roll, pitch, yaw);\n        //     Eigen::Affine3f transIncNow = pcl::getTransformation(alpha*x, alpha*y, alpha*z, alpha*roll, alpha*pitch, alpha*yaw);\n\n        //     Eigen::Affine3f transTobe = trans2Affine3f(transformTobeMapped);\n        //     Eigen::Affine3f transFinal = transTobe * transIncNow;\n        //     pcl::getTranslationAndEulerAngles(transFinal, transformTobeMapped[3], transformTobeMapped[4], 
transformTobeMapped[5], \n        //                                                   transformTobeMapped[0], transformTobeMapped[1], transformTobeMapped[2]);\n\n        //     lastImuTransformation = pcl::getTransformation(0, 0, 0, cloudInfo.imuRollInit, cloudInfo.imuPitchInit, cloudInfo.imuYawInit); // save imu before return;\n        //     return;\n        // }    \n\n        // use imu incremental estimation for pose guess (only rotation)\n        if (cloudInfo.imuAvailable == true)\n        {\n            Eigen::Affine3f transBack = pcl::getTransformation(0, 0, 0, cloudInfo.imuRollInit, cloudInfo.imuPitchInit, cloudInfo.imuYawInit);\n            Eigen::Affine3f transIncre = lastImuTransformation.inverse() * transBack;//gc: the transform of IMU between two scans\n\n            Eigen::Affine3f transTobe = trans2Affine3f(transformTobeMapped);\n            Eigen::Affine3f transFinal = transTobe * transIncre;\n            pcl::getTranslationAndEulerAngles(transFinal, transformTobeMapped[3], transformTobeMapped[4], transformTobeMapped[5], \n                                                          transformTobeMapped[0], transformTobeMapped[1], transformTobeMapped[2]);\n\n            lastImuTransformation = pcl::getTransformation(0, 0, 0, cloudInfo.imuRollInit, cloudInfo.imuPitchInit, cloudInfo.imuYawInit); // save imu before return;\n            return;\n        }\n    }\n\n    void extractForLoopClosure()\n    {\n        pcl::PointCloud<PointType>::Ptr cloudToExtract(new pcl::PointCloud<PointType>());\n        int numPoses = cloudKeyPoses3D->size();\n        for (int i = numPoses-1; i >= 0; --i)\n        {\n            if ((int)cloudToExtract->size() <= surroundingKeyframeSize)\n                cloudToExtract->push_back(cloudKeyPoses3D->points[i]);\n            else\n                break;\n        }\n\n        extractCloud(cloudToExtract);\n    }\n    //gc:search nearby key poses and downsample them and then extract the local map points\n    void 
extractNearby()\n    {\n        pcl::PointCloud<PointType>::Ptr surroundingKeyPoses(new pcl::PointCloud<PointType>());\n        pcl::PointCloud<PointType>::Ptr surroundingKeyPosesDS(new pcl::PointCloud<PointType>());//gc: the key poses after downsample\n        std::vector<int> pointSearchInd;\n        std::vector<float> pointSearchSqDis;\n\n        // extract all the nearby key poses and downsample them\n        kdtreeSurroundingKeyPoses->setInputCloud(cloudKeyPoses3D); // create kd-tree\n        //gc: kd-tree is used for nearby key poses\n        kdtreeSurroundingKeyPoses->radiusSearch(cloudKeyPoses3D->back(), (double)surroundingKeyframeSearchRadius, pointSearchInd, pointSearchSqDis);\n        for (int i = 0; i < (int)pointSearchInd.size(); ++i)\n        {\n            int id = pointSearchInd[i];\n            surroundingKeyPoses->push_back(cloudKeyPoses3D->points[id]);\n        }\n\n        downSizeFilterSurroundingKeyPoses.setInputCloud(surroundingKeyPoses);\n        downSizeFilterSurroundingKeyPoses.filter(*surroundingKeyPosesDS);\n\n        // also extract some latest key frames in case the robot rotates in one position\n        int numPoses = cloudKeyPoses3D->size();\n        for (int i = numPoses-1; i >= 0; --i)\n        {\n            if (timeLaserCloudInfoLast - cloudKeyPoses6D->points[i].time < 10.0)\n                surroundingKeyPosesDS->push_back(cloudKeyPoses3D->points[i]);\n            else\n                break;\n        }\n\n        extractCloud(surroundingKeyPosesDS);\n    }\n    //gc: extract the nearby Map points\n    void extractCloud(pcl::PointCloud<PointType>::Ptr cloudToExtract)\n    {\n        std::vector<pcl::PointCloud<PointType>> laserCloudCornerSurroundingVec;\n        std::vector<pcl::PointCloud<PointType>> laserCloudSurfSurroundingVec;\n\n        laserCloudCornerSurroundingVec.resize(cloudToExtract->size());\n        laserCloudSurfSurroundingVec.resize(cloudToExtract->size());\n\n        // extract surrounding map\n        #pragma 
omp parallel for num_threads(numberOfCores)\n        for (int i = 0; i < (int)cloudToExtract->size(); ++i)\n        {\n            if (pointDistance(cloudToExtract->points[i], cloudKeyPoses3D->back()) > surroundingKeyframeSearchRadius)\n                continue;\n            int thisKeyInd = (int)cloudToExtract->points[i].intensity;//gc: the index of this key frame\n            //gc: tranform the corner points and surfpoints of the nearby keyFrames into the world frame\n            laserCloudCornerSurroundingVec[i]  = *transformPointCloud(cornerCloudKeyFrames[thisKeyInd],  &cloudKeyPoses6D->points[thisKeyInd]);\n            laserCloudSurfSurroundingVec[i]    = *transformPointCloud(surfCloudKeyFrames[thisKeyInd],    &cloudKeyPoses6D->points[thisKeyInd]);\n        }\n\n        // fuse the map\n        laserCloudCornerFromMap->clear();\n        laserCloudSurfFromMap->clear(); \n        for (int i = 0; i < (int)cloudToExtract->size(); ++i)\n        {\n            *laserCloudCornerFromMap += laserCloudCornerSurroundingVec[i];\n            *laserCloudSurfFromMap   += laserCloudSurfSurroundingVec[i];\n        }\n\n        // Downsample the surrounding corner key frames (or map)\n        downSizeFilterCorner.setInputCloud(laserCloudCornerFromMap);\n        downSizeFilterCorner.filter(*laserCloudCornerFromMapDS);\n        laserCloudCornerFromMapDSNum = laserCloudCornerFromMapDS->size();\n        // Downsample the surrounding surf key frames (or map)\n        downSizeFilterSurf.setInputCloud(laserCloudSurfFromMap);\n        downSizeFilterSurf.filter(*laserCloudSurfFromMapDS);\n        laserCloudSurfFromMapDSNum = laserCloudSurfFromMapDS->size();\n    }\n\n    void extractSurroundingKeyFrames()\n    {\n        if (cloudKeyPoses3D->points.empty() == true)\n            return; \n        \n        if (loopClosureEnableFlag == true)//gc:TODO: a little weired: Loop closure should search the whole map while\n        {\n            extractForLoopClosure(); //gc: the name is 
misleading\n        } else {\n            extractNearby();\n        }\n    }\n\n    void downsampleCurrentScan()\n    {\n        // Downsample cloud from current scan\n        laserCloudCornerLastDS->clear();\n        downSizeFilterCorner.setInputCloud(laserCloudCornerLast);\n        downSizeFilterCorner.filter(*laserCloudCornerLastDS);\n        laserCloudCornerLastDSNum = laserCloudCornerLastDS->size();\n\n        laserCloudSurfLastDS->clear();\n        downSizeFilterSurf.setInputCloud(laserCloudSurfLast);\n        downSizeFilterSurf.filter(*laserCloudSurfLastDS);\n        laserCloudSurfLastDSNum = laserCloudSurfLastDS->size();\n    }\n\n    void updatePointAssociateToMap()\n    {\n        transPointAssociateToMap = trans2Affine3f(transformTobeMapped);\n    }\n\n    void cornerOptimization()\n    {\n        updatePointAssociateToMap();\n\n        #pragma omp parallel for num_threads(numberOfCores)\n        //gc: for every corner point\n        for (int i = 0; i < laserCloudCornerLastDSNum; i++)\n        {\n            PointType pointOri, pointSel, coeff;\n            std::vector<int> pointSearchInd;\n            std::vector<float> pointSearchSqDis;\n\n            pointOri = laserCloudCornerLastDS->points[i];\n            //gc: calculate its location in the map using the prediction pose\n            pointAssociateToMap(&pointOri, &pointSel);\n            kdtreeCornerFromMap->nearestKSearch(pointSel, 5, pointSearchInd, pointSearchSqDis);\n\n            cv::Mat matA1(3, 3, CV_32F, cv::Scalar::all(0));\n            cv::Mat matD1(1, 3, CV_32F, cv::Scalar::all(0));\n            cv::Mat matV1(3, 3, CV_32F, cv::Scalar::all(0));\n                    \n            if (pointSearchSqDis[4] < 1.0) {\n                float cx = 0, cy = 0, cz = 0;\n                for (int j = 0; j < 5; j++) {\n                    cx += laserCloudCornerFromMapDS->points[pointSearchInd[j]].x;\n                    cy += laserCloudCornerFromMapDS->points[pointSearchInd[j]].y;\n                    
cz += laserCloudCornerFromMapDS->points[pointSearchInd[j]].z;\n                }\n                //gc: the average coordinate of the most nearest points\n                cx /= 5; cy /= 5;  cz /= 5;\n\n                float a11 = 0, a12 = 0, a13 = 0, a22 = 0, a23 = 0, a33 = 0;\n                for (int j = 0; j < 5; j++) {\n                    float ax = laserCloudCornerFromMapDS->points[pointSearchInd[j]].x - cx;\n                    float ay = laserCloudCornerFromMapDS->points[pointSearchInd[j]].y - cy;\n                    float az = laserCloudCornerFromMapDS->points[pointSearchInd[j]].z - cz;\n\n                    a11 += ax * ax; a12 += ax * ay; a13 += ax * az;\n                    a22 += ay * ay; a23 += ay * az;\n                    a33 += az * az;\n                }\n                a11 /= 5; a12 /= 5; a13 /= 5; a22 /= 5; a23 /= 5; a33 /= 5;\n\n                matA1.at<float>(0, 0) = a11; matA1.at<float>(0, 1) = a12; matA1.at<float>(0, 2) = a13;\n                matA1.at<float>(1, 0) = a12; matA1.at<float>(1, 1) = a22; matA1.at<float>(1, 2) = a23;\n                matA1.at<float>(2, 0) = a13; matA1.at<float>(2, 1) = a23; matA1.at<float>(2, 2) = a33;\n\n                cv::eigen(matA1, matD1, matV1);\n\n                if (matD1.at<float>(0, 0) > 3 * matD1.at<float>(0, 1)) {\n\n                    float x0 = pointSel.x;\n                    float y0 = pointSel.y;\n                    float z0 = pointSel.z;\n                    float x1 = cx + 0.1 * matV1.at<float>(0, 0);\n                    float y1 = cy + 0.1 * matV1.at<float>(0, 1);\n                    float z1 = cz + 0.1 * matV1.at<float>(0, 2);\n                    float x2 = cx - 0.1 * matV1.at<float>(0, 0);\n                    float y2 = cy - 0.1 * matV1.at<float>(0, 1);\n                    float z2 = cz - 0.1 * matV1.at<float>(0, 2);\n\n                    float a012 = sqrt(((x0 - x1)*(y0 - y2) - (x0 - x2)*(y0 - y1)) * ((x0 - x1)*(y0 - y2) - (x0 - x2)*(y0 - y1)) \n                                   
 + ((x0 - x1)*(z0 - z2) - (x0 - x2)*(z0 - z1)) * ((x0 - x1)*(z0 - z2) - (x0 - x2)*(z0 - z1)) \n                                    + ((y0 - y1)*(z0 - z2) - (y0 - y2)*(z0 - z1)) * ((y0 - y1)*(z0 - z2) - (y0 - y2)*(z0 - z1)));\n\n                    float l12 = sqrt((x1 - x2)*(x1 - x2) + (y1 - y2)*(y1 - y2) + (z1 - z2)*(z1 - z2));\n\n                    float la = ((y1 - y2)*((x0 - x1)*(y0 - y2) - (x0 - x2)*(y0 - y1)) \n                              + (z1 - z2)*((x0 - x1)*(z0 - z2) - (x0 - x2)*(z0 - z1))) / a012 / l12;\n\n                    float lb = -((x1 - x2)*((x0 - x1)*(y0 - y2) - (x0 - x2)*(y0 - y1)) \n                               - (z1 - z2)*((y0 - y1)*(z0 - z2) - (y0 - y2)*(z0 - z1))) / a012 / l12;\n\n                    float lc = -((x1 - x2)*((x0 - x1)*(z0 - z2) - (x0 - x2)*(z0 - z1)) \n                               + (y1 - y2)*((y0 - y1)*(z0 - z2) - (y0 - y2)*(z0 - z1))) / a012 / l12;\n\n                    float ld2 = a012 / l12;\n\n                    float s = 1 - 0.9 * fabs(ld2);\n\n                    coeff.x = s * la;\n                    coeff.y = s * lb;\n                    coeff.z = s * lc;\n                    coeff.intensity = s * ld2;\n\n                    if (s > 0.1) {\n                        laserCloudOriCornerVec[i] = pointOri;\n                        coeffSelCornerVec[i] = coeff;\n                        laserCloudOriCornerFlag[i] = true;\n                    }\n                }\n            }\n        }\n    }\n\n    void surfOptimization()\n    {\n        updatePointAssociateToMap();\n\n        #pragma omp parallel for num_threads(numberOfCores)\n        for (int i = 0; i < laserCloudSurfLastDSNum; i++)\n        {\n            PointType pointOri, pointSel, coeff;\n            std::vector<int> pointSearchInd;\n            std::vector<float> pointSearchSqDis;\n\n            pointOri = laserCloudSurfLastDS->points[i];\n            pointAssociateToMap(&pointOri, &pointSel); \n            kdtreeSurfFromMap->nearestKSearch(pointSel, 
5, pointSearchInd, pointSearchSqDis);\n\n            Eigen::Matrix<float, 5, 3> matA0;\n            Eigen::Matrix<float, 5, 1> matB0;\n            Eigen::Vector3f matX0;\n\n            matA0.setZero();\n            matB0.fill(-1);\n            matX0.setZero();\n\n            if (pointSearchSqDis[4] < 1.0) {\n                for (int j = 0; j < 5; j++) {\n                    matA0(j, 0) = laserCloudSurfFromMapDS->points[pointSearchInd[j]].x;\n                    matA0(j, 1) = laserCloudSurfFromMapDS->points[pointSearchInd[j]].y;\n                    matA0(j, 2) = laserCloudSurfFromMapDS->points[pointSearchInd[j]].z;\n                }\n\n                matX0 = matA0.colPivHouseholderQr().solve(matB0);\n\n                float pa = matX0(0, 0);\n                float pb = matX0(1, 0);\n                float pc = matX0(2, 0);\n                float pd = 1;\n\n                float ps = sqrt(pa * pa + pb * pb + pc * pc);\n                pa /= ps; pb /= ps; pc /= ps; pd /= ps;\n\n                bool planeValid = true;\n                for (int j = 0; j < 5; j++) {\n                    if (fabs(pa * laserCloudSurfFromMapDS->points[pointSearchInd[j]].x +\n                             pb * laserCloudSurfFromMapDS->points[pointSearchInd[j]].y +\n                             pc * laserCloudSurfFromMapDS->points[pointSearchInd[j]].z + pd) > 0.2) {\n                        planeValid = false;\n                        break;\n                    }\n                }\n\n                if (planeValid) {\n                    float pd2 = pa * pointSel.x + pb * pointSel.y + pc * pointSel.z + pd;\n\n                    float s = 1 - 0.9 * fabs(pd2) / sqrt(sqrt(pointSel.x * pointSel.x\n                            + pointSel.y * pointSel.y + pointSel.z * pointSel.z));\n\n                    coeff.x = s * pa;\n                    coeff.y = s * pb;\n                    coeff.z = s * pc;\n                    coeff.intensity = s * pd2;\n\n                    if (s > 0.1) {\n            
            laserCloudOriSurfVec[i] = pointOri;\n                        coeffSelSurfVec[i] = coeff;\n                        laserCloudOriSurfFlag[i] = true;\n                    }\n                }\n            }\n        }\n    }\n\n    void combineOptimizationCoeffs()\n    {\n        // combine corner coeffs\n        for (int i = 0; i < laserCloudCornerLastDSNum; ++i){\n            if (laserCloudOriCornerFlag[i] == true){\n                laserCloudOri->push_back(laserCloudOriCornerVec[i]);\n                coeffSel->push_back(coeffSelCornerVec[i]);\n            }\n        }\n        // combine surf coeffs\n        for (int i = 0; i < laserCloudSurfLastDSNum; ++i){\n            if (laserCloudOriSurfFlag[i] == true){\n                laserCloudOri->push_back(laserCloudOriSurfVec[i]);\n                coeffSel->push_back(coeffSelSurfVec[i]);\n            }\n        }\n        // reset flag for next iteration\n        std::fill(laserCloudOriCornerFlag.begin(), laserCloudOriCornerFlag.end(), false);\n        std::fill(laserCloudOriSurfFlag.begin(), laserCloudOriSurfFlag.end(), false);\n    }\n\n    bool LMOptimization(int iterCount)\n    {\n        // This optimization is from the original loam_velodyne by Ji Zhang, need to cope with coordinate transformation\n        // lidar <- camera      ---     camera <- lidar\n        // x = z                ---     x = y\n        // y = x                ---     y = z\n        // z = y                ---     z = x\n        // roll = yaw           ---     roll = pitch\n        // pitch = roll         ---     pitch = yaw\n        // yaw = pitch          ---     yaw = roll\n\n        // lidar -> camera\n        float srx = sin(transformTobeMapped[1]);\n        float crx = cos(transformTobeMapped[1]);\n        float sry = sin(transformTobeMapped[2]);\n        float cry = cos(transformTobeMapped[2]);\n        float srz = sin(transformTobeMapped[0]);\n        float crz = cos(transformTobeMapped[0]);\n\n        int laserCloudSelNum 
= laserCloudOri->size();\n        if (laserCloudSelNum < 50) {\n            return false;\n        }\n\n        cv::Mat matA(laserCloudSelNum, 6, CV_32F, cv::Scalar::all(0));\n        cv::Mat matAt(6, laserCloudSelNum, CV_32F, cv::Scalar::all(0));\n        cv::Mat matAtA(6, 6, CV_32F, cv::Scalar::all(0));\n        cv::Mat matB(laserCloudSelNum, 1, CV_32F, cv::Scalar::all(0));\n        cv::Mat matAtB(6, 1, CV_32F, cv::Scalar::all(0));\n        cv::Mat matX(6, 1, CV_32F, cv::Scalar::all(0));\n        cv::Mat matP(6, 6, CV_32F, cv::Scalar::all(0));\n\n        PointType pointOri, coeff;\n\n        for (int i = 0; i < laserCloudSelNum; i++) {\n            // lidar -> camera\n            pointOri.x = laserCloudOri->points[i].y;\n            pointOri.y = laserCloudOri->points[i].z;\n            pointOri.z = laserCloudOri->points[i].x;\n            // lidar -> camera\n            coeff.x = coeffSel->points[i].y;\n            coeff.y = coeffSel->points[i].z;\n            coeff.z = coeffSel->points[i].x;\n            coeff.intensity = coeffSel->points[i].intensity;\n            // in camera\n            float arx = (crx*sry*srz*pointOri.x + crx*crz*sry*pointOri.y - srx*sry*pointOri.z) * coeff.x\n                      + (-srx*srz*pointOri.x - crz*srx*pointOri.y - crx*pointOri.z) * coeff.y\n                      + (crx*cry*srz*pointOri.x + crx*cry*crz*pointOri.y - cry*srx*pointOri.z) * coeff.z;\n\n            float ary = ((cry*srx*srz - crz*sry)*pointOri.x \n                      + (sry*srz + cry*crz*srx)*pointOri.y + crx*cry*pointOri.z) * coeff.x\n                      + ((-cry*crz - srx*sry*srz)*pointOri.x \n                      + (cry*srz - crz*srx*sry)*pointOri.y - crx*sry*pointOri.z) * coeff.z;\n\n            float arz = ((crz*srx*sry - cry*srz)*pointOri.x + (-cry*crz-srx*sry*srz)*pointOri.y)*coeff.x\n                      + (crx*crz*pointOri.x - crx*srz*pointOri.y) * coeff.y\n                      + ((sry*srz + cry*crz*srx)*pointOri.x + 
(crz*sry-cry*srx*srz)*pointOri.y)*coeff.z;\n            // lidar -> camera\n            matA.at<float>(i, 0) = arz;\n            matA.at<float>(i, 1) = arx;\n            matA.at<float>(i, 2) = ary;\n            matA.at<float>(i, 3) = coeff.z;\n            matA.at<float>(i, 4) = coeff.x;\n            matA.at<float>(i, 5) = coeff.y;\n            matB.at<float>(i, 0) = -coeff.intensity;\n        }\n\n        cv::transpose(matA, matAt);\n        matAtA = matAt * matA;\n        matAtB = matAt * matB;\n        cv::solve(matAtA, matAtB, matX, cv::DECOMP_QR);\n\n        if (iterCount == 0) {\n\n            cv::Mat matE(1, 6, CV_32F, cv::Scalar::all(0));\n            cv::Mat matV(6, 6, CV_32F, cv::Scalar::all(0));\n            cv::Mat matV2(6, 6, CV_32F, cv::Scalar::all(0));\n\n            cv::eigen(matAtA, matE, matV);\n            matV.copyTo(matV2);\n\n            isDegenerate = false;\n            float eignThre[6] = {100, 100, 100, 100, 100, 100};\n            for (int i = 5; i >= 0; i--) {\n                if (matE.at<float>(0, i) < eignThre[i]) {\n                    for (int j = 0; j < 6; j++) {\n                        matV2.at<float>(i, j) = 0;\n                    }\n                    isDegenerate = true;\n                } else {\n                    break;\n                }\n            }\n            matP = matV.inv() * matV2;\n        }\n\n        if (isDegenerate) {\n            cv::Mat matX2(6, 1, CV_32F, cv::Scalar::all(0));\n            matX.copyTo(matX2);\n            matX = matP * matX2;\n        }\n\n        transformTobeMapped[0] += matX.at<float>(0, 0);\n        transformTobeMapped[1] += matX.at<float>(1, 0);\n        transformTobeMapped[2] += matX.at<float>(2, 0);\n        transformTobeMapped[3] += matX.at<float>(3, 0);\n        transformTobeMapped[4] += matX.at<float>(4, 0);\n        transformTobeMapped[5] += matX.at<float>(5, 0);\n\n        float deltaR = sqrt(\n                            pow(pcl::rad2deg(matX.at<float>(0, 0)), 2) +\n          
                  pow(pcl::rad2deg(matX.at<float>(1, 0)), 2) +\n                            pow(pcl::rad2deg(matX.at<float>(2, 0)), 2));\n        float deltaT = sqrt(\n                            pow(matX.at<float>(3, 0) * 100, 2) +\n                            pow(matX.at<float>(4, 0) * 100, 2) +\n                            pow(matX.at<float>(5, 0) * 100, 2));\n\n        if (deltaR < 0.05 && deltaT < 0.05) {\n            return true; // converged\n        }\n        return false; // keep optimizing\n    }\n\n    void scan2MapOptimization()\n    {\n        if (cloudKeyPoses3D->points.empty())\n            return;\n\n        if (laserCloudCornerLastDSNum > edgeFeatureMinValidNum && laserCloudSurfLastDSNum > surfFeatureMinValidNum)\n        {\n            kdtreeCornerFromMap->setInputCloud(laserCloudCornerFromMapDS);\n            kdtreeSurfFromMap->setInputCloud(laserCloudSurfFromMapDS);\n\n            for (int iterCount = 0; iterCount < 30; iterCount++)\n            {\n                laserCloudOri->clear();\n                coeffSel->clear();\n                //gc: calculate some coeff and judge whether tho point is valid corner point\n                cornerOptimization();\n                //gc: calculate some coeff and judge whether tho point is valid surface point\n                surfOptimization();\n\n                combineOptimizationCoeffs();\n\n                //gc: the true iteration steps, calculate the transform\n                if (LMOptimization(iterCount) == true)\n                    break;              \n            }\n            //gc: interpolate the roll and pitch angle using the IMU measurement and Lidar calculation\n            transformUpdate();\n        } else {\n            ROS_WARN(\"Not enough features! 
Only %d edge and %d planar features available.\", laserCloudCornerLastDSNum, laserCloudSurfLastDSNum);\n        }\n    }\n    //gc: interpolate the roll and pitch angle using the IMU measurement and Lidar calculation\n    void transformUpdate()\n    {\n        if (cloudInfo.imuAvailable == true)\n        {\n            if (std::abs(cloudInfo.imuPitchInit) < 1.4)\n            {\n                double imuWeight = 0.01;\n                tf::Quaternion imuQuaternion;\n                tf::Quaternion transformQuaternion;\n                double rollMid, pitchMid, yawMid;\n\n                // slerp roll\n                transformQuaternion.setRPY(transformTobeMapped[0], 0, 0);\n                imuQuaternion.setRPY(cloudInfo.imuRollInit, 0, 0);\n                //gc: interpolate between Imu roll measurement and angle from lidar calculation\n                tf::Matrix3x3(transformQuaternion.slerp(imuQuaternion, imuWeight)).getRPY(rollMid, pitchMid, yawMid);\n                transformTobeMapped[0] = rollMid;\n\n                // slerp pitch\n                transformQuaternion.setRPY(0, transformTobeMapped[1], 0);\n                imuQuaternion.setRPY(0, cloudInfo.imuPitchInit, 0);\n                //gc: interpolate between Imu roll measurement and angle from lidar calculation\n                tf::Matrix3x3(transformQuaternion.slerp(imuQuaternion, imuWeight)).getRPY(rollMid, pitchMid, yawMid);\n                transformTobeMapped[1] = pitchMid;\n            }\n        }\n\n        transformTobeMapped[0] = constraintTransformation(transformTobeMapped[0], rotation_tollerance);\n        transformTobeMapped[1] = constraintTransformation(transformTobeMapped[1], rotation_tollerance);\n        transformTobeMapped[5] = constraintTransformation(transformTobeMapped[5], z_tollerance);\n    }\n\n    float constraintTransformation(float value, float limit)\n    {\n        if (value < -limit)\n            value = -limit;\n        if (value > limit)\n            value = limit;\n\n       
 return value;\n    }\n\n    bool saveFrame()\n    {\n        if (cloudKeyPoses3D->points.empty())\n            return true;\n\n        Eigen::Affine3f transStart = pclPointToAffine3f(cloudKeyPoses6D->back());\n        Eigen::Affine3f transFinal = pcl::getTransformation(transformTobeMapped[3], transformTobeMapped[4], transformTobeMapped[5], \n                                                            transformTobeMapped[0], transformTobeMapped[1], transformTobeMapped[2]);\n        Eigen::Affine3f transBetween = transStart.inverse() * transFinal;\n        float x, y, z, roll, pitch, yaw;\n        pcl::getTranslationAndEulerAngles(transBetween, x, y, z, roll, pitch, yaw);\n        //gc: judge whther should generate key pose\n        if (abs(roll)  < surroundingkeyframeAddingAngleThreshold &&\n            abs(pitch) < surroundingkeyframeAddingAngleThreshold && \n            abs(yaw)   < surroundingkeyframeAddingAngleThreshold &&\n            sqrt(x*x + y*y + z*z) < surroundingkeyframeAddingDistThreshold)\n            return false;\n\n        return true;\n    }\n\n    void addOdomFactor()\n    {\n        //gc: the first key pose\n        if (cloudKeyPoses3D->points.empty())\n        {\n            noiseModel::Diagonal::shared_ptr priorNoise = noiseModel::Diagonal::Variances((Vector(6) << 1e-2, 1e-2, M_PI*M_PI, 1e8, 1e8, 1e8).finished()); // rad*rad, meter*meter\n            gtSAMgraph.add(PriorFactor<Pose3>(0, trans2gtsamPose(transformTobeMapped), priorNoise));\n            initialEstimate.insert(0, trans2gtsamPose(transformTobeMapped));\n        }else{\n            noiseModel::Diagonal::shared_ptr odometryNoise = noiseModel::Diagonal::Variances((Vector(6) << 1e-6, 1e-6, 1e-6, 1e-4, 1e-4, 1e-4).finished());\n            gtsam::Pose3 poseFrom = pclPointTogtsamPose3(cloudKeyPoses6D->points.back());\n            gtsam::Pose3 poseTo   = trans2gtsamPose(transformTobeMapped);\n            //gc: add constraint between current pose and previous pose\n            
gtSAMgraph.add(BetweenFactor<Pose3>(cloudKeyPoses3D->size()-1, cloudKeyPoses3D->size(), poseFrom.between(poseTo), odometryNoise));\n            initialEstimate.insert(cloudKeyPoses3D->size(), poseTo);\n        }\n    }\n\n    void addGPSFactor()\n    {\n        if (gpsQueue.empty())\n            return;\n\n        // wait for system initialized and settles down\n        if (cloudKeyPoses3D->points.empty())\n            return;\n        else\n        {\n            //gc: the translation between the first and last pose is very small\n            if (pointDistance(cloudKeyPoses3D->front(), cloudKeyPoses3D->back()) < 5.0)\n                return;\n        }\n\n        // pose covariance small, no need to correct\n        //gc: the odom covariance is small TODO: although small maybe some interpolation\n        if (poseCovariance(3,3) < poseCovThreshold && poseCovariance(4,4) < poseCovThreshold)\n            return;\n\n        // last gps position\n        static PointType lastGPSPoint;\n\n        while (!gpsQueue.empty())\n        {\n            if (gpsQueue.front().header.stamp.toSec() < timeLaserCloudInfoLast - 0.2)\n            {\n                // message too old\n                gpsQueue.pop_front();\n            }\n            else if (gpsQueue.front().header.stamp.toSec() > timeLaserCloudInfoLast + 0.2)\n            {\n                // message too new\n                break;\n            }\n            else\n            {\n                nav_msgs::Odometry thisGPS = gpsQueue.front();\n                gpsQueue.pop_front();\n\n                // GPS too noisy, skip\n                float noise_x = thisGPS.pose.covariance[0];\n                float noise_y = thisGPS.pose.covariance[7];\n                float noise_z = thisGPS.pose.covariance[14];\n                if (noise_x > gpsCovThreshold || noise_y > gpsCovThreshold)\n                    continue;\n\n                float gps_x = thisGPS.pose.pose.position.x;\n                float gps_y = 
thisGPS.pose.pose.position.y;\n                float gps_z = thisGPS.pose.pose.position.z;\n                if (!useGpsElevation)\n                {\n                    gps_z = transformTobeMapped[5];\n                    noise_z = 0.01;\n                }\n\n                // GPS not properly initialized (0,0,0)\n                if (abs(gps_x) < 1e-6 && abs(gps_y) < 1e-6)\n                    continue;\n\n                // Add GPS every a few meters\n                PointType curGPSPoint;\n                curGPSPoint.x = gps_x;\n                curGPSPoint.y = gps_y;\n                curGPSPoint.z = gps_z;\n                if (pointDistance(curGPSPoint, lastGPSPoint) < 5.0)\n                    continue;\n                else\n                    lastGPSPoint = curGPSPoint;\n\n                gtsam::Vector Vector3(3);\n                Vector3 << max(noise_x, 1.0f), max(noise_y, 1.0f), max(noise_z, 1.0f);\n                noiseModel::Diagonal::shared_ptr gps_noise = noiseModel::Diagonal::Variances(Vector3);\n                gtsam::GPSFactor gps_factor(cloudKeyPoses3D->size(), gtsam::Point3(gps_x, gps_y, gps_z), gps_noise);\n                gtSAMgraph.add(gps_factor);\n\n                aLoopIsClosed = true;\n                break;\n            }\n        }\n    }\n\n    void saveKeyFramesAndFactor()\n    {\n        //gc: judge whther should generate key pose\n        if (saveFrame() == false)\n            return;\n\n        // odom factor\n        //gc: add odom factor in the graph\n        addOdomFactor();\n\n        // gps factor\n        addGPSFactor();\n\n        // cout << \"****************************************************\" << endl;\n        // gtSAMgraph.print(\"GTSAM Graph:\\n\");\n\n        // update iSAM\n        isam->update(gtSAMgraph, initialEstimate);\n        isam->update();\n\n        gtSAMgraph.resize(0);\n        initialEstimate.clear();\n\n        //save key poses\n        PointType thisPose3D;\n        PointTypePose thisPose6D;\n        
Pose3 latestEstimate;\n\n        isamCurrentEstimate = isam->calculateEstimate();\n        latestEstimate = isamCurrentEstimate.at<Pose3>(isamCurrentEstimate.size()-1);\n        // cout << \"****************************************************\" << endl;\n        // isamCurrentEstimate.print(\"Current estimate: \");\n\n        //gc:cloudKeyPoses3D can be used to calculate the nearest key frames\n        thisPose3D.x = latestEstimate.translation().x();\n        thisPose3D.y = latestEstimate.translation().y();\n        thisPose3D.z = latestEstimate.translation().z();\n        thisPose3D.intensity = cloudKeyPoses3D->size(); // this can be used as index\n        cloudKeyPoses3D->push_back(thisPose3D);\n\n        thisPose6D.x = thisPose3D.x;\n        thisPose6D.y = thisPose3D.y;\n        thisPose6D.z = thisPose3D.z;\n        thisPose6D.intensity = thisPose3D.intensity ; // this can be used as index\n        thisPose6D.roll  = latestEstimate.rotation().roll();\n        thisPose6D.pitch = latestEstimate.rotation().pitch();\n        thisPose6D.yaw   = latestEstimate.rotation().yaw();\n        thisPose6D.time = timeLaserCloudInfoLast;\n        cloudKeyPoses6D->push_back(thisPose6D);\n\n        // cout << \"****************************************************\" << endl;\n        // cout << \"Pose covariance:\" << endl;\n        // cout << isam->marginalCovariance(isamCurrentEstimate.size()-1) << endl << endl;\n        poseCovariance = isam->marginalCovariance(isamCurrentEstimate.size()-1);\n\n        // save updated transform\n        transformTobeMapped[0] = latestEstimate.rotation().roll();\n        transformTobeMapped[1] = latestEstimate.rotation().pitch();\n        transformTobeMapped[2] = latestEstimate.rotation().yaw();\n        transformTobeMapped[3] = latestEstimate.translation().x();\n        transformTobeMapped[4] = latestEstimate.translation().y();\n        transformTobeMapped[5] = latestEstimate.translation().z();\n\n        // save all the received edge and surf 
points\n        pcl::PointCloud<PointType>::Ptr thisCornerKeyFrame(new pcl::PointCloud<PointType>());\n        pcl::PointCloud<PointType>::Ptr thisSurfKeyFrame(new pcl::PointCloud<PointType>());\n        //gc:\n        pcl::copyPointCloud(*laserCloudCornerLastDS,  *thisCornerKeyFrame);\n        pcl::copyPointCloud(*laserCloudSurfLastDS,    *thisSurfKeyFrame);\n\n        // save key frame cloud\n        cornerCloudKeyFrames.push_back(thisCornerKeyFrame);\n        surfCloudKeyFrames.push_back(thisSurfKeyFrame);\n\n        // save path for visualization\n        updatePath(thisPose6D);\n    }\n\n    void correctPoses()\n    {\n        if (cloudKeyPoses3D->points.empty())\n            return;\n\n        if (aLoopIsClosed == true)\n        {\n            // clear path\n            globalPath.poses.clear();\n            // update key poses\n            int numPoses = isamCurrentEstimate.size();\n            for (int i = 0; i < numPoses; ++i)\n            {\n                cloudKeyPoses3D->points[i].x = isamCurrentEstimate.at<Pose3>(i).translation().x();\n                cloudKeyPoses3D->points[i].y = isamCurrentEstimate.at<Pose3>(i).translation().y();\n                cloudKeyPoses3D->points[i].z = isamCurrentEstimate.at<Pose3>(i).translation().z();\n\n                cloudKeyPoses6D->points[i].x = cloudKeyPoses3D->points[i].x;\n                cloudKeyPoses6D->points[i].y = cloudKeyPoses3D->points[i].y;\n                cloudKeyPoses6D->points[i].z = cloudKeyPoses3D->points[i].z;\n                cloudKeyPoses6D->points[i].roll  = isamCurrentEstimate.at<Pose3>(i).rotation().roll();\n                cloudKeyPoses6D->points[i].pitch = isamCurrentEstimate.at<Pose3>(i).rotation().pitch();\n                cloudKeyPoses6D->points[i].yaw   = isamCurrentEstimate.at<Pose3>(i).rotation().yaw();\n\n                updatePath(cloudKeyPoses6D->points[i]);\n            }\n\n            aLoopIsClosed = false;\n            // ID for reseting IMU pre-integration\n            
++imuPreintegrationResetId;\n        }\n    }\n\n    void updatePath(const PointTypePose& pose_in)\n    {\n        geometry_msgs::PoseStamped pose_stamped;\n        pose_stamped.header.stamp = ros::Time().fromSec(pose_in.time);\n        pose_stamped.header.frame_id = \"odom\";\n        pose_stamped.pose.position.x = pose_in.x;\n        pose_stamped.pose.position.y = pose_in.y;\n        pose_stamped.pose.position.z = pose_in.z;\n        tf::Quaternion q = tf::createQuaternionFromRPY(pose_in.roll, pose_in.pitch, pose_in.yaw);\n        pose_stamped.pose.orientation.x = q.x();\n        pose_stamped.pose.orientation.y = q.y();\n        pose_stamped.pose.orientation.z = q.z();\n        pose_stamped.pose.orientation.w = q.w();\n\n        globalPath.poses.push_back(pose_stamped);\n    }\n\n    void publishOdometry()\n    {\n        // Publish odometry for ROS\n        nav_msgs::Odometry laserOdometryROS;\n        laserOdometryROS.header.stamp = timeLaserInfoStamp;\n        laserOdometryROS.header.frame_id = \"odom\";\n        laserOdometryROS.child_frame_id = \"odom_mapping\";\n        laserOdometryROS.pose.pose.position.x = transformTobeMapped[3];\n        laserOdometryROS.pose.pose.position.y = transformTobeMapped[4];\n        laserOdometryROS.pose.pose.position.z = transformTobeMapped[5];\n        laserOdometryROS.pose.pose.orientation = tf::createQuaternionMsgFromRollPitchYaw(transformTobeMapped[0], transformTobeMapped[1], transformTobeMapped[2]);\n        laserOdometryROS.pose.covariance[0] = double(imuPreintegrationResetId);\n        pubOdomAftMappedROS.publish(laserOdometryROS);\n    }\n\n    void publishFrames()\n    {\n        if (cloudKeyPoses3D->points.empty())\n            return;\n        // publish key poses\n        publishCloud(&pubKeyPoses, cloudKeyPoses3D, timeLaserInfoStamp, \"odom\");\n        // Publish surrounding key frames\n        publishCloud(&pubRecentKeyFrames, laserCloudSurfFromMapDS, timeLaserInfoStamp, \"odom\");\n        // publish 
registered key frame\n        //gc: feature points\n        if (pubRecentKeyFrame.getNumSubscribers() != 0)\n        {\n            pcl::PointCloud<PointType>::Ptr cloudOut(new pcl::PointCloud<PointType>());\n            PointTypePose thisPose6D = trans2PointTypePose(transformTobeMapped);\n            *cloudOut += *transformPointCloud(laserCloudCornerLastDS,  &thisPose6D);\n            *cloudOut += *transformPointCloud(laserCloudSurfLastDS,    &thisPose6D);\n            publishCloud(&pubRecentKeyFrame, cloudOut, timeLaserInfoStamp, \"odom\");\n        }\n        // publish registered high-res raw cloud\n        //gc: whole point_cloud of the scan\n        if (pubCloudRegisteredRaw.getNumSubscribers() != 0)\n        {\n            pcl::PointCloud<PointType>::Ptr cloudOut(new pcl::PointCloud<PointType>());\n            pcl::fromROSMsg(cloudInfo.cloud_deskewed, *cloudOut);\n            PointTypePose thisPose6D = trans2PointTypePose(transformTobeMapped);\n            *cloudOut = *transformPointCloud(cloudOut,  &thisPose6D);\n            publishCloud(&pubCloudRegisteredRaw, cloudOut, timeLaserInfoStamp, \"odom\");\n        }\n        // publish path\n        if (pubPath.getNumSubscribers() != 0)\n        {\n            globalPath.header.stamp = timeLaserInfoStamp;\n            globalPath.header.frame_id = \"odom\";\n            pubPath.publish(globalPath);\n        }\n    }\n};\n\n\nint main(int argc, char** argv)\n{\n    ros::init(argc, argv, \"lio_sam\");\n\n    mapOptimization MO;\n\n    ROS_INFO(\"\\033[1;32m----> Map Optimization Started.\\033[0m\");\n    \n    std::thread loopthread(&mapOptimization::loopClosureThread, &MO);\n    std::thread visualizeMapThread(&mapOptimization::visualizeGlobalMapThread, &MO);\n\n    ros::spin();\n\n    loopthread.join();\n    visualizeMapThread.join();\n\n    return 0;\n}"
  }
]